api added

This commit is contained in:
parent c44a724a05
commit e5829f0525
@@ -0,0 +1,31 @@
FROM python:3.12-slim

WORKDIR /

# Install system dependencies and Poetry
RUN apt-get update && apt-get install -y --no-install-recommends gcc && rm -rf /var/lib/apt/lists/* && pip install --no-cache-dir poetry

# Copy Poetry configuration
COPY /pyproject.toml ./pyproject.toml

# Configure Poetry and install dependencies with optimizations
RUN poetry config virtualenvs.create false && poetry install --no-interaction --no-ansi --no-root --only main && pip cache purge && rm -rf ~/.cache/pypoetry

# Copy application code
COPY /api_services/api_initializer /api_initializer
COPY /api_services/api_controllers /api_controllers
COPY /api_services/api_validations /api_validations
COPY /api_services/schemas /schemas
COPY /api_services/api_modules /api_modules

COPY /api_services/api_middlewares /api_middlewares
COPY /api_services/api_builds/auth_service/endpoints /api_initializer/endpoints
COPY /api_services/api_builds/auth_service/events /api_initializer/events
COPY /api_services/api_builds/auth_service/validations /api_initializer/validations
# COPY /api_services/api_builds/auth_service/index.py /api_initializer/index.py

# Set Python path to include app directory
ENV PYTHONPATH=/ PYTHONUNBUFFERED=1 PYTHONDONTWRITEBYTECODE=1

# Run the application using the configured uvicorn server
CMD ["poetry", "run", "python", "/api_initializer/app.py"]

@@ -0,0 +1,158 @@
from typing import Union

from fastapi import APIRouter, status, Depends
from fastapi.responses import JSONResponse

from config import api_config
from validations.request.auth.validations import (
    RequestLogin,
    RequestResetPassword,
    RequestSelectLiving,
    RequestSelectEmployee,
    RequestCreatePassword,
    RequestChangePassword,
    RequestForgotPasswordPhone,
    RequestForgotPasswordEmail,
    RequestVerifyOTP,
)
from events.auth.events import AuthHandlers, LoginHandler
from endpoints.index import endpoints_index

from api_validations.defaults.validations import CommonHeaders
from api_middlewares.token_provider import TokenProvider


auth_route = APIRouter(prefix="/authentication", tags=["Authentication Cluster"])


auth_route_login = "AuthLoginViaDomainAndCreds"
@auth_route.post(
    path="/login",
    summary="Login via domain and access key: [email] | [phone]",
    description="Login route",
    operation_id=endpoints_index[auth_route_login],
)
def login(data: RequestLogin, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    """Login via domain and access key: [email] | [phone]"""
    return AuthHandlers.LoginHandler.authentication_login_with_domain_and_creds(headers=headers, data=data)


auth_route_select_living = "AuthSelectLiving"
@auth_route.post(
    path="/select",
    summary="Select token object company or occupant type",
    description="Selection of the user's company or occupant type",
    operation_id=endpoints_index[auth_route_select_living],
)
def select_living(data: Union[RequestSelectLiving, RequestSelectEmployee], headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    """Select token object company or occupant type"""
    return AuthHandlers.LoginHandler.authentication_select_company_or_occupant_type(request=headers.request, data=data)


auth_route_create_password = "AuthCreatePassword"
@auth_route.post(
    path="/password/create",
    summary="Create password with access token",
    description="Create password",
    operation_id=endpoints_index[auth_route_create_password],
)
def create_password(data: RequestCreatePassword, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    """Create password with access token"""
    return AuthHandlers.PasswordHandler.create_password(password=data.password, password_token=data.password_token)


auth_route_change_password = "AuthChangePassword"
@auth_route.post(
    path="/password/change",
    summary="Change password with access token",
    description="Change password",
    operation_id=endpoints_index[auth_route_change_password],
)
def change_password(data: RequestChangePassword, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    """Change password with access token"""
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    return None  # TODO: implement password change


auth_route_reset_password = "AuthResetPassword"
@auth_route.post(
    path="/password/reset",
    summary="Reset password with access token",
    description="Reset password",
    operation_id=endpoints_index[auth_route_reset_password],
)
def reset_password(data: RequestResetPassword, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    """Reset password with access token"""
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    return None  # TODO: implement password reset


auth_route_logout = "AuthLogout"
@auth_route.get(
    path="/logout",
    summary="Logout user",
    description="Logout only the single session of the user whose domain is provided",
    operation_id=endpoints_index[auth_route_logout],
)
def logout(headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    """Logout user"""
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    return None  # TODO: implement logout


auth_route_disconnect = "AuthDisconnect"
@auth_route.get(
    path="/disconnect",
    summary="Disconnect all sessions",
    description="Disconnect all sessions of the user in the access token",
    operation_id=endpoints_index[auth_route_disconnect],
)
def disconnect(headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    """Disconnect all sessions"""
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    return None  # TODO: implement disconnect


auth_route_check_token = "AuthCheckToken"
@auth_route.get(
    path="/token/check",
    summary="Check if token is valid",
    description="Check if the access token is valid for the user",
    operation_id=endpoints_index[auth_route_check_token],
)
def check_token(headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    """Check if token is valid"""
    try:
        if LoginHandler.authentication_check_token_valid(access_token=headers.token, domain=headers.domain):
            return JSONResponse(status_code=status.HTTP_200_OK, content={"success": True})
    except Exception as e:
        print(e)
    return JSONResponse(status_code=status.HTTP_401_UNAUTHORIZED, content={"success": False})


auth_route_refresh_token = "AuthRefreshToken"
@auth_route.get(
    path="/token/refresh",
    summary="Refresh token if it is valid",
    description="Refresh the access token if it is valid for the user",
    operation_id=endpoints_index[auth_route_refresh_token],
)
def refresh_token(headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    """Refresh token if it is valid"""
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    return None  # TODO: implement token refresh


auth_route_verify_otp = "AuthVerifyOTP"
@auth_route.post(
    path="/password/verify-otp",
    summary="Verify OTP for password reset",
    description="Verify OTP for password reset",
    operation_id=endpoints_index[auth_route_verify_otp],
)
def verify_otp(data: RequestVerifyOTP, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    """Verify OTP for password reset"""
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    return None  # TODO: implement OTP verification

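For reference, a rough client-side sketch of the two-step flow these routes implement: authenticate, then select a company or living space with the returned token. The base URL and the token header name (whatever api_config.ACCESS_TOKEN_TAG resolves to) are assumptions for illustration.

import httpx

BASE = "http://localhost:8000"  # assumed host/port
TOKEN_HEADER = "token"  # assumption: the value of api_config.ACCESS_TOKEN_TAG

# Step 1: login with domain + credentials (header names mirror the ones read in events.py)
login = httpx.post(
    f"{BASE}/authentication/login",
    headers={"domain": "example.com", "language": "tr", "tz": "GMT+3"},
    json={"access_key": "user@example.com", "password": "secret", "remember_me": False},
).json()

# Step 2: pick one entry from selection_list using the issued token
httpx.post(
    f"{BASE}/authentication/select",
    headers={TOKEN_HEADER: login["access_token"], "domain": "example.com"},
    json={"uuid": login["selection_list"][0]["uu_id"]},
)
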
@@ -0,0 +1,13 @@

endpoints_index: dict = {
    "AuthLoginViaDomainAndCreds": "1b94a704-7768-436d-bc20-655d92b34d83",
    "AuthSelectLiving": "585d578e-2b72-4f71-b996-530fc0613568",
    "AuthCreatePassword": "a4252148-2bac-42df-aa3a-1784f4cbd599",
    "AuthChangePassword": "d55834fa-6d7f-4007-9591-a50d3266b3aa",
    "AuthResetPassword": "29f14043-2a79-4230-bf66-a709ae954dc5",
    "AuthLogout": "616a992a-2a73-4709-a394-f043caa75937",
    "AuthDisconnect": "55dd1df1-4a00-41f9-92a9-fb776aee1cd3",
    "AuthCheckToken": "040e7a48-1ce0-432c-9bd9-5b05c2c7aef3",
    "AuthRefreshToken": "0ca54d41-d9ca-4143-b974-1050d65769b7",
    "AuthVerifyOTP": "4192e7a5-cf52-4d09-8b51-2088d77271d0",
}

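Since the routers feed these values straight into FastAPI's operation_id, they must be unique across the app and are expected to parse as UUIDs; a quick sanity-check sketch (not part of the commit):

import uuid

assert len(set(endpoints_index.values())) == len(endpoints_index)
for operation_id in endpoints_index.values():
    uuid.UUID(operation_id)  # raises ValueError on a malformed id
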
@@ -0,0 +1,22 @@
from fastapi import APIRouter
from .auth.router import auth_route


def get_routes() -> list[APIRouter]:
    """Get all routers"""
    return [auth_route]


def get_safe_endpoint_urls() -> list[tuple[str, str]]:
    """Get all endpoint URLs that are reachable without a token"""
    return [
        ("/", "GET"),
        ("/docs", "GET"),
        ("/redoc", "GET"),
        ("/openapi.json", "GET"),
        ("/metrics", "GET"),
        ("/authentication/login", "POST"),
        ("/authentication/password/reset", "POST"),
        ("/authentication/password/create", "POST"),
        ("/authentication/password/verify-otp", "POST"),
    ]

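The consumer of get_safe_endpoint_urls is not part of this diff; a minimal sketch of how a token middleware might use it, assuming exact path matching:

def requires_token(path: str, method: str) -> bool:
    """Sketch: True when a request must carry an access token."""
    return (path, method.upper()) not in set(get_safe_endpoint_urls())
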
@@ -0,0 +1,3 @@


__all__ = []

@@ -0,0 +1,472 @@
import arrow

from typing import Any, Dict, Optional, Union
from config import api_config
from schemas import (
    Users,
    People,
    BuildLivingSpace,
    BuildParts,
    OccupantTypes,
    Employees,
    Addresses,
    Companies,
    Staff,
    Duty,
    Duties,
    Departments,
    Event2Employee,
    Application2Occupant,
    Event2Occupant,
    Application2Employee,
    RelationshipEmployee2Build,
    Events,
    EndpointRestriction,
)
from api_modules.token.password_module import PasswordModule
from api_controllers.mongo.database import mongo_handler
from api_validations.token.validations import TokenDictType, EmployeeTokenObject, OccupantTokenObject, CompanyToken, OccupantToken, UserType
from api_validations.defaults.validations import CommonHeaders
from api_modules.redis.redis_handlers import RedisHandlers
from validations.password.validations import PasswordHistoryViaUser


class UserHandlers:

    @staticmethod
    def check_user_exists(access_key: str, db_session) -> Users:
        """Check if the user exists in the database; the access key is an email or a phone number."""
        Users.set_session(db_session)
        if "@" in access_key:
            found_user: Users = Users.query.filter(Users.email == access_key.lower()).first()
        else:
            found_user: Users = Users.query.filter(Users.phone_number == access_key.replace(" ", "")).first()
        if not found_user:
            raise ValueError("EYS_0003")
        return found_user

    @staticmethod
    def check_password_valid(domain: str, id_: str, password: str, password_hashed: str) -> bool:
        """Check if the password is valid."""
        if PasswordModule.check_password(domain=domain, id_=id_, password=password, password_hashed=password_hashed):
            return True
        raise ValueError("EYS_0004")

    @staticmethod
    def update_password():
        return  # TODO: implement password update


class LoginHandler:

    @staticmethod
    def is_occupant(email: str):
        return "@" not in str(email) or str(email).split("@")[1] != api_config.ACCESS_EMAIL_EXT

    @staticmethod
    def is_employee(email: str):
        return "@" in str(email) and str(email).split("@")[1] == api_config.ACCESS_EMAIL_EXT

    @classmethod
    def do_employee_login(cls, headers: CommonHeaders, data: Any, db_session):
        """Handle employee login."""

        language = headers.request.headers.get("language", "tr")
        domain = headers.request.headers.get("domain", None)
        timezone = headers.request.headers.get("tz", None) or "GMT+3"

        user_handler, other_domains_list, main_domain = UserHandlers(), [], ""
        found_user = user_handler.check_user_exists(access_key=data.access_key, db_session=db_session)
        with mongo_handler.collection(f"{str(found_user.related_company)}*Domain") as collection:
            result = collection.find_one({"user_uu_id": str(found_user.uu_id)})
            if not result:
                raise ValueError("EYS_00087")
            other_domains_list = result.get("other_domains_list", [])
            main_domain = result.get("main_domain", None)
            if domain not in other_domains_list or not main_domain:
                raise ValueError("EYS_00088")

        if not user_handler.check_password_valid(domain=main_domain, id_=str(found_user.uu_id), password=data.password, password_hashed=found_user.hash_password):
            raise ValueError("EYS_0005")
        list_of_returns = (
            Employees.id, Employees.uu_id, People.id, People.uu_id, Users.id, Users.uu_id, Companies.id, Companies.uu_id,
            Departments.id, Departments.uu_id, Duty.id, Duty.uu_id, Companies.public_name, Companies.company_type, Duty.duty_name,
            Addresses.letter_address,
        )

        list_employee_query = db_session.query(*list_of_returns
            ).join(Staff, Staff.id == Employees.staff_id
            ).join(People, People.id == Employees.people_id
            ).join(Duties, Duties.id == Staff.duties_id
            ).join(Duty, Duty.id == Duties.duties_id
            ).join(Departments, Departments.id == Duties.department_id
            ).join(Companies, Companies.id == Departments.company_id
            ).join(Users, Users.person_id == People.id
            ).outerjoin(Addresses, Addresses.id == Companies.official_address_id
            ).filter(Employees.people_id == found_user.person_id)
        list_employees, list_employees_query_all = [], list_employee_query.all()
        if not list_employees_query_all:
            raise ValueError("No employee found for this user")

        for employee in list_employees_query_all:
            single_employee = {}
            for ix, returns in enumerate(list_of_returns):
                single_employee[str(returns)] = employee[ix]
            list_employees.append(single_employee)
        companies_uu_id_list, companies_id_list, companies_list, duty_uu_id_list, duty_id_list = [], [], [], [], []
        for list_employee in list_employees:
            companies_id_list.append(int(list_employee["Companies.id"]))
            companies_uu_id_list.append(str(list_employee["Companies.uu_id"]))
            duty_uu_id_list.append(str(list_employee["Duty.uu_id"]))
            duty_id_list.append(int(list_employee["Duty.id"]))
            companies_list.append({
                "uu_id": str(list_employee["Employees.uu_id"]),
                "public_name": list_employee["Companies.public_name"],
                "company_type": list_employee["Companies.company_type"],
                "company_address": list_employee["Addresses.letter_address"],
                "duty": list_employee["Duty.duty_name"],
                "company_uuid": str(list_employee["Companies.uu_id"]),
            })
        model_value = EmployeeTokenObject(
            user_type=UserType.employee.value, user_uu_id=str(found_user.uu_id), user_id=found_user.id, person_id=found_user.person_id,
            person_uu_id=str(list_employees[0]["People.uu_id"]), request=dict(headers.request.headers), domain_list=other_domains_list,
            companies_uu_id_list=companies_uu_id_list, companies_id_list=companies_id_list, duty_uu_id_list=duty_uu_id_list, duty_id_list=duty_id_list,
        ).model_dump()
        employee_uuid = str(companies_list[0]["uu_id"])
        set_to_redis_dict = dict(
            user=found_user, token=model_value, add_uuid=employee_uuid,
            header_info=dict(language=headers.language, domain=headers.domain, timezone=headers.timezone),
        )
        user_dict = found_user.get_dict()
        person_dict = found_user.person.get_dict()
        if access_token := RedisHandlers().set_object_to_redis(**set_to_redis_dict):
            return {
                "access_token": access_token,
                "user_type": UserType.employee.name,
                "user": {
                    "uuid": user_dict["uu_id"], "avatar": user_dict["avatar"], "email": user_dict["email"], "phone_number": user_dict["phone_number"], "user_tag": user_dict["user_tag"],
                    "password_expiry_begins": str(arrow.get(user_dict["password_expiry_begins"]).shift(days=int(user_dict["password_expires_day"]))),
                    "person": {
                        "uuid": person_dict["uu_id"], "firstname": person_dict["firstname"], "surname": person_dict["surname"],
                        "middle_name": person_dict["middle_name"], "sex_code": person_dict["sex_code"], "person_tag": person_dict["person_tag"],
                        "country_code": person_dict["country_code"], "birth_date": person_dict["birth_date"],
                    },
                },
                "selection_list": companies_list,
            }
        raise ValueError("Something went wrong")

    @classmethod
    def do_occupant_login(cls, headers: CommonHeaders, data: Any, db_session):
        """Handle occupant login."""
        language = headers.request.headers.get("language", "tr")
        domain = headers.request.headers.get("domain", None)
        timezone = headers.request.headers.get("tz", None) or "GMT+3"
        BuildParts.set_session(db_session)
        OccupantTypes.set_session(db_session)

        user_handler = UserHandlers()
        found_user = user_handler.check_user_exists(access_key=data.access_key, db_session=db_session)
        other_domains_list, main_domain = [], ""
        with mongo_handler.collection(f"{str(found_user.related_company)}*Domain") as collection:
            result = collection.find_one({"user_uu_id": str(found_user.uu_id)})
            if not result:
                raise ValueError("EYS_00087")
            other_domains_list = result.get("other_domains_list", [])
            main_domain = result.get("main_domain", None)
            if domain not in other_domains_list or not main_domain:
                raise ValueError("EYS_00088")

        if not user_handler.check_password_valid(domain=main_domain, id_=str(found_user.uu_id), password=data.password, password_hashed=found_user.hash_password):
            raise ValueError("EYS_0005")

        occupants_selection_dict: Dict[str, Any] = {}
        living_spaces: list[BuildLivingSpace] = BuildLivingSpace.query.filter(BuildLivingSpace.person_id == found_user.person_id).all()

        if not living_spaces:
            raise ValueError("EYS_0006")
        for living_space in living_spaces:
            build_part = BuildParts.query.filter(BuildParts.id == living_space.build_parts_id).first()
            if not build_part:
                raise ValueError("EYS_0007")

            build = build_part.buildings
            occupant_type = OccupantTypes.query.filter(OccupantTypes.id == living_space.occupant_type_id).first()
            occupant_data = {
                "build_living_space_uu_id": str(living_space.uu_id), "part_uu_id": str(build_part.uu_id), "part_name": build_part.part_name(), "part_level": build_part.part_level,
                "occupant_uu_id": str(occupant_type.uu_id), "description": occupant_type.occupant_description, "code": occupant_type.occupant_code,
            }

            build_key = str(build.uu_id)
            if build_key not in occupants_selection_dict:
                occupants_selection_dict[build_key] = {"build_uu_id": build_key, "build_name": build.build_name, "build_no": build.build_no, "occupants": [occupant_data]}
            else:
                occupants_selection_dict[build_key]["occupants"].append(occupant_data)

        person = found_user.person
        model_value = OccupantTokenObject(
            user_type=UserType.occupant.value, user_uu_id=str(found_user.uu_id), user_id=found_user.id, person_id=person.id,
            person_uu_id=str(person.uu_id), domain_list=other_domains_list, request=dict(headers.request.headers), available_occupants=occupants_selection_dict,
        ).model_dump()
        redis_handler = RedisHandlers()
        if access_token := redis_handler.set_object_to_redis(
            user=found_user, token=model_value, add_uuid=str(living_space.uu_id),
            header_info=dict(language=language, domain=domain, timezone=timezone)
        ):
            return {"access_token": access_token, "user_type": UserType.occupant.name, "selection_list": occupants_selection_dict}
        raise ValueError("Something went wrong")

    @classmethod
    def authentication_login_with_domain_and_creds(cls, headers: CommonHeaders, data: Any):
        """
        Authenticate user with domain and credentials.

        Args:
            headers: CommonHeaders object
            data: Request body containing login credentials, e.g.
                {
                    "access_key": "karatay.berkay.sup@evyos.com.tr",
                    "password": "string",
                    "remember_me": false
                }
        Returns:
            SuccessResponse containing the authentication token and user info
        """

        with Users.new_session() as db_session:
            if cls.is_employee(data.access_key):
                return cls.do_employee_login(headers=headers, data=data, db_session=db_session)
            elif cls.is_occupant(data.access_key):
                return cls.do_occupant_login(headers=headers, data=data, db_session=db_session)
            else:
                raise ValueError("Invalid email format")

    @classmethod
    def raise_error_if_request_has_no_token(cls, request: Any) -> None:
        """Validate that the request carries the required token header."""
        if not hasattr(request, "headers"):
            raise ValueError("Request has no headers")
        if not request.headers.get(api_config.ACCESS_TOKEN_TAG):
            raise ValueError("Request has no access token")

    @classmethod
    def get_access_token_from_request(cls, request: Any) -> str:
        """Extract the access token from the request headers."""
        cls.raise_error_if_request_has_no_token(request=request)
        return request.headers.get(api_config.ACCESS_TOKEN_TAG)

    @classmethod
    def handle_employee_selection(cls, access_token: str, data: Any, token_dict: TokenDictType):
        """Handle company selection for an employee token."""
        with Users.new_session() as db_session:
            if data.uuid not in token_dict.companies_uu_id_list:
                raise ValueError("EYS_0011")
            list_of_returns = (
                Employees.id, Employees.uu_id, People.id, People.uu_id, Users.id, Users.uu_id, Companies.id, Companies.uu_id,
                Departments.id, Departments.uu_id, Duty.id, Duty.uu_id, Addresses.id, Addresses.letter_address, Staff.id, Staff.uu_id,
                Duties.id, Duties.uu_id,
            )

            selected_company_query = db_session.query(*list_of_returns
                ).join(Staff, Staff.id == Employees.staff_id
                ).join(People, People.id == Employees.people_id
                ).join(Duties, Duties.id == Staff.duties_id
                ).join(Duty, Duty.id == Duties.duties_id
                ).join(Departments, Departments.id == Duties.department_id
                ).join(Companies, Companies.id == Departments.company_id
                ).join(Users, Users.person_id == People.id
                ).outerjoin(Addresses, Addresses.id == Companies.official_address_id
                ).filter(Employees.uu_id == data.uuid, Users.id == token_dict.user_id)

            selected_company_first = selected_company_query.first()
            if not selected_company_first:
                raise ValueError("EYS_0010")

            result_with_keys_dict = {}
            for ix, selected_company_item in enumerate(selected_company_first):
                result_with_keys_dict[str(list_of_returns[ix])] = selected_company_item

            # Get reachable events
            # reachable_event_codes = Event2Employee.get_event_codes(employee_id=int(result_with_keys_dict['Employees.id']), db=db_session)
            # TODO: erase this bypass later
            filter_endpoints_and_events = db_session.query(EndpointRestriction.operation_uu_id, Events.function_code
                ).join(EndpointRestriction, EndpointRestriction.id == Events.endpoint_id).filter().all()
            reachable_event_codes = {operation_uu_id: function_code for operation_uu_id, function_code in filter_endpoints_and_events}
            # Get reachable applications
            reachable_app_codes = Application2Employee.get_application_codes(employee_id=int(result_with_keys_dict['Employees.id']), db=db_session)

            company_token = CompanyToken(
                company_uu_id=str(result_with_keys_dict['Companies.uu_id']),
                company_id=int(result_with_keys_dict['Companies.id']),
                department_id=int(result_with_keys_dict['Departments.id']),
                department_uu_id=str(result_with_keys_dict['Departments.uu_id']),
                duty_id=int(result_with_keys_dict['Duty.id']),
                duty_uu_id=str(result_with_keys_dict['Duty.uu_id']),
                bulk_duties_id=int(result_with_keys_dict['Duties.id']),
                staff_id=int(result_with_keys_dict['Staff.id']),
                staff_uu_id=str(result_with_keys_dict['Staff.uu_id']),
                employee_id=int(result_with_keys_dict['Employees.id']),
                employee_uu_id=str(result_with_keys_dict['Employees.uu_id']),
                reachable_event_codes=reachable_event_codes,
                reachable_app_codes=reachable_app_codes,
            )
            redis_handler = RedisHandlers()
            redis_handler.update_token_at_redis(
                token=access_token, add_payload=company_token, add_uuid=str(result_with_keys_dict['Employees.uu_id'])
            )
            return {"selected_uu_id": data.uuid}

    @classmethod
    def handle_occupant_selection(cls, access_token: str, data: Any, token_dict: TokenDictType):
        """Handle occupant type selection"""
        with BuildLivingSpace.new_session() as db:
            # Get selected occupant type
            selected_build_living_space: BuildLivingSpace = BuildLivingSpace.filter_one(BuildLivingSpace.uu_id == data.build_living_space_uu_id, db=db).data
            if not selected_build_living_space:
                raise ValueError("EYS_0012")

            # Get reachable events
            reachable_event_codes = Event2Occupant.get_event_codes(build_living_space_id=selected_build_living_space.id, db=db)
            occupant_type = OccupantTypes.filter_one_system(OccupantTypes.id == selected_build_living_space.occupant_type_id, db=db).data
            build_part = BuildParts.filter_one(BuildParts.id == selected_build_living_space.build_parts_id, db=db).data
            build = build_part.buildings
            reachable_app_codes = Application2Occupant.get_application_codes(build_living_space_id=selected_build_living_space.id, db=db)
            # responsible_employee = Employees.filter_one(
            #     Employees.id == build_part.responsible_employee_id,
            #     db=db,
            # ).data
            # related_company = RelationshipEmployee2Build.filter_one(
            #     RelationshipEmployee2Build.member_id == build.id,
            #     db=db,
            # ).data
            # Get company
            # company_related = Companies.filter_one(
            #     Companies.id == related_company.company_id,
            #     db=db,
            # ).data

            # Create occupant token
            occupant_token = OccupantToken(
                living_space_id=selected_build_living_space.id,
                living_space_uu_id=str(selected_build_living_space.uu_id),
                occupant_type_id=occupant_type.id,
                occupant_type_uu_id=str(occupant_type.uu_id),
                occupant_type=occupant_type.occupant_type,
                build_id=build.id,
                build_uuid=str(build.uu_id),
                build_part_id=build_part.id,
                build_part_uuid=str(build_part.uu_id),
                # responsible_employee_id=responsible_employee.id,
                # responsible_employee_uuid=str(responsible_employee.uu_id),
                # responsible_company_id=company_related.id,
                # responsible_company_uuid=str(company_related.uu_id),
                reachable_event_codes=reachable_event_codes,
                reachable_app_codes=reachable_app_codes,
            )
            redis_handler = RedisHandlers()
            redis_handler.update_token_at_redis(
                token=access_token, add_payload=occupant_token, add_uuid=occupant_token.living_space_uu_id
            )
            return {"selected_uu_id": occupant_token.living_space_uu_id}

    @classmethod  # Requires auth context
    def authentication_select_company_or_occupant_type(cls, request: Any, data: Any):
        """
        Handle selection of a company or an occupant type. The request body is either
        {"data": {"company_uu_id": "e9869a25-ba4d-49dc-bb0d-8286343b184b"}}
        or
        {"data": {"build_living_space_uu_id": "e9869a25-ba4d-49dc-bb0d-8286343b184b"}}
        """
        access_token = request.headers.get(api_config.ACCESS_TOKEN_TAG, None)
        if not access_token:
            raise ValueError("EYS_0001")

        token_object = RedisHandlers.get_object_from_redis(access_token=access_token)
        if token_object.is_employee:
            return cls.handle_employee_selection(access_token=access_token, data=data, token_dict=token_object)
        elif token_object.is_occupant:
            return cls.handle_occupant_selection(access_token=access_token, data=data, token_dict=token_object)

    @classmethod
    def authentication_check_token_valid(cls, domain: str, access_token: str) -> bool:
        """Return True when the access token still resolves to a session in Redis."""
        redis_handler = RedisHandlers()
        return bool(redis_handler.get_object_from_redis(access_token=access_token))


class PasswordHandler:

    @staticmethod
    def create_password(password, password_token=None):
        with Users.new_session() as db_session:
            Users.set_session(db_session)
            found_user = Users.query.filter(Users.password_token == password_token).first()
            if not found_user:
                raise ValueError("EYS_0031")

            if found_user.password_token:
                replace_day = 0
                try:
                    replace_day = int(str(found_user.password_expires_day or 0).split(",")[0].replace(" days", ""))
                except Exception:
                    pass  # fall back to an expiry window of 0 days
                token_is_expired = arrow.now() >= arrow.get(str(found_user.password_expiry_begins)).shift(days=replace_day)
                if not str(password_token) == str(found_user.password_token) or token_is_expired:
                    raise ValueError("EYS_0032")

            collection_name = f"{found_user.related_company}*Domain"
            with mongo_handler.collection(collection_name) as mongo_engine:
                domain_via_user = mongo_engine.find_one({"user_uu_id": str(found_user.uu_id)})
                if not domain_via_user:
                    raise ValueError("EYS_0024")
                domain_via_user = domain_via_user.get("main_domain", None)
            new_password_dict = {
                "password": PasswordModule.create_hashed_password(domain=domain_via_user, id_=str(found_user.uu_id), password=password),
                "date": str(arrow.now().date()),
            }
            history_dict = PasswordHistoryViaUser(user_uu_id=str(found_user.uu_id), password_add=new_password_dict, access_history_detail={"request": "", "ip": ""})
            found_user.password_expiry_begins = str(arrow.now())
            found_user.hash_password = new_password_dict.get("password")
            found_user.password_token = ""

            collection_name = f"{found_user.related_company}*PasswordHistory"
            with mongo_handler.collection(collection_name) as mongo_engine_sc:
                password_history_item = mongo_engine_sc.find_one({"user_uu_id": str(found_user.uu_id)})
                if not password_history_item:
                    mongo_engine_sc.insert_one(document={"user_uu_id": str(found_user.uu_id), "password_history": []})
                    password_history_item = mongo_engine_sc.find_one({"user_uu_id": str(found_user.uu_id)})
                password_history_list = password_history_item.get("password_history", [])
                hashed_password = history_dict.password_add.get("password")
                for password_in_history in password_history_list:
                    if str(password_in_history.get("password")) == str(hashed_password):
                        raise ValueError("EYS_0032")
                if len(password_history_list) > 3:
                    password_history_list.pop(0)
                password_history_list.append(history_dict.password_add)
                mongo_engine_sc.update_one(
                    filter={"user_uu_id": str(found_user.uu_id)},
                    update={"$set": {
                        "password_history": password_history_list, "modified_at": arrow.now().timestamp(), "access_history_detail": history_dict.access_history_detail
                    }}, upsert=True,
                )
            found_user.save(db=db_session)
            return found_user

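A standalone illustration of the history policy enforced above: reuse of any stored hash is rejected, and at most the four most recent hashes are retained because the list is capped before appending.

# Illustration only (values invented): the keep-last-four rotation above.
history = [{"password": h} for h in ("h1", "h2", "h3", "h4")]
new_hash = "h5"
assert all(item["password"] != new_hash for item in history)  # reuse check
if len(history) > 3:
    history.pop(0)          # drop the oldest entry
history.append({"password": new_hash})
assert [item["password"] for item in history] == ["h2", "h3", "h4", "h5"]
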

class AuthHandlers:

    LoginHandler: LoginHandler = LoginHandler()
    PasswordHandler: PasswordHandler = PasswordHandler()

@@ -0,0 +1,10 @@


events_index: dict = {
    "Slot1": "",
    "Slot2": "",
    "Slot3": "",
    "Slot4": "",
    "Slot5": "",
}

@@ -0,0 +1,19 @@
from typing import Optional
from pydantic import BaseModel


class DomainViaUser(BaseModel):
    user_uu_id: str
    main_domain: str
    other_domains_list: Optional[list] = None


class PasswordHistoryViaUser(BaseModel):
    user_uu_id: str
    password_add: dict
    access_history_detail: Optional[dict] = None


class AccessHistoryViaUser(BaseModel):
    user_uu_id: str
    access_history: dict

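DomainViaUser mirrors the documents the auth events read from the per-company `*Domain` Mongo collection; an illustrative document (all values invented):

example_domain_doc = {
    "user_uu_id": "00000000-0000-0000-0000-000000000000",
    "main_domain": "example.com.tr",
    "other_domains_list": ["example.com.tr", "portal.example.com.tr"],
}
DomainViaUser(**example_domain_doc)  # validates against the model above
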
@@ -0,0 +1,55 @@
from typing import Optional
from pydantic import BaseModel


class RequestLogin(BaseModel):
    access_key: str
    password: str
    remember_me: Optional[bool] = None


class RequestVerifyOTP(BaseModel):
    token: str
    otp: str


class RequestSelectEmployee(BaseModel):
    uuid: str


class RequestResetPassword(BaseModel):
    password_token: str
    password: str
    re_password: str


class RequestSelectLiving(BaseModel):
    uuid: str


class RequestCreatePassword(BaseModel):
    password_token: str
    password: str
    re_password: str

    @property
    def is_valid(self):
        return self.password == self.re_password


class RequestChangePassword(BaseModel):
    old_password: str
    password: str
    re_password: str

    @property
    def is_valid(self):
        return self.password == self.re_password


class RequestForgotPasswordEmail(BaseModel):
    email: str


class RequestForgotPasswordPhone(BaseModel):
    phone_number: str

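Note that is_valid is a plain property, so nothing checks it during request parsing; one hedged alternative (pydantic v2) is to enforce the match at validation time:

# Sketch, not part of the commit: reject mismatched passwords at parse time.
from pydantic import BaseModel, model_validator

class RequestCreatePasswordStrict(BaseModel):
    password_token: str
    password: str
    re_password: str

    @model_validator(mode="after")
    def passwords_match(self):
        if self.password != self.re_password:
            raise ValueError("password and re_password do not match")
        return self
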
@@ -0,0 +1,31 @@
FROM python:3.12-slim

WORKDIR /

# Install system dependencies and Poetry
RUN apt-get update && apt-get install -y --no-install-recommends gcc && rm -rf /var/lib/apt/lists/* && pip install --no-cache-dir poetry

# Copy Poetry configuration
COPY /pyproject.toml ./pyproject.toml

# Configure Poetry and install dependencies with optimizations
RUN poetry config virtualenvs.create false && poetry install --no-interaction --no-ansi --no-root --only main && pip cache purge && rm -rf ~/.cache/pypoetry

# Copy application code
COPY /api_services/api_initializer /api_initializer
COPY /api_services/api_controllers /api_controllers
COPY /api_services/api_validations /api_validations
COPY /api_services/schemas /schemas
COPY /api_services/api_modules /api_modules

COPY /api_services/api_middlewares /api_middlewares
COPY /api_services/api_builds/building_service/endpoints /api_initializer/endpoints
COPY /api_services/api_builds/building_service/events /api_initializer/events
COPY /api_services/api_builds/building_service/validations /api_initializer/validations
COPY /api_services/api_builds/building_service/index.py /api_initializer/index.py

# Set Python path to include app directory
ENV PYTHONPATH=/ PYTHONUNBUFFERED=1 PYTHONDONTWRITEBYTECODE=1

# Run the application using the configured uvicorn server
CMD ["poetry", "run", "python", "/api_initializer/app.py"]

@@ -0,0 +1,71 @@
from fastapi import APIRouter, Depends

from index import endpoints_index
from events.areas.cluster import AreaRouterCluster

from api_validations.defaults.validations import CommonHeaders
from api_validations.response.pagination import PaginateOnly
from api_middlewares.token_provider import TokenProvider


area_endpoint_route = APIRouter(prefix="/areas", tags=["Areas Cluster"])


area_list = "AreaList"
@area_endpoint_route.post(
    path="/list",
    description="List all areas endpoint",
    operation_id=endpoints_index[area_list],
)
def area_list_route(data: PaginateOnly, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_key = TokenProvider.retrieve_event_codes(endpoint_code=headers.operation_id, token=token_object)
    found_cluster = AreaRouterCluster.get_event_cluster(area_list)
    event_cluster_matched = found_cluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(list_options=data, headers=headers)


area_create = "AreaCreate"
@area_endpoint_route.post(
    path="/create",
    description="Create area endpoint",
    operation_id=endpoints_index[area_create],
)
def area_create_route(data: dict, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_key = TokenProvider.retrieve_event_codes(endpoint_code=headers.operation_id, token=token_object)
    found_cluster = AreaRouterCluster.get_event_cluster(area_create)
    event_cluster_matched = found_cluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(data=data, headers=headers)


area_update = "AreaUpdate"
@area_endpoint_route.post(
    path="/update/{uu_id}",
    description="Update area endpoint",
    operation_id=endpoints_index[area_update],
)
def area_update_route(uu_id: str, data: dict, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_key = TokenProvider.retrieve_event_codes(endpoint_code=headers.operation_id, token=token_object)
    found_cluster = AreaRouterCluster.get_event_cluster(area_update)
    event_cluster_matched = found_cluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(uu_id=uu_id, data=data, headers=headers)


area_delete = "AreaDelete"
@area_endpoint_route.post(
    path="/delete/{uu_id}",
    description="Delete area endpoint",
    operation_id=endpoints_index[area_delete],
)
def area_delete_route(uu_id: str, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_key = TokenProvider.retrieve_event_codes(endpoint_code=headers.operation_id, token=token_object)
    found_cluster = AreaRouterCluster.get_event_cluster(area_delete)
    event_cluster_matched = found_cluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(uu_id=uu_id, headers=headers)

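The same token-to-event dispatch recurs in every route in this file and in the other routers of this commit; a possible shared helper, sketched under the assumption that every event callable accepts keyword arguments:

def dispatch(router_cluster, endpoint_name: str, headers: CommonHeaders, **call_kwargs):
    # Resolve the caller's token, map the endpoint to an event key, and invoke
    # the matched event, exactly as each route body does inline.
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_key = TokenProvider.retrieve_event_codes(endpoint_code=headers.operation_id, token=token_object)
    matched = router_cluster.get_event_cluster(endpoint_name).match_event(event_key=event_key)
    return matched.event_callable(headers=headers, **call_kwargs)
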
@@ -0,0 +1,72 @@
from fastapi import APIRouter, Depends

from index import endpoints_index
from events.building_parts.cluster import PartsRouterCluster

from api_validations.defaults.validations import CommonHeaders
from api_validations.response.pagination import PaginateOnly
from api_middlewares.token_provider import TokenProvider


parts_endpoint_route = APIRouter(prefix="/parts", tags=["Parts Cluster"])


parts_list = "PartsList"
@parts_endpoint_route.post(
    path="/list",
    description="List all parts endpoint",
    operation_id=endpoints_index[parts_list],
)
def parts_list_route(data: PaginateOnly, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_key = TokenProvider.retrieve_event_codes(endpoint_code=headers.operation_id, token=token_object)
    found_cluster = PartsRouterCluster.get_event_cluster(parts_list)
    event_cluster_matched = found_cluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(list_options=data, headers=headers)


parts_create = "PartsCreate"
@parts_endpoint_route.post(
    path="/create",
    description="Create part endpoint",
    operation_id=endpoints_index[parts_create],
)
def parts_create_route(data: dict, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_key = TokenProvider.retrieve_event_codes(endpoint_code=headers.operation_id, token=token_object)
    found_cluster = PartsRouterCluster.get_event_cluster(parts_create)
    event_cluster_matched = found_cluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(data=data, headers=headers)


parts_update = "PartsUpdate"
@parts_endpoint_route.post(
    path="/update/{uu_id}",
    description="Update part endpoint",
    operation_id=endpoints_index[parts_update],
)
def parts_update_route(uu_id: str, data: dict, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_key = TokenProvider.retrieve_event_codes(endpoint_code=headers.operation_id, token=token_object)
    found_cluster = PartsRouterCluster.get_event_cluster(parts_update)
    event_cluster_matched = found_cluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(uu_id=uu_id, data=data, headers=headers)


parts_delete = "PartsDelete"
@parts_endpoint_route.post(
    path="/delete/{uu_id}",
    description="Delete part endpoint",
    operation_id=endpoints_index[parts_delete],
)
def parts_delete_route(uu_id: str, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_key = TokenProvider.retrieve_event_codes(endpoint_code=headers.operation_id, token=token_object)
    found_cluster = PartsRouterCluster.get_event_cluster(parts_delete)
    event_cluster_matched = found_cluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(uu_id=uu_id, headers=headers)

@@ -0,0 +1,71 @@
from fastapi import APIRouter, Depends

from index import endpoints_index
from events.builds.cluster import BuildRouterCluster

from api_validations.defaults.validations import CommonHeaders
from api_validations.response.pagination import PaginateOnly
from api_middlewares.token_provider import TokenProvider


build_endpoint_route = APIRouter(prefix="/builds", tags=["Builds Cluster"])


build_list = "BuildList"
@build_endpoint_route.post(
    path="/list",
    description="List all builds endpoint",
    operation_id=endpoints_index[build_list],
)
def build_list_route(data: PaginateOnly, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_key = TokenProvider.retrieve_event_codes(endpoint_code=headers.operation_id, token=token_object)
    found_cluster = BuildRouterCluster.get_event_cluster(build_list)
    event_cluster_matched = found_cluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(list_options=data, headers=headers)


build_create = "BuildCreate"
@build_endpoint_route.post(
    path="/create",
    description="Create build endpoint",
    operation_id=endpoints_index[build_create],
)
def build_create_route(data: dict, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_key = TokenProvider.retrieve_event_codes(endpoint_code=headers.operation_id, token=token_object)
    found_cluster = BuildRouterCluster.get_event_cluster(build_create)
    event_cluster_matched = found_cluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(data=data, headers=headers)


build_update = "BuildUpdate"
@build_endpoint_route.post(
    path="/update/{uu_id}",
    description="Update build endpoint",
    operation_id=endpoints_index[build_update],
)
def build_update_route(uu_id: str, data: dict, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_key = TokenProvider.retrieve_event_codes(endpoint_code=headers.operation_id, token=token_object)
    found_cluster = BuildRouterCluster.get_event_cluster(build_update)
    event_cluster_matched = found_cluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(uu_id=uu_id, data=data, headers=headers)


build_delete = "BuildDelete"
@build_endpoint_route.post(
    path="/delete/{uu_id}",
    description="Delete build endpoint",
    operation_id=endpoints_index[build_delete],
)
def build_delete_route(uu_id: str, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_key = TokenProvider.retrieve_event_codes(endpoint_code=headers.operation_id, token=token_object)
    found_cluster = BuildRouterCluster.get_event_cluster(build_delete)
    event_cluster_matched = found_cluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(uu_id=uu_id, headers=headers)

@@ -0,0 +1,72 @@
from fastapi import APIRouter, Depends

from index import endpoints_index
from events.living_space.cluster import LivingSpaceRouterCluster

from api_validations.defaults.validations import CommonHeaders
from api_validations.response.pagination import PaginateOnly
from api_middlewares.token_provider import TokenProvider


living_space_endpoint_route = APIRouter(prefix="/living-space", tags=["Living Space Cluster"])


living_space_list = "LivingSpaceList"
@living_space_endpoint_route.post(
    path="/list",
    description="List all living spaces endpoint",
    operation_id=endpoints_index[living_space_list],
)
def living_space_list_route(data: PaginateOnly, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_key = TokenProvider.retrieve_event_codes(endpoint_code=headers.operation_id, token=token_object)
    found_cluster = LivingSpaceRouterCluster.get_event_cluster(living_space_list)
    event_cluster_matched = found_cluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(list_options=data, headers=headers)


living_space_create = "LivingSpaceCreate"
@living_space_endpoint_route.post(
    path="/create",
    description="Create living space endpoint",
    operation_id=endpoints_index[living_space_create],
)
def living_space_create_route(data: dict, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_key = TokenProvider.retrieve_event_codes(endpoint_code=headers.operation_id, token=token_object)
    found_cluster = LivingSpaceRouterCluster.get_event_cluster(living_space_create)
    event_cluster_matched = found_cluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(data=data, headers=headers)


living_space_update = "LivingSpaceUpdate"
@living_space_endpoint_route.post(
    path="/update/{uu_id}",
    description="Update living space endpoint",
    operation_id=endpoints_index[living_space_update],
)
def living_space_update_route(uu_id: str, data: dict, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_key = TokenProvider.retrieve_event_codes(endpoint_code=headers.operation_id, token=token_object)
    found_cluster = LivingSpaceRouterCluster.get_event_cluster(living_space_update)
    event_cluster_matched = found_cluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(uu_id=uu_id, data=data, headers=headers)


living_space_delete = "LivingSpaceDelete"
@living_space_endpoint_route.post(
    path="/delete/{uu_id}",
    description="Delete living space endpoint",
    operation_id=endpoints_index[living_space_delete],
)
def living_space_delete_route(uu_id: str, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_key = TokenProvider.retrieve_event_codes(endpoint_code=headers.operation_id, token=token_object)
    found_cluster = LivingSpaceRouterCluster.get_event_cluster(living_space_delete)
    event_cluster_matched = found_cluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(uu_id=uu_id, headers=headers)

@@ -0,0 +1,19 @@
from fastapi import APIRouter
from .builds.router import build_endpoint_route
from .building_parts.router import parts_endpoint_route
from .areas.router import area_endpoint_route
from .living_space.router import living_space_endpoint_route


def get_routes() -> list[APIRouter]:
    return [build_endpoint_route, parts_endpoint_route, area_endpoint_route, living_space_endpoint_route]


def get_safe_endpoint_urls() -> list[tuple[str, str]]:
    return [
        ("/", "GET"),
        ("/docs", "GET"),
        ("/redoc", "GET"),
        ("/openapi.json", "GET"),
        ("/metrics", "GET"),
    ]

@@ -0,0 +1,11 @@
from .builds.cluster import BuildRouterCluster
from .areas.cluster import AreaRouterCluster
from .living_space.cluster import LivingSpaceRouterCluster
from .building_parts.cluster import PartsRouterCluster

__all__ = [
    "BuildRouterCluster",
    "AreaRouterCluster",
    "LivingSpaceRouterCluster",
    "PartsRouterCluster",
]

@@ -0,0 +1,27 @@
from api_initializer.event_clusters import EventCluster, RouterCluster
from index import endpoints_index
from .supers_events import (
    SuperAreaListEvent,
    SuperAreaCreateEvent,
    SuperAreaUpdateEvent,
    SuperAreaDeleteEvent,
)

AreaRouterCluster = RouterCluster(name="AreaRouterCluster")

AreaListEventCluster = EventCluster(name="AreaListEventCluster", endpoint_uu_id=endpoints_index["AreaList"])
AreaListEventCluster.add_event(SuperAreaListEvent)

AreaCreateEventCluster = EventCluster(name="AreaCreateEventCluster", endpoint_uu_id=endpoints_index["AreaCreate"])
AreaCreateEventCluster.add_event(SuperAreaCreateEvent)

AreaUpdateEventCluster = EventCluster(name="AreaUpdateEventCluster", endpoint_uu_id=endpoints_index["AreaUpdate"])
AreaUpdateEventCluster.add_event(SuperAreaUpdateEvent)

AreaDeleteEventCluster = EventCluster(name="AreaDeleteEventCluster", endpoint_uu_id=endpoints_index["AreaDelete"])
AreaDeleteEventCluster.add_event(SuperAreaDeleteEvent)

AreaRouterCluster.set_event_cluster(AreaListEventCluster)
AreaRouterCluster.set_event_cluster(AreaCreateEventCluster)
AreaRouterCluster.set_event_cluster(AreaUpdateEventCluster)
AreaRouterCluster.set_event_cluster(AreaDeleteEventCluster)

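api_initializer.event_clusters itself is not in this diff; a minimal sketch consistent with how it is used above (all names and behavior here are assumptions):

class Event:
    def __init__(self, name, key, request_validator=None, response_validator=None, description=""):
        self.name, self.key, self.description = name, key, description
        self.request_validator, self.response_validator = request_validator, response_validator
        self.event_callable = None  # assigned later by the supers_events modules

class EventCluster:
    def __init__(self, name, endpoint_uu_id):
        self.name, self.endpoint_uu_id, self.events = name, endpoint_uu_id, []
    def add_event(self, event):
        self.events.append(event)
    def match_event(self, event_key):
        # pick the event whose key matches the code resolved from the token
        return next(event for event in self.events if event.key == event_key)

class RouterCluster:
    def __init__(self, name):
        self.name, self.clusters = name, {}
    def set_event_cluster(self, cluster):
        self.clusters[cluster.name] = cluster
    def get_event_cluster(self, endpoint_name):
        # assumption: resolved from the short endpoint name (e.g. "AreaList")
        return next(c for c in self.clusters.values() if endpoint_name in c.name)
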
@@ -0,0 +1,122 @@
from typing import Any

from api_initializer.event_clusters import Event
from api_validations.response import (
    PaginateOnly,
    Pagination,
    PaginationResult,
    PostgresResponseSingle,
    PostgresResponse,
    EndpointResponse
)
from schemas import (
    BuildArea,
    Build,
    BuildParts,
    AccountRecords,
)
from api_validations.defaults.validations import CommonHeaders
from api_middlewares.token_provider import TokenProvider  # used below to resolve the token object


# List all areas, super user
SuperAreaListEvent = Event(
    name="super_area_list",
    key="0d5ba7be-028c-43ce-9a99-f495f812a835",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Area List all endpoint",
)

# Create area, super user
SuperAreaCreateEvent = Event(
    name="super_area_create",
    key="0ba2a06d-f4fa-47b9-a305-2225414ffc4a",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Area Create endpoint",
)

# Update area, super user
SuperAreaUpdateEvent = Event(
    name="super_area_update",
    key="ecec956a-eadf-4556-b4e1-4ee81a6b8fb2",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Area Update endpoint",
)

# Delete area, super user
SuperAreaDeleteEvent = Event(
    name="super_area_delete",
    key="4b2ca548-4113-4942-8a76-1f4337fba98a",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Area Delete endpoint",
)


def super_area_list_callable(list_options: PaginateOnly, headers: CommonHeaders):
    list_options = PaginateOnly(**list_options.model_dump())
    # Resolve the token object from Redis, the same way the routers do.
    token = TokenProvider.get_dict_from_redis(token=headers.token)
    if token.is_employee:
        raise Exception("Forbidden for employees")

    # TODO: a Pydantic model must be implemented for list_options.query
    with AccountRecords.new_session() as db_session:
        AccountRecords.set_session(db_session)
        list_of_fields = [
            AccountRecords.iban,
            AccountRecords.bank_date,
            AccountRecords.currency,
            AccountRecords.currency_value,
            AccountRecords.process_comment,
            AccountRecords.add_comment_note,
            AccountRecords.receive_debit,
            AccountRecords.is_email_send,
            AccountRecords.is_notification_send,
        ]
        account_records_query = db_session.query(*list_of_fields
            ).join(BuildParts, BuildParts.id == AccountRecords.build_parts_id
            ).filter(BuildParts.id == token.selected_occupant.build_part_id)
        if list_options.query:
            account_records_query = account_records_query.filter(*AccountRecords.convert(list_options.query))

        pagination = Pagination(data=account_records_query)
        pagination.change(**list_options.model_dump())
        pagination_result = PaginationResult(data=account_records_query, pagination=pagination)
        return EndpointResponse(message="MSG0003-LIST", pagination_result=pagination_result).response


SuperAreaListEvent.event_callable = super_area_list_callable

def super_area_create_callable(data, headers: CommonHeaders):
|
||||
return {
|
||||
"message": "MSG0001-INSERT",
|
||||
"data": data,
|
||||
"completed": True,
|
||||
}
|
||||
|
||||
|
||||
SuperAreaCreateEvent.event_callable = super_area_create_callable
|
||||
|
||||
|
||||
def super_area_update_callable(data, headers: CommonHeaders):
|
||||
return {
|
||||
"message": "MSG0002-UPDATE",
|
||||
"data": data,
|
||||
"completed": True,
|
||||
}
|
||||
|
||||
|
||||
SuperAreaUpdateEvent.event_callable = super_area_update_callable
|
||||
|
||||
|
||||
def super_area_delete_callable(uu_id: str, headers: CommonHeaders):
|
||||
return {
|
||||
"message": "MSG0003-DELETE",
|
||||
"data": data,
|
||||
"completed": True,
|
||||
}
|
||||
|
||||
|
||||
SuperAreaDeleteEvent.event_callable = super_area_delete_callable
|
||||
|
|
@ -0,0 +1,27 @@
from api_initializer.event_clusters import EventCluster, RouterCluster
from index import endpoints_index
from .supers_events import (
    SuperPartsListEvent,
    SuperPartsCreateEvent,
    SuperPartsUpdateEvent,
    SuperPartsDeleteEvent,
)

PartsRouterCluster = RouterCluster(name="PartsRouterCluster")

PartsListEventCluster = EventCluster(name="PartsListEventCluster", endpoint_uu_id=endpoints_index["PartsList"])
PartsListEventCluster.add_event(SuperPartsListEvent)

PartsCreateEventCluster = EventCluster(name="PartsCreateEventCluster", endpoint_uu_id=endpoints_index["PartsCreate"])
PartsCreateEventCluster.add_event(SuperPartsCreateEvent)

PartsUpdateEventCluster = EventCluster(name="PartsUpdateEventCluster", endpoint_uu_id=endpoints_index["PartsUpdate"])
PartsUpdateEventCluster.add_event(SuperPartsUpdateEvent)

PartsDeleteEventCluster = EventCluster(name="PartsDeleteEventCluster", endpoint_uu_id=endpoints_index["PartsDelete"])
PartsDeleteEventCluster.add_event(SuperPartsDeleteEvent)

PartsRouterCluster.set_event_cluster(PartsListEventCluster)
PartsRouterCluster.set_event_cluster(PartsCreateEventCluster)
PartsRouterCluster.set_event_cluster(PartsUpdateEventCluster)
PartsRouterCluster.set_event_cluster(PartsDeleteEventCluster)
@ -0,0 +1,98 @@
from typing import Any

from api_initializer.event_clusters import Event
from api_validations.response import (
    PaginateOnly,
    Pagination,
    PaginationResult,
    PostgresResponseSingle,
    PostgresResponse,
    EndpointResponse,
)
from schemas import (
    Build,
    BuildParts,
    AccountRecords,
)
from api_validations.defaults.validations import CommonHeaders


# List all endpoint Super Parts
SuperPartsListEvent = Event(
    name="super_parts_list",
    key="018e659d-380d-4b2a-b5a4-d77530cb8de0",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Parts List all endpoint",
)

# Create endpoint Super Parts
SuperPartsCreateEvent = Event(
    name="super_parts_create",
    key="fdfede54-f0e7-4d48-8eae-269479ad9abb",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Parts Create endpoint",
)

# Update endpoint Super Parts
SuperPartsUpdateEvent = Event(
    name="super_parts_update",
    key="ace6137c-fe8e-45bd-ae51-bc1c293f8373",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Parts Update endpoint",
)

# Delete endpoint Super Parts
SuperPartsDeleteEvent = Event(
    name="super_parts_delete",
    key="8fb7f505-7a3a-4260-9959-ae0e5c8f9bfe",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Parts Delete endpoint",
)


def super_parts_list_callable(list_options: PaginateOnly, headers: CommonHeaders):
    return {
        "message": "MSG0003-LIST",
        "data": None,
        "completed": True,
    }


SuperPartsListEvent.event_callable = super_parts_list_callable


def super_parts_create_callable(data, headers: CommonHeaders):
    return {
        "message": "MSG0001-INSERT",
        "data": None,
        "completed": True,
    }


SuperPartsCreateEvent.event_callable = super_parts_create_callable


def super_parts_update_callable(data, headers: CommonHeaders):
    return {
        "message": "MSG0002-UPDATE",
        "data": None,
        "completed": True,
    }


SuperPartsUpdateEvent.event_callable = super_parts_update_callable


def super_parts_delete_callable(uu_id: str, headers: CommonHeaders):
    return {
        "message": "MSG0003-DELETE",
        "data": None,
        "completed": True,
    }


SuperPartsDeleteEvent.event_callable = super_parts_delete_callable
@ -0,0 +1,27 @@
from api_initializer.event_clusters import EventCluster, RouterCluster
from index import endpoints_index
from .supers_events import (
    SuperBuildListEvent,
    SuperBuildCreateEvent,
    SuperBuildUpdateEvent,
    SuperBuildDeleteEvent,
)

BuildRouterCluster = RouterCluster(name="BuildRouterCluster")

BuildListEventCluster = EventCluster(name="BuildListEventCluster", endpoint_uu_id=endpoints_index["BuildList"])
BuildListEventCluster.add_event(SuperBuildListEvent)

BuildCreateEventCluster = EventCluster(name="BuildCreateEventCluster", endpoint_uu_id=endpoints_index["BuildCreate"])
BuildCreateEventCluster.add_event(SuperBuildCreateEvent)

BuildUpdateEventCluster = EventCluster(name="BuildUpdateEventCluster", endpoint_uu_id=endpoints_index["BuildUpdate"])
BuildUpdateEventCluster.add_event(SuperBuildUpdateEvent)

BuildDeleteEventCluster = EventCluster(name="BuildDeleteEventCluster", endpoint_uu_id=endpoints_index["BuildDelete"])
BuildDeleteEventCluster.add_event(SuperBuildDeleteEvent)

BuildRouterCluster.set_event_cluster(BuildListEventCluster)
BuildRouterCluster.set_event_cluster(BuildCreateEventCluster)
BuildRouterCluster.set_event_cluster(BuildUpdateEventCluster)
BuildRouterCluster.set_event_cluster(BuildDeleteEventCluster)
@ -0,0 +1,118 @@
from typing import Any

from api_initializer.event_clusters import Event
from api_validations.response import (
    PaginateOnly,
    Pagination,
    PaginationResult,
    PostgresResponseSingle,
    PostgresResponse,
    EndpointResponse,
)
from schemas import (
    Build,
    BuildParts,
    AccountRecords,
)
from api_validations.defaults.validations import CommonHeaders


# List all endpoint Super Build
SuperBuildListEvent = Event(
    name="build_list",
    key="e8586858-db39-4520-bb1a-338ab9c5f043",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Build List all endpoint",
)

SuperBuildCreateEvent = Event(
    name="build_create",
    key="79519e0f-c4a6-4afc-a494-d0e547ba39bc",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Build Create endpoint",
)

SuperBuildUpdateEvent = Event(
    name="build_update",
    key="ca51080e-11f2-46f7-a1ba-caa1c40b3fd6",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Build Update endpoint",
)

SuperBuildDeleteEvent = Event(
    name="build_delete",
    key="a30d32cc-c931-41d6-8a66-d6c04479098f",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Build Delete endpoint",
)


def super_build_list_callable(list_options: PaginateOnly, headers: CommonHeaders):
    list_options = PaginateOnly(**list_options.model_dump())
    # FIXME: `token` is not defined in this scope; same unresolved dependency
    # as in the Area events above.
    if token.is_employee:
        raise Exception("Forbidden for employees")

    # TODO: Pydantic model must be implemented for list_options.query
    with AccountRecords.new_session() as db_session:
        AccountRecords.set_session(db_session)
        list_of_fields = [
            AccountRecords.iban,
            AccountRecords.bank_date,
            AccountRecords.currency,
            AccountRecords.currency_value,
            AccountRecords.process_comment,
            AccountRecords.add_comment_note,
            AccountRecords.receive_debit,
            AccountRecords.is_email_send,
            AccountRecords.is_notification_send,
        ]
        account_records_query = db_session.query(
            *list_of_fields
        ).join(
            BuildParts, BuildParts.id == AccountRecords.build_parts_id
        ).filter(BuildParts.id == token.selected_occupant.build_part_id)
        if list_options.query:
            account_records_query = account_records_query.filter(*AccountRecords.convert(list_options.query))

        pagination = Pagination(data=account_records_query)
        pagination.change(**list_options.model_dump())
        pagination_result = PaginationResult(data=account_records_query, pagination=pagination)
        return EndpointResponse(message="MSG0003-LIST", pagination_result=pagination_result).response


SuperBuildListEvent.event_callable = super_build_list_callable


def super_build_create_callable(data, headers: CommonHeaders):
    return {
        "message": "MSG0001-INSERT",
        "data": None,
        "completed": True,
    }


SuperBuildCreateEvent.event_callable = super_build_create_callable


def super_build_update_callable(data, headers: CommonHeaders):
    return {
        "message": "MSG0002-UPDATE",
        "data": None,
        "completed": True,
    }


SuperBuildUpdateEvent.event_callable = super_build_update_callable


def super_build_delete_callable(uu_id: str, headers: CommonHeaders):
    return {
        "message": "MSG0003-DELETE",
        "data": None,
        "completed": True,
    }


SuperBuildDeleteEvent.event_callable = super_build_delete_callable
@ -0,0 +1,27 @@
from api_initializer.event_clusters import EventCluster, RouterCluster
from index import endpoints_index
from .supers_events import (
    SuperLivingSpaceListEvent,
    SuperLivingSpaceCreateEvent,
    SuperLivingSpaceUpdateEvent,
    SuperLivingSpaceDeleteEvent,
)

LivingSpaceRouterCluster = RouterCluster(name="LivingSpaceRouterCluster")

LivingSpaceListEventCluster = EventCluster(name="LivingSpaceListEventCluster", endpoint_uu_id=endpoints_index["LivingSpaceList"])
LivingSpaceListEventCluster.add_event(SuperLivingSpaceListEvent)

LivingSpaceCreateEventCluster = EventCluster(name="LivingSpaceCreateEventCluster", endpoint_uu_id=endpoints_index["LivingSpaceCreate"])
LivingSpaceCreateEventCluster.add_event(SuperLivingSpaceCreateEvent)

LivingSpaceUpdateEventCluster = EventCluster(name="LivingSpaceUpdateEventCluster", endpoint_uu_id=endpoints_index["LivingSpaceUpdate"])
LivingSpaceUpdateEventCluster.add_event(SuperLivingSpaceUpdateEvent)

LivingSpaceDeleteEventCluster = EventCluster(name="LivingSpaceDeleteEventCluster", endpoint_uu_id=endpoints_index["LivingSpaceDelete"])
LivingSpaceDeleteEventCluster.add_event(SuperLivingSpaceDeleteEvent)

LivingSpaceRouterCluster.set_event_cluster(LivingSpaceListEventCluster)
LivingSpaceRouterCluster.set_event_cluster(LivingSpaceCreateEventCluster)
LivingSpaceRouterCluster.set_event_cluster(LivingSpaceUpdateEventCluster)
LivingSpaceRouterCluster.set_event_cluster(LivingSpaceDeleteEventCluster)
@ -0,0 +1,95 @@
from typing import Any

from api_initializer.event_clusters import Event
from api_validations.response import (
    PaginateOnly,
    Pagination,
    PaginationResult,
    PostgresResponseSingle,
    PostgresResponse,
    EndpointResponse,
)
from schemas import (
    Build,
    BuildParts,
    AccountRecords,
)
from api_validations.defaults.validations import CommonHeaders


# List all endpoint Super Living Space
SuperLivingSpaceListEvent = Event(
    name="super_living_space_list",
    key="e3eced11-c464-4893-8b49-d2858c160ed0",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Living Space List all endpoint",
)

SuperLivingSpaceCreateEvent = Event(
    name="super_living_space_create",
    key="9e26f770-3475-4831-9da9-4684119b13ae",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Living Space Create endpoint",
)

SuperLivingSpaceUpdateEvent = Event(
    name="super_living_space_update",
    key="ecd15d27-e5e8-4bd1-972b-9b4508cfac77",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Living Space Update endpoint",
)

SuperLivingSpaceDeleteEvent = Event(
    name="super_living_space_delete",
    key="4fcadb8c-2e26-4af3-acb5-bcbf87cae0c0",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Living Space Delete endpoint",
)


def super_living_space_list_callable(list_options: PaginateOnly, headers: CommonHeaders):
    return {
        "message": "MSG0003-LIST",
        "data": None,
        "completed": True,
    }


SuperLivingSpaceListEvent.event_callable = super_living_space_list_callable


def super_living_space_create_callable(data, headers: CommonHeaders):
    return {
        "message": "MSG0001-INSERT",
        "data": None,
        "completed": True,
    }


SuperLivingSpaceCreateEvent.event_callable = super_living_space_create_callable


def super_living_space_update_callable(data, headers: CommonHeaders):
    return {
        "message": "MSG0002-UPDATE",
        "data": None,
        "completed": True,
    }


SuperLivingSpaceUpdateEvent.event_callable = super_living_space_update_callable


def super_living_space_delete_callable(uu_id: str, headers: CommonHeaders):
    return {
        "message": "MSG0003-DELETE",
        "data": None,
        "completed": True,
    }


SuperLivingSpaceDeleteEvent.event_callable = super_living_space_delete_callable
@ -0,0 +1,20 @@
endpoints_index: dict = {
    "AreaList": "31382497-e0fd-4574-9684-072a85ce8f29",
    "AreaCreate": "e8b1c9ee-a25a-4434-a018-31866826f1ed",
    "AreaUpdate": "c57093e1-0e0b-459b-bedf-87280c040ab1",
    "AreaDelete": "e5239349-99c8-4fe1-8db9-4876abf25686",
    "BuildList": "b34319e8-1829-426e-b0ab-9c6a1a3a8cbd",
    "BuildCreate": "66c3ed83-ce3c-4c3c-a158-aa5bef20a3e8",
    "BuildUpdate": "d0587d29-da23-4c11-a861-c27fe7f81761",
    "BuildDelete": "13382948-d259-484e-b30f-3b42f9e20a42",
    "LivingSpaceList": "98d9ac9e-fca4-4b0a-bf1e-ba2b3348c873",
    "LivingSpaceCreate": "319e3777-4a19-4f32-b7bd-e7af3a9e541c",
    "LivingSpaceUpdate": "e4873948-7aa5-46cd-9c41-42c33339f4d6",
    "LivingSpaceDelete": "ead99b10-efad-4ddb-8b7c-3c9474238a20",
    "PartsList": "d8bb7636-b768-4083-9313-af63fea1d1c3",
    "PartsCreate": "755588f0-aabb-4be0-9a49-2be47ea4aaee",
    "PartsUpdate": "44493fe1-367c-4eb9-afbf-bc7f7daca158",
    "PartsDelete": "f2cea40b-295b-4a55-9b13-02c82979c53e",
}
@ -0,0 +1,31 @@
FROM python:3.12-slim

WORKDIR /

# Install system dependencies and Poetry
RUN apt-get update && apt-get install -y --no-install-recommends gcc && rm -rf /var/lib/apt/lists/* && pip install --no-cache-dir poetry

# Copy Poetry configuration
COPY /pyproject.toml ./pyproject.toml

# Configure Poetry and install dependencies with optimizations
RUN poetry config virtualenvs.create false && poetry install --no-interaction --no-ansi --no-root --only main && pip cache purge && rm -rf ~/.cache/pypoetry

# Copy application code
COPY /api_services/api_initializer /api_initializer
COPY /api_services/api_controllers /api_controllers
COPY /api_services/api_validations /api_validations
COPY /api_services/schemas /schemas
COPY /api_services/api_modules /api_modules

COPY /api_services/api_middlewares /api_middlewares
COPY /api_services/api_builds/identity_service/endpoints /api_initializer/endpoints
COPY /api_services/api_builds/identity_service/events /api_initializer/events
COPY /api_services/api_builds/identity_service/validations /api_initializer/validations
COPY /api_services/api_builds/identity_service/index.py /api_initializer/index.py

# Set Python path to include app directory
ENV PYTHONPATH=/ PYTHONUNBUFFERED=1 PYTHONDONTWRITEBYTECODE=1

# Run the application using the configured uvicorn server
CMD ["poetry", "run", "python", "/api_initializer/app.py"]
@ -0,0 +1,72 @@
from typing import Any
from fastapi import APIRouter, Depends

from index import endpoints_index
from events.people.cluster import PeopleRouterCluster

from api_validations.defaults.validations import CommonHeaders
from api_validations.response.pagination import PaginateOnly
from api_middlewares.token_provider import TokenProvider


people_endpoint_route = APIRouter(prefix="/people", tags=["People Cluster"])


people_list = "PeopleList"
@people_endpoint_route.post(
    path="/list",
    description="List all people endpoint",
    operation_id=endpoints_index[people_list],
)
def people_list_route(data: PaginateOnly, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    FoundCluster = PeopleRouterCluster.get_event_cluster(people_list)
    event_cluster_matched = FoundCluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(list_options=data, headers=headers)


people_create = "PeopleCreate"
@people_endpoint_route.post(
    path="/create",
    description="Create people endpoint",
    operation_id=endpoints_index[people_create],
)
def people_create_route(data, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    # TODO: `data` has no type annotation, so FastAPI will not read it as a
    # JSON body; a request model is still missing here.
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    FoundCluster = PeopleRouterCluster.get_event_cluster(people_create)
    event_cluster_matched = FoundCluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(data=data, headers=headers)


people_update = "PeopleUpdate"
@people_endpoint_route.post(
    path="/update/{uu_id}",
    description="Update people endpoint",
    operation_id=endpoints_index[people_update],
)
def people_update_route(uu_id: str, data, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    FoundCluster = PeopleRouterCluster.get_event_cluster(people_update)
    event_cluster_matched = FoundCluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(uu_id=uu_id, data=data, headers=headers)


people_delete = "PeopleDelete"
@people_endpoint_route.delete(
    path="/delete/{uu_id}",
    description="Delete people endpoint",
    operation_id=endpoints_index[people_delete],
)
def people_delete_route(uu_id: str, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    FoundCluster = PeopleRouterCluster.get_event_cluster(people_delete)
    event_cluster_matched = FoundCluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(uu_id=uu_id, headers=headers)
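Each route above repeats the same four-step resolution: token lookup, event-code retrieval, cluster lookup, event match. A hedged sketch of factoring that into one helper, assuming the TokenProvider and cluster APIs behave exactly as they are used in the routes:

# Hypothetical helper; not part of this commit.
def resolve_event_callable(router_cluster, endpoint_name: str, headers):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_key = TokenProvider.retrieve_event_codes(
        endpoint_code=headers.operation_id, token=token_object
    )
    found_cluster = router_cluster.get_event_cluster(endpoint_name)
    return found_cluster.match_event(event_key=event_key).event_callable

# Usage inside a route body:
# handler = resolve_event_callable(PeopleRouterCluster, people_list, headers)
# return handler(list_options=data, headers=headers)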
@ -0,0 +1,17 @@
from fastapi import APIRouter
from .people.router import people_endpoint_route
from .user.router import user_endpoint_route


def get_routes() -> list[APIRouter]:
    return [people_endpoint_route, user_endpoint_route]


def get_safe_endpoint_urls() -> list[tuple[str, str]]:
    return [
        ("/", "GET"),
        ("/docs", "GET"),
        ("/redoc", "GET"),
        ("/openapi.json", "GET"),
        ("/metrics", "GET"),
    ]
@ -0,0 +1,73 @@
from typing import Any
from fastapi import APIRouter, Depends

from index import endpoints_index
from events.user.cluster import UserRouterCluster

from api_validations.defaults.validations import CommonHeaders
from api_validations.response.pagination import PaginateOnly
from api_middlewares.token_provider import TokenProvider


user_endpoint_route = APIRouter(prefix="/users", tags=["User Cluster"])


user_list = "UserList"
@user_endpoint_route.post(
    path="/list",
    description="List all users endpoint",
    operation_id=endpoints_index[user_list],
)
def user_list_route(data: PaginateOnly, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    FoundCluster = UserRouterCluster.get_event_cluster(user_list)
    event_cluster_matched = FoundCluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(list_options=data, headers=headers)


user_create = "UserCreate"
@user_endpoint_route.post(
    path="/create",
    description="Create user endpoint",
    operation_id=endpoints_index[user_create],
)
def user_create_route(data, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    FoundCluster = UserRouterCluster.get_event_cluster(user_create)
    event_cluster_matched = FoundCluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(data=data, headers=headers)


user_update = "UserUpdate"
@user_endpoint_route.post(
    path="/update/{uu_id}",
    description="Update user endpoint",
    operation_id=endpoints_index[user_update],
)
def user_update_route(uu_id: str, data, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    FoundCluster = UserRouterCluster.get_event_cluster(user_update)
    event_cluster_matched = FoundCluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(uu_id=uu_id, data=data, headers=headers)


user_delete = "UserDelete"
@user_endpoint_route.delete(
    path="/delete/{uu_id}",
    description="Delete user endpoint",
    operation_id=endpoints_index[user_delete],
)
def user_delete_route(uu_id: str, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    FoundCluster = UserRouterCluster.get_event_cluster(user_delete)
    event_cluster_matched = FoundCluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(uu_id=uu_id, headers=headers)
@ -0,0 +1,7 @@
from .people.cluster import PeopleRouterCluster
from .user.cluster import UserRouterCluster

__all__ = [
    "PeopleRouterCluster",
    "UserRouterCluster",
]
@ -0,0 +1,27 @@
from api_initializer.event_clusters import EventCluster, RouterCluster
from index import endpoints_index
from .supers_events import (
    SuperPeopleListEvent,
    SuperPeopleCreateEvent,
    SuperPeopleUpdateEvent,
    SuperPeopleDeleteEvent,
)

PeopleRouterCluster = RouterCluster(name="PeopleRouterCluster")

PeopleListEventCluster = EventCluster(name="PeopleListEventCluster", endpoint_uu_id=endpoints_index["PeopleList"])
PeopleListEventCluster.add_event(SuperPeopleListEvent)

PeopleCreateEventCluster = EventCluster(name="PeopleCreateEventCluster", endpoint_uu_id=endpoints_index["PeopleCreate"])
PeopleCreateEventCluster.add_event(SuperPeopleCreateEvent)

PeopleUpdateEventCluster = EventCluster(name="PeopleUpdateEventCluster", endpoint_uu_id=endpoints_index["PeopleUpdate"])
PeopleUpdateEventCluster.add_event(SuperPeopleUpdateEvent)

PeopleDeleteEventCluster = EventCluster(name="PeopleDeleteEventCluster", endpoint_uu_id=endpoints_index["PeopleDelete"])
PeopleDeleteEventCluster.add_event(SuperPeopleDeleteEvent)

PeopleRouterCluster.set_event_cluster(PeopleListEventCluster)
PeopleRouterCluster.set_event_cluster(PeopleCreateEventCluster)
PeopleRouterCluster.set_event_cluster(PeopleUpdateEventCluster)
PeopleRouterCluster.set_event_cluster(PeopleDeleteEventCluster)
@ -0,0 +1,114 @@
from typing import Any

from api_initializer.event_clusters import Event
from api_validations.response import (
    PaginateOnly,
    Pagination,
    PaginationResult,
    PostgresResponseSingle,
    PostgresResponse,
    EndpointResponse,
)
from api_validations.token.validations import TokenDictType
from schemas import (
    Build,
    BuildLivingSpace,
    BuildParts,
    People,
)


# List all endpoint Super User
SuperPeopleListEvent = Event(
    name="super_people_list",
    key="0f8a8b7f-0615-4507-916b-030d48cb5c1d",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super People List all flat representative users endpoint",
)

# Create endpoint Super User
SuperPeopleCreateEvent = Event(
    name="super_people_create",
    key="e18657b7-7a5a-43b8-b43a-422cbc783326",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super People Create flat representative users endpoint",
)

# Update endpoint Super User
SuperPeopleUpdateEvent = Event(
    name="super_people_update",
    key="02a774aa-1f7d-472b-98f1-7b4a58d43e31",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super People Update flat representative users endpoint",
)

# Delete endpoint Super User
SuperPeopleDeleteEvent = Event(
    name="super_people_delete",
    key="b56fd146-b11a-466a-84c9-4c72fb0b9ffa",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super People Delete flat representative users endpoint",
)


# NOTE: the routers in this commit invoke event callables with headers=...,
# while these handlers are declared with token=...; see the adapter sketch
# after this file.
def super_people_list_callable(list_options: PaginateOnly, token: TokenDictType):
    list_options = PaginateOnly(**list_options.model_dump())
    if token.is_employee:
        raise Exception("Forbidden for employees")

    # TODO: Pydantic model must be implemented for list_options.query
    with People.new_session() as db_session:
        People.set_session(db_session)
        # NOTE: the result of this join chain was discarded in the original;
        # it is kept for reference but has no effect.
        db_session.query(
        ).join(BuildParts, BuildParts.id == BuildLivingSpace.build_parts_id
        ).join(People, People.person_id == BuildLivingSpace.people_id
        ).filter()
        if list_options.query:
            people_list = People.query.filter(*People.convert(list_options.query))
        else:
            people_list = People.query.filter()

        pagination = Pagination(data=people_list)
        pagination.change(**list_options.model_dump())
        pagination_result = PaginationResult(data=people_list, pagination=pagination)
        return EndpointResponse(message="MSG0003-LIST", pagination_result=pagination_result).response


SuperPeopleListEvent.event_callable = super_people_list_callable


def super_people_create_callable(data: People, token: TokenDictType):
    return {
        "message": "MSG0001-INSERT",
        "data": None,
        "completed": True,
    }


SuperPeopleCreateEvent.event_callable = super_people_create_callable


def super_people_update_callable(data: People, token: TokenDictType):
    return {
        "message": "MSG0002-UPDATE",
        "data": None,
        "completed": True,
    }


SuperPeopleUpdateEvent.event_callable = super_people_update_callable


def super_people_delete_callable(data: People, token: TokenDictType):
    return {
        "message": "MSG0004-DELETE",
        "data": None,
        "completed": True,
    }


SuperPeopleDeleteEvent.event_callable = super_people_delete_callable
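Note the mismatch flagged above: the routers call event callables with headers=..., while the People handlers expect token=..., so a direct call would raise a TypeError. A thin adapter could reconcile the two; this is a hypothetical sketch that assumes TokenProvider.get_dict_from_redis returns an object compatible with TokenDictType:

# Hypothetical adapter; not part of this commit.
def with_token(handler):
    def wrapper(*, headers, **kwargs):
        token = TokenProvider.get_dict_from_redis(token=headers.token)
        return handler(token=token, **kwargs)
    return wrapper

# e.g. SuperPeopleListEvent.event_callable = with_token(super_people_list_callable)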
@ -0,0 +1,27 @@
from api_initializer.event_clusters import EventCluster, RouterCluster
from index import endpoints_index
from .supers_events import (
    SuperUserListEvent,
    SuperUserCreateEvent,
    SuperUserUpdateEvent,
    SuperUserDeleteEvent,
)

UserRouterCluster = RouterCluster(name="UserRouterCluster")

UserListAllEventCluster = EventCluster(name="UserListAllEventCluster", endpoint_uu_id=endpoints_index["UserList"])
UserListAllEventCluster.add_event(SuperUserListEvent)

UserCreateEventCluster = EventCluster(name="UserCreateEventCluster", endpoint_uu_id=endpoints_index["UserCreate"])
UserCreateEventCluster.add_event(SuperUserCreateEvent)

UserUpdateEventCluster = EventCluster(name="UserUpdateEventCluster", endpoint_uu_id=endpoints_index["UserUpdate"])
UserUpdateEventCluster.add_event(SuperUserUpdateEvent)

UserDeleteEventCluster = EventCluster(name="UserDeleteEventCluster", endpoint_uu_id=endpoints_index["UserDelete"])
UserDeleteEventCluster.add_event(SuperUserDeleteEvent)

UserRouterCluster.set_event_cluster(UserListAllEventCluster)
UserRouterCluster.set_event_cluster(UserCreateEventCluster)
UserRouterCluster.set_event_cluster(UserUpdateEventCluster)
UserRouterCluster.set_event_cluster(UserDeleteEventCluster)
@ -0,0 +1,100 @@
from typing import Any

from api_initializer.event_clusters import Event
from api_validations.response import (
    PaginateOnly,
    Pagination,
    PaginationResult,
    PostgresResponseSingle,
    PostgresResponse,
    EndpointResponse,
)
from api_validations.token.validations import TokenDictType
from schemas import (
    Build,
    BuildLivingSpace,
    BuildParts,
    Users,
    UsersTokens,
    People,
)

# List all endpoint Super User
SuperUserListEvent = Event(
    name="super_user_list",
    key="202eec81-b382-4623-911b-709f1b841f3f",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users List all flat representative users endpoint",
)

# Create endpoint Super User
SuperUserCreateEvent = Event(
    name="super_user_create",
    key="2f0a3691-114d-48b7-b166-9572fc889695",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users Create flat representative users endpoint",
)

# Update endpoint Super User
SuperUserUpdateEvent = Event(
    name="super_user_update",
    key="8a8c8dd6-43ad-40df-86bd-345488273f52",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users Update flat representative users endpoint",
)

# Delete endpoint Super User
SuperUserDeleteEvent = Event(
    name="super_user_delete",
    key="e8c77554-4b0e-491f-aab5-67a5ef670999",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users Delete flat representative users endpoint",
)


def super_user_list_callable(list_options: PaginateOnly, token: TokenDictType):
    return {
        "message": "MSG0003-LIST",
        "data": None,
        "completed": True,
    }


SuperUserListEvent.event_callable = super_user_list_callable


def super_user_create_callable(data: dict, token: TokenDictType):
    return {
        "message": "MSG0001-INSERT",
        "data": None,
        "completed": True,
    }


SuperUserCreateEvent.event_callable = super_user_create_callable


def super_user_update_callable(data: dict, token: TokenDictType):
    return {
        "message": "MSG0002-UPDATE",
        "data": None,
        "completed": True,
    }


SuperUserUpdateEvent.event_callable = super_user_update_callable


def super_user_delete_callable(data: dict, token: TokenDictType):
    return {
        "message": "MSG0004-DELETE",
        "data": None,
        "completed": True,
    }


SuperUserDeleteEvent.event_callable = super_user_delete_callable
@ -0,0 +1,11 @@
endpoints_index: dict = {
    "UserList": "5a4ebed3-f764-473c-bc80-57b14082b636",
    "UserCreate": "4fb2e886-98b4-49c7-a388-7e0607893af7",
    "UserUpdate": "d4b046ac-0a93-4804-a233-8ee616152c5a",
    "UserDelete": "98d5deac-8517-42da-8ce8-6a0ed79ba915",
    "PeopleList": "155d7d42-9f41-4126-be3a-c3367a507f95",
    "PeopleCreate": "b4d785a7-aac7-4d55-9aa3-bac871fe7252",
    "PeopleUpdate": "448e1b4e-60b4-467f-a5cc-02c37522d5cc",
    "PeopleDelete": "72a59bcd-52e5-42ec-b491-59c21c5e4014",
}
@ -0,0 +1,26 @@
FROM python:3.12-slim

WORKDIR /

# Install system dependencies and Poetry
RUN apt-get update && apt-get install -y --no-install-recommends gcc && rm -rf /var/lib/apt/lists/* && pip install --no-cache-dir poetry

# Copy Poetry configuration
COPY /pyproject.toml ./pyproject.toml

# Configure Poetry and install dependencies with optimizations
RUN poetry config virtualenvs.create false && poetry install --no-interaction --no-ansi --no-root --only main && pip cache purge && rm -rf ~/.cache/pypoetry

# Copy application code
COPY /api_services/api_controllers /api_controllers
COPY /api_services/schemas /schemas
COPY /api_services/api_modules /api_modules

COPY /api_services/api_builds/initial-service /initial-service
COPY /api_services/api_builds/initial-service /

# Set Python path to include app directory
ENV PYTHONPATH=/ PYTHONUNBUFFERED=1 PYTHONDONTWRITEBYTECODE=1

# Run the application using the configured uvicorn server
CMD ["poetry", "run", "python", "initial-service/app.py"]
@ -0,0 +1,119 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
# Use forward slashes (/) also on windows to provide an os agnostic path
script_location = alembic

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library.
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =

# max length of characters to apply to the "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
# version_path_separator = newline
#
# Use os.pathsep. Default configuration used for new projects.
version_path_separator = os

# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

sqlalchemy.url = postgresql+psycopg2://postgres:password@10.10.2.14:5432/postgres


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = check --fix REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARNING
handlers = console
qualname =

[logger_sqlalchemy]
level = WARNING
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
@ -0,0 +1 @@
Generic single-database configuration.
@ -0,0 +1,89 @@
import os
from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context
from schemas import *  # noqa: F401,F403 - imported so every model registers on Base.metadata
from api_controllers.postgres.engine import Base

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Override sqlalchemy.url with environment variables if they exist
db_host = os.getenv("POSTGRES_HOST", None)
db_port = os.getenv("POSTGRES_PORT", None)
db_user = os.getenv("POSTGRES_USER", None)
db_password = os.getenv("POSTGRES_PASSWORD", None)
db_name = os.getenv("POSTGRES_DB", None)

# Build the connection URL from environment variables.
# NOTE: if any of these variables is unset, the URL is built with literal
# "None" fragments; the original code does not guard against that.
db_url = f"postgresql+psycopg2://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"

# Override the sqlalchemy.url in the alembic.ini file
config.set_main_option("sqlalchemy.url", db_url)

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
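Because env.py silently builds a URL with literal "None" fragments when a variable is missing, a fail-fast guard (not in the original) could sit right after the os.getenv block; variable names match env.py above:

# Hypothetical fail-fast check for the five POSTGRES_* variables.
_required = {
    "POSTGRES_HOST": db_host,
    "POSTGRES_PORT": db_port,
    "POSTGRES_USER": db_user,
    "POSTGRES_PASSWORD": db_password,
    "POSTGRES_DB": db_name,
}
_missing = [name for name, value in _required.items() if not value]
if _missing:
    raise RuntimeError(f"Missing database environment variables: {', '.join(_missing)}")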
@ -0,0 +1,28 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    """Upgrade schema."""
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    """Downgrade schema."""
    ${downgrades if downgrades else "pass"}
@ -0,0 +1,43 @@
import os

from api_controllers.postgres.engine import get_db
from init_app_defaults import create_application_defaults
from init_enums import init_api_enums_build_types
from init_alembic import generate_alembic
from init_occupant_types import create_occupant_types_defaults
from init_services import create_modules_and_services_and_actions
from init_address import create_one_address
from init_occ_defaults import create_occupant_defaults

# The fallback belongs inside os.getenv(); the original passed 0 as the
# base argument of int(), which crashes when SET_ALEMBIC is unset.
set_alembic = bool(int(os.getenv("SET_ALEMBIC", 0)))

if __name__ == "__main__":
    print(f"Set alembic: {set_alembic}")

    with get_db() as db_session:
        if set_alembic:
            generate_alembic(session=db_session)
        try:
            create_one_address(db_session=db_session)
        except Exception as e:
            print(f"Error creating address: {e}")
        try:
            init_api_enums_build_types(db_session=db_session)
        except Exception as e:
            print(f"Error creating enums: {e}")
        try:
            create_application_defaults(db_session=db_session)
        except Exception as e:
            print(f"Error creating application defaults: {e}")
        try:
            create_occupant_types_defaults(db_session=db_session)
        except Exception as e:
            print(f"Error creating occupant types defaults: {e}")
        try:
            create_modules_and_services_and_actions(db_session=db_session)
        except Exception as e:
            print(f"Error creating modules and services and actions: {e}")
        try:
            create_occupant_defaults(db_session=db_session)
        except Exception as e:
            print(f"Error creating occupant defaults: {e}")
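The six seeding steps above differ only in the function called and the error label; a compact equivalent of the try/except chain, sketched with the same init functions:

# Hypothetical refactor of the try/except chain; behavior is identical.
steps = [
    ("address", create_one_address),
    ("enums", init_api_enums_build_types),
    ("application defaults", create_application_defaults),
    ("occupant types defaults", create_occupant_types_defaults),
    ("modules and services and actions", create_modules_and_services_and_actions),
    ("occupant defaults", create_occupant_defaults),
]
for label, step in steps:
    try:
        step(db_session=db_session)
    except Exception as e:
        print(f"Error creating {label}: {e}")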
@ -0,0 +1,144 @@
from schemas import (
    Addresses,
    AddressCity,
    AddressStreet,
    AddressLocality,
    AddressDistrict,
    AddressNeighborhood,
    AddressState,
    AddressCountry,
)


def create_one_address(db_session):
    address_list = []
    AddressCountry.set_session(db_session)
    country = AddressCountry.query.filter_by(country_name="TÜRKİYE", country_code="TR").first()
    if not country:
        country = AddressCountry.create(
            country_name="TÜRKİYE", country_code="TR", is_confirmed=True
        )
        country.save()
        address_list.append(country)
    else:
        print(f"Country already exists {country.to_dict()}")

    AddressState.set_session(db_session)
    state = AddressState.query.filter_by(state_name="TÜRKİYE", state_code="TR").first()
    if not state:
        state = AddressState.create(
            state_name="TÜRKİYE",
            state_code="TR",
            phone_code="90",
            country_id=country.id,
            country_uu_id=str(country.uu_id),
            is_confirmed=True,
        )
        state.save()
        address_list.append(state)
    else:
        print(f"State already exists {state.to_dict()}")

    AddressCity.set_session(db_session)
    city = AddressCity.query.filter_by(city_name="ANKARA", city_code="6").first()
    if not city:
        city = AddressCity.create(
            city_name="ANKARA",
            city_code="6",
            licence_plate="06",
            state_id=state.id,
            state_uu_id=str(state.uu_id),
            is_confirmed=True,
        )
        city.save()
        address_list.append(city)
    else:
        print(f"City already exists {city.to_dict()}")

    AddressDistrict.set_session(db_session)
    district = AddressDistrict.query.filter_by(district_name="ÇANKAYA", district_code="1231").first()
    if not district:
        district = AddressDistrict.create(
            district_name="ÇANKAYA",
            district_code="1231",
            city_id=city.id,
            city_uu_id=str(city.uu_id),
            is_confirmed=True,
        )
        district.save()
        address_list.append(district)
    else:
        print(f"District already exists {district.to_dict()}")

    AddressLocality.set_session(db_session)
    locality = AddressLocality.query.filter_by(locality_name="MERKEZ", locality_code="2431").first()
    if not locality:
        locality = AddressLocality.create(
            locality_name="MERKEZ",
            locality_code="2431",
            type_code="3",
            type_description=None,
            district_id=district.id,
            district_uu_id=str(district.uu_id),
            is_confirmed=True,
        )
        locality.save()
        address_list.append(locality)
    else:
        print(f"Locality already exists {locality.to_dict()}")

    AddressNeighborhood.set_session(db_session)
    neighborhood = AddressNeighborhood.query.filter_by(neighborhood_name="AYRANCI MAHALLESİ", neighborhood_code="1522").first()
    if not neighborhood:
        neighborhood = AddressNeighborhood.create(
            neighborhood_name="AYRANCI MAHALLESİ",
            neighborhood_code="1522",
            type_code="1",
            type_description="MAHALLESİ",
            locality_id=locality.id,
            locality_uu_id=str(locality.uu_id),
            is_confirmed=True,
        )
        neighborhood.save()
        address_list.append(neighborhood)
    else:
        print(f"Neighborhood already exists {neighborhood.to_dict()}")

    AddressStreet.set_session(db_session)
    street = AddressStreet.query.filter_by(street_name="REŞAT NURİ CADDESİ", street_code="52270").first()
    if not street:
        street = AddressStreet.create(
            street_name="REŞAT NURİ CADDESİ",
            type_description="CADDESİ",
            type_code="3",
            street_code="52270",
            neighborhood_id=neighborhood.id,
            neighborhood_uu_id=str(neighborhood.uu_id),
            is_confirmed=True,
        )
        street.save()
        address_list.append(street)
    else:
        print(f"Street already exists {street.to_dict()}")

    Addresses.set_session(db_session)
    address = Addresses.query.filter_by(street_id=street.id, street_uu_id=str(street.uu_id)).first()
    if not address:
        address = Addresses.create(
            street_id=street.id,
            street_uu_id=str(street.uu_id),
            build_number="Ex1",
            door_number="1",
            floor_number="1",
            comment_address="Example Address",
            letter_address="Example Address",
            short_letter_address="Example Address",
            latitude=0,
            longitude=0,
            is_confirmed=True,
        )
        address.save()
        address_list.append(address)
    else:
        print(f"Address already exists {address.to_dict()}")
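Every tier in create_one_address repeats the same filter-or-create step. A generic helper sketch, assuming the ActiveRecord-style set_session / query / create / save mixin these schema models expose:

# Hypothetical get-or-create helper; not part of this commit.
def get_or_create(model, db_session, defaults=None, **lookup):
    model.set_session(db_session)
    instance = model.query.filter_by(**lookup).first()
    if instance:
        print(f"{model.__name__} already exists {instance.to_dict()}")
        return instance
    instance = model.create(**lookup, **(defaults or {}))
    instance.save()
    return instance

# e.g. country = get_or_create(AddressCountry, db_session,
#                              defaults={"is_confirmed": True},
#                              country_name="TÜRKİYE", country_code="TR")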
@ -0,0 +1,23 @@
import os
from sqlalchemy import text


def generate_alembic(session):
    try:
        result = session.execute(
            text(
                "SELECT EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = "
                "'alembic_version') AS table_existence;"
            )
        )
        if result.first()[0]:
            session.execute(text("delete from alembic_version;"))
            session.commit()
    except Exception as e:
        print(e)
    finally:
        run_command = "python -m alembic stamp head;"
        run_command += (
            "python -m alembic revision --autogenerate;python -m alembic upgrade head;"
        )
        os.system(run_command)
@ -0,0 +1,660 @@
import arrow

from api_modules.token.password_module import PasswordModule
from api_controllers.mongo.database import mongo_handler
from schemas import (
    Companies,
    Departments,
    Duty,
    Duties,
    Employees,
    People,
    Users,
    Staff,
    RelationshipDutyCompany,
)


def create_application_defaults(db_session):
    """Seed the default company, departments, duties, staff, people and users.

    Every block follows the same idempotent pattern: look the row up first and
    create it only when it is missing.
    """
    created_list, created_by, confirmed_by = [], "System", "System"
    active_row = dict(is_confirmed=True, active=True, deleted=False, is_notification_send=True)
    Companies.set_session(db_session)
    Departments.set_session(db_session)
    Duties.set_session(db_session)
    Duty.set_session(db_session)
    Staff.set_session(db_session)
    People.set_session(db_session)
    Users.set_session(db_session)
    Employees.set_session(db_session)
    RelationshipDutyCompany.set_session(db_session)

    company_management = Companies.query.filter_by(company_tag="Evyos").first()
    if not company_management:
        company_management = Companies.find_or_create(
            **{
                "formal_name": "Evyos LTD",
                "public_name": "Evyos Verimlilik Sistemleri",
                "company_type": "LTD",
                "commercial_type": "Commercial",
                "tax_no": "123132123132",
                "company_tag": "Evyos",
                "default_lang_type": "TR",
                "default_money_type": "TL",
                "is_commercial": True,
                "is_confirmed": True,
            }
        )
        created_list.append(company_management)
    else:
        print(f"Company Management Found {company_management.to_dict()}")
    company_id, company_uu_id = company_management.id, str(company_management.uu_id)

    execution = Departments.query.filter_by(department_code="EO001", company_id=company_id).first()
    if not execution:
        execution = Departments.create(
            department_name="Execution Office",
            department_code="EO001",
            company_id=company_id,
            company_uu_id=str(company_uu_id),
            **active_row,
        )
        created_list.append(execution)
    else:
        print(f"Execution Found {execution.to_dict()}")

    gen_man = Departments.query.filter_by(department_code="GM001", company_id=company_id).first()
    if not gen_man:
        gen_man = Departments.create(
            department_name="General Manager Example",
            department_code="GM001",
            company_id=company_id,
            company_uu_id=str(company_uu_id),
            **active_row,
        )
        created_list.append(gen_man)
    else:
        print(f"General Manager Found {gen_man.to_dict()}")

    it_dept = Departments.query.filter_by(department_code="ITD001", company_id=company_id).first()
    if not it_dept:
        it_dept = Departments.create(
            department_name="IT Department",
            department_code="ITD001",
            company_id=company_id,
            company_uu_id=str(company_uu_id),
            **active_row,
        )
        created_list.append(it_dept)
    else:
        print(f"IT Department Found {it_dept.to_dict()}")

    gen_duty = Duty.query.filter_by(duty_code="GM0001").first()
    if not gen_duty:
        gen_duty = Duty.create(
            duty_name="General Manager",
            duty_code="GM0001",
            duty_description="General Manager",
            **active_row,
        )
        created_list.append(gen_duty)
    else:
        print(f"General Manager Found {gen_duty.to_dict()}")

    bm_duty = Duty.query.filter_by(duty_code="BM0001").first()
    if not bm_duty:
        bm_duty = Duty.create(
            duty_name="Business Manager",
            duty_code="BM0001",
            duty_description="Business Manager",
            **active_row,
        )
        created_list.append(bm_duty)
    else:
        print(f"Business Manager Found {bm_duty.to_dict()}")

    it_duty = Duty.query.filter_by(duty_code="IT0001").first()
    if not it_duty:
        it_duty = Duty.create(
            duty_name="IT Manager",
            duty_code="IT0001",
            duty_description="IT Manager",
            **active_row,
        )
        created_list.append(it_duty)
    else:
        print(f"IT Manager Found {it_duty.to_dict()}")

    bulk_duty = Duty.query.filter_by(duty_code="BULK").first()
    if not bulk_duty:
        bulk_duty = Duty.create(
            duty_name="BULK",
            duty_code="BULK",
            duty_description="BULK RECORDS OF THE COMPANY",
            **active_row,
        )
        created_list.append(bulk_duty)
    else:
        print(f"Bulk Duty Found {bulk_duty.to_dict()}")

    occu_duty = Duty.query.filter_by(duty_code="OCCUPANT").first()
    if not occu_duty:
        occu_duty = Duty.create(
            duty_name="OCCUPANT",
            duty_code="OCCUPANT",
            duty_description="OCCUPANT RECORDS OF THE COMPANY",
            **active_row,
        )
        created_list.append(occu_duty)
    else:
        print(f"Occupant Duty Found {occu_duty.to_dict()}")

    duties_gen_man = Duties.query.filter_by(company_id=company_id, duties_id=gen_duty.id, department_id=gen_man.id).first()
    if not duties_gen_man:
        duties_gen_man = Duties.create(
            company_id=company_id,
            company_uu_id=str(company_uu_id),
            duties_id=gen_duty.id,
            duties_uu_id=str(gen_duty.uu_id),
            department_id=gen_man.id,
            department_uu_id=str(gen_man.uu_id),
            **active_row,
        )
        created_list.append(duties_gen_man)
    else:
        print(f"Duties General Manager Found {duties_gen_man.to_dict()}")

    duties_created_bm = Duties.query.filter_by(company_id=company_id, duties_id=bm_duty.id, department_id=execution.id).first()
    if not duties_created_bm:
        duties_created_bm = Duties.create(
            company_id=company_id,
            company_uu_id=str(company_uu_id),
            duties_id=bm_duty.id,
            duties_uu_id=str(bm_duty.uu_id),
            department_id=execution.id,
            department_uu_id=str(execution.uu_id),
            **active_row,
        )
        created_list.append(duties_created_bm)
    else:
        print(f"Duties Business Manager Found {duties_created_bm.to_dict()}")

    duties_created_it = Duties.query.filter_by(company_id=company_id, duties_id=bulk_duty.id, department_id=execution.id).first()
    if not duties_created_it:
        duties_created_it = Duties.create(
            company_id=company_id,
            company_uu_id=str(company_uu_id),
            duties_id=bulk_duty.id,
            duties_uu_id=str(bulk_duty.uu_id),
            department_id=execution.id,
            department_uu_id=str(execution.uu_id),
            **active_row,
        )
        created_list.append(duties_created_it)
    else:
        print(f"Duties Bulk Found {duties_created_it.to_dict()}")

    duties_created_occupant = Duties.query.filter_by(company_id=company_id, duties_id=occu_duty.id, department_id=execution.id).first()
    if not duties_created_occupant:
        duties_created_occupant = Duties.create(
            company_id=company_id,
            company_uu_id=str(company_uu_id),
            duties_id=occu_duty.id,
            duties_uu_id=str(occu_duty.uu_id),
            department_id=execution.id,
            department_uu_id=str(execution.uu_id),
            **active_row,
        )
        created_list.append(duties_created_occupant)
    else:
        print(f"Duties Occupant Found {duties_created_occupant.to_dict()}")

    created_duty = Duty.query.filter_by(duty_code="DM").first()
    if not created_duty:
        created_duty = Duty.create(
            duty_name="Database Manager",
            duty_code="DM",
            duty_description="Database Manager",
            created_by=created_by,
            confirmed_by=confirmed_by,
            **active_row,
        )
        created_list.append(created_duty)

    created_duty = Duty.query.filter_by(duty_code="NM").first()
    if not created_duty:
        created_duty = Duty.create(
            duty_name="Network Manager",
            duty_code="NM",
            duty_description="Network Manager",
            created_by=created_by,
            confirmed_by=confirmed_by,
            **active_row,
        )
        created_list.append(created_duty)

    application_manager_duty = Duty.query.filter_by(duty_code="AM").first()
    if not application_manager_duty:
        application_manager_duty = Duty.create(
            duty_name="Application Manager",
            duty_code="AM",
            duty_description="Application Manager",
            created_by=created_by,
            confirmed_by=confirmed_by,
            **active_row,
        )
        created_list.append(application_manager_duty)

    application_super_user_duty = Duty.query.filter_by(duty_code="SUE").first()
    if not application_super_user_duty:
        application_super_user_duty = Duty.create(
            duty_name="Super User",
            duty_code="SUE",
            duty_description="Super User",
            created_by=created_by,
            confirmed_by=confirmed_by,
            **active_row,
        )
        created_list.append(application_super_user_duty)

    application_manager_duties = Duties.query.filter_by(
        department_id=it_dept.id,
        duties_id=application_manager_duty.id,
        company_id=company_id,
    ).first()
    if not application_manager_duties:
        application_manager_duties = Duties.create(
            department_id=it_dept.id,
            department_uu_id=str(it_dept.uu_id),
            duties_id=application_manager_duty.id,
            duties_uu_id=str(application_manager_duty.uu_id),
            company_id=company_id,
            company_uu_id=str(company_uu_id),
            **active_row,
        )
        created_list.append(application_manager_duties)
    else:
        print(f"Application Manager Duties Found {application_manager_duties.to_dict()}")

    super_user_duties = Duties.query.filter_by(
        department_id=it_dept.id,
        duties_id=application_super_user_duty.id,
        company_id=company_id,
    ).first()
    if not super_user_duties:
        super_user_duties = Duties.create(
            department_id=it_dept.id,
            department_uu_id=str(it_dept.uu_id),
            duties_id=application_super_user_duty.id,
            duties_uu_id=str(application_super_user_duty.uu_id),
            company_id=company_id,
            company_uu_id=str(company_uu_id),
            **active_row,
        )
        created_list.append(super_user_duties)
    else:
        print(f"Super User Duties Found {super_user_duties.to_dict()}")

    relation_super_user_duties = RelationshipDutyCompany.query.filter_by(
        duties_id=super_user_duties.id,
        owner_id=company_id,
        member_id=company_id,
    ).first()
    if not relation_super_user_duties:
        relation_super_user_duties = RelationshipDutyCompany.create(
            duties_id=super_user_duties.id,
            owner_id=company_id,
            member_id=company_id,
            parent_id=None,
            child_count=0,
            **active_row,
        )
        created_list.append(relation_super_user_duties)

    relation_application_manager_duties = RelationshipDutyCompany.query.filter_by(
        duties_id=application_manager_duties.id,
        owner_id=company_id,
        member_id=company_id,
    ).first()
    if not relation_application_manager_duties:
        relation_application_manager_duties = RelationshipDutyCompany.create(
            duties_id=application_manager_duties.id,
            owner_id=company_id,
            member_id=company_id,
            parent_id=None,
            child_count=0,
            **active_row,
        )
        created_list.append(relation_application_manager_duties)

    app_manager = People.query.filter_by(person_tag="BAM-System").first()
    if not app_manager:
        app_manager = People.create(
            **{
                "person_tag": "BAM-System",
                "firstname": "Berkay Application Manager",
                "surname": "Karatay",
                "sex_code": "M",
                "middle_name": "",
                "father_name": "Father",
                "mother_name": "Mother",
                "country_code": "TR",
                "national_identity_id": "12312312312",
                "birth_place": "Ankara",
                "birth_date": "01.07.1990",
                "tax_no": "1231231231",
                **active_row,
            },
        )
        created_list.append(app_manager)
    else:
        print(f"Application Manager Found {app_manager.to_dict()}")

    sup_manager = People.query.filter_by(person_tag="BSU-System").first()
    if not sup_manager:
        sup_manager = People.create(
            **{
                "person_tag": "BSU-System",
                "firstname": "Berkay Super User",
                "surname": "Karatay",
                "sex_code": "M",
                "middle_name": "",
                "father_name": "Father",
                "mother_name": "Mother",
                "country_code": "TR",
                "national_identity_id": "12312312313",
                "birth_place": "Ankara",
                "birth_date": "01.07.1990",
                "tax_no": "1231231232",
                **active_row,
            },
        )
        created_list.append(sup_manager)
    else:
        print(f"Super User Found {sup_manager.to_dict()}")

    gen_manager_people = People.query.filter_by(person_tag="BM-System").first()
    if not gen_manager_people:
        gen_manager_people = People.create(
            **{
                "person_tag": "BM-System",
                "firstname": "Example General Manager",
                "surname": "Example",
                "sex_code": "M",
                "middle_name": "",
                "father_name": "Father",
                "mother_name": "Mother",
                "country_code": "TR",
                "national_identity_id": "12312312314",
                "birth_place": "Ankara",
                "birth_date": "01.07.1990",
                "tax_no": "1231231233",
                **active_row,
            },
        )
        created_list.append(gen_manager_people)
    else:
        print(f"General Manager Found {gen_manager_people.to_dict()}")

    application_manager_staff = Staff.query.filter_by(staff_code="AME", duties_id=application_manager_duties.id).first()
    if not application_manager_staff:
        application_manager_staff = Staff.create(
            **{
                "staff_code": "AME",
                "staff_name": "Application Manager Employee",
                "staff_description": "Application Manager Employee",
                "duties_id": application_manager_duties.id,
                "duties_uu_id": str(application_manager_duties.uu_id),
                **active_row,
            },
        )
        created_list.append(application_manager_staff)
    else:
        print(f"Application Manager Found {application_manager_staff.to_dict()}")

    super_user_staff = Staff.query.filter_by(staff_code="SUE", duties_id=super_user_duties.id).first()
    if not super_user_staff:
        super_user_staff = Staff.create(
            **{
                "staff_code": "SUE",
                "staff_name": "Super User Employee",
                "staff_description": "Super User Employee",
                "duties_id": super_user_duties.id,
                "duties_uu_id": str(super_user_duties.uu_id),
                **active_row,
            },
        )
        created_list.append(super_user_staff)
    else:
        print(f"Super User Found {super_user_staff.to_dict()}")

    gen_man_staff = Staff.query.filter_by(staff_code="GME", duties_id=duties_gen_man.id).first()
    if not gen_man_staff:
        gen_man_staff = Staff.create(
            **{
                "staff_code": "GME",
                "staff_name": "General Manager Employee",
                "staff_description": "General Manager Employee",
                "duties_id": duties_gen_man.id,
                "duties_uu_id": str(duties_gen_man.uu_id),
                **active_row,
            },
        )
        created_list.append(gen_man_staff)
    else:
        print(f"General Manager Found {gen_man_staff.to_dict()}")

    gen_man_employee = Employees.query.filter_by(staff_id=gen_man_staff.id, people_id=gen_manager_people.id).first()
    if not gen_man_employee:
        gen_man_employee = Employees.create(
            staff_id=gen_man_staff.id,
            staff_uu_id=str(gen_man_staff.uu_id),
            people_id=gen_manager_people.id,
            people_uu_id=str(gen_manager_people.uu_id),
            **active_row,
        )
        created_list.append(gen_man_employee)

    app_manager_employee = Employees.query.filter_by(staff_id=application_manager_staff.id, people_id=app_manager.id).first()
    if not app_manager_employee:
        app_manager_employee = Employees.create(
            staff_id=application_manager_staff.id,
            staff_uu_id=str(application_manager_staff.uu_id),
            people_id=app_manager.id,
            people_uu_id=str(app_manager.uu_id),
            **active_row,
        )
        created_list.append(app_manager_employee)

    super_user_employee = Employees.query.filter_by(staff_id=super_user_staff.id, people_id=sup_manager.id).first()
    if not super_user_employee:
        super_user_employee = Employees.create(
            staff_id=super_user_staff.id,
            staff_uu_id=str(super_user_staff.uu_id),
            people_id=sup_manager.id,
            people_uu_id=str(sup_manager.uu_id),
            **active_row,
        )
        created_list.append(super_user_employee)

    gen_manager_user = Users.query.filter_by(person_id=gen_manager_people.id, user_tag=gen_manager_people.person_tag).first()
    if not gen_manager_user:
        gen_manager_user = Users.create(
            person_id=gen_manager_people.id,
            person_uu_id=str(gen_manager_people.uu_id),
            user_tag=gen_manager_people.person_tag,
            email="example.general@evyos.com.tr",
            phone_number="+901111111111",
            avatar="https://s.tmimgcdn.com/scr/800x500/276800/building-home-nature-logo-vector-template-3_276851-original.jpg",
            related_company=str(company_management.uu_id),
            **active_row,
        )
        created_list.append(gen_manager_user)
        gen_manager_user.password_expiry_begins = str(arrow.now())
        gen_manager_user.password_token = PasswordModule.generate_refresher_token()

    main_domain, collection_name = (
        "evyos.com.tr",
        f"{str(company_management.uu_id)}*Domain",
    )
    with mongo_handler.collection(collection_name) as mongo_engine:
        existing_record = mongo_engine.find_one(
            {"user_uu_id": str(gen_manager_user.uu_id)}
        )
        if not existing_record:
            mongo_engine.insert_one(
                document={
                    "user_uu_id": str(gen_manager_user.uu_id),
                    "other_domains_list": [main_domain],
                    "main_domain": main_domain,
                    "modified_at": arrow.now().timestamp(),
                }
            )
        else:
            mongo_engine.update_one(
                {"user_uu_id": str(gen_manager_user.uu_id)},
                {
                    "$set": {
                        "other_domains_list": [main_domain],
                        "main_domain": main_domain,
                        "modified_at": arrow.now().timestamp(),
                    }
                },
            )

    app_manager_user = Users.query.filter_by(person_id=app_manager.id, user_tag=app_manager.person_tag).first()
    if not app_manager_user:
        app_manager_user = Users.create(
            person_id=app_manager.id,
            person_uu_id=str(app_manager.uu_id),  # assumption: added to match the other Users.create calls in this file
            user_tag=app_manager.person_tag,
            email="karatay.berkay.man@evyos.com.tr",
            phone_number="+901111111111",
            avatar="https://s.tmimgcdn.com/scr/800x500/276800/building-home-nature-logo-vector-template-3_276851-original.jpg",
            related_company=str(company_management.uu_id),
            **active_row,
        )
        created_list.append(app_manager_user)
        app_manager_user.password_expiry_begins = str(arrow.now())
        app_manager_user.password_token = PasswordModule.generate_refresher_token()

    with mongo_handler.collection(collection_name) as mongo_engine:
        existing_record = mongo_engine.find_one(
            {"user_uu_id": str(app_manager_user.uu_id)}
        )
        if not existing_record:
            mongo_engine.insert_one(
                document={
                    "user_uu_id": str(app_manager_user.uu_id),
                    "other_domains_list": [main_domain],
                    "main_domain": main_domain,
                    "modified_at": arrow.now().timestamp(),
                }
            )
        else:
            mongo_engine.update_one(
                {"user_uu_id": str(app_manager_user.uu_id)},
                {
                    "$set": {
                        "other_domains_list": [main_domain],
                        "main_domain": main_domain,
                        "modified_at": arrow.now().timestamp(),
                    }
                },
            )

    sup_manager_user = Users.query.filter_by(person_id=sup_manager.id, user_tag=sup_manager.person_tag).first()
    if not sup_manager_user:
        sup_manager_user = Users.create(
            person_id=sup_manager.id,
            person_uu_id=str(sup_manager.uu_id),  # assumption: added to match the other Users.create calls in this file
            user_tag=sup_manager.person_tag,
            email="karatay.berkay.sup@evyos.com.tr",
            phone_number="+901111111112",
            avatar="https://s.tmimgcdn.com/scr/800x500/276800/building-home-nature-logo-vector-template-3_276851-original.jpg",
            created_by=created_by,
            confirmed_by=confirmed_by,
            related_company=str(company_management.uu_id),
            **active_row,
        )
        created_list.append(sup_manager_user)
        sup_manager_user.password_expiry_begins = str(arrow.now())
        sup_manager_user.password_token = PasswordModule.generate_refresher_token()

    with mongo_handler.collection(collection_name) as mongo_engine:
        existing_record = mongo_engine.find_one(
            {"user_uu_id": str(sup_manager_user.uu_id)}
        )
        if not existing_record:
            print("insert sup existing record", existing_record)
            mongo_engine.insert_one(
                document={
                    "user_uu_id": str(sup_manager_user.uu_id),
                    "other_domains_list": [main_domain, "management.com.tr"],
                    "main_domain": main_domain,
                    "modified_at": arrow.now().timestamp(),
                }
            )
        else:
            print("update sup existing record", existing_record)
            # Optionally update the existing record if needed
            mongo_engine.update_one(
                {"user_uu_id": str(sup_manager_user.uu_id)},
                {
                    "$set": {
                        "other_domains_list": [main_domain, "management.com.tr"],
                        "main_domain": main_domain,
                        "modified_at": arrow.now().timestamp(),
                    }
                },
            )

    db_session.commit()
    print("All Defaults Create is now completed")
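# A minimal sketch of the "query first, create when missing" step this module
# repeats; the helper name and signature are illustrative, not part of this
# commit:
#
#   def get_or_create(model, defaults=None, **filters):
#       row = model.query.filter_by(**filters).first()
#       if row is None:
#           row = model.create(**filters, **(defaults or {}))
#       return row
#
#   gen_duty = get_or_create(Duty, defaults=active_row, duty_code="GM0001")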
@ -0,0 +1,266 @@
from pydantic import BaseModel
from schemas import BuildTypes, ApiEnumDropdown


class InsertBuildTypes(BaseModel):
    function_code: str
    type_code: str
    lang: str
    type_name: str


def init_api_enums_build_types(db_session):
    BuildTypes.set_session(db_session)
    ApiEnumDropdown.set_session(db_session)

    insert_types = [
        {"function_code": "EVYOS", "type_code": "APT_KZN", "type_name": "Apartman Kazan Dairesi", "lang": "TR"},
        {"function_code": "EVYOS", "type_code": "APT_GRJ", "type_name": "Apartman Garaj", "lang": "TR"},
        {"function_code": "EVYOS", "type_code": "APT_DP", "type_name": "Apartman Depo", "lang": "TR"},
        {"function_code": "EVYOS", "type_code": "DAIRE", "type_name": "Apartman Dairesi", "lang": "TR"},
        {"function_code": "EVYOS", "type_code": "APT", "type_name": "Apartman Binası", "lang": "TR"},
        {"function_code": "EVYOS", "type_code": "APT_YNT", "type_name": "Apartman Yönetimi", "lang": "TR"},
        {"function_code": "EVYOS", "type_code": "APT_PRK", "type_name": "Apartman Açık Park Alanı", "lang": "TR"},
        {"function_code": "EVYOS", "type_code": "APT_YSL", "type_name": "Apartman Yeşil Alan", "lang": "TR"},
        {"function_code": "EVYOS", "type_code": "APT_YOL", "type_name": "Apartman Ara Yol", "lang": "TR"},
    ]

    for insert_type in insert_types:
        build_types = InsertBuildTypes(
            function_code="EVYOS",
            lang=insert_type["lang"],
            type_code=str(insert_type["type_code"]).upper(),
            type_name=insert_type["type_name"],
        )
        created_build_type = BuildTypes.query.filter_by(
            function_code=build_types.function_code,
            type_code=build_types.type_code,
        ).first()
        if not created_build_type:
            created_build_type = BuildTypes.find_or_create(
                **build_types.model_dump(), is_confirmed=True, db=db_session
            )
            created_build_type.save()

    insert_enums = [
        {"enum_class": "BuildDuesTypes", "type_code": "BDT-D", "type_name": "Debit"},
        {"enum_class": "BuildDuesTypes", "type_code": "BDT-A", "type_name": "Add Debit"},
        {"enum_class": "BuildDuesTypes", "type_code": "BDT-R", "type_name": "Renovation"},
        {"enum_class": "BuildDuesTypes", "type_code": "BDT-L", "type_name": "Lawyer expense"},
        {"enum_class": "BuildDuesTypes", "type_code": "BDT-S", "type_name": "Service fee"},
        {"enum_class": "BuildDuesTypes", "type_code": "BDT-I", "type_name": "Information"},
        {"enum_class": "AccountingReceiptTypes", "type_code": "ART-A", "type_name": "Kasa Tahsil Fişi"},
        {"enum_class": "AccountingReceiptTypes", "type_code": "ART-E", "type_name": "Kasa Tediye Fişi"},
        {"enum_class": "AccountingReceiptTypes", "type_code": "ART-M", "type_name": "Mahsup Fişi"},
        {"enum_class": "AccountingReceiptTypes", "type_code": "ART-O", "type_name": "Açılış Fişi"},
        {"enum_class": "AccountingReceiptTypes", "type_code": "ART-C", "type_name": "Kapanış Fişi"},
        {"enum_class": "IbanBudgetType", "type_code": "IBT-I", "type_name": "Iban"},
        {"enum_class": "IbanBudgetType", "type_code": "IBT-B", "type_name": "Budget"},
        {"enum_class": "IbanBudgetType", "type_code": "IBT-TR", "type_name": "Transaction records"},
        {"enum_class": "ProjectTypes", "type_code": "R", "type_name": "Tadilat"},
        {"enum_class": "ProjectTypes", "type_code": "PT-C", "type_name": "Mahkeme süreçleri"},
        {"enum_class": "ProjectTypes", "type_code": "PT-Z", "type_name": "Sıfır Bakiye"},
        {"enum_class": "EdmBudgetType", "type_code": "PT-B", "type_name": "Banka records"},
        {"enum_class": "EdmBudgetType", "type_code": "PT-S", "type_name": "Sistem kaydı"},
        {"enum_class": "EdmBudgetType", "type_code": "EBT-C", "type_name": "Build, Flat or Site records"},
        {"enum_class": "ExpireType", "type_code": "1", "type_name": "daily"},
        {"enum_class": "ExpireType", "type_code": "7", "type_name": "weekly"},
        {"enum_class": "ExpireType", "type_code": "30", "type_name": "monthly"},
        {"enum_class": "ExpireType", "type_code": "90", "type_name": "quarter"},
        {"enum_class": "ExpireType", "type_code": "180", "type_name": "six_month"},
        {"enum_class": "ExpireType", "type_code": "365", "type_name": "yearly"},
        {"enum_class": "PhoneType", "type_code": "M", "type_name": "cep tel"},
        {"enum_class": "PhoneType", "type_code": "L", "type_name": "sabit telefon"},
        {"enum_class": "PhoneType", "type_code": "F", "type_name": "fax"},
        {"enum_class": "PhoneType", "type_code": "C", "type_name": "santral"},
        {"enum_class": "PhoneType", "type_code": "G", "type_name": "ülke genelindeki hatlar 444"},
        {"enum_class": "PerComType", "type_code": "1", "type_name": "Person"},
        {"enum_class": "PerComType", "type_code": "2", "type_name": "Company"},
        {"enum_class": "Directions", "type_code": "NN", "type_name": "North"},
        {"enum_class": "Directions", "type_code": "EE", "type_name": "East"},
        {"enum_class": "Directions", "type_code": "SS", "type_name": "South"},
        {"enum_class": "Directions", "type_code": "WW", "type_name": "West"},
        {"enum_class": "Directions", "type_code": "NE", "type_name": "North East"},
        {"enum_class": "Directions", "type_code": "NW", "type_name": "North West"},
        {"enum_class": "Directions", "type_code": "SE", "type_name": "South East"},
        {"enum_class": "Directions", "type_code": "SW", "type_name": "South West"},
        {"enum_class": "MeetingTypes", "type_code": "MT-RBM", "type_name": "Regular Building Meeting"},
        {"enum_class": "MeetingTypes", "type_code": "MT-DBM", "type_name": "Disaster Building Meeting"},
        {"enum_class": "MeetingTypes", "type_code": "MT-EBM", "type_name": "Emergency Building Meeting"},
        {"enum_class": "DebitTypes", "type_code": "DT-D", "type_name": "Debit Sender"},
        {"enum_class": "DebitTypes", "type_code": "DT-R", "type_name": "Credit Receiver"},
        {"enum_class": "DebitTypes", "type_code": "DT-Z", "type_name": "Zero Balance"},
        {"enum_class": "TimePeriod", "type_code": "TP-W", "type_name": "Weekly"},
        {"enum_class": "TimePeriod", "type_code": "TP-M", "type_name": "Monthly"},
        {"enum_class": "TimePeriod", "type_code": "TP-Q", "type_name": "Quarterly"},
        {"enum_class": "TimePeriod", "type_code": "TP-Y", "type_name": "Yearly"},
    ]

    for insert_enum in insert_enums:
        created_api_enum = ApiEnumDropdown.query.filter_by(
            enum_class=insert_enum["enum_class"],
            key=str(insert_enum["type_code"]).upper(),
        ).first()
        if not created_api_enum:
            created_api_enum = ApiEnumDropdown.create(
                enum_class=insert_enum["enum_class"],
                value=insert_enum["type_name"],
                key=str(insert_enum["type_code"]).upper(),
                description=insert_enum["type_name"],
                is_confirmed=True,
            )
            created_api_enum.save()
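# The enum rows above are inserted as raw dicts; a hedged sketch of a pydantic
# guard mirroring InsertBuildTypes (illustrative, not part of this commit):
#
#   class InsertApiEnum(BaseModel):
#       enum_class: str
#       type_code: str
#       type_name: str
#
#   validated_enums = [InsertApiEnum(**row) for row in insert_enums]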
@ -0,0 +1,299 @@
import arrow
from api_modules.token.password_module import PasswordModule
from api_controllers.mongo.database import mongo_handler
from schemas import (
    Addresses,
    BuildLivingSpace,
    Users,
    People,
    Build,
    BuildParts,
    BuildTypes,
    ApiEnumDropdown,
    Companies,
    OccupantTypes,
)


def create_occupant_defaults(db_session):
    created_list = []
    Addresses.set_session(db_session)
    BuildLivingSpace.set_session(db_session)
    Users.set_session(db_session)
    People.set_session(db_session)
    Build.set_session(db_session)
    BuildParts.set_session(db_session)
    BuildTypes.set_session(db_session)
    ApiEnumDropdown.set_session(db_session)
    Companies.set_session(db_session)
    OccupantTypes.set_session(db_session)

    company_management = Companies.query.filter_by(formal_name="Evyos LTD").first()
    if not company_management:
        raise Exception("Company not found")

    company_id, company_uu_id = company_management.id, str(company_management.uu_id)
    active_row = dict(is_confirmed=True, active=True, deleted=False, is_notification_send=True)
    build_type = BuildTypes.query.filter_by(type_code="APT").first()
    address = Addresses.query.filter_by(letter_address="Example Address").first()

    created_build = Build.query.filter_by(build_name="Build Example").first()
    if not created_build:
        created_build = Build.create(
            build_name="Build Example",
            build_code="B001",
            build_no="B001",
            build_date="01.07.1980",
            address_id=address.id,
            address_uu_id=str(address.uu_id),
            build_types_id=build_type.id,
            build_types_uu_id=str(build_type.uu_id),
            **active_row,
        )
        created_list.append(created_build)

    build_type_created = BuildTypes.query.filter_by(type_code="APT").first()
    build_type_flat = BuildTypes.query.filter_by(type_code="DAIRE").first()
    enum_dropdown = ApiEnumDropdown.query.filter_by(key="NE", enum_class="Directions").first()

    occupant_type_prs = OccupantTypes.query.filter_by(occupant_code="MT-PRS").first()
    occupant_type_owner = OccupantTypes.query.filter_by(occupant_code="FL-OWN").first()
    occupant_type_tenant = OccupantTypes.query.filter_by(occupant_code="FL-TEN").first()
    created_management_room = BuildParts.query.filter_by(part_code="MR001").first()

    if not created_management_room:
        created_management_room = BuildParts.create(
            address_gov_code="123123123123",
            build_id=created_build.id,
            build_uu_id=str(created_build.uu_id),
            part_code="MR001",
            part_net_size=100,
            part_no=0,
            part_level=0,
            part_type_id=build_type_created.id,
            part_type_uu_id=str(build_type_created.uu_id),
            part_direction_id=enum_dropdown.id,
            part_direction_uu_id=str(enum_dropdown.uu_id),
            human_livable=True,
            due_part_key="Example",
            **active_row,
        )
        created_list.append(created_management_room)

    created_flat = BuildParts.query.filter_by(part_code="MF001").first()
    if not created_flat:
        created_flat = BuildParts.create(
            address_gov_code="123123123124",
            build_id=created_build.id,
            build_uu_id=str(created_build.uu_id),
            part_code="MF001",
            part_net_size=100,
            part_no=1,
            part_level=1,
            part_type_id=build_type_flat.id,
            part_type_uu_id=str(build_type_flat.uu_id),
            part_direction_id=enum_dropdown.id,
            part_direction_uu_id=str(enum_dropdown.uu_id),
            human_livable=True,
            due_part_key="Example",
            **active_row,
        )
        created_list.append(created_flat)

    build_manager_people = People.query.filter_by(person_tag="Build Manager Example").first()
    if not build_manager_people:
        build_manager_people = People.create(
            **{
                "person_tag": "Build Manager Example",
                "firstname": "Example Build Manager",
                "surname": "Example",
                "sex_code": "M",
                "middle_name": "",
                "father_name": "Father",
                "mother_name": "Mother",
                "country_code": "TR",
                "national_identity_id": "12312312315",
                "birth_place": "Ankara",
                "birth_date": "01.07.1990",
                "tax_no": "1231231234",
            }
        )
        created_list.append(build_manager_people)

    owner_people = People.query.filter_by(person_tag="Owner Example").first()
    if not owner_people:
        owner_people = People.create(
            **{
                "person_tag": "Owner Example",
                "firstname": "Example Owner",
                "surname": "Example",
                "sex_code": "M",
                "middle_name": "",
                "father_name": "Father",
                "mother_name": "Mother",
                "country_code": "TR",
                "national_identity_id": "12312312316",
                "birth_place": "Ankara",
                "birth_date": "01.07.1990",
                "tax_no": "1231231234",
            }
        )
        created_list.append(owner_people)

    tenant_people = People.query.filter_by(person_tag="Tenant Example").first()
    if not tenant_people:
        tenant_people = People.create(
            **{
                "person_tag": "Tenant Example",
                "firstname": "Example Tenant",
                "surname": "Example",
                "sex_code": "M",
                "middle_name": "",
                "father_name": "Father",
                "mother_name": "Mother",
                "country_code": "TR",
                "national_identity_id": "12312312317",
                "birth_place": "Ankara",
                "birth_date": "01.07.1990",
                "tax_no": "1231231234",
            }
        )
        created_list.append(tenant_people)

    main_domain, collection_name = "evyos.com.tr", f"{str(company_management.uu_id)}*Domain"
    user_build_manager = Users.query.filter_by(user_tag="Build Manager Example").first()
    if not user_build_manager:
        user_build_manager = Users.create(
            person_id=build_manager_people.id,
            person_uu_id=str(build_manager_people.uu_id),
            user_tag=build_manager_people.person_tag,
            email="example.build.manager@gmail.com",
            phone_number="+901111111111",
            avatar="https://s.tmimgcdn.com/scr/800x500/276800/building-home-nature-logo-vector-template-3_276851-original.jpg",
            related_company=str(company_management.uu_id),
            **active_row,
        )
        created_list.append(user_build_manager)
        user_build_manager.password_expiry_begins = str(arrow.now())
        user_build_manager.password_token = PasswordModule.generate_refresher_token()

    user_owner = Users.query.filter_by(user_tag="Owner Example").first()
    if not user_owner:
        user_owner = Users.create(
            person_id=owner_people.id,
            person_uu_id=str(owner_people.uu_id),
            user_tag=owner_people.person_tag,
            email="example.owner@gmail.com",
            phone_number="+901111111111",
            avatar="https://s.tmimgcdn.com/scr/800x500/276800/building-home-nature-logo-vector-template-3_276851-original.jpg",
            related_company=str(company_management.uu_id),
            **active_row,
        )
        created_list.append(user_owner)
        user_owner.password_expiry_begins = str(arrow.now())
        user_owner.password_token = PasswordModule.generate_refresher_token()

    user_tenant = Users.query.filter_by(user_tag="Tenant Example").first()
    if not user_tenant:
        user_tenant = Users.create(
            person_id=tenant_people.id,
            person_uu_id=str(tenant_people.uu_id),
            user_tag=tenant_people.person_tag,
            email="example.tenant@gmail.com",
            phone_number="+901111111111",
            avatar="https://s.tmimgcdn.com/scr/800x500/276800/building-home-nature-logo-vector-template-3_276851-original.jpg",
            related_company=str(company_management.uu_id),
            **active_row,
        )
        created_list.append(user_tenant)
        user_tenant.password_expiry_begins = str(arrow.now())
        user_tenant.password_token = PasswordModule.generate_refresher_token()

    with mongo_handler.collection(collection_name) as mongo_engine:
        existing_record = mongo_engine.find_one({"user_uu_id": str(user_build_manager.uu_id)})
        if not existing_record:
            mongo_engine.insert_one(
                document={"user_uu_id": str(user_build_manager.uu_id), "other_domains_list": [main_domain], "main_domain": main_domain, "modified_at": arrow.now().timestamp()}
            )
        else:
            mongo_engine.update_one(
                {"user_uu_id": str(user_build_manager.uu_id)},
                {"$set": {"other_domains_list": [main_domain], "main_domain": main_domain, "modified_at": arrow.now().timestamp()}},
            )

    with mongo_handler.collection(collection_name) as mongo_engine:
        existing_record = mongo_engine.find_one({"user_uu_id": str(user_owner.uu_id)})
        if not existing_record:
            mongo_engine.insert_one(
                document={"user_uu_id": str(user_owner.uu_id), "other_domains_list": [main_domain], "main_domain": main_domain, "modified_at": arrow.now().timestamp()}
            )
        else:
            mongo_engine.update_one(
                {"user_uu_id": str(user_owner.uu_id)},
                {"$set": {"other_domains_list": [main_domain], "main_domain": main_domain, "modified_at": arrow.now().timestamp()}},
            )

    with mongo_handler.collection(collection_name) as mongo_engine:
        existing_record = mongo_engine.find_one({"user_uu_id": str(user_tenant.uu_id)})
        if not existing_record:
            mongo_engine.insert_one(
                document={"user_uu_id": str(user_tenant.uu_id), "other_domains_list": [main_domain], "main_domain": main_domain, "modified_at": arrow.now().timestamp()}
            )
        else:
            mongo_engine.update_one(
                {"user_uu_id": str(user_tenant.uu_id)},
                {"$set": {"other_domains_list": [main_domain], "main_domain": main_domain, "modified_at": arrow.now().timestamp()}},
            )

    created_build_living_space_prs = BuildLivingSpace.query.filter_by(
        build_id=created_build.id, build_parts_id=created_management_room.id, person_id=build_manager_people.id
    ).first()
    if not created_build_living_space_prs:
        created_build_living_space_prs = BuildLivingSpace.create(
            build_id=created_build.id,
            build_uu_id=str(created_build.uu_id),
            build_parts_id=created_management_room.id,
            build_parts_uu_id=str(created_management_room.uu_id),
            person_id=build_manager_people.id,
            person_uu_id=str(build_manager_people.uu_id),
            occupant_type_id=occupant_type_prs.id,
            occupant_type_uu_id=str(occupant_type_prs.uu_id),
            **active_row,
        )
        created_list.append(created_build_living_space_prs)

    created_build_living_space_owner = BuildLivingSpace.query.filter_by(
        build_id=created_build.id, build_parts_id=created_flat.id, person_id=owner_people.id
    ).first()
    if not created_build_living_space_owner:
        created_build_living_space_owner = BuildLivingSpace.create(
            build_id=created_build.id,
            build_uu_id=str(created_build.uu_id),
            build_parts_id=created_flat.id,
            build_parts_uu_id=str(created_flat.uu_id),
            person_id=owner_people.id,
            person_uu_id=str(owner_people.uu_id),
            occupant_type_id=occupant_type_owner.id,
            occupant_type_uu_id=str(occupant_type_owner.uu_id),
            **active_row,
        )
        created_list.append(created_build_living_space_owner)

    created_build_living_space_tenant = BuildLivingSpace.query.filter_by(
        build_id=created_build.id, build_parts_id=created_flat.id, person_id=tenant_people.id
    ).first()
    if not created_build_living_space_tenant:
        created_build_living_space_tenant = BuildLivingSpace.create(
            build_id=created_build.id,
            build_uu_id=str(created_build.uu_id),
            build_parts_id=created_flat.id,
            build_parts_uu_id=str(created_flat.uu_id),
            person_id=tenant_people.id,
            person_uu_id=str(tenant_people.uu_id),
            occupant_type_id=occupant_type_tenant.id,
            occupant_type_uu_id=str(occupant_type_tenant.uu_id),
            **active_row,
        )
        created_list.append(created_build_living_space_tenant)

    db_session.commit()
    print("Occupant Defaults Create is now completed")
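# The three Mongo blocks above share one upsert shape. A hedged sketch of the
# equivalent single call, assuming the collection wrapper forwards pymongo-style
# arguments (`upsert=True` is the assumption here, not confirmed by this commit):
#
#   def upsert_domain_record(mongo_engine, user_uu_id, domains, main_domain):
#       mongo_engine.update_one(
#           {"user_uu_id": user_uu_id},
#           {"$set": {"other_domains_list": domains, "main_domain": main_domain,
#                     "modified_at": arrow.now().timestamp()}},
#           upsert=True,
#       )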
@ -0,0 +1,225 @@
from schemas import OccupantTypes


def create_occupant_types_defaults(db_session):
    """
    occupant_category = mapped_column(String, server_default="")
    occupant_category_type = mapped_column(String, server_default="")
    occupant_is_unique = mapped_column(Boolean, server_default="0")
    """
    OccupantTypes.set_session(db_session)
    list_occupant_types = [
        {
            "occupant_type": "Toplantı Başkanı",
            "occupant_description": "Toplantı Başkanı",
            "occupant_code": "MT-PRS",
            "occupant_category": "Toplantı",
            "occupant_category_type": "MT",
            "occupant_is_unique": True,
        },
        {
            "occupant_type": "Toplantı Katip",
            "occupant_description": "Toplantıda tutanak tutan kişi",
            "occupant_code": "MT-WRT",
            "occupant_category": "Toplantı",
            "occupant_category_type": "MT",
            "occupant_is_unique": True,
        },
        {
            "occupant_type": "Toplantı Katılımcısı",
            "occupant_description": "Toplantıda sadece katılan kişi",
            "occupant_code": "MT-ATT",
            "occupant_category": "Toplantı",
            "occupant_category_type": "MT",
            "occupant_is_unique": False,
        },
        {
            "occupant_type": "Toplantı Danışman",
            "occupant_description": "Toplantıda danışmanlık yapan kişi",
            "occupant_code": "MT-ADV",
            "occupant_category": "Toplantı",
            "occupant_category_type": "MT",
            "occupant_is_unique": False,
        },
        {
            "occupant_type": "Toplantı Seçilmiş Başkanı",
            "occupant_description": "Toplantı Seçilmiş Başkanı",
            "occupant_code": "MT-VPR",
            "occupant_category": "Toplantı",
            "occupant_category_type": "MT",
            "occupant_is_unique": True,
        },
        {
            "occupant_type": "Daire Sahibi",
            "occupant_description": "Daire Sahibi",
            "occupant_code": "FL-OWN",
            "occupant_category": "Daire",
            "occupant_category_type": "FL",
            "occupant_is_unique": True,
        },
        {
            "occupant_type": "Daire Kiracısı",
            "occupant_description": "Daire Kiracısı",
            "occupant_code": "FL-TEN",
            "occupant_category": "Daire",
            "occupant_category_type": "FL",
            "occupant_is_unique": True,
        },
        {
            "occupant_type": "Daire Sakini",
            "occupant_description": "Daire Sakini",
            "occupant_code": "FL-RES",
            "occupant_category": "Daire",
            "occupant_category_type": "FL",
            "occupant_is_unique": False,
        },
        {
            "occupant_type": "Daire Sakini Vekili",
            "occupant_description": "Daire Sakini Vekili",
            "occupant_code": "FL-REP",
            "occupant_category": "Daire",
            "occupant_category_type": "FL",
            "occupant_is_unique": False,
        },
        {
            "occupant_type": "Bina Avukatı",
            "occupant_description": "Bina Avukatı",
            "occupant_code": "BU-ATT",
            "occupant_category": "Bina",
            "occupant_category_type": "BU",
            "occupant_is_unique": False,
        },
        {
            "occupant_type": "Bina Avukatı Yardımcısı",
            "occupant_description": "Bina Avukatı Yardımcısı",
            "occupant_code": "BU-ATA",
            "occupant_category": "Bina",
            "occupant_category_type": "BU",
            "occupant_is_unique": False,
        },
        {
            "occupant_type": "Bina Denetmen Yardımcısı",
            "occupant_description": "Bina Denetmen Yardımcısı",
            "occupant_code": "BU-SPA",
            "occupant_category": "Bina",
            "occupant_category_type": "BU",
            "occupant_is_unique": False,
        },
        {
            "occupant_type": "Bina Denetmeni",
            "occupant_description": "Bina Denetmeni",
            "occupant_code": "BU-SPV",
            "occupant_category": "Bina",
            "occupant_category_type": "BU",
            "occupant_is_unique": False,
        },
        {
            "occupant_type": "Bina Yönetici Yardımcısı",
            "occupant_description": "Bina Yönetici Yardımcısı",
            "occupant_code": "BU-MNA",
            "occupant_category": "Bina",
            "occupant_category_type": "BU",
            "occupant_is_unique": False,
        },
        {
            "occupant_type": "Bina Yöneticisi",
            "occupant_description": "Bina Yöneticisi",
            "occupant_code": "BU-MNG",
            "occupant_category": "Bina",
            "occupant_category_type": "BU",
            "occupant_is_unique": True,
        },
        {
            "occupant_type": "Bina Muhasebecisi",
            "occupant_description": "Bina Muhasebecisi",
            "occupant_code": "BU-ACC",
            "occupant_category": "Bina",
            "occupant_category_type": "BU",
            "occupant_is_unique": False,
        },
        {
            "occupant_type": "Proje Lideri",
            "occupant_description": "Proje Lideri",
            "occupant_code": "PRJ-LDR",
            "occupant_category": "Proje",
            "occupant_category_type": "PRJ",
            "occupant_is_unique": False,
        },
        {
            "occupant_type": "Proje Sorumlusu",
            "occupant_description": "Proje Sorumlusu",
            "occupant_code": "PRJ-RES",
            "occupant_category": "Proje",
            "occupant_category_type": "PRJ",
            "occupant_is_unique": False,
        },
        {
            "occupant_type": "Proje Ekibi",
            "occupant_description": "Proje Ekibi",
            "occupant_code": "PRJ-EMP",
            "occupant_category": "Proje",
            "occupant_category_type": "PRJ",
            "occupant_is_unique": False,
        },
        {
            "occupant_type": "Proje Finans Sorumlusu",
            "occupant_description": "Proje Finans Sorumlusu",
            "occupant_code": "PRJ-FIN",
            "occupant_category": "Proje",
            "occupant_category_type": "PRJ",
            "occupant_is_unique": False,
        },
        {
            "occupant_type": "Proje Teknik Sorumlusu",
            "occupant_description": "Proje Teknik Sorumlusu",
            "occupant_code": "PRJ-TEC",
            "occupant_category": "Proje",
            "occupant_category_type": "PRJ",
            "occupant_is_unique": False,
        },
        {
            "occupant_type": "Daire Mülkiyet Vekili",
            "occupant_description": "Daire Mülkiyet Vekili",
            "occupant_code": "FL-DEP",  # deputy
            "occupant_category": "Daire",
            "occupant_category_type": "FL",
            "occupant_is_unique": False,
        },
        {
            "occupant_type": "Bina Teknik Sorumlusu",
            "occupant_description": "Bina Teknik Sorumlusu",
            "occupant_code": "BU-TEC",
            "occupant_category": "Bina",
            "occupant_category_type": "BU",
            "occupant_is_unique": False,
        },
        {
            "occupant_type": "Bina Teknik Elemanı",
            "occupant_description": "Bina Teknik Elemanı",
            "occupant_code": "BU-EMP",
            "occupant_category": "Bina",
            "occupant_category_type": "BU",
            "occupant_is_unique": False,
        },
        {
            "occupant_type": "Bina Teknik Freelancer",
            "occupant_description": "Bina Teknik Freelancer",
            "occupant_code": "BU-FLC",
            "occupant_category": "Bina",
            "occupant_category_type": "BU",
            "occupant_is_unique": False,
        },
    ]
    for list_occupant_type in list_occupant_types:
        try:
            created_type = OccupantTypes.query.filter_by(
                occupant_code=list_occupant_type["occupant_code"],
                occupant_category_type=list_occupant_type["occupant_category_type"],
            ).first()
            if not created_type:
                created_type = OccupantTypes.create(**list_occupant_type, is_confirmed=True)
                created_type.save()
        except Exception as e:
            print(f"Error: {e}")
@ -0,0 +1,78 @@
from schemas import (
    Duty,
    OccupantTypes,
    Modules,
    Services,
)


def create_modules_and_services_and_actions(db_session):
    Duty.set_session(db_session)
    Services.set_session(db_session)
    Modules.set_session(db_session)
    OccupantTypes.set_session(db_session)

    erp_module = Modules.query.filter_by(module_code="EVYOS-ERP").first()
    if not erp_module:
        erp_module = Modules.create(
            **{
                "module_name": "EVYOS ERP",
                "module_description": "EVYOS Enterprise Resource Planning",
                "module_code": "EVYOS-ERP",
                "module_layer": 1,
                "is_default_module": False,
                "is_confirmed": True,
            }
        )
        erp_module.save()

    build_module = Modules.query.filter_by(module_code="BLD-MNG").first()
    if not build_module:
        build_module = Modules.create(
            **{
                "module_name": "Bina Yönetim Modülü",
                "module_description": "Building Management Module",
                "module_code": "BLD-MNG",
                "module_layer": 1,
                "is_default_module": False,
                "is_confirmed": True,
            }
        )
        build_module.save()

    user_module = Modules.query.filter_by(module_code="USR-PUB").first()
    if not user_module:
        user_module = Modules.create(
            **{
                "module_name": "Kullanıcı Modülü",
                "module_description": "Kullanıcı Genel Modülü",
                "module_code": "USR-PUB",
                "module_layer": 1,
                "is_default_module": True,
                "is_confirmed": True,
            }
        )
        user_module.save()

    erp_module_module_dict = dict(module_id=erp_module.id, module_uu_id=str(erp_module.uu_id))
    build_module_module_dict = dict(module_id=build_module.id, module_uu_id=str(build_module.uu_id))
    duty_objects = Duty.query.filter(Duty.module_id == erp_module.id).all()
    if not duty_objects:
        raise Exception("Duty objects not found")

    # Mirror each ERP duty as a service record (service_code "SRE-<duty_code>").
    for duty_object in duty_objects:
        created_service = Services.query.filter(Services.service_code == f"SRE-{duty_object.duty_code}").first()
        if not created_service:
            created_service = Services.create(
                **erp_module_module_dict,
                service_name=duty_object.duty_name,
                service_description=duty_object.duty_description,
                service_code=f"SRE-{duty_object.duty_code}",
                related_responsibility=duty_object.duty_code,
                is_confirmed=True,
            )
            created_service.save()

    occupant_types = OccupantTypes.query.filter(OccupantTypes.module_id == build_module.id).all()
    if not occupant_types:
        raise Exception("Occupant types not found")
    # Mirror each occupant type as a service record (service_code "SRO-<occupant_code>").
    for occupant_type in occupant_types:
        created_service = Services.query.filter(Services.service_code == f"SRO-{occupant_type.occupant_code}").first()
        if not created_service:
            created_service = Services.create(
                **build_module_module_dict,
                service_name=occupant_type.occupant_type,
                service_description=occupant_type.occupant_description,
                service_code=f"SRO-{occupant_type.occupant_code}",
                related_responsibility=occupant_type.occupant_code,
                is_confirmed=True,
            )
            created_service.save()
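# A hedged sketch of how the seeders in this commit might be wired together at
# startup; the function names are the ones defined above, while the session
# factory is an assumption:
#
#   with SessionLocal() as db_session:  # hypothetical factory
#       create_application_defaults(db_session)
#       init_api_enums_build_types(db_session)
#       create_occupant_types_defaults(db_session)   # types first, since
#       create_occupant_defaults(db_session)         # occupants query them
#       create_modules_and_services_and_actions(db_session)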
@ -0,0 +1,31 @@
FROM python:3.12-slim

WORKDIR /

# Install system dependencies and Poetry
RUN apt-get update && apt-get install -y --no-install-recommends gcc && rm -rf /var/lib/apt/lists/* && pip install --no-cache-dir poetry

# Copy Poetry configuration
COPY /pyproject.toml ./pyproject.toml

# Configure Poetry and install dependencies with optimizations
RUN poetry config virtualenvs.create false && poetry install --no-interaction --no-ansi --no-root --only main && pip cache purge && rm -rf ~/.cache/pypoetry

# Copy application code
COPY /api_services/api_initializer /api_initializer
COPY /api_services/api_controllers /api_controllers
COPY /api_services/api_validations /api_validations
COPY /api_services/api_modules /api_modules
COPY /api_services/schemas /schemas
COPY /api_services/api_middlewares /api_middlewares

COPY /api_services/api_builds/management_service/endpoints /api_initializer/endpoints
COPY /api_services/api_builds/management_service/events /api_initializer/events
COPY /api_services/api_builds/management_service/validations /api_initializer/validations
COPY /api_services/api_builds/management_service/index.py /api_initializer/index.py

# Set Python path to include app directory
ENV PYTHONPATH=/ PYTHONUNBUFFERED=1 PYTHONDONTWRITEBYTECODE=1

# Run the application using the configured uvicorn server
CMD ["poetry", "run", "python", "/api_initializer/app.py"]
@ -0,0 +1,3 @@
__all__ = []
@ -0,0 +1,147 @@
from typing import Any
from fastapi import APIRouter, Depends

from index import endpoints_index
from events.application.cluster import ApplicationRouterCluster

from api_validations.defaults.validations import CommonHeaders
from api_validations.response.pagination import PaginateOnly
from api_middlewares.token_provider import TokenProvider


application_endpoint_route = APIRouter(prefix="/application", tags=["Application Cluster"])


application_list_all = "ApplicationListAll"
@application_endpoint_route.post(
    path="/list/all",
    description="List all applications endpoint",
    operation_id=endpoints_index[application_list_all],
)
def application_list_all_route(data: PaginateOnly, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    # Resolve the caller's token, derive the event key for this endpoint,
    # then dispatch to the matching handler in the cluster.
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    FoundCluster = ApplicationRouterCluster.get_event_cluster(application_list_all)
    event_cluster_matched = FoundCluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(list_options=data)


application_list_available = "ApplicationListAvailable"
@application_endpoint_route.post(
    path="/list/available",
    description="List available applications endpoint",
    operation_id=endpoints_index[application_list_available],
)
def application_list_available_route(data: PaginateOnly, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    FoundCluster = ApplicationRouterCluster.get_event_cluster(application_list_available)
    event_cluster_matched = FoundCluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(list_options=data)


application_list_appended = "ApplicationListAppended"
@application_endpoint_route.post(
    path="/list/appended",
    description="List appended applications endpoint",
    operation_id=endpoints_index[application_list_appended],
)
def application_list_appended_route(data: PaginateOnly, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    FoundCluster = ApplicationRouterCluster.get_event_cluster(application_list_appended)
    event_cluster_matched = FoundCluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(list_options=data)


application_register_service = "ApplicationRegisterService"
@application_endpoint_route.post(
    path="/register/service",
    description="Register service endpoint",
    operation_id=endpoints_index[application_register_service],
)
def application_register_service_route(data: PaginateOnly, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    FoundCluster = ApplicationRouterCluster.get_event_cluster(application_register_service)
    event_cluster_matched = FoundCluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(list_options=data)


application_unregister_service = "ApplicationUnRegisterService"
@application_endpoint_route.post(
    path="/unregister/service",
    description="Unregister service endpoint",
    operation_id=endpoints_index[application_unregister_service],
)
def application_unregister_service_route(data: PaginateOnly, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    FoundCluster = ApplicationRouterCluster.get_event_cluster(application_unregister_service)
    event_cluster_matched = FoundCluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(list_options=data)
|
||||
|
||||
application_create = "ApplicationCreate"
|
||||
@application_endpoint_route.post(
|
||||
path="/create",
|
||||
description="Create application endpoint",
|
||||
operation_id=endpoints_index[application_create],
|
||||
)
|
||||
def application_create_route(data: PaginateOnly, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
|
||||
token_object = TokenProvider.get_dict_from_redis(token=headers.token)
|
||||
event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
|
||||
event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
|
||||
FoundCluster = ApplicationRouterCluster.get_event_cluster(application_create)
|
||||
event_cluster_matched = FoundCluster.match_event(event_key=event_key)
|
||||
return event_cluster_matched.event_callable(list_options=data)
|
||||
|
||||
|
||||
application_update = "ApplicationUpdate"
|
||||
@application_endpoint_route.post(
|
||||
path="/update/{application_uuid}",
|
||||
description="Update application endpoint",
|
||||
operation_id=endpoints_index[application_update],
|
||||
)
|
||||
def application_update_route(application_uuid: str, data: PaginateOnly, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
|
||||
token_object = TokenProvider.get_dict_from_redis(token=headers.token)
|
||||
event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
|
||||
event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
|
||||
FoundCluster = ApplicationRouterCluster.get_event_cluster(application_update)
|
||||
event_cluster_matched = FoundCluster.match_event(event_key=event_key)
|
||||
return event_cluster_matched.event_callable(list_options=data)
|
||||
|
||||
|
||||
application_bind_employee = "ApplicationBindEmployee"
|
||||
@application_endpoint_route.post(
|
||||
path="/bind/employee",
|
||||
description="Bind employee endpoint",
|
||||
operation_id=endpoints_index[application_bind_employee],
|
||||
)
|
||||
def application_bind_employee_route(data: PaginateOnly, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
|
||||
token_object = TokenProvider.get_dict_from_redis(token=headers.token)
|
||||
event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
|
||||
event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
|
||||
FoundCluster = ApplicationRouterCluster.get_event_cluster(application_bind_employee)
|
||||
event_cluster_matched = FoundCluster.match_event(event_key=event_key)
|
||||
return event_cluster_matched.event_callable(list_options=data)
|
||||
|
||||
|
||||
application_bind_occupant = "ApplicationBindOccupant"
|
||||
@application_endpoint_route.post(
|
||||
path="/bind/occupant",
|
||||
description="Bind occupant endpoint",
|
||||
operation_id=endpoints_index[application_bind_occupant],
|
||||
)
|
||||
def application_bind_occupant_route(data: PaginateOnly, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
|
||||
token_object = TokenProvider.get_dict_from_redis(token=headers.token)
|
||||
event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
|
||||
event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
|
||||
FoundCluster = ApplicationRouterCluster.get_event_cluster(application_bind_occupant)
|
||||
event_cluster_matched = FoundCluster.match_event(event_key=event_key)
|
||||
return event_cluster_matched.event_callable(list_options=data)
|
||||
|
|
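Every route in the application cluster above repeats the same four-step dispatch: load the token object from Redis, derive the event key, look up the endpoint's event cluster, and invoke the matched callable. As a sketch only, and assuming the TokenProvider and RouterCluster interfaces used in this file, the boilerplate could be collapsed into one helper; the name dispatch_event is hypothetical and not part of this commit:

# Hypothetical helper (not in this commit): factors out the dispatch
# sequence repeated by every route above.
def dispatch_event(cluster, endpoint_name, headers, **callable_kwargs):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_key = TokenProvider.retrieve_event_codes(endpoint_code=headers.operation_id, token=token_object)
    event_cluster_matched = cluster.get_event_cluster(endpoint_name).match_event(event_key=event_key)
    return event_cluster_matched.event_callable(**callable_kwargs)

A route body would then reduce to a single line: return dispatch_event(ApplicationRouterCluster, application_list_all, headers, list_options=data).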
@@ -0,0 +1,13 @@
from typing import Any
from fastapi import APIRouter, Depends

from index import endpoints_index
# from events.services.cluster import ServicesEndpointRouterCluster

from api_validations.defaults.validations import CommonHeaders
from api_validations.response.pagination import PaginateOnly
from api_middlewares.token_provider import TokenProvider


# Create API router
duty_types_route = APIRouter(prefix="/duty/types", tags=["Duty Types Cluster"])
@@ -0,0 +1,121 @@
from fastapi import APIRouter, Depends

from index import endpoints_index
from events.events.cluster import EventsEndpointRouterCluster

from api_validations.defaults.validations import CommonHeaders
from api_validations.response.pagination import PaginateOnly
from api_middlewares.token_provider import TokenProvider


# Create API router
event_endpoint_route = APIRouter(prefix="/events", tags=["Event Actions"])


event_list_available = "EventsListAvailable"
@event_endpoint_route.post(
    path="/list/available",
    description="List available events endpoint",
    operation_id=endpoints_index[event_list_available],
)
def event_list_available_route(
    data: PaginateOnly,
    headers: CommonHeaders = Depends(CommonHeaders.as_dependency),
):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    found_cluster = EventsEndpointRouterCluster.get_event_cluster(event_list_available)
    event_cluster_matched = found_cluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(list_options=data)


event_list_appended = "EventsListAppended"
@event_endpoint_route.post(
    path="/list/appended",
    description="List appended events endpoint",
    operation_id=endpoints_index[event_list_appended],
)
def event_list_appended_route(
    data: PaginateOnly,
    headers: CommonHeaders = Depends(CommonHeaders.as_dependency),
):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    found_cluster = EventsEndpointRouterCluster.get_event_cluster(event_list_appended)
    event_cluster_matched = found_cluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(list_options=data)


event_register_service = "EventRegisterService"
@event_endpoint_route.post(
    path="/register/service",
    description="Register service endpoint",
    operation_id=endpoints_index[event_register_service],
)
def event_register_service_route(
    data: PaginateOnly,
    headers: CommonHeaders = Depends(CommonHeaders.as_dependency),
):
    # TODO: replace PaginateOnly with a dedicated request model once the validator exists
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    found_cluster = EventsEndpointRouterCluster.get_event_cluster(event_register_service)
    event_cluster_matched = found_cluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(data=data)


event_unregister_service = "EventUnRegisterService"
@event_endpoint_route.post(
    path="/unregister/service",
    description="Unregister service endpoint",
    operation_id=endpoints_index[event_unregister_service],
)
def event_unregister_service_route(
    data: PaginateOnly,
    headers: CommonHeaders = Depends(CommonHeaders.as_dependency),
):
    # TODO: replace PaginateOnly with a dedicated request model once the validator exists
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    found_cluster = EventsEndpointRouterCluster.get_event_cluster(event_unregister_service)
    event_cluster_matched = found_cluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(data=data)


event_bind_employee_extra = "EventBindExtraEmployee"
@event_endpoint_route.post(
    path="/bind/employee/extra",
    description="Bind employee extra endpoint",
    operation_id=endpoints_index[event_bind_employee_extra],
)
def event_bind_employee_extra_route(
    data: PaginateOnly,
    headers: CommonHeaders = Depends(CommonHeaders.as_dependency),
):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    found_cluster = EventsEndpointRouterCluster.get_event_cluster(event_bind_employee_extra)
    event_cluster_matched = found_cluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(data=data)


event_bind_occupant_extra = "EventBindExtraOccupant"
@event_endpoint_route.post(
    path="/bind/occupant/extra",
    description="Bind occupant extra endpoint",
    operation_id=endpoints_index[event_bind_occupant_extra],
)
def event_bind_occupant_extra_route(
    data: PaginateOnly,
    headers: CommonHeaders = Depends(CommonHeaders.as_dependency),
):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    found_cluster = EventsEndpointRouterCluster.get_event_cluster(event_bind_occupant_extra)
    event_cluster_matched = found_cluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(data=data)
@@ -0,0 +1,17 @@
from fastapi import APIRouter
from .events.router import event_endpoint_route
from .services.router import services_route
from .application.router import application_endpoint_route


def get_routes() -> list[APIRouter]:
    return [event_endpoint_route, application_endpoint_route, services_route]


def get_safe_endpoint_urls() -> list[tuple[str, str]]:
    return [
        ("/", "GET"),
        ("/docs", "GET"),
        ("/redoc", "GET"),
        ("/openapi.json", "GET"),
        ("/metrics", "GET"),
    ]
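get_safe_endpoint_urls() presumably feeds an authentication middleware that skips token checks on public routes; the middleware itself is not part of this commit. A minimal sketch of such a consumer, under that assumption:

# Sketch (assumption): how a middleware might consume the safe list.
SAFE_ENDPOINTS = set(get_safe_endpoint_urls())


def is_safe_endpoint(path: str, method: str) -> bool:
    # ("/docs", "GET") and friends bypass token validation
    return (path, method) in SAFE_ENDPOINTS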
@@ -0,0 +1,74 @@
from fastapi import APIRouter, Depends

from index import endpoints_index
from events.services.cluster import ServicesRouterCluster

from api_validations.defaults.validations import CommonHeaders
from api_validations.response.pagination import PaginateOnly
from api_middlewares.token_provider import TokenProvider


# Create API router
services_route = APIRouter(prefix="/services", tags=["Service Cluster"])


services_list = "ServicesList"
@services_route.post(
    path="/list",
    description="List all services endpoint",
    operation_id=endpoints_index[services_list],
)
def services_list_route(data: PaginateOnly, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    found_cluster = ServicesRouterCluster.get_event_cluster(services_list)
    event_cluster_matched = found_cluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(list_options=data, headers=headers)


services_create = "ServicesCreate"
@services_route.post(
    path="/create",
    description="Create service endpoint",
    operation_id=endpoints_index[services_create],
)
def services_create_route(data: dict, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    # TODO: type the request body with a dedicated request model
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    found_cluster = ServicesRouterCluster.get_event_cluster(services_create)
    event_cluster_matched = found_cluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(data=data, headers=headers)


services_update = "ServicesUpdate"
@services_route.post(
    path="/update/{uu_id}",
    description="Update service endpoint",
    operation_id=endpoints_index[services_update],
)
def services_update_route(uu_id: str, data: dict, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    # TODO: pass uu_id once the update callable accepts it
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    found_cluster = ServicesRouterCluster.get_event_cluster(services_update)
    event_cluster_matched = found_cluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(data=data, headers=headers)


services_delete = "ServicesDelete"
@services_route.post(
    path="/delete/{uu_id}",
    description="Delete service endpoint",
    operation_id=endpoints_index[services_delete],
)
def services_delete_route(uu_id: str, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    found_cluster = ServicesRouterCluster.get_event_cluster(services_delete)
    event_cluster_matched = found_cluster.match_event(event_key=event_key)
    # the stub callable ignores its payload; pass the path id until a request model exists
    return event_cluster_matched.event_callable(data=uu_id, headers=headers)
@@ -0,0 +1,69 @@
import datetime

from fastapi import APIRouter, Depends
from sqlalchemy import func

from schemas import AccountRecords

from endpoints.index import endpoints_index

from api_validations.defaults.validations import CommonHeaders


test_endpoint_route = APIRouter(prefix="/tests", tags=["Endpoint Tests"])


account_records_all = "AccountRecordsAll"
@test_endpoint_route.get(
    path="/account/records/all",
    description="Account records endpoint",
    operation_id=endpoints_index[account_records_all],
)
def account_records_all_route(headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    with AccountRecords.new_session() as db_session:
        AccountRecords.set_session(db_session)
        # SUM over an empty set yields NULL, so guard the scalars with `or 0`
        account_records_positive_sum = db_session.query(
            func.sum(AccountRecords.currency_value)
        ).filter(AccountRecords.currency_value > 0).scalar() or 0
        account_records_negative_sum = db_session.query(
            func.sum(AccountRecords.currency_value)
        ).filter(AccountRecords.currency_value < 0).scalar() or 0
        account_records_data = db_session.query(AccountRecords).order_by(
            AccountRecords.bank_date.desc()
        ).first()
        return {
            "message": "MSG0003-LIST",
            "data": {
                "debt": float(account_records_negative_sum),
                "budget": float(account_records_positive_sum),
                "total": float(account_records_positive_sum) + float(account_records_negative_sum),
                "lastPayment": account_records_data.bank_date if account_records_data else None,
            },
            "completed": True,
        }


account_records_monthly = "AccountRecordsMonthly"
@test_endpoint_route.get(
    path="/account/records/monthly",
    description="Account records endpoint",
    operation_id=endpoints_index[account_records_monthly],
)
def account_records_monthly_route(headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    today = datetime.date.today()
    with AccountRecords.new_session() as db_session:
        AccountRecords.set_session(db_session)
        account_records_positive_sum = db_session.query(
            func.sum(AccountRecords.currency_value)
        ).filter(
            AccountRecords.bank_date_y == today.year,
            AccountRecords.bank_date_m == today.month,
            AccountRecords.currency_value > 0,
        ).scalar() or 0
        account_records_negative_sum = db_session.query(
            func.sum(AccountRecords.currency_value)
        ).filter(
            AccountRecords.bank_date_y == today.year,
            AccountRecords.bank_date_m == today.month,
            AccountRecords.currency_value < 0,
        ).scalar() or 0
        account_records_data = db_session.query(AccountRecords).filter(
            AccountRecords.bank_date_y == today.year,
            AccountRecords.bank_date_m == today.month,
        ).order_by(AccountRecords.bank_date.desc()).first()
        return {
            "message": "MSG0003-LIST",
            "data": {
                "debt": float(account_records_negative_sum),
                "budget": float(account_records_positive_sum),
                "total": float(account_records_positive_sum) + float(account_records_negative_sum),
                "lastPayment": account_records_data.bank_date if account_records_data else None,
            },
            "completed": True,
        }
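Note that SQL SUM returns NULL over an empty set, which is why the routes above guard their scalar results with "or 0". The same guard can be pushed into SQL with func.coalesce; an equivalent formulation of the budget query, shown as a sketch:

# Equivalent query with the NULL handling done in SQL rather than Python.
budget_total = db_session.query(
    func.coalesce(func.sum(AccountRecords.currency_value), 0)
).filter(AccountRecords.currency_value > 0).scalar()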
@@ -0,0 +1,5 @@
from .events.cluster import EventsEndpointRouterCluster
from .application.cluster import ApplicationRouterCluster
# from .services.cluster import ServicesRouterCluster

__all__ = ["EventsEndpointRouterCluster", "ApplicationRouterCluster"]
@@ -0,0 +1,54 @@
from api_initializer.event_clusters import EventCluster, RouterCluster
from index import endpoints_index
from .supers_events import (
    ApplicationListAllEvent,
    ApplicationListAvailableEvent,
    ApplicationListAppendedEvent,
    ApplicationRegisterServiceEvent,
    ApplicationUnRegisterServiceEvent,
    ApplicationCreateEvent,
    ApplicationUpdateEvent,
    ApplicationBindEmployeeEvent,
    ApplicationBindOccupantEvent,
)


ApplicationRouterCluster = RouterCluster(name="ApplicationRouterCluster")

ApplicationEventClusterListAll = EventCluster(name="ApplicationListAll", endpoint_uu_id=endpoints_index["ApplicationListAll"])
ApplicationEventClusterListAll.add_event(ApplicationListAllEvent)

ApplicationEventClusterListAvailable = EventCluster(name="ApplicationListAvailable", endpoint_uu_id=endpoints_index["ApplicationListAvailable"])
ApplicationEventClusterListAvailable.add_event(ApplicationListAvailableEvent)

ApplicationEventClusterListAppended = EventCluster(name="ApplicationListAppended", endpoint_uu_id=endpoints_index["ApplicationListAppended"])
ApplicationEventClusterListAppended.add_event(ApplicationListAppendedEvent)

ApplicationEventClusterRegisterService = EventCluster(name="ApplicationRegisterService", endpoint_uu_id=endpoints_index["ApplicationRegisterService"])
ApplicationEventClusterRegisterService.add_event(ApplicationRegisterServiceEvent)

ApplicationEventClusterUnregisterService = EventCluster(name="ApplicationUnRegisterService", endpoint_uu_id=endpoints_index["ApplicationUnRegisterService"])
ApplicationEventClusterUnregisterService.add_event(ApplicationUnRegisterServiceEvent)

ApplicationEventClusterCreate = EventCluster(name="ApplicationCreate", endpoint_uu_id=endpoints_index["ApplicationCreate"])
ApplicationEventClusterCreate.add_event(ApplicationCreateEvent)

ApplicationEventClusterUpdate = EventCluster(name="ApplicationUpdate", endpoint_uu_id=endpoints_index["ApplicationUpdate"])
ApplicationEventClusterUpdate.add_event(ApplicationUpdateEvent)

ApplicationEventClusterBindEmployee = EventCluster(name="ApplicationBindEmployee", endpoint_uu_id=endpoints_index["ApplicationBindEmployee"])
ApplicationEventClusterBindEmployee.add_event(ApplicationBindEmployeeEvent)

ApplicationEventClusterBindOccupant = EventCluster(name="ApplicationBindOccupant", endpoint_uu_id=endpoints_index["ApplicationBindOccupant"])
ApplicationEventClusterBindOccupant.add_event(ApplicationBindOccupantEvent)


ApplicationRouterCluster.set_event_cluster(ApplicationEventClusterListAll)
ApplicationRouterCluster.set_event_cluster(ApplicationEventClusterListAvailable)
ApplicationRouterCluster.set_event_cluster(ApplicationEventClusterListAppended)
ApplicationRouterCluster.set_event_cluster(ApplicationEventClusterRegisterService)
ApplicationRouterCluster.set_event_cluster(ApplicationEventClusterUnregisterService)
ApplicationRouterCluster.set_event_cluster(ApplicationEventClusterCreate)
ApplicationRouterCluster.set_event_cluster(ApplicationEventClusterUpdate)
ApplicationRouterCluster.set_event_cluster(ApplicationEventClusterBindEmployee)
ApplicationRouterCluster.set_event_cluster(ApplicationEventClusterBindOccupant)
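The Event, EventCluster, and RouterCluster classes come from api_initializer.event_clusters, which is not included in this diff. Based solely on how they are used above, the expected contract looks roughly like the following sketch; the internals are inferred, not confirmed by the commit:

# Inferred sketch of the cluster contract; the real implementation
# lives in api_initializer.event_clusters and may differ.
class Event:
    def __init__(self, name, key, request_validator=None, response_validator=None, description=""):
        self.name, self.key, self.description = name, key, description
        self.request_validator = request_validator
        self.response_validator = response_validator
        self.event_callable = None  # assigned after definition in supers_events


class EventCluster:
    def __init__(self, name, endpoint_uu_id):
        self.name, self.endpoint_uu_id = name, endpoint_uu_id
        self._events = {}

    def add_event(self, event):
        self._events[event.key] = event

    def match_event(self, event_key):
        # the caller's token decides which event variant runs
        return self._events[event_key]


class RouterCluster:
    def __init__(self, name):
        self.name = name
        self._clusters = {}

    def set_event_cluster(self, event_cluster):
        self._clusters[event_cluster.name] = event_cluster

    def get_event_cluster(self, name):
        return self._clusters[name]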
@@ -0,0 +1,356 @@
from typing import Any

from api_initializer.event_clusters import Event
from api_validations.response import (
    PaginateOnly,
    Pagination,
    PaginationResult,
    PostgresResponseSingle,
    PostgresResponse,
    EndpointResponse,
)

from schemas import (
    Applications,
    Application2Employee,
    Application2Occupant,
    Service2Application,
    Services,
)

# List all endpoint
ApplicationListAllEvent = Event(
    name="application_list_all",
    key="1971ce4d-4f59-4aa8-83e2-ca19d7da6d11",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users List all applications endpoint",
)

# List available endpoint
ApplicationListAvailableEvent = Event(
    name="application_list_available",
    key="d8e733f5-b53a-4c36-9082-12579bf9cc4a",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users List available applications endpoint",
)

# List appended endpoint
ApplicationListAppendedEvent = Event(
    name="application_list_appended",
    key="ea7bbd58-da09-407c-a630-c324e0272385",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users List appended applications endpoint",
)

# Register application to service endpoint
ApplicationRegisterServiceEvent = Event(
    name="application_register_service",
    key="47d7cfc8-6004-4442-8357-16ceac5d9d18",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users Register application to service endpoint",
)

# Unregister application from service endpoint
ApplicationUnRegisterServiceEvent = Event(
    name="application_unregister_service",
    key="d228ab26-0b74-440f-8f1f-8f40be5a22f2",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users Unregister application from service endpoint",
)

# Create endpoint
ApplicationCreateEvent = Event(
    name="application_create",
    key="f53ca9aa-5536-4d77-9129-78d67e61db4a",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users Create applications endpoint",
)

# Update endpoint
ApplicationUpdateEvent = Event(
    name="application_update",
    key="0e9a855e-4e69-44b5-8ac2-825daa32840c",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users Update applications endpoint",
)

# Bind employee endpoint
ApplicationBindEmployeeEvent = Event(
    name="application_bind_employee",
    key="948763ee-f221-409e-9a82-8525053505cb",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users Bind applications to employee endpoint",
)

# Bind occupant endpoint
ApplicationBindOccupantEvent = Event(
    name="application_bind_occupant",
    key="03c894a3-b337-4d90-a559-5fcd0dc3e2c5",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users Bind applications to occupant endpoint",
)


def application_list_all_callable(list_options: PaginateOnly):
    """List all applications with pagination and filtering options."""
    list_options = PaginateOnly(**list_options.model_dump())
    with Applications.new_session() as db_session:
        Applications.set_session(db_session)
        if list_options.query:
            applications_list = Applications.query.filter(*Applications.convert(list_options.query))
        else:
            applications_list = Applications.query.filter()
        pagination = Pagination(data=applications_list)
        pagination.change(**list_options.model_dump())
        pagination_result = PaginationResult(data=applications_list, pagination=pagination)
        return EndpointResponse(message="MSG0003-LIST", pagination_result=pagination_result).response


ApplicationListAllEvent.event_callable = application_list_all_callable


def application_list_available_callable(list_options: PaginateOnly):
    """List applications that are not yet registered to the given service."""
    list_options = PaginateOnly(**list_options.model_dump())
    service_uu_id = list_options.query.get("service_uu_id__ilike", None)
    if not service_uu_id:
        return {
            "message": "MSG0003-PARAM-MISSING",
            "data": list_options.query,
            "completed": False,
        }
    list_options.query.pop("service_uu_id__ilike", None)
    list_options.query.pop("service_uu_id", None)
    with Applications.new_session() as db_session:
        Applications.set_session(db_session)
        Service2Application.set_session(db_session)
        service2applications = Service2Application.query.filter(*Service2Application.convert({"service_uu_id__ilike": service_uu_id})).all()
        already_bound = [service_to_application.application_id for service_to_application in service2applications]
        if list_options.query:
            applications_list = Applications.query.filter(*Applications.convert(list_options.query), Applications.id.not_in(already_bound))
        else:
            applications_list = Applications.query.filter(Applications.id.not_in(already_bound))
        pagination = Pagination(data=applications_list)
        pagination.change(**list_options.model_dump())
        pagination_result = PaginationResult(data=applications_list, pagination=pagination)
        return EndpointResponse(message="MSG0003-LIST", pagination_result=pagination_result).response


ApplicationListAvailableEvent.event_callable = application_list_available_callable


def application_list_appended_callable(list_options: PaginateOnly):
    """List applications that are already registered to the given service."""
    list_options = PaginateOnly(**list_options.model_dump())
    service_uu_id = list_options.query.get("service_uu_id__ilike", None)
    if not service_uu_id:
        return {
            "message": "MSG0003-PARAM-MISSING",
            "data": list_options.query,
            "completed": False,
        }
    list_options.query.pop("service_uu_id__ilike", None)
    list_options.query.pop("service_uu_id", None)
    with Service2Application.new_session() as db_session:
        Service2Application.set_session(db_session)
        Applications.set_session(db_session)
        service2applications = Service2Application.query.filter(*Service2Application.convert({"service_uu_id__ilike": service_uu_id})).all()
        already_bound = [service_to_application.application_id for service_to_application in service2applications]
        if list_options.query:
            applications_list = Applications.query.filter(*Applications.convert(list_options.query), Applications.id.in_(already_bound))
        else:
            applications_list = Applications.query.filter(Applications.id.in_(already_bound))
        pagination = Pagination(data=applications_list)
        pagination.change(**list_options.model_dump())
        pagination_result = PaginationResult(data=applications_list, pagination=pagination)
        return EndpointResponse(message="MSG0003-LIST", pagination_result=pagination_result).response


ApplicationListAppendedEvent.event_callable = application_list_appended_callable


def application_create_callable(data: Any):
    """Create a new application."""
    with Applications.new_session() as db_session:
        Applications.set_session(db_session)
        created_application_dict = data.model_dump()
        created_application = Applications.create(**created_application_dict)
        if created_application:
            created_application.save()
            return {
                "completed": True,
                "message": "MSG0001-INSERT",
                "data": created_application,
            }
        return {
            "completed": False,
            "message": "MSG0002-ERROR",
            "data": created_application,
        }


ApplicationCreateEvent.event_callable = application_create_callable


def application_update_callable(data: Any, uu_id: str):
    """Update an existing application."""
    with Applications.new_session() as db_session:
        Applications.set_session(db_session)
        updated_application_dict = data.model_dump(exclude_unset=True, exclude_none=True)
        found_application = Applications.query.filter(Applications.uu_id == uu_id).first()
        if not found_application:
            return {
                "completed": False,
                "message": "MSG0002-FOUND",
                "data": found_application,
            }
        updated_application = found_application.update(**updated_application_dict)
        updated_application.save()
        if updated_application.meta_data.updated:
            return {
                "completed": True,
                "message": "MSG0003-UPDATE",
                "data": updated_application,
            }
        return {
            "completed": False,
            "message": "MSG0003-UPDATE",
            "data": updated_application,
        }


ApplicationUpdateEvent.event_callable = application_update_callable


def application_register_service_callable(data: Any):
    """Register an application to a service."""
    with Applications.new_session() as db_session:
        Applications.set_session(db_session)
        Services.set_session(db_session)
        Service2Application.set_session(db_session)
        application = Applications.query.filter(Applications.uu_id == data.application_uu_id).first()
        if not application:
            return {
                "message": "MSG0003-NOT-FOUND",
                "data": data.model_dump(),
                "completed": False,
            }
        service = Services.query.filter(Services.uu_id == data.service_uu_id).first()
        if not service:
            return {
                "message": "MSG0003-NOT-FOUND",
                "data": data.model_dump(),
                "completed": False,
            }
        service_to_application = Service2Application.query.filter(
            Service2Application.service_id == service.data.id,
            Service2Application.application_id == application.data.id,
        ).first()
        if service_to_application:
            return {
                "message": "MSG0003-ALREADY-FOUND",
                "data": data.model_dump(),
                "completed": False,
            }
        service_to_application = Service2Application.create(
            service_id=service.data.id,
            service_uu_id=str(service.data.uu_id),
            application_id=application.data.id,
            application_uu_id=str(application.data.uu_id),
            application_code=application.data.application_code,
            site_url=application.data.site_url,
            is_confirmed=True,
        )
        service_to_application.save()
        return {
            "message": "MSG0003-REGISTER",
            "data": data.model_dump(),
            "completed": True,
        }


ApplicationRegisterServiceEvent.event_callable = application_register_service_callable


def application_unregister_service_callable(data: Any):
    """Unregister an application from a service."""
    with Applications.new_session() as db_session:
        Applications.set_session(db_session)
        Services.set_session(db_session)
        Service2Application.set_session(db_session)
        application = Applications.query.filter(Applications.uu_id == data.application_uu_id).first()
        if not application:
            return {
                "message": "MSG0003-NOT-FOUND",
                "data": data.model_dump(),
                "completed": False,
            }
        service = Services.query.filter(Services.uu_id == data.service_uu_id).first()
        if not service:
            return {
                "message": "MSG0003-NOT-FOUND",
                "data": data.model_dump(),
                "completed": False,
            }
        service_to_application = Service2Application.query.filter(
            Service2Application.service_id == service.data.id,
            Service2Application.application_id == application.data.id,
        ).first()
        if not service_to_application:
            return {
                "message": "MSG0003-NOT-FOUND",
                "data": data.model_dump(),
                "completed": False,
            }
        service_to_application.query.delete()
        db_session.commit()
        return {
            "message": "MSG0003-UNREGISTER",
            "data": data.model_dump(),
            "completed": True,
        }


ApplicationUnRegisterServiceEvent.event_callable = application_unregister_service_callable


def application_bind_employee_callable(data: Any):
    """Bind an application to an employee (stub)."""
    return {
        "message": "MSG0003-BIND",
        "data": data.model_dump(),
        "completed": True,
    }


ApplicationBindEmployeeEvent.event_callable = application_bind_employee_callable


def application_bind_occupant_callable(data: Any):
    """Bind an application to an occupant (stub)."""
    return {
        "message": "MSG0003-BIND",
        "data": data.model_dump(),
        "completed": True,
    }


ApplicationBindOccupantEvent.event_callable = application_bind_occupant_callable
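The register and unregister callables above read application_uu_id and service_uu_id from their data argument, but the request_validator fields are still TODO. A hypothetical Pydantic model matching that access pattern, not part of the commit:

# Hypothetical request model; the actual validator is still TODO.
from pydantic import BaseModel


class RequestServiceApplicationLink(BaseModel):
    application_uu_id: str
    service_uu_id: str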
@@ -0,0 +1,37 @@
from api_initializer.event_clusters import EventCluster, RouterCluster
from index import endpoints_index
from .supers_events import (
    EventsListAvailableEvent,
    EventsListAppendedEvent,
    EventRegisterServiceEvent,
    EventUnRegisterServiceEvent,
    EventBindEmployeeExtraEvent,
    EventBindOccupantExtraEvent,
)

EventsEndpointRouterCluster = RouterCluster(name="EventsEndpointRouterCluster")

EventsEndpointEventClusterListAvailable = EventCluster(name="EventsListAvailable", endpoint_uu_id=endpoints_index["EventsListAvailable"])
EventsEndpointEventClusterListAvailable.add_event(EventsListAvailableEvent)

EventsEndpointEventClusterListAppended = EventCluster(name="EventsListAppended", endpoint_uu_id=endpoints_index["EventsListAppended"])
EventsEndpointEventClusterListAppended.add_event(EventsListAppendedEvent)

EventsEndpointEventClusterRegisterService = EventCluster(name="EventRegisterService", endpoint_uu_id=endpoints_index["EventRegisterService"])
EventsEndpointEventClusterRegisterService.add_event(EventRegisterServiceEvent)

EventsEndpointEventClusterUnregisterService = EventCluster(name="EventUnRegisterService", endpoint_uu_id=endpoints_index["EventUnRegisterService"])
EventsEndpointEventClusterUnregisterService.add_event(EventUnRegisterServiceEvent)

EventsEndpointEventClusterBindEmployeeExtra = EventCluster(name="EventBindExtraEmployee", endpoint_uu_id=endpoints_index["EventBindExtraEmployee"])
EventsEndpointEventClusterBindEmployeeExtra.add_event(EventBindEmployeeExtraEvent)

EventsEndpointEventClusterBindOccupantExtra = EventCluster(name="EventBindExtraOccupant", endpoint_uu_id=endpoints_index["EventBindExtraOccupant"])
EventsEndpointEventClusterBindOccupantExtra.add_event(EventBindOccupantExtraEvent)

EventsEndpointRouterCluster.set_event_cluster(EventsEndpointEventClusterListAvailable)
EventsEndpointRouterCluster.set_event_cluster(EventsEndpointEventClusterListAppended)
EventsEndpointRouterCluster.set_event_cluster(EventsEndpointEventClusterRegisterService)
EventsEndpointRouterCluster.set_event_cluster(EventsEndpointEventClusterUnregisterService)
EventsEndpointRouterCluster.set_event_cluster(EventsEndpointEventClusterBindEmployeeExtra)
EventsEndpointRouterCluster.set_event_cluster(EventsEndpointEventClusterBindOccupantExtra)
@@ -0,0 +1,186 @@
from typing import Any

from api_initializer.event_clusters import Event
from api_validations.response import (
    PaginateOnly,
    Pagination,
    PaginationResult,
    PostgresResponseSingle,
    PostgresResponse,
    EndpointResponse
)

from schemas import (
    Events,
    Event2Employee,
    Event2Occupant,
    Event2EmployeeExtra,
    Event2OccupantExtra,
    Service2Events,
    Services,
)

# List available events endpoint
EventsListAvailableEvent = Event(
    name="event_endpoint_list_available",
    key="d39af512-ec71-4c0f-9b35-e53b0d06d3a4",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users List available events endpoint",
)

# List appended events endpoint
EventsListAppendedEvent = Event(
    name="event_endpoint_list_appended",
    key="bea77d6a-d99f-468b-9002-b3bda6bb6ad0",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users List appended events endpoint",
)

# Event Register endpoint
EventRegisterServiceEvent = Event(
    name="event_endpoint_register_service",
    key="e18e7f89-5708-4a15-9258-99b0903ed43d",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users Register service endpoint",
)

# Event Unregister endpoint
EventUnRegisterServiceEvent = Event(
    name="event_endpoint_unregister_service",
    key="4d693774-4857-435b-a63c-c39baebfe916",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users Unregister service endpoint",
)

# Bind employee extra endpoint
EventBindEmployeeExtraEvent = Event(
    name="event_endpoint_bind_employee_extra",
    key="cd452928-4256-4fb4-b81e-0ca41d723616",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users Bind event to employee extra endpoint",
)

# Bind occupant extra endpoint
EventBindOccupantExtraEvent = Event(
    name="event_endpoint_bind_occupant_extra",
    key="cb11a150-8049-45c9-8cf3-d5290ffd2e4a",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users Bind event to occupant extra endpoint",
)


def events_list_available_callable(list_options: PaginateOnly):
    """List available events with pagination and filtering options"""
    list_options = PaginateOnly(**list_options.model_dump())
    service_uu_id = list_options.query.get("service_uu_id__ilike", None)
    if not service_uu_id:
        return {"message": "MSG0003-PARAM-MISSING", "data": list_options.query, "completed": False}
    list_options.query.pop("service_uu_id__ilike", None)
    list_options.query.pop("service_uu_id", None)
    with Events.new_session() as db_session:
        Events.set_session(db_session)
        Service2Events.set_session(db_session)
        service2events = Service2Events.query.filter(Service2Events.service_uu_id.ilike(f"%{service_uu_id}%"))
        already_events = [service_to_event.event_id for service_to_event in service2events.all()]
        if list_options.query:
            events_list = Events.query.filter(*Events.convert(list_options.query), Events.id.not_in(already_events))
        else:
            events_list = Events.query.filter(Events.id.not_in(already_events))
        events_response = PostgresResponse(data=events_list)
        pagination = Pagination(data=events_response)
        pagination.change(**list_options.model_dump())
        pagination_result = PaginationResult(data=events_response, pagination=pagination)
        return EndpointResponse(message="MSG0003-LIST", pagination_result=pagination_result).response


EventsListAvailableEvent.event_callable = events_list_available_callable


def events_list_appended_callable(list_options: PaginateOnly):
    """List appended events with pagination and filtering options"""
    list_options = PaginateOnly(**list_options.model_dump())
    service_uu_id = list_options.query.get("service_uu_id__ilike", None)
    if not service_uu_id:
        return {"message": "MSG0003-PARAM-MISSING", "data": list_options.query, "completed": False}
    list_options.query.pop("service_uu_id__ilike", None)
    list_options.query.pop("service_uu_id", None)
    with Events.new_session() as db_session:
        Events.set_session(db_session)
        Service2Events.set_session(db_session)
        service2events = Service2Events.query.filter(*Service2Events.convert({"service_uu_id__ilike": service_uu_id}))
        already_events = [service_to_event.event_id for service_to_event in service2events.all()]
        if list_options.query:
            events_list = Events.query.filter(*Events.convert(list_options.query), Events.id.in_(already_events))
        else:
            events_list = Events.query.filter(Events.id.in_(already_events))
        events_response = PostgresResponse(data=events_list)
        pagination = Pagination(data=events_response)
        pagination.change(**list_options.model_dump())
        pagination_result = PaginationResult(data=events_response, pagination=pagination)
        return EndpointResponse(message="MSG0003-LIST", pagination_result=pagination_result).response


EventsListAppendedEvent.event_callable = events_list_appended_callable


def event_register_service_callable(data: Any):
    """Register an event to a service"""
    with Events.new_session() as db_session:
        Events.set_session(db_session)
        Services.set_session(db_session)
        Service2Events.set_session(db_session)
        event = Events.query.filter(Events.uu_id == data.event_uu_id).first()
        if not event:
            return EndpointResponse(message="MSG0003-NOT-FOUND", data=data.model_dump(), completed=False).response
        service = Services.query.filter(Services.uu_id == data.service_uu_id).first()
        if not service:
            return {"message": "MSG0003-NOT-FOUND", "data": data.model_dump(), "completed": False}
        service_to_event = Service2Events.query.filter_by(service_id=service.data.id, event_id=event.data.id).first()
        if not service_to_event:
            service_to_event = Service2Events.create(
                service_id=service.data.id, service_uu_id=str(service.data.uu_id), event_id=event.data.id,
                event_uu_id=str(event.data.uu_id), is_confirmed=True,
            )
            service_to_event.save()
            return {"message": "MSG0003-REGISTERED", "data": data.model_dump(), "completed": True}
        return {"message": "MSG0003-ALREADY-FOUND", "data": data.model_dump(), "completed": False}


EventRegisterServiceEvent.event_callable = event_register_service_callable


def event_unregister_service_callable(data: Any):
    """Unregister an event from a service"""
    with Events.new_session() as db_session:
        Events.set_session(db_session)
        Services.set_session(db_session)
        Service2Events.set_session(db_session)
        event = Events.query.filter(Events.uu_id == data.event_uu_id).first()
        if not event:
            return EndpointResponse(message="MSG0003-NOT-FOUND", data=data.model_dump(), completed=False).response
        service = Services.query.filter(Services.uu_id == data.service_uu_id).first()
        if not service:
            return {"message": "MSG0003-NOT-FOUND", "data": data.model_dump(), "completed": False}
        service_to_event = Service2Events.query.filter(
            Service2Events.service_id == service.data.id, Service2Events.event_id == event.data.id,
        ).first()
        if not service_to_event:
            return {"message": "MSG0003-NOT-FOUND", "data": data.model_dump(), "completed": False}
        service_to_event.query.delete()
        db_session.commit()
        return {"message": "MSG0003-UNREGISTER", "data": data.model_dump(), "completed": True}


EventUnRegisterServiceEvent.event_callable = event_unregister_service_callable


def event_bind_employee_extra_callable(data: Any):
    """Bind event to employee extra (stub)"""
    return EndpointResponse(message="MSG0003-BIND-EMP").response


EventBindEmployeeExtraEvent.event_callable = event_bind_employee_extra_callable


def event_bind_occupant_extra_callable(data: Any):
    """Bind event to occupant extra (stub)"""
    return EndpointResponse(message="MSG0003-BIND-OCUP").response


EventBindOccupantExtraEvent.event_callable = event_bind_occupant_extra_callable
@@ -0,0 +1,27 @@
from api_initializer.event_clusters import EventCluster, RouterCluster
from index import endpoints_index
from .supers_events import (
    SuperServiceListEvent,
    SuperServiceCreateEvent,
    SuperServiceUpdateEvent,
    SuperServiceDeleteEvent,
)

ServicesRouterCluster = RouterCluster(name="ServicesRouterCluster")

ServicesEventClusterList = EventCluster(name="ServicesList", endpoint_uu_id=endpoints_index["ServicesList"])
ServicesEventClusterList.add_event(SuperServiceListEvent)

ServicesEventClusterCreate = EventCluster(name="ServicesCreate", endpoint_uu_id=endpoints_index["ServicesCreate"])
ServicesEventClusterCreate.add_event(SuperServiceCreateEvent)

ServicesEventClusterUpdate = EventCluster(name="ServicesUpdate", endpoint_uu_id=endpoints_index["ServicesUpdate"])
ServicesEventClusterUpdate.add_event(SuperServiceUpdateEvent)

ServicesEventClusterDelete = EventCluster(name="ServicesDelete", endpoint_uu_id=endpoints_index["ServicesDelete"])
ServicesEventClusterDelete.add_event(SuperServiceDeleteEvent)

ServicesRouterCluster.set_event_cluster(ServicesEventClusterList)
ServicesRouterCluster.set_event_cluster(ServicesEventClusterCreate)
ServicesRouterCluster.set_event_cluster(ServicesEventClusterUpdate)
ServicesRouterCluster.set_event_cluster(ServicesEventClusterDelete)
@@ -0,0 +1,105 @@
from typing import Any

from api_validations.defaults.validations import CommonHeaders
from api_initializer.event_clusters import Event
from api_validations.response import (
    PaginateOnly,
    Pagination,
    PaginationResult,
    PostgresResponseSingle,
    PostgresResponse,
    EndpointResponse
)
from schemas import (
    Events,
    Event2Employee,
    Event2Occupant,
    Event2EmployeeExtra,
    Event2OccupantExtra,
    Service2Events,
    Services,
)

# List services endpoint
SuperServiceListEvent = Event(
    name="super_service_list",
    key="ea24f5e6-279a-47e7-a5bd-8a5c1bd72d05",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users List available services endpoint",
)

# Create service endpoint
SuperServiceCreateEvent = Event(
    name="super_service_create",
    key="086051f4-f1ec-4d56-b706-09ce53d5e66c",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users Create service endpoint",
)

# Update service endpoint
SuperServiceUpdateEvent = Event(
    name="super_service_update",
    key="267956e5-32b7-4b60-ab75-3b56b935d5c1",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users Update service endpoint",
)

# Delete service endpoint
SuperServiceDeleteEvent = Event(
    name="super_service_delete",
    key="6c333122-272e-4690-9d71-7f5e14cc64c8",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users Delete service endpoint",
)


def super_service_list_callable(list_options: PaginateOnly, headers: CommonHeaders):
    """List available services with pagination and filtering options (stub)."""
    return {
        "message": "MSG0003-LIST",
        "data": None,
        "completed": True,
    }


SuperServiceListEvent.event_callable = super_service_list_callable


def super_service_create_callable(data: Any, headers: CommonHeaders):
    """Create service (stub)."""
    return {
        "message": "MSG0003-CREATE",
        "data": None,
        "completed": True,
    }


SuperServiceCreateEvent.event_callable = super_service_create_callable


def super_service_update_callable(data: Any, headers: CommonHeaders):
    """Update service (stub)."""
    return {
        "message": "MSG0003-UPDATE",
        "data": None,
        "completed": True,
    }


SuperServiceUpdateEvent.event_callable = super_service_update_callable


def super_service_delete_callable(data: Any, headers: CommonHeaders):
    """Delete service (stub)."""
    return {
        "message": "MSG0003-DELETE",
        "data": None,
        "completed": True,
    }


SuperServiceDeleteEvent.event_callable = super_service_delete_callable
@@ -0,0 +1,24 @@
endpoints_index: dict = {
    "AccountRecordsAll": "d538deb4-38f4-4913-a1af-bbef14cf6873",
    "AccountRecordsMonthly": "c0f5ccb1-1e56-4653-af13-ec0bf5e6aa51",
    "EventsListAvailable": "034a7eb7-0186-4f48-bb8c-165c429ad5c1",
    "EventsListAppended": "ec1f3ec3-3f28-4eaf-b89a-c463632c0b90",
    "EventRegisterService": "2cf99f10-72f0-4c2b-98be-3082d67b950d",
    "EventUnRegisterService": "15c24c6c-651b-4c5d-9c2b-5c6c6c6c6c6c",
    "EventBindExtraEmployee": "74cafa62-674e-41da-959d-1238ad4a443c",
    "EventBindExtraOccupant": "480bee12-8dfd-4242-b481-f6807eb9adf7",
    "ApplicationListAll": "a61169be-a009-47ec-8658-3dd388af5c3e",
    "ApplicationListAvailable": "bf8d7986-2db7-4ff8-80c2-1935977730a6",
    "ApplicationListAppended": "ff7bde16-2631-4465-a4c5-349b357dd334",
    "ApplicationRegisterService": "c77a9f36-c007-4079-83fa-1c995b585a6f",
    "ApplicationUnRegisterService": "48460f25-fb1e-477f-b641-d5eeacce5e7a",
    "ApplicationCreate": "a3ec9f67-12a2-4e8a-b977-1acfa0069c12",
    "ApplicationUpdate": "83281757-696a-41ed-9706-e145ac54c3a9",
    "ApplicationBindEmployee": "80427237-5ab6-4d17-8084-cdb87bda22a3",
    "ApplicationBindOccupant": "ae0fb101-cb13-47ab-86bd-233a5dbef269",
    "ServicesList": "7af16881-2c0f-463f-859f-7aca475e65eb",
    "ServicesCreate": "effca319-2074-4862-bb80-dde77f0e8407",
    "ServicesUpdate": "24dc83e9-c159-4bb3-8982-a8adf6555029",
    "ServicesDelete": "f4c9b2c4-d18a-43c6-abf9-030f71a1c381",
}
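Router registration indexes into endpoints_index, so every value must be a unique, well-formed UUID. A throwaway sanity check, not part of the service, that can be run against the dict:

# Throwaway check: duplicate or malformed operation ids fail fast.
import uuid

_values = list(endpoints_index.values())
assert len(_values) == len(set(_values)), "duplicate operation ids in endpoints_index"
for _value in _values:
    uuid.UUID(_value)  # raises ValueError on a malformed id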
@@ -0,0 +1,31 @@
FROM python:3.12-slim

WORKDIR /

# Install system dependencies and Poetry
RUN apt-get update && apt-get install -y --no-install-recommends gcc && rm -rf /var/lib/apt/lists/* && pip install --no-cache-dir poetry

# Copy Poetry configuration
COPY /pyproject.toml ./pyproject.toml

# Configure Poetry and install dependencies with optimizations
RUN poetry config virtualenvs.create false && poetry install --no-interaction --no-ansi --no-root --only main && pip cache purge && rm -rf ~/.cache/pypoetry

# Copy application code
COPY /api_services/api_initializer /api_initializer
COPY /api_services/api_controllers /api_controllers
COPY /api_services/api_validations /api_validations
COPY /api_services/api_modules /api_modules
COPY /api_services/schemas /schemas

COPY /api_services/api_middlewares /api_middlewares
COPY /api_services/api_builds/restriction_service/endpoints /api_initializer/endpoints
COPY /api_services/api_builds/restriction_service/events /api_initializer/events
COPY /api_services/api_builds/restriction_service/validations /api_initializer/validations
COPY /api_services/api_builds/restriction_service/index.py /api_initializer/index.py

# Set Python path to include app directory
ENV PYTHONPATH=/ PYTHONUNBUFFERED=1 PYTHONDONTWRITEBYTECODE=1

# Run the application using the configured uvicorn server
CMD ["poetry", "run", "python", "/api_initializer/app.py"]
@ -0,0 +1,43 @@
from fastapi import APIRouter, Depends

from events.pages.events import PageHandlers
from index import endpoints_index

from validations.request.restrictions.validations import RequestApplication
from api_validations.defaults.validations import CommonHeaders


pages_route = APIRouter(prefix="/restrictions", tags=["Restrictions Cluster"])


application_retrieve_page = "ApplicationRetrievePage"
@pages_route.post(
    path="/page/valid",
    summary="Verify that a page is valid; returns the application it belongs to",
    description="Verify that a page is valid; returns the application it belongs to",
    operation_id=endpoints_index[application_retrieve_page]
)
def authentication_page_valid(data: RequestApplication, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    """
    Verify that a page is valid and return the application the user can reach.
    Request: { "page_url": "/building/create" } | Result: { "application": "4c11f5ef-0bbd-41ac-925e-f79d9aac2b0e" }
    """
    application = PageHandlers.retrieve_valid_page_via_token(access_token=headers.token, page_url=data.page_url)
    return {"completed": True, "application": application}


application_retrieve_all_sites = "ApplicationRetrieveAllSites"
@pages_route.get(
    path="/sites/list",
    summary="Lists all sites that are available to the user",
    description="Lists all sites that are available to the user",
    operation_id=endpoints_index[application_retrieve_all_sites]
)
def authentication_get_all_sites_list(headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    """
    List every site URL the user can reach.
    Result: { "sites": ["/dashboard", "/building/create"] }
    """
    list_of_application_url = PageHandlers.retrieve_valid_sites_via_token(access_token=headers.token)
    return {"completed": True, "sites": list(list_of_application_url)}
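A self-contained sketch of the request/response contract for /page/valid, with the handler stubbed out (the real handler resolves the application UUID from the token object in Redis; this stub is an assumption for illustration only):

from fastapi import FastAPI
from fastapi.testclient import TestClient
from pydantic import BaseModel


class RequestApplication(BaseModel):
    page_url: str


app = FastAPI()


@app.post("/restrictions/page/valid")
def page_valid(data: RequestApplication):
    # Stub: echoes the contract only; no Redis lookup happens here.
    return {"completed": True, "application": "4c11f5ef-0bbd-41ac-925e-f79d9aac2b0e"}


client = TestClient(app)
response = client.post("/restrictions/page/valid", json={"page_url": "/building/create"})
print(response.json())  # {'completed': True, 'application': '4c11f5ef-...'}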
@ -0,0 +1,15 @@
from fastapi import APIRouter

from .pages.router import pages_route


def get_routes() -> list[APIRouter]:
    return [pages_route]


def get_safe_endpoint_urls() -> list[tuple[str, str]]:
    return [
        ("/", "GET"),
        ("/docs", "GET"),
        ("/redoc", "GET"),
        ("/openapi.json", "GET"),
        ("/metrics", "GET"),
    ]
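A hypothetical consumer of get_safe_endpoint_urls(), sketching how an auth middleware might skip token checks for these routes (the service's real middleware may work differently):

SAFE_ENDPOINTS = set(get_safe_endpoint_urls())


def requires_token(path: str, method: str) -> bool:
    # Anything outside the safe list needs an access token.
    return (path, method.upper()) not in SAFE_ENDPOINTS


assert requires_token("/restrictions/sites/list", "get") is True
assert requires_token("/docs", "GET") is False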
@ -0,0 +1,2 @@

__all__ = []
@ -0,0 +1,34 @@
from api_modules.redis.redis_handlers import RedisHandlers


class PageHandlers:

    @classmethod
    def retrieve_valid_page_via_token(cls, access_token: str, page_url: str) -> str:
        """
        Retrieve a valid page via token. {access_token: "string", page_url: "string"} | Result: str(application)
        """
        if result := RedisHandlers.get_object_from_redis(access_token=access_token):
            if result.is_employee:
                if result.selected_company and result.selected_company.reachable_app_codes:
                    if application := result.selected_company.reachable_app_codes.get(page_url, None):
                        return application
            elif result.is_occupant:
                if result.selected_occupant and result.selected_occupant.reachable_app_codes:
                    if application := result.selected_occupant.reachable_app_codes.get(page_url, None):
                        return application
        raise ValueError("EYS_0013")

    @classmethod
    def retrieve_valid_sites_via_token(cls, access_token: str) -> list:
        """
        Retrieve valid pages via token. {"access_token": "string"} | Result: list(sites)
        """
        if result := RedisHandlers.get_object_from_redis(access_token=access_token):
            if result.is_employee:
                if result.selected_company and result.selected_company.reachable_app_codes:
                    return list(result.selected_company.reachable_app_codes.keys())
            elif result.is_occupant:
                if result.selected_occupant and result.selected_occupant.reachable_app_codes:
                    return list(result.selected_occupant.reachable_app_codes.keys())
        raise ValueError("EYS_0013")
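Both handlers assume the Redis token object carries a reachable_app_codes mapping of page URL to application UUID on the selected company or occupant. A self-contained sketch of just that lookup, with stand-in types (not the real Redis models):

from dataclasses import dataclass, field


@dataclass
class SelectedCompany:
    # page URL -> application UUID, as the handlers above expect
    reachable_app_codes: dict = field(default_factory=dict)


company = SelectedCompany(reachable_app_codes={
    "/building/create": "4c11f5ef-0bbd-41ac-925e-f79d9aac2b0e",
})

if application := company.reachable_app_codes.get("/building/create"):
    print(application)
else:
    raise ValueError("EYS_0013")  # same error code the handlers raise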
@ -0,0 +1,5 @@

endpoints_index: dict = {
    "ApplicationRetrievePage": "e17a9475-0a8a-4a64-82a4-3357ac4a89d0",
    "ApplicationRetrieveAllSites": "e02b83fc-c579-460b-8a8a-04b46ff83318",
}
@ -0,0 +1,6 @@
from typing import Optional
from pydantic import BaseModel


class RequestApplication(BaseModel):
    page_url: str  # /building/create