3 services are updated
parent cd62d96158
commit 6dfa17c5e6
@@ -0,0 +1,31 @@
FROM python:3.12-slim

WORKDIR /

# Install system dependencies and Poetry
RUN apt-get update && apt-get install -y --no-install-recommends gcc && rm -rf /var/lib/apt/lists/* && pip install --no-cache-dir poetry

# Copy Poetry configuration
COPY /pyproject.toml ./pyproject.toml

# Configure Poetry and install dependencies with optimizations
RUN poetry config virtualenvs.create false && poetry install --no-interaction --no-ansi --no-root --only main && pip cache purge && rm -rf ~/.cache/pypoetry

# Copy application code
COPY /api_services/api_initializer /api_initializer
COPY /api_services/api_controllers /api_controllers
COPY /api_services/api_validations /api_validations
COPY /api_services/schemas /schemas
COPY /api_services/api_modules /api_modules

COPY /api_services/api_middlewares /api_middlewares
COPY /api_services/api_builds/auth-service/endpoints /api_initializer/endpoints
COPY /api_services/api_builds/auth-service/events /api_initializer/events
COPY /api_services/api_builds/auth-service/validations /api_initializer/validations
COPY /api_services/api_builds/auth-service/index.py /api_initializer/index.py

# Set Python path to include app directory
ENV PYTHONPATH=/ PYTHONUNBUFFERED=1 PYTHONDONTWRITEBYTECODE=1

# Run the application using the configured uvicorn server
CMD ["poetry", "run", "python", "/api_initializer/app.py"]
@@ -22,65 +22,13 @@ from schemas import (
    RelationshipEmployee2Build,
)
from api_modules.token.password_module import PasswordModule
-from api_controllers.redis.database import RedisActions
from api_controllers.mongo.database import mongo_handler
-from api_validations.token.validations import EmployeeTokenObject, OccupantTokenObject, CompanyToken, OccupantToken, UserType
+from api_validations.token.validations import TokenDictType, EmployeeTokenObject, OccupantTokenObject, CompanyToken, OccupantToken, UserType
from api_validations.defaults.validations import CommonHeaders
+from api_modules.redis.redis_handlers import RedisHandlers
from validations.password.validations import PasswordHistoryViaUser


-TokenDictType = Union[EmployeeTokenObject, OccupantTokenObject]
-
-
-class RedisHandlers:
-
-    AUTH_TOKEN: str = "AUTH_TOKEN"
-
-    @classmethod
-    def process_redis_object(cls, redis_object: Dict[str, Any]) -> TokenDictType:
-        """Process Redis object and return appropriate token object."""
-        if not redis_object.get("selected_company"):
-            redis_object["selected_company"] = None
-        if not redis_object.get("selected_occupant"):
-            redis_object["selected_occupant"] = None
-        if redis_object.get("user_type") == UserType.employee.value:
-            return EmployeeTokenObject(**redis_object)
-        elif redis_object.get("user_type") == UserType.occupant.value:
-            return OccupantTokenObject(**redis_object)
-        raise ValueError("Invalid user type")
-
-    @classmethod
-    def get_object_from_redis(cls, access_token: str) -> TokenDictType:
-        redis_response = RedisActions.get_json(list_keys=[RedisHandlers.AUTH_TOKEN, access_token, "*"])
-        if not redis_response.status:
-            raise ValueError("EYS_0001")
-        if redis_object := redis_response.first:
-            return cls.process_redis_object(redis_object)
-        raise ValueError("EYS_0002")
-
-    @classmethod
-    def set_object_to_redis(cls, user: Users, token, header_info):
-        result_delete = RedisActions.delete(list_keys=[RedisHandlers.AUTH_TOKEN, "*", str(user.uu_id)])
-        generated_access_token = PasswordModule.generate_access_token()
-        keys = [RedisHandlers.AUTH_TOKEN, generated_access_token, str(user.uu_id)]
-        RedisActions.set_json(list_keys=keys, value={**token, **header_info}, expires={"hours": 1, "minutes": 30})
-        return generated_access_token
-
-    @classmethod
-    def update_token_at_redis(cls, token: str, add_payload: Union[CompanyToken, OccupantToken]):
-        if already_token_data := RedisActions.get_json(list_keys=[RedisHandlers.AUTH_TOKEN, token, "*"]).first:
-            already_token = cls.process_redis_object(already_token_data)
-            if already_token.is_employee and isinstance(add_payload, CompanyToken):
-                already_token.selected_company = add_payload
-            elif already_token.is_occupant and isinstance(add_payload, OccupantToken):
-                already_token.selected_occupant = add_payload
-            result = RedisActions.set_json(
-                list_keys=[RedisHandlers.AUTH_TOKEN, token, str(already_token.user_uu_id)], value=already_token.model_dump(), expires={"hours": 1, "minutes": 30}
-            )
-            return result.first
-        raise ValueError("Something went wrong")


class UserHandlers:

    @staticmethod
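For reference, a minimal usage sketch of the relocated handler, assuming api_modules.redis.redis_handlers keeps the classmethod signatures shown in the removed block above; found_user, token_payload and selected_company are illustrative placeholders, not names from this commit:

from api_modules.redis.redis_handlers import RedisHandlers

# Issue a token for a user, read it back as a typed object, then attach a selected company.
access_token = RedisHandlers.set_object_to_redis(user=found_user, token=token_payload, header_info={"language": "en", "domain": "example.com", "timezone": "UTC"})
token_object = RedisHandlers.get_object_from_redis(access_token=access_token)  # EmployeeTokenObject or OccupantTokenObject
RedisHandlers.update_token_at_redis(token=access_token, add_payload=selected_company)  # CompanyToken for employees, OccupantToken for occupants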
@@ -182,15 +130,10 @@ class LoginHandler:
            duty_uu_id_list=duty_uu_id_list,
            duty_id_list=duty_id_list,
        ).model_dump()
-        set_to_redis_dict = dict(
-            user=found_user,
-            token=model_value,
-            header_info=dict(language=headers.language, domain=headers.domain, timezone=headers.timezone),
-        )
-        redis_handler = RedisHandlers()
+        set_to_redis_dict = dict(user=found_user, token=model_value, header_info=dict(language=headers.language, domain=headers.domain, timezone=headers.timezone))
        user_dict = found_user.get_dict()
        person_dict = found_user.person.get_dict()
-        if access_token := redis_handler.set_object_to_redis(**set_to_redis_dict):
+        if access_token := RedisHandlers().set_object_to_redis(**set_to_redis_dict):
            return {
                "access_token": access_token,
                "user_type": UserType.employee.name,
@@ -364,7 +307,7 @@ class LoginHandler:
            Departments.id, Departments.uu_id, Duty.id, Duty.uu_id, Addresses.id, Addresses.letter_address, Staff.id, Staff.uu_id,
            Duties.id, Duties.uu_id,
        )

        selected_company_query = db_session.query(*list_of_returns
        ).join(Staff, Staff.id == Employees.staff_id
        ).join(People, People.id == Employees.people_id
@@ -559,41 +502,7 @@ class PasswordHandler:
        return found_user


-class PageHandlers:
-
-    @classmethod
-    def retrieve_valid_page_via_token(cls, access_token: str, page_url: str) -> str:
-        """
-        Retrieve valid page via token. {access_token: "string", page_url: "string"} | Results: str(application)
-        """
-        if result := RedisHandlers.get_object_from_redis(access_token=access_token):
-            if result.is_employee:
-                if result.selected_company and result.selected_company.reachable_app_codes:
-                    if application := result.selected_company.reachable_app_codes.get(page_url, None):
-                        return application
-            elif result.is_occupant:
-                if result.selected_occupant and result.selected_occupant.reachable_app_codes:
-                    if application := result.selected_occupant.reachable_app_codes.get(page_url, None):
-                        return application
-        raise ValueError("EYS_0013")
-
-    @classmethod
-    def retrieve_valid_sites_via_token(cls, access_token: str) -> list:
-        """
-        Retrieve valid pages via token. {"access_token": "string"} | Results: list(sites)
-        """
-        if result := RedisHandlers.get_object_from_redis(access_token=access_token):
-            if result.is_employee:
-                if result.selected_company and result.selected_company.reachable_app_codes:
-                    return result.selected_company.reachable_app_codes.keys()
-            elif result.is_occupant:
-                if result.selected_occupant and result.selected_occupant.reachable_app_codes:
-                    return result.selected_occupant.reachable_app_codes.keys()
-        raise ValueError("EYS_0013")


class AuthHandlers:

    LoginHandler: LoginHandler = LoginHandler()
    PasswordHandler: PasswordHandler = PasswordHandler()
-    PageHandlers: PageHandlers = PageHandlers()
@@ -14,10 +14,6 @@ class RequestVerifyOTP(BaseModel):
    otp: str

-
-class RequestApplication(BaseModel):
-    page_url: str  # /building/create
-

class RequestSelectEmployee(BaseModel):

    company_uu_id: str
@@ -1,10 +0,0 @@
-
-
-events_index: dict = {
-    "Slot1": "",
-    "Slot2": "",
-    "Slot3": "",
-    "Slot4": "",
-    "Slot5": "",
-}
-
@@ -0,0 +1,31 @@
FROM python:3.12-slim

WORKDIR /

# Install system dependencies and Poetry
RUN apt-get update && apt-get install -y --no-install-recommends gcc && rm -rf /var/lib/apt/lists/* && pip install --no-cache-dir poetry

# Copy Poetry configuration
COPY /pyproject.toml ./pyproject.toml

# Configure Poetry and install dependencies with optimizations
RUN poetry config virtualenvs.create false && poetry install --no-interaction --no-ansi --no-root --only main && pip cache purge && rm -rf ~/.cache/pypoetry

# Copy application code
COPY /api_services/api_initializer /api_initializer
COPY /api_services/api_controllers /api_controllers
COPY /api_services/api_validations /api_validations
COPY /api_services/api_modules /api_modules
COPY /api_services/schemas /schemas
COPY /api_services/api_middlewares /api_middlewares

COPY /api_services/api_builds/management_service/endpoints /api_initializer/endpoints
COPY /api_services/api_builds/management_service/events /api_initializer/events
COPY /api_services/api_builds/management_service/validations /api_initializer/validations
COPY /api_services/api_builds/management_service/index.py /api_initializer/index.py

# Set Python path to include app directory
ENV PYTHONPATH=/ PYTHONUNBUFFERED=1 PYTHONDONTWRITEBYTECODE=1

# Run the application using the configured uvicorn server
CMD ["poetry", "run", "python", "/api_initializer/app.py"]
@@ -0,0 +1,147 @@
from typing import Any
from fastapi import APIRouter, Depends

from index import endpoints_index
from events.application.cluster import ApplicationRouterCluster

from api_validations.defaults.validations import CommonHeaders
from api_validations.response.pagination import PaginateOnly
from api_middlewares.token_provider import TokenProvider


application_endpoint_route = APIRouter(prefix="/application", tags=["Application Cluster"])


application_list_all = "ApplicationListAll"
@application_endpoint_route.post(
    path="/list/all",
    description="List all applications endpoint",
    operation_id=endpoints_index[application_list_all],
)
def application_list_all_route(data: PaginateOnly, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    FoundCluster = ApplicationRouterCluster.get_event_cluster(application_list_all)
    event_cluster_matched = FoundCluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(list_options=data)


application_list_available = "ApplicationListAvailable"
@application_endpoint_route.post(
    path="/list/available",
    description="List available applications endpoint",
    operation_id=endpoints_index[application_list_available],
)
def application_list_available_route(data: PaginateOnly, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    FoundCluster = ApplicationRouterCluster.get_event_cluster(application_list_available)
    event_cluster_matched = FoundCluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(list_options=data)


application_list_appended = "ApplicationListAppended"
@application_endpoint_route.post(
    path="/list/appended",
    description="List appended applications endpoint",
    operation_id=endpoints_index[application_list_appended],
)
def application_list_appended_route(data: PaginateOnly, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    FoundCluster = ApplicationRouterCluster.get_event_cluster(application_list_appended)
    event_cluster_matched = FoundCluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(list_options=data)


application_register_service = "ApplicationRegisterService"
@application_endpoint_route.post(
    path="/register/service",
    description="Register service endpoint",
    operation_id=endpoints_index[application_register_service],
)
def application_register_service_route(data: PaginateOnly, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    FoundCluster = ApplicationRouterCluster.get_event_cluster(application_register_service)
    event_cluster_matched = FoundCluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(list_options=data)


application_unregister_service = "ApplicationUnRegisterService"
@application_endpoint_route.post(
    path="/unregister/service",
    description="Unregister service endpoint",
    operation_id=endpoints_index[application_unregister_service],
)
def application_unregister_service_route(data: PaginateOnly, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    FoundCluster = ApplicationRouterCluster.get_event_cluster(application_unregister_service)
    event_cluster_matched = FoundCluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(list_options=data)


application_create = "ApplicationCreate"
@application_endpoint_route.post(
    path="/create",
    description="Create application endpoint",
    operation_id=endpoints_index[application_create],
)
def application_create_route(data: PaginateOnly, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    FoundCluster = ApplicationRouterCluster.get_event_cluster(application_create)
    event_cluster_matched = FoundCluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(list_options=data)


application_update = "ApplicationUpdate"
@application_endpoint_route.post(
    path="/update/{application_uuid}",
    description="Update application endpoint",
    operation_id=endpoints_index[application_update],
)
def application_update_route(application_uuid: str, data: PaginateOnly, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    FoundCluster = ApplicationRouterCluster.get_event_cluster(application_update)
    event_cluster_matched = FoundCluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(list_options=data)


application_bind_employee = "ApplicationBindEmployee"
@application_endpoint_route.post(
    path="/bind/employee",
    description="Bind employee endpoint",
    operation_id=endpoints_index[application_bind_employee],
)
def application_bind_employee_route(data: PaginateOnly, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    FoundCluster = ApplicationRouterCluster.get_event_cluster(application_bind_employee)
    event_cluster_matched = FoundCluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(list_options=data)


application_bind_occupant = "ApplicationBindOccupant"
@application_endpoint_route.post(
    path="/bind/occupant",
    description="Bind occupant endpoint",
    operation_id=endpoints_index[application_bind_occupant],
)
def application_bind_occupant_route(data: PaginateOnly, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    FoundCluster = ApplicationRouterCluster.get_event_cluster(application_bind_occupant)
    event_cluster_matched = FoundCluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(list_options=data)
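Each of the nine routes above repeats the same dispatch sequence; a possible consolidation, sketched here under the assumption that TokenProvider and the cluster objects keep the interfaces used above (this helper is not part of the commit):

def dispatch_event(cluster, endpoint_key: str, headers: CommonHeaders, **callable_kwargs):
    # Resolve the caller's token, pick the event allowed for this endpoint, and invoke it.
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_key = TokenProvider.retrieve_event_codes(endpoint_code=headers.operation_id, token=token_object)
    found_cluster = cluster.get_event_cluster(endpoint_key)
    return found_cluster.match_event(event_key=event_key).event_callable(**callable_kwargs)

# Usage inside a route body, e.g. application_list_all_route:
#     return dispatch_event(ApplicationRouterCluster, application_list_all, headers, list_options=data)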
@@ -0,0 +1,121 @@
from typing import Any
from fastapi import APIRouter, Depends

from index import endpoints_index
from events.events.cluster import EventsEndpointRouterCluster

from api_validations.defaults.validations import CommonHeaders
from api_validations.response.pagination import PaginateOnly
from api_middlewares.token_provider import TokenProvider


# Create API router
event_endpoint_route = APIRouter(prefix="/events", tags=["Event Actions"])


event_list_available = "EventsListAvailable"
@event_endpoint_route.post(
    path="/list/available",
    description="List available events endpoint",
    operation_id=endpoints_index[event_list_available],
)
def event_list_available_route(
    data: PaginateOnly,
    headers: CommonHeaders = Depends(CommonHeaders.as_dependency),
):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    FoundCluster = EventsEndpointRouterCluster.get_event_cluster(event_list_available)
    event_cluster_matched = FoundCluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(list_options=data)


event_list_appended = "EventsListAppended"
@event_endpoint_route.post(
    path="/list/appended",
    description="List appended events endpoint",
    operation_id=endpoints_index[event_list_appended],
)
def event_list_appended_route(
    data: PaginateOnly,
    headers: CommonHeaders = Depends(CommonHeaders.as_dependency),
):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    FoundCluster = EventsEndpointRouterCluster.get_event_cluster(event_list_appended)
    event_cluster_matched = FoundCluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(list_options=data)


event_register_service = "EventRegisterService"
@event_endpoint_route.post(
    path="/register/service",
    description="Register service endpoint",
    operation_id=endpoints_index[event_register_service],
)
def event_register_service_route(
    data: PaginateOnly,
    headers: CommonHeaders = Depends(CommonHeaders.as_dependency),
):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    FoundCluster = EventsEndpointRouterCluster.get_event_cluster(event_register_service)
    event_cluster_matched = FoundCluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(list_options=data)


event_unregister_service = "EventUnRegisterService"
@event_endpoint_route.post(
    path="/unregister/service",
    description="Unregister service endpoint",
    operation_id=endpoints_index[event_unregister_service],
)
def event_unregister_service_route(
    data: PaginateOnly,
    headers: CommonHeaders = Depends(CommonHeaders.as_dependency),
):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    FoundCluster = EventsEndpointRouterCluster.get_event_cluster(event_unregister_service)
    event_cluster_matched = FoundCluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(list_options=data)


event_bind_employee_extra = "EventBindExtraEmployee"
@event_endpoint_route.post(
    path="/bind/employee/extra",
    description="Bind employee extra endpoint",
    operation_id=endpoints_index[event_bind_employee_extra],
)
def event_bind_employee_extra_route(
    data: PaginateOnly,
    headers: CommonHeaders = Depends(CommonHeaders.as_dependency),
):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    FoundCluster = EventsEndpointRouterCluster.get_event_cluster(event_bind_employee_extra)
    event_cluster_matched = FoundCluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(list_options=data)


event_bind_occupant_extra = "EventBindExtraOccupant"
@event_endpoint_route.post(
    path="/bind/occupant/extra",
    description="Bind occupant extra endpoint",
    operation_id=endpoints_index[event_bind_occupant_extra],
)
def event_bind_occupant_extra_route(
    data: PaginateOnly,
    headers: CommonHeaders = Depends(CommonHeaders.as_dependency),
):
    token_object = TokenProvider.get_dict_from_redis(token=headers.token)
    event_founder_dict = dict(endpoint_code=headers.operation_id, token=token_object)
    event_key = TokenProvider.retrieve_event_codes(**event_founder_dict)
    FoundCluster = EventsEndpointRouterCluster.get_event_cluster(event_bind_occupant_extra)
    event_cluster_matched = FoundCluster.match_event(event_key=event_key)
    return event_cluster_matched.event_callable(list_options=data)
@@ -0,0 +1,17 @@
from fastapi import APIRouter
from .events.router import event_endpoint_route
from .services.router import services_endpoint_route
from .application.router import application_endpoint_route

def get_routes() -> list[APIRouter]:
    return [event_endpoint_route, application_endpoint_route, services_endpoint_route]


def get_safe_endpoint_urls() -> list[tuple[str, str]]:
    return [
        ("/", "GET"),
        ("/docs", "GET"),
        ("/redoc", "GET"),
        ("/openapi.json", "GET"),
        ("/metrics", "GET"),
    ]
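A sketch of how these helpers are presumably consumed by the service initializer (app.py is not part of this diff, so the import path and wiring below are assumptions):

from fastapi import FastAPI
from endpoints.index import get_routes, get_safe_endpoint_urls  # hypothetical import path

app = FastAPI()
for router in get_routes():
    app.include_router(router)

# URLs returned by get_safe_endpoint_urls() would typically be exempted from the token middleware.
SAFE_ENDPOINTS = set(get_safe_endpoint_urls())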
@@ -0,0 +1,13 @@
from typing import Any
from fastapi import APIRouter, Depends

from index import endpoints_index
# from events.services.cluster import ServicesEndpointRouterCluster

from api_validations.defaults.validations import CommonHeaders
from api_validations.response.pagination import PaginateOnly
from api_middlewares.token_provider import TokenProvider


# Create API router
services_endpoint_route = APIRouter(prefix="/services", tags=["Service Cluster"])
@@ -0,0 +1,69 @@
from typing import Any
import datetime

from fastapi import APIRouter, Depends
from sqlalchemy import func
from schemas import AccountRecords

from endpoints.index import endpoints_index
from events.event_endpoints.cluster import EventsEndpointRouterCluster

from api_validations.defaults.validations import CommonHeaders
from api_validations.response.pagination import PaginateOnly
from api_middlewares.token_provider import TokenProvider


test_endpoint_route = APIRouter(prefix="/tests", tags=["Endpoint Tests"])


account_records_all = "AccountRecordsAll"
@test_endpoint_route.get(
    path="/account/records/all",
    description="Account records endpoint",
    operation_id=endpoints_index[account_records_all],
)
def account_records_all_route(headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    with AccountRecords.new_session() as db_session:
        AccountRecords.set_session(db_session)
        # func.sum() yields a single aggregate value, so read it with .scalar(); default to 0 when no rows match
        account_records_positive_list = db_session.query(func.sum(AccountRecords.currency_value)).filter(AccountRecords.currency_value > 0).scalar() or 0
        account_records_negative_list = db_session.query(func.sum(AccountRecords.currency_value)).filter(AccountRecords.currency_value < 0).scalar() or 0
        # assumed intent: "lastPayment" is the bank_date of the most recent record
        last_record = db_session.query(AccountRecords).order_by(AccountRecords.bank_date.desc()).first()
        return {
            "message": "MSG0003-LIST",
            "data": {
                "debt": float(account_records_negative_list),
                "budget": float(account_records_positive_list),
                "total": float(account_records_positive_list) + float(account_records_negative_list),
                "lastPayment": last_record.bank_date if last_record else None,
            },
            "completed": True,
        }


account_records_monthly = "AccountRecordsMonthly"
@test_endpoint_route.get(
    path="/account/records/monthly",
    description="Account records endpoint",
    operation_id=endpoints_index[account_records_monthly],
)
def account_records_monthly_route(headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    with AccountRecords.new_session() as db_session:
        # same aggregates as the /all route, restricted to the current month; the sign filters mirror that route
        account_records_positive_list = db_session.query(func.sum(AccountRecords.currency_value)).filter(
            AccountRecords.currency_value > 0,
            AccountRecords.bank_date_y == datetime.date.today().year,
            AccountRecords.bank_date_m == datetime.date.today().month,
        ).scalar() or 0
        account_records_negative_list = db_session.query(func.sum(AccountRecords.currency_value)).filter(
            AccountRecords.currency_value < 0,
            AccountRecords.bank_date_y == datetime.date.today().year,
            AccountRecords.bank_date_m == datetime.date.today().month,
        ).scalar() or 0
        # assumed intent: "lastPayment" is the bank_date of the most recent record
        last_record = db_session.query(AccountRecords).order_by(AccountRecords.bank_date.desc()).first()
        return {
            "message": "MSG0003-LIST",
            "data": {
                "debt": float(account_records_negative_list),
                "budget": float(account_records_positive_list),
                "total": float(account_records_positive_list) + float(account_records_negative_list),
                "lastPayment": last_record.bank_date if last_record else None,
            },
            "completed": True,
        }
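A quick way to exercise the two test routes locally (a sketch only: the application module and the header key expected by CommonHeaders are not shown in this commit and are assumed here):

from fastapi.testclient import TestClient
from api_initializer.app import app  # hypothetical import path

client = TestClient(app)
response = client.get("/tests/account/records/all", headers={"token": "<access-token>"})  # header name assumed
print(response.status_code, response.json())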
@@ -0,0 +1,54 @@
from api_initializer.event_clusters import EventCluster, RouterCluster
from index import endpoints_index
from .supers_events import (
    ApplicationListAllEvent,
    ApplicationListAvailableEvent,
    ApplicationListAppendedEvent,
    ApplicationRegisterServiceEvent,
    ApplicationUnRegisterServiceEvent,
    ApplicationCreateEvent,
    ApplicationUpdateEvent,
    ApplicationBindEmployeeEvent,
    ApplicationBindOccupantEvent,
)


ApplicationRouterCluster = RouterCluster(name="ApplicationRouterCluster")

ApplicationEventClusterListAll = EventCluster(name="ApplicationListAll", endpoint_uu_id=endpoints_index["ApplicationListAll"])
ApplicationEventClusterListAll.add_event(ApplicationListAllEvent)

ApplicationEventClusterListAvailable = EventCluster(name="ApplicationListAvailable", endpoint_uu_id=endpoints_index["ApplicationListAvailable"])
ApplicationEventClusterListAvailable.add_event(ApplicationListAvailableEvent)

ApplicationEventClusterListAppended = EventCluster(name="ApplicationListAppended", endpoint_uu_id=endpoints_index["ApplicationListAppended"])
ApplicationEventClusterListAppended.add_event(ApplicationListAppendedEvent)

ApplicationEventClusterRegisterService = EventCluster(name="ApplicationRegisterService", endpoint_uu_id=endpoints_index["ApplicationRegisterService"])
ApplicationEventClusterRegisterService.add_event(ApplicationRegisterServiceEvent)

ApplicationEventClusterUnregisterService = EventCluster(name="ApplicationUnRegisterService", endpoint_uu_id=endpoints_index["ApplicationUnRegisterService"])
ApplicationEventClusterUnregisterService.add_event(ApplicationUnRegisterServiceEvent)

ApplicationEventClusterCreate = EventCluster(name="ApplicationCreate", endpoint_uu_id=endpoints_index["ApplicationCreate"])
ApplicationEventClusterCreate.add_event(ApplicationCreateEvent)

ApplicationEventClusterUpdate = EventCluster(name="ApplicationUpdate", endpoint_uu_id=endpoints_index["ApplicationUpdate"])
ApplicationEventClusterUpdate.add_event(ApplicationUpdateEvent)

ApplicationEventClusterBindEmployee = EventCluster(name="ApplicationBindEmployee", endpoint_uu_id=endpoints_index["ApplicationBindEmployee"])
ApplicationEventClusterBindEmployee.add_event(ApplicationBindEmployeeEvent)

ApplicationEventClusterBindOccupant = EventCluster(name="ApplicationBindOccupant", endpoint_uu_id=endpoints_index["ApplicationBindOccupant"])
ApplicationEventClusterBindOccupant.add_event(ApplicationBindOccupantEvent)


ApplicationRouterCluster.set_event_cluster(ApplicationEventClusterListAll)
ApplicationRouterCluster.set_event_cluster(ApplicationEventClusterListAvailable)
ApplicationRouterCluster.set_event_cluster(ApplicationEventClusterListAppended)
ApplicationRouterCluster.set_event_cluster(ApplicationEventClusterRegisterService)
ApplicationRouterCluster.set_event_cluster(ApplicationEventClusterUnregisterService)
ApplicationRouterCluster.set_event_cluster(ApplicationEventClusterCreate)
ApplicationRouterCluster.set_event_cluster(ApplicationEventClusterUpdate)
ApplicationRouterCluster.set_event_cluster(ApplicationEventClusterBindEmployee)
ApplicationRouterCluster.set_event_cluster(ApplicationEventClusterBindOccupant)
@@ -0,0 +1,373 @@
from typing import Any

from api_initializer.event_clusters import Event
from api_validations.response import (
    PaginateOnly,
    Pagination,
    PaginationResult,
    PostgresResponseSingle,
    PostgresResponse,
    EndpointResponse
)

from schemas import (
    Applications,
    Application2Employee,
    Application2Occupant,
    Service2Application,
    Services,
)

# List all endpoint
ApplicationListAllEvent = Event(
    name="application_list_all",
    key="1971ce4d-4f59-4aa8-83e2-ca19d7da6d11",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users List all applications endpoint",
)

# List available endpoint
ApplicationListAvailableEvent = Event(
    name="application_list_available",
    key="d8e733f5-b53a-4c36-9082-12579bf9cc4a",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users List available applications endpoint",
)

# List appended endpoint
ApplicationListAppendedEvent = Event(
    name="application_list_appended",
    key="ea7bbd58-da09-407c-a630-c324e0272385",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users List appended applications endpoint",
)

# Register application to service endpoint
ApplicationRegisterServiceEvent = Event(
    name="application_register_service",
    key="47d7cfc8-6004-4442-8357-16ceac5d9d18",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users Register application to service endpoint",
)

# Unregister application to service endpoint
ApplicationUnRegisterServiceEvent = Event(
    name="application_unregister_service",
    key="d228ab26-0b74-440f-8f1f-8f40be5a22f2",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users Unregister application to service endpoint",
)

# Create endpoint
ApplicationCreateEvent = Event(
    name="application_create",
    key="f53ca9aa-5536-4d77-9129-78d67e61db4a",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users Create applications endpoint",
)

# Update endpoint
ApplicationUpdateEvent = Event(
    name="application_update",
    key="0e9a855e-4e69-44b5-8ac2-825daa32840c",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users Update applications endpoint",
)

# Bind employee endpoint
ApplicationBindEmployeeEvent = Event(
    name="application_bind_employee",
    key="948763ee-f221-409e-9a82-8525053505cb",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users Bind applications to employee endpoint",
)

# Bind occupant endpoint
ApplicationBindOccupantEvent = Event(
    name="application_bind_occupant",
    key="03c894a3-b337-4d90-a559-5fcd0dc3e2c5",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users Bind applications to occupant endpoint",
)


def application_list_all_callable(list_options: PaginateOnly):
    list_options = PaginateOnly(**list_options.model_dump())
    with Applications.new_session() as db_session:
        if list_options.query:
            applications_list = Applications.filter_all(*Applications.convert(list_options.query), db=db_session)
        else:
            applications_list = Applications.filter_all(db=db_session)
        pagination = Pagination(data=applications_list)
        pagination.change(**list_options.model_dump())
        pagination_result = PaginationResult(data=applications_list, pagination=pagination)
        return EndpointResponse(message="MSG0003-LIST", pagination_result=pagination_result).response


ApplicationListAllEvent.event_callable = application_list_all_callable


def application_list_available_callable(list_options: PaginateOnly):
    """
    Example callable method
    """
    list_options = PaginateOnly(**list_options.model_dump())
    service_uu_id = list_options.query.get("service_uu_id__ilike", None)
    if not service_uu_id:
        return {
            "message": "MSG0003-PARAM-MISSING",
            "data": list_options.query,
            "completed": False,
        }
    list_options.query.pop("service_uu_id__ilike", None)
    list_options.query.pop("service_uu_id", None)
    with Applications.new_session() as db_session:
        service2applications = Service2Application.filter_all(
            *Service2Application.convert({"service_uu_id__ilike": service_uu_id}),
            db=db_session,
        )
        already_events = [
            service_to_application.application_id for service_to_application in service2applications.data
        ]
        if list_options.query:
            applications_list = Applications.filter_all(
                *Applications.convert(list_options.query), Applications.id.not_in(already_events), db=db_session
            )
        else:
            applications_list = Applications.filter_all(Applications.id.not_in(already_events), db=db_session)
        pagination = Pagination(data=applications_list)
        pagination.change(**list_options.model_dump())
        pagination_result = PaginationResult(data=applications_list, pagination=pagination)
        return EndpointResponse(message="MSG0003-LIST", pagination_result=pagination_result).response


ApplicationListAvailableEvent.event_callable = application_list_available_callable


def application_list_appended_callable(list_options: PaginateOnly):
    """
    Example callable method
    """
    list_options = PaginateOnly(**list_options.model_dump())
    service_uu_id = list_options.query.get("service_uu_id__ilike", None)
    if not service_uu_id:
        return {
            "message": "MSG0003-PARAM-MISSING",
            "data": list_options.query,
            "completed": False,
        }
    list_options.query.pop("service_uu_id__ilike", None)
    list_options.query.pop("service_uu_id", None)

    with Applications.new_session() as db_session:
        service2applications = Service2Application.filter_all(
            *Service2Application.convert({"service_uu_id__ilike": service_uu_id}),
            db=db_session,
        )
        already_events = [
            service_to_application.application_id for service_to_application in service2applications.data
        ]
        if list_options.query:
            applications_list = Applications.filter_all(
                *Applications.convert(list_options.query), Applications.id.in_(already_events), db=db_session
            )
        else:
            applications_list = Applications.filter_all(Applications.id.in_(already_events), db=db_session)
        pagination = Pagination(data=applications_list)
        pagination.change(**list_options.model_dump())
        pagination_result = PaginationResult(data=applications_list, pagination=pagination)
        return EndpointResponse(message="MSG0003-LIST", pagination_result=pagination_result).response


ApplicationListAppendedEvent.event_callable = application_list_appended_callable


def application_create_callable(data: Any):
    """
    Create a new application
    """
    with Applications.new_session() as db_session:
        created_application_dict = data.model_dump()
        created_application = Applications.find_or_create(
            db=db_session,
            include_args=[Applications.application_for, Applications.application_code, Applications.site_url],
            **created_application_dict,
        )
        if created_application.meta_data.created:
            created_application.save(db=db_session)
            return {
                "completed": True,
                "message": "MSG0001-INSERT",
                "data": created_application,
            }
        return {
            "completed": False,
            "message": "MSG0002-ERROR",
            "data": created_application,
        }


ApplicationCreateEvent.event_callable = application_create_callable


def application_update_callable(data: Any, uu_id: str):
    """
    Update an existing application
    """
    with Applications.new_session() as db_session:
        updated_application_dict = data.model_dump(
            exclude_unset=True, exclude_none=True
        )
        found_application = Applications.filter_one(
            Applications.uu_id == uu_id, db=db_session
        ).data
        if not found_application:
            return {
                "completed": False,
                "message": "MSG0002-FOUND",
                "data": found_application,
            }
        updated_application = found_application.update(
            db=db_session, **updated_application_dict
        )
        updated_application.save(db_session)
        if updated_application.meta_data.updated:
            return {
                "completed": True,
                "message": "MSG0003-UPDATE",
                "data": updated_application,
            }
        return {
            "completed": False,
            "message": "MSG0003-UPDATE",
            "data": updated_application,
        }


ApplicationUpdateEvent.event_callable = application_update_callable


def application_register_service_callable(data: Any):
    """
    Register an application to a service
    """
    with Applications.new_session() as db_session:
        event = Applications.filter_one_system(Applications.uu_id == data.application_uu_id, db=db_session)
        if not event.data:
            return {
                "message": "MSG0003-NOT-FOUND",
                "data": data.model_dump(),
                "completed": False,
            }
        service = Services.filter_one_system(Services.uu_id == data.service_uu_id, db=db_session)
        if not service.data:
            return {
                "message": "MSG0003-NOT-FOUND",
                "data": data.model_dump(),
                "completed": False,
            }
        service_to_application = Service2Application.find_or_create(
            db=db_session,
            include_args=[Service2Application.service_uu_id, Service2Application.application_uu_id],
            service_id=service.data.id,
            service_uu_id=str(service.data.uu_id),
            application_id=event.data.id,
            application_uu_id=str(event.data.uu_id),
            application_code=event.data.application_code,
            site_url=event.data.site_url,
            is_confirmed=True,
        )
        if not service_to_application.meta_data.created:
            return {
                "message": "MSG0003-ALREADY-FOUND",
                "data": data.model_dump(),
                "completed": False,
            }
        service_to_application.save(db=db_session)
        return {
            "message": "MSG0003-REGISTER",
            "data": data.model_dump(),
            "completed": True,
        }


ApplicationRegisterServiceEvent.event_callable = application_register_service_callable


def application_unregister_service_callable(data: Any):
    """
    Unregister an application from a service
    """
    with Applications.new_session() as db_session:
        application = Applications.filter_one_system(Applications.uu_id == data.application_uu_id, db=db_session)
        if not application.data:
            return {
                "message": "MSG0003-NOT-FOUND",
                "data": data.model_dump(),
                "completed": False,
            }
        service = Services.filter_one_system(Services.uu_id == data.service_uu_id, db=db_session)
        if not service.data:
            return {
                "message": "MSG0003-NOT-FOUND",
                "data": data.model_dump(),
                "completed": False,
            }
        service_to_application = Service2Application.filter_one_system(
            Service2Application.service_id == service.data.id,
            Service2Application.application_id == application.data.id,
            db=db_session,
        )
        if not service_to_application.data:
            return {
                "message": "MSG0003-NOT-FOUND",
                "data": data.model_dump(),
                "completed": False,
            }
        service_to_application.query.delete()
        db_session.commit()
        return {
            "message": "MSG0003-UNREGISTER",
            "data": data.model_dump(),
            "completed": True,
        }


ApplicationUnRegisterServiceEvent.event_callable = application_unregister_service_callable


def application_bind_employee_callable(data: Any):
    """
    Example callable method
    """
    return {
        "message": "MSG0003-BIND",
        "data": data.model_dump(),
        "completed": True,
    }


ApplicationBindEmployeeEvent.event_callable = application_bind_employee_callable


def application_bind_occupant_callable(data: Any):
    """
    Example callable method
    """
    return {
        "message": "MSG0003-BIND",
        "data": data.model_dump(),
        "completed": True,
    }


ApplicationBindOccupantEvent.event_callable = application_bind_occupant_callable
@@ -0,0 +1,37 @@
from api_initializer.event_clusters import EventCluster, RouterCluster
from index import endpoints_index
from .supers_events import (
    EventsListAvailableEvent,
    EventsListAppendedEvent,
    EventRegisterServiceEvent,
    EventUnRegisterServiceEvent,
    EventBindEmployeeExtraEvent,
    EventBindOccupantExtraEvent,
)

EventsEndpointRouterCluster = RouterCluster(name="EventsEndpointRouterCluster")

EventsEndpointEventClusterListAvailable = EventCluster(name="EventsListAvailable", endpoint_uu_id=endpoints_index["EventsListAvailable"])
EventsEndpointEventClusterListAvailable.add_event(EventsListAvailableEvent)

EventsEndpointEventClusterListAppended = EventCluster(name="EventsListAppended", endpoint_uu_id=endpoints_index["EventsListAppended"])
EventsEndpointEventClusterListAppended.add_event(EventsListAppendedEvent)

EventsEndpointEventClusterRegisterService = EventCluster(name="EventRegisterService", endpoint_uu_id=endpoints_index["EventRegisterService"])
EventsEndpointEventClusterRegisterService.add_event(EventRegisterServiceEvent)

# name aligned with the "EventUnRegisterService" key used by endpoints_index and the events router
EventsEndpointEventClusterUnregisterService = EventCluster(name="EventUnRegisterService", endpoint_uu_id=endpoints_index["EventUnRegisterService"])
EventsEndpointEventClusterUnregisterService.add_event(EventUnRegisterServiceEvent)

EventsEndpointEventClusterBindEmployeeExtra = EventCluster(name="EventBindExtraEmployee", endpoint_uu_id=endpoints_index["EventBindExtraEmployee"])
EventsEndpointEventClusterBindEmployeeExtra.add_event(EventBindEmployeeExtraEvent)

EventsEndpointEventClusterBindOccupantExtra = EventCluster(name="EventBindExtraOccupant", endpoint_uu_id=endpoints_index["EventBindExtraOccupant"])
EventsEndpointEventClusterBindOccupantExtra.add_event(EventBindOccupantExtraEvent)

EventsEndpointRouterCluster.set_event_cluster(EventsEndpointEventClusterListAvailable)
EventsEndpointRouterCluster.set_event_cluster(EventsEndpointEventClusterListAppended)
EventsEndpointRouterCluster.set_event_cluster(EventsEndpointEventClusterRegisterService)
EventsEndpointRouterCluster.set_event_cluster(EventsEndpointEventClusterUnregisterService)
EventsEndpointRouterCluster.set_event_cluster(EventsEndpointEventClusterBindEmployeeExtra)
EventsEndpointRouterCluster.set_event_cluster(EventsEndpointEventClusterBindOccupantExtra)
@@ -0,0 +1,186 @@
from typing import Any

from api_initializer.event_clusters import Event
from api_validations.response import (
    PaginateOnly,
    Pagination,
    PaginationResult,
    PostgresResponseSingle,
    PostgresResponse,
    EndpointResponse,
)
from schemas import (
    Events,
    Event2Employee,
    Event2Occupant,
    Event2EmployeeExtra,
    Event2OccupantExtra,
    Service2Events,
    Services,
)

# List available events endpoint
EventsListAvailableEvent = Event(
    name="event_endpoint_list_available",
    key="d39af512-ec71-4c0f-9b35-e53b0d06d3a4",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users List available events endpoint",
)

# List appended events endpoint
EventsListAppendedEvent = Event(
    name="event_endpoint_list_appended",
    key="bea77d6a-d99f-468b-9002-b3bda6bb6ad0",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users List appended events endpoint",
)

# Event Register endpoint
EventRegisterServiceEvent = Event(
    name="event_endpoint_register_service",
    key="e18e7f89-5708-4a15-9258-99b0903ed43d",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users Register service endpoint",
)

# Event Unregister endpoint
EventUnRegisterServiceEvent = Event(
    name="service_endpoint_unregister_service",
    key="4d693774-4857-435b-a63c-c39baebfe916",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users Unregister service endpoint",
)

# Bind employee extra endpoint
EventBindEmployeeExtraEvent = Event(
    name="service_endpoint_bind_employee_extra",
    key="cd452928-4256-4fb4-b81e-0ca41d723616",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users Bind service to employee extra endpoint",
)

# Bind occupant extra endpoint
EventBindOccupantExtraEvent = Event(
    name="service_endpoint_bind_occupant_extra",
    key="cb11a150-8049-45c9-8cf3-d5290ffd2e4a",
    request_validator=None,  # TODO: Add request validator
    response_validator=None,  # TODO: Add response validator
    description="Super Users Bind service to occupant extra endpoint",
)


def events_list_available_callable(list_options: PaginateOnly):
    """List available events with pagination and filtering options"""
    list_options = PaginateOnly(**list_options.model_dump())
    service_uu_id = list_options.query.get("service_uu_id__ilike", None)
    if not service_uu_id:
        return {"message": "MSG0003-PARAM-MISSING", "data": list_options.query, "completed": False}
    list_options.query.pop("service_uu_id__ilike", None)
    list_options.query.pop("service_uu_id", None)
    with Events.new_session() as db_session:
        service2events = Service2Events.query.filter(Service2Events.service_uu_id.ilike(f'%{service_uu_id}%'))
        already_events = [service_to_event.event_id for service_to_event in service2events.all()]
        if list_options.query:
            events_list = Events.query.filter(*Events.convert(list_options.query), Events.id.not_in(already_events))
        else:
            events_list = Events.query.filter(Events.id.not_in(already_events))
        events_response = PostgresResponse(data=events_list)
        pagination = Pagination(data=events_response)
        pagination.change(**list_options.model_dump())
        pagination_result = PaginationResult(data=events_response, pagination=pagination)
        return EndpointResponse(message="MSG0003-LIST", pagination_result=pagination_result).response


EventsListAvailableEvent.event_callable = events_list_available_callable


def events_list_appended_callable(list_options: PaginateOnly):
    """List appended events with pagination and filtering options"""
    list_options = PaginateOnly(**list_options.model_dump())
    service_uu_id = list_options.query.get("service_uu_id__ilike", None)
    if not service_uu_id:
        return {"message": "MSG0003-PARAM-MISSING", "data": list_options.query, "completed": False}
    list_options.query.pop("service_uu_id__ilike", None)
    list_options.query.pop("service_uu_id", None)
    with Events.new_session() as db_session:
        service2events = Service2Events.query.filter(*Service2Events.convert({"service_uu_id__ilike": service_uu_id}))
        already_events = [service_to_event.event_id for service_to_event in service2events.all()]
        if list_options.query:
            events_list = Events.filter_all(*Events.convert(list_options.query), Events.id.in_(already_events))
        else:
            events_list = Events.filter_all(Events.id.in_(already_events))
        events_response = PostgresResponse(data=events_list)
        pagination = Pagination(data=events_response)
        pagination.change(**list_options.model_dump())
        pagination_result = PaginationResult(data=events_response, pagination=pagination)
        return EndpointResponse(message="MSG0003-LIST", pagination_result=pagination_result).response


EventsListAppendedEvent.event_callable = events_list_appended_callable


def event_register_service_callable(data: Any):
    """Register event to service"""
    with Events.new_session() as db_session:
        event = Events.query.filter(Events.uu_id == data.event_uu_id).first()
        if not event:
            return EndpointResponse(message="MSG0003-NOT-FOUND", data=data.model_dump(), completed=False).response
        service = Services.query.filter(Services.uu_id == data.service_uu_id).first()
        if not service:
            return {"message": "MSG0003-NOT-FOUND", "data": data.model_dump(), "completed": False}
        service_to_event = Service2Events.query.filter_by(service_id=service.data.id, event_id=event.data.id).first()
        Service2Events.set_session(db_session)
        if not service_to_event:
            service_to_event = Service2Events.create(
                service_id=service.data.id, service_uu_id=str(service.data.uu_id), event_id=event.data.id,
                event_uu_id=str(event.data.uu_id), is_confirmed=True,
            )
            service_to_event.save()
            return {"message": "MSG0003-REGISTERED", "data": data.model_dump(), "completed": True}
        return {"message": "MSG0003-REGISTER-ERROR", "data": data.model_dump(), "completed": True}


EventRegisterServiceEvent.event_callable = event_register_service_callable


def event_unregister_service_callable(data: Any):
    """Unregister event from service"""
    with Events.new_session() as db_session:
        event = Events.query.filter(Events.uu_id == data.event_uu_id).first()
        if not event:
            return EndpointResponse(message="MSG0003-NOT-FOUND", data=data.model_dump(), completed=False).response
        service = Services.query.filter(Services.uu_id == data.service_uu_id).first()
        if not service:
            return {"message": "MSG0003-NOT-FOUND", "data": data.model_dump(), "completed": False}
        service_to_event = Service2Events.query.filter(
            Service2Events.service_id == service.data.id, Service2Events.event_id == event.data.id,
        ).first()
        if not service_to_event:
            return {"message": "MSG0003-NOT-FOUND", "data": data.model_dump(), "completed": False}
        service_to_event.query.delete()
        return {"message": "MSG0003-UNREGISTER", "data": data.model_dump(), "completed": True}


EventUnRegisterServiceEvent.event_callable = event_unregister_service_callable


def event_bind_employee_extra_callable(data: Any):
    """Bind event to employee extra"""
    return EndpointResponse(message="MSG0003-BIND-EMP").response


EventBindEmployeeExtraEvent.event_callable = event_bind_employee_extra_callable


def event_bind_occupant_extra_callable(data: Any):
    """Bind event to occupant extra"""
    return EndpointResponse(message="MSG0003-BIND-OCUP").response


EventBindOccupantExtraEvent.event_callable = event_bind_occupant_extra_callable
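The file above only defines the Event objects and attaches their callables; the wiring into clusters happens in the cluster module shown earlier in this diff. As a minimal sketch of that wiring (the constructor call for this particular cluster is not shown in the diff, so the arguments below simply mirror the visible EventCluster pattern and the "EventsListAvailable" key from endpoints_index):

# Sketch of the cluster wiring, following the pattern visible earlier in this commit.
EventsEndpointEventClusterListAvailable = EventCluster(
    name="EventsListAvailable",
    endpoint_uu_id=endpoints_index["EventsListAvailable"],
)
EventsEndpointEventClusterListAvailable.add_event(EventsListAvailableEvent)
EventsEndpointRouterCluster.set_event_cluster(EventsEndpointEventClusterListAvailable)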
@@ -3,8 +3,8 @@ endpoints_index: dict = {
     "AccountRecordsMonthly": "c0f5ccb1-1e56-4653-af13-ec0bf5e6aa51",
     "EventsListAvailable": "034a7eb7-0186-4f48-bb8c-165c429ad5c1",
     "EventsListAppended": "ec1f3ec3-3f28-4eaf-b89a-c463632c0b90",
-    "EventServiceRegister": "2cf99f10-72f0-4c2b-98be-3082d67b950d",
+    "EventRegisterService": "2cf99f10-72f0-4c2b-98be-3082d67b950d",
-    "EventServiceUnRegister": "15c24c6c-651b-4c5d-9c2b-5c6c6c6c6c6c",
+    "EventUnRegisterService": "15c24c6c-651b-4c5d-9c2b-5c6c6c6c6c6c",
     "EventBindExtraEmployee": "74cafa62-674e-41da-959d-1238ad4a443c",
     "EventBindExtraOccupant": "480bee12-8dfd-4242-b481-f6807eb9adf7",
     "ApplicationListAll": "a61169be-a009-47ec-8658-3dd388af5c3e",

@@ -16,4 +16,4 @@ endpoints_index: dict = {
     "ApplicationUpdate": "83281757-696a-41ed-9706-e145ac54c3a9",
     "ApplicationBindEmployee": "80427237-5ab6-4d17-8084-cdb87bda22a3",
     "ApplicationBindOccupant": "ae0fb101-cb13-47ab-86bd-233a5dbef269",
 }

@@ -0,0 +1,31 @@
FROM python:3.12-slim

WORKDIR /

# Install system dependencies and Poetry
RUN apt-get update && apt-get install -y --no-install-recommends gcc && rm -rf /var/lib/apt/lists/* && pip install --no-cache-dir poetry

# Copy Poetry configuration
COPY /pyproject.toml ./pyproject.toml

# Configure Poetry and install dependencies with optimizations
RUN poetry config virtualenvs.create false && poetry install --no-interaction --no-ansi --no-root --only main && pip cache purge && rm -rf ~/.cache/pypoetry

# Copy application code
COPY /api_services/api_initializer /api_initializer
COPY /api_services/api_controllers /api_controllers
COPY /api_services/api_validations /api_validations
COPY /api_services/api_modules /api_modules
COPY /api_services/schemas /schemas

COPY /api_services/api_middlewares /api_middlewares
COPY /api_services/api_builds/restriction-service/endpoints /api_initializer/endpoints
COPY /api_services/api_builds/restriction-service/events /api_initializer/events
COPY /api_services/api_builds/restriction-service/validations /api_initializer/validations
COPY /api_services/api_builds/restriction-service/index.py /api_initializer/index.py

# Set Python path to include app directory
ENV PYTHONPATH=/ PYTHONUNBUFFERED=1 PYTHONDONTWRITEBYTECODE=1

# Run the application using the configured uvicorn server
CMD ["poetry", "run", "python", "/api_initializer/app.py"]

@@ -0,0 +1,39 @@
from fastapi import APIRouter, Depends

from events.pages.events import PageHandlers
from index import endpoints_index

from validations.request.restrictions.validations import RequestApplication
from api_validations.defaults.validations import CommonHeaders


pages_route = APIRouter(prefix="/restrictions", tags=["Restrictions Cluster"])


application_retrieve_page = "ApplicationRetrievePage"
@pages_route.post(
    path="/page/valid",
    summary="Verify that the page is valid and return the available application",
    description="Verify that the page is valid and return the available application",
    operation_id=endpoints_index[application_retrieve_page],
)
def authentication_page_valid(data: RequestApplication, headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    """
    Verify that the page is valid and return the application the user can reach.
    page: { url = /building/create } | result: { "application": "4c11f5ef-0bbd-41ac-925e-f79d9aac2b0e" }
    """
    return PageHandlers.retrieve_valid_page_via_token(access_token=headers.token, page_url=data.page_url)


application_retrieve_all_sites = "ApplicationRetrieveAllSites"
@pages_route.get(
    path="/sites/list",
    summary="Lists all sites that are available for the user",
    description="Lists all sites that are available for the user",
    operation_id=endpoints_index[application_retrieve_all_sites],
)
def authentication_get_all_sites_list(headers: CommonHeaders = Depends(CommonHeaders.as_dependency)):
    """
    Verify that the token is valid and return the sites the user can reach. result: { "sites": ['/dashboard', '/building/create'] }
    """
    return PageHandlers.retrieve_valid_sites_via_token(access_token=headers.token)

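For reference, a call against the new restriction endpoint would look roughly like the sketch below. It is illustrative only: the port comes from the restriction_service entry in docker-compose later in this diff, while the host and the token header name are assumptions, since CommonHeaders is not shown here.

# Hypothetical client call -- the path comes from the router above; host and
# header name are assumptions.
import httpx

response = httpx.post(
    "http://localhost:8002/restrictions/page/valid",
    json={"page_url": "/building/create"},
    headers={"token": "<access token issued by the auth service>"},
)
print(response.json())  # e.g. {"application": "4c11f5ef-0bbd-41ac-925e-f79d9aac2b0e"}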
@@ -1,8 +1,8 @@
 from fastapi import APIRouter
+from .pages.router import pages_route


 def get_routes() -> list[APIRouter]:
-    return []
+    return [pages_route]


 def get_safe_endpoint_urls() -> list[tuple[str, str]]:

@@ -0,0 +1,3 @@


__all__ = []

@@ -0,0 +1,34 @@
from api_modules.redis.redis_handlers import RedisHandlers


class PageHandlers:

    @classmethod
    def retrieve_valid_page_via_token(cls, access_token: str, page_url: str) -> str:
        """
        Retrieve valid page via token. {access_token: "string", page_url: "string"} | Results: str(application)
        """
        if result := RedisHandlers.get_object_from_redis(access_token=access_token):
            if result.is_employee:
                if result.selected_company and result.selected_company.reachable_app_codes:
                    if application := result.selected_company.reachable_app_codes.get(page_url, None):
                        return application
            elif result.is_occupant:
                if result.selected_occupant and result.selected_occupant.reachable_app_codes:
                    if application := result.selected_occupant.reachable_app_codes.get(page_url, None):
                        return application
        raise ValueError("EYS_0013")

    @classmethod
    def retrieve_valid_sites_via_token(cls, access_token: str) -> list:
        """
        Retrieve valid pages via token. {"access_token": "string"} | Results: list(sites)
        """
        if result := RedisHandlers.get_object_from_redis(access_token=access_token):
            if result.is_employee:
                if result.selected_company and result.selected_company.reachable_app_codes:
                    return list(result.selected_company.reachable_app_codes.keys())
            elif result.is_occupant:
                if result.selected_occupant and result.selected_occupant.reachable_app_codes:
                    return list(result.selected_occupant.reachable_app_codes.keys())
        raise ValueError("EYS_0013")

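Taken together with the Redis handlers later in this diff, the lookup flow is: access token -> Redis token object -> reachable_app_codes map -> application UUID. A minimal illustrative sketch, where the token value and page URL are placeholders:

# Illustrative usage -- assumes a token previously stored by the auth service.
access_token = "<token issued at login>"
application_uu_id = PageHandlers.retrieve_valid_page_via_token(
    access_token=access_token, page_url="/building/create"
)
sites = PageHandlers.retrieve_valid_sites_via_token(access_token=access_token)
# application_uu_id -> UUID string, sites -> list of reachable page URLs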
@@ -0,0 +1,5 @@

endpoints_index: dict = {
    "ApplicationRetrievePage": "e17a9475-0a8a-4a64-82a4-3357ac4a89d0",
    "ApplicationRetrieveAllSites": "e02b83fc-c579-460b-8a8a-04b46ff83318",
}

@@ -0,0 +1,6 @@
from typing import Optional
from pydantic import BaseModel


class RequestApplication(BaseModel):
    page_url: str  # /building/create

@@ -2,27 +2,24 @@ import arrow
 import datetime

 from decimal import Decimal
-from typing import Any, Optional
+from typing import Any, TypeVar, Type, Union, Optional

-from sqlalchemy import Column, Integer, String, Float, ForeignKey, UUID, TIMESTAMP, Boolean, SmallInteger, Numeric, func, text, NUMERIC
-from sqlalchemy.orm import relationship
-from sqlalchemy.orm import Mapped, mapped_column
+from sqlalchemy import Column, Integer, String, Float, ForeignKey, UUID, TIMESTAMP, Boolean, SmallInteger, Numeric, func, text, NUMERIC, ColumnExpressionArgument
+from sqlalchemy.orm import InstrumentedAttribute, Mapped, mapped_column, Query, Session
+from sqlalchemy.sql.elements import BinaryExpression
 from sqlalchemy_mixins.serialize import SerializeMixin
 from sqlalchemy_mixins.repr import ReprMixin
 from sqlalchemy_mixins.smartquery import SmartQueryMixin
 from sqlalchemy_mixins.activerecord import ActiveRecordMixin
-from sqlalchemy.orm import InstrumentedAttribute, Mapped

 from api_controllers.postgres.engine import get_db, Base


-class BasicMixin(
-    Base,
-    ActiveRecordMixin,
-    SerializeMixin,
-    ReprMixin,
-    SmartQueryMixin,
-):
+T = TypeVar("CrudMixin", bound="CrudMixin")
+
+
+class BasicMixin(Base, ActiveRecordMixin, SerializeMixin, ReprMixin, SmartQueryMixin):
     __abstract__ = True
     __repr__ = ReprMixin.__repr__

@@ -89,6 +86,42 @@ class BasicMixin(
             err = e
         return False, None

+    @classmethod
+    def convert(cls: Type[T], smart_options: dict[str, Any], validate_model: Any = None) -> Optional[tuple[BinaryExpression, ...]]:
+        """
+        Convert smart options to SQLAlchemy filter expressions.
+
+        Args:
+            smart_options: Dictionary of filter options
+            validate_model: Optional model to validate against
+
+        Returns:
+            Tuple of SQLAlchemy filter expressions or None if validation fails
+        """
+        try:
+            # Let SQLAlchemy handle the validation by attempting to create the filter expressions
+            return tuple(cls.filter_expr(**smart_options))
+        except Exception as e:
+            # If there's an error, provide a helpful message with valid columns and relationships
+            valid_columns = set()
+            relationship_names = set()
+
+            # Get column names if available
+            if hasattr(cls, '__table__') and hasattr(cls.__table__, 'columns'):
+                valid_columns = set(column.key for column in cls.__table__.columns)
+
+            # Get relationship names if available
+            if hasattr(cls, '__mapper__') and hasattr(cls.__mapper__, 'relationships'):
+                relationship_names = set(rel.key for rel in cls.__mapper__.relationships)
+
+            # Create a helpful error message
+            error_msg = f"Error in filter expression: {str(e)}\n"
+            error_msg += f"Attempted to filter with: {smart_options}\n"
+            error_msg += f"Valid columns are: {', '.join(valid_columns)}\n"
+            error_msg += f"Valid relationships are: {', '.join(relationship_names)}"
+
+            raise ValueError(error_msg) from e
+
     def get_dict(self, exclude_list: Optional[list[InstrumentedAttribute]] = None) -> dict[str, Any]:
         """
         Convert model instance to dictionary with customizable fields.

@@ -127,6 +160,7 @@ class BasicMixin(
         return {}

+
 class CrudMixin(BasicMixin):
     """
     Base mixin providing CRUD operations and common fields for PostgreSQL models.

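The new convert() classmethod is what the event callables earlier in this diff rely on (e.g. Events.query.filter(*Events.convert(list_options.query), ...)). A minimal illustrative call, assuming the Events model exposes a "name" column; the filter value is a placeholder:

# Illustrative usage of BasicMixin.convert -- keys follow the smart-query
# "column__operator" convention from sqlalchemy_mixins.
filters = Events.convert({"name__ilike": "%register%"})
available = Events.query.filter(*filters).all()
# An unknown key raises ValueError listing the model's valid columns and relationships.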
@@ -0,0 +1,53 @@
from typing import Any, Union

from api_validations.token.validations import TokenDictType, EmployeeTokenObject, OccupantTokenObject, CompanyToken, OccupantToken, UserType
from api_controllers.redis.database import RedisActions
from api_modules.token.password_module import PasswordModule
from schemas import Users


class RedisHandlers:

    AUTH_TOKEN: str = "AUTH_TOKEN"

    @classmethod
    def process_redis_object(cls, redis_object: dict[str, Any]) -> TokenDictType:
        """Process Redis object and return appropriate token object."""
        if not redis_object.get("selected_company"):
            redis_object["selected_company"] = None
        if not redis_object.get("selected_occupant"):
            redis_object["selected_occupant"] = None
        if redis_object.get("user_type") == UserType.employee.value:
            return EmployeeTokenObject(**redis_object)
        elif redis_object.get("user_type") == UserType.occupant.value:
            return OccupantTokenObject(**redis_object)
        raise ValueError("Invalid user type")

    @classmethod
    def get_object_from_redis(cls, access_token: str) -> TokenDictType:
        redis_response = RedisActions.get_json(list_keys=[RedisHandlers.AUTH_TOKEN, access_token, "*"])
        if not redis_response.status:
            raise ValueError("EYS_0001")
        if redis_object := redis_response.first:
            return cls.process_redis_object(redis_object)
        raise ValueError("EYS_0002")

    @classmethod
    def set_object_to_redis(cls, user: Users, token, header_info):
        result_delete = RedisActions.delete(list_keys=[RedisHandlers.AUTH_TOKEN, "*", str(user.uu_id)])
        generated_access_token = PasswordModule.generate_access_token()
        keys = [RedisHandlers.AUTH_TOKEN, generated_access_token, str(user.uu_id)]
        RedisActions.set_json(list_keys=keys, value={**token, **header_info}, expires={"hours": 1, "minutes": 30})
        return generated_access_token

    @classmethod
    def update_token_at_redis(cls, token: str, add_payload: Union[CompanyToken, OccupantToken]):
        if already_token_data := RedisActions.get_json(list_keys=[RedisHandlers.AUTH_TOKEN, token, "*"]).first:
            already_token = cls.process_redis_object(already_token_data)
            if already_token.is_employee and isinstance(add_payload, CompanyToken):
                already_token.selected_company = add_payload
            elif already_token.is_occupant and isinstance(add_payload, OccupantToken):
                already_token.selected_occupant = add_payload
            list_keys = [RedisHandlers.AUTH_TOKEN, token, str(already_token.user_uu_id)]
            result = RedisActions.set_json(list_keys=list_keys, value=already_token.model_dump(), expires={"hours": 1, "minutes": 30})
            return result.first
        raise ValueError("Something went wrong")

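As far as this diff shows it, the intended token lifecycle is: set_object_to_redis at login, get_object_from_redis on each authenticated request, and update_token_at_redis when a company or occupant is selected. A rough sketch, where the user, token payload, header info, and selected company token are placeholders supplied by the auth flow:

# Illustrative token lifecycle -- payload contents are placeholders.
access_token = RedisHandlers.set_object_to_redis(user=user, token=token_payload, header_info=header_info)
token_object = RedisHandlers.get_object_from_redis(access_token=access_token)
if token_object.is_employee:
    RedisHandlers.update_token_at_redis(token=access_token, add_payload=selected_company_token)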
@@ -0,0 +1,4 @@
from .pagination import PaginateOnly, ListOptions, PaginationConfig
from .result import Pagination, PaginationResult
from .base import PostgresResponseSingle, PostgresResponse, ResultQueryJoin, ResultQueryJoinSingle
from .api import EndpointResponse, CreateEndpointResponse

@@ -1,7 +1,8 @@
 from .result import PaginationResult
 from .base import PostgresResponseSingle
 from pydantic import BaseModel
-from typing import Any
+from typing import Any, Type


 class EndpointResponse(BaseModel):
     """Endpoint response model."""

@@ -33,7 +34,10 @@ class EndpointResponse(BaseModel):
             "data": result_data,
             "pagination": pagination_dict,
         }

+    model_config = {
+        "arbitrary_types_allowed": True
+    }


 class CreateEndpointResponse(BaseModel):
     """Create endpoint response model."""

@@ -51,3 +55,6 @@ class CreateEndpointResponse(BaseModel):
             "data": self.data.data,
         }

+    model_config = {
+        "arbitrary_types_allowed": True
+    }

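Both response models now set Pydantic v2's arbitrary_types_allowed, which is what lets their fields hold non-Pydantic objects such as the PostgresResponse and PaginationResult wrappers instead of requiring BaseModel subclasses. A self-contained illustration of the setting; QueryWrapper is a made-up stand-in for such a field type:

# Minimal illustration of arbitrary_types_allowed; QueryWrapper is hypothetical.
from pydantic import BaseModel

class QueryWrapper:
    def __init__(self, rows):
        self.rows = rows

class Response(BaseModel):
    model_config = {"arbitrary_types_allowed": True}
    data: QueryWrapper  # accepted as-is, validated only by isinstance

resp = Response(data=QueryWrapper(rows=[1, 2, 3]))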
@@ -1,8 +1,11 @@
+from typing import Optional, Union, Type, Any, Dict, TypeVar
+from pydantic import BaseModel
+from sqlalchemy.orm import Query
+
 from .pagination import default_paginate_config
 from .base import PostgresResponse
-from typing import Optional, Union
-from sqlalchemy.orm import Query
-from pydantic import BaseModel
+
+T = TypeVar("T")


 class Pagination:
     """

@@ -168,16 +171,7 @@ class PaginationResult:
     @property
     def data(self) -> Union[list | dict]:
         """Get query object."""
-        query_ordered = self.dynamic_order_by()
-        query_paginated = query_ordered.limit(self.limit).offset(self.offset)
-        queried_data = (
-            query_paginated.all() if self.response_type else query_paginated.first()
-        )
-        data = (
-            [result.get_dict() for result in queried_data]
-            if self.response_type
-            else queried_data.get_dict()
-        )
-        if self.response_model:
-            return [self.response_model(**item).model_dump() for item in data]
-        return data
+        query_paginated = self.dynamic_order_by().limit(self.limit).offset(self.offset)
+        queried_data = (query_paginated.all() if self.response_type else query_paginated.first())
+        data = ([result.get_dict() for result in queried_data] if self.response_type else queried_data.get_dict())
+        return [self.response_model(**item).model_dump() for item in data] if self.response_model else data

@@ -3,7 +3,7 @@ services:
     container_name: auth_service
     build:
       context: .
-      dockerfile: api_services/api_builds/auth-service/Dockerfile
+      dockerfile: api_services/api_builds/auth_service/Dockerfile
     env_file:
       - api_env.env
     environment:

@@ -21,11 +21,53 @@ services:
       - "8001:8001"
     # restart: unless-stopped

+  restriction_service:
+    container_name: restriction_service
+    build:
+      context: .
+      dockerfile: api_services/api_builds/restriction_service/Dockerfile
+    env_file:
+      - api_env.env
+    environment:
+      - API_PATH=app:app
+      - API_HOST=0.0.0.0
+      - API_PORT=8002
+      - API_LOG_LEVEL=info
+      - API_RELOAD=1
+      - API_APP_NAME=evyos-restriction-api-gateway
+      - API_TITLE=WAG API Restriction Api Gateway
+      - API_DESCRIPTION=This api serves as web restriction api gateway only to evyos web services.
+      - API_APP_URL=https://restriction_service
+    ports:
+      - "8002:8002"
+    # restart: unless-stopped
+
+  management_service:
+    container_name: management_service
+    build:
+      context: .
+      dockerfile: api_services/api_builds/management_service/Dockerfile
+    env_file:
+      - api_env.env
+    environment:
+      - API_PATH=app:app
+      - API_HOST=0.0.0.0
+      - API_PORT=8003
+      - API_LOG_LEVEL=info
+      - API_RELOAD=1
+      - API_APP_NAME=evyos-management-api-gateway
+      - API_TITLE=WAG API Management Api Gateway
+      - API_DESCRIPTION=This api serves as web management api gateway only to evyos web services.
+      - API_APP_URL=https://management_service
+    ports:
+      - "8003:8003"
+    # restart: unless-stopped
+
   initializer_service:
     container_name: initializer_service
     build:
       context: .
-      dockerfile: api_services/api_builds/initial-service/Dockerfile
+      dockerfile: api_services/api_builds/initial_service/Dockerfile
     environment:
       - SET_ALEMBIC=0
     networks:

@@ -6,4 +6,4 @@ endpoints_index: dict = {
     "Slot3": "",
     "Slot4": "",
     "Slot5": "",
 }