validations and dockerfiles are updated
This commit is contained in:
@@ -2,32 +2,35 @@ FROM python:3.12-slim-bookworm
|
||||
|
||||
ENV PYTHONDONTWRITEBYTECODE 1
|
||||
ENV PYTHONUNBUFFERED 1
|
||||
ENV PYTHONPATH=/service_app
|
||||
|
||||
COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/
|
||||
|
||||
COPY ApiServices/AuthService/pyproject.toml .
|
||||
|
||||
RUN uv venv
|
||||
RUN uv pip install -r pyproject.toml
|
||||
|
||||
COPY ApiServices/AuthService ./service_app
|
||||
COPY ApiServices/api_handlers ./service_app/api_handlers
|
||||
|
||||
COPY databases ./service_app/databases
|
||||
COPY api_services ./service_app/api_services
|
||||
COPY api_objects ./service_app/api_objects
|
||||
COPY api_configs ./service_app/api_configs
|
||||
COPY api_events ./service_app/api_events
|
||||
COPY api_library ./service_app/api_library
|
||||
COPY api_validations ./service_app/api_validations
|
||||
|
||||
WORKDIR /service_app
|
||||
|
||||
CMD ["uv", "run", "app.py"]
|
||||
# Create logs directory
|
||||
RUN mkdir -p /service_app/logs
|
||||
|
||||
# Old File
|
||||
#FROM python:3.10
|
||||
COPY ApiServices/AuthService/pyproject.toml .
|
||||
|
||||
#RUN pip install --upgrade pip
|
||||
#RUN pip install --no-cache-dir --upgrade -r requirements.txt
|
||||
#CMD ["python", "-m", "app"]
|
||||
RUN uv venv .venv
|
||||
RUN . .venv/bin/activate && uv pip install -r pyproject.toml
|
||||
|
||||
COPY ApiServices ./ApiServices
|
||||
COPY databases ./databases
|
||||
COPY api_services ./api_services
|
||||
COPY api_objects ./api_objects
|
||||
COPY api_configs ./api_configs
|
||||
COPY api_events ./api_events
|
||||
COPY api_library ./api_library
|
||||
COPY api_validations ./api_validations
|
||||
|
||||
WORKDIR /service_app/ApiServices/AuthService
|
||||
|
||||
# Create startup script
|
||||
RUN echo '#!/bin/bash\n\
|
||||
source /service_app/.venv/bin/activate\n\
|
||||
exec python app.py' > /service_app/start.sh && \
|
||||
chmod +x /service_app/start.sh
|
||||
|
||||
CMD ["/service_app/start.sh"]
|
||||
|
||||
@@ -1 +1,5 @@
|
||||
__all__ = []
|
||||
from .authentication.router import login_route
|
||||
|
||||
__all__ = [
|
||||
"login_route",
|
||||
]
|
||||
|
||||
131
ApiServices/AuthService/routers/authentication/router.py
Normal file
131
ApiServices/AuthService/routers/authentication/router.py
Normal file
@@ -0,0 +1,131 @@
|
||||
from typing import Union
|
||||
from fastapi.routing import APIRouter
|
||||
from fastapi.requests import Request
|
||||
|
||||
from api_validations.validations_request import (
|
||||
Login,
|
||||
Logout,
|
||||
ChangePassword,
|
||||
Remember,
|
||||
Forgot,
|
||||
CreatePassword,
|
||||
OccupantSelection,
|
||||
EmployeeSelection,
|
||||
)
|
||||
from api_events.events import (
|
||||
AuthenticationLoginEventMethod,
|
||||
AuthenticationSelectEventMethod,
|
||||
AuthenticationCheckTokenEventMethod,
|
||||
AuthenticationRefreshEventMethod,
|
||||
AuthenticationChangePasswordEventMethod,
|
||||
AuthenticationCreatePasswordEventMethod,
|
||||
AuthenticationResetPasswordEventMethod,
|
||||
AuthenticationDisconnectUserEventMethod,
|
||||
AuthenticationLogoutEventMethod,
|
||||
AuthenticationRefreshTokenEventMethod,
|
||||
AuthenticationForgotPasswordEventMethod,
|
||||
AuthenticationDownloadAvatarEventMethod,
|
||||
)
|
||||
from ApiServices.api_handlers.auth_actions.token import parse_token_object_to_dict
|
||||
|
||||
|
||||
login_route = APIRouter(prefix="/authentication", tags=["Authentication"])
|
||||
login_route.include_router(login_route, include_in_schema=True)
|
||||
|
||||
|
||||
@login_route.post(path="/select", summary="Select company or occupant type")
|
||||
def authentication_select_company_or_occupant_type(
|
||||
request: Request, data: Union[EmployeeSelection, OccupantSelection]
|
||||
):
|
||||
token_dict = parse_token_object_to_dict(request=request)
|
||||
return (
|
||||
AuthenticationSelectEventMethod.authentication_select_company_or_occupant_type(
|
||||
data=data, request=request, token_dict=token_dict
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@login_route.post(path="/login", summary="Login user with domain and password")
|
||||
def authentication_login_with_domain_and_creds(request: Request, data: Login):
|
||||
return AuthenticationLoginEventMethod.authentication_login_with_domain_and_creds(
|
||||
request=request, data=data
|
||||
)
|
||||
|
||||
|
||||
@login_route.get(path="/valid", summary="Check access token is valid")
|
||||
def authentication_check_token_is_valid(request: Request):
|
||||
return AuthenticationCheckTokenEventMethod.authentication_check_token_is_valid(
|
||||
request=request
|
||||
)
|
||||
|
||||
|
||||
@login_route.get(path="/refresh", summary="Refresh credentials with access token")
|
||||
def authentication_refresh_user_info(request: Request):
|
||||
token_dict = parse_token_object_to_dict(request=request)
|
||||
return AuthenticationRefreshEventMethod.authentication_refresh_user_info(
|
||||
request=request, token_dict=token_dict
|
||||
)
|
||||
|
||||
|
||||
@login_route.post(path="/change_password", summary="Change password with access token")
|
||||
def authentication_change_password(request: Request, data: ChangePassword):
|
||||
token_dict = parse_token_object_to_dict(request=request)
|
||||
return AuthenticationChangePasswordEventMethod.authentication_change_password(
|
||||
data=data, token_dict=token_dict
|
||||
)
|
||||
|
||||
|
||||
@login_route.post(
|
||||
path="/create_password", summary="Create password with password token"
|
||||
)
|
||||
def authentication_create_password(data: CreatePassword):
|
||||
return AuthenticationCreatePasswordEventMethod.authentication_create_password(
|
||||
data=data
|
||||
)
|
||||
|
||||
|
||||
@login_route.post(path="/reset_password", summary="Create password with password token")
|
||||
def authentication_reset_password(data: Forgot):
|
||||
return AuthenticationResetPasswordEventMethod.authentication_reset_password(
|
||||
data=data
|
||||
)
|
||||
|
||||
|
||||
@login_route.post(path="/disconnect", summary="Disconnect user with access token")
|
||||
def authentication_disconnect_user(request: Request, data: Logout):
|
||||
token_dict = parse_token_object_to_dict(request=request)
|
||||
return AuthenticationDisconnectUserEventMethod.authentication_disconnect_user(
|
||||
data=data, token_dict=token_dict
|
||||
)
|
||||
|
||||
|
||||
@login_route.post(path="/logout", summary="Logout user with access token")
|
||||
def authentication_logout_user(request: Request, data: Logout):
|
||||
token_dict = parse_token_object_to_dict(request=request)
|
||||
return AuthenticationLogoutEventMethod.authentication_logout_user(
|
||||
data=data, token_dict=token_dict
|
||||
)
|
||||
|
||||
|
||||
@login_route.post(path="/refresher", summary="Refresh token with refresh token")
|
||||
def authentication_refresher_token(request: Request, data: Remember):
|
||||
token_dict = parse_token_object_to_dict(request=request)
|
||||
return AuthenticationRefreshTokenEventMethod.authentication_refresher_token(
|
||||
data=data, request=request, token_dict=token_dict
|
||||
)
|
||||
|
||||
|
||||
@login_route.post(path="/forgot", summary="Forgot password with email or phone number")
|
||||
def authentication_forgot_password(request: Request, data: Forgot):
|
||||
# token_dict = parse_token_object_to_dict(request=request)
|
||||
return AuthenticationForgotPasswordEventMethod.authentication_forgot_password(
|
||||
data=data, request=request
|
||||
)
|
||||
|
||||
|
||||
@login_route.post(path="/avatar", summary="Get link of avatar with credentials")
|
||||
def authentication_download_avatar(request: Request):
|
||||
token_dict = parse_token_object_to_dict(request=request)
|
||||
return AuthenticationDownloadAvatarEventMethod.authentication_download_avatar(
|
||||
token_dict=token_dict
|
||||
)
|
||||
@@ -5,6 +5,9 @@ ENV PYTHONUNBUFFERED 1
|
||||
|
||||
COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/
|
||||
|
||||
# Create logs directory
|
||||
RUN mkdir -p /service_app/logs
|
||||
|
||||
COPY ApiServices/EventService/pyproject.toml .
|
||||
|
||||
RUN uv venv
|
||||
|
||||
@@ -5,6 +5,9 @@ ENV PYTHONUNBUFFERED 1
|
||||
|
||||
COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/
|
||||
|
||||
# Create logs directory
|
||||
RUN mkdir -p /service_app/logs
|
||||
|
||||
COPY ApiServices/ValidationService/pyproject.toml .
|
||||
|
||||
RUN uv venv
|
||||
|
||||
@@ -0,0 +1,3 @@
|
||||
from .core_response import AlchemyJsonResponse
|
||||
|
||||
__all__ = ["AlchemyJsonResponse"]
|
||||
0
ApiServices/api_handlers/auth_actions/__init__.py
Normal file
0
ApiServices/api_handlers/auth_actions/__init__.py
Normal file
223
ApiServices/api_handlers/auth_actions/auth.py
Normal file
223
ApiServices/api_handlers/auth_actions/auth.py
Normal file
@@ -0,0 +1,223 @@
|
||||
from fastapi.exceptions import HTTPException
|
||||
from api_configs import Auth
|
||||
from api_objects import (
|
||||
OccupantTokenObject,
|
||||
EmployeeTokenObject,
|
||||
UserType,
|
||||
)
|
||||
from api_services.redis.conn import redis_cli
|
||||
from ApiServices.api_handlers.auth_actions.token import AccessObjectActions
|
||||
|
||||
|
||||
def save_access_token_to_redis(
|
||||
request, found_user, domain: str, access_token: str = None
|
||||
):
|
||||
from databases import (
|
||||
BuildLivingSpace,
|
||||
BuildParts,
|
||||
Companies,
|
||||
Duties,
|
||||
Departments,
|
||||
Duty,
|
||||
Employees,
|
||||
Staff,
|
||||
Addresses,
|
||||
OccupantTypes
|
||||
)
|
||||
|
||||
if not found_user:
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail=dict(message="User is not found."),
|
||||
)
|
||||
|
||||
# Check user is already logged in or has a previous session
|
||||
already_tokens = AccessObjectActions.get_object_via_user_uu_id(user_id=found_user.uu_id)
|
||||
for key, token_user in already_tokens.items():
|
||||
if token_user.get("domain", "") == domain:
|
||||
redis_cli.delete(key)
|
||||
|
||||
access_token = (
|
||||
found_user.generate_access_token() if not access_token else access_token
|
||||
)
|
||||
# Prepare the user's details to save in Redis Session
|
||||
if found_user.is_occupant: # Check if user is NOT an occupant
|
||||
living_spaces: list[BuildLivingSpace] = BuildLivingSpace.filter_all(
|
||||
BuildLivingSpace.person_id == found_user.person_id
|
||||
).data
|
||||
if not living_spaces:
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail=dict(
|
||||
message="NO Living Space is found. This user has no proper account set please contact the admin."
|
||||
),
|
||||
)
|
||||
occupants_selection_dict = {}
|
||||
for living_space in living_spaces:
|
||||
build_parts_selection = BuildParts.filter_all(
|
||||
BuildParts.id == living_space.build_parts_id,
|
||||
)
|
||||
if not build_parts_selection.data:
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail=dict(
|
||||
message="No build Part is found for the living space. Please contact the admin."
|
||||
),
|
||||
)
|
||||
build_part = build_parts_selection.get(1)
|
||||
build = build_part.buildings
|
||||
occupant_type = OccupantTypes.filter_by_one(
|
||||
id=living_space.occupant_type,
|
||||
system=True,
|
||||
).data
|
||||
if not str(build.uu_id) in occupants_selection_dict:
|
||||
occupants_selection_dict[str(build.uu_id)] = dict(
|
||||
build_uu_id=str(build.uu_id),
|
||||
build_name=build.build_name,
|
||||
build_no=build.build_no,
|
||||
occupants=[
|
||||
dict(
|
||||
part_uu_id=str(build_part.uu_id),
|
||||
part_name=build_part.part_name,
|
||||
part_level=build_part.part_level,
|
||||
uu_id=str(occupant_type.uu_id),
|
||||
description=occupant_type.occupant_description,
|
||||
code=occupant_type.occupant_code,
|
||||
)
|
||||
],
|
||||
)
|
||||
elif str(build.uu_id) in occupants_selection_dict:
|
||||
occupants_selection_dict[str(build.uu_id)]["occupants"].append(
|
||||
dict(
|
||||
part_uu_id=str(build_part.uu_id),
|
||||
part_name=build_part.part_name,
|
||||
part_level=build_part.part_level,
|
||||
uu_id=str(occupant_type.uu_id),
|
||||
description=occupant_type.occupant_description,
|
||||
code=occupant_type.occupant_code,
|
||||
)
|
||||
)
|
||||
AccessObjectActions.save_object_to_redis(
|
||||
access_token=access_token,
|
||||
model_object=OccupantTokenObject(
|
||||
domain=domain,
|
||||
user_type=UserType.occupant.value,
|
||||
user_uu_id=str(found_user.uu_id),
|
||||
credentials=found_user.credentials(),
|
||||
user_id=found_user.id,
|
||||
person_id=found_user.person_id,
|
||||
person_uu_id=str(found_user.person.uu_id),
|
||||
request=dict(request.headers),
|
||||
available_occupants=occupants_selection_dict,
|
||||
),
|
||||
)
|
||||
return dict(
|
||||
user_type=UserType.occupant.name,
|
||||
available_occupants=occupants_selection_dict,
|
||||
)
|
||||
|
||||
list_employee = Employees.filter_all(
|
||||
Employees.people_id == found_user.person_id,
|
||||
).data
|
||||
companies_uu_id_list, companies_id_list, companies_list = [], [], []
|
||||
duty_uu_id_list, duty_id_list = [], []
|
||||
for employee in list_employee:
|
||||
staff = Staff.filter_one(Staff.id == employee.staff_id).data
|
||||
if duties := Duties.filter_one(Duties.id == staff.duties_id).data:
|
||||
if duty_found := Duty.filter_by_one(id=duties.duties_id).data:
|
||||
duty_uu_id_list.append(str(duty_found.uu_id))
|
||||
duty_id_list.append(duty_found.id)
|
||||
|
||||
department = Departments.filter_one(
|
||||
Departments.id == duties.department_id,
|
||||
).data
|
||||
if company := Companies.filter_one(
|
||||
Companies.id == department.company_id,
|
||||
).data:
|
||||
companies_uu_id_list.append(str(company.uu_id))
|
||||
companies_id_list.append(company.id)
|
||||
company_address = Addresses.filter_by_one(
|
||||
id=company.official_address_id
|
||||
).data
|
||||
companies_list.append(
|
||||
dict(
|
||||
uu_id=str(company.uu_id),
|
||||
public_name=company.public_name,
|
||||
company_type=company.company_type,
|
||||
company_address=company_address,
|
||||
)
|
||||
)
|
||||
AccessObjectActions.save_object_to_redis(
|
||||
access_token=access_token,
|
||||
model_object=EmployeeTokenObject(
|
||||
domain=domain,
|
||||
user_type=UserType.employee.value,
|
||||
user_uu_id=str(found_user.uu_id),
|
||||
credentials=found_user.credentials(),
|
||||
user_id=found_user.id,
|
||||
person_id=found_user.person_id,
|
||||
person_uu_id=str(found_user.person.uu_id),
|
||||
request=dict(request.headers),
|
||||
companies_uu_id_list=companies_uu_id_list,
|
||||
companies_id_list=companies_id_list,
|
||||
duty_uu_id_list=duty_uu_id_list,
|
||||
duty_id_list=duty_id_list,
|
||||
),
|
||||
)
|
||||
return dict(
|
||||
user_type=UserType.employee.name,
|
||||
companies_list=companies_list,
|
||||
)
|
||||
|
||||
|
||||
def update_selected_to_redis(request, add_payload):
|
||||
already_tokens = AccessObjectActions.get_object_via_access_key(request=request)
|
||||
if not hasattr(request, "headers"):
|
||||
raise HTTPException(
|
||||
status_code=401,
|
||||
detail=dict(
|
||||
message="Headers are not found in request. Invalid request object."
|
||||
),
|
||||
)
|
||||
access_token = request.headers.get(Auth.ACCESS_TOKEN_TAG)
|
||||
if already_tokens.user_type == UserType.occupant.value:
|
||||
already_tokens.selected_occupant = add_payload.model_dump()
|
||||
return AccessObjectActions.save_object_to_redis(
|
||||
access_token=access_token,
|
||||
model_object=OccupantTokenObject(**already_tokens.model_dump()),
|
||||
)
|
||||
elif already_tokens.user_type == UserType.employee.value:
|
||||
already_tokens.selected_company = add_payload.model_dump()
|
||||
return AccessObjectActions.save_object_to_redis(
|
||||
access_token=access_token,
|
||||
model_object=EmployeeTokenObject(**already_tokens.model_dump()),
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=401,
|
||||
detail=dict(
|
||||
message="User type is not found in the token object. Please reach to your administrator."
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
def update_access_token_to_redis(request, add_payload):
|
||||
already_tokens = AccessObjectActions.get_object_via_access_key(request=request)
|
||||
if not hasattr(request, "headers"):
|
||||
raise HTTPException(
|
||||
status_code=401,
|
||||
detail=dict(
|
||||
message="Headers are not found in request. Invalid request object."
|
||||
),
|
||||
)
|
||||
payload = {**add_payload, **already_tokens}
|
||||
access_token = request.headers.get(Auth.ACCESS_TOKEN_TAG)
|
||||
if payload.get("user_type") == str(UserType.occupant.value):
|
||||
|
||||
return AccessObjectActions.save_object_to_redis(
|
||||
access_token=access_token,
|
||||
model_object=OccupantTokenObject(**payload),
|
||||
)
|
||||
return AccessObjectActions.save_object_to_redis(
|
||||
access_token=access_token,
|
||||
model_object=EmployeeTokenObject(**payload),
|
||||
)
|
||||
1
ApiServices/api_handlers/auth_actions/login.py
Normal file
1
ApiServices/api_handlers/auth_actions/login.py
Normal file
@@ -0,0 +1 @@
|
||||
|
||||
219
ApiServices/api_handlers/auth_actions/token.py
Normal file
219
ApiServices/api_handlers/auth_actions/token.py
Normal file
@@ -0,0 +1,219 @@
|
||||
import typing
|
||||
from fastapi import HTTPException, status
|
||||
from api_objects import OccupantTokenObject, EmployeeTokenObject
|
||||
from api_services.redis.functions import RedisActions
|
||||
from api_configs import Auth
|
||||
|
||||
|
||||
class AccessObjectActions:
|
||||
|
||||
@classmethod
|
||||
def save_object_to_redis(
|
||||
cls,
|
||||
access_token,
|
||||
model_object: typing.Union[OccupantTokenObject, EmployeeTokenObject],
|
||||
expiry_minutes: int = Auth.TOKEN_EXPIRE_MINUTES_30.total_seconds() // 60
|
||||
) -> bool:
|
||||
"""Save access token object to Redis with expiry
|
||||
Args:
|
||||
access_token: The access token
|
||||
model_object: The token object to save
|
||||
expiry_minutes: Minutes until token expires (default: from Auth config)
|
||||
Returns:
|
||||
bool: True if successful
|
||||
Raises:
|
||||
HTTPException: If save fails
|
||||
"""
|
||||
try:
|
||||
RedisActions.save_object_to_redis(
|
||||
access_token=access_token,
|
||||
model_object=model_object,
|
||||
expiry_minutes=expiry_minutes
|
||||
)
|
||||
return True
|
||||
except Exception as e:
|
||||
print("Save Object to Redis Error: ", e)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
|
||||
detail=dict(
|
||||
message="Failed to save token to Redis",
|
||||
error=str(e)
|
||||
),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def get_object_via_user_uu_id(cls, user_id: str) -> typing.Union[dict, None]:
|
||||
"""Get all valid tokens for a user
|
||||
Args:
|
||||
user_id: The user UUID to search for
|
||||
Returns:
|
||||
dict: Dictionary of valid tokens for the user
|
||||
"""
|
||||
return RedisActions.get_object_via_user_uu_id(user_id)
|
||||
|
||||
@classmethod
|
||||
def access_token(cls, request) -> str:
|
||||
"""Extract and validate access token from request
|
||||
Args:
|
||||
request: The request object
|
||||
Returns:
|
||||
str: The access token
|
||||
Raises:
|
||||
HTTPException: If token is missing or invalid
|
||||
"""
|
||||
if not hasattr(request, "headers"):
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail=dict(
|
||||
message="Headers not found in request"
|
||||
)
|
||||
)
|
||||
|
||||
access_token = request.headers.get(Auth.ACCESS_TOKEN_TAG)
|
||||
if not access_token:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail=dict(
|
||||
message="Unauthorized user, please login"
|
||||
),
|
||||
)
|
||||
return access_token
|
||||
|
||||
@classmethod
|
||||
def get_token_object(cls, request) -> typing.Union[OccupantTokenObject, EmployeeTokenObject]:
|
||||
"""Get and validate token object from request
|
||||
Args:
|
||||
request: The request object
|
||||
Returns:
|
||||
Union[OccupantTokenObject, EmployeeTokenObject]: The token object
|
||||
Raises:
|
||||
HTTPException: If token is invalid or expired
|
||||
"""
|
||||
try:
|
||||
return RedisActions.get_object_via_access_key(request)
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail=dict(
|
||||
message=str(e)
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@classmethod
|
||||
def get_object_via_access_key(
|
||||
cls, request,
|
||||
) -> typing.Union[EmployeeTokenObject, OccupantTokenObject, None]:
|
||||
from api_configs import Auth
|
||||
access_object = RedisActions.get_with_regex(
|
||||
value_regex=str(request.headers.get(Auth.ACCESS_TOKEN_TAG) + ":*")
|
||||
).data
|
||||
if access_object.get("user_type") == 1:
|
||||
if not access_object.get("selected_company", None):
|
||||
access_object["selected_company"] = None
|
||||
return EmployeeTokenObject(**access_object)
|
||||
elif access_object.get("user_type") == 2:
|
||||
if not access_object.get("selected_occupant", None):
|
||||
access_object["selected_occupant"] = None
|
||||
return OccupantTokenObject(**access_object)
|
||||
raise HTTPException(
|
||||
status_code=401,
|
||||
detail=dict(
|
||||
message="User type is not found in the token object. Please reach to your administrator."
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def parse_token_object_to_dict(request): # from requests import Request
|
||||
import api_events.events as events
|
||||
|
||||
from databases import EndpointRestriction, Events
|
||||
from api_configs.configs import Config
|
||||
|
||||
if valid_token := AccessObjectActions.get_token_object(request=request):
|
||||
endpoint_name = str(request.url).replace(str(request.base_url), "/")
|
||||
if (
|
||||
str(endpoint_name) in Config.INSECURE_PATHS
|
||||
or str(endpoint_name) in Config.NOT_SECURE_PATHS
|
||||
):
|
||||
return valid_token
|
||||
if "update" in endpoint_name:
|
||||
endpoint_name = endpoint_name.split("update")[0] + "update"
|
||||
endpoint_active = EndpointRestriction.filter_one(
|
||||
EndpointRestriction.endpoint_name.ilike(f"%{endpoint_name}%"),
|
||||
system=True,
|
||||
).data
|
||||
|
||||
if not endpoint_active:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail=f"This endpoint {endpoint_name} is not active for this user, please contact your responsible company for further information.",
|
||||
)
|
||||
if valid_token.user_type == 1:
|
||||
if not valid_token.selected_company:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail="Selected company is not found in the token object.",
|
||||
)
|
||||
selected_event = Events.filter_one(
|
||||
Events.endpoint_id == endpoint_active.id,
|
||||
Events.id.in_(valid_token.selected_company.reachable_event_list_id),
|
||||
).data
|
||||
if not selected_event:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail="This endpoint requires event validation. Please contact your responsible company to use this event.",
|
||||
)
|
||||
event_function_class = getattr(selected_event, "function_class", None)
|
||||
event_function_code = getattr(selected_event, "function_code", None)
|
||||
function_class = getattr(events, event_function_class, None)
|
||||
active_function = getattr(
|
||||
function_class,
|
||||
function_class.__event_keys__.get(event_function_code, None),
|
||||
None,
|
||||
)
|
||||
if not active_function:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail="This endpoint requires event validation. Please contact your responsible company to use this event.",
|
||||
)
|
||||
valid_token.available_event = active_function
|
||||
return valid_token
|
||||
elif valid_token.user_type == 2:
|
||||
if not valid_token.selected_occupant:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_418_IM_A_TEAPOT,
|
||||
detail="Selected occupant is not found in the token object.",
|
||||
)
|
||||
selected_event = Events.filter_all(
|
||||
Events.endpoint_id == endpoint_active.id,
|
||||
Events.id.in_(valid_token.selected_occupant.reachable_event_list_id),
|
||||
)
|
||||
if not selected_event.data:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail=f"This endpoint {endpoint_name} requires event validation. Please contact your responsible company to use this event.",
|
||||
)
|
||||
selected_event = selected_event.data[0]
|
||||
event_function_class = getattr(selected_event, "function_class", None)
|
||||
event_function_code = getattr(selected_event, "function_code", None)
|
||||
function_class = getattr(events, event_function_class, None)
|
||||
active_function = getattr(
|
||||
function_class,
|
||||
function_class.__event_keys__.get(event_function_code, None),
|
||||
None,
|
||||
)
|
||||
if not active_function:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail=f"This endpoint {endpoint_name} requires event validation. Please contact your responsible company to use this event.",
|
||||
)
|
||||
valid_token.available_event = active_function
|
||||
return valid_token
|
||||
valid_token.available_event = None
|
||||
return valid_token
|
||||
user_type = "Company" if valid_token.user_type == 1 else "Occupant"
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail=f"Token of this user is not valid. Please login and refresh {user_type} selection.",
|
||||
)
|
||||
14
README.md
14
README.md
@@ -1,4 +1,3 @@
|
||||
|
||||
Add: uv pip install -r pyproject.toml
|
||||
|
||||
- Run Store Services
|
||||
@@ -11,17 +10,15 @@ On Linux
|
||||
|
||||
Connectors:
|
||||
|
||||
commercial_main_mongo_service:
|
||||
http://10.10.2.36:11777
|
||||
commercial_main_memory_service:
|
||||
http://10.10.2.36:11222
|
||||
postgres_main_commercial:
|
||||
http://10.10.2.36:5444
|
||||
commercial_main_mongo_service: http://10.10.2.36:11777
|
||||
commercial_main_memory_service: http://10.10.2.36:11222
|
||||
postgres_main_commercial: http://10.10.2.36:5444
|
||||
|
||||
make sure
|
||||
set lang and timezone on login
|
||||
BaseMixin || CrudMixin add
|
||||
http_exception = fastapi.HTTPException
|
||||
|
||||
status = fastapi.status
|
||||
|
||||
On debian
|
||||
@@ -33,3 +30,6 @@ On Linux
|
||||
> http://localhost:1111/docs | wag_management_auth_service
|
||||
> http://localhost:1112/docs | wag_management_event_service
|
||||
> http://localhost:1113/docs | wag_management_validation_service
|
||||
|
||||
|
||||
> docker compose -f ./api-docker-compose.yml up --build -d
|
||||
|
||||
@@ -8,22 +8,50 @@ services:
|
||||
dockerfile: ApiServices/AuthService/Dockerfile
|
||||
ports:
|
||||
- "1111:41575"
|
||||
environment:
|
||||
- PYTHONPATH=/service_app
|
||||
volumes:
|
||||
- auth_venv:/service_app/.venv
|
||||
- auth_logs:/service_app/logs
|
||||
|
||||
wag_management_event_service:
|
||||
container_name: wag_management_event_service
|
||||
restart: on-failure
|
||||
# restart: on-failure
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ApiServices/EventService/Dockerfile
|
||||
ports:
|
||||
- "1112:41575"
|
||||
environment:
|
||||
- PYTHONPATH=/service_app
|
||||
volumes:
|
||||
- event_venv:/service_app/.venv
|
||||
- event_logs:/service_app/logs
|
||||
|
||||
wag_management_validation_service:
|
||||
container_name: wag_management_validation_service
|
||||
restart: on-failure
|
||||
# restart: on-failure
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ApiServices/ValidationService/Dockerfile
|
||||
ports:
|
||||
- "1113:41575"
|
||||
environment:
|
||||
- PYTHONPATH=/service_app
|
||||
volumes:
|
||||
- validation_venv:/service_app/.venv
|
||||
- validation_logs:/service_app/logs
|
||||
|
||||
# wag_management_init_service:
|
||||
# container_name: wag_management_init_service
|
||||
# build:
|
||||
# context: .
|
||||
# dockerfile: service_app_init/Dockerfile
|
||||
|
||||
volumes:
|
||||
auth_venv:
|
||||
event_venv:
|
||||
validation_venv:
|
||||
auth_logs:
|
||||
event_logs:
|
||||
validation_logs:
|
||||
|
||||
@@ -8,6 +8,9 @@ from .emailConfigs import (
|
||||
)
|
||||
from .configs import (
|
||||
Config,
|
||||
Auth,
|
||||
ApiStatic,
|
||||
RelationAccess,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
@@ -16,6 +19,9 @@ __all__ = [
|
||||
"MongoConfig",
|
||||
"EmailConfig",
|
||||
"Config",
|
||||
"Auth",
|
||||
"ApiStatic",
|
||||
"RelationAccess",
|
||||
]
|
||||
|
||||
# from api_configs.configs import (
|
||||
|
||||
@@ -34,6 +34,11 @@ class Config:
|
||||
DESCRIPTION = "This api is serves as web api gateway only to evyos web services."
|
||||
APP_URL = "https://www.wag.eys.gen.tr"
|
||||
|
||||
# Timezone Configuration
|
||||
DEFAULT_TIMEZONE = "GMT+3" # Default timezone for the application
|
||||
SYSTEM_TIMEZONE = "GMT+0" # System timezone (used for internal operations)
|
||||
SUPPORTED_TIMEZONES = ["GMT+0", "GMT+3"] # List of supported timezones
|
||||
|
||||
|
||||
class ApiStatic:
|
||||
PLACEHOLDER = "https://s.tmimgcdn.com/scr/800x500/276800/building-home-nature-logo-vector-template-3_276851-original.jpg"
|
||||
|
||||
@@ -1,72 +1,131 @@
|
||||
import typing
|
||||
from abc import ABC
|
||||
from fastapi import status
|
||||
from fastapi.exceptions import HTTPException
|
||||
from typing import TypeVar, Union, Dict, Any, Optional, Type
|
||||
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObject
|
||||
|
||||
TokenType = TypeVar('TokenType', bound=Union[EmployeeTokenObject, OccupantTokenObject])
|
||||
|
||||
class ActionsSchema(ABC):
|
||||
"""Base class for defining API action schemas.
|
||||
|
||||
def __init__(self, endpoint: str = None):
|
||||
This class handles endpoint registration and validation in the database.
|
||||
"""
|
||||
def __init__(self, endpoint: str):
|
||||
"""Initialize with an API endpoint path.
|
||||
|
||||
Args:
|
||||
endpoint: The API endpoint path (e.g. "/users/create")
|
||||
"""
|
||||
self.endpoint = endpoint
|
||||
|
||||
def retrieve_action_from_endpoint(self):
|
||||
def retrieve_action_from_endpoint(self) -> Dict[str, Any]:
|
||||
"""Retrieve the endpoint registration from the database.
|
||||
|
||||
Returns:
|
||||
Dict containing the endpoint registration data
|
||||
|
||||
Raises:
|
||||
HTTPException: If endpoint is not found in database
|
||||
"""
|
||||
from databases import EndpointRestriction
|
||||
|
||||
endpoint_restriction = EndpointRestriction.filter_one(
|
||||
EndpointRestriction.endpoint_name.ilike(f"%{self.endpoint}%"), system=True
|
||||
EndpointRestriction.endpoint_name.ilike(f"%{self.endpoint}%"),
|
||||
system=True
|
||||
).data
|
||||
|
||||
if not endpoint_restriction:
|
||||
raise HTTPException(
|
||||
status_code=404,
|
||||
detail=f"Endpoint {self.endpoint} not found in the database",
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail=f"Endpoint {self.endpoint} not found in the database"
|
||||
)
|
||||
return endpoint_restriction
|
||||
|
||||
|
||||
class ActionsSchemaFactory:
|
||||
"""Factory class for creating action schemas.
|
||||
|
||||
This class validates and initializes action schemas for API endpoints.
|
||||
"""
|
||||
def __init__(self, action: ActionsSchema):
|
||||
"""Initialize with an action schema.
|
||||
|
||||
Args:
|
||||
action: The action schema to initialize
|
||||
|
||||
Raises:
|
||||
HTTPException: If action initialization fails
|
||||
"""
|
||||
self.action = action
|
||||
try:
|
||||
self.action_match = self.action.retrieve_action_from_endpoint()
|
||||
except HTTPException as e:
|
||||
# Re-raise HTTP exceptions as-is
|
||||
raise e
|
||||
except Exception as e:
|
||||
err = e
|
||||
# Log and wrap other exceptions
|
||||
print(f"ActionsSchemaFactory Error: {e}")
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail="Failed to initialize action schema"
|
||||
) from e
|
||||
|
||||
|
||||
class MethodToEvent(ABC, ActionsSchemaFactory):
|
||||
"""Base class for mapping methods to API events.
|
||||
|
||||
action_key: str = None
|
||||
event_type: str = None
|
||||
This class handles method registration and validation for API events.
|
||||
"""
|
||||
action_key: Optional[str] = None
|
||||
event_type: Optional[str] = None
|
||||
event_description: str = ""
|
||||
event_category: str = ""
|
||||
|
||||
__event_keys__: dict = {}
|
||||
__event_validation__: dict = {}
|
||||
__event_keys__: Dict[str, str] = {}
|
||||
__event_validation__: Dict[str, Any] = {}
|
||||
|
||||
@classmethod
|
||||
def call_event_method(cls, method_uu_id: str, *args, **kwargs):
|
||||
def call_event_method(cls, method_uu_id: str, *args: Any, **kwargs: Any) -> Any:
|
||||
"""Call an event method by its UUID.
|
||||
|
||||
Args:
|
||||
method_uu_id: UUID of the method to call
|
||||
*args: Positional arguments to pass to method
|
||||
**kwargs: Keyword arguments to pass to method
|
||||
|
||||
Returns:
|
||||
The result of the called method
|
||||
|
||||
Raises:
|
||||
AttributeError: If method UUID is not found
|
||||
"""
|
||||
function_name = cls.__event_keys__.get(method_uu_id)
|
||||
if not function_name:
|
||||
raise AttributeError(f"No method found for UUID: {method_uu_id}")
|
||||
|
||||
return getattr(cls, function_name)(*args, **kwargs)
|
||||
|
||||
@classmethod
|
||||
def ban_token_objects(
|
||||
cls,
|
||||
token: typing.Union[EmployeeTokenObject, OccupantTokenObject],
|
||||
ban_list: typing.Union[EmployeeTokenObject, OccupantTokenObject],
|
||||
):
|
||||
from fastapi import status
|
||||
from fastapi.exceptions import HTTPException
|
||||
token: TokenType,
|
||||
ban_list: Type[TokenType]
|
||||
) -> None:
|
||||
"""Check if a token type is banned from accessing an event.
|
||||
|
||||
if token.user_type == ban_list.user_type:
|
||||
if isinstance(token, EmployeeTokenObject):
|
||||
Args:
|
||||
token: The token to check
|
||||
ban_list: The token type that is banned
|
||||
|
||||
Raises:
|
||||
HTTPException: If token type matches banned type
|
||||
"""
|
||||
if isinstance(token, ban_list):
|
||||
user_type = "employee" if isinstance(token, EmployeeTokenObject) else "occupant"
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_406_NOT_ACCEPTABLE,
|
||||
detail="No employee can reach this event. An notification is send to admin about event registration",
|
||||
)
|
||||
if isinstance(token, OccupantTokenObject):
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_406_NOT_ACCEPTABLE,
|
||||
detail="No occupant can reach this event. An notification is send to admin about event registration",
|
||||
detail=f"No {user_type} can reach this event. A notification has been sent to admin."
|
||||
)
|
||||
|
||||
@@ -7,15 +7,16 @@ from api_validations.validations_request import (
|
||||
ListOptions,
|
||||
)
|
||||
|
||||
from api_validations.core_response import AlchemyJsonResponse
|
||||
from ApiServices.api_handlers import AlchemyJsonResponse
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObject
|
||||
from api_validations.validations_response.account import AccountListResponse
|
||||
from api_validations.validations_response.account_responses import AccountRecordsListResponse
|
||||
from databases import (
|
||||
AccountRecords,
|
||||
BuildIbans,
|
||||
)
|
||||
from databases.sql_models.building.build import Build, BuildLivingSpace
|
||||
from databases.sql_models.building.build import BuildLivingSpace
|
||||
from databases.sql_models.building.decision_book import BuildDecisionBookPayments
|
||||
from databases.sql_models.others.enums import ApiEnumDropdown
|
||||
|
||||
@@ -31,8 +32,8 @@ class AccountRecordsListEventMethods(MethodToEvent):
|
||||
"208e6273-17ef-44f0-814a-8098f816b63a": "account_records_list_flt_res",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"7192c2aa-5352-4e36-98b3-dafb7d036a3d": AccountListResponse,
|
||||
"208e6273-17ef-44f0-814a-8098f816b63a": AccountListResponse,
|
||||
"7192c2aa-5352-4e36-98b3-dafb7d036a3d": AccountRecordsListResponse,
|
||||
"208e6273-17ef-44f0-814a-8098f816b63a": AccountRecordsListResponse,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
@@ -53,7 +54,12 @@ class AccountRecordsListEventMethods(MethodToEvent):
|
||||
AccountRecords.filter_attr = list_options
|
||||
records = AccountRecords.filter_all()
|
||||
return AlchemyJsonResponse(
|
||||
completed=True, message="List Build record", result=records
|
||||
completed=True,
|
||||
message="Account records listed successfully",
|
||||
result=records,
|
||||
cls_object=AccountRecords,
|
||||
filter_attributes=list_options,
|
||||
response_model=AccountRecordsListResponse
|
||||
)
|
||||
|
||||
@classmethod
|
||||
@@ -182,7 +188,9 @@ class AccountRecordsListEventMethods(MethodToEvent):
|
||||
"process_comment": list_of_values[5],
|
||||
}
|
||||
)
|
||||
return dict(completed=True, message="List Build record", result=return_list)
|
||||
return AlchemyJsonResponse(
|
||||
completed=True, message="List Account records", result=return_list
|
||||
)
|
||||
|
||||
|
||||
class AccountRecordsCreateEventMethods(MethodToEvent):
|
||||
@@ -221,9 +229,7 @@ class AccountRecordsCreateEventMethods(MethodToEvent):
|
||||
)
|
||||
account_record = AccountRecords.find_or_create(**data.excluded_dump())
|
||||
return AlchemyJsonResponse(
|
||||
completed=True,
|
||||
message="Update Build record",
|
||||
result=account_record.get_dict(),
|
||||
completed=True, message="Account record created successfully", result=account_record
|
||||
)
|
||||
elif isinstance(token_dict, EmployeeTokenObject):
|
||||
# Build.pre_query = Build.select_action(
|
||||
@@ -263,14 +269,9 @@ class AccountRecordsCreateEventMethods(MethodToEvent):
|
||||
data_dict["receive_debit"] = debit_type.id
|
||||
data_dict["receive_debit_uu_id"] = str(debit_type.uu_id)
|
||||
|
||||
account_record = AccountRecords.find_or_create(**data_dict)
|
||||
account_record.save()
|
||||
account_record.update(is_confirmed=True)
|
||||
account_record.save()
|
||||
account_record = AccountRecords.insert_one(data_dict).data
|
||||
return AlchemyJsonResponse(
|
||||
completed=True,
|
||||
message="Create Account record are successful",
|
||||
result=account_record.get_dict(),
|
||||
completed=True, message="Account record created successfully", result=account_record
|
||||
)
|
||||
|
||||
|
||||
@@ -299,10 +300,9 @@ class AccountRecordsUpdateEventMethods(MethodToEvent):
|
||||
elif isinstance(token_dict, EmployeeTokenObject):
|
||||
pass
|
||||
|
||||
account_record = AccountRecords.update_one(build_uu_id, data).data
|
||||
return AlchemyJsonResponse(
|
||||
completed=False,
|
||||
message="Update Build record",
|
||||
result=None,
|
||||
completed=True, message="Account record updated successfully", result=account_record
|
||||
)
|
||||
|
||||
|
||||
@@ -326,10 +326,9 @@ class AccountRecordsPatchEventMethods(MethodToEvent):
|
||||
data,
|
||||
token_dict: typing.Union[EmployeeTokenObject, OccupantTokenObject],
|
||||
):
|
||||
account_record = AccountRecords.patch_one(build_uu_id, data).data
|
||||
return AlchemyJsonResponse(
|
||||
completed=False,
|
||||
message="Patch Build record",
|
||||
result=None,
|
||||
completed=True, message="Account record patched successfully", result=account_record
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -22,7 +22,7 @@ from api_validations.validations_request import (
|
||||
UpdatePostCode,
|
||||
SearchAddress,
|
||||
)
|
||||
from api_validations.core_response import AlchemyJsonResponse
|
||||
from ApiServices.api_handlers import AlchemyJsonResponse
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObject
|
||||
|
||||
@@ -77,9 +77,7 @@ class AddressListEventMethods(MethodToEvent):
|
||||
Addresses.filter_attr = list_options
|
||||
records = Addresses.filter_all().data
|
||||
return AlchemyJsonResponse(
|
||||
completed=True,
|
||||
message="List Address records",
|
||||
result=records,
|
||||
completed=True, message="List Address records", result=records
|
||||
)
|
||||
|
||||
@classmethod
|
||||
@@ -89,13 +87,12 @@ class AddressListEventMethods(MethodToEvent):
|
||||
token_dict: Union[EmployeeTokenObject, OccupantTokenObject],
|
||||
):
|
||||
Addresses.filter_attr = list_options
|
||||
records = Addresses.list_via_employee(
|
||||
token_dict=token_dict,
|
||||
Addresses.pre_query = Addresses.filter_all(
|
||||
Addresses.street_id.in_(get_street_ids),
|
||||
)
|
||||
records = Addresses.filter_all().data
|
||||
return AlchemyJsonResponse(
|
||||
completed=True,
|
||||
message="List Address records",
|
||||
result=records,
|
||||
completed=True, message="List Address records", result=records
|
||||
)
|
||||
|
||||
|
||||
@@ -135,21 +132,20 @@ class AddressCreateEventMethods(MethodToEvent):
|
||||
address.save()
|
||||
address.update(is_confirmed=True)
|
||||
address.save()
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "Create Address record",
|
||||
"data": address.get_dict(),
|
||||
},
|
||||
status_code=200,
|
||||
return AlchemyJsonResponse(
|
||||
completed=True, message="Address created successfully", result=address.get_dict()
|
||||
)
|
||||
|
||||
|
||||
class AddressSearchEventMethods(MethodToEvent):
|
||||
"""Event methods for searching addresses.
|
||||
|
||||
This class handles address search functionality including text search
|
||||
and filtering.
|
||||
"""
|
||||
event_type = "SEARCH"
|
||||
event_description = ""
|
||||
event_category = ""
|
||||
event_description = "Search for addresses using text and filters"
|
||||
event_category = "Address"
|
||||
|
||||
__event_keys__ = {
|
||||
"e0ac1269-e9a7-4806-9962-219ac224b0d0": "search_address",
|
||||
@@ -158,70 +154,133 @@ class AddressSearchEventMethods(MethodToEvent):
|
||||
"e0ac1269-e9a7-4806-9962-219ac224b0d0": SearchAddress,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def _build_order_clause(
|
||||
cls,
|
||||
filter_list: Dict[str, Any],
|
||||
schemas: List[str],
|
||||
filter_table: Any
|
||||
) -> Any:
|
||||
"""Build the ORDER BY clause for the query.
|
||||
|
||||
Args:
|
||||
filter_list: Dictionary of filter options
|
||||
schemas: List of available schema fields
|
||||
filter_table: SQLAlchemy table to query
|
||||
|
||||
Returns:
|
||||
SQLAlchemy order_by clause
|
||||
"""
|
||||
# Default to ordering by UUID if field not in schema
|
||||
if filter_list.get("order_field") not in schemas:
|
||||
filter_list["order_field"] = "uu_id"
|
||||
else:
|
||||
# Extract table and field from order field
|
||||
table_name, field_name = str(filter_list.get("order_field")).split(".")
|
||||
filter_table = getattr(databases.sql_models, table_name)
|
||||
filter_list["order_field"] = field_name
|
||||
|
||||
# Build order clause
|
||||
field = getattr(filter_table, filter_list.get("order_field"))
|
||||
return field.desc() if str(filter_list.get("order_type"))[0] == "d" else field.asc()
|
||||
|
||||
@classmethod
|
||||
def _format_record(cls, record: Any, schemas: List[str]) -> Dict[str, str]:
|
||||
"""Format a database record into a dictionary.
|
||||
|
||||
Args:
|
||||
record: Database record to format
|
||||
schemas: List of schema fields
|
||||
|
||||
Returns:
|
||||
Formatted record dictionary
|
||||
"""
|
||||
result = {}
|
||||
for index, schema in enumerate(schemas):
|
||||
value = str(record[index])
|
||||
# Special handling for UUID fields
|
||||
if "uu_id" in value:
|
||||
value = str(value)
|
||||
result[schema] = value
|
||||
return result
|
||||
|
||||
@classmethod
|
||||
def search_address(
|
||||
cls,
|
||||
data: SearchAddress,
|
||||
token_dict: Union[EmployeeTokenObject, OccupantTokenObject],
|
||||
):
|
||||
import databases.sql_models
|
||||
from time import perf_counter
|
||||
) -> JSONResponse:
|
||||
"""Search for addresses using text search and filters.
|
||||
|
||||
st = perf_counter()
|
||||
Args:
|
||||
data: Search parameters including text and filters
|
||||
token_dict: Authentication token
|
||||
|
||||
pre_query_first = AddressStreet.search_address_text(search_text=data.search)
|
||||
query, schemas, new_data_list = (
|
||||
pre_query_first.get("query"),
|
||||
pre_query_first.get("schema"),
|
||||
[],
|
||||
Returns:
|
||||
JSON response with search results
|
||||
|
||||
Raises:
|
||||
HTTPException: If search fails
|
||||
"""
|
||||
try:
|
||||
# Start performance measurement
|
||||
start_time = perf_counter()
|
||||
|
||||
# Get initial query
|
||||
search_result = AddressStreet.search_address_text(search_text=data.search)
|
||||
if not search_result:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail="No addresses found matching search criteria"
|
||||
)
|
||||
|
||||
query = search_result.get("query")
|
||||
schemas = search_result.get("schema")
|
||||
|
||||
# Apply filters
|
||||
filter_list = data.list_options.dump()
|
||||
filter_table = AddressStreet
|
||||
if filter_list.get("order_field") not in schemas:
|
||||
filter_list["order_field"] = "uu_id"
|
||||
else:
|
||||
filter_table = getattr(
|
||||
databases.sql_models, str(filter_list.get("order_field")).split(".")[0]
|
||||
)
|
||||
filter_list["order_field"] = str(filter_list.get("order_field")).split(".")[
|
||||
1
|
||||
]
|
||||
|
||||
order = (
|
||||
getattr(filter_table, filter_list.get("order_field")).desc()
|
||||
if str(filter_list.get("order_type"))[0] == "d"
|
||||
else getattr(filter_table, filter_list.get("order_field")).asc()
|
||||
)
|
||||
# Build and apply order clause
|
||||
order = cls._build_order_clause(filter_list, schemas, filter_table)
|
||||
|
||||
# Apply pagination
|
||||
page_size = int(filter_list.get("size"))
|
||||
offset = (int(filter_list.get("page")) - 1) * page_size
|
||||
|
||||
# Execute query
|
||||
query = (
|
||||
query.order_by(order)
|
||||
.limit(int(filter_list.get("size")))
|
||||
.offset(int((filter_list.get("page")) - 1) * int(filter_list.get("size")))
|
||||
.limit(page_size)
|
||||
.offset(offset)
|
||||
.populate_existing()
|
||||
)
|
||||
|
||||
records = list(query.all())
|
||||
print(perf_counter() - st)
|
||||
|
||||
for item in records:
|
||||
new_data_dict = {}
|
||||
for index, schema in enumerate(schemas):
|
||||
new_data_dict[schema] = str(item[index])
|
||||
if "uu_id" in str(item[index]):
|
||||
new_data_dict[schema] = str(new_data_dict.get(schema))
|
||||
new_data_list.append(new_data_dict)
|
||||
# Format results
|
||||
results = [cls._format_record(record, schemas) for record in records]
|
||||
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"pagination": filter_list,
|
||||
"count": len(new_data_list),
|
||||
"data": new_data_list,
|
||||
"message": "Search Address records",
|
||||
},
|
||||
status_code=200,
|
||||
# Log performance
|
||||
duration = perf_counter() - start_time
|
||||
print(f"Address search completed in {duration:.3f}s")
|
||||
|
||||
return AlchemyJsonResponse(
|
||||
completed=True,
|
||||
message="Address search results",
|
||||
result=results
|
||||
)
|
||||
|
||||
except HTTPException as e:
|
||||
# Re-raise HTTP exceptions
|
||||
raise e
|
||||
except Exception as e:
|
||||
# Log and wrap other errors
|
||||
print(f"Address search error: {str(e)}")
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail="Failed to search addresses"
|
||||
) from e
|
||||
|
||||
|
||||
class AddressUpdateEventMethods(MethodToEvent):
|
||||
|
||||
@@ -256,13 +315,8 @@ class AddressUpdateEventMethods(MethodToEvent):
|
||||
data_dict = data.excluded_dump()
|
||||
updated_address = address.update(**data_dict)
|
||||
updated_address.save()
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "Update Address record",
|
||||
"data": updated_address.get_dict(),
|
||||
},
|
||||
status_code=200,
|
||||
return AlchemyJsonResponse(
|
||||
completed=True, message="Address updated successfully", result=updated_address.get_dict()
|
||||
)
|
||||
elif isinstance(token_dict, OccupantTokenObject):
|
||||
raise HTTPException(
|
||||
@@ -308,13 +362,8 @@ class AddressPatchEventMethods(MethodToEvent):
|
||||
del data_dict["post_code_uu_id"]
|
||||
|
||||
patched_address = address.patch(**data_dict)
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "Patch Address record",
|
||||
"data": patched_address.get_dict(),
|
||||
},
|
||||
status_code=200,
|
||||
return AlchemyJsonResponse(
|
||||
completed=True, message="Address patched successfully", result=patched_address.get_dict()
|
||||
)
|
||||
|
||||
|
||||
@@ -361,13 +410,8 @@ class AddressPostCodeCreateEventMethods(MethodToEvent):
|
||||
post_code.save()
|
||||
relation_table.update(is_confirmed=True)
|
||||
relation_table.save()
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "Create Post Code record",
|
||||
"data": post_code.get_dict(),
|
||||
},
|
||||
status_code=200,
|
||||
return AlchemyJsonResponse(
|
||||
completed=True, message="Post code created successfully", result=post_code.get_dict()
|
||||
)
|
||||
|
||||
|
||||
@@ -407,26 +451,16 @@ class AddressPostCodeUpdateEventMethods(MethodToEvent):
|
||||
data_dict = data.excluded_dump()
|
||||
updated_post_code = post_code.update(**data_dict)
|
||||
updated_post_code.save()
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "Update Post Code record",
|
||||
"data": updated_post_code.get_dict(),
|
||||
},
|
||||
status_code=200,
|
||||
return AlchemyJsonResponse(
|
||||
completed=True, message="Post code updated successfully", result=updated_post_code.get_dict()
|
||||
)
|
||||
elif isinstance(token_dict, OccupantTokenObject):
|
||||
raise HTTPException(
|
||||
status_code=403,
|
||||
detail="Occupant can not update post code.",
|
||||
)
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "Update Post Code record",
|
||||
"data": {},
|
||||
},
|
||||
status_code=404,
|
||||
return AlchemyJsonResponse(
|
||||
completed=True, message="Update Post Code record", result={}
|
||||
)
|
||||
|
||||
|
||||
@@ -459,17 +493,10 @@ class AddressPostCodeListEventMethods(MethodToEvent):
|
||||
detail="User has no post code registered or not yet any post code created.",
|
||||
)
|
||||
|
||||
AddressPostcode.pre_query = AddressPostcode.filter_all(
|
||||
AddressPostcode.id.in_(
|
||||
[post_code.member_id for post_code in post_code_list]
|
||||
),
|
||||
).query
|
||||
AddressPostcode.filter_attr = list_options
|
||||
records = AddressPostcode.filter_all().data
|
||||
post_codes = AddressPostcode.filter_all()
|
||||
return AlchemyJsonResponse(
|
||||
completed=True,
|
||||
message="List Address records",
|
||||
result=records,
|
||||
completed=True, message="List Post code records", result=post_codes
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -1,69 +1,45 @@
|
||||
import datetime
|
||||
import json
|
||||
import typing
|
||||
from typing import Union
|
||||
|
||||
import arrow
|
||||
from fastapi import status
|
||||
from fastapi.requests import Request
|
||||
from fastapi.exceptions import HTTPException
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
from api_objects import OccupantTokenObject, EmployeeTokenObject
|
||||
from api_services.token_service import TokenService
|
||||
from api_services.redis.functions import RedisActions
|
||||
from api_library.response_handlers import ResponseHandler
|
||||
from api_library.logger import user_logger
|
||||
from api_configs import Auth, ApiStatic
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
|
||||
from databases import (
|
||||
Companies,
|
||||
Staff,
|
||||
Duties,
|
||||
Departments,
|
||||
Employees,
|
||||
BuildLivingSpace,
|
||||
BuildParts,
|
||||
Build,
|
||||
Duty,
|
||||
Event2Occupant,
|
||||
Event2Employee,
|
||||
Users,
|
||||
UsersTokens,
|
||||
OccupantTypes,
|
||||
RelationshipEmployee2Build,
|
||||
Companies, Staff, Duties, Departments, Employees,
|
||||
BuildLivingSpace, BuildParts, Build, Duty, Event2Occupant,
|
||||
Event2Employee, Users, UsersTokens, OccupantTypes,
|
||||
RelationshipEmployee2Build
|
||||
)
|
||||
|
||||
from api_services import (
|
||||
redis_cli,
|
||||
send_email,
|
||||
get_object_via_access_key,
|
||||
get_object_via_user_uu_id,
|
||||
save_access_token_to_redis,
|
||||
update_selected_to_redis,
|
||||
password_is_changed_template,
|
||||
change_your_password_template,
|
||||
)
|
||||
|
||||
from api_configs import ApiStatic, Auth
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
from api_objects import (
|
||||
OccupantToken,
|
||||
CompanyToken,
|
||||
EmployeeTokenObject,
|
||||
OccupantTokenObject,
|
||||
)
|
||||
from api_library.date_time_actions.date_functions import system_arrow
|
||||
|
||||
from databases.no_sql_models.login_handlers import load_user_with_erp_details
|
||||
|
||||
from api_validations.validations_request import (
|
||||
Login,
|
||||
Logout,
|
||||
ChangePassword,
|
||||
Remember,
|
||||
Forgot,
|
||||
CreatePassword,
|
||||
OccupantSelection,
|
||||
Login, Logout, ChangePassword, Remember,
|
||||
Forgot, CreatePassword, OccupantSelection,
|
||||
EmployeeSelection,
|
||||
)
|
||||
|
||||
from api_validations.validations_response.auth_responses import (
|
||||
AuthenticationLoginResponse,
|
||||
AuthenticationRefreshResponse,
|
||||
AuthenticationUserInfoResponse
|
||||
)
|
||||
|
||||
class AuthenticationLoginEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "LOGIN"
|
||||
event_description = "Login via domain and access key : [email] | [phone]"
|
||||
event_category = "AUTHENTICATION"
|
||||
@@ -72,37 +48,50 @@ class AuthenticationLoginEventMethods(MethodToEvent):
|
||||
"e672846d-cc45-4d97-85d5-6f96747fac67": "authentication_login_with_domain_and_creds",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"e672846d-cc45-4d97-85d5-6f96747fac67": "authentication_login_with_domain_and_creds",
|
||||
"e672846d-cc45-4d97-85d5-6f96747fac67": AuthenticationLoginResponse,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def authentication_login_with_domain_and_creds(
|
||||
cls,
|
||||
data: Login,
|
||||
request,
|
||||
):
|
||||
def authentication_login_with_domain_and_creds(cls, data: Login, request: Request):
|
||||
try:
|
||||
access_dict = Users.login_user_with_credentials(data=data, request=request)
|
||||
found_user = access_dict.get("user", None)
|
||||
found_user = access_dict.get("user")
|
||||
|
||||
if not found_user:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid credentials"
|
||||
user_logger.log_login_attempt(
|
||||
request, None, data.domain, data.access_key,
|
||||
success=False, error="Invalid credentials"
|
||||
)
|
||||
return ResponseHandler.unauthorized("Invalid credentials")
|
||||
|
||||
user_logger.log_login_attempt(
|
||||
request, found_user.id, data.domain, data.access_key,
|
||||
success=True
|
||||
)
|
||||
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "User is logged in successfully",
|
||||
response_data = {
|
||||
"access_token": access_dict.get("access_token"),
|
||||
"refresh_token": access_dict.get("refresher_token"),
|
||||
"access_object": access_dict.get("access_object"),
|
||||
"user": found_user.get_dict(),
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
}
|
||||
return ResponseHandler.success(
|
||||
message="User logged in successfully",
|
||||
data=response_data,
|
||||
response_model=AuthenticationLoginResponse
|
||||
)
|
||||
except Exception as e:
|
||||
user_logger.log_login_attempt(
|
||||
request, None, data.domain, data.access_key,
|
||||
success=False, error=str(e)
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail=str(e)
|
||||
)
|
||||
|
||||
|
||||
class AuthenticationSelectEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "LOGIN"
|
||||
event_description = "Select Employee Duty or Occupant Type"
|
||||
event_category = "AUTHENTICATION"
|
||||
@@ -115,69 +104,74 @@ class AuthenticationSelectEventMethods(MethodToEvent):
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def authentication_select_company_or_occupant_type(
|
||||
def _handle_employee_selection(
|
||||
cls,
|
||||
request: Request,
|
||||
data: Union[EmployeeSelection, OccupantSelection],
|
||||
token_dict: Union[EmployeeTokenObject, OccupantTokenObject],
|
||||
data: EmployeeSelection,
|
||||
token_dict: EmployeeTokenObject,
|
||||
request: Request
|
||||
):
|
||||
from api_objects import OccupantToken, CompanyToken
|
||||
|
||||
if isinstance(token_dict, EmployeeTokenObject):
|
||||
"""Handle employee company selection"""
|
||||
if data.company_uu_id not in token_dict.companies_uu_id_list:
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": False,
|
||||
"message": "Company is not found in users company list",
|
||||
},
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
)
|
||||
if selected_company := Companies.filter_one(
|
||||
Companies.uu_id == data.company_uu_id,
|
||||
).data:
|
||||
return ResponseHandler.unauthorized("Company not found in user's company list")
|
||||
|
||||
selected_company = Companies.filter_one(
|
||||
Companies.uu_id == data.company_uu_id
|
||||
).data
|
||||
if not selected_company:
|
||||
return ResponseHandler.not_found("Company not found")
|
||||
|
||||
# Get department IDs for the company
|
||||
department_ids = [
|
||||
department.id
|
||||
for department in Departments.filter_all(
|
||||
Departments.company_id == selected_company.id,
|
||||
dept.id for dept in Departments.filter_all(
|
||||
Departments.company_id == selected_company.id
|
||||
).data
|
||||
]
|
||||
|
||||
# Get duties IDs for the company
|
||||
duties_ids = [
|
||||
duties.id
|
||||
for duties in Duties.filter_all(
|
||||
Duties.company_id == selected_company.id,
|
||||
duty.id for duty in Duties.filter_all(
|
||||
Duties.company_id == selected_company.id
|
||||
).data
|
||||
]
|
||||
|
||||
# Get staff IDs
|
||||
staff_ids = [
|
||||
staff.id
|
||||
for staff in Staff.filter_all(
|
||||
Staff.duties_id.in_(duties_ids),
|
||||
staff.id for staff in Staff.filter_all(
|
||||
Staff.duties_id.in_(duties_ids)
|
||||
).data
|
||||
]
|
||||
|
||||
# Get employee
|
||||
employee = Employees.filter_one(
|
||||
Employees.people_id == token_dict.person_id,
|
||||
Employees.staff_id.in_(staff_ids),
|
||||
Employees.staff_id.in_(staff_ids)
|
||||
).data
|
||||
|
||||
if not employee:
|
||||
return ResponseHandler.not_found("Employee not found")
|
||||
|
||||
# Get reachable events
|
||||
reachable_event_list_id = Event2Employee.get_event_id_by_employee_id(
|
||||
employee_id=employee.id
|
||||
)
|
||||
staff = Staff.filter_one(
|
||||
Staff.id == employee.staff_id,
|
||||
).data
|
||||
duties = Duties.filter_one(
|
||||
Duties.id == staff.duties_id,
|
||||
).data
|
||||
|
||||
# Get staff and duties
|
||||
staff = Staff.filter_one(Staff.id == employee.staff_id).data
|
||||
duties = Duties.filter_one(Duties.id == staff.duties_id).data
|
||||
department = Departments.filter_one(
|
||||
Departments.id == duties.department_id,
|
||||
Departments.id == duties.department_id
|
||||
).data
|
||||
|
||||
# Get bulk duty
|
||||
bulk_id = Duty.filter_by_one(system=True, duty_code="BULK").data
|
||||
bulk_duty_id = Duties.filter_by_one(
|
||||
company_id=selected_company.id,
|
||||
duties_id=bulk_id.id,
|
||||
**Duties.valid_record_dict,
|
||||
**Duties.valid_record_dict
|
||||
).data
|
||||
update_selected_to_redis(
|
||||
request=request,
|
||||
add_payload=CompanyToken(
|
||||
|
||||
# Create company token
|
||||
company_token = CompanyToken(
|
||||
company_uu_id=selected_company.uu_id.__str__(),
|
||||
company_id=selected_company.id,
|
||||
department_id=department.id,
|
||||
@@ -189,58 +183,65 @@ class AuthenticationSelectEventMethods(MethodToEvent):
|
||||
staff_uu_id=staff.uu_id.__str__(),
|
||||
employee_id=employee.id,
|
||||
employee_uu_id=employee.uu_id.__str__(),
|
||||
reachable_event_list_id=reachable_event_list_id,
|
||||
),
|
||||
reachable_event_list_id=reachable_event_list_id
|
||||
)
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "Company is selected successfully",
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
elif isinstance(token_dict, OccupantTokenObject):
|
||||
|
||||
# Update Redis
|
||||
update_selected_to_redis(request=request, add_payload=company_token)
|
||||
return ResponseHandler.success("Company selected successfully")
|
||||
|
||||
@classmethod
|
||||
def _handle_occupant_selection(
|
||||
cls,
|
||||
data: OccupantSelection,
|
||||
token_dict: OccupantTokenObject,
|
||||
request: Request
|
||||
):
|
||||
"""Handle occupant type selection"""
|
||||
# Get occupant type
|
||||
occupant_type = OccupantTypes.filter_by_one(
|
||||
system=True, uu_id=data.occupant_uu_id
|
||||
system=True,
|
||||
uu_id=data.occupant_uu_id
|
||||
).data
|
||||
if not occupant_type:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail="Occupant Type is not found",
|
||||
)
|
||||
return ResponseHandler.not_found("Occupant Type not found")
|
||||
|
||||
# Get build part
|
||||
build_part = BuildParts.filter_by_one(
|
||||
system=True, uu_id=data.build_part_uu_id
|
||||
system=True,
|
||||
uu_id=data.build_part_uu_id
|
||||
).data
|
||||
if not build_part:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail="Build Part is not found",
|
||||
)
|
||||
build = Build.filter_one(
|
||||
Build.id == build_part.build_id,
|
||||
).data
|
||||
return ResponseHandler.not_found("Build Part not found")
|
||||
|
||||
# Get build and company info
|
||||
build = Build.filter_one(Build.id == build_part.build_id).data
|
||||
related_company = RelationshipEmployee2Build.filter_one(
|
||||
RelationshipEmployee2Build.member_id == build.id,
|
||||
RelationshipEmployee2Build.member_id == build.id
|
||||
).data
|
||||
company_related = Companies.filter_one(
|
||||
Companies.id == related_company.company_id,
|
||||
Companies.id == related_company.company_id
|
||||
).data
|
||||
responsible_employee = Employees.filter_one(
|
||||
Employees.id == related_company.employee_id,
|
||||
Employees.id == related_company.employee_id
|
||||
).data
|
||||
if selected_occupant_type := BuildLivingSpace.filter_one(
|
||||
|
||||
# Get selected occupant type
|
||||
selected_occupant_type = BuildLivingSpace.filter_one(
|
||||
BuildLivingSpace.occupant_type == occupant_type.id,
|
||||
BuildLivingSpace.person_id == token_dict.person_id,
|
||||
BuildLivingSpace.build_parts_id == build_part.id,
|
||||
).data:
|
||||
reachable_event_list_id = (
|
||||
Event2Occupant.get_event_id_by_build_living_space_id(
|
||||
BuildLivingSpace.build_parts_id == build_part.id
|
||||
).data
|
||||
if not selected_occupant_type:
|
||||
return ResponseHandler.not_found("Selected occupant type not found")
|
||||
|
||||
# Get reachable events
|
||||
reachable_event_list_id = Event2Occupant.get_event_id_by_build_living_space_id(
|
||||
build_living_space_id=selected_occupant_type.id
|
||||
)
|
||||
)
|
||||
update_selected_to_redis(
|
||||
request=request,
|
||||
add_payload=OccupantToken(
|
||||
|
||||
# Create occupant token
|
||||
occupant_token = OccupantToken(
|
||||
living_space_id=selected_occupant_type.id,
|
||||
living_space_uu_id=selected_occupant_type.uu_id.__str__(),
|
||||
occupant_type_id=occupant_type.id,
|
||||
@@ -254,24 +255,38 @@ class AuthenticationSelectEventMethods(MethodToEvent):
|
||||
responsible_employee_uuid=responsible_employee.uu_id.__str__(),
|
||||
responsible_company_id=company_related.id,
|
||||
responsible_company_uuid=company_related.uu_id.__str__(),
|
||||
reachable_event_list_id=reachable_event_list_id,
|
||||
),
|
||||
reachable_event_list_id=reachable_event_list_id
|
||||
)
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "Occupant is selected successfully",
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
|
||||
# Update Redis
|
||||
update_selected_to_redis(request=request, add_payload=occupant_token)
|
||||
return ResponseHandler.success("Occupant selected successfully")
|
||||
|
||||
@classmethod
|
||||
def authentication_select_company_or_occupant_type(
|
||||
cls,
|
||||
request: Request,
|
||||
data: Union[EmployeeSelection, OccupantSelection],
|
||||
token_dict: Union[EmployeeTokenObject, OccupantTokenObject],
|
||||
):
|
||||
"""Handle selection of company or occupant type"""
|
||||
try:
|
||||
if isinstance(token_dict, EmployeeTokenObject):
|
||||
return cls._handle_employee_selection(data, token_dict, request)
|
||||
elif isinstance(token_dict, OccupantTokenObject):
|
||||
return cls._handle_occupant_selection(data, token_dict, request)
|
||||
return ResponseHandler.error(
|
||||
"Invalid token type",
|
||||
status_code=status.HTTP_400_BAD_REQUEST
|
||||
)
|
||||
return JSONResponse(
|
||||
content={"completed": False, "message": "Invalid data provided"},
|
||||
status_code=status.HTTP_418_IM_A_TEAPOT,
|
||||
except Exception as e:
|
||||
return ResponseHandler.error(
|
||||
str(e),
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR
|
||||
)
|
||||
|
||||
|
||||
class AuthenticationCheckTokenEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "LOGIN"
|
||||
event_description = "Check Token is valid for user"
|
||||
event_category = "AUTHENTICATION"
|
||||
@@ -284,74 +299,70 @@ class AuthenticationCheckTokenEventMethods(MethodToEvent):
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def authentication_check_token_is_valid(
|
||||
cls,
|
||||
request,
|
||||
):
|
||||
if get_object_via_access_key(request=request):
|
||||
return JSONResponse(
|
||||
content={"completed": True, "message": "Access Token is valid"},
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
return JSONResponse(
|
||||
content={"completed": False, "message": "Access Token is NOT valid"},
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
)
|
||||
def authentication_check_token_is_valid(cls, request: Request):
|
||||
try:
|
||||
TokenService.validate_token(request)
|
||||
return ResponseHandler.success("Access Token is valid")
|
||||
except HTTPException:
|
||||
return ResponseHandler.unauthorized("Access Token is NOT valid")
|
||||
|
||||
|
||||
class AuthenticationRefreshEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "LOGIN"
|
||||
event_description = (
|
||||
"Refresher Token for refreshing access token without credentials"
|
||||
)
|
||||
event_description = "Refresh user info using access token"
|
||||
event_category = "AUTHENTICATION"
|
||||
|
||||
__event_keys__ = {
|
||||
"48379bb2-ba81-4d8e-a9dd-58837cfcbf67": "authentication_refresh_user_info",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"48379bb2-ba81-4d8e-a9dd-58837cfcbf67": "authentication_refresh_user_info",
|
||||
"48379bb2-ba81-4d8e-a9dd-58837cfcbf67": AuthenticationRefreshResponse,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def authentication_refresh_user_info(
|
||||
cls,
|
||||
request,
|
||||
token_dict: typing.Union[EmployeeSelection, OccupantSelection],
|
||||
request: Request,
|
||||
token_dict: Union[EmployeeTokenObject, OccupantTokenObject],
|
||||
):
|
||||
access_token = str(request.headers.get(Auth.ACCESS_TOKEN_TAG))
|
||||
if token_user := get_object_via_access_key(request=request):
|
||||
if found_user := Users.filter_one(
|
||||
Users.uu_id == token_user.get("uu_id")
|
||||
).data:
|
||||
try:
|
||||
access_token = request.headers.get(Auth.ACCESS_TOKEN_TAG)
|
||||
if not access_token:
|
||||
return ResponseHandler.unauthorized()
|
||||
|
||||
# Get user and token info
|
||||
found_user = Users.filter_one(
|
||||
Users.uu_id == token_dict.user_uu_id
|
||||
).data
|
||||
if not found_user:
|
||||
return ResponseHandler.not_found("User not found")
|
||||
|
||||
user_token = UsersTokens.filter_one(
|
||||
UsersTokens.domain == found_user.domain_name,
|
||||
UsersTokens.user_id == found_user.id,
|
||||
UsersTokens.token_type == "RememberMe",
|
||||
).data
|
||||
access_dict = {
|
||||
|
||||
# Update user metadata
|
||||
TokenService.update_user_metadata(found_user, request)
|
||||
|
||||
response_data = {
|
||||
"access_token": access_token,
|
||||
"refresh_token": getattr(user_token, "token", None),
|
||||
"user": found_user.get_dict()
|
||||
}
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "User is logged in successfully via refresh token",
|
||||
"data": load_user_with_erp_details(found_user, access_dict),
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
return JSONResponse(
|
||||
content={"completed": False, "message": "Invalid data", "data": {}},
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
return ResponseHandler.success(
|
||||
"User info refreshed successfully",
|
||||
data=response_data,
|
||||
response_model=AuthenticationRefreshResponse
|
||||
)
|
||||
except Exception as e:
|
||||
return ResponseHandler.error(str(e))
|
||||
|
||||
|
||||
class AuthenticationChangePasswordEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "LOGIN"
|
||||
event_description = "Change password with access token implemented on request headers without password reset token"
|
||||
event_description = "Change password with access token"
|
||||
event_category = "AUTHENTICATION"
|
||||
|
||||
__event_keys__ = {
|
||||
@@ -364,31 +375,38 @@ class AuthenticationChangePasswordEventMethods(MethodToEvent):
|
||||
@classmethod
|
||||
def authentication_change_password(
|
||||
cls,
|
||||
request: Request,
|
||||
data: ChangePassword,
|
||||
token_dict: typing.Union[EmployeeTokenObject, OccupantTokenObject],
|
||||
token_dict: Union[EmployeeTokenObject, OccupantTokenObject],
|
||||
):
|
||||
if isinstance(token_dict, EmployeeTokenObject):
|
||||
if found_user := Users.filter_one(Users.uu_id == token_dict.uu_id).data:
|
||||
if found_user.check_password(data.old_password):
|
||||
try:
|
||||
if not isinstance(token_dict, EmployeeTokenObject):
|
||||
return ResponseHandler.unauthorized("Only employees can change password")
|
||||
|
||||
found_user = Users.filter_one(Users.uu_id == token_dict.user_uu_id).data
|
||||
if not found_user:
|
||||
return ResponseHandler.not_found("User not found")
|
||||
|
||||
if not found_user.check_password(data.old_password):
|
||||
user_logger.log_password_change(
|
||||
request, found_user.id, "change",
|
||||
success=False, error="Invalid old password"
|
||||
)
|
||||
return ResponseHandler.unauthorized("Old password is incorrect")
|
||||
|
||||
found_user.set_password(data.new_password)
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "Password is changed successfully",
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
user_logger.log_password_change(
|
||||
request, found_user.id, "change",
|
||||
success=True
|
||||
)
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": False,
|
||||
"message": "Old password is not correct",
|
||||
},
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
)
|
||||
return JSONResponse(
|
||||
content={"completed": False, "message": "Invalid data"},
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
|
||||
return ResponseHandler.success("Password changed successfully")
|
||||
except Exception as e:
|
||||
user_logger.log_password_change(
|
||||
request, found_user.id if found_user else None,
|
||||
"change", success=False, error=str(e)
|
||||
)
|
||||
return ResponseHandler.error(str(e))
|
||||
|
||||
|
||||
class AuthenticationCreatePasswordEventMethods(MethodToEvent):
|
||||
@@ -426,22 +444,11 @@ class AuthenticationCreatePasswordEventMethods(MethodToEvent):
|
||||
raise HTTPException(
|
||||
status_code=400, detail="Email can not be sent. Try again later"
|
||||
)
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "Password is created successfully",
|
||||
"data": found_user.get_dict(),
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": False,
|
||||
"message": "Record not found",
|
||||
"data": {},
|
||||
},
|
||||
status_code=status.HTTP_202_ACCEPTED,
|
||||
return ResponseHandler.success(
|
||||
"Password is created successfully",
|
||||
data=found_user.get_dict(),
|
||||
)
|
||||
return ResponseHandler.not_found("Record not found")
|
||||
|
||||
|
||||
class AuthenticationDisconnectUserEventMethods(MethodToEvent):
|
||||
@@ -463,35 +470,21 @@ class AuthenticationDisconnectUserEventMethods(MethodToEvent):
|
||||
):
|
||||
found_user = Users.filter_one(Users.uu_id == token_dict.user_uu_id).data
|
||||
if not found_user:
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": False,
|
||||
"message": "Invalid data",
|
||||
"data": None,
|
||||
},
|
||||
status_code=status.HTTP_202_ACCEPTED,
|
||||
)
|
||||
if already_tokens := get_object_via_user_uu_id(user_id=str(found_user.uu_id)):
|
||||
return ResponseHandler.not_found("User not found")
|
||||
if already_tokens := RedisActions.get_object_via_user_uu_id(user_id=str(found_user.uu_id)):
|
||||
for key, token_user in already_tokens.items():
|
||||
redis_cli.delete(key)
|
||||
RedisActions.delete_key(key)
|
||||
selected_user = Users.filter_one(
|
||||
Users.uu_id == token_user.get("uu_id"),
|
||||
).data
|
||||
selected_user.remove_refresher_token(
|
||||
domain=data.domain, disconnect=True
|
||||
)
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "All sessions are disconnected",
|
||||
"data": selected_user.get_dict(),
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
return JSONResponse(
|
||||
content={"completed": False, "message": "Invalid data", "data": None},
|
||||
status_code=status.HTTP_202_ACCEPTED,
|
||||
return ResponseHandler.success(
|
||||
"All sessions are disconnected",
|
||||
data=selected_user.get_dict(),
|
||||
)
|
||||
return ResponseHandler.not_found("Invalid data")
|
||||
|
||||
|
||||
class AuthenticationLogoutEventMethods(MethodToEvent):
|
||||
@@ -512,32 +505,21 @@ class AuthenticationLogoutEventMethods(MethodToEvent):
|
||||
cls, request: Request, data: Logout, token_dict: dict = None
|
||||
):
|
||||
token_user = None
|
||||
if already_tokens := get_object_via_access_key(request=request):
|
||||
if already_tokens := RedisActions.get_object_via_access_key(request=request):
|
||||
for key in already_tokens:
|
||||
token_user = json.loads(redis_cli.get(key) or {})
|
||||
token_user = json.loads(RedisActions.get_key(key) or {})
|
||||
if token_user.get("domain") == data.domain:
|
||||
redis_cli.delete(key)
|
||||
RedisActions.delete_key(key)
|
||||
selected_user = Users.filter_one(
|
||||
Users.uu_id == token_user.get("uu_id"),
|
||||
).data
|
||||
selected_user.remove_refresher_token(domain=data.domain)
|
||||
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "Session is logged out",
|
||||
"data": token_user,
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": False,
|
||||
"message": "Logout is not successfully completed",
|
||||
"data": None,
|
||||
},
|
||||
status_code=status.HTTP_202_ACCEPTED,
|
||||
return ResponseHandler.success(
|
||||
"Session is logged out",
|
||||
data=token_user,
|
||||
)
|
||||
return ResponseHandler.not_found("Logout is not successfully completed")
|
||||
|
||||
|
||||
class AuthenticationRefreshTokenEventMethods(MethodToEvent):
|
||||
@@ -550,7 +532,7 @@ class AuthenticationRefreshTokenEventMethods(MethodToEvent):
|
||||
"c90f3334-10c9-4181-b5ff-90d98a0287b2": "authentication_refresher_token",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"c90f3334-10c9-4181-b5ff-90d98a0287b2": "authentication_refresher_token",
|
||||
"c90f3334-10c9-4181-b5ff-90d98a0287b2": AuthenticationRefreshResponse,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
@@ -563,10 +545,7 @@ class AuthenticationRefreshTokenEventMethods(MethodToEvent):
|
||||
**UsersTokens.valid_record_dict,
|
||||
).data
|
||||
if not token_refresher:
|
||||
return JSONResponse(
|
||||
content={"completed": False, "message": "Invalid data", "data": {}},
|
||||
status_code=status.HTTP_202_ACCEPTED,
|
||||
)
|
||||
return ResponseHandler.not_found("Invalid data")
|
||||
if found_user := Users.filter_one(
|
||||
Users.id == token_refresher.user_id,
|
||||
).data:
|
||||
@@ -580,24 +559,16 @@ class AuthenticationRefreshTokenEventMethods(MethodToEvent):
|
||||
request, "remote_addr", None
|
||||
) or request.headers.get("X-Forwarded-For", None)
|
||||
found_user.last_seen = str(system_arrow.now())
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "User is logged in successfully via refresher token",
|
||||
"data": load_user_with_erp_details(
|
||||
found_user,
|
||||
{
|
||||
response_data = {
|
||||
"access_token": access_key,
|
||||
"refresh_token": data.refresh_token,
|
||||
},
|
||||
),
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
return JSONResponse(
|
||||
content={"completed": False, "message": "Invalid data", "data": {}},
|
||||
status_code=status.HTTP_202_ACCEPTED,
|
||||
}
|
||||
return ResponseHandler.success(
|
||||
"User is logged in successfully via refresher token",
|
||||
data=response_data,
|
||||
response_model=AuthenticationRefreshResponse
|
||||
)
|
||||
return ResponseHandler.not_found("Invalid data")
|
||||
|
||||
|
||||
class AuthenticationForgotPasswordEventMethods(MethodToEvent):
|
||||
@@ -641,13 +612,9 @@ class AuthenticationForgotPasswordEventMethods(MethodToEvent):
|
||||
found_user.password_token_is_valid = str(system_arrow.shift(days=1))
|
||||
found_user.save()
|
||||
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "Password is change link is sent to your email or phone",
|
||||
"data": {},
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
return ResponseHandler.success(
|
||||
"Password is change link is sent to your email or phone",
|
||||
data={},
|
||||
)
|
||||
|
||||
|
||||
@@ -687,13 +654,9 @@ class AuthenticationResetPasswordEventMethods(MethodToEvent):
|
||||
raise found_user.raise_http_exception(
|
||||
status_code=400, message="Email can not be sent. Try again later"
|
||||
)
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "Password change link is sent to your email or phone",
|
||||
"data": found_user.get_dict(),
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
return ResponseHandler.success(
|
||||
"Password change link is sent to your email or phone",
|
||||
data=found_user.get_dict(),
|
||||
)
|
||||
|
||||
|
||||
@@ -707,7 +670,7 @@ class AuthenticationDownloadAvatarEventMethods(MethodToEvent):
|
||||
"c140cd5f-307f-4046-a93e-3ade032a57a7": "authentication_download_avatar",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"c140cd5f-307f-4046-a93e-3ade032a57a7": "authentication_download_avatar",
|
||||
"c140cd5f-307f-4046-a93e-3ade032a57a7": AuthenticationUserInfoResponse,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
@@ -722,11 +685,7 @@ class AuthenticationDownloadAvatarEventMethods(MethodToEvent):
|
||||
system_arrow.now() - system_arrow.get(str(found_user.expiry_ends))
|
||||
).days
|
||||
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "Avatar and profile is shared via user credentials",
|
||||
"data": {
|
||||
user_info = {
|
||||
"lang": token_dict.lang,
|
||||
"full_name": found_user.person.full_name,
|
||||
"avatar": found_user.avatar,
|
||||
@@ -734,14 +693,13 @@ class AuthenticationDownloadAvatarEventMethods(MethodToEvent):
|
||||
"expiry_ends": str(found_user.expiry_ends),
|
||||
"expired_str": expired_starts,
|
||||
"expired_int": int(expired_int),
|
||||
},
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
return JSONResponse(
|
||||
content={"completed": False, "message": "Invalid data", "data": {}},
|
||||
status_code=status.HTTP_202_ACCEPTED,
|
||||
}
|
||||
return ResponseHandler.success(
|
||||
"Avatar and profile is shared via user credentials",
|
||||
data=user_info,
|
||||
response_model=AuthenticationUserInfoResponse
|
||||
)
|
||||
return ResponseHandler.not_found("Invalid data")
|
||||
|
||||
|
||||
AuthenticationLoginEventMethod = AuthenticationLoginEventMethods(
|
||||
@@ -780,65 +738,3 @@ AuthenticationDownloadAvatarEventMethod = AuthenticationDownloadAvatarEventMetho
|
||||
AuthenticationResetPasswordEventMethod = AuthenticationResetPasswordEventMethods(
|
||||
action=ActionsSchema(endpoint="/authentication/reset_password")
|
||||
)
|
||||
|
||||
# UserLogger.log_error(
|
||||
# str(
|
||||
# dict(
|
||||
# user_id=found_user.id,
|
||||
# domain=data.domain,
|
||||
# access_key=token_user.get("access_input"),
|
||||
# agent=request.headers.get("User-Agent", None),
|
||||
# ip=getattr(request, "remote_addr", None)
|
||||
# or request.headers.get("X-Forwarded-For", None),
|
||||
# platform=request.headers.get("Origin", None),
|
||||
# login_date=datetime.datetime.utcnow().__str__(),
|
||||
# is_login=False,
|
||||
# )
|
||||
# )
|
||||
# )
|
||||
|
||||
# UserLogger.log_error(
|
||||
# str(
|
||||
# dict(
|
||||
# user_id=found_user.id,
|
||||
# domain=data.domain,
|
||||
# access_key=data.access_key,
|
||||
# agent=request.headers.get("User-Agent", None),
|
||||
# ip=getattr(request, "remote_addr", None)
|
||||
# or request.headers.get("X-Forwarded-For", None),
|
||||
# platform=request.headers.get("Origin", None),
|
||||
# login_date=str(DateTimeLocal.now()),
|
||||
# is_login=False,
|
||||
# )
|
||||
# )
|
||||
# )
|
||||
# UserLogger.log_error(
|
||||
# str(
|
||||
# dict(
|
||||
# user_id=found_user.id,
|
||||
# domain=data.domain,
|
||||
# access_key="via_refresher",
|
||||
# agent=request.headers.get("User-Agent", None),
|
||||
# ip=getattr(request, "remote_addr", None)
|
||||
# or request.headers.get("X-Forwarded-For", None),
|
||||
# platform=request.headers.get("Origin", None),
|
||||
# login_date=datetime.datetime.utcnow().__str__(),
|
||||
# is_login=False,
|
||||
# )
|
||||
# )
|
||||
# )
|
||||
# UserLogger.log_error(
|
||||
# str(
|
||||
# dict(
|
||||
# user_id=selected_user.id,
|
||||
# domain=data.domain,
|
||||
# access_key=token_user.get("access_input"),
|
||||
# agent=request.headers.get("User-Agent", None),
|
||||
# ip=getattr(request, "remote_addr", None)
|
||||
# or request.headers.get("X-Forwarded-For", None),
|
||||
# platform=request.headers.get("Origin", None),
|
||||
# login_date=datetime.datetime.utcnow().__str__(),
|
||||
# is_login=False,
|
||||
# )
|
||||
# )
|
||||
# )
|
||||
|
||||
@@ -21,7 +21,7 @@ from api_validations.validations_request import (
|
||||
)
|
||||
from api_validations.validations_response import ListBuildingResponse
|
||||
|
||||
from api_validations.core_response import AlchemyJsonResponse
|
||||
from ApiServices.api_handlers import AlchemyJsonResponse
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObject
|
||||
|
||||
@@ -57,9 +57,8 @@ class BuildListEventMethods(MethodToEvent):
|
||||
records = Build.filter_all()
|
||||
return AlchemyJsonResponse(
|
||||
completed=True,
|
||||
message="Building Records are listed",
|
||||
message="Building records listed successfully",
|
||||
result=records,
|
||||
response_model=ListBuildingResponse,
|
||||
)
|
||||
|
||||
|
||||
@@ -124,20 +123,17 @@ class BuildCreateEventMethods(MethodToEvent):
|
||||
created_build.save()
|
||||
man_build_part.update(is_confirmed=True)
|
||||
man_build_part.save()
|
||||
# created_build_relation = RelationshipEmployee2Build.find_or_create(
|
||||
# company_id=token_dict.selected_company.company_id,
|
||||
# member_id=created_build.id,
|
||||
# employee_id=token_dict.selected_company.employee_id,
|
||||
# )
|
||||
# created_build_relation.update(is_confirmed=True)
|
||||
# created_build_relation.save()
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "Create Build record completed. This build is assigned to you.",
|
||||
"data": created_build.get_dict(),
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
relationship = RelationshipEmployee2Build.find_or_create(
|
||||
employee_id=token_dict.selected_company.employee_id,
|
||||
build_id=created_build.id,
|
||||
company_id=token_dict.selected_company.company_id,
|
||||
)
|
||||
relationship.save()
|
||||
|
||||
return AlchemyJsonResponse(
|
||||
completed=True,
|
||||
message="Building created successfully",
|
||||
result=created_build.get_dict(),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
@@ -166,20 +162,17 @@ class BuildCreateEventMethods(MethodToEvent):
|
||||
)
|
||||
|
||||
created_build = Build.create_action(data=data, token=token_dict)
|
||||
|
||||
created_build_relation = RelationshipEmployee2Build.find_or_create(
|
||||
company_id=token_dict.selected_company.company_id,
|
||||
member_id=created_build.id,
|
||||
relationship = RelationshipEmployee2Build.find_or_create(
|
||||
employee_id=token_dict.selected_company.employee_id,
|
||||
build_id=created_build.id,
|
||||
company_id=token_dict.selected_company.company_id,
|
||||
)
|
||||
created_build.save()
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "Create Build record completed. This build is assigned to you.",
|
||||
"data": created_build.get_dict(),
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
relationship.save()
|
||||
|
||||
return AlchemyJsonResponse(
|
||||
completed=True,
|
||||
message="Building created successfully",
|
||||
result=created_build.get_dict(),
|
||||
)
|
||||
|
||||
|
||||
@@ -205,44 +198,30 @@ class BuildUpdateEventMethods(MethodToEvent):
|
||||
token_dict: Union[EmployeeTokenObject, OccupantTokenObject],
|
||||
):
|
||||
if isinstance(token_dict, OccupantTokenObject):
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail="Occupant cannot update building",
|
||||
)
|
||||
|
||||
Build.pre_query = Build.select_action(
|
||||
employee_id=token_dict.selected_company.employee_id
|
||||
)
|
||||
updated_build = Build.update_action(
|
||||
data=data, token=token_dict, build_uu_id=build_uu_id
|
||||
build = Build.filter_one(Build.uu_id == build_uu_id).data
|
||||
if not build:
|
||||
return AlchemyJsonResponse(
|
||||
completed=False,
|
||||
message="Building not found",
|
||||
result={},
|
||||
status_code="HTTP_404_NOT_FOUND"
|
||||
)
|
||||
Build.save()
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "Update Build record",
|
||||
"data": updated_build.get_dict(),
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
elif isinstance(token_dict, EmployeeTokenObject):
|
||||
find_one_build = Build.filter_one(
|
||||
Build.uu_id == build_uu_id,
|
||||
).data
|
||||
access_authorized_build = Build.select_action(
|
||||
employee_id=token_dict.selected_company.employee_id,
|
||||
filter_expr=[Build.id == find_one_build.id],
|
||||
)
|
||||
if access_authorized_build.count:
|
||||
updated_build = Build.update_action(
|
||||
data=data, token=token_dict, build_uu_id=build_uu_id
|
||||
)
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "Update Build record",
|
||||
"data": updated_build.get_dict(),
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail=f"This user can not modify {build_uu_id} - building.",
|
||||
|
||||
build.update(**data.excluded_dump())
|
||||
build.save()
|
||||
|
||||
return AlchemyJsonResponse(
|
||||
completed=True,
|
||||
message="Building updated successfully",
|
||||
result=build.get_dict(),
|
||||
)
|
||||
|
||||
|
||||
@@ -261,36 +240,31 @@ class BuildPatchEventMethods(MethodToEvent):
|
||||
|
||||
@classmethod
|
||||
def build_patch(cls, build_uu_id: str, data: PatchRecord, token_dict):
|
||||
find_one_build = Build.filter_one(
|
||||
Build.uu_id == build_uu_id,
|
||||
if isinstance(token_dict, OccupantTokenObject):
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail="Occupant cannot patch building",
|
||||
)
|
||||
access_authorized_build = Build.select_action(
|
||||
employee_id=token_dict.selected_company.employee_id,
|
||||
filter_expr=[Build.id == find_one_build.id],
|
||||
|
||||
Build.pre_query = Build.select_action(
|
||||
employee_id=token_dict.selected_company.employee_id
|
||||
)
|
||||
if access_authorized_build.count:
|
||||
action = data.excluded_dump()
|
||||
find_one_build.active = bool(action.get("active", find_one_build.active))
|
||||
find_one_build.is_confirmed = bool(
|
||||
action.get("confirm", find_one_build.is_confirmed)
|
||||
build = Build.filter_one(Build.uu_id == build_uu_id).data
|
||||
if not build:
|
||||
return AlchemyJsonResponse(
|
||||
completed=False,
|
||||
message="Building not found",
|
||||
result={},
|
||||
status_code="HTTP_404_NOT_FOUND"
|
||||
)
|
||||
find_one_build.deleted = bool(action.get("delete", find_one_build.deleted))
|
||||
find_one_build.save()
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "Patch Build record completed",
|
||||
"data": find_one_build.get_dict(),
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": False,
|
||||
"message": "Patch Build record failed",
|
||||
"data": {},
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
|
||||
build.update(**data.excluded_dump())
|
||||
build.save()
|
||||
|
||||
return AlchemyJsonResponse(
|
||||
completed=True,
|
||||
message="Building patched successfully",
|
||||
result=build.get_dict(),
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -11,7 +11,9 @@ from api_validations.validations_request import (
|
||||
ListOptions,
|
||||
)
|
||||
|
||||
from api_validations.core_response import AlchemyJsonResponse
|
||||
from api_validations.validations_response.building_responses import BuildAreaListResponse
|
||||
|
||||
from ApiServices.api_handlers import AlchemyJsonResponse
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObject
|
||||
|
||||
@@ -23,7 +25,7 @@ class BuildAreaListEventMethods(MethodToEvent):
|
||||
"0bb51845-65a2-4340-8872-a3b5aad95468": "build_area_list",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"0bb51845-65a2-4340-8872-a3b5aad95468": None,
|
||||
"0bb51845-65a2-4340-8872-a3b5aad95468": BuildAreaListResponse,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
@@ -49,7 +51,12 @@ class BuildAreaListEventMethods(MethodToEvent):
|
||||
BuildArea.filter_attr = list_options
|
||||
records = BuildArea.filter_all()
|
||||
return AlchemyJsonResponse(
|
||||
completed=True, message="List of Build Area", result=records
|
||||
completed=True,
|
||||
message="Building areas listed successfully",
|
||||
result=records,
|
||||
cls_object=BuildArea,
|
||||
filter_attributes=list_options,
|
||||
response_model=BuildAreaListResponse
|
||||
)
|
||||
|
||||
|
||||
@@ -103,14 +110,11 @@ class BuildAreaCreateEventMethods(MethodToEvent):
|
||||
|
||||
data_dict["build_id"] = selected_build.id
|
||||
data_dict["build_uu_id"] = str(selected_build.uu_id)
|
||||
created_build_part = BuildArea.find_or_create(**data_dict)
|
||||
created_build_part.save()
|
||||
created_build_part.update(is_confirmed=True)
|
||||
created_build_part.save()
|
||||
area = BuildArea.insert_one(data_dict).data
|
||||
return AlchemyJsonResponse(
|
||||
completed=True,
|
||||
message="Created Build Area",
|
||||
result=created_build_part.get_dict(),
|
||||
message="Building area created successfully",
|
||||
result=area
|
||||
)
|
||||
|
||||
|
||||
@@ -131,10 +135,11 @@ class BuildAreaUpdateEventMethods(MethodToEvent):
|
||||
data: UpdateBuildArea,
|
||||
token_dict: typing.Union[EmployeeTokenObject, OccupantTokenObject],
|
||||
):
|
||||
area = BuildArea.update_one(build_uu_id, data).data
|
||||
return AlchemyJsonResponse(
|
||||
completed=False,
|
||||
message="Update Build record",
|
||||
result=None,
|
||||
completed=True,
|
||||
message="Building area updated successfully",
|
||||
result=area
|
||||
)
|
||||
|
||||
|
||||
@@ -155,10 +160,11 @@ class BuildAreaPatchEventMethods(MethodToEvent):
|
||||
data,
|
||||
token_dict: typing.Union[EmployeeTokenObject, OccupantTokenObject],
|
||||
):
|
||||
area = BuildArea.patch_one(build_uu_id, data).data
|
||||
return AlchemyJsonResponse(
|
||||
completed=False,
|
||||
message="Patch Build record",
|
||||
result=None,
|
||||
completed=True,
|
||||
message="Building area patched successfully",
|
||||
result=area
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -10,7 +10,7 @@ from databases import (
|
||||
)
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObject
|
||||
from api_validations.core_response import AlchemyJsonResponse
|
||||
from ApiServices.api_handlers import AlchemyJsonResponse
|
||||
|
||||
from api_validations.validations_request import (
|
||||
InsertBuildParts,
|
||||
@@ -46,11 +46,8 @@ class BuildingBuildPartsListEventMethods(MethodToEvent):
|
||||
records = BuildParts.filter_all()
|
||||
return AlchemyJsonResponse(
|
||||
completed=True,
|
||||
message="Building Parts Records are listed",
|
||||
message="Building parts listed successfully",
|
||||
result=records,
|
||||
cls_object=BuildParts,
|
||||
response_model=BuildPartsListResponse,
|
||||
filter_attributes=list_options,
|
||||
)
|
||||
|
||||
|
||||
@@ -72,13 +69,10 @@ class BuildingBuildPartsCreateEventMethods(MethodToEvent):
|
||||
created_build.save()
|
||||
created_build.update(is_confirmed=True)
|
||||
created_build.save()
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "Create Build Parts record",
|
||||
"data": created_build.get_dict(),
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
return AlchemyJsonResponse(
|
||||
completed=True,
|
||||
message="Building part created successfully",
|
||||
result=created_build,
|
||||
)
|
||||
|
||||
|
||||
@@ -98,13 +92,10 @@ class BuildingBuildPartsUpdateEventMethods(MethodToEvent):
|
||||
):
|
||||
updated_build = BuildParts.update_action(data=data, token=token_dict)
|
||||
updated_build.save()
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "Update Build Parts record",
|
||||
"data": updated_build,
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
return AlchemyJsonResponse(
|
||||
completed=True,
|
||||
message="Building part updated successfully",
|
||||
result=updated_build,
|
||||
)
|
||||
|
||||
|
||||
@@ -133,21 +124,15 @@ class BuildingBuildPartsPatchEventMethods(MethodToEvent):
|
||||
)
|
||||
find_one_build.deleted = bool(action.get("delete", find_one_build.deleted))
|
||||
find_one_build.save()
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "Update Build Parts record",
|
||||
"data": find_one_build.get_dict(),
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
return AlchemyJsonResponse(
|
||||
completed=True,
|
||||
message="Building part patched successfully",
|
||||
result=find_one_build,
|
||||
)
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": False,
|
||||
"message": "Update Build Parts record",
|
||||
"data": {},
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
return AlchemyJsonResponse(
|
||||
completed=False,
|
||||
message="Building part patched failed",
|
||||
result={},
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -11,10 +11,11 @@ from api_validations.validations_request import (
|
||||
ListOptions,
|
||||
)
|
||||
|
||||
from api_validations.core_response import AlchemyJsonResponse
|
||||
from api_validations.validations_response.building_responses import BuildSitesListResponse
|
||||
|
||||
from ApiServices.api_handlers import AlchemyJsonResponse
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObject
|
||||
from databases.sql_models.identity.identity import Addresses
|
||||
|
||||
|
||||
class BuildSitesListEventMethods(MethodToEvent):
|
||||
@@ -24,7 +25,7 @@ class BuildSitesListEventMethods(MethodToEvent):
|
||||
"6798414c-6c7d-47f0-9d8b-6935a0f51c2e": "build_sites_list",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"6798414c-6c7d-47f0-9d8b-6935a0f51c2e": None,
|
||||
"6798414c-6c7d-47f0-9d8b-6935a0f51c2e": BuildSitesListResponse,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
@@ -58,7 +59,12 @@ class BuildSitesListEventMethods(MethodToEvent):
|
||||
BuildSites.filter_attr = list_options
|
||||
records = BuildSites.filter_all()
|
||||
return AlchemyJsonResponse(
|
||||
completed=True, message="Update Build record", result=records
|
||||
completed=True,
|
||||
message="Building sites listed successfully",
|
||||
result=records,
|
||||
cls_object=BuildSites,
|
||||
filter_attributes=list_options,
|
||||
response_model=BuildSitesListResponse
|
||||
)
|
||||
|
||||
|
||||
@@ -102,14 +108,11 @@ class BuildSitesCreateEventMethods(MethodToEvent):
|
||||
},
|
||||
)
|
||||
data_dict = data.excluded_dump()
|
||||
created_build_part = BuildSites.find_or_create(**data_dict)
|
||||
created_build_part.save()
|
||||
created_build_part.update(is_confirmed=True)
|
||||
created_build_part.save()
|
||||
site = BuildSites.insert_one(data_dict).data
|
||||
return AlchemyJsonResponse(
|
||||
completed=True,
|
||||
message="Update Build record",
|
||||
result=created_build_part,
|
||||
message="Building site created successfully",
|
||||
result=site
|
||||
)
|
||||
|
||||
|
||||
@@ -130,10 +133,11 @@ class BuildSitesUpdateEventMethods(MethodToEvent):
|
||||
data: UpdateBuildArea,
|
||||
token_dict: typing.Union[EmployeeTokenObject, OccupantTokenObject],
|
||||
):
|
||||
site = BuildSites.update_one(build_uu_id, data).data
|
||||
return AlchemyJsonResponse(
|
||||
completed=False,
|
||||
message="Update Build record",
|
||||
result=None,
|
||||
completed=True,
|
||||
message="Building site updated successfully",
|
||||
result=site
|
||||
)
|
||||
|
||||
|
||||
@@ -154,10 +158,11 @@ class BuildSitesPatchEventMethods(MethodToEvent):
|
||||
data,
|
||||
token_dict: typing.Union[EmployeeTokenObject, OccupantTokenObject],
|
||||
):
|
||||
site = BuildSites.patch_one(build_uu_id, data).data
|
||||
return AlchemyJsonResponse(
|
||||
completed=False,
|
||||
message="Patch Build record",
|
||||
result=None,
|
||||
completed=True,
|
||||
message="Building site patched successfully",
|
||||
result=site
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -2,10 +2,11 @@ from typing import Union
|
||||
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObject
|
||||
from api_validations.core_response import AlchemyJsonResponse
|
||||
from ApiServices.api_handlers import AlchemyJsonResponse
|
||||
from api_validations.validations_request import (
|
||||
ListOptions,
|
||||
)
|
||||
from api_validations.validations_response.building_responses import BuildTypesListResponse
|
||||
from databases.sql_models.building.build import BuildTypes
|
||||
|
||||
|
||||
@@ -15,7 +16,9 @@ class BuildTypesListEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"5344d03c-fc47-43ec-8c44-6c2acd7e5d9f": "build_types_list",
|
||||
}
|
||||
__event_validation__ = {"5344d03c-fc47-43ec-8c44-6c2acd7e5d9f": None}
|
||||
__event_validation__ = {
|
||||
"5344d03c-fc47-43ec-8c44-6c2acd7e5d9f": BuildTypesListResponse
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def build_types_list(
|
||||
@@ -30,16 +33,23 @@ class BuildTypesListEventMethods(MethodToEvent):
|
||||
results = BuildTypes.filter_all()
|
||||
return AlchemyJsonResponse(
|
||||
completed=True,
|
||||
message="Building types listed successfully",
|
||||
result=results,
|
||||
message="Build Types are listed successfully",
|
||||
cls_object=BuildTypes,
|
||||
filter_attributes=list_options,
|
||||
response_model=BuildTypesListResponse
|
||||
)
|
||||
elif isinstance(token_dict, OccupantTokenObject):
|
||||
raise HTTPException(
|
||||
status_code=403, detail="You are not authorized to access this endpoint"
|
||||
return AlchemyJsonResponse(
|
||||
completed=False,
|
||||
message="You are not authorized to access this endpoint",
|
||||
result=None
|
||||
)
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=403, detail="You are not authorized to access this endpoint"
|
||||
return AlchemyJsonResponse(
|
||||
completed=False,
|
||||
message="You are not authorized to access this endpoint",
|
||||
result=None
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -5,7 +5,6 @@ from api_events.events.events.events_bind_services import (
|
||||
ServiceBindOccupantEventMethods,
|
||||
)
|
||||
from databases import (
|
||||
Modules,
|
||||
BuildParts,
|
||||
Build,
|
||||
BuildLivingSpace,
|
||||
@@ -14,7 +13,7 @@ from databases import (
|
||||
)
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObject
|
||||
from api_validations.core_response import AlchemyJsonResponse
|
||||
from ApiServices.api_handlers import AlchemyJsonResponse
|
||||
from api_validations.validations_request import (
|
||||
InsertBuildLivingSpace,
|
||||
UpdateBuildLivingSpace,
|
||||
@@ -71,6 +70,11 @@ class BuildingLivingSpacesListEventMethods(MethodToEvent):
|
||||
).query
|
||||
BuildLivingSpace.filter_attr = list_options
|
||||
records = BuildLivingSpace.filter_all()
|
||||
return AlchemyJsonResponse(
|
||||
completed=True,
|
||||
message="Living spaces listed successfully",
|
||||
result=records
|
||||
)
|
||||
elif isinstance(token_dict, EmployeeTokenObject):
|
||||
build_id_list_query = Build.select_action(
|
||||
employee_id=token_dict.selected_company.employee_id
|
||||
@@ -103,11 +107,8 @@ class BuildingLivingSpacesListEventMethods(MethodToEvent):
|
||||
records = BuildLivingSpace.filter_all()
|
||||
return AlchemyJsonResponse(
|
||||
completed=True,
|
||||
message="Building Living Spaces are listed successfully",
|
||||
result=records,
|
||||
response_model=LivingSpaceListResponse,
|
||||
cls_object=BuildLivingSpace,
|
||||
filter_attributes=list_options,
|
||||
message="Living spaces listed successfully",
|
||||
result=records
|
||||
)
|
||||
|
||||
|
||||
@@ -210,7 +211,11 @@ class BuildingLivingSpacesCreateEventMethods(MethodToEvent):
|
||||
build_living_space_id=created_living_space.id,
|
||||
service_id=occupants_service.id,
|
||||
)
|
||||
return created_living_space
|
||||
return AlchemyJsonResponse(
|
||||
completed=True,
|
||||
message="Living space created successfully",
|
||||
result=created_living_space
|
||||
)
|
||||
|
||||
|
||||
class BuildingLivingSpacesUpdateEventMethods(MethodToEvent):
|
||||
@@ -295,6 +300,13 @@ class BuildingLivingSpacesUpdateEventMethods(MethodToEvent):
|
||||
data_dict["owner_person_id"] = life_person.id
|
||||
del data_dict["build_parts_uu_id"], data_dict["life_person_uu_id"]
|
||||
|
||||
living_space = BuildLivingSpace.update_one(build_uu_id, data_dict).data
|
||||
return AlchemyJsonResponse(
|
||||
completed=True,
|
||||
message="Living space updated successfully",
|
||||
result=living_space
|
||||
)
|
||||
|
||||
|
||||
BuildingLivingSpacesListEventMethod = BuildingLivingSpacesListEventMethods(
|
||||
action=ActionsSchema(endpoint="/building/living_space/list")
|
||||
|
||||
@@ -12,9 +12,11 @@ from api_validations.validations_request import (
|
||||
PatchRecord,
|
||||
)
|
||||
|
||||
from api_validations.validations_response.company_responses import CompanyListResponse
|
||||
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObject
|
||||
from api_validations.core_response import AlchemyJsonResponse
|
||||
from ApiServices.api_handlers import AlchemyJsonResponse
|
||||
|
||||
|
||||
class CompanyListEventMethods(MethodToEvent):
|
||||
@@ -23,7 +25,7 @@ class CompanyListEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"f6900cb5-ac5b-478e-8e7c-fa87e65cd2e5": "company_list",
|
||||
}
|
||||
__event_validation__ = {"f6900cb5-ac5b-478e-8e7c-fa87e65cd2e5": None}
|
||||
__event_validation__ = {"f6900cb5-ac5b-478e-8e7c-fa87e65cd2e5": CompanyListResponse}
|
||||
|
||||
@classmethod
|
||||
def company_list(
|
||||
@@ -46,8 +48,11 @@ class CompanyListEventMethods(MethodToEvent):
|
||||
records = Companies.filter_all()
|
||||
return AlchemyJsonResponse(
|
||||
completed=True,
|
||||
message="Building Living Spaces are listed successfully",
|
||||
message="Companies listed successfully",
|
||||
result=records,
|
||||
cls_object=Companies,
|
||||
filter_attributes=list_options,
|
||||
response_model=CompanyListResponse
|
||||
)
|
||||
|
||||
|
||||
@@ -65,18 +70,15 @@ class CompanyCreateEventMethods(MethodToEvent):
|
||||
data: InsertCompany,
|
||||
token_dict: EmployeeTokenObject,
|
||||
):
|
||||
created_company = Companies.create_action(data=data, token=token_dict)
|
||||
created_company = Companies.insert_one(data).data
|
||||
created_company.update(
|
||||
related_company=token_dict.selected_company.company_uu_id
|
||||
)
|
||||
created_company.save()
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "Create Company record",
|
||||
"data": created_company.get_dict(),
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
return AlchemyJsonResponse(
|
||||
completed=True,
|
||||
message="Company created successfully",
|
||||
result=created_company.get_dict(),
|
||||
)
|
||||
|
||||
|
||||
@@ -100,27 +102,19 @@ class CompanyUpdateEventMethods(MethodToEvent):
|
||||
token_dict.selected_company.duty_id,
|
||||
],
|
||||
)
|
||||
find_one_company = Companies.filter_one(
|
||||
Companies.uu_id == company_uu_id,
|
||||
).data
|
||||
if not find_one_company:
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "Update Company record",
|
||||
"data": {},
|
||||
},
|
||||
status_code=200,
|
||||
company = Companies.update_one(company_uu_id, data).data
|
||||
if not company:
|
||||
return AlchemyJsonResponse(
|
||||
completed=False,
|
||||
message="Company not found",
|
||||
result={},
|
||||
status_code="HTTP_404_NOT_FOUND"
|
||||
)
|
||||
updated_company = find_one_company.update(**data.excluded_dump())
|
||||
Companies.save()
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "Update Company record",
|
||||
"data": updated_company,
|
||||
},
|
||||
status_code=200,
|
||||
company.save()
|
||||
return AlchemyJsonResponse(
|
||||
completed=True,
|
||||
message="Company updated successfully",
|
||||
result=company.get_dict(),
|
||||
)
|
||||
|
||||
|
||||
@@ -136,42 +130,25 @@ class CompanyPatchEventMethods(MethodToEvent):
|
||||
def company_patch(
|
||||
cls, company_uu_id: str, data: PatchRecord, token_dict: EmployeeTokenObject
|
||||
):
|
||||
find_one_company = Companies.filter_one(
|
||||
Companies.uu_id == company_uu_id,
|
||||
).data
|
||||
access_authorized_company = Companies.select_action(
|
||||
Companies.pre_query = Companies.select_action(
|
||||
duty_id_list=[
|
||||
token_dict.selected_company.bulk_duties_id,
|
||||
token_dict.selected_company.duty_id,
|
||||
],
|
||||
)
|
||||
if access_authorized_company.count:
|
||||
action = data.excluded_dump()
|
||||
find_one_company.active = bool(
|
||||
action.get("active", find_one_company.active)
|
||||
company = Companies.patch_one(company_uu_id, data).data
|
||||
if not company:
|
||||
return AlchemyJsonResponse(
|
||||
completed=False,
|
||||
message="Company not found",
|
||||
result={},
|
||||
status_code="HTTP_404_NOT_FOUND"
|
||||
)
|
||||
find_one_company.is_confirmed = bool(
|
||||
action.get("confirm", find_one_company.is_confirmed)
|
||||
)
|
||||
find_one_company.deleted = bool(
|
||||
action.get("delete", find_one_company.deleted)
|
||||
)
|
||||
find_one_company.save()
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "Patch Company record completed",
|
||||
"data": find_one_company.get_dict(),
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": False,
|
||||
"message": "Patch Company record failed",
|
||||
"data": {},
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
company.save()
|
||||
return AlchemyJsonResponse(
|
||||
completed=True,
|
||||
message="Company patched successfully",
|
||||
result=company.get_dict(),
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -13,7 +13,7 @@ from databases import Departments
|
||||
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject
|
||||
from api_validations.core_response import AlchemyJsonResponse
|
||||
from ApiServices.api_handlers import AlchemyJsonResponse
|
||||
|
||||
|
||||
class DepartmentListEventMethods(MethodToEvent):
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
from fastapi import status
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
from api_validations.core_response import AlchemyJsonResponse
|
||||
from ApiServices.api_handlers import AlchemyJsonResponse
|
||||
from api_validations.validations_request import (
|
||||
InsertDuties,
|
||||
UpdateDuties,
|
||||
|
||||
@@ -10,8 +10,8 @@ from api_validations.validations_request import (
|
||||
from databases import Duty
|
||||
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObject
|
||||
from api_validations.core_response import AlchemyJsonResponse
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject
|
||||
from ApiServices.api_handlers import AlchemyJsonResponse
|
||||
|
||||
|
||||
class DutyListEventMethods(MethodToEvent):
|
||||
|
||||
@@ -12,8 +12,8 @@ from api_validations.validations_request import (
|
||||
from databases import Employees, Staff, People, EmployeeHistory
|
||||
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObject
|
||||
from api_validations.core_response import AlchemyJsonResponse
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject
|
||||
from ApiServices.api_handlers import AlchemyJsonResponse
|
||||
|
||||
|
||||
class EmployeeListEventMethods(MethodToEvent):
|
||||
|
||||
@@ -13,7 +13,7 @@ from databases import Staff, Duties
|
||||
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject
|
||||
from api_validations.core_response import AlchemyJsonResponse
|
||||
from ApiServices.api_handlers import AlchemyJsonResponse
|
||||
|
||||
|
||||
class StaffListEventMethods(MethodToEvent):
|
||||
|
||||
@@ -17,7 +17,7 @@ from api_validations.validations_request import (
|
||||
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObject
|
||||
from api_validations.core_response import AlchemyJsonResponse
|
||||
from ApiServices.api_handlers import AlchemyJsonResponse
|
||||
from api_library.date_time_actions.date_functions import system_arrow
|
||||
|
||||
|
||||
|
||||
@@ -23,13 +23,12 @@ from databases import (
|
||||
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObject
|
||||
from api_validations.core_response import AlchemyJsonResponse
|
||||
from api_library.date_time_actions.date_functions import system_arrow, client_arrow
|
||||
from ApiServices.api_handlers import AlchemyJsonResponse
|
||||
from api_library.date_time_actions.date_functions import system_arrow
|
||||
|
||||
from api_validations.validations_request import (
|
||||
InsertBuildDecisionBookItems,
|
||||
ListOptions,
|
||||
ListDecisionBook,
|
||||
)
|
||||
from databases.sql_models.event.event import Services
|
||||
|
||||
|
||||
@@ -1,24 +1,13 @@
|
||||
import typing
|
||||
|
||||
from databases import (
|
||||
Build,
|
||||
BuildParts,
|
||||
BuildDecisionBook,
|
||||
BuildDecisionBookItems,
|
||||
BuildDecisionBookPerson,
|
||||
BuildDecisionBookPayments,
|
||||
BuildDecisionBookProjects,
|
||||
BuildDecisionBookProjectPerson,
|
||||
ApiEnumDropdown,
|
||||
OccupantTypes,
|
||||
Companies,
|
||||
BuildLivingSpace,
|
||||
)
|
||||
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
from api_events.events.abstract_class import MethodToEvent
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObject
|
||||
from api_validations.core_response import AlchemyJsonResponse
|
||||
from api_library.date_time_actions.date_functions import system_arrow, client_arrow
|
||||
from ApiServices.api_handlers import AlchemyJsonResponse
|
||||
from api_library.date_time_actions.date_functions import system_arrow
|
||||
|
||||
|
||||
class DecisionBookDecisionBookItemsDebitsListEventMethods(MethodToEvent):
|
||||
|
||||
@@ -11,7 +11,6 @@ from databases import (
|
||||
BuildLivingSpace,
|
||||
BuildParts,
|
||||
BuildDecisionBookPersonOccupants,
|
||||
People,
|
||||
OccupantTypes,
|
||||
)
|
||||
|
||||
@@ -25,7 +24,6 @@ from api_validations.validations_request import (
|
||||
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObject
|
||||
from api_validations.core_response import AlchemyJsonResponse
|
||||
|
||||
|
||||
class DecisionBookPersonListEventMethods(MethodToEvent):
|
||||
|
||||
@@ -13,7 +13,6 @@ from databases import (
|
||||
BuildDecisionBookPersonOccupants,
|
||||
OccupantTypes,
|
||||
Users,
|
||||
ApiEnumDropdown,
|
||||
)
|
||||
|
||||
from api_validations.validations_request import (
|
||||
@@ -22,7 +21,6 @@ from api_validations.validations_request import (
|
||||
)
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObject
|
||||
from api_validations.core_response import AlchemyJsonResponse
|
||||
from api_library.date_time_actions.date_functions import system_arrow
|
||||
|
||||
|
||||
|
||||
@@ -1,20 +1,14 @@
|
||||
from typing import Union
|
||||
|
||||
from fastapi.responses import JSONResponse
|
||||
from fastapi import status
|
||||
|
||||
from api_validations.validations_response.parts import BuildPartsListResponse
|
||||
from databases import (
|
||||
Build,
|
||||
BuildParts,
|
||||
)
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
from api_events.events.abstract_class import MethodToEvent
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObject
|
||||
from api_validations.core_response import AlchemyJsonResponse
|
||||
from ApiServices.api_handlers import AlchemyJsonResponse
|
||||
|
||||
from api_validations.validations_request import (
|
||||
InsertBuildParts,
|
||||
UpdateBuildParts,
|
||||
ListOptions,
|
||||
)
|
||||
from databases.sql_models.building.decision_book import BuildDecisionBookPayments
|
||||
|
||||
@@ -16,10 +16,9 @@ from api_validations.validations_request import (
|
||||
ApprovalsBuildDecisionBookProjects,
|
||||
ListOptions,
|
||||
)
|
||||
from api_validations.core_response import AlchemyJsonResponse
|
||||
from databases import Build, BuildLivingSpace, BuildParts, ApiEnumDropdown
|
||||
from ApiServices.api_handlers import AlchemyJsonResponse
|
||||
from databases import BuildLivingSpace, BuildParts, ApiEnumDropdown
|
||||
from databases.sql_models.building.decision_book import (
|
||||
BuildDecisionBookProjectItems,
|
||||
BuildDecisionBookItems,
|
||||
BuildDecisionBook,
|
||||
)
|
||||
|
||||
@@ -2,7 +2,6 @@ from typing import Union
|
||||
|
||||
from databases import (
|
||||
BuildDecisionBookProjectItems,
|
||||
BuildDecisionBookProjectPerson,
|
||||
)
|
||||
|
||||
from api_validations.validations_request import (
|
||||
@@ -13,7 +12,7 @@ from api_validations.validations_request import (
|
||||
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObject
|
||||
from api_validations.core_response import AlchemyJsonResponse
|
||||
from ApiServices.api_handlers import AlchemyJsonResponse
|
||||
from databases.sql_models.building.decision_book import BuildDecisionBookProjects
|
||||
|
||||
|
||||
|
||||
@@ -13,7 +13,7 @@ from api_validations.validations_request import (
|
||||
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObject
|
||||
from api_validations.core_response import AlchemyJsonResponse
|
||||
from ApiServices.api_handlers import AlchemyJsonResponse
|
||||
|
||||
|
||||
class ProjectDecisionBookPersonListEventMethods(MethodToEvent):
|
||||
|
||||
@@ -10,11 +10,7 @@ from api_validations.validations_request import (
|
||||
)
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObject
|
||||
from api_events.events.events.events_bind_services import (
|
||||
ServiceBindOccupantEventMethods,
|
||||
)
|
||||
from api_library.date_time_actions.date_functions import system_arrow
|
||||
from api_validations.core_response import AlchemyJsonResponse
|
||||
from databases.sql_models.company.employee import Employees
|
||||
from databases.sql_models.event.event import Event2Occupant, Event2Employee
|
||||
|
||||
|
||||
@@ -6,7 +6,6 @@ from fastapi.exceptions import HTTPException
|
||||
|
||||
from api_library.date_time_actions.date_functions import system_arrow
|
||||
from databases import (
|
||||
Modules,
|
||||
Employees,
|
||||
BuildParts,
|
||||
BuildLivingSpace,
|
||||
@@ -21,10 +20,10 @@ from api_validations.validations_request import (
|
||||
)
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObject
|
||||
from api_validations.core_response import AlchemyJsonResponse
|
||||
|
||||
|
||||
class ServiceBindOccupantEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "UPDATE"
|
||||
__event_keys__ = {
|
||||
"0d2bc5c9-d4b1-4951-8305-69da4a687fdc": "bind_services_occupant",
|
||||
@@ -70,7 +69,6 @@ class ServiceBindOccupantEventMethods(MethodToEvent):
|
||||
data: RegisterServices2Occupant,
|
||||
token_dict: typing.Union[EmployeeTokenObject, OccupantTokenObject],
|
||||
):
|
||||
from sqlalchemy.dialects.postgresql import insert
|
||||
|
||||
if isinstance(token_dict, EmployeeTokenObject):
|
||||
raise HTTPException(
|
||||
|
||||
@@ -2,7 +2,6 @@ from typing import Union
|
||||
|
||||
from fastapi.exceptions import HTTPException
|
||||
|
||||
from api_events.events.events.events_services import ServicesEvents
|
||||
from databases import (
|
||||
Events,
|
||||
Employees,
|
||||
@@ -20,7 +19,7 @@ from api_validations.validations_request import (
|
||||
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObject
|
||||
from api_validations.core_response import AlchemyJsonResponse
|
||||
from ApiServices.api_handlers import AlchemyJsonResponse
|
||||
|
||||
|
||||
class EventsListEventMethods(MethodToEvent):
|
||||
|
||||
@@ -4,9 +4,7 @@ from api_validations.validations_request import (
|
||||
ListOptions,
|
||||
)
|
||||
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObject
|
||||
from api_validations.core_response import AlchemyJsonResponse
|
||||
from api_events.events.abstract_class import MethodToEvent
|
||||
|
||||
|
||||
class ModelEvents(MethodToEvent):
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
from api_validations.validations_request import DepartmentsPydantic, PatchRecord
|
||||
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObject
|
||||
from api_validations.core_response import AlchemyJsonResponse
|
||||
from api_events.events.abstract_class import MethodToEvent
|
||||
|
||||
|
||||
class ModulesEvents(MethodToEvent):
|
||||
|
||||
@@ -6,9 +6,8 @@ from api_validations.validations_request import (
|
||||
ListOptions,
|
||||
)
|
||||
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
from api_events.events.abstract_class import MethodToEvent
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObject
|
||||
from api_validations.core_response import AlchemyJsonResponse
|
||||
|
||||
|
||||
class ServicesEvents(MethodToEvent):
|
||||
|
||||
@@ -5,7 +5,6 @@ from fastapi.responses import JSONResponse
|
||||
|
||||
from api_validations.validations_response.people import PeopleListResponse
|
||||
from databases import (
|
||||
Build,
|
||||
People,
|
||||
Users,
|
||||
Companies,
|
||||
@@ -14,7 +13,7 @@ from databases import (
|
||||
from api_validations.validations_request import InsertPerson, UpdateUsers
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObject
|
||||
from api_validations.core_response import AlchemyJsonResponse
|
||||
from ApiServices.api_handlers import AlchemyJsonResponse
|
||||
|
||||
|
||||
class PeopleListEventMethods(MethodToEvent):
|
||||
|
||||
@@ -4,12 +4,12 @@ from fastapi import status
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
from api_configs import ApiStatic
|
||||
from databases import MongoQueryIdentity, Users, Companies, People
|
||||
from databases import MongoQueryIdentity, Users, Companies
|
||||
from databases.no_sql_models.validations import DomainViaUser
|
||||
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObject
|
||||
from api_validations.core_response import AlchemyJsonResponse
|
||||
from ApiServices.api_handlers import AlchemyJsonResponse
|
||||
from api_services.email.service import send_email
|
||||
from api_services.templates.password_templates import change_your_password_template
|
||||
from api_validations.validations_request import (
|
||||
@@ -17,7 +17,6 @@ from api_validations.validations_request import (
|
||||
UpdateUsers,
|
||||
PatchRecord,
|
||||
ListOptions,
|
||||
RegisterServices2Occupant,
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -1,15 +1,18 @@
|
||||
import arrow
|
||||
import calendar
|
||||
from api_configs.configs import Config
|
||||
from datetime import timedelta
|
||||
|
||||
|
||||
class DateTimeLocal:
|
||||
|
||||
__SYSTEM__: str = "GMT+0"
|
||||
def __init__(self, timezone: str = None, is_client: bool = True):
|
||||
if timezone and timezone not in Config.SUPPORTED_TIMEZONES:
|
||||
raise ValueError(f"Unsupported timezone: {timezone}. Must be one of {Config.SUPPORTED_TIMEZONES}")
|
||||
|
||||
def __init__(self, timezone: str = "GMT+3", is_client: bool = True):
|
||||
self.timezone = self.__SYSTEM__
|
||||
self.timezone = Config.SYSTEM_TIMEZONE
|
||||
if is_client:
|
||||
self.timezone = timezone.replace("-", "+")
|
||||
self.timezone = (timezone or Config.DEFAULT_TIMEZONE).replace("-", "+")
|
||||
|
||||
def find_last_day_of_month(self, date_value):
|
||||
today = self.get(date_value).date()
|
||||
@@ -44,6 +47,68 @@ class DateTimeLocal:
|
||||
def string_date_only(self, date):
|
||||
return self.get(date).format("YYYY-MM-DD")
|
||||
|
||||
def to_timestamp(self, date):
|
||||
"""Convert datetime to UTC timestamp"""
|
||||
return self.get(date).timestamp()
|
||||
|
||||
def from_timestamp(self, timestamp):
|
||||
"""Convert timestamp to timezone-aware datetime"""
|
||||
return arrow.get(timestamp).to(str(self.timezone))
|
||||
|
||||
def is_timezone_aware(self, date):
|
||||
"""Check if a date is timezone-aware"""
|
||||
return self.get(date).tzinfo is not None
|
||||
|
||||
def standardize_timezone(self, date):
|
||||
"""Ensure date is in the correct timezone"""
|
||||
if not self.is_timezone_aware(date):
|
||||
return self.get(date).to(str(self.timezone))
|
||||
return self.get(date)
|
||||
|
||||
def get_expiry_time(self, **kwargs):
|
||||
"""Get future time for cache expiry
|
||||
Example: get_expiry_time(hours=1, minutes=30)
|
||||
"""
|
||||
return self.now().shift(**kwargs)
|
||||
|
||||
def is_expired(self, timestamp):
|
||||
"""Check if a timestamp is expired"""
|
||||
if not timestamp:
|
||||
return True
|
||||
return self.from_timestamp(timestamp) < self.now()
|
||||
|
||||
def get_cache_key(self, base_key, *args):
|
||||
"""Generate a cache key with timezone info
|
||||
Example: get_cache_key('user_profile', user_id, 'details')
|
||||
"""
|
||||
components = [str(base_key)]
|
||||
components.extend(str(arg) for arg in args)
|
||||
components.append(f"tz_{self.timezone}")
|
||||
return ':'.join(components)
|
||||
|
||||
def format_for_db(self, date):
|
||||
"""Format date for database storage"""
|
||||
return self.get(date).format('YYYY-MM-DD HH:mm:ss.SSSZZ')
|
||||
|
||||
def parse_from_db(self, date_str):
|
||||
"""Parse date from database format"""
|
||||
if not date_str:
|
||||
return None
|
||||
return self.get(date_str)
|
||||
|
||||
def get_day_boundaries(self, date=None):
|
||||
"""Get start and end of day in current timezone"""
|
||||
dt = self.get(date) if date else self.now()
|
||||
start = dt.floor('day')
|
||||
end = dt.ceil('day')
|
||||
return start, end
|
||||
|
||||
def get_month_boundaries(self, date=None):
|
||||
"""Get start and end of month in current timezone"""
|
||||
dt = self.get(date) if date else self.now()
|
||||
start = dt.floor('month')
|
||||
end = dt.ceil('month')
|
||||
return start, end
|
||||
|
||||
client_arrow = DateTimeLocal(is_client=True)
|
||||
system_arrow = DateTimeLocal(is_client=False)
|
||||
|
||||
109
api_library/logger.py
Normal file
109
api_library/logger.py
Normal file
@@ -0,0 +1,109 @@
|
||||
import logging
|
||||
import os
|
||||
from typing import Optional, Dict, Any
|
||||
from fastapi.requests import Request
|
||||
from api_library.date_time_actions.date_functions import system_arrow
|
||||
|
||||
class UserActivityLogger:
    """Append-only audit logger for authentication-related user activity.

    Events are written to ``/service_app/logs/user_activity.log``.  The
    structured event payload is serialised into the log message itself as
    JSON, because the stock ``%(asctime)s - %(name)s - %(levelname)s -
    %(message)s`` formatter never renders ``extra`` fields — the previous
    implementation silently dropped all of the structured data.  The
    payload is still also passed via ``extra`` so custom handlers or
    formatters can consume individual attributes.
    """

    # Single well-known location (the service Dockerfile pre-creates /service_app/logs).
    LOG_PATH = "/service_app/logs/user_activity.log"

    def __init__(self):
        self.logger = logging.getLogger("user_activity")
        self.logger.setLevel(logging.INFO)

        # Guard against duplicate handlers when this module is imported more
        # than once in the same process (each extra handler would duplicate lines).
        if not self.logger.handlers:
            os.makedirs(os.path.dirname(self.LOG_PATH), exist_ok=True)
            handler = logging.FileHandler(self.LOG_PATH)
            handler.setFormatter(
                logging.Formatter(
                    '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
                )
            )
            self.logger.addHandler(handler)

    def _get_request_metadata(self, request: Request) -> Dict[str, Any]:
        """Extract common metadata (agent / ip / platform / timestamp) from a request."""
        return {
            "agent": request.headers.get("User-Agent"),
            # Prefer the socket address; fall back to the proxy header.
            # NOTE(review): X-Forwarded-For is client-controlled — do not use
            # it for security decisions without validation.
            "ip": getattr(request, "remote_addr", None) or request.headers.get("X-Forwarded-For"),
            "platform": request.headers.get("Origin"),
            "timestamp": str(system_arrow.now()),
        }

    def _emit(self, success: bool, message: str, log_data: Dict[str, Any]) -> None:
        """Write one audit line: INFO on success, WARNING on failure."""
        import json  # local import: this module's import block is outside this class

        line = f"{message} | {json.dumps(log_data, default=str)}"
        if success:
            self.logger.info(line, extra=log_data)
        else:
            self.logger.warning(line, extra=log_data)

    def log_login_attempt(
        self,
        request: Request,
        user_id: int,
        domain: str,
        access_key: str,
        success: bool,
        error: Optional[str] = None
    ):
        """Log login attempts"""
        log_data = {
            "event": "login_attempt",
            "user_id": user_id,
            "domain": domain,
            # NOTE(review): persisting the raw access key to disk is a
            # credential-exposure risk — consider masking or hashing it.
            "access_key": access_key,
            "success": success,
            "error": error,
            **self._get_request_metadata(request),
        }
        self._emit(success, "Login successful" if success else "Login failed", log_data)

    def log_password_change(
        self,
        request: Request,
        user_id: int,
        change_type: str,
        success: bool,
        error: Optional[str] = None
    ):
        """Log password changes"""
        log_data = {
            "event": "password_change",
            "user_id": user_id,
            "change_type": change_type,
            "success": success,
            "error": error,
            **self._get_request_metadata(request),
        }
        self._emit(success, "Password change successful" if success else "Password change failed", log_data)

    def log_session_activity(
        self,
        request: Request,
        user_id: int,
        activity_type: str,
        domain: Optional[str] = None,
        success: bool = True,
        error: Optional[str] = None
    ):
        """Log session activities (logout, disconnect, etc)"""
        log_data = {
            "event": "session_activity",
            "activity_type": activity_type,
            "user_id": user_id,
            "domain": domain,
            "success": success,
            "error": error,
            **self._get_request_metadata(request),
        }
        self._emit(success, f"{activity_type} successful" if success else f"{activity_type} failed", log_data)
|
||||
|
||||
# Global logger instance
# Shared module-level singleton; import this instead of constructing new
# UserActivityLogger objects (the handler-dedup check in __init__ only
# protects reuses of the same named logger).
user_logger = UserActivityLogger()
|
||||
38
api_library/response_handlers.py
Normal file
38
api_library/response_handlers.py
Normal file
@@ -0,0 +1,38 @@
|
||||
from typing import Any, Optional
|
||||
from fastapi import status
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
class ResponseHandler:
    """Uniform JSONResponse builders for API endpoints.

    Every payload carries the same envelope:
    ``{"completed": bool, "message": str, "data": dict}``.
    """

    @staticmethod
    def _build(completed: bool, message: str, data: Optional[Any], status_code: int) -> JSONResponse:
        """Assemble the shared response envelope."""
        payload = {
            "completed": completed,
            "message": message,
            "data": data or {},
        }
        return JSONResponse(content=payload, status_code=status_code)

    @staticmethod
    def success(message: str, data: Optional[Any] = None, status_code: int = status.HTTP_200_OK) -> JSONResponse:
        """Create a success response"""
        return ResponseHandler._build(True, message, data, status_code)

    @staticmethod
    def error(message: str, data: Optional[Any] = None, status_code: int = status.HTTP_400_BAD_REQUEST) -> JSONResponse:
        """Create an error response"""
        return ResponseHandler._build(False, message, data, status_code)

    @staticmethod
    def unauthorized(message: str = "Unauthorized access") -> JSONResponse:
        """Create an unauthorized (401) response"""
        return ResponseHandler.error(message, status_code=status.HTTP_401_UNAUTHORIZED)

    @staticmethod
    def not_found(message: str = "Resource not found") -> JSONResponse:
        """Create a not found (404) response"""
        return ResponseHandler.error(message, status_code=status.HTTP_404_NOT_FOUND)
|
||||
@@ -35,6 +35,7 @@ class ApplicationToken(BaseModel):
|
||||
person_uu_id: str
|
||||
|
||||
request: Optional[dict] = None # Request Info of Client
|
||||
expires_at: Optional[float] = None # Expiry timestamp
|
||||
|
||||
|
||||
class OccupantToken(BaseModel):
|
||||
|
||||
@@ -0,0 +1,11 @@
|
||||
from .email.service import send_email
|
||||
from .redis.old_functions import (
|
||||
save_object_to_redis as save_access_token_to_redis,
|
||||
)
|
||||
from .redis.functions import RedisActions
|
||||
from .templates.password_templates import (
|
||||
password_is_changed_template,
|
||||
change_your_password_template,
|
||||
)
|
||||
|
||||
update_selected_to_redis = RedisActions.set_json
|
||||
@@ -1,24 +1,8 @@
|
||||
import json
|
||||
|
||||
import typing
|
||||
from api_services.redis.conn import redis_cli
|
||||
|
||||
|
||||
class RedisResponse:
    """Lightweight result envelope returned by the Redis helper functions."""

    def __init__(
        self, status: bool, message: str, data: dict = None, error: str = None
    ):
        self.status, self.message = status, message
        self.data, self.error = data, error

    def as_dict(self):
        """Serialise the envelope into a plain dict."""
        return {
            key: getattr(self, key)
            for key in ("status", "message", "data", "error")
        }
|
||||
from api_library.date_time_actions.date_functions import system_arrow
|
||||
from api_objects import OccupantTokenObject, EmployeeTokenObject
|
||||
|
||||
|
||||
class RedisActions:
|
||||
@@ -63,6 +47,42 @@ class RedisActions:
|
||||
error=str(e),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def get_with_regex(cls, value_regex):
|
||||
try:
|
||||
already_tokens = redis_cli.scan_iter(match=str(value_regex))
|
||||
for already_token in already_tokens:
|
||||
return RedisResponse(
|
||||
status=True,
|
||||
message="Single value is retrieved successfully.",
|
||||
data=json.loads(redis_cli.get(already_token)),
|
||||
)
|
||||
except Exception as e:
|
||||
return RedisResponse(
|
||||
status=False,
|
||||
message="Values are not listed successfully.",
|
||||
error=str(e),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def list_all(cls, value_regex):
|
||||
try:
|
||||
already_tokens = redis_cli.scan_iter(match=str(value_regex))
|
||||
already_tokens_list = {}
|
||||
for already_token in already_tokens:
|
||||
already_tokens_list[already_token.decode()] = json.loads(redis_cli.get(already_token))
|
||||
return RedisResponse(
|
||||
status=True,
|
||||
message="Values are listed successfully.",
|
||||
data=already_tokens_list,
|
||||
)
|
||||
except Exception as e:
|
||||
return RedisResponse(
|
||||
status=False,
|
||||
message="Values are not listed successfully.",
|
||||
error=str(e),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def set_replace_all(cls, value, value_regex):
|
||||
try:
|
||||
@@ -159,84 +179,218 @@ class RedisActions:
|
||||
error=str(e),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def set_json_with_expiry(cls, name, value, **expiry_kwargs):
|
||||
"""Set JSON value with expiry time
|
||||
Example: set_json_with_expiry('key', value, hours=1, minutes=30)
|
||||
"""
|
||||
try:
|
||||
search_name = str(name) if isinstance(name, str) else name.decode()
|
||||
expiry_time = system_arrow.get_expiry_time(**expiry_kwargs)
|
||||
seconds_until_expiry = int(expiry_time.timestamp() - system_arrow.now().timestamp())
|
||||
|
||||
# def get_object_via_access_key(
|
||||
# request,
|
||||
# ):
|
||||
#
|
||||
# if not hasattr(request, "headers"):
|
||||
# raise redis_imports.exceptions(
|
||||
# status_code=401,
|
||||
# detail=dict(
|
||||
# message="Headers are not found in request. Invalid request object."
|
||||
# ),
|
||||
# )
|
||||
# if not request.headers.get(redis_imports.ACCESS_TOKEN_TAG):
|
||||
# raise redis_imports.exceptions(
|
||||
# status_code=401,
|
||||
# detail=dict(message="Unauthorized user, please login..."),
|
||||
# )
|
||||
# already_tokens = redis_cli.scan_iter(
|
||||
# match=str(request.headers.get(redis_imports.ACCESS_TOKEN_TAG) + ":*")
|
||||
# )
|
||||
# if already_tokens := list(already_tokens):
|
||||
# try:
|
||||
# if redis_object := json.loads(
|
||||
# redis_cli.get(already_tokens[0].decode()) or {}
|
||||
# ):
|
||||
# if redis_object.get("user_type") == 1:
|
||||
# if not redis_object.get("selected_company", None):
|
||||
# redis_object["selected_company"] = None
|
||||
# return redis_imports.EmployeeTokenObject(**redis_object)
|
||||
# elif redis_object.get("user_type") == 2:
|
||||
# if not redis_object.get("selected_occupant", None):
|
||||
# redis_object["selected_occupant"] = None
|
||||
# return redis_imports.OccupantTokenObject(**redis_object)
|
||||
# raise redis_imports.exceptions(
|
||||
# status_code=401,
|
||||
# detail=dict(
|
||||
# message="User type is not found in the token object. Please reach to your administrator."
|
||||
# ),
|
||||
# )
|
||||
# except Exception as e:
|
||||
# raise redis_imports.exceptions(
|
||||
# status_code=500,
|
||||
# detail={
|
||||
# "message": "Redis Service raised an exception.",
|
||||
# "error": str(e),
|
||||
# },
|
||||
# )
|
||||
#
|
||||
# raise redis_imports.exceptions(
|
||||
# status_code=redis_imports.status.HTTP_401_UNAUTHORIZED,
|
||||
# detail="Invalid credentials. Please login again.",
|
||||
# )
|
||||
#
|
||||
#
|
||||
# def get_object_via_user_uu_id(user_id: str) -> typing.Union[dict, None]:
|
||||
# already_tokens = redis_cli.scan_iter(match=str("*:" + str(user_id)))
|
||||
# already_tokens_list, already_tokens_dict = [], {}
|
||||
# for already_token in already_tokens:
|
||||
# redis_object = json.loads(redis_cli.get(already_token) or {})
|
||||
# already_tokens_list.append(redis_object)
|
||||
# already_tokens_dict[already_token.decode()] = redis_object
|
||||
# return already_tokens_dict
|
||||
#
|
||||
#
|
||||
# def save_object_to_redis(
|
||||
# access_token, model_object, redis_imports: RedisImports
|
||||
# ) -> bool:
|
||||
# try:
|
||||
# if redis_cli.set(
|
||||
# name=str(access_token) + ":" + str(model_object.user_uu_id),
|
||||
# value=model_object.model_dump_json(),
|
||||
# ):
|
||||
# return access_token
|
||||
# except Exception as e:
|
||||
# print("Save Object to Redis Error: ", e)
|
||||
# raise redis_imports.exceptions(
|
||||
# status_code=redis_imports.status.HTTP_503_SERVICE_UNAVAILABLE,
|
||||
# detail=dict(
|
||||
# message="Headers are not found in request. Invalid request object. Redis Error: Token is not saved."
|
||||
# ),
|
||||
# )
|
||||
redis_cli.setex(
|
||||
name=search_name,
|
||||
time=seconds_until_expiry,
|
||||
value=json.dumps({
|
||||
'value': value,
|
||||
'expires_at': expiry_time.timestamp()
|
||||
})
|
||||
)
|
||||
|
||||
return RedisResponse(
|
||||
status=True,
|
||||
message="Value is set successfully with expiry.",
|
||||
data={'value': value, 'expires_at': expiry_time.timestamp()},
|
||||
)
|
||||
except Exception as e:
|
||||
return RedisResponse(
|
||||
status=False,
|
||||
message="Value is not set successfully.",
|
||||
error=str(e),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def get_json_if_valid(cls, name):
|
||||
"""Get JSON value if not expired"""
|
||||
try:
|
||||
search_name = str(name) if isinstance(name, str) else name.decode()
|
||||
result = redis_cli.get(name=search_name)
|
||||
|
||||
if not result:
|
||||
return RedisResponse(
|
||||
status=False,
|
||||
message="Key not found.",
|
||||
)
|
||||
|
||||
data = json.loads(result)
|
||||
if system_arrow.is_expired(data.get('expires_at')):
|
||||
redis_cli.delete(search_name)
|
||||
return RedisResponse(
|
||||
status=False,
|
||||
message="Cache expired.",
|
||||
)
|
||||
|
||||
return RedisResponse(
|
||||
status=True,
|
||||
message="Value retrieved successfully.",
|
||||
data=data['value'],
|
||||
)
|
||||
except Exception as e:
|
||||
return RedisResponse(
|
||||
status=False,
|
||||
message="Error retrieving value.",
|
||||
error=str(e),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def cache_with_timezone(cls, name, value, timezone=None, **expiry_kwargs):
|
||||
"""Cache value with timezone-aware key"""
|
||||
try:
|
||||
dt = DateTimeLocal(timezone=timezone) if timezone else system_arrow
|
||||
cache_key = dt.get_cache_key(name)
|
||||
return cls.set_json_with_expiry(cache_key, value, **expiry_kwargs)
|
||||
except Exception as e:
|
||||
return RedisResponse(
|
||||
status=False,
|
||||
message="Error caching with timezone.",
|
||||
error=str(e),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def save_object_to_redis(cls, access_token, model_object, expiry_minutes: int = 30):
|
||||
"""Save object to Redis with expiry time
|
||||
Args:
|
||||
access_token: The access token to use as key prefix
|
||||
model_object: The model object to save (must have user_uu_id attribute and model_dump_json method)
|
||||
expiry_minutes: Minutes until the token expires (default: 30)
|
||||
Returns:
|
||||
str: The access token if successful
|
||||
Raises:
|
||||
Exception: If saving fails
|
||||
"""
|
||||
try:
|
||||
key = f"{access_token}:{model_object.user_uu_id}"
|
||||
expiry_time = system_arrow.get_expiry_time(minutes=expiry_minutes)
|
||||
seconds_until_expiry = max(1, int(expiry_time.timestamp() - system_arrow.now().timestamp()))
|
||||
|
||||
# Add expiry time to the model data
|
||||
model_data = json.loads(model_object.model_dump_json())
|
||||
model_data['expires_at'] = expiry_time.timestamp()
|
||||
|
||||
if redis_cli.setex(
|
||||
name=key,
|
||||
time=seconds_until_expiry,
|
||||
value=json.dumps(model_data)
|
||||
):
|
||||
return access_token
|
||||
|
||||
except Exception as e:
|
||||
raise Exception(
|
||||
f"Failed to save object to Redis. Error: {str(e)}"
|
||||
)
|
||||
|
||||
raise Exception("Failed to save token to Redis")
|
||||
|
||||
@classmethod
|
||||
def get_object_via_access_key(cls, request):
|
||||
"""Get object from Redis using access key from request headers
|
||||
Args:
|
||||
request: The request object containing headers
|
||||
Returns:
|
||||
Union[EmployeeTokenObject, OccupantTokenObject]: The token object
|
||||
Raises:
|
||||
Exception: If retrieval fails or token is invalid
|
||||
"""
|
||||
from api_configs.configs import Auth
|
||||
from api_library.date_time_actions.date_functions import system_arrow
|
||||
|
||||
if not hasattr(request, "headers"):
|
||||
raise Exception("Headers not found in request")
|
||||
|
||||
access_token = request.headers.get(Auth.ACCESS_TOKEN_TAG)
|
||||
if not access_token:
|
||||
raise Exception("Unauthorized user, please login")
|
||||
|
||||
# Scan for matching tokens
|
||||
token_pattern = f"{access_token}:*"
|
||||
matching_tokens = list(redis_cli.scan_iter(match=token_pattern))
|
||||
|
||||
if not matching_tokens:
|
||||
raise Exception("Invalid credentials. Please login again")
|
||||
|
||||
try:
|
||||
# Check if token has expired in Redis
|
||||
token_key = matching_tokens[0]
|
||||
ttl = redis_cli.ttl(token_key)
|
||||
if ttl <= 0:
|
||||
redis_cli.delete(token_key)
|
||||
raise Exception("Token expired. Please login again")
|
||||
|
||||
# Get the token data
|
||||
token_data = json.loads(redis_cli.get(token_key) or '{}')
|
||||
|
||||
# Return appropriate token object based on user type
|
||||
if token_data.get("user_type") == 1: # Employee
|
||||
if not token_data.get("selected_company"):
|
||||
token_data["selected_company"] = None
|
||||
return EmployeeTokenObject(**token_data)
|
||||
|
||||
elif token_data.get("user_type") == 2: # Occupant
|
||||
if not token_data.get("selected_occupant"):
|
||||
token_data["selected_occupant"] = None
|
||||
return OccupantTokenObject(**token_data)
|
||||
|
||||
raise Exception("Invalid user type in token")
|
||||
|
||||
except Exception as e:
|
||||
raise Exception(f"Failed to retrieve token: {str(e)}")
|
||||
|
||||
@classmethod
|
||||
def get_object_via_user_uu_id(cls, user_id: str) -> typing.Union[dict, None]:
|
||||
"""Get all objects for a user by UUID
|
||||
Args:
|
||||
user_id: The user UUID to search for
|
||||
Returns:
|
||||
dict: Dictionary of token keys and their corresponding objects
|
||||
"""
|
||||
token_pattern = f"*:{str(user_id)}"
|
||||
matching_tokens = redis_cli.scan_iter(match=token_pattern)
|
||||
|
||||
tokens_dict = {}
|
||||
for token_key in matching_tokens:
|
||||
token_data = json.loads(redis_cli.get(token_key) or '{}')
|
||||
|
||||
# Skip expired tokens and clean them up
|
||||
if system_arrow.is_expired(token_data.get('expires_at')):
|
||||
redis_cli.delete(token_key)
|
||||
continue
|
||||
|
||||
tokens_dict[token_key.decode()] = token_data
|
||||
|
||||
return tokens_dict
|
||||
|
||||
|
||||
class RedisResponse:
    """Result envelope for RedisActions, tagging the payload's container type.

    ``data_type`` is "dict", "list", or None.  Bug fix: for any payload that
    was neither dict, list, nor None the attribute was never assigned, so
    ``as_dict`` raised AttributeError; it is now always defined.  The data
    annotation ``typing.Union[dict | list]`` was also normalised.
    """

    def __init__(
        self, status: bool, message: str, data: typing.Union[dict, list, None] = None, error: str = None
    ):
        self.status = status
        self.message = message
        self.data = data
        if isinstance(data, dict):
            self.data_type = "dict"
        elif isinstance(data, list):
            self.data_type = "list"
        else:
            # Covers None and any unexpected payload type.
            self.data_type = None
        self.error = error

    def as_dict(self):
        """Serialise the envelope, including the payload type tag."""
        return {
            "status": self.status,
            "message": self.message,
            "data": self.data,
            "data_type": self.data_type,
            "error": self.error,
        }
|
||||
|
||||
75
api_services/token_service.py
Normal file
75
api_services/token_service.py
Normal file
@@ -0,0 +1,75 @@
|
||||
from typing import Optional, Union, Dict, Any
|
||||
from fastapi.requests import Request
|
||||
from fastapi import HTTPException, status
|
||||
|
||||
from api_services.redis.functions import RedisActions
|
||||
from api_objects import OccupantTokenObject, EmployeeTokenObject
|
||||
from api_configs import Auth
|
||||
from databases import Users, UsersTokens
|
||||
from api_library.date_time_actions.date_functions import system_arrow
|
||||
|
||||
class TokenService:
    """Facade over the Redis- and DB-backed token operations used by auth flows."""

    @staticmethod
    def validate_token(request: Request) -> Union[OccupantTokenObject, EmployeeTokenObject]:
        """Validate and return token object from request.

        Raises HTTPException(401) with the underlying message on any failure.
        """
        try:
            return RedisActions.get_object_via_access_key(request)
        except Exception as e:
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail={"message": str(e)}
            )

    @staticmethod
    def get_user_tokens(user_id: str) -> Dict[str, Any]:
        """Get all valid tokens for a user"""
        return RedisActions.get_object_via_user_uu_id(user_id)

    @staticmethod
    def validate_refresh_token(domain: str, refresh_token: str) -> Optional[UsersTokens]:
        """Validate refresh token and return token object"""
        return UsersTokens.filter_by_one(
            token=refresh_token,
            domain=domain,
            **UsersTokens.valid_record_dict
        ).data

    @staticmethod
    def update_user_metadata(user: Users, request: Request):
        """Persist the request's agent/platform/address and bump last_seen."""
        user.last_agent = request.headers.get("User-Agent")
        user.last_platform = request.headers.get("Origin")
        # NOTE(review): X-Forwarded-For is client-controlled input.
        user.last_remote_addr = (
            getattr(request, "remote_addr", None) or
            request.headers.get("X-Forwarded-For")
        )
        user.last_seen = str(system_arrow.now())
        user.save()

    @staticmethod
    def clear_user_tokens(user_id: str, domain: Optional[str] = None):
        """Clear user tokens from Redis.

        Args:
            user_id: User UUID
            domain: Optional domain to clear tokens for specific domain only

        Bug fix: the old code called ``RedisActions.redis_cli.delete`` —
        ``redis_cli`` is a module-level name in ``api_services.redis.conn``,
        not an attribute of RedisActions, so every call raised
        AttributeError.  The client is now imported directly.
        """
        from api_services.redis.conn import redis_cli

        tokens = RedisActions.get_object_via_user_uu_id(user_id)
        for key, token_data in tokens.items():
            if domain is None or token_data.get("domain") == domain:
                redis_cli.delete(key)

    @staticmethod
    def validate_password_token(token: str) -> Optional[Users]:
        """Validate password reset token and return user.

        Returns None when the token is unknown or expired; an expired token
        is cleared on the user record so it cannot be retried.
        """
        user = Users.filter_one(Users.password_token == token).data
        if not user:
            return None

        # Check if token is expired.
        token_valid_until = system_arrow.get(str(user.password_token_is_valid))
        if system_arrow.now() > token_valid_until:
            user.password_token = ""
            user.save()
            return None

        return user
|
||||
@@ -1,136 +0,0 @@
|
||||
from typing import Optional
|
||||
from api_validations.core_validations import BaseModelRegular
|
||||
from api_validations.validations_request import (
|
||||
CrudRecordValidation,
|
||||
CrudRecords,
|
||||
)
|
||||
|
||||
|
||||
class AccountListValidation:
    """Field-label translations (Turkish / English) for account-record list views.

    Both dicts extend the shared CrudRecordValidation labels with the
    account-specific column names; keys mirror the field names declared on
    AccountListResponse.
    """

    # Turkish labels (values are runtime UI strings — intentionally not translated).
    tr = {
        **CrudRecordValidation.tr,
        "iban": "IBAN Numarası",
        "bank_date": "Banka İşlem Tarihi",
        "currency_value": "Para Birimi Değeri",
        "bank_balance": "Banka Bakiyesi",
        "currency": "Para Birimi Birimi",
        "additional_balance": "Ek Bakiye",
        "channel_branch": "Şube Banka",
        "process_name": "Banka İşlem Türü Adı",
        "process_type": "Banka İşlem Türü",
        "process_comment": "İşlem Kayıt Yorumu",
        "process_garbage": "İşlem Kayıt Çöpü",
        "bank_reference_code": "Banka Referans Kodu",
        "add_comment_note": "Yorum Not Ekle",
        "is_receipt_mail_send": "Makbuz Posta Gönderildi",
        "found_from": "Bulunduğu Yer",
        "similarity": "Benzerlik",
        "remainder_balance": "Kalan Bakiye",
        "bank_date_y": "Banka İşlem Yılı",
        "bank_date_m": "Banka İşlem Ayı",
        "bank_date_w": "Banka İşlem Haftası",
        "bank_date_d": "Banka İşlem Günü",
        "approving_accounting_record": "Onaylayan Muhasebe Kaydı",
        "accounting_receipt_date": "Muhasebe Makbuz Tarihi",
        "accounting_receipt_number": "Muhasebe Makbuz Numarası",
        "approved_record": "Onaylanmış Kayıt",
        "import_file_name": "İçe Aktarım Dosya Adı",
        "receive_debit": "Alacak Borç",
        "receive_debit_uu_id": "Alacak Borç UU Kimliği",
        "budget_type": "Bütçe Türü",
        "budget_type_uu_id": "Bütçe Türü UU Kimliği",
        "company_uu_id": "Şirket UU Kimliği",
        "send_company_uu_id": "Şirket UU Kimliği Gönder",
        "send_person_uu_id": "Kişi UU Kimliği Gönder",
        "approving_accounting_person_uu_id": "Onaylayan Muhasebe Kişi UU Kimliği",
        "living_space_uu_id": "Yaşam Alanı UU Kimliği",
        "customer_uu_id": "Müşteri UU Kimliği",
        "build_uu_id": "Yapı UU Kimliği",
        "build_parts_uu_id": "Yapı Parça UU Kimliği",
        "build_decision_book_uu_id": "Yapı Karar Defteri UU Kimliği",
    }
    # English labels — must stay key-for-key parallel with ``tr`` above.
    en = {
        **CrudRecordValidation.en,
        "iban": "IBAN Number",
        "bank_date": "Bank Transaction Date",
        "currency_value": "Currency Value",
        "bank_balance": "Bank Balance",
        "currency": "Unit of Currency",
        "additional_balance": "Additional Balance",
        "channel_branch": "Branch Bank",
        "process_name": "Bank Process Type Name",
        "process_type": "Bank Process Type",
        "process_comment": "Transaction Record Comment",
        "process_garbage": "Transaction Record Garbage",
        "bank_reference_code": "Bank Reference Code",
        "add_comment_note": "Add Comment Note",
        "is_receipt_mail_send": "Receipt Mail Send",
        "found_from": "Found From",
        "similarity": "Similarity",
        "remainder_balance": "Remainder Balance",
        "bank_date_y": "Bank Date Year",
        "bank_date_m": "Bank Date Month",
        "bank_date_w": "Bank Date Week",
        "bank_date_d": "Bank Date Day",
        "approving_accounting_record": "Approving Accounting Record",
        "accounting_receipt_date": "Accounting Receipt Date",
        "accounting_receipt_number": "Accounting Receipt Number",
        "approved_record": "Approved Record",
        "import_file_name": "Import File Name",
        "receive_debit": "Receive Debit",
        "receive_debit_uu_id": "Receive Debit UU ID",
        "budget_type": "Budget Type",
        "budget_type_uu_id": "Budget Type UU ID",
        "company_uu_id": "Company UU ID",
        "send_company_uu_id": "Send Company UU ID",
        "send_person_uu_id": "Send Person UU ID",
        "approving_accounting_person_uu_id": "Approving Accounting Person UU ID",
        "living_space_uu_id": "Living Space UU ID",
        "customer_uu_id": "Customer UU ID",
        "build_uu_id": "Build UU ID",
        "build_parts_uu_id": "Build Parts UU ID",
        "build_decision_book_uu_id": "Build Decision Book UU ID",
    }
|
||||
|
||||
|
||||
class AccountListResponse(BaseModelRegular, CrudRecords, AccountListValidation):
    """Serialisable row model for account-record listings.

    Inherits CRUD bookkeeping fields from CrudRecords and the tr/en label
    dicts from AccountListValidation.
    """

    # NOTE(review): every column — including numeric/date-looking ones — is
    # typed Optional[str]; presumably values arrive pre-formatted for
    # display.  Confirm before tightening the types.
    iban: Optional[str] = None
    bank_date: Optional[str] = None
    currency_value: Optional[str] = None
    bank_balance: Optional[str] = None
    currency: Optional[str] = None
    additional_balance: Optional[str] = None
    channel_branch: Optional[str] = None
    process_name: Optional[str] = None
    process_type: Optional[str] = None
    process_comment: Optional[str] = None
    process_garbage: Optional[str] = None
    bank_reference_code: Optional[str] = None
    add_comment_note: Optional[str] = None
    is_receipt_mail_send: Optional[str] = None
    found_from: Optional[str] = None
    similarity: Optional[str] = None
    remainder_balance: Optional[str] = None
    bank_date_y: Optional[str] = None
    bank_date_m: Optional[str] = None
    bank_date_w: Optional[str] = None
    bank_date_d: Optional[str] = None
    approving_accounting_record: Optional[str] = None
    accounting_receipt_date: Optional[str] = None
    accounting_receipt_number: Optional[str] = None
    approved_record: Optional[str] = None
    import_file_name: Optional[str] = None
    receive_debit: Optional[str] = None
    receive_debit_uu_id: Optional[str] = None
    budget_type: Optional[str] = None
    budget_type_uu_id: Optional[str] = None
    company_uu_id: Optional[str] = None
    send_company_uu_id: Optional[str] = None
    send_person_uu_id: Optional[str] = None
    approving_accounting_person_uu_id: Optional[str] = None
    living_space_uu_id: Optional[str] = None
    customer_uu_id: Optional[str] = None
    build_uu_id: Optional[str] = None
    build_parts_uu_id: Optional[str] = None
    build_decision_book_uu_id: Optional[str] = None
|
||||
234
api_validations/validations_response/account_responses.py
Normal file
234
api_validations/validations_response/account_responses.py
Normal file
@@ -0,0 +1,234 @@
|
||||
from pydantic import BaseModel
|
||||
from typing import Optional, List
|
||||
from datetime import datetime
|
||||
from decimal import Decimal
|
||||
from uuid import UUID
|
||||
from .base_responses import BaseResponse, CrudCollection
|
||||
|
||||
|
||||
class AccountBooksResponse(BaseResponse):
    """Response model for account books"""

    # NOTE(review): Optional fields without "= None" are required-but-nullable
    # under pydantic v2 — confirm that is intended here.
    country: str
    branch_type: int
    company_id: int
    company_uu_id: str
    branch_id: Optional[int]
    branch_uu_id: Optional[str]
|
||||
|
||||
|
||||
class AccountBooksCollection(CrudCollection[AccountBooksResponse]):
    """Collection of account books"""
    # Marker subclass: no members beyond the generic CrudCollection payload.
    pass
|
||||
|
||||
|
||||
class AccountCodesResponse(BaseResponse):
    """Response model for account codes"""

    account_code: str
    comment_line: str
    is_receive_or_debit: bool
    product_id: int = 0
    nvi_id: str = ""
    status_id: int = 0
    # NOTE: "seperator" (sic) — keep the misspelling; it is part of the
    # public field name and wire format.
    account_code_seperator: str = "."
    system_id: int = 0
    locked: bool = False
    # NOTE(review): Optional ids without "= None" are required-but-nullable
    # under pydantic v2 — confirm intent.
    company_id: Optional[int]
    company_uu_id: str
    customer_id: Optional[int]
    customer_uu_id: str
    person_id: Optional[int]
    person_uu_id: str
|
||||
|
||||
|
||||
class AccountCodesCollection(CrudCollection[AccountCodesResponse]):
    """Collection of account codes"""
    # Marker subclass: no members beyond the generic CrudCollection payload.
    pass
|
||||
|
||||
|
||||
class AccountCodeParserResponse(BaseResponse):
    """Response model for account code parser"""

    # Segments 1-3 are required; 4-6 default to empty (shorter codes).
    account_code_1: str
    account_code_2: str
    account_code_3: str
    account_code_4: str = ""
    account_code_5: str = ""
    account_code_6: str = ""
    account_code_id: int
    account_code_uu_id: str
|
||||
|
||||
|
||||
class AccountCodeParserCollection(CrudCollection[AccountCodeParserResponse]):
    """Collection of account code parsers"""
    # Marker subclass: no members beyond the generic CrudCollection payload.
    pass
|
||||
|
||||
|
||||
class AccountMasterResponse(BaseResponse):
    """Response model for account master"""

    doc_date: datetime
    plug_type: str
    plug_number: int
    special_code: str = ""
    authorization_code: str = ""
    doc_code: str = ""
    doc_type: int = 0
    comment_line1: str = ""
    comment_line2: str = ""
    comment_line3: str = ""
    comment_line4: str = ""
    comment_line5: str = ""
    comment_line6: str = ""
    project_code: str = ""
    module_no: str = ""
    journal_no: int = 0
    status_id: int = 0
    canceled: bool = False
    print_count: int = 0
    # Monetary totals use Decimal for exact arithmetic.
    total_active: Decimal = Decimal("0")
    total_passive: Decimal = Decimal("0")
    total_active_1: Decimal = Decimal("0")
    total_passive_1: Decimal = Decimal("0")
    total_active_2: Decimal = Decimal("0")
    total_passive_2: Decimal = Decimal("0")
    total_active_3: Decimal = Decimal("0")
    total_passive_3: Decimal = Decimal("0")
    total_active_4: Decimal = Decimal("0")
    total_passive_4: Decimal = Decimal("0")
    cross_ref: int = 0
    data_center_id: str = ""
    data_center_rec_num: int = 0
    account_header_id: int
    account_header_uu_id: str
    # NOTE(review): Optional without "= None" is required-but-nullable under
    # pydantic v2 — confirm intent.
    project_item_id: Optional[int]
    project_item_uu_id: Optional[str]
    department_id: Optional[int]
    department_uu_id: Optional[str]
|
||||
|
||||
|
||||
class AccountMasterCollection(CrudCollection[AccountMasterResponse]):
    """Collection of account masters"""
    # Marker subclass: no members beyond the generic CrudCollection payload.
    pass
|
||||
|
||||
|
||||
class AccountDetailResponse(BaseResponse):
    """Response model for account detail"""

    doc_date: datetime
    line_no: int
    receive_debit: str
    debit: Decimal
    department: str = ""
    special_code: str = ""
    account_ref: int = 0
    account_fiche_ref: int = 0
    center_ref: int = 0
    general_code: str = ""
    # Monetary amounts use Decimal for exact arithmetic.
    credit: Decimal = Decimal("0")
    currency_type: str = "TL"
    exchange_rate: Decimal = Decimal("0")
    debit_cur: Decimal = Decimal("0")
    credit_cur: Decimal = Decimal("0")
    discount_cur: Decimal = Decimal("0")
    amount: Decimal = Decimal("0")
    cross_account_code: str = ""
    inf_index: Decimal = Decimal("0")
    not_inflated: int = 0
    not_calculated: int = 0
    comment_line1: str = ""
    comment_line2: str = ""
    comment_line3: str = ""
    comment_line4: str = ""
    comment_line5: str = ""
    comment_line6: str = ""
    owner_acc_ref: int = 0
    from_where: int = 0
    orj_eid: int = 0
    # NOTE(review): ``canceled`` is int here but bool on AccountMasterResponse,
    # and ``data_center_rec_num`` is str here but int there — confirm whether
    # these differences are deliberate before aligning.
    canceled: int = 0
    cross_ref: int = 0
    data_center_id: str = ""
    data_center_rec_num: str = "0"
    status_id: int = 0
    plug_type_id: Optional[int]
    plug_type_uu_id: str
    account_header_id: int
    account_header_uu_id: str
    account_code_id: int
    account_code_uu_id: str
    account_master_id: int
    account_master_uu_id: str
    project_id: Optional[int]
    project_uu_id: Optional[str]
|
||||
|
||||
|
||||
class AccountDetailCollection(CrudCollection[AccountDetailResponse]):
    """Collection of account details"""
    # Marker subclass: no members beyond the generic CrudCollection payload.
    pass
|
||||
|
||||
|
||||
class AccountRecordResponse(BaseResponse):
    """Response model for a bank account record (imported statement line).

    Fix: bare ``Optional[...]`` annotations are *required* fields in
    pydantic v2; each now defaults to ``None`` so it can be omitted
    (backward compatible).
    """

    iban: str
    bank_date: datetime
    currency_value: Decimal
    bank_balance: Decimal
    currency: str = "TRY"
    additional_balance: Decimal = Decimal("0")
    channel_branch: str
    process_name: str
    process_type: str
    process_comment: str
    bank_reference_code: str
    add_comment_note: Optional[str] = None
    is_receipt_mail_send: Optional[bool] = False
    # Matching metadata — presumably filled by the reconciliation step; TODO confirm.
    found_from: Optional[str] = None
    similarity: Optional[float] = None
    remainder_balance: Optional[Decimal] = None
    # Denormalized date parts (year/month/week/day) of bank_date.
    bank_date_y: Optional[int] = None
    bank_date_m: Optional[int] = None
    bank_date_w: Optional[int] = None
    bank_date_d: Optional[int] = None
    approving_accounting_record: Optional[bool] = None
    accounting_receipt_date: Optional[datetime] = None
    accounting_receipt_number: Optional[int] = None
    approved_record: Optional[bool] = None
    import_file_name: Optional[str] = None
    receive_debit_uu_id: Optional[str] = None
    budget_type_uu_id: Optional[str] = None
    company_uu_id: Optional[str] = None
    send_company_uu_id: Optional[str] = None
    customer_id: Optional[str] = None
    customer_uu_id: Optional[str] = None
    send_person_uu_id: Optional[str] = None
    approving_accounting_person_uu_id: Optional[str] = None
    build_parts_uu_id: Optional[str] = None
    build_decision_book_uu_id: Optional[str] = None
|
||||
|
||||
|
||||
class AccountRecordCollection(CrudCollection[AccountRecordResponse]):
    """Paginated collection of AccountRecordResponse items."""
    pass
|
||||
|
||||
|
||||
class AccountRecordExchangeResponse(BaseResponse):
    """Response model for the exchange-rate snapshot attached to an account record."""
    account_record_id: int
    account_record_uu_id: str
    exchange_rate: Decimal
    exchange_currency: str = "TRY"
    exchange_value: Decimal
    exchange_date: datetime
|
||||
|
||||
|
||||
class AccountRecordExchangeCollection(CrudCollection[AccountRecordExchangeResponse]):
    """Paginated collection of AccountRecordExchangeResponse items."""
    pass
|
||||
|
||||
|
||||
class AccountRecordsListResponse(BaseModel):
    """Response model for the account-records list endpoint.

    Fix: ``updated_at`` was a bare ``Optional[datetime]``, which pydantic v2
    treats as required; it now defaults to ``None``.
    """

    uu_id: UUID
    account_name: str
    account_code: str
    company_id: int
    company_uu_id: str
    created_at: datetime
    updated_at: Optional[datetime] = None
    deleted: bool = False
|
||||
33
api_validations/validations_response/auth_responses.py
Normal file
33
api_validations/validations_response/auth_responses.py
Normal file
@@ -0,0 +1,33 @@
|
||||
from pydantic import BaseModel
|
||||
from typing import Optional, List, Dict, Any
|
||||
from datetime import datetime
|
||||
from uuid import UUID
|
||||
|
||||
|
||||
class AuthenticationLoginResponse(BaseModel):
    """Response model for the authentication login endpoint."""
    token: str
    refresh_token: str
    # Token scheme, e.g. "bearer" -- TODO confirm against the issuing handler.
    token_type: str
    # Lifetime in seconds -- presumably; verify against token issuer.
    expires_in: int
    user_info: Dict[str, Any]
|
||||
|
||||
|
||||
class AuthenticationRefreshResponse(BaseModel):
    """Response model for the authentication token-refresh endpoint."""
    token: str
    refresh_token: str
    token_type: str
    expires_in: int
|
||||
|
||||
|
||||
class AuthenticationUserInfoResponse(BaseModel):
    """Response model for the authentication user-info endpoint.

    Fix: ``updated_at`` was a bare ``Optional[datetime]`` (required in
    pydantic v2); it now defaults to ``None``.
    """

    user_id: int
    username: str
    email: str
    first_name: str
    last_name: str
    is_active: bool
    created_at: datetime
    updated_at: Optional[datetime] = None
|
||||
38
api_validations/validations_response/base_responses.py
Normal file
38
api_validations/validations_response/base_responses.py
Normal file
@@ -0,0 +1,38 @@
|
||||
from pydantic import BaseModel
|
||||
from typing import Optional, TypeVar, Generic, List
|
||||
from datetime import datetime
|
||||
from uuid import UUID
|
||||
|
||||
T = TypeVar('T')
|
||||
|
||||
class BaseResponse(BaseModel):
    """Base response model that all other response models inherit from.

    Fix: the audit fields were bare ``Optional[...]`` (required in pydantic
    v2) while the sibling flags already carried defaults; every Optional now
    defaults consistently so subclasses can omit them.
    """

    uu_id: str
    created_at: datetime
    updated_at: Optional[datetime] = None
    created_by: Optional[str] = None
    updated_by: Optional[str] = None
    confirmed_by: Optional[str] = None
    is_confirmed: Optional[bool] = None
    active: Optional[bool] = True
    deleted: Optional[bool] = False
    expiry_starts: Optional[datetime] = None
    expiry_ends: Optional[datetime] = None
    is_notification_send: Optional[bool] = False
    is_email_send: Optional[bool] = False

    class Config:
        # Allow construction from ORM/attribute objects (pydantic v2 name).
        from_attributes = True
|
||||
|
||||
|
||||
class CrudCollection(BaseModel, Generic[T]):
    """Base collection model for paginated responses.

    Carries one page of ``items`` plus paging/ordering metadata. The mutable
    ``[]`` default is safe here: pydantic deep-copies field defaults per
    instance.
    """
    page: int = 1  # 1-based page number
    size: int = 10  # page size
    total: int = 0  # total matching records across all pages
    order_field: str = "id"
    order_type: str = "asc"  # presumably "asc"/"desc" -- TODO confirm accepted values
    items: List[T] = []

    class Config:
        from_attributes = True
|
||||
76
api_validations/validations_response/budget_responses.py
Normal file
76
api_validations/validations_response/budget_responses.py
Normal file
@@ -0,0 +1,76 @@
|
||||
from pydantic import BaseModel
|
||||
from typing import Optional, List
|
||||
from datetime import datetime
|
||||
from uuid import UUID
|
||||
from decimal import Decimal
|
||||
from .base_responses import BaseResponse, CrudCollection
|
||||
|
||||
|
||||
class DecisionBookBudgetBooksResponse(BaseResponse):
    """Response model for decision-book budget books.

    Fix: bare ``Optional[...]`` fields (required in pydantic v2) now default
    to ``None``.
    """

    country: str
    branch_type: int = 0
    company_id: int
    company_uu_id: str
    branch_id: Optional[int] = None
    branch_uu_id: Optional[str] = None
    build_decision_book_id: int
    build_decision_book_uu_id: Optional[str] = None
|
||||
|
||||
|
||||
class DecisionBookBudgetBooksCollection(CrudCollection[DecisionBookBudgetBooksResponse]):
    """Paginated collection of DecisionBookBudgetBooksResponse items."""
    pass
|
||||
|
||||
|
||||
class DecisionBookBudgetCodesResponse(BaseResponse):
    """Response model for decision-book budget codes.

    Fix: bare ``Optional[...]`` fields (required in pydantic v2) now default
    to ``None``.
    """

    budget_code: str
    comment_line: str
    budget_type: str
    # NOTE(review): field name "seperator" (sic) kept -- renaming would break the API schema.
    budget_code_seperator: str = "."
    system_id: int = 0
    locked: bool = False
    company_id: Optional[int] = None
    company_uu_id: str
    customer_id: Optional[int] = None
    customer_uu_id: str
|
||||
|
||||
|
||||
class DecisionBookBudgetCodesCollection(CrudCollection[DecisionBookBudgetCodesResponse]):
    """Paginated collection of DecisionBookBudgetCodesResponse items."""
    pass
|
||||
|
||||
|
||||
class DecisionBookBudgetMasterResponse(BaseResponse):
    """Response model for the decision-book budget master record.

    Fix: bare ``Optional[...]`` fields (required in pydantic v2) now default
    to ``None``.
    """

    budget_type: str
    currency: str = "TRY"
    total_budget: Decimal
    tracking_period_id: Optional[int] = None
    tracking_period_uu_id: Optional[str] = None
    budget_books_id: int
    budget_books_uu_id: Optional[str] = None
    department_id: int
    department_uu_id: Optional[str] = None
|
||||
|
||||
|
||||
class DecisionBookBudgetMasterCollection(CrudCollection[DecisionBookBudgetMasterResponse]):
    """Paginated collection of DecisionBookBudgetMasterResponse items."""
    pass
|
||||
|
||||
|
||||
class DecisionBookBudgetsResponse(BaseResponse):
    """Response model for decision-book budget line items.

    Fix: the bare ``Optional[...]`` field (required in pydantic v2) now
    defaults to ``None``.
    """

    process_date: datetime
    budget_codes_id: int
    total_budget: Decimal
    used_budget: Decimal = Decimal("0")
    remaining_budget: Decimal = Decimal("0")
    decision_book_budget_master_id: int
    decision_book_budget_master_uu_id: Optional[str] = None
|
||||
|
||||
|
||||
class DecisionBookBudgetsCollection(CrudCollection[DecisionBookBudgetsResponse]):
    """Paginated collection of DecisionBookBudgetsResponse items."""
    pass
|
||||
@@ -1,123 +0,0 @@
|
||||
from typing import Optional
|
||||
from api_validations.core_validations import BaseModelRegular
|
||||
from api_validations.validations_request import (
|
||||
CrudRecordValidation,
|
||||
CrudRecords,
|
||||
)
|
||||
|
||||
|
||||
class BuildListValidation:
    """Field-label translations (Turkish / English) for building list validation.

    Each dict maps a field name to its human-readable label, extending the
    shared labels from CrudRecordValidation.
    """
    # Turkish labels.
    tr = {
        **CrudRecordValidation.tr,
        "gov_address_code": "Devlet Adres Kodu",
        "build_name": "Bina Adı",
        "build_types_uu_id": "Bina Tipi",
        "build_no": "Bina No",
        "max_floor": "Kat Sayısı",
        "underground_floor": "Bodrum Kat Sayısı",
        "address_uu_id": "Adres",
        "build_date": "Yapım Tarihi",
        "decision_period_date": "Karar Tarihi",
        "tax_no": "Vergi No",
        "lift_count": "Asansör Sayısı",
        "heating_system": "Isıtma Sistemi",
        "cooling_system": "Soğutma Sistemi",
        "hot_water_system": "Sıcak Su Sistemi",
        "block_service_man_count": "Hizmet Görevlisi Sayısı",
        "security_service_man_count": "Güvenlik Görevlisi Sayısı",
        "garage_count": "Garaj Sayısı",
        "site_uu_id": "Site UUID",
    }
    # English labels.
    en = {
        **CrudRecordValidation.en,
        "gov_address_code": "Government Address Code",
        "build_name": "Building Name",
        "build_types_uu_id": "Building Type",
        "build_no": "Building No",
        "max_floor": "Number of Floors",
        "underground_floor": "Number of Basement Floors",
        "address_uu_id": "Address",
        "build_date": "Construction Date",
        "decision_period_date": "Decision Date",
        "tax_no": "Tax No",
        "lift_count": "Number of Elevators",
        "heating_system": "Heating System",
        "cooling_system": "Cooling System",
        "hot_water_system": "Hot Water System",
        "block_service_man_count": "Number of Service Officers",
        "security_service_man_count": "Number of Security Officers",
        "garage_count": "Number of Garages",
        "site_uu_id": "Site UUID",
    }
|
||||
|
||||
|
||||
class ListBuildingResponse(BaseModelRegular, CrudRecords, BuildListValidation):
    """Filter/response model for listing buildings; every field is optional."""
    gov_address_code: Optional[str] = None
    build_name: Optional[str] = None
    build_types_uu_id: Optional[str] = None
    build_no: Optional[str] = None
    max_floor: Optional[int] = None
    underground_floor: Optional[int] = None
    address_uu_id: Optional[str] = None
    build_date: Optional[str] = None
    decision_period_date: Optional[str] = None
    tax_no: Optional[str] = None
    lift_count: Optional[int] = None
    heating_system: Optional[bool] = None
    cooling_system: Optional[bool] = None
    hot_water_system: Optional[bool] = None
    block_service_man_count: Optional[int] = None
    security_service_man_count: Optional[int] = None
    garage_count: Optional[int] = None
    site_uu_id: Optional[str] = None
|
||||
|
||||
|
||||
# class InsertBuild(BaseModelRegular, BuildValidation):
|
||||
# gov_address_code: str
|
||||
# build_name: str
|
||||
# build_types_uu_id: str
|
||||
# max_floor: int
|
||||
# underground_floor: int
|
||||
# address_uu_id: str
|
||||
# build_date: str
|
||||
# decision_period_date: str
|
||||
#
|
||||
# tax_no: Optional[str] = None
|
||||
# lift_count: Optional[int] = None
|
||||
# heating_system: Optional[bool] = None
|
||||
# cooling_system: Optional[bool] = None
|
||||
# hot_water_system: Optional[bool] = None
|
||||
# block_service_man_count: Optional[int] = None
|
||||
# security_service_man_count: Optional[int] = None
|
||||
# garage_count: Optional[int] = None
|
||||
#
|
||||
#
|
||||
# class BuildUpdateValidation:
|
||||
# tr = {
|
||||
# **BuildValidation.tr,
|
||||
# **PydanticBaseModelValidation.tr,
|
||||
# }
|
||||
# en = {
|
||||
# **BuildValidation.en,
|
||||
# **PydanticBaseModelValidation.en,
|
||||
# }
|
||||
#
|
||||
#
|
||||
# class UpdateBuild(PydanticBaseModel, BuildUpdateValidation):
|
||||
# gov_address_code: Optional[str] = None
|
||||
# build_name: Optional[str] = None
|
||||
# build_no: Optional[str] = None
|
||||
# build_types: Optional[str] = None
|
||||
# max_floor: Optional[int] = None
|
||||
# underground_floor: Optional[int] = None
|
||||
# build_date: Optional[str] = None
|
||||
# tax_no: Optional[str] = None
|
||||
# lift_count: Optional[int] = None
|
||||
# heating_system: Optional[bool] = None
|
||||
# cooling_system: Optional[bool] = None
|
||||
# hot_water_system: Optional[bool] = None
|
||||
# block_service_man_count: Optional[int] = None
|
||||
# security_service_man_count: Optional[int] = None
|
||||
# garage_count: Optional[int] = None
|
||||
# address_uu_id: Optional[str] = None
|
||||
251
api_validations/validations_response/building_responses.py
Normal file
251
api_validations/validations_response/building_responses.py
Normal file
@@ -0,0 +1,251 @@
|
||||
from pydantic import BaseModel
|
||||
from typing import Optional, List
|
||||
from datetime import datetime
|
||||
from uuid import UUID
|
||||
from decimal import Decimal
|
||||
from .base_responses import BaseResponse, CrudCollection
|
||||
|
||||
|
||||
class BuildAreaListResponse(BaseResponse):
    """Response model for the building-area list endpoint.

    Fix: ``updated_at`` was a bare ``Optional[datetime]`` (required in
    pydantic v2); it now defaults to ``None``.
    """

    # NOTE(review): uu_id/created_at/updated_at/deleted re-declare (and narrow)
    # fields inherited from BaseResponse -- confirm the override is intended.
    uu_id: UUID
    build_id: int
    build_uu_id: str
    area_name: str
    area_value: float
    created_at: datetime
    updated_at: Optional[datetime] = None
    deleted: bool = False
|
||||
|
||||
|
||||
class BuildAreaListCollection(CrudCollection[BuildAreaListResponse]):
    """Paginated collection of BuildAreaListResponse items."""
    pass
|
||||
|
||||
|
||||
class BuildSitesListResponse(BaseResponse):
    """Response model for the building-sites list endpoint.

    Fix: ``updated_at`` was a bare ``Optional[datetime]`` (required in
    pydantic v2); it now defaults to ``None``.
    """

    uu_id: UUID
    address_id: int
    site_name: str
    site_value: float
    created_at: datetime
    updated_at: Optional[datetime] = None
    deleted: bool = False
|
||||
|
||||
|
||||
class BuildSitesListCollection(CrudCollection[BuildSitesListResponse]):
    """Paginated collection of BuildSitesListResponse items."""
    pass
|
||||
|
||||
|
||||
class BuildTypesListResponse(BaseResponse):
    """Response model for the building-types list endpoint.

    Fix: ``updated_at`` was a bare ``Optional[datetime]`` (required in
    pydantic v2); it now defaults to ``None``.
    """

    uu_id: UUID
    type_name: str
    type_value: str
    created_at: datetime
    updated_at: Optional[datetime] = None
    deleted: bool = False
|
||||
|
||||
|
||||
class BuildTypesListCollection(CrudCollection[BuildTypesListResponse]):
    """Paginated collection of BuildTypesListResponse items."""
    pass
|
||||
|
||||
|
||||
class BuildTypesResponse(BaseResponse):
    """Response model for a building type definition."""
    function_code: str
    type_code: str
    # Language of the labels, e.g. "TR".
    lang: str = "TR"
|
||||
|
||||
|
||||
class BuildTypesCollection(CrudCollection[BuildTypesResponse]):
    """Paginated collection of BuildTypesResponse items."""
    pass
|
||||
|
||||
|
||||
class Part2EmployeeResponse(BaseResponse):
    """Response model linking a building part to an employee."""
    build_id: int
    part_id: int
    employee_id: int
|
||||
|
||||
|
||||
class Part2EmployeeCollection(CrudCollection[Part2EmployeeResponse]):
    """Paginated collection of Part2EmployeeResponse items."""
    pass
|
||||
|
||||
|
||||
class RelationshipEmployee2BuildResponse(BaseResponse):
    """Response model for an employee-to-building relationship."""
    company_id: int
    employee_id: int
    member_id: int
    relationship_type: Optional[str] = "Employee"
    # When True the relation is display-only -- presumably no permissions attached; TODO confirm.
    show_only: bool = False
|
||||
|
||||
|
||||
class RelationshipEmployee2BuildCollection(CrudCollection[RelationshipEmployee2BuildResponse]):
    """Paginated collection of RelationshipEmployee2BuildResponse items."""
    pass
|
||||
|
||||
|
||||
class BuildResponse(BaseResponse):
    """Response model for buildings.

    Fix: bare ``Optional[...]`` fields (required in pydantic v2) now default
    to ``None``.
    """

    gov_address_code: str = ""
    build_name: str
    build_no: str
    max_floor: int = 1
    underground_floor: int = 0
    build_date: datetime
    decision_period_date: datetime
    tax_no: str = ""
    lift_count: int = 0
    heating_system: bool = True
    cooling_system: bool = False
    hot_water_system: bool = False
    block_service_man_count: int = 0
    security_service_man_count: int = 0
    garage_count: int = 0
    management_room_id: Optional[int] = None
    site_id: Optional[int] = None
    site_uu_id: Optional[str] = None
    address_id: int
    address_uu_id: str
    build_types_id: int
    build_types_uu_id: Optional[str] = None
|
||||
|
||||
|
||||
class BuildCollection(CrudCollection[BuildResponse]):
    """Paginated collection of BuildResponse items."""
    pass
|
||||
|
||||
|
||||
class BuildPartsResponse(BaseResponse):
    """Response model for building parts (flats/units).

    Fix: bare ``Optional[...]`` fields (required in pydantic v2) now default
    to ``None``.
    """

    address_gov_code: str
    part_no: int = 0
    part_level: int = 0
    part_code: str
    part_gross_size: int = 0
    part_net_size: int = 0
    default_accessory: str = "0"
    human_livable: bool = True
    due_part_key: str
    build_id: int
    build_uu_id: str
    part_direction_id: Optional[int] = None
    part_direction_uu_id: Optional[str] = None
    part_type_id: int
    part_type_uu_id: str
|
||||
|
||||
|
||||
class BuildPartsCollection(CrudCollection[BuildPartsResponse]):
    """Paginated collection of BuildPartsResponse items."""
    pass
|
||||
|
||||
|
||||
class BuildLivingSpaceResponse(BaseResponse):
    """Response model for a building living space (occupancy of a part by a person)."""
    fix_value: Decimal = Decimal("0")
    fix_percent: Decimal = Decimal("0")
    agreement_no: str = ""
    marketing_process: bool = False
    marketing_layer: int = 0
    build_parts_id: int
    build_parts_uu_id: str
    person_id: int
    person_uu_id: str
    occupant_type: int
    occupant_type_uu_id: str
|
||||
|
||||
|
||||
class BuildLivingSpaceCollection(CrudCollection[BuildLivingSpaceResponse]):
    """Paginated collection of BuildLivingSpaceResponse items."""
    pass
|
||||
|
||||
|
||||
class BuildManagementResponse(BaseResponse):
    """Response model for building management pricing records."""
    discounted_percentage: Decimal = Decimal("0.00")
    discounted_price: Decimal = Decimal("0.00")
    calculated_price: Decimal = Decimal("0.00")
    occupant_type: int
    occupant_type_uu_id: str
    build_id: int
    build_uu_id: str
    build_parts_id: int
    build_parts_uu_id: str
|
||||
|
||||
|
||||
class BuildManagementCollection(CrudCollection[BuildManagementResponse]):
    """Paginated collection of BuildManagementResponse items."""
    pass
|
||||
|
||||
|
||||
class BuildAreaResponse(BaseResponse):
    """Response model for a building common area.

    Fix: bare ``Optional[...]`` fields (required in pydantic v2) now default
    to ``None``.
    """

    area_name: str = ""
    area_code: str = ""
    area_type: str = "GREEN"
    # Compass direction code, e.g. "NN" -- presumably north; TODO confirm enum values.
    area_direction: str = "NN"
    area_gross_size: Decimal = Decimal("0")
    area_net_size: Decimal = Decimal("0")
    width: int = 0
    size: int = 0
    build_id: int
    build_uu_id: str
    part_type_id: Optional[int] = None
    part_type_uu_id: Optional[str] = None
|
||||
|
||||
|
||||
class BuildAreaCollection(CrudCollection[BuildAreaResponse]):
    """Paginated collection of BuildAreaResponse items."""
    pass
|
||||
|
||||
|
||||
class BuildSitesResponse(BaseResponse):
    """Response model for building sites.

    Fix: the bare ``Optional[...]`` field (required in pydantic v2) now
    defaults to ``None``.
    """

    site_name: str
    site_no: str
    address_id: int
    address_uu_id: Optional[str] = None
|
||||
|
||||
|
||||
class BuildSitesCollection(CrudCollection[BuildSitesResponse]):
    """Paginated collection of BuildSitesResponse items."""
    pass
|
||||
|
||||
|
||||
class BuildCompaniesProvidingResponse(BaseResponse):
    """Response model for companies providing services to a building.

    Fix: bare ``Optional[...]`` fields (required in pydantic v2) now default
    to ``None``.
    """

    build_id: int
    build_uu_id: Optional[str] = None
    company_id: int
    company_uu_id: Optional[str] = None
    provide_id: Optional[int] = None
    provide_uu_id: Optional[str] = None
    contract_id: Optional[int] = None
|
||||
|
||||
|
||||
class BuildCompaniesProvidingCollection(CrudCollection[BuildCompaniesProvidingResponse]):
    """Paginated collection of BuildCompaniesProvidingResponse items."""
    pass
|
||||
|
||||
|
||||
class BuildPersonProvidingResponse(BaseResponse):
    """Response model for persons providing services to a building.

    Fix: bare ``Optional[...]`` fields (required in pydantic v2) now default
    to ``None``.
    """

    build_id: int
    build_uu_id: Optional[str] = None
    people_id: int
    people_uu_id: Optional[str] = None
    provide_id: Optional[int] = None
    provide_uu_id: Optional[str] = None
    contract_id: Optional[int] = None
|
||||
|
||||
|
||||
class BuildPersonProvidingCollection(CrudCollection[BuildPersonProvidingResponse]):
    """Paginated collection of BuildPersonProvidingResponse items."""
    pass
|
||||
55
api_validations/validations_response/company_responses.py
Normal file
55
api_validations/validations_response/company_responses.py
Normal file
@@ -0,0 +1,55 @@
|
||||
from pydantic import BaseModel
|
||||
from typing import Optional, List
|
||||
from datetime import datetime
|
||||
from uuid import UUID
|
||||
|
||||
|
||||
class CompanyListResponse(BaseModel):
    """Response model for the company list endpoint.

    Fix: ``updated_at`` was a bare ``Optional[datetime]`` (required in
    pydantic v2); it now defaults to ``None``.
    """

    uu_id: UUID
    company_name: str
    company_code: str
    company_email: str
    company_phone: str
    company_address: str
    created_at: datetime
    updated_at: Optional[datetime] = None
    deleted: bool = False
|
||||
|
||||
|
||||
class CompanyDepartmentListResponse(BaseModel):
    """Response model for the company department list endpoint.

    Fix: ``updated_at`` was a bare ``Optional[datetime]`` (required in
    pydantic v2); it now defaults to ``None``.
    """

    uu_id: UUID
    department_name: str
    department_code: str
    company_id: int
    company_uu_id: str
    created_at: datetime
    updated_at: Optional[datetime] = None
    deleted: bool = False
|
||||
|
||||
|
||||
class CompanyDutyListResponse(BaseModel):
    """Response model for the company duty list endpoint.

    Fix: ``updated_at`` was a bare ``Optional[datetime]`` (required in
    pydantic v2); it now defaults to ``None``.
    """

    uu_id: UUID
    duty_name: str
    duty_code: str
    department_id: int
    department_uu_id: str
    created_at: datetime
    updated_at: Optional[datetime] = None
    deleted: bool = False
|
||||
|
||||
|
||||
class CompanyEmployeeListResponse(BaseModel):
    """Response model for the company employee list endpoint.

    Fix: ``updated_at`` was a bare ``Optional[datetime]`` (required in
    pydantic v2); it now defaults to ``None``.
    """

    uu_id: UUID
    employee_id: int
    employee_uu_id: str
    company_id: int
    company_uu_id: str
    duty_id: int
    duty_uu_id: str
    created_at: datetime
    updated_at: Optional[datetime] = None
    deleted: bool = False
|
||||
178
api_validations/validations_response/decision_book_responses.py
Normal file
178
api_validations/validations_response/decision_book_responses.py
Normal file
@@ -0,0 +1,178 @@
|
||||
from pydantic import BaseModel
|
||||
from typing import Optional, List
|
||||
from datetime import datetime
|
||||
from uuid import UUID
|
||||
from decimal import Decimal
|
||||
from .base_responses import BaseResponse, CrudCollection
|
||||
|
||||
|
||||
class BuildDecisionBookResponse(BaseResponse):
    """Response model for a building decision book.

    Fix: bare ``Optional[...]`` fields (required in pydantic v2) now default
    to ``None``.
    """

    decision_book_pdf_path: Optional[str] = ""
    resp_company_fix_wage: float = 0
    contact_agreement_path: Optional[str] = ""
    contact_agreement_date: Optional[datetime] = None
    # NOTE(review): meeting_date is a string here but datetime elsewhere -- confirm intended.
    meeting_date: Optional[str] = None
    decision_type: Optional[str] = None
|
||||
|
||||
|
||||
class BuildDecisionBookCollection(CrudCollection[BuildDecisionBookResponse]):
    """Paginated collection of BuildDecisionBookResponse items."""
    pass
|
||||
|
||||
|
||||
class BuildDecisionBookInvitationsResponse(BaseResponse):
    """Response model for decision-book meeting invitations.

    Fix: bare ``Optional[...]`` fields (required in pydantic v2) now default
    to ``None``.
    """

    build_id: int
    build_uu_id: Optional[str] = None
    decision_book_id: int
    decision_book_uu_id: Optional[str] = None
    invitation_type: str
    invitation_attempt: int = 1
    living_part_count: int = 1
    # Quorum ratio -- presumably the fraction of living parts required; TODO confirm.
    living_part_percentage: Decimal = Decimal("0.51")
    message: Optional[str] = None
    planned_date: datetime
    planned_date_expires: datetime
|
||||
|
||||
|
||||
class BuildDecisionBookInvitationsCollection(CrudCollection[BuildDecisionBookInvitationsResponse]):
    """Paginated collection of BuildDecisionBookInvitationsResponse items."""
    pass
|
||||
|
||||
|
||||
class BuildDecisionBookPersonResponse(BaseResponse):
    """Response model for a person attached to a decision book.

    Fix: bare ``Optional[...]`` fields (required in pydantic v2) now default
    to ``None``.
    """

    dues_percent_discount: int = 0
    dues_fix_discount: Decimal = Decimal("0")
    dues_discount_approval_date: datetime
    send_date: datetime
    is_attending: bool = False
    confirmed_date: Optional[datetime] = None
    token: str = ""
    # Proxy attendee, if the person is represented by someone else.
    vicarious_person_id: Optional[int] = None
    vicarious_person_uu_id: Optional[str] = None
    invite_id: int
    invite_uu_id: str
    build_decision_book_id: int
    build_decision_book_uu_id: str
    build_living_space_id: int
    build_living_space_uu_id: Optional[str] = None
    person_id: int
||||
|
||||
|
||||
class BuildDecisionBookPersonCollection(CrudCollection[BuildDecisionBookPersonResponse]):
    """Paginated collection of BuildDecisionBookPersonResponse items."""
    pass
|
||||
|
||||
|
||||
class BuildDecisionBookPersonOccupantsResponse(BaseResponse):
    """Response model for decision-book person occupant roles.

    Fix: bare ``Optional[...]`` fields (required in pydantic v2) now default
    to ``None``.
    """

    build_decision_book_person_id: int
    build_decision_book_person_uu_id: Optional[str] = None
    invite_id: Optional[int] = None
    invite_uu_id: Optional[str] = None
    occupant_type_id: int
    occupant_type_uu_id: Optional[str] = None
|
||||
|
||||
|
||||
class BuildDecisionBookPersonOccupantsCollection(CrudCollection[BuildDecisionBookPersonOccupantsResponse]):
    """Paginated collection of BuildDecisionBookPersonOccupantsResponse items."""
    pass
|
||||
|
||||
|
||||
class BuildDecisionBookItemsResponse(BaseResponse):
    """Response model for decision-book agenda items.

    Fix: bare ``Optional[...]`` fields (required in pydantic v2) now default
    to ``None``.
    """

    item_order: int
    item_comment: str
    item_objection: Optional[str] = None
    info_is_completed: bool = False
    is_payment_created: bool = False
    info_type_id: Optional[int] = None
    info_type_uu_id: Optional[str] = None
    build_decision_book_id: int
    build_decision_book_uu_id: Optional[str] = None
    item_short_comment: Optional[str] = None
|
||||
|
||||
|
||||
class BuildDecisionBookItemsCollection(CrudCollection[BuildDecisionBookItemsResponse]):
    """Paginated collection of BuildDecisionBookItemsResponse items."""
    pass
|
||||
|
||||
|
||||
class BuildDecisionBookItemsUnapprovedResponse(BaseResponse):
    """Response model for objections to decision-book items.

    Fix: bare ``Optional[...]`` fields (required in pydantic v2) now default
    to ``None``.
    """

    item_objection: str
    item_order: int
    decision_book_item_id: int
    decision_book_item_uu_id: Optional[str] = None
    person_id: int
    person_uu_id: Optional[str] = None
    build_decision_book_item: int
    build_decision_book_item_uu_id: Optional[str] = None
|
||||
|
||||
|
||||
class BuildDecisionBookItemsUnapprovedCollection(CrudCollection[BuildDecisionBookItemsUnapprovedResponse]):
    """Paginated collection of BuildDecisionBookItemsUnapprovedResponse items."""
    pass
|
||||
|
||||
|
||||
class BuildDecisionBookPaymentsResponse(BaseResponse):
    """Response model for payments scheduled from decision-book items.

    Fix: bare ``Optional[...]`` fields (required in pydantic v2) now default
    to ``None``.
    """

    payment_plan_time_periods: str
    process_date: datetime
    payment_amount: Decimal
    currency: str = "TRY"
    payment_types_id: Optional[int] = None
    payment_types_uu_id: Optional[str] = None
    period_time: str
    # Denormalized year/month of process_date.
    process_date_y: int
    process_date_m: int
    build_decision_book_item_id: int
    build_decision_book_item_uu_id: str
    build_parts_id: int
    build_parts_uu_id: str
    decision_book_project_id: Optional[int] = None
    decision_book_project_uu_id: Optional[str] = None
    account_records_id: Optional[int] = None
    account_records_uu_id: Optional[str] = None
|
||||
|
||||
|
||||
class BuildDecisionBookPaymentsCollection(CrudCollection[BuildDecisionBookPaymentsResponse]):
    """Paginated collection of BuildDecisionBookPaymentsResponse items."""
    pass
|
||||
|
||||
|
||||
class BuildDecisionBookLegalResponse(BaseResponse):
    """Response model for legal (lawsuit) records tied to a decision book.

    Fix: bare ``Optional[...]`` fields (required in pydantic v2) now default
    to ``None``.
    """

    period_start_date: datetime
    lawsuits_decision_number: str
    lawsuits_decision_date: datetime
    period_stop_date: datetime
    decision_book_pdf_path: Optional[str] = ""
    resp_company_total_wage: Optional[Decimal] = Decimal("0")
    contact_agreement_path: Optional[str] = ""
    contact_agreement_date: Optional[datetime] = None
    # NOTE(review): meeting_date is a string here but datetime elsewhere -- confirm intended.
    meeting_date: str
    # Lawsuit type code; default "C" -- meaning not evident from this file, TODO confirm.
    lawsuits_type: str = "C"
    lawsuits_name: str
    lawsuits_note: str
    lawyer_cost: Decimal
    mediator_lawyer_cost: Decimal
    other_cost: Decimal
    legal_cost: Decimal
    approved_cost: Decimal
    total_price: Decimal
    build_db_item_id: int
    build_db_item_uu_id: Optional[str] = None
    resp_attorney_id: int
    resp_attorney_uu_id: Optional[str] = None
    resp_attorney_company_id: int
    resp_attorney_company_uu_id: Optional[str] = None
    mediator_lawyer_person_id: int
    mediator_lawyer_person_uu_id: Optional[str] = None
|
||||
|
||||
|
||||
class BuildDecisionBookLegalCollection(CrudCollection[BuildDecisionBookLegalResponse]):
    """Paginated collection of BuildDecisionBookLegalResponse items."""
    pass
|
||||
179
databases/__init__.py
Normal file
179
databases/__init__.py
Normal file
@@ -0,0 +1,179 @@
|
||||
# SQL Models
|
||||
from databases.sql_models.account.account import (
|
||||
AccountBooks,
|
||||
AccountCodeParser,
|
||||
AccountRecords,
|
||||
AccountCodes,
|
||||
AccountDetail,
|
||||
AccountMaster,
|
||||
AccountRecordExchanges,
|
||||
)
|
||||
from databases.sql_models.building.budget import (
|
||||
DecisionBookBudgetBooks,
|
||||
DecisionBookBudgetCodes,
|
||||
DecisionBookBudgetMaster,
|
||||
DecisionBookBudgets,
|
||||
)
|
||||
from databases.sql_models.account.iban import (
|
||||
BuildIbans,
|
||||
BuildIbanDescription,
|
||||
)
|
||||
from databases.sql_models.api.encrypter import CrypterEngine
|
||||
from databases.sql_models.building.build import (
|
||||
Build,
|
||||
BuildTypes,
|
||||
BuildParts,
|
||||
BuildArea,
|
||||
BuildSites,
|
||||
BuildLivingSpace,
|
||||
BuildPersonProviding,
|
||||
BuildCompaniesProviding,
|
||||
RelationshipEmployee2Build,
|
||||
)
|
||||
from databases.sql_models.building.decision_book import (
|
||||
BuildDecisionBook,
|
||||
BuildDecisionBookItems,
|
||||
BuildDecisionBookPerson,
|
||||
BuildDecisionBookLegal,
|
||||
BuildDecisionBookItemsUnapproved,
|
||||
BuildDecisionBookInvitations,
|
||||
BuildDecisionBookPayments,
|
||||
BuildDecisionBookProjects,
|
||||
BuildDecisionBookProjectPerson,
|
||||
BuildDecisionBookPersonOccupants,
|
||||
BuildDecisionBookProjectItems,
|
||||
)
|
||||
from databases.sql_models.company.company import (
|
||||
Companies,
|
||||
RelationshipDutyCompany,
|
||||
)
|
||||
from databases.sql_models.company.employee import (
|
||||
Employees,
|
||||
EmployeesSalaries,
|
||||
EmployeeHistory,
|
||||
Staff,
|
||||
)
|
||||
from databases.sql_models.company.department import (
|
||||
Duty,
|
||||
Duties,
|
||||
Departments,
|
||||
)
|
||||
from databases.sql_models.event.event import (
|
||||
Modules,
|
||||
Services,
|
||||
Service2Events,
|
||||
Events,
|
||||
Event2Occupant,
|
||||
Event2Employee,
|
||||
Event2OccupantExtra,
|
||||
Event2EmployeeExtra,
|
||||
)
|
||||
from databases.sql_models.identity.identity import (
|
||||
Addresses,
|
||||
AddressCity,
|
||||
AddressStreet,
|
||||
AddressLocality,
|
||||
AddressDistrict,
|
||||
AddressNeighborhood,
|
||||
AddressState,
|
||||
AddressCountry,
|
||||
AddressPostcode,
|
||||
AddressGeographicLocations,
|
||||
UsersTokens,
|
||||
OccupantTypes,
|
||||
People,
|
||||
Users,
|
||||
RelationshipDutyPeople,
|
||||
RelationshipEmployee2PostCode,
|
||||
Contracts,
|
||||
)
|
||||
from databases.sql_models.others.enums import (
|
||||
ApiEnumDropdown,
|
||||
)
|
||||
from databases.sql_models.rules.rules import (
|
||||
EndpointRestriction,
|
||||
)
|
||||
|
||||
# NO-SQL Models
|
||||
from databases.no_sql_models.mongo_database import (
|
||||
MongoQuery,
|
||||
)
|
||||
from databases.no_sql_models.identity import (
|
||||
MongoQueryIdentity,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"AccountBooks",
|
||||
"AccountCodeParser",
|
||||
"AccountRecords",
|
||||
"AccountCodes",
|
||||
"AccountDetail",
|
||||
"AccountMaster",
|
||||
"AccountRecordExchanges",
|
||||
"BuildIbans",
|
||||
"BuildIbanDescription",
|
||||
"CrypterEngine",
|
||||
"Build",
|
||||
"BuildTypes",
|
||||
"BuildParts",
|
||||
"BuildArea",
|
||||
"BuildSites",
|
||||
"BuildLivingSpace",
|
||||
"BuildPersonProviding",
|
||||
"BuildCompaniesProviding",
|
||||
"BuildDecisionBook",
|
||||
"BuildDecisionBookItems",
|
||||
"BuildDecisionBookPerson",
|
||||
"BuildDecisionBookLegal",
|
||||
"BuildDecisionBookItemsUnapproved",
|
||||
"BuildDecisionBookInvitations",
|
||||
"BuildDecisionBookPayments",
|
||||
"BuildDecisionBookProjects",
|
||||
"BuildDecisionBookProjectPerson",
|
||||
"BuildDecisionBookPersonOccupants",
|
||||
"BuildDecisionBookProjectItems",
|
||||
"DecisionBookBudgetBooks",
|
||||
"DecisionBookBudgetCodes",
|
||||
"DecisionBookBudgetMaster",
|
||||
"DecisionBookBudgets",
|
||||
"Companies",
|
||||
"RelationshipDutyCompany",
|
||||
"Employees",
|
||||
"EmployeesSalaries",
|
||||
"EmployeeHistory",
|
||||
"Staff",
|
||||
"Duty",
|
||||
"Duties",
|
||||
"Departments",
|
||||
"Modules",
|
||||
"Services",
|
||||
"Service2Events",
|
||||
"Events",
|
||||
"Event2Occupant",
|
||||
"Event2Employee",
|
||||
"Event2OccupantExtra",
|
||||
"Event2EmployeeExtra",
|
||||
"Addresses",
|
||||
"AddressCity",
|
||||
"AddressStreet",
|
||||
"AddressLocality",
|
||||
"AddressDistrict",
|
||||
"AddressNeighborhood",
|
||||
"AddressState",
|
||||
"AddressCountry",
|
||||
"AddressPostcode",
|
||||
"AddressGeographicLocations",
|
||||
"UsersTokens",
|
||||
"OccupantTypes",
|
||||
"People",
|
||||
"Users",
|
||||
"RelationshipDutyPeople",
|
||||
"RelationshipEmployee2PostCode",
|
||||
"Contracts",
|
||||
"ApiEnumDropdown",
|
||||
"EndpointRestriction",
|
||||
"RelationshipEmployee2Build",
|
||||
# ------------------------------------------------
|
||||
"MongoQuery",
|
||||
"MongoQueryIdentity",
|
||||
]
|
||||
11
databases/extensions/__init__.py
Normal file
11
databases/extensions/__init__.py
Normal file
@@ -0,0 +1,11 @@
|
||||
from .selector_classes import (
|
||||
Explanation,
|
||||
SelectActionWithEmployee,
|
||||
SelectAction,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"Explanation",
|
||||
"SelectAction",
|
||||
"SelectActionWithEmployee",
|
||||
]
|
||||
361
databases/extensions/auth.py
Normal file
361
databases/extensions/auth.py
Normal file
@@ -0,0 +1,361 @@
|
||||
import uuid
|
||||
import secrets
|
||||
import hashlib
|
||||
import requests
|
||||
|
||||
from sqlalchemy import or_
|
||||
from datetime import timedelta
|
||||
from fastapi.exceptions import HTTPException
|
||||
from fastapi import status
|
||||
|
||||
from databases.no_sql_models.validations import (
|
||||
PasswordHistoryViaUser,
|
||||
AccessHistoryViaUser,
|
||||
)
|
||||
|
||||
from api_library.date_time_actions.date_functions import system_arrow, client_arrow
|
||||
from api_configs import ApiStatic, Auth
|
||||
|
||||
|
||||
class PasswordModule:
    """Stateless password/token helpers mixed into user models.

    Hashing scheme: SHA-256 over the string ``"{domain}-{id_}-{password}"``,
    i.e. the user's main domain and primary-key act as a per-user salt.
    """

    @classmethod
    def generate_token(cls, length):
        """Return a cryptographically secure, URL-safe random token.

        *length* is the number of random bytes fed to ``secrets.token_urlsafe``
        (the resulting string is ~1.3x longer due to base64 encoding).
        """
        return secrets.token_urlsafe(length)

    @classmethod
    def create_hashed_password(cls, domain, id_, password):
        """Hash *password* salted with *domain* and *id_*; returns a hex digest."""
        salted_password = f"{domain}-{id_}-{password}"
        return hashlib.sha256(salted_password.encode()).hexdigest()

    @classmethod
    def check_hashed_password(cls, domain, id_, password, password_hashed):
        """Check a cleartext *password* against the stored *password_hashed*.

        Raises:
            HTTPException(401): when the user has no stored hash yet.

        Returns:
            bool: True when the candidate hash matches the stored one.
        """
        if not password_hashed:
            raise HTTPException(
                status_code=401,
                detail="Password is not changed yet user has no password.",
            )
        # Constant-time comparison: avoids leaking matching hash prefixes
        # through response timing (plain `==` short-circuits on first diff).
        return secrets.compare_digest(
            cls.create_hashed_password(domain, id_, password), password_hashed
        )
|
||||
|
||||
|
||||
class AuthModule(PasswordModule):
    """Authentication helpers layered on PasswordModule.

    Mixed into the ``Users`` model: methods use ``self.id`` / ``self.uu_id`` /
    ``self.hash_password`` and the model's query helpers. Imports of project
    models are deferred to call time to avoid circular imports.
    """

    @classmethod
    def check_user_exits(cls, access_key, domain):
        """Look up a user by email or phone number and verify *domain* access.

        Raises:
            HTTPException(400): no user matches *access_key*.
            HTTPException(401): user exists but *domain* is not in their
                allowed (non-main) domain list.
        """
        from databases import Users

        # access_key may be an email (lowercased) or a phone number
        # (whitespace stripped); either match selects the user.
        found_user = Users.query.filter(
            or_(
                Users.email == str(access_key).lower(),
                Users.phone_number == str(access_key).replace(" ", ""),
            ),
        ).first()
        if not found_user:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Given access key or domain is not matching with the any user record.",
            )

        other_domains_list = found_user.get_main_domain_and_other_domains(
            get_main_domain=False
        )
        if domain not in other_domains_list:
            raise HTTPException(
                status_code=401,
                detail=dict(message="Unauthorized User attempts to connect api"),
            )
        return found_user

    def generate_access_token(self):
        """Return a fresh random access token of the configured length."""
        return self.generate_token(Auth.ACCESS_TOKEN_LENGTH)

    def remove_refresher_token(self, domain, disconnect: bool = False):
        """Delete this user's stored refresher tokens.

        With ``disconnect=True`` every token of the user is removed
        (global logout); otherwise only tokens for *domain*.
        """
        from databases import (
            UsersTokens,
        )

        if disconnect:
            registered_tokens = UsersTokens.filter_all(
                UsersTokens.user_id == self.id, system=True
            )
        else:
            registered_tokens = UsersTokens.filter_all(
                UsersTokens.domain == domain,
                UsersTokens.user_id == self.id,
                system=True,
            )
        registered_tokens.query.delete()
        UsersTokens.save()

    def check_password(self, password):
        """Validate *password* against the stored hash; raise 401 on mismatch."""
        main_domain = self.get_main_domain_and_other_domains(get_main_domain=True)
        if check_password := self.check_hashed_password(
            domain=main_domain,
            id_=str(self.uu_id),
            password_hashed=self.hash_password,
            password=password,
        ):
            return check_password
        raise HTTPException(
            status_code=401,
            detail="Password is not correct.",
        )

    def check_password_is_different(self, password):
        """Raise 401 if *password* hashes to the currently stored password."""
        main_domain = self.get_main_domain_and_other_domains(get_main_domain=True)
        if self.hash_password == self.create_hashed_password(
            domain=main_domain, id_=self.uu_id, password=password
        ):
            raise HTTPException(
                status_code=401,
                detail="New password is same with old password.",
            )

    @staticmethod
    def create_password(found_user, password, password_token=None):
        """Set a new password for *found_user*, recording password history.

        When the user already holds a ``password_token`` (reset flow), the
        supplied *password_token* and its expiry window are checked first.
        Returns the saved user.
        """
        from databases import MongoQueryIdentity

        if found_user.password_token:
            replace_day = 0
            try:
                # password_expires_day looks like e.g. "30 days, ..." —
                # extract the leading integer day count.
                replace_day = int(
                    str(found_user.password_expires_day or 0)
                    .split(",")[0]
                    .replace(" days", "")
                )
            except Exception as e:
                # NOTE(review): exception is swallowed (`err` is never used);
                # a malformed expiry silently falls back to 0 days — confirm intended.
                err = e
            token_is_expired = system_arrow.now() >= system_arrow.get(
                str(found_user.password_expiry_begins)
            ).shift(days=replace_day)

            # NOTE(review): parses as (token mismatch) AND (expired) — a wrong
            # token with an unexpired window passes; `or` may be intended. Confirm.
            if not password_token == found_user.password_token and token_is_expired:
                raise HTTPException(
                    status_code=401,
                    detail="Password token is not valid. Please request a new password token.",
                )
        query_engine = MongoQueryIdentity(company_uuid=found_user.related_company)

        domain_via_user = query_engine.get_domain_via_user(
            user_uu_id=str(found_user.uu_id)
        )["main_domain"]
        new_password_dict = {
            "password": found_user.create_hashed_password(
                domain=domain_via_user, id_=str(found_user.uu_id), password=password
            ),
            "date": str(system_arrow.now().date()),
        }
        history_dict = PasswordHistoryViaUser(
            user_uu_id=str(found_user.uu_id),
            password_add=new_password_dict,
            access_history_detail={
                "request": "",
                "ip": "",
            },
        )
        found_user.password_expiry_begins = str(system_arrow.now())
        found_user.hash_password = new_password_dict.get("password")
        # NOTE(review): both branches assign "" — the conditional is a no-op
        # and the token is always cleared; confirm that is the intent.
        found_user.password_token = "" if found_user.password_token else ""
        query_engine.refresh_password_history_via_user(payload=history_dict)
        found_user.save()
        return found_user

    @staticmethod
    def reset_password_token(found_user):
        """Issue a new password-reset token, restart its expiry clock, save."""
        found_user.password_expiry_begins = str(system_arrow.now())
        found_user.password_token = found_user.generate_token(
            Auth.REFRESHER_TOKEN_LENGTH
        )
        found_user.save()
        return found_user.password_token

    def generate_refresher_token(self, domain: str, remember_me=False):
        """Create/refresh a "RememberMe" token for *domain*.

        Returns the new token string, or None when *remember_me* is falsy
        (no refresher token is issued for plain logins).
        """
        from databases import (
            UsersTokens,
        )

        if remember_me:
            refresh_token = self.generate_token(Auth.REFRESHER_TOKEN_LENGTH)
            # Reuse the existing row when one exists for this user/domain.
            if already_token := UsersTokens.filter_by_one(
                system=True, user_id=self.id, token_type="RememberMe", domain=domain
            ).data:
                already_token.update(token=refresh_token)
                # NOTE(review): system_arrow.shift(days=3) is called on the
                # module-level helper, not on now() — confirm it yields the
                # intended "now + 3 days" expiry.
                already_token.expires_at = system_arrow.shift(days=3)
                already_token.save()
                return refresh_token
            # Otherwise drop any stale rows, then create a fresh one.
            users_tokens = UsersTokens.filter_by_all(
                user_id=self.id, token_type="RememberMe", domain=domain, system=True
            ).data
            if users_tokens:
                users_tokens.query.delete()
                UsersTokens.save()

            users_token = UsersTokens.find_or_create(
                user_id=self.id,
                token_type="RememberMe",
                token=refresh_token,
                domain=domain,
            )
            users_token.save_and_confirm()
            return refresh_token
        return None

    def remainder_day(self):
        """Return the remaining password validity expressed in seconds (float).

        NOTE(review): ``timedelta(days=n).seconds`` is always 0 for whole-day
        deltas (it is the sub-day remainder) — ``total_seconds()`` looks
        intended; confirm before relying on this value.
        """
        join_list = [
            _ for _ in str(self.password_expires_day).split(",")[0] if _.isdigit()
        ]
        return float(timedelta(days=int("".join(join_list))).seconds)
|
||||
|
||||
|
||||
class UserLoginModule(AuthModule):
    """Full login workflow on top of AuthModule.

    Combines credential verification, access/refresher token issuing,
    Redis session storage and MongoDB access-history bookkeeping.
    """

    @classmethod
    def login_user_with_credentials(cls, data, request):
        """Authenticate a login request and return the session bundle.

        Args:
            data: login payload exposing ``access_key``, ``domain``,
                ``password`` and ``remember_me``.
            request: the FastAPI/Starlette request (headers are inspected).

        Returns:
            dict with ``access_token``, ``refresher_token``, ``user`` and
            ``access_object`` (the Redis-stored session object).

        Raises:
            HTTPException(401): bad credentials (also raised from
            ``check_password`` / ``check_user_exits``).
        """
        from api_services.redis.auth_actions.auth import save_access_token_to_redis
        from databases import (
            Users,
            People,
            MongoQueryIdentity,
        )

        found_user = Users.check_user_exits(
            access_key=data.access_key, domain=data.domain
        )
        access_token = found_user.generate_access_token()
        query_engine = MongoQueryIdentity(company_uuid=found_user.related_company)
        if found_user.check_password(password=data.password):
            # Persist the session object keyed by the new access token.
            access_object_to_redis = save_access_token_to_redis(
                request=request,
                found_user=found_user,
                domain=data.domain,
                access_token=access_token,
            )
            refresher_token = found_user.generate_refresher_token(
                domain=data.domain, remember_me=data.remember_me
            )
            headers_request = request.headers
            headers_request = dict(headers_request)
            headers_request["evyos-user-agent"] = headers_request.get("user-agent")
            # NOTE(review): platform is copied from user-agent too — looks like
            # a copy/paste; confirm a real platform header was intended.
            headers_request["evyos-platform"] = headers_request.get("user-agent")
            # NOTE(review): hard-coded external IP (test/debug value?) —
            # overrides any real client IP; must be removed/confirmed.
            headers_request["evyos-ip-ext"] = "94.54.68.158"
            found_user.last_agent = headers_request.get("evyos-user-agent", None)
            found_user.last_platform = headers_request.get("evyos-platform", None)
            found_user.last_remote_addr = headers_request.get("evyos-ip-ext", None)
            found_user.last_seen = str(system_arrow.now())
            if ext_ip := headers_request.get("evyos-ip-ext"):
                agent = headers_request.get("evyos-user-agent", "")
                platform = headers_request.get("evyos-platform", "")
                # NOTE(review): blocking, unauthenticated HTTP geo lookup on the
                # login path (no timeout, KeyError if lookup fails) — confirm.
                address = requests.get(f"http://ip-api.com/json/{ext_ip}").json()
                address_package = {
                    "city": address["city"],
                    "zip": address["zip"],
                    "country": address["country"],
                    "countryCode": address["countryCode"],
                    "region": address["region"],
                    "regionName": address["regionName"],
                }
                mongo_db = MongoQueryIdentity(
                    company_uuid=str(found_user.related_company).replace(" ", ""),
                    storage_reasoning="AccessHistory",
                )
                filter_query = {
                    "agent": agent,
                    "platform": platform,
                    "address": address_package,
                    "user_id": found_user.id,
                }
                already_exits = mongo_db.mongo_engine.filter_by(filter_query) or None
                # True when the user has no access-history documents at all
                # (first ever login from any address).
                no_address_validates = mongo_db.mongo_engine.get_all()[0] == 0
                record_id = uuid.uuid4().__str__()
                # NOTE(review): notice_link and found_people are currently
                # unused (the e-mail alert below is commented out).
                notice_link = ApiStatic.blacklist_login(record_id=record_id)
                found_people = People.filter_one(People.id == found_user.person_id).data
                access_via_user = query_engine.update_access_history_via_user(
                    AccessHistoryViaUser(
                        **{
                            "user_uu_id": found_user.uu_id.__str__(),
                            "access_history": {
                                "record_id": record_id,
                                "agent": agent,
                                "platform": platform,
                                "address": address_package,
                                "ip": ext_ip,
                                "access_token": access_token,
                                "created_at": system_arrow.now().timestamp(),
                                # "is_confirmed": True if no_address_validates else False,
                                # "is_first": True if no_address_validates else False,
                            },
                        }
                    )
                )
                if already_exits:
                    # Known device/address combination: refresh its record.
                    update_mongo = mongo_db.mongo_engine.table.update_one(
                        filter=filter_query,
                        update={
                            "$set": {
                                "ip": ext_ip,
                                "access_token": access_token,
                                "created_at": system_arrow.now().timestamp(),
                            }
                        },
                    )
                else:
                    # New device/address combination: insert a record; it is
                    # auto-confirmed only on the user's very first login.
                    mongo_db.mongo_engine.insert(
                        payload={
                            "user_id": found_user.id,
                            "record_id": record_id,
                            "agent": agent,
                            "platform": platform,
                            "address": address_package,
                            "ip": ext_ip,
                            "access_token": access_token,
                            "created_at": system_arrow.now().timestamp(),
                            "is_confirmed": True if no_address_validates else False,
                            "is_first": True if no_address_validates else False,
                        }
                    )
            found_user.remember_me = bool(data.remember_me)
            found_user.save()
            return {
                "access_token": access_token,
                "refresher_token": refresher_token,
                "user": found_user,
                "access_object": access_object_to_redis,
            }
        raise HTTPException(
            status_code=401,
            detail="Login is not successful. Please check your credentials.",
        )
|
||||
|
||||
|
||||
# UserLogger.log_error(
|
||||
# dict(
|
||||
# user_id=found_user.id,
|
||||
# domain=data.domain,
|
||||
# access_key=data.access_key,
|
||||
# agent=found_user.last_agent,
|
||||
# ip=getattr(request, "remote_addr", None)
|
||||
# or request.headers.get("X-Forwarded-For", None),
|
||||
# platform=found_user.last_platform,
|
||||
# login_date=str(DateTimeLocal.now()),
|
||||
# is_login=True,
|
||||
# )
|
||||
# )
|
||||
|
||||
# if (
|
||||
# not str(found_people.country_code).lower()
|
||||
# == str(address_package.get("countryCode")).lower()
|
||||
# ):
|
||||
# send_email_completed = send_email(
|
||||
# subject=f"Dear {found_user.nick_name}, your password has been changed.",
|
||||
# receivers=[str(found_user.email)],
|
||||
# html=invalid_ip_or_address_found(
|
||||
# user_name=found_user.nick_name,
|
||||
# address=address_package,
|
||||
# notice_link=notice_link,
|
||||
# ),
|
||||
# )
|
||||
# if not send_email_completed:
|
||||
# raise HTTPException(
|
||||
# status_code=400,
|
||||
# detail="An error occured at sending email. Please contact with support team.",
|
||||
# )
|
||||
79
databases/extensions/selector_classes.py
Normal file
79
databases/extensions/selector_classes.py
Normal file
@@ -0,0 +1,79 @@
|
||||
# Empty marker/placeholder class (body is `...`); presumably used as a
# sentinel or future extension point — TODO confirm actual usage by callers.
class Explanation: ...
|
||||
|
||||
|
||||
class SelectorsBase:
    """Shared predicate builder used by the selector mixins below."""

    @classmethod
    def add_confirmed_filter(cls, first_table, second_table) -> tuple:
        """Build the standard "live row" predicates for a joined table pair.

        For each table, in order, yields: ``active == True``,
        ``is_confirmed == True``, ``deleted == False`` (SQLAlchemy binary
        expressions when the arguments are mapped tables).
        """
        predicates = []
        for table in (first_table, second_table):
            predicates.append(table.active == True)
            predicates.append(table.is_confirmed == True)
            predicates.append(table.deleted == False)
        return tuple(predicates)
|
||||
|
||||
|
||||
class SelectActionWithEmployee:
    """Mixin restricting a model's query to rows linked to a given employee.

    Expects the host model to provide ``session``, ``query``, ``id`` and a
    ``__many__table__`` association table exposing ``member_id`` and
    ``employee_id`` columns.
    """

    @classmethod
    def select_action(cls, employee_id, filter_expr: list = None):
        """Return a query of ``cls`` rows joined to the employee association.

        Args:
            employee_id: employee primary key to filter the association by.
            filter_expr: optional extra SQLAlchemy filter expressions.

        Returns:
            A ``cls.query`` filtered to the matching ids.
        """
        # The original duplicated the whole query in two branches that only
        # differed by the optional extra filters; unify them. Filter order is
        # preserved: employee condition, extras, then the confirmed-row set.
        extra_filters = tuple(filter_expr) if filter_expr is not None else ()
        matching_ids = (
            cls.session.query(cls.id)
            .select_from(cls)
            .join(cls.__many__table__, cls.__many__table__.member_id == cls.id)
            .filter(
                cls.__many__table__.employee_id == employee_id,
                *extra_filters,
                *SelectorsBase.add_confirmed_filter(
                    first_table=cls, second_table=cls.__many__table__
                ),
            )
        )
        return cls.query.filter(cls.id.in_([comp[0] for comp in matching_ids.all()]))
|
||||
|
||||
|
||||
class SelectAction:
    """Mixin restricting a model's query to rows linked to a set of duties.

    Expects the host model to provide ``session``, ``query``, ``id`` and a
    ``__many__table__`` association table exposing ``member_id`` and
    ``duties_id`` columns.
    """

    @classmethod
    def select_action(cls, duty_id_list: list, filter_expr: list = None):
        """Return a query of ``cls`` rows whose association row matches a duty.

        Args:
            duty_id_list: duty ids to match via ``duties_id.in_(...)``.
            filter_expr: optional extra SQLAlchemy filter expressions.

        Returns:
            A ``cls.query`` filtered to the matching ids.
        """
        # The original duplicated the whole query in two branches that only
        # differed by the optional extra filters; unify them. Filter order is
        # preserved: duty condition, confirmed-row set, then the extras.
        extra_filters = tuple(filter_expr) if filter_expr is not None else ()
        matching_ids = (
            cls.session.query(cls.id)
            .select_from(cls)
            .join(cls.__many__table__, cls.__many__table__.member_id == cls.id)
            .filter(
                cls.__many__table__.duties_id.in_(duty_id_list),
                *SelectorsBase.add_confirmed_filter(
                    first_table=cls, second_table=cls.__many__table__
                ),
                *extra_filters,
            )
        )
        return cls.query.filter(cls.id.in_([comp[0] for comp in matching_ids.all()]))
|
||||
@@ -6,6 +6,20 @@ from databases.no_sql_models.validations import (
|
||||
AccessHistoryViaUser,
|
||||
)
|
||||
from databases.no_sql_models.mongo_database import MongoQuery
|
||||
from api_library.date_time_actions.date_functions import system_arrow
|
||||
|
||||
|
||||
def validate_timestamp(doc):
    """Validate and fix timestamp fields in MongoDB documents.

    Any known timestamp key whose value is not already numeric is converted
    in place via ``system_arrow.to_timestamp``; the (mutated) document is
    returned. Falsy inputs (``None``, empty dict) are returned unchanged.
    """
    if not doc:
        return doc

    for key in ('modified_at', 'created_at', 'accessed_at', 'timestamp'):
        if key not in doc:
            continue
        if isinstance(doc[key], (int, float)):
            continue
        # Value is present but not numeric: normalize to an epoch timestamp.
        doc[key] = system_arrow.to_timestamp(doc[key])
    return doc
|
||||
|
||||
|
||||
class MongoQueryIdentity:
|
||||
@@ -42,7 +56,7 @@ class MongoQueryIdentity:
|
||||
"user_uu_id": payload.user_uu_id,
|
||||
"other_domains_list": [payload.main_domain],
|
||||
"main_domain": payload.main_domain,
|
||||
"modified_at": datetime.datetime.now().timestamp(),
|
||||
"modified_at": system_arrow.to_timestamp(system_arrow.now()),
|
||||
},
|
||||
)
|
||||
|
||||
@@ -52,14 +66,15 @@ class MongoQueryIdentity:
|
||||
match=payload.user_uu_id,
|
||||
payload={
|
||||
"other_domains_list": payload.other_domains_list,
|
||||
"modified_at": datetime.datetime.now().timestamp(),
|
||||
"modified_at": system_arrow.to_timestamp(system_arrow.now()),
|
||||
},
|
||||
field="user_uu_id",
|
||||
)
|
||||
|
||||
def get_domain_via_user(self, user_uu_id):
|
||||
self.use_collection("Domain")
|
||||
return self.mongo_engine.get_one(match=str(user_uu_id), field="user_uu_id")
|
||||
result = self.mongo_engine.find(match=user_uu_id, field="user_uu_id")
|
||||
return [validate_timestamp(doc) for doc in result] if result else None
|
||||
|
||||
def refresh_password_history_via_user(self, payload: PasswordHistoryViaUser):
|
||||
self.use_collection("PasswordHistory")
|
||||
@@ -96,14 +111,15 @@ class MongoQueryIdentity:
|
||||
payload={
|
||||
"password_history": password_history_list,
|
||||
"access_history_detail": payload.access_history_detail,
|
||||
"modified_at": datetime.datetime.now().timestamp(),
|
||||
"modified_at": system_arrow.to_timestamp(system_arrow.now()),
|
||||
},
|
||||
field="user_uu_id",
|
||||
)
|
||||
|
||||
def get_password_history_via_user(self, user_uu_id):
|
||||
self.use_collection("PasswordHistory")
|
||||
return self.mongo_engine.get_one(match=user_uu_id, field="user_uu_id")
|
||||
self.use_collection("UserPasswordHistory")
|
||||
result = self.mongo_engine.find(match=user_uu_id, field="user_uu_id")
|
||||
return [validate_timestamp(doc) for doc in result] if result else None
|
||||
|
||||
def update_access_history_via_user(self, payload: AccessHistoryViaUser):
|
||||
self.use_collection("AccessHistory")
|
||||
@@ -119,7 +135,7 @@ class MongoQueryIdentity:
|
||||
payload={
|
||||
"user_uu_id": payload.user_uu_id,
|
||||
"access_history": access_history,
|
||||
"modified_at": datetime.datetime.now().timestamp(),
|
||||
"modified_at": system_arrow.to_timestamp(system_arrow.now()),
|
||||
},
|
||||
field="user_uu_id",
|
||||
)
|
||||
@@ -127,14 +143,11 @@ class MongoQueryIdentity:
|
||||
payload={
|
||||
"user_uu_id": payload.user_uu_id,
|
||||
"access_history": [payload.access_history],
|
||||
"modified_at": datetime.datetime.now().timestamp(),
|
||||
"modified_at": system_arrow.to_timestamp(system_arrow.now()),
|
||||
}
|
||||
)
|
||||
|
||||
def get_access_history_via_user(self, user_uu_id):
|
||||
self.use_collection("AccessHistory")
|
||||
return self.mongo_engine.filter_by(
|
||||
payload={"user_uu_id": user_uu_id},
|
||||
sort_by="modified_at",
|
||||
sort_direction="desc",
|
||||
)
|
||||
result = self.mongo_engine.find(match=user_uu_id, field="user_uu_id")
|
||||
return [validate_timestamp(doc) for doc in result] if result else None
|
||||
|
||||
@@ -73,6 +73,7 @@ class CrudMixin(Base, SmartQueryMixin, SessionMixin, FilterAttributes):
|
||||
]
|
||||
|
||||
creds = None # The credentials to use in the model.
|
||||
lang = "tr" # The language to use in the model.
|
||||
client_arrow: DateTimeLocal = None # The arrow to use in the model.
|
||||
valid_record_dict: dict = {"active": True, "deleted": False}
|
||||
valid_record_args = lambda class_: [class_.active == True, class_.deleted == False]
|
||||
@@ -89,6 +90,7 @@ class CrudMixin(Base, SmartQueryMixin, SessionMixin, FilterAttributes):
|
||||
def set_user_define_properties(cls, token):
|
||||
cls.creds = token.credentials
|
||||
cls.client_arrow = DateTimeLocal(is_client=True, timezone=token.timezone)
|
||||
cls.lang = str(token.lang).lower()
|
||||
|
||||
@classmethod
|
||||
def remove_non_related_inputs(cls, kwargs):
|
||||
@@ -179,7 +181,7 @@ class CrudMixin(Base, SmartQueryMixin, SessionMixin, FilterAttributes):
|
||||
"message": "",
|
||||
}
|
||||
return already_record
|
||||
elif already_record.is_confirmed:
|
||||
elif not already_record.is_confirmed:
|
||||
already_record.meta_data = {
|
||||
"created": False,
|
||||
"error_case": "IsNotConfirmed",
|
||||
@@ -202,7 +204,7 @@ class CrudMixin(Base, SmartQueryMixin, SessionMixin, FilterAttributes):
|
||||
cls.created_by_id = cls.creds.get("person_id", None)
|
||||
cls.created_by = cls.creds.get("person_name", None)
|
||||
created_record.flush()
|
||||
already_record.meta_data = {"created": True, "error_case": None, "message": ""}
|
||||
created_record.meta_data = {"created": True, "error_case": None, "message": ""}
|
||||
return created_record
|
||||
|
||||
@classmethod
|
||||
|
||||
@@ -8,13 +8,14 @@ engine_config = {
|
||||
"url": WagDatabase.DATABASE_URL,
|
||||
"pool_size": 20,
|
||||
"max_overflow": 10,
|
||||
"echo": False,
|
||||
"echo": True,
|
||||
"echo_pool":True,
|
||||
"isolation_level": "READ COMMITTED",
|
||||
"pool_pre_ping": True,
|
||||
}
|
||||
|
||||
engine = create_engine(**engine_config)
|
||||
SessionLocal = sessionmaker(bind=engine, autoflush=False, autocommit=False, echo=True)
|
||||
engine = create_engine(**engine_config, )
|
||||
SessionLocal = sessionmaker(bind=engine, autoflush=False, autocommit=False)
|
||||
session = scoped_session(sessionmaker(bind=engine))
|
||||
|
||||
Base = declarative_base()
|
||||
|
||||
@@ -45,6 +45,31 @@ class FilterAttributes:
|
||||
self.__session__.delete(self)
|
||||
self.__session__.commit()
|
||||
|
||||
    @classmethod
    def save_via_metadata(cls):
        """Save the data via the metadata.

        Commits the shared session only when ``meta_data['created']`` is
        truthy (set by the create path); on SQLAlchemy failure re-raises as
        an HTTP 304 via ``raise_http_exception``.
        """
        try:
            meta_data = getattr(cls, "meta_data", {})
            meta_data_created = meta_data.get("created", False)
            if meta_data_created:
                # TODO: debug print left in — remove or switch to logging.
                print('meta_data_created commit', meta_data_created)
                cls.__session__.commit()
            # NOTE(review): this rollback also runs after a successful commit
            # (a post-commit rollback is a no-op in SQLAlchemy, but an `else`
            # or early return looks intended — confirm and simplify).
            print('meta_data_created rollback', meta_data_created)
            cls.__session__.rollback()
            # cls.raise_http_exception(
            #     status_code="HTTP_304_NOT_MODIFIED",
            #     error_case=meta_data.get("error_case", "Error on save and commit"),
            #     data={},
            #     message=meta_data.get("message", "Error on save and commit"),
            # )
        except SQLAlchemyError as e:
            cls.raise_http_exception(
                status_code="HTTP_304_NOT_MODIFIED",
                error_case=e.__class__.__name__,
                data={},
                # Only the first line of the underlying DB error is exposed.
                message=str(e.__context__).split("\n")[0],
            )
|
||||
|
||||
@classmethod
|
||||
def save(cls):
|
||||
"""Saves the updated model to the current entity db."""
|
||||
@@ -52,11 +77,15 @@ class FilterAttributes:
|
||||
cls.__session__.commit()
|
||||
except SQLAlchemyError as e:
|
||||
cls.raise_http_exception(
|
||||
status_code="HTTP_400_BAD_REQUEST",
|
||||
status_code="HTTP_304_NOT_MODIFIED",
|
||||
error_case=e.__class__.__name__,
|
||||
data={},
|
||||
message=str(e.__context__).split("\n")[0],
|
||||
)
|
||||
    @classmethod
    def rollback(cls):
        """Rollback the current session."""
        # Discards all uncommitted changes on the shared class-level session.
        cls.__session__.rollback()
|
||||
|
||||
def save_and_confirm(self):
|
||||
"""Saves the updated model to the current entity db."""
|
||||
|
||||
@@ -23,3 +23,16 @@ dependencies = [
|
||||
"unidecode>=1.3.8",
|
||||
"uvicorn>=0.32.0",
|
||||
]
|
||||
|
||||
[project.optional-dependencies]
|
||||
test = [
|
||||
"pytest>=8.3.0",
|
||||
"pytest-asyncio>=0.25.0",
|
||||
"httpx>=0.28.0",
|
||||
]
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
pythonpath = ["."]
|
||||
testpaths = ["testers"]
|
||||
python_files = ["test_*.py"]
|
||||
asyncio_mode = "auto"
|
||||
|
||||
5
pytest.ini
Normal file
5
pytest.ini
Normal file
@@ -0,0 +1,5 @@
|
||||
[pytest]
|
||||
pythonpath = .
|
||||
testpaths = testers
|
||||
python_files = test_*.py
|
||||
asyncio_mode = auto
|
||||
93
service_app_init/.dockerignore
Normal file
93
service_app_init/.dockerignore
Normal file
@@ -0,0 +1,93 @@
|
||||
# Git
|
||||
.git
|
||||
.gitignore
|
||||
.gitattributes
|
||||
|
||||
|
||||
# CI
|
||||
.codeclimate.yml
|
||||
.travis.yml
|
||||
.taskcluster.yml
|
||||
|
||||
# Docker
|
||||
docker-compose.yml
|
||||
service_app/Dockerfile
|
||||
.docker
|
||||
.dockerignore
|
||||
|
||||
# Byte-compiled / optimized / DLL files
|
||||
**/__pycache__/
|
||||
**/*.py[cod]
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
service_app/env/
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.coverage
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
target/
|
||||
|
||||
# Virtual environment
|
||||
service_app/.env
|
||||
.venv/
|
||||
venv/
|
||||
|
||||
# PyCharm
|
||||
.idea
|
||||
|
||||
# Python mode for VIM
|
||||
.ropeproject
|
||||
**/.ropeproject
|
||||
|
||||
# Vim swap files
|
||||
**/*.swp
|
||||
|
||||
# VS Code
|
||||
.vscode/
|
||||
|
||||
test_application/
|
||||
|
||||
|
||||
162
service_app_init/.gitignore
vendored
Normal file
162
service_app_init/.gitignore
vendored
Normal file
@@ -0,0 +1,162 @@
|
||||
# ---> Python
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.idea/
|
||||
.Python
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
share/python-wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
MANIFEST
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.nox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
*.py,cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
cover/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
db.sqlite3
|
||||
db.sqlite3-journal
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
.pybuilder/
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# IPython
|
||||
profile_default/
|
||||
ipython_config.py
|
||||
|
||||
# pyenv
|
||||
# For a library or package, you might want to ignore these files since the code is
|
||||
# intended to run in multiple environments; otherwise, check them in:
|
||||
# .python-version
|
||||
|
||||
# pipenv
|
||||
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||
# install all needed dependencies.
|
||||
#Pipfile.lock
|
||||
|
||||
# poetry
|
||||
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
||||
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||
# commonly ignored for libraries.
|
||||
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
||||
#poetry.lock
|
||||
|
||||
# pdm
|
||||
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
||||
#pdm.lock
|
||||
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
||||
# in version control.
|
||||
# https://pdm.fming.dev/#use-with-ide
|
||||
.pdm.toml
|
||||
|
||||
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
||||
__pypackages__/
|
||||
|
||||
# Celery stuff
|
||||
celerybeat-schedule
|
||||
celerybeat.pid
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# Environments
|
||||
service_app/.env
|
||||
.venv
|
||||
service_app/env/
|
||||
venv/
|
||||
service_app/env/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
.spyproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
.dmypy.json
|
||||
dmypy.json
|
||||
|
||||
# Pyre type checker
|
||||
.pyre/
|
||||
|
||||
# pytype static type analyzer
|
||||
.pytype/
|
||||
|
||||
# Cython debug symbols
|
||||
cython_debug/
|
||||
|
||||
# PyCharm
|
||||
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
||||
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
||||
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||
#.idea/
|
||||
|
||||
34
service_app_init/Dockerfile
Normal file
34
service_app_init/Dockerfile
Normal file
@@ -0,0 +1,34 @@
|
||||
FROM python:3.12-slim-bookworm
|
||||
|
||||
ENV PYTHONDONTWRITEBYTECODE 1
|
||||
ENV PYTHONUNBUFFERED 1
|
||||
|
||||
COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/
|
||||
|
||||
COPY ../service_app_init/requirements.txt .
|
||||
|
||||
RUN uv venv
|
||||
RUN uv pip install -r requirements.txt
|
||||
|
||||
COPY ../service_app_init ./service_app_init
|
||||
|
||||
COPY ../databases ./service_app_init/databases
|
||||
COPY ../api_configs ./service_app_init/api_configs
|
||||
COPY ../api_events ./service_app_init/api_events
|
||||
COPY ../api_library ./service_app_init/api_library
|
||||
COPY ../api_validations ./service_app_init/api_validations
|
||||
COPY ../api_objects ./service_app_init/api_objects
|
||||
COPY ../api_services ./service_app_init/api_services
|
||||
#COPY ../service_app/application ./service_app_init/application
|
||||
#COPY ../service_app/routers ./service_app_init/routers
|
||||
|
||||
WORKDIR /service_app_init
|
||||
|
||||
CMD ["uv", "run", "runner.py"]
|
||||
|
||||
# Old File
|
||||
#FROM python:3.10
|
||||
|
||||
#RUN pip install --upgrade pip
|
||||
#RUN pip install --no-cache-dir --upgrade -r requirements.txt
|
||||
#CMD ["python", "-m", "app"]
|
||||
0
service_app_init/__init__.py
Normal file
0
service_app_init/__init__.py
Normal file
110
service_app_init/alembic.ini
Normal file
110
service_app_init/alembic.ini
Normal file
@@ -0,0 +1,110 @@
|
||||
# A generic, single database configuration.
|
||||
|
||||
[alembic]
|
||||
# path to migration scripts
|
||||
script_location = alembic
|
||||
|
||||
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
|
||||
# Uncomment the line below if you want the files to be prepended with date and time
|
||||
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
|
||||
# for all available tokens
|
||||
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
|
||||
|
||||
# sys.path path, will be prepended to sys.path if present.
|
||||
# defaults to the current working directory.
|
||||
prepend_sys_path = .
|
||||
|
||||
# timezone to use when rendering the date within the migration file
|
||||
# as well as the filename.
|
||||
# If specified, requires the python-dateutil library that can be
|
||||
# installed by adding `alembic[tz]` to the pip requirements
|
||||
# string value is passed to dateutil.tz.gettz()
|
||||
# leave blank for localtime
|
||||
# timezone =
|
||||
|
||||
# max length of characters to apply to the
|
||||
# "slug" field
|
||||
# truncate_slug_length = 40
|
||||
|
||||
# set to 'true' to run the environment during
|
||||
# the 'revision' command, regardless of autogenerate
|
||||
# revision_environment = false
|
||||
|
||||
# set to 'true' to allow .pyc and .pyo files without
|
||||
# a source .py file to be detected as revisions in the
|
||||
# versions/ directory
|
||||
# sourceless = false
|
||||
|
||||
# version location specification; This defaults
|
||||
# to alembic/versions. When using multiple version
|
||||
# directories, initial revisions must be specified with --version-path.
|
||||
# The path separator used here should be the separator specified by "version_path_separator" below.
|
||||
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
|
||||
|
||||
# version path separator; As mentioned above, this is the character used to split
|
||||
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
|
||||
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
|
||||
# Valid values for version_path_separator are:
|
||||
#
|
||||
# version_path_separator = :
|
||||
# version_path_separator = ;
|
||||
# version_path_separator = space
|
||||
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
|
||||
|
||||
# set to 'true' to search source files recursively
|
||||
# in each "version_locations" directory
|
||||
# new in Alembic version 1.10
|
||||
# recursive_version_locations = false
|
||||
|
||||
# the output encoding used when revision files
|
||||
# are written from script.py.mako
|
||||
# output_encoding = utf-8
|
||||
|
||||
; sqlalchemy.url = postgresql+psycopg2://berkay_wag_user:berkay_wag_user_password@10.10.2.44:5434/wag_database
|
||||
sqlalchemy.url = postgresql+psycopg2://berkay_wag_user:berkay_wag_user_password@10.10.2.36:5444/wag_database
|
||||
|
||||
[post_write_hooks]
|
||||
# post_write_hooks defines scripts or Python functions that are run
|
||||
# on newly generated revision scripts. See the documentation for further
|
||||
# detail and examples
|
||||
|
||||
# format using "black" - use the console_scripts runner, against the "black" entrypoint
|
||||
# hooks = black
|
||||
# black.type = console_scripts
|
||||
# black.entrypoint = black
|
||||
# black.options = -l 79 REVISION_SCRIPT_FILENAME
|
||||
|
||||
# Logging configuration
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
||||
1
service_app_init/alembic/README
Normal file
1
service_app_init/alembic/README
Normal file
@@ -0,0 +1 @@
|
||||
Generic single-database configuration.
|
||||
79
service_app_init/alembic/env.py
Normal file
79
service_app_init/alembic/env.py
Normal file
@@ -0,0 +1,79 @@
|
||||
from logging.config import fileConfig
|
||||
|
||||
from sqlalchemy import engine_from_config
|
||||
from sqlalchemy import pool
|
||||
|
||||
from alembic import context
|
||||
|
||||
from databases.sql_models.postgres_database import Base
|
||||
from databases.sql_models import *
|
||||
|
||||
# this is the Alembic Config object, which provides
|
||||
# access to the values within the .ini file in use.
|
||||
config = context.config
|
||||
|
||||
# Interpret the config file for Python logging.
|
||||
# This line sets up loggers basically.
|
||||
if config.config_file_name is not None:
|
||||
fileConfig(config.config_file_name)
|
||||
|
||||
# add your model's MetaData object here
|
||||
# for 'autogenerate' support
|
||||
# from myapp import mymodel
|
||||
# target_metadata = mymodel.Base.metadata
|
||||
target_metadata = Base.metadata
|
||||
|
||||
# other values from the config, defined by the needs of env.py,
|
||||
# can be acquired:
|
||||
# my_important_option = config.get_main_option("my_important_option")
|
||||
# ... etc.
|
||||
|
||||
|
||||
def run_migrations_offline() -> None:
|
||||
"""Run migrations in 'offline' mode.
|
||||
|
||||
This configures the context with just a URL
|
||||
and not an Engine, though an Engine is acceptable
|
||||
here as well. By skipping the Engine creation
|
||||
we don't even need a DBAPI to be available.
|
||||
|
||||
Calls to context.execute() here emit the given string to the
|
||||
script output.
|
||||
|
||||
"""
|
||||
url = config.get_main_option("sqlalchemy.url")
|
||||
context.configure(
|
||||
url=url,
|
||||
target_metadata=target_metadata,
|
||||
literal_binds=True,
|
||||
dialect_opts={"paramstyle": "named"},
|
||||
)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
|
||||
def run_migrations_online() -> None:
|
||||
"""Run migrations in 'online' mode.
|
||||
|
||||
In this scenario we need to create an Engine
|
||||
and associate a connection with the context.
|
||||
|
||||
"""
|
||||
connectable = engine_from_config(
|
||||
config.get_section(config.config_ini_section, {}),
|
||||
prefix="sqlalchemy.",
|
||||
poolclass=pool.NullPool,
|
||||
)
|
||||
|
||||
with connectable.connect() as connection:
|
||||
context.configure(connection=connection, target_metadata=target_metadata)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
|
||||
if context.is_offline_mode():
|
||||
run_migrations_offline()
|
||||
else:
|
||||
run_migrations_online()
|
||||
24
service_app_init/alembic/script.py.mako
Normal file
24
service_app_init/alembic/script.py.mako
Normal file
@@ -0,0 +1,24 @@
|
||||
"""${message}
|
||||
|
||||
Revision ID: ${up_revision}
|
||||
Revises: ${down_revision | comma,n}
|
||||
Create Date: ${create_date}
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
${imports if imports else ""}
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = ${repr(up_revision)}
|
||||
down_revision = ${repr(down_revision)}
|
||||
branch_labels = ${repr(branch_labels)}
|
||||
depends_on = ${repr(depends_on)}
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
${upgrades if upgrades else "pass"}
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
${downgrades if downgrades else "pass"}
|
||||
0
service_app_init/alembic/versions/a.txt
Normal file
0
service_app_init/alembic/versions/a.txt
Normal file
24
service_app_init/alembic_generate.py
Normal file
24
service_app_init/alembic_generate.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import os
|
||||
|
||||
|
||||
def generate_alembic_with_session(text):
|
||||
from databases.sql_models.postgres_database import session
|
||||
|
||||
try:
|
||||
result = session.execute(
|
||||
text(
|
||||
"SELECT EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = "
|
||||
"'alembic_version') AS table_existence;"
|
||||
)
|
||||
)
|
||||
if result.first()[0]:
|
||||
session.execute(text("delete from alembic_version;"))
|
||||
session.commit()
|
||||
except Exception as e:
|
||||
print(e)
|
||||
finally:
|
||||
run_command = "python -m alembic stamp head;"
|
||||
run_command += (
|
||||
"python -m alembic revision --autogenerate;python -m alembic upgrade head;"
|
||||
)
|
||||
os.system(run_command)
|
||||
32
service_app_init/initialize_app/__init__.py
Normal file
32
service_app_init/initialize_app/__init__.py
Normal file
@@ -0,0 +1,32 @@
|
||||
from .initialize_default_department import (
|
||||
create_occupant_types_defaults,
|
||||
create_application_defaults,
|
||||
)
|
||||
from .initiator import init_api_enums_build_types
|
||||
from .model_initator import copy_validations_to_database
|
||||
from .modules_and_services_init import (
|
||||
create_endpoints_from_api_functions,
|
||||
create_modules_and_services_and_actions,
|
||||
)
|
||||
|
||||
from .event_initator import (
|
||||
add_events_all_services_and_occupant_types,
|
||||
create_all_events_from_actions,
|
||||
add_events_to_system_super_user,
|
||||
)
|
||||
from .initialize_identity_address_defaults import (
|
||||
create_identity_address_defaults,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"create_application_defaults",
|
||||
"create_occupant_types_defaults",
|
||||
"init_api_enums_build_types",
|
||||
"copy_validations_to_database",
|
||||
"create_endpoints_from_api_functions",
|
||||
"create_modules_and_services_and_actions",
|
||||
"create_all_events_from_actions",
|
||||
"add_events_all_services_and_occupant_types",
|
||||
"create_identity_address_defaults",
|
||||
"add_events_to_system_super_user",
|
||||
]
|
||||
409
service_app_init/initialize_app/default_inits/cities.json
Normal file
409
service_app_init/initialize_app/default_inits/cities.json
Normal file
@@ -0,0 +1,409 @@
|
||||
|
||||
{
|
||||
"cities": [
|
||||
{
|
||||
"ref_id" : 1,
|
||||
"licence_plate" : "01",
|
||||
"city_name" : "Adana"
|
||||
},
|
||||
{
|
||||
"ref_id" : 2,
|
||||
"licence_plate" : "02",
|
||||
"city_name" : "Adıyaman"
|
||||
},
|
||||
{
|
||||
"ref_id" : 3,
|
||||
"licence_plate" : "03",
|
||||
"city_name" : "Afyonkarahisar"
|
||||
},
|
||||
{
|
||||
"ref_id" : 4,
|
||||
"licence_plate" : "04",
|
||||
"city_name" : "Ağrı"
|
||||
},
|
||||
{
|
||||
"ref_id" : 5,
|
||||
"licence_plate" : "05",
|
||||
"city_name" : "Amasya"
|
||||
},
|
||||
{
|
||||
"ref_id" : 6,
|
||||
"licence_plate" : "06",
|
||||
"city_name" : "Ankara"
|
||||
},
|
||||
{
|
||||
"ref_id" : 7,
|
||||
"licence_plate" : "07",
|
||||
"city_name" : "Antalya"
|
||||
},
|
||||
{
|
||||
"ref_id" : 8,
|
||||
"licence_plate" : "08",
|
||||
"city_name" : "Artvin"
|
||||
},
|
||||
{
|
||||
"ref_id" : 9,
|
||||
"licence_plate" : "09",
|
||||
"city_name" : "Aydın"
|
||||
},
|
||||
{
|
||||
"ref_id" : 10,
|
||||
"licence_plate" : "10",
|
||||
"city_name" : "Balıkesir"
|
||||
},
|
||||
{
|
||||
"ref_id" : 11,
|
||||
"licence_plate" : "11",
|
||||
"city_name" : "Bilecik"
|
||||
},
|
||||
{
|
||||
"ref_id" : 12,
|
||||
"licence_plate" : "12",
|
||||
"city_name" : "Bingöl"
|
||||
},
|
||||
{
|
||||
"ref_id" : 13,
|
||||
"licence_plate" : "13",
|
||||
"city_name" : "Bitlis"
|
||||
},
|
||||
{
|
||||
"ref_id" : 14,
|
||||
"licence_plate" : "14",
|
||||
"city_name" : "Bolu"
|
||||
},
|
||||
{
|
||||
"ref_id" : 15,
|
||||
"licence_plate" : "15",
|
||||
"city_name" : "Burdur"
|
||||
},
|
||||
{
|
||||
"ref_id" : 16,
|
||||
"licence_plate" : "16",
|
||||
"city_name" : "Bursa"
|
||||
},
|
||||
{
|
||||
"ref_id" : 17,
|
||||
"licence_plate" : "17",
|
||||
"city_name" : "Çanakkale"
|
||||
},
|
||||
{
|
||||
"ref_id" : 18,
|
||||
"licence_plate" : "18",
|
||||
"city_name" : "Çankırı"
|
||||
},
|
||||
{
|
||||
"ref_id" : 19,
|
||||
"licence_plate" : "19",
|
||||
"city_name" : "Çorum"
|
||||
},
|
||||
{
|
||||
"ref_id" : 20,
|
||||
"licence_plate" : "20",
|
||||
"city_name" : "Denizli"
|
||||
},
|
||||
{
|
||||
"ref_id" : 21,
|
||||
"licence_plate" : "21",
|
||||
"city_name" : "Diyarbakır"
|
||||
},
|
||||
{
|
||||
"ref_id" : 22,
|
||||
"licence_plate" : "22",
|
||||
"city_name" : "Edirne"
|
||||
},
|
||||
{
|
||||
"ref_id" : 23,
|
||||
"licence_plate" : "23",
|
||||
"city_name" : "Elazığ"
|
||||
},
|
||||
{
|
||||
"ref_id" : 24,
|
||||
"licence_plate" : "24",
|
||||
"city_name" : "Erzincan"
|
||||
},
|
||||
{
|
||||
"ref_id" : 25,
|
||||
"licence_plate" : "25",
|
||||
"city_name" : "Erzurum"
|
||||
},
|
||||
{
|
||||
"ref_id" : 26,
|
||||
"licence_plate" : "26",
|
||||
"city_name" : "Eskişehir"
|
||||
},
|
||||
{
|
||||
"ref_id" : 27,
|
||||
"licence_plate" : "27",
|
||||
"city_name" : "Gaziantep"
|
||||
},
|
||||
{
|
||||
"ref_id" : 28,
|
||||
"licence_plate" : "28",
|
||||
"city_name" : "Giresun"
|
||||
},
|
||||
{
|
||||
"ref_id" : 29,
|
||||
"licence_plate" : "29",
|
||||
"city_name" : "Gümüşhane"
|
||||
},
|
||||
{
|
||||
"ref_id" : 30,
|
||||
"licence_plate" : "30",
|
||||
"city_name" : "Hakkari"
|
||||
},
|
||||
{
|
||||
"ref_id" : 31,
|
||||
"licence_plate" : "31",
|
||||
"city_name" : "Hatay"
|
||||
},
|
||||
{
|
||||
"ref_id" : 32,
|
||||
"licence_plate" : "32",
|
||||
"city_name" : "Isparta"
|
||||
},
|
||||
{
|
||||
"ref_id" : 33,
|
||||
"licence_plate" : "33",
|
||||
"city_name" : "Mersin"
|
||||
},
|
||||
{
|
||||
"ref_id" : 34,
|
||||
"licence_plate" : "34",
|
||||
"city_name" : "İstanbul"
|
||||
},
|
||||
{
|
||||
"ref_id" : 35,
|
||||
"licence_plate" : "35",
|
||||
"city_name" : "İzmir"
|
||||
},
|
||||
{
|
||||
"ref_id" : 36,
|
||||
"licence_plate" : "36",
|
||||
"city_name" : "Kars"
|
||||
},
|
||||
{
|
||||
"ref_id" : 37,
|
||||
"licence_plate" : "37",
|
||||
"city_name" : "Kastamonu"
|
||||
},
|
||||
{
|
||||
"ref_id" : 38,
|
||||
"licence_plate" : "38",
|
||||
"city_name" : "Kayseri"
|
||||
},
|
||||
{
|
||||
"ref_id" : 39,
|
||||
"licence_plate" : "39",
|
||||
"city_name" : "Kırklareli"
|
||||
},
|
||||
{
|
||||
"ref_id" : 40,
|
||||
"licence_plate" : "40",
|
||||
"city_name" : "Kırşehir"
|
||||
},
|
||||
{
|
||||
"ref_id" : 41,
|
||||
"licence_plate" : "41",
|
||||
"city_name" : "Kocaeli"
|
||||
},
|
||||
{
|
||||
"ref_id" : 42,
|
||||
"licence_plate" : "42",
|
||||
"city_name" : "Konya"
|
||||
},
|
||||
{
|
||||
"ref_id" : 43,
|
||||
"licence_plate" : "43",
|
||||
"city_name" : "Kütahya"
|
||||
},
|
||||
{
|
||||
"ref_id" : 44,
|
||||
"licence_plate" : "44",
|
||||
"city_name" : "Malatya"
|
||||
},
|
||||
{
|
||||
"ref_id" : 45,
|
||||
"licence_plate" : "45",
|
||||
"city_name" : "Manisa"
|
||||
},
|
||||
{
|
||||
"ref_id" : 46,
|
||||
"licence_plate" : "46",
|
||||
"city_name" : "Kahramanmaraş"
|
||||
},
|
||||
{
|
||||
"ref_id" : 47,
|
||||
"licence_plate" : "47",
|
||||
"city_name" : "Mardin"
|
||||
},
|
||||
{
|
||||
"ref_id" : 48,
|
||||
"licence_plate" : "48",
|
||||
"city_name" : "Muğla"
|
||||
},
|
||||
{
|
||||
"ref_id" : 49,
|
||||
"licence_plate" : "49",
|
||||
"city_name" : "Muş"
|
||||
},
|
||||
{
|
||||
"ref_id" : 50,
|
||||
"licence_plate" : "50",
|
||||
"city_name" : "Nevşehir"
|
||||
},
|
||||
{
|
||||
"ref_id" : 51,
|
||||
"licence_plate" : "51",
|
||||
"city_name" : "Niğde"
|
||||
},
|
||||
{
|
||||
"ref_id" : 52,
|
||||
"licence_plate" : "52",
|
||||
"city_name" : "Ordu"
|
||||
},
|
||||
{
|
||||
"ref_id" : 53,
|
||||
"licence_plate" : "53",
|
||||
"city_name" : "Rize"
|
||||
},
|
||||
{
|
||||
"ref_id" : 54,
|
||||
"licence_plate" : "54",
|
||||
"city_name" : "Sakarya"
|
||||
},
|
||||
{
|
||||
"ref_id" : 55,
|
||||
"licence_plate" : "55",
|
||||
"city_name" : "Samsun"
|
||||
},
|
||||
{
|
||||
"ref_id" : 56,
|
||||
"licence_plate" : "56",
|
||||
"city_name" : "Siirt"
|
||||
},
|
||||
{
|
||||
"ref_id" : 57,
|
||||
"licence_plate" : "57",
|
||||
"city_name" : "Sinop"
|
||||
},
|
||||
{
|
||||
"ref_id" : 58,
|
||||
"licence_plate" : "58",
|
||||
"city_name" : "Sivas"
|
||||
},
|
||||
{
|
||||
"ref_id" : 59,
|
||||
"licence_plate" : "59",
|
||||
"city_name" : "Tekirdağ"
|
||||
},
|
||||
{
|
||||
"ref_id" : 60,
|
||||
"licence_plate" : "60",
|
||||
"city_name" : "Tokat"
|
||||
},
|
||||
{
|
||||
"ref_id" : 61,
|
||||
"licence_plate" : "61",
|
||||
"city_name" : "Trabzon"
|
||||
},
|
||||
{
|
||||
"ref_id" : 62,
|
||||
"licence_plate" : "62",
|
||||
"city_name" : "Tunceli"
|
||||
},
|
||||
{
|
||||
"ref_id" : 63,
|
||||
"licence_plate" : "63",
|
||||
"city_name" : "Şanlıurfa"
|
||||
},
|
||||
{
|
||||
"ref_id" : 64,
|
||||
"licence_plate" : "64",
|
||||
"city_name" : "Uşak"
|
||||
},
|
||||
{
|
||||
"ref_id" : 65,
|
||||
"licence_plate" : "65",
|
||||
"city_name" : "Van"
|
||||
},
|
||||
{
|
||||
"ref_id" : 66,
|
||||
"licence_plate" : "66",
|
||||
"city_name" : "Yozgat"
|
||||
},
|
||||
{
|
||||
"ref_id" : 67,
|
||||
"licence_plate" : "67",
|
||||
"city_name" : "Zonguldak"
|
||||
},
|
||||
{
|
||||
"ref_id" : 68,
|
||||
"licence_plate" : "68",
|
||||
"city_name" : "Aksaray"
|
||||
},
|
||||
{
|
||||
"ref_id" : 69,
|
||||
"licence_plate" : "69",
|
||||
"city_name" : "Bayburt"
|
||||
},
|
||||
{
|
||||
"ref_id" : 70,
|
||||
"licence_plate" : "70",
|
||||
"city_name" : "Karaman"
|
||||
},
|
||||
{
|
||||
"ref_id" : 71,
|
||||
"licence_plate" : "71",
|
||||
"city_name" : "Kırıkkale"
|
||||
},
|
||||
{
|
||||
"ref_id" : 72,
|
||||
"licence_plate" : "72",
|
||||
"city_name" : "Batman"
|
||||
},
|
||||
{
|
||||
"ref_id" : 73,
|
||||
"licence_plate" : "73",
|
||||
"city_name" : "Şırnak"
|
||||
},
|
||||
{
|
||||
"ref_id" : 74,
|
||||
"licence_plate" : "74",
|
||||
"city_name" : "Bartın"
|
||||
},
|
||||
{
|
||||
"ref_id" : 75,
|
||||
"licence_plate" : "75",
|
||||
"city_name" : "Ardahan"
|
||||
},
|
||||
{
|
||||
"ref_id" : 76,
|
||||
"licence_plate" : "76",
|
||||
"city_name" : "Iğdır"
|
||||
},
|
||||
{
|
||||
"ref_id" : 77,
|
||||
"licence_plate" : "77",
|
||||
"city_name" : "Yalova"
|
||||
},
|
||||
{
|
||||
"ref_id" : 78,
|
||||
"licence_plate" : "78",
|
||||
"city_name" : "Karabük"
|
||||
},
|
||||
{
|
||||
"ref_id" : 79,
|
||||
"licence_plate" : "79",
|
||||
"city_name" : "Kilis"
|
||||
},
|
||||
{
|
||||
"ref_id" : 80,
|
||||
"licence_plate" : "80",
|
||||
"city_name" : "Osmaniye"
|
||||
},
|
||||
{
|
||||
"ref_id" : 81,
|
||||
"licence_plate" : "81",
|
||||
"city_name" : "Düzce"
|
||||
}
|
||||
]}
|
||||
1600
service_app_init/initialize_app/default_inits/countries.json
Normal file
1600
service_app_init/initialize_app/default_inits/countries.json
Normal file
File diff suppressed because it is too large
Load Diff
4794
service_app_init/initialize_app/default_inits/district.json
Normal file
4794
service_app_init/initialize_app/default_inits/district.json
Normal file
File diff suppressed because it is too large
Load Diff
28871
service_app_init/initialize_app/default_inits/locality.json
Normal file
28871
service_app_init/initialize_app/default_inits/locality.json
Normal file
File diff suppressed because it is too large
Load Diff
356646
service_app_init/initialize_app/default_inits/neighborhood.json
Normal file
356646
service_app_init/initialize_app/default_inits/neighborhood.json
Normal file
File diff suppressed because it is too large
Load Diff
6
service_app_init/initialize_app/default_inits/notes.txt
Normal file
6
service_app_init/initialize_app/default_inits/notes.txt
Normal file
@@ -0,0 +1,6 @@
|
||||
|
||||
cities:
|
||||
cities.ref_id == cities.city_code
|
||||
|
||||
district_code:
|
||||
district_code.ref_id == district_code.district_code
|
||||
134
service_app_init/initialize_app/event_initator.py
Normal file
134
service_app_init/initialize_app/event_initator.py
Normal file
@@ -0,0 +1,134 @@
|
||||
from databases import (
|
||||
Events,
|
||||
Services,
|
||||
Service2Events,
|
||||
Employees,
|
||||
Staff,
|
||||
)
|
||||
|
||||
from databases.sql_models.event.event import Event2Employee
|
||||
from api_events.events.events.events_bind_modules import ModulesBindEmployeeEventMethods
|
||||
|
||||
active_confirmed = dict(
|
||||
created_by="System",
|
||||
confirmed_by="System",
|
||||
is_confirmed=True,
|
||||
active=True,
|
||||
deleted=False,
|
||||
is_notification_send=True,
|
||||
)
|
||||
|
||||
|
||||
def create_all_events_from_actions():
|
||||
import api_events.events as events
|
||||
from databases import EndpointRestriction
|
||||
|
||||
an_empty_list, duplicate_list = [], []
|
||||
|
||||
for event in events.__all__:
|
||||
event_selected = getattr(events, event)
|
||||
for (
|
||||
event_selected_key,
|
||||
event_selected_one,
|
||||
) in event_selected.__event_keys__.items():
|
||||
an_empty_list.append(event_selected_key)
|
||||
event_selected_function = getattr(event_selected, event_selected_one, None)
|
||||
if not event_selected.action.endpoint:
|
||||
raise Exception(
|
||||
f"Endpoint not found in {event_selected.__name__} class"
|
||||
)
|
||||
endpoint_restriction = EndpointRestriction.filter_one(
|
||||
EndpointRestriction.endpoint_name.ilike(
|
||||
f"%{event_selected.action.endpoint}%"
|
||||
),
|
||||
system=True,
|
||||
).data
|
||||
if endpoint_restriction and event_selected_function:
|
||||
selected_event = Events.filter_one(
|
||||
Events.event_type == event_selected.event_type,
|
||||
Events.function_class == event,
|
||||
Events.function_code == event_selected_key,
|
||||
Events.endpoint_id == endpoint_restriction.id,
|
||||
Events.endpoint_uu_id == str(endpoint_restriction.uu_id),
|
||||
system=True,
|
||||
).data
|
||||
if not selected_event:
|
||||
created_event = Events.find_or_create(
|
||||
event_type=event_selected.event_type,
|
||||
function_class=event,
|
||||
function_code=event_selected_key,
|
||||
endpoint_id=endpoint_restriction.id,
|
||||
endpoint_uu_id=str(endpoint_restriction.uu_id),
|
||||
**active_confirmed,
|
||||
)
|
||||
created_event.save()
|
||||
created_event.update(is_confirmed=True)
|
||||
created_event.save()
|
||||
print(f"Event created: {created_event.uu_id}")
|
||||
|
||||
for item in an_empty_list:
|
||||
if an_empty_list.count(item) > 1:
|
||||
if item not in duplicate_list:
|
||||
duplicate_list.append(item)
|
||||
|
||||
if duplicate_list:
|
||||
raise Exception(
|
||||
f"Duplicate events found: {duplicate_list}. Check events folder look for given uu-ids."
|
||||
)
|
||||
return True
|
||||
|
||||
|
||||
def add_events_all_services_and_occupant_types():
|
||||
import api_events.tasks2events as tasks2events
|
||||
|
||||
for event_block in tasks2events.__all__:
|
||||
event_block_class = getattr(tasks2events, event_block)
|
||||
service_selected = Services.filter_one(
|
||||
Services.service_code == getattr(event_block_class, "service_code", None),
|
||||
system=True,
|
||||
).data
|
||||
if not service_selected:
|
||||
raise Exception(f"{event_block_class.service_code} service is not found")
|
||||
|
||||
service_selected.update(
|
||||
related_responsibility=getattr(event_block_class, "related_code", None)
|
||||
)
|
||||
for event_id, event_uu_id in event_block_class():
|
||||
if Service2Events.filter_by_one(
|
||||
service_id=service_selected.id, event_id=event_id, system=True
|
||||
).data:
|
||||
continue
|
||||
service_events = Service2Events.find_or_create(
|
||||
service_id=service_selected.id,
|
||||
service_uu_id=str(service_selected.uu_id),
|
||||
event_id=event_id,
|
||||
event_uu_id=event_uu_id,
|
||||
)
|
||||
service_events.save_and_confirm()
|
||||
return
|
||||
|
||||
|
||||
def add_events_to_system_super_user():
|
||||
|
||||
add_service = Services.filter_by_one(system=True, service_code="SRE-SUE").data
|
||||
if not add_service:
|
||||
raise Exception("Service not found")
|
||||
|
||||
find_staff = Staff.filter_by_one(system=True, staff_code="SUE").data
|
||||
if not find_staff:
|
||||
raise Exception("Super User not found")
|
||||
|
||||
add_employee = Employees.filter_by_one(system=True, staff_id=find_staff.id).data
|
||||
if not add_employee:
|
||||
raise Exception("Super User Employee not found")
|
||||
|
||||
ModulesBindEmployeeEventMethods.bind_default_module_for_first_init_occupant(
|
||||
employee_id=add_employee.id
|
||||
)
|
||||
event_employee = Event2Employee.find_or_create(
|
||||
event_service_id=add_service.id,
|
||||
event_service_uu_id=str(add_service.uu_id),
|
||||
employee_id=add_employee.id,
|
||||
employee_uu_id=str(add_employee.uu_id),
|
||||
)
|
||||
event_employee.save_and_confirm()
|
||||
514
service_app_init/initialize_app/initialize_default_department.py
Normal file
514
service_app_init/initialize_app/initialize_default_department.py
Normal file
@@ -0,0 +1,514 @@
|
||||
from databases.no_sql_models.validations import DomainViaUser
|
||||
from databases import MongoQueryIdentity, Event2Employee, OccupantTypes
|
||||
|
||||
|
||||
def create_occupant_types_defaults():
|
||||
"""
|
||||
occupant_category = mapped_column(String, server_default="")
|
||||
occupant_category_type = mapped_column(String, server_default="")
|
||||
occupant_is_unique = mapped_column(Boolean, server_default="0")
|
||||
"""
|
||||
list_occupant_types = [
|
||||
{
|
||||
"occupant_type": "Toplantı Başkanı",
|
||||
"occupant_description": "Toplantı Başkanı",
|
||||
"occupant_code": "MT-PRS",
|
||||
"occupant_category": "Toplantı",
|
||||
"occupant_category_type": "MT",
|
||||
"occupant_is_unique": True,
|
||||
},
|
||||
{
|
||||
"occupant_type": "Toplantı Katip",
|
||||
"occupant_description": "Toplantıda tutanak tutan kişi",
|
||||
"occupant_code": "MT-WRT",
|
||||
"occupant_category": "Toplantı",
|
||||
"occupant_category_type": "MT",
|
||||
"occupant_is_unique": True,
|
||||
},
|
||||
{
|
||||
"occupant_type": "Toplantı Katılımcısı",
|
||||
"occupant_description": "Toplantıda sadece katılan kişi",
|
||||
"occupant_code": "MT-ATT",
|
||||
"occupant_category": "Toplantı",
|
||||
"occupant_category_type": "MT",
|
||||
"occupant_is_unique": False,
|
||||
},
|
||||
{
|
||||
"occupant_type": "Toplantı Danışman",
|
||||
"occupant_description": "Toplantıda danışmanlık yapan kişi",
|
||||
"occupant_code": "MT-ADV",
|
||||
"occupant_category": "Toplantı",
|
||||
"occupant_category_type": "MT",
|
||||
"occupant_is_unique": False,
|
||||
},
|
||||
{
|
||||
"occupant_type": "Toplantı Seçilmiş Başkanı",
|
||||
"occupant_description": "Toplantı Seçilmiş Başkanı",
|
||||
"occupant_code": "MT-VPR",
|
||||
"occupant_category": "Toplantı",
|
||||
"occupant_category_type": "MT",
|
||||
"occupant_is_unique": True,
|
||||
},
|
||||
{
|
||||
"occupant_type": "Daire Sahibi",
|
||||
"occupant_description": "Daire Sahibi",
|
||||
"occupant_code": "FL-OWN",
|
||||
"occupant_category": "Daire",
|
||||
"occupant_category_type": "FL",
|
||||
"occupant_is_unique": True,
|
||||
},
|
||||
{
|
||||
"occupant_type": "Daire Kiracısı",
|
||||
"occupant_description": "Daire Kiracısı",
|
||||
"occupant_code": "FL-TEN",
|
||||
"occupant_category": "Daire",
|
||||
"occupant_category_type": "FL",
|
||||
"occupant_is_unique": True,
|
||||
},
|
||||
{
|
||||
"occupant_type": "Daire Sakini",
|
||||
"occupant_description": "Daire Sakini",
|
||||
"occupant_code": "FL-RES",
|
||||
"occupant_category": "Daire",
|
||||
"occupant_category_type": "FL",
|
||||
"occupant_is_unique": False,
|
||||
},
|
||||
{
|
||||
"occupant_type": "Daire Sakini Vekili",
|
||||
"occupant_description": "Daire Sakini Vekili",
|
||||
"occupant_code": "FL-REP",
|
||||
"occupant_category": "Daire",
|
||||
"occupant_category_type": "FL",
|
||||
"occupant_is_unique": False,
|
||||
},
|
||||
{
|
||||
"occupant_type": "Bina Avukatı",
|
||||
"occupant_description": "Bina Avukatı",
|
||||
"occupant_code": "BU-ATT",
|
||||
"occupant_category": "Bina",
|
||||
"occupant_category_type": "BU",
|
||||
"occupant_is_unique": False,
|
||||
},
|
||||
{
|
||||
"occupant_type": "Bina Avukatı Yardımcısı",
|
||||
"occupant_description": "Bina Avukatı Yardımcısı",
|
||||
"occupant_code": "BU-ATA",
|
||||
"occupant_category": "Bina",
|
||||
"occupant_category_type": "BU",
|
||||
"occupant_is_unique": False,
|
||||
},
|
||||
{
|
||||
"occupant_type": "Bina Denetmen Yardımcısı",
|
||||
"occupant_description": "Bina Denetmen Yardımcısı",
|
||||
"occupant_code": "BU-SPA",
|
||||
"occupant_category": "Bina",
|
||||
"occupant_category_type": "BU",
|
||||
"occupant_is_unique": False,
|
||||
},
|
||||
{
|
||||
"occupant_type": "Bina Denetmeni",
|
||||
"occupant_description": "Bina Denetmeni",
|
||||
"occupant_code": "BU-SPV",
|
||||
"occupant_category": "Bina",
|
||||
"occupant_category_type": "BU",
|
||||
"occupant_is_unique": False,
|
||||
},
|
||||
{
|
||||
"occupant_type": "Bina Yönetici Yardımcısı",
|
||||
"occupant_description": "Bina Yönetici Yardımcısı",
|
||||
"occupant_code": "BU-MNA",
|
||||
"occupant_category": "Bina",
|
||||
"occupant_category_type": "BU",
|
||||
"occupant_is_unique": False,
|
||||
},
|
||||
{
|
||||
"occupant_type": "Bina Yöneticisi",
|
||||
"occupant_description": "Bina Yöneticisi",
|
||||
"occupant_code": "BU-MNG",
|
||||
"occupant_category": "Bina",
|
||||
"occupant_category_type": "BU",
|
||||
"occupant_is_unique": True,
|
||||
},
|
||||
{
|
||||
"occupant_type": "Bina Muhasabecisi",
|
||||
"occupant_description": "Bina Muhasabecisi",
|
||||
"occupant_code": "BU-ACC",
|
||||
"occupant_category": "Bina",
|
||||
"occupant_category_type": "BU",
|
||||
"occupant_is_unique": False,
|
||||
},
|
||||
{
|
||||
"occupant_type": "Proje Lideri",
|
||||
"occupant_description": "Proje Lideri",
|
||||
"occupant_code": "PRJ-LDR",
|
||||
"occupant_category": "Proje",
|
||||
"occupant_category_type": "PRJ",
|
||||
"occupant_is_unique": False,
|
||||
},
|
||||
{
|
||||
"occupant_type": "Proje Sorumlusu",
|
||||
"occupant_description": "Proje Sorumlusu",
|
||||
"occupant_code": "PRJ-RES",
|
||||
"occupant_category": "Proje",
|
||||
"occupant_category_type": "PRJ",
|
||||
"occupant_is_unique": False,
|
||||
},
|
||||
{
|
||||
"occupant_type": "Proje Ekibi",
|
||||
"occupant_description": "Proje Ekibi",
|
||||
"occupant_code": "PRJ-EMP",
|
||||
"occupant_category": "Proje",
|
||||
"occupant_category_type": "PRJ",
|
||||
"occupant_is_unique": False,
|
||||
},
|
||||
{
|
||||
"occupant_type": "Proje Finans Sorumlusu",
|
||||
"occupant_description": "Proje Finans Sorumlusu",
|
||||
"occupant_code": "PRJ-FIN",
|
||||
"occupant_category": "Proje",
|
||||
"occupant_category_type": "PRJ",
|
||||
"occupant_is_unique": False,
|
||||
},
|
||||
{
|
||||
"occupant_type": "Proje Teknik Sorumlusu",
|
||||
"occupant_description": "Proje Teknik Sorumlusu",
|
||||
"occupant_code": "PRJ-TEC",
|
||||
"occupant_category": "Proje",
|
||||
"occupant_category_type": "PRJ",
|
||||
"occupant_is_unique": False,
|
||||
},
|
||||
{
|
||||
"occupant_type": "Daire Mülkiyet Vekili",
|
||||
"occupant_description": "Daire Mülkiyet Vekili",
|
||||
"occupant_code": "FL-DEP", # deputy
|
||||
"occupant_category": "Daire",
|
||||
"occupant_category_type": "FL",
|
||||
"occupant_is_unique": False,
|
||||
},
|
||||
{
|
||||
"occupant_type": "Bina Teknik Sorumlusu",
|
||||
"occupant_description": "Bina Teknik Sorumlusu",
|
||||
"occupant_code": "BU-TEC",
|
||||
"occupant_category": "Bina",
|
||||
"occupant_category_type": "BU",
|
||||
"occupant_is_unique": False,
|
||||
},
|
||||
{
|
||||
"occupant_type": "Bina Teknik Elemanı",
|
||||
"occupant_description": "Bina Teknik Elemanı",
|
||||
"occupant_code": "BU-EMP",
|
||||
"occupant_category": "Bina",
|
||||
"occupant_category_type": "BU",
|
||||
"occupant_is_unique": False,
|
||||
},
|
||||
{
|
||||
"occupant_type": "Bina Teknik Freelancer",
|
||||
"occupant_description": "Bina Teknik Freelancer",
|
||||
"occupant_code": "BU-FLC",
|
||||
"occupant_category": "Bina",
|
||||
"occupant_category_type": "BU",
|
||||
"occupant_is_unique": False,
|
||||
},
|
||||
]
|
||||
for list_occupant_type in list_occupant_types:
|
||||
try:
|
||||
created_type = OccupantTypes.find_or_create(**list_occupant_type)
|
||||
created_type.save_and_confirm()
|
||||
except Exception as e:
|
||||
print(f"Error: {e}")
|
||||
|
||||
|
||||
def create_application_defaults():
    """Seed the default company, duties, staff, people and users.

    Idempotently (via ``find_or_create``) creates the Evyos management
    company, its default duties, the Application Manager and Super User duty
    rows, the staff/employee/user records for both roles, and registers each
    user's domain in Mongo.  Every created row is collected in
    ``created_list`` and confirmed in a single pass at the end.

    Fixes over the previous revision:
    * ``duties_uu_id`` values now match their ``duties_id`` counterparts
      (three copy-paste mismatches).
    * The two ``RelationshipDutyCompany`` rows are appended (and therefore
      confirmed) instead of re-appending the already-listed ``Duties`` rows.
    * The unused ``bulk_duty`` lookup was removed.
    """
    from databases import (
        Companies,
        Departments,
        Duty,
        Duties,
        Employees,
        People,
        Users,
        Staff,
        RelationshipDutyCompany,
    )

    created_list = []
    created_by, confirmed_by = "System", "System"

    # --- company ------------------------------------------------------------
    company_management = Companies.find_or_create(
        **{
            "formal_name": "Evyos LTD",
            "public_name": "Evyos Verimlilik Sistemleri",
            "company_type": "LTD",
            "commercial_type": "Commercial",
            "tax_no": "123132123132",
            "company_tag": "Evyos",
            "default_lang_type": "TR",
            "default_money_type": "TL",
            "created_by": created_by,
            "confirmed_by": confirmed_by,
            "is_commercial": True,
        }
    )
    created_list.append(company_management)

    # Default duties shipped with every new company.
    created_list.extend(
        Duties.init_a_company_default_duties(
            company_id=company_management.id,
            company_uu_id=str(company_management.uu_id),
        )
    )

    # IT department created by the default-duty bootstrap above.
    it_dept = Departments.filter_by_one(
        system=True,
        department_name="IT Department",
        department_code="ITD001",
        company_id=company_management.id,
        company_uu_id=str(company_management.uu_id),
    ).data

    # --- duties ---------------------------------------------------------------
    # DM/NM duties share the same audit fields; description mirrors the name.
    for duty_name, duty_code in (
        ("Database Manager", "DM"),
        ("Network Manager", "NM"),
    ):
        created_duty = Duty.find_or_create(
            duty_name=duty_name,
            duty_code=duty_code,
            duty_description=duty_name,
            created_by=created_by,
            confirmed_by=confirmed_by,
            is_confirmed=True,
            active=True,
            deleted=False,
            is_notification_send=True,
        )
        created_list.append(created_duty)

    application_manager_duty = Duty.find_or_create(
        duty_name="Application Manager",
        duty_code="AM",
        duty_description="Application Manager",
        created_by=created_by,
        confirmed_by=confirmed_by,
        is_confirmed=True,
        active=True,
        deleted=False,
        is_notification_send=True,
    )
    created_list.append(application_manager_duty)

    application_super_user_duty = Duty.find_or_create(
        duty_name="Super User",
        duty_code="SUE",
        duty_description="Super User",
        created_by=created_by,
        confirmed_by=confirmed_by,
        is_confirmed=True,
        active=True,
        deleted=False,
        is_notification_send=True,
    )
    created_list.append(application_super_user_duty)

    application_manager_duties = Duties.find_or_create(
        department_id=it_dept.id,
        department_uu_id=str(it_dept.uu_id),
        duties_id=application_manager_duty.id,
        duties_uu_id=str(application_manager_duty.uu_id),
        company_id=company_management.id,
        company_uu_id=str(company_management.uu_id),
        is_confirmed=True,
        active=True,
        deleted=False,
        is_notification_send=True,
    )
    created_list.append(application_manager_duties)

    super_user_duties = Duties.find_or_create(
        department_id=it_dept.id,
        department_uu_id=str(it_dept.uu_id),
        duties_id=application_super_user_duty.id,
        # Fixed: previously passed application_manager_duty.uu_id, which did
        # not match duties_id above.
        duties_uu_id=str(application_super_user_duty.uu_id),
        company_id=company_management.id,
        company_uu_id=str(company_management.uu_id),
        is_confirmed=True,
        active=True,
        deleted=False,
        is_notification_send=True,
    )
    created_list.append(super_user_duties)

    manager_relationship = RelationshipDutyCompany.find_or_create(
        duties_id=application_manager_duties.id,
        owner_id=company_management.id,
        member_id=company_management.id,
        parent_id=None,
        child_count=0,
        is_confirmed=True,
        active=True,
        deleted=False,
        is_notification_send=True,
    )
    # Fixed: append the relationship itself so it gets confirmed below
    # (previously the already-listed Duties row was appended a second time).
    created_list.append(manager_relationship)

    super_user_relationship = RelationshipDutyCompany.find_or_create(
        duties_id=super_user_duties.id,
        owner_id=company_management.id,
        member_id=company_management.id,
        parent_id=None,
        child_count=0,
        is_confirmed=True,
        active=True,
        deleted=False,
        is_notification_send=True,
    )
    created_list.append(super_user_relationship)

    # --- people ---------------------------------------------------------------
    app_manager = People.find_or_create(
        **{
            "firstname": "Berkay Application Manager",
            "surname": "Karatay",
            "sex_code": "M",
            "middle_name": "",
            "father_name": "Father",
            "mother_name": "Mother",
            "country_code": "TR",
            "national_identity_id": "12312312312",
            "birth_place": "Ankara",
            "birth_date": "01.07.1990",
            "tax_no": "1231231231",
            "created_by": created_by,
            "confirmed_by": confirmed_by,
        }
    )
    created_list.append(app_manager)

    sup_manager = People.find_or_create(
        **{
            "firstname": "Berkay Super User",
            "surname": "Karatay",
            "sex_code": "M",
            "middle_name": "",
            "father_name": "Father",
            "mother_name": "Mother",
            "country_code": "TR",
            "national_identity_id": "12312312313",
            "birth_place": "Ankara",
            "birth_date": "01.07.1990",
            "tax_no": "1231231232",
            "created_by": created_by,
            "confirmed_by": confirmed_by,
        }
    )
    created_list.append(sup_manager)

    # --- staff & employees ----------------------------------------------------
    application_manager_staff = Staff.find_or_create(
        staff_description="Application Manager",
        staff_name="Application Manager Employee",
        staff_code="AME",
        duties_id=application_manager_duties.id,
        # Fixed: uu_id now matches duties_id (was application_manager_duty.uu_id).
        duties_uu_id=str(application_manager_duties.uu_id),
        is_confirmed=True,
        active=True,
        deleted=False,
        is_notification_send=True,
    )
    created_list.append(application_manager_staff)

    super_user_staff = Staff.find_or_create(
        staff_description="Super User",
        staff_name="Super User Employee",
        staff_code="SUE",
        duties_id=super_user_duties.id,
        # Fixed: uu_id now matches duties_id (was application_manager_duty.uu_id).
        duties_uu_id=str(super_user_duties.uu_id),
        is_confirmed=True,
        active=True,
        deleted=False,
        is_notification_send=True,
    )
    created_list.append(super_user_staff)

    app_manager_employee = Employees.find_or_create(
        staff_id=application_manager_staff.id,
        staff_uu_id=str(application_manager_staff.uu_id),
        people_id=app_manager.id,
        people_uu_id=str(app_manager.uu_id),
        is_confirmed=True,
        active=True,
        deleted=False,
        is_notification_send=True,
    )
    created_list.append(app_manager_employee)

    super_user_employee = Employees.find_or_create(
        staff_id=super_user_staff.id,
        staff_uu_id=str(super_user_staff.uu_id),
        people_id=sup_manager.id,
        people_uu_id=str(sup_manager.uu_id),
        is_confirmed=True,
        active=True,
        deleted=False,
        is_notification_send=True,
    )
    created_list.append(super_user_employee)

    # --- users & Mongo domains ------------------------------------------------
    app_manager_user = Users.find_or_create(
        person_id=app_manager.id,
        person_uu_id=str(app_manager.uu_id),
        user_tag=app_manager.person_tag,
        email="karatay.berkay.man@evyos.com.tr",
        phone_number="+901111111111",
        avatar="https://s.tmimgcdn.com/scr/800x500/276800/building-home-nature-logo-vector-template-3_276851-original.jpg",
        created_by=created_by,
        confirmed_by=confirmed_by,
        related_company=str(company_management.uu_id),
        is_confirmed=True,
        active=True,
        deleted=False,
        is_notification_send=True,
    )
    created_list.append(app_manager_user)
    app_manager_user.reset_password_token(found_user=app_manager_user)

    mongo_engine = MongoQueryIdentity(company_uuid=company_management.uu_id)
    mongo_engine.create_domain_via_user(
        payload=DomainViaUser(
            user_uu_id=str(app_manager_user.uu_id),
            main_domain="evyos.com.tr",
            other_domains_list=["evyos.com.tr"],
        )
    )

    sup_manager_user = Users.find_or_create(
        person_id=sup_manager.id,
        person_uu_id=str(sup_manager.uu_id),
        user_tag=sup_manager.person_tag,
        email="karatay.berkay.sup@evyos.com.tr",
        phone_number="+901111111112",
        avatar="https://s.tmimgcdn.com/scr/800x500/276800/building-home-nature-logo-vector-template-3_276851-original.jpg",
        created_by=created_by,
        confirmed_by=confirmed_by,
        related_company=str(company_management.uu_id),
        is_confirmed=True,
        active=True,
        deleted=False,
        is_notification_send=True,
    )
    created_list.append(sup_manager_user)
    sup_manager_user.reset_password_token(found_user=sup_manager_user)
    mongo_engine.create_domain_via_user(
        payload=DomainViaUser(
            user_uu_id=str(sup_manager_user.uu_id),
            main_domain="evyos.com.tr",
            other_domains_list=["evyos.com.tr"],
        )
    )

    People.save()
    # Confirm everything created above in one pass.
    for created_item in created_list:
        created_item.save_and_confirm()
    print("All Defaults Create is now completed")
|
||||
@@ -0,0 +1,169 @@
|
||||
from json import loads
|
||||
from os import path
|
||||
from databases import (
|
||||
AddressCountry,
|
||||
AddressCity,
|
||||
AddressDistrict,
|
||||
AddressLocality,
|
||||
AddressNeighborhood,
|
||||
AddressState,
|
||||
)
|
||||
|
||||
# Folder (relative to the service's working directory) holding the JSON
# seed data consumed by the address-default loaders below.
path_to_folder = "initialize_app/default_inits"
# Seed files expected inside that folder.
# NOTE(review): this list is not referenced by the loaders in this module —
# each one hard-codes its own file name; confirm whether it is used elsewhere.
list_of_snippets = [
    "countries.json",
    "cities.json",
    "district.json",
    "locality.json",
    "neighborhood.json",
]
|
||||
|
||||
|
||||
def create_country_defaults(path_to_joined_folder, confirmed_by_system):
    """Load ``countries.json`` and upsert an ``AddressCountry`` row per entry.

    :param path_to_joined_folder: callable mapping a JSON file name to its path.
    :param confirmed_by_system: audit/confirmation kwargs applied to every row.
    """
    with open(path_to_joined_folder("countries.json"), "r") as file:
        countries = loads(file.read())

    print("Countries are read from file ------------------")
    if not countries:
        print("Countries json file is empty")
        # Fixed: previously fell through and iterated over ``None`` below.
        return

    # Default to an empty list so a missing key no longer raises TypeError.
    for country in countries.get("tr_co", []):
        AddressCountry.find_or_create(
            country_name=country.get("country_name"),
            country_code=country.get("country_code"),
            ref_id=str(country.get("ref_id")),
            **confirmed_by_system,
        )
|
||||
|
||||
|
||||
def create_cities_defaults(path_to_joined_folder, confirmed_by_system, state_id):
    """Load ``cities.json`` and upsert an ``AddressCity`` row per entry.

    :param path_to_joined_folder: callable mapping a JSON file name to its path.
    :param confirmed_by_system: audit/confirmation kwargs applied to every row.
    :param state_id: id of the parent ``AddressState`` row.
    """
    with open(path_to_joined_folder("cities.json"), "r") as file:
        cities = loads(file.read())
    print("Cities are read from file ------------------")
    if not cities:
        print("Cities json file is empty")
        # Fixed: previously fell through and iterated over ``None`` below.
        return

    for city in cities.get("cities", []):
        city_obj = AddressCity.find_or_create(
            state_id=state_id,
            city_name=city.get("city_name"),
            # The licence plate number doubles as the city code.
            city_code=city.get("licence_plate"),
            licence_plate=city.get("licence_plate"),
            ref_id=str(city.get("ref_id")),
            **confirmed_by_system,
        )
        print(f"City {city_obj.city_name} is created")
|
||||
|
||||
|
||||
def create_district_defaults(path_to_joined_folder, confirmed_by_system):
    """Load ``district.json`` and upsert an ``AddressDistrict`` per entry,
    linked to its parent city through the city's ``ref_id``.

    :param path_to_joined_folder: callable mapping a JSON file name to its path.
    :param confirmed_by_system: audit/confirmation kwargs applied to every row.
    """
    with open(path_to_joined_folder("district.json"), "r") as file:
        districts = loads(file.read())
    print("Districts are read from file ------------------")
    if not districts:
        print("Districts json file is empty")
        # Fixed: previously fell through and iterated over ``None`` below.
        return

    for district in districts.get("tr_ilce", []):
        city = AddressCity.find_one(ref_id=str(district.get("city_id")))
        if not city:
            print(f"City with ref_id {district.get('city_id')} is not found")
            # Fixed: previously fell through and crashed on ``city.id``.
            continue

        district_obj = AddressDistrict.find_or_create(
            city_id=city.id,
            district_name=district.get("district_name"),
            district_code=str(district.get("ref_id")),
            ref_id=str(district.get("ref_id")),
            **confirmed_by_system,
        )
        print(f"District {district_obj} is created")
|
||||
|
||||
|
||||
def create_locality_defaults(path_to_joined_folder, confirmed_by_system):
    """Load ``locality.json`` and upsert an ``AddressLocality`` per entry,
    linked to its parent district through the district's ``ref_id``.

    :param path_to_joined_folder: callable mapping a JSON file name to its path.
    :param confirmed_by_system: audit/confirmation kwargs applied to every row.
    """
    with open(path_to_joined_folder("locality.json"), "r") as file:
        localities = loads(file.read())
    print("Localities are read from file ------------------")
    if not localities:
        print("Localities json file is empty")
        # Fixed: previously fell through and iterated over ``None`` below.
        return

    for locality in localities.get("tr_semt", []):
        district = AddressDistrict.find_one(ref_id=str(locality.get("district_id")))
        if not district:
            print(
                f"District with ref_id {locality.get('district_id')} is not found"
            )
            # Fixed: previously fell through and crashed on ``district.id``.
            continue

        locality_obj = AddressLocality.find_or_create(
            district_id=district.id,
            locality_name=locality.get("locality_name"),
            locality_code=str(locality.get("post_code")),
            ref_id=str(locality.get("ref_id")),
            **confirmed_by_system,
        )
        print(f"Locality {locality_obj} is created")
|
||||
|
||||
|
||||
def create_neighborhood_defaults(path_to_joined_folder, confirmed_by_system):
    """Load ``neighborhood.json`` and upsert an ``AddressNeighborhood`` per
    entry.  A neighborhood must resolve its district; the locality link is
    optional (``None`` when the referenced locality is absent).

    :param path_to_joined_folder: callable mapping a JSON file name to its path.
    :param confirmed_by_system: audit/confirmation kwargs applied to every row.
    """
    with open(path_to_joined_folder("neighborhood.json"), "r") as file:
        neighborhoods = loads(file.read())
    print("Neighborhoods are read from file ------------------")
    if not neighborhoods:
        print("Neighborhoods json file is empty")
        # Fixed: previously fell through and iterated over ``None`` below.
        return

    for neighborhood in neighborhoods.get("tr_mahalle_koy", []):
        locality = AddressLocality.find_one(
            ref_id=str(neighborhood.get("locality_id"))
        )
        district = AddressDistrict.find_one(
            ref_id=str(neighborhood.get("district_id"))
        )

        if not district:
            print(
                f"District with ref_id {neighborhood.get('district_id')} is not found"
            )
            # Fixed: previously fell through and crashed on ``district.id``.
            continue

        neighborhood_obj = AddressNeighborhood.find_or_create(
            # Locality is optional by design; district is mandatory.
            locality_id=locality.id if locality else None,
            district_id=district.id,
            neighborhood_name=neighborhood.get("neighborhood_name"),
            neighborhood_code=str(neighborhood.get("ref_id")),
            ref_id=str(neighborhood.get("ref_id")),
            **confirmed_by_system,
        )
        print(f"Neighborhood {neighborhood_obj} is created")
|
||||
|
||||
|
||||
def create_identity_address_defaults():
    """Seed the full Turkish address hierarchy from the bundled JSON files:
    country -> state -> city -> district -> locality -> neighborhood.

    :return: ``True`` once the whole pipeline has run.
    """
    print("Creating address defaults ------------------")

    # PEP 8 (E731): a named ``def`` instead of a lambda bound to a name.
    def path_to_joined_folder(json_name):
        """Resolve *json_name* inside the default_inits data folder."""
        return path.join(path_to_folder, json_name)

    # Audit/confirmation fields applied to every seeded row.
    confirmed_by_system = dict(
        is_confirmed=True,
        active=True,
        deleted=False,
        is_notification_send=True,
        created_by="System",
        confirmed_by="System",
    )

    create_country_defaults(path_to_joined_folder, confirmed_by_system)

    # NOTE(review): assumes countries.json seeded a row with ref_id "90"
    # (Türkiye); a missing row would raise AttributeError on ``turkey.id``.
    turkey = AddressCountry.find_one(ref_id="90")
    turkey_state = AddressState.find_or_create(
        state_name="Türkiye",
        state_code="TR",
        country_id=turkey.id,
        **confirmed_by_system,
    )

    create_cities_defaults(path_to_joined_folder, confirmed_by_system, turkey_state.id)
    create_district_defaults(path_to_joined_folder, confirmed_by_system)
    create_locality_defaults(path_to_joined_folder, confirmed_by_system)
    create_neighborhood_defaults(path_to_joined_folder, confirmed_by_system)
    print("All address defaults are created ------------------")
    return True
|
||||
245
service_app_init/initialize_app/initiator.py
Normal file
245
service_app_init/initialize_app/initiator.py
Normal file
@@ -0,0 +1,245 @@
|
||||
from databases import (
|
||||
BuildTypes,
|
||||
ApiEnumDropdown,
|
||||
)
|
||||
|
||||
|
||||
def init_api_enums_build_types():
    """Seed the default ``BuildTypes`` rows and the ``ApiEnumDropdown`` rows.

    The seed data is kept in two flat tuple tables so shared keys
    (``function_code``/``lang`` and the dict boilerplate) are written once
    instead of being repeated per row; insertion order is unchanged.
    """
    from api_validations.validations_request import InsertBuildTypes

    # (type_code, type_name) — every row shares function_code="EVYOS", lang="TR".
    build_type_rows = [
        ("APT_KZN", "Apartman Kazan Dairesi"),
        ("APT_GRJ", "Apartman Garaj"),
        ("APT_DP", "Apartman Depo"),
        ("DAIRE", "Apartman Dairesi"),
        ("APT", "Apartman Binası"),
        ("APT_YNT", "Apartman Yönetimi"),
        ("APT_PRK", "Apartman Açık Park Alanı"),
        ("APT_YSL", "Apartman Yeşil Alan"),
        ("APT_YOL", "Apartman Ara Yol"),
    ]
    for type_code, type_name in build_type_rows:
        # Validate through the request model before persisting.
        build_types = InsertBuildTypes(
            function_code="EVYOS",
            lang="TR",
            type_code=type_code.upper(),
            type_name=type_name,
        )
        created_build_type = BuildTypes.find_or_create(**build_types.model_dump())
        created_build_type.save_and_confirm()

    # (enum_class, key/type_code, value & description/type_name).
    # NOTE(review): "ProjectTypes"/"R" and "EdmBudgetType"/"PT-B", "PT-S" break
    # the prefix convention of their siblings — kept verbatim; confirm intent.
    enum_rows = [
        ("BuildDuesTypes", "BDT-D", "Debit"),
        ("BuildDuesTypes", "BDT-A", "Add Debit"),
        ("BuildDuesTypes", "BDT-R", "Renovation"),
        ("BuildDuesTypes", "BDT-L", "Lawyer expence"),
        ("BuildDuesTypes", "BDT-S", "Service fee"),
        ("BuildDuesTypes", "BDT-I", "Information"),
        ("AccountingReceiptTypes", "ART-A", "Kasa Tahsil Fişi"),
        ("AccountingReceiptTypes", "ART-E", "Kasa Tediye Fişi"),
        ("AccountingReceiptTypes", "ART-M", "Mahsup Fişi"),
        ("AccountingReceiptTypes", "ART-O", "Açılış Fişi"),
        ("AccountingReceiptTypes", "ART-C", "Kapanış Fişi"),
        ("IbanBudgetType", "IBT-I", "Iban"),
        ("IbanBudgetType", "IBT-B", "Budget"),
        ("IbanBudgetType", "IBT-TR", "Transaction records"),
        ("ProjectTypes", "R", "Tadilat"),
        ("ProjectTypes", "PT-C", "Mahkeme süreçleri"),
        ("ProjectTypes", "PT-Z", "Sıfır Bakiye"),
        ("EdmBudgetType", "PT-B", "Banka records"),
        ("EdmBudgetType", "PT-S", "Sistem kaydı"),
        ("EdmBudgetType", "EBT-C", "Build, Flat or Site records"),
        ("ExpireType", "1", "daily"),
        ("ExpireType", "7", "weekly"),
        ("ExpireType", "30", "monthly"),
        ("ExpireType", "90", "quarter"),
        ("ExpireType", "180", "six_month"),
        ("ExpireType", "365", "yearly"),
        ("PhoneType", "M", "cep tel"),
        ("PhoneType", "L", "sabit telefon"),
        ("PhoneType", "F", "fax"),
        ("PhoneType", "C", "santral"),
        ("PhoneType", "G", "ülke genelindeki hatlar 444"),
        ("PerComType", "1", "Person"),
        ("PerComType", "2", "Company"),
        ("Directions", "NN", "North"),
        ("Directions", "EE", "East"),
        ("Directions", "SS", "South"),
        ("Directions", "WW", "West"),
        ("Directions", "NE", "North East"),
        ("Directions", "NW", "North West"),
        ("Directions", "SE", "South East"),
        ("Directions", "SW", "South West"),
        ("MeetingTypes", "MT-RBM", "Regular Building Meeting"),
        ("MeetingTypes", "MT-DBM", "Disaster Building Meeting"),
        ("MeetingTypes", "MT-EBM", "Emergency Building Meeting"),
        ("DebitTypes", "DT-D", "Debit Sender"),
        ("DebitTypes", "DT-R", "Credit Receiver"),
        ("DebitTypes", "DT-Z", "Zero Balance"),
        ("TimePeriod", "TP-W", "Weekly"),
        ("TimePeriod", "TP-M", "Monthly"),
        ("TimePeriod", "TP-Q", "Quarterly"),
        ("TimePeriod", "TP-Y", "Yearly"),
    ]
    for enum_class, type_code, type_name in enum_rows:
        created_api_enum = ApiEnumDropdown.find_or_create(
            enum_class=enum_class,
            value=type_name,
            key=type_code.upper(),
            description=type_name,
        )
        created_api_enum.save_and_confirm()
|
||||
42
service_app_init/initialize_app/model_initator.py
Normal file
42
service_app_init/initialize_app/model_initator.py
Normal file
@@ -0,0 +1,42 @@
|
||||
import api_validations.validations_request as validations
|
||||
|
||||
|
||||
def get_upper_only(text: str):
    """Derive a short code from *text*.

    Takes every uppercase letter of *text* and splices the 2nd and 3rd
    characters of the original string in after the first capital, e.g.
    ``"InsertBuildTypes" -> "InsBT"``.
    """
    second, third = text[1], text[2]
    uppers = "".join(ch for ch in text if ch.isupper())
    return f"{uppers[0]}{second}{third}{uppers[1:]}"
|
||||
|
||||
|
||||
def copy_validations_to_database():
    """Mirror every exported pydantic validation model into the database.

    Walks ``validations.__all__``, building one model dict per pydantic class
    and one field dict per declared field.

    NOTE(review): the persistence calls (``Models.find_or_create`` and
    ``ModelEntities.find_or_create``) are commented out, yet the field dict
    below still reads ``created_model.id`` — as written this raises
    ``NameError`` on the first field.  Restore the ``Models`` call (and its
    import) or drop the ``model_id`` entry before re-enabling this function.
    """
    for validation in validations.__all__:
        # Resolve the exported name to the pydantic class object.
        validation_pydantic = getattr(validations, validation)
        model_dict = {
            "model_owner": "system",
            "model_type": "BaseModel",
            "model_name": validation_pydantic.__name__,
            "model_description": "",
            # Short code derived from the class name's capitals (get_upper_only).
            "model_code": get_upper_only(validation_pydantic.__name__),
            "is_confirmed": True,
            "deleted": False,
            "active": True,
        }
        # created_model = Models.find_or_create(**model_dict)
        fields = validation_pydantic.model_fields
        for field, info in fields.items():
            # Pydantic marks "no default" with the PydanticUndefined sentinel;
            # map it to None so it can be skipped further down.
            default_value = (
                None
                if getattr(info, "default").__str__() == "PydanticUndefined"
                else getattr(info, "default")
            )
            model_field_dict = {
                "field_name": field,
                "field_type": str(info.annotation),
                "field_required": bool(info.is_required()),
                "model_id": created_model.id,
                "is_confirmed": True,
                "deleted": False,
                "active": True,
            }
            # NOTE(review): falsy defaults (0, "", False) are not stored —
            # confirm this is intended, otherwise compare against None.
            if default_value:
                model_field_dict.update({"field_default_value": str(default_value)})
            # ModelEntities.find_or_create(**model_field_dict)
|
||||
339
service_app_init/initialize_app/modules_and_services_init.py
Normal file
339
service_app_init/initialize_app/modules_and_services_init.py
Normal file
@@ -0,0 +1,339 @@
|
||||
from databases import (
|
||||
Modules,
|
||||
Duty,
|
||||
Services,
|
||||
OccupantTypes,
|
||||
)
|
||||
|
||||
|
||||
def create_endpoints_from_api_functions(routers):
    """Build the API app from *routers* and register every route/method pair
    as an ``EndpointRestriction`` row.

    :param routers: router collection forwarded to ``create_app``.
    :return: the constructed app instance.
    """
    from application.create_file import create_app
    from databases import EndpointRestriction

    api_app = create_app(routers=routers)

    for route in api_app.routes:
        route_path = str(getattr(route, "path"))
        # Fixed: apply the "" fallback *before* str() — the old
        # ``str(getattr(route, "name")) or ""`` turned a missing name into
        # the truthy string "None", so the fallback never fired.
        route_summary = str(getattr(route, "name") or "")

        methods = [method.lower() for method in getattr(route, "methods")]
        for route_method in methods:
            restriction = EndpointRestriction.find_or_create(
                endpoint_method=route_method,
                endpoint_name=route_path,
                endpoint_desc=route_summary.replace("_", " "),
                endpoint_function=route_summary,
            )
            # Stable code derived from the row id, e.g. "AR007".
            restriction.endpoint_code = f"AR{str(restriction.id).zfill(3)}"
            restriction.save_and_confirm()

    EndpointRestriction.save()
    return api_app
|
||||
|
||||
|
||||
def create_services_building(module_dict: dict):
    """Seed the optional building-level services (management, legal affairs,
    cleaning, heating, security, maintenance, repair, observation).

    :param module_dict: ``module_id``/``module_uu_id`` kwargs linking each
        service to its parent module.
    """
    # (service_name, service_description, service_code) — one row each,
    # replacing eight copy-pasted find_or_create stanzas.
    building_services = [
        ("Building Management", "Building Management Service", "SR-BLD-MNG"),
        ("Building Legal Affairs", "Building Legal Affairs Service", "SR-BLD-LGL"),
        ("Building Cleaning", "Building Cleaning Service", "SR-BLD-CLN"),
        ("Building Heating System", "Building Heating System Service", "SR-BLD-HTS"),
        ("Building Security System", "Building Security System Service", "SR-BLD-SEC"),
        ("Building Maintenance", "Building Maintenance Service", "SR-BLD-MNT"),
        ("Building Repair", "Building Repair Service", "SR-BLD-RPR"),
        ("Building Observation", "Building Observation Service", "SR-BLD-OBS"),
    ]
    for service_name, service_description, service_code in building_services:
        created_service = Services.find_or_create(
            **module_dict,
            service_name=service_name,
            service_description=service_description,
            service_code=service_code,
        )
        created_service.save_and_confirm()
    return
|
||||
|
||||
|
||||
def create_services_flat(module_dict: dict):
    """Seed the optional flat-level services (rent, sale, renovation,
    cleaning, observation).

    :param module_dict: ``module_id``/``module_uu_id`` kwargs linking each
        service to its parent module.
    """
    # (service_name, service_description, service_code) — one row each,
    # replacing five copy-pasted find_or_create stanzas.
    flat_services = [
        ("Flat Rent", "Flat Rent Service", "SR-FLT-RNT"),
        ("Flat Sale", "Flat Sale Service", "SR-FLT-SAL"),
        ("Flat Renovation", "Flat Renovation Service", "SR-FLT-RNV"),
        ("Flat Cleaning", "Flat Cleaning Service", "SR-FLT-CLN"),
        ("Flat Observation", "Flat Observation Service", "SR-FLT-OBS"),
    ]
    for service_name, service_description, service_code in flat_services:
        created_service = Services.find_or_create(
            **module_dict,
            service_name=service_name,
            service_description=service_description,
            service_code=service_code,
        )
        created_service.save_and_confirm()
    return
|
||||
|
||||
|
||||
def create_services_authenticate(module_dict: dict):
    """Create (or fetch) the Authenticate service for the given module and
    confirm it."""
    service_fields = dict(
        service_name="Authenticate",
        service_description="Authenticate Service",
        service_code="AUTH",
    )
    auth_service = Services.find_or_create(**module_dict, **service_fields)
    auth_service.save_and_confirm()
    return
|
||||
|
||||
|
||||
def create_services_meeting(module_dict: dict):
    """Seed the meeting services (regular, emergency, demand).

    :param module_dict: ``module_id``/``module_uu_id`` kwargs linking each
        service to its parent module.
    """
    # (service_name, service_description, service_code) — one row each,
    # replacing three copy-pasted find_or_create stanzas.
    meeting_services = [
        ("Meeting Regular", "Regular Meeting Service", "MEET-REG"),
        ("Meeting Emergency", "Emergency Meeting Service", "MEET-EMR"),
        ("Meeting Demand", "Demand Meeting Service", "MEET-DMN"),
    ]
    for service_name, service_description, service_code in meeting_services:
        created_service = Services.find_or_create(
            **module_dict,
            service_name=service_name,
            service_description=service_description,
            service_code=service_code,
        )
        created_service.save_and_confirm()
    return
|
||||
|
||||
|
||||
def _module_ref(module) -> dict:
    """Return the id/uuid keyword pair used to attach services to *module*."""
    return dict(module_id=module.id, module_uu_id=str(module.uu_id))


def create_modules_and_services_and_actions():
    """Seed the base modules and their services.

    Idempotently (via ``find_or_create``) creates the ERP,
    building-management and public-user modules, then derives one ERP
    service per non-excluded ``Duty`` row and one building service per
    ``OccupantTypes`` row, and finally registers the fixed
    authentication / meeting / building / flat service sets.
    """
    erp_module = Modules.find_or_create(
        module_name="EVYOS ERP",
        module_description="EVYOS Enterprise Resource Planning",
        module_code="EVYOS-ERP",
        module_layer=1,
        is_default_module=False,
    )
    erp_module.save_and_confirm()

    build_module = Modules.find_or_create(
        module_name="Bina Yönetim Modülü",
        module_description="Building Management Module",
        module_code="BLD-MNG",
        module_layer=1,
        is_default_module=False,
    )
    build_module.save_and_confirm()

    # NOTE(review): "Kullancı" looks like a typo for "Kullanıcı", but the
    # name is a lookup key for find_or_create — changing it would create a
    # new module row, so it is kept byte-identical. Confirm before fixing.
    user_module = Modules.find_or_create(
        module_name="Kullancı Modülü",
        module_description="Kullanıcı Genel Modülü",
        module_code="USR-PUB",
        module_layer=1,
        is_default_module=True,
    )
    user_module.save_and_confirm()

    erp_module_module_dict = _module_ref(erp_module)
    build_module_module_dict = _module_ref(build_module)
    user_module_module_dict = _module_ref(user_module)

    # One ERP service per duty, excluding the pseudo/special duty codes.
    duty_objects = Duty.filter_all(
        Duty.duty_code.notin_(["BULK", "OCCUPANT", "BM0001"])
    )
    for duty_object in duty_objects.data:
        created_service = Services.find_or_create(
            **erp_module_module_dict,
            service_name=duty_object.duty_name,
            service_description=duty_object.duty_description,
            service_code=f"SRE-{duty_object.duty_code}",
            related_responsibility=duty_object.duty_code,
        )
        created_service.save_and_confirm()

    # One building-management service per occupant type.
    occupant_types = OccupantTypes.filter_all()
    for occupant_type in occupant_types.data:
        created_service = Services.find_or_create(
            **build_module_module_dict,
            service_name=occupant_type.occupant_type,
            service_description=occupant_type.occupant_description,
            service_code=f"SRO-{occupant_type.occupant_code}",
            related_responsibility=occupant_type.occupant_code,
        )
        created_service.save_and_confirm()

    create_services_authenticate(module_dict=user_module_module_dict)
    create_services_meeting(module_dict=build_module_module_dict)
    create_services_building(module_dict=build_module_module_dict)
    create_services_flat(module_dict=build_module_module_dict)
    return
|
||||
#
|
||||
# super_admin_module_created=None
|
||||
# create_address_service(add_module=super_admin_module_created)
|
||||
# create_post_code_service(add_module=super_admin_module_created)
|
||||
# create_authentication_service(add_module=super_admin_module_created)
|
||||
# create_build_service(add_module=super_admin_module_created)
|
||||
# create_build_parts_service(add_module=super_admin_module_created)
|
||||
# create_build_area_service(add_module=super_admin_module_created)
|
||||
# create_build_sites_service(add_module=super_admin_module_created)
|
||||
# # create_build_types_service(add_module=super_admin_module_created)
|
||||
# create_living_spaces_service(add_module=super_admin_module_created)
|
||||
# create_company_service(add_module=super_admin_module_created)
|
||||
# create_department_service(add_module=super_admin_module_created)
|
||||
# create_duties_service(add_module=super_admin_module_created)
|
||||
# create_duty_service(add_module=super_admin_module_created)
|
||||
# create_employee_service(add_module=super_admin_module_created)
|
||||
# create_staff_service(add_module=super_admin_module_created)
|
||||
#
|
||||
# create_decision_book_service(add_module=super_admin_module_created)
|
||||
# create_decision_book_items_service(add_module=super_admin_module_created)
|
||||
# create_build_decision_book_items_debits_service(
|
||||
# add_module=super_admin_module_created
|
||||
# )
|
||||
# # create_build_decision_book_person_service(add_module=super_admin_module_created)
|
||||
# # create_build_decision_book_person_service(add_module=super_admin_module_created)
|
||||
#
|
||||
# create_actions_service(add_module=super_admin_module_created)
|
||||
# create_events_service(add_module=super_admin_module_created)
|
||||
# create_model_service(add_module=super_admin_module_created)
|
||||
# create_model_entities_service(add_module=super_admin_module_created)
|
||||
# create_modules_service(add_module=super_admin_module_created)
|
||||
# create_services_service(add_module=super_admin_module_created)
|
||||
# create_event_to_bind_people_service(add_module=super_admin_module_created)
|
||||
# create_bind_service_service(add_module=super_admin_module_created)
|
||||
#
|
||||
# create_people_service(add_module=super_admin_module_created)
|
||||
# create_project_decision_book_service(add_module=super_admin_module_created)
|
||||
# create_project_decision_book_items_service(add_module=super_admin_module_created)
|
||||
# create_build_project_decision_book_items_debits_service(
|
||||
# add_module=super_admin_module_created
|
||||
# )
|
||||
# create_build_project_decision_book_person_service(
|
||||
# add_module=super_admin_module_created
|
||||
# )
|
||||
# create_endpoint_restriction_service(add_module=super_admin_module_created)
|
||||
# create_users_service(add_module=super_admin_module_created)
|
||||
#
|
||||
# return
|
||||
|
||||
# account_service_endpoint = EndpointRestriction.find_one(
|
||||
# endpoint_function="account_service"
|
||||
# )
|
||||
# account_service = Services.find_or_create(
|
||||
# service_name="Account",
|
||||
# service_description="Account Service",
|
||||
# service_code="ACC",
|
||||
# )
|
||||
# address_service = Services.find_or_create(
|
||||
# service_name="Address",
|
||||
# service_description="Address Service",
|
||||
# service_code="ADD",
|
||||
# )
|
||||
# api_service = Services.find_or_create(
|
||||
# service_name="Api",
|
||||
# service_description="Api Service",
|
||||
# service_code="API",
|
||||
# )
|
||||
# application_service = Services.find_or_create(
|
||||
# service_name="Application",
|
||||
# service_description="Application Service",
|
||||
# service_code="APP",
|
||||
# )
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user