auth endpoints added

parent 3583d178e9
commit ee405133be
@@ -25,4 +25,4 @@ COPY /Schemas/identity /Schemas/identity
ENV PYTHONPATH=/ PYTHONUNBUFFERED=1 PYTHONDONTWRITEBYTECODE=1
# Run the application using the configured uvicorn server
CMD ["poetry", "run", "python", "ApiServices/TemplateService/app.py"]
CMD ["poetry", "run", "python", "ApiServices/AuthService/app.py"]
@@ -2,11 +2,12 @@ import uvicorn
from config import api_config

from ApiServices.TemplateService.create_app import create_app
from ApiServices.AuthService.create_app import create_app

# from prometheus_fastapi_instrumentator import Instrumentator

app = create_app() # Create FastAPI application
app = create_app() # Create FastAPI application
# Instrumentator().instrument(app=app).expose(app=app) # Setup Prometheus metrics
@@ -8,9 +8,9 @@ class Configs(BaseSettings):
"""
PATH: str = ""
HOST: str = "",
PORT: int = 0,
LOG_LEVEL: str = "info",
HOST: str = ("",)
PORT: int = (0,)
LOG_LEVEL: str = ("info",)
RELOAD: int = 0
ACCESS_TOKEN_TAG: str = ""

@@ -36,7 +36,7 @@ class Configs(BaseSettings):
"host": self.HOST,
"port": int(self.PORT),
"log_level": self.LOG_LEVEL,
"reload": bool(self.RELOAD)
"reload": bool(self.RELOAD),
}

@property
@@ -3,16 +3,16 @@ from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import RedirectResponse
from fastapi.staticfiles import StaticFiles

from ApiServices.TemplateService.create_route import RouteRegisterController
from ApiServices.TemplateService.endpoints.routes import get_routes
from ApiServices.TemplateService.open_api_creator import create_openapi_schema
from ApiServices.TemplateService.middlewares.token_middleware import token_middleware
from ApiServices.TemplateService.config import template_api_config
from ApiServices.AuthService.create_route import RouteRegisterController
from ApiServices.AuthService.endpoints.routes import get_routes
from ApiServices.AuthService.open_api_creator import create_openapi_schema
from ApiServices.AuthService.middlewares.token_middleware import token_middleware
from ApiServices.AuthService.config import api_config


def create_app():

application = FastAPI(**template_api_config.api_info)
application = FastAPI(**api_config.api_info)
# application.mount(
# "/application/static",
# StaticFiles(directory="application/static"),

@@ -20,7 +20,7 @@ def create_app():
# )
application.add_middleware(
CORSMiddleware,
allow_origins=template_api_config.ALLOW_ORIGINS,
allow_origins=api_config.ALLOW_ORIGINS,
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
@@ -0,0 +1,322 @@
import uuid
from typing import Union

from fastapi import APIRouter, Request, status, Header
from fastapi.responses import JSONResponse

from ApiServices.AuthService.config import api_config
from ApiServices.AuthService.validations.request.authentication.login_post import (
RequestLogin,
RequestSelectLiving,
RequestSelectOccupant, RequestCreatePassword, RequestChangePassword, RequestForgotPasswordPhone,
RequestForgotPasswordEmail,
)


auth_route = APIRouter(
prefix="/authentication",
tags=["Authentication Cluster"],
)


@auth_route.post(
path="/login",
summary="Login via domain and access key : [email] | [phone]",
description="Login Route",
)
def authentication_login_post(
request: Request,
data: RequestLogin,
language: str = Header(None, alias="language"),
domain: str = Header(None, alias="domain"),
):
"""
Authentication Login Route with Post Method
"""
headers = {
"language": language or "",
"domain": domain or "",
"eys-ext": f"{str(uuid.uuid4())}",
}
if not domain or not language:
return JSONResponse(
content={"error": "EYS_0001"},
status_code=status.HTTP_406_NOT_ACCEPTABLE,
headers=headers,
)
return JSONResponse(
content={**data.model_dump()},
status_code=status.HTTP_202_ACCEPTED,
headers=headers,
)


@auth_route.post(
path="/select",
summary="Select company or occupant type",
description="Selection of users company or occupant type",
)
def authentication_select_post(
request: Request,
data: Union[RequestSelectOccupant, RequestSelectLiving],
language: str = Header(None, alias="language"),
domain: str = Header(None, alias="domain"),
):
"""
Authentication Select Route with Post Method
"""
token = request.headers.get(api_config.ACCESS_TOKEN_TAG, None)
headers = {
"language": language or "",
"domain": domain or "",
"eys-ext": f"{str(uuid.uuid4())}",
"token": token,
}
if not domain or not language:
return JSONResponse(
content={"error": "EYS_0001"},
status_code=status.HTTP_406_NOT_ACCEPTABLE,
headers=headers,
)

return JSONResponse(
content=data.model_dump(),
status_code=status.HTTP_202_ACCEPTED,
headers=headers,
)


@auth_route.get(
path="/logout",
summary="Logout user",
description="Logout only single session of user which domain is provided",
)
def authentication_logout_post(
request: Request,
language: str = Header(None, alias="language"),
domain: str = Header(None, alias="domain"),
):
"""
Logout user from the system
"""
token = request.headers.get(api_config.ACCESS_TOKEN_TAG, None)
headers = {
"language": language or "",
"domain": domain or "",
"eys-ext": f"{str(uuid.uuid4())}",
"token": token,
}
if not domain or not language:
return JSONResponse(
content={"error": "EYS_0003"},
status_code=status.HTTP_406_NOT_ACCEPTABLE,
headers=headers,
)

return JSONResponse(
content={},
status_code=status.HTTP_202_ACCEPTED,
headers=headers,
)


@auth_route.get(
path="/disconnect",
summary="Disconnect all sessions",
description="Disconnect all sessions of user in access token",
)
def authentication_disconnect_post(
request: Request,
language: str = Header(None, alias="language"),
domain: str = Header(None, alias="domain"),
):
"""
Disconnect all sessions of user in access token
"""

token = request.headers.get(api_config.ACCESS_TOKEN_TAG, None)
headers = {
"language": language or "",
"domain": domain or "",
"eys-ext": f"{str(uuid.uuid4())}",
"token": token,
}
if not domain or not language:
return JSONResponse(
content={"error": "EYS_0003"},
status_code=status.HTTP_406_NOT_ACCEPTABLE,
headers=headers,
)

return JSONResponse(
content={},
status_code=status.HTTP_202_ACCEPTED,
headers=headers,
)


@auth_route.get(
path="/token/check",
summary="Check if token is valid",
description="Check if access token is valid for user",
)
def authentication_token_check_post(
request: Request,
language: str = Header(None, alias="language"),
domain: str = Header(None, alias="domain"),
):
"""
Check if access token is valid for user
"""

token = request.headers.get(api_config.ACCESS_TOKEN_TAG, None)
headers = {
"language": language or "",
"domain": domain or "",
"eys-ext": f"{str(uuid.uuid4())}",
"token": token,
}
if not domain or not language:
return JSONResponse(
content={"error": "EYS_0003"},
status_code=status.HTTP_406_NOT_ACCEPTABLE,
headers=headers,
)

return JSONResponse(
content={},
status_code=status.HTTP_202_ACCEPTED,
headers=headers,
)


@auth_route.get(
path="/token/refresh",
summary="Refresh if token is valid",
description="Refresh if access token is valid for user",
)
def authentication_token_refresh_post(
request: Request,
language: str = Header(None, alias="language"),
domain: str = Header(None, alias="domain"),
):
"""
Refresh if access token is valid for user
"""
headers = {
"language": language or "",
"domain": domain or "",
"eys-ext": f"{str(uuid.uuid4())}",
}
if not domain or not language:
return JSONResponse(
content={"error": "EYS_0003"},
status_code=status.HTTP_406_NOT_ACCEPTABLE,
headers=headers,
)

return JSONResponse(
content={},
status_code=status.HTTP_202_ACCEPTED,
headers=headers,
)


@auth_route.post(
path="/password/create",
summary="Create password with access token",
description="Create password",
)
def authentication_password_create_post(
request: Request,
data: RequestCreatePassword,
language: str = Header(None, alias="language"),
domain: str = Header(None, alias="domain"),
):
"""
Authentication create password Route with Post Method
"""
token = request.headers.get(api_config.ACCESS_TOKEN_TAG, None)
headers = {
"language": language or "",
"domain": domain or "",
"eys-ext": f"{str(uuid.uuid4())}",
"token": token,
}
if not domain or not language:
return JSONResponse(
content={"error": "EYS_0001"},
status_code=status.HTTP_406_NOT_ACCEPTABLE,
headers=headers,
)
return JSONResponse(
content={**data.model_dump()},
status_code=status.HTTP_202_ACCEPTED,
headers=headers,
)


@auth_route.post(
path="/password/change",
summary="Change password with access token",
description="Change password",
)
def authentication_password_change_post(
request: Request,
data: RequestChangePassword,
language: str = Header(None, alias="language"),
domain: str = Header(None, alias="domain"),
):
"""
Authentication change password Route with Post Method
"""
token = request.headers.get(api_config.ACCESS_TOKEN_TAG, None)
headers = {
"language": language or "",
"domain": domain or "",
"eys-ext": f"{str(uuid.uuid4())}",
"token": token,
}
if not domain or not language:
return JSONResponse(
content={"error": "EYS_0001"},
status_code=status.HTTP_406_NOT_ACCEPTABLE,
headers=headers,
)
return JSONResponse(
content={**data.model_dump()},
status_code=status.HTTP_202_ACCEPTED,
headers=headers,
)


@auth_route.post(
path="/password/reset",
summary="Reset password with access token",
description="Reset password",
)
def authentication_password_reset_post(
request: Request,
data: Union[RequestForgotPasswordEmail, RequestForgotPasswordPhone],
language: str = Header(None, alias="language"),
domain: str = Header(None, alias="domain"),
):
"""
Authentication reset password Route with Post Method
"""
headers = {
"language": language or "",
"domain": domain or "",
"eys-ext": f"{str(uuid.uuid4())}",
}
if not domain or not language:
return JSONResponse(
content={"error": "EYS_0001"},
status_code=status.HTTP_406_NOT_ACCEPTABLE,
headers=headers,
)
return JSONResponse(
content={**data.model_dump()},
status_code=status.HTTP_202_ACCEPTED,
headers=headers,
)
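Every route above requires the "language" and "domain" headers and answers with an EYS_* error code when either is missing. A minimal sketch of exercising the new login endpoint with FastAPI's TestClient; the payload fields come from RequestLogin, while the concrete values and the assertion are illustrative assumptions, not part of this commit:

# Sketch only: call the new /authentication/login route in-process (example values).
from fastapi.testclient import TestClient
from ApiServices.AuthService.create_app import create_app

client = TestClient(create_app())
response = client.post(
    "/authentication/login",
    json={"access_key": "user@example.com", "password": "secret", "remember_me": True},  # example payload
    headers={"language": "en", "domain": "example.com"},  # both headers are mandatory; omitting one yields 406 {"error": "EYS_0001"}
)
assert response.status_code == 202  # the route currently echoes the validated payload with 202 Accepted
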
@@ -1,9 +1,9 @@
from fastapi import APIRouter
from .test_template.route import test_template_route
from ApiServices.AuthService.endpoints.auth.route import auth_route


def get_routes() -> list[APIRouter]:
return [test_template_route]
return [auth_route]


def get_safe_endpoint_urls() -> list[tuple[str, str]]:

@@ -15,6 +15,5 @@ def get_safe_endpoint_urls() -> list[tuple[str, str]]:
("/auth/register", "POST"),
("/auth/login", "POST"),
("/metrics", "GET"),
("/test/template", "GET"),
("/test/template", "POST"),
("/authentication/login", "POST"),
]
@@ -1,40 +0,0 @@
from fastapi import APIRouter, Request, Response

test_template_route = APIRouter(prefix="/test", tags=["Test"])


@test_template_route.get(path="/template", description="Test Template Route")
def test_template(request: Request, response: Response):
"""
Test Template Route
"""
headers = dict(request.headers)
response.headers["X-Header"] = "Test Header GET"
return {
"completed": True,
"message": "Test Template Route",
"info": {
"host": headers.get("host", "Not Found"),
"user_agent": headers.get("user-agent", "Not Found"),
},
}


@test_template_route.post(
path="/template",
description="Test Template Route with Post Method",
)
def test_template_post(request: Request, response: Response):
"""
Test Template Route with Post Method
"""
headers = dict(request.headers)
response.headers["X-Header"] = "Test Header POST"
return {
"completed": True,
"message": "Test Template Route with Post Method",
"info": {
"host": headers.get("host", "Not Found"),
"user_agent": headers.get("user-agent", "Not Found"),
},
}
@@ -1,5 +1,7 @@
from fastapi import Request, Response
from ApiServices.TemplateService.endpoints.routes import get_safe_endpoint_urls
from fastapi import Request, status
from fastapi.responses import JSONResponse
from ..endpoints.routes import get_safe_endpoint_urls
from ..config import api_config


async def token_middleware(request: Request, call_next):

@@ -9,9 +11,14 @@ async def token_middleware(request: Request, call_next):
if base_url in safe_endpoints:
return await call_next(request)

token = request.headers.get("Authorization")
token = request.headers.get(api_config.ACCESS_TOKEN_TAG, None)
if not token:
return Response(content="Missing token", status_code=400)
return JSONResponse(
content={
"error": "EYS_0002",
},
status_code=status.HTTP_401_UNAUTHORIZED,
)

response = await call_next(request)
return response
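For context, the middleware now rejects unauthenticated requests with 401 and {"error": "EYS_0002"} instead of a plain 400. The registration call is not part of this diff, so the wiring below is only an assumed sketch of how such an HTTP middleware is typically attached inside create_app:

# Assumed wiring, not shown in this commit: attach token_middleware as an HTTP middleware.
from fastapi import FastAPI
from ApiServices.AuthService.middlewares.token_middleware import token_middleware

application = FastAPI()
application.middleware("http")(token_middleware)  # requests outside get_safe_endpoint_urls() now need the access-token header
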
@@ -3,8 +3,8 @@ from fastapi import FastAPI
from fastapi.routing import APIRoute
from fastapi.openapi.utils import get_openapi

from ApiServices.TemplateService.config import template_api_config
from ApiServices.TemplateService.endpoints.routes import get_safe_endpoint_urls
from ApiServices.AuthService.config import api_config
from ApiServices.AuthService.endpoints.routes import get_safe_endpoint_urls


class OpenAPISchemaCreator:

@@ -36,7 +36,7 @@ class OpenAPISchemaCreator:
"BearerAuth": {
"type": "apiKey",
"in": "header",
"name": template_api_config.ACCESS_TOKEN_TAG,
"name": api_config.ACCESS_TOKEN_TAG,
"description": "Enter: **'Bearer <JWT>'**, where JWT is the access token",
}
}

@@ -73,9 +73,9 @@ class OpenAPISchemaCreator:
Dict[str, Any]: Complete OpenAPI schema
"""
openapi_schema = get_openapi(
title=template_api_config.TITLE,
description=template_api_config.DESCRIPTION,
version=template_api_config.VERSION,
title=api_config.TITLE,
description=api_config.DESCRIPTION,
version=api_config.VERSION,
routes=self.app.routes,
)

@@ -83,9 +83,7 @@ class OpenAPISchemaCreator:
if "components" not in openapi_schema:
openapi_schema["components"] = {}

openapi_schema["components"][
"securitySchemes"
] = self.create_security_schemes()
openapi_schema["components"]["securitySchemes"] = self.create_security_schemes()

# Configure route security and responses
for route in self.app.routes:
@@ -0,0 +1,38 @@
from typing import Optional

from pydantic import BaseModel


class RequestLogin(BaseModel):
access_key: str
password: str
remember_me: Optional[bool]


class RequestSelectOccupant(BaseModel):
company_uu_id: str


class RequestSelectLiving(BaseModel):
build_living_space_uu_id: str


class RequestCreatePassword(BaseModel):
password_token: str
password: str
re_password: str


class RequestChangePassword(BaseModel):
old_password: str
password: str
re_password: str


class RequestForgotPasswordEmail(BaseModel):
email: str


class RequestForgotPasswordPhone(BaseModel):
phone_number: str
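These request bodies are plain pydantic models, so validation and serialization follow the standard pydantic v2 API that the routes rely on via model_dump(). A short sketch with example values (the values themselves are assumptions):

# Sketch only: validating a login payload with the new request model (example values).
from ApiServices.AuthService.validations.request.authentication.login_post import RequestLogin

payload = RequestLogin(access_key="user@example.com", password="secret", remember_me=False)
print(payload.model_dump())  # {'access_key': 'user@example.com', 'password': 'secret', 'remember_me': False}
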
@@ -3,10 +3,11 @@ import uvicorn
from config import api_config

from ApiServices.TemplateService.create_app import create_app

# from prometheus_fastapi_instrumentator import Instrumentator

app = create_app() # Create FastAPI application
app = create_app() # Create FastAPI application
# Instrumentator().instrument(app=app).expose(app=app) # Setup Prometheus metrics

@@ -8,9 +8,9 @@ class Configs(BaseSettings):
"""
PATH: str = ""
HOST: str = "",
PORT: int = 0,
LOG_LEVEL: str = "info",
HOST: str = ("",)
PORT: int = (0,)
LOG_LEVEL: str = ("info",)
RELOAD: int = 0
ACCESS_TOKEN_TAG: str = ""

@@ -36,7 +36,7 @@ class Configs(BaseSettings):
"host": self.HOST,
"port": int(self.PORT),
"log_level": self.LOG_LEVEL,
"reload": bool(self.RELOAD)
"reload": bool(self.RELOAD),
}

@property
@@ -1,5 +1,7 @@
from fastapi import Request, Response
from ApiServices.TemplateService.endpoints.routes import get_safe_endpoint_urls
from fastapi import Request, status
from fastapi.responses import JSONResponse
from ..endpoints.routes import get_safe_endpoint_urls
from ..config import api_config


async def token_middleware(request: Request, call_next):

@@ -9,9 +11,14 @@ async def token_middleware(request: Request, call_next):
if base_url in safe_endpoints:
return await call_next(request)

token = request.headers.get("Authorization")
token = request.headers.get(api_config.ACCESS_TOKEN_TAG, None)
if not token:
return Response(content="Missing token", status_code=400)
return JSONResponse(
content={
"error": "EYS_0002",
},
status_code=status.HTTP_401_UNAUTHORIZED,
)

response = await call_next(request)
return response
@@ -83,9 +83,7 @@ class OpenAPISchemaCreator:
if "components" not in openapi_schema:
openapi_schema["components"] = {}

openapi_schema["components"][
"securitySchemes"
] = self.create_security_schemes()
openapi_schema["components"]["securitySchemes"] = self.create_security_schemes()

# Configure route security and responses
for route in self.app.routes:
@@ -17,29 +17,30 @@ def test_basic_crud_operations():
try:
with mongo_handler.collection("users") as users_collection:
# Insert multiple documents
users_collection.insert_many([
{"username": "john", "email": "john@example.com", "role": "user"},
{"username": "jane", "email": "jane@example.com", "role": "admin"},
{"username": "bob", "email": "bob@example.com", "role": "user"}
])
users_collection.insert_many(
[
{"username": "john", "email": "john@example.com", "role": "user"},
{"username": "jane", "email": "jane@example.com", "role": "admin"},
{"username": "bob", "email": "bob@example.com", "role": "user"},
]
)

# Find with multiple conditions
admin_users = list(users_collection.find({"role": "admin"}))

# Update multiple documents
update_result = users_collection.update_many(
{"role": "user"},
{"$set": {"last_login": datetime.now().isoformat()}}
{"role": "user"}, {"$set": {"last_login": datetime.now().isoformat()}}
)

# Delete documents
delete_result = users_collection.delete_many({"username": "bob"})

success = (
len(admin_users) == 1 and
admin_users[0]["username"] == "jane" and
update_result.modified_count == 2 and
delete_result.deleted_count == 1
len(admin_users) == 1
and admin_users[0]["username"] == "jane"
and update_result.modified_count == 2
and delete_result.deleted_count == 1
)
print(f"Test {'passed' if success else 'failed'}")
return success

@@ -54,35 +55,32 @@ def test_nested_documents():
try:
with mongo_handler.collection("products") as products_collection:
# Insert a product with nested data
products_collection.insert_one({
"name": "Laptop",
"price": 999.99,
"specs": {
"cpu": "Intel i7",
"ram": "16GB",
"storage": "512GB SSD"
},
"in_stock": True,
"tags": ["electronics", "computers", "laptops"]
})
products_collection.insert_one(
{
"name": "Laptop",
"price": 999.99,
"specs": {"cpu": "Intel i7", "ram": "16GB", "storage": "512GB SSD"},
"in_stock": True,
"tags": ["electronics", "computers", "laptops"],
}
)

# Find with nested field query
laptop = products_collection.find_one({"specs.cpu": "Intel i7"})

# Update nested field
update_result = products_collection.update_one(
{"name": "Laptop"},
{"$set": {"specs.ram": "32GB"}}
{"name": "Laptop"}, {"$set": {"specs.ram": "32GB"}}
)

# Verify the update
updated_laptop = products_collection.find_one({"name": "Laptop"})

success = (
laptop is not None and
laptop["specs"]["ram"] == "16GB" and
update_result.modified_count == 1 and
updated_laptop["specs"]["ram"] == "32GB"
laptop is not None
and laptop["specs"]["ram"] == "16GB"
and update_result.modified_count == 1
and updated_laptop["specs"]["ram"] == "32GB"
)
print(f"Test {'passed' if success else 'failed'}")
return success

@@ -97,16 +95,18 @@ def test_array_operations():
try:
with mongo_handler.collection("orders") as orders_collection:
# Insert an order with array of items
orders_collection.insert_one({
"order_id": "ORD001",
"customer": "john",
"items": [
{"product": "Laptop", "quantity": 1},
{"product": "Mouse", "quantity": 2}
],
"total": 1099.99,
"status": "pending"
})
orders_collection.insert_one(
{
"order_id": "ORD001",
"customer": "john",
"items": [
{"product": "Laptop", "quantity": 1},
{"product": "Mouse", "quantity": 2},
],
"total": 1099.99,
"status": "pending",
}
)

# Find orders containing specific items
laptop_orders = list(orders_collection.find({"items.product": "Laptop"}))

@@ -114,17 +114,17 @@ def test_array_operations():
# Update array elements
update_result = orders_collection.update_one(
{"order_id": "ORD001"},
{"$push": {"items": {"product": "Keyboard", "quantity": 1}}}
{"$push": {"items": {"product": "Keyboard", "quantity": 1}}},
)

# Verify the update
updated_order = orders_collection.find_one({"order_id": "ORD001"})

success = (
len(laptop_orders) == 1 and
update_result.modified_count == 1 and
len(updated_order["items"]) == 3 and
updated_order["items"][-1]["product"] == "Keyboard"
len(laptop_orders) == 1
and update_result.modified_count == 1
and len(updated_order["items"]) == 3
and updated_order["items"][-1]["product"] == "Keyboard"
)
print(f"Test {'passed' if success else 'failed'}")
return success

@@ -139,23 +139,32 @@ def test_aggregation():
try:
with mongo_handler.collection("sales") as sales_collection:
# Insert sample sales data
sales_collection.insert_many([
{"product": "Laptop", "amount": 999.99, "date": datetime.now()},
{"product": "Mouse", "amount": 29.99, "date": datetime.now()},
{"product": "Keyboard", "amount": 59.99, "date": datetime.now()}
])
sales_collection.insert_many(
[
{"product": "Laptop", "amount": 999.99, "date": datetime.now()},
{"product": "Mouse", "amount": 29.99, "date": datetime.now()},
{"product": "Keyboard", "amount": 59.99, "date": datetime.now()},
]
)

# Calculate total sales by product
pipeline = [
{"$group": {"_id": "$product", "total": {"$sum": "$amount"}}}
]
pipeline = [{"$group": {"_id": "$product", "total": {"$sum": "$amount"}}}]
sales_summary = list(sales_collection.aggregate(pipeline))

success = (
len(sales_summary) == 3 and
any(item["_id"] == "Laptop" and item["total"] == 999.99 for item in sales_summary) and
any(item["_id"] == "Mouse" and item["total"] == 29.99 for item in sales_summary) and
any(item["_id"] == "Keyboard" and item["total"] == 59.99 for item in sales_summary)
len(sales_summary) == 3
and any(
item["_id"] == "Laptop" and item["total"] == 999.99
for item in sales_summary
)
and any(
item["_id"] == "Mouse" and item["total"] == 29.99
for item in sales_summary
)
and any(
item["_id"] == "Keyboard" and item["total"] == 59.99
for item in sales_summary
)
)
print(f"Test {'passed' if success else 'failed'}")
return success

@@ -174,11 +183,15 @@ def test_index_operations():
users_collection.create_index([("username", 1), ("role", 1)])

# Insert initial document
users_collection.insert_one({"username": "test_user", "email": "test@example.com"})
users_collection.insert_one(
{"username": "test_user", "email": "test@example.com"}
)

# Try to insert duplicate email (should fail)
try:
users_collection.insert_one({"username": "test_user2", "email": "test@example.com"})
users_collection.insert_one(
{"username": "test_user2", "email": "test@example.com"}
)
success = False # Should not reach here
except Exception:
success = True

@@ -196,49 +209,49 @@ def test_complex_queries():
try:
with mongo_handler.collection("products") as products_collection:
# Insert test data
products_collection.insert_many([
{
"name": "Expensive Laptop",
"price": 999.99,
"tags": ["electronics", "computers"],
"in_stock": True
},
{
"name": "Cheap Mouse",
"price": 29.99,
"tags": ["electronics", "peripherals"],
"in_stock": True
}
])
products_collection.insert_many(
[
{
"name": "Expensive Laptop",
"price": 999.99,
"tags": ["electronics", "computers"],
"in_stock": True,
},
{
"name": "Cheap Mouse",
"price": 29.99,
"tags": ["electronics", "peripherals"],
"in_stock": True,
},
]
)

# Find products with price range and specific tags
expensive_electronics = list(products_collection.find({
"price": {"$gt": 500},
"tags": {"$in": ["electronics"]},
"in_stock": True
}))
expensive_electronics = list(
products_collection.find(
{
"price": {"$gt": 500},
"tags": {"$in": ["electronics"]},
"in_stock": True,
}
)
)

# Update with multiple conditions
update_result = products_collection.update_many(
{
"price": {"$lt": 100},
"in_stock": True
},
{
"$set": {"discount": 0.1},
"$inc": {"price": -10}
}
{"price": {"$lt": 100}, "in_stock": True},
{"$set": {"discount": 0.1}, "$inc": {"price": -10}},
)

# Verify the update
updated_product = products_collection.find_one({"name": "Cheap Mouse"})

success = (
len(expensive_electronics) == 1 and
expensive_electronics[0]["name"] == "Expensive Laptop" and
update_result.modified_count == 1 and
updated_product["price"] == 19.99 and
updated_product["discount"] == 0.1
len(expensive_electronics) == 1
and expensive_electronics[0]["name"] == "Expensive Laptop"
and update_result.modified_count == 1
and updated_product["price"] == 19.99
and updated_product["discount"] == 0.1
)
print(f"Test {'passed' if success else 'failed'}")
return success

@@ -260,7 +273,7 @@ def run_all_tests():
test_array_operations,
test_aggregation,
test_index_operations,
test_complex_queries
test_complex_queries,
]

passed_list, not_passed_list = [], []

@@ -282,9 +295,9 @@ def run_all_tests():
not_passed_list.append(f"Test {test.__name__} failed")

print(f"\nTest Results: {passed} passed, {failed} failed")
print('Passed Tests:')
print("Passed Tests:")
print("\n".join(passed_list))
print('Failed Tests:')
print("Failed Tests:")
print("\n".join(not_passed_list))

return passed, failed
@@ -14,6 +14,7 @@ class Credentials(BaseModel):
"""
Class to store user credentials.
"""

person_id: int
person_name: str
full_name: Optional[str] = None

@@ -23,6 +24,7 @@ class MetaData:
"""
Class to store metadata for a query.
"""

created: bool = False
updated: bool = False

@@ -46,13 +48,13 @@ class CRUDModel:

# Define required columns for CRUD operations
required_columns = {
'expiry_starts': TIMESTAMP,
'expiry_ends': TIMESTAMP,
'created_by': str,
'created_by_id': int,
'updated_by': str,
'updated_by_id': int,
'deleted': bool
"expiry_starts": TIMESTAMP,
"expiry_ends": TIMESTAMP,
"created_by": str,
"created_by_id": int,
"updated_by": str,
"updated_by_id": int,
"deleted": bool,
}

@classmethod

@@ -66,12 +68,16 @@ class CRUDModel:
if not cls.creds:
return

if getattr(cls.creds, "person_id", None) and getattr(cls.creds, "person_name", None):
if getattr(cls.creds, "person_id", None) and getattr(
cls.creds, "person_name", None
):
record_created.created_by_id = cls.creds.person_id
record_created.created_by = cls.creds.person_name

@classmethod
def raise_exception(cls, message: str = "Exception raised.", status_code: int = 400):
def raise_exception(
cls, message: str = "Exception raised.", status_code: int = 400
):
"""
Raise HTTP exception with custom message and status code.

@@ -79,10 +85,7 @@ class CRUDModel:
message: Error message
status_code: HTTP status code
"""
raise HTTPException(
status_code=status_code,
detail={"message": message}
)
raise HTTPException(status_code=status_code, detail={"message": message})

@classmethod
def create_or_abort(cls, db: Session, **kwargs):

@@ -146,7 +149,7 @@ class CRUDModel:
"""
try:
key_ = cls.__annotations__.get(key, None)
is_primary = key in getattr(cls, 'primary_keys', [])
is_primary = key in getattr(cls, "primary_keys", [])
row_attr = bool(getattr(getattr(cls, key), "foreign_keys", None))

# Skip primary keys and foreign keys

@@ -167,12 +170,16 @@ class CRUDModel:
elif key_ == Mapped[float] or key_ == Mapped[NUMERIC]:
return True, round(float(val), 3)
elif key_ == Mapped[TIMESTAMP]:
return True, str(arrow.get(str(val)).format("YYYY-MM-DD HH:mm:ss ZZ"))
return True, str(
arrow.get(str(val)).format("YYYY-MM-DD HH:mm:ss ZZ")
)
elif key_ == Mapped[str]:
return True, str(val)
else: # Handle based on Python types
if isinstance(val, datetime.datetime):
return True, str(arrow.get(str(val)).format("YYYY-MM-DD HH:mm:ss ZZ"))
return True, str(
arrow.get(str(val)).format("YYYY-MM-DD HH:mm:ss ZZ")
)
elif isinstance(val, bool):
return True, bool(val)
elif isinstance(val, (float, Decimal)):

@@ -189,7 +196,9 @@ class CRUDModel:
except Exception as e:
return False, None

def get_dict(self, exclude_list: Optional[list[InstrumentedAttribute]] = None) -> Dict[str, Any]:
def get_dict(
self, exclude_list: Optional[list[InstrumentedAttribute]] = None
) -> Dict[str, Any]:
"""
Convert model instance to dictionary with customizable fields.

@@ -277,7 +286,9 @@ class CRUDModel:

except Exception as e:
db.rollback()
cls.raise_exception(f"Failed to find or create record: {str(e)}", status_code=500)
cls.raise_exception(
f"Failed to find or create record: {str(e)}", status_code=500
)

def update(self, db: Session, **kwargs):
"""
@@ -10,11 +10,11 @@ from sqlalchemy.orm import declarative_base, sessionmaker, scoped_session, Sessi
engine = create_engine(
postgres_configs.url,
pool_pre_ping=True,
pool_size=10, # Reduced from 20 to better match your CPU cores
max_overflow=5, # Reduced from 10 to prevent too many connections
pool_recycle=600, # Keep as is
pool_timeout=30, # Keep as is
echo=False, # Consider setting to False in production
pool_size=10, # Reduced from 20 to better match your CPU cores
max_overflow=5, # Reduced from 10 to prevent too many connections
pool_recycle=600, # Keep as is
pool_timeout=30, # Keep as is
echo=False, # Consider setting to False in production
)
@@ -38,7 +38,7 @@ class QueryModel:
cls: Type[T],
args_list: tuple[BinaryExpression, ...],
argument: str,
value: BinaryExpression
value: BinaryExpression,
) -> tuple[BinaryExpression, ...]:
"""
Add a new argument to the query arguments if it doesn't exist.

@@ -52,10 +52,9 @@ class QueryModel:
Updated tuple of query arguments
"""
# Convert to set to remove duplicates while preserving order
new_args = list(dict.fromkeys(
arg for arg in args_list
if isinstance(arg, BinaryExpression)
))
new_args = list(
dict.fromkeys(arg for arg in args_list if isinstance(arg, BinaryExpression))
)

# Check if argument already exists
if not any(

@@ -68,8 +67,7 @@ class QueryModel:

@classmethod
def get_not_expired_query_arg(
cls: Type[T],
args: tuple[BinaryExpression, ...]
cls: Type[T], args: tuple[BinaryExpression, ...]
) -> tuple[BinaryExpression, ...]:
"""
Add expiry date filtering to the query arguments.

@@ -141,9 +139,7 @@ class QueryModel:

@classmethod
def convert(
cls: Type[T],
smart_options: dict[str, Any],
validate_model: Any = None
cls: Type[T], smart_options: dict[str, Any], validate_model: Any = None
) -> Optional[tuple[BinaryExpression, ...]]:
"""
Convert smart options to SQLAlchemy filter expressions.

@@ -163,10 +159,7 @@ class QueryModel:

@classmethod
def filter_by_one(
cls: Type[T],
db: Session,
system: bool = False,
**kwargs: Any
cls: Type[T], db: Session, system: bool = False, **kwargs: Any
) -> PostgresResponse[T]:
"""
Filter single record by keyword arguments.

@@ -191,9 +184,7 @@ class QueryModel:
# Add status filters if not system query
if not system:
query = query.filter(
cls.is_confirmed == True,
cls.deleted == False,
cls.active == True
cls.is_confirmed == True, cls.deleted == False, cls.active == True
)

# Add expiry filters last

@@ -204,7 +195,7 @@ class QueryModel:
model=cls,
pre_query=base_query, # Use the base query for pre_query
query=query,
is_array=False
is_array=False,
)

@classmethod

@@ -234,9 +225,7 @@ class QueryModel:

# Add status filters
query = query.filter(
cls.is_confirmed == True,
cls.deleted == False,
cls.active == True
cls.is_confirmed == True, cls.deleted == False, cls.active == True
)

# Add expiry filters last

@@ -247,7 +236,7 @@ class QueryModel:
model=cls,
pre_query=base_query, # Use the base query for pre_query
query=query,
is_array=False
is_array=False,
)

@classmethod

@@ -283,7 +272,7 @@ class QueryModel:
model=cls,
pre_query=base_query, # Use the base query for pre_query
query=query,
is_array=False
is_array=False,
)

@classmethod

@@ -319,7 +308,7 @@ class QueryModel:
model=cls,
pre_query=base_query, # Use the base query for pre_query
query=query,
is_array=True
is_array=True,
)

@classmethod

@@ -349,9 +338,7 @@ class QueryModel:

# Add status filters
query = query.filter(
cls.is_confirmed == True,
cls.deleted == False,
cls.active == True
cls.is_confirmed == True, cls.deleted == False, cls.active == True
)

# Add expiry filters last

@@ -362,14 +349,12 @@ class QueryModel:
model=cls,
pre_query=base_query, # Use the base query for pre_query
query=query,
is_array=True
is_array=True,
)

@classmethod
def filter_by_all_system(
cls: Type[T],
db: Session,
**kwargs: Any
cls: Type[T], db: Session, **kwargs: Any
) -> PostgresResponse[T]:
"""
Filter multiple records by keyword arguments without status filtering.

@@ -398,14 +383,12 @@ class QueryModel:
model=cls,
pre_query=base_query, # Use the base query for pre_query
query=query,
is_array=True
is_array=True,
)

@classmethod
def filter_by_one_system(
cls: Type[T],
db: Session,
**kwargs: Any
cls: Type[T], db: Session, **kwargs: Any
) -> PostgresResponse[T]:
"""
Filter single record by keyword arguments without status filtering.
@ -10,14 +10,14 @@ def generate_table_in_postgres():
|
|||
Base.metadata.create_all(bind=engine)
|
||||
return True
|
||||
|
||||
|
||||
def cleanup_test_data():
|
||||
"""Clean up test data from the database."""
|
||||
with EndpointRestriction.new_session() as db_session:
|
||||
try:
|
||||
# Get all test records
|
||||
test_records = EndpointRestriction.filter_all(
|
||||
EndpointRestriction.endpoint_code.like("TEST%"),
|
||||
db=db_session
|
||||
EndpointRestriction.endpoint_code.like("TEST%"), db=db_session
|
||||
).data
|
||||
|
||||
# Delete each record using the same session
|
||||
|
|
@ -33,6 +33,7 @@ def cleanup_test_data():
|
|||
db_session.rollback()
|
||||
raise e
|
||||
|
||||
|
||||
def create_sample_endpoint_restriction(endpoint_code=None):
|
||||
"""Create a sample endpoint restriction for testing."""
|
||||
if endpoint_code is None:
|
||||
|
|
@ -43,8 +44,7 @@ def create_sample_endpoint_restriction(endpoint_code=None):
|
|||
try:
|
||||
# First check if record exists
|
||||
existing = EndpointRestriction.filter_one(
|
||||
EndpointRestriction.endpoint_code == endpoint_code,
|
||||
db=db_session
|
||||
EndpointRestriction.endpoint_code == endpoint_code, db=db_session
|
||||
)
|
||||
|
||||
if existing and existing.data:
|
||||
|
|
@ -77,6 +77,7 @@ def create_sample_endpoint_restriction(endpoint_code=None):
|
|||
db_session.rollback()
|
||||
raise e
|
||||
|
||||
|
||||
def test_filter_by_one():
|
||||
"""Test filtering a single record by keyword arguments."""
|
||||
print("\nTesting filter_by_one...")
|
||||
|
|
@ -84,22 +85,20 @@ def test_filter_by_one():
|
|||
try:
|
||||
# Set up pre_query first
|
||||
EndpointRestriction.pre_query = EndpointRestriction.filter_all(
|
||||
EndpointRestriction.endpoint_method == "GET",
|
||||
db=db_session
|
||||
EndpointRestriction.endpoint_method == "GET", db=db_session
|
||||
).query
|
||||
|
||||
sample_endpoint = create_sample_endpoint_restriction("TEST001")
|
||||
result = EndpointRestriction.filter_by_one(
|
||||
db=db_session,
|
||||
endpoint_code="TEST001"
|
||||
db=db_session, endpoint_code="TEST001"
|
||||
)
|
||||
|
||||
# Test PostgresResponse properties
|
||||
success = (
|
||||
result is not None and
|
||||
result.count == 1 and
|
||||
result.total_count == 1 and
|
||||
result.is_list is False
|
||||
result is not None
|
||||
and result.count == 1
|
||||
and result.total_count == 1
|
||||
and result.is_list is False
|
||||
)
|
||||
print(f"Test {'passed' if success else 'failed'}")
|
||||
return success
|
||||
|
|
@ -107,6 +106,7 @@ def test_filter_by_one():
|
|||
print(f"Test failed with exception: {e}")
|
||||
return False
|
||||
|
||||
|
||||
def test_filter_by_one_system():
|
||||
"""Test filtering a single record by keyword arguments without status filtering."""
|
||||
print("\nTesting filter_by_one_system...")
|
||||
|
|
@ -114,23 +114,20 @@ def test_filter_by_one_system():
|
|||
try:
|
||||
# Set up pre_query first
|
||||
EndpointRestriction.pre_query = EndpointRestriction.filter_all(
|
||||
EndpointRestriction.endpoint_method == "GET",
|
||||
db=db_session
|
||||
EndpointRestriction.endpoint_method == "GET", db=db_session
|
||||
).query
|
||||
|
||||
sample_endpoint = create_sample_endpoint_restriction("TEST002")
|
||||
result = EndpointRestriction.filter_by_one(
|
||||
db=db_session,
|
||||
endpoint_code="TEST002",
|
||||
system=True
|
||||
db=db_session, endpoint_code="TEST002", system=True
|
||||
)
|
||||
|
||||
# Test PostgresResponse properties
|
||||
success = (
|
||||
result is not None and
|
||||
result.count == 1 and
|
||||
result.total_count == 1 and
|
||||
result.is_list is False
|
||||
result is not None
|
||||
and result.count == 1
|
||||
and result.total_count == 1
|
||||
and result.is_list is False
|
||||
)
|
||||
print(f"Test {'passed' if success else 'failed'}")
|
||||
return success
|
||||
|
|
@ -138,6 +135,7 @@ def test_filter_by_one_system():
|
|||
print(f"Test failed with exception: {e}")
|
||||
return False
|
||||
|
||||
|
||||
def test_filter_one():
|
||||
"""Test filtering a single record by expressions."""
|
||||
print("\nTesting filter_one...")
|
||||
|
|
@ -145,22 +143,20 @@ def test_filter_one():
|
|||
try:
|
||||
# Set up pre_query first
|
||||
EndpointRestriction.pre_query = EndpointRestriction.filter_all(
|
||||
EndpointRestriction.endpoint_method == "GET",
|
||||
db=db_session
|
||||
EndpointRestriction.endpoint_method == "GET", db=db_session
|
||||
).query
|
||||
|
||||
sample_endpoint = create_sample_endpoint_restriction("TEST003")
|
||||
result = EndpointRestriction.filter_one(
|
||||
EndpointRestriction.endpoint_code == "TEST003",
|
||||
db=db_session
|
||||
EndpointRestriction.endpoint_code == "TEST003", db=db_session
|
||||
)
|
||||
|
||||
# Test PostgresResponse properties
|
||||
success = (
|
||||
result is not None and
|
||||
result.count == 1 and
|
||||
result.total_count == 1 and
|
||||
result.is_list is False
|
||||
result is not None
|
||||
and result.count == 1
|
||||
and result.total_count == 1
|
||||
and result.is_list is False
|
||||
)
|
||||
print(f"Test {'passed' if success else 'failed'}")
|
||||
return success
|
||||
|
|
@ -168,6 +164,7 @@ def test_filter_one():
|
|||
print(f"Test failed with exception: {e}")
|
||||
return False
|
||||
|
||||
|
||||
def test_filter_one_system():
|
||||
"""Test filtering a single record by expressions without status filtering."""
|
||||
print("\nTesting filter_one_system...")
|
||||
|
|
@ -175,22 +172,20 @@ def test_filter_one_system():
|
|||
try:
|
||||
# Set up pre_query first
|
||||
EndpointRestriction.pre_query = EndpointRestriction.filter_all(
|
||||
EndpointRestriction.endpoint_method == "GET",
|
||||
db=db_session
|
||||
EndpointRestriction.endpoint_method == "GET", db=db_session
|
||||
).query
|
||||
|
||||
sample_endpoint = create_sample_endpoint_restriction("TEST004")
|
||||
result = EndpointRestriction.filter_one_system(
|
||||
EndpointRestriction.endpoint_code == "TEST004",
|
||||
db=db_session
|
||||
EndpointRestriction.endpoint_code == "TEST004", db=db_session
|
||||
)
|
||||
|
||||
# Test PostgresResponse properties
|
||||
success = (
|
||||
result is not None and
|
||||
result.count == 1 and
|
||||
result.total_count == 1 and
|
||||
result.is_list is False
|
||||
result is not None
|
||||
and result.count == 1
|
||||
and result.total_count == 1
|
||||
and result.is_list is False
|
||||
)
|
||||
print(f"Test {'passed' if success else 'failed'}")
|
||||
return success
|
||||
|
|
@ -198,6 +193,7 @@ def test_filter_one_system():
|
|||
print(f"Test failed with exception: {e}")
|
||||
return False
|
||||
|
||||
|
||||
def test_filter_all():
|
||||
"""Test filtering multiple records by expressions."""
|
||||
print("\nTesting filter_all...")
|
||||
|
|
@ -205,8 +201,7 @@ def test_filter_all():
|
|||
try:
|
||||
# Set up pre_query first
|
||||
EndpointRestriction.pre_query = EndpointRestriction.filter_all(
|
||||
EndpointRestriction.endpoint_method == "GET",
|
||||
db=db_session
|
||||
EndpointRestriction.endpoint_method == "GET", db=db_session
|
||||
).query
|
||||
|
||||
# Create two endpoint restrictions
|
||||
|
|
@ -214,16 +209,15 @@ def test_filter_all():
|
|||
endpoint2 = create_sample_endpoint_restriction("TEST006")
|
||||
|
||||
result = EndpointRestriction.filter_all(
|
||||
EndpointRestriction.endpoint_method.in_(["GET", "GET"]),
|
||||
db=db_session
|
||||
EndpointRestriction.endpoint_method.in_(["GET", "GET"]), db=db_session
|
||||
)
|
||||
|
||||
# Test PostgresResponse properties
|
||||
success = (
|
||||
result is not None and
|
||||
result.count == 2 and
|
||||
result.total_count == 2 and
|
||||
result.is_list is True
|
||||
result is not None
|
||||
and result.count == 2
|
||||
and result.total_count == 2
|
||||
and result.is_list is True
|
||||
)
|
||||
print(f"Test {'passed' if success else 'failed'}")
|
||||
return success
|
||||
|
|
@ -231,6 +225,7 @@ def test_filter_all():
|
|||
print(f"Test failed with exception: {e}")
|
||||
return False
|
||||
|
||||
|
||||
def test_filter_all_system():
|
||||
"""Test filtering multiple records by expressions without status filtering."""
|
||||
print("\nTesting filter_all_system...")
|
||||
|
|
@ -238,8 +233,7 @@ def test_filter_all_system():
|
|||
try:
|
||||
# Set up pre_query first
|
||||
EndpointRestriction.pre_query = EndpointRestriction.filter_all(
|
||||
EndpointRestriction.endpoint_method == "GET",
|
||||
db=db_session
|
||||
EndpointRestriction.endpoint_method == "GET", db=db_session
|
||||
).query
|
||||
|
||||
# Create two endpoint restrictions
|
||||
|
|
@ -247,16 +241,15 @@ def test_filter_all_system():
|
|||
endpoint2 = create_sample_endpoint_restriction("TEST008")
|
||||
|
||||
result = EndpointRestriction.filter_all_system(
|
||||
EndpointRestriction.endpoint_method.in_(["GET", "GET"]),
|
||||
db=db_session
|
||||
EndpointRestriction.endpoint_method.in_(["GET", "GET"]), db=db_session
|
||||
)
|
||||
|
||||
# Test PostgresResponse properties
|
||||
success = (
|
||||
result is not None and
|
||||
result.count == 2 and
|
||||
result.total_count == 2 and
|
||||
result.is_list is True
|
||||
result is not None
|
||||
and result.count == 2
|
||||
and result.total_count == 2
|
||||
and result.is_list is True
|
||||
)
|
||||
print(f"Test {'passed' if success else 'failed'}")
|
||||
return success
|
||||
|
|
@ -264,6 +257,7 @@ def test_filter_all_system():
|
|||
print(f"Test failed with exception: {e}")
|
||||
return False
|
||||
|
||||
|
||||
def test_filter_by_all_system():
|
||||
"""Test filtering multiple records by keyword arguments without status filtering."""
|
||||
print("\nTesting filter_by_all_system...")
|
||||
|
|
@ -271,8 +265,7 @@ def test_filter_by_all_system():
|
|||
try:
|
||||
# Set up pre_query first
|
||||
EndpointRestriction.pre_query = EndpointRestriction.filter_all(
|
||||
EndpointRestriction.endpoint_method == "GET",
|
||||
db=db_session
|
||||
EndpointRestriction.endpoint_method == "GET", db=db_session
|
||||
).query
|
||||
|
||||
# Create two endpoint restrictions
|
||||
|
|
@ -280,16 +273,15 @@ def test_filter_by_all_system():
|
|||
endpoint2 = create_sample_endpoint_restriction("TEST010")
|
||||
|
||||
result = EndpointRestriction.filter_by_all_system(
|
||||
db=db_session,
|
||||
endpoint_method="GET"
|
||||
db=db_session, endpoint_method="GET"
|
||||
)
|
||||
|
||||
# Test PostgresResponse properties
|
||||
success = (
|
||||
result is not None and
|
||||
result.count == 2 and
|
||||
result.total_count == 2 and
|
||||
result.is_list is True
|
||||
result is not None
|
||||
and result.count == 2
|
||||
and result.total_count == 2
|
||||
and result.is_list is True
|
||||
)
|
||||
print(f"Test {'passed' if success else 'failed'}")
|
||||
return success
|
||||
|
|
@ -297,13 +289,16 @@ def test_filter_by_all_system():
|
|||
print(f"Test failed with exception: {e}")
|
||||
return False
|
||||
|
||||
|
||||
def test_get_not_expired_query_arg():
|
||||
"""Test adding expiry date filtering to query arguments."""
|
||||
print("\nTesting get_not_expired_query_arg...")
|
||||
with EndpointRestriction.new_session() as db_session:
|
||||
try:
|
||||
# Create a sample endpoint with a unique code
|
||||
endpoint_code = f"TEST{int(arrow.now().timestamp())}{arrow.now().microsecond}"
|
||||
endpoint_code = (
|
||||
f"TEST{int(arrow.now().timestamp())}{arrow.now().microsecond}"
|
||||
)
|
||||
sample_endpoint = create_sample_endpoint_restriction(endpoint_code)
|
||||
|
||||
# Test the query argument generation
|
||||
|
|
@ -311,9 +306,15 @@ def test_get_not_expired_query_arg():
|
|||
|
||||
# Verify the arguments
|
||||
success = (
|
||||
len(args) == 2 and
|
||||
any(str(arg).startswith("endpoint_restriction.expiry_starts") for arg in args) and
|
||||
any(str(arg).startswith("endpoint_restriction.expiry_ends") for arg in args)
|
||||
len(args) == 2
|
||||
and any(
|
||||
str(arg).startswith("endpoint_restriction.expiry_starts")
|
||||
for arg in args
|
||||
)
|
||||
and any(
|
||||
str(arg).startswith("endpoint_restriction.expiry_ends")
|
||||
for arg in args
|
||||
)
|
||||
)
|
||||
print(f"Test {'passed' if success else 'failed'}")
|
||||
return success
|
||||
|
|
@ -321,6 +322,7 @@ def test_get_not_expired_query_arg():
|
|||
print(f"Test failed with exception: {e}")
|
||||
return False
|
||||
|
||||
|
||||
def test_add_new_arg_to_args():
|
||||
"""Test adding new arguments to query arguments."""
|
||||
print("\nTesting add_new_arg_to_args...")
|
||||
|
|
@ -328,12 +330,16 @@ def test_add_new_arg_to_args():
|
|||
args = (EndpointRestriction.endpoint_code == "TEST001",)
|
||||
new_arg = EndpointRestriction.endpoint_method == "GET"
|
||||
|
||||
updated_args = EndpointRestriction.add_new_arg_to_args(args, "endpoint_method", new_arg)
|
||||
updated_args = EndpointRestriction.add_new_arg_to_args(
|
||||
args, "endpoint_method", new_arg
|
||||
)
|
||||
success = len(updated_args) == 2
|
||||
|
||||
# Test duplicate prevention
|
||||
duplicate_arg = EndpointRestriction.endpoint_method == "GET"
|
||||
updated_args = EndpointRestriction.add_new_arg_to_args(updated_args, "endpoint_method", duplicate_arg)
|
||||
updated_args = EndpointRestriction.add_new_arg_to_args(
|
||||
updated_args, "endpoint_method", duplicate_arg
|
||||
)
|
||||
success = success and len(updated_args) == 2 # Should not add duplicate
|
||||
|
||||
print(f"Test {'passed' if success else 'failed'}")
|
||||
|
|
@ -342,6 +348,7 @@ def test_add_new_arg_to_args():
|
|||
print(f"Test failed with exception: {e}")
|
||||
return False
|
||||
|
||||
|
||||
def test_produce_query_to_add():
|
||||
"""Test adding query parameters to filter options."""
|
||||
print("\nTesting produce_query_to_add...")
|
||||
|
|
@ -349,28 +356,22 @@ def test_produce_query_to_add():
|
|||
try:
|
||||
sample_endpoint = create_sample_endpoint_restriction("TEST001")
|
||||
filter_list = {
|
||||
"query": {
|
||||
"endpoint_method": "GET",
|
||||
"endpoint_code": "TEST001"
|
||||
}
|
||||
"query": {"endpoint_method": "GET", "endpoint_code": "TEST001"}
|
||||
}
|
||||
args = ()
|
||||
|
||||
updated_args = EndpointRestriction.produce_query_to_add(filter_list, args)
|
||||
success = len(updated_args) == 2
|
||||
|
||||
result = EndpointRestriction.filter_all(
|
||||
*updated_args,
|
||||
db=db_session
|
||||
)
|
||||
result = EndpointRestriction.filter_all(*updated_args, db=db_session)
|
||||
|
||||
# Test PostgresResponse properties
|
||||
success = (
|
||||
success and
|
||||
result is not None and
|
||||
result.count == 1 and
|
||||
result.total_count == 1 and
|
||||
result.is_list is True
|
||||
success
|
||||
and result is not None
|
||||
and result.count == 1
|
||||
and result.total_count == 1
|
||||
and result.is_list is True
|
||||
)
|
||||
|
||||
print(f"Test {'passed' if success else 'failed'}")
|
||||
|
|
@ -379,6 +380,7 @@ def test_produce_query_to_add():
|
|||
print(f"Test failed with exception: {e}")
|
||||
return False
|
||||
|
||||
|
||||
def test_get_dict():
|
||||
"""Test the get_dict() function for single-record filters."""
|
||||
print("\nTesting get_dict...")
|
||||
|
|
@ -386,8 +388,7 @@ def test_get_dict():
|
|||
try:
|
||||
# Set up pre_query first
|
||||
EndpointRestriction.pre_query = EndpointRestriction.filter_all(
|
||||
EndpointRestriction.endpoint_method == "GET",
|
||||
db=db_session
|
||||
EndpointRestriction.endpoint_method == "GET", db=db_session
|
||||
).query
|
||||
|
||||
# Create a sample endpoint
|
||||
|
|
@ -396,8 +397,7 @@ def test_get_dict():
|
|||
|
||||
# Get the endpoint using filter_one
|
||||
result = EndpointRestriction.filter_one(
|
||||
EndpointRestriction.endpoint_code == endpoint_code,
|
||||
db=db_session
|
||||
EndpointRestriction.endpoint_code == endpoint_code, db=db_session
|
||||
)
|
||||
|
||||
# Get the data and convert to dict
|
||||
|
|
@@ -406,16 +406,16 @@ def test_get_dict():
|
|||
|
||||
# Test dictionary properties
|
||||
success = (
|
||||
data_dict is not None and
|
||||
isinstance(data_dict, dict) and
|
||||
data_dict.get("endpoint_code") == endpoint_code and
|
||||
data_dict.get("endpoint_method") == "GET" and
|
||||
data_dict.get("endpoint_function") == "test_function" and
|
||||
data_dict.get("endpoint_name") == "Test Endpoint" and
|
||||
data_dict.get("endpoint_desc") == "Test Description" and
|
||||
data_dict.get("is_confirmed") is True and
|
||||
data_dict.get("active") is True and
|
||||
data_dict.get("deleted") is False
|
||||
data_dict is not None
|
||||
and isinstance(data_dict, dict)
|
||||
and data_dict.get("endpoint_code") == endpoint_code
|
||||
and data_dict.get("endpoint_method") == "GET"
|
||||
and data_dict.get("endpoint_function") == "test_function"
|
||||
and data_dict.get("endpoint_name") == "Test Endpoint"
|
||||
and data_dict.get("endpoint_desc") == "Test Description"
|
||||
and data_dict.get("is_confirmed") is True
|
||||
and data_dict.get("active") is True
|
||||
and data_dict.get("deleted") is False
|
||||
)
|
||||
|
||||
print(f"Test {'passed' if success else 'failed'}")
|
||||
|
|
@@ -424,6 +424,7 @@ def test_get_dict():
|
|||
print(f"Test failed with exception: {e}")
|
||||
return False
|
||||
|
||||
|
||||
def run_all_tests():
|
||||
"""Run all tests and report results."""
|
||||
print("Starting EndpointRestriction tests...")
|
||||
|
|
@@ -442,7 +443,7 @@ def run_all_tests():
|
|||
test_get_not_expired_query_arg,
|
||||
test_add_new_arg_to_args,
|
||||
test_produce_query_to_add,
|
||||
test_get_dict # Added new test
|
||||
test_get_dict, # Added new test
|
||||
]
|
||||
passed_list, not_passed_list = [], []
|
||||
passed, failed = 0, 0
|
||||
|
|
@@ -453,33 +454,24 @@ def run_all_tests():
|
|||
try:
|
||||
if test():
|
||||
passed += 1
|
||||
passed_list.append(
|
||||
f"Test {test.__name__} passed"
|
||||
)
|
||||
passed_list.append(f"Test {test.__name__} passed")
|
||||
else:
|
||||
failed += 1
|
||||
not_passed_list.append(
|
||||
f"Test {test.__name__} failed"
|
||||
)
|
||||
not_passed_list.append(f"Test {test.__name__} failed")
|
||||
except Exception as e:
|
||||
print(f"Test {test.__name__} failed with exception: {e}")
|
||||
failed += 1
|
||||
not_passed_list.append(
|
||||
f"Test {test.__name__} failed"
|
||||
)
|
||||
not_passed_list.append(f"Test {test.__name__} failed")
|
||||
|
||||
print(f"\nTest Results: {passed} passed, {failed} failed")
|
||||
print('Passed Tests:')
|
||||
print(
|
||||
"\n".join(passed_list)
|
||||
)
|
||||
print('Failed Tests:')
|
||||
print(
|
||||
"\n".join(not_passed_list)
|
||||
)
|
||||
print("Passed Tests:")
|
||||
print("\n".join(passed_list))
|
||||
print("Failed Tests:")
|
||||
print("\n".join(not_passed_list))
|
||||
|
||||
return passed, failed
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
generate_table_in_postgres()
|
||||
run_all_tests()
|
||||
|
|
|
|||
|
|
@@ -27,4 +27,3 @@ class EndpointRestriction(CrudCollection):
|
|||
endpoint_code: Mapped[str] = mapped_column(
|
||||
String, server_default="", unique=True, comment="Unique code for the endpoint"
|
||||
)
|
||||
|
||||
|
|
|
|||
|
|
@@ -15,7 +15,7 @@ from typing import Union, Dict, List, Optional, Any, TypeVar
|
|||
from Controllers.Redis.connection import redis_cli
|
||||
|
||||
|
||||
T = TypeVar('T', Dict[str, Any], List[Any])
|
||||
T = TypeVar("T", Dict[str, Any], List[Any])
|
||||
|
||||
|
||||
class RedisKeyError(Exception):
|
||||
|
|
@@ -286,7 +286,7 @@ class RedisRow:
|
|||
raise RedisKeyError("Key exceeds maximum length of 512MB")
|
||||
|
||||
# Validate key format (basic check for invalid characters)
|
||||
if any(c in key_str for c in ['\n', '\r', '\t', '\0']):
|
||||
if any(c in key_str for c in ["\n", "\r", "\t", "\0"]):
|
||||
raise RedisKeyError("Key contains invalid characters")
|
||||
|
||||
self.key = key if isinstance(key, bytes) else str(key).encode()
|
||||
|
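The two checks above cap keys at Redis's 512MB limit and reject control characters. A self-contained sketch of the same validation, assuming keys arrive as str or bytes (the helper name is invented for illustration; RedisKeyError and the limits come from the hunk itself):

MAX_KEY_BYTES = 512 * 1024 * 1024  # Redis caps key size at 512MB


class RedisKeyError(Exception):
    """Raised when a Redis key fails validation."""


def validate_key(key) -> bytes:
    key_str = key.decode() if isinstance(key, bytes) else str(key)
    if len(key_str.encode()) > MAX_KEY_BYTES:
        raise RedisKeyError("Key exceeds maximum length of 512MB")
    if any(c in key_str for c in ["\n", "\r", "\t", "\0"]):
        raise RedisKeyError("Key contains invalid characters")
    return key if isinstance(key, bytes) else key_str.encode()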
|
|
|||
|
|
@@ -5,6 +5,7 @@ class Configs(BaseSettings):
|
|||
"""
|
||||
MongoDB configuration settings.
|
||||
"""
|
||||
|
||||
HOST: str = ""
|
||||
PASSWORD: str = ""
|
||||
PORT: int = 0
|
||||
|
|
|
|||
|
|
@@ -98,9 +98,7 @@ class RedisConn:
|
|||
err = e
|
||||
return False
|
||||
|
||||
def set_connection(
|
||||
self, **kwargs
|
||||
) -> Redis:
|
||||
def set_connection(self, **kwargs) -> Redis:
|
||||
"""
|
||||
Recreate Redis connection with new parameters.
|
||||
|
||||
|
|
|
|||
|
|
@@ -14,6 +14,7 @@ def example_set_json() -> None:
|
|||
result = RedisActions.set_json(list_keys=keys, value=data, expires=expiry)
|
||||
print("Set JSON with expiry:", result.as_dict())
|
||||
|
||||
|
||||
def example_get_json() -> None:
|
||||
"""Example of retrieving JSON data from Redis."""
|
||||
# Example 1: Get all matching keys
|
||||
|
|
@@ -25,11 +26,16 @@ def example_get_json() -> None:
|
|||
result = RedisActions.get_json(list_keys=keys, limit=5)
|
||||
print("Get JSON with limit:", result.as_dict())
|
||||
|
||||
|
||||
def example_get_json_iterator() -> None:
|
||||
"""Example of using the JSON iterator for large datasets."""
|
||||
keys = ["user", "profile", "*"]
|
||||
for row in RedisActions.get_json_iterator(list_keys=keys):
|
||||
print("Iterating over JSON row:", row.as_dict if isinstance(row.as_dict, dict) else row.as_dict)
|
||||
print(
|
||||
"Iterating over JSON row:",
|
||||
row.as_dict if isinstance(row.as_dict, dict) else row.as_dict,
|
||||
)
|
||||
|
||||
|
||||
def example_delete_key() -> None:
|
||||
"""Example of deleting a specific key."""
|
||||
|
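Note that both branches of the conditional in the reformatted print call evaluate to row.as_dict, so the isinstance check has no effect. If the intent was to fall back to the raw stored row for non-dict payloads, a variant could look like this (row.row is assumed from the RedisRow attributes used elsewhere in this commit; this is a sketch, not the committed code):

def print_json_rows(rows) -> None:
    for row in rows:
        # use the dict form when available, otherwise the raw stored row
        payload = row.as_dict if isinstance(row.as_dict, dict) else row.row
        print("Iterating over JSON row:", payload)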
|
@@ -37,12 +43,14 @@ def example_delete_key() -> None:
|
|||
result = RedisActions.delete_key(key)
|
||||
print("Delete specific key:", result)
|
||||
|
||||
|
||||
def example_delete() -> None:
|
||||
"""Example of deleting multiple keys matching a pattern."""
|
||||
keys = ["user", "profile", "*"]
|
||||
result = RedisActions.delete(list_keys=keys)
|
||||
print("Delete multiple keys:", result)
|
||||
|
||||
|
||||
def example_refresh_ttl() -> None:
|
||||
"""Example of refreshing TTL for a key."""
|
||||
key = "user:profile:123"
|
||||
|
|
@@ -50,21 +58,25 @@ def example_refresh_ttl() -> None:
|
|||
result = RedisActions.refresh_ttl(key=key, expires=new_expiry)
|
||||
print("Refresh TTL:", result.as_dict())
|
||||
|
||||
|
||||
def example_key_exists() -> None:
|
||||
"""Example of checking if a key exists."""
|
||||
key = "user:profile:123"
|
||||
exists = RedisActions.key_exists(key)
|
||||
print(f"Key {key} exists:", exists)
|
||||
|
||||
|
||||
def example_resolve_expires_at() -> None:
|
||||
"""Example of resolving expiry time for a key."""
|
||||
from Controllers.Redis.base import RedisRow
|
||||
|
||||
redis_row = RedisRow()
|
||||
redis_row.set_key("user:profile:123")
|
||||
print(redis_row.keys)
|
||||
expires_at = RedisActions.resolve_expires_at(redis_row)
|
||||
print("Resolve expires at:", expires_at)
|
||||
|
||||
|
||||
def run_all_examples() -> None:
|
||||
"""Run all example functions to demonstrate RedisActions functionality."""
|
||||
print("\n=== Redis Actions Examples ===\n")
|
||||
|
|
@@ -93,5 +105,6 @@ def run_all_examples() -> None:
|
|||
print("\n8. Resolving expiry time:")
|
||||
example_resolve_expires_at()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
run_all_examples()
|
||||
|
|
|
|||
|
|
@@ -67,7 +67,7 @@ class RedisResponse:
|
|||
# Process single RedisRow
|
||||
if isinstance(data, RedisRow):
|
||||
result = {**main_dict}
|
||||
if hasattr(data, 'keys') and hasattr(data, 'row'):
|
||||
if hasattr(data, "keys") and hasattr(data, "row"):
|
||||
if not isinstance(data.keys, str):
|
||||
raise ValueError("RedisRow keys must be string type")
|
||||
result[data.keys] = data.row
|
||||
|
|
@@ -80,7 +80,11 @@ class RedisResponse:
|
|||
# Handle list of RedisRow objects
|
||||
rows_dict = {}
|
||||
for row in data:
|
||||
if isinstance(row, RedisRow) and hasattr(row, 'keys') and hasattr(row, 'row'):
|
||||
if (
|
||||
isinstance(row, RedisRow)
|
||||
and hasattr(row, "keys")
|
||||
and hasattr(row, "row")
|
||||
):
|
||||
if not isinstance(row.keys, str):
|
||||
raise ValueError("RedisRow keys must be string type")
|
||||
rows_dict[row.keys] = row.row
|
||||
|
|
@@ -137,10 +141,10 @@ class RedisResponse:
|
|||
|
||||
if isinstance(self.data, list) and self.data:
|
||||
item = self.data[0]
|
||||
if isinstance(item, RedisRow) and hasattr(item, 'row'):
|
||||
if isinstance(item, RedisRow) and hasattr(item, "row"):
|
||||
return item.row
|
||||
return item
|
||||
elif isinstance(self.data, RedisRow) and hasattr(self.data, 'row'):
|
||||
elif isinstance(self.data, RedisRow) and hasattr(self.data, "row"):
|
||||
return self.data.row
|
||||
elif isinstance(self.data, dict):
|
||||
return self.data
|
||||
|
|
@@ -173,11 +177,11 @@ class RedisResponse:
|
|||
response["error"] = self.error
|
||||
|
||||
if self.data is not None:
|
||||
if self.data_type == "row" and hasattr(self.data, 'to_dict'):
|
||||
if self.data_type == "row" and hasattr(self.data, "to_dict"):
|
||||
response["data"] = self.data.to_dict()
|
||||
elif self.data_type == "list":
|
||||
try:
|
||||
if all(hasattr(item, 'to_dict') for item in self.data):
|
||||
if all(hasattr(item, "to_dict") for item in self.data):
|
||||
response["data"] = [item.to_dict() for item in self.data]
|
||||
else:
|
||||
response["data"] = self.data
|
||||
|
|
@@ -192,5 +196,5 @@ class RedisResponse:
|
|||
return {
|
||||
"success": False,
|
||||
"message": "Error formatting response",
|
||||
"error": str(e)
|
||||
"error": str(e),
|
||||
}
|
||||
|
|
@@ -15,11 +15,15 @@ class PasswordModule:
|
|||
@staticmethod
|
||||
def generate_token(length=32) -> str:
|
||||
letters = "abcdefghijklmnopqrstuvwxyz"
|
||||
merged_letters = [letter for letter in letters] + [letter.upper() for letter in letters]
|
||||
merged_letters = [letter for letter in letters] + [
|
||||
letter.upper() for letter in letters
|
||||
]
|
||||
token_generated = secrets.token_urlsafe(length)
|
||||
for i in str(token_generated):
|
||||
if i not in merged_letters:
|
||||
token_generated = token_generated.replace(i, random.choice(merged_letters), 1)
|
||||
token_generated = token_generated.replace(
|
||||
i, random.choice(merged_letters), 1
|
||||
)
|
||||
return token_generated
|
||||
|
||||
@classmethod
|
||||
|
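generate_token above produces a URL-safe token and then swaps every character that is not an ASCII letter for a random letter, so the result contains letters only. A stand-alone illustration using string.ascii_letters in place of the hand-built letter list (same idea, not the project's exact code):

import random
import secrets
import string


def generate_letters_only_token(length: int = 32) -> str:
    token = secrets.token_urlsafe(length)
    for ch in token:
        if ch not in string.ascii_letters:
            # replace digits, '-' and '_' with a random ASCII letter
            token = token.replace(ch, random.choice(string.ascii_letters), 1)
    return token


print(generate_letters_only_token(16))  # letters-only token, e.g. 'QxZpLmNoPqRsTuVwZa'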
|
|
|||
|
|
@@ -573,4 +573,3 @@ class AccountRecords(CrudCollection):
|
|||
# )
|
||||
# )
|
||||
# print("is all dues_type", payment_dict["dues_type"], paid_value)
|
||||
|
||||
|
|
|
|||
|
|
@@ -6,7 +6,8 @@ from sqlalchemy import (
|
|||
Boolean,
|
||||
BigInteger,
|
||||
Integer,
|
||||
Text, or_,
|
||||
Text,
|
||||
or_,
|
||||
)
|
||||
from sqlalchemy.orm import mapped_column, Mapped
|
||||
from Controllers.Postgres.mixin import CrudCollection
|
||||
|
|
@@ -107,7 +108,7 @@ class Addresses(CrudCollection):
|
|||
post_code_list = RelationshipEmployee2PostCode.filter_all(
|
||||
RelationshipEmployee2PostCode.employee_id
|
||||
== token_dict.selected_company.employee_id,
|
||||
db=db_session
|
||||
db=db_session,
|
||||
).data
|
||||
post_code_id_list = [post_code.member_id for post_code in post_code_list]
|
||||
if not post_code_id_list:
|
||||
|
|
@@ -118,7 +119,9 @@ class Addresses(CrudCollection):
|
|||
# status_code=404,
|
||||
# detail="User has no post code registered. User can not list addresses.",
|
||||
# )
|
||||
cls.pre_query = cls.filter_all(cls.post_code_id.in_(post_code_id_list), db=db_session).query
|
||||
cls.pre_query = cls.filter_all(
|
||||
cls.post_code_id.in_(post_code_id_list), db=db_session
|
||||
).query
|
||||
filter_cls = cls.filter_all(*filter_expr or [], db=db_session)
|
||||
cls.pre_query = None
|
||||
return filter_cls.data
|
||||
|
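The hunk above follows the project's pre_query pattern: seed a class-level base query, compose the caller's filters on top of it, then clear the attribute. Restated schematically with the intent spelled out in comments (names and calls mirror the diff; this is an explanatory sketch, not additional project code):

def list_addresses_for_post_codes(cls, post_code_id_list, filter_expr, db_session):
    # 1. seed the base query with the post-code restriction
    cls.pre_query = cls.filter_all(
        cls.post_code_id.in_(post_code_id_list), db=db_session
    ).query
    # 2. the next filter_all() composes the caller's filters on top of pre_query
    filter_cls = cls.filter_all(*filter_expr or [], db=db_session)
    # 3. always reset, otherwise the restriction leaks into later queries
    cls.pre_query = None
    return filter_cls.data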
|
|
|||
|
|
@@ -244,7 +244,7 @@ class Build(CrudCollection):
|
|||
livable_parts = BuildParts.filter_all(
|
||||
BuildParts.build_id == self.id,
|
||||
BuildParts.human_livable == True,
|
||||
db=db_session
|
||||
db=db_session,
|
||||
)
|
||||
if not livable_parts.data:
|
||||
raise HTTPException(
|
||||
|
|
@@ -260,8 +260,7 @@ class Build(CrudCollection):
|
|||
for part in self.parts:
|
||||
building_types = {}
|
||||
build_type = BuildTypes.filter_by_one(
|
||||
system=True, id=part.build_part_type_id,
|
||||
db=db_session
|
||||
system=True, id=part.build_part_type_id, db=db_session
|
||||
).data
|
||||
if build_type.type_code in building_types:
|
||||
building_types[build_type.type_code]["list"].append(part.part_no)
|
||||
|
|
@@ -354,7 +353,9 @@ class BuildParts(CrudCollection):
|
|||
if build_type := BuildTypes.filter_by_one(
|
||||
system=True, id=self.part_type_id, db=db_session
|
||||
).data:
|
||||
return f"{str(build_type.type_name).upper()} : {str(self.part_no).upper()}"
|
||||
return (
|
||||
f"{str(build_type.type_name).upper()} : {str(self.part_no).upper()}"
|
||||
)
|
||||
return f"Undefined:{str(build_type.type_name).upper()}"
|
||||
|
||||
|
||||
|
|
@@ -430,7 +431,7 @@ class BuildLivingSpace(CrudCollection):
|
|||
),
|
||||
cls.start_date < formatted_date - timedelta(days=add_days),
|
||||
cls.stop_date > formatted_date + timedelta(days=add_days),
|
||||
db=db_session
|
||||
db=db_session,
|
||||
)
|
||||
return living_spaces.data, living_spaces.count
|
||||
|
||||
|
|
@@ -625,4 +626,3 @@ class BuildPersonProviding(CrudCollection):
|
|||
),
|
||||
{"comment": "People providing services for building"},
|
||||
)
|
||||
|
||||
|
|
|
|||
|
|
@@ -92,6 +92,7 @@ class BuildDecisionBook(CrudCollection):
|
|||
@classmethod
|
||||
def retrieve_active_rbm(cls):
|
||||
from Schemas.building.build import Build
|
||||
|
||||
with cls.new_session() as db_session:
|
||||
related_build = Build.find_one(id=cls.build_id)
|
||||
related_date = arrow.get(related_build.build_date)
|
||||
|
|
@@ -103,7 +104,7 @@ class BuildDecisionBook(CrudCollection):
|
|||
cls.expiry_ends <= date_processed,
|
||||
cls.decision_type == "RBM",
|
||||
cls.build_id == related_build.id,
|
||||
db=db_session
|
||||
db=db_session,
|
||||
).data
|
||||
if not book:
|
||||
cls.raise_http_exception(
|
||||
|
|
@@ -220,7 +221,8 @@ class BuildDecisionBookInvitations(CrudCollection):
|
|||
first_book_invitation = BuildDecisionBookInvitations.filter_one(
|
||||
BuildDecisionBookInvitations.build_id
|
||||
== token_dict.selected_occupant.build_id,
|
||||
BuildDecisionBookInvitations.decision_book_id == selected_decision_book.id,
|
||||
BuildDecisionBookInvitations.decision_book_id
|
||||
== selected_decision_book.id,
|
||||
BuildDecisionBookInvitations.invitation_attempt == 1,
|
||||
db=db_session,
|
||||
).data
|
||||
|
|
@@ -247,11 +249,15 @@ class BuildDecisionBookInvitations(CrudCollection):
|
|||
second_book_invitation = BuildDecisionBookInvitations.filter_one_system(
|
||||
BuildDecisionBookInvitations.build_id
|
||||
== token_dict.selected_occupant.build_id,
|
||||
BuildDecisionBookInvitations.decision_book_id == selected_decision_book.id,
|
||||
BuildDecisionBookInvitations.decision_book_id
|
||||
== selected_decision_book.id,
|
||||
BuildDecisionBookInvitations.invitation_attempt == 2,
|
||||
db=db_session,
|
||||
).data
|
||||
if not valid_invite_count >= need_attend_count and not second_book_invitation:
|
||||
if (
|
||||
not valid_invite_count >= need_attend_count
|
||||
and not second_book_invitation
|
||||
):
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail=f"In order meeting to be held, {math.ceil(need_attend_count)} people must attend "
|
||||
|
|
@@ -336,7 +342,7 @@ class BuildDecisionBookPerson(CrudCollection):
|
|||
with self.new_session() as db_session:
|
||||
all_decision_book_people = self.filter_all_system(
|
||||
BuildDecisionBookPersonOccupants.invite_id == self.invite_id,
|
||||
db=db_session
|
||||
db=db_session,
|
||||
)
|
||||
BuildDecisionBookPersonOccupants.pre_query = all_decision_book_people.query
|
||||
return BuildDecisionBookPersonOccupants.filter_all_system(
|
||||
|
|
@@ -346,8 +352,9 @@ class BuildDecisionBookPerson(CrudCollection):
|
|||
def get_occupant_types(self):
|
||||
with self.new_session() as db_session:
|
||||
if occupants := BuildDecisionBookPersonOccupants.filter_all(
|
||||
BuildDecisionBookPersonOccupants.build_decision_book_person_id == self.id,
|
||||
db=db_session
|
||||
BuildDecisionBookPersonOccupants.build_decision_book_person_id
|
||||
== self.id,
|
||||
db=db_session,
|
||||
).data:
|
||||
return occupants
|
||||
return
|
||||
|
|
@@ -355,7 +362,8 @@ class BuildDecisionBookPerson(CrudCollection):
|
|||
def check_occupant_type(self, occupant_type):
|
||||
with self.new_session() as db_session:
|
||||
book_person_occupant_type = BuildDecisionBookPersonOccupants.filter_one(
|
||||
BuildDecisionBookPersonOccupants.build_decision_book_person_id == self.id,
|
||||
BuildDecisionBookPersonOccupants.build_decision_book_person_id
|
||||
== self.id,
|
||||
BuildDecisionBookPersonOccupants.occupant_type_id == occupant_type.id,
|
||||
BuildDecisionBookPersonOccupants.active == True,
|
||||
BuildDecisionBookPersonOccupants.is_confirmed == True,
|
||||
|
|
|
|||
|
|
@@ -66,13 +66,12 @@ class RelationshipDutyCompany(CrudCollection):
|
|||
)
|
||||
list_match_company_id = []
|
||||
send_duties = Duties.filter_one(
|
||||
Duties.uu_id == data.duty_uu_id,
|
||||
db=db_session
|
||||
Duties.uu_id == data.duty_uu_id, db=db_session
|
||||
)
|
||||
send_user_duties = Duties.filter_one(
|
||||
Duties.duties_id == send_duties.id,
|
||||
Duties.company_id == token_duties_id,
|
||||
db=db_session
|
||||
db=db_session,
|
||||
)
|
||||
if not send_user_duties:
|
||||
raise Exception(
|
||||
|
|
@@ -81,14 +80,13 @@ class RelationshipDutyCompany(CrudCollection):
|
|||
|
||||
for company_uu_id in list(data.match_company_uu_id):
|
||||
company = Companies.filter_one(
|
||||
Companies.uu_id == company_uu_id,
|
||||
db=db_session
|
||||
Companies.uu_id == company_uu_id, db=db_session
|
||||
)
|
||||
bulk_company = RelationshipDutyCompany.filter_one(
|
||||
RelationshipDutyCompany.owner_id == token_company_id,
|
||||
RelationshipDutyCompany.relationship_type == "Bulk",
|
||||
RelationshipDutyCompany.member_id == company.id,
|
||||
db=db_session
|
||||
db=db_session,
|
||||
)
|
||||
if not bulk_company:
|
||||
raise Exception(
|
||||
|
|
@@ -105,7 +103,7 @@ class RelationshipDutyCompany(CrudCollection):
|
|||
parent_id=match_company_id.parent_id,
|
||||
relationship_type="Commercial",
|
||||
show_only=False,
|
||||
db=db_session
|
||||
db=db_session,
|
||||
)
|
||||
|
||||
@classmethod
|
||||
|
|
@@ -116,13 +114,12 @@ class RelationshipDutyCompany(CrudCollection):
|
|||
)
|
||||
list_match_company_id = []
|
||||
send_duties = Duties.filter_one(
|
||||
Duties.uu_id == data.duty_uu_id,
|
||||
db=db_session
|
||||
Duties.uu_id == data.duty_uu_id, db=db_session
|
||||
)
|
||||
send_user_duties = Duties.filter_one(
|
||||
Duties.duties_id == send_duties.id,
|
||||
Duties.company_id == token_duties_id,
|
||||
db=db_session
|
||||
db=db_session,
|
||||
)
|
||||
if not send_user_duties:
|
||||
raise Exception(
|
||||
|
|
@@ -131,14 +128,13 @@ class RelationshipDutyCompany(CrudCollection):
|
|||
|
||||
for company_uu_id in list(data.match_company_uu_id):
|
||||
company = Companies.filter_one(
|
||||
Companies.uu_id == company_uu_id,
|
||||
db=db_session
|
||||
Companies.uu_id == company_uu_id, db=db_session
|
||||
)
|
||||
bulk_company = RelationshipDutyCompany.filter_one(
|
||||
RelationshipDutyCompany.owner_id == token_company_id,
|
||||
RelationshipDutyCompany.relationship_type == "Bulk",
|
||||
RelationshipDutyCompany.member_id == company.id,
|
||||
db=db_session
|
||||
db=db_session,
|
||||
)
|
||||
if not bulk_company:
|
||||
raise Exception(
|
||||
|
|
@@ -151,7 +147,7 @@ class RelationshipDutyCompany(CrudCollection):
|
|||
Duties.init_a_company_default_duties(
|
||||
company_id=match_company_id.id,
|
||||
company_uu_id=str(match_company_id.uu_id),
|
||||
db=db_session
|
||||
db=db_session,
|
||||
)
|
||||
RelationshipDutyCompany.find_or_create(
|
||||
owner_id=token_company_id,
|
||||
|
|
@@ -160,7 +156,7 @@ class RelationshipDutyCompany(CrudCollection):
|
|||
parent_id=match_company_id.parent_id,
|
||||
relationship_type="Organization",
|
||||
show_only=False,
|
||||
db=db_session
|
||||
db=db_session,
|
||||
)
|
||||
|
||||
__table_args__ = (
|
||||
|
|
@@ -236,4 +232,3 @@ class Companies(CrudCollection):
|
|||
Index("_company_ndx_02", formal_name, public_name),
|
||||
{"comment": "Company Information"},
|
||||
)
|
||||
|
||||
|
|
|
|||
|
|
@@ -13,12 +13,20 @@ class Staff(CrudCollection):
|
|||
__tablename__ = "staff"
|
||||
__exclude__fields__ = []
|
||||
|
||||
staff_description: Mapped[str] = mapped_column(String, server_default="", comment="Staff Description")
|
||||
staff_name: Mapped[str] = mapped_column(String, nullable=False, comment="Staff Name")
|
||||
staff_code: Mapped[str] = mapped_column(String, nullable=False, comment="Staff Code")
|
||||
staff_description: Mapped[str] = mapped_column(
|
||||
String, server_default="", comment="Staff Description"
|
||||
)
|
||||
staff_name: Mapped[str] = mapped_column(
|
||||
String, nullable=False, comment="Staff Name"
|
||||
)
|
||||
staff_code: Mapped[str] = mapped_column(
|
||||
String, nullable=False, comment="Staff Code"
|
||||
)
|
||||
|
||||
duties_id: Mapped[int] = mapped_column(ForeignKey("duties.id"), nullable=False)
|
||||
duties_uu_id: Mapped[str] = mapped_column(String, nullable=False, comment="Duty UUID")
|
||||
duties_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=False, comment="Duty UUID"
|
||||
)
|
||||
|
||||
__table_args__ = ({"comment": "Staff Information"},)
|
||||
|
||||
|
|
@@ -29,9 +37,13 @@ class Employees(CrudCollection):
|
|||
__exclude__fields__ = []
|
||||
|
||||
staff_id: Mapped[int] = mapped_column(ForeignKey("staff.id"))
|
||||
staff_uu_id: Mapped[str] = mapped_column(String, nullable=False, comment="Staff UUID")
|
||||
staff_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=False, comment="Staff UUID"
|
||||
)
|
||||
people_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=True)
|
||||
people_uu_id: Mapped[str] = mapped_column(String, nullable=True, comment="People UUID")
|
||||
people_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=True, comment="People UUID"
|
||||
)
|
||||
|
||||
__table_args__ = (
|
||||
Index("employees_ndx_00", people_id, staff_id, unique=True),
|
||||
|
|
@@ -44,10 +56,18 @@ class EmployeeHistory(CrudCollection):
|
|||
__tablename__ = "employee_history"
|
||||
__exclude__fields__ = []
|
||||
|
||||
staff_id: Mapped[int] = mapped_column(ForeignKey("staff.id"), nullable=False, comment="Staff ID")
|
||||
staff_uu_id: Mapped[str] = mapped_column(String, nullable=False, comment="Staff UUID")
|
||||
people_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=False, comment="People ID")
|
||||
people_uu_id: Mapped[str] = mapped_column(String, nullable=False, comment="People UUID")
|
||||
staff_id: Mapped[int] = mapped_column(
|
||||
ForeignKey("staff.id"), nullable=False, comment="Staff ID"
|
||||
)
|
||||
staff_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=False, comment="Staff UUID"
|
||||
)
|
||||
people_id: Mapped[int] = mapped_column(
|
||||
ForeignKey("people.id"), nullable=False, comment="People ID"
|
||||
)
|
||||
people_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=False, comment="People UUID"
|
||||
)
|
||||
|
||||
__table_args__ = (
|
||||
Index("_employee_history_ndx_00", people_id, staff_id),
|
||||
|
|
@@ -67,7 +87,9 @@ class EmployeesSalaries(CrudCollection):
|
|||
Numeric(20, 6), nullable=False, comment="Net Salary"
|
||||
)
|
||||
people_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=False)
|
||||
people_uu_id: Mapped[str] = mapped_column(String, nullable=False, comment="People UUID")
|
||||
people_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=False, comment="People UUID"
|
||||
)
|
||||
|
||||
__table_args__ = (
|
||||
Index("_employee_salaries_ndx_00", people_id, "expiry_starts"),
|
||||
|
|
|
|||
|
|
@@ -110,9 +110,7 @@ class Services(CrudCollection):
|
|||
def retrieve_service_via_occupant_code(cls, occupant_code):
|
||||
with cls.new_session() as db_session:
|
||||
occupant_type = OccupantTypes.filter_by_one(
|
||||
system=True,
|
||||
occupant_code=occupant_code,
|
||||
db=db_session
|
||||
system=True, occupant_code=occupant_code, db=db_session
|
||||
).data
|
||||
if not occupant_type:
|
||||
cls.raise_http_exception(
|
||||
|
|
@@ -124,8 +122,7 @@ class Services(CrudCollection):
|
|||
},
|
||||
)
|
||||
return cls.filter_one(
|
||||
cls.related_responsibility == occupant_type.occupant_code,
|
||||
db=db_session
|
||||
cls.related_responsibility == occupant_type.occupant_code, db=db_session
|
||||
).data
|
||||
|
||||
__table_args__ = ({"comment": "Services Information"},)
|
||||
|
|
|
|||
|
|
@@ -431,4 +431,3 @@ class Contracts(CrudCollection):
|
|||
Index("_contract_ndx_01", contract_code, unique=True),
|
||||
{"comment": "Contract Information"},
|
||||
)
|
||||
|
||||
|
|
|
|||
|
|
@@ -40,15 +40,19 @@ class ApiEnumDropdown(CrudCollection):
|
|||
if search := cls.filter_one_system(
|
||||
cls.enum_class.in_(["DebitTypes"]),
|
||||
cls.uu_id == search_uu_id,
|
||||
db=db_session
|
||||
db=db_session,
|
||||
).data:
|
||||
return search
|
||||
elif search_debit:
|
||||
if search := cls.filter_one(
|
||||
cls.enum_class.in_(["DebitTypes"]), cls.key == search_debit, db=db_session
|
||||
cls.enum_class.in_(["DebitTypes"]),
|
||||
cls.key == search_debit,
|
||||
db=db_session,
|
||||
).data:
|
||||
return search
|
||||
return cls.filter_all_system(cls.enum_class.in_(["DebitTypes"]), db=db_session).data
|
||||
return cls.filter_all_system(
|
||||
cls.enum_class.in_(["DebitTypes"]), db=db_session
|
||||
).data
|
||||
|
||||
@classmethod
|
||||
def get_due_types(cls):
|
||||
|
|
@@ -56,7 +60,7 @@ class ApiEnumDropdown(CrudCollection):
|
|||
if due_list := cls.filter_all_system(
|
||||
cls.enum_class == "BuildDuesTypes",
|
||||
cls.key.in_(["BDT-A", "BDT-D"]),
|
||||
db=db_session
|
||||
db=db_session,
|
||||
).data:
|
||||
return [due.uu_id.__str__() for due in due_list]
|
||||
# raise HTTPException(
|
||||
|
|
@@ -71,17 +75,19 @@ class ApiEnumDropdown(CrudCollection):
|
|||
if search := cls.filter_one_system(
|
||||
cls.enum_class.in_(["BuildDuesTypes"]),
|
||||
cls.uu_id == search_uu_id,
|
||||
db=db_session
|
||||
db=db_session,
|
||||
).data:
|
||||
return search
|
||||
elif search_management:
|
||||
if search := cls.filter_one_system(
|
||||
cls.enum_class.in_(["BuildDuesTypes"]),
|
||||
cls.key == search_management,
|
||||
db=db_session
|
||||
db=db_session,
|
||||
).data:
|
||||
return search
|
||||
return cls.filter_all_system(cls.enum_class.in_(["BuildDuesTypes"]), db=db_session).data
|
||||
return cls.filter_all_system(
|
||||
cls.enum_class.in_(["BuildDuesTypes"]), db=db_session
|
||||
).data
|
||||
|
||||
def get_enum_dict(self):
|
||||
return {
|
||||
|
|
|
|||
|
|
@@ -50,11 +50,39 @@ services:
|
|||
ports:
|
||||
- "11222:6379"
|
||||
|
||||
template_service:
|
||||
container_name: template_service
|
||||
# template_service:
|
||||
# container_name: template_service
|
||||
# build:
|
||||
# context: .
|
||||
# dockerfile: ApiServices/TemplateService/Dockerfile
|
||||
# networks:
|
||||
# - wag-services
|
||||
# env_file:
|
||||
# - api_env.env
|
||||
# environment:
|
||||
# - API_PATH=app:app
|
||||
# - API_HOST=0.0.0.0
|
||||
# - API_PORT=8000
|
||||
# - API_LOG_LEVEL=info
|
||||
# - API_RELOAD=1
|
||||
# - API_ACCESS_TOKEN_TAG=1
|
||||
# - API_APP_NAME=evyos-template-api-gateway
|
||||
# - API_TITLE=WAG API Template Api Gateway
|
||||
# - API_FORGOT_LINK=https://template_service/forgot-password
|
||||
# - API_DESCRIPTION=This api is serves as web template api gateway only to evyos web services.
|
||||
# - API_APP_URL=https://template_service
|
||||
# ports:
|
||||
# - "8000:8000"
|
||||
# depends_on:
|
||||
# - postgres-service
|
||||
# - mongo_service
|
||||
# - redis_service
|
||||
|
||||
auth_service:
|
||||
container_name: auth_service
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ApiServices/TemplateService/Dockerfile
|
||||
dockerfile: ApiServices/AuthService/Dockerfile
|
||||
networks:
|
||||
- wag-services
|
||||
env_file:
|
||||
|
|
@@ -62,17 +90,17 @@ services:
|
|||
environment:
|
||||
- API_PATH=app:app
|
||||
- API_HOST=0.0.0.0
|
||||
- API_PORT=8000
|
||||
- API_PORT=8001
|
||||
- API_LOG_LEVEL=info
|
||||
- API_RELOAD=1
|
||||
- API_ACCESS_TOKEN_TAG=1
|
||||
- API_APP_NAME=evyos-template-api-gateway
|
||||
- API_TITLE=WAG API Template Api Gateway
|
||||
- API_FORGOT_LINK=https://template_service/forgot-password
|
||||
- API_DESCRIPTION=This api is serves as web template api gateway only to evyos web services.
|
||||
- API_APP_URL=https://template_service
|
||||
- API_ACCESS_TOKEN_TAG=eys-acs-tkn
|
||||
- API_APP_NAME=evyos-auth-api-gateway
|
||||
- API_TITLE=WAG API Auth Api Gateway
|
||||
- API_FORGOT_LINK=https://auth_service/forgot-password
|
||||
- API_DESCRIPTION=This api serves as the web auth api gateway only to evyos web services.
|
||||
- API_APP_URL=https://auth_service
|
||||
ports:
|
||||
- "8000:8000"
|
||||
- "8001:8001"
|
||||
depends_on:
|
||||
- postgres-service
|
||||
- mongo_service
|
||||
|
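The auth_service block feeds API_-prefixed variables into the container. Presumably the pydantic settings class picks them up via an env prefix, roughly as sketched below (the env_prefix, defaults, and the pydantic_settings import are assumptions for illustration, not confirmed by this diff):

from pydantic_settings import BaseSettings  # `from pydantic import BaseSettings` on pydantic v1


class ApiConfigs(BaseSettings):
    HOST: str = "0.0.0.0"
    PORT: int = 8001
    LOG_LEVEL: str = "info"
    RELOAD: int = 1
    ACCESS_TOKEN_TAG: str = ""

    class Config:
        env_prefix = "API_"  # API_PORT=8001 populates PORT, and so on


api_config = ApiConfigs()
print(api_config.PORT)  # -> 8001 when API_PORT=8001 is exported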
|
|
|||