updated docs

Parent: 87e5f5ab06
Commit: 1ba2694a9d
@@ -71,7 +71,10 @@ async def authentication_select_company_or_occupant_type(
     if data.is_employee:
         return {"selected_company": data.company_uu_id, "completed": True}
     elif data.is_occupant:
-        return {"selected_occupant": data.build_living_space_uu_id, "completed": True}
+        return {
+            "selected_occupant": data.build_living_space_uu_id,
+            "completed": True,
+        }
     return {"completed": False, "selected_company": None, "selected_occupant": None}

@@ -46,8 +46,14 @@ class AccountListEventMethod(MethodToEvent):
         "208e6273-17ef-44f0-814a-8098f816b63a": "account_records_list_flt_res",
     }
     __event_validation__ = {
-        "7192c2aa-5352-4e36-98b3-dafb7d036a3d": (AccountRecordResponse, [AccountRecords.__language_model__]),
-        "208e6273-17ef-44f0-814a-8098f816b63a": (AccountRecordResponse, [AccountRecords.__language_model__]),
+        "7192c2aa-5352-4e36-98b3-dafb7d036a3d": (
+            AccountRecordResponse,
+            [AccountRecords.__language_model__],
+        ),
+        "208e6273-17ef-44f0-814a-8098f816b63a": (
+            AccountRecordResponse,
+            [AccountRecords.__language_model__],
+        ),
     }

     @classmethod
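As an aside, every __event_validation__ hunk in this commit follows the same shape: an event UUID maps to a (response model, [language models]) tuple, which retrieve_event_response_model and retrieve_language_parameters unpack later in this commit. A minimal sketch of that contract, assuming the AccountListEventMethod class and UUID from the hunk above:

entry = AccountListEventMethod.__event_validation__["7192c2aa-5352-4e36-98b3-dafb7d036a3d"]
response_model, language_models = entry  # (AccountRecordResponse, [AccountRecords.__language_model__])
assert response_model is AccountRecordResponse
assert isinstance(language_models, list)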
@@ -226,7 +232,10 @@ class AccountCreateEventMethod(MethodToEvent):
         "31f4f32f-0cd4-4995-8a6a-f9f56335848a": "account_records_create",
     }
     __event_validation__ = {
-        "31f4f32f-0cd4-4995-8a6a-f9f56335848a": (InsertAccountRecord, [AccountRecords.__language_model__]),
+        "31f4f32f-0cd4-4995-8a6a-f9f56335848a": (
+            InsertAccountRecord,
+            [AccountRecords.__language_model__],
+        ),
     }

     @classmethod
@@ -314,7 +323,10 @@ class AccountUpdateEventMethod(MethodToEvent):
         "ec98ef2c-bcd0-432d-a8f4-1822a56c33b2": "account_records_update",
     }
     __event_validation__ = {
-        "ec98ef2c-bcd0-432d-a8f4-1822a56c33b2": (UpdateAccountRecord, [AccountRecords.__language_model__]),
+        "ec98ef2c-bcd0-432d-a8f4-1822a56c33b2": (
+            UpdateAccountRecord,
+            [AccountRecords.__language_model__],
+        ),
     }

     @classmethod
@@ -39,10 +39,16 @@ class AddressListEventMethod(MethodToEvent):
         "52afe375-dd95-4f4b-aaa2-4ec61bc6de52": "address_list_employee",
     }
     __event_validation__ = {
-        "9c251d7d-da70-4d63-a72c-e69c26270442": (ListAddressResponse, [Addresses.__language_model__]),
-        "52afe375-dd95-4f4b-aaa2-4ec61bc6de52": (ListAddressResponse, [Addresses.__language_model__]),
+        "9c251d7d-da70-4d63-a72c-e69c26270442": (
+            ListAddressResponse,
+            [Addresses.__language_model__],
+        ),
+        "52afe375-dd95-4f4b-aaa2-4ec61bc6de52": (
+            ListAddressResponse,
+            [Addresses.__language_model__],
+        ),
     }

     @classmethod
     def address_list_super_user(
         cls,
@@ -113,7 +119,10 @@ class AddressCreateEventMethod(MethodToEvent):
         "ffdc445f-da10-4ce4-9531-d2bdb9a198ae": "create_address",
     }
     __event_validation__ = {
-        "ffdc445f-da10-4ce4-9531-d2bdb9a198ae": (InsertAddress, [Addresses.__language_model__]),
+        "ffdc445f-da10-4ce4-9531-d2bdb9a198ae": (
+            InsertAddress,
+            [Addresses.__language_model__],
+        ),
     }

     @classmethod
@@ -161,7 +170,10 @@ class AddressSearchEventMethod(MethodToEvent):
         "e0ac1269-e9a7-4806-9962-219ac224b0d0": "search_address",
     }
     __event_validation__ = {
-        "e0ac1269-e9a7-4806-9962-219ac224b0d0": (SearchAddress, [Addresses.__language_model__]),
+        "e0ac1269-e9a7-4806-9962-219ac224b0d0": (
+            SearchAddress,
+            [Addresses.__language_model__],
+        ),
     }

     @classmethod
@@ -301,7 +313,10 @@ class AddressUpdateEventMethod(MethodToEvent):
         "1f9c3a9c-e5bd-4dcd-9b9a-3742d7e03a27": "update_address",
     }
     __event_validation__ = {
-        "1f9c3a9c-e5bd-4dcd-9b9a-3742d7e03a27": (UpdateAddress, [Addresses.__language_model__]),
+        "1f9c3a9c-e5bd-4dcd-9b9a-3742d7e03a27": (
+            UpdateAddress,
+            [Addresses.__language_model__],
+        ),
     }

     @classmethod
@@ -0,0 +1,70 @@
+from typing import Dict, List, Optional
+from fastapi import APIRouter, Header
+from pydantic import BaseModel
+
+
+class LanguageStrings(BaseModel):
+    validation: Dict[str, Dict[str, str]]  # validation.required.field: {tr: "...", en: "..."}
+    messages: Dict[str, Dict[str, str]]  # messages.welcome: {tr: "...", en: "..."}
+    labels: Dict[str, Dict[str, str]]  # labels.submit_button: {tr: "...", en: "..."}
+
+
+class LanguageService:
+    def __init__(self):
+        self.strings: Dict[str, Dict[str, Dict[str, str]]] = {
+            "validation": {
+                "required": {
+                    "tr": "Bu alan zorunludur",
+                    "en": "This field is required"
+                },
+                "email": {
+                    "tr": "Geçerli bir e-posta adresi giriniz",
+                    "en": "Please enter a valid email"
+                },
+                "min_length": {
+                    "tr": "En az {min} karakter giriniz",
+                    "en": "Enter at least {min} characters"
+                },
+                # Add more validation messages
+            },
+            "messages": {
+                "welcome": {
+                    "tr": "Hoş geldiniz",
+                    "en": "Welcome"
+                },
+                "success": {
+                    "tr": "İşlem başarılı",
+                    "en": "Operation successful"
+                },
+                # Add more messages
+            },
+            "labels": {
+                "submit": {
+                    "tr": "Gönder",
+                    "en": "Submit"
+                },
+                "cancel": {
+                    "tr": "İptal",
+                    "en": "Cancel"
+                },
+                # Add more labels
+            }
+        }
+
+    def get_strings(self, lang: str = "tr") -> LanguageStrings:
+        """Get all strings for a specific language"""
+        return LanguageStrings(
+            validation={k: v[lang] for k, v in self.strings["validation"].items()},
+            messages={k: v[lang] for k, v in self.strings["messages"].items()},
+            labels={k: v[lang] for k, v in self.strings["labels"].items()}
+        )
+
+
+# Create FastAPI router
+router = APIRouter(prefix="/api/language", tags=["Language"])
+language_service = LanguageService()
+
+
+@router.get("/strings")
+async def get_language_strings(
+    accept_language: Optional[str] = Header(default="tr")
+) -> LanguageStrings:
+    """Get all language strings based on Accept-Language header"""
+    lang = accept_language.split(",")[0][:2]  # Get primary language code
+    return language_service.get_strings(lang if lang in ["tr", "en"] else "tr")
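As an aside, the Accept-Language handling above only looks at the primary language tag; a self-contained sketch of that fallback logic, mirroring the handler (plain Python, no FastAPI required; the function name is illustrative):

def resolve_language(accept_language: str = "tr", supported=("tr", "en"), default: str = "tr") -> str:
    """Take the primary tag from an Accept-Language header and fall back to the default."""
    lang = (accept_language or default).split(",")[0][:2]
    return lang if lang in supported else default

assert resolve_language("en-US,en;q=0.9") == "en"
assert resolve_language("de-DE,de;q=0.8") == "tr"  # unsupported language -> default
assert resolve_language("tr") == "tr"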
@@ -0,0 +1,81 @@
+from typing import Dict
+from fastapi import APIRouter, Header
+from pydantic import BaseModel
+from typing import Optional
+
+
+class ZodMessages(BaseModel):
+    """Messages that match Zod's error types"""
+    required_error: str
+    invalid_type_error: str
+    invalid_string: Dict[str, str]  # email, url, etc
+    too_small: Dict[str, str]  # string, array, number
+    too_big: Dict[str, str]  # string, array, number
+    custom: Dict[str, str]  # custom validation messages
+
+
+class LanguageService:
+    def __init__(self):
+        self.messages = {
+            "tr": {
+                "required_error": "Bu alan zorunludur",
+                "invalid_type_error": "Geçersiz tip",
+                "invalid_string": {
+                    "email": "Geçerli bir e-posta adresi giriniz",
+                    "url": "Geçerli bir URL giriniz",
+                    "uuid": "Geçerli bir UUID giriniz"
+                },
+                "too_small": {
+                    "string": "{min} karakterden az olamaz",
+                    "array": "En az {min} öğe gereklidir",
+                    "number": "En az {min} olmalıdır"
+                },
+                "too_big": {
+                    "string": "{max} karakterden fazla olamaz",
+                    "array": "En fazla {max} öğe olabilir",
+                    "number": "En fazla {max} olabilir"
+                },
+                "custom": {
+                    "password_match": "Şifreler eşleşmiyor",
+                    "unique_email": "Bu e-posta adresi zaten kullanılıyor",
+                    "strong_password": "Şifre en az bir büyük harf, bir küçük harf ve bir rakam içermelidir"
+                }
+            },
+            "en": {
+                "required_error": "This field is required",
+                "invalid_type_error": "Invalid type",
+                "invalid_string": {
+                    "email": "Please enter a valid email",
+                    "url": "Please enter a valid URL",
+                    "uuid": "Please enter a valid UUID"
+                },
+                "too_small": {
+                    "string": "Must be at least {min} characters",
+                    "array": "Must contain at least {min} items",
+                    "number": "Must be at least {min}"
+                },
+                "too_big": {
+                    "string": "Must be at most {max} characters",
+                    "array": "Must contain at most {max} items",
+                    "number": "Must be at most {max}"
+                },
+                "custom": {
+                    "password_match": "Passwords do not match",
+                    "unique_email": "This email is already in use",
+                    "strong_password": "Password must contain at least one uppercase letter, one lowercase letter, and one number"
+                }
+            }
+        }
+
+    def get_messages(self, lang: str = "tr") -> Dict:
+        """Get all Zod messages for a specific language"""
+        return self.messages.get(lang, self.messages["tr"])
+
+
+router = APIRouter(prefix="/api/language", tags=["Language"])
+language_service = LanguageService()
+
+
+@router.get("/zod-messages")
+async def get_zod_messages(
+    accept_language: Optional[str] = Header(default="tr")
+) -> Dict:
+    """Get Zod validation messages based on Accept-Language header"""
+    lang = accept_language.split(",")[0][:2]  # Get primary language code
+    return language_service.get_messages(lang if lang in ["tr", "en"] else "tr")
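As an aside, a small sketch of exercising the Zod message catalogue directly, assuming the LanguageService defined in this new file; the {min}/{max} placeholders are meant to be interpolated by the frontend, so str.format is used here only for illustration:

service = LanguageService()
en = service.get_messages("en")
tr = service.get_messages()            # defaults to "tr"
fallback = service.get_messages("de")  # unknown language falls back to Turkish

print(en["too_small"]["string"].format(min=3))  # "Must be at least 3 characters"
print(tr["custom"]["password_match"])           # "Şifreler eşleşmiyor"
assert fallback == tr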
@@ -3,4 +3,3 @@
 from .route_configs import get_route_configs

-
 __all__ = ["get_route_configs"]
@@ -19,9 +19,7 @@ if TYPE_CHECKING:
 prefix = "/available"


-async def check_endpoints_available(
-    request: "Request"
-) -> Dict[str, Any]:
+async def check_endpoints_available(request: "Request") -> Dict[str, Any]:
     """
     Check if endpoints are available.
     """
@@ -52,7 +50,7 @@ async def check_endpoint_available(
     print("data", data)
     data_dict = data.data
     endpoint_asked = data_dict.get("endpoint", None)

     if not endpoint_asked:
         raise HTTPExceptionApi(
             error_code="",
@@ -81,10 +79,7 @@ async def check_endpoint_available(
             loc=get_line_number_for_error(),
             sys_msg="Endpoint not found",
         )
-    return {
-        "endpoint": endpoint_asked,
-        "status": "OK"
-    }
+    return {"endpoint": endpoint_asked, "status": "OK"}


 AVAILABLE_CONFIG = RouteFactoryConfig(
@@ -22,13 +22,17 @@ prefix = "/validation"


 @TokenEventMiddleware.validation_required
-async def validations_validations_select(request: Request, data: EndpointBaseRequestModel) -> Dict[str, Any]:
+async def validations_validations_select(
+    request: Request, data: EndpointBaseRequestModel
+) -> Dict[str, Any]:
     """
     Select validations.
     """
     wrapped_context = getattr(validations_validations_select, "__wrapped__", None)
     auth_context = getattr(wrapped_context, "auth", None)
-    validation_code = getattr(validations_validations_select, "validation_code", {"validation_code": None})
+    validation_code = getattr(
+        validations_validations_select, "validation_code", {"validation_code": None}
+    )
     if not validation_code:
         raise HTTPExceptionApi(
             error_code="",
@@ -41,12 +45,16 @@ async def validations_validations_select(request: Request, data: EndpointBaseReq
         reachable_event_code=validation_code.get("reachable_event_code", None),
         lang=getattr(auth_context, "lang", None),
     )
-    validations_both = ValidationsBoth.retrieve_both_validations_and_headers(validations_pydantic)
-    return {"status": "OK", "validation_code": validation_code, **validations_both }
+    validations_both = ValidationsBoth.retrieve_both_validations_and_headers(
+        validations_pydantic
+    )
+    return {"status": "OK", "validation_code": validation_code, **validations_both}


 @TokenEventMiddleware.validation_required
-async def validations_headers_select(request: Request, data: EndpointBaseRequestModel) -> Dict[str, Any]:
+async def validations_headers_select(
+    request: Request, data: EndpointBaseRequestModel
+) -> Dict[str, Any]:
     """
     Select headers.
     """
@@ -57,7 +65,9 @@ async def validations_headers_select(request: Request, data: EndpointBaseRequest


 @TokenEventMiddleware.validation_required
-async def validations_validations_and_headers_select(request: Request, data: EndpointBaseRequestModel) -> Dict[str, Any]:
+async def validations_validations_and_headers_select(
+    request: Request, data: EndpointBaseRequestModel
+) -> Dict[str, Any]:
     """
     Select validations and headers.
     """
@@ -67,7 +77,7 @@ async def validations_validations_and_headers_select(request: Request, data: End
     }


-VALIDATION_CONFIG_MAIN =RouteFactoryConfig(
+VALIDATION_CONFIG_MAIN = RouteFactoryConfig(
     name="validations",
     prefix=prefix,
     tags=["Validation"],
@@ -113,4 +123,6 @@ VALIDATION_CONFIG_MAIN =RouteFactoryConfig(
 )
 VALIDATION_CONFIG = VALIDATION_CONFIG_MAIN.as_dict()

-VALIDATION_ENDPOINTS = [endpoint.url_of_endpoint for endpoint in VALIDATION_CONFIG_MAIN.endpoints]
+VALIDATION_ENDPOINTS = [
+    endpoint.url_of_endpoint for endpoint in VALIDATION_CONFIG_MAIN.endpoints
+]
@@ -11,6 +11,7 @@ if TYPE_CHECKING:
         ListOptions,
     )

+
 class ValidationsPydantic(BaseModel):
     class_model: str
     reachable_event_code: str
@@ -2,26 +2,16 @@
 Validation request models.
 """

-from typing import TYPE_CHECKING, Dict, Any, Literal, Optional, TypedDict, Union
-from pydantic import BaseModel, Field, model_validator, RootModel, ConfigDict
-
+from typing import TYPE_CHECKING, Dict, Any
+
+from ApiEvents.abstract_class import MethodToEvent
 from ApiLibrary.common.line_number import get_line_number_for_error
 from ApiValidations.Custom.validation_response import ValidationModel, ValidationParser
-
-from ApiEvents.abstract_class import MethodToEvent
-from ApiEvents.base_request_model import BaseRequestModel, DictRequestModel
-
-from ApiValidations.Custom.token_objects import EmployeeTokenObject, OccupantTokenObject
-from ApiValidations.Request.base_validations import ListOptions
-
 from ErrorHandlers.Exceptions.api_exc import HTTPExceptionApi
 from .models import ValidationsPydantic


-if TYPE_CHECKING:
-    from fastapi import Request
-
-
 class AllModelsImport:

     @classmethod
@@ -46,18 +36,14 @@ class AllModelsImport:
             AddressCreateEventMethod=AddressCreateEventMethod,
             AddressSearchEventMethod=AddressSearchEventMethod,
         )


-
 class ValidationsBoth(MethodToEvent):

     @classmethod
-    def retrieve_both_validations_and_headers(
-        cls, event: ValidationsPydantic
-    ) -> Dict[str, Any]:
+    def retrieve_both_validations_and_headers(cls, event: ValidationsPydantic) -> Dict[str, Any]:
         EVENT_MODELS = AllModelsImport.import_all_models()
         return_single_model = EVENT_MODELS.get(event.class_model, None)
-        print("return_single_model", return_single_model, type(return_single_model))
         # event_class_validation = getattr(return_single_model, "__event_validation__", None)
         if not return_single_model:
             raise HTTPExceptionApi(
|
|
@ -67,11 +53,17 @@ class ValidationsBoth(MethodToEvent):
|
||||||
sys_msg="Validation code not found",
|
sys_msg="Validation code not found",
|
||||||
)
|
)
|
||||||
response_model = return_single_model.retrieve_event_response_model(event.reachable_event_code)
|
response_model = return_single_model.retrieve_event_response_model(event.reachable_event_code)
|
||||||
language_model_all = return_single_model.retrieve_language_parameters(function_code=event.reachable_event_code, language=event.lang)
|
language_model_all = return_single_model.retrieve_language_parameters(
|
||||||
|
function_code=event.reachable_event_code, language=event.lang
|
||||||
|
)
|
||||||
language_model = language_model_all.get("language_model", None)
|
language_model = language_model_all.get("language_model", None)
|
||||||
language_models = language_model_all.get("language_models", None)
|
language_models = language_model_all.get("language_models", None)
|
||||||
|
|
||||||
validation = ValidationModel(response_model, language_model, language_models)
|
validation = ValidationModel(response_model, language_model, language_models)
|
||||||
|
"""
|
||||||
|
Headers: Headers which is merged with response model && language models of event
|
||||||
|
Validation: Validation of event which is merged with response model && language models of event
|
||||||
|
"""
|
||||||
return {
|
return {
|
||||||
"headers": validation.headers,
|
"headers": validation.headers,
|
||||||
"validation": validation.validation,
|
"validation": validation.validation,
|
||||||
|
|
@@ -82,16 +74,65 @@ class ValidationsBoth(MethodToEvent):
 class ValidationsValidations(MethodToEvent):

     @classmethod
-    def retrieve_validations(
-        cls, event: ValidationsPydantic
-    ) -> Dict[str, Any]:
-        return {}
+    def retrieve_validations(cls, event: ValidationsPydantic) -> Dict[str, Any]:
+        EVENT_MODELS = AllModelsImport.import_all_models()
+        return_single_model = EVENT_MODELS.get(event.class_model, None)
+        # event_class_validation = getattr(return_single_model, "__event_validation__", None)
+        if not return_single_model:
+            raise HTTPExceptionApi(
+                error_code="",
+                lang="en",
+                loc=get_line_number_for_error(),
+                sys_msg="Validation code not found",
+            )
+        response_model = return_single_model.retrieve_event_response_model(event.reachable_event_code)
+        language_model_all = return_single_model.retrieve_language_parameters(
+            function_code=event.reachable_event_code, language=event.lang
+        )
+        language_model = language_model_all.get("language_model", None)
+        language_models = language_model_all.get("language_models", None)
+
+        validation = ValidationModel(response_model, language_model, language_models)
+        """
+        Headers: Headers which is merged with response model && language models of event
+        Validation: Validation of event which is merged with response model && language models of event
+        """
+        return {
+            "validation": validation.validation,
+            # "headers": validation.headers,
+            # "language_models": language_model_all,
+        }


 class ValidationsHeaders(MethodToEvent):

     @classmethod
-    def retrieve_headers(
-        cls, event: ValidationsPydantic
-    ) -> Dict[str, Any]:
-        return {}
+    def retrieve_headers(cls, event: ValidationsPydantic
+    ) -> Dict[str, Any]:
+        EVENT_MODELS = AllModelsImport.import_all_models()
+        return_single_model = EVENT_MODELS.get(event.class_model, None)
+        # event_class_validation = getattr(return_single_model, "__event_validation__", None)
+        if not return_single_model:
+            raise HTTPExceptionApi(
+                error_code="",
+                lang="en",
+                loc=get_line_number_for_error(),
+                sys_msg="Validation code not found",
+            )
+        response_model = return_single_model.retrieve_event_response_model(event.reachable_event_code)
+        language_model_all = return_single_model.retrieve_language_parameters(
+            function_code=event.reachable_event_code, language=event.lang
+        )
+        language_model = language_model_all.get("language_model", None)
+        language_models = language_model_all.get("language_models", None)
+
+        validation = ValidationModel(response_model, language_model, language_models)
+        """
+        Headers: Headers which is merged with response model && language models of event
+        Validation: Validation of event which is merged with response model && language models of event
+        """
+        return {
+            "headers": validation.headers,
+            # "validation": validation.validation,
+            # "language_models": language_model_all,
+        }
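As an aside, the three classes above now differ only in which slice of the ValidationModel they return; a hedged sketch of calling them, assuming a ValidationsPydantic instance that also carries the lang attribute these class methods read (the UUID is the list event from the account hunks earlier in this commit):

event = ValidationsPydantic(
    class_model="AccountListEventMethod",  # key into AllModelsImport.import_all_models()
    reachable_event_code="7192c2aa-5352-4e36-98b3-dafb7d036a3d",
    lang="en",
)

both = ValidationsBoth.retrieve_both_validations_and_headers(event)   # {"headers": ..., "validation": ...}
validation_only = ValidationsValidations.retrieve_validations(event)  # {"validation": ...}
headers_only = ValidationsHeaders.retrieve_headers(event)             # {"headers": ...}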
@@ -0,0 +1,158 @@
+from typing import Dict, Any, Type, get_type_hints, get_args, get_origin
+from pydantic import BaseModel, Field, EmailStr
+from enum import Enum
+import inspect
+from fastapi import APIRouter
+from datetime import datetime
+
+
+class SchemaConverter:
+    """Converts Pydantic models to Zod schema definitions"""
+
+    TYPE_MAPPINGS = {
+        str: "string",
+        int: "number",
+        float: "number",
+        bool: "boolean",
+        list: "array",
+        dict: "object",
+        datetime: "date",
+        EmailStr: "string.email()",
+    }
+
+    def __init__(self):
+        self.processed_models = set()
+
+    def convert_model(self, model: Type[BaseModel]) -> Dict[str, Any]:
+        """Convert a Pydantic model to a Zod schema definition"""
+        if model.__name__ in self.processed_models:
+            return {"$ref": model.__name__}
+
+        self.processed_models.add(model.__name__)
+
+        schema = {
+            "name": model.__name__,
+            "type": "object",
+            "fields": {},
+            "validations": {}
+        }
+
+        for field_name, field in model.__fields__.items():
+            field_info = self._convert_field(field)
+            schema["fields"][field_name] = field_info
+
+            # Get validations from field
+            validations = self._get_field_validations(field)
+            if validations:
+                schema["validations"][field_name] = validations
+
+        return schema
+
+    def _convert_field(self, field) -> Dict[str, Any]:
+        """Convert a Pydantic field to Zod field definition"""
+        field_type = field.outer_type_
+        origin = get_origin(field_type)
+
+        if origin is not None:
+            # Handle generic types (List, Dict, etc)
+            args = get_args(field_type)
+            if origin == list:
+                return {
+                    "type": "array",
+                    "items": self._get_type_name(args[0])
+                }
+            elif origin == dict:
+                return {
+                    "type": "object",
+                    "additionalProperties": self._get_type_name(args[1])
+                }
+
+        if inspect.isclass(field_type) and issubclass(field_type, BaseModel):
+            # Nested model
+            return self.convert_model(field_type)
+
+        if inspect.isclass(field_type) and issubclass(field_type, Enum):
+            # Enum type
+            return {
+                "type": "enum",
+                "values": [e.value for e in field_type]
+            }
+
+        return {
+            "type": self._get_type_name(field_type)
+        }
+
+    def _get_field_validations(self, field) -> Dict[str, Any]:
+        """Extract validations from field"""
+        validations = {}
+
+        if field.field_info.min_length is not None:
+            validations["min_length"] = field.field_info.min_length
+        if field.field_info.max_length is not None:
+            validations["max_length"] = field.field_info.max_length
+        if field.field_info.regex is not None:
+            validations["pattern"] = field.field_info.regex.pattern
+        if field.field_info.gt is not None:
+            validations["gt"] = field.field_info.gt
+        if field.field_info.lt is not None:
+            validations["lt"] = field.field_info.lt
+
+        return validations
+
+    def _get_type_name(self, type_: Type) -> str:
+        """Get Zod type name for Python type"""
+        return self.TYPE_MAPPINGS.get(type_, "any")
+
+
+# FastAPI router
+router = APIRouter(prefix="/api/validation", tags=["Validation"])
+converter = SchemaConverter()
+
+
+@router.get("/schema/{model_name}")
+async def get_schema(model_name: str) -> Dict[str, Any]:
+    """Get Zod schema for a specific model"""
+    # This is just an example - you'd need to implement model lookup
+    models = {
+        "User": UserModel,
+        "Product": ProductModel,
+        # Add your models here
+    }
+
+    if model_name not in models:
+        raise ValueError(f"Model {model_name} not found")
+
+    return converter.convert_model(models[model_name])
+
+
+# Example usage:
+"""
+class UserModel(BaseModel):
+    email: EmailStr
+    username: str = Field(min_length=3, max_length=50)
+    age: int = Field(gt=0, lt=150)
+    is_active: bool = True
+    roles: List[str] = []
+
+# GET /api/validation/schema/User would return:
+{
+    "name": "User",
+    "type": "object",
+    "fields": {
+        "email": {"type": "string.email()"},
+        "username": {"type": "string"},
+        "age": {"type": "number"},
+        "is_active": {"type": "boolean"},
+        "roles": {
+            "type": "array",
+            "items": "string"
+        }
+    },
+    "validations": {
+        "username": {
+            "min_length": 3,
+            "max_length": 50
+        },
+        "age": {
+            "gt": 0,
+            "lt": 150
+        }
+    }
+}
+"""
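As an aside, a minimal sketch of driving the converter directly; it assumes the Pydantic v1 field API (outer_type_, field_info) that SchemaConverter reads, and the UserModel here is a hypothetical stand-in for the commented example at the bottom of the file:

from typing import List
from pydantic import BaseModel, Field

class UserModel(BaseModel):
    username: str = Field(min_length=3, max_length=50)
    age: int = Field(gt=0, lt=150)
    roles: List[str] = []

converter = SchemaConverter()
schema = converter.convert_model(UserModel)
# schema["fields"]["roles"]         -> {"type": "array", "items": "string"}
# schema["validations"]["username"] -> {"min_length": 3, "max_length": 50}
# schema["validations"]["age"]      -> {"gt": 0, "lt": 150}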
@@ -16,8 +16,10 @@ from typing import (
     Type,
     ClassVar,
     Union,
-    Awaitable,
+    Set,
 )
+from collections import defaultdict
+import uuid
 from dataclasses import dataclass, field
 from pydantic import BaseModel
 from fastapi import Request, Depends, APIRouter
@@ -178,64 +180,26 @@ class RouteFactoryConfig:
         }


-class ActionsSchema:
-    """Base class for defining API action schemas.
-
-    This class handles endpoint registration and validation in the database.
-    Subclasses should implement specific validation logic.
-    """
-
-    def __init__(self, endpoint: str):
-        """Initialize with an API endpoint path.
-
-        Args:
-            endpoint: The API endpoint path (e.g. "/users/create")
-        """
-        self.endpoint = endpoint
-
-    def retrieve_action_from_endpoint(self) -> Dict[str, Any]:
-        """Retrieve the endpoint registration from the database.
-
-        Returns:
-            Dict containing the endpoint registration data
-
-        Raises:
-            HTTPException: If endpoint is not found in database
-        """
-        raise NotImplementedError(
-            "Subclasses must implement retrieve_action_from_endpoint"
-        )
-
-
-class ActionsSchemaFactory:
-    """Factory class for creating and validating action schemas.
-
-    This class ensures proper initialization and validation of API endpoints
-    through their action schemas.
-    """
-
-    def __init__(self, action: ActionsSchema):
-        """Initialize with an action schema.
-
-        Args:
-            action: The action schema to initialize
-
-        Raises:
-            HTTPException: If action initialization fails
-        """
-        self.action = action
-        self.action_match = self.action.retrieve_action_from_endpoint()
-
-
 class MethodToEvent:
-    """Base class for mapping methods to API events with type safety.
+    """Base class for mapping methods to API events with type safety and endpoint configuration.

     This class provides a framework for handling API events with proper
-    type checking for tokens and response models.
+    type checking for tokens and response models, as well as managing
+    endpoint configurations and frontend page structure.

     Type Parameters:
         TokenType: Type of authentication token
         ResponseModel: Type of response model
+
+    Class Variables:
+        action_key: Unique identifier for the action
+        event_type: Type of event (e.g., 'query', 'command')
+        event_description: Human-readable description of the event
+        event_category: Category for grouping related events
+        __event_keys__: Mapping of UUIDs to event names
+        __event_validation__: Validation rules for events
+        __endpoint_config__: API endpoint configuration
+        __page_info__: Frontend page configuration
     """

     action_key: ClassVar[Optional[str]] = None
@@ -244,9 +208,163 @@ class MethodToEvent:
     event_category: ClassVar[str] = ""
     __event_keys__: ClassVar[Dict[str, str]] = {}
     __event_validation__: Dict[str, Tuple[Type, Union[List, tuple]]] = {}
+    __endpoint_config__: ClassVar[Dict[str, Dict[str, Any]]] = {
+        "endpoints": {},  # Mapping of event UUIDs to endpoint configs
+        "router_prefix": "",  # Router prefix for all endpoints in this class
+        "tags": [],  # OpenAPI tags
+    }
+    __page_info__: ClassVar[Dict[str, Any]] = {
+        "name": "",  # Page name (e.g., "AccountPage")
+        "title": {"tr": "", "en": ""},  # Multi-language titles
+        "icon": "",  # Icon name
+        "url": "",  # Frontend route
+        "component": None,  # Optional component name
+        "parent": None,  # Parent page name if this is a subpage
+    }

     @classmethod
-    def retrieve_event_response_model(cls, function_code: str) -> Tuple:
+    def register_endpoint(
+        cls,
+        event_uuid: str,
+        path: str,
+        method: str = "POST",
+        response_model: Optional[Type] = None,
+        **kwargs
+    ) -> None:
+        """Register an API endpoint configuration for an event.
+
+        Args:
+            event_uuid: UUID of the event
+            path: Endpoint path (will be prefixed with router_prefix)
+            method: HTTP method (default: POST)
+            response_model: Pydantic model for response
+            **kwargs: Additional FastAPI endpoint parameters
+        """
+        if event_uuid not in cls.__event_keys__:
+            raise ValueError(f"Event UUID {event_uuid} not found in {cls.__name__}")
+
+        cls.__endpoint_config__["endpoints"][event_uuid] = {
+            "path": path,
+            "method": method,
+            "response_model": response_model,
+            **kwargs
+        }
+
+    @classmethod
+    def configure_router(cls, prefix: str, tags: List[str]) -> None:
+        """Configure the API router settings.
+
+        Args:
+            prefix: Router prefix for all endpoints
+            tags: OpenAPI tags for documentation
+        """
+        cls.__endpoint_config__["router_prefix"] = prefix
+        cls.__endpoint_config__["tags"] = tags
+
+    @classmethod
+    def configure_page(
+        cls,
+        name: str,
+        title: Dict[str, str],
+        icon: str,
+        url: str,
+        component: Optional[str] = None,
+        parent: Optional[str] = None
+    ) -> None:
+        """Configure the frontend page information.
+
+        Args:
+            name: Page name
+            title: Multi-language titles (must include 'tr' and 'en')
+            icon: Icon name
+            url: Frontend route
+            component: Optional component name
+            parent: Parent page name for subpages
+        """
+        required_langs = {"tr", "en"}
+        if not all(lang in title for lang in required_langs):
+            raise ValueError(f"Title must contain all required languages: {required_langs}")
+
+        cls.__page_info__.update({
+            "name": name,
+            "title": title,
+            "icon": icon,
+            "url": url,
+            "component": component,
+            "parent": parent
+        })
+
+    @classmethod
+    def get_endpoint_config(cls) -> Dict[str, Any]:
+        """Get the complete endpoint configuration."""
+        return cls.__endpoint_config__
+
+    @classmethod
+    def get_page_info(cls) -> Dict[str, Any]:
+        """Get the frontend page configuration."""
+        return cls.__page_info__
+
+    @classmethod
+    def has_available_events(cls, user_permission_uuids: Set[str]) -> bool:
+        """Check if any events are available based on user permissions."""
+        return bool(set(cls.__event_keys__.keys()) & user_permission_uuids)
+
+    @classmethod
+    def get_page_info_with_permissions(
+        cls,
+        user_permission_uuids: Set[str],
+        include_endpoints: bool = False
+    ) -> Optional[Dict[str, Any]]:
+        """Get page info if user has required permissions.
+
+        Args:
+            user_permission_uuids: Set of UUIDs the user has permission for
+            include_endpoints: Whether to include available endpoint information
+
+        Returns:
+            Dict with page info if user has permissions, None otherwise
+        """
+        # Check if user has any permissions for this page's events
+        if not cls.has_available_events(user_permission_uuids):
+            return None
+
+        # Start with basic page info
+        page_info = {
+            **cls.__page_info__,
+            "category": cls.event_category,
+            "type": cls.event_type,
+            "description": cls.event_description
+        }
+
+        # Optionally include available endpoints
+        if include_endpoints:
+            available_endpoints = {}
+            for uuid, endpoint in cls.__endpoint_config__["endpoints"].items():
+                if uuid in user_permission_uuids:
+                    available_endpoints[uuid] = {
+                        "path": f"{cls.__endpoint_config__['router_prefix']}{endpoint['path']}",
+                        "method": endpoint["method"],
+                        "event_name": cls.__event_keys__[uuid]
+                    }
+            if available_endpoints:
+                page_info["available_endpoints"] = available_endpoints
+
+        return page_info
+
+    @classmethod
+    def get_events_config(cls) -> Dict[str, Any]:
+        """Get the complete configuration including events, endpoints, and page info."""
+        return {
+            "events": cls.__event_keys__,
+            "endpoints": cls.__endpoint_config__,
+            "page_info": cls.__page_info__,
+            "category": cls.event_category,
+            "type": cls.event_type,
+            "description": cls.event_description
+        }
+
+    @classmethod
+    def retrieve_event_response_model(cls, function_code: str) -> Any:
         """Retrieve event validation for a specific function.

         Args:
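As an aside, a hedged sketch of how a subclass might use the new hooks, reusing the AccountListEventMethod class, UUID, and page titles that appear elsewhere in this commit; the prefix and path values are placeholders, not endpoints defined by this change. Note that __endpoint_config__ and __page_info__ are plain class-level dictionaries on MethodToEvent, so subclasses calling these hooks without overriding them mutate shared state.

# Illustrative only: wires the new class-level configuration onto an existing event method class.
AccountListEventMethod.configure_router(prefix="/api/account", tags=["Account"])
AccountListEventMethod.configure_page(
    name="AccountPage",
    title={"tr": "Hesaplar", "en": "Accounts"},
    icon="User",
    url="/account",
)
AccountListEventMethod.register_endpoint(
    event_uuid="7192c2aa-5352-4e36-98b3-dafb7d036a3d",  # must already exist in __event_keys__
    path="/list",
)

# A page entry is produced only if the user's permission UUIDs intersect __event_keys__:
page = AccountListEventMethod.get_page_info_with_permissions(
    {"7192c2aa-5352-4e36-98b3-dafb7d036a3d"}, include_endpoints=True
)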
@@ -264,7 +382,7 @@ class MethodToEvent:
             sys_msg="Function not found",
         )
         return event_validation_list[0]

     @classmethod
     def retrieve_event_languages(cls, function_code: str) -> Union[List, tuple]:
         """Retrieve event description for a specific function.
@@ -276,7 +394,6 @@ class MethodToEvent:
             Event description
         """
         event_keys_list = cls.__event_validation__.get(function_code, None)
-        print('event_keys_list', event_keys_list)
         if not event_keys_list:
             raise HTTPExceptionApi(
                 error_code="",
@@ -295,7 +412,7 @@ class MethodToEvent:
         return function_language_models

     @staticmethod
-    def merge_models(language_model: List) -> Tuple:
+    def merge_models(language_model: List) -> Dict:
         merged_models = {"tr": {}, "en": {}}
         for model in language_model:
             for lang in dict(model).keys():
@@ -327,7 +444,9 @@ class MethodToEvent:
         return function_itself

     @classmethod
-    def retrieve_language_parameters(cls, function_code: str, language: str = "tr") -> Dict[str, str]:
+    def retrieve_language_parameters(
+        cls, function_code: str, language: str = "tr"
+    ) -> Dict[str, Any]:
         """Retrieve language-specific parameters for an event.

         Args:
@@ -342,19 +461,459 @@ class MethodToEvent:
         event_response_model_merged = cls.merge_models(event_language_models)
         event_response_model_merged_lang = event_response_model_merged[language]
         # Map response model fields to language-specific values
-        print('event_response_model', dict(
-            event_response_model=event_response_model,
-            event_response_model_merged_lang=event_response_model_merged_lang,
-            event_response_model_merged=event_response_model_merged,
-            language=language,
-            function_code=function_code,
-        ))
         only_language_dict = {
             field: event_response_model_merged_lang[field]
             for field in event_response_model.model_fields
             if field in event_response_model_merged_lang
         }
+        """
+        __event_validation__ : {"key": [A, B, C]}
+        Language Model : Language Model that is model pydatnic requires
+        Language Models : All language_models that is included in Langugage Models Section
+        Merged Language Models : Merged with all models in list event_validation
+        """
         return {
             "language_model": only_language_dict,
             "language_models": event_response_model_merged,
         }
+
+
+class EventMethodRegistry:
+    """Registry for mapping event method UUIDs to categories and managing permissions."""
+
+    def __init__(self):
+        self._uuid_map: Dict[str, Tuple[Type[MethodToEvent], str]] = {}  # uuid -> (method_class, event_name)
+        self._category_events: Dict[str, Set[str]] = defaultdict(set)  # category -> set of uuids
+
+    def register_method(self, category_name: str, method_class: Type[MethodToEvent]) -> None:
+        """Register a method class with its category."""
+        # Register all UUIDs from the method
+        for event_uuid, event_name in method_class.__event_keys__.items():
+            self._uuid_map[event_uuid] = (method_class, event_name)
+            self._category_events[category_name].add(event_uuid)
+
+    def get_method_by_uuid(self, event_uuid: str) -> Optional[Tuple[Type[MethodToEvent], str]]:
+        """Get method class and event name by UUID."""
+        return self._uuid_map.get(event_uuid)
+
+    def get_events_for_category(self, category_name: str) -> Set[str]:
+        """Get all event UUIDs for a category."""
+        return self._category_events.get(category_name, set())
+
+
+class EventCategory:
+    """Base class for defining event categories similar to frontend page structure."""
+
+    def __init__(
+        self,
+        name: str,
+        title: Dict[str, str],
+        icon: str,
+        url: str,
+        component: Optional[str] = None,
+        page_info: Any = None,
+        all_endpoints: Dict[str, Set[str]] = None,  # category -> set of event UUIDs
+        sub_categories: List = None,
+    ):
+        self.name = name
+        self.title = self._validate_title(title)
+        self.icon = icon
+        self.url = url
+        self.component = component
+        self.page_info = page_info
+        self.all_endpoints = all_endpoints or {}
+        self.sub_categories = self._process_subcategories(sub_categories or [])
+
+    def _validate_title(self, title: Dict[str, str]) -> Dict[str, str]:
+        """Validate title has required languages."""
+        required_langs = {"tr", "en"}
+        if not all(lang in title for lang in required_langs):
+            raise ValueError(f"Title must contain all required languages: {required_langs}")
+        return title
+
+    def _process_subcategories(self, categories: List[Union[Dict, "EventCategory"]]) -> List["EventCategory"]:
+        """Process subcategories ensuring they are all EventCategory instances."""
+        processed = []
+        for category in categories:
+            if isinstance(category, dict):
+                processed.append(EventCategory.from_dict(category))
+            elif isinstance(category, EventCategory):
+                processed.append(category)
+            else:
+                raise ValueError(f"Invalid subcategory type: {type(category)}")
+        return processed
+
+    def has_available_events(self, user_permission_uuids: Set[str]) -> bool:
+        """Check if category has available events based on UUID intersection."""
+        # Check current category's events
+        return any(
+            bool(events & user_permission_uuids)
+            for events in self.all_endpoints.values()
+        )
+
+    def get_menu_item(self, user_permission_uuids: Set[str]) -> Optional[Dict[str, Any]]:
+        """Get menu item if category has available events."""
+        # First check if this category has available events
+        if not self.has_available_events(user_permission_uuids):
+            return None
+
+        menu_item = {
+            "name": self.name,
+            "title": self.title,
+            "icon": self.icon,
+            "url": self.url
+        }
+
+        if self.component:
+            menu_item["component"] = self.component
+
+        # Only process subcategories if parent has permissions
+        sub_items = []
+        for subcategory in self.sub_categories:
+            if sub_menu := subcategory.get_menu_item(user_permission_uuids):
+                sub_items.append(sub_menu)
+
+        if sub_items:
+            menu_item["items"] = sub_items
+
+        return menu_item
+
+    def get_available_events(self, registry: EventMethodRegistry, user_permission_uuids: Set[str]) -> Dict[str, List[Dict[str, Any]]]:
+        """Get available events based on user permission UUIDs."""
+        available_events = defaultdict(list)
+
+        # Process endpoints in current category
+        category_events = self.all_endpoints.get(self.name, set())
+        for event_uuid in category_events & user_permission_uuids:
+            method_info = registry.get_method_by_uuid(event_uuid)
+            if method_info:
+                method_class, event_name = method_info
+                available_events[method_class.event_type].append({
+                    "uuid": event_uuid,
+                    "name": event_name,
+                    "description": method_class.event_description,
+                    "category": method_class.event_category
+                })
+
+        # Process subcategories recursively
+        for subcategory in self.sub_categories:
+            sub_events = subcategory.get_available_events(registry, user_permission_uuids)
+            for event_type, events in sub_events.items():
+                available_events[event_type].extend(events)
+
+        return dict(available_events)
+
+    @classmethod
+    def from_dict(cls, data: Dict[str, Any]) -> "EventCategory":
+        """Create category from dictionary."""
+        return cls(
+            name=data["name"],
+            title=data["title"],
+            icon=data["icon"],
+            url=data["url"],
+            component=data.get("component"),
+            page_info=data.get("pageInfo"),
+            all_endpoints=data.get("allEndpoints", {}),
+            sub_categories=data.get("subCategories", [])
+        )
+
+    def to_dict(self, registry: EventMethodRegistry, user_permission_uuids: Optional[Set[str]] = None) -> Dict[str, Any]:
+        """Convert category to dictionary with optional permission filtering."""
+        result = {
+            "name": self.name,
+            "title": self.title,
+            "icon": self.icon,
+            "url": self.url,
+            "pageInfo": self.page_info,
+        }
+
+        if user_permission_uuids is not None:
+            # Only include endpoints and their info if user has permissions
+            available_events = self.get_available_events(registry, user_permission_uuids)
+            if available_events:
+                result["availableEvents"] = available_events
+                result["allEndpoints"] = self.all_endpoints
+        else:
+            # Include all endpoints if no permissions specified
+            result["allEndpoints"] = self.all_endpoints
+
+        # Process subcategories
+        subcategories = [
+            sub.to_dict(registry, user_permission_uuids) for sub in self.sub_categories
+        ]
+        # Only include subcategories that have available events
+        if user_permission_uuids is None or any(
+            "availableEvents" in sub for sub in subcategories
+        ):
+            result["subCategories"] = subcategories
+
+        if self.component:
+            result["component"] = self.component
+
+        return result
+
+
+class EventCategoryManager:
+    """Manager class for handling event categories and their relationships."""
+
+    def __init__(self):
+        self.categories: List[EventCategory] = []
+        self.registry = EventMethodRegistry()
+
+    def get_menu_tree(self, user_permission_uuids: Set[str]) -> List[Dict[str, Any]]:
+        """Get menu tree based on available events."""
+        return [
+            menu_item for category in self.categories
+            if (menu_item := category.get_menu_item(user_permission_uuids))
+        ]
+
+    def register_category(self, category: EventCategory) -> None:
+        """Register a category and its endpoints in the registry."""
+        self.categories.append(category)
+
+    def add_category(self, category: Union[EventCategory, Dict[str, Any]]) -> None:
+        """Add a new category."""
+        if isinstance(category, dict):
+            category = EventCategory.from_dict(category)
+        self.register_category(category)
+
+    def add_categories(self, categories: List[Union[EventCategory, Dict[str, Any]]]) -> None:
+        """Add multiple categories at once."""
+        for category in categories:
+            self.add_category(category)
+
+    def get_category(self, name: str) -> Optional[EventCategory]:
+        """Get category by name."""
+        return next((cat for cat in self.categories if cat.name == name), None)
+
+    def get_all_categories(self, user_permission_uuids: Optional[Set[str]] = None) -> List[Dict[str, Any]]:
+        """Get all categories as dictionary, filtered by user permissions."""
+        return [cat.to_dict(self.registry, user_permission_uuids) for cat in self.categories]
+
+    def get_category_endpoints(self, category_name: str) -> Set[str]:
+        """Get all endpoint UUIDs for a category."""
+        category = self.get_category(category_name)
+        return category.all_endpoints.get(category_name, set()) if category else set()
+
+    def get_subcategories(self, category_name: str, user_permission_uuids: Optional[Set[str]] = None) -> List[Dict[str, Any]]:
+        """Get subcategories for a category."""
+        category = self.get_category(category_name)
+        if not category:
+            return []
+        return [sub.to_dict(self.registry, user_permission_uuids) for sub in category.sub_categories]
+
+    def find_category_by_url(self, url: str) -> Optional[EventCategory]:
+        """Find a category by its URL."""
+        for category in self.categories:
+            if category.url == url:
+                return category
+            for subcategory in category.sub_categories:
+                if subcategory.url == url:
+                    return subcategory
+        return None
+
+
+class EventMethodRegistry:
+    """Registry for all MethodToEvent classes and menu building."""
+
+    _instance = None
+    _method_classes: Dict[str, Type[MethodToEvent]] = {}
+
+    def __new__(cls):
+        if cls._instance is None:
+            cls._instance = super().__new__(cls)
+        return cls._instance
+
+    @classmethod
+    def register_method_class(cls, method_class: Type[MethodToEvent]) -> None:
+        """Register a MethodToEvent class."""
+        if not issubclass(method_class, MethodToEvent):
+            raise ValueError(f"{method_class.__name__} must be a subclass of MethodToEvent")
+
+        page_info = method_class.get_page_info()
+        cls._method_classes[page_info["name"]] = method_class
+
+    @classmethod
+    def get_all_menu_items(
+        cls,
+        user_permission_uuids: Set[str],
+        include_endpoints: bool = False
+    ) -> List[Dict[str, Any]]:
+        """Get all menu items based on user permissions.
+
+        Args:
+            user_permission_uuids: Set of UUIDs the user has permission for
+            include_endpoints: Whether to include available endpoint information
+
+        Returns:
+            List of menu items organized in a tree structure
+        """
+        # First get all page infos
+        page_infos = {}
+        for method_class in cls._method_classes.values():
+            if page_info := method_class.get_page_info_with_permissions(user_permission_uuids, include_endpoints):
+                page_infos[page_info["name"]] = page_info
+
+        # Build tree structure
+        menu_tree = []
+        child_pages = set()
+
+        # First pass: identify all child pages
+        for page_info in page_infos.values():
+            if page_info.get("parent"):
+                child_pages.add(page_info["name"])
+
+        # Second pass: build tree structure
+        for name, page_info in page_infos.items():
+            # Skip if this is a child page
+            if name in child_pages:
+                continue
+
+            # Start with this page's info
+            menu_item = page_info.copy()
+
+            # Find and add children
+            children = []
+            for child_info in page_infos.values():
+                if child_info.get("parent") == name:
+                    children.append(child_info)
+
+            if children:
+                menu_item["items"] = sorted(
+                    children,
+                    key=lambda x: x["name"]
+                )
+
+            menu_tree.append(menu_item)
+
+        return sorted(menu_tree, key=lambda x: x["name"])
+
+    @classmethod
+    def get_available_endpoints(
+        cls,
+        user_permission_uuids: Set[str]
+    ) -> Dict[str, Dict[str, Any]]:
+        """Get all available endpoints based on user permissions.
+
+        Args:
+            user_permission_uuids: Set of UUIDs the user has permission for
+
+        Returns:
+            Dict mapping event UUIDs to endpoint configurations
+        """
+        available_endpoints = {}
+
+        for method_class in cls._method_classes.values():
+            if page_info := method_class.get_page_info_with_permissions(
+                user_permission_uuids,
+                include_endpoints=True
+            ):
+                if endpoints := page_info.get("available_endpoints"):
+                    available_endpoints.update(endpoints)
+
+        return available_endpoints
+
+
+"""
+Example usage
+
+# Register your MethodToEvent classes
+registry = EventMethodRegistry()
+registry.register_method_class(AccountEventMethods)
+registry.register_method_class(AccountDetailsEventMethods)
+
+# Get complete menu structure
+user_permissions = {
+    "uuid1",
+    "uuid2",
+    "uuid3"
+}
+
+menu_items = registry.get_all_menu_items(user_permissions, include_endpoints=True)
+# Result:
+[
+    {
+        "name": "AccountPage",
+        "title": {"tr": "Hesaplar", "en": "Accounts"},
+        "icon": "User",
+        "url": "/account",
+        "category": "account",
+        "type": "query",
+        "description": "Account management operations",
+        "available_endpoints": {
+            "uuid1": {"path": "/api/account/view", "method": "GET"},
+            "uuid2": {"path": "/api/account/edit", "method": "POST"}
|
||||||
|
},
|
||||||
|
"items": [
|
||||||
|
{
|
||||||
|
"name": "AccountDetailsPage",
|
||||||
|
"title": {"tr": "Hesap Detayları", "en": "Account Details"},
|
||||||
|
"icon": "FileText",
|
||||||
|
"url": "/account/details",
|
||||||
|
"parent": "AccountPage",
|
||||||
|
"category": "account_details",
|
||||||
|
"type": "query",
|
||||||
|
"available_endpoints": {
|
||||||
|
"uuid3": {"path": "/api/account/details/view", "method": "GET"}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
# Get all available endpoints
|
||||||
|
endpoints = registry.get_available_endpoints(user_permissions)
|
||||||
|
# Result:
|
||||||
|
{
|
||||||
|
"uuid1": {
|
||||||
|
"path": "/api/account/view",
|
||||||
|
"method": "GET",
|
||||||
|
"event_name": "view_account"
|
||||||
|
},
|
||||||
|
"uuid2": {
|
||||||
|
"path": "/api/account/edit",
|
||||||
|
"method": "POST",
|
||||||
|
"event_name": "edit_account"
|
||||||
|
},
|
||||||
|
"uuid3": {
|
||||||
|
"path": "/api/account/details/view",
|
||||||
|
"method": "GET",
|
||||||
|
"event_name": "view_details"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# Get event UUIDs from MethodToEvent classes
|
||||||
|
account_events = {uuid for uuid in AccountEventMethods.__event_keys__}
|
||||||
|
|
||||||
|
# Define categories with event UUIDs
|
||||||
|
PAGES_INFO = [
|
||||||
|
{
|
||||||
|
"name": "AccountPage",
|
||||||
|
"title": {"tr": "Hesaplar", "en": "Accounts"},
|
||||||
|
"icon": "User",
|
||||||
|
"url": "/account",
|
||||||
|
"pageInfo": AccountPageInfo,
|
||||||
|
"allEndpoints": {"AccountPage": account_events},
|
||||||
|
"subCategories": [
|
||||||
|
{
|
||||||
|
"name": "AccountDetailsPage",
|
||||||
|
"title": {"tr": "Hesap Detayları", "en": "Account Details"},
|
||||||
|
"icon": "FileText",
|
||||||
|
"url": "/account/details",
|
||||||
|
"allEndpoints": {} # No direct endpoints, only shown if parent has permissions
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
# Initialize manager
|
||||||
|
manager = EventCategoryManager()
|
||||||
|
manager.add_categories(PAGES_INFO)
|
||||||
|
|
||||||
|
# Get menu tree based on available events
|
||||||
|
user_permission_uuids = {
|
||||||
|
"31f4f32f-0cd4-4995-8a6a-f9f56335848a",
|
||||||
|
"ec98ef2c-bcd0-432d-a8f4-1822a56c33b2"
|
||||||
|
}
|
||||||
|
menu_tree = manager.get_menu_tree(user_permission_uuids)
|
||||||
|
"""
|
||||||
|
|
@@ -17,7 +17,10 @@ class ValidationParser:
        self.parse()

    def parse(self):
        from ApiValidations.Request.base_validations import (
            CrudRecords,
            PydanticBaseModel,
        )

        properties = dict(self.annotations.get("properties")).items()
        total_class_annotations = {

@@ -26,7 +29,11 @@ class ValidationParser:
            **CrudRecords.__annotations__,
        }
        for key, value in properties:
            default, required, possible_types = (
                dict(value).get("default", None),
                True,
                [],
            )
            if dict(value).get("anyOf", None):
                for _ in dict(value).get("anyOf") or []:
                    type_opt = json.loads(json.dumps(_))

@@ -47,10 +54,18 @@ class ValidationParser:
                field_type, required = "boolean", aoc == "<class 'bool'>"
            elif aoc in ("<class 'float'>", "typing.Optional[float]"):
                field_type, required = "float", aoc == "<class 'float'>"
            elif aoc in (
                "<class 'datetime.datetime'>",
                "typing.Optional[datetime.datetime]",
            ):
                field_type, required = (
                    "datetime",
                    aoc == "<class 'datetime.datetime'>",
                )
            self.schema[key] = {
                "type": field_type,
                "required": required,
                "default": default,
            }

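To make the flattened output above easier to picture, here is a small sketch of what `ValidationParser` ends up with: it walks a Pydantic model's JSON-schema properties and reduces each field to a `type` / `required` / `default` entry. The model below and the expected mapping are illustrative assumptions, not code or fixtures from the repository.

```python
# Hypothetical model and the rough shape of the schema the branches above
# ("boolean" / "float" / "datetime") would produce for it; names are made up.
from datetime import datetime
from typing import Optional

from pydantic import BaseModel


class DemoRecord(BaseModel):
    is_active: bool
    price: Optional[float] = None
    created_at: Optional[datetime] = None


# Roughly what ValidationParser.schema would contain for DemoRecord (assumed):
expected_schema = {
    "is_active": {"type": "boolean", "required": True, "default": None},
    "price": {"type": "float", "required": False, "default": None},
    "created_at": {"type": "datetime", "required": False, "default": None},
}

# The raw "properties" mapping the parser starts from:
print(DemoRecord.model_json_schema()["properties"])
```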
@@ -1,9 +1,9 @@
from .account_responses import AccountRecordResponse
from .address_responses import ListAddressResponse
from .auth_responses import (
    AuthenticationLoginResponse,
    AuthenticationRefreshResponse,
    AuthenticationUserInfoResponse,
)

@@ -18,14 +18,12 @@ from .auth_middleware import MiddlewareModule
from Schemas import Events


class EventFunctions:

    def __init__(self, endpoint: str, request: Request):
        self.endpoint = endpoint
        self.request = request

    def match_endpoint_with_accesiable_event(self) -> Optional[Dict[str, Any]]:
        """
        Match an endpoint with accessible events.

@@ -221,6 +219,7 @@ class TokenEventMiddleware:
        # # First apply authentication
        # authenticated_func = MiddlewareModule.auth_required(func)
        authenticated_func = func

        @wraps(authenticated_func)
        async def wrapper(request: Request, *args, **kwargs) -> Dict[str, Any]:

@@ -233,9 +232,11 @@ class TokenEventMiddleware:
                    loc=get_line_number_for_error(),
                    sys_msg="Function code not found",
                )

            # Make handler available to all functions in the chain
            func.func_code = EventFunctions(
                endpoint_url, request
            ).match_endpoint_with_accesiable_event()
            # Call the authenticated function
            if inspect.iscoroutinefunction(authenticated_func):
                return await authenticated_func(request, *args, **kwargs)

@@ -243,7 +244,6 @@ class TokenEventMiddleware:
        return wrapper

    @staticmethod
    def validation_required(
        func: Callable[..., Dict[str, Any]]

@@ -268,7 +268,9 @@ class TokenEventMiddleware:
            request: Request, *args: Any, **kwargs: Any
        ) -> Union[Dict[str, Any], BaseModel]:
            # Handle both async and sync functions
            endpoint_asked = getattr(kwargs.get("data", None), "data", None).get(
                "endpoint", None
            )
            if not endpoint_asked:
                raise HTTPExceptionApi(
                    error_code="",

@@ -276,7 +278,9 @@ class TokenEventMiddleware:
                    loc=get_line_number_for_error(),
                    sys_msg="Endpoint not found",
                )
            wrapper.validation_code = EventFunctions(
                endpoint_asked, request
            ).retrieve_function_dict()
            if inspect.iscoroutinefunction(authenticated_func):
                result = await authenticated_func(request, *args, **kwargs)
            else:

@@ -289,4 +293,5 @@ class TokenEventMiddleware:
            if inspect.iscoroutine(result):
                result = await result
            return result

        return wrapper

87 README.md

@@ -82,3 +82,90 @@ docker compose -f docker-compose.test.yml up --build
- Deployment scripts
- Database migrations
- Maintenance utilities


use arcjet @frontend

## Architecture Overview

This project follows a layered architecture with three core services:

### Core Services
1. **AuthServiceApi**: Authentication and authorization
2. **EventServiceApi**: Event processing and management
3. **ValidationServiceApi**: Input and schema validation

### System Layers
- **Dependencies Layer**: External dependencies and requirements
- **Application Layer**: Core business logic
- **Service Layer**: API implementations
- **Test Layer**: Testing infrastructure
- **Dev Layer**: Development utilities
- **Root Layer**: Configuration and documentation

For detailed architecture documentation, see [System Architecture](docs/architecture/system_architecture.md).

## Suggested Improvements

The following improvements have been identified to enhance the system:

### Infrastructure & Deployment
- **Service Isolation**: Containerize core services (Auth, Event, Validation)
- **API Gateway**: Add gateway layer for rate limiting, versioning, and security
- **Monitoring**: Implement distributed tracing and metrics collection
- **Configuration**: Move to centralized configuration service with feature flags

### Performance & Scaling
- **Caching Strategy**: Enhance Redis implementation with invalidation patterns
- **Database**: Implement sharding and read replicas
- **Event System**: Add message queue (RabbitMQ/Kafka) for event handling
- **Background Jobs**: Implement job processing and connection pooling

### Security & Reliability
- **API Security**: Implement key rotation and rate limiting
- **Error Handling**: Add centralized tracking and circuit breakers
- **Testing**: Add integration tests and performance benchmarks
- **Audit**: Implement comprehensive audit logging

### Development Experience
- **Code Organization**: Move to domain-driven design
- **Documentation**: Add OpenAPI/Swagger docs and ADRs
- **Internationalization**: Create translation management system
- **Developer Tools**: Enhance debugging and monitoring capabilities

For implementation details of these improvements, see:
- [Architecture Documentation](docs/architecture/system_architecture.md)
- [Detailed Improvement Plan](docs/improvements/detailed_improvement_plan.md) with code examples and implementation timeline

## Development Notes with AI-Windsurf

This project uses AI-Windsurf's intelligent note-taking system to maintain comprehensive development documentation. Notes are automatically organized and stored in the `/docs/notes/` directory.

### Note Structure
- **Topic-based Organization**: Notes are categorized by topics (architecture, features, bugs, etc.)
- **Date Tracking**: All notes include creation and modification dates
- **Automatic Linking**: Related components and documentation are automatically cross-referenced
- **Action Items**: Tasks and next steps are tracked within notes

### Accessing Notes
1. Browse the `/docs/notes/` directory
2. Notes are stored in markdown format for easy reading
3. Each note follows a standard template with:
   - Overview
   - Technical details
   - Related components
   - Action items

### Adding Notes
Work with AI-Windsurf to add notes by:
1. Describing what you want to document
2. Mentioning related components or previous notes
3. Specifying any action items or follow-ups

The AI will automatically:
- Create properly formatted note files
- Link related documentation
- Update existing notes when relevant
- Track development progress

For detailed documentation about specific components, refer to the corresponding files in the `/docs/` directory.

@@ -281,6 +281,7 @@ class Event2Employee(CrudCollection):
    @classmethod
    def get_event_endpoints(cls, employee_id: int) -> list:
        from Schemas import EndpointRestriction

        db = cls.new_session()
        employee_events = cls.filter_all(
            cls.employee_id == employee_id,

@@ -307,9 +308,7 @@ class Event2Employee(CrudCollection):
        ).data
        active_events.extend(events_extra)
        endpoint_restrictions = EndpointRestriction.filter_all(
            EndpointRestriction.id.in_([event.endpoint_id for event in active_events]),
            db=db,
        ).data
        return [event.endpoint_name for event in endpoint_restrictions]

@@ -381,6 +380,7 @@ class Event2Occupant(CrudCollection):
    @classmethod
    def get_event_endpoints(cls, build_living_space_id) -> list:
        from Schemas import EndpointRestriction

        db = cls.new_session()
        occupant_events = cls.filter_all(
            cls.build_living_space_id == build_living_space_id,

@@ -407,13 +407,12 @@ class Event2Occupant(CrudCollection):
        ).data
        active_events.extend(events_extra)
        endpoint_restrictions = EndpointRestriction.filter_all(
            EndpointRestriction.id.in_([event.endpoint_id for event in active_events]),
            db=db,
        ).data
        return [event.endpoint_name for event in endpoint_restrictions]


class ModulePrice(CrudCollection):
    """
    ModulePrice class based on declarative_base and BaseMixin via session

@@ -0,0 +1,175 @@
from dataclasses import dataclass, field
from typing import List, Optional, Dict, Any, Callable
from fastapi import APIRouter, Depends, HTTPException
from pydantic import BaseModel
from typing import Union


# First, let's create our category models
class CategoryBase(BaseModel):
    id: str
    name: str
    description: Optional[str] = None


class CategoryCreate(CategoryBase):
    parent_id: Optional[str] = None


class CategoryResponse(CategoryBase):
    children: List['CategoryResponse'] = []
    parent_id: Optional[str] = None


# Category data structure for handling the hierarchy
@dataclass
class CategoryNode:
    id: str
    name: str
    description: Optional[str]
    parent_id: Optional[str] = None
    children: List['CategoryNode'] = field(default_factory=list)


# Category Service for managing the hierarchy
class CategoryService:
    def __init__(self):
        self.categories: Dict[str, CategoryNode] = {}

    def add_category(self, category: CategoryCreate) -> CategoryNode:
        node = CategoryNode(
            id=category.id,
            name=category.name,
            description=category.description,
            parent_id=category.parent_id
        )

        self.categories[category.id] = node

        if category.parent_id and category.parent_id in self.categories:
            parent = self.categories[category.parent_id]
            parent.children.append(node)

        return node

    def get_category_tree(self, category_id: str) -> Optional[CategoryNode]:
        return self.categories.get(category_id)

    def get_category_path(self, category_id: str) -> List[CategoryNode]:
        path = []
        current = self.categories.get(category_id)

        while current:
            path.append(current)
            current = self.categories.get(current.parent_id) if current.parent_id else None

        return list(reversed(path))


# Factory for creating category endpoints
class CategoryEndpointFactory:
    def __init__(self, category_service: CategoryService):
        self.category_service = category_service

    def create_route_config(self, base_prefix: str) -> RouteFactoryConfig:
        endpoints = [
            # Create category endpoint
            EndpointFactoryConfig(
                url_prefix=base_prefix,
                url_endpoint="/categories",
                url_of_endpoint=f"{base_prefix}/categories",
                endpoint="/categories",
                method="POST",
                summary="Create new category",
                description="Create a new category with optional parent",
                endpoint_function=self.create_category,
                request_model=CategoryCreate,
                response_model=CategoryResponse,
                is_auth_required=True
            ),

            # Get category tree endpoint
            EndpointFactoryConfig(
                url_prefix=base_prefix,
                url_endpoint="/categories/{category_id}",
                url_of_endpoint=f"{base_prefix}/categories/{{category_id}}",
                endpoint="/categories/{category_id}",
                method="GET",
                summary="Get category tree",
                description="Get category and its children",
                endpoint_function=self.get_category_tree,
                response_model=CategoryResponse,
                is_auth_required=True
            ),

            # Get category path endpoint
            EndpointFactoryConfig(
                url_prefix=base_prefix,
                url_endpoint="/categories/{category_id}/path",
                url_of_endpoint=f"{base_prefix}/categories/{{category_id}}/path",
                endpoint="/categories/{category_id}/path",
                method="GET",
                summary="Get category path",
                description="Get full path from root to this category",
                endpoint_function=self.get_category_path,
                response_model=List[CategoryResponse],
                is_auth_required=True
            )
        ]

        return RouteFactoryConfig(
            name="categories",
            tags=["Categories"],
            prefix=base_prefix,
            endpoints=endpoints
        )

    async def create_category(self, category: CategoryCreate) -> CategoryResponse:
        node = self.category_service.add_category(category)
        return self._convert_to_response(node)

    async def get_category_tree(self, category_id: str) -> CategoryResponse:
        node = self.category_service.get_category_tree(category_id)
        if not node:
            raise HTTPException(status_code=404, detail="Category not found")
        return self._convert_to_response(node)

    async def get_category_path(self, category_id: str) -> List[CategoryResponse]:
        path = self.category_service.get_category_path(category_id)
        if not path:
            raise HTTPException(status_code=404, detail="Category not found")
        return [self._convert_to_response(node) for node in path]

    def _convert_to_response(self, node: CategoryNode) -> CategoryResponse:
        return CategoryResponse(
            id=node.id,
            name=node.name,
            description=node.description,
            parent_id=node.parent_id,
            children=[self._convert_to_response(child) for child in node.children]
        )


# Usage example
def create_category_router(base_prefix: str = "/api/v1") -> APIRouter:
    category_service = CategoryService()
    factory = CategoryEndpointFactory(category_service)
    route_config = factory.create_route_config(base_prefix)

    router = APIRouter(
        prefix=route_config.prefix,
        tags=route_config.tags
    )

    for endpoint in route_config.endpoints:
        router.add_api_route(
            path=endpoint.endpoint,
            endpoint=endpoint.endpoint_function,
            methods=[endpoint.method],
            response_model=endpoint.response_model,
            summary=endpoint.summary,
            description=endpoint.description,
            **endpoint.extra_options
        )

    return router

@@ -7,10 +7,12 @@ This module provides MongoDB connection management with:
3. Error handling
"""

from typing import Optional, Dict, Any, List, Union, Callable
from contextlib import contextmanager
from pymongo import MongoClient
from pymongo.results import InsertOneResult, DeleteResult, UpdateResult
from pymongo.cursor import Cursor
from functools import wraps

from AllConfigs.NoSqlDatabase.configs import MongoConfig

@@ -96,39 +98,44 @@ class MongoDBHandler(
    def __init__(self):
        """Initialize MongoDB connection if not already initialized."""
        if not self._client:
            # Build connection options
            connection_kwargs = {
                "host": MongoConfig.URL,
                "maxPoolSize": 50,  # Maximum number of connections in the pool
                "minPoolSize": 10,  # Minimum number of connections in the pool
                "maxIdleTimeMS": 30000,  # Maximum time a connection can be idle (30 seconds)
                "waitQueueTimeoutMS": 2000,  # How long a thread will wait for a connection
                "serverSelectionTimeoutMS": 5000,  # How long to wait for server selection
            }

            self._client = MongoClient(**connection_kwargs)

            # Test connection
            self._client.admin.command("ping")

    def __enter__(self):
        """Context manager entry point."""
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context manager exit point - ensures connection is properly closed."""
        try:
            if self._client:
                self._client.close()
                self._client = None
        except Exception:
            # Silently pass any errors during shutdown
            pass
        return False  # Don't suppress any exceptions

    def close(self):
        """Close MongoDB connection."""
        try:
            if self._client:
                self._client.close()
                self._client = None
        except Exception:
            # Silently pass any errors during shutdown
            pass

    @property
    def client(self) -> MongoClient:

@@ -145,6 +152,41 @@ class MongoDBHandler(
        database = self.get_database(database_name)
        return database[collection_name]

    # Create a function to get the singleton instance
    @classmethod
    @contextmanager
    def get_mongodb(cls):
        """Get or create the MongoDB singleton instance as a context manager."""
        instance = cls()
        try:
            yield instance
        finally:
            try:
                if instance._client:
                    instance._client.close()
                    instance._client = None
            except Exception:
                # Silently pass any errors during shutdown
                pass

    @classmethod
    def with_mongodb(cls, func: Callable):
        """Decorator to automatically handle MongoDB connection context.

        Usage:
            @MongoDBHandler.with_mongodb
            def my_function(db, *args, **kwargs):
                # db is the MongoDB instance
                pass
        """

        @wraps(func)
        def wrapper(*args, **kwargs):
            with cls.get_mongodb() as db:
                return func(db, *args, **kwargs)

        return wrapper


# Create a singleton instance for backward compatibility
mongodb = MongoDBHandler()

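The examples in the next file use the decorator form only; as a complementary sketch, the same handler can also be driven through the `get_mongodb` context manager added above. The collection name below is a placeholder, not one the project necessarily uses.

```python
# Minimal sketch using the context-manager form instead of @with_mongodb.
from Services.MongoDb.database import MongoDBHandler

with MongoDBHandler.get_mongodb() as db:
    users_collection = db.get_collection("users")  # placeholder collection name
    print(users_collection.count_documents({}))    # standard pymongo call on the collection
```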
@@ -5,25 +5,28 @@ This module provides practical examples of using MongoDB operations through our
Each example demonstrates different aspects of CRUD operations and aggregation.
"""

import arrow
from datetime import datetime

from Services.MongoDb.database import MongoDBHandler


@MongoDBHandler.with_mongodb
def insert_examples(db) -> None:
    """Examples of insert operations."""
    # Get the collection
    users_collection = db.get_collection("users")
    products_collection = db.get_collection("products")

    # Single document insert
    user_doc = {
        "username": "john_doe",
        "email": "john@example.com",
        "age": 30,
        "created_at": datetime.now(),
    }
    result = users_collection.insert_one(user_doc)
    print(f"Inserted user with ID: {result.inserted_id}")

    # Multiple documents insert
    products = [

@@ -31,80 +34,68 @@ def insert_examples() -> None:
        {"name": "Mouse", "price": 29.99, "stock": 100},
        {"name": "Keyboard", "price": 59.99, "stock": 75},
    ]
    result = products_collection.insert_many(products)
    print(f"Inserted {len(result.inserted_ids)} products")


@MongoDBHandler.with_mongodb
def find_examples(db) -> None:
    """Examples of find operations."""
    # Get the collections
    users_collection = db.get_collection("users")
    products_collection = db.get_collection("products")

    # Find one document
    user = users_collection.find_one({"email": "john@example.com"})
    print(f"Found user: {user}")

    # Find many documents
    products_cursor = products_collection.find({"price": {"$lt": 100}})
    products = list(products_cursor)
    print(f"Found {len(products)} products under $100")


@MongoDBHandler.with_mongodb
def update_examples(db) -> None:
    """Examples of update operations."""
    # Get the collections
    products_collection = db.get_collection("products")

    # Update single document
    result = products_collection.update_one(
        {"name": "Laptop"}, {"$set": {"price": 899.99, "stock": 45}}
    )
    print(f"Updated {result.modified_count} laptop(s)")

    # Update multiple documents
    result = products_collection.update_many(
        {"stock": {"$lt": 10}}, {"$set": {"status": "low_stock"}}
    )
    print(f"Updated {result.modified_count} low stock products")


@MongoDBHandler.with_mongodb
def delete_examples(db) -> None:
    """Examples of delete operations."""
    # Get the collections
    users_collection = db.get_collection("users")
    products_collection = db.get_collection("products")

    # Delete single document
    result = users_collection.delete_one({"email": "john@example.com"})
    print(f"Deleted {result.deleted_count} user")

    # Delete multiple documents
    result = products_collection.delete_many({"stock": 0})
    print(f"Deleted {result.deleted_count} out-of-stock products")


@MongoDBHandler.with_mongodb
def aggregate_examples(db) -> None:
    """Examples of aggregate operations."""
    # Get the collection
    products_collection = db.get_collection("products")

    # Calculate average price by category
    pipeline = [
        {

@@ -116,21 +107,23 @@ def aggregate_examples() -> None:
        },
        {"$sort": {"avg_price": -1}},
    ]
    results = products_collection.aggregate(pipeline)
    print("Category statistics:", list(results))


@MongoDBHandler.with_mongodb
def complex_query_example(db) -> None:
    """Example of a more complex query combining multiple operations."""
    # Get the collection
    users_collection = db.get_collection("users")

    # Find active users who made purchases in last 30 days
    pipeline = [
        {
            "$match": {
                "status": "active",
                "last_purchase": {
                    "$gte": arrow.now().shift(days=-30).datetime,
                },
            }
        },

@@ -152,9 +145,7 @@ def complex_query_example() -> None:
        },
        {"$sort": {"total_spent": -1}},
    ]
    results = users_collection.aggregate(pipeline)
    print("Active users with recent purchases:", list(results))

@@ -16,12 +16,14 @@ class BaseAlchemyModel:
    Session: Session object for model
    Actions: save, flush, rollback, commit
    """

    __abstract__ = True

    @classmethod
    def new_session(cls) -> Session:
        """Get database session."""
        from Services.PostgresDb.database import get_db

        with get_db() as session:
            return session

@@ -143,6 +145,3 @@ class BaseAlchemyModel:
        db: Database session
        """
        db.rollback()

@@ -27,7 +27,9 @@ class Credentials(BaseModel):
class CrudActions(SystemFields):

    @classmethod
    def extract_system_fields(
        cls, filter_kwargs: dict, create: bool = True
    ) -> Dict[str, Any]:
        """
        Remove system-managed fields from input dictionary.

@@ -63,8 +65,6 @@ class CrudActions(SystemFields):
            if key in cls.columns + cls.hybrid_properties + cls.settable_relations
        }

    @classmethod
    def iterate_over_variables(cls, val: Any, key: str) -> tuple[bool, Optional[Any]]:
        """

@@ -187,9 +187,9 @@ class CrudActions(SystemFields):
                return_dict[key] = value_of_database
            else:
                # Handle default field selection
                exclude_list = (getattr(self, "__exclude__fields__", []) or []) + list(
                    self.__system_default_model__
                )
                columns_list = list(set(self.columns).difference(set(exclude_list)))
                columns_list = [col for col in columns_list if str(col)[-2:] != "id"]
                columns_list.extend(

@@ -230,18 +230,18 @@ class CRUDModel(BaseAlchemyModel, CrudActions):
        """

        if getattr(cls.creds, "person_id", None) and getattr(
            cls.creds, "person_name", None
        ):
            record_created.created_by_id = cls.creds.person_id
            record_created.created_by = cls.creds.person_name
        return

    @classmethod
    def update_metadata(
        cls, created: bool, error_case: str = None, message: str = None
    ) -> None:
        cls.meta_data = MetaDataRow(
            created=created, error_case=error_case, message=message
        )

    @classmethod

@@ -250,7 +250,7 @@ class CRUDModel(BaseAlchemyModel, CrudActions):
            error_code=cls.meta_data.error_case,
            lang=cls.lang,
            loc=get_line_number_for_error(),
            sys_msg=cls.meta_data.message,
        )

    @classmethod

@@ -385,11 +385,15 @@ class CRUDModel(BaseAlchemyModel, CrudActions):
            raise ValueError("Confirm field cannot be updated with other fields")

        if is_confirmed_argument:
            if getattr(self.creds, "person_id", None) and getattr(
                self.creds, "person_name", None
            ):
                self.confirmed_by_id = self.creds.person_id
                self.confirmed_by = self.creds.person_name
        else:
            if getattr(self.creds, "person_id", None) and getattr(
                self.creds, "person_name", None
            ):
                self.updated_by_id = self.creds.person_id
                self.updated_by = self.creds.person_name
        return

@@ -28,9 +28,7 @@ class ArgumentModel:
    @classmethod
    def _query(cls: Type[T], db: Session) -> Query:
        """Returns the query to use in the model."""
        return cls.pre_query if cls.pre_query else db.query(cls)

    @classmethod
    def add_new_arg_to_args(cls: Type[T], args_list, argument, value):

@@ -79,7 +77,7 @@ class QueryModel(ArgumentModel, BaseModel, SmartQueryMixin):
    @classmethod
    def convert(
        cls: Type[T], smart_options: dict, validate_model: Any = None
    ) -> tuple[BinaryExpression]:
        if not validate_model:
            return tuple(cls.filter_expr(**smart_options))

@@ -107,11 +105,11 @@ class QueryModel(ArgumentModel, BaseModel, SmartQueryMixin):
    @classmethod
    def filter_one(
        cls: Type[T],
        *args: Any,
        db: Session,
        system: bool = False,
        expired: bool = False,
    ) -> PostgresResponse:
        """
        Filter single record by expressions.

@@ -132,7 +130,6 @@ class QueryModel(ArgumentModel, BaseModel, SmartQueryMixin):
        query = cls._query(db).filter(*args)
        return PostgresResponse(pre_query=cls._query(db), query=query, is_array=False)

    @classmethod
    def filter_all_system(
        cls: Type[T], *args: BinaryExpression, db: Session

@@ -152,9 +149,7 @@ class QueryModel(ArgumentModel, BaseModel, SmartQueryMixin):
        return PostgresResponse(pre_query=cls._query(db), query=query, is_array=True)

    @classmethod
    def filter_all(cls: Type[T], *args: Any, db: Session) -> PostgresResponse:
        """
        Filter multiple records by expressions.

@@ -170,9 +165,7 @@ class QueryModel(ArgumentModel, BaseModel, SmartQueryMixin):
        return PostgresResponse(pre_query=cls._query(db), query=query, is_array=True)

    @classmethod
    def filter_by_all_system(cls: Type[T], db: Session, **kwargs) -> PostgresResponse:
        """
        Filter multiple records by keyword arguments.

@@ -1,7 +1,2 @@
class LanguageModel:
    __language_model__ = None

@@ -37,7 +37,6 @@ class CrudMixin(BasicMixin, SerializeMixin, ReprMixin):
    __abstract__ = True

    # Primary and reference fields
    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    uu_id: Mapped[str] = mapped_column(

@@ -171,6 +170,3 @@ class CrudCollection(CrudMixin):
    # )
    #
    # return headers_and_validation

@@ -155,14 +155,18 @@ class PaginationResult:
        )
        order_criteria = zip(self.order_by, self.pagination.orderType)
        for field, direction in order_criteria:
            if hasattr(self._query.column_descriptions[0]["entity"], field):
                if direction.lower().startswith("d"):
                    self._query = self._query.order_by(
                        desc(
                            getattr(self._query.column_descriptions[0]["entity"], field)
                        )
                    )
                else:
                    self._query = self._query.order_by(
                        asc(
                            getattr(self._query.column_descriptions[0]["entity"], field)
                        )
                    )
        return self._query

@@ -171,6 +175,11 @@ class PaginationResult:
        """Get query object."""
        query_ordered = self.dynamic_order_by()
        query_paginated = query_ordered.limit(self.limit).offset(self.offset)
        queried_data = (
            query_paginated.all() if self.response_type else query_paginated.first()
        )
        return (
            [result.get_dict() for result in queried_data]
            if self.response_type
            else queried_data.get_dict()
        )

@@ -26,11 +26,11 @@ class PostgresResponse(Generic[T]):
    """

    def __init__(
        self,
        pre_query: Query,
        query: Query,
        is_array: bool = True,
        metadata: Any = None,
    ):
        self._is_list = is_array
        self._query = query

@@ -1,5 +1,3 @@
class SystemFields:

    __abstract__ = True

@@ -50,4 +48,3 @@ class SystemFields:
        "updated_by_id",
        "created_by_id",
    )

@@ -16,7 +16,7 @@ class TokenModel:
    def __post_init__(self):
        self.lang = str(self.lang or "tr").lower()
        self.credentials = self.credentials or {}
        if "GMT" in self.timezone:
            raise HTTPExceptionApi(
                error_code="HTTP_400_BAD_REQUEST",
                lang=self.lang,

@@ -1,4 +1,3 @@
from Schemas import AddressNeighborhood
from Services.PostgresDb.Models.crud_alchemy import Credentials
from Services.PostgresDb.Models.mixin import BasicMixin

@@ -12,13 +11,13 @@ updating = True
new_session = AddressNeighborhood.new_session()
new_session_test = AddressNeighborhood.new_session()

BasicMixin.creds = Credentials(person_id=10, person_name="Berkay Super User")


if listing:
    """List Options and Queries"""
    AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
        AddressNeighborhood.neighborhood_code.icontains("10"),
        db=new_session,
    ).query
    query_of_list_options = {

@@ -32,18 +31,20 @@ if listing:
    pagination = Pagination(data=address_neighborhoods)
    pagination.page = 9
    pagination.size = 10
    pagination.orderField = ["type_code", "neighborhood_code"]
    pagination.orderType = ["desc", "asc"]

    pagination_result = PaginationResult(
        data=address_neighborhoods, pagination=pagination
    )
    print(pagination_result.pagination.as_dict())
    print(pagination_result.data)

if creating:
    """Create Queries"""
    find_or_create = AddressNeighborhood.find_or_create(
        neighborhood_code="100",
        neighborhood_name="Test",
        locality_id=15334,
        db=new_session,
    )

@@ -51,26 +52,26 @@ if creating:
    find_or_create.destroy(db=new_session)
    find_or_create.save_via_metadata(db=new_session)
    find_or_create = AddressNeighborhood.find_or_create(
        neighborhood_code="100",
        neighborhood_name="Test",
        locality_id=15334,
        db=new_session,
    )
    find_or_create.save_via_metadata(db=new_session)

if updating:
    """Update Queries"""

    query_of_list_options = {
        "uu_id": str("33a89767-d2dc-4531-8f66-7b650e22a8a7"),
    }
    print("query_of_list_options", query_of_list_options)
    address_neighborhoods_one = AddressNeighborhood.filter_one(
        *AddressNeighborhood.convert(query_of_list_options),
        db=new_session,
    ).data
    address_neighborhoods_one.update(
        neighborhood_name="Test 44",
        db=new_session,
    )
    address_neighborhoods_one.save(db=new_session)

@@ -78,4 +79,4 @@ if updating:
        *AddressNeighborhood.convert(query_of_list_options),
        db=new_session,
    ).data_as_dict
    print("address_neighborhoods_one", address_neighborhoods_one)

@@ -1,9 +1,9 @@
- import json
import arrow
from typing import Optional, List, Dict, Union
from AllConfigs.main import MainConfig
from Services.Redis.conn import redis_cli
from Services.Redis.Models.base import RedisRow
from Services.Redis.Models.response import RedisResponse
@@ -21,6 +21,24 @@ class RedisActions:
for unit, multiplier in time_multipliers.items()
)
+ @classmethod
+ def set_expiry_time(cls, expiry_seconds: int) -> Dict[str, int]:
+ """Convert total seconds back into a dictionary of time units."""
+ time_multipliers = {"days": 86400, "hours": 3600, "minutes": 60, "seconds": 1}
+ result = {}
+ for unit, multiplier in time_multipliers.items():
+ if expiry_seconds >= multiplier:
+ result[unit], expiry_seconds = divmod(expiry_seconds, multiplier)
+ return result
+ @classmethod
+ def resolve_expires_at(cls, redis_row: RedisRow) -> str:
+ """Resolve expiry time for Redis key."""
+ expiry_time = redis_cli.ttl(redis_row.redis_key)
+ if expiry_time == -1:
+ return "Key has no expiry time."
+ return arrow.now().shift(seconds=expiry_time).format(MainConfig.DATETIME_FORMAT)
@classmethod
def delete_key(cls, key: Union[Optional[str], Optional[bytes]]):
try:
@@ -41,7 +59,7 @@ class RedisActions:
cls, list_keys: List[Union[Optional[str], Optional[bytes]]]
) -> RedisResponse:
try:
- regex = RedisRow.regex(list_keys=list_keys)
+ regex = RedisRow().regex(list_keys=list_keys)
json_get = redis_cli.scan_iter(match=regex)
for row in list(json_get):
@@ -100,14 +118,6 @@ class RedisActions:
error=str(e),
)
- @classmethod
- def resolve_expires_at(cls, redis_row: RedisRow) -> str:
- """Resolve expiry time for Redis key."""
- expiry_time = redis_cli.ttl(redis_row.redis_key)
- if expiry_time == -1:
- return "Key has no expiry time."
- return arrow.now().shift(seconds=expiry_time).format(MainConfig.DATETIME_FORMAT)
@classmethod
def get_json(
cls, list_keys: List[Union[Optional[str], Optional[bytes]]]
@@ -120,8 +130,14 @@ class RedisActions:
for row in list(json_get):
redis_row = RedisRow()
redis_row.set_key(key=row)
- redis_row.expires_at = cls.resolve_expires_at(redis_row=redis_row)
- redis_value = redis_cli.get(redis_row.redis_key)
+ redis_value = redis_cli.get(row)
+ redis_value_expire = redis_cli.ttl(row)
+ redis_row.expires_at = cls.set_expiry_time(
+ expiry_seconds=int(redis_value_expire)
+ )
+ redis_row.expires_at_string = cls.resolve_expires_at(
+ redis_row=redis_row
+ )
redis_row.feed(redis_value)
list_of_rows.append(redis_row)
if list_of_rows:
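The `set_expiry_time` helper added above inverts the existing seconds calculation in `get_expiry_time`. A minimal standalone sketch of the round trip, reproduced outside the classes so it can be run without Redis (names mirror the diff, the sample value is arbitrary):

```python
from typing import Dict

TIME_MULTIPLIERS = {"days": 86400, "hours": 3600, "minutes": 60, "seconds": 1}

def expiry_seconds(expires_at: Dict[str, int]) -> int:
    # mirrors RedisRow.get_expiry_time: collapse a unit dict into total seconds
    return sum(int(expires_at.get(unit, 0)) * mult for unit, mult in TIME_MULTIPLIERS.items())

def set_expiry_time(total_seconds: int) -> Dict[str, int]:
    # mirrors RedisActions.set_expiry_time: split total seconds into whole units
    result: Dict[str, int] = {}
    for unit, mult in TIME_MULTIPLIERS.items():
        if total_seconds >= mult:
            result[unit], total_seconds = divmod(total_seconds, mult)
    return result

print(set_expiry_time(90061))                  # {'days': 1, 'hours': 1, 'minutes': 1, 'seconds': 1}
print(expiry_seconds(set_expiry_time(90061)))  # 90061
```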
@@ -10,7 +10,7 @@ This module provides a class for managing Redis key-value operations with support
import json
from typing import Union, Dict, List, Optional, Any, ClassVar
- from datetime import datetime
+ from Services.Redis.conn import redis_cli
class RedisKeyError(Exception):
@@ -44,23 +44,21 @@ class RedisRow:
key: ClassVar[Union[str, bytes]]
value: ClassVar[Any]
- delimiter: ClassVar[str] = ":"
+ delimiter: str = ":"
expires_at: Optional[dict] = {"seconds": 60 * 60 * 30}
expires_at_string: Optional[str]
- @classmethod
- def get_expiry_time(cls) -> int | None:
+ def get_expiry_time(self) -> int | None:
"""Calculate expiry time in seconds from kwargs."""
time_multipliers = {"days": 86400, "hours": 3600, "minutes": 60, "seconds": 1}
- if cls.expires_at:
+ if self.expires_at:
return sum(
- int(cls.expires_at.get(unit, 0)) * multiplier
+ int(self.expires_at.get(unit, 0)) * multiplier
for unit, multiplier in time_multipliers.items()
)
return
- @classmethod
- def merge(cls, set_values: List[Union[str, bytes]]) -> None:
+ def merge(self, set_values: List[Union[str, bytes]]) -> None:
"""
Merge list of values into a single delimited key.
@@ -83,7 +81,7 @@ class RedisRow:
value = value.decode()
merged.append(str(value))
- cls.key = cls.delimiter.join(merged).encode()
+ self.key = self.delimiter.join(merged).encode()
@classmethod
def regex(cls, list_keys: List[Union[str, bytes, None]]) -> str:
@@ -120,12 +118,11 @@ class RedisRow:
# Add wildcard if first key was None
if list_keys[0] is None:
pattern = f"*{cls.delimiter}{pattern}"
- if "*" not in pattern:
+ if "*" not in pattern and any([list_key is None for list_key in list_keys]):
pattern = f"{pattern}:*"
return pattern
- @classmethod
- def parse(cls) -> List[str]:
+ def parse(self) -> List[str]:
"""
Parse the key into its component parts.
@@ -137,14 +134,13 @@ class RedisRow:
>>> RedisRow.parse()
['users', '123', 'profile']
"""
- if not cls.key:
+ if not self.key:
return []
- key_str = cls.key.decode() if isinstance(cls.key, bytes) else cls.key
+ key_str = self.key.decode() if isinstance(self.key, bytes) else self.key
- return key_str.split(cls.delimiter)
+ return key_str.split(self.delimiter)
- @classmethod
- def feed(cls, value: Union[bytes, Dict, List, str]) -> None:
+ def feed(self, value: Union[bytes, Dict, List, str]) -> None:
"""
Convert and store value in JSON format.
@@ -161,18 +157,17 @@ class RedisRow:
"""
try:
if isinstance(value, (dict, list)):
- cls.value = json.dumps(value)
+ self.value = json.dumps(value)
elif isinstance(value, bytes):
- cls.value = json.dumps(json.loads(value.decode()))
+ self.value = json.dumps(json.loads(value.decode()))
elif isinstance(value, str):
- cls.value = value
+ self.value = value
else:
raise RedisValueError(f"Unsupported value type: {type(value)}")
except json.JSONDecodeError as e:
raise RedisValueError(f"Invalid JSON format: {str(e)}")
- @classmethod
- def modify(cls, add_dict: Dict) -> None:
+ def modify(self, add_dict: Dict) -> None:
"""
Modify existing data by merging with new dictionary.
@@ -187,15 +182,17 @@ class RedisRow:
"""
if not isinstance(add_dict, dict):
raise RedisValueError("modify() requires a dictionary argument")
- current_data = cls.data if cls.data else {}
+ current_data = self.row if self.row else {}
if not isinstance(current_data, dict):
raise RedisValueError("Cannot modify non-dictionary data")
- cls.feed({**current_data, **add_dict})
+ current_data = {
+ **current_data,
+ **add_dict,
+ }
+ self.feed(current_data)
+ self.save()
- @classmethod
- def save(cls):
+ def save(self):
"""
Save the data to Redis with optional expiration.
@@ -204,29 +201,28 @@ class RedisRow:
RedisValueError: If value is not set
"""
import arrow
- from Services.Redis.conn import redis_cli
- if not cls.key:
+ if not self.key:
raise RedisKeyError("Cannot save data without a key")
- if not cls.value:
+ if not self.value:
raise RedisValueError("Cannot save empty data")
- if cls.expires_at:
+ if self.expires_at:
- redis_cli.setex(name=cls.redis_key, time=cls.expires_at, value=cls.value)
+ redis_cli.setex(
+ name=self.redis_key, time=self.get_expiry_time(), value=self.value
+ )
- cls.expires_at_string = str(
+ self.expires_at_string = str(
arrow.now()
- .shift(seconds=cls.get_expiry_time())
+ .shift(seconds=self.get_expiry_time())
.format("YYYY-MM-DD HH:mm:ss")
)
- return cls.value
+ return self.value
- redis_cli.set(name=cls.redis_key, value=cls.value)
- cls.expires_at = None
- cls.expires_at_string = None
- return cls.value
+ redis_cli.set(name=self.redis_key, value=self.value)
+ self.expires_at = None
+ self.expires_at_string = None
+ return self.value
- @classmethod
- def remove(cls, key: str) -> None:
+ def remove(self, key: str) -> None:
"""
Remove a key from the stored dictionary.
@@ -237,16 +233,24 @@ class RedisRow:
KeyError: If key doesn't exist
RedisValueError: If stored value is not a dictionary
"""
- current_data = cls.data
+ current_data = self.row
if not isinstance(current_data, dict):
raise RedisValueError("Cannot remove key from non-dictionary data")
try:
current_data.pop(key)
- cls.feed(current_data)
+ self.feed(current_data)
+ self.save()
except KeyError:
raise KeyError(f"Key '{key}' not found in stored data")
+ def delete(self) -> None:
+ """Delete the key from Redis."""
+ try:
+ redis_cli.delete(self.redis_key)
+ except Exception as e:
+ print(f"Error deleting key: {str(e)}")
@property
def keys(self) -> str:
"""
@@ -257,8 +261,7 @@ class RedisRow:
"""
return self.key.decode() if isinstance(self.key, bytes) else self.key
- @classmethod
- def set_key(cls, key: Union[str, bytes]) -> None:
+ def set_key(self, key: Union[str, bytes]) -> None:
"""
Set key ensuring bytes format.
@@ -267,7 +270,7 @@ class RedisRow:
"""
if not key:
raise RedisKeyError("Cannot set empty key")
- cls.key = key if isinstance(key, bytes) else str(key).encode()
+ self.key = key if isinstance(key, bytes) else str(key).encode()
@property
def redis_key(self) -> bytes:
@@ -280,7 +283,7 @@ class RedisRow:
return self.key if isinstance(self.key, bytes) else str(self.key).encode()
@property
- def data(self) -> Union[Dict, List]:
+ def row(self) -> Union[Dict, List]:
"""
Get stored value as Python object.
@@ -290,6 +293,7 @@ class RedisRow:
try:
return json.loads(self.value)
except json.JSONDecodeError as e:
+ # return self.value
raise RedisValueError(f"Invalid JSON format in stored value: {str(e)}")
@property
@@ -302,5 +306,5 @@ class RedisRow:
"""
return {
"keys": self.keys,
- "value": self.data,
+ "value": self.row,
}
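For orientation, a minimal sketch of how the now instance-based `RedisRow` is used locally, based only on the methods shown in this diff (`save()` and `delete()` would additionally hit `redis_cli`, so they are left out here):

```python
row = RedisRow()
row.merge(["users", b"123", "profile"])     # key becomes b"users:123:profile"
print(row.parse())                          # ['users', '123', 'profile']

row.feed({"name": "Jane", "active": True})  # value is stored as a JSON string
print(row.row)                              # {'name': 'Jane', 'active': True}
```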
@@ -20,6 +20,8 @@ class RedisResponse:
self.data_type = "dict"
elif isinstance(data, list):
self.data_type = "list"
+ elif isinstance(data, RedisRow):
+ self.data_type = "row"
elif data is None:
self.data_type = None
self.error = error
@@ -30,12 +32,16 @@ class RedisResponse:
"status": self.status,
"message": self.message,
"count": self.count,
- "dataType": self.data_type,
+ "dataType": getattr(self, "data_type", None),
}
if isinstance(data, RedisRow):
- return {"data": {data.keys: data.data}, **main_dict}
+ dict_return = {data.keys: data.row}
+ dict_return.update(dict(main_dict))
+ return dict_return
elif isinstance(data, list):
- return {"data": {row.keys: row.data for row in data}, **main_dict}
+ dict_return = {row.keys: row.data for row in data}
+ dict_return.update(dict(main_dict))
+ return dict_return
@property
def all(self) -> Union[Optional[List[RedisRow]]]:
@@ -43,11 +49,20 @@ class RedisResponse:
@property
def count(self) -> int:
- return len(self.all)
+ print()
+ row = self.all
+ if isinstance(row, list):
+ return len(row)
+ elif isinstance(row, RedisRow):
+ return 1
@property
def first(self) -> Union[RedisRow, None]:
+ print("self.data", self.data)
if self.data:
- return self.data[0]
+ if isinstance(self.data, list):
+ return self.data[0]
+ elif isinstance(self.data, RedisRow):
+ return self.row
self.status = False
return
@@ -1,39 +1,76 @@
import secrets
- import uuid
+ import random
+ from uuid import uuid4
- from Services.Redis import RedisActions, AccessToken
+ from Services.Redis.Actions.actions import RedisActions
+ from Services.Redis.Models.row import AccessToken
- first_user = AccessToken(
- accessToken=secrets.token_urlsafe(90),
- userUUID=uuid.uuid4().__str__(),
- )
- second_user = AccessToken(
- accessToken=secrets.token_urlsafe(90),
- userUUID=uuid.uuid4().__str__(),
- )
- json_data = lambda uu_id, access: {
- "uu_id": uu_id,
- "access_token": access,
- "user_type": 1,
- "selected_company": None,
- "selected_occupant": None,
- "reachable_event_list_id": [],
- }
- set_response_first_json = json_data(first_user.userUUID, first_user.accessToken)
- set_response_second_json = json_data(second_user.userUUID, second_user.accessToken)
- set_response_first = RedisActions.set_json(
- list_keys=first_user.to_list(),
- value=set_response_first_json,
- expires={"seconds": 140},
- )
- set_response_second = RedisActions.set_json(
- list_keys=second_user.to_list(),
- value=set_response_second_json,
- expires={"seconds": 190},
- )
- search_keys = [None, set_response_first_json["uu_id"]]
- get_response = RedisActions.get_json(list_keys=search_keys)
- # print("get_response", [data.expires_at for data in get_response.all])
+ def generate_token(length=32):
+ letters = "abcdefghijklmnopqrstuvwxyz"
+ merged_letters = [letter for letter in letters] + [
+ letter.upper() for letter in letters
+ ]
+ token_generated = secrets.token_urlsafe(length)
+ for i in str(token_generated):
+ if i not in merged_letters:
+ token_generated = token_generated.replace(
+ i, random.choice(merged_letters), 1
+ )
+ return token_generated
+ save_json = {
+ "user": {
+ "first_name": "John",
+ "last_name": "Doe",
+ "email": "johndoe@glu.com",
+ "phone": "1234567890",
+ "address": "1234 Main St",
+ "details": {
+ "city": "San Francisco",
+ "state": "CA",
+ "zip": "94111",
+ },
+ },
+ "domain": "https://www.example.com",
+ "info": {
+ "mac": "oıuıouqqzxöç.işüğ",
+ "version": "1.0.0",
+ "type": "web",
+ "device": "desktop",
+ },
+ }
+ # access_object = AccessToken(
+ # userUUID=str(uuid4()),
+ # accessToken=generate_token(60)
+ # )
+ # redis_object = RedisActions.set_json(
+ # list_keys=access_object.to_list(),
+ # value=save_json,
+ # expires={"seconds": 720}
+ # )
+ # quit()
+ acc_token = "IuDXEzqzCSyOJvrwdjyxqGPOBnleUZjjXWsELJgUglJjyGhINOzAUpdMuzEzoTyOsJRUeEQsgXGUXrer:521a4ba7-898f-4204-a2e5-3226e1aea1e1"
+ userUUID = acc_token.split(":")[1]
+ accessToken = acc_token.split(":")[0]
+ access_object = AccessToken(userUUID=None, accessToken=accessToken)
+ print("access_object", access_object.to_list())
+ redis_object = RedisActions.get_json(
+ list_keys=access_object.to_list(),
+ )
+ # print("type type(redis_object)", type(redis_object))
+ # print("type redis_object.data", type(redis_object.data))
+ # print("count", redis_object.count)
+ # print("data", redis_object.data)
+ # print("data", redis_object.as_dict())
+ # print("message", redis_object.message)
+ redis_row_object = redis_object.first
+ redis_row_object.modify({"reachable_event_list_id": [i for i in range(50)]})
+ # redis_row_object.remove("reachable_event_list_id")
+ # redis_row_object.modify({"reachable_event_list_id": [i for i in range(10)]})
+ # if redis_row_object:
+ # print("redis_row_object", redis_row_object.delete())
+ # print('redis_row_object.as_dict', redis_row_object.as_dict)
@@ -0,0 +1,203 @@
# System Architecture

## Core Services

### Top-Level Services
1. **AuthServiceApi**
   - User authentication and authorization
   - Token management
   - Permission handling

2. **EventServiceApi**
   - Event processing and management
   - Event routing and handling
   - Event validation

3. **ValidationServiceApi**
   - Input validation
   - Data verification
   - Schema validation

## System Components

### AllConfigs
Configuration management for various system components.

| Category | Context | Dependencies |
|----------|----------|--------------|
| Email | configs, email_send_model | - |
| NoSqlDatabase | configs | - |
| Redis | configs | - |
| SqlDatabase | configs | - |
| Token | configs | - |

### Schemas
- SQL Alchemy schema definitions
- Data models and relationships
- Database structure definitions

### ApiLibrary

| Category | Description |
|----------|-------------|
| common | Error line number tracking |
| date_time_actions | DateTime handling functions |
| extensions | Password module and utilities |

### ApiServices

| Category | Context | Dependencies |
|----------|----------|--------------|
| Login | UserLoginModule | ApiLibrary, Schemas, ErrorHandlers, ApiValidations, ApiServices |
| Token | TokenService | Services, Schemas, ApiLibrary, ErrorHandlers, AllConfigs, ApiValidations |

### Services

| Category | Dependencies |
|----------|--------------|
| Email | ApiLibrary, Schemas, ErrorHandlers, ApiValidations, ApiServices |
| MongoDb | - |
| PostgresDb | - |
| Redis | - |

### ErrorHandlers
- ErrorHandlers: General error handling
- Exceptions: Custom exception definitions

### LanguageModels
- Database: Database-related language models
- Errors: Error message translations

### ApiValidations
- Custom: Custom validation rules
- Request: Request validation logic

## Testing Framework

### Test Categories
- AlchemyResponse pagination testing
- Redis function testing
- MongoDB function testing
- Validation testing
- Header testing
- Auth function testing
- Language testing
- Property definition testing
- SmartQuery testing

### Error Categories
- AlchemyError
- ApiError
- RedisError
- MongoError
- EmailError
- Validation[Pydantic]

## Alchemy Implementation Phases

1. **BaseAlchemyNeed**
   - Session management
   - Core functionality

2. **PlainModel**
   - Basic model implementation

3. **FilteredModel**
   - Filter functionality

4. **PaginatedModel**
   - Pagination attributes
   - Filter integration

5. **LanguageModel**
   - Function retrieval
   - Header management

6. **ResponseModel**
   - Plain AlchemyClass
   - Dictionary conversion
   - Multiple response handling

## System Layers

1. **DependenciesLayer**
   - External dependencies
   - System requirements

2. **ApplicationLayer**
   - Core application logic
   - Business rules

3. **ServiceLayer**
   - Service implementations
   - API endpoints

4. **TestLayer**
   - Test suites
   - Test utilities

5. **DevLayer**
   - Development tools
   - Debug utilities

6. **RootLayer**
   - Main directory
   - Configuration files
   - Documentation

## TODO Items

1. **Event Directory Structure**
   - Move to ApiEvents
   - Docker file integration
   - Import organization

2. **MethodToEvent Renewal**
   - Update implementation
   - Improve flexibility

3. **Endpoint Configuration**
   - Remove unnecessary fields
   - Streamline configuration

4. **Middleware Organization**
   - Consolidate into /TokenEventMiddleware/
   - Standardize naming

5. **Code Cleanup**
   - Remove ActionsSchemaFactory
   - Remove ActionsSchema
   - Move endpoint_wrapper to Middleware.wrappers

6. **Function Organization**
   - Support sync/async functions
   - Improve API function organization

7. **Directory Structure**
   - Consolidate AllConfigs, ApiLibrary, ErrorHandlers
   - Move to RootLayer

8. **Configuration Management**
   - Update RouteFactoryConfig
   - Update EndpointFactoryConfig
   - Implement event validation interface

9. **Language Model**
   - Review Schemas.__language_model__
   - Update implementation

10. **Service Container**
    - Review ApiServices
    - Plan container migration

11. **Language Defaults**
    - Add to config
    - Implement ["tr", "en"] as default

## Notes

- Redis implementation needs RedisRow class
- Event validation needs database integration
- Consider containerization for ApiServices
- Review language model implementation
- Test coverage needs improvement
@@ -0,0 +1,55 @@
# Improvements Documentation

This directory contains documentation and example implementations for various system improvements.

## Directory Structure

```
improvements/
├── detailed_improvement_plan.md   # Overall improvement plan
├── language_service/              # Language service implementation
│   ├── backend/
│   │   ├── language_service.py    # Basic language service
│   │   └── zod_messages.py        # Zod validation messages
│   └── frontend/
│       └── languageService.ts     # Frontend language service
└── validation_service/            # Validation service implementation
    ├── backend/
    │   └── schema_converter.py    # Pydantic to Zod converter
    └── frontend/
        └── dynamicSchema.ts       # Dynamic Zod schema builder
```

## Components

### Language Service
The language service provides internationalization support with:
- Backend API for serving translations
- Frontend service for managing translations
- Integration with Zod for validation messages

### Validation Service
The validation service provides dynamic form validation with:
- Automatic conversion of Pydantic models to Zod schemas
- Frontend builder for dynamic schema creation
- Integration with language service for messages

## Implementation Status

These are example implementations that demonstrate the proposed improvements. To implement in the actual system:

1. Create appropriate service directories
2. Copy and adapt the code
3. Add tests
4. Update dependencies
5. Integrate with existing systems

## Next Steps

1. Review the implementations
2. Decide on integration approach
3. Create implementation tickets
4. Plan phased rollout
5. Add monitoring and metrics

For detailed implementation plans and timelines, see [detailed_improvement_plan.md](./detailed_improvement_plan.md).
@@ -0,0 +1,311 @@
# Detailed Improvement Plan

## 1. Infrastructure & Deployment

### Service Isolation and Containerization
- **Microservices Architecture**
```
/services
├── auth-service/
│   ├── Dockerfile
│   └── docker-compose.yml
├── event-service/
│   ├── Dockerfile
│   └── docker-compose.yml
└── validation-service/
    ├── Dockerfile
    └── docker-compose.yml
```
- **Service Discovery**
  - Implement Consul for service registry
  - Add health check endpoints
  - Create service mesh with Istio

### API Gateway Implementation
```yaml
# api-gateway.yml
services:
  gateway:
    routes:
      - id: auth-service
        uri: lb://auth-service
        predicates:
          - Path=/api/auth/**
        filters:
          - RateLimit=100,1s
          - CircuitBreaker=3,10s
```

### Monitoring Stack
- **Distributed Tracing**
```python
from opentelemetry import trace
from opentelemetry.exporter import jaeger

tracer = trace.get_tracer(__name__)
with tracer.start_as_current_span("operation") as span:
    span.set_attribute("attribute", value)
```
- **Metrics Collection** (see the sketch after this list)
  - Prometheus for metrics
  - Grafana for visualization
  - Custom dashboards for each service
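A minimal sketch of the metrics-collection idea using the `prometheus_client` package; the metric names, labels, and scrape port are assumptions for illustration, not existing code:

```python
# metrics.py — illustrative only
from prometheus_client import Counter, Histogram, start_http_server

REQUEST_COUNT = Counter("api_requests_total", "Total API requests", ["service", "endpoint"])
REQUEST_LATENCY = Histogram("api_request_seconds", "Request latency in seconds", ["service"])

def record_request(service: str, endpoint: str, duration: float) -> None:
    # one counter increment and one latency observation per handled request
    REQUEST_COUNT.labels(service=service, endpoint=endpoint).inc()
    REQUEST_LATENCY.labels(service=service).observe(duration)

if __name__ == "__main__":
    start_http_server(8001)  # expose /metrics for Prometheus to scrape
    record_request("auth-service", "/api/auth/login", 0.042)
```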
### Configuration Management
```python
# config_service.py
class ConfigService:
    def __init__(self):
        self.consul_client = Consul()

    def get_config(self, service_name: str) -> Dict:
        return self.consul_client.kv.get(f"config/{service_name}")

    def update_config(self, service_name: str, config: Dict):
        self.consul_client.kv.put(f"config/{service_name}", config)
```

## 2. Performance & Scaling

### Enhanced Caching Strategy
```python
# redis_cache.py
class RedisCache:
    def __init__(self):
        self.client = Redis(cluster_mode=True)

    async def get_or_set(self, key: str, callback: Callable):
        if value := await self.client.get(key):
            return value
        value = await callback()
        await self.client.set(key, value, ex=3600)
        return value
```

### Database Optimization
```sql
-- Sharding Example
CREATE TABLE users_shard_1 PARTITION OF users
FOR VALUES WITH (modulus 3, remainder 0);
CREATE TABLE users_shard_2 PARTITION OF users
FOR VALUES WITH (modulus 3, remainder 1);
```

### Event System Enhancement
```python
# event_publisher.py
class EventPublisher:
    def __init__(self):
        self.kafka_producer = KafkaProducer()

    async def publish(self, topic: str, event: Dict):
        await self.kafka_producer.send(
            topic,
            value=event,
            headers=[("version", "1.0")]
        )
```

### Background Processing
```python
# job_processor.py
class JobProcessor:
    def __init__(self):
        self.celery = Celery()
        self.connection_pool = ConnectionPool(max_size=100)

    @celery.task
    async def process_job(self, job_data: Dict):
        async with self.connection_pool.acquire() as conn:
            await conn.execute(job_data)
```

## 3. Security & Reliability

### API Security Enhancement
```python
# security.py
class SecurityMiddleware:
    def __init__(self):
        self.rate_limiter = RateLimiter()
        self.key_rotator = KeyRotator()

    async def process_request(self, request: Request):
        await self.rate_limiter.check(request.client_ip)
        await self.key_rotator.validate(request.api_key)
```

### Error Handling System
```python
# error_handler.py
class ErrorHandler:
    def __init__(self):
        self.sentry_client = Sentry()
        self.circuit_breaker = CircuitBreaker()

    async def handle_error(self, error: Exception):
        await self.sentry_client.capture_exception(error)
        await self.circuit_breaker.record_error()
```

### Testing Framework
```python
# integration_tests.py
class IntegrationTests:
    async def setup(self):
        self.containers = await TestContainers.start([
            "postgres", "redis", "kafka"
        ])

    async def test_end_to_end(self):
        await self.setup()
        # Test complete user journey
        await self.cleanup()
```

### Audit System
```python
# audit.py
class AuditLogger:
    def __init__(self):
        self.elastic = Elasticsearch()

    async def log_action(
        self,
        user_id: str,
        action: str,
        resource: str,
        changes: Dict
    ):
        await self.elastic.index({
            "user_id": user_id,
            "action": action,
            "resource": resource,
            "changes": changes,
            "timestamp": datetime.utcnow()
        })
```

## 4. Development Experience

### Domain-Driven Design
```
/src
├── domain/
│   ├── entities/
│   ├── value_objects/
│   └── aggregates/
├── application/
│   ├── commands/
│   └── queries/
└── infrastructure/
    ├── repositories/
    └── services/
```

### API Documentation
```python
# main.py
from fastapi import FastAPI
from fastapi.openapi.utils import get_openapi

app = FastAPI()

def custom_openapi():
    return get_openapi(
        title="WAG Management API",
        version="4.0.0",
        description="Complete API documentation",
        routes=app.routes
    )

app.openapi = custom_openapi
```

### Translation Management
```python
# i18n.py
class TranslationService:
    def __init__(self):
        self.translations = {}
        self.fallback_chain = ["tr", "en"]

    async def get_translation(
        self,
        key: str,
        lang: str,
        fallback: bool = True
    ) -> str:
        if translation := self.translations.get(f"{lang}.{key}"):
            return translation
        if fallback:
            for lang in self.fallback_chain:
                if translation := self.translations.get(f"{lang}.{key}"):
                    return translation
        return key
```

### Developer Tools
```python
# debug_toolkit.py
class DebugToolkit:
    def __init__(self):
        self.profiler = cProfile.Profile()
        self.debugger = pdb.Pdb()

    def profile_function(self, func: Callable):
        def wrapper(*args, **kwargs):
            self.profiler.enable()
            result = func(*args, **kwargs)
            self.profiler.disable()
            return result
        return wrapper
```

## Implementation Priority

1. **Phase 1 - Foundation** (1-2 months)
   - Service containerization
   - Basic monitoring
   - API gateway setup
   - Security enhancements

2. **Phase 2 - Scaling** (2-3 months)
   - Caching implementation
   - Database optimization
   - Event system upgrade
   - Background jobs

3. **Phase 3 - Reliability** (1-2 months)
   - Error handling
   - Testing framework
   - Audit system
   - Performance monitoring

4. **Phase 4 - Developer Experience** (1-2 months)
   - Documentation
   - Development tools
   - Translation system
   - Code organization

## Success Metrics

- **Performance**
  - Response time < 100ms for 95% of requests
  - Cache hit rate > 80%
  - Zero downtime deployments

- **Reliability**
  - 99.99% uptime
  - < 0.1% error rate
  - < 1s failover time

- **Security**
  - Zero critical vulnerabilities
  - 100% audit log coverage
  - < 1hr security incident response time

- **Development**
  - 80% test coverage
  - < 24hr PR review time
  - < 1 day developer onboarding
@@ -0,0 +1,6 @@
# Original content from ApiEvents/LanguageServiceApi/language_service.py
from typing import Dict, List, Optional
from fastapi import APIRouter, Header
from pydantic import BaseModel

# ... rest of the file content ...
@@ -0,0 +1,7 @@
# Original content from ApiEvents/LanguageServiceApi/zod_messages.py
from typing import Dict
from fastapi import APIRouter, Header
from pydantic import BaseModel
from typing import Optional

# ... rest of the file content ...
@@ -0,0 +1,4 @@
// Original content from frontend/src/services/languageService.ts
import axios from 'axios';

// ... rest of the file content ...
@@ -0,0 +1,9 @@
# Original content from ApiEvents/ValidationServiceApi/schema_converter.py
from typing import Dict, Any, Type, get_type_hints, get_args, get_origin
from pydantic import BaseModel, Field, EmailStr
from enum import Enum
import inspect
from fastapi import APIRouter
from datetime import datetime

# ... rest of the file content ...
@@ -0,0 +1,146 @@
from typing import Dict, Any, Type, Optional
from pydantic import BaseModel
from fastapi import APIRouter, Header


class ValidationMessages(BaseModel):
    """Messages for Zod validation"""
    required: str
    invalid_type: str
    invalid_string: Dict[str, str]  # email, url, etc
    too_small: Dict[str, str]  # string, array, number
    too_big: Dict[str, str]  # string, array, number
    invalid_date: str
    invalid_enum: str
    custom: Dict[str, str]


class SchemaField(BaseModel):
    """Schema field definition"""
    type: str
    items: Optional[str] = None  # For arrays
    values: Optional[list] = None  # For enums
    validations: Optional[Dict[str, Any]] = None


class SchemaDefinition(BaseModel):
    """Complete schema definition"""
    name: str
    fields: Dict[str, SchemaField]
    messages: ValidationMessages


class UnifiedSchemaService:
    def __init__(self):
        self.messages = {
            "tr": ValidationMessages(
                required="Bu alan zorunludur",
                invalid_type="Geçersiz tip",
                invalid_string={
                    "email": "Geçerli bir e-posta adresi giriniz",
                    "url": "Geçerli bir URL giriniz",
                    "uuid": "Geçerli bir UUID giriniz"
                },
                too_small={
                    "string": "{min} karakterden az olamaz",
                    "array": "En az {min} öğe gereklidir",
                    "number": "En az {min} olmalıdır"
                },
                too_big={
                    "string": "{max} karakterden fazla olamaz",
                    "array": "En fazla {max} öğe olabilir",
                    "number": "En fazla {max} olabilir"
                },
                invalid_date="Geçerli bir tarih giriniz",
                invalid_enum="Geçersiz seçim",
                custom={
                    "password_match": "Şifreler eşleşmiyor",
                    "strong_password": "Şifre güçlü değil"
                }
            ),
            "en": ValidationMessages(
                required="This field is required",
                invalid_type="Invalid type",
                invalid_string={
                    "email": "Please enter a valid email",
                    "url": "Please enter a valid URL",
                    "uuid": "Please enter a valid UUID"
                },
                too_small={
                    "string": "Must be at least {min} characters",
                    "array": "Must contain at least {min} items",
                    "number": "Must be at least {min}"
                },
                too_big={
                    "string": "Must be at most {max} characters",
                    "array": "Must contain at most {max} items",
                    "number": "Must be at most {max}"
                },
                invalid_date="Please enter a valid date",
                invalid_enum="Invalid selection",
                custom={
                    "password_match": "Passwords do not match",
                    "strong_password": "Password is not strong enough"
                }
            )
        }

    def get_schema_with_messages(
        self,
        model: Type[BaseModel],
        lang: str = "tr"
    ) -> SchemaDefinition:
        """Get schema definition with validation messages"""
        fields: Dict[str, SchemaField] = {}

        for field_name, field in model.__fields__.items():
            field_info = SchemaField(
                type=self._get_field_type(field.outer_type_),
                items=self._get_items_type(field.outer_type_),
                values=self._get_enum_values(field.outer_type_),
                validations=self._get_validations(field)
            )
            fields[field_name] = field_info

        return SchemaDefinition(
            name=model.__name__,
            fields=fields,
            messages=self.messages[lang]
        )

    def _get_field_type(self, type_: Type) -> str:
        # Implementation similar to SchemaConverter
        pass

    def _get_items_type(self, type_: Type) -> Optional[str]:
        # Implementation similar to SchemaConverter
        pass

    def _get_enum_values(self, type_: Type) -> Optional[list]:
        # Implementation similar to SchemaConverter
        pass

    def _get_validations(self, field) -> Optional[Dict[str, Any]]:
        # Implementation similar to SchemaConverter
        pass


router = APIRouter(prefix="/api/schema", tags=["Schema"])
schema_service = UnifiedSchemaService()


@router.get("/model/{model_name}")
async def get_model_schema(
    model_name: str,
    accept_language: Optional[str] = Header(default="tr")
) -> SchemaDefinition:
    """Get model schema with validation messages"""
    # You'd need to implement model lookup
    models = {
        "User": UserModel,
        "Product": ProductModel,
        # Add your models here
    }

    if model_name not in models:
        raise ValueError(f"Model {model_name} not found")

    lang = accept_language.split(",")[0][:2]
    return schema_service.get_schema_with_messages(
        models[model_name],
        lang if lang in ["tr", "en"] else "tr"
    )
@@ -0,0 +1,6 @@
// Original content from frontend/src/validation/dynamicSchema.ts
import { z } from 'zod';
import axios from 'axios';
import { zodMessages } from './zodMessages';

// ... rest of the file content ...
@@ -0,0 +1,219 @@
import { z } from 'zod';
import axios from 'axios';

interface ValidationMessages {
  required: string;
  invalid_type: string;
  invalid_string: Record<string, string>;
  too_small: Record<string, string>;
  too_big: Record<string, string>;
  invalid_date: string;
  invalid_enum: string;
  custom: Record<string, string>;
}

interface SchemaField {
  type: string;
  items?: string;
  values?: any[];
  validations?: Record<string, any>;
}

interface SchemaDefinition {
  name: string;
  fields: Record<string, SchemaField>;
  messages: ValidationMessages;
}

class UnifiedSchemaBuilder {
  private static instance: UnifiedSchemaBuilder;
  private schemaCache: Map<string, z.ZodSchema> = new Map();

  private constructor() {}

  static getInstance(): UnifiedSchemaBuilder {
    if (!UnifiedSchemaBuilder.instance) {
      UnifiedSchemaBuilder.instance = new UnifiedSchemaBuilder();
    }
    return UnifiedSchemaBuilder.instance;
  }

  async getSchema(modelName: string): Promise<z.ZodSchema> {
    // Check cache first
    if (this.schemaCache.has(modelName)) {
      return this.schemaCache.get(modelName)!;
    }

    // Fetch schema definition with messages from backend
    const response = await axios.get<SchemaDefinition>(
      `/api/schema/model/${modelName}`,
      {
        headers: {
          'Accept-Language': navigator.language || 'tr'
        }
      }
    );

    const schema = this.buildSchema(response.data);
    this.schemaCache.set(modelName, schema);
    return schema;
  }

  private buildSchema(definition: SchemaDefinition): z.ZodSchema {
    const shape: Record<string, z.ZodTypeAny> = {};

    for (const [fieldName, field] of Object.entries(definition.fields)) {
      shape[fieldName] = this.buildField(field, definition.messages);
    }

    return z.object(shape);
  }

  private buildField(
    field: SchemaField,
    messages: ValidationMessages
  ): z.ZodTypeAny {
    let zodField: z.ZodTypeAny;

    switch (field.type) {
      case 'string':
        zodField = z.string({
          required_error: messages.required,
          invalid_type_error: messages.invalid_type
        });
        break;
      case 'email':
        zodField = z.string().email(messages.invalid_string.email);
        break;
      case 'number':
        zodField = z.number({
          required_error: messages.required,
          invalid_type_error: messages.invalid_type
        });
        break;
      case 'boolean':
        zodField = z.boolean({
          required_error: messages.required,
          invalid_type_error: messages.invalid_type
        });
        break;
      case 'date':
        zodField = z.date({
          required_error: messages.required,
          invalid_type_error: messages.invalid_date
        });
        break;
      case 'array':
        zodField = z.array(
          this.buildField({ type: field.items! }, messages)
        );
        break;
      case 'enum':
        zodField = z.enum(field.values as [string, ...string[]], {
          required_error: messages.required,
          invalid_type_error: messages.invalid_enum
        });
        break;
      default:
        zodField = z.any();
    }

    // Apply validations if any
    if (field.validations) {
      zodField = this.applyValidations(zodField, field.validations, messages);
    }

    return zodField;
  }

  private applyValidations(
    field: z.ZodTypeAny,
    validations: Record<string, any>,
    messages: ValidationMessages
  ): z.ZodTypeAny {
    let result = field;

    if ('min_length' in validations) {
      result = (result as z.ZodString).min(
        validations.min_length,
        messages.too_small.string.replace(
          '{min}',
          validations.min_length.toString()
        )
      );
    }

    if ('max_length' in validations) {
      result = (result as z.ZodString).max(
        validations.max_length,
        messages.too_big.string.replace(
          '{max}',
          validations.max_length.toString()
        )
      );
    }

    if ('pattern' in validations) {
      result = (result as z.ZodString).regex(
        new RegExp(validations.pattern),
        messages.custom[validations.pattern_message] || 'Invalid format'
      );
    }

    if ('gt' in validations) {
      result = (result as z.ZodNumber).gt(
        validations.gt,
        messages.too_small.number.replace(
          '{min}',
          (validations.gt + 1).toString()
        )
      );
    }

    if ('lt' in validations) {
      result = (result as z.ZodNumber).lt(
        validations.lt,
        messages.too_big.number.replace(
          '{max}',
          (validations.lt - 1).toString()
        )
      );
    }

    return result;
  }
}

// Export singleton instance
export const schemaBuilder = UnifiedSchemaBuilder.getInstance();

// Usage example:
/*
import { schemaBuilder } from './validation/unifiedSchemaBuilder';
import { zodResolver } from '@hookform/resolvers/zod';
import { useForm } from 'react-hook-form';

function UserForm() {
  const [schema, setSchema] = useState<z.ZodSchema | null>(null);

  useEffect(() => {
    async function loadSchema() {
      const userSchema = await schemaBuilder.getSchema('User');
      setSchema(userSchema);
    }
    loadSchema();
  }, []);

  const form = useForm({
    resolver: schema ? zodResolver(schema) : undefined
  });

  if (!schema) return <div>Loading...</div>;

  return (
    <form onSubmit={form.handleSubmit(data => console.log(data))}>
      {/* Your form fields */}
    </form>
  );
}
*/

@@ -0,0 +1,229 @@
# MethodToEvent System Documentation

## Overview
The MethodToEvent system provides a unified way to manage API endpoints and the frontend menu structure with built-in permission handling. It uses UUIDs for permission management and supports hierarchical menu structures.

## Core Components

### 1. MethodToEvent Base Class
Base class for defining event methods with API endpoints and frontend page configuration.

#### Class Variables
- `action_key`: Unique identifier for the action
- `event_type`: Type of event (e.g., 'query', 'command')
- `event_description`: Human-readable description
- `event_category`: Category for grouping
- `__event_keys__`: UUID to event name mapping
- `__event_validation__`: Validation rules
- `__endpoint_config__`: API endpoint configuration
- `__page_info__`: Frontend page configuration
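
A minimal sketch of how these variables fit together is shown below; the class name, the UUID, and the import path are placeholders rather than part of the documented API.

```python
# Illustrative only -- the class name, UUID, and import path are assumptions.
from method_to_event import MethodToEvent  # assumed module location

class InvoiceEventMethods(MethodToEvent):
    action_key = "invoice"
    event_type = "query"
    event_description = "Invoice listing operations"
    event_category = "invoice"
    __event_keys__ = {
        "11111111-1111-1111-1111-111111111111": "invoice_list",
    }
    # __event_validation__, __endpoint_config__ and __page_info__ are normally
    # filled in through the configure_* helpers described under "Methods" below.
```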

#### Methods

##### Configure API Endpoints
```python
@classmethod
def register_endpoint(
    cls,
    event_uuid: str,
    path: str,
    method: str = "POST",
    response_model: Optional[Type] = None,
    **kwargs
) -> None
```
Registers an API endpoint for an event UUID.

##### Configure Router
```python
@classmethod
def configure_router(cls, prefix: str, tags: List[str]) -> None
```
Sets the router prefix and OpenAPI tags.
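
The prefix is combined with each registered endpoint path. Using the same placeholder values as the Example Usage section later in this document, the two calls below would expose the `view_account` event at `/api/account/view`; `AccountResponse` is an assumed response model.

```python
# Inside a MethodToEvent subclass body, as in the Example Usage section.
configure_router("/api/account", ["Account"])
register_endpoint(
    "uuid1",            # event UUID from __event_keys__
    "/view",            # appended to the router prefix -> /api/account/view
    method="GET",
    response_model=AccountResponse,  # placeholder model
)
```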

##### Configure Page
```python
@classmethod
def configure_page(
    cls,
    name: str,
    title: Dict[str, str],
    icon: str,
    url: str,
    component: Optional[str] = None,
    parent: Optional[str] = None
) -> None
```
Configures frontend page information.

##### Get Page Info with Permissions
```python
@classmethod
def get_page_info_with_permissions(
    cls,
    user_permission_uuids: Set[str],
    include_endpoints: bool = False
) -> Optional[Dict[str, Any]]
```
Returns the page info if the user has the required permissions, otherwise `None`.
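
For example, with the `AccountEventMethods` class defined under Example Usage below, a caller could check visibility like this (the permission UUIDs are placeholders):

```python
user_permission_uuids = {"uuid1", "uuid3"}  # placeholder UUIDs
page = AccountEventMethods.get_page_info_with_permissions(
    user_permission_uuids,
    include_endpoints=True,
)
if page is None:
    # The user holds none of this page's event UUIDs, so the page stays hidden.
    ...
```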

### 2. EventMethodRegistry
Singleton registry for managing all MethodToEvent classes and building menu structures.

#### Methods

##### Register Method Class
```python
@classmethod
def register_method_class(cls, method_class: Type[MethodToEvent]) -> None
```
Registers a MethodToEvent class in the registry.

##### Get All Menu Items
```python
@classmethod
def get_all_menu_items(
    cls,
    user_permission_uuids: Set[str],
    include_endpoints: bool = False
) -> List[Dict[str, Any]]
```
Returns the complete menu structure based on the user's permissions.

##### Get Available Endpoints
```python
@classmethod
def get_available_endpoints(
    cls,
    user_permission_uuids: Set[str]
) -> Dict[str, Dict[str, Any]]
```
Returns all available API endpoints based on the user's permissions.
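
As a sketch of the expected shape, using the placeholder UUIDs from the examples below:

```python
endpoints = EventMethodRegistry.get_available_endpoints({"uuid1", "uuid3"})
# Roughly:
# {
#     "uuid1": {"path": "/api/account/view", "method": "GET"},
#     "uuid3": {"path": "/api/account/details/view", "method": "GET"},
# }
```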

## Example Usage

### 1. Define Event Methods
```python
class AccountEventMethods(MethodToEvent):
    event_category = "account"
    event_type = "query"
    event_description = "Account management operations"
    __event_keys__ = {
        "uuid1": "view_account",
        "uuid2": "edit_account"
    }

    # Configure API
    configure_router("/api/account", ["Account"])
    register_endpoint(
        "uuid1",
        "/view",
        method="GET",
        response_model=AccountResponse
    )

    # Configure frontend
    configure_page(
        name="AccountPage",
        title={"tr": "Hesaplar", "en": "Accounts"},
        icon="User",
        url="/account"
    )

class AccountDetailsEventMethods(MethodToEvent):
    event_category = "account_details"
    __event_keys__ = {
        "uuid3": "view_details",
        "uuid4": "edit_details"
    }

    configure_page(
        name="AccountDetailsPage",
        title={"tr": "Hesap Detayları", "en": "Account Details"},
        icon="FileText",
        url="/account/details",
        parent="AccountPage"  # Link to parent
    )
```

### 2. Register and Use
```python
# Register classes
registry = EventMethodRegistry()
registry.register_method_class(AccountEventMethods)
registry.register_method_class(AccountDetailsEventMethods)

# Get menu structure
user_permissions = {"uuid1", "uuid2", "uuid3"}
menu_items = registry.get_all_menu_items(user_permissions, include_endpoints=True)
```

## Menu Structure Rules

1. **Parent-Child Visibility**
   - The parent page must have permissions to be visible
   - If the parent is not visible, its children are never shown
   - If the parent is visible, all of its children are shown

2. **Permission Checking**
   - Based on UUID intersection
   - A page is visible if the user has any of its event UUIDs
   - Endpoints are only included if the user has the specific permission

3. **Menu Organization**
   - Automatic tree structure based on the parent field
   - Sorted by name for consistency
   - Optional endpoint information included
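
These rules reduce to a set intersection plus a parent gate. The helper below is not the actual registry implementation, only a sketch of the logic with illustrative field names:

```python
def page_is_visible(page_event_uuids: set, user_permission_uuids: set) -> bool:
    # Rule 2: visible when the user holds at least one of the page's event UUIDs.
    return bool(page_event_uuids & user_permission_uuids)

def build_menu(pages: list, user_permission_uuids: set) -> list:
    visible = []
    for page in pages:
        if page_is_visible(page["event_uuids"], user_permission_uuids):
            # Rule 1: children ride along once their parent is visible.
            visible.append(page)
    # Rule 3: sorted by name for a stable menu.
    return sorted(visible, key=lambda p: p["name"])
```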

## Example Menu Structure
```python
[
    {
        "name": "AccountPage",
        "title": {"tr": "Hesaplar", "en": "Accounts"},
        "icon": "User",
        "url": "/account",
        "category": "account",
        "type": "query",
        "description": "Account management operations",
        "available_endpoints": {
            "uuid1": {"path": "/api/account/view", "method": "GET"},
            "uuid2": {"path": "/api/account/edit", "method": "POST"}
        },
        "items": [
            {
                "name": "AccountDetailsPage",
                "title": {"tr": "Hesap Detayları", "en": "Account Details"},
                "icon": "FileText",
                "url": "/account/details",
                "parent": "AccountPage",
                "available_endpoints": {
                    "uuid3": {"path": "/api/account/details/view", "method": "GET"}
                }
            }
        ]
    }
]
```

## Best Practices

1. **UUID Management**
   - Use consistent UUIDs across the system
   - Document UUID meanings and permissions
   - Group related permissions under the same parent

2. **Page Organization**
   - Use meaningful page names
   - Provide translations for all titles
   - Keep URL structure consistent with hierarchy

3. **API Endpoints**
   - Use consistent router prefixes
   - Group related endpoints under the same router
   - Use appropriate HTTP methods

4. **Permission Structure**
   - Design permissions hierarchically
   - Consider access patterns when grouping
   - Document permission requirements
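
One way to follow the UUID guidelines is to keep every event UUID in a single, documented constants module and reference the names from `__event_keys__` instead of repeating raw strings. The module path and values below are purely illustrative:

```python
# permissions/uuids.py -- hypothetical module; the UUIDs are placeholders.
ACCOUNT_LIST = "11111111-1111-1111-1111-111111111111"    # list account records
ACCOUNT_CREATE = "22222222-2222-2222-2222-222222222222"  # create an account record

# Referenced from an event method class:
# __event_keys__ = {ACCOUNT_LIST: "view_account", ACCOUNT_CREATE: "create_account"}
```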

@@ -0,0 +1,42 @@
# Development Notes

This directory contains development notes and documentation organized by topic and date.

## Structure

- Each note is stored as a markdown file
- Files are organized by topic in subdirectories
- File naming format: `YYYY-MM-DD_topic_name.md`
- Each note includes:
  - Date
  - Topic/Category
  - Content
  - Related files/components
  - Action items (if any)

## How to Add Notes

1. Create a new markdown file with the date prefix
2. Use the standard note template
3. Place it in the appropriate topic directory
4. Link related notes if applicable

## Note Template

```markdown
# [Topic] - [Date]

## Overview
Brief description of the topic/issue

## Details
Main content of the note

## Related
- Links to related files/components
- References to other notes

## Action Items
- [ ] Todo items if any
- [ ] Next steps
```

@@ -0,0 +1,105 @@
import axios from 'axios';

interface LanguageStrings {
  validation: Record<string, string>;
  messages: Record<string, string>;
  labels: Record<string, string>;
}

class LanguageService {
  private static instance: LanguageService;
  private strings: LanguageStrings | null = null;

  private constructor() {}

  static getInstance(): LanguageService {
    if (!LanguageService.instance) {
      LanguageService.instance = new LanguageService();
    }
    return LanguageService.instance;
  }

  async loadStrings(): Promise<void> {
    try {
      const response = await axios.get<LanguageStrings>('/api/language/strings', {
        headers: {
          'Accept-Language': navigator.language || 'tr'
        }
      });
      this.strings = response.data;
    } catch (error) {
      console.error('Failed to load language strings:', error);
      // Fallback to empty strings
      this.strings = {
        validation: {},
        messages: {},
        labels: {}
      };
    }
  }

  getValidationMessage(key: string, params?: Record<string, string>): string {
    if (!this.strings) return key;
    let message = this.strings.validation[key] || key;

    // Replace parameters if any
    if (params) {
      Object.entries(params).forEach(([key, value]) => {
        message = message.replace(`{${key}}`, value);
      });
    }

    return message;
  }

  getMessage(key: string): string {
    if (!this.strings) return key;
    return this.strings.messages[key] || key;
  }

  getLabel(key: string): string {
    if (!this.strings) return key;
    return this.strings.labels[key] || key;
  }
}

// Export singleton instance
export const languageService = LanguageService.getInstance();

// Usage example in a React component:
/*
import { useEffect, useState } from 'react';
import { languageService } from './services/languageService';

function MyForm() {
  const [isLoading, setIsLoading] = useState(true);

  useEffect(() => {
    async function loadLanguage() {
      await languageService.loadStrings();
      setIsLoading(false);
    }
    loadLanguage();
  }, []);

  if (isLoading) return <div>Loading...</div>;

  return (
    <form>
      <label>{languageService.getLabel('email')}</label>
      <input
        type="email"
        placeholder={languageService.getMessage('enter_email')}
        onInvalid={(e) => {
          e.currentTarget.setCustomValidity(
            languageService.getValidationMessage('email')
          );
        }}
      />
      <button type="submit">
        {languageService.getLabel('submit')}
      </button>
    </form>
  );
}
*/

@@ -0,0 +1,174 @@
import { z } from 'zod';
import axios from 'axios';
import { zodMessages } from './zodMessages';

interface SchemaField {
  type: string;
  items?: string; // For arrays
  additionalProperties?: string; // For objects
  values?: any[]; // For enums
}

interface SchemaDefinition {
  name: string;
  type: string;
  fields: Record<string, SchemaField>;
  validations: Record<string, Record<string, any>>;
}

class DynamicSchemaBuilder {
  private static instance: DynamicSchemaBuilder;
  private schemaCache: Map<string, z.ZodSchema> = new Map();

  private constructor() {}

  static getInstance(): DynamicSchemaBuilder {
    if (!DynamicSchemaBuilder.instance) {
      DynamicSchemaBuilder.instance = new DynamicSchemaBuilder();
    }
    return DynamicSchemaBuilder.instance;
  }

  async getSchema(modelName: string): Promise<z.ZodSchema> {
    // Check cache first
    if (this.schemaCache.has(modelName)) {
      return this.schemaCache.get(modelName)!;
    }

    // Fetch schema definition from backend
    const response = await axios.get<SchemaDefinition>(
      `/api/validation/schema/${modelName}`
    );
    const schema = this.buildSchema(response.data);

    // Cache the schema
    this.schemaCache.set(modelName, schema);

    return schema;
  }

  private buildSchema(definition: SchemaDefinition): z.ZodSchema {
    const shape: Record<string, z.ZodTypeAny> = {};

    for (const [fieldName, field] of Object.entries(definition.fields)) {
      let zodField = this.buildField(field);

      // Apply validations
      const validations = definition.validations[fieldName];
      if (validations) {
        zodField = this.applyValidations(zodField, validations);
      }

      shape[fieldName] = zodField;
    }

    return z.object(shape);
  }

  private buildField(field: SchemaField): z.ZodTypeAny {
    switch (field.type) {
      case 'string':
        return zodMessages.string();
      case 'string.email()':
        return zodMessages.email();
      case 'number':
        return z.number();
      case 'boolean':
        return z.boolean();
      case 'date':
        return z.date();
      case 'array':
        return z.array(this.buildField({ type: field.items! }));
      case 'enum':
        return z.enum(field.values as [string, ...string[]]);
      case 'object':
        if (field.additionalProperties) {
          return z.record(this.buildField({ type: field.additionalProperties }));
        }
        return z.object({});
      default:
        return z.any();
    }
  }

  private applyValidations(
    field: z.ZodTypeAny,
    validations: Record<string, any>
  ): z.ZodTypeAny {
    let result = field;

    if ('min_length' in validations) {
      result = (result as z.ZodString).min(
        validations.min_length,
        {
          message: zodMessages.messages?.too_small.string.replace(
            '{min}',
            validations.min_length.toString()
          )
        }
      );
    }

    if ('max_length' in validations) {
      result = (result as z.ZodString).max(
        validations.max_length,
        {
          message: zodMessages.messages?.too_big.string.replace(
            '{max}',
            validations.max_length.toString()
          )
        }
      );
    }

    if ('pattern' in validations) {
      result = (result as z.ZodString).regex(
        new RegExp(validations.pattern)
      );
    }

    if ('gt' in validations) {
      result = (result as z.ZodNumber).gt(validations.gt);
    }

    if ('lt' in validations) {
      result = (result as z.ZodNumber).lt(validations.lt);
    }

    return result;
  }
}

// Export singleton instance
export const schemaBuilder = DynamicSchemaBuilder.getInstance();

// Usage example:
/*
import { schemaBuilder } from './validation/dynamicSchema';
import { zodResolver } from '@hookform/resolvers/zod';
import { useForm } from 'react-hook-form';

function UserForm() {
  const [schema, setSchema] = useState<z.ZodSchema | null>(null);

  useEffect(() => {
    async function loadSchema() {
      const userSchema = await schemaBuilder.getSchema('User');
      setSchema(userSchema);
    }
    loadSchema();
  }, []);

  const form = useForm({
    resolver: schema ? zodResolver(schema) : undefined
  });

  if (!schema) return <div>Loading...</div>;

  return (
    <form onSubmit={form.handleSubmit(data => console.log(data))}>
      {/* Your form fields */}
    </form>
  );
}
*/

@@ -0,0 +1,99 @@
import { z } from 'zod';
import axios from 'axios';

interface ZodMessages {
  required_error: string;
  invalid_type_error: string;
  invalid_string: Record<string, string>;
  too_small: Record<string, string>;
  too_big: Record<string, string>;
  custom: Record<string, string>;
}

class ZodMessageService {
  private static instance: ZodMessageService;
  private messages: ZodMessages | null = null;

  private constructor() {}

  static getInstance(): ZodMessageService {
    if (!ZodMessageService.instance) {
      ZodMessageService.instance = new ZodMessageService();
    }
    return ZodMessageService.instance;
  }

  async loadMessages(): Promise<void> {
    try {
      const response = await axios.get<ZodMessages>('/api/language/zod-messages', {
        headers: {
          'Accept-Language': navigator.language || 'tr'
        }
      });
      this.messages = response.data;
    } catch (error) {
      console.error('Failed to load Zod messages:', error);
      throw error;
    }
  }

  // Helper to create Zod schemas with localized messages
  string() {
    if (!this.messages) throw new Error('Messages not loaded');

    return z.string({
      required_error: this.messages.required_error,
      invalid_type_error: this.messages.invalid_type_error
    });
  }

  email() {
    return this.string().email(this.messages?.invalid_string.email);
  }

  password() {
    return this.string()
      .min(8, { message: this.messages?.too_small.string.replace('{min}', '8') })
      .regex(
        /^(?=.*[a-z])(?=.*[A-Z])(?=.*\d)/,
        { message: this.messages?.custom.strong_password }
      );
  }

  // Add more schema helpers as needed
}

// Export singleton instance
export const zodMessages = ZodMessageService.getInstance();

// Usage example:
/*
import { z } from 'zod';
import { zodMessages } from './validation/zodMessages';

// In your component:
useEffect(() => {
  zodMessages.loadMessages();
}, []);

const loginSchema = z.object({
  email: zodMessages.email(),
  password: zodMessages.password(),
  confirmPassword: zodMessages.string()
}).refine(
  (data) => data.password === data.confirmPassword,
  {
    message: zodMessages.messages?.custom.password_match,
    path: ["confirmPassword"]
  }
);

// Use with React Hook Form
const {
  register,
  handleSubmit,
  formState: { errors }
} = useForm({
  resolver: zodResolver(loginSchema)
});
*/