New API service and logic implemented
0  Services/Email/__init__.py  Normal file
34  Services/Email/send_email.py  Normal file
@@ -0,0 +1,34 @@
from redmail import EmailSender

from AllConfigs.Email.configs import EmailConfig
from AllConfigs.Email.email_send_model import EmailSendModel

email_sender = EmailSender(**EmailConfig.as_dict())


class EmailService:

    @classmethod
    def send_email(cls, params: EmailSendModel) -> bool:
        if not EmailConfig.EMAIL_SEND:
            print("Email sending is disabled", params)
            return False
        try:
            email_sender.connect()
            # Receivers are currently hardcoded rather than taken from params.
            receivers = ["karatay@mehmetkaratay.com.tr"]
            email_sender.send(
                subject=params.subject,
                receivers=receivers,
                text=params.text + f" : Gonderilen [{str(receivers)}]",
                html=params.html,
                cc=params.cc,
                bcc=params.bcc,
                headers=params.headers or {},
                attachments=params.attachments or {},
            )
            return True
        except Exception as e:
            print(f"Error raised at email send: {e}")
        finally:
            email_sender.close()
        return False
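Usage sketch for the service above (not part of the diff). The EmailSendModel field names are inferred from the send() call in send_email; the constructor values shown are assumptions.

# Hypothetical usage; EmailSendModel fields inferred from the send() call above.
from AllConfigs.Email.email_send_model import EmailSendModel
from Services.Email.send_email import EmailService

params = EmailSendModel(
    subject="Password changed",
    text="Your password was updated.",
    html="<p>Your password was updated.</p>",
    cc=None,
    bcc=None,
    headers=None,
    attachments=None,
)
was_sent = EmailService.send_email(params)  # True on success, False otherwise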
115  Services/MongoDb/Models/action_models/base.py  Normal file
@@ -0,0 +1,115 @@
"""Base models for MongoDB documents."""

from typing import Any, Dict, Optional, Union
from bson import ObjectId
from pydantic import BaseModel, ConfigDict, Field, model_validator
from pydantic.json_schema import JsonSchemaValue
from pydantic_core import CoreSchema, core_schema

from ApiLibrary import system_arrow


class PyObjectId(ObjectId):
    """Custom type for handling MongoDB ObjectId in Pydantic models."""

    @classmethod
    def __get_pydantic_core_schema__(
        cls,
        _source_type: Any,
        _handler: Any,
    ) -> CoreSchema:
        """Define the core schema for PyObjectId."""
        return core_schema.json_or_python_schema(
            json_schema=core_schema.str_schema(),
            python_schema=core_schema.union_schema(
                [
                    core_schema.is_instance_schema(ObjectId),
                    core_schema.chain_schema(
                        [
                            core_schema.str_schema(),
                            core_schema.no_info_plain_validator_function(cls.validate),
                        ]
                    ),
                ]
            ),
            serialization=core_schema.plain_serializer_function_ser_schema(
                lambda x: str(x),
                return_schema=core_schema.str_schema(),
                when_used="json",
            ),
        )

    @classmethod
    def validate(cls, value: Any) -> ObjectId:
        """Validate and convert the value to ObjectId."""
        if not ObjectId.is_valid(value):
            raise ValueError("Invalid ObjectId")
        return ObjectId(value)

    @classmethod
    def __get_pydantic_json_schema__(
        cls,
        _core_schema: CoreSchema,
        _handler: Any,
    ) -> JsonSchemaValue:
        """Define the JSON schema for PyObjectId."""
        return {"type": "string"}


class MongoBaseModel(BaseModel):
    """Base model for all MongoDB documents."""

    model_config = ConfigDict(
        arbitrary_types_allowed=True,
        json_encoders={ObjectId: str},
        populate_by_name=True,
        from_attributes=True,
        validate_assignment=True,
        extra="allow",
    )

    # Optional _id field that will be ignored in create operations
    id: Optional[PyObjectId] = Field(None, alias="_id")

    def get_extra(self, field_name: str, default: Any = None) -> Any:
        """Safely get extra field value.

        Args:
            field_name: Name of the extra field to retrieve
            default: Default value to return if field doesn't exist

        Returns:
            Value of the extra field if it exists, otherwise the default value
        """
        return getattr(self, field_name, default)

    def as_dict(self) -> Dict[str, Any]:
        """Convert model to dictionary including all fields and extra fields.

        Returns:
            Dict containing all model fields and extra fields with proper type conversion
        """
        return self.model_dump(by_alias=True)


class MongoDocument(MongoBaseModel):
    """Base document model with timestamps."""

    created_at: float = Field(default_factory=lambda: system_arrow.now().timestamp())
    updated_at: float = Field(default_factory=lambda: system_arrow.now().timestamp())

    @model_validator(mode="before")
    @classmethod
    def prevent_protected_fields(cls, data: Any) -> Any:
        """Prevent user from setting protected fields like _id and timestamps."""
        if isinstance(data, dict):
            # Remove protected fields from input
            data.pop("_id", None)
            data.pop("created_at", None)
            data.pop("updated_at", None)

            # Set timestamps
            data["created_at"] = system_arrow.now().timestamp()
            data["updated_at"] = system_arrow.now().timestamp()

        return data
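A short sketch of how the before-validator behaves (not part of the diff); it assumes the models above are importable as shown.

# Sketch: client-supplied protected fields are dropped and timestamps regenerated.
from Services.MongoDb.Models.action_models.base import MongoDocument

doc = MongoDocument(_id="507f1f77bcf86cd799439011", created_at=0.0)
assert doc.id is None          # "_id" was stripped by prevent_protected_fields
assert doc.created_at > 0      # timestamps were set by the validator
print(doc.as_dict())           # serialized with by_alias=True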
76  Services/MongoDb/Models/action_models/domain.py  Normal file
@@ -0,0 +1,76 @@
"""
MongoDB Domain Models.

This module provides Pydantic models for domain management,
including domain history and access details.
"""

from datetime import datetime
from typing import Any, Dict, List, Optional
from pydantic import BaseModel, Field, ConfigDict, model_validator

from ApiLibrary import system_arrow
from Services.MongoDb.Models.action_models.base import MongoBaseModel, MongoDocument


class DomainData(MongoBaseModel):
    """Model for domain data.

    Attributes:
        user_uu_id: Unique identifier of the user
        main_domain: Primary domain
        other_domains_list: List of additional domains
        extra_data: Additional domain-related data
    """

    user_uu_id: str = Field(..., description="User's unique identifier")
    main_domain: str = Field(..., description="Primary domain")
    other_domains_list: List[str] = Field(
        default_factory=list, description="List of additional domains"
    )
    extra_data: Optional[Dict[str, Any]] = Field(
        default_factory=dict,
        alias="extraData",
        description="Additional domain-related data",
    )

    model_config = ConfigDict(
        from_attributes=True, populate_by_name=True, validate_assignment=True
    )


class DomainDocument(MongoDocument):
    """Model for domain-related documents."""

    data: DomainData = Field(..., description="Domain data")

    def update_main_domain(self, new_domain: str) -> None:
        """Update the main domain and move the current one to history.

        Args:
            new_domain: New main domain to set
        """
        if self.data.main_domain and self.data.main_domain != new_domain:
            if self.data.main_domain not in self.data.other_domains_list:
                self.data.other_domains_list.append(self.data.main_domain)
            self.data.main_domain = new_domain


class DomainDocumentCreate(MongoDocument):
    """Model for creating new domain documents."""

    data: DomainData = Field(..., description="Initial domain data")

    model_config = ConfigDict(
        from_attributes=True, populate_by_name=True, validate_assignment=True
    )


class DomainDocumentUpdate(MongoDocument):
    """Model for updating existing domain documents."""

    data: DomainData = Field(..., description="Updated domain data")

    model_config = ConfigDict(
        from_attributes=True, populate_by_name=True, validate_assignment=True
    )
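Sketch of the domain-rotation helper (not part of the diff); the domain values are placeholders.

# Sketch: rotating the main domain pushes the previous one into the history list.
data = DomainData(user_uu_id="user-1", main_domain="example.com")
doc = DomainDocument(data=data)
doc.update_main_domain("example.org")
assert doc.data.main_domain == "example.org"
assert "example.com" in doc.data.other_domains_list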
49  Services/MongoDb/Models/action_models/password.py  Normal file
@@ -0,0 +1,49 @@
"""
MongoDB Password Models.

This module provides Pydantic models for password management,
including password history and access details.
"""

from datetime import datetime
from typing import Any, Dict, List, Optional
from pydantic import Field

from ApiLibrary import system_arrow
from Services.MongoDb.Models.action_models.base import MongoBaseModel, MongoDocument


class PasswordHistoryDetail(MongoBaseModel):
    """Model for password history details."""

    timestamp: datetime
    ip_address: Optional[str] = Field(None, alias="ipAddress")
    user_agent: Optional[str] = Field(None, alias="userAgent")
    location: Optional[Dict[str, Any]] = None


class PasswordHistoryData(MongoBaseModel):
    """Model for password history data."""

    password_history: List[str] = Field([], alias="passwordHistory")
    access_history_detail: Dict[str, PasswordHistoryDetail] = Field(
        default_factory=dict, alias="accessHistoryDetail"
    )


class PasswordDocument(MongoDocument):
    """Model for password-related documents."""

    data: PasswordHistoryData


class PasswordDocumentCreate(MongoBaseModel):
    """Model for creating new password documents."""

    data: PasswordHistoryData = Field(..., description="Initial password data")


class PasswordDocumentUpdate(MongoBaseModel):
    """Model for updating existing password documents."""

    data: PasswordHistoryData
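Sketch showing how the camelCase aliases let these models be built straight from Mongo payloads (not part of the diff); the hash values are placeholders.

# Sketch: aliases accept camelCase keys as stored in MongoDB.
raw = {
    "passwordHistory": ["hash-1", "hash-2"],
    "accessHistoryDetail": {
        "hash-1": {"timestamp": "2024-01-01T00:00:00", "ipAddress": "10.0.0.1"},
    },
}
create_doc = PasswordDocumentCreate(data=PasswordHistoryData(**raw))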
128  Services/MongoDb/Models/actions.py  Normal file
@@ -0,0 +1,128 @@
"""
This module contains the MongoActions class, which provides methods for
performing actions on the MongoDB database.
API Mongo functions in general receive two params, which are
companyUUID and storage reason.
"""

from typing import Optional, Dict, Any, List

from pymongo import MongoClient
from pymongo.collection import Collection

from Services.MongoDb.Models.mixins import (
    MongoUpdateMixin,
    MongoInsertMixin,
    MongoFindMixin,
    MongoDeleteMixin,
    MongoAggregateMixin,
)
from Services.MongoDb.Models.exceptions import (
    MongoDocumentNotFoundError,
    MongoDuplicateKeyError,
    MongoValidationError,
    MongoConnectionError,
)


class MongoActions(
    MongoUpdateMixin,
    MongoInsertMixin,
    MongoFindMixin,
    MongoDeleteMixin,
    MongoAggregateMixin,
):
    """Main MongoDB actions class that inherits all CRUD operation mixins.

    This class provides a unified interface for all MongoDB operations while
    managing collections based on company UUID and storage reason.
    """

    def __init__(
        self, client: MongoClient, database: str, company_uuid: str, storage_reason: str
    ):
        """Initialize MongoDB actions with client and collection info.

        Args:
            client: MongoDB client
            database: Database name to use
            company_uuid: Company UUID for collection naming
            storage_reason: Storage reason for collection naming
        """
        self._client = client
        self._database = database
        self._company_uuid = company_uuid
        self._storage_reason = storage_reason
        self._collection = None
        self.use_collection(storage_reason)

    def use_collection(self, storage_reason: str) -> None:
        """Switch to a different collection.

        Args:
            storage_reason: New storage reason for collection naming
        """
        collection_name = f"{self._company_uuid}*{storage_reason}"
        self._collection = self._client[self._database][collection_name]

    @property
    def collection(self) -> Collection:
        """Get current MongoDB collection."""
        return self._collection

    def insert_one(self, document: Dict[str, Any]):
        """Insert a single document."""
        return super().insert_one(self.collection, document)

    def insert_many(self, documents: List[Dict[str, Any]]):
        """Insert multiple documents."""
        return super().insert_many(self.collection, documents)

    def find_one(
        self, filter_query: Dict[str, Any], projection: Optional[Dict[str, Any]] = None
    ):
        """Find a single document."""
        return super().find_one(self.collection, filter_query, projection)

    def find_many(
        self,
        filter_query: Dict[str, Any],
        projection: Optional[Dict[str, Any]] = None,
        sort: Optional[List[tuple]] = None,
        limit: Optional[int] = None,
        skip: Optional[int] = None,
    ):
        """Find multiple documents."""
        return super().find_many(
            self.collection, filter_query, projection, sort, limit, skip
        )

    def update_one(
        self,
        filter_query: Dict[str, Any],
        update_data: Dict[str, Any],
        upsert: bool = False,
    ):
        """Update a single document."""
        return super().update_one(self.collection, filter_query, update_data, upsert)

    def update_many(
        self,
        filter_query: Dict[str, Any],
        update_data: Dict[str, Any],
        upsert: bool = False,
    ):
        """Update multiple documents."""
        return super().update_many(self.collection, filter_query, update_data, upsert)

    def delete_one(self, filter_query: Dict[str, Any]):
        """Delete a single document."""
        return super().delete_one(self.collection, filter_query)

    def delete_many(self, filter_query: Dict[str, Any]):
        """Delete multiple documents."""
        return super().delete_many(self.collection, filter_query)

    def aggregate(self, pipeline: List[Dict[str, Any]]):
        """Execute an aggregation pipeline."""
        return super().aggregate(self.collection, pipeline)
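Construction sketch (not part of the diff); the connection string, database name, and UUID are placeholders.

# Sketch: one MongoActions per (company UUID, storage reason) pair.
from pymongo import MongoClient

client = MongoClient("mongodb://localhost:27017")
actions = MongoActions(
    client=client,
    database="api_db",                  # placeholder database name
    company_uuid="company-uuid-here",   # placeholder UUID
    storage_reason="password_history",
)
actions.insert_one({"data": {"passwordHistory": []}})
actions.use_collection("domains")       # switch collections on the same instance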
188  Services/MongoDb/Models/exception_handlers.py  Normal file
@@ -0,0 +1,188 @@
"""
Exception handlers for MongoDB operations.

This module provides exception handlers for MongoDB-related errors,
converting them to appropriate HTTP responses.
"""

from functools import wraps
from typing import Callable, Any
from fastapi import Request, status
from fastapi.responses import JSONResponse
from pymongo.errors import PyMongoError, DuplicateKeyError, ConnectionFailure

from ApiLibrary.common.line_number import get_line_number_for_error
from Services.MongoDb.Models.exceptions import (
    MongoBaseException,
    MongoConnectionError,
    MongoDocumentNotFoundError,
    MongoValidationError,
    MongoDuplicateKeyError,
    PasswordHistoryError,
    PasswordReuseError,
    PasswordHistoryLimitError,
    InvalidPasswordDetailError,
)
from ErrorHandlers.ErrorHandlers.api_exc_handler import HTTPExceptionApi


def handle_mongo_errors(func: Callable) -> Callable:
    """Decorator to handle MongoDB operation errors.

    Args:
        func: Async function to wrap with error handling

    Returns:
        Wrapped function with error handling
    """

    @wraps(func)
    async def wrapper(*args, **kwargs) -> Any:
        try:
            return await func(*args, **kwargs)
        except ConnectionFailure as e:
            raise MongoConnectionError(
                message=str(e), details={"error_type": "connection_failure"}
            ).to_http_exception()
        except DuplicateKeyError as e:
            raise MongoDuplicateKeyError(
                collection=e.details.get("namespace", "unknown"),
                key_pattern=e.details.get("keyPattern", {}),
            ).to_http_exception()
        except PyMongoError as e:
            raise MongoBaseException(
                message=str(e), details={"error_type": "pymongo_error"}
            ).to_http_exception()
        except Exception as e:
            raise HTTPExceptionApi(
                lang="en",
                error_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                loc=get_line_number_for_error(),
                sys_msg=str(e),
            )

    return wrapper


async def mongo_base_exception_handler(
    request: Request, exc: MongoBaseException
) -> JSONResponse:
    """Handle base MongoDB exceptions.

    Args:
        request: FastAPI request
        exc: MongoDB base exception

    Returns:
        JSON response with error details
    """
    # Serialize the exception's message and details; the exception object
    # itself is not JSON-serializable.
    return JSONResponse(
        status_code=exc.status_code,
        content={"error": exc.message, "details": exc.details},
    )


async def mongo_connection_error_handler(
    request: Request, exc: MongoConnectionError
) -> JSONResponse:
    """Handle MongoDB connection errors.

    Args:
        request: FastAPI request
        exc: MongoDB connection error

    Returns:
        JSON response with connection error details
    """
    return JSONResponse(
        status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
        content={"error": exc.message, "details": exc.details},
    )


async def mongo_document_not_found_handler(
    request: Request, exc: MongoDocumentNotFoundError
) -> JSONResponse:
    """Handle document not found errors.

    Args:
        request: FastAPI request
        exc: Document not found error

    Returns:
        JSON response with not found error details
    """
    return JSONResponse(
        status_code=status.HTTP_404_NOT_FOUND,
        content={"error": exc.message, "details": exc.details},
    )


async def mongo_validation_error_handler(
    request: Request, exc: MongoValidationError
) -> JSONResponse:
    """Handle validation errors.

    Args:
        request: FastAPI request
        exc: Validation error

    Returns:
        JSON response with validation error details
    """
    return JSONResponse(
        status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
        content={"error": exc.message, "details": exc.details},
    )


async def mongo_duplicate_key_error_handler(
    request: Request, exc: MongoDuplicateKeyError
) -> JSONResponse:
    """Handle duplicate key errors.

    Args:
        request: FastAPI request
        exc: Duplicate key error

    Returns:
        JSON response with duplicate key error details
    """
    return JSONResponse(
        status_code=status.HTTP_409_CONFLICT,
        content={"error": exc.message, "details": exc.details},
    )


async def password_history_error_handler(
    request: Request, exc: PasswordHistoryError
) -> JSONResponse:
    """Handle password history errors.

    Args:
        request: FastAPI request
        exc: Password history error

    Returns:
        JSON response with password history error details
    """
    return JSONResponse(
        status_code=exc.status_code,
        content={"error": exc.message, "details": exc.details},
    )


def register_exception_handlers(app: Any) -> None:
    """Register all MongoDB exception handlers with FastAPI app.

    Args:
        app: FastAPI application instance
    """
    app.add_exception_handler(MongoBaseException, mongo_base_exception_handler)
    app.add_exception_handler(MongoConnectionError, mongo_connection_error_handler)
    app.add_exception_handler(
        MongoDocumentNotFoundError, mongo_document_not_found_handler
    )
    app.add_exception_handler(MongoValidationError, mongo_validation_error_handler)
    app.add_exception_handler(MongoDuplicateKeyError, mongo_duplicate_key_error_handler)
    app.add_exception_handler(PasswordHistoryError, password_history_error_handler)
    app.add_exception_handler(PasswordReuseError, password_history_error_handler)
    app.add_exception_handler(PasswordHistoryLimitError, password_history_error_handler)
    app.add_exception_handler(
        InvalidPasswordDetailError, password_history_error_handler
    )
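Wiring sketch (not part of the diff), assuming a standard FastAPI application object:

# Sketch: register the handlers once at application startup.
from fastapi import FastAPI
from Services.MongoDb.Models.exception_handlers import register_exception_handlers

app = FastAPI()
register_exception_handlers(app)  # Mongo and password errors now map to JSON responses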
146  Services/MongoDb/Models/exceptions.py  Normal file
@@ -0,0 +1,146 @@
"""
Custom exceptions for MongoDB operations and password management.

This module defines custom exceptions for handling various error cases in MongoDB
operations and password-related functionality.
"""

from typing import Any, Dict, Optional
from fastapi import HTTPException, status
from ApiLibrary.common.line_number import get_line_number_for_error
from ErrorHandlers.ErrorHandlers.api_exc_handler import HTTPExceptionApi


class MongoBaseException(Exception):
    """Base exception for MongoDB-related errors."""

    def __init__(
        self,
        message: str,
        status_code: int = status.HTTP_500_INTERNAL_SERVER_ERROR,
        details: Optional[Dict[str, Any]] = None,
    ):
        self.message = message
        self.status_code = status_code
        self.details = details or {}
        super().__init__(self.message)

    def to_http_exception(self) -> HTTPException:
        """Convert to FastAPI HTTPException."""
        # Returned (not raised) so call sites can decide whether to raise it.
        return HTTPExceptionApi(
            lang="en",
            error_code=self.status_code,
            loc=get_line_number_for_error(),
            sys_msg=self.message,
        )


class MongoConnectionError(MongoBaseException):
    """Raised when there's an error connecting to MongoDB."""

    def __init__(
        self,
        message: str = "Failed to connect to MongoDB",
        details: Optional[Dict[str, Any]] = None,
    ):
        super().__init__(
            message=message,
            status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
            details=details,
        )


class MongoDocumentNotFoundError(MongoBaseException):
    """Raised when a document is not found in MongoDB."""

    def __init__(
        self,
        collection: str,
        filter_query: Dict[str, Any],
        message: Optional[str] = None,
    ):
        message = message or f"Document not found in collection '{collection}'"
        super().__init__(
            message=message,
            status_code=status.HTTP_404_NOT_FOUND,
            details={"collection": collection, "filter": filter_query},
        )


class MongoValidationError(MongoBaseException):
    """Raised when document validation fails."""

    def __init__(self, message: str, field_errors: Optional[Dict[str, str]] = None):
        super().__init__(
            message=message,
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            details={"field_errors": field_errors or {}},
        )


class MongoDuplicateKeyError(MongoBaseException):
    """Raised when trying to insert a document with a duplicate key."""

    def __init__(
        self,
        collection: str,
        key_pattern: Dict[str, Any],
        message: Optional[str] = None,
    ):
        message = message or f"Duplicate key error in collection '{collection}'"
        super().__init__(
            message=message,
            status_code=status.HTTP_409_CONFLICT,
            details={"collection": collection, "key_pattern": key_pattern},
        )


class PasswordHistoryError(MongoBaseException):
    """Base exception for password history-related errors."""

    def __init__(
        self,
        message: str,
        status_code: int = status.HTTP_400_BAD_REQUEST,
        details: Optional[Dict[str, Any]] = None,
    ):
        super().__init__(message, status_code, details)


class PasswordReuseError(PasswordHistoryError):
    """Raised when attempting to reuse a recent password."""

    def __init__(
        self,
        message: str = "Password was used recently",
        history_limit: Optional[int] = None,
    ):
        details = {"history_limit": history_limit} if history_limit else None
        super().__init__(
            message=message,
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            details=details,
        )


class PasswordHistoryLimitError(PasswordHistoryError):
    """Raised when the password history limit is reached."""

    def __init__(self, limit: int, message: Optional[str] = None):
        message = message or f"Password history limit of {limit} reached"
        super().__init__(
            message=message,
            status_code=status.HTTP_409_CONFLICT,
            details={"limit": limit},
        )


class InvalidPasswordDetailError(PasswordHistoryError):
    """Raised when password history detail is invalid."""

    def __init__(self, message: str, field_errors: Optional[Dict[str, str]] = None):
        super().__init__(
            message=message,
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            details={"field_errors": field_errors or {}},
        )
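A sketch of raising and inspecting one of the password exceptions (not part of the diff):

# Sketch: PasswordReuseError carries its HTTP status and structured details.
try:
    raise PasswordReuseError(history_limit=5)
except PasswordHistoryError as exc:
    print(exc.status_code)  # 422
    print(exc.details)      # {"history_limit": 5}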
171  Services/MongoDb/Models/mixins.py  Normal file
@@ -0,0 +1,171 @@
"""
MongoDB CRUD Operation Mixins.

This module provides mixins for common MongoDB operations:
1. Document creation (insert)
2. Document retrieval (find)
3. Document updates
4. Document deletion
5. Aggregation operations
"""

from typing import Any, Dict, List, Optional
from functools import wraps

from pymongo.collection import Collection
from pymongo.errors import (
    ConnectionFailure,
    OperationFailure,
    ServerSelectionTimeoutError,
    PyMongoError,
)

from ApiLibrary.common.line_number import get_line_number_for_error
from ErrorHandlers.ErrorHandlers.api_exc_handler import HTTPExceptionApi


def handle_mongo_errors(func):
    """Decorator to handle MongoDB operation errors.

    Catches MongoDB-specific errors and converts them to HTTPExceptionApi.
    """

    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except ConnectionFailure:
            raise HTTPExceptionApi(
                error_code="HTTP_503_SERVICE_UNAVAILABLE",
                lang="en",
                loc=get_line_number_for_error(),
                sys_msg="MongoDB connection failed",
            )
        except ServerSelectionTimeoutError:
            raise HTTPExceptionApi(
                error_code="HTTP_504_GATEWAY_TIMEOUT",
                lang="en",
                loc=get_line_number_for_error(),
                sys_msg="MongoDB connection timed out",
            )
        except OperationFailure as e:
            raise HTTPExceptionApi(
                error_code="HTTP_400_BAD_REQUEST",
                lang="en",
                loc=get_line_number_for_error(),
                sys_msg=str(e),
            )
        except PyMongoError as e:
            raise HTTPExceptionApi(
                error_code="HTTP_500_INTERNAL_SERVER_ERROR",
                lang="en",
                loc=get_line_number_for_error(),
                sys_msg=str(e),
            )

    return wrapper


class MongoInsertMixin:
    """Mixin for MongoDB insert operations."""

    @handle_mongo_errors
    def insert_one(self, collection: Collection, document: Dict[str, Any]):
        """Insert a single document into the collection."""
        result = collection.insert_one(document)
        return result

    @handle_mongo_errors
    def insert_many(self, collection: Collection, documents: List[Dict[str, Any]]):
        """Insert multiple documents into the collection."""
        result = collection.insert_many(documents)
        return result


class MongoFindMixin:
    """Mixin for MongoDB find operations."""

    @handle_mongo_errors
    def find_one(
        self,
        collection: Collection,
        filter_query: Dict[str, Any],
        projection: Optional[Dict[str, Any]] = None,
    ):
        """Find a single document in the collection."""
        result = collection.find_one(filter_query, projection)
        return result

    @handle_mongo_errors
    def find_many(
        self,
        collection: Collection,
        filter_query: Dict[str, Any],
        projection: Optional[Dict[str, Any]] = None,
        sort: Optional[List[tuple]] = None,
        limit: Optional[int] = None,
        skip: Optional[int] = None,
    ):
        """Find multiple documents in the collection with pagination support."""
        cursor = collection.find(filter_query, projection)
        if sort:
            cursor = cursor.sort(sort)
        if skip:
            cursor = cursor.skip(skip)
        if limit:
            cursor = cursor.limit(limit)
        return list(cursor)


class MongoUpdateMixin:
    """Mixin for MongoDB update operations."""

    @handle_mongo_errors
    def update_one(
        self,
        collection: Collection,
        filter_query: Dict[str, Any],
        update_data: Dict[str, Any],
        upsert: bool = False,
    ):
        """Update a single document in the collection."""
        result = collection.update_one(filter_query, update_data, upsert=upsert)
        return result

    @handle_mongo_errors
    def update_many(
        self,
        collection: Collection,
        filter_query: Dict[str, Any],
        update_data: Dict[str, Any],
        upsert: bool = False,
    ):
        """Update multiple documents in the collection."""
        result = collection.update_many(filter_query, update_data, upsert=upsert)
        return result


class MongoDeleteMixin:
    """Mixin for MongoDB delete operations."""

    @handle_mongo_errors
    def delete_one(self, collection: Collection, filter_query: Dict[str, Any]):
        """Delete a single document from the collection."""
        result = collection.delete_one(filter_query)
        return result

    @handle_mongo_errors
    def delete_many(self, collection: Collection, filter_query: Dict[str, Any]):
        """Delete multiple documents from the collection."""
        result = collection.delete_many(filter_query)
        return result


class MongoAggregateMixin:
    """Mixin for MongoDB aggregate operations."""

    @handle_mongo_errors
    def aggregate(self, collection: Collection, pipeline: List[Dict[str, Any]]):
        """Execute an aggregation pipeline on the collection."""
        result = collection.aggregate(pipeline)
        return result
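The mixins take the target collection as an explicit argument, so any class can compose them, as MongoActions does above. A minimal sketch (not part of the diff):

# Sketch: a minimal composition of the mixins; the collection comes from the caller.
class AuditStore(MongoInsertMixin, MongoFindMixin):
    pass

store = AuditStore()
# store.insert_one(audit_collection, {"event": "login"})  # audit_collection is a pymongo Collection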
85  Services/MongoDb/Models/response.py  Normal file
@@ -0,0 +1,85 @@
"""
Response handler for MongoDB query results.

This module provides a wrapper class for MongoDB query results,
adding convenience methods for accessing data and managing query state.
"""

from typing import Any, Dict, List, Optional, TypeVar, Generic, Union
from pymongo.cursor import Cursor

T = TypeVar("T")


class MongoResponse(Generic[T]):
    """
    Wrapper for MongoDB query results.

    Attributes:
        cursor: MongoDB cursor object
        first: Whether to return the first result only
        data: Query results (lazy loaded)
        count: Total count of results
    """

    def __init__(
        self,
        cursor: Optional[Cursor] = None,
        first: bool = False,
        status: bool = True,
        message: str = "",
        error: Optional[str] = None,
        data: Optional[Union[List[T], T]] = None,
    ):
        self._cursor = cursor
        self._first = first
        self.status = status
        self.message = message
        self.error = error
        self._data: Optional[Union[List[T], T]] = data
        self._count: Optional[int] = None

    @property
    def data(self) -> Union[List[T], T, None]:
        """
        Lazy load and return query results.
        Returns the first item if first=True, otherwise returns all results.
        """
        if self._data is None and self._cursor is not None:
            results = list(self._cursor)
            self._data = results[0] if self._first and results else results
        return self._data

    @property
    def count(self) -> int:
        """Lazy load and return total count of results."""
        if self._count is None:
            # Cursor.count() was removed in PyMongo 4, so count the
            # materialized results instead.
            self._count = len(self.all)
        return self._count

    @property
    def all(self) -> List[T]:
        """Get all results as a list."""
        return (
            self.data
            if isinstance(self.data, list)
            else [self.data] if self.data else []
        )

    @property
    def first(self) -> Optional[T]:
        """Get the first result only."""
        return self.data if self._first else (self.data[0] if self.data else None)

    def as_dict(self) -> Dict[str, Any]:
        """Convert response to dictionary format."""
        return {
            "status": self.status,
            "message": self.message,
            "data": self.data,
            "count": self.count,
            "error": self.error,
        }
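Usage sketch (not part of the diff); users_collection is a placeholder pymongo collection.

# Sketch: the cursor is only consumed when .data (or .count/.all) is accessed.
cursor = users_collection.find({"status": "active"})
response = MongoResponse(cursor=cursor, first=False, message="ok")
rows = response.data          # materializes the cursor here
payload = response.as_dict()  # {"status": ..., "data": ..., "count": ..., ...}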
0  Services/MongoDb/__init__.py  Normal file
192  Services/MongoDb/database.py  Normal file
@@ -0,0 +1,192 @@
"""
MongoDB database connection and operations.

This module provides MongoDB connection management with:
1. Connection pooling
2. Lifecycle management
3. Error handling
"""

from typing import Optional, Dict, Any, List, Union, Callable
from contextlib import contextmanager
from pymongo import MongoClient
from pymongo.results import InsertOneResult, DeleteResult, UpdateResult
from pymongo.cursor import Cursor
from functools import wraps

from AllConfigs.NoSqlDatabase.configs import MongoConfig


class MongoInsertMixin:
    """Mixin for MongoDB insert operations."""

    def insert_one(self, document: Dict[str, Any]) -> InsertOneResult:
        """Insert a single document."""
        return self.collection.insert_one(document)

    def insert_many(self, documents: List[Dict[str, Any]]) -> List[InsertOneResult]:
        """Insert multiple documents."""
        return self.collection.insert_many(documents)


class MongoFindMixin:
    """Mixin for MongoDB find operations."""

    def find_one(self, filter_query: Dict[str, Any]) -> Optional[Dict[str, Any]]:
        """Find a single document."""
        return self.collection.find_one(filter_query)

    def find_many(self, filter_query: Dict[str, Any]) -> Cursor:
        """Find multiple documents."""
        return self.collection.find(filter_query)


class MongoUpdateMixin:
    """Mixin for MongoDB update operations."""

    def update_one(
        self, filter_query: Dict[str, Any], update: Dict[str, Any]
    ) -> UpdateResult:
        """Update a single document."""
        return self.collection.update_one(filter_query, update)

    def update_many(
        self, filter_query: Dict[str, Any], update: Dict[str, Any]
    ) -> UpdateResult:
        """Update multiple documents."""
        return self.collection.update_many(filter_query, update)


class MongoDeleteMixin:
    """Mixin for MongoDB delete operations."""

    def delete_one(self, filter_query: Dict[str, Any]) -> DeleteResult:
        """Delete a single document."""
        return self.collection.delete_one(filter_query)

    def delete_many(self, filter_query: Dict[str, Any]) -> DeleteResult:
        """Delete multiple documents."""
        return self.collection.delete_many(filter_query)


class MongoAggregateMixin:
    """Mixin for MongoDB aggregate operations."""

    def aggregate(self, pipeline: List[Dict[str, Any]]) -> Cursor:
        """Execute an aggregation pipeline."""
        return self.collection.aggregate(pipeline)


class MongoDBHandler(
    MongoInsertMixin,
    MongoFindMixin,
    MongoUpdateMixin,
    MongoDeleteMixin,
    MongoAggregateMixin,
):
    """Handler for MongoDB operations with connection management."""

    _instance = None
    _client: Optional[MongoClient] = None

    def __new__(cls):
        """Implement singleton pattern for database connection."""
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    def __init__(self):
        """Initialize MongoDB connection if not already initialized."""
        if not self._client:
            # Build connection options
            connection_kwargs = {
                "host": MongoConfig.URL,
                "maxPoolSize": 50,  # Maximum number of connections in the pool
                "minPoolSize": 10,  # Minimum number of connections in the pool
                "maxIdleTimeMS": 30000,  # Maximum time a connection can be idle (30 seconds)
                "waitQueueTimeoutMS": 2000,  # How long a thread will wait for a connection
                "serverSelectionTimeoutMS": 5000,  # How long to wait for server selection
            }
            self._client = MongoClient(**connection_kwargs)

            # Test connection
            self._client.admin.command("ping")

    def __enter__(self):
        """Context manager entry point."""
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context manager exit point - ensures connection is properly closed."""
        try:
            if self._client:
                self._client.close()
                self._client = None
        except Exception:
            # Silently pass any errors during shutdown
            pass
        return False  # Don't suppress any exceptions

    def close(self):
        """Close MongoDB connection."""
        try:
            if self._client:
                self._client.close()
                self._client = None
        except Exception:
            # Silently pass any errors during shutdown
            pass

    @property
    def client(self) -> MongoClient:
        """Get MongoDB client."""
        return self._client

    def get_database(self, database_name: str = None):
        """Get MongoDB database."""
        db_name = database_name or MongoConfig.DATABASE_NAME
        return self._client[db_name]

    def get_collection(self, collection_name: str, database_name: str = None):
        """Get MongoDB collection."""
        database = self.get_database(database_name)
        return database[collection_name]

    # Create a function to get the singleton instance
    @classmethod
    @contextmanager
    def get_mongodb(cls):
        """Get or create the MongoDB singleton instance as a context manager."""
        instance = cls()
        try:
            yield instance
        finally:
            try:
                if instance._client:
                    instance._client.close()
                    instance._client = None
            except Exception:
                # Silently pass any errors during shutdown
                pass

    @classmethod
    def with_mongodb(cls, func: Callable):
        """Decorator to automatically handle MongoDB connection context.

        Usage:
            @MongoDBHandler.with_mongodb
            def my_function(db, *args, **kwargs):
                # db is the MongoDB instance
                pass
        """

        @wraps(func)
        def wrapper(*args, **kwargs):
            with cls.get_mongodb() as db:
                return func(db, *args, **kwargs)

        return wrapper


# Create a singleton instance for backward compatibility
mongodb = MongoDBHandler()
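Both access styles offered by the handler, sketched with a placeholder query (not part of the diff):

# Sketch: explicit context manager vs. the decorator form.
with MongoDBHandler.get_mongodb() as db:
    users = db.get_collection("users")
    print(users.find_one({"email": "john@example.com"}))

@MongoDBHandler.with_mongodb
def count_users(db) -> int:
    return db.get_collection("users").count_documents({})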
159  Services/MongoDb/how_to.py  Normal file
@@ -0,0 +1,159 @@
"""
MongoDB Operations Examples

This module provides practical examples of using MongoDB operations through our mixins.
Each example demonstrates different aspects of CRUD operations and aggregation.
"""

import arrow
from datetime import datetime

from Services.MongoDb.database import MongoDBHandler


@MongoDBHandler.with_mongodb
def insert_examples(db) -> None:
    """Examples of insert operations."""
    # Get the collections
    users_collection = db.get_collection("users")
    products_collection = db.get_collection("products")

    # Single document insert
    user_doc = {
        "username": "john_doe",
        "email": "john@example.com",
        "age": 30,
        "created_at": datetime.now(),
    }
    result = users_collection.insert_one(user_doc)
    print(f"Inserted user with ID: {result.inserted_id}")

    # Multiple documents insert
    products = [
        {"name": "Laptop", "price": 999.99, "stock": 50},
        {"name": "Mouse", "price": 29.99, "stock": 100},
        {"name": "Keyboard", "price": 59.99, "stock": 75},
    ]
    result = products_collection.insert_many(products)
    print(f"Inserted {len(result.inserted_ids)} products")


@MongoDBHandler.with_mongodb
def find_examples(db) -> None:
    """Examples of find operations."""
    # Get the collections
    users_collection = db.get_collection("users")
    products_collection = db.get_collection("products")

    # Find one document
    user = users_collection.find_one({"email": "john@example.com"})
    print(f"Found user: {user}")

    # Find many documents
    products_cursor = products_collection.find({"price": {"$lt": 100}})
    products = list(products_cursor)
    print(f"Found {len(products)} products under $100")


@MongoDBHandler.with_mongodb
def update_examples(db) -> None:
    """Examples of update operations."""
    # Get the collection
    products_collection = db.get_collection("products")

    # Update single document
    result = products_collection.update_one(
        {"name": "Laptop"}, {"$set": {"price": 899.99, "stock": 45}}
    )
    print(f"Updated {result.modified_count} laptop(s)")

    # Update multiple documents
    result = products_collection.update_many(
        {"stock": {"$lt": 10}}, {"$set": {"status": "low_stock"}}
    )
    print(f"Updated {result.modified_count} low stock products")


@MongoDBHandler.with_mongodb
def delete_examples(db) -> None:
    """Examples of delete operations."""
    # Get the collections
    users_collection = db.get_collection("users")
    products_collection = db.get_collection("products")

    # Delete single document
    result = users_collection.delete_one({"email": "john@example.com"})
    print(f"Deleted {result.deleted_count} user")

    # Delete multiple documents
    result = products_collection.delete_many({"stock": 0})
    print(f"Deleted {result.deleted_count} out-of-stock products")


@MongoDBHandler.with_mongodb
def aggregate_examples(db) -> None:
    """Examples of aggregate operations."""
    # Get the collection
    products_collection = db.get_collection("products")

    # Calculate average price by category
    pipeline = [
        {
            "$group": {
                "_id": "$category",
                "avg_price": {"$avg": "$price"},
                "total_products": {"$sum": 1},
            }
        },
        {"$sort": {"avg_price": -1}},
    ]
    results = products_collection.aggregate(pipeline)
    print("Category statistics:", list(results))


@MongoDBHandler.with_mongodb
def complex_query_example(db) -> None:
    """Example of a more complex query combining multiple operations."""
    # Get the collection
    users_collection = db.get_collection("users")

    # Find active users who made purchases in the last 30 days
    pipeline = [
        {
            "$match": {
                "status": "active",
                "last_purchase": {
                    "$gte": arrow.now().shift(days=-30).datetime,
                },
            }
        },
        {
            "$lookup": {
                "from": "orders",
                "localField": "_id",
                "foreignField": "user_id",
                "as": "recent_orders",
            }
        },
        {
            "$project": {
                "username": 1,
                "email": 1,
                "total_orders": {"$size": "$recent_orders"},
                "total_spent": {"$sum": "$recent_orders.amount"},
            }
        },
        {"$sort": {"total_spent": -1}},
    ]
    results = users_collection.aggregate(pipeline)
    print("Active users with recent purchases:", list(results))


if __name__ == "__main__":
    # Example usage of all operations
    insert_examples()
    find_examples()
    update_examples()
    delete_examples()
    aggregate_examples()
    complex_query_example()
147  Services/PostgresDb/Models/core_alchemy.py  Normal file
@@ -0,0 +1,147 @@
from typing import Type, TypeVar

from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import Session
from ApiLibrary import get_line_number_for_error
from ErrorHandlers.Exceptions.api_exc import HTTPExceptionApi

# Type variable for class methods returning self
T = TypeVar("T", bound="FilterAttributes")


class BaseAlchemyModel:
    """
    Controller of alchemy to database transactions.
    Query: Query object for model
    Session: Session object for model
    Actions: save, flush, rollback, commit
    """

    __abstract__ = True

    @classmethod
    def new_session(cls) -> Session:
        """Get database session."""
        from Services.PostgresDb.database import get_db

        with get_db() as session:
            return session

    @classmethod
    def flush(cls: Type[T], db: Session) -> T:
        """
        Flush the current session to the database.

        Args:
            db: Database session

        Returns:
            Self instance

        Raises:
            HTTPException: If database operation fails
        """
        try:
            db.flush()
            return cls
        except SQLAlchemyError as e:
            raise HTTPExceptionApi(
                error_code="HTTP_304_NOT_MODIFIED",
                lang=cls.lang or "tr",
                loc=get_line_number_for_error(),
                sys_msg=str(e),
            )

    def destroy(self: Type[T], db: Session) -> None:
        """
        Delete the record from the database.

        Args:
            db: Database session
        """
        db.delete(self)

    @classmethod
    def save_via_metadata(cls: Type[T], db: Session) -> None:
        """
        Save or rollback based on metadata.

        Args:
            db: Database session

        Raises:
            HTTPException: If save operation fails
        """
        try:
            if cls.is_created:
                db.commit()
                db.flush()
            db.rollback()
        except SQLAlchemyError as e:
            raise HTTPExceptionApi(
                error_code="HTTP_304_NOT_MODIFIED",
                lang=cls.lang or "tr",
                loc=get_line_number_for_error(),
                sys_msg=str(e),
            )

    @classmethod
    def save(cls: Type[T], db: Session) -> None:
        """
        Commit changes to database.

        Args:
            db: Database session

        Raises:
            HTTPException: If commit fails
        """
        try:
            db.commit()
        except SQLAlchemyError as e:
            raise HTTPExceptionApi(
                error_code="HTTP_304_NOT_MODIFIED",
                lang=cls.lang or "tr",
                loc=get_line_number_for_error(),
                sys_msg=str(e),
            )
        except Exception as e:
            raise HTTPExceptionApi(
                error_code="HTTP_500_INTERNAL_SERVER_ERROR",
                lang=cls.lang or "tr",
                loc=get_line_number_for_error(),
                sys_msg=str(e),
            )

    @classmethod
    def save_and_confirm(cls: Type[T], db: Session) -> None:
        """
        Save changes and mark record as confirmed.

        Args:
            db: Database session

        Raises:
            HTTPException: If operation fails
        """
        try:
            cls.save(db)
            cls.update(db, is_confirmed=True)
            cls.save(db)
        except SQLAlchemyError as e:
            raise HTTPExceptionApi(
                error_code="HTTP_304_NOT_MODIFIED",
                lang=cls.lang or "tr",
                loc=get_line_number_for_error(),
                sys_msg=str(e),
            )

    @classmethod
    def rollback(cls: Type[T], db: Session) -> None:
        """
        Rollback current transaction.

        Args:
            db: Database session
        """
        db.rollback()
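Call-pattern sketch (not part of the diff); Account is a hypothetical concrete model that mixes in BaseAlchemyModel.

# Sketch: commit with rollback on failure; "account" is an instance of a
# hypothetical Account model inheriting BaseAlchemyModel.
def rename_account(db, account, new_title: str) -> None:
    account.title = new_title
    try:
        account.save(db)        # commits; raises HTTPExceptionApi on SQLAlchemyError
    except Exception:
        account.rollback(db)
        raise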
399  Services/PostgresDb/Models/crud_alchemy.py  Normal file
@@ -0,0 +1,399 @@
|
||||
import datetime
|
||||
|
||||
from decimal import Decimal
|
||||
from typing import Any, Dict, List, Optional
|
||||
from sqlalchemy import TIMESTAMP, NUMERIC
|
||||
from sqlalchemy.orm import Session, Mapped
|
||||
from pydantic import BaseModel
|
||||
|
||||
from ApiLibrary import system_arrow, get_line_number_for_error, client_arrow
|
||||
from ErrorHandlers.Exceptions.api_exc import HTTPExceptionApi
|
||||
|
||||
from Services.PostgresDb.Models.core_alchemy import BaseAlchemyModel
|
||||
from Services.PostgresDb.Models.system_fields import SystemFields
|
||||
|
||||
|
||||
class MetaDataRow(BaseModel):
|
||||
created: Optional[bool] = False
|
||||
message: Optional[str] = None
|
||||
error_case: Optional[str] = None
|
||||
|
||||
|
||||
class Credentials(BaseModel):
|
||||
person_id: int
|
||||
person_name: str
|
||||
|
||||
|
||||
class CrudActions(SystemFields):
|
||||
|
||||
@classmethod
|
||||
def extract_system_fields(
|
||||
cls, filter_kwargs: dict, create: bool = True
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Remove system-managed fields from input dictionary.
|
||||
|
||||
Args:
|
||||
filter_kwargs: Input dictionary of fields
|
||||
create: If True, use creation field list, else use update field list
|
||||
|
||||
Returns:
|
||||
Dictionary with system fields removed
|
||||
"""
|
||||
system_fields = filter_kwargs.copy()
|
||||
extract_fields = (
|
||||
cls.__system__fields__create__ if create else cls.__system__fields__update__
|
||||
)
|
||||
for field in extract_fields:
|
||||
system_fields.pop(field, None)
|
||||
return system_fields
|
||||
|
||||
@classmethod
|
||||
def remove_non_related_inputs(cls, kwargs: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""
|
||||
Filter out inputs that don't correspond to model fields.
|
||||
|
||||
Args:
|
||||
kwargs: Dictionary of field names and values
|
||||
|
||||
Returns:
|
||||
Dictionary containing only valid model fields
|
||||
"""
|
||||
return {
|
||||
key: value
|
||||
for key, value in kwargs.items()
|
||||
if key in cls.columns + cls.hybrid_properties + cls.settable_relations
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def iterate_over_variables(cls, val: Any, key: str) -> tuple[bool, Optional[Any]]:
|
||||
"""
|
||||
Process a field value based on its type and convert it to the appropriate format.
|
||||
|
||||
Args:
|
||||
val: Field value
|
||||
key: Field name
|
||||
|
||||
Returns:
|
||||
Tuple of (should_include, processed_value)
|
||||
"""
|
||||
key_ = cls.__annotations__.get(key, None)
|
||||
is_primary = key in cls.primary_keys
|
||||
row_attr = bool(getattr(getattr(cls, key), "foreign_keys", None))
|
||||
|
||||
# Skip primary keys and foreign keys
|
||||
if is_primary or row_attr:
|
||||
return False, None
|
||||
|
||||
# Handle None values
|
||||
if val is None:
|
||||
return True, None
|
||||
|
||||
# Special handling for UUID fields
|
||||
if str(key[-5:]).lower() == "uu_id":
|
||||
return True, str(val)
|
||||
|
||||
# Handle typed fields
|
||||
if key_:
|
||||
if key_ == Mapped[int]:
|
||||
return True, int(val)
|
||||
elif key_ == Mapped[bool]:
|
||||
return True, bool(val)
|
||||
elif key_ == Mapped[float] or key_ == Mapped[NUMERIC]:
|
||||
return True, round(float(val), 3)
|
||||
elif key_ == Mapped[TIMESTAMP]:
|
||||
return True, str(
|
||||
system_arrow.get(str(val)).format("YYYY-MM-DD HH:mm:ss ZZ")
|
||||
)
|
||||
elif key_ == Mapped[str]:
|
||||
return True, str(val)
|
||||
|
||||
# Handle based on Python types
|
||||
else:
|
||||
if isinstance(val, datetime.datetime):
|
||||
return True, str(
|
||||
system_arrow.get(str(val)).format("YYYY-MM-DD HH:mm:ss ZZ")
|
||||
)
|
||||
elif isinstance(val, bool):
|
||||
return True, bool(val)
|
||||
elif isinstance(val, (float, Decimal)):
|
||||
return True, round(float(val), 3)
|
||||
elif isinstance(val, int):
|
||||
return True, int(val)
|
||||
elif isinstance(val, str):
|
||||
return True, str(val)
|
||||
elif val is None:
|
||||
return True, None
|
||||
|
||||
return False, None
|
||||
|
||||
def get_dict(
|
||||
self,
|
||||
exclude: Optional[List[str]] = None,
|
||||
include: Optional[List[str]] = None,
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Convert model instance to dictionary with customizable fields.
|
||||
|
||||
Args:
|
||||
exclude: List of fields to exclude
|
||||
include: List of fields to include (takes precedence over exclude)
|
||||
|
||||
Returns:
|
||||
Dictionary representation of the model
|
||||
"""
|
||||
return_dict: Dict[str, Any] = {}
|
||||
|
||||
if include:
|
||||
# Handle explicitly included fields
|
||||
exclude_list = [
|
||||
element
|
||||
for element in self.__system_default_model__
|
||||
if str(element)[-2:] == "id" and str(element)[-5:].lower() == "uu_id"
|
||||
]
|
||||
columns_include_list = list(set(include).difference(set(exclude_list)))
|
||||
columns_include_list.extend(["uu_id"])
|
||||
|
||||
for key in columns_include_list:
|
||||
val = getattr(self, key)
|
||||
correct, value_of_database = self.iterate_over_variables(val, key)
|
||||
if correct:
|
||||
return_dict[key] = value_of_database
|
||||
|
||||
elif exclude:
|
||||
# Handle explicitly excluded fields
|
||||
exclude.extend(
|
||||
list(
|
||||
set(getattr(self, "__exclude__fields__", []) or []).difference(
|
||||
exclude
|
||||
)
|
||||
)
|
||||
)
|
||||
exclude.extend(
|
||||
[
|
||||
element
|
||||
for element in self.__system_default_model__
|
||||
if str(element)[-2:] == "id"
|
||||
]
|
||||
)
|
||||
|
||||
columns_excluded_list = list(set(self.columns).difference(set(exclude)))
|
||||
columns_excluded_list.extend(["uu_id", "active"])
|
||||
|
||||
for key in columns_excluded_list:
|
||||
val = getattr(self, key)
|
||||
correct, value_of_database = self.iterate_over_variables(val, key)
|
||||
if correct:
|
||||
return_dict[key] = value_of_database
|
||||
else:
|
||||
# Handle default field selection
|
||||
exclude_list = (getattr(self, "__exclude__fields__", []) or []) + list(
|
||||
self.__system_default_model__
|
||||
)
|
||||
columns_list = list(set(self.columns).difference(set(exclude_list)))
|
||||
columns_list = [col for col in columns_list if str(col)[-2:] != "id"]
|
||||
columns_list.extend(
|
||||
[col for col in self.columns if str(col)[-5:].lower() == "uu_id"]
|
||||
)
|
||||
|
||||
for remove_field in self.__system_default_model__:
|
||||
if remove_field in columns_list:
|
||||
columns_list.remove(remove_field)
|
||||
|
||||
for key in columns_list:
|
||||
val = getattr(self, key)
|
||||
correct, value_of_database = self.iterate_over_variables(val, key)
|
||||
if correct:
|
||||
return_dict[key] = value_of_database
|
||||
|
||||
return return_dict
|
||||
|
||||
|
||||
class CRUDModel(BaseAlchemyModel, CrudActions):

    __abstract__ = True

    meta_data: MetaDataRow
    creds: Credentials = None

    @property
    def is_created(self):
        return self.meta_data.created

    @classmethod
    def create_credentials(cls, record_created) -> None:
        """
        Save user credentials for tracking.

        Args:
            record_created: Record that was created or updated
        """
        if getattr(cls.creds, "person_id", None) and getattr(
            cls.creds, "person_name", None
        ):
            record_created.created_by_id = cls.creds.person_id
            record_created.created_by = cls.creds.person_name
        return

    @classmethod
    def update_metadata(
        cls, created: bool, error_case: str = None, message: str = None
    ) -> None:
        cls.meta_data = MetaDataRow(
            created=created, error_case=error_case, message=message
        )

    @classmethod
    def raise_exception(cls):
        raise HTTPExceptionApi(
            error_code=cls.meta_data.error_case,
            lang=cls.lang,
            loc=get_line_number_for_error(),
            sys_msg=cls.meta_data.message,
        )

    @classmethod
    def create_or_abort(cls, db: Session, **kwargs):
        """
        Create a new record or abort if it already exists.

        Args:
            db: Database session
            **kwargs: Record fields

        Returns:
            New record if successfully created
        """
        check_kwargs = cls.extract_system_fields(kwargs)

        # Search for existing record
        query = db.query(cls).filter(
            cls.expiry_ends > str(system_arrow.now()),
            cls.expiry_starts <= str(system_arrow.now()),
        )

        for key, value in check_kwargs.items():
            if hasattr(cls, key):
                query = query.filter(getattr(cls, key) == value)

        already_record = query.first()
        # Handle existing record
        if already_record:
            if already_record.deleted:
                cls.update_metadata(created=False, error_case="DeletedRecord")
                cls.raise_exception()
            elif not already_record.is_confirmed:
                cls.update_metadata(created=False, error_case="IsNotConfirmed")
                cls.raise_exception()
            cls.update_metadata(created=False, error_case="AlreadyExists")
            cls.raise_exception()

        # Create new record
        check_kwargs = cls.remove_non_related_inputs(check_kwargs)
        created_record = cls()
        for key, value in check_kwargs.items():
            setattr(created_record, key, value)
        cls.create_credentials(created_record)
        db.add(created_record)
        db.flush()
        cls.update_metadata(created=True)
        return created_record
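
    # A minimal sketch (hypothetical model `User`) of the create-or-abort flow;
    # an existing live record raises HTTPExceptionApi instead of returning:
    #
    #     user = User.create_or_abort(db, email="jane@example.com", name="Jane")
    #     db.commit()
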
    @classmethod
    def find_or_create(cls, db: Session, **kwargs):
        """
        Find an existing record matching the criteria or create a new one.

        Args:
            db: Database session
            **kwargs: Search/creation criteria

        Returns:
            Existing or newly created record
        """
        check_kwargs = cls.extract_system_fields(kwargs)

        # Search for existing record
        query = db.query(cls).filter(
            cls.expiry_ends > str(system_arrow.now()),
            cls.expiry_starts <= str(system_arrow.now()),
        )

        for key, value in check_kwargs.items():
            if hasattr(cls, key):
                query = query.filter(getattr(cls, key) == value)

        already_record = query.first()
        # Handle existing record
        if already_record:
            if already_record.deleted:
                cls.update_metadata(created=False, error_case="DeletedRecord")
                return already_record
            elif not already_record.is_confirmed:
                cls.update_metadata(created=False, error_case="IsNotConfirmed")
                return already_record
            cls.update_metadata(created=False, error_case="AlreadyExists")
            return already_record

        # Create new record
        check_kwargs = cls.remove_non_related_inputs(check_kwargs)
        created_record = cls()
        for key, value in check_kwargs.items():
            setattr(created_record, key, value)
        cls.create_credentials(created_record)
        db.add(created_record)
        db.flush()
        cls.update_metadata(created=True)
        return created_record
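
    # A usage sketch (hypothetical model `Tag`): unlike create_or_abort, an
    # existing record is returned instead of raising, and meta_data records why:
    #
    #     tag = Tag.find_or_create(db, name="billing")
    #     if not tag.is_created:
    #         print(tag.meta_data.error_case)  # e.g. "AlreadyExists"
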
    def update(self, db: Session, **kwargs):
        """
        Update the record with new values.

        Args:
            db: Database session
            **kwargs: Fields to update

        Returns:
            Updated record

        Raises:
            ValueError: If attempting to update is_confirmed with other fields
        """
        check_kwargs = self.remove_non_related_inputs(kwargs)
        check_kwargs = self.extract_system_fields(check_kwargs, create=False)

        for key, value in check_kwargs.items():
            setattr(self, key, value)

        self.update_credentials(kwargs=kwargs)
        db.flush()
        return self

    def update_credentials(self, kwargs: Dict[str, Any]) -> None:
        """
        Save user credentials for tracking.

        Args:
            kwargs: The fields that were passed to the update call
        """
        # Update confirmation or modification tracking
        is_confirmed_argument = kwargs.get("is_confirmed", None)

        if is_confirmed_argument and len(kwargs) != 1:
            raise ValueError("Confirm field cannot be updated with other fields")

        if is_confirmed_argument:
            if getattr(self.creds, "person_id", None) and getattr(
                self.creds, "person_name", None
            ):
                self.confirmed_by_id = self.creds.person_id
                self.confirmed_by = self.creds.person_name
        else:
            if getattr(self.creds, "person_id", None) and getattr(
                self.creds, "person_name", None
            ):
                self.updated_by_id = self.creds.person_id
                self.updated_by = self.creds.person_name
        return
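
# A usage sketch (hypothetical instance `invoice`): is_confirmed must be
# updated on its own; combining it with other fields raises ValueError:
#
#     invoice.update(db, amount=100)         # tracked via updated_by
#     invoice.update(db, is_confirmed=True)  # tracked via confirmed_by
#     invoice.update(db, is_confirmed=True, amount=2)  # raises ValueError
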
180
Services/PostgresDb/Models/filter_functions.py
Normal file
180
Services/PostgresDb/Models/filter_functions.py
Normal file
@@ -0,0 +1,180 @@
"""
Advanced filtering functionality for SQLAlchemy models.

This module provides a comprehensive set of filtering capabilities for SQLAlchemy models,
including pagination, ordering, and complex query building.
"""

from __future__ import annotations
from typing import Any, TypeVar, Type

from sqlalchemy.orm import Query, Session
from sqlalchemy.sql.elements import BinaryExpression
from sqlalchemy_mixins.smartquery import SmartQueryMixin

from Services.PostgresDb.Models.response import PostgresResponse
from Services.PostgresDb.Models_old.base_model import BaseModel

from ApiLibrary import system_arrow


T = TypeVar("T", bound="FilterAttributes")


class ArgumentModel:

    __abstract__ = True

    @classmethod
    def _query(cls: Type[T], db: Session) -> Query:
        """Returns the query to use in the model."""
        return cls.pre_query if cls.pre_query else db.query(cls)

    @classmethod
    def add_new_arg_to_args(cls: Type[T], args_list, argument, value):
        new_arg_list = list(
            set(
                args_
                for args_ in list(args_list)
                if isinstance(args_, BinaryExpression)
            )
        )
        arg_left = lambda arg_obj: getattr(getattr(arg_obj, "left", None), "key", None)
        # arg_right = lambda arg_obj: getattr(getattr(arg_obj, "right", None), "value", None)
        if not any(True for arg in new_arg_list if arg_left(arg_obj=arg) == argument):
            new_arg_list.append(value)
        return tuple(new_arg_list)

    @classmethod
    def get_not_expired_query_arg(cls: Type[T], arg):
        """Add expiry_starts and expiry_ends conditions to the query."""
        starts = cls.expiry_starts <= str(system_arrow.now())
        ends = cls.expiry_ends > str(system_arrow.now())
        arg = cls.add_new_arg_to_args(arg, "expiry_ends", ends)
        arg = cls.add_new_arg_to_args(arg, "expiry_starts", starts)
        return arg

    @classmethod
    def get_active_and_confirmed_query_arg(cls: Type[T], arg):
        """Add active, confirmed, and not-deleted conditions to the query."""
        arg = cls.add_new_arg_to_args(arg, "is_confirmed", cls.is_confirmed == True)
        arg = cls.add_new_arg_to_args(arg, "active", cls.active == True)
        arg = cls.add_new_arg_to_args(arg, "deleted", cls.deleted == False)
        return arg

class QueryModel(ArgumentModel, BaseModel, SmartQueryMixin):

    pre_query = None
    __abstract__ = True

    @classmethod
    def produce_query_to_add(cls: Type[T], filter_list):
        # Accumulate one expression per field, deduplicated by column key
        args = tuple()
        if filter_list.get("query"):
            for smart_iter in cls.filter_expr(**filter_list["query"]):
                if key := getattr(getattr(smart_iter, "left", None), "key", None):
                    args = cls.add_new_arg_to_args(args, key, smart_iter)
        return args

    @classmethod
    def convert(
        cls: Type[T], smart_options: dict, validate_model: Any = None
    ) -> tuple[BinaryExpression, ...]:
        if not validate_model:
            return tuple(cls.filter_expr(**smart_options))

    @classmethod
    def filter_by_one(
        cls: Type[T], db: Session, system: bool = False, **kwargs
    ) -> PostgresResponse:
        """
        Filter single record by keyword arguments.

        Args:
            db: Database session
            system: If True, skip status filtering
            **kwargs: Filter criteria

        Returns:
            Query response with single record
        """
        if "is_confirmed" not in kwargs and not system:
            kwargs["is_confirmed"] = True
        kwargs.pop("system", None)
        query = cls._query(db).filter_by(**kwargs)
        return PostgresResponse(pre_query=cls._query(db), query=query, is_array=False)

    @classmethod
    def filter_one(
        cls: Type[T],
        *args: Any,
        db: Session,
        system: bool = False,
        expired: bool = False,
    ) -> PostgresResponse:
        """
        Filter single record by expressions.

        Args:
            db: Database session
            args: Filter expressions
            system: If True, skip status filtering
            expired: If True, include expired records

        Returns:
            Query response with single record
        """
        if not system:
            args = cls.get_active_and_confirmed_query_arg(args)
        if not expired:
            args = cls.get_not_expired_query_arg(args)
        query = cls._query(db).filter(*args)
        return PostgresResponse(pre_query=cls._query(db), query=query, is_array=False)

    @classmethod
    def filter_all_system(
        cls: Type[T], *args: BinaryExpression, db: Session
    ) -> PostgresResponse:
        """
        Filter multiple records by expressions without status filtering.

        Args:
            db: Database session
            args: Filter expressions

        Returns:
            Query response with matching records
        """
        query = cls._query(db)
        query = query.filter(*args)
        return PostgresResponse(pre_query=cls._query(db), query=query, is_array=True)

    @classmethod
    def filter_all(cls: Type[T], *args: Any, db: Session) -> PostgresResponse:
        """
        Filter multiple records by expressions.

        Args:
            db: Database session
            args: Filter expressions

        Returns:
            Query response with matching records
        """
        args = cls.get_active_and_confirmed_query_arg(args)
        args = cls.get_not_expired_query_arg(args)
        query = cls._query(db).filter(*args)
        return PostgresResponse(pre_query=cls._query(db), query=query, is_array=True)

    @classmethod
    def filter_by_all_system(cls: Type[T], db: Session, **kwargs) -> PostgresResponse:
        """
        Filter multiple records by keyword arguments.

        Args:
            db: Database session
            **kwargs: Filter criteria

        Returns:
            Query response with matching records
        """
        query = cls._query(db).filter_by(**kwargs)
        return PostgresResponse(pre_query=cls._query(db), query=query, is_array=True)
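
# A usage sketch (hypothetical model `Order`): filter_all adds the
# active/confirmed/not-deleted and expiry guards automatically, while the
# *_system variants skip them:
#
#     resp = Order.filter_all(Order.total > 100, db=db)
#     rows = resp.data_as_dict
#     raw = Order.filter_all_system(db=db).data  # includes deleted/unconfirmed
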
2
Services/PostgresDb/Models/language_alchemy.py
Normal file
2
Services/PostgresDb/Models/language_alchemy.py
Normal file
@@ -0,0 +1,2 @@
class LanguageModel:
    __language_model__ = None
172
Services/PostgresDb/Models/mixin.py
Normal file
172
Services/PostgresDb/Models/mixin.py
Normal file
@@ -0,0 +1,172 @@
from sqlalchemy import (
    TIMESTAMP,
    NUMERIC,
    func,
    text,
    UUID,
    String,
    Integer,
    Boolean,
    SmallInteger,
)
from sqlalchemy.orm import Mapped, mapped_column
from sqlalchemy_mixins.serialize import SerializeMixin
from sqlalchemy_mixins.repr import ReprMixin

from Services.PostgresDb.Models.crud_alchemy import CRUDModel
from Services.PostgresDb.Models.filter_functions import QueryModel


class BasicMixin(CRUDModel, QueryModel):

    __abstract__ = True
    __repr__ = ReprMixin.__repr__


class CrudMixin(BasicMixin, SerializeMixin, ReprMixin):
    """
    Base mixin providing CRUD operations and common fields for PostgreSQL models.

    Features:
    - Automatic timestamps (created_at, updated_at)
    - Soft delete capability
    - User tracking (created_by, updated_by)
    - Data serialization
    - Multi-language support
    """

    __abstract__ = True

    # Primary and reference fields
    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    uu_id: Mapped[str] = mapped_column(
        UUID,
        server_default=text("gen_random_uuid()"),
        index=True,
        unique=True,
        comment="Unique identifier UUID",
    )

    # Common timestamp fields for all models
    expiry_starts: Mapped[TIMESTAMP] = mapped_column(
        type_=TIMESTAMP(timezone=True),
        server_default=func.now(),
        nullable=False,
        comment="Record validity start timestamp",
    )
    expiry_ends: Mapped[TIMESTAMP] = mapped_column(
        type_=TIMESTAMP(timezone=True),
        default="2099-12-31",
        server_default="2099-12-31",
        comment="Record validity end timestamp",
    )


class BaseCollection(CrudMixin):
    """Base model class with minimal fields."""

    __abstract__ = True
    __repr__ = ReprMixin.__repr__


class CrudCollection(CrudMixin):
    """
    Full-featured model class with all common fields.

    Includes:
    - UUID and reference ID
    - Timestamps
    - User tracking
    - Confirmation status
    - Soft delete
    - Notification flags
    """

    __abstract__ = True
    __repr__ = ReprMixin.__repr__

    ref_id: Mapped[str] = mapped_column(
        String(100), nullable=True, index=True, comment="External reference ID"
    )

    # Timestamps
    created_at: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True),
        server_default=func.now(),
        nullable=False,
        index=True,
        comment="Record creation timestamp",
    )
    updated_at: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True),
        server_default=func.now(),
        onupdate=func.now(),
        nullable=False,
        index=True,
        comment="Last update timestamp",
    )

    # Cryptographic and user tracking
    cryp_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, index=True, comment="Cryptographic UUID"
    )
    created_by: Mapped[str] = mapped_column(
        String, nullable=True, comment="Creator name"
    )
    created_by_id: Mapped[int] = mapped_column(
        Integer, nullable=True, comment="Creator ID"
    )
    updated_by: Mapped[str] = mapped_column(
        String, nullable=True, comment="Last modifier name"
    )
    updated_by_id: Mapped[int] = mapped_column(
        Integer, nullable=True, comment="Last modifier ID"
    )
    confirmed_by: Mapped[str] = mapped_column(
        String, nullable=True, comment="Confirmer name"
    )
    confirmed_by_id: Mapped[int] = mapped_column(
        Integer, nullable=True, comment="Confirmer ID"
    )

    # Status flags
    is_confirmed: Mapped[bool] = mapped_column(
        Boolean, server_default="0", comment="Record confirmation status"
    )
    replication_id: Mapped[int] = mapped_column(
        SmallInteger, server_default="0", comment="Replication identifier"
    )
    deleted: Mapped[bool] = mapped_column(
        Boolean, server_default="0", comment="Soft delete flag"
    )
    active: Mapped[bool] = mapped_column(
        Boolean, server_default="1", comment="Record active status"
    )
    is_notification_send: Mapped[bool] = mapped_column(
        Boolean, server_default="0", comment="Notification sent flag"
    )
    is_email_send: Mapped[bool] = mapped_column(
        Boolean, server_default="0", comment="Email sent flag"
    )

    # @classmethod
    # def retrieve_language_model(cls, lang: str, response_model: Any) -> Dict[str, str]:
    #     """
    #     Retrieve language-specific model headers and validation messages.
    #
    #     Args:
    #         lang: Language code
    #         response_model: Model containing language annotations
    #
    #     Returns:
    #         Dictionary of field names to localized headers
    #     """
    #     headers_and_validation = {}
    #     __language_model__ = getattr(cls.__language_model__, lang, "tr")
    #
    #     for field in response_model.__annotations__.keys():
    #         headers_and_validation[field] = getattr(
    #             __language_model__, field, "Lang Not found"
    #         )
    #
    #     return headers_and_validation
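
# A minimal sketch (hypothetical table name and column) of declaring a concrete
# model on top of CrudCollection, which supplies id/uu_id, timestamps, expiry,
# user tracking, and the status flags:
#
#     class Customer(CrudCollection):
#         __tablename__ = "customers"
#
#         name: Mapped[str] = mapped_column(String(200), comment="Customer name")
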
185
Services/PostgresDb/Models/pagination.py
Normal file
185
Services/PostgresDb/Models/pagination.py
Normal file
@@ -0,0 +1,185 @@
from __future__ import annotations
from typing import Any, Dict, Optional, Union
from sqlalchemy import desc, asc
from pydantic import BaseModel
from AllConfigs.SqlDatabase.configs import PaginateConfig
from Services.PostgresDb.Models.response import PostgresResponse


class PaginationConfig(BaseModel):
    """
    Configuration for pagination settings.

    Attributes:
        page: Current page number (default: 1)
        size: Items per page (default: 10)
        order_field: Fields to order by (default: ["uu_id"])
        order_type: Order directions (default: ["asc"])
    """

    page: int = 1
    size: int = 10
    order_field: Optional[Union[tuple[str], list[str]]] = None
    order_type: Optional[Union[tuple[str], list[str]]] = None

    def __init__(self, **data):
        super().__init__(**data)
        if self.order_field is None:
            self.order_field = ["uu_id"]
        if self.order_type is None:
            self.order_type = ["asc"]


class Pagination:
    """
    Handles pagination logic for query results.

    Manages page size, current page, ordering, and calculates total pages
    and items based on the data source.

    Attributes:
        DEFAULT_SIZE: Default number of items per page
        MIN_SIZE: Minimum allowed page size
        MAX_SIZE: Maximum allowed page size
    """

    DEFAULT_SIZE = PaginateConfig.DEFAULT_SIZE
    MIN_SIZE = PaginateConfig.MIN_SIZE
    MAX_SIZE = PaginateConfig.MAX_SIZE

    def __init__(self, data: PostgresResponse):
        self.data = data
        self.size: int = self.DEFAULT_SIZE
        self.page: int = 1
        self.orderField: Optional[Union[tuple[str], list[str]]] = ["uu_id"]
        self.orderType: Optional[Union[tuple[str], list[str]]] = ["asc"]
        self.page_count: int = 1
        self.total_count: int = 0
        self.all_count: int = 0
        self.total_pages: int = 1
        self._update_page_counts()

    def change(self, **kwargs) -> None:
        """Update pagination settings from config."""
        config = PaginationConfig(**kwargs)
        self.size = (
            config.size
            if self.MIN_SIZE <= config.size <= self.MAX_SIZE
            else self.DEFAULT_SIZE
        )
        self.page = config.page
        self.orderField = config.order_field
        self.orderType = config.order_type
        self._update_page_counts()

    def feed(self, data: PostgresResponse) -> None:
        """Recalculate pagination based on a new data source."""
        self.data = data
        self._update_page_counts()

    def _update_page_counts(self) -> None:
        """Update page counts and clamp the current page into range."""
        if self.data:
            self.total_count = self.data.count
            self.all_count = self.data.total_count

        self.size = (
            self.size
            if self.MIN_SIZE <= self.size <= self.MAX_SIZE
            else self.DEFAULT_SIZE
        )
        self.total_pages = max(1, (self.total_count + self.size - 1) // self.size)
        self.page = max(1, min(self.page, self.total_pages))
        self.page_count = (
            self.total_count % self.size
            if self.page == self.total_pages and self.total_count % self.size
            else self.size
        )

    def refresh(self) -> None:
        """Recalculate pagination from the current state."""
        self._update_page_counts()

    def reset(self) -> None:
        """Reset pagination state to defaults."""
        self.size = self.DEFAULT_SIZE
        self.page = 1
        self.orderField = ["uu_id"]
        self.orderType = ["asc"]

    def as_dict(self) -> Dict[str, Any]:
        """Convert pagination state to dictionary format."""
        self.refresh()
        return {
            "size": self.size,
            "page": self.page,
            "allCount": self.all_count,
            "totalCount": self.total_count,
            "totalPages": self.total_pages,
            "pageCount": self.page_count,
            "order_field": self.orderField,
            "order_type": self.orderType,
        }

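# A usage sketch (hypothetical response object `resp` from a filter_all call)
# showing how page/size/ordering flow into the serialized envelope:
#
#     pagination = Pagination(resp)
#     pagination.change(page=2, size=20, order_field=["created_at"], order_type=["desc"])
#     print(pagination.as_dict()["totalPages"])
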
class PaginationResult:
    """
    Result of a paginated query.

    Contains the query result and pagination state.

    Args:
        data: PostgresResponse of query results
        pagination: Pagination state

    Attributes:
        _query: Original query object
        pagination: Pagination state
    """

    def __init__(self, data: PostgresResponse, pagination: Pagination):
        self._query = data.query
        self.pagination = pagination
        self.response_type = data.is_list
        self.limit = self.pagination.size
        self.offset = self.pagination.size * (self.pagination.page - 1)
        self.order_by = self.pagination.orderField

    def dynamic_order_by(self):
        """
        Dynamically order the query by multiple fields.

        Returns:
            Ordered query object.
        """
        if len(self.order_by) != len(self.pagination.orderType):
            raise ValueError(
                "Order by fields and order types must have the same length."
            )
        order_criteria = zip(self.order_by, self.pagination.orderType)
        entity = self._query.column_descriptions[0]["entity"]
        for field, direction in order_criteria:
            if hasattr(entity, field):
                if direction.lower().startswith("d"):
                    self._query = self._query.order_by(desc(getattr(entity, field)))
                else:
                    self._query = self._query.order_by(asc(getattr(entity, field)))
        return self._query

    @property
    def data(self) -> Union[list, dict]:
        """Execute the ordered, paginated query and serialize the results."""
        query_ordered = self.dynamic_order_by()
        query_paginated = query_ordered.limit(self.limit).offset(self.offset)
        queried_data = (
            query_paginated.all() if self.response_type else query_paginated.first()
        )
        return (
            [result.get_dict() for result in queried_data]
            if self.response_type
            else (queried_data.get_dict() if queried_data else None)
        )
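
# A usage sketch (hypothetical model `Order`): PaginationResult applies
# ordering, limit, and offset lazily when .data is read:
#
#     resp = Order.filter_all(db=db)
#     pagination = Pagination(resp)
#     rows = PaginationResult(resp, pagination).data  # list of dicts
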
91
Services/PostgresDb/Models/response.py
Normal file
91
Services/PostgresDb/Models/response.py
Normal file
@@ -0,0 +1,91 @@
"""
Response handler for PostgreSQL query results.

This module provides a wrapper class for SQLAlchemy query results,
adding convenience methods for accessing data and managing query state.
"""

from typing import Any, Dict, Optional, TypeVar, Generic, Union
from sqlalchemy.orm import Query

T = TypeVar("T")


class PostgresResponse(Generic[T]):
    """
    Wrapper for PostgreSQL/SQLAlchemy query results.

    Attributes:
        query: SQLAlchemy query object
        metadata: Additional metadata for the query

    Properties:
        count: Total count of results
        query: Get query object
        as_dict: Convert response to dictionary format
    """

    def __init__(
        self,
        pre_query: Query,
        query: Query,
        is_array: bool = True,
        metadata: Any = None,
    ):
        self._is_list = is_array
        self._query = query
        self._pre_query = pre_query
        self._count: Optional[int] = None
        self.metadata = metadata

    @property
    def data(self) -> Union[T, list[T]]:
        """Get query results."""
        if not self.is_list:
            return self._query.first()
        return self._query.all() or []

    @property
    def data_as_dict(self) -> Union[Dict[str, Any], list[Dict[str, Any]]]:
        """Get query results as a dictionary (or list of dictionaries)."""
        if not self.is_list:
            first_item = self._query.first()
            return first_item.get_dict() if first_item else None
        all_items = self._query.all()
        return [result.get_dict() for result in all_items] if all_items else []

    @property
    def total_count(self) -> int:
        """Return the unfiltered total count of records."""
        if self.is_list:
            return self._pre_query.count() if self._pre_query else 0
        return 1

    @property
    def count(self) -> int:
        """Lazy load and return the count of filtered results."""
        if self.is_list and self._count is None:
            self._count = self._query.count()
        elif not self.is_list:
            self._count = 1
        return self._count

    @property
    def query(self) -> Query:
        """Get query object."""
        return self._query

    @property
    def is_list(self) -> bool:
        """Check if response is a list."""
        return self._is_list

    def as_dict(self) -> Dict[str, Any]:
        """Convert response to dictionary format."""
        return {
            "metadata": self.metadata,
            "is_list": self._is_list,
            "query": self.query,
            "count": self.count,
        }
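
# A usage sketch (hypothetical query `q` over a model with get_dict): the same
# wrapper serves single and list results depending on is_array:
#
#     one = PostgresResponse(pre_query=q, query=q.filter_by(id=1), is_array=False)
#     one.data           # model instance or None
#     many = PostgresResponse(pre_query=q, query=q, is_array=True)
#     many.data_as_dict  # list of dicts, [] when empty
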
50
Services/PostgresDb/Models/system_fields.py
Normal file
50
Services/PostgresDb/Models/system_fields.py
Normal file
@@ -0,0 +1,50 @@
class SystemFields:

    __abstract__ = True

    # System fields that should be handled automatically during creation
    __system__fields__create__ = (
        "created_at",
        "updated_at",
        "cryp_uu_id",
        "created_by",
        "created_by_id",
        "updated_by",
        "updated_by_id",
        "replication_id",
        "confirmed_by",
        "confirmed_by_id",
        "is_confirmed",
        "deleted",
        "active",
        "is_notification_send",
        "is_email_send",
    )

    # System fields that should be handled automatically during updates
    __system__fields__update__ = (
        "cryp_uu_id",
        "created_at",
        "updated_at",
        "created_by",
        "created_by_id",
        "confirmed_by",
        "confirmed_by_id",
        "updated_by",
        "updated_by_id",
        "replication_id",
    )

    # Default fields to exclude from serialization
    __system_default_model__ = (
        "cryp_uu_id",
        "is_confirmed",
        "deleted",
        "is_notification_send",
        "replication_id",
        "is_email_send",
        "confirmed_by_id",
        "confirmed_by",
        "updated_by_id",
        "created_by_id",
    )
39
Services/PostgresDb/Models/token.py
Normal file
39
Services/PostgresDb/Models/token.py
Normal file
@@ -0,0 +1,39 @@
from typing import TypeVar, Dict, Any
from dataclasses import dataclass
from ApiLibrary import get_line_number_for_error
from ErrorHandlers.Exceptions.api_exc import HTTPExceptionApi

# Type variable for class methods returning self
T = TypeVar("T", bound="FilterAttributes")


@dataclass
class TokenModel:
    lang: str
    credentials: Dict[str, str]
    timezone: str

    def __post_init__(self):
        self.lang = str(self.lang or "tr").lower()
        self.credentials = self.credentials or {}
        if "GMT" in self.timezone:
            raise HTTPExceptionApi(
                error_code="HTTP_400_BAD_REQUEST",
                lang=self.lang,
                loc=get_line_number_for_error(),
                sys_msg="Invalid timezone format",
            )

    @classmethod
    def set_user_define_properties(cls, token: Any) -> None:
        """
        Set user-specific properties from the authentication token.

        Args:
            token: Authentication token containing user preferences
        """
        from ApiLibrary.date_time_actions.date_functions import DateTimeLocal

        cls.credentials = token.credentials
        cls.client_arrow = DateTimeLocal(is_client=True, timezone=token.timezone)
        cls.lang = str(token.lang).lower()
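
# A usage sketch (hypothetical header values): IANA-style zone names pass,
# while "GMT+3"-style values are rejected in __post_init__:
#
#     token = TokenModel(lang="EN", credentials={}, timezone="Europe/Istanbul")
#     TokenModel(lang="en", credentials={}, timezone="GMT+3")  # raises HTTPExceptionApi
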
372
Services/PostgresDb/Models_old/alchemy_response.py
Normal file
372
Services/PostgresDb/Models_old/alchemy_response.py
Normal file
@@ -0,0 +1,372 @@
"""
Response handlers for SQLAlchemy query results with pagination support.

This module provides a set of response classes for handling different types of data:
- Single PostgreSQL records
- Multiple SQLAlchemy records
- List data
- Dictionary data

Each response includes pagination information and supports data transformation
through response models.
"""

from __future__ import annotations
from typing import Any, Dict, List, Optional, Type, TypeVar, Protocol, Generic
from dataclasses import dataclass

from fastapi import status
from fastapi.responses import JSONResponse

from ApiLibrary.common.line_number import get_line_number_for_error
from Services.PostgresDb.Models.response import PostgresResponse
from ErrorHandlers.ErrorHandlers.api_exc_handler import HTTPExceptionApi
from Services.pagination import Pagination, PaginationConfig


T = TypeVar("T")
DataT = TypeVar("DataT")


@dataclass
class ResponseConfig(Generic[T]):
    """Configuration for response formatting.

    Attributes:
        status_code: HTTP status code (default: "HTTP_200_OK")
        message: Response message to include in the response
        completed: Operation completion status flag
        cls_object: Class object for error handling context
        response_model: Optional response model class for data transformation
    """

    status_code: str = "HTTP_200_OK"
    message: str = ""
    completed: bool = True
    cls_object: Optional[Any] = None
    response_model: Optional[Type[T]] = None


class ResponseProtocol(Protocol):
    """Protocol defining required methods for response models."""

    def dump(self) -> Dict[str, Any]:
        """Convert model to dictionary format."""
        ...


class BaseJsonResponse(Generic[T]):
    """Base class for JSON response handling.

    Provides common functionality for all response types including:
    - Response formatting with consistent structure
    - Pagination handling and configuration
    - Data transformation through response models
    """

    def __init__(
        self,
        message: str,
        result: Any,
        response_model: Optional[Type[T]] = None,
        status_code: str = "HTTP_200_OK",
        completed: bool = True,
        cls_object: Optional[Any] = None,
        filter_attributes: Optional[Any] = None,
    ) -> None:
        """Initialize response handler.

        Args:
            message: Response message
            result: Query result or data
            response_model: Optional model for data transformation
            status_code: HTTP status code
            completed: Operation completion status
            cls_object: Class object for error context
            filter_attributes: Optional pagination and filtering attributes
        """
        self.status_code = getattr(status, status_code, status.HTTP_200_OK)
        self.message = message
        self.completed = completed
        self.filter_attributes = filter_attributes
        self.response_model = response_model
        self.cls_object = cls_object
        self.result = result

    def _create_pagination(self) -> Pagination:
        """Create and configure pagination instance.

        Returns:
            Configured Pagination instance
        """
        pagination = Pagination()
        if self.filter_attributes:
            pagination.change(
                PaginationConfig(
                    page=self.filter_attributes.page,
                    size=self.filter_attributes.size,
                    order_field=self.filter_attributes.order_field,
                    order_type=self.filter_attributes.order_type,
                )
            )
        return pagination

    def _format_response(self, pagination: Pagination, data: Any) -> JSONResponse:
        """Format final JSON response with pagination.

        Args:
            pagination: Pagination instance with configuration
            data: Response data to include

        Returns:
            Formatted JSONResponse
        """
        return JSONResponse(
            status_code=self.status_code,
            content={
                "pagination": pagination.as_dict(),
                "completed": self.completed,
                "message": self.message,
                "data": data,
            },
        )

    def _transform_data(self, data: Dict[str, Any]) -> Dict[str, Any]:
        """Transform data using response model if provided.

        Args:
            data: Raw data dictionary

        Returns:
            Transformed data dictionary
        """
        if self.response_model:
            return self.response_model(**data).dump()
        return data

    @staticmethod
    def _validate_data(data: Any, expected_type: Type, cls_object: Any) -> None:
        """Validate data type and raise exception if invalid.

        Args:
            data: Data to validate
            expected_type: Expected type of data
            cls_object: Class object for error context

        Raises:
            HTTPExceptionApi: If data type is invalid
        """
        if not isinstance(data, expected_type):
            raise HTTPExceptionApi(
                lang=cls_object.lang,
                error_code="HTTP_400_BAD_REQUEST",
                loc=get_line_number_for_error(),
                sys_msg=f"Invalid data type: {type(data)}",
            )

class SinglePostgresResponse(BaseJsonResponse[T]):
    """Handler for single record responses from PostgreSQL queries."""

    def __new__(
        cls,
        message: str,
        result: PostgresResponse,
        response_model: Optional[Type[T]] = None,
        status_code: str = "HTTP_200_OK",
        completed: bool = True,
        cls_object: Optional[Any] = None,
        filter_attributes: Optional[Any] = None,
    ) -> JSONResponse:
        """Create response for single PostgreSQL record.

        Args:
            message: Response message
            result: PostgreSQL query result
            response_model: Optional model for data transformation
            status_code: HTTP status code
            completed: Operation completion status
            cls_object: Class object for error context
            filter_attributes: Optional pagination and filtering attributes

        Returns:
            Formatted JSON response

        Raises:
            HTTPExceptionApi: If result is invalid or empty
        """
        cls._validate_data(result, PostgresResponse, cls_object)

        if not result.data:
            raise HTTPExceptionApi(
                lang=cls_object.lang,
                error_code="HTTP_400_BAD_REQUEST",
                loc=get_line_number_for_error(),
                sys_msg="No data found",
            )

        instance = super().__new__(cls)
        instance.__init__(
            message=message,
            result=result,
            response_model=response_model,
            status_code=status_code,
            completed=completed,
            cls_object=cls_object,
            filter_attributes=filter_attributes,
        )

        pagination = instance._create_pagination()
        data = instance._transform_data(result.data.get_dict())

        return instance._format_response(pagination, data)


class AlchemyJsonResponse(BaseJsonResponse[T]):
    """Handler for multiple record responses from SQLAlchemy queries."""

    def __new__(
        cls,
        message: str,
        result: PostgresResponse,
        response_model: Optional[Type[T]] = None,
        status_code: str = "HTTP_200_OK",
        completed: bool = True,
        cls_object: Optional[Any] = None,
        filter_attributes: Optional[Any] = None,
    ) -> JSONResponse:
        """Create response for multiple SQLAlchemy records.

        Args:
            message: Response message
            result: PostgreSQL query result
            response_model: Optional model for data transformation
            status_code: HTTP status code
            completed: Operation completion status
            cls_object: Class object for error context
            filter_attributes: Optional pagination and filtering attributes

        Returns:
            Formatted JSON response

        Raises:
            HTTPExceptionApi: If result is invalid
        """
        cls._validate_data(result, PostgresResponse, cls_object)

        if not result.data:
            raise HTTPExceptionApi(
                lang=cls_object.lang,
                error_code="HTTP_400_BAD_REQUEST",
                loc=get_line_number_for_error(),
                sys_msg="No data found",
            )

        instance = super().__new__(cls)
        instance.__init__(
            message=message,
            result=result,
            response_model=response_model,
            status_code=status_code,
            completed=completed,
            cls_object=cls_object,
            filter_attributes=filter_attributes,
        )

        pagination = instance._create_pagination()
        data = [instance._transform_data(item.get_dict()) for item in result.data]
        pagination.feed(data)
        return instance._format_response(pagination, data)

class ListJsonResponse(BaseJsonResponse[T]):
    """Handler for list data responses."""

    def __new__(
        cls,
        message: str,
        result: List[Any],
        response_model: Optional[Type[T]] = None,
        status_code: str = "HTTP_200_OK",
        completed: bool = True,
        cls_object: Optional[Any] = None,
        filter_attributes: Optional[Any] = None,
    ) -> JSONResponse:
        """Create response for list data.

        Args:
            message: Response message
            result: List of data items
            response_model: Optional model for data transformation
            status_code: HTTP status code
            completed: Operation completion status
            cls_object: Class object for error context
            filter_attributes: Optional pagination and filtering attributes

        Returns:
            Formatted JSON response
        """
        cls._validate_data(result, list, cls_object)

        instance = super().__new__(cls)
        instance.__init__(
            message=message,
            result=result,
            response_model=response_model,
            status_code=status_code,
            completed=completed,
            cls_object=cls_object,
            filter_attributes=filter_attributes,
        )

        pagination = instance._create_pagination()
        data = [instance._transform_data(item) for item in result]
        pagination.feed(data)

        return instance._format_response(pagination, data)


class DictJsonResponse(BaseJsonResponse[T]):
    """Handler for dictionary data responses."""

    def __new__(
        cls,
        message: str,
        result: Dict[str, Any],
        response_model: Optional[Type[T]] = None,
        status_code: str = "HTTP_200_OK",
        completed: bool = True,
        cls_object: Optional[Any] = None,
        filter_attributes: Optional[Any] = None,
    ) -> JSONResponse:
        """Create response for dictionary data.

        Args:
            message: Response message
            result: Dictionary data
            response_model: Optional model for data transformation
            status_code: HTTP status code
            completed: Operation completion status
            cls_object: Class object for error context
            filter_attributes: Optional pagination and filtering attributes

        Returns:
            Formatted JSON response
        """
        cls._validate_data(result, dict, cls_object)

        instance = super().__new__(cls)
        instance.__init__(
            message=message,
            result=result,
            response_model=response_model,
            status_code=status_code,
            completed=completed,
            cls_object=cls_object,
            filter_attributes=filter_attributes,
        )

        pagination = instance._create_pagination()
        data = instance._transform_data(result)

        return instance._format_response(pagination, data)
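
# A usage sketch (hypothetical endpoint): pick the handler that matches the
# shape of the result being returned:
#
#     return DictJsonResponse(
#         message="OK",
#         result={"status": "ready"},
#         cls_object=SomeModel,  # supplies .lang for error context
#     )
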
254
Services/PostgresDb/Models_old/base_model.py
Normal file
254
Services/PostgresDb/Models_old/base_model.py
Normal file
@@ -0,0 +1,254 @@
from contextlib import contextmanager
from typing import Any, Dict, Optional, Generator
from sqlalchemy.orm import Session
from sqlalchemy import inspect

from Services.PostgresDb.database import Base


class BaseModel(Base):
    """Base model class with common utility functions and SQLAlchemy integration.

    This class serves as the foundation for all database models, providing:
    - SQLAlchemy ORM integration through Base
    - Session management utilities
    - CRUD operations (create, update)
    - Bulk operation support
    """

    __abstract__ = True  # Marks this as a base class, won't create a table

    @classmethod
    def get_session(cls) -> Session:
        """Get database session."""
        from Services.PostgresDb.database import get_db

        with get_db() as session:
            return session

    def update(
        self, session: Optional[Session] = None, **kwargs: Dict[str, Any]
    ) -> "BaseModel":
        """Update model instance with given attributes.

        Args:
            session: Optional existing session to use. If not provided, creates a new one.
            **kwargs: Attributes to update

        Returns:
            Updated model instance

        Example:
            # Using an existing session
            with get_db() as session:
                model.update(session=session, name="new name")
                model2.update(session=session, status="active")
                # Both updates use the same transaction

            # Creating a new session automatically
            model.update(name="new name")  # Creates and manages its own session
        """
        should_close_session = session is None
        if session is None:
            session = self.get_session()

        try:
            # Remove unrelated fields
            check_kwargs = self.remove_non_related_inputs(kwargs)

            # Get all table columns
            mapper = inspect(self.__class__)
            columns = [column.key for column in mapper.columns]

            # Get relationship fields
            relationships = [rel.key for rel in mapper.relationships]

            # Handle confirmation logic
            is_confirmed_argument = kwargs.get("is_confirmed", None)
            if is_confirmed_argument and len(kwargs) != 1:
                self.raise_http_exception(
                    status_code="HTTP_406_NOT_ACCEPTABLE",
                    error_case="ConfirmError",
                    data=kwargs,
                    message="Confirm field cannot be updated with other fields",
                )

            # Process system fields
            check_kwargs = self.extract_system_fields(check_kwargs, create=False)

            # Update columns
            for key, value in check_kwargs.items():
                if key in columns:
                    setattr(self, key, value)
                elif key in relationships:
                    # Handle relationship updates
                    related_obj = getattr(self, key)
                    if isinstance(related_obj, list):
                        # Handle many-to-many or one-to-many relationships
                        if isinstance(value, list):
                            setattr(self, key, value)
                    else:
                        # Handle many-to-one or one-to-one relationships
                        setattr(self, key, value)

            # Handle user tracking
            if hasattr(self, "creds"):
                person_id = getattr(self.creds, "person_id", None)
                person_name = getattr(self.creds, "person_name", None)

                if person_id and person_name:
                    if is_confirmed_argument:
                        self.confirmed_by_id = person_id
                        self.confirmed_by = person_name
                    else:
                        self.updated_by_id = person_id
                        self.updated_by = person_name

            session.add(self)
            session.flush()
            return self

        except Exception:
            if should_close_session:
                session.rollback()
            raise
        finally:
            if should_close_session:
                session.close()

    @classmethod
    def create(
        cls, session: Optional[Session] = None, **kwargs: Dict[str, Any]
    ) -> "BaseModel":
        """Create new instance with optional session reuse.

        Args:
            session: Optional existing session to use. If not provided, creates a new one.
            **kwargs: Attributes for the new instance

        Returns:
            Created model instance

        Example:
            # Using an existing session for multiple creates
            with get_db() as session:
                user1 = User.create(session=session, name="John")
                user2 = User.create(session=session, name="Jane")
                # Both creates use the same transaction

            # Creating with auto-managed session
            user = User.create(name="John")  # Creates and manages its own session
        """
        instance = cls()
        should_close_session = session is None

        if session is None:
            session = instance.get_session()

        try:
            check_kwargs = cls.remove_non_related_inputs(instance, kwargs)
            check_kwargs = cls.extract_system_fields(
                instance, check_kwargs, create=True
            )

            # Get all table columns and relationships
            mapper = inspect(cls)
            columns = [column.key for column in mapper.columns]
            relationships = [rel.key for rel in mapper.relationships]

            # Set attributes
            for key, value in check_kwargs.items():
                if key in columns:
                    setattr(instance, key, value)
                elif key in relationships:
                    # Handle relationship assignments
                    if isinstance(value, list):
                        # Handle many-to-many or one-to-many relationships
                        setattr(instance, key, value)
                    else:
                        # Handle many-to-one or one-to-one relationships
                        setattr(instance, key, value)

            # Handle user tracking
            if hasattr(instance, "creds"):
                person_id = getattr(instance.creds, "person_id", None)
                person_name = getattr(instance.creds, "person_name", None)

                if person_id and person_name:
                    instance.created_by_id = person_id
                    instance.created_by = person_name

            session.add(instance)
            session.flush()

            if should_close_session:
                session.commit()

            return instance

        except Exception:
            if should_close_session:
                session.rollback()
            raise
        finally:
            if should_close_session:
                session.close()

    @classmethod
    @contextmanager
    def bulk_create(
        cls, session: Optional[Session] = None
    ) -> Generator[Session, None, None]:
        """Context manager for bulk creating instances.

        Args:
            session: Optional existing session to use. If not provided, creates a new one.

        Yields:
            SQLAlchemy session for creating multiple instances

        Example:
            # Bulk create multiple instances in one transaction
            with User.bulk_create() as session:
                user1 = User.create(session=session, name="John")
                user2 = User.create(session=session, name="Jane")
                # Both creates share the same transaction
        """
        should_close_session = session is None
        if session is None:
            session = cls().get_session()

        try:
            yield session
            if should_close_session:
                session.commit()
        except Exception:
            if should_close_session:
                session.rollback()
            raise
        finally:
            if should_close_session:
                session.close()

# @router.put("/users/{user_id}")
|
||||
# async def update_user(
|
||||
# user_id: str,
|
||||
# update_data: Dict[str, Any],
|
||||
# db: Session = Depends(get_db_session)
|
||||
# ):
|
||||
# user = db.query(User).filter(User.id == user_id).first()
|
||||
# if not user:
|
||||
# raise HTTPException(status_code=404, detail="User not found")
|
||||
#
|
||||
# updated_user = user.update(**update_data)
|
||||
# return updated_user
|
||||
#
|
||||
#
|
||||
# @router.post("/users")
|
||||
# async def create_user(
|
||||
# user_data: Dict[str, Any],
|
||||
# db: Session = Depends(get_db_session)
|
||||
# ):
|
||||
# with User.create_with_session(**user_data) as new_user:
|
||||
# return new_user
|
||||
535
Services/PostgresDb/Models_old/filter_functions.py
Normal file
535
Services/PostgresDb/Models_old/filter_functions.py
Normal file
@@ -0,0 +1,535 @@
|
||||
"""
|
||||
Advanced filtering functionality for SQLAlchemy models.
|
||||
|
||||
This module provides a comprehensive set of filtering capabilities for SQLAlchemy models,
|
||||
including pagination, ordering, and complex query building.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
from typing import Any, Dict, List, Optional, Type, TypeVar, Union, Tuple, Protocol
|
||||
from dataclasses import dataclass
|
||||
from json import dumps
|
||||
|
||||
from sqlalchemy import BinaryExpression, desc, asc
|
||||
from sqlalchemy.exc import SQLAlchemyError
|
||||
from sqlalchemy.orm import Query, Session
|
||||
from sqlalchemy.sql.elements import BinaryExpression
|
||||
|
||||
from ApiLibrary import system_arrow
|
||||
from ApiLibrary.common.line_number import get_line_number_for_error
|
||||
from ErrorHandlers.Exceptions.api_exc import HTTPExceptionApi
|
||||
from Services.PostgresDb.Models.response import PostgresResponse
|
||||
|
||||
# Type variable for class methods returning self
|
||||
T = TypeVar("T", bound="FilterAttributes")
|
||||
|
||||
|
||||
class HTTPException(Exception):
|
||||
"""Base exception for HTTP errors."""
|
||||
|
||||
def __init__(self, status_code: str, detail: str):
|
||||
self.status_code = status_code
|
||||
self.detail = detail
|
||||
super().__init__(detail)
|
||||
|
||||
|
||||
class HTTPStatus(Protocol):
|
||||
"""Protocol defining required HTTP status codes."""
|
||||
|
||||
HTTP_400_BAD_REQUEST: str
|
||||
HTTP_404_NOT_FOUND: str
|
||||
HTTP_304_NOT_MODIFIED: str
|
||||
|
||||
|
||||
@dataclass
|
||||
class FilterConfig:
|
||||
"""Configuration for filtering and pagination."""
|
||||
|
||||
page: int = 1
|
||||
size: int = 10
|
||||
order_field: str = "id"
|
||||
order_type: str = "asc"
|
||||
include_joins: List[str] = None
|
||||
query: Dict[str, Any] = None
|
||||
|
||||
def __post_init__(self):
|
||||
"""Initialize default values for None fields."""
|
||||
self.include_joins = self.include_joins or []
|
||||
self.query = self.query or {}
|
||||
|
||||
|
||||
class QueryConfig:
|
||||
"""Configuration for query building and execution."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
pre_query: Optional[Query] = None,
|
||||
filter_config: Optional[FilterConfig] = None,
|
||||
http_exception: Optional[Type[HTTPException]] = HTTPException,
|
||||
status: Optional[Type[HTTPStatus]] = None,
|
||||
):
|
||||
self.pre_query = pre_query
|
||||
self.filter_config = filter_config or FilterConfig()
|
||||
self.http_exception = http_exception
|
||||
self.status = status
|
||||
self.total_count: Optional[int] = None
|
||||
|
||||
def update_filter_config(self, **kwargs) -> None:
|
||||
"""Update filter configuration parameters."""
|
||||
for key, value in kwargs.items():
|
||||
if hasattr(self.filter_config, key):
|
||||
setattr(self.filter_config, key, value)
|
||||
|
||||
def set_total_count(self, count: int) -> None:
|
||||
"""Set the total count of records."""
|
||||
self.total_count = count
|
||||
|
||||
|
||||
class FilterAttributes:
|
||||
"""
|
||||
Advanced filtering capabilities for SQLAlchemy models.
|
||||
|
||||
Features:
|
||||
- Pagination and ordering
|
||||
- Complex query building
|
||||
- Active/deleted/confirmed status filtering
|
||||
- Expiry date handling
|
||||
- Transaction management
|
||||
|
||||
Usage:
|
||||
# Initialize configuration
|
||||
config = QueryConfig(filter_config=FilterConfig(page=1, size=10))
|
||||
|
||||
# Create model with configuration
|
||||
class User(FilterAttributes):
|
||||
query_config = config
|
||||
|
||||
# Filter multiple records
|
||||
users = User.filter_by_all(db, name="John").data
|
||||
|
||||
# Update configuration
|
||||
User.query_config.update_filter_config(page=2, size=20)
|
||||
next_users = User.filter_all(db).data
|
||||
"""
|
||||
|
||||
__abstract__ = True
|
||||
|
||||
# Class-level configuration
|
||||
query_config: QueryConfig = QueryConfig()
|
||||
|
||||
@classmethod
|
||||
def flush(cls: Type[T], db: Session) -> T:
|
||||
"""
|
||||
Flush the current session to the database.
|
||||
|
||||
Args:
|
||||
db: Database session
|
||||
|
||||
Returns:
|
||||
Self instance
|
||||
|
||||
Raises:
|
||||
HTTPException: If database operation fails
|
||||
"""
|
||||
try:
|
||||
db.flush()
|
||||
return cls
|
||||
except SQLAlchemyError as e:
|
||||
raise HTTPExceptionApi(
|
||||
error_code="HTTP_304_NOT_MODIFIED",
|
||||
lang=cls.lang or "tr",
|
||||
loc=get_line_number_for_error(),
|
||||
sys_msg=str(e),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def destroy(cls: Type[T], db: Session) -> None:
|
||||
"""
|
||||
Delete the record from the database.
|
||||
|
||||
Args:
|
||||
db: Database session
|
||||
"""
|
||||
db.delete(cls)
|
||||
db.commit()
|
||||
|
||||
@classmethod
|
||||
def save_via_metadata(cls: Type[T], db: Session) -> None:
|
||||
"""
|
||||
Save or rollback based on metadata.
|
||||
|
||||
Args:
|
||||
db: Database session
|
||||
|
||||
Raises:
|
||||
HTTPException: If save operation fails
|
||||
"""
|
||||
try:
|
||||
meta_data = getattr(cls, "meta_data", {})
|
||||
if meta_data.get("created", False):
|
||||
db.commit()
|
||||
db.rollback()
|
||||
except SQLAlchemyError as e:
|
||||
raise HTTPExceptionApi(
|
||||
error_code="HTTP_304_NOT_MODIFIED",
|
||||
lang=cls.lang or "tr",
|
||||
loc=get_line_number_for_error(),
|
||||
sys_msg=str(e),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def save(cls: Type[T], db: Session) -> None:
|
||||
"""
|
||||
Commit changes to database.
|
||||
|
||||
Args:
|
||||
db: Database session
|
||||
|
||||
Raises:
|
||||
HTTPException: If commit fails
|
||||
"""
|
||||
try:
|
||||
db.commit()
|
||||
except SQLAlchemyError as e:
|
||||
raise HTTPExceptionApi(
|
||||
error_code="HTTP_304_NOT_MODIFIED",
|
||||
lang=cls.lang or "tr",
|
||||
loc=get_line_number_for_error(),
|
||||
sys_msg=str(e),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def rollback(cls: Type[T], db: Session) -> None:
|
||||
"""
|
||||
Rollback current transaction.
|
||||
|
||||
Args:
|
||||
db: Database session
|
||||
"""
|
||||
db.rollback()
|
||||
|
||||
@classmethod
|
||||
def save_and_confirm(cls: Type[T], db: Session) -> None:
|
||||
"""
|
||||
Save changes and mark record as confirmed.
|
||||
|
||||
Args:
|
||||
db: Database session
|
||||
|
||||
Raises:
|
||||
HTTPException: If operation fails
|
||||
"""
|
||||
try:
|
||||
cls.save(db)
|
||||
cls.update(db, is_confirmed=True)
|
||||
cls.save(db)
|
||||
except SQLAlchemyError as e:
|
||||
raise HTTPExceptionApi(
|
||||
error_code="HTTP_304_NOT_MODIFIED",
|
||||
lang=cls.lang or "tr",
|
||||
loc=get_line_number_for_error(),
|
||||
sys_msg=str(e),
|
||||
)
|
||||
|
||||
    @classmethod
    def _query(cls: Type[T], db: Session) -> Query:
        """
        Get base query for model.

        Args:
            db: Database session

        Returns:
            SQLAlchemy Query object
        """
        return (
            cls.query_config.pre_query if cls.query_config.pre_query else db.query(cls)
        )

    @classmethod
    def add_query_to_filter(
        cls: Type[T], query: Query, filter_list: Dict[str, Any]
    ) -> Query:
        """
        Add pagination and ordering to query.

        Args:
            query: Base query
            filter_list: Dictionary containing pagination and ordering parameters

        Returns:
            Modified query with pagination and ordering
        """
        order_field = getattr(cls, filter_list.get("order_field"))
        order_func = desc if str(filter_list.get("order_type"))[0] == "d" else asc

        return (
            query.order_by(order_func(order_field))
            .limit(filter_list.get("size"))
            .offset((filter_list.get("page") - 1) * filter_list.get("size"))
            .populate_existing()
        )

    @classmethod
    def get_filter_attributes(cls) -> Dict[str, Any]:
        """
        Get filter configuration from attributes.

        Returns:
            Dictionary containing pagination and filtering parameters
        """
        return {
            "page": getattr(cls.query_config.filter_config, "page", 1),
            "size": getattr(cls.query_config.filter_config, "size", 10),
            "order_field": getattr(cls.query_config.filter_config, "order_field", "id"),
            "order_type": getattr(cls.query_config.filter_config, "order_type", "asc"),
            "include_joins": getattr(
                cls.query_config.filter_config, "include_joins", []
            ),
            "query": getattr(cls.query_config.filter_config, "query", {}),
        }

    @classmethod
    def add_new_arg_to_args(
        cls,
        args_list: Tuple[BinaryExpression, ...],
        argument: str,
        value: BinaryExpression,
    ) -> Tuple[BinaryExpression, ...]:
        """
        Add a new argument to the filter arguments if it is not already present.

        Args:
            args_list: Current filter arguments
            argument: Argument name to check
            value: New argument to add

        Returns:
            Updated argument tuple
        """
        new_args = [arg for arg in args_list if isinstance(arg, BinaryExpression)]

        def arg_left(arg_obj):
            return getattr(getattr(arg_obj, "left", None), "key", None)

        if not any(arg_left(arg) == argument for arg in new_args):
            new_args.append(value)

        return tuple(new_args)

    @classmethod
    def get_not_expired_query_arg(
        cls, args: Tuple[BinaryExpression, ...]
    ) -> Tuple[BinaryExpression, ...]:
        """
        Add expiry date conditions to query.

        Args:
            args: Current query arguments

        Returns:
            Updated arguments with expiry conditions
        """
        current_time = str(system_arrow.now())
        args = cls.add_new_arg_to_args(
            args, "expiry_ends", cls.expiry_ends > current_time
        )
        args = cls.add_new_arg_to_args(
            args, "expiry_starts", cls.expiry_starts <= current_time
        )
        return args
    @classmethod
    def get_active_and_confirmed_query_arg(
        cls, args: Tuple[BinaryExpression, ...]
    ) -> Tuple[BinaryExpression, ...]:
        """
        Add status conditions to query.

        Args:
            args: Current query arguments

        Returns:
            Updated arguments with status conditions
        """
        args = cls.add_new_arg_to_args(args, "is_confirmed", cls.is_confirmed == True)
        args = cls.add_new_arg_to_args(args, "active", cls.active == True)
        args = cls.add_new_arg_to_args(args, "deleted", cls.deleted == False)
        return args

    @classmethod
    def select_only(
        cls: Type[T],
        db: Session,
        *args: BinaryExpression,
        select_args: List[Any],
        order_by: Optional[Any] = None,
        limit: Optional[int] = None,
        system: bool = False,
    ) -> PostgresResponse:
        """
        Select specific columns from a filtered query.

        Args:
            db: Database session
            args: Filter conditions
            select_args: Columns to select
            order_by: Optional ordering
            limit: Optional result limit
            system: If True, skip status filtering

        Returns:
            Query response with selected columns
        """
        if not system:
            args = cls.get_active_and_confirmed_query_arg(args)
            args = cls.get_not_expired_query_arg(args)

        query = cls._query(db).filter(*args).with_entities(*select_args)
        cls.query_config.set_total_count(query.count())

        if order_by is not None:
            query = query.order_by(order_by)
        if limit:
            query = query.limit(limit)

        return PostgresResponse(query=query, first=False)

    @classmethod
    def filter_by_all(
        cls: Type[T], db: Session, system: bool = False, **kwargs
    ) -> PostgresResponse:
        """
        Filter multiple records by keyword arguments.

        Args:
            db: Database session
            system: If True, skip status filtering
            **kwargs: Filter criteria

        Returns:
            Query response with matching records
        """
        if "is_confirmed" not in kwargs and not system:
            kwargs["is_confirmed"] = True
        kwargs.pop("system", None)

        query = cls._query(db).filter_by(**kwargs)
        cls.query_config.set_total_count(query.count())

        if cls.query_config.filter_config:
            filter_list = cls.get_filter_attributes()
            query = cls.add_query_to_filter(query, filter_list)

        return PostgresResponse(query=query, first=False)

    @classmethod
    def filter_by_one(
        cls: Type[T], db: Session, system: bool = False, **kwargs
    ) -> PostgresResponse:
        """
        Filter a single record by keyword arguments.

        Args:
            db: Database session
            system: If True, skip status filtering
            **kwargs: Filter criteria

        Returns:
            Query response with a single record
        """
        if "is_confirmed" not in kwargs and not system:
            kwargs["is_confirmed"] = True
        kwargs.pop("system", None)

        query = cls._query(db).filter_by(**kwargs)
        cls.query_config.set_total_count(1)

        return PostgresResponse(query=query, first=True)

    @classmethod
    def filter_all(
        cls: Type[T], *args: Any, db: Session, system: bool = False
    ) -> PostgresResponse:
        """
        Filter multiple records by expressions.

        Args:
            db: Database session
            args: Filter expressions
            system: If True, skip status filtering

        Returns:
            Query response with matching records
        """
        if not system:
            args = cls.get_active_and_confirmed_query_arg(args)
            args = cls.get_not_expired_query_arg(args)

        filter_list = cls.get_filter_attributes()
        if filter_list.get("query"):
            for smart_iter in cls.filter_expr(**filter_list["query"]):
                if key := getattr(getattr(smart_iter, "left", None), "key", None):
                    args = cls.add_new_arg_to_args(args, key, smart_iter)

        query = cls._query(db).filter(*args)
        cls.query_config.set_total_count(query.count())

        if cls.query_config.filter_config:
            query = cls.add_query_to_filter(query, filter_list)

        return PostgresResponse(query=query, first=False)

    @classmethod
    def filter_one(
        cls: Type[T],
        *args: Any,
        db: Session,
        system: bool = False,
        expired: bool = False,
    ) -> PostgresResponse:
        """
        Filter a single record by expressions.

        Args:
            db: Database session
            args: Filter expressions
            system: If True, skip status filtering
            expired: If True, include expired records

        Returns:
            Query response with a single record
        """
        if not system:
            args = cls.get_active_and_confirmed_query_arg(args)
            if not expired:
                args = cls.get_not_expired_query_arg(args)

        query = cls._query(db).filter(*args)
        cls.query_config.set_total_count(1)

        return PostgresResponse(query=query, first=True)

    # @classmethod
    # def raise_http_exception(
    #     cls,
    #     status_code: str,
    #     error_case: str,
    #     data: Dict[str, Any],
    #     message: str,
    # ) -> None:
    #     """
    #     Raise HTTP exception with formatted error details.
    #
    #     Args:
    #         status_code: HTTP status code string
    #         error_case: Error type
    #         data: Additional error data
    #         message: Error message
    #
    #     Raises:
    #         HTTPException: With formatted error details
    #     """
    #     raise HTTPExceptionApi(
    #         error_code="HTTP_304_NOT_MODIFIED",
    #         lang=cls.lang or "tr",
    #         loc=get_line_number_for_error(),
    #     )
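
Usage sketch (editor's note, not part of the commit): how the expression-based
filters above compose at a call site. "User" mirrors the class's own docstring
example; the "name" column and session wiring are illustrative assumptions.

# Sketch only: "User" and its columns are hypothetical placeholders.
from Services.PostgresDb.database import get_db

with get_db() as db:
    response = User.filter_all(User.name == "John", db=db)
    print(response.count)          # total matches before pagination
    for user in response.all:      # one paginated, ordered page of results
        print(user.get_dict())

    # system=True skips the injected active/confirmed/expiry conditions
    raw = User.filter_one(User.id == 1, db=db, system=True).first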
549
Services/PostgresDb/Models_old/mixins.py
Normal file
@@ -0,0 +1,549 @@
"""
PostgreSQL Base Models Module

This module provides base classes for PostgreSQL models with common functionality such as:
- CRUD operations with session management
- Soft delete capability
- Automatic timestamps
- User tracking (created_by, updated_by)
- Data serialization
- Multi-language support
"""

import datetime
from decimal import Decimal
from typing import Any, Dict, List, Optional, Type, TypeVar, Union, cast

from sqlalchemy import (
    TIMESTAMP,
    NUMERIC,
    func,
    text,
    UUID,
    String,
    Integer,
    Boolean,
    SmallInteger,
)
from sqlalchemy.orm import Mapped, mapped_column, Session
from sqlalchemy_mixins.serialize import SerializeMixin
from sqlalchemy_mixins.repr import ReprMixin
from sqlalchemy_mixins.smartquery import SmartQueryMixin

from ApiLibrary import DateTimeLocal, system_arrow
from Services.PostgresDb.Models.base_model import BaseModel
from Services.PostgresDb.Models.filter_functions import FilterAttributes

# Type variable for class methods returning self
T = TypeVar("T", bound="CrudMixin")
class CrudMixin(
    BaseModel, SmartQueryMixin, SerializeMixin, ReprMixin, FilterAttributes
):
    """
    Base mixin providing CRUD operations and common fields for PostgreSQL models.

    Features:
    - Automatic timestamps (created_at, updated_at)
    - Soft delete capability
    - User tracking (created_by, updated_by)
    - Data serialization
    - Multi-language support
    """

    __abstract__ = True

    # System fields that should be handled automatically during creation
    __system__fields__create__ = (
        "created_at",
        "updated_at",
        "cryp_uu_id",
        "created_by",
        "created_by_id",
        "updated_by",
        "updated_by_id",
        "replication_id",
        "confirmed_by",
        "confirmed_by_id",
        "is_confirmed",
        "deleted",
        "active",
        "is_notification_send",
        "is_email_send",
    )

    # System fields that should be handled automatically during updates
    __system__fields__update__ = (
        "cryp_uu_id",
        "created_at",
        "updated_at",
        "created_by",
        "created_by_id",
        "confirmed_by",
        "confirmed_by_id",
        "updated_by",
        "updated_by_id",
        "replication_id",
    )

    # Default fields to exclude from serialization
    __system_default_model__ = [
        "cryp_uu_id",
        "is_confirmed",
        "deleted",
        "is_notification_send",
        "replication_id",
        "is_email_send",
        "confirmed_by_id",
        "confirmed_by",
        "updated_by_id",
        "created_by_id",
    ]

    # User credentials and preferences
    creds = None
    lang: str = "tr"
    client_arrow: Optional[DateTimeLocal] = None
    valid_record_dict: Dict[str, bool] = {"active": True, "deleted": False}
    meta_data: Dict[str, Any] = {}

    # Common timestamp fields for all models
    expiry_starts: Mapped[TIMESTAMP] = mapped_column(
        type_=TIMESTAMP(timezone=True),
        server_default=func.now(),
        nullable=False,
        comment="Record validity start timestamp",
    )
    expiry_ends: Mapped[TIMESTAMP] = mapped_column(
        type_=TIMESTAMP(timezone=True),
        default="2099-12-31",
        server_default="2099-12-31",
        comment="Record validity end timestamp",
    )
    @classmethod
    def set_user_define_properties(cls, token: Any) -> None:
        """
        Set user-specific properties from the authentication token.

        Args:
            token: Authentication token containing user preferences
        """
        cls.creds = token.credentials
        cls.client_arrow = DateTimeLocal(is_client=True, timezone=token.timezone)
        cls.lang = str(token.lang).lower()

    @classmethod
    def remove_non_related_inputs(cls, kwargs: Dict[str, Any]) -> Dict[str, Any]:
        """
        Filter out inputs that don't correspond to model fields.

        Args:
            kwargs: Dictionary of field names and values

        Returns:
            Dictionary containing only valid model fields
        """
        return {
            key: value
            for key, value in kwargs.items()
            if key in cls.columns + cls.hybrid_properties + cls.settable_relations
        }

    @classmethod
    def extract_system_fields(
        cls, filter_kwargs: dict, create: bool = True
    ) -> Dict[str, Any]:
        """
        Remove system-managed fields from the input dictionary.

        Args:
            filter_kwargs: Input dictionary of fields
            create: If True, use the creation field list, else the update field list

        Returns:
            Dictionary with system fields removed
        """
        system_fields = filter_kwargs.copy()
        extract_fields = (
            cls.__system__fields__create__ if create else cls.__system__fields__update__
        )
        for field in extract_fields:
            system_fields.pop(field, None)
        return system_fields

    @classmethod
    def iterate_over_variables(cls, val: Any, key: str) -> tuple[bool, Optional[Any]]:
        """
        Process a field value based on its type and convert it to the appropriate format.

        Args:
            val: Field value
            key: Field name

        Returns:
            Tuple of (should_include, processed_value)
        """
        key_ = cls.__annotations__.get(key, None)
        is_primary = key in cls.primary_keys
        row_attr = bool(getattr(getattr(cls, key), "foreign_keys", None))

        # Skip primary keys and foreign keys
        if is_primary or row_attr:
            return False, None

        # Handle None values
        if val is None:
            return True, None

        # Special handling for UUID fields
        if str(key[-5:]).lower() == "uu_id":
            return True, str(val)

        # Handle typed fields
        if key_:
            if key_ == Mapped[int]:
                return True, int(val)
            elif key_ == Mapped[bool]:
                return True, bool(val)
            elif key_ == Mapped[float] or key_ == Mapped[NUMERIC]:
                return True, round(float(val), 3)
            elif key_ == Mapped[TIMESTAMP]:
                return True, str(
                    cls.client_arrow.get(str(val)).format("DD-MM-YYYY HH:mm:ss +0")
                )
            elif key_ == Mapped[str]:
                return True, str(val)

        # Handle based on Python types
        else:
            if isinstance(val, datetime.datetime):
                return True, str(
                    cls.client_arrow.get(str(val)).format("DD-MM-YYYY HH:mm:ss +0")
                )
            elif isinstance(val, bool):
                return True, bool(val)
            elif isinstance(val, (float, Decimal)):
                return True, round(float(val), 3)
            elif isinstance(val, int):
                return True, int(val)
            elif isinstance(val, str):
                return True, str(val)

        return False, None
    @classmethod
    def find_or_create(cls: Type[T], db: Session, **kwargs) -> T:
        """
        Find an existing record matching the criteria or create a new one.

        Args:
            db: Database session
            **kwargs: Search/creation criteria

        Returns:
            Existing or newly created record
        """
        check_kwargs = cls.extract_system_fields(kwargs)

        # Search for existing record
        query = db.query(cls).filter(
            cls.expiry_ends > str(system_arrow.now()),
            cls.expiry_starts <= str(system_arrow.now()),
        )

        for key, value in check_kwargs.items():
            if hasattr(cls, key):
                query = query.filter(getattr(cls, key) == value)

        already_record = query.first()

        # Handle existing record
        if already_record:
            if already_record.deleted:
                already_record.meta_data = {
                    "created": False,
                    "error_case": "DeletedRecord",
                    "message": "",
                }
                return already_record
            elif not already_record.is_confirmed:
                already_record.meta_data = {
                    "created": False,
                    "error_case": "IsNotConfirmed",
                    "message": "",
                }
                return already_record

            already_record.meta_data = {
                "created": False,
                "error_case": "AlreadyExists",
                "message": "",
            }
            return already_record

        # Create new record
        check_kwargs = cls.remove_non_related_inputs(check_kwargs)
        created_record = cls()

        for key, value in check_kwargs.items():
            setattr(created_record, key, value)

        if getattr(cls.creds, "person_id", None) and getattr(
            cls.creds, "person_name", None
        ):
            created_record.created_by_id = cls.creds.person_id
            created_record.created_by = cls.creds.person_name

        db.add(created_record)
        db.flush()

        created_record.meta_data = {"created": True, "error_case": None, "message": ""}
        return created_record
    def update(self, db: Session, **kwargs) -> "CrudMixin":
        """
        Update the record with new values.

        Args:
            db: Database session
            **kwargs: Fields to update

        Returns:
            Updated record

        Raises:
            ValueError: If attempting to update is_confirmed with other fields
        """
        check_kwargs = self.remove_non_related_inputs(kwargs)
        is_confirmed_argument = kwargs.get("is_confirmed", None)

        if is_confirmed_argument and not len(kwargs) == 1:
            raise ValueError("Confirm field cannot be updated with other fields")

        check_kwargs = self.extract_system_fields(check_kwargs, create=False)

        for key, value in check_kwargs.items():
            setattr(self, key, value)

        # Update confirmation or modification tracking
        if is_confirmed_argument:
            if getattr(self.creds, "person_id", None) and getattr(
                self.creds, "person_name", None
            ):
                self.confirmed_by_id = self.creds.person_id
                self.confirmed_by = self.creds.person_name
        else:
            if getattr(self.creds, "person_id", None) and getattr(
                self.creds, "person_name", None
            ):
                self.updated_by_id = self.creds.person_id
                self.updated_by = self.creds.person_name

        db.flush()
        return self
    def get_dict(
        self,
        exclude: Optional[List[str]] = None,
        include: Optional[List[str]] = None,
    ) -> Dict[str, Any]:
        """
        Convert model instance to dictionary with customizable fields.

        Args:
            exclude: List of fields to exclude
            include: List of fields to include (takes precedence over exclude)

        Returns:
            Dictionary representation of the model
        """
        return_dict: Dict[str, Any] = {}

        if include:
            # Handle explicitly included fields
            exclude_list = [
                element
                for element in self.__system_default_model__
                if str(element)[-2:] == "id" and str(element)[-5:].lower() == "uu_id"
            ]
            columns_include_list = list(set(include).difference(set(exclude_list)))
            columns_include_list.extend(["uu_id"])

            for key in columns_include_list:
                val = getattr(self, key)
                correct, value_of_database = self.iterate_over_variables(val, key)
                if correct:
                    return_dict[key] = value_of_database

        elif exclude:
            # Handle explicitly excluded fields
            exclude.extend(
                list(
                    set(getattr(self, "__exclude__fields__", []) or []).difference(
                        exclude
                    )
                )
            )
            exclude.extend(
                [
                    element
                    for element in self.__system_default_model__
                    if str(element)[-2:] == "id"
                ]
            )

            columns_excluded_list = list(set(self.columns).difference(set(exclude)))
            columns_excluded_list.extend(["uu_id", "active"])

            for key in columns_excluded_list:
                val = getattr(self, key)
                correct, value_of_database = self.iterate_over_variables(val, key)
                if correct:
                    return_dict[key] = value_of_database
        else:
            # Handle default field selection
            exclude_list = (
                getattr(self, "__exclude__fields__", []) or []
            ) + self.__system_default_model__
            columns_list = list(set(self.columns).difference(set(exclude_list)))
            columns_list = [col for col in columns_list if str(col)[-2:] != "id"]
            columns_list.extend(
                [col for col in self.columns if str(col)[-5:].lower() == "uu_id"]
            )

            for remove_field in self.__system_default_model__:
                if remove_field in columns_list:
                    columns_list.remove(remove_field)

            for key in columns_list:
                val = getattr(self, key)
                correct, value_of_database = self.iterate_over_variables(val, key)
                if correct:
                    return_dict[key] = value_of_database

        return return_dict
class BaseCollection(CrudMixin):
    """Base model class with minimal fields."""

    __abstract__ = True
    __repr__ = ReprMixin.__repr__

    id: Mapped[int] = mapped_column(Integer, primary_key=True)


class CrudCollection(CrudMixin):
    """
    Full-featured model class with all common fields.

    Includes:
    - UUID and reference ID
    - Timestamps
    - User tracking
    - Confirmation status
    - Soft delete
    - Notification flags
    """

    __abstract__ = True
    __repr__ = ReprMixin.__repr__

    # Primary and reference fields
    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    uu_id: Mapped[str] = mapped_column(
        UUID,
        server_default=text("gen_random_uuid()"),
        index=True,
        unique=True,
        comment="Unique identifier UUID",
    )
    ref_id: Mapped[str] = mapped_column(
        String(100), nullable=True, index=True, comment="External reference ID"
    )

    # Timestamps
    created_at: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True),
        server_default=func.now(),
        nullable=False,
        index=True,
        comment="Record creation timestamp",
    )
    updated_at: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True),
        server_default=func.now(),
        onupdate=func.now(),
        nullable=False,
        index=True,
        comment="Last update timestamp",
    )

    # Cryptographic and user tracking
    cryp_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, index=True, comment="Cryptographic UUID"
    )
    created_by: Mapped[str] = mapped_column(
        String, nullable=True, comment="Creator name"
    )
    created_by_id: Mapped[int] = mapped_column(
        Integer, nullable=True, comment="Creator ID"
    )
    updated_by: Mapped[str] = mapped_column(
        String, nullable=True, comment="Last modifier name"
    )
    updated_by_id: Mapped[int] = mapped_column(
        Integer, nullable=True, comment="Last modifier ID"
    )
    confirmed_by: Mapped[str] = mapped_column(
        String, nullable=True, comment="Confirmer name"
    )
    confirmed_by_id: Mapped[int] = mapped_column(
        Integer, nullable=True, comment="Confirmer ID"
    )

    # Status flags
    is_confirmed: Mapped[bool] = mapped_column(
        Boolean, server_default="0", comment="Record confirmation status"
    )
    replication_id: Mapped[int] = mapped_column(
        SmallInteger, server_default="0", comment="Replication identifier"
    )
    deleted: Mapped[bool] = mapped_column(
        Boolean, server_default="0", comment="Soft delete flag"
    )
    active: Mapped[bool] = mapped_column(
        Boolean, server_default="1", comment="Record active status"
    )
    is_notification_send: Mapped[bool] = mapped_column(
        Boolean, server_default="0", comment="Notification sent flag"
    )
    is_email_send: Mapped[bool] = mapped_column(
        Boolean, server_default="0", comment="Email sent flag"
    )

    @classmethod
    def retrieve_language_model(cls, lang: str, response_model: Any) -> Dict[str, str]:
        """
        Retrieve language-specific model headers and validation messages.

        Args:
            lang: Language code
            response_model: Model containing language annotations

        Returns:
            Dictionary of field names to localized headers
        """
        headers_and_validation = {}
        __language_model__ = getattr(cls.__language_model__, lang, "tr")

        for field in response_model.__annotations__.keys():
            headers_and_validation[field] = getattr(
                __language_model__, field, "Lang Not found"
            )

        return headers_and_validation
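
Model sketch (editor's note, not part of the commit): a minimal concrete model
on CrudCollection and the create/update lifecycle above. "City", its table
name, and column are hypothetical; it assumes BaseModel wires the class into
the project's declarative registry.

# Hypothetical model for illustration only.
from sqlalchemy import String
from sqlalchemy.orm import Mapped, mapped_column
from Services.PostgresDb.database import get_db
from Services.PostgresDb.Models_old.mixins import CrudCollection

class City(CrudCollection):  # assumes BaseModel provides declarative wiring
    __tablename__ = "cities"
    city_name: Mapped[str] = mapped_column(String(80))

with get_db() as db:
    record = City.find_or_create(db, city_name="Izmir")
    if record.meta_data.get("created"):
        db.commit()                       # or record.save_via_metadata(db)
    record.update(db, city_name="İzmir")  # system fields are stripped first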
44
Services/PostgresDb/Models_old/query.py
Normal file
@@ -0,0 +1,44 @@
from typing import Any, List, Optional, TypeVar, Union

from sqlalchemy.orm import Query
from sqlalchemy.orm.session import Session

T = TypeVar("T")


class QueryResponse:
    """Handler for SQLAlchemy query results with error handling."""

    def __init__(self, db: Session, query: Query, first: bool = False):
        self.db = db
        self.first = first
        self.__query = query

    def get(self, index: int) -> Optional[T]:
        """Get the item at a specific index (1-based) if it exists."""
        count = self.count
        if count and not index > count:
            return self.data[index - 1]
        return None

    @property
    def data(self) -> Union[Optional[T], List[T]]:
        """Get query results with error handling."""
        try:
            if self.first:
                return self.__query.first()
            return self.__query.all()
        except Exception:
            # Handle any database errors by rolling back
            self.db.rollback()
            return None if self.first else []

    @property
    def count(self) -> int:
        """Get total count of query results."""
        return self.__query.count()

    @property
    def query(self) -> Query:
        """Get the underlying SQLAlchemy query."""
        return self.__query
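
Usage sketch (editor's note, not part of the commit): QueryResponse in use.
"SomeModel" and the session are placeholders; note that get() is 1-based and
that data degrades to []/None on a database error.

# Sketch: "SomeModel" stands in for any mapped model.
response = QueryResponse(db=db, query=db.query(SomeModel), first=False)
print(response.count)     # issues a SELECT COUNT(*)
rows = response.data      # [] on error because first=False
second = response.get(2)  # 1-based index; None if out of range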
90
Services/PostgresDb/Models_old/response.py
Normal file
@@ -0,0 +1,90 @@
"""
Response handler for PostgreSQL query results.

This module provides a wrapper class for SQLAlchemy query results,
adding convenience methods for accessing data and managing query state.
"""

from typing import Any, Dict, List, Optional, TypeVar, Generic, Union
from sqlalchemy.orm import Query

T = TypeVar("T")


class PostgresResponse(Generic[T]):
    """
    Wrapper for PostgreSQL/SQLAlchemy query results.

    Attributes:
        query: SQLAlchemy query object
        first: Whether to return first result only
        data: Query results (lazy loaded)
        count: Total count of results

    Properties:
        all: All results as list
        first_item: First result only
    """

    def __init__(
        self,
        query: Query,
        first: bool = False,
        status: bool = True,
        message: str = "",
        error: Optional[str] = None,
    ):
        self._query = query
        self._first = first
        self.status = status
        self.message = message
        self.error = error
        self._data: Optional[Union[List[T], T]] = None
        self._count: Optional[int] = None

    @property
    def query(self) -> Query:
        """Get query object."""
        return self._query

    @property
    def data(self) -> Union[List[T], T, None]:
        """
        Lazy load and return query results.
        Returns first item if first=True, otherwise returns all results.
        """
        if self._data is None:
            results = self._query.all()
            self._data = results[0] if self._first and results else results
        return self._data

    @property
    def count(self) -> int:
        """Lazy load and return total count of results."""
        if self._count is None:
            self._count = self._query.count()
        return self._count

    @property
    def all(self) -> List[T]:
        """Get all results as list."""
        return (
            self.data
            if isinstance(self.data, list)
            else [self.data] if self.data else []
        )

    @property
    def first(self) -> Optional[T]:
        """Get first result only."""
        return self.data if self._first else (self.data[0] if self.data else None)

    def as_dict(self) -> Dict[str, Any]:
        """Convert response to dictionary format."""
        return {
            "status": self.status,
            "message": self.message,
            "data": self.data,
            "count": self.count,
            "error": self.error,
        }
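
Behavior sketch (editor's note, not part of the commit): both data and count
are lazy and cached, so repeated access does not re-query. "SomeModel" is a
placeholder.

# Sketch: results and count are cached after first access.
resp = PostgresResponse(query=db.query(SomeModel), first=False)
items = resp.data         # executes the query once
items_again = resp.data   # served from the cached _data
total = resp.count        # separate COUNT(*) query, also cached
payload = resp.as_dict()  # {"status": ..., "data": ..., "count": ...}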
6
Services/PostgresDb/__init__.py
Normal file
@@ -0,0 +1,6 @@
from Services.PostgresDb.Models.mixins import CrudCollection, BaseCollection

__all__ = [
    "CrudCollection",
    "BaseCollection",
]
60
Services/PostgresDb/database.py
Normal file
@@ -0,0 +1,60 @@
from contextlib import contextmanager
from functools import lru_cache
from typing import Generator

from sqlalchemy import create_engine
from sqlalchemy.orm import declarative_base, sessionmaker, scoped_session, Session

from AllConfigs.SqlDatabase.configs import WagDatabase

# Configure the database engine with proper pooling
engine = create_engine(
    WagDatabase.DATABASE_URL,
    pool_pre_ping=True,  # Verify connection before using
    pool_size=20,  # Maximum number of permanent connections
    max_overflow=10,  # Maximum number of additional connections
    pool_recycle=3600,  # Recycle connections after 1 hour
    pool_timeout=30,  # Wait up to 30 seconds for a connection
    echo=False,  # Set to True for debugging SQL queries
)

Base = declarative_base()


# Create a cached session factory
@lru_cache()
def get_session_factory() -> scoped_session:
    """Create a thread-safe session factory."""
    session_local = sessionmaker(
        bind=engine,
        autocommit=False,
        autoflush=False,
        expire_on_commit=False,  # Prevent expired object issues
    )
    return scoped_session(session_local)


@contextmanager
def get_db() -> Generator[Session, None, None]:
    """Get database session with proper connection management.

    This context manager ensures:
    - Proper connection pooling
    - Session cleanup
    - Connection return to pool
    - Thread safety

    Yields:
        Session: SQLAlchemy session object
    """
    session_factory = get_session_factory()
    session = session_factory()
    try:
        yield session
        session.commit()
    except Exception:
        session.rollback()
        raise
    finally:
        session.close()
        session_factory.remove()  # Clean up the session from the registry
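
Call-site sketch (editor's note, not part of the commit): the context manager
commits on success, rolls back on any exception, and always returns the
connection to the pool. "some_record" is a placeholder instance.

from Services.PostgresDb.database import get_db

with get_db() as db:
    db.add(some_record)  # hypothetical mapped instance
    # leaving the block commits; an exception inside triggers rollback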
82
Services/PostgresDb/how_to.py
Normal file
@@ -0,0 +1,82 @@
from Schemas import AddressNeighborhood
from Services.PostgresDb.Models.crud_alchemy import Credentials
from Services.PostgresDb.Models.mixin import BasicMixin
from Services.PostgresDb.Models.pagination import Pagination, PaginationResult


listing = False
creating = False
updating = True

new_session = AddressNeighborhood.new_session()
new_session_test = AddressNeighborhood.new_session()

BasicMixin.creds = Credentials(person_id=10, person_name="Berkay Super User")


if listing:
    """List Options and Queries"""
    AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
        AddressNeighborhood.neighborhood_code.icontains("10"),
        db=new_session,
    ).query
    query_of_list_options = {
        "neighborhood_name__ilike": "A%",
        "neighborhood_code__contains": "3",
    }
    address_neighborhoods = AddressNeighborhood.filter_all(
        *AddressNeighborhood.convert(query_of_list_options),
        db=new_session,
    )
    pagination = Pagination(data=address_neighborhoods)
    pagination.page = 9
    pagination.size = 10
    pagination.orderField = ["type_code", "neighborhood_code"]
    pagination.orderType = ["desc", "asc"]

    pagination_result = PaginationResult(
        data=address_neighborhoods, pagination=pagination
    )
    print(pagination_result.pagination.as_dict())
    print(pagination_result.data)

if creating:
    """Create Queries"""
    find_or_create = AddressNeighborhood.find_or_create(
        neighborhood_code="100",
        neighborhood_name="Test",
        locality_id=15334,
        db=new_session,
    )
    find_or_create.save_via_metadata(db=new_session)
    find_or_create.destroy(db=new_session)
    find_or_create.save_via_metadata(db=new_session)
    find_or_create = AddressNeighborhood.find_or_create(
        neighborhood_code="100",
        neighborhood_name="Test",
        locality_id=15334,
        db=new_session,
    )
    find_or_create.save_via_metadata(db=new_session)

if updating:
    """Update Queries"""

    query_of_list_options = {
        "uu_id": str("33a89767-d2dc-4531-8f66-7b650e22a8a7"),
    }
    print("query_of_list_options", query_of_list_options)
    address_neighborhoods_one = AddressNeighborhood.filter_one(
        *AddressNeighborhood.convert(query_of_list_options),
        db=new_session,
    ).data
    address_neighborhoods_one.update(
        neighborhood_name="Test 44",
        db=new_session,
    )
    address_neighborhoods_one.save(db=new_session)
    address_neighborhoods_one = AddressNeighborhood.filter_one(
        *AddressNeighborhood.convert(query_of_list_options),
        db=new_session,
    ).data_as_dict
    print("address_neighborhoods_one", address_neighborhoods_one)
159
Services/Redis/Actions/actions.py
Normal file
@@ -0,0 +1,159 @@
import arrow

from typing import Optional, List, Dict, Union

from AllConfigs.main import MainConfig

from Services.Redis.conn import redis_cli
from Services.Redis.Models.base import RedisRow
from Services.Redis.Models.response import RedisResponse


class RedisActions:
    """Class for handling Redis operations with JSON data."""

    @classmethod
    def get_expiry_time(cls, expiry_kwargs: Dict[str, int]) -> int:
        """Calculate expiry time in seconds from kwargs."""
        time_multipliers = {"days": 86400, "hours": 3600, "minutes": 60, "seconds": 1}
        return sum(
            int(expiry_kwargs.get(unit, 0)) * multiplier
            for unit, multiplier in time_multipliers.items()
        )

    @classmethod
    def set_expiry_time(cls, expiry_seconds: int) -> Dict[str, int]:
        """Convert total seconds back into a dictionary of time units."""
        time_multipliers = {"days": 86400, "hours": 3600, "minutes": 60, "seconds": 1}
        result = {}
        for unit, multiplier in time_multipliers.items():
            if expiry_seconds >= multiplier:
                result[unit], expiry_seconds = divmod(expiry_seconds, multiplier)
        return result

    @classmethod
    def resolve_expires_at(cls, redis_row: RedisRow) -> str:
        """Resolve expiry time for a Redis key."""
        expiry_time = redis_cli.ttl(redis_row.redis_key)
        if expiry_time == -1:
            return "Key has no expiry time."
        return arrow.now().shift(seconds=expiry_time).format(MainConfig.DATETIME_FORMAT)

    @classmethod
    def delete_key(cls, key: Union[Optional[str], Optional[bytes]]) -> RedisResponse:
        try:
            redis_cli.delete(key)
            return RedisResponse(
                status=True,
                message="Value deleted successfully.",
            )
        except Exception as e:
            return RedisResponse(
                status=False,
                message="Value could not be deleted.",
                error=str(e),
            )

    @classmethod
    def delete(
        cls, list_keys: List[Union[Optional[str], Optional[bytes]]]
    ) -> RedisResponse:
        try:
            regex = RedisRow().regex(list_keys=list_keys)
            json_get = redis_cli.scan_iter(match=regex)

            for row in list(json_get):
                redis_cli.delete(row)

            return RedisResponse(
                status=True,
                message="Values deleted successfully.",
            )
        except Exception as e:
            return RedisResponse(
                status=False,
                message="Values could not be deleted.",
                error=str(e),
            )

    @classmethod
    def set_json(
        cls,
        list_keys: List[Union[str, bytes]],
        value: Optional[Union[Dict, List]],
        expires: Optional[Dict[str, int]] = None,
    ) -> RedisResponse:
        """Set JSON value in Redis with optional expiry."""
        redis_row = RedisRow()
        redis_row.merge(set_values=list_keys)
        redis_row.feed(value)
        redis_row.expires_at_string = None
        redis_row.expires_at = None
        try:
            if expires:
                redis_row.expires_at = expires
                expiry_time = cls.get_expiry_time(expiry_kwargs=expires)
                redis_cli.setex(
                    name=redis_row.redis_key,
                    time=expiry_time,
                    value=redis_row.value,
                )
                redis_row.expires_at_string = str(
                    arrow.now()
                    .shift(seconds=expiry_time)
                    .format(MainConfig.DATETIME_FORMAT)
                )
            else:
                redis_cli.set(name=redis_row.redis_key, value=redis_row.value)

            return RedisResponse(
                status=True,
                message="Value set successfully.",
                data=redis_row,
            )
        except Exception as e:
            return RedisResponse(
                status=False,
                message="Value could not be set.",
                error=str(e),
            )

    @classmethod
    def get_json(
        cls, list_keys: List[Union[Optional[str], Optional[bytes]]]
    ) -> RedisResponse:
        """Get JSON values from Redis using pattern matching."""
        try:
            list_of_rows = []
            regex = RedisRow.regex(list_keys=list_keys)
            json_get = redis_cli.scan_iter(match=regex)
            for row in list(json_get):
                redis_row = RedisRow()
                redis_row.set_key(key=row)
                redis_value = redis_cli.get(row)
                redis_value_expire = redis_cli.ttl(row)
                redis_row.expires_at = cls.set_expiry_time(
                    expiry_seconds=int(redis_value_expire)
                )
                redis_row.expires_at_string = cls.resolve_expires_at(
                    redis_row=redis_row
                )
                redis_row.feed(redis_value)
                list_of_rows.append(redis_row)
            if list_of_rows:
                return RedisResponse(
                    status=True,
                    message="Values retrieved successfully.",
                    data=list_of_rows,
                )
            return RedisResponse(
                status=False,
                message="No matching values were found.",
                data=list_of_rows,
            )
        except Exception as e:
            return RedisResponse(
                status=False,
                message="Values could not be retrieved.",
                error=str(e),
            )
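
Round-trip sketch (editor's note, not part of the commit): set a JSON payload
with a TTL, then read it back through the same compound key parts. The key
parts here are illustrative.

from Services.Redis import RedisActions

set_resp = RedisActions.set_json(
    list_keys=["session", "user-42"],
    value={"role": "admin"},
    expires={"minutes": 15},  # converted to 900 seconds internally
)
get_resp = RedisActions.get_json(list_keys=["session", "user-42"])
if get_resp.status:
    print(get_resp.first.row)  # {'role': 'admin'}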
310
Services/Redis/Models/base.py
Normal file
@@ -0,0 +1,310 @@
"""
Redis key-value operations with structured data handling.

This module provides a class for managing Redis key-value operations with support for:
- Structured data storage and retrieval
- Key pattern generation for searches
- JSON serialization/deserialization
- Type-safe value handling
"""

import json
from typing import Union, Dict, List, Optional, Any

import arrow

from Services.Redis.conn import redis_cli


class RedisKeyError(Exception):
    """Exception raised for Redis key-related errors."""

    pass


class RedisValueError(Exception):
    """Exception raised for Redis value-related errors."""

    pass


class RedisRow:
    """
    Handles Redis key-value operations with structured data.

    This class provides methods for:
    - Managing compound keys with delimiters
    - Converting between bytes and string formats
    - JSON serialization/deserialization of values
    - Pattern generation for Redis key searches

    Attributes:
        key: The Redis key in bytes or string format
        value: The stored value (will be JSON serialized)
        delimiter: Character used to separate compound key parts
        expires_at: Optional expiration timestamp
    """

    key: Union[str, bytes]
    value: Any
    delimiter: str = ":"
    expires_at: Optional[dict] = {"seconds": 60 * 60 * 30}
    expires_at_string: Optional[str]

    def get_expiry_time(self) -> int | None:
        """Calculate expiry time in seconds from the expires_at mapping."""
        time_multipliers = {"days": 86400, "hours": 3600, "minutes": 60, "seconds": 1}
        if self.expires_at:
            return sum(
                int(self.expires_at.get(unit, 0)) * multiplier
                for unit, multiplier in time_multipliers.items()
            )
        return None

    def merge(self, set_values: List[Union[str, bytes]]) -> None:
        """
        Merge a list of values into a single delimited key.

        Args:
            set_values: List of values to merge into the key

        Example:
            >>> row = RedisRow()
            >>> row.merge(["users", "123", "profile"])
            >>> print(row.key)
            b'users:123:profile'
        """
        if not set_values:
            raise RedisKeyError("Cannot merge empty list of values")

        merged = []
        for value in set_values:
            if value is None:
                continue
            if isinstance(value, bytes):
                value = value.decode()
            merged.append(str(value))

        self.key = self.delimiter.join(merged).encode()

    @classmethod
    def regex(cls, list_keys: List[Union[str, bytes, None]]) -> str:
        """
        Generate a Redis search pattern from a list of keys.

        Args:
            list_keys: List of key parts; None entries become wildcards

        Returns:
            str: Redis key pattern with wildcards

        Example:
            >>> RedisRow.regex([None, "users", "active"])
            '*:users:active'
        """
        if not list_keys:
            return ""

        # Filter and convert valid keys
        valid_keys = []
        for key in list_keys:
            if key is None or str(key) == "None":
                continue
            if isinstance(key, bytes):
                key = key.decode()
            valid_keys.append(str(key))

        # Build pattern
        pattern = cls.delimiter.join(valid_keys)
        if not pattern:
            return ""

        # Add wildcard if first key was None
        if list_keys[0] is None:
            pattern = f"*{cls.delimiter}{pattern}"
        if "*" not in pattern and any(list_key is None for list_key in list_keys):
            pattern = f"{pattern}:*"
        return pattern

    def parse(self) -> List[str]:
        """
        Parse the key into its component parts.

        Returns:
            List[str]: Key parts split by delimiter

        Example:
            >>> row = RedisRow()
            >>> row.key = b'users:123:profile'
            >>> row.parse()
            ['users', '123', 'profile']
        """
        if not self.key:
            return []

        key_str = self.key.decode() if isinstance(self.key, bytes) else self.key
        return key_str.split(self.delimiter)

    def feed(self, value: Union[bytes, Dict, List, str]) -> None:
        """
        Convert and store the value in JSON format.

        Args:
            value: Value to store (bytes, dict, list, or str)

        Raises:
            RedisValueError: If value type is not supported

        Example:
            >>> row = RedisRow()
            >>> row.feed({"name": "John", "age": 30})
            >>> print(row.value)
            '{"name": "John", "age": 30}'
        """
        try:
            if isinstance(value, (dict, list)):
                self.value = json.dumps(value)
            elif isinstance(value, bytes):
                self.value = json.dumps(json.loads(value.decode()))
            elif isinstance(value, str):
                self.value = value
            else:
                raise RedisValueError(f"Unsupported value type: {type(value)}")
        except json.JSONDecodeError as e:
            raise RedisValueError(f"Invalid JSON format: {str(e)}")

    def modify(self, add_dict: Dict) -> None:
        """
        Modify existing data by merging with a new dictionary.

        Args:
            add_dict: Dictionary to merge with existing data

        Example:
            >>> row = RedisRow()
            >>> row.feed({"name": "John"})
            >>> row.modify({"age": 30})
            >>> print(row.row)
            {"name": "John", "age": 30}
        """
        if not isinstance(add_dict, dict):
            raise RedisValueError("modify() requires a dictionary argument")
        current_data = self.row if self.row else {}
        if not isinstance(current_data, dict):
            raise RedisValueError("Cannot modify non-dictionary data")
        current_data = {
            **current_data,
            **add_dict,
        }
        self.feed(current_data)
        self.save()

    def save(self):
        """
        Save the data to Redis with optional expiration.

        Raises:
            RedisKeyError: If key is not set
            RedisValueError: If value is not set
        """
        if not self.key:
            raise RedisKeyError("Cannot save data without a key")
        if not self.value:
            raise RedisValueError("Cannot save empty data")

        if self.expires_at:
            redis_cli.setex(
                name=self.redis_key, time=self.get_expiry_time(), value=self.value
            )
            self.expires_at_string = str(
                arrow.now()
                .shift(seconds=self.get_expiry_time())
                .format("YYYY-MM-DD HH:mm:ss")
            )
            return self.value
        redis_cli.set(name=self.redis_key, value=self.value)
        self.expires_at = None
        self.expires_at_string = None
        return self.value

    def remove(self, key: str) -> None:
        """
        Remove a key from the stored dictionary.

        Args:
            key: Key to remove from the stored dictionary

        Raises:
            KeyError: If the key doesn't exist
            RedisValueError: If the stored value is not a dictionary
        """
        current_data = self.row
        if not isinstance(current_data, dict):
            raise RedisValueError("Cannot remove key from non-dictionary data")

        try:
            current_data.pop(key)
            self.feed(current_data)
            self.save()
        except KeyError:
            raise KeyError(f"Key '{key}' not found in stored data")

    def delete(self) -> None:
        """Delete the key from Redis."""
        try:
            redis_cli.delete(self.redis_key)
        except Exception as e:
            print(f"Error deleting key: {str(e)}")

    @property
    def keys(self) -> str:
        """
        Get key as string.

        Returns:
            str: Key in string format
        """
        return self.key.decode() if isinstance(self.key, bytes) else self.key

    def set_key(self, key: Union[str, bytes]) -> None:
        """
        Set key ensuring bytes format.

        Args:
            key: Key in string or bytes format
        """
        if not key:
            raise RedisKeyError("Cannot set empty key")
        self.key = key if isinstance(key, bytes) else str(key).encode()

    @property
    def redis_key(self) -> bytes:
        """
        Get key in bytes format for Redis operations.

        Returns:
            bytes: Key in bytes format
        """
        return self.key if isinstance(self.key, bytes) else str(self.key).encode()

    @property
    def row(self) -> Union[Dict, List]:
        """
        Get stored value as a Python object.

        Returns:
            Union[Dict, List]: Deserialized JSON data
        """
        try:
            return json.loads(self.value)
        except json.JSONDecodeError as e:
            raise RedisValueError(f"Invalid JSON format in stored value: {str(e)}")

    @property
    def as_dict(self) -> Dict[str, Any]:
        """
        Get row data as dictionary.

        Returns:
            Dict[str, Any]: Dictionary with keys and value
        """
        return {
            "keys": self.keys,
            "value": self.row,
        }
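
Key-handling sketch (editor's note, not part of the commit): merge() builds
the compound key, regex() builds the scan pattern, and None parts become
wildcards at the ends of the pattern.

row = RedisRow()
row.merge(["users", "123", "profile"])
print(row.redis_key)                              # b'users:123:profile'
print(RedisRow.regex([None, "users", "active"]))  # '*:users:active'
print(RedisRow.regex(["users", None]))            # 'users:*'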
68
Services/Redis/Models/response.py
Normal file
@@ -0,0 +1,68 @@
from typing import Union, Dict, List, Optional, Any
from Services.Redis.Models.base import RedisRow


class RedisResponse:
    """Base class for Redis response handling."""

    def __init__(
        self,
        status: bool,
        message: str,
        data: Any = None,
        error: Optional[str] = None,
    ):
        self.status = status
        self.message = message
        self.data = data

        if isinstance(data, dict):
            self.data_type = "dict"
        elif isinstance(data, list):
            self.data_type = "list"
        elif isinstance(data, RedisRow):
            self.data_type = "row"
        elif data is None:
            self.data_type = None
        self.error = error

    def as_dict(self) -> Dict:
        data = self.all
        main_dict = {
            "status": self.status,
            "message": self.message,
            "count": self.count,
            "dataType": getattr(self, "data_type", None),
        }
        if isinstance(data, RedisRow):
            dict_return = {data.keys: data.row}
            dict_return.update(dict(main_dict))
            return dict_return
        elif isinstance(data, list):
            dict_return = {row.keys: row.row for row in data}
            dict_return.update(dict(main_dict))
            return dict_return
        return main_dict

    @property
    def all(self) -> Union[List[RedisRow], RedisRow]:
        return self.data or []

    @property
    def count(self) -> int:
        row = self.all
        if isinstance(row, list):
            return len(row)
        elif isinstance(row, RedisRow):
            return 1
        return 0

    @property
    def first(self) -> Union[RedisRow, None]:
        if self.data:
            if isinstance(self.data, list):
                return self.data[0]
            elif isinstance(self.data, RedisRow):
                return self.data
        self.status = False
        return None
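
Shape sketch (editor's note, not part of the commit): data may hold a single
RedisRow or a list of rows; count and first normalize over both cases. Key
parts are illustrative.

resp = RedisActions.get_json(list_keys=["session", None])
print(resp.count)  # number of matched rows
row = resp.first   # first RedisRow, or None (status flips to False)
if row:
    print(resp.as_dict())  # merges {key: value} pairs with status metadata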
28
Services/Redis/Models/row.py
Normal file
@@ -0,0 +1,28 @@
from typing import Optional
from uuid import UUID
from pydantic import BaseModel, field_validator


class AccessToken(BaseModel):

    accessToken: Optional[str] = None
    userUUID: Optional[str | UUID] = None

    @field_validator("userUUID", mode="after")
    def validate_uuid(cls, v):
        """Convert UUID to string during validation."""
        if v is None:
            return None
        return str(v)

    def to_list(self):
        """Convert to list for Redis storage."""
        return [self.accessToken, str(self.userUUID) if self.userUUID else None]

    @property
    def count(self):
        return 2

    @property
    def delimiter(self):
        return "*"
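
Key-building sketch (editor's note, not part of the commit): to_list() yields
the parts fed to RedisRow.merge / RedisRow.regex, so a None userUUID becomes a
wildcard when searching. The token value is a placeholder.

token = AccessToken(accessToken="abc123", userUUID=None)
print(token.to_list())  # ['abc123', None]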
8
Services/Redis/__init__.py
Normal file
@@ -0,0 +1,8 @@
from Services.Redis.Actions.actions import RedisActions
from Services.Redis.Models.row import AccessToken


__all__ = [
    "RedisActions",
    "AccessToken",
]
25
Services/Redis/conn.py
Normal file
@@ -0,0 +1,25 @@
from redis import Redis

from AllConfigs.Redis.configs import WagRedis


class RedisConn:

    def __init__(self):
        self.redis = Redis(**WagRedis.as_dict())
        if not self.check_connection():
            raise Exception("Connection error")

    def check_connection(self):
        return self.redis.ping()

    def set_connection(self, host, password, port, db):
        self.redis = Redis(host=host, password=password, port=port, db=db)
        return self.redis


try:
    redis_conn = RedisConn()
    redis_cli = redis_conn.redis
except Exception as e:
    print("Redis Connection Error", e)
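
Import-time caveat (editor's note, not part of the commit): if the connection
fails, only the error is printed and the module-level name redis_cli is never
bound, so later imports raise ImportError. A defensive sketch for call sites:

try:
    from Services.Redis.conn import redis_cli
except ImportError:
    redis_cli = None  # connection failed at import time

if redis_cli is not None:
    redis_cli.ping()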
76
Services/Redis/howto.py
Normal file
@@ -0,0 +1,76 @@
import secrets
import random
from uuid import uuid4

from Services.Redis.Actions.actions import RedisActions
from Services.Redis.Models.row import AccessToken


def generate_token(length=32):
    letters = "abcdefghijklmnopqrstuvwxyz"
    merged_letters = [letter for letter in letters] + [
        letter.upper() for letter in letters
    ]
    token_generated = secrets.token_urlsafe(length)
    for i in str(token_generated):
        if i not in merged_letters:
            token_generated = token_generated.replace(
                i, random.choice(merged_letters), 1
            )
    return token_generated


save_json = {
    "user": {
        "first_name": "John",
        "last_name": "Doe",
        "email": "johndoe@glu.com",
        "phone": "1234567890",
        "address": "1234 Main St",
        "details": {
            "city": "San Francisco",
            "state": "CA",
            "zip": "94111",
        },
    },
    "domain": "https://www.example.com",
    "info": {
        "mac": "oıuıouqqzxöç.işüğ",
        "version": "1.0.0",
        "type": "web",
        "device": "desktop",
    },
}

# access_object = AccessToken(
#     userUUID=str(uuid4()),
#     accessToken=generate_token(60)
# )
# redis_object = RedisActions.set_json(
#     list_keys=access_object.to_list(),
#     value=save_json,
#     expires={"seconds": 720}
# )
# quit()
acc_token = "IuDXEzqzCSyOJvrwdjyxqGPOBnleUZjjXWsELJgUglJjyGhINOzAUpdMuzEzoTyOsJRUeEQsgXGUXrer:521a4ba7-898f-4204-a2e5-3226e1aea1e1"

userUUID = acc_token.split(":")[1]
accessToken = acc_token.split(":")[0]
access_object = AccessToken(userUUID=None, accessToken=accessToken)
print("access_object", access_object.to_list())
redis_object = RedisActions.get_json(
    list_keys=access_object.to_list(),
)
# print("type type(redis_object)", type(redis_object))
# print("type redis_object.data", type(redis_object.data))
# print("count", redis_object.count)
# print("data", redis_object.data)
# print("data", redis_object.as_dict())
# print("message", redis_object.message)
redis_row_object = redis_object.first
redis_row_object.modify({"reachable_event_list_id": [i for i in range(50)]})
# redis_row_object.remove("reachable_event_list_id")
# redis_row_object.modify({"reachable_event_list_id": [i for i in range(10)]})
# if redis_row_object:
#     print("redis_row_object", redis_row_object.delete())
#     print('redis_row_object.as_dict', redis_row_object.as_dict)
0
Services/__init__.py
Normal file
102
Services/pagination.py
Normal file
@@ -0,0 +1,102 @@
from typing import Any, Dict, List, Union, Protocol
from dataclasses import dataclass
from sqlalchemy.orm import Query

from Services.PostgresDb.Models.response import PostgresResponse


class DataValidator(Protocol):
    """Protocol for data validation methods."""

    @staticmethod
    def validate_data(data: Any, cls_object: Any) -> None:
        """Validate data and raise HTTPExceptionApi if invalid."""
        ...


@dataclass
class PaginationConfig:
    """
    Configuration for pagination settings.

    Attributes:
        page: Current page number (default: 1)
        size: Items per page (default: 10)
        order_field: Field to order by (default: "id")
        order_type: Order direction (default: "asc")
    """

    page: int = 1
    size: int = 10
    order_field: str = "id"
    order_type: str = "asc"


class Pagination:
    """
    Handles pagination logic for query results.

    Manages page size, current page, ordering, and calculates total pages
    and items based on the data source.

    Attributes:
        DEFAULT_SIZE: Default number of items per page (10)
        MIN_SIZE: Minimum allowed page size (10)
        MAX_SIZE: Maximum allowed page size (40)
    """

    DEFAULT_SIZE = 10
    MIN_SIZE = 10
    MAX_SIZE = 40

    def __init__(self):
        self.size: int = self.DEFAULT_SIZE
        self.page: int = 1
        self.order_field: str = "id"
        self.order_type: str = "asc"
        self.page_count: int = 1
        self.total_count: int = 0
        self.total_pages: int = 1

    def change(self, config: PaginationConfig) -> None:
        """Update pagination settings from config, falling back to
        DEFAULT_SIZE when the requested size is out of bounds."""
        self.size = (
            config.size
            if self.MIN_SIZE <= config.size <= self.MAX_SIZE
            else self.DEFAULT_SIZE
        )
        self.page = config.page
        self.order_field = config.order_field
        self.order_type = config.order_type
        self._update_page_counts()

    def feed(self, data: Union[List[Any], PostgresResponse, Query]) -> None:
        """Calculate pagination based on the data source type."""
        if isinstance(data, list):
            self.total_count = len(data)
        elif isinstance(data, PostgresResponse):
            self.total_count = data.count
        else:  # sqlalchemy Query
            self.total_count = data.count()
        self._update_page_counts()

    def _update_page_counts(self) -> None:
        """Update page counts and clamp the current page into range."""
        self.total_pages = max(1, (self.total_count + self.size - 1) // self.size)
        self.page = max(1, min(self.page, self.total_pages))
        # The last page holds the remainder; every other page holds `size` items.
        self.page_count = (
            self.total_count % self.size
            if self.page == self.total_pages and self.total_count % self.size
            else self.size
        )
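        # Worked example (illustrative): total_count=45, size=10 gives
        # total_pages = max(1, (45 + 10 - 1) // 10) = 5; on page 5 (the last
        # page) page_count = 45 % 10 = 5, while pages 1-4 hold the full 10.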
    def as_dict(self) -> Dict[str, Any]:
        """Convert pagination state to dictionary format."""
        return {
            "size": self.size,
            "page": self.page,
            "totalCount": self.total_count,
            "totalPages": self.total_pages,
            "pageCount": self.page_count,
            "orderField": self.order_field,
            "orderType": self.order_type,
        }
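As a usage sketch of the class above (the 45-item list is an illustrative stand-in for a real query result):

# Usage sketch -- the 45-item list stands in for a real data source.
pagination = Pagination()
pagination.feed(list(range(45)))  # total_count=45 -> total_pages=5
pagination.change(PaginationConfig(page=5, size=10))
print(pagination.as_dict())
# {"size": 10, "page": 5, "totalCount": 45, "totalPages": 5,
#  "pageCount": 5, "orderField": "id", "orderType": "asc"}

Note that feed() runs before change(): _update_page_counts() clamps the page against the current total, so requesting page 5 before any data has been fed would be clamped back to 1.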