updated docs

This commit is contained in:
2025-01-22 21:46:11 +03:00
parent 87e5f5ab06
commit 1ba2694a9d
50 changed files with 3342 additions and 401 deletions

View File

@@ -7,10 +7,12 @@ This module provides MongoDB connection management with:
3. Error handling
"""
from typing import Optional, Dict, Any, List, Union
from typing import Optional, Dict, Any, List, Union, Callable
from contextlib import contextmanager
from pymongo import MongoClient
from pymongo.results import InsertOneResult, DeleteResult, UpdateResult
from pymongo.cursor import Cursor
from functools import wraps
from AllConfigs.NoSqlDatabase.configs import MongoConfig
@@ -96,39 +98,44 @@ class MongoDBHandler(
def __init__(self):
"""Initialize MongoDB connection if not already initialized."""
if not self._client:
# Build connection URL based on whether credentials are provided
if MongoConfig.USER_NAME and MongoConfig.PASSWORD:
connection_url = (
f"mongodb://{MongoConfig.USER_NAME}:{MongoConfig.PASSWORD}"
f"@{MongoConfig.HOST}:{MongoConfig.PORT}"
)
else:
connection_url = f"mongodb://{MongoConfig.HOST}:{MongoConfig.PORT}"
# Build connection options
connection_kwargs = {
"host": connection_url,
"host": MongoConfig.URL,
"maxPoolSize": 50, # Maximum number of connections in the pool
"minPoolSize": 10, # Minimum number of connections in the pool
"maxIdleTimeMS": 30000, # Maximum time a connection can be idle (30 seconds)
"waitQueueTimeoutMS": 2000, # How long a thread will wait for a connection
"serverSelectionTimeoutMS": 5000, # How long to wait for server selection
}
self._client = MongoClient(**connection_kwargs)
# Test connection
self._client.admin.command("ping")
def __enter__(self):
    """Context manager entry point.

    Returns:
        MongoDBHandler: this handler itself — the connection was already
        opened in ``__init__``, so there is nothing extra to acquire here.
    """
    return self
def __exit__(self, exc_type, exc_val, exc_tb):
    """Context manager exit point - ensures connection is properly closed.

    Closes the shared client and resets it to ``None`` so a later
    ``__init__`` can reconnect.

    Returns:
        bool: always ``False`` so exceptions raised inside the ``with``
        body propagate to the caller.
    """
    try:
        if self._client:
            self._client.close()
            self._client = None
    except Exception:
        # Deliberately broad: errors during shutdown are not actionable
        # and must not mask the exception (if any) from the with-body.
        pass
    return False  # Don't suppress any exceptions
def close(self):
    """Close MongoDB connection.

    Idempotent: safe to call when no client exists. Resets ``_client``
    to ``None`` so a subsequent ``__init__`` rebuilds the connection.
    """
    if self._client:
        self._client.close()
        self._client = None
def __del__(self):
"""Ensure connection is closed on deletion."""
self.close()
try:
if self._client:
self._client.close()
self._client = None
except Exception:
# Silently pass any errors during shutdown
pass
@property
def client(self) -> MongoClient:
@@ -145,6 +152,41 @@ class MongoDBHandler(
database = self.get_database(database_name)
return database[collection_name]
# Create a function to get the singleton instance
@classmethod
@contextmanager
def get_mongodb(cls):
    """Yield the MongoDB singleton handler, closing its client on exit.

    Usage:
        with MongoDBHandler.get_mongodb() as db:
            ...

    The client is closed and reset in the ``finally`` clause so the
    connection is released even if the with-body raises.
    """
    handler = cls()
    try:
        yield handler
    finally:
        try:
            client = handler._client
            if client:
                client.close()
                handler._client = None
        except Exception:
            # Shutdown errors are intentionally swallowed; there is no
            # useful recovery at teardown time.
            pass
# Create a singleton instance
@classmethod
def with_mongodb(cls, func: Callable):
    """Decorator that injects a managed MongoDB handler as the first argument.

    Usage:
        @MongoDBHandler.with_mongodb
        def my_function(db, *args, **kwargs):
            # db is the MongoDB instance
            pass

    The wrapped call runs inside ``cls.get_mongodb()``, so the connection
    is opened before ``func`` runs and closed when it returns or raises.
    """
    @wraps(func)
    def wrapper(*call_args, **call_kwargs):
        with cls.get_mongodb() as handler:
            return func(handler, *call_args, **call_kwargs)
    return wrapper
# Create a singleton instance for backward compatibility
mongodb = MongoDBHandler()

View File

@@ -5,25 +5,28 @@ This module provides practical examples of using MongoDB operations through our
Each example demonstrates different aspects of CRUD operations and aggregation.
"""
from typing import Dict, List, Any
import arrow
from datetime import datetime
from Services.MongoDb.database import mongodb
from Services.MongoDb.database import MongoDBHandler
def insert_examples() -> None:
@MongoDBHandler.with_mongodb
def insert_examples(db) -> None:
"""Examples of insert operations."""
# Get the collection
users_collection = db.get_collection("users")
products_collection = db.get_collection("products")
# Single document insert
user_doc = {
"username": "john_doe",
"email": "john@example.com",
"age": 30,
"created_at": datetime.utcnow(),
"created_at": datetime.now(),
}
user_id = mongodb.insert_one(
database="user_db", collection="users", document=user_doc
)
print(f"Inserted user with ID: {user_id}")
result = users_collection.insert_one(user_doc)
print(f"Inserted user with ID: {result.inserted_id}")
# Multiple documents insert
products = [
@@ -31,80 +34,68 @@ def insert_examples() -> None:
{"name": "Mouse", "price": 29.99, "stock": 100},
{"name": "Keyboard", "price": 59.99, "stock": 75},
]
product_ids = mongodb.insert_many(
database="store_db", collection="products", documents=products
)
print(f"Inserted {len(product_ids)} products")
result = products_collection.insert_many(products)
print(f"Inserted {len(result.inserted_ids)} products")
def find_examples() -> None:
@MongoDBHandler.with_mongodb
def find_examples(db) -> None:
"""Examples of find operations."""
# Get the collections
users_collection = db.get_collection("users")
products_collection = db.get_collection("products")
# Find one document
user = mongodb.find_one(
database="user_db",
collection="users",
filter_query={"email": "john@example.com"},
projection={"username": 1, "email": 1, "_id": 0},
)
user = users_collection.find_one({"email": "john@example.com"})
print(f"Found user: {user}")
# Find many with pagination
page_size = 10
page_number = 1
products = mongodb.find_many(
database="store_db",
collection="products",
filter_query={"price": {"$lt": 100}},
projection={"name": 1, "price": 1},
sort=[("price", 1)], # Sort by price ascending
limit=page_size,
skip=(page_number - 1) * page_size,
)
# Find many documents
products_cursor = products_collection.find({"price": {"$lt": 100}})
products = list(products_cursor)
print(f"Found {len(products)} products under $100")
def update_examples() -> None:
@MongoDBHandler.with_mongodb
def update_examples(db) -> None:
"""Examples of update operations."""
# Get the collections
products_collection = db.get_collection("products")
# Update single document
result = mongodb.update_one(
database="store_db",
collection="products",
filter_query={"name": "Laptop"},
update_data={"price": 899.99, "stock": 45},
upsert=False,
result = products_collection.update_one(
{"name": "Laptop"}, {"$set": {"price": 899.99, "stock": 45}}
)
print(f"Updated {result['modified_count']} laptop(s)")
print(f"Updated {result.modified_count} laptop(s)")
# Update multiple documents
result = mongodb.update_many(
database="store_db",
collection="products",
filter_query={"stock": {"$lt": 10}},
update_data={"status": "low_stock"},
upsert=True,
result = products_collection.update_many(
{"stock": {"$lt": 10}}, {"$set": {"status": "low_stock"}}
)
print(f"Updated {result['modified_count']} low stock products")
print(f"Updated {result.modified_count} low stock products")
def delete_examples() -> None:
@MongoDBHandler.with_mongodb
def delete_examples(db) -> None:
"""Examples of delete operations."""
# Get the collections
users_collection = db.get_collection("users")
products_collection = db.get_collection("products")
# Delete single document
count = mongodb.delete_one(
database="user_db",
collection="users",
filter_query={"email": "john@example.com"},
)
print(f"Deleted {count} user")
result = users_collection.delete_one({"email": "john@example.com"})
print(f"Deleted {result.deleted_count} user")
# Delete multiple documents
count = mongodb.delete_many(
database="store_db", collection="products", filter_query={"stock": 0}
)
print(f"Deleted {count} out-of-stock products")
result = products_collection.delete_many({"stock": 0})
print(f"Deleted {result.deleted_count} out-of-stock products")
def aggregate_examples() -> None:
"""Examples of aggregation operations."""
@MongoDBHandler.with_mongodb
def aggregate_examples(db) -> None:
"""Examples of aggregate operations."""
# Get the collection
products_collection = db.get_collection("products")
# Calculate average price by category
pipeline = [
{
@@ -116,21 +107,23 @@ def aggregate_examples() -> None:
},
{"$sort": {"avg_price": -1}},
]
results = mongodb.aggregate(
database="store_db", collection="products", pipeline=pipeline
)
results = products_collection.aggregate(pipeline)
print("Category statistics:", list(results))
def complex_query_example() -> None:
"""Example of a complex query combining multiple operations."""
@MongoDBHandler.with_mongodb
def complex_query_example(db) -> None:
"""Example of a more complex query combining multiple operations."""
# Get the collection
users_collection = db.get_collection("users")
# Find active users who made purchases in last 30 days
pipeline = [
{
"$match": {
"status": "active",
"last_purchase": {
"$gte": datetime.utcnow().replace(day=datetime.utcnow().day - 30)
"$gte": arrow.now().shift(days=-30).datetime,
},
}
},
@@ -152,9 +145,7 @@ def complex_query_example() -> None:
},
{"$sort": {"total_spent": -1}},
]
results = mongodb.aggregate(
database="user_db", collection="users", pipeline=pipeline
)
results = users_collection.aggregate(pipeline)
print("Active users with recent purchases:", list(results))

View File

@@ -16,12 +16,14 @@ class BaseAlchemyModel:
Session: Session object for model
Actions: save, flush, rollback, commit
"""
__abstract__ = True
@classmethod
def new_session(cls) -> Session:
"""Get database session."""
from Services.PostgresDb.database import get_db
with get_db() as session:
return session
@@ -143,6 +145,3 @@ class BaseAlchemyModel:
db: Database session
"""
db.rollback()

View File

@@ -27,7 +27,9 @@ class Credentials(BaseModel):
class CrudActions(SystemFields):
@classmethod
def extract_system_fields(cls, filter_kwargs: dict, create: bool = True) -> Dict[str, Any]:
def extract_system_fields(
cls, filter_kwargs: dict, create: bool = True
) -> Dict[str, Any]:
"""
Remove system-managed fields from input dictionary.
@@ -63,8 +65,6 @@ class CrudActions(SystemFields):
if key in cls.columns + cls.hybrid_properties + cls.settable_relations
}
@classmethod
def iterate_over_variables(cls, val: Any, key: str) -> tuple[bool, Optional[Any]]:
"""
@@ -187,9 +187,9 @@ class CrudActions(SystemFields):
return_dict[key] = value_of_database
else:
# Handle default field selection
exclude_list = (
getattr(self, "__exclude__fields__", []) or []
) + list(self.__system_default_model__)
exclude_list = (getattr(self, "__exclude__fields__", []) or []) + list(
self.__system_default_model__
)
columns_list = list(set(self.columns).difference(set(exclude_list)))
columns_list = [col for col in columns_list if str(col)[-2:] != "id"]
columns_list.extend(
@@ -230,18 +230,18 @@ class CRUDModel(BaseAlchemyModel, CrudActions):
"""
if getattr(cls.creds, "person_id", None) and getattr(
cls.creds, "person_name", None
cls.creds, "person_name", None
):
record_created.created_by_id = cls.creds.person_id
record_created.created_by = cls.creds.person_name
return
@classmethod
def update_metadata(cls, created: bool, error_case: str = None, message: str = None) -> None:
def update_metadata(
cls, created: bool, error_case: str = None, message: str = None
) -> None:
cls.meta_data = MetaDataRow(
created=created,
error_case=error_case,
message=message
created=created, error_case=error_case, message=message
)
@classmethod
@@ -250,7 +250,7 @@ class CRUDModel(BaseAlchemyModel, CrudActions):
error_code=cls.meta_data.error_case,
lang=cls.lang,
loc=get_line_number_for_error(),
sys_msg=cls.meta_data.message
sys_msg=cls.meta_data.message,
)
@classmethod
@@ -385,11 +385,15 @@ class CRUDModel(BaseAlchemyModel, CrudActions):
raise ValueError("Confirm field cannot be updated with other fields")
if is_confirmed_argument:
if getattr(self.creds, "person_id", None) and getattr(self.creds, "person_name", None):
if getattr(self.creds, "person_id", None) and getattr(
self.creds, "person_name", None
):
self.confirmed_by_id = self.creds.person_id
self.confirmed_by = self.creds.person_name
else:
if getattr(self.creds, "person_id", None) and getattr(self.creds, "person_name", None):
if getattr(self.creds, "person_id", None) and getattr(
self.creds, "person_name", None
):
self.updated_by_id = self.creds.person_id
self.updated_by = self.creds.person_name
return

View File

@@ -28,9 +28,7 @@ class ArgumentModel:
@classmethod
def _query(cls: Type[T], db: Session) -> Query:
"""Returns the query to use in the model."""
return (
cls.pre_query if cls.pre_query else db.query(cls)
)
return cls.pre_query if cls.pre_query else db.query(cls)
@classmethod
def add_new_arg_to_args(cls: Type[T], args_list, argument, value):
@@ -79,7 +77,7 @@ class QueryModel(ArgumentModel, BaseModel, SmartQueryMixin):
@classmethod
def convert(
cls: Type[T], smart_options: dict, validate_model: Any = None
cls: Type[T], smart_options: dict, validate_model: Any = None
) -> tuple[BinaryExpression]:
if not validate_model:
return tuple(cls.filter_expr(**smart_options))
@@ -107,11 +105,11 @@ class QueryModel(ArgumentModel, BaseModel, SmartQueryMixin):
@classmethod
def filter_one(
cls: Type[T],
*args: Any,
db: Session,
system: bool = False,
expired: bool = False,
cls: Type[T],
*args: Any,
db: Session,
system: bool = False,
expired: bool = False,
) -> PostgresResponse:
"""
Filter single record by expressions.
@@ -132,7 +130,6 @@ class QueryModel(ArgumentModel, BaseModel, SmartQueryMixin):
query = cls._query(db).filter(*args)
return PostgresResponse(pre_query=cls._query(db), query=query, is_array=False)
@classmethod
def filter_all_system(
cls: Type[T], *args: BinaryExpression, db: Session
@@ -152,9 +149,7 @@ class QueryModel(ArgumentModel, BaseModel, SmartQueryMixin):
return PostgresResponse(pre_query=cls._query(db), query=query, is_array=True)
@classmethod
def filter_all(
cls: Type[T], *args: Any, db: Session
) -> PostgresResponse:
def filter_all(cls: Type[T], *args: Any, db: Session) -> PostgresResponse:
"""
Filter multiple records by expressions.
@@ -170,9 +165,7 @@ class QueryModel(ArgumentModel, BaseModel, SmartQueryMixin):
return PostgresResponse(pre_query=cls._query(db), query=query, is_array=True)
@classmethod
def filter_by_all_system(
cls: Type[T], db: Session, **kwargs
) -> PostgresResponse:
def filter_by_all_system(cls: Type[T], db: Session, **kwargs) -> PostgresResponse:
"""
Filter multiple records by keyword arguments.

View File

@@ -1,7 +1,2 @@
class LanguageModel:
__language_model__ = None

View File

@@ -37,7 +37,6 @@ class CrudMixin(BasicMixin, SerializeMixin, ReprMixin):
__abstract__ = True
# Primary and reference fields
id: Mapped[int] = mapped_column(Integer, primary_key=True)
uu_id: Mapped[str] = mapped_column(
@@ -171,6 +170,3 @@ class CrudCollection(CrudMixin):
# )
#
# return headers_and_validation

View File

@@ -155,14 +155,18 @@ class PaginationResult:
)
order_criteria = zip(self.order_by, self.pagination.orderType)
for field, direction in order_criteria:
if hasattr(self._query.column_descriptions[0]['entity'], field):
if hasattr(self._query.column_descriptions[0]["entity"], field):
if direction.lower().startswith("d"):
self._query = self._query.order_by(
desc(getattr(self._query.column_descriptions[0]['entity'], field))
desc(
getattr(self._query.column_descriptions[0]["entity"], field)
)
)
else:
self._query = self._query.order_by(
asc(getattr(self._query.column_descriptions[0]['entity'], field))
asc(
getattr(self._query.column_descriptions[0]["entity"], field)
)
)
return self._query
@@ -171,6 +175,11 @@ class PaginationResult:
"""Get query object."""
query_ordered = self.dynamic_order_by()
query_paginated = query_ordered.limit(self.limit).offset(self.offset)
queried_data = query_paginated.all() if self.response_type else query_paginated.first()
return [result.get_dict() for result in queried_data] if self.response_type else queried_data.get_dict()
queried_data = (
query_paginated.all() if self.response_type else query_paginated.first()
)
return (
[result.get_dict() for result in queried_data]
if self.response_type
else queried_data.get_dict()
)

View File

@@ -26,11 +26,11 @@ class PostgresResponse(Generic[T]):
"""
def __init__(
self,
pre_query: Query,
query: Query,
is_array: bool = True,
metadata: Any = None,
self,
pre_query: Query,
query: Query,
is_array: bool = True,
metadata: Any = None,
):
self._is_list = is_array
self._query = query

View File

@@ -1,5 +1,3 @@
class SystemFields:
__abstract__ = True
@@ -50,4 +48,3 @@ class SystemFields:
"updated_by_id",
"created_by_id",
)

View File

@@ -16,7 +16,7 @@ class TokenModel:
def __post_init__(self):
self.lang = str(self.lang or "tr").lower()
self.credentials = self.credentials or {}
if 'GMT' in self.timezone:
if "GMT" in self.timezone:
raise HTTPExceptionApi(
error_code="HTTP_400_BAD_REQUEST",
lang=self.lang,

View File

@@ -1,4 +1,3 @@
from Schemas import AddressNeighborhood
from Services.PostgresDb.Models.crud_alchemy import Credentials
from Services.PostgresDb.Models.mixin import BasicMixin
@@ -12,13 +11,13 @@ updating = True
new_session = AddressNeighborhood.new_session()
new_session_test = AddressNeighborhood.new_session()
BasicMixin.creds = Credentials(person_id=10, person_name='Berkay Super User')
BasicMixin.creds = Credentials(person_id=10, person_name="Berkay Super User")
if listing:
"""List Options and Queries """
AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
AddressNeighborhood.neighborhood_code.icontains('10'),
"""List Options and Queries"""
AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
AddressNeighborhood.neighborhood_code.icontains("10"),
db=new_session,
).query
query_of_list_options = {
@@ -32,18 +31,20 @@ if listing:
pagination = Pagination(data=address_neighborhoods)
pagination.page = 9
pagination.size = 10
pagination.orderField = ['type_code','neighborhood_code']
pagination.orderType = ['desc', 'asc']
pagination.orderField = ["type_code", "neighborhood_code"]
pagination.orderType = ["desc", "asc"]
pagination_result = PaginationResult(data=address_neighborhoods, pagination=pagination)
pagination_result = PaginationResult(
data=address_neighborhoods, pagination=pagination
)
print(pagination_result.pagination.as_dict())
print(pagination_result.data)
if creating:
"""Create Queries """
"""Create Queries"""
find_or_create = AddressNeighborhood.find_or_create(
neighborhood_code='100',
neighborhood_name='Test',
neighborhood_code="100",
neighborhood_name="Test",
locality_id=15334,
db=new_session,
)
@@ -51,26 +52,26 @@ if creating:
find_or_create.destroy(db=new_session)
find_or_create.save_via_metadata(db=new_session)
find_or_create = AddressNeighborhood.find_or_create(
neighborhood_code='100',
neighborhood_name='Test',
neighborhood_code="100",
neighborhood_name="Test",
locality_id=15334,
db=new_session,
)
find_or_create.save_via_metadata(db=new_session)
if updating:
"""Update Queries """
"""Update Queries"""
query_of_list_options = {
"uu_id": str("33a89767-d2dc-4531-8f66-7b650e22a8a7"),
}
print('query_of_list_options', query_of_list_options)
print("query_of_list_options", query_of_list_options)
address_neighborhoods_one = AddressNeighborhood.filter_one(
*AddressNeighborhood.convert(query_of_list_options),
db=new_session,
).data
address_neighborhoods_one.update(
neighborhood_name='Test 44',
neighborhood_name="Test 44",
db=new_session,
)
address_neighborhoods_one.save(db=new_session)
@@ -78,4 +79,4 @@ if updating:
*AddressNeighborhood.convert(query_of_list_options),
db=new_session,
).data_as_dict
print('address_neighborhoods_one', address_neighborhoods_one)
print("address_neighborhoods_one", address_neighborhoods_one)

View File

@@ -1,9 +1,9 @@
import json
import arrow
from typing import Optional, List, Dict, Union
from AllConfigs.main import MainConfig
from Services.Redis.conn import redis_cli
from Services.Redis.Models.base import RedisRow
from Services.Redis.Models.response import RedisResponse
@@ -21,6 +21,24 @@ class RedisActions:
for unit, multiplier in time_multipliers.items()
)
@classmethod
def set_expiry_time(cls, expiry_seconds: int) -> Dict[str, int]:
"""Convert total seconds back into a dictionary of time units."""
time_multipliers = {"days": 86400, "hours": 3600, "minutes": 60, "seconds": 1}
result = {}
for unit, multiplier in time_multipliers.items():
if expiry_seconds >= multiplier:
result[unit], expiry_seconds = divmod(expiry_seconds, multiplier)
return result
@classmethod
def resolve_expires_at(cls, redis_row: RedisRow) -> str:
    """Resolve expiry time for Redis key.

    Returns:
        str: the absolute expiry timestamp formatted with
        ``MainConfig.DATETIME_FORMAT``, or a fixed message when the key
        has no TTL set.
    """
    expiry_time = redis_cli.ttl(redis_row.redis_key)
    # ttl() returns -1 for a key that exists but has no expiry.
    if expiry_time == -1:
        return "Key has no expiry time."
    # NOTE(review): ttl() returns -2 for a missing key; that case falls
    # through here and yields a timestamp 2s in the past — confirm intended.
    return arrow.now().shift(seconds=expiry_time).format(MainConfig.DATETIME_FORMAT)
@classmethod
def delete_key(cls, key: Union[Optional[str], Optional[bytes]]):
try:
@@ -41,7 +59,7 @@ class RedisActions:
cls, list_keys: List[Union[Optional[str], Optional[bytes]]]
) -> RedisResponse:
try:
regex = RedisRow.regex(list_keys=list_keys)
regex = RedisRow().regex(list_keys=list_keys)
json_get = redis_cli.scan_iter(match=regex)
for row in list(json_get):
@@ -100,14 +118,6 @@ class RedisActions:
error=str(e),
)
@classmethod
def resolve_expires_at(cls, redis_row: RedisRow) -> str:
"""Resolve expiry time for Redis key."""
expiry_time = redis_cli.ttl(redis_row.redis_key)
if expiry_time == -1:
return "Key has no expiry time."
return arrow.now().shift(seconds=expiry_time).format(MainConfig.DATETIME_FORMAT)
@classmethod
def get_json(
cls, list_keys: List[Union[Optional[str], Optional[bytes]]]
@@ -120,8 +130,14 @@ class RedisActions:
for row in list(json_get):
redis_row = RedisRow()
redis_row.set_key(key=row)
redis_row.expires_at = cls.resolve_expires_at(redis_row=redis_row)
redis_value = redis_cli.get(redis_row.redis_key)
redis_value = redis_cli.get(row)
redis_value_expire = redis_cli.ttl(row)
redis_row.expires_at = cls.set_expiry_time(
expiry_seconds=int(redis_value_expire)
)
redis_row.expires_at_string = cls.resolve_expires_at(
redis_row=redis_row
)
redis_row.feed(redis_value)
list_of_rows.append(redis_row)
if list_of_rows:

View File

@@ -10,7 +10,7 @@ This module provides a class for managing Redis key-value operations with suppor
import json
from typing import Union, Dict, List, Optional, Any, ClassVar
from datetime import datetime
from Services.Redis.conn import redis_cli
class RedisKeyError(Exception):
@@ -44,23 +44,21 @@ class RedisRow:
key: ClassVar[Union[str, bytes]]
value: ClassVar[Any]
delimiter: ClassVar[str] = ":"
delimiter: str = ":"
expires_at: Optional[dict] = {"seconds": 60 * 60 * 30}
expires_at_string: Optional[str]
@classmethod
def get_expiry_time(cls) -> int | None:
def get_expiry_time(self) -> int | None:
"""Calculate expiry time in seconds from kwargs."""
time_multipliers = {"days": 86400, "hours": 3600, "minutes": 60, "seconds": 1}
if cls.expires_at:
if self.expires_at:
return sum(
int(cls.expires_at.get(unit, 0)) * multiplier
int(self.expires_at.get(unit, 0)) * multiplier
for unit, multiplier in time_multipliers.items()
)
return
@classmethod
def merge(cls, set_values: List[Union[str, bytes]]) -> None:
def merge(self, set_values: List[Union[str, bytes]]) -> None:
"""
Merge list of values into a single delimited key.
@@ -83,7 +81,7 @@ class RedisRow:
value = value.decode()
merged.append(str(value))
cls.key = cls.delimiter.join(merged).encode()
self.key = self.delimiter.join(merged).encode()
@classmethod
def regex(cls, list_keys: List[Union[str, bytes, None]]) -> str:
@@ -120,12 +118,11 @@ class RedisRow:
# Add wildcard if first key was None
if list_keys[0] is None:
pattern = f"*{cls.delimiter}{pattern}"
if "*" not in pattern:
if "*" not in pattern and any([list_key is None for list_key in list_keys]):
pattern = f"{pattern}:*"
return pattern
@classmethod
def parse(cls) -> List[str]:
def parse(self) -> List[str]:
"""
Parse the key into its component parts.
@@ -137,14 +134,13 @@ class RedisRow:
>>> RedisRow.parse()
['users', '123', 'profile']
"""
if not cls.key:
if not self.key:
return []
key_str = cls.key.decode() if isinstance(cls.key, bytes) else cls.key
return key_str.split(cls.delimiter)
key_str = self.key.decode() if isinstance(self.key, bytes) else self.key
return key_str.split(self.delimiter)
@classmethod
def feed(cls, value: Union[bytes, Dict, List, str]) -> None:
def feed(self, value: Union[bytes, Dict, List, str]) -> None:
"""
Convert and store value in JSON format.
@@ -161,18 +157,17 @@ class RedisRow:
"""
try:
if isinstance(value, (dict, list)):
cls.value = json.dumps(value)
self.value = json.dumps(value)
elif isinstance(value, bytes):
cls.value = json.dumps(json.loads(value.decode()))
self.value = json.dumps(json.loads(value.decode()))
elif isinstance(value, str):
cls.value = value
self.value = value
else:
raise RedisValueError(f"Unsupported value type: {type(value)}")
except json.JSONDecodeError as e:
raise RedisValueError(f"Invalid JSON format: {str(e)}")
@classmethod
def modify(cls, add_dict: Dict) -> None:
def modify(self, add_dict: Dict) -> None:
"""
Modify existing data by merging with new dictionary.
@@ -187,15 +182,17 @@ class RedisRow:
"""
if not isinstance(add_dict, dict):
raise RedisValueError("modify() requires a dictionary argument")
current_data = cls.data if cls.data else {}
current_data = self.row if self.row else {}
if not isinstance(current_data, dict):
raise RedisValueError("Cannot modify non-dictionary data")
current_data = {
**current_data,
**add_dict,
}
self.feed(current_data)
self.save()
cls.feed({**current_data, **add_dict})
@classmethod
def save(cls):
def save(self):
"""
Save the data to Redis with optional expiration.
@@ -204,29 +201,28 @@ class RedisRow:
RedisValueError: If value is not set
"""
import arrow
from Services.Redis.conn import redis_cli
if not cls.key:
if not self.key:
raise RedisKeyError("Cannot save data without a key")
if not cls.value:
if not self.value:
raise RedisValueError("Cannot save empty data")
if cls.expires_at:
redis_cli.setex(name=cls.redis_key, time=cls.expires_at, value=cls.value)
cls.expires_at_string = str(
if self.expires_at:
redis_cli.setex(
name=self.redis_key, time=self.get_expiry_time(), value=self.value
)
self.expires_at_string = str(
arrow.now()
.shift(seconds=cls.get_expiry_time())
.shift(seconds=self.get_expiry_time())
.format("YYYY-MM-DD HH:mm:ss")
)
return cls.value
return self.value
redis_cli.set(name=self.redis_key, value=self.value)
self.expires_at = None
self.expires_at_string = None
return self.value
redis_cli.set(name=cls.redis_key, value=cls.value)
cls.expires_at = None
cls.expires_at_string = None
return cls.value
@classmethod
def remove(cls, key: str) -> None:
def remove(self, key: str) -> None:
"""
Remove a key from the stored dictionary.
@@ -237,16 +233,24 @@ class RedisRow:
KeyError: If key doesn't exist
RedisValueError: If stored value is not a dictionary
"""
current_data = cls.data
current_data = self.row
if not isinstance(current_data, dict):
raise RedisValueError("Cannot remove key from non-dictionary data")
try:
current_data.pop(key)
cls.feed(current_data)
self.feed(current_data)
self.save()
except KeyError:
raise KeyError(f"Key '{key}' not found in stored data")
def delete(self) -> None:
    """Delete the key from Redis."""
    try:
        redis_cli.delete(self.redis_key)
    except Exception as e:
        # Best-effort cleanup: report the failure but never raise,
        # so callers can delete during teardown without guarding.
        print(f"Error deleting key: {str(e)}")
@property
def keys(self) -> str:
"""
@@ -257,8 +261,7 @@ class RedisRow:
"""
return self.key.decode() if isinstance(self.key, bytes) else self.key
@classmethod
def set_key(cls, key: Union[str, bytes]) -> None:
def set_key(self, key: Union[str, bytes]) -> None:
"""
Set key ensuring bytes format.
@@ -267,7 +270,7 @@ class RedisRow:
"""
if not key:
raise RedisKeyError("Cannot set empty key")
cls.key = key if isinstance(key, bytes) else str(key).encode()
self.key = key if isinstance(key, bytes) else str(key).encode()
@property
def redis_key(self) -> bytes:
@@ -280,7 +283,7 @@ class RedisRow:
return self.key if isinstance(self.key, bytes) else str(self.key).encode()
@property
def data(self) -> Union[Dict, List]:
def row(self) -> Union[Dict, List]:
"""
Get stored value as Python object.
@@ -290,6 +293,7 @@ class RedisRow:
try:
return json.loads(self.value)
except json.JSONDecodeError as e:
# return self.value
raise RedisValueError(f"Invalid JSON format in stored value: {str(e)}")
@property
@@ -302,5 +306,5 @@ class RedisRow:
"""
return {
"keys": self.keys,
"value": self.data,
"value": self.row,
}

View File

@@ -20,6 +20,8 @@ class RedisResponse:
self.data_type = "dict"
elif isinstance(data, list):
self.data_type = "list"
elif isinstance(data, RedisRow):
self.data_type = "row"
elif data is None:
self.data_type = None
self.error = error
@@ -30,12 +32,16 @@ class RedisResponse:
"status": self.status,
"message": self.message,
"count": self.count,
"dataType": self.data_type,
"dataType": getattr(self, "data_type", None),
}
if isinstance(data, RedisRow):
return {"data": {data.keys: data.data}, **main_dict}
dict_return = {data.keys: data.row}
dict_return.update(dict(main_dict))
return dict_return
elif isinstance(data, list):
return {"data": {row.keys: row.data for row in data}, **main_dict}
dict_return = {row.keys: row.data for row in data}
dict_return.update(dict(main_dict))
return dict_return
@property
def all(self) -> Union[Optional[List[RedisRow]]]:
@@ -43,11 +49,20 @@ class RedisResponse:
@property
def count(self) -> int:
return len(self.all)
print()
row = self.all
if isinstance(row, list):
return len(row)
elif isinstance(row, RedisRow):
return 1
@property
def first(self) -> Union[RedisRow, None]:
print("self.data", self.data)
if self.data:
return self.data[0]
if isinstance(self.data, list):
return self.data[0]
elif isinstance(self.data, RedisRow):
return self.row
self.status = False
return

View File

@@ -1,39 +1,76 @@
import secrets
import uuid
import random
from uuid import uuid4
from Services.Redis import RedisActions, AccessToken
from Services.Redis.Actions.actions import RedisActions
from Services.Redis.Models.row import AccessToken
first_user = AccessToken(
accessToken=secrets.token_urlsafe(90),
userUUID=uuid.uuid4().__str__(),
)
second_user = AccessToken(
accessToken=secrets.token_urlsafe(90),
userUUID=uuid.uuid4().__str__(),
)
json_data = lambda uu_id, access: {
"uu_id": uu_id,
"access_token": access,
"user_type": 1,
"selected_company": None,
"selected_occupant": None,
"reachable_event_list_id": [],
def generate_token(length=32):
    """Return a random token containing only ASCII letters.

    Starts from ``secrets.token_urlsafe(length)`` and replaces every
    non-letter character ('-', '_', digits) with a random letter, so the
    result is strictly alphabetic.

    Args:
        length: Number of random bytes fed to ``token_urlsafe``; the
            resulting string is roughly 4/3 this many characters.

    Returns:
        str: an alphabetic token (empty string when ``length`` is 0).
    """
    letters = "abcdefghijklmnopqrstuvwxyz"
    alphabet = letters + letters.upper()
    token_generated = secrets.token_urlsafe(length)
    # Iterate over a snapshot of the original token; each replace() patches
    # the first remaining occurrence, so repeated characters are all fixed.
    for ch in str(token_generated):
        if ch not in alphabet:
            # Fix: use secrets.choice, not random.choice — substituting
            # characters of a security token with the non-cryptographic
            # `random` module weakens the token.
            token_generated = token_generated.replace(
                ch, secrets.choice(alphabet), 1
            )
    return token_generated
save_json = {
"user": {
"first_name": "John",
"last_name": "Doe",
"email": "johndoe@glu.com",
"phone": "1234567890",
"address": "1234 Main St",
"details": {
"city": "San Francisco",
"state": "CA",
"zip": "94111",
},
},
"domain": "https://www.example.com",
"info": {
"mac": "oıuıouqqzxöç.işüğ",
"version": "1.0.0",
"type": "web",
"device": "desktop",
},
}
set_response_first_json = json_data(first_user.userUUID, first_user.accessToken)
set_response_second_json = json_data(second_user.userUUID, second_user.accessToken)
set_response_first = RedisActions.set_json(
list_keys=first_user.to_list(),
value=set_response_first_json,
expires={"seconds": 140},
)
set_response_second = RedisActions.set_json(
list_keys=second_user.to_list(),
value=set_response_second_json,
expires={"seconds": 190},
)
# access_object = AccessToken(
# userUUID=str(uuid4()),
# accessToken=generate_token(60)
# )
# redis_object = RedisActions.set_json(
# list_keys=access_object.to_list(),
# value=save_json,
# expires={"seconds": 720}
# )
# quit()
acc_token = "IuDXEzqzCSyOJvrwdjyxqGPOBnleUZjjXWsELJgUglJjyGhINOzAUpdMuzEzoTyOsJRUeEQsgXGUXrer:521a4ba7-898f-4204-a2e5-3226e1aea1e1"
search_keys = [None, set_response_first_json["uu_id"]]
get_response = RedisActions.get_json(list_keys=search_keys)
# print("get_response", [data.expires_at for data in get_response.all])
userUUID = acc_token.split(":")[1]
accessToken = acc_token.split(":")[0]
access_object = AccessToken(userUUID=None, accessToken=accessToken)
print("access_object", access_object.to_list())
redis_object = RedisActions.get_json(
list_keys=access_object.to_list(),
)
# print("type type(redis_object)", type(redis_object))
# print("type redis_object.data", type(redis_object.data))
# print("count", redis_object.count)
# print("data", redis_object.data)
# print("data", redis_object.as_dict())
# print("message", redis_object.message)
redis_row_object = redis_object.first
redis_row_object.modify({"reachable_event_list_id": [i for i in range(50)]})
# redis_row_object.remove("reachable_event_list_id")
# redis_row_object.modify({"reachable_event_list_id": [i for i in range(10)]})
# if redis_row_object:
# print("redis_row_object", redis_row_object.delete())
# print('redis_row_object.as_dict', redis_row_object.as_dict)