validations updated & Error Handlers updated & token config added

This commit is contained in:
berkay 2025-01-13 23:25:19 +03:00
parent f21c92cb27
commit 69883d3120
9 changed files with 522 additions and 66 deletions

ApiEvents/__init__.py Normal file
View File

@@ -0,0 +1,107 @@
import enum
from typing import Optional, List, Any
from pydantic import BaseModel


# Company / Priority / Department / Duty / Employee / Occupant / Module / Endpoint are dynamic entities that may change
class UserType(enum.Enum):
    employee = 1
    occupant = 2


class Credentials(BaseModel):
    person_id: int
    person_name: str


class ApplicationToken(BaseModel):
    # Application Token Object -> the main token object for the user
    domain: Optional[str] = "app.evyos.com.tr"
    lang: Optional[str] = "TR"
    timezone: Optional[str] = "GMT+3"
    user_type: int = UserType.occupant.value
    credentials: Optional[dict] = None
    user_uu_id: str
    user_id: int
    person_id: int
    person_uu_id: str
    request: Optional[dict] = None  # Request info of the client
    expires_at: Optional[float] = None  # Expiry timestamp


class OccupantToken(BaseModel):
    # Selection of the occupant type for a build part is made by the user
    living_space_id: int  # Internal use
    living_space_uu_id: str  # Outer use
    occupant_type_id: int
    occupant_type_uu_id: str
    occupant_type: str
    build_id: int
    build_uuid: str
    build_part_id: int
    build_part_uuid: str
    responsible_company_id: Optional[int] = None
    responsible_company_uuid: Optional[str] = None
    responsible_employee_id: Optional[int] = None
    responsible_employee_uuid: Optional[str] = None
    reachable_event_list_id: Optional[list] = None  # ID list of reachable modules
    # reachable_event_list_uu_id: Optional[list] = None  # UUID list of reachable modules


class CompanyToken(BaseModel):
    # Required company object for an employee
    company_id: int
    company_uu_id: str
    department_id: int
    department_uu_id: str
    duty_id: int
    duty_uu_id: str
    staff_id: int
    staff_uu_id: str
    employee_id: int
    employee_uu_id: str
    bulk_duties_id: int
    reachable_event_list_id: Optional[list] = None  # ID list of reachable modules
    # reachable_event_list_uu_id: Optional[list] = None  # UUID list of reachable modules


class OccupantTokenObject(ApplicationToken):
    # Occupant Token Object -> requires selection of the occupant type for a specific build part
    available_occupants: Optional[dict] = None
    selected_occupant: Optional[OccupantToken] = None  # Selected occupant type
    available_event: Optional[Any] = None


class EmployeeTokenObject(ApplicationToken):
    # Full hierarchy: Employee[staff_id] -> Staff -> Duty -> Department -> Company
    companies_id_list: List[int]  # IDs of the employee's companies
    companies_uu_id_list: List[str]  # UUIDs of the employee's companies
    duty_id_list: List[int]  # IDs of the employee's duties
    duty_uu_id_list: List[str]  # UUIDs of the employee's duties
    selected_company: Optional[CompanyToken] = None  # Selected company object
    available_event: Optional[Any] = None
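
For reference, a minimal usage sketch of these token models. All field values below are made up for illustration; .dict() assumes Pydantic v1, with Pydantic v2 use .model_dump() instead:

    from ApiEvents import OccupantToken, OccupantTokenObject

    occupant = OccupantTokenObject(
        user_uu_id="user-uuid-example",      # hypothetical values, illustration only
        user_id=42,
        person_id=7,
        person_uu_id="person-uuid-example",
        available_occupants={"1": "owner", "2": "tenant"},
        selected_occupant=OccupantToken(
            living_space_id=3,
            living_space_uu_id="ls-uuid-example",
            occupant_type_id=1,
            occupant_type_uu_id="ot-uuid-example",
            occupant_type="owner",
            build_id=10,
            build_uuid="build-uuid-example",
            build_part_id=11,
            build_part_uuid="part-uuid-example",
        ),
    )
    print(occupant.dict())  # .model_dump() in Pydantic v2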

View File

@@ -0,0 +1,93 @@
# Git
.git
.gitignore
.gitattributes
# CI
.codeclimate.yml
.travis.yml
.taskcluster.yml
# Docker
docker-compose.yml
service_app/Dockerfile
.docker
.dockerignore
# Byte-compiled / optimized / DLL files
**/__pycache__/
**/*.py[cod]
# C extensions
*.so
# Distribution / packaging
.Python
service_app/env/
build/
develop-eggs/
dist/
downloads/
eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.cache
nosetests.xml
coverage.xml
# Translations
*.mo
*.pot
# Django stuff:
*.log
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Virtual environment
service_app/.env
.venv/
venv/
# PyCharm
.idea
# Python mode for VIM
.ropeproject
**/.ropeproject
# Vim swap files
**/*.swp
# VS Code
.vscode/
test_application/

View File

@@ -0,0 +1,4 @@
REDIS_HOST=commercial_redis_service
REDIS_PASSWORD=commercial_redis_password
REDIS_PORT=6379
REDIS_DB=0
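
A minimal client-side sketch, assuming the redis-py package and that these variables are exported to the consuming service. REDIS_HOST resolves to the container name only on the compose network; from the host machine, localhost with the published port 11222 applies instead:

    import os
    import redis

    client = redis.Redis(
        host=os.getenv("REDIS_HOST", "localhost"),
        port=int(os.getenv("REDIS_PORT", "6379")),
        db=int(os.getenv("REDIS_DB", "0")),
        password=os.getenv("REDIS_PASSWORD"),
    )
    client.ping()  # raises an exception if the host or password is wrong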

View File

@@ -0,0 +1,8 @@
MONGODB_DISABLE_ENFORCE_AUTH=true
MONGODB_ROOT_PASSWORD=root
MONGODB_DATABASE=mongo_database
MONGODB_USERNAME=mongo_user
MONGODB_PASSWORD=mongo_password
MONGO_INITDB_ROOT_USERNAME=mongo_user
MONGO_INITDB_ROOT_PASSWORD=mongo_password
MONGO_INITDB_DATABASE=mongo_database
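
A minimal connection sketch, assuming pymongo. MONGO_HOST and MONGO_PORT are hypothetical variables used only for illustration; inside the compose network the Mongo container name would be used, from the host the published port 11777:

    import os
    from pymongo import MongoClient

    client = MongoClient(
        host=os.getenv("MONGO_HOST", "localhost"),   # hypothetical variable
        port=int(os.getenv("MONGO_PORT", "27017")),  # hypothetical variable
        username=os.getenv("MONGO_INITDB_ROOT_USERNAME"),
        password=os.getenv("MONGO_INITDB_ROOT_PASSWORD"),
    )
    db = client[os.getenv("MONGODB_DATABASE", "mongo_database")]
    print(db.list_collection_names())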

View File

@@ -0,0 +1,48 @@
services:
  commercial_main_mongo_service:
    container_name: commercial_main_mongo_service
    # image: "bitnami/mongodb:latest"
    image: "bitnami/mongodb:4.4.1-debian-10-r3"
    networks:
      - network_store_services
    restart: on-failure
    env_file:
      - commercial_main_mongo_service.env
    volumes:
      - wag_commercial_mongodb_main_data:/bitnami/mongodb
    ports:
      - "11777:27017"

  commercial_main_memory_service:
    container_name: commercial_main_memory_service
    image: 'bitnami/redis:latest'
    networks:
      - network_store_services
    restart: on-failure
    env_file:
      - commercial_main_memory_service.env
    ports:
      - "11222:6379"

  postgres_main_commercial:
    image: 'bitnami/postgresql:latest'
    container_name: postgres_main_commercial
    networks:
      - network_store_services
    restart: on-failure
    env_file:
      - postgres_main_commercial.env
    depends_on:
      - commercial_main_mongo_service
    ports:
      - "5444:5432"
    volumes:
      - wag_postgres_main_commercial_data:/bitnami/postgresql

networks:
  network_store_services:

volumes:
  wag_postgres_main_commercial_data:
  wag_commercial_mongodb_main_data:
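
Each service publishes a non-default host port (Mongo on 11777, Redis on 11222, Postgres on 5444). A throwaway sketch, not part of the repository, for checking from the host that the published ports are reachable:

    import socket

    for name, port in {"mongo": 11777, "redis": 11222, "postgres": 5444}.items():
        with socket.socket() as sock:
            sock.settimeout(2)
            status = "open" if sock.connect_ex(("localhost", port)) == 0 else "closed"
            print(f"{name}: {status}")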

View File

@@ -0,0 +1,48 @@
services:
  commercial_main_mongo_service:
    container_name: commercial_main_mongo_service
    image: "bitnami/mongodb:latest"
    # image: "bitnami/mongodb:4.4.1-debian-10-r3"
    networks:
      - network_store_services
    restart: on-failure
    env_file:
      - commercial_main_mongo_service.env
    volumes:
      - wag_commercial_mongodb_main_data:/bitnami/mongodb
    ports:
      - "11777:27017"

  commercial_main_memory_service:
    container_name: commercial_main_memory_service
    image: 'bitnami/redis:latest'
    networks:
      - network_store_services
    restart: on-failure
    env_file:
      - commercial_main_memory_service.env
    ports:
      - "11222:6379"

  postgres_main_commercial:
    image: 'bitnami/postgresql:latest'
    container_name: postgres_main_commercial
    networks:
      - network_store_services
    restart: on-failure
    env_file:
      - postgres_main_commercial.env
    depends_on:
      - commercial_main_mongo_service  # must reference the service name defined above
    ports:
      - "5444:5432"
    volumes:
      - wag_postgres_main_commercial_data:/bitnami/postgresql

networks:
  network_store_services:

volumes:
  wag_postgres_main_commercial_data:
  wag_commercial_mongodb_main_data:

View File

@@ -0,0 +1,3 @@
POSTGRES_DB=wag_database
POSTGRES_USER=berkay_wag_user
POSTGRES_PASSWORD=berkay_wag_user_password
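
A minimal connection sketch, assuming psycopg2 and the 5444:5432 port mapping from the compose file above; host and port here are assumptions for connecting from outside the compose network:

    import os
    import psycopg2

    conn = psycopg2.connect(
        dbname=os.getenv("POSTGRES_DB", "wag_database"),
        user=os.getenv("POSTGRES_USER", "berkay_wag_user"),
        password=os.getenv("POSTGRES_PASSWORD", ""),
        host="localhost",  # assumption: connecting from the host machine
        port=5444,         # published port of postgres_main_commercial
    )
    with conn, conn.cursor() as cur:
        cur.execute("SELECT version();")
        print(cur.fetchone())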

View File

@@ -1,114 +1,259 @@
"""
Redis key-value operations with structured data handling.

This module provides a class for managing Redis key-value operations with support for:
- Structured data storage and retrieval
- Key pattern generation for searches
- JSON serialization/deserialization
- Type-safe value handling
"""
import json
from typing import Union, Dict, List, Optional, Any, ClassVar
from datetime import datetime


class RedisKeyError(Exception):
    """Exception raised for Redis key-related errors."""
    pass


class RedisValueError(Exception):
    """Exception raised for Redis value-related errors."""
    pass


class RedisRow:
    """
    Handles Redis key-value operations with structured data.

    This class provides methods for:
    - Managing compound keys with delimiters
    - Converting between bytes and string formats
    - JSON serialization/deserialization of values
    - Pattern generation for Redis key searches

    Attributes:
        key: The Redis key in bytes or string format
        value: The stored value (will be JSON serialized)
        delimiter: Character used to separate compound key parts
        expires_at: Optional expiration timestamp
    """

    key: ClassVar[Union[str, bytes]]
    value: ClassVar[Any]
    delimiter: ClassVar[str] = ":"
    expires_at: ClassVar[Optional[str]] = None

    @classmethod
    def merge(cls, set_values: List[Union[str, bytes]]) -> None:
        """
        Merge list of values into a single delimited key.

        Args:
            set_values: List of values to merge into key

        Example:
            >>> RedisRow.merge(["users", "123", "profile"])
            >>> print(RedisRow.key)
            b'users:123:profile'
        """
        if not set_values:
            raise RedisKeyError("Cannot merge empty list of values")
        merged = []
        for value in set_values:
            if value is None:
                continue
            if isinstance(value, bytes):
                value = value.decode()
            merged.append(str(value))
        cls.key = cls.delimiter.join(merged).encode()

    @classmethod
    def regex(cls, list_keys: List[Union[str, bytes, None]]) -> str:
        """
        Generate Redis search pattern from list of keys.

        Args:
            list_keys: List of key parts, can include None for wildcards

        Returns:
            str: Redis key pattern with wildcards

        Example:
            >>> RedisRow.regex([None, "users", "active"])
            '*:users:active'
        """
        if not list_keys:
            return ""
        # Filter and convert valid keys
        valid_keys = []
        for key in list_keys:
            if key is None:
                continue
            if isinstance(key, bytes):
                key = key.decode()
            valid_keys.append(str(key))
        # Build pattern
        pattern = cls.delimiter.join(valid_keys)
        if not pattern:
            return ""
        # Add wildcard if first key was None
        if list_keys[0] is None:
            pattern = f"*{cls.delimiter}{pattern}"
        return pattern

    @classmethod
    def parse(cls) -> List[str]:
        """
        Parse the key into its component parts.

        Returns:
            List[str]: Key parts split by delimiter

        Example:
            >>> RedisRow.key = b'users:123:profile'
            >>> RedisRow.parse()
            ['users', '123', 'profile']
        """
        if not cls.key:
            return []
        key_str = cls.key.decode() if isinstance(cls.key, bytes) else cls.key
        return key_str.split(cls.delimiter)

    @classmethod
    def feed(cls, value: Union[bytes, Dict, List]) -> None:
        """
        Convert and store value in JSON format.

        Args:
            value: Value to store (bytes, dict, or list)

        Raises:
            RedisValueError: If value type is not supported

        Example:
            >>> RedisRow.feed({"name": "John", "age": 30})
            >>> print(RedisRow.value)
            '{"name": "John", "age": 30}'
        """
        try:
            if isinstance(value, (dict, list)):
                cls.value = json.dumps(value)
            elif isinstance(value, bytes):
                cls.value = json.dumps(json.loads(value.decode()))
            else:
                raise RedisValueError(f"Unsupported value type: {type(value)}")
        except json.JSONDecodeError as e:
            raise RedisValueError(f"Invalid JSON format: {str(e)}")

    @classmethod
    def modify(cls, add_dict: Dict) -> None:
        """
        Modify existing data by merging with new dictionary.

        Args:
            add_dict: Dictionary to merge with existing data

        Example:
            >>> RedisRow.feed({"name": "John"})
            >>> RedisRow.modify({"age": 30})
            >>> print(RedisRow.data)
            {"name": "John", "age": 30}
        """
        if not isinstance(add_dict, dict):
            raise RedisValueError("modify() requires a dictionary argument")
        # `data` is an instance property, so deserialize the class-level value directly here
        current_data = json.loads(cls.value) if getattr(cls, "value", None) else {}
        if not isinstance(current_data, dict):
            raise RedisValueError("Cannot modify non-dictionary data")
        cls.feed({**current_data, **add_dict})

    @classmethod
    def remove(cls, key: str) -> None:
        """
        Remove a key from the stored dictionary.

        Args:
            key: Key to remove from stored dictionary

        Raises:
            KeyError: If key doesn't exist
            RedisValueError: If stored value is not a dictionary
        """
        # `data` is an instance property, so deserialize the class-level value directly here
        current_data = json.loads(cls.value) if getattr(cls, "value", None) else {}
        if not isinstance(current_data, dict):
            raise RedisValueError("Cannot remove key from non-dictionary data")
        try:
            current_data.pop(key)
            cls.feed(current_data)
        except KeyError:
            raise KeyError(f"Key '{key}' not found in stored data")

    @property
    def keys(self) -> str:
        """
        Get key as string.

        Returns:
            str: Key in string format
        """
        return self.key.decode() if isinstance(self.key, bytes) else self.key

    @classmethod
    def set_key(cls, key: Union[str, bytes]) -> None:
        """
        Set key ensuring bytes format.

        Args:
            key: Key in string or bytes format
        """
        if not key:
            raise RedisKeyError("Cannot set empty key")
        cls.key = key if isinstance(key, bytes) else str(key).encode()

    @property
    def redis_key(self) -> bytes:
        """
        Get key in bytes format for Redis operations.

        Returns:
            bytes: Key in bytes format
        """
        return self.key if isinstance(self.key, bytes) else str(self.key).encode()

    @property
    def data(self) -> Union[Dict, List]:
        """
        Get stored value as Python object.

        Returns:
            Union[Dict, List]: Deserialized JSON data
        """
        try:
            return json.loads(self.value)
        except json.JSONDecodeError as e:
            raise RedisValueError(f"Invalid JSON format in stored value: {str(e)}")

    @property
    def as_dict(self) -> Dict[str, Any]:
        """
        Get row data as dictionary.

        Returns:
            Dict[str, Any]: Dictionary with keys and value
        """
        return {
            "keys": self.keys,
            "value": self.data,