first commit
This commit is contained in:
databases/no_sql_models/__init__.py | 0 lines added | Normal file

databases/no_sql_models/identity.py | 137 lines added | Normal file
@@ -0,0 +1,137 @@
import datetime

from fastapi import HTTPException

from .validations import PasswordHistoryViaUser, DomainViaUser, AccessHistoryViaUser
from .mongo_database import MongoQuery


class MongoQueryIdentity:
    """
    Identity data lives in per-company collections named "<company uuid>*<storage reason>",
    e.g. mongo_collection_name = str(Company.uu_id()) + "*" + str("UserPasswordHistory")
    """

    def __init__(self, company_uuid, storage_reasoning: str = None):
        self.company_uuid = company_uuid
        self.mongo_collection_base = str(company_uuid)
        if storage_reasoning:
            self.mongo_collection_name = (
                str(company_uuid) + "*" + str(storage_reasoning)
            )
        else:
            self.mongo_collection_name = str(company_uuid) + "*" + str("Domain")
        self.mongo_engine = MongoQuery(
            table_name=self.mongo_collection_name, database_name="mongo_database"
        )

    def use_collection(self, storage_reasoning):
        # Re-point the query engine at another "<company uuid>*<reason>" collection.
        self.mongo_collection_name = (
            str(self.company_uuid) + "*" + str(storage_reasoning)
        )
        self.mongo_engine = MongoQuery(
            table_name=self.mongo_collection_name, database_name="mongo_database"
        )

    def create_domain_via_user(self, payload: DomainViaUser):
        self.use_collection("Domain")
        return self.mongo_engine.insert(
            payload={
                "user_uu_id": payload.user_uu_id,
                "other_domains_list": [payload.main_domain],
                "main_domain": payload.main_domain,
                "modified_at": datetime.datetime.now().timestamp(),
            }
        )

    def update_domain_via_user(self, payload: DomainViaUser):
        self.use_collection("Domain")
        return self.mongo_engine.update(
            match=payload.user_uu_id,
            payload={
                "other_domains_list": payload.other_domains_list,
                "modified_at": datetime.datetime.now().timestamp(),
            },
            field="user_uu_id",
        )

    def get_domain_via_user(self, user_uu_id):
        self.use_collection("Domain")
        return self.mongo_engine.get_one(match=str(user_uu_id), field="user_uu_id")

    def refresh_password_history_via_user(self, payload: PasswordHistoryViaUser):
        self.use_collection("PasswordHistory")
        password_history_item = self.mongo_engine.get_one(
            match=payload.user_uu_id, field="user_uu_id"
        )
        if not password_history_item:
            self.mongo_engine.insert(
                payload={
                    "user_uu_id": str(payload.user_uu_id),
                    "password_history": [],
                }
            )

        password_history_item = self.mongo_engine.get_one(
            match=payload.user_uu_id, field="user_uu_id"
        )
        password_history_list = password_history_item.get("password_history", [])
        hashed_password = payload.password_add.get("password")

        # Reject the new password if its hash matches one already kept in the history.
        for password_in_history in password_history_list:
            if password_in_history.get("password") == str(hashed_password):
                raise HTTPException(
                    status_code=400,
                    detail="Password already used. Please choose a new password that differs from your last 3 passwords.",
                )

        # Keep the stored history bounded before appending the new entry.
        if len(password_history_list) > 3:
            password_history_list.pop(0)

        password_history_list.append(payload.password_add)

        return self.mongo_engine.update(
            match=payload.user_uu_id,
            payload={
                "password_history": password_history_list,
                "access_history_detail": payload.access_history_detail,
                "modified_at": datetime.datetime.now().timestamp(),
            },
            field="user_uu_id",
        )

    def get_password_history_via_user(self, user_uu_id):
        self.use_collection("PasswordHistory")
        return self.mongo_engine.get_one(match=user_uu_id, field="user_uu_id")

    def update_access_history_via_user(self, payload: AccessHistoryViaUser):
        self.use_collection("AccessHistory")
        if already_dict := self.get_access_history_via_user(
            user_uu_id=payload.user_uu_id
        ):
            access_history = already_dict[0].get("access_history") or []
            access_history.append(payload.access_history)
            # Cap the stored access history at the 60 most recent entries.
            if len(access_history) > 60:
                access_history.pop(0)
            return self.mongo_engine.update(
                match=payload.user_uu_id,
                payload={
                    "user_uu_id": payload.user_uu_id,
                    "access_history": access_history,
                    "modified_at": datetime.datetime.now().timestamp(),
                },
                field="user_uu_id",
            )
        return self.mongo_engine.insert(
            payload={
                "user_uu_id": payload.user_uu_id,
                "access_history": [payload.access_history],
                "modified_at": datetime.datetime.now().timestamp(),
            }
        )

    def get_access_history_via_user(self, user_uu_id):
        self.use_collection("AccessHistory")
        return self.mongo_engine.filter_by(
            payload={"user_uu_id": user_uu_id},
            sort_by="modified_at",
            sort_direction="desc",
        )

databases/no_sql_models/login_handlers.py | 89 lines added | Normal file
@@ -0,0 +1,89 @@
# from loggers.loggers import LoginLogger


def get_menu_from_mongo(user_id, company_name):
    from databases.no_sql_models.mongo_database import MongoQuery

    mongo = MongoQuery(
        table_name=company_name.replace(" ", ""), database_name="mongo_database"
    )
    mongo_dict = mongo.parse_json(mongo.get_one(match=user_id, field="user_id")) or {}
    return mongo_dict.get("menu", [])


def load_user_with_erp_details(found_user, access_dict: dict = None):
    duties = []
    employee = found_user.person.employee
    duty_dict = {}
    try:
        duty_dict = employee.duty.get_dict(
            include=["duty_name", "duty_code", "duty_description"]
        )
        duty_dict["buildings"] = []
        duty_dict["response_buildings"] = []
        duty_dict["department"] = employee.duty.department.get_dict(
            include=["department_name", "department_code"]
        )
        duty_dict["company"] = employee.duty.department.company.get_dict(
            include=["formal_name", "public_name", "tax_no", "default_lang_type"]
        )
    except Exception as e:
        # LoginLogger.log_exception(
        #     {
        #         "exc": e,
        #         "user": found_user.uu_id,
        #         "function": "load_user_with_erp_details",
        #     }
        # )
        print("MongoQuery load_user_with_erp_details", e)

    # Buildings the company is responsible for, with their parts.
    for building in list(set(employee.duty.department.company.response_buildings)):
        build_parts = []
        for part in building.parts:
            build_parts.append(
                part.get_dict(
                    include=["uu_id", "part_name", "part_code", "part_description"]
                )
            )
        # setdefault guards against the duty lookup above having failed partway through.
        duty_dict.setdefault("response_buildings", []).append(
            {
                "build": building.get_dict(include=["build_name", "uu_id"]),
                "parts": build_parts,
            }
        )
    # Buildings owned by the company, with their parts.
    for building in list(set(employee.duty.department.company.buildings)):
        build_parts = []
        for part in building.parts:
            build_parts.append(
                part.get_dict(
                    include=[
                        "uu_id",
                        "part_name",
                        "part_code",
                        "part_description",
                    ]
                )
            )
        duty_dict.setdefault("buildings", []).append(
            {
                "build": building.get_dict(include=["build_name", "uu_id"]),
                "parts": build_parts,
            }
        )
    duties.append(duty_dict)
    return_dict = access_dict if access_dict else {}
    return_dict.update(
        {
            "data": {
                "profile": found_user.get_dict(),
                "employee_info": duties,
                "menu": get_menu_from_mongo(
                    found_user.id,
                    company_name=duty_dict.get("company", {}).get("public_name", ""),
                ),
            },
        }
    )
    return return_dict

databases/no_sql_models/mongo_database.py | 99 lines added | Normal file
@@ -0,0 +1,99 @@
import pymongo

from json import loads
from bson import ObjectId, json_util
from pydantic import BaseModel
from api_configs import MongoConfig

from pymongo import MongoClient
from pymongo.collection import Collection
from pymongo.results import InsertManyResult
# from configs import TestMongo as MongoConfig


def parse_json(data):
    # Round-trip through bson.json_util so ObjectId/datetime values become plain JSON.
    return loads(json_util.dumps(data))


def create_database_client(url, database_name):
    return MongoClient(url)[database_name]


class Paginate(BaseModel):
    pageSize: int = 10
    pageNumber: int = 1
    sortField: str = "_id"
    sortOrder: str = "desc"

    def grab_paginates(self):
        size_ = self.pageSize
        return (
            size_,
            size_ * (self.pageNumber - 1),
            self.sortField,
            -1 if self.sortOrder == "desc" else 1,
        )


class MongoQuery:

    def __init__(self, table_name: str, database_name: str):
        database = MongoClient(MongoConfig.url)[database_name]
        # list_collection_names() replaces the deprecated Database.collection_names().
        if table_name not in database.list_collection_names():
            database.create_collection(name=table_name)
        self.table: Collection = database[table_name]

    @staticmethod
    def grab_paginates(paginate):
        return (
            paginate.size,
            paginate.size * (paginate.page - 1),
            paginate.order_field,
            -1 if paginate.order_type == "desc" else 1,
        )

    @staticmethod
    def parse_json(data):
        return loads(json_util.dumps(data))

    def insert(self, payload) -> InsertManyResult:
        return self.table.insert_many(documents=[payload])

    def update(self, match, payload, field: str = "id"):
        if field == "id":
            return self.table.update_one(
                filter={"_id": ObjectId(match)}, update={"$set": payload}
            )
        return self.table.update_one(filter={field: match}, update={"$set": payload})

    def get_one(self, match, field: str = "id"):
        if field == "id":
            return self.parse_json(
                data=self.table.find_one(filter={"_id": ObjectId(match)})
            )
        return self.parse_json(data=self.table.find_one(filter={field: match}))

    def filter_by(self, payload, sort_by: str = "_id", sort_direction: str = "asc"):
        sort_direction = (
            pymongo.ASCENDING
            if str(sort_direction).lower() == "asc"
            else pymongo.DESCENDING
        )
        return_ = self.table.find(payload).sort(sort_by, sort_direction)
        return self.parse_json(data=return_)

    def delete_one(self, match, field: str = "id"):
        if field == "id":
            return self.table.delete_one(filter={"_id": ObjectId(match)})
        return self.table.delete_one(filter={field: match})

    def list_all(self, paginate: Paginate):
        size, skip, field, order = paginate.grab_paginates()
        return_ = self.table.find().sort([(field, order)]).skip(skip).limit(size)
        return self.table.count_documents({}), self.parse_json(data=return_)

    def get_all(self):
        return_ = self.table.find()
        return self.table.count_documents({}), self.parse_json(data=return_)


# Mongo = MongoQuery(table_name="XcompanyConfig", database_name="mongo_database")

databases/no_sql_models/validations.py | 19 lines added | Normal file
@@ -0,0 +1,19 @@
from typing import Optional
from pydantic import BaseModel


class DomainViaUser(BaseModel):
    user_uu_id: str
    main_domain: str
    other_domains_list: Optional[list] = None


class PasswordHistoryViaUser(BaseModel):
    user_uu_id: str
    password_add: dict
    access_history_detail: Optional[dict] = None


class AccessHistoryViaUser(BaseModel):
    user_uu_id: str
    access_history: dict
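
A minimal usage sketch of how these pieces fit together, assuming MongoConfig.url points at a reachable MongoDB instance; the company UUID, user UUID, and access payload below are illustrative placeholders rather than values from this commit:

    # Hypothetical wiring example, not part of the commit itself.
    from databases.no_sql_models.identity import MongoQueryIdentity
    from databases.no_sql_models.validations import DomainViaUser, AccessHistoryViaUser

    identity = MongoQueryIdentity(company_uuid="acme-3f2c")  # placeholder company UUID

    # Store the user's primary domain in the "<company>*Domain" collection.
    identity.create_domain_via_user(
        DomainViaUser(user_uu_id="user-123", main_domain="example.com")
    )

    # Append a login event; the helper caps stored history at 60 entries.
    identity.update_access_history_via_user(
        AccessHistoryViaUser(
            user_uu_id="user-123",
            access_history={"ip": "203.0.113.7", "agent": "curl/8.0"},
        )
    )

    print(identity.get_access_history_via_user(user_uu_id="user-123"))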