Web service initiated

This commit is contained in:
2025-04-05 14:59:10 +03:00
parent b1c8203a33
commit fa4df11323
76 changed files with 5385 additions and 171 deletions

View File

@@ -17,9 +17,8 @@ RUN poetry config virtualenvs.create false \
# Copy application code
COPY /ApiServices/AuthService /ApiServices/AuthService
COPY /Controllers /Controllers
COPY /Schemas/building /Schemas/building
COPY /Schemas/company /Schemas/company
COPY /Schemas/identity /Schemas/identity
COPY /Modules /Modules
COPY /Schemas /Schemas
# Set Python path to include app directory
ENV PYTHONPATH=/ PYTHONUNBUFFERED=1 PYTHONDONTWRITEBYTECODE=1

View File

@@ -8,10 +8,7 @@ class RouteRegisterController:
self.router_list = router_list
self.app = app
def register_routes(self):
for router in self.router_list:
self.app.include_router(router)
self.add_router_to_database(router)
return self.app

View File

@@ -2,4 +2,4 @@ from .auth.route import auth_route
__all__ = [
"auth_route",
]
]

View File

@@ -13,13 +13,12 @@ from ApiServices.AuthService.validations.request.authentication.login_post impor
RequestChangePassword,
RequestForgotPasswordPhone,
RequestForgotPasswordEmail,
RequestVerifyOTP,
)
from ApiServices.AuthService.events.auth.auth import AuthHandlers
auth_route = APIRouter(
prefix="/authentication",
tags=["Authentication Cluster"],
)
auth_route = APIRouter(prefix="/authentication", tags=["Authentication Cluster"])
@auth_route.post(
@@ -49,8 +48,12 @@ def authentication_login_post(
status_code=status.HTTP_406_NOT_ACCEPTABLE,
headers=headers,
)
result = AuthHandlers.LoginHandler.authentication_login_with_domain_and_creds(
request=request,
data=data,
)
return JSONResponse(
content={**data.model_dump()},
content=result,
status_code=status.HTTP_202_ACCEPTED,
headers=headers,
)
@@ -84,9 +87,12 @@ def authentication_select_post(
status_code=status.HTTP_406_NOT_ACCEPTABLE,
headers=headers,
)
result = AuthHandlers.LoginHandler.authentication_select_company_or_occupant_type(
request=request,
data=data,
)
return JSONResponse(
content=data.model_dump(),
content=result,
status_code=status.HTTP_202_ACCEPTED,
headers=headers,
)
@@ -107,13 +113,15 @@ def authentication_password_create_post(
"""
Authentication create password Route with Post Method
"""
token = request.headers.get(api_config.ACCESS_TOKEN_TAG, None)
headers = {
"language": language or "",
"domain": domain or "",
"eys-ext": f"{str(uuid.uuid4())}",
"token": token,
}
result = AuthHandlers.PasswordHandler.create_password(
password=data.password,
password_token=data.password_token,
)
if not domain or not language:
return JSONResponse(
content={"error": "EYS_0001"},
@@ -121,7 +129,7 @@ def authentication_password_create_post(
headers=headers,
)
return JSONResponse(
content={**data.model_dump()},
content={},
status_code=status.HTTP_202_ACCEPTED,
headers=headers,
)
@@ -333,3 +341,41 @@ def authentication_token_refresh_post(
status_code=status.HTTP_202_ACCEPTED,
headers=headers,
)
@auth_route.get(
path="/password/verify-otp",
summary="Verify OTP for password reset",
description="Verify OTP for password reset",
)
def authentication_password_verify_otp(
request: Request,
data: RequestVerifyOTP,
language: str = Header(None, alias="language"),
domain: str = Header(None, alias="domain"),
tz: str = Header(None, alias="timezone"),
):
"""
Verify OTP for password reset
"""
token = request.headers.get(api_config.ACCESS_TOKEN_TAG, None)
headers = {
"language": language or "",
"domain": domain or "",
"eys-ext": f"{str(uuid.uuid4())}",
"tz": tz or "GMT+3",
"token": token,
}
print('Token&OTP : ', data.otp, data.token)
if not domain or not language:
return JSONResponse(
content={"error": "EYS_0003"},
status_code=status.HTTP_406_NOT_ACCEPTABLE,
headers=headers,
)
return JSONResponse(
content={},
status_code=status.HTTP_202_ACCEPTED,
headers=headers,
)

View File

@@ -16,4 +16,7 @@ def get_safe_endpoint_urls() -> list[tuple[str, str]]:
("/auth/login", "POST"),
("/metrics", "GET"),
("/authentication/login", "POST"),
("/authentication/password/reset", "POST"),
("/authentication/password/create", "POST"),
("/authentication/password/verify-otp", "POST"),
]

View File

@@ -1,4 +1,7 @@
from typing import Any, List, Dict, Optional, Union
import arrow
from typing import Any, Dict, Optional, Union
from ApiServices.AuthService.events.auth.model import PasswordHistoryViaUser
from ApiServices.AuthService.validations.custom.token import (
EmployeeTokenObject,
OccupantTokenObject,
@@ -6,7 +9,7 @@ from ApiServices.AuthService.validations.custom.token import (
OccupantToken,
UserType,
)
from ApiServices.TemplateService.config import api_config
from ApiServices.AuthService.config import api_config
from Schemas import (
Users,
People,
@@ -25,12 +28,14 @@ from Schemas import (
Event2Employee,
)
from Modules.Token.password_module import PasswordModule
from Controllers.Redis.database import RedisActions
from Schemas.building.build import RelationshipEmployee2Build
from Schemas.event.event import Event2Occupant
from Controllers.Redis.database import RedisActions
from Controllers.Mongo.database import mongo_handler
TokenDictType = Union[EmployeeTokenObject, OccupantTokenObject]
class RedisHandlers:
AUTH_TOKEN: str = "AUTH_TOKEN"
@@ -63,7 +68,7 @@ class RedisHandlers:
result_delete = RedisActions.delete(
list_keys=[RedisHandlers.AUTH_TOKEN, "*", str(user.uu_id)]
)
print('result_delete', result_delete)
print("result_delete", result_delete)
generated_access_token = PasswordModule.generate_access_token()
keys = [RedisHandlers.AUTH_TOKEN, generated_access_token, str(user.uu_id)]
RedisActions.set_json(
@@ -74,20 +79,27 @@ class RedisHandlers:
return generated_access_token
@classmethod
def update_token_at_redis(cls, token: str, add_payload: Union[CompanyToken, OccupantToken]):
def update_token_at_redis(
cls, token: str, add_payload: Union[CompanyToken, OccupantToken]
):
if already_token_data := RedisActions.get_json(
list_keys=[RedisHandlers.AUTH_TOKEN, token, "*"]
).first:
already_token = cls.process_redis_object(**already_token_data)
if already_token.is_employee:
already_token = cls.process_redis_object(already_token_data)
if already_token.is_employee and isinstance(add_payload, CompanyToken):
already_token.selected_company = add_payload
elif already_token.is_occupant:
elif already_token.is_occupant and isinstance(add_payload, OccupantToken):
already_token.selected_occupant = add_payload
result = RedisActions.set_json(
list_keys=[RedisHandlers.AUTH_TOKEN, token, str(already_token.user_uu_id)],
list_keys=[
RedisHandlers.AUTH_TOKEN,
token,
str(already_token.user_uu_id),
],
value=already_token.model_dump(),
expires={"hours": 1, "minutes": 30},
)
print("result.first", result.first)
return result.first
raise ValueError("Something went wrong")
@@ -118,6 +130,14 @@ class UserHandlers:
"""
Check if the password is valid.
"""
print(
dict(
domain=domain,
id_=id_,
password=password,
password_hashed=password_hashed,
)
)
if PasswordModule.check_password(
domain=domain, id_=id_, password=password, password_hashed=password_hashed
):
@@ -135,10 +155,9 @@ class LoginHandler:
def is_employee(email: str):
return str(email).split("@")[1] == api_config.ACCESS_EMAIL_EXT
@classmethod
def do_employee_login(
cls, request: Any, data: Any, extra_dict: Optional[Dict[str, Any]] = None
cls, request: Any, data: Any, extra_dict: Optional[Dict[str, Any]] = None
):
"""
Handle employee login.
@@ -154,10 +173,10 @@ class LoginHandler:
)
if not user_handler.check_password_valid(
domain=data.domain,
domain=domain or "",
id_=str(found_user.uu_id),
password=data.password,
password_hashed=found_user.hash_password
password_hashed=found_user.hash_password,
):
raise ValueError("EYS_0005")
@@ -230,13 +249,27 @@ class LoginHandler:
return {
"access_token": access_token,
"user_type": UserType.employee.name,
"user": found_user.get_dict(
exclude_list=[
Users.hash_password,
Users.cryp_uu_id,
Users.password_token,
Users.created_credentials_token,
Users.updated_credentials_token,
Users.confirmed_credentials_token,
Users.is_confirmed,
Users.is_notification_send,
Users.is_email_send,
Users.remember_me,
]
),
"selection_list": companies_list,
}
raise ValueError("Something went wrong")
@classmethod
def do_employee_occupant(
cls, request: Any, data: Any, extra_dict: Optional[Dict[str, Any]] = None
cls, request: Any, data: Any, extra_dict: Optional[Dict[str, Any]] = None
):
"""
Handle occupant login.
@@ -254,7 +287,7 @@ class LoginHandler:
domain=data.domain,
id_=str(found_user.uu_id),
password=data.password,
password_hashed=found_user.hash_password
password_hashed=found_user.hash_password,
):
raise ValueError("EYS_0005")
@@ -300,7 +333,9 @@ class LoginHandler:
"occupants": [occupant_data],
}
else:
occupants_selection_dict[build_key]["occupants"].append(occupant_data)
occupants_selection_dict[build_key]["occupants"].append(
occupant_data
)
person = found_user.person
model_value = OccupantTokenObject(
@@ -344,9 +379,9 @@ class LoginHandler:
Returns:
SuccessResponse containing authentication token and user info
"""
language = request.headers("language", "tr")
domain = request.headers("domain", None)
timezone = request.headers("tz", None) or "GMT+3"
language = request.headers.get("language", "tr")
domain = request.headers.get("domain", None)
timezone = request.headers.get("tz", None) or "GMT+3"
if cls.is_employee(data.access_key):
return cls.do_employee_login(
@@ -386,7 +421,9 @@ class LoginHandler:
return request.headers.get(api_config.ACCESS_TOKEN_TAG)
@classmethod
def handle_employee_selection(cls, access_token: str, data: Any, token_dict: TokenDictType):
def handle_employee_selection(
cls, access_token: str, data: Any, token_dict: TokenDictType
):
with Users.new_session() as db:
if data.company_uu_id not in token_dict.companies_uu_id_list:
ValueError("EYS_0011")
@@ -407,7 +444,9 @@ class LoginHandler:
# Get staff IDs
staff_ids = [
staff.id
for staff in Staff.filter_all(Staff.duties_id.in_(duties_ids), db=db).data
for staff in Staff.filter_all(
Staff.duties_id.in_(duties_ids), db=db
).data
]
# Get employee
@@ -456,13 +495,12 @@ class LoginHandler:
reachable_event_codes=reachable_event_codes,
)
redis_handler = RedisHandlers()
try: # Update Redis
return redis_handler.update_token_at_redis(
token=access_token, add_payload=company_token
)
except Exception as e:
err = e
ValueError("EYS_0008")
redis_result = redis_handler.update_token_at_redis(
token=access_token, add_payload=company_token
)
return {
"selected_uu_id": data.company_uu_id,
}
@classmethod
def handle_occupant_selection(
@@ -526,12 +564,12 @@ class LoginHandler:
reachable_event_codes=reachable_event_codes,
)
redis_handler = RedisHandlers()
try: # Update Redis
return redis_handler.update_token_at_redis(
token=access_token, add_payload=occupant_token
)
except Exception as e:
raise ValueError("EYS_0008")
redis_handler.update_token_at_redis(
token=access_token, add_payload=occupant_token
)
return {
"selected_uu_id": data.company_uu_id,
}
@classmethod # Requires auth context
def authentication_select_company_or_occupant_type(cls, request: Any, data: Any):
@@ -549,16 +587,119 @@ class LoginHandler:
if not access_token:
raise ValueError("EYS_0001")
token_object = RedisHandlers().get_object_from_redis(access_token=access_token)
if token_object.is_employee and isinstance(data, CompanyToken):
token_object = RedisHandlers.get_object_from_redis(access_token=access_token)
if token_object.is_employee:
return cls.handle_employee_selection(
access_token=access_token, data=data, token_dict=token_object,
access_token=access_token,
data=data,
token_dict=token_object,
)
elif token_object.is_occupant and isinstance(data, OccupantToken):
elif token_object.is_occupant:
return cls.handle_occupant_selection(
access_token=access_token, data=data, token_dict=token_object,
access_token=access_token,
data=data,
token_dict=token_object,
)
class PasswordHandler:
@staticmethod
def create_password(password, password_token=None):
with Users.new_session() as db_session:
found_user = Users.filter_one(
Users.password_token == password_token, db=db_session
).data
if not found_user:
raise ValueError("EYS_0031")
if found_user.password_token:
replace_day = 0
try:
replace_day = int(
str(found_user.password_expires_day or 0)
.split(",")[0]
.replace(" days", "")
)
except Exception as e:
err = e
token_is_expired = arrow.now() >= arrow.get(
str(found_user.password_expiry_begins)
).shift(days=replace_day)
if not password_token == found_user.password_token and token_is_expired:
raise ValueError("")
collection_name = f"{found_user.related_company}*Domain"
print("collection_name", collection_name)
with mongo_handler.collection(collection_name) as mongo_engine:
print({"user_uu_id": str(found_user.uu_id)})
domain_via_user = mongo_engine.find_one(
{"user_uu_id": str(found_user.uu_id)}
)
print("domain_via_user", domain_via_user)
if not domain_via_user:
raise ValueError("EYS_0024")
domain_via_user = domain_via_user.get("main_domain", None)
new_password_dict = {
"password": PasswordModule.create_hashed_password(
domain=domain_via_user,
id_=str(found_user.uu_id),
password=password,
),
"date": str(arrow.now().date()),
}
history_dict = PasswordHistoryViaUser(
user_uu_id=str(found_user.uu_id),
password_add=new_password_dict,
access_history_detail={"request": "", "ip": ""},
)
found_user.password_expiry_begins = str(arrow.now())
found_user.hash_password = new_password_dict.get("password")
found_user.password_token = "" if found_user.password_token else ""
collection_name = f"{found_user.related_company}*PasswordHistory"
with mongo_handler.collection(collection_name) as mongo_engine_sc:
password_history_item = mongo_engine_sc.find_one(
{"user_uu_id": str(found_user.uu_id)}
)
if not password_history_item:
mongo_engine_sc.insert_one(
document={
"user_uu_id": str(found_user.uu_id),
"password_history": [],
}
)
password_history_item = mongo_engine_sc.find_one(
{"user_uu_id": str(found_user.uu_id)}
)
password_history_list = password_history_item.get(
"password_history", []
)
hashed_password = history_dict.password_add.get("password")
for password_in_history in password_history_list:
if str(password_in_history.get("password")) == str(
hashed_password
):
raise ValueError("EYS_0032")
if len(password_history_list) > 3:
password_history_list.pop(0)
password_history_list.append(history_dict.password_add)
return mongo_engine_sc.update_one(
filter={"user_uu_id": str(found_user.uu_id)},
update={
"$set": {
"password_history": password_history_list,
"modified_at": arrow.now().timestamp(),
"access_history_detail": history_dict.access_history_detail,
}
},
upsert=True,
)
found_user.save(db=db_session)
return found_user
class AuthHandlers:
LoginHandler: LoginHandler = LoginHandler()
PasswordHandler: PasswordHandler = PasswordHandler()

View File

@@ -0,0 +1,19 @@
from typing import Optional
from pydantic import BaseModel
class DomainViaUser(BaseModel):
    """Mongo-backed mapping from a user to the domain(s) of their account."""

    user_uu_id: str  # user UUID serialized as a string
    main_domain: str  # primary domain; used as the `domain` input when hashing passwords
    other_domains_list: Optional[list] = None  # any additional domains for the user
class PasswordHistoryViaUser(BaseModel):
    """One password-history entry for a user (persisted to Mongo).

    ``password_add`` carries the new hashed password and the date it was set,
    e.g. ``{"password": <hash>, "date": "YYYY-MM-DD"}``; ``access_history_detail``
    records request metadata such as ``{"request": ..., "ip": ...}``.
    """

    user_uu_id: str  # user UUID serialized as a string
    password_add: dict  # {"password": <hashed password>, "date": <iso date>}
    # Explicit default added: in Pydantic v2 an Optional annotation alone does
    # NOT make a field optional — without "= None" this field is required.
    # Backward compatible: the existing caller always supplies this value.
    access_history_detail: Optional[dict] = None
class AccessHistoryViaUser(BaseModel):
    """Access-history record for a user (persisted to Mongo)."""

    user_uu_id: str  # user UUID serialized as a string
    access_history: dict  # free-form access metadata; schema not fixed here

View File

@@ -5,8 +5,7 @@ from ..config import api_config
async def token_middleware(request: Request, call_next):
base_url = "/".join(request.url.path.split("/")[:3])
base_url = request.url.path
safe_endpoints = [_[0] for _ in get_safe_endpoint_urls()]
if base_url in safe_endpoints:
return await call_next(request)

View File

@@ -9,6 +9,11 @@ class RequestLogin(BaseModel):
remember_me: Optional[bool]
class RequestVerifyOTP(BaseModel):
    """Request body for verifying an OTP during the password-reset flow."""

    token: str  # reset token previously issued to the user
    otp: str  # one-time password entered by the user
class RequestSelectOccupant(BaseModel):
    """Request body for selecting an occupant context after login."""

    # NOTE(review): field is named company_uu_id although this model targets
    # occupant selection — confirm the expected UUID against the selection handler.
    company_uu_id: str

View File

@@ -16,10 +16,10 @@ RUN poetry config virtualenvs.create false \
# Copy application code
COPY /ApiServices/InitialService /ApiServices/InitialService
COPY /ApiServices/InitialService /
COPY /Controllers /Controllers
COPY /Schemas/building /Schemas/building
COPY /Schemas/company /Schemas/company
COPY /Schemas/identity /Schemas/identity
COPY /Modules /Modules
COPY /Schemas /Schemas
# Set Python path to include app directory
ENV PYTHONPATH=/ PYTHONUNBUFFERED=1 PYTHONDONTWRITEBYTECODE=1

View File

@@ -0,0 +1,119 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
# Use forward slashes (/) also on windows to provide an os agnostic path
script_location = alembic
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library.
# Any required deps can installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =
# max length of characters to apply to the "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
# version_path_separator = newline
#
# Use os.pathsep. Default configuration used for new projects.
version_path_separator = os
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
# NOTE(review): database credentials are hardcoded here — prefer injecting the
# URL via an environment variable or secrets manager before deployment.
sqlalchemy.url = postgresql+psycopg2://berkay_wag_user:berkay_wag_user_password@postgres-service:5432/wag_database
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = check --fix REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARNING
handlers = console
qualname =
[logger_sqlalchemy]
level = WARNING
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

View File

@@ -0,0 +1 @@
Generic single-database configuration.

View File

@@ -0,0 +1,74 @@
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
from Schemas import *
from Controllers.Postgres.database import Base
# This is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging; this sets up loggers.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Autogenerate compares against the project's declarative metadata;
# `from Schemas import *` above ensures every model class is registered on Base.
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline() -> None:
    """Emit migration SQL without connecting to the database.

    Configures the Alembic context with only the URL taken from
    alembic.ini, so no Engine or DBAPI is required; each
    ``context.execute()`` writes the given statement to the script
    output instead of running it.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online() -> None:
    """Run migrations against a live database connection.

    Builds an Engine from the ``[alembic]`` section of the ini file
    (``sqlalchemy.``-prefixed keys) and executes the migrations inside
    a transaction on a real connection.
    """
    engine = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with engine.connect() as conn:
        context.configure(connection=conn, target_metadata=target_metadata)
        with context.begin_transaction():
            context.run_migrations()
# Entry point: Alembic selects offline mode when invoked with --sql.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

View File

@@ -0,0 +1,28 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
"""Upgrade schema."""
${upgrades if upgrades else "pass"}
def downgrade() -> None:
"""Downgrade schema."""
${downgrades if downgrades else "pass"}

View File

@@ -1,18 +1,21 @@
from Schemas import (
BuildLivingSpace,
BuildParts,
Companies,
Departments,
Duties,
Duty,
Staff,
Employees,
Event2Employee,
Event2Occupant,
OccupantTypes,
Users,
UsersTokens,
)
from Controllers.Postgres.database import get_db
from init_app_defaults import create_application_defaults
from init_enums import init_api_enums_build_types
from init_alembic import generate_alembic
from init_occupant_types import create_occupant_types_defaults
from init_services import create_modules_and_services_and_actions
from init_address import create_one_address
set_alembic = True
if __name__ == "__main__":
pass
with get_db() as db_session:
if set_alembic:
generate_alembic(session=db_session)
init_api_enums_build_types(db_session=db_session)
create_application_defaults(db_session=db_session)
create_occupant_types_defaults(db_session=db_session)
create_modules_and_services_and_actions(db_session=db_session)
create_one_address(db_session=db_session)

View File

@@ -0,0 +1,83 @@
from Schemas import (
AddressCity,
AddressStreet,
AddressLocality,
AddressDistrict,
AddressNeighborhood,
AddressState,
AddressCountry,
)
def create_one_address(db_session):
    """Seed one default Turkish address hierarchy, country down to street.

    Every level is created with ``find_or_create`` so repeated runs are
    idempotent; each child links to its parent via ``*_id``/``*_uu_id``.
    All records are saved at the end, in creation order.
    """
    country = AddressCountry.find_or_create(
        country_name="TÜRKİYE", country_code="TR", db=db_session, is_confirmed=True
    )
    state = AddressState.find_or_create(
        state_name="TÜRKİYE",
        state_code="TR",
        phone_code="90",
        country_id=country.id,
        country_uu_id=str(country.uu_id),
        is_confirmed=True,
        db=db_session,
    )
    city = AddressCity.find_or_create(
        city_name="ANKARA",
        city_code="6",
        licence_plate="06",
        state_id=state.id,
        state_uu_id=str(state.uu_id),
        is_confirmed=True,
        db=db_session,
    )
    district = AddressDistrict.find_or_create(
        district_name="ÇANKAYA",
        district_code="1231",
        city_id=city.id,
        city_uu_id=str(city.uu_id),
        is_confirmed=True,
        db=db_session,
    )
    locality = AddressLocality.find_or_create(
        locality_name="MERKEZ",
        locality_code="2431",
        type_code="3",
        type_description=None,
        district_id=district.id,
        district_uu_id=str(district.uu_id),
        is_confirmed=True,
        db=db_session,
    )
    neighborhood = AddressNeighborhood.find_or_create(
        neighborhood_name="AYRANCI MAHALLESİ",
        neighborhood_code="1522",
        type_code="1",
        type_description="MAHALLESİ",
        locality_id=locality.id,
        locality_uu_id=str(locality.uu_id),
        is_confirmed=True,
        db=db_session,
    )
    street = AddressStreet.find_or_create(
        street_name="REŞAT NURİ CADDESİ",
        type_description="CADDESİ",
        type_code="3",
        street_code="52270",
        neighborhood_id=neighborhood.id,
        neighborhood_uu_id=str(neighborhood.uu_id),
        is_confirmed=True,
        db=db_session,
    )

    # Persist every level, parent-first.
    for record in (country, state, city, district, locality, neighborhood, street):
        record.save(db=db_session)
    return

View File

@@ -0,0 +1,24 @@
import os
def generate_alembic(session):
    """Clear the alembic_version table (best effort), then re-run alembic.

    Any database error while checking/clearing the version table is printed
    and ignored; the alembic stamp/revision/upgrade commands always run in
    the ``finally`` block regardless of the outcome.
    """
    from sqlalchemy import text

    try:
        exists_query = text(
            "SELECT EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = "
            "'alembic_version') AS table_existence;"
        )
        row = session.execute(exists_query).first()
        if row[0]:
            session.execute(text("delete from alembic_version;"))
            session.commit()
    except Exception as exc:
        # Best-effort cleanup only: report and continue to the alembic commands.
        print(exc)
    finally:
        commands = (
            "python -m alembic stamp head;",
            "python -m alembic revision --autogenerate;",
            "python -m alembic upgrade head;",
        )
        os.system("".join(commands))

View File

@@ -0,0 +1,410 @@
import arrow
from Modules.Token.password_module import PasswordModule
from Controllers.Mongo.database import mongo_handler
def create_application_defaults(db_session):
from Schemas import (
Companies,
Departments,
Duty,
Duties,
Employees,
People,
Users,
Staff,
RelationshipDutyCompany,
)
created_list = []
created_by, confirmed_by = "System", "System"
company_management = Companies.find_or_create(
**{
"formal_name": "Evyos LTD",
"public_name": "Evyos Verimlilik Sistemleri",
"company_type": "LTD",
"commercial_type": "Commercial",
"tax_no": "123132123132",
"company_tag": "Evyos",
"default_lang_type": "TR",
"default_money_type": "TL",
"is_commercial": True,
"is_confirmed": True,
},
db=db_session,
)
created_list.append(company_management)
active_row = dict(
is_confirmed=True, active=True, deleted=False, is_notification_send=True
)
company_id, company_uu_id = company_management.id, str(company_management.uu_id)
created_list = []
execution = Departments.find_or_create(
department_name="Execution Office",
department_code="EO001",
company_id=company_id,
company_uu_id=str(company_uu_id),
**active_row,
db=db_session,
)
created_list.append(execution)
it_dept = Departments.find_or_create(
department_name="IT Department",
department_code="ITD001",
company_id=company_id,
company_uu_id=str(company_uu_id),
**active_row,
db=db_session,
)
created_list.append(it_dept)
bm_duty = Duty.find_or_create(
duty_name="Business Manager",
duty_code="BM0001",
duty_description="Business Manager",
**active_row,
db=db_session,
)
created_list.append(bm_duty)
it_duty = Duty.find_or_create(
duty_name="IT Manager",
duty_code="IT0001",
duty_description="IT Manager",
**active_row,
db=db_session,
)
created_list.append(it_duty)
bulk_duty = Duty.find_or_create(
duty_name="BULK",
duty_code="BULK",
duty_description="BULK RECORDS OF THE COMPANY",
**active_row,
db=db_session,
)
created_list.append(bulk_duty)
occu_duty = Duty.find_or_create(
duty_name="OCCUPANT",
duty_code="OCCUPANT",
duty_description="OCCUPANT RECORDS OF THE COMPANY",
**active_row,
db=db_session,
)
created_list.append(occu_duty)
duties_created_bm = Duties.find_or_create(
company_id=company_id,
company_uu_id=str(company_uu_id),
duties_id=bm_duty.id,
duties_uu_id=str(bm_duty.uu_id),
department_id=execution.id,
department_uu_id=str(execution.uu_id),
**active_row,
db=db_session,
)
created_list.append(duties_created_bm)
duties_created_it = Duties.find_or_create(
company_id=company_id,
company_uu_id=str(company_uu_id),
duties_id=it_duty.id,
duties_uu_id=str(it_duty.uu_id),
department_id=it_dept.id,
department_uu_id=str(it_dept.uu_id),
**active_row,
db=db_session,
)
created_list.append(duties_created_it)
duties_created__ex = Duties.find_or_create(
company_id=company_id,
company_uu_id=str(company_uu_id),
duties_id=bulk_duty.id,
duties_uu_id=str(bulk_duty.uu_id),
department_id=execution.id,
department_uu_id=str(execution.uu_id),
**active_row,
db=db_session,
)
created_list.append(duties_created__ex)
duties_created_at = Duties.find_or_create(
company_id=company_id,
company_uu_id=str(company_uu_id),
duties_id=occu_duty.id,
duties_uu_id=str(occu_duty.uu_id),
department_id=execution.id,
department_uu_id=str(execution.uu_id),
**active_row,
db=db_session,
)
created_list.append(duties_created_at)
bulk_duty = Duty.filter_by_one(system=True, duty_code="BULK", db=db_session).data
it_dept = Departments.filter_by_one(
system=True,
department_name="IT Department",
department_code="ITD001",
company_id=company_management.id,
company_uu_id=str(company_management.uu_id),
db=db_session,
).data
created_duty = Duty.find_or_create(
duty_name="Database Manager",
duty_code="DM",
duty_description="Database Manager",
created_by=created_by,
confirmed_by=confirmed_by,
is_confirmed=True,
active=True,
deleted=False,
is_notification_send=True,
db=db_session,
)
created_list.append(created_duty)
created_duty = Duty.find_or_create(
duty_name="Network Manager",
duty_code="NM",
duty_description="Network Manager",
created_by=created_by,
confirmed_by=confirmed_by,
is_confirmed=True,
active=True,
deleted=False,
is_notification_send=True,
db=db_session,
)
created_list.append(created_duty)
application_manager_duty = Duty.find_or_create(
duty_name="Application Manager",
duty_code="AM",
duty_description="Application Manager",
created_by=created_by,
confirmed_by=confirmed_by,
is_confirmed=True,
active=True,
deleted=False,
is_notification_send=True,
db=db_session,
)
created_list.append(application_manager_duty)
application_super_user_duty = Duty.find_or_create(
duty_name="Super User",
duty_code="SUE",
duty_description="Super User",
created_by=created_by,
confirmed_by=confirmed_by,
is_confirmed=True,
active=True,
deleted=False,
is_notification_send=True,
db=db_session,
)
created_list.append(application_super_user_duty)
application_manager_duties = Duties.find_or_create(
department_id=it_dept.id,
department_uu_id=str(it_dept.uu_id),
duties_id=application_manager_duty.id,
duties_uu_id=str(application_manager_duty.uu_id),
company_id=company_management.id,
company_uu_id=str(company_management.uu_id),
is_confirmed=True,
active=True,
deleted=False,
is_notification_send=True,
db=db_session,
)
created_list.append(application_manager_duties)
super_user_duties = Duties.find_or_create(
department_id=it_dept.id,
department_uu_id=str(it_dept.uu_id),
duties_id=application_super_user_duty.id,
duties_uu_id=str(application_manager_duty.uu_id),
company_id=company_management.id,
company_uu_id=str(company_management.uu_id),
is_confirmed=True,
active=True,
deleted=False,
is_notification_send=True,
db=db_session,
)
created_list.append(super_user_duties)
RelationshipDutyCompany.find_or_create(
duties_id=application_manager_duties.id,
owner_id=company_management.id,
member_id=company_management.id,
parent_id=None,
child_count=0,
is_confirmed=True,
active=True,
deleted=False,
is_notification_send=True,
db=db_session,
)
created_list.append(application_manager_duties)
RelationshipDutyCompany.find_or_create(
duties_id=super_user_duties.id,
owner_id=company_management.id,
member_id=company_management.id,
parent_id=None,
child_count=0,
is_confirmed=True,
active=True,
deleted=False,
is_notification_send=True,
db=db_session,
)
created_list.append(super_user_duties)
app_manager = People.find_or_create(
**{
"person_tag": "BAM-System",
"firstname": "Berkay Application Manager",
"surname": "Karatay",
"sex_code": "M",
"middle_name": "",
"father_name": "Father",
"mother_name": "Mother",
"country_code": "TR",
"national_identity_id": "12312312312",
"birth_place": "Ankara",
"birth_date": "01.07.1990",
"tax_no": "1231231231",
"is_confirmed": True,
},
db=db_session,
)
created_list.append(app_manager)
sup_manager = People.find_or_create(
**{
"person_tag": "BSU-System",
"firstname": "Berkay Super User",
"surname": "Karatay",
"sex_code": "M",
"middle_name": "",
"father_name": "Father",
"mother_name": "Mother",
"country_code": "TR",
"national_identity_id": "12312312313",
"birth_place": "Ankara",
"birth_date": "01.07.1990",
"tax_no": "1231231232",
},
db=db_session,
)
created_list.append(sup_manager)
application_manager_staff = Staff.find_or_create(
staff_description="Application Manager",
staff_name="Application Manager Employee",
staff_code="AME",
duties_id=application_manager_duties.id,
duties_uu_id=str(application_manager_duty.uu_id),
is_confirmed=True,
active=True,
deleted=False,
is_notification_send=True,
db=db_session,
)
created_list.append(application_manager_staff)
super_user_staff = Staff.find_or_create(
staff_description="Super User",
staff_name="Super User Employee",
staff_code="SUE",
duties_id=super_user_duties.id,
duties_uu_id=str(application_manager_duty.uu_id),
is_confirmed=True,
active=True,
deleted=False,
is_notification_send=True,
db=db_session,
)
created_list.append(super_user_staff)
app_manager_employee = Employees.find_or_create(
staff_id=application_manager_staff.id,
staff_uu_id=str(application_manager_staff.uu_id),
people_id=app_manager.id,
people_uu_id=str(app_manager.uu_id),
is_confirmed=True,
active=True,
deleted=False,
is_notification_send=True,
db=db_session,
)
created_list.append(app_manager_employee)
super_user_employee = Employees.find_or_create(
staff_id=super_user_staff.id,
staff_uu_id=str(super_user_staff.uu_id),
people_id=sup_manager.id,
people_uu_id=str(sup_manager.uu_id),
is_confirmed=True,
active=True,
deleted=False,
is_notification_send=True,
db=db_session,
)
created_list.append(super_user_employee)
app_manager_user = Users.find_or_create(
person_id=app_manager.id,
person_uu_id=str(app_manager.uu_id),
user_tag=app_manager.person_tag,
email="karatay.berkay.man@evyos.com.tr",
phone_number="+901111111111",
avatar="https://s.tmimgcdn.com/scr/800x500/276800/building-home-nature-logo-vector-template-3_276851-original.jpg",
related_company=str(company_management.uu_id),
is_confirmed=True,
active=True,
deleted=False,
is_notification_send=True,
db=db_session,
)
created_list.append(app_manager_user)
app_manager_user.password_expiry_begins = str(arrow.now())
app_manager_user.password_token = PasswordModule.generate_refresher_token()
main_domain, collection_name = (
"evyos.com.tr",
f"{str(company_management.uu_id)}*Domain",
)
with mongo_handler.collection(collection_name) as mongo_engine:
mongo_engine.insert_one(
document={
"user_uu_id": str(app_manager_user.uu_id),
"other_domains_list": [main_domain],
"main_domain": main_domain,
"modified_at": arrow.now().timestamp(),
}
)
sup_manager_employee = Users.find_or_create(
person_id=sup_manager.id,
person_uu_id=str(sup_manager.uu_id),
user_tag=sup_manager.person_tag,
email="karatay.berkay.sup@evyos.com.tr",
phone_number="+901111111112",
avatar="https://s.tmimgcdn.com/scr/800x500/276800/building-home-nature-logo-vector-template-3_276851-original.jpg",
created_by=created_by,
confirmed_by=confirmed_by,
related_company=str(company_management.uu_id),
is_confirmed=True,
active=True,
deleted=False,
is_notification_send=True,
db=db_session,
)
created_list.append(sup_manager_employee)
sup_manager_employee.password_expiry_begins = str(arrow.now())
sup_manager_employee.password_token = PasswordModule.generate_refresher_token()
with mongo_handler.collection(collection_name) as mongo_engine:
mongo_engine.insert_one(
document={
"user_uu_id": str(sup_manager_employee.uu_id),
"other_domains_list": [main_domain],
"main_domain": main_domain,
"modified_at": arrow.now().timestamp(),
}
)
db_session.commit()
print("All Defaults Create is now completed")

View File

@@ -0,0 +1,253 @@
from pydantic import BaseModel


class InsertBuildTypes(BaseModel):
    """Validated payload for one BuildTypes seed row.

    Used by ``init_api_enums_build_types`` to normalize raw dict entries
    before they are passed to ``BuildTypes.find_or_create``.
    """

    # Owning application/function namespace for the type (e.g. "EVYOS").
    function_code: str
    # Short unique code of the build type; upper-cased by the caller.
    type_code: str
    # Language code of the display name (e.g. "TR").
    lang: str
    # Human-readable name of the build type.
    type_name: str
def init_api_enums_build_types(db_session):
    """Seed the BuildTypes table and the ApiEnumDropdown table with defaults.

    Idempotent: both loops use ``find_or_create``, so re-running against an
    already-seeded database creates no duplicates.

    Args:
        db_session: An open database session passed through to the
            ``find_or_create`` / ``save`` schema helpers.
    """
    from Schemas import BuildTypes, ApiEnumDropdown

    insert_types = [
        {
            "function_code": "EVYOS",
            "type_code": "APT_KZN",
            "type_name": "Apartman Kazan Dairesi",
            "lang": "TR",
        },
        {
            "function_code": "EVYOS",
            "type_code": "APT_GRJ",
            "type_name": "Apartman Garaj",
            "lang": "TR",
        },
        {
            "function_code": "EVYOS",
            "type_code": "APT_DP",
            "type_name": "Apartman Depo",
            "lang": "TR",
        },
        {
            "function_code": "EVYOS",
            "type_code": "DAIRE",
            "type_name": "Apartman Dairesi",
            "lang": "TR",
        },
        {
            "function_code": "EVYOS",
            "type_code": "APT",
            "type_name": "Apartman Binası",
            "lang": "TR",
        },
        {
            "function_code": "EVYOS",
            "type_code": "APT_YNT",
            "type_name": "Apartman Yönetimi",
            "lang": "TR",
        },
        {
            "function_code": "EVYOS",
            "type_code": "APT_PRK",
            "type_name": "Apartman Açık Park Alanı",
            "lang": "TR",
        },
        {
            "function_code": "EVYOS",
            "type_code": "APT_YSL",
            "type_name": "Apartman Yeşil Alan",
            "lang": "TR",
        },
        {
            "function_code": "EVYOS",
            "type_code": "APT_YOL",
            "type_name": "Apartman Ara Yol",
            "lang": "TR",
        },
    ]
    for insert_type in insert_types:
        # Validate through the pydantic model; use the row's own
        # function_code instead of a hard-coded literal so new rows with a
        # different namespace are honored.
        build_types = InsertBuildTypes(
            function_code=insert_type["function_code"],
            lang=insert_type["lang"],
            type_code=str(insert_type["type_code"]).upper(),
            type_name=insert_type["type_name"],
        )
        created_build_type = BuildTypes.find_or_create(
            **build_types.model_dump(), is_confirmed=True, db=db_session
        )
        created_build_type.save(db=db_session)

    # Dropdown enum seed rows: (enum_class, key=type_code, value=type_name).
    insert_enums = [
        {"enum_class": "BuildDuesTypes", "type_code": "BDT-D", "type_name": "Debit"},
        {
            "enum_class": "BuildDuesTypes",
            "type_code": "BDT-A",
            "type_name": "Add Debit",
        },
        {
            "enum_class": "BuildDuesTypes",
            "type_code": "BDT-R",
            "type_name": "Renovation",
        },
        {
            "enum_class": "BuildDuesTypes",
            "type_code": "BDT-L",
            # Fixed typo: "expence" -> "expense".
            "type_name": "Lawyer expense",
        },
        {
            "enum_class": "BuildDuesTypes",
            "type_code": "BDT-S",
            "type_name": "Service fee",
        },
        {
            "enum_class": "BuildDuesTypes",
            "type_code": "BDT-I",
            "type_name": "Information",
        },
        {
            "enum_class": "AccountingReceiptTypes",
            "type_code": "ART-A",
            "type_name": "Kasa Tahsil Fişi",
        },
        {
            "enum_class": "AccountingReceiptTypes",
            "type_code": "ART-E",
            "type_name": "Kasa Tediye Fişi",
        },
        {
            "enum_class": "AccountingReceiptTypes",
            "type_code": "ART-M",
            "type_name": "Mahsup Fişi",
        },
        {
            "enum_class": "AccountingReceiptTypes",
            "type_code": "ART-O",
            # Fixed mis-encoded text: "ılış Fişi" -> "Açılış Fişi"
            # (opening voucher, the counterpart of "Kapanış Fişi" below).
            "type_name": "Açılış Fişi",
        },
        {
            "enum_class": "AccountingReceiptTypes",
            "type_code": "ART-C",
            "type_name": "Kapanış Fişi",
        },
        {"enum_class": "IbanBudgetType", "type_code": "IBT-I", "type_name": "Iban"},
        {"enum_class": "IbanBudgetType", "type_code": "IBT-B", "type_name": "Budget"},
        {
            "enum_class": "IbanBudgetType",
            "type_code": "IBT-TR",
            "type_name": "Transaction records",
        },
        {"enum_class": "ProjectTypes", "type_code": "R", "type_name": "Tadilat"},
        {
            "enum_class": "ProjectTypes",
            "type_code": "PT-C",
            "type_name": "Mahkeme süreçleri",
        },
        {
            "enum_class": "ProjectTypes",
            "type_code": "PT-Z",
            "type_name": "Sıfır Bakiye",
        },
        {
            "enum_class": "EdmBudgetType",
            "type_code": "PT-B",
            "type_name": "Banka records",
        },
        {
            "enum_class": "EdmBudgetType",
            "type_code": "PT-S",
            "type_name": "Sistem kaydı",
        },
        {
            "enum_class": "EdmBudgetType",
            "type_code": "EBT-C",
            "type_name": "Build, Flat or Site records",
        },
        # ExpireType keys are the period length in days.
        {"enum_class": "ExpireType", "type_code": "1", "type_name": "daily"},
        {"enum_class": "ExpireType", "type_code": "7", "type_name": "weekly"},
        {"enum_class": "ExpireType", "type_code": "30", "type_name": "monthly"},
        {"enum_class": "ExpireType", "type_code": "90", "type_name": "quarter"},
        {"enum_class": "ExpireType", "type_code": "180", "type_name": "six_month"},
        {"enum_class": "ExpireType", "type_code": "365", "type_name": "yearly"},
        {"enum_class": "PhoneType", "type_code": "M", "type_name": "cep tel"},
        {"enum_class": "PhoneType", "type_code": "L", "type_name": "sabit telefon"},
        {"enum_class": "PhoneType", "type_code": "F", "type_name": "fax"},
        {"enum_class": "PhoneType", "type_code": "C", "type_name": "santral"},
        {
            "enum_class": "PhoneType",
            "type_code": "G",
            "type_name": "ülke genelindeki hatlar 444",
        },
        {"enum_class": "PerComType", "type_code": "1", "type_name": "Person"},
        {"enum_class": "PerComType", "type_code": "2", "type_name": "Company"},
        {"enum_class": "Directions", "type_code": "NN", "type_name": "North"},
        {"enum_class": "Directions", "type_code": "EE", "type_name": "East"},
        {"enum_class": "Directions", "type_code": "SS", "type_name": "South"},
        {"enum_class": "Directions", "type_code": "WW", "type_name": "West"},
        {"enum_class": "Directions", "type_code": "NE", "type_name": "North East"},
        {"enum_class": "Directions", "type_code": "NW", "type_name": "North West"},
        {"enum_class": "Directions", "type_code": "SE", "type_name": "South East"},
        {"enum_class": "Directions", "type_code": "SW", "type_name": "South West"},
        {
            "enum_class": "MeetingTypes",
            "type_code": "MT-RBM",
            "type_name": "Regular Building Meeting",
        },
        {
            "enum_class": "MeetingTypes",
            "type_code": "MT-DBM",
            "type_name": "Disaster Building Meeting",
        },
        {
            "enum_class": "MeetingTypes",
            "type_code": "MT-EBM",
            "type_name": "Emergency Building Meeting",
        },
        {
            "enum_class": "DebitTypes",
            "type_code": "DT-D",
            "type_name": "Debit Sender",
        },
        {
            "enum_class": "DebitTypes",
            "type_code": "DT-R",
            "type_name": "Credit Receiver",
        },
        {
            "enum_class": "DebitTypes",
            "type_code": "DT-Z",
            "type_name": "Zero Balance",
        },
        {
            "enum_class": "TimePeriod",
            "type_code": "TP-W",
            "type_name": "Weekly",
        },
        {
            "enum_class": "TimePeriod",
            "type_code": "TP-M",
            "type_name": "Monthly",
        },
        {
            "enum_class": "TimePeriod",
            "type_code": "TP-Q",
            "type_name": "Quarterly",
        },
        {
            "enum_class": "TimePeriod",
            "type_code": "TP-Y",
            "type_name": "Yearly",
        },
    ]
    for insert_enum in insert_enums:
        created_api_enum = ApiEnumDropdown.find_or_create(
            enum_class=insert_enum["enum_class"],
            value=insert_enum["type_name"],
            key=str(insert_enum["type_code"]).upper(),
            description=insert_enum["type_name"],
            is_confirmed=True,
            db=db_session,
        )
        created_api_enum.save(db=db_session)

View File

@@ -0,0 +1,218 @@
def create_occupant_types_defaults(db_session):
    """Seed the OccupantTypes table with the default occupant roles.

    Each row carries:
      - occupant_category / occupant_category_type: grouping label and code
        (MT meeting, FL flat, BU building, PRJ project).
      - occupant_is_unique: whether at most one holder of the role may exist.

    Idempotent via ``find_or_create``; failures on individual rows are
    reported and do not abort the remaining inserts.
    """
    from Schemas import OccupantTypes

    # Columns populated here (for reference):
    #   occupant_category        = mapped_column(String, server_default="")
    #   occupant_category_type   = mapped_column(String, server_default="")
    #   occupant_is_unique       = mapped_column(Boolean, server_default="0")
    occupant_type_rows = [
        {
            "occupant_type": "Toplantı Başkanı",
            "occupant_description": "Toplantı Başkanı",
            "occupant_code": "MT-PRS",
            "occupant_category": "Toplantı",
            "occupant_category_type": "MT",
            "occupant_is_unique": True,
        },
        {
            "occupant_type": "Toplantı Katip",
            "occupant_description": "Toplantıda tutanak tutan kişi",
            "occupant_code": "MT-WRT",
            "occupant_category": "Toplantı",
            "occupant_category_type": "MT",
            "occupant_is_unique": True,
        },
        {
            "occupant_type": "Toplantı Katılımcısı",
            "occupant_description": "Toplantıda sadece katılan kişi",
            "occupant_code": "MT-ATT",
            "occupant_category": "Toplantı",
            "occupant_category_type": "MT",
            "occupant_is_unique": False,
        },
        {
            "occupant_type": "Toplantı Danışman",
            "occupant_description": "Toplantıda danışmanlık yapan kişi",
            "occupant_code": "MT-ADV",
            "occupant_category": "Toplantı",
            "occupant_category_type": "MT",
            "occupant_is_unique": False,
        },
        {
            "occupant_type": "Toplantı Seçilmiş Başkanı",
            "occupant_description": "Toplantı Seçilmiş Başkanı",
            "occupant_code": "MT-VPR",
            "occupant_category": "Toplantı",
            "occupant_category_type": "MT",
            "occupant_is_unique": True,
        },
        {
            "occupant_type": "Daire Sahibi",
            "occupant_description": "Daire Sahibi",
            "occupant_code": "FL-OWN",
            "occupant_category": "Daire",
            "occupant_category_type": "FL",
            "occupant_is_unique": True,
        },
        {
            "occupant_type": "Daire Kiracısı",
            "occupant_description": "Daire Kiracısı",
            "occupant_code": "FL-TEN",
            "occupant_category": "Daire",
            "occupant_category_type": "FL",
            "occupant_is_unique": True,
        },
        {
            "occupant_type": "Daire Sakini",
            "occupant_description": "Daire Sakini",
            "occupant_code": "FL-RES",
            "occupant_category": "Daire",
            "occupant_category_type": "FL",
            "occupant_is_unique": False,
        },
        {
            "occupant_type": "Daire Sakini Vekili",
            "occupant_description": "Daire Sakini Vekili",
            "occupant_code": "FL-REP",
            "occupant_category": "Daire",
            "occupant_category_type": "FL",
            "occupant_is_unique": False,
        },
        {
            "occupant_type": "Bina Avukatı",
            "occupant_description": "Bina Avukatı",
            "occupant_code": "BU-ATT",
            "occupant_category": "Bina",
            "occupant_category_type": "BU",
            "occupant_is_unique": False,
        },
        {
            "occupant_type": "Bina Avukatı Yardımcısı",
            "occupant_description": "Bina Avukatı Yardımcısı",
            "occupant_code": "BU-ATA",
            "occupant_category": "Bina",
            "occupant_category_type": "BU",
            "occupant_is_unique": False,
        },
        {
            "occupant_type": "Bina Denetmen Yardımcısı",
            "occupant_description": "Bina Denetmen Yardımcısı",
            "occupant_code": "BU-SPA",
            "occupant_category": "Bina",
            "occupant_category_type": "BU",
            "occupant_is_unique": False,
        },
        {
            "occupant_type": "Bina Denetmeni",
            "occupant_description": "Bina Denetmeni",
            "occupant_code": "BU-SPV",
            "occupant_category": "Bina",
            "occupant_category_type": "BU",
            "occupant_is_unique": False,
        },
        {
            "occupant_type": "Bina Yönetici Yardımcısı",
            "occupant_description": "Bina Yönetici Yardımcısı",
            "occupant_code": "BU-MNA",
            "occupant_category": "Bina",
            "occupant_category_type": "BU",
            "occupant_is_unique": False,
        },
        {
            "occupant_type": "Bina Yöneticisi",
            "occupant_description": "Bina Yöneticisi",
            "occupant_code": "BU-MNG",
            "occupant_category": "Bina",
            "occupant_category_type": "BU",
            "occupant_is_unique": True,
        },
        {
            "occupant_type": "Bina Muhasabecisi",
            "occupant_description": "Bina Muhasabecisi",
            "occupant_code": "BU-ACC",
            "occupant_category": "Bina",
            "occupant_category_type": "BU",
            "occupant_is_unique": False,
        },
        {
            "occupant_type": "Proje Lideri",
            "occupant_description": "Proje Lideri",
            "occupant_code": "PRJ-LDR",
            "occupant_category": "Proje",
            "occupant_category_type": "PRJ",
            "occupant_is_unique": False,
        },
        {
            "occupant_type": "Proje Sorumlusu",
            "occupant_description": "Proje Sorumlusu",
            "occupant_code": "PRJ-RES",
            "occupant_category": "Proje",
            "occupant_category_type": "PRJ",
            "occupant_is_unique": False,
        },
        {
            "occupant_type": "Proje Ekibi",
            "occupant_description": "Proje Ekibi",
            "occupant_code": "PRJ-EMP",
            "occupant_category": "Proje",
            "occupant_category_type": "PRJ",
            "occupant_is_unique": False,
        },
        {
            "occupant_type": "Proje Finans Sorumlusu",
            "occupant_description": "Proje Finans Sorumlusu",
            "occupant_code": "PRJ-FIN",
            "occupant_category": "Proje",
            "occupant_category_type": "PRJ",
            "occupant_is_unique": False,
        },
        {
            "occupant_type": "Proje Teknik Sorumlusu",
            "occupant_description": "Proje Teknik Sorumlusu",
            "occupant_code": "PRJ-TEC",
            "occupant_category": "Proje",
            "occupant_category_type": "PRJ",
            "occupant_is_unique": False,
        },
        {
            "occupant_type": "Daire Mülkiyet Vekili",
            "occupant_description": "Daire Mülkiyet Vekili",
            "occupant_code": "FL-DEP",  # deputy
            "occupant_category": "Daire",
            "occupant_category_type": "FL",
            "occupant_is_unique": False,
        },
        {
            "occupant_type": "Bina Teknik Sorumlusu",
            "occupant_description": "Bina Teknik Sorumlusu",
            "occupant_code": "BU-TEC",
            "occupant_category": "Bina",
            "occupant_category_type": "BU",
            "occupant_is_unique": False,
        },
        {
            "occupant_type": "Bina Teknik Elemanı",
            "occupant_description": "Bina Teknik Elemanı",
            "occupant_code": "BU-EMP",
            "occupant_category": "Bina",
            "occupant_category_type": "BU",
            "occupant_is_unique": False,
        },
        {
            "occupant_type": "Bina Teknik Freelancer",
            "occupant_description": "Bina Teknik Freelancer",
            "occupant_code": "BU-FLC",
            "occupant_category": "Bina",
            "occupant_category_type": "BU",
            "occupant_is_unique": False,
        },
    ]
    for row in occupant_type_rows:
        # Best-effort insert: report a failed row, continue with the rest.
        try:
            occupant_record = OccupantTypes.find_or_create(
                **row, is_confirmed=True, db=db_session
            )
            occupant_record.save(db=db_session)
        except Exception as e:
            print(f"Error: {e}")

View File

@@ -0,0 +1,83 @@
def create_modules_and_services_and_actions(db_session):
    """Seed default Modules and derive Services from duties and occupant types.

    Creates three base modules (ERP, building management, public user), then:
      - one ``SRE-*`` service under the ERP module per Duty (excluding the
        BULK / OCCUPANT / BM0001 duty codes),
      - one ``SRO-*`` service under the building module per OccupantTypes row.

    Idempotent via ``find_or_create``.

    Args:
        db_session: An open database session passed to the schema helpers.
    """
    from Schemas import (
        Duty,
        OccupantTypes,
        Modules,
        Services,
    )

    erp_module = Modules.find_or_create(
        **{
            "module_name": "EVYOS ERP",
            "module_description": "EVYOS Enterprise Resource Planning",
            "module_code": "EVYOS-ERP",
            "module_layer": 1,
            "is_default_module": False,
            "is_confirmed": True,
        },
        db=db_session,
    )
    erp_module.save(db=db_session)
    build_module = Modules.find_or_create(
        **{
            "module_name": "Bina Yönetim Modülü",
            "module_description": "Building Management Module",
            "module_code": "BLD-MNG",
            "module_layer": 1,
            "is_default_module": False,
            "is_confirmed": True,
        },
        db=db_session,
    )
    build_module.save(db=db_session)
    user_module = Modules.find_or_create(
        **{
            # Fixed typo: "Kullancı" -> "Kullanıcı".
            "module_name": "Kullanıcı Modülü",
            "module_description": "Kullanıcı Genel Modülü",
            "module_code": "USR-PUB",
            "module_layer": 1,
            "is_default_module": True,
            "is_confirmed": True,
        },
        db=db_session,
    )
    user_module.save(db=db_session)

    # Foreign-key kwargs reused for every service created under each module.
    erp_module_keys = dict(
        module_id=erp_module.id,
        module_uu_id=str(erp_module.uu_id),
    )
    build_module_keys = dict(
        module_id=build_module.id,
        module_uu_id=str(build_module.uu_id),
    )

    # One ERP service per non-special duty.
    duty_objects = Duty.filter_all(
        Duty.duty_code.notin_(["BULK", "OCCUPANT", "BM0001"]),
        db=db_session,
    ).data
    for duty_object in duty_objects:
        duty_service = Services.find_or_create(
            **erp_module_keys,
            service_name=duty_object.duty_name,
            service_description=duty_object.duty_description,
            service_code=f"SRE-{duty_object.duty_code}",
            related_responsibility=duty_object.duty_code,
            is_confirmed=True,
            db=db_session,
        )
        duty_service.save(db=db_session)

    # One building-module service per occupant type.
    occupant_types = OccupantTypes.filter_all(db=db_session).data
    for occupant_type in occupant_types:
        occupant_service = Services.find_or_create(
            **build_module_keys,
            service_name=occupant_type.occupant_type,
            service_description=occupant_type.occupant_description,
            service_code=f"SRO-{occupant_type.occupant_code}",
            related_responsibility=occupant_type.occupant_code,
            is_confirmed=True,
            db=db_session,
        )
        occupant_service.save(db=db_session)

View File

@@ -8,9 +8,9 @@ class Configs(BaseSettings):
"""
PATH: str = ""
HOST: str = ("",)
PORT: int = (0,)
LOG_LEVEL: str = ("info",)
HOST: str = ""
PORT: int = 0
LOG_LEVEL: str = "info"
RELOAD: int = 0
ACCESS_TOKEN_TAG: str = ""

View File

@@ -10,6 +10,14 @@ from ApiServices.TemplateService.initializer.create_route import RouteRegisterCo
from .config import api_config
def create_events_if_any_cluster_set():
import events
for event_str in events.__all__:
if to_set_events := getattr(events, event_str, None):
to_set_events.set_events_to_database()
def create_app():
application = FastAPI(**api_config.api_info)
@@ -36,6 +44,6 @@ def create_app():
route_register = RouteRegisterController(app=application, router_list=get_routes())
application = route_register.register_routes()
create_events_if_any_cluster_set()
application.openapi = lambda _=application: create_openapi_schema(_)
return application

View File

@@ -1,6 +1,6 @@
from fastapi import APIRouter, Request, Response
from ApiServices.TemplateService.events.events_setter import event_cluster
from ApiServices.TemplateService.events.template.event import template_event_cluster
test_template_route = APIRouter(prefix="/test", tags=["Test"])
@@ -8,30 +8,23 @@ test_template_route = APIRouter(prefix="/test", tags=["Test"])
@test_template_route.get(
path="/template",
description="Test Template Route",
operation_id="bb20c8c6-a289-4cab-9da7-34ca8a36c8e5"
operation_id="bb20c8c6-a289-4cab-9da7-34ca8a36c8e5",
)
def test_template(request: Request, response: Response):
"""
Test Template Route
"""
headers = dict(request.headers)
event_cluster_matched = event_cluster.match_event(
event_cluster_matched = template_event_cluster.match_event(
event_keys=[
"3f510dcf-9f84-4eb9-b919-f582f30adab1",
"9f403034-deba-4e1f-b43e-b25d3c808d39",
"b8ec6e64-286a-4f60-8554-7a3865454944"
"b8ec6e64-286a-4f60-8554-7a3865454944",
]
)
event_cluster_matched.example_callable()
response.headers["X-Header"] = "Test Header GET"
return {
"completed": True,
"message": "Test Template Route",
"info": {
"host": headers.get("host", "Not Found"),
"user_agent": headers.get("user-agent", "Not Found"),
},
}
if runner_callable := event_cluster_matched.example_callable():
return runner_callable
raise ValueError("Event key not found or multiple matches found")
@test_template_route.post(
@@ -42,13 +35,14 @@ def test_template_post(request: Request, response: Response):
"""
Test Template Route with Post Method
"""
headers = dict(request.headers)
event_cluster_matched = template_event_cluster.match_event(
event_keys=[
"3f510dcf-9f84-4eb9-b919-f582f30adab1",
"9f403034-deba-4e1f-b43e-b25d3c808d39",
"b8ec6e64-286a-4f60-8554-7a3865454944",
]
)
response.headers["X-Header"] = "Test Header POST"
return {
"completed": True,
"message": "Test Template Route with Post Method",
"info": {
"host": headers.get("host", "Not Found"),
"user_agent": headers.get("user-agent", "Not Found"),
},
}
if runner_callable := event_cluster_matched.example_callable():
return runner_callable
raise ValueError("Event key not found or multiple matches found")

View File

@@ -0,0 +1,5 @@
from .template.event import template_event_cluster
__all__ = [
"template_event_cluster",
]

View File

@@ -9,18 +9,21 @@ single_event = Event(
description="Example event description",
)
def example_callable():
"""
Example callable method
"""
return {
"completed": True,
"message": "Example callable method 2",
"info": {
"host": "example_host",
"user_agent": "example_user_agent",
},
}
"completed": True,
"message": "Example callable method 2",
"info": {
"host": "example_host",
"user_agent": "example_user_agent",
},
}
single_event.event_callable = example_callable
other_event = Event(
@@ -30,28 +33,34 @@ other_event = Event(
response_validator=None, # TODO: Add response validator
description="Example event 2 description",
)
def example_callable_other():
"""
Example callable method
"""
return {
"completed": True,
"message": "Example callable method 1",
"info": {
"host": "example_host",
"user_agent": "example_user_agent",
},
}
"completed": True,
"message": "Example callable method 1",
"info": {
"host": "example_host",
"user_agent": "example_user_agent",
},
}
other_event.event_callable = example_callable_other
tokens_in_redis = [
"3f510dcf-9f84-4eb9-b919-f582f30adab1",
"9f403034-deba-4e1f-b43e-b25d3c808d39",
"b8ec6e64-286a-4f60-8554-7a3865454944",
"176b829c-7622-4cf2-b474-421e5acb637c",
]
template_event_cluster = EventCluster(endpoint_uu_id="bb20c8c6-a289-4cab-9da7-34ca8a36c8e5")
"3f510dcf-9f84-4eb9-b919-f582f30adab1",
"9f403034-deba-4e1f-b43e-b25d3c808d39",
"b8ec6e64-286a-4f60-8554-7a3865454944",
"176b829c-7622-4cf2-b474-421e5acb637c",
]
template_event_cluster = EventCluster(
endpoint_uu_id="bb20c8c6-a289-4cab-9da7-34ca8a36c8e5"
)
template_event_cluster.add_event([single_event, other_event])
matched_event = template_event_cluster.match_event(event_keys=tokens_in_redis)
# matched_event = template_event_cluster.match_event(event_keys=tokens_in_redis)
print('event_callable', matched_event.event_callable())
# print('event_callable', matched_event.event_callable())

View File

@@ -9,30 +9,31 @@ class RouteRegisterController:
self.app = app
@staticmethod
def add_router_with_event_to_database(route: APIRouter):
def add_router_with_event_to_database(router: APIRouter):
from Schemas import EndpointRestriction
with EndpointRestriction.new_session() as db_session:
route_path = str(getattr(route, "path"))
route_summary = str(getattr(route, "name")) or ""
operation_id = str(getattr(route, "operation_id")) or ""
if not operation_id:
return
for route in router.routes:
route_path = str(getattr(route, "path"))
route_summary = str(getattr(route, "name"))
operation_id = getattr(route, "operation_id", None)
if not operation_id:
continue
for route_method in [method.lower() for method in getattr(route, "methods")]:
restriction = EndpointRestriction.find_or_create(
**dict(
for route_method in [
method.lower() for method in getattr(route, "methods")
]:
restriction = EndpointRestriction.find_or_create(
endpoint_method=route_method,
endpoint_name=route_path,
endpoint_desc=route_summary.replace("_", " "),
endpoint_function=route_summary,
operation_uu_id=operation_id, # UUID of the endpoint
operation_uu_id=operation_id, # UUID of the endpoint
is_confirmed=True,
db=db_session,
)
)
if not restriction.meta_data.created:
restriction.endpoint_code = f"AR{str(restriction.id).zfill(3)}"
restriction.save(db=db_session)
if restriction.meta_data.created:
restriction.save(db=db_session)
def register_routes(self):
for router in self.router_list:

View File

@@ -1,4 +1,3 @@
class EventCluster:
def __init__(self, endpoint_uu_id: str):
@@ -25,33 +24,39 @@ class EventCluster:
def set_events_to_database(self):
from Schemas import Events, EndpointRestriction
with Events.new_session() as db_session:
with Events.new_session() as db_session:
if to_save_endpoint := EndpointRestriction.filter_one(
EndpointRestriction.uu_id == self.endpoint_uu_id,
EndpointRestriction.operation_uu_id == self.endpoint_uu_id,
db=db_session,
).data:
for event in self.events:
event_obj = Events.find_or_create(
event_to_save_database = Events.find_or_create(
function_code=event.key,
function_class=event.name,
description=event.description,
endpoint_code=self.endpoint_uu_id,
endpoint_id=to_save_endpoint.id,
endpoint_uu_id=str(to_save_endpoint.uu_id),
is_confirmed=True,
active=True,
db=db_session,
)
event_obj.save()
print(f'UUID: {event_obj.uu_id} event is saved to {to_save_endpoint.uu_id}')
if event_to_save_database.meta_data.created:
event_to_save_database.save(db=db_session)
print(
f"UUID: {event_to_save_database.uu_id} event is saved to {to_save_endpoint.uu_id}"
)
def match_event(self, event_keys: list[str]) -> "Event":
"""
Match an event by its key
"""
print('set(event_keys)', set(event_keys))
print('event.keys', set([event.key for event in self.events]))
intersection_of_key: set[str] = set(event_keys) & set([event.key for event in self.events])
# print('set(event_keys)', set(event_keys))
# print('event.keys', set([event.key for event in self.events]))
intersection_of_key: set[str] = set(event_keys) & set(
[event.key for event in self.events]
)
if not len(intersection_of_key) == 1:
raise ValueError(
f"Event key not found or multiple matches found: {intersection_of_key}"
@@ -75,7 +80,6 @@ class Event:
self.response_validator = response_validator
self.description = description
def event_callable(self):
"""
Example callable method

View File

@@ -6,7 +6,7 @@ from ..config import api_config
async def token_middleware(request: Request, call_next):
base_url = "/".join(request.url.path.split("/")[:3])
base_url = request.url.path
safe_endpoints = [_[0] for _ in get_safe_endpoint_urls()]
if base_url in safe_endpoints:
return await call_next(request)

View File

@@ -3,7 +3,7 @@ from fastapi import FastAPI
from fastapi.routing import APIRoute
from fastapi.openapi.utils import get_openapi
from ApiServices.TemplateService.config import template_api_config
from .config import api_config as template_api_config
from ApiServices.TemplateService.endpoints.routes import get_safe_endpoint_urls