Compare commits
58 Commits
73d41b8e10
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
| 78615a7f8e | |||
| 410ee2d6c4 | |||
| fa66741b48 | |||
| 17a6609a8f | |||
| 1c6fffd29d | |||
| 39a839c43c | |||
| 73645ce3ca | |||
| 05c8af2310 | |||
| be8a5212d8 | |||
| 7997e561f7 | |||
| 5223f36da7 | |||
| b8cebd9af4 | |||
| de9af544bb | |||
| b61f00d61c | |||
| 6089f1cc8e | |||
| a0ec0b5b69 | |||
| 6bdf887437 | |||
| aa6fb5a683 | |||
| c248db29fe | |||
| eab9e93969 | |||
| 6682d50228 | |||
| 1b15b77036 | |||
| 3c8ec13dfe | |||
| a61efdf473 | |||
| 4308f1e7b1 | |||
| 6ba0e37ffd | |||
| 9e955841c3 | |||
| 88309eb49d | |||
| 88f94c37c2 | |||
| efb528bd46 | |||
| 7e4cec2d0b | |||
| 55abf7c80f | |||
| c2dd464fc6 | |||
| ac5a71f1a8 | |||
| a038a1b8ee | |||
| aaeb7f4d00 | |||
| 665d961be8 | |||
| a4fd52c28a | |||
| 288a393719 | |||
| 3539a26d77 | |||
| 3e38bdf113 | |||
| ea6a5b20f3 | |||
| 40b6c63e2a | |||
| 193fca1248 | |||
| 77a57279f2 | |||
| c01b729e1d | |||
| 3b32cdcf62 | |||
| 5a5e58ff78 | |||
| c525ac1117 | |||
| a371d5d6e3 | |||
| 523b2c5033 | |||
| e119c66d3c | |||
| a71939c651 | |||
| 54ccc55c34 | |||
| 6ad1ba7d62 | |||
| 1291ac87e2 | |||
| 5e3bd35603 | |||
| 40bf6d8ae1 |
271593
a_project_files/backups/account_records_202411201459.json
Normal file
271593
a_project_files/backups/account_records_202411201459.json
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1,4 +1,3 @@
|
||||
import json
|
||||
import typing
|
||||
from typing import Union
|
||||
|
||||
@@ -36,7 +35,6 @@ from api_validations.validations_request import (
|
||||
EmployeeSelection,
|
||||
)
|
||||
from api_services import (
|
||||
password_is_changed_template,
|
||||
change_your_password_template,
|
||||
save_access_token_to_redis,
|
||||
update_selected_to_redis,
|
||||
@@ -55,6 +53,9 @@ from api_validations.core_response import AlchemyJsonResponse
|
||||
class AuthenticationLoginEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "LOGIN"
|
||||
event_description = "Login via domain and access key : [email] | [phone]"
|
||||
event_category = "AUTHENTICATION"
|
||||
|
||||
__event_keys__ = {
|
||||
"e672846d-cc45-4d97-85d5-6f96747fac67": "authentication_login_with_domain_and_creds",
|
||||
}
|
||||
@@ -66,13 +67,11 @@ class AuthenticationLoginEventMethods(MethodToEvent):
|
||||
request: Request,
|
||||
):
|
||||
access_dict = Users.login_user_with_credentials(data=data, request=request)
|
||||
found_user = access_dict.get("user", None)
|
||||
if not found_user:
|
||||
if not access_dict.get("user", None):
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid credentials"
|
||||
)
|
||||
access_object = access_dict.get("access_object")
|
||||
if not access_object:
|
||||
if not access_dict.get("access_object", None):
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail="This User has no active role registered. Please contact your administrator.",
|
||||
@@ -84,7 +83,7 @@ class AuthenticationLoginEventMethods(MethodToEvent):
|
||||
"access_token": access_dict.get("access_token"),
|
||||
"refresh_token": access_dict.get("refresher_token"),
|
||||
"access_object": access_dict.get("access_object"),
|
||||
"user": found_user.get_dict(),
|
||||
"user": access_dict.get("user", None).get_dict(),
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
@@ -106,7 +105,7 @@ class AuthenticationSelectEventMethods(MethodToEvent):
|
||||
):
|
||||
from api_objects import OccupantToken, CompanyToken
|
||||
|
||||
if token_dict.user_type == 1:
|
||||
if isinstance(token_dict, EmployeeTokenObject):
|
||||
if data.company_uu_id not in token_dict.companies_uu_id_list:
|
||||
return JSONResponse(
|
||||
content={
|
||||
@@ -140,8 +139,8 @@ class AuthenticationSelectEventMethods(MethodToEvent):
|
||||
Employees.people_id == token_dict.person_id,
|
||||
Employees.staff_id.in_(staff_ids),
|
||||
).data
|
||||
reachable_event_list_id, reachable_event_list_uu_id = (
|
||||
Event2Employee.get_event_id_by_employee_id(employee_id=employee.id)
|
||||
reachable_event_list_id = Event2Employee.get_event_id_by_employee_id(
|
||||
employee_id=employee.id
|
||||
)
|
||||
staff = Staff.filter_one(
|
||||
Staff.id == employee.staff_id,
|
||||
@@ -173,7 +172,6 @@ class AuthenticationSelectEventMethods(MethodToEvent):
|
||||
employee_id=employee.id,
|
||||
employee_uu_id=employee.uu_id.__str__(),
|
||||
reachable_event_list_id=reachable_event_list_id,
|
||||
reachable_event_list_uu_id=reachable_event_list_uu_id,
|
||||
),
|
||||
)
|
||||
return JSONResponse(
|
||||
@@ -183,7 +181,7 @@ class AuthenticationSelectEventMethods(MethodToEvent):
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
elif token_dict.user_type == 2:
|
||||
elif isinstance(token_dict, OccupantTokenObject):
|
||||
occupant_type = OccupantTypes.filter_by_one(
|
||||
system=True, uu_id=data.occupant_uu_id
|
||||
).data
|
||||
@@ -217,7 +215,7 @@ class AuthenticationSelectEventMethods(MethodToEvent):
|
||||
BuildLivingSpace.person_id == token_dict.person_id,
|
||||
BuildLivingSpace.build_parts_id == build_part.id,
|
||||
).data:
|
||||
reachable_event_list_id, reachable_event_list_uu_id = (
|
||||
reachable_event_list_id = (
|
||||
Event2Occupant.get_event_id_by_build_living_space_id(
|
||||
build_living_space_id=selected_occupant_type.id
|
||||
)
|
||||
@@ -239,7 +237,6 @@ class AuthenticationSelectEventMethods(MethodToEvent):
|
||||
responsible_company_id=company_related.id,
|
||||
responsible_company_uuid=company_related.uu_id.__str__(),
|
||||
reachable_event_list_id=reachable_event_list_id,
|
||||
reachable_event_list_uu_id=reachable_event_list_uu_id,
|
||||
),
|
||||
)
|
||||
return JSONResponse(
|
||||
@@ -263,7 +260,7 @@ class AuthenticationCheckTokenEventMethods(MethodToEvent):
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def authentication_login_with_domain_and_creds(
|
||||
def authentication_check_token_is_valid(
|
||||
cls,
|
||||
request,
|
||||
):
|
||||
@@ -329,28 +326,29 @@ class AuthenticationChangePasswordEventMethods(MethodToEvent):
|
||||
def authentication_change_password(
|
||||
cls,
|
||||
data: ChangePassword,
|
||||
token_dict: typing.Union[EmployeeSelection, OccupantSelection],
|
||||
token_dict: Union[EmployeeTokenObject, OccupantTokenObject],
|
||||
):
|
||||
if token_dict.user_type == 1:
|
||||
if found_user := Users.filter_one(
|
||||
Users.uu_id == token_dict.person_uu_id,
|
||||
).data:
|
||||
if found_user.check_password(data.old_password):
|
||||
found_user.set_password(data.new_password)
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "Password is changed successfully",
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
if found_user := Users.filter_one(
|
||||
Users.id == token_dict.user_id,
|
||||
).data:
|
||||
if found_user.check_password(data.old_password):
|
||||
found_user.create_password(
|
||||
found_user=found_user, password=data.new_password
|
||||
)
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": False,
|
||||
"message": "Old password is not correct",
|
||||
"completed": True,
|
||||
"message": "Password is changed successfully",
|
||||
},
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": False,
|
||||
"message": "Old password is not correct",
|
||||
},
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
)
|
||||
return JSONResponse(
|
||||
content={"completed": False, "message": "Invalid data"},
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
@@ -405,6 +403,52 @@ class AuthenticationCreatePasswordEventMethods(MethodToEvent):
|
||||
)
|
||||
|
||||
|
||||
class AuthenticationResetPasswordEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "UPDATE"
|
||||
__event_keys__ = {
|
||||
"af9e121e-24bb-44ac-a616-471d5754360e": "authentication_reset_password",
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def authentication_reset_password(cls, data: Forgot):
|
||||
from sqlalchemy import or_
|
||||
|
||||
found_user = Users.query.filter(
|
||||
or_(
|
||||
Users.email == str(data.access_key).lower(),
|
||||
Users.phone_number == str(data.access_key).replace(" ", ""),
|
||||
),
|
||||
).first()
|
||||
if not found_user:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail="Given access key or domain is not matching with the any user record.",
|
||||
)
|
||||
|
||||
reset_password_token = found_user.reset_password_token(found_user=found_user)
|
||||
send_email_completed = send_email(
|
||||
subject=f"Dear {found_user.user_tag}, a password reset request has been received.",
|
||||
receivers=[str(found_user.email)],
|
||||
html=change_your_password_template(
|
||||
user_name=found_user.user_tag,
|
||||
forgot_link=ApiStatic.forgot_link(forgot_key=reset_password_token),
|
||||
),
|
||||
)
|
||||
if not send_email_completed:
|
||||
raise found_user.raise_http_exception(
|
||||
status_code=400, message="Email can not be sent. Try again later"
|
||||
)
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "Password change link is sent to your email or phone",
|
||||
"data": found_user.get_dict(),
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
|
||||
|
||||
class AuthenticationDisconnectUserEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "UPDATE"
|
||||
@@ -432,12 +476,10 @@ class AuthenticationDisconnectUserEventMethods(MethodToEvent):
|
||||
status_code=status.HTTP_202_ACCEPTED,
|
||||
)
|
||||
if already_tokens := get_object_via_user_uu_id(user_id=found_user.uu_id):
|
||||
for key in already_tokens:
|
||||
token_user = json.loads(redis_cli.get(key) or {})
|
||||
for key, token_user in already_tokens.items():
|
||||
redis_cli.delete(key)
|
||||
selected_user = Users.filter_one(
|
||||
Users.uu_id == token_user.get("uu_id"),
|
||||
*Users.valid_record_args(Users),
|
||||
)
|
||||
selected_user.remove_refresher_token(
|
||||
domain=data.domain, disconnect=True
|
||||
@@ -493,13 +535,12 @@ class AuthenticationLogoutEventMethods(MethodToEvent):
|
||||
status_code=status.HTTP_202_ACCEPTED,
|
||||
)
|
||||
token_users = get_object_via_user_uu_id(token_dict.user_uu_id)
|
||||
for token_user in token_users:
|
||||
if token_dict.domain == data.domain:
|
||||
redis_cli.delete(token_user)
|
||||
for token, token_user in token_users.items():
|
||||
if token_user["domain"] == data.domain:
|
||||
selected_user = Users.filter_one(
|
||||
Users.uu_id == token_user.get("uu_id"),
|
||||
*Users.valid_record_args(Users),
|
||||
)
|
||||
Users.uu_id == token_dict.user_uu_id,
|
||||
).data
|
||||
redis_cli.delete(token)
|
||||
selected_user.remove_refresher_token(domain=data.domain)
|
||||
# UserLogger.log_error(
|
||||
# str(
|
||||
@@ -720,6 +761,9 @@ AuthenticationChangePasswordEventMethod = AuthenticationChangePasswordEventMetho
|
||||
AuthenticationCreatePasswordEventMethod = AuthenticationCreatePasswordEventMethods(
|
||||
action=ActionsSchema(endpoint="/authentication/create_password")
|
||||
)
|
||||
AuthenticationResetPasswordEventMethod = AuthenticationResetPasswordEventMethods(
|
||||
action=ActionsSchema(endpoint="/authentication/reset_password")
|
||||
)
|
||||
AuthenticationDisconnectUserEventMethod = AuthenticationDisconnectUserEventMethods(
|
||||
action=ActionsSchema(endpoint="/authentication/disconnect")
|
||||
)
|
||||
@@ -19,10 +19,14 @@ from api_validations.core_response import AlchemyJsonResponse
|
||||
|
||||
|
||||
class EventBindOccupantEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "UPDATE"
|
||||
__event_keys__ = {
|
||||
"5702f0a9-fe8f-4aae-922e-6e04b497ef6a": "bind_events_occupant_super_user",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"5702f0a9-fe8f-4aae-922e-6e04b497ef6a": RegisterEvents2Occupant
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def bind_events_occupant_super_user(
|
||||
@@ -142,6 +146,9 @@ class EventBindEmployeeEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"c93a3009-65a0-498d-9191-04484d5cde81": "bind_events_employee",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"c93a3009-65a0-498d-9191-04484d5cde81": RegisterEvents2Occupant
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def bind_events_employee(
|
||||
@@ -10,6 +10,7 @@ from api_configs.configs import (
|
||||
TestMongo,
|
||||
RelationAccess,
|
||||
EmailConfig,
|
||||
TestDatabase,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
@@ -24,4 +25,5 @@ __all__ = [
|
||||
"TestMongo",
|
||||
"RelationAccess",
|
||||
"EmailConfig",
|
||||
"TestDatabase",
|
||||
]
|
||||
|
||||
@@ -26,11 +26,17 @@ class Config:
|
||||
"/authentication/refresh",
|
||||
"/authentication/disconnect",
|
||||
"/authentication/create_password",
|
||||
"/authentication/change_password",
|
||||
"/authentication/reset_password",
|
||||
"/authentication/forgot",
|
||||
"/authentication/avatar",
|
||||
"/authentication/valid",
|
||||
"/api/Contact/Us/current_date",
|
||||
]
|
||||
NOT_SECURE_PATHS = [
|
||||
"/access/endpoints/available",
|
||||
"/access/endpoint/available",
|
||||
"/validations/endpoint",
|
||||
"/authentication/avatar",
|
||||
]
|
||||
|
||||
APP_NAME = "evyos-web-api-gateway"
|
||||
TITLE = "WAG API Web Api Gateway"
|
||||
@@ -40,7 +46,7 @@ class Config:
|
||||
|
||||
class ApiStatic:
|
||||
PLACEHOLDER = "https://s.tmimgcdn.com/scr/800x500/276800/building-home-nature-logo-vector-template-3_276851-original.jpg"
|
||||
FORGOT_LINK = "https://www.evyos.com.tr/auth/create-password/"
|
||||
FORGOT_LINK = "https://www.evyos.com.tr/password/create?tokenUrl="
|
||||
BLACKLIST_LINK = "https://www.evyos.com.tr/support/unknown-login-notice/"
|
||||
APP_DIR = "/home/berkay/git-evyos/api-managment-backend/"
|
||||
|
||||
@@ -118,7 +124,7 @@ class TestDatabase:
|
||||
SQL: str = "postgresql+psycopg2"
|
||||
USERNAME: str = "berkay_wag_user"
|
||||
PASSWORD: str = "berkay_wag_user_password"
|
||||
HOST: str = "10.10.2.44"
|
||||
HOST: str = "10.10.2.46"
|
||||
PORT: str = "5434"
|
||||
DATABASE_NAME: str = "wag_database"
|
||||
DATABASE_URL = f"{SQL}://{USERNAME}:{PASSWORD}@{HOST}:{PORT}/{DATABASE_NAME}"
|
||||
|
||||
@@ -14,13 +14,14 @@ from api_events.events.address.address import (
|
||||
AddressPostCodeUpdateEventMethod,
|
||||
AddressPostCodeListEventMethod,
|
||||
)
|
||||
from api_events.events.authentication import (
|
||||
from api_events.events.application.authentication import (
|
||||
AuthenticationLoginEventMethod,
|
||||
AuthenticationSelectEventMethod,
|
||||
AuthenticationCheckTokenEventMethod,
|
||||
AuthenticationRefreshEventMethod,
|
||||
AuthenticationChangePasswordEventMethod,
|
||||
AuthenticationCreatePasswordEventMethod,
|
||||
AuthenticationResetPasswordEventMethod,
|
||||
AuthenticationDisconnectUserEventMethod,
|
||||
AuthenticationLogoutEventMethod,
|
||||
AuthenticationRefreshTokenEventMethod,
|
||||
@@ -106,9 +107,9 @@ from api_events.events.company.company_staff import (
|
||||
StaffPatchEventMethod,
|
||||
)
|
||||
from api_events.events.building.building_living_spaces import (
|
||||
BuildingLivingSpacesPartsListEventMethod,
|
||||
BuildingLivingSpacesPartsCreateEventMethod,
|
||||
BuildingLivingSpacesPartsUpdateEventMethod,
|
||||
BuildingLivingSpacesListEventMethod,
|
||||
BuildingLivingSpacesCreateEventMethod,
|
||||
BuildingLivingSpacesUpdateEventMethod,
|
||||
)
|
||||
from api_events.events.decision_book.decision_book_decision_book import (
|
||||
DecisionBookListEventMethod,
|
||||
@@ -142,10 +143,11 @@ from api_events.events.decision_book.project_decision_book_items import (
|
||||
BuildDecisionBookProjectItemsCreateEventMethod,
|
||||
BuildDecisionBookProjectItemsListEventMethod,
|
||||
)
|
||||
from api_events.events.events.events_bind_events import (
|
||||
EventBindOccupantEventMethod,
|
||||
EventBindEmployeeEventMethod,
|
||||
)
|
||||
|
||||
# from api_events.events.events.events_ import (
|
||||
# EventBindOccupantEventMethod,
|
||||
# EventBindEmployeeEventMethod,
|
||||
# )
|
||||
from api_events.events.events.events_bind_services import (
|
||||
ServiceBindOccupantEventMethod,
|
||||
ServiceBindEmployeeEventMethod,
|
||||
@@ -162,6 +164,11 @@ from api_events.events.decision_book.decision_book_invitations import (
|
||||
BuildDecisionBookInvitationsCreateEventMethod,
|
||||
BuildDecisionBookInvitationsUpdateEventMethod,
|
||||
)
|
||||
from api_events.events.events.events_events import (
|
||||
EventsBindEventToEmployeeMethod,
|
||||
EventsBindEventToOccupantMethod,
|
||||
EventsListEventMethod,
|
||||
)
|
||||
|
||||
|
||||
__all__ = [
|
||||
@@ -183,6 +190,7 @@ __all__ = [
|
||||
"AuthenticationRefreshEventMethod",
|
||||
"AuthenticationChangePasswordEventMethod",
|
||||
"AuthenticationCreatePasswordEventMethod",
|
||||
"AuthenticationResetPasswordEventMethod",
|
||||
"AuthenticationDisconnectUserEventMethod",
|
||||
"AuthenticationLogoutEventMethod",
|
||||
"AuthenticationRefreshTokenEventMethod",
|
||||
@@ -204,9 +212,9 @@ __all__ = [
|
||||
"BuildingBuildPartsCreateEventMethod",
|
||||
"BuildingBuildPartsUpdateEventMethod",
|
||||
"BuildingBuildPartsPatchEventMethod",
|
||||
"BuildingLivingSpacesPartsListEventMethod",
|
||||
"BuildingLivingSpacesPartsCreateEventMethod",
|
||||
"BuildingLivingSpacesPartsUpdateEventMethod",
|
||||
"BuildingLivingSpacesListEventMethod",
|
||||
"BuildingLivingSpacesCreateEventMethod",
|
||||
"BuildingLivingSpacesUpdateEventMethod",
|
||||
"BuildAreaListEventMethod",
|
||||
"BuildAreaCreateEventMethod",
|
||||
"BuildAreaUpdateEventMethod",
|
||||
@@ -268,8 +276,8 @@ __all__ = [
|
||||
"StaffGetByUUIDEventMethod",
|
||||
"StaffUpdateEventMethod",
|
||||
"StaffPatchEventMethod",
|
||||
"EventBindOccupantEventMethod",
|
||||
"EventBindEmployeeEventMethod",
|
||||
# "EventBindOccupantEventMethod",
|
||||
# "EventBindEmployeeEventMethod",
|
||||
"ServiceBindOccupantEventMethod",
|
||||
"ServiceBindEmployeeEventMethod",
|
||||
"BuildDecisionBookInvitationsListEventMethod",
|
||||
@@ -277,4 +285,7 @@ __all__ = [
|
||||
"BuildDecisionBookInvitationsUpdateEventMethod",
|
||||
"DecisionBookPersonAttendEventMethod",
|
||||
"DecisionBookPersonAssignOccupantEventMethod",
|
||||
"EventsBindEventToEmployeeMethod",
|
||||
"EventsBindEventToOccupantMethod",
|
||||
"EventsListEventMethod",
|
||||
]
|
||||
|
||||
@@ -37,11 +37,13 @@ class ActionsSchemaFactory:
|
||||
|
||||
class MethodToEvent(ABC, ActionsSchemaFactory):
|
||||
|
||||
action_key: str = None
|
||||
event_type: str = None
|
||||
event_description: str = ""
|
||||
event_category: str = ""
|
||||
|
||||
__event_keys__: dict = {}
|
||||
__event_validation__: dict = {}
|
||||
|
||||
@classmethod
|
||||
def call_event_method(cls, method_uu_id: str, *args, **kwargs):
|
||||
|
||||
@@ -10,19 +10,29 @@ from api_validations.validations_request import (
|
||||
from api_validations.core_response import AlchemyJsonResponse
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObject
|
||||
from api_validations.validations_response.account import AccountListResponse
|
||||
from databases import (
|
||||
AccountRecords,
|
||||
BuildIbans,
|
||||
)
|
||||
from databases.sql_models.building.build import Build
|
||||
from databases.sql_models.building.build import Build, BuildLivingSpace
|
||||
from databases.sql_models.building.decision_book import BuildDecisionBookPayments
|
||||
from databases.sql_models.others.enums import ApiEnumDropdown
|
||||
|
||||
|
||||
class AccountRecordsListEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "SELECT"
|
||||
event_description = ""
|
||||
event_category = ""
|
||||
|
||||
__event_keys__ = {
|
||||
"7192c2aa-5352-4e36-98b3-dafb7d036a3d": "account_records_list",
|
||||
"208e6273-17ef-44f0-814a-8098f816b63a": "account_records_list_flt_res",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"7192c2aa-5352-4e36-98b3-dafb7d036a3d": AccountListResponse,
|
||||
"208e6273-17ef-44f0-814a-8098f816b63a": AccountListResponse,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
@@ -43,16 +53,150 @@ class AccountRecordsListEventMethods(MethodToEvent):
|
||||
AccountRecords.filter_attr = list_options
|
||||
records = AccountRecords.filter_all()
|
||||
return AlchemyJsonResponse(
|
||||
completed=True, message="Update Build record", result=records
|
||||
completed=True, message="List Build record", result=records
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def account_records_list_flt_res(
|
||||
cls,
|
||||
list_options: ListOptions,
|
||||
token_dict: typing.Union[EmployeeTokenObject, OccupantTokenObject],
|
||||
):
|
||||
if not isinstance(token_dict, OccupantTokenObject):
|
||||
raise AccountRecords().raise_http_exception(
|
||||
status_code="HTTP_404_NOT_FOUND",
|
||||
error_case="UNAUTHORIZED",
|
||||
message="Only Occupant can see this data",
|
||||
data={},
|
||||
)
|
||||
|
||||
return_list = []
|
||||
living_space: BuildLivingSpace = BuildLivingSpace.filter_by_one(
|
||||
id=token_dict.selected_occupant.living_space_id
|
||||
).data
|
||||
if not living_space:
|
||||
raise AccountRecords().raise_http_exception(
|
||||
status_code="HTTP_404_NOT_FOUND",
|
||||
error_case="UNAUTHORIZED",
|
||||
message="Living space not found",
|
||||
data={},
|
||||
)
|
||||
|
||||
if not list_options:
|
||||
list_options = ListOptions()
|
||||
|
||||
main_filters = [
|
||||
AccountRecords.living_space_id
|
||||
== token_dict.selected_occupant.living_space_id,
|
||||
BuildDecisionBookPayments.process_date
|
||||
>= str(system_arrow.now().shift(months=-3).date()),
|
||||
BuildDecisionBookPayments.process_date
|
||||
< str(system_arrow.find_last_day_of_month(living_space.expiry_ends)),
|
||||
BuildDecisionBookPayments.process_date
|
||||
>= str(system_arrow.get(living_space.expiry_starts)),
|
||||
BuildDecisionBookPayments.is_confirmed == True,
|
||||
AccountRecords.active == True,
|
||||
]
|
||||
order_type = "desc"
|
||||
if list_options.order_type:
|
||||
order_type = "asc" if list_options.order_type[0] == "a" else "desc"
|
||||
|
||||
order_by_list = BuildDecisionBookPayments.process_date.desc()
|
||||
if list_options.order_field:
|
||||
if list_options.order_field == "process_date":
|
||||
order_by_list = (
|
||||
BuildDecisionBookPayments.process_date.asc()
|
||||
if order_type == "asc"
|
||||
else BuildDecisionBookPayments.process_date.desc()
|
||||
)
|
||||
if list_options.order_field == "bank_date":
|
||||
order_by_list = (
|
||||
AccountRecords.bank_date.desc()
|
||||
if order_type == "asc"
|
||||
else AccountRecords.bank_date.asc()
|
||||
)
|
||||
if list_options.order_field == "currency_value":
|
||||
order_by_list = (
|
||||
AccountRecords.currency_value.desc()
|
||||
if order_type == "asc"
|
||||
else AccountRecords.currency_value.asc()
|
||||
)
|
||||
if list_options.order_field == "process_comment":
|
||||
order_by_list = (
|
||||
AccountRecords.process_comment.desc()
|
||||
if order_type == "asc"
|
||||
else AccountRecords.process_comment.asc()
|
||||
)
|
||||
if list_options.order_field == "payment_amount":
|
||||
order_by_list = (
|
||||
BuildDecisionBookPayments.payment_amount.desc()
|
||||
if order_type == "asc"
|
||||
else BuildDecisionBookPayments.payment_amount.asc()
|
||||
)
|
||||
|
||||
if list_options.query:
|
||||
for key, value in list_options.query.items():
|
||||
if key == "process_date":
|
||||
main_filters.append(BuildDecisionBookPayments.process_date == value)
|
||||
if key == "bank_date":
|
||||
main_filters.append(AccountRecords.bank_date == value)
|
||||
if key == "currency":
|
||||
main_filters.append(BuildDecisionBookPayments.currency == value)
|
||||
if key == "currency_value":
|
||||
main_filters.append(AccountRecords.currency_value == value)
|
||||
if key == "process_comment":
|
||||
main_filters.append(AccountRecords.process_comment == value)
|
||||
if key == "payment_amount":
|
||||
main_filters.append(
|
||||
BuildDecisionBookPayments.payment_amount == value
|
||||
)
|
||||
|
||||
query = (
|
||||
AccountRecords.session.query(
|
||||
BuildDecisionBookPayments.process_date,
|
||||
BuildDecisionBookPayments.payment_amount,
|
||||
BuildDecisionBookPayments.currency,
|
||||
AccountRecords.bank_date,
|
||||
AccountRecords.currency_value,
|
||||
AccountRecords.process_comment,
|
||||
BuildDecisionBookPayments.uu_id,
|
||||
)
|
||||
.join(
|
||||
AccountRecords,
|
||||
AccountRecords.id == BuildDecisionBookPayments.account_records_id,
|
||||
)
|
||||
.filter(*main_filters)
|
||||
).order_by(order_by_list)
|
||||
|
||||
query.limit(list_options.size or 5).offset(
|
||||
(list_options.page or 1 - 1) * list_options.size or 5
|
||||
)
|
||||
for list_of_values in query.all() or []:
|
||||
return_list.append(
|
||||
{
|
||||
"process_date": list_of_values[0],
|
||||
"payment_amount": list_of_values[1],
|
||||
"currency": list_of_values[2],
|
||||
"bank_date": list_of_values[3],
|
||||
"currency_value": list_of_values[4],
|
||||
"process_comment": list_of_values[5],
|
||||
}
|
||||
)
|
||||
return dict(completed=True, message="List Build record", result=return_list)
|
||||
|
||||
|
||||
class AccountRecordsCreateEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "CREATE"
|
||||
event_description = ""
|
||||
event_category = ""
|
||||
|
||||
__event_keys__ = {
|
||||
"31f4f32f-0cd4-4995-8a6a-f9f56335848a": "account_records_create",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"31f4f32f-0cd4-4995-8a6a-f9f56335848a": InsertAccountRecord,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def account_records_create(
|
||||
@@ -133,9 +277,15 @@ class AccountRecordsCreateEventMethods(MethodToEvent):
|
||||
class AccountRecordsUpdateEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "UPDATE"
|
||||
event_description = ""
|
||||
event_category = ""
|
||||
|
||||
__event_keys__ = {
|
||||
"ec98ef2c-bcd0-432d-a8f4-1822a56c33b2": "account_records_update",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"ec98ef2c-bcd0-432d-a8f4-1822a56c33b2": UpdateAccountRecord,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def build_area_update(
|
||||
@@ -159,9 +309,15 @@ class AccountRecordsUpdateEventMethods(MethodToEvent):
|
||||
class AccountRecordsPatchEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "PATCH"
|
||||
event_description = ""
|
||||
event_category = ""
|
||||
|
||||
__event_keys__ = {
|
||||
"34c38937-42a2-45f1-b2ef-a23978650aee": "account_records_patch",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"34c38937-42a2-45f1-b2ef-a23978650aee": None,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def build_area_patch(
|
||||
|
||||
@@ -3,6 +3,10 @@ from typing import Union
|
||||
from fastapi.exceptions import HTTPException
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
from api_validations.validations_response.address import (
|
||||
ListAddressResponse,
|
||||
AddressPostCodeResponse,
|
||||
)
|
||||
from databases import (
|
||||
AddressPostcode,
|
||||
Addresses,
|
||||
@@ -28,10 +32,15 @@ class AddressListEventMethods(MethodToEvent):
|
||||
event_type = "SELECT"
|
||||
event_description = "List Address records"
|
||||
event_category = "Address"
|
||||
|
||||
__event_keys__ = {
|
||||
"9c251d7d-da70-4d63-a72c-e69c26270442": "address_list_super_user",
|
||||
"52afe375-dd95-4f4b-aaa2-4ec61bc6de52": "address_list_employee",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"9c251d7d-da70-4d63-a72c-e69c26270442": ListAddressResponse,
|
||||
"52afe375-dd95-4f4b-aaa2-4ec61bc6de52": ListAddressResponse,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def address_list_super_user(
|
||||
@@ -93,9 +102,15 @@ class AddressListEventMethods(MethodToEvent):
|
||||
class AddressCreateEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "CREATE"
|
||||
event_description = ""
|
||||
event_category = ""
|
||||
|
||||
__event_keys__ = {
|
||||
"ffdc445f-da10-4ce4-9531-d2bdb9a198ae": "create_address",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"ffdc445f-da10-4ce4-9531-d2bdb9a198ae": InsertAddress,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def create_address(
|
||||
@@ -133,9 +148,15 @@ class AddressCreateEventMethods(MethodToEvent):
|
||||
class AddressSearchEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "SEARCH"
|
||||
event_description = ""
|
||||
event_category = ""
|
||||
|
||||
__event_keys__ = {
|
||||
"e0ac1269-e9a7-4806-9962-219ac224b0d0": "search_address",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"e0ac1269-e9a7-4806-9962-219ac224b0d0": SearchAddress,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def search_address(
|
||||
@@ -205,9 +226,15 @@ class AddressSearchEventMethods(MethodToEvent):
|
||||
class AddressUpdateEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "UPDATE"
|
||||
event_description = ""
|
||||
event_category = ""
|
||||
|
||||
__event_keys__ = {
|
||||
"1f9c3a9c-e5bd-4dcd-9b9a-3742d7e03a27": "update_address",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"1f9c3a9c-e5bd-4dcd-9b9a-3742d7e03a27": UpdateAddress,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def update_address(
|
||||
@@ -247,9 +274,15 @@ class AddressUpdateEventMethods(MethodToEvent):
|
||||
class AddressPatchEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "PATCH"
|
||||
event_description = ""
|
||||
event_category = ""
|
||||
|
||||
__event_keys__ = {
|
||||
"b0e55a7e-af81-468c-b46c-a6b3a6b68d5d": "patch_address",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"b0e55a7e-af81-468c-b46c-a6b3a6b68d5d": None,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def patch_address(
|
||||
@@ -288,9 +321,15 @@ class AddressPatchEventMethods(MethodToEvent):
|
||||
class AddressPostCodeCreateEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "CREATE"
|
||||
event_description = ""
|
||||
event_category = ""
|
||||
|
||||
__event_keys__ = {
|
||||
"6f1406ac-577d-4f2c-8077-71fff2252c5f": "create_post_code_address",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"6f1406ac-577d-4f2c-8077-71fff2252c5f": InsertPostCode,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def create_post_code_address(
|
||||
@@ -335,9 +374,15 @@ class AddressPostCodeCreateEventMethods(MethodToEvent):
|
||||
class AddressPostCodeUpdateEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "UPDATE"
|
||||
event_description = ""
|
||||
event_category = ""
|
||||
|
||||
__event_keys__ = {
|
||||
"df18e489-a63c-477f-984c-aa52d30640ad": "update_post_code_address",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"df18e489-a63c-477f-984c-aa52d30640ad": UpdatePostCode,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def update_post_code_address(
|
||||
@@ -388,9 +433,15 @@ class AddressPostCodeUpdateEventMethods(MethodToEvent):
|
||||
class AddressPostCodeListEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "SELECT"
|
||||
event_description = ""
|
||||
event_category = ""
|
||||
|
||||
__event_keys__ = {
|
||||
"88d37b78-1ac4-4513-9d25-090ac3a24f31": "list_post_code_address",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"88d37b78-1ac4-4513-9d25-090ac3a24f31": AddressPostCodeResponse,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def list_post_code_address(
|
||||
|
||||
@@ -1,12 +1,14 @@
|
||||
import datetime
|
||||
import json
|
||||
import typing
|
||||
from typing import Union
|
||||
|
||||
import arrow
|
||||
from fastapi import status
|
||||
from fastapi.requests import Request
|
||||
from fastapi.exceptions import HTTPException
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
from api_objects import UserType
|
||||
from databases import (
|
||||
Companies,
|
||||
Staff,
|
||||
@@ -22,6 +24,7 @@ from databases import (
|
||||
Users,
|
||||
UsersTokens,
|
||||
OccupantTypes,
|
||||
RelationshipEmployee2Build,
|
||||
)
|
||||
|
||||
from api_services import (
|
||||
@@ -35,10 +38,16 @@ from api_services import (
|
||||
change_your_password_template,
|
||||
)
|
||||
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObject
|
||||
from api_library.date_time_actions.date_functions import system_arrow
|
||||
from api_configs import ApiStatic, Auth
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
from api_objects import (
|
||||
OccupantToken,
|
||||
CompanyToken,
|
||||
EmployeeTokenObject,
|
||||
OccupantTokenObject,
|
||||
)
|
||||
from api_library.date_time_actions.date_functions import system_arrow
|
||||
|
||||
from databases.no_sql_models.login_handlers import load_user_with_erp_details
|
||||
|
||||
from api_validations.validations_request import (
|
||||
@@ -51,22 +60,26 @@ from api_validations.validations_request import (
|
||||
OccupantSelection,
|
||||
EmployeeSelection,
|
||||
)
|
||||
from databases.sql_models.building.build import RelationshipEmployee2Build
|
||||
|
||||
|
||||
class AuthenticationLoginEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "LOGIN"
|
||||
event_description = "Login via domain and access key : [email] | [phone]"
|
||||
event_category = "AUTHENTICATION"
|
||||
|
||||
__event_keys__ = {
|
||||
"e672846d-cc45-4d97-85d5-6f96747fac67": "authentication_login_with_domain_and_creds",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"e672846d-cc45-4d97-85d5-6f96747fac67": "authentication_login_with_domain_and_creds",
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def authentication_login_with_domain_and_creds(
|
||||
cls,
|
||||
data: Login,
|
||||
request,
|
||||
token_dict: typing.Union[EmployeeTokenObject, OccupantTokenObject],
|
||||
):
|
||||
access_dict = Users.login_user_with_credentials(data=data, request=request)
|
||||
found_user = access_dict.get("user", None)
|
||||
@@ -90,23 +103,28 @@ class AuthenticationLoginEventMethods(MethodToEvent):
|
||||
|
||||
class AuthenticationSelectEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "SELECT"
|
||||
event_type = "LOGIN"
|
||||
event_description = "Select Employee Duty or Occupant Type"
|
||||
event_category = "AUTHENTICATION"
|
||||
|
||||
__event_keys__ = {
|
||||
"cee96b9b-8487-4e9f-aaed-2e8c79687bf9": "authentication_select_company_or_occupant_type",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"cee96b9b-8487-4e9f-aaed-2e8c79687bf9": "authentication_select_company_or_occupant_type",
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def authentication_select_company_or_occupant_type(
|
||||
cls,
|
||||
request: Request,
|
||||
data,
|
||||
token_dict: typing.Union[EmployeeSelection, OccupantSelection],
|
||||
data: Union[EmployeeSelection, OccupantSelection],
|
||||
token_dict: Union[EmployeeTokenObject, OccupantTokenObject],
|
||||
):
|
||||
from api_objects import OccupantToken, CompanyToken
|
||||
|
||||
token_user = get_object_via_access_key(request=request)
|
||||
if token_user.user_type == 1:
|
||||
if data.company_uu_id not in token_user.companies_uu_id_list:
|
||||
if isinstance(token_dict, EmployeeTokenObject):
|
||||
if data.company_uu_id not in token_dict.companies_uu_id_list:
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": False,
|
||||
@@ -127,7 +145,6 @@ class AuthenticationSelectEventMethods(MethodToEvent):
|
||||
duties.id
|
||||
for duties in Duties.filter_all(
|
||||
Duties.company_id == selected_company.id,
|
||||
Duties.department_id.in_(department_ids),
|
||||
).data
|
||||
]
|
||||
staff_ids = [
|
||||
@@ -137,12 +154,11 @@ class AuthenticationSelectEventMethods(MethodToEvent):
|
||||
).data
|
||||
]
|
||||
employee = Employees.filter_one(
|
||||
Employees.people_id == token_user.person_id,
|
||||
Employees.people_id == token_dict.person_id,
|
||||
Employees.staff_id.in_(staff_ids),
|
||||
).data
|
||||
|
||||
reachable_event_list_id, reachable_event_list_uu_id = (
|
||||
Event2Employee.get_event_id_by_employee_id(employee_id=employee.id)
|
||||
reachable_event_list_id = Event2Employee.get_event_id_by_employee_id(
|
||||
employee_id=employee.id
|
||||
)
|
||||
staff = Staff.filter_one(
|
||||
Staff.id == employee.staff_id,
|
||||
@@ -153,12 +169,11 @@ class AuthenticationSelectEventMethods(MethodToEvent):
|
||||
department = Departments.filter_one(
|
||||
Departments.id == duties.department_id,
|
||||
).data
|
||||
bulk_id = Duty.filter_one(
|
||||
Duty.duty_code == "BULK",
|
||||
).data
|
||||
bulk_duty_id = Duties.filter_one(
|
||||
Duties.company_id == selected_company.id,
|
||||
Duties.duties_id == bulk_id.id,
|
||||
bulk_id = Duty.filter_by_one(system=True, duty_code="BULK").data
|
||||
bulk_duty_id = Duties.filter_by_one(
|
||||
company_id=selected_company.id,
|
||||
duties_id=bulk_id.id,
|
||||
**Duties.valid_record_dict,
|
||||
).data
|
||||
update_selected_to_redis(
|
||||
request=request,
|
||||
@@ -175,7 +190,6 @@ class AuthenticationSelectEventMethods(MethodToEvent):
|
||||
employee_id=employee.id,
|
||||
employee_uu_id=employee.uu_id.__str__(),
|
||||
reachable_event_list_id=reachable_event_list_id,
|
||||
reachable_event_list_uu_id=reachable_event_list_uu_id,
|
||||
),
|
||||
)
|
||||
return JSONResponse(
|
||||
@@ -185,24 +199,26 @@ class AuthenticationSelectEventMethods(MethodToEvent):
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
elif token_user.user_type == 2:
|
||||
occupant_type = OccupantTypes.filter_one(
|
||||
OccupantTypes.uu_id == data.occupant_uu_id
|
||||
elif isinstance(token_dict, OccupantTokenObject):
|
||||
occupant_type = OccupantTypes.filter_by_one(
|
||||
system=True, uu_id=data.occupant_uu_id
|
||||
).data
|
||||
if not occupant_type:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail="Occupant Type is not found",
|
||||
)
|
||||
build_part = BuildParts.filter_one(
|
||||
BuildParts.uu_id == data.build_part_uu_id,
|
||||
build_part = BuildParts.filter_by_one(
|
||||
system=True, uu_id=data.build_part_uu_id
|
||||
).data
|
||||
if not build_part:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail="Build Part is not found",
|
||||
)
|
||||
build = Build.filter_one(Build.id == build_part.build_id).data
|
||||
build = Build.filter_one(
|
||||
Build.id == build_part.build_id,
|
||||
).data
|
||||
related_company = RelationshipEmployee2Build.filter_one(
|
||||
RelationshipEmployee2Build.member_id == build.id,
|
||||
).data
|
||||
@@ -214,13 +230,12 @@ class AuthenticationSelectEventMethods(MethodToEvent):
|
||||
).data
|
||||
if selected_occupant_type := BuildLivingSpace.filter_one(
|
||||
BuildLivingSpace.occupant_type == occupant_type.id,
|
||||
BuildLivingSpace.person_id == token_user.person_id,
|
||||
BuildLivingSpace.person_id == token_dict.person_id,
|
||||
BuildLivingSpace.build_parts_id == build_part.id,
|
||||
).data:
|
||||
reachable_event_list_id, reachable_event_list_uu_id = (
|
||||
reachable_event_list_id = (
|
||||
Event2Occupant.get_event_id_by_build_living_space_id(
|
||||
Event2Occupant.build_living_space_id
|
||||
== selected_occupant_type.id
|
||||
build_living_space_id=selected_occupant_type.id
|
||||
)
|
||||
)
|
||||
update_selected_to_redis(
|
||||
@@ -240,7 +255,6 @@ class AuthenticationSelectEventMethods(MethodToEvent):
|
||||
responsible_company_id=company_related.id,
|
||||
responsible_company_uuid=company_related.uu_id.__str__(),
|
||||
reachable_event_list_id=reachable_event_list_id,
|
||||
reachable_event_list_uu_id=reachable_event_list_uu_id,
|
||||
),
|
||||
)
|
||||
return JSONResponse(
|
||||
@@ -258,16 +272,21 @@ class AuthenticationSelectEventMethods(MethodToEvent):
|
||||
|
||||
class AuthenticationCheckTokenEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "CHECK"
|
||||
event_type = "LOGIN"
|
||||
event_description = "Check Token is valid for user"
|
||||
event_category = "AUTHENTICATION"
|
||||
|
||||
__event_keys__ = {
|
||||
"73d77e45-a33f-4f12-909e-3b56f00d8a12": "authentication_check_token_is_valid",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"73d77e45-a33f-4f12-909e-3b56f00d8a12": "authentication_check_token_is_valid",
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def authentication_login_with_domain_and_creds(
|
||||
def authentication_check_token_is_valid(
|
||||
cls,
|
||||
request,
|
||||
token_dict: typing.Union[EmployeeSelection, OccupantSelection],
|
||||
):
|
||||
if get_object_via_access_key(request=request):
|
||||
return JSONResponse(
|
||||
@@ -282,10 +301,18 @@ class AuthenticationCheckTokenEventMethods(MethodToEvent):
|
||||
|
||||
class AuthenticationRefreshEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "REFRESH"
|
||||
event_type = "LOGIN"
|
||||
event_description = (
|
||||
"Refresher Token for refreshing access token without credentials"
|
||||
)
|
||||
event_category = "AUTHENTICATION"
|
||||
|
||||
__event_keys__ = {
|
||||
"48379bb2-ba81-4d8e-a9dd-58837cfcbf67": "authentication_refresh_user_info",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"48379bb2-ba81-4d8e-a9dd-58837cfcbf67": "authentication_refresh_user_info",
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def authentication_refresh_user_info(
|
||||
@@ -323,21 +350,25 @@ class AuthenticationRefreshEventMethods(MethodToEvent):
|
||||
|
||||
class AuthenticationChangePasswordEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "UPDATE"
|
||||
event_type = "LOGIN"
|
||||
event_description = "Change password with access token implemented on request headers without password reset token"
|
||||
event_category = "AUTHENTICATION"
|
||||
|
||||
__event_keys__ = {
|
||||
"f09f7c1a-bee6-4e32-8444-962ec8f39091": "authentication_change_password",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"f09f7c1a-bee6-4e32-8444-962ec8f39091": "authentication_change_password",
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def authentication_change_password(
|
||||
cls,
|
||||
request,
|
||||
data: ChangePassword,
|
||||
token_dict: typing.Union[EmployeeSelection, OccupantSelection],
|
||||
token_dict: typing.Union[EmployeeTokenObject, OccupantTokenObject],
|
||||
):
|
||||
token_user = get_object_via_access_key(request=request)
|
||||
if token_user.user_type == 1:
|
||||
if found_user := Users.filter_one(Users.uu_id == token_user.uu_id).data:
|
||||
if isinstance(token_dict, EmployeeTokenObject):
|
||||
if found_user := Users.filter_one(Users.uu_id == token_dict.uu_id).data:
|
||||
if found_user.check_password(data.old_password):
|
||||
found_user.set_password(data.new_password)
|
||||
return JSONResponse(
|
||||
@@ -362,15 +393,19 @@ class AuthenticationChangePasswordEventMethods(MethodToEvent):
|
||||
|
||||
class AuthenticationCreatePasswordEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "CREATE"
|
||||
event_type = "LOGIN"
|
||||
event_description = "Create password with password reset token requested via email"
|
||||
event_category = "AUTHENTICATION"
|
||||
|
||||
__event_keys__ = {
|
||||
"c519f9af-92e1-47b2-abf7-5a3316d075f7": "authentication_create_password",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"c519f9af-92e1-47b2-abf7-5a3316d075f7": "authentication_create_password",
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def authentication_create_password(
|
||||
cls, request, data: CreatePassword, token_dict: dict = None
|
||||
):
|
||||
def authentication_create_password(cls, data: CreatePassword):
|
||||
|
||||
if not data.re_password == data.password:
|
||||
raise HTTPException(
|
||||
@@ -379,9 +414,9 @@ class AuthenticationCreatePasswordEventMethods(MethodToEvent):
|
||||
if found_user := Users.filter_one(
|
||||
Users.password_token == data.password_token
|
||||
).data:
|
||||
found_user.create_password(password=data.password)
|
||||
found_user.password_token = None
|
||||
Users.save()
|
||||
found_user.create_password(found_user=found_user, password=data.password)
|
||||
found_user.password_token = ""
|
||||
found_user.save()
|
||||
send_email_completed = send_email(
|
||||
subject=f"Dear {found_user.user_tag}, your password has been changed.",
|
||||
receivers=[str(found_user.email)],
|
||||
@@ -411,57 +446,45 @@ class AuthenticationCreatePasswordEventMethods(MethodToEvent):
|
||||
|
||||
class AuthenticationDisconnectUserEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "UPDATE"
|
||||
event_type = "LOGIN"
|
||||
event_description = "Disconnect all sessions of user in access token"
|
||||
event_category = "AUTHENTICATION"
|
||||
|
||||
__event_keys__ = {
|
||||
"8b586848-2fb3-4161-abbe-642157eec7ce": "authentication_disconnect_user",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"8b586848-2fb3-4161-abbe-642157eec7ce": "authentication_disconnect_user",
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def authentication_disconnect_user(
|
||||
cls, request: Request, data: Logout, token_dict: dict = None
|
||||
cls, data: Logout, token_dict: Union[EmployeeTokenObject, OccupantTokenObject]
|
||||
):
|
||||
|
||||
if token_user := get_object_via_access_key(request=request):
|
||||
found_user = Users.filter_one(Users.uu_id == token_user.get("uu_id")).data
|
||||
if not found_user:
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": False,
|
||||
"message": "Invalid data",
|
||||
"data": None,
|
||||
},
|
||||
status_code=status.HTTP_202_ACCEPTED,
|
||||
found_user = Users.filter_one(Users.uu_id == token_dict.user_uu_id).data
|
||||
if not found_user:
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": False,
|
||||
"message": "Invalid data",
|
||||
"data": None,
|
||||
},
|
||||
status_code=status.HTTP_202_ACCEPTED,
|
||||
)
|
||||
if already_tokens := get_object_via_user_uu_id(user_id=str(found_user.uu_id)):
|
||||
for key, token_user in already_tokens.items():
|
||||
redis_cli.delete(key)
|
||||
selected_user = Users.filter_one(
|
||||
Users.uu_id == token_user.get("uu_id"),
|
||||
).data
|
||||
selected_user.remove_refresher_token(
|
||||
domain=data.domain, disconnect=True
|
||||
)
|
||||
if already_tokens := get_object_via_user_uu_id(user_id=found_user.uu_id):
|
||||
for key in already_tokens:
|
||||
token_user = json.loads(redis_cli.get(key) or {})
|
||||
redis_cli.delete(key)
|
||||
selected_user = Users.filter_one(
|
||||
Users.uu_id == token_user.get("uu_id"),
|
||||
).data
|
||||
selected_user.remove_refresher_token(
|
||||
domain=data.domain, disconnect=True
|
||||
)
|
||||
# UserLogger.log_error(
|
||||
# str(
|
||||
# dict(
|
||||
# user_id=found_user.id,
|
||||
# domain=data.domain,
|
||||
# access_key=token_user.get("access_input"),
|
||||
# agent=request.headers.get("User-Agent", None),
|
||||
# ip=getattr(request, "remote_addr", None)
|
||||
# or request.headers.get("X-Forwarded-For", None),
|
||||
# platform=request.headers.get("Origin", None),
|
||||
# login_date=datetime.datetime.utcnow().__str__(),
|
||||
# is_login=False,
|
||||
# )
|
||||
# )
|
||||
# )
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "All sessions are disconnected",
|
||||
"data": token_user,
|
||||
"data": selected_user.get_dict(),
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
@@ -472,10 +495,17 @@ class AuthenticationDisconnectUserEventMethods(MethodToEvent):
|
||||
|
||||
|
||||
class AuthenticationLogoutEventMethods(MethodToEvent):
|
||||
event_type = "UPDATE"
|
||||
|
||||
event_type = "LOGIN"
|
||||
event_description = "Logout only single session of user which domain is provided"
|
||||
event_category = "AUTHENTICATION"
|
||||
|
||||
__event_keys__ = {
|
||||
"5cc22e4e-a0f7-4077-be41-1871feb3dfd1": "authentication_logout_user",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"5cc22e4e-a0f7-4077-be41-1871feb3dfd1": "authentication_logout_user",
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def authentication_logout_user(
|
||||
@@ -491,21 +521,7 @@ class AuthenticationLogoutEventMethods(MethodToEvent):
|
||||
Users.uu_id == token_user.get("uu_id"),
|
||||
).data
|
||||
selected_user.remove_refresher_token(domain=data.domain)
|
||||
# UserLogger.log_error(
|
||||
# str(
|
||||
# dict(
|
||||
# user_id=selected_user.id,
|
||||
# domain=data.domain,
|
||||
# access_key=token_user.get("access_input"),
|
||||
# agent=request.headers.get("User-Agent", None),
|
||||
# ip=getattr(request, "remote_addr", None)
|
||||
# or request.headers.get("X-Forwarded-For", None),
|
||||
# platform=request.headers.get("Origin", None),
|
||||
# login_date=datetime.datetime.utcnow().__str__(),
|
||||
# is_login=False,
|
||||
# )
|
||||
# )
|
||||
# )
|
||||
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
@@ -526,10 +542,16 @@ class AuthenticationLogoutEventMethods(MethodToEvent):
|
||||
|
||||
class AuthenticationRefreshTokenEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "UPDATE"
|
||||
event_type = "LOGIN"
|
||||
event_description = "Refresh access token with refresher token"
|
||||
event_category = "AUTHENTICATION"
|
||||
|
||||
__event_keys__ = {
|
||||
"c90f3334-10c9-4181-b5ff-90d98a0287b2": "authentication_refresher_token",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"c90f3334-10c9-4181-b5ff-90d98a0287b2": "authentication_refresher_token",
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def authentication_refresher_token(
|
||||
@@ -558,21 +580,6 @@ class AuthenticationRefreshTokenEventMethods(MethodToEvent):
|
||||
request, "remote_addr", None
|
||||
) or request.headers.get("X-Forwarded-For", None)
|
||||
found_user.last_seen = str(system_arrow.now())
|
||||
# UserLogger.log_error(
|
||||
# str(
|
||||
# dict(
|
||||
# user_id=found_user.id,
|
||||
# domain=data.domain,
|
||||
# access_key="via_refresher",
|
||||
# agent=request.headers.get("User-Agent", None),
|
||||
# ip=getattr(request, "remote_addr", None)
|
||||
# or request.headers.get("X-Forwarded-For", None),
|
||||
# platform=request.headers.get("Origin", None),
|
||||
# login_date=datetime.datetime.utcnow().__str__(),
|
||||
# is_login=False,
|
||||
# )
|
||||
# )
|
||||
# )
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
@@ -595,14 +602,22 @@ class AuthenticationRefreshTokenEventMethods(MethodToEvent):
|
||||
|
||||
class AuthenticationForgotPasswordEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "SELECT"
|
||||
event_type = "LOGIN"
|
||||
event_description = "Send an email to user for a valid password reset token"
|
||||
event_category = "AUTHENTICATION"
|
||||
|
||||
__event_keys__ = {
|
||||
"e3ca6e24-b9f8-4127-949c-3bfa364e3513": "authentication_forgot_password",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"e3ca6e24-b9f8-4127-949c-3bfa364e3513": "authentication_forgot_password",
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def authentication_forgot_password(
|
||||
cls, request: Request, data: Forgot, token_dict: dict = None
|
||||
cls,
|
||||
request: Request,
|
||||
data: Forgot,
|
||||
):
|
||||
found_user: Users = Users.check_user_exits(
|
||||
access_key=data.access_key, domain=data.domain
|
||||
@@ -610,21 +625,6 @@ class AuthenticationForgotPasswordEventMethods(MethodToEvent):
|
||||
forgot_key = save_access_token_to_redis(
|
||||
request=request, found_user=found_user, domain=data.domain
|
||||
)
|
||||
# UserLogger.log_error(
|
||||
# str(
|
||||
# dict(
|
||||
# user_id=found_user.id,
|
||||
# domain=data.domain,
|
||||
# access_key=data.access_key,
|
||||
# agent=request.headers.get("User-Agent", None),
|
||||
# ip=getattr(request, "remote_addr", None)
|
||||
# or request.headers.get("X-Forwarded-For", None),
|
||||
# platform=request.headers.get("Origin", None),
|
||||
# login_date=str(DateTimeLocal.now()),
|
||||
# is_login=False,
|
||||
# )
|
||||
# )
|
||||
# )
|
||||
forgot_link = ApiStatic.forgot_link(forgot_key=forgot_key)
|
||||
send_email_completed = send_email(
|
||||
subject=f"Dear {found_user.user_tag}, your forgot password link has been sent.",
|
||||
@@ -637,7 +637,6 @@ class AuthenticationForgotPasswordEventMethods(MethodToEvent):
|
||||
raise HTTPException(
|
||||
status_code=400, detail="Email can not be sent. Try again later"
|
||||
)
|
||||
|
||||
found_user.password_token = forgot_key
|
||||
found_user.password_token_is_valid = str(system_arrow.shift(days=1))
|
||||
found_user.save()
|
||||
@@ -652,42 +651,96 @@ class AuthenticationForgotPasswordEventMethods(MethodToEvent):
|
||||
)
|
||||
|
||||
|
||||
class AuthenticationResetPasswordEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "UPDATE"
|
||||
__event_keys__ = {
|
||||
"af9e121e-24bb-44ac-a616-471d5754360e": "authentication_reset_password",
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def authentication_reset_password(cls, data: Forgot):
|
||||
from sqlalchemy import or_
|
||||
|
||||
found_user = Users.query.filter(
|
||||
or_(
|
||||
Users.email == str(data.access_key).lower(),
|
||||
Users.phone_number == str(data.access_key).replace(" ", ""),
|
||||
),
|
||||
).first()
|
||||
if not found_user:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail="Given access key or domain is not matching with the any user record.",
|
||||
)
|
||||
|
||||
reset_password_token = found_user.reset_password_token(found_user=found_user)
|
||||
send_email_completed = send_email(
|
||||
subject=f"Dear {found_user.user_tag}, a password reset request has been received.",
|
||||
receivers=[str(found_user.email)],
|
||||
html=change_your_password_template(
|
||||
user_name=found_user.user_tag,
|
||||
forgot_link=ApiStatic.forgot_link(forgot_key=reset_password_token),
|
||||
),
|
||||
)
|
||||
if not send_email_completed:
|
||||
raise found_user.raise_http_exception(
|
||||
status_code=400, message="Email can not be sent. Try again later"
|
||||
)
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "Password change link is sent to your email or phone",
|
||||
"data": found_user.get_dict(),
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
|
||||
|
||||
class AuthenticationDownloadAvatarEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "SELECT"
|
||||
event_type = "LOGIN"
|
||||
event_description = "Download avatar icon and profile info of user"
|
||||
event_category = "AUTHENTICATION"
|
||||
|
||||
__event_keys__ = {
|
||||
"c140cd5f-307f-4046-a93e-3ade032a57a7": "authentication_download_avatar",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"c140cd5f-307f-4046-a93e-3ade032a57a7": "authentication_download_avatar",
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def authentication_download_avatar(
|
||||
cls, request: Request, data: Forgot, token_dict: dict = None
|
||||
cls, token_dict: Union[EmployeeTokenObject, OccupantTokenObject]
|
||||
):
|
||||
found_user = Users.check_user_exits(
|
||||
access_key=data.access_key, domain=data.domain
|
||||
)
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "Avatar and profile is shared via user credentials",
|
||||
"data": {
|
||||
"last_seen": str(found_user.last_seen),
|
||||
"avatar": found_user.avatar,
|
||||
"remember_me": found_user.remember_me,
|
||||
"expiry_ends": str(found_user.expiry_ends),
|
||||
"expired_str": str(
|
||||
system_arrow.now()
|
||||
- system_arrow.get(str(found_user.expiry_ends))
|
||||
),
|
||||
"expired_int": int(
|
||||
(
|
||||
system_arrow.now()
|
||||
- system_arrow.get(str(found_user.expiry_ends))
|
||||
).days
|
||||
),
|
||||
if found_user := Users.filter_one(Users.id == token_dict.user_id).data:
|
||||
expired_starts = str(
|
||||
system_arrow.now() - system_arrow.get(str(found_user.expiry_ends))
|
||||
)
|
||||
expired_int = (
|
||||
system_arrow.now() - system_arrow.get(str(found_user.expiry_ends))
|
||||
).days
|
||||
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "Avatar and profile is shared via user credentials",
|
||||
"data": {
|
||||
"lang": token_dict.lang,
|
||||
"full_name": found_user.person.full_name,
|
||||
"avatar": found_user.avatar,
|
||||
"remember_me": found_user.remember_me,
|
||||
"expiry_ends": str(found_user.expiry_ends),
|
||||
"expired_str": expired_starts,
|
||||
"expired_int": int(expired_int),
|
||||
},
|
||||
},
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
return JSONResponse(
|
||||
content={"completed": False, "message": "Invalid data", "data": {}},
|
||||
status_code=status.HTTP_202_ACCEPTED,
|
||||
)
|
||||
|
||||
|
||||
@@ -724,3 +777,68 @@ AuthenticationForgotPasswordEventMethod = AuthenticationForgotPasswordEventMetho
|
||||
AuthenticationDownloadAvatarEventMethod = AuthenticationDownloadAvatarEventMethods(
|
||||
action=ActionsSchema(endpoint="/authentication/avatar")
|
||||
)
|
||||
AuthenticationResetPasswordEventMethod = AuthenticationResetPasswordEventMethods(
|
||||
action=ActionsSchema(endpoint="/authentication/reset_password")
|
||||
)
|
||||
|
||||
# UserLogger.log_error(
|
||||
# str(
|
||||
# dict(
|
||||
# user_id=found_user.id,
|
||||
# domain=data.domain,
|
||||
# access_key=token_user.get("access_input"),
|
||||
# agent=request.headers.get("User-Agent", None),
|
||||
# ip=getattr(request, "remote_addr", None)
|
||||
# or request.headers.get("X-Forwarded-For", None),
|
||||
# platform=request.headers.get("Origin", None),
|
||||
# login_date=datetime.datetime.utcnow().__str__(),
|
||||
# is_login=False,
|
||||
# )
|
||||
# )
|
||||
# )
|
||||
|
||||
# UserLogger.log_error(
|
||||
# str(
|
||||
# dict(
|
||||
# user_id=found_user.id,
|
||||
# domain=data.domain,
|
||||
# access_key=data.access_key,
|
||||
# agent=request.headers.get("User-Agent", None),
|
||||
# ip=getattr(request, "remote_addr", None)
|
||||
# or request.headers.get("X-Forwarded-For", None),
|
||||
# platform=request.headers.get("Origin", None),
|
||||
# login_date=str(DateTimeLocal.now()),
|
||||
# is_login=False,
|
||||
# )
|
||||
# )
|
||||
# )
|
||||
# UserLogger.log_error(
|
||||
# str(
|
||||
# dict(
|
||||
# user_id=found_user.id,
|
||||
# domain=data.domain,
|
||||
# access_key="via_refresher",
|
||||
# agent=request.headers.get("User-Agent", None),
|
||||
# ip=getattr(request, "remote_addr", None)
|
||||
# or request.headers.get("X-Forwarded-For", None),
|
||||
# platform=request.headers.get("Origin", None),
|
||||
# login_date=datetime.datetime.utcnow().__str__(),
|
||||
# is_login=False,
|
||||
# )
|
||||
# )
|
||||
# )
|
||||
# UserLogger.log_error(
|
||||
# str(
|
||||
# dict(
|
||||
# user_id=selected_user.id,
|
||||
# domain=data.domain,
|
||||
# access_key=token_user.get("access_input"),
|
||||
# agent=request.headers.get("User-Agent", None),
|
||||
# ip=getattr(request, "remote_addr", None)
|
||||
# or request.headers.get("X-Forwarded-For", None),
|
||||
# platform=request.headers.get("Origin", None),
|
||||
# login_date=datetime.datetime.utcnow().__str__(),
|
||||
# is_login=False,
|
||||
# )
|
||||
# )
|
||||
# )
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import typing
|
||||
from typing import Union
|
||||
|
||||
from fastapi import status, HTTPException
|
||||
from fastapi.responses import JSONResponse
|
||||
@@ -18,9 +19,9 @@ from api_validations.validations_request import (
|
||||
PatchRecord,
|
||||
ListOptions,
|
||||
)
|
||||
from api_validations.validations_response import ListBuildingResponse
|
||||
|
||||
from api_validations.core_response import AlchemyJsonResponse
|
||||
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObject
|
||||
|
||||
@@ -28,9 +29,15 @@ from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObj
|
||||
class BuildListEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "SELECT"
|
||||
event_description = ""
|
||||
event_category = ""
|
||||
|
||||
__event_keys__ = {
|
||||
"68b3b5ed-b74c-4a27-820f-3959214e94e9": "build_list",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"68b3b5ed-b74c-4a27-820f-3959214e94e9": ListBuildingResponse,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def build_list(
|
||||
@@ -52,16 +59,23 @@ class BuildListEventMethods(MethodToEvent):
|
||||
completed=True,
|
||||
message="Building Records are listed",
|
||||
result=records,
|
||||
response_model=ListBuildingResponse,
|
||||
)
|
||||
|
||||
|
||||
class BuildCreateEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "CREATE"
|
||||
event_description = ""
|
||||
event_category = ""
|
||||
__event_keys__ = {
|
||||
"a2271854-6b90-43da-a440-a62b70d90528": "build_create",
|
||||
"b67ee709-0992-4604-9f90-fb1da10d5cf9": "create_building_employee",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"a2271854-6b90-43da-a440-a62b70d90528": InsertBuild,
|
||||
"b67ee709-0992-4604-9f90-fb1da10d5cf9": InsertBuild,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def build_create(cls, data: InsertBuild, token_dict: EmployeeTokenObject):
|
||||
@@ -78,7 +92,6 @@ class BuildCreateEventMethods(MethodToEvent):
|
||||
)
|
||||
|
||||
created_build = Build.create_action(data=data, token=token_dict)
|
||||
|
||||
build_type = BuildTypes.filter_by_one(
|
||||
**BuildTypes.valid_record_dict, type_code="APT_YNT"
|
||||
).data
|
||||
@@ -173,19 +186,28 @@ class BuildCreateEventMethods(MethodToEvent):
|
||||
class BuildUpdateEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "UPDATE"
|
||||
event_description = ""
|
||||
event_category = ""
|
||||
|
||||
__class_key__ = ""
|
||||
__event_keys__ = {
|
||||
"5ad38a66-1189-451e-babb-77de2d63d757": "build_update",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"5ad38a66-1189-451e-babb-77de2d63d757": UpdateBuild,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def build_update(cls, build_uu_id: str, data: UpdateBuild, token_dict):
|
||||
Build.pre_query = Build.select_action(
|
||||
employee_id=token_dict.selected_company.employee_id
|
||||
)
|
||||
if Build.filter_all(
|
||||
Build.person_id == token_dict.person_id,
|
||||
).data:
|
||||
Build.pre_query = None
|
||||
def build_update(
|
||||
cls,
|
||||
build_uu_id: str,
|
||||
data: UpdateBuild,
|
||||
token_dict: Union[EmployeeTokenObject, OccupantTokenObject],
|
||||
):
|
||||
if isinstance(token_dict, OccupantTokenObject):
|
||||
Build.pre_query = Build.select_action(
|
||||
employee_id=token_dict.selected_company.employee_id
|
||||
)
|
||||
updated_build = Build.update_action(
|
||||
data=data, token=token_dict, build_uu_id=build_uu_id
|
||||
)
|
||||
@@ -194,10 +216,30 @@ class BuildUpdateEventMethods(MethodToEvent):
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "Update Build record",
|
||||
"data": updated_build,
|
||||
"data": updated_build.get_dict(),
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
elif isinstance(token_dict, EmployeeTokenObject):
|
||||
find_one_build = Build.filter_one(
|
||||
Build.uu_id == build_uu_id,
|
||||
).data
|
||||
access_authorized_build = Build.select_action(
|
||||
employee_id=token_dict.selected_company.employee_id,
|
||||
filter_expr=[Build.id == find_one_build.id],
|
||||
)
|
||||
if access_authorized_build.count:
|
||||
updated_build = Build.update_action(
|
||||
data=data, token=token_dict, build_uu_id=build_uu_id
|
||||
)
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": "Update Build record",
|
||||
"data": updated_build.get_dict(),
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail=f"This user can not modify {build_uu_id} - building.",
|
||||
@@ -207,9 +249,15 @@ class BuildUpdateEventMethods(MethodToEvent):
|
||||
class BuildPatchEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "PATCH"
|
||||
event_description = ""
|
||||
event_category = ""
|
||||
|
||||
__event_keys__ = {
|
||||
"e3876bfe-8847-4dea-ae36-e709f7431930": "build_patch",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"e3876bfe-8847-4dea-ae36-e709f7431930": "build_patch",
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def build_patch(cls, build_uu_id: str, data: PatchRecord, token_dict):
|
||||
|
||||
@@ -22,6 +22,9 @@ class BuildAreaListEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"0bb51845-65a2-4340-8872-a3b5aad95468": "build_area_list",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"0bb51845-65a2-4340-8872-a3b5aad95468": None,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def build_area_list(
|
||||
@@ -56,6 +59,9 @@ class BuildAreaCreateEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"a10571fa-ac1d-4546-9272-cacb911d8004": "build_area_create",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"a10571fa-ac1d-4546-9272-cacb911d8004": InsertBuildArea,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def build_area_create(
|
||||
@@ -114,6 +120,9 @@ class BuildAreaUpdateEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"58178738-7489-4f8f-954e-5c8f083c1845": "build_area_update",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"58178738-7489-4f8f-954e-5c8f083c1845": UpdateBuildArea,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def build_area_update(
|
||||
@@ -135,6 +144,9 @@ class BuildAreaPatchEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"d6bd8a5f-fa76-49da-b82e-4a95f1bcce39": "build_area_patch",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"d6bd8a5f-fa76-49da-b82e-4a95f1bcce39": None,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def build_area_patch(
|
||||
|
||||
@@ -3,6 +3,7 @@ from typing import Union
|
||||
from fastapi.responses import JSONResponse
|
||||
from fastapi import status
|
||||
|
||||
from api_validations.validations_response.parts import BuildPartsListResponse
|
||||
from databases import (
|
||||
Build,
|
||||
BuildParts,
|
||||
@@ -13,6 +14,7 @@ from api_validations.core_response import AlchemyJsonResponse
|
||||
|
||||
from api_validations.validations_request import (
|
||||
InsertBuildParts,
|
||||
UpdateBuildParts,
|
||||
ListOptions,
|
||||
)
|
||||
|
||||
@@ -23,6 +25,9 @@ class BuildingBuildPartsListEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"b860e37a-e19b-4c45-9543-461241f7587c": "building_build_parts_list"
|
||||
}
|
||||
__event_validation__ = {
|
||||
"b860e37a-e19b-4c45-9543-461241f7587c": BuildPartsListResponse
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def building_build_parts_list(
|
||||
@@ -43,6 +48,9 @@ class BuildingBuildPartsListEventMethods(MethodToEvent):
|
||||
completed=True,
|
||||
message="Building Parts Records are listed",
|
||||
result=records,
|
||||
cls_object=BuildParts,
|
||||
response_model=BuildPartsListResponse,
|
||||
filter_attributes=list_options,
|
||||
)
|
||||
|
||||
|
||||
@@ -52,6 +60,7 @@ class BuildingBuildPartsCreateEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"fb403f69-11ed-4f4f-ad71-5e6fb4a793d2": "building_build_parts_create"
|
||||
}
|
||||
__event_validation__ = {"fb403f69-11ed-4f4f-ad71-5e6fb4a793d2": InsertBuildParts}
|
||||
|
||||
@classmethod
|
||||
def building_build_parts_create(
|
||||
@@ -79,11 +88,12 @@ class BuildingBuildPartsUpdateEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"58fdf95e-2110-4ed6-9c26-95f4be87eaee": "building_build_parts_update"
|
||||
}
|
||||
__event_validation__ = {"58fdf95e-2110-4ed6-9c26-95f4be87eaee": UpdateBuildParts}
|
||||
|
||||
@classmethod
|
||||
def building_build_parts_update(
|
||||
cls,
|
||||
data: InsertBuildParts,
|
||||
data: UpdateBuildParts,
|
||||
token_dict: Union[EmployeeTokenObject, OccupantTokenObject],
|
||||
):
|
||||
updated_build = BuildParts.update_action(data=data, token=token_dict)
|
||||
@@ -104,6 +114,7 @@ class BuildingBuildPartsPatchEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"87a15ade-3474-4206-b574-bbf8580cbb14": "building_build_parts_patch"
|
||||
}
|
||||
__event_validation__ = {"87a15ade-3474-4206-b574-bbf8580cbb14": None}
|
||||
|
||||
@classmethod
|
||||
def building_build_parts_patch(cls, data, token_dict):
|
||||
|
||||
@@ -23,6 +23,9 @@ class BuildSitesListEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"6798414c-6c7d-47f0-9d8b-6935a0f51c2e": "build_sites_list",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"6798414c-6c7d-47f0-9d8b-6935a0f51c2e": None,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def build_sites_list(
|
||||
@@ -65,6 +68,9 @@ class BuildSitesCreateEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"57edc8bf-8f29-4e75-b5e1-9ca0139a3fda": "build_sites_create",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"57edc8bf-8f29-4e75-b5e1-9ca0139a3fda": InsertBuildArea,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def build_area_create(
|
||||
@@ -113,6 +119,9 @@ class BuildSitesUpdateEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"b18e8e37-a62b-4a84-9972-ba17121ed393": "build_sites_update",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"b18e8e37-a62b-4a84-9972-ba17121ed393": UpdateBuildArea,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def build_area_update(
|
||||
@@ -134,6 +143,9 @@ class BuildSitesPatchEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"39ba1d78-ff0d-4ec7-a363-b457cbf199a0": "build_sites_patch",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"39ba1d78-ff0d-4ec7-a363-b457cbf199a0": None,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def build_area_patch(
|
||||
|
||||
@@ -15,6 +15,7 @@ class BuildTypesListEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"5344d03c-fc47-43ec-8c44-6c2acd7e5d9f": "build_types_list",
|
||||
}
|
||||
__event_validation__ = {"5344d03c-fc47-43ec-8c44-6c2acd7e5d9f": None}
|
||||
|
||||
@classmethod
|
||||
def build_types_list(
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
from typing import Union
|
||||
|
||||
from api_events.events.events.events_bind_modules import ModulesBindOccupantEventMethods
|
||||
|
||||
from api_events.events.events.events_bind_services import (
|
||||
ServiceBindOccupantEventMethods,
|
||||
)
|
||||
from databases import (
|
||||
Modules,
|
||||
BuildParts,
|
||||
@@ -17,17 +20,24 @@ from api_validations.validations_request import (
|
||||
UpdateBuildLivingSpace,
|
||||
ListOptions,
|
||||
)
|
||||
from api_validations.validations_response.living_space import LivingSpaceListResponse
|
||||
from databases.sql_models.event.event import Services
|
||||
|
||||
|
||||
class BuildingLivingSpacesPartsListEventMethods(MethodToEvent):
|
||||
class BuildingLivingSpacesListEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "SELECT"
|
||||
event_description = ""
|
||||
event_category = ""
|
||||
__event_keys__ = {
|
||||
"36961d8a-cefa-46cc-9f7c-9d841d6351b6": "building_live_space_list",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"36961d8a-cefa-46cc-9f7c-9d841d6351b6": LivingSpaceListResponse
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def building_build_parts_list(
|
||||
def building_live_space_list(
|
||||
cls,
|
||||
list_options: ListOptions,
|
||||
token_dict: Union[EmployeeTokenObject, OccupantTokenObject],
|
||||
@@ -95,15 +105,21 @@ class BuildingLivingSpacesPartsListEventMethods(MethodToEvent):
|
||||
completed=True,
|
||||
message="Building Living Spaces are listed successfully",
|
||||
result=records,
|
||||
response_model=LivingSpaceListResponse,
|
||||
cls_object=BuildLivingSpace,
|
||||
filter_attributes=list_options,
|
||||
)
|
||||
|
||||
|
||||
class BuildingLivingSpacesPartsCreateEventMethods(MethodToEvent):
|
||||
class BuildingLivingSpacesCreateEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "CREATE"
|
||||
__event_keys__ = {
|
||||
"46d90119-3b23-4784-8053-fe11da4a3584": "building_live_space_create"
|
||||
}
|
||||
__event_validation__ = {
|
||||
"46d90119-3b23-4784-8053-fe11da4a3584": InsertBuildLivingSpace
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def building_live_space_create(
|
||||
@@ -175,7 +191,6 @@ class BuildingLivingSpacesPartsCreateEventMethods(MethodToEvent):
|
||||
created_living_space = BuildLivingSpace.create_action(
|
||||
data=data_dict, token_dict=token_dict
|
||||
)
|
||||
|
||||
if last_living_space:
|
||||
dt = system_arrow.get(last_living_space.expiry_ends)
|
||||
if dt > system_arrow.now():
|
||||
@@ -187,25 +202,26 @@ class BuildingLivingSpacesPartsCreateEventMethods(MethodToEvent):
|
||||
)
|
||||
last_living_space.save()
|
||||
|
||||
user_module = Modules.filter_one(
|
||||
Modules.module_code == "USR-PUB", system=True
|
||||
).data
|
||||
created_living_space.save()
|
||||
created_living_space.update(is_confirmed=True)
|
||||
created_living_space.save()
|
||||
ModulesBindOccupantEventMethods.modules_bind_occupant_system(
|
||||
created_living_space.save_and_confirm()
|
||||
occupants_service = Services.retrieve_service_via_occupant_code(
|
||||
occupant_code=occupant_type.occupant_code
|
||||
)
|
||||
ServiceBindOccupantEventMethods.bind_services_occupant_system(
|
||||
build_living_space_id=created_living_space.id,
|
||||
modules_id=user_module.id,
|
||||
service_id=occupants_service.id,
|
||||
)
|
||||
return created_living_space
|
||||
|
||||
|
||||
class BuildingLivingSpacesPartsUpdateEventMethods(MethodToEvent):
|
||||
class BuildingLivingSpacesUpdateEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "UPDATE"
|
||||
__event_keys__ = {
|
||||
"c786e15c-c03e-4e8f-936c-7e5e5ec9bbcc": "building_live_space_update",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"c786e15c-c03e-4e8f-936c-7e5e5ec9bbcc": UpdateBuildLivingSpace
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def building_live_space_update(
|
||||
@@ -280,16 +296,12 @@ class BuildingLivingSpacesPartsUpdateEventMethods(MethodToEvent):
|
||||
del data_dict["build_parts_uu_id"], data_dict["life_person_uu_id"]
|
||||
|
||||
|
||||
BuildingLivingSpacesPartsListEventMethod = BuildingLivingSpacesPartsListEventMethods(
|
||||
BuildingLivingSpacesListEventMethod = BuildingLivingSpacesListEventMethods(
|
||||
action=ActionsSchema(endpoint="/building/living_space/list")
|
||||
)
|
||||
BuildingLivingSpacesPartsCreateEventMethod = (
|
||||
BuildingLivingSpacesPartsCreateEventMethods(
|
||||
action=ActionsSchema(endpoint="/building/living_space/create")
|
||||
)
|
||||
BuildingLivingSpacesCreateEventMethod = BuildingLivingSpacesCreateEventMethods(
|
||||
action=ActionsSchema(endpoint="/building/living_space/create")
|
||||
)
|
||||
BuildingLivingSpacesPartsUpdateEventMethod = (
|
||||
BuildingLivingSpacesPartsUpdateEventMethods(
|
||||
action=ActionsSchema(endpoint="/building/living_space/update")
|
||||
)
|
||||
BuildingLivingSpacesUpdateEventMethod = BuildingLivingSpacesUpdateEventMethods(
|
||||
action=ActionsSchema(endpoint="/building/living_space/update")
|
||||
)
|
||||
|
||||
@@ -23,6 +23,7 @@ class CompanyListEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"f6900cb5-ac5b-478e-8e7c-fa87e65cd2e5": "company_list",
|
||||
}
|
||||
__event_validation__ = {"f6900cb5-ac5b-478e-8e7c-fa87e65cd2e5": None}
|
||||
|
||||
@classmethod
|
||||
def company_list(
|
||||
@@ -56,6 +57,7 @@ class CompanyCreateEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"76f11a08-5f4a-4e1f-961f-aaef21699acd": "company_create",
|
||||
}
|
||||
__event_validation__ = {"76f11a08-5f4a-4e1f-961f-aaef21699acd": InsertCompany}
|
||||
|
||||
@classmethod
|
||||
def company_create(
|
||||
@@ -84,6 +86,9 @@ class CompanyUpdateEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"41ea7f29-006a-4310-b5c4-b2a0e1a504bd": "company_update",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"41ea7f29-006a-4310-b5c4-b2a0e1a504bd": UpdateCompany,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def company_update(
|
||||
@@ -125,6 +130,7 @@ class CompanyPatchEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"6320d696-1fd1-49f9-860a-8f22e5b8a68d": "company_patch",
|
||||
}
|
||||
__event_validation__ = {"6320d696-1fd1-49f9-860a-8f22e5b8a68d": None}
|
||||
|
||||
@classmethod
|
||||
def company_patch(
|
||||
|
||||
@@ -22,6 +22,7 @@ class DepartmentListEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"2cb90331-c1b4-4923-8314-8111326b621a": "department_list",
|
||||
}
|
||||
__event_validation__ = {"2cb90331-c1b4-4923-8314-8111326b621a": None}
|
||||
|
||||
@classmethod
|
||||
def department_list(
|
||||
@@ -55,6 +56,7 @@ class DepartmentCreateEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"d8bd3985-7f3b-4267-a74e-d5017e4ea9f8": "super_user_department_create",
|
||||
}
|
||||
__event_validation__ = {"d8bd3985-7f3b-4267-a74e-d5017e4ea9f8": DepartmentsPydantic}
|
||||
|
||||
@classmethod
|
||||
def super_user_department_create(
|
||||
@@ -83,6 +85,7 @@ class DepartmentUpdateEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"4172706f-06c9-4c38-9ac8-59085a72f80a": "department_update",
|
||||
}
|
||||
__event_validation__ = {"4172706f-06c9-4c38-9ac8-59085a72f80a": DepartmentsPydantic}
|
||||
|
||||
@classmethod
|
||||
def department_update(
|
||||
@@ -120,6 +123,7 @@ class DepartmentPatchEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"1e272e4f-6c1e-418b-91a7-be8b06c875da": "department_patch",
|
||||
}
|
||||
__event_validation__ = {"1e272e4f-6c1e-418b-91a7-be8b06c875da": None}
|
||||
|
||||
@classmethod
|
||||
def department_patch(
|
||||
|
||||
@@ -22,6 +22,7 @@ class DutiesListEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"44b72beb-53a8-407b-a12a-76e74b65794d": "duties_list",
|
||||
}
|
||||
__event_validation__ = {"44b72beb-53a8-407b-a12a-76e74b65794d": None}
|
||||
|
||||
@classmethod
|
||||
def duties_list(
|
||||
@@ -46,6 +47,7 @@ class DutiesGetByUUIDEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"30c54cce-3303-4d36-959a-b64e383ae177": "duties_get_by_uuid",
|
||||
}
|
||||
__event_validation__ = {"30c54cce-3303-4d36-959a-b64e383ae177": SelectDuties}
|
||||
|
||||
@classmethod
|
||||
def duties_get_by_uuid(
|
||||
@@ -91,6 +93,7 @@ class DutiesCreateEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"3524ae42-0825-4af7-be85-7c890a4f65d3": "duties_create",
|
||||
}
|
||||
__event_validation__ = {"3524ae42-0825-4af7-be85-7c890a4f65d3": InsertDuties}
|
||||
|
||||
@classmethod
|
||||
def duties_create(
|
||||
@@ -140,6 +143,7 @@ class DutiesUpdateEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"3fc77829-f1ee-4511-a2ca-582daa03125b": "duties_update",
|
||||
}
|
||||
__event_validation__ = {"3fc77829-f1ee-4511-a2ca-582daa03125b": UpdateDuties}
|
||||
|
||||
@classmethod
|
||||
def duties_update(
|
||||
@@ -175,6 +179,7 @@ class DutiesPatchEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"ca81c6d1-975a-4288-a27b-1069aea84afe": "duties_patch",
|
||||
}
|
||||
__event_validation__ = {"ca81c6d1-975a-4288-a27b-1069aea84afe": None}
|
||||
|
||||
@classmethod
|
||||
def duties_patch(
|
||||
|
||||
@@ -20,6 +20,7 @@ class DutyListEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"23231c7d-4ff2-4b39-b71b-ea350d31fadf": "duty_list",
|
||||
}
|
||||
__event_validation__ = {"23231c7d-4ff2-4b39-b71b-ea350d31fadf": None}
|
||||
|
||||
@classmethod
|
||||
def duty_list(
|
||||
@@ -42,6 +43,7 @@ class DutyCreateEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"c6ea200e-fa17-4393-b390-37f5337c9c65": "duty_create",
|
||||
}
|
||||
__event_validation__ = {"c6ea200e-fa17-4393-b390-37f5337c9c65": InsertCompanyDuty}
|
||||
|
||||
@classmethod
|
||||
def duty_create(
|
||||
@@ -67,6 +69,7 @@ class DutyUpdateEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"ad952647-bcf8-482d-9e05-b2ee8086483f": "duty_update",
|
||||
}
|
||||
__event_validation__ = {"ad952647-bcf8-482d-9e05-b2ee8086483f": None}
|
||||
|
||||
@classmethod
|
||||
def duty_update(
|
||||
@@ -104,6 +107,7 @@ class DutyPatchEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"d5c7b5c4-7b4e-4d5b-8e3b-2b9c5f5d0c0b": "duty_patch",
|
||||
}
|
||||
__event_validation__ = {"d5c7b5c4-7b4e-4d5b-8e3b-2b9c5f5d0c0b": None}
|
||||
|
||||
@classmethod
|
||||
def duty_patch(
|
||||
|
||||
@@ -22,6 +22,7 @@ class EmployeeListEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"cb677c92-6b05-4122-af5c-12766fae8095": "employee_list",
|
||||
}
|
||||
__event_validation__ = {"cb677c92-6b05-4122-af5c-12766fae8095": None}
|
||||
|
||||
@classmethod
|
||||
def employee_list(
|
||||
@@ -49,6 +50,7 @@ class EmployeeCreateEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"1e1632c3-bb0e-46a5-8e45-da3f6d88ac43": "employee_create",
|
||||
}
|
||||
__event_validation__ = {"1e1632c3-bb0e-46a5-8e45-da3f6d88ac43": InsertEmployees}
|
||||
|
||||
@classmethod
|
||||
def employee_create(
|
||||
@@ -94,6 +96,7 @@ class EmployeeUpdateEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"9015a076-d78c-463d-9474-ea343a125fb8": "employee_update",
|
||||
}
|
||||
__event_validation__ = {"9015a076-d78c-463d-9474-ea343a125fb8": None}
|
||||
|
||||
@classmethod
|
||||
def employee_update(
|
||||
@@ -137,6 +140,7 @@ class EmployeePatchEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"8446ce0b-9310-4b9f-93e2-61f56a9dacd1": "employee_patch",
|
||||
}
|
||||
__event_validation__ = {"8446ce0b-9310-4b9f-93e2-61f56a9dacd1": None}
|
||||
|
||||
@classmethod
|
||||
def employee_patch(
|
||||
@@ -185,6 +189,9 @@ class Employee2PeopleEmployEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"5eb04057-7a74-4555-b2c6-14eda32dae89": "company_employee_employ",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"5eb04057-7a74-4555-b2c6-14eda32dae89": BindEmployees2People
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def company_employee_employ(
|
||||
@@ -247,6 +254,9 @@ class Employee2PeopleFireEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"caf914fa-0899-4b0b-a85a-3d40fdaa06a5": "company_employee_fire",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"caf914fa-0899-4b0b-a85a-3d40fdaa06a5": BindEmployees2People
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def company_employee_fire(
|
||||
|
||||
@@ -21,6 +21,7 @@ class StaffListEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"8984a519-99bf-4f25-8f34-2e1aebba468c": "staff_list",
|
||||
}
|
||||
__event_validation__ = {"8984a519-99bf-4f25-8f34-2e1aebba468c": None}
|
||||
|
||||
@classmethod
|
||||
def staff_list(cls, list_options: ListOptions, token_dict: EmployeeTokenObject):
|
||||
@@ -38,6 +39,7 @@ class StaffCreateEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"8f619257-19fd-404f-b713-7392c588dc36": "staff_create",
|
||||
}
|
||||
__event_validation__ = {"8f619257-19fd-404f-b713-7392c588dc36": InsertStaff}
|
||||
|
||||
@classmethod
|
||||
def staff_create(cls, data: InsertStaff, token_dict: EmployeeTokenObject):
|
||||
@@ -69,6 +71,7 @@ class StaffGetByUUIDEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"7724cfbb-c0ee-4261-959b-61b84e88a34f": "staff_get_by_uu_id",
|
||||
}
|
||||
__event_validation__ = {"7724cfbb-c0ee-4261-959b-61b84e88a34f": SelectStaff}
|
||||
|
||||
@classmethod
|
||||
def staff_get_by_uu_id(cls, data: SelectStaff, token_dict: EmployeeTokenObject):
|
||||
@@ -104,6 +107,7 @@ class StaffUpdateEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"5329f35d-ff9d-4656-a831-ba9c8204e483": "staff_update",
|
||||
}
|
||||
__event_validation__ = {"5329f35d-ff9d-4656-a831-ba9c8204e483": None}
|
||||
|
||||
@classmethod
|
||||
def staff_update(cls, staff_uu_id: str, data, token_dict: EmployeeTokenObject):
|
||||
@@ -119,6 +123,7 @@ class StaffPatchEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"b1cd7c0a-1458-472b-894f-3adc857c8512": "staff_patch",
|
||||
}
|
||||
__event_validation__ = {"b1cd7c0a-1458-472b-894f-3adc857c8512": None}
|
||||
|
||||
@classmethod
|
||||
def staff_patch(
|
||||
|
||||
@@ -27,6 +27,7 @@ class DecisionBookListEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"5c10d6ae-2aee-4243-a7c3-94826d028d13": "building_decision_book_list",
|
||||
}
|
||||
__event_validation__ = {"5c10d6ae-2aee-4243-a7c3-94826d028d13": None}
|
||||
|
||||
@classmethod
|
||||
def building_decision_book_list(
|
||||
@@ -67,6 +68,7 @@ class DecisionBookCreateEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"0a68cb44-271a-4829-81f6-cd99a5f326b4": "building_decision_book_create",
|
||||
}
|
||||
__event_validation__ = {"0a68cb44-271a-4829-81f6-cd99a5f326b4": InsertDecisionBook}
|
||||
|
||||
@classmethod
|
||||
def building_decision_book_create(
|
||||
@@ -177,6 +179,7 @@ class DecisionBookUpdateEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"6bc7035c-3b53-4c0a-8cc9-1ec9c6af1e29": "building_decision_book_update",
|
||||
}
|
||||
__event_validation__ = {"6bc7035c-3b53-4c0a-8cc9-1ec9c6af1e29": None}
|
||||
|
||||
@classmethod
|
||||
def building_decision_book_update(cls, data: InsertDecisionBook, token_dict: dict):
|
||||
@@ -189,6 +192,7 @@ class DecisionBookPatchEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"7b58ed84-9a65-4588-994d-30df8366b050": "building_decision_book_patch",
|
||||
}
|
||||
__event_validation__ = {"7b58ed84-9a65-4588-994d-30df8366b050": None}
|
||||
|
||||
@classmethod
|
||||
def building_decision_book_patch(cls, data: InsertDecisionBook, token_dict: dict):
|
||||
@@ -201,6 +205,7 @@ class DecisionBookApprovalEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"fc745142-3437-4ca2-89fa-c5a3e2b5c6c2": "building_decision_book_approval",
|
||||
}
|
||||
__event_validation__ = {"fc745142-3437-4ca2-89fa-c5a3e2b5c6c2": None}
|
||||
|
||||
@classmethod
|
||||
def building_decision_book_approval(cls, data, token_dict):
|
||||
|
||||
@@ -40,6 +40,7 @@ class DecisionBookDecisionBookItemsListEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"eb36de59-8268-4d96-80b6-5d01c12bf0b1": "building_decision_book_items_list",
|
||||
}
|
||||
__event_validation__ = {"eb36de59-8268-4d96-80b6-5d01c12bf0b1": None}
|
||||
|
||||
@classmethod
|
||||
def building_decision_book_items_list(
|
||||
@@ -120,6 +121,9 @@ class DecisionBookDecisionBookItemsCreateEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"dce10509-0da5-46fb-af3c-a81d54d5481c": "building_decision_book_items_create",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"dce10509-0da5-46fb-af3c-a81d54d5481c": InsertBuildDecisionBookItems
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def iterate_over_build_parts(
|
||||
@@ -494,6 +498,7 @@ class DecisionBookDecisionBookItemsUpdateEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"f0fdfe1b-806b-4175-ad50-a1a165c0dfb7": "building_decision_book_items_update",
|
||||
}
|
||||
__event_validation__ = {"f0fdfe1b-806b-4175-ad50-a1a165c0dfb7": None}
|
||||
|
||||
@classmethod
|
||||
def building_decision_book_items_update(
|
||||
@@ -510,6 +515,7 @@ class DecisionBookDecisionBookItemsPatchEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"42328809-b516-477b-82cc-2d6fadf28843": "building_decision_book_items_patch",
|
||||
}
|
||||
__event_validation__ = {"42328809-b516-477b-82cc-2d6fadf28843": None}
|
||||
|
||||
@classmethod
|
||||
def building_decision_book_items_patch(
|
||||
|
||||
@@ -27,6 +27,7 @@ class DecisionBookDecisionBookItemsDebitsListEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"a1d2b1f6-9b8d-4f6b-8f4d-6b1f6a9d8b1a": "decision_book_decision_book_items_debits_list",
|
||||
}
|
||||
__event_validation__ = {"a1d2b1f6-9b8d-4f6b-8f4d-6b1f6a9d8b1a": None}
|
||||
|
||||
@classmethod
|
||||
def decision_book_decision_book_items_debits_list(
|
||||
@@ -54,6 +55,7 @@ class DecisionBookDecisionBookItemsDebitsCreateEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"a1d2b1f6-9b8d-4f6b-8f4d-6b1f6a9d8b1a": "decision_book_decision_book_items_debits_create",
|
||||
}
|
||||
__event_validation__ = {"a1d2b1f6-9b8d-4f6b-8f4d-6b1f6a9d8b1a": None}
|
||||
|
||||
@classmethod
|
||||
def decision_book_decision_book_items_debits_create(
|
||||
|
||||
@@ -34,10 +34,13 @@ class DecisionBookPersonListEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"ea324dc0-3b08-4896-9040-7fa0401a176f": "building_decision_book_person_list",
|
||||
}
|
||||
__event_validation__ = {"ea324dc0-3b08-4896-9040-7fa0401a176f": None}
|
||||
|
||||
@classmethod
|
||||
def building_decision_book_person_list(
|
||||
cls, data: ListOptions, token_dict: EmployeeTokenObject
|
||||
cls,
|
||||
data: ListOptions,
|
||||
token_dict: typing.Union[EmployeeTokenObject, OccupantTokenObject],
|
||||
):
|
||||
return
|
||||
|
||||
@@ -48,6 +51,9 @@ class DecisionBookPersonAddEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"e346f720-880b-4b07-93d6-9ac76fbbaa33": "building_decision_book_person_add",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"e346f720-880b-4b07-93d6-9ac76fbbaa33": DecisionBookDecisionBookInvitationsUpdate
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def building_decision_book_person_add(
|
||||
@@ -142,6 +148,9 @@ class DecisionBookPersonRemoveEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"30588869-04cd-48ea-ad00-0e4f8dd7f735": "building_decision_book_people_remove",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"30588869-04cd-48ea-ad00-0e4f8dd7f735": RemoveDecisionBookPerson
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def building_decision_book_people_remove(
|
||||
@@ -156,6 +165,9 @@ class DecisionBookPersonAttendEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"bdcba521-0116-441c-ace1-84c5b68c86c7": "decision_book_invitations_attend",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"bdcba521-0116-441c-ace1-84c5b68c86c7": DecisionBookDecisionBookInvitationsAttend
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def decision_book_invitations_attend(
|
||||
@@ -222,6 +234,9 @@ class DecisionBookPersonAssignOccupantEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"c0b65098-9c79-4212-b1d0-c7e7836cf141": "decision_book_invitations_assign_occupant",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"c0b65098-9c79-4212-b1d0-c7e7836cf141": DecisionBookDecisionBookInvitationsAssign
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def decision_book_invitations_assign_occupant(
|
||||
|
||||
@@ -32,6 +32,7 @@ class BuildDecisionBookInvitationsListEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"e2277528-8c9c-4c0c-ae64-3ce80cae664b": "decision_book_invitations_list",
|
||||
}
|
||||
__event_validation__ = {"e2277528-8c9c-4c0c-ae64-3ce80cae664b": None}
|
||||
|
||||
@classmethod
|
||||
def decision_book_invitations_list(
|
||||
@@ -48,6 +49,9 @@ class BuildDecisionBookInvitationsCreateEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"d0bfa20c-841d-421c-98e6-d308f938d16a": "decision_book_invitations_create",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"d0bfa20c-841d-421c-98e6-d308f938d16a": DecisionBookDecisionBookInvitations
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def decision_book_invitations_create(
|
||||
@@ -248,6 +252,9 @@ class BuildDecisionBookInvitationsUpdateEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"92413636-53a8-4a05-842c-1485a64e00d1": "decision_book_invitations_attend",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"92413636-53a8-4a05-842c-1485a64e00d1": DecisionBookDecisionBookInvitationsUpdate
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def decision_book_invitations_attend(
|
||||
|
||||
169
api_events/events/decision_book/decision_book_payment.py
Normal file
169
api_events/events/decision_book/decision_book_payment.py
Normal file
@@ -0,0 +1,169 @@
|
||||
from typing import Union
|
||||
|
||||
from fastapi.responses import JSONResponse
|
||||
from fastapi import status
|
||||
|
||||
from api_validations.validations_response.parts import BuildPartsListResponse
|
||||
from databases import (
|
||||
Build,
|
||||
BuildParts,
|
||||
)
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObject
|
||||
from api_validations.core_response import AlchemyJsonResponse
|
||||
|
||||
from api_validations.validations_request import (
|
||||
InsertBuildParts,
|
||||
UpdateBuildParts,
|
||||
ListOptions,
|
||||
)
|
||||
from databases.sql_models.building.decision_book import BuildDecisionBookPayments
|
||||
|
||||
|
||||
class DecisionBookPaymentListEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "SELECT"
|
||||
__event_keys__ = {
|
||||
"49bb8ab8-520d-4676-a159-aaf84f37f372": "decision_book_payment_list"
|
||||
}
|
||||
__event_validation__ = {"49bb8ab8-520d-4676-a159-aaf84f37f372": None}
|
||||
|
||||
@classmethod
|
||||
def decision_book_payment_list(
|
||||
cls,
|
||||
list_options: ListOptions,
|
||||
token_dict: Union[EmployeeTokenObject, OccupantTokenObject],
|
||||
):
|
||||
"""
|
||||
SELECT payment_plan_time_periods, process_date, payment_amount, currency, payment_types_id,
|
||||
payment_types_uu_id, period_time, process_date_y, process_date_m, build_decision_book_item_id,
|
||||
build_decision_book_item_uu_id, decision_book_project_id, decision_book_project_uu_id, build_parts_id,
|
||||
build_parts_uu_id, id, uu_id, ref_id, created_at, updated_at, cryp_uu_id, created_by, created_by_id,
|
||||
updated_by, updated_by_id, confirmed_by, confirmed_by_id, is_confirmed, replication_id, deleted,
|
||||
active, is_notification_send, is_email_send, expiry_starts, expiry_ends, account_records_id,
|
||||
account_records_uu_id FROM public.build_decision_book_payments;
|
||||
"""
|
||||
from sqlalchemy import func, select, union_all, extract, Integer
|
||||
|
||||
build_parts_id, build_decision_book_id = 7, ""
|
||||
payment_types_id_recv, payment_types_id_deb = 46, 45
|
||||
|
||||
BuildDecisionBookPayments.filter_attr = list_options
|
||||
# Define the subqueries
|
||||
debit_subquery = (
|
||||
select(
|
||||
BuildDecisionBookPayments.payment_plan_time_periods,
|
||||
func.sum(BuildDecisionBookPayments.payment_amount).label("debit"),
|
||||
func.cast(0, Integer).label("recv"),
|
||||
func.max(BuildDecisionBookPayments.process_date).label("ls"),
|
||||
)
|
||||
.where(
|
||||
BuildDecisionBookPayments.build_parts_id == build_parts_id,
|
||||
BuildDecisionBookPayments.payment_types_id == payment_types_id_deb,
|
||||
BuildDecisionBookPayments.build_decision_book_id
|
||||
== build_decision_book_id,
|
||||
extract("year", func.current_date())
|
||||
== extract("year", BuildDecisionBookPayments.process_date),
|
||||
extract("month", func.current_date())
|
||||
== extract("month", BuildDecisionBookPayments.process_date),
|
||||
)
|
||||
.group_by(BuildDecisionBookPayments.payment_plan_time_periods)
|
||||
)
|
||||
|
||||
recv_subquery = (
|
||||
select(
|
||||
BuildDecisionBookPayments.payment_plan_time_periods,
|
||||
func.cast(0, Integer).label("debit"),
|
||||
func.sum(BuildDecisionBookPayments.payment_amount).label("recv"),
|
||||
func.max(BuildDecisionBookPayments.process_date).label("ls"),
|
||||
)
|
||||
.where(
|
||||
BuildDecisionBookPayments.build_parts_id == build_parts_id,
|
||||
BuildDecisionBookPayments.payment_types_id == payment_types_id_recv,
|
||||
BuildDecisionBookPayments.build_decision_book_id
|
||||
== build_decision_book_id,
|
||||
extract("year", func.current_date())
|
||||
== extract("year", BuildDecisionBookPayments.process_date),
|
||||
extract("month", func.current_date())
|
||||
== extract("month", BuildDecisionBookPayments.process_date),
|
||||
)
|
||||
.group_by(BuildDecisionBookPayments.payment_plan_time_periods)
|
||||
)
|
||||
|
||||
# Combine the subqueries using union_all
|
||||
combined_subquery = union_all(debit_subquery, recv_subquery).alias("AA")
|
||||
|
||||
# Final query
|
||||
final_query = select(
|
||||
combined_subquery.c.payment_plan_time_periods,
|
||||
func.sum(combined_subquery.c.debit).label("debit"),
|
||||
func.sum(combined_subquery.c.recv).label("recv"),
|
||||
combined_subquery.c.ls.label("Last Seen"),
|
||||
).group_by(
|
||||
combined_subquery.c.payment_plan_time_periods, combined_subquery.c.ls
|
||||
)
|
||||
|
||||
# Execute the query
|
||||
book_payments_month = BuildDecisionBookPayments.session.execute(
|
||||
final_query
|
||||
).fetchall()
|
||||
print("book_payments_month", book_payments_month)
|
||||
|
||||
debit_subquery = (
|
||||
select(
|
||||
BuildDecisionBookPayments.payment_plan_time_periods,
|
||||
func.sum(BuildDecisionBookPayments.payment_amount).label("debit"),
|
||||
func.cast(0, Integer).label("recv"),
|
||||
func.max(BuildDecisionBookPayments.process_date).label("ls"),
|
||||
)
|
||||
.where(
|
||||
BuildDecisionBookPayments.build_parts_id == build_parts_id,
|
||||
BuildDecisionBookPayments.payment_types_id == payment_types_id_deb,
|
||||
BuildDecisionBookPayments.build_decision_book_id
|
||||
== build_decision_book_id,
|
||||
)
|
||||
.group_by(BuildDecisionBookPayments.payment_plan_time_periods)
|
||||
)
|
||||
|
||||
recv_subquery = (
|
||||
select(
|
||||
BuildDecisionBookPayments.payment_plan_time_periods,
|
||||
func.cast(0, Integer).label("debit"),
|
||||
func.sum(BuildDecisionBookPayments.payment_amount).label("recv"),
|
||||
func.max(BuildDecisionBookPayments.process_date).label("ls"),
|
||||
)
|
||||
.where(
|
||||
BuildDecisionBookPayments.build_parts_id == build_parts_id,
|
||||
BuildDecisionBookPayments.payment_types_id == payment_types_id_recv,
|
||||
BuildDecisionBookPayments.build_decision_book_id
|
||||
== build_decision_book_id,
|
||||
)
|
||||
.group_by(BuildDecisionBookPayments.payment_plan_time_periods)
|
||||
)
|
||||
|
||||
# Combine the subqueries using union_all
|
||||
combined_subquery = union_all(debit_subquery, recv_subquery).alias("AA")
|
||||
|
||||
# Final query
|
||||
final_query = select(
|
||||
combined_subquery.c.payment_plan_time_periods,
|
||||
func.sum(combined_subquery.c.debit).label("debit"),
|
||||
func.sum(combined_subquery.c.recv).label("recv"),
|
||||
combined_subquery.c.ls.label("Last Seen"),
|
||||
).group_by(
|
||||
combined_subquery.c.payment_plan_time_periods, combined_subquery.c.ls
|
||||
)
|
||||
|
||||
# Execute the query
|
||||
book_payments = BuildDecisionBookPayments.session.execute(
|
||||
final_query
|
||||
).fetchall()
|
||||
print("book_payments", book_payments)
|
||||
return AlchemyJsonResponse(
|
||||
completed=True,
|
||||
message="Building Parts Records are listed",
|
||||
result=[book_payments, book_payments_month],
|
||||
cls_object=BuildParts,
|
||||
response_model=BuildPartsListResponse,
|
||||
filter_attributes=list_options,
|
||||
)
|
||||
@@ -1,6 +1,6 @@
|
||||
from typing import Union
|
||||
|
||||
from api_library.date_time_actions.date_functions import system_arrow
|
||||
from api_library.date_time_actions.date_functions import system_arrow, client_arrow
|
||||
from databases import (
|
||||
BuildDecisionBookProjects,
|
||||
BuildDecisionBookProjectPerson,
|
||||
@@ -32,6 +32,7 @@ class ProjectDecisionBookListEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"96459b36-37f2-4d5b-8370-c459058d5bce": "project_decision_book_list",
|
||||
}
|
||||
__event_validation__ = {"96459b36-37f2-4d5b-8370-c459058d5bce": None}
|
||||
|
||||
@classmethod
|
||||
def project_decision_book_list(
|
||||
@@ -76,6 +77,9 @@ class ProjectDecisionBookCreateEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"b8e44bb2-f157-4dd5-8a24-0e02db4877c9": "project_decision_book_create",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"b8e44bb2-f157-4dd5-8a24-0e02db4877c9": InsertBuildDecisionBookProjects
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def project_decision_book_create(
|
||||
@@ -132,6 +136,9 @@ class ProjectDecisionBookUpdateEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"bfe3ef13-030f-495f-b692-94bcb746d700": "project_decision_book_update",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"bfe3ef13-030f-495f-b692-94bcb746d700": UpdateBuildDecisionBookProjects
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def project_decision_book_update(
|
||||
@@ -209,6 +216,9 @@ class ProjectDecisionBookApprovalEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"a83a83fe-8446-4c60-9ae5-d1c06adbf626": "project_decision_book_approval",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"a83a83fe-8446-4c60-9ae5-d1c06adbf626": ApprovalsBuildDecisionBookProjects
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def project_decision_book_approval(
|
||||
@@ -216,6 +226,8 @@ class ProjectDecisionBookApprovalEventMethods(MethodToEvent):
|
||||
data: ApprovalsBuildDecisionBookProjects,
|
||||
token_dict: Union[EmployeeTokenObject, OccupantTokenObject],
|
||||
):
|
||||
BuildDecisionBookPayments.client_arrow = client_arrow
|
||||
BuildDecisionBookPayments.client_arrow.timezone = token_dict.timezone or "GMT+3"
|
||||
if isinstance(token_dict, EmployeeTokenObject):
|
||||
raise BuildDecisionBookProjects.raise_http_exception(
|
||||
status_code="HTTP_403_FORBIDDEN",
|
||||
@@ -260,6 +272,7 @@ class ProjectDecisionBookApprovalEventMethods(MethodToEvent):
|
||||
message="This project is not allowed for this occupant",
|
||||
data={},
|
||||
)
|
||||
|
||||
decision_book_project = BuildDecisionBookProjects.filter_one(
|
||||
BuildDecisionBookProjects.id
|
||||
== decision_book_project_person.build_decision_book_project_id,
|
||||
@@ -306,12 +319,15 @@ class ProjectDecisionBookApprovalEventMethods(MethodToEvent):
|
||||
process_date=str(local_date),
|
||||
process_date_m=int(local_date.month),
|
||||
process_date_y=int(local_date.year),
|
||||
payment_plan_time_periods=str(
|
||||
decision_book_project.project_type
|
||||
),
|
||||
period_time=f"{local_date.year}-{str(local_date.month).zfill(2)}",
|
||||
decision_book_project_id=decision_book_project.id,
|
||||
decision_book_project_uu_id=str(decision_book_project.uu_id),
|
||||
**book_payment_dict,
|
||||
)
|
||||
created_book_payment.save_and_confirm()
|
||||
return
|
||||
|
||||
updated_decision_book_project = decision_book_project.update(**data_dict)
|
||||
updated_decision_book_project.save_and_confirm()
|
||||
return AlchemyJsonResponse(
|
||||
@@ -328,6 +344,7 @@ class ProjectDecisionBookPatchEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"444d67a0-b3a8-4ca2-9d8d-f1acc75011e0": "project_decision_book_patch",
|
||||
}
|
||||
__event_validation__ = {"444d67a0-b3a8-4ca2-9d8d-f1acc75011e0": None}
|
||||
|
||||
@classmethod
|
||||
def project_decision_book_patch(
|
||||
|
||||
@@ -23,6 +23,7 @@ class BuildDecisionBookProjectItemsListEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"ce3630e4-2bf9-4433-bdab-1ee72117e54b": "build_decision_book_project_items_list",
|
||||
}
|
||||
__event_validation__ = {"ce3630e4-2bf9-4433-bdab-1ee72117e54b": None}
|
||||
|
||||
@staticmethod
|
||||
def build_decision_book_project_items_list(
|
||||
@@ -45,11 +46,14 @@ class BuildDecisionBookProjectItemsCreateEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"b27e4fd0-6e3e-441b-9b33-806ac7082444": "build_decision_book_project_items_create",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"b27e4fd0-6e3e-441b-9b33-806ac7082444": InsertBuildDecisionBookProjectItems,
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def build_decision_book_project_items_create(
|
||||
token_dict: Union[EmployeeTokenObject, OccupantTokenObject],
|
||||
data: InsertBuildDecisionBookProjectItems,
|
||||
token_dict: Union[EmployeeTokenObject, OccupantTokenObject],
|
||||
):
|
||||
if isinstance(token_dict, EmployeeTokenObject):
|
||||
raise BuildDecisionBookProjectItems.raise_http_exception(
|
||||
@@ -60,8 +64,10 @@ class BuildDecisionBookProjectItemsCreateEventMethods(MethodToEvent):
|
||||
)
|
||||
elif isinstance(token_dict, OccupantTokenObject):
|
||||
book_project = BuildDecisionBookProjects.filter_one(
|
||||
BuildDecisionBookProjects.uu_id == data.build_decision_book_project_uu_id,
|
||||
BuildDecisionBookProjects.project_response_living_space_id == token_dict.selected_occupant.living_space_id
|
||||
BuildDecisionBookProjects.uu_id
|
||||
== data.build_decision_book_project_uu_id,
|
||||
BuildDecisionBookProjects.project_response_living_space_id
|
||||
== token_dict.selected_occupant.living_space_id,
|
||||
).data
|
||||
if not book_project:
|
||||
raise BuildDecisionBookProjectItems.raise_http_exception(
|
||||
@@ -72,7 +78,9 @@ class BuildDecisionBookProjectItemsCreateEventMethods(MethodToEvent):
|
||||
)
|
||||
data_dict = data.excluded_dump()
|
||||
data_dict["build_decision_book_project_id"] = book_project.id
|
||||
created_project_item = BuildDecisionBookProjectItems.find_or_create(**data_dict)
|
||||
created_project_item = BuildDecisionBookProjectItems.find_or_create(
|
||||
**data_dict
|
||||
)
|
||||
created_project_item.save_and_confirm()
|
||||
return AlchemyJsonResponse(
|
||||
message="Build Decision Book Project Items Create",
|
||||
@@ -86,6 +94,9 @@ class BuildDecisionBookProjectItemsUpdateEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"b2b7cdce-9a0c-4722-90ff-8bef36b4ec6b": "build_decision_book_project_items_update",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"b2b7cdce-9a0c-4722-90ff-8bef36b4ec6b": UpdateBuildDecisionBookProjectItems
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def build_decision_book_project_items_update(
|
||||
@@ -106,6 +117,7 @@ class BuildDecisionBookProjectItemsPatchEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"e59d50df-dd22-4823-aeae-b9490332885c": "build_decision_book_project_items_patch",
|
||||
}
|
||||
__event_validation__ = {"e59d50df-dd22-4823-aeae-b9490332885c": None}
|
||||
|
||||
@staticmethod
|
||||
def build_decision_book_project_items_patch(
|
||||
|
||||
@@ -23,6 +23,7 @@ class ProjectDecisionBookPersonListEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"7101b5ca-8bef-40f9-8b4d-646d9994e18f": "project_decision_book_person_list",
|
||||
}
|
||||
__event_validation__ = {"7101b5ca-8bef-40f9-8b4d-646d9994e18f": None}
|
||||
|
||||
@classmethod
|
||||
def project_decision_book_person_list(
|
||||
@@ -64,6 +65,9 @@ class ProjectDecisionBookPersonCreateEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"9c88e314-84e8-435e-8c1e-6a5aae80b2e6": "project_decision_book_person_create",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"9c88e314-84e8-435e-8c1e-6a5aae80b2e6": InsertBuildDecisionBookProjectPerson
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def project_decision_book_create(
|
||||
@@ -99,6 +103,9 @@ class ProjectDecisionBookPersonUpdateEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"7fbd18a0-c099-4494-ada1-bb23e39bb141": "project_decision_book_update_person",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"7fbd18a0-c099-4494-ada1-bb23e39bb141": UpdateBuildDecisionBookProjectPerson
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def project_decision_book_update(
|
||||
@@ -135,6 +142,7 @@ class ProjectDecisionBookPersonPatchEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"a122e84a-5556-4bf7-b680-1f47c438d4f7": "project_decision_book_person_patch",
|
||||
}
|
||||
__event_validation__ = {"a122e84a-5556-4bf7-b680-1f47c438d4f7": None}
|
||||
|
||||
@classmethod
|
||||
def project_decision_book_patch(
|
||||
|
||||
@@ -15,44 +15,80 @@ from api_events.events.events.events_bind_services import (
|
||||
)
|
||||
from api_library.date_time_actions.date_functions import system_arrow
|
||||
from api_validations.core_response import AlchemyJsonResponse
|
||||
from databases.sql_models.company.employee import Employees
|
||||
from databases.sql_models.event.event import Event2Occupant, Event2Employee
|
||||
|
||||
|
||||
class ModulesBindOccupantEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "UPDATE"
|
||||
__event_keys__ = {
|
||||
"": "modules_bind_occupant",
|
||||
"91003e90-8ead-4705-98a3-f8731c6ecb38": "modules_bind_occupant",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"91003e90-8ead-4705-98a3-f8731c6ecb38": None,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def modules_bind_occupant_system(
|
||||
cls, build_living_space_id: int, modules_id: int, expires_at: str = None
|
||||
def bind_default_module_for_first_init_occupant(
|
||||
cls, build_living_space_id: int, expires_at: str = None
|
||||
):
|
||||
|
||||
living_space = BuildLivingSpace.filter_one(
|
||||
BuildLivingSpace.id == build_living_space_id,
|
||||
BuildLivingSpace.id == build_living_space_id, system=True
|
||||
).data
|
||||
modules = Modules.filter_one(Modules.id == modules_id).data
|
||||
|
||||
modules = Modules.filter_all(Modules.is_default_module == True).data
|
||||
print("living_space", living_space, "modules", modules)
|
||||
if not living_space or not modules:
|
||||
print(f"Giving living Space or Modules: {modules.module_name} not found")
|
||||
print(f"Giving living Space or Modules: Default not found")
|
||||
return
|
||||
service_build_dict = dict(build_living_space_id=living_space.id)
|
||||
if expires_at:
|
||||
service_build_dict["expires_at"] = str(system_arrow.get(expires_at))
|
||||
else:
|
||||
service_build_dict["expires_at"] = str(
|
||||
system_arrow.get(living_space.expiry_ends)
|
||||
)
|
||||
expiry_ends = str(system_arrow.get(living_space.expiry_ends))
|
||||
service_build_dict["expires_at"] = expiry_ends
|
||||
|
||||
for service in modules.retrieve_services():
|
||||
ServiceBindOccupantEventMethods.bind_services_occupant_system(
|
||||
**service_build_dict,
|
||||
service_id=service.id,
|
||||
)
|
||||
BuildLivingSpace.save()
|
||||
for module in modules:
|
||||
for service in module.retrieve_services():
|
||||
event_occupant = Event2Occupant.find_or_create(
|
||||
event_service_id=service.id,
|
||||
event_service_uu_id=str(service.uu_id),
|
||||
build_living_space_id=living_space.id,
|
||||
build_living_space_uu_id=str(living_space.uu_id),
|
||||
)
|
||||
event_occupant.save_and_confirm()
|
||||
return True
|
||||
|
||||
# @classmethod
|
||||
# def modules_bind_occupant_system(
|
||||
# cls, build_living_space_id: int, modules_id: int, expires_at: str = None
|
||||
# ):
|
||||
#
|
||||
# living_space = BuildLivingSpace.filter_one(
|
||||
# BuildLivingSpace.id == build_living_space_id,
|
||||
# ).data
|
||||
# modules = Modules.filter_one(Modules.id == modules_id).data
|
||||
#
|
||||
# if not living_space or not modules:
|
||||
# print(f"Giving living Space or Modules: {modules.module_name} not found")
|
||||
# return
|
||||
# service_build_dict = dict(build_living_space_id=living_space.id)
|
||||
# if expires_at:
|
||||
# service_build_dict["expires_at"] = str(system_arrow.get(expires_at))
|
||||
# else:
|
||||
# service_build_dict["expires_at"] = str(
|
||||
# system_arrow.get(living_space.expiry_ends)
|
||||
# )
|
||||
#
|
||||
# for service in modules.retrieve_services():
|
||||
# ServiceBindOccupantEventMethods.bind_services_occupant_system(
|
||||
# **service_build_dict,
|
||||
# service_id=service.id,
|
||||
# )
|
||||
# BuildLivingSpace.save()
|
||||
# return True
|
||||
|
||||
@classmethod
|
||||
def modules_bind_occupant(
|
||||
cls,
|
||||
@@ -67,8 +103,42 @@ class ModulesBindEmployeeEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "UPDATE"
|
||||
__event_keys__ = {
|
||||
"": "modules_bind_employee",
|
||||
"d4ed23db-62e9-4015-b7c0-698a7917aa0c": "modules_bind_employee",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"d4ed23db-62e9-4015-b7c0-698a7917aa0c": None,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def bind_default_module_for_first_init_occupant(
|
||||
cls, employee_id: int, expires_at: str = None
|
||||
):
|
||||
|
||||
employee = Employees.filter_one(
|
||||
Employees.id == employee_id,
|
||||
).data
|
||||
modules = Modules.filter_all(Modules.is_default_module == True).data
|
||||
print("living_space", employee, "modules", modules)
|
||||
if not employee or not modules:
|
||||
print(f"Giving living Space or Modules: Default not found")
|
||||
return
|
||||
service_build_dict = dict(build_living_space_id=employee.id)
|
||||
if expires_at:
|
||||
service_build_dict["expires_at"] = str(system_arrow.get(expires_at))
|
||||
else:
|
||||
expiry_ends = str(system_arrow.get(employee.expiry_ends))
|
||||
service_build_dict["expires_at"] = expiry_ends
|
||||
|
||||
for module in modules:
|
||||
for service in module.retrieve_services():
|
||||
event_employee = Event2Employee.find_or_create(
|
||||
event_service_id=service.id,
|
||||
event_service_uu_id=str(service.uu_id),
|
||||
employee_id=employee.id,
|
||||
employee_uu_id=str(employee.uu_id),
|
||||
)
|
||||
event_employee.save_and_confirm()
|
||||
return True
|
||||
|
||||
@classmethod
|
||||
def modules_bind_employee(
|
||||
|
||||
@@ -4,12 +4,12 @@ from fastapi import status
|
||||
from fastapi.responses import JSONResponse
|
||||
from fastapi.exceptions import HTTPException
|
||||
|
||||
from api_library.date_time_actions.date_functions import system_arrow
|
||||
from databases import (
|
||||
Modules,
|
||||
Employees,
|
||||
BuildParts,
|
||||
BuildLivingSpace,
|
||||
Service2Events,
|
||||
Services,
|
||||
OccupantTypes,
|
||||
Event2Employee,
|
||||
@@ -29,49 +29,40 @@ class ServiceBindOccupantEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"0d2bc5c9-d4b1-4951-8305-69da4a687fdc": "bind_services_occupant",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"0d2bc5c9-d4b1-4951-8305-69da4a687fdc": RegisterServices2Occupant
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def bind_services_occupant_system(
|
||||
cls, build_living_space_id: int, service_id: int, expires_at: str = None
|
||||
):
|
||||
from sqlalchemy.dialects.postgresql import insert
|
||||
|
||||
living_space = BuildLivingSpace.filter_one(
|
||||
BuildLivingSpace.id == build_living_space_id,
|
||||
).data
|
||||
service = Services.filter_one(Services.id == service_id).data
|
||||
add_events_list = Service2Events.filter_all(
|
||||
Service2Events.service_id == service.id,
|
||||
).data
|
||||
if not service:
|
||||
print("Service is not valid. Service can not be binded")
|
||||
return
|
||||
|
||||
if not living_space:
|
||||
print("Living Space is not valid. Service is not binded")
|
||||
return
|
||||
if not add_events_list:
|
||||
print(f"Service has no events registered. Please contact with your manager")
|
||||
return
|
||||
|
||||
event_ids_list = [
|
||||
{
|
||||
"build_living_space_id": living_space.id,
|
||||
"build_living_space_uu_id": str(living_space.uu_id),
|
||||
"event_id": service_event.event_id,
|
||||
"event_uu_id": str(service_event.event_uu_id),
|
||||
"is_confirmed": True,
|
||||
"expiry_ends": str(expires_at) if expires_at else "2099-12-31 03:00:00",
|
||||
}
|
||||
for service_event in add_events_list
|
||||
]
|
||||
if expires_at:
|
||||
expires_at = str(system_arrow.get(expires_at))
|
||||
else:
|
||||
expires_at = str(system_arrow.get(living_space.expiry_ends))
|
||||
|
||||
session_execute = Services.session.execute(
|
||||
insert(Event2Occupant)
|
||||
.values(event_ids_list)
|
||||
.on_conflict_do_nothing(
|
||||
index_elements=["build_living_space_id", "event_id"],
|
||||
)
|
||||
occupants_event = Event2Occupant.find_or_create(
|
||||
event_service_id=service.id,
|
||||
event_service_uu_id=str(service.uu_id),
|
||||
build_living_space_id=living_space.id,
|
||||
build_living_space_uu_id=str(living_space.uu_id),
|
||||
expiry_ends=expires_at,
|
||||
)
|
||||
count_row = session_execute.rowcount
|
||||
print(f"{count_row} events are added to occupant {str(living_space.uu_id)}")
|
||||
Services.save()
|
||||
occupants_event.save_and_confirm()
|
||||
print(f"{service.service_name} is added to occupant {str(living_space.uu_id)}")
|
||||
|
||||
@classmethod
|
||||
def bind_services_occupant(
|
||||
@@ -90,9 +81,7 @@ class ServiceBindOccupantEventMethods(MethodToEvent):
|
||||
occupants_build_part = BuildParts.filter_one(
|
||||
BuildParts.uu_id == data.build_part_uu_id,
|
||||
BuildParts.build_id == token_dict.selected_occupant.build_id,
|
||||
BuildParts.active == True,
|
||||
).data
|
||||
print("occupants_build_part", occupants_build_part)
|
||||
if not occupants_build_part:
|
||||
return JSONResponse(
|
||||
content={
|
||||
@@ -127,14 +116,14 @@ class ServiceBindOccupantEventMethods(MethodToEvent):
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
)
|
||||
|
||||
service_events = Service2Events.filter_all(
|
||||
Service2Events.service_id == service.id,
|
||||
).data
|
||||
if not service_events:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail="Service has no events registered. Please contact with your manager",
|
||||
)
|
||||
# service_events = Service2Events.filter_all(
|
||||
# Service2Events.service_id == service.id,
|
||||
# ).data
|
||||
# if not service_events:
|
||||
# raise HTTPException(
|
||||
# status_code=status.HTTP_404_NOT_FOUND,
|
||||
# detail="Service has no events registered. Please contact with your manager",
|
||||
# )
|
||||
|
||||
living_space = BuildLivingSpace.filter_one(
|
||||
BuildLivingSpace.build_parts_id == occupants_build_part.id,
|
||||
@@ -151,27 +140,27 @@ class ServiceBindOccupantEventMethods(MethodToEvent):
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
)
|
||||
|
||||
event_ids_list = [
|
||||
{
|
||||
"build_living_space_id": living_space.id,
|
||||
"build_living_space_uu_id": str(living_space.uu_id),
|
||||
"event_id": service_event.event_id,
|
||||
"event_uu_id": str(service_event.event_uu_id),
|
||||
"is_confirmed": True,
|
||||
}
|
||||
for service_event in service_events
|
||||
]
|
||||
|
||||
session_execute = Services.session.execute(
|
||||
insert(Event2Occupant)
|
||||
.values(event_ids_list)
|
||||
.on_conflict_do_nothing(
|
||||
index_elements=["employee_id", "event_id"],
|
||||
)
|
||||
)
|
||||
count_row = session_execute.rowcount
|
||||
print(f"{count_row} events are added to employee {str(living_space.uu_id)}")
|
||||
Services.save()
|
||||
# event_ids_list = [
|
||||
# {
|
||||
# "build_living_space_id": living_space.id,
|
||||
# "build_living_space_uu_id": str(living_space.uu_id),
|
||||
# "event_id": service_event.event_id,
|
||||
# "event_uu_id": str(service_event.event_uu_id),
|
||||
# "is_confirmed": True,
|
||||
# }
|
||||
# for service_event in service_events
|
||||
# ]
|
||||
#
|
||||
# session_execute = Services.session.execute(
|
||||
# insert(Event2Occupant)
|
||||
# .values(event_ids_list)
|
||||
# .on_conflict_do_nothing(
|
||||
# index_elements=["employee_id", "event_id"],
|
||||
# )
|
||||
# )
|
||||
# count_row = session_execute.rowcount
|
||||
# print(f"{count_row} events are added to employee {str(living_space.uu_id)}")
|
||||
# Services.save()
|
||||
|
||||
|
||||
class ServiceBindEmployeeEventMethods(MethodToEvent):
|
||||
@@ -180,47 +169,54 @@ class ServiceBindEmployeeEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"50f84023-d8ec-4257-bfce-08ddf077c101": "bind_services_employee_super_user",
|
||||
}
|
||||
__event_validation__ = {"50f84023-d8ec-4257-bfce-08ddf077c101": None}
|
||||
|
||||
@classmethod
|
||||
def bind_services_employee(cls, service_id: int, employee_id: int):
|
||||
from sqlalchemy.dialects.postgresql import insert
|
||||
|
||||
employee = Employees.filter_by_one(
|
||||
id=employee_id, **Employees.valid_record_dict
|
||||
).data
|
||||
service = Services.filter_by_one(
|
||||
id=service_id, **Services.valid_record_dict
|
||||
).data
|
||||
service_events = Service2Events.filter_all(
|
||||
Service2Events.service_id == service.id,
|
||||
).data
|
||||
if not service_events:
|
||||
raise Exception(
|
||||
"Service has no events registered. Please contact with your manager"
|
||||
)
|
||||
if not service:
|
||||
print("Service is not valid. Service can not be binded")
|
||||
return
|
||||
|
||||
event_ids_list = [
|
||||
{
|
||||
"employee_id": employee_id,
|
||||
"employee_uu_id": str(employee.uu_id),
|
||||
"event_id": service_event.event_id,
|
||||
"event_uu_id": str(service_event.event_uu_id),
|
||||
"is_confirmed": True,
|
||||
}
|
||||
for service_event in service_events
|
||||
]
|
||||
if not employee:
|
||||
print("Employee is not valid. Service is not binded")
|
||||
return
|
||||
|
||||
session_execute = Services.session.execute(
|
||||
insert(Event2Employee)
|
||||
.values(event_ids_list)
|
||||
.on_conflict_do_nothing(
|
||||
index_elements=["employee_id", "event_id"],
|
||||
)
|
||||
)
|
||||
count_row = session_execute.rowcount
|
||||
print(f"{count_row} events are added to employee {employee.uu_id}")
|
||||
for service_event in service_events:
|
||||
service_event.save_and_confirm()
|
||||
# service_events = Service2Events.filter_all(
|
||||
# Service2Events.service_id == service.id,
|
||||
# ).data
|
||||
# if not service_events:
|
||||
# raise Exception(
|
||||
# "Service has no events registered. Please contact with your manager"
|
||||
# )
|
||||
|
||||
# event_ids_list = [
|
||||
# {
|
||||
# "employee_id": employee_id,
|
||||
# "employee_uu_id": str(employee.uu_id),
|
||||
# "event_id": service_event.event_id,
|
||||
# "event_uu_id": str(service_event.event_uu_id),
|
||||
# "is_confirmed": True,
|
||||
# }
|
||||
# for service_event in service_events
|
||||
# ]
|
||||
#
|
||||
# session_execute = Services.session.execute(
|
||||
# insert(Event2Employee)
|
||||
# .values(event_ids_list)
|
||||
# .on_conflict_do_nothing(
|
||||
# index_elements=["employee_id", "event_id"],
|
||||
# )
|
||||
# )
|
||||
# count_row = session_execute.rowcount
|
||||
# print(f"{count_row} events are added to employee {employee.uu_id}")
|
||||
# for service_event in service_events:
|
||||
# service_event.save_and_confirm()
|
||||
|
||||
@classmethod
|
||||
def bind_services_employee_super_user(
|
||||
@@ -228,8 +224,6 @@ class ServiceBindEmployeeEventMethods(MethodToEvent):
|
||||
data: RegisterServices2Employee,
|
||||
token_dict: typing.Union[EmployeeTokenObject, OccupantTokenObject],
|
||||
):
|
||||
from sqlalchemy.dialects.postgresql import insert
|
||||
|
||||
if isinstance(token_dict, OccupantTokenObject):
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
@@ -262,52 +256,61 @@ class ServiceBindEmployeeEventMethods(MethodToEvent):
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
)
|
||||
|
||||
service_events = Service2Events.filter_all(
|
||||
Service2Events.service_id == service.id,
|
||||
).data
|
||||
if not service_events:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail="Service has no events registered. Please contact with your manager",
|
||||
)
|
||||
|
||||
event_ids_list = [
|
||||
{
|
||||
"employee_id": employee.id,
|
||||
"employee_uu_id": employee.uu_id,
|
||||
"event_id": service_event.event_id,
|
||||
"event_uu_id": service_event.event_uu_id,
|
||||
"is_confirmed": True,
|
||||
}
|
||||
for service_event in service_events
|
||||
]
|
||||
|
||||
session_execute = Services.session.execute(
|
||||
insert(Event2Employee)
|
||||
.values(event_ids_list)
|
||||
.on_conflict_do_nothing(
|
||||
index_elements=["employee_id", "event_id"],
|
||||
)
|
||||
)
|
||||
count_row = session_execute.rowcount
|
||||
if not count_row:
|
||||
Services.save()
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": False,
|
||||
"message": "No events are added to employee",
|
||||
"data": {},
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
"message": f"{count_row} events are added to employee",
|
||||
"data": {},
|
||||
},
|
||||
status_code=status.HTTP_200_OK,
|
||||
event_of_employee = Event2Employee.find_or_create(
|
||||
event_service_id=service.id,
|
||||
event_service_uu_id=str(service.uu_id),
|
||||
employee_id=employee.id,
|
||||
employee_uu_id=str(employee.uu_id),
|
||||
)
|
||||
event_of_employee.save_and_confirm()
|
||||
print(f"{service.service_name} is added to employee {str(employee.uu_id)}")
|
||||
|
||||
# service_events = Service2Events.filter_all(
|
||||
# Service2Events.service_id == service.id,
|
||||
# ).data
|
||||
# if not service_events:
|
||||
# raise HTTPException(
|
||||
# status_code=status.HTTP_404_NOT_FOUND,
|
||||
# detail="Service has no events registered. Please contact with your manager",
|
||||
# )
|
||||
#
|
||||
# event_ids_list = [
|
||||
# {
|
||||
# "employee_id": employee.id,
|
||||
# "employee_uu_id": employee.uu_id,
|
||||
# "event_id": service_event.event_id,
|
||||
# "event_uu_id": service_event.event_uu_id,
|
||||
# "is_confirmed": True,
|
||||
# }
|
||||
# for service_event in service_events
|
||||
# ]
|
||||
#
|
||||
# session_execute = Services.session.execute(
|
||||
# insert(Event2Employee)
|
||||
# .values(event_ids_list)
|
||||
# .on_conflict_do_nothing(
|
||||
# index_elements=["employee_id", "event_id"],
|
||||
# )
|
||||
# )
|
||||
# count_row = session_execute.rowcount
|
||||
# if not count_row:
|
||||
# Services.save()
|
||||
# return JSONResponse(
|
||||
# content={
|
||||
# "completed": False,
|
||||
# "message": "No events are added to employee",
|
||||
# "data": {},
|
||||
# },
|
||||
# status_code=status.HTTP_200_OK,
|
||||
# )
|
||||
# return JSONResponse(
|
||||
# content={
|
||||
# "completed": True,
|
||||
# "message": f"{count_row} events are added to employee",
|
||||
# "data": {},
|
||||
# },
|
||||
# status_code=status.HTTP_200_OK,
|
||||
# )
|
||||
|
||||
|
||||
ServiceBindOccupantEventMethod = ServiceBindOccupantEventMethods(
|
||||
|
||||
@@ -2,6 +2,7 @@ from typing import Union
|
||||
|
||||
from fastapi.exceptions import HTTPException
|
||||
|
||||
from api_events.events.events.events_services import ServicesEvents
|
||||
from databases import (
|
||||
Events,
|
||||
Employees,
|
||||
@@ -14,7 +15,6 @@ from databases import (
|
||||
from api_validations.validations_request import (
|
||||
RegisterEvents2Employee,
|
||||
RegisterEvents2Occupant,
|
||||
CreateEvents,
|
||||
ListOptions,
|
||||
)
|
||||
|
||||
@@ -29,6 +29,7 @@ class EventsListEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"9fa01bef-c0e8-4fe1-b9ed-2ff1c4f35faa": "events_list",
|
||||
}
|
||||
__event_validation__ = {"9fa01bef-c0e8-4fe1-b9ed-2ff1c4f35faa": None}
|
||||
|
||||
@classmethod
|
||||
def events_list(
|
||||
@@ -36,106 +37,50 @@ class EventsListEventMethods(MethodToEvent):
|
||||
list_options: ListOptions,
|
||||
token_dict: Union[EmployeeTokenObject, OccupantTokenObject],
|
||||
):
|
||||
list_options.page = 1
|
||||
list_options.size = 10000
|
||||
Events.filter_attr = list_options
|
||||
records = Events.filter_all()
|
||||
if isinstance(token_dict, OccupantTokenObject):
|
||||
occupant_events = Event2Occupant.filter_all(
|
||||
Event2Occupant.build_living_space_id
|
||||
== token_dict.selected_occupant.living_space_id
|
||||
).data
|
||||
records = Events.filter_all(
|
||||
Events.id.in_([event.event_id for event in occupant_events])
|
||||
)
|
||||
return AlchemyJsonResponse(
|
||||
completed=True,
|
||||
message="Events are listed successfully",
|
||||
result=records,
|
||||
)
|
||||
elif isinstance(token_dict, EmployeeTokenObject):
|
||||
employee_events = Event2Employee.filter_all(
|
||||
Event2Employee.employee_id == token_dict.selected_company.employee_id
|
||||
).data
|
||||
records = Events.filter_all(
|
||||
Events.id.in_([event.event_id for event in employee_events])
|
||||
)
|
||||
return AlchemyJsonResponse(
|
||||
completed=True,
|
||||
message="Events are listed successfully",
|
||||
result=records,
|
||||
)
|
||||
return AlchemyJsonResponse(
|
||||
completed=True,
|
||||
message="DecisionBook are listed successfully",
|
||||
result=records,
|
||||
completed=False,
|
||||
message="Events are NOT listed successfully",
|
||||
result=[],
|
||||
)
|
||||
|
||||
|
||||
class EventsCreateEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "CREATE"
|
||||
__event_keys__ = {
|
||||
"514a9f8f-e5e5-4e10-9d0b-2de8f461fc1b": "events_create",
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def events_create(cls, data: CreateEvents, token_dict):
|
||||
event = Events.find_or_create(
|
||||
**token_dict.user_creds,
|
||||
event_name=data.event_name,
|
||||
event_description=data.event_description,
|
||||
event_date=data.event_date,
|
||||
event_location=data.event_location,
|
||||
active=True,
|
||||
deleted=False,
|
||||
)
|
||||
Events.save()
|
||||
return {
|
||||
"status": "success",
|
||||
"message": "Event created successfully.",
|
||||
"event": event.uu_id,
|
||||
}
|
||||
|
||||
|
||||
class EventsUpdateEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "UPDATE"
|
||||
__event_keys__ = {
|
||||
"f94e7b79-2369-4840-bf2b-244934ca3136": "events_update",
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def events_update(cls, data: CreateEvents, token_dict):
|
||||
event = Events.filter_by_one(uu_id=data.uu_id, **Events.valid_record_dict).data
|
||||
if not event:
|
||||
raise HTTPException(
|
||||
status_code=404,
|
||||
detail="No event found. Please contact your responsible company.",
|
||||
)
|
||||
event.update(
|
||||
**token_dict.user_creds,
|
||||
event_name=data.event_name,
|
||||
event_description=data.event_description,
|
||||
event_date=data.event_date,
|
||||
event_location=data.event_location,
|
||||
)
|
||||
Events.save()
|
||||
return {
|
||||
"status": "success",
|
||||
"message": "Event updated successfully.",
|
||||
"event": event.uu_id,
|
||||
}
|
||||
|
||||
|
||||
class EventsPatchEventMethods(MethodToEvent):
|
||||
|
||||
event_type = "PATCH"
|
||||
__event_keys__ = {
|
||||
"41944c63-22d3-4866-affd-34bcd49da58b": "events_patch",
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def events_patch(cls, data: CreateEvents, token_dict):
|
||||
event = Events.filter_by_one(uu_id=data.uu_id, **Events.valid_record_dict).data
|
||||
if not event:
|
||||
raise HTTPException(
|
||||
status_code=404,
|
||||
detail="No event found. Please contact your responsible company.",
|
||||
)
|
||||
event.update(
|
||||
**token_dict.user_creds,
|
||||
event_name=data.event_name,
|
||||
event_description=data.event_description,
|
||||
event_date=data.event_date,
|
||||
event_location=data.event_location,
|
||||
)
|
||||
return {
|
||||
"status": "success",
|
||||
"message": "Event patched successfully.",
|
||||
"event": event.uu_id,
|
||||
}
|
||||
|
||||
|
||||
class EventsBindEventToOccupantMethods(MethodToEvent):
|
||||
|
||||
event_type = "UPDATE"
|
||||
__event_keys__ = {
|
||||
"d9aa58aa-37f7-4c27-861d-3105f76f5cdc": "bind_events_employee",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"d9aa58aa-37f7-4c27-861d-3105f76f5cdc": RegisterEvents2Employee
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def bind_events_employee(cls, data: RegisterEvents2Employee, token_dict):
|
||||
@@ -185,6 +130,9 @@ class EventsBindEventToEmployeeMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"8bb4f4fc-b474-427e-90b3-d8681f308bb5": "bind_events_occupant",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"8bb4f4fc-b474-427e-90b3-d8681f308bb5": RegisterEvents2Occupant
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def bind_events_occupant(cls, data: RegisterEvents2Occupant, token_dict):
|
||||
@@ -218,21 +166,105 @@ class EventsBindEventToEmployeeMethods(MethodToEvent):
|
||||
}
|
||||
|
||||
|
||||
EventsListEventMethod = EventsListEventMethods(
|
||||
action=ActionsSchema(endpoint="/event/list")
|
||||
)
|
||||
EventsCreateEventMethod = EventsCreateEventMethods(
|
||||
action=ActionsSchema(endpoint="/event/create")
|
||||
)
|
||||
EventsUpdateEventMethod = EventsUpdateEventMethods(
|
||||
action=ActionsSchema(endpoint="/event/update")
|
||||
)
|
||||
EventsPatchEventMethod = EventsPatchEventMethods(
|
||||
action=ActionsSchema(endpoint="/event/patch")
|
||||
)
|
||||
EventsBindEventToOccupantMethod = EventsBindEventToOccupantMethods(
|
||||
action=ActionsSchema(endpoint="/bind/events/occupant")
|
||||
)
|
||||
EventsBindEventToEmployeeMethod = EventsBindEventToEmployeeMethods(
|
||||
action=ActionsSchema(endpoint="/bind/events/employee")
|
||||
)
|
||||
EventsListEventMethod = EventsListEventMethods(
|
||||
action=ActionsSchema(endpoint="/event/list")
|
||||
)
|
||||
|
||||
# EventsCreateEventMethod = EventsCreateEventMethods(
|
||||
# action=ActionsSchema(endpoint="/event/create")
|
||||
# )
|
||||
# EventsUpdateEventMethod = EventsUpdateEventMethods(
|
||||
# action=ActionsSchema(endpoint="/event/update")
|
||||
# )
|
||||
# EventsPatchEventMethod = EventsPatchEventMethods(
|
||||
# action=ActionsSchema(endpoint="/event/patch")
|
||||
# )
|
||||
#
|
||||
|
||||
# class EventsCreateEventMethods(MethodToEvent):
|
||||
#
|
||||
# event_type = "CREATE"
|
||||
# __event_keys__ = {
|
||||
# "514a9f8f-e5e5-4e10-9d0b-2de8f461fc1b": "events_create",
|
||||
# }
|
||||
#
|
||||
# @classmethod
|
||||
# def events_create(cls, data: CreateEvents, token_dict):
|
||||
# event = Events.find_or_create(
|
||||
# **token_dict.user_creds,
|
||||
# event_name=data.event_name,
|
||||
# event_description=data.event_description,
|
||||
# event_date=data.event_date,
|
||||
# event_location=data.event_location,
|
||||
# active=True,
|
||||
# deleted=False,
|
||||
# )
|
||||
# Events.save()
|
||||
# return {
|
||||
# "status": "success",
|
||||
# "message": "Event created successfully.",
|
||||
# "event": event.uu_id,
|
||||
# }
|
||||
# class EventsUpdateEventMethods(MethodToEvent):
|
||||
#
|
||||
# event_type = "UPDATE"
|
||||
# __event_keys__ = {
|
||||
# "f94e7b79-2369-4840-bf2b-244934ca3136": "events_update",
|
||||
# }
|
||||
#
|
||||
# @classmethod
|
||||
# def events_update(cls, data: CreateEvents, token_dict):
|
||||
# event = Events.filter_by_one(uu_id=data.uu_id, **Events.valid_record_dict).data
|
||||
# if not event:
|
||||
# raise HTTPException(
|
||||
# status_code=404,
|
||||
# detail="No event found. Please contact your responsible company.",
|
||||
# )
|
||||
# event.update(
|
||||
# **token_dict.user_creds,
|
||||
# event_name=data.event_name,
|
||||
# event_description=data.event_description,
|
||||
# event_date=data.event_date,
|
||||
# event_location=data.event_location,
|
||||
# )
|
||||
# Events.save()
|
||||
# return {
|
||||
# "status": "success",
|
||||
# "message": "Event updated successfully.",
|
||||
# "event": event.uu_id,
|
||||
# }
|
||||
#
|
||||
#
|
||||
# class EventsPatchEventMethods(MethodToEvent):
|
||||
#
|
||||
# event_type = "PATCH"
|
||||
# __event_keys__ = {
|
||||
# "41944c63-22d3-4866-affd-34bcd49da58b": "events_patch",
|
||||
# }
|
||||
#
|
||||
# @classmethod
|
||||
# def events_patch(cls, data: CreateEvents, token_dict):
|
||||
# event = Events.filter_by_one(uu_id=data.uu_id, **Events.valid_record_dict).data
|
||||
# if not event:
|
||||
# raise HTTPException(
|
||||
# status_code=404,
|
||||
# detail="No event found. Please contact your responsible company.",
|
||||
# )
|
||||
# event.update(
|
||||
# **token_dict.user_creds,
|
||||
# event_name=data.event_name,
|
||||
# event_description=data.event_description,
|
||||
# event_date=data.event_date,
|
||||
# event_location=data.event_location,
|
||||
# )
|
||||
# return {
|
||||
# "status": "success",
|
||||
# "message": "Event patched successfully.",
|
||||
# "event": event.uu_id,
|
||||
# }
|
||||
|
||||
@@ -3,6 +3,7 @@ from typing import Union
|
||||
from fastapi import status
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
from api_validations.validations_response.people import PeopleListResponse
|
||||
from databases import (
|
||||
Build,
|
||||
People,
|
||||
@@ -25,6 +26,12 @@ class PeopleListEventMethods(MethodToEvent):
|
||||
"25cbbaf8-117a-470f-a844-2cfc70f71dde": "human_resources_users_people_list",
|
||||
"cdf62f06-ec50-40de-b19e-adb3dd34bb95": "people_list_only_occupant_tenant_or_owner",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"0a05f03c-6ed8-4230-a4ff-6e7cf886909b": PeopleListResponse,
|
||||
"b5612538-0445-4a4a-ab13-d2a06037f7a5": None,
|
||||
"25cbbaf8-117a-470f-a844-2cfc70f71dde": None,
|
||||
"cdf62f06-ec50-40de-b19e-adb3dd34bb95": None,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def super_users_people_list(
|
||||
@@ -54,6 +61,9 @@ class PeopleListEventMethods(MethodToEvent):
|
||||
completed=True,
|
||||
message="People are listed successfully",
|
||||
result=records,
|
||||
cls_object=People,
|
||||
filter_attributes=list_options,
|
||||
response_model=PeopleListResponse,
|
||||
)
|
||||
|
||||
@classmethod
|
||||
@@ -92,6 +102,9 @@ class PeopleCreateEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"2d1513f4-44ed-4fa3-84d1-dfbd0eadf9a1": "people_create",
|
||||
}
|
||||
__event_validation__ = {
|
||||
"2d1513f4-44ed-4fa3-84d1-dfbd0eadf9a1": InsertPerson,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def people_create(
|
||||
@@ -121,6 +134,7 @@ class PeopleUpdateEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"e05cf22c-16c4-450b-86c8-417896a26afc": "people_update",
|
||||
}
|
||||
__event_validation__ = {"e05cf22c-16c4-450b-86c8-417896a26afc": UpdateUsers}
|
||||
|
||||
@classmethod
|
||||
def people_update(
|
||||
@@ -175,6 +189,7 @@ class PeoplePatchEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"3ae16d66-090b-4d27-b567-cce1b10a1c3b": "people_patch",
|
||||
}
|
||||
__event_validation__ = {"3ae16d66-090b-4d27-b567-cce1b10a1c3b": None}
|
||||
|
||||
@classmethod
|
||||
def people_patch(cls):
|
||||
@@ -187,6 +202,7 @@ class PeopleDeleteEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"7f84c7a2-a120-4867-90d4-6767a41320db": "people_delete",
|
||||
}
|
||||
__event_validation__ = {"7f84c7a2-a120-4867-90d4-6767a41320db": None}
|
||||
|
||||
|
||||
PeopleListEventMethod = PeopleListEventMethods(
|
||||
|
||||
@@ -3,15 +3,15 @@ import typing
|
||||
from fastapi import status
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
from api_configs import ApiStatic
|
||||
from databases import MongoQueryIdentity, Users, Companies, People
|
||||
from databases.no_sql_models.validations import DomainViaUser
|
||||
|
||||
from api_events.events.abstract_class import MethodToEvent, ActionsSchema
|
||||
|
||||
from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObject
|
||||
from api_validations.core_response import AlchemyJsonResponse
|
||||
|
||||
|
||||
from api_services.email.service import send_email
|
||||
from api_services.templates.password_templates import change_your_password_template
|
||||
from api_validations.validations_request import (
|
||||
InsertUsers,
|
||||
UpdateUsers,
|
||||
@@ -27,6 +27,7 @@ class UserListEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"1483a8a2-d244-4593-b9f8-f1b4bcbefcd5": "user_list",
|
||||
}
|
||||
__event_validation__ = {"1483a8a2-d244-4593-b9f8-f1b4bcbefcd5": None}
|
||||
|
||||
@classmethod
|
||||
def user_list(
|
||||
@@ -71,6 +72,7 @@ class UserCreateEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"8eb50c24-4bdc-4309-9836-f7048daee409": "user_create",
|
||||
}
|
||||
__event_validation__ = {"8eb50c24-4bdc-4309-9836-f7048daee409": InsertUsers}
|
||||
|
||||
@classmethod
|
||||
def user_create(
|
||||
@@ -87,6 +89,21 @@ class UserCreateEventMethods(MethodToEvent):
|
||||
company_uuid=created_user.related_company,
|
||||
)
|
||||
mongo_query_identity.create_domain_via_user(payload=domain_via_user)
|
||||
reset_password_token = created_user.reset_password_token(
|
||||
found_user=created_user
|
||||
)
|
||||
send_email_completed = send_email(
|
||||
subject=f"Dear {created_user.user_tag}, your password has been changed.",
|
||||
receivers=[str(created_user.email)],
|
||||
html=change_your_password_template(
|
||||
user_name=created_user.user_tag,
|
||||
forgot_link=ApiStatic.forgot_link(forgot_key=reset_password_token),
|
||||
),
|
||||
)
|
||||
if not send_email_completed:
|
||||
raise created_user.raise_http_exception(
|
||||
status_code=400, message="Email can not be sent. Try again later"
|
||||
)
|
||||
return JSONResponse(
|
||||
content={
|
||||
"completed": True,
|
||||
@@ -110,6 +127,7 @@ class UserUpdateEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"d08a9470-1eb0-4890-a9e8-b6686239d7e9": "user_update",
|
||||
}
|
||||
__event_validation__ = {"d08a9470-1eb0-4890-a9e8-b6686239d7e9": UpdateUsers}
|
||||
|
||||
@classmethod
|
||||
def user_update(
|
||||
@@ -150,6 +168,7 @@ class UserPatchEventMethods(MethodToEvent):
|
||||
__event_keys__ = {
|
||||
"d26a1a3c-eaeb-4d01-b35b-a5ed714e29c0": "user_patch",
|
||||
}
|
||||
__event_validation__ = {"d26a1a3c-eaeb-4d01-b35b-a5ed714e29c0": None}
|
||||
|
||||
@classmethod
|
||||
def user_patch(cls, data: PatchRecord, user_uu_id: str, token_dict):
|
||||
|
||||
@@ -9,13 +9,13 @@ class AuthDefaultEventBlock(AddEventFunctionality):
|
||||
{"function_code": "cee96b9b-8487-4e9f-aaed-2e8c79687bf9"},
|
||||
{"function_code": "48379bb2-ba81-4d8e-a9dd-58837cfcbf67"},
|
||||
{"function_code": "f09f7c1a-bee6-4e32-8444-962ec8f39091"},
|
||||
{"function_code": "87a15ade-3474-4206-b574-bbf8580cbb14"},
|
||||
{"function_code": "c519f9af-92e1-47b2-abf7-5a3316d075f7"},
|
||||
{"function_code": "8b586848-2fb3-4161-abbe-642157eec7ce"},
|
||||
{"function_code": "5cc22e4e-a0f7-4077-be41-1871feb3dfd1"},
|
||||
{"function_code": "c90f3334-10c9-4181-b5ff-90d98a0287b2"},
|
||||
{"function_code": "e3ca6e24-b9f8-4127-949c-3bfa364e3513"},
|
||||
{"function_code": "c140cd5f-307f-4046-a93e-3ade032a57a7"},
|
||||
{"function_code": "af9e121e-24bb-44ac-a616-471d5754360e"},
|
||||
]
|
||||
|
||||
def __new__(cls, *args, **kwargs):
|
||||
|
||||
@@ -48,10 +48,6 @@ class SuperUserEventBlock(AddEventFunctionality):
|
||||
{"function_code": "ffdc445f-da10-4ce4-9531-d2bdb9a198ae"},
|
||||
{"function_code": "b0e55a7e-af81-468c-b46c-a6b3a6b68d5d"},
|
||||
{"function_code": "1f9c3a9c-e5bd-4dcd-9b9a-3742d7e03a27"},
|
||||
{"function_code": "68b3b5ed-b74c-4a27-820f-3959214e94e9"},
|
||||
{"function_code": "a2271854-6b90-43da-a440-a62b70d90528"},
|
||||
{"function_code": "5ad38a66-1189-451e-babb-77de2d63d757"},
|
||||
{"function_code": "e3876bfe-8847-4dea-ae36-e709f7431930"},
|
||||
{"function_code": "9c251d7d-da70-4d63-a72c-e69c26270442"},
|
||||
{"function_code": "6f1406ac-577d-4f2c-8077-71fff2252c5f"},
|
||||
{"function_code": "88d37b78-1ac4-4513-9d25-090ac3a24f31"},
|
||||
|
||||
@@ -6,6 +6,7 @@ class BuildResident(AddEventFunctionality):
|
||||
related_code = "FL-RES"
|
||||
events = [
|
||||
{"function_code": "bdcba521-0116-441c-ace1-84c5b68c86c7"},
|
||||
{"function_code": "208e6273-17ef-44f0-814a-8098f816b63a"},
|
||||
]
|
||||
|
||||
def __new__(cls, *args, **kwargs):
|
||||
|
||||
@@ -14,7 +14,7 @@ class DateTimeLocal:
|
||||
def find_last_day_of_month(self, date_value):
|
||||
today = self.get(date_value).date()
|
||||
_, last_day = calendar.monthrange(today.year, today.month)
|
||||
return self.get(today.year, today.month, last_day)
|
||||
return self.get(today.year, today.month, last_day, 23, 59, 59).to(self.timezone)
|
||||
|
||||
def find_first_day_of_month(self, date_value):
|
||||
today = self.get(date_value).date()
|
||||
|
||||
@@ -52,14 +52,14 @@ class OccupantToken(BaseModel):
|
||||
build_part_id: int
|
||||
build_part_uuid: str
|
||||
|
||||
reachable_event_list_id: Optional[list] = None # ID list of reachable modules
|
||||
reachable_event_list_uu_id: Optional[list] = None # UUID list of reachable modules
|
||||
|
||||
responsible_company_id: Optional[int] = None
|
||||
responsible_company_uuid: Optional[str] = None
|
||||
responsible_employee_id: Optional[int] = None
|
||||
responsible_employee_uuid: Optional[str] = None
|
||||
|
||||
reachable_event_list_id: Optional[list] = None # ID list of reachable modules
|
||||
# reachable_event_list_uu_id: Optional[list] = None # UUID list of reachable modules
|
||||
|
||||
|
||||
class CompanyToken(BaseModel): # Required Company Object for an employee
|
||||
|
||||
@@ -81,7 +81,7 @@ class CompanyToken(BaseModel): # Required Company Object for an employee
|
||||
bulk_duties_id: int
|
||||
|
||||
reachable_event_list_id: Optional[list] = None # ID list of reachable modules
|
||||
reachable_event_list_uu_id: Optional[list] = None # UUID list of reachable modules
|
||||
# reachable_event_list_uu_id: Optional[list] = None # UUID list of reachable modules
|
||||
|
||||
|
||||
class OccupantTokenObject(ApplicationToken):
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
import re
|
||||
from gc import garbage
|
||||
|
||||
import textdistance
|
||||
from unidecode import unidecode
|
||||
from datetime import datetime
|
||||
@@ -11,6 +14,9 @@ from databases import (
|
||||
from typing import Optional
|
||||
from pydantic import BaseModel
|
||||
|
||||
from databases.sql_models.company.company import Companies
|
||||
from databases.sql_models.identity.identity import People
|
||||
|
||||
|
||||
class InsertBudgetRecord(BaseModel):
|
||||
iban: str
|
||||
@@ -53,153 +59,527 @@ def strip_date_to_valid(date_str):
|
||||
return datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S")
|
||||
|
||||
|
||||
def find_iban_in_comment(iban: str, comment: str):
|
||||
iban_results, iban_count = BuildIbanDescription.filter_by(iban=iban)
|
||||
def find_iban_in_comment(iban: str, comment: str, living_space_dict: dict = None):
|
||||
iban_results = BuildIbanDescription.filter_all(
|
||||
BuildIbanDescription.iban == iban, system=True
|
||||
).data
|
||||
sm_dict_extended, sm_dict_digit = {}, {}
|
||||
# is_reference_build = any(
|
||||
# letter in comment.lower() for letter in ["no", "daire", "nolu"]
|
||||
# )
|
||||
if iban_count:
|
||||
for iban_result in iban_results:
|
||||
candidate_parts = comment.split(" ")
|
||||
extended_candidate_parts, digit_part = [], []
|
||||
for part in candidate_parts:
|
||||
if part.lower() not in ["no", "daire", "nolu"]:
|
||||
extended_candidate_parts.append(part)
|
||||
# if part.isdigit():
|
||||
# digit_part.append(part)
|
||||
if extended_candidate_parts:
|
||||
if all(
|
||||
candidate_part.lower() in comment.lower()
|
||||
for candidate_part in extended_candidate_parts
|
||||
):
|
||||
similarity_ratio = textdistance.jaro_winkler(
|
||||
unidecode(str(iban_result.search_word)), comment
|
||||
)
|
||||
found = False
|
||||
name_list = (
|
||||
unidecode(str(iban_result.search_word))
|
||||
.replace(".", " ")
|
||||
.split(" ")
|
||||
)
|
||||
for name in name_list:
|
||||
if len(name) > 3 and name.lower() in comment.lower():
|
||||
found = True
|
||||
break
|
||||
for iban_result in iban_results or []:
|
||||
candidate_parts = comment.split(" ")
|
||||
extended_candidate_parts, digit_part = [], []
|
||||
for part in candidate_parts:
|
||||
if part.lower() not in ["no", "daire", "nolu"]:
|
||||
extended_candidate_parts.append(part)
|
||||
# if part.isdigit():
|
||||
# digit_part.append(part)
|
||||
if extended_candidate_parts:
|
||||
if all(
|
||||
candidate_part.lower() in comment.lower()
|
||||
for candidate_part in extended_candidate_parts
|
||||
):
|
||||
similarity_ratio = textdistance.jaro_winkler(
|
||||
unidecode(str(iban_result.search_word)), comment
|
||||
)
|
||||
found = False
|
||||
name_list = (
|
||||
unidecode(str(iban_result.search_word)).replace(".", " ").split(" ")
|
||||
)
|
||||
for name in name_list:
|
||||
if len(name) > 3 and name.lower() in comment.lower():
|
||||
found = True
|
||||
break
|
||||
|
||||
if not found:
|
||||
similarity_ratio = 0.1
|
||||
sm_dict_extended[f"{iban_result.id}"] = similarity_ratio
|
||||
|
||||
if sm_dict_extended:
|
||||
result = sorted(
|
||||
sm_dict_extended.items(), key=lambda item: item[1], reverse=True
|
||||
)[0]
|
||||
if float(result[1]) >= 0.5:
|
||||
iban_result = BuildIbanDescription.find_one(id=int(result[0]))
|
||||
return {
|
||||
"decision_book_project_id": iban_result.decision_book_project_id,
|
||||
"company_id": iban_result.company_id,
|
||||
"customer_id": iban_result.customer_id,
|
||||
"build_parts_id": iban_result.build_parts_id,
|
||||
"found_from": "Name",
|
||||
"similarity": result[1],
|
||||
}
|
||||
if not found:
|
||||
similarity_ratio = 0.1
|
||||
sm_dict_extended[f"{iban_result.id}"] = similarity_ratio
|
||||
if sm_dict_extended:
|
||||
result = sorted(
|
||||
sm_dict_extended.items(), key=lambda item: item[1], reverse=True
|
||||
)[0]
|
||||
if float(result[1]) >= 0.5:
|
||||
iban_result = BuildIbanDescription.filter_one(
|
||||
BuildIbanDescription.id == int(result[0]), system=True
|
||||
).data
|
||||
return {
|
||||
# "decision_book_project_id": iban_result.decision_book_project_id,
|
||||
# "build_parts_id": iban_result.build_parts_id,
|
||||
"company_id": iban_result.company_id,
|
||||
"customer_id": iban_result.customer_id,
|
||||
"found_from": "Name",
|
||||
"similarity": result[1],
|
||||
}
|
||||
return {
|
||||
"decision_book_project_id": None,
|
||||
# "decision_book_project_id": None,
|
||||
# "build_parts_id": None,
|
||||
"company_id": None,
|
||||
"customer_id": None,
|
||||
"build_parts_id": None,
|
||||
"found_from": None,
|
||||
"similarity": 0.0,
|
||||
}
|
||||
|
||||
|
||||
def parse_comment_with_name(iban: str, comment: str):
|
||||
if "*" in comment:
|
||||
b_comment, a_comment = (
|
||||
unidecode(str(comment)).split("*")[0],
|
||||
unidecode(str(comment)).split("*")[1],
|
||||
def remove_spaces_from_string(remove_string: str):
|
||||
letter_list = []
|
||||
for letter in remove_string.split(" "):
|
||||
if letter_ := "".join(i for i in letter if not i == " "):
|
||||
letter_list.append(letter_)
|
||||
return " ".join(letter_list).upper()
|
||||
|
||||
|
||||
def get_garbage_words(comment: str, search_word: str):
|
||||
garbage_words = unidecode(remove_spaces_from_string(comment))
|
||||
search_word = unidecode(remove_spaces_from_string(search_word))
|
||||
for word in search_word.split(" "):
|
||||
garbage_words = garbage_words.replace(
|
||||
remove_spaces_from_string(unidecode(word)), ""
|
||||
)
|
||||
a_result = find_iban_in_comment(iban, a_comment)
|
||||
b_result = find_iban_in_comment(iban, b_comment)
|
||||
if a_result["similarity"] > b_result["similarity"]:
|
||||
a_result["send_person_id"] = a_result["customer_id"]
|
||||
return a_result
|
||||
if cleaned_from_spaces := remove_spaces_from_string(garbage_words):
|
||||
return str(unidecode(cleaned_from_spaces)).upper()
|
||||
return None
|
||||
|
||||
|
||||
def remove_garbage_words(comment: str, garbage_word: str):
|
||||
cleaned_comment = remove_spaces_from_string(comment.replace("*", " "))
|
||||
if garbage_word:
|
||||
garbage_word = remove_spaces_from_string(garbage_word.replace("*", " "))
|
||||
for letter in garbage_word.split(" "):
|
||||
cleaned_comment = unidecode(remove_spaces_from_string(cleaned_comment))
|
||||
cleaned_comment = cleaned_comment.replace(
|
||||
remove_spaces_from_string(letter), ""
|
||||
)
|
||||
return str(remove_spaces_from_string(cleaned_comment)).upper()
|
||||
|
||||
|
||||
def check_is_comment_is_build(comment: str):
|
||||
has_build_words = False
|
||||
candidate_parts = remove_spaces_from_string(comment.replace("*", " ")).split(" ")
|
||||
for candidate_part in candidate_parts:
|
||||
candidate_part = remove_spaces_from_string(candidate_part).replace(":", "")
|
||||
for build_word in ["no", "daire", "apt", "apartman"]:
|
||||
if unidecode(candidate_part).upper() in unidecode(build_word).upper():
|
||||
has_build_words = True
|
||||
break
|
||||
return has_build_words
|
||||
|
||||
|
||||
def get_list_of_build_words(comment: str):
|
||||
build_words = []
|
||||
candidate_parts = remove_spaces_from_string(comment.replace("*", " "))
|
||||
for build_word in ["no", "nolu", "daire", "apt", "apartman"]:
|
||||
if unidecode(build_word).upper() in unidecode(candidate_parts).upper():
|
||||
st = unidecode(candidate_parts).upper().index(unidecode(build_word).upper())
|
||||
et = st + len(build_word)
|
||||
st = st - 5 if st > 5 else 0
|
||||
et = et + 5 if et + 5 <= len(candidate_parts) else len(candidate_parts)
|
||||
number_digit = "".join(
|
||||
letter for letter in str(candidate_parts[st:et]) if letter.isdigit()
|
||||
)
|
||||
if number_digit:
|
||||
rt_dict = {
|
||||
"garbage": candidate_parts[st:et],
|
||||
"number": int(number_digit) if number_digit else None,
|
||||
}
|
||||
build_words.append(rt_dict)
|
||||
return build_words
|
||||
|
||||
|
||||
def generate_pattern(word):
|
||||
if len(word) < 1:
|
||||
raise ValueError("The word must have at least 1 character.")
|
||||
add_string, add_match = "\d{1,3}$\s?$", f"{{1, {len(word)}}}"
|
||||
adda_string = "d{1,3}$\s?\^["
|
||||
return adda_string + f"{word}]" + add_match + rf"{word}(?:e|é|ı|i|ğr)?" + add_string
|
||||
|
||||
|
||||
def test_pattern(word, test_cases): # Generate the pattern
|
||||
pattern = generate_pattern(word)
|
||||
for test in test_cases: # Test the regex pattern on each input and print results
|
||||
if re.match(pattern, test, re.IGNORECASE):
|
||||
print(f"'{test}' matches the pattern.", "*" * 60)
|
||||
else:
|
||||
b_result["send_person_id"] = None
|
||||
return b_result
|
||||
else:
|
||||
result = find_iban_in_comment(iban, comment)
|
||||
result["send_person_id"] = result.get("customer_id", None)
|
||||
return result
|
||||
print(f"'{test}' does NOT match the pattern.")
|
||||
|
||||
|
||||
def wag_insert_budget_record(data):
|
||||
similarity_result = parse_comment_with_name(data["iban"], data["process_comment"])
|
||||
build_iban = BuildIbans.find_one(iban=data["iban"])
|
||||
def parse_comment_for_living_space(
|
||||
iban: str, comment: str, living_space_dict: dict = None
|
||||
):
|
||||
comment = unidecode(comment)
|
||||
best_similarity = dict(
|
||||
company=None,
|
||||
living_space=None,
|
||||
found_from=None,
|
||||
similarity=0.0,
|
||||
garbage="",
|
||||
cleaned="",
|
||||
)
|
||||
if not iban in living_space_dict:
|
||||
return best_similarity
|
||||
for person in living_space_dict[iban]["people"]:
|
||||
person: People = person
|
||||
first_name = unidecode(person.firstname).upper()
|
||||
last_name = unidecode(person.surname).upper()
|
||||
search_word_list = [
|
||||
remove_spaces_from_string("".join([f"{first_name} {last_name}"])),
|
||||
remove_spaces_from_string("".join([f"{last_name} {first_name}"])),
|
||||
]
|
||||
if middle_name := unidecode(person.middle_name).upper():
|
||||
search_word_list.append(
|
||||
remove_spaces_from_string(f"{first_name} {middle_name} {last_name}")
|
||||
)
|
||||
search_word_list.append(
|
||||
remove_spaces_from_string(f"{last_name} {middle_name} {first_name}")
|
||||
)
|
||||
|
||||
if payload := InsertBudgetRecord(**data):
|
||||
payload_dict = payload.model_dump(exclude_unset=True, exclude_none=True)
|
||||
decision_books = BuildDecisionBook.select_only(
|
||||
BuildDecisionBook.period_start_date
|
||||
< strip_date_to_valid(payload_dict["bank_date"]),
|
||||
BuildDecisionBook.period_stop_date
|
||||
> strip_date_to_valid(payload_dict["bank_date"]),
|
||||
select_args=[BuildDecisionBook.id],
|
||||
order_by=[BuildDecisionBook.expiry_ends.desc()],
|
||||
)
|
||||
payload_dict["build_id"] = getattr(
|
||||
BuildIbans.find_one(iban=data["iban"]), "build_id", None
|
||||
)
|
||||
living_space, count = BuildLivingSpace.find_living_from_customer_id(
|
||||
similarity_result.get("customer_id", None),
|
||||
strip_date_to_valid(payload_dict["bank_date"]),
|
||||
)
|
||||
# living_space, count = BuildLivingSpace.filter(
|
||||
# or_(
|
||||
# BuildLivingSpace.owner_person_id
|
||||
# == similarity_result.get("customer_id", None),
|
||||
# BuildLivingSpace.life_person_id
|
||||
# == similarity_result.get("customer_id", None),
|
||||
# ),
|
||||
# BuildLivingSpace.start_date
|
||||
# < strip_date_to_valid(payload_dict["bank_date"]) - timedelta(days=30),
|
||||
# BuildLivingSpace.stop_date
|
||||
# > strip_date_to_valid(payload_dict["bank_date"]) + timedelta(days=30),
|
||||
# BuildLivingSpace.active == True,
|
||||
# BuildLivingSpace.deleted == False,
|
||||
# )
|
||||
payload_dict["build_decision_book_id"] = (
|
||||
decision_books[0][0].id if decision_books else None
|
||||
)
|
||||
payload_dict["company_id"] = similarity_result.get("company_id", None)
|
||||
payload_dict["customer_id"] = similarity_result.get("customer_id", None)
|
||||
payload_dict["send_person_id"] = similarity_result.get("send_person_id", None)
|
||||
cleaned_comment = unidecode(comment).upper()
|
||||
for search_word in search_word_list:
|
||||
garbage_words = get_garbage_words(comment, unidecode(search_word))
|
||||
if garbage_words:
|
||||
garbage_words = unidecode(garbage_words).upper()
|
||||
cleaned_comment = unidecode(
|
||||
remove_garbage_words(comment, garbage_words)
|
||||
).upper()
|
||||
similarity_ratio = textdistance.jaro_winkler(
|
||||
cleaned_comment, str(search_word).upper()
|
||||
)
|
||||
if len(cleaned_comment) < len(f"{first_name}{last_name}"):
|
||||
continue
|
||||
if cleaned_comment and 0.9 < similarity_ratio <= 1:
|
||||
print(
|
||||
"cleaned comment dict",
|
||||
dict(
|
||||
garbage=garbage_words,
|
||||
cleaned=cleaned_comment,
|
||||
similarity=similarity_ratio,
|
||||
search_word=search_word,
|
||||
comment=comment,
|
||||
last_similarity=float(best_similarity["similarity"]),
|
||||
),
|
||||
)
|
||||
if similarity_ratio > float(best_similarity["similarity"]):
|
||||
for living_space in living_space_dict[iban]["living_space"]:
|
||||
if living_space.person_id == person.id:
|
||||
best_similarity = {
|
||||
"company": None,
|
||||
"living_space": living_space,
|
||||
"found_from": "Person Name",
|
||||
"similarity": similarity_ratio,
|
||||
"garbage": garbage_words,
|
||||
"cleaned": cleaned_comment,
|
||||
}
|
||||
return best_similarity
|
||||
|
||||
payload_dict["build_parts_id"] = (
|
||||
living_space[0].build_parts_id if living_space else None
|
||||
)
|
||||
|
||||
payload_dict["bank_date_y"] = strip_date_to_valid(
|
||||
payload_dict["bank_date"]
|
||||
).year
|
||||
payload_dict["bank_date_m"] = strip_date_to_valid(
|
||||
payload_dict["bank_date"]
|
||||
).month
|
||||
payload_dict["bank_date_d"] = strip_date_to_valid(payload_dict["bank_date"]).day
|
||||
payload_dict["bank_date_w"] = strip_date_to_valid(
|
||||
payload_dict["bank_date"]
|
||||
).isocalendar()[2]
|
||||
payload_dict["build_id"] = build_iban.build_id if build_iban else None
|
||||
payload_dict["replication_id"] = 55
|
||||
payload_dict["receive_debit"] = (
|
||||
"R" if payload_dict["currency_value"] < 0 else "D"
|
||||
def parse_comment_for_build_parts(
|
||||
comment: str, max_build_part: int = 200, parse: str = "DAIRE"
|
||||
):
|
||||
from regex_func import category_finder
|
||||
|
||||
results, results_list = category_finder(comment), []
|
||||
print("results[parse]", results[parse])
|
||||
for result in results[parse] or []:
|
||||
if digits := "".join([letter for letter in str(result) if letter.isdigit()]):
|
||||
print("digits", digits)
|
||||
if int(digits) <= int(max_build_part):
|
||||
results_list.append(int(digits))
|
||||
return results_list or None
|
||||
|
||||
|
||||
def parse_comment_for_company_or_individual(comment: str):
|
||||
companies_list = Companies.filter_all(
|
||||
Companies.commercial_type != "Commercial", system=True
|
||||
).data
|
||||
comment = unidecode(comment)
|
||||
best_similarity = dict(
|
||||
company=None,
|
||||
living_space=None,
|
||||
found_from=None,
|
||||
similarity=0.0,
|
||||
garbage="",
|
||||
cleaned="",
|
||||
)
|
||||
for company in companies_list:
|
||||
search_word = unidecode(company.public_name)
|
||||
garbage_words = get_garbage_words(comment, search_word)
|
||||
cleaned_comment = remove_garbage_words(comment, garbage_words)
|
||||
similarity_ratio = textdistance.jaro_winkler(cleaned_comment, search_word)
|
||||
if similarity_ratio > float(best_similarity["similarity"]):
|
||||
best_similarity = {
|
||||
"company": company,
|
||||
"living_space": None,
|
||||
"found_from": "Customer Public Name",
|
||||
"similarity": similarity_ratio,
|
||||
"garbage": garbage_words,
|
||||
"cleaned": cleaned_comment,
|
||||
}
|
||||
# print(
|
||||
# 'cleaned_comment', cleaned_comment, '\n'
|
||||
# 'search_word', search_word, '\n'
|
||||
# 'best_similarity', best_similarity, '\n'
|
||||
# 'company name', company.public_name, '\n'
|
||||
# 'similarity_ratio', similarity_ratio, '\n'
|
||||
# 'garbage_words', garbage_words
|
||||
# )
|
||||
return best_similarity
|
||||
|
||||
|
||||
def parse_comment_to_split_with_star(account_record: AccountRecords):
|
||||
if "*" in account_record.process_comment:
|
||||
process_comment = str(account_record.process_comment.replace("**", "*"))
|
||||
process_comments = process_comment.split("*")
|
||||
return len(process_comments), *process_comments
|
||||
return 1, account_record.process_comment
|
||||
|
||||
|
||||
def check_build_living_space_matches_with_build_parts(
|
||||
living_space_dict: dict, best_similarity: dict, iban: str, whole_comment: str
|
||||
):
|
||||
if 0.6 < float(best_similarity["similarity"]) < 0.8:
|
||||
build_parts = living_space_dict[iban]["build_parts"]
|
||||
if best_similarity["living_space"]:
|
||||
build_parts_id = best_similarity["living_space"].build_parts_id
|
||||
parser_dict = dict(
|
||||
comment=str(whole_comment), max_build_part=len(build_parts)
|
||||
)
|
||||
print("build parts similarity", best_similarity, "parser_dict", parser_dict)
|
||||
results_list = parse_comment_for_build_parts(**parser_dict)
|
||||
print("results_list", results_list)
|
||||
if not results_list:
|
||||
return best_similarity
|
||||
for build_part in build_parts:
|
||||
print("part_no", int(build_part.part_no), " | ", results_list)
|
||||
print("build_part", int(build_part.id), int(build_parts_id))
|
||||
print("cond", int(build_part.id) == int(build_parts_id))
|
||||
print("cond2", int(build_part.part_no) in results_list)
|
||||
if (
|
||||
int(build_part.id) == int(build_parts_id)
|
||||
and int(build_part.part_no) in results_list
|
||||
):
|
||||
similarity = float(best_similarity["similarity"])
|
||||
best_similarity["similarity"] = (1 - similarity) / 2 + similarity
|
||||
print("similarity", best_similarity["similarity"])
|
||||
break
|
||||
return best_similarity
|
||||
|
||||
|
||||
def parse_comment_with_name(
|
||||
account_record: AccountRecords, living_space_dict: dict = None
|
||||
):
|
||||
comments = parse_comment_to_split_with_star(account_record=account_record)
|
||||
best_similarity = {"similarity": 0.0}
|
||||
comments_list, comments_length = comments[1:], int(comments[0])
|
||||
print("comments_list", comments_list, "comments_length", comments_length)
|
||||
if (
|
||||
int(account_record.currency_value) > 0
|
||||
): # Build receive money from living space people
|
||||
living_space_matches = dict(
|
||||
living_space_dict=living_space_dict,
|
||||
iban=account_record.iban,
|
||||
whole_comment=account_record.process_comment,
|
||||
)
|
||||
data, found = AccountRecords.find_or_create(
|
||||
**payload_dict,
|
||||
found_from=similarity_result.get("found_from", None),
|
||||
similarity=similarity_result.get("similarity", 0.0),
|
||||
if comments_length == 1:
|
||||
best_similarity = parse_comment_for_living_space(
|
||||
iban=account_record.iban,
|
||||
comment=comments_list[0],
|
||||
living_space_dict=living_space_dict,
|
||||
)
|
||||
best_similarity["send_person_id"] = best_similarity.get("customer_id", None)
|
||||
living_space_matches["best_similarity"] = best_similarity
|
||||
# if 0.5 < float(best_similarity['similarity']) < 0.8
|
||||
best_similarity = check_build_living_space_matches_with_build_parts(
|
||||
**living_space_matches
|
||||
)
|
||||
return best_similarity
|
||||
for comment in comments_list:
|
||||
similarity_result = parse_comment_for_living_space(
|
||||
iban=account_record.iban,
|
||||
comment=comment,
|
||||
living_space_dict=living_space_dict,
|
||||
)
|
||||
if float(similarity_result["similarity"]) > float(
|
||||
best_similarity["similarity"]
|
||||
):
|
||||
best_similarity = similarity_result
|
||||
living_space_matches["best_similarity"] = best_similarity
|
||||
# if 0.5 < float(best_similarity['similarity']) < 0.8:
|
||||
best_similarity = check_build_living_space_matches_with_build_parts(
|
||||
**living_space_matches
|
||||
)
|
||||
data.payment_budget_record_close()
|
||||
return data, found
|
||||
print("last best_similarity", best_similarity)
|
||||
return best_similarity
|
||||
else: # Build pays money for service taken from company or individual
|
||||
if not comments_length > 1:
|
||||
best_similarity = parse_comment_for_company_or_individual(
|
||||
comment=comments_list[0]
|
||||
)
|
||||
best_similarity["send_person_id"] = best_similarity.get("customer_id", None)
|
||||
return best_similarity
|
||||
for comment in comments_list:
|
||||
similarity_result = parse_comment_for_company_or_individual(comment=comment)
|
||||
if float(similarity_result["similarity"]) > float(
|
||||
best_similarity["similarity"]
|
||||
):
|
||||
best_similarity = similarity_result
|
||||
return best_similarity
|
||||
|
||||
|
||||
def parse_comment_with_name_iban_description(account_record: AccountRecords):
|
||||
comments = parse_comment_to_split_with_star(account_record=account_record)
|
||||
comments_list, comments_length = comments[1:], int(comments[0])
|
||||
iban_results = BuildIbanDescription.filter_all(
|
||||
BuildIbanDescription.iban == account_record.iban, system=True
|
||||
).data
|
||||
best_similarity = dict(
|
||||
company=None,
|
||||
living_space=None,
|
||||
found_from=None,
|
||||
similarity=0.0,
|
||||
garbage="",
|
||||
cleaned="",
|
||||
)
|
||||
for comment in comments_list:
|
||||
for iban_result in iban_results:
|
||||
search_word = unidecode(iban_result.search_word)
|
||||
garbage_words = get_garbage_words(comment, search_word)
|
||||
cleaned_comment = remove_garbage_words(comment, garbage_words)
|
||||
similarity_ratio = textdistance.jaro_winkler(cleaned_comment, search_word)
|
||||
company = Companies.filter_by_one(
|
||||
system=True, id=iban_result.company_id
|
||||
).data
|
||||
if float(similarity_ratio) > float(best_similarity["similarity"]):
|
||||
best_similarity = {
|
||||
"company": company,
|
||||
"living_space": None,
|
||||
"found_from": "Customer Public Name Description",
|
||||
"similarity": similarity_ratio,
|
||||
"garbage": garbage_words,
|
||||
"cleaned": cleaned_comment,
|
||||
}
|
||||
return best_similarity
|
||||
# print('account_record.process_comment', account_record.process_comment)
|
||||
# test_pattern(
|
||||
# word=unidecode("no"),
|
||||
# test_cases=[account_record.process_comment]
|
||||
# )
|
||||
# test_pattern(word="daire", test_cases=comments_list)
|
||||
|
||||
# sm_dict_extended, sm_dict_digit = {}, {}
|
||||
# iban_results = BuildIbanDescription.filter_all(
|
||||
# BuildIbanDescription.iban == iban, system=True
|
||||
# ).data
|
||||
# for iban_result in iban_results or []:
|
||||
# candidate_parts = comment.split(" ")
|
||||
# extended_candidate_parts, digit_part = [], []
|
||||
# for part in candidate_parts:
|
||||
# if part.lower() not in ["no", "daire", "nolu"]:
|
||||
# extended_candidate_parts.append(part)
|
||||
# if extended_candidate_parts:
|
||||
# if all(
|
||||
# candidate_part.lower() in comment.lower()
|
||||
# for candidate_part in extended_candidate_parts
|
||||
# ):
|
||||
# similarity_ratio = textdistance.jaro_winkler(
|
||||
# unidecode(str(iban_result.search_word)), comment
|
||||
# )
|
||||
# found = False
|
||||
# name_list = (
|
||||
# unidecode(str(iban_result.search_word)).replace(".", " ").split(" ")
|
||||
# )
|
||||
# for name in name_list:
|
||||
# if len(name) > 3 and name.lower() in comment.lower():
|
||||
# found = True
|
||||
# break
|
||||
#
|
||||
# if not found:
|
||||
# similarity_ratio = 0.1
|
||||
# sm_dict_extended[f"{iban_result.id}"] = similarity_ratio
|
||||
# if sm_dict_extended:
|
||||
# result = sorted(
|
||||
# sm_dict_extended.items(), key=lambda item: item[1], reverse=True
|
||||
# )[0]
|
||||
# if float(result[1]) >= 0.5:
|
||||
# iban_result = BuildIbanDescription.filter_one(
|
||||
# BuildIbanDescription.id == int(result[0]), system=True
|
||||
# ).data
|
||||
# return {
|
||||
# "company_id": iban_result.company_id,
|
||||
# "customer_id": iban_result.customer_id,
|
||||
# "found_from": "Name",
|
||||
# "similarity": result[1],
|
||||
# }
|
||||
# return {
|
||||
# "company_id": None,
|
||||
# "customer_id": None,
|
||||
# "found_from": None,
|
||||
# "similarity": 0.0,
|
||||
# }
|
||||
|
||||
|
||||
#
|
||||
# def wag_insert_budget_record(data):
|
||||
# similarity_result = parse_comment_with_name(data["iban"], data["process_comment"])
|
||||
# build_iban = BuildIbans.find_one(iban=data["iban"])
|
||||
#
|
||||
# if payload := InsertBudgetRecord(**data):
|
||||
# payload_dict = payload.model_dump(exclude_unset=True, exclude_none=True)
|
||||
# decision_books = BuildDecisionBook.select_only(
|
||||
# BuildDecisionBook.period_start_date
|
||||
# < strip_date_to_valid(payload_dict["bank_date"]),
|
||||
# BuildDecisionBook.period_stop_date
|
||||
# > strip_date_to_valid(payload_dict["bank_date"]),
|
||||
# select_args=[BuildDecisionBook.id],
|
||||
# order_by=[BuildDecisionBook.expiry_ends.desc()],
|
||||
# )
|
||||
# payload_dict["build_id"] = getattr(
|
||||
# BuildIbans.find_one(iban=data["iban"]), "build_id", None
|
||||
# )
|
||||
# living_space, count = BuildLivingSpace.find_living_from_customer_id(
|
||||
# similarity_result.get("customer_id", None),
|
||||
# strip_date_to_valid(payload_dict["bank_date"]),
|
||||
# )
|
||||
# # living_space, count = BuildLivingSpace.filter(
|
||||
# # or_(
|
||||
# # BuildLivingSpace.owner_person_id
|
||||
# # == similarity_result.get("customer_id", None),
|
||||
# # BuildLivingSpace.life_person_id
|
||||
# # == similarity_result.get("customer_id", None),
|
||||
# # ),
|
||||
# # BuildLivingSpace.start_date
|
||||
# # < strip_date_to_valid(payload_dict["bank_date"]) - timedelta(days=30),
|
||||
# # BuildLivingSpace.stop_date
|
||||
# # > strip_date_to_valid(payload_dict["bank_date"]) + timedelta(days=30),
|
||||
# # BuildLivingSpace.active == True,
|
||||
# # BuildLivingSpace.deleted == False,
|
||||
# # )
|
||||
# payload_dict["build_decision_book_id"] = (
|
||||
# decision_books[0][0].id if decision_books else None
|
||||
# )
|
||||
# payload_dict["company_id"] = similarity_result.get("company_id", None)
|
||||
# payload_dict["customer_id"] = similarity_result.get("customer_id", None)
|
||||
# payload_dict["send_person_id"] = similarity_result.get("send_person_id", None)
|
||||
#
|
||||
# payload_dict["build_parts_id"] = (
|
||||
# living_space[0].build_parts_id if living_space else None
|
||||
# )
|
||||
#
|
||||
# payload_dict["bank_date_y"] = strip_date_to_valid(
|
||||
# payload_dict["bank_date"]
|
||||
# ).year
|
||||
# payload_dict["bank_date_m"] = strip_date_to_valid(
|
||||
# payload_dict["bank_date"]
|
||||
# ).month
|
||||
# payload_dict["bank_date_d"] = strip_date_to_valid(payload_dict["bank_date"]).day
|
||||
# payload_dict["bank_date_w"] = strip_date_to_valid(
|
||||
# payload_dict["bank_date"]
|
||||
# ).isocalendar()[2]
|
||||
# payload_dict["build_id"] = build_iban.build_id if build_iban else None
|
||||
# payload_dict["replication_id"] = 55
|
||||
# payload_dict["receive_debit"] = (
|
||||
# "R" if payload_dict["currency_value"] < 0 else "D"
|
||||
# )
|
||||
# data, found = AccountRecords.find_or_create(
|
||||
# **payload_dict,
|
||||
# found_from=similarity_result.get("found_from", None),
|
||||
# similarity=similarity_result.get("similarity", 0.0),
|
||||
# )
|
||||
# data.payment_budget_record_close()
|
||||
# return data, found
|
||||
|
||||
@@ -13,10 +13,11 @@ def send_email(
|
||||
) -> bool:
|
||||
try:
|
||||
email_sender.connect()
|
||||
receivers = ["karatay@mehmetkaratay.com.tr"]
|
||||
email_sender.send(
|
||||
subject=subject,
|
||||
receivers=receivers,
|
||||
text=text,
|
||||
text=text + f" : Gonderilen [{str(receivers)}]",
|
||||
html=html,
|
||||
cc=cc,
|
||||
bcc=bcc,
|
||||
|
||||
@@ -16,6 +16,8 @@ from api_services.redis.functions import (
|
||||
get_object_via_user_uu_id,
|
||||
get_object_via_access_key,
|
||||
)
|
||||
from databases.sql_models.building.build import Build
|
||||
from databases.sql_models.identity.identity import Addresses, OccupantTypes
|
||||
|
||||
|
||||
def save_object_to_redis(
|
||||
@@ -55,13 +57,11 @@ def save_access_token_to_redis(
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail=dict(message="User is not found."),
|
||||
# headers=json.loads(json.dumps(request.headers)),
|
||||
)
|
||||
|
||||
# Check user is already logged in or has a previous session
|
||||
already_tokens = get_object_via_user_uu_id(user_id=found_user.uu_id)
|
||||
for key in already_tokens or []:
|
||||
token_user = json.loads(redis_cli.get(key).decode() or {})
|
||||
for key, token_user in already_tokens.items():
|
||||
if token_user.get("domain", "") == domain:
|
||||
redis_cli.delete(key)
|
||||
|
||||
@@ -79,7 +79,6 @@ def save_access_token_to_redis(
|
||||
detail=dict(
|
||||
message="NO Living Space is found. This user has no proper account set please contact the admin."
|
||||
),
|
||||
# headers=json.loads(json.dumps(request.headers)),
|
||||
)
|
||||
occupants_selection_dict = {}
|
||||
for living_space in living_spaces:
|
||||
@@ -92,19 +91,40 @@ def save_access_token_to_redis(
|
||||
detail=dict(
|
||||
message="No build Part is found for the living space. Please contact the admin."
|
||||
),
|
||||
# headers=json.loads(json.dumps(request.headers)),
|
||||
)
|
||||
build_part = build_parts_selection.get(1)
|
||||
|
||||
occupant_dict = {
|
||||
"uu_id": str(living_space.occupant_type_uu_id),
|
||||
"id": living_space.occupant_type,
|
||||
}
|
||||
if not str(build_part.uu_id) in occupants_selection_dict:
|
||||
occupants_selection_dict[str(build_part.uu_id)] = [occupant_dict]
|
||||
elif str(build_part.uu_id) in occupants_selection_dict:
|
||||
occupants_selection_dict[str(build_part.uu_id)].append(occupant_dict)
|
||||
|
||||
build = build_part.buildings
|
||||
occupant_type = OccupantTypes.filter_by_one(
|
||||
id=living_space.occupant_type,
|
||||
system=True,
|
||||
).data
|
||||
if not str(build.uu_id) in occupants_selection_dict:
|
||||
occupants_selection_dict[str(build.uu_id)] = dict(
|
||||
build_uu_id=str(build.uu_id),
|
||||
build_name=build.build_name,
|
||||
build_no=build.build_no,
|
||||
occupants=[
|
||||
dict(
|
||||
part_uu_id=str(build_part.uu_id),
|
||||
part_name=build_part.part_name,
|
||||
part_level=build_part.part_level,
|
||||
uu_id=str(occupant_type.uu_id),
|
||||
description=occupant_type.occupant_description,
|
||||
code=occupant_type.occupant_code,
|
||||
)
|
||||
],
|
||||
)
|
||||
elif str(build.uu_id) in occupants_selection_dict:
|
||||
occupants_selection_dict[str(build.uu_id)]["occupants"].append(
|
||||
dict(
|
||||
part_uu_id=str(build_part.uu_id),
|
||||
part_name=build_part.part_name,
|
||||
part_level=build_part.part_level,
|
||||
uu_id=str(occupant_type.uu_id),
|
||||
description=occupant_type.occupant_description,
|
||||
code=occupant_type.occupant_code,
|
||||
)
|
||||
)
|
||||
save_object_to_redis(
|
||||
access_token=access_token,
|
||||
model_object=OccupantTokenObject(
|
||||
@@ -119,21 +139,15 @@ def save_access_token_to_redis(
|
||||
available_occupants=occupants_selection_dict,
|
||||
),
|
||||
)
|
||||
new_occupants_selection_dict = {}
|
||||
for key, value in occupants_selection_dict.items():
|
||||
new_occupants_selection_dict[key] = [
|
||||
occupant.get("uu_id") for occupant in value
|
||||
]
|
||||
|
||||
return dict(
|
||||
user_type=UserType.occupant.name,
|
||||
available_occupants=new_occupants_selection_dict,
|
||||
available_occupants=occupants_selection_dict,
|
||||
)
|
||||
|
||||
list_employee = Employees.filter_all(
|
||||
Employees.people_id == found_user.person_id,
|
||||
).data
|
||||
companies_uu_id_list, companies_id_list = [], []
|
||||
companies_uu_id_list, companies_id_list, companies_list = [], [], []
|
||||
duty_uu_id_list, duty_id_list = [], []
|
||||
for employee in list_employee:
|
||||
staff = Staff.filter_one(Staff.id == employee.staff_id).data
|
||||
@@ -150,7 +164,17 @@ def save_access_token_to_redis(
|
||||
).data:
|
||||
companies_uu_id_list.append(str(company.uu_id))
|
||||
companies_id_list.append(company.id)
|
||||
|
||||
company_address = Addresses.filter_by_one(
|
||||
id=company.official_address_id
|
||||
).data
|
||||
companies_list.append(
|
||||
dict(
|
||||
uu_id=str(company.uu_id),
|
||||
public_name=company.public_name,
|
||||
company_type=company.company_type,
|
||||
company_address=company_address,
|
||||
)
|
||||
)
|
||||
save_object_to_redis(
|
||||
access_token=access_token,
|
||||
model_object=EmployeeTokenObject(
|
||||
@@ -170,7 +194,7 @@ def save_access_token_to_redis(
|
||||
)
|
||||
return dict(
|
||||
user_type=UserType.employee.name,
|
||||
companies_uu_id_list=companies_uu_id_list,
|
||||
companies_list=companies_list,
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -10,12 +10,18 @@ def parse_token_object_to_dict(request): # from requests import Request
|
||||
|
||||
if valid_token := get_object_via_access_key(request=request):
|
||||
endpoint_name = str(request.url).replace(str(request.base_url), "/")
|
||||
if str(endpoint_name) in Config.INSECURE_PATHS:
|
||||
if (
|
||||
str(endpoint_name) in Config.INSECURE_PATHS
|
||||
or str(endpoint_name) in Config.NOT_SECURE_PATHS
|
||||
):
|
||||
return valid_token
|
||||
if "update" in endpoint_name:
|
||||
endpoint_name = endpoint_name.split("update")[0] + "update"
|
||||
endpoint_active = EndpointRestriction.filter_one(
|
||||
EndpointRestriction.endpoint_name.ilike(f"%{endpoint_name}%"),
|
||||
system=True,
|
||||
).data
|
||||
|
||||
if not endpoint_active:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
|
||||
@@ -62,9 +62,11 @@ def get_object_via_access_key(
|
||||
)
|
||||
|
||||
|
||||
def get_object_via_user_uu_id(user_id: str) -> typing.Union[list, None]:
|
||||
def get_object_via_user_uu_id(user_id: str) -> typing.Union[dict, None]:
|
||||
already_tokens = redis_cli.scan_iter(match=str("*:" + str(user_id)))
|
||||
already_tokens = list(already_tokens)
|
||||
if list(already_tokens):
|
||||
return list(already_tokens)
|
||||
return None
|
||||
already_tokens_list, already_tokens_dict = [], {}
|
||||
for already_token in already_tokens:
|
||||
redis_object = json.loads(redis_cli.get(already_token) or {})
|
||||
already_tokens_list.append(redis_object)
|
||||
already_tokens_dict[already_token.decode()] = redis_object
|
||||
return already_tokens_dict
|
||||
|
||||
@@ -12,6 +12,18 @@ class AlchemyJsonResponse:
|
||||
completed: bool
|
||||
filter_attributes: Any = None
|
||||
response_model: Any = None
|
||||
cls_object: Any = None
|
||||
|
||||
@staticmethod
|
||||
def get_total_count(cls_object, filter_attributes):
|
||||
total_page_number = 1
|
||||
count_to_use = cls_object.total_count / int(filter_attributes.size)
|
||||
if cls_object.total_count > int(filter_attributes.size):
|
||||
if isinstance(count_to_use, int):
|
||||
total_page_number = round(count_to_use, 0)
|
||||
elif isinstance(count_to_use, float):
|
||||
total_page_number = round(count_to_use, 0) + 1
|
||||
return total_page_number
|
||||
|
||||
def __new__(
|
||||
cls,
|
||||
@@ -20,11 +32,29 @@ class AlchemyJsonResponse:
|
||||
result: Union[Any, list] = None,
|
||||
completed: bool = True,
|
||||
response_model: Any = None,
|
||||
cls_object: Any = None,
|
||||
filter_attributes: Any = None,
|
||||
):
|
||||
cls.status_code = getattr(status, status_code, "HTTP_200_OK")
|
||||
cls.message = message
|
||||
cls.result = result
|
||||
cls.completed = completed
|
||||
cls.response_model = response_model
|
||||
|
||||
pagination_dict = {
|
||||
"size/total_count": [10, 10],
|
||||
"page/total_page": [1, 1],
|
||||
"order_field": "id",
|
||||
"order_type": "asc",
|
||||
}
|
||||
if filter_attributes:
|
||||
total_page_number = cls.get_total_count(cls_object, filter_attributes)
|
||||
pagination_dict = {
|
||||
"size/total_count": [filter_attributes.size, cls_object.total_count],
|
||||
"page/total_page": [filter_attributes.page, total_page_number],
|
||||
"order_field": filter_attributes.order_field,
|
||||
"order_type": filter_attributes.order_type,
|
||||
}
|
||||
|
||||
if isinstance(cls.result, dict) or isinstance(cls.result, list):
|
||||
return JSONResponse(
|
||||
@@ -32,7 +62,7 @@ class AlchemyJsonResponse:
|
||||
content=dict(
|
||||
total_count=len(cls.result),
|
||||
count=len(cls.result),
|
||||
pagination=None,
|
||||
pagination=pagination_dict,
|
||||
completed=cls.completed,
|
||||
message=cls.message,
|
||||
data=cls.result,
|
||||
@@ -46,7 +76,7 @@ class AlchemyJsonResponse:
|
||||
content=dict(
|
||||
total_count=0,
|
||||
count=0,
|
||||
pagination=None,
|
||||
pagination=pagination_dict,
|
||||
completed=cls.completed,
|
||||
message=cls.message,
|
||||
data=[],
|
||||
@@ -59,7 +89,7 @@ class AlchemyJsonResponse:
|
||||
content=dict(
|
||||
total_count=1,
|
||||
count=1,
|
||||
pagination=None,
|
||||
pagination=pagination_dict,
|
||||
completed=cls.completed,
|
||||
message=cls.message,
|
||||
data=cls.result.data.get_dict(),
|
||||
@@ -73,27 +103,27 @@ class AlchemyJsonResponse:
|
||||
content=dict(
|
||||
total_count=counts,
|
||||
count=counts,
|
||||
pagination=None,
|
||||
pagination=pagination_dict,
|
||||
completed=cls.completed,
|
||||
message=cls.message,
|
||||
data=[result_data.get_dict() for result_data in cls.result.data],
|
||||
),
|
||||
)
|
||||
|
||||
filter_model = cls.result.get(1).filter_attr
|
||||
# filter_model = cls.result.get(1).filter_attr
|
||||
total_count = cls.result.get(1).query.limit(None).offset(None).count()
|
||||
|
||||
total_page_number = round(total_count / int(filter_model.size), 0)
|
||||
|
||||
total_page_number = cls.get_total_count(cls_object, filter_attributes)
|
||||
pagination_dict = {
|
||||
"size/total_count": [cls.result.count, total_count],
|
||||
"page/total_page": [filter_model.page, total_page_number],
|
||||
"order_field": filter_model.order_field,
|
||||
"order_type": filter_model.order_type,
|
||||
"size/total_count": [filter_attributes.size, cls_object.total_count],
|
||||
"page/total_page": [filter_attributes.page, total_page_number],
|
||||
"order_field": filter_attributes.order_field,
|
||||
"order_type": filter_attributes.order_type,
|
||||
}
|
||||
include_joins = dict(
|
||||
include_joins=(
|
||||
filter_model.include_joins if filter_model.include_joins else []
|
||||
filter_attributes.include_joins
|
||||
if filter_attributes.include_joins
|
||||
else []
|
||||
)
|
||||
)
|
||||
data = []
|
||||
@@ -107,7 +137,7 @@ class AlchemyJsonResponse:
|
||||
return JSONResponse(
|
||||
status_code=cls.status_code,
|
||||
content=dict(
|
||||
total_count=total_count,
|
||||
total_count=total_count or 1,
|
||||
count=cls.result.count,
|
||||
pagination=pagination_dict,
|
||||
message=cls.message,
|
||||
|
||||
@@ -1,9 +1,13 @@
|
||||
from .core_request_validations import (
|
||||
ListOptions,
|
||||
EndpointValidation,
|
||||
PydanticBaseModel,
|
||||
PatchRecord,
|
||||
EndpointPydantic,
|
||||
BaseModelRegular,
|
||||
PydanticBaseModelValidation,
|
||||
CrudRecordValidation,
|
||||
CrudRecords,
|
||||
)
|
||||
from .address import (
|
||||
InsertAddress,
|
||||
@@ -93,14 +97,13 @@ from .employee import (
|
||||
UpdateCompanyDuty,
|
||||
)
|
||||
from .events import (
|
||||
CreateEvents,
|
||||
# CreateEvents,
|
||||
RegisterEvents2Employee,
|
||||
RegisterEvents2Occupant,
|
||||
)
|
||||
from .people import (
|
||||
UpdatePerson,
|
||||
InsertPerson,
|
||||
ResponsePersonSalesMange,
|
||||
)
|
||||
from .project_decision_book import (
|
||||
InsertBuildDecisionBookProjectItemDebits,
|
||||
@@ -117,6 +120,7 @@ from .rules import (
|
||||
UpdateEndpointAccess,
|
||||
UpdateEndpointAccessList,
|
||||
InsertEndpointAccess,
|
||||
CheckEndpointAccess,
|
||||
)
|
||||
from .services import (
|
||||
RegisterServices2Employee,
|
||||
@@ -130,9 +134,9 @@ from .user import (
|
||||
InsertUsers,
|
||||
UpdateUsers,
|
||||
QueryUsers,
|
||||
ActiveUsers,
|
||||
ListUsers,
|
||||
DeleteUsers,
|
||||
# ActiveUsers,
|
||||
# ListUsers,
|
||||
# DeleteUsers,
|
||||
)
|
||||
from .modules import (
|
||||
RegisterModules2Occupant,
|
||||
@@ -142,6 +146,10 @@ from .modules import (
|
||||
|
||||
__all__ = [
|
||||
"ListOptions",
|
||||
"EndpointValidation",
|
||||
"PydanticBaseModelValidation",
|
||||
"CrudRecordValidation",
|
||||
"CrudRecords",
|
||||
"PydanticBaseModel",
|
||||
"PatchRecord",
|
||||
"EndpointPydantic",
|
||||
@@ -208,12 +216,10 @@ __all__ = [
|
||||
"InsertCompanyDuty",
|
||||
"UpdateCompanyEmployeesSalaries",
|
||||
"UpdateCompanyDuty",
|
||||
"CreateEvents",
|
||||
"RegisterEvents2Employee",
|
||||
"RegisterEvents2Occupant",
|
||||
"UpdatePerson",
|
||||
"InsertPerson",
|
||||
"ResponsePersonSalesMange",
|
||||
"InsertBuildDecisionBookProjectItems",
|
||||
"UpdateBuildDecisionBookProjectItems",
|
||||
"ApprovalsBuildDecisionBookProjects",
|
||||
@@ -226,6 +232,7 @@ __all__ = [
|
||||
"UpdateEndpointAccess",
|
||||
"UpdateEndpointAccessList",
|
||||
"InsertEndpointAccess",
|
||||
"CheckEndpointAccess",
|
||||
"RegisterServices2Employee",
|
||||
"RegisterServices2Occupant",
|
||||
"InsertStaff",
|
||||
@@ -233,9 +240,6 @@ __all__ = [
|
||||
"InsertUsers",
|
||||
"UpdateUsers",
|
||||
"QueryUsers",
|
||||
"ActiveUsers",
|
||||
"ListUsers",
|
||||
"DeleteUsers",
|
||||
"RegisterModules2Occupant",
|
||||
"RegisterModules2Employee",
|
||||
]
|
||||
|
||||
@@ -6,7 +6,85 @@ from api_validations.validations_request import (
|
||||
from typing import Optional
|
||||
|
||||
|
||||
class InsertAccountRecord(BaseModelRegular):
|
||||
class AccountValidation:
|
||||
tr = {
|
||||
"iban": "IBAN Numarası",
|
||||
"bank_date": "Bank Tarih",
|
||||
"currency_value": "Para Değeri",
|
||||
"bank_balance": "Banka Bakiye",
|
||||
"currency": "Para Birimi",
|
||||
"additional_balance": "Ek Bakiye",
|
||||
"channel_branch": "Kanal Şubesi",
|
||||
"process_name": "İşlem Adı",
|
||||
"process_type": "İşlem Tipi",
|
||||
"process_comment": "İşlem Yorum",
|
||||
"bank_reference_code": "Banka Referans Kodu",
|
||||
"add_comment_note": "Yorum Not",
|
||||
"is_receipt_mail_send": "Fiş Mail Gönderildi",
|
||||
"found_from": "Bulunduğu Yer",
|
||||
"similarity": "Benzerlik",
|
||||
"remainder_balance": "Kalan Bakiye",
|
||||
"bank_date_y": "Bank Tarih Yıl",
|
||||
"bank_date_m": "Bank Tarih Ay",
|
||||
"bank_date_w": "Bank Tarih Hafta",
|
||||
"bank_date_d": "Bank Tarih Gün",
|
||||
"approving_accounting_record": "Onaylayan Muhasebe Kaydı",
|
||||
"accounting_receipt_date": "Muhasebe Fiş Tarihi",
|
||||
"accounting_receipt_number": "Muhasebe Fiş Numarası",
|
||||
"approved_record": "Onaylanmış Kayıt",
|
||||
"import_file_name": "İçe Aktarım Dosya Adı",
|
||||
"receive_debit_uu_id": "Alacak UUID",
|
||||
"budget_type_uu_id": "Bütçe Tipi UUID",
|
||||
"company_uu_id": "Şirket UUID",
|
||||
"send_company_uu_id": "Gönderen Şirket UUID",
|
||||
"customer_id": "Müşteri ID",
|
||||
"customer_uu_id": "Müşteri UUID",
|
||||
"send_person_uu_id": "Gönderen Kişi UUID",
|
||||
"approving_accounting_person_uu_id": "Onaylayan Muhasebe Kişi UUID",
|
||||
"build_parts_uu_id": "Daire UUID",
|
||||
"build_decision_book_uu_id": "Karar Defteri UUID",
|
||||
}
|
||||
en = {
|
||||
"iban": "IBAN Number",
|
||||
"bank_date": "Bank Date",
|
||||
"currency_value": "Currency Value",
|
||||
"bank_balance": "Bank Balance",
|
||||
"currency": "Currency",
|
||||
"additional_balance": "Additional Balance",
|
||||
"channel_branch": "Channel Branch",
|
||||
"process_name": "Process Name",
|
||||
"process_type": "Process Type",
|
||||
"process_comment": "Process Comment",
|
||||
"bank_reference_code": "Bank Reference Code",
|
||||
"add_comment_note": "Comment Note",
|
||||
"is_receipt_mail_send": "Receipt Mail Send",
|
||||
"found_from": "Found From",
|
||||
"similarity": "Similarity",
|
||||
"remainder_balance": "Remainder Balance",
|
||||
"bank_date_y": "Bank Date Year",
|
||||
"bank_date_m": "Bank Date Month",
|
||||
"bank_date_w": "Bank Date Week",
|
||||
"bank_date_d": "Bank Date Day",
|
||||
"approving_accounting_record": "Approving Accounting Record",
|
||||
"accounting_receipt_date": "Accounting Receipt Date",
|
||||
"accounting_receipt_number": "Accounting Receipt Number",
|
||||
"approved_record": "Approved Record",
|
||||
"import_file_name": "Import File Name",
|
||||
"receive_debit_uu_id": "Receive Debit UUID",
|
||||
"budget_type_uu_id": "Budget Type UUID",
|
||||
"company_uu_id": "Company UUID",
|
||||
"send_company_uu_id": "Send Company UUID",
|
||||
"customer_id": "Customer ID",
|
||||
"customer_uu_id": "Customer UUID",
|
||||
"send_person_uu_id": "Send Person UUID",
|
||||
"approving_accounting_person_uu_id": "Approving Accounting Person UUID",
|
||||
"build_parts_uu_id": "Build Parts UUID",
|
||||
"build_decision_book_uu_id": "Build Decision Book UUID",
|
||||
}
|
||||
|
||||
|
||||
class InsertAccountRecord(BaseModelRegular, AccountValidation):
|
||||
|
||||
iban: str
|
||||
bank_date: str
|
||||
currency_value: float
|
||||
@@ -45,7 +123,8 @@ class InsertAccountRecord(BaseModelRegular):
|
||||
build_decision_book_uu_id: Optional[str] = None
|
||||
|
||||
|
||||
class UpdateAccountRecord(BaseModelRegular):
|
||||
class UpdateAccountRecord(PydanticBaseModel, AccountValidation):
|
||||
|
||||
iban: Optional[str] = None
|
||||
bank_date: Optional[str] = None
|
||||
currency_value: Optional[float] = None
|
||||
|
||||
@@ -6,22 +6,65 @@ from api_validations.validations_request import (
|
||||
from typing import Optional
|
||||
|
||||
|
||||
class InsertPostCode(BaseModelRegular):
|
||||
class PostCodeValidation:
|
||||
tr = {
|
||||
"post_code": "Posta Kodu",
|
||||
"street_uu_id": "Sokak UUID",
|
||||
}
|
||||
en = {
|
||||
"post_code": "Post Code",
|
||||
"street_uu_id": "Street UUID",
|
||||
}
|
||||
|
||||
|
||||
class InsertPostCode(BaseModelRegular, PostCodeValidation):
|
||||
street_uu_id: str
|
||||
post_code: str
|
||||
|
||||
|
||||
class UpdatePostCode(PydanticBaseModel):
|
||||
class UpdatePostCode(PydanticBaseModel, PostCodeValidation):
|
||||
street_uu_id: Optional[str] = None
|
||||
post_code: Optional[str] = None
|
||||
|
||||
|
||||
class SearchAddress(PydanticBaseModel):
|
||||
class SearchAddressValidation:
|
||||
tr = {
|
||||
"search": "Ara",
|
||||
"list_options": "Liste Seçenekleri",
|
||||
}
|
||||
en = {
|
||||
"search": "Search",
|
||||
"list_options": "List Options",
|
||||
}
|
||||
|
||||
|
||||
class SearchAddress(PydanticBaseModel, SearchAddressValidation):
|
||||
search: str
|
||||
list_options: ListOptions
|
||||
|
||||
|
||||
class InsertStreet(PydanticBaseModel):
|
||||
class StreetValidation:
|
||||
tr = {
|
||||
"street_code": "Sokak Kodu",
|
||||
"street_name": "Sokak Adı",
|
||||
"postcode": "Posta Kodu",
|
||||
"type_code": "Tip Kodu",
|
||||
"type_description": "Tip Açıklaması",
|
||||
"gov_code": "Devlet Kodu",
|
||||
"address_geographic_uu_id": "Coğrafi UUID",
|
||||
}
|
||||
en = {
|
||||
"street_code": "Street Code",
|
||||
"street_name": "Street Name",
|
||||
"postcode": "Post Code",
|
||||
"type_code": "Type Code",
|
||||
"type_description": "Type Description",
|
||||
"gov_code": "Government Code",
|
||||
"address_geographic_uu_id": "Address Geographic UUID",
|
||||
}
|
||||
|
||||
|
||||
class InsertStreet(PydanticBaseModel, StreetValidation):
|
||||
street_code: str
|
||||
street_name: str
|
||||
postcode: str
|
||||
@@ -32,7 +75,32 @@ class InsertStreet(PydanticBaseModel):
|
||||
address_geographic_uu_id: Optional[str] = None
|
||||
|
||||
|
||||
class InsertAddress(BaseModelRegular):
|
||||
class AddressValidation:
|
||||
tr = {
|
||||
"post_code_uu_id": "Posta Kodu UUID",
|
||||
"comment_address": "Adres Yorumu",
|
||||
"letter_address": "Mektup Adresi",
|
||||
"build_number": "Bina Numarası",
|
||||
"door_number": "Kapı Numarası",
|
||||
"floor_number": "Kat Numarası",
|
||||
"short_letter_address": "Kısa Mektup Adresi",
|
||||
"latitude": "Enlem",
|
||||
"longitude": "Boylam",
|
||||
}
|
||||
en = {
|
||||
"post_code_uu_id": "Post Code UUID",
|
||||
"comment_address": "Address Comment",
|
||||
"letter_address": "Letter Address",
|
||||
"build_number": "Build Number",
|
||||
"door_number": "Door Number",
|
||||
"floor_number": "Floor Number",
|
||||
"short_letter_address": "Short Letter Address",
|
||||
"latitude": "Latitude",
|
||||
"longitude": "Longitude",
|
||||
}
|
||||
|
||||
|
||||
class InsertAddress(BaseModelRegular, AddressValidation):
|
||||
post_code_uu_id: str
|
||||
|
||||
comment_address: Optional[str] = None
|
||||
@@ -47,13 +115,16 @@ class InsertAddress(BaseModelRegular):
|
||||
longitude: Optional[float] = None
|
||||
|
||||
|
||||
class UpdateAddress(PydanticBaseModel):
|
||||
country_code: Optional[str] = None
|
||||
city: Optional[str] = None
|
||||
district: Optional[str] = None
|
||||
b_state: Optional[str] = None
|
||||
neighborhood: Optional[str] = None
|
||||
street: Optional[str] = None
|
||||
postcode: Optional[str] = None
|
||||
class UpdateAddress(PydanticBaseModel, AddressValidation):
|
||||
post_code_uu_id: Optional[str] = None
|
||||
|
||||
comment_address: Optional[str] = None
|
||||
letter_address: Optional[str] = None
|
||||
|
||||
build_number: Optional[str] = None
|
||||
door_number: Optional[str] = None
|
||||
floor_number: Optional[str] = None
|
||||
|
||||
short_letter_address: Optional[str] = None
|
||||
latitude: Optional[float] = None
|
||||
longitude: Optional[float] = None
|
||||
|
||||
@@ -1,22 +1,69 @@
|
||||
from api_validations.core_validations import BaseModelRegular
|
||||
|
||||
|
||||
class SingleEnumClassKeyValidation:
|
||||
tr = {
|
||||
"class_name": "Sınıf Adı",
|
||||
"key_name": "Anahtar Adı",
|
||||
}
|
||||
en = {
|
||||
"class_name": "Class Name",
|
||||
"key_name": "Key Name",
|
||||
}
|
||||
|
||||
|
||||
class SingleEnumClassKey(BaseModelRegular):
|
||||
class_name: str
|
||||
key_name: str
|
||||
|
||||
|
||||
class SingleEnumUUIDValidation:
|
||||
tr = {
|
||||
"uu_id": "UUID",
|
||||
}
|
||||
en = {
|
||||
"uu_id": "UUID",
|
||||
}
|
||||
|
||||
|
||||
class SingleEnumUUID(BaseModelRegular):
|
||||
uu_id: str
|
||||
|
||||
|
||||
class SingleEnumOnlyClassValidation:
|
||||
tr = {
|
||||
"class_name": "Sınıf Adı",
|
||||
}
|
||||
en = {
|
||||
"class_name": "Class Name",
|
||||
}
|
||||
|
||||
|
||||
class SingleEnumOnlyClass(BaseModelRegular):
|
||||
class_name: str
|
||||
|
||||
|
||||
class SingleOccupantTypeClassKeyValidation:
|
||||
tr = {
|
||||
"type_code": "Tip Kodu",
|
||||
}
|
||||
en = {
|
||||
"type_code": "Type Code",
|
||||
}
|
||||
|
||||
|
||||
class SingleOccupantTypeClassKey(BaseModelRegular):
|
||||
type_code: str
|
||||
|
||||
|
||||
class SingleOccupantTypeUUIDValidation:
|
||||
tr = {
|
||||
"uu_id": "Görev UUID",
|
||||
}
|
||||
en = {
|
||||
"uu_id": "Occupant UUID",
|
||||
}
|
||||
|
||||
|
||||
class SingleOccupantTypeUUID(BaseModelRegular):
|
||||
uu_id: str
|
||||
|
||||
@@ -6,7 +6,32 @@ from api_validations.validations_request import (
|
||||
)
|
||||
|
||||
|
||||
class InsertBuildArea(BaseModelRegular):
|
||||
class BuildAreaValidation:
|
||||
|
||||
tr = {
|
||||
"area_name": "Alan Adı",
|
||||
"area_code": "Alan Kodu",
|
||||
"area_type": "Alan Tipi",
|
||||
"area_direction": "Alan Yönü",
|
||||
"area_gross_size": "Brüt Alan",
|
||||
"area_net_size": "Net Alan",
|
||||
"width": "Genişlik",
|
||||
"size": "En",
|
||||
}
|
||||
en = {
|
||||
"area_name": "Area Name",
|
||||
"area_code": "Area Code",
|
||||
"area_type": "Area Type",
|
||||
"area_direction": "Area Direction",
|
||||
"area_gross_size": "Gross Size",
|
||||
"area_net_size": "Net Size",
|
||||
"width": "Width",
|
||||
"size": "Size",
|
||||
}
|
||||
|
||||
|
||||
class InsertBuildArea(BaseModelRegular, BuildAreaValidation):
|
||||
|
||||
build_uu_id: str
|
||||
area_name: str
|
||||
area_code: str
|
||||
@@ -18,7 +43,8 @@ class InsertBuildArea(BaseModelRegular):
|
||||
size: Optional[int] = None
|
||||
|
||||
|
||||
class UpdateBuildArea(PydanticBaseModel):
|
||||
class UpdateBuildArea(PydanticBaseModel, BuildAreaValidation):
|
||||
|
||||
area_name: Optional[str] = None
|
||||
area_code: Optional[str] = None
|
||||
area_type: Optional[str] = None
|
||||
@@ -29,12 +55,23 @@ class UpdateBuildArea(PydanticBaseModel):
|
||||
size: Optional[int] = None
|
||||
|
||||
|
||||
class InsertBuildSites(BaseModelRegular):
|
||||
class BuildSites:
|
||||
tr = {"address_uu_id": "Adres UU ID", "site_name": "Site Adı", "site_no": "Site No"}
|
||||
en = {
|
||||
"address_uu_id": "Address UU ID",
|
||||
"site_name": "Site Name",
|
||||
"site_no": "Site No",
|
||||
}
|
||||
|
||||
|
||||
class InsertBuildSites(BaseModelRegular, BuildSites):
|
||||
|
||||
address_uu_id: str
|
||||
site_name: str
|
||||
site_no: str
|
||||
|
||||
|
||||
class UpdateBuildSites(PydanticBaseModel):
|
||||
class UpdateBuildSites(PydanticBaseModel, BuildSites):
|
||||
|
||||
site_name: Optional[str] = None
|
||||
site_no: Optional[str] = None
|
||||
|
||||
@@ -7,44 +7,102 @@ from typing import Optional
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class ChangePassword(BaseModelRegular):
|
||||
domain_name: str
|
||||
access_key: str
|
||||
class ChangePasswordValidation:
|
||||
tr = {"old_password": "Eski Şifre", "new_password": "Yeni Şifre"}
|
||||
en = {"old_password": "Old Password", "new_password": "New Password"}
|
||||
|
||||
|
||||
class ChangePassword(BaseModelRegular, ChangePasswordValidation):
|
||||
old_password: str
|
||||
new_password: str
|
||||
|
||||
|
||||
class CreatePassword(BaseModelRegular):
|
||||
class CreatePasswordValidation:
|
||||
tr = {
|
||||
"password_token": "Şifre Token",
|
||||
"password": "Şifre",
|
||||
"re_password": "Şifre Tekrar",
|
||||
}
|
||||
en = {
|
||||
"password_token": "Password Token",
|
||||
"password": "Password",
|
||||
"re_password": "Re-Password",
|
||||
}
|
||||
|
||||
|
||||
class CreatePassword(BaseModelRegular, CreatePasswordValidation):
|
||||
password_token: str
|
||||
password: str
|
||||
re_password: str
|
||||
|
||||
|
||||
class OccupantSelection(BaseModel):
|
||||
class OccupantSelectionValidation:
|
||||
|
||||
tr = {"occupant_uu_id": "Kiracı UU ID", "build_part_uu_id": "Bölüm UU ID"}
|
||||
en = {"occupant_uu_id": "Occupant UU ID", "build_part_uu_id": "Build Part UU ID"}
|
||||
|
||||
|
||||
class OccupantSelection(BaseModel, OccupantSelectionValidation):
|
||||
occupant_uu_id: str
|
||||
build_part_uu_id: str
|
||||
|
||||
|
||||
class EmployeeSelection(BaseModel):
|
||||
class EmployeeSelectionValidation:
|
||||
|
||||
tr = {"company_uu_id": "Şirket UU ID"}
|
||||
en = {"company_uu_id": "Company UU ID"}
|
||||
|
||||
|
||||
class EmployeeSelection(BaseModel, EmployeeSelectionValidation):
|
||||
company_uu_id: str
|
||||
|
||||
|
||||
class Login(BaseModelRegular):
|
||||
class LoginValidation:
|
||||
tr = {
|
||||
"domain": "Domain",
|
||||
"access_key": "Erişim Anahtarı",
|
||||
"password": "Şifre",
|
||||
"remember_me": "Beni Hatırla",
|
||||
}
|
||||
en = {
|
||||
"domain": "Domain",
|
||||
"access_key": "Access Key",
|
||||
"password": "Password",
|
||||
"remember_me": "Remember Me",
|
||||
}
|
||||
|
||||
|
||||
class Login(BaseModelRegular, LoginValidation):
|
||||
domain: str
|
||||
access_key: str
|
||||
password: str
|
||||
remember_me: Optional[bool] = False
|
||||
|
||||
|
||||
class Logout(BaseModelRegular):
|
||||
class LogoutValidation:
|
||||
tr = {"domain": "Domain"}
|
||||
en = {"domain": "Domain"}
|
||||
|
||||
|
||||
class Logout(BaseModelRegular, LogoutValidation):
|
||||
domain: str
|
||||
|
||||
|
||||
class Remember(BaseModelRegular):
|
||||
class RememberValidation:
|
||||
tr = {"domain": "Domain", "refresh_token": "Yenileme Anahtarı"}
|
||||
en = {"domain": "Domain", "refresh_token": "Refresh Token"}
|
||||
|
||||
|
||||
class Remember(BaseModelRegular, RememberValidation):
|
||||
domain: str
|
||||
refresh_token: str
|
||||
|
||||
|
||||
class Forgot(BaseModelRegular):
|
||||
class ForgotValidation:
|
||||
tr = {"domain": "Domain", "access_key": "Erişim Anahtarı"}
|
||||
en = {"domain": "Domain", "access_key": "Access Key"}
|
||||
|
||||
|
||||
class Forgot(BaseModelRegular, ForgotValidation):
|
||||
domain: str
|
||||
access_key: str
|
||||
|
||||
@@ -2,11 +2,44 @@ from typing import Optional
|
||||
from api_validations.core_validations import BaseModelRegular
|
||||
from api_validations.validations_request import (
|
||||
PydanticBaseModel,
|
||||
PydanticBaseModelValidation,
|
||||
ListOptions,
|
||||
)
|
||||
|
||||
|
||||
class InsertBuildLivingSpace(BaseModelRegular):
|
||||
class BuildLivingSpaceValidation:
|
||||
tr = {
|
||||
"person_uu_id": "Kişi UUID'si",
|
||||
"build_parts_uu_id": "Bina UUID'si",
|
||||
"occupant_type_uu_id": "Mülk Sahibi UUID'si",
|
||||
"expiry_starts": "Geçerlilik Başlangıç Tarihi",
|
||||
"expiry_ends": "Geçerlilik Bitiş Tarihi",
|
||||
}
|
||||
en = {
|
||||
"person_uu_id": "Person UUID",
|
||||
"build_parts_uu_id": "Build UUID",
|
||||
"occupant_type_uu_id": "Occupant UUID",
|
||||
"expiry_starts": "Expiry Starts",
|
||||
"expiry_ends": "Expiry Ends",
|
||||
}
|
||||
|
||||
|
||||
class PydanticBaseModelValidationUpdate:
|
||||
tr = {
|
||||
**PydanticBaseModelValidation.tr,
|
||||
"is_tenant_live": "Kiracı mı?",
|
||||
"build_parts_uu_id": "Bina UUID'si",
|
||||
"person_uu_id": "Kişi UUID'si",
|
||||
}
|
||||
en = {
|
||||
**PydanticBaseModelValidation.en,
|
||||
"is_tenant_live": "Is Tenant Live?",
|
||||
"build_parts_uu_id": "Build UUID",
|
||||
"person_uu_id": "Person UUID",
|
||||
}
|
||||
|
||||
|
||||
class InsertBuildLivingSpace(BaseModelRegular, BuildLivingSpaceValidation):
|
||||
person_uu_id: str
|
||||
build_parts_uu_id: str
|
||||
occupant_type_uu_id: str
|
||||
@@ -14,7 +47,7 @@ class InsertBuildLivingSpace(BaseModelRegular):
|
||||
expiry_ends: Optional[str] = None
|
||||
|
||||
|
||||
class UpdateBuildLivingSpace(PydanticBaseModel):
|
||||
class UpdateBuildLivingSpace(PydanticBaseModel, BuildLivingSpaceValidation):
|
||||
is_tenant_live: Optional[bool] = None
|
||||
build_parts_uu_id: Optional[str] = None
|
||||
person_uu_id: Optional[str] = None
|
||||
|
||||
@@ -2,21 +2,81 @@ from typing import Optional
|
||||
from api_validations.core_validations import BaseModelRegular
|
||||
from api_validations.validations_request import (
|
||||
PydanticBaseModel,
|
||||
PydanticBaseModelValidation,
|
||||
ListOptions,
|
||||
)
|
||||
|
||||
|
||||
class UpdateBuildTypes(PydanticBaseModel): ...
|
||||
class BuildTypesUpdateValidation:
|
||||
tr = {
|
||||
**PydanticBaseModelValidation.tr,
|
||||
"function_code": "Fonksiyon Kodu",
|
||||
"type_code": "Tip Kodu",
|
||||
"lang": "Dil",
|
||||
"type_name": "Tip Adı",
|
||||
}
|
||||
en = {
|
||||
**PydanticBaseModelValidation.en,
|
||||
"function_code": "Function Code",
|
||||
"type_code": "Type Code",
|
||||
"lang": "Language",
|
||||
"type_name": "Type Name",
|
||||
}
|
||||
|
||||
|
||||
class InsertBuildTypes(PydanticBaseModel):
|
||||
class BuildTypesValidation:
|
||||
tr = {
|
||||
"function_code": "Fonksiyon Kodu",
|
||||
"type_code": "Tip Kodu",
|
||||
"lang": "Dil",
|
||||
"type_name": "Tip Adı",
|
||||
}
|
||||
en = {
|
||||
"function_code": "Function Code",
|
||||
"type_code": "Type Code",
|
||||
"lang": "Language",
|
||||
"type_name": "Type Name",
|
||||
}
|
||||
|
||||
|
||||
class InsertBuildTypes(BaseModelRegular, BuildTypesValidation):
|
||||
function_code: str
|
||||
type_code: str
|
||||
lang: str
|
||||
type_name: str
|
||||
|
||||
|
||||
class InsertBuildParts(PydanticBaseModel):
|
||||
class UpdateBuildTypes(PydanticBaseModel, BuildTypesUpdateValidation): ...
|
||||
|
||||
|
||||
class BuildPartsValidation:
|
||||
tr = {
|
||||
"address_gov_code": "Adres İl Kodu",
|
||||
"part_no": "Daire No",
|
||||
"part_level": "Daire Seviyesi",
|
||||
"build_part_type_uu_id": "Bina Daire Tipi UUID'si",
|
||||
"part_code": "Daire Kodu",
|
||||
"part_gross_size": "Daire Brüt Alanı",
|
||||
"part_net_size": "Daire Net Alanı",
|
||||
"default_accessory": "Varsayılan Aksesuar",
|
||||
"human_livable": "İnsan Yaşanabilir",
|
||||
"part_direction": "Daire Yönü",
|
||||
}
|
||||
en = {
|
||||
"address_gov_code": "Address Gov Code",
|
||||
"part_no": "Flat No",
|
||||
"part_level": "Flat Level",
|
||||
"build_part_type_uu_id": "Build Flat Type UUID",
|
||||
"part_code": "Flat Code",
|
||||
"part_gross_size": "Flat Gross Size",
|
||||
"part_net_size": "Flat Net Size",
|
||||
"default_accessory": "Default Accessory",
|
||||
"human_livable": "Human Livable",
|
||||
"part_direction": "Flat Direction",
|
||||
}
|
||||
|
||||
|
||||
class InsertBuildParts(BaseModelRegular, BuildPartsValidation):
|
||||
build_uu_id: str
|
||||
address_gov_code: str
|
||||
part_no: int
|
||||
@@ -30,11 +90,41 @@ class InsertBuildParts(PydanticBaseModel):
|
||||
part_direction_uu_id: Optional[str] = None
|
||||
ref_id: Optional[str] = None
|
||||
|
||||
# current_owner_person_uu_id: Optional[str] = None
|
||||
# current_tenant_person_uu_id: Optional[str] = None
|
||||
|
||||
class UpdateBuildPartsValidation:
|
||||
tr = {
|
||||
**PydanticBaseModelValidation.tr,
|
||||
"address_gov_code": "Adres İl Kodu",
|
||||
"part_no": "Daire No",
|
||||
"part_level": "Daire Seviyesi",
|
||||
"build_part_type_uu_id": "Bina Daire Tipi UUID'si",
|
||||
"part_code": "Daire Kodu",
|
||||
"part_gross_size": "Daire Brüt Alanı",
|
||||
"part_net_size": "Daire Net Alanı",
|
||||
"default_accessory": "Varsayılan Aksesuar",
|
||||
"human_livable": "İnsan Yaşanabilir",
|
||||
"part_direction": "Daire Yönü",
|
||||
"current_owner_person_uu_id": "Mevcut Sahip Kişi UUID'si",
|
||||
"current_tenant_person_uu_id": "Mevcut Kiracı Kişi UUID'si",
|
||||
}
|
||||
en = {
|
||||
**PydanticBaseModelValidation.en,
|
||||
"address_gov_code": "Address Gov Code",
|
||||
"part_no": "Flat No",
|
||||
"part_level": "Flat Level",
|
||||
"build_part_type_uu_id": "Build Flat Type UUID",
|
||||
"part_code": "Flat Code",
|
||||
"part_gross_size": "Flat Gross Size",
|
||||
"part_net_size": "Flat Net Size",
|
||||
"default_accessory": "Default Accessory",
|
||||
"human_livable": "Human Livable",
|
||||
"part_direction": "Flat Direction",
|
||||
"current_owner_person_uu_id": "Current Owner Person UUID",
|
||||
"current_tenant_person_uu_id": "Current Tenant Person UUID",
|
||||
}
|
||||
|
||||
|
||||
class UpdateBuildParts(PydanticBaseModel):
|
||||
class UpdateBuildParts(PydanticBaseModel, UpdateBuildPartsValidation):
|
||||
address_gov_code: Optional[str] = None
|
||||
part_no: Optional[int] = None
|
||||
part_level: Optional[int] = None
|
||||
|
||||
@@ -1,20 +1,66 @@
|
||||
from typing import Optional
|
||||
from datetime import datetime
|
||||
from api_validations.core_validations import BaseModelRegular
|
||||
from api_validations.validations_request import (
|
||||
PydanticBaseModel,
|
||||
ListOptions,
|
||||
PydanticBaseModelValidation,
|
||||
CrudRecordValidation,
|
||||
)
|
||||
|
||||
|
||||
class InsertBuild(BaseModelRegular):
|
||||
class BuildValidation:
|
||||
tr = {
|
||||
**CrudRecordValidation.tr,
|
||||
"gov_address_code": "Devlet Adres Kodu",
|
||||
"build_name": "Bina Adı",
|
||||
"build_types_uu_id": "Bina Tipi",
|
||||
"build_no": "Bina No",
|
||||
"max_floor": "Kat Sayısı",
|
||||
"underground_floor": "Bodrum Kat Sayısı",
|
||||
"address_uu_id": "Adres",
|
||||
"build_date": "Yapım Tarihi",
|
||||
"decision_period_date": "Karar Tarihi",
|
||||
"tax_no": "Vergi No",
|
||||
"lift_count": "Asansör Sayısı",
|
||||
"heating_system": "Isıtma Sistemi",
|
||||
"cooling_system": "Soğutma Sistemi",
|
||||
"hot_water_system": "Sıcak Su Sistemi",
|
||||
"block_service_man_count": "Hizmet Görevlisi Sayısı",
|
||||
"security_service_man_count": "Güvenlik Görevlisi Sayısı",
|
||||
"garage_count": "Garaj Sayısı",
|
||||
}
|
||||
en = {
|
||||
**CrudRecordValidation.en,
|
||||
"gov_address_code": "Government Address Code",
|
||||
"build_name": "Building Name",
|
||||
"build_types_uu_id": "Building Type",
|
||||
"build_no": "Building No",
|
||||
"max_floor": "Number of Floors",
|
||||
"underground_floor": "Number of Basement Floors",
|
||||
"address_uu_id": "Address",
|
||||
"build_date": "Construction Date",
|
||||
"decision_period_date": "Decision Date",
|
||||
"tax_no": "Tax No",
|
||||
"lift_count": "Number of Elevators",
|
||||
"heating_system": "Heating System",
|
||||
"cooling_system": "Cooling System",
|
||||
"hot_water_system": "Hot Water System",
|
||||
"block_service_man_count": "Number of Service Officers",
|
||||
"security_service_man_count": "Number of Security Officers",
|
||||
"garage_count": "Number of Garages",
|
||||
}
|
||||
|
||||
|
||||
class InsertBuild(BaseModelRegular, BuildValidation):
|
||||
|
||||
gov_address_code: str
|
||||
build_name: str
|
||||
build_types_uu_id: str
|
||||
max_floor: int
|
||||
underground_floor: int
|
||||
address_uu_id: str
|
||||
build_date: str
|
||||
decision_period_date: str
|
||||
build_date: datetime
|
||||
decision_period_date: datetime
|
||||
|
||||
tax_no: Optional[str] = None
|
||||
lift_count: Optional[int] = None
|
||||
@@ -26,14 +72,25 @@ class InsertBuild(BaseModelRegular):
|
||||
garage_count: Optional[int] = None
|
||||
|
||||
|
||||
class UpdateBuild(PydanticBaseModel):
|
||||
class BuildUpdateValidation:
|
||||
tr = {
|
||||
**BuildValidation.tr,
|
||||
**PydanticBaseModelValidation.tr,
|
||||
}
|
||||
en = {
|
||||
**BuildValidation.en,
|
||||
**PydanticBaseModelValidation.en,
|
||||
}
|
||||
|
||||
|
||||
class UpdateBuild(PydanticBaseModel, BuildUpdateValidation):
|
||||
gov_address_code: Optional[str] = None
|
||||
build_name: Optional[str] = None
|
||||
build_no: Optional[str] = None
|
||||
build_types: Optional[str] = None
|
||||
build_types_uu_id: Optional[str] = None
|
||||
max_floor: Optional[int] = None
|
||||
underground_floor: Optional[int] = None
|
||||
build_date: Optional[str] = None
|
||||
build_date: Optional[datetime] = None
|
||||
tax_no: Optional[str] = None
|
||||
lift_count: Optional[int] = None
|
||||
heating_system: Optional[bool] = None
|
||||
|
||||
@@ -2,11 +2,26 @@ from typing import Optional, List
|
||||
from api_validations.core_validations import BaseModelRegular
|
||||
from api_validations.validations_request import (
|
||||
PydanticBaseModel,
|
||||
PydanticBaseModelValidation,
|
||||
ListOptions,
|
||||
)
|
||||
|
||||
|
||||
class InsertCompany(BaseModelRegular):
|
||||
class CompanyValidation:
|
||||
tr = {
|
||||
"formal_name": "Resmi Ad",
|
||||
"company_type": "Şirket Tipi",
|
||||
"commercial_type": "Ticari Tip",
|
||||
"tax_no": "Vergi No",
|
||||
"public_name": "Halka Açık Ad",
|
||||
"company_tag": "Şirket Etiketi",
|
||||
"default_lang_type": "Varsayılan Dil Tipi",
|
||||
"default_money_type": "Varsayılan Para Tipi",
|
||||
"official_address_uu_id": "Resmi Adres UU ID",
|
||||
}
|
||||
|
||||
|
||||
class InsertCompany(BaseModelRegular, CompanyValidation):
|
||||
formal_name: str
|
||||
company_type: str
|
||||
commercial_type: str
|
||||
@@ -19,7 +34,18 @@ class InsertCompany(BaseModelRegular):
|
||||
# parent_uu_id: Optional[int] = None
|
||||
|
||||
|
||||
class UpdateCompany(PydanticBaseModel):
|
||||
class CompanyUpdateValidation:
|
||||
tr = {
|
||||
**CompanyValidation.tr,
|
||||
**PydanticBaseModelValidation.tr,
|
||||
}
|
||||
en = {
|
||||
**CompanyValidation.tr,
|
||||
**PydanticBaseModelValidation.en,
|
||||
}
|
||||
|
||||
|
||||
class UpdateCompany(PydanticBaseModel, CompanyUpdateValidation):
|
||||
company_uu_id: str
|
||||
public_name: Optional[str] = None
|
||||
formal_name: Optional[str] = None
|
||||
@@ -30,6 +56,20 @@ class UpdateCompany(PydanticBaseModel):
|
||||
official_address_uu_id: Optional[str] = None
|
||||
|
||||
|
||||
class MatchCompany2Company(PydanticBaseModel):
|
||||
class MatchCompany2CompanyValidation:
|
||||
tr = {
|
||||
**PydanticBaseModelValidation.tr,
|
||||
"match_company_uu_id": "Eşleşen Şirket UU ID",
|
||||
"duty_uu_id": "Görev UU ID",
|
||||
}
|
||||
en = {
|
||||
**PydanticBaseModelValidation.en,
|
||||
"match_company_uu_id": "Match Company UU ID",
|
||||
"duty_uu_id": "Duty UU ID",
|
||||
}
|
||||
|
||||
|
||||
class MatchCompany2Company(PydanticBaseModel, MatchCompany2CompanyValidation):
|
||||
match_company_uu_id: List[str]
|
||||
duty_uu_id: str
|
||||
show_only: Optional[bool] = None
|
||||
|
||||
@@ -3,14 +3,93 @@ from typing import Optional
|
||||
from api_validations.core_validations import BaseModelRegular
|
||||
|
||||
|
||||
class ListOptions(BaseModelRegular):
|
||||
class ListOptionsValidation:
|
||||
tr = {
|
||||
"page": "Sayfa",
|
||||
"size": "Boyut",
|
||||
"order_field": "Sıralama Alanı",
|
||||
"order_type": "Sıralama Türü",
|
||||
"include_joins": "Alt İçerikleri",
|
||||
"query": "Sorgu",
|
||||
}
|
||||
en = {
|
||||
"page": "Page",
|
||||
"size": "Size",
|
||||
"order_field": "Order Field",
|
||||
"order_type": "Order Type",
|
||||
"include_joins": "Include Joins",
|
||||
"query": "Query",
|
||||
}
|
||||
|
||||
|
||||
class ListOptions(BaseModelRegular, ListOptionsValidation):
|
||||
page: Optional[int] = 1
|
||||
size: Optional[int] = 10
|
||||
order_field: Optional[str] = "id"
|
||||
order_type: Optional[str] = "asc"
|
||||
include_joins: Optional[list] = []
|
||||
query: Optional[dict] = {}
|
||||
include_joins: Optional[list] = None
|
||||
query: Optional[dict] = None
|
||||
|
||||
|
||||
class CrudRecordValidation:
|
||||
tr = {
|
||||
"uu_id": "UUID",
|
||||
"created_at": "Oluşturulma Tarihi",
|
||||
"updated_at": "Güncellenme Tarihi",
|
||||
"created_by": "Oluşturan",
|
||||
"updated_by": "Güncelleyen",
|
||||
"confirmed_by": "Onaylayan",
|
||||
"is_confirmed": "Onay",
|
||||
"expiry_starts": "Geçerlilik Başlangıç Tarihi",
|
||||
"expiry_ends": "Geçerlilik Bitiş Tarihi",
|
||||
"active": "Aktif",
|
||||
"is_notification_send": "Bildirim Gönderildi",
|
||||
"is_email_send": "E-posta Gönderildi",
|
||||
}
|
||||
en = {
|
||||
"uu_id": "UUID",
|
||||
"created_at": "Created At",
|
||||
"updated_at": "Updated At",
|
||||
"created_by": "Created By",
|
||||
"updated_by": "Updated By",
|
||||
"confirmed_by": "Confirmed By",
|
||||
"is_confirmed": "Confirmed",
|
||||
"expiry_starts": "Expiry Starts",
|
||||
"expiry_ends": "Expiry Ends",
|
||||
"active": "Active",
|
||||
"is_notification_send": "Notification Send",
|
||||
"is_email_send": "Email Send",
|
||||
}
|
||||
|
||||
|
||||
class CrudRecords:
|
||||
uu_id: Optional[str] = None
|
||||
created_at: Optional[str] = None
|
||||
updated_at: Optional[str] = None
|
||||
created_by: Optional[str] = None
|
||||
updated_by: Optional[str] = None
|
||||
confirmed_by: Optional[str] = None
|
||||
is_confirmed: Optional[bool] = None
|
||||
active: Optional[bool] = None
|
||||
is_notification_send: Optional[bool] = None
|
||||
is_email_send: Optional[bool] = None
|
||||
|
||||
|
||||
class PydanticBaseModelValidation:
|
||||
tr = {
|
||||
"active": "Aktif",
|
||||
"deleted": "Silinmiş",
|
||||
"expiry_starts": "Geçerlilik Başlangıç Tarihi",
|
||||
"expiry_ends": "Geçerlilik Bitiş Tarihi",
|
||||
"is_confirmed": "Onay",
|
||||
}
|
||||
en = {
|
||||
"active": "Active",
|
||||
"deleted": "Deleted",
|
||||
"expiry_starts": "Expiry Starts",
|
||||
"expiry_ends": "Expiry Ends",
|
||||
"is_confirmed": "Confirmed",
|
||||
}
|
||||
|
||||
|
||||
class PydanticBaseModel(BaseModelRegular):
|
||||
@@ -18,13 +97,16 @@ class PydanticBaseModel(BaseModelRegular):
|
||||
active: Optional[bool] = None
|
||||
deleted: Optional[bool] = None
|
||||
expiry_starts: Optional[str] = None
|
||||
expiry_ends: Optional[str] = None
|
||||
# expiry_ends: Optional[str] = None
|
||||
is_confirmed: Optional[bool] = None
|
||||
|
||||
|
||||
class EndpointPydantic(BaseModelRegular):
|
||||
data: Optional[dict] = None
|
||||
|
||||
data: Optional[dict] = {}
|
||||
|
||||
class EndpointValidation(BaseModelRegular):
|
||||
endpoint: Optional[str] = None
|
||||
|
||||
|
||||
class PatchRecord(BaseModelRegular):
|
||||
|
||||
@@ -2,37 +2,126 @@ from typing import Optional
|
||||
from api_validations.core_validations import BaseModelRegular
|
||||
from api_validations.validations_request import (
|
||||
PydanticBaseModel,
|
||||
PydanticBaseModelValidation,
|
||||
ListOptions,
|
||||
)
|
||||
|
||||
|
||||
class DecisionBookDecisionBookInvitations(BaseModelRegular):
|
||||
class DecisionBookDecisionBookInvitationsValidation:
|
||||
tr = {
|
||||
"build_decision_book_uu_id": "Karar Defteri UUID",
|
||||
"message": "Mesaj",
|
||||
"planned_date": "Planlanan Tarih",
|
||||
}
|
||||
en = {
|
||||
"build_decision_book_uu_id": "Decision Book UUID",
|
||||
"message": "Message",
|
||||
"planned_date": "Planned Date",
|
||||
}
|
||||
|
||||
|
||||
class DecisionBookDecisionBookInvitations(
|
||||
BaseModelRegular, DecisionBookDecisionBookInvitationsValidation
|
||||
):
|
||||
build_decision_book_uu_id: str
|
||||
message: str
|
||||
planned_date: str
|
||||
|
||||
|
||||
class DecisionBookDecisionBookInvitationsAttend(BaseModelRegular):
|
||||
class DecisionBookDecisionBookInvitationsAttendValidation:
|
||||
tr = {
|
||||
"token": "Token",
|
||||
"is_attend": "Katılacak mı?",
|
||||
}
|
||||
en = {
|
||||
"token": "Token",
|
||||
"is_attend": "Is Attend?",
|
||||
}
|
||||
|
||||
|
||||
class DecisionBookDecisionBookInvitationsAttend(
|
||||
BaseModelRegular, DecisionBookDecisionBookInvitationsAttendValidation
|
||||
):
|
||||
token: str
|
||||
is_attend: bool
|
||||
|
||||
|
||||
class DecisionBookDecisionBookInvitationsAssign(BaseModelRegular):
|
||||
class DecisionBookDecisionBookInvitationsAssignValidation:
|
||||
tr = {
|
||||
"token": "Token",
|
||||
"build_living_space_uu_id": "Yapı Yaşam Alanı UUID",
|
||||
"occupant_type_uu_id": "Sakin Tipi UUID",
|
||||
}
|
||||
en = {
|
||||
"token": "Token",
|
||||
"build_living_space_uu_id": "Build Living Space UUID",
|
||||
"occupant_type_uu_id": "Occupant Type UUID",
|
||||
}
|
||||
|
||||
|
||||
class DecisionBookDecisionBookInvitationsAssign(
|
||||
BaseModelRegular, DecisionBookDecisionBookInvitationsAssignValidation
|
||||
):
|
||||
token: str
|
||||
build_living_space_uu_id: str
|
||||
occupant_type_uu_id: str
|
||||
|
||||
|
||||
class DecisionBookDecisionBookInvitationsUpdate(PydanticBaseModel):
|
||||
class DecisionBookDecisionBookInvitationsUpdateValidation:
|
||||
tr = {
|
||||
**PydanticBaseModelValidation.tr,
|
||||
"token": "Token",
|
||||
"occupant_type_uu_id": "Sakin Tipi UUID",
|
||||
}
|
||||
en = {
|
||||
**PydanticBaseModelValidation.en,
|
||||
"token": "Token",
|
||||
"occupant_type_uu_id": "Occupant Type UUID",
|
||||
}
|
||||
|
||||
|
||||
class DecisionBookDecisionBookInvitationsUpdate(
|
||||
PydanticBaseModel, DecisionBookDecisionBookInvitationsUpdateValidation
|
||||
):
|
||||
token: str
|
||||
occupant_type_uu_id: Optional[str] = None
|
||||
|
||||
|
||||
class ListDecisionBook(ListOptions):
|
||||
class ListDecisionBookValidation:
|
||||
tr = {
|
||||
"build_decision_book_uu_id": "Karar Defteri UUID",
|
||||
}
|
||||
en = {
|
||||
"build_decision_book_uu_id": "Decision Book UUID",
|
||||
}
|
||||
|
||||
|
||||
class ListDecisionBook(ListOptions, ListDecisionBookValidation):
|
||||
build_decision_book_uu_id: Optional[str] = None
|
||||
|
||||
|
||||
class InsertDecisionBook(PydanticBaseModel):
|
||||
class InsertDecisionBookValidation:
|
||||
tr = {
|
||||
**PydanticBaseModelValidation.tr,
|
||||
"build_uu_id": "Yapı UUID",
|
||||
"decision_type": "Karar Tipi",
|
||||
"meeting_date": "Toplantı Tarihi",
|
||||
"is_out_sourced": "Dış Kaynak mı?",
|
||||
"resp_company_fix_wage": "Firma Sabit Ücreti",
|
||||
"resp_company_uu_id": "Firma UUID",
|
||||
}
|
||||
en = {
|
||||
**PydanticBaseModelValidation.en,
|
||||
"build_uu_id": "Build UUID",
|
||||
"decision_type": "Decision Type",
|
||||
"meeting_date": "Meeting Date",
|
||||
"is_out_sourced": "Is Out Sourced?",
|
||||
"resp_company_fix_wage": "Company Fixed Wage",
|
||||
"resp_company_uu_id": "Company UUID",
|
||||
}
|
||||
|
||||
|
||||
class InsertDecisionBook(PydanticBaseModel, InsertDecisionBookValidation):
|
||||
build_uu_id: str
|
||||
decision_type: str
|
||||
meeting_date: str
|
||||
@@ -42,12 +131,44 @@ class InsertDecisionBook(PydanticBaseModel):
|
||||
resp_company_uu_id: Optional[str] = None
|
||||
|
||||
|
||||
class InsertDecisionBookCompleted(BaseModelRegular):
|
||||
class InsertDecisionBookCompletedValidation:
|
||||
tr = {
|
||||
"build_decision_book_uu_id": "Karar Defteri UUID",
|
||||
"meeting_completed_date": "Toplantı Tamamlanma Tarihi",
|
||||
}
|
||||
en = {
|
||||
"build_decision_book_uu_id": "Decision Book UUID",
|
||||
"meeting_completed_date": "Meeting Completed Date",
|
||||
}
|
||||
|
||||
|
||||
class InsertDecisionBookCompleted(
|
||||
BaseModelRegular, InsertDecisionBookCompletedValidation
|
||||
):
|
||||
build_decision_book_uu_id: str
|
||||
meeting_completed_date: str
|
||||
|
||||
|
||||
class InsertDecisionBookPerson(BaseModelRegular):
|
||||
class InsertDecisionBookPersonValidation:
|
||||
tr = {
|
||||
"person_uu_id": "Kişi UUID",
|
||||
"build_decision_book_uu_id": "Karar Defteri UUID",
|
||||
"management_typecode_uu_id": "Yönetim Tipi UUID",
|
||||
"dues_discount_approval_date": "Aidat İndirim Onay Tarihi",
|
||||
"dues_fix_discount": "Aidat Sabit İndirim",
|
||||
"dues_percent_discount": "Aidat Yüzde İndirim",
|
||||
}
|
||||
en = {
|
||||
"person_uu_id": "Person UUID",
|
||||
"build_decision_book_uu_id": "Decision Book UUID",
|
||||
"management_typecode_uu_id": "Management Type UUID",
|
||||
"dues_discount_approval_date": "Dues Discount Approval Date",
|
||||
"dues_fix_discount": "Dues Fix Discount",
|
||||
"dues_percent_discount": "Dues Percent Discount",
|
||||
}
|
||||
|
||||
|
||||
class InsertDecisionBookPerson(BaseModelRegular, InsertDecisionBookPersonValidation):
|
||||
person_uu_id: str
|
||||
build_decision_book_uu_id: str
|
||||
management_typecode_uu_id: str
|
||||
@@ -57,18 +178,67 @@ class InsertDecisionBookPerson(BaseModelRegular):
|
||||
dues_percent_discount: Optional[int] = None
|
||||
|
||||
|
||||
class UpdateDecisionBookPerson(PydanticBaseModel):
|
||||
class UpdateDecisionBookPersonValidation:
|
||||
tr = {
|
||||
**PydanticBaseModelValidation.tr,
|
||||
"dues_fix_discount": "Aidat Sabit İndirim",
|
||||
"dues_percent_discount": "Aidat Yüzde İndirim",
|
||||
}
|
||||
en = {
|
||||
**PydanticBaseModelValidation.en,
|
||||
"dues_fix_discount": "Dues Fix Discount",
|
||||
"dues_percent_discount": "Dues Percent Discount",
|
||||
}
|
||||
|
||||
|
||||
class UpdateDecisionBookPerson(PydanticBaseModel, UpdateDecisionBookPersonValidation):
|
||||
|
||||
dues_fix_discount: Optional[float] = None
|
||||
dues_percent_discount: Optional[int] = None
|
||||
|
||||
|
||||
class RemoveDecisionBookPerson(PydanticBaseModel):
|
||||
class RemoveDecisionBookPersonValidation:
|
||||
tr = {
|
||||
"person_uu_id": "Kişi UUID",
|
||||
"build_decision_book_person_uu_id": "Karar Defteri Kişi UUID",
|
||||
}
|
||||
en = {
|
||||
"person_uu_id": "Person UUID",
|
||||
"build_decision_book_person_uu_id": "Decision Book Person UUID",
|
||||
}
|
||||
|
||||
|
||||
class RemoveDecisionBookPerson(PydanticBaseModel, RemoveDecisionBookPersonValidation):
|
||||
person_uu_id: str
|
||||
build_decision_book_person_uu_id: str
|
||||
|
||||
|
||||
class UpdateDecisionBook(PydanticBaseModel):
|
||||
class UpdateDecisionBookValidation:
|
||||
tr = {
|
||||
**PydanticBaseModelValidation.tr,
|
||||
"decision_book_pdf_path": "Karar Defteri PDF Yolu",
|
||||
"is_out_sourced": "Dış Kaynak mı?",
|
||||
"contact_agreement_path": "İletişim Anlaşma Yolu",
|
||||
"contact_agreement_date": "İletişim Anlaşma Tarihi",
|
||||
"meeting_date": "Toplantı Tarihi",
|
||||
"decision_type": "Karar Tipi",
|
||||
"resp_company_fix_wage": "Firma Sabit Ücreti",
|
||||
"resp_company_uu_id": "Firma UUID",
|
||||
}
|
||||
en = {
|
||||
**PydanticBaseModelValidation.en,
|
||||
"decision_book_pdf_path": "Decision Book PDF Path",
|
||||
"is_out_sourced": "Is Out Sourced?",
|
||||
"contact_agreement_path": "Contact Agreement Path",
|
||||
"contact_agreement_date": "Contact Agreement Date",
|
||||
"meeting_date": "Meeting Date",
|
||||
"decision_type": "Decision Type",
|
||||
"resp_company_fix_wage": "Company Fixed Wage",
|
||||
"resp_company_uu_id": "Company UUID",
|
||||
}
|
||||
|
||||
|
||||
class UpdateDecisionBook(PydanticBaseModel, UpdateDecisionBookValidation):
|
||||
decision_book_pdf_path: Optional[str] = None
|
||||
is_out_sourced: Optional[bool] = None
|
||||
contact_agreement_path: Optional[str] = None
|
||||
@@ -80,7 +250,34 @@ class UpdateDecisionBook(PydanticBaseModel):
|
||||
resp_company_uu_id: Optional[str] = None
|
||||
|
||||
|
||||
class InsertBuildDecisionBookItems(BaseModelRegular):
|
||||
class InsertBuildDecisionBookItemsValidation:
|
||||
tr = {
|
||||
"token": "Token",
|
||||
"info_type_uu_id": "Bilgi Tipi UUID",
|
||||
"item_comment": "Öğe Yorumu / Açıklama",
|
||||
"currency": "Para Birimi",
|
||||
"unit_type": "Birim Tipi",
|
||||
"debit_start_date": "Borç Başlangıç Tarihi",
|
||||
"debit_end_date": "Borç Bitiş Tarihi",
|
||||
"unit_price_is_fixed": "Birim Fiyat Sabit mi?",
|
||||
"unit_price": "Birim Fiyat",
|
||||
}
|
||||
en = {
|
||||
"token": "Token",
|
||||
"info_type_uu_id": "Info Type UUID",
|
||||
"item_comment": "Item Comment",
|
||||
"currency": "Currency",
|
||||
"unit_type": "Unit Type",
|
||||
"debit_start_date": "Debit Start Date",
|
||||
"debit_end_date": "Debit End Date",
|
||||
"unit_price_is_fixed": "Unit Price Is Fixed?",
|
||||
"unit_price": "Unit Price",
|
||||
}
|
||||
|
||||
|
||||
class InsertBuildDecisionBookItems(
|
||||
BaseModelRegular, InsertBuildDecisionBookItemsValidation
|
||||
):
|
||||
token: str
|
||||
info_type_uu_id: str
|
||||
item_comment: str
|
||||
@@ -96,19 +293,64 @@ class InsertBuildDecisionBookItems(BaseModelRegular):
|
||||
# item_objection: Optional[str] = None
|
||||
|
||||
|
||||
class UpdateBuildDecisionBookItems(PydanticBaseModel):
|
||||
class UpdateBuildDecisionBookItemsValidation:
|
||||
tr = {
|
||||
**PydanticBaseModelValidation.tr,
|
||||
"item_comment": "Öğe Yorumu / Açıklama",
|
||||
"item_objection": "Öğe İtirazı",
|
||||
}
|
||||
en = {
|
||||
**PydanticBaseModelValidation.en,
|
||||
"item_comment": "Item Comment",
|
||||
"item_objection": "Item Objection",
|
||||
}
|
||||
|
||||
|
||||
class UpdateBuildDecisionBookItems(
|
||||
PydanticBaseModel, UpdateBuildDecisionBookItemsValidation
|
||||
):
|
||||
item_comment: Optional[str] = None
|
||||
item_objection: Optional[str] = None
|
||||
|
||||
|
||||
class InsertBuildDecisionBookItemDebits(BaseModelRegular):
|
||||
class InsertBuildDecisionBookItemDebitsValidation:
|
||||
tr = {
|
||||
"build_decision_book_item_uu_id": "Karar Defteri Öğe UUID",
|
||||
"dues_values": "Aidat Değerleri",
|
||||
}
|
||||
en = {
|
||||
"build_decision_book_item_uu_id": "Decision Book Item UUID",
|
||||
"dues_values": "Dues Values",
|
||||
}
|
||||
|
||||
|
||||
class InsertBuildDecisionBookItemDebits(
|
||||
BaseModelRegular, InsertBuildDecisionBookItemDebitsValidation
|
||||
):
|
||||
build_decision_book_item_uu_id: str
|
||||
dues_values: dict
|
||||
# dues_types_uu_id: str
|
||||
# decision_taken: Optional[bool] = None
|
||||
|
||||
|
||||
class UpdateBuildDecisionBookItemDebits(PydanticBaseModel):
|
||||
class UpdateBuildDecisionBookItemDebitsValidation:
|
||||
tr = {
|
||||
**PydanticBaseModelValidation.tr,
|
||||
"dues_types_uu_id": "Aidat Tipi UUID",
|
||||
"dues_values": "Aidat Değerleri",
|
||||
"decision_taken": "Karar Alındı mı?",
|
||||
}
|
||||
en = {
|
||||
**PydanticBaseModelValidation.en,
|
||||
"dues_types_uu_id": "Dues Type UUID",
|
||||
"dues_values": "Dues Values",
|
||||
"decision_taken": "Decision Taken?",
|
||||
}
|
||||
|
||||
|
||||
class UpdateBuildDecisionBookItemDebits(
|
||||
PydanticBaseModel, UpdateBuildDecisionBookItemDebitsValidation
|
||||
):
|
||||
dues_types_uu_id: Optional[str] = None
|
||||
dues_values: Optional[dict] = None
|
||||
decision_taken: Optional[bool] = None
|
||||
|
||||
@@ -2,11 +2,29 @@ from typing import Optional
|
||||
from api_validations.core_validations import BaseModelRegular
|
||||
from api_validations.validations_request import (
|
||||
PydanticBaseModel,
|
||||
PydanticBaseModelValidation,
|
||||
ListOptions,
|
||||
)
|
||||
|
||||
|
||||
class DepartmentsPydantic(PydanticBaseModel):
|
||||
class DepartmentsPydanticValidation:
|
||||
tr = {
|
||||
"department_code": "Department Kodu",
|
||||
"department_name": "Departman Adı",
|
||||
"department_description": "Departman Açıklaması",
|
||||
"company_uu_id": "Şirket UUID",
|
||||
"parent_department_uu_id": "Üst Departman UUID",
|
||||
}
|
||||
en = {
|
||||
"department_code": "Department Code",
|
||||
"department_name": "Department Name",
|
||||
"department_description": "Department Description",
|
||||
"company_uu_id": "Company UUID",
|
||||
"parent_department_uu_id": "Parent Department UUID",
|
||||
}
|
||||
|
||||
|
||||
class DepartmentsPydantic(PydanticBaseModel, PydanticBaseModelValidation):
|
||||
|
||||
department_code: Optional[str]
|
||||
department_name: Optional[str]
|
||||
|
||||
@@ -2,55 +2,177 @@ from typing import Optional
|
||||
from api_validations.core_validations import BaseModelRegular
|
||||
from api_validations.validations_request import (
|
||||
PydanticBaseModel,
|
||||
PydanticBaseModelValidation,
|
||||
ListOptions,
|
||||
)
|
||||
|
||||
|
||||
class BindEmployees2People(PydanticBaseModel):
|
||||
class BindEmployees2PeopleValidation:
|
||||
tr = {
|
||||
**PydanticBaseModelValidation.tr,
|
||||
"staff_uu_id": "Kadro UUID",
|
||||
"people_uu_id": "Kişi UUID",
|
||||
"expiry_starts": "Başlangıç Tarihi",
|
||||
}
|
||||
en = {
|
||||
**PydanticBaseModelValidation.en,
|
||||
"staff_uu_id": "Staff UUID",
|
||||
"people_uu_id": "People UUID",
|
||||
"expiry_starts": "Start Date",
|
||||
}
|
||||
|
||||
|
||||
class BindEmployees2People(PydanticBaseModel, BindEmployees2PeopleValidation):
|
||||
staff_uu_id: str
|
||||
people_uu_id: str
|
||||
expiry_starts: Optional[str] = None
|
||||
|
||||
|
||||
class UnBindEmployees2People(PydanticBaseModel):
|
||||
class UnBindEmployees2PeopleValidation:
|
||||
tr = {
|
||||
**PydanticBaseModelValidation.tr,
|
||||
"people_uu_id": "Kişi UUID",
|
||||
"expiry_ends": "Bitiş Tarihi",
|
||||
}
|
||||
en = {
|
||||
**PydanticBaseModelValidation.en,
|
||||
"people_uu_id": "People UUID",
|
||||
"expiry_ends": "End Date",
|
||||
}
|
||||
|
||||
|
||||
class UnBindEmployees2People(PydanticBaseModel, UnBindEmployees2PeopleValidation):
|
||||
people_uu_id: str
|
||||
expiry_ends: str
|
||||
|
||||
|
||||
class InsertEmployees(PydanticBaseModel):
|
||||
class InsertEmployeesValidation:
|
||||
tr = {
|
||||
"staff_uu_id": "Kadro UUID",
|
||||
"people_uu_id": "Kişi UUID",
|
||||
}
|
||||
en = {
|
||||
"staff_uu_id": "Staff UUID",
|
||||
"people_uu_id": "People UUID",
|
||||
}
|
||||
|
||||
|
||||
class InsertEmployees(BaseModelRegular, InsertEmployeesValidation):
|
||||
staff_uu_id: str
|
||||
people_uu_id: Optional[str] = None
|
||||
|
||||
|
||||
class InsertCompanyDuty(PydanticBaseModel):
|
||||
class InsertCompanyDutyValidation:
|
||||
tr = {
|
||||
"duty_code": "Görev Kodu",
|
||||
"duty_name": "Görev Adı",
|
||||
"duty_description": "Görev Açıklaması",
|
||||
}
|
||||
en = {
|
||||
"duty_code": "Duty Code",
|
||||
"duty_name": "Duty Name",
|
||||
"duty_description": "Duty Description",
|
||||
}
|
||||
|
||||
|
||||
class InsertCompanyDuty(BaseModelRegular, InsertCompanyDutyValidation):
|
||||
duty_code: str
|
||||
duty_name: str
|
||||
duty_description: Optional[str] = None
|
||||
|
||||
|
||||
class SelectDuties(PydanticBaseModel):
|
||||
class SelectDutiesValidation:
|
||||
tr = {
|
||||
"duty_uu_id": "Görev UUID",
|
||||
}
|
||||
en = {
|
||||
"duty_uu_id": "Duty UUID",
|
||||
}
|
||||
|
||||
|
||||
class SelectDuties(BaseModelRegular, SelectDutiesValidation):
|
||||
duty_uu_id: Optional[str] = None
|
||||
|
||||
|
||||
class InsertDuties(PydanticBaseModel):
|
||||
class InsertDutiesValidation:
|
||||
tr = {
|
||||
"duties_uu_id": "Görev UUID",
|
||||
"department_uu_id": "Departman UUID",
|
||||
"is_default_duty": "Varsayılan Görev",
|
||||
}
|
||||
en = {
|
||||
"duties_uu_id": "Duty UUID",
|
||||
"department_uu_id": "Department UUID",
|
||||
"is_default_duty": "Default Duty",
|
||||
}
|
||||
|
||||
|
||||
class InsertDuties(BaseModelRegular, InsertDutiesValidation):
|
||||
duties_uu_id: str
|
||||
department_uu_id: str
|
||||
is_default_duty: Optional[bool] = False
|
||||
|
||||
|
||||
class UpdateDutiesValidation:
|
||||
tr = {
|
||||
**PydanticBaseModelValidation.tr,
|
||||
"duties_uu_id": "Görev UUID",
|
||||
"department_uu_id": "Departman UUID",
|
||||
"is_default_duty": "Varsayılan Görev",
|
||||
}
|
||||
en = {
|
||||
**PydanticBaseModelValidation.en,
|
||||
"duties_uu_id": "Duty UUID",
|
||||
"department_uu_id": "Department UUID",
|
||||
"is_default_duty": "Default Duty",
|
||||
}
|
||||
|
||||
|
||||
class UpdateDuties(PydanticBaseModel):
|
||||
duties_uu_id: Optional[str] = None
|
||||
department_uu_id: Optional[str] = None
|
||||
is_default_duty: Optional[bool] = None
|
||||
|
||||
|
||||
class UpdateCompanyDutyValidation:
|
||||
tr = {
|
||||
**PydanticBaseModelValidation.tr,
|
||||
"duty_code": "Görev Kodu",
|
||||
"duty_name": "Görev Adı",
|
||||
"duty_description": "Görev Açıklaması",
|
||||
}
|
||||
en = {
|
||||
**PydanticBaseModelValidation.en,
|
||||
"duty_code": "Duty Code",
|
||||
"duty_name": "Duty Name",
|
||||
"duty_description": "Duty Description",
|
||||
}
|
||||
|
||||
|
||||
class UpdateCompanyDuty(PydanticBaseModel):
|
||||
duty_code: Optional[str] = None
|
||||
duty_name: Optional[str] = None
|
||||
duty_description: Optional[str] = None
|
||||
|
||||
|
||||
class InsertCompanyEmployeesSalaries(PydanticBaseModel):
|
||||
class InsertCompanyEmployeesSalariesValidation:
|
||||
tr = {
|
||||
"gross_salary": "Brüt Maaş",
|
||||
"net_salary": "Net Maaş",
|
||||
"start_date": "Başlangıç Tarihi",
|
||||
"stop_date": "Bitiş Tarihi",
|
||||
"people_id": "Kişi ID",
|
||||
}
|
||||
en = {
|
||||
"gross_salary": "Gross Salary",
|
||||
"net_salary": "Net Salary",
|
||||
"start_date": "Start Date",
|
||||
"stop_date": "Stop Date",
|
||||
"people_id": "People ID",
|
||||
}
|
||||
|
||||
|
||||
class InsertCompanyEmployeesSalaries(BaseModelRegular):
|
||||
gross_salary: float
|
||||
net_salary: float
|
||||
start_date: str
|
||||
@@ -58,6 +180,25 @@ class InsertCompanyEmployeesSalaries(PydanticBaseModel):
|
||||
people_id: int
|
||||
|
||||
|
||||
class UpdateCompanyEmployeesSalariesValidation:
|
||||
tr = {
|
||||
**PydanticBaseModelValidation.tr,
|
||||
"gross_salary": "Brüt Maaş",
|
||||
"net_salary": "Net Maaş",
|
||||
"start_date": "Başlangıç Tarihi",
|
||||
"stop_date": "Bitiş Tarihi",
|
||||
"people_id": "Kişi ID",
|
||||
}
|
||||
en = {
|
||||
**PydanticBaseModelValidation.en,
|
||||
"gross_salary": "Gross Salary",
|
||||
"net_salary": "Net Salary",
|
||||
"start_date": "Start Date",
|
||||
"stop_date": "Stop Date",
|
||||
"people_id": "People ID",
|
||||
}
|
||||
|
||||
|
||||
class UpdateCompanyEmployeesSalaries(PydanticBaseModel):
|
||||
gross_salary: Optional[float] = None
|
||||
net_salary: Optional[float] = None
|
||||
@@ -66,7 +207,24 @@ class UpdateCompanyEmployeesSalaries(PydanticBaseModel):
|
||||
people_id: Optional[int] = None
|
||||
|
||||
|
||||
class InsertCompanyEmployees(PydanticBaseModel):
|
||||
class InsertCompanyEmployeesValidation:
|
||||
tr = {
|
||||
"employee_description": "Çalışan Açıklaması",
|
||||
"person_uu_id": "Kişi UUID",
|
||||
"duty_uu_id": "Görev UUID",
|
||||
"start_date": "Başlangıç Tarihi",
|
||||
"stop_date": "Bitiş Tarihi",
|
||||
}
|
||||
en = {
|
||||
"employee_description": "Employee Description",
|
||||
"person_uu_id": "Person UUID",
|
||||
"duty_uu_id": "Duty UUID",
|
||||
"start_date": "Start Date",
|
||||
"stop_date": "Stop Date",
|
||||
}
|
||||
|
||||
|
||||
class InsertCompanyEmployees(BaseModelRegular, InsertCompanyEmployeesValidation):
|
||||
|
||||
employee_description: Optional[str] = None
|
||||
person_uu_id: str
|
||||
@@ -76,6 +234,19 @@ class InsertCompanyEmployees(PydanticBaseModel):
|
||||
stop_date: Optional[str] = None
|
||||
|
||||
|
||||
class UpdateCompanyEmployees(PydanticBaseModel):
|
||||
class UpdateCompanyEmployeesValidation:
|
||||
tr = {
|
||||
**PydanticBaseModelValidation.tr,
|
||||
"stop_date": "Bitiş Tarihi",
|
||||
"employee_description": "Çalışan Açıklaması",
|
||||
}
|
||||
en = {
|
||||
**PydanticBaseModelValidation.en,
|
||||
"stop_date": "Stop Date",
|
||||
"employee_description": "Employee Description",
|
||||
}
|
||||
|
||||
|
||||
class UpdateCompanyEmployees(PydanticBaseModel, UpdateCompanyEmployeesValidation):
|
||||
stop_date: Optional[str] = None
|
||||
employee_description: Optional[str] = None
|
||||
|
||||
@@ -6,19 +6,36 @@ from api_validations.validations_request import (
|
||||
)
|
||||
|
||||
|
||||
class CreateEvents(PydanticBaseModel):
|
||||
event_name: Optional[str] = None
|
||||
event_description: Optional[str] = None
|
||||
event_date: Optional[str] = None
|
||||
event_location: Optional[str] = None
|
||||
class RegisterEvents2EmployeeValidation:
|
||||
tr = {
|
||||
"event_uu_id_list": "Etkinlikler Listesi",
|
||||
"employee_uu_id": "Çalışan UU ID",
|
||||
}
|
||||
en = {
|
||||
"event_uu_id_list": "Event List",
|
||||
"employee_uu_id": "Employee UU ID",
|
||||
}
|
||||
|
||||
|
||||
class RegisterEvents2Employee(PydanticBaseModel):
|
||||
event_uu_id_list: list[str] = []
|
||||
class RegisterEvents2Employee(BaseModelRegular, RegisterEvents2EmployeeValidation):
|
||||
event_uu_id_list: list[str] = None
|
||||
employee_uu_id: Optional[str] = None
|
||||
|
||||
|
||||
class RegisterEvents2Occupant(PydanticBaseModel):
|
||||
event_uu_id_list: list[str] = []
|
||||
class RegisterEvents2OccupantValidation:
|
||||
tr = {
|
||||
"event_uu_id_list": "Etkinlikler Listesi",
|
||||
"build_part_uu_id": "Bina Parça UU ID",
|
||||
"occupant_uu_id": "Apartman Sakini UU ID",
|
||||
}
|
||||
en = {
|
||||
"event_uu_id_list": "Event List",
|
||||
"build_part_uu_id": "Building Part UU ID",
|
||||
"occupant_uu_id": "Occupant UU ID",
|
||||
}
|
||||
|
||||
|
||||
class RegisterEvents2Occupant(BaseModelRegular, RegisterEvents2OccupantValidation):
|
||||
event_uu_id_list: list[str] = None
|
||||
build_part_uu_id: Optional[str] = None
|
||||
occupant_uu_id: Optional[str] = None
|
||||
|
||||
@@ -5,12 +5,36 @@ from api_validations.validations_request import (
|
||||
)
|
||||
|
||||
|
||||
class RegisterModules2Occupant(BaseModelRegular):
|
||||
class RegisterModules2OccupantValidation:
|
||||
tr = {
|
||||
"modules_uu_id": "Modül Listesi",
|
||||
"occupant_uu_id": "Mülk Sahibi",
|
||||
"build_part_uu_id": "Daire UUID",
|
||||
}
|
||||
en = {
|
||||
"modules_uu_id": "Module List",
|
||||
"occupant_uu_id": "Occupant",
|
||||
"build_part_uu_id": "Flat UUID",
|
||||
}
|
||||
|
||||
|
||||
class RegisterModules2Occupant(BaseModelRegular, RegisterModules2OccupantValidation):
|
||||
modules_uu_id: str
|
||||
occupant_uu_id: str
|
||||
build_part_uu_id: str
|
||||
|
||||
|
||||
class RegisterModules2Employee(BaseModelRegular):
|
||||
class RegisterModules2EmployeeValidation:
|
||||
tr = {
|
||||
"modules_uu_id": "Modül Listesi",
|
||||
"employee_uu_id": "Çalışan",
|
||||
}
|
||||
en = {
|
||||
"modules_uu_id": "Module List",
|
||||
"employee_uu_id": "Employee",
|
||||
}
|
||||
|
||||
|
||||
class RegisterModules2Employee(BaseModelRegular, RegisterModules2EmployeeValidation):
|
||||
modules_uu_id: str
|
||||
employee_uu_id: str
|
||||
|
||||
@@ -2,11 +2,29 @@ from typing import Optional
|
||||
from api_validations.core_validations import BaseModelRegular
|
||||
from api_validations.validations_request import (
|
||||
PydanticBaseModel,
|
||||
PydanticBaseModelValidation,
|
||||
ListOptions,
|
||||
)
|
||||
|
||||
|
||||
class InsertPerson(BaseModelRegular):
|
||||
class InsertPersonValidation:
|
||||
tr = {
|
||||
"firstname": "İsim",
|
||||
"surname": "Soyisim",
|
||||
"sex_code": "Cinsiyet",
|
||||
"national_identity_id": "T.C. Kimlik Numarası",
|
||||
"middle_name": "Orta İsim",
|
||||
"father_name": "Baba Adı",
|
||||
"mother_name": "Anne Adı",
|
||||
"country_code": "Ülke Kodu",
|
||||
"birth_place": "Doğum Yeri",
|
||||
"birth_date": "Doğum Tarihi",
|
||||
"tax_no": "Vergi Numarası",
|
||||
"ref_id": "Referans ID",
|
||||
}
|
||||
|
||||
|
||||
class InsertPerson(BaseModelRegular, InsertPersonValidation):
|
||||
firstname: str
|
||||
surname: str
|
||||
sex_code: str
|
||||
@@ -21,17 +39,18 @@ class InsertPerson(BaseModelRegular):
|
||||
ref_id: Optional[str] = None
|
||||
|
||||
|
||||
class ResponsePersonSalesMange(PydanticBaseModel):
|
||||
uu_id: str
|
||||
firstname: str
|
||||
surname: str
|
||||
national_identity_id: str
|
||||
birth_place: Optional[str] = None
|
||||
birth_date: Optional[str] = None
|
||||
tax_no: Optional[str] = None
|
||||
class UpdatePersonValidation:
|
||||
tr = {
|
||||
**PydanticBaseModelValidation.tr,
|
||||
**InsertPersonValidation.tr,
|
||||
}
|
||||
en = {
|
||||
**PydanticBaseModelValidation.en,
|
||||
**InsertPersonValidation.tr,
|
||||
}
|
||||
|
||||
|
||||
class UpdatePerson(PydanticBaseModel):
|
||||
class UpdatePerson(PydanticBaseModel, UpdatePersonValidation):
|
||||
firstname: Optional[str] = None
|
||||
surname: Optional[str] = None
|
||||
middle_name: Optional[str]
|
||||
|
||||
@@ -2,10 +2,30 @@ from typing import Optional
|
||||
from api_validations.core_validations import BaseModelRegular
|
||||
from api_validations.validations_request import (
|
||||
PydanticBaseModel,
|
||||
PydanticBaseModelValidation,
|
||||
)
|
||||
|
||||
|
||||
class InsertBuildDecisionBookProjectItems(BaseModelRegular):
|
||||
class InsertBuildDecisionBookProjectItemsValidation:
|
||||
tr = {
|
||||
"build_decision_book_project_uu_id": "Proje UUID",
|
||||
"item_header": "Başlık",
|
||||
"item_comment": "Açıklama",
|
||||
"attachment_pdf_path": "Ek Dosya Yolu",
|
||||
"item_objection": "İtiraz",
|
||||
}
|
||||
en = {
|
||||
"build_decision_book_project_uu_id": "Project UUID",
|
||||
"item_header": "Header",
|
||||
"item_comment": "Comment",
|
||||
"attachment_pdf_path": "Attachment PDF Path",
|
||||
"item_objection": "Objection",
|
||||
}
|
||||
|
||||
|
||||
class InsertBuildDecisionBookProjectItems(
|
||||
BaseModelRegular, InsertBuildDecisionBookProjectItemsValidation
|
||||
):
|
||||
build_decision_book_project_uu_id: str
|
||||
item_header: str
|
||||
item_comment: str
|
||||
@@ -13,6 +33,17 @@ class InsertBuildDecisionBookProjectItems(BaseModelRegular):
|
||||
item_objection: Optional[str] = None
|
||||
|
||||
|
||||
class UpdateBuildDecisionBookProjectItemsValidation:
|
||||
tr = {
|
||||
**InsertBuildDecisionBookProjectItemsValidation.tr,
|
||||
**PydanticBaseModelValidation.tr,
|
||||
}
|
||||
en = {
|
||||
**InsertBuildDecisionBookProjectItemsValidation.en,
|
||||
**PydanticBaseModelValidation.en,
|
||||
}
|
||||
|
||||
|
||||
class UpdateBuildDecisionBookProjectItems(PydanticBaseModel):
|
||||
item_header: Optional[str] = None
|
||||
item_comment: Optional[str] = None
|
||||
@@ -21,7 +52,30 @@ class UpdateBuildDecisionBookProjectItems(PydanticBaseModel):
|
||||
build_decision_book_project_uu_id: Optional[str] = None
|
||||
|
||||
|
||||
class InsertBuildDecisionBookProjectPerson(BaseModelRegular):
|
||||
class InsertBuildDecisionBookProjectPersonValidation:
|
||||
tr = {
|
||||
"dues_percent_discount": "İskonto Oranı",
|
||||
"job_fix_wage": "Sabit Ücret",
|
||||
"bid_price": "Teklif Fiyatı",
|
||||
"decision_price": "Karar Fiyatı",
|
||||
"build_decision_book_project_uu_id": "Proje UUID",
|
||||
"living_space_uu_id": "Yaşam Alanı UUID",
|
||||
"project_team_type_uu_id": "Proje Takım Tipi UUID",
|
||||
}
|
||||
en = {
|
||||
"dues_percent_discount": "Discount Rate",
|
||||
"job_fix_wage": "Fixed Wage",
|
||||
"bid_price": "Bid Price",
|
||||
"decision_price": "Decision Price",
|
||||
"build_decision_book_project_uu_id": "Project UUID",
|
||||
"living_space_uu_id": "Living Space UUID",
|
||||
"project_team_type_uu_id": "Project Team Type UUID",
|
||||
}
|
||||
|
||||
|
||||
class InsertBuildDecisionBookProjectPerson(
|
||||
BaseModelRegular, InsertBuildDecisionBookProjectPersonValidation
|
||||
):
|
||||
dues_percent_discount: Optional[int] = None
|
||||
job_fix_wage: Optional[float] = None
|
||||
bid_price: Optional[float] = None
|
||||
@@ -31,7 +85,20 @@ class InsertBuildDecisionBookProjectPerson(BaseModelRegular):
|
||||
project_team_type_uu_id: str
|
||||
|
||||
|
||||
class UpdateBuildDecisionBookProjectPerson(PydanticBaseModel):
|
||||
class UpdateBuildDecisionBookProjectPersonValidation:
|
||||
tr = {
|
||||
**InsertBuildDecisionBookProjectPersonValidation.tr,
|
||||
**PydanticBaseModelValidation.tr,
|
||||
}
|
||||
en = {
|
||||
**InsertBuildDecisionBookProjectPersonValidation.en,
|
||||
**PydanticBaseModelValidation.en,
|
||||
}
|
||||
|
||||
|
||||
class UpdateBuildDecisionBookProjectPerson(
|
||||
PydanticBaseModel, UpdateBuildDecisionBookProjectPersonValidation
|
||||
):
|
||||
dues_percent_discount: Optional[int] = None
|
||||
job_fix_wage: Optional[float] = None
|
||||
bid_price: Optional[float] = None
|
||||
@@ -41,7 +108,48 @@ class UpdateBuildDecisionBookProjectPerson(PydanticBaseModel):
|
||||
project_team_type_uu_id: Optional[str] = None
|
||||
|
||||
|
||||
class InsertBuildDecisionBookProjects(BaseModelRegular):
|
||||
class InsertBuildDecisionBookProjectsValidation:
|
||||
tr = {
|
||||
"build_decision_book_item_uu_id": "Karar Defteri UUID",
|
||||
"project_responsible_person_uu_id": "Proje Sorumlu Kişi UUID",
|
||||
"project_name": "Proje Adı",
|
||||
"project_start_date": "Proje Başlangıç Tarihi",
|
||||
"project_stop_date": "Proje Bitiş Tarihi",
|
||||
"project_type": "Proje Tipi",
|
||||
"is_out_sourced": "Dış Kaynak Kullanımı",
|
||||
"project_note": "Proje Notu",
|
||||
"decision_book_pdf_path": "Karar Defteri PDF Yolu",
|
||||
"resp_company_fix_wage": "Firma Sabit Ücreti",
|
||||
"contact_agreement_path": "İletişim Anlaşması Yolu",
|
||||
"contact_agreement_date": "İletişim Anlaşması Tarihi",
|
||||
"meeting_date": "Toplantı Tarihi",
|
||||
"currency": "Para Birimi",
|
||||
"bid_price": "Teklif Fiyatı",
|
||||
"resp_company_uu_id": "Firma UUID",
|
||||
}
|
||||
en = {
|
||||
"build_decision_book_item_uu_id": "Decision Book UUID",
|
||||
"project_responsible_person_uu_id": "Project Responsible Person UUID",
|
||||
"project_name": "Project Name",
|
||||
"project_start_date": "Project Start Date",
|
||||
"project_stop_date": "Project Stop Date",
|
||||
"project_type": "Project Type",
|
||||
"is_out_sourced": "Out Sourced",
|
||||
"project_note": "Project Note",
|
||||
"decision_book_pdf_path": "Decision Book PDF Path",
|
||||
"resp_company_fix_wage": "Company Fixed Wage",
|
||||
"contact_agreement_path": "Contact Agreement Path",
|
||||
"contact_agreement_date": "Contact Agreement Date",
|
||||
"meeting_date": "Meeting Date",
|
||||
"currency": "Currency",
|
||||
"bid_price": "Bid Price",
|
||||
"resp_company_uu_id": "Company UUID",
|
||||
}
|
||||
|
||||
|
||||
class InsertBuildDecisionBookProjects(
|
||||
BaseModelRegular, InsertBuildDecisionBookProjectsValidation
|
||||
):
|
||||
build_decision_book_item_uu_id: str
|
||||
project_responsible_person_uu_id: str
|
||||
project_name: str
|
||||
@@ -61,7 +169,20 @@ class InsertBuildDecisionBookProjects(BaseModelRegular):
|
||||
resp_company_uu_id: Optional[str] = None
|
||||
|
||||
|
||||
class UpdateBuildDecisionBookProjects(PydanticBaseModel):
|
||||
class UpdateBuildDecisionBookProjectsValidation:
|
||||
tr = {
|
||||
**InsertBuildDecisionBookProjectsValidation.tr,
|
||||
**PydanticBaseModelValidation.tr,
|
||||
}
|
||||
en = {
|
||||
**InsertBuildDecisionBookProjectsValidation.en,
|
||||
**PydanticBaseModelValidation.en,
|
||||
}
|
||||
|
||||
|
||||
class UpdateBuildDecisionBookProjects(
|
||||
PydanticBaseModel, UpdateBuildDecisionBookProjectsValidation
|
||||
):
|
||||
build_decision_book_project_uu_id: str
|
||||
is_out_sourced: Optional[bool] = False
|
||||
project_note: Optional[str] = None
|
||||
@@ -75,14 +196,56 @@ class UpdateBuildDecisionBookProjects(PydanticBaseModel):
|
||||
approved_price: Optional[float] = None
|
||||
|
||||
|
||||
class ApprovalsBuildDecisionBookProjects(PydanticBaseModel):
|
||||
class ApprovalsBuildDecisionBookProjectsValidation:
|
||||
tr = {
|
||||
"build_decision_book_project_uu_id": "Karar Defteri Proje UUID",
|
||||
"project_stop_date": "Proje Bitiş Tarihi",
|
||||
"status_code": "Durum Kodu",
|
||||
"final_price_list": "Son Fiyat Listesi",
|
||||
}
|
||||
en = {
|
||||
"build_decision_book_project_uu_id": "Decision Book Project UUID",
|
||||
"project_stop_date": "Project Stop Date",
|
||||
"status_code": "Status Code",
|
||||
"final_price_list": "Final Price List",
|
||||
}
|
||||
|
||||
|
||||
class ApprovalsBuildDecisionBookProjects(
|
||||
PydanticBaseModel, ApprovalsBuildDecisionBookProjectsValidation
|
||||
):
|
||||
build_decision_book_project_uu_id: str
|
||||
project_stop_date: str
|
||||
status_code: Optional[int] = None
|
||||
final_price_list: Optional[list[dict]] = None # {"date": "2021-01-01", "price": 1000}
|
||||
final_price_list: Optional[list[dict]] = (
|
||||
None # {"date": "2021-01-01", "price": 1000}
|
||||
)
|
||||
|
||||
|
||||
class InsertBuildDecisionBookProjectItemDebits(PydanticBaseModel):
|
||||
class InsertBuildDecisionBookProjectItemDebitsValidation:
|
||||
tr = {
|
||||
"build_decision_book_project_item_uu_id": "Karar Defteri Proje Öğe UUID",
|
||||
"payment_date": "Ödeme Tarihi",
|
||||
"dues_values": "Borç Değerleri",
|
||||
"is_official": "Resmi Mi?",
|
||||
"discount_value": "İskonto Oranı",
|
||||
"discount_fix": "İskonto Sabit",
|
||||
"decision_taken": "Karar Alındı Mı?",
|
||||
}
|
||||
en = {
|
||||
"build_decision_book_project_item_uu_id": "Decision Book Project Item UUID",
|
||||
"payment_date": "Payment Date",
|
||||
"dues_values": "Dues Values",
|
||||
"is_official": "Is Official?",
|
||||
"discount_value": "Discount Rate",
|
||||
"discount_fix": "Discount Fix",
|
||||
"decision_taken": "Decision Taken?",
|
||||
}
|
||||
|
||||
|
||||
class InsertBuildDecisionBookProjectItemDebits(
|
||||
PydanticBaseModel, InsertBuildDecisionBookProjectItemDebitsValidation
|
||||
):
|
||||
build_decision_book_project_item_uu_id: str
|
||||
payment_date: str
|
||||
dues_values: dict
|
||||
@@ -92,7 +255,20 @@ class InsertBuildDecisionBookProjectItemDebits(PydanticBaseModel):
|
||||
decision_taken: Optional[bool] = None
|
||||
|
||||
|
||||
class UpdateBuildDecisionBookProjectItemDebits(PydanticBaseModel):
|
||||
class UpdateBuildDecisionBookProjectItemDebitsValidation:
|
||||
tr = {
|
||||
**InsertBuildDecisionBookProjectItemDebitsValidation.tr,
|
||||
**PydanticBaseModelValidation.tr,
|
||||
}
|
||||
en = {
|
||||
**InsertBuildDecisionBookProjectItemDebitsValidation.en,
|
||||
**PydanticBaseModelValidation.en,
|
||||
}
|
||||
|
||||
|
||||
class UpdateBuildDecisionBookProjectItemDebits(
|
||||
PydanticBaseModel, UpdateBuildDecisionBookProjectItemDebitsValidation
|
||||
):
|
||||
dues_values: Optional[str] = None
|
||||
discount_value: Optional[float] = None
|
||||
discount_fix: Optional[float] = None
|
||||
|
||||
@@ -6,6 +6,9 @@ from api_validations.validations_request import (
|
||||
)
|
||||
|
||||
|
||||
class CheckEndpointAccess(BaseModelRegular):
|
||||
endpoint: str
|
||||
|
||||
class InsertEndpointAccess(PydanticBaseModel):
|
||||
duty_uu_id: str
|
||||
endpoint_restriction_list_uu_ids: list
|
||||
|
||||
@@ -5,12 +5,36 @@ from api_validations.validations_request import (
|
||||
)
|
||||
|
||||
|
||||
class RegisterServices2Occupant(BaseModelRegular):
|
||||
class RegisterServices2OccupantValidation:
|
||||
tr = {
|
||||
"service_uu_id": "Hizmet UUID",
|
||||
"occupant_uu_id": "Müşteri UUID",
|
||||
"build_part_uu_id": "Bina Parça UUID",
|
||||
}
|
||||
en = {
|
||||
"service_uu_id": "Service UUID",
|
||||
"occupant_uu_id": "Occupant UUID",
|
||||
"build_part_uu_id": "Building Part UUID",
|
||||
}
|
||||
|
||||
|
||||
class RegisterServices2Occupant(BaseModelRegular, RegisterServices2OccupantValidation):
|
||||
service_uu_id: str
|
||||
occupant_uu_id: str
|
||||
build_part_uu_id: str
|
||||
|
||||
|
||||
class RegisterServices2Employee(BaseModelRegular):
|
||||
class RegisterServices2EmployeeValidation:
|
||||
tr = {
|
||||
"service_uu_id": "Hizmet UUID",
|
||||
"employee_uu_id": "Personel UUID",
|
||||
}
|
||||
en = {
|
||||
"service_uu_id": "Service UUID",
|
||||
"employee_uu_id": "Employee UUID",
|
||||
}
|
||||
|
||||
|
||||
class RegisterServices2Employee(BaseModelRegular, RegisterServices2EmployeeValidation):
|
||||
service_uu_id: str
|
||||
employee_uu_id: str
|
||||
|
||||
@@ -6,15 +6,38 @@ from api_validations.validations_request import (
|
||||
)
|
||||
|
||||
|
||||
class InsertStaff(PydanticBaseModel):
|
||||
class InsertStaffValidation:
|
||||
tr = {
|
||||
"staff_name": "Kadro Adı",
|
||||
"staff_description": "Kadro Açıklaması",
|
||||
"staff_code": "Kadro Kodu",
|
||||
"duties_uu_id": "Görev UUID",
|
||||
}
|
||||
en = {
|
||||
"staff_name": "Staff Name",
|
||||
"staff_description": "Staff Description",
|
||||
"staff_code": "Staff Code",
|
||||
"duties_uu_id": "Duties UUID",
|
||||
}
|
||||
|
||||
|
||||
class InsertStaff(BaseModelRegular, InsertStaffValidation):
|
||||
|
||||
staff_name: str
|
||||
staff_description: Optional[str] = None
|
||||
staff_code: Optional[str] = None
|
||||
|
||||
duties_uu_id: str
|
||||
|
||||
|
||||
class SelectStaff(PydanticBaseModel):
|
||||
class SelectStaffValidation:
|
||||
tr = {
|
||||
"duties_uu_id": "Görev UUID",
|
||||
}
|
||||
en = {
|
||||
"duties_uu_id": "Duties UUID",
|
||||
}
|
||||
|
||||
|
||||
class SelectStaff(PydanticBaseModel, SelectStaffValidation):
|
||||
|
||||
duties_uu_id: str
|
||||
|
||||
@@ -6,7 +6,24 @@ from api_validations.validations_request import (
|
||||
)
|
||||
|
||||
|
||||
class InsertUsers(PydanticBaseModel):
|
||||
class InsertUsersValidation:
|
||||
tr = {
|
||||
"people_uu_id": "Kişi UUID",
|
||||
"user_tag": "Kullanıcı Etiketi",
|
||||
"email": "E-posta",
|
||||
"phone_number": "Telefon Numarası",
|
||||
"avatar": "Avatar",
|
||||
}
|
||||
en = {
|
||||
"people_uu_id": "People UUID",
|
||||
"user_tag": "User Tag",
|
||||
"email": "Email",
|
||||
"phone_number": "Phone Number",
|
||||
"avatar": "Avatar",
|
||||
}
|
||||
|
||||
|
||||
class InsertUsers(PydanticBaseModel, InsertUsersValidation):
|
||||
people_uu_id: str
|
||||
user_tag: str
|
||||
email: Optional[str] = None
|
||||
@@ -14,7 +31,26 @@ class InsertUsers(PydanticBaseModel):
|
||||
avatar: Optional[str] = None
|
||||
|
||||
|
||||
class UpdateUsers(PydanticBaseModel):
|
||||
class UpdateUsersValidation:
|
||||
tr = {
|
||||
"people_uu_id": "Kişi UUID",
|
||||
"nick_name": "Kullanıcı Etiketi",
|
||||
"domain_name": "Domain Adı",
|
||||
"email": "E-posta",
|
||||
"phone_number": "Telefon Numarası",
|
||||
"avatar": "Avatar",
|
||||
}
|
||||
en = {
|
||||
"people_uu_id": "People UUID",
|
||||
"nick_name": "User Tag",
|
||||
"domain_name": "Domain Name",
|
||||
"email": "Email",
|
||||
"phone_number": "Phone Number",
|
||||
"avatar": "Avatar",
|
||||
}
|
||||
|
||||
|
||||
class UpdateUsers(PydanticBaseModel, UpdateUsersValidation):
|
||||
people_uu_id: Optional[str] = None
|
||||
nick_name: Optional[str] = None
|
||||
domain_name: Optional[str] = None
|
||||
@@ -23,23 +59,14 @@ class UpdateUsers(PydanticBaseModel):
|
||||
avatar: Optional[str] = None
|
||||
|
||||
|
||||
class QueryUsersValidation:
|
||||
tr = {
|
||||
"uu_id": "UUID",
|
||||
}
|
||||
en = {
|
||||
"uu_id": "UUID",
|
||||
}
|
||||
|
||||
|
||||
class QueryUsers(PydanticBaseModel):
|
||||
uu_id: Optional[str] = None
|
||||
|
||||
|
||||
class DeleteUsers(PydanticBaseModel):
|
||||
key_id: Optional[str] = None
|
||||
query: Optional[List[QueryUsers]] = None
|
||||
data: Optional[dict] = None
|
||||
|
||||
|
||||
class ListUsers(PydanticBaseModel):
|
||||
key_id: Optional[str] = None
|
||||
query: Optional[QueryUsers] = None
|
||||
data: Optional[ListOptions] = None
|
||||
|
||||
|
||||
class ActiveUsers(PydanticBaseModel):
|
||||
key_id: Optional[str] = None
|
||||
query: Optional[List[QueryUsers]] = None
|
||||
data: Optional[dict] = None
|
||||
|
||||
@@ -0,0 +1,5 @@
|
||||
from .building import ListBuildingResponse
|
||||
|
||||
__all__ = [
|
||||
"ListBuildingResponse",
|
||||
]
|
||||
|
||||
138
api_validations/validations_response/account.py
Normal file
138
api_validations/validations_response/account.py
Normal file
@@ -0,0 +1,138 @@
|
||||
from typing import Optional
|
||||
from api_validations.core_validations import BaseModelRegular
|
||||
from api_validations.validations_request import (
|
||||
PydanticBaseModel,
|
||||
PydanticBaseModelValidation,
|
||||
CrudRecordValidation,
|
||||
CrudRecords,
|
||||
)
|
||||
|
||||
|
||||
class AccountListValidation:
|
||||
tr = {
|
||||
**CrudRecordValidation.tr,
|
||||
"iban": "IBAN Numarası",
|
||||
"bank_date": "Banka İşlem Tarihi",
|
||||
"currency_value": "Para Birimi Değeri",
|
||||
"bank_balance": "Banka Bakiyesi",
|
||||
"currency": "Para Birimi Birimi",
|
||||
"additional_balance": "Ek Bakiye",
|
||||
"channel_branch": "Şube Banka",
|
||||
"process_name": "Banka İşlem Türü Adı",
|
||||
"process_type": "Banka İşlem Türü",
|
||||
"process_comment": "İşlem Kayıt Yorumu",
|
||||
"process_garbage": "İşlem Kayıt Çöpü",
|
||||
"bank_reference_code": "Banka Referans Kodu",
|
||||
"add_comment_note": "Yorum Not Ekle",
|
||||
"is_receipt_mail_send": "Makbuz Posta Gönderildi",
|
||||
"found_from": "Bulunduğu Yer",
|
||||
"similarity": "Benzerlik",
|
||||
"remainder_balance": "Kalan Bakiye",
|
||||
"bank_date_y": "Banka İşlem Yılı",
|
||||
"bank_date_m": "Banka İşlem Ayı",
|
||||
"bank_date_w": "Banka İşlem Haftası",
|
||||
"bank_date_d": "Banka İşlem Günü",
|
||||
"approving_accounting_record": "Onaylayan Muhasebe Kaydı",
|
||||
"accounting_receipt_date": "Muhasebe Makbuz Tarihi",
|
||||
"accounting_receipt_number": "Muhasebe Makbuz Numarası",
|
||||
"approved_record": "Onaylanmış Kayıt",
|
||||
"import_file_name": "İçe Aktarım Dosya Adı",
|
||||
"receive_debit": "Alacak Borç",
|
||||
"receive_debit_uu_id": "Alacak Borç UU Kimliği",
|
||||
"budget_type": "Bütçe Türü",
|
||||
"budget_type_uu_id": "Bütçe Türü UU Kimliği",
|
||||
"company_uu_id": "Şirket UU Kimliği",
|
||||
"send_company_uu_id": "Şirket UU Kimliği Gönder",
|
||||
"send_person_uu_id": "Kişi UU Kimliği Gönder",
|
||||
"approving_accounting_person_uu_id": "Onaylayan Muhasebe Kişi UU Kimliği",
|
||||
"living_space_uu_id": "Yaşam Alanı UU Kimliği",
|
||||
"customer_uu_id": "Müşteri UU Kimliği",
|
||||
"build_uu_id": "Yapı UU Kimliği",
|
||||
"build_parts_uu_id": "Yapı Parça UU Kimliği",
|
||||
"build_decision_book_uu_id": "Yapı Karar Defteri UU Kimliği",
|
||||
}
|
||||
en = {
|
||||
**CrudRecordValidation.en,
|
||||
"iban": "IBAN Number",
|
||||
"bank_date": "Bank Transaction Date",
|
||||
"currency_value": "Currency Value",
|
||||
"bank_balance": "Bank Balance",
|
||||
"currency": "Unit of Currency",
|
||||
"additional_balance": "Additional Balance",
|
||||
"channel_branch": "Branch Bank",
|
||||
"process_name": "Bank Process Type Name",
|
||||
"process_type": "Bank Process Type",
|
||||
"process_comment": "Transaction Record Comment",
|
||||
"process_garbage": "Transaction Record Garbage",
|
||||
"bank_reference_code": "Bank Reference Code",
|
||||
"add_comment_note": "Add Comment Note",
|
||||
"is_receipt_mail_send": "Receipt Mail Send",
|
||||
"found_from": "Found From",
|
||||
"similarity": "Similarity",
|
||||
"remainder_balance": "Remainder Balance",
|
||||
"bank_date_y": "Bank Date Year",
|
||||
"bank_date_m": "Bank Date Month",
|
||||
"bank_date_w": "Bank Date Week",
|
||||
"bank_date_d": "Bank Date Day",
|
||||
"approving_accounting_record": "Approving Accounting Record",
|
||||
"accounting_receipt_date": "Accounting Receipt Date",
|
||||
"accounting_receipt_number": "Accounting Receipt Number",
|
||||
"approved_record": "Approved Record",
|
||||
"import_file_name": "Import File Name",
|
||||
"receive_debit": "Receive Debit",
|
||||
"receive_debit_uu_id": "Receive Debit UU ID",
|
||||
"budget_type": "Budget Type",
|
||||
"budget_type_uu_id": "Budget Type UU ID",
|
||||
"company_uu_id": "Company UU ID",
|
||||
"send_company_uu_id": "Send Company UU ID",
|
||||
"send_person_uu_id": "Send Person UU ID",
|
||||
"approving_accounting_person_uu_id": "Approving Accounting Person UU ID",
|
||||
"living_space_uu_id": "Living Space UU ID",
|
||||
"customer_uu_id": "Customer UU ID",
|
||||
"build_uu_id": "Build UU ID",
|
||||
"build_parts_uu_id": "Build Parts UU ID",
|
||||
"build_decision_book_uu_id": "Build Decision Book UU ID",
|
||||
}
|
||||
|
||||
|
||||
class AccountListResponse(BaseModelRegular, CrudRecords, AccountListValidation):
|
||||
|
||||
iban: Optional[str] = None
|
||||
bank_date: Optional[str] = None
|
||||
currency_value: Optional[str] = None
|
||||
bank_balance: Optional[str] = None
|
||||
currency: Optional[str] = None
|
||||
additional_balance: Optional[str] = None
|
||||
channel_branch: Optional[str] = None
|
||||
process_name: Optional[str] = None
|
||||
process_type: Optional[str] = None
|
||||
process_comment: Optional[str] = None
|
||||
process_garbage: Optional[str] = None
|
||||
bank_reference_code: Optional[str] = None
|
||||
add_comment_note: Optional[str] = None
|
||||
is_receipt_mail_send: Optional[str] = None
|
||||
found_from: Optional[str] = None
|
||||
similarity: Optional[str] = None
|
||||
remainder_balance: Optional[str] = None
|
||||
bank_date_y: Optional[str] = None
|
||||
bank_date_m: Optional[str] = None
|
||||
bank_date_w: Optional[str] = None
|
||||
bank_date_d: Optional[str] = None
|
||||
approving_accounting_record: Optional[str] = None
|
||||
accounting_receipt_date: Optional[str] = None
|
||||
accounting_receipt_number: Optional[str] = None
|
||||
approved_record: Optional[str] = None
|
||||
import_file_name: Optional[str] = None
|
||||
receive_debit: Optional[str] = None
|
||||
receive_debit_uu_id: Optional[str] = None
|
||||
budget_type: Optional[str] = None
|
||||
budget_type_uu_id: Optional[str] = None
|
||||
company_uu_id: Optional[str] = None
|
||||
send_company_uu_id: Optional[str] = None
|
||||
send_person_uu_id: Optional[str] = None
|
||||
approving_accounting_person_uu_id: Optional[str] = None
|
||||
living_space_uu_id: Optional[str] = None
|
||||
customer_uu_id: Optional[str] = None
|
||||
build_uu_id: Optional[str] = None
|
||||
build_parts_uu_id: Optional[str] = None
|
||||
build_decision_book_uu_id: Optional[str] = None
|
||||
30
api_validations/validations_response/address.py
Normal file
30
api_validations/validations_response/address.py
Normal file
@@ -0,0 +1,30 @@
|
||||
from typing import Optional
|
||||
from api_validations.core_validations import BaseModelRegular
|
||||
from api_validations.validations_request import (
|
||||
PydanticBaseModel,
|
||||
PydanticBaseModelValidation,
|
||||
CrudRecordValidation,
|
||||
CrudRecords,
|
||||
)
|
||||
|
||||
|
||||
class ListAddressResponse:
|
||||
pass
|
||||
|
||||
|
||||
"""
|
||||
street_id: Mapped[int] = mapped_column(ForeignKey("address_street.id"))
|
||||
street_uu_id: Mapped[str] = mapped_column(
|
||||
String, server_default="", comment="Street UUID"
|
||||
)
|
||||
postcode: Mapped[str] = mapped_column(
|
||||
String(32), nullable=False, comment="Postcode"
|
||||
)
|
||||
|
||||
"""
|
||||
|
||||
|
||||
class AddressPostCodeResponse:
|
||||
street_id: Optional[int] = None
|
||||
street_uu_id: Optional[str] = None
|
||||
postcode: Optional[str] = None
|
||||
125
api_validations/validations_response/building.py
Normal file
125
api_validations/validations_response/building.py
Normal file
@@ -0,0 +1,125 @@
|
||||
from typing import Optional
|
||||
from api_validations.core_validations import BaseModelRegular
|
||||
from api_validations.validations_request import (
|
||||
PydanticBaseModel,
|
||||
PydanticBaseModelValidation,
|
||||
CrudRecordValidation,
|
||||
CrudRecords,
|
||||
)
|
||||
|
||||
|
||||
class BuildListValidation:
|
||||
tr = {
|
||||
**CrudRecordValidation.tr,
|
||||
"gov_address_code": "Devlet Adres Kodu",
|
||||
"build_name": "Bina Adı",
|
||||
"build_types_uu_id": "Bina Tipi",
|
||||
"build_no": "Bina No",
|
||||
"max_floor": "Kat Sayısı",
|
||||
"underground_floor": "Bodrum Kat Sayısı",
|
||||
"address_uu_id": "Adres",
|
||||
"build_date": "Yapım Tarihi",
|
||||
"decision_period_date": "Karar Tarihi",
|
||||
"tax_no": "Vergi No",
|
||||
"lift_count": "Asansör Sayısı",
|
||||
"heating_system": "Isıtma Sistemi",
|
||||
"cooling_system": "Soğutma Sistemi",
|
||||
"hot_water_system": "Sıcak Su Sistemi",
|
||||
"block_service_man_count": "Hizmet Görevlisi Sayısı",
|
||||
"security_service_man_count": "Güvenlik Görevlisi Sayısı",
|
||||
"garage_count": "Garaj Sayısı",
|
||||
"site_uu_id": "Site UUID",
|
||||
}
|
||||
en = {
|
||||
**CrudRecordValidation.en,
|
||||
"gov_address_code": "Government Address Code",
|
||||
"build_name": "Building Name",
|
||||
"build_types_uu_id": "Building Type",
|
||||
"build_no": "Building No",
|
||||
"max_floor": "Number of Floors",
|
||||
"underground_floor": "Number of Basement Floors",
|
||||
"address_uu_id": "Address",
|
||||
"build_date": "Construction Date",
|
||||
"decision_period_date": "Decision Date",
|
||||
"tax_no": "Tax No",
|
||||
"lift_count": "Number of Elevators",
|
||||
"heating_system": "Heating System",
|
||||
"cooling_system": "Cooling System",
|
||||
"hot_water_system": "Hot Water System",
|
||||
"block_service_man_count": "Number of Service Officers",
|
||||
"security_service_man_count": "Number of Security Officers",
|
||||
"garage_count": "Number of Garages",
|
||||
"site_uu_id": "Site UUID",
|
||||
}
|
||||
|
||||
|
||||
class ListBuildingResponse(BaseModelRegular, CrudRecords, BuildListValidation):
|
||||
|
||||
gov_address_code: Optional[str] = None
|
||||
build_name: Optional[str] = None
|
||||
build_types_uu_id: Optional[str] = None
|
||||
build_no: Optional[str] = None
|
||||
max_floor: Optional[int] = None
|
||||
underground_floor: Optional[int] = None
|
||||
address_uu_id: Optional[str] = None
|
||||
build_date: Optional[str] = None
|
||||
decision_period_date: Optional[str] = None
|
||||
tax_no: Optional[str] = None
|
||||
lift_count: Optional[int] = None
|
||||
heating_system: Optional[bool] = None
|
||||
cooling_system: Optional[bool] = None
|
||||
hot_water_system: Optional[bool] = None
|
||||
block_service_man_count: Optional[int] = None
|
||||
security_service_man_count: Optional[int] = None
|
||||
garage_count: Optional[int] = None
|
||||
site_uu_id: Optional[str] = None
|
||||
|
||||
|
||||
# class InsertBuild(BaseModelRegular, BuildValidation):
|
||||
# gov_address_code: str
|
||||
# build_name: str
|
||||
# build_types_uu_id: str
|
||||
# max_floor: int
|
||||
# underground_floor: int
|
||||
# address_uu_id: str
|
||||
# build_date: str
|
||||
# decision_period_date: str
|
||||
#
|
||||
# tax_no: Optional[str] = None
|
||||
# lift_count: Optional[int] = None
|
||||
# heating_system: Optional[bool] = None
|
||||
# cooling_system: Optional[bool] = None
|
||||
# hot_water_system: Optional[bool] = None
|
||||
# block_service_man_count: Optional[int] = None
|
||||
# security_service_man_count: Optional[int] = None
|
||||
# garage_count: Optional[int] = None
|
||||
#
|
||||
#
|
||||
# class BuildUpdateValidation:
|
||||
# tr = {
|
||||
# **BuildValidation.tr,
|
||||
# **PydanticBaseModelValidation.tr,
|
||||
# }
|
||||
# en = {
|
||||
# **BuildValidation.en,
|
||||
# **PydanticBaseModelValidation.en,
|
||||
# }
|
||||
#
|
||||
#
|
||||
# class UpdateBuild(PydanticBaseModel, BuildUpdateValidation):
|
||||
# gov_address_code: Optional[str] = None
|
||||
# build_name: Optional[str] = None
|
||||
# build_no: Optional[str] = None
|
||||
# build_types: Optional[str] = None
|
||||
# max_floor: Optional[int] = None
|
||||
# underground_floor: Optional[int] = None
|
||||
# build_date: Optional[str] = None
|
||||
# tax_no: Optional[str] = None
|
||||
# lift_count: Optional[int] = None
|
||||
# heating_system: Optional[bool] = None
|
||||
# cooling_system: Optional[bool] = None
|
||||
# hot_water_system: Optional[bool] = None
|
||||
# block_service_man_count: Optional[int] = None
|
||||
# security_service_man_count: Optional[int] = None
|
||||
# garage_count: Optional[int] = None
|
||||
# address_uu_id: Optional[str] = None
|
||||
54
api_validations/validations_response/living_space.py
Normal file
54
api_validations/validations_response/living_space.py
Normal file
@@ -0,0 +1,54 @@
|
||||
from typing import Optional
|
||||
from api_validations.core_validations import BaseModelRegular
|
||||
from api_validations.validations_request import (
|
||||
PydanticBaseModel,
|
||||
PydanticBaseModelValidation,
|
||||
CrudRecordValidation,
|
||||
CrudRecords,
|
||||
)
|
||||
|
||||
|
||||
class LivingSpaceListValidation:
|
||||
tr = {
|
||||
**CrudRecordValidation.tr,
|
||||
"fix_value": "Sabit Değer",
|
||||
"fix_percent": "Sabit Yüzde",
|
||||
"agreement_no": "Anlaşma No",
|
||||
"marketing_process": "Pazarlama Süreci",
|
||||
"marketing_layer": "Pazarlama Katmanı",
|
||||
"build_parts_id": "Bölüm ID",
|
||||
"build_parts_uu_id": "Bölüm UUID",
|
||||
"person_id": "Sorumlu Kişi ID",
|
||||
"person_uu_id": "Sorumlu Kişi UUID",
|
||||
"occupant_type": "Kiracı Tipi",
|
||||
"occupant_type_uu_id": "Kiracı Tipi UUID",
|
||||
}
|
||||
en = {
|
||||
**CrudRecordValidation.en,
|
||||
"fix_value": "Fixed Value",
|
||||
"fix_percent": "Fixed Percent",
|
||||
"agreement_no": "Agreement No",
|
||||
"marketing_process": "Marketing Process",
|
||||
"marketing_layer": "Marketing Layer",
|
||||
"build_parts_id": "Part ID",
|
||||
"build_parts_uu_id": "Part UUID",
|
||||
"person_id": "Responsible Person ID",
|
||||
"person_uu_id": "Responsible Person UUID",
|
||||
"occupant_type": "Occupant Type",
|
||||
"occupant_type_uu_id": "Occupant Type UUID",
|
||||
}
|
||||
|
||||
|
||||
class LivingSpaceListResponse(BaseModelRegular, CrudRecords, LivingSpaceListValidation):
|
||||
|
||||
fix_value: Optional[float] = None
|
||||
fix_percent: Optional[float] = None
|
||||
agreement_no: Optional[str] = None
|
||||
marketing_process: Optional[str] = None
|
||||
marketing_layer: Optional[str] = None
|
||||
build_parts_id: Optional[int] = None
|
||||
build_parts_uu_id: Optional[str] = None
|
||||
person_id: Optional[int] = None
|
||||
person_uu_id: Optional[str] = None
|
||||
occupant_type: Optional[str] = None
|
||||
occupant_type_uu_id: Optional[str] = None
|
||||
56
api_validations/validations_response/parts.py
Normal file
56
api_validations/validations_response/parts.py
Normal file
@@ -0,0 +1,56 @@
|
||||
from typing import Optional
|
||||
from api_validations.core_validations import BaseModelRegular
|
||||
from api_validations.validations_request import (
|
||||
PydanticBaseModel,
|
||||
PydanticBaseModelValidation,
|
||||
CrudRecordValidation,
|
||||
CrudRecords,
|
||||
)
|
||||
|
||||
|
||||
class BuildPartsListValidation:
|
||||
tr = {
|
||||
**CrudRecordValidation.tr,
|
||||
"address_gov_code": "Adres Kapı Kodu",
|
||||
"part_no": "Bölüm No",
|
||||
"part_level": "Bölüm Seviyesi",
|
||||
"part_code": "Bölüm Kodu",
|
||||
"part_gross": "Bölüm Brüt",
|
||||
"part_net": "Bölüm Net",
|
||||
"default_accessory": "Varsayılan Aksesuar",
|
||||
"human_livable": "İnsan Yaşamı",
|
||||
"due_part_key": "Sabit Ödeme Grubu",
|
||||
"build_uu_id": "Bina UUID",
|
||||
"part_direction_uu_id": "Bölüm Yönü UUID",
|
||||
"part_type_uu_id": "Bölüm Tipi UUID",
|
||||
}
|
||||
en = {
|
||||
**CrudRecordValidation.en,
|
||||
"address_gov_code": "Address Government Code",
|
||||
"part_no": "Part Number",
|
||||
"part_level": "Part Level",
|
||||
"part_code": "Part Code",
|
||||
"part_gross": "Part Gross",
|
||||
"part_net": "Part Net",
|
||||
"default_accessory": "Default Accessory",
|
||||
"human_livable": "Human Livable",
|
||||
"due_part_key": "Constant Payment Group",
|
||||
"build_uu_id": "Building UUID",
|
||||
"part_direction_uu_id": "Part Direction UUID",
|
||||
"part_type_uu_id": "Part Type UUID",
|
||||
}
|
||||
|
||||
|
||||
class BuildPartsListResponse(BaseModelRegular, CrudRecords, BuildPartsListValidation):
|
||||
address_gov_code: Optional[str] = None
|
||||
part_no: Optional[int] = None
|
||||
part_level: Optional[int] = None
|
||||
part_code: Optional[str] = None
|
||||
part_gross: Optional[int] = None
|
||||
part_net: Optional[int] = None
|
||||
default_accessory: Optional[str] = None
|
||||
human_livable: Optional[bool] = None
|
||||
due_part_key: Optional[str] = None
|
||||
build_uu_id: Optional[str] = None
|
||||
part_direction_uu_id: Optional[str] = None
|
||||
part_type_uu_id: Optional[str] = None
|
||||
59
api_validations/validations_response/people.py
Normal file
59
api_validations/validations_response/people.py
Normal file
@@ -0,0 +1,59 @@
|
||||
from typing import Optional
|
||||
from api_validations.core_validations import BaseModelRegular
|
||||
from api_validations.validations_request import (
|
||||
PydanticBaseModel,
|
||||
PydanticBaseModelValidation,
|
||||
CrudRecordValidation,
|
||||
CrudRecords,
|
||||
)
|
||||
|
||||
|
||||
class PeopleListValidation:
|
||||
tr = {
|
||||
**CrudRecordValidation.tr,
|
||||
"firstname": "Ad",
|
||||
"surname": "Soyad",
|
||||
"middle_name": "Orta İsim",
|
||||
"sex_code": "Cinsiyet Kodu",
|
||||
"person_ref": "Kişi Referansı",
|
||||
"person_tag": "Kişi Etiketi",
|
||||
"father_name": "Baba Adı",
|
||||
"mother_name": "Anne Adı",
|
||||
"country_code": "Ülke Kodu",
|
||||
"national_identity_id": "Kimlik Numarası",
|
||||
"birth_place": "Doğum Yeri",
|
||||
"birth_date": "Doğum Tarihi",
|
||||
"tax_no": "Vergi Numarası",
|
||||
}
|
||||
en = {
|
||||
**CrudRecordValidation.en,
|
||||
"firstname": "First Name",
|
||||
"surname": "Surname",
|
||||
"middle_name": "Middle Name",
|
||||
"sex_code": "Sex Code",
|
||||
"person_ref": "Person Reference",
|
||||
"person_tag": "Person Tag",
|
||||
"father_name": "Father's Name",
|
||||
"mother_name": "Mother's Name",
|
||||
"country_code": "Country Code",
|
||||
"national_identity_id": "National Identity ID",
|
||||
"birth_place": "Birth Place",
|
||||
"birth_date": "Birth Date",
|
||||
"tax_no": "Tax Number",
|
||||
}
|
||||
|
||||
|
||||
class PeopleListResponse(BaseModelRegular, CrudRecords, PeopleListValidation):
|
||||
firstname: Optional[str] = None
|
||||
surname: Optional[str] = None
|
||||
middle_name: Optional[str] = None
|
||||
sex_code: Optional[str] = None
|
||||
person_ref: Optional[str] = None
|
||||
person_tag: Optional[str] = None
|
||||
father_name: Optional[str] = None
|
||||
mother_name: Optional[str] = None
|
||||
country_code: Optional[str] = None
|
||||
national_identity_id: Optional[str] = None
|
||||
birth_place: Optional[str] = None
|
||||
birth_date: Optional[str] = None
|
||||
tax_no: Optional[str] = None
|
||||
@@ -6,7 +6,6 @@ from databases.sql_models.account.account import (
|
||||
AccountCodes,
|
||||
AccountDetail,
|
||||
AccountMaster,
|
||||
AccountRecordDecisionPaymentClosed,
|
||||
AccountRecordExchanges,
|
||||
)
|
||||
from databases.sql_models.building.budget import (
|
||||
@@ -66,6 +65,8 @@ from databases.sql_models.event.event import (
|
||||
Events,
|
||||
Event2Occupant,
|
||||
Event2Employee,
|
||||
Event2OccupantExtra,
|
||||
Event2EmployeeExtra,
|
||||
)
|
||||
from databases.sql_models.identity.identity import (
|
||||
Addresses,
|
||||
@@ -108,7 +109,6 @@ __all__ = [
|
||||
"AccountCodes",
|
||||
"AccountDetail",
|
||||
"AccountMaster",
|
||||
"AccountRecordDecisionPaymentClosed",
|
||||
"AccountRecordExchanges",
|
||||
"BuildIbans",
|
||||
"BuildIbanDescription",
|
||||
@@ -151,6 +151,8 @@ __all__ = [
|
||||
"Events",
|
||||
"Event2Occupant",
|
||||
"Event2Employee",
|
||||
"Event2OccupantExtra",
|
||||
"Event2EmployeeExtra",
|
||||
"Addresses",
|
||||
"AddressCity",
|
||||
"AddressStreet",
|
||||
|
||||
@@ -16,8 +16,6 @@ from databases.no_sql_models.validations import (
|
||||
from api_library.date_time_actions.date_functions import system_arrow, client_arrow
|
||||
from api_configs import ApiStatic, Auth
|
||||
|
||||
from api_services.redis.auth_actions.auth import save_access_token_to_redis
|
||||
|
||||
|
||||
class PasswordModule:
|
||||
|
||||
@@ -128,7 +126,7 @@ class AuthModule(PasswordModule):
|
||||
except Exception as e:
|
||||
err = e
|
||||
token_is_expired = system_arrow.now() >= system_arrow.get(
|
||||
found_user.password_expiry_begins
|
||||
str(found_user.password_expiry_begins)
|
||||
).shift(days=replace_day)
|
||||
|
||||
if not password_token == found_user.password_token and token_is_expired:
|
||||
@@ -159,7 +157,8 @@ class AuthModule(PasswordModule):
|
||||
found_user.hash_password = new_password_dict.get("password")
|
||||
found_user.password_token = "" if found_user.password_token else ""
|
||||
query_engine.refresh_password_history_via_user(payload=history_dict)
|
||||
return found_user.save()
|
||||
found_user.save()
|
||||
return found_user
|
||||
|
||||
@staticmethod
|
||||
def reset_password_token(found_user):
|
||||
@@ -168,6 +167,7 @@ class AuthModule(PasswordModule):
|
||||
Auth.REFRESHER_TOKEN_LENGTH
|
||||
)
|
||||
found_user.save()
|
||||
return found_user.password_token
|
||||
|
||||
def generate_refresher_token(self, domain: str, remember_me=False):
|
||||
from databases import (
|
||||
@@ -183,13 +183,20 @@ class AuthModule(PasswordModule):
|
||||
already_token.expires_at = system_arrow.shift(days=3)
|
||||
already_token.save()
|
||||
return refresh_token
|
||||
UsersTokens.create(
|
||||
users_tokens = UsersTokens.filter_by_all(
|
||||
user_id=self.id, token_type="RememberMe", domain=domain, system=True
|
||||
).data
|
||||
if users_tokens:
|
||||
users_tokens.query.delete()
|
||||
UsersTokens.save()
|
||||
|
||||
users_token = UsersTokens.find_or_create(
|
||||
user_id=self.id,
|
||||
token_type="RememberMe",
|
||||
token=refresh_token,
|
||||
domain=domain,
|
||||
)
|
||||
UsersTokens.save()
|
||||
users_token.save_and_confirm()
|
||||
return refresh_token
|
||||
return None
|
||||
|
||||
@@ -204,6 +211,7 @@ class UserLoginModule(AuthModule):
|
||||
|
||||
@classmethod
|
||||
def login_user_with_credentials(cls, data, request):
|
||||
from api_services.redis.auth_actions.auth import save_access_token_to_redis
|
||||
from databases import (
|
||||
Users,
|
||||
People,
|
||||
@@ -234,7 +242,6 @@ class UserLoginModule(AuthModule):
|
||||
found_user.last_platform = headers_request.get("evyos-platform", None)
|
||||
found_user.last_remote_addr = headers_request.get("evyos-ip-ext", None)
|
||||
found_user.last_seen = str(system_arrow.now())
|
||||
|
||||
if ext_ip := headers_request.get("evyos-ip-ext"):
|
||||
agent = headers_request.get("evyos-user-agent", "")
|
||||
platform = headers_request.get("evyos-platform", "")
|
||||
|
||||
@@ -156,7 +156,7 @@ class AccountMaster(CrudCollection):
|
||||
__exclude__fields__ = []
|
||||
|
||||
doc_date: Mapped[TIMESTAMP] = mapped_column(
|
||||
TIMESTAMP, nullable=False, comment="Document Date"
|
||||
TIMESTAMP(timezone=True), nullable=False, comment="Document Date"
|
||||
)
|
||||
plug_type: Mapped[str] = mapped_column(String, nullable=False, comment="Plug Type")
|
||||
plug_number: Mapped[int] = mapped_column(
|
||||
@@ -243,7 +243,7 @@ class AccountDetail(CrudCollection):
|
||||
__enum_list__ = [("plug_type", "AccountingReceiptTypes", "M")]
|
||||
|
||||
doc_date: Mapped[TIMESTAMP] = mapped_column(
|
||||
TIMESTAMP, nullable=False, comment="Document Date"
|
||||
TIMESTAMP(timezone=True), nullable=False, comment="Document Date"
|
||||
)
|
||||
line_no: Mapped[int] = mapped_column(
|
||||
SmallInteger, nullable=False, comment="Line Number"
|
||||
@@ -368,6 +368,11 @@ class AccountDetail(CrudCollection):
|
||||
|
||||
|
||||
class AccountRecords(CrudCollection):
|
||||
"""
|
||||
build_decision_book_id = kaydın sorumlu olduğu karar defteri
|
||||
send_company_id = kaydı gönderen firma, send_person_id = gönderen kişi
|
||||
customer_id = sorumlu kullanıcı bilgisi, company_id = sorumlu firma
|
||||
"""
|
||||
|
||||
__tablename__ = "account_records"
|
||||
__exclude__fields__ = []
|
||||
@@ -375,17 +380,12 @@ class AccountRecords(CrudCollection):
|
||||
("receive_debit", "DebitTypes", "D"),
|
||||
("budget_type", "BudgetType", "B"),
|
||||
]
|
||||
"""
|
||||
build_decision_book_id = kaydın sorumlu olduğu karar defteri
|
||||
send_company_id = kaydı gönderen firma, send_person_id = gönderen kişi
|
||||
customer_id = sorumlu kullanıcı bilgisi, company_id = sorumlu firma
|
||||
"""
|
||||
|
||||
iban: Mapped[str] = mapped_column(
|
||||
String(64), nullable=False, comment="IBAN Number of Bank"
|
||||
)
|
||||
bank_date: Mapped[TIMESTAMP] = mapped_column(
|
||||
TIMESTAMP, nullable=False, comment="Bank Transaction Date"
|
||||
TIMESTAMP(timezone=True), nullable=False, comment="Bank Transaction Date"
|
||||
)
|
||||
|
||||
currency_value: Mapped[float] = mapped_column(
|
||||
@@ -412,6 +412,9 @@ class AccountRecords(CrudCollection):
|
||||
process_comment: Mapped[str] = mapped_column(
|
||||
String, nullable=False, comment="Transaction Record Comment"
|
||||
)
|
||||
process_garbage: Mapped[str] = mapped_column(
|
||||
String, nullable=True, comment="Transaction Record Garbage"
|
||||
)
|
||||
bank_reference_code: Mapped[str] = mapped_column(
|
||||
String, nullable=False, comment="Bank Reference Code"
|
||||
)
|
||||
@@ -431,7 +434,7 @@ class AccountRecords(CrudCollection):
|
||||
Boolean, server_default="0"
|
||||
)
|
||||
accounting_receipt_date: Mapped[TIMESTAMP] = mapped_column(
|
||||
TIMESTAMP, server_default="1900-01-01 00:00:00"
|
||||
TIMESTAMP(timezone=True), server_default="1900-01-01 00:00:00"
|
||||
)
|
||||
accounting_receipt_number: Mapped[int] = mapped_column(Integer, server_default="0")
|
||||
status_id: Mapped[int] = mapped_column(SmallInteger, server_default="0")
|
||||
@@ -465,8 +468,6 @@ class AccountRecords(CrudCollection):
|
||||
String, nullable=True, comment="Send Company UU ID"
|
||||
)
|
||||
|
||||
customer_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=True)
|
||||
customer_uu_id = mapped_column(String, nullable=True, comment="Customer UU ID")
|
||||
send_person_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=True)
|
||||
send_person_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=True, comment="Send Person UU ID"
|
||||
@@ -477,7 +478,20 @@ class AccountRecords(CrudCollection):
|
||||
approving_accounting_person_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=True, comment="Approving Accounting Person UU ID"
|
||||
)
|
||||
# build_id: Mapped[int] = mapped_column(ForeignKey("build.id"), nullable=True)
|
||||
|
||||
living_space_id: Mapped[int] = mapped_column(
|
||||
ForeignKey("build_living_space.id"), nullable=True
|
||||
)
|
||||
living_space_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=True, comment="Living Space UU ID"
|
||||
)
|
||||
customer_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=True)
|
||||
customer_uu_id = mapped_column(String, nullable=True, comment="Customer UU ID")
|
||||
|
||||
build_id: Mapped[int] = mapped_column(ForeignKey("build.id"), nullable=True)
|
||||
build_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=True, comment="Build UU ID"
|
||||
)
|
||||
build_parts_id: Mapped[int] = mapped_column(
|
||||
ForeignKey("build_parts.id"), nullable=True
|
||||
)
|
||||
@@ -652,54 +666,55 @@ class AccountRecords(CrudCollection):
|
||||
# print("is all dues_type", payment_dict["dues_type"], paid_value)
|
||||
|
||||
|
||||
class AccountRecordDecisionPaymentClosed(CrudCollection):
|
||||
|
||||
__tablename__ = "account_record_decision_payment_closed"
|
||||
__exclude__fields__ = []
|
||||
|
||||
arc_currency: Mapped[str] = mapped_column(
|
||||
String(5), nullable=False, comment="Unit of Currency"
|
||||
)
|
||||
arc_processing_time: Mapped[TIMESTAMP] = mapped_column(
|
||||
TIMESTAMP, nullable=False, comment="Processing Time"
|
||||
)
|
||||
arc_currency_value: Mapped[float] = mapped_column(
|
||||
Numeric(20, 6), nullable=False, comment="Currency Value"
|
||||
)
|
||||
|
||||
decision_book_budgets_id: Mapped[int] = mapped_column(
|
||||
ForeignKey("decision_book_budgets.id"), nullable=True
|
||||
)
|
||||
decision_book_budgets_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=True, comment="Budget UUID"
|
||||
)
|
||||
|
||||
build_decision_book_payment_id: Mapped[int] = mapped_column(
|
||||
ForeignKey("build_decision_book_payments.id")
|
||||
)
|
||||
build_decision_book_payment_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=True, comment="Build Decision Book Payment UU ID"
|
||||
)
|
||||
account_records_id: Mapped[int] = mapped_column(ForeignKey("account_records.id"))
|
||||
account_records_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=True, comment="Account Record UU ID"
|
||||
)
|
||||
|
||||
__table_args__ = (
|
||||
Index(
|
||||
"_account_record_decision_payment_closed_ndx_00",
|
||||
account_records_id,
|
||||
build_decision_book_payment_id,
|
||||
arc_processing_time,
|
||||
),
|
||||
Index(
|
||||
"_account_record_decision_payment_closed_ndx_01",
|
||||
build_decision_book_payment_id,
|
||||
account_records_id,
|
||||
arc_processing_time,
|
||||
),
|
||||
{"comment": "Account Record Decision Payment Closed Information"},
|
||||
)
|
||||
# class AccountRecordDecisionPaymentClosed(CrudCollection):
|
||||
#
|
||||
# __tablename__ = "account_record_decision_payment_closed"
|
||||
# __exclude__fields__ = []
|
||||
#
|
||||
# arc_currency: Mapped[str] = mapped_column(
|
||||
# String(5), nullable=False, comment="Unit of Currency"
|
||||
# )
|
||||
# arc_processing_time: Mapped[TIMESTAMP] = mapped_column(
|
||||
# TIMESTAMP(timezone=True), nullable=False, comment="Processing Time"
|
||||
# )
|
||||
# arc_currency_value: Mapped[float] = mapped_column(
|
||||
# Numeric(20, 6), nullable=False, comment="Currency Value"
|
||||
# )
|
||||
#
|
||||
# decision_book_budgets_id: Mapped[int] = mapped_column(
|
||||
# ForeignKey("decision_book_budgets.id"), nullable=True
|
||||
# )
|
||||
# decision_book_budgets_uu_id: Mapped[str] = mapped_column(
|
||||
# String, nullable=True, comment="Budget UUID"
|
||||
# )
|
||||
#
|
||||
# build_decision_book_payment_id: Mapped[int] = mapped_column(
|
||||
# ForeignKey("build_decision_book_payments.id")
|
||||
# )
|
||||
# build_decision_book_payment_uu_id: Mapped[str] = mapped_column(
|
||||
# String, nullable=True, comment="Build Decision Book Payment UU ID"
|
||||
# )
|
||||
# account_records_id: Mapped[int] = mapped_column(ForeignKey("account_records.id"))
|
||||
# account_records_uu_id: Mapped[str] = mapped_column(
|
||||
# String, nullable=True, comment="Account Record UU ID"
|
||||
# )
|
||||
#
|
||||
# __table_args__ = (
|
||||
# Index(
|
||||
# "_account_record_decision_payment_closed_ndx_00",
|
||||
# account_records_id,
|
||||
# build_decision_book_payment_id,
|
||||
# arc_processing_time,
|
||||
# ),
|
||||
# Index(
|
||||
# "_account_record_decision_payment_closed_ndx_01",
|
||||
# build_decision_book_payment_id,
|
||||
# account_records_id,
|
||||
# arc_processing_time,
|
||||
# ),
|
||||
# {"comment": "Account Record Decision Payment Closed Information"},
|
||||
# )
|
||||
#
|
||||
|
||||
|
||||
class AccountRecordExchanges(CrudCollection):
|
||||
|
||||
@@ -16,20 +16,20 @@ class BuildIbans(CrudCollection):
|
||||
String(40), server_default="", nullable=False, comment="IBAN number"
|
||||
)
|
||||
start_date: Mapped[TIMESTAMP] = mapped_column(
|
||||
TIMESTAMP, nullable=False, comment="Bank Transaction Start Date"
|
||||
TIMESTAMP(timezone=True), nullable=False, comment="Bank Transaction Start Date"
|
||||
)
|
||||
|
||||
stop_date: Mapped[TIMESTAMP] = mapped_column(
|
||||
TIMESTAMP, server_default="2900-01-01 00:00:00"
|
||||
TIMESTAMP(timezone=True), server_default="2900-01-01 00:00:00"
|
||||
)
|
||||
bank_code: Mapped[str] = mapped_column(String(24), server_default="TR0000000000000")
|
||||
xcomment: Mapped[str] = mapped_column(String(64), server_default="????")
|
||||
|
||||
build_id: Mapped[int] = mapped_column(
|
||||
ForeignKey("build.id"), nullable=False, comment="Building ID"
|
||||
ForeignKey("build.id"), nullable=True, comment="Building ID"
|
||||
)
|
||||
build_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=False, comment="Building UUID", index=True
|
||||
String, nullable=True, comment="Building UUID", index=True
|
||||
)
|
||||
# building: Mapped["Build"] = relationship(
|
||||
# "Build", back_populates="build_ibans", foreign_keys=[build_id]
|
||||
@@ -67,23 +67,25 @@ class BuildIbanDescription(CrudCollection):
|
||||
String, nullable=False, comment="Search Word", index=True
|
||||
)
|
||||
|
||||
decision_book_project_id: Mapped[int] = mapped_column(
|
||||
ForeignKey("build_decision_book_projects.id")
|
||||
)
|
||||
decision_book_project_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=False, comment="Decision Book Project UUID"
|
||||
)
|
||||
customer_id: Mapped[int] = mapped_column(ForeignKey("people.id"))
|
||||
# decision_book_project_id: Mapped[int] = mapped_column(
|
||||
# ForeignKey("build_decision_book_projects.id")
|
||||
# )
|
||||
# decision_book_project_uu_id: Mapped[str] = mapped_column(
|
||||
# String, nullable=False, comment="Decision Book Project UUID"
|
||||
# )
|
||||
customer_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=True)
|
||||
customer_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=False, comment="Customer UUID"
|
||||
String, nullable=True, comment="Customer UUID"
|
||||
)
|
||||
company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"))
|
||||
company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"), nullable=True)
|
||||
company_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=False, comment="Company UUID"
|
||||
String, nullable=True, comment="Company UUID"
|
||||
)
|
||||
build_parts_id: Mapped[int] = mapped_column(
|
||||
ForeignKey("build_parts.id"), nullable=True
|
||||
)
|
||||
build_parts_id: Mapped[int] = mapped_column(ForeignKey("build_parts.id"))
|
||||
build_parts_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=False, comment="Build Parts UUID"
|
||||
String, nullable=True, comment="Build Parts UUID"
|
||||
)
|
||||
|
||||
# decision_book_project: Mapped["BuildDecisionBookProjects"] = relationship(
|
||||
|
||||
@@ -125,7 +125,7 @@ class DecisionBookBudgets(CrudCollection):
|
||||
__exclude__fields__ = []
|
||||
|
||||
process_date: Mapped[TIMESTAMP] = mapped_column(
|
||||
TIMESTAMP, nullable=False
|
||||
TIMESTAMP(timezone=True), nullable=False
|
||||
) # Başlangıç tarihi
|
||||
budget_codes_id: Mapped[int] = mapped_column(
|
||||
Integer, ForeignKey("decision_book_budget_codes.id"), nullable=False
|
||||
|
||||
@@ -151,10 +151,10 @@ class Build(CrudCollection, SelectActionWithEmployee):
|
||||
SmallInteger, server_default="0", nullable=False, comment="Underground Floor"
|
||||
)
|
||||
build_date: Mapped[TIMESTAMP] = mapped_column(
|
||||
TIMESTAMP, server_default="1900-01-01"
|
||||
TIMESTAMP(timezone=True), server_default="1900-01-01"
|
||||
)
|
||||
decision_period_date: Mapped[TIMESTAMP] = mapped_column(
|
||||
TIMESTAMP,
|
||||
TIMESTAMP(timezone=True),
|
||||
server_default="1900-01-01",
|
||||
comment="Building annual ordinary meeting period",
|
||||
)
|
||||
@@ -196,7 +196,6 @@ class Build(CrudCollection, SelectActionWithEmployee):
|
||||
foreign_keys="BuildDecisionBook.build_id",
|
||||
)
|
||||
|
||||
#
|
||||
# build_ibans: Mapped["BuildIbans"] = relationship(
|
||||
# "BuildIbans", back_populates="building", foreign_keys="BuildIbans.build_id"
|
||||
# )
|
||||
@@ -272,17 +271,20 @@ class Build(CrudCollection, SelectActionWithEmployee):
|
||||
def update_action(cls, data: UpdateBuild, build_uu_id: str, token):
|
||||
from databases import Addresses
|
||||
|
||||
print("data_dict", data.dump())
|
||||
data_dict = data.excluded_dump()
|
||||
if data.official_address_uu_id:
|
||||
if data.address_uu_id:
|
||||
official_address = Addresses.filter_one(
|
||||
Addresses.uu_id == data.address_uu_id
|
||||
).data
|
||||
data_dict["address_id"] = official_address.id if official_address else None
|
||||
del data_dict["address_uu_id"]
|
||||
if build_to_update := cls.filter_one(
|
||||
cls.uu_id == build_uu_id, cls.person_id == token.id
|
||||
).data:
|
||||
return build_to_update.update(**data_dict)
|
||||
print("data_dict", data_dict)
|
||||
if build_to_update := cls.filter_one(cls.uu_id == build_uu_id).data:
|
||||
print("build_to_update", build_to_update.get_dict())
|
||||
updated_build = build_to_update.update(**data_dict)
|
||||
updated_build.save()
|
||||
print("updated_build", updated_build.get_dict())
|
||||
return updated_build
|
||||
|
||||
@property
|
||||
def top_flat(self):
|
||||
@@ -479,7 +481,7 @@ class BuildParts(CrudCollection):
|
||||
@property
|
||||
def part_name(self):
|
||||
if build_type := BuildTypes.filter_by_one(
|
||||
system=True, id=self.build_part_type_id
|
||||
system=True, id=self.part_type_id
|
||||
).data:
|
||||
return f"{str(build_type.type_name).upper()} : {str(self.part_no).upper()}"
|
||||
return f"Undefined:{str(build_type.type_name).upper()}"
|
||||
@@ -513,16 +515,6 @@ class BuildLivingSpace(CrudCollection):
|
||||
marketing_process: Mapped[bool] = mapped_column(Boolean, server_default="False")
|
||||
marketing_layer: Mapped[int] = mapped_column(SmallInteger, server_default="0")
|
||||
|
||||
discounted_percentage: Mapped[float] = mapped_column(
|
||||
Numeric(6, 2), server_default="0.00"
|
||||
) # %22
|
||||
discounted_price: Mapped[float] = mapped_column(
|
||||
Numeric(20, 2), server_default="0.00"
|
||||
) # Normal: 78.00 TL
|
||||
calculated_price: Mapped[float] = mapped_column(
|
||||
Numeric(20, 2), server_default="0.00"
|
||||
) # sana düz 75.00 TL yapar
|
||||
|
||||
build_parts_id: Mapped[int] = mapped_column(
|
||||
ForeignKey("build_parts.id"),
|
||||
nullable=False,
|
||||
@@ -561,8 +553,8 @@ class BuildLivingSpace(CrudCollection):
|
||||
token_dict: typing.Union[EmployeeTokenObject, OccupantTokenObject],
|
||||
):
|
||||
from databases import Services, OccupantTypes
|
||||
from api_events.events.events.events_bind_services import (
|
||||
ServiceBindOccupantEventMethods,
|
||||
from api_events.events.events.events_bind_modules import (
|
||||
ModulesBindOccupantEventMethods,
|
||||
)
|
||||
|
||||
if data.get("expiry_starts"):
|
||||
@@ -576,18 +568,15 @@ class BuildLivingSpace(CrudCollection):
|
||||
related_service = Services.filter_by_one(
|
||||
related_responsibility=occupant_type.occupant_code,
|
||||
).data
|
||||
|
||||
created_living_space.save_and_confirm()
|
||||
|
||||
if not related_service:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_418_IM_A_TEAPOT,
|
||||
detail="Service is not found in database. Re-enter service record then try again.",
|
||||
)
|
||||
ServiceBindOccupantEventMethods.bind_services_occupant_system(
|
||||
service_id=related_service.id,
|
||||
ModulesBindOccupantEventMethods.bind_default_module_for_first_init_occupant(
|
||||
build_living_space_id=created_living_space.id,
|
||||
)
|
||||
created_living_space.save_and_confirm()
|
||||
return created_living_space
|
||||
|
||||
@classmethod
|
||||
@@ -608,6 +597,57 @@ class BuildLivingSpace(CrudCollection):
|
||||
return living_spaces.data, living_spaces.count
|
||||
|
||||
|
||||
class BuildManagement(CrudCollection):
|
||||
|
||||
__tablename__ = "build_management"
|
||||
__exclude__fields__ = []
|
||||
|
||||
discounted_percentage: Mapped[float] = mapped_column(
|
||||
Numeric(6, 2), server_default="0.00"
|
||||
) # %22
|
||||
discounted_price: Mapped[float] = mapped_column(
|
||||
Numeric(20, 2), server_default="0.00"
|
||||
) # Normal: 78.00 TL
|
||||
calculated_price: Mapped[float] = mapped_column(
|
||||
Numeric(20, 2), server_default="0.00"
|
||||
) # sana düz 75.00 TL yapar
|
||||
|
||||
occupant_type: Mapped[int] = mapped_column(
|
||||
ForeignKey("occupant_types.id"),
|
||||
nullable=False,
|
||||
comment="Occupant Type",
|
||||
)
|
||||
occupant_type_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=False, comment="Occupant Type UUID"
|
||||
)
|
||||
build_id: Mapped[int] = mapped_column(
|
||||
ForeignKey("build.id"), nullable=False, comment="Building ID"
|
||||
)
|
||||
build_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=False, comment="Building UUID"
|
||||
)
|
||||
build_parts_id: Mapped[int] = mapped_column(
|
||||
ForeignKey("build_parts.id"),
|
||||
nullable=False,
|
||||
index=True,
|
||||
comment="Build Part ID",
|
||||
)
|
||||
build_parts_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=False, comment="Build Part UUID"
|
||||
)
|
||||
|
||||
__table_args__ = (
|
||||
Index(
|
||||
"build_management_ndx_00",
|
||||
build_parts_id,
|
||||
occupant_type,
|
||||
"expiry_starts",
|
||||
unique=True,
|
||||
),
|
||||
{"comment": "Management of the building parts that are related to people"},
|
||||
)
|
||||
|
||||
|
||||
class BuildArea(CrudCollection):
|
||||
"""
|
||||
Builds class based on declarative_base and BaseMixin via session
|
||||
|
||||
@@ -53,12 +53,12 @@ class BuildDecisionBook(CrudCollection):
|
||||
) #
|
||||
is_out_sourced: Mapped[bool] = mapped_column(Boolean, server_default="0")
|
||||
meeting_date: Mapped[TIMESTAMP] = mapped_column(
|
||||
TIMESTAMP, server_default="1900-01-01"
|
||||
TIMESTAMP(timezone=True), server_default="1900-01-01"
|
||||
)
|
||||
decision_type: Mapped[str] = mapped_column(String(3), server_default="RBM")
|
||||
meeting_is_completed: Mapped[bool] = mapped_column(Boolean, server_default="0")
|
||||
meeting_completed_date: Mapped[TIMESTAMP] = mapped_column(
|
||||
TIMESTAMP, nullable=True, comment="Meeting Completed Date"
|
||||
TIMESTAMP(timezone=True), nullable=True, comment="Meeting Completed Date"
|
||||
)
|
||||
|
||||
build_id: Mapped[int] = mapped_column(ForeignKey("build.id"), nullable=False)
|
||||
@@ -267,10 +267,10 @@ class BuildDecisionBookInvitations(CrudCollection):
|
||||
Text, nullable=True, comment="Invitation Message"
|
||||
)
|
||||
planned_date: Mapped[TIMESTAMP] = mapped_column(
|
||||
TIMESTAMP, nullable=False, comment="Planned Meeting Date"
|
||||
TIMESTAMP(timezone=True), nullable=False, comment="Planned Meeting Date"
|
||||
)
|
||||
planned_date_expires: Mapped[TIMESTAMP] = mapped_column(
|
||||
TIMESTAMP, nullable=False, comment="Planned Meeting Date Expires"
|
||||
TIMESTAMP(timezone=True), nullable=False, comment="Planned Meeting Date Expires"
|
||||
)
|
||||
|
||||
__table_args__ = (
|
||||
@@ -345,16 +345,16 @@ class BuildDecisionBookPerson(CrudCollection):
|
||||
dues_percent_discount: Mapped[int] = mapped_column(SmallInteger, server_default="0")
|
||||
dues_fix_discount: Mapped[float] = mapped_column(Numeric(10, 2), server_default="0")
|
||||
dues_discount_approval_date: Mapped[TIMESTAMP] = mapped_column(
|
||||
TIMESTAMP, server_default="1900-01-01 00:00:00"
|
||||
TIMESTAMP(timezone=True), server_default="1900-01-01 00:00:00"
|
||||
)
|
||||
send_date: Mapped[TIMESTAMP] = mapped_column(
|
||||
TIMESTAMP, nullable=False, comment="Confirmation Date"
|
||||
TIMESTAMP(timezone=True), nullable=False, comment="Confirmation Date"
|
||||
)
|
||||
is_attending: Mapped[bool] = mapped_column(
|
||||
Boolean, server_default="0", comment="Occupant is Attending to invitation"
|
||||
)
|
||||
confirmed_date: Mapped[TIMESTAMP] = mapped_column(
|
||||
TIMESTAMP, nullable=True, comment="Confirmation Date"
|
||||
TIMESTAMP(timezone=True), nullable=True, comment="Confirmation Date"
|
||||
)
|
||||
token: Mapped[str] = mapped_column(
|
||||
String, server_default="", comment="Invitation Token"
|
||||
@@ -589,6 +589,11 @@ class BuildDecisionBookItems(CrudCollection):
|
||||
build_decision_book_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=True, comment="Decision Book UUID"
|
||||
)
|
||||
item_short_comment: Mapped[str] = mapped_column(
|
||||
String(24),
|
||||
nullable=True,
|
||||
comment="This field is reserved for use in grouping data or in the pivot heading.",
|
||||
)
|
||||
|
||||
decision_books: Mapped["BuildDecisionBook"] = relationship(
|
||||
"BuildDecisionBook",
|
||||
@@ -841,7 +846,7 @@ class BuildDecisionBookPayments(CrudCollection):
|
||||
String(10), nullable=False, comment="Payment Plan Time Periods"
|
||||
)
|
||||
process_date: Mapped[TIMESTAMP] = mapped_column(
|
||||
TIMESTAMP, nullable=False, comment="Payment Due Date"
|
||||
TIMESTAMP(timezone=True), nullable=False, comment="Payment Due Date"
|
||||
)
|
||||
payment_amount: Mapped[float] = mapped_column(
|
||||
Numeric(16, 2), nullable=False, comment="Payment Amount"
|
||||
@@ -865,7 +870,19 @@ class BuildDecisionBookPayments(CrudCollection):
|
||||
comment="Build Decision Book Item ID",
|
||||
)
|
||||
build_decision_book_item_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=True, comment="Decision Book Item UUID"
|
||||
String, nullable=False, comment="Decision Book Item UUID"
|
||||
)
|
||||
# build_decision_book_id: Mapped[int] = mapped_column(
|
||||
# ForeignKey("build_decision_book.id"), nullable=True
|
||||
# )
|
||||
# build_decision_book_uu_id: Mapped[str] = mapped_column(
|
||||
# String, nullable=True, comment="Decision Book UUID"
|
||||
# )
|
||||
build_parts_id: Mapped[int] = mapped_column(
|
||||
ForeignKey("build_parts.id"), nullable=False
|
||||
)
|
||||
build_parts_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=False, comment="Build Part UUID"
|
||||
)
|
||||
decision_book_project_id: Mapped[int] = mapped_column(
|
||||
ForeignKey("build_decision_book_projects.id"),
|
||||
@@ -875,12 +892,11 @@ class BuildDecisionBookPayments(CrudCollection):
|
||||
decision_book_project_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=True, comment="Decision Book Project UUID"
|
||||
)
|
||||
|
||||
build_parts_id: Mapped[int] = mapped_column(
|
||||
ForeignKey("build_parts.id"), nullable=False
|
||||
account_records_id: Mapped[int] = mapped_column(
|
||||
ForeignKey("account_records.id"), nullable=True
|
||||
)
|
||||
build_parts_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=False, comment="Build Part UUID"
|
||||
account_records_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=True, comment="Account Record UU ID"
|
||||
)
|
||||
|
||||
# budget_records_id: Mapped[int] = mapped_column(ForeignKey("account_records.id"), nullable=True)
|
||||
@@ -918,8 +934,11 @@ class BuildDecisionBookPayments(CrudCollection):
|
||||
build_parts_id,
|
||||
payment_plan_time_periods,
|
||||
process_date,
|
||||
payment_types_id,
|
||||
account_records_id,
|
||||
unique=True,
|
||||
),
|
||||
Index("build_decision_book_payments_detail_ndx_01", account_records_id),
|
||||
{"comment": "Payment Details of Decision Book Payments"},
|
||||
)
|
||||
|
||||
@@ -934,17 +953,17 @@ class BuildDecisionBookLegal(CrudCollection):
|
||||
__exclude__fields__ = []
|
||||
|
||||
period_start_date: Mapped[TIMESTAMP] = mapped_column(
|
||||
TIMESTAMP, nullable=False, comment="Start Date of Legal Period"
|
||||
TIMESTAMP(timezone=True), nullable=False, comment="Start Date of Legal Period"
|
||||
)
|
||||
lawsuits_decision_number: Mapped[str] = mapped_column(
|
||||
String, nullable=False, comment="Lawsuits Decision Number"
|
||||
)
|
||||
lawsuits_decision_date: Mapped[TIMESTAMP] = mapped_column(
|
||||
TIMESTAMP, nullable=False, comment="Lawsuits Decision Date"
|
||||
TIMESTAMP(timezone=True), nullable=False, comment="Lawsuits Decision Date"
|
||||
)
|
||||
|
||||
period_stop_date: Mapped[TIMESTAMP] = mapped_column(
|
||||
TIMESTAMP, server_default="2099-12-31 23:59:59"
|
||||
TIMESTAMP(timezone=True), server_default="2099-12-31 23:59:59"
|
||||
)
|
||||
decision_book_pdf_path: Mapped[str] = mapped_column(
|
||||
String, server_default="", nullable=True
|
||||
@@ -956,10 +975,10 @@ class BuildDecisionBookLegal(CrudCollection):
|
||||
String, server_default="", nullable=True
|
||||
)
|
||||
contact_agreement_date: Mapped[TIMESTAMP] = mapped_column(
|
||||
TIMESTAMP, server_default="1900-01-01 00:00:00", nullable=True
|
||||
TIMESTAMP(timezone=True), server_default="1900-01-01 00:00:00", nullable=True
|
||||
)
|
||||
meeting_date: Mapped[str] = mapped_column(
|
||||
TIMESTAMP, server_default="1900-01-01 00:00:00"
|
||||
TIMESTAMP(timezone=True), server_default="1900-01-01 00:00:00"
|
||||
)
|
||||
lawsuits_type: Mapped[str] = mapped_column(String(1), server_default="C")
|
||||
lawsuits_name: Mapped[str] = mapped_column(String(128))
|
||||
@@ -1016,10 +1035,10 @@ class BuildDecisionBookProjects(CrudCollection):
|
||||
String, nullable=False, comment="Project Name"
|
||||
)
|
||||
project_start_date: Mapped[TIMESTAMP] = mapped_column(
|
||||
TIMESTAMP, nullable=False, comment="Project Start Date"
|
||||
TIMESTAMP(timezone=True), nullable=False, comment="Project Start Date"
|
||||
)
|
||||
project_stop_date: Mapped[TIMESTAMP] = mapped_column(
|
||||
TIMESTAMP, server_default="2099-12-31 23:59:59"
|
||||
TIMESTAMP(timezone=True), server_default="2099-12-31 23:59:59"
|
||||
)
|
||||
project_type: Mapped[str] = mapped_column(String, server_default="C")
|
||||
project_note: Mapped[str] = mapped_column(Text)
|
||||
@@ -1037,7 +1056,7 @@ class BuildDecisionBookProjects(CrudCollection):
|
||||
is_out_sourced: Mapped[bool] = mapped_column(Boolean, server_default="0")
|
||||
|
||||
meeting_date: Mapped[TIMESTAMP] = mapped_column(
|
||||
TIMESTAMP, server_default="1900-01-01 00:00:00", index=True
|
||||
TIMESTAMP(timezone=True), server_default="1900-01-01 00:00:00", index=True
|
||||
)
|
||||
currency: Mapped[str] = mapped_column(String(8), server_default="TRY")
|
||||
bid_price: Mapped[float] = mapped_column(Numeric(16, 4), server_default="0")
|
||||
@@ -1220,6 +1239,11 @@ class BuildDecisionBookProjectItems(CrudCollection):
|
||||
item_estimated_cost: Mapped[float] = mapped_column(
|
||||
Numeric(16, 2), server_default="0", comment="Estimated Cost"
|
||||
)
|
||||
item_short_comment: Mapped[str] = mapped_column(
|
||||
String(24),
|
||||
nullable=True,
|
||||
comment="This field is reserved for use in grouping data or in the pivot heading.",
|
||||
)
|
||||
|
||||
build_decision_book_project_id: Mapped[int] = mapped_column(
|
||||
ForeignKey("build_decision_book_projects.id"), nullable=False
|
||||
|
||||
@@ -80,10 +80,10 @@ class CrudMixin(Base, SmartQueryMixin, SessionMixin, FilterAttributes):
|
||||
valid_record_args = lambda class_: [class_.active == True, class_.deleted == False]
|
||||
|
||||
expiry_starts: Mapped[TIMESTAMP] = mapped_column(
|
||||
TIMESTAMP, server_default=func.now(), nullable=False
|
||||
TIMESTAMP(timezone=True), server_default=func.now(), nullable=False
|
||||
)
|
||||
expiry_ends: Mapped[TIMESTAMP] = mapped_column(
|
||||
TIMESTAMP, default="2099-12-31", server_default="2099-12-31"
|
||||
TIMESTAMP(timezone=True), default="2099-12-31", server_default="2099-12-31"
|
||||
)
|
||||
|
||||
@classmethod
|
||||
|
||||
@@ -21,7 +21,9 @@ class Events(CrudCollection):
|
||||
__tablename__ = "events"
|
||||
__exclude__fields__ = []
|
||||
|
||||
event_type: Mapped[str] = mapped_column(String, nullable=False, comment="default")
|
||||
event_type: Mapped[str] = mapped_column(
|
||||
String, nullable=False, comment="Event Type"
|
||||
)
|
||||
function_code: Mapped[str] = mapped_column(
|
||||
String, nullable=False, comment="function code"
|
||||
)
|
||||
@@ -104,6 +106,27 @@ class Services(CrudCollection):
|
||||
)
|
||||
related_responsibility: Mapped[str] = mapped_column(String, server_default="")
|
||||
|
||||
@classmethod
|
||||
def retrieve_service_via_occupant_code(cls, occupant_code):
|
||||
from databases import OccupantTypes
|
||||
|
||||
occupant_type = OccupantTypes.filter_by_one(
|
||||
system=True,
|
||||
occupant_code=occupant_code,
|
||||
).data
|
||||
if not occupant_type:
|
||||
cls.raise_http_exception(
|
||||
status_code="HTTP_404_NOT_FOUND",
|
||||
error_case="RECORD_NOT_FOUND",
|
||||
message=f"No occupant type found for this code : {occupant_code}",
|
||||
data={
|
||||
"occupant_code": occupant_code,
|
||||
},
|
||||
)
|
||||
return cls.filter_one(
|
||||
cls.related_responsibility == occupant_type.occupant_code
|
||||
).data
|
||||
|
||||
__table_args__ = ({"comment": "Services Information"},)
|
||||
|
||||
|
||||
@@ -123,6 +146,62 @@ class Service2Events(CrudCollection):
|
||||
__table_args__ = ({"comment": "Service2Events Information"},)
|
||||
|
||||
|
||||
class Event2OccupantExtra(CrudCollection):
|
||||
|
||||
__tablename__ = "event2occupant_extra"
|
||||
__exclude__fields__ = []
|
||||
|
||||
build_living_space_id: Mapped[int] = mapped_column(
|
||||
ForeignKey("build_living_space.id"), nullable=False
|
||||
)
|
||||
build_living_space_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=False, comment="Build Living Space UUID"
|
||||
)
|
||||
event_id: Mapped[int] = mapped_column(ForeignKey("events.id"), nullable=False)
|
||||
event_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=False, comment="Event UUID"
|
||||
)
|
||||
|
||||
__table_args__ = (
|
||||
Index(
|
||||
"event2occupant_extra_bind_event_to_occupant",
|
||||
build_living_space_id,
|
||||
event_id,
|
||||
unique=True,
|
||||
),
|
||||
{"comment": "Occupant2Event Information"},
|
||||
)
|
||||
|
||||
|
||||
class Event2EmployeeExtra(CrudCollection):
|
||||
"""
|
||||
Employee2Event class based on declarative_base and BaseMixin via session
|
||||
"""
|
||||
|
||||
__tablename__ = "event2employee_extra"
|
||||
__exclude__fields__ = []
|
||||
|
||||
employee_id: Mapped[int] = mapped_column(ForeignKey("employees.id"), nullable=False)
|
||||
employee_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=False, comment="Employee UUID"
|
||||
)
|
||||
|
||||
event_id: Mapped[int] = mapped_column(ForeignKey("events.id"), nullable=False)
|
||||
event_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=False, comment="Event UUID"
|
||||
)
|
||||
|
||||
__table_args__ = (
|
||||
Index(
|
||||
"event2employee_extra_employee_to_event",
|
||||
employee_id,
|
||||
event_id,
|
||||
unique=True,
|
||||
),
|
||||
{"comment": "Employee to Event Information"},
|
||||
)
|
||||
|
||||
|
||||
class Event2Employee(CrudCollection):
|
||||
"""
|
||||
Employee2Event class based on declarative_base and BaseMixin via session
|
||||
@@ -131,23 +210,44 @@ class Event2Employee(CrudCollection):
|
||||
__tablename__ = "event2employee"
|
||||
__exclude__fields__ = []
|
||||
|
||||
employee_id = mapped_column(ForeignKey("employees.id"), nullable=False)
|
||||
employee_uu_id = mapped_column(String, nullable=False, comment="Employee UUID")
|
||||
event_id = mapped_column(ForeignKey("events.id"), nullable=False)
|
||||
event_uu_id = mapped_column(String, nullable=False, comment="Event UUID")
|
||||
employee_id: Mapped[int] = mapped_column(ForeignKey("employees.id"), nullable=False)
|
||||
employee_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=False, comment="Employee UUID"
|
||||
)
|
||||
event_service_id: Mapped[int] = mapped_column(
|
||||
ForeignKey("services.id"), nullable=False
|
||||
)
|
||||
event_service_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=False, comment="Event Cluster UUID"
|
||||
)
|
||||
|
||||
__table_args__ = (
|
||||
Index("event2employee_employee_to_event", employee_id, event_id, unique=True),
|
||||
Index(
|
||||
"event2employee_employee_to_event",
|
||||
employee_id,
|
||||
event_service_id,
|
||||
unique=True,
|
||||
),
|
||||
{"comment": "Employee to Event Information"},
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def get_event_id_by_employee_id(cls, employee_id) -> (list, list):
|
||||
active_events = cls.filter_all(cls.employee_id == employee_id)
|
||||
active_events_id = [event.event_id for event in active_events.data]
|
||||
active_events = Events.filter_all(Events.id.in_(active_events_id))
|
||||
active_events_uu_id = [str(event.uu_id) for event in active_events.data]
|
||||
return active_events_id, active_events_uu_id
|
||||
def get_event_id_by_employee_id(cls, employee_id) -> list:
|
||||
occupant_events = cls.filter_all(
|
||||
cls.employee_id == employee_id,
|
||||
).data
|
||||
active_events = Service2Events.filter_all(
|
||||
Service2Events.service_id.in_(
|
||||
[event.event_service_id for event in occupant_events]
|
||||
),
|
||||
system=True,
|
||||
).data
|
||||
active_events_id = [event.event_id for event in active_events]
|
||||
if extra_events := Event2EmployeeExtra.filter_all(
|
||||
Event2EmployeeExtra.employee_id == employee_id
|
||||
).data:
|
||||
active_events_id.extend([event.event_id for event in extra_events])
|
||||
return active_events_id
|
||||
|
||||
|
||||
class Event2Occupant(CrudCollection):
|
||||
@@ -158,39 +258,50 @@ class Event2Occupant(CrudCollection):
|
||||
__tablename__ = "event2occupant"
|
||||
__exclude__fields__ = []
|
||||
|
||||
build_living_space_id = mapped_column(
|
||||
build_living_space_id: Mapped[str] = mapped_column(
|
||||
ForeignKey("build_living_space.id"), nullable=False
|
||||
)
|
||||
build_living_space_uu_id = mapped_column(
|
||||
build_living_space_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=False, comment="Build Living Space UUID"
|
||||
)
|
||||
event_id: Mapped[int] = mapped_column(ForeignKey("events.id"), nullable=False)
|
||||
event_uu_id = mapped_column(String, nullable=False, comment="Event UUID")
|
||||
event_service_id: Mapped[int] = mapped_column(
|
||||
ForeignKey("services.id"), nullable=False
|
||||
)
|
||||
event_service_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=False, comment="Event Cluster UUID"
|
||||
)
|
||||
# event_id: Mapped[int] = mapped_column(ForeignKey("events.id"), nullable=False)
|
||||
# event_uu_id = mapped_column(String, nullable=False, comment="Event UUID")
|
||||
|
||||
__table_args__ = (
|
||||
Index(
|
||||
"event2occupant_bind_event_to_occupant",
|
||||
build_living_space_id,
|
||||
event_id,
|
||||
event_service_id,
|
||||
unique=True,
|
||||
),
|
||||
{"comment": "Occupant2Event Information"},
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def get_event_id_by_build_living_space_id(
|
||||
cls, build_living_space_id
|
||||
) -> (list, list):
|
||||
active_events = cls.filter_all(
|
||||
def get_event_id_by_build_living_space_id(cls, build_living_space_id) -> list:
|
||||
occupant_events = cls.filter_all(
|
||||
cls.build_living_space_id == build_living_space_id,
|
||||
).data
|
||||
active_events = Service2Events.filter_all(
|
||||
Service2Events.service_id.in_(
|
||||
[event.event_service_id for event in occupant_events]
|
||||
),
|
||||
system=True,
|
||||
).data
|
||||
active_events_id = [event.event_id for event in active_events]
|
||||
active_events = Events.filter_all(Events.id.in_(active_events_id)).data
|
||||
active_events_uu_id = [str(event.uu_id) for event in active_events]
|
||||
return active_events_id, active_events_uu_id
|
||||
if extra_events := Event2OccupantExtra.filter_all(
|
||||
Event2OccupantExtra.build_living_space_id == build_living_space_id
|
||||
).data:
|
||||
active_events_id.extend([event.event_id for event in extra_events])
|
||||
return active_events_id
|
||||
|
||||
|
||||
# Delete later code
|
||||
class ModulePrice(CrudCollection):
|
||||
"""
|
||||
ModulePrice class based on declarative_base and BaseMixin via session
|
||||
@@ -199,27 +310,34 @@ class ModulePrice(CrudCollection):
|
||||
__tablename__ = "module_price"
|
||||
__exclude__fields__ = []
|
||||
|
||||
campaign_code = mapped_column(String, nullable=False, comment="Campaign Code")
|
||||
campaign_code: Mapped[str] = mapped_column(
|
||||
String, nullable=False, comment="Campaign Code"
|
||||
)
|
||||
module_id: Mapped[int] = mapped_column(ForeignKey("modules.id"), nullable=False)
|
||||
module_uu_id = mapped_column(String, nullable=False, comment="Module UUID")
|
||||
module_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=False, comment="Module UUID"
|
||||
)
|
||||
service_id: Mapped[int] = mapped_column(ForeignKey("services.id"), nullable=False)
|
||||
service_uu_id = mapped_column(String, nullable=False, comment="Service UUID")
|
||||
service_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=False, comment="Service UUID"
|
||||
)
|
||||
event_id: Mapped[int] = mapped_column(ForeignKey("events.id"), nullable=False)
|
||||
event_uu_id = mapped_column(String, nullable=False, comment="Event UUID")
|
||||
event_uu_id: Mapped[str] = mapped_column(
|
||||
String, nullable=False, comment="Event UUID"
|
||||
)
|
||||
is_counted_percentage: Mapped[float] = mapped_column(
|
||||
Numeric(6, 2), server_default="0.00"
|
||||
) # %22
|
||||
discounted_price = mapped_column(
|
||||
discounted_price: Mapped[float] = mapped_column(
|
||||
Numeric(20, 2), server_default="0.00"
|
||||
) # Normal: 78.00 TL
|
||||
calculated_price = mapped_column(
|
||||
calculated_price: Mapped[float] = mapped_column(
|
||||
Numeric(20, 2), server_default="0.00"
|
||||
) # sana düz 75.00 TL yapar
|
||||
|
||||
__table_args__ = ({"comment": "ModulePrice Information"},)
|
||||
|
||||
|
||||
#
|
||||
# class Modules2Occupant(CrudCollection):
|
||||
# """
|
||||
# ModulesOccupantPrices class based on declarative_base and BaseMixin via session
|
||||
|
||||
@@ -37,7 +37,8 @@ class UsersTokens(CrudCollection):
|
||||
token: Mapped[str] = mapped_column(String, server_default="")
|
||||
domain: Mapped[str] = mapped_column(String, server_default="")
|
||||
expires_at: Mapped[TIMESTAMP] = mapped_column(
|
||||
TIMESTAMP, default=str(system_arrow.shift(date=system_arrow.now(), days=3))
|
||||
TIMESTAMP(timezone=True),
|
||||
default=str(system_arrow.shift(date=system_arrow.now(), days=3)),
|
||||
)
|
||||
|
||||
# users = relationship("Users", back_populates="tokens", foreign_keys=[user_id])
|
||||
@@ -92,7 +93,7 @@ class Users(CrudCollection, UserLoginModule, SelectAction):
|
||||
)
|
||||
password_expiry_begins: Mapped[TIMESTAMP] = mapped_column(
|
||||
"expiry_begins",
|
||||
TIMESTAMP,
|
||||
TIMESTAMP(timezone=True),
|
||||
server_default=func.now(),
|
||||
comment="Timestamp when password expiry begins",
|
||||
)
|
||||
@@ -321,7 +322,9 @@ class People(CrudCollection, SelectAction):
|
||||
String, server_default="", comment="Birth place of the person"
|
||||
)
|
||||
birth_date: Mapped[TIMESTAMP] = mapped_column(
|
||||
TIMESTAMP, server_default="1900-01-01", comment="Birth date of the person"
|
||||
TIMESTAMP(timezone=True),
|
||||
server_default="1900-01-01",
|
||||
comment="Birth date of the person",
|
||||
)
|
||||
tax_no: Mapped[str] = mapped_column(
|
||||
String, server_default="", comment="Tax number of the person"
|
||||
@@ -342,7 +345,9 @@ class People(CrudCollection, SelectAction):
|
||||
|
||||
@property
|
||||
def full_name(self):
|
||||
return f"{self.firstname} {self.middle_name} {self.surname}"
|
||||
if self.middle_name:
|
||||
return f"{self.firstname} {self.middle_name} {self.surname}"
|
||||
return f"{self.firstname} {self.surname}"
|
||||
|
||||
@classmethod
|
||||
def create_action(cls, data: InsertPerson, token):
|
||||
@@ -982,7 +987,7 @@ class Contracts(CrudCollection):
|
||||
comment="contract_code is the unique code given by the system.",
|
||||
)
|
||||
contract_date: Mapped[TIMESTAMP] = mapped_column(
|
||||
TIMESTAMP,
|
||||
TIMESTAMP(timezone=True),
|
||||
server_default="2099-12-31 23:59:59",
|
||||
comment="contract date is the date the contract is made. "
|
||||
"expire start is the start date of the contract, expire en is the end date of the contract.",
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
from api_configs import WagDatabase
|
||||
|
||||
# from api_configs import TestDatabase as WagDatabase
|
||||
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import scoped_session, sessionmaker
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
@@ -7,10 +9,11 @@ from sqlalchemy.ext.declarative import declarative_base
|
||||
|
||||
engine_config = {
|
||||
"url": WagDatabase.DATABASE_URL,
|
||||
"pool_size": 10,
|
||||
"max_overflow": 0,
|
||||
"pool_size": 20,
|
||||
"max_overflow": 10,
|
||||
"echo": False,
|
||||
"isolation_level": "READ COMMITTED",
|
||||
"pool_pre_ping": True,
|
||||
}
|
||||
|
||||
engine = create_engine(**engine_config)
|
||||
|
||||
188
docker-compose
Normal file
188
docker-compose
Normal file
@@ -0,0 +1,188 @@
|
||||
services:
|
||||
|
||||
commercial_mongo_service:
|
||||
container_name: commercial_mongo_service
|
||||
image: "bitnami/mongodb:latest"
|
||||
# image: "bitnami/mongodb:4.4.1-debian-10-r3"
|
||||
networks:
|
||||
- network_store_services
|
||||
environment:
|
||||
- MONGODB_DISABLE_ENFORCE_AUTH=true
|
||||
- MONGODB_ROOT_PASSWORD=root
|
||||
- MONGODB_DATABASE=mongo_database
|
||||
- MONGODB_USERNAME=mongo_user
|
||||
- MONGODB_PASSWORD=mongo_password
|
||||
- MONGO_INITDB_ROOT_USERNAME=mongo_user
|
||||
- MONGO_INITDB_ROOT_PASSWORD=mongo_password
|
||||
- MONGO_INITDB_DATABASE=mongo_database
|
||||
volumes:
|
||||
- wag_commercial_mongodb_data:/bitnami/mongodb
|
||||
ports:
|
||||
- "11110:27017"
|
||||
|
||||
commercial_memory_service:
|
||||
container_name: commercial_memory_service
|
||||
image: 'bitnami/redis:latest'
|
||||
restart: on-failure
|
||||
environment:
|
||||
- REDIS_HOST=commercial_redis_service
|
||||
- REDIS_PASSWORD=commercial_redis_password
|
||||
- REDIS_PORT=6379
|
||||
- REDIS_DB=0
|
||||
networks:
|
||||
- network_store_services
|
||||
ports:
|
||||
- "11112:6379"
|
||||
|
||||
postgres_commercial:
|
||||
image: 'bitnami/postgresql:latest'
|
||||
container_name: postgres_commercial
|
||||
restart: on-failure
|
||||
networks:
|
||||
- network_store_services
|
||||
environment:
|
||||
- POSTGRES_DB=wag_database
|
||||
- POSTGRES_USER=berkay_wag_user
|
||||
- POSTGRES_PASSWORD=berkay_wag_user_password
|
||||
depends_on:
|
||||
- commercial_mongo_service
|
||||
ports:
|
||||
- "5434:5432"
|
||||
volumes:
|
||||
- wag_postgres_commercial_data:/bitnami/postgresql
|
||||
|
||||
wag_management_service:
|
||||
container_name: wag_management_service
|
||||
restart: on-failure
|
||||
build:
|
||||
context: .
|
||||
dockerfile: service_app/Dockerfile
|
||||
ports:
|
||||
- "41575:41575"
|
||||
networks:
|
||||
- network_store_services
|
||||
# depends_on:
|
||||
# - wag_management_init_service
|
||||
# - grafana
|
||||
#
|
||||
# wag_management_service_second:
|
||||
# container_name: wag_management_service_second
|
||||
# restart: on-failure
|
||||
# build:
|
||||
# context: .
|
||||
# dockerfile: service_app/Dockerfile
|
||||
# ports:
|
||||
# - "41576:41575"
|
||||
# networks:
|
||||
# - network_store_services
|
||||
# depends_on:
|
||||
# - wag_management_init_service
|
||||
# - grafana
|
||||
|
||||
# wag_management_init_service:
|
||||
# container_name: wag_management_init_service
|
||||
# build:
|
||||
# context: .
|
||||
# dockerfile: service_app_init/Dockerfile
|
||||
# networks:
|
||||
# - network_store_services
|
||||
# depends_on:
|
||||
# - postgres_commercial
|
||||
#
|
||||
# wag_bank_services:
|
||||
# container_name: wag_bank_services
|
||||
# restart: on-failure
|
||||
# build:
|
||||
# context: .
|
||||
# dockerfile: service_app_banks/mailService.Dockerfile
|
||||
# networks:
|
||||
# - network_store_services
|
||||
# depends_on:
|
||||
# - postgres_commercial
|
||||
# environment:
|
||||
# - DATABASE_URL=postgresql+psycopg2://berkay_wag_user:berkay_wag_user_password@postgres_commercial:5432/wag_database
|
||||
# - PYTHONPATH=/service_app_banks
|
||||
##
|
||||
# wag_account_services:
|
||||
# container_name: wag_account_services
|
||||
# restart: on-failure
|
||||
# build:
|
||||
# context: .
|
||||
# dockerfile: service_account_records/account.Dockerfile
|
||||
# networks:
|
||||
# - network_store_services
|
||||
# depends_on:
|
||||
# - postgres_commercial
|
||||
# environment:
|
||||
# - DATABASE_URL=postgresql+psycopg2://berkay_wag_user:berkay_wag_user_password@postgres_commercial:5432/wag_database
|
||||
# - PYTHONPATH=/
|
||||
#
|
||||
# prometheus:
|
||||
# image: prom/prometheus
|
||||
# container_name: prometheus
|
||||
# ports:
|
||||
# - "9090:9090"
|
||||
# volumes:
|
||||
# - ./prometheus_data/prometheus.yml:/etc/prometheus/prometheus.yml
|
||||
# command:
|
||||
# - '--config.file=/etc/prometheus/prometheus.yml'
|
||||
# networks:
|
||||
# - network_store_services
|
||||
#
|
||||
# grafana:
|
||||
# image: grafana/grafana
|
||||
# container_name: grafana
|
||||
# ports:
|
||||
# - "3030:3000"
|
||||
# depends_on:
|
||||
# - prometheus
|
||||
# networks:
|
||||
# - network_store_services
|
||||
# environment:
|
||||
# - GF_SECURITY_ADMIN_USER=admin
|
||||
# - GF_SECURITY_ADMIN_PASSWORD=admin
|
||||
# - GF_USERS_ALLOW_SIGN_UP=false
|
||||
# - GF_USERS_ALLOW_ORG_CREATE=false
|
||||
# volumes:
|
||||
# - grafana_data:/var/lib/grafana
|
||||
#
|
||||
# wag_management_test_service:
|
||||
# container_name: wag_management_test_service
|
||||
# build:
|
||||
# context: .
|
||||
# dockerfile: service_app_test/Dockerfile
|
||||
# networks:
|
||||
# - network_store_services
|
||||
# depends_on:
|
||||
# - wag_management_init_service
|
||||
|
||||
# nginx-proxy-wag:
|
||||
# container_name: nginx-proxy-wag
|
||||
# image: 'jc21/nginx-proxy-manager:latest'
|
||||
# restart: unless-stopped
|
||||
# networks:
|
||||
# - network_store_services
|
||||
# depends_on:
|
||||
# - wag_management_service
|
||||
# ports:
|
||||
# - '80:80' # Public HTTP Port
|
||||
# - '443:443' # Public HTTPS Port
|
||||
# - '81:81' # Admin Web Port
|
||||
# volumes:
|
||||
# - ./data:/data
|
||||
# - ./letsencrypt:/etc/letsencrypt
|
||||
|
||||
networks:
|
||||
network_store_services:
|
||||
|
||||
volumes:
|
||||
grafana_data:
|
||||
wag_postgres_commercial_data:
|
||||
wag_commercial_mongodb_data:
|
||||
|
||||
# environment:
|
||||
# - DATABASE_URL=postgresql+psycopg2://berkay_wag_user:berkay_wag_user_password@postgres_commercial:5432/wag_database
|
||||
# - REDIS_HOST=commercial_memory_service
|
||||
# - REDIS_PASSWORD=commercial_redis_password
|
||||
# - REDIS_PORT=6379
|
||||
# - REDIS_DB=0
|
||||
@@ -2,8 +2,8 @@ services:
|
||||
|
||||
commercial_mongo_service:
|
||||
container_name: commercial_mongo_service
|
||||
image: "bitnami/mongodb:latest"
|
||||
# image: "bitnami/mongodb:4.4.1-debian-10-r3"
|
||||
# image: "bitnami/mongodb:latest"
|
||||
image: "bitnami/mongodb:4.4.1-debian-10-r3"
|
||||
networks:
|
||||
- network_store_services
|
||||
environment:
|
||||
@@ -61,78 +61,91 @@ services:
|
||||
- "41575:41575"
|
||||
networks:
|
||||
- network_store_services
|
||||
depends_on:
|
||||
- wag_management_init_service
|
||||
- grafana
|
||||
|
||||
wag_management_service_second:
|
||||
container_name: wag_management_service_second
|
||||
restart: on-failure
|
||||
build:
|
||||
context: .
|
||||
dockerfile: service_app/Dockerfile
|
||||
ports:
|
||||
- "41576:41575"
|
||||
networks:
|
||||
- network_store_services
|
||||
depends_on:
|
||||
- wag_management_init_service
|
||||
- grafana
|
||||
|
||||
wag_management_init_service:
|
||||
container_name: wag_management_init_service
|
||||
build:
|
||||
context: .
|
||||
dockerfile: service_app_init/Dockerfile
|
||||
networks:
|
||||
- network_store_services
|
||||
depends_on:
|
||||
- postgres_commercial
|
||||
|
||||
wag_bank_services:
|
||||
container_name: wag_bank_services
|
||||
restart: on-failure
|
||||
build:
|
||||
context: .
|
||||
dockerfile: service_app_banks/mailService.Dockerfile
|
||||
networks:
|
||||
- network_store_services
|
||||
depends_on:
|
||||
- postgres_commercial
|
||||
environment:
|
||||
- DATABASE_URL=postgresql+psycopg2://berkay_wag_user:berkay_wag_user_password@postgres_commercial:5432/wag_database
|
||||
volumes:
|
||||
- wag_cronjob-volume:/etc/cron.d/send_email_cron
|
||||
|
||||
prometheus:
|
||||
image: prom/prometheus
|
||||
container_name: prometheus
|
||||
ports:
|
||||
- "9090:9090"
|
||||
volumes:
|
||||
- ./prometheus_data/prometheus.yml:/etc/prometheus/prometheus.yml
|
||||
command:
|
||||
- '--config.file=/etc/prometheus/prometheus.yml'
|
||||
networks:
|
||||
- network_store_services
|
||||
|
||||
grafana:
|
||||
image: grafana/grafana
|
||||
container_name: grafana
|
||||
ports:
|
||||
- "3000:3000"
|
||||
depends_on:
|
||||
- prometheus
|
||||
networks:
|
||||
- network_store_services
|
||||
environment:
|
||||
- GF_SECURITY_ADMIN_USER=admin
|
||||
- GF_SECURITY_ADMIN_PASSWORD=admin
|
||||
- GF_USERS_ALLOW_SIGN_UP=false
|
||||
- GF_USERS_ALLOW_ORG_CREATE=false
|
||||
volumes:
|
||||
- grafana_data:/var/lib/grafana
|
||||
# depends_on:
|
||||
# - wag_management_init_service
|
||||
# - grafana
|
||||
#
|
||||
# wag_management_service_second:
|
||||
# container_name: wag_management_service_second
|
||||
# restart: on-failure
|
||||
# build:
|
||||
# context: .
|
||||
# dockerfile: service_app/Dockerfile
|
||||
# ports:
|
||||
# - "41576:41575"
|
||||
# networks:
|
||||
# - network_store_services
|
||||
# depends_on:
|
||||
# - wag_management_init_service
|
||||
# - grafana
|
||||
|
||||
# wag_management_init_service:
|
||||
# container_name: wag_management_init_service
|
||||
# build:
|
||||
# context: .
|
||||
# dockerfile: service_app_init/Dockerfile
|
||||
# networks:
|
||||
# - network_store_services
|
||||
# depends_on:
|
||||
# - postgres_commercial
|
||||
#
|
||||
# wag_bank_services:
|
||||
# container_name: wag_bank_services
|
||||
# restart: on-failure
|
||||
# build:
|
||||
# context: .
|
||||
# dockerfile: service_app_banks/mailService.Dockerfile
|
||||
# networks:
|
||||
# - network_store_services
|
||||
# depends_on:
|
||||
# - postgres_commercial
|
||||
# environment:
|
||||
# - DATABASE_URL=postgresql+psycopg2://berkay_wag_user:berkay_wag_user_password@postgres_commercial:5432/wag_database
|
||||
# - PYTHONPATH=/service_app_banks
|
||||
##
|
||||
# wag_account_services:
|
||||
# container_name: wag_account_services
|
||||
# restart: on-failure
|
||||
# build:
|
||||
# context: .
|
||||
# dockerfile: service_account_records/account.Dockerfile
|
||||
# networks:
|
||||
# - network_store_services
|
||||
# depends_on:
|
||||
# - postgres_commercial
|
||||
# environment:
|
||||
# - DATABASE_URL=postgresql+psycopg2://berkay_wag_user:berkay_wag_user_password@postgres_commercial:5432/wag_database
|
||||
# - PYTHONPATH=/
|
||||
#
|
||||
# prometheus:
|
||||
# image: prom/prometheus
|
||||
# container_name: prometheus
|
||||
# ports:
|
||||
# - "9090:9090"
|
||||
# volumes:
|
||||
# - ./prometheus_data/prometheus.yml:/etc/prometheus/prometheus.yml
|
||||
# command:
|
||||
# - '--config.file=/etc/prometheus/prometheus.yml'
|
||||
# networks:
|
||||
# - network_store_services
|
||||
#
|
||||
# grafana:
|
||||
# image: grafana/grafana
|
||||
# container_name: grafana
|
||||
# ports:
|
||||
# - "3030:3000"
|
||||
# depends_on:
|
||||
# - prometheus
|
||||
# networks:
|
||||
# - network_store_services
|
||||
# environment:
|
||||
# - GF_SECURITY_ADMIN_USER=admin
|
||||
# - GF_SECURITY_ADMIN_PASSWORD=admin
|
||||
# - GF_USERS_ALLOW_SIGN_UP=false
|
||||
# - GF_USERS_ALLOW_ORG_CREATE=false
|
||||
# volumes:
|
||||
# - grafana_data:/var/lib/grafana
|
||||
#
|
||||
# wag_management_test_service:
|
||||
# container_name: wag_management_test_service
|
||||
# build:
|
||||
@@ -142,7 +155,7 @@ services:
|
||||
# - network_store_services
|
||||
# depends_on:
|
||||
# - wag_management_init_service
|
||||
#
|
||||
|
||||
# nginx-proxy-wag:
|
||||
# container_name: nginx-proxy-wag
|
||||
# image: 'jc21/nginx-proxy-manager:latest'
|
||||
@@ -163,9 +176,8 @@ networks:
|
||||
network_store_services:
|
||||
|
||||
volumes:
|
||||
wag_cronjob-volume:
|
||||
wag_postgres_commercial_data:
|
||||
grafana_data:
|
||||
wag_postgres_commercial_data:
|
||||
wag_commercial_mongodb_data:
|
||||
|
||||
# environment:
|
||||
|
||||
181
docker-prod
Normal file
181
docker-prod
Normal file
@@ -0,0 +1,181 @@
|
||||
services:
|
||||
|
||||
commercial_mongo_service:
|
||||
container_name: commercial_mongo_service
|
||||
# image: "bitnami/mongodb:latest"
|
||||
image: "bitnami/mongodb:4.4.1-debian-10-r3"
|
||||
networks:
|
||||
- network_store_services
|
||||
environment:
|
||||
- MONGODB_DISABLE_ENFORCE_AUTH=true
|
||||
- MONGODB_ROOT_PASSWORD=root
|
||||
- MONGODB_DATABASE=mongo_database
|
||||
- MONGODB_USERNAME=mongo_user
|
||||
- MONGODB_PASSWORD=mongo_password
|
||||
- MONGO_INITDB_ROOT_USERNAME=mongo_user
|
||||
- MONGO_INITDB_ROOT_PASSWORD=mongo_password
|
||||
- MONGO_INITDB_DATABASE=mongo_database
|
||||
volumes:
|
||||
- wag_commercial_mongodb_data:/bitnami/mongodb
|
||||
ports:
|
||||
- "11110:27017"
|
||||
|
||||
commercial_memory_service:
|
||||
container_name: commercial_memory_service
|
||||
image: 'bitnami/redis:latest'
|
||||
restart: on-failure
|
||||
environment:
|
||||
- REDIS_HOST=commercial_redis_service
|
||||
- REDIS_PASSWORD=commercial_redis_password
|
||||
- REDIS_PORT=6379
|
||||
- REDIS_DB=0
|
||||
networks:
|
||||
- network_store_services
|
||||
ports:
|
||||
- "11112:6379"
|
||||
|
||||
postgres_commercial:
|
||||
image: 'bitnami/postgresql:latest'
|
||||
container_name: postgres_commercial
|
||||
restart: on-failure
|
||||
networks:
|
||||
- network_store_services
|
||||
environment:
|
||||
- POSTGRES_DB=wag_database
|
||||
- POSTGRES_USER=berkay_wag_user
|
||||
- POSTGRES_PASSWORD=berkay_wag_user_password
|
||||
depends_on:
|
||||
- commercial_mongo_service
|
||||
ports:
|
||||
- "5434:5432"
|
||||
volumes:
|
||||
- wag_postgres_commercial_data:/bitnami/postgresql
|
||||
|
||||
wag_management_service:
|
||||
container_name: wag_management_service
|
||||
restart: on-failure
|
||||
build:
|
||||
context: .
|
||||
dockerfile: service_app/Dockerfile
|
||||
ports:
|
||||
- "41575:41575"
|
||||
networks:
|
||||
- network_store_services
|
||||
depends_on:
|
||||
- wag_management_init_service
|
||||
# - grafana
|
||||
|
||||
wag_management_service_second:
|
||||
container_name: wag_management_service_second
|
||||
restart: on-failure
|
||||
build:
|
||||
context: .
|
||||
dockerfile: service_app/Dockerfile
|
||||
ports:
|
||||
- "41576:41575"
|
||||
networks:
|
||||
- network_store_services
|
||||
depends_on:
|
||||
- wag_management_init_service
|
||||
# - grafana
|
||||
|
||||
wag_management_init_service:
|
||||
container_name: wag_management_init_service
|
||||
build:
|
||||
context: .
|
||||
dockerfile: service_app_init/Dockerfile
|
||||
networks:
|
||||
- network_store_services
|
||||
depends_on:
|
||||
- postgres_commercial
|
||||
|
||||
wag_bank_services:
|
||||
container_name: wag_bank_services
|
||||
restart: on-failure
|
||||
build:
|
||||
context: .
|
||||
dockerfile: service_app_banks/mailService.Dockerfile
|
||||
networks:
|
||||
- network_store_services
|
||||
depends_on:
|
||||
- postgres_commercial
|
||||
environment:
|
||||
- DATABASE_URL=postgresql+psycopg2://berkay_wag_user:berkay_wag_user_password@postgres_commercial:5432/wag_database
|
||||
- PYTHONPATH=/service_app_banks
|
||||
#
|
||||
wag_account_services:
|
||||
container_name: wag_account_services
|
||||
restart: on-failure
|
||||
build:
|
||||
context: .
|
||||
dockerfile: service_account_records/account.Dockerfile
|
||||
networks:
|
||||
- network_store_services
|
||||
depends_on:
|
||||
- postgres_commercial
|
||||
environment:
|
||||
- DATABASE_URL=postgresql+psycopg2://berkay_wag_user:berkay_wag_user_password@postgres_commercial:5432/wag_database
|
||||
- PYTHONPATH=/
|
||||
|
||||
# prometheus:
|
||||
# image: prom/prometheus
|
||||
# container_name: prometheus
|
||||
# ports:
|
||||
# - "9090:9090"
|
||||
# volumes:
|
||||
# - ./prometheus_data/prometheus.yml:/etc/prometheus/prometheus.yml
|
||||
# command:
|
||||
# - '--config.file=/etc/prometheus/prometheus.yml'
|
||||
# networks:
|
||||
# - network_store_services
|
||||
#
|
||||
# grafana:
|
||||
# image: grafana/grafana
|
||||
# container_name: grafana
|
||||
# ports:
|
||||
# - "3030:3000"
|
||||
# depends_on:
|
||||
# - prometheus
|
||||
# networks:
|
||||
# - network_store_services
|
||||
# environment:
|
||||
# - GF_SECURITY_ADMIN_USER=admin
|
||||
# - GF_SECURITY_ADMIN_PASSWORD=admin
|
||||
# - GF_USERS_ALLOW_SIGN_UP=false
|
||||
# - GF_USERS_ALLOW_ORG_CREATE=false
|
||||
# volumes:
|
||||
# - grafana_data:/var/lib/grafana
|
||||
|
||||
wag_management_test_service:
|
||||
container_name: wag_management_test_service
|
||||
build:
|
||||
context: .
|
||||
dockerfile: service_app_test/Dockerfile
|
||||
networks:
|
||||
- network_store_services
|
||||
depends_on:
|
||||
- wag_management_init_service
|
||||
|
||||
# nginx-proxy-wag:
|
||||
# container_name: nginx-proxy-wag
|
||||
# image: 'jc21/nginx-proxy-manager:latest'
|
||||
# restart: unless-stopped
|
||||
# networks:
|
||||
# - network_store_services
|
||||
# depends_on:
|
||||
# - wag_management_service
|
||||
# ports:
|
||||
# - '80:80' # Public HTTP Port
|
||||
# - '443:443' # Public HTTPS Port
|
||||
# - '81:81' # Admin Web Port
|
||||
# volumes:
|
||||
# - ./data:/data
|
||||
# - ./letsencrypt:/etc/letsencrypt
|
||||
|
||||
networks:
|
||||
network_store_services:
|
||||
|
||||
volumes:
|
||||
grafana_data:
|
||||
wag_postgres_commercial_data:
|
||||
wag_commercial_mongodb_data:
|
||||
93
service_account_records/.dockerignore
Normal file
93
service_account_records/.dockerignore
Normal file
@@ -0,0 +1,93 @@
|
||||
# Git
|
||||
.git
|
||||
.gitignore
|
||||
.gitattributes
|
||||
|
||||
|
||||
# CI
|
||||
.codeclimate.yml
|
||||
.travis.yml
|
||||
.taskcluster.yml
|
||||
|
||||
# Docker
|
||||
docker-compose.yml
|
||||
service_app/Dockerfile
|
||||
.docker
|
||||
.dockerignore
|
||||
|
||||
# Byte-compiled / optimized / DLL files
|
||||
**/__pycache__/
|
||||
**/*.py[cod]
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
service_app/env/
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.coverage
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
target/
|
||||
|
||||
# Virtual environment
|
||||
service_app/.env
|
||||
.venv/
|
||||
venv/
|
||||
|
||||
# PyCharm
|
||||
.idea
|
||||
|
||||
# Python mode for VIM
|
||||
.ropeproject
|
||||
**/.ropeproject
|
||||
|
||||
# Vim swap files
|
||||
**/*.swp
|
||||
|
||||
# VS Code
|
||||
.vscode/
|
||||
|
||||
test_application/
|
||||
|
||||
|
||||
162
service_account_records/.gitignore
vendored
Normal file
162
service_account_records/.gitignore
vendored
Normal file
@@ -0,0 +1,162 @@
|
||||
# ---> Python
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.idea/
|
||||
.Python
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
share/python-wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
MANIFEST
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.nox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
*.py,cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
cover/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
db.sqlite3
|
||||
db.sqlite3-journal
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
.pybuilder/
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# IPython
|
||||
profile_default/
|
||||
ipython_config.py
|
||||
|
||||
# pyenv
|
||||
# For a library or package, you might want to ignore these files since the code is
|
||||
# intended to run in multiple environments; otherwise, check them in:
|
||||
# .python-version
|
||||
|
||||
# pipenv
|
||||
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||
# install all needed dependencies.
|
||||
#Pipfile.lock
|
||||
|
||||
# poetry
|
||||
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
||||
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||
# commonly ignored for libraries.
|
||||
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
||||
#poetry.lock
|
||||
|
||||
# pdm
|
||||
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
||||
#pdm.lock
|
||||
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
||||
# in version control.
|
||||
# https://pdm.fming.dev/#use-with-ide
|
||||
.pdm.toml
|
||||
|
||||
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
||||
__pypackages__/
|
||||
|
||||
# Celery stuff
|
||||
celerybeat-schedule
|
||||
celerybeat.pid
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# Environments
|
||||
service_app/.env
|
||||
.venv
|
||||
service_app/env/
|
||||
venv/
|
||||
service_app/env/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
.spyproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
.dmypy.json
|
||||
dmypy.json
|
||||
|
||||
# Pyre type checker
|
||||
.pyre/
|
||||
|
||||
# pytype static type analyzer
|
||||
.pytype/
|
||||
|
||||
# Cython debug symbols
|
||||
cython_debug/
|
||||
|
||||
# PyCharm
|
||||
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
||||
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
||||
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||
#.idea/
|
||||
|
||||
0
service_account_records/__init__.py
Normal file
0
service_account_records/__init__.py
Normal file
25
service_account_records/account.Dockerfile
Normal file
25
service_account_records/account.Dockerfile
Normal file
@@ -0,0 +1,25 @@
|
||||
FROM python:3.10-slim
|
||||
|
||||
ENV PYTHONDONTWRITEBYTECODE 1
|
||||
ENV PYTHONUNBUFFERED 1
|
||||
|
||||
COPY ../service_account_records/account.requirements.txt .
|
||||
|
||||
RUN pip install --upgrade pip
|
||||
RUN pip install --no-cache-dir --upgrade -r account.requirements.txt
|
||||
|
||||
COPY ../service_account_records ./service_account_records
|
||||
COPY ../api_library ./service_account_records/api_library
|
||||
COPY ../api_services ./service_account_records/api_services
|
||||
COPY ../databases ./service_account_records/databases
|
||||
COPY ../api_configs ./service_account_records/api_configs
|
||||
COPY ../api_objects ./service_account_records/api_objects
|
||||
COPY ../api_validations ./service_account_records/api_validations
|
||||
|
||||
RUN apt-get update && apt-get install -y cron
|
||||
|
||||
# 11:00 Istanbul Time (UTC+3) system time is 08:00 UTC
|
||||
RUN echo "*/15 * * * * /usr/local/bin/python /service_account_records/app_accounts.py >> /var/log/cron.log 2>&1" >> /tmp/crontab_list && crontab /tmp/crontab_list
|
||||
RUN touch /var/log/cron.log
|
||||
|
||||
CMD cron && tail -f /var/log/cron.log
|
||||
18
service_account_records/account.requirements.txt
Normal file
18
service_account_records/account.requirements.txt
Normal file
@@ -0,0 +1,18 @@
|
||||
arrow
|
||||
Deprecated
|
||||
python-dotenv
|
||||
pydantic
|
||||
sqlalchemy-mixins
|
||||
psycopg2-binary
|
||||
redmail
|
||||
redbox
|
||||
pandas
|
||||
xlrd
|
||||
Unidecode
|
||||
redis
|
||||
cryptography
|
||||
fastapi
|
||||
requests
|
||||
pymongo
|
||||
unidecode
|
||||
textdistance
|
||||
341
service_account_records/app_accounts.py
Normal file
341
service_account_records/app_accounts.py
Normal file
@@ -0,0 +1,341 @@
|
||||
import sys
|
||||
|
||||
if "/service_account_records" not in list(sys.path):
|
||||
sys.path.append("/service_account_records")
|
||||
|
||||
from decimal import Decimal
|
||||
|
||||
from api_services.bank_actions.wag_account_record_parser import (
|
||||
parse_comment_with_name,
|
||||
parse_comment_with_name_iban_description,
|
||||
)
|
||||
from databases import (
|
||||
AccountRecords,
|
||||
BuildIbans,
|
||||
BuildDecisionBook,
|
||||
Build,
|
||||
BuildLivingSpace,
|
||||
People,
|
||||
OccupantTypes,
|
||||
)
|
||||
from api_validations.validations_request import ListOptions
|
||||
from api_library.date_time_actions.date_functions import system_arrow
|
||||
from databases.sql_models.building.build import BuildParts
|
||||
from databases.sql_models.building.decision_book import BuildDecisionBookPayments
|
||||
from databases.sql_models.others.enums import ApiEnumDropdown
|
||||
|
||||
|
||||
account_list = ListOptions(
|
||||
page=1,
|
||||
size=1000,
|
||||
order_field="bank_date",
|
||||
order_type="desc",
|
||||
)
|
||||
|
||||
|
||||
def account_records_find_decision_book():
|
||||
created_ibans, iban_build_dict = [], {}
|
||||
AccountRecords.filter_attr = account_list
|
||||
account_records_list: list[AccountRecords] = AccountRecords.filter_all(
|
||||
AccountRecords.build_decision_book_id == None,
|
||||
AccountRecords.remainder_balance < AccountRecords.currency_value,
|
||||
system=True,
|
||||
).data
|
||||
for account_record in account_records_list:
|
||||
if account_record.iban not in created_ibans:
|
||||
found_iban: BuildIbans = BuildIbans.filter_one(
|
||||
BuildIbans.iban == account_record.iban, system=True
|
||||
).data
|
||||
if not found_iban:
|
||||
try:
|
||||
create_build_ibans = BuildIbans.find_or_create(
|
||||
iban=account_record.iban,
|
||||
start_date=str(system_arrow.now().shift(days=-1)),
|
||||
)
|
||||
create_build_ibans.save_and_confirm()
|
||||
created_ibans.append(account_record.iban)
|
||||
except Exception as e:
|
||||
print("Exception of find_decision_book ln:55", e)
|
||||
if not getattr(found_iban, "build_id", None): # It is in database already
|
||||
iban_build_dict["iban"] = account_record.iban
|
||||
iban_build_dict["build_id"] = None
|
||||
else:
|
||||
found_decision_book = BuildDecisionBook.filter_one(
|
||||
BuildDecisionBook.build_id == found_iban.build_id,
|
||||
BuildDecisionBook.expiry_starts <= account_record.bank_date,
|
||||
BuildDecisionBook.expiry_ends > account_record.bank_date,
|
||||
).data
|
||||
if found_decision_book:
|
||||
account_record.update(
|
||||
build_decision_book_id=found_decision_book.id,
|
||||
build_decision_book_uu_id=str(found_decision_book.uu_id),
|
||||
build_id=found_decision_book.build_id,
|
||||
build_uu_id=str(found_decision_book.build_uu_id),
|
||||
)
|
||||
account_record.save()
|
||||
return
|
||||
|
||||
|
||||
def account_save_search_result(account_record, similarity_result):
|
||||
found_company = similarity_result.get("company", None)
|
||||
found_customer, part, build = (
|
||||
similarity_result.get("living_space", None),
|
||||
None,
|
||||
None,
|
||||
)
|
||||
if found_customer:
|
||||
part = BuildParts.filter_by_one(
|
||||
id=found_customer.build_parts_id, human_livable=True
|
||||
).data
|
||||
if part:
|
||||
build = Build.filter_by_one(id=part.build_id).data
|
||||
|
||||
account_record.similarity = similarity_result.get("similarity", 0.00)
|
||||
account_record.found_from = similarity_result.get("found_from", None)
|
||||
account_record.company_id = getattr(found_company, "id", None)
|
||||
account_record.company_uu_id = getattr(found_company, "uu_id", None)
|
||||
account_record.build_parts_id = getattr(part, "id", None)
|
||||
account_record.build_parts_uu_id = getattr(part, "uu_id", None)
|
||||
|
||||
if not account_record.build_id and build:
|
||||
account_record.build_id = getattr(build, "id", None)
|
||||
account_record.build_uu_id = getattr(build, "uu_id", None)
|
||||
|
||||
# account_record.send_person_id = getattr(found_send_person, "id", None)
|
||||
# account_record.send_person_uu_id = getattr(found_send_person, "uu_id", None)
|
||||
account_record.living_space_id = getattr(found_customer, "id", None)
|
||||
account_record.living_space_uu_id = getattr(found_customer, "uu_id", None)
|
||||
account_record.save()
|
||||
|
||||
|
||||
def account_get_people_and_living_space_info_via_iban() -> dict:
|
||||
build_living_space_dict = {}
|
||||
account_records_ibans = (
|
||||
AccountRecords.select_only(
|
||||
AccountRecords.build_decision_book_id != None,
|
||||
select_args=[AccountRecords.iban],
|
||||
system=True,
|
||||
)
|
||||
.query.distinct(AccountRecords.iban)
|
||||
.all()
|
||||
)
|
||||
flat_resident = OccupantTypes.filter_by_one(
|
||||
system=True, occupant_category_type="FL", occupant_code="FL-RES"
|
||||
).data
|
||||
flat_owner = OccupantTypes.filter_by_one(
|
||||
system=True, occupant_category_type="FL", occupant_code="FL-OWN"
|
||||
).data
|
||||
flat_tenant = OccupantTypes.filter_by_one(
|
||||
system=True, occupant_category_type="FL", occupant_code="FL-TEN"
|
||||
).data
|
||||
flat_represent = OccupantTypes.filter_by_one(
|
||||
system=True, occupant_category_type="FL", occupant_code="FL-REP"
|
||||
).data
|
||||
for account_records_iban in account_records_ibans:
|
||||
if account_records_iban not in build_living_space_dict:
|
||||
build_iban: BuildIbans = BuildIbans.filter_one(
|
||||
BuildIbans.iban == account_records_iban[0], system=True
|
||||
).data
|
||||
build_parts = BuildParts.filter_by_all(
|
||||
system=True, build_id=build_iban.build_id, human_livable=True
|
||||
).data
|
||||
living_spaces = BuildLivingSpace.filter_all(
|
||||
BuildLivingSpace.build_parts_id.in_(
|
||||
[build_parts.id for build_parts in build_parts]
|
||||
),
|
||||
BuildLivingSpace.occupant_type.in_(
|
||||
[flat_resident.id, flat_owner.id, flat_tenant.id, flat_represent.id]
|
||||
),
|
||||
).data
|
||||
living_spaces_people = [
|
||||
living_space.person_id
|
||||
for living_space in living_spaces
|
||||
if living_space.person_id
|
||||
]
|
||||
people_list = People.filter_all(
|
||||
People.id.in_(living_spaces_people), system=True
|
||||
).data
|
||||
print("build_parts", build_parts)
|
||||
build_living_space_dict[str(account_records_iban[0])] = {
|
||||
"people": list(people_list),
|
||||
"living_space": list(living_spaces),
|
||||
"build_parts": list(build_parts),
|
||||
}
|
||||
return build_living_space_dict
|
||||
|
||||
|
||||
def _accept_similarity_result(account_record, similarity_result, threshold=0.8):
    """Persist *similarity_result* when it is good enough; return True if accepted.

    A result is accepted when its score reaches *threshold* AND strictly beats
    the score already stored on the record. A missing ``similarity`` key is
    treated as 0 (rejected) instead of raising ``TypeError`` on ``float(None)``.
    """
    found_score = float(similarity_result.get("similarity") or 0)
    stored_score = float(account_record.similarity or 0)
    if found_score >= threshold and found_score > stored_score:
        account_save_search_result(
            account_record=account_record, similarity_result=similarity_result
        )
        return True
    return False


def account_records_search():
    """Match account records to people/living spaces by text similarity.

    For every account record, first try a name-based match against the
    living-space lookup; if that is not accepted, fall back to a
    name/IBAN/description match. Accepted results are persisted via
    ``account_save_search_result``.
    """
    build_living_space_dict = account_get_people_and_living_space_info_via_iban()
    # filter_attr presumably scopes subsequent AccountRecords queries to
    # account_list, mirroring send_accounts_to_decision_payment — TODO confirm.
    AccountRecords.filter_attr, found_list = account_list, []
    account_records_list: list[AccountRecords] = AccountRecords.filter_all(
        # AccountRecords.build_decision_book_id != None, system=True
    ).data
    for account_record in account_records_list:
        primary_result = parse_comment_with_name(
            account_record=account_record, living_space_dict=build_living_space_dict
        )
        if _accept_similarity_result(account_record, primary_result):
            found_list.append(primary_result)
            continue
        # Fallback: match on the record's own comment/IBAN/description.
        fallback_result = parse_comment_with_name_iban_description(
            account_record=account_record
        )
        if _accept_similarity_result(account_record, fallback_result):
            found_list.append(fallback_result)
    print("Account Records Search : ", len(found_list), "/", len(account_records_list))
    return
|
||||
|
||||
|
||||
def pay_the_registration(
    account_record, receive_enum, debit_enum, is_old_record: bool = False
):
    """Apply an account record's unapplied balance to open debit payments.

    Walks the debit-type payment rows for the record's build part in
    process-date order; for each debit that is not yet fully covered it
    creates a matching receive-type payment row and increases the account
    record's ``remainder_balance`` by the amount just applied.

    Args:
        account_record: Bank account record supplying the money to apply.
        receive_enum: Enum row for the "receive" payment type (its ``id`` /
            ``uu_id`` are written onto every created payment row).
        debit_enum: Enum row for the "debit" payment type used to select
            the open debits.
        is_old_record: When True, skip the year/month filters derived from
            the record's bank date so older periods can also be settled.

    Returns:
        The still-unapplied portion of the record's currency value
        (zero or negative means nothing left to distribute).
    """
    # Unapplied money = total currency value minus what was already applied.
    current_currency_value = float(Decimal(account_record.currency_value)) - float(
        Decimal(account_record.remainder_balance)
    )
    if not current_currency_value > 0:
        return current_currency_value

    process_date = system_arrow.get(account_record.bank_date)
    account_bank_date_year, account_bank_date_month = (
        process_date.date().year,
        process_date.date().month,
    )
    # SQLAlchemy-style filter expressions; `== None` is an IS NULL test on
    # the column, not a Python identity check.
    payment_arguments_debit = [
        BuildDecisionBookPayments.build_parts_id == account_record.build_parts_id,
        BuildDecisionBookPayments.payment_types_id == debit_enum.id,
        BuildDecisionBookPayments.account_records_id == None,
    ]
    if not is_old_record:
        # Restrict to the bank-date month/year unless settling old records.
        payment_arguments_debit.extend(
            [
                BuildDecisionBookPayments.process_date_y == int(account_bank_date_year),
                BuildDecisionBookPayments.process_date_m
                == int(account_bank_date_month),
            ]
        )
    # Oldest debits are settled first.
    payments = (
        BuildDecisionBookPayments.filter_all(*payment_arguments_debit)
        .query.order_by(BuildDecisionBookPayments.process_date.asc())
        .all()
    )
    for payment in payments:
        if not current_currency_value > 0:
            # Money exhausted; stop early.
            return current_currency_value

        # Receive rows already booked against this same debit line.
        payment_arguments_receive = [
            BuildDecisionBookPayments.build_parts_id == account_record.build_parts_id,
            BuildDecisionBookPayments.payment_plan_time_periods
            == payment.payment_plan_time_periods,
            BuildDecisionBookPayments.payment_types_id == receive_enum.id,
            BuildDecisionBookPayments.build_decision_book_item_id
            == payment.build_decision_book_item_id,
            BuildDecisionBookPayments.decision_book_project_id
            == payment.decision_book_project_id,
            BuildDecisionBookPayments.process_date == payment.process_date,
        ]
        if not is_old_record:
            payment_arguments_receive.extend(
                [
                    BuildDecisionBookPayments.process_date_y
                    == int(account_bank_date_year),
                    BuildDecisionBookPayments.process_date_m
                    == int(account_bank_date_month),
                ]
            )

        payment_received = (
            BuildDecisionBookPayments.filter_all(*payment_arguments_receive).data or []
        )
        # NOTE: the comprehension variable shadows the outer `payment` only
        # inside the comprehension's own scope.
        sum_of_payment_received = sum(
            [abs(payment.payment_amount) for payment in payment_received]
        )
        # Net amount still owed on this debit after previous receives.
        net_amount = float(abs(Decimal(payment.payment_amount))) - float(
            abs(Decimal(sum_of_payment_received))
        )
        if not net_amount > 0:
            continue
        if float(abs(current_currency_value)) < float(abs(net_amount)):
            # Not enough money left to cover this debit fully; apply the rest.
            net_amount = float(current_currency_value)
        process_date = system_arrow.get(payment.process_date)
        try:
            # Book the receive row mirroring the debit's identifying columns.
            created_book_payment = BuildDecisionBookPayments.find_or_create(
                payment_plan_time_periods=payment.payment_plan_time_periods,
                payment_amount=float(abs(net_amount)),
                payment_types_id=receive_enum.id,
                payment_types_uu_id=str(receive_enum.uu_id),
                process_date=str(process_date),
                process_date_m=process_date.date().month,
                process_date_y=process_date.date().year,
                period_time=f"{process_date.year}-{str(process_date.month).zfill(2)}",
                build_parts_id=payment.build_parts_id,
                build_parts_uu_id=str(payment.build_parts_uu_id),
                account_records_id=account_record.id,
                account_records_uu_id=str(account_record.uu_id),
                build_decision_book_item_id=payment.build_decision_book_item_id,
                build_decision_book_item_uu_id=str(
                    payment.build_decision_book_item_uu_id
                ),
                decision_book_project_id=payment.decision_book_project_id,
                decision_book_project_uu_id=str(payment.decision_book_project_uu_id),
            )
            created_book_payment.save_and_confirm()
            created_payment_amount = float(Decimal(created_book_payment.payment_amount))
            # Track how much has now been applied onto the account record.
            remainder_balance = float(
                Decimal(account_record.remainder_balance)
            ) + float(abs(created_payment_amount))
            account_record.update(remainder_balance=remainder_balance)
            account_record.save()
            if current_currency_value >= abs(net_amount):
                current_currency_value -= abs(net_amount)
        except Exception as e:
            # NOTE(review): broad except is only printed, so a failed booking
            # is silently skipped — confirm best-effort behavior is intended.
            print("Exception of decision payment ln:300", e)
    return current_currency_value
|
||||
|
||||
|
||||
def send_accounts_to_decision_payment():
    """Distribute approved, not-fully-settled receive records to debits.

    Looks up the receive/debit payment-type enums once, selects every
    approved receive-type record whose remainder has not reached its full
    currency value, applies each one via ``pay_the_registration`` (retrying
    without the month scope when money is left over), and closes records
    whose remainder now equals their total.
    """
    AccountRecords.filter_attr = account_list
    # Resolve the two payment-type enum rows up front.
    receive_enum = ApiEnumDropdown.filter_by_one(
        system=True, enum_class="DebitTypes", key="DT-R"
    ).data
    debit_enum = ApiEnumDropdown.filter_by_one(
        system=True, enum_class="DebitTypes", key="DT-D"
    ).data
    open_records: list[AccountRecords] = AccountRecords.filter_all(
        AccountRecords.remainder_balance < AccountRecords.currency_value,
        AccountRecords.approved_record == True,
        AccountRecords.receive_debit == receive_enum.id,
    ).data
    for record in open_records:
        leftover = pay_the_registration(record, receive_enum, debit_enum)
        if leftover > 0:
            # Second pass without the bank-date month/year restriction.
            pay_the_registration(record, receive_enum, debit_enum, True)
        applied = abs(float(Decimal(record.remainder_balance)))
        total = abs(float(Decimal(record.currency_value)))
        if applied == total:
            # Fully settled: mark the record closed.
            record.update(status_id=97)
            record.save()
        # # # todo If the payment is more than the amount, then create a new account record with the remaining amount
    return
|
||||
|
||||
|
||||
def account_records_service() -> None:
    """Run the account-records pipeline end to end.

    Steps, in order: attach records to decision books, run the similarity
    search, then distribute balances onto decision-book payments.
    """
    print("Account Records Service is running...")
    pipeline = (
        account_records_find_decision_book,
        account_records_search,
        send_accounts_to_decision_payment,
    )
    for step in pipeline:
        step()
    print("Account Records Service is finished...")
    return
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running the full service pipeline directly as a script.
    account_records_service()
|
||||
8
service_account_records/configs.py
Normal file
8
service_account_records/configs.py
Normal file
@@ -0,0 +1,8 @@
|
||||
class AccountConfig:
    """Static configuration for the account-records matching service."""

    # Presumably a lookback window in days — TODO confirm against callers.
    BEFORE_DAY: int = 30
    # Canonical category name -> keyword spellings (including common typos)
    # searched for in bank-statement description text (Turkish terms).
    CATEGORIES: dict[str, list[str]] = {
        "DAIRE": ["daire", "dagire", "daare", "nolu daire", "no", "nolu dairenin"],
        "APARTMAN": ["apartman", "aparman", "aprmn"],
        "VILLA": ["villa", "vlla"],
        "BINA": ["bina", "binna"],
    }
|
||||
2
service_account_records/crontab_list
Normal file
2
service_account_records/crontab_list
Normal file
@@ -0,0 +1,2 @@
|
||||
0 8 * * * /usr/local/bin/python /service_app_banks/app_mail_sender.py >> /var/log/cron.log 2>&1
|
||||
*/15 * * * * /usr/local/bin/python /service_app_banks/isbank/main_single_thread.py >> /var/log/cron.log 2>&1
|
||||
39
service_account_records/regex_func.py
Normal file
39
service_account_records/regex_func.py
Normal file
@@ -0,0 +1,39 @@
|
||||
import sys
|
||||
|
||||
if "/service_account_records" not in list(sys.path):
|
||||
sys.path.append("/service_account_records")
|
||||
|
||||
import re
|
||||
|
||||
from difflib import get_close_matches
|
||||
from configs import AccountConfig
|
||||
|
||||
|
||||
def word_straighten(word, ref_list, threshold=0.8):
    """Snap *word* to the closest entry of *ref_list*.

    Returns the single best fuzzy match whose similarity ratio is at least
    *threshold*; when nothing in *ref_list* is close enough, the original
    word is returned unchanged.
    """
    close = get_close_matches(word, ref_list, n=1, cutoff=threshold)
    if close:
        return close[0]
    return word
|
||||
|
||||
|
||||
def category_finder(text, output_template="{kategori} {numara}"):
    """Extract category/number mentions (e.g. "DAIRE 5") from free text.

    For every category in ``AccountConfig.CATEGORIES`` the tokens of *text*
    are first fuzzily normalized toward that category's keyword spellings
    (fixing typos such as "dagire" -> "daire"), then a regex pulls out the
    number that follows any of the keywords.

    Args:
        text: Raw description text to scan.
        output_template: Format string applied per hit; receives ``kategori``
            (the category name) and ``numara`` (the matched number).

    Returns:
        dict mapping every category name to a (possibly empty) list of
        formatted "<category> <number>" strings.
    """
    categories = AccountConfig.CATEGORIES
    result = {category: [] for category in categories}  # one bucket per category
    # Tokenizing the input does not depend on the category, so do it once
    # instead of once per category (hoisted loop-invariant work).
    words = re.split(r"\W+", text)
    for category, patterns in categories.items():
        # Normalize each token toward this category's known spellings.
        straighten_words = [word_straighten(word, patterns) for word in words]
        straighten_text = " ".join(straighten_words)
        # keyword, optional separators (whitespace/:/-/#), then the number.
        pattern = (
            r"(?:\b|\s|^)(?:"
            + "|".join(map(re.escape, patterns))
            + r")(?:\s*|:|\-|\#)*(\d+)(?:\b|$)"
        )
        if founds_list := re.findall(pattern, straighten_text, re.IGNORECASE):
            list_of_output = [
                output_template.format(kategori=category, numara=num)
                for num in founds_list
            ]
            # Keep only entries that are non-blank once spaces are stripped.
            result[category].extend(
                [i for i in list_of_output if str(i).replace(" ", "")]
            )
    return result
|
||||
@@ -59,7 +59,7 @@ from .decision_book.project_decision_book_items.router import (
|
||||
from .decision_book.project_decision_book_person.router import (
|
||||
build_decision_book_project_people_route,
|
||||
)
|
||||
|
||||
from .validations.router import validations_route
|
||||
|
||||
__all__ = [
|
||||
"account_records_router",
|
||||
@@ -99,4 +99,5 @@ __all__ = [
|
||||
"build_decision_book_project_route",
|
||||
"build_decision_book_project_items_route",
|
||||
"build_decision_book_project_people_route",
|
||||
"validations_route",
|
||||
]
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user