diff --git a/api_events/events/__init__.py b/api_events/events/__init__.py index 0f0c6bf..c353d09 100644 --- a/api_events/events/__init__.py +++ b/api_events/events/__init__.py @@ -163,6 +163,11 @@ from api_events.events.decision_book.decision_book_invitations import ( BuildDecisionBookInvitationsCreateEventMethod, BuildDecisionBookInvitationsUpdateEventMethod, ) +from api_events.events.events.events_events import ( + EventsBindEventToEmployeeMethod, + EventsBindEventToOccupantMethod, + EventsListEventMethod, +) __all__ = [ @@ -279,4 +284,7 @@ __all__ = [ "BuildDecisionBookInvitationsUpdateEventMethod", "DecisionBookPersonAttendEventMethod", "DecisionBookPersonAssignOccupantEventMethod", + "EventsBindEventToEmployeeMethod", + "EventsBindEventToOccupantMethod", + "EventsListEventMethod", ] diff --git a/api_events/events/account/account_records.py b/api_events/events/account/account_records.py index 23fdcc2..17a63fb 100644 --- a/api_events/events/account/account_records.py +++ b/api_events/events/account/account_records.py @@ -53,6 +53,7 @@ class AccountRecordsListEventMethods(MethodToEvent): """ return + class AccountRecordsCreateEventMethods(MethodToEvent): event_type = "CREATE" diff --git a/api_events/events/authentication.py b/api_events/events/authentication.py index 72498a5..e1f71ab 100644 --- a/api_events/events/authentication.py +++ b/api_events/events/authentication.py @@ -331,7 +331,9 @@ class AuthenticationChangePasswordEventMethods(MethodToEvent): Users.id == token_dict.user_id, ).data: if found_user.check_password(data.old_password): - found_user.create_password(found_user=found_user, password=data.new_password) + found_user.create_password( + found_user=found_user, password=data.new_password + ) return JSONResponse( content={ "completed": True, @@ -408,10 +410,7 @@ class AuthenticationResetPasswordEventMethods(MethodToEvent): } @classmethod - def authentication_reset_password( - cls, - data: Forgot - ): + def authentication_reset_password(cls, data: 
Forgot): from sqlalchemy import or_ found_user = Users.query.filter( @@ -426,7 +425,7 @@ class AuthenticationResetPasswordEventMethods(MethodToEvent): detail="Given access key or domain is not matching with the any user record.", ) - reset_password_token = found_user.reset_password_token() + reset_password_token = found_user.reset_password_token(found_user=found_user) send_email_completed = send_email( subject=f"Dear {found_user.user_tag}, a password reset request has been received.", receivers=[str(found_user.email)], @@ -437,13 +436,12 @@ class AuthenticationResetPasswordEventMethods(MethodToEvent): ) if not send_email_completed: raise found_user.raise_http_exception( - status_code=400, - message="Email can not be sent. Try again later" + status_code=400, message="Email can not be sent. Try again later" ) return JSONResponse( content={ "completed": True, - "message": "Password is created successfully", + "message": "Password change link is sent to your email or phone", "data": found_user.get_dict(), }, status_code=status.HTTP_200_OK, @@ -537,7 +535,7 @@ class AuthenticationLogoutEventMethods(MethodToEvent): ) token_users = get_object_via_user_uu_id(token_dict.user_uu_id) for token, token_user in token_users.items(): - if token_user['domain'] == data.domain: + if token_user["domain"] == data.domain: selected_user = Users.filter_one( Users.uu_id == token_dict.user_uu_id, ).data diff --git a/api_events/events/building/building_build.py b/api_events/events/building/building_build.py index 0419bf6..dd9aa7c 100644 --- a/api_events/events/building/building_build.py +++ b/api_events/events/building/building_build.py @@ -1,4 +1,5 @@ import typing +from typing import Union from fastapi import status, HTTPException from fastapi.responses import JSONResponse @@ -28,6 +29,7 @@ from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObj class BuildListEventMethods(MethodToEvent): event_type = "SELECT" + __event_keys__ = { 
"68b3b5ed-b74c-4a27-820f-3959214e94e9": "build_list", } @@ -78,7 +80,6 @@ class BuildCreateEventMethods(MethodToEvent): ) created_build = Build.create_action(data=data, token=token_dict) - build_type = BuildTypes.filter_by_one( **BuildTypes.valid_record_dict, type_code="APT_YNT" ).data @@ -178,7 +179,12 @@ class BuildUpdateEventMethods(MethodToEvent): } @classmethod - def build_update(cls, build_uu_id: str, data: UpdateBuild, token_dict): + def build_update( + cls, + build_uu_id: str, + data: UpdateBuild, + token_dict: Union[EmployeeTokenObject, OccupantTokenObject] + ): Build.pre_query = Build.select_action( employee_id=token_dict.selected_company.employee_id ) diff --git a/api_events/events/events/events_events.py b/api_events/events/events/events_events.py index 825fa7a..b346a9d 100644 --- a/api_events/events/events/events_events.py +++ b/api_events/events/events/events_events.py @@ -2,6 +2,7 @@ from typing import Union from fastapi.exceptions import HTTPException +from api_events.events.events.events_services import ServicesEvents from databases import ( Events, Employees, @@ -36,100 +37,40 @@ class EventsListEventMethods(MethodToEvent): list_options: ListOptions, token_dict: Union[EmployeeTokenObject, OccupantTokenObject], ): + list_options.page = 1 + list_options.size = 10000 Events.filter_attr = list_options - records = Events.filter_all() + if isinstance(token_dict, OccupantTokenObject): + occupant_events = Event2Occupant.filter_all( + Event2Occupant.build_living_space_id == token_dict.selected_occupant.living_space_id + ).data + records = Events.filter_all( + Events.id.in_([event.event_id for event in occupant_events]) + ) + return AlchemyJsonResponse( + completed=True, + message="DecisionBook are listed successfully", + result=records, + ) + elif isinstance(token_dict, EmployeeTokenObject): + employee_events = Event2Employee.filter_all( + Event2Employee.employee_id == token_dict.selected_company.employee_id + ).data + records = Events.filter_all( + 
Events.id.in_([event.event_id for event in employee_events]) + ) + return AlchemyJsonResponse( + completed=True, + message="DecisionBook are listed successfully", + result=records, + ) return AlchemyJsonResponse( - completed=True, - message="DecisionBook are listed successfully", - result=records, + completed=False, + message="DecisionBook are NOT listed successfully", + result=[], ) -class EventsCreateEventMethods(MethodToEvent): - - event_type = "CREATE" - __event_keys__ = { - "514a9f8f-e5e5-4e10-9d0b-2de8f461fc1b": "events_create", - } - - @classmethod - def events_create(cls, data: CreateEvents, token_dict): - event = Events.find_or_create( - **token_dict.user_creds, - event_name=data.event_name, - event_description=data.event_description, - event_date=data.event_date, - event_location=data.event_location, - active=True, - deleted=False, - ) - Events.save() - return { - "status": "success", - "message": "Event created successfully.", - "event": event.uu_id, - } - - -class EventsUpdateEventMethods(MethodToEvent): - - event_type = "UPDATE" - __event_keys__ = { - "f94e7b79-2369-4840-bf2b-244934ca3136": "events_update", - } - - @classmethod - def events_update(cls, data: CreateEvents, token_dict): - event = Events.filter_by_one(uu_id=data.uu_id, **Events.valid_record_dict).data - if not event: - raise HTTPException( - status_code=404, - detail="No event found. 
Please contact your responsible company.", - ) - event.update( - **token_dict.user_creds, - event_name=data.event_name, - event_description=data.event_description, - event_date=data.event_date, - event_location=data.event_location, - ) - Events.save() - return { - "status": "success", - "message": "Event updated successfully.", - "event": event.uu_id, - } - - -class EventsPatchEventMethods(MethodToEvent): - - event_type = "PATCH" - __event_keys__ = { - "41944c63-22d3-4866-affd-34bcd49da58b": "events_patch", - } - - @classmethod - def events_patch(cls, data: CreateEvents, token_dict): - event = Events.filter_by_one(uu_id=data.uu_id, **Events.valid_record_dict).data - if not event: - raise HTTPException( - status_code=404, - detail="No event found. Please contact your responsible company.", - ) - event.update( - **token_dict.user_creds, - event_name=data.event_name, - event_description=data.event_description, - event_date=data.event_date, - event_location=data.event_location, - ) - return { - "status": "success", - "message": "Event patched successfully.", - "event": event.uu_id, - } - - class EventsBindEventToOccupantMethods(MethodToEvent): event_type = "UPDATE" @@ -218,21 +159,105 @@ class EventsBindEventToEmployeeMethods(MethodToEvent): } -EventsListEventMethod = EventsListEventMethods( - action=ActionsSchema(endpoint="/event/list") -) -EventsCreateEventMethod = EventsCreateEventMethods( - action=ActionsSchema(endpoint="/event/create") -) -EventsUpdateEventMethod = EventsUpdateEventMethods( - action=ActionsSchema(endpoint="/event/update") -) -EventsPatchEventMethod = EventsPatchEventMethods( - action=ActionsSchema(endpoint="/event/patch") -) EventsBindEventToOccupantMethod = EventsBindEventToOccupantMethods( action=ActionsSchema(endpoint="/bind/events/occupant") ) EventsBindEventToEmployeeMethod = EventsBindEventToEmployeeMethods( action=ActionsSchema(endpoint="/bind/events/employee") ) +EventsListEventMethod = EventsListEventMethods( + 
action=ActionsSchema(endpoint="/event/list") +) + +# EventsCreateEventMethod = EventsCreateEventMethods( +# action=ActionsSchema(endpoint="/event/create") +# ) +# EventsUpdateEventMethod = EventsUpdateEventMethods( +# action=ActionsSchema(endpoint="/event/update") +# ) +# EventsPatchEventMethod = EventsPatchEventMethods( +# action=ActionsSchema(endpoint="/event/patch") +# ) +# + +# class EventsCreateEventMethods(MethodToEvent): +# +# event_type = "CREATE" +# __event_keys__ = { +# "514a9f8f-e5e5-4e10-9d0b-2de8f461fc1b": "events_create", +# } +# +# @classmethod +# def events_create(cls, data: CreateEvents, token_dict): +# event = Events.find_or_create( +# **token_dict.user_creds, +# event_name=data.event_name, +# event_description=data.event_description, +# event_date=data.event_date, +# event_location=data.event_location, +# active=True, +# deleted=False, +# ) +# Events.save() +# return { +# "status": "success", +# "message": "Event created successfully.", +# "event": event.uu_id, +# } +# class EventsUpdateEventMethods(MethodToEvent): +# +# event_type = "UPDATE" +# __event_keys__ = { +# "f94e7b79-2369-4840-bf2b-244934ca3136": "events_update", +# } +# +# @classmethod +# def events_update(cls, data: CreateEvents, token_dict): +# event = Events.filter_by_one(uu_id=data.uu_id, **Events.valid_record_dict).data +# if not event: +# raise HTTPException( +# status_code=404, +# detail="No event found. 
Please contact your responsible company.", +# ) +# event.update( +# **token_dict.user_creds, +# event_name=data.event_name, +# event_description=data.event_description, +# event_date=data.event_date, +# event_location=data.event_location, +# ) +# Events.save() +# return { +# "status": "success", +# "message": "Event updated successfully.", +# "event": event.uu_id, +# } +# +# +# class EventsPatchEventMethods(MethodToEvent): +# +# event_type = "PATCH" +# __event_keys__ = { +# "41944c63-22d3-4866-affd-34bcd49da58b": "events_patch", +# } +# +# @classmethod +# def events_patch(cls, data: CreateEvents, token_dict): +# event = Events.filter_by_one(uu_id=data.uu_id, **Events.valid_record_dict).data +# if not event: +# raise HTTPException( +# status_code=404, +# detail="No event found. Please contact your responsible company.", +# ) +# event.update( +# **token_dict.user_creds, +# event_name=data.event_name, +# event_description=data.event_description, +# event_date=data.event_date, +# event_location=data.event_location, +# ) +# return { +# "status": "success", +# "message": "Event patched successfully.", +# "event": event.uu_id, +# } \ No newline at end of file diff --git a/api_events/events/identity/users.py b/api_events/events/identity/users.py index 717a2a9..e3f2d26 100644 --- a/api_events/events/identity/users.py +++ b/api_events/events/identity/users.py @@ -98,8 +98,7 @@ class UserCreateEventMethods(MethodToEvent): ) if not send_email_completed: raise created_user.raise_http_exception( - status_code=400, - message="Email can not be sent. Try again later" + status_code=400, message="Email can not be sent. 
Try again later" ) return JSONResponse( content={ diff --git a/api_services/bank_actions/wag_account_record_parser.py b/api_services/bank_actions/wag_account_record_parser.py index 05aa7b5..2ea2256 100644 --- a/api_services/bank_actions/wag_account_record_parser.py +++ b/api_services/bank_actions/wag_account_record_parser.py @@ -131,7 +131,9 @@ def get_garbage_words(comment: str, search_word: str): garbage_words = unidecode(remove_spaces_from_string(comment)) search_word = unidecode(remove_spaces_from_string(search_word)) for word in search_word.split(" "): - garbage_words = garbage_words.replace(remove_spaces_from_string(unidecode(word)), "") + garbage_words = garbage_words.replace( + remove_spaces_from_string(unidecode(word)), "" + ) if cleaned_from_spaces := remove_spaces_from_string(garbage_words): return str(unidecode(cleaned_from_spaces)).upper() return None @@ -143,7 +145,9 @@ def remove_garbage_words(comment: str, garbage_word: str): garbage_word = remove_spaces_from_string(garbage_word.replace("*", " ")) for letter in garbage_word.split(" "): cleaned_comment = unidecode(remove_spaces_from_string(cleaned_comment)) - cleaned_comment = cleaned_comment.replace(remove_spaces_from_string(letter), "") + cleaned_comment = cleaned_comment.replace( + remove_spaces_from_string(letter), "" + ) return str(remove_spaces_from_string(cleaned_comment)).upper() @@ -202,7 +206,12 @@ def parse_comment_for_living_space( ): comment = unidecode(comment) best_similarity = dict( - company=None, living_space=None, found_from=None, similarity=0.0, garbage="", cleaned="" + company=None, + living_space=None, + found_from=None, + similarity=0.0, + garbage="", + cleaned="", ) for person in living_space_dict[iban]["people"]: person: People = person @@ -210,27 +219,41 @@ def parse_comment_for_living_space( last_name = unidecode(person.surname).upper() search_word_list = [ remove_spaces_from_string("".join([f"{first_name} {last_name}"])), - remove_spaces_from_string("".join([f"{last_name} 
{first_name}"])) + remove_spaces_from_string("".join([f"{last_name} {first_name}"])), ] if middle_name := unidecode(person.middle_name).upper(): - search_word_list.append(remove_spaces_from_string(f"{first_name} {middle_name} {last_name}")) - search_word_list.append(remove_spaces_from_string(f"{last_name} {middle_name} {first_name}")) + search_word_list.append( + remove_spaces_from_string(f"{first_name} {middle_name} {last_name}") + ) + search_word_list.append( + remove_spaces_from_string(f"{last_name} {middle_name} {first_name}") + ) cleaned_comment = unidecode(comment).upper() for search_word in search_word_list: garbage_words = get_garbage_words(comment, unidecode(search_word)) if garbage_words: garbage_words = unidecode(garbage_words).upper() - cleaned_comment = unidecode(remove_garbage_words(comment, garbage_words)).upper() - similarity_ratio = textdistance.jaro_winkler(cleaned_comment, str(search_word).upper()) + cleaned_comment = unidecode( + remove_garbage_words(comment, garbage_words) + ).upper() + similarity_ratio = textdistance.jaro_winkler( + cleaned_comment, str(search_word).upper() + ) if len(cleaned_comment) < len(f"{first_name}{last_name}"): continue if cleaned_comment and 0.9 < similarity_ratio <= 1: print( - 'cleaned comment dict', dict( - garbage=garbage_words, cleaned=cleaned_comment, similarity=similarity_ratio, - search_word=search_word, comment=comment, last_similarity=float(best_similarity["similarity"]) - )) + "cleaned comment dict", + dict( + garbage=garbage_words, + cleaned=cleaned_comment, + similarity=similarity_ratio, + search_word=search_word, + comment=comment, + last_similarity=float(best_similarity["similarity"]), + ), + ) if similarity_ratio > float(best_similarity["similarity"]): for living_space in living_space_dict[iban]["living_space"]: if living_space.person_id == person.id: @@ -245,12 +268,14 @@ def parse_comment_for_living_space( return best_similarity -def parse_comment_for_build_parts(comment: str, max_build_part: int = 
200, parse: str = 'DAIRE'): +def parse_comment_for_build_parts( + comment: str, max_build_part: int = 200, parse: str = "DAIRE" +): results, results_list = category_finder(comment), [] - print('results[parse]', results[parse]) + print("results[parse]", results[parse]) for result in results[parse] or []: if digits := "".join([letter for letter in str(result) if letter.isdigit()]): - print('digits', digits) + print("digits", digits) if int(digits) <= int(max_build_part): results_list.append(int(digits)) return results_list or None @@ -262,7 +287,12 @@ def parse_comment_for_company_or_individual(comment: str): ).data comment = unidecode(comment) best_similarity = dict( - company=None, living_space=None, found_from=None, similarity=0.0, garbage="", cleaned="" + company=None, + living_space=None, + found_from=None, + similarity=0.0, + garbage="", + cleaned="", ) for company in companies_list: search_word = unidecode(company.public_name) @@ -300,25 +330,30 @@ def parse_comment_to_split_with_star(account_record: AccountRecords): def check_build_living_space_matches_with_build_parts( living_space_dict: dict, best_similarity: dict, iban: str, whole_comment: str ): - if 0.6 < float(best_similarity['similarity']) < 0.8: - build_parts = living_space_dict[iban]['build_parts'] - if best_similarity['living_space']: - build_parts_id = best_similarity['living_space'].build_parts_id - parser_dict = dict(comment=str(whole_comment), max_build_part=len(build_parts)) - print('build parts similarity', best_similarity, 'parser_dict', parser_dict) + if 0.6 < float(best_similarity["similarity"]) < 0.8: + build_parts = living_space_dict[iban]["build_parts"] + if best_similarity["living_space"]: + build_parts_id = best_similarity["living_space"].build_parts_id + parser_dict = dict( + comment=str(whole_comment), max_build_part=len(build_parts) + ) + print("build parts similarity", best_similarity, "parser_dict", parser_dict) results_list = parse_comment_for_build_parts(**parser_dict) - 
print('results_list', results_list) + print("results_list", results_list) if not results_list: return best_similarity for build_part in build_parts: - print('part_no', int(build_part.part_no), " | ", results_list) - print('build_part', int(build_part.id), int(build_parts_id)) - print('cond', int(build_part.id) == int(build_parts_id)) - print('cond2', int(build_part.part_no) in results_list) - if int(build_part.id) == int(build_parts_id) and int(build_part.part_no) in results_list: - similarity = float(best_similarity['similarity']) - best_similarity['similarity'] = (1 - similarity) / 2 + similarity - print('similarity', best_similarity['similarity']) + print("part_no", int(build_part.part_no), " | ", results_list) + print("build_part", int(build_part.id), int(build_parts_id)) + print("cond", int(build_part.id) == int(build_parts_id)) + print("cond2", int(build_part.part_no) in results_list) + if ( + int(build_part.id) == int(build_parts_id) + and int(build_part.part_no) in results_list + ): + similarity = float(best_similarity["similarity"]) + best_similarity["similarity"] = (1 - similarity) / 2 + similarity + print("similarity", best_similarity["similarity"]) break return best_similarity @@ -329,14 +364,14 @@ def parse_comment_with_name( comments = parse_comment_to_split_with_star(account_record=account_record) best_similarity = {"similarity": 0.0} comments_list, comments_length = comments[1:], int(comments[0]) - print('comments_list', comments_list, 'comments_length', comments_length) + print("comments_list", comments_list, "comments_length", comments_length) if ( int(account_record.currency_value) > 0 ): # Build receive money from living space people living_space_matches = dict( living_space_dict=living_space_dict, iban=account_record.iban, - whole_comment=account_record.process_comment + whole_comment=account_record.process_comment, ) if comments_length == 1: best_similarity = parse_comment_for_living_space( @@ -345,9 +380,11 @@ def parse_comment_with_name( 
living_space_dict=living_space_dict, ) best_similarity["send_person_id"] = best_similarity.get("customer_id", None) - living_space_matches['best_similarity'] = best_similarity + living_space_matches["best_similarity"] = best_similarity # if 0.5 < float(best_similarity['similarity']) < 0.8 - best_similarity = check_build_living_space_matches_with_build_parts(**living_space_matches) + best_similarity = check_build_living_space_matches_with_build_parts( + **living_space_matches + ) return best_similarity for comment in comments_list: similarity_result = parse_comment_for_living_space( @@ -359,10 +396,12 @@ def parse_comment_with_name( best_similarity["similarity"] ): best_similarity = similarity_result - living_space_matches['best_similarity'] = best_similarity + living_space_matches["best_similarity"] = best_similarity # if 0.5 < float(best_similarity['similarity']) < 0.8: - best_similarity = check_build_living_space_matches_with_build_parts(**living_space_matches) - print('last best_similarity', best_similarity) + best_similarity = check_build_living_space_matches_with_build_parts( + **living_space_matches + ) + print("last best_similarity", best_similarity) return best_similarity else: # Build pays money for service taken from company or individual if not comments_length > 1: @@ -387,7 +426,12 @@ def parse_comment_with_name_iban_description(account_record: AccountRecords): BuildIbanDescription.iban == account_record.iban, system=True ).data best_similarity = dict( - company=None, living_space=None, found_from=None, similarity=0.0, garbage="", cleaned="" + company=None, + living_space=None, + found_from=None, + similarity=0.0, + garbage="", + cleaned="", ) for comment in comments_list: for iban_result in iban_results: diff --git a/api_services/redis/auth_actions/auth.py b/api_services/redis/auth_actions/auth.py index d9934bb..3e7163c 100644 --- a/api_services/redis/auth_actions/auth.py +++ b/api_services/redis/auth_actions/auth.py @@ -110,19 +110,21 @@ def 
save_access_token_to_redis( part_level=build_part.part_level, uu_id=str(occupant_type.uu_id), description=occupant_type.occupant_description, - code=occupant_type.occupant_code + code=occupant_type.occupant_code, ) - ] + ], ) elif str(build.uu_id) in occupants_selection_dict: - occupants_selection_dict[str(build.uu_id)]["occupants"].append(dict( - part_uu_id=str(build_part.uu_id), - part_name=build_part.part_name, - part_level=build_part.part_level, - uu_id=str(occupant_type.uu_id), - description=occupant_type.occupant_description, - code=occupant_type.occupant_code - )) + occupants_selection_dict[str(build.uu_id)]["occupants"].append( + dict( + part_uu_id=str(build_part.uu_id), + part_name=build_part.part_name, + part_level=build_part.part_level, + uu_id=str(occupant_type.uu_id), + description=occupant_type.occupant_description, + code=occupant_type.occupant_code, + ) + ) save_object_to_redis( access_token=access_token, model_object=OccupantTokenObject( @@ -162,13 +164,17 @@ def save_access_token_to_redis( ).data: companies_uu_id_list.append(str(company.uu_id)) companies_id_list.append(company.id) - company_address = Addresses.filter_by_one(id=company.official_address_id).data - companies_list.append(dict( - uu_id=str(company.uu_id), - public_name=company.public_name, - company_type=company.company_type, - company_address=company_address, - )) + company_address = Addresses.filter_by_one( + id=company.official_address_id + ).data + companies_list.append( + dict( + uu_id=str(company.uu_id), + public_name=company.public_name, + company_type=company.company_type, + company_address=company_address, + ) + ) save_object_to_redis( access_token=access_token, diff --git a/api_services/redis/functions.py b/api_services/redis/functions.py index 7e2d66a..c6d84a4 100644 --- a/api_services/redis/functions.py +++ b/api_services/redis/functions.py @@ -70,4 +70,3 @@ def get_object_via_user_uu_id(user_id: str) -> typing.Union[dict, None]: already_tokens_list.append(redis_object) 
already_tokens_dict[already_token.decode()] = redis_object return already_tokens_dict - diff --git a/api_services/templates/password_templates.py b/api_services/templates/password_templates.py index 8bf01d3..1f35b31 100644 --- a/api_services/templates/password_templates.py +++ b/api_services/templates/password_templates.py @@ -157,8 +157,6 @@ def password_is_changed_template(**kwargs): return template - - def invalid_ip_or_address_found(**kwargs): user_name, current_year, address = ( kwargs["user_name"], diff --git a/api_validations/validations_request/__init__.py b/api_validations/validations_request/__init__.py index 5ac6a8c..6911a19 100644 --- a/api_validations/validations_request/__init__.py +++ b/api_validations/validations_request/__init__.py @@ -93,7 +93,7 @@ from .employee import ( UpdateCompanyDuty, ) from .events import ( - CreateEvents, + # CreateEvents, RegisterEvents2Employee, RegisterEvents2Occupant, ) @@ -208,7 +208,7 @@ __all__ = [ "InsertCompanyDuty", "UpdateCompanyEmployeesSalaries", "UpdateCompanyDuty", - "CreateEvents", + # "CreateEvents", "RegisterEvents2Employee", "RegisterEvents2Occupant", "UpdatePerson", diff --git a/api_validations/validations_request/events.py b/api_validations/validations_request/events.py index b224f66..56b55a1 100644 --- a/api_validations/validations_request/events.py +++ b/api_validations/validations_request/events.py @@ -6,13 +6,6 @@ from api_validations.validations_request import ( ) -class CreateEvents(PydanticBaseModel): - event_name: Optional[str] = None - event_description: Optional[str] = None - event_date: Optional[str] = None - event_location: Optional[str] = None - - class RegisterEvents2Employee(PydanticBaseModel): event_uu_id_list: list[str] = [] employee_uu_id: Optional[str] = None diff --git a/databases/extensions/auth.py b/databases/extensions/auth.py index 3b21121..595b8ce 100644 --- a/databases/extensions/auth.py +++ b/databases/extensions/auth.py @@ -17,8 +17,6 @@ from 
api_library.date_time_actions.date_functions import system_arrow, client_ar from api_configs import ApiStatic, Auth - - class PasswordModule: @classmethod @@ -174,6 +172,7 @@ class AuthModule(PasswordModule): from databases import ( UsersTokens, ) + if remember_me: refresh_token = self.generate_token(Auth.REFRESHER_TOKEN_LENGTH) if already_token := UsersTokens.filter_by_one( diff --git a/databases/sql_models/building/build.py b/databases/sql_models/building/build.py index ece97c4..30885d1 100644 --- a/databases/sql_models/building/build.py +++ b/databases/sql_models/building/build.py @@ -649,7 +649,6 @@ class BuildManagement(CrudCollection): ) - class BuildArea(CrudCollection): """ Builds class based on declarative_base and BaseMixin via session diff --git a/databases/sql_models/building/decision_book.py b/databases/sql_models/building/decision_book.py index e151056..dd0eab7 100644 --- a/databases/sql_models/building/decision_book.py +++ b/databases/sql_models/building/decision_book.py @@ -590,7 +590,9 @@ class BuildDecisionBookItems(CrudCollection): String, nullable=True, comment="Decision Book UUID" ) item_short_comment: Mapped[str] = mapped_column( - String(24), nullable=True, comment="This field is reserved for use in grouping data or in the pivot heading." + String(24), + nullable=True, + comment="This field is reserved for use in grouping data or in the pivot heading.", ) decision_books: Mapped["BuildDecisionBook"] = relationship( @@ -1232,7 +1234,9 @@ class BuildDecisionBookProjectItems(CrudCollection): Numeric(16, 2), server_default="0", comment="Estimated Cost" ) item_short_comment: Mapped[str] = mapped_column( - String(24), nullable=True, comment="This field is reserved for use in grouping data or in the pivot heading." 
+ String(24), + nullable=True, + comment="This field is reserved for use in grouping data or in the pivot heading.", ) build_decision_book_project_id: Mapped[int] = mapped_column( diff --git a/databases/sql_models/identity/identity.py b/databases/sql_models/identity/identity.py index 992528e..42f8e71 100644 --- a/databases/sql_models/identity/identity.py +++ b/databases/sql_models/identity/identity.py @@ -37,7 +37,8 @@ class UsersTokens(CrudCollection): token: Mapped[str] = mapped_column(String, server_default="") domain: Mapped[str] = mapped_column(String, server_default="") expires_at: Mapped[TIMESTAMP] = mapped_column( - TIMESTAMP(timezone=True), default=str(system_arrow.shift(date=system_arrow.now(), days=3)) + TIMESTAMP(timezone=True), + default=str(system_arrow.shift(date=system_arrow.now(), days=3)), ) # users = relationship("Users", back_populates="tokens", foreign_keys=[user_id]) @@ -321,7 +322,9 @@ class People(CrudCollection, SelectAction): String, server_default="", comment="Birth place of the person" ) birth_date: Mapped[TIMESTAMP] = mapped_column( - TIMESTAMP(timezone=True), server_default="1900-01-01", comment="Birth date of the person" + TIMESTAMP(timezone=True), + server_default="1900-01-01", + comment="Birth date of the person", ) tax_no: Mapped[str] = mapped_column( String, server_default="", comment="Tax number of the person" diff --git a/docker-compose.yml b/docker-compose.yml index 2bdcf17..0dbb524 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -89,19 +89,19 @@ services: # depends_on: # - postgres_commercial # -# wag_bank_services: -# container_name: wag_bank_services -# restart: on-failure -# build: -# context: . 
-# dockerfile: service_app_banks/mailService.Dockerfile -# networks: -# - network_store_services -# depends_on: -# - postgres_commercial -# environment: -# - DATABASE_URL=postgresql+psycopg2://berkay_wag_user:berkay_wag_user_password@postgres_commercial:5432/wag_database -# - PYTHONPATH=/service_app_banks + wag_bank_services: + container_name: wag_bank_services + restart: on-failure + build: + context: . + dockerfile: service_app_banks/mailService.Dockerfile + networks: + - network_store_services + depends_on: + - postgres_commercial + environment: + - DATABASE_URL=postgresql+psycopg2://berkay_wag_user:berkay_wag_user_password@postgres_commercial:5432/wag_database + - PYTHONPATH=/service_app_banks # # wag_account_services: # container_name: wag_account_services diff --git a/service_account_records/app_accounts.py b/service_account_records/app_accounts.py index 9c67604..96bf8b9 100644 --- a/service_account_records/app_accounts.py +++ b/service_account_records/app_accounts.py @@ -148,10 +148,9 @@ def account_get_people_and_living_space_info_via_iban() -> dict: if living_space.person_id ] people_list = People.filter_all( - People.id.in_(living_spaces_people), - system=True + People.id.in_(living_spaces_people), system=True ).data - print('build_parts', build_parts) + print("build_parts", build_parts) build_living_space_dict[str(account_records_iban[0])] = { "people": list(people_list), "living_space": list(living_spaces), @@ -310,7 +309,9 @@ def send_accounts_to_decision_payment(): AccountRecords.receive_debit == receive_enum.id, ).data for account_record in account_records_list: - current_currency_value = pay_the_registration(account_record, receive_enum, debit_enum) + current_currency_value = pay_the_registration( + account_record, receive_enum, debit_enum + ) if current_currency_value > 0: pay_the_registration(account_record, receive_enum, debit_enum, True) if abs(float(Decimal(account_record.remainder_balance))) == abs( diff --git 
a/service_account_records/configs.py b/service_account_records/configs.py index 6cc6b82..6f8eed1 100644 --- a/service_account_records/configs.py +++ b/service_account_records/configs.py @@ -4,5 +4,5 @@ class AccountConfig: "DAIRE": ["daire", "dagire", "daare", "nolu daire", "no", "nolu dairenin"], "APARTMAN": ["apartman", "aparman", "aprmn"], "VILLA": ["villa", "vlla"], - "BINA": ["bina", "binna"] + "BINA": ["bina", "binna"], } diff --git a/service_account_records/regex_func.py b/service_account_records/regex_func.py index 6679817..49fcb2d 100644 --- a/service_account_records/regex_func.py +++ b/service_account_records/regex_func.py @@ -11,13 +11,24 @@ def word_straighten(word, ref_list, threshold=0.8): def category_finder(text, output_template="{kategori} {numara}"): categories = AccountConfig.CATEGORIES - result = {category: [] for category in categories} # Sonuçları depolamak için bir sözlük + result = { + category: [] for category in categories + } # Sonuçları depolamak için bir sözlük for category, patterns in categories.items(): - words = re.split(r'\W+', text) + words = re.split(r"\W+", text) straighten_words = [word_straighten(word, patterns) for word in words] - straighten_text = ' '.join(straighten_words) - pattern = r'(?:\b|\s|^)(?:' + '|'.join(map(re.escape, patterns)) + r')(?:\s*|:|\-|\#)*(\d+)(?:\b|$)' + straighten_text = " ".join(straighten_words) + pattern = ( + r"(?:\b|\s|^)(?:" + + "|".join(map(re.escape, patterns)) + + r")(?:\s*|:|\-|\#)*(\d+)(?:\b|$)" + ) if founds_list := re.findall(pattern, straighten_text, re.IGNORECASE): - list_of_output = [output_template.format(kategori=category, numara=num) for num in founds_list] - result[category].extend([i for i in list_of_output if str(i).replace(' ', '')]) + list_of_output = [ + output_template.format(kategori=category, numara=num) + for num in founds_list + ] + result[category].extend( + [i for i in list_of_output if str(i).replace(" ", "")] + ) return result diff --git 
a/service_app/routers/authentication/router.py b/service_app/routers/authentication/router.py index 99959fe..1424c4a 100644 --- a/service_app/routers/authentication/router.py +++ b/service_app/routers/authentication/router.py @@ -85,9 +85,8 @@ def authentication_create_password(data: CreatePassword): data=data ) -@login_route.post( - path="/reset_password", summary="Create password with password token" -) + +@login_route.post(path="/reset_password", summary="Create password with password token") def authentication_reset_password(data: Forgot): return AuthenticationResetPasswordEventMethod.authentication_reset_password( data=data diff --git a/service_app/routers/events/events/router.py b/service_app/routers/events/events/router.py index 3dfaf5d..2e9473d 100644 --- a/service_app/routers/events/events/router.py +++ b/service_app/routers/events/events/router.py @@ -12,25 +12,7 @@ event_route = APIRouter(prefix="/event", tags=["Events"]) event_route.include_router(event_route, include_in_schema=True) -@event_route.post(path="/create", summary="Create Event") -def events_create(request: Request, data: CreateEvents): - token_dict = parse_token_object_to_dict(request=request) - return token_dict.available_event(data=data, token_dict=token_dict) - - @event_route.post(path="/list", summary="List Events") def events_list(request: Request, data: ListOptions): token_dict = parse_token_object_to_dict(request=request) return token_dict.available_event(data=data, token_dict=token_dict) - - -@event_route.post(path="/update", summary="Update Event") -def events_update(request: Request, data: CreateEvents): - token_dict = parse_token_object_to_dict(request=request) - return token_dict.available_event(data=data, token_dict=token_dict) - - -# @event_route.patch(path="/patch", summary="Patch Event") -# def events_patch(request: Request, data: CreateEvents): -# token_dict = parse_token_object_to_dict(request=request) -# return token_dict.available_event(data=data, token_dict=token_dict) 
diff --git a/service_app_banks/isbank/config_isbank.py b/service_app_banks/isbank/config_isbank.py index 9314a24..8df53d5 100644 --- a/service_app_banks/isbank/config_isbank.py +++ b/service_app_banks/isbank/config_isbank.py @@ -1,5 +1,3 @@ - - class Config: # IP_ADDRESS: str = "http://10.10.2.46:41575/internal/isbank/retreive" SERVICE_TIMING: int = 900 # 15 min diff --git a/service_app_banks/isbank/isbank_sender.py b/service_app_banks/isbank/isbank_sender.py index af1b339..d9c22bd 100644 --- a/service_app_banks/isbank/isbank_sender.py +++ b/service_app_banks/isbank/isbank_sender.py @@ -62,6 +62,7 @@ def sender_service(): def is_bank_retrieve_account_records(bank_data): from databases import AccountRecords from api_library.date_time_actions.date_functions import system_arrow + data_bulk = json.loads(bank_data) new_record_list = [] @@ -70,7 +71,7 @@ def is_bank_retrieve_account_records(bank_data): data_dict["bank_balance"] = data_dict.pop("balance") data_dict["import_file_name"] = str(data_keys) data_dict = BankReceive(**data_dict).model_dump() - bank_date = system_arrow.get(str(data_dict['bank_date'])) + bank_date = system_arrow.get(str(data_dict["bank_date"])) data_dict["bank_date_w"] = bank_date.weekday() data_dict["bank_date_m"] = bank_date.month data_dict["bank_date_d"] = bank_date.day @@ -84,12 +85,12 @@ def is_bank_retrieve_account_records(bank_data): AccountRecords.bank_balance == data_dict["bank_balance"], system=True, ).data: - print('already @database record', found_record.id) + print("already @database record", found_record.id) else: new_account_record = AccountRecords.find_or_create(**data_dict) new_account_record.save_and_confirm() new_record_list.append(new_account_record.get_dict()) - print('new_account_record is created', new_account_record.id) + print("new_account_record is created", new_account_record.id) return new_record_list if new_record_list else [] diff --git a/service_app_banks/isbank/main_single_thread.py 
b/service_app_banks/isbank/main_single_thread.py index 38e3a37..905ace4 100644 --- a/service_app_banks/isbank/main_single_thread.py +++ b/service_app_banks/isbank/main_single_thread.py @@ -8,20 +8,26 @@ from isbank_sender import sender_service if __name__ == "__main__": print("Bank service booted...") - # while True: - # try: - reader_service() - time.sleep(1) - parser_service() - time.sleep(1) - sender_service() - time.sleep(1) + try: + reader_service() + time.sleep(1) + except Exception as e: + err = e + print("Reader Service : ", err) + try: + parser_service() + time.sleep(1) + except Exception as e: + err = e + print("Parser Service : ", err) + try: + sender_service() + time.sleep(1) + except Exception as e: + err = e + print("Sender Service : ", err) print( datetime.datetime.now().__str__(), " : system completed a cycle without error...", ) - # except Exception as e: - # err = e - # print('Raised Error :', err) - # time.sleep(int(Config.SERVICE_TIMING or 900)) - # time.sleep(10) + print("Bank service is completed...") diff --git a/service_app_banks/mailService.Dockerfile b/service_app_banks/mailService.Dockerfile index 88efa70..7a750ae 100644 --- a/service_app_banks/mailService.Dockerfile +++ b/service_app_banks/mailService.Dockerfile @@ -17,6 +17,8 @@ COPY ../api_objects ./service_app_banks/api_objects COPY ../api_validations ./service_app_banks/api_validations #COPY ../service_app_banks/crontab_list /service_app_banks/crontab_to_write +WORKDIR /service_app_banks + RUN apt-get update && apt-get install -y cron # 11:00 Istanbul Time (UTC+3) system time is 08:00 UTC