diff --git a/api_events/events/decision_book/project_decision_book.py b/api_events/events/decision_book/project_decision_book.py index 2b65917..6f5939e 100644 --- a/api_events/events/decision_book/project_decision_book.py +++ b/api_events/events/decision_book/project_decision_book.py @@ -309,7 +309,9 @@ class ProjectDecisionBookApprovalEventMethods(MethodToEvent): process_date=str(local_date), process_date_m=int(local_date.month), process_date_y=int(local_date.year), - payment_plan_time_periods=str(decision_book_project.project_type), + payment_plan_time_periods=str( + decision_book_project.project_type + ), period_time=f"{local_date.year}-{str(local_date.month).zfill(2)}", decision_book_project_id=decision_book_project.id, decision_book_project_uu_id=str(decision_book_project.uu_id), diff --git a/api_events/events/decision_book/project_decision_book_items.py b/api_events/events/decision_book/project_decision_book_items.py index fed0c03..ea7ac18 100644 --- a/api_events/events/decision_book/project_decision_book_items.py +++ b/api_events/events/decision_book/project_decision_book_items.py @@ -60,8 +60,10 @@ class BuildDecisionBookProjectItemsCreateEventMethods(MethodToEvent): ) elif isinstance(token_dict, OccupantTokenObject): book_project = BuildDecisionBookProjects.filter_one( - BuildDecisionBookProjects.uu_id == data.build_decision_book_project_uu_id, - BuildDecisionBookProjects.project_response_living_space_id == token_dict.selected_occupant.living_space_id + BuildDecisionBookProjects.uu_id + == data.build_decision_book_project_uu_id, + BuildDecisionBookProjects.project_response_living_space_id + == token_dict.selected_occupant.living_space_id, ).data if not book_project: raise BuildDecisionBookProjectItems.raise_http_exception( @@ -72,7 +74,9 @@ class BuildDecisionBookProjectItemsCreateEventMethods(MethodToEvent): ) data_dict = data.excluded_dump() data_dict["build_decision_book_project_id"] = book_project.id - created_project_item = BuildDecisionBookProjectItems.find_or_create(**data_dict) + created_project_item = BuildDecisionBookProjectItems.find_or_create( + **data_dict + ) created_project_item.save_and_confirm() return AlchemyJsonResponse( message="Build Decision Book Project Items Create", diff --git a/api_services/bank_actions/wag_account_record_parser.py b/api_services/bank_actions/wag_account_record_parser.py index c569031..89cbc7a 100644 --- a/api_services/bank_actions/wag_account_record_parser.py +++ b/api_services/bank_actions/wag_account_record_parser.py @@ -1,3 +1,6 @@ +import re +from gc import garbage + import textdistance from unidecode import unidecode from datetime import datetime @@ -11,6 +14,9 @@ from databases import ( from typing import Optional from pydantic import BaseModel +from databases.sql_models.company.company import Companies +from databases.sql_models.identity.identity import People + class InsertBudgetRecord(BaseModel): iban: str @@ -53,8 +59,10 @@ def strip_date_to_valid(date_str): return datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S") -def find_iban_in_comment(iban: str, comment: str): - iban_results = BuildIbanDescription.filter_by_one(system=True, iban=iban).data +def find_iban_in_comment(iban: str, comment: str, living_space_dict: dict = None): + iban_results = BuildIbanDescription.filter_all( + BuildIbanDescription.iban == iban, system=True + ).data sm_dict_extended, sm_dict_digit = {}, {} for iban_result in iban_results or []: candidate_parts = comment.split(" ") @@ -74,9 +82,7 @@ def find_iban_in_comment(iban: str, comment: str): ) found = 
False name_list = ( - unidecode(str(iban_result.search_word)) - .replace(".", " ") - .split(" ") + unidecode(str(iban_result.search_word)).replace(".", " ").split(" ") ) for name in name_list: if len(name) > 3 and name.lower() in comment.lower(): @@ -92,112 +98,376 @@ def find_iban_in_comment(iban: str, comment: str): )[0] if float(result[1]) >= 0.5: iban_result = BuildIbanDescription.filter_one( - BuildIbanDescription.id==int(result[0]), - system=True + BuildIbanDescription.id == int(result[0]), system=True ).data return { - "decision_book_project_id": iban_result.decision_book_project_id, + # "decision_book_project_id": iban_result.decision_book_project_id, + # "build_parts_id": iban_result.build_parts_id, "company_id": iban_result.company_id, "customer_id": iban_result.customer_id, - "build_parts_id": iban_result.build_parts_id, "found_from": "Name", "similarity": result[1], } return { - "decision_book_project_id": None, + # "decision_book_project_id": None, + # "build_parts_id": None, "company_id": None, "customer_id": None, - "build_parts_id": None, "found_from": None, "similarity": 0.0, } -def parse_comment_with_name(iban: str, comment: str): - if "*" in comment: - b_comment, a_comment = ( - unidecode(str(comment)).split("*")[0], - unidecode(str(comment)).split("*")[1], - ) - a_result = find_iban_in_comment(iban, a_comment) - b_result = find_iban_in_comment(iban, b_comment) - if a_result["similarity"] > b_result["similarity"]: - a_result["send_person_id"] = a_result["customer_id"] - return a_result +def remove_spaces_from_string(remove_string: str): + letter_list = [] + for letter in remove_string.split(" "): + if letter_ := "".join(i for i in letter if not i == " "): + letter_list.append(letter_) + return " ".join(letter_list).upper() + + +def get_garbage_words(comment: str, search_word: str): + garbage_words = remove_spaces_from_string(comment) + search_word = remove_spaces_from_string(search_word) + for letter in search_word.split(" "): + garbage_words = garbage_words.replace(remove_spaces_from_string(letter), "") + return str(remove_spaces_from_string(garbage_words)).upper() + + +def remove_garbage_words(comment: str, garbage_word: str): + cleaned_comment = remove_spaces_from_string(comment.replace("*", " ")) + garbage_word = remove_spaces_from_string(garbage_word.replace("*", " ")) + for letter in garbage_word.split(" "): + cleaned_comment = unidecode(remove_spaces_from_string(cleaned_comment)) + cleaned_comment = cleaned_comment.replace(remove_spaces_from_string(letter), "") + return str(remove_spaces_from_string(cleaned_comment)).upper() + + +def check_is_comment_is_build(comment: str): + has_build_words = False + candidate_parts = remove_spaces_from_string(comment.replace("*", " ")).split(" ") + for candidate_part in candidate_parts: + candidate_part = remove_spaces_from_string(candidate_part).replace(":", "") + for build_word in ["no", "daire", "apt", "apartman"]: + if unidecode(candidate_part).upper() in unidecode(build_word).upper(): + has_build_words = True + break + return has_build_words + + +def get_list_of_build_words(comment: str): + build_words = [] + candidate_parts = remove_spaces_from_string(comment.replace("*", " ")) + for build_word in ["no", "nolu", "daire", "apt", "apartman"]: + if unidecode(build_word).upper() in unidecode(candidate_parts).upper(): + st = unidecode(candidate_parts).upper().index(unidecode(build_word).upper()) + et = st + len(build_word) + st = st - 5 if st > 5 else 0 + et = et + 5 if et + 5 <= len(candidate_parts) else len(candidate_parts) + 
number_digit = "".join( + letter for letter in str(candidate_parts[st:et]) if letter.isdigit() + ) + if number_digit: + rt_dict = { + "garbage": candidate_parts[st:et], + "number": int(number_digit) if number_digit else None, + } + build_words.append(rt_dict) + return build_words + + +def generate_pattern(word): + if len(word) < 1: + raise ValueError("The word must have at least 1 character.") + add_string, add_match = "\d{1,3}$\s?$", f"{{1, {len(word)}}}" + adda_string = "d{1,3}$\s?\^[" + return adda_string + f"{word}]" + add_match + rf"{word}(?:e|é|ı|i|ğr)?" + add_string + + +def test_pattern(word, test_cases): # Generate the pattern + pattern = generate_pattern(word) + for test in test_cases: # Test the regex pattern on each input and print results + if re.match(pattern, test, re.IGNORECASE): + print(f"'{test}' matches the pattern.", "*" * 60) else: - b_result["send_person_id"] = None - return b_result - else: - result = find_iban_in_comment(iban, comment) - result["send_person_id"] = result.get("customer_id", None) - return result + print(f"'{test}' does NOT match the pattern.") -def wag_insert_budget_record(data): - similarity_result = parse_comment_with_name(data["iban"], data["process_comment"]) - build_iban = BuildIbans.find_one(iban=data["iban"]) +def parse_comment_for_living_space( + iban: str, comment: str, living_space_dict: dict = None +): + comment = unidecode(comment) + best_similarity = dict( + company=None, living_space=None, found_from=None, similarity=0.0, garbage="" + ) + for person in living_space_dict[iban]["people"]: + person: People = person + first_name = unidecode(person.firstname).upper() + last_name = unidecode(person.surname).upper() + middle_name = unidecode(person.middle_name).upper() + search_word = f"{first_name} {last_name}" + if middle_name: + search_word = f"{first_name} {middle_name} {last_name}" + garbage_words = get_garbage_words(comment, search_word) + cleaned_comment = remove_garbage_words(comment, garbage_words) + similarity_ratio = textdistance.jaro_winkler(cleaned_comment, search_word) + if similarity_ratio > float(best_similarity["similarity"]): + for living_space in living_space_dict[iban]["living_space"]: + if living_space.person_id == person.id: + best_similarity = { + "company": None, + "living_space": living_space, + "found_from": "Person Name", + "similarity": similarity_ratio, + "garbage": garbage_words, + } + # print( + # 'cleaned_comment', cleaned_comment, '\n' + # 'search_word', search_word, '\n' + # 'best_similarity', best_similarity, '\n' + # 'person name', f"{first_name} {last_name}", '\n' + # 'similarity_ratio', similarity_ratio, '\n' + # 'garbage_words', garbage + # ) + return best_similarity - if payload := InsertBudgetRecord(**data): - payload_dict = payload.model_dump(exclude_unset=True, exclude_none=True) - decision_books = BuildDecisionBook.select_only( - BuildDecisionBook.period_start_date - < strip_date_to_valid(payload_dict["bank_date"]), - BuildDecisionBook.period_stop_date - > strip_date_to_valid(payload_dict["bank_date"]), - select_args=[BuildDecisionBook.id], - order_by=[BuildDecisionBook.expiry_ends.desc()], - ) - payload_dict["build_id"] = getattr( - BuildIbans.find_one(iban=data["iban"]), "build_id", None - ) - living_space, count = BuildLivingSpace.find_living_from_customer_id( - similarity_result.get("customer_id", None), - strip_date_to_valid(payload_dict["bank_date"]), - ) - # living_space, count = BuildLivingSpace.filter( - # or_( - # BuildLivingSpace.owner_person_id - # == similarity_result.get("customer_id", 
None), - # BuildLivingSpace.life_person_id - # == similarity_result.get("customer_id", None), - # ), - # BuildLivingSpace.start_date - # < strip_date_to_valid(payload_dict["bank_date"]) - timedelta(days=30), - # BuildLivingSpace.stop_date - # > strip_date_to_valid(payload_dict["bank_date"]) + timedelta(days=30), - # BuildLivingSpace.active == True, - # BuildLivingSpace.deleted == False, - # ) - payload_dict["build_decision_book_id"] = ( - decision_books[0][0].id if decision_books else None - ) - payload_dict["company_id"] = similarity_result.get("company_id", None) - payload_dict["customer_id"] = similarity_result.get("customer_id", None) - payload_dict["send_person_id"] = similarity_result.get("send_person_id", None) - payload_dict["build_parts_id"] = ( - living_space[0].build_parts_id if living_space else None - ) +def parse_comment_for_company_or_individual(comment: str): + companies_list = Companies.filter_all( + Companies.commercial_type != "Commercial", system=True + ).data + comment = unidecode(comment) + best_similarity = dict( + company=None, living_space=None, found_from=None, similarity=0.0, garbage="" + ) + for company in companies_list: + search_word = unidecode(company.public_name) + garbage_words = get_garbage_words(comment, search_word) + cleaned_comment = remove_garbage_words(comment, garbage_words) + similarity_ratio = textdistance.jaro_winkler(cleaned_comment, search_word) + if similarity_ratio > float(best_similarity["similarity"]): + best_similarity = { + "company": company, + "living_space": None, + "found_from": "Customer Public Name", + "similarity": similarity_ratio, + "garbage": garbage_words, + } + # print( + # 'cleaned_comment', cleaned_comment, '\n' + # 'search_word', search_word, '\n' + # 'best_similarity', best_similarity, '\n' + # 'company name', company.public_name, '\n' + # 'similarity_ratio', similarity_ratio, '\n' + # 'garbage_words', garbage_words + # ) + return best_similarity - payload_dict["bank_date_y"] = strip_date_to_valid( - payload_dict["bank_date"] - ).year - payload_dict["bank_date_m"] = strip_date_to_valid( - payload_dict["bank_date"] - ).month - payload_dict["bank_date_d"] = strip_date_to_valid(payload_dict["bank_date"]).day - payload_dict["bank_date_w"] = strip_date_to_valid( - payload_dict["bank_date"] - ).isocalendar()[2] - payload_dict["build_id"] = build_iban.build_id if build_iban else None - payload_dict["replication_id"] = 55 - payload_dict["receive_debit"] = ( - "R" if payload_dict["currency_value"] < 0 else "D" - ) - data, found = AccountRecords.find_or_create( - **payload_dict, - found_from=similarity_result.get("found_from", None), - similarity=similarity_result.get("similarity", 0.0), - ) - data.payment_budget_record_close() - return data, found + +def parse_comment_to_split_with_star(account_record: AccountRecords): + if "*" in account_record.process_comment: + process_comment = str(account_record.process_comment.replace("**", "*")) + process_comments = process_comment.split("*") + return len(process_comments), *process_comments + return 1, account_record.process_comment + + +def parse_comment_with_name( + account_record: AccountRecords, living_space_dict: dict = None +): + comments = parse_comment_to_split_with_star(account_record=account_record) + best_similarity = {"similarity": 0.0} + comments_list, comments_length = comments[1:], int(comments[0]) + if ( + int(account_record.currency_value) > 0 + ): # Build receive money from living space people + if not comments_length > 1: + best_similarity = 
parse_comment_for_living_space( + iban=account_record.iban, + comment=comments_list[0], + living_space_dict=living_space_dict, + ) + best_similarity["send_person_id"] = best_similarity.get("customer_id", None) + return best_similarity + for comment in comments_list: + similarity_result = parse_comment_for_living_space( + iban=account_record.iban, + comment=comment, + living_space_dict=living_space_dict, + ) + if float(similarity_result["similarity"]) > float( + best_similarity["similarity"] + ): + best_similarity = similarity_result + return best_similarity + else: # Build pays money for service taken from company or individual + if not comments_length > 1: + best_similarity = parse_comment_for_company_or_individual( + comment=comments_list[0] + ) + best_similarity["send_person_id"] = best_similarity.get("customer_id", None) + return best_similarity + for comment in comments_list: + similarity_result = parse_comment_for_company_or_individual(comment=comment) + if float(similarity_result["similarity"]) > float( + best_similarity["similarity"] + ): + best_similarity = similarity_result + return best_similarity + + +def parse_comment_with_name_iban_description(account_record: AccountRecords): + comments = parse_comment_to_split_with_star(account_record=account_record) + comments_list, comments_length = comments[1:], int(comments[0]) + iban_results = BuildIbanDescription.filter_all( + BuildIbanDescription.iban == account_record.iban, system=True + ).data + best_similarity = dict( + company=None, living_space=None, found_from=None, similarity=0.0, garbage="" + ) + for comment in comments_list: + for iban_result in iban_results: + search_word = unidecode(iban_result.search_word) + garbage_words = get_garbage_words(comment, search_word) + cleaned_comment = remove_garbage_words(comment, garbage_words) + similarity_ratio = textdistance.jaro_winkler(cleaned_comment, search_word) + company = Companies.filter_by_one( + system=True, id=iban_result.company_id + ).data + if float(similarity_ratio) > float(best_similarity["similarity"]): + best_similarity = { + "company": company, + "living_space": None, + "found_from": "Customer Public Name Description", + "similarity": similarity_ratio, + "garbage": garbage_words, + } + return best_similarity + # print('account_record.process_comment', account_record.process_comment) + # test_pattern( + # word=unidecode("no"), + # test_cases=[account_record.process_comment] + # ) + # test_pattern(word="daire", test_cases=comments_list) + + # sm_dict_extended, sm_dict_digit = {}, {} + # iban_results = BuildIbanDescription.filter_all( + # BuildIbanDescription.iban == iban, system=True + # ).data + # for iban_result in iban_results or []: + # candidate_parts = comment.split(" ") + # extended_candidate_parts, digit_part = [], [] + # for part in candidate_parts: + # if part.lower() not in ["no", "daire", "nolu"]: + # extended_candidate_parts.append(part) + # if extended_candidate_parts: + # if all( + # candidate_part.lower() in comment.lower() + # for candidate_part in extended_candidate_parts + # ): + # similarity_ratio = textdistance.jaro_winkler( + # unidecode(str(iban_result.search_word)), comment + # ) + # found = False + # name_list = ( + # unidecode(str(iban_result.search_word)).replace(".", " ").split(" ") + # ) + # for name in name_list: + # if len(name) > 3 and name.lower() in comment.lower(): + # found = True + # break + # + # if not found: + # similarity_ratio = 0.1 + # sm_dict_extended[f"{iban_result.id}"] = similarity_ratio + # if sm_dict_extended: + # result = 
sorted( + # sm_dict_extended.items(), key=lambda item: item[1], reverse=True + # )[0] + # if float(result[1]) >= 0.5: + # iban_result = BuildIbanDescription.filter_one( + # BuildIbanDescription.id == int(result[0]), system=True + # ).data + # return { + # "company_id": iban_result.company_id, + # "customer_id": iban_result.customer_id, + # "found_from": "Name", + # "similarity": result[1], + # } + # return { + # "company_id": None, + # "customer_id": None, + # "found_from": None, + # "similarity": 0.0, + # } + + +# +# def wag_insert_budget_record(data): +# similarity_result = parse_comment_with_name(data["iban"], data["process_comment"]) +# build_iban = BuildIbans.find_one(iban=data["iban"]) +# +# if payload := InsertBudgetRecord(**data): +# payload_dict = payload.model_dump(exclude_unset=True, exclude_none=True) +# decision_books = BuildDecisionBook.select_only( +# BuildDecisionBook.period_start_date +# < strip_date_to_valid(payload_dict["bank_date"]), +# BuildDecisionBook.period_stop_date +# > strip_date_to_valid(payload_dict["bank_date"]), +# select_args=[BuildDecisionBook.id], +# order_by=[BuildDecisionBook.expiry_ends.desc()], +# ) +# payload_dict["build_id"] = getattr( +# BuildIbans.find_one(iban=data["iban"]), "build_id", None +# ) +# living_space, count = BuildLivingSpace.find_living_from_customer_id( +# similarity_result.get("customer_id", None), +# strip_date_to_valid(payload_dict["bank_date"]), +# ) +# # living_space, count = BuildLivingSpace.filter( +# # or_( +# # BuildLivingSpace.owner_person_id +# # == similarity_result.get("customer_id", None), +# # BuildLivingSpace.life_person_id +# # == similarity_result.get("customer_id", None), +# # ), +# # BuildLivingSpace.start_date +# # < strip_date_to_valid(payload_dict["bank_date"]) - timedelta(days=30), +# # BuildLivingSpace.stop_date +# # > strip_date_to_valid(payload_dict["bank_date"]) + timedelta(days=30), +# # BuildLivingSpace.active == True, +# # BuildLivingSpace.deleted == False, +# # ) +# payload_dict["build_decision_book_id"] = ( +# decision_books[0][0].id if decision_books else None +# ) +# payload_dict["company_id"] = similarity_result.get("company_id", None) +# payload_dict["customer_id"] = similarity_result.get("customer_id", None) +# payload_dict["send_person_id"] = similarity_result.get("send_person_id", None) +# +# payload_dict["build_parts_id"] = ( +# living_space[0].build_parts_id if living_space else None +# ) +# +# payload_dict["bank_date_y"] = strip_date_to_valid( +# payload_dict["bank_date"] +# ).year +# payload_dict["bank_date_m"] = strip_date_to_valid( +# payload_dict["bank_date"] +# ).month +# payload_dict["bank_date_d"] = strip_date_to_valid(payload_dict["bank_date"]).day +# payload_dict["bank_date_w"] = strip_date_to_valid( +# payload_dict["bank_date"] +# ).isocalendar()[2] +# payload_dict["build_id"] = build_iban.build_id if build_iban else None +# payload_dict["replication_id"] = 55 +# payload_dict["receive_debit"] = ( +# "R" if payload_dict["currency_value"] < 0 else "D" +# ) +# data, found = AccountRecords.find_or_create( +# **payload_dict, +# found_from=similarity_result.get("found_from", None), +# similarity=similarity_result.get("similarity", 0.0), +# ) +# data.payment_budget_record_close() +# return data, found diff --git a/api_validations/validations_request/project_decision_book.py b/api_validations/validations_request/project_decision_book.py index b3d6120..d63ac9b 100644 --- a/api_validations/validations_request/project_decision_book.py +++ 
b/api_validations/validations_request/project_decision_book.py @@ -79,7 +79,9 @@ class ApprovalsBuildDecisionBookProjects(PydanticBaseModel): build_decision_book_project_uu_id: str project_stop_date: str status_code: Optional[int] = None - final_price_list: Optional[list[dict]] = None # {"date": "2021-01-01", "price": 1000} + final_price_list: Optional[list[dict]] = ( + None # {"date": "2021-01-01", "price": 1000} + ) class InsertBuildDecisionBookProjectItemDebits(PydanticBaseModel): diff --git a/databases/__init__.py b/databases/__init__.py index 8551381..f560024 100644 --- a/databases/__init__.py +++ b/databases/__init__.py @@ -6,7 +6,6 @@ from databases.sql_models.account.account import ( AccountCodes, AccountDetail, AccountMaster, - AccountRecordDecisionPaymentClosed, AccountRecordExchanges, ) from databases.sql_models.building.budget import ( @@ -108,7 +107,6 @@ __all__ = [ "AccountCodes", "AccountDetail", "AccountMaster", - "AccountRecordDecisionPaymentClosed", "AccountRecordExchanges", "BuildIbans", "BuildIbanDescription", diff --git a/databases/sql_models/account/account.py b/databases/sql_models/account/account.py index a27502a..b9e7abb 100644 --- a/databases/sql_models/account/account.py +++ b/databases/sql_models/account/account.py @@ -412,6 +412,9 @@ class AccountRecords(CrudCollection): process_comment: Mapped[str] = mapped_column( String, nullable=False, comment="Transaction Record Comment" ) + process_garbage: Mapped[str] = mapped_column( + String, nullable=True, comment="Transaction Record Garbage" + ) bank_reference_code: Mapped[str] = mapped_column( String, nullable=False, comment="Bank Reference Code" ) @@ -465,8 +468,6 @@ class AccountRecords(CrudCollection): String, nullable=True, comment="Send Company UU ID" ) - customer_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=True) - customer_uu_id = mapped_column(String, nullable=True, comment="Customer UU ID") send_person_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=True) send_person_uu_id: Mapped[str] = mapped_column( String, nullable=True, comment="Send Person UU ID" @@ -477,7 +478,20 @@ class AccountRecords(CrudCollection): approving_accounting_person_uu_id: Mapped[str] = mapped_column( String, nullable=True, comment="Approving Accounting Person UU ID" ) - # build_id: Mapped[int] = mapped_column(ForeignKey("build.id"), nullable=True) + + living_space_id: Mapped[int] = mapped_column( + ForeignKey("build_living_space.id"), nullable=True + ) + living_space_uu_id: Mapped[str] = mapped_column( + String, nullable=True, comment="Living Space UU ID" + ) + customer_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=True) + customer_uu_id = mapped_column(String, nullable=True, comment="Customer UU ID") + + build_id: Mapped[int] = mapped_column(ForeignKey("build.id"), nullable=True) + build_uu_id: Mapped[str] = mapped_column( + String, nullable=True, comment="Build UU ID" + ) build_parts_id: Mapped[int] = mapped_column( ForeignKey("build_parts.id"), nullable=True ) @@ -652,54 +666,55 @@ class AccountRecords(CrudCollection): # print("is all dues_type", payment_dict["dues_type"], paid_value) -class AccountRecordDecisionPaymentClosed(CrudCollection): - - __tablename__ = "account_record_decision_payment_closed" - __exclude__fields__ = [] - - arc_currency: Mapped[str] = mapped_column( - String(5), nullable=False, comment="Unit of Currency" - ) - arc_processing_time: Mapped[TIMESTAMP] = mapped_column( - TIMESTAMP, nullable=False, comment="Processing Time" - ) - arc_currency_value: 
Mapped[float] = mapped_column( - Numeric(20, 6), nullable=False, comment="Currency Value" - ) - - decision_book_budgets_id: Mapped[int] = mapped_column( - ForeignKey("decision_book_budgets.id"), nullable=True - ) - decision_book_budgets_uu_id: Mapped[str] = mapped_column( - String, nullable=True, comment="Budget UUID" - ) - - build_decision_book_payment_id: Mapped[int] = mapped_column( - ForeignKey("build_decision_book_payments.id") - ) - build_decision_book_payment_uu_id: Mapped[str] = mapped_column( - String, nullable=True, comment="Build Decision Book Payment UU ID" - ) - account_records_id: Mapped[int] = mapped_column(ForeignKey("account_records.id")) - account_records_uu_id: Mapped[str] = mapped_column( - String, nullable=True, comment="Account Record UU ID" - ) - - __table_args__ = ( - Index( - "_account_record_decision_payment_closed_ndx_00", - account_records_id, - build_decision_book_payment_id, - arc_processing_time, - ), - Index( - "_account_record_decision_payment_closed_ndx_01", - build_decision_book_payment_id, - account_records_id, - arc_processing_time, - ), - {"comment": "Account Record Decision Payment Closed Information"}, - ) +# class AccountRecordDecisionPaymentClosed(CrudCollection): +# +# __tablename__ = "account_record_decision_payment_closed" +# __exclude__fields__ = [] +# +# arc_currency: Mapped[str] = mapped_column( +# String(5), nullable=False, comment="Unit of Currency" +# ) +# arc_processing_time: Mapped[TIMESTAMP] = mapped_column( +# TIMESTAMP, nullable=False, comment="Processing Time" +# ) +# arc_currency_value: Mapped[float] = mapped_column( +# Numeric(20, 6), nullable=False, comment="Currency Value" +# ) +# +# decision_book_budgets_id: Mapped[int] = mapped_column( +# ForeignKey("decision_book_budgets.id"), nullable=True +# ) +# decision_book_budgets_uu_id: Mapped[str] = mapped_column( +# String, nullable=True, comment="Budget UUID" +# ) +# +# build_decision_book_payment_id: Mapped[int] = mapped_column( +# ForeignKey("build_decision_book_payments.id") +# ) +# build_decision_book_payment_uu_id: Mapped[str] = mapped_column( +# String, nullable=True, comment="Build Decision Book Payment UU ID" +# ) +# account_records_id: Mapped[int] = mapped_column(ForeignKey("account_records.id")) +# account_records_uu_id: Mapped[str] = mapped_column( +# String, nullable=True, comment="Account Record UU ID" +# ) +# +# __table_args__ = ( +# Index( +# "_account_record_decision_payment_closed_ndx_00", +# account_records_id, +# build_decision_book_payment_id, +# arc_processing_time, +# ), +# Index( +# "_account_record_decision_payment_closed_ndx_01", +# build_decision_book_payment_id, +# account_records_id, +# arc_processing_time, +# ), +# {"comment": "Account Record Decision Payment Closed Information"}, +# ) +# class AccountRecordExchanges(CrudCollection): diff --git a/databases/sql_models/account/iban.py b/databases/sql_models/account/iban.py index 22f47ad..8492ed2 100644 --- a/databases/sql_models/account/iban.py +++ b/databases/sql_models/account/iban.py @@ -81,7 +81,9 @@ class BuildIbanDescription(CrudCollection): company_uu_id: Mapped[str] = mapped_column( String, nullable=True, comment="Company UUID" ) - build_parts_id: Mapped[int] = mapped_column(ForeignKey("build_parts.id"), nullable=True) + build_parts_id: Mapped[int] = mapped_column( + ForeignKey("build_parts.id"), nullable=True + ) build_parts_uu_id: Mapped[str] = mapped_column( String, nullable=True, comment="Build Parts UUID" ) diff --git a/databases/sql_models/building/decision_book.py 
b/databases/sql_models/building/decision_book.py index 3cf3f99..92b4c9f 100644 --- a/databases/sql_models/building/decision_book.py +++ b/databases/sql_models/building/decision_book.py @@ -865,7 +865,13 @@ class BuildDecisionBookPayments(CrudCollection): comment="Build Decision Book Item ID", ) build_decision_book_item_uu_id: Mapped[str] = mapped_column( - String, nullable=True, comment="Decision Book Item UUID" + String, nullable=False, comment="Decision Book Item UUID" + ) + build_parts_id: Mapped[int] = mapped_column( + ForeignKey("build_parts.id"), nullable=False + ) + build_parts_uu_id: Mapped[str] = mapped_column( + String, nullable=False, comment="Build Part UUID" ) decision_book_project_id: Mapped[int] = mapped_column( ForeignKey("build_decision_book_projects.id"), @@ -875,12 +881,11 @@ class BuildDecisionBookPayments(CrudCollection): decision_book_project_uu_id: Mapped[str] = mapped_column( String, nullable=True, comment="Decision Book Project UUID" ) - - build_parts_id: Mapped[int] = mapped_column( - ForeignKey("build_parts.id"), nullable=False + account_records_id: Mapped[int] = mapped_column( + ForeignKey("account_records.id"), nullable=True ) - build_parts_uu_id: Mapped[str] = mapped_column( - String, nullable=False, comment="Build Part UUID" + account_records_uu_id: Mapped[str] = mapped_column( + String, nullable=True, comment="Account Record UU ID" ) # budget_records_id: Mapped[int] = mapped_column(ForeignKey("account_records.id"), nullable=True) @@ -918,8 +923,11 @@ class BuildDecisionBookPayments(CrudCollection): build_parts_id, payment_plan_time_periods, process_date, + payment_types_id, + account_records_id, unique=True, ), + Index("build_decision_book_payments_detail_ndx_01", account_records_id), {"comment": "Payment Details of Decision Book Payments"}, ) diff --git a/docker-compose.yml b/docker-compose.yml index c37d194..497216e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,25 +1,25 @@ services: -# commercial_mongo_service: -# container_name: commercial_mongo_service -# image: "bitnami/mongodb:latest" -## image: "bitnami/mongodb:4.4.1-debian-10-r3" -# networks: -# - network_store_services -# environment: -# - MONGODB_DISABLE_ENFORCE_AUTH=true -# - MONGODB_ROOT_PASSWORD=root -# - MONGODB_DATABASE=mongo_database -# - MONGODB_USERNAME=mongo_user -# - MONGODB_PASSWORD=mongo_password -# - MONGO_INITDB_ROOT_USERNAME=mongo_user -# - MONGO_INITDB_ROOT_PASSWORD=mongo_password -# - MONGO_INITDB_DATABASE=mongo_database -# volumes: -# - wag_commercial_mongodb_data:/bitnami/mongodb -# ports: -# - "11110:27017" -# + commercial_mongo_service: + container_name: commercial_mongo_service + image: "bitnami/mongodb:latest" +# image: "bitnami/mongodb:4.4.1-debian-10-r3" + networks: + - network_store_services + environment: + - MONGODB_DISABLE_ENFORCE_AUTH=true + - MONGODB_ROOT_PASSWORD=root + - MONGODB_DATABASE=mongo_database + - MONGODB_USERNAME=mongo_user + - MONGODB_PASSWORD=mongo_password + - MONGO_INITDB_ROOT_USERNAME=mongo_user + - MONGO_INITDB_ROOT_PASSWORD=mongo_password + - MONGO_INITDB_DATABASE=mongo_database + volumes: + - wag_commercial_mongodb_data:/bitnami/mongodb + ports: + - "11110:27017" + commercial_memory_service: container_name: commercial_memory_service image: 'bitnami/redis:latest' @@ -51,16 +51,16 @@ services: volumes: - wag_postgres_commercial_data:/bitnami/postgresql -# wag_management_service: -# container_name: wag_management_service -# restart: on-failure -# build: -# context: . 
-# dockerfile: service_app/Dockerfile -# ports: -# - "41575:41575" -# networks: -# - network_store_services + wag_management_service: + container_name: wag_management_service + restart: on-failure + build: + context: . + dockerfile: service_app/Dockerfile + ports: + - "41575:41575" + networks: + - network_store_services # depends_on: # - wag_management_init_service # - grafana @@ -177,7 +177,7 @@ networks: volumes: wag_postgres_commercial_data: # grafana_data: -# wag_commercial_mongodb_data: + wag_commercial_mongodb_data: # environment: # - DATABASE_URL=postgresql+psycopg2://berkay_wag_user:berkay_wag_user_password@postgres_commercial:5432/wag_database diff --git a/service_account_records/account.Dockerfile b/service_account_records/account.Dockerfile index b2bf116..6c2e234 100644 --- a/service_account_records/account.Dockerfile +++ b/service_account_records/account.Dockerfile @@ -15,21 +15,12 @@ COPY ../databases ./service_account_records/databases COPY ../api_configs ./service_account_records/api_configs COPY ../api_objects ./service_account_records/api_objects COPY ../api_validations ./service_account_records/api_validations -#COPY ../service_account_records/crontab_list /service_account_records/crontab_to_write RUN apt-get update && apt-get install -y cron # 11:00 Istanbul Time (UTC+3) system time is 08:00 UTC -#RUN echo "0 8 * * * /usr/local/bin/python /service_account_records/app_mail_sender.py >> /var/log/cron.log 2>&1" > /tmp/crontab_list && crontab /tmp/crontab_list -#RUN echo "*/15 * * * * /usr/local/bin/python /service_account_records/isbank/main_single_thread.py >> /var/log/cron.log 2>&1" >> /tmp/crontab_list && crontab /tmp/crontab_list -RUN echo "*/15 * * * * /usr/local/bin/python " >> /tmp/crontab_list && crontab /tmp/crontab_list -#RUN echo /service_account_records/crontab_to_write > /tmp/crontab_list -#RUN crontab /tmp/crontab_list +RUN echo "*/15 * * * * /usr/local/bin/python /service_account_records/app_accounts.py " >> /tmp/crontab_list && crontab /tmp/crontab_list RUN touch /var/log/cron.log CMD cron && tail -f /var/log/cron.log - -#RUN chmod 0644 /etc/cron.d/app_mail_sender_cron -#RUN crontab /etc/cron.d/send_email_cron -#CMD ["python", "-m", "app_mail_sender"] diff --git a/service_account_records/app_accounts.py b/service_account_records/app_accounts.py index c8b5f11..4b69ba6 100644 --- a/service_account_records/app_accounts.py +++ b/service_account_records/app_accounts.py @@ -1,52 +1,58 @@ -import json +from decimal import Decimal -from api_services.bank_actions.wag_account_record_parser import parse_comment_with_name +from api_services.bank_actions.wag_account_record_parser import ( + parse_comment_with_name, + parse_comment_with_name_iban_description, +) from databases import ( AccountRecords, BuildIbans, BuildDecisionBook, - -) -from api_validations.validations_request import ( - ListOptions -) -from api_library.date_time_actions.date_functions import ( - system_arrow + Build, + BuildLivingSpace, + People, + OccupantTypes, ) +from api_validations.validations_request import ListOptions +from api_library.date_time_actions.date_functions import system_arrow +from databases.sql_models.building.build import BuildParts +from databases.sql_models.building.decision_book import BuildDecisionBookPayments +from databases.sql_models.others.enums import ApiEnumDropdown from service_account_records.configs import AccountConfig + account_list = ListOptions( page=1, size=1000, order_field="bank_date", order_type="desc", ) + + def account_records_find_decision_book(): - 
created_ibans, iban_build_dict = [], {} - AccountRecords.filter_attr = account_list created_ibans, iban_build_dict = [], {} AccountRecords.filter_attr = account_list - days_before_now = system_arrow.now().shift(days=(int(AccountConfig.BEFORE_DAY) * -1)) account_records_list: list[AccountRecords] = AccountRecords.filter_all( - # AccountRecords.bank_date >= str(days_before_now), AccountRecords.build_decision_book_id == None, - system=True + AccountRecords.remainder_balance < AccountRecords.currency_value, + system=True, ).data for account_record in account_records_list: if account_record.iban not in created_ibans: found_iban: BuildIbans = BuildIbans.filter_one( - BuildIbans.iban == account_record.iban, - system=True + BuildIbans.iban == account_record.iban, system=True ).data if not found_iban: - create_build_ibans = BuildIbans.find_or_create( - iban=account_record.iban, - start_date=str(system_arrow.now().shift(days=-1)), - ) - create_build_ibans.save_and_confirm() - created_ibans.append(account_record.iban) - # It is in database already - if not found_iban.build_id: + try: + create_build_ibans = BuildIbans.find_or_create( + iban=account_record.iban, + start_date=str(system_arrow.now().shift(days=-1)), + ) + create_build_ibans.save_and_confirm() + created_ibans.append(account_record.iban) + except Exception as e: + print("Exception of find_decision_book ln:55", e) + if not found_iban.build_id: # It is in database already iban_build_dict["iban"] = account_record.iban iban_build_dict["build_id"] = None else: @@ -56,39 +62,274 @@ def account_records_find_decision_book(): BuildDecisionBook.expiry_ends > account_record.bank_date, ).data if found_decision_book: - print('found_decision_book', found_decision_book.get_dict()) account_record.update( build_decision_book_id=found_decision_book.id, build_decision_book_uu_id=str(found_decision_book.uu_id), + build_id=found_decision_book.build_id, + build_uu_id=str(found_decision_book.build_uu_id), ) account_record.save() - - print('iban_build_dict', iban_build_dict) - print('created_ibans', created_ibans) return +def account_save_search_result(account_record, similarity_result): + found_company = similarity_result.get("company", None) + found_customer, part, build = ( + similarity_result.get("living_space", None), + None, + None, + ) + if found_customer: + part = BuildParts.filter_by_one( + id=found_customer.build_parts_id, human_livable=True + ).data + if part: + build = Build.filter_by_one(id=part.build_id).data + + account_record.similarity = similarity_result.get("similarity", 0.00) + account_record.found_from = similarity_result.get("found_from", None) + account_record.company_id = getattr(found_company, "id", None) + account_record.company_uu_id = getattr(found_company, "uu_id", None) + account_record.build_parts_id = getattr(part, "id", None) + account_record.build_parts_uu_id = getattr(part, "uu_id", None) + + if not account_record.build_id and build: + account_record.build_id = getattr(build, "id", None) + account_record.build_uu_id = getattr(build, "uu_id", None) + + # account_record.send_person_id = getattr(found_send_person, "id", None) + # account_record.send_person_uu_id = getattr(found_send_person, "uu_id", None) + account_record.living_space_id = getattr(found_customer, "id", None) + account_record.living_space_uu_id = getattr(found_customer, "uu_id", None) + account_record.save() + + +def account_get_people_and_living_space_info_via_iban() -> dict: + build_living_space_dict = {} + account_records_ibans = ( + AccountRecords.select_only( + 
AccountRecords.build_decision_book_id != None, + select_args=[AccountRecords.iban], + system=True, + ) + .query.distinct(AccountRecords.iban) + .all() + ) + flat_resident = OccupantTypes.filter_by_one( + system=True, occupant_category_type="FL", occupant_code="FL-RES" + ).data + flat_owner = OccupantTypes.filter_by_one( + system=True, occupant_category_type="FL", occupant_code="FL-OWN" + ).data + flat_tenant = OccupantTypes.filter_by_one( + system=True, occupant_category_type="FL", occupant_code="FL-TEN" + ).data + flat_represent = OccupantTypes.filter_by_one( + system=True, occupant_category_type="FL", occupant_code="FL-REP" + ).data + for account_records_iban in account_records_ibans: + if account_records_iban not in build_living_space_dict: + build_iban: BuildIbans = BuildIbans.filter_one( + BuildIbans.iban == account_records_iban[0], system=True + ).data + build_parts = BuildParts.filter_by_all( + system=True, build_id=build_iban.build_id, human_livable=True + ).data + living_spaces = BuildLivingSpace.filter_all( + BuildLivingSpace.build_parts_id.in_( + [build_parts.id for build_parts in build_parts] + ), + BuildLivingSpace.occupant_type.in_( + [flat_resident.id, flat_owner.id, flat_tenant.id, flat_represent.id] + ), + ).data + living_spaces_people = [ + living_space.person_id + for living_space in living_spaces + if living_space.person_id + ] + people_list = People.filter_all( + People.id.in_(living_spaces_people), system=True + ).data + build_living_space_dict[str(account_records_iban[0])] = { + "people": list(people_list), + "living_space": list(living_spaces), + "build_parts": list(build_parts), + } + return build_living_space_dict + + def account_records_search(): + build_living_space_dict = account_get_people_and_living_space_info_via_iban() AccountRecords.filter_attr = account_list account_records_list: list[AccountRecords] = AccountRecords.filter_all( - AccountRecords.build_decision_book_id != None, - system=True + AccountRecords.build_decision_book_id != None, system=True + ).data + found_list = [] + for account_record in account_records_list: + similarity_result = parse_comment_with_name( + account_record=account_record, living_space_dict=build_living_space_dict + ) + fs, ac = similarity_result.get("similarity"), account_record.similarity or 0 + if float(fs) >= 0.8 and float(fs) > float(ac): + found_list.append(similarity_result) + account_save_search_result( + account_record=account_record, similarity_result=similarity_result + ) + else: + similarity_result = parse_comment_with_name_iban_description( + account_record=account_record + ) + fs, ac = similarity_result.get("similarity"), account_record.similarity or 0 + if float(fs) >= 0.8 and float(fs) > float(ac): + found_list.append(similarity_result) + account_save_search_result( + account_record=account_record, similarity_result=similarity_result + ) + print("Account Records Search : ", len(found_list), "/", len(account_records_list)) + return + + +def pay_the_registration( + account_record, receive_enum, debit_enum, is_old_record: bool = False +): + current_currency_value = float(Decimal(account_record.currency_value)) - float( + Decimal(account_record.remainder_balance) + ) + if not current_currency_value > 0: + return current_currency_value + + process_date = system_arrow.get(account_record.bank_date) + account_bank_date_year, account_bank_date_month = ( + process_date.date().year, + process_date.date().month, + ) + payment_arguments_debit = [ + BuildDecisionBookPayments.build_parts_id == account_record.build_parts_id, + 
BuildDecisionBookPayments.payment_types_id == debit_enum.id, + BuildDecisionBookPayments.account_records_id == None, + ] + if not is_old_record: + payment_arguments_debit.extend( + [ + BuildDecisionBookPayments.process_date_y == int(account_bank_date_year), + BuildDecisionBookPayments.process_date_m + == int(account_bank_date_month), + ] + ) + payments = ( + BuildDecisionBookPayments.filter_all(*payment_arguments_debit) + .query.order_by(BuildDecisionBookPayments.process_date.asc()) + .all() + ) + for payment in payments: + if not current_currency_value > 0: + return current_currency_value + + payment_arguments_receive = [ + BuildDecisionBookPayments.build_parts_id == account_record.build_parts_id, + BuildDecisionBookPayments.payment_plan_time_periods + == payment.payment_plan_time_periods, + BuildDecisionBookPayments.payment_types_id == receive_enum.id, + BuildDecisionBookPayments.build_decision_book_item_id + == payment.build_decision_book_item_id, + BuildDecisionBookPayments.decision_book_project_id + == payment.decision_book_project_id, + BuildDecisionBookPayments.process_date == payment.process_date, + ] + if not is_old_record: + payment_arguments_receive.extend( + [ + BuildDecisionBookPayments.process_date_y + == int(account_bank_date_year), + BuildDecisionBookPayments.process_date_m + == int(account_bank_date_month), + ] + ) + + payment_received = ( + BuildDecisionBookPayments.filter_all(*payment_arguments_receive).data or [] + ) + sum_of_payment_received = sum( + [abs(payment.payment_amount) for payment in payment_received] + ) + net_amount = float(abs(Decimal(payment.payment_amount))) - float( + abs(Decimal(sum_of_payment_received)) + ) + if not net_amount > 0: + continue + if float(abs(current_currency_value)) < float(abs(net_amount)): + net_amount = float(current_currency_value) + process_date = system_arrow.get(payment.process_date) + try: + created_book_payment = BuildDecisionBookPayments.find_or_create( + payment_plan_time_periods=payment.payment_plan_time_periods, + payment_amount=float(abs(net_amount)), + payment_types_id=receive_enum.id, + payment_types_uu_id=str(receive_enum.uu_id), + process_date=str(process_date), + process_date_m=process_date.date().month, + process_date_y=process_date.date().year, + period_time=f"{process_date.year}-{str(process_date.month).zfill(2)}", + build_parts_id=payment.build_parts_id, + build_parts_uu_id=str(payment.build_parts_uu_id), + account_records_id=account_record.id, + account_records_uu_id=str(account_record.uu_id), + build_decision_book_item_id=payment.build_decision_book_item_id, + build_decision_book_item_uu_id=str( + payment.build_decision_book_item_uu_id + ), + decision_book_project_id=payment.decision_book_project_id, + decision_book_project_uu_id=str(payment.decision_book_project_uu_id), + ) + created_book_payment.save_and_confirm() + created_payment_amount = float(Decimal(created_book_payment.payment_amount)) + remainder_balance = float( + Decimal(account_record.remainder_balance) + ) + float(abs(created_payment_amount)) + account_record.update(remainder_balance=remainder_balance) + account_record.save() + if current_currency_value >= abs(net_amount): + current_currency_value -= abs(net_amount) + except Exception as e: + print("Exception of decision payment ln:300", e) + return current_currency_value + + +def send_accounts_to_decision_payment(): + AccountRecords.filter_attr = account_list + receive_enum = ApiEnumDropdown.filter_by_one( + system=True, enum_class="DebitTypes", key="DT-R" + ).data + debit_enum = 
ApiEnumDropdown.filter_by_one( + system=True, enum_class="DebitTypes", key="DT-D" + ).data + account_records_list: list[AccountRecords] = AccountRecords.filter_all( + AccountRecords.remainder_balance < AccountRecords.currency_value, + AccountRecords.approved_record == True, + AccountRecords.receive_debit == receive_enum.id, ).data for account_record in account_records_list: - print('account_record', account_record.get_dict()) - similarity_result = parse_comment_with_name(account_record.iban, account_record.process_comment) - print('similarity_result', similarity_result) - # build_iban = BuildIbans.find_one(iban=data["iban"]) - print('account_records_list', account_records_list) - + current_currency_value = pay_the_registration( + account_record, receive_enum, debit_enum + ) + if current_currency_value > 0: + pay_the_registration(account_record, receive_enum, debit_enum, True) + if abs(float(Decimal(account_record.remainder_balance))) == abs( + float(Decimal(account_record.currency_value)) + ): + account_record.update(status_id=97) + account_record.save() + # todo If the payment is more than the amount, then create a new account record with the remaining amount return + def account_records_service() -> None: account_records_find_decision_book() account_records_search() + send_accounts_to_decision_payment() return - if __name__ == "__main__": account_records_service() diff --git a/service_account_records/configs.py b/service_account_records/configs.py index 5ce4e1e..2888d1c 100644 --- a/service_account_records/configs.py +++ b/service_account_records/configs.py @@ -1,4 +1,2 @@ - - class AccountConfig: - BEFORE_DAY = 30 \ No newline at end of file + BEFORE_DAY = 30 diff --git a/service_app_test/test_application/migrate_old_data/runner.py b/service_app_test/test_application/migrate_old_data/runner.py index 235a4d4..e8bf5b4 100644 --- a/service_app_test/test_application/migrate_old_data/runner.py +++ b/service_app_test/test_application/migrate_old_data/runner.py @@ -74,17 +74,17 @@ selection_list = [ manager_token = "g0Z1YYjh2WqFfoI3MdJ9wrfXAHeL6f7UatEkySzOK0dFX6CH1sXgjQ" people_uu_id = "2c8781f6-c1bc-432d-bb0c-9f5247ecc750" list_of_attendees = [ -"_Ky6UyaoatH5CZEQHZ1gdoj_7HDxSJ2DgBwq6hSXbCKR-WG8E9DPuQ", -"KhdJF7XP6ni_qdIRXSByGHKez8oTSbmzOAZxdi2A_iy-oOW6-VbfPA", -"vn01wNh5moTiNzw2qOvT4h5eqgCQ10jmN3OK659NN-ekriFSmVju_Q", -"EMsteygK4Myingptlpi_sr7Xu0Wsrl7xpQD-ukVY4GBjv6NJakpQ4Q", -"26p4QD1CofK6-Pk_VyDOB2Xstdv_sx88kh4kResWg4TuCuAy7tAT-A", -"2FgntLgLfFM20d32-gOwaHu44Vk_fDlpOh8IvS8gahiW-V0Dv0qLWg", -"TwC_dNuT6Iln0NFinEuvxbnjmb_HwcS8qWg-097gjc8pHTQ1p0nTGA", -"Yn35HP9TJonn9PTFpsq1lH3w-x-zHjTWymBeP6v4XPcwJP_aMj8JsQ", -"C2DbYn1jcocKwjQ8gvJ_jCU0IGAmbJB3JgHj-N_OTu1jNtmhVrW7aQ", -"TT5-9a_JVSf_WBAcN1sAfUyV45J2e7J1NoVGe3BcrrlAI1GNoU2_rw", -"ED4H5zpmxpLXX5rO6eoTb7wIa2x1v0YbNwsFh-iAohWh7fDyhvq_BQ", + "_Ky6UyaoatH5CZEQHZ1gdoj_7HDxSJ2DgBwq6hSXbCKR-WG8E9DPuQ", + "KhdJF7XP6ni_qdIRXSByGHKez8oTSbmzOAZxdi2A_iy-oOW6-VbfPA", + "vn01wNh5moTiNzw2qOvT4h5eqgCQ10jmN3OK659NN-ekriFSmVju_Q", + "EMsteygK4Myingptlpi_sr7Xu0Wsrl7xpQD-ukVY4GBjv6NJakpQ4Q", + "26p4QD1CofK6-Pk_VyDOB2Xstdv_sx88kh4kResWg4TuCuAy7tAT-A", + "2FgntLgLfFM20d32-gOwaHu44Vk_fDlpOh8IvS8gahiW-V0Dv0qLWg", + "TwC_dNuT6Iln0NFinEuvxbnjmb_HwcS8qWg-097gjc8pHTQ1p0nTGA", + "Yn35HP9TJonn9PTFpsq1lH3w-x-zHjTWymBeP6v4XPcwJP_aMj8JsQ", + "C2DbYn1jcocKwjQ8gvJ_jCU0IGAmbJB3JgHj-N_OTu1jNtmhVrW7aQ", + "TT5-9a_JVSf_WBAcN1sAfUyV45J2e7J1NoVGe3BcrrlAI1GNoU2_rw", + "ED4H5zpmxpLXX5rO6eoTb7wIa2x1v0YbNwsFh-iAohWh7fDyhvq_BQ", ] constant = "Toplantı sonucunda araştırmalar tamamlandı, 
katılımcılara e-posta gönderildi. Onaylayan sayısı yeterli olmadığı için karar alınamadı ve proje iptal edildi ve sonlandırıldı."
@@ -361,11 +361,11 @@ elif assign_people_to_create_item == 4:
     print("project", decode_as_json_indent(project))
     project_uu_id = project["uu_id"]
-    approve_project_item = lambda final_price_list : {
+    approve_project_item = lambda final_price_list: {
         "build_decision_book_project_uu_id": project_uu_id,
         "project_stop_date": "2024-12-01",
         "final_price_list": final_price_list,
-        "status_code": 9
+        "status_code": 9,
     }
     approve_project_dict = approve_project_item(
         [{"date": "2024-11-03", "price": 585.00}]
     )
@@ -398,7 +398,10 @@ elif assign_people_to_create_item == 4:
 
     project_uu_id = project["uu_id"]
     approve_project_dict = approve_project_item(
-        [{"date": "2024-12-01", "price": 600.25}, {"date": "2025-01-01", "price": 600.50}]
+        [
+            {"date": "2024-12-01", "price": 600.25},
+            {"date": "2025-01-01", "price": 600.50},
+        ]
     )
     insert_project_item = generate_insert_project_item(
         **{
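
Editor's note on the matching approach introduced in this patch: the new helpers in api_services/bank_actions/wag_account_record_parser.py (remove_spaces_from_string, get_garbage_words, remove_garbage_words) and the acceptance rule in account_records_search all serve one idea — normalize the bank transfer comment, strip out every token that is not part of a candidate name, and score what remains against the candidate with Jaro-Winkler similarity, keeping a match only when it reaches 0.8 and beats the record's previously stored similarity. The sketch below is a minimal, self-contained illustration of that idea only; the function names here (normalize, strip_name_tokens, best_match), the sample occupant names, and the sample comment are invented for this example and are not the patch's own helpers or data. The textdistance and unidecode libraries and the 0.8 threshold do come from the patch.

# Minimal sketch of the comment-matching technique used by the patch.
# Requires: pip install textdistance unidecode
import textdistance
from unidecode import unidecode


def normalize(text: str) -> str:
    # Transliterate to ASCII, uppercase, and collapse whitespace.
    return " ".join(unidecode(text).upper().split())


def strip_name_tokens(comment: str, name: str) -> str:
    # Remove every token of the candidate name; what is left is the "garbage".
    cleaned = normalize(comment.replace("*", " "))
    for token in normalize(name).split(" "):
        cleaned = cleaned.replace(token, "")
    return " ".join(cleaned.split())


def best_match(comment: str, candidates: list[str], threshold: float = 0.8):
    # Keep the candidate whose name best survives garbage removal, if it clears the threshold.
    best_name, best_score = None, 0.0
    for name in candidates:
        garbage = strip_name_tokens(comment, name)
        cleaned = normalize(comment.replace("*", " "))
        for token in garbage.split(" "):
            if token:
                cleaned = cleaned.replace(token, "")
        score = textdistance.jaro_winkler(" ".join(cleaned.split()), normalize(name))
        if score > best_score:
            best_name, best_score = name, score
    return (best_name, best_score) if best_score >= threshold else None


if __name__ == "__main__":
    occupants = ["AHMET YILMAZ", "AYSE KAYA"]  # invented sample data
    print(best_match("DAIRE 5 AIDAT AHMET YILMAZ KASIM", occupants))

As in account_records_search, a result from this kind of matcher should only be persisted when the score is at least 0.8 and exceeds the similarity already stored on the account record.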