updated and cleaned
commit 22876d250d, parent 713730420c
@@ -29,21 +29,24 @@ T = TypeVar("T")


 def check_payload_already_exists_mongo_database(filename: str, mongo_provider) -> bool:
-    find_one_result = mongo_provider.find_one(filter_query={
-        "filename": filename
-    })
+    find_one_result = mongo_provider.find_one(filter_query={"filename": filename})
     if find_one_result:
         return True
     return False


-def write_payload_to_mongo_database(payload, filename: str, mail_info:dict, mongo_provider) -> bool:
-    insert_one_result = mongo_provider.insert_one(document={
-        "filename": filename,
-        "payload": payload,
-        "stage": "read",
-        "created_at": str(arrow.now()), **mail_info
-    })
+def write_payload_to_mongo_database(
+    payload, filename: str, mail_info: dict, mongo_provider
+) -> bool:
+    insert_one_result = mongo_provider.insert_one(
+        document={
+            "filename": filename,
+            "payload": payload,
+            "stage": "read",
+            "created_at": str(arrow.now()),
+            **mail_info,
+        }
+    )
     if insert_one_result.acknowledged:
         return True
     return False
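For orientation, a minimal sketch of how these two helpers are presumably combined by the reading service. The provider construction mirrors the MongoProvider hunks later in this diff; the import path, storage prefix, filename, payload bytes and mail_info values are placeholders, not taken from the repository.

import arrow

from Services.MongoService.provider import MongoProvider  # module path assumed

with MongoProvider.mongo_client() as client:
    provider = MongoProvider(
        client=client,
        database="mongo_database",
        storage_reason=["isbank", str(arrow.now().date())],  # placeholder prefix
    )
    filename = "statement.xlsx"  # placeholder attachment name
    if not check_payload_already_exists_mongo_database(filename, provider):
        write_payload_to_mongo_database(
            payload=b"<attachment bytes>",
            filename=filename,
            mail_info={"from": "bank@example.com", "subject": "statement"},
            mongo_provider=provider,
        )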
@@ -57,7 +60,7 @@ def read_email_and_write_to_mongo_database(email_message, mail_info: dict) -> bo
         storage_reason=[mongo_prefix, str(arrow.now().date())],
     )
     if email_message.is_multipart():  # Check if email has multipart content
-        for part in email_message.walk():  # Each part can be an attachment
+        for part in email_message.walk():  # Each part can be an attachment
             content_disposition = part.get("Content-Disposition")
             if content_disposition and "attachment" in content_disposition:
                 if filename := part.get_filename():
@@ -71,7 +74,7 @@ def read_email_and_write_to_mongo_database(email_message, mail_info: dict) -> bo
                         payload=payload,
                         filename=filename,
                         mongo_provider=mongo_provider,
-                        mail_info=mail_info
+                        mail_info=mail_info,
                     )
     else:  # Handle non-multipart email, though this is rare for emails with attachments
         content_disposition = email_message.get("Content-Disposition")
@@ -81,7 +84,7 @@ def read_email_and_write_to_mongo_database(email_message, mail_info: dict) -> bo
             filename=filename,
             mongo_provider=mongo_provider,
         )
-        is_iban_in_filename= authorized_iban_cleaned in str(filename)
+        is_iban_in_filename = authorized_iban_cleaned in str(filename)
         if is_iban_in_filename and file_exists:
             payload = email_message.get_payload(decode=True)
             return write_payload_to_mongo_database(
@@ -114,10 +117,10 @@ def app():
         if email_message := banks_mail.email:
             headers = {k.lower(): v for k, v in banks_mail.headers.items()}
             mail_info = {
-                "from": headers['from'],
-                "to": headers['to'],
-                "subject": headers['subject'],
-                "date": str(headers['date']),
+                "from": headers["from"],
+                "to": headers["to"],
+                "subject": headers["subject"],
+                "date": str(headers["date"]),
             }
             read_email_and_write_to_mongo_database(
                 email_message=email_message, mail_info=mail_info
@@ -127,6 +130,6 @@ def app():
 if __name__ == "__main__":

     while True:
-        print('Running email service...')
+        print("Running email service...")
         app()
         time.sleep(Config.EMAIL_SLEEP)
@@ -1,4 +1,3 @@
-
 class Config:
     # IP_ADDRESS: str = "http://10.10.2.46:41575/internal/isbank/retreive"
     SERVICE_TIMING: int = 900  # 15 min
@@ -18,7 +18,9 @@ def collect_excel_files_from_mongo_database(mongo_provider) -> list:
     return mongo_provider.find_many(filter_query={"stage": "read"})


-def update_parsed_data_to_mongo_database(mongo_provider, collected_data_dict: dict, filename: str) -> None:
+def update_parsed_data_to_mongo_database(
+    mongo_provider, collected_data_dict: dict, filename: str
+) -> None:
     if collected_data_dict:
         payload = collected_data_dict[filename]
         if payload:
@@ -43,9 +45,9 @@ def parse_excel_file(excel_frame: DataFrame, excel_name: str) -> dict:
         if len(str(row[1]).split("/")) > 2:
             data_dict[excel_name] = dict(
                 iban=str(iban),
-                bank_date=arrow.get(datetime.datetime.strptime(
-                    str(row[1]), "%d/%m/%Y-%H:%M:%S"
-                )).__str__(),
+                bank_date=arrow.get(
+                    datetime.datetime.strptime(str(row[1]), "%d/%m/%Y-%H:%M:%S")
+                ).__str__(),
                 channel_branch=unidecode(str(row[3])),
                 currency_value=(
                     float(str(row[4]).replace(",", "")) if row[4] else 0
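For reference, a minimal sketch of the date conversion reformatted above, assuming the cell value uses the same %d/%m/%Y-%H:%M:%S layout (the sample string is illustrative only); str(...) is equivalent to the .__str__() call in the hunk.

import datetime

import arrow

raw = "02/01/2024-13:45:00"  # illustrative cell value
parsed = datetime.datetime.strptime(raw, "%d/%m/%Y-%H:%M:%S")
bank_date = str(arrow.get(parsed))  # "2024-01-02T13:45:00+00:00"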
@@ -83,7 +85,9 @@ def app():
             # Extract IBAN and root info from the xl file
             collected_data_dict = parse_excel_file(excel_frame, filename)
             update_parsed_data_to_mongo_database(
-                mongo_provider=mongo_provider, collected_data_dict=collected_data_dict, filename=filename
+                mongo_provider=mongo_provider,
+                collected_data_dict=collected_data_dict,
+                filename=filename,
             )


@@ -25,6 +25,7 @@ ADD /BankServices/RoutineEmailService /
 ADD /Configs /Configs
 ADD /Schemas /Schemas
 ADD /Commons /Commons
+ADD /BankServices/ServiceDepends/template_accounts.html /templates/template_accounts.html

 ADD /Services/MongoService /Services/MongoService
 ADD /Services/PostgresService /Services/PostgresService
@@ -6,11 +6,24 @@ from jinja2 import Environment, FileSystemLoader
 from Services.EmailService.provider import send_email


-def render_email_template(headers: list, rows: list):
+def render_email_template(
+    headers: list, rows: list, balance_error: bool, bank_balance: float
+):
     template_dir = os.path.join(os.path.dirname(__file__), "templates")
-    env = Environment(loader=FileSystemLoader(template_dir))  # Load templates from the directory
-    template = env.get_template("template_accounts.html")  # Load the specific template file
-    return template.render(headers=headers, rows=rows)  # Render template with variables
+    env = Environment(
+        loader=FileSystemLoader(template_dir)
+    )  # Load templates from the directory
+    template = env.get_template(
+        "template_accounts.html"
+    )  # Load the specific template file
+    # Render template with variables
+    return template.render(
+        headers=headers,
+        rows=rows,
+        bank_balance=f"{bank_balance:.4f}",
+        balance_error=balance_error,
+        today=str(arrow.now().date()),
+    )


 def send_email_to_given_address(send_to: str, html_template: str):
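A minimal usage sketch for the widened render_email_template signature, using the header labels that appear later in this diff; the row values and balance figure are illustrative only.

html = render_email_template(
    headers=["Ulaştığı Tarih", "Banka Transaksiyonu Ek Bilgi", "Aktarım Değeri"],
    rows=[["2024-01-02", "EFT", "1500.0000"]],  # one list per table row
    balance_error=False,
    bank_balance=12345.6789,  # rendered as "12345.6789"
)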
@@ -27,30 +40,42 @@ def send_email_to_given_address(send_to: str, html_template: str):


 def set_account_records_to_send_email():
     """
     from app import set_account_records_to_send_email
     """
+    db_session = AccountRecords.new_session()
+    account_records = AccountRecords.filter_all(db=db_session).core_query
     account_records = (
-        AccountRecords.query.filter()
-        .order_by(
+        account_records.order_by(
             AccountRecords.bank_date.desc(), AccountRecords.bank_reference_code.desc()
         )
         .limit(3)
         .all()
     )

-    first_record, second_record, balance_error = account_records[0], account_records[1], False
+    first_record, second_record, balance_error = (
+        account_records[0],
+        account_records[1],
+        False,
+    )
     second_balance = first_record.bank_balance - first_record.currency_value
     if second_balance != second_record.bank_balance:
         balance_error = True

-    rows = [{
-        "date": record.bank_date, "comment": record.bank_comment, "currency": record.currency_value,
-    } for record in account_records]
+    list_of_rows = list()
+    for record in account_records:
+        list_of_rows.append(
+            [record.bank_date, record.process_comment, f"{record.currency_value:.4f}"]
+        )

     send_to = "karatay@mehmetkaratay.com.tr"
     html_template = render_email_template(
         headers=["Ulaştığı Tarih", "Banka Transaksiyonu Ek Bilgi", "Aktarım Değeri"],
-        rows=rows,
+        rows=list_of_rows,
+        balance_error=balance_error,
+        bank_balance=account_records[0].bank_balance,
     )
+    exit()
     send_email_to_given_address(send_to=send_to, html_template=html_template)


 if __name__ == "__main__":
@@ -12,8 +12,6 @@ def check_any_written_stage_in_mongo_database(mongo_provider) -> bool:
     return mongo_provider.find_one(filter_query={"stage": "written", "send": None})
-
-


 if __name__ == "__main__":

     while True:
@@ -28,6 +28,9 @@
 <body>
     <h1>Günaydın, Admin</h1>
     <br>
+    <p>Banka Kayıtları : {{today}} </p>
+    <p><b>Son Bakiye : {{bank_balance}} </b></p>
+    <p><b>{{"Status : İkinci Bakiye Hatalı" if balance_error else "Status :OK"}}</b></p>
     <table border="1">
         <thead>
             <tr>
@@ -46,5 +49,6 @@
             {% endfor %}
         </tbody>
     </table>
+    <p>Teşekkür ederiz,<br>Evyos Yönetim<br>Saygılarımızla</p>
 </body>
 </html>
@@ -16,7 +16,7 @@ def collect_parsed_data_from_mongo_database(mongo_provider) -> list:


 def write_parsed_data_to_account_records(
-    file: str, data_dict: dict, collection_name: str, mongo_provider
+    file: str, data_dict: dict, collection_name: str, mongo_provider
 ):
     db_session = AccountRecords.new_session()
     data_dict["bank_balance"] = data_dict.pop("balance")
@@ -28,7 +28,9 @@ def write_parsed_data_to_account_records(
     data_dict["bank_date_d"] = bank_date.day
     data_dict["bank_date_y"] = bank_date.year
     data_dict["bank_date"] = str(bank_date)
-    if build_iban := BuildIbans.filter_by_one(iban=data_dict["iban"], db=db_session).data:
+    if build_iban := BuildIbans.filter_by_one(
+        iban=data_dict["iban"], db=db_session
+    ).data:
         data_dict.update(
             {
                 "build_id": build_iban.build_id,
@@ -36,7 +38,7 @@ def write_parsed_data_to_account_records(
             }
         )
     if found_record := AccountRecords.filter_one(
-        AccountRecords.bank_date == data_dict["bank_date"],
+        AccountRecords.bank_date == data_dict["bank_date"],
         AccountRecords.iban == data_dict["iban"],
         AccountRecords.bank_reference_code == data_dict["bank_reference_code"],
         AccountRecords.bank_balance == data_dict["bank_balance"],
@@ -48,12 +50,13 @@ def write_parsed_data_to_account_records(
         new_account_record.is_confirmed = True
         new_account_record.save(db=db_session)
         mongo_provider.update_one(
-            filter_query={"filename": file}, update_data={"$set": {"stage": "written"}},
+            filter_query={"filename": file},
+            update_data={"$set": {"stage": "written"}},
         )


 if __name__ == "__main__":
-    print('Writer Service is running')
+    print("Writer Service is running")
     while True:
         with MongoProvider.mongo_client() as mongo_client:
             provider = MongoProvider(
@@ -71,6 +74,6 @@ if __name__ == "__main__":
                     data_dict=parsed_data,
                     collection_name=provider.collection.name,
                     mongo_provider=provider,
-                    file=file_name
+                    file=file_name,
                 )
     time.sleep(60)
@@ -15,4 +15,3 @@ class BankReceive(BaseModel):
     process_type: str
     process_comment: str
     bank_reference_code: str
-
@@ -1,4 +1,5 @@
 """Utility functions for getting line numbers and file locations."""
+
 from inspect import currentframe, getframeinfo, stack


@@ -1,5 +1,3 @@
-
-
 class SelectorsBase:
     @classmethod
     def add_confirmed_filter(cls, first_table, second_table) -> tuple:
@@ -1,5 +1,6 @@
 import datetime

+
 class DefaultApiConfig:
     app: str
     host: str
@@ -18,11 +19,12 @@ class DefaultApiConfig:
     }

-

 class ApiConfigs:
     """Base class for all configurations."""

-    SECRET: str = "59f871a2d2194e96adb36b279d2cc21059f871a2d2194e96adb36b279d2cc21059f871a2d2194e96adb36b279d2cc210s"
+    SECRET: str = (
+        "59f871a2d2194e96adb36b279d2cc21059f871a2d2194e96adb36b279d2cc21059f871a2d2194e96adb36b279d2cc210s"
+    )
     ACCESS_TIME: int = 432000
     REFRESH_TIME: int = 864000
     DEFAULT_SIZE: int = 10
@@ -1,5 +1,3 @@
-
-
 class HostConfig:

     MAIN_HOST: str = "10.10.2.36"  # http://10.10.2.36
@@ -10,7 +8,9 @@ class MainConfig:

     APP_NAME: str = "evyos-web-api-gateway"
     TITLE: str = "WAG API Web Api Gateway"
-    DESCRIPTION: str = "This api is serves as web api gateway only to evyos web services."
+    DESCRIPTION: str = (
+        "This api is serves as web api gateway only to evyos web services."
+    )
     APP_URL: str = "https://www.wag.eys.gen.tr"

     DATETIME_FORMAT: str = "YYYY-MM-DD HH:mm:ss ZZ"
@@ -7,4 +7,6 @@ class MongoConfig:
     DATABASE_NAME: str = "mongo_database"
     HOST: str = HostConfig.MAIN_HOST
     PORT: str = 11777
-    URL: str = f"mongodb://{USER_NAME}:{PASSWORD}@{HOST}:{PORT}/{DATABASE_NAME}?retryWrites=true&w=majority"
+    URL: str = (
+        f"mongodb://{USER_NAME}:{PASSWORD}@{HOST}:{PORT}/{DATABASE_NAME}?retryWrites=true&w=majority"
+    )
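A small sketch of how a URL assembled this way is typically consumed with pymongo; the credentials and database name below are placeholders standing in for USER_NAME, PASSWORD and the values above.

from pymongo import MongoClient

url = "mongodb://user:secret@10.10.2.36:11777/mongo_database?retryWrites=true&w=majority"
client = MongoClient(url)
print(client.list_database_names())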
@@ -27,7 +27,7 @@ find_many_result : [
 """
 if __name__ == "__main__":
     # Create a new instance of the EmptyRunner class
-    print('URL', MongoConfig.URL)
+    print("URL", MongoConfig.URL)
     with MongoProvider.mongo_client() as client:
         current_date = str(arrow.now().date())
         mongo_provider = MongoProvider(
@@ -36,7 +36,7 @@ if __name__ == "__main__":
             storage_reason=["Collected", "Data", str(current_date)],
         )
         uu_id_key = str(uuid4())
-        print('mongo_provider', mongo_provider)
+        print("mongo_provider", mongo_provider)
         insert_one_result = mongo_provider.insert_one(
             document={
                 "date": current_date,
@@ -46,22 +46,26 @@ if __name__ == "__main__":
         )
         # str(insert_one_result.inserted_id)
         # insert_one_result.acknowledged
-        print('uu_id_key', uu_id_key)
-        print('insert_one_result', insert_one_result)
+        print("uu_id_key", uu_id_key)
+        print("insert_one_result", insert_one_result)
         update_one_result = mongo_provider.update_one(
             filter_query={"uuid": uu_id_key},
             update_data={"$set": {"added": "Added data", "data": "Updated Test data"}},
         )
-        print('update_one_result', update_one_result)
+        print("update_one_result", update_one_result)

-        find_many_result = mongo_provider.find_many(filter_query={"data": "Updated Test data"})
-        print('find_many_result', find_many_result)
+        find_many_result = mongo_provider.find_many(
+            filter_query={"data": "Updated Test data"}
+        )
+        print("find_many_result", find_many_result)

         find_one_result = mongo_provider.find_one(filter_query={"added": "Added data"})
-        print('find_one_result', find_one_result)
+        print("find_one_result", find_one_result)

         # Delete the document
         delete_result = mongo_provider.delete_one(filter_query={"uuid": uu_id_key})

         # Delete multiple documents
-        delete_many_result = mongo_provider.delete_many(filter_query={"added": "Added data"})
+        delete_many_result = mongo_provider.delete_many(
+            filter_query={"added": "Added data"}
+        )
@@ -1,5 +1,8 @@
 from Schemas import AddressStreet, Users, Duty
-from Services.PostgresService.controllers.pagination_controllers import Pagination, PaginationConfig
+from Services.PostgresService.controllers.pagination_controllers import (
+    Pagination,
+    PaginationConfig,
+)


 if __name__ == "__main__":
@@ -40,6 +43,6 @@ if __name__ == "__main__":
         duty_description=1,
         db=db,
     )
-    print('created_duty wrong', created_duty)
+    print("created_duty wrong", created_duty)
     created_duty.save(db=db)
-    print('created_duty', created_duty)
+    print("created_duty", created_duty)
@@ -228,6 +228,7 @@ class Build(CrudCollection, SelectActionWithEmployee):
         ).data:
             return management_room
         return None

+    #
     # @classmethod
     # def create_action(cls, data: InsertBuild, token):
@@ -27,7 +27,6 @@ from sqlalchemy.orm import Mapped, mapped_column, relationship
 from Services.PostgresService.controllers.mixin_controllers import CrudCollection

-

 class BuildDecisionBook(CrudCollection):
     """
     Builds class based on declarative_base and BaseMixin via session
@@ -12,6 +12,7 @@ from sqlalchemy import (
 from sqlalchemy.orm import mapped_column, relationship, Mapped
+
 from Commons.select_functions import SelectAction

 # from ApiLayers.ApiValidations.Custom.token_objects import EmployeeTokenObject
 # from ApiLayers.ApiValidations.Request import (
 #     InsertCompany,
@@ -34,7 +34,8 @@ class UsersTokens(CrudCollection):
     token: Mapped[str] = mapped_column(String, server_default="")
     domain: Mapped[str] = mapped_column(String, server_default="")
     expires_at: Mapped[TIMESTAMP] = mapped_column(
-        TIMESTAMP(timezone=True), default=str(arrow.now().shift(days=3)),
+        TIMESTAMP(timezone=True),
+        default=str(arrow.now().shift(days=3)),
     )

     # users = relationship("Users", back_populates="tokens", foreign_keys=[user_id])
@@ -146,6 +147,7 @@ class Users(CrudCollection, SelectAction):
             )
         )
     )

+    #
     # @classmethod
     # def create_action(cls, create_user: InsertUsers, token_dict):
@@ -359,6 +361,7 @@ class People(CrudCollection, SelectAction):
         if self.middle_name:
             return f"{self.firstname} {self.middle_name} {self.surname}"
         return f"{self.firstname} {self.surname}"

+    #
     # @classmethod
     # def create_action(cls, data: InsertPerson, token):
@@ -1,9 +1,5 @@
-
-
 class EmailConfig:
     EMAIL_HOST: str = "10.10.2.34"
     EMAIL_USERNAME: str = "karatay@mehmetkaratay.com.tr"
     EMAIL_PASSWORD: str = "system"
     EMAIL_PORT: int = 587
-
-
@@ -1,5 +1,6 @@
 from typing import Any, Dict, List, Optional
 from functools import wraps
+
 # from pymongo.errors import (
 #     ConnectionFailure,
 #     OperationFailure,
@@ -22,4 +23,5 @@ def mongo_error_wrapper(func):
         :return:
         """
         return func(*args, **kwargs)
+
     return wrapper
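For context, roughly how this decorator is applied in the mixin classes further below; the class docstring and the find_one body are illustrative (only their call sites appear in this diff), modelled on the delete_many method that the diff does show.

class MongoFindMixin(MongoBase):
    """Mixin for MongoDB read operations."""

    @mongo_error_wrapper
    def find_one(self, filter_query: Dict[str, Any]):
        # Illustrative body: delegate to the underlying pymongo collection.
        return self.collection.find_one(filter_query)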
@@ -10,11 +10,13 @@ from Services.MongoService.handlers import mongo_error_wrapper

 class MongoBase:
     """Base class for MongoDB connection and operations."""
+
     collection: Collection = None


 class MongoErrorHandler:
     """Error handler for MongoDB operations."""
+
     ...


@@ -63,7 +65,6 @@ class MongoFindMixin(MongoBase):


 class MongoUpdateMixin(MongoBase):
-
     """Mixin for MongoDB update operations."""

     @mongo_error_wrapper
@@ -100,9 +101,10 @@ class MongoDeleteMixin(MongoBase):
         """Delete multiple documents from the collection."""
         return self.collection.delete_many(filter_query)

-class MongoAggregateMixin(MongoBase):
+
+class MongoAggregateMixin(MongoBase):
     """Mixin for MongoDB aggregation operations."""

     @mongo_error_wrapper
     def aggregate(self, collection: Collection, pipeline: List[Dict[str, Any]]):
         """Execute an aggregation pipeline on the collection."""
@@ -110,7 +112,6 @@ class MongoAggregateMixin(MongoBase):
         return result

-

 class MongoProvider(
     MongoUpdateMixin,
     MongoInsertMixin,
@@ -124,9 +125,7 @@ class MongoProvider(
     managing collections based on company UUID and storage reason.
     """

-    def __init__(
-        self, client: MongoClient, database: str, storage_reason: list[str]
-    ):
+    def __init__(self, client: MongoClient, database: str, storage_reason: list[str]):
         """Initialize MongoDB actions with client and collection info.

         Args:
@@ -172,7 +171,9 @@ class MongoProvider(
         collection_name = ""
         for each_storage_reason in storage_name_list:
             if self.delimiter in str(each_storage_reason):
-                raise ValueError(f"Storage reason cannot contain delimiter : {self.delimiter}")
+                raise ValueError(
+                    f"Storage reason cannot contain delimiter : {self.delimiter}"
+                )
             collection_name += f"{self.delimiter}{each_storage_reason}"
         collection_name = collection_name[1:]
         self._collection = self._client[self._database][collection_name]
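The loop above builds the collection name by joining the storage_reason parts with the provider's delimiter; a standalone sketch of the same logic, assuming "_" as the delimiter (the actual delimiter value is not shown in this diff):

def build_collection_name(storage_name_list, delimiter="_"):
    # Mirrors the loop above: reject parts that contain the delimiter, then join.
    name = ""
    for part in storage_name_list:
        if delimiter in str(part):
            raise ValueError(f"Storage reason cannot contain delimiter : {delimiter}")
        name += f"{delimiter}{part}"
    return name[1:]

print(build_collection_name(["Collected", "Data", "2024-01-02"]))  # Collected_Data_2024-01-02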
@@ -15,6 +15,7 @@ class Credentials(BaseModel):
     """
     Class to store user credentials.
     """
+
     person_id: int
     person_name: str
     full_name: Optional[str] = None
@@ -24,6 +25,7 @@ class MetaData:
     """
     Class to store metadata for a query.
    """
+
     created: bool = False
     updated: bool = False

@@ -43,7 +45,9 @@ class CRUDModel:
         Args:
             record_created: Record that created or updated
         """
-        if getattr(cls.creds, "person_id", None) and getattr(cls.creds, "person_name", None):
+        if getattr(cls.creds, "person_id", None) and getattr(
+            cls.creds, "person_name", None
+        ):
             record_created.created_by_id = cls.creds.person_id
             record_created.created_by = cls.creds.person_name
             return
@@ -72,7 +76,8 @@ class CRUDModel:

         # Search for existing record
         query = db.query(cls).filter(
-            cls.expiry_ends > str(arrow.now()), cls.expiry_starts <= str(arrow.now()),
+            cls.expiry_ends > str(arrow.now()),
+            cls.expiry_starts <= str(arrow.now()),
         )

         for key, value in kwargs.items():
@@ -119,7 +124,7 @@ class CRUDModel:
         if str(key[-5:]).lower() == "uu_id":  # Special handling for UUID fields
             return True, str(val)

-        if key_:  # Handle typed fields
+        if key_:  # Handle typed fields
             if key_ == Mapped[int]:
                 return True, int(val)
             elif key_ == Mapped[bool]:
@@ -130,7 +135,7 @@ class CRUDModel:
                 return True, str(arrow.get(str(val)).format("YYYY-MM-DD HH:mm:ss ZZ"))
             elif key_ == Mapped[str]:
                 return True, str(val)
-        else:  # Handle based on Python types
+        else:  # Handle based on Python types
             if isinstance(val, datetime.datetime):
                 return True, str(arrow.get(str(val)).format("YYYY-MM-DD HH:mm:ss ZZ"))
             elif isinstance(val, bool):
@@ -146,20 +151,24 @@ class CRUDModel:

         return False, None

-    def get_dict(self, exclude_list: Optional[list[InstrumentedAttribute]] = None) -> Dict[str, Any]:
+    def get_dict(
+        self, exclude_list: Optional[list[InstrumentedAttribute]] = None
+    ) -> Dict[str, Any]:
         """
         Convert model instance to dictionary with customizable fields.
         Returns:
             Dictionary representation of the model
             Dictionary returns only UUID fields and fields that are not in exclude_list
         """
-        return_dict: Dict[str, Any] = {}  # Handle default field selection
+        return_dict: Dict[str, Any] = {}  # Handle default field selection
         exclude_list = exclude_list or []
         exclude_list = [exclude_arg.key for exclude_arg in exclude_list]

         columns_set = set(self.columns)
         columns_list = set([col for col in list(columns_set) if str(col)[-2:] != "id"])
-        columns_extend = set(col for col in list(columns_set) if str(col)[-5:].lower() == "uu_id")
+        columns_extend = set(
+            col for col in list(columns_set) if str(col)[-5:].lower() == "uu_id"
+        )
         columns_list = set(columns_list) | set(columns_extend)
         columns_list = list(set(columns_list) - set(exclude_list))

@@ -173,7 +182,10 @@ class CRUDModel:

     @classmethod
     def find_or_create(
-        cls, db: Session, exclude_args: Optional[list[InstrumentedAttribute]] = None, **kwargs
+        cls,
+        db: Session,
+        exclude_args: Optional[list[InstrumentedAttribute]] = None,
+        **kwargs,
     ):
         """
         Find an existing record matching the criteria or create a new one.
@@ -188,7 +200,8 @@ class CRUDModel:
         """
         # Search for existing record
         query = db.query(cls).filter(
-            cls.expiry_ends > str(arrow.now()), cls.expiry_starts <= str(arrow.now()),
+            cls.expiry_ends > str(arrow.now()),
+            cls.expiry_starts <= str(arrow.now()),
         )
         exclude_args = exclude_args or []
         exclude_args = [exclude_arg.key for exclude_arg in exclude_args]
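The expiry filter reformatted above limits matches to records whose validity window covers the current time; a worked illustration of the same comparison (it relies, as the query does, on the stored timestamps being strings that sort chronologically):

import arrow

now = str(arrow.now())
record = {
    "expiry_starts": "2024-01-01T00:00:00+00:00",  # illustrative values
    "expiry_ends": "2030-01-01T00:00:00+00:00",
}
is_active = record["expiry_ends"] > now and record["expiry_starts"] <= now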
@@ -231,7 +244,7 @@ class CRUDModel:
             db.flush()
             self.meta_data.updated = True
         except Exception as e:
-            print('Error:', e)
+            print("Error:", e)
             self.meta_data.updated = False
             db.rollback()
         return self
@@ -8,8 +8,7 @@ from Services.PostgresService.controllers.response_controllers import PostgresRe
 from Configs.api import ApiConfigs


-class ListOptions:
-    ...
+class ListOptions: ...


 class PaginationConfig(BaseModel):
@@ -164,25 +163,29 @@ class PaginationResult:
                 "Order by fields and order types must have the same length."
             )
         order_criteria = zip(self.order_by, self.order_type)
-        print('order_criteria', order_criteria)
+        print("order_criteria", order_criteria)
         if not self._data.data:
             return self._core_query

         for field, direction in order_criteria:
-            print('field', field, direction)
+            print("field", field, direction)
             columns = self._data.data[0].filterable_attributes
-            print('columns', columns)
+            print("columns", columns)
             if field in columns:
                 if direction.lower().startswith("d"):
                     self._core_query = self._core_query.order_by(
                         desc(
-                            getattr(self._core_query.column_descriptions[0]["entity"], field)
+                            getattr(
+                                self._core_query.column_descriptions[0]["entity"], field
+                            )
                         )
                     )
                 else:
                     self._core_query = self._core_query.order_by(
                         asc(
-                            getattr(self._core_query.column_descriptions[0]["entity"], field)
+                            getattr(
+                                self._core_query.column_descriptions[0]["entity"], field
+                            )
                         )
                     )
         return self._core_query
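A compact standalone sketch of the dynamic-ordering idea used above, written against a generic SQLAlchemy query; the model and the call at the end are placeholders, not code from this repository.

from sqlalchemy import asc, desc

def apply_ordering(query, model, order_by, order_type):
    # Pair each field with its direction, as the zip() above does.
    for field, direction in zip(order_by, order_type):
        if not hasattr(model, field):
            continue
        column = getattr(model, field)
        query = query.order_by(
            desc(column) if direction.lower().startswith("d") else asc(column)
        )
    return query

# e.g. apply_ordering(session.query(AccountRecords), AccountRecords, ["bank_date"], ["desc"])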
@@ -64,9 +64,7 @@ def run_migrations_online() -> None:
     )

     with connectable.connect() as connection:
-        context.configure(
-            connection=connection, target_metadata=target_metadata
-        )
+        context.configure(connection=connection, target_metadata=target_metadata)

         with context.begin_transaction():
             context.run_migrations()