import glob
import os
import shutil
import json

import pandas
import datetime

from unidecode import unidecode
from logging import getLogger, basicConfig, INFO
from config_isbank import Config

logger = getLogger(__name__)
# NOTE: filename=__name__ writes the log to a file named after this module (no extension).
basicConfig(filename=__name__, level=INFO)
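

# Config (imported from config_isbank) is expected to expose three path settings
# used below. A minimal sketch of the assumed shape: the attribute names come from
# this module, the example values are placeholders, not the real configuration.
#
#     class Config:
#         INCOMING_PATH = "incoming"    # directory scanned for new .xls/.xlsx files
#         PARSED_PATH = "parsed.json"   # JSON file that accumulates parsed rows
#         COMPLETED_PATH = "completed"  # directory processed workbooks are moved to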


def read_json_file(json_file_path):
    """Return the parsed contents of a JSON file, or an empty dict if it does not exist."""
    if os.path.exists(json_file_path):
        with open(json_file_path, "r") as json_file:
            return json.load(json_file)
    return {}


def write_json_file(json_file_path, data):
    """Write ``data`` to the given path as pretty-printed JSON."""
    with open(json_file_path, "w") as json_file:
        json.dump(data, json_file, indent=4)


def parser_service():
    """Parse incoming bank statement workbooks into the parsed JSON store and
    move each processed file to the completed directory."""
    completed = Config.COMPLETED_PATH
    parsed = Config.PARSED_PATH
    incoming = Config.INCOMING_PATH
    pandas.set_option("display.max_columns", None)
    pandas.set_option("display.width", 1200)

    current_directory = os.getcwd()
    # current_directory = "/home/berkay/git-evyos/wag-management-api"
    absolute_path = os.path.join(current_directory, incoming)

    # Pick up both .xls and .xlsx workbooks from the incoming directory.
    excel_files = glob.glob(os.path.join(absolute_path, "*.xls*"))
    for file_path in excel_files:
        xl_name = os.path.basename(file_path)  # workbook name, used as the JSON key
        xl_file = pandas.read_excel(file_path)
        xl_frame = pandas.DataFrame(xl_file)
        iban = ""
        for row in xl_frame.itertuples():
            # Rows mentioning "IBAN" in the third column carry the account IBAN
            # in the fifth column; remember it for the transaction rows below.
            if "IBAN" in str(row[3]).upper():
                iban = str(row[5]).replace(" ", "")
            # Skip rows whose first two columns are empty (headers and blanks).
            if not str(row[1]) == "nan" and not str(row[2]) == "nan":
                # Transaction rows carry a "dd/mm/yyyy-HH:MM:SS" date in column 1.
                if len(str(row[1]).split("/")) > 2:
                    insert_dict = dict(
                        iban=str(iban),
                        bank_date=str(
                            datetime.datetime.strptime(str(row[1]), "%d/%m/%Y-%H:%M:%S")
                        ),
                        channel_branch=unidecode(str(row[3])),
                        currency_value=(
                            float(str(row[4]).replace(",", "")) if row[4] else 0
                        ),
                        balance=float(str(row[5]).replace(",", "")) if row[5] else 0,
                        additional_balance=(
                            float(str(row[6]).replace(",", "")) if row[6] else 0
                        ),
                        process_name=str(row[7]),
                        process_type=unidecode(str(row[8])),
                        process_comment=unidecode(str(row[9])),
                        bank_reference_code=str(row[15]),
                    )
                    logger.info(f"Insert Dict: {insert_dict}")
                    existing_data = read_json_file(parsed)
                    if xl_name in existing_data:  # Check if the key already exists
                        if insert_dict not in existing_data[xl_name]:
                            existing_data[xl_name].append(insert_dict)
                    else:
                        existing_data[xl_name] = [
                            insert_dict
                        ]  # Update the JSON data with the new key-value pair
                    write_json_file(
                        parsed, existing_data
                    )  # Write the updated data back to the JSON file

        # Archive the processed workbook so it is not parsed again.
        shutil.move(
            file_path,
            os.path.join(current_directory, completed, os.path.basename(file_path)),
        )
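

# For reference, the parsed JSON file accumulates one list of row dicts per
# workbook, keyed by file name. A rough, illustrative shape only (placeholder
# values; field names are those of insert_dict above):
#
#     {
#         "statement.xlsx": [
#             {"iban": "...", "bank_date": "...", "channel_branch": "...",
#              "currency_value": 0.0, "balance": 0.0, ...}
#         ]
#     }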


# Disabled entry point: runs the parser periodically. If re-enabled, it needs
# `import time` and calls parser_service(), the function defined above.
# if __name__ == "__main__":
#     parser_service()
#     time.sleep(int(os.getenv("PARSER_SEQUENCE", 60)))
#     while True:
#         parser_service()
#         time.sleep(int(os.getenv("PARSER_SEQUENCE", 60)))