migrator functions updated

This commit is contained in:
2024-11-11 22:23:07 +03:00
parent f6135ced5f
commit ffb85a62f6
56 changed files with 567 additions and 485 deletions

View File

@@ -4,14 +4,18 @@ from service_app_test.api_configs import BothAPIS
def migrate_build(requester: BothAPIS):
    """No-op stub: build migration has not been implemented yet."""
    return None
def migrate_build_area(requester: BothAPIS):
    """No-op stub: build-area migration has not been implemented yet."""
    return None
def migrate_build_part(requester: BothAPIS):
    """No-op stub: build-part migration has not been implemented yet."""
    return None
def migrate_build_iban(requester: BothAPIS):
    """No-op stub: build-IBAN migration has not been implemented yet."""
    return None
def migrate_build_living_space(requester: BothAPIS):
    """No-op stub: living-space migration has not been implemented yet."""
    return None

View File

def migrate_company(requester: BothAPIS):
    """Copy companies from the WAG API into the local API.

    Lists companies from the WAG `/company/list` endpoint and re-creates each
    one through the local `/company/create` endpoint, after normalising the
    record (forcing active/confirmed flags, dropping null-ish fields and the
    source `uu_id`).

    NOTE(review): this span was a rendered diff with old/new lines interleaved
    (duplicate prints and a duplicated ``data=`` keyword argument, which is a
    syntax error); this body is the reconstructed post-commit version.
    Only the first page of up to 100 companies is fetched — TODO confirm one
    page is enough for the migration.
    """
    filter_object = FilterObject(
        page=1,
        size=100,
        order_field="id",
        order_type="asc",
    )
    response = requester.wag_api.post(
        endpoint="/company/list",
        data=filter_object.dump(),
    )
    response_json = response.json()
    print("response_json", response_json)
    response_datas = response_json["data"]
    for response_data in response_datas:
        # Force every migrated company into a clean, active, confirmed state.
        response_data["active"] = True
        response_data["deleted"] = False
        response_data["is_confirmed"] = True
        # Cleared here so the None-filter below drops them entirely.
        response_data["expiry_starts"] = None
        response_data["expiry_ends"] = None
        # Drop None (and "None"-stringified) fields so the create endpoint
        # applies its own defaults.
        new_response_data = {
            key: value
            for key, value in response_data.items()
            if value is not None and str(value) != "None"
        }
        # The local API generates its own uu_id; never forward the source one.
        new_response_data.pop("uu_id", None)
        print("new_response_data", new_response_data)
        new_response_data["company_tag"] = response_data["formal_name"]
        response = requester.local_api.post(
            endpoint="/company/create",
            data=new_response_data,
        )
        print("response", response.text)
    return

View File

@@ -3,4 +3,4 @@ from service_app_test.api_configs import BothAPIS
def migrate_people(requester: BothAPIS):
    """No-op stub: people migration has not been implemented yet.

    NOTE(review): the diff rendering showed two back-to-back ``pass``
    statements (old/new whitespace variants); collapsed to one.
    """
    # TODO: migrate old people data from the WAG API into the local API.
    pass

View File

@@ -12,11 +12,13 @@ system_default = dict(
def read_json_file(json_directory, json_file):
    """Load and parse ``<json_directory>/<json_file>.json``.

    Args:
        json_directory: Directory that contains the JSON file.
        json_file: File name *without* the ``.json`` extension.

    Returns:
        The parsed JSON content (typically a dict or list).

    Raises:
        FileNotFoundError: If the file does not exist.
        json.JSONDecodeError: If the file is not valid JSON.
    """
    # Distinct handle name: the original bound the file object to the
    # `json_file` parameter name, shadowing it inside the `with` body.
    with open(f"{json_directory}/{json_file}.json", "r", encoding="utf-8") as fh:
        return loads(fh.read())
# NOTE(review): rendered diff hunk — old and new variants of the same lines
# appear back to back, and the lines between the two hunks (the on-conflict
# branch guarded by `on_conflict_constraints`) are NOT visible here, so this
# span cannot be safely rewritten as a whole.
# Old one-line signature (removed by the commit):
def bulk_insert_alchemy_postgresql(active_session, table, data_list, on_conflict_constraints = None):
# New signature, reformatted onto three lines (black style):
def bulk_insert_alchemy_postgresql(
active_session, table, data_list, on_conflict_constraints=None
):
# Function-local import keeps the postgresql dialect off the module's import path.
from sqlalchemy.dialects.postgresql import insert
# Wall-clock timer for the summary print at the bottom.
st = perf_counter()
@@ -29,11 +31,10 @@ def bulk_insert_alchemy_postgresql(active_session, table, data_list, on_conflict
)
)
else:
# Old multi-line form of the plain (no on-conflict) insert ...
session_execute = active_session.execute(
insert(table)
.values(data_list)
)
# ... collapsed to a single line in the new version:
session_execute = active_session.execute(insert(table).values(data_list))
# Number of rows the INSERT affected, for the log line below.
count_row = session_execute.rowcount
# Old single-quoted one-line print, replaced by the wrapped double-quoted form:
print(f'Table : {table.__name__} count_row : {count_row} : time took : {round(perf_counter() - st, 2)} seconds')
print(
f"Table : {table.__name__} count_row : {count_row} : time took : {round(perf_counter() - st, 2)} seconds"
)
# Commit then flush the session so inserted rows are persisted.
# NOTE(review): flush-after-commit is typically a no-op — confirm intent.
active_session.commit()
active_session.flush()
active_session.flush()

View File

@@ -17,14 +17,14 @@ login_data = {
# NOTE(review): rendered diff fragment — the `login_data = {` opening line
# lives in the hunk header above, and old/new variants are interleaved below.
"access_key": "karatay.berkay.sup@evyos.com.tr",
"password": "string",
"remember_me": False,
# Old line (removed by the commit): a hard-coded credential token. Good
# removal — secrets must not live in source control; consider rotating it.
"password_token": "z5nPguDu6PrY_94KlTGxGBCMcK6rZcDl5AGqO5aHWY7TcQF8LJGYikMQs9labvqdz5yM7vE_f8Zq_vqp00o3EnWwWCWhel-EsMcAujBKNRjV3jC_4xk0_1r_unIYogWAjz_spZipX6pncsch2ngvv0dpx4lHSiZ5QrDTA1RefQ",
# New line: token cleared.
"password_token": ""
}
login_data_wag = {
"domain": "evyos.com.tr",
"access_key": "karatay.berkay.sup@evyos.com.tr",
"password": "string",
"remember_me": False,
# Old line (no trailing comma) vs new line (trailing comma added):
"password_token": ""
"password_token": "",
}
wag_api = RequestToApi()
@@ -36,7 +36,7 @@ wag_api.selected_object = wag_api.login_via_email_and_password(
local_api = RequestToApi()
local_api.overwrite_base_url(base_url=LocalAPI.base_url)
local_api.selected_object = local_api.login_via_email_and_password(
# Behavior change in this commit: the local login now validates the password
# (old False vs new True) — presumably because the token above was cleared.
login_data=login_data, is_password_valid=False
login_data=login_data, is_password_valid=True
)
both_apis = BothAPIS()
@@ -45,6 +45,7 @@ both_apis.local_api = local_api
migrate_company(requester=both_apis)
migrate_people(requester=both_apis)
# exit() gates the run here: every migrate_build* call below is unreachable —
# presumably a deliberate stop while those migrators are still stubs; confirm.
exit()
migrate_build(requester=both_apis)
migrate_build_area(requester=both_apis)
migrate_build_part(requester=both_apis)