From 31d8e68673ce643cbdd68292a3027bf49095142f Mon Sep 17 00:00:00 2001 From: saratomaz Date: Fri, 22 Nov 2024 15:47:15 +0000 Subject: [PATCH] Restructure and organize project files --- .buildkite/db_sync_full_sync.yml | 6 +- db_sync_tests/utils/aws_db_utils.py | 278 ------------------ flake.nix | 2 +- .../scripts/db-sync-start.sh | 0 .../scripts/postgres-start.sh | 0 .../tests/__init__.py | 0 .../tests}/db_tables_ddl.sql | 0 .../tests/full_sync_from_clean_state.py | 4 +- .../tests/iohk_snapshot_restoration.py | 4 +- .../tests/local_snapshot_restoration.py | 2 +- sync_tests/{ => tests}/node_sync_test.py | 4 +- .../node_write_mainnet_tx_count_per_epoch.py | 0 .../node_write_sync_values_to_db.py | 0 .../tests/snapshot_creation.py | 4 +- .../utils/__init__.py | 0 sync_tests/{ => utils}/aws_db_utils.py | 50 +++- sync_tests/{ => utils}/blockfrost_utils.py | 0 sync_tests/{ => utils}/explorer_utils.py | 0 sync_tests/{ => utils}/gitpython_utils.py | 0 sync_tests/{ => utils}/utils.py | 0 .../utils/utils_db_sync.py | 4 +- 21 files changed, 64 insertions(+), 294 deletions(-) delete mode 100755 db_sync_tests/utils/aws_db_utils.py rename {db_sync_tests => sync_tests}/scripts/db-sync-start.sh (100%) rename {db_sync_tests => sync_tests}/scripts/postgres-start.sh (100%) rename {db_sync_tests => sync_tests}/tests/__init__.py (100%) rename {db_sync_tests/utils => sync_tests/tests}/db_tables_ddl.sql (100%) rename {db_sync_tests => sync_tests}/tests/full_sync_from_clean_state.py (98%) rename {db_sync_tests => sync_tests}/tests/iohk_snapshot_restoration.py (98%) rename {db_sync_tests => sync_tests}/tests/local_snapshot_restoration.py (99%) rename sync_tests/{ => tests}/node_sync_test.py (99%) rename sync_tests/{ => tests}/node_write_mainnet_tx_count_per_epoch.py (100%) rename sync_tests/{ => tests}/node_write_sync_values_to_db.py (100%) rename {db_sync_tests => sync_tests}/tests/snapshot_creation.py (96%) rename {db_sync_tests => sync_tests}/utils/__init__.py (100%) rename sync_tests/{ => utils}/aws_db_utils.py (85%) rename sync_tests/{ => utils}/blockfrost_utils.py (100%) rename sync_tests/{ => utils}/explorer_utils.py (100%) rename sync_tests/{ => utils}/gitpython_utils.py (100%) rename sync_tests/{ => utils}/utils.py (100%) rename db_sync_tests/utils/utils.py => sync_tests/utils/utils_db_sync.py (99%) diff --git a/.buildkite/db_sync_full_sync.yml b/.buildkite/db_sync_full_sync.yml index efa09723..0dc127ab 100644 --- a/.buildkite/db_sync_full_sync.yml +++ b/.buildkite/db_sync_full_sync.yml @@ -1,9 +1,9 @@ steps: - label: ':drum_with_drumsticks: Full sync test :drum_with_drumsticks:' commands: - - nix develop --accept-flake-config .#python --command python ./db_sync_tests/tests/full_sync_from_clean_state.py -nv "${node_version}" -dv "${db_sync_version}" -dsa "${db_sync_start_arguments}" -e "${environment}" - - nix develop --accept-flake-config .#python --command python ./db_sync_tests/tests/snapshot_creation.py -dv "${db_sync_version}" -e "${environment}" -rosc "${run_only_sync_test}" - - nix develop --accept-flake-config .#python --command python ./db_sync_tests/tests/local_snapshot_restoration.py -nv "${node_version}" -dv "${db_sync_version}" -dsa "${db_sync_start_arguments}" -e "${environment}" -rosc "${run_only_sync_test}" + - nix develop --accept-flake-config .#python --command python ./sync_tests/tests/full_sync_from_clean_state.py -nv "${node_version}" -dv "${db_sync_version}" -dsa "${db_sync_start_arguments}" -e "${environment}" + - nix develop --accept-flake-config .#python --command python 
./sync_tests/tests/snapshot_creation.py -dv "${db_sync_version}" -e "${environment}" -rosc "${run_only_sync_test}" + - nix develop --accept-flake-config .#python --command python ./sync_tests/tests/local_snapshot_restoration.py -nv "${node_version}" -dv "${db_sync_version}" -dsa "${db_sync_start_arguments}" -e "${environment}" -rosc "${run_only_sync_test}" timeout_in_minutes: 43200 agents: system: x86_64-linux diff --git a/db_sync_tests/utils/aws_db_utils.py b/db_sync_tests/utils/aws_db_utils.py deleted file mode 100755 index 3318b593..00000000 --- a/db_sync_tests/utils/aws_db_utils.py +++ /dev/null @@ -1,278 +0,0 @@ -import os - -import pymysql.cursors -import pandas as pd - - -def create_connection(): - conn = None - try: - conn = pymysql.connect(host=os.environ["AWS_DB_HOSTNAME"], - user=os.environ["AWS_DB_USERNAME"], - password=os.environ["AWS_DB_PASS"], - db=os.environ["AWS_DB_NAME"], - ) - return conn - except Exception as e: - print(f"!!! Database connection failed due to: {e}") - - return conn - - -def create_table(table_sql_query): - conn = create_connection() - try: - cur = conn.cursor() - cur.execute(table_sql_query) - conn.commit() - cur.close() - except Exception as e: - print(f"!!! ERROR: Failed to create table: {e}") - return False - finally: - if conn: - conn.close() - - -def drop_table(table_name): - conn = create_connection() - sql_query = f"DROP TABLE `{table_name}`;" - try: - cur = conn.cursor() - cur.execute(sql_query) - conn.commit() - cur.close() - except Exception as e: - print(f"!!! ERROR: Failed to drop table {table_name}: {e}") - return False - finally: - if conn: - conn.close() - - -def get_column_names_from_table(table_name): - print(f"Getting the column names from table: {table_name}") - - conn = create_connection() - sql_query = f"select * from `{table_name}`" - print(f" -- sql_query: {sql_query}") - try: - cur = conn.cursor() - cur.execute(sql_query) - col_name_list = [res[0] for res in cur.description] - return col_name_list - except Exception as e: - print(f"!!! ERROR: Failed to get column names from table: {table_name}: {e}") - return False - finally: - if conn: - conn.close() - - -def add_column_to_table(table_name, column_name, column_type): - print(f"Adding column {column_name} with type {column_type} to {table_name} table") - - conn = create_connection() - sql_query = f"alter table `{table_name}` add column {column_name} {column_type}" - print(f" -- sql_query: {sql_query}") - try: - cur = conn.cursor() - cur.execute(sql_query) - except Exception as e: - print(f"!!! ERROR: Failed to add {column_name} column into table {table_name} --> {e}") - return False - finally: - if conn: - conn.close() - - -def add_single_row_into_db(table_name, col_names_list, col_values_list): - print(f"Adding 1 new entry into {table_name} table") - initial_rows_no = get_last_row_no(table_name) - col_names = ','.join(col_names_list) - col_spaces = ','.join(['%s'] * len(col_names_list)) - conn = create_connection() - sql_query = f"INSERT INTO `{table_name}` (%s) values(%s)" % (col_names, col_spaces) - print(f" -- sql_query: {sql_query}") - try: - cur = conn.cursor() - cur.execute(sql_query, col_values_list) - conn.commit() - cur.close() - except Exception as e: - print(f" -- !!! 
ERROR: Failed to insert data into {table_name} table: {e}") - return False - finally: - if conn: - conn.close() - final_rows_no = get_last_row_no(table_name) - print(f"Successfully added {final_rows_no - initial_rows_no} rows into table {table_name}") - return True - - -def add_bulk_rows_into_db(table_name, col_names_list, col_values_list): - print(f"Adding {len(col_values_list)} entries into {table_name} table") - initial_rows_no = get_last_row_no(table_name) - col_names = ','.join(col_names_list) - col_spaces = ','.join(['%s'] * len(col_names_list)) - conn = create_connection() - sql_query = f"INSERT INTO `{table_name}` (%s) values (%s)" % (col_names, col_spaces) - print(f" -- sql_query: {sql_query}") - try: - cur = conn.cursor() - cur.executemany(sql_query, col_values_list) - conn.commit() - cur.close() - except Exception as e: - print(f" -- !!! ERROR: Failed to bulk insert data into {table_name} table: {e}") - return False - finally: - if conn: - conn.close() - final_rows_no = get_last_row_no(table_name) - print(f"Successfully added {final_rows_no - initial_rows_no} rows into table {table_name}") - return True - - -def get_last_row_no(table_name): - print(f"Getting the no of rows from table: {table_name}") - - conn = create_connection() - sql_query = f"SELECT count(*) FROM `{table_name}`;" - print(f" -- sql_query: {sql_query}") - try: - cur = conn.cursor() - cur.execute(sql_query) - last_row_no = cur.fetchone()[0] - return last_row_no - except Exception as e: - print(f"!!! ERROR: Failed to get the no of rows from table {table_name} --> {e}") - return False - finally: - if conn: - conn.close() - - -def get_identifier_last_run_from_table(table_name): - print(f"Getting the Identifier value of the last run from table {table_name}") - - if get_last_row_no(table_name) == 0: - return table_name + "_0" - else: - conn = create_connection() - sql_query = f"SELECT identifier FROM `{table_name}` " \ - f"ORDER BY LPAD(LOWER(identifier), 500,0) DESC LIMIT 1;" - print(f" -- sql_query: {sql_query}") - try: - cur = conn.cursor() - cur.execute(sql_query) - last_identifier = cur.fetchone()[0] - return last_identifier - except Exception as e: - print(f"!!! ERROR: Failed to get the no of rows from table {table_name} --> {e}") - return False - finally: - if conn: - conn.close() - - -def get_last_epoch_no_from_table(table_name): - print(f"Getting the last epoch no value from table {table_name}") - - if get_last_row_no(table_name) == 0: - return 0 - else: - conn = create_connection() - sql_query = f"SELECT MAX(epoch_no) FROM `{table_name}`;;" - print(f" -- sql_query: {sql_query}") - try: - cur = conn.cursor() - cur.execute(sql_query) - last_identifier = cur.fetchone()[0] - return last_identifier - except Exception as e: - print(f"!!! ERROR: Failed to get last epoch no from table {table_name} --> {e}") - return False - finally: - if conn: - conn.close() - - -def get_column_values(table_name, column_name): - print(f"Getting {column_name} column values from table {table_name}") - - conn = create_connection() - sql_query = f"SELECT {column_name} FROM `{table_name}`;" - try: - cur = conn.cursor() - cur.execute(sql_query) - return [el[0] for el in cur.fetchall()] - except Exception as e: - print(f"!!! 
ERROR: Failed to get {column_name} column values from table {table_name} --> {e}") - return False - finally: - if conn: - conn.close() - - -def delete_all_rows_from_table(table_name): - print(f"Deleting all entries from table: {table_name}") - conn = create_connection() - sql_query = f"TRUNCATE TABLE `{table_name}`" - print(f" -- sql_query: {sql_query}") - initial_rows_no = get_last_row_no(table_name) - try: - cur = conn.cursor() - cur.execute(sql_query) - conn.commit() - cur.close() - except Exception as e: - print(f"!!! ERROR: Failed to delete all records from table {table_name} --> {e}") - return False - finally: - if conn: - conn.close() - final_rows_no = get_last_row_no(table_name) - print(f"Successfully deleted {initial_rows_no - final_rows_no} rows from table {table_name}") - - -def delete_record(table_name, column_name, delete_value): - print(f"Deleting rows containing '{delete_value}' value inside the '{column_name}' column") - initial_rows_no = get_last_row_no(table_name) - print(f"Deleting {column_name} = {delete_value} from {table_name} table") - - conn = create_connection() - sql_query = f"DELETE from `{table_name}` where {column_name}=\"{delete_value}\"" - print(f" -- sql_query: {sql_query}") - try: - cur = conn.cursor() - cur.execute(sql_query) - conn.commit() - cur.close() - except Exception as e: - print(f"!!! ERROR: Failed to delete record {column_name} = {delete_value} from {table_name} table: --> {e}") - return False - finally: - if conn: - conn.close() - final_rows_no = get_last_row_no(table_name) - print(f"Successfully deleted {initial_rows_no - final_rows_no} rows from table {table_name}") - - -def add_bulk_csv_to_table(table_name, csv_path): - df = pd.read_csv(csv_path) - # replace nan/empty values with "None" - df = df.where(pd.notnull(df), None) - - col_to_insert = list(df.columns) - val_to_insert = df.values.tolist() - add_bulk_rows_into_db(table_name, col_to_insert, val_to_insert) - - -# Delete specified identifiers -# env = "testnet" -# delete_strings = ["testnet_37"] -# for del_str in delete_strings: -# delete_record(env, "identifier", del_str) -# delete_record(env + "_epoch_duration", "identifier", del_str) -# delete_record(env + "_logs", "identifier", del_str) diff --git a/flake.nix b/flake.nix index e76cd813..25a0219f 100644 --- a/flake.nix +++ b/flake.nix @@ -37,7 +37,7 @@ nativeBuildInputs = with pkgs; [ bash nix gnugrep gnumake gnutar coreutils git xz ]; }; python = pkgs.mkShell { - nativeBuildInputs = with pkgs; with python39Packages; [ python39Full virtualenv pip matplotlib pandas requests xmltodict psutil GitPython pymysql postgresql_14 wget curl psycopg2 assertpy ]; + nativeBuildInputs = with pkgs; with python39Packages; [ python39Full virtualenv pip matplotlib pandas requests xmltodict psutil GitPython pymysql postgresql_14 wget curl psycopg2 assertpy colorama]; }; postgres = pkgs.mkShell { nativeBuildInputs = with pkgs; [ glibcLocales postgresql_14 lsof procps wget ]; diff --git a/db_sync_tests/scripts/db-sync-start.sh b/sync_tests/scripts/db-sync-start.sh similarity index 100% rename from db_sync_tests/scripts/db-sync-start.sh rename to sync_tests/scripts/db-sync-start.sh diff --git a/db_sync_tests/scripts/postgres-start.sh b/sync_tests/scripts/postgres-start.sh similarity index 100% rename from db_sync_tests/scripts/postgres-start.sh rename to sync_tests/scripts/postgres-start.sh diff --git a/db_sync_tests/tests/__init__.py b/sync_tests/tests/__init__.py similarity index 100% rename from db_sync_tests/tests/__init__.py rename to 
sync_tests/tests/__init__.py diff --git a/db_sync_tests/utils/db_tables_ddl.sql b/sync_tests/tests/db_tables_ddl.sql similarity index 100% rename from db_sync_tests/utils/db_tables_ddl.sql rename to sync_tests/tests/db_tables_ddl.sql diff --git a/db_sync_tests/tests/full_sync_from_clean_state.py b/sync_tests/tests/full_sync_from_clean_state.py similarity index 98% rename from db_sync_tests/tests/full_sync_from_clean_state.py rename to sync_tests/tests/full_sync_from_clean_state.py index e2403960..589e1fa4 100644 --- a/db_sync_tests/tests/full_sync_from_clean_state.py +++ b/sync_tests/tests/full_sync_from_clean_state.py @@ -9,7 +9,7 @@ sys.path.append(os.getcwd()) -from db_sync_tests.utils.utils import seconds_to_time, get_no_of_cpu_cores, get_current_date_time, \ +from sync_tests.utils.utils_db_sync import seconds_to_time, get_no_of_cpu_cores, get_current_date_time, \ get_os_type, get_total_ram_in_GB, upload_artifact, clone_repo, zip_file, execute_command, \ print_file, stop_process, write_data_as_json_to_file, get_node_config_files, \ get_node_version, get_db_sync_version, start_node_in_cwd, wait_for_db_to_sync, \ @@ -25,7 +25,7 @@ NODE_ARCHIVE, DB_SYNC_ARCHIVE, SYNC_DATA_ARCHIVE, EXPECTED_DB_SCHEMA, EXPECTED_DB_INDEXES, \ ENVIRONMENT \ -from db_sync_tests.utils.aws_db_utils import get_identifier_last_run_from_table, \ +from sync_tests.utils.aws_db_utils import get_identifier_last_run_from_table, \ add_bulk_rows_into_db, add_single_row_into_db diff --git a/db_sync_tests/tests/iohk_snapshot_restoration.py b/sync_tests/tests/iohk_snapshot_restoration.py similarity index 98% rename from db_sync_tests/tests/iohk_snapshot_restoration.py rename to sync_tests/tests/iohk_snapshot_restoration.py index 25c8bfc6..e390351d 100644 --- a/db_sync_tests/tests/iohk_snapshot_restoration.py +++ b/sync_tests/tests/iohk_snapshot_restoration.py @@ -8,7 +8,7 @@ sys.path.append(os.getcwd()) -from db_sync_tests.utils.utils import seconds_to_time, get_no_of_cpu_cores, get_current_date_time, \ +from sync_tests.utils.utils_db_sync import seconds_to_time, get_no_of_cpu_cores, get_current_date_time, \ get_os_type, get_total_ram_in_GB, upload_artifact, clone_repo, wait, zip_file, \ print_file, stop_process, copy_node_executables, write_data_as_json_to_file, \ execute_command, get_node_config_files, are_errors_present_in_db_sync_logs, \ @@ -26,7 +26,7 @@ DB_SYNC_PERF_STATS, NODE_LOG, DB_SYNC_LOG, EPOCH_SYNC_TIMES, PERF_STATS_ARCHIVE, \ NODE_ARCHIVE, DB_SYNC_ARCHIVE, SYNC_DATA_ARCHIVE, ENVIRONMENT \ -from utils.aws_db_utils import get_identifier_last_run_from_table, \ +from sync_tests.utils.aws_db_utils import get_identifier_last_run_from_table, \ add_bulk_rows_into_db, add_single_row_into_db diff --git a/db_sync_tests/tests/local_snapshot_restoration.py b/sync_tests/tests/local_snapshot_restoration.py similarity index 99% rename from db_sync_tests/tests/local_snapshot_restoration.py rename to sync_tests/tests/local_snapshot_restoration.py index 7ec6e6e0..b51335fa 100644 --- a/db_sync_tests/tests/local_snapshot_restoration.py +++ b/sync_tests/tests/local_snapshot_restoration.py @@ -7,7 +7,7 @@ sys.path.append(os.getcwd()) -from db_sync_tests.utils.utils import seconds_to_time, get_no_of_cpu_cores, \ +from sync_tests.utils.utils_db_sync import seconds_to_time, get_no_of_cpu_cores, \ get_current_date_time, get_os_type, get_total_ram_in_GB, \ upload_artifact, print_file, stop_process, export_env_var, is_string_present_in_file, \ zip_file, write_data_as_json_to_file, get_db_sync_version, start_node_in_cwd, \ diff --git 
a/sync_tests/node_sync_test.py b/sync_tests/tests/node_sync_test.py similarity index 99% rename from sync_tests/node_sync_test.py rename to sync_tests/tests/node_sync_test.py index 2f4a17ba..ac78eee6 100644 --- a/sync_tests/node_sync_test.py +++ b/sync_tests/tests/node_sync_test.py @@ -92,7 +92,7 @@ def check_string_format(input_string): def delete_node_files(): - for p in Path(".").glob("cardano-*"): + for p in Path("..").glob("cardano-*"): print_info_warn(f"deleting file: {p}") p.unlink(missing_ok=True) @@ -133,7 +133,7 @@ def disable_p2p_node_config(): def rm_node_config_files() -> None: print_info_warn('Removing existing config files') os.chdir(Path(ROOT_TEST_PATH)) - for gen in Path(".").glob("*-genesis.json"): + for gen in Path("..").glob("*-genesis.json"): Path(gen).unlink(missing_ok=True) for f in ('config.json', 'topology.json'): Path(f).unlink(missing_ok=True) diff --git a/sync_tests/node_write_mainnet_tx_count_per_epoch.py b/sync_tests/tests/node_write_mainnet_tx_count_per_epoch.py similarity index 100% rename from sync_tests/node_write_mainnet_tx_count_per_epoch.py rename to sync_tests/tests/node_write_mainnet_tx_count_per_epoch.py diff --git a/sync_tests/node_write_sync_values_to_db.py b/sync_tests/tests/node_write_sync_values_to_db.py similarity index 100% rename from sync_tests/node_write_sync_values_to_db.py rename to sync_tests/tests/node_write_sync_values_to_db.py diff --git a/db_sync_tests/tests/snapshot_creation.py b/sync_tests/tests/snapshot_creation.py similarity index 96% rename from db_sync_tests/tests/snapshot_creation.py rename to sync_tests/tests/snapshot_creation.py index 0eef09c7..6c39fac3 100644 --- a/db_sync_tests/tests/snapshot_creation.py +++ b/sync_tests/tests/snapshot_creation.py @@ -8,7 +8,7 @@ sys.path.append(os.getcwd()) -from db_sync_tests.utils.utils import seconds_to_time, get_no_of_cpu_cores, \ +from sync_tests.utils.utils_db_sync import seconds_to_time, get_no_of_cpu_cores, \ get_current_date_time, get_os_type, get_total_ram_in_GB, \ upload_artifact, print_file, create_db_sync_snapshot_stage_2, \ write_data_as_json_to_file, set_buildkite_meta_data, \ @@ -17,7 +17,7 @@ get_file_size, create_db_sync_snapshot_stage_1, print_color_log, \ ROOT_TEST_PATH, ENVIRONMENT -from db_sync_tests.utils.aws_db_utils import get_identifier_last_run_from_table, add_single_row_into_db +from sync_tests.utils.aws_db_utils import get_identifier_last_run_from_table, add_single_row_into_db diff --git a/db_sync_tests/utils/__init__.py b/sync_tests/utils/__init__.py similarity index 100% rename from db_sync_tests/utils/__init__.py rename to sync_tests/utils/__init__.py diff --git a/sync_tests/aws_db_utils.py b/sync_tests/utils/aws_db_utils.py similarity index 85% rename from sync_tests/aws_db_utils.py rename to sync_tests/utils/aws_db_utils.py index a305c6e4..91f747ef 100644 --- a/sync_tests/aws_db_utils.py +++ b/sync_tests/utils/aws_db_utils.py @@ -3,7 +3,7 @@ import pymysql.cursors import pandas as pd -from utils import print_info, print_ok, print_error +from .utils import print_info, print_ok, print_error @@ -318,6 +318,54 @@ def add_bulk_csv_to_table(table_name, csv_path): val_to_insert = df.values.tolist() add_bulk_values_into_db(table_name, col_to_insert, val_to_insert) + +def add_single_row_into_db(table_name, col_names_list, col_values_list): + print(f"Adding 1 new entry into {table_name} table") + initial_rows_no = get_last_row_no(table_name) + col_names = ','.join(col_names_list) + col_spaces = ','.join(['%s'] * len(col_names_list)) + conn = create_connection() + 
sql_query = f"INSERT INTO `{table_name}` (%s) values(%s)" % (col_names, col_spaces) + print(f" -- sql_query: {sql_query}") + try: + cur = conn.cursor() + cur.execute(sql_query, col_values_list) + conn.commit() + cur.close() + except Exception as e: + print(f" -- !!! ERROR: Failed to insert data into {table_name} table: {e}") + return False + finally: + if conn: + conn.close() + final_rows_no = get_last_row_no(table_name) + print(f"Successfully added {final_rows_no - initial_rows_no} rows into table {table_name}") + return True + + +def add_bulk_rows_into_db(table_name, col_names_list, col_values_list): + print(f"Adding {len(col_values_list)} entries into {table_name} table") + initial_rows_no = get_last_row_no(table_name) + col_names = ','.join(col_names_list) + col_spaces = ','.join(['%s'] * len(col_names_list)) + conn = create_connection() + sql_query = f"INSERT INTO `{table_name}` (%s) values (%s)" % (col_names, col_spaces) + print(f" -- sql_query: {sql_query}") + try: + cur = conn.cursor() + cur.executemany(sql_query, col_values_list) + conn.commit() + cur.close() + except Exception as e: + print(f" -- !!! ERROR: Failed to bulk insert data into {table_name} table: {e}") + return False + finally: + if conn: + conn.close() + final_rows_no = get_last_row_no(table_name) + print(f"Successfully added {final_rows_no - initial_rows_no} rows into table {table_name}") + return True + # Delete specified identifiers # env = "testnet" # delete_strings = ["testnet_37"] diff --git a/sync_tests/blockfrost_utils.py b/sync_tests/utils/blockfrost_utils.py similarity index 100% rename from sync_tests/blockfrost_utils.py rename to sync_tests/utils/blockfrost_utils.py diff --git a/sync_tests/explorer_utils.py b/sync_tests/utils/explorer_utils.py similarity index 100% rename from sync_tests/explorer_utils.py rename to sync_tests/utils/explorer_utils.py diff --git a/sync_tests/gitpython_utils.py b/sync_tests/utils/gitpython_utils.py similarity index 100% rename from sync_tests/gitpython_utils.py rename to sync_tests/utils/gitpython_utils.py diff --git a/sync_tests/utils.py b/sync_tests/utils/utils.py similarity index 100% rename from sync_tests/utils.py rename to sync_tests/utils/utils.py diff --git a/db_sync_tests/utils/utils.py b/sync_tests/utils/utils_db_sync.py similarity index 99% rename from db_sync_tests/utils/utils.py rename to sync_tests/utils/utils_db_sync.py index f2454412..bbca1fa1 100755 --- a/db_sync_tests/utils/utils.py +++ b/sync_tests/utils/utils_db_sync.py @@ -1144,7 +1144,7 @@ def start_db_sync(env, start_args="", first_start="True"): export_env_var("LOG_FILEPATH", DB_SYNC_LOG) try: - cmd = "./db_sync_tests/scripts/db-sync-start.sh" + cmd = "./sync_tests/scripts/db-sync-start.sh" p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) os.chdir(current_directory) except subprocess.CalledProcessError as e: @@ -1215,7 +1215,7 @@ def setup_postgres(pg_dir=POSTGRES_DIR, pg_user=POSTGRES_USER, pg_port='5432'): export_env_var("PGPORT", pg_port) try: - cmd = ["./db_sync_tests/scripts/postgres-start.sh", f"{pg_dir}", "-k"] + cmd = ["./sync_tests/scripts/postgres-start.sh", f"{pg_dir}", "-k"] output = ( subprocess.check_output(cmd, stderr=subprocess.STDOUT) .decode("utf-8")