import pandas as pd
import mysql.connector
import json
from base64 import b64decode as base_b64decode
import logging
from pandas import read_sql as pd_read_sql
import sys

releaseId = '275.2'
opId = 'HOB'
buId = 'DEFAULT'
replicationTarget = 'SIT'
source = 'SOURCE'
replicationJobId = 'REP_990_234'

json_file_path = "/app/scripts/PPM_Release_Management/Product_Catalog_ETL/config/ppm_reply.json"
sql_log_path = f"/app/scripts/PPM_Release_Management/Product_Catalog_ETL/logs/{replicationJobId}ppm_reply.sql"
log_file = f"/app/scripts/PPM_Release_Management/Product_Catalog_ETL/logs/{replicationJobId}ppm_reply.log"

# Set up logging
logging.basicConfig(
    filename=log_file,
    level=logging.INFO,
    format='%(asctime)s - %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S'
)

# Open the SQL log file for writing (a separate name from the path
# variable, so the file handle does not shadow the string it came from)
sql_log_file = open(sql_log_path, "w")

# Write one query-info block to the SQL log file
def write_sql(query_info):
    sql_log_file.write(query_info)
    sql_log_file.write('\n')

# Pre-declare everything the finally block touches, so cleanup cannot
# raise NameError when a connection step fails before assignment
conn_ppm = cursor_ppm = None
conn_source = cursor_source = None
conn_target = cursor_target = None

try:
    # Establish a database connection for the given target key.
    # NOTE: the original script elides this body ("same as before"); the
    # reconstruction below is an assumption, inferred from the call sites
    # (it must return (connection, cursor, schema)) and from the b64decode
    # import, which suggests passwords are stored base64-encoded. The
    # json_data key names are likewise assumptions (see the sketch of the
    # config layout after this script).
    def connect_to_database(json_data, replicationTarget):
        db = json_data[replicationTarget]  # assumed: one top-level entry per target
        conn = mysql.connector.connect(
            host=db['host'],
            port=int(db.get('port', 3306)),
            user=db['user'],
            password=base_b64decode(db['password']).decode('utf-8'),
            database=db['schema']
        )
        return conn, conn.cursor(), db['schema']

    try:
        # Read JSON connection data from file
        with open(json_file_path) as json_file:
            json_data = json.load(json_file)
    except FileNotFoundError:
        print("File not found: " + json_file_path)
        raise  # the script cannot continue without the config

    try:
        # Connect to the PPM_PC database
        conn_ppm, cursor_ppm, schema_ppm = connect_to_database(json_data, 'PPM_PC')

        # Fetch the replication driver rows from etl_ppm_replication_master
        primary_query = (
            f"SELECT * FROM {schema_ppm}.etl_ppm_replication_master "
            f"WHERE eprm_catalog='PC_EXT' AND eprm_enabled_flg='Y'"
        )
        df = pd_read_sql(primary_query, con=conn_ppm)

        # Connect to the source database
        conn_source, cursor_source, schema_source = connect_to_database(json_data, source)

        # Connect to the target database
        replicationTarget_EXT = replicationTarget + '_EXT'
        conn_target, cursor_target, schema_target = connect_to_database(json_data, replicationTarget_EXT)

        for _, row in df.iterrows():
            eprm_table_name = row['eprm_table_name']
            if eprm_table_name != 'PKG_PRD_FED_EXT_ATTRS':
                source_query = (
                    f"SELECT * FROM {schema_source}.{eprm_table_name} "
                    f"WHERE release_id='{releaseId}' AND op_id='{opId}' AND bu_id='{buId}'"
                )
                try:
                    source_df = pd_read_sql(source_query, con=conn_source)
                    if not source_df.empty:
                        print(f"Inserting records into {eprm_table_name}")
                        for _, source_row in source_df.iterrows():
                            source_row_dict = source_row.to_dict()
                            # NOTE: to_sql() officially supports only SQLAlchemy
                            # connectables (or sqlite3); a raw mysql.connector
                            # connection will likely fail here -- see the caveat
                            # after this script
                            pd.DataFrame([source_row_dict]).to_sql(
                                eprm_table_name, con=conn_target,
                                if_exists='append', index=False
                            )
                            query_info = f"-- ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ STATUS|{replicationTarget}| TABLE| {eprm_table_name}\n"
                            query_info += "-- #Query: Inserted 1 record\n"
                            write_sql(query_info)
                        logging.info(f"Inserted {len(source_df)} record(s) into {eprm_table_name}")
                except mysql.connector.Error as err:
                    print(f"Error occurred while executing the query: {source_query}: {err}")
                    logging.error(f"Error occurred while executing the query: {source_query}: {err}")
    except Exception as e:
        print("Error - {} . Line No - {} ".format(str(e), str(sys.exc_info()[-1].tb_lineno)))
        print("An error occurred while constructing the dataframe:", str(e))
except mysql.connector.Error as err:
    print("Error - {} . Line No - {} ".format(str(err), str(sys.exc_info()[-1].tb_lineno)))
    print(f"An error occurred: {err}")
    logging.error(f"An error occurred: {err}")
finally:
    # Close every handle that was actually opened; the json_file handle is
    # already closed by its with-block
    for cursor in (cursor_ppm, cursor_source, cursor_target):
        if cursor:
            cursor.close()
    for conn in (conn_ppm, conn_source, conn_target):
        if conn:
            conn.close()
    sql_log_file.close()
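# For reference, a minimal sketch of the ppm_reply.json layout that the
# assumed connect_to_database() reconstruction above expects. The key
# names (host, port, user, password, schema) and the per-target entries
# (PPM_PC, SOURCE, SIT_EXT) are assumptions, not confirmed by the
# original script; passwords are base64-encoded to match the b64decode
# import ("c2VjcmV0" is base64 for "secret"):
#
# {
#     "PPM_PC":  {"host": "ppm-db.internal", "port": 3306, "user": "ppm_user", "password": "c2VjcmV0", "schema": "ppm_pc"},
#     "SOURCE":  {"host": "src-db.internal", "port": 3306, "user": "src_user", "password": "c2VjcmV0", "schema": "ppm_src"},
#     "SIT_EXT": {"host": "sit-db.internal", "port": 3306, "user": "sit_user", "password": "c2VjcmV0", "schema": "ppm_sit"}
# }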
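# A caveat on the to_sql() calls above: pandas officially supports writing
# only through an SQLAlchemy connectable (or a sqlite3 connection), so
# passing a raw mysql.connector connection as con= is likely to fail. A
# minimal sketch of the workaround, with placeholder credentials matching
# the assumed config above ("mysql+mysqlconnector" is the SQLAlchemy
# dialect name for the mysql.connector driver):
#
#     from sqlalchemy import create_engine
#
#     engine = create_engine("mysql+mysqlconnector://sit_user:secret@sit-db.internal:3306/ppm_sit")
#     pd.DataFrame([source_row_dict]).to_sql(eprm_table_name, con=engine, if_exists='append', index=False)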