Untitled
unknown
plain_text
a year ago
9.2 kB
1
Indexable
Never
# ================================================================================================
# PPM PRODUCT CATALOG (PC) REPLICATION - CE CUSTOM TABLES REPLICATE
# DATE      AUTHOR  VER  CHANGE DESCRIPTION
# --------  ------  ---  ------------------
# 21.08.23  Veera   1.0  The below script replicates ppm table records dynamically from
#                        "etl_ppm_replication_master" "PC_EXT"
# ================================================================================================
import argparse
import json
import logging
import sys
from base64 import b64decode as base_b64decode
from io import TextIOBase as io_TextIOBase

import mysql.connector
import pandas as pd
from pandas import read_sql as pd_read_sql
from sqlalchemy import create_engine

source = 'SOURCE'


# define Python user-defined exceptions
class Error(Exception):
    """Base class for script-specific exceptions."""
    pass


class ETL_PPM_REPLICATION_MASTER_ERROR(Error):
    """Raised when etl_ppm_replication_master yields no enabled PC_EXT rows."""
    pass


class DB_CONNECTION_ERROR(Error):
    """Raised when a database connection cannot be established."""
    pass


def connect_to_database(json_data, replicationTarget, logging):
    """Open a connection to the database described by json_data[replicationTarget].

    Parameters
    ----------
    json_data : dict
        Parsed ppm_pc_replication.json; per-target keys include DB_HOST,
        DB_PORT, DB_USER, DB_PASSWORD, DB_TYPE ('MYSQL'/'ORACLE'), DB_SCHEMA
        and ENCRYPT ('Y' => DB_PASSWORD is base64-encoded).
    replicationTarget : str
        Key into json_data selecting the connection profile.
    logging : module
        Logger used for status/error messages.

    Returns
    -------
    tuple
        (cnx, cursor, schema, user, password, host, port).  `cnx` is a
        SQLAlchemy engine so that pandas read_sql/to_sql take the SQLAlchemy
        code path.  NOTE: passing a raw DBAPI connection to pandas.to_sql
        makes pandas fall back to its sqlite3 driver, which raises
        "dynamic module does not define module export function (PyInit__sqlite3)"
        on hosts with a broken _sqlite3 extension — that is the error this
        engine-based connection avoids.  On failure cnx/cursor/schema are None.
    """
    cnx = cursor = None
    # BUG FIX: initialize everything returned so a failure before assignment
    # cannot raise NameError at the return statement below.
    user = password = host = port = schema = None
    try:
        cfg = json_data.get(replicationTarget, {})
        encrypt = cfg.get('ENCRYPT')
        host = cfg.get('DB_HOST')
        port = cfg.get('DB_PORT')
        user = cfg.get('DB_USER')
        db_type = cfg.get('DB_TYPE')
        schema = cfg.get('DB_SCHEMA')
        if encrypt == 'Y':
            # Password is stored base64-encoded when ENCRYPT == 'Y'
            password = base_b64decode(cfg.get('DB_PASSWORD')).decode('utf-8')
        else:
            password = cfg.get('DB_PASSWORD')

        if db_type == 'MYSQL':
            # SQLAlchemy engine instead of mysql.connector.connect(): pandas
            # to_sql/read_sql require a SQLAlchemy connectable (see docstring).
            cnx = create_engine('mysql+mysqlconnector://%s:%s@%s:%s'
                                % (user, password, host, port))
            cursor = cnx.raw_connection().cursor()
            logging.info(f"Connected to MySQL database server {replicationTarget}: {host}:{port}")
        elif db_type == 'ORACLE':
            # BUG FIX: original imported cx_Oracle but called the oracledb API.
            import oracledb
            if oracledb.is_thin_mode():
                oracledb.init_oracle_client()
                print("Enabled python-oracledb Thick mode")
            cnx = create_engine('oracle://%s:%s@%s:%s/?service_name=%s'
                                % (user, password, host, port, schema))
            # BUG FIX: original did `cursor = cnx.cursor` — assigned the bound
            # method object without ever calling it.
            cursor = cnx.raw_connection().cursor()
    except mysql.connector.Error as dberr:
        logging.error("DATABASE CONNECTION ERROR")
        logging.error("Error - {} . Line No - {} ".format(str(dberr), str(sys.exc_info()[-1].tb_lineno)))
        cnx = cursor = schema = None
    except Exception as dbexp:
        logging.error("DATABASE CONNECTION EXCEPTION")
        # BUG FIX: original logged `dberr` here (undefined in this handler)
        # which raised NameError and masked the real exception.
        logging.error("Error - {} . Line No - {} ".format(str(dbexp), str(sys.exc_info()[-1].tb_lineno)))
        cnx = cursor = schema = None
    return cnx, cursor, schema, user, password, host, port


def main(args, json_file_path, log_file, logging):
    """Replicate PC_EXT catalog tables from SOURCE_EXT into <target>_EXT.

    Reads the list of tables to copy from etl_ppm_replication_master
    (eprm_catalog='PC_EXT', eprm_enabled_flg='Y') on the PPM_PC connection,
    then for each table copies the rows matching releaseId (plus opId/buId,
    except for pkg_prd_fed_ext_attrs) from the source into the target,
    stamping updated_by with the replication job id when present.

    Returns True on success, False on any failure.
    """
    # BUG FIX: set before the try so an early exception cannot leave the
    # return statement reading an unbound variable.
    return_flag = True
    try:
        releaseId = args.releaseId
        opId = args.opId
        buId = args.buId
        replicationTarget = args.replicationTarget
        replicationJobId = args.replicationJobId

        # Read JSON data from file
        with open(json_file_path) as json_file:
            json_data = json.load(json_file)

        # Connect to PPM_PC database (replication-master metadata)
        conn_ppm, cursor_ppm, schema_ppm, user_ppm, password_ppm, host_ppm, port_ppm = \
            connect_to_database(json_data, 'PPM_PC', logging)
        # Connect to source database
        conn_source, cursor_source, schema_source, _, _, _, _ = \
            connect_to_database(json_data, 'SOURCE_EXT', logging)

        # Fetch data from the etl_ppm_replication_master table
        primary_query = (f"SELECT * FROM {schema_ppm}.etl_ppm_replication_master "
                         f"WHERE eprm_catalog='PC_EXT' AND eprm_enabled_flg='Y'")
        df = pd_read_sql(primary_query, con=conn_ppm)
        if df.empty:
            raise ETL_PPM_REPLICATION_MASTER_ERROR

        # Connect to target database
        replicationTarget_EXT = replicationTarget + '_EXT'
        conn_tar, cursor_tar, schema_tar, _, _, _, _ = \
            connect_to_database(json_data, replicationTarget_EXT, logging)

        for _, row in df.iterrows():
            source_query = None  # defined before try so the handler can log it
            try:
                eprm_table_name = row['eprm_table_name'].lower()  # Convert table name to lowercase
                # pkg_prd_fed_ext_attrs is keyed by release only; all other
                # tables are additionally filtered by op/bu.
                if eprm_table_name == 'pkg_prd_fed_ext_attrs':
                    source_query = (f"SELECT * FROM {schema_source}.{eprm_table_name} "
                                    f"WHERE release_id='{releaseId}'")
                else:
                    source_query = (f"SELECT * FROM {schema_source}.{eprm_table_name} "
                                    f"WHERE release_id='{releaseId}' AND op_id='{opId}' AND bu_id='{buId}'")
                source_df = pd_read_sql(source_query, con=conn_source)
                logging.info(f"Count of {eprm_table_name} {len(source_df)}")
                # Membership test on a DataFrame checks column labels.
                if 'updated_by' in source_df:
                    source_df['updated_by'] = replicationJobId
                if not source_df.empty:
                    # conn_tar is a SQLAlchemy engine — a raw DBAPI connection
                    # here triggers pandas' sqlite3 fallback and the
                    # PyInit__sqlite3 import error.
                    source_df.to_sql(eprm_table_name, con=conn_tar, if_exists='append', index=False)
                    logging.info(f"Insertion succesfull")
            # Broadened from mysql.connector.Error: SQLAlchemy-backed inserts
            # raise sqlalchemy.exc errors; keep the per-table skip behavior.
            except Exception as err:
                print(f"Error occurred while executing the query: {source_query}: {err}")
                logging.info(f"Error occurred while executing the query: {source_query}: {err}")
    except DB_CONNECTION_ERROR:
        logging.error("EXCEPTION: DB CONNECTION ERROR PC_EXT")
        return_flag = False
    except ETL_PPM_REPLICATION_MASTER_ERROR:
        STATUS_MESSAGE = "NO RECORDS PRESENT IN etl_ppm_replication_master TABLE"
        logging.error("EXCEPTION:" + STATUS_MESSAGE)
        return_flag = False
    except Exception as e:
        logging.error("Error - {} . Line No - {} ".format(str(e), str(sys.exc_info()[-1].tb_lineno)))
        return_flag = False
    return return_flag


if __name__ == '__main__':
    from configparser import ConfigParser as conf_ConfigParser

    # BUG FIX: was initialized to "" — an early failure then called
    # "".write("FAILED") and raised AttributeError. None + isinstance guard.
    statFile = None
    try:
        parser = argparse.ArgumentParser(description="PPM Product Catalog Replication Script")
        parser.add_argument('--releaseId', required=True, help="Release ID")
        parser.add_argument('--releaseType', required=True, help="Release Type")
        parser.add_argument('--replicationTarget', required=True, help="Replication Target")
        parser.add_argument('--opId', required=True, help="Operation ID")
        parser.add_argument('--buId', required=True, help="Business Unit ID")
        parser.add_argument('--replicationJobId', required=True, help="Replication Job ID")
        # BUG FIX: parse_args() was called twice in the original.
        args = parser.parse_args()

        replicationJobId = args.replicationJobId
        json_file_path = "/app/scripts/PPM_Release_Management/Product_Catalog_ETL/config/ppm_pc_replication.json"
        conf_file_path = "/app/scripts/PPM_Release_Management/Product_Catalog_ETL/config/ppm_pc_replication.conf"
        log_file = f'/app/scripts/PPM_Release_Management/Product_Catalog_ETL/logs/{replicationJobId}_ppm_pc_ext.log'
        stat_file_path = f'/app/scripts/PPM_Release_Management/Product_Catalog_ETL/logs/{replicationJobId}_ppm_pc_replication_insert_pc_ext.status'
        statFile = open(stat_file_path, "w")

        # Set up logging from the .conf file
        CONFIG = conf_ConfigParser()
        CONFIG.read(conf_file_path)
        logging.basicConfig(
            filename=log_file,
            level=CONFIG.get('CONFIG_LOGGING', 'LOG_LEVEL', raw=True),
            format=CONFIG.get('CONFIG_LOG_FORMAT', 'LOG_FORMAT_DISP', raw=True),
            datefmt=CONFIG.get('CONFIG_LOG_FORMAT', 'LOG_FORMAT_DATE', raw=True)
        )
        logging.info('LOGGER initiated')

        if main(args, json_file_path, log_file, logging):
            print("Insertion of data succesfull")
            statFile.write("SUCCESS")
        else:
            statFile.write("FAILED")
    except FileNotFoundError as ferr:
        print("Error - {} . Line No - {} ".format(str(ferr), str(sys.exc_info()[-1].tb_lineno)))
        if isinstance(statFile, io_TextIOBase):
            statFile.write("FAILED")
    except Exception as err:
        print("Error - {} . Line No - {} ".format(str(err), str(sys.exc_info()[-1].tb_lineno)))
        if isinstance(statFile, io_TextIOBase):
            statFile.write("FAILED")
    finally:
        # Close unconditionally of success/failure path.
        if isinstance(statFile, io_TextIOBase):
            statFile.close()