# ================================================================================================================================================================================
# PPM PRODUCT CATALOG (PC) REPLICATION - CE CUSTOM TABLES REPLICATE
# DATE       AUTHOR      VER   CHANGE DESCRIPTION
# --------   ---------   ----  ------------------
# 21.08.23   Veera       1.0   Replicates PPM table records dynamically, driven by the
#                              "etl_ppm_replication_master" entries flagged with catalog "PC_EXT"
# ================================================================================================================================================================================
import argparse
import json
import logging
import mysql.connector
from base64 import b64decode as base_b64decode
from pandas import read_sql as pd_read_sql
from sqlalchemy import create_engine
source = 'SOURCE'  # Key in the JSON config holding the source DB connection details
# Function to write SQL query to the log file
def write_sql(sql_log_file, query_info):
sql_log_file.write(query_info)
sql_log_file.write('\n')
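# Illustrative usage sketch: write_sql() calls .write() on its first argument,
# so the caller must pass an open file handle, not the path string:
#   with open(sql_log_path, 'a') as sql_log:
#       write_sql(sql_log, "-- STATUS|TARGET| TABLE| some_table")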
def connect_to_database(json_data, replicationTarget):
    # Pull the connection settings for the given key from the JSON config
    encrypt = json_data.get(replicationTarget, {}).get('ENCRYPT')
    host = json_data.get(replicationTarget, {}).get('DB_HOST')
    port = json_data.get(replicationTarget, {}).get('DB_PORT')
    user = json_data.get(replicationTarget, {}).get('DB_USER')
    db_type = json_data.get(replicationTarget, {}).get('DB_TYPE')
    schema = json_data.get(replicationTarget, {}).get('DB_SCHEMA')
    sid = json_data.get(replicationTarget, {}).get('DB_SID')
    # Passwords may be stored base64-encoded; decode when ENCRYPT is 'Y'
    if encrypt == 'Y':
        password = base_b64decode(json_data.get(replicationTarget, {}).get('DB_PASSWORD')).decode('utf-8')
    else:
        password = json_data.get(replicationTarget, {}).get('DB_PASSWORD')
    if db_type == 'MYSQL':
        cnx = mysql.connector.connect(user=user, password=password, host=host, port=port)
        cursor = cnx.cursor()
        logging.info(f"Connected to MySQL database server {replicationTarget}: {host}:{port}")
    elif db_type == 'ORACLE':
        import cx_Oracle
        dsn = cx_Oracle.makedsn(host, port, sid=sid)
        cnx = cx_Oracle.connect(user=user, password=password, dsn=dsn)
        cursor = cnx.cursor()
        logging.info(f"Connected to Oracle database server {replicationTarget}: {host}:{port}")
    else:
        # Fail fast rather than returning undefined cnx/cursor for unknown types
        raise ValueError(f"Unsupported DB_TYPE '{db_type}' for {replicationTarget}")
    return cnx, cursor, schema, user, password, host, port
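# Expected shape of the JSON config consumed above (illustrative sketch; the
# key names mirror the lookups in connect_to_database, values are placeholders):
# {
#   "PPM_PC":  {"DB_TYPE": "MYSQL", "DB_HOST": "...", "DB_PORT": "3306",
#               "DB_USER": "...", "DB_PASSWORD": "...", "ENCRYPT": "N",
#               "DB_SCHEMA": "...", "DB_SID": ""},
#   "SOURCE":  { ... },
#   "<replicationTarget>_EXT": { ... }
# }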
def main(args, json_file_path, sql_log_file, log_file):
    releaseId = args.releaseId
    opId = args.opId
    buId = args.buId
    replicationTarget = args.replicationTarget
    replicationJobId = args.replicationJobId
    # write_sql() calls .write() on its first argument, so open the SQL log
    # once here and pass the handle around instead of the path string
    sql_log = open(sql_log_file, 'a')
    # Read JSON data from file
    with open(json_file_path) as json_file:
        json_data = json.load(json_file)
    # Connect to PPM_PC database
    conn_ppm, cursor_ppm, schema_ppm, user_ppm, password_ppm, host_ppm, port_ppm = connect_to_database(json_data, 'PPM_PC')
    # Fetch the list of tables to replicate from the etl_ppm_replication_master table
    primary_query = f"SELECT * FROM {schema_ppm}.etl_ppm_replication_master WHERE eprm_catalog='PC_EXT' AND eprm_enabled_flg='Y'"
    df = pd_read_sql(primary_query, con=conn_ppm)
    # Connect to source database
    conn_source, cursor_source, schema_source, _, _, _, _ = connect_to_database(json_data, source)
    # Build a SQLAlchemy engine for the target so DataFrame.to_sql() works for
    # both database types; the insert loop below uses target_engine either way
    replicationTarget_EXT = replicationTarget + '_EXT'
    db_type = json_data.get(replicationTarget_EXT, {}).get('DB_TYPE')
    _, _, schema_target, user_target, password_target, host_target, port_target = connect_to_database(
        json_data, replicationTarget_EXT)
    if db_type == 'MYSQL':
        target_engine = create_engine(
            f"mysql+mysqlconnector://{user_target}:{password_target}@{host_target}:{port_target}/{schema_target}")
    else:
        # SQLAlchemy's cx_Oracle dialect treats the path component of the URL
        # as the SID; read it from the config rather than an undefined variable
        sid_target = json_data.get(replicationTarget_EXT, {}).get('DB_SID')
        target_engine = create_engine(
            f"oracle+cx_oracle://{user_target}:{password_target}@{host_target}:{port_target}/{sid_target}")
    for _, row in df.iterrows():
        eprm_table_name = row['eprm_table_name'].lower()  # Convert table name to lowercase
        if eprm_table_name != 'pkg_prd_fed_ext_attrs':
            source_query = f"SELECT * FROM {schema_source}.{eprm_table_name} WHERE release_id='{releaseId}' AND op_id='{opId}' AND bu_id='{buId}'"
            try:
                source_df = pd_read_sql(source_query, con=conn_source)
                # Stamp the replication job id on the rows being copied
                if 'updated_by' in source_df:
                    source_df['updated_by'] = replicationJobId
                if not source_df.empty:
                    source_df.to_sql(eprm_table_name, con=target_engine, if_exists='append', index=False)
                    # Pass the open file handle as the first argument; calling
                    # write_sql() with only the message raises the TypeError below
                    write_sql(sql_log,
                              f"-- ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ STATUS|{replicationTarget}| TABLE| {eprm_table_name}\n")
                    write_sql(sql_log, f"-- #Query: Inserted {len(source_df)} record(s) and updated 'updated_by'\n")
                    print(f"Inserting records into {eprm_table_name}")
                    logging.info(f"Inserted {len(source_df)} record(s) into {eprm_table_name} and updated 'updated_by'")
            except Exception as err:
                # to_sql() goes through SQLAlchemy, so failures are not limited
                # to mysql.connector.Error; log whatever surfaced and continue
                print(f"Error occurred while executing the query: {source_query}: {err}")
                logging.error(f"Error occurred while executing the query: {source_query}: {err}")
    sql_log.close()
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="PPM Product Catalog Replication Script")
    parser.add_argument('--releaseId', required=True, help="Release ID")
    parser.add_argument('--releaseType', required=True, help="Release Type")
    parser.add_argument('--replicationTarget', required=True, help="Replication Target")
    parser.add_argument('--opId', required=True, help="Operation ID")
    parser.add_argument('--buId', required=True, help="Business Unit ID")
    parser.add_argument('--replicationJobId', required=True, help="Replication Job ID")
    args = parser.parse_args()
    replicationJobId = args.replicationJobId
    json_file_path = "/app/scripts/PPM_Release_Management/Product_Catalog_ETL/config/ppm_pc_replication.json"
    sql_log_file = f"/app/scripts/PPM_Release_Management/Product_Catalog_ETL/logs/{replicationJobId}_ppm_pc_ext.sql"
    log_file = f'/app/scripts/PPM_Release_Management/Product_Catalog_ETL/logs/{replicationJobId}_ppm_pc_ext.log'
    # Route logging.* output to the job's log file
    logging.basicConfig(filename=log_file, level=logging.INFO)
    main(args, json_file_path, sql_log_file, log_file)
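# Illustrative invocation (argument values are placeholders):
#   python ppm_pc_ext_insert.py --releaseId R1 --releaseType FULL \
#       --replicationTarget UAT --opId OP01 --buId BU01 --replicationJobId JOB123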
For the above code, I am getting the error below:
Traceback (most recent call last):
File "ppm_pc_ext_insert.py", line 136, in <module>
main(args,json_file_path,sql_log_file,log_file)
File "ppm_pc_ext_insert.py", line 113, in main
f"-- ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ STATUS|{replicationTarget}| TABLE| {eprm_table_name}\n")
TypeError: write_sql() missing 1 required positional argument: 'query_info'
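
The traceback points at the write_sql() calls inside main(): the function is defined as write_sql(sql_log_file, query_info), but it was being invoked with only the message string, so Python reports the missing query_info positional argument. A related issue is that main() receives sql_log_file as a path string, while write_sql() calls .write() on its first argument, so the file has to be opened before any call. A minimal sketch of the corrected call site (the sql_log handle is opened near the top of main(), as in the listing above):

    sql_log = open(sql_log_file, 'a')   # open the SQL log once, as a file handle
    write_sql(sql_log, f"-- STATUS|{replicationTarget}| TABLE| {eprm_table_name}\n")
    sql_log.close()                     # close after the replication loop finishes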