import pandas as pd
import mysql.connector
import json
from base64 import b64decode as base_b64decode
from base64 import b64encode as base_b64encode
import logging
import datetime
import os
from sqlalchemy import create_engine as sqlalchemy_create_engine
from pandas import read_sql as pd_read_sql
import sys

releaseId = '275.2'
releaseType = 'deployed'
replicationTarget = 'SOURCE'
catalogId = ''
opId = 'HOB'
buId = 'DEFAULT'
replicationJobId = 'REP_990_234'
json_file_path = "/app/scripts/PPM_Release_Management/Product_Catalog_ETL/config/ppm_reply.json"
sql_log_file = f"/app/scripts/PPM_Release_Management/Product_Catalog_ETL/logs/{replicationJobId}ppm_reply.sql"
log_file = f'/app/scripts/PPM_Release_Management/Product_Catalog_ETL/logs/{replicationJobId}ppm_reply.log'
# sql_log_file = os.get_path("PPM_PC_LOG") + "/" + replicationJobId + "_ppm_pc_replication_delete.sql"
# json_file_path = os.get_path("PPM_PC_CONFIG") + "/ppm_pc_replication.json"
# log_file = os.get_path("PPM_PC_LOG") + "/" + replicationJobId + "_ppm_pc_replication_delete.log"

# Set up logging
logging.basicConfig(
    filename=log_file,
    level=logging.INFO,
    format='%(asctime)s - %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S'
)
query_count = 0

# Open SQL log file for writing
sql_log_file = open(sql_log_file, "w")


# Function to write SQL query to the log file
def write_sql(query_info):
    sql_log_file.write(query_info)
    sql_log_file.write('\n')


try:
    # Function to establish a database connection
    def connect_to_database(json_data, replicationTarget):
        try:
            encrypt = json_data.get(replicationTarget, {}).get('ENCRYPT')
            host = json_data.get(replicationTarget, {}).get('DB_HOST')
            port = json_data.get(replicationTarget, {}).get('DB_PORT')
            user = json_data.get(replicationTarget, {}).get('DB_USER')
            db_type = json_data.get(replicationTarget, {}).get('DB_TYPE')
            schema = json_data.get(replicationTarget, {}).get('DB_SCHEMA')
            if encrypt == 'Y':
                password = base_b64decode(json_data.get(replicationTarget, {}).get('DB_PASSWORD')).decode('utf-8')
            else:
                password = json_data.get(replicationTarget, {}).get('DB_PASSWORD')

            if db_type == 'MYSQL':
                cnx = mysql.connector.connect(user=user, password=password, host=host, port=port)
                cursor = cnx.cursor()
                logging.info(f"Connected to MySQL database server {replicationTarget}: {host}:{port}")

            elif db_type == 'ORACLE':
                import oracledb
                # python-oracledb starts in Thin mode; switch to Thick mode if it has not been enabled yet
                oracle_mode = oracledb.is_thin_mode()
                print("Oracle thin mode: %s" % oracle_mode)

                if oracle_mode:
                    oracledb.init_oracle_client()
                    print("Enabled python-oracledb Thick mode")
                else:
                    print("python-oracledb already running in Thick mode")

                cnx_text = ('oracle://%s:%s@%s:%s/?service_name=%s' % (user, password, host, port, schema))
                cnx = sqlalchemy_create_engine(cnx_text, encoding="utf8").raw_connection()
                cursor = cnx.cursor()

            return cnx, cursor, schema

        except Exception as e:  # covers mysql.connector, SQLAlchemy, and import errors
            logging.error(f"An error occurred while connecting to the database: {str(e)}")
            raise e


    try:
        # Read JSON data from file
        with open(json_file_path) as json_file:
            json_data = json.load(json_file)
    except FileNotFoundError:
        print("File not found: " + json_file_path)
        sys.exit(1)
    try:
        # Connect to PPM_PC database
        conn_ppm, cursor_ppm, schema_ppm = connect_to_database(json_data, 'PPM_PC')

        # Fetch data from the etl_ppm_replication_master table
        primary_query = f"SELECT * FROM {schema_ppm}.etl_ppm_replication_master"

        df = pd_read_sql(primary_query, con=conn_ppm)
        columns = df.columns.tolist()
        rows = df.values.tolist()

        # PC_EXT
        replicationTarget_EXT = replicationTarget + '_EXT'
        connection_ext, cursor_ext, schema_ext = connect_to_database(json_data, replicationTarget_EXT)
        cursor_ext = connection_ext.cursor()

        filtered_df = df[df['eprm_catalog'].isin(['PC_EXT']) & (df['eprm_enabled_flg'].isin(['Y']))]
        if len(filtered_df) > 0:
            for _, row in filtered_df.iterrows():
                eprm_table_name = row['eprm_table_name']
                if eprm_table_name != 'PKG_PRD_FED_EXT_ATTRS':
                    query = f"SELECT COUNT(*) FROM {schema_ext}.{eprm_table_name} where release_id='" + releaseId + "' AND op_id='" + opId + "' AND bu_id='" + buId + "'"
                    try:
                        result = pd_read_sql(query, con=connection_ext)
                        count = result.iloc[0, 0]
                        query_info = f"-- ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ STATUS|{replicationTarget}| TABLE| {eprm_table_name}\n"
                        query_info += f"-- #Query: Result:{count}\n"
                        query_count += 1
                        query_info += query + ";\n"
                        write_sql(query_info)
                        print(f"Count for {eprm_table_name}: {count} PC_EXT")
                        logging.info(f"Count for {eprm_table_name}: {count}")
                        if query_count > 0:
                            try:
                                query_del = f"SELECT COUNT(*) FROM {schema_ext}.{eprm_table_name} where release_id='" + releaseId + "' AND op_id='" + opId + "' AND bu_id='" + buId + "'"
                                cursor_ext.execute(query_del)
                                result = cursor_ext.fetchone()
                                query_info = f"-- ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ STATUS|{replicationTarget}| TABLE| {eprm_table_name}\n"
                                query_info += f"-- #Query: Result:{result[0]}\n"
                                query_count += 1
                                query_info += query + ";\n"
                                write_sql(query_info)
                                print(f"Count for {eprm_table_name}: {result[0]}" + "PC_EXT for deletion")

                            except mysql.connector.Error as err:
                                print("Error - {} . Line No - {} ".format(str(exp), str(sys.exc_info()[-1].tb_lineno)))
                                print(f"Error occurred while executing the query:{query}: {err}")
                                logging.info(f"Error occurred while executing the query:{query}: {err}")
                    except mysql.connector.Error as err:
                        print("Error - {} . Line No - {} ".format(str(err), str(sys.exc_info()[-1].tb_lineno)))
                        print(f"Error occurred while executing the query :{query}: {err}")
                        logging.info(f"Error occurred while executing the query:{query}: {err}")
        logging.info("COMPLETED")
    except Exception as e:
        print("Error - {} . Line No - {} ".format(str(e), str(sys.exc_info()[-1].tb_lineno)))
        print("An Error occured while constructing dataframe:", str(e))

except mysql.connector.Error as err:
    print("Error - {} . Line No - {} ".format(str(exp), str(sys.exc_info()[-1].tb_lineno)))
    print(f"An error occurred: {err}")
    logging.info(f"An error occurred: {err}")

finally:
    # Close resources only if they were successfully opened; json_file is
    # already closed by the with-block above
    if 'cursor_ppm' in locals() and cursor_ppm:
        cursor_ppm.close()
    if 'conn_ppm' in locals() and conn_ppm:
        conn_ppm.close()
    if 'cursor_ext' in locals() and cursor_ext:
        cursor_ext.close()
    if 'connection_ext' in locals() and connection_ext:
        connection_ext.close()
    if sql_log_file:
        sql_log_file.close()


In the above script I want to modify one thing:

query = f"SELECT COUNT(*) FROM {schema_ext}.{eprm_table_name} where release_id='" + releaseId + "' AND op_id='" + opId + "' AND bu_id='" + buId + "'"

While executing the query above, I want to build a dataframe containing every column of the matching rows for each table, and then insert all of those records, exactly as fetched, at the point where query_del currently runs another count:

query_del = f"SELECT COUNT(*) FROM {schema_ext}.{eprm_table_name} where release_id='" + releaseId + "' AND op_id='" + opId + "' AND bu_id='" + buId + "'"

That is, remove the SELECT COUNT(*) there and replace it with an INSERT whose columns and values are the same as the rows fetched by the query above.
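
Below is a minimal sketch of that change, assuming the variables already in scope inside the loop above (schema_ext, eprm_table_name, releaseId, opId, buId, connection_ext, cursor_ext and the write_sql helper). It keeps the INSERT against the same connection that query_del used; in a real replication run the INSERT would normally be executed against the destination connection instead.

# Fetch the full rows (all columns) instead of only a count
select_query = (f"SELECT * FROM {schema_ext}.{eprm_table_name} "
                f"WHERE release_id='{releaseId}' AND op_id='{opId}' AND bu_id='{buId}'")
source_df = pd_read_sql(select_query, con=connection_ext)
write_sql(f"-- #Rows fetched from {eprm_table_name}: {len(source_df)}\n{select_query};\n")

if not source_df.empty:
    # Build an INSERT whose column list and values match the rows just read
    columns = source_df.columns.tolist()
    column_list = ", ".join(columns)
    placeholders = ", ".join(["%s"] * len(columns))  # use :1, :2, ... placeholders for Oracle
    insert_query = (f"INSERT INTO {schema_ext}.{eprm_table_name} "
                    f"({column_list}) VALUES ({placeholders})")

    # Convert NaN to None so the driver writes SQL NULLs, then insert all rows
    rows_to_insert = [tuple(None if pd.isna(value) else value for value in row)
                      for row in source_df.itertuples(index=False, name=None)]
    cursor_ext.executemany(insert_query, rows_to_insert)
    connection_ext.commit()
    write_sql(f"-- #Inserted {len(rows_to_insert)} rows into {eprm_table_name}\n{insert_query};\n")

Using executemany keeps the dataframe's column order and values intact for every row, so the inserted records match exactly what the SELECT * returned.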