import pandas as pd
import mysql.connector
import json
from base64 import b64decode as base_b64decode
from base64 import b64encode as base_b64encode
import logging
import datetime
import os
from sqlalchemy import create_engine as sqlalchemy_create_engine
from pandas import read_sql as pd_read_sql
from pandas import DataFrame as pd_DataFrame
import sys
# Used only by func_pandas_factory (Cassandra row factory); requires the cassandra-driver package
from cassandra.util import OrderedMapSerializedKey as cass_OrderedMapSerializedKey

#def func_pandas_factory(__col_names, __rows):
#    return pd_DataFrame(__rows, columns=__col_names)
def func_pandas_factory(__col_names, __rows):
    print("Row_Factory")
    # Convert tuple items of '__rows' into list (elements of tuples cannot be replaced)
    __rows = [list(i) for i in __rows]
    # Convert only 'OrderedMapSerializedKey' type list elements into dict
    for idx_row, i_row in enumerate(__rows):
        for idx_value, i_value in enumerate(i_row):
            if type(i_value) is cass_OrderedMapSerializedKey:
                __rows[idx_row][idx_value] = dict(__rows[idx_row][idx_value])
    return pd_DataFrame(__rows, columns=__col_names)
# endDef==>func_pandas_factory()


releaseId = '275.2'
releaseType = 'Deployed'
replicationTarget = 'SOURCE'
catalogId = ''
opId = 'HOB'
buId = 'DEFAULT'
replicationJobId = 'REP_990_234'
json_file_path = "/app/scripts/PPM_Release_Management/Product_Catalog_ETL/config/ppm_reply.json"
sql_log_file = f"/app/scripts/PPM_Release_Management/Product_Catalog_ETL/logs/{replicationJobId}ppm_reply.sql"
log_file = f'/app/scripts/PPM_Release_Management/Product_Catalog_ETL/logs/{replicationJobId}ppm_reply.log'
# sql_log_file = os.get_path("PPM_PC_LOG") + "/" + replicationJobId + "_ppm_pc_replication_delete.sql"
# json_file_path = os.get_path("PPM_PC_CONFIG") + "/ppm_pc_replication.json"
# log_file = os.get_path("PPM_PC_LOG") + "/" + replicationJobId + "_ppm_pc_replication_delete.log"
logging.basicConfig(
    filename=log_file,
    level=logging.INFO, format='%(asctime)s - %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
query_count = 0
sql_log_file = open(sql_log_file, "w")


def write_sql(query_info):
    sql_log_file.write(query_info)
    sql_log_file.write('\n')


try:
    def connect_to_database(json_data, replicationTarget):
        # Read the connection settings for the requested target from the JSON config
        db_cfg = json_data.get(replicationTarget, {})
        encrypt = db_cfg.get('ENCRYPT')
        host = db_cfg.get('DB_HOST')
        port = db_cfg.get('DB_PORT')
        user = db_cfg.get('DB_USER')
        db_type = db_cfg.get('DB_TYPE')
        schema = db_cfg.get('DB_SCHEMA')
        if encrypt == 'Y':
            # Password is stored base64-encoded when ENCRYPT is 'Y'
            password = base_b64decode(db_cfg.get('DB_PASSWORD')).decode('utf-8')
        else:
            password = db_cfg.get('DB_PASSWORD')
        if db_type == 'MYSQL':
            cnx = mysql.connector.connect(user=user, password=password, host=host, port=port)
            cursor = cnx.cursor()
            logging.info(f"connected to database server {replicationTarget}: {host}:{port}")
        elif db_type == 'ORACLE':
            import oracledb
            oracle_mode = oracledb.is_thin_mode()
            print("Oracle mode: %s" % oracle_mode)
            if oracle_mode:
                oracledb.init_oracle_client()
                print("Enabled python-oracledb Thick mode")
            else:
                print("Default python-oracledb Thick mode")

            cnx_text = ('oracle://%s:%s@%s:%s/?service_name=%s' % (user, password, host, port, schema))
            engine = sqlalchemy_create_engine(cnx_text, encoding="utf8")
            cnx = engine.raw_connection()
            cursor = cnx.cursor()
        return cnx, cursor, schema


    with open(json_file_path) as json_file:
        json_data = json.load(json_file)

    conn_ppm, cursor_ppm, schema_ppm = connect_to_database(json_data, 'PPM_PC')
    ##cursor_ppm = conn_ppm.cursor()
    primary_query = f"SELECT * FROM {schema_ppm}.etl_ppm_replication_master"
    df = pd_read_sql(primary_query, con=conn_ppm)
    columns = df.columns.tolist()
    rows = df.values.tolist()

    # replicationTarget

    connection_tar, cursor_tar, schema_tar = connect_to_database(json_data, replicationTarget)
    # cursor_tar = connection_tar.cursor()
    replaced_string = ""

    if releaseType.casefold() == 'DEPLOYED'.casefold():
        logging.info(f"processing - {releaseType}")
        order = ['MASTER-CHILD', 'AUDIT-CHILD', 'MASTER', 'AUDIT', 'RELEASE']
        logging.info("order of execution - 'MASTER-CHILD', 'AUDIT-CHILD', 'MASTER', 'AUDIT', 'RELEASE'")
        filtered_df = df.loc[
            df['eprm_catalog'].isin(['PC', 'RELEASE'])
            & df['eprm_enabled_flg'].isin(['Y'])
            & df['eprm_table_type'].isin(order)
        ].copy()
        filtered_df['eprm_table_type'] = pd.Categorical(filtered_df['eprm_table_type'], categories=order, ordered=True)
        filtered_df = filtered_df.sort_values('eprm_table_type')
        df = df.sort_values('eprm_seq_nbr', ascending=False)

        for _, row in filtered_df.iterrows():
            eprm_table_name = row['eprm_table_name']
            eprm_join_cols_entity = row['eprm_join_cols_entity']
            eprm_join_cols_reim = row['eprm_join_cols_reim']
            eprm_table_alias = row['eprm_table_alias']
            eprm_table_type = row['eprm_table_type']
            eprm_parent_table_name = row['eprm_parent_table_name']

            if eprm_table_type == 'AUDIT':
                eprm_table_col_pk = row['eprm_table_col_pk']
                query = f"SELECT COUNT(*) FROM {schema_tar}.{eprm_table_name} WHERE (" + eprm_table_col_pk + f") IN (SELECT entity_ref_nbr FROM  {schema_tar}.release_entity_inst_map WHERE release_id='" + releaseId + "' AND op_id='" + opId + "' AND bu_id='" + buId + "')"
                try:
                    logging.info(f"processing {eprm_table_type}")
                    cursor_tar.execute(query)
                    result = cursor_tar.fetchone()
                    query_info = f"-- ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ STATUS|{replicationTarget}| TABLE| {eprm_table_name}\n"
                    query_info += f"-- #Query: Result:{result[0]}\n"
                    query_count += 1
                    query_info += query + ";\n"
                    write_sql(query_info)
                    print(f"Count for {eprm_table_name}: {result[0]} (audit)")
                    logging.info(f"Count for {eprm_table_name}: {result[0]} (audit)")
                except mysql.connector.Error as err:
                    print("Error - {} . Line No - {} ".format(str(err), str(sys.exc_info()[-1].tb_lineno)))
                    print(f"Error occurred while executing the query:{query}: {err}")
                    logging.error(f"Error occurred while executing the query:{query}: {err}")

OK, now I want to make the same change in the code above: at line 127, use pd_read_sql instead of

cursor_tar.execute(query)
result = cursor_tar.fetchone()
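
One way to make that change (a minimal sketch, assuming connection_tar is a DBAPI connection that pd_read_sql will accept, the same way conn_ppm is already passed to pd_read_sql for primary_query above) is to read the COUNT(*) query into a one-row DataFrame and take its single value. Wrapping that value back into a one-element tuple keeps the rest of the block, which reads result[0], unchanged:

# Sketch: pd_read_sql returns a one-row, one-column DataFrame for a COUNT(*) query
count_df = pd_read_sql(query, con=connection_tar)
# Keep the tuple shape that cursor_tar.fetchone() returned so result[0] still works
result = (int(count_df.iloc[0, 0]),)

pandas may warn that DBAPI connections other than sqlite3 are not officially supported (it prefers SQLAlchemy connectables), but the call behaves the same as the earlier pd_read_sql read of etl_ppm_replication_master. Also note that a failing query may then surface as a pandas DatabaseError rather than mysql.connector.Error, so the surrounding except clause may need to be widened.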