Untitled
dwdunknown
python
3 years ago
5.1 kB
6
Indexable
def run_validator_LMI(download_f_name, s3_file_name):
    """Validate an LMI Excel workbook field by field and record results as JSON.

    Parameters
    ----------
    download_f_name : str
        Local path of the downloaded workbook; deleted before returning.
    s3_file_name : str
        Original S3 object key; its extension and stem drive the JSON path.

    Returns
    -------
    tuple[bool, str | None]
        (validation_status, json_result_file_path). ``file_path`` is ``None``
        when the file extension was not .xls/.xlsx (the original raised a
        NameError here because ``file_path`` was never assigned).
    """
    total_start_time = time.time()  # for overall duration reporting
    file_access_date = datetime.now().strftime("%Y%m%d")
    file_path = None  # BUG FIX: was unbound when the extension check failed

    # BUG FIX: the original tested `(".xls" or ".xlsx") in s3_file_name`,
    # which short-circuits to ".xls" and therefore matched ".xlsx" files
    # only by accident of the substring; endswith with a tuple is the
    # correct check.
    if s3_file_name.endswith((".xls", ".xlsx")):
        extent = ".xlsx" if s3_file_name.endswith(".xlsx") else ".xls"

        # NOTE(review): helper name carries a typo ("valdiate") but is
        # defined elsewhere in the project, so it cannot be renamed here.
        date_status, date_ex = valdiate_date_version(download_f_name)
        # BUG FIX: removed the debug override that forced
        # date_status = True and date_ex = "20220416" — it silently
        # discarded the real date-version validation result.

        filename = s3_file_name.replace(extent, "")
        file_path = modify_bulk_path(file_access_date, date_ex, filename)
        create_json_result(fp=file_path)  # creates the JSON result file if absent

        if not date_status:
            # duplicate date version: only append the result, skip validation
            duplicate_date_results(fp=file_path)
        else:
            d = pd.read_excel(download_f_name)
            d = d.rename(columns=lambda x: x.strip())  # strip blanks from headers
            counter = 0
            for column in list(d.columns.values):
                if column in ("DATA_SOURCE_ID",):  # identifier column, not validated
                    continue
                print(c(column, "blue"))
                for value in d[column]:
                    # an extracted cell may hold several tilde-separated entries
                    v_list = tilda_sep_config(value, column)
                    table, field = nordea_config(column)
                    for entry in v_list:
                        prepared = prep_value(column, entry)
                        if prepared == "skip_validation":
                            continue
                        f = FieldValidation(table, field, prepared, date_ex, file_path)
                        f.validateField()
                        counter += 1
            print("number of validated fields is:", c(counter, "yellow"))
            duration_print(f" {s3_file_name}", total_start_time)
    else:
        # BUG FIX: message previously claimed ".txt or .xml" were expected,
        # contradicting the .xls/.xlsx check above.
        print(c("File not processed: file extension not .xls or .xlsx", "red"))

    os.remove(download_f_name)  # clean up the downloaded workbook
    validation_status = True
    # BUG FIX: corrected "duaration" typo in the log label.
    duration_print(" validation duration", total_start_time)
    remove_7_days_old_json()
    return validation_status, file_path


from run_validator_modify_bulk import run_validator
from run_validator_LMI import run_validator_LMI


def get_file_from_s3(filename, bucket="slm-modify-bulk-profile"):
    """Download from *bucket* every S3 object whose key contains *filename*.

    Parameters
    ----------
    filename : str
        Substring matched against each object key in the bucket.
    bucket : str
        S3 bucket name. Defaults to the value the original hard-coded, so
        existing one-argument calls keep working (the original made
        ``bucket`` mandatory yet was called with one argument — TypeError).
    """
    # SECURITY FIX: the original embedded a literal AWS access key id and
    # secret access key. They were removed in favour of the default boto3
    # credential chain (environment variables, ~/.aws/credentials, or an
    # IAM role). The leaked key pair must be rotated immediately.
    session = boto3.Session()
    s3 = session.resource("s3")
    # (Removed the original's dead duplicate `my_bucket` / `s3` assignments.)
    my_bucket = s3.Bucket(bucket)

    for bucket_object in my_bucket.objects.all():
        s3_file_name = bucket_object.key
        print(filename)
        print(s3_file_name)
        if filename in s3_file_name:
            download_f_name = os.path.join("download", s3_file_name)
            print(download_f_name)
            my_bucket.download_file(Key=s3_file_name, Filename=download_f_name)
            print(f"File {download_f_name} downloaded from s3 bucket.")


def run_modify_bulk(download_f_name, filename):
    """Validate a downloaded template, then import it when validation passed.

    Parameters
    ----------
    download_f_name : str
        Local path of the file to validate/import.
    filename : str
        Original file name; its prefix selects the validator.
    """
    # BUG FIX: the original called run_validator unconditionally and then
    # a second time inside the "Modify bulk" branch — duplicate work. The
    # else branch preserves the original fallback for other file names.
    if "Modify bulk" in filename:
        v_stat, json_fp = run_validator(download_f_name, filename)
    elif "LMI" in filename:
        v_stat, json_fp = run_validator_LMI(download_f_name, filename)
    else:
        v_stat, json_fp = run_validator(download_f_name, filename)
    validation_status = read_validation_results(json_fp)

    # import only when validation did not explicitly fail
    if validation_status != False:  # noqa: E712 — preserves original None-passes semantics
        run_import_data_nordea(download_f_name)
        import_status(json_fp)


if __name__ == "__main__":
    # BUG FIX: the original reassigned `fname` six times and then made a
    # single call — only the last Modify-bulk template would ever have been
    # fetched, and the call was missing the (then-mandatory) bucket
    # argument, raising TypeError. Each template is now processed in turn.
    modify_bulk_templates = [
        "Modify bulk-FPFA_Address-20220414 v0.1.xlsx",
        "Modify bulk-FPFA_Bank-20220414 v0.1.xlsx",
        "Modify bulk-FPFA_Country_Data-20220414 v0.1.xlsx",
        "Modify bulk-FPFA_Sanctions_Country-20220414 v0.1.xlsx",
        "Modify bulk-FPFA_Source_Data-20220414 v0.1.xlsx",
        "Modify bulk-FPFA_Vessel_Details-20220414 v0.1.xlsx",
    ]
    for fname in modify_bulk_templates:
        get_file_from_s3(fname)

    # Nordea internals
    fname = "LMI ISS Input Template Entity Individual 20220314 v0.1.xlsx"
    get_file_from_s3(fname, "slm-modify-bulk-profile")
Editor is loading...