Untitled
unknown
plain_text
2 years ago
12 kB
4
Indexable
"""
#######################################################################################################################
API Name    : cacheStore
Description : Stores and fetches the cache details kept in the Cassandra table ``cache_store``
              (keyspace ``hobs_ods_staging``).

              GET  : parameters are required to fetch the cache details; valid parameters are
                     valueType and key. Columns returned in the response are configured via the
                     ``validColumns`` property in config/dataservice/cacheStore.config.
              POST : keys allowed in the request payload are configured via the ``validKeys``
                     property in config/dataservice/cacheStore.config.

Revision History
----------------
Version     Date            Author          Description
0.1         29-Nov-2021     senthilCR       Initial version of the api to store and fetch cache details
0.2         09-Feb-2022     senthilCR       Handle value input as a dictionary or a list of dictionaries
#######################################################################################################################
"""
from models.cache_store import cache_store
from models.cache_store_by_username import cache_store_by_username
from models.cache_store_by_date import cache_store_by_date
from models.indexes import indexes
from models.columns import columns as tablecolumns
from datetime import datetime, timedelta, timezone
from dateutil.relativedelta import relativedelta
from pandas.io.json import json_normalize
import json
from config.logger import configlogfile
import base64
from flask import jsonify, make_response
import pandas as pd
import sys
import gc
import uuid
import pytz
from flask import request
from flask import Response
from json import loads as json_loads
from gc import collect as gc_collect
from dateutil.parser import parse


def getData(args, configvalues):
    """Fetch cache rows filtered by businessid, username or createdtime.

    Parameters
    ----------
    args : dict-like
        Request arguments; exactly one of ``businessid``/``username``/``createdtime``
        selects which table (base or materialized view) is queried.
    configvalues : ConfigParser
        Provides ``columnMapping`` (model column -> response field name) and
        ``validArguments`` under the ``apiconfiguration`` section.

    Returns
    -------
    tuple
        ``("200", {"data": [...]})`` on success, or an error tuple
        (``(["9003"], message)`` / ``("400", message)``) on failure.
    """
    logging = configlogfile()
    businessid = request.args.get('businessid')
    createdtime = request.args.get('createdtime')
    username = request.args.get('username')
    columnMapping = json_loads(configvalues.get('apiconfiguration', 'columnMapping', raw=True))
    try:
        logging.info("process started")
        validArguments = json_loads(configvalues.get('apiconfiguration', 'validArguments', raw=True))
        logging.info("Args.keys() : %s" % set(list(args.keys())))
        logging.info("validArguments : %s" % set(validArguments))

        # Route the query to the table/view that is partitioned by the supplied key.
        if 'businessid' in args.keys():
            results = cache_store.objects.filter(businessid=businessid)
        elif 'username' in args.keys():
            results = cache_store_by_username.objects.filter(username=username)
        elif 'createdtime' in args.keys():
            try:
                createdtime = datetime.strptime(createdtime, "%Y-%m-%d %H:%M:%S.%f%z")
            except ValueError:
                # Fall back to a lenient parse for other timestamp spellings.
                createdtime = parse(createdtime)
            results = cache_store_by_date.objects.filter(createdtime=createdtime)
        else:
            return ((["9003"], "Invalid Arguments passed to the API."))

        # Pagination: defaults guarantee page/limit are never None.
        page = request.args.get('page', 1)
        limit = request.args.get('limit', 20)
        try:
            page = int(page)
            limit = int(limit)
        except ValueError:
            # BUGFIX: original returned a single concatenated string instead of a
            # (code, message) tuple like every other error path.
            return (("400", "both page and limit must be integers"))
        offset = (page - 1) * limit
        paginated_res = results[offset:offset + limit]

        # Map model columns to the externally configured response field names.
        # NOTE: the duplicate 'valuetype' entry from the original dict was removed.
        data = [{
            columnMapping['businessid']: r.businessid,
            columnMapping['valuetype']: r.valuetype,
            columnMapping['key']: str(r.key),
            columnMapping['buid']: r.buid,
            columnMapping['createdtime']: r.createdtime.isoformat(),
            columnMapping['opid']: r.opid,
            columnMapping['updatedtime']: r.updatedtime.isoformat(),
            columnMapping['username']: r.username,
            # 'value' is a blob column; expose it as base64 text.
            columnMapping['value']: base64.b64encode(r.value).decode('utf-8'),
        } for r in paginated_res]

        logging.debug(results)
        logging.debug(data)
        return ("200", {"data": data})
    except Exception as e:
        gc_collect()
        logging.error("Error - {} . Line No - {} ".format(str(e), str(sys.exc_info()[-1].tb_lineno)))
        return ((["9003"], "Error - {} . Line No - {} ".format(str(e), str(sys.exc_info()[-1].tb_lineno))))


def postData(payLoad, configvalues):
    """Validate the request payload and persist it to cache_store and its views.

    Parameters
    ----------
    payLoad : dict
        Incoming request body; allowed/required keys come from the
        ``validKeys``/``columnMapping`` configuration.
    configvalues : ConfigParser
        Provides ``dateColumns``, ``keysToBeIgnored``, ``columnMapping``,
        ``validKeys`` and ``timeZone`` under ``apiconfiguration``.

    Returns
    -------
    tuple
        ``("200", message)`` on success; ``("422", (code, message))`` for
        validation failures; ``("500", "Technical exception")`` on errors.
    """
    logging = configlogfile()
    logging.info("Preparing to save records")
    try:
        dateColumns = json.loads(configvalues.get('apiconfiguration', 'dateColumns', raw=True))
        keysToBeIgnored = json.loads(configvalues.get('apiconfiguration', 'keysToBeIgnored', raw=True))
        columnMapping = json.loads(configvalues.get('apiconfiguration', 'columnMapping', raw=True))
        validColumns = json.loads(configvalues.get('apiconfiguration', 'validKeys', raw=True))
        timeZone = configvalues.get('apiconfiguration', 'timeZone', raw=True)
        logging.debug("arguments-" + str(payLoad))
        logging.debug("validColumns-" + str(validColumns))

        # --- payload validation -------------------------------------------------
        if payLoad is None or not isinstance(payLoad, dict) or len(payLoad) == 0:
            return (("422", ("9005", "Missing required attribute in payLoad. " + ','.join(validColumns) + ".")))
        if not set(payLoad.keys()).issubset(set(columnMapping.values())):
            return (("422", ("9006", "Field rule violation. Payload having non mapped attribute. "
                             + ','.join(set(payLoad.keys()) - set(columnMapping.values())) + ".")))
        if not set(validColumns).issubset(set(payLoad.keys())):
            return (("422", ("9005", "Field rule violation or missing required attribute. "
                             + ",".join(set(validColumns) - set(payLoad.keys())))))

        logging.info("saving records for cart details " + payLoad['businessId'] + ","
                     + payLoad['valueType'] + "," + payLoad['key'])

        # --- normalize the payload ---------------------------------------------
        data = payLoad.copy()
        now = datetime.strftime(datetime.now(pytz.timezone(timeZone)), '%Y-%m-%dT%H:%M:%S.%f%z')
        data.setdefault('createdTime', now)
        data.setdefault('updatedTime', now)
        data = {k: v for k, v in data.items() if v is not None}          # drop null fields
        data = {k: v for k, v in data.items() if k not in keysToBeIgnored}
        data = {k: v for k, v in data.items() if k in columnMapping.values()}  # drop unmapped fields
        # Rename payload aliases to their model column names.
        for column, alias in columnMapping.items():
            if alias in data:
                data[column] = data.pop(alias)

        # --- build model keyword arguments -------------------------------------
        # BUGFIX: the original concatenated Python source and ran it through
        # exec(), which was injectable via quote characters in string fields and
        # contained an undefined-name branch (`type(b) == blob`). The rows are
        # now constructed directly.
        row = {}
        for k, v in data.items():
            if k in dateColumns:
                try:
                    row[k] = datetime.strptime(v, '%Y-%m-%dT%H:%M:%S.%f%z')
                except ValueError:
                    return (("422", ("9005", "Field rule violation. " + k
                                     + " field needs to be in 2020-01-01T00:00:00.000000+0530 format")))
            elif k == 'value':
                # 'value' is a blob column: serialize dict/list payloads to JSON,
                # then store as UTF-8 bytes.
                row[k] = (json.dumps(v) if isinstance(v, (list, dict)) else str(v)).encode('utf-8')
            else:
                row[k] = v

        logging.debug("row columns - " + ",".join(row.keys()))
        # Write the base table and both query views so reads stay consistent.
        cache_store(**row).save()
        cache_store_by_username(**row).save()
        cache_store_by_date(**row).save()
        gc.collect()
        logging.info("Successfully saved the cache details")
        return (("200", "Successfully saved the cache details"))
    except Exception as e:
        gc.collect()
        logging.error("Error - {} . Line No - {} ".format(str(e), str(sys.exc_info()[-1].tb_lineno)))
        return (("500", "Technical exception"))
Editor is loading...