from models.cache_store import cache_store
from models.indexes import indexes
from models.columns import columns as tablecolumns
from datetime import datetime
from config.logger import configlogfile
import json
import pandas as pd
import sys
import gc
import pytz
def getData(payLoad, configvalues):
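    """
    Fetch records from the cache_store table, filtering on the partition key,
    clustering columns and secondary-indexed columns supplied in the payload.
    """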
    logging = configlogfile()
    logging.info("Started to fetch the records for " + str(payLoad))
cacheStore = []
try:
payLoad = payLoad.to_dict()
columnMapping = json.loads(configvalues.get('apiconfiguration', 'columnMapping', raw=True))
reversecolumnMapping = {y: x for x, y in columnMapping.items()}
validColumns = json.loads(configvalues.get('apiconfiguration', 'validColumns', raw=True))
datecolumns = json.loads(configvalues.get('apiconfiguration', 'dateColumns', raw=True))
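        # columnMapping maps internal column names to their external (API) names;
        # validColumns lists the columns exposed in responses, and datecolumns the
        # timestamp columns that need parsing/formatting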
        logging.debug('Arguments - ' + str(payLoad))
        # translate external (API) field names back to internal column names
        for k, v in reversecolumnMapping.items():
            if k in payLoad:
                payLoad[v] = payLoad.pop(k)
        # fetch data based on keyspace and then table name: discover the
        # secondary-index targets plus the partition-key and clustering columns
        recs = indexes.objects().filter(keyspace_name=cache_store.__keyspace__)
        recs = recs.filter(table_name='cache_store')
        indexedcolumns = [row.options['target'] for row in recs]
        recs = tablecolumns.objects().filter(keyspace_name=cache_store.__keyspace__)
        recs = recs.filter(table_name='cache_store')
        partitioncolumns = [row.column_name for row in recs if row.kind == 'partition_key']
        partitioncolumns += [row.column_name for row in recs if row.kind in ('primary_key', 'clustering')]
        parametercolumns = partitioncolumns + indexedcolumns
        partitioncolstofilter = [column for column in parametercolumns if column in payLoad]
        if not payLoad:
            return (("200", "Parameters need to be passed to fetch values from the cache store"))
        else:
            if set(payLoad.keys()).issubset(parametercolumns):
                # build the filter keyword arguments directly instead of composing
                # an exec() string; this also avoids quoting issues in the values
                filterArguments = {}
                for column in partitioncolstofilter:
                    if column in datecolumns:
                        filterArguments[column] = datetime.strptime(str(payLoad[column]), '%Y-%m-%dT%H:%M:%S%z')
                    else:
                        filterArguments[column] = str(payLoad[column])
                cacheStoreRecords = cache_store.objects().filter(**filterArguments)
            else:
                # report the valid argument names using their external (API) names
                parametercolumns = [columnMapping.get(column, column) for column in parametercolumns]
                return (("200", ("9003", "Invalid arguments passed to the API. Valid arguments are " + ','.join(parametercolumns))))
        if len(cacheStoreRecords) == 0:
            return (("200", {}))
        cacheStore = [row.__json__() for row in cacheStoreRecords]
        cacheStore = pd.DataFrame.from_dict(cacheStore, orient='columns')
        cacheStore.fillna('', inplace=True)
        if len(cacheStore) > 0:
            for column in datecolumns:
                if column in cacheStore.columns.tolist():
                    # render timestamps in the configured time zone; blank out NaT values
                    cacheStore[column] = pd.to_datetime(cacheStore[column], unit='ns')
                    cacheStore[column] = cacheStore[column].dt.tz_localize('UTC').dt.tz_convert(
                        configvalues.get('apiconfiguration', 'timeZone')).dt.strftime('%Y-%m-%dT%H:%M:%S.%f%z')
                    cacheStore[column] = ["" if columnValue == "NaT" else columnValue
                                          for columnValue in cacheStore[column].to_list()]
        cacheStore = cacheStore[validColumns]
        # rename internal column names to their external (API) names
        validColumns = {k: v for k, v in columnMapping.items() if k in validColumns}
        cacheStore = cacheStore.rename(columns=validColumns)
        cacheStore = cacheStore.to_dict(orient='records')
        if len(cacheStore) > 0:
            # round-trip the blob-backed 'value' field through UTF-8 so it is
            # returned as a plain string
            cacheStore = [{k: (v.encode().decode() if k == 'value' else v) for k, v in x.items()}
                          for x in cacheStore]
        logging.debug('cacheStore-' + str(cacheStore))
        # unwrap: {} for no records, a dict for one record, a list for many
        if len(cacheStore) == 0:
            response = {}
        elif len(cacheStore) == 1:
            response = cacheStore[0]
        else:
            response = cacheStore
logging.info("Completed fetching the records")
gc.collect()
return ((200,(response)))
except Exception as e:
gc.collect()
logging.error("Error - {} . Line No - {} ".format(str(e), str(sys.exc_info()[-1].tb_lineno)))
return (("500", "Technical exception"))
"""
This function is used to store the request details in cache_store table
"""
def postData(payLoad, configvalues):
logging = configlogfile()
logging.info("Preparing to save records" )
try:
dateColumns = json.loads(configvalues.get('apiconfiguration', 'dateColumns', raw=True))
keysToBeIgnored = json.loads(configvalues.get('apiconfiguration', 'keysToBeIgnored', raw=True))
columnMapping = json.loads(configvalues.get('apiconfiguration', 'columnMapping', raw=True))
validColumns = json.loads(configvalues.get('apiconfiguration', 'validKeys', raw=True))
timeZone = configvalues.get('apiconfiguration', 'timeZone', raw=True)
logging.debug("arguments-" + str(payLoad))
logging.debug("validColumns-" + str(validColumns))
logging.debug("payLoad[before]-" + str(payLoad))
        if payLoad is None or not isinstance(payLoad, dict) or len(payLoad) == 0:
            return (("422", ("9005", "Missing required attribute in payLoad. " + ','.join(validColumns) + ".")))
        if not set(payLoad.keys()).issubset(set(columnMapping.values())):
            return (("422", ("9006", "Field rule violation. Payload has unmapped attributes: " +
                             ','.join(set(payLoad.keys()) - set(columnMapping.values())) + ".")))
        if not set(validColumns).issubset(set(payLoad.keys())):
            return (("422", ("9005", "Field rule violation or missing required attribute. " +
                             ",".join(set(validColumns) - set(payLoad.keys())))))
logging.info("saving records for cart details " + payLoad['businessId'] + "," + payLoad['valueType'] + "," + payLoad['key'])
#if 'value' in payLoad.keys() and not (isinstance(payLoad['value'], list) or isinstance(payLoad['value'],dict)):
# return (("422", ("9005", "Field rule violation. value field needs to be in list or dictionary format")))
data = {}
data = payLoad.copy()
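        # default the audit timestamps to "now" in the configured time zone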
if 'createdTime' not in data.keys():
data['createdTime'] = datetime.strftime(datetime.now(pytz.timezone(timeZone)),'%Y-%m-%dT%H:%M:%S.%f%z')
if 'updatedTime' not in data.keys():
data['updatedTime'] = datetime.strftime(datetime.now(pytz.timezone(timeZone)),'%Y-%m-%dT%H:%M:%S.%f%z')
data = {k: v for k, v in data.items() if v is not None} # removes the fields with null values
data = {k: v for k, v in data.items() if k not in keysToBeIgnored}
data = {k: v for k, v in data.items() if k in columnMapping.values()} # remove fields not mapped
        # translate external field names to internal column names
        for k, v in columnMapping.items():
            if v in data:
                data[k] = data.pop(v)
insertSql = ""
for k, v in data.items():
logging.debug(k)
logging.debug(v)
logging.debug(type(v));
if k in dateColumns: # formats the date columns in the request
logging.debug("processing date columns")
try:
datetime.strptime(v,'%Y-%m-%dT%H:%M:%S.%f%z')
insertSql = insertSql + k + "=datetime.strptime('" + v + "','%Y-%m-%dT%H:%M:%S.%f%z'),"
except ValueError:
return (("422", ("9005", "Field rule violation. " + k + " field needs to be in 2020-01-01T00:00:00.000000+0530 format")))
elif type(v) == str:
if k == 'value':
logging.debug('value-' + str(v))
insertSql = insertSql + str(k) + "='" + str( str(v))+ "'.encode('utf-8')," # convert the string to bytes to store data in blob column
else:
if not v.isalnum(): # logic included to check for alphanumeric string and enclose string with double quotes
insertSql = insertSql + str(k) + "=\"\"\"" + v + "\"\"\","
#insertSql = insertSql + str(k) + "='" + str(v) + "',"
else:
insertSql = insertSql + str(k) + "='" + str(v) + "',"
elif isinstance(v, list) or isinstance(v, dict):
if k == 'value':
insertSql = insertSql + str(k) + "=b'" + json.dumps(v) + "',"
else:
insertSql = insertSql + str(k) + "=" + str(v) + ","
elif type(b) == blob:
insertSql = insertSql + str(k) + "=textAsBlob('" + str(v) + "),"
else:
insertSql = insertSql + str(k) + "='" + str(v) + "',"
insertSql = insertSql[0:len(insertSql) - 1]
insertSql = "global cacheStoreData; cacheStoreData = cache_store(" + insertSql + ")"
logging.debug('insertSql')
logging.debug(insertSql)
#return ((["0000"],insertSql))
exec(insertSql)
cacheStoreData.save()
        gc.collect()
        logging.info("Successfully saved the cache details")
        return (("200", "Successfully saved the cache details"))
except Exception as e:
gc.collect()
logging.error("Error - {} . Line No - {} ".format(str(e), str(sys.exc_info()[-1].tb_lineno)))
return (("500", "Technical exception"))