import base64
import gc
import json
import sys
from datetime import datetime

import pandas as pd

# NOTE: cache_store, cache_store_by_username, cache_store_by_date, indexes,
# tablecolumns, configlogfile and the Flask `request` object are assumed to be
# imported from the application's own modules.


def getData(payLoad, configvalues):
    from config.logger import configlogfile
    logging = configlogfile()
    logging.info("Started to fetch the records for " + str(payLoad))
    cacheStore = []
    try:
        payLoad = payLoad.to_dict()
        columnMapping = json.loads(configvalues.get('apiconfiguration', 'columnMapping', raw=True))
        reversecolumnMapping = {y: x for x, y in columnMapping.items()}
        validColumns = json.loads(configvalues.get('apiconfiguration', 'validColumns', raw=True))
        datecolumns = json.loads(configvalues.get('apiconfiguration', 'dateColumns', raw=True))
        logging.debug('Arguments -' + str(payLoad))
        # Translate external (API) argument names back to internal column names
        for k, v in reversecolumnMapping.items():
            if k in list(payLoad.keys()):
                payLoad[v] = payLoad.pop(k)
        # Modified on 18-Feb to fetch data based on keyspace and then table name:
        # collect the indexed, partition-key and clustering columns of cache_store,
        # since only these may be used as filter parameters.
        recs = indexes.objects().filter(keyspace_name=cache_store.__keyspace__)
        recs = recs.filter(table_name='cache_store')
        indexedcolumns = [row.options['target'] for row in recs]
        recs = tablecolumns.objects().filter(keyspace_name=cache_store.__keyspace__)
        recs = recs.filter(table_name='cache_store')
        partitioncolumns = [row.column_name for row in recs if row.kind in ["partition_key"]]
        partitioncolumns += [row.column_name for row in recs if row.kind in ["primary_key", "clustering"]]
        parametercolumns = partitioncolumns + indexedcolumns
        partitioncolstofilter = [parametercolumn for parametercolumn in parametercolumns
                                 if parametercolumn in list(payLoad.keys())]
        if not payLoad:
            return (("200", "parameters need to be passed to fetch values from the cache store"))
        else:
            if set(payLoad.keys()).issubset(parametercolumns):
                # Build the filter chain one column at a time; date columns are parsed
                # with strptime. Each statement is assembled as a string and run with
                # exec(), declaring cacheStoreRecords global so it survives the loop.
                for i in range(len(partitioncolstofilter)):
                    if i == 0:
                        if partitioncolstofilter[i] in datecolumns:
                            query = 'global cacheStoreRecords;cacheStoreRecords=cache_store.objects().filter(' + \
                                    partitioncolstofilter[i] + '=datetime.strptime(\'' + \
                                    str(payLoad[partitioncolstofilter[i]]) + '\',\'%Y-%m-%dT%H:%M:%S%z\'));'
                        else:
                            query = 'global cacheStoreRecords;cacheStoreRecords=cache_store.objects().filter(' + \
                                    partitioncolstofilter[i] + '=\'' + str(payLoad[partitioncolstofilter[i]]) + '\');'
                    else:
                        if partitioncolstofilter[i] in datecolumns:
                            query = 'global cacheStoreRecords;cacheStoreRecords=cacheStoreRecords.filter(' + \
                                    partitioncolstofilter[i] + '=datetime.strptime(\'' + \
                                    str(payLoad[partitioncolstofilter[i]]) + '\',\'%Y-%m-%dT%H:%M:%S%z\'));'
                        else:
                            query = 'global cacheStoreRecords;cacheStoreRecords=cacheStoreRecords.filter(' + \
                                    partitioncolstofilter[i] + '=\'' + str(payLoad[partitioncolstofilter[i]]) + '\');'
                    exec(query)
            else:
                # Report the valid arguments using their external (API) names
                for i in range(len(parametercolumns)):
                    for k, v in reversecolumnMapping.items():
                        if v == parametercolumns[i]:
                            parametercolumns[i] = k
                return (("200", ("9003", "Invalid Arguments passed to the API. Valid Arguments are " + ','.join(parametercolumns))))
        if len(cacheStoreRecords) == 0:
            return (("200", {}))
        cacheStore = [row.__json__() for row in cacheStoreRecords]
        cacheStore = pd.DataFrame.from_dict(cacheStore, orient='columns')
        cacheStore.fillna('', inplace=True)
        if len(cacheStore) > 0:
            # Convert date columns from UTC to the configured time zone and blank out NaT values
            for column in datecolumns:
                if column in cacheStore.columns.tolist():
                    cacheStore[column] = pd.to_datetime(cacheStore[column], unit='ns')
                    cacheStore[column] = cacheStore[column].dt.tz_localize('UTC').dt.tz_convert(
                        configvalues.get('apiconfiguration', 'timeZone')).dt.strftime('%Y-%m-%dT%H:%M:%S.%f%z')
                    cacheStore[column] = ["" if columnValue == "NaT" else columnValue
                                          for columnValue in cacheStore[column].to_list()]
        # Keep only the valid columns and rename them to their external (API) names
        cacheStore = cacheStore[validColumns]
        validColumns = {k: v for k, v in columnMapping.items() if k in validColumns}
        cacheStore = cacheStore.rename(columns=validColumns)
        cacheStore = cacheStore.to_dict(orient='records')
        if len(cacheStore) > 0:
            cacheStore = [{k: (v.encode().decode() if k == 'value' else v) for k, v in x.items()} for x in cacheStore]
        logging.debug('cacheStore-' + str(cacheStore))
        response = {}
        response = response if len(cacheStore) == 0 else cacheStore[0] if len(cacheStore) == 1 else cacheStore
        logging.info("Completed fetching the records")
        gc.collect()
        return ((200, response))
    except Exception as e:
        gc.collect()
        logging.error("Error - {} . Line No - {} ".format(str(e), str(sys.exc_info()[-1].tb_lineno)))
        return (("500", "Technical exception"))
def getData(args, configvalues):  # UI Based Calling
    logging = configlogfile()
    __funcReturnAPICode = '0000'
    __funcReturnDesc = 'Successfully Completed the Process'
    businessid = request.args.get('businessid')
    createdtime = request.args.get('createdtime')
    username = request.args.get('username')
    startDate = request.args.get('startDate')
    endDate = request.args.get('endDate')
    columnMapping = json.loads(configvalues.get('apiconfiguration', 'columnMapping', raw=True))
    try:
        logging.info("process started")
        validArguments = json.loads(configvalues.get('apiconfiguration', 'validArguments', raw=True))
        logging.info("Args.keys() : %s" % set(args.keys()))
        logging.info("validArguments : %s" % set(validArguments))
        # Pick the table that matches the filter argument supplied by the UI
        if 'businessid' in args.keys():
            results = cache_store.objects.filter(businessid=businessid)
        elif 'username' in args.keys():
            username = username.strip('[]').split(',')
            results = cache_store_by_username.objects.filter(username__in=username)
        elif 'createdtime' in args.keys():
            try:
                createdtime = datetime.strptime(createdtime, '%Y-%m-%d')
            except ValueError:
                return ((["9003"], "Invalid createdtime format. Use yyyy-mm-dd."))
            start_date = createdtime.replace(hour=0, minute=0, second=0)
            end_date = createdtime.replace(hour=23, minute=59, second=59)
            results = cache_store_by_date.objects.filter(createdtime__gte=start_date,
                                                         createdtime__lte=end_date).allow_filtering()
        else:
            return ((["9003"], "Invalid Arguments passed to the API."))
        # Paginate the result set (defaults: page 1, 20 rows per page)
        page = request.args.get('page', 1)
        limit = request.args.get('limit', 20)
        if page is not None and limit is not None:
            try:
                page = int(page)
                limit = int(limit)
            except ValueError:
                return (("400", "both page and limit must be integers"))
        offset = (page - 1) * limit if page is not None and limit is not None else 0
        paginated_res = results[offset:offset + limit]
        # Map internal column names to their external (API) names in the response
        data = [{
            columnMapping['businessid']: r.businessid,
            columnMapping['valuetype']: r.valuetype,
            columnMapping['key']: str(r.key),
            columnMapping['buid']: r.buid,
            columnMapping['createdtime']: r.createdtime.isoformat(),
            columnMapping['opid']: r.opid,
            columnMapping['updatedtime']: r.updatedtime.isoformat(),
            columnMapping['username']: r.username,
            columnMapping['value']: base64.b64encode(r.value).decode('utf-8')
        } for r in paginated_res]
        logging.debug(results)
        logging.debug(data)
        response_data = {
            "data": data
        }
        return ("200", response_data)
    except Exception as e:
        gc.collect()
        logging.error("Error - {} . Line No - {} ".format(str(e), str(sys.exc_info()[-1].tb_lineno)))
        return ((["9003"], "Error - {} . Line No - {} ".format(str(e), str(sys.exc_info()[-1].tb_lineno))))
Can you tell me what logic is missing in the 2nd function compared to the first function?
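
Compared side by side, the second function omits several steps the first one performs: it loads validArguments but never validates the incoming args against it; it reads startDate and endDate without using them; it does not convert createdtime/updatedtime into the configured timeZone before formatting (it returns raw isoformat()); it does not restrict the response to validColumns; it has no explicit empty-result handling; it never unwraps a single-record result the way the first function returns cacheStore[0]; and it skips gc.collect() on the success path. As one illustration, below is a minimal sketch of the missing time-zone step, assuming Python 3.9+ zoneinfo, the same 'timeZone' config key used by the first function, and naive UTC datetimes coming back from the table; to_local_iso is a hypothetical helper, not part of the original code.

from zoneinfo import ZoneInfo  # Python 3.9+

def to_local_iso(dt, configvalues):
    # Format a stored UTC datetime in the configured time zone, mirroring the
    # tz_localize('UTC').tz_convert(timeZone) step of the first function.
    if dt is None:
        return ""
    tz = ZoneInfo(configvalues.get('apiconfiguration', 'timeZone'))
    return dt.replace(tzinfo=ZoneInfo("UTC")).astimezone(tz).strftime('%Y-%m-%dT%H:%M:%S.%f%z')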