# Cache-store lookup helper (recovered from a paste-site export; site chrome removed).
from models.cache_store import cache_store
from models.cache_store_by_username import cache_store_by_username
from models.cache_store_by_date import cache_store_by_date
from models.indexes import indexes
from models.columns import columns as tablecolumns
from datetime import datetime, timedelta, timezone
from dateutil.relativedelta import relativedelta
from pandas.io.json import json_normalize
import json
from config.logger import configlogfile

from flask import jsonify, make_response
import pandas as pd
import sys
import gc
import uuid
import pytz

"""
This function is used to fetch cache details based on valueType and key
"""


def getData(payLoad, configvalues):
    """
    Fetch cache-store records matching the filter values in ``payLoad``.

    Parameters
    ----------
    payLoad : request-args object exposing ``.to_dict()`` (e.g. a werkzeug
        MultiDict); keys are external API column names, translated to the
        internal table column names via the configured ``columnMapping``.
    configvalues : ConfigParser-like object holding the ``apiconfiguration``
        section (``columnMapping``, ``validColumns``, ``dateColumns``,
        ``timeZone``).

    Returns
    -------
    tuple
        ``(200, response)`` on success — *response* is ``{}`` for no match,
        a dict for a single match, or a list of dicts for multiple matches;
        ``("200", message)`` when no usable filter parameters were supplied;
        ``("500", "Technical exception")`` on any error.
    """
    logging = configlogfile()
    logging.info("Started to fetch the records for " + str(payLoad))

    try:
        payLoad = payLoad.to_dict()
        columnMapping = json.loads(configvalues.get('apiconfiguration', 'columnMapping', raw=True))
        reversecolumnMapping = {y: x for x, y in columnMapping.items()}
        validColumns = json.loads(configvalues.get('apiconfiguration', 'validColumns', raw=True))
        datecolumns = json.loads(configvalues.get('apiconfiguration', 'dateColumns', raw=True))
        datecolumns.append('createdtime')  # 'createdtime' is always a timestamp column
        logging.debug('Arguments -' + str(payLoad))

        # Bail out early: with no parameters there is nothing to filter on.
        if not payLoad:
            return (("200", "Parameters need to be passed to fetch values from the cache store"))

        # Translate external (API) column names to internal (table) names.
        for external, internal in reversecolumnMapping.items():
            if external in payLoad:
                payLoad[internal] = payLoad.pop(external)

        # Determine which columns may legally appear in a WHERE clause:
        # partition/clustering key columns plus secondary-indexed columns.
        recs = indexes.objects().filter(keyspace_name=cache_store.__keyspace__)
        recs = recs.filter(table_name='cache_store')
        indexedcolumns = [row.options['target'] for row in recs]

        recs = tablecolumns.objects().filter(keyspace_name=cache_store.__keyspace__)
        recs = recs.filter(table_name='cache_store')
        partitioncolumns = [row.column_name for row in recs if row.kind in ["partition_key"]]
        partitioncolumns += [row.column_name for row in recs if row.kind in ["primary_key", "clustering"]]
        parametercolumns = partitioncolumns + indexedcolumns
        partitioncolstofilter = [column for column in parametercolumns if column in payLoad]

        # None of the supplied parameters is filterable: report it instead of
        # crashing on an undefined query object (previous behaviour was an
        # accidental NameError surfacing as "500 Technical exception").
        if not partitioncolstofilter:
            return (("200", "Parameters need to be passed to fetch values from the cache store"))

        # Build the filter as keyword arguments rather than exec()'ing
        # dynamically assembled source code — the old approach was fragile
        # and an injection risk for quote characters in parameter values.
        filters = {}
        for column in partitioncolstofilter:
            if column in datecolumns:
                filters[column] = datetime.strptime(str(payLoad[column]), '%Y-%m-%d')
            else:
                filters[column] = str(payLoad[column])
        cacheStoreRecords = cache_store.objects().filter(**filters)

        if not cacheStoreRecords:
            return (("200", {}))

        cacheStore = pd.DataFrame.from_dict(
            [row.__json__() for row in cacheStoreRecords], orient='columns')
        cacheStore.fillna('', inplace=True)

        # Render timestamp columns in the configured time zone.
        # NOTE(review): unit='ns' assumes the serialized values are epoch
        # nanoseconds — confirm against cache_store.__json__().
        for column in datecolumns:
            if column in cacheStore.columns.tolist():
                cacheStore[column] = pd.to_datetime(cacheStore[column], unit='ns')
                cacheStore[column] = cacheStore[column].dt.tz_localize('UTC').dt.tz_convert(
                    configvalues.get('apiconfiguration', 'timeZone')).dt.strftime('%Y-%m-%dT%H:%M:%S.%f%z')
                cacheStore[column] = ["" if pd.isnull(columnValue) else columnValue for columnValue in
                                      cacheStore[column].to_list()]

        # Keep only the exposed columns and rename them back to API names.
        cacheStore = cacheStore[validColumns]
        renames = {k: v for k, v in columnMapping.items() if k in validColumns}
        cacheStore = cacheStore.rename(columns=renames)
        records = cacheStore.to_dict(orient='records')

        if records:
            records = [{k: (v.encode().decode() if k == 'value' else v) for k, v in x.items()}
                       for x in records]

        # Build the response from the TRANSFORMED records (the original code
        # computed it from the raw frame, silently discarding the date
        # formatting and column renaming above).
        if not records:
            response = {}
        elif len(records) == 1:
            response = records[0]
        else:
            response = records

        logging.debug('cacheStore-' + str(records))
        logging.info("Completed fetching the records")
        gc.collect()

        return ((200, response))

    except Exception as e:
        gc.collect()
        logging.error("Error - {} . Line No - {} ".format(str(e), str(sys.exc_info()[-1].tb_lineno)))
        return (("500", "Technical exception"))