def MEMcache_insert(result, query, id, expiry):
    """Serialize *result* and store it in the 'cache_aggregation' table.

    Args:
        result: query result rows to cache (JSON-serialised before insert).
        query: originating query; stored alongside the data as str.
        id: cache key for the entry (name kept for callers; shadows builtin).
        expiry: cache lifetime in seconds.

    Cache-backend failures are logged and swallowed (best-effort cache).
    """
    logger.info("Cache insertion started...")

    class ExtendedJSONEncoder(json.JSONEncoder):
        """json.JSONEncoder that also handles Decimal and date/datetime."""
        def default(self, obj):
            if isinstance(obj, decimal.Decimal):
                return str(obj)
            # BUG FIX: the original tested isinstance(obj, datetime) where
            # `datetime` is the module (see datetime.datetime.now() below),
            # which raises TypeError whenever default() is reached.
            if isinstance(obj, (datetime.datetime, datetime.date)):
                return obj.isoformat()
            return super(ExtendedJSONEncoder, self).default(obj)

    createddatetime = datetime.datetime.now()
    expirydatetime = createddatetime + datetime.timedelta(seconds=expiry)
    to_cache = {'id': id,
                'data': json.dumps(result, cls=ExtendedJSONEncoder),
                'query': str(query),
                'expirydatetime': expirydatetime,
                'createddatetime': createddatetime}
    try:
        CC.insert_data([to_cache], 'cache_aggregation')
        logger.info("Cache insertion successful!")
    except Exception as err:
        logger.error("Error inserting to cache!")
        logger.error(err)
 def deactivate_packages(self):
     """Mark the tenant's free/default packages as deactivated.

     Runs only when ``self.is_default`` is truthy: flips ``package_status``
     to 'deactivated' for this tenant's rows matching the free package and
     the three default packages. Returns a cmg error response on failure,
     otherwise None.

     NOTE(review): ``free_package`` and ``default_1``..``default_3`` are
     module-level values not visible in this chunk — confirm they are in
     scope. The except message says "inserting" but this is an update.
     """
     if self.is_default:
         try:
             db.update_data('digin_tenant_package_details'," WHERE tenant_id = '{0}' AND package_id IN ({1},{2},{3},{4})".format(self.tenant,int(free_package),int(default_1),int(default_2),int(default_3)),
                            package_status = 'deactivated')
         except Exception, err:
             print "Error inserting to DB!"
             return cmg.format_response(False, err, "Error occurred while deactivate_packages.. \n" + str(err),
                                           exception=sys.exc_info())
    def set_usage(self):
        """Queue one usage record per parameter and persist the batch.

        Appends a row for every (parameter, value) pair in ``self.usages``
        to ``self.insert_obj``, inserts the batch into digin_usage_details
        and refreshes the summary table.
        """
        for param, amount in self.usages.items():
            self.insert_obj.append({'user_id': self.user_id,
                                    'tenant': self.tenant,
                                    'parameter': param,
                                    'value': amount,
                                    'other_data': self.other_data})
        print(self.insert_obj)
        db.insert_data(self.insert_obj, 'digin_usage_details')
        self._calculate_summary()
    def get_package_attributes(self):
        """Return the attribute rows for ``self.package_id``.

        Queries digin_packagedetails and returns a list of dicts
        (package_name / package_attribute / package_value / package_price).
        On failure prints the error and returns an error string (kept for
        backward compatibility with existing callers).
        """
        query = "SELECT " \
                "package_id, " \
                "package_name, " \
                "package_attribute, " \
                "package_value, " \
                "package_price " \
                "FROM digin_packagedetails " \
                "WHERE package_id = {0} ".format(self.package_id)
        try:
            result = db.get_data(query)['rows']
            data_list = []
            for row in result:
                data_list.append({'package_name': row[1],
                                  'package_attribute': row[2],
                                  'package_value': row[3],
                                  'package_price': row[4]})
            # BUG FIX: the original built data_list but never returned it,
            # so callers (e.g. get_ledger) always received None.
            return data_list
        except Exception as err:
            print(err)
            return "Error occurred while getting data"
def _temporary_delete_components(comp_id, table, user_id, domain):
        """Soft-delete a component (move to Trash) after an access check.

        Verifies that the user/domain pair may access the component, then
        flags the row inactive and decrements the usage rating. Returns a
        message string when no matching component exists; re-raises on
        backend errors.
        """
        access_query = ("SELECT digin_comp_id FROM digin_component_header a INNER JOIN "
                        "digin_component_access_details b ON a.digin_comp_id = b.component_id "
                        "WHERE digin_comp_id = {0} AND domain = '{1}' AND user_id = '{2}'".format(comp_id, domain, user_id))
        try:
            is_access_valid = CC.get_data(access_query)
            if int(is_access_valid['rows'][0][0]) != int(comp_id):
                message = 'No component found for deletion user_id: {0}, tenant: {1}, component_id: {2}'.format(user_id, domain, comp_id)
                print(message)
                return message
            result = CC.update_data(table, 'WHERE digin_comp_id = {0}'.format(comp_id), is_active=False)
            # Component count is decremented when components moved to Trash
            _rate_calculation_helper(False, comp_id, 'dashboard', user_id, domain)
            print('Component deleted user_id: {0}, tenant: {1}, component_id: {2}'.format(user_id, domain, comp_id))
        except Exception as err:
            print(err)
            raise
def wcloud_stream(tweet):
    """Merge a new tweet's word counts into the cached word cloud.

    Loads current counts from the 'wordcloud' cache row, adds the tweet's
    counts, keeps the top 250 words and writes them back. If the cache row
    does not exist yet (or any step fails), falls back to inserting a
    fresh top-250 count for just this tweet.
    """
    try:
        logger.info('Getting data from Cache if there is')
        c_data = json.loads(cache.get_data('wordcloud', 'd_name,data', 'd_name ="test1"').data)

        logger.info('calc word count for a new tweet')
        new_counts = json.loads(wnc.wordcloud_json(tweet))

        logger.info('Agreegate previous and current tweets')
        for word in new_counts:
            c_data[word] = c_data.get(word, 0) + new_counts[word]

        logger.info('update MemSql')
        c_data = dict(sorted(c_data.items(), key=lambda kv: kv[1], reverse=True)[:250])
        update_data(json.dumps(c_data))

    except Exception as err:
        # Fallback path: no cache row for this dashboard yet (or read failed).
        # BUG FIX: the original called logger.info(err, 'msg'), passing the
        # message as a lazy %-format argument of the error object.
        logger.info('Insert to MemSQL, if dash boardname is ot existing. Error: %s', err)
        c_data = json.loads(wnc.wordcloud_json(tweet))
        c_data = dict(sorted(c_data.items(), key=lambda kv: kv[1], reverse=True)[:250])
        insert_data(json.dumps(c_data))
 def get_rating_summary(self):
     """Build a per-user usage summary for the tenant.

     Admins see every user's summed usage per parameter; other users see
     only their own rows. Returns a cmg response with the nested usage
     map plus the tenant's exceed/blocked calculation.
     """
     if self.security_level == 'admin':
         rows = db.get_data("SELECT user_id, parameter, SUM(value) as value FROM digin_usage_summary "
                            "WHERE tenant = '{0}' GROUP BY user_id, parameter".format(self.tenant))['rows']
     else:
         rows = db.get_data("SELECT user_id, parameter, value FROM digin_usage_summary "
                            "WHERE user_id = '{0}' AND tenant = '{1}'".format(self.user_id, self.tenant))['rows']
     user_dict = {}
     for user, param, value in rows:
         user_dict.setdefault(user, {})[param] = value
     rated_dict = {'usage': [{self.tenant: user_dict}],
                   'exceed_blocked': euc.ExceedUsageCalculator(tenant=self.tenant, attribute=None).calculation()}
     return cmg.format_response('True', rated_dict, "Usage data retrieved")
def get_user_settings(user_id, domain):
    """Fetch a user's saved settings for *domain* from the cache DB.

    Returns a cmg response carrying the settings dict on success, or a
    no-settings message when nothing is stored. Logs and re-raises on
    backend errors.
    """
    query = "SELECT components, user_role, cache_lifetime, widget_limit, " \
            "query_limit, logo_path, dp_path, theme_config, modified_date_time, created_date_time, " \
            "domain FROM digin_user_settings WHERE user_id = '{0}' AND domain = '{1}'".format(user_id, domain)
    logo_path = conf.get_conf('FilePathConfig.ini','User Files')['Path']
    document_root = conf.get_conf('FilePathConfig.ini','Document Root')['Path']
    # Web-relative prefix: strip the document root from the configured path.
    path = re.sub(document_root, '', logo_path)
    try:
        user_data = CC.get_data(query)
        if user_data['rows'] == ():
            logger.info('No user settings saved for given user ' + user_id)
            return cmg.format_response(True, user_id, "No user settings saved for given user and domain")
        row = user_data['rows'][0]
        data = {
            'components': row[0],
            'user_role': row[1],
            'cache_lifetime': int(row[2]),
            'widget_limit': int(row[3]),
            'query_limit': int(row[4]),
            'logo_path': path + row[5],
            'dp_path': path + row[6],
            'theme_config': row[7],
            'modified_date_time': row[8],
            'created_date_time': row[9],
            'domain': row[10]
        }
        # BUG FIX: the original built `data` then fell off the end,
        # returning None on success.
        return cmg.format_response(True, data, "User settings retrieved successfully")
    except Exception as err:
        logger.error("Error retrieving user settings")
        logger.error(err)
        print("Error retrieving user settings")
        print(err)
        raise
    def get_package_detail(self):
        """Return detailed rows for the tenant's current package(s).

        Joins package attributes with the tenant's 'current_package'
        mapping, including expiry info and an is_expired flag. Returns
        the list of dicts on success, or a cmg error response on failure.
        """
        query = "SELECT " \
                "a.package_id, " \
                "a.package_name, " \
                "a.package_attribute, " \
                "SUM(a.package_value), " \
                "SUM(a.package_price), " \
                "b.expiry_datetime, " \
                "TIMESTAMPDIFF(DAY, CURRENT_TIMESTAMP, expiry_datetime) as remaining_days, " \
                "CURRENT_TIMESTAMP > b.expiry_datetime " \
                "FROM digin_packagedetails a " \
                "INNER JOIN digin_tenant_package_details b " \
                "ON a.package_id = b.package_id " \
                "WHERE b.tenant_id = '{0}' AND b.package_status = 'current_package' " \
                "GROUP BY a.package_id, a.package_name, a.package_attribute, b.expiry_datetime, remaining_days".format(self.tenant)

        try:
            result = db.get_data(query)['rows']
            data_list = []
            for row in result:
                data_list.append({'package_id': row[0],
                                  'package_name': row[1],
                                  'package_attribute': row[2],
                                  'package_value_sum': row[3],
                                  'package_price_sum': row[4],
                                  'expiry_datetime': row[5],
                                  'remaining_days': row[6],
                                  'is_expired': bool(row[7])})
            # BUG FIX: the original built data_list but never returned it.
            return data_list
        except Exception as err:
            print(err)
            return cmg.format_response(False, err, "Error occurred while getting data", exception=sys.exc_info())
    def initiate_usage_scheduler(self):
        """Compute BigQuery storage usage per dataset and record it.

        Skipped on the very first run (``self.run_count == 0``). For each
        dataset: sums table sizes from __TABLES__, maps the dataset id back
        to a user via digin_user_settings and stores the usage through the
        RatingEngine. Per-dataset failures are printed and the loop
        continues with the next dataset.
        """
        usage_scheduler_run_time = datetime.datetime.now()
        if self.run_count != 0:
            print('usage_scheduler started at: ' + str(usage_scheduler_run_time))
            logger.info('usage_scheduler started at: ' + str(usage_scheduler_run_time))
            for dataset in bq.get_datasets():
                try:
                    dataset_id = dataset['datasetReference']['datasetId']
                    storage_query = "SELECT SUM(size_bytes) as storage_bq FROM [{0}.__TABLES__]".format(dataset_id)
                    storage_bq = bq.execute_query(storage_query, user_id=0, tenant='DigInEngine')[0]['storage_bq']
                    user_id = db.get_data(
                        "SELECT user_id FROM digin_user_settings WHERE REPLACE(REPLACE(email, '.', '_'), '@', '_') = '{0}' limit 1".format(
                            dataset_id))['rows']
                    if user_id == ():
                        # BUG FIX: these two statements were corrupted in the
                        # source ("'******'"); reconstructed as simple string
                        # concatenation of the dataset id.
                        print('No user_Settings found for user: ' + dataset_id)
                        logger.info('No user_Settings found for user: ' + dataset_id)
                    else:
                        usages = {'storage_bq': 0 if storage_bq is None else storage_bq}
                        obj = dre.RatingEngine(user_id[0][0], 'undefined', **usages)
                        obj.set_usage()
                        print('Storage calculated for user ' + str(user_id[0][0]))
                        logger.info('Storage calculated for user ' + str(user_id[0][0]))
                except Exception as err:
                    print(err)
 def _calculate_summary(self):
     """Upsert the per-user usage summary from ``self.insert_obj``.

     When no summary rows exist yet, the pending records are inserted
     as-is. Otherwise each pending record either updates the matching
     parameter's running total (incremented or decremented depending on
     ``self.is_increment``) or, when the parameter has no summary row
     yet, is queued for insertion.
     """
     summary = db.get_data("SELECT parameter, value FROM digin_usage_summary "
                           "WHERE user_id = '{0}' AND tenant = '{1}'".format(self.user_id, self.tenant))
     if summary['rows'] == ():
         db.insert_data(self.insert_obj, 'digin_usage_summary')
         return
     update_obj = []
     residue_insert = []
     for pending in self.insert_obj:
         for existing in summary['rows']:
             if pending['parameter'] == existing[0]:
                 delta = int(pending['value'])
                 total = int(existing[1]) + delta if self.is_increment else int(existing[1]) - delta
                 update_obj.append({pending['parameter']: str(total)})
                 break
         else:
             # No summary row for this parameter yet -> insert, not update.
             residue_insert.append({'parameter': pending['parameter'],
                                    'value': pending['value'],
                                    'user_id': self.user_id,
                                    'tenant': self.tenant})
     for record in update_obj:
         # CONSISTENCY FIX: the original mixed list(record.keys())[0] with
         # py2-only record.itervalues().next(); use the same access style
         # for both key and value.
         db.update_data('digin_usage_summary',
                        "WHERE parameter = '{0}' AND user_id = '{1}' AND tenant = '{2}' "
                        .format(list(record.keys())[0], self.user_id, self.tenant),
                        value=list(record.values())[0],
                        modifieddatetime=datetime.datetime.now())
     if residue_insert:
         db.insert_data(residue_insert, 'digin_usage_summary')
def ret_fuzzyC(dbtype, rec_data, u_id, cache_timeout):
    """Fuzzy C-means entry point (truncated fragment).

    NOTE(review): only the cache-existence probe is visible here — the
    computed ``cache_existance`` is never used in this chunk, so the rest
    of the function is presumably elsewhere; confirm before editing.
    """
    time = datetime.datetime.now()
    try:
        cache_existance = CC.get_data("SELECT expirydatetime >= '{0}' FROM cache_algorithms "
                                      "WHERE id = '{1}' and name_algo='fuzzyC'".format(time, u_id))['rows']

    except Exception, err:
        logger.error(err, "Error connecting to cache..")
        cache_existance = ()
def _permanent_delete_components(comp_id, table, user_id, domain): #TODO query not working
        """Hard-delete a component row joined with its access details.

        Issues a single DELETE-with-JOIN against *table* scoped to the
        given user/domain. Prints and re-raises on failure.
        """
        delete_sql = ("DELETE a FROM {0} a INNER JOIN digin_component_access_details b "
                      "ON a.digin_comp_id = b.component_id WHERE digin_comp_id = {1} AND domain = '{2}' AND "
                      "user_id = '{3}' ".format(table, comp_id, domain, user_id))
        try:
            result = CC.delete_data(delete_sql)
            print('Component permanently DELETED: %s ' % str(comp_id))
        except Exception as err:
            print(err)
            raise
def MEM_insert(id, data, query, cache_timeout):
        """Store one prepared result in the 'cache_execute_query' table.

        The entry is keyed by str(*id*) and carries the originating query
        plus creation/expiry timestamps (expiry = now + cache_timeout
        seconds). Failures are logged and swallowed (best-effort cache).
        """
        logger.info("Cache insertion started...")
        now = datetime.datetime.now()
        entry = {'id': str(id),
                 'data': data,
                 'query': query,
                 'expirydatetime': now + datetime.timedelta(seconds=cache_timeout),
                 'createddatetime': now}
        try:
            CC.insert_data([entry], 'cache_execute_query')
        except Exception as err:
            print(err)
            logger.error("Error inserting to cache!")
            logger.error(err)
def ret_bubble(dbtype, table, x, y, s, c, u_id, cache_timeout, user_id=None, tenant=None):
    """Bubble-chart entry point (truncated fragment).

    NOTE(review): only the cache-existence probe is visible here; the
    remainder of the function is not in this chunk — confirm before
    editing.
    """

    time = datetime.datetime.now()
    try:
        cache_existance = CC.get_cached_data("SELECT expirydatetime >= '{0}' FROM cache_descriptive_analytics "
                                             "WHERE id = '{1}' and c_type='bubblechart'".format(time, u_id))['rows']
    except Exception, err:
        logger.error(err, "Error connecting to cache..")
        cache_existance = ()
 def get_usage(self):
     """Return the tenant's summed usage for ``self.attribute``.

     Only the "data" attribute is handled in this (apparently truncated)
     copy: sums download_bq and upload_size_bq and returns the single
     result row; other attributes fall through returning None.
     """
     if self.attribute == "data":
         try:
             # BUG FIX: "parameter = 'download_bq' OR 'upload_size_bq'"
             # never matched upload_size_bq rows (the bare string is a
             # separate operand of OR); use IN over both parameters.
             summary = db.get_data(" SELECT sum(value) FROM digin_usage_summary "
                                   " WHERE tenant = '{0}' AND parameter IN ('download_bq', 'upload_size_bq') ".format(self.tenant))['rows']
             return summary[0]
         except Exception as err:
             print("Error inserting to DB!")
             return cmg.format_response(False, err, "Error occurred while getting packages details .. \n" + str(err),
                                           exception=sys.exc_info())
def MEM_insert(data,cache_timeout):
        """Bulk-cache hierarchy level rows into 'cache_hierarchy_levels'.

        Each input row must carry 'ID', 'level' and 'value'; all cached
        entries share the same creation/expiry timestamps. Failures are
        logged and swallowed (best-effort cache write).
        """
        logger.info("Cache insertion started...")
        now = datetime.datetime.now()
        expires = now + datetime.timedelta(seconds=cache_timeout)
        entries = [{'id': str(row['ID']),
                    'level': row['level'],
                    'value': row['value'],
                    'expirydatetime': expires,
                    'createddatetime': now} for row in data]
        try:
            CC.insert_data(entries, 'cache_hierarchy_levels')
        except Exception as err:
            logger.error("Error inserting to cache!")
            logger.error(err)
def ret_hist(dbtype, rec_data, u_id, cache_timeout, n_bins, user_id, tenant):
    """Histogram entry point (truncated fragment).

    NOTE(review): only the cache-existence probe is visible here; what
    looks like this function's tail appears detached later in the file —
    confirm before editing.
    """

    time = datetime.datetime.now()
    try:
        cache_existance = CC.get_cached_data("SELECT expirydatetime >= '{0}' FROM cache_descriptive_analytics"
                                             " WHERE id = '{1}' and c_type='histogram'".format(time, u_id))['rows']
        print 'recieved data from Cache'

    except Exception, err:

        logger.error(err, "Error connecting to cache..")
        cache_existance = ()
    def set_packages(self):
        """Record ``self.package_id`` as the tenant's current package.

        The free package expires 30 days from now; any other package
        expires at the end of the current calendar month (23:59:59 on its
        last day). Returns a cmg error response on insert failure,
        otherwise None.
        """
        now = datetime.datetime.now()
        _, num_days = calendar.monthrange(now.year, now.month)
        free_package = conf.get_conf('DefaultConfigurations.ini', 'Package Settings')['Free']
        if self.package_id == int(free_package):
            expiry = now + datetime.timedelta(days=30)
        else:
            expiry = datetime.datetime(now.year, now.month, num_days, 23, 59, 59)

        mapping = [{'tenant_id': self.tenant,
                    'package_id': self.package_id,
                    'created_datetime': now,
                    'modified_datetime': now,
                    'expiry_datetime': expiry,
                    'package_status': 'current_package'}]
        try:
            db.insert_data(mapping, 'digin_tenant_package_details')
        except Exception as err:
            print("Error inserting to cacheDB!")
            return cmg.format_response(False, err, "Error occurred while inserting.. \n" + str(err),
                                          exception=sys.exc_info())
def store_user_settings(params,user_id, domain):
    """Create or update a user's settings row in digin_user_settings.

    NOTE(review): this definition is truncated in this chunk — only the
    update path (existing row) is visible; the insert path for a new user
    appears detached later in the file. Confirm against the original
    module before editing.
    """

    data_object = [{'user_id': user_id,
             'email': params['email'],
             'components': params['components'],
             'user_role': params['user_role'],
             'cache_lifetime': 300 if params['cache_lifetime'] is None else int(params['cache_lifetime']),
             'widget_limit': default_user_settings['widget_limit'] if params['widget_limit'] is None else int(params['widget_limit']),
             'query_limit': default_user_settings['query_limit'] if params['query_limit'] is None else int(params['query_limit']),
             'logo_path': '/digin_user_data/'+user_id+'/'+domain+'/logos/'+params['logo_name'],
             'dp_path': '/digin_user_data/'+user_id+'/'+domain+'/DPs/'+params['dp_name'],
             'theme_config': params['theme_config'],
             'modified_date_time': datetime.datetime.now(),
             'created_date_time': datetime.datetime.now(),
             'domain': domain
             }]
    logger.info("Data received!")
    logger.info(data_object)
    existance = CC.get_data("SELECT user_id from digin_user_settings where user_id = '{0}' AND domain = '{1}'".format(user_id, domain))
    if existance['rows'] != ():
        try:
            # Existing row: update in place. NOTE(review): unlike the insert
            # payload above, these int(...) casts are not None-guarded.
            CC.update_data('digin_user_settings',"WHERE user_id='{0}'".format(user_id),
                           components=params['components'],
                           user_role=params['user_role'],
                           cache_lifetime=int(params['cache_lifetime']),
                           widget_limit=int(params['widget_limit']),
                           query_limit=int(params['query_limit']),
                           logo_path='/digin_user_data/'+user_id+'/'+domain+'/logos/'+params['logo_name'],
                           dp_path='/digin_user_data/'+user_id+'/'+domain+'/DPs/'+params['dp_name'],
                           theme_config=params['theme_config'],
                           modified_date_time=datetime.datetime.now())
            return cmg.format_response(True,1,"User settings updated successfully")
        except Exception, err:
            logger.error("Error updating user settings")
            logger.error(err)
            print "Error updating user settings"
            print err
            raise
    def get_rating_detail(self, params):
        """Return day-by-day usage per user for the requested date range.

        Admins get every user in the tenant; everyone else only their own
        rows. The result is nested as {user: {date: {parameter: value}}}
        and wrapped in a cmg response.
        """
        if self.security_level == 'admin':
            rows = db.get_data("SELECT user_id, DATE(createddatetime), parameter, SUM(value) as value FROM digin_usage_details "
                                  "WHERE tenant = '{0}' AND DATE(createddatetime)>= {1} AND DATE(createddatetime)<= {2} "
                                  "GROUP BY user_id, DATE(createddatetime), parameter".format(self.tenant, params.start_date, params.end_date))['rows']
        else:
            rows = db.get_data("SELECT user_id, DATE(createddatetime), parameter, SUM(value) as value FROM digin_usage_details "
                                  "WHERE user_id = '{0}' AND tenant = '{1}' AND DATE(createddatetime)>= {2} AND DATE(createddatetime)<= {3} "
                                  "GROUP BY user_id, DATE(createddatetime), parameter".format(self.user_id, self.tenant, params.start_date, params.end_date))['rows']

        date_dict = {}
        for user, day, parameter, value in rows:
            date_dict.setdefault(user, {}).setdefault(str(day), {})[parameter] = value

        return cmg.format_response('True', [{self.tenant: date_dict}], "Usage data retrieved")
    def get_ledger(self):
        """Return the tenant's package history between start_date/end_date.

        Each digin_tenant_package_details row in the window is expanded
        with the package's attribute details via PackageProcessor. Returns
        the list of dicts on success, or a cmg error response on failure.
        """
        query = "SELECT " \
                "package_id, " \
                "expiry_datetime, " \
                "TIMESTAMPDIFF(DAY, CURRENT_TIMESTAMP, expiry_datetime) as remaining_days, " \
                "package_status, " \
                "created_datetime " \
                "FROM digin_tenant_package_details  " \
                "WHERE tenant_id = '{0}' " \
                "AND created_datetime >= TIMESTAMP('{1}') AND  created_datetime <= TIMESTAMP('{2}') " \
                "ORDER BY created_datetime ".format(self.tenant, self.start_date, self.end_date)

        try:
            result = db.get_data(query)['rows']
            data_list = []
            for row in result:
                data_list.append({'package_id': row[0],
                                  'package_Details': PackageProcessor(package_name=None, package_attribute=None,
                                                                      package_value=None, package_price=None,
                                                                      is_default=False, tenant=self.tenant,
                                                                      package_id=int(row[0])).get_package_attributes(),
                                  'expiry_datetime': row[1],
                                  'remaining_days': row[2],
                                  'package_status': row[3],
                                  'created_datetime': row[4]})
            # BUG FIX: the original built data_list but never returned it.
            return data_list
        except Exception as err:
            print(err)
            return cmg.format_response(False, err, "Error occurred while getting data", exception=sys.exc_info())
def get_all_components(params, user_id, domain):
    """List the active dashboard/report components the user can access.

    Joins access details with the component header for the given
    user/domain and returns a cmg response holding a list of
    compID/compName/compType dicts; returns a cmg error response on
    failure.
    """
    try:
        data = CC.get_data("SELECT h.digin_comp_id, h.digin_comp_name, h.digin_comp_type "
                           "FROM digin_component_access_details a "
                           "INNER JOIN digin_component_header h "
                           "ON a.component_id = h.digin_comp_id "
                           "WHERE h.is_active = TRUE AND a.domain = '{0}' AND a.user_id = '{1}' "
                           "AND a.type IN ('dashboard','report')".format(domain, user_id))
        print(data["rows"])
        comps = [{"compID": comp_id, "compName": comp_name, "compType": comp_type}
                 for comp_id, comp_name, comp_type in data["rows"]]
        return cmg.format_response(True, comps, "Successful!")
    except Exception as err:
        logger.error("Error getting data from cache. %s" % err)
        return cmg.format_response(False, 0, "Error Occurred!", exception=sys.exc_info())
    def get_package_summary(self):
        """Summarise the tenant's current package totals per attribute.

        Sums package_value and package_price across the tenant's
        'current_package' rows grouped by attribute. Returns the list of
        summary dicts on success, or a cmg error response on failure.
        """
        query = "SELECT " \
                "a.package_attribute, " \
                "SUM(a.package_value), " \
                "SUM(a.package_price) " \
                "FROM digin_packagedetails a " \
                "INNER JOIN digin_tenant_package_details b " \
                "ON a.package_id = b.package_id " \
                "WHERE b.tenant_id = '{0}' AND b.package_status = 'current_package' " \
                "GROUP BY a.package_attribute".format(self.tenant)

        try:
            result = db.get_data(query)['rows']
            data_list = []
            for row in result:
                data_list.append({'package_attribute': row[0],
                                  'package_value_sum': row[1],
                                  'package_price_sum': row[2]})
            # BUG FIX: the original built data_list but never returned it.
            return data_list
        except Exception as err:
            print(err)
            return cmg.format_response(False, err, "Error occurred while getting data", exception=sys.exc_info())
                           # NOTE(review): orphaned fragment — duplicates the
                           # tail of store_user_settings (update kwargs plus
                           # the insert path for new users). Looks like a
                           # stray paste; reconcile with the full definition
                           # elsewhere in the file before editing.
                           cache_lifetime=int(params['cache_lifetime']),
                           widget_limit=int(params['widget_limit']),
                           query_limit=int(params['query_limit']),
                           logo_path='/digin_user_data/'+user_id+'/'+domain+'/logos/'+params['logo_name'],
                           dp_path='/digin_user_data/'+user_id+'/'+domain+'/DPs/'+params['dp_name'],
                           theme_config=params['theme_config'],
                           modified_date_time=datetime.datetime.now())
            return cmg.format_response(True,1,"User settings updated successfully")
        except Exception, err:
            logger.error("Error updating user settings")
            logger.error(err)
            print "Error updating user settings"
            print err
            raise
    try:
        CC.insert_data(data_object,'digin_user_settings')
    except Exception, err:
        logger.error("Error saving user settings")
        logger.error(err)
        print "Error saving user settings"
        print err
        raise
    return cmg.format_response(True,1,"User settings saved successfully")

def get_user_settings(user_id, domain):
    """Duplicate, truncated copy of get_user_settings.

    NOTE(review): this re-definition builds the query and reads the path
    config but is cut off before fetching anything; a complete version
    exists earlier in the file. Likely a stray paste — confirm and remove.
    """

    query = "SELECT components, user_role, cache_lifetime, widget_limit, " \
            "query_limit, logo_path, dp_path, theme_config, modified_date_time, created_date_time, " \
            "domain FROM digin_user_settings WHERE user_id = '{0}' AND domain = '{1}'".format(user_id, domain)
    logo_path = conf.get_conf('FilePathConfig.ini','User Files')['Path']
    document_root = conf.get_conf('FilePathConfig.ini','Document Root')['Path']
    def get_usage(self):
        """Return the tenant's summed usage for ``self.attribute``.

        Supported attributes: "data" (download + upload bytes), "storage"
        and "users". Returns the single summary row on success, a cmg
        error response on DB failure, and None for unknown attributes
        (matching the original elif chain's fall-through).
        """
        parameters_by_attribute = {
            'data': ('download_bq', 'upload_size_bq'),
            'storage': ('storage_bq',),
            'users': ('users',),
        }
        parameters = parameters_by_attribute.get(self.attribute)
        if parameters is None:
            return None
        placeholders = ", ".join("'%s'" % p for p in parameters)
        try:
            # BUG FIX: for "data" the original used
            # "parameter = 'download_bq' OR 'upload_size_bq'", which never
            # matched upload_size_bq rows (the bare string is a separate
            # operand of OR); IN (...) covers every requested parameter.
            summary = db.get_data(" SELECT sum(value) FROM digin_usage_summary "
                                  " WHERE tenant = '{0}' AND parameter IN ({1}) ".format(self.tenant, placeholders))['rows']
            return summary[0]
        except Exception as err:
            print("Error inserting to DB!")
            return cmg.format_response(False, err, "Error occurred while getting packages details .. \n" + str(err),
                                          exception=sys.exc_info())
            # NOTE(review): orphaned fragment — looks like the tail of
            # ret_hist (cache write in a background thread, then the
            # read-from-cache branch); its opening "try:"/"if" headers are
            # not visible in this chunk. Reconcile before editing.
            output = Hist.histogram(df, n_bins)
            t = threading.Thread(target=cache_data, args=(output, u_id, cache_timeout, 'histogram'))
            t.start()
            result = cmg.format_response(True, output, 'Histogram processed successfully!')

        except Exception, err:

            result = cmg.format_response(False, err, 'Histogram Failed!', sys.exc_info())

        return result

    else:
        logger.info("Getting Histogram data from Cache..")
        result = ''
        try:
            data = json.loads(CC.get_cached_data("SELECT data FROM cache_descriptive_analytics WHERE id = '{0}' "
                                                 "and c_type='histogram'".format(u_id))['rows'][0][0])
            result = cmg.format_response(True, data, 'Data successfully processed!')
            logger.info("Data received from cache")
        except Exception:
            logger.error("Error occurred while fetching data from Cache")
            result = cmg.format_response(False, None, 'Error occurred while getting data from cache!', sys.exc_info())
            #raise
        return result


def ret_box(dbtype, rec_data, u_id, cache_timeout, user_id, tenant):
    """Boxplot entry point (truncated/garbled fragment).

    NOTE(review): after the cache probe the indentation jumps and the code
    switches to an unrelated aggregation routine (``params``, ``tup`` and
    ``pkey`` are undefined here) — this span looks like interleaved
    fragments of two functions. Reconcile against the original modules
    before editing.
    """

    time = datetime.datetime.now()
    try:
        cache_existance = CC.get_cached_data("SELECT expirydatetime >= '{0}' FROM cache_descriptive_analytics "
                                             "WHERE id = '{1}' and c_type='boxplot'".format(time, u_id))['rows']
            measure = None
        try:
            agg = params.agg
        except AttributeError:
            agg = 'count'
        logger.info('Requested received: Keys: {0}, values: {1}'.format(params.keys(),params.values()))

        fields = []  # ['aaaa', 'bbbb', 'cccc']
        counted_fields = []
        partition_by = []
        count_statement = []
        window_functions_set = []

        time = datetime.datetime.now()
        try:
            cache_existance = CC.get_cached_data("SELECT expirydatetime >= '{0}' FROM cache_hierarchy_summary WHERE id = '{1}'".format(time, pkey))['rows']
        except:
            logger.error("Error connecting to cache..")
            cache_existance = ()
            pass

        if len(cache_existance) == 0 or cache_existance[0][0] == 0 :
            if agg.lower() == 'count' or measure is None:
                for i in range(0, len(tup)):
                    fields.append(tup[i][0])
                    counted_fields.append('%s_count1' % (tup[i][0]))  # ['aaaa_count', 'bbbb_count', 'cccc_count']
                    p = []
                    count_statement.append('COUNT (%s) as %s_count1' % (tup[i][0], tup[i][0]))
                    for j in range(0, i+1):
                        p.append(fields[j])
                    p_str = ', '.join(p)
            # NOTE(review): orphaned fragment — tail of a Kmeans routine
            # (``df``/``ka``/``cache_data`` come from code not visible in
            # this chunk); the enclosing try/if headers are missing here.
            output = ka.kmeans_algo(df)
            cache_data(output, u_id, cache_timeout, name_algo='kmeans')
            result = cmg.format_response(True, output, 'Kmeans processed successfully!')

        except Exception, err:
            logger.error(err)
            result = cmg.format_response(False, err, 'Kmeans Failed!', sys.exc_info())

        finally:
            # NOTE(review): returning inside finally swallows any in-flight
            # exception (including the re-raise below) — confirm intent.
            return result

    else:
        logger.info("Getting Kmeans data from Cache..")
        result = ''
        try:
            data = json.loads(CC.get_cached_data("SELECT data FROM cache_algorithms "
                                          "WHERE id = '{0}' and name_algo='kmeans'".format(u_id))['rows'][0][0])
            result = cmg.format_response(True,data,'Data successfully processed!')
            logger.info("Data received from cache")
        except:
            logger.error("Error occurred while fetching data from Cache")
            result = cmg.format_response(False,None,'Error occurred while getting data from cache!',sys.exc_info())
            raise
        finally:
            return result

def ret_fuzzyC(dbtype, rec_data, u_id, cache_timeout):
    """Duplicate ret_fuzzyC definition (truncated/garbled fragment).

    NOTE(review): after the cache probe this span jumps into an unrelated
    query-execution routine (``params``, ``cache_key``, ``comm`` and
    ``bqhandler`` are not defined here) and ends mid-try. Reconcile
    against the original modules before editing.
    """
    time = datetime.datetime.now()
    try:
        cache_existance = CC.get_data("SELECT expirydatetime >= '{0}' FROM cache_algorithms "
                                      "WHERE id = '{1}' and name_algo='fuzzyC'".format(time, u_id))['rows']
            limit_ = int(1000)
          try:
            offset_ = params.offset
          except:
            offset_ = None
          query = params.query
          db = params.db
          try:
            cache_timeout = int(params.t)
          except AttributeError, err:
            logger.info("No cache timeout mentioned.")
            cache_timeout = int(default_cache_timeout)

          time = datetime.datetime.now()
          try:
                cache_existance = CC.get_cached_data("SELECT expirydatetime >= '{0}' FROM cache_execute_query WHERE id = '{1}'".format(time, cache_key))['rows']
          except Exception, err:
                logger.error("Error connecting to cache..")
                logger.error(err)
                cache_existance = ()
                pass
          if len(cache_existance) != 0:
                try:
                    data = CC.get_cached_data("SELECT data, query FROM cache_execute_query WHERE id = '{0}'".format(cache_key))['rows']
                except Exception,err:
                    return  comm.format_response(False,None,"Error occurred while retrieving data from cache!",exception=sys.exc_info())
                return  comm.format_response(True,json.loads(data[0][0]),data[0][1],exception=None)

          if db.lower() == 'bigquery':
               try:
                    results = bqhandler.execute_query(query, offset=offset_, limit=limit_,user_id=user_id,tenant=tenant)