コード例 #1
0
def ret_bubble(dbtype, table, x, y, s, c, u_id, cache_timeout, user_id=None, tenant=None):
    """Serve bubble-chart data, checking the cache for a still-valid entry first.

    NOTE(review): the body is truncated in this snippet; only the cache-freshness
    probe is visible here. Presumably the missing part recomputes on a miss —
    confirm against the full source.
    """

    # Current timestamp, compared against each cached row's expirydatetime.
    # NOTE(review): local name `time` shadows the stdlib `time` module if imported.
    time = datetime.datetime.now()
    try:
        # Returns rows of booleans: is the cached bubble-chart entry unexpired?
        # NOTE(review): values are interpolated directly into the SQL string —
        # SQL-injection risk unless u_id is strictly internal; prefer a
        # parameterized query (PEP 249 placeholders).
        cache_existance = CC.get_cached_data("SELECT expirydatetime >= '{0}' FROM cache_descriptive_analytics "
                                             "WHERE id = '{1}' and c_type='bubblechart'".format(time, u_id))['rows']
    except Exception, err:  # Python 2 syntax; py3 requires `except Exception as err`
        # NOTE(review): logging misuse — the first positional arg is the format
        # string, so `err` is treated as the message and the text as a %-arg.
        logger.error(err, "Error connecting to cache..")
        # Empty tuple signals "no cache" to the (unseen) code that follows.
        cache_existance = ()
コード例 #2
0
def ret_kmeans(dbtype, rec_data, u_id, cache_timeout):
    """Serve k-means results, checking the cache for a still-valid entry first.

    NOTE(review): truncated in this snippet — only the cache-freshness probe is
    visible; the miss/hit handling presumably follows in the full source.
    """

    # Timestamp used for the expiry comparison; shadows the stdlib `time` name.
    time = datetime.datetime.now()
    try:
        # Rows of booleans: is the cached kmeans entry for u_id unexpired?
        # NOTE(review): string-interpolated SQL — use parameterized queries
        # if u_id can ever come from an untrusted source.
        cache_existance = CC.get_cached_data("SELECT expirydatetime >= '{0}' FROM cache_algorithms "
                                      "WHERE id = '{1}' and name_algo='kmeans'".format(time, u_id))['rows']

    except Exception, err:  # Python 2 syntax; any cache failure degrades to a miss
        # NOTE(review): same logger.error arg-order misuse as the sibling helpers.
        logger.error(err,"Error connecting to cache..")
        cache_existance = ()
コード例 #3
0
def ret_hist(dbtype, rec_data, u_id, cache_timeout, n_bins, user_id, tenant):
    """Serve histogram data, checking the cache for a still-valid entry first.

    NOTE(review): truncated in this snippet — only the cache-freshness probe is
    visible (the L106-129 fragment elsewhere in this file looks like the tail of
    this routine; confirm against the full source).
    """

    # Timestamp for the expiry comparison; shadows the stdlib `time` name.
    time = datetime.datetime.now()
    try:
        # Rows of booleans: is the cached histogram entry for u_id unexpired?
        # NOTE(review): string-interpolated SQL — injection risk if u_id is
        # externally controlled; prefer PEP 249 placeholders.
        cache_existance = CC.get_cached_data("SELECT expirydatetime >= '{0}' FROM cache_descriptive_analytics"
                                             " WHERE id = '{1}' and c_type='histogram'".format(time, u_id))['rows']
        # NOTE(review): runtime string contains the typo 'recieved' — fixing it
        # changes program output, so it is left as-is here. Also note this
        # prints on every successful *probe*, not on an actual cache hit.
        print 'recieved data from Cache'

    except Exception, err:  # Python 2 syntax; cache failure degrades to a miss

        # NOTE(review): logger.error arg-order misuse (err used as format string).
        logger.error(err, "Error occurred while connecting to cache.." if False else "Error connecting to cache..")
        cache_existance = ()
コード例 #4
0
            # NOTE(review): fragment — the enclosing `def` and the earlier
            # branch that assigns `measure` are not visible in this snippet.
            measure = None
        try:
            # Aggregation function requested by the caller; absent attribute
            # means "count" (EAFP on the params object).
            agg = params.agg
        except AttributeError:
            agg = 'count'
        logger.info('Requested received: Keys: {0}, values: {1}'.format(params.keys(),params.values()))

        # Accumulators for the windowed-count SQL that is built below.
        fields = []  # ['aaaa', 'bbbb', 'cccc']
        counted_fields = []       # e.g. ['aaaa_count1', ...]
        partition_by = []         # NOTE(review): never populated in this fragment
        count_statement = []      # 'COUNT (col) as col_count1' pieces
        window_functions_set = [] # NOTE(review): never populated in this fragment

        # Timestamp for the expiry comparison; shadows the stdlib `time` name.
        time = datetime.datetime.now()
        try:
            # Rows of booleans: is the cached hierarchy-summary entry unexpired?
            # NOTE(review): string-interpolated SQL — parameterize if pkey can
            # be externally controlled.
            cache_existance = CC.get_cached_data("SELECT expirydatetime >= '{0}' FROM cache_hierarchy_summary WHERE id = '{1}'".format(time, pkey))['rows']
        except:  # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit
            logger.error("Error connecting to cache..")
            cache_existance = ()
            pass  # NOTE(review): dead statement — the assignment above already ends the handler

        # Recompute when there is no cache row or the expiry test returned false.
        if len(cache_existance) == 0 or cache_existance[0][0] == 0 :
            # Plain-count path: build one COUNT column per field plus a growing
            # partition prefix (p_str) per hierarchy level.
            if agg.lower() == 'count' or measure is None:
                for i in range(0, len(tup)):
                    # tup[i][0] is presumably a column name — confirm against caller.
                    fields.append(tup[i][0])
                    counted_fields.append('%s_count1' % (tup[i][0]))  # ['aaaa_count', 'bbbb_count', 'cccc_count']
                    p = []
                    count_statement.append('COUNT (%s) as %s_count1' % (tup[i][0], tup[i][0]))
                    # Partition prefix: all fields up to and including level i.
                    for j in range(0, i+1):
                        p.append(fields[j])
                    p_str = ', '.join(p)
コード例 #5
0
            # NOTE(review): fragment — the enclosing `def` and the branch that
            # leads to this default limit are not visible in this snippet.
            # Indentation here is irregular (10/12 spaces) but preserved as-is.
            limit_ = int(1000)
          try:
            # Optional offset; absent attribute means "no offset" (EAFP).
            offset_ = params.offset
          except:  # NOTE(review): bare except — should be AttributeError like the others
            offset_ = None
          query = params.query
          db = params.db
          try:
            cache_timeout = int(params.t)
          except AttributeError, err:  # Python 2 syntax
            logger.info("No cache timeout mentioned.")
            cache_timeout = int(default_cache_timeout)

          # Timestamp for the expiry comparison; shadows the stdlib `time` name.
          time = datetime.datetime.now()
          try:
                # Rows of booleans: is the cached query result unexpired?
                # NOTE(review): string-interpolated SQL — parameterize if
                # cache_key can be externally controlled.
                cache_existance = CC.get_cached_data("SELECT expirydatetime >= '{0}' FROM cache_execute_query WHERE id = '{1}'".format(time, cache_key))['rows']
          except Exception, err:
                logger.error("Error connecting to cache..")
                logger.error(err)
                cache_existance = ()
                pass  # NOTE(review): dead statement — handler already ends above
          # NOTE(review): unlike the sibling fragments, this hit test does not
          # check cache_existance[0][0], so an *expired* row still serves
          # cached data — confirm whether that is intended.
          if len(cache_existance) != 0:
                try:
                    data = CC.get_cached_data("SELECT data, query FROM cache_execute_query WHERE id = '{0}'".format(cache_key))['rows']
                except Exception,err:
                    return  comm.format_response(False,None,"Error occurred while retrieving data from cache!",exception=sys.exc_info())
                # data[0][0] is the JSON-encoded result set, data[0][1] the original query text.
                return  comm.format_response(True,json.loads(data[0][0]),data[0][1],exception=None)

          # Cache miss: dispatch to the requested backend.
          if db.lower() == 'bigquery':
               try:
                    results = bqhandler.execute_query(query, offset=offset_, limit=limit_,user_id=user_id,tenant=tenant)
コード例 #6
0
            # NOTE(review): fragment — the enclosing `def`, the `try:` this sits
            # in, and the `if` matching the `else:` below are not visible here.
            # Compute the histogram, then cache it on a background thread so the
            # response is not delayed by the cache write.
            output = Hist.histogram(df, n_bins)
            t = threading.Thread(target=cache_data, args=(output, u_id, cache_timeout, 'histogram'))
            t.start()  # fire-and-forget; thread is never joined
            result = cmg.format_response(True, output, 'Histogram processed successfully!')

        except Exception, err:  # Python 2 syntax

            # NOTE(review): broad catch also hides programming errors; the raw
            # exception object is returned in the payload — confirm callers
            # expect that.
            result = cmg.format_response(False, err, 'Histogram Failed!', sys.exc_info())

        return result

    else:
        # Cache hit path: return the stored histogram payload verbatim.
        logger.info("Getting Histogram data from Cache..")
        result = ''
        try:
            # rows[0][0] is the JSON-encoded histogram blob.
            # NOTE(review): string-interpolated SQL — parameterize if u_id can
            # be externally controlled.
            data = json.loads(CC.get_cached_data("SELECT data FROM cache_descriptive_analytics WHERE id = '{0}' "
                                                 "and c_type='histogram'".format(u_id))['rows'][0][0])
            result = cmg.format_response(True, data, 'Data successfully processed!')
            logger.info("Data received from cache")
        except Exception:
            logger.error("Error occurred while fetching data from Cache")
            result = cmg.format_response(False, None, 'Error occurred while getting data from cache!', sys.exc_info())
            #raise
        return result


def ret_box(dbtype, rec_data, u_id, cache_timeout, user_id, tenant):
    """Serve box-plot data, checking the cache for a still-valid entry first.

    NOTE(review): truncated in this snippet — only the cache-freshness probe is
    visible; the miss/hit handling presumably follows in the full source.
    """

    # Timestamp for the expiry comparison; shadows the stdlib `time` name.
    time = datetime.datetime.now()
    try:
        # Rows of booleans: is the cached boxplot entry for u_id unexpired?
        # NOTE(review): string-interpolated SQL — parameterize if u_id can be
        # externally controlled.
        cache_existance = CC.get_cached_data("SELECT expirydatetime >= '{0}' FROM cache_descriptive_analytics "
                                             "WHERE id = '{1}' and c_type='boxplot'".format(time, u_id))['rows']
コード例 #7
0
            # NOTE(review): fragment — the enclosing `def`, the `try:` this sits
            # in, and the `if` matching the `else:` below are not visible here.
            # Run k-means, cache the output synchronously (unlike the histogram
            # tail, which caches on a thread), and build the success response.
            output = ka.kmeans_algo(df)
            cache_data(output, u_id, cache_timeout, name_algo='kmeans')
            result = cmg.format_response(True, output, 'Kmeans processed successfully!')

        except Exception, err:  # Python 2 syntax; raw exception returned in payload
            logger.error(err)
            result = cmg.format_response(False, err, 'Kmeans Failed!', sys.exc_info())

        finally:
            # NOTE(review): `return` inside `finally` swallows any in-flight
            # exception — combined with the `raise` below this makes the
            # re-raise unreachable in practice.
            return result

    else:
        # Cache hit path: return the stored kmeans payload verbatim.
        logger.info("Getting Kmeans data from Cache..")
        result = ''
        try:
            # rows[0][0] is the JSON-encoded kmeans blob.
            # NOTE(review): string-interpolated SQL — parameterize if u_id can
            # be externally controlled.
            data = json.loads(CC.get_cached_data("SELECT data FROM cache_algorithms "
                                          "WHERE id = '{0}' and name_algo='kmeans'".format(u_id))['rows'][0][0])
            result = cmg.format_response(True,data,'Data successfully processed!')
            logger.info("Data received from cache")
        except:  # NOTE(review): bare except also catches KeyboardInterrupt/SystemExit
            logger.error("Error occurred while fetching data from Cache")
            result = cmg.format_response(False,None,'Error occurred while getting data from cache!',sys.exc_info())
            # NOTE(review): this raise is nullified by the `return` in finally below.
            raise
        finally:
            return result

def ret_fuzzyC(dbtype, rec_data, u_id, cache_timeout):
    """Serve fuzzy C-means results, checking the cache for a valid entry first.

    NOTE(review): truncated in this snippet — only the cache-freshness probe is
    visible; the miss/hit handling presumably follows in the full source.
    """
    # Timestamp for the expiry comparison; shadows the stdlib `time` name.
    time = datetime.datetime.now()
    try:
        # NOTE(review): inconsistency — every sibling helper calls
        # CC.get_cached_data; this one calls CC.get_data. Confirm which is
        # intended. Also string-interpolated SQL as elsewhere.
        cache_existance = CC.get_data("SELECT expirydatetime >= '{0}' FROM cache_algorithms "
                                      "WHERE id = '{1}' and name_algo='fuzzyC'".format(time, u_id))['rows']
コード例 #8
0
            # NOTE(review): fragment — the enclosing `def` and the `try:` that
            # this first line belongs to are not visible in this snippet.
            limit = int(params.limit)
        except AttributeError:
            # NOTE(review): py2 print statement; inconsistent with the
            # logger.info used for the analogous missing-timeout case below.
            print "No limit specified"
            limit = None
        db = params.db
        pkey = key
        try:
            cache_timeout = int(params.t)
        except AttributeError, err:  # Python 2 syntax
            logger.info("No cache timeout mentioned.")
            cache_timeout = int(default_cache_timeout)

        # SELECT a2, b2, c2, a1, b1, c1, sum(a3), sum(b3), sum(c3) FROM tablenames GROUP BY a1, b1, c1 ORDER BY a2, b2, c2
        # Timestamp for the expiry comparison; shadows the stdlib `time` name.
        time = datetime.datetime.now()
        try:
            # Rows of booleans: is the cached aggregation result unexpired?
            # NOTE(review): string-interpolated SQL — parameterize if pkey can
            # be externally controlled.
            cache_existance = CC.get_cached_data("SELECT expirydatetime >= '{0}' FROM cache_aggregation WHERE id = '{1}'".format(time, pkey))['rows']
        except Exception:
            logger.error("Error connecting to cache..")
            cache_existance = ()
            pass  # NOTE(review): dead statement — handler already ends above

        # Recompute when there is no cache row or the expiry test returned false.
        if len(cache_existance) == 0 or cache_existance[0][0] == 0 :

            if db.lower() == 'mssql':
                logger.info("MSSQL - Processing started!")
                # tablenames[1] is the FROM clause base table; joins are appended below.
                query_body = tablenames[1]
                # NOTE(review): condition reads as `join_types and (join_keys != {})`
                # due to precedence — confirm `join_types != {}` wasn't intended.
                if join_types and join_keys != {}:
                    for i in range(0, len(join_types)):
                        # join_types/join_keys appear to be 1-indexed mappings;
                        # tablenames 1-indexed with [1] as the base table.
                        sub_join_body = join_types[i+1] + ' ' + tablenames[i+2] + ' ' + join_keys[i+1]
                        query_body += ' '
                        query_body += sub_join_body