def initiate_usage_scheduler(self):
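    """Compute per-dataset BigQuery storage usage and record it for rating.

    For each dataset, sums table sizes from the dataset's __TABLES__ metadata,
    maps the dataset id back to the owning user in digin_user_settings, and
    pushes the usage figure through dre.RatingEngine.
    """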

    usage_scheduler_run_time = datetime.datetime.now()
    if self.run_count != 0:
        print 'usage_scheduler started at: ' + str(usage_scheduler_run_time)
        logger.info('usage_scheduler started at: ' + str(usage_scheduler_run_time))
        datasets = bq.get_datasets()
        for dataset in datasets:
            try:
                dataset_id = dataset['datasetReference']['datasetId']
                # Total bytes stored across every table in the dataset.
                storage_query = "SELECT SUM(size_bytes) as storage_bq FROM [{0}.__TABLES__]".format(dataset_id)
                storage_bq = bq.execute_query(storage_query, user_id=0, tenant='DigInEngine')[0]['storage_bq']
                # Dataset ids are emails with '.' and '@' replaced by '_',
                # so reverse that mapping to find the owning user.
                user_id = db.get_data(
                    "SELECT user_id FROM digin_user_settings WHERE REPLACE(REPLACE(email, '.', '_'), '@', '_') = '{0}' limit 1".format(
                        dataset_id))['rows']
                if user_id == ():
                    print 'No user settings found for user: ' + dataset_id
                    logger.info('No user settings found for user: ' + dataset_id)
                else:
                    usages = {'storage_bq': 0 if storage_bq is None else storage_bq}
                    obj = dre.RatingEngine(user_id[0][0], 'undefined', **usages)
                    obj.set_usage()
                    print 'Storage calculated for user ' + str(user_id[0][0])
                    logger.info('Storage calculated for user ' + str(user_id[0][0]))
            except Exception, err:
                print err
                logger.error(err)
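
# A minimal sketch of how this method might be driven on an interval
# (hypothetical wiring; the real scheduling loop is outside this fragment):
#
#     import threading
#
#     def _schedule(self, interval_seconds=3600):
#         self.initiate_usage_scheduler()
#         self.run_count += 1
#         threading.Timer(interval_seconds, self._schedule,
#                         [interval_seconds]).start()
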
def ret_data(dbtype, rec_data, user_id=None, tenant=None):
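    """Build and run a SELECT over the requested backend.

    rec_data is a list of single-key dicts mapping a table name to the list
    of fields to select from it, e.g. [{'orders': ['order_id', 'qty']}]
    (illustrative shape inferred from the loop below).
    """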

    df = pd.DataFrame()
    for entry in rec_data:
        # Each entry maps one table name to the fields wanted from it.
        tables = entry.keys()
        fields = entry.values()[0]

        fields_str = ', '.join(fields)
        tables_str = ', '.join(tables)
        query = 'SELECT {0} FROM {1}'.format(fields_str, tables_str)

        if dbtype.lower() == 'mssql':
            try:
                result = mssql.execute_query(query)
            except Exception, err:
                result = cmg.format_response(False, err, 'Error occurred while getting data from MSSQL!', sys.exc_info())
                return result

        elif dbtype.lower() == 'bigquery':
            try:
                result = BQ.execute_query(query, user_id=user_id, tenant=tenant)
            except Exception, err:
                result = cmg.format_response(False, err, 'Error occurred while getting data from BigQuery Handler!', sys.exc_info())
                return result
                    window_functions_set.append(window_functions)

                total_str = '{0}({1}) OVER () as total'.format(agg, measure)
                fields_str = ', '.join(fields)
                window_functions_set_str = ', '.join(window_functions_set)
                fields_from_inner_query_str = ', '.join(fields_from_inner_query)

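                # Outer query re-groups the selected fields and the window
                # columns computed in the inner (aliased z) query; 'total' is
                # the grand total of the measure over the full result set.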
                query = 'SELECT {0}, total, {1} FROM (SELECT {0} , {2}, {3} FROM {4} {5})z GROUP BY {0}, total, {1} ORDER BY {0}' \
                    .format(fields_str, fields_from_inner_query_str, total_str, window_functions_set_str, table_name,
                            where_clause)

                print query
            result = ''
            if db.lower() == 'bigquery':
                try:
                    result = BQ.execute_query(query, user_id=user_id, tenant=tenant)
                    logger.info('Data received!')
                    logger.debug('Result %s' % result)
                except Exception, err:
                    logger.error('Error occurred while getting data from BigQuery Handler! %s' % err)
                    return cmg.format_response(False, None, 'Error occurred while getting data from BigQuery Handler!', sys.exc_info())

            elif db.lower() == 'mssql':
                try:
                    result = mssql.execute_query(query)
                    logger.info('Data received!')
                    logger.debug('Result %s' % result)
                except Exception, err:
                    logger.error('Error occurred while getting data from sql Handler! %s' % err)
                    return cmg.format_response(False, None, 'Error occurred while getting data from sql Handler!', sys.exc_info())
          try:
                # Check whether a cache entry exists and has not yet expired.
                cache_existance = CC.get_cached_data("SELECT expirydatetime >= '{0}' FROM cache_execute_query WHERE id = '{1}'".format(time, cache_key))['rows']
          except Exception, err:
                logger.error("Error connecting to cache..")
                logger.error(err)
                cache_existance = ()
          # Serve from cache when a live entry exists.
          if len(cache_existance) != 0:
                try:
                    data = CC.get_cached_data("SELECT data, query FROM cache_execute_query WHERE id = '{0}'".format(cache_key))['rows']
                except Exception, err:
                    return comm.format_response(False, None, "Error occurred while retrieving data from cache!", exception=sys.exc_info())
                return comm.format_response(True, json.loads(data[0][0]), data[0][1], exception=None)

          if db.lower() == 'bigquery':
               try:
                    results = bqhandler.execute_query(query, offset=offset_, limit=limit_, user_id=user_id, tenant=tenant)
               except Exception, err:
                    # Surface BigQuery's error reason and message to the caller.
                    err_content = json.loads(err.content)
                    return comm.format_response(False, err_content['error']['errors'][0]['reason'] + ' ' + err_content['error']['errors'][0]['message'], query)
               try:
                    logger.info('Inserting to cache..')
                    # p = Process(target=MEM_insert,args=(cache_key,json.dumps(results),query,cache_timeout))
                    # p.start()
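                    # Run the cache write on a background thread so it never
                    # delays the response to the caller.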
                    t = threading.Thread(target=MEM_insert, args=(cache_key,json.dumps(results),query,cache_timeout))
                    t.start()
               except Exception, err:
                    logger.error("Cache insertion failed. %s" % err)
               return comm.format_response(True, results, query, exception=None)

          elif db.lower() == 'mssql':
               try:
                    result = mssql.execute_query(query)
               except Exception, err:
                    result = cmg.format_response(False, None, 'Error occurred while getting data from sql Handler!', sys.exc_info())
                    #result_dict = json.loads(result)
               finally:
                    return result

            elif db.lower() == 'bigquery':

                try:
                    agg_ = aggregations["Date, '%m'"]
                except Exception:
                    agg_ = ''
                if agg_ == 'STRFTIME_UTC_USEC':
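                    # Legacy-SQL STRFTIME_UTC_USEC buckets the timestamp into
                    # year/month columns for the monthly sales rollup.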
                    query = "SELECT STRFTIME_UTC_USEC(Date, '%Y') as year, STRFTIME_UTC_USEC(Date, '%m') as month," \
                            " SUM(Sales) as sales, SUM(OrderQuantity) as tot_units FROM [Demo.forcast_superstoresales]" \
                            " GROUP BY year, month ORDER BY year, month"
                    result_ = BQ.execute_query(query, limit=limit, user_id=user_id, tenant=tenant)
                    result = cmg.format_response(True, result_, query)
                    return result
                else:
                    logger.info("BigQuery - Processing started!")
                    query_body = tablenames[1]
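                    # Stitch each join clause onto the base table:
                    # <base> <join_type> <table> <join condition> ...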
                    if join_types and join_keys != {}:
                        for i in range(0, len(join_types)):
                            sub_join_body = join_types[i+1] + ' ' + tablenames[i+2] + ' ' + join_keys[i+1]
                            query_body += ' '
                            query_body += sub_join_body

                    if conditions:
                        conditions = 'WHERE %s' % conditions

                    if group_bys_dict != {}:
            field_name_date = params.field_name_d
            field_name_forecast = params.field_name_f
            interval = str(params.interval)
            db_type = params.dbtype

        except Exception, err:
            return cmg.format_response(False, err, 'Input parameters caused the service to raise an error',
                                       sys.exc_info())

        if interval == 'Daily':
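            # Every backend reduces the date column to epoch seconds so the
            # forecaster consumes a uniform (date, value) series.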
            if db_type.lower() == 'bigquery':

                query = "SELECT TIMESTAMP_TO_SEC({0}) as date, SUM({1}) as value from {2} group by date order by date".\
                    format(field_name_date,field_name_forecast,table_name)
                try:
                    result = BQ.execute_query(query)
                except:
                    result = cmg.format_response(False,None,'Error occurred while getting data from BQ Handler!',sys.exc_info())
                    return result
            elif db_type.lower() == 'postgresql':
                query = "SELECT date_part('epoch',{0}::date) as date, SUM({1})::FLOAT as value from {2} group by date order by date".\
                    format(field_name_date,field_name_forecast,table_name)
                try:
                    result = PG.execute_query(query)
                except:
                    result = cmg.format_response(False,None,'Error occurred while getting data from PG Handler!',sys.exc_info())
                    return result
            elif db_type.lower() == 'mssql':
                query = "SELECT DATEDIFF(s, '1970-01-01 00:00:00', cast({0} as Date)) date, SUM({1}) as value from {2} " \
                        "group by DATEDIFF(s, '1970-01-01 00:00:00', cast({0} as Date))  " \
                        "order by DATEDIFF(s, '1970-01-01 00:00:00', cast({0} as Date)) ".format(field_name_date,field_name_forecast,table_name)