def initiate_usage_scheduler(self):
    # Computes per-user BigQuery storage usage and feeds it to the rating engine.
    usage_scheduler_run_time = datetime.datetime.now()
    if self.run_count != 0:
        print 'usage_scheduler started at: ' + str(usage_scheduler_run_time)
        logger.info('usage_scheduler started at: ' + str(usage_scheduler_run_time))
        datasets = bq.get_datasets()
        for dataset in datasets:
            try:
                dataset_id = dataset['datasetReference']['datasetId']
                storage_query = "SELECT SUM(size_bytes) as storage_bq FROM [{0}.__TABLES__]".format(dataset_id)
                storage_bq = bq.execute_query(storage_query, user_id=0, tenant='DigInEngine')[0]['storage_bq']
                user_id = db.get_data(
                    "SELECT user_id FROM digin_user_settings WHERE REPLACE(REPLACE(email, '.', '_'), '@', '_') = '{0}' limit 1".format(
                        dataset_id))['rows']
                if user_id == ():
                    print 'No user settings found for user: ' + dataset_id
                    logger.info('No user settings found for user: ' + dataset_id)
                else:
                    usages = {'storage_bq': 0 if storage_bq is None else storage_bq}
                    obj = dre.RatingEngine(user_id[0][0], 'undefined', **usages)
                    obj.set_usage()
                    print 'Storage calculated for user ' + str(user_id[0][0])
                    logger.info('Storage calculated for user ' + str(user_id[0][0]))
            except Exception, err:
                print err
                logger.error(err)
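
# A minimal sketch of one way to drive initiate_usage_scheduler on a fixed
# interval; the runner class and the one-hour default below are illustrative
# assumptions, not part of the original scheduler.
import threading

class UsageSchedulerRunner(object):
    def __init__(self, scheduler, interval_seconds=3600):
        self.scheduler = scheduler  # any object exposing initiate_usage_scheduler()
        self.interval_seconds = interval_seconds

    def start(self):
        self.scheduler.initiate_usage_scheduler()
        # re-arm the timer so the usage calculation repeats every interval
        timer = threading.Timer(self.interval_seconds, self.start)
        timer.daemon = True
        timer.start()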
def ret_data(dbtype, rec_data, user_id=None, tenant=None):
    # rec_data is a list of single-entry dicts, each mapping a table name to
    # the list of fields to select from it.
    result = None
    for rec in rec_data:
        tables = rec.keys()
        fields = rec.values()[0]

        fields_str = ', '.join(fields)
        tables_str = ', '.join(tables)
        query = 'SELECT {0} FROM {1}'.format(fields_str, tables_str)

        if dbtype.lower() == 'mssql':
            try:
                result = mssql.execute_query(query)
            except Exception, err:
                return cmg.format_response(False, err, 'Error occurred while getting data from MSSQL!', sys.exc_info())

        elif dbtype.lower() == 'bigquery':
            try:
                result = BQ.execute_query(query, user_id=user_id, tenant=tenant)
            except Exception, err:
                return cmg.format_response(False, err, 'Error occurred while getting data from BigQuery Handler!', sys.exc_info())

    # return the last query's result to the caller
    return result
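
# Illustrative call shape for ret_data; the table and field names are made up.
# Each rec_data entry maps one table to its field list, so this example issues:
# SELECT order_id, amount FROM sales_orders
def _demo_ret_data():
    rec_data = [{'sales_orders': ['order_id', 'amount']}]
    return ret_data('bigquery', rec_data, user_id=42, tenant='DigInEngine')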
def create_Dataset(params):
    datasetID = params.dataSetName
    db = params.db
    if db.lower() == 'bigquery':
        try:
            result = bqhandler.create_dataset(datasetID)
            return comm.format_response(True, result, "", exception=None)
        except Exception, err:
            print err
            return comm.format_response(False, err, "Error Occurred when creating dataset!", exception=sys.exc_info())
    return comm.format_response(False, db, "DB not implemented!", exception=None)
def get_tables(params):

    datasetID = params.dataSetName
    db = params.db
    if db.lower() == 'bigquery':
        try:
            result = bqhandler.get_tables(datasetID)
        except Exception, err:
            return comm.format_response(False, err, "Error Occurred when retrieving tables!", exception=sys.exc_info())
        return comm.format_response(True, result, "Tables retrieved!", exception=None)
    return comm.format_response(False, db, "DB not implemented!", exception=None)
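
# These handlers read their arguments off a 'params' object via attribute
# access (params.dataSetName, params.db, ...). A minimal stand-in for local
# testing; the class is an assumption, not part of the original services.
class Params(object):
    def __init__(self, **kwargs):
        self.__dict__.update(kwargs)

def _demo_get_tables():
    return get_tables(Params(dataSetName='Demo', db='bigquery'))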
def set_initial_user_env(params, email, user_id, domain):

    default_sys_settings = conf.get_conf('DefaultConfigurations.ini', 'System Settings')
    dataset_name = email.replace(".", "_").replace("@", "_")

    if ast.literal_eval(default_sys_settings['signup_dataset_creation']):
        db = params['db']
        if db.lower() == 'bigquery':
            logger.info("Creation of dataset started!")
            print "Creation of dataset started!"
            try:
                result_ds = bq.create_dataset(dataset_name)
                logger.info("Creation of dataset status: " + str(result_ds))
                print "Creation of dataset status: " + str(result_ds)
            except Exception, err:
                print err
                print "Creation of dataset failed!"
                return cmg.format_response(False, err, "Error Occurred while creating dataset in bigquery!", exception=sys.exc_info())
        else:
            raise NotImplementedError("DB '%s' not implemented!" % db)
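
# conf.get_conf returns raw INI strings, so ast.literal_eval is what turns the
# stored 'True'/'False' into a real boolean. A hedged sketch of the section
# this code expects in DefaultConfigurations.ini (key name taken from the
# lookup above; surrounding layout assumed):
#
#   [System Settings]
#   signup_dataset_creation = True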
def get_fields(params):

    tablename = params.tableName
    db = params.db

    if db.lower() == 'bigquery':
        datasetname = params.dataSetName
        fields = bqhandler.get_fields(datasetname, tablename)
        return comm.format_response(True, fields, "", exception=None)
    elif db.lower() == 'mssql':
        fields = mssqlhandler.get_fields(tablename)
        return comm.format_response(True, fields, "", exception=None)
    elif db.lower() == 'postgresql':
        schema_name = params.schema
        colnames = pgsqlhandler.get_fields(tablename, schema_name)
        return comm.format_response(True, colnames, "", exception=None)
    elif db.lower() == 'mysql':
        colnames = mysqlhandler.get_fields(tablename)
        return comm.format_response(True, colnames, "", exception=None)
    else:
        return comm.format_response(False, db, "DB not implemented!", exception=None)
def _table_creation_bq(_schema, db, data_set_name, table_name):
    if db.lower() == 'bigquery':
        # map pandas dtype names to BigQuery column types; unrecognized
        # dtypes are skipped rather than appended as empty schema entries
        type_map = {
            'object': 'string',
            'int64': 'integer',
            'float64': 'float',
            'datetime64[ns]': 'TIMESTAMP',
        }
        bq_schema = []
        for i in _schema:
            t = i['type']
            if t in type_map:
                bq_schema.append({'name': i['name'], 'type': type_map[t], 'mode': 'nullable'})

        print 'Table creation started!'
        print table_name
        try:
            print data_set_name
            result = bq.create_Table(data_set_name, table_name, bq_schema)
            if result:
                print "Table creation successful!"
            else:
                print "Error occurred while creating table! If the table already exists, data might be inserted into the existing table!"
        except Exception, err:
            print "Error occurred while creating table!"
            print err
            raise
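
    # A hedged sketch of how the _schema argument can be derived from a pandas
    # DataFrame so the dtype names line up with type_map above; the helper name
    # is an assumption:
    #
    #   def dataframe_schema(df):
    #       return [{'name': col, 'type': str(dtype)}
    #               for col, dtype in df.dtypes.iteritems()]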
    # collect the indices of 'object' columns so read_csv can parse them as dates
    C = []
    for i in range(columns.size):
        if columns[i] == 'object':
            C.append(i)

    try:
        # parse_dates takes the list of column indices to parse as dates
        file_csv = pd.read_csv(file_path + '/' + filename, parse_dates=C, error_bad_lines=False, dayfirst=True)
    except Exception, err:
        print err
        result = comm.format_response(False, err, "Failed to read CSV file", exception=sys.exc_info())
        return result
    print "Field type recognition successful"

    if db.lower() == 'bigquery':

        try:
            table_exists = bq.check_table(dataset_name, table_name)
            if table_exists:
                if parms.folder_type.lower() == 'singlefile':
                    bq.delete_table(dataset_name, table_name)
                    print "Existing table deleted"
                    try:
                        print dataset_name
                        result = bq.create_Table(dataset_name, table_name, schema)
                        if result:
                            print "Table creation successful!"
                        else:
                            print "Error occurred while creating table! If the table already exists, data might be inserted into the existing table!"

                    except Exception, err:
                        print "Error occurred while creating table!"
                        print err
                    window_functions_set.append(window_functions)

                total_str = '{0}({1}) OVER () as total'.format(agg, measure)
                fields_str = ', '.join(fields)
                window_functions_set_str = ', '.join(window_functions_set)
                fields_from_inner_query_str = ', '.join(fields_from_inner_query)

                query = 'SELECT {0}, total, {1} FROM (SELECT {0} , {2}, {3} FROM {4} {5})z GROUP BY {0}, total, {1} ORDER BY {0}' \
                    .format(fields_str, fields_from_inner_query_str, total_str, window_functions_set_str, table_name,
                            where_clause)

                print query
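
                # With fields=['region'], agg='SUM', measure='sales' and one
                # window function, the generated query takes roughly this shape
                # (illustrative names, not from the original):
                #
                #   SELECT region, total, sales_sum FROM (
                #       SELECT region, SUM(sales) OVER () as total,
                #              SUM(sales) OVER (PARTITION BY region) as sales_sum
                #       FROM orders )z
                #   GROUP BY region, total, sales_sum ORDER BY region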
            result = ''
            if db.lower() == 'bigquery':
                try:
                    result = BQ.execute_query(query,user_id=user_id, tenant=tenant)
                    logger.info('Data received!')
                    logger.debug('Result %s' % result)
                except Exception, err:
                    logger.error('Error occurred while getting data from BigQuery Handler! %s' % err)
                    return cmg.format_response(False,None,'Error occurred while getting data from BigQuery Handler!',sys.exc_info())

            elif db.lower() == 'mssql':
                try:
                    result = mssql.execute_query(query)
                    logger.info('Data received!')
                    logger.debug('Result %s' % result)
                except Exception, err:
                    logger.error('Error occurred while getting data from sql Handler! %s' % err)
                    return cmg.format_response(False,None,'Error occurred while getting data from sql Handler!',sys.exc_info())
                cache_existence = CC.get_cached_data("SELECT expirydatetime >= '{0}' FROM cache_execute_query WHERE id = '{1}'".format(time, cache_key))['rows']
          except Exception, err:
                logger.error("Error connecting to cache!")
                logger.error(err)
                # treat a cache failure as a miss and fall through to the DB
                cache_existence = ()
          if len(cache_existence) != 0:
                try:
                    data = CC.get_cached_data("SELECT data, query FROM cache_execute_query WHERE id = '{0}'".format(cache_key))['rows']
                except Exception,err:
                    return  comm.format_response(False,None,"Error occurred while retrieving data from cache!",exception=sys.exc_info())
                return  comm.format_response(True,json.loads(data[0][0]),data[0][1],exception=None)
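
          # The two SELECTs above imply a cache table with at least
          # (id, data, query, expirydatetime), where data holds the
          # JSON-encoded result set, hence json.loads() on the hit path.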

          if db.lower() == 'bigquery':
               try:
                    results = bqhandler.execute_query(query, offset=offset_, limit=limit_,user_id=user_id,tenant=tenant)
               except Exception, err:
                    err_content = json.loads(err.content)
                    return comm.format_response(False, err_content['error']['errors'][0]['reason']+ ' ' + err_content['error']['errors'][0]['message'], query)
               try:
                    logger.info('Inserting to cache..')
                    # write the cache row on a background thread so the
                    # response is not blocked by the insert
                    t = threading.Thread(target=MEM_insert, args=(cache_key, json.dumps(results), query, cache_timeout))
                    t.start()
               except Exception, err:
                    logger.error("Cache insertion failed. %s" % err)
               return  comm.format_response(True,results,query,exception=None)
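
          # MEM_insert is defined elsewhere; given the SELECTs above, a hedged
          # sketch of the write side it needs to implement (the helper name
          # insert_cache_data and the SQL shape are assumptions):
          #
          #   def MEM_insert(cache_key, data_json, query, timeout):
          #       expiry = datetime.datetime.now() + datetime.timedelta(seconds=timeout)
          #       CC.insert_cache_data("INSERT INTO cache_execute_query (id, data, query, expirydatetime) "
          #                            "VALUES ('{0}', '{1}', '{2}', '{3}')".format(cache_key, data_json, query, expiry))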

          elif db.lower() == 'mssql':
               result = None
               try:
                    result = mssql.execute_query(query)
               except Exception, err:
                    result = cmg.format_response(False, None, 'Error occurred while getting data from sql Handler!', sys.exc_info())
               finally:
                    return result

            elif db.lower() == 'bigquery':

                try:
                    agg_ = aggregations["Date, '%m'"]
                except Exception:
                    agg_ = ''
                if agg_ == 'STRFTIME_UTC_USEC':
                    # special-cased monthly rollup for the demo superstore sales dataset
                    query = "SELECT STRFTIME_UTC_USEC(Date, '%Y') as year, STRFTIME_UTC_USEC(Date, '%m') as month," \
                            " SUM(Sales) as sales, SUM(OrderQuantity) as tot_units FROM [Demo.forcast_superstoresales]" \
                            " GROUP BY year, month ORDER BY year, month"
                    result_ = BQ.execute_query(query,limit=limit,user_id=user_id,tenant=tenant)
                    result = cmg.format_response(True,result_,query)
                    return result
                else:
                    logger.info("BigQuery - Processing started!")
                    query_body = tablenames[1]
                    if join_types and join_keys != {}:
                        for i in range(0, len(join_types)):
                            sub_join_body = join_types[i+1] + ' ' + tablenames[i+2] + ' ' + join_keys[i+1]
                            query_body += ' '
                            query_body += sub_join_body

                    if conditions:
                        conditions = 'WHERE %s' %(conditions)

                    if group_bys_dict != {}:
            field_name_date = params.field_name_d
            field_name_forecast = params.field_name_f
            interval = str(params.interval)
            db_type = params.dbtype

        except Exception, err:
            return cmg.format_response(False, err, 'Input parameters caused the service to raise an error',
                                       sys.exc_info())

        if interval == 'Daily':
            if db_type.lower() == 'bigquery':

                query = "SELECT TIMESTAMP_TO_SEC({0}) as date, SUM({1}) as value from {2} group by date order by date".\
                    format(field_name_date,field_name_forecast,table_name)
                try:
                    result = BQ.execute_query(query)
                except Exception:
                    result = cmg.format_response(False,None,'Error occurred while getting data from BQ Handler!',sys.exc_info())
                    return result
            elif db_type.lower() == 'postgresql':
                query = "SELECT date_part('epoch',{0}::date) as date, SUM({1})::FLOAT as value from {2} group by date order by date".\
                    format(field_name_date,field_name_forecast,table_name)
                try:
                    result = PG.execute_query(query)
                except Exception:
                    result = cmg.format_response(False,None,'Error occurred while getting data from PG Handler!',sys.exc_info())
                    return result
            elif db_type.lower() == 'mssql':
                query = "SELECT DATEDIFF(s, '1970-01-01 00:00:00', cast({0} as Date)) date, SUM({1}) as value from {2} " \
                        "group by DATEDIFF(s, '1970-01-01 00:00:00', cast({0} as Date))  " \
                        "order by DATEDIFF(s, '1970-01-01 00:00:00', cast({0} as Date)) ".format(field_name_date,field_name_forecast,table_name)