def get_all(cls, note_id=None, chart_ids=None):
    """Fetch chart rows, optionally filtered by note and/or chart ids.

    Args:
        note_id: optional note id; filters on the `note` column.
        chart_ids: optional iterable of chart ids; filters on the `id` column.

    Returns:
        List of row dicts, with 'graph' and 'query_params' JSON-decoded.

    Fix: both filters were interpolated straight into the SQL string
    (injection risk); they are now bound parameters.
    """
    Logger.info("get_all note_id={note_id}, chart_ids={chart_ids}".format(
        note_id=note_id, chart_ids=chart_ids))
    connection = cls.__get_db()
    try:
        wheres = []
        params = []
        sql = "SELECT * FROM charts"
        if note_id:
            wheres.append('note=%s')
            params.append(note_id)
        if chart_ids:
            # NOTE(review): original SQL said `ids in (...)`; every other
            # query on this table uses the `id` column, so `ids` looked
            # like a typo and was corrected here.
            placeholders = ','.join(['%s'] * len(chart_ids))
            wheres.append('id in ({0})'.format(placeholders))
            params.extend(chart_ids)
        if wheres:
            sql += ' WHERE {wheres}'.format(wheres=' and '.join(wheres))
        Logger.info("get_all sql={sql}".format(sql=sql))
        with connection.cursor() as cursor:
            cursor.execute(sql, params)
            rows = cursor.fetchall()
            for row in rows:
                # These columns are stored as JSON text; decode in place.
                for column in ['graph', 'query_params']:
                    if row[column]:
                        row[column] = json.loads(row[column])
            return rows
    finally:
        connection.close()
def get_query(cls, chart_id):
    """Return only the `query` column of the given chart, or None if the
    chart does not exist."""
    Logger.info("get: chart_id={chart_id}".format(chart_id=chart_id))
    record = cls.get(chart_id, ['query'])
    return record['query'] if record else None
def __get_job(self, job_id):
    """Look up a BigQuery job by id; returns None when the job is missing."""
    Logger.debug("__get_job: job_name={job_id}".format(job_id=job_id))
    try:
        return self.__get_client().get_job(job_id)
    except NotFound:
        return None
def before():
    """Request hook: force DEBUG logging, then require an OAuth2 session for
    every endpoint except the gmail_auth ones; returns a redirect when the
    user still has to authenticate."""
    Logger.set_level(Logger.DEBUG)
    Logger.debug("before url={url}".format(url=request.endpoint))
    endpoint = request.endpoint
    # The auth endpoints themselves must stay reachable without a session.
    if endpoint and 'gmail_auth' in endpoint:
        return None
    redirection = oauth2check()
    if redirection:
        return redirection
def get(cls, note_id):
    """Fetch a single note row by primary key; None when not found."""
    Logger.info("get: note_id={note_id}".format(note_id=note_id))
    connection = cls.__get_db()
    try:
        with connection.cursor() as cursor:
            cursor.execute("SELECT * FROM notes WHERE id=%s", (note_id,))
            return cursor.fetchone()
    finally:
        connection.close()
def get_all(cls, note_ids=None):
    """Fetch all notes, optionally restricted to the given note ids.

    Fix: `note_ids` was previously accepted (and logged, with a "node_ids"
    typo) but never applied to the query; it now filters on the `id` column
    with a parameterized IN clause. Default behavior is unchanged.
    """
    Logger.info("get_all, note_ids={note_ids}".format(note_ids=note_ids))
    connection = cls.__get_db()
    try:
        with connection.cursor() as cursor:
            sql = "SELECT * FROM notes"
            params = []
            if note_ids:
                placeholders = ','.join(['%s'] * len(note_ids))
                sql += " WHERE id in ({0})".format(placeholders)
                params = list(note_ids)
            cursor.execute(sql, params)
            return cursor.fetchall()
    finally:
        connection.close()
def charts_queries():
    """Handler: run a chart query synchronously and return a JSON payload of
    the results and any error."""
    payload = request.json
    chart_id = payload['chart_id']
    resource = payload['resource']
    query_type = payload['query_type']
    query_params = payload['query_params']
    query = payload['query']
    Logger.debug('resource={0}, query_type={1}'.format(resource, query_type))
    results, error = bigshow.Chart.query_sync(chart_id=chart_id,
                                              resource=resource,
                                              query_type=query_type,
                                              query=query,
                                              query_params=query_params)
    return json.dumps({'results': results, 'error': error})
def get_all(cls):
    """Return every row of the schedules table."""
    Logger.info("get_all")
    connection = cls.__get_db()
    try:
        with connection.cursor() as cursor:
            cursor.execute('SELECT * FROM schedules')
            return cursor.fetchall()
    finally:
        connection.close()
def get(cls, schedule_id):
    """Fetch a single schedule row by primary key; None when not found."""
    Logger.info(
        'get: schedule_id:{schedule_id}'.format(schedule_id=schedule_id))
    connection = cls.__get_db()
    try:
        with connection.cursor() as cursor:
            cursor.execute('SELECT * FROM schedules where id=%s',
                           (schedule_id, ))
            return cursor.fetchone()
    finally:
        connection.close()
def delete(cls, chart_id):
    """Delete the chart with the given id; returns True on completion."""
    Logger.info("delete: chart_id={chart_id}".format(chart_id=chart_id))
    connection = cls.__get_db()
    try:
        with connection.cursor() as cursor:
            cursor.execute("DELETE FROM charts WHERE id=%s", (chart_id, ))
            connection.commit()
            return True
    finally:
        connection.close()
def add(cls, title):
    """Insert a new note with the given title; returns the new row id.

    Fix: logged at INFO instead of ERROR — this is a normal operation, and
    every sibling read method here logs at INFO.
    """
    Logger.info("add: title={title}".format(title=title))
    connection = cls.__get_db()
    try:
        with connection.cursor() as cursor:
            sql = "INSERT INTO notes(title, created) VALUES(%s, now())"
            cursor.execute(sql, (title,))
            insert_id = connection.insert_id()
            connection.commit()
            return insert_id
    finally:
        connection.close()
def __get_db(self):
    # Return a pymysql connection for the admin account.
    #
    # NOTE(review): despite the `self.__db` check, the freshly created
    # connection is never assigned to `self.__db`, so the cache branch is
    # dead and every call opens a brand-new connection. Callers close the
    # connection they receive, so caching here would actually be wrong —
    # the scaffold should probably be removed. Confirm nothing else ever
    # sets `self.__db` before changing this.
    if not self.__db:
        # Import deferred so the module can load without the dev config.
        from config.dev import config
        Logger.debug("__getDB: config=" + str(config))
        db_config = config['db']['admin']
        return pymysql.connect(host=db_config["host"],
                               user=db_config["user"],
                               password=db_config["password"],
                               db=db_config["db"],
                               charset=db_config["charset"],
                               cursorclass=pymysql.cursors.DictCursor)
    return self.__db
def get(self, email):
    """Look up an admin user row by email address; None when not found."""
    Logger.info("get: email={email}".format(email=email))
    connection = self.__get_db()
    try:
        with connection.cursor() as cursor:
            cursor.execute(
                "SELECT idx, email, name, lv, dept, permission, time_register FROM usr WHERE email=%s",
                (email, ))
            row = cursor.fetchone()
            # Commit kept from the original, although this is a read.
            connection.commit()
    finally:
        connection.close()
    return row
def __get_db(cls):
    # Return a pymysql connection for chart queries.
    #
    # NOTE(review): like the admin variant, the `Chart.__db` check never
    # caches anything — the new connection is returned without being stored,
    # so a fresh connection is opened on every call. Callers close what they
    # receive, so the dead branch is confusing and worth cleaning up.
    if not Chart.__db:
        # Register a TIMESTAMP pass-through converter on every connect.
        # assumes `conversions`, `FIELD_TYPE` and `through` come from the
        # pymysql converters module — TODO confirm at the imports.
        conversions[FIELD_TYPE.TIMESTAMP] = through
        from config.dev import config
        db_config = config['db']['default']
        Logger.debug("new connection - CHART")
        return pymysql.connect(host=db_config["host"],
                               user=db_config["user"],
                               password=db_config["password"],
                               db=db_config["db"],
                               charset=db_config["charset"],
                               cursorclass=pymysql.cursors.DictCursor)
    return Chart.__db
def oauth2callback():
    """OAuth2 redirect handler: exchange the auth code for credentials, look
    up the admin user by the token's email, and establish a session on
    success; unknown users go to the failure page."""
    code = request.args['code']
    Logger.debug("oauth2callback code={code}".format(code=code))
    credentials = __getFlow().step2_exchange(code)
    email = credentials.id_token[u'email']
    user = Admin().get(email)
    if not user:
        return redirect(url_for('gmail_auth.fail'))
    Logger.debug("user_id={user_id}, email={email}".format(
        user_id=user['idx'], email=email))
    session['user'] = {'id': user['idx'], 'time': time.time()}
    return redirect('/')
def query(chart_id, resource, query_type, query, query_params):
    """Kick off a query for a chart through its resource adapter.

    Returns the job id, or None when no adapter exists for the resource.
    """
    Logger.debug(
        'chart_id={0}, resource={1}, query_type={2}, query={3}, query_params={4}'
        .format(chart_id, resource, query_type, query, query_params))
    adapter = Resource.get(resource_id=resource)
    if not adapter:
        return None
    job_id = Chart.get_job_id(chart_id)
    adapter.query(job_id=job_id,
                  query_type=query_type,
                  query=query,
                  query_params=query_params)
    return job_id
def get(cls, chart_id, columns=None):
    """Fetch one chart row by id, optionally restricted to chosen columns.

    `columns` may be None (all columns), a single name, or a list of names.
    """
    Logger.info("get: chart_id={chart_id}, columns={columns}".format(
        chart_id=chart_id, columns=columns))
    columns = columns or ['*']
    if not isinstance(columns, list):
        columns = [columns]
    connection = cls.__get_db()
    try:
        with connection.cursor() as cursor:
            cursor.execute(
                "SELECT {0} FROM charts WHERE id=%s".format(
                    ','.join(columns)), (chart_id, ))
            return cursor.fetchone()
    finally:
        connection.close()
def get_query(config, query_type, query, query_params=None):
    # Build the final SQL for a query. For RETENTION, `query` holds JSON
    # options (range/region/platform) that are expanded into a generated
    # retention statement; every query then has its date/pattern
    # placeholders substituted before being returned.
    #
    # NOTE(review): reconstructed from collapsed source — the final
    # replace_patterns + return are assumed to sit at function level (the
    # async path calls this for non-retention types too, and a None return
    # there would break it). Confirm against the original indentation.
    Logger().debug(
        "get_query: query_type={query_type}, query_params={query_params}"
        .format(query_type=query_type, query_params=query_params))
    if QueryType.RETENTION == int(query_type):
        option = json.loads(query)
        date_range = option['range']
        interval = date_range - 1
        region = option['region'] if 'region' in option else None
        platform = BigQueryAdapter.QueryBuilder.__get_platform(
            option['platform']) if 'platform' in option else None
        # End two days back — presumably because yesterday's data may not
        # be fully loaded yet; TODO confirm the 2-day lag assumption.
        end_date = datetime.datetime.now() - datetime.timedelta(days=2)
        start_date = end_date - datetime.timedelta(days=interval)
        query = BigQueryAdapter.QueryBuilder.retention(
            config, start_date, end_date, date_range, region, platform)
        ##Logger().debug(query)
    query = BigQueryAdapter.QueryBuilder.replace_patterns(query)
    Logger().debug(query)
    return query
def add(cls, chart):
    """Insert a new chart row; `graph` and `query_params` are JSON-encoded.

    Returns the new row id.
    Fix: logged at INFO instead of ERROR — this is a normal operation.
    """
    Logger.info("add: chart={chart}".format(chart=chart))
    connection = cls.__get_db()
    try:
        with connection.cursor() as cursor:
            sql = "INSERT INTO charts(note, title, resource, graph, query_type, query, query_params) VALUES(%s, " \
                  "%s, %s, %s, %s, %s, %s) "
            cursor.execute(sql, (
                chart['note'],
                chart['title'],
                chart['resource'],
                json.dumps(chart['graph']),
                chart['query_type'],
                chart['query'],
                json.dumps(chart['query_params']),
            ))
            insert_id = connection.insert_id()
            connection.commit()
            return insert_id
    finally:
        connection.close()
def add(cls, schedule):
    """Insert a schedule row built from the intersection of the table schema
    and the supplied dict (never `id`); returns the new row id.

    Fix: logged at INFO instead of ERROR — this is a normal operation.
    """
    Logger.info("add: schedule={schedule}".format(schedule=schedule))
    connection = cls.__get_db()
    try:
        with connection.cursor() as cursor:
            # Only insert columns that exist in the schema, excluding id.
            keys = list(
                set(cls.__schema).intersection(set(schedule.keys())) -
                {'id'})
            sql = "INSERT INTO schedules({keys}, created) " \
                  "VALUES({columns}, now())".format(
                      keys=','.join(keys),
                      columns=','.join(map(lambda x: "%s", keys)))
            values = [schedule[key] for key in keys]
            Logger.debug("sql={sql}, values={values}".format(
                sql=sql, values=values))
            cursor.execute(sql, values)
            insert_id = connection.insert_id()
            connection.commit()
            return insert_id
    finally:
        connection.close()
def update(cls, chart_id, chart):
    """Update the given chart row with the schema-valid keys of `chart`.

    list/dict values are JSON-encoded before storage. Returns True.
    Fix: `chart_id` was formatted straight into the SQL string (injection
    risk); it is now a bound parameter. Also dropped a redundant
    str.format wrapper around json.dumps output.
    """
    schema = set(cls.__schema) - {'id'}
    targets = list(schema & chart.keys())
    columns = ','.join(map(lambda x: "{x}=%s".format(x=x), targets))
    values = []
    for key in targets:
        if isinstance(chart[key], (list, dict)):
            values.append(json.dumps(chart[key]))
        else:
            values.append(chart[key])
    sql = "UPDATE charts SET {columns} WHERE id=%s".format(columns=columns)
    # The id is the last bound parameter, matching the trailing %s.
    values.append(chart_id)
    Logger.debug("columns={columns},sql={sql},values={values}".format(
        columns=columns, sql=sql, values=values))
    connection = cls.__get_db()
    try:
        with connection.cursor() as cursor:
            cursor.execute(sql, tuple(values))
            connection.commit()
            return True
    finally:
        connection.close()
def update(cls, schedule):
    """Update a schedule row from the schema-valid keys of `schedule`.

    Fixes: the row id was formatted straight into the SQL string
    (injection risk) and is now a bound parameter; log level is INFO
    instead of ERROR for a normal operation. Return value kept for
    compatibility (pymysql insert_id(), 0 after an UPDATE).
    """
    Logger.info("update: schedule={schedule}".format(schedule=schedule))
    connection = cls.__get_db()
    try:
        with connection.cursor() as cursor:
            keys = list(
                set(cls.__schema).intersection(set(schedule.keys())))
            sql = "UPDATE schedules SET {key_val} WHERE id=%s".format(
                key_val=','.join(
                    map(lambda x: "{key}=%s".format(key=x), keys)))
            values = [schedule[key] for key in keys]
            # Bind the id last, matching the trailing %s in the WHERE.
            values.append(schedule['id'])
            Logger.debug("sql={sql}, values={values}".format(
                sql=sql, values=values))
            cursor.execute(sql, values)
            insert_id = connection.insert_id()
            connection.commit()
            return insert_id
    finally:
        connection.close()
def get_result(chart_id, from_cache=True):
    # Resolve the result of a chart's query via a two-level cache:
    #   1. `last_job_key` -> dict describing the most recent job,
    #   2. `job id`       -> dict with the job's final result/error.
    # Returns a (status, results, error) triple; status is 'RUNNING' while
    # a job is in flight, 'DONE' when results (or an error) are available.
    Logger().debug(
        "get_result: chart_id={chart_id}, from_cache={from_cache}".format(
            chart_id=chart_id, from_cache=from_cache))
    last_job_key = Chart.get_job_key(chart_id=chart_id)
    if from_cache is True:
        last_job = Cache().get(last_job_key)
    else:
        # Bypass the cache: treat this as if no job had been started.
        last_job = None
    if not last_job:
        # No known job: start a fresh one and cache its descriptor.
        chart = models.Chart.get(
            chart_id, ['resource,query_type,query,query_params'])
        new_job = {
            'id': Chart.get_job_id(chart_id),
            'resource': chart['resource']
        }
        adapter = Resource.get(resource_id=chart['resource'])
        adapter.query(job_id=new_job['id'],
                      query_type=chart['query_type'],
                      query=chart['query'],
                      query_params=chart['query_params'])
        Cache().set(last_job_key, new_job, Chart.TTL_LAST_JOB)
        return 'RUNNING', None, None
    else:
        # Cached values are stored as repr() text — decode with literal_eval.
        last_job = ast.literal_eval(last_job)
        last_job_id = last_job['id']
        last_job_result = Cache().get(last_job_id)
        if last_job_result:
            # Result already cached: serve it directly.
            last_job_result = ast.literal_eval(last_job_result)
            return 'DONE', last_job_result['result'], last_job_result[
                'error']
        else:
            adapter = Resource.get(resource_id=last_job['resource'])
            if not adapter.exists(job_id=last_job_id):
                # Cached job vanished on the backend: resubmit it and
                # refresh the descriptor's TTL.
                chart = models.Chart.get(
                    chart_id, ['resource,query_type,query,query_params'])
                adapter.query_async(job_id=last_job['id'],
                                    query_type=chart['query_type'],
                                    query=chart['query'],
                                    query_params=chart['query_params'])
                Cache().set(last_job_key, last_job, Chart.TTL_LAST_JOB)
                return 'RUNNING', None, None
            else:
                # Job exists: poll it, caching the result once finished.
                status, results, error = adapter.get_result(last_job_id)
                if 'DONE' == status:
                    Cache().set(last_job_id, {
                        'result': results,
                        'error': error
                    }, Chart.TTL_LAST_RESULT)
                return status, results, error
def query(self, job_id, query_type, query, query_params=None):
    # Start (or return an existing) async BigQuery job for the given query.
    #
    # NOTE(review): reconstructed from collapsed source — job configuration
    # and begin() are assumed to run only when a new job is created (running
    # them on an already-fetched job would re-submit it). Also: the log
    # message says "query_async" although the method is `query`, and the
    # bare `except:` below swallows every error (including SystemExit);
    # both are worth fixing. Confirm the original indentation.
    Logger().debug(
        "query_async: job_id={job_id}, query_type={query_type}, query_params={query_params}"
        .format(job_id=job_id,
                query_type=query_type,
                query_params=query_params))
    job = self.__get_job(job_id=job_id)
    if not job:
        job = self.__get_client().run_async_query(
            job_name=job_id,
            query=self.QueryBuilder.get_query(config=self.__config,
                                              query_type=query_type,
                                              query=query,
                                              query_params=query_params))
        job.use_legacy_sql = False
        try:
            # query_params may carry a destinationTable spec (as JSON text
            # with pattern placeholders) — best-effort parse and apply.
            query_params = json.loads(
                BigQueryAdapter.QueryBuilder.replace_patterns(
                    query=query_params))
            Logger().debug("query_params={query_params}".format(
                query_params=query_params))
            if 'destinationTable' in query_params:
                client = self.__get_client()
                dataset = client.dataset(
                    dataset_name=query_params['destinationTable']
                    ['dataset'])
                job.destination = dataset.table(
                    name=query_params['destinationTable']['name'])
                if 'write_disposition' in query_params['destinationTable']:
                    job.write_disposition = query_params[
                        'destinationTable']['write_disposition']
        except:
            # Deliberate best-effort: invalid params only skip the
            # destination config; the job still runs.
            Logger.error("invalid query params:{query_params}".format(
                query_params=query_params))
        job.begin()
    return job
def get_result(self, job_id):
    """Return a (status, rows, error) triple for a BigQuery job.

    Missing jobs and empty results are reported as DONE with an error dict;
    unfinished jobs return their current state with no rows or error.
    """
    Logger().debug("get_result: job_id={job_id}".format(job_id=job_id))
    job = self.__get_job(job_id=job_id)
    if job is None:
        return 'DONE', None, {'message': 'Job Not Exists'}
    if job.state.upper() != 'DONE':
        return job.state, None, None
    if job.error_result:
        return 'DONE', None, job.error_result
    result = job.result()
    if not result:
        return 'DONE', None, {'message': 'Result not exists'}
    return 'DONE', [list(row) for row in result.fetch_data()], None
def oauth2check():
    """Return a redirect to the OAuth2 authorize URL when no user session
    exists; otherwise None."""
    if 'user' in session:
        return None
    Logger.error("Credentials not exists")
    return redirect(__getFlow().step1_get_authorize_url())
def retention(config, start_date, end_date, interval, region, platform):
    # Build a BigQuery retention query: one UNION ALL member per cohort day,
    # each selecting the cohort date plus columns r0..r{interval-1} where
    # r0 counts new users on that day and rN counts cohort users active N
    # days later (NRU table joined against that day's DAU table).
    project_id = config['project_id']
    dataset_nru = "{0}.{1}".format(project_id, config['dataset']['nru'])
    dataset_dau = "{0}.{1}".format(project_id, config['dataset']['dau'])
    # Data is assumed complete only up to two days ago — TODO confirm lag.
    today = datetime.datetime.now() - datetime.timedelta(days=2)
    if end_date > today:
        end_date = today
    if start_date > end_date:
        start_date = end_date
    where_nru = BigQueryAdapter.QueryBuilder.__get_where_region_platform(
        region=region, platform=platform, table_alias=None)
    where_dru = BigQueryAdapter.QueryBuilder.__get_where_region_platform(
        region=region, platform=platform, table_alias='t1')
    unions = []
    Logger().debug("retention: start_date={0}, end_date={1}".format(
        start_date, end_date))
    # make union
    while start_date <= end_date:
        target_date_list = [
            start_date + datetime.timedelta(days=x)
            for x in range(0, interval)
        ]
        cnt = 0
        date_str = None
        query_list = ["'{0}' dt".format(start_date.strftime('%y%m%d'))]
        for target_date in target_date_list:
            if target_date <= today:
                if cnt == 0:
                    # Day 0: distinct new users in the cohort's NRU table.
                    date_str = target_date.strftime('%y%m%d')
                    query_list.append(
                        "(SELECT count(distinct(uid)) FROM `{dataset_nru}.{date_str}` {where}) r{cnt} "
                        .format(dataset_nru=dataset_nru,
                                date_str=date_str,
                                cnt=cnt,
                                where=where_nru))
                else:
                    # Day N: cohort joined to that day's DAU on uid+region.
                    query_list.append(
                        "(SELECT count(distinct(t1.uid)) FROM `{dataset_nru}.{date_str}` t1 "
                        "inner join `{dataset_dau}.{dau_date_str}` t2 "
                        "ON t1.uid = t2.uid AND t1.region = t2.region {where}) r{cnt}"
                        .format(dataset_nru=dataset_nru,
                                date_str=date_str,
                                dataset_dau=dataset_dau,
                                dau_date_str=target_date.strftime(
                                    '%y%m%d'),
                                cnt=cnt,
                                where=where_dru))
            else:
                # Future days have no data yet; emit a literal zero column.
                query_list.append("0 r{0}".format(cnt))
            cnt += 1
        if query_list:
            unions.append("(SELECT {0})".format(','.join(query_list)))
        start_date = start_date + datetime.timedelta(days=1)
    if unions:
        return "SELECT * FROM {0} ORDER BY dt".format(
            " union all ".join(unions))
    else:
        return None
def exists(self, job_id):
    """Return True when a BigQuery job with the given id exists.

    Fix: previously returned the job object itself (or None); callers only
    use the value in boolean context, so an explicit bool is compatible and
    matches the method name.
    """
    Logger().debug("exists: job_id={job_id}".format(job_id=job_id))
    return self.__get_job(job_id=job_id) is not None
def __get_cache(self):
    """Lazily create, memoize, and return the pooled cache client."""
    if self.__cache:
        return self.__cache
    Logger.debug("__get_cache")
    # Deferred import keeps module load free of config dependencies.
    from config.dev import config
    self.__cache = PooledClient(server=config['cache']['server'])
    return self.__cache