def _gen_values_for(_filter: DataFilter):
    """Build a ', <literal>, <literal>, ...' SQL fragment from the filter values.

    Each item contributes ', <item.value>' so the result can be appended
    directly after an existing VALUES(...) list.  An empty filter yields an
    empty sql.SQL('') fragment, which is always safe to splice in.
    """
    if not _filter:  # idiomatic emptiness test instead of len()
        return sql.SQL('')  # safe subquery result
    return sql.Composed([
        sql.SQL(', {}').format(sql.Literal(item.value)) for item in _filter
    ])
def _create_file(self, badge: FileBadge) -> int:
    """Insert a fresh row into the file table for *badge* and return its new id.

    The row starts in 'new' status with zeroed counters; the filter columns
    (if any) are appended via the _gen_*_for helpers.  The badge is also
    stored in the local cache keyed by the returned id.
    """
    initial_row = {
        'lines_count': 0,
        'duration': 0,
        'status': 'new',
        'fail_descr': None,
        'last_update': datetime.now(),
        'name': badge.name,
        'type': badge.data_type,
    }
    query = sql.SQL(
        'INSERT INTO {}({} {}) VALUES ({} {}) RETURNING {}').format(
            sql.Identifier(self._file_table),
            sql.SQL(', ').join(sql.Identifier(col) for col in initial_row),
            _gen_fields_for(self._filter),
            sql.SQL(', ').join(sql.Literal(val) for val in initial_row.values()),
            _gen_values_for(self._filter),
            sql.Identifier('id'),
        )
    db_cursor = self.cursor(named=True)
    db_cursor.execute(query)
    self.commit()
    new_id = db_cursor.fetchone().id
    self._badges[new_id] = badge  # time for local cache
    return new_id
def _gen_fields_for(_filter: DataFilter):
    """Build a ', <identifier>, <identifier>, ...' SQL fragment from filter fields.

    Each item contributes ', <item.field>' so the result can be appended
    directly after an existing column list.  An empty filter yields an empty
    sql.SQL('') fragment, which is always safe to splice in.
    """
    if not _filter:  # idiomatic emptiness test instead of len()
        return sql.SQL('')  # safe subquery result
    return sql.Composed([
        sql.SQL(', {}').format(sql.Identifier(item.field)) for item in _filter
    ])
def _gen_where_for(_filter: DataFilter):
    """Build an ' and <field> = <value> ...' SQL fragment from the filter.

    Each item contributes ' and <item.field> = <item.value> ' so the result
    can be appended to an existing WHERE clause.  An empty filter yields an
    empty sql.SQL('') fragment, which is always safe to splice in.
    """
    if not _filter:  # idiomatic emptiness test instead of len()
        return sql.SQL('')  # safe subquery result
    return sql.Composed([
        sql.SQL(' and {} = {} ').format(sql.Identifier(item.field),
                                        sql.Literal(item.value))
        for item in _filter
    ])
def ops_apdex_for(self, period: _Period):
    """Return per-operation apdex rows for *period* as a list of named records.

    Apdex = (satisfied + tolerating/2) / total, computed from the status
    counters 'NS' and 'NT' grouped by ops_uid.
    """
    # one COUNT(CASE ...) template, specialised for each status code
    case_tpl = pgs.SQL('COUNT(CASE WHEN status={} THEN 1 END)')
    satisfied = case_tpl.format(pgs.Literal('NS'))
    tolerating = case_tpl.format(pgs.Literal('NT'))
    query = pgs.SQL(
        'SELECT ops_uid as id, ({} + ({}::real/2))::real / COUNT(*)::real as apdex FROM {} '
        'WHERE start>={} AND start<{} AND base1s={} group by ops_uid'
    ).format(
        satisfied,
        tolerating,
        pgs.Identifier(self.APDEX_TABLE),
        pgs.Literal(period.begin),
        pgs.Literal(period.end),
        pgs.Literal(self.base1s),
    )
    db_cursor = self.cursor(named=True)
    db_cursor.execute(query)
    return db_cursor.fetchall()
def ops_apdex_for2(self, period: _Period):
    """Compute apdex per ops_uid for *period*, restricted to still-empty rows.

    The CTE first collects the ops_uids whose apdex is NULL inside the
    period, then the outer query computes their apdex from the 'NS'/'NT'
    status counters.  Returns the cursor's fetchall() result.
    """
    query = pgs.SQL(''' WITH empty_uid AS ( SELECT ops_uid AS id FROM {0} WHERE start >= {1}::TIMESTAMP AND start < {2}::TIMESTAMP AND base1s = {3} AND apdex IS Null GROUP BY ops_uid ) SELECT ops_uid AS id, (COUNT(CASE WHEN status='NS' THEN 1 END) + (COUNT(CASE WHEN status='NT' THEN 1 END)::REAL/2))::REAL / COUNT(*)::REAL AS apdex FROM {0} WHERE start >= {1}::TIMESTAMP AND start < {2}::TIMESTAMP AND base1s = {3} AND ops_uid IN (SELECT id FROM empty_uid) GROUP BY ops_uid ''')
    bound_query = query.format(
        pgs.Identifier(self.APDEX_TABLE),
        pgs.Literal(period.begin),
        pgs.Literal(period.end),
        pgs.Literal(self.base1s),
    )
    db_cursor = self.cursor(named=True)
    db_cursor.execute(bound_query)
    return db_cursor.fetchall()
def calculate(self, max_hours):
    """Compute and persist apdex values for up to *max_hours* pending periods.

    Periods are consumed from get_next_period() until either the budget is
    spent or no unprocessed period remains; all row updates are flushed in a
    single execute_batch call, then committed.
    """
    batch_cursor = self.cursor()
    update_template = pgs.SQL(
        'UPDATE {} SET {}=%s WHERE start>=%s AND start<%s AND base1s={} AND ops_uid=%s'
    ).format(
        pgs.Identifier(self.APDEX_TABLE),
        pgs.Identifier(self.APDEX_FIELD),
        pgs.Literal(self.base1s),
    )
    pending_rows = []
    budget = max_hours
    while budget > 0:
        budget -= 1
        period = self.get_next_period()
        if not period:
            break  # nothing left to process
        for ops in self.ops_apdex_for2(period) or ():
            pending_rows.append((ops.apdex, period.begin, period.end, ops.id))
        self.log.append(period)
    self._extras.execute_batch(batch_cursor, update_template, pending_rows)
    self.commit()
def validate_email_list(self, email_list_mixed_with_ids) -> list:
    """Replace user_id to personal email.

    Integer entries are looked up in the "Users" table and replaced by the
    user's e-mail; string entries are kept.  The result is lowercased and
    deduplicated preserving first-seen order.  An input with no integer ids
    is returned unchanged.
    """
    if not email_list_mixed_with_ids:
        return []
    ids_list = [val for val in email_list_mixed_with_ids
                if isinstance(val, int)]
    if not ids_list:
        return email_list_mixed_with_ids  # nothing to replace
    with self.cursor(named=False) as cursor:
        cursor.execute(
            sql.SQL('select "Id", "Email" from "Users" where "Id" in {}').
            format(sql.Literal(tuple(ids_list))))
        ids_email_dict = dict(cursor.fetchall())  # {Id1: Email1, ...} format
    # NOTE(review): an id absent from "Users" raises KeyError here, exactly
    # as in the previous implementation.
    pure_lowercase_emails = (
        ids_email_dict[val].lower() if isinstance(val, int) else val.lower()
        for val in email_list_mixed_with_ids)
    # order-preserving dedupe in O(n) instead of the quadratic list scan
    return list(dict.fromkeys(pure_lowercase_emails))
def run(self):
    """Sweep a slice (~1/12) of open tasks and sync their closed state from IS.

    Open tasks are stamped with m_lastClosedTouch so the least-recently
    checked ones come first; 404-ed tasks are closed locally with their
    expenses removed, others are re-read from IS and pushed to PG when
    closed.
    """
    is_connector = ISConnector(KeyChain.IS_KEY)
    pg_connector = PGConnector(self.PG_KEY)
    now = datetime.now()
    # mark new open task
    with self.cursor() as cursor:
        cursor.execute(
            sql.SQL(
                'UPDATE "Tasks" SET "m_lastClosedTouch"={} '
                ' WHERE "Closed" IS NULL AND "m_lastClosedTouch" IS NULL').
            format(sql.Literal(now)))
        self.commit()
        # get 1/12 of all opened task count
        cursor.execute(
            'SELECT COUNT(*) AS cc FROM "Tasks" WHERE "Closed" IS NULL')
        open_tasks_count = cursor.fetchone().cc
        session_limit = round(open_tasks_count / 12)
        # get tasks for current session (session_limit is a locally computed
        # int, so str.format here cannot inject)
        open_task_query = 'SELECT "Id" AS task_id FROM "Tasks" WHERE "Closed" IS Null' \
            ' ORDER BY "m_lastClosedTouch" LIMIT {}'.format(session_limit)
        cursor.execute(open_task_query)
        task_list = [record.task_id for record in cursor]
        # BUG FIX: with no open tasks the mark query would render
        # '"Id" in ()', which is invalid SQL — bail out early instead.
        if not task_list:
            return
        # mark current session tasks
        mark_task_query = sql.SQL(
            'UPDATE "Tasks" SET "m_lastClosedTouch"={} where "Id" in ({})'
        ).format(sql.Literal(now),
                 sql.SQL(', ').join(map(sql.Literal, task_list)))
        cursor.execute(mark_task_query)
        self.commit()
        for task_id in task_list:
            task = Task({'Id': task_id})
            if is_connector.is_404(task['Id']):
                # closed 404 tasks
                self.remove_task_expenses(task_id, cursor)
            else:
                is_connector.select(task)
                if task['Closed']:
                    print(',{}'.format(task['Id']))
                    pg_connector.update(task)
        self.commit()
def remove_task_expenses(self, task_id, cursor):
    """Delete all expense rows of *task_id*, then close it locally.

    The deleted Minutes values are summarised to the console; the task is
    closed with the 1111-11-11 timestamp acting as a 'manually closed'
    marker on m_lastClosedTouch.
    """
    cursor.execute(
        sql.SQL('delete from {} where {}={} returning {}').format(
            sql.Identifier('Expenses'), sql.Identifier('TaskId'),
            sql.Literal(task_id), sql.Identifier('Minutes')))
    deleted_rows = cursor.fetchall()
    minutes_count = len(deleted_rows)
    minutes_sum = sum(row[0] for row in deleted_rows)
    summary = ('(c:{}-s:{})'.format(minutes_count, minutes_sum)
               if minutes_count else '')
    print("#{}{}".format(task_id, summary))
    close_query = sql.SQL(
        'UPDATE "Tasks" SET "Closed"={}, "m_lastClosedTouch"={} WHERE "Id"={}'
    ).format(
        sql.Literal(datetime.now()),
        sql.Literal(datetime(1111, 11, 11)),  # manual closed marker
        sql.Literal(task_id))
    cursor.execute(close_query)
def _clear_file_data(self, file_id):
    """Drop all stored rows for *file_id* and reset its file-table record.

    The data table is chosen via the storage rule for the file's badge type;
    the file row itself is switched back to 'update' status with zeroed
    counters.
    """
    data_table = self._storage_rule[self._id_to_badge(file_id).data_type]
    delete_query = sql.SQL('DELETE FROM {} WHERE {}={}').format(
        sql.Identifier(data_table),
        sql.Identifier('file_id'),
        sql.Literal(file_id))
    reset_values = {
        'lines_count': 0,
        'duration': 0,
        'status': 'update',
        'fail_descr': None,
        'last_update': datetime.now(),
    }
    update_query = sql.SQL('UPDATE {} SET ({}) = ({}) WHERE {}={}').format(
        sql.Identifier(self._file_table),
        sql.SQL(', ').join(map(sql.Identifier, reset_values.keys())),
        sql.SQL(', ').join(map(sql.Literal, reset_values.values())),
        sql.Identifier('id'),
        sql.Literal(file_id))
    db_cursor = self.cursor()
    db_cursor.execute(delete_query)
    db_cursor.execute(update_query)
    self.commit()
def get_next_period(self) -> _Period:
    """Return the earliest period whose apdex field is still NULL, or None.

    Only rows of this instance's base1s are considered; the oldest start
    wins.
    """
    query = pgs.SQL(
        'SELECT start FROM {} WHERE base1s={} AND {} IS Null ORDER BY start ASC'
    ).format(
        pgs.Identifier(self.APDEX_TABLE),
        pgs.Literal(self.base1s),
        pgs.Identifier(self.APDEX_FIELD),
    )
    db_cursor = self.cursor(named=True)
    db_cursor.execute(query)
    row = db_cursor.fetchone()
    if not row:
        return None
    return _Period(row.start)
def _check_file_exist(self, badge: FileBadge):
    """Return the stored id of the file matching *badge* (and the current
    filter), or None when no such file is registered.

    On a hit the badge is also remembered in the local cache under the
    found id.
    """
    check_query = sql.SQL(
        'SELECT id FROM {} WHERE {}={} and {}={} {}').format(
            sql.Identifier(self._file_table),
            sql.Identifier('name'),
            sql.Literal(badge.name),
            sql.Identifier('type'),
            sql.Literal(badge.data_type),
            _gen_where_for(self._filter))
    cursor = self.cursor(named=True)
    cursor.execute(check_query)
    rows = cursor.fetchall()
    if not rows:  # idiomatic emptiness test; file not registered yet
        return None
    result = rows[0].id
    self._badges[result] = badge  # keep badge in the local cache
    return result
def update_file_status(self, file_id: int, is_ok: bool,
                       fail_reason: str = None):
    """Flush accumulated batch rows for *file_id* and finalise its status.

    Any pending rows in self._batch_params are inserted into the data table
    chosen by the file's badge type (filter columns appended), then the file
    row gets its lines_count/duration and a 'done' or 'fail' status with the
    optional failure description.
    """
    cursor = self.cursor(named=True)
    badge = self._id_to_badge(file_id)
    if self._batch_params:  # idiomatic emptiness test instead of len()
        batch_query = sql.SQL(
            'insert into {} ({}, file_id {}) values ({}, {} {})').format(
                sql.Identifier(self._storage_rule[badge.data_type]),
                sql.SQL(', ').join(map(sql.Identifier, self._batch_hdr)),
                _gen_fields_for(self._filter),
                sql.SQL(', ').join(
                    sql.SQL('%s') for _ in enumerate(self._batch_hdr)),
                sql.Literal(file_id),
                _gen_values_for(self._filter),
            )
        self._extras.execute_batch(cursor, batch_query, self._batch_params)
    lines_count = len(self._batch_params)
    self._batch_params.clear()
    params = {
        'lines_count': lines_count,
        'duration': (datetime.now() - self._attach_begin).seconds,
        'status': 'done' if is_ok else 'fail',
        'fail_descr': fail_reason,
    }
    update_query = sql.SQL('UPDATE {} SET ({})=({}) WHERE {}={}').format(
        sql.Identifier(self._file_table),
        sql.SQL(', ').join(sql.Identifier(key) for key in params.keys()),
        sql.SQL(', ').join(
            sql.Literal(value) for value in params.values()),
        sql.Identifier('id'),
        sql.Literal(file_id),
    )
    cursor.execute(update_query)
    self.commit()