def get_dummy_entitytype(cursor, name):
    """Return the entity type named *name*, creating it on demand.

    Looks the entity type up first; when no match is found, a new one is
    created with an empty description.
    """
    existing = get_entitytype(cursor, name)

    if existing:
        return existing

    return create_entitytype(cursor, name, "")
def test_get_entitytype(conn):
    """A freshly created entity type must be retrievable by its name."""
    with closing(conn.cursor()) as cursor:
        created = helpers_v4.create_entitytype(
            cursor, "test_get_entitytype", "short description of type")

        fetched = helpers_v4.get_entitytype(cursor, "test_get_entitytype")

        assert fetched.id == created.id
        assert fetched.name == "test_get_entitytype"
def get_partition(cursor, datasource_name, entitytype_name, granularity,
        timestamp):
    """Return the trend-store partition for *timestamp*.

    Resolves the named datasource and entity type, normalizes the
    granularity, builds a TrendStore from them, and asks it for the
    partition covering *timestamp*.
    """
    trendstore = TrendStore(
        get_datasource(cursor, datasource_name),
        get_entitytype(cursor, entitytype_name),
        create_granularity(granularity))

    return trendstore.partition(timestamp)
def retrieve_orderedby_time(self, datasources, gp, entitytype, trend_names,
        entities, start, end, limit=None):
    """Retrieve trend data ordered by time.

    Accepts *entitytype* either as an object or as a name (resolved via a
    cursor). Delegates the actual retrieval to the module-level
    retrieve_orderedby_time function with the resolved table names.
    """
    with closing(self.conn.cursor()) as cursor:
        # Allow callers to pass the entity type by name.
        if isinstance(entitytype, str):
            entitytype = get_entitytype(cursor, entitytype)

        table_names = get_table_names_v4(
            cursor, datasources, gp, entitytype, start, end)

    # Unqualified name resolves to the module-level function, not this method.
    return retrieve_orderedby_time(
        self.conn, schema.name, table_names, trend_names, entities,
        start, end, limit)
def retrieve(self, datasources, gp, entitytype, trend_names, entities,
        start, end, subquery_filter=None, relation_table_name=None,
        limit=None):
    """Retrieve trend data for the specified entities and interval.

    Accepts *entitytype* either as an object or as a name (resolved via a
    cursor). Delegates the actual retrieval to the module-level retrieve
    function with the resolved table names.
    """
    with closing(self.conn.cursor()) as cursor:
        # Allow callers to pass the entity type by name.
        if isinstance(entitytype, str):
            entitytype = get_entitytype(cursor, entitytype)

        table_names = get_table_names_v4(
            cursor, datasources, gp, entitytype, start, end)

    # Unqualified name resolves to the module-level function, not this method.
    return retrieve(
        self.conn, schema.name, table_names, trend_names, entities,
        start, end, subquery_filter, relation_table_name, limit,
        entitytype=entitytype)
def retrieve_related(self, datasources, gp, source_entitytype,
        target_entitytype, trend_names, start, end, subquery_filter=None,
        limit=None):
    """Retrieve trend data of entities related to a source entity type.

    Accepts *target_entitytype* either as an object or as a name (resolved
    via a cursor). The relation table is "self" when source and target
    types coincide, otherwise "<source>-><target>". Delegates to the
    module-level retrieve_related function.
    """
    with closing(self.conn.cursor()) as cursor:
        # Allow callers to pass the target entity type by name.
        if isinstance(target_entitytype, str):
            target_entitytype = get_entitytype(cursor, target_entitytype)

        table_names = get_table_names_v4(
            cursor, datasources, gp, target_entitytype, start, end)

    relation_table_name = (
        "self"
        if source_entitytype.name == target_entitytype.name
        else "{}->{}".format(
            source_entitytype.name, target_entitytype.name))

    # Unqualified name resolves to the module-level function, not this method.
    return retrieve_related(
        self.conn, schema.name, relation_table_name, table_names,
        trend_names, start, end, subquery_filter, limit)
def last_modified(self, interval, datasource, granularity, entitytype_name,
        subquery_filter=None):
    """
    Return last modified timestamp for specified datasource, granularity,
    entity type and interval

    :param interval: tuple (start, end) with non-naive timestamps,
    specifying interval to check
    :param datasource: datasource object
    :param granularity: granularity in seconds
    :param entitytype_name: name of entity type
    :param subquery_filter: subquery for additional filtering
    by JOINing on field 'id'
    """
    (start, end) = interval

    with closing(self.conn.cursor()) as cursor:
        entitytype = get_entitytype(cursor, entitytype_name)

        table_names = get_table_names_v4(
            cursor, [datasource], granularity, entitytype, start, end)

    if subquery_filter:
        # BUGFIX: the filter subquery is interpolated as placeholder {2}.
        # Previously this line reused {0}, so .format() substituted the
        # schema name into the JOIN and the subquery_filter was never
        # applied at all.
        query = (
            "SELECT MAX(t.modified) FROM \"{0}\".\"{1}\" AS t "
            "JOIN ({2}) AS filter ON filter.id = t.entity_id "
            "WHERE t.timestamp > %s AND t.timestamp <= %s ")
    else:
        query = (
            "SELECT MAX(t.modified) FROM \"{0}\".\"{1}\" AS t "
            "WHERE t.timestamp > %s AND t.timestamp <= %s ")

    modifieds = []

    with closing(self.conn.cursor()) as cursor:
        for table_name in table_names:
            try:
                # Extra positional args are ignored by str.format, so the
                # unfiltered query can be formatted with the same call.
                cursor.execute(
                    query.format(schema.name, table_name, subquery_filter),
                    interval)
                modified, = cursor.fetchone()
                modifieds.append(modified)
            except (psycopg2.ProgrammingError, psycopg2.InternalError):
                # Missing/invalid table: skip it and keep scanning.
                # NOTE(review): after an error the transaction may be
                # aborted, making later executes fail too — consider a
                # rollback here; confirm against the caller's transaction
                # handling before changing it.
                continue

    if modifieds:
        return max(modifieds)
    else:
        return None
def timestamp_exists(self, datasource, gp, entitytype_name, timestamp):
    """
    Returns True when timestamp occurs for specified data source.
    False otherwise.
    """
    with closing(self.conn.cursor()) as cursor:
        entitytype = get_entitytype(cursor, entitytype_name)

        # Only the first matching table is probed.
        table_name = get_table_names_v4(
            cursor, [datasource], gp, entitytype, timestamp, timestamp)[0]

    query = (
        "SELECT 1 FROM \"{0}\".\"{1}\" WHERE timestamp = %s "
        "LIMIT 1".format(schema.name, table_name))

    with closing(self.conn.cursor()) as cursor:
        try:
            cursor.execute(query, (timestamp,))
        except (psycopg2.ProgrammingError, psycopg2.InternalError):
            # Table absent or query invalid: treat as "no such timestamp".
            return False

        return bool(cursor.rowcount)
def count(self, datasource, gp, entitytype_name, interval, filter=None):
    """
    Returns row count for specified datasource, gp, entity type and
    interval
    """
    (start, end) = interval

    with closing(self.conn.cursor()) as cursor:
        entitytype = get_entitytype(cursor, entitytype_name)

        table_names = get_table_names_v4(
            cursor, [datasource], gp, entitytype, start, end)

    query = (
        "SELECT COUNT(*) FROM \"{0}\".\"{1}\" "
        "WHERE timestamp > %s AND timestamp <= %s ")

    if filter is not None:
        # An explicitly empty filter matches nothing at all.
        if len(filter) == 0:
            return 0

        query += "AND entity_id IN ({0}) ".format(
            ",".join(str(id) for id in filter))

    args = (start, end)
    total = 0

    with closing(self.conn.cursor()) as cursor:
        for table_name in table_names:
            try:
                cursor.execute(query.format(schema.name, table_name), args)
            except (psycopg2.ProgrammingError, psycopg2.InternalError):
                # Missing/invalid table: skip it and keep counting.
                continue

            c, = cursor.fetchone()
            total += c

    return total