def add_db_to_object_cache(object_cache, host, port, db, user, password, tables=True, views=False):
    """Register every user-visible relation of one database in *object_cache*.

    Relation names and ordered column lists are read from
    information_schema (system schemas excluded); each relation is stored
    in the cache keyed by host/port/db, and the connection credentials are
    remembered in the module-level ``db_credentials`` map for later reuse.

    Raises Exception when both *tables* and *views* are disabled, since
    there would be nothing to expose.
    """
    sql = """
        select
          quote_ident(t.table_schema)||'.'||quote_ident(t.table_name) as full_table_name,
          array_agg(c.column_name::text order by ordinal_position) as columns
        from information_schema.tables t
        left join information_schema.columns c
          on t.table_schema = c.table_schema and t.table_name = c.table_name
        where t.table_type = ANY(%s)
          and not t.table_schema in ('information_schema', 'pg_catalog')
        group by 1
    """
    wanted_types = []
    if tables:
        wanted_types.append('BASE TABLE')
    if views:
        wanted_types.append('VIEW')
    if not wanted_types:
        raise Exception('Views and/or Tables exposing must be enabled!')

    for row in execute_on_host(host, port, db, user, password, sql, (wanted_types,)):
        object_cache.add_table_to_cache(host, port, db, row['full_table_name'],
                                        DBObjectsCache.formulate_table(row['columns']))
    # TODO credentials live in a module-level dict keyed by "host:port:db"
    db_credentials['{}:{}:{}'.format(host, port, db)] = (user, password)
def add_db_to_object_cache(object_cache, host, port, db, user, password, tables=True, views=False):
    """Register every user-visible relation of one database in *object_cache*.

    pg_catalog-based variant: besides the ordered column list it also
    fetches inheritance metadata (child count, whether the relation is
    itself inherited) and hands the whole row dict to
    ``DBObjectsCache.formulate_table``.  Connection credentials are
    remembered in the module-level ``db_credentials`` map.

    NOTE(review): a function with this exact name is also defined earlier
    in this chunk; if both definitions end up in the same module, this one
    shadows it — confirm which variant is meant to win.

    Raises Exception when both *tables* and *views* are disabled.
    """
    # The '--' line comment below requires this SQL to stay multi-line:
    # collapsing it onto one line would comment out the schema filter.
    sql = """
        select
          quote_ident(n.nspname)||'.'||quote_ident(c.relname) as full_table_name,
          coalesce((select array_agg(attname::text order by attnum)
                      from pg_attribute
                     where attrelid = c.oid and attnum >= 0 and not attisdropped), '{}'::text[]) as columns,
          (select count(*) from pg_inherits where inhparent = c.oid) as children_count,
          (select count(*) from pg_inherits where inhrelid = c.oid) > 0 as is_inherited
        from pg_class c
        join pg_namespace n on c.relnamespace = n.oid
        where c.relkind = ANY(%s) --c.relkind in ('r', 'v')
          and not n.nspname in ('information_schema', 'pg_catalog')
    """
    wanted_kinds = []
    if tables:
        wanted_kinds.append('r')        # ordinary tables
    if views:
        wanted_kinds.extend(('v', 'm')) # views and materialized views
    if not wanted_kinds:
        raise Exception('Views and/or Tables exposing must be enabled!')

    for row in execute_on_host(host, port, db, user, password, sql, (wanted_kinds,)):
        object_cache.add_table_to_cache(host, port, db, row['full_table_name'],
                                        DBObjectsCache.formulate_table(row))
    # TODO credentials live in a module-level dict keyed by "host:port:db"
    db_credentials['{}:{}:{}'.format(host, port, db)] = (user, password)
        # NOTE(review): this chunk begins mid-method — the 'if' branch this
        # 'elif' pairs with (and the enclosing 'def') are outside the visible
        # source, so the nesting below is reconstructed; confirm against the
        # original file.
        elif self.order_by_direction:
            # explicit direction without named columns: emit only
            # ".../orderby/<direction>" (lower-cased)
            url += ('/{}/{}'.format('orderby', self.order_by_direction)).lower()
        # presumably the limit segment is always appended, regardless of the
        # ordering branches above — TODO confirm original indentation
        url += '/{}/{}'.format('limit', self.limit)
        return url

    def __str__(self):
        # Debug-friendly dump of every parsed URL parameter.
        return 'UrlParams: db = {}, table = {}, columns = {}, filters = {}, order_by_columns = {},' \
               ' output_format = {}, graphtype = {}, gkey = {}, gbucket = {}, limit = {}'.format(
                   self.db_uniq, self.table, self.column_names, self.filters, self.order_by_columns,
                   self.output_format, self.graphtype, self.graphkey, self.graphbucket, self.limit)


if __name__ == '__main__':
    # Ad-hoc manual smoke test (Python 2 — note the print statement below):
    # seed a one-table cache, show it, and keep a few sample UrlParams
    # invocations commented out for interactive experimentation.
    db_objects_cache = DBObjectsCache()
    db_objects_cache.add_table_to_cache('local', 5432, 'postgres', 'public.table1',
                                        DBObjectsCache.formulate_table(['col1', 'col2', 't_created']))
    print db_objects_cache
    # feature flags consumed by UrlParams: default ordering/limit plus the
    # column-name patterns used to guess "created"/"modified" timestamps
    features = {
        'default_order_by': 'DESC',
        'default_limit': '20',
        'created_patterns': 'created,timestamp,time',
        'modified_patterns': 'modified,updated,timestamp',
    }
    # up = UrlParams(db_objects_cache, features, 'pos', 'ta*1', 'l', '100', 'o', 'd')
    # up = UrlParams(db_objects_cache, features, 'pos', 'ta*1', 'o', 'm', 'f', 'h', 'col1', '<=', '1')
    # up = UrlParams(db_objects_cache, features, 'pos', 'ta*1', 'col1', '<=', '100', 'agg', 'count', 'c1', 'agg', 'max', 'c1')
    # up = UrlParams(db_objects_cache, features, 'pos', 'ta*1', 'f', 'g', 'l', 'gkey', 'created', 'gbucket', 'hour')