def _build_create_temp_table_sql(table_name, column_names, column_types):
    """Build a CREATE TEMPORARY TABLE statement for the given columns.

    Legacy (Informix-style) type names are translated to their PostgreSQL
    equivalents.  Returns None when any column has an opaque or unknown
    type, signalling the caller that no table can be created.
    """
    # Translation table for non-PostgreSQL type names, matched
    # case-insensitively (same mapping the original if/elif chain applied).
    type_aliases = {
        "smallfloat": "REAL",
        "string": "character",
        "byte": "bytea",
        "integer8": "bigint",
        "serial8": "bigserial",
        "datetime": "timestamp",
    }
    # Types that cannot be represented in PostgreSQL at all.
    unsupported = ("OPAQUE FIXED", "OPAQUE VARIABLE", "UNKNOWN DATA TYPE")

    column_defs = []
    for column_name, column_type in zip(column_names, column_types):
        column_type = type_aliases.get(column_type.lower(), column_type)
        if column_type in unsupported:
            # Bail out before building any more of the statement; the
            # original appended the type first and then returned None,
            # which did the same useless work.
            return None
        column_defs.append("%s %s" % (plpy.quote_ident(column_name), column_type))

    return "create temporary table %s (\n %s\n)" % \
        (plpy.quote_ident(table_name), ",\n ".join(column_defs))
def start_presto_query(presto_server, presto_user, presto_catalog, presto_schema, function_name, query):
    """Start a Presto query and prepare a temporary setof-returning function.

    Creates a temporary composite type matching the query's result schema and
    a pg_temp function that fetches the pending results via
    prestogres.fetch_presto_query_results().  The running query is tracked in
    session.query_auto_close so it is closed if setup fails.
    """
    try:
        # preserve search_path if explicitly set
        search_path = _get_session_search_path_array()
        if search_path != ['$user', 'public'] and len(search_path) > 0:
            # search_path is changed explicitly. use the first schema
            presto_schema = search_path[0]

        # start query
        client = presto_client.Client(server=presto_server, user=presto_user,
                catalog=presto_catalog, schema=presto_schema,
                time_zone=_get_session_time_zone())
        query = client.query(query)
        # register the query so it is auto-closed unless setup completes
        session.query_auto_close = QueryAutoClose(query)
        try:
            # result schema
            column_names = []
            column_types = []
            for column in query.columns():
                column_names.append(column.name)
                column_types.append(_pg_result_type(column.type))

            column_names = _rename_duplicated_column_names(column_names,
                    "a query result")
            session.query_auto_close.column_names = column_names
            session.query_auto_close.column_types = column_types

            # CREATE TABLE for return type of the function
            type_name = function_name + "_type"
            create_type_sql = _build_create_temp_table_sql(type_name,
                    column_names, column_types)

            # CREATE FUNCTION
            create_function_sql = \
                """
                create or replace function pg_temp.%s()
                returns setof pg_temp.%s as $$
                import prestogres
                return prestogres.fetch_presto_query_results()
                $$ language plpythonu
                """ % \
                (plpy.quote_ident(function_name), plpy.quote_ident(type_name))

            # run statements
            plpy.execute("drop table if exists pg_temp.%s cascade" % \
                    (plpy.quote_ident(type_name)))
            plpy.execute(create_type_sql)
            plpy.execute(create_function_sql)

            # mark success: ownership of the query passed to the temp function
            query = None
        finally:
            if query is not None:
                # setup failed: drop the auto-close tracker for the query
                session.query_auto_close = None
    except (plpy.SPIError, presto_client.PrestoException) as e:
        # PL/Python converts an exception object in Python to an error message
        # in PostgreSQL using the exception class name if exc.__module__ is
        # either of "builtins", "exceptions", or "__main__".  Otherwise it
        # uses the "module.name" format.  Set __module__ = "__main__" to
        # generate pretty messages.
        e.__class__.__module__ = "__main__"
        raise
def _build_insert_into_sql(table_name, column_names, column_types):
    """Return a pair (insert_sql, values_format) for the given table.

    insert_sql is the "insert into ... (cols) values" prefix; values_format
    is one parenthesized row of "${}::type" placeholders.
    NOTE(review): the literal "${}" placeholders are presumably substituted
    later (e.g. via str.format with 1-based indexes) -- verify against the
    caller that consumes this format string.
    """
    quoted_columns = [plpy.quote_ident(name) for name in column_names]
    insert_sql = "insert into %s (\n %s\n) values\n" % \
        (plpy.quote_ident(table_name), ",\n ".join(quoted_columns))

    placeholders = ["${}::" + column_type for column_type in column_types]
    values_format = "(" + ", ".join(placeholders) + ")"

    return (insert_sql, values_format)
def _rename_duplicated_column_names(column_names):
    """Make column names unique by appending '_' until no clash remains.

    Emits a warning for every column that had to be renamed; the input
    order is preserved.
    """
    seen = set()
    result = []
    for original in column_names:
        candidate = original
        while candidate in seen:
            candidate += "_"
        if candidate != original:
            plpy.warning("Result column %s is renamed to %s because the name appears twice in a query result" % \
                    (plpy.quote_ident(original), plpy.quote_ident(candidate)))
        seen.add(candidate)
        result.append(candidate)
    return result
def _build_create_temp_table_sql(table_name, column_names, column_types):
    """Build a CREATE TEMPORARY TABLE statement with the given column list.

    Column names are quoted; types are emitted verbatim.
    """
    column_defs = [
        "%s %s" % (plpy.quote_ident(name), col_type)
        for name, col_type in zip(column_names, column_types)
    ]
    return "create temporary table %s (\n %s\n)" % \
        (plpy.quote_ident(table_name), ",\n ".join(column_defs))
def _build_insert_into_sql(table_name, column_names):
    """Build the "insert into ... (cols) values" prefix for the given table."""
    quoted = [plpy.quote_ident(name) for name in column_names]
    return "insert into %s (\n %s\n) values\n" % \
        (plpy.quote_ident(table_name), ",\n ".join(quoted))
def _build_alter_table_holder_sql(schema_name, table_name, column_names, column_types, not_nulls):
    """Build an ALTER TABLE statement adding every given column.

    Each column gets an "add <name> <type>" clause, optionally suffixed
    with " not null".
    """
    clauses = []
    for name, col_type, not_null in zip(column_names, column_types, not_nulls):
        clause = "add %s %s" % (plpy.quote_ident(name), col_type)
        if not_null:
            clause += " not null"
        clauses.append(clause)
    return "alter table %s.%s \n %s" % \
        (plpy.quote_ident(schema_name), plpy.quote_ident(table_name),
         ",\n ".join(clauses))
def _build_create_table(schema_name, table_name, column_names, column_types, not_nulls):
    """Build a CREATE TABLE statement with the given columns.

    Column names are quoted, types are emitted verbatim, and " not null"
    is appended where requested.
    """
    column_defs = []
    for name, col_type, not_null in zip(column_names, column_types, not_nulls):
        entry = "%s %s" % (plpy.quote_ident(name), col_type)
        if not_null:
            entry += " not null"
        column_defs.append(entry)
    return "create table %s.%s (\n %s\n)" % \
        (plpy.quote_ident(schema_name), plpy.quote_ident(table_name),
         ",\n ".join(column_defs))
def _rename_duplicated_column_names(column_names, where):
    """Make column names unique and avoid PostgreSQL system column names.

    Names are disambiguated by appending '_' until they clash neither with
    a system column name nor with a name already used; a warning explains
    each rename.  `where` describes the source (e.g. a table name) and is
    interpolated into the warning text.
    """
    renamed = []
    used_names = copy(SYSTEM_COLUMN_NAMES)
    for original_name in column_names:
        name = original_name
        while name in used_names:
            name += "_"
        if name != original_name:
            # BUG FIX: the original tested `name in SYSTEM_COLUMN_NAMES`, but
            # after the while loop `name` can never be in used_names (a
            # superset of SYSTEM_COLUMN_NAMES), so the system-column warning
            # was unreachable.  Test the ORIGINAL name instead.
            if original_name in SYSTEM_COLUMN_NAMES:
                plpy.warning("Column %s is renamed to %s because the name in %s conflicts with PostgreSQL system column names" % \
                        (plpy.quote_ident(original_name), plpy.quote_ident(name), where))
            else:
                plpy.warning("Column %s is renamed to %s because the name appears twice in %s" % \
                        (plpy.quote_ident(original_name), plpy.quote_ident(name), where))
        used_names.add(name)
        renamed.append(name)
    return renamed
def run_presto_as_temp_table(server, user, catalog, schema, result_table, query):
    """Run a Presto query and materialize its results into a temp table.

    Honors an explicitly-set search_path by using its first schema, runs the
    query through presto_client, then creates `result_table` and batch-inserts
    all rows into it.  The Presto query handle is always closed.
    """
    try:
        search_path = _get_session_search_path_array()
        if search_path != ['$user', 'public'] and len(search_path) > 0:
            # search_path is changed explicitly. Use the first schema
            schema = search_path[0]

        client = presto_client.Client(server=server, user=user, catalog=catalog,
                schema=schema, time_zone=_get_session_time_zone())

        # NOTE: dead pre-assignments of create_sql / insert_sql / values_types
        # were removed here; they were unconditionally overwritten below and
        # never read on any path.
        q = client.query(query)
        try:
            # result schema
            column_names = []
            column_types = []
            for column in q.columns():
                column_names.append(column.name)
                column_types.append(_pg_result_type(column.type))

            # build SQL
            column_names = _rename_duplicated_column_names(column_names)
            create_sql = _build_create_temp_table_sql(result_table, column_names, column_types)
            insert_sql = _build_insert_into_sql(result_table, column_names)

            # run CREATE TABLE
            plpy.execute("drop table if exists " + plpy.quote_ident(result_table))
            plpy.execute(create_sql)

            # run INSERT
            _batch_insert(insert_sql, 10, column_types, q.results())
        finally:
            q.close()
    except (plpy.SPIError, presto_client.PrestoException) as e:
        # PL/Python converts an exception object in Python to an error message
        # in PostgreSQL using the exception class name if exc.__module__ is
        # either of "builtins", "exceptions", or "__main__".  Otherwise it
        # uses the "module.name" format.  Set __module__ = "__main__" to
        # generate pretty messages.
        e.__class__.__module__ = "__main__"
        raise
def _build_create_table(schema_name, table_name, column_names, column_types, not_nulls):
    """Build a CREATE TABLE statement, translating legacy type names.

    Legacy (Informix-style) type names are mapped to PostgreSQL equivalents.
    Returns None when any column has an opaque or unknown type, signalling
    the caller that the table cannot be created.
    """
    # Translation table for non-PostgreSQL type names, matched
    # case-insensitively (same mapping the original if/elif chain applied).
    type_aliases = {
        "smallfloat": "REAL",
        "string": "character",
        "byte": "bytea",
        "integer8": "bigint",
        "serial8": "bigserial",
        "datetime": "timestamp",
    }
    # Types that cannot be represented in PostgreSQL at all.
    unsupported = ("OPAQUE FIXED", "OPAQUE VARIABLE", "UNKNOWN DATA TYPE")

    column_defs = []
    for column_name, column_type, not_null in zip(column_names, column_types, not_nulls):
        column_type = type_aliases.get(column_type.lower(), column_type)
        if column_type in unsupported:
            # Bail out early instead of building a statement we will discard.
            return None
        entry = "%s %s" % (plpy.quote_ident(column_name), column_type)
        if not_null:
            entry += " not null"
        column_defs.append(entry)

    return "create table %s.%s (\n %s\n)" % \
        (plpy.quote_ident(schema_name), plpy.quote_ident(table_name),
         ",\n ".join(column_defs))
def setup_system_catalog(presto_server, presto_user, presto_catalog, presto_schema, access_role):
    """Mirror Presto's information_schema into local PostgreSQL catalogs.

    Fetches the table/column list from Presto, drops all local schemas
    (except prestogres_catalog, information_schema and pg_*), recreates one
    PostgreSQL schema+table per Presto table, grants access to `access_role`,
    and fakes current_database() to report the Presto catalog name.
    """
    search_path = _get_session_search_path_array()
    if search_path == ['$user', 'public']:
        # search_path is default value.
        plpy.execute("set search_path to %s" % plpy.quote_ident(presto_schema))

    client = presto_client.Client(server=presto_server, user=presto_user,
            catalog=presto_catalog, schema='default')

    # get table list
    sql = "select table_schema, table_name, column_name, is_nullable, data_type" \
          " from information_schema.columns"
    columns, rows = client.run(sql)

    if rows is None:
        rows = []

    # schema_name -> {table_name -> [Column, ...]}
    schemas = {}

    for row in rows:
        schema_name = row[0]
        table_name = row[1]
        column_name = row[2]
        is_nullable = row[3]
        column_type = row[4]

        if schema_name == "sys" or schema_name == "information_schema":
            # skip system schemas
            continue

        # PostgreSQL silently truncates identifiers longer than
        # PG_NAMEDATALEN-1, so skip anything that would be mangled.
        if len(schema_name) > PG_NAMEDATALEN - 1:
            plpy.warning("Schema %s is skipped because its name is longer than %d characters" % \
                    (plpy.quote_ident(schema_name), PG_NAMEDATALEN - 1))
            continue

        tables = schemas.setdefault(schema_name, {})

        if len(table_name) > PG_NAMEDATALEN - 1:
            plpy.warning("Table %s.%s is skipped because its name is longer than %d characters" % \
                    (plpy.quote_ident(schema_name), plpy.quote_ident(table_name), PG_NAMEDATALEN - 1))
            continue

        # NOTE(review): this rebinds `columns`, shadowing the result-header
        # list returned by client.run() above (which is never used afterwards).
        columns = tables.setdefault(table_name, [])

        if len(column_name) > PG_NAMEDATALEN - 1:
            plpy.warning("Column %s.%s.%s is skipped because its name is longer than %d characters" % \
                    (plpy.quote_ident(schema_name), plpy.quote_ident(table_name), \
                     plpy.quote_ident(column_name), PG_NAMEDATALEN - 1))
            continue

        columns.append(Column(column_name, column_type, is_nullable))

    # drop all schemas excepting prestogres_catalog, information_schema and pg_%
    sql = "select n.nspname as schema_name from pg_catalog.pg_namespace n" \
          " where n.nspname not in ('prestogres_catalog', 'information_schema')" \
          " and n.nspname not like 'pg_%'"
    for row in plpy.cursor(sql):
        plpy.execute("drop schema %s cascade" % plpy.quote_ident(row["schema_name"]))

    # create schema and tables
    for schema_name, tables in sorted(schemas.items(), key=lambda (k,v): k):
        try:
            plpy.execute("create schema %s" % (plpy.quote_ident(schema_name)))
        except:
            # ignore error?
            pass

        for table_name, columns in sorted(tables.items(), key=lambda (k,v): k):
            column_names = []
            column_types = []
            not_nulls = []
            # PostgreSQL tables are limited to 1600 columns; truncate and warn.
            if len(columns) >= 1600:
                plpy.warning("Table %s.%s contains more than 1600 columns. Some columns will be inaccessible" % (plpy.quote_ident(schema_name), plpy.quote_ident(table_name)))
            for column in columns[0:1600]:
                column_names.append(column.name)
                column_types.append(_pg_table_type(column.type))
                not_nulls.append(not column.nullable)

            # change columns
            column_names = _rename_duplicated_column_names(column_names,
                    "%s.%s table" % (plpy.quote_ident(schema_name), plpy.quote_ident(table_name)))
            create_sql = _build_create_table(schema_name, table_name,
                    column_names, column_types, not_nulls)
            plpy.execute(create_sql)

        # grant access on the schema to the restricted user so that
        # pg_table_is_visible(reloid) used by \d of psql command returns true
        plpy.execute("grant usage on schema %s to %s" % \
                (plpy.quote_ident(schema_name), plpy.quote_ident(access_role)))

        # this SELECT privilege is unnecessary because queries against those
        # tables won't run on PostgreSQL. causing an exception is good if
        # Prestogres has a bug sending a presto query to PostgreSQL without
        # rewriting.
        # TODO however, it's granted for now because some BI tools might check
        # has_table_privilege. the best solution is to grant privilege but
        # actually selecting from those tables causes an exception.
        plpy.execute("grant select on all tables in schema %s to %s" % \
                (plpy.quote_ident(schema_name), plpy.quote_ident(access_role)))

    # fake current_database() to return Presto's catalog name to be compatible
    # with some applications that use db.schema.table syntax to identify a table
    if plpy.execute("select pg_catalog.current_database()")[0].values()[0] != presto_catalog:
        plpy.execute("delete from pg_catalog.pg_proc where proname='current_database'")
        plpy.execute("create function pg_catalog.current_database() returns name as $$begin return %s::name; end$$ language plpgsql stable strict" % \
                plpy.quote_literal(presto_catalog))
def append_dim_select(dim, select):
    '''
    Append the PC_Get function call string for "dim" to "select".

    The appended fragment reads dimension "dim" from the "point" column and
    aliases the result with the (quoted) dimension name.
    '''
    select.append('PC_Get(point, \'{}\') {}'.format(dim, plpy.quote_ident(dim)))
def setup_system_catalog(presto_server, presto_user, presto_catalog, access_role): client = presto_client.Client(server=presto_server, user=presto_user, catalog=presto_catalog, schema='default') # get table list sql = "select table_schema, table_name, column_name, is_nullable, data_type" \ " from information_schema.columns" columns, rows = client.run(sql) if rows is None: rows = [] schemas = {} for row in rows: schema_name = row[0] table_name = row[1] column_name = row[2] is_nullable = row[3] column_type = row[4] if schema_name == "sys" or schema_name == "information_schema": # skip system schemas continue if len(schema_name) > PG_NAMEDATALEN - 1: plpy.warning("Schema %s is skipped because its name is longer than %d characters" % \ (plpy.quote_ident(schema_name), PG_NAMEDATALEN - 1)) continue tables = schemas.setdefault(schema_name, {}) if len(table_name) > PG_NAMEDATALEN - 1: plpy.warning("Table %s.%s is skipped because its name is longer than %d characters" % \ (plpy.quote_ident(schema_name), plpy.quote_ident(table_name), PG_NAMEDATALEN - 1)) continue columns = tables.setdefault(table_name, []) if len(column_name) > PG_NAMEDATALEN - 1: plpy.warning("Column %s.%s.%s is skipped because its name is longer than %d characters" % \ (plpy.quote_ident(schema_name), plpy.quote_ident(table_name), \ plpy.quote_ident(column_name), PG_NAMEDATALEN - 1)) continue columns.append(Column(column_name, column_type, is_nullable)) # drop all schemas excepting prestogres_catalog, information_schema and pg_% sql = "select n.nspname as schema_name from pg_catalog.pg_namespace n" \ " where n.nspname not in ('prestogres_catalog', 'information_schema')" \ " and n.nspname not like 'pg_%'" for row in plpy.cursor(sql): plpy.execute("drop schema %s cascade" % plpy.quote_ident(row["schema_name"])) # create schema and tables for schema_name, tables in sorted(schemas.items(), key=lambda (k,v): k): try: plpy.execute("create schema %s" % (plpy.quote_ident(schema_name))) except: # ignore error? 
pass # grant access on the all tables to the restricted user plpy.execute("grant select on all tables in schema %s to %s" % \ (plpy.quote_ident(schema_name), plpy.quote_ident(access_role))) for table_name, columns in sorted(tables.items(), key=lambda (k,v): k): column_names = [] column_types = [] not_nulls = [] for column in columns: column_names.append(column.name) column_types.append(_pg_table_type(column.type)) not_nulls.append(not column.nullable) # change columns create_sql = _build_create_table(schema_name, table_name, column_names, column_types, not_nulls) plpy.execute(create_sql) # update pg_database plpy.execute("update pg_database set datname=%s where datname=current_database()" % \ plpy.quote_literal(presto_catalog))
def run_system_catalog_as_temp_table(server, user, catalog, schema, login_user, login_database, result_table, query):
    """Run a system-catalog query against a faked Presto schema layout.

    Builds (and caches for 60s) SQL statements that rename pre-created
    "holder" schemas/tables to match Presto's catalog, applies them inside a
    subtransaction, runs `query` as the restricted `login_user`, rolls the
    subtransaction back, then materializes the captured result into
    `result_table`.  Query results are also cached per statement text.
    """
    try:
        client = presto_client.Client(server=server, user=user, catalog=catalog,
                schema=schema, time_zone=_get_session_time_zone())

        # create SQL statements which put data to system catalogs
        if SchemaCacheEntry.is_cached(server, user, catalog, schema, time.time()):
            schema_names = SchemaCacheEntry.schema_names
            statements = SchemaCacheEntry.statements
            query_cache = SchemaCacheEntry.query_cache
        else:
            # get table list
            sql = "select table_schema, table_name, column_name, is_nullable, data_type" \
                  " from information_schema.columns"
            columns, rows = client.run(sql)

            # schema_name -> {table_name -> [Column, ...]}
            schemas = {}

            if rows is None:
                rows = []

            for row in rows:
                schema_name = row[0]
                table_name = row[1]
                column_name = row[2]
                is_nullable = row[3]
                column_type = row[4]

                # PostgreSQL truncates identifiers longer than
                # PG_NAMEDATALEN-1; skip anything that would be mangled.
                if len(schema_name) > PG_NAMEDATALEN - 1:
                    plpy.warning("Schema %s is skipped because its name is longer than %d characters" % \
                            (plpy.quote_ident(schema_name), PG_NAMEDATALEN - 1))
                    continue

                tables = schemas.setdefault(schema_name, {})

                if len(table_name) > PG_NAMEDATALEN - 1:
                    plpy.warning("Table %s.%s is skipped because its name is longer than %d characters" % \
                            (plpy.quote_ident(schema_name), plpy.quote_ident(table_name), PG_NAMEDATALEN - 1))
                    continue

                # NOTE(review): rebinds `columns`, shadowing the result-header
                # list from client.run() above (never used afterwards).
                columns = tables.setdefault(table_name, [])

                if len(column_name) > PG_NAMEDATALEN - 1:
                    plpy.warning("Column %s.%s.%s is skipped because its name is longer than %d characters" % \
                            (plpy.quote_ident(schema_name), plpy.quote_ident(table_name), \
                             plpy.quote_ident(column_name), PG_NAMEDATALEN - 1))
                    continue

                columns.append(Column(column_name, column_type, is_nullable))

            # generate SQL statements
            statements = []
            schema_names = []

            # create a restricted user using the same name with the login user name to pgpool2
            statements.append(
                    "do $$ begin if not exists (select * from pg_catalog.pg_roles where rolname=%s) then create role %s with login; end if; end $$" % \
                    (plpy.quote_literal(login_user), plpy.quote_ident(login_user)))

            # grant access on the all table holders to the restricted user
            statements.append("grant select on all tables in schema prestogres_catalog to %s" % \
                    plpy.quote_ident(login_user))

            table_holder_id = 0

            for schema_name, tables in sorted(schemas.items(), key=lambda (k,v): k):
                if schema_name == "sys" or schema_name == "information_schema":
                    # skip system schemas
                    continue

                schema_names.append(schema_name)

                for table_name, columns in sorted(tables.items(), key=lambda (k,v): k):
                    # table schema
                    column_names = []
                    column_types = []
                    not_nulls = []
                    for column in columns:
                        column_names.append(column.name)
                        column_types.append(_pg_table_type(column.type))
                        not_nulls.append(not column.nullable)

                    # rename table holder into the schema
                    statements.append("alter table prestogres_catalog.table_holder_%d set schema %s" % \
                            (table_holder_id, plpy.quote_ident(schema_name)))
                    statements.append("alter table %s.table_holder_%d rename to %s" % \
                            (plpy.quote_ident(schema_name), table_holder_id, plpy.quote_ident(table_name)))

                    # change columns
                    alter_sql = _build_alter_table_holder_sql(schema_name, table_name,
                            column_names, column_types, not_nulls)
                    statements.append(alter_sql)

                    table_holder_id += 1

            # cache expires after 60 seconds
            SchemaCacheEntry.set_cache(server, user, catalog, schema,
                    schema_names, statements, time.time() + 60)
            query_cache = {}

        query_result = query_cache.get(query)

        if query_result:
            # reuse previously captured result for this exact query text
            column_names = query_result.column_names
            column_types = query_result.column_types
            result = query_result.result
        else:
            # enter subtransaction to rollback tables right after running the query
            subxact = plpy.subtransaction()
            subxact.enter()
            try:
                # drop all schemas excepting prestogres_catalog, pg_catalog,
                # information_schema, public and schema holders
                sql = "select n.nspname as schema_name from pg_catalog.pg_namespace n" \
                      " where n.nspname not in ('prestogres_catalog', 'pg_catalog', 'information_schema', 'public')" \
                      " and n.nspname not like 'prestogres_catalog_schema_holder_%'" \
                      " and n.nspname !~ '^pg_toast'"
                for row in plpy.cursor(sql):
                    plpy.execute("drop schema %s cascade" % plpy.quote_ident(row["schema_name"]))

                # alter schema holders
                schema_holder_id = 0
                for schema_name in schema_names:
                    try:
                        plpy.execute("alter schema prestogres_catalog_schema_holder_%s rename to %s" % \
                                (schema_holder_id, plpy.quote_ident(schema_name)))
                        schema_holder_id += 1
                    except:
                        # ignore error?
                        pass

                # alter table holders in prestogres_catalog schema
                for statement in statements:
                    plpy.execute(statement)

                # drop prestogres_catalog schema
                plpy.execute("drop schema prestogres_catalog cascade")

                # drop schema holders
                sql = "select n.nspname as schema_name from pg_catalog.pg_namespace n" \
                      " where n.nspname like 'prestogres_catalog_schema_holder_%'"
                for row in plpy.cursor(sql):
                    plpy.execute("drop schema %s" % plpy.quote_ident(row["schema_name"]))

                # update pg_database
                # NOTE(review): `schema_name` here is whatever the loops above
                # left bound -- presumably the last renamed schema; verify this
                # is the intended database name.
                plpy.execute("update pg_database set datname=%s where datname=current_database()" % \
                        plpy.quote_literal(schema_name))

                # switch to the restricted role
                plpy.execute("set role to %s" % plpy.quote_ident(login_user))

                # run the actual query and save result
                metadata = plpy.execute(query)
                column_names = metadata.colnames()
                column_type_oids = metadata.coltypes()
                result = map(lambda row: map(row.get, column_names), metadata)

                # save result schema
                oid_to_type_name = _load_oid_to_type_name_mapping(column_type_oids)
                column_types = map(oid_to_type_name.get, column_type_oids)

                # store query cache
                query_cache[query] = QueryResult(column_names, column_types, result)
            finally:
                # rollback subtransaction
                subxact.exit("rollback subtransaction", None, None)

        column_names = _rename_duplicated_column_names(column_names)
        create_sql = _build_create_temp_table_sql(result_table, column_names, column_types)
        insert_sql = _build_insert_into_sql(result_table, column_names)

        # run CREATE TABLE and INSERT
        plpy.execute("drop table if exists " + plpy.quote_ident(result_table))
        plpy.execute(create_sql)
        _batch_insert(insert_sql, 10, column_types, result)

    except (plpy.SPIError, presto_client.PrestoException) as e:
        # Set __module__ = "__main__" to generate pretty messages.
        e.__class__.__module__ = "__main__"
        raise