async def _compile_sys_queries(schema, cluster):
    """Compile the system queries needed at runtime and cache their SQL.

    Each query is compiled against *schema* and the resulting SQL text is
    stored under a well-known key in the 'sysqueries' static JSON cache
    for *cluster*.
    """
    sql_by_name = {}

    # SQL for reading the current system configuration.
    schema, compiled = compiler.compile_bootstrap_script(
        schema,
        schema,
        config.generate_config_query(schema),
        expected_cardinality_one=True,
        single_statement=True)
    sql_by_name['config'] = compiled

    # SQL for fetching a single role record by name.
    role_query = '''
        SELECT sys::Role {
            name,
            is_superuser,
            password,
        } FILTER .name = <str>$name;
    '''
    schema, compiled = compiler.compile_bootstrap_script(
        schema,
        schema,
        role_query,
        expected_cardinality_one=True,
        single_statement=True)
    sql_by_name['role'] = compiled

    await _store_static_json_cache(
        cluster,
        'sysqueries',
        json.dumps(sql_by_name),
    )
async def _compile_sys_queries(schema, compiler, cluster):
    """Compile the system queries needed at runtime and cache their SQL.

    Each query is compiled against *schema* and the resulting SQL text
    is stored under a well-known key ('config', 'role', 'backend_tids')
    in the 'sysqueries' static JSON cache for *cluster*.

    Args:
        schema: the current schema; it is threaded through successive
            compilations, as each compile may return an updated schema.
        compiler: the bootstrap-script compiler to use.
        cluster: the cluster whose static cache receives the result.
    """
    queries = {}

    def _compile(source, *, cardinality_one):
        # Compile one bootstrap script, threading the (possibly updated)
        # schema through to the next compilation.
        nonlocal schema
        schema, sql = compile_bootstrap_script(
            compiler,
            schema,
            source,
            expected_cardinality_one=cardinality_one,
            single_statement=True,
        )
        return sql

    # System configuration query.
    queries['config'] = _compile(
        config.generate_config_query(schema),
        cardinality_one=True,
    )

    # Role lookup by name.
    role_query = '''
        SELECT sys::Role {
            name,
            is_superuser,
            password,
        } FILTER .name = <str>$name;
    '''
    queries['role'] = _compile(role_query, cardinality_one=True)

    # Backend type-id resolution for a given set of scalar type ids.
    tids_query = '''
        SELECT schema::ScalarType {
            id,
            backend_id,
        } FILTER .id IN <uuid>json_array_unpack(<json>$ids);
    '''
    queries['backend_tids'] = _compile(tids_query, cardinality_one=False)

    await _store_static_json_cache(
        cluster,
        'sysqueries',
        json.dumps(queries),
    )
async def _compile_sys_queries(schema, cluster):
    """Compile the system queries needed at runtime and persist them.

    The compiled SQL for each query is encoded to UTF-8 bytes and the
    whole mapping is pickled to 'queries.pickle' inside the cluster's
    data directory.
    """
    compiled = {}

    # SQL for reading the current system configuration.
    schema, sql = compiler.compile_bootstrap_script(
        schema,
        schema,
        config.generate_config_query(schema),
        expected_cardinality_one=True,
        single_statement=True)
    compiled['config'] = sql.encode('utf-8')

    # SQL for fetching a single role record by name.
    role_query = '''
        SELECT sys::Role {
            name,
            allow_login,
            is_superuser,
            password,
        } FILTER .name = <str>$name;
    '''
    schema, sql = compiler.compile_bootstrap_script(
        schema,
        schema,
        role_query,
        expected_cardinality_one=True,
        single_statement=True)
    compiled['role'] = sql.encode('utf-8')

    queries_fn = os.path.join(cluster.get_data_dir(), 'queries.pickle')
    with open(queries_fn, 'wb') as f:
        pickle.dump(compiled, f)