def _process_delta(ctx, delta, schema):
    """Adapt and process the delta command.

    Canonicalizes *delta* if needed, adapts it to the backend command
    class via ``delta_cmds.CommandMeta.adapt``, and applies it to
    *schema* under a stdmode context carrying the backend runtime
    parameters taken from ``ctx.cluster``.

    Returns a tuple of (updated schema, adapted delta).
    """
    if debug.flags.delta_plan:
        debug.header('Delta Plan')
        debug.dump(delta, schema=schema)

    # NOTE: the original built a bare stdmode CommandContext here and
    # never used it (it was rebound below) — that dead store is removed.
    if not delta.canonical:
        # Canonicalize
        sd.apply(delta, schema=schema)

    # Adapt the generic delta into backend-specific (pgsql) commands.
    delta = delta_cmds.CommandMeta.adapt(delta)
    context = sd.CommandContext(
        stdmode=True,
        backend_runtime_params=ctx.cluster.get_runtime_params(),
    )
    schema = sd.apply(delta, schema=schema, context=context)

    if debug.flags.delta_pgsql_plan:
        debug.header('PgSQL Delta Plan')
        debug.dump(delta, schema=schema)

    return schema, delta
def _load_reflection_schema():
    """Return the reflection schema and class layout, computing them once.

    The result is memoized in the module-level globals ``_refl_schema``
    and ``_schema_class_layout``; in dev mode a pickle cache keyed on a
    hash of the std source directories is consulted first.
    """
    global _refl_schema
    global _schema_class_layout

    # Fast path: already computed during this process's lifetime.
    if _refl_schema is not None:
        return _refl_schema, _schema_class_layout

    std_dirs_hash = buildmeta.hash_dirs(s_std.CACHE_SRC_DIRS)

    cached = None
    if devmode.is_in_dev_mode():
        cached = buildmeta.read_data_cache(
            std_dirs_hash, 'transient-reflschema.pickle')

    if cached is not None:
        reflschema, classlayout = cached
    else:
        # Build the reflection structure from the std schema and apply
        # the introspection delta under a stdmode context.
        std_schema = _load_std_schema()
        reflection = s_refl.generate_structure(std_schema)
        classlayout = reflection.class_layout
        context = sd.CommandContext()
        context.stdmode = True
        reflschema = reflection.intro_schema_delta.apply(
            std_schema, context)
        if devmode.is_in_dev_mode():
            buildmeta.write_data_cache(
                (reflschema, classlayout),
                std_dirs_hash,
                'transient-reflschema.pickle',
            )

    _refl_schema = reflschema
    _schema_class_layout = classlayout

    return _refl_schema, _schema_class_layout
async def _make_stdlib(testmode: bool):
    """Compile the standard library DDL into a schema and its SQL text.

    Parses the concatenated std module sources (plus ``_testmode`` when
    *testmode* is set), applies each DDL command to a fresh schema, and
    accumulates the generated backend SQL into a single block.

    Returns a tuple of (schema, sql_text).
    """
    schema = s_schema.Schema()
    # Lazily selected: SQLBlock for DATABASE commands, PLTopBlock otherwise.
    current_block = None

    std_texts = []
    for modname in s_schema.STD_LIB + ('stdgraphql', ):
        std_texts.append(s_std.get_std_module_text(modname))

    if testmode:
        std_texts.append(s_std.get_std_module_text('_testmode'))

    ddl_text = '\n'.join(std_texts)

    for ddl_cmd in edgeql.parse_block(ddl_text):
        delta_command = s_ddl.delta_from_ddl(
            ddl_cmd, schema=schema, modaliases={None: 'std'}, stdmode=True)

        if debug.flags.delta_plan_input:
            debug.header('Delta Plan Input')
            debug.dump(delta_command)

        # Do a dry-run on test_schema to canonicalize
        # the schema delta-commands.
        # NOTE(review): test_schema aliases `schema` directly — presumably
        # schemas are immutable so the dry-run result is discarded; confirm.
        test_schema = schema
        context = sd.CommandContext()
        context.stdmode = True
        delta_command.apply(test_schema, context=context)

        # Apply and adapt delta, build native delta plan, which
        # will also update the schema.
        schema, plan = _process_delta(delta_command, schema)

        if isinstance(plan, (s_db.CreateDatabase, s_db.DropDatabase)):
            # DATABASE commands must go into a plain SQL block; mixing
            # block kinds within one run is rejected.
            if (current_block is not None and
                    not isinstance(current_block, dbops.SQLBlock)):
                raise errors.QueryError(
                    'cannot mix DATABASE commands with regular DDL '
                    'commands in a single block')
            if current_block is None:
                current_block = dbops.SQLBlock()
        else:
            # Regular DDL goes into a PL top block.
            if (current_block is not None and
                    not isinstance(current_block, dbops.PLTopBlock)):
                raise errors.QueryError(
                    'cannot mix DATABASE commands with regular DDL '
                    'commands in a single block')
            if current_block is None:
                current_block = dbops.PLTopBlock()

        plan.generate(current_block)

    sql_text = current_block.to_string()

    return schema, sql_text
def delta_from_ddl(stmts, *, schema, modaliases,
                   stdmode: bool = False,
                   testmode: bool = False):
    """Build a DeltaRoot command tree from one or more DDL AST nodes."""
    root = s_delta.DeltaRoot()

    ctx = s_delta.CommandContext()
    ctx.modaliases = modaliases
    ctx.schema = schema
    ctx.stdmode = stdmode
    ctx.testmode = testmode

    # Accept a single AST node as a convenience.
    if isinstance(stmts, edgeql.ast.Base):
        stmts = [stmts]

    for stmt in stmts:
        with ctx(s_delta.DeltaRootContext(root)):
            subcommand = cmd_from_ddl(
                stmt,
                context=ctx,
                schema=schema,
                modaliases=modaliases,
                testmode=testmode,
            )
            root.add(subcommand)

    return root
async def _amend_stdlib(
    ddl_text: str,
    stdlib: StdlibBits,
) -> Tuple[StdlibBits, str]:
    """Apply additional DDL on top of compiled stdlib bits.

    Applies each command in *ddl_text* to both the std schema and the
    reflection schema, generates the corresponding SQL into a PL top
    block, and compiles schema-storage updates for every plan.

    Returns the amended StdlibBits and the generated SQL text.
    """
    schema = stdlib.stdschema
    reflschema = stdlib.reflschema

    topblock = dbops.PLTopBlock()
    plans = []

    context = sd.CommandContext()
    context.stdmode = True

    for ddl_cmd in edgeql.parse_block(ddl_text):
        assert isinstance(ddl_cmd, qlast.DDLCommand)
        delta_command = s_ddl.delta_from_ddl(
            ddl_cmd, modaliases={}, schema=schema, stdmode=True)

        if debug.flags.delta_plan_input:
            debug.header('Delta Plan Input')
            debug.dump(delta_command)

        # Apply and adapt delta, build native delta plan, which
        # will also update the schema.
        schema, plan = _process_delta(delta_command, schema)
        reflschema = delta_command.apply(reflschema, context)
        plan.generate(topblock)
        plans.append(plan)

    compiler = edbcompiler.new_compiler(
        std_schema=schema,
        reflection_schema=reflschema,
        schema_class_layout=stdlib.classlayout,
    )

    # NOTE(review): the original created a compiler context with
    # bootstrap_mode=True here and immediately discarded it by rebinding
    # the name.  The dead store is removed; if bootstrap_mode=True was the
    # intended context, this call should gain that flag instead — confirm.
    compilerctx = edbcompiler.new_compiler_context(schema)

    for plan in plans:
        compiler._compile_schema_storage_in_delta(
            ctx=compilerctx,
            delta=plan,
            block=topblock,
        )

    sqltext = topblock.to_string()

    return stdlib._replace(stdschema=schema, reflschema=reflschema), sqltext
def load_std_module(
        schema: s_schema.Schema,
        modname: str) -> s_schema.Schema:
    """Parse the text of std module *modname* and apply it to *schema*.

    Each statement is applied with ``s_ddl.apply_ddl`` in stdmode with
    no module aliases.  Returns the updated schema.
    """
    # NOTE: the original built a stdmode CommandContext here that was
    # never passed to anything — the dead locals are removed.
    modaliases = {}

    modtext = get_std_module_text(modname)
    for statement in edgeql.parse_block(modtext):
        schema = s_ddl.apply_ddl(
            statement, schema=schema, modaliases=modaliases, stdmode=True)

    return schema
async def _execute_edgeql_ddl(
    schema: s_schema.Schema,
    ddltext: str,
    stdmode: bool = True,
) -> s_schema.Schema:
    """Parse *ddltext* and apply each DDL command to *schema* in order."""
    ctx = sd.CommandContext(stdmode=stdmode)

    for statement in edgeql.parse_block(ddltext):
        command = s_ddl.delta_from_ddl(
            statement, modaliases={}, schema=schema, stdmode=stdmode)
        schema = command.apply(schema, ctx)

    return schema
def run_ddl(cls, schema, ddl, default_module=defines.DEFAULT_MODULE_ALIAS):
    """Apply a block of DDL/migration statements to *schema*.

    Dispatches per statement kind:
      * ``qlast.CreateDelta`` — CREATE MIGRATION, compiled against the
        lazily loaded std schema;
      * ``qlast.Delta`` — APPLY MIGRATION, replayed from the stored
        migration object's commands as a canonical DeltaRoot;
      * other ``qlast.DDL`` — plain CREATE/DROP/ALTER commands.

    Returns the final schema.  Raises ValueError on any other node.
    """
    statements = edgeql.parse_block(ddl)

    current_schema = schema
    # std schema is loaded on first CREATE MIGRATION only.
    target_schema = None

    for stmt in statements:
        if isinstance(stmt, qlast.CreateDelta):
            # CREATE MIGRATION
            if target_schema is None:
                target_schema = _load_std_schema()

            ddl_plan = s_ddl.cmd_from_ddl(
                stmt,
                schema=current_schema,
                modaliases={None: default_module},
                testmode=True)

            ddl_plan = s_ddl.compile_migration(
                ddl_plan, target_schema, current_schema)

        elif isinstance(stmt, qlast.Delta):
            # APPLY MIGRATION
            delta_cmd = s_ddl.cmd_from_ddl(
                stmt,
                schema=current_schema,
                modaliases={None: default_module},
                testmode=True)
            delta = current_schema.get(delta_cmd.classname)
            # Replay the stored migration's commands verbatim; canonical=True
            # skips re-canonicalization.
            ddl_plan = sd.DeltaRoot(canonical=True)
            ddl_plan.update(delta.get_commands(current_schema))

        elif isinstance(stmt, qlast.DDL):
            # CREATE/DELETE/ALTER (FUNCTION, TYPE, etc)
            ddl_plan = s_ddl.delta_from_ddl(
                stmt,
                schema=current_schema,
                modaliases={None: default_module},
                testmode=True)

        else:
            raise ValueError(
                f'unexpected {stmt!r} in compiler setup script')

        context = sd.CommandContext()
        context.testmode = True

        # apply() here returns (schema, extra) — only the schema is kept.
        current_schema, _ = ddl_plan.apply(current_schema, context)

    return current_schema
async def _execute_edgeql_ddl(
    schema: s_schema.Schema_T,
    ddltext: str,
    stdmode: bool = True,
) -> s_schema.Schema_T:
    """Parse *ddltext* and fold each DDL command into *schema*."""
    ctx = sd.CommandContext(stdmode=stdmode)

    for node in edgeql.parse_block(ddltext):
        assert isinstance(node, qlast.DDLCommand)
        cmd = s_ddl.delta_from_ddl(
            node, modaliases={}, schema=schema, stdmode=stdmode)
        schema = cmd.apply(schema, ctx)  # type: ignore

    return schema
def _process_delta(delta, schema):
    """Adapt and process the delta command.

    Canonicalizes *delta* if needed, adapts it to the backend command
    class, and applies it to *schema* under a stdmode context.

    Returns a tuple of (updated schema, adapted delta).
    """
    if debug.flags.delta_plan:
        debug.header('Delta Plan')
        debug.dump(delta, schema=schema)

    if not delta.canonical:
        # Canonicalize
        sd.apply(delta, schema=schema)

    # Adapt the generic delta into backend-specific commands.
    delta = delta_cmds.CommandMeta.adapt(delta)

    # NOTE: the original built an identical stdmode context twice; the
    # first instance was never used, so it is created only once here.
    context = sd.CommandContext()
    context.stdmode = True
    schema = sd.apply(delta, schema=schema, context=context)

    if debug.flags.delta_pgsql_plan:
        debug.header('PgSQL Delta Plan')
        debug.dump(delta, schema=schema)

    return schema, delta
def cmd_from_ddl(stmt, *, context=None, schema, modaliases,
                 testmode: bool = False):
    """Translate a single DDL AST node into a schema delta command."""
    annotated = s_expr.imprint_expr_context(stmt, modaliases)

    # Reuse the caller's context when given; otherwise make a fresh one.
    ctx = s_delta.CommandContext() if context is None else context
    ctx.modaliases = modaliases
    ctx.schema = schema
    ctx.testmode = testmode

    return s_delta.Command.from_ast(schema, annotated, context=ctx)
def _assert_migration_consistency(self, schema_text):
    """Assert that COMMIT MIGRATION and its generated DDL text agree.

    Builds a CREATE MIGRATION from *schema_text*, applies it to obtain a
    baseline schema, then replays the DDL text produced from the same
    migration and fails the test if the two resulting schemas differ.
    """
    migration_text = f'''
        CREATE MIGRATION m TO {{
            {schema_text}
        }};
    '''

    migration_ql = edgeql.parse_block(migration_text)

    migration_cmd = s_ddl.cmd_from_ddl(
        migration_ql[0],
        schema=self.schema,
        modaliases={None: 'default'},
    )

    migration_cmd = s_ddl.compile_migration(
        migration_cmd,
        self.std_schema,
        self.schema,
    )

    context = s_delta.CommandContext()
    schema, migration = migration_cmd.apply(self.schema, context)

    # Baseline: replay the migration's own commands as a canonical plan.
    ddl_plan = s_delta.DeltaRoot(canonical=True)
    ddl_plan.update(migration.get_commands(schema))
    baseline_schema, _ = ddl_plan.apply(schema, context)

    # Alternative path: regenerate DDL text from the migration and run it.
    ddl_text = s_ddl.ddl_text_from_delta(schema, migration)

    try:
        test_schema = self.run_ddl(schema, ddl_text)
    except errors.EdgeDBError as e:
        self.fail(markup.dumps(e))

    diff = s_ddl.delta_schemas(baseline_schema, test_schema)

    if list(diff.get_subcommands()):
        self.fail(
            f'unexpected difference in schema produced by\n'
            f'COMMIT MIGRATION and DDL obtained from GET MIGRATION:\n'
            f'{markup.dumps(diff)}\n'
            f'DDL text was:\n{ddl_text}')
def load_std_module(schema: s_schema.Schema,
                    modname: str) -> s_schema.Schema:
    """Parse a std module's DDL text and apply it to *schema* in stdmode."""
    # The 'std' module itself is loaded with an implicit std alias.
    aliases = {None: 'std'} if modname == 'std' else {}

    ctx = s_delta.CommandContext()
    ctx.stdmode = True

    modtext = get_std_module_text(modname)

    for stmt in edgeql.parse_block(modtext):
        command = s_ddl.delta_from_ddl(
            stmt, schema=schema, modaliases=aliases, stdmode=True)
        schema, _ = command.apply(schema, ctx)

    return schema
def cmd_from_ddl(stmt, *, context=None, schema, modaliases,
                 testmode: bool = False):
    """Translate a DDL AST node into a schema delta command.

    Module aliases (implicit and explicit) are expanded on the AST
    before conversion.
    """
    # expand module aliases (implicit and explicit)
    expanded = edgeql.deoptimize(
        stmt, modaliases=modaliases, strip_builtins=False, schema=schema)

    ctx = context if context is not None else s_delta.CommandContext()
    ctx.modaliases = modaliases
    ctx.schema = schema
    ctx.testmode = testmode

    return s_delta.Command.from_ast(schema, expanded, context=ctx)
def run_ddl(cls, schema, ddl, default_module=defines.DEFAULT_MODULE_ALIAS):
    """Apply a block of DDL/migration statements to *schema*.

    Supports the START MIGRATION / POPULATE MIGRATION / COMMIT MIGRATION
    flow: statements between START and COMMIT are accumulated into a
    migration script instead of being applied immediately, and COMMIT
    turns the script into a single CREATE MIGRATION plan.

    Returns the final schema.  Raises ValueError on unexpected nodes and
    errors.QueryError for POPULATE/COMMIT outside a migration block.
    """
    statements = edgeql.parse_block(ddl)

    current_schema = schema
    # std schema is loaded lazily on first START MIGRATION.
    target_schema = None
    # Migration-block state; None/empty outside a migration block.
    migration_schema = None
    migration_target = None
    migration_script = []
    migration_plan = None

    for stmt in statements:
        if isinstance(stmt, qlast.StartMigration):
            # START MIGRATION
            if target_schema is None:
                target_schema = _load_std_schema()

            # Compute the target schema from the SDL in the statement.
            migration_target = s_ddl.apply_sdl(
                stmt.target,
                base_schema=target_schema,
                current_schema=current_schema,
                testmode=True,
            )

            migration_schema = current_schema

            ddl_plan = None

        elif isinstance(stmt, qlast.PopulateMigration):
            # POPULATE MIGRATION
            if migration_target is None:
                raise errors.QueryError(
                    'unexpected POPULATE MIGRATION:'
                    ' not currently in a migration block',
                    context=stmt.context,
                )

            migration_diff = s_ddl.delta_schemas(
                migration_schema,
                migration_target,
            )

            # The first POPULATE's diff becomes the auto_diff plan.
            if not migration_script:
                migration_plan = migration_diff

            migration_script.extend(
                s_ddl.ddlast_from_delta(
                    migration_target,
                    migration_diff,
                ),
            )

        elif isinstance(stmt, qlast.CommitMigration):
            if migration_target is None:
                raise errors.QueryError(
                    'unexpected COMMIT MIGRATION:'
                    ' not currently in a migration block',
                    context=stmt.context,
                )

            # Link the new migration to the last one, if any.
            last_migration = current_schema.get_last_migration()
            if last_migration:
                last_migration_ref = s_utils.name_to_ast_ref(
                    last_migration.get_name(current_schema),
                )
            else:
                last_migration_ref = None

            create_migration = qlast.CreateMigration(
                commands=migration_script,
                auto_diff=migration_plan,
                parent=last_migration_ref,
            )

            ddl_plan = s_ddl.delta_from_ddl(
                create_migration,
                schema=migration_schema,
                modaliases={None: default_module},
                testmode=True,
            )

            # Reset migration-block state.
            migration_schema = None
            migration_target = None
            migration_script = []
            migration_plan = None

        elif isinstance(stmt, qlast.DDL):
            if migration_target is not None:
                # Inside a migration block: defer to the script.
                migration_script.append(stmt)
                ddl_plan = None
            else:
                ddl_plan = s_ddl.delta_from_ddl(
                    stmt,
                    schema=current_schema,
                    modaliases={None: default_module},
                    testmode=True,
                )

        else:
            raise ValueError(
                f'unexpected {stmt!r} in compiler setup script')

        if ddl_plan is not None:
            context = sd.CommandContext()
            context.testmode = True
            current_schema = ddl_plan.apply(current_schema, context)

    return current_schema
async def _make_stdlib(testmode: bool, global_ids) -> StdlibBits:
    """Compile the standard library and reflection schema into StdlibBits.

    Parses and applies the std module DDL, creates the bootstrap role and
    databases from *global_ids*, generates the reflection schema, compiles
    schema-storage SQL for every plan, and compiles the introspection
    query chunks into a single UNION ALL query.
    """
    schema = s_schema.Schema()
    schema, _ = s_mod.Module.create_in_schema(schema, name='__derived__')

    current_block = dbops.PLTopBlock()

    std_texts = []
    for modname in s_schema.STD_LIB + ('stdgraphql', ):
        std_texts.append(s_std.get_std_module_text(modname))

    if testmode:
        std_texts.append(s_std.get_std_module_text('_testmode'))

    ddl_text = '\n'.join(std_texts)
    # UUIDs of all types created by the std plans.
    types: Set[uuid.UUID] = set()
    std_plans: List[sd.Command] = []

    for ddl_cmd in edgeql.parse_block(ddl_text):
        delta_command = s_ddl.delta_from_ddl(
            ddl_cmd, modaliases={}, schema=schema, stdmode=True)

        if debug.flags.delta_plan_input:
            debug.header('Delta Plan Input')
            debug.dump(delta_command)

        # Apply and adapt delta, build native delta plan, which
        # will also update the schema.
        schema, plan = _process_delta(delta_command, schema)
        std_plans.append(delta_command)

        types.update(plan.new_types)
        plan.generate(current_block)

    # Bootstrap globals: superuser role and the template/default databases
    # with pinned ids supplied by the caller.
    stdglobals = '\n'.join([
        f'''CREATE SUPERUSER ROLE {edbdef.EDGEDB_SUPERUSER} {{
            SET id := <uuid>'{global_ids[edbdef.EDGEDB_SUPERUSER]}'
        }};''',
        f'''CREATE DATABASE {edbdef.EDGEDB_TEMPLATE_DB} {{
            SET id := <uuid>'{global_ids[edbdef.EDGEDB_TEMPLATE_DB]}'
        }};''',
        f'CREATE DATABASE {edbdef.EDGEDB_SUPERUSER_DB};',
    ])

    context = sd.CommandContext(stdmode=True)

    for ddl_cmd in edgeql.parse_block(stdglobals):
        delta_command = s_ddl.delta_from_ddl(
            ddl_cmd, modaliases={}, schema=schema, stdmode=True)
        schema = delta_command.apply(schema, context)

    # Build the schema-reflection schema and its delta plan.
    refldelta, classlayout, introparts = s_refl.generate_structure(schema)
    reflschema, reflplan = _process_delta(refldelta, schema)

    std_plans.append(refldelta)

    assert current_block is not None
    reflplan.generate(current_block)
    subblock = current_block.add_block()

    compiler = edbcompiler.new_compiler(
        std_schema=schema,
        reflection_schema=reflschema,
        schema_class_layout=classlayout,
        bootstrap_mode=True,
    )

    compilerctx = edbcompiler.new_compiler_context(reflschema)
    for std_plan in std_plans:
        compiler._compile_schema_storage_in_delta(
            ctx=compilerctx,
            delta=std_plan,
            block=subblock,
            is_internal_reflection=std_plan is refldelta,
            stdmode=True,
        )

    sqltext = current_block.to_string()

    compilerctx = edbcompiler.new_compiler_context(
        reflschema,
        schema_reflection_mode=True,
        output_format=edbcompiler.IoFormat.JSON_ELEMENTS,
    )

    # The introspection query bits are returned in chunks
    # because it's a large UNION and we currently generate SQL
    # that is much harder for Postgres to plan as opposed to a
    # straight flat UNION.
    sql_introparts = []

    for intropart in introparts:
        introtokens = tokenizer.tokenize(intropart.encode())
        units = compiler._compile(ctx=compilerctx, tokens=introtokens)
        assert len(units) == 1 and len(units[0].sql) == 1
        sql_intropart = units[0].sql[0].decode()
        sql_introparts.append(sql_intropart)

    introsql = ' UNION ALL '.join(sql_introparts)

    return StdlibBits(
        stdschema=schema,
        reflschema=reflschema,
        sqltext=sqltext,
        types=types,
        classlayout=classlayout,
        introquery=introsql,
    )
def load_module(self, module_name, decl_ast):
    """Load a declarative module AST into the schema.

    Processes declarations in several strictly ordered passes: object
    registration (so forward references resolve), inheritance wiring,
    per-kind initialization in dependency order (annotations,
    constraints, scalars, links, object types), finalization, and
    expression normalization.  Views are compiled last.

    Returns the updated schema.
    """
    decls = decl_ast.declarations

    self._schema, self._module = s_mod.Module.create_in_schema(
        self._schema, name=module_name)

    self._mod_aliases[None] = module_name

    self._process_imports(decl_ast)

    order = s_ordering.get_global_dep_order()

    # One ordered bucket per object class, in global dependency order.
    # BaseObjectType is folded into ObjectType for bucketing purposes.
    objects = collections.OrderedDict(
        (s_objtypes.ObjectType if t is s_objtypes.BaseObjectType else t,
         collections.OrderedDict()) for t in order)

    views = []

    # First, iterate over all top-level declarations
    # to get a sense of what's in the schema so that
    # forward references work.
    for decl in decls:
        try:
            objcls = _DECL_MAP[type(decl)]
        except KeyError:
            if isinstance(decl, qlast.Import):
                continue
            elif isinstance(decl, qlast.ViewDeclaration):
                # Views are deferred until everything else is loaded.
                views.append(decl)
                continue
            msg = 'unexpected declaration type: {!r}'.format(decl)
            raise TypeError(msg) from None

        name = s_name.Name(module=module_name, name=decl.name)

        # TODO: refactor this
        objcls_kw = {}
        if hasattr(decl, 'abstract'):
            objcls_kw['is_abstract'] = decl.abstract
        if hasattr(decl, 'delegated'):
            # NOTE(review): this overwrites is_abstract set from
            # `abstract` above when both attributes exist — confirm
            # that is intended.
            objcls_kw['is_abstract'] = decl.delegated
        if hasattr(decl, 'final'):
            objcls_kw['is_final'] = decl.final
        if hasattr(decl, 'inheritable'):
            objcls_kw['inheritable'] = decl.inheritable

        if objcls is s_constr.Constraint:
            objcls_kw['return_type'] = self._schema.get('std::bool')
            objcls_kw['return_typemod'] = qltypes.TypeModifier.SINGLETON

        if issubclass(objcls, s_pointers.Pointer):
            if len(decl.name) > s_pointers.MAX_NAME_LENGTH:
                raise errors.SchemaDefinitionError(
                    f'link or property name length exceeds the maximum of '
                    f'{s_pointers.MAX_NAME_LENGTH} characters',
                    context=decl.context)

        self._schema, obj = objcls.create_in_schema(
            self._schema,
            name=name,
            sourcectx=decl.context,
            **objcls_kw,
        )

        if decl.annotations:
            self._parse_attr_setters(obj, decl.annotations)

        objects[type(obj)][obj] = decl

    # Second, process inheritance references.
    enums = {}
    chain = itertools.chain.from_iterable
    for obj, decl in chain(t.items() for t in objects.values()):
        bases, enum_values = self._get_bases(obj, decl)
        self._schema = obj.set_field_value(self._schema, 'bases', bases)
        if enum_values:
            enums[obj] = enum_values

    # Now, with all objects in the declaration in the schema, we can
    # process them in the semantic dependency order.

    self._init_annotations(objects[s_anno.Annotation])

    # Constraints have no external dependencies, but need to
    # be fully initialized when we get to constraint users below.
    self._init_constraints(objects[s_constr.Constraint])
    constraints = self._schema.get_objects(
        modules=[module_name], type=s_constr.Constraint)
    constraints = s_ordering.sort_objects(self._schema, constraints)
    for constraint in constraints:
        self._schema = constraint.finalize(self._schema)

    # ScalarTypes depend only on constraints and annotations,
    # can process them now.
    self._init_scalars(objects[s_scalars.ScalarType], enums)

    # Generic links depend on scalars (via props), constraints
    # and annotations.
    self._init_links(objects[s_links.Link])

    # Finally, we can do the first pass on types
    self._init_objtypes(objects[s_objtypes.ObjectType])

    # Pick up constraints that gained subjects during initialization.
    constraints.update(c for c in self._schema.get_objects(
        modules=[module_name], type=s_constr.Constraint)
        if c.get_subject(self._schema) is not None)

    # Final pass, set empty fields to default values and do
    # other object finalization.

    for link, linkdecl in objects[s_links.Link].items():
        self._normalize_link_constraints(link, linkdecl)

    for objtype, objtypedecl in objects[s_objtypes.ObjectType].items():
        self._normalize_objtype_constraints(objtype, objtypedecl)

    dctx = s_delta.CommandContext(declarative=True)

    everything = s_ordering.sort_objects(
        self._schema, self._schema.get_objects(modules=[module_name]))

    for obj in everything:
        cmdcls = s_delta.ObjectCommandMeta.get_command_class_or_die(
            s_delta.CreateObject, type(obj))
        ctxcls = cmdcls.get_context_class()
        cmd = cmdcls(classname=obj.get_name(self._schema))
        ctx = ctxcls(self._schema, cmd, obj)
        with dctx(ctx):
            self._schema = obj.finalize(self._schema, dctx=dctx)

    # Normalization for defaults and other expressions must be
    # *after* finalize() so that all pointers have been inherited.
    for link, linkdecl in objects[s_links.Link].items():
        self._normalize_link_expressions(link, linkdecl)

    for objtype, objtypedecl in objects[s_objtypes.ObjectType].items():
        self._normalize_objtype_expressions(objtype, objtypedecl)

    for viewdecl in views:
        self._compile_view(viewdecl)

    return self._schema
def run_ddl(cls, schema, ddl, default_module=defines.DEFAULT_MODULE_ALIAS):
    """Apply a block of DDL/migration statements to *schema*.

    Variant of the migration-block flow that emits verbose debug dumps
    under ``debug.flags.delta_plan`` and builds the final
    ``qlast.CreateMigration`` with a ``MigrationBody``.

    Returns the final schema.  Raises ValueError on unexpected nodes and
    errors.QueryError for POPULATE/COMMIT outside a migration block.
    """
    statements = edgeql.parse_block(ddl)

    current_schema = schema
    # std schema is loaded lazily on first START MIGRATION.
    target_schema = None
    # Migration-block state; reset on COMMIT MIGRATION.
    migration_schema = None
    migration_target = None
    migration_script = []

    for stmt in statements:
        if isinstance(stmt, qlast.StartMigration):
            # START MIGRATION
            if target_schema is None:
                target_schema = _load_std_schema()

            migration_target = s_ddl.apply_sdl(
                stmt.target,
                base_schema=target_schema,
                current_schema=current_schema,
                testmode=True,
            )

            migration_schema = current_schema

            ddl_plan = None

        elif isinstance(stmt, qlast.PopulateMigration):
            # POPULATE MIGRATION
            if migration_target is None:
                raise errors.QueryError(
                    'unexpected POPULATE MIGRATION:'
                    ' not currently in a migration block',
                    context=stmt.context,
                )

            migration_diff = s_ddl.delta_schemas(
                migration_schema,
                migration_target,
            )

            if debug.flags.delta_plan:
                debug.header('Populate Migration Diff')
                debug.dump(migration_diff, schema=schema)

            new_ddl = s_ddl.ddlast_from_delta(
                migration_schema,
                migration_target,
                migration_diff,
            )

            migration_script.extend(new_ddl)

            if debug.flags.delta_plan:
                debug.header('Populate Migration DDL AST')
                text = []
                for cmd in new_ddl:
                    debug.dump(cmd)
                    text.append(edgeql.generate_source(cmd, pretty=True))
                debug.header('Populate Migration DDL Text')
                debug.dump_code(';\n'.join(text) + ';')

        elif isinstance(stmt, qlast.CommitMigration):
            if migration_target is None:
                raise errors.QueryError(
                    'unexpected COMMIT MIGRATION:'
                    ' not currently in a migration block',
                    context=stmt.context,
                )

            # Link the new migration to the last one, if any.
            last_migration = current_schema.get_last_migration()
            if last_migration:
                last_migration_ref = s_utils.name_to_ast_ref(
                    last_migration.get_name(current_schema),
                )
            else:
                last_migration_ref = None

            create_migration = qlast.CreateMigration(
                body=qlast.MigrationBody(commands=tuple(migration_script)),
                parent=last_migration_ref,
            )

            ddl_plan = s_ddl.delta_from_ddl(
                create_migration,
                schema=migration_schema,
                modaliases={None: default_module},
                testmode=True,
            )

            if debug.flags.delta_plan:
                debug.header('Delta Plan')
                debug.dump(ddl_plan, schema=schema)

            # Reset migration-block state.
            migration_schema = None
            migration_target = None
            migration_script = []

        elif isinstance(stmt, qlast.DDL):
            if migration_target is not None:
                # Inside a migration block: defer to the script.
                migration_script.append(stmt)
                ddl_plan = None
            else:
                ddl_plan = s_ddl.delta_from_ddl(
                    stmt,
                    schema=current_schema,
                    modaliases={None: default_module},
                    testmode=True,
                )

                if debug.flags.delta_plan:
                    debug.header('Delta Plan')
                    debug.dump(ddl_plan, schema=schema)

        else:
            raise ValueError(
                f'unexpected {stmt!r} in compiler setup script')

        if ddl_plan is not None:
            context = sd.CommandContext()
            context.testmode = True
            current_schema = ddl_plan.apply(current_schema, context)

    return current_schema
def _new_delta_context(self, ctx: CompileContext):
    """Build a schema-delta CommandContext configured for this compiler."""
    delta_ctx = s_delta.CommandContext()
    delta_ctx.testmode = self._in_testmode(ctx)
    delta_ctx.stdmode = self._bootstrap_mode
    return delta_ctx