def visit_ColumnRef(self, node):
    """Render a column reference, with special handling for a trailing
    ``*``, the bare ``VALUE`` pseudo-column, and ``OLD``/``NEW`` heads."""
    parts = node.name
    if isinstance(parts[-1], pgast.Star):
        # Qualified (or bare) star: quote the prefix, then append '*'.
        self.write(common.qname(*parts[:-1]))
        if len(parts) > 1:
            self.write('.')
        self.write('*')
    elif parts == ['VALUE']:
        # VALUE is emitted verbatim, never quoted.
        self.write(parts[0])
    elif parts[0] in {'OLD', 'NEW'}:
        # Trigger pseudo-relations: keep the OLD/NEW head unquoted,
        # quote only the remaining path components.
        self.write(parts[0])
        if len(parts) > 1:
            self.write('.')
            self.write(common.qname(*parts[1:]))
    else:
        self.write(common.qname(*parts))
def get_trigger_proc_text(self):
    """Return the PL/pgSQL body of this constraint's trigger procedure.

    For every constraint expression, emits a duplicate-key probe against
    the origin table's inheritance view and raises ``unique_violation``
    when the NEW value already exists.
    """
    quoted_name = self.constraint_name()
    plain_name = self.constraint_name(quote=False)
    message = 'duplicate key value violates unique ' \
              'constraint {constr}'.format(constr=quoted_name)

    parts = []
    for expr, origin_expr in zip(self._exprdata, self._origin_exprdata):
        exprdata = expr['exprdata']
        origin_exprdata = origin_expr['exprdata']
        schemaname, tablename = self.get_origin_table_name()
        # Probe the inheritance view so descendant tables are covered
        # by the uniqueness check as well.
        inhview = common.qname(
            schemaname,
            tablename + "_" + common.get_aspect_suffix("inhview"))

        parts.append('''
            PERFORM
                TRUE
              FROM
                {table}
              WHERE
                {plain_expr} = {new_expr};
            IF FOUND THEN
                RAISE unique_violation
                    USING
                        TABLE = '{tablename}',
                        SCHEMA = '{schemaname}',
                        CONSTRAINT = '{constr}',
                        MESSAGE = '{errmsg}',
                        DETAIL = {detail};
            END IF;
        '''.format(
            plain_expr=origin_exprdata['plain'],
            detail=common.quote_literal(
                f"Key ({origin_exprdata['plain']}) already exists."
            ),
            new_expr=exprdata['new'],
            table=inhview,
            schemaname=schemaname,
            tablename=tablename,
            constr=plain_name,
            errmsg=message,
        ))

    return 'BEGIN\n' + '\n\n'.join(parts) + '\nRETURN NEW;\nEND;'
def get_trigger_proc_text(self):
    """Return the PL/pgSQL body of this constraint's trigger procedure.

    For every constraint expression, emits a duplicate-key probe against
    the origin table and raises ``unique_violation`` when the NEW value
    already exists.
    """
    chunks = []
    constr_name = self.constraint_name()
    raw_constr_name = self.constraint_name(quote=False)

    errmsg = 'duplicate key value violates unique ' \
             'constraint {constr}'.format(constr=constr_name)

    for expr, origin_expr in zip(self._exprdata, self._origin_exprdata):
        exprdata = expr['exprdata']
        origin_exprdata = origin_expr['exprdata']

        # BUG FIX: the template previously interpolated '{table[1]}' and
        # '{table[0]}' where *table* was the already-joined qname *string*,
        # so the RAISE ... USING TABLE/SCHEMA options received single
        # characters of that string.  Keep the tuple components separate
        # and pass them as explicit format arguments instead.
        schemaname, tablename = self.get_origin_table_name()

        text = '''
            PERFORM
                TRUE
              FROM
                {table}
              WHERE
                {plain_expr} = {new_expr};
            IF FOUND THEN
                RAISE unique_violation
                    USING
                        TABLE = '{tablename}',
                        SCHEMA = '{schemaname}',
                        CONSTRAINT = '{constr}',
                        MESSAGE = '{errmsg}',
                        DETAIL = 'Key ({plain_expr}) already exists.';
            END IF;
        '''.format(
            plain_expr=origin_exprdata['plain'],
            new_expr=exprdata['new'],
            table=common.qname(schemaname, tablename),
            schemaname=schemaname,
            tablename=tablename,
            constr=raw_constr_name,
            errmsg=errmsg,
        )

        chunks.append(text)

    text = 'BEGIN\n' + '\n\n'.join(chunks) + '\nRETURN NEW;\nEND;'

    return text
def visit_FuncCall(self, node):
    """Render a function call: name, argument list, and the optional
    aggregate ORDER BY, FILTER, OVER, WITH ORDINALITY and column
    definition clauses, in SQL's required order."""
    write = self.write

    write(common.qname(*node.name))
    write('(')
    if node.agg_distinct:
        write('DISTINCT ')
    self.visit_list(node.args, newlines=False)
    if node.agg_order:
        write(' ORDER BY ')
        self.visit_list(node.agg_order, newlines=False)
    write(')')

    if node.agg_filter:
        write(' FILTER (WHERE ')
        self.visit(node.agg_filter)
        write(')')

    if node.over:
        window = node.over
        write(' OVER (')
        if window.partition_clause:
            write('PARTITION BY ')
            self.visit_list(window.partition_clause, newlines=False)
        if window.order_clause:
            write(' ORDER BY ')
            self.visit_list(window.order_clause, newlines=False)
        # XXX: add support for frame definition
        write(')')

    if node.with_ordinality:
        write(' WITH ORDINALITY')

    if node.coldeflist:
        write(' AS (')
        self.visit_list(node.coldeflist, newlines=False)
        write(')')
def visit_Relation(self, node):
    """Render a relation name, schema-qualified when a schema is set."""
    if node.schemaname is None:
        name_parts = (node.name,)
    else:
        name_parts = (node.schemaname, node.name)
    self.write(common.qname(*name_parts))
async def read_scalars(self, schema, only_modules, exclude_modules):
    """Reflect scalar types from the backend catalogs into *schema*.

    Fetches scalar metadata (optionally restricted by *only_modules* /
    *exclude_modules*), creates the corresponding ScalarType objects,
    links their bases/ancestors, and cross-checks that every concrete
    descendant of ``std::sequence`` has a backing PostgreSQL sequence —
    and, when no module filter is active, that no orphaned sequences
    exist.

    Returns the updated schema.

    Raises:
        errors.SchemaError: on a missing or extraneous backing sequence.
    """
    seqs = await introspection.sequences.fetch(
        self.connection,
        schema_pattern='edgedb%', sequence_pattern='%_sequence')
    seqs = {(s['schema'], s['name']): s for s in seqs}
    seen_seqs = set()

    scalar_list = await datasources.schema.scalars.fetch(
        self.connection, modules=only_modules,
        exclude_modules=exclude_modules)

    # Base/ancestor linking is deferred until every scalar object exists.
    basemap = {}

    for row in scalar_list:
        name = sn.Name(row['name'])

        scalar_data = {
            'id': row['id'],
            'inherited_fields':
                self._unpack_inherited_fields(row['inherited_fields']),
            'name': name,
            'is_abstract': row['is_abstract'],
            'is_final': row['is_final'],
            'view_type': (s_types.ViewType(row['view_type'])
                          if row['view_type'] else None),
            'view_is_persistent': row['view_is_persistent'],
            'default': (self.unpack_expr(row['default'], schema)
                        if row['default'] else None),
            'expr': (self.unpack_expr(row['expr'], schema)
                     if row['expr'] else None),
            'enum_values': row['enum_values'],
        }

        schema, scalar = s_scalars.ScalarType.create_in_schema(
            schema, **scalar_data)

        basemap[scalar] = (row['bases'], row['ancestors'])

    for scls, (basenames, ancestors) in basemap.items():
        schema = self._set_reflist(schema, scls, 'bases', basenames)
        schema = self._set_reflist(schema, scls, 'ancestors', ancestors)

    sequence = schema.get('std::sequence', None)
    for scalar in schema.get_objects(type=s_scalars.ScalarType):
        if (sequence is not None
                and scalar.issubclass(schema, sequence)
                and not scalar.get_is_abstract(schema)):
            seq_name = common.get_backend_name(
                schema, scalar, catenate=False, aspect='sequence')
            if seq_name not in seqs:
                # BUG FIX: message previously read 'incosistency'.
                msg = 'internal metadata inconsistency'
                details = (f'Missing sequence for sequence '
                           f'scalar {scalar.get_name(schema)}')
                raise errors.SchemaError(msg, details=details)
            seen_seqs.add(seq_name)

    # Only a full (unfiltered) scan can prove a sequence is extraneous.
    extra_seqs = set(seqs) - seen_seqs
    if extra_seqs and not only_modules and not exclude_modules:
        # BUG FIX: message previously read 'incosistency'.
        msg = 'internal metadata inconsistency'
        details = 'Extraneous sequences exist: {}'.format(
            ', '.join(common.qname(*t) for t in extra_seqs))
        raise errors.SchemaError(msg, details=details)

    return schema