def data_update(request, context=None):
    """Apply an update to rows of a table by inserting updated copies into
    the table's edit (history) table.

    Selects the rows matching ``request['where']``, overwrites the columns
    listed in ``request['values']``, appends meta fields (author, message,
    timestamp via ``parser.set_meta_info``), and inserts the resulting rows
    into the ``_edit`` table of the (meta) schema.

    :param request: dict with keys 'schema', 'table', 'where', 'values' and
        optionally 'message'.
    :param context: dict carrying the acting user under context['user'].
    :raises InvalidRequest: if the update would change a primary-key column.
    :return: {'affected': <number of updated rows>}
    """
    engine = _get_engine()
    connection = engine.connect()
    try:
        query = {
            'from': [{
                'type': 'table',
                'schema': request['schema'],
                'table': request['table']
            }],
            'where': request['where']
        }
        user = context['user'].name
        rows = data_search(query, context)
        setter = request['values']
        message = request.get('message', None)
        meta_fields = list(parser.set_meta_info('update', user, message).items())
        fields = ([field[0] for field in rows['description']]
                  + [f[0] for f in meta_fields])
        table_name = request['table']
        meta = MetaData(bind=engine)
        table = Table(table_name, meta, autoload=True,
                      schema=request['schema'])
        # BUG FIX: compare against column *names*. The original kept whole
        # Column objects, so `key in pks` (a str vs. Column comparison) could
        # never behave as intended and the primary-key guard was broken.
        pks = {c.name for c in table.columns if c.primary_key}

        insert_strings = []
        for row in rows['data']:
            insert = []
            for key, value in list(zip(fields, row)) + meta_fields:
                if key in setter:
                    if key in pks and value != setter[key]:
                        raise InvalidRequest(
                            "Primary keys must remain unchanged.")
                    value = setter[key]
                insert.append(process_value(value))
            insert_strings.append('(' + ', '.join(insert) + ')')

        if not insert_strings:
            # Nothing matched the WHERE clause. Bail out early; the original
            # executed "... VALUES " with an empty tuple list, which is
            # invalid SQL.
            return {'affected': 0}

        # Add metadata for insertions: updates are recorded in the meta
        # schema unless the request already targets an internal ('_') schema.
        schema = request['schema']
        if not schema.startswith('_'):
            schema = get_meta_schema_name(schema)

        s = "INSERT INTO {schema}.{table} ({fields}) VALUES {values}".format(
            schema=read_pgid(schema),
            table=read_pgid(get_edit_table_name(table_name)),
            fields=', '.join(fields),
            values=', '.join(insert_strings)
        )
        connection.execute(s)
        return {'affected': len(rows['data'])}
    finally:
        # The original leaked the connection; always release it.
        connection.close()
def table_create(request, context=None):
    """Create a new table (and, if necessary, its schema).

    Builds a CREATE TABLE statement from ``request['fields']`` and the
    optional foreign-key constraints in ``request['constraints']['fk']``,
    then executes it in a transaction.

    :param request: dict with keys 'schema', 'table', 'fields' and
        optionally 'constraints'.
    :param context: unused (kept for the common action signature).
    :raises AssertionError: on invalid identifiers or constraint specs.
    :return: {'success': True} on success.
    """
    # TODO: Authentication
    # TODO: column constraints: Unique

    # Load schema name and check for sanity
    engine = _get_engine()
    schema = read_pgid(request["schema"])
    # Check whether schema exists; create it below if it does not.
    create_schema = not has_schema(request)
    # Load table name and check for sanity
    table = read_pgid(request.pop("table"))

    # Process fields into column definition strings.
    fieldstrings = []
    fields = request.pop("fields", [])
    foreign_keys = []
    primary_keys = []
    for field in fields:
        fname = read_pgid(field["name"])  # identifier sanity check
        type_name = field["type"]
        # TODO: check whether type_name is an actual postgres type
        # if not engine.dialect.has_type(connection, type_name):
        #     raise p.toolkit.ValidationError(
        #         "Invalid field type: '%s'" % type_name)
        fs = field["name"] + " " + type_name
        if "pk" in field and read_bool(field["pk"]):
            primary_keys.append([field["name"]])
            fs += " PRIMARY KEY"
        fieldstrings.append(fs)

    table_constraints = {"unique": [],
                         "pk": primary_keys,
                         "fk": foreign_keys}

    constraints = request.pop('constraints', {})
    if 'fk' in constraints:
        # NOTE: raise AssertionError explicitly instead of using `assert`
        # statements, which are stripped under `python -O`.
        if not isinstance(constraints['fk'], list):
            raise AssertionError("Foreign Keys should be a list")
        for fk in constraints['fk']:
            if not all(map(is_pg_qual,
                           [fk["schema"], fk["table"]]
                           + fk["fields"] + fk["names"])):
                raise AssertionError("Invalid identifier")
            if 'on_delete' in fk:
                # BUG FIX: the original read fk["on delete"] (with a space),
                # which raised KeyError; the key is "on_delete".
                if fk["on_delete"].lower() not in ["cascade", "no action",
                                                   "restrict", "set null",
                                                   "set default"]:
                    raise AssertionError("Invalid on delete action")
            else:
                fk["on_delete"] = "no action"
            foreign_keys.append((fk["names"], fk["schema"], fk["table"],
                                 fk["fields"], fk["on_delete"]))

    fk_constraints = []
    for (fk_field1, fk_schema, fk_table,
         fk_field2, fk_on_delete) in foreign_keys:
        fk_constraints.append(
            "FOREIGN KEY ({field1}) references {schema}.{table} ({field2}) "
            "match simple on update no action on delete {ondel}".format(
                field1=",".join(fk_field1),
                schema=fk_schema,
                table=fk_table,
                field2=",".join(fk_field2),
                ondel=fk_on_delete))

    # BUG FIX: the original dropped the fk constraints entirely when there
    # were no field strings; join the combined list unconditionally.
    fields = "(" + ", ".join(fieldstrings + fk_constraints) + ")"

    sql_string = "create table {schema}.{table} {fields}".format(
        schema=schema, table=table, fields=fields)

    session = sessionmaker(bind=engine)()
    try:
        if create_schema:
            session.execute("create schema %s" % schema)
        # Escape '%' so SQLAlchemy's paramstyle handling does not choke.
        session.execute(sql_string.replace('%', '%%'))
        # create_meta(schema, table)
    except Exception as e:
        traceback.print_exc()
        session.rollback()
        raise e
    else:
        session.commit()
    finally:
        session.close()
    return {'success': True}