def save(self):
    """ Persist the entity; call this only once, after every property
    has been set on the loader. """
    # Look for a pre-existing entity matching the update criteria.
    query = Entity.all()
    query = query.filter(Entity.project == self.loader.project)
    for name, only_active in self.update_criteria:
        prop_value = self.properties.get(name).get('value')
        attribute = self.loader.project.get_attribute('entity', name)
        query = Entity._filter_property(query, [attribute], prop_value,
                                        only_active=only_active)
    existing = query.first()
    try:
        self._entity = entities.save({
            'project': self.loader.project,
            'author': self.loader.account,
            'schemata': self.schemata,
            'properties': self.properties
        }, entity=existing)
    except Invalid as inv:
        # Surface validation failures unless the loader tolerates them.
        if not self.loader.ignore_errors:
            raise
        log.warning("Validation error: %r", inv.asdict())
def view(id, slug=None):
    """ Render the profile page for a single entity, including pagers
    for its inbound and outbound relations grouped by schema. """
    entity = Entity.by_id(id)
    if entity is None:
        raise NotFound()
    # Merged entities redirect to their canonical record.
    if entity.same_as is not None:
        canonical = Entity.by_id(entity.same_as)
        return redirect(entity_link(canonical))
    # Always link using the slug derived from the active name.
    slug = url_slug(entity['name'].value)

    def build_sections(schemata, fetch, suffix):
        # One (schema, pager) tuple per relation schema.
        sections = []
        for schema in schemata:
            pager = Pager(fetch(schema), schema.name + suffix,
                          id=id, slug=slug, limit=15)
            sections.append((schema, pager))
        return sections

    inbound_sections = build_sections(entity.inbound_schemata,
                                      entity.inbound_by_schema, '_in')
    outbound_sections = build_sections(entity.outbound_schemata,
                                       entity.outbound_by_schema, '_out')
    canonical_url = entity_link(entity, **dict(request.args.items()))
    entity_hairball = app.config.get('ENTITY_HAIRBALL', True)
    return render_template('entity.html', entity=entity,
                           canonical_url=canonical_url,
                           entity_hairball=entity_hairball,
                           inbound_sections=inbound_sections,
                           outbound_sections=outbound_sections,
                           render_relation=render_relation)
def save(self):
    """ Persist the entity; call this only once, after every property
    has been set on the loader. """
    # Look for a pre-existing entity matching the update criteria.
    query = Entity.all()
    query = query.filter(Entity.project == self.loader.project)
    for name, only_active in self.update_criteria:
        prop_value = self.properties.get(name).get('value')
        attribute = self.loader.project.get_attribute('entity', name)
        query = Entity._filter_property(query, [attribute], prop_value,
                                        only_active=only_active)
    existing = query.first()
    try:
        self._entity = entities.save({
            'project': self.loader.project,
            'author': self.loader.account,
            'schemata': self.schemata,
            'properties': self.properties
        }, entity=existing)
    except Invalid as inv:
        # Best-effort: validation failures are logged, never raised.
        log.warning("Validation error: %r", inv)
def save(data, entity=None):
    """ Save or update an entity. Creates a new Entity when none is
    given, then (re-)applies schemata and properties from `data`. """
    data = validate(data, entity)
    operation = 'create' if entity is None else 'update'
    if entity is None:
        # Brand-new entity: attach ownership before flushing.
        entity = Entity()
        entity.project = data.get('project')
        entity.author = data.get('author')
        db.session.add(entity)
    entity.schemata = list(set(data.get('schemata')))
    author = data.get('author')
    seen_names = set()
    for name, prop in data.get('properties').items():
        seen_names.add(name)
        prop['name'] = name
        prop['author'] = author
        properties_logic.save(entity, prop)
    # De-activate any property not re-asserted in this payload.
    for prop in entity.properties:
        if prop.name not in seen_names:
            prop.active = False
    db.session.flush()
    _entity_changed.delay(entity.id, operation)
    return entity
def apply_alias(project, author, canonical_name, alias_name):
    """ Given two names, find out if there are existing entities for
    one or both of them. If so, merge them into a single entity - or,
    if only the entity associated with the alias exists - re-name the
    entity.

    :param project: the Project both entities belong to.
    :param author: the account recorded as author of the rename.
    :param canonical_name: the name that should end up active.
    :param alias_name: the name to be folded into the canonical entity.
    """
    canonical_name = canonical_name.strip()
    # Don't import meaningless aliases.
    # NOTE(review): alias_name is not stripped the same way - confirm
    # whether callers pre-normalise it.
    if canonical_name == alias_name or not len(canonical_name):
        return log.info("No alias: %s", canonical_name)
    canonical = Entity.by_name(project, canonical_name)
    alias = Entity.by_name(project, alias_name)
    schema = Schema.by_name(project, 'base')
    # Don't artificially increase entity counts.
    if canonical is None and alias is None:
        return log.info("Neither alias nor canonical exist: %s", canonical_name)
    # Rename an alias to its new, canonical name.
    if canonical is None:
        properties_logic.set(alias, author, 'name', schema, canonical_name,
                             active=True, source_url=None)
        _entity_changed.delay(alias.id)
        return log.info("Renamed: %s", alias_name)
    # Already done, thanks.
    if canonical == alias:
        return log.info("Already aliased: %s", canonical_name)
    # Merge two existing entities, declare one as "same_as"
    # NOTE(review): when canonical exists but alias is None, no branch
    # fires and the function silently returns - possibly the alias name
    # should be attached to the canonical entity here; confirm.
    if canonical is not None and alias is not None:
        _merge_entities(alias, canonical)
        _entity_changed.delay(canonical.id)
        return log.info("Mapped: %s -> %s", alias.id, canonical.id)
def save(data, files=None, entity=None):
    """ Save or update an entity. Creates a new Entity when none is
    given, then (re-)applies the schema and properties from `data`. """
    data = validate(data, entity)
    operation = 'create' if entity is None else 'update'
    if entity is None:
        # Brand-new entity: attach ownership before flushing.
        entity = Entity()
        entity.project = data.get('project')
        entity.author = data.get('author')
        db.session.add(entity)
    entity.schema = data.get('schema')
    author = data.get('author')
    seen_names = set()
    for name, prop in data.get('properties').items():
        seen_names.add(name)
        prop['project'] = entity.project
        prop['name'] = name
        prop['author'] = author
        properties_logic.save(entity, prop, files=files)
    # De-activate any property not re-asserted in this payload.
    for prop in entity.properties:
        if prop.name not in seen_names:
            prop.active = False
    db.session.flush()
    _entity_changed.delay(entity.id, operation)
    return entity
def setUp(self):
    """ Build a fresh test app and seed one hidden private-project
    entity that must never leak into test results. """
    self.app = make_test_app()
    Entity.all().delete()
    # Consistently include an extra private project with Entity
    # that should not show in any test results
    project, permission = _project_and_permission(private=True)
    hidden = Entity(project=project, status=authz.PUBLISHED_THRESHOLD)
    db.session.add(hidden)
def decode(self, node, cstruct):
    """ Coerce `cstruct` - an Entity instance, an entity ID string, or
    a dict carrying an 'id' key - into an Entity belonging to this
    decoder's project. Returns None when nothing matches. """
    if isinstance(cstruct, Entity):
        if cstruct.project == self.project:
            return cstruct
    if isinstance(cstruct, basestring):
        entity = Entity.by_id(cstruct)
        # by_id returns None for unknown IDs; previously this raised
        # AttributeError on entity.project.
        if entity is not None and entity.project == self.project:
            return entity
    if isinstance(cstruct, dict):
        if cstruct.get('id'):
            entity = Entity.by_id(cstruct.get('id'))
            if entity is not None and entity.project == self.project:
                return entity
    return None
def test_entity_manage__unmanageable(self):
    """ A relation in a project without admin rights is unmanageable. """
    project, permission = _project_and_permission(admin=False)
    source = Entity(project=project, status=authz.PUBLISHED_THRESHOLD)
    target = Entity(project=project, status=authz.PUBLISHED_THRESHOLD)
    db.session.add(source)
    db.session.add(target)
    rel = Relation(source=source, target=target)
    db.session.add(rel)
    db.session.commit()
    with self.app.test_request_context():
        flask.session['id'] = 1
        self.app.preprocess_request()
        self.assertFalse(authz.relation_manage(rel))
def update(id):
    """ Update an existing entity from the request payload and return
    the saved version as JSON. """
    entity = object_or_404(Entity.by_id(id))
    authz.require(authz.entity_edit(entity))
    payload = request_data({'author': request.account})
    saved = entities.save(payload, files=request.files, entity=entity)
    db.session.commit()
    return jsonify(saved)
def rebuild():
    """ Execute the change processing handlers for all entities and
    relations currently loaded. This can be used as a housekeeping
    function. """
    for project in Project.all():
        # Cycle each project and schema handler through delete/create.
        _project_changed(project.slug, 'delete')
        _project_changed(project.slug, 'create')
        for schema in project.schemata:
            _schema_changed(schema.project.slug, schema.name, 'delete')
            _schema_changed(schema.project.slug, schema.name, 'create')
        entity_query = Entity.all().filter_by(same_as=None)
        entity_query = entity_query.filter_by(project=project)
        for num, entity in enumerate(entity_query):
            if num > 0 and num % 1000 == 0:
                log.info("Rebuilt: %s entities", num)
            _entity_changed(entity.id, 'delete')
            _entity_changed(entity.id, 'create')
        relation_query = Relation.all().filter_by(project=project)
        for num, relation in enumerate(relation_query):
            if num > 0 and num % 1000 == 0:
                log.info("Rebuilt: %s relation", num)
            _relation_changed(relation.id, 'delete')
            _relation_changed(relation.id, 'create')
def save(self): """ Save the relation to the database. Do this only once, after all properties have been set. """ # fetch existing: q = Relation.all() q = q.filter(Relation.project == self.loader.project) q = q.filter(Relation.source == self.source.entity) q = q.filter(Relation.target == self.target.entity) for name, only_active in self.update_criteria: value = self.properties.get(name).get('value') attr = self.loader.project.get_attribute('relation', name) q = Entity._filter_property(q, [attr], value, only_active=only_active) relation = q.first() try: data = { 'project': self.loader.project, 'author': self.loader.account, 'schema': self.schemata.pop(), 'properties': self.properties, 'source': self.source.entity, 'target': self.target.entity } self._relation = relations.save(data, relation=relation) except Invalid, inv: if not self.loader.ignore_errors: raise log.warning("Validation error: %r", inv.asdict())
def decode(self, node, cstruct):
    """ Coerce `cstruct` - an Entity instance, an entity ID string, or
    a dict carrying an 'id' key - into an Entity. When self.project is
    None (dict form), any entity is accepted; otherwise the entity must
    belong to that project. Returns None when nothing matches. """
    if isinstance(cstruct, Entity):
        if cstruct.project == self.project:
            return cstruct
    if isinstance(cstruct, basestring):
        entity = Entity.by_id(cstruct)
        # by_id returns None for unknown IDs; the dict branch below
        # already guarded this, the string branch did not.
        if entity is not None and entity.project == self.project:
            return entity
    if isinstance(cstruct, dict):
        if cstruct.get('id'):
            entity = Entity.by_id(cstruct.get('id'))
            if self.project is None:
                return entity
            if entity is not None and entity.project == self.project:
                return entity
    return None
def update(id):
    """ Update an existing entity from the request payload and return
    its REST representation. """
    entity = object_or_404(Entity.by_id(id))
    authz.require(authz.project_edit(entity.project))
    payload = request_data({'author': request.account})
    entity = entities.save(payload, entity=entity)
    db.session.commit()
    return jsonify(entities.to_rest(entity))
def save(self): """ Save the relation to the database. Do this only once, after all properties have been set. """ # fetch existing: q = Relation.all() q = q.filter(Relation.project==self.loader.project) q = q.filter(Relation.source==self.source.entity) q = q.filter(Relation.target==self.target.entity) for name, only_active in self.update_criteria: value = self.properties.get(name).get('value') q = Entity._filter_property(q, name, value, only_active=only_active) relation = q.first() try: data = { 'project': self.loader.project, 'author': self.loader.account, 'schema': self.schemata.pop(), 'properties': self.properties, 'source': self.source.entity, 'target': self.target.entity } self._relation = relations.save(data, relation=relation) except Invalid, inv: log.warning("Validation error: %r", inv)
def graph(id):
    """ Return the graph neighbourhood of an entity as GEXF XML or
    JSON, depending on the requested format. """
    entity = object_or_404(Entity.by_id(id))
    extractor = GraphExtractor(root_id=entity.id)
    validate_cache(keys=extractor.to_hash())
    if extractor.format != 'gexf':
        return jsonify(extractor.to_dict())
    return Response(extractor.to_gexf(), mimetype='text/xml')
def index_single(entity_id):
    """ Index a single entity into ElasticSearch. """
    entity = Entity.by_id(entity_id)
    # Guard against stale queue entries referring to deleted entities;
    # previously this raised AttributeError on entity.same_as.
    if entity is None:
        return
    # Merged entities are indexed under their canonical record only.
    if entity.same_as is not None:
        return
    log.debug("Indexing: %s", entity['name'].value)
    body = entities.to_index(entity)
    es.index(index=es_index, doc_type='entity', id=body.pop('id'), body=body)
def save(data, entity=None):
    """ Save or update an entity; creates a new Entity when none is
    given, then re-applies schemata and properties from `data`. """
    data = validate(data)
    if entity is None:
        # Brand-new entity: attach ownership before flushing.
        entity = Entity()
        entity.project = data.get('project')
        entity.author = data.get('author')
        db.session.add(entity)
    unique_schemata = set(data.get('schemata'))
    entity.schemata = list(unique_schemata)
    properties_logic.set_many(entity, data.get('author'),
                              data.get('properties'))
    db.session.flush()
    _entity_changed.delay(entity.id)
    return entity
def entity_changed(self, entity_id, operation):
    """ Propagate an entity change into the search index: drop the
    document on delete, otherwise re-index it. """
    if operation == 'delete':
        es.delete(index=es_index, doc_type='entity', id=entity_id)
    else:
        obj = Entity.by_id(entity_id)
        if obj is None:
            # Entity vanished between the event and now; nothing to
            # refresh either.
            return
        self.index_entity(obj)
    es.indices.refresh(index=es_index)
def test_public_project_entity_read__publish_status(self):
    """ Published entities in a public project are readable. """
    project, permission = _project_and_permission()
    ent = Entity(project=project, status=authz.PUBLISHED_THRESHOLD)
    db.session.add(ent)
    db.session.commit()
    with self.app.test_request_context():
        flask.session['id'] = 1
        self.app.preprocess_request()
        self.assertTrue(authz.entity_read(ent))
def test_public_project_entity_read__default_status(self):
    """ Default-status entities are not readable, even when public. """
    project, permission = _project_and_permission()
    ent = Entity(project=project)
    db.session.add(ent)
    db.session.commit()
    with self.app.test_request_context():
        flask.session['id'] = 1
        self.app.preprocess_request()
        self.assertFalse(authz.entity_read(ent))
def test_private_project_entity_read__draft_status_admin(self):
    """ Admins may read draft entities in a private project. """
    project, permission = _project_and_permission(private=True, admin=True)
    draft = Entity(project=project, status=authz.PUBLISHED_THRESHOLD - 1)
    db.session.add(draft)
    db.session.commit()
    with self.app.test_request_context():
        flask.session['id'] = 1
        self.app.preprocess_request()
        self.assertTrue(authz.entity_read(draft))
def test_all_entities__not_private_published(self):
    """ Published entities of a public project show up in listings. """
    project, permission = _project_and_permission(private=False)
    ent = Entity(project=project, status=authz.PUBLISHED_THRESHOLD)
    db.session.add(ent)
    db.session.commit()
    with self.app.test_request_context():
        flask.session['id'] = 1
        self.app.preprocess_request()
        query = db.session.query(Entity)
        self.assertEqual(filters.for_entities(query, Entity).count(), 1)
def graph(id):
    """ Return the graph neighbourhood of an entity, enforcing read
    access, as GEXF XML or JSON depending on the requested format. """
    entity = object_or_404(Entity.by_id(id))
    authz.require(authz.entity_read(entity))
    wanted_props = request.args.getlist('entity_property')
    extractor = GraphExtractor(root_id=entity.id,
                               entity_properties=wanted_props)
    validate_cache(keys=extractor.to_hash())
    if extractor.format != 'gexf':
        return jsonify(extractor)
    return Response(extractor.to_gexf(), mimetype='text/xml')
def index():
    """ Paginated entity listing, filterable by schema name(s). """
    query = filter_query(Entity, Entity.all(), request.args)
    for schema in request.args.getlist('schema'):
        # Each 'schema' arg may hold a comma-separated list of names.
        alias = aliased(Schema)
        query = query.join(alias, Entity.schemata)
        query = query.filter(alias.name.in_(schema.split(',')))
    pager = Pager(query)

    def convert(items):
        return [entities.to_rest_index(e) for e in items]

    return jsonify(pager.to_dict(convert))
def test_all_entities__private_reader_draft(self):
    """ Readers must not see draft entities of a private project. """
    project, permission = _project_and_permission(reader=True, private=True)
    draft = Entity(project=project, status=authz.PUBLISHED_THRESHOLD - 1)
    db.session.add(draft)
    db.session.commit()
    with self.app.test_request_context():
        flask.session['id'] = 1
        self.app.preprocess_request()
        query = db.session.query(Entity)
        self.assertEqual(filters.for_entities(query, Entity).count(), 0)
def search():
    # TODO: move to be project-specific, the implement access control!
    """ Full-text entity search backed by ElasticSearch, with optional
    project filtering and facet output. """
    searcher = ESSearcher(request.args)
    if 'project' in request.args:
        searcher.add_filter('project.slug', request.args.get('project'))
    pager = Pager(searcher)

    # TODO: get all entities at once:
    def convert(results):
        return [entities.to_rest_index(Entity.by_id(r.get('id')))
                for r in results]

    data = pager.to_dict(results_converter=convert)
    data['facets'] = searcher.facets()
    return jsonify(data)
def export_aliases(project, path):
    """ Dump a list of all entity names to a CSV file. The table will
    contain the active name of each entity, and one of the other
    existing names as an alias. """
    columns = ['entity_id', 'alias', 'canonical', 'schemata']
    with open(path, 'w') as fh:
        writer = DictWriter(fh, columns)
        writer.writeheader()
        query = Entity.all().filter_by(same_as=None)
        query = query.filter(Entity.project == project)
        for num, entity in enumerate(query):
            export_entity(entity, writer)
            if num % 100 == 0:
                log.info("Dumped %s entity names...", num)
def export_aliases(project, path):
    """ Dump a list of all entity names to a CSV file. The table will
    contain the active name of each entity, and one of the other
    existing names as an alias. """
    with open(path, 'w') as fh:
        writer = DictWriter(fh,
                            ['entity_id', 'alias', 'canonical', 'schemata'])
        writer.writeheader()
        # Canonical entities only - merged records are skipped.
        query = Entity.all().filter_by(same_as=None)
        query = query.filter(Entity.project == project)
        count = 0
        for entity in query:
            export_entity(entity, writer)
            if count % 100 == 0:
                log.info("Dumped %s entity names...", count)
            count += 1
def index_project(self, project=None):
    """ Index an entire project, or the entire database if no project
    is given. """
    query = Entity.all().filter_by(same_as=None)
    if project is not None:
        query = query.filter(Entity.project == project)
    for num, entity in enumerate(query):
        self.index_entity(entity)
        # Refresh periodically so progress is visible in the index.
        if num > 0 and num % 1000 == 0:
            log.info("Indexed: %s entities", num)
            es.indices.refresh(index=es_index)
    es.indices.refresh(index=es_index)
def rebuild():
    """ Execute the change processing handlers for all entities and
    relations currently loaded. This can be used as a housekeeping
    function. """
    canonical_entities = Entity.all().filter_by(same_as=None)
    for num, entity in enumerate(canonical_entities):
        if num > 0 and num % 1000 == 0:
            log.info("Rebuilt: %s entities", num)
        _entity_changed(entity.id)
    for num, relation in enumerate(Relation.all()):
        if num > 0 and num % 1000 == 0:
            log.info("Rebuilt: %s relation", num)
        _relation_changed(relation.id)
def save(self):
    """ Persist the entity; call this only once, after every property
    has been set on the loader. """
    # Try to locate an existing entity via the update criteria.
    query = Entity.all()
    query = query.filter(Entity.project == self.loader.project)
    for name, only_active in self.update_criteria:
        prop_value = self.properties.get(name).get('value')
        query = Entity._filter_property(query, name, prop_value,
                                        only_active=only_active)
    existing = query.first()
    try:
        self._entity = entities.save({
            'project': self.loader.project,
            'author': self.loader.account,
            'schema': self.schema,
            'properties': self.properties
        }, entity=existing)
    except Invalid as inv:
        # Surface validation failures unless the loader tolerates them.
        if not self.loader.ignore_errors:
            raise
        log.warning("Validation error: %r", inv.asdict())
def index_entities():
    """ Re-build the search index for all entities from scratch. """
    for i, entity in enumerate(Entity.all().filter_by(same_as=None)):
        body = entities.to_index(entity)
        # Entities without a name cannot be shown in search results.
        if 'name' not in body:
            log.warn('No name: %s, skipping!', entity.id)
            continue
        es.index(index=es_index, doc_type='entity',
                 id=body.pop('id'), body=body)
        if i > 0 and i % 1000 == 0:
            log.info("Indexed: %s entities", i)
            es.indices.refresh(index=es_index)
    es.indices.refresh(index=es_index)
def index_entities():
    """ Re-build the search index for all entities from scratch. """
    for i, entity in enumerate(Entity.all().filter_by(same_as=None)):
        body = entities.to_index(entity)
        # Entities without a 'name' property cannot be shown in search
        # results.
        if 'name' not in body.get('properties', {}):
            log.warn('No name: %s, skipping!', entity.id)
            continue
        es.index(index=es_index, doc_type='entity',
                 id=body.pop('id'), body=body)
        if i > 0 and i % 1000 == 0:
            log.info("Indexed: %s entities", i)
            es.indices.refresh(index=es_index)
    es.indices.refresh(index=es_index)
def merge(orig, dest):
    """ Copy all properties and relations from one entity onto another,
    then mark the source entity as an ID alias for the destination
    entity. Returns the surviving (destination) entity. """
    # Merging an entity into itself is a no-op.
    if orig.id == dest.id:
        return orig
    # Already aliased in either direction - nothing to do.
    if dest.same_as == orig.id:
        return orig
    if orig.same_as == dest.id:
        return dest
    if dest.same_as is not None:
        # potential infinite recursion here.
        resolved_dest = Entity.by_id(dest.same_as)
        if resolved_dest is not None:
            return merge(orig, resolved_dest)
    # Union of both schemata lists, de-duplicated by schema ID.
    schemata, seen_schemata = list(), set()
    for schema in dest.schemata + orig.schemata:
        if schema.id in seen_schemata:
            continue
        seen_schemata.add(schema.id)
        schemata.append(schema)
    dest.schemata = schemata
    # Move properties across; de-activate any that would shadow an
    # already-active property on the destination.
    dest_active = [p.name for p in dest.active_properties]
    for prop in orig.properties:
        if prop.name in dest_active:
            prop.active = False
        prop.entity = dest
    # Re-point all relations at the destination entity.
    for rel in orig.inbound:
        rel.target = dest
    for rel in orig.outbound:
        rel.source = dest
    # Record the alias link and make sure the destination is canonical.
    orig.same_as = dest.id
    dest.same_as = None
    db.session.flush()
    _entity_changed.delay(dest.id, 'update')
    _entity_changed.delay(orig.id, 'delete')
    return dest
def merge(source, dest):
    """ Copy all properties and relations from one entity onto another,
    then mark the source entity as an ID alias for the destination
    entity. Returns the surviving (destination) entity. """
    # Merging an entity into itself is a no-op.
    if source.id == dest.id:
        return source
    # Already aliased in either direction - nothing to do.
    if dest.same_as == source.id:
        return source
    if source.same_as == dest.id:
        return dest
    if dest.same_as is not None:
        # potential infinite recursion here.
        canonical = Entity.by_id(dest.same_as)
        if canonical is not None:
            return merge(source, canonical)
    # Pick the more specific schema: adopt the source's schema if it
    # specialises the destination's, otherwise fall back to their
    # common parent for validity checks below.
    source_schema = source.schema
    if dest.schema.is_parent(source_schema):
        dest.schema = source_schema
    else:
        source_schema = dest.schema.common_parent(source_schema)
    source_valid = [a.name for a in source_schema.attributes]
    dest_active = [p.name for p in dest.active_properties]
    for prop in source.properties:
        if prop.name in source_valid:
            # Keep the property, but don't shadow an active one.
            if prop.name in dest_active:
                prop.active = False
            prop.entity = dest
        else:
            # Property not valid under the merged schema - drop it.
            properties_logic.delete(prop)
    # Re-point all relations at the destination entity.
    for rel in source.inbound:
        rel.target = dest
    for rel in source.outbound:
        rel.source = dest
    source.same_as = dest.id
    db.session.flush()
    _entity_changed.delay(dest.id, 'update')
    _entity_changed.delay(source.id, 'delete')
    return dest
def generate_sitemap(count=40000):
    """ Generate a static sitemap.xml for the most central entities in
    the database. """
    PATTERN = app.config.get('ENTITY_VIEW_PATTERN')
    entries = []
    for num, entity in enumerate(Entity.all().yield_per(5000)):
        updated = entity.updated_at.strftime('%Y-%m-%d')
        entries.append((PATTERN % entity.id, updated, entity.degree))
        if num > 0 and num % 1000 == 0:
            log.info("Loaded %s entities...", num)
    # Keep only the `count` highest-degree entities; priority is the
    # degree scaled into [0.3, 1.0] relative to the maximum.
    upper = max([e[2] for e in entries])
    entries = sorted(entries, key=lambda e: e[2], reverse=True)[:count]
    entries = [(url, day, '%.2f' % max(0.3, ((float(score) ** 0.3) / upper)))
               for (url, day, score) in entries]
    xml = render_template('sitemap.xml', entities=entries)
    with open(os.path.join(app.static_folder, 'sitemap.xml'), 'w') as fh:
        fh.write(xml)
def generate_sitemap(count=40000):
    """ Generate a static sitemap.xml for the most central entities in
    the database. """
    PATTERN = app.config.get('ENTITY_VIEW_PATTERN')
    rows = []
    for num, entity in enumerate(Entity.all().yield_per(5000)):
        day = entity.updated_at.strftime('%Y-%m-%d')
        rows.append((PATTERN % entity.id, day, entity.degree))
        if num > 0 and num % 1000 == 0:
            log.info("Loaded %s entities...", num)
    # Priority: degree scaled into [0.3, 1.0] relative to the maximum,
    # keeping only the `count` highest-degree entities.
    upper = max([row[2] for row in rows])
    rows = sorted(rows, key=lambda row: row[2], reverse=True)[:count]
    rows = [(url, day, '%.2f' % max(0.3, ((float(score) ** 0.3) / upper)))
            for (url, day, score) in rows]
    xml = render_template('sitemap.xml', entities=rows)
    with open(os.path.join(app.static_folder, 'sitemap.xml'), 'w') as fh:
        fh.write(xml)
def index():
    """ Paginated entity listing with free-text name search and
    schema filtering; merged entities are excluded. """
    query = filter_query(Entity, Entity.all(), request.args)
    if 'q' in request.args and len(request.args.get('q').strip()):
        # Substring match against the 'name' property.
        pattern = '%%%s%%' % request.args.get('q').strip()
        query = query.join(EntityProperty)
        query = query.filter(EntityProperty.name == 'name')
        query = query.filter(EntityProperty.value_string.ilike(pattern))
    for schema in request.args.getlist('schema'):
        if not len(schema.strip()):
            continue
        alias = aliased(Schema)
        query = query.join(alias, Entity.schemata)
        query = query.filter(alias.name.in_(schema.split(',')))
    # SQLAlchemy requires '== None' here (overloaded operator).
    query = query.filter(Entity.same_as == None)
    query = query.distinct()
    pager = Pager(query)
    validate_cache(keys=pager.cache_keys())
    return jsonify(pager, index=True)
def apply_alias(project, author, canonical_name, alias_name, source_url=None):
    """ Given two names, find out if there are existing entities for one or
    both of them. If so, merge them into a single entity - or, if only the
    entity associated with the alias exists - re-name the entity.

    :param project: the Project to operate within.
    :param author: recorded as the author of new name properties.
    :param canonical_name: the name that should end up active.
    :param alias_name: the name to be folded into the canonical entity.
    :param source_url: provenance attached to any created property.
    """
    # Don't import meaningless aliases.
    if not len(canonical_name) or not len(alias_name):
        return log.info("Not an alias: %s", canonical_name)
    canonical = None
    # de-duplicate existing entities with the same name.
    known_names = set()
    for existing in Entity.by_name_many(project, canonical_name):
        for prop in existing.properties:
            if prop.name != 'name':
                continue
            known_names.add(prop.value)
            # make sure the canonical name is actually active
            if prop.value == canonical_name:
                prop.active = True
            else:
                prop.active = False
        # Fold duplicate canonical-name entities into one record.
        if canonical is not None and canonical.id != existing.id:
            canonical = merge(existing, canonical)
        else:
            canonical = existing
    # Find aliases, i.e. entities with the alias name which are not
    # the canonical entity.
    q = Entity.by_name_many(project, alias_name)
    if canonical is not None:
        q = q.filter(Entity.id != canonical.id)
    aliases = q.all()
    # If there are no existing aliases with that name, add the alias
    # name to the canonical entity.
    if not len(aliases) and canonical is not None:
        if alias_name not in known_names:
            data = {
                'value': alias_name,
                'active': False,
                'name': 'name',
                'source_url': source_url
            }
            properties_logic.save(canonical, data)
            _entity_changed.delay(canonical.id, 'update')
            log.info("Alias: %s -> %s", alias_name, canonical_name)
    for alias in aliases:
        if canonical is None:
            # Rename an alias to its new, canonical name.
            # NOTE(review): canonical stays None across iterations, so
            # multiple alias entities would each be renamed rather than
            # merged into one - confirm this is intended.
            data = {
                'value': canonical_name,
                'active': True,
                'name': 'name',
                'source_url': source_url
            }
            properties_logic.save(alias, data)
            _entity_changed.delay(alias.id, 'update')
            log.info("Renamed: %s -> %s", alias_name, canonical_name)
        else:
            # Merge two existing entities, declare one as "same_as"
            merge(alias, canonical)
            log.info("Mapped: %s -> %s", alias.id, canonical.id)
    db.session.commit()
def view(id):
    """ Return a single entity as a REST resource. """
    entity = object_or_404(Entity.by_id(id))
    # Reading an entity requires read access to its project.
    authz.require(authz.project_read(entity.project))
    data = entities.to_rest(entity)
    return jsonify(data)
def entities_index(obj_id):
    """ List relation-bound properties attached to the given entity. """
    query = Property.all()
    # Restrict to properties that belong to a relation (SQLAlchemy
    # requires the '!= None' comparison form here).
    query = query.filter(Property.relation_id != None)
    # NOTE(review): obj is not wrapped in object_or_404, so a missing
    # entity passes None into _index - confirm _index handles that.
    obj = Entity.by_id(obj_id)
    query = query.filter_by(entity_id=obj_id)
    return _index(query, obj)
def delete(id):
    """ Remove an entity and answer with HTTP 410 Gone. """
    entity = object_or_404(Entity.by_id(id))
    # Deletion requires edit access to the entity's project.
    authz.require(authz.project_edit(entity.project))
    entities.delete(entity)
    db.session.commit()
    raise Gone()
def check(name):
    """ True when `name` is unused in the project, or used only by the
    object being validated (closes over `project` and `obj`). """
    existing = Entity.by_name(project, name)
    if existing is None:
        return True
    # The name is taken - acceptable only if taken by `obj` itself.
    return obj is not None and obj.id == existing.id
def apply_alias(project, author, canonical_name, alias_name, source_url=None):
    """ Given two names, find out if there are existing entities for one or
    both of them. If so, merge them into a single entity - or, if only the
    entity associated with the alias exists - re-name the entity.

    :param project: the Project to operate within.
    :param author: recorded as the author of new name properties.
    :param canonical_name: the name that should end up active.
    :param alias_name: the name to be folded into the canonical entity.
    :param source_url: provenance attached to any created property.
    """
    # Don't import meaningless aliases.
    if not len(canonical_name) or not len(alias_name):
        return log.info("Not an alias: %s", canonical_name)
    canonical = None
    # de-duplicate existing entities with the same name.
    known_names = set()
    for existing in Entity.by_name_many(project, canonical_name):
        for prop in existing.properties:
            if prop.name != 'name':
                continue
            known_names.add(prop.value)
            # make sure the canonical name is actually active
            # TODO: is this desirable?
            if prop.value == canonical_name:
                prop.active = True
            else:
                prop.active = False
        # Fold duplicate canonical-name entities into one record.
        if canonical is not None and canonical.id != existing.id:
            canonical = merge(existing, canonical)
        else:
            canonical = existing
    # Name properties are stored under the base schema's 'name'
    # attribute.
    schema = Schema.by_name(project, 'base')
    attribute = schema.get_attribute('name')
    # Find aliases, i.e. entities with the alias name which are not
    # the canonical entity.
    q = Entity.by_name_many(project, alias_name)
    if canonical is not None:
        q = q.filter(Entity.id != canonical.id)
    aliases = q.all()
    # If there are no existing aliases with that name, add the alias
    # name to the canonical entity.
    if not len(aliases) and canonical is not None:
        if alias_name not in known_names:
            data = {
                'value': alias_name,
                'schema': schema,
                'attribute': attribute,
                'active': False,
                'name': 'name',
                'source_url': source_url
            }
            properties_logic.save(canonical, data)
            _entity_changed.delay(canonical.id, 'update')
            log.info("Alias: %s -> %s", alias_name, canonical_name)
    for alias in aliases:
        if canonical is None:
            # Rename an alias to its new, canonical name.
            # NOTE(review): canonical stays None across iterations, so
            # multiple alias entities would each be renamed rather than
            # merged into one - confirm this is intended.
            data = {
                'value': canonical_name,
                'schema': schema,
                'attribute': attribute,
                'active': True,
                'name': 'name',
                'source_url': source_url
            }
            properties_logic.save(alias, data)
            _entity_changed.delay(alias.id, 'update')
            log.info("Renamed: %s -> %s", alias_name, canonical_name)
        else:
            # Merge two existing entities, declare one as "same_as"
            merge(alias, canonical)
            log.info("Mapped: %s -> %s", alias.id, canonical.id)
    db.session.commit()