def delete_relationship(
    db: PartitionedDatabase, record_id: RecordId, relationship_id: RecordRelationshipId
) -> RecordRelationshipId:
    with db.transaction() as tx:
        return db.delete_outgoing_record_relationship_tx(tx, record_id, relationship_id)
def create_concept_instance_relationship(
    db: PartitionedDatabase,
    relationship_id_or_name: Union[ModelRelationshipId, str],
    body: JsonDict,
) -> Tuple[RelationshipInstancePair, int]:
    relationship_to_create_schema = CreateModelRelationship.schema()

    model_relationship_to_create: Optional[CreateModelRelationship] = None
    if body.get("relationshipToCreate"):
        model_relationship_to_create = relationship_to_create_schema.load(
            body["relationshipToCreate"]
        )

    to_create = CreateRecordRelationship(
        from_=body.get("from"),
        to=body.get("to"),
        model_relationship_to_create=model_relationship_to_create,
    )

    with db.transaction() as tx:
        rels = db.create_legacy_record_relationship_batch_tx(
            tx=tx, to_create=[to_create], relation=relationship_id_or_name
        )
        return (
            (
                to_legacy_relationship_instance(rels[0][0]),
                to_legacy_relationship(rels[0][1]),
            ),
            200,
        )
def get_schema_graph(db: PartitionedDatabase) -> List[JsonDict]:
    with db.transaction() as tx:
        structure = db.graph_schema_structure_tx(tx)
        models: List[Model] = structure.models
        model_ids: List[ModelId] = [m.id for m in models]
        property_counts: Dict[ModelId, int] = db.get_property_counts_tx(tx, model_ids)

        one_to_one, one_to_many = partition(
            lambda r: r.one_to_many, structure.relationships
        )

        legacy_models = [
            dict(type="concept", **to_concept_dict(m, property_counts[m.id]))
            for m in models
        ]
        legacy_model_relationships = [
            dict(type="schemaRelationship", **to_legacy_relationship(r))
            for r in one_to_many
        ]
        legacy_schema_linked_properties = [
            to_schema_linked_property(r) for r in one_to_one
        ]

        return (
            legacy_models
            + legacy_model_relationships
            + legacy_schema_linked_properties
        )
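# Illustrative only (the exact field names produced by to_concept_dict /
# to_legacy_relationship / to_schema_linked_property are assumptions): the
# schema graph is returned as one heterogeneous list mixing all three legacy
# element types, roughly of the shape
#   [
#       {"type": "concept", "id": "...", "name": "patient", ...},
#       {"type": "schemaRelationship", "id": "...", "name": "belongs_to", ...},
#       {"id": "...", "name": "primary_sample", ...},  # schema linked property
#   ]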
def create_concept_instance_relationships(
    db: PartitionedDatabase,
    relationship_id_or_name: Union[ModelRelationshipId, str],
    body: List[JsonDict],
) -> List[RelationshipInstancePair]:
    relationship_to_create_schema = CreateModelRelationship.schema()

    to_create = []
    for row in body:
        if row.get("relationshipToCreate"):
            model_relationship_to_create = relationship_to_create_schema.load(
                row["relationshipToCreate"]
            )
        else:
            model_relationship_to_create = None
        to_create.append(
            CreateRecordRelationship(
                from_=row["from"],
                to=row["to"],
                model_relationship_to_create=model_relationship_to_create,
            )
        )

    with db.transaction() as tx:
        rels = db.create_legacy_record_relationship_batch_tx(
            tx=tx, to_create=to_create, relation=relationship_id_or_name
        )
        return [
            (
                to_legacy_relationship_instance(record_relationship),
                to_legacy_relationship(model_relationship),
            )
            for record_relationship, model_relationship in rels
        ]
def update_properties(
    db: PartitionedDatabase, concept_id_or_name: str, body: List[JsonDict]
) -> List[JsonDict]:
    x_bf_trace_id = AuditLogger.trace_id_header()

    with db.transaction() as tx:
        model = db.get_model_tx(tx, concept_id_or_name)
        properties = db.update_properties_tx(
            tx, model, *[to_model_property(p) for p in body]
        )

        # Emit a "CreateModelProperty" or "UpdateModelProperty" event per property:
        PennsieveJobsClient.get().send_changelog_events(
            organization_id=db.organization_id,
            dataset_id=db.dataset_id,
            user_id=db.user_id,
            events=[
                CreateModelProperty(
                    property_name=p.name, model_id=UUID(model.id), model_name=model.name
                )
                if created
                else UpdateModelProperty(
                    property_name=p.name, model_id=UUID(model.id), model_name=model.name
                )
                for p, created in properties
            ],
            trace_id=TraceId(x_bf_trace_id),
        )

        return [to_property_dict(p) for p, _ in properties]
def get_related_concepts(db: PartitionedDatabase, id_or_name: str) -> List[JsonDict]:
    with db.transaction() as tx:
        models = db.get_related_models_tx(tx, id_or_name)
        property_counts = db.get_property_counts_tx(tx, [model.id for model in models])
        return [to_concept_dict(m, property_counts[m.id]) for m in models]
def get_relationship(
    db: PartitionedDatabase, record_id: RecordId, relationship_id: RecordRelationshipId
) -> JsonDict:
    with db.transaction() as tx:
        return db.get_outgoing_record_relationship_tx(
            tx, record_id, relationship_id
        ).to_dict()
def delete_relationship(
    db: PartitionedDatabase, model_id_or_name: str, relationship_id: ModelRelationshipId
) -> None:
    with db.transaction() as tx:
        deleted = db.delete_model_relationship_tx(tx=tx, relationship=relationship_id)
        if deleted is None:
            raise NotFound(f"Could not find relationship [{relationship_id}]")
def get_relationships(db: PartitionedDatabase, model_id_or_name: str) -> List[JsonDict]:
    with db.transaction() as tx:
        return [
            r.to_dict()
            for r in db.get_outgoing_model_relationships_tx(
                tx, from_model=model_id_or_name
            )
        ]
def get_schema_linked_properties(db: PartitionedDatabase, id_: ModelId):
    with db.transaction() as tx:
        return [
            to_schema_linked_property_target(relationship)
            for relationship in db.get_outgoing_model_relationships_tx(
                tx=tx, from_model=id_, one_to_many=False
            )
        ]
def update_properties(
    db: PartitionedDatabase, model_id_or_name: str, body: List[JsonDict]
):
    x_bf_trace_id = AuditLogger.trace_id_header()
    payload: List[ModelProperty] = ModelProperty.schema().load(body, many=True)

    with db.transaction() as tx:
        model = db.get_model_tx(tx, model_id_or_name)
        properties = db.update_properties_tx(tx, model, *payload)

        PennsieveJobsClient.get().send_changelog_events(
            organization_id=db.organization_id,
            dataset_id=db.dataset_id,
            user_id=db.user_id,
            events=[
                CreateModelProperty(
                    property_name=p.name, model_id=UUID(model.id), model_name=model.name
                )
                if created
                else UpdateModelProperty(
                    property_name=p.name, model_id=UUID(model.id), model_name=model.name
                )
                for p, created in properties
            ],
            trace_id=TraceId(x_bf_trace_id),
        )

        return [p.to_dict() for p, _ in properties]
def get_all_concept_instances(
    db: PartitionedDatabase,
    concept_id_or_name: str,
    limit: int,
    offset: int,
    order_by: Optional[str] = None,
    ascending: Optional[bool] = None,
) -> List[JsonDict]:
    with db.transaction() as tx:
        model = db.get_model_tx(tx, concept_id_or_name)
        properties = db.get_properties_tx(tx, concept_id_or_name)

        results = db.get_all_records_offset_tx(
            tx,
            model=model,
            limit=limit,
            offset=offset,
            fill_missing=True,
            order_by=None
            if order_by is None and ascending is None
            else OrderByField(
                name="created_at" if order_by is None else order_by,
                ascending=True if ascending is None else ascending,
            ),
        )

        x_bf_trace_id = AuditLogger.trace_id_header()
        record_ids = []
        instances = []
        for record in results:
            record_ids.append(str(record.id))
            instances.append(to_concept_instance(record, model, properties))

        AuditLogger.get().message().append("records", *record_ids).log(x_bf_trace_id)

        return instances
def create_concept_relationship(
    db: PartitionedDatabase, body: JsonDict
) -> Tuple[JsonDict, int]:
    from_model = body.get("from", None)
    to_model = body.get("to", None)
    name = body["name"]
    display_name = body["display_name"]
    description = body["description"]

    with db.transaction() as tx:
        if from_model is None and to_model is None:
            relationship = to_legacy_relationship(
                db.create_model_relationship_stub_tx(
                    tx, name=name, display_name=display_name, description=description
                )
            )
        else:
            relationship = to_legacy_relationship(
                db.create_model_relationship_tx(
                    tx,
                    from_model=from_model,
                    name=name,
                    display_name=display_name,
                    to_model=to_model,
                    one_to_many=True,
                    description=description,
                )
            )
        return relationship, 201
def create_concept_instance(
    db: PartitionedDatabase, concept_id_or_name: str, body: JsonDict
):
    with db.transaction() as tx:
        model = db.get_model_tx(tx, concept_id_or_name)
        properties = db.get_properties_tx(tx, concept_id_or_name)
        record = to_record(properties, body["values"])
        records = db.create_records_tx(
            tx, concept_id_or_name, [record], fill_missing=True
        )
        if not records:
            raise BadRequest(
                f"Could not create concept instance [{concept_id_or_name}]"
            )
        record = records[0]

        # Log the created concept instance:
        x_bf_trace_id = AuditLogger.trace_id_header()

        # Emit "CreateRecord" event:
        PennsieveJobsClient.get().send_changelog_event(
            organization_id=db.organization_id,
            dataset_id=db.dataset_id,
            user_id=db.user_id,
            event=CreateRecord(id=record.id, name=record.name, model_id=model.id),
            trace_id=TraceId(x_bf_trace_id),
        )

        AuditLogger.get().message().append("records", str(record.id)).log(x_bf_trace_id)

        return to_concept_instance(record, model, properties), 201
def delete_relationships(
    db: PartitionedDatabase, record_id: RecordId, body: JsonDict
) -> Tuple[List[JsonDict], int]:
    if len(body) == 0:
        return [], 200

    response = []

    # TODO: batch these database calls
    with db.transaction() as tx:
        for record_relationship_id in body:
            try:
                db.delete_outgoing_record_relationship_tx(
                    tx, record_id, record_relationship_id
                )
            except RecordRelationshipNotFoundError:
                record_status = 404
            except Exception:
                record_status = 500
            else:
                record_status = 200
            response.append({"id": record_relationship_id, "status": record_status})

    # The 207 (Multi-Status) response allows parts of a batch request to
    # fail/succeed individually. If all items succeed or all fail with the
    # same status, use that status for the overall response. Otherwise,
    # return 207.
    # See https://tools.ietf.org/html/rfc4918#section-13
    unique_status = set(r["status"] for r in response)
    if len(unique_status) == 1:
        response_status = unique_status.pop()
    else:
        response_status = 207

    return response, response_status
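# Illustrative only (IDs are hypothetical): deleting ["rr-1", "rr-2"] where
# "rr-2" does not exist would yield
#   ([{"id": "rr-1", "status": 200}, {"id": "rr-2", "status": 404}], 207)
# whereas a batch in which every item ends up with the same status returns
# that status (e.g. 200 or 404) as the overall response code instead of 207.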
def delete_linked_property(
    db: PartitionedDatabase,
    concept_id: ModelId,
    id_: RecordId,
    link_id: RecordRelationshipId,
):
    with db.transaction() as tx:
        return db.delete_outgoing_record_relationship_tx(tx, id_, link_id)
def get_concept_instance_relationship(
    db: PartitionedDatabase,
    relationship_id_or_name: ModelRelationshipId,  # unused
    id_: RecordRelationshipId,
):
    with db.transaction() as tx:
        return to_legacy_relationship_instance(
            db.get_record_relationship_tx(tx, relationship_id_or_name, id_)
        )
def create_linked_properties(
    db: PartitionedDatabase, concept_id: ModelId, id_: RecordId, body: Dict
):
    with db.transaction() as tx:
        # Map of model relationship id -> relationship.
        # Allows us to make sure the model relationships exist.
        model_relationship_map = {
            model_relationship.id: model_relationship
            for model_relationship in db.get_outgoing_model_relationships_tx(
                tx=tx, from_model=concept_id, one_to_many=False
            )
        }

        # Map of model relationship id -> existing record relationship,
        # so we can delete existing relationships such that this behaves as an UPSERT.
        existing_record_relationship_map = {
            record_relationship.model_relationship_id: record_relationship
            for record_relationship in db.get_outgoing_record_relationships_tx(
                tx=tx, from_record=id_, one_to_many=False
            )
        }

        payload: List[Dict[str, str]] = list(body["data"])

        # Ensure each request item is for a unique schemaLinkedProperty:
        if len(payload) != len(set(p["schemaLinkedPropertyId"] for p in payload)):
            raise BadRequest("duplicate model linked properties")

        to_create: List[CreateRecordRelationship] = []

        for item in payload:
            model_relationship_id = item["schemaLinkedPropertyId"]
            if model_relationship_id not in model_relationship_map:
                raise BadRequest(
                    f"no model linked property exists for {item['schemaLinkedPropertyId']}"
                )
            model_relationship = model_relationship_map[model_relationship_id]

            if model_relationship_id in existing_record_relationship_map:
                db.delete_outgoing_record_relationship_tx(
                    tx=tx,
                    record=id_,
                    relationship=existing_record_relationship_map[model_relationship_id],
                )

            to_create.append(
                CreateRecordRelationship(
                    from_=id_, to=item["to"], model_relationship=model_relationship
                )
            )

        result = db.create_record_relationship_batch_tx(tx=tx, to_create=to_create)
        data = [to_linked_property(r) for r, _ in result]
        return {"data": data}
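# Illustrative only (IDs are hypothetical): a request body such as
#   {"data": [{"schemaLinkedPropertyId": "slp-1", "to": "rec-42"}]}
# where "slp-1" already links this record to some other record first deletes
# the existing record relationship and then creates the new one, so the
# endpoint behaves as an upsert per linked property.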
def get_relationships(
    db: PartitionedDatabase,
    record_id: RecordId,
    relationship_type: Optional[str] = None,
) -> List[JsonDict]:
    with db.transaction() as tx:
        relationships = db.get_outgoing_record_relationships_tx(
            tx=tx, from_record=record_id, relationship_name=relationship_type
        )
        return [rel.to_dict() for rel in relationships]
def get_concept_instance(
    db: PartitionedDatabase, concept_id_or_name: str, concept_instance_id: str
) -> JsonDict:
    with db.transaction() as tx:
        model = db.get_model_tx(tx, concept_id_or_name)
        properties = db.get_properties_tx(tx, concept_id_or_name)
        record = db.get_record_tx(tx, concept_instance_id, fill_missing=True)
        if record is None:
            raise NotFound(f"Could not get record {concept_instance_id}")
        return to_concept_instance(record, model, properties)
def get_linked_properties(db: PartitionedDatabase, concept_id: ModelId, id_: RecordId):
    # Ignore the ID of the model/concept, as it's not needed to actually
    # fetch the linked property:
    with db.transaction() as tx:
        return [
            to_linked_property(rr)
            for rr in db.get_outgoing_record_relationships_tx(
                tx=tx, from_record=id_, one_to_many=False
            )
        ]
def delete_concept_instance_relationship(
    db: PartitionedDatabase,
    relationship_id_or_name: ModelRelationshipId,  # unused
    id_: RecordRelationshipId,
) -> RecordRelationshipId:
    with db.transaction() as tx:
        deleted = db.delete_record_relationships_tx(tx, id_)
        if deleted is None or len(deleted) == 0:
            raise NotFound(f"Could not delete record relationship [{id_}]")
        return deleted[0]
def create_proxy_instance(
    db: PartitionedDatabase, proxy_type: str, body: JsonDict
) -> Tuple[List[JsonDict], int]:
    response = []

    with db.transaction() as tx:
        x_bf_trace_id = AuditLogger.trace_id_header()
        link_targets = []
        package_ids = []

        for target in body["targets"]:
            link_target = target["linkTarget"]
            relationship_type = target["relationshipType"]
            link_targets.append(link_target)

            concept_link_target = to_proxy_link_target(link_target)
            if concept_link_target is None:
                raise InvalidPackageProxyLinkTargetError(link_target=str(body))

            package = PennsieveApiClient.get().get_package_ids(
                db.dataset_node_id,
                body["external_id"],
                headers=dict(**auth_header(), **with_trace_id_header(x_bf_trace_id)),
            )
            package_ids.append(str(package.id))

            package_proxy = db.create_package_proxy_tx(
                tx=tx,
                record=concept_link_target.id,
                package_id=package.id,
                package_node_id=package.node_id,
                legacy_relationship_type=relationship_type,
            )

            link_result = {
                "proxyInstance": to_proxy_instance(PROXY_TYPE, package_proxy),
                "relationshipInstance": make_proxy_relationship_instance(
                    concept_link_target.id, package_proxy, relationship_type
                ),
            }
            response.append(link_result)

        AuditLogger.get().message().append("link-targets", *link_targets).append(
            "packages", *package_ids
        ).log(x_bf_trace_id)

    return response, 201
def get_records_related_to_package(
    db: PartitionedDatabase,
    proxy_type: str,
    package_id: str,
    concept_id_or_name: str,
    limit: Optional[int] = None,
    offset: Optional[int] = None,
    relationship_order_by: Optional[str] = None,
    record_order_by: Optional[str] = None,
    ascending: bool = False,
) -> List[JsonDict]:
    with db.transaction() as tx:
        x_bf_trace_id = AuditLogger.trace_id_header()
        model = db.get_model_tx(tx, concept_id_or_name)
        properties = db.get_properties_tx(tx, concept_id_or_name)

        results = []
        package_proxy_ids = []
        record_ids = []

        for pp, r in db.get_records_related_to_package_tx(
            tx=tx,
            package_id=PackageNodeId(package_id),
            related_model_id_or_name=concept_id_or_name,
            limit=limit,
            offset=offset,
            relationship_order_by=relationship_order_by,
            record_order_by=record_order_by,
            ascending=ascending,
        ):
            package_proxy_ids.append(str(pp.id))
            record_ids.append(str(r.id))
            t = (
                # All package-to-record relationships are defined with the
                # internal `@IN_PACKAGE` relationship type:
                #   (Package)<-[`@IN_PACKAGE`]-(Record)
                # For legacy consistency, we just use the generic "belongs_to"
                # here:
                make_proxy_relationship_instance(r.id, pp, "belongs_to"),
                to_concept_instance(r, model, properties),
            )
            results.append(t)

        AuditLogger.get().message().append(
            "package-proxies", *package_proxy_ids
        ).append("records", *record_ids).log(x_bf_trace_id)

        return results
def delete_property(
    db: PartitionedDatabase,
    model_id: str,
    property_name: str,
    modify_records: bool = False,
) -> None:
    x_bf_trace_id = AuditLogger.trace_id_header()
    max_record_count = current_app.config["config"].max_record_count_for_property_deletion

    with db.transaction() as tx:
        model = db.get_model_tx(tx, model_id)

        if modify_records:
            record_count = db.model_property_record_count_tx(tx, model_id, property_name)
            if record_count > 0:
                if record_count > max_record_count:
                    raise BadRequest(
                        f"Cannot delete properties that are used on > {max_record_count} records. This property is used on {record_count}"
                    )
                model_properties = [
                    p
                    for p in db.get_properties_tx(tx, model_id)
                    if p.name == property_name
                ]
                if not model_properties:
                    raise NotFound(f"no such property {property_name} exists")
                updated_records = db.delete_property_from_all_records_tx(
                    tx, model_id, model_properties[0]
                )
                if updated_records != record_count:
                    raise ServerError("the property was not removed from all records")

        deleted = db.delete_property_tx(tx, model_id, property_name)
        if deleted is None:
            raise NotFound(f"Could not delete property [{model_id}.{property_name}]")

        PennsieveJobsClient.get().send_changelog_event(
            organization_id=db.organization_id,
            dataset_id=db.dataset_id,
            user_id=db.user_id,
            event=DeleteModelProperty(
                property_name=deleted.name,
                model_id=UUID(model.id),
                model_name=model.name,
            ),
            trace_id=TraceId(x_bf_trace_id),
        )
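# Illustrative only (the configured limit is hypothetical): with
# max_record_count_for_property_deletion = 100_000 and modify_records=True,
# a property used on 150_000 records is rejected with a 400 BadRequest, while
# one used on 10_000 records is first stripped from those records and then
# deleted from the model, with a DeleteModelProperty changelog event emitted.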
def delete_concept_relationship(
    db: PartitionedDatabase, relationship_id_or_name: ModelRelationshipId
) -> JsonDict:
    with db.transaction() as tx:
        id_or_name = to_relationship_id_or_name(relationship_id_or_name)
        deleted_id = db.delete_model_relationship_stub_tx(tx=tx, relation=id_or_name)
        if deleted_id is None:
            deleted_id = db.delete_model_relationship_tx(tx=tx, relationship=id_or_name)
        if deleted_id is None:
            raise NotFound(
                f"Could not delete model relationship [{relationship_id_or_name}]"
            )
        return [str(deleted_id)]
def get_related(
    db: PartitionedDatabase,
    concept_id: str,
    id_: str,
    target_concept_id_or_name: str,
    relationship_order_by: Optional[str] = None,
    record_order_by: Optional[str] = None,
    ascending: Optional[bool] = True,
    limit: int = 100,
    offset: int = 0,
    include_incoming_linked_properties: bool = False,
) -> List[JsonDict]:
    with db.transaction() as tx:
        model = db.get_model_tx(tx, target_concept_id_or_name)
        properties = db.get_properties_tx(tx, target_concept_id_or_name)

        order_by: Optional[ModelOrderBy] = None
        asc = ascending if ascending is not None else True

        if record_order_by is not None:
            order_by = ModelOrderBy.field(name=record_order_by, ascending=asc)
        elif relationship_order_by is not None:
            order_by = ModelOrderBy.relationship(
                type=relationship_order_by, ascending=asc
            )
        else:
            # Default order for backwards compatibility:
            order_by = ModelOrderBy.field(name="created_at", ascending=True)

        related = db.get_related_records_tx(
            tx,
            start_from=id_,
            model_name=target_concept_id_or_name,
            order_by=order_by,
            limit=limit,
            offset=offset,
            include_incoming_linked_properties=include_incoming_linked_properties,
        )

        return [
            (
                to_legacy_relationship_instance(rr)
                if rr.one_to_many
                else to_linked_property(rr),
                to_concept_instance(r, model, properties),
            )
            for (rr, r) in related
        ]
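# Illustrative only (IDs/names are hypothetical): record_order_by takes
# precedence over relationship_order_by, so
#   get_related(db, "patient", "rec-1", "sample",
#               record_order_by="name", relationship_order_by="created_at")
# sorts by the related records' "name" property; with neither given, results
# fall back to ordering by "created_at" ascending.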
def update_concept(
    db: PartitionedDatabase, concept_id_or_name: str, body: JsonDict
) -> JsonDict:
    x_bf_trace_id = AuditLogger.trace_id_header()

    with db.transaction() as tx:
        model = db.update_model_tx(tx, concept_id_or_name, **filter_model_dict(body))
        property_count = db.get_property_counts_tx(tx, [model.id])[model.id]

        # Emit "UpdateModel" event:
        PennsieveJobsClient.get().send_changelog_event(
            organization_id=db.organization_id,
            dataset_id=db.dataset_id,
            user_id=db.user_id,
            event=UpdateModel(id=UUID(model.id), name=model.name),
            trace_id=TraceId(x_bf_trace_id),
        )

        return to_concept_dict(model, property_count)
def delete_concept_instance_relationships(db: PartitionedDatabase) -> List[JsonDict]:
    # HACK: request bodies on DELETE requests do not have defined semantics
    # and are not directly supported by OpenAPI/Connexion. See
    # - https://swagger.io/docs/specification/describing-request-body
    # - https://github.com/zalando/connexion/issues/896
    body = connexion.request.json

    # HACK: since we're pulling directly from the raw body, field names are
    # left exactly as sent (camel-cased), with no conversion applied:
    relationship_instance_ids: List[str] = body.get("relationshipInstanceIds", [])

    with db.transaction() as tx:
        return db.delete_record_relationships_tx(tx, *relationship_instance_ids)
def delete_proxy_instances(db: PartitionedDatabase, proxy_type: str) -> List[JsonDict]:
    # HACK: request bodies on DELETE requests do not have defined semantics
    # and are not directly supported by OpenAPI/Connexion. See
    # - https://swagger.io/docs/specification/describing-request-body
    # - https://github.com/zalando/connexion/issues/896
    body = connexion.request.json

    # HACK: since we're pulling directly from the raw body, field names are
    # left exactly as sent (camel-cased), with no conversion applied:
    source_record_id = get_record_id(body.get("sourceRecordId"))
    proxy_instance_ids: List[str] = body.get("proxyInstanceIds", [])

    with db.transaction() as tx:
        return db.delete_package_proxies_tx(tx, source_record_id, *proxy_instance_ids)
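# Illustrative only (IDs are hypothetical): the raw DELETE body is expected
# to look like
#   {"sourceRecordId": "rec-1", "proxyInstanceIds": ["pp-1", "pp-2"]}
# with the keys kept as sent, since the body is read straight from
# connexion.request.json rather than a parsed request model.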