# NOTE: standard-library / third-party imports used by the handlers below.
# Service-internal names (AuditLogger, PartitionedDatabase, PennsieveApiClient,
# PennsieveJobsClient, the model/event types, etc.) are assumed to be imported
# elsewhere in this module.
from collections import defaultdict
from itertools import chain
from typing import Dict, List, Optional, Tuple
from uuid import UUID

import connexion
from flask import current_app
from more_itertools import unique_everseen


def autocomplete_model_properties(
    organization_id: int,
    model_name: str,
    token_info: Claim,
    dataset_id: Optional[int] = None,
) -> List[JsonDict]:
    x_bf_trace_id = AuditLogger.trace_id_header()
    db = authorize_search(organization_id, x_bf_trace_id, token_info)
    ds_id = None if dataset_id is None else DatasetId(dataset_id)
    datasets_properties_operators = list(
        db.suggest_properties(
            model_filter=ModelFilter(name=model_name), dataset_id=ds_id
        )
    )
    datasets = {d for (d, _, _) in datasets_properties_operators}
    properties_and_operators = unique_everseen(
        [(p, op) for (_, p, op) in datasets_properties_operators],
        key=lambda t: prop_key(t[0]),
    )
    # Write to the audit log:
    AuditLogger.get().message().append("organization", organization_id).append(
        "datasets", *[str(ds.id) for ds in datasets]
    ).log(x_bf_trace_id)
    # If a name is a duplicate, include its type in the output display name
    # to disambiguate:
    return [property_to_suggestion(p, ops) for (p, ops) in properties_and_operators]


def get_all_concept_instances(
    db: PartitionedDatabase,
    concept_id_or_name: str,
    limit: int,
    offset: int,
    order_by: Optional[str] = None,
    ascending: Optional[bool] = None,
) -> List[JsonDict]:
    with db.transaction() as tx:
        model = db.get_model_tx(tx, concept_id_or_name)
        properties = db.get_properties_tx(tx, concept_id_or_name)
        results = db.get_all_records_offset_tx(
            tx,
            model=model,
            limit=limit,
            offset=offset,
            fill_missing=True,
            order_by=None
            if order_by is None and ascending is None
            else OrderByField(
                name="created_at" if order_by is None else order_by,
                ascending=True if ascending is None else ascending,
            ),
        )
        x_bf_trace_id = AuditLogger.trace_id_header()
        record_ids = []
        instances = []
        for record in results:
            record_ids.append(str(record.id))
            instances.append(to_concept_instance(record, model, properties))
        AuditLogger.get().message().append("records", *record_ids).log(x_bf_trace_id)
        return instances


def create_concept_instance(
    db: PartitionedDatabase, concept_id_or_name: str, body: JsonDict
):
    with db.transaction() as tx:
        model = db.get_model_tx(tx, concept_id_or_name)
        properties = db.get_properties_tx(tx, concept_id_or_name)
        record = to_record(properties, body["values"])
        records = db.create_records_tx(
            tx, concept_id_or_name, [record], fill_missing=True
        )
        if not records:
            raise BadRequest(
                f"Could not create concept instance [{concept_id_or_name}]"
            )
        record = records[0]
        # Log the created concept instance:
        x_bf_trace_id = AuditLogger.trace_id_header()
        # Emit "CreateRecord" event:
        PennsieveJobsClient.get().send_changelog_event(
            organization_id=db.organization_id,
            dataset_id=db.dataset_id,
            user_id=db.user_id,
            event=CreateRecord(id=record.id, name=record.name, model_id=model.id),
            trace_id=TraceId(x_bf_trace_id),
        )
        AuditLogger.get().message().append("records", str(record.id)).log(
            x_bf_trace_id
        )
        return to_concept_instance(record, model, properties), 201


def get_all_package_proxies(
    db: PartitionedDatabase, record_id: RecordId, limit: int = 100, offset: int = 0
) -> JsonDict:
    total_count, proxies = db.get_package_proxies_for_record(
        record_id, limit=limit, offset=offset
    )
    x_bf_trace_id = AuditLogger.trace_id_header()
    package_proxy_ids = []
    packages = []
    for p in proxies:
        package_proxy_ids.append(str(p.id))
        packages.append(p.to_dict())
    AuditLogger.get().message().append("package-proxies", *package_proxy_ids).log(
        x_bf_trace_id
    )
    return {
        "limit": limit,
        "offset": offset,
        "totalCount": total_count,
        "packages": packages,
    }


def create_proxy_instance(
    db: PartitionedDatabase, proxy_type: str, body: JsonDict
) -> Tuple[List[JsonDict], int]:
    response = []
    with db.transaction() as tx:
        x_bf_trace_id = AuditLogger.trace_id_header()
        link_targets = []
        package_ids = []
        for target in body["targets"]:
            link_target = target["linkTarget"]
            relationship_type = target["relationshipType"]
            link_targets.append(link_target)
            concept_link_target = to_proxy_link_target(link_target)
            if concept_link_target is None:
                raise InvalidPackageProxyLinkTargetError(link_target=str(body))
            package = PennsieveApiClient.get().get_package_ids(
                db.dataset_node_id,
                body["external_id"],
                headers=dict(**auth_header(), **with_trace_id_header(x_bf_trace_id)),
            )
            package_ids.append(str(package.id))
            package_proxy = db.create_package_proxy_tx(
                tx=tx,
                record=concept_link_target.id,
                package_id=package.id,
                package_node_id=package.node_id,
                legacy_relationship_type=relationship_type,
            )
            link_result = {
                "proxyInstance": to_proxy_instance(PROXY_TYPE, package_proxy),
                "relationshipInstance": make_proxy_relationship_instance(
                    concept_link_target.id, package_proxy, relationship_type
                ),
            }
            response.append(link_result)
        AuditLogger.get().message().append("link-targets", *link_targets).append(
            "packages", *package_ids
        ).log(x_bf_trace_id)
    return response, 201


def get_records_related_to_package(
    db: PartitionedDatabase,
    proxy_type: str,
    package_id: str,
    concept_id_or_name: str,
    limit: Optional[int] = None,
    offset: Optional[int] = None,
    relationship_order_by: Optional[str] = None,
    record_order_by: Optional[str] = None,
    ascending: bool = False,
) -> List[JsonDict]:
    with db.transaction() as tx:
        x_bf_trace_id = AuditLogger.trace_id_header()
        model = db.get_model_tx(tx, concept_id_or_name)
        properties = db.get_properties_tx(tx, concept_id_or_name)
        results = []
        package_proxy_ids = []
        record_ids = []
        for pp, r in db.get_records_related_to_package_tx(
            tx=tx,
            package_id=PackageNodeId(package_id),
            related_model_id_or_name=concept_id_or_name,
            limit=limit,
            offset=offset,
            relationship_order_by=relationship_order_by,
            record_order_by=record_order_by,
            ascending=ascending,
        ):
            package_proxy_ids.append(str(pp.id))
            record_ids.append(str(r.id))
            t = (
                # All package-to-record relationships are defined with the
                # internal `@IN_PACKAGE` relationship type:
                #   (Package)<-[`@IN_PACKAGE`]-(Record)
                # For legacy consistency, we just use the generic "belongs_to"
                # here:
                make_proxy_relationship_instance(r.id, pp, "belongs_to"),
                to_concept_instance(r, model, properties),
            )
            results.append(t)
        AuditLogger.get().message().append(
            "package-proxies", *package_proxy_ids
        ).append("records", *record_ids).log(x_bf_trace_id)
        return results


def update_record(
    db: PartitionedDatabase, record_id: RecordId, body: JsonDict
) -> JsonDict:
    x_bf_trace_id = AuditLogger.trace_id_header()
    record = db.get_record(record_id, embed_linked=False, fill_missing=True)
    if record is None:
        raise NotFound(f"Could not get record {record_id}")
    model = db.get_model_of_record(record)
    if model is None:
        raise NotFound(f"Could not find model for record {record_id}")
    properties = db.get_properties(model)
    updated_record = db.update_record(record_id, body["values"])
    # Emit "UpdateRecord" event:
    PennsieveJobsClient.get().send_changelog_event(
        organization_id=db.organization_id,
        dataset_id=db.dataset_id,
        user_id=db.user_id,
        event=UpdateRecord(
            id=record.id,
            name=record.name,
            model_id=model.id,
            properties=UpdateRecord.compute_diff(
                properties, record.values, updated_record.values
            ),
        ),
        trace_id=TraceId(x_bf_trace_id),
    )
    return updated_record.to_dict()


def update_properties(
    db: PartitionedDatabase, model_id_or_name: str, body: List[JsonDict]
):
    x_bf_trace_id = AuditLogger.trace_id_header()
    payload: List[ModelProperty] = ModelProperty.schema().load(body, many=True)
    with db.transaction() as tx:
        model = db.get_model_tx(tx, model_id_or_name)
        properties = db.update_properties_tx(tx, model, *payload)
        PennsieveJobsClient.get().send_changelog_events(
            organization_id=db.organization_id,
            dataset_id=db.dataset_id,
            user_id=db.user_id,
            events=[
                CreateModelProperty(
                    property_name=p.name, model_id=UUID(model.id), model_name=model.name
                )
                if created
                else UpdateModelProperty(
                    property_name=p.name, model_id=UUID(model.id), model_name=model.name
                )
                for p, created in properties
            ],
            trace_id=TraceId(x_bf_trace_id),
        )
        return [p.to_dict() for p, _ in properties]


def update_properties(
    db: PartitionedDatabase, concept_id_or_name: str, body: List[JsonDict]
) -> List[JsonDict]:
    x_bf_trace_id = AuditLogger.trace_id_header()
    with db.transaction() as tx:
        model = db.get_model_tx(tx, concept_id_or_name)
        properties = db.update_properties_tx(
            tx, model, *[to_model_property(p) for p in body]
        )
        # Emit "CreateModelProperty" / "UpdateModelProperty" events:
        PennsieveJobsClient.get().send_changelog_events(
            organization_id=db.organization_id,
            dataset_id=db.dataset_id,
            user_id=db.user_id,
            events=[
                CreateModelProperty(
                    property_name=p.name, model_id=UUID(model.id), model_name=model.name
                )
                if created
                else UpdateModelProperty(
                    property_name=p.name, model_id=UUID(model.id), model_name=model.name
                )
                for p, created in properties
            ],
            trace_id=TraceId(x_bf_trace_id),
        )
        return [to_property_dict(p) for p, _ in properties]


def autocomplete_model_property_values(
    organization_id: int,
    model_name: str,
    property_name: str,
    token_info: Claim,
    dataset_id: Optional[int] = None,
    prefix: Optional[str] = None,
    unit: Optional[str] = None,
    limit: Optional[int] = 10,
) -> List[JsonDict]:
    x_bf_trace_id = AuditLogger.trace_id_header()
    db = authorize_search(organization_id, x_bf_trace_id, token_info)
    ds_id = None if dataset_id is None else DatasetId(dataset_id)
    suggested_values: List[Tuple[Dataset, SuggestedValues]] = db.suggest_values(
        model_name=model_name,
        model_property_name=property_name,
        dataset_id=ds_id,
        matching_prefix=prefix,
        unit=unit,
        limit=limit,
    )
    datasets: List[Dataset] = [d for d, _ in suggested_values]
    # Write to the audit log:
    AuditLogger.get().message().append("organization", organization_id).append(
        "datasets", *[str(ds.id) for ds in datasets]
    ).log(x_bf_trace_id)
    # Group suggestions by property data type:
    grouped_suggestions = defaultdict(list)
    for _, suggestion in suggested_values:
        grouped_suggestions[suggestion.property_.data_type.to_json()].append(suggestion)
    return [
        {
            "property": property_to_suggestion(
                suggestions[0].property_, suggestions[0].operators
            ),
            "values": list(chain.from_iterable(sv.values for sv in suggestions)),
        }
        for suggestions in grouped_suggestions.values()
    ]


def get_files_paged(
    db: PartitionedDatabase,
    concept_id: str,
    id_: str,
    limit: int = 100,
    offset: int = 0,
    order_by: str = "createdAt",
    ascending: bool = True,
) -> JsonDict:
    x_bf_trace_id = AuditLogger.trace_id_header()
    total_count, package_proxies = db.get_package_proxies_for_record(
        id_, limit=limit, offset=offset
    )
    package_proxies = list(package_proxies)
    # If any packages cannot be found they will be ignored in this response.
    # TODO: https://app.clickup.com/t/3gaec4
    packages = PennsieveApiClient.get().get_packages(
        db.dataset_node_id,
        package_ids=[proxy.package_id for proxy in package_proxies],
        headers=dict(**auth_header(), **with_trace_id_header(x_bf_trace_id)),
    )
    package_proxy_ids = [str(p.id) for p in package_proxies]
    package_ids = packages.keys()
    AuditLogger.get().message().append(
        "package-proxies", *package_proxy_ids
    ).append("packages", *package_ids).log(TraceId(x_bf_trace_id))
    return {
        "limit": limit,
        "offset": offset,
        "totalCount": total_count,
        "results": [
            [{"id": proxy.id}, to_legacy_package_dto(packages[proxy.package_id])]
            for proxy in package_proxies
            if proxy.package_id in packages
        ],
    }


def filtered_datasets_by_model(
    organization_id: int,
    token_info: Claim,
    model_name: str,
) -> JsonDict:
    x_bf_trace_id = AuditLogger.trace_id_header()
    db = authorize_search(organization_id, x_bf_trace_id, token_info)
    datasets = db.get_dataset_id_by_model_name(model_name)
    AuditLogger.get().message().append("organization", organization_id).append(
        "datasets", *[str(ds.id) for ds in datasets]
    ).log(x_bf_trace_id)
    return {
        "datasets": [dataset.to_dict() for dataset in datasets],
        "count": len(datasets),
    }


def get_all_records(
    db: PartitionedDatabase,
    model_id_or_name: str,
    limit: int,
    linked: bool,
    next_page: Optional[NextPageCursor] = None,
) -> JsonDict:
    x_bf_trace_id = AuditLogger.trace_id_header()
    paged_result = db.get_all_records(
        model_id_or_name, limit=limit, embed_linked=linked, next_page=next_page
    )
    record_ids = []
    for record in paged_result:
        record_ids.append(str(record.id))
    AuditLogger.get().message().append("records", *record_ids).log(
        TraceId(x_bf_trace_id)
    )
    return PagedResult(
        results=paged_result.results, next_page=paged_result.next_page
    ).to_dict()
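

# A minimal usage sketch (illustrative only): drain every page of
# get_all_records by threading the cursor forward. This assumes that
# PagedResult.to_dict() serializes its fields under the "results" and
# "next_page" keys; both names are inferred from the constructor call above
# rather than confirmed against PagedResult itself.
def _fetch_all_records_example(db: PartitionedDatabase, model: str) -> List[JsonDict]:
    collected: List[JsonDict] = []
    cursor: Optional[NextPageCursor] = None
    while True:
        page = get_all_records(db, model, limit=100, linked=False, next_page=cursor)
        collected.extend(page["results"])
        cursor = page["next_page"]
        if cursor is None:  # no further pages
            return collected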


def delete_model(db: PartitionedDatabase, model_id_or_name: str) -> None:
    model = db.delete_model(model_id_or_name)
    x_bf_trace_id = AuditLogger.trace_id_header()
    # Emit "DeleteModel" event:
    PennsieveJobsClient.get().send_changelog_event(
        organization_id=db.organization_id,
        dataset_id=db.dataset_id,
        user_id=db.user_id,
        event=DeleteModel(id=model.id, name=model.name),
        trace_id=TraceId(x_bf_trace_id),
    )
    return None


def create_concept(db: PartitionedDatabase, body: JsonDict) -> Tuple[JsonDict, int]:
    x_bf_trace_id = AuditLogger.trace_id_header()
    model = db.create_model(**filter_model_dict(body))
    # Emit "CreateModel" event:
    PennsieveJobsClient.get().send_changelog_event(
        organization_id=db.organization_id,
        dataset_id=db.dataset_id,
        user_id=db.user_id,
        event=CreateModel(id=UUID(model.id), name=model.name),
        trace_id=TraceId(x_bf_trace_id),
    )
    return to_concept_dict(model, property_count=0), 201


def get_files(
    db: PartitionedDatabase,
    concept_id: str,
    id_: str,
    limit: int = 100,
    offset: int = 0,
    order_by: str = "createdAt",
    ascending: bool = True,
) -> JsonDict:
    x_bf_trace_id = AuditLogger.trace_id_header()
    _, package_proxies = db.get_package_proxies_for_record(
        id_, limit=limit, offset=offset
    )
    package_proxies = list(package_proxies)
    # If any packages cannot be found they will be ignored in this response.
    # TODO: https://app.clickup.com/t/3gaec4
    packages = PennsieveApiClient.get().get_packages(
        db.dataset_node_id,
        package_ids=[proxy.package_id for proxy in package_proxies],
        headers=dict(**auth_header(), **with_trace_id_header(x_bf_trace_id)),
    )
    package_proxy_ids = [str(p.id) for p in package_proxies]
    package_ids = packages.keys()
    AuditLogger.get().message().append(
        "package-proxies", *package_proxy_ids
    ).append("packages", *package_ids).log(TraceId(x_bf_trace_id))
    # Yes, this response is crazy: an array of two-tuples (arrays), containing a
    # single object with the proxy id, and the package DTO.
    return [
        [{"id": proxy.id}, to_legacy_package_dto(packages[proxy.package_id])]
        for proxy in package_proxies
        if proxy.package_id in packages
    ]
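

# For reference, the shape returned by get_files (values are illustrative,
# not real identifiers):
#
#   [
#       [{"id": "<package-proxy-id>"}, { ...legacy package DTO... }],
#       ...
#   ]
#
# i.e. one two-element entry per proxy whose package could be resolved.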


def get_all_proxy_instances(db: PartitionedDatabase, proxy_type: str) -> List[JsonDict]:
    with db.transaction() as tx:
        proxy_instances = []
        package_proxy_ids = []
        record_ids = []
        x_bf_trace_id = AuditLogger.trace_id_header()
        for package_proxy, record in db.get_all_package_proxies_tx(tx):
            proxy_instances.append(to_proxy_instance(PROXY_TYPE, package_proxy))
            package_proxy_ids.append(str(package_proxy.id))
            record_ids.append(str(record.id))
        AuditLogger.get().message().append(
            "package-proxies", *package_proxy_ids
        ).append("records", *record_ids).log(x_bf_trace_id)
        return proxy_instances


def update_model(
    db: PartitionedDatabase, model_id_or_name: str, body: JsonDict
) -> JsonDict:
    model = db.update_model(model_id_or_name, **body)
    x_bf_trace_id = AuditLogger.trace_id_header()
    # Emit "UpdateModel" event:
    PennsieveJobsClient.get().send_changelog_event(
        organization_id=db.organization_id,
        dataset_id=db.dataset_id,
        user_id=db.user_id,
        event=UpdateModel(id=model.id, name=model.name),
        trace_id=TraceId(x_bf_trace_id),
    )
    return model.to_dict()


def create_model(db: PartitionedDatabase, body: JsonDict) -> Tuple[JsonDict, int]:
    model = db.create_model(**body)
    x_bf_trace_id = AuditLogger.trace_id_header()
    # Emit "CreateModel" event:
    PennsieveJobsClient.get().send_changelog_event(
        organization_id=db.organization_id,
        dataset_id=db.dataset_id,
        user_id=db.user_id,
        event=CreateModel(id=model.id, name=model.name),
        trace_id=TraceId(x_bf_trace_id),
    )
    return model.to_dict(), 201


def create_concept_instance_batch(
    db: PartitionedDatabase, concept_id_or_name: str, body: JsonDict
):
    with db.transaction() as tx:
        model = db.get_model_tx(tx, concept_id_or_name)
        properties = db.get_properties_tx(tx, concept_id_or_name)
        requests = [to_record(properties, req["values"]) for req in body]
        records = db.create_records_tx(
            tx, concept_id_or_name, requests, fill_missing=True
        )
        instances = [to_concept_instance(r, model, properties) for r in records]
        if not instances:
            raise BadRequest(
                f"Could not create concept instances for [{concept_id_or_name}]"
            )
        # Log the created concept instances:
        x_bf_trace_id = AuditLogger.trace_id_header()
        # Emit "CreateRecord" events:
        PennsieveJobsClient.get().send_changelog_events(
            organization_id=db.organization_id,
            dataset_id=db.dataset_id,
            user_id=db.user_id,
            events=[
                CreateRecord(id=r.id, name=r.name, model_id=model.id) for r in records
            ],
            trace_id=TraceId(x_bf_trace_id),
        )
        AuditLogger.get().message().append(
            "records", *[str(r.id) for r in records]
        ).log(x_bf_trace_id)
        return instances
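

# For illustration, the batch endpoint above expects a JSON array of objects,
# each carrying a "values" mapping of property names to values. The property
# names below are hypothetical:
#
#   [
#       {"values": {"name": "subject-001", "species": "mouse"}},
#       {"values": {"name": "subject-002", "species": "rat"}},
#   ]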


def delete_property(
    db: PartitionedDatabase,
    model_id: str,
    property_name: str,
    modify_records: bool = False,
) -> None:
    x_bf_trace_id = AuditLogger.trace_id_header()
    max_record_count = current_app.config[
        "config"
    ].max_record_count_for_property_deletion
    with db.transaction() as tx:
        model = db.get_model_tx(tx, model_id)
        if modify_records:
            record_count = db.model_property_record_count_tx(
                tx, model_id, property_name
            )
            if record_count > 0:
                if record_count > max_record_count:
                    raise BadRequest(
                        f"Cannot delete a property that is used on more than "
                        f"{max_record_count} records; this property is used on "
                        f"{record_count}."
                    )
                model_properties = [
                    p
                    for p in db.get_properties_tx(tx, model_id)
                    if p.name == property_name
                ]
                if not model_properties:
                    raise NotFound(f"No such property [{property_name}] exists")
                updated_records = db.delete_property_from_all_records_tx(
                    tx, model_id, model_properties[0]
                )
                if updated_records != record_count:
                    raise ServerError("The property was not removed from all records")
        deleted = db.delete_property_tx(tx, model_id, property_name)
        if deleted is None:
            raise NotFound(f"Could not delete property [{model_id}.{property_name}]")
        PennsieveJobsClient.get().send_changelog_event(
            organization_id=db.organization_id,
            dataset_id=db.dataset_id,
            user_id=db.user_id,
            event=DeleteModelProperty(
                property_name=deleted.name,
                model_id=UUID(model.id),
                model_name=model.name,
            ),
            trace_id=TraceId(x_bf_trace_id),
        )


def autocomplete_models(
    organization_id: int,
    token_info: Claim,
    dataset_id: Optional[int] = None,
    related_to: Optional[str] = None,
) -> JsonDict:
    x_bf_trace_id = AuditLogger.trace_id_header()
    db = authorize_search(organization_id, x_bf_trace_id, token_info)
    ds_id = None if dataset_id is None else DatasetId(dataset_id)
    datasets_and_models = list(
        db.suggest_models(dataset_id=ds_id, related_to=related_to)
    )
    datasets = {d for (d, _) in datasets_and_models}
    models = unique_everseen(
        (m for (_, m) in datasets_and_models), key=lambda m: m.name
    )
    # Write to the audit log:
    AuditLogger.get().message().append("organization", organization_id).append(
        "datasets", *[str(ds.id) for ds in datasets]
    ).log(x_bf_trace_id)
    return {"models": [model.to_dict() for model in models]}


def update_concept(
    db: PartitionedDatabase, concept_id_or_name: str, body: JsonDict
) -> JsonDict:
    x_bf_trace_id = AuditLogger.trace_id_header()
    with db.transaction() as tx:
        model = db.update_model_tx(tx, concept_id_or_name, **filter_model_dict(body))
        property_count = db.get_property_counts_tx(tx, [model.id])[model.id]
        # Emit "UpdateModel" event:
        PennsieveJobsClient.get().send_changelog_event(
            organization_id=db.organization_id,
            dataset_id=db.dataset_id,
            user_id=db.user_id,
            event=UpdateModel(id=UUID(model.id), name=model.name),
            trace_id=TraceId(x_bf_trace_id),
        )
        return to_concept_dict(model, property_count)


def create_package_proxy(
    db: PartitionedDatabase, record_id: RecordId, package_id: PackageId, body: JsonDict
) -> Tuple[JsonDict, int]:
    x_bf_trace_id = AuditLogger.trace_id_header()
    package = PennsieveApiClient.get().get_package_ids(
        db.dataset_node_id,
        package_id,
        headers=dict(**auth_header(), **with_trace_id_header(x_bf_trace_id)),
    )
    return (
        db.create_package_proxy(
            record_id, package_id=package.id, package_node_id=package.node_id
        ).to_dict(),
        201,
    )


def update_concept_instance(
    db: PartitionedDatabase,
    concept_id_or_name: str,
    concept_instance_id: str,
    body: JsonDict,
) -> JsonDict:
    with db.transaction() as tx:
        model = db.get_model_tx(tx, concept_id_or_name)
        properties = db.get_properties_tx(tx, concept_id_or_name)
        record = db.get_record_tx(
            tx,
            concept_instance_id,
            embed_linked=False,
            fill_missing=True,
        )
        if record is None:
            raise NotFound(f"Could not get record {concept_instance_id}")
        updated_record = db.update_record_tx(
            tx,
            concept_instance_id,
            to_record(properties, body["values"]),
            fill_missing=True,
        )
        x_bf_trace_id = AuditLogger.trace_id_header()
        # Emit an "UpdateRecord" event:
        PennsieveJobsClient.get().send_changelog_event(
            organization_id=db.organization_id,
            dataset_id=db.dataset_id,
            user_id=db.user_id,
            event=UpdateRecord(
                id=record.id,
                name=record.name,
                model_id=model.id,
                properties=UpdateRecord.compute_diff(
                    properties, record.values, updated_record.values
                ),
            ),
            trace_id=TraceId(x_bf_trace_id),
        )
        return to_concept_instance(updated_record, model, properties)


def create_record(
    db: PartitionedDatabase, model_id_or_name: str, body: JsonDict
) -> Tuple[JsonDict, int]:
    record_values = [body["values"]]
    x_bf_trace_id = AuditLogger.trace_id_header()
    record = db.create_records(model_id_or_name, record_values)[0]
    model = db.get_model(model_id_or_name)
    if model is None:
        raise NotFound(f"Model {model_id_or_name}")
    # Emit "CreateRecord" event:
    PennsieveJobsClient.get().send_changelog_event(
        organization_id=db.organization_id,
        dataset_id=db.dataset_id,
        user_id=db.user_id,
        event=CreateRecord(id=record.id, name=record.name, model_id=model.id),
        trace_id=TraceId(x_bf_trace_id),
    )
    return record.to_dict(), 201


def delete_concept_instances(
    db: PartitionedDatabase, concept_id_or_name: str
) -> JsonDict:
    # HACK: request bodies on DELETE requests do not have defined semantics
    # and are not directly supported by OpenAPI/Connexion. See
    # - https://swagger.io/docs/specification/describing-request-body
    # - https://github.com/zalando/connexion/issues/896
    body = connexion.request.json
    success = []
    errors = []
    events = []
    with db.transaction() as tx:
        model = db.get_model_tx(tx, concept_id_or_name)
        properties = db.get_properties_tx(tx, model)
        for instance_id in body:
            try:
                deleted = db.delete_record_tx(tx, instance_id, properties)
                events.append(
                    DeleteRecord(
                        id=deleted.id,
                        name=deleted.name,
                        model_id=model.id,
                    )
                )
            except Exception as e:  # noqa: F841
                errors.append([instance_id, f"Could not delete {instance_id}"])
            else:
                success.append(instance_id)
    x_bf_trace_id = AuditLogger.trace_id_header()
    # Emit "DeleteRecord" events:
    PennsieveJobsClient.get().send_changelog_events(
        organization_id=db.organization_id,
        dataset_id=db.dataset_id,
        user_id=db.user_id,
        events=events,
        trace_id=TraceId(x_bf_trace_id),
    )
    return {"success": success, "errors": errors}
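

# Because of the DELETE-body caveat noted above, a client must send the
# instance ids as a raw JSON array in the request body. A minimal sketch
# using `requests` (the URL is a placeholder, not the service's real route):
#
#   import requests
#
#   requests.delete(
#       "https://example.com/datasets/1/concepts/patient/instances",
#       json=["instance-id-1", "instance-id-2"],
#   )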


def delete_record(db: PartitionedDatabase, record_id: RecordId) -> None:
    x_bf_trace_id = AuditLogger.trace_id_header()
    model = db.get_model_of_record(record_id)
    if model is None:
        raise NotFound(f"Could not find model for record {record_id}")
    properties = db.get_properties(model)
    deleted = db.delete_record(record_id, properties)
    # Emit "DeleteRecord" event:
    PennsieveJobsClient.get().send_changelog_event(
        organization_id=db.organization_id,
        dataset_id=db.dataset_id,
        user_id=db.user_id,
        event=DeleteRecord(id=deleted.id, name=deleted.name, model_id=model.id),
        trace_id=TraceId(x_bf_trace_id),
    )
    return None


def create_records(
    db: PartitionedDatabase, model_id_or_name: str, body: List[Dict]
) -> Tuple[List[JsonDict], int]:
    x_bf_trace_id = AuditLogger.trace_id_header()
    record_values = [r["values"] for r in body]
    records = db.create_records(model_id_or_name, records=record_values)
    model = db.get_model(model_id_or_name)
    if model is None:
        raise NotFound(f"Model {model_id_or_name}")
    # Emit "CreateRecord" events:
    events = [CreateRecord(id=r.id, name=r.name, model_id=model.id) for r in records]
    PennsieveJobsClient.get().send_changelog_events(
        organization_id=db.organization_id,
        dataset_id=db.dataset_id,
        user_id=db.user_id,
        events=events,
        trace_id=TraceId(x_bf_trace_id),
    )
    return [record.to_dict() for record in records], 201


def delete_concept_instance(
    db: PartitionedDatabase, concept_id_or_name: str, concept_instance_id: str
) -> JsonDict:
    with db.transaction() as tx:
        model = db.get_model_tx(tx, concept_id_or_name)
        properties = db.get_properties_tx(tx, concept_id_or_name)
        record = db.delete_record_tx(tx, concept_instance_id, properties)
        x_bf_trace_id = AuditLogger.trace_id_header()
        # Emit a "DeleteRecord" event:
        PennsieveJobsClient.get().send_changelog_event(
            organization_id=db.organization_id,
            dataset_id=db.dataset_id,
            user_id=db.user_id,
            event=DeleteRecord(id=record.id, name=record.name, model_id=model.id),
            trace_id=TraceId(x_bf_trace_id),
        )
        return to_concept_instance(record, model, properties)