Example #1
    def delete(self, *args, **kwargs):
        # Clear delta history. Deltas can reference Steps: if we don't
        # clear the deltas, Django may decide to CASCADE to Step first and
        # we'll raise a ProtectedError.
        self.deltas.all().delete()

        # Next, clear Report blocks. Their Step/Tab ON_DELETE is models.PROTECT,
        # because [2020-11-30, adamhooper] we want to test for months before
        # we're confident that we don't delete blocks at the wrong times.
        self.blocks.all().delete()

        # Clear all s3 data. We _should_ clear it in pre-delete hooks on
        # StoredObject, UploadedFile, etc.; but [2019-06-03, adamhooper] the
        # database is inconsistent and Django is hard to use so new bugs may
        # crop up anyway.
        #
        # [2019-06-03, adamhooper] hooks never work in ORMs. Better would be
        # to make `delete()` a controller method, not a funky mishmash of
        # Django-ORM absurdities. TODO nix Django ORM.
        #
        # TL;DR we're double-deleting s3 data, to be extra-safe. The user
        # said "delete." We'll delete.
        if self.id:  # be extra-safe: use if-statement so we don't remove '/'
            s3.remove_recursive(s3.StoredObjectsBucket, f"{self.id}/")
            s3.remove_recursive(s3.UserFilesBucket, f"wf-{self.id}/")

        super().delete(*args, **kwargs)
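All of these examples lean on s3.remove_recursive(bucket, prefix), whose implementation isn't shown here. A minimal sketch of what such a helper might look like on top of boto3, assuming the bucket constants are plain bucket-name strings and that force=True (as used in Example #4) is what permits wiping a whole bucket:

import boto3

def remove_recursive(bucket: str, prefix: str, force: bool = False) -> None:
    # Refuse to wipe a whole bucket unless the caller explicitly opts in,
    # matching the force=True calls in Example #4.
    if prefix in ("", "/") and not force:
        raise ValueError("Refusing to empty a whole bucket without force=True")

    client = boto3.client("s3")
    paginator = client.get_paginator("list_objects_v2")
    for page in paginator.paginate(Bucket=bucket, Prefix=prefix):
        contents = page.get("Contents", [])
        if not contents:
            continue
        # Each list page holds at most 1,000 keys, the same limit as
        # delete_objects(), so one delete call per page is enough.
        client.delete_objects(
            Bucket=bucket,
            Delete={"Objects": [{"Key": obj["Key"]} for obj in contents]},
        )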
Example #2
def delete_parquet_files_for_step(workflow_id: int, step_id: int) -> None:
    """Delete all Parquet files cached for `step`.

    Different deltas on the same module produce different Parquet
    filenames. This function removes all of them.

    This deletes from s3 but not from the database. Beware -- this can leave
    the database in an inconsistent state.
    """
    s3.remove_recursive(BUCKET, parquet_prefix(workflow_id, step_id))
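BUCKET and parquet_prefix() come from the surrounding module and aren't shown in this snippet. One plausible shape for them, assuming the per-workflow, per-step key layout hinted at by Example #3, might be:

BUCKET = s3.CachedRenderResultsBucket  # assumption: cached Parquet lives here

def parquet_prefix(workflow_id: int, step_id: int) -> str:
    # Every delta's Parquet file for this step shares this prefix, so one
    # remove_recursive() call deletes all of them.
    return "wf-%d/wfm-%d/" % (workflow_id, step_id)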
Example #3
    def delete(self, *args, **kwargs):
        # TODO make DB _not_ depend upon s3.
        s3.remove_recursive(s3.UserFilesBucket, self.uploaded_file_prefix)
        s3.remove_recursive(
            s3.CachedRenderResultsBucket,
            "wf-%d/wfm-%d/" % (self.workflow_id, self.id),
        )
        # We can't delete in-progress uploads from tusd's bucket because there's
        # no directory hierarchy. The object lifecycle policy will delete them.
        super().delete(*args, **kwargs)
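The lifecycle policy mentioned in that comment is configured on the bucket, not in this code. A hedged sketch of such a rule via boto3, where the bucket name and the 3-day window are both assumptions rather than values from the source:

import boto3

boto3.client("s3").put_bucket_lifecycle_configuration(
    Bucket="tus-upload-bucket",  # assumption: the bucket behind s3.TusUploadBucket
    LifecycleConfiguration={
        "Rules": [
            {
                "ID": "expire-stale-tus-uploads",
                "Status": "Enabled",
                "Filter": {"Prefix": ""},  # no hierarchy, so match every key
                "Expiration": {"Days": 3},  # assumed retention window
            }
        ]
    },
)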
Example #4
def clear_s3():
    buckets = (
        s3.UserFilesBucket,
        s3.StoredObjectsBucket,
        s3.ExternalModulesBucket,
        s3.CachedRenderResultsBucket,
        s3.TusUploadBucket,
    )

    for bucket in buckets:
        s3.remove_recursive(bucket, "/", force=True)
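A helper like this is typically run between tests so leftover objects can't leak from one test into the next. A usage sketch, assuming a standard unittest-style test case:

import unittest

class S3BackedTestCase(unittest.TestCase):
    def setUp(self):
        super().setUp()
        clear_s3()  # start from empty buckets

    def tearDown(self):
        clear_s3()  # leave nothing behind for the next test
        super().tearDown()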
Example #5
def _delete_from_s3_post_delete(sender, instance, **kwargs):
    """
    Delete module _code_ from S3, now that ModuleVersion is gone.
    """
    prefix = "%s/%s/" % (sender.id_name, sender.source_version_hash)
    s3.remove_recursive(s3.ExternalModulesBucket, prefix)
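The receiver above only fires if it is connected to Django's post_delete signal. A minimal wiring sketch, assuming the model class is named ModuleVersion and is importable here:

from django.db.models.signals import post_delete

# Run the receiver after every ModuleVersion deletion. dispatch_uid keeps the
# connection idempotent if this module is imported more than once.
post_delete.connect(
    _delete_from_s3_post_delete,
    sender=ModuleVersion,
    dispatch_uid="delete-module-code-from-s3",
)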