Example #1
0
class StorageQuota(models.Model):
    """Storage type and quota to be used by Groupworkspace Quota."""

    # Populate the integer choices for the ``storage`` field from the
    # available backends: each backend gets a sequential index (bi) and
    # contributes its id string to both the choices and the STORAGE list.
    bi = 0
    STORAGE = []
    __STORAGE_CHOICES = []
    for be in jdma_control.backends.get_backends():
        bo = be()
        __STORAGE_CHOICES.append((bi, bo.get_id()))
        STORAGE.append(bo.get_id())
        bi += 1
    # index into __STORAGE_CHOICES selecting the backend for this quota
    storage = models.IntegerField(choices=__STORAGE_CHOICES,
                                  default=0,
                                  db_index=True)

    quota_size = FileSizeField(
        default=0, help_text="Size of quota allocated to the groupworkspace")

    quota_used = FileSizeField(
        default=0, help_text="Size of quota used in the groupworkspace")

    # keep a record of the workspace so we can search on it
    workspace = models.ForeignKey(
        Groupworkspace,
        null=False,
        help_text="Workspace that this storage quota is for",
        on_delete=models.CASCADE)

    # These helpers use no instance state; without @staticmethod, calling
    # them on an instance would pass ``self`` as the first argument and
    # return the wrong element / raise.
    @staticmethod
    def get_storage_name(nid):
        """Get the storage name from the numerical id."""
        return StorageQuota.STORAGE[nid]

    @staticmethod
    def get_storage_index(name):
        """Get the numerical id from the storage name."""
        return StorageQuota.STORAGE.index(name)

    def quota_formatted_used(self):
        """Return the used quota as a human-readable size (for the admin)."""
        return filesizeformat(self.quota_used)

    quota_formatted_used.short_description = "Quota used"

    def quota_formatted_size(self):
        """Return the allocated quota as a human-readable size (for the admin)."""
        return filesizeformat(self.quota_size)

    quota_formatted_size.short_description = "Quota size"

    def get_name(self):
        """Return the display name of this quota's storage backend."""
        return StorageQuota.__STORAGE_CHOICES[self.storage][1]

    get_name.short_description = "quota_name"

    def __str__(self):
        # "<workspace> : <backend name> : <used> / <allocated>"
        desc_str = "{} : {} : {} / {}".format(
            self.workspace.workspace,
            str(StorageQuota.__STORAGE_CHOICES[self.storage][1]),
            filesizeformat(self.quota_used), filesizeformat(self.quota_size))
        return desc_str
class DataProduct(models.Model):
    """A data product (dataset, source code, samples, ...) attached to a Grant."""

    # (stored value, display label) pairs for data_product_type.
    CHOICES = (
        ('digital', 'Digital Dataset'),
        ('model_source', 'Model Source Code'),
        # fixed typo in the display label ("Pysical" -> "Physical");
        # the stored value 'physical' is unchanged so existing rows are safe
        ('physical', 'Physical Collections & Samples'),
        ('hardcopy', 'Hardcopy Records'),
        ('third_party', 'Third Party/Existing Datasets'),
    )
    grant = models.ForeignKey(Grant, on_delete=models.CASCADE)
    added = models.DateField(auto_now_add=True)
    modified = models.DateTimeField(auto_now=True)
    data_product_type = models.CharField(choices=CHOICES, max_length=50)

    # All descriptive fields are optional.
    name = models.CharField(max_length=200, blank=True, null=True)
    contact = models.CharField(max_length=100, blank=True, null=True)
    data_format = models.ForeignKey(DataFormat, on_delete=models.PROTECT, blank=True, null=True)
    preservation_plan = models.ForeignKey(PreservationPlan, on_delete=models.PROTECT, blank=True, null=True)
    description = models.TextField(blank=True, null=True)
    data_volume = FileSizeField(default=0, blank=True, null=True)
    delivery_date = models.DateField(blank=True, null=True)
    embargo_date = models.DateField(blank=True, null=True)
    doi = models.BooleanField(blank=True, verbose_name='DOI', null=True)
    sample_type = models.CharField(max_length=50, blank=True, null=True)
    sample_destination = models.CharField(max_length=100, blank=True, null=True)
    issues = models.TextField(blank=True, null=True)
    data_location = models.CharField(max_length=100, blank=True, null=True)
    responsibility = models.CharField(max_length=50, blank=True, null=True)
    additional_comments = models.TextField(blank=True, null=True)

    def __str__(self):
        """Model name, title-cased type, then name (falling back to description)."""
        return f'{self._meta.object_name} ({self.data_product_type.title()} Data Product) - {self.name if self.name else self.description}'
Example #3
0
class RestoreDisk(models.Model):
    """Allocated area(s) of disk(s) to hold restored files.  Restore will find a space on one
       of these RestoreDisks to write the files to.

       :var models.CharField mountpoint: the path to the restore area
       :var FileSizeField allocated_bytes: the allocated size of the restore area (in bytes)
       :var FileSizeField used_bytes: the amount of space used of the restore area (in bytes).  Updated by ``update()`` method.
       """
    mountpoint = models.CharField(blank=True,
                                  max_length=1024,
                                  help_text="E.g. /badc/restore_1",
                                  unique=True)
    allocated_bytes = FileSizeField(
        default=0,
        help_text=
        "Maximum size on the disk that can be allocated to the restore area")
    used_bytes = FileSizeField(
        default=0, help_text="Used value calculated by update method")

    def __str__(self):
        # delegate to __unicode__ (kept for legacy Python 2 callers)
        return self.__unicode__()

    def __unicode__(self):
        return "%s" % self.mountpoint

    def formatted_used(self):
        """Return used_bytes as a human-readable size (for the admin)."""
        return filesizeformat(self.used_bytes)

    formatted_used.short_description = "used"

    def formatted_allocated(self):
        """Return allocated_bytes as a human-readable size (for the admin)."""
        return filesizeformat(self.allocated_bytes)

    formatted_allocated.short_description = "allocated"

    def update(self):
        """Update the number of bytes used on the RestoreDisk by summing the size of each TapeFile that is restored
           to this RestoreDisk"""
        # Get all the tape files that are restored to this disk
        restored_files = TapeFile.objects.filter(stage=TapeFile.RESTORED,
                                                 restore_disk=self)
        # recompute used_bytes from scratch in a single pass
        self.used_bytes = sum(f.size for f in restored_files)
        self.save()
Example #4
0
class MigrationFile(models.Model):
    """A single file belonging to a migration in the JASMIN data migration
    app (JDMA).
    """

    # path of the file, relative to Migration.common_path
    path = models.CharField(
        max_length=1024,
        null=True,
        help_text="Relative path to the file (relative to Migration.common_path)")
    # checksum digest and its format
    digest = models.CharField(
        max_length=64, null=True, help_text="Checksum digest of the file")
    digest_format = models.CharField(
        max_length=32, null=True, blank=False, default="SHA256")
    # size of the file in bytes
    size = FileSizeField(
        null=False, default=0, help_text="size of file in bytes")
    # file type string: "FILE", "DIR", "LINK", "LNCM", "LNAS" or "MISS" (missing)
    ftype = models.CharField(
        max_length=4, null=False, default="FILE", help_text="Type of the file")
    # target of a link, so links can be recreated on restore
    link_target = models.CharField(
        max_length=1024,
        null=True,
        blank=True,
        help_text="Relative (for LNCM) and absolute (for LNAS) path to the linked file location")
    # original ownership and permissions, recorded so they can be
    # reinstated when the directory is restored
    unix_user_id = models.IntegerField(
        blank=True, null=True, help_text="uid of original owner of file")
    unix_group_id = models.IntegerField(
        blank=True, null=True, help_text="gid of original owner of file")
    unix_permission = models.IntegerField(
        blank=True, null=True, help_text="File permissions of original file")
    # the archive that contains this file
    archive = models.ForeignKey(
        MigrationArchive,
        on_delete=models.CASCADE,
        null=False,
        help_text="Archive that this File belongs to")

    def formatted_size(self):
        """Human-readable file size (for the admin)."""
        return filesizeformat(self.size)

    formatted_size.short_description = "size"

    def __str__(self):
        return f"{self.path} {self.ftype}"
Example #5
0
class DataProduct(models.Model):
    """A data product: a data stream produced by a project."""

    title = models.CharField(max_length=200)
    desc = models.TextField(blank=True, null=True)
    notes = GenericRelation("Note")
    datavol = FileSizeField(default=0)
    # on_delete=models.CASCADE matches Django's historical implicit default
    # for ForeignKey and is mandatory from Django 2.0 onwards
    project = models.ForeignKey(Project,
                                help_text="Project producing this data",
                                blank=True,
                                null=True,
                                on_delete=models.CASCADE)
    sciSupContact = models.ForeignKey(
        User,
        help_text="CEDA person contact for this data",
        blank=True,
        null=True,
        on_delete=models.CASCADE)
    contact1 = models.CharField(max_length=200, blank=True, null=True)
    contact2 = models.CharField(max_length=200, blank=True, null=True)
    deliverydate = models.DateField(blank=True, null=True)
    # NOTE(review): "Dispose5years " carries a trailing space in the stored
    # value; kept as-is for compatibility with existing rows
    preservation_plan = models.CharField(
        max_length=200,
        blank=True,
        null=True,
        choices=(("KeepIndefinitely", "Keep Indefinitely"),
                 ("KeepAsIs", "Keep as is - Even if obsolete"),
                 ("Dispose5years ",
                  "Review for disposal 5 years after project completes"),
                 ("ManageInProject",
                  "Don't Archive - manage the data within the project"),
                 ("Subset", "Plan to keep a subset of the data indefinitely"),
                 ("TBD", "TBD")))
    added = models.DateTimeField(auto_now_add=True)
    modified = models.DateTimeField(auto_now=True)
    review_date = models.DateTimeField(auto_now=True)
    status = models.CharField(
        max_length=200,
        blank=True,
        null=True,
        choices=(
            ("WithProjectTeam", "With Project Team"),
            ("Ingesting", "Ingesting"), ("Archived", "Archived and complete"),
            ("Defaulted",
             "Defaulted - not archived due to project not supplying data"),
            ("NotArchived", "Not going to archive - planned")))
    data_URL = models.URLField(blank=True, null=True)

    def __str__(self):
        # Python 3 uses __str__; delegate to the legacy __unicode__
        return self.__unicode__()

    def __unicode__(self):
        return "%s" % self.title

    def projects_where_thirdparty(self):
        """Projects that reference this data product as third-party data."""
        # NOTE(review): ``Projects`` (plural) is not defined in the visible
        # code -- the model referenced above is ``Project``; confirm the name
        return Projects.objects.filter(third_party_data=self)
Example #6
0
class Quota(models.Model):
    """Users quota for tape requests

       :var models.CharField user: identifier for the user, the same as their JASMIN login
       :var FileSizeField size: The size of the quota in bytes
       :var models.CharField email_address: The email address of the user
       :var models.TextField notes: Notes about the user, affiliation, project, etc.

    """
    user = models.CharField(max_length=2024)
    size = FileSizeField(help_text='size of quota in bytes')
    email_address = models.CharField(
        max_length=2024,
        blank=True,
        null=True,
        help_text='email address of user for notifications')
    notes = models.TextField(blank=True, null=True)

    def __str__(self):
        # delegate to __unicode__ (kept for legacy Python 2 callers)
        return self.__unicode__()

    def __unicode__(self):
        return "%s (%s)" % (self.user, filesizeformat(self.size))

    def used(self, retention_date):
        """Get the amount of quota used by this user

           :param DateTime retention_date: the retention date.  User's requests with retention dates beyond this will be used in the calculation of the used quota.
           :return: The amount of quota used, in bytes.
           :rtype: integer

        """
        reqs = TapeRequest.objects.filter(quota=self,
                                          retention__gte=retention_date)
        # total the size of every request still under retention
        return sum(req.size() for req in reqs)

    def requests(self):
        """Get the requests associated with this quota

           :return: All requests associated with this quota.
           :rtype: QuerySet[TapeRequest]
        """
        return TapeRequest.objects.filter(quota=self)

    def formatted_size(self):
        """Human-readable quota size (for the admin)."""
        return filesizeformat(self.size)

    formatted_size.short_description = "size"
Example #7
0
class FileFieldPluginBase(FieldPluginBase):
    """Abstract base for form-field plugins that accept file uploads."""

    # destination folder for every file submitted through this field
    upload_to = FilerFolderField(
        verbose_name=_('Upload files to'),
        help_text=_(
            'Select a folder to which all files submitted through '
            'this field will be uploaded to.'
        ),
    )
    # optional upload size limit, in bytes
    max_size = FileSizeField(
        verbose_name=_('Maximum file size'),
        blank=True,
        null=True,
        help_text=_(
            'The maximum file size of the upload, in bytes. You can '
            'use common size suffixes (kB, MB, GB, ...).'
        ),
    )

    class Meta:
        abstract = True
Example #8
0
class Still(models.Model):
    """
    A still is a single frame (likely the first frame) from a segment, attached
    to the stream object.
    """

    stream = models.ForeignKey(
        Stream, related_name="stills", on_delete=models.CASCADE)
    # position of the frame within the stream, in seconds
    timecode = models.FloatField(db_index=True)

    file = models.ImageField(upload_to=generate_still_filename)
    file_size = FileSizeField()

    created = models.DateTimeField(auto_now_add=True, db_index=True)
    last_updated = models.DateTimeField(auto_now=True, db_index=True)

    def __str__(self):
        return "{}:still@{}".format(self.stream, self.timecode)
Example #9
0
class Segment(models.Model):
    """
    A segment is a single video file as part of a distribution.
    """

    distribution = models.ForeignKey(
        Distribution, related_name="segments", on_delete=models.CASCADE)
    # ordering of this segment within the distribution (unique per distribution)
    sequence_number = models.IntegerField()
    file = models.FileField(upload_to=generate_segment_filename)
    file_size = FileSizeField()

    # playback length in seconds
    duration = models.FloatField()

    # transcoding provenance, kept for debugging failed encodes
    transcode_command = models.TextField(blank=True)
    transcode_stderr = models.TextField(blank=True)

    created = models.DateTimeField(auto_now_add=True, db_index=True)
    last_updated = models.DateTimeField(auto_now=True, db_index=True)

    class Meta:
        unique_together = ("distribution", "sequence_number")

    def __str__(self):
        return "{}:{}".format(self.distribution, self.sequence_number)
Example #10
0
class MigrationArchive(models.Model):
    """An archive stores a list of files that are to be tarred together then
    uploaded.
    This is to enable efficient upload / download of small files.
    An archive may often contain only one file.
    """
    # Checksum digest and format
    digest = models.CharField(max_length=64, help_text="Digest of the archive")
    digest_format = models.CharField(max_length=32,
                                     null=True,
                                     blank=False,
                                     default="SHA256")
    # which migration does this belong to?
    # Many to one mapping (many Migration Archives->one Migration)
    migration = models.ForeignKey(
        Migration,
        on_delete=models.CASCADE,
        null=False,
        help_text="Migration that this Archive belongs to")

    # size in bytes
    size = FileSizeField(null=False,
                         default=0,
                         help_text="size of file in bytes")

    # is the archive to be packed / is it packed?
    packed = models.BooleanField(default=False,
                                 help_text="Is the archive packed (tarred)?")

    def name(self):
        """Display name of the archive, based on its primary key."""
        return "Archive " + str(self.pk)

    name.short_description = "archive_name"

    def get_id(self):
        """Zero-padded unique identifier, used e.g. as the tar file stem."""
        return "archive_{:010}".format(self.pk)

    # bug fix: this line previously read ``name.short_description = "get_id"``,
    # clobbering name's label and leaving get_id without one
    get_id.short_description = "get_id"

    def first_file(self):
        """Summarise the archive: file count and the first file's path."""
        q_set = self.migrationfile_set.all()
        if q_set.count() == 0:
            return ""
        else:
            fname = q_set[0].path
            return str(q_set.count()) + " files. First file: " + fname

    first_file.short_description = "first_file"

    def formatted_size(self):
        """Human-readable archive size (for the admin)."""
        return filesizeformat(self.size)

    formatted_size.short_description = "size"

    def __str__(self):
        """Return a string representation"""
        return "Archive " + str(self.pk)

    def get_archive_name(self, prefix=""):
        """Get the name of the archive tar file, if the archive is packed.
           Returns an empty string for unpacked archives."""
        if not self.packed:
            return ""
        else:
            return os.path.join(prefix, self.get_id() + ".tar")

    def get_file_names(self, prefix="", filter_list=None):
        """Return a dictionary of five lists of files from the archive to be
           / that have been uploaded.
           The dictionary consists of:
             {"FILE" : [list of files],
              "DIR"  : [list of directories],
              "LINK" : [list of links],
              "LNCM" : [list of links with relation to a common path],
              "LNAS" : [list of links with absolute path]}
           The function can also be given an optional filter_list, to only
           include files that are in the list.  This is so that GET requests
           can specify a subset of files to download.
        """
        # not packed, return a list of the files in the archive
        file_list = {"FILE": [], "DIR": [], "LINK": [], "LNAS": [], "LNCM": []}
        for f in self.migrationfile_set.all():
            if filter_list is None:
                file_list[f.ftype].append(os.path.join(prefix, f.path))
            else:
                if f.path in filter_list:
                    full_path = os.path.join(prefix, f.path)
                    # avoid duplicate entries when filtering
                    if full_path not in file_list[f.ftype]:
                        file_list[f.ftype].append(full_path)
        return file_list

    get_file_names.short_description = "Filelist"

    def get_file_list_text(self):
        """Concatenate the archive's file paths and types into a text buffer."""
        output = ""
        for f in self.migrationfile_set.all():
            output += f.path + " : " + f.ftype + "\n"
        return output

    get_file_list_text.short_description = "List of files in archive"
Example #11
0
class TapeFile(models.Model):
    """Files that are archived on tape as the primary media, and have been added to the NLA system via move_files_to_nla.

       :var models.CharField logical_path: The original logical path of the file in the archive, before it was moved to tape
       :var FileSizeField size: The size of the file (in bytes)
       :var models.DateTimeField verified: The time and date that the file was verified within the NLA system
       :var models.IntegerField stage: The stage that the file is at, one of
          (values corrected to match the class constants below):

          - **UNVERIFIED** (0)

          - **ONTAPE** (1)

          - **RESTORING** (2)

          - **ONDISK** (3)

          - **RESTORED** (5)

       :var models.ForeignKey restore_disk: A reference to the RestoreDisk where the file has been restored to
    """

    # stages for tape files
    UNVERIFIED = 0
    ONTAPE = 1
    RESTORING = 2
    ONDISK = 3
    #    DELETED = 4    # NRM - deleted has been deleted as not actually needed - files will have to be reintroduced
    #                   # into
    RESTORED = 5
    __CHOICES = ((ONTAPE, 'On tape'), (RESTORING, 'Restoring'),
                 (ONDISK, 'On Disk'), (UNVERIFIED, 'Unverified'), (RESTORED,
                                                                   'Restored'))

    # display names indexed by stage value; index 4 is the removed DELETED slot
    STAGE_NAMES = [
        "UNVERIFIED", "ON TAPE", "restoring", "on disk", "D", "RESTORED"
    ]

    logical_path = models.CharField(
        max_length=2024,
        help_text='logical path of archived files e.g. /badc/acsoe/file10.dat',
        db_index=True)
    size = FileSizeField(help_text='size of file in bytes')
    verified = models.DateTimeField(
        blank=True,
        null=True,
        help_text="Checked tape copy is same as disk copy")
    stage = models.IntegerField(choices=__CHOICES, db_index=True)

    # which restore disk is the restored file on?
    restore_disk = models.ForeignKey(RestoreDisk,
                                     blank=True,
                                     null=True,
                                     on_delete=models.SET_NULL)

    @staticmethod
    def load_storage_paths():
        """Load the fileset logical paths to spotname mappings by retrieving the spotnames from a URL,
           finding the corresponding logical path for the spot and reformatting them into a dictionary"""

        response = requests.get(CEDA_DOWNLOAD_CONF)
        if response.status_code != 200:
            raise TapeFileException(
                "Cannot find url: {}".format(CEDA_DOWNLOAD_CONF))
        else:
            page = response.text.split("\n")

        TapeFile.fileset_logical_path_map = {}
        TapeFile.fileset_logical_paths = []

        # make a dictionary that maps logical paths to spot names
        for line in page:
            line = str(line.strip())
            if line == '':
                continue
            spot_name, logical_path = line.split()
            TapeFile.fileset_logical_path_map[logical_path] = spot_name
            TapeFile.fileset_logical_paths.append(logical_path)

        # reverse sort the logical paths so that longer paths match first
        TapeFile.fileset_logical_paths.sort(reverse=True)

        response = requests.get(STORAGE_PATHS_URL)
        if response.status_code != 200:
            raise TapeFileException(
                "Cannot find url: {}".format(STORAGE_PATHS_URL))
        else:
            page = response.text.split("\n")

        TapeFile.fileset_storage_path_map = {}

        # make a dictionary that maps spot names to storage paths
        for line in page:
            line = line.strip()
            if line == '':
                continue
            storage_path, spot_name = line.split()
            TapeFile.fileset_storage_path_map[spot_name] = storage_path

    def spotname(self):
        """Return portion of path that maps to spot name, and the spotname for a file.
            e.g. ``/badc/cira/data/x.dat -> /badc/cira, spot-1234-cira``

            This function is used to give the elements needed to construct a storage path.

            :return: A tuple of (logical_spot_path, spot_name)
            :rtype: (string, string)
        """
        file_path = self._logical_path
        # find the longest logical path that is a prefix of the file path
        # (fileset_logical_paths is reverse-sorted so longer paths match first)
        for l in TapeFile.fileset_logical_paths:
            if file_path[:len(l)] == l:
                # start of the filename is the same as a fileset
                return l, TapeFile.fileset_logical_path_map[l]
        else:
            # for/else: only reached when no fileset matched.
            # There should always be a spot for a file
            raise TapeFileException("File %s has no associated fileset" %
                                    file_path)

    def storage_path(self):
        """Return the current storage path to file.

           :return: storage path of the TapeFile
           :rtype: string
        """
        logical_spot_path, spot_name = self.spotname()
        return TapeFile.fileset_storage_path_map[spot_name]

    def archive_volume_path(self):
        """Return the current volume path for a file. e.g. /datacentre/archvol/pan52/archive

          :return: volume path of the TapeFile
          :rtype: string
        """
        return os.path.dirname(self.storage_path())

    def __str__(self):
        # delegate to __unicode__ (kept for legacy Python 2 callers)
        return self.__unicode__()

    def __unicode__(self):
        return "%s (%s)" % (self._logical_path,
                            TapeFile.STAGE_NAMES[self.stage])

    def match(self, pattern):
        """Return whether the logical path of this TapeFile matches the input pattern (a UNIX filesystem pattern).

           :param string pattern: The UNIX filesystem pattern to match against
           :return: ``True | False``
           :rtype: boolean
        """
        return fnmatch.fnmatch(self.logical_path, pattern)

    @staticmethod
    def add(file_path, size):
        """Method to add a logical path as a TapeFile if its not already present on the NLA system.

           :param string file_path: The (original) logical path to the file, before it was archived to tape
           :param integer size: The size of the file, in bytes

        """
        existing_tape_file = TapeFile.objects.filter(logical_path=file_path)
        if len(existing_tape_file) == 0:
            TapeFile(logical_path=file_path,
                     size=size,
                     stage=TapeFile.UNVERIFIED).save()

    @property
    def _logical_path(self):
        """logical_path coerced to a plain string."""
        # removed unreachable dead code that stripped a b'...' repr --
        # it appeared after this return and could never execute
        return str(self.logical_path)

    def formatted_size(self):
        """Human-readable file size (for the admin)."""
        return filesizeformat(self.size)

    formatted_size.short_description = "size"

    def formatted_logical_path(self):
        """Logical path for display in the admin."""
        return self._logical_path

    formatted_logical_path.short_description = "logical_path"
Example #12
0
class Cluster(models.Model):
    """A fake cluster, within a physical cluster."""

    name = models.CharField(_("Cluster name"),
                            max_length=50,
                            validators=[core.identifier_validator],
                            unique=True)

    max_volumes = models.PositiveSmallIntegerField(
        verbose_name=_("Max volumes"),
        validators=[MinValueValidator(1), core.max_objects_validator],
        blank=True,
        null=True)
    max_users = models.PositiveSmallIntegerField(
        verbose_name=_("Max users"),
        validators=[MinValueValidator(1), core.max_objects_validator],
        blank=True,
        null=True)
    size = FileSizeField(verbose_name=_("Size"),
                         validators=[core.size_validator],
                         null=True)
    thin_provisioning = models.BooleanField(
        verbose_name=_("Enable thin provisioning"), default=False)

    replicas = models.PositiveSmallIntegerField(
        verbose_name=_("Replicas"),
        help_text=_(
            "Optional. If set, all new volumes in this cluster will use "
            "this replica count."),
        validators=[core.replicas_validator],
        blank=True,
        null=True)

    # NOTE(review): the mutable default ({}) is shared between instances;
    # ``default=dict`` is the usual safe spelling -- confirm the JSONField
    # implementation in use before changing it
    _settings = JSONField(default={}, blank=True)
    """Customization store for the virtual cluster."""

    admins = models.ManyToManyField('accounts.Admin', related_name='_clusters')

    objects = ClusterQuerySet.as_manager()

    class Meta:
        ordering = ['name']

    def __str__(self):
        return self.name

    def clean(self):
        # normalise the name to lower case before validation
        self.name = self.name.lower()
        return super(Cluster, self).clean()

    def validate_unique(self, exclude=None):
        """Case-insensitive uniqueness check for ``name``, merged with the
        default uniqueness validation errors."""
        exclude = exclude or []
        errors = {}

        name = 'name'
        if name not in exclude:
            if Cluster.objects.filter(name__iexact=self.name).exists():
                errors[name] = self.unique_error_message(Cluster, [name])
        exclude.append(name)

        try:
            super(Cluster, self).validate_unique(exclude)
        except ValidationError as e:
            errors.update(e.error_dict)

        if errors:
            raise ValidationError(errors)

    def get_absolute_url(self):
        return reverse('cluster_detail', args=[self.pk])

    def get_setting(self, key):
        """Return a per-cluster setting, falling back to the defaults."""
        return self._settings.get(key) or _default_settings[key]

    def set_setting(self, key, value):
        """Store a known setting and persist only the ``_settings`` field."""
        if key not in _default_settings:
            raise ValueError("Unknown setting: {}".format(key))
        self._settings[key] = value
        self.save(update_fields=['_settings'])

    @property
    def is_root(self):
        """Root cluster groups all objects that don't have a vcluster."""
        return self.name == core._root_cluster_name

    @property
    def used_size(self):
        # total of the used size of every volume in this cluster
        return sum(v.used_size for v in self.volumes)

    def get_used_size_display(self):
        """Human-readable used size, with a percentage when a size is set."""
        usage = self.used_size
        size = self.size
        if size:
            percentage = int(round(100. * usage / size))
            return _("{usage} of {size} ({percentage}%)").format(
                usage=filesizeformat(usage),
                size=filesizeformat(size),
                percentage=percentage)
        else:
            return filesizeformat(usage)

    def get_allocated_size_display(self):
        """Human-readable allocated size, with a percentage when a size is set."""
        allocated = sum(v.size for v in self.volumes)
        size = self.size
        if size:
            percentage = int(round(100. * allocated / size))
            return _("{usage} of {size} ({percentage}%)").format(
                usage=filesizeformat(allocated),
                size=filesizeformat(size),
                percentage=percentage)
        else:
            return filesizeformat(allocated)

    def build_name(self, volume_name):
        """Given a volume name, return a full volume name."""
        if self.is_root:
            return volume_name
        return core.build_name(self.name, volume_name)

    def build_volume_owner(self):
        """Owner name for new volumes: the admin user for root, else the cluster."""
        update_user(self)  # Assure the user exists
        owner = core._admin_user_name if self.is_root else self.name
        return owner

    def build_volume_meta(self):
        """Volume metadata for new volumes; root volumes carry no namespace."""
        return {} if self.is_root else {'namespace': self.namespace}

    @property
    def namespace(self):
        # NOTE(review): str.encode('hex') is Python 2 only; under Python 3
        # this would need e.g. ``self.name.encode().hex()`` -- confirm the
        # target interpreter before changing
        if not self.is_root:
            return self.name.encode('hex')

    @cached_property
    def volumes(self):
        """Get a list of volumes in this cluster."""
        def volume_belongs_to_cluster(data):
            # bug fix: the parameter used to be named ``namespace`` while the
            # body read ``data``, which is only reliably bound as the argument
            if self.is_root:
                return True
            namespace = data['volumeMeta'].get('namespace')
            return namespace == self.namespace

        all_volumes = sx.listVolumes(includeMeta=True)['volumeList']
        volumes = [
            Volume(self, name, data) for name, data in all_volumes.items()
            if volume_belongs_to_cluster(data)
        ]
        return sorted(volumes, key=lambda v: (v.prefix, v.name))

    def get_volume(self, name, refresh=False):
        """Return a volume based on the given name.

        If `refresh` is True, volumes cache will be reloaded first."""
        if refresh:
            try:
                del self.volumes
            except AttributeError:
                pass  # Cache was empty
        for volume in self.volumes:
            if volume.name == name:
                return volume
        raise ValueError('No such volume: {}'.format(name))

    def can_be_deleted(self):
        """A cluster is deletable only when it has no volumes and no users."""
        return not (self.volumes or self.users)

    @cached_property
    def users(self):
        """Get a list of users in this cluster.

        Unlike volumes, user emails are not prefixed with cluster name.
        """
        def user_belongs_to_cluster(user):
            if self.is_root:
                return True
            return self.name in core.get_user_cluster_names(user)

        all_users = sx.listUsers()
        users = [
            User(self, email, data) for email, data in all_users.items()
            if user_belongs_to_cluster(data)
        ]
        return sorted(users, key=lambda u: (u.is_reserved, u.email))

    def get_user(self, email, refresh=False):
        """Return a user based on the given email.

        If `refresh` is True, users cache will be reloaded first."""
        if refresh:
            try:
                del self.users
            except AttributeError:
                pass  # Cache was empty
        for user in self.users:
            if user.email == email:
                return user
        raise ValueError('No such user: {}'.format(email))

    @property
    def expiration(self):
        """The related ClusterExpiration record, or None when not set."""
        try:
            return self._expiration
        except ClusterExpiration.DoesNotExist:
            return None

    @property
    def is_expired(self):
        # expired when an expiration exists and its date is today or earlier
        exp = self.expiration
        return exp and exp.expiration_date <= timezone.now().date()
Example #13
0
class Resource(UniqueNameMixin, TimeStampedModel):
    created_by = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        models.CASCADE,
        null=True, blank=True,
    )
    name = models.CharField(
        'Name',
        max_length=100,
    )
    description = models.TextField('Description', blank=True, null=True)
    slug = AutoSlugField(
        populate_from='name',
        always_update=True,
        max_length=200,
    )
    extension = models.CharField(
        max_length=10,
        blank=True,
        null=True,
    )
    mimetype = models.CharField(
        max_length=150,
        blank=True,
        null=True,
    )
    # private name, without extension
    _filename = models.CharField(
        max_length=100,
        blank=True,
        null=True,
    )
    link = models.CharField(max_length=200, blank=True, null=True)
    metadata = JSONField(blank=True, null=True)
    tags = tagulous.models.TagField(
        force_lowercase=True,
        tree=True,
    )
    file_size = FileSizeField(blank=True, null=True)
    objects = ResourceManager()

    class Meta:
        verbose_name = 'Resource'
        verbose_name_plural = 'Resources'
        ordering = ['name']

    def __str__(self):
        return self.name

    def save(self, *args, **kwargs):
        self.name = self.create_unique(
            self.name,
            'name',
            suffix=' (%s)',
        )
        if not self.id:
            tag_original = [kwargs.pop('tag_original', settings.FILES_GENERAL_TAG)]
            self.tags = tag_original
        super(Resource, self).save(*args, **kwargs)  # Call the "real" save() method.

    @property
    def is_link(self):
        return self.link is not None

    @property
    def is_file(self):
        return self._filename is not None

    @property
    def type(self):
        if self.is_link:
            return settings.FILES_TYPE_LINK
        return settings.FILES_MIMETYPE.get(
            self.mimetype,
            settings.FILES_MIMETYPE.get('default'),
        )

    @property
    def url(self):
        if self.is_link:
            return self.link
        else:
            return self.get_url()

    def get_url(self):
        return reverse(
            'files:download', kwargs={
                'slug': self.file_name,
            },
        )

    def read(self):
        storage = ResourceStorage()
        return storage.open(self._filename).read()

    def hydrate_project(self, project):
        self._project = project

    @property
    def file_name(self):
        return '{}.{}'.format(self.slug, self.extension)

    @property
    def public_tags(self):
        project = self._project
        assert project
        classes_list = get_subclasses(PublicTag)
        classes_list.pop(0)
        tags = []
        for class_obj in classes_list:
            queryset = class_obj.objects.filter_by_project(project)
            for instance in queryset:
                if instance.slug in self.tags.all():
                    tags.extend(instance.public_tags)
        seen = set()
        seen_add = seen.add
        return [x for x in tags if not (x in seen or seen_add(x))]  # remove duplicates

    @property
    def is_general(self):
        # NOTE(review): membership test on a tagulous TagField manager --
        # confirm ``in`` compares against tag names as intended
        return settings.FILES_GENERAL_TAG in self.tags

    @property
    def is_user(self):
        # NOTE(review): same membership caveat as is_general
        return settings.FILES_USER_TAG in self.tags