Beispiel #1
0
 def test_to_model_options_fails(self):
     """Passing tag options together with an explicit tag model must raise."""
     field = tag_models.TagField(test_models.TagMetaUser, force_lowercase=True)
     with self.assertRaises(ValueError) as ctx:
         field._process_deferred_options()
     expected = (
         "Cannot set tag options on explicit tag model "
         "<class 'tests.tagulous_tests_app.models.TagMetaUser'>"
     )
     self.assertEqual(str(ctx.exception), expected)
Beispiel #2
0
class Url(models.Model):
    """A tagged bookmark: a URL with title, submitter nick and tags."""

    url = models.URLField()
    publish_date = models.DateTimeField(auto_now=True)  # refreshed on every save
    title = models.CharField(max_length=200)
    nick = models.CharField(max_length=200)
    tags = tagulous_models.TagField(Tags)

    @classmethod
    def create(cls, url="", title="", nick="", tags=None):
        """Build (but do not save) a Url instance.

        Bug fix: the ``tags`` argument used to be silently discarded
        (``tags=None`` was hard-coded in the constructor call); it is now
        forwarded so callers can create pre-tagged instances.
        """
        instance = cls(title=title, url=url, nick=nick, tags=tags)
        return instance
Beispiel #3
0
    def test_to_model_force_options_works(self):
        """With _set_tag_meta=True, options may override an explicit model's."""
        self.assertTrue(test_models.TagMetaModel.tag_options.force_lowercase)
        field = tag_models.TagField(
            test_models.TagMetaModel, force_lowercase=False, _set_tag_meta=True
        )
        field._process_deferred_options()
        self.assertFalse(test_models.TagMetaModel.tag_options.force_lowercase)

        # Restore force_lowercase for later tests - unittest won't undo this
        test_models.TagMetaModel.tag_options.force_lowercase = True
        self.assertTrue(test_models.TagMetaModel.tag_options.force_lowercase)
Beispiel #4
0
class Experiment(UUIDModelMixin, models.Model):
    """Preliminary work on an OmicsUnit, e.g. a publication or preliminary
    work from a partnering laboratory.
    """

    # Random UUID primary key, fixed at creation time.
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)

    description = models.TextField(_("Description"), blank=True)

    # Position in the omics-area tree; deleting the area removes the experiment.
    omics_area = mptt.fields.TreeForeignKey(
        'OmicsArea',
        on_delete=models.CASCADE,
        related_name='experiments',
        related_query_name='experiment',
    )

    entries = models.ManyToManyField(
        'data.Entry',
        related_name='experiments',
        related_query_name='experiment',
    )

    tags = tgl_models.TagField(to=Tag)

    completed_at = models.DateField(_("Completion date"))

    released_at = models.DateField(_("Release date"))

    # Bookkeeping timestamps: set automatically, never user-editable.
    created_at = models.DateTimeField(auto_now_add=True, editable=False)

    saved_at = models.DateTimeField(auto_now=True, editable=False)

    class Meta:
        ordering = ('completed_at', 'released_at')
        verbose_name = _("Experiment")
        verbose_name_plural = _("Experiments")
Beispiel #5
0
 def test_nulled_tag_field(self):
     """null=True is ignored on a TagField and must produce a check warning."""
     field = tag_models.TagField(null=True)
     checks = field._check_ignored_options()
     self.assertEqual(len(checks), 1)
     self.assertEqual(checks[0].msg, "null has no effect on TagField.")
Beispiel #6
0
 class FailModel_forbidden_symmetrical(models.Model):
     """Invalid model: TagField must reject the M2M ``symmetrical`` option."""
     symmetrical = tag_models.TagField(symmetrical="fail")
Beispiel #7
0
 class FailModel_forbidden_through(models.Model):
     """Invalid model: TagField must reject the M2M ``through`` option."""
     through = tag_models.TagField(through="fail")
Beispiel #8
0
 class FailModel_forbidden_db(models.Model):
     """Invalid model: TagField must reject the M2M ``db_table`` option."""
     db_table = tag_models.TagField(db_table="fail")
Beispiel #9
0
 class FailModel_invalid_to(models.Model):
     """Invalid model: the ``to`` model is not a valid tag model."""
     to_model = tag_models.TagField(test_models.TagFieldModel)
Beispiel #10
0
class Pub(models.Model):
    """A publication: metadata, categorization tags, URLs and relations
    to other publications.
    """

    class Meta:
        verbose_name = 'Publication'
        verbose_name_plural = 'Publications'
        ordering = ['-date']

    # Fallback publication date used when none is known.
    DATE_DEFAULT = '1970/01/01'

    # metadata fields

    slug = models.SlugField(unique=True)

    created_by = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.PROTECT,
        related_name='pubs',
        blank=True,
        null=True,
    )

    creation_date = models.DateTimeField('Created',
        auto_now_add=True,
        editable=False,
    )

    modified_date = models.DateTimeField('Modified',
        auto_now=True,
        editable=False,
    )

    status = models.CharField('Status',
        max_length=10,
        blank=False,
        default='draft',
        choices=STATUS_CHOICES)

    # categories and tags

    pubtype = models.CharField('Publication Type',
        max_length=20,
        blank=False,
        default='journalArticle',
        choices=PUB_TYPES)

    background = models.CharField('Background',
        max_length=20,
        blank=False,
        default='other',
        choices=PUB_BACKGROUNDS)

    tags = TagulousModels.TagField(to=Tag, help_text='&nbsp;', blank=True)

    companytags = TagulousModels.TagField(to=CompanyTag, help_text='&nbsp;', blank=True)

    # related publications (m2m relation to self)

    relatedpubs = models.ManyToManyField('self',
        symmetrical=False,
        blank=True,
        related_name='is_related_to',
        verbose_name='publication(s)')

    # main fields

    title = models.CharField('Title', max_length=255, blank=False)

    description = models.TextField('Description',
        blank=True
    )

    authors = models.CharField('Author(s)',
        max_length=255,
        blank=True,
        help_text=u'One or more authors, format e.g. Firstname Lastname, Firstname Middlename Lastname,...')

    publishers = models.CharField('Publisher or Institution(s)',
        max_length=255,
        blank=True,
        help_text=u'One or more publishers, comma-separated, e.g. book publisher, institution, company, government agency,...')

    publication = models.CharField('Publication Name',
        max_length=255,
        blank=True,
        help_text=u'Title of journal, magazine, news outlet, ')

    date_as_string = models.CharField('Publication Date',
        max_length=10,
        default=DATE_DEFAULT,
        help_text=u'Format: either YYYY or YYYY/MM or YYYY/MM/DD')

    # NOTE: the default below is evaluated once at import time; that is fine
    # here because DATE_DEFAULT is a fixed constant.
    date = models.DateField(
        blank=True,
        default=datetime.strptime(DATE_DEFAULT, '%Y/%m/%d'),
        editable=False)

    url = models.URLField('URL', max_length=2000, blank=True)

    url_dl = models.URLField('URL Download', max_length=2000, blank=True)

    url_dl_preprint = models.URLField('URL Download (preprint)', max_length=2000, blank=True)

    url_archive_org = models.URLField('URL archive.org', max_length=2000, blank=True)

    url_archive_is = models.URLField('URL archive.is', max_length=2000, blank=True)

    doi = models.CharField('DOI', max_length=255, blank=True)

    isbn = models.CharField('ISBN', max_length=20, blank=True)  # 13 digits

    # Bug fix: verbose name was a copy-paste 'ISBN'; this field holds the ISSN.
    issn = models.CharField('ISSN', max_length=10, blank=True)  # 8 digits

    def __str__(self):
        return self.title

    def __init__(self, *args, **kwargs):
        models.Model.__init__(self, *args, **kwargs)

        # list of authors (note: an empty ``authors`` string yields [''])
        self.authors_list = [author.strip() for author in self.authors.split(',')]

        # tests if title already ends with a punctuation mark
        self.title_ends_with_punct = self.title[-1] in ['.', '!', '?'] \
            if len(self.title) > 0 else False

        # further post processing: https://github.com/christianglodt/django-publications/blob/develop/publications/models/publication.py

    def get_relatedpubs_for_user(self, user):
        """Return related publications visible to ``user``.

        Anonymous users only see publications in review/reviewed/published
        status; authenticated users see everything.
        """
        status_filter = None
        if not user.is_authenticated:
            status_filter = ['review', 'reviewed', 'published']
        if status_filter:
            return self.relatedpubs.filter(status__in=status_filter)
        else:
            return self.relatedpubs.all()
Beispiel #11
0
class Topography(models.Model, SubjectMixin):
    """Topography Measurement of a Surface.
    """

    # TODO After upgrade to Django 2.2, use constraints: https://docs.djangoproject.com/en/2.2/ref/models/constraints/
    class Meta:
        ordering = ['measurement_date', 'pk']
        unique_together = (('surface', 'name'),)

    # Physical units selectable for lateral sizes.
    LENGTH_UNIT_CHOICES = [
        ('km', 'kilometers'),
        ('m', 'meters'),
        ('mm', 'millimeters'),
        ('µm', 'micrometers'),
        ('nm', 'nanometers'),
        ('Å', 'angstrom'),
    ]

    # Human-readable text keyed by the tri-state `has_undefined_data` field
    # (None = not yet determined, True/False = determined).
    HAS_UNDEFINED_DATA_DESCRIPTION = {
        None: 'contact.engineering could not (yet) determine if this topography has undefined data points.',
        True: 'The dataset has undefined/missing data points.',
        False: 'No undefined/missing data found.'
    }

    FILL_UNDEFINED_DATA_MODE_NOFILLING = 'do-not-fill'
    FILL_UNDEFINED_DATA_MODE_HARMONIC = 'harmonic'

    FILL_UNDEFINED_DATA_MODE_CHOICES = [
        (FILL_UNDEFINED_DATA_MODE_NOFILLING, 'Do not fill undefined data points'),
        (FILL_UNDEFINED_DATA_MODE_HARMONIC, 'Interpolate undefined data points with harmonic functions'),
    ]

    DETREND_MODE_CHOICES = [
        ('center', 'No detrending, but subtract mean height'),
        ('height', 'Remove tilt'),
        ('curvature', 'Remove curvature and tilt'),
    ]

    INSTRUMENT_TYPE_UNDEFINED = 'undefined'
    INSTRUMENT_TYPE_MICROSCOPE_BASED = 'microscope-based'
    INSTRUMENT_TYPE_CONTACT_BASED = 'contact-based'

    INSTRUMENT_TYPE_CHOICES = [
        (INSTRUMENT_TYPE_UNDEFINED, 'Instrument of unknown type - all data considered as reliable'),
        (INSTRUMENT_TYPE_MICROSCOPE_BASED, 'Microscope-based instrument with known resolution'),
        (INSTRUMENT_TYPE_CONTACT_BASED, 'Contact-based instrument with known tip radius'),
    ]

    # NOTE(review): class-level `verbose_name` attributes have no effect on a
    # Django model unless defined inside `Meta` — confirm whether these are
    # read directly somewhere or should be moved into `Meta`.
    verbose_name = 'measurement'
    verbose_name_plural = 'measurements'

    #
    # Descriptive fields
    #
    surface = models.ForeignKey('Surface', on_delete=models.CASCADE)
    name = models.CharField(max_length=80)
    creator = models.ForeignKey(User, null=True, on_delete=models.SET_NULL)
    measurement_date = models.DateField()
    description = models.TextField(blank=True)
    tags = tm.TagField(to=TagModel)
    analyses = GenericRelation(Analysis, related_query_name='topography',
                               content_type_field='subject_type',
                               object_id_field='subject_id')

    #
    # Fields related to raw data
    #
    datafile = models.FileField(max_length=250, upload_to=user_directory_path)  # currently upload_to not used in forms
    datafile_format = models.CharField(max_length=MAX_LENGTH_DATAFILE_FORMAT,
                                       null=True, default=None, blank=True)
    data_source = models.IntegerField()
    # Django documentation discourages the use of null=True on a CharField. I'll use it here
    # nevertheless, because I need this values as argument to a function where None has
    # a special meaning (autodetection of format). If I would use an empty string
    # as proposed in the docs, I would have to implement extra logic everywhere the field
    # 'datafile_format' is used.

    # All data is also stored in a 'squeezed' format for faster loading and processing
    # This is probably netCDF3. Scales and detrend has already been applied here.
    squeezed_datafile = models.FileField(max_length=260, upload_to=user_directory_path, null=True)

    #
    # Fields with physical meta data
    #
    size_editable = models.BooleanField(default=False)
    size_x = models.FloatField()
    size_y = models.FloatField(null=True)  # null for line scans

    unit_editable = models.BooleanField(default=False)
    unit = models.TextField(choices=LENGTH_UNIT_CHOICES)

    height_scale_editable = models.BooleanField(default=False)
    height_scale = models.FloatField(default=1)

    has_undefined_data = models.BooleanField(null=True, default=None)  # default is undefined
    fill_undefined_data_mode = models.TextField(choices=FILL_UNDEFINED_DATA_MODE_CHOICES,
                                                default=FILL_UNDEFINED_DATA_MODE_NOFILLING)

    detrend_mode = models.TextField(choices=DETREND_MODE_CHOICES, default='center')

    resolution_x = models.IntegerField(null=True)  # null for line scans TODO really?
    resolution_y = models.IntegerField(null=True)  # null for line scans

    bandwidth_lower = models.FloatField(null=True, default=None)  # in meters
    bandwidth_upper = models.FloatField(null=True, default=None)  # in meters

    is_periodic = models.BooleanField(default=False)

    #
    # Fields about instrument and its parameters
    #
    instrument_name = models.CharField(max_length=200, blank=True)
    instrument_type = models.TextField(choices=INSTRUMENT_TYPE_CHOICES, default=INSTRUMENT_TYPE_UNDEFINED)
    instrument_parameters = models.JSONField(default=dict, blank=True)

    #
    # Other fields
    #
    thumbnail = models.ImageField(null=True, upload_to=user_directory_path)

    #
    # Methods
    #
    def __str__(self):
        return f"Topography '{self.name}' from {self.measurement_date}"

    @property
    def label(self):
        """String suitable for display in the UI."""
        return self.name

    @property
    def has_squeezed_datafile(self):
        """True when a squeezed datafile can be retrieved via self.squeezed_datafile."""
        return bool(self.squeezed_datafile)

    @property
    def has_thumbnail(self):
        """True when a thumbnail can be retrieved via self.thumbnail."""
        return bool(self.thumbnail)

    def get_absolute_url(self):
        """URL of the detail page for this topography."""
        return reverse('manager:topography-detail', kwargs={'pk': self.pk})

    def cache_key(self):
        """Cache key used to avoid re-reading datafiles for the same interpretation."""
        return "topography-{}-channel-{}".format(self.id, self.data_source)

    def is_shared(self, with_user, allow_change=False):
        """Whether this topography is shared with a given user.

        Simply delegates to the sharing state of the related surface.

        :param with_user: User to test
        :param allow_change: If True, only return True if topography can be changed by given user
        :return: True or False
        """
        return self.surface.is_shared(with_user, allow_change=allow_change)

    def topography(self, allow_cache=True, allow_squeezed=True):
        """Return a SurfaceTopography.Topography/UniformLineScan/NonuniformLineScan instance.

        The returned instance is guaranteed to

        - have a 'unit' property
        - have a size: .physical_sizes
        - have been scaled and detrended with the saved parameters

        It does not necessarily have a pipeline with all these steps
        and a 'detrend_mode' attribute. That is only guaranteed when
        allow_squeezed=False, in which case the returned instance is
        regenerated from the original file with all steps applied.

        If allow_squeezed=True, the returned topography may be read
        from a cached (squeezed) file with scaling and detrending
        already applied.

        Parameters
        ----------
        allow_cache: bool
            If True (default), the instance is allowed to get the
            topography from cache if available. If not, the topography
            in cache is rewritten.

        allow_squeezed: bool
            If True (default), the instance is allowed to be generated
            from a squeezed datafile which is not the original datafile.
            This is often faster then the original file format.
        """
        if not _IN_CELERY_WORKER_PROCESS and self.size_y is not None:
            _log.warning('You are requesting to load a (2D) topography and you are not within in a Celery worker '
                         'process. This operation is potentially slow and may require a lot of memory - do not use '
                         '`Topography.topography` within the main Django server!')

        cache_key = self.cache_key()

        #
        # Try to get topography from cache if possible
        #
        topo = cache.get(cache_key) if allow_cache else None
        if topo is None:
            # Build dictionary with instrument information from database... this may override data provided by the
            # topography reader
            info = {
                'instrument': {
                    'name': self.instrument_name,
                    'type': self.instrument_type,
                    'parameters': self.instrument_parameters,
                }
            }

            if allow_squeezed:
                try:
                    # If we are allowed to use a squeezed version, create one if not already happened
                    # (also needed for downloads/analyses, so this is saved in the database)
                    if not self.has_squeezed_datafile:
                        from topobank.taskapp.tasks import renew_squeezed_datafile
                        renew_squeezed_datafile.delay(self.id)
                        # this is now done in background, we load the original files instead for minimal delay
                    else:
                        #
                        # Okay, we can use the squeezed datafile, it's already there.
                        #
                        toporeader = get_topography_reader(self.squeezed_datafile, format=SQUEEZED_DATAFILE_FORMAT)
                        topo = toporeader.topography(info=info)
                        # In the squeezed format, these things are already applied/included:
                        # unit, scaling, detrending, physical sizes
                        # so don't need to provide them to the .topography() method
                        _log.info(f"Using squeezed datafile instead of original datafile for topography id {self.id}.")
                # NOTE(review): this also catches read failures of an existing
                # squeezed file, although the message only mentions creation.
                except Exception as exc:
                    _log.error(f"Could not create squeezed datafile for topography with id {self.id}. "
                               "Using original file instead.")
                    topo = None

            # Fall back to (or directly use) the original datafile.
            if topo is None:
                toporeader = get_topography_reader(self.datafile, format=self.datafile_format)
                topography_kwargs = dict(channel_index=self.data_source,
                                         periodic=self.is_periodic,
                                         info=info)

                # Set size if physical size was not given in datafile
                # (see also  TopographyCreateWizard.get_form_initial)
                # Physical size is always a tuple or None.
                channel_dict = toporeader.channels[self.data_source]
                channel_physical_sizes = channel_dict.physical_sizes
                physical_sizes_is_None = channel_physical_sizes is None \
                                         or (channel_physical_sizes == (None,)) \
                                         or (channel_physical_sizes == (None, None))
                # workaround, see GH 299 in Pyco

                if physical_sizes_is_None:
                    if self.size_y is None:
                        topography_kwargs['physical_sizes'] = self.size_x,
                    else:
                        topography_kwargs['physical_sizes'] = self.size_x, self.size_y

                if self.height_scale_editable:
                    # Adjust height scale to value chosen by user
                    topography_kwargs['height_scale_factor'] = self.height_scale

                    # This is only possible and needed, if no height scale was
                    # given in the data file already.
                    # So default is to use the factor from the file.

                #
                # Set the unit, if not already given by file contents
                #
                channel_unit = channel_dict.unit
                if not channel_unit and self.unit:
                    topography_kwargs['unit'] = self.unit

                # Eventually get topography from module "SurfaceTopography" using the given keywords
                topo = toporeader.topography(**topography_kwargs)
                if self.fill_undefined_data_mode != Topography.FILL_UNDEFINED_DATA_MODE_NOFILLING:
                    topo = topo.interpolate_undefined_data(self.fill_undefined_data_mode)
                topo = topo.detrend(detrend_mode=self.detrend_mode)

            cache.set(cache_key, topo)
            # be sure to invalidate the cache key if topography is saved again -> signals.py

        else:
            _log.info(f"Using topography from cache for id {self.id}.")

        return topo

    def renew_analyses(self):
        """Submit all analyses for this topography (delegates to renew_analyses_for_subject)."""
        renew_analyses_for_subject(self)

    def to_dict(self):
        """Return a dictionary with this topography's metadata for json/yaml export."""
        size = [self.size_x] if self.size_y is None else [self.size_x, self.size_y]
        result = {
            'name': self.name,
            'datafile': {
                'original': self.datafile.name,
                'squeezed-netcdf': self.squeezed_datafile.name,
            },
            'data_source': self.data_source,
            'has_undefined_data': self.has_undefined_data,
            'fill_undefined_data_mode': self.fill_undefined_data_mode,
            'detrend_mode': self.detrend_mode,
            'is_periodic': self.is_periodic,
            'creator': {'name': self.creator.name, 'orcid': self.creator.orcid_id},
            'measurement_date': self.measurement_date,
            'description': self.description,
            'unit': self.unit,
            'size': size,
            'tags': [tag.name for tag in self.tags.order_by('name')],
            'instrument': {
                'name': self.instrument_name,
                'type': self.instrument_type,
                'parameters': self.instrument_parameters,
            },
        }
        # Height scale is only exported when it is user-editable (see GH 718).
        if self.height_scale_editable:
            result['height_scale'] = self.height_scale

        return result

    def deepcopy(self, to_surface):
        """Creates a copy of this topography with all data files copied.

        Parameters
        ----------
        to_surface: Surface
            target surface

        Returns
        -------
        The copied topography.
        The reference to an instrument is not copied, it is always None.

        """

        # Fetch a fresh instance and clear its pk so save() inserts a new row.
        copy = Topography.objects.get(pk=self.pk)
        copy.pk = None
        copy.surface = to_surface

        # Duplicate the datafile in storage so the copy owns its own file.
        with self.datafile.open(mode='rb') as datafile:
            copy.datafile = default_storage.save(self.datafile.name, File(datafile))

        # Re-assign tags via the tag list from the tagging framework.
        copy.tags = self.tags.get_tag_list()
        copy.save()

        return copy

    def get_plot(self, thumbnail=False):
        """Return bokeh plot.

        Parameters
        ----------
        thumbnail
            boolean, if True, return a reduced plot suitable for a thumbnail

        Returns
        -------
        A bokeh figure for this topography (1D line plot or 2D image plot).

        Raises
        ------
        LoadTopographyException
            if the underlying topography cannot be loaded
        PlotTopographyException
            if plot generation fails or the data is neither 1D nor 2D
        """
        try:
            st_topo = self.topography()  # SurfaceTopography instance (=st)
        except Exception as exc:
            raise LoadTopographyException("Can't load topography.") from exc

        if st_topo.dim == 1:
            try:
                return self._get_1d_plot(st_topo, reduced=thumbnail)
            except Exception as exc:
                raise PlotTopographyException("Error generating 1D plot for topography.") from exc
        elif st_topo.dim == 2:
            try:
                return self._get_2d_plot(st_topo, reduced=thumbnail)
            except Exception as exc:
                raise PlotTopographyException("Error generating 2D plot for topography.") from exc
        else:
            raise PlotTopographyException("Can only plot 1D or 2D topographies, this has {} dimensions.".format(
                st_topo.dim
            ))

    def _get_1d_plot(self, st_topo, reduced=False):
        """Calculate 1D line plot of topography (line scan).

        :param st_topo: SurfaceTopography.Topography instance
        :param reduced: if True, produce a stripped-down plot (no toolbar/axes) for thumbnails
        :return: bokeh plot
        """
        x, y = st_topo.positions_and_heights()

        x_range = DataRange1d(bounds='auto')
        y_range = DataRange1d(bounds='auto')

        # Doubled braces survive the .format() call below as literal CSS braces;
        # the two format slots insert the measurement unit.
        TOOLTIPS = """
            <style>
                .bk-tooltip>div:not(:first-child) {{display:none;}}
                td.tooltip-varname {{ text-align:right; font-weight: bold}}
            </style>

            <table>
              <tr>
                <td class="tooltip-varname">x</td>
                <td>:</td>
                <td>@x {}</td>
              </tr>
              <tr>
                <td class="tooltip-varname">height</td>
                <td>:</td>
                <td >@y {}</td>
              </tr>
            </table>
        """.format(self.unit, self.unit)

        if reduced:
            toolbar_location = None
        else:
            toolbar_location = 'above'

        plot = figure(x_range=x_range, y_range=y_range,
                      x_axis_label=f'Position ({self.unit})',
                      y_axis_label=f'Height ({self.unit})',
                      tooltips=TOOLTIPS,
                      toolbar_location=toolbar_location)

        # Only draw point markers for small datasets to keep the plot readable.
        show_symbols = y.shape[0] <= MAX_NUM_POINTS_FOR_SYMBOLS_IN_LINE_SCAN_PLOT

        if reduced:
            line_kwargs = dict(line_width=3)
        else:
            line_kwargs = dict()

        plot.line(x, y, **line_kwargs)
        if show_symbols:
            plot.circle(x, y)

        configure_plot(plot)
        if reduced:
            plot.xaxis.visible = False
            plot.yaxis.visible = False
            plot.grid.visible = False

        # see js function "format_exponential()" in project.js file
        plot.xaxis.formatter = FuncTickFormatter(code="return format_exponential(tick);")
        plot.yaxis.formatter = FuncTickFormatter(code="return format_exponential(tick);")

        plot.toolbar.logo = None

        return plot

    def _get_2d_plot(self, st_topo, reduced=False):
        """Calculate 2D image plot of topography.

        :param st_topo: SurfaceTopography.Topography instance
        :param reduced: if True, produce a stripped-down plot (no toolbar/axes/colorbar) for thumbnails
        :return: bokeh plot
        """
        heights = st_topo.heights()

        topo_size = st_topo.physical_sizes
        # x_range = DataRange1d(start=0, end=topo_size[0], bounds='auto')
        # y_range = DataRange1d(start=0, end=topo_size[1], bounds='auto')
        x_range = DataRange1d(start=0, end=topo_size[0], bounds='auto', range_padding=5)
        y_range = DataRange1d(start=topo_size[1], end=0, flipped=True, range_padding=5)

        color_mapper = LinearColorMapper(palette="Viridis256", low=heights.min(), high=heights.max())

        TOOLTIPS = [
            ("Position x", "$x " + self.unit),
            ("Position y", "$y " + self.unit),
            ("Height", "@image " + self.unit),
        ]
        colorbar_width = 50

        # Keep the plot frame proportional to the physical aspect ratio,
        # clamped so very wide measurements don't blow up the layout.
        aspect_ratio = topo_size[0] / topo_size[1]
        frame_height = 500
        frame_width = int(frame_height * aspect_ratio)

        if frame_width > 1200:  # rule of thumb, scale down if too wide
            frame_width = 1200
            frame_height = int(frame_width / aspect_ratio)

        if reduced:
            toolbar_location = None
        else:
            toolbar_location = 'above'

        plot = figure(x_range=x_range,
                      y_range=y_range,
                      frame_width=frame_width,
                      frame_height=frame_height,
                      # sizing_mode='scale_both',
                      # aspect_ratio=aspect_ratio,
                      match_aspect=True,
                      x_axis_label=f'x ({self.unit})',
                      y_axis_label=f'y ({self.unit})',
                      # tools=[PanTool(),BoxZoomTool(match_aspect=True), "save", "reset"],
                      tooltips=TOOLTIPS,
                      toolbar_location=toolbar_location)

        configure_plot(plot)
        if reduced:
            plot.xaxis.visible = None
            plot.yaxis.visible = None

        # we need to rotate the height data in order to be compatible with image in Gwyddion
        plot.image([np.rot90(heights)], x=0, y=topo_size[1],
                   dw=topo_size[0], dh=topo_size[1], color_mapper=color_mapper)
        # the anchor point of (0,topo_size[1]) is needed because the y range is flipped
        # in order to have the origin in upper left like in Gwyddion

        plot.toolbar.logo = None

        if not reduced:
            colorbar = ColorBar(color_mapper=color_mapper,
                                label_standoff=12,
                                location=(0, 0),
                                width=colorbar_width,
                                formatter=FuncTickFormatter(code="return format_exponential(tick);"),
                                title=f"height ({self.unit})")
            plot.add_layout(colorbar, 'right')

        return plot

    def _renew_images(self, driver=None):
        """Renew thumbnail and deep zoom images.

        Bug fix: the webdriver created here is now closed in a ``finally``
        block, so it no longer leaks when a non-RuntimeError exception
        escapes the screenshot/thumbnail code. The inconsistent 2-space
        indentation inside the ``try`` block was also normalized.

        Parameters
        ----------
        driver
            selenium webdriver instance, if not given
            a firefox instance is created using
            `utils.get_firefox_webdriver()`
        Returns
        -------
        None
        """

        plot = self.get_plot(thumbnail=True)

        #
        # Create a plot and save a thumbnail image in in-memory file
        #
        generate_driver = not driver
        if generate_driver:
            driver = webdriver_control.create()
            driver.implicitly_wait(1200)

        try:
            try:
                image = get_screenshot_as_png(plot, driver=driver, timeout=20)

                thumbnail_height = 400
                thumbnail_width = int(image.size[0] * thumbnail_height / image.size[1])
                image.thumbnail((thumbnail_width, thumbnail_height))
                image_file = io.BytesIO()
                image.save(image_file, 'PNG')

                #
                # Remove old thumbnail
                #
                self.thumbnail.delete()

                #
                # Save the contents of in-memory file in Django image field
                #
                self.thumbnail.save(
                    f'{self.id}/thumbnail.png',
                    ContentFile(image_file.getvalue()),
                )
            except RuntimeError as exc:
                _log.error(f"Cannot generate thumbnail for topography {self.id}. Reason: {exc}")
                self.thumbnail.delete()
                _log.warning(f"Thumbnail generation failed for topography {self.id}. "
                             "Deleted old thumbnail which could be outdated.")
        finally:
            # Always release the webdriver we created ourselves,
            # even if an unexpected exception is propagating.
            if generate_driver:
                driver.close()  # important to free memory

        if self.size_y is not None:
            # This is a topography (map), we need to create a Deep Zoom Image
            make_dzi(self.topography(), user_directory_path(self, f'{self.id}/dzi'))

    def renew_images(self, driver=None, none_on_error=True):
        """Renew thumbnail field (and, for 2D maps, deep zoom images).

        Parameters
        ----------
        driver
            selenium webdriver instance, if not given
            a firefox instance is created using
            `utils.get_firefox_webdriver()`
        none_on_error: bool
            If True (default), sets thumbnail to None if there are any errors.
            If False, exceptions have to be caught outside.

        Returns
        -------
        None

        Raises
        ------
        ThumbnailGenerationException
            only when none_on_error is False and image generation fails
        """
        try:
            self._renew_images(driver=driver)
        except Exception as exc:
            if none_on_error:
                # Best-effort mode: record the failure, clear the thumbnail and continue.
                self.thumbnail = None
                self.save()
                _log.warning(f"Problems while generating thumbnail for topography {self.id}: {exc}. "
                             "Saving <None> instead.")
                import traceback
                _log.warning(f"Traceback: {traceback.format_exc()}")
            else:
                raise ThumbnailGenerationException from exc

    def _renew_bandwidth_cache(self, st_topo=None):
        """Cache the lower/upper bandwidth (converted to meters) on this instance."""
        if st_topo is None:
            st_topo = self.topography()
        if st_topo.unit is None:
            # Without a unit the bandwidth cannot be converted to meters.
            return
        lower, upper = st_topo.bandwidth()
        to_meters = get_unit_conversion_factor(st_topo.unit, 'm')
        self.bandwidth_lower = to_meters * lower
        self.bandwidth_upper = to_meters * upper

    def renew_squeezed_datafile(self):
        """Renew squeezed datafile file.

        Rereads the topography from the original datafile, updates the
        undefined-data flags and the cached bandwidth, then stores a fresh
        squeezed (NetCDF) copy in the default storage and saves the model.

        Bug fix: the file handle for the temporary NetCDF file is now closed
        via a ``with`` block instead of being leaked; the unused locals
        ``dirname``/``orig_ext`` were removed.
        """
        _log.info(f"Renewing squeezed datafile for topography {self.id}..")
        with tempfile.NamedTemporaryFile() as tmp:
            # Reread topography from original file
            st_topo = self.topography(allow_cache=False, allow_squeezed=False)

            # Check whether original data file has undefined data point and update database accordingly.
            # (We never load the topography so we don't know this until here. `has_undefined_data` can be
            # undefined.)
            parent_topo = st_topo
            while hasattr(parent_topo, 'parent_topography'):
                parent_topo = parent_topo.parent_topography
            self.has_undefined_data = parent_topo.has_undefined_data
            if not self.has_undefined_data:
                self.fill_undefined_data_mode = Topography.FILL_UNDEFINED_DATA_MODE_NOFILLING

            # Cache bandwidth for bandwidth plot in database. Data is stored in units of meter.
            self._renew_bandwidth_cache(st_topo)

            # Write and upload NetCDF file
            st_topo.to_netcdf(tmp.name)
            # Delete old squeezed file
            self.squeezed_datafile.delete()
            # Upload new squeezed file
            orig_stem = os.path.splitext(os.path.basename(self.datafile.name))[0]
            squeezed_name = user_directory_path(self, f'{self.id}/{orig_stem}-squeezed.nc')
            with open(tmp.name, mode='rb') as squeezed_file:
                self.squeezed_datafile = default_storage.save(squeezed_name, File(squeezed_file))
            self.save()

    def get_undefined_data_status(self):
        """Return a human-readable description of the undefined-data status."""
        mode_suffixes = {
            Topography.FILL_UNDEFINED_DATA_MODE_NOFILLING:
                ' No correction of undefined data is performed.',
            Topography.FILL_UNDEFINED_DATA_MODE_HARMONIC:
                ' Undefined/missing values are filled in with values obtained '
                'from a harmonic interpolation.',
        }
        text = self.HAS_UNDEFINED_DATA_DESCRIPTION[self.has_undefined_data]
        text += mode_suffixes.get(self.fill_undefined_data_mode, '')
        return text
Beispiel #12
0
class Surface(models.Model, SubjectMixin):
    """Physical Surface.

    There can be many topographies (measurements) for one surface.
    """
    # Closed set of data categories; stored as short keys, displayed with long labels.
    CATEGORY_CHOICES = [
        ('exp', 'Experimental data'),
        ('sim', 'Simulated data'),
        ('dum', 'Dummy data')
    ]

    # Creative-Commons licenses offered on publication; display names come from settings.
    LICENSE_CHOICES = [(k, settings.CC_LICENSE_INFOS[k]['option_name']) for k in ['cc0-1.0', 'ccby-4.0', 'ccbysa-4.0']]

    name = models.CharField(max_length=80)
    creator = models.ForeignKey(User, on_delete=models.CASCADE)
    description = models.TextField(blank=True)
    category = models.TextField(choices=CATEGORY_CHOICES, null=True, blank=False)  # TODO change in character field
    tags = tm.TagField(to=TagModel)
    # Analyses attached via generic relation; the analysis subject may be a surface or topography.
    analyses = GenericRelation(Analysis, related_query_name='surface',
                               content_type_field='subject_type',
                               object_id_field='subject_id')

    # Default manager plus two filtered managers for published/unpublished surfaces.
    objects = models.Manager()
    published = PublishedSurfaceManager()
    unpublished = UnpublishedSurfaceManager()

    class Meta:
        ordering = ['name']
        permissions = (
            ('share_surface', 'Can share surface'),
            ('publish_surface', 'Can publish surface'),
        )

    def __str__(self):
        """Return the surface name, with the publication version appended if published."""
        s = self.name
        if self.is_published:
            s += f" (version {self.publication.version})"
        return s

    @property
    def label(self):
        """Human-readable label for this surface (same as ``str(self)``)."""
        return str(self)

    def get_absolute_url(self):
        """Return the URL of the detail page for this surface."""
        return reverse('manager:surface-detail', kwargs=dict(pk=self.pk))

    def num_topographies(self):
        """Return the number of topographies (measurements) attached to this surface."""
        return self.topography_set.count()

    def to_dict(self, request=None):
        """Create dictionary for export of metadata to json or yaml.

        Does not include topographies. They can be added like this:

         surface_dict = surface.to_dict()
         surface_dict['topographies'] = [t.to_dict() for t in surface.topography_set.order_by('name')]

        Parameters:
            request: HTTPRequest
                Needed for calculating publication URLs.
                If not given, only return relative publication URL.
        Returns:
            dict
        """
        d = {'name': self.name,
             'category': self.category,
             'creator': {'name': self.creator.name, 'orcid': self.creator.orcid_id},
             'description': self.description,
             'tags': [t.name for t in self.tags.order_by('name')],
             'is_published': self.is_published,
             }
        if self.is_published:
            d['publication'] = {
                'url': self.publication.get_full_url(request) if request else self.publication.get_absolute_url(),
                'license': self.publication.get_license_display(),
                'authors': self.publication.authors,
                'version': self.publication.version,
                'date': str(self.publication.datetime.date()),
            }
        return d

    def is_shared(self, with_user, allow_change=False):
        """Returns True, if this surface is shared with a given user.

        Always returns True if user is the creator.

        :param with_user: User to test
        :param allow_change: If True, only return True if surface can be changed by given user
        :return: True or False
        """
        result = with_user.has_perm('view_surface', self)
        if result and allow_change:
            # Viewing alone is not enough; user must also hold the change permission.
            result = with_user.has_perm('change_surface', self)
        return result

    def share(self, with_user, allow_change=False):
        """Share this surface with a given user.

        :param with_user: user to share with
        :param allow_change: if True, also allow changing the surface
        """
        assign_perm('view_surface', with_user, self)
        if allow_change:
            assign_perm('change_surface', with_user, self)

        #
        # Request all standard analyses to be available for that user
        #
        _log.info("After sharing surface %d with user %d, requesting all standard analyses..", self.id, with_user.id)
        from topobank.analysis.models import AnalysisFunction
        from topobank.analysis.utils import request_analysis
        analysis_funcs = AnalysisFunction.objects.all()
        for topo in self.topography_set.all():
            for af in analysis_funcs:
                request_analysis(with_user, af, topo)  # standard arguments

    def unshare(self, with_user):
        """Remove share on this surface for given user.

        If the user has no permissions, nothing happens.

        :param with_user: User to remove share from
        """
        for perm in ['view_surface', 'change_surface']:
            if with_user.has_perm(perm, self):
                remove_perm(perm, with_user, self)

    def deepcopy(self):
        """Creates a copy of this surface with all topographies and meta data.

        The database entries for this surface and all related
        topographies are copied, therefore all meta data.
        All files will be copied.

        References to instruments will not be copied.

        The automated analyses will be triggered for this new surface.

        Returns
        -------
        The copy of the surface.

        """
        # Copy of the surface entry
        # (see https://docs.djangoproject.com/en/2.2/topics/db/queries/#copying-model-instances)

        surface_copy = Surface.objects.get(pk=self.pk)
        surface_copy.pk = None  # clearing the pk makes save() INSERT a new row
        surface_copy.tags = self.tags.get_tag_list()
        surface_copy.save()

        for topo in self.topography_set.all():
            # We pass the new surface here because there is a constraint that
            # (surface_id + topography name) must be unique, i.e. a surface should
            # never have two topographies of the same name, so we can't reassign
            # the new surface as a second step.
            topo.deepcopy(surface_copy)
        surface_copy.renew_analyses()

        _log.info("Created deepcopy of surface %s -> surface %s", self.pk, surface_copy.pk)
        return surface_copy

    def set_publication_permissions(self):
        """Sets all permissions as needed for publication.

        - removes edit, share and delete permission from everyone
        - add read permission for everyone

        Raises
        ------
        PublicationException
            If the change permission could not be withdrawn from the creator.
        """
        # Remove edit, share and delete permission from everyone
        users = get_users_with_perms(self)
        for u in users:
            for perm in ['publish_surface', 'share_surface', 'change_surface', 'delete_surface']:
                remove_perm(perm, u, self)

        # Add read permission for everyone
        assign_perm('view_surface', get_default_group(), self)

        from guardian.shortcuts import get_perms
        # TODO for unknown reasons, when not in Docker, the published surfaces are still changeable
        # Here "remove_perm" does not work. We do not allow this. See GH 704.
        if 'change_surface' in get_perms(self.creator, self):
            raise PublicationException("Withdrawing permissions for publication did not work!")

    def publish(self, license, authors):
        """Publish surface.

        An immutable copy is created along with a publication entry.
        The latter is returned.

        Parameters
        ----------
        license: str
            One of the keys of LICENSE_CHOICES
        authors: str
            Comma-separated string of author names;

        Returns
        -------
        Publication

        Raises
        ------
        AlreadyPublishedException
            If this surface itself is already a published copy.
        NewPublicationTooFastException
            If the rate limit between publications of the same surface is hit.
        """
        if self.is_published:
            raise AlreadyPublishedException()

        latest_publication = Publication.objects.filter(original_surface=self).order_by('version').last()
        #
        # We limit the publication rate
        #
        min_seconds = settings.MIN_SECONDS_BETWEEN_SAME_SURFACE_PUBLICATIONS
        if latest_publication and (min_seconds is not None):
            delta_since_last_pub = timezone.now() - latest_publication.datetime
            delta_secs = delta_since_last_pub.total_seconds()
            if delta_secs < min_seconds:
                raise NewPublicationTooFastException(latest_publication, math.ceil(min_seconds - delta_secs))

        #
        # Create a copy of this surface
        #
        surface_copy = self.deepcopy()

        try:
            surface_copy.set_publication_permissions()
        except PublicationException:
            # see GH 704 -- if the copy remains editable we must not publish it
            _log.error("Could not set permission for copied surface to publish .. deleting copy of surface %s.",
                       self.pk)
            surface_copy.delete()
            raise

        #
        # Create publication
        #
        if latest_publication:
            version = latest_publication.version + 1
        else:
            version = 1

        pub = Publication.objects.create(surface=surface_copy, original_surface=self,
                                         authors=authors,
                                         license=license,
                                         version=version,
                                         publisher=self.creator,
                                         publisher_orcid_id=self.creator.orcid_id)

        # Lazy %-formatting so the message is only built if the log level is enabled.
        _log.info("Published surface %s (id: %s) with license %s, version %s, authors '%s'",
                  self.name, self.id, license, version, authors)
        _log.info("URL of publication: %s", pub.get_absolute_url())

        return pub

    @property
    def is_published(self):
        """Returns True, if a publication for this surface exists.
        """
        return hasattr(self, 'publication')  # checks whether the related object surface.publication exists

    def renew_analyses(self, include_topographies=True):
        """Renew analyses related to this surface.

        This includes analyses
        - with any of its topographies as subject  (if include_topographies=True)
        - with this surfaces as subject
        This is done in that order.
        """
        if include_topographies:
            _log.info("Regenerating analyses of topographies of surface %d..", self.pk)
            for topo in self.topography_set.all():
                topo.renew_analyses()
        _log.info("Regenerating analyses directly related to surface %d..", self.pk)
        renew_analyses_for_subject(self)
# Beispiel #13 (score: 0)
class Analysis(UUIDModelMixin, models.Model):
    """An analysis from a set of pixels
    """
    # Django `upload_to` callable: files are stored per-user and per-analysis.
    # NOTE(review): defined without `self` on purpose — Django calls it as
    # upload_to(instance, filename), not as a bound method.
    def secondary_data_upload_to(instance, filename):
        return '{}/analyses/{}/secondary_data/{}'.format(
            instance.pixeler.id, instance.id, filename)

    # Same pattern for the notebook file: <user-id>/analyses/<analysis-id>/notebook/<name>.
    def notebook_upload_to(instance, filename):
        return '{}/analyses/{}/notebook/{}'.format(instance.pixeler.id,
                                                   instance.id, filename)

    # Random UUID as primary key; not editable after creation.
    id = models.UUIDField(
        primary_key=True,
        default=uuid.uuid4,
        editable=False,
    )

    # Free-text description; optional.
    description = models.TextField(
        _("Description"),
        blank=True,
    )

    # An analysis may cover several experiments and vice versa.
    experiments = models.ManyToManyField(
        'Experiment',
        related_name='analyses',
        related_query_name='analysis',
    )

    # Required result file produced by the analysis.
    secondary_data = models.FileField(
        _("Secondary data"),
        upload_to=secondary_data_upload_to,
        max_length=255,
    )

    # Optional supporting document (e.g. a Jupyter notebook).
    notebook = models.FileField(
        _("Notebook"),
        help_text=_(
            "Upload your Jupiter Notebook or any other document helping to "
            "understand your analysis"),
        blank=True,
        upload_to=notebook_upload_to,
        max_length=255,
    )

    # The user who performed the analysis; deleting the user deletes their analyses.
    pixeler = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        related_name='analyses',
        related_query_name='analysis',
    )

    # Free tagging via django-tagulous.
    tags = tgl_models.TagField(to=Tag, )

    # Date the analysis was completed (required).
    completed_at = models.DateField(_("Completion date"), )

    # Set once on INSERT; never updated afterwards.
    created_at = models.DateTimeField(
        auto_now_add=True,
        editable=False,
    )

    # Refreshed on every save().
    saved_at = models.DateTimeField(
        auto_now=True,
        editable=False,
    )

    class Meta:
        ordering = ('pixeler', 'completed_at')
        verbose_name = _("Analysis")
        verbose_name_plural = _("Analyses")