Example #1
 def as_sqlite(self, compiler, connection):
     geo_field = GeometryField(
         srid=self.srid)  # Fake field to get SRID info
     if geo_field.geodetic(connection):
         raise NotImplementedError(
             "Perimeter cannot use a non-projected field.")
     return super(Perimeter, self).as_sql(compiler, connection)
Example #2
 def as_postgresql(self, compiler, connection):
     geo_field = GeometryField(srid=self.srid)  # Fake field to get SRID info
     if geo_field.geodetic(connection) and not self.source_is_geography():
         raise NotImplementedError("ST_Perimeter cannot use a non-projected non-geography field.")
     dim = min(f.dim for f in self.get_source_fields())
     if dim > 2:
         self.function = connection.ops.perimeter3d
     return super(Perimeter, self).as_sql(compiler, connection)
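
A minimal usage sketch for the Perimeter overrides above. The Zone model and its projected PolygonField are hypothetical; the annotate() API is standard GeoDjango:

from django.contrib.gis.db.models.functions import Perimeter

# Zone is assumed to define poly = models.PolygonField(srid=2193), a projected SRID;
# on SpatiaLite a geodetic (lon/lat) field would raise NotImplementedError as above.
zones = Zone.objects.annotate(perim=Perimeter('poly'))
print(zones[0].perim.m)  # a Distance measure, readable in meters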
Example #3
 def as_sqlite(self, compiler, connection):
     geo_field = GeometryField(srid=self.srid)
     if geo_field.geodetic(connection):
         if self.spheroid:
             self.function = 'GeodesicLength'
         else:
             self.function = 'GreatCircleLength'
     return super(Length, self).as_sql(compiler, connection)
Example #4
 def as_sql(self, compiler, connection):
     geo_field = GeometryField(
         srid=self.srid)  # Fake field to get SRID info
     if (geo_field.geodetic(connection) and
             not connection.features.supports_length_geodetic):
         raise NotImplementedError(
             "This backend doesn't support Length on geodetic fields")
     return super(Length, self).as_sql(compiler, connection)
Example #5
 def as_postgresql(self, compiler, connection):
     geo_field = GeometryField(srid=self.srid)  # Fake field to get SRID info
     if self.source_is_geography():
         self.source_expressions.append(Value(self.spheroid))
     elif geo_field.geodetic(connection):
         # Geometry fields with geodetic (lon/lat) coordinates need length_spheroid
         self.function = 'ST_Length_Spheroid'
         self.source_expressions.append(Value(geo_field._spheroid))
     else:
         dim = min(f.dim for f in self.get_source_fields() if f)
         if dim > 2:
             self.function = connection.ops.length3d
     return super(Length, self).as_sql(compiler, connection)
Example #6
 def as_postgresql(self, compiler, connection):
     function = None
     geo_field = GeometryField(srid=self.srid)  # Fake field to get SRID info
     if self.source_is_geography():
         self.source_expressions.append(Value(self.spheroid))
     elif geo_field.geodetic(connection):
         # Geometry fields with geodetic (lon/lat) coordinates need length_spheroid
         function = connection.ops.spatial_function_name('LengthSpheroid')
         self.source_expressions.append(Value(geo_field.spheroid(connection)))
     else:
         dim = min(f.dim for f in self.get_source_fields() if f)
         if dim > 2:
             function = connection.ops.length3d
     return super().as_sql(compiler, connection, function=function)
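
A hedged usage sketch for the Length overrides above. Road and its LineStringField are hypothetical; Length is standard GeoDjango:

from django.contrib.gis.db.models.functions import Length

# Road is assumed to define path = models.LineStringField(srid=4326).
# On PostGIS the geodetic field takes the LengthSpheroid branch shown above.
roads = Road.objects.annotate(length=Length('path', spheroid=True))
print(roads[0].length.km)  # a Distance measure, convertible between units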
Example #7
 def convert_value(self, value, expression, connection, context):
     if value is None:
         return None
     geo_field = GeometryField(srid=self.srid)  # Fake field to get SRID info
     if geo_field.geodetic(connection):
         dist_att = 'm'
     else:
         units = geo_field.units_name(connection)
         if units:
             dist_att = DistanceMeasure.unit_attname(units)
         else:
             dist_att = None
     if dist_att:
         return DistanceMeasure(**{dist_att: value})
     return value
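
The convert_value() above wraps the raw database float in a Distance measure, so callers get unit conversion for free. A small sketch of that measure API (standard django.contrib.gis.measure):

from django.contrib.gis.measure import Distance as DistanceMeasure

d = DistanceMeasure(m=1500.0)  # geodetic results come back in meters ('m')
print(d.km)  # 1.5
print(d.mi)  # roughly 0.93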
Example #8
    def as_postgresql(self, compiler, connection):
        clone = self.copy()
        function = None
        expr2 = clone.source_expressions[1]
        geography = self.source_is_geography()
        if expr2.output_field.geography != geography:
            if isinstance(expr2, Value):
                expr2.output_field.geography = geography
            else:
                clone.source_expressions[1] = Cast(
                    expr2,
                    GeometryField(srid=expr2.output_field.srid, geography=geography),
                )

        if not geography and self.geo_field.geodetic(connection):
            # Geometry fields with geodetic (lon/lat) coordinates need special distance functions
            if self.spheroid:
                # DistanceSpheroid is more accurate and resource intensive than DistanceSphere
                function = connection.ops.spatial_function_name("DistanceSpheroid")
                # Replace boolean param by the real spheroid of the base field
                clone.source_expressions.append(Value(self.geo_field.spheroid(connection)))
            else:
                function = connection.ops.spatial_function_name("DistanceSphere")
        return super(Distance, clone).as_sql(compiler, connection, function=function)
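
A usage sketch for the Distance function above. City and its PointField are hypothetical; Distance and Point are standard GeoDjango:

from django.contrib.gis.db.models.functions import Distance
from django.contrib.gis.geos import Point

# City is assumed to define point = models.PointField() (SRID 4326, geodetic).
ref = Point(172.3, -43.5, srid=4326)
# spheroid=True selects the more accurate DistanceSpheroid branch above.
cities = City.objects.annotate(d=Distance('point', ref, spheroid=True))
print(cities[0].d.km)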
Example #9
    def __init__(self, *expressions, **extra):
        if 'output_field' not in extra and self.output_field_class:
            extra['output_field'] = self.output_field_class()
        super().__init__(*expressions, **extra)

        # Ensure that value expressions are geometric.
        for pos in self.geom_param_pos:
            expr = self.source_expressions[pos]
            if not isinstance(expr, Value):
                continue
            try:
                output_field = expr.output_field
            except FieldError:
                output_field = None
            geom = expr.value
            if not isinstance(geom, Geometry) or (
                    output_field and not isinstance(output_field, GeometryField)):
                raise TypeError(
                    "%s function requires a geometric argument in position %d."
                    % (self.name, pos + 1))
            if not geom.srid and not output_field:
                raise ValueError("SRID is required for all geometries.")
            if not output_field:
                self.source_expressions[pos] = Value(
                    geom, output_field=GeometryField(srid=geom.srid))
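
A sketch of how the validation above plays out when a raw geometry is passed to a geo function. Region and its poly field are hypothetical; Intersection and Polygon are standard GeoDjango:

from django.contrib.gis.db.models.functions import Intersection
from django.contrib.gis.geos import Polygon

clip = Polygon.from_bbox((0.0, 0.0, 10.0, 10.0))
clip.srid = 4326  # without an SRID, __init__ raises "SRID is required for all geometries."
# The mixin wraps clip in Value(clip, output_field=GeometryField(srid=4326)).
regions = Region.objects.annotate(part=Intersection('poly', clip))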
Example #10
 def test_deconstruct_values(self):
     field = GeometryField(
         srid=4067,
         dim=3,
         geography=True,
         extent=(50199.4814, 6582464.0358, -50000.0, 761274.6247, 7799839.8902, 50000.0),
         tolerance=0.01,
     )
     *_, kwargs = field.deconstruct()
     self.assertEqual(kwargs, {
         'srid': 4067,
         'dim': 3,
         'geography': True,
         'extent': (50199.4814, 6582464.0358, -50000.0, 761274.6247, 7799839.8902, 50000.0),
         'tolerance': 0.01,
     })
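
deconstruct() is what migrations use to recreate the field; a minimal round-trip sketch under the same assumptions as the test:

field = GeometryField(srid=4067, dim=3)
name, path, args, kwargs = field.deconstruct()
clone = GeometryField(*args, **kwargs)
assert clone.srid == field.srid and clone.dim == field.dim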
Example #11
class ZipCode(models.Model):
    code = models.CharField(max_length=5, unique=True)
    bounds = GeometryField(geography=True)
    center = PointField()

    def __str__(self):
        return self.code
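
A hedged query sketch against this model, assuming a PostGIS backend (where distance lookups on a geodetic PointField are supported):

from django.contrib.gis.geos import Point
from django.contrib.gis.measure import D

pt = Point(-104.9903, 39.7392, srid=4326)
nearby = ZipCode.objects.filter(center__distance_lte=(pt, D(mi=5)))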
Example #12
class Location(models.Model):
    """
    Provide a location, a geospatial polygon describing a part of the school::

        >>> library = Location.objects.create(name='Library',
        ...     polygon='POLYGON ((1 1, 3 3, 3 1, 1 1))')
        >>> library.save()
        >>> print(library)
        Library
        >>> print(library.polygon)
        SRID=4326;POLYGON ((1 1, 3 3, 3 1, 1 1))

    Locate the centre of the polygon using the ``centroid`` property::

        >>> print(library.polygon.centroid)
        SRID=4326;POINT (2.333333333333334 1.666666666666667)

    """
    locationid = models.AutoField(primary_key=True)
    name = models.CharField(
        max_length=50,
        help_text=_(
            "Useful name for this location - by convention uses CamelCase"))
    polygon = GeometryField(help_text=_("Polygon or Multipolygon"))

    def __str__(self):
        "Returns the location's name."
        return self.name

    class Meta:
        ordering = ('name', )
Example #13
 def __init__(self, expression, srid, **extra):
     expressions = [
         expression,
         self._handle_param(srid, 'srid', int),
     ]
     if 'output_field' not in extra:
         extra['output_field'] = GeometryField(srid=srid)
     super().__init__(*expressions, **extra)
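
A usage sketch for Transform (City and its point field are hypothetical; Transform is standard GeoDjango):

from django.contrib.gis.db.models.functions import Transform

# The output_field default above makes the annotated geometry report the target SRID.
qs = City.objects.annotate(mercator=Transform('point', 3857))
print(qs[0].mercator.srid)  # 3857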
Example #14
 def __init__(self, expression, srid, **extra):
     expressions = [
         expression,
         self._handle_param(srid, "srid", int),
     ]
     if "output_field" not in extra:
         extra["output_field"] = GeometryField(srid=srid)
     super().__init__(*expressions, **extra)
Example #15
 def as_postgresql(self, compiler, connection):
     geo_field = GeometryField(srid=self.srid)  # Fake field to get SRID info
     if self.source_is_geography():
         # Set parameters as geography if base field is geography
         for pos, expr in enumerate(
                 self.source_expressions[self.geom_param_pos + 1:], start=self.geom_param_pos + 1):
             if isinstance(expr, GeomValue):
                 expr.geography = True
     elif geo_field.geodetic(connection):
         # Geometry fields with geodetic (lon/lat) coordinates need special distance functions
         if self.spheroid:
             self.function = 'ST_Distance_Spheroid'  # More accurate, resource intensive
             # Replace boolean param by the real spheroid of the base field
             self.source_expressions[2] = Value(geo_field._spheroid)
         else:
             self.function = 'ST_Distance_Sphere'
     return super(Distance, self).as_sql(compiler, connection)
Example #16
 def __init__(self, expression, srid, **extra):
     expressions = [
         expression,
         self._handle_param(srid, 'srid', six.integer_types),
     ]
     if 'output_field' not in extra:
         extra['output_field'] = GeometryField(srid=srid)
     super(Transform, self).__init__(*expressions, **extra)
Example #17
class vols(models.Model):
    #id = models.UUIDField(primary_key=True, default=uuid_module.uuid4, unique=True, serialize=False, editable=False)
    created = models.DateTimeField(auto_now_add=True)
    name = models.CharField(max_length=150)
    id_vol = models.IntegerField(default=0)
    donnees = models.CharField(max_length=1500)
    #ortho = models.RasterField()
    orthophoto = GeometryField(
        null=True,
        blank=True,
        srid=4326,
        help_text="Extent of the orthophoto created by OpenDroneMap")
    dsm_extent = GeometryField(
        null=True,
        blank=True,
        srid=4326,
        help_text="Extent of the DSM created by OpenDroneMap")
    dtm_extent = GeometryField(
        null=True,
        blank=True,
        srid=4326,
        help_text="Extent of the DTM created by OpenDroneMap")
Example #18
 def as_postgresql(self, compiler, connection):
     function = None
     geo_field = GeometryField(
         srid=self.srid)  # Fake field to get SRID info
     if self.source_is_geography():
         # Set parameters as geography if base field is geography
         for pos, expr in enumerate(
                 self.source_expressions[self.geom_param_pos + 1:],
                 start=self.geom_param_pos + 1):
             if isinstance(expr, GeomValue):
                 expr.geography = True
     elif geo_field.geodetic(connection):
         # Geometry fields with geodetic (lon/lat) coordinates need special distance functions
         if self.spheroid:
             # DistanceSpheroid is more accurate and resource intensive than DistanceSphere
             function = connection.ops.spatial_function_name(
                 'DistanceSpheroid')
             # Replace boolean param by the real spheroid of the base field
             self.source_expressions[2] = Value(geo_field._spheroid)
         else:
             function = connection.ops.spatial_function_name(
                 'DistanceSphere')
     return super().as_sql(compiler, connection, function=function)
Example #19
    def as_postgresql(self, compiler, connection):
        function = None
        geo_field = GeometryField(srid=self.srid)  # Fake field to get SRID info
        expr2 = self.source_expressions[1]
        geography = self.source_is_geography()
        if expr2.output_field.geography != geography:
            if isinstance(expr2, Value):
                expr2.output_field.geography = geography
            else:
                self.source_expressions[1] = Cast(
                    expr2,
                    GeometryField(srid=expr2.output_field.srid, geography=geography),
                )

        if not geography and geo_field.geodetic(connection):
            # Geometry fields with geodetic (lon/lat) coordinates need special distance functions
            if self.spheroid:
                # DistanceSpheroid is more accurate and resource intensive than DistanceSphere
                function = connection.ops.spatial_function_name('DistanceSpheroid')
                # Replace boolean param by the real spheroid of the base field
                self.source_expressions[2] = Value(geo_field.spheroid(connection))
            else:
                function = connection.ops.spatial_function_name('DistanceSphere')
        return super().as_sql(compiler, connection, function=function)
Example #20
 def as_sqlite(self, compiler, connection):
     function = None
     geo_field = GeometryField(srid=self.srid)
     if geo_field.geodetic(connection):
         function = 'GeodesicLength' if self.spheroid else 'GreatCircleLength'
     return super().as_sql(compiler, connection, function=function)
Example #21
 def geo_field(self):
     return GeometryField(srid=self.srid) if self.srid else None
Example #22
class Location(models.Model):
    id = models.BigIntegerField(primary_key=True)
    name = models.TextField(blank=True, null=True)
    division_id = models.TextField(blank=True, null=True)
    feature_type = models.TextField(blank=True, null=True)
    tags = models.JSONField(blank=True, null=True)
    sfm = models.JSONField(blank=True, null=True)
    adminlevel1 = models.ForeignKey('self',
                                    related_name='area_locations',
                                    on_delete=models.CASCADE,
                                    null=True,
                                    blank=True)
    adminlevel2 = models.ForeignKey('self',
                                    related_name='place_locations',
                                    on_delete=models.CASCADE,
                                    null=True,
                                    blank=True)
    adminlevel = models.CharField(max_length=50, null=True, blank=True)
    geometry = GeometryField(blank=True, null=True)

    objects = LocationManager()

    def __str__(self):
        if self.name is None:
            return str(self.id)
        return self.name

    @property
    def related_entities(self):
        """
        Return a list of dicts with metadata for all of the entities (Organizations
        and Violations) linked to this Location via an Emplacement or Association.

        Metadata dicts must have the following keys:
            - name
            - entity_type
            - start_date
            - end_date
            - open_ended
            - url (a link to edit the entity)
        """
        related_entities = []

        for associationarea in self.associationarea_set.all():
            association = associationarea.object_ref
            organization = association.organization.get_value().value
            related_entities.append({
                'name': organization.name.get_value().value,
                'entity_type': _('Organization'),
                'start_date': association.startdate.get_value(),
                'end_date': association.enddate.get_value(),
                'open_ended': association.open_ended.get_value(),
                'url': reverse('view-organization', kwargs={'slug': organization.uuid}),
            })

        for emplacementsite in self.emplacementsite_set.all():
            emplacement = emplacementsite.object_ref
            organization = emplacement.organization.get_value().value
            related_entities.append({
                'name': organization.name.get_value().value,
                'entity_type': _('Organization'),
                'start_date': emplacement.startdate.get_value(),
                'end_date': emplacement.enddate.get_value(),
                'open_ended': emplacement.open_ended.get_value(),
                'url': reverse('view-organization', kwargs={'slug': organization.uuid}),
            })

        for violationlocation in self.violationlocation_set.all():
            violation = violationlocation.object_ref
            related_entities.append({
                'name': truncatewords(violation.description.get_value(), 10),
                'entity_type': _('Violation'),
                'start_date': violation.startdate.get_value(),
                'end_date': violation.enddate.get_value(),
                'open_ended': '',
                'url': reverse('view-violation', kwargs={'slug': violation.uuid}),
            })

        return related_entities

    @property
    def osm_feature_type(self):
        if self.feature_type == 'boundary':
            return 'relation'
        return self.feature_type
Example #23
class Task(models.Model):
    ASSETS_MAP = {
        'all.zip': 'all.zip',
        'orthophoto.tif': os.path.join('odm_orthophoto', 'odm_orthophoto.tif'),
        'orthophoto.png': os.path.join('odm_orthophoto', 'odm_orthophoto.png'),
        'orthophoto.mbtiles': os.path.join('odm_orthophoto', 'odm_orthophoto.mbtiles'),
        'georeferenced_model.las': os.path.join('odm_georeferencing', 'odm_georeferenced_model.las'),
        'georeferenced_model.laz': os.path.join('odm_georeferencing', 'odm_georeferenced_model.laz'),
        'georeferenced_model.ply': os.path.join('odm_georeferencing', 'odm_georeferenced_model.ply'),
        'georeferenced_model.csv': os.path.join('odm_georeferencing', 'odm_georeferenced_model.csv'),
        'textured_model.zip': {
            'deferred_path': 'textured_model.zip',
            'deferred_compress_dir': 'odm_texturing'
        },
        'dtm.tif': os.path.join('odm_dem', 'dtm.tif'),
        'dsm.tif': os.path.join('odm_dem', 'dsm.tif'),
    }

    STATUS_CODES = (
        (status_codes.QUEUED, 'QUEUED'),
        (status_codes.RUNNING, 'RUNNING'),
        (status_codes.FAILED, 'FAILED'),
        (status_codes.COMPLETED, 'COMPLETED'),
        (status_codes.CANCELED, 'CANCELED'),
    )

    PENDING_ACTIONS = (
        (pending_actions.CANCEL, 'CANCEL'),
        (pending_actions.REMOVE, 'REMOVE'),
        (pending_actions.RESTART, 'RESTART'),
        (pending_actions.RESIZE, 'RESIZE'),
    )

    # Not an exact science
    TASK_OUTPUT_MILESTONES = {
        'Running ODM Load Dataset Cell': 0.01,
        'Running ODM Load Dataset Cell - Finished': 0.05,
        'opensfm/bin/opensfm match_features': 0.10,
        'opensfm/bin/opensfm reconstruct': 0.20,
        'opensfm/bin/opensfm export_visualsfm': 0.30,
        'Running ODM Meshing Cell': 0.60,
        'Running MVS Texturing Cell': 0.65,
        'Running ODM Georeferencing Cell': 0.70,
        'Running ODM DEM Cell': 0.80,
        'Running ODM Orthophoto Cell': 0.85,
        'Running ODM OrthoPhoto Cell - Finished': 0.90,
        'Compressing all.zip:': 0.95
    }

    id = models.UUIDField(primary_key=True,
                          default=uuid_module.uuid4,
                          unique=True,
                          serialize=False,
                          editable=False)

    uuid = models.CharField(
        max_length=255,
        db_index=True,
        default='',
        blank=True,
        help_text=
        "Identifier of the task (as returned by OpenDroneMap's REST API)")
    project = models.ForeignKey(Project,
                                on_delete=models.CASCADE,
                                help_text="Project that this task belongs to")
    name = models.CharField(max_length=255,
                            null=True,
                            blank=True,
                            help_text="A label for the task")
    processing_time = models.IntegerField(
        default=-1,
        help_text=
        "Number of milliseconds that elapsed since the beginning of this task (-1 indicates that no information is available)"
    )
    processing_node = models.ForeignKey(
        ProcessingNode,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        help_text=
        "Processing node assigned to this task (or null if this task has not been associated yet)"
    )
    auto_processing_node = models.BooleanField(
        default=True,
        help_text=
        "A flag indicating whether this task should be automatically assigned a processing node"
    )
    status = models.IntegerField(choices=STATUS_CODES,
                                 db_index=True,
                                 null=True,
                                 blank=True,
                                 help_text="Current status of the task")
    last_error = models.TextField(
        null=True, blank=True, help_text="The last processing error received")
    options = fields.JSONField(
        default=dict,  # callable default avoids sharing one dict across instances
        blank=True,
        help_text="Options that are being used to process this task",
        validators=[validate_task_options])
    available_assets = fields.ArrayField(
        models.CharField(max_length=80),
        default=list,  # callable default avoids sharing one list across instances
        blank=True,
        help_text="List of available assets to download")
    console_output = models.TextField(
        null=False,
        default="",
        blank=True,
        help_text="Console output of the OpenDroneMap's process")

    orthophoto_extent = GeometryField(
        null=True,
        blank=True,
        srid=4326,
        help_text="Extent of the orthophoto created by OpenDroneMap")
    dsm_extent = GeometryField(
        null=True,
        blank=True,
        srid=4326,
        help_text="Extent of the DSM created by OpenDroneMap")
    dtm_extent = GeometryField(
        null=True,
        blank=True,
        srid=4326,
        help_text="Extent of the DTM created by OpenDroneMap")

    # mission
    created_at = models.DateTimeField(default=timezone.now,
                                      help_text="Creation date")
    pending_action = models.IntegerField(
        choices=PENDING_ACTIONS,
        db_index=True,
        null=True,
        blank=True,
        help_text=
        "A requested action to be performed on the task. The selected action will be performed by the worker at the next iteration."
    )

    public = models.BooleanField(
        default=False,
        help_text=
        "A flag indicating whether this task is available to the public")
    resize_to = models.IntegerField(
        default=-1,
        help_text=
        "When set to a value different than -1, indicates that the images for this task have been / will be resized to the size specified here before processing."
    )

    upload_progress = models.FloatField(
        default=0.0,
        help_text=
        "Value between 0 and 1 indicating the upload progress of this task's files to the processing node",
        blank=True)
    resize_progress = models.FloatField(
        default=0.0,
        help_text=
        "Value between 0 and 1 indicating the resize progress of this task's images",
        blank=True)
    running_progress = models.FloatField(
        default=0.0,
        help_text=
        "Value between 0 and 1 indicating the running progress (estimated) of this task",
        blank=True)

    def __init__(self, *args, **kwargs):
        super(Task, self).__init__(*args, **kwargs)

        # To help keep track of changes to the project id
        self.__original_project_id = self.project.id

    def __str__(self):
        name = self.name if self.name is not None else "unnamed"

        return 'Task [{}] ({})'.format(name, self.id)

    def move_assets(self, old_project_id, new_project_id):
        """
        Moves the task's folder, update ImageFields and orthophoto files to a new project
        """
        old_task_folder = full_task_directory_path(self.id, old_project_id)
        new_task_folder = full_task_directory_path(self.id, new_project_id)
        new_task_folder_parent = os.path.abspath(
            os.path.join(new_task_folder, os.pardir))

        try:
            if os.path.exists(
                    old_task_folder) and not os.path.exists(new_task_folder):
                # Use parent, otherwise we get a duplicate directory in there
                if not os.path.exists(new_task_folder_parent):
                    os.makedirs(new_task_folder_parent)

                shutil.move(old_task_folder, new_task_folder_parent)

                logger.info("Moved task folder from {} to {}".format(
                    old_task_folder, new_task_folder))

                with transaction.atomic():
                    for img in self.imageupload_set.all():
                        prev_name = img.image.name
                        img.image.name = assets_directory_path(
                            self.id, new_project_id,
                            os.path.basename(img.image.name))
                        logger.info("Changing {} to {}".format(prev_name, img))
                        img.save()

            else:
                logger.warning(
                    "Project changed for task {}, but either {} doesn't exist, or {} already exists. This doesn't look right, so we will not move any files."
                    .format(self, old_task_folder, new_task_folder))
        except shutil.Error as e:
            logger.warning(
                "Could not move assets folder for task {}. We're going to proceed anyway, but you might experience issues: {}"
                .format(self, e))

    def save(self, *args, **kwargs):
        if self.project.id != self.__original_project_id:
            self.move_assets(self.__original_project_id, self.project.id)
            self.__original_project_id = self.project.id

        # Autovalidate on save
        self.full_clean()

        super(Task, self).save(*args, **kwargs)

    def assets_path(self, *args):
        """
        Get a path relative to the place where assets are stored
        """
        return self.task_path("assets", *args)

    def task_path(self, *args):
        """
        Get path relative to the root task directory
        """
        return os.path.join(
            settings.MEDIA_ROOT,
            assets_directory_path(self.id, self.project.id, ""), *args)

    def is_asset_available_slow(self, asset):
        """
        Checks whether a particular asset is available in the file system
        Generally this should never be used directly, as it's slow. Use the available_assets field
        in the database instead.
        :param asset: one of ASSETS_MAP keys
        :return: boolean
        """
        if asset in self.ASSETS_MAP:
            value = self.ASSETS_MAP[asset]
            if isinstance(value, str):
                return os.path.exists(self.assets_path(value))
            elif isinstance(value, dict):
                if 'deferred_compress_dir' in value:
                    return os.path.exists(
                        self.assets_path(value['deferred_compress_dir']))

        return False

    def get_asset_download_path(self, asset):
        """
        Get the path to an asset download
        :param asset: one of ASSETS_MAP keys
        :return: path
        """

        if asset in self.ASSETS_MAP:
            value = self.ASSETS_MAP[asset]
            if isinstance(value, str):
                return self.assets_path(value)

            elif isinstance(value, dict):
                if 'deferred_path' in value and 'deferred_compress_dir' in value:
                    return self.generate_deferred_asset(
                        value['deferred_path'], value['deferred_compress_dir'])
                else:
                    raise FileNotFoundError(
                        "{} is not a valid asset (invalid dict values)".format(
                            asset))

            else:
                raise FileNotFoundError(
                    "{} is not a valid asset (invalid map)".format(asset))
        else:
            raise FileNotFoundError("{} is not a valid asset".format(asset))

    def process(self):
        """
        This method contains the logic for processing tasks asynchronously
        from a background thread or from a worker. Here tasks that are
        ready to be processed execute some logic. This could be communication
        with a processing node or executing a pending action.
        """

        try:
            if self.pending_action == pending_actions.RESIZE:
                resized_images = self.resize_images()
                self.refresh_from_db()
                self.resize_gcp(resized_images)
                self.pending_action = None
                self.save()

            if self.auto_processing_node and not self.status in [
                    status_codes.FAILED, status_codes.CANCELED
            ]:
                # No processing node assigned and need to auto assign
                if self.processing_node is None:
                    # Assign first online node with lowest queue count
                    self.processing_node = ProcessingNode.find_best_available_node()
                    if self.processing_node:
                        self.processing_node.queue_count += 1  # Doesn't have to be accurate, it will get overridden later
                        self.processing_node.save()

                        logger.info(
                            "Automatically assigned processing node {} to {}".
                            format(self.processing_node, self))
                        self.save()

                # Processing node assigned, but is offline and no errors
                if self.processing_node and not self.processing_node.is_online():
                    # If we are queued up
                    # detach processing node, and reassignment
                    # will be processed at the next tick
                    if self.status == status_codes.QUEUED:
                        logger.info(
                            "Processing node {} went offline, reassigning {}..."
                            .format(self.processing_node, self))
                        self.uuid = ''
                        self.processing_node = None
                        self.status = None
                        self.save()

                    elif self.status == status_codes.RUNNING:
                        # Task was running and processing node went offline
                        # It could have crashed due to low memory
                        # or perhaps it went offline due to network errors.
                        # We can't easily differentiate between the two, so we need
                        # to notify the user because if it crashed due to low memory
                        # the user might need to take action (or be stuck in an infinite loop)
                        raise ProcessingError(
                            "Processing node went offline. This could be due to insufficient memory or a network error."
                        )

            if self.processing_node:
                # Need to process some images (UUID not yet set and task doesn't have pending actions)?
                if not self.uuid and self.pending_action is None and self.status is None:
                    logger.info("Processing... {}".format(self))

                    images = [
                        image.path() for image in self.imageupload_set.all()
                    ]

                    # Track upload progress, but limit the number of DB updates
                    # to every 2 seconds (and always record the 100% progress)
                    last_update = 0

                    def callback(progress):
                        nonlocal last_update
                        # Always record completion; otherwise throttle to one update per 2s
                        if time.time() - last_update >= 2 or abs(progress - 1.0) <= 1e-6:
                            Task.objects.filter(pk=self.id).update(upload_progress=progress)
                            last_update = time.time()

                    # This takes a while
                    uuid = self.processing_node.process_new_task(
                        images, self.name, self.options, callback)

                    # Refresh task object before committing change
                    self.refresh_from_db()
                    self.uuid = uuid
                    self.save()

                    # TODO: log process has started processing

            if self.pending_action is not None:
                if self.pending_action == pending_actions.CANCEL:
                    # Do we need to cancel the task on the processing node?
                    logger.info("Canceling {}".format(self))
                    if self.processing_node and self.uuid:
                        # Attempt to cancel the task on the processing node
                        # We don't care if this fails (we tried)
                        try:
                            self.processing_node.cancel_task(self.uuid)
                        except ProcessingException:
                            logger.warning(
                                "Could not cancel {} on processing node. We'll proceed anyway..."
                                .format(self))

                        self.status = status_codes.CANCELED
                        self.pending_action = None
                        self.save()
                    else:
                        raise ProcessingError(
                            "Cannot cancel a task that has no processing node or UUID"
                        )

                elif self.pending_action == pending_actions.RESTART:
                    logger.info("Restarting {}".format(self))
                    if self.processing_node:

                        # Check if the UUID is still valid, as processing nodes purge
                        # results after a set amount of time, the UUID might have been eliminated.
                        uuid_still_exists = False

                        if self.uuid:
                            try:
                                info = self.processing_node.get_task_info(
                                    self.uuid)
                                uuid_still_exists = info['uuid'] == self.uuid
                            except ProcessingException:
                                pass

                        need_to_reprocess = False

                        if uuid_still_exists:
                            # Good to go
                            try:
                                self.processing_node.restart_task(
                                    self.uuid, self.options)
                            except ProcessingError as e:
                                # Something went wrong
                                logger.warning(
                                    "Could not restart {}, will start a new one"
                                    .format(self))
                                need_to_reprocess = True
                        else:
                            need_to_reprocess = True

                        if need_to_reprocess:
                            logger.info(
                                "{} needs to be reprocessed".format(self))

                            # Task has been purged (or processing node is offline)
                            # Process this as a new task
                            # Removing its UUID will cause the scheduler
                            # to process this the next tick
                            self.uuid = ''

                            # We also remove the "rerun-from" parameter if it's set
                            self.options = list(
                                filter(lambda d: d['name'] != 'rerun-from',
                                       self.options))
                            self.upload_progress = 0

                        self.console_output = ""
                        self.processing_time = -1
                        self.status = None
                        self.last_error = None
                        self.pending_action = None
                        self.running_progress = 0
                        self.save()
                    else:
                        raise ProcessingError(
                            "Cannot restart a task that has no processing node"
                        )

                elif self.pending_action == pending_actions.REMOVE:
                    logger.info("Removing {}".format(self))
                    if self.processing_node and self.uuid:
                        # Attempt to delete the resources on the processing node
                        # We don't care if this fails, as resources on processing nodes
                        # Are expected to be purged on their own after a set amount of time anyway
                        try:
                            self.processing_node.remove_task(self.uuid)
                        except ProcessingException:
                            pass

                    # What's more important is that we delete our task properly here
                    self.delete()

                    # Stop right here!
                    return

            if self.processing_node:
                # Need to update status (first time, queued or running?)
                if self.uuid and self.status in [
                        None, status_codes.QUEUED, status_codes.RUNNING
                ]:
                    # Update task info from processing node
                    info = self.processing_node.get_task_info(self.uuid)

                    self.processing_time = info["processingTime"]
                    self.status = info["status"]["code"]

                    current_lines_count = len(self.console_output.split("\n"))
                    console_output = self.processing_node.get_task_console_output(
                        self.uuid, current_lines_count)
                    if len(console_output) > 0:
                        self.console_output += "\n".join(console_output) + '\n'

                        # Update running progress
                        for line in console_output:
                            for line_match, value in self.TASK_OUTPUT_MILESTONES.items():
                                if line_match in line:
                                    self.running_progress = value
                                    break

                    if "errorMessage" in info["status"]:
                        self.last_error = info["status"]["errorMessage"]

                    # Has the task just been canceled, failed, or completed?
                    if self.status in [
                            status_codes.FAILED, status_codes.COMPLETED,
                            status_codes.CANCELED
                    ]:
                        logger.info("Processing status: {} for {}".format(
                            self.status, self))

                        if self.status == status_codes.COMPLETED:
                            assets_dir = self.assets_path("")

                            # Remove previous assets directory
                            if os.path.exists(assets_dir):
                                logger.info(
                                    "Removing old assets directory: {} for {}".
                                    format(assets_dir, self))
                                shutil.rmtree(assets_dir)

                            os.makedirs(assets_dir)

                            logger.info(
                                "Downloading all.zip for {}".format(self))

                            # Download all assets
                            zip_stream = self.processing_node.download_task_asset(
                                self.uuid, "all.zip")
                            zip_path = os.path.join(assets_dir, "all.zip")
                            with open(zip_path, 'wb') as fd:
                                for chunk in zip_stream.iter_content(4096):
                                    fd.write(chunk)

                            logger.info(
                                "Done downloading all.zip for {}".format(self))

                            # Extract from zip
                            with zipfile.ZipFile(zip_path, "r") as zip_h:
                                zip_h.extractall(assets_dir)

                            logger.info(
                                "Extracted all.zip for {}".format(self))

                            # Populate *_extent fields
                            extent_fields = [
                                (os.path.realpath(
                                    self.assets_path("odm_orthophoto",
                                                     "odm_orthophoto.tif")),
                                 'orthophoto_extent'),
                                (os.path.realpath(
                                    self.assets_path("odm_dem", "dsm.tif")),
                                 'dsm_extent'),
                                (os.path.realpath(
                                    self.assets_path("odm_dem", "dtm.tif")),
                                 'dtm_extent'),
                            ]

                            for raster_path, field in extent_fields:
                                if os.path.exists(raster_path):
                                    # Read extent and SRID
                                    raster = GDALRaster(raster_path)
                                    extent = OGRGeometry.from_bbox(
                                        raster.extent)

                                    # It will be implicitly transformed into the SRID of the model’s field
                                    # self.field = GEOSGeometry(...)
                                    setattr(
                                        self, field,
                                        GEOSGeometry(extent.wkt,
                                                     srid=raster.srid))

                                    logger.info(
                                        "Populated extent field with {} for {}"
                                        .format(raster_path, self))

                            self.update_available_assets_field()
                            self.running_progress = 1.0
                            self.save()

                            from app.plugins import signals as plugin_signals
                            plugin_signals.task_completed.send_robust(
                                sender=self.__class__, task_id=self.id)
                        else:
                            # FAILED, CANCELED
                            self.save()
                    else:
                        # Still waiting...
                        self.save()

        except ProcessingError as e:
            self.set_failure(str(e))
        except (ConnectionRefusedError, ConnectionError) as e:
            logger.warning(
                "{} cannot communicate with processing node: {}".format(
                    self, str(e)))
        except ProcessingTimeout as e:
            logger.warning(
                "{} timed out with error: {}. We'll try reprocessing at the next tick."
                .format(self, str(e)))

    def get_tile_path(self, tile_type, z, x, y):
        return self.assets_path("{}_tiles".format(tile_type), z, x,
                                "{}.png".format(y))

    def get_tile_json_url(self, tile_type):
        return "/api/projects/{}/tasks/{}/{}/tiles.json".format(
            self.project.id, self.id, tile_type)

    def get_map_items(self):
        types = []
        if 'orthophoto.tif' in self.available_assets:
            types.append('orthophoto')
        if 'dsm.tif' in self.available_assets: types.append('dsm')
        if 'dtm.tif' in self.available_assets: types.append('dtm')

        return {
            'tiles': [{
                'url': self.get_tile_json_url(t),
                'type': t
            } for t in types],
            'meta': {
                'task': {
                    'id': str(self.id),
                    'project': self.project.id,
                    'public': self.public
                }
            }
        }

    def get_model_display_params(self):
        """
        Subset of a task fields used in the 3D model display view
        """
        return {
            'id': str(self.id),
            'project': self.project.id,
            'available_assets': self.available_assets,
            'public': self.public
        }

    def generate_deferred_asset(self, archive, directory):
        """
        :param archive: path of the destination .zip file (relative to /assets/ directory)
        :param directory: path of the source directory to compress (relative to /assets/ directory)
        :return: full path of the generated archive
        """
        archive_path = self.assets_path(archive)
        directory_path = self.assets_path(directory)

        if not os.path.exists(directory_path):
            raise FileNotFoundError("{} does not exist".format(directory_path))

        if not os.path.exists(archive_path):
            shutil.make_archive(
                os.path.splitext(archive_path)[0], 'zip', directory_path)

        return archive_path

    def update_available_assets_field(self, commit=False):
        """
        Updates the available_assets field with the actual types of assets available
        :param commit: when True also saves the model, otherwise the user should manually call save()
        """
        all_assets = list(self.ASSETS_MAP.keys())
        self.available_assets = [
            asset for asset in all_assets
            if self.is_asset_available_slow(asset)
        ]
        if commit: self.save()

    def delete(self, using=None, keep_parents=False):
        task_id = self.id
        from app.plugins import signals as plugin_signals
        plugin_signals.task_removing.send_robust(sender=self.__class__,
                                                 task_id=task_id)

        directory_to_delete = os.path.join(
            settings.MEDIA_ROOT, task_directory_path(self.id, self.project.id))

        super(Task, self).delete(using, keep_parents)

        # Remove files related to this task
        try:
            shutil.rmtree(directory_to_delete)
        except FileNotFoundError as e:
            logger.warning(e)

        plugin_signals.task_removed.send_robust(sender=self.__class__,
                                                task_id=task_id)

    def set_failure(self, error_message):
        logger.error("FAILURE FOR {}: {}".format(self, error_message))
        self.last_error = error_message
        self.status = status_codes.FAILED
        self.pending_action = None
        self.save()

    def find_all_files_matching(self, regex):
        directory = full_task_directory_path(self.id, self.project.id)
        return [
            os.path.join(directory, f) for f in os.listdir(directory)
            if re.match(regex, f, re.IGNORECASE)
        ]

    def resize_images(self):
        """
        Destructively resize this task's JPG images while retaining EXIF tags.
        Resulting images are always converted to JPG.
        TODO: add support for tiff files
        :return list containing paths of resized images and resize ratios
        """
        if self.resize_to < 0:
            logger.warning(
                "We were asked to resize images to {}, this might be an error."
                .format(self.resize_to))
            return []

        images_path = self.find_all_files_matching(r'.*\.jpe?g$')
        total_images = len(images_path)
        resized_images_count = 0
        last_update = 0

        def callback(retval=None):
            nonlocal last_update
            nonlocal resized_images_count
            nonlocal total_images

            resized_images_count += 1
            if time.time() - last_update >= 2:
                Task.objects.filter(pk=self.id).update(
                    resize_progress=(float(resized_images_count) /
                                     float(total_images)))
                last_update = time.time()

        with ThreadPoolExecutor(max_workers=cpu_count()) as executor:
            resized_images = list(
                filter(
                    lambda i: i is not None,
                    executor.map(
                        partial(resize_image,
                                resize_to=self.resize_to,
                                done=callback), images_path)))

        Task.objects.filter(pk=self.id).update(resize_progress=1.0)

        return resized_images

    def resize_gcp(self, resized_images):
        """
        Destructively change this task's GCP file (if any)
        by resizing the location of GCP entries.
        :param resized_images: list of objects having "path" and "resize_ratio" keys
            for example [{'path': 'path/to/DJI_0018.jpg', 'resize_ratio': 0.25}, ...]
        :return: path to changed GCP file or None if no GCP file was found/changed
        """
        gcp_path = self.find_all_files_matching(r'.*\.txt$')
        if len(gcp_path) == 0: return None

        # Assume we only have a single GCP file per task
        gcp_path = gcp_path[0]
        resize_script_path = os.path.join(settings.BASE_DIR, 'app', 'scripts',
                                          'resize_gcp.js')

        # Map image filename -> resize ratio (avoid shadowing the dict builtin)
        ratios = {}
        for ri in resized_images:
            ratios[os.path.basename(ri['path'])] = ri['resize_ratio']

        try:
            new_gcp_content = subprocess.check_output(
                "node {} {} '{}'".format(quote(resize_script_path), quote(gcp_path),
                                         json.dumps(ratios)),
                shell=True)
            with open(gcp_path, 'w') as f:
                f.write(new_gcp_content.decode('utf-8'))
            logger.info("Resized GCP file {}".format(gcp_path))
            return gcp_path
        except subprocess.CalledProcessError as e:
            logger.warning("Could not resize GCP file {}: {}".format(
                gcp_path, str(e)))
            return None

    class Meta:
        permissions = (('view_task', 'Can view task'), )
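
The extent-population step inside Task.process() condenses to the following sketch. The raster path is illustrative; GDALRaster, OGRGeometry, and GEOSGeometry are standard GeoDjango:

from django.contrib.gis.gdal import GDALRaster, OGRGeometry
from django.contrib.gis.geos import GEOSGeometry

raster = GDALRaster('/path/to/odm_dem/dsm.tif')  # read extent and SRID from the raster
extent = OGRGeometry.from_bbox(raster.extent)    # bbox -> polygon geometry
# Implicitly transformed into the SRID of the model's field on save
task.dsm_extent = GEOSGeometry(extent.wkt, srid=raster.srid)
task.save()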
Example #24
class Task(PhotoEnabled, models.Model):
    """
    A task, also used to generate a todo list, i.e. those without a completed date.

    Tasks need one or more staff members to do the work::

        >>> from django.contrib.auth.models import User
        >>> from caretaking.models import Staff
        >>> darryl, created = User.objects.get_or_create(username='******', first_name='Darryl',
        ...     last_name='Cousins')
        >>> if created:
        ...     darryl.save()
        >>> caretaker, created = Staff.objects.get_or_create(user=darryl, title='Caretaker')

    A type of task is also required::

        >>> from caretaking.models import TaskType
        >>> maintenance, created = TaskType.objects.get_or_create(name='Maintenance')
        >>> if created:
        ...     maintenance.save()
        >>> duties, created = TaskType.objects.get_or_create(name='Duties')
        >>> if created:
        ...     duties.save()

    Without a completed date then the task can be considered a todo::

        >>> from caretaking.models import Task
        >>> todo = Task.objects.create(description="Clear downpipe at library", urgency='high',
        ...     point='POINT (172.29307 -43.75858)', staff=caretaker)
        >>> todo.save()
        >>> print(todo)
        Clear downpipe at library
        >>> todo.tasktype.add(maintenance)
        >>> todo.tasktype.add(duties)

    It can have more than one point of activity::

        >>> todo.point = 'MULTIPOINT ((172.29307 -43.75858), (172.30 -43.76))'
        >>> todo.save()

    Assign the task::

        >>> print(todo.staff)
        Darryl Cousins (Caretaker)

    It has a location::

        >>> print(todo.point)
        SRID=4326;MULTIPOINT (172.29307 -43.75858, 172.3 -43.76)

    Maybe the job got done too::

        >>> from datetime import date
        >>> todo.completed = date(2017, 3, 17)
        >>> todo.save()
        >>> print(todo.completed)
        2017-03-17
        >>> print(todo)
        Fri 17 Mar 2017 Clear downpipe at library

    To select tasks for the caretaker::

        >>> Task.objects.filter(staff=caretaker).count()
        11
 
    Accordingly we expect none from the assistant::

        >>> maria, created = User.objects.get_or_create(username='******', first_name='Maria',
        ...     last_name='Halloumis')
        >>> if created:
        ...     maria.save()
        >>> assistant, created = Staff.objects.get_or_create(user=maria, title='Assistant')
        >>> Task.objects.filter(staff=assistant).count()
        0

    We can select earliest and latest Tasks by completed date::

        >>> Task.objects.filter(staff=caretaker).earliest()
        <Task: Fri 10 Mar ...>

    Or a count of tasks completed for the year::

        >>> Task.objects.filter(staff=caretaker).filter(completed__year=2017).count()
        11

    """
    URGENCY = (('low', 'Low'), ('med', 'Medium'), ('high', 'High'))
    taskid = models.AutoField(primary_key=True)
    description = models.TextField(
        blank=False,
        null=False,
        help_text=_("A descriptive summary of the task"))
    completed = models.DateField(blank=True,
                                 null=True,
                                 help_text=_("The date completed"))
    urgency = models.CharField(max_length=4,
                               choices=URGENCY,
                               default='low',
                               help_text=_("The urgency of the task"))
    tasktype = models.ManyToManyField('TaskType',
                                      help_text=_("One or more types"))
    staff = models.ForeignKey('Staff',
                              blank=True,
                              null=True,
                              on_delete=models.DO_NOTHING,
                              help_text=("Staff member performing the work"))
    point = GeometryField(blank=True,
                          null=True,
                          help_text=("Point or Multipoint for the task"))

    class Meta:
        get_latest_by = 'completed'
        ordering = ('completed', )

    def __str__(self):
        "Returns completed date and a truncated line of the tasks description."
        day = self.completed.strftime(
            "%a %d %b %Y") if self.completed else None
        if day:
            return '{0} {1:.100}'.format(day, self.description)
        else:
            return '{:.100}'.format(self.description)

    def get_absolute_url(self):
        if self.completed:
            return self.get_diary_url()
        else:
            return reverse('todo-list')

    def get_diary_entry(self):
        from caretaking.models.diary import Diary
        diary = Diary.objects.filter(staff=self.staff).get(day=self.completed)
        return diary

    def get_diary_url(self):
        return self.get_diary_entry().get_absolute_url()

    def locations(self):
        """Find the location or locations in which the point falls"""
        return Location.objects.filter(polygon__intersects=self.point).exclude(
            name='CollegeBoundary')
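
# Hedged usage sketch for locations() above: the reverse query, using the same
# intersects lookup to find tasks whose point falls inside a given Location
# polygon. The Location model and the 'Library' name are assumed for
# illustration.
from caretaking.models import Location, Task

def tasks_at(location_name):
    location = Location.objects.get(name=location_name)  # e.g. 'Library' (hypothetical)
    return Task.objects.filter(point__intersects=location.polygon)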
Example #32
0
 def test_deconstruct_empty(self):
     field = GeometryField()
     *_, kwargs = field.deconstruct()
     self.assertEqual(kwargs, {'srid': 4326})
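
# A sketch of why deconstruct() matters: migrations recreate the field from
# its output, so the kwargs must round-trip to an equivalent field.
from django.contrib.gis.db.models import GeometryField

field = GeometryField(srid=3857)
name, path, args, kwargs = field.deconstruct()
clone = GeometryField(*args, **kwargs)
assert clone.srid == field.srid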
Example #33
0
 def as_sqlite(self, compiler, connection):
     function = None
     geo_field = GeometryField(srid=self.srid)
     if geo_field.geodetic(connection):
         function = 'GeodesicLength' if self.spheroid else 'GreatCircleLength'
     return super().as_sql(compiler, connection, function=function)
Example #34
0
 def test_deconstruct_empty(self):
     field = GeometryField()
     *_, kwargs = field.deconstruct()
     self.assertEqual(kwargs, {'srid': 4326})
Example #35
0
 def __init__(self, *expressions, **extra):
     if 'output_field' not in extra:
         extra['output_field'] = GeometryField()
     super(GeomOutputGeoFunc, self).__init__(*expressions, **extra)
Example #36
0
 def output_field(self):
     return GeometryField(srid=self.geo_field.srid)
Example #37
0
from decimal import Decimal
Example #38
0
class Task(models.Model):
    ASSETS_MAP = {
            'all.zip': 'all.zip',
            'orthophoto.tif': os.path.join('odm_orthophoto', 'odm_orthophoto.tif'),
            'orthophoto.png': os.path.join('odm_orthophoto', 'odm_orthophoto.png'),
            'orthophoto.mbtiles': os.path.join('odm_orthophoto', 'odm_orthophoto.mbtiles'),
            'georeferenced_model.las': os.path.join('odm_georeferencing', 'odm_georeferenced_model.las'),
            'georeferenced_model.laz': os.path.join('odm_georeferencing', 'odm_georeferenced_model.laz'),
            'georeferenced_model.ply': os.path.join('odm_georeferencing', 'odm_georeferenced_model.ply'),
            'georeferenced_model.csv': os.path.join('odm_georeferencing', 'odm_georeferenced_model.csv'),
            'textured_model.zip': {
                'deferred_path': 'textured_model.zip',
                'deferred_compress_dir': 'odm_texturing'
            },
            'dtm.tif': os.path.join('odm_dem', 'dtm.tif'),
            'dsm.tif': os.path.join('odm_dem', 'dsm.tif'),
            'dtm_tiles.zip': {
                'deferred_path': 'dtm_tiles.zip',
                'deferred_compress_dir': 'dtm_tiles'
            },
            'dsm_tiles.zip': {
                'deferred_path': 'dsm_tiles.zip',
                'deferred_compress_dir': 'dsm_tiles'
            },
            'orthophoto_tiles.zip': {
                'deferred_path': 'orthophoto_tiles.zip',
                'deferred_compress_dir': 'orthophoto_tiles'
            },
            'cameras.json': 'cameras.json',
            'shots.geojson': os.path.join('odm_report', 'shots.geojson'),
    }

    STATUS_CODES = (
        (status_codes.QUEUED, 'QUEUED'),
        (status_codes.RUNNING, 'RUNNING'),
        (status_codes.FAILED, 'FAILED'),
        (status_codes.COMPLETED, 'COMPLETED'),
        (status_codes.CANCELED, 'CANCELED'),
    )

    PENDING_ACTIONS = (
        (pending_actions.CANCEL, 'CANCEL'),
        (pending_actions.REMOVE, 'REMOVE'),
        (pending_actions.RESTART, 'RESTART'),
        (pending_actions.RESIZE, 'RESIZE'),
        (pending_actions.IMPORT, 'IMPORT'),
    )

    TASK_PROGRESS_LAST_VALUE = 0.85

    id = models.UUIDField(primary_key=True, default=uuid_module.uuid4, unique=True, serialize=False, editable=False, verbose_name=_("Id"))

    uuid = models.CharField(max_length=255, db_index=True, default='', blank=True, help_text=_("Identifier of the task (as returned by NodeODM API)"), verbose_name=_("UUID"))
    project = models.ForeignKey(Project, on_delete=models.CASCADE, help_text=_("Project that this task belongs to"), verbose_name=_("Project"))
    name = models.CharField(max_length=255, null=True, blank=True, help_text=_("A label for the task"), verbose_name=_("Name"))
    processing_time = models.IntegerField(default=-1, help_text=_("Number of milliseconds that elapsed since the beginning of this task (-1 indicates that no information is available)"), verbose_name=_("Processing Time"))
    processing_node = models.ForeignKey(ProcessingNode, on_delete=models.SET_NULL, null=True, blank=True, help_text=_("Processing node assigned to this task (or null if this task has not been associated yet)"), verbose_name=_("Processing Node"))
    auto_processing_node = models.BooleanField(default=True, help_text=_("A flag indicating whether this task should be automatically assigned a processing node"), verbose_name=_("Auto Processing Node"))
    status = models.IntegerField(choices=STATUS_CODES, db_index=True, null=True, blank=True, help_text=_("Current status of the task"), verbose_name=_("Status"))
    last_error = models.TextField(null=True, blank=True, help_text=_("The last processing error received"), verbose_name=_("Last Error"))
    options = fields.JSONField(default=dict, blank=True, help_text=_("Options that are being used to process this task"), validators=[validate_task_options], verbose_name=_("Options"))
    available_assets = fields.ArrayField(models.CharField(max_length=80), default=list, blank=True, help_text=_("List of available assets to download"), verbose_name=_("Available Assets"))
    console_output = models.TextField(null=False, default="", blank=True, help_text=_("Console output of the processing node"), verbose_name=_("Console Output"))

    orthophoto_extent = GeometryField(null=True, blank=True, srid=4326, help_text=_("Extent of the orthophoto"), verbose_name=_("Orthophoto Extent"))
    dsm_extent = GeometryField(null=True, blank=True, srid=4326, help_text="Extent of the DSM", verbose_name=_("DSM Extent"))
    dtm_extent = GeometryField(null=True, blank=True, srid=4326, help_text="Extent of the DTM", verbose_name=_("DTM Extent"))

    # mission
    created_at = models.DateTimeField(default=timezone.now, help_text=_("Creation date"), verbose_name=_("Created at"))
    pending_action = models.IntegerField(choices=PENDING_ACTIONS, db_index=True, null=True, blank=True, help_text=_("A requested action to be performed on the task. The selected action will be performed by the worker at the next iteration."), verbose_name=_("Pending Action"))

    public = models.BooleanField(default=False, help_text=_("A flag indicating whether this task is available to the public"), verbose_name=_("Public"))
    resize_to = models.IntegerField(default=-1, help_text=_("When set to a value different than -1, indicates that the images for this task have been / will be resized to the size specified here before processing."), verbose_name=_("Resize To"))

    upload_progress = models.FloatField(default=0.0,
                                        help_text=_("Value between 0 and 1 indicating the upload progress of this task's files to the processing node"),
                                        verbose_name=_("Upload Progress"),
                                        blank=True)
    resize_progress = models.FloatField(default=0.0,
                                        help_text=_("Value between 0 and 1 indicating the resize progress of this task's images"),
                                        verbose_name=_("Resize Progress"),
                                        blank=True)
    running_progress = models.FloatField(default=0.0,
                                        help_text=_("Value between 0 and 1 indicating the running progress (estimated) of this task"),
                                        verbose_name=_("Running Progress"),
                                        blank=True)
    import_url = models.TextField(null=False, default="", blank=True, help_text=_("URL this task is imported from (only for imported tasks)"), verbose_name=_("Import URL"))
    images_count = models.IntegerField(null=False, blank=True, default=0, help_text=_("Number of images associated with this task"), verbose_name=_("Images Count"))
    partial = models.BooleanField(default=False, help_text=_("A flag indicating whether this task is currently waiting for information or files to be uploaded before being considered for processing."), verbose_name=_("Partial"))

    class Meta:
        verbose_name = _("Task")
        verbose_name_plural = _("Tasks")

    def __init__(self, *args, **kwargs):
        super(Task, self).__init__(*args, **kwargs)

        # To help keep track of changes to the project id
        self.__original_project_id = self.project.id

    def __str__(self):
        name = self.name if self.name is not None else gettext("unnamed")

        return 'Task [{}] ({})'.format(name, self.id)

    def move_assets(self, old_project_id, new_project_id):
        """
        Moves the task's folder to a new project and updates ImageFields and orthophoto file paths
        """
        old_task_folder = full_task_directory_path(self.id, old_project_id)
        new_task_folder = full_task_directory_path(self.id, new_project_id)
        new_task_folder_parent = os.path.abspath(os.path.join(new_task_folder, os.pardir))

        try:
            if os.path.exists(old_task_folder) and not os.path.exists(new_task_folder):
                # Use parent, otherwise we get a duplicate directory in there
                if not os.path.exists(new_task_folder_parent):
                    os.makedirs(new_task_folder_parent)

                shutil.move(old_task_folder, new_task_folder_parent)

                logger.info("Moved task folder from {} to {}".format(old_task_folder, new_task_folder))

                with transaction.atomic():
                    for img in self.imageupload_set.all():
                        prev_name = img.image.name
                        img.image.name = assets_directory_path(self.id, new_project_id,
                                                               os.path.basename(img.image.name))
                        logger.info("Changing {} to {}".format(prev_name, img))
                        img.save()

            else:
                logger.warning("Project changed for task {}, but either {} doesn't exist, or {} already exists. This doesn't look right, so we will not move any files.".format(self,
                                                                                                             old_task_folder,
                                                                                                             new_task_folder))
        except shutil.Error as e:
            logger.warning("Could not move assets folder for task {}. We're going to proceed anyway, but you might experience issues: {}".format(self, e))

    def save(self, *args, **kwargs):
        if self.project.id != self.__original_project_id:
            self.move_assets(self.__original_project_id, self.project.id)
            self.__original_project_id = self.project.id

        # Autovalidate on save
        self.full_clean()

        super(Task, self).save(*args, **kwargs)

    def assets_path(self, *args):
        """
        Get a path relative to the place where assets are stored
        """
        return self.task_path("assets", *args)

    def task_path(self, *args):
        """
        Get path relative to the root task directory
        """
        return os.path.join(settings.MEDIA_ROOT,
                            assets_directory_path(self.id, self.project.id, ""),
                            *args)

    def is_asset_available_slow(self, asset):
        """
        Checks whether a particular asset is available in the file system
        Generally this should never be used directly, as it's slow. Use the available_assets field
        in the database instead.
        :param asset: one of ASSETS_MAP keys
        :return: boolean
        """
        if asset in self.ASSETS_MAP:
            value = self.ASSETS_MAP[asset]
            if isinstance(value, str):
                return os.path.exists(self.assets_path(value))
            elif isinstance(value, dict):
                if 'deferred_compress_dir' in value:
                    return os.path.exists(self.assets_path(value['deferred_compress_dir']))

        return False

    def get_asset_download_path(self, asset):
        """
        Get the path to an asset download
        :param asset: one of ASSETS_MAP keys
        :return: path
        """

        if asset in self.ASSETS_MAP:
            value = self.ASSETS_MAP[asset]
            if isinstance(value, str):
                return self.assets_path(value)

            elif isinstance(value, dict):
                if 'deferred_path' in value and 'deferred_compress_dir' in value:
                    return self.generate_deferred_asset(value['deferred_path'], value['deferred_compress_dir'])
                else:
                    raise FileNotFoundError("{} is not a valid asset (invalid dict values)".format(asset))

            else:
                raise FileNotFoundError("{} is not a valid asset (invalid map)".format(asset))
        else:
            raise FileNotFoundError("{} is not a valid asset".format(asset))

    def handle_import(self):
        self.console_output += gettext("Importing assets...") + "\n"
        self.save()

        zip_path = self.assets_path("all.zip")

        if self.import_url and not os.path.exists(zip_path):
            try:
                # TODO: this is potentially vulnerable to a zip bomb attack
                #       mitigated by the fact that a valid account is needed to
                #       import tasks
                logger.info("Importing task assets from {} for {}".format(self.import_url, self))
                download_stream = requests.get(self.import_url, stream=True, timeout=10)
                content_length = download_stream.headers.get('content-length')
                total_length = int(content_length) if content_length is not None else None
                downloaded = 0
                last_update = 0

                with open(zip_path, 'wb') as fd:
                    for chunk in download_stream.iter_content(4096):
                        downloaded += len(chunk)

                        if time.time() - last_update >= 2:
                            # Update progress
                            if total_length is not None:
                                Task.objects.filter(pk=self.id).update(running_progress=(float(downloaded) / total_length) * 0.9)

                            self.check_if_canceled()
                            last_update = time.time()

                        fd.write(chunk)

            except (requests.exceptions.Timeout, requests.exceptions.ConnectionError, ReadTimeoutError) as e:
                raise NodeServerError(e)

        self.refresh_from_db()

        try:
            self.extract_assets_and_complete()
        except zipfile.BadZipFile:
            raise NodeServerError(gettext("Invalid zip file"))

        images_json = self.assets_path("images.json")
        if os.path.exists(images_json):
            try:
                with open(images_json) as f:
                    images = json.load(f)
                    self.images_count = len(images)
            except (OSError, ValueError):
                logger.warning("Cannot read images count from imported task {}".format(self))

        self.pending_action = None
        self.processing_time = 0
        self.save()
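
# A standalone sketch of the streaming-download pattern in handle_import above:
# read the response in fixed-size chunks, count bytes against Content-Length
# when it is present, and throttle progress reports. Names are illustrative.
import time
import requests

def download_with_progress(url, dest_path, report_progress,
                           chunk_size=4096, min_interval=2):
    stream = requests.get(url, stream=True, timeout=10)
    content_length = stream.headers.get('content-length')
    total = int(content_length) if content_length is not None else None
    downloaded = 0
    last_update = 0
    with open(dest_path, 'wb') as fd:
        for chunk in stream.iter_content(chunk_size):
            downloaded += len(chunk)
            if total is not None and time.time() - last_update >= min_interval:
                report_progress(downloaded / total)
                last_update = time.time()
            fd.write(chunk)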

    def process(self):
        """
        This method contains the logic for processing tasks asynchronously
        from a background thread or from a worker. Here tasks that are
        ready to be processed execute some logic. This could be communication
        with a processing node or executing a pending action.
        """

        try:
            if self.pending_action == pending_actions.IMPORT:
                self.handle_import()

            if self.pending_action == pending_actions.RESIZE:
                resized_images = self.resize_images()
                self.refresh_from_db()
                self.resize_gcp(resized_images)
                self.pending_action = None
                self.save()

            if self.auto_processing_node and self.status not in (status_codes.FAILED, status_codes.CANCELED):
                # No processing node assigned and need to auto assign
                if self.processing_node is None:
                    # Assign first online node with lowest queue count
                    self.processing_node = ProcessingNode.find_best_available_node()
                    if self.processing_node:
                        self.processing_node.queue_count += 1 # Doesn't have to be accurate, it will get overridden later
                        self.processing_node.save()

                        logger.info("Automatically assigned processing node {} to {}".format(self.processing_node, self))
                        self.save()

                # Processing node assigned, but is offline and no errors
                if self.processing_node and not self.processing_node.is_online():
                    # If we are queued up, detach the processing node;
                    # reassignment will happen at the next tick
                    if self.status == status_codes.QUEUED:
                        logger.info("Processing node {} went offline, reassigning {}...".format(self.processing_node, self))
                        self.uuid = ''
                        self.processing_node = None
                        self.status = None
                        self.save()

                    elif self.status == status_codes.RUNNING:
                        # Task was running and processing node went offline
                        # It could have crashed due to low memory
                        # or perhaps it went offline due to network errors.
                        # We can't easily differentiate between the two, so we need
                        # to notify the user because if it crashed due to low memory
                        # the user might need to take action (or be stuck in an infinite loop)
                        raise NodeServerError("Processing node went offline. This could be due to insufficient memory or a network error.")

            if self.processing_node:
                # Need to process some images (UUID not yet set and task doesn't have pending actions)?
                if not self.uuid and self.pending_action is None and self.status is None:
                    logger.info("Processing... {}".format(self))

                    images = [image.path() for image in self.imageupload_set.all()]

                    # Track upload progress, but limit the number of DB updates
                    # to every 2 seconds (and always record the 100% progress)
                    last_update = 0
                    def callback(progress):
                        nonlocal last_update

                        time_has_elapsed = time.time() - last_update >= 2
                        if time_has_elapsed:
                            testWatch.manual_log_call("Task.process.callback")
                            self.check_if_canceled()
                            Task.objects.filter(pk=self.id).update(upload_progress=float(progress) / 100.0)
                            last_update = time.time()

                    # This takes a while
                    try:
                        uuid = self.processing_node.process_new_task(images, self.name, self.options, callback)
                    except NodeConnectionError as e:
                        # If we can't create a task because the node is offline
                        # We want to fail instead of trying again
                        raise NodeServerError(gettext('Connection error: %(error)s') % {'error': str(e)})

                    # Refresh task object before committing change
                    self.refresh_from_db()
                    self.upload_progress = 1.0
                    self.uuid = uuid
                    self.save()

                    # TODO: log process has started processing

            if self.pending_action is not None:
                if self.pending_action == pending_actions.CANCEL:
                    # Do we need to cancel the task on the processing node?
                    logger.info("Canceling {}".format(self))
                    if self.processing_node and self.uuid:
                        # Attempt to cancel the task on the processing node
                        # We don't care if this fails (we tried)
                        try:
                            self.processing_node.cancel_task(self.uuid)
                        except OdmError:
                            logger.warning("Could not cancel {} on processing node. We'll proceed anyway...".format(self))

                        self.status = status_codes.CANCELED
                        self.pending_action = None
                        self.save()
                    else:
                        # Tasks with no processing node or UUID need no special action
                        self.status = status_codes.CANCELED
                        self.pending_action = None
                        self.save()

                elif self.pending_action == pending_actions.RESTART:
                    logger.info("Restarting {}".format(self))
                    if self.processing_node:

                        # Check if the UUID is still valid; processing nodes purge
                        # results after a set amount of time, so the UUID might have been eliminated.
                        uuid_still_exists = False

                        if self.uuid:
                            try:
                                info = self.processing_node.get_task_info(self.uuid)
                                uuid_still_exists = info.uuid == self.uuid
                            except OdmError:
                                pass

                        need_to_reprocess = False

                        if uuid_still_exists:
                            # Good to go
                            try:
                                self.processing_node.restart_task(self.uuid, self.options)
                            except (NodeServerError, NodeResponseError):
                                # Something went wrong
                                logger.warning("Could not restart {}, will start a new one".format(self))
                                need_to_reprocess = True
                        else:
                            need_to_reprocess = True

                        if need_to_reprocess:
                            logger.info("{} needs to be reprocessed".format(self))

                            # Task has been purged (or processing node is offline)
                            # Process this as a new task
                            # Removing its UUID will cause the scheduler
                            # to process this the next tick
                            self.uuid = ''

                            # We also remove the "rerun-from" parameter if it's set
                            self.options = list(filter(lambda d: d['name'] != 'rerun-from', self.options))
                            self.upload_progress = 0

                        self.console_output = ""
                        self.processing_time = -1
                        self.status = None
                        self.last_error = None
                        self.pending_action = None
                        self.running_progress = 0
                        self.save()
                    else:
                        raise NodeServerError(gettext("Cannot restart a task that has no processing node"))

                elif self.pending_action == pending_actions.REMOVE:
                    logger.info("Removing {}".format(self))
                    if self.processing_node and self.uuid:
                        # Attempt to delete the resources on the processing node
                        # We don't care if this fails, as resources on processing nodes
                        # are expected to be purged on their own after a set amount of time anyway
                        try:
                            self.processing_node.remove_task(self.uuid)
                        except OdmError:
                            pass

                    # What's more important is that we delete our task properly here
                    self.delete()

                    # Stop right here!
                    return

            if self.processing_node:
                # Need to update status (first time, queued or running?)
                if self.uuid and self.status in [None, status_codes.QUEUED, status_codes.RUNNING]:
                    # Update task info from processing node
                    if not self.console_output:
                        current_lines_count = 0
                    else:
                        current_lines_count = len(self.console_output.split("\n"))

                    info = self.processing_node.get_task_info(self.uuid, current_lines_count)

                    self.processing_time = info.processing_time
                    self.status = info.status.value

                    if len(info.output) > 0:
                        self.console_output += "\n".join(info.output) + '\n'

                    # Update running progress
                    self.running_progress = (info.progress / 100.0) * self.TASK_PROGRESS_LAST_VALUE

                    if info.last_error != "":
                        self.last_error = info.last_error

                    # Has the task just been canceled, failed, or completed?
                    if self.status in [status_codes.FAILED, status_codes.COMPLETED, status_codes.CANCELED]:
                        logger.info("Processing status: {} for {}".format(self.status, self))

                        if self.status == status_codes.COMPLETED:
                            assets_dir = self.assets_path("")

                            # Remove previous assets directory
                            if os.path.exists(assets_dir):
                                logger.info("Removing old assets directory: {} for {}".format(assets_dir, self))
                                shutil.rmtree(assets_dir)

                            os.makedirs(assets_dir)

                            # Download and try to extract results, retrying on corruption
                            # (~5% of the time, on large downloads, the archive is corrupted)
                            retry_num = 0
                            extracted = False
                            last_update = 0

                            def callback(progress):
                                nonlocal last_update

                                time_has_elapsed = time.time() - last_update >= 2

                                if time_has_elapsed or int(progress) == 100:
                                    Task.objects.filter(pk=self.id).update(running_progress=(
                                        self.TASK_PROGRESS_LAST_VALUE + (float(progress) / 100.0) * 0.1))
                                    last_update = time.time()

                            while not extracted:
                                last_update = 0
                                logger.info("Downloading all.zip for {}".format(self))

                                # Download all assets
                                zip_path = self.processing_node.download_task_assets(self.uuid, assets_dir, progress_callback=callback, parallel_downloads=max(1, int(16 / (2 ** retry_num))))

                                # Rename to all.zip
                                all_zip_path = self.assets_path("all.zip")
                                os.rename(zip_path, all_zip_path)

                                logger.info("Extracting all.zip for {}".format(self))

                                try:
                                    self.extract_assets_and_complete()
                                    extracted = True
                                except zipfile.BadZipFile:
                                    if retry_num < 5:
                                        logger.warning("{} seems corrupted. Retrying...".format(all_zip_path))
                                        retry_num += 1
                                        os.remove(all_zip_path)
                                    else:
                                        raise NodeServerError(gettext("Invalid zip file"))
                        else:
                            # FAILED, CANCELED
                            self.save()
                    else:
                        # Still waiting...
                        self.save()

        except (NodeServerError, NodeResponseError) as e:
            self.set_failure(str(e))
        except NodeConnectionError as e:
            logger.warning("{} connection/timeout error: {}. We'll try reprocessing at the next tick.".format(self, str(e)))
        except TaskInterruptedException as e:
            # Task was interrupted during image resize / upload
            logger.warning("{} interrupted".format(self, str(e)))

    def extract_assets_and_complete(self):
        """
        Extracts assets/all.zip, populates task fields where required and ensures
        rasters are Cloud Optimized GeoTIFFs.
        Raises a zipfile.BadZipFile exception if the archive is corrupted.
        """
        assets_dir = self.assets_path("")
        zip_path = self.assets_path("all.zip")

        # Extract from zip
        with zipfile.ZipFile(zip_path, "r") as zip_h:
            zip_h.extractall(assets_dir)

        logger.info("Extracted all.zip for {}".format(self))

        # Populate *_extent fields
        extent_fields = [
            (os.path.realpath(self.assets_path("odm_orthophoto", "odm_orthophoto.tif")),
             'orthophoto_extent'),
            (os.path.realpath(self.assets_path("odm_dem", "dsm.tif")),
             'dsm_extent'),
            (os.path.realpath(self.assets_path("odm_dem", "dtm.tif")),
             'dtm_extent'),
        ]

        for raster_path, field in extent_fields:
            if os.path.exists(raster_path):
                # Make sure this is a Cloud Optimized GeoTIFF
                # if not, it will be created
                try:
                    assure_cogeo(raster_path)
                except IOError as e:
                    logger.warning("Cannot create Cloud Optimized GeoTIFF for %s (%s). This will result in degraded visualization performance." % (raster_path, str(e)))

                # Read extent and SRID
                raster = GDALRaster(raster_path)
                extent = OGRGeometry.from_bbox(raster.extent)

                # Make sure PostGIS supports it
                with connection.cursor() as cursor:
                    cursor.execute("SELECT SRID FROM spatial_ref_sys WHERE SRID = %s", [raster.srid])
                    if cursor.rowcount == 0:
                        raise NodeServerError(gettext("Unsupported SRS %(code)s. Please make sure you picked a supported SRS.") % {'code': str(raster.srid)})

                # It will be implicitly transformed into the SRID of the model’s field
                # self.field = GEOSGeometry(...)
                setattr(self, field, GEOSGeometry(extent.wkt, srid=raster.srid))

                logger.info("Populated extent field with {} for {}".format(raster_path, self))

        self.update_available_assets_field()
        self.running_progress = 1.0
        self.console_output += gettext("Done!") + "\n"
        self.status = status_codes.COMPLETED
        self.save()

        from app.plugins import signals as plugin_signals
        plugin_signals.task_completed.send_robust(sender=self.__class__, task_id=self.id)
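
# A standalone sketch of the extent-population step above: read a raster's
# bounding box with GDAL, promote it to a polygon, and wrap it as a GEOS
# geometry carrying the raster's SRID (GeoDjango transforms it to the model
# field's SRID on assignment).
from django.contrib.gis.gdal import GDALRaster, OGRGeometry
from django.contrib.gis.geos import GEOSGeometry

def raster_extent_geometry(raster_path):
    raster = GDALRaster(raster_path)
    # raster.extent is (xmin, ymin, xmax, ymax); from_bbox builds the polygon
    extent = OGRGeometry.from_bbox(raster.extent)
    return GEOSGeometry(extent.wkt, srid=raster.srid)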

    def get_tile_path(self, tile_type, z, x, y):
        return self.assets_path("{}_tiles".format(tile_type), z, x, "{}.png".format(y))

    def get_tile_base_url(self, tile_type):
        # plant is just a special case of orthophoto
        if tile_type == 'plant':
            tile_type = 'orthophoto'

        return "/api/projects/{}/tasks/{}/{}/".format(self.project.id, self.id, tile_type)

    def get_map_items(self):
        types = []
        if 'orthophoto.tif' in self.available_assets: types.extend(['orthophoto', 'plant'])  # plant view derives from the orthophoto
        if 'dsm.tif' in self.available_assets: types.append('dsm')
        if 'dtm.tif' in self.available_assets: types.append('dtm')

        camera_shots = ''
        if 'shots.geojson' in self.available_assets: camera_shots = '/api/projects/{}/tasks/{}/download/shots.geojson'.format(self.project.id, self.id)

        return {
            'tiles': [{'url': self.get_tile_base_url(t), 'type': t} for t in types],
            'meta': {
                'task': {
                    'id': str(self.id),
                    'project': self.project.id,
                    'public': self.public,
                    'camera_shots': camera_shots
                }
            }
        }

    def get_model_display_params(self):
        """
        Subset of a task fields used in the 3D model display view
        """
        return {
            'id': str(self.id),
            'project': self.project.id,
            'available_assets': self.available_assets,
            'public': self.public
        }

    def generate_deferred_asset(self, archive, directory):
        """
        :param archive: path of the destination .zip file (relative to /assets/ directory)
        :param directory: path of the source directory to compress (relative to /assets/ directory)
        :return: full path of the generated archive
        """
        archive_path = self.assets_path(archive)
        directory_path = self.assets_path(directory)

        if not os.path.exists(directory_path):
            raise FileNotFoundError("{} does not exist".format(directory_path))

        if not os.path.exists(archive_path):
            shutil.make_archive(os.path.splitext(archive_path)[0], 'zip', directory_path)

        return archive_path
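
# Note on the make_archive call above: shutil.make_archive appends the
# format's extension itself, which is why generate_deferred_asset strips
# ".zip" with os.path.splitext first. A minimal standalone equivalent:
import os
import shutil

def zip_directory(archive_path, directory_path):
    # '/x/dtm_tiles.zip' -> base '/x/dtm_tiles'; make_archive writes '/x/dtm_tiles.zip'
    base, _ = os.path.splitext(archive_path)
    return shutil.make_archive(base, 'zip', directory_path)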

    def update_available_assets_field(self, commit=False):
        """
        Updates the available_assets field with the actual types of assets available
        :param commit: when True also saves the model, otherwise the user should manually call save()
        """
        all_assets = list(self.ASSETS_MAP.keys())
        self.available_assets = [asset for asset in all_assets if self.is_asset_available_slow(asset)]
        if commit: self.save()


    def delete(self, using=None, keep_parents=False):
        task_id = self.id
        from app.plugins import signals as plugin_signals
        plugin_signals.task_removing.send_robust(sender=self.__class__, task_id=task_id)

        directory_to_delete = os.path.join(settings.MEDIA_ROOT,
                                           task_directory_path(self.id, self.project.id))

        super(Task, self).delete(using, keep_parents)

        # Remove files related to this task
        try:
            shutil.rmtree(directory_to_delete)
        except FileNotFoundError as e:
            logger.warning(e)

        plugin_signals.task_removed.send_robust(sender=self.__class__, task_id=task_id)

    def set_failure(self, error_message):
        logger.error("FAILURE FOR {}: {}".format(self, error_message))
        self.last_error = error_message
        self.status = status_codes.FAILED
        self.pending_action = None
        self.save()
        
    def find_all_files_matching(self, regex):
        directory = full_task_directory_path(self.id, self.project.id)
        return [os.path.join(directory, f) for f in os.listdir(directory) if
                       re.match(regex, f, re.IGNORECASE)]

    def check_if_canceled(self):
        # Check if task has been canceled/removed
        if Task.objects.only("pending_action").get(pk=self.id).pending_action in [pending_actions.CANCEL,
                                                                                  pending_actions.REMOVE]:
            raise TaskInterruptedException()
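
# Hedged sketch of the cooperative-cancellation pattern check_if_canceled()
# supports: long-running loops (resize, upload, download) call it periodically
# so a CANCEL/REMOVE request interrupts work in progress rather than waiting
# for it to finish. Names below are illustrative.
def run_chunked_work(task, chunks, process_chunk):
    for chunk in chunks:
        process_chunk(chunk)
        task.check_if_canceled()  # raises TaskInterruptedException if flagged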

    def resize_images(self):
        """
        Destructively resize this task's JPG images while retaining EXIF tags.
        Resulting images are always converted to JPG.
        TODO: add support for tiff files
        :return: list containing paths of resized images and resize ratios
        """
        if self.resize_to < 0:
            logger.warning("We were asked to resize images to {}, this might be an error.".format(self.resize_to))
            return []

        images_path = self.find_all_files_matching(r'.*\.(jpe?g|tiff?)$')
        total_images = len(images_path)
        resized_images_count = 0
        last_update = 0

        def callback(retval=None):
            nonlocal last_update
            nonlocal resized_images_count
            nonlocal total_images

            resized_images_count += 1
            if time.time() - last_update >= 2:
                # Update progress
                Task.objects.filter(pk=self.id).update(resize_progress=(float(resized_images_count) / float(total_images)))
                self.check_if_canceled()
                last_update = time.time()

        resized_images = list(map(partial(resize_image, resize_to=self.resize_to, done=callback), images_path))

        Task.objects.filter(pk=self.id).update(resize_progress=1.0)

        return resized_images

    def resize_gcp(self, resized_images):
        """
        Destructively change this task's GCP file (if any)
        by resizing the location of GCP entries.
        :param resized_images: list of objects having "path" and "resize_ratio" keys
            for example [{'path': 'path/to/DJI_0018.jpg', 'resize_ratio': 0.25}, ...]
        :return: path to changed GCP file or None if no GCP file was found/changed
        """
        gcp_path = self.find_all_files_matching(r'.*\.txt$')
        if len(gcp_path) == 0: return None

        # Assume we only have a single GCP file per task
        gcp_path = gcp_path[0]

        image_ratios = {}
        for ri in resized_images:
            image_ratios[os.path.basename(ri['path']).lower()] = ri['resize_ratio']

        try:
            gcp_file = GCPFile(gcp_path)
            gcp_file.create_resized_copy(gcp_path, image_ratios)
            logger.info("Resized GCP file {}".format(gcp_path))
            return gcp_path
        except Exception as e:
            logger.warning("Could not resize GCP file {}: {}".format(gcp_path, str(e)))


    def create_task_directories(self):
        """
        Create directories for this task (if they don't exist already)
        """
        assets_dir = self.assets_path("")
        try:
            os.makedirs(assets_dir)
        except OSError as exc:  # Python >2.5
            if exc.errno == errno.EEXIST and os.path.isdir(assets_dir):
                pass
            else:
                raise
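
# Aside: on Python 3, the errno-based EAFP block in create_task_directories
# above collapses to a single call with the same behavior (no error when the
# directory already exists):
import os

def ensure_directory(path):
    os.makedirs(path, exist_ok=True)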
Example #39
0
class Task(models.Model):
    ASSETS_MAP = {
            'all.zip': 'all.zip',
            'orthophoto.tif': os.path.join('odm_orthophoto', 'odm_orthophoto.tif'),
            'orthophoto.png': os.path.join('odm_orthophoto', 'odm_orthophoto.png'),
            'georeferenced_model.las': os.path.join('odm_georeferencing', 'odm_georeferenced_model.las'),
            'georeferenced_model.ply': os.path.join('odm_georeferencing', 'odm_georeferenced_model.ply'),
            'georeferenced_model.csv': os.path.join('odm_georeferencing', 'odm_georeferenced_model.csv'),
            'textured_model.zip': {
                'deferred_path': 'textured_model.zip',
                'deferred_compress_dir': 'odm_texturing'
            },
            'dtm.tif': os.path.join('odm_dem', 'dtm.tif'),
            'dsm.tif': os.path.join('odm_dem', 'dsm.tif'),
    }

    STATUS_CODES = (
        (status_codes.QUEUED, 'QUEUED'),
        (status_codes.RUNNING, 'RUNNING'),
        (status_codes.FAILED, 'FAILED'),
        (status_codes.COMPLETED, 'COMPLETED'),
        (status_codes.CANCELED, 'CANCELED'),
    )

    PENDING_ACTIONS = (
        (pending_actions.CANCEL, 'CANCEL'),
        (pending_actions.REMOVE, 'REMOVE'),
        (pending_actions.RESTART, 'RESTART'),
    )

    id = models.UUIDField(primary_key=True, default=uuid_module.uuid4, unique=True, serialize=False, editable=False)

    uuid = models.CharField(max_length=255, db_index=True, default='', blank=True, help_text="Identifier of the task (as returned by OpenDroneMap's REST API)")
    project = models.ForeignKey(Project, on_delete=models.CASCADE, help_text="Project that this task belongs to")
    name = models.CharField(max_length=255, null=True, blank=True, help_text="A label for the task")
    processing_lock = models.BooleanField(default=False, help_text="A flag indicating whether this task is currently locked for processing. When this flag is turned on, the task is in the middle of a processing step.")
    processing_time = models.IntegerField(default=-1, help_text="Number of milliseconds that elapsed since the beginning of this task (-1 indicates that no information is available)")
    processing_node = models.ForeignKey(ProcessingNode, null=True, blank=True, help_text="Processing node assigned to this task (or null if this task has not been associated yet)")
    auto_processing_node = models.BooleanField(default=True, help_text="A flag indicating whether this task should be automatically assigned a processing node")
    status = models.IntegerField(choices=STATUS_CODES, db_index=True, null=True, blank=True, help_text="Current status of the task")
    last_error = models.TextField(null=True, blank=True, help_text="The last processing error received")
    options = fields.JSONField(default=dict, blank=True, help_text="Options that are being used to process this task", validators=[validate_task_options])
    available_assets = fields.ArrayField(models.CharField(max_length=80), default=list, blank=True, help_text="List of available assets to download")
    console_output = models.TextField(null=False, default="", blank=True, help_text="Console output of the OpenDroneMap's process")
    ground_control_points = models.FileField(null=True, blank=True, upload_to=gcp_directory_path, help_text="Optional Ground Control Points file to use for processing")

    orthophoto_extent = GeometryField(null=True, blank=True, srid=4326, help_text="Extent of the orthophoto created by OpenDroneMap")
    dsm_extent = GeometryField(null=True, blank=True, srid=4326, help_text="Extent of the DSM created by OpenDroneMap")
    dtm_extent = GeometryField(null=True, blank=True, srid=4326, help_text="Extent of the DTM created by OpenDroneMap")

    # mission
    created_at = models.DateTimeField(default=timezone.now, help_text="Creation date")
    pending_action = models.IntegerField(choices=PENDING_ACTIONS, db_index=True, null=True, blank=True, help_text="A requested action to be performed on the task. The selected action will be performed by the scheduler at the next iteration.")

    public = models.BooleanField(default=False, help_text="A flag indicating whether this task is available to the public")


    def __init__(self, *args, **kwargs):
        super(Task, self).__init__(*args, **kwargs)

        # To help keep track of changes to the project id
        self.__original_project_id = self.project.id

    def __str__(self):
        name = self.name if self.name is not None else "unnamed"

        return 'Task [{}] ({})'.format(name, self.id)

    def move_assets(self, old_project_id, new_project_id):
        """
        Moves the task's folder to a new project and updates ImageFields and orthophoto file paths
        """
        old_task_folder = full_task_directory_path(self.id, old_project_id)
        new_task_folder = full_task_directory_path(self.id, new_project_id)
        new_task_folder_parent = os.path.abspath(os.path.join(new_task_folder, os.pardir))

        try:
            if os.path.exists(old_task_folder) and not os.path.exists(new_task_folder):
                # Use parent, otherwise we get a duplicate directory in there
                if not os.path.exists(new_task_folder_parent):
                    os.makedirs(new_task_folder_parent)

                shutil.move(old_task_folder, new_task_folder_parent)

                logger.info("Moved task folder from {} to {}".format(old_task_folder, new_task_folder))

                with transaction.atomic():
                    for img in self.imageupload_set.all():
                        prev_name = img.image.name
                        img.image.name = assets_directory_path(self.id, new_project_id,
                                                               os.path.basename(img.image.name))
                        logger.info("Changing {} to {}".format(prev_name, img))
                        img.save()

            else:
                logger.warning("Project changed for task {}, but either {} doesn't exist, or {} already exists. This doesn't look right, so we will not move any files.".format(self,
                                                                                                             old_task_folder,
                                                                                                             new_task_folder))
        except shutil.Error as e:
            logger.warning("Could not move assets folder for task {}. We're going to proceed anyway, but you might experience issues: {}".format(self, e))

    def save(self, *args, **kwargs):
        if self.project.id != self.__original_project_id:
            self.move_assets(self.__original_project_id, self.project.id)
            self.__original_project_id = self.project.id

        # Autovalidate on save
        self.full_clean()

        super(Task, self).save(*args, **kwargs)

    def assets_path(self, *args):
        """
        Get a path relative to the place where assets are stored
        """
        return os.path.join(settings.MEDIA_ROOT,
                            assets_directory_path(self.id, self.project.id, ""),
                            "assets",
                            *args)

    def is_asset_available_slow(self, asset):
        """
        Checks whether a particular asset is available in the file system
        Generally this should never be used directly, as it's slow. Use the available_assets field
        in the database instead.
        :param asset: one of ASSETS_MAP keys
        :return: boolean
        """
        if asset in self.ASSETS_MAP:
            value = self.ASSETS_MAP[asset]
            if isinstance(value, str):
                return os.path.exists(self.assets_path(value))
            elif isinstance(value, dict):
                if 'deferred_compress_dir' in value:
                    return os.path.exists(self.assets_path(value['deferred_compress_dir']))

        return False

    def get_asset_download_path(self, asset):
        """
        Get the path to an asset download
        :param asset: one of ASSETS_MAP keys
        :return: path
        """

        if asset in self.ASSETS_MAP:
            value = self.ASSETS_MAP[asset]
            if isinstance(value, str):
                return self.assets_path(value)

            elif isinstance(value, dict):
                if 'deferred_path' in value and 'deferred_compress_dir' in value:
                    return self.generate_deferred_asset(value['deferred_path'], value['deferred_compress_dir'])
                else:
                    raise FileNotFoundError("{} is not a valid asset (invalid dict values)".format(asset))

            else:
                raise FileNotFoundError("{} is not a valid asset (invalid map)".format(asset))
        else:
            raise FileNotFoundError("{} is not a valid asset".format(asset))

    def process(self):
        """
        This method contains the logic for processing tasks asynchronously
        from a background thread or from the scheduler. Here tasks that are
        ready to be processed execute some logic. This could be communication
        with a processing node or executing a pending action.
        """

        try:
            if self.auto_processing_node and self.status not in (status_codes.FAILED, status_codes.CANCELED):
                # No processing node assigned and need to auto assign
                if self.processing_node is None:
                    # Assign first online node with lowest queue count
                    self.processing_node = ProcessingNode.find_best_available_node()
                    if self.processing_node:
                        self.processing_node.queue_count += 1 # Doesn't have to be accurate, it will get overridden later
                        self.processing_node.save()

                        logger.info("Automatically assigned processing node {} to {}".format(self.processing_node, self))
                        self.save()

                # Processing node assigned, but is offline and no errors
                if self.processing_node and not self.processing_node.is_online():
                    # Detach processing node, will be processed at the next tick
                    logger.info("Processing node {} went offline, reassigning {}...".format(self.processing_node, self))
                    self.uuid = ''
                    self.processing_node = None
                    self.save()

            if self.processing_node:
                # Need to process some images (UUID not yet set and task doesn't have pending actions)?
                if not self.uuid and self.pending_action is None and self.status is None:
                    logger.info("Processing... {}".format(self))

                    images = [image.path() for image in self.imageupload_set.all()]

                    # This takes a while
                    uuid = self.processing_node.process_new_task(images, self.name, self.options)

                    # Refresh task object before committing change
                    self.refresh_from_db()
                    self.uuid = uuid
                    self.save()

                    # TODO: log process has started processing

            if self.pending_action is not None:
                if self.pending_action == pending_actions.CANCEL:
                    # Do we need to cancel the task on the processing node?
                    logger.info("Canceling {}".format(self))
                    if self.processing_node and self.uuid:
                        # Attempt to cancel the task on the processing node
                        # We don't care if this fails (we tried)
                        try:
                            self.processing_node.cancel_task(self.uuid)
                            self.status = None
                        except ProcessingException:
                            logger.warning("Could not cancel {} on processing node. We'll proceed anyway...".format(self))
                            self.status = status_codes.CANCELED

                        self.pending_action = None
                        self.save()
                    else:
                        raise ProcessingError("Cannot cancel a task that has no processing node or UUID")

                elif self.pending_action == pending_actions.RESTART:
                    logger.info("Restarting {}".format(self))
                    if self.processing_node:

                        # Check if the UUID is still valid; processing nodes purge
                        # results after a set amount of time, so the UUID might have been eliminated.
                        uuid_still_exists = False

                        if self.uuid:
                            try:
                                info = self.processing_node.get_task_info(self.uuid)
                                uuid_still_exists = info['uuid'] == self.uuid
                            except ProcessingException:
                                pass

                        if uuid_still_exists:
                            # Good to go
                            try:
                                self.processing_node.restart_task(self.uuid)
                            except ProcessingError:
                                # Something went wrong
                                logger.warning("Could not restart {}, will start a new one".format(self))
                                self.uuid = ''
                        else:
                            # Task has been purged (or processing node is offline)
                            # Process this as a new task
                            # Removing its UUID will cause the scheduler
                            # to process this the next tick
                            self.uuid = ''

                        self.console_output = ""
                        self.processing_time = -1
                        self.status = None
                        self.last_error = None
                        self.pending_action = None
                        self.save()
                    else:
                        raise ProcessingError("Cannot restart a task that has no processing node")

                elif self.pending_action == pending_actions.REMOVE:
                    logger.info("Removing {}".format(self))
                    if self.processing_node and self.uuid:
                        # Attempt to delete the resources on the processing node.
                        # We don't care if this fails; resources on processing nodes
                        # are expected to be purged on their own after a set amount
                        # of time anyway
                        try:
                            self.processing_node.remove_task(self.uuid)
                        except ProcessingException:
                            pass

                    # What's more important is that we delete our task properly here
                    self.delete()

                    # Stop right here!
                    return

            if self.processing_node:
                # Need to update status (first time, queued or running?)
                if self.uuid and self.status in [None, status_codes.QUEUED, status_codes.RUNNING]:
                    # Update task info from processing node
                    info = self.processing_node.get_task_info(self.uuid)

                    self.processing_time = info["processingTime"]
                    self.status = info["status"]["code"]

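                    # Append only the console output we haven't seen yet;
                    # current_lines_count tells the node which line to resume from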
                    current_lines_count = len(self.console_output.split("\n")) - 1
                    self.console_output += self.processing_node.get_task_console_output(self.uuid, current_lines_count)

                    if "errorMessage" in info["status"]:
                        self.last_error = info["status"]["errorMessage"]

                    # Has the task just been canceled, failed, or completed?
                    if self.status in [status_codes.FAILED, status_codes.COMPLETED, status_codes.CANCELED]:
                        logger.info("Processing status: {} for {}".format(self.status, self))

                        if self.status == status_codes.COMPLETED:
                            assets_dir = self.assets_path("")

                            # Remove previous assets directory
                            if os.path.exists(assets_dir):
                                logger.info("Removing old assets directory: {} for {}".format(assets_dir, self))
                                shutil.rmtree(assets_dir)

                            os.makedirs(assets_dir)

                            logger.info("Downloading all.zip for {}".format(self))

                            # Download all assets
                            zip_stream = self.processing_node.download_task_asset(self.uuid, "all.zip")
                            zip_path = os.path.join(assets_dir, "all.zip")
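                            # Stream the archive to disk in 4 KB chunks so the
                            # entire zip is never held in memory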
                            with open(zip_path, 'wb') as fd:
                                for chunk in zip_stream.iter_content(4096):
                                    fd.write(chunk)

                            logger.info("Done downloading all.zip for {}".format(self))

                            # Extract from zip
                            with zipfile.ZipFile(zip_path, "r") as zip_h:
                                zip_h.extractall(assets_dir)

                            logger.info("Extracted all.zip for {}".format(self))

                            # Populate *_extent fields
                            extent_fields = [
                                (os.path.realpath(self.assets_path("odm_orthophoto", "odm_orthophoto.tif")),
                                 'orthophoto_extent'),
                                (os.path.realpath(self.assets_path("odm_dem", "dsm.tif")),
                                 'dsm_extent'),
                                (os.path.realpath(self.assets_path("odm_dem", "dtm.tif")),
                                 'dtm_extent'),
                            ]

                            for raster_path, field in extent_fields:
                                if os.path.exists(raster_path):
                                    # Read extent and SRID
                                    raster = GDALRaster(raster_path)
                                    extent = OGRGeometry.from_bbox(raster.extent)

                                    # The geometry is built in the raster's SRID; Django
                                    # implicitly transforms it to the SRID of the model
                                    # field on assignment (self.<field> = GEOSGeometry(...))
                                    setattr(self, field, GEOSGeometry(extent.wkt, srid=raster.srid))

                                    logger.info("Populated extent field with {} for {}".format(raster_path, self))

                            self.update_available_assets_field()
                            self.save()
                        else:
                            # FAILED or CANCELED: just persist the final status
                            self.save()
                    else:
                        # Still waiting...
                        self.save()

        except ProcessingError as e:
            self.set_failure(str(e))
        except (ConnectionRefusedError, ConnectionError) as e:
            logger.warning("{} cannot communicate with processing node: {}".format(self, str(e)))
        except ProcessingTimeout as e:
            logger.warning("{} timed out with error: {}. We'll try reprocessing at the next tick.".format(self, str(e)))


    def get_tile_path(self, tile_type, z, x, y):
        return self.assets_path("{}_tiles".format(tile_type), z, x, "{}.png".format(y))
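    # For example (with hypothetical tile coordinates):
    #   get_tile_path('orthophoto', 16, 32710, 21994)
    #   --> .../orthophoto_tiles/16/32710/21994.png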

    def get_tile_json_url(self, tile_type):
        return "/api/projects/{}/tasks/{}/{}/tiles.json".format(self.project.id, self.id, tile_type)

    def get_map_items(self):
        types = []
        if 'orthophoto.tif' in self.available_assets: types.append('orthophoto')
        if 'dsm.tif' in self.available_assets: types.append('dsm')
        if 'dtm.tif' in self.available_assets: types.append('dtm')

        return {
            'tiles': [{'url': self.get_tile_json_url(t), 'type': t} for t in types],
            'meta': {
                'task': {
                    'id': str(self.id),
                    'project': self.project.id,
                    'public': self.public
                }
            }
        }

    def get_model_display_params(self):
        """
        Subset of a task's fields used in the 3D model display view
        """
        return {
            'id': str(self.id),
            'project': self.project.id,
            'available_assets': self.available_assets,
            'public': self.public
        }

    def generate_deferred_asset(self, archive, directory):
        """
        :param archive: path of the destination .zip file (relative to /assets/ directory)
        :param directory: path of the source directory to compress (relative to /assets/ directory)
        :return: full path of the generated archive
        """
        archive_path = self.assets_path(archive)
        directory_path = self.assets_path(directory)

        if not os.path.exists(directory_path):
            raise FileNotFoundError("{} does not exist".format(directory_path))

        if not os.path.exists(archive_path):
            shutil.make_archive(os.path.splitext(archive_path)[0], 'zip', directory_path)

        return archive_path
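    # Usage sketch (hypothetical names): generate_deferred_asset('dsm_tiles.zip', 'dsm_tiles')
    # compresses <assets>/dsm_tiles into <assets>/dsm_tiles.zip on first call and
    # returns the path to the cached archive on subsequent calls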

    def update_available_assets_field(self, commit=False):
        """
        Updates the available_assets field with the actual types of assets available
        :param commit: when True also saves the model, otherwise the user should manually call save()
        """
        all_assets = list(self.ASSETS_MAP.keys())
        self.available_assets = [asset for asset in all_assets if self.is_asset_available_slow(asset)]
        if commit: self.save()


    def delete(self, using=None, keep_parents=False):
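        # Resolve the directory path before deleting the row; Django clears
        # the instance's id after super().delete(), so it must be captured first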
        directory_to_delete = os.path.join(settings.MEDIA_ROOT,
                                           task_directory_path(self.id, self.project.id))

        super(Task, self).delete(using, keep_parents)

        # Remove files related to this task
        try:
            shutil.rmtree(directory_to_delete)
        except FileNotFoundError as e:
            logger.warning(e)


    def set_failure(self, error_message):
        logger.error("FAILURE FOR {}: {}".format(self, error_message))
        self.last_error = error_message
        self.status = status_codes.FAILED
        self.pending_action = None
        self.save()

    class Meta:
        permissions = (
            ('view_task', 'Can view task'),
        )