    def test_run_osm_task(self, mock_chain):
        provider = DataProvider.objects.get(slug="osm")
        provider_task = DataProviderTask.objects.create(provider=provider)
        provider_task.formats.add(self.shp_task)
        provider_task.save()
        self.job.data_provider_tasks.add(provider_task)
        create_run(job=self.job)

        task_chain_builder = TaskChainBuilder()

        # Even though code using pipes seems to be supported here, it throws an error.
        try:
            task_chain_builder.build_tasks(
                osm_data_collection_task,
                provider_task_uid=provider_task.uid,
                run=self.job.runs.first(),
                worker="some_worker",
            )
        except TypeError:
            pass
        run = self.job.runs.first()
        self.assertIsNotNone(run)
        tasks = run.data_provider_task_records.first().tasks.filter(
            name="OSM(.gpkg)")
        self.assertIsNotNone(tasks)

    def test_run_wms_task(self, mock_chain):

        celery_uid = str(uuid.uuid4())
        provider = DataProvider.objects.get(slug='wms')
        provider_task_record = DataProviderTask.objects.create(
            provider=provider)
        self.job.provider_tasks.add(provider_task_record)
        # celery chain mock
        mock_chain.return_value.apply_async.return_value = Mock()
        create_run(job_uid=self.job.uid)
        task_chain_builder = TaskChainBuilder()
        # Even though code using pipes seems to be supported here, it throws an error.
        try:
            task_chain_builder.build_tasks(
                mapproxy_export_task,
                provider_task_uid=provider_task_record.uid,
                run=self.job.runs.first(),
                service_type='wms',
                worker="some_worker")
        except TypeError:
            pass
        run = self.job.runs.first()
        self.assertIsNotNone(run)
        tasks = run.provider_tasks.first().tasks.filter(
            name='Raster export (.gpkg)')
        self.assertIsNotNone(tasks)

    def test_run_wcs_task(self, mock_chain):
        provider = DataProvider.objects.get(slug="wms")
        provider_task_record = DataProviderTask.objects.create(
            provider=provider)
        self.job.data_provider_tasks.add(provider_task_record)
        # celery chain mock
        mock_chain.return_value.apply_async.return_value = Mock()
        create_run(job=self.job)
        task_chain_builder = TaskChainBuilder()
        # Even though code using pipes seems to be supported here, it throws an error.
        try:
            task_chain_builder.build_tasks(
                wcs_export_task,
                provider_task_uid=provider_task_record.uid,
                run=self.job.runs.first(),
                service_type="wcs",
                worker="some_worker",
            )
        except TypeError:
            pass
        run = self.job.runs.first()
        self.assertIsNotNone(run)
        tasks = run.data_provider_task_records.first().tasks.filter(
            name="Geotiff Format (.tif)")
        self.assertIsNotNone(tasks)
Example #4
    def run(self, request, uid=None, *args, **kwargs):
        """
        Creates the run (i.e. runs the job).

        Gets the job_uid and current user from the request.
        Creates an instance of the TaskFactory and
        calls run_task on it, passing the job_uid and user.

        *request:* the http request

        *Returns:*
            - the serialized run data.
        """
        # run needs to be created so that the UI can be updated with the task list.
        run_uid = create_run(job_uid=uid)
        # Run is passed to celery to start the tasks.
        run = ExportRun.objects.get(uid=run_uid)
        if run.user != request.user and not request.user.is_superuser:
            return Response([{'detail': _('Unauthorized.')}], status.HTTP_403_FORBIDDEN)
        if run:
            pick_up_run_task.delay(run_uid=run_uid)
            running = ExportRunSerializer(run, context={'request': request})
            return Response(running.data, status=status.HTTP_202_ACCEPTED)
        else:
            return Response([{'detail': _('Failed to run Export')}], status.HTTP_400_BAD_REQUEST)
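A minimal client-side sketch of how this endpoint might be called, assuming it is exposed as a run action under /api/jobs/<uid>/run with token authentication (both the route and the auth scheme are assumptions for illustration, not taken from the snippet above):

import requests

# Hypothetical values; replace with a real job uid and credentials.
BASE_URL = "http://cloud.eventkit.dev/api"
JOB_UID = "cf9c038c-a09a-4058-855a-b0b1d5a6c5c4"
HEADERS = {"Authorization": "Token <your-token>"}

# The view above returns 202 with the serialized run on success,
# 403 if the requester is not the run's owner or a superuser,
# and 400 if the export could not be run.
response = requests.post(f"{BASE_URL}/jobs/{JOB_UID}/run", headers=HEADERS)

if response.status_code == 202:
    print("Run submitted:", response.json())
elif response.status_code == 403:
    print("Unauthorized: only the owner or a superuser can run this job.")
else:
    print("Failed to run export:", response.text)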

    def test_task_factory(
        self,
        task_factory_chain,
        finalize_task,
        mock_task_chain_builder,
        create_task,
        mock_invalid_licenses,
        mock_os,
    ):
        mock_invalid_licenses.return_value = []
        run_uid = create_run(job_uid=self.job.uid)
        self.assertIsNotNone(run_uid)
        self.assertIsNotNone(ExportRun.objects.get(uid=run_uid))
        worker = "some_worker"
        mock_osm_task = 'osm-task'
        provider_uuid = uuid.uuid4()
        task = Mock()
        mock_task_chain_builder().build_tasks.return_value = (provider_uuid, task)

        del task.tasks

        task_factory = TaskFactory()
        task_factory.type_task_map = {'osm-generic': mock_osm_task, 'osm': mock_osm_task}

        task_factory.parse_tasks(run_uid=run_uid, worker=worker)
        task_factory_chain.assert_called()
        finalize_task.s.assert_called()
        mock_os.makedirs.assert_called()
        self.assertEqual(3, create_task.call_count)

        # Test that run is prevented and deleted if the user has not agreed to the licenses.
        mock_invalid_licenses.return_value = ['invalid-licenses']
        with self.assertRaises(Exception):
            task_factory.parse_tasks(run_uid=run_uid, worker=worker)
            run = ExportRun.objects.filter(uid=run_uid).first()
            self.assertIsNone(run)

        task.tasks = [task]
        task_factory = TaskFactory()
        task_factory.type_task_map = {'osm': mock_osm_task}

        task_factory.parse_tasks(run_uid=run_uid, worker=worker)
        task_factory_chain.assert_called()

    def test_run_osm_task(self, mock_chain):
        provider = DataProvider.objects.get(slug='osm')
        provider_task = DataProviderTask.objects.create(provider=provider)
        provider_task.formats.add(self.shp_task)
        provider_task.save()
        self.job.provider_tasks.add(provider_task)
        create_run(job_uid=self.job.uid)

        task_chain_builder = TaskChainBuilder()

        # Even though code using pipes seems to be supported here, it throws an error.
        try:
            task_chain_builder.build_tasks(osm_data_collection_task,
                                           provider_task_uid=provider_task.uid, run=self.job.runs.first(),
                                           worker="some_worker")
        except TypeError:
            pass
        run = self.job.runs.first()
        self.assertIsNotNone(run)
        tasks = run.provider_tasks.first().tasks.filter(name='OSM(.gpkg)')
        self.assertIsNotNone(tasks)
Example #7
def rerun_data_provider_records(run_uid, user_id, data_provider_slugs):
    from eventkit_cloud.tasks.task_factory import create_run, Error, Unauthorized, InvalidLicense

    with transaction.atomic():
        old_run: ExportRun = ExportRun.objects.select_related(
            "job__user", "parent_run__job__user").get(uid=run_uid)

        user: User = User.objects.get(pk=user_id)

        while old_run and old_run.is_cloning:
            # Find pending providers and add them to list
            for dptr in old_run.data_provider_task_records.all():
                if dptr.status == TaskState.PENDING.value:
                    data_provider_slugs.append(dptr.provider.slug)
            old_run: ExportRun = old_run.parent_run

        # Remove any duplicates
        data_provider_slugs = list(set(data_provider_slugs))

        try:
            new_run_uid = create_run(job=old_run.job,
                                     user=user,
                                     clone=old_run,
                                     download_data=False)
        except Unauthorized:
            raise PermissionDenied(
                code="permission_denied",
                detail="ADMIN permission is required to run this DataPack.")
        except (InvalidLicense, Error) as err:
            return Response([{
                "detail": _(str(err))
            }], status.HTTP_400_BAD_REQUEST)

        run: ExportRun = ExportRun.objects.get(uid=new_run_uid)

        # Reset the old data provider task record for the providers we're recreating.
        data_provider_task_record: DataProviderTaskRecord
        run.data_provider_task_records.filter(slug="run").delete()
        for data_provider_task_record in run.data_provider_task_records.all():
            if data_provider_task_record.provider is not None:
                # Have to clean out the tasks that were finished and request the ones that weren't.
                if (data_provider_task_record.provider.slug
                        in data_provider_slugs
                        or TaskState[data_provider_task_record.status]
                        in TaskState.get_not_finished_states()):
                    data_provider_task_record.status = TaskState.PENDING.value
                    # Delete the associated tasks so that they can be recreated.
                    data_provider_task_record.tasks.all().delete()
                    data_provider_task_record.save()

        run.status = TaskState.SUBMITTED.value
        run.save()

    def test_run_wcs_task(self, mock_chain):

        celery_uid = str(uuid.uuid4())
        provider = DataProvider.objects.get(slug='wms')
        provider_task_record = DataProviderTask.objects.create(provider=provider)
        self.job.provider_tasks.add(provider_task_record)
        # celery chain mock
        mock_chain.return_value.apply_async.return_value = Mock()
        create_run(job_uid=self.job.uid)
        task_chain_builder = TaskChainBuilder()
        # Even though code using pipes seems to be supported here, it throws an error.
        try:
            task_chain_builder.build_tasks(wcs_export_task,
                                           provider_task_uid=provider_task_record.uid, run=self.job.runs.first(),
                                           service_type='wcs',
                                           worker="some_worker")
        except TypeError:
            pass
        run = self.job.runs.first()
        self.assertIsNotNone(run)
        tasks = run.provider_tasks.first().tasks.filter(name='Geotiff Format (.tif)')
        self.assertIsNotNone(tasks)

    def test_task_factory(
        self,
        task_factory_chain,
        finalize_task,
        mock_task_chain_builder,
        create_task,
        mock_invalid_licenses,
    ):
        mock_invalid_licenses.return_value = []
        run_uid = create_run(job=self.job)
        self.assertIsNotNone(run_uid)
        self.assertIsNotNone(ExportRun.objects.get(uid=run_uid))
        worker = "some_worker"
        mock_osm_task = "osm-task"
        provider_uuid = uuid.uuid4()
        task = Mock()
        mock_task_chain_builder().build_tasks.return_value = (provider_uuid,
                                                              task)

        del task.tasks

        task_factory = TaskFactory()
        task_factory.type_task_map = {
            "osm-generic": mock_osm_task,
            "osm": mock_osm_task
        }

        task_factory.parse_tasks(run_uid=run_uid, worker=worker)
        task_factory_chain.assert_called()
        finalize_task.s.assert_called()
        self.assertEqual(2, create_task.call_count)

        # Test that run is prevented and deleted if the user has not agreed to the licenses.
        mock_invalid_licenses.return_value = ["invalid-licenses"]
        with self.assertRaises(Exception):
            task_factory.parse_tasks(run_uid=run_uid, worker=worker)
            run = ExportRun.objects.filter(uid=run_uid).first()
            self.assertIsNone(run)

        task.tasks = [task]
        task_factory = TaskFactory()
        task_factory.type_task_map = {"osm": mock_osm_task}

        task_factory.parse_tasks(run_uid=run_uid, worker=worker)
        task_factory_chain.assert_called()

    def test_create_run_failure(self, ExportRun):
        ExportRun.objects.create.side_effect = DatabaseError('FAIL')
        with self.assertRaises(DatabaseError):
            run_uid = create_run(job_uid=self.job.uid)
            self.assertIsNone(run_uid)

    def test_create_run_success(self):
        run_uid = create_run(job_uid=self.job.uid)
        self.assertIsNotNone(run_uid)
        self.assertIsNotNone(ExportRun.objects.get(uid=run_uid))
Example #12
    def create(self, request, *args, **kwargs):
        """
        Create a Job from the supplied request data.

        The request data is validated by *api.serializers.JobSerializer*.
        Associates the *Job* with required *ExportFormats*, *ExportConfig*

        * request: the HTTP request in JSON.

            Example:

                {
                    "name" : "Example Name",
                    "description" : "Example Description",
                    "event" : "Example Event (Project)",
                    "include_zipfile" : true,
                    "selection": { ... valid geojson ... },
                    "tags" : [],
                    "provider_tasks" : [{
                            "provider" : "OpenStreetMap Data (Themes)",
                            "formats" : ["shp", "gpkg"]
                        }
                    ]
                }


        To monitor the resulting export run, retrieve the `uid` value from the returned JSON
        and call /api/runs?job_uid=[the returned uid]

        * Returns: the newly created Job instance.

            Example:

                {
                  "provider_tasks": [
                    {
                      "provider": "OpenStreetMap Tiles",
                      "formats": [
                        "gpkg"
                      ]
                    }
                  ],
                  "uid": "cf9c038c-a09a-4058-855a-b0b1d5a6c5c4",
                  "url": "http://cloud.eventkit.dev/api/jobs/cf9c038c-a09a-4058-855a-b0b1d5a6c5c4",
                  "name": "test",
                  "description": "test",
                  "event": "test",
                  "created_at": "2017-03-10T15:09:29.802364Z",
                  "owner": "admin",
                  "exports": [
                    {
                      "formats": [
                        {
                          "uid": "167fbc03-83b3-41c9-8034-8566257cb2e8",
                          "url": "http://cloud.eventkit.dev/api/formats/gpkg",
                          "slug": "gpkg",
                          "name": "Geopackage",
                          "description": "GeoPackage"
                        }
                      ],
                      "provider": "OpenStreetMap Tiles"
                    }
                  ],
                  "configurations": [],
                  "published": false,
                  "feature_save": false,
                  "feature_pub": false,
                  "region": null,
                  "extent": {
                    "type": "Feature",
                    "properties": {
                      "uid": "cf9c038c-a09a-4058-855a-b0b1d5a6c5c4",
                      "name": "test"
                    },
                    "geometry": {
                      "type": "Polygon",
                      "coordinates": [
                        [
                          [
                            -43.248281,
                            -22.816694
                          ],
                          [
                            -43.248281,
                            -22.812105
                          ],
                          [
                            -43.242617,
                            -22.812105
                          ],
                          [
                            -43.242617,
                            -22.816694
                          ],
                          [
                            -43.248281,
                            -22.816694
                          ]
                        ]
                      ]
                    }
                  },
                  "tags": [
                    {
                      "key": "highway",
                      "value": "path",
                      "data_model": "HDM",
                      "geom_types": [
                        "line"
                      ]
                    }
                  ],
                  "include_zipfile": false
                }

        * Raises: ValidationError: in case of validation errors.
        ** Returns: a non-202 response in case of errors.
        """
        serializer = self.get_serializer(data=request.data)
        if serializer.is_valid(raise_exception=True):
            """Get the required data from the validated request."""

            export_providers = request.data.get('export_providers', [])
            provider_tasks = request.data.get('provider_tasks', [])
            tags = request.data.get('tags')
            preset = request.data.get('preset')
            translation = request.data.get('translation')
            transform = request.data.get('transform')

            with transaction.atomic():
                if export_providers:
                    for ep in export_providers:
                        ep['user'] = request.user.id
                    provider_serializer = ExportProviderSerializer(
                        data=export_providers,
                        many=True,
                        context={'request': request}
                    )
                    if provider_serializer.is_valid():
                        provider_serializer.save()
                if len(provider_tasks) > 0:
                    """Save the job and make sure it's committed before running tasks."""
                    try:
                        job = serializer.save()
                        provider_serializer = ProviderTaskSerializer(
                            data=provider_tasks,
                            many=True
                        )
                        try:
                            provider_serializer.is_valid(raise_exception=True)
                        except ValidationError:
                            error_data = OrderedDict()
                            error_data['errors'] = [_('A provider and an export format must be selected.')]
                            return Response(error_data, status=status.HTTP_400_BAD_REQUEST)
                        job.provider_tasks = provider_serializer.save()
                        if preset:
                            """Get the tags from the uploaded preset."""
                            logger.debug('Found preset with uid: %s' % preset)
                            config = ExportConfig.objects.get(uid=preset)
                            job.configs.add(config)
                            preset_path = config.upload.path
                            """Use the UnfilteredPresetParser."""
                            parser = presets.UnfilteredPresetParser(preset=preset_path)
                            tags_dict = parser.parse()
                            simplified_tags = []
                            for entry in tags_dict:
                                tag = {'key': entry['key'], 'value': entry['value'], 'geom': entry['geom_types']}
                                simplified_tags.append(tag)
                            job.json_tags = simplified_tags
                            job.save()
                        elif tags:
                            """Get tags from request."""
                            simplified_tags = []
                            for entry in tags:
                                tag = {'key': entry['key'], 'value': entry['value'], 'geom': entry['geom_types']}
                                simplified_tags.append(tag)
                            job.json_tags = simplified_tags
                            job.save()
                        else:
                            """
                            Use hdm preset as default tags if no preset or tags
                            are provided in the request.
                            """
                            hdm_default_tags = DatamodelPreset.objects.get(name='hdm').json_tags
                            job.json_tags = hdm_default_tags
                            job.save()
                        # check for translation file
                        if translation:
                            config = ExportConfig.objects.get(uid=translation)
                            job.configs.add(config)
                        # check for transform file
                        if transform:
                            config = ExportConfig.objects.get(uid=transform)
                            job.configs.add(config)
                    except Exception as e:
                        error_data = OrderedDict()
                        error_data['id'] = _('server_error')
                        error_data['message'] = _('Error creating export job: %(error)s') % {'error': e}
                        return Response(error_data, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
                else:
                    error_data = OrderedDict()
                    error_data['provider_tasks'] = [_('Invalid provider task.')]
                    return Response(error_data, status=status.HTTP_400_BAD_REQUEST)

            # run the tasks
            job_uid = str(job.uid)
            # run needs to be created so that the UI can be updated with the task list.
            run_uid = create_run(job_uid=job_uid)

            running = JobSerializer(job, context={'request': request})
            # Run is passed to celery to start the tasks.
            pick_up_run_task.delay(run_uid=run_uid)
            return Response(running.data, status=status.HTTP_202_ACCEPTED)
        else:
            return Response(serializer.errors,
                            status=status.HTTP_400_BAD_REQUEST)
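An end-to-end sketch of how a client might exercise this endpoint and then monitor the resulting run, following the request payload and the /api/runs?job_uid= monitoring step documented in the docstring above (the base URL and token authentication are assumptions for illustration):

import requests

BASE_URL = "http://cloud.eventkit.dev/api"  # hypothetical deployment
HEADERS = {"Authorization": "Token <your-token>"}

# Payload mirrors the example request in the docstring above.
payload = {
    "name": "Example Name",
    "description": "Example Description",
    "event": "Example Event (Project)",
    "include_zipfile": True,
    "selection": {},  # valid GeoJSON selection goes here
    "tags": [],
    "provider_tasks": [
        {"provider": "OpenStreetMap Data (Themes)", "formats": ["shp", "gpkg"]}
    ],
}

# A successful create returns 202 with the serialized job, including its uid.
response = requests.post(f"{BASE_URL}/jobs", json=payload, headers=HEADERS)
response.raise_for_status()
job_uid = response.json()["uid"]

# Monitor the export run that was created for the job.
runs = requests.get(f"{BASE_URL}/runs", params={"job_uid": job_uid}, headers=HEADERS)
print(runs.json())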