Пример #1
0
    def purge_run(self, event):
        """Run purge for the object with ``location_id`` specified in ``event`` argument.

        Failures are logged (with the location id attached as extra context)
        instead of propagating, since purging is a best-effort background task.
        """
        loc_id = event['location_id']
        level = event['verbosity']

        try:
            logger.info(__("Running purge for location id {}.", loc_id))
            location_purge(location_id=loc_id, delete=True, verbosity=level)
        except Exception:  # pylint: disable=broad-except
            logger.exception("Error while purging location.", extra={'location_id': loc_id})
Пример #2
0
    def purge_run(self, event):
        """Purge files for the location identified by ``event["location_id"]``.

        Exceptions are caught and logged so a single failing purge cannot
        break the surrounding event processing.
        """
        location, verbosity_level = event["location_id"], event["verbosity"]

        try:
            logger.info(__("Running purge for location id {}.", location))
            location_purge(
                location_id=location, delete=True, verbosity=verbosity_level
            )
        except Exception:
            logger.exception(
                "Error while purging location.", extra={"location_id": location}
            )
Пример #3
0
    def purge_run(self, event):
        """Run a best-effort purge for the location described by ``event``.

        ``event`` must provide ``location_id`` and ``verbosity`` keys; any
        error raised while purging is logged and swallowed.
        """
        the_id = event['location_id']
        the_verbosity = event['verbosity']

        try:
            logger.info(__("Running purge for location id {}.", the_id))
            location_purge(location_id=the_id,
                           delete=True,
                           verbosity=the_verbosity)
        except Exception:  # pylint: disable=broad-except
            logger.exception("Error while purging location.",
                             extra={'location_id': the_id})
Пример #4
0
    def create_and_run_processor(self, processor, **kwargs):
        """Create a throw-away test process from ``processor`` and execute it.

        The purge that normally happens in an async worker is emulated
        synchronously before the resulting data object is returned.
        """
        random_slug = get_random_string(6)
        Process.objects.create(
            contributor=self.admin,
            name='Test Purge Process',
            slug=random_slug,
            type='data:test',
            version=1,
            **processor
        )

        result = self.run_process(random_slug, **kwargs)
        # Purge is normally called in an async worker, so we have to emulate the call.
        purge.location_purge(location_id=result.location.id, delete=True)
        return result
Пример #5
0
    def create_and_run_processor(self, processor, **kwargs):
        """Build a temporary process under a random slug, run it, and purge its location."""
        new_slug = get_random_string(6)
        Process.objects.create(slug=new_slug, name='Test Purge Process',
                               contributor=self.admin, type='data:test',
                               version=1, **processor)

        created = self.run_process(new_slug, **kwargs)
        # Emulate the purge call an async worker would normally make.
        purge.location_purge(location_id=created.location.id, delete=True)

        return created
Пример #6
0
    def test_remove(self):
        """Check which files ``purge_all``/``location_purge`` remove and which they keep."""
        # A completed object whose output references 'test-file'; the unreferenced
        # 'removeme' file is the purge candidate.
        completed_data = Data.objects.create(**self.data)
        data_location = DataLocation.objects.create(subpath='')
        # The subpath equals the location's own id, which is only known after creation.
        data_location.subpath = str(data_location.id)
        data_location.save()
        data_location.data.add(completed_data)
        completed_data.status = Data.STATUS_DONE
        completed_data.output = {'sample': {'file': 'test-file'}}
        self.create_test_file(completed_data.location, 'test-file')
        self.create_test_file(completed_data.location, 'removeme')
        completed_data.save()

        # A second object that is never marked done; none of its files may be purged.
        pending_data = Data.objects.create(**self.data)
        data_location = DataLocation.objects.create(subpath='')
        data_location.subpath = str(data_location.id)
        data_location.save()
        data_location.data.add(pending_data)
        self.create_test_file(pending_data.location, 'test-file')
        self.create_test_file(pending_data.location, 'donotremoveme')

        # Check that nothing is removed if delete is False (the default).
        with patch('resolwe.flow.utils.purge.os', wraps=os) as os_mock:
            os_mock.remove = MagicMock()
            purge.purge_all()
            os_mock.remove.assert_not_called()

        # Reset the purged flag so the next purge run processes this location again.
        completed_data.location.purged = False
        completed_data.location.save()

        # Check that only the 'removeme' file from the completed Data objects is removed
        # and files from the second (not completed) Data objects are unchanged.
        with patch('resolwe.flow.utils.purge.os', wraps=os) as os_mock:
            os_mock.remove = MagicMock()
            purge.purge_all(delete=True)
            os_mock.remove.assert_called_once_with(
                completed_data.location.get_path(filename='removeme'))

        completed_data.location.purged = False
        completed_data.location.save()

        # Create dummy data directories for non-existent data objects
        # (locations with no Data attached).
        self.create_test_file(DataLocation.objects.create(subpath='990'),
                              'dummy')
        self.create_test_file(DataLocation.objects.create(subpath='991'),
                              'dummy')

        # Check that only the 'removeme' file from the completed Data objects is removed
        # together with directories not belonging to any data objects.
        with contextlib.ExitStack() as stack:
            os_mock = stack.enter_context(
                patch('resolwe.flow.utils.purge.os', wraps=os))
            shutil_mock = stack.enter_context(
                patch('resolwe.flow.utils.purge.shutil', wraps=shutil))

            os_mock.remove = MagicMock()
            shutil_mock.rmtree = MagicMock()
            purge.purge_all(delete=True)
            # Exactly one file removal and two directory removals are expected.
            self.assertEqual(os_mock.remove.call_count, 1)
            self.assertEqual(shutil_mock.rmtree.call_count, 2)
            os_mock.remove.assert_called_once_with(
                os.path.join(settings.FLOW_EXECUTOR['DATA_DIR'],
                             str(completed_data.location.id), 'removeme'))
            shutil_mock.rmtree.assert_any_call(
                os.path.join(settings.FLOW_EXECUTOR['DATA_DIR'], '990'))
            shutil_mock.rmtree.assert_any_call(
                os.path.join(settings.FLOW_EXECUTOR['DATA_DIR'], '991'))

        completed_data.location.purged = False
        completed_data.location.save()

        # Create another data object and check that if remove is called on one object,
        # only that object's data is removed.
        another_data = Data.objects.create(**self.data)
        data_location = DataLocation.objects.create(subpath='')
        data_location.subpath = str(data_location.id)
        data_location.save()
        data_location.data.add(another_data)
        another_data.status = Data.STATUS_DONE
        another_data.output = {'sample': {'file': 'test-file'}}
        self.create_test_file(another_data.location, 'test-file')
        self.create_test_file(another_data.location, 'removeme')
        another_data.save()

        with contextlib.ExitStack() as stack:
            os_mock = stack.enter_context(
                patch('resolwe.flow.utils.purge.os', wraps=os))
            shutil_mock = stack.enter_context(
                patch('resolwe.flow.utils.purge.shutil', wraps=shutil))

            os_mock.remove = MagicMock()
            # Targeted purge: only this location's unreferenced file is removed
            # and no directories are deleted.
            purge.location_purge(location_id=another_data.location.id,
                                 delete=True)
            os_mock.remove.assert_called_once_with(
                another_data.location.get_path(filename='removeme'))
            shutil_mock.rmtree.assert_not_called()
Пример #7
0
    def run_process(self, process_slug, input_={}, assert_status=Data.STATUS_DONE,
                    descriptor=None, descriptor_schema=None, verbosity=0, tags=None):
        """Run the specified process with the given inputs.

        If input is a file, file path should be given relative to the
        ``tests/files`` directory of a Django application.
        If ``assert_status`` is given, check if
        :class:`~resolwe.flow.models.Data` object's status matches
        it after the process has finished.

        .. note::

            If you need to delay calling the manager, you must put the
            desired code in a ``with transaction.atomic()`` block.

        :param str process_slug: slug of the
            :class:`~resolwe.flow.models.Process` to run

        :param dict ``input_``: :class:`~resolwe.flow.models.Process`'s
            input parameters

            .. note::

                You don't have to specify parameters with defined
                default values.

        :param str ``assert_status``: desired status of the
            :class:`~resolwe.flow.models.Data` object

        :param dict descriptor: descriptor to set on the
            :class:`~resolwe.flow.models.Data` object

        :param dict descriptor_schema: descriptor schema to set on the
            :class:`~resolwe.flow.models.Data` object

        :param list tags: list of tags that will be added to the created
            :class:`~resolwe.flow.models.Data` object

        :return: object created by
            :class:`~resolwe.flow.models.Process`
        :rtype: ~resolwe.flow.models.Data

        """
        # Copy input_, to avoid mutation that would occur in ``mock_upload``
        # (the shared mutable default would otherwise leak between calls).
        input_ = input_.copy()

        # Backward compatibility: accept old colon-separated slugs.
        process_slug = slugify(process_slug.replace(':', '-'))

        # Enforce correct process tags.
        if getattr(settings, 'TEST_PROCESS_REQUIRE_TAGS', False) and not self._preparation_stage:
            test = getattr(self, self._testMethodName)
            if not has_process_tag(test, process_slug):
                self.fail(
                    'Tried to run process with slug "{0}" outside of preparation_stage\n'
                    'block while test is not tagged for this process. Either tag the\n'
                    'test using tag_process decorator or move this under the preparation\n'
                    'stage block if this process is only used to prepare upstream inputs.\n'
                    '\n'
                    'To tag the test you can add the following decorator:\n'
                    '    @tag_process(\'{0}\')\n'
                    ''.format(process_slug)
                )

        # Record which processes the test actually executed.
        self._executed_processes.add(process_slug)

        # Pick the latest version when multiple versions share the slug.
        process = Process.objects.filter(slug=process_slug).order_by('-version').first()

        if process is None:
            self.fail('No process with slug "{}"'.format(process_slug))

        def mock_upload(file_path):
            """Mock file upload.

            Remote URLs are passed through untouched; local paths (relative to
            ``self.files_path``) are copied into the upload directory under a
            unique temporary name.
            """
            def is_url(path):
                """Check if path is a URL."""
                validate = URLValidator()
                try:
                    validate(path)
                except (ValueError, ValidationError):
                    return False
                return True

            if is_url(file_path):
                return {
                    'file': file_path,
                    'file_temp': file_path,
                    'is_remote': True,
                }
            else:
                old_path = os.path.join(self.files_path, file_path)
                if not os.path.isfile(old_path):
                    raise RuntimeError('Missing file: {}'.format(old_path))

                file_basename = os.path.basename(file_path)

                # Suffix with a UUID so concurrent tests never collide on names.
                file_temp = '{}_{}'.format(file_basename, uuid.uuid4())
                upload_file_path = os.path.join(self.upload_dir, file_temp)
                # create directories needed by new_path
                upload_file_dir = os.path.dirname(upload_file_path)
                if not os.path.exists(upload_file_dir):
                    os.makedirs(upload_file_dir)

                shutil.copy2(old_path, upload_file_path)
                # Track the copy so teardown can clean it up.
                self._upload_files.append(upload_file_path)
                return {
                    'file': file_basename,
                    'file_temp': file_temp,
                }

        for field_schema, fields in iterate_fields(input_, process.input_schema):
            # copy referenced files to upload dir
            if field_schema['type'] == "basic:file:":
                fields[field_schema['name']] = mock_upload(fields[field_schema['name']])
            elif field_schema['type'] == "list:basic:file:":
                file_list = [mock_upload(file_path) for file_path in fields[field_schema['name']]]
                fields[field_schema['name']] = file_list

            # NOTE(review): labelled "convert primary keys to strings" in the
            # original, but neither branch converts anything — the first is a
            # self-assignment and the second only shallow-copies the list.
            # TODO confirm whether e.g. ``.pk`` extraction was intended here.
            if field_schema['type'].startswith('data:'):
                fields[field_schema['name']] = fields[field_schema['name']]
            if field_schema['type'].startswith('list:data:'):
                fields[field_schema['name']] = [obj for obj in fields[field_schema['name']]]

        data = Data.objects.create(
            input=input_,
            contributor=self.admin,
            process=process,
            slug=get_random_string(length=6),
            tags=tags or [],
            descriptor_schema=descriptor_schema,
            descriptor=descriptor or {})
        self.collection.data.add(data)

        # Fetch latest Data object from database
        data = Data.objects.get(pk=data.pk)

        if assert_status:
            if not transaction.get_autocommit() and assert_status == Data.STATUS_DONE:
                # We are in an atomic transaction block, hence the data object will not be done
                # until after the block. Therefore the expected status is resolving.
                assert_status = Data.STATUS_RESOLVING
            self.assertStatus(data, assert_status)

        # Purge is normally called in an async worker, so we have to emulate the call.
        if data.location:
            purge.location_purge(location_id=data.location.id, delete=True)

        return data
Пример #8
0
    def test_remove(self):
        """Verify purge removal behaviour of ``purge_all`` and ``location_purge``."""
        # Completed object: output references 'test-file', so only the stray
        # 'removeme' file is eligible for purging.
        completed_data = Data.objects.create(**self.data)
        data_location = DataLocation.objects.create(subpath='')
        # The subpath mirrors the location id, known only after the first save.
        data_location.subpath = str(data_location.id)
        data_location.save()
        data_location.data.add(completed_data)
        completed_data.status = Data.STATUS_DONE
        completed_data.output = {'sample': {'file': 'test-file'}}
        self.create_test_file(completed_data.location, 'test-file')
        self.create_test_file(completed_data.location, 'removeme')
        completed_data.save()

        # Pending (not completed) object: its files must never be purged.
        pending_data = Data.objects.create(**self.data)
        data_location = DataLocation.objects.create(subpath='')
        data_location.subpath = str(data_location.id)
        data_location.save()
        data_location.data.add(pending_data)
        self.create_test_file(pending_data.location, 'test-file')
        self.create_test_file(pending_data.location, 'donotremoveme')

        # Check that nothing is removed if delete is False (the default).
        with patch('resolwe.flow.utils.purge.os', wraps=os) as os_mock:
            os_mock.remove = MagicMock()
            purge.purge_all()
            os_mock.remove.assert_not_called()

        # Reset the purged flag so this location is considered again.
        completed_data.location.purged = False
        completed_data.location.save()

        # Check that only the 'removeme' file from the completed Data objects is removed
        # and files from the second (not completed) Data objects are unchanged.
        with patch('resolwe.flow.utils.purge.os', wraps=os) as os_mock:
            os_mock.remove = MagicMock()
            purge.purge_all(delete=True)
            os_mock.remove.assert_called_once_with(
                completed_data.location.get_path(filename='removeme'))

        completed_data.location.purged = False
        completed_data.location.save()

        # Create dummy data directories for non-existent data objects
        # (locations with no Data attached).
        self.create_test_file(DataLocation.objects.create(subpath='990'), 'dummy')
        self.create_test_file(DataLocation.objects.create(subpath='991'), 'dummy')

        # Check that only the 'removeme' file from the completed Data objects is removed
        # together with directories not belonging to any data objects.
        with contextlib.ExitStack() as stack:
            os_mock = stack.enter_context(patch('resolwe.flow.utils.purge.os', wraps=os))
            shutil_mock = stack.enter_context(patch('resolwe.flow.utils.purge.shutil', wraps=shutil))

            os_mock.remove = MagicMock()
            shutil_mock.rmtree = MagicMock()
            purge.purge_all(delete=True)
            # Expect exactly one file removal and two directory removals.
            self.assertEqual(os_mock.remove.call_count, 1)
            self.assertEqual(shutil_mock.rmtree.call_count, 2)
            os_mock.remove.assert_called_once_with(
                os.path.join(settings.FLOW_EXECUTOR['DATA_DIR'], str(completed_data.location.id), 'removeme'))
            shutil_mock.rmtree.assert_any_call(
                os.path.join(settings.FLOW_EXECUTOR['DATA_DIR'], '990'))
            shutil_mock.rmtree.assert_any_call(
                os.path.join(settings.FLOW_EXECUTOR['DATA_DIR'], '991'))

        completed_data.location.purged = False
        completed_data.location.save()

        # Create another data object and check that if remove is called on one object,
        # only that object's data is removed.
        another_data = Data.objects.create(**self.data)
        data_location = DataLocation.objects.create(subpath='')
        data_location.subpath = str(data_location.id)
        data_location.save()
        data_location.data.add(another_data)
        another_data.status = Data.STATUS_DONE
        another_data.output = {'sample': {'file': 'test-file'}}
        self.create_test_file(another_data.location, 'test-file')
        self.create_test_file(another_data.location, 'removeme')
        another_data.save()

        with contextlib.ExitStack() as stack:
            os_mock = stack.enter_context(patch('resolwe.flow.utils.purge.os', wraps=os))
            shutil_mock = stack.enter_context(patch('resolwe.flow.utils.purge.shutil', wraps=shutil))

            os_mock.remove = MagicMock()
            # Targeted purge of a single location removes only its unreferenced
            # file and deletes no directories.
            purge.location_purge(location_id=another_data.location.id, delete=True)
            os_mock.remove.assert_called_once_with(
                another_data.location.get_path(filename='removeme'))
            shutil_mock.rmtree.assert_not_called()