Example #1
    def setUp(self) -> None:
        if Path(ARCHIVE_ROOT).exists():
            rmtree(ARCHIVE_ROOT)
        self.instrument = "TEST"
        self.data_archive = DataArchive([self.instrument], 19, 20)
        self.expected_cycle_path = Path(CYCLE_DIRECTORY %
                                        (self.instrument, "19_1")).parent
        self.expected_script_path = Path(SCRIPTS_DIRECTORY % self.instrument)
Example #2
    def setUp(self):
        """ Start all external services """
        # Get all clients
        try:
            self.publisher, self.consumer = setup_kafka_connections()
        except ConnectionException as err:
            raise RuntimeError(
                "Could not connect to Kafka - check your credentials. If running locally check that "
                "the Kafka Docker container is running") from err
        # Add placeholder variables:
        # these are used to ensure runs are deleted even if the test fails before completion
        self.instrument = 'ARMI'
        self.instrument_obj, _ = Instrument.objects.get_or_create(
            name=self.instrument, is_active=True)
        self.rb_number = 1234567
        self.run_number = 101
        self.run_title = "test title"
        self.software = {
            "name": "Mantid",
            "version": "latest",
        }

        # Create test archive and add data
        self.data_archive = DataArchive([self.instrument], 19, 19)
        self.data_archive.create()

        # Create and send json message to Kafka
        self.data_ready_message = Message(rb_number=self.rb_number,
                                          instrument=self.instrument,
                                          run_number=self.run_number,
                                          run_title=self.run_title,
                                          description="This is a system test",
                                          facility="ISIS",
                                          software=self.software,
                                          started_by=0,
                                          reduction_script=None,
                                          reduction_arguments=None)

        if self.software.get("name") == "Mantid":
            expected_mantid_py = f"{MANTID_PATH}/mantid.py"
            if not os.path.exists(expected_mantid_py):
                os.makedirs(MANTID_PATH)
                with open(expected_mantid_py, mode="w",
                          encoding="utf-8") as self.test_mantid_py:
                    self.test_mantid_py.write(FAKE_MANTID)
            else:
                # Mantid is installed, don't create or delete (in tearDown) anything
                self.test_mantid_py = None
Example #3
    @classmethod
    def setUpClass(cls):
        """Sets up the DataArchive with scripts and sets the instrument for all test cases"""
        super().setUpClass()
        cls.instrument_name = "TestInstrument"
        cls.data_archive = DataArchive([cls.instrument_name], 21, 21)
        cls.data_archive.create()
        cls.data_archive.add_reduce_vars_script(
            cls.instrument_name,
            """standard_vars={"variable1":"test_variable_value_123"}""")
Example #4
    @classmethod
    def setUpClass(cls):
        """Sets up the data archive to be shared across test cases"""
        super().setUpClass()
        cls.instrument_name = "TestInstrument"
        cls.data_archive = DataArchive([cls.instrument_name], 21, 21)
        cls.data_archive.create()
        cls.data_archive.add_reduction_script(
            cls.instrument_name,
            """def main(input_file, output_dir): print('some text')""")
        cls.data_archive.add_reduce_vars_script(
            cls.instrument_name,
            """standard_vars={"variable1":"test_variable_value_123"}""")
Example #5
    @classmethod
    def setUpClass(cls):
        """Sets up the data archive with a reduce and reduce_vars script to be shared between test cases"""
        super().setUpClass()
        cls.instrument_name = "TestInstrument"
        cls.data_archive = DataArchive([cls.instrument_name], 21, 21)
        cls.data_archive.create()
        cls.data_archive.add_reduction_script(cls.instrument_name,
                                              """print('some text')""")
        # NOTE: the value here must match the value of the last variable in the fixture.
        # If it doesn't, its value will be updated once the previous range (100151-100199) is updated.
        # This is expected, as the final variable (valid for 100200 onwards) should have the default script value.
        cls.data_archive.add_reduce_vars_script(
            cls.instrument_name, """standard_vars={"variable1":"value4"}""")
Example #6
def setup_external_services(
        instrument_name: str, start_year: int,
        end_year: int) -> Tuple[DataArchive, QueueClient, QueueListener]:
    """
    Sets up a DataArchive along with a queue client and listener,
    and returns them as a tuple.
    :param instrument_name: Name of the instrument
    :param start_year: Start year for the archive
    :param end_year: End year for the archive
    :return: Tuple of the external objects needed
    """
    data_archive = DataArchive([instrument_name], start_year, end_year)
    data_archive.create()
    try:
        queue_client, listener = setup_connection()
    except ConnectionException as err:
        raise RuntimeError(
            "Could not connect to ActiveMQ - check your credentials. If running locally check that "
            "ActiveMQ is running and started by `python setup.py start`"
        ) from err

    return data_archive, queue_client, listener
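A minimal sketch of how a test module might consume this helper; the wrapper function name, the instrument/year values and the commented-out client shutdown call are assumptions and do not appear in the source:

def run_system_test_setup():
    # Hypothetical wrapper showing the intended call pattern for setup_external_services
    data_archive, queue_client, listener = setup_external_services("TestInstrument", 21, 21)
    try:
        # Populate the archive the same way the fixtures above do
        data_archive.add_reduction_script(
            "TestInstrument", "def main(input_file, output_dir): print('some text')")
        data_archive.add_reduce_vars_script(
            "TestInstrument", 'standard_vars={"variable1":"value1"}')
        ...  # run the test against queue_client / listener here
    finally:
        data_archive.delete()  # DataArchive.delete() is shown in Example #8
        # queue_client.disconnect()  # assumed shutdown call; replace with the real client API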
Example #7
    def test_alert_message_when_reduce_vars_has_error(self):
        """
        Test that the correct message is shown when the reduce_vars.py has an error in it
        """
        data_archive = DataArchive([self.instrument_name], 21, 21)
        data_archive.create()

        # add a reduce_vars script with a syntax error -> a missing " after value1
        data_archive.add_reduce_vars_script(
            self.instrument_name, """standard_vars={"variable1":"value1}""")

        self.page.launch()
        expected = "The buttons above have been disabled because reduce_vars.py has an import or syntax error."
        assert self.page.alert_message_text() == expected

        data_archive.delete()
Example #8
class TestDataArchive(TestCase):
    def setUp(self) -> None:
        if Path(ARCHIVE_ROOT).exists():
            rmtree(ARCHIVE_ROOT)
        self.instrument = "TEST"
        self.data_archive = DataArchive([self.instrument], 19, 20)
        self.expected_cycle_path = Path(CYCLE_DIRECTORY %
                                        (self.instrument, "19_1")).parent
        self.expected_script_path = Path(SCRIPTS_DIRECTORY % self.instrument)

    def tearDown(self) -> None:
        if Path(ARCHIVE_ROOT).exists():
            rmtree(ARCHIVE_ROOT)

    def test_delete(self):
        """
        Tests the delete method removes the data-archive
        """
        test_archive_path = Path(CYCLE_DIRECTORY)
        test_archive_path.mkdir(parents=True)
        self.assertTrue(test_archive_path.exists())
        self.data_archive.delete()
        self.assertFalse(test_archive_path.exists())

    def test_delete_post_create(self):
        """
        Tests delete when the archive was created via create()
        """
        self.data_archive.create()
        self.data_archive.delete()
        self.assertFalse(self.expected_cycle_path.exists())

    def test_create(self):
        """
        Tests the data-archive is created with the correct structure in the correct place
        """
        self.data_archive.create()
        self.assertTrue(self.expected_cycle_path.exists())
        self.assertTrue(self.expected_script_path.exists())

    def test_add_data_file(self):
        """
        Tests that a datafile can be added in the correct location with the correct name
        """
        expected_data_file = self.expected_cycle_path / "cycle_19_1" / "datafile.nxs"
        result = self.data_archive.add_data_file(self.instrument,
                                                 "datafile.nxs", 19, 1)
        self.assertEqual(str(expected_data_file), result)
        self.assertTrue(expected_data_file.exists())

    def test_add_reduction_script(self):
        """
        Tests that a reduction script can be added with the correct text
        """
        expected_script_file = self.expected_script_path / "reduce.py"
        expected_script_text = "print('hello')\nprint('world')"
        self.data_archive.create()
        self.data_archive.add_reduction_script(self.instrument,
                                               expected_script_text)
        self.assertTrue(expected_script_file.exists())
        with open(expected_script_file, encoding="utf-8") as fle:
            actual_text = fle.read()
        self.assertEqual(expected_script_text, actual_text)

    def test_add_reduce_vars_script(self):
        """
        Tests that a reduce vars script can be added with the correct text
        """
        expected_var_file = self.expected_script_path / "reduce_vars.py"
        expected_var_text = "vars = {}"
        self.data_archive.create()
        self.data_archive.add_reduce_vars_script(self.instrument,
                                                 expected_var_text)
        self.assertTrue(expected_var_file.exists())
        with open(expected_var_file, encoding="utf-8") as fle:
            actual_text = fle.read()
        self.assertEqual(expected_var_text, actual_text)
Example #9
class BaseAutoreduceSystemTest(TransactionTestCase):
    """Tests that the Queue Listener reconnects after ActiveMQ goes down"""
    fixtures = ["status_fixture"]

    def setUp(self):
        """ Start all external services """
        # Get all clients
        try:
            self.publisher, self.consumer = setup_kafka_connections()
        except ConnectionException as err:
            raise RuntimeError(
                "Could not connect to Kafka - check your credentials. If running locally check that "
                "the Kafka Docker container is running") from err
        # Add placeholder variables:
        # these are used to ensure runs are deleted even if the test fails before completion
        self.instrument = 'ARMI'
        self.instrument_obj, _ = Instrument.objects.get_or_create(
            name=self.instrument, is_active=True)
        self.rb_number = 1234567
        self.run_number = 101
        self.run_title = "test title"
        self.software = {
            "name": "Mantid",
            "version": "latest",
        }

        # Create test archive and add data
        self.data_archive = DataArchive([self.instrument], 19, 19)
        self.data_archive.create()

        # Create and send json message to Kafka
        self.data_ready_message = Message(rb_number=self.rb_number,
                                          instrument=self.instrument,
                                          run_number=self.run_number,
                                          run_title=self.run_title,
                                          description="This is a system test",
                                          facility="ISIS",
                                          software=self.software,
                                          started_by=0,
                                          reduction_script=None,
                                          reduction_arguments=None)

        if self.software.get("name") == "Mantid":
            expected_mantid_py = f"{MANTID_PATH}/mantid.py"
            if not os.path.exists(expected_mantid_py):
                os.makedirs(MANTID_PATH)
                with open(expected_mantid_py, mode="w",
                          encoding="utf-8") as self.test_mantid_py:
                    self.test_mantid_py.write(FAKE_MANTID)
            else:
                # Mantid is installed, don't create or delete (in tearDown) anything
                self.test_mantid_py = None

    def tearDown(self):
        """ Disconnect from services, stop external services and delete data archive """
        self.consumer.stop()
        self._remove_run_from_database(self.instrument, self.run_number)
        self.data_archive.delete()

        self._delete_reduction_directory()

        if self.test_mantid_py:
            shutil.rmtree(MANTID_PATH)

    @staticmethod
    def _remove_run_from_database(instrument, run_number):
        """
        Uses the scripts.manual_operations.manual_remove script
        to remove records added to the database
        """
        if not isinstance(run_number, list):
            run_number = [run_number]

        ReductionRun.objects.filter(
            instrument__name=instrument,
            run_numbers__run_number__in=run_number).delete()

    @staticmethod
    def _delete_reduction_directory():
        """ Delete the temporary reduction directory"""
        path = Path(os.path.join(PROJECT_DEV_ROOT, 'reduced-data'))
        if path.exists():
            shutil.rmtree(path.absolute())

    def _setup_data_structures(self, reduce_script, vars_script):
        """
        Sets up a fake archive and reduced data save location on the system
        :param reduce_script: The content to use in the reduce.py file
        :param vars_script:  The content to use in the reduce_vars.py file
        :return: file_path to the reduced data
        """
        raw_file = f'{self.instrument}{self.run_number}.nxs'
        self.data_archive.add_reduction_script(self.instrument, reduce_script)
        self.data_archive.add_reduce_vars_script(self.instrument, vars_script)
        raw_file = self.data_archive.add_data_file(self.instrument, raw_file,
                                                   19, 1)
        return raw_file

    def _find_run_in_database(self):
        """
        Find a ReductionRun record in the database
        This includes a timeout to wait for several seconds to ensure the database has received
        the record in question
        :return: The resulting record
        """
        instrument = db.get_instrument(self.instrument)
        return instrument.reduction_runs.filter(
            run_numbers__run_number=self.run_number)

    def send_and_wait_for_result(self, message):
        """Sends the message to the topic and waits until the consumer has finished processing it"""
        self.consumer._processing = True  # pylint:disable=protected-access
        self.publisher.publish(topic='data_ready', messages=message)
        start_time = time.time()
        while self.consumer.is_processing_message():
            time.sleep(5)
            # Prevent waiting indefinitely; break after 2 minutes
            if time.time() > start_time + 120:
                break
        time.sleep(10)  # Wait for the message to be processed
        results = self._find_run_in_database()
        assert results
        return results
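
As a sketch only, a derived test might combine these helpers as below; the subclass name, script bodies and final assertion are assumptions rather than part of the base class:

class ExampleReductionSystemTest(BaseAutoreduceSystemTest):
    # Hypothetical subclass, for illustration only
    def test_run_reaches_database(self):
        # Stage a trivial reduce.py / reduce_vars.py plus the raw data file for this run
        self._setup_data_structures(
            reduce_script="def main(input_file, output_dir): print('some text')",
            vars_script="standard_vars={}")
        # Publish the prepared message and block until the consumer has processed it
        results = self.send_and_wait_for_result(self.data_ready_message)
        assert len(results) == 1  # exactly one ReductionRun record should have been created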