Example #1
    def test_launch_operation_hdd_full_space_started_ops(
            self, test_adapter_factory):
        """
        Test the actual operation flow by executing a test adapter.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)
        space_taken_by_started = 100
        adapter = TestFactory.create_adapter(
            "tvb.tests.framework.adapters.testadapter3",
            "TestAdapterHDDRequired")
        form = TestAdapterHDDRequiredForm()
        adapter.submit_form(form)
        started_operation = model_operation.Operation(
            self.test_user.id,
            self.test_project.id,
            adapter.stored_adapter.id,
            "",
            status=model_operation.STATUS_STARTED,
            estimated_disk_size=space_taken_by_started)
        view_model = adapter.get_view_model()()
        dao.store_entity(started_operation)
        TvbProfile.current.MAX_DISK_SPACE = float(
            adapter.get_required_disk_size(view_model) +
            space_taken_by_started - 1)

        with pytest.raises(NoMemoryAvailableException):
            self.operation_service.initiate_operation(self.test_user,
                                                      self.test_project,
                                                      adapter,
                                                      model_view=view_model)
        self._assert_no_ddti()
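
The failure here comes down to simple disk-space arithmetic: the already started operation reserves space_taken_by_started bytes, and MAX_DISK_SPACE is set one unit below that reservation plus the new adapter's requirement. Below is a minimal sketch of the kind of check being exercised (the names are hypothetical and do not mirror the real OperationService code); Example #3 further down shows the complementary case where the limit equals the total and the launch is allowed.

# Minimal sketch of the disk-space guard these tests exercise.
# Names are hypothetical, not the actual OperationService implementation.
def has_enough_disk_space(max_disk_space, already_reserved, required):
    # Space left once started operations' estimated sizes are subtracted.
    return required <= max_disk_space - already_reserved

required = 200            # stand-in for adapter.get_required_disk_size(...)
already_reserved = 100    # space_taken_by_started in the test

# Example #1: the limit is one unit short of the total, so the launch must fail.
assert not has_enough_disk_space(required + already_reserved - 1, already_reserved, required)
# Example #3: the limit equals the total, so the launch goes through.
assert has_enough_disk_space(required + already_reserved, already_reserved, required)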
Example #2
    def _export_linked_datatypes(self, project, zip_file):
        linked_paths = ProjectService().get_linked_datatypes_storage_path(
            project)

        if not linked_paths:
            # do not export an empty operation
            return

        # Make an import operation which will contain links to other projects
        algo = dao.get_algorithm_by_module(TVB_IMPORTER_MODULE,
                                           TVB_IMPORTER_CLASS)
        op = model_operation.Operation(None, None, project.id, algo.id)
        op.project = project
        op.algorithm = algo
        op.id = 'links-to-external-projects'
        op.start_now()
        op.mark_complete(model_operation.STATUS_FINISHED)

        op_folder = self.files_helper.get_operation_folder(
            op.project.name, op.id)
        op_folder_name = os.path.basename(op_folder)

        # add linked datatypes to archive in the import operation
        for pth in linked_paths:
            zip_pth = op_folder_name + '/' + os.path.basename(pth)
            zip_file.write(pth, zip_pth)

        # remove these files, since we only want them in the export archive
        self.files_helper.remove_folder(op_folder)
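
The archive mechanics themselves are plain zipfile usage: each linked file is written into the export under the synthetic operation's folder name, so the importer later finds them grouped under one operation. A self-contained sketch of that pattern, with throw-away files and a made-up folder name standing in for the real datatype paths:

import os
import tempfile
import zipfile

# Create two throw-away files standing in for linked datatype files.
tmp_dir = tempfile.mkdtemp()
linked_paths = []
for name in ("dt_a.h5", "dt_b.h5"):
    pth = os.path.join(tmp_dir, name)
    open(pth, "wb").close()
    linked_paths.append(pth)

op_folder_name = "links-to-external-projects"   # same id the example assigns to op.id

with zipfile.ZipFile(os.path.join(tmp_dir, "export.zip"), "w") as zip_file:
    for pth in linked_paths:
        # Store each file as <operation folder>/<file name> inside the archive.
        zip_file.write(pth, op_folder_name + "/" + os.path.basename(pth))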
Example #3
    def test_launch_operation_hdd_with_space_started_ops(
            self, test_adapter_factory):
        """
        Test the actual operation flow by executing a test adapter.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)

        space_taken_by_started = 100
        adapter = TestFactory.create_adapter(
            "tvb.tests.framework.adapters.testadapter3",
            "TestAdapterHDDRequired")
        form = TestAdapterHDDRequiredForm()
        form.fill_from_post({'_test': "100"})
        adapter.submit_form(form)
        started_operation = model_operation.Operation(
            self.test_user.id,
            self.test_project.id,
            adapter.stored_adapter.id,
            "",
            status=model_operation.STATUS_STARTED,
            estimated_disk_size=space_taken_by_started)
        dao.store_entity(started_operation)
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(
            adapter.get_required_disk_size(**data) + space_taken_by_started)
        tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                      "TEMP")
        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project, adapter,
                                                  tmp_folder, **data)
        self._assert_stored_dt2()
Example #4
    def test_get_filtered_datatypes(self, dummy_datatype_index_factory):
        """
        Test the filter function when retrieving dataTypes.
        """
        #Create some test operations
        start_dates = [datetime.now(),
                       datetime.strptime("08-06-2010", "%m-%d-%Y"),
                       datetime.strptime("07-21-2010", "%m-%d-%Y"),
                       datetime.strptime("05-06-2010", "%m-%d-%Y"),
                       datetime.strptime("07-21-2011", "%m-%d-%Y")]
        end_dates = [datetime.now(),
                     datetime.strptime("08-12-2010", "%m-%d-%Y"),
                     datetime.strptime("08-12-2010", "%m-%d-%Y"),
                     datetime.strptime("08-12-2011", "%m-%d-%Y"),
                     datetime.strptime("08-12-2011", "%m-%d-%Y")]
        for i in range(5):
            operation = model_operation.Operation(self.test_user.id, self.test_project.id, self.algorithm.id, 'test params',
                                        status=model_operation.STATUS_FINISHED, start_date=start_dates[i],
                                        completion_date=end_dates[i])
            operation = dao.store_entity(operation)
            storage_path = FilesHelper().get_project_folder(self.test_project, str(operation.id))
            if i < 4:
                datatype_inst = dummy_datatype_index_factory()
                datatype_inst.type = "DummyDataTypeIndex"
                datatype_inst.subject = "John Doe" + str(i)
                datatype_inst.state = "RAW"
                datatype_inst.fk_from_operation = operation.id
                dao.store_entity(datatype_inst)
            else:
                for _ in range(2):
                    datatype_inst = dummy_datatype_index_factory()
                    datatype_inst.storage_path = storage_path
                    datatype_inst.type = "DummyDataTypeIndex"
                    datatype_inst.subject = "John Doe" + str(i)
                    datatype_inst.state = "RAW"
                    datatype_inst.string_data = ["data"]
                    datatype_inst.fk_from_operation = operation.id
                    dao.store_entity(datatype_inst)

        returned_data = self.flow_service.get_available_datatypes(self.test_project.id, DummyDataTypeIndex)[0]
        for row in returned_data:
            if row[1] != 'DummyDataTypeIndex':
                raise AssertionError("Some invalid data was returned!")
        assert 4 == len(returned_data), "Invalid length of result"

        filter_op = FilterChain(fields=[FilterChain.datatype + ".state", FilterChain.operation + ".start_date"],
                                values=["RAW", datetime.strptime("08-01-2010", "%m-%d-%Y")], operations=["==", ">"])
        returned_data = self.flow_service.get_available_datatypes(self.test_project.id, Datatype1, filter_op)[0]
        returned_subjects = [one_data[3] for one_data in returned_data]

        if "John Doe0" not in returned_subjects or "John Doe1" not in returned_subjects or len(returned_subjects) != 2:
            raise AssertionError("DataTypes were not filtered properly!")
Example #5
    def _export_linked_datatypes(self, project):
        linked_paths = ProjectService().get_linked_datatypes_storage_path(project)

        if not linked_paths:
            # do not export an empty operation
            return None, None

        # Make an import operation which will contain links to other projects
        algo = dao.get_algorithm_by_module(TVB_IMPORTER_MODULE, TVB_IMPORTER_CLASS)
        op = model_operation.Operation(None, None, project.id, algo.id)
        op.project = project
        op.algorithm = algo
        op.id = 'links-to-external-projects'
        op.start_now()
        op.mark_complete(model_operation.STATUS_FINISHED)

        return linked_paths, op
Example #6
    def test_adapter_launch(self, connectivity_factory, region_mapping_factory,
                            time_series_region_index_factory):
        """
        Test that the adapter launches and successfully generates a datatype measure entry.
        """
        meta = {
            DataTypeMetaData.KEY_SUBJECT: "John Doe",
            DataTypeMetaData.KEY_STATE: "RAW_DATA"
        }
        algo = FlowService().get_algorithm_by_module_and_class(
            IntrospectionRegistry.SIMULATOR_MODULE,
            IntrospectionRegistry.SIMULATOR_CLASS)
        self.operation = model_operation.Operation(
            self.test_user.id,
            self.test_project.id,
            algo.id,
            json.dumps(''),
            meta=json.dumps(meta),
            status=model_operation.STATUS_STARTED)
        self.operation = dao.store_entity(self.operation)

        # Get connectivity, region_mapping and a dummy time_series_region
        connectivity = connectivity_factory()
        region_mapping = region_mapping_factory()
        dummy_time_series_index = time_series_region_index_factory(
            connectivity=connectivity, region_mapping=region_mapping)

        dummy_time_series_index.start_time = 0.0
        dummy_time_series_index.sample_period = 1.0

        dummy_time_series_index = dao.get_generic_entity(
            dummy_time_series_index.__class__, dummy_time_series_index.gid, 'gid')[0]
        ts_metric_adapter = TimeseriesMetricsAdapter()
        form = TimeseriesMetricsAdapterForm()
        view_model = form.get_view_model()()
        view_model.time_series = UUID(dummy_time_series_index.gid)
        form.fill_trait(view_model)
        ts_metric_adapter.submit_form(form)
        resulted_metric = ts_metric_adapter.launch(view_model)
        assert isinstance(
            resulted_metric,
            DatatypeMeasureIndex), "Result should be a datatype measure."
        assert len(resulted_metric.metrics) >= len(list(ts_metric_adapter.get_form().algorithms.choices)), \
            "At least a result should have been generated for every metric."
        for metric_value in json.loads(resulted_metric.metrics).values():
            assert isinstance(metric_value, (float, int))
Example #7
    def test_write_operation_metadata(self):
        """
        Test that a correct XML is created for an operation.
        """
        operation = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
        expected_file = self.files_helper.get_operation_meta_file_path(self.PROJECT_NAME, operation.id)
        assert not os.path.exists(expected_file)
        self.files_helper.write_operation_metadata(operation)
        assert os.path.exists(expected_file)
        operation_meta = XMLReader(expected_file).read_metadata()
        loaded_operation = model_operation.Operation(None, None, None, None)
        loaded_operation.from_dict(operation_meta, dao, user_id=self.test_user.id)
        expected_dict = operation.to_dict()[1]
        found_dict = loaded_operation.to_dict()[1]
        for key, value in expected_dict.items():
            assert str(value) == str(found_dict[key])
        # Now validate that the operation metadata can also be updated
        assert "new_group_name" != found_dict['user_group']
        self.files_helper.update_operation_metadata(self.PROJECT_NAME, "new_group_name", operation.id)
        found_dict = XMLReader(expected_file).read_metadata()
        assert "new_group_name" == found_dict['user_group']
Example #8
    def test_adapter_huge_memory_requirement(self, test_adapter_factory):
        """
        Test that a NoMemoryAvailableException is raised when the adapter cannot launch due to insufficient memory.
        """
        # Prepare adapter
        test_adapter_factory(adapter_class=TestAdapterHugeMemoryRequired)
        adapter = TestFactory.create_adapter(
            "tvb.tests.framework.adapters.testadapter3",
            "TestAdapterHugeMemoryRequired")

        # Simulate receiving POST data
        form = TestAdapterHugeMemoryRequiredForm()
        adapter.submit_form(form)

        view_model = form.get_view_model()()
        view_model.test = 5

        # Prepare operation for launch
        operation = model_operation.Operation(
            self.test_user.id,
            self.test_project.id,
            adapter.stored_adapter.id,
            json.dumps({'gid': view_model.gid.hex}),
            json.dumps({}),
            status=model_operation.STATUS_STARTED)
        operation = dao.store_entity(operation)

        # Store ViewModel in H5
        parent_folder = FilesHelper().get_project_folder(
            self.test_project, str(operation.id))
        view_model_path = os.path.join(
            parent_folder,
            h5.path_for(parent_folder, ViewModelH5, view_model.gid))
        with ViewModelH5(view_model_path, view_model) as view_model_h5:
            view_model_h5.store(view_model)

        # Launch operation
        with pytest.raises(NoMemoryAvailableException):
            OperationService().initiate_prelaunch(operation, adapter)
Example #9
    def test_adapter_huge_memory_requirement(self, test_adapter_factory):
        """
        Test that a NoMemoryAvailableException is raised when the adapter cannot launch due to insufficient memory.
        """
        test_adapter_factory(adapter_class=TestAdapterHugeMemoryRequired)
        adapter = TestFactory.create_adapter(
            "tvb.tests.framework.adapters.testadapter3",
            "TestAdapterHugeMemoryRequired")
        form = TestAdapterHugeMemoryRequiredForm()
        adapter.submit_form(form)
        data = {"test": 5}

        operation = model_operation.Operation(
            self.test_user.id,
            self.test_project.id,
            adapter.stored_adapter.id,
            json.dumps(data),
            json.dumps({}),
            status=model_operation.STATUS_STARTED)
        operation = dao.store_entity(operation)
        with pytest.raises(NoMemoryAvailableException):
            OperationService().initiate_prelaunch(operation, adapter)
Example #10
    def test_reduce_dimension_component(self):
        """
         This method tests if the data passed to the launch method of
         the NDimensionArrayAdapter adapter is correct. The passed data should be a list
         of arrays with one dimension.
        """
        inserted_count = FlowService().get_available_datatypes(
            self.test_project.id, "tvb.datatypes.arrays.MappedArray")[1]
        assert inserted_count == 0, "Expected to find no data."
        #create an operation
        algorithm_id = FlowService().get_algorithm_by_module_and_class(
            'tvb.tests.framework.adapters.ndimensionarrayadapter',
            'NDimensionArrayAdapter').id
        operation = model_operation.Operation(
            self.test_user.id,
            self.test_project.id,
            algorithm_id,
            'test params',
            meta=json.dumps({DataTypeMetaData.KEY_STATE: "RAW_DATA"}),
            status=model_operation.STATUS_FINISHED)
        operation = dao.store_entity(operation)
        #save the array wrapper in DB
        adapter_instance = NDimensionArrayAdapter()
        PARAMS = {}
        self.operation_service.initiate_prelaunch(operation, adapter_instance,
                                                  {}, **PARAMS)
        inserted_data = FlowService().get_available_datatypes(
            self.test_project.id, "tvb.datatypes.arrays.MappedArray")[0]
        assert len(inserted_data) == 1, "Problems when inserting data"
        gid = inserted_data[0][2]
        entity = dao.get_datatype_by_gid(gid)
        #from the 3D array do not select any array
        PARAMS = {
            "python_method": "reduce_dimension",
            "input_data": gid,
            "input_data_dimensions_0": "requiredDim_1",
            "input_data_dimensions_1": "",
            "input_data_dimensions_2": ""
        }
        try:
            self.operation_service.initiate_prelaunch(operation,
                                                      adapter_instance, {},
                                                      **PARAMS)
            raise AssertionError(
                "Launch should have failed: the adapter requires a 1D result "
                "and no selection was made.")
        except Exception:
            # Expected failure: with no selection the result cannot be a 1D array.
            pass
        #from the 3D array select only a 1D array
        first_dim = [gid + '_1_0', 'requiredDim_1']
        PARAMS = {
            "python_method": "reduce_dimension",
            "input_data": gid,
            "input_data_dimensions_0": first_dim,
            "input_data_dimensions_1": gid + "_2_1"
        }
        self.operation_service.initiate_prelaunch(operation, adapter_instance,
                                                  {}, **PARAMS)
        expected_result = entity.array_data[:, 0, 1]
        actual_result = adapter_instance.launch_param
        assert len(actual_result) == len(
            expected_result), "Not the same size for results!"
        assert numpy.equal(actual_result, expected_result).all()

        #from the 3D array select a 2D array
        first_dim = [gid + '_1_0', gid + '_1_1', 'requiredDim_2']
        PARAMS = {
            "python_method": "reduce_dimension",
            "input_data": gid,
            "input_data_dimensions_0": first_dim,
            "input_data_dimensions_1": gid + "_2_1"
        }
        self.operation_service.initiate_prelaunch(operation, adapter_instance,
                                                  {}, **PARAMS)
        expected_result = entity.array_data[slice(0, None), [0, 1], 1]
        actual_result = adapter_instance.launch_param
        assert len(actual_result) == len(
            expected_result), "Not the same size for results!"
        assert numpy.equal(actual_result, expected_result).all()

        #from 3D array select 1D array by applying SUM function on the first
        #dimension and average function on the second dimension
        PARAMS = {
            "python_method": "reduce_dimension",
            "input_data": gid,
            "input_data_dimensions_0": ["requiredDim_1", "func_sum"],
            "input_data_dimensions_1": "func_average",
            "input_data_dimensions_2": ""
        }
        self.operation_service.initiate_prelaunch(operation, adapter_instance,
                                                  {}, **PARAMS)
        aux = numpy.sum(entity.array_data, axis=0)
        expected_result = numpy.average(aux, axis=0)
        actual_result = adapter_instance.launch_param
        assert len(actual_result) == len(
            expected_result), "Not the same size of results!"
        assert numpy.equal(actual_result, expected_result).all()

        #from 3D array select a 2D array and apply op. on the second dimension
        PARAMS = {
            "python_method": "reduce_dimension",
            "input_data": gid,
            "input_data_dimensions_0": [
                "requiredDim_2", "func_sum", "expected_shape_x,512",
                "operations_x,>"
            ],
            "input_data_dimensions_1": "",
            "input_data_dimensions_2": ""
        }
        try:
            self.operation_service.initiate_prelaunch(operation,
                                                      adapter_instance, {},
                                                      **PARAMS)
            raise AssertionError(
                "Launch should have failed: the second dimension of the array "
                "is expected to be > 512, which does not hold.")
        except Exception:
            # Expected failure: the shape expectation (second dimension > 512) is not met.
            pass
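
Read together, the PARAMS dictionaries in this example suggest the encoding used for the dimension parameters: entries of the form <gid>_<axis+1>_<index> pick individual indices on an axis, requiredDim_<n> states the dimensionality the reduced result must have, func_<name> applies an aggregation along the axis, and pairs such as expected_shape_x,512 with operations_x,> add a shape expectation. This reading is inferred from the test alone, not from adapter documentation; a compact, commented restatement of the first successful call (same values as above) would be:

        # Inferred from the calls above, not from adapter documentation:
        PARAMS = {
            "python_method": "reduce_dimension",
            "input_data": gid,                             # gid of the stored MappedArray
            "input_data_dimensions_0": [gid + "_1_0",      # keep index 0 on the first axis
                                        "requiredDim_1"],  # the reduced result must be 1D
            "input_data_dimensions_1": gid + "_2_1",       # keep index 1 on the second axis
        }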