Code example #1
    def test_filter_sql_equivalent(self):
        """
        Test applying a filter on DB.
        """
        data_type = Datatype1()
        data_type.row1 = "value1"
        data_type.row2 = "value2"
        datatypes_factory.DatatypesFactory()._store_datatype(data_type)
        data_type = Datatype1()
        data_type.row1 = "value3"
        data_type.row2 = "value2"
        datatypes_factory.DatatypesFactory()._store_datatype(data_type)
        data_type = Datatype1()
        data_type.row1 = "value1"
        data_type.row2 = "value3"
        datatypes_factory.DatatypesFactory()._store_datatype(data_type)

        test_filter_1 = FilterChain(fields=[FilterChain.datatype + '._row1'],
                                    operations=['=='], values=['value1'])
        test_filter_2 = FilterChain(fields=[FilterChain.datatype + '._row1'],
                                    operations=['=='], values=['value2'])
        test_filter_3 = FilterChain(fields=[FilterChain.datatype + '._row1', FilterChain.datatype + '._row2'],
                                    operations=['==', 'in'], values=["value1", ['value1', 'value2']])
        test_filter_4 = FilterChain(fields=[FilterChain.datatype + '._row1', FilterChain.datatype + '._row2'],
                                    operations=['==', 'in'], values=["value1", ['value5', 'value6']])
        
        all_stored_dts = self.count_all_entities(Datatype1)
        self.assertEqual(3, all_stored_dts)
        
        self._evaluate_db_filter(test_filter_1, 2)
        self._evaluate_db_filter(test_filter_2, 0)
        self._evaluate_db_filter(test_filter_3, 1)
        self._evaluate_db_filter(test_filter_4, 0)
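
The helper _evaluate_db_filter used above is not part of this snippet. A minimal sketch of what it could look like, assuming the test class also exposes the flow_service and test_project attributes seen in code example #6 (these are assumptions, not confirmed by the snippet), is:

    def _evaluate_db_filter(self, filter_chain, expected_count):
        """
        Hypothetical helper: run the filter against the DB and assert how many
        Datatype1 rows it returns. Assumes flow_service/test_project exist on
        this test class, as in code example #6.
        """
        filtered = self.flow_service.get_available_datatypes(
            self.test_project.id, Datatype1, filter_chain)[0]
        self.assertEqual(expected_count, len(filtered))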
Code example #2
    def launch(self, test_dt_input, test_non_dt_input):
        """
        Dummy launch for a test adapter: reference the datatype input and
        return a newly created Datatype1 instance.
        """
        str(test_dt_input)
        result = Datatype1()
        result.row1 = 'test'
        result.row2 = 'test'
        result.storage_path = self.storage_path
        return result
Code example #3
    def __create_complex_workflow(self, workflow_step_list):
        """
        Creates a burst with a complex workflow with a given list of workflow steps.
        :param workflow_step_list: a list of workflow steps that will be used in the
            creation of a new workflow for a new burst
        """
        burst_config = TestFactory.store_burst(self.test_project.id)

        stored_dt = datatypes_factory.DatatypesFactory()._store_datatype(
            Datatype1())

        first_step_algorithm = self.flow_service.get_algorithm_by_module_and_class(
            "tvb.tests.framework.adapters.testadapter1",
            "TestAdapterDatatypeInput")
        metadata = {DataTypeMetaData.KEY_BURST: burst_config.id}
        kwargs = {"test_dt_input": stored_dt.gid, 'test_non_dt_input': '0'}
        operations, group = self.operation_service.prepare_operations(
            self.test_user.id, self.test_project.id, first_step_algorithm,
            first_step_algorithm.algorithm_category, metadata, **kwargs)

        workflows = self.workflow_service.create_and_store_workflow(
            project_id=self.test_project.id,
            burst_id=burst_config.id,
            simulator_index=0,
            simulator_id=first_step_algorithm.id,
            operations=operations)
        self.operation_service.prepare_operations_for_workflowsteps(
            workflow_step_list, workflows, self.test_user.id, burst_config.id,
            self.test_project.id, group, operations)
        # Fire the first operation of the workflow
        if len(operations) > 0:
            self.operation_service.launch_operation(operations[0].id, False)
        return burst_config.id
Code example #4
    def _prepare_and_launch_sync_burst(self):
        """
        Private method to launch a dummy burst. Return the burst loaded after the launch finished
        as well as the workflow steps that initially formed the burst.
        NOTE: the burst launched by this method is a `dummy` one, meaning we do not use an actual
        simulation, but instead test adapters.
        """
        burst_config = TestFactory.store_burst(self.test_project.id)

        workflow_step_list = []

        stored_dt = datatypes_factory.DatatypesFactory()._store_datatype(
            Datatype1())
        first_step_algorithm = self.flow_service.get_algorithm_by_module_and_class(
            "tvb.tests.framework.adapters.testadapter1",
            "TestAdapterDatatypeInput")
        metadata = {DataTypeMetaData.KEY_BURST: burst_config.id}
        kwargs = {"test_dt_input": stored_dt.gid, 'test_non_dt_input': '0'}
        operations, group = self.operation_service.prepare_operations(
            self.test_user.id, self.test_project, first_step_algorithm,
            first_step_algorithm.algorithm_category, metadata, **kwargs)

        # Fire the workflow, then reload the burst and clean up the temporary imported datatype
        self.operation_service.launch_operation(operations[0].id, False)
        loaded_burst, _ = self.burst_service.load_burst(burst_config.id)
        import_operation = dao.get_operation_by_id(stored_dt.fk_from_operation)
        dao.remove_entity(import_operation.__class__, import_operation.id)
        dao.remove_datatype(stored_dt.gid)
        return loaded_burst, workflow_step_list
Code example #5
    def create_simple_datatype(self, subject=USER_FULL_NAME, state=DATATYPE_STATE):
        """
        This method creates a simple data type
        """
        datatype_inst = Datatype1()
        self._fill_datatype(datatype_inst, subject, state)

        # Store data type
        return self._store_datatype(datatype_inst)
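
The _fill_datatype helper called above is not shown in this snippet. A minimal sketch, assuming it only assigns the subject and state metadata fields that code example #6 sets explicitly (an assumption about its scope), could be:

    def _fill_datatype(self, datatype, subject, state):
        """
        Hypothetical sketch of the helper used above: assign the common
        metadata fields before the instance is stored.
        """
        datatype.subject = subject
        datatype.state = state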
Code example #6
    def test_get_filtered_datatypes(self):
        """
        Test the filter function when retrieving dataTypes.
        """
        # Create some test operations
        start_dates = [datetime.now(),
                       datetime.strptime("08-06-2010", "%m-%d-%Y"),
                       datetime.strptime("07-21-2010", "%m-%d-%Y"),
                       datetime.strptime("05-06-2010", "%m-%d-%Y"),
                       datetime.strptime("07-21-2011", "%m-%d-%Y")]
        end_dates = [datetime.now(),
                     datetime.strptime("08-12-2010", "%m-%d-%Y"),
                     datetime.strptime("08-12-2010", "%m-%d-%Y"),
                     datetime.strptime("08-12-2011", "%m-%d-%Y"),
                     datetime.strptime("08-12-2011", "%m-%d-%Y")]
        for i in range(5):
            operation = model.Operation(self.test_user.id, self.test_project.id, self.algorithm.id, 'test params',
                                        status=model.STATUS_FINISHED, start_date=start_dates[i],
                                        completion_date=end_dates[i])
            operation = dao.store_entity(operation)
            storage_path = FilesHelper().get_project_folder(self.test_project, str(operation.id))
            if i < 4:
                datatype_inst = Datatype1()
                datatype_inst.type = "Datatype1"
                datatype_inst.subject = "John Doe" + str(i)
                datatype_inst.state = "RAW"
                datatype_inst.set_operation_id(operation.id)
                dao.store_entity(datatype_inst)
            else:
                for _ in range(2):
                    datatype_inst = Datatype2()
                    datatype_inst.storage_path = storage_path
                    datatype_inst.type = "Datatype2"
                    datatype_inst.subject = "John Doe" + str(i)
                    datatype_inst.state = "RAW"
                    datatype_inst.string_data = ["data"]
                    datatype_inst.set_operation_id(operation.id)
                    dao.store_entity(datatype_inst)

        returned_data = self.flow_service.get_available_datatypes(self.test_project.id, Datatype1)[0]
        for row in returned_data:
            if row[1] != 'Datatype1':
                raise AssertionError("Some invalid data was returned!")
        assert 4 == len(returned_data), "Invalid length of result"

        filter_op = FilterChain(fields=[FilterChain.datatype + ".state", FilterChain.operation + ".start_date"],
                                values=["RAW", datetime.strptime("08-01-2010", "%m-%d-%Y")], operations=["==", ">"])
        returned_data = self.flow_service.get_available_datatypes(self.test_project.id, Datatype1, filter_op)[0]
        returned_subjects = [one_data[3] for one_data in returned_data]

        if "John Doe0" not in returned_subjects or "John Doe1" not in returned_subjects or len(returned_subjects) != 2:
            raise AssertionError("DataTypes were not filtered properly!")
Code example #7
    def launch(self, test1_val1, test1_val2):
        """
        Tests successful launch of an ABCSynchronous adapter

        :param test1_val1: a dummy integer value
        :param test1_val2: a dummy integer value
        :return: a `Datatype1` object
        """
        int(test1_val1)
        int(test1_val2)
        result = Datatype1()
        result.row1 = 'test'
        result.row2 = 'test'
        result.storage_path = self.storage_path
        return result
Code example #8
    def _create_datatypes(self, dt_factory, nr_of_dts):
        """
        Create and store nr_of_dts Datatype1 instances with distinct row values.
        """
        for idx in range(nr_of_dts):
            dt = Datatype1()
            dt.row1 = "value%i" % (idx,)
            dt.row2 = "value%i" % (idx + 1,)
            dt_factory._store_datatype(dt)
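
For context, a usage sketch for this helper (the factory variable and the count of three are illustrative only) might look like:

    # Illustrative call site: populate three Datatype1 rows before running filter tests.
    dt_factory = datatypes_factory.DatatypesFactory()
    self._create_datatypes(dt_factory, 3)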