Example #1
 def _process_input_zip(self, zip_arch, result_folder, remote_prefix, 
                        file_name_base, expected_pairs, fix_number=True):
     """
     Read entries in the uploaded ZIP archive.
     Raise an exception when HDR/IMG pairs are not matched or the "expected_pairs" count is not met.
     :return: string with the list of HDR files (to be passed to the DTI pipeline).
     """
     
     hdr_files = []
     for file_name in zip_arch.namelist():
         if not file_name.startswith(file_name_base) or file_name.endswith("/"):
             continue
         if file_name.endswith(".hdr"):
             pair_img = file_name.replace(".hdr", ".img")
             if pair_img not in zip_arch.namelist():
                 raise ConnectException("Could not find pair for HDR file :" + str(file_name))
             
             new_file_name = os.path.join(result_folder, file_name_base + str(len(hdr_files)) + ".hdr")
             src = zip_arch.open(file_name, 'rU')
             FilesHelper.copy_file(src, new_file_name)
             hdr_files.append(os.path.join(remote_prefix, os.path.split(new_file_name)[1]))
             new_file_name = new_file_name.replace(".hdr", ".img")
             src = zip_arch.open(pair_img, 'rU')
             FilesHelper.copy_file(src, new_file_name)
             
         elif not file_name.endswith(".img"):
             self.logger.warning("Ignored file :" + str(file_name))
         
     if len(hdr_files) < expected_pairs or (fix_number and len(hdr_files) > expected_pairs):
         raise ConnectException("Invalid number of files "+ str(len(hdr_files)) + " expected "+ str(expected_pairs))
     result = ""
     for hdr_name in hdr_files:
         result = result + hdr_name + " "
     return result
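The pairing rule enforced above (every .hdr entry must have a matching .img next to it) can be checked in isolation. A minimal sketch using only the standard zipfile module; the helper name, archive path and layout are assumptions, not part of the original service:

import zipfile

def list_hdr_img_pairs(zip_path, file_name_base):
    """Return the .hdr entries whose matching .img entry is present in the archive."""
    with zipfile.ZipFile(zip_path) as zip_arch:
        names = set(zip_arch.namelist())
        return [name for name in sorted(names)
                if name.startswith(file_name_base) and name.endswith(".hdr")
                and name.replace(".hdr", ".img") in names]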
Example #2
 def launch(self, weights, tracts, input_data):
     """
     Execute import operations: process the weights and tracts csv files, then use
     the reference connectivity passed as input_data for the rest of the attributes.
     """
     dti_service = dtipipelineservice.DTIPipelineService()
     dti_service._process_csv_file(weights, dti_service.WEIGHTS_FILE)
     dti_service._process_csv_file(tracts, dti_service.TRACT_FILE)
     weights_matrix = read_list_data(os.path.join(os.path.dirname(weights), dti_service.WEIGHTS_FILE))
     tract_matrix = read_list_data(os.path.join(os.path.dirname(tracts), dti_service.TRACT_FILE))
     FilesHelper.remove_files([os.path.join(os.path.dirname(weights), dti_service.WEIGHTS_FILE), 
                               os.path.join(os.path.dirname(tracts), dti_service.TRACT_FILE)])
     if weights_matrix.shape[0] != input_data.orientations.shape[0]:
         raise LaunchException("The csv files define %s nodes but the connectivity you selected as reference has only %s nodes."%(
                                 weights_matrix.shape[0], input_data.orientations.shape[0]))
     result = Connectivity()
     result.storage_path = self.storage_path
     result.nose_correction = input_data.nose_correction
     result.centres = input_data.centres
     result.region_labels = input_data.region_labels
     result.weights = weights_matrix
     result.tract_lengths = tract_matrix
     result.orientations = input_data.orientations
     result.areas = input_data.areas
     result.cortical = input_data.cortical
     result.hemispheres = input_data.hemispheres
     return result
Example #3
    def setUp(self):
        """
        Set up the context needed by the tests.
        """
#        self.clean_database()
        self.files_helper = FilesHelper()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user, self.PROJECT_NAME)
Example #4
    def export_project(self, project):
        """
        Given a project and the TVB storage path, create a ZIP archive
        ready for export.
        :param project: project object identifying the project to be exported
        """
        if project is None:
            raise ExportException("Please provide project to be exported")

        files_helper = FilesHelper()
        project_folder = files_helper.get_project_folder(project)

        bursts_dict = {}
        datatype_burst_mapping = {}
        bursts_count = dao.get_bursts_for_project(project.id, count=True)
        for start_idx in range(0, bursts_count, BURST_PAGE_SIZE):
            bursts = dao.get_bursts_for_project(project.id,
                                                page_start=start_idx,
                                                page_end=start_idx +
                                                BURST_PAGE_SIZE)
            for burst in bursts:
                self._build_burst_export_dict(burst, bursts_dict)

        datatypes_count = dao.get_datatypes_for_project(project.id, count=True)
        for start_idx in range(0, datatypes_count, DATAYPES_PAGE_SIZE):
            datatypes = dao.get_datatypes_for_project(project.id,
                                                      page_start=start_idx,
                                                      page_end=start_idx +
                                                      DATAYPES_PAGE_SIZE)
            for datatype in datatypes:
                datatype_burst_mapping[datatype.gid] = datatype.fk_parent_burst

        # Compute path and name of the zip file
        now = datetime.now()
        date_str = now.strftime("%Y-%m-%d_%H-%M")
        zip_file_name = "%s_%s.%s" % (date_str, project.name,
                                      self.ZIP_FILE_EXTENSION)

        export_folder = self._build_data_export_folder(project)
        result_path = os.path.join(export_folder, zip_file_name)

        bursts_file_name = os.path.join(project_folder, BURST_INFO_FILE)
        burst_info = {
            BURSTS_DICT_KEY: bursts_dict,
            DT_BURST_MAP: datatype_burst_mapping
        }
        with open(bursts_file_name, 'w') as bursts_file:
            bursts_file.write(json.dumps(burst_info))

        # pack project content into a ZIP file
        result_zip = files_helper.zip_folder(result_path, project_folder)

        # remove these files, since we only want them in export archive
        os.remove(bursts_file_name)
        return result_zip
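For orientation, a minimal sketch of the import-side counterpart, which is an assumption and not shown in the original: the burst-info file written above is plain JSON keyed by the same two constants, so reading it back is a single json.load ("burst_info.json" is a placeholder path):

import json

# Sketch only: BURSTS_DICT_KEY and DT_BURST_MAP are the same constants used by
# the export code above; the concrete file path is an assumption.
with open("burst_info.json") as bursts_file:
    burst_info = json.load(bursts_file)
bursts_dict = burst_info[BURSTS_DICT_KEY]
datatype_burst_mapping = burst_info[DT_BURST_MAP]   # datatype gid -> parent burst, as built above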
Example #5
 def setUp(self):
     """
     Reset the database before each test.
     """
     EVENTS_FOLDER = ''
     #        self.clean_database()
     self.project_service = ProjectService()
     self.structure_helper = FilesHelper()
     self.test_user = TestFactory.create_user()
Example #6
 def setUp(self):
     """
     Reset the database before each test.
     """
     #        self.clean_database()
     self.test_user = TestFactory.create_user()
     self.test_project = TestFactory.create_project(self.test_user)
     self.service = DTIPipelineService('127.0.0.1', 'root')
     self.helper = FilesHelper()
Example #7
    def _update_datatype_disk_size(self, file_path):
        """
        Compute and update the disk_size attribute of the DataType for which the given file was created.
        """
        file_handler = FilesHelper()
        datatype_gid = self._get_manager(file_path).get_gid_attribute()
        datatype = dao.get_datatype_by_gid(datatype_gid)

        if datatype is not None:
            datatype.disk_size = file_handler.compute_size_on_disk(file_path)
            dao.store_entity(datatype)
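What compute_size_on_disk returns is not shown here; a minimal stand-in, assuming a plain per-file size in kB (the real FilesHelper helper may use different units or handle storage internals differently):

import os

def compute_size_on_disk_kb(file_path):
    # Size of a single stored file in kB; 0 when the file does not exist.
    if os.path.isfile(file_path):
        return int(os.path.getsize(file_path) / 1024)
    return 0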
Example #8
def get_gifty_file_name(project_id, desired_name):
    """
    Compute a file name that does not yet exist, in the TEMP folder of
    the given project.
    Try desired_name first and, if it already exists, append a number.
    """
    if project_id:
        project = dao.get_project_by_id(project_id)
        file_helper = FilesHelper()
        temp_path = file_helper.get_project_folder(project,
                                                   FilesHelper.TEMP_FOLDER)
        return get_unique_file_name(temp_path, desired_name)[0]
    return get_unique_file_name(cfg.TVB_STORAGE, desired_name)[0]
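The "try desired_name, then append a number" behaviour relied on here can be illustrated with a small stand-alone helper. A sketch only: TVB's get_unique_file_name returns a tuple (only the first element, the path, is used above) and may differ in naming details.

import os

def unique_file_name(folder, desired_name):
    base, ext = os.path.splitext(desired_name)
    candidate = os.path.join(folder, desired_name)
    counter = 1
    while os.path.exists(candidate):
        candidate = os.path.join(folder, "%s_%d%s" % (base, counter, ext))
        counter += 1
    return candidate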
Example #9
    def _build_data_export_folder(self, data):
        """
        Compute the folder where the results of an export operation will be
        stored temporarily (e.g. until the download is done, or for one day).
        """
        now = datetime.now()
        date_str = "%d-%d-%d_%d-%d-%d_%d" % (now.year, now.month, now.day,
                                             now.hour, now.minute, now.second,
                                             now.microsecond)
        tmp_str = date_str + "@" + data.gid
        data_export_folder = os.path.join(self.export_folder, tmp_str)
        files_helper = FilesHelper()
        files_helper.check_created(data_export_folder)

        return data_export_folder
Example #10
 def tearDown(self):
     """
     Remove project folders and restore config file
     """
     FilesHelper().remove_project_structure(self.test_project.name)
     #        self.clean_database()
     cfg.CURRENT_DIR = self.old_config_file
Example #11
 def test_launch_operation_HDD_with_space(self):
     """
     Test the actual operation flow by executing a test adapter.
     """
     module = "tvb_test.adapters.testadapter3"
     class_name = "TestAdapterHDDRequired"
     group = dao.find_group(module, class_name)
     adapter = FlowService().build_adapter_instance(group)
     output = adapter.get_output()
     output_type = output[0].__name__
     data = {"test": 100}
     TVBSettings.MAX_DISK_SPACE = float(
         adapter.get_required_disk_size(**data))
     tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                   "TEMP")
     self.operation_service.initiate_operation(
         self.test_user,
         self.test_project.id,
         adapter,
         tmp_folder,
         method_name=ABCAdapter.LAUNCH_METHOD,
         **data)
     dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
     self.assertEqual(len(dts), 1)
     datatype = dao.get_datatype_by_id(dts[0][0])
     self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT,
                      "Wrong data stored.")
     self.assertEqual(datatype.type, output_type, "Wrong data stored.")
Example #12
 def test_initiate_operation(self):
     """
     Test the actual operation flow by executing a test adapter.
     """
     module = "tvb_test.adapters.testadapter1"
     class_name = "TestAdapter1"
     group = dao.find_group(module, class_name)
     adapter = FlowService().build_adapter_instance(group)
     output = adapter.get_output()
     output_type = output[0].__name__
     data = {"test1_val1": 5, "test1_val2": 5}
     tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                   "TEMP")
     res = self.operation_service.initiate_operation(
         self.test_user,
         self.test_project.id,
         adapter,
         tmp_folder,
         method_name=ABCAdapter.LAUNCH_METHOD,
         **data)
     self.assertTrue(
         res.index("has finished.") > 10, "Operation didn't finish")
     group = dao.find_group(module, class_name)
     self.assertEqual(group.module, 'tvb_test.adapters.testadapter1',
                      "Wrong data stored.")
     self.assertEqual(group.classname, 'TestAdapter1', "Wrong data stored.")
     dts = dao.get_values_of_datatype(self.test_project.id, Datatype1)
     self.assertEqual(len(dts), 1)
     datatype = dao.get_datatype_by_id(dts[0][0])
     self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT,
                      "Wrong data stored.")
     self.assertEqual(datatype.type, output_type, "Wrong data stored.")
Example #13
 def tearDown(self):
     """
     Remove project folders and clean up database.
     """
     FilesHelper().remove_project_structure(self.test_project.name)
     self.delete_project_folders()
     cfg.CURRENT_DIR = self.old_config_file
Example #14
    def setUp(self):
        export_manager = ExportManager()

        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()

        # Generate simple data type and export it to H5 file
        self.datatype = self.datatypeFactory.create_datatype_with_storage()
        _, exported_h5_file, _ = export_manager.export_data(self.datatype, self.TVB_EXPORTER, self.test_project)
        # Copy the H5 file to another location, since the exported original
        # will be deleted together with the project
        _, h5_file_name = os.path.split(exported_h5_file)
        shutil.copy(exported_h5_file, cfg.TVB_TEMP_FOLDER)
        self.h5_file_path = os.path.join(cfg.TVB_TEMP_FOLDER, h5_file_name)

        self.assertTrue(os.path.exists(self.h5_file_path), "Simple data type was not exported correctly")

        # Generate data type group and export it to ZIP file
        self.datatype_group = self.datatypeFactory.create_datatype_group()
        _, self.zip_file_path, _ = export_manager.export_data(self.datatype_group, self.TVB_EXPORTER, self.test_project)
        self.assertTrue(os.path.exists(self.zip_file_path), "Data type group was not exported correctly")

        FilesHelper().remove_project_structure(self.test_project.name)
        self.clean_database(delete_folders=False)

        # Recreate project, but a clean one where to import data
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()
Example #15
    def test_launch_two_ops_HDD_full_space(self):
        """
        Launch two operations and give the user enough available space so that the first finishes,
        but after the allowed disk size is updated the second does not.
        """
        module = "tvb_test.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test": 100}
        TVBSettings.MAX_DISK_SPACE = (1 + float(adapter.get_required_disk_size(**data)))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                  tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")
        # Now update the maximum disk size to be less than the size of the previously resulting datatype (transform kB to MB)
        # plus what is estimated to be required by the next one (transform from B to MB)
        TVBSettings.MAX_DISK_SPACE = float(datatype.disk_size - 1) + float(adapter.get_required_disk_size(**data) - 1)

        self.assertRaises(NoMemoryAvailableException, self.operation_service.initiate_operation, self.test_user,
                          self.test_project.id, adapter,
                          tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
        self.assertEqual(len(dts), 1)
Example #16
 def _burst_create_connectivity(self):
     """
     Create a connectivity that will be used in "non-dummy" burst launches (with the actual simulator).
     TODO: This is duplicate code from burstservice_test. Should go into the 'generic' DataType factory
     once that is done.
     """
     meta = {
         DataTypeMetaData.KEY_SUBJECT: "John Doe",
         DataTypeMetaData.KEY_STATE: "RAW"
     }
     algorithm, algo_group = FlowService().get_algorithm_by_module_and_class(
         SIMULATOR_MODULE, SIMULATOR_CLASS)
     self.operation = model.Operation(self.test_user.id,
                                      self.test_project.id,
                                      algo_group.id,
                                      json.dumps(''),
                                      meta=json.dumps(meta),
                                      status="STARTED",
                                      method_name=ABCAdapter.LAUNCH_METHOD)
     self.operation = dao.store_entity(self.operation)
     storage_path = FilesHelper().get_project_folder(
         self.test_project, str(self.operation.id))
     connectivity = Connectivity(storage_path=storage_path)
     connectivity.weights = numpy.ones((74, 74))
     connectivity.centres = numpy.ones((74, 3))
     adapter_instance = StoreAdapter([connectivity])
     OperationService().initiate_prelaunch(self.operation, adapter_instance,
                                           {})
     return algorithm.id, connectivity
Example #17
 def test_launch_operation_HDD_full_space_started_ops(self):
     """
     Test the actual operation flow by executing a test adapter.
     """
     space_taken_by_started = 100
     module = "tvb_test.adapters.testadapter3"
     class_name = "TestAdapterHDDRequired"
     group = dao.find_group(module, class_name)
     started_operation = model.Operation(
         self.test_user.id,
         self.test_project.id,
         group.id,
         "",
         status=model.STATUS_STARTED,
         result_disk_size=space_taken_by_started)
     dao.store_entity(started_operation)
     adapter = FlowService().build_adapter_instance(group)
     data = {"test": 100}
     TVBSettings.MAX_DISK_SPACE = float(
         adapter.get_required_disk_size(**data) + space_taken_by_started - 1)
     tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                   "TEMP")
     self.assertRaises(NoMemoryAvailableException,
                       self.operation_service.initiate_operation,
                       self.test_user,
                       self.test_project.id,
                       adapter,
                       tmp_folder,
                       method_name=ABCAdapter.LAUNCH_METHOD,
                       **data)
     dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
     self.assertEqual(len(dts), 0)
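The disk-quota rule these tests exercise reduces to one inequality. A sketch of that check under the stated assumption; the real logic lives inside OperationService.initiate_operation and is not reproduced here:

def has_enough_disk_space(max_disk_space, required_size, space_reserved_by_started):
    # The new operation may start only if its estimated size, together with the
    # space already reserved by STARTED operations, fits in the configured limit.
    return required_size + space_reserved_by_started <= max_disk_space

# With MAX_DISK_SPACE set to required + reserved - 1, as above, the predicate is
# False and initiate_operation raises NoMemoryAvailableException.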
Example #18
    def __init__(self):
        now = datetime.now()
        micro_postfix = "_%d" % now.microsecond

        # Here create all structures needed later for data types creation
        self.files_helper = FilesHelper()

        # First create user
        user = model.User("datatype_factory_user" + micro_postfix, "test_pass",
                          "*****@*****.**" + micro_postfix, True, "user")
        self.user = dao.store_entity(user)

        # Now create a project
        project_service = ProjectService()
        data = dict(name='DatatypesFactoryProject' + micro_postfix,
                    description='test_desc',
                    users=[])
        self.project = project_service.store_project(self.user, True, None,
                                                     **data)

        # Create algorithm
        alg_category = model.AlgorithmCategory('one', True)
        dao.store_entity(alg_category)
        alg_group = model.AlgorithmGroup("test_module1", "classname1",
                                         alg_category.id)
        dao.store_entity(alg_group)
        algorithm = model.Algorithm(alg_group.id,
                                    'id',
                                    name='',
                                    req_data='',
                                    param_name='',
                                    output='')
        self.algorithm = dao.store_entity(algorithm)

        #Create an operation
        self.meta = {
            DataTypeMetaData.KEY_SUBJECT: self.USER_FULL_NAME,
            DataTypeMetaData.KEY_STATE: self.DATATYPE_STATE
        }
        operation = model.Operation(self.user.id,
                                    self.project.id,
                                    self.algorithm.id,
                                    'test parameters',
                                    meta=json.dumps(self.meta),
                                    status="FINISHED",
                                    method_name=ABCAdapter.LAUNCH_METHOD)
        self.operation = dao.store_entity(operation)
Example #19
 def set_operation_id(self, operation_id):
     """
     Setter for FK_operation_id.
     """
     self.fk_from_operation = operation_id
     parent_project = dao.get_project_for_operation(operation_id)
     self.storage_path = FilesHelper().get_project_folder(
         parent_project, str(operation_id))
     self._storage_manager = None
Example #20
class DTITest(TransactionalTestCase):
    """
    Test basic functionality of DTI Import Service.
    """
    ### First dataSet
    FILE_1 = os.path.join(os.path.dirname(current_pack.__file__), "data",
                          "TVB_ConnectionCapacityMatrix.csv")
    FILE_2 = os.path.join(os.path.dirname(current_pack.__file__), "data",
                          "TVB_ConnectionDistanceMatrix.csv")
    ### Second dataSet
    FILE_3 = os.path.join(os.path.dirname(current_pack.__file__), "data",
                          "TVB_ConnectionCapacityMatrix_3.csv")
    FILE_4 = os.path.join(os.path.dirname(current_pack.__file__), "data",
                          "TVB_ConnectionDistanceMatrix_3.csv")

    def setUp(self):
        """
        Reset the database before each test.
        """
        #        self.clean_database()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        self.service = DTIPipelineService('127.0.0.1', 'root')
        self.helper = FilesHelper()

    def test_process_csv(self):
        """
        Test that a CSV generated on the server is correctly processed.
        """

        folder = self.helper.get_project_folder(self.test_project, "TEMP")

        for file_name in [self.FILE_1, self.FILE_2, self.FILE_3, self.FILE_4]:

            intermediate_file = os.path.join(folder,
                                             os.path.split(file_name)[1])
            self.helper.copy_file(file_name, intermediate_file)
            result_file = 'weights.txt' if 'Capacity' in file_name else 'tracts.txt'
            result_file = os.path.join(folder, result_file)
            self.service._process_csv_file(intermediate_file, result_file)
            matrix = read_list_data(result_file)
            self.assertEqual(96, len(matrix))
            self.assertEqual(96, len(matrix[0]))
Example #21
 def _create_connectivity(self, nodes_number):
     """
     Create a connectivity entity and return its GID
     """
     storage_path = FilesHelper().get_project_folder(self.test_project, str(self.operation.id))
     connectivity = Connectivity(storage_path=storage_path)
     connectivity.weights = numpy.ones((nodes_number, nodes_number))
     connectivity.centres = numpy.ones((nodes_number, 3))
     adapter_instance = StoreAdapter([connectivity])
     OperationService().initiate_prelaunch(self.operation, adapter_instance, {})
     
     return dao.get_datatype_by_id(connectivity.id).gid
Example #22
    def test_adapter_launch(self):
        """
        Test that the adapter launches and successfully generates a datatype measure entry.
        """
        meta = {
            DataTypeMetaData.KEY_SUBJECT: "John Doe",
            DataTypeMetaData.KEY_STATE: "RAW"
        }
        algo_group = FlowService().get_algorithm_by_module_and_class(
            SIMULATOR_MODULE, SIMULATOR_CLASS)[1]
        self.operation = model.Operation(self.test_user.id,
                                         self.test_project.id,
                                         algo_group.id,
                                         json.dumps(''),
                                         meta=json.dumps(meta),
                                         status=model.STATUS_STARTED,
                                         method_name=ABCAdapter.LAUNCH_METHOD)
        self.operation = dao.store_entity(self.operation)
        storage_path = FilesHelper().get_project_folder(
            self.test_project, str(self.operation.id))
        dummy_input = numpy.arange(1, 10001).reshape(10, 10, 10, 10)
        dummy_time = numpy.arange(1, 11)

        # Get connectivity
        connectivities = FlowService().get_available_datatypes(
            self.test_project.id, "tvb.datatypes.connectivity.Connectivity")
        self.assertEqual(len(connectivities), 1)
        connectivity_gid = connectivities[0][2]

        dummy_time_series = TimeSeriesRegion()
        dummy_time_series.storage_path = storage_path
        dummy_time_series.write_data_slice(dummy_input)
        dummy_time_series.write_time_slice(dummy_time)
        dummy_time_series.close_file()
        dummy_time_series.start_time = 0.0
        dummy_time_series.sample_period = 1.0
        dummy_time_series.connectivity = connectivity_gid

        adapter_instance = StoreAdapter([dummy_time_series])
        OperationService().initiate_prelaunch(self.operation, adapter_instance,
                                              {})

        dummy_time_series = dao.get_generic_entity(dummy_time_series.__class__,
                                                   dummy_time_series.gid,
                                                   'gid')[0]
        ts_metric_adapter = TimeseriesMetricsAdapter()
        resulted_metric = ts_metric_adapter.launch(dummy_time_series)
        self.assertTrue(isinstance(resulted_metric, DatatypeMeasure),
                        "Result should be a datatype measure.")
        self.assertTrue(
            len(resulted_metric.metrics) == len(
                ts_metric_adapter.available_algorithms.keys()),
            "A result should have been generated for every metric.")
Example #23
    def test_get_filtered_datatypes(self):
        """
        Test the filter function when retrieving dataTypes.
        """
        #Create some test operations
        start_dates = [datetime.now(),
                       datetime.strptime("08-06-2010", "%m-%d-%Y"),
                       datetime.strptime("07-21-2010", "%m-%d-%Y"),
                       datetime.strptime("05-06-2010", "%m-%d-%Y"),
                       datetime.strptime("07-21-2011", "%m-%d-%Y")]
        end_dates = [datetime.now(),
                     datetime.strptime("08-12-2010", "%m-%d-%Y"),
                     datetime.strptime("08-12-2010", "%m-%d-%Y"),
                     datetime.strptime("08-12-2011", "%m-%d-%Y"),
                     datetime.strptime("08-12-2011", "%m-%d-%Y")]
        for i in range(5):
            operation = model.Operation(self.test_user.id, self.test_project.id, self.algo_inst.id, 'test params',
                                        status="FINISHED", start_date=start_dates[i], completion_date=end_dates[i])
            operation = dao.store_entity(operation)
            storage_path = FilesHelper().get_project_folder(self.test_project, str(operation.id))
            if i < 4:
                datatype_inst = Datatype1()
                datatype_inst.type = "Datatype1"
                datatype_inst.subject = "John Doe" + str(i)
                datatype_inst.state = "RAW"
                datatype_inst.set_operation_id(operation.id)
                dao.store_entity(datatype_inst)
            else:
                for _ in range(2):
                    datatype_inst = Datatype2()
                    datatype_inst.storage_path = storage_path
                    datatype_inst.type = "Datatype2"
                    datatype_inst.subject = "John Doe" + str(i)
                    datatype_inst.state = "RAW"
                    datatype_inst.string_data = ["data"]
                    datatype_inst.set_operation_id(operation.id)
                    dao.store_entity(datatype_inst)

        returned_data = self.flow_service.get_available_datatypes(self.test_project.id,
                                                                  "tvb_test.datatypes.datatype1.Datatype1")
        for row in returned_data:
            if row[1] != 'Datatype1':
                self.fail("Some invalid data was returned!")
        self.assertEqual(4, len(returned_data), "Invalid length of result")

        filter_op = FilterChain(fields=[FilterChain.datatype + ".state", FilterChain.operation + ".start_date"],
                                values=["RAW", datetime.strptime("08-01-2010", "%m-%d-%Y")], operations=["==", ">"])
        returned_data = self.flow_service.get_available_datatypes(self.test_project.id,
                                                                  "tvb_test.datatypes.datatype1.Datatype1", filter_op)
        returned_subjects = [one_data[3] for one_data in returned_data]

        if "John Doe0" not in returned_subjects or "John Doe1" not in returned_subjects or len(returned_subjects) != 2:
            self.fail("DataTypes were not filtered properly!")
Example #24
    def tearDown(self):
        """
        Clean up test data.
        """
        project = self.datatypeFactory.get_project()
        FilesHelper().remove_project_structure(project.name)

        # Remove EXPORT folder
        export_folder = os.path.join(cfg.TVB_STORAGE,
                                     ExportManager.EXPORT_FOLDER_NAME)
        if os.path.exists(export_folder):
            shutil.rmtree(export_folder)
Example #25
    def test_delete_dt_free_HDD_space(self):
        """
        Launch two operations and give the user enough available space so that both finish.
        """
        module = "tvb_test.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test": 100}
        TVBSettings.MAX_DISK_SPACE = float(
            adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                      "TEMP")

        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
        self.assertEqual(len(dts), 0)
        self.operation_service.initiate_operation(
            self.test_user,
            self.test_project.id,
            adapter,
            tmp_folder,
            method_name=ABCAdapter.LAUNCH_METHOD,
            **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
        self.assertEqual(len(dts), 1)

        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT,
                         "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")

        # Now update the maximum disk size to be the size of the previously resulting datatype (transform from kB to MB)
        # plus what is estimated to be required by the next one (transform from B to MB)
        ProjectService().remove_datatype(self.test_project.id, datatype.gid)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
        self.assertEqual(len(dts), 0)

        self.operation_service.initiate_operation(
            self.test_user,
            self.test_project.id,
            adapter,
            tmp_folder,
            method_name=ABCAdapter.LAUNCH_METHOD,
            **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT,
                         "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")
Example #26
    def setUp(self):
        """
        Prepare before each test.
        """
        self.project_service = ProjectService()
        self.flow_service = FlowService()
        self.structure_helper = FilesHelper()

        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user, "ProjectStructure")

        self.relevant_filter = StaticFiltersFactory.build_datatype_filters(single_filter=StaticFiltersFactory.RELEVANT_VIEW)
        self.full_filter = StaticFiltersFactory.build_datatype_filters(single_filter=StaticFiltersFactory.FULL_VIEW)
Example #27
    def test_import_surface_gifti_data(self):
        """
            This method tests the import of a surface from a GIFTI file.
            !!! Important: We changed this test to execute only the GIFTI parse,
                because storing the surface takes too long (~9 min) since
                the normals need to be calculated.
        """
        operation_id = self.datatypeFactory.get_operation().id
        storage_path = FilesHelper().get_operation_folder(self.test_project.name, operation_id)

        parser = GIFTIParser(storage_path, operation_id)
        surface = parser.parse(self.GIFTI_SURFACE_FILE)

        self.assertEqual(131342, len(surface.vertices))
        self.assertEqual(262680, len(surface.triangles))
Example #28
    def test_import_timeseries_gifti_data(self):
        """
            This method tests the import of a time series from a GIFTI file.
            !!! Important: We changed this test to execute only the GIFTI parse,
                because storing the surface takes too long (~9 min) since
                the normals need to be calculated.
        """
        operation_id = self.datatypeFactory.get_operation().id
        storage_path = FilesHelper().get_operation_folder(self.test_project.name, operation_id)

        parser = GIFTIParser(storage_path, operation_id)
        time_series = parser.parse(self.GIFTI_TIME_SERIES_FILE)

        data_shape = time_series.read_data_shape()

        self.assertEqual(135, data_shape[0])
        self.assertEqual(143479, data_shape[1])
Example #29
    def _create_timeseries(self):
        """Launch adapter to persist a TimeSeries entity"""
        storage_path = FilesHelper().get_project_folder(
            self.test_project, str(self.operation.id))

        time_series = TimeSeries()
        time_series.sample_period = 10.0
        time_series.start_time = 0.0
        time_series.storage_path = storage_path
        time_series.write_data_slice(numpy.array([1.0, 2.0, 3.0]))
        time_series.close_file()
        time_series.sample_period_unit = 'ms'

        self._store_entity(time_series, "TimeSeries",
                           "tvb.datatypes.time_series")
        timeseries = self.flow_service.get_available_datatypes(
            self.test_project.id, "tvb.datatypes.time_series.TimeSeries")
        self.assertEqual(len(timeseries), 1, "Should be only one TimeSeries")
Example #30
 def __init__(self, remote_machine=None, remote_user=None):
     """
     :param remote_machine: IP for the remote machine
     :param remote_user: Username valid on remote_machine. No further password should be needed for connecting.
     """
     self.logger = get_logger(self.__class__.__module__)
     self.remote_machine = remote_machine
     self.remote_user = remote_user
     self.flow_service = FlowService()
     self.file_handler = FilesHelper()
     
     folder_default_data = os.path.dirname(demo_root.__file__)
     file_order = os.path.join(folder_default_data, self.FILE_NODES_ORDER)
     self.expected_nodes_order = read_list_data(file_order, dtype=numpy.int32,  usecols=[0])
     
     zip_path = os.path.join(folder_default_data, self.CONNECTIVITY_DEFAULT)
     if not (os.path.exists(zip_path) and os.path.isfile(zip_path)):
         raise ConnectException("Could not find default Connectivity for the pipeline! " + str(zip_path))
     self.default_connectivity_zip_path = zip_path