Example #1
    def build(data=4, op=None, cortical=False):
        surface = surface_factory(data, cortical=cortical)
        if op is None:
            op = operation_factory()

        storage_path = FilesHelper().get_project_folder(op.project, str(op.id))
        surface_db = h5.store_complete(surface, storage_path)
        surface_db.fk_from_operation = op.id
        return dao.store_entity(surface_db), surface
Example #2
 def transactional_setup_method(self):
     self.test_user = TestFactory.create_user('Rest_User')
     self.test_project = TestFactory.create_project(
         self.test_user, 'Rest_Project', users=[self.test_user.id])
     self.operations_resource = GetOperationsInProjectResource()
     self.status_resource = GetOperationStatusResource()
     self.results_resource = GetOperationResultsResource()
     self.launch_resource = LaunchOperationResource()
     self.files_helper = FilesHelper()
Example #3
 def set_operation_id(self, operation_id):
     """
     Setter for FK_operation_id.
     """
     self.fk_from_operation = operation_id
     parent_project = dao.get_project_for_operation(operation_id)
     self.storage_path = FilesHelper().get_project_folder(
         parent_project, str(operation_id))
     self._storage_manager = None
Example #4
 def __init__(self):
     BurstBaseController.__init__(self)
     self.range_parameters = SimulatorRangeParameters()
     self.burst_service2 = BurstService2()
     self.simulator_service = SimulatorService()
     self.files_helper = FilesHelper()
     self.cached_simulator_algorithm = self.flow_service.get_algorithm_by_module_and_class(
         IntrospectionRegistry.SIMULATOR_MODULE,
         IntrospectionRegistry.SIMULATOR_CLASS)
Example #5
    def build(type="EEG", nr_sensors=3, op=None):
        sensors = sensors_factory(type, nr_sensors)
        if op is None:
            op = operation_factory()

        storage_path = FilesHelper().get_project_folder(op.project, str(op.id))
        sensors_db = h5.store_complete(sensors, storage_path)
        sensors_db.fk_from_operation = op.id
        return dao.store_entity(sensors_db), sensors
Example #6
 def _prepare_input(operation, simulator_gid):
     # type: (Operation, str) -> list
     storage_path = FilesHelper().get_project_folder(
         operation.project, str(operation.id))
     input_files = []
     h5.gather_all_references_of_view_model(simulator_gid, storage_path,
                                            input_files)
     input_files = list(set(input_files))
     return input_files
Example #7
    def launch(self, view_model):
        # type: (TVBImporterModel) -> list
        """
        Execute import operations: unpack ZIP, build and store generic DataType objects.
        :raises LaunchException: when data_file is None, nonexistent, or invalid \
                    (e.g. incomplete meta-data, not in ZIP / HDF5 format etc. )
        """
        if view_model.data_file is None:
            raise LaunchException(
                "Please select file which contains data to import")

        service = ImportService()
        if os.path.exists(view_model.data_file):
            if zipfile.is_zipfile(view_model.data_file):
                current_op = dao.get_operation_by_id(self.operation_id)

                # Create a new TMP folder in which to extract the data
                tmp_folder = os.path.join(self.storage_path, "tmp_import")
                FilesHelper().unpack_zip(view_model.data_file, tmp_folder)
                operations = service.import_project_operations(
                    current_op.project, tmp_folder)
                shutil.rmtree(tmp_folder)
                self.nr_of_datatypes += len(operations)

            else:
                # upgrade file if necessary
                file_update_manager = FilesUpdateManager()
                file_update_manager.upgrade_file(view_model.data_file)

                folder, h5file = os.path.split(view_model.data_file)
                manager = HDF5StorageManager(folder, h5file)
                if manager.is_valid_hdf5_file():
                    datatype = None
                    try:
                        datatype = service.load_datatype_from_file(
                            view_model.data_file, self.operation_id)
                        service.check_import_references(
                            view_model.data_file, datatype)
                        service.store_datatype(datatype, view_model.data_file)
                        self.nr_of_datatypes += 1
                    except ImportException as excep:
                        self.log.exception(excep)
                        if datatype is not None:
                            target_path = h5.path_for_stored_index(datatype)
                            if os.path.exists(target_path):
                                os.remove(target_path)
                        raise LaunchException(
                            "Invalid file received as input. " + str(excep))
                else:
                    raise LaunchException(
                        "Uploaded file: %s is neither in ZIP or HDF5 format" %
                        view_model.data_file)

        else:
            raise LaunchException("File: %s to import does not exists." %
                                  view_model.data_file)
Example #8
    def __init__(self, project_path):

        self.project_path = project_path
        self.files_helper = FilesHelper()
        # This assumes that the old project metadata file can be parsed by the current version.
        self.project_meta = self.files_helper.read_project_metadata(project_path)
        from_version = self.project_meta.get('version', 0)

        super(ProjectUpdateManager, self).__init__(project_versions, from_version,
                                                   TvbProfile.current.version.PROJECT_VERSION)
Example #9
File: forms.py Project: yop0/tvb-root
 def __init__(self,
              traited_attribute,
              required_type,
              form,
              name,
              disabled=False):
     super(TraitUploadField, self).__init__(traited_attribute, form, name,
                                            disabled)
     self.required_type = required_type
     self.files_helper = FilesHelper()
Example #10
 def __init__(self):
     self.generic_attributes = GenericAttributes()
     self.generic_attributes.subject = DataTypeMetaData.DEFAULT_SUBJECT
     self.file_handler = FilesHelper()
     self.storage_path = '.'
     # Will be populated with the currently running operation's identifier
     self.operation_id = None
     self.user_id = None
     self.submitted_form = None
     self.log = get_logger(self.__class__.__module__)
Example #11
    def build(data=4, op=None, conn=None):
        if conn is None:
            conn = connectivity_factory(data)
        if op is None:
            op = operation_factory()

        storage_path = FilesHelper().get_project_folder(op.project, str(op.id))
        conn_db = h5.store_complete(conn, storage_path)
        conn_db.fk_from_operation = op.id
        return dao.store_entity(conn_db)
Example #12
    def test_load_burst_only(self):
        zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity',
                             'connectivity_66.zip')
        TestFactory.import_zip_connectivity(self.test_user, self.test_project,
                                            zip_path, "John")
        connectivity = TestFactory.get_entity(self.test_project,
                                              ConnectivityIndex)

        simulator_index = SimulatorIndex()
        simulator_index.fill_from_has_traits(self.session_stored_simulator)

        burst_config = BurstConfiguration(self.test_project.id,
                                          simulator_index.id)
        burst_config = dao.store_entity(burst_config)

        simulator_index.fk_from_operation = burst_config.id
        simulator_index = dao.store_entity(simulator_index)
        simulator_index.fk_parent_burst = burst_config.id
        simulator_index = dao.store_entity(simulator_index)

        burst = dao.get_bursts_for_project(self.test_project.id)

        self.sess_mock['burst_id'] = str(burst[0].id)
        self.sess_mock['_connectivity'] = connectivity.gid
        self.sess_mock['_conduction_speed'] = "3.0"
        self.sess_mock['_coupling'] = "Sigmoidal"

        with patch('cherrypy.session', self.sess_mock, create=True):
            common.add2session(common.KEY_SIMULATOR_CONFIG,
                               self.session_stored_simulator)
            self.simulator_controller.set_connectivity(**self.sess_mock._data)
            self.simulator_controller.set_stimulus(**self.sess_mock._data)

        storage_path = FilesHelper().get_project_folder(
            self.test_project, str(simulator_index.fk_from_operation))
        simulator_service = SimulatorService()
        SimulatorSerializer().serialize_simulator(
            self.session_stored_simulator, simulator_index.gid, None,
            storage_path)

        with patch('cherrypy.session', self.sess_mock, create=True):
            self.simulator_controller.load_burst_read_only(str(burst[0].id))
            is_simulator_load = common.get_from_session(KEY_IS_SIMULATOR_LOAD)
            is_simulator_copy = common.get_from_session(KEY_IS_SIMULATOR_COPY)
            last_loaded_form_url = common.get_from_session(
                KEY_LAST_LOADED_FORM_URL)

        database_simulator = dao.get_generic_entity(SimulatorIndex,
                                                    burst_config.id,
                                                    'fk_parent_burst')[0]

        assert simulator_index.gid == database_simulator.gid, "Simulator was not added correctly!"
        assert is_simulator_load, "Simulator Load Flag should be True!"
        assert not is_simulator_copy, "Simulator Copy Flag should be False!"
        assert last_loaded_form_url == '/burst/setup_pse', "Incorrect last form URL!"
Example #13
    def test_adapter_launch(self):
        """
        Test that the adapter launches and successfully generates a datatype measure entry.
        """
        meta = {
            DataTypeMetaData.KEY_SUBJECT: "John Doe",
            DataTypeMetaData.KEY_STATE: "RAW_DATA"
        }
        algo_group = FlowService().get_algorithm_by_module_and_class(
            SIMULATOR_MODULE, SIMULATOR_CLASS)[1]
        self.operation = model.Operation(self.test_user.id,
                                         self.test_project.id,
                                         algo_group.id,
                                         json.dumps(''),
                                         meta=json.dumps(meta),
                                         status=model.STATUS_STARTED,
                                         method_name=ABCAdapter.LAUNCH_METHOD)
        self.operation = dao.store_entity(self.operation)
        storage_path = FilesHelper().get_project_folder(
            self.test_project, str(self.operation.id))
        dummy_input = numpy.arange(1, 10001).reshape(10, 10, 10, 10)
        dummy_time = numpy.arange(1, 11)

        # Get connectivity
        connectivities = FlowService().get_available_datatypes(
            self.test_project.id, "tvb.datatypes.connectivity.Connectivity")[0]
        self.assertEqual(2, len(connectivities))
        connectivity_gid = connectivities[0][2]

        dummy_time_series = TimeSeriesRegion()
        dummy_time_series.storage_path = storage_path
        dummy_time_series.write_data_slice(dummy_input)
        dummy_time_series.write_time_slice(dummy_time)
        dummy_time_series.close_file()
        dummy_time_series.start_time = 0.0
        dummy_time_series.sample_period = 1.0
        dummy_time_series.connectivity = connectivity_gid

        adapter_instance = StoreAdapter([dummy_time_series])
        OperationService().initiate_prelaunch(self.operation, adapter_instance,
                                              {})

        dummy_time_series = dao.get_generic_entity(dummy_time_series.__class__,
                                                   dummy_time_series.gid,
                                                   'gid')[0]
        ts_metric_adapter = TimeseriesMetricsAdapter()
        resulted_metric = ts_metric_adapter.launch(dummy_time_series)
        self.assertTrue(isinstance(resulted_metric, DatatypeMeasure),
                        "Result should be a datatype measure.")
        self.assertTrue(
            len(resulted_metric.metrics) >= len(
                ts_metric_adapter.available_algorithms.keys()),
            "At least a result should have been generated for every metric.")
        for metric_value in resulted_metric.metrics.values():
            self.assertTrue(isinstance(metric_value, (float, int)))
Example #14
 def __create_operation(self):
     """
     Create an operation entity. Return the operation, algo_id and the storage path.
     """
     meta = {DataTypeMetaData.KEY_SUBJECT: "John Doe", DataTypeMetaData.KEY_STATE: "RAW_DATA"}
     algorithm = FlowService().get_algorithm_by_module_and_class(SIMULATOR_MODULE, SIMULATOR_CLASS)
     operation = model.Operation(self.user.id, self.project.id, algorithm.id, json.dumps(''), meta=json.dumps(meta),
                                 status=model.STATUS_STARTED)
     operation = dao.store_entity(operation)
     storage_path = FilesHelper().get_project_folder(self.project, str(operation.id))
     return operation, algorithm.id, storage_path
Example #15
def initialize_storage():
    """
    Create Projects storage root folder in case it does not exist.
    """
    try:
        helper = FilesHelper()
        helper.check_created()
    except FileStructureException:
        # Do nothing, because we do not have any UI to display the exception
        logger = get_logger("tvb.core.services.initialize_storage")
        logger.exception("Could not make sure the root folder exists!")
Example #16
    def run(self):
        """
        Get the required data from the operation queue and launch the operation.
        """
        # Try to get a spot to launch our own operation.
        LOCKS_QUEUE.get(True)
        operation_id = self.operation_id
        run_params = [TvbProfile.current.PYTHON_INTERPRETER_PATH, '-m', 'tvb.core.operation_async_launcher',
                      str(operation_id), TvbProfile.CURRENT_PROFILE_NAME]

        current_operation = dao.get_operation_by_id(operation_id)
        project_folder = FilesHelper().get_project_folder(current_operation.project)
        encryption_handler.inc_running_op_count(project_folder)
        # In the exceptional case where the user pressed stop while the thread was starting up,
        # we should no longer launch the operation.
        if self.stopped() is False:

            env = os.environ.copy()
            env['PYTHONPATH'] = os.pathsep.join(sys.path)
            # anything that was already in $PYTHONPATH should have been reproduced in sys.path

            launched_process = Popen(run_params, stdout=PIPE, stderr=PIPE, env=env)

            LOGGER.debug("Storing pid=%s for operation id=%s launched on local machine." % (operation_id,
                                                                                            launched_process.pid))
            op_ident = OperationProcessIdentifier(operation_id, pid=launched_process.pid)
            dao.store_entity(op_ident)

            if self.stopped():
                # Handle the exceptional case where the user pressed stop while the thread was starting up
                # and stop_operation is concurrently querying the OperationProcessIdentifier.
                self.stop_pid(launched_process.pid)

            subprocess_result = launched_process.communicate()
            LOGGER.info("Finished with launch of operation %s" % operation_id)
            returned = launched_process.wait()

            if returned != 0 and not self.stopped():
                # Process did not end as expected. (e.g. Segmentation fault)
                burst_service = BurstService()
                operation = dao.get_operation_by_id(self.operation_id)
                LOGGER.error("Operation suffered fatal failure! Exit code: %s Exit message: %s" % (returned,
                                                                                                   subprocess_result))
                burst_service.persist_operation_state(operation, STATUS_ERROR,
                                                      "Operation failed unexpectedly! Please check the log files.")

            del launched_process

        encryption_handler.dec_running_op_count(project_folder)
        encryption_handler.check_and_delete(project_folder)

        # Give back the queue spot now that the operation has finished
        CURRENT_ACTIVE_THREADS.remove(self)
        LOCKS_QUEUE.put(1)
Example #17
    def fire_simulation(self, project_gid, session_stored_simulator, temp_folder):
        temporary_folder = FilesHelper.create_temp_folder()

        h5.store_view_model(session_stored_simulator, temporary_folder)
        zip_folder_path = os.path.join(temp_folder, RequestFileKey.SIMULATION_FILE_NAME.value)
        FilesHelper().zip_folder(zip_folder_path, temporary_folder)
        shutil.rmtree(temporary_folder)

        file_obj = open(zip_folder_path, 'rb')
        return self.secured_request().post(self.build_request_url(RestLink.FIRE_SIMULATION.compute_url(True, {
            LinkPlaceholder.PROJECT_GID.value: project_gid
        })), files={RequestFileKey.SIMULATION_FILE_KEY.value: (RequestFileKey.SIMULATION_FILE_NAME.value, file_obj)})
Example #18
    def test_launch_operation_hdd_with_space(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        view_model = adapter.get_view_model()()

        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(view_model))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder,
                                                  model_view=view_model)
        self._assert_stored_ddti()
Example #19
    def build(test_user=None,
              test_project=None,
              is_simulation=False,
              store_vm=False,
              operation_status=STATUS_FINISHED,
              range_values=None,
              conn_gid=None):
        """
        Create persisted operation with a ViewModel stored
        :return: Operation entity after persistence.
        """
        if test_user is None:
            test_user = user_factory()
        if test_project is None:
            test_project = project_factory(test_user)

        vm_gid = uuid.uuid4()
        view_model = None

        if is_simulation:
            algorithm = dao.get_algorithm_by_module(SIMULATOR_MODULE,
                                                    SIMULATOR_CLASS)
            if store_vm:
                adapter = ABCAdapter.build_adapter(algorithm)
                view_model = adapter.get_view_model_class()()
                view_model.connectivity = connectivity_factory(
                    4).gid if conn_gid is None else conn_gid
                vm_gid = view_model.gid

        else:
            algorithm = dao.get_algorithm_by_module(TVB_IMPORTER_MODULE,
                                                    TVB_IMPORTER_CLASS)
            if store_vm:
                adapter = ABCAdapter.build_adapter(algorithm)
                view_model = adapter.get_view_model_class()()
                view_model.data_file = "."
                vm_gid = view_model.gid

        operation = Operation(vm_gid.hex,
                              test_user.id,
                              test_project.id,
                              algorithm.id,
                              status=operation_status,
                              range_values=range_values)
        dao.store_entity(operation)

        if store_vm:
            op_folder = FilesHelper().get_project_folder(
                test_project, str(operation.id))
            h5.store_view_model(view_model, op_folder)

        # Make sure lazy attributes are correctly loaded.
        return dao.get_operation_by_id(operation.id)
Example #20
    def launch(self, data_file):
        """
        Execute import operations: unpack ZIP, build and store generic DataType objects.

        :param data_file: an archive (ZIP / HDF5) containing the `DataType`

        :raises LaunchException: when data_file is None, nonexistent, or invalid \
                    (e.g. incomplete meta-data, not in ZIP / HDF5 format etc. )
        """
        if data_file is None:
            raise LaunchException(
                "Please select file which contains data to import")

        if os.path.exists(data_file):
            if zipfile.is_zipfile(data_file):
                current_op = dao.get_operation_by_id(self.operation_id)

                # Create a new TMP folder in which to extract the data
                tmp_folder = os.path.join(self.storage_path, "tmp_import")
                FilesHelper().unpack_zip(data_file, tmp_folder)
                operations = ImportService().import_project_operations(
                    current_op.project, self.storage_path)
                shutil.rmtree(tmp_folder)
                self.nr_of_datatypes += len(operations)

            else:
                # upgrade file if necessary
                file_update_manager = FilesUpdateManager()
                file_update_manager.upgrade_file(data_file)

                folder, h5file = os.path.split(data_file)
                manager = HDF5StorageManager(folder, h5file)
                if manager.is_valid_hdf5_file():
                    datatype = None
                    try:
                        service = ImportService()
                        datatype = service.load_datatype_from_file(
                            folder, h5file, self.operation_id)
                        service.store_datatype(datatype)
                        self.nr_of_datatypes += 1
                    except Exception as excep:
                        # If the import operation failed, delete the file from disk.
                        if datatype is not None and os.path.exists(
                                datatype.get_storage_file_path()):
                            os.remove(datatype.get_storage_file_path())
                        self.log.exception(excep)
                        raise LaunchException(
                            "Invalid file received as input. Most probably incomplete "
                            "meta-data ...  " + str(excep))
                else:
                    raise LaunchException(
                        "Uploaded file: %s is neither in ZIP or HDF5 format" %
                        data_file)
Example #21
    def transactional_teardown_method(self):
        """
        Clean-up tests data
        """
        user = TestFactory.create_user('Exporter_Tests_User2')
        project = TestFactory.create_project(user, 'Exporter_Tests_Project2')
        FilesHelper().remove_project_structure(project.name)

        # Remove EXPORT folder
        export_folder = os.path.join(TvbProfile.current.TVB_STORAGE, ExportManager.EXPORT_FOLDER_NAME)
        if os.path.exists(export_folder):
            shutil.rmtree(export_folder)
Example #22
File: forms.py Project: nedkab/tvb-root
 def __init__(self,
              required_type,
              form,
              name,
              disabled=False,
              required=False,
              label='',
              doc=''):
     super(UploadField, self).__init__(form, name, disabled, required,
                                       label, doc)
     self.required_type = required_type
     self.files_helper = FilesHelper()
Example #23
    def tearDown(self):
        """
        Clean-up tests data
        """
        project = self.datatypeFactory.get_project()
        FilesHelper().remove_project_structure(project.name)

        # Remove EXPORT folder
        export_folder = os.path.join(TvbProfile.current.TVB_STORAGE,
                                     ExportManager.EXPORT_FOLDER_NAME)
        if os.path.exists(export_folder):
            shutil.rmtree(export_folder)
Example #24
    def export_project(self, project, optimize_size=False):
        """
        Given a project root and the TVB storage_path, create a ZIP
        ready for export.
        :param project: project object which identifies the project to be exported
        :param optimize_size: when True, export only the operation folders of DataTypes with the visibility flag set
        """
        if project is None:
            raise ExportException("Please provide a project to be exported")

        files_helper = FilesHelper()
        project_folder = files_helper.get_project_folder(project)
        project_datatypes = self._gather_project_datatypes(project, optimize_size)
        to_be_exported_folders = []
        considered_op_ids = []

        if optimize_size:
            ## take only the DataType with visibility flag set ON
            for dt in project_datatypes:
                if dt[KEY_OPERATION_ID] not in considered_op_ids:
                    to_be_exported_folders.append({'folder': files_helper.get_project_folder(project,
                                                                                             str(dt[KEY_OPERATION_ID])),
                                                   'archive_path_prefix': str(dt[KEY_OPERATION_ID]) + os.sep})
                    considered_op_ids.append(dt[KEY_OPERATION_ID])

        else:
            to_be_exported_folders.append({'folder': project_folder,
                                           'archive_path_prefix': '', 'exclude': ["TEMP"]})

        # Compute path and name of the zip file
        now = datetime.now()
        date_str = now.strftime("%Y-%m-%d_%H-%M")
        zip_file_name = "%s_%s.%s" % (date_str, project.name, self.ZIP_FILE_EXTENSION)

        export_folder = self._build_data_export_folder(project)
        result_path = os.path.join(export_folder, zip_file_name)

        with TvbZip(result_path, "w") as zip_file:
            # Pack project [filtered] content into a ZIP file:
            LOG.debug("Done preparing, now we will write folders " + str(len(to_be_exported_folders)))
            LOG.debug(str(to_be_exported_folders))
            for pack in to_be_exported_folders:
                zip_file.write_folder(**pack)
            LOG.debug("Done exporting files, now we will write the burst configurations...")
            self._export_bursts(project, project_datatypes, zip_file)
            LOG.debug("Done exporting burst configurations, now we will export linked DTs")
            self._export_linked_datatypes(project, zip_file)
            ## Make sure the Project.xml file gets copied:
            if optimize_size:
                LOG.debug("Done linked, now we write the project xml")
                zip_file.write(files_helper.get_project_meta_file_path(project.name), files_helper.TVB_PROJECT_FILE)
            LOG.debug("Done, closing")

        return result_path
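
A minimal usage sketch of the export method above, assuming it lives on an ExportManager instance and that a project entity is already persisted; the dao.get_project_by_id lookup shown here is only illustrative:

    # Illustrative only: export a persisted project as a ZIP archive.
    manager = ExportManager()
    project = dao.get_project_by_id(1)  # assumed lookup helper; any persisted Project entity works
    zip_path = manager.export_project(project, optimize_size=True)
    print("Exported archive written to: %s" % zip_path)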
Example #25
 def __init__(self):
     # It will be populated with keys from DataTypeMetaData
     self.meta_data = {
         DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT
     }
     self.file_handler = FilesHelper()
     self.storage_path = '.'
     # Will be populated with the currently running operation's identifier
     self.operation_id = None
     self.user_id = None
     self.log = get_logger(self.__class__.__module__)
     self.tree_manager = InputTreeManager()
Example #26
    def test_get_filtered_datatypes(self, dummy_datatype_index_factory):
        """
        Test the filter function when retrieving dataTypes.
        """
        # Create some test operations
        start_dates = [datetime.now(),
                       datetime.strptime("08-06-2010", "%m-%d-%Y"),
                       datetime.strptime("07-21-2010", "%m-%d-%Y"),
                       datetime.strptime("05-06-2010", "%m-%d-%Y"),
                       datetime.strptime("07-21-2011", "%m-%d-%Y")]
        end_dates = [datetime.now(),
                     datetime.strptime("08-12-2010", "%m-%d-%Y"),
                     datetime.strptime("08-12-2010", "%m-%d-%Y"),
                     datetime.strptime("08-12-2011", "%m-%d-%Y"),
                     datetime.strptime("08-12-2011", "%m-%d-%Y")]
        for i in range(5):
            operation = model_operation.Operation(self.test_user.id, self.test_project.id, self.algorithm.id, 'test params',
                                        status=model_operation.STATUS_FINISHED, start_date=start_dates[i],
                                        completion_date=end_dates[i])
            operation = dao.store_entity(operation)
            storage_path = FilesHelper().get_project_folder(self.test_project, str(operation.id))
            if i < 4:
                datatype_inst = dummy_datatype_index_factory()
                datatype_inst.type = "DummyDataTypeIndex"
                datatype_inst.subject = "John Doe" + str(i)
                datatype_inst.state = "RAW"
                datatype_inst.fk_from_operation = operation.id
                dao.store_entity(datatype_inst)
            else:
                for _ in range(2):
                    datatype_inst = dummy_datatype_index_factory()
                    datatype_inst.storage_path = storage_path
                    datatype_inst.type = "DummyDataTypeIndex"
                    datatype_inst.subject = "John Doe" + str(i)
                    datatype_inst.state = "RAW"
                    datatype_inst.string_data = ["data"]
                    datatype_inst.fk_from_operation = operation.id
                    dao.store_entity(datatype_inst)

        returned_data = self.flow_service.get_available_datatypes(self.test_project.id, DummyDataTypeIndex)[0]
        for row in returned_data:
            if row[1] != 'DummyDataTypeIndex':
                raise AssertionError("Some invalid data was returned!")
        assert 4 == len(returned_data), "Invalid length of result"

        filter_op = FilterChain(fields=[FilterChain.datatype + ".state", FilterChain.operation + ".start_date"],
                                values=["RAW", datetime.strptime("08-01-2010", "%m-%d-%Y")], operations=["==", ">"])
        returned_data = self.flow_service.get_available_datatypes(self.test_project.id, Datatype1, filter_op)[0]
        returned_subjects = [one_data[3] for one_data in returned_data]

        if "John Doe0" not in returned_subjects or "John Doe1" not in returned_subjects or len(returned_subjects) != 2:
            raise AssertionError("DataTypes were not filtered properly!")
Example #27
 def test_remove_entity_with_links_moves_links(self, initialize_two_projects):
     project_path = FilesHelper().get_project_folder(self.src_project)
     self.red_datatype.storage_path = project_path
     dest_id = self.dest_project.id
     self.flow_service.create_link([self.red_datatype.id], dest_id)
     assert 1 == self.red_datatypes_in(dest_id)
     # remove original datatype
     self.project_service.remove_datatype(self.src_project.id, self.red_datatype.gid)
     # the datatype has been moved to one of its links
     assert 1 == self.red_datatypes_in(dest_id)
     # the dest project no longer has a link but owns the data type
     dt_links = dao.get_linked_datatypes_in_project(dest_id)
     assert 0 == len(dt_links)
Example #28
    def setUp(self):
        """
        Prepare before each test.
        """
        self.project_service = ProjectService()
        self.flow_service = FlowService()
        self.structure_helper = FilesHelper()

        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user, "ProjectStructure")

        self.relevant_filter = StaticFiltersFactory.build_datatype_filters(single_filter=StaticFiltersFactory.RELEVANT_VIEW)
        self.full_filter = StaticFiltersFactory.build_datatype_filters(single_filter=StaticFiltersFactory.FULL_VIEW)
Example #29
def _adapt_epileptor_simulations():
    """
    Previous simulations on the EpileptorWithPermitivity model should be converted to use the Epileptor model.
    As the parameters of the two models have different ranges and defaults, we do not translate parameters;
    we only set Epileptor as the model instead of EpileptorPermittivityCoupling, and leave the model params at their defaults.
    """
    session = SA_SESSIONMAKER()
    epileptor_old = "EpileptorPermittivityCoupling"
    epileptor_new = "Epileptor"
    param_model = "model"

    try:
        all_ep_ops = session.query(model.Operation).filter(
            model.Operation.parameters.ilike('%"' + epileptor_old + '"%')).all()
        files_helper = FilesHelper()
        all_bursts = dict()

        for ep_op in all_ep_ops:
            try:
                op_params = parse_json_parameters(ep_op.parameters)
                if op_params[param_model] != epileptor_old:
                    LOGGER.debug("Skipping op " + str(op_params[param_model]) + " -- " + str(ep_op))
                    continue

                LOGGER.debug("Updating " + str(op_params))
                op_params[param_model] = epileptor_new
                ep_op.parameters = json.dumps(op_params, cls=MapAsJson.MapAsJsonEncoder)
                LOGGER.debug("New params:" + ep_op.parameters)
                files_helper.write_operation_metadata(ep_op)

                burst = dao.get_burst_for_operation_id(ep_op.id)
                if burst is not None:
                    LOGGER.debug("Updating burst:" + str(burst))
                    burst.prepare_after_load()
                    burst.simulator_configuration[param_model] = {'value': epileptor_new}
                    burst._simulator_configuration = json.dumps(burst.simulator_configuration,
                                                                cls=MapAsJson.MapAsJsonEncoder)
                    if burst.id not in all_bursts:
                        all_bursts[burst.id] = burst

            except Exception:
                LOGGER.exception("Could not process " + str(ep_op))

        session.add_all(all_ep_ops)
        session.add_all(list(all_bursts.values()))
        session.commit()

    except Exception:
        LOGGER.exception("Could not update Simulation Epileptor Params")
    finally:
        session.close()
Example #30
    def _build_data_export_folder(self, data):
        """
        This method computes the folder where the results of an export operation will be
        stored for a while (e.g. until the download is done, or for 1 day).
        """
        now = datetime.now()
        date_str = "%d-%d-%d_%d-%d-%d_%d" % (now.year, now.month, now.day, now.hour,
                                             now.minute, now.second, now.microsecond)
        tmp_str = date_str + "@" + data.gid
        data_export_folder = os.path.join(self.export_folder, tmp_str)
        files_helper = FilesHelper()
        files_helper.check_created(data_export_folder)

        return data_export_folder
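
For illustration only (hypothetical values): because the format string uses plain %d without zero padding, exporting a DataType with gid 'abc123' on 2020-06-01 at 09:05:07.000042 would produce a sub-folder named

    2020-6-1_9-5-7_42@abc123

inside self.export_folder, created on demand via FilesHelper().check_created().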