    def _export_linked_datatypes(self, project, zip_file):
        files_helper = FilesHelper()
        linked_paths = self._get_linked_datatypes_storage_path(project)

        if not linked_paths:
            # do not export an empty operation
            return

        # Make an import operation which will contain links to other projects
        algo = dao.get_algorithm_by_module(TVB_IMPORTER_MODULE, TVB_IMPORTER_CLASS)
        op = model.Operation(None, project.id, algo.id, '')
        op.project = project
        op.algorithm = algo
        op.id = 'links-to-external-projects'
        op.start_now()
        op.mark_complete(model.STATUS_FINISHED)

        # write operation.xml to disk
        files_helper.write_operation_metadata(op)
        op_folder = files_helper.get_operation_folder(op.project.name, op.id)
        operation_xml = files_helper.get_operation_meta_file_path(op.project.name, op.id)
        op_folder_name = os.path.basename(op_folder)

        # add operation.xml
        zip_file.write(operation_xml, op_folder_name + '/' + os.path.basename(operation_xml))

        # add linked datatypes to archive in the import operation
        for pth in linked_paths:
            zip_pth = op_folder_name + '/' + os.path.basename(pth)
            zip_file.write(pth, zip_pth)

        # remove these files, since we only want them in export archive
        files_helper.remove_folder(op_folder)
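

# A minimal sketch of how _export_linked_datatypes might be driven when
# assembling a project export archive. The `exporter` instance, `project`
# entity and target path are illustrative assumptions; zipfile is stdlib.
import zipfile

def export_project_with_links(exporter, project, archive_path):
    # open the target archive; the helper appends the synthetic import
    # operation (operation.xml) plus the linked datatype files under it
    with zipfile.ZipFile(archive_path, "w", zipfile.ZIP_DEFLATED) as zip_file:
        exporter._export_linked_datatypes(project, zip_file)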
Example #3
def _adapt_epileptor_simulations():
    """
    Previous Simulations on EpileptorWithPermitivity model, should be converted to use the Epileptor model.
    As the parameters from the two models are having different ranges and defaults, we do not translate parameters,
    we only set the Epileptor as model instead of EpileptorPermittivityCoupling, and leave the model params to defaults.
    """
    session = SA_SESSIONMAKER()
    epileptor_old = "EpileptorPermittivityCoupling"
    epileptor_new = "Epileptor"
    param_model = "model"

    try:
        all_ep_ops = session.query(model.Operation).filter(
            model.Operation.parameters.ilike('%"' + epileptor_old + '"%')).all()
        files_helper = FilesHelper()
        all_bursts = dict()

        for ep_op in all_ep_ops:
            try:
                op_params = parse_json_parameters(ep_op.parameters)
                if op_params[param_model] != epileptor_old:
                    LOGGER.debug("Skipping op " + str(op_params[param_model]) + " -- " + str(ep_op))
                    continue

                LOGGER.debug("Updating " + str(op_params))
                op_params[param_model] = epileptor_new
                ep_op.parameters = json.dumps(op_params, cls=MapAsJson.MapAsJsonEncoder)
                LOGGER.debug("New params:" + ep_op.parameters)
                files_helper.write_operation_metadata(ep_op)

                burst = dao.get_burst_for_operation_id(ep_op.id)
                if burst is not None:
                    LOGGER.debug("Updating burst:" + str(burst))
                    burst.prepare_after_load()
                    burst.simulator_configuration[param_model] = {'value': epileptor_new}
                    burst._simulator_configuration = json.dumps(burst.simulator_configuration,
                                                                cls=MapAsJson.MapAsJsonEncoder)
                    if burst.id not in all_bursts:
                        all_bursts[burst.id] = burst

            except Exception:
                LOGGER.exception("Could not process " + str(ep_op))

        session.add_all(all_ep_ops)
        session.add_all(all_bursts.values())
        session.commit()

    except Exception:
        LOGGER.exception("Could not update Simulation Epileptor Params")
    finally:
        session.close()
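

# For illustration, the parameter rewrite above boils down to a JSON
# round-trip like this (plain json shown; the real code passes
# cls=MapAsJson.MapAsJsonEncoder so TVB-specific values survive):
import json

raw = '{"model": "EpileptorPermittivityCoupling", "conduction_speed": 3.0}'
params = json.loads(raw)
if params["model"] == "EpileptorPermittivityCoupling":
    params["model"] = "Epileptor"   # swap the model name, keep the other params
print(json.dumps(params))           # {"model": "Epileptor", "conduction_speed": 3.0}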
Example #5
class FigureService:
    """
    Service layer for Figure entities.
    """
    _TYPE_PNG = "png"
    _TYPE_SVG = "svg"

    _BRANDING_BAR_PNG = os.path.join(os.path.dirname(__file__), "resources",
                                     "branding_bar.png")
    _BRANDING_BAR_SVG = os.path.join(os.path.dirname(__file__), "resources",
                                     "branding_bar.svg")

    _DEFAULT_SESSION_NAME = "Default"
    _DEFAULT_IMAGE_FILE_NAME = "snapshot."

    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.file_helper = FilesHelper()

    def _write_png(self, store_path, export_data):
        img_data = base64.b64decode(export_data)     # decode the base64 payload
        final_image = Image.open(BytesIO(img_data))  # wrap the raw bytes in a binary stream for PIL

        branding_bar = Image.open(FigureService._BRANDING_BAR_PNG)
        # paste the branding bar over the bottom edge; the third argument uses
        # the bar's own alpha channel as the paste mask
        final_image.paste(branding_bar, (0, final_image.size[1] - branding_bar.size[1]), branding_bar)

        final_image.save(store_path)  # store to disk as PNG

    def _write_svg(self, store_path, export_data):
        dom = xml.dom.minidom.parseString(export_data)
        figureSvg = dom.getElementsByTagName('svg')[0]  # get the original image

        dom = xml.dom.minidom.parse(FigureService._BRANDING_BAR_SVG)

        try:
            width = float(figureSvg.getAttribute('width').replace('px', ''))
            height = float(figureSvg.getAttribute('height').replace('px', ''))
        except ValueError:  # defaults when dimensions are not given
            width = 1024
            height = 768
            figureSvg.setAttribute("width", str(width))
            figureSvg.setAttribute("height", str(height))

        finalSvg = dom.createElement('svg')                                     # prepare the final svg
        brandingSvg = dom.getElementsByTagName('svg')[0]                        # get the branding bar
        brandingSvg.setAttribute("y", str(height))                              # position it below the figure
        height += float(brandingSvg.getAttribute('height').replace('px', ''))   # add the branding bar's height
        finalSvg.setAttribute("width", str(width))                              # same width as the original figure
        finalSvg.setAttribute("height", str(height))

        finalSvg.appendChild(figureSvg)  # add the image
        finalSvg.appendChild(brandingSvg)  # and the branding bar

        # Write the final SVG to disk
        with open(store_path, 'w') as dest:
            finalSvg.writexml(dest)

    def _image_path(self, project_name, img_type):
        "Generate path where to store image"
        images_folder = self.file_helper.get_images_folder(project_name)
        file_name = FigureService._DEFAULT_IMAGE_FILE_NAME + img_type
        return utils.get_unique_file_name(images_folder, file_name)

    @staticmethod
    def _generate_image_name(project, user, operation, image_name):
        if not image_name:
            if operation is not None:
                # create a name based on the operation that created the image
                # e.g. TVB-Algo-Name-354
                image_name = operation.algorithm.displayname.replace(' ', '-')
            else:
                # default to a generic name
                image_name = "figure"
        figure_count = dao.get_figure_count(project.id, user.id) + 1
        return 'TVB-%s-%s' % (image_name, figure_count)
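
    # Example (hypothetical values): with an algorithm display name
    # "Simulation Launcher" and 4 figures already stored for this
    # project/user pair, _generate_image_name(...) returns
    # "TVB-Simulation-Launcher-5".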

    def store_result_figure(self,
                            project,
                            user,
                            img_type,
                            export_data,
                            image_name=None,
                            operation_id=None):
        """
        Store the result image in a file and keep a reference to it in the DB.
        """
        store_path, file_name = self._image_path(project.name, img_type)

        if img_type == FigureService._TYPE_PNG:  # PNG file from canvas
            self._write_png(store_path, export_data)
        elif img_type == FigureService._TYPE_SVG:  # SVG file from svg viewer
            self._write_svg(store_path, export_data)

        if operation_id:
            operation = dao.get_operation_by_id(operation_id)
        else:
            operation = None
            operation_id = None

        image_name = self._generate_image_name(project, user, operation,
                                               image_name)

        # Store entity into DB
        entity = model.ResultFigure(operation_id, user.id, project.id,
                                    FigureService._DEFAULT_SESSION_NAME,
                                    image_name, file_name, img_type)
        entity = dao.store_entity(entity)

        # Load instance from DB to have lazy fields loaded
        figure = dao.load_figure(entity.id)
        # Write image meta data to disk
        self.file_helper.write_image_metadata(figure)

        if operation:
            # Force writing operation meta data on disk.
            # This is important later for operation import
            self.file_helper.write_operation_metadata(operation)

    def retrieve_result_figures(self,
                                project,
                                user,
                                selected_session_name='all_sessions'):
        """
        Retrieve from DB all the stored Displayer previews that belong to the specified session. The
        previews are for the current user and project, grouped by session.
        """
        result, previews_info = dao.get_previews(project.id, user.id,
                                                 selected_session_name)
        for name in result:
            for figure in result[name]:
                figures_folder = self.file_helper.get_images_folder(
                    project.name)
                figure_full_path = os.path.join(figures_folder,
                                                figure.file_path)
                # Compute the path
                figure.file_path = utils.path2url_part(figure_full_path)
        return result, previews_info

    @staticmethod
    def load_figure(figure_id):
        """
        Loads a stored figure by its id.
        """
        return dao.load_figure(figure_id)

    def edit_result_figure(self, figure_id, **data):
        """
        Retrieve and edit a previously stored figure.
        """
        figure = dao.load_figure(figure_id)
        figure.session_name = data['session_name']
        figure.name = data['name']
        dao.store_entity(figure)

        # Load instance from DB to have lazy fields loaded.
        figure = dao.load_figure(figure_id)
        # Store figure meta data in an XML attached to the image.
        self.file_helper.write_image_metadata(figure)

    def remove_result_figure(self, figure_id):
        """
        Remove figure from DB and file storage.
        """
        figure = dao.load_figure(figure_id)

        # Delete all figure related files from disk.
        figures_folder = self.file_helper.get_images_folder(
            figure.project.name)
        path2figure = os.path.join(figures_folder, figure.file_path)
        if os.path.exists(path2figure):
            os.remove(path2figure)
            self.file_helper.remove_image_metadata(figure)

        # Remove figure reference from DB.
        result = dao.remove_entity(model.ResultFigure, figure_id)
        return result
Example #6
class TestFilesHelper(TransactionalTestCase):
    """
    This class contains tests for the tvb.core.entities.file.files_helper module.
    """ 
    PROJECT_NAME = "test_proj"

    def transactional_setup_method(self):
        """
        Set up the context needed by the tests.
        """
        self.files_helper = FilesHelper()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user, self.PROJECT_NAME)

    def transactional_teardown_method(self):
        """ Remove generated project during tests. """
        self.delete_project_folders()

    def test_check_created(self):
        """ Test standard flows for check created. """
        self.files_helper.check_created()
        assert os.path.exists(root_storage), "Storage not created!"
        
        self.files_helper.check_created(os.path.join(root_storage, "test"))
        assert os.path.exists(root_storage), "Storage not created!"
        assert os.path.exists(os.path.join(root_storage, "test")), "Test directory not created!"

    def test_get_project_folder(self):
        """
        Test the get_project_folder method which should create a folder in case
        it doesn't already exist.
        """
        project_path = self.files_helper.get_project_folder(self.test_project)
        assert os.path.exists(project_path), "Folder doesn't exist"
        
        folder_path = self.files_helper.get_project_folder(self.test_project, "43")
        assert os.path.exists(project_path), "Folder doesn't exist"
        assert os.path.exists(folder_path), "Folder doesn't exist"

    def test_rename_project_structure(self):
        """ Try to rename the folder structure of a project. Standard flow. """
        self.files_helper.get_project_folder(self.test_project)
        path, name = self.files_helper.rename_project_structure(self.test_project.name, "new_name")
        assert path != name, "Rename didn't take effect."

    def test_rename_structure_same_name(self):
        """ Try to rename the folder structure of a project. Same name. """
        self.files_helper.get_project_folder(self.test_project)
        
        with pytest.raises(FileStructureException):
            self.files_helper.rename_project_structure(self.test_project.name, self.PROJECT_NAME)

    def test_remove_project_structure(self):
        """ Check that remove project structure deletes the corresponding folder. Standard flow. """
        full_path = self.files_helper.get_project_folder(self.test_project)
        assert os.path.exists(full_path), "Folder was not created."
        
        self.files_helper.remove_project_structure(self.test_project.name)
        assert not os.path.exists(full_path), "Project folder not deleted."

    def test_write_project_metadata(self):
        """  Write XML for test-project. """
        self.files_helper.write_project_metadata(self.test_project)
        expected_file = self.files_helper.get_project_meta_file_path(self.PROJECT_NAME)
        assert os.path.exists(expected_file)
        project_meta = XMLReader(expected_file).read_metadata()
        loaded_project = model_project.Project(None, None)
        loaded_project.from_dict(project_meta, self.test_user.id)
        assert self.test_project.name == loaded_project.name
        assert self.test_project.description == loaded_project.description
        assert self.test_project.gid == loaded_project.gid
        expected_dict = self.test_project.to_dict()[1]
        del expected_dict['last_updated']
        found_dict = loaded_project.to_dict()[1]
        del found_dict['last_updated']
        assert self._dictContainsSubset(expected_dict, found_dict)
        assert self._dictContainsSubset(found_dict, expected_dict)

    def test_write_operation_metadata(self):
        """
        Test that a correct XML is created for an operation.
        """
        operation = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
        expected_file = self.files_helper.get_operation_meta_file_path(self.PROJECT_NAME, operation.id)
        assert not os.path.exists(expected_file)
        self.files_helper.write_operation_metadata(operation)
        assert os.path.exists(expected_file)
        operation_meta = XMLReader(expected_file).read_metadata()
        loaded_operation = model_operation.Operation(None, None, None, None)
        loaded_operation.from_dict(operation_meta, dao, user_id=self.test_user.id)
        expected_dict = operation.to_dict()[1]
        found_dict = loaded_operation.to_dict()[1]
        for key, value in expected_dict.items():
            assert str(value) == str(found_dict[key])
        # Now validate that operation metaData can be also updated
        assert "new_group_name" != found_dict['user_group']
        self.files_helper.update_operation_metadata(self.PROJECT_NAME, "new_group_name", operation.id) 
        found_dict = XMLReader(expected_file).read_metadata()  
        assert "new_group_name" == found_dict['user_group']

    def test_remove_dt_happy_flow(self, dummy_datatype_index_factory):
        """
        Happy flow for removing a file related to a DataType.
        """
        datatype = dummy_datatype_index_factory()
        h5_path = h5.path_for_stored_index(datatype)
        assert os.path.exists(h5_path), "Test file was not created!"
        self.files_helper.remove_datatype_file(h5_path)
        assert not os.path.exists(h5_path), "Test file was not deleted!"

    def test_remove_dt_non_existent(self, dummy_datatype_index_factory):
        """
        Try to call remove on a dataType with no H5 file.
        Should work.
        """
        datatype = dummy_datatype_index_factory()
        h5_path = h5.path_for_stored_index(datatype)
        wrong_path = os.path.join(h5_path, "WRONG_PATH")
        assert not os.path.exists(wrong_path)
        self.files_helper.remove_datatype_file(wrong_path)

    def test_move_datatype(self, dummy_datatype_index_factory):
        """
        Make sure associated H5 file is moved to a correct new location.
        """
        datatype = dummy_datatype_index_factory(project=self.test_project)
        old_file_path = h5.path_for_stored_index(datatype)
        assert os.path.exists(old_file_path), "Test file was not created!"
        full_path = h5.path_for_stored_index(datatype)
        self.files_helper.move_datatype(datatype, self.PROJECT_NAME + '2', "1", full_path)
        
        assert not os.path.exists(old_file_path), "Test file was not moved!"
        datatype.fk_from_operation = 43
        new_file_path = os.path.join(self.files_helper.get_project_folder(self.PROJECT_NAME + '2', "1"),
                                     os.path.basename(old_file_path))
        assert os.path.exists(new_file_path), "Test file was not created!"

    def test_find_relative_path(self):
        """
        Tests that relative path is computed properly.
        """
        rel_path = self.files_helper.find_relative_path("/root/up/to/here/test/it/now", "/root/up/to/here")
        assert rel_path == os.sep.join(["test", "it", "now"]), "Did not extract relative path as expected."

    def test_remove_files_valid(self):
        """
        Pass a valid list of files and check they are all removed.
        """
        file_list = ["test1", "test2", "test3"]
        for file_n in file_list:
            fp = open(file_n, 'w')
            fp.write('test')
            fp.close()
        for file_n in file_list:
            assert os.path.isfile(file_n)
        self.files_helper.remove_files(file_list)
        for file_n in file_list:
            assert not os.path.isfile(file_n)

    def test_remove_folder(self):
        """
        Create a folder, then check that remove_folder deletes it.
        """
        folder_name = "test_folder"
        os.mkdir(folder_name)
        assert os.path.isdir(folder_name), "Folder should be created."
        self.files_helper.remove_folder(folder_name)
        assert not os.path.isdir(folder_name), "Folder should be deleted."
        
    def test_remove_folder_non_existing_ignore_exc(self):
        """
        Call remove_folder on a non-existing folder with ignore_errors=True; no exception should be raised.
        """
        folder_name = "test_folder"
        assert not os.path.isdir(folder_name), "Folder should not exist before call."
        self.files_helper.remove_folder(folder_name, ignore_errors=True)

    def test_remove_folder_non_existing(self):
        """
        Call remove_folder on a non-existing folder without ignoring errors; a FileStructureException is expected.
        """
        folder_name = "test_folder"
        assert not os.path.isdir(folder_name), "Folder should not exist before call."
        with pytest.raises(FileStructureException):
            self.files_helper.remove_folder(folder_name, False)

    def _dictContainsSubset(self, expected, actual):
        """Return True when every (key, value) pair of `expected` is also present in `actual`."""
        for key, value in expected.items():
            if key not in actual or value != actual[key]:
                return False
        return True
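

# For reference: on Python 3 the subset check in _dictContainsSubset can be
# written with dict views; a minimal equivalent sketch.
def dict_contains_subset(expected, actual):
    """True when every (key, value) pair of `expected` also appears in `actual`."""
    return expected.items() <= actual.items()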
class FigureService:
    """
    Service layer for Figure entities.
    """
    _TYPE_PNG = "png"
    _TYPE_SVG = "svg"

    _BRANDING_BAR_PNG = os.path.join(os.path.dirname(__file__), "resources", "branding_bar.png")
    _BRANDING_BAR_SVG = os.path.join(os.path.dirname(__file__), "resources", "branding_bar.svg")

    _DEFAULT_SESSION_NAME = "Default"
    _DEFAULT_IMAGE_FILE_NAME = "snapshot."


    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.file_helper = FilesHelper()


    def store_result_figure(self, project, user, img_type, operation_id, export_data):
        """
        Store the result image in a file and keep a reference to it in the DB.
        """
        # Generate path where to store image
        store_path = self.file_helper.get_images_folder(project.name, operation_id)
        store_path = utils.get_unique_file_name(store_path, FigureService._DEFAULT_IMAGE_FILE_NAME + img_type)[0]
        file_path = os.path.split(store_path)[1]

        if img_type == FigureService._TYPE_PNG:                         # PNG file from canvas
            imgData = base64.b64decode(export_data)                     # decode the image
            fakeImgFile = BytesIO(imgData)                          # PIL.Image opens file-like objects, so wrap the raw bytes
            origImg = Image.open(fakeImgFile)
            brandingBar = Image.open(FigureService._BRANDING_BAR_PNG)

            finalSize = (origImg.size[0],                               # original width
                         origImg.size[1] + brandingBar.size[1])         # original height + brandingBar height
            finalImg = Image.new("RGBA", finalSize)

            finalImg.paste(origImg, (0, 0))                             # add the original image
            finalImg.paste(brandingBar, (0, origImg.size[1]))           # add the branding bar, below the original
                                                                        # the extra width will be discarded

            finalImg.save(store_path)                                   # store to disk

        elif img_type == FigureService._TYPE_SVG:                                   # SVG file from svg viewer
            dom = xml.dom.minidom.parseString(export_data)
            figureSvg = dom.getElementsByTagName('svg')[0]                          # get the original image

            dom = xml.dom.minidom.parse(FigureService._BRANDING_BAR_SVG)
            brandingSvg = dom.getElementsByTagName('svg')[0]                        # get the branding bar
            brandingSvg.setAttribute("y", figureSvg.getAttribute("height"))         # position it below the figure

            finalSvg = dom.createElement('svg')                                     # prepare the final svg
            width = figureSvg.getAttribute('width').replace('px', '')               # same width as original figure
            finalSvg.setAttribute("width", width)
            height = float(figureSvg.getAttribute('height').replace('px', ''))      # increase original height with
            height += float(brandingSvg.getAttribute('height').replace('px', ''))   # branding bar's height
            finalSvg.setAttribute("height", str(height))

            finalSvg.appendChild(figureSvg)                                         # add the image
            finalSvg.appendChild(brandingSvg)                                       # and the branding bar

            # Write the final SVG to disk
            with open(store_path, 'w') as dest:
                finalSvg.writexml(dest)

        operation = dao.get_operation_by_id(operation_id)
        file_name = 'TVB-%s-%s' % (operation.algorithm.name.replace(' ', '-'), operation_id)    # e.g. TVB-Algo-Name-352

        # Store entity into DB
        entity = model.ResultFigure(operation_id, user.id, project.id, FigureService._DEFAULT_SESSION_NAME,
                                    file_name, file_path, img_type)
        entity = dao.store_entity(entity)

        # Load instance from DB to have lazy fields loaded
        figure = dao.load_figure(entity.id)
        # Write image meta data to disk  
        self.file_helper.write_image_metadata(figure)

        # Force writing operation meta data on disk. 
        # This is important later for operation import
        self.file_helper.write_operation_metadata(operation)


    def retrieve_result_figures(self, project, user, selected_session_name='all_sessions'):
        """
        Retrieve from DB all the stored Displayer previews that belong to the specified session. The
        previews are for the current user and project, grouped by session.
        """
        result, previews_info = dao.get_previews(project.id, user.id, selected_session_name)
        for name in result:
            for figure in result[name]:
                figures_folder = self.file_helper.get_images_folder(project.name, figure.operation.id)
                figure_full_path = os.path.join(figures_folder, figure.file_path)
                # Compute the path 
                figure.file_path = utils.path2url_part(figure_full_path)
        return result, previews_info


    @staticmethod
    def load_figure(figure_id):
        """
        Loads a stored figure by its id.
        """
        return dao.load_figure(figure_id)


    def edit_result_figure(self, figure_id, **data):
        """
        Retrieve and edit a previously stored figure.
        """
        figure = dao.load_figure(figure_id)
        figure.session_name = data['session_name']
        figure.name = data['name']
        dao.store_entity(figure)

        # Load instance from DB to have lazy fields loaded.
        figure = dao.load_figure(figure_id)
        # Store figure meta data in an XML attached to the image.
        self.file_helper.write_image_metadata(figure)


    def remove_result_figure(self, figure_id):
        """
        Remove figure from DB and file storage.
        """
        figure = dao.load_figure(figure_id)

        # Delete all figure related files from disk.
        figures_folder = self.file_helper.get_images_folder(figure.project.name, figure.operation.id)
        path2figure = os.path.join(figures_folder, figure.file_path)
        if os.path.exists(path2figure):
            os.remove(path2figure)
            self.file_helper.remove_image_metadata(figure)

        # Remove figure reference from DB.
        result = dao.remove_entity(model.ResultFigure, figure_id)
        return result
        
        
        
Example #9
class ProjectService:
    """
    Services layer for Project entities.
    """

    def __init__(self):
        self.logger = get_logger(__name__)
        self.structure_helper = FilesHelper()

    def store_project(self, current_user, is_create, selected_id, **data):
        """
        We want to create/update a project entity.
        """
        # Validate Unique Name
        new_name = data["name"]
        if len(new_name) < 1:
            raise ProjectServiceException("Invalid project name!")
        projects_no = dao.count_projects_for_name(new_name, selected_id)
        if projects_no > 0:
            err = {'name': 'Please choose another name, this one is used!'}
            raise formencode.Invalid("Duplicate Name Error", {}, None, error_dict=err)
        started_operations = dao.get_operation_numbers(selected_id)[1]
        if started_operations > 0:
            raise ProjectServiceException("A project can not be renamed while operations are still running!")
        if is_create:
            current_proj = Project(new_name, current_user.id, data["description"])
            self.structure_helper.get_project_folder(current_proj)
        else:
            try:
                current_proj = dao.get_project_by_id(selected_id)
            except Exception as excep:
                self.logger.exception("An error has occurred!")
                raise ProjectServiceException(str(excep))
            if current_proj.name != new_name:
                self.structure_helper.rename_project_structure(current_proj.name, new_name)
            current_proj.name = new_name
            current_proj.description = data["description"]
        # Commit to make sure we have a valid ID
        current_proj.refresh_update_date()
        self.structure_helper.write_project_metadata(current_proj)
        current_proj = dao.store_entity(current_proj)

        # Retrieve, to initialize lazy attributes
        current_proj = dao.get_project_by_id(current_proj.id)
        # Update share settings on current Project entity
        visited_pages = []
        prj_admin = current_proj.administrator.username
        if 'visited_pages' in data and data['visited_pages']:
            visited_pages = data['visited_pages'].split(',')
        for page in visited_pages:
            members = UserService.retrieve_users_except(prj_admin, int(page), MEMBERS_PAGE_SIZE)[0]
            members = [m.id for m in members]
            dao.delete_members_for_project(current_proj.id, members)

        selected_user_ids = data["users"]
        if is_create and current_user.id not in selected_user_ids:
            # Make the project admin also member of the current project
            selected_user_ids.append(current_user.id)
        dao.add_members_to_project(current_proj.id, selected_user_ids)
        # Finish operation
        self.logger.debug("Edit/Save OK for project:" + str(current_proj.id) + ' by user:'******'-'
                result["count"] = one_op[2]
                result["gid"] = one_op[13]
                if one_op[3] is not None and one_op[3]:
                    try:
                        operation_group = dao.get_generic_entity(OperationGroup, one_op[3])[0]
                        result["group"] = operation_group.name
                        result["group"] = result["group"].replace("_", " ")
                        result["operation_group_id"] = operation_group.id
                        datatype_group = dao.get_datatypegroup_by_op_group_id(one_op[3])
                        result["datatype_group_gid"] = datatype_group.gid
                        result["gid"] = operation_group.gid
                        ## Filter only viewers for current DataTypeGroup entity:
                        result["view_groups"] = FlowService().get_visualizers_for_group(datatype_group.gid)
                    except Exception:
                        self.logger.exception("We will ignore group on entity:" + str(one_op))
                        result["datatype_group_gid"] = None
                else:
                    result['group'] = None
                    result['datatype_group_gid'] = None
                result["algorithm"] = dao.get_algorithm_by_id(one_op[4])
                result["user"] = dao.get_user_by_id(one_op[5])
                if type(one_op[6]) is str:
                    result["create"] = string2date(str(one_op[6]))
                else:
                    result["create"] = one_op[6]
                if type(one_op[7]) is str:
                    result["start"] = string2date(str(one_op[7]))
                else:
                    result["start"] = one_op[7]
                if type(one_op[8]) is str:
                    result["complete"] = string2date(str(one_op[8]))
                else:
                    result["complete"] = one_op[8]

                if result["complete"] is not None and result["start"] is not None:
                    result["duration"] = format_timedelta(result["complete"] - result["start"])
                result["status"] = one_op[9]
                result["additional"] = one_op[10]
                result["visible"] = True if one_op[11] > 0 else False
                result['operation_tag'] = one_op[12]
                result['figures'] = None
                if not result['group']:
                    datatype_results = dao.get_results_for_operation(result['id'])
                    result['results'] = []
                    for dt in datatype_results:
                        dt_loaded = ABCAdapter.load_entity_by_gid(dt.gid)
                        if dt_loaded:
                            result['results'].append(dt_loaded)
                        else:
                            self.logger.warning("Could not retrieve datatype %s" % str(dt))

                    operation_figures = dao.get_figures_for_operation(result['id'])

                    # Compute the full path to the figure / image on disk
                    for figure in operation_figures:
                        figures_folder = self.structure_helper.get_images_folder(figure.project.name)
                        figure_full_path = os.path.join(figures_folder, figure.file_path)
                        # Compute the path available from browser
                        figure.figure_path = utils.path2url_part(figure_full_path)

                    result['figures'] = operation_figures
                else:
                    result['results'] = None
                operations.append(result)
            except Exception:
                ## We got an exception when processing one Operation Row. We will continue with the rest of the rows.
                self.logger.exception("Could not prepare operation for display:" + str(one_op))
        return selected_project, total_ops_nr, operations, pages_no

    def retrieve_projects_for_user(self, user_id, current_page=1):
        """
        Return a list with all Projects visible for current user.
        """
        start_idx = PROJECTS_PAGE_SIZE * (current_page - 1)
        total = dao.get_projects_for_user(user_id, is_count=True)
        available_projects = dao.get_projects_for_user(user_id, start_idx, PROJECTS_PAGE_SIZE)
        pages_no = total // PROJECTS_PAGE_SIZE + (1 if total % PROJECTS_PAGE_SIZE else 0)
        for prj in available_projects:
            fns, sta, err, canceled, pending = dao.get_operation_numbers(prj.id)
            prj.operations_finished = fns
            prj.operations_started = sta
            prj.operations_error = err
            prj.operations_canceled = canceled
            prj.operations_pending = pending
            prj.disk_size = dao.get_project_disk_size(prj.id)
            prj.disk_size_human = format_bytes_human(prj.disk_size)
        self.logger.debug("Displaying " + str(len(available_projects)) + " projects in UI for user " + str(user_id))
        return available_projects, pages_no
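
    # Note: the computation above is ceiling division; equivalently
    #   pages_no = (total + PROJECTS_PAGE_SIZE - 1) // PROJECTS_PAGE_SIZE
    # e.g. total = 37, PROJECTS_PAGE_SIZE = 20  ->  pages_no = 2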

    @staticmethod
    def retrieve_all_user_projects(user_id, page_start=0, page_size=PROJECTS_PAGE_SIZE):
        """
        Return a list with all Projects visible for the given user, paged via page_start and page_size.
        """
        return dao.get_projects_for_user(user_id, page_start=page_start, page_size=page_size)

    @staticmethod
    def get_linkable_projects_for_user(user_id, data_id):
        """
        Find projects which are visible to the current user and into which the given datatype has not been linked yet.
        """
        return dao.get_linkable_projects_for_user(user_id, data_id)

    @transactional
    def remove_project(self, project_id):
        """
        Remove Project from DB and File Storage.
        """
        try:
            project2delete = dao.get_project_by_id(project_id)

            self.logger.debug("Deleting project: id=" + str(project_id) + ' name=' + project2delete.name)
            project_bursts = dao.get_bursts_for_project(project_id)
            for burst in project_bursts:
                dao.remove_entity(burst.__class__, burst.id)

            project_datatypes = dao.get_datatypes_in_project(project_id)
            for one_data in project_datatypes:
                self.remove_datatype(project_id, one_data.gid, True)

            links = dao.get_links_for_project(project_id)
            for one_link in links:
                dao.remove_entity(Links, one_link.id)

            self.structure_helper.remove_project_structure(project2delete.name)
            dao.delete_project(project_id)
            self.logger.debug("Deleted project: id=" + str(project_id) + ' name=' + project2delete.name)

        except RemoveDataTypeException as excep:
            self.logger.exception("Could not execute operation Node Remove!")
            raise ProjectServiceException(str(excep))
        except FileStructureException as excep:
            self.logger.exception("Could not delete because of rights!")
            raise ProjectServiceException(str(excep))
        except Exception as excep:
            self.logger.exception(str(excep))
            raise ProjectServiceException(str(excep))

    # ----------------- Methods for populating Data-Structure Page ---------------

    @staticmethod
    def get_datatype_in_group(group):
        """
        Return all dataTypes that are the result of the same DTgroup.
        """
        return dao.get_datatype_in_group(datatype_group_id=group)

    @staticmethod
    def get_datatypes_from_datatype_group(datatype_group_id):
        """
        Retrieve all dataType which are part from the given dataType group.
        """
        return dao.get_datatypes_from_datatype_group(datatype_group_id)

    @staticmethod
    def load_operation_by_gid(operation_gid):
        """ Retrieve loaded Operation from DB"""
        return dao.get_operation_by_gid(operation_gid)

    @staticmethod
    def load_operation_lazy_by_gid(operation_gid):
        """ Retrieve lazy Operation from DB"""
        return dao.get_operation_lazy_by_gid(operation_gid)

    @staticmethod
    def get_operation_group_by_id(operation_group_id):
        """ Loads OperationGroup from DB"""
        return dao.get_operationgroup_by_id(operation_group_id)

    @staticmethod
    def get_operation_group_by_gid(operation_group_gid):
        """ Loads OperationGroup from DB"""
        return dao.get_operationgroup_by_gid(operation_group_gid)

    @staticmethod
    def get_operations_in_group(operation_group):
        """ Return all the operations from an operation group. """
        return dao.get_operations_in_group(operation_group.id)

    @staticmethod
    def is_upload_operation(operation_gid):
        """ Returns True only if the operation with the given GID is an upload operation. """
        return dao.is_upload_operation(operation_gid)

    @staticmethod
    def get_all_operations_for_uploaders(project_id):
        """ Returns all finished upload operations. """
        return dao.get_all_operations_for_uploaders(project_id)

    def set_operation_and_group_visibility(self, entity_gid, is_visible, is_operation_group=False):
        """
        Sets the operation visibility.

        If 'is_operation_group' is True then this method will change the visibility for all
        the operations in the OperationGroup whose GID equals 'entity_gid'.
        """

        def set_visibility(op):
            # workaround:
            # 'reload' the operation so that it has the project property set.
            # get_operations_in_group does not eager load it and now we're out of a sqlalchemy session
            # write_operation_metadata requires that property
            op = dao.get_operation_by_id(op.id)
            # end hack
            op.visible = is_visible
            self.structure_helper.write_operation_metadata(op)
            dao.store_entity(op)

        def set_group_descendants_visibility(operation_group_id):
            ops_in_group = dao.get_operations_in_group(operation_group_id)
            for group_op in ops_in_group:
                set_visibility(group_op)

        if is_operation_group:
            op_group_id = dao.get_operationgroup_by_gid(entity_gid).id
            set_group_descendants_visibility(op_group_id)
        else:
            operation = dao.get_operation_by_gid(entity_gid)
            # ensure that if the operation belongs to a group, the visibility is changed for the entire group
            if operation.fk_operation_group is not None:
                set_group_descendants_visibility(operation.fk_operation_group)
            else:
                set_visibility(operation)
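
    # Hypothetical call forms (GIDs are placeholders):
    #   service.set_operation_and_group_visibility("<operation-gid>", False)   # hide a single operation
    #   service.set_operation_and_group_visibility("<group-gid>", True, True)  # show an entire OperationGroup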

    def get_operation_details(self, operation_gid, is_group):
        """
        :returns: an entity OperationOverlayDetails filled with all information for current operation details.
        """

        if is_group:
            operation_group = self.get_operation_group_by_gid(operation_gid)
            operation = dao.get_operations_in_group(operation_group.id, False, True)
            # Reload, to make sure all lazy attributes are populated as well.
            operation = dao.get_operation_by_gid(operation.gid)
            no_of_op_in_group = dao.get_operations_in_group(operation_group.id, is_count=True)
            datatype_group = self.get_datatypegroup_by_op_group_id(operation_group.id)
            count_result = dao.count_datatypes_in_group(datatype_group.id)

        else:
            operation = dao.get_operation_by_gid(operation_gid)
            if operation is None:
                return None
            no_of_op_in_group = 1
            count_result = dao.count_resulted_datatypes(operation.id)

        user_display_name = dao.get_user_by_id(operation.fk_launched_by).display_name
        burst = dao.get_burst_for_operation_id(operation.id)
        datatypes_param, all_special_params = self._review_operation_inputs(operation.gid)

        op_pid = dao.get_operation_process_for_operation(operation.id)
        op_details = OperationOverlayDetails(operation, user_display_name, len(datatypes_param),
                                             count_result, burst, no_of_op_in_group, op_pid)

        # Add all parameter which are set differently by the user on this Operation.
        if all_special_params is not None:
            op_details.add_scientific_fields(all_special_params)
        return op_details

    @staticmethod
    def get_filterable_meta():
        """
        Contains all the attributes by which
        the user can structure the tree of DataTypes
        """
        return DataTypeMetaData.get_filterable_meta()

    def get_project_structure(self, project, visibility_filter, first_level, second_level, filter_value):
        """
        Find all DataTypes (including the linked ones and the groups) relevant for the current project.
        In case of a problem, will return an empty list.
        """
        metadata_list = []
        dt_list = dao.get_data_in_project(project.id, visibility_filter, filter_value)

        for dt in dt_list:
            # Prepare the DT results from DB, for usage in controller, by converting into DataTypeMetaData objects
            data = {}
            is_group = False
            group_op = None
            dt_entity = dao.get_datatype_by_gid(dt.gid)
            if dt_entity is None:
                self.logger.warning("Ignored entity (possibly removed DT class)" + str(dt))
                continue
            #  Filter by dt.type, otherwise Links to individual DT inside a group will be mistaken
            if dt.type == "DataTypeGroup" and dt.parent_operation.operation_group is not None:
                is_group = True
                group_op = dt.parent_operation.operation_group

            # All these fields are necessary here for dynamic Tree levels.
            data[DataTypeMetaData.KEY_DATATYPE_ID] = dt.id
            data[DataTypeMetaData.KEY_GID] = dt.gid
            data[DataTypeMetaData.KEY_NODE_TYPE] = dt.display_type
            data[DataTypeMetaData.KEY_STATE] = dt.state
            data[DataTypeMetaData.KEY_SUBJECT] = str(dt.subject)
            data[DataTypeMetaData.KEY_TITLE] = dt_entity.display_name
            data[DataTypeMetaData.KEY_RELEVANCY] = dt.visible
            data[DataTypeMetaData.KEY_LINK] = dt.parent_operation.fk_launched_in != project.id

            data[DataTypeMetaData.KEY_TAG_1] = dt.user_tag_1 if dt.user_tag_1 else ''
            data[DataTypeMetaData.KEY_TAG_2] = dt.user_tag_2 if dt.user_tag_2 else ''
            data[DataTypeMetaData.KEY_TAG_3] = dt.user_tag_3 if dt.user_tag_3 else ''
            data[DataTypeMetaData.KEY_TAG_4] = dt.user_tag_4 if dt.user_tag_4 else ''
            data[DataTypeMetaData.KEY_TAG_5] = dt.user_tag_5 if dt.user_tag_5 else ''

            # Operation related fields:
            operation_name = CommonDetails.compute_operation_name(
                dt.parent_operation.algorithm.algorithm_category.displayname,
                dt.parent_operation.algorithm.displayname)
            data[DataTypeMetaData.KEY_OPERATION_TYPE] = operation_name
            data[DataTypeMetaData.KEY_OPERATION_ALGORITHM] = dt.parent_operation.algorithm.displayname
            data[DataTypeMetaData.KEY_AUTHOR] = dt.parent_operation.user.username
            data[DataTypeMetaData.KEY_OPERATION_TAG] = group_op.name if is_group else dt.parent_operation.user_group
            data[DataTypeMetaData.KEY_OP_GROUP_ID] = group_op.id if is_group else None

            completion_date = dt.parent_operation.completion_date
            string_year = completion_date.strftime(MONTH_YEAR_FORMAT) if completion_date is not None else ""
            string_month = completion_date.strftime(DAY_MONTH_YEAR_FORMAT) if completion_date is not None else ""
            data[DataTypeMetaData.KEY_DATE] = date2string(completion_date) if (completion_date is not None) else ''
            data[DataTypeMetaData.KEY_CREATE_DATA_MONTH] = string_year
            data[DataTypeMetaData.KEY_CREATE_DATA_DAY] = string_month

            data[DataTypeMetaData.KEY_BURST] = dt._parent_burst.name if dt._parent_burst is not None else '-None-'

            metadata_list.append(DataTypeMetaData(data, dt.invalid))

        return StructureNode.metadata2tree(metadata_list, first_level, second_level, project.id, project.name)

    @staticmethod
    def get_datatype_details(datatype_gid):
        """
        :returns: a tuple. The first entry is an instance of DataTypeOverlayDetails,
            the second contains all the possible states for the specified dataType,
            and the third is the DataType entity itself (or None in case of error).
        """
        meta_atts = DataTypeOverlayDetails()
        states = DataTypeMetaData.STATES
        try:
            datatype_result = dao.get_datatype_details(datatype_gid)
            meta_atts.fill_from_datatype(datatype_result, datatype_result._parent_burst)
            return meta_atts, states, datatype_result
        except Exception:
            ## We deliberately ignore the exception here and return no details.
            return meta_atts, states, None

    def _remove_project_node_files(self, project_id, gid, skip_validation=False):
        """
        Delegate removal of a node in the structure of the project.
        In case of a problem will THROW StructureException.
        """
        try:
            project = self.find_project(project_id)
            datatype = dao.get_datatype_by_gid(gid)
            links = dao.get_links_for_datatype(datatype.id)
            if links:
                was_link = False
                for link in links:
                    # This means it's only a link and we need to remove it
                    if link.fk_from_datatype == datatype.id and link.fk_to_project == project.id:
                        dao.remove_entity(Links, link.id)
                        was_link = True
                if not was_link:
                    # Create a clone of the operation
                    new_op = Operation(dao.get_system_user().id,
                                       links[0].fk_to_project,
                                       datatype.parent_operation.fk_from_algo,
                                       datatype.parent_operation.parameters,
                                       datatype.parent_operation.meta_data,
                                       datatype.parent_operation.status,
                                       datatype.parent_operation.start_date,
                                       datatype.parent_operation.completion_date,
                                       datatype.parent_operation.fk_operation_group,
                                       datatype.parent_operation.additional_info,
                                       datatype.parent_operation.user_group,
                                       datatype.parent_operation.range_values)
                    new_op = dao.store_entity(new_op)
                    to_project = self.find_project(links[0].fk_to_project).name
                    new_op_loaded = dao.get_operation_by_id(new_op.id)
                    self.structure_helper.write_operation_metadata(new_op_loaded)
                    full_path = h5.path_for_stored_index(datatype)
                    self.structure_helper.move_datatype(datatype, to_project, str(new_op.id), full_path)
                    datatype.fk_from_operation = new_op.id
                    datatype.parent_operation = new_op
                    dao.store_entity(datatype)
                    dao.remove_entity(Links, links[0].id)
            else:
                specific_remover = get_remover(datatype.type)(datatype)
                specific_remover.remove_datatype(skip_validation)
                h5_path = h5.path_for_stored_index(datatype)
                self.structure_helper.remove_datatype_file(h5_path)

        except RemoveDataTypeException:
            self.logger.exception("Could not execute operation Node Remove!")
            raise
        except FileStructureException:
            self.logger.exception("Remove operation failed")
            raise StructureException("Remove operation failed for unknown reasons.Please contact system administrator.")

    def remove_operation(self, operation_id):
        """
        Remove a given operation
        """
        operation = dao.try_get_operation_by_id(operation_id)
        if operation is not None:
            self.logger.debug("Deleting operation %s " % operation)
            datatypes_for_op = dao.get_results_for_operation(operation_id)
            for dt in reversed(datatypes_for_op):
                self.remove_datatype(operation.project.id, dt.gid, False)
            dao.remove_entity(Operation, operation.id)
            self.structure_helper.remove_operation_data(operation.project.name, operation_id)
            self.logger.debug("Finished deleting operation %s " % operation)
        else:
            self.logger.warning("Attempt to delete operation with id=%s which no longer exists." % operation_id)

    def remove_datatype(self, project_id, datatype_gid, skip_validation=False):
        """
        Method used for removing a dataType. If the given dataType is a DataTypeGroup
        or a dataType from a DataTypeGroup, then this method will remove the entire group.
        The operation(s) used for creating the dataType(s) will also be removed.
        """
        datatype = dao.get_datatype_by_gid(datatype_gid)
        if datatype is None:
            self.logger.warning("Attempt to delete DT[%s] which no longer exists." % datatype_gid)
            return

        is_datatype_group = False
        if dao.is_datatype_group(datatype_gid):
            is_datatype_group = True
        elif datatype.fk_datatype_group is not None:
            is_datatype_group = True
            datatype = dao.get_datatype_by_id(datatype.fk_datatype_group)

        operations_set = [datatype.fk_from_operation]

        correct = True

        if is_datatype_group:
            self.logger.debug("Removing datatype group %s" % datatype)
            data_list = dao.get_datatypes_from_datatype_group(datatype.id)
            for adata in data_list:
                self._remove_project_node_files(project_id, adata.gid, skip_validation)
                if adata.fk_from_operation not in operations_set:
                    operations_set.append(adata.fk_from_operation)

            datatype_group = dao.get_datatype_group_by_gid(datatype.gid)
            dao.remove_datatype(datatype_gid)
            correct = correct and dao.remove_entity(OperationGroup, datatype_group.fk_operation_group)
        else:
            self.logger.debug("Removing datatype %s" % datatype)
            self._remove_project_node_files(project_id, datatype.gid, skip_validation)

        ## Remove Operation entities in case no other DataType needs them.
        project = dao.get_project_by_id(project_id)
        for operation_id in operations_set:
            dependent_dt = dao.get_generic_entity(DataType, operation_id, "fk_from_operation")
            if len(dependent_dt) > 0:
                ### Do not remove the Operation in case DataTypes still exist referring to it.
                continue
            correct = correct and dao.remove_entity(Operation, operation_id)
            ## Make sure Operation folder is removed
            self.structure_helper.remove_operation_data(project.name, operation_id)

        if not correct:
            raise RemoveDataTypeException("Could not remove DataType " + str(datatype_gid))

    def update_metadata(self, submit_data):
        """
        Update DataType/ DataTypeGroup metadata
        THROW StructureException when input data is invalid.
        """
        new_data = dict()
        for key in DataTypeOverlayDetails().meta_attributes_list:
            if key in submit_data:
                new_data[key] = submit_data[key]

        if new_data[CommonDetails.CODE_OPERATION_TAG] == '':
            new_data[CommonDetails.CODE_OPERATION_TAG] = None
        try:
            if (CommonDetails.CODE_OPERATION_GROUP_ID in new_data
                    and new_data[CommonDetails.CODE_OPERATION_GROUP_ID]
                    and new_data[CommonDetails.CODE_OPERATION_GROUP_ID] != ''):
                # We need to edit a group
                all_data_in_group = dao.get_datatype_in_group(operation_group_id=
                                                              new_data[CommonDetails.CODE_OPERATION_GROUP_ID])
                if len(all_data_in_group) < 1:
                    raise StructureException("Inconsistent group, can not be updated!")
                datatype_group = dao.get_generic_entity(DataTypeGroup, all_data_in_group[0].fk_datatype_group)[0]
                all_data_in_group.append(datatype_group)
                for datatype in all_data_in_group:
                    new_data[CommonDetails.CODE_GID] = datatype.gid
                    self._edit_data(datatype, new_data, True)
            else:
                # Get the required DataType and operation from DB to store changes that will be done in XML.
                gid = new_data[CommonDetails.CODE_GID]
                datatype = dao.get_datatype_by_gid(gid)
                self._edit_data(datatype, new_data)
        except Exception as excep:
            self.logger.exception(excep)
            raise StructureException(str(excep))

    def _edit_data(self, datatype, new_data, from_group=False):
        """
        Private method, used for editing a meta-data XML file and a DataType row
        for a given custom DataType entity with new dictionary of data from UI.
        """
        from tvb.basic.traits.types_mapped import MappedType

        if isinstance(datatype, MappedType) and not os.path.exists(datatype.get_storage_file_path()):
            if not datatype.invalid:
                datatype.invalid = True
                dao.store_entity(datatype)
            return
        # 1. First update Operation fields:
        #    Update group field if possible
        new_group_name = new_data[CommonDetails.CODE_OPERATION_TAG]
        empty_group_value = (new_group_name is None or new_group_name == "")
        if from_group:
            if empty_group_value:
                raise StructureException("Empty group is not allowed!")

            group = dao.get_generic_entity(OperationGroup, new_data[CommonDetails.CODE_OPERATION_GROUP_ID])
            if group and len(group) > 0 and new_group_name != group[0].name:
                group = group[0]
                exists_group = dao.get_generic_entity(OperationGroup, new_group_name, 'name')
                if exists_group:
                    raise StructureException("Group '" + new_group_name + "' already exists.")
                group.name = new_group_name
                dao.store_entity(group)
        else:
            operation = dao.get_operation_by_id(datatype.fk_from_operation)
            operation.user_group = new_group_name
            dao.store_entity(operation)

        # 2. Update dataType fields:
        datatype.subject = new_data[DataTypeOverlayDetails.DATA_SUBJECT]
        datatype.state = new_data[DataTypeOverlayDetails.DATA_STATE]
        if DataTypeOverlayDetails.DATA_TAG_1 in new_data:
            datatype.user_tag_1 = new_data[DataTypeOverlayDetails.DATA_TAG_1]
        if DataTypeOverlayDetails.DATA_TAG_2 in new_data:
            datatype.user_tag_2 = new_data[DataTypeOverlayDetails.DATA_TAG_2]
        if DataTypeOverlayDetails.DATA_TAG_3 in new_data:
            datatype.user_tag_3 = new_data[DataTypeOverlayDetails.DATA_TAG_3]
        if DataTypeOverlayDetails.DATA_TAG_4 in new_data:
            datatype.user_tag_4 = new_data[DataTypeOverlayDetails.DATA_TAG_4]
        if DataTypeOverlayDetails.DATA_TAG_5 in new_data:
            datatype.user_tag_5 = new_data[DataTypeOverlayDetails.DATA_TAG_5]

        datatype = dao.store_entity(datatype)
        # 3. Update MetaData in H5 as well.
        datatype.persist_full_metadata()
        # 4. Update the group_name/user_group into the operation meta-data file
        operation = dao.get_operation_by_id(datatype.fk_from_operation)
        self.structure_helper.update_operation_metadata(operation.project.name, new_group_name,
                                                        str(datatype.fk_from_operation), from_group)

    def get_datatype_and_datatypegroup_inputs_for_operation(self, operation_gid, selected_filter):
        """
        Returns the dataTypes that are used as input parameters for the given operation.
        'selected_filter' - is expected to be a visibility filter.

        If any dataType is part of a dataType group then the dataType group will
        be returned instead of that dataType.
        """
        all_datatypes = self._review_operation_inputs(operation_gid)[0]
        datatype_inputs = []
        for datatype in all_datatypes:
            if selected_filter.display_name == StaticFiltersFactory.RELEVANT_VIEW:
                if datatype.visible:
                    datatype_inputs.append(datatype)
            else:
                datatype_inputs.append(datatype)
        datatypes = []
        datatype_groups = dict()
        for data_type in datatype_inputs:
            if data_type.fk_datatype_group is None:
                datatypes.append(data_type)
            elif data_type.fk_datatype_group not in datatype_groups:
                dt_group = dao.get_datatype_by_id(data_type.fk_datatype_group)
                datatype_groups[data_type.fk_datatype_group] = dt_group

        datatypes.extend([v for _, v in six.iteritems(datatype_groups)])
        return datatypes

    def _review_operation_inputs(self, operation_gid):
        """
        :returns: A list of DataTypes that are used as input parameters for the specified operation,
                 and a dictionary with all operation parameters that differ from the default ones.
        """
        operation = dao.get_operation_by_gid(operation_gid)
        parameters = json.loads(operation.parameters)
        try:
            adapter = ABCAdapter.build_adapter(operation.algorithm)
            return adapter.review_operation_inputs(parameters)

        except Exception:
            self.logger.exception("Could not load details for operation %s" % operation_gid)
            inputs_datatypes = []
            changed_parameters = dict(Warning="Algorithm changed dramatically. We can not offer more details")
            for submit_param in parameters.values():
                self.logger.debug("Searching DT by GID %s" % submit_param)
                datatype = ABCAdapter.load_entity_by_gid(str(submit_param))
                if datatype is not None:
                    inputs_datatypes.append(datatype)
            return inputs_datatypes, changed_parameters

    def get_datatypes_inputs_for_operation_group(self, group_id, selected_filter):
        """
        Returns the dataType inputs for an operation group. If more dataTypes
        are part of the same dataType group then only the dataType group will
        be returned instead of them.
        """
        operations_gids = dao.get_operations_in_group(group_id, only_gids=True)
        op_group_inputs = dict()
        for gid in operations_gids:
            op_inputs = self.get_datatype_and_datatypegroup_inputs_for_operation(gid[0], selected_filter)
            for datatype in op_inputs:
                op_group_inputs[datatype.id] = datatype
        return list(op_group_inputs.values())

    @staticmethod
    def get_results_for_operation(operation_id, selected_filter=None):
        """
        Retrieve the DataTypes entities resulted after the execution of the given operation.
        """
        return dao.get_results_for_operation(operation_id, selected_filter)

    @staticmethod
    def get_operations_for_datatype_group(datatype_group_id, visibility_filter, only_in_groups=False):
        """
        Returns all the operations which use as an input parameter a dataType from the given DataTypeGroup.
        visibility_filter - is a filter used for retrieving all the operations or only the relevant ones.

        If only_in_groups is True, then this method will return only the operations that are
        part of an operation group; otherwise it will return only the operations that
        are NOT part of an operation group.
        """
        if visibility_filter.display_name != StaticFiltersFactory.RELEVANT_VIEW:
            return dao.get_operations_for_datatype_group(datatype_group_id, only_relevant=False,
                                                         only_in_groups=only_in_groups)
        return dao.get_operations_for_datatype_group(datatype_group_id, only_in_groups=only_in_groups)

    @staticmethod
    def get_operations_for_datatype(datatype_gid, visibility_filter, only_in_groups=False):
        """
        Returns all the operations which use as an input parameter the dataType with the specified GID.

        If only_in_groups is True, then this method will return only the operations that are part
        of an operation group; otherwise it will return only the operations that are NOT part of an operation group.
        """
        if visibility_filter.display_name != StaticFiltersFactory.RELEVANT_VIEW:
            return dao.get_operations_for_datatype(datatype_gid, only_relevant=False, only_in_groups=only_in_groups)
        return dao.get_operations_for_datatype(datatype_gid, only_in_groups=only_in_groups)

    @staticmethod
    def get_datatype_by_id(datatype_id):
        """Retrieve a DataType DB reference by its id."""
        return dao.get_datatype_by_id(datatype_id)

    @staticmethod
    def get_datatypegroup_by_gid(datatypegroup_gid):
        """ Returns the DataTypeGroup with the specified gid. """
        return dao.get_datatype_group_by_gid(datatypegroup_gid)

    @staticmethod
    def count_datatypes_generated_from(datatype_gid):
        """
        Count all the datatypes resulted from operations that had as
        input the datatype given by 'datatype_gid'.
        """
        return dao.count_datatypes_generated_from(datatype_gid)

    @staticmethod
    def get_datatypegroup_by_op_group_id(operation_group_id):
        """ Returns the DataTypeGroup with the specified id. """
        return dao.get_datatypegroup_by_op_group_id(operation_group_id)

    @staticmethod
    def get_datatypes_in_project(project_id, only_visible=False):
        return dao.get_data_in_project(project_id, only_visible)


    @staticmethod
    def set_datatype_visibility(datatype_gid, is_visible):
        """
        Sets the dataType visibility. If the given dataType is a dataType group or is part of a
        dataType group, then this method will set the visibility for each dataType from this group.
        """

        def set_visibility(dt):
            """ set visibility flag, persist in db and h5"""
            dt.visible = is_visible
            dt = dao.store_entity(dt)
            dt.persist_full_metadata()

        def set_group_descendants_visibility(datatype_group_id):
            datatypes_in_group = dao.get_datatypes_from_datatype_group(datatype_group_id)
            for group_dt in datatypes_in_group:
                set_visibility(group_dt)

        datatype = dao.get_datatype_by_gid(datatype_gid)

        if isinstance(datatype, DataTypeGroup):  # datatype is a group
            set_group_descendants_visibility(datatype.id)
        elif datatype.fk_datatype_group is not None:  # datatype is member of a group
            set_group_descendants_visibility(datatype.fk_datatype_group)
            # the datatype to be updated is the parent datatype group
            datatype = dao.get_datatype_by_id(datatype.fk_datatype_group)

        # update the datatype or datatype group.
        set_visibility(datatype)
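
    # A usage sketch (hypothetical GID, with `service` an instance of this class):
    # hiding a datatype that belongs to a group cascades to every datatype in that
    # group and to the parent DataTypeGroup itself.
    #
    #   service.set_datatype_visibility("aa11bb22-3344-5566-7788-99aabbccddee", False)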

    @staticmethod
    def is_datatype_group(datatype_gid):
        """ Used to check if the dataType with the specified GID is a DataTypeGroup. """
        return dao.is_datatype_group(datatype_gid)
Example #10
class BurstService(object):
    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.file_helper = FilesHelper()

    def mark_burst_finished(self,
                            burst_entity,
                            burst_status=None,
                            error_message=None):
        """
        Mark Burst status field.
        Also compute the 'weight' of the current burst: number of operations inside, estimated size on disk...

        :param burst_entity: BurstConfiguration to be updated, at finish time.
        :param burst_status: BurstConfiguration status. By default BURST_FINISHED
        :param error_message: if given, set the status to error and store the message.
        """
        if burst_status is None:
            burst_status = BurstConfiguration.BURST_FINISHED
        if error_message is not None:
            burst_status = BurstConfiguration.BURST_ERROR

        try:
            # If there are any DataType Groups in current Burst, update their counter.
            burst_dt_groups = dao.get_generic_entity(DataTypeGroup,
                                                     burst_entity.id,
                                                     "fk_parent_burst")
            for dt_group in burst_dt_groups:
                dt_group.count_results = dao.count_datatypes_in_group(
                    dt_group.id)
                dt_group.disk_size, dt_group.subject = dao.get_summary_for_group(
                    dt_group.id)
                dao.store_entity(dt_group)

            # Update actual Burst entity fields
            burst_entity.datatypes_number = dao.count_datatypes_in_burst(
                burst_entity.id)

            burst_entity.status = burst_status
            burst_entity.error_message = error_message
            burst_entity.finish_time = datetime.now()
            dao.store_entity(burst_entity)
        except Exception:
            self.logger.exception(
                "Could not correctly update Burst status and meta-data!")
            burst_entity.status = burst_status
            burst_entity.error_message = "Error when updating Burst Status"
            burst_entity.finish_time = datetime.now()
            dao.store_entity(burst_entity)

    def persist_operation_state(self,
                                operation,
                                operation_status,
                                message=None):
        """
        Update Operation instance state. Store it in DB and on disk.
        :param operation: Operation instance
        :param operation_status: new status
        :param message: message in case of error
        :return: operation instance changed
        """
        operation.mark_complete(operation_status, message)
        dao.store_entity(operation)
        operation = dao.get_operation_by_id(operation.id)
        self.file_helper.write_operation_metadata(operation)
        return operation

    def get_burst_for_operation_id(self, operation_id):
        return dao.get_burst_for_operation_id(operation_id)

    @staticmethod
    def rename_burst(burst_id, new_name):
        """
        Rename the burst given by burst_id, setting its name to new_name.
        """
        burst = dao.get_burst_by_id(burst_id)
        burst.name = new_name
        dao.store_entity(burst)

    @staticmethod
    def get_available_bursts(project_id):
        """
        Return all the bursts for the given project.
        """
        bursts = dao.get_bursts_for_project(
            project_id, page_size=MAX_BURSTS_DISPLAYED) or []
        # for burst in bursts:
        #     burst.prepare_after_load()
        return bursts

    @staticmethod
    def populate_burst_disk_usage(bursts):
        """
        Adds a disk_usage field to each burst object.
        The disk usage is computed as the sum of the sizes of the datatypes generated by that burst.
        """
        sizes = dao.compute_bursts_disk_size([b.id for b in bursts])
        for b in bursts:
            b.disk_size = format_bytes_human(sizes[b.id])

    def update_history_status(self, id_list):
        """
        For each burst id received in id_list, read the new status from DB and return
        a list of [id, status, is_group, message, running_time] entries.
        """
        result = []
        for b_id in id_list:
            burst = dao.get_burst_by_id(b_id)
            # burst.prepare_after_load()
            if burst is not None:
                if burst.status == burst.BURST_RUNNING:
                    running_time = datetime.now() - burst.start_time
                else:
                    running_time = burst.finish_time - burst.start_time
                running_time = format_timedelta(running_time,
                                                most_significant2=False)

                if burst.status == burst.BURST_ERROR:
                    msg = 'Check Operations page for error Message'
                else:
                    msg = ''
                result.append([
                    burst.id, burst.status, burst.is_group, msg, running_time
                ])
            else:
                self.logger.debug("Could not find burst with id=" + str(b_id) +
                                  ". Might have been deleted by user!!")
        return result

    def stop_burst(self, burst):
        raise NotImplementedError

    def cancel_or_remove_burst(self, burst_id):
        raise NotImplementedError

    @staticmethod
    def update_simulation_fields(burst_id, op_simulation_id, simulation_gid):
        burst = dao.get_burst_by_id(burst_id)
        burst.fk_simulation_id = op_simulation_id
        burst.simulator_gid = simulation_gid.hex
        dao.store_entity(burst)


class OperationService:
    """
    Class responsible for preparing an operation launch.
    It will prepare parameters, and decide whether the operation is to be executed
    immediately or to be sent to the cluster.
    """
    ATT_UID = "uid"


    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.workflow_service = WorkflowService()
        self.file_helper = FilesHelper()


    ##########################################################################################
    ######## Methods related to launching operations start here ##############################
    ##########################################################################################

    def initiate_operation(self, current_user, project_id, adapter_instance,
                           temporary_storage, visible=True, **kwargs):
        """
        Get the parameters of the computation from the previous inputs form,
        and launch the computation (on the cluster or locally).

        Invoke the custom method on an Adapter instance. Make sure that when the
        operation has finished, the correct results are stored into DB.
        """
        if not isinstance(adapter_instance, ABCAdapter):
            self.logger.warning("Inconsistent Adapter Class:" + str(adapter_instance.__class__))
            raise LaunchException("Developer Exception!!")

        # Prepare Files parameters
        files = {}
        kw2 = copy(kwargs)
        for i, j in six.iteritems(kwargs):
            if isinstance(j, FieldStorage) or isinstance(j, Part):
                files[i] = j
                del kw2[i]

        temp_files = {}
        try:
            for i, j in six.iteritems(files):
                if j.file is None:
                    kw2[i] = None
                    continue
                uq_name = utils.date2string(datetime.now(), True) + '_' + str(i)
                # We have to add original file name to end, in case file processing
                # involves file extension reading
                file_name = TEMPORARY_PREFIX + uq_name + '_' + j.filename
                file_name = os.path.join(temporary_storage, file_name)
                kw2[i] = file_name
                temp_files[i] = file_name
                with open(file_name, 'wb') as file_obj:
                    file_obj.write(j.file.read())
                self.logger.debug("Will store file:" + file_name)
            kwargs = kw2
        except Exception as excep:
            self._handle_exception(excep, temp_files, "Could not launch operation: invalid input files!")

        ### Store Operation entity. 
        algo = adapter_instance.stored_adapter
        algo_category = dao.get_category_by_id(algo.fk_category)

        operations = self.prepare_operations(current_user.id, project_id, algo, algo_category,
                                             {}, visible, **kwargs)[0]

        if isinstance(adapter_instance, ABCSynchronous):
            if len(operations) > 1:
                raise LaunchException("Synchronous operations are not supporting ranges!")
            if len(operations) < 1:
                self.logger.warning("No operation was defined")
                raise LaunchException("Invalid empty Operation!!!")
            return self.initiate_prelaunch(operations[0], adapter_instance, temp_files, **kwargs)
        else:
            return self._send_to_cluster(operations, adapter_instance, current_user.username)


    @staticmethod
    def _prepare_metadata(initial_metadata, algo_category, operation_group, submit_data):
        """
        Gather metadata from the submitted fields and the algorithm to be executed.
        Will populate STATE and GROUP in the metadata.
        """
        metadata = copy(initial_metadata)

        user_group = None
        if DataTypeMetaData.KEY_OPERATION_TAG in submit_data:
            user_group = submit_data[DataTypeMetaData.KEY_OPERATION_TAG]

        if operation_group is not None:
            metadata[DataTypeMetaData.KEY_OPERATION_TAG] = operation_group.name

        if DataTypeMetaData.KEY_TAG_1 in submit_data:
            metadata[DataTypeMetaData.KEY_TAG_1] = submit_data[DataTypeMetaData.KEY_TAG_1]

        metadata[DataTypeMetaData.KEY_STATE] = algo_category.defaultdatastate

        return metadata, user_group


    @staticmethod
    def _read_set(values):
        """ Parse a committed UI possible list of values, into a set converted into string."""
        if isinstance(values, list):
            set_values = []
            values_str = ""
            for val in values:
                if val not in set_values:
                    set_values.append(val)
                    values_str = values_str + " " + str(val)
            values = values_str
        return str(values).strip()
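
    # A minimal worked example for _read_set (hypothetical inputs): duplicates are
    # dropped while preserving order, and the result is a space-joined string.
    #
    #   _read_set(["a", "b", "a"])  ->  "a b"
    #   _read_set("single")         ->  "single"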


    def group_operation_launch(self, user_id, project_id, algorithm_id, category_id, existing_dt_group=None, **kwargs):
        """
        Create and prepare the launch of a group of operations.
        """
        category = dao.get_category_by_id(category_id)
        algorithm = dao.get_algorithm_by_id(algorithm_id)
        ops, _ = self.prepare_operations(user_id, project_id, algorithm, category, {},
                                         existing_dt_group=existing_dt_group, **kwargs)
        for operation in ops:
            self.launch_operation(operation.id, True)


    def prepare_operations(self, user_id, project_id, algorithm, category, metadata,
                           visible=True, existing_dt_group=None, **kwargs):
        """
        Do all the necessary preparations for storing an operation. In case of a
        range of values, create an operation group and one operation for each possible
        combination of values in the range.
        :param metadata: Initial MetaData with potential Burst identification inside.
        """
        operations = []

        available_args, group = self._prepare_group(project_id, existing_dt_group, kwargs)
        if len(available_args) > TvbProfile.current.MAX_RANGE_NUMBER:
            raise LaunchException("Too big range specified. You should limit the"
                                  " resulting operations to %d" % TvbProfile.current.MAX_RANGE_NUMBER)
        else:
            self.logger.debug("Launching a range with %d operations..." % len(available_args))
        group_id = None
        if group is not None:
            group_id = group.id
        metadata, user_group = self._prepare_metadata(metadata, category, group, kwargs)

        self.logger.debug("Saving Operation(userId=" + str(user_id) + ",projectId=" + str(project_id) + "," +
                          str(metadata) + ",algorithmId=" + str(algorithm.id) + ", ops_group= " + str(group_id) + ")")

        visible_operation = visible and category.display is False
        meta_str = json.dumps(metadata)
        for (one_set_of_args, range_vals) in available_args:
            range_values = json.dumps(range_vals) if range_vals else None
            operation = model.Operation(user_id, project_id, algorithm.id,
                                        json.dumps(one_set_of_args, cls=MapAsJson.MapAsJsonEncoder), meta_str,
                                        op_group_id=group_id, user_group=user_group, range_values=range_values)
            operation.visible = visible_operation
            operations.append(operation)
        operations = dao.store_entities(operations)

        if group is not None:
            burst_id = None
            if DataTypeMetaData.KEY_BURST in metadata:
                burst_id = metadata[DataTypeMetaData.KEY_BURST]
            if existing_dt_group is None:
                datatype_group = model.DataTypeGroup(group, operation_id=operations[0].id, fk_parent_burst=burst_id,
                                                     state=metadata[DataTypeMetaData.KEY_STATE])
                dao.store_entity(datatype_group)
            else:
                # Reset count
                existing_dt_group.count_results = None
                dao.store_entity(existing_dt_group)

        return operations, group


    def prepare_operations_for_workflowsteps(self, workflow_step_list, workflows, user_id, burst_id,
                                             project_id, group, sim_operations):
        """
        Create and store Operation entities from a list of Workflow Steps.
        len(workflows) x len(workflow_step_list) Operations will be generated.
        For every step in workflow_step_list, one OperationGroup and one DataTypeGroup
        will be created (in case of PSE).
        """

        for step in workflow_step_list:
            operation_group = None
            if (group is not None) and not isinstance(step, model.WorkflowStepView):
                operation_group = model.OperationGroup(project_id=project_id, ranges=group.range_references)
                operation_group = dao.store_entity(operation_group)

            operation = None
            metadata = {DataTypeMetaData.KEY_BURST: burst_id}
            algo_category = dao.get_algorithm_by_id(step.fk_algorithm)
            if algo_category is not None:
                algo_category = algo_category.algorithm_category

            for wf_idx, workflow in enumerate(workflows):
                cloned_w_step = step.clone()
                cloned_w_step.fk_workflow = workflow.id
                dynamic_params = cloned_w_step.dynamic_param
                op_params = cloned_w_step.static_param
                op_params.update(dynamic_params)
                range_values = None
                group_id = None
                if operation_group is not None:
                    group_id = operation_group.id
                    range_values = sim_operations[wf_idx].range_values

                if not isinstance(step, model.WorkflowStepView):
                    ## For visualization steps, do not create operations, as those are not really needed.
                    metadata, user_group = self._prepare_metadata(metadata, algo_category, operation_group, op_params)
                    operation = model.Operation(user_id, project_id, step.fk_algorithm,
                                                json.dumps(op_params, cls=MapAsJson.MapAsJsonEncoder),
                                                meta=json.dumps(metadata),
                                                op_group_id=group_id, range_values=range_values, user_group=user_group)
                    operation.visible = step.step_visible
                    operation = dao.store_entity(operation)
                    cloned_w_step.fk_operation = operation.id

                dao.store_entity(cloned_w_step)

            if operation_group is not None and operation is not None:
                datatype_group = model.DataTypeGroup(operation_group, operation_id=operation.id,
                                                     fk_parent_burst=burst_id,
                                                     state=metadata[DataTypeMetaData.KEY_STATE])
                dao.store_entity(datatype_group)


    def initiate_prelaunch(self, operation, adapter_instance, temp_files, **kwargs):
        """
        Public method.
        This should be the common entry point for calling an adapter method.
        """
        result_msg = ""
        try:
            unique_id = None
            if self.ATT_UID in kwargs:
                unique_id = kwargs[self.ATT_UID]
            filtered_kwargs = adapter_instance.prepare_ui_inputs(kwargs)
            self.logger.debug("Launching operation " + str(operation.id) + " with " + str(filtered_kwargs))
            operation = dao.get_operation_by_id(operation.id)   # Load Lazy fields

            params = dict()
            for k, value_ in filtered_kwargs.items():
                params[str(k)] = value_

            disk_space_per_user = TvbProfile.current.MAX_DISK_SPACE
            pending_op_disk_space = dao.compute_disk_size_for_started_ops(operation.fk_launched_by)
            user_disk_space = dao.compute_user_generated_disk_size(operation.fk_launched_by)    # From kB to Bytes
            available_space = disk_space_per_user - pending_op_disk_space - user_disk_space

            result_msg, nr_datatypes = adapter_instance._prelaunch(operation, unique_id, available_space, **params)
            operation = dao.get_operation_by_id(operation.id)
            ## Update DB stored kwargs for search purposes, to contain only valuable params (no unselected options)
            operation.parameters = json.dumps(kwargs)
            operation.mark_complete(model.STATUS_FINISHED)
            if nr_datatypes > 0:
                #### Write operation meta-XML only if some result are returned
                self.file_helper.write_operation_metadata(operation)
            dao.store_entity(operation)
            self._remove_files(temp_files)

        except zipfile.BadZipfile as excep:
            msg = "The uploaded file is not a valid ZIP!"
            self._handle_exception(excep, temp_files, msg, operation)
        except TVBException as excep:
            self._handle_exception(excep, temp_files, excep.message, operation)
        except MemoryError:
            msg = ("Could not execute operation because there is not enough free memory." +
                   " Please adjust operation parameters and re-launch it.")
            self._handle_exception(Exception(msg), temp_files, msg, operation)
        except Exception as excep1:
            msg = "Could not launch Operation with the given input data!"
            self._handle_exception(excep1, temp_files, msg, operation)

        ### Try to find next workflow Step. It might throw WorkflowException
        next_op_id = self.workflow_service.prepare_next_step(operation.id)
        self.launch_operation(next_op_id)
        return result_msg


    def _send_to_cluster(self, operations, adapter_instance, current_username="******"):
        """ Initiate operation on cluster"""
        for operation in operations:
            try:
                BACKEND_CLIENT.execute(str(operation.id), current_username, adapter_instance)
            except Exception as excep:
                self._handle_exception(excep, {}, "Could not start operation!", operation)

        return operations


    def launch_operation(self, operation_id, send_to_cluster=False, adapter_instance=None):
        """
        Method exposed for Burst-Workflow related calls.
        It is used for cascading operations in the same workflow.
        """
        if operation_id is not None:
            operation = dao.get_operation_by_id(operation_id)
            if adapter_instance is None:
                algorithm = operation.algorithm
                adapter_instance = ABCAdapter.build_adapter(algorithm)
            parsed_params = utils.parse_json_parameters(operation.parameters)

            if send_to_cluster:
                self._send_to_cluster([operation], adapter_instance, operation.user.username)
            else:
                self.initiate_prelaunch(operation, adapter_instance, {}, **parsed_params)


    def _handle_exception(self, exception, temp_files, message, operation=None):
        """
        Common way to treat exceptions:
            - remove temporary files, if any
            - set status ERROR on current operation (if any)
            - log exception
        """
        self.logger.exception(message)
        if operation is not None:
            self.workflow_service.persist_operation_state(operation, model.STATUS_ERROR, unicode(exception))
            self.workflow_service.update_executed_workflow_state(operation)
        self._remove_files(temp_files)
        exception.message = message
        raise exception, None, sys.exc_info()[2]  # Python 2 three-argument raise: preserves the original traceback when re-raising


    def _remove_files(self, file_dictionary):
        """
        Remove any files that exist in the file_dictionary. 
        Currently used to delete temporary files created during an operation.
        """
        for pth in file_dictionary.itervalues():
            pth = str(pth)
            try:
                if os.path.exists(pth) and os.path.isfile(pth):
                    os.remove(pth)
                    self.logger.debug("We no longer need file:" + pth + " => deleted")
                else:
                    self.logger.warning("Trying to remove not existent file:" + pth)
            except OSError:
                self.logger.exception("Could not cleanup file!")


    @staticmethod
    def _range_name(range_no):
        return model.PARAM_RANGE_PREFIX + str(range_no)
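
    # For illustration: _range_name(1) would return "range_1", assuming
    # model.PARAM_RANGE_PREFIX resolves to "range_" (hypothetical value).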


    def _prepare_group(self, project_id, existing_dt_group, kwargs):
        """
        Create and store OperationGroup entity, or return None
        """
        # Standard ranges as accepted from UI
        range1_values = self.get_range_values(kwargs, self._range_name(1))
        range2_values = self.get_range_values(kwargs, self._range_name(2))
        available_args = self.__expand_arguments([(kwargs, None)], range1_values, self._range_name(1))
        available_args = self.__expand_arguments(available_args, range2_values, self._range_name(2))
        is_group = False
        ranges = []
        if self._range_name(1) in kwargs and range1_values is not None:
            is_group = True
            ranges.append(json.dumps((kwargs[self._range_name(1)], range1_values)))
        if self._range_name(2) in kwargs and range2_values is not None:
            is_group = True
            ranges.append(json.dumps((kwargs[self._range_name(2)], range2_values)))
        # Now handle additional ranges, which may appear in the 'model exploration' case
        last_range_idx = 3
        ranger_name = self._range_name(last_range_idx)
        while ranger_name in kwargs:
            values_for_range = self.get_range_values(kwargs, ranger_name)
            available_args = self.__expand_arguments(available_args, values_for_range, ranger_name)
            last_range_idx += 1
            ranger_name = self._range_name(last_range_idx)
        if last_range_idx > 3:
            ranges = []  # Since we only have 3 fields in db for this just hide it
        if not is_group:
            group = None
        elif existing_dt_group is None:
            group = model.OperationGroup(project_id=project_id, ranges=ranges)
            group = dao.store_entity(group)
        else:
            group = existing_dt_group.parent_operation_group

        return available_args, group
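
    # A sketch of the stored `ranges` content (hypothetical parameter names): each
    # entry is a JSON-encoded (parameter_name, values) pair, one per ranged field.
    #
    #   ranges == ['["conduction_speed", [1.0, 2.0]]',
    #              '["coupling_a", [0.1, 0.2]]']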


    def get_range_values(self, kwargs, ranger_name):
        """
        For the range parameter given by ranger_name, look in kwargs and return
        the array of all possible values.
        """
        if ranger_name not in kwargs:
            return None
        if str(kwargs[ranger_name]) not in kwargs:
            return None

        range_values = []
        try:
            range_data = json.loads(str(kwargs[str(kwargs[ranger_name])]))
        except Exception:
            try:
                range_data = [x.strip() for x in str(kwargs[str(kwargs[ranger_name])]).split(',') if len(x.strip()) > 0]
                return range_data
            except Exception:
                self.logger.exception("Could not launch operation !")
                raise LaunchException("Could not launch with no data from:" + str(ranger_name))
        if type(range_data) in (list, tuple):
            return range_data

        if (constants.ATT_MINVALUE in range_data) and (constants.ATT_MAXVALUE in range_data):
            lo_val = float(range_data[constants.ATT_MINVALUE])
            hi_val = float(range_data[constants.ATT_MAXVALUE])
            step = float(range_data[constants.ATT_STEP])
            range_values = list(Range(lo=lo_val, hi=hi_val, step=step, mode=Range.MODE_INCLUDE_BOTH))

        else:
            for possible_value in range_data:
                if range_data[possible_value]:
                    range_values.append(possible_value)
        return range_values
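
    # A usage sketch (hypothetical values, assuming the UI range constants resolve
    # to "minValue"/"maxValue"/"step"): the "range_1" key names the ranged
    # parameter, whose own entry holds the range specification.
    #
    #   kwargs = {"range_1": "coupling_a",
    #             "coupling_a": '{"minValue": 0.0, "maxValue": 1.0, "step": 0.5}'}
    #   service.get_range_values(kwargs, "range_1")  # -> [0.0, 0.5, 1.0]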


    @staticmethod
    def __expand_arguments(arguments_list, range_values, range_title):
        """
        Parse the arguments submitted from the UI (flattened form).
        If any range is found, return a list of arguments for all possible operations.
        """
        if range_values is None:
            return arguments_list
        result = []
        for value in range_values:
            for args, range_ in arguments_list:
                kw_new = copy(args)
                range_new = copy(range_)
                kw_new[kw_new[range_title]] = value
                if range_new is None:
                    range_new = {}
                range_new[kw_new[range_title]] = value
                del kw_new[range_title]
                result.append((kw_new, range_new))
        return result
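
    # A worked example (hypothetical values): one ranged parameter expands a single
    # argument set into one set per range value, with the range recorded alongside.
    #
    #   args = [({"range_1": "speed", "speed": None}, None)]
    #   __expand_arguments(args, [1.0, 2.0], "range_1")
    #   # -> [({"speed": 1.0}, {"speed": 1.0}),
    #   #     ({"speed": 2.0}, {"speed": 2.0})]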


    ##########################################################################################
    ######## Methods related to stopping and restarting operations start here ################
    ##########################################################################################

    def stop_operation(self, operation_id):
        """
        Stop the operation given by the operation id.
        """
        return BACKEND_CLIENT.stop_operation(int(operation_id))


class WorkflowService:
    """
    Service layer for the Workflow entity.
    """

    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.file_helper = FilesHelper()


    def persist_operation_state(self, operation, operation_status, message=None):
        """
        Update Operation instance state. Store it in DB and on disk.
        :param operation: Operation instance
        :param operation_status: new status
        :param message: message in case of error
        :return: operation instance changed
        """
        operation.mark_complete(operation_status, unicode(message))
        dao.store_entity(operation)
        operation = dao.get_operation_by_id(operation.id)
        self.file_helper.write_operation_metadata(operation)
        return operation


    @staticmethod
    def store_workflow_step(workflow_step):
        """
        Store a workflow step entity.
        """
        dao.store_entity(workflow_step)
    
    
    @staticmethod
    def create_and_store_workflow(project_id, burst_id, simulator_index, simulator_id, operations):
        """
        Create and store the workflows, given the project and burst in which they are created.
        :param simulator_index: the index of the simulator in the workflow
        :param simulator_id: the id of the simulator adapter
        :param operations: a list with the operations created for the simulator steps
        """
        workflows = []
        for operation in operations:
            new_workflow = model.Workflow(project_id, burst_id)
            new_workflow = dao.store_entity(new_workflow)
            workflows.append(new_workflow)
            simulation_step = model.WorkflowStep(algorithm_id=simulator_id, workflow_id=new_workflow.id,
                                                 step_index=simulator_index, static_param=operation.parameters)
            simulation_step.fk_operation = operation.id
            dao.store_entity(simulation_step)
        return workflows
        

    @staticmethod
    def set_dynamic_step_references(workflow_step, step_reference):
        """
        :param workflow_step: a valid instance of a workflow_step
        :param step_reference: the step to which every dataType reference index should be set
        
        For each dynamic parameter of the given workflow_step set the 'step_index' at step_reference. 
        """
        dynamic_params = workflow_step.dynamic_param
        for entry in dynamic_params:
            dynamic_params[entry][WorkflowStepConfiguration.STEP_INDEX_KEY] = step_reference
        workflow_step.dynamic_param = dynamic_params
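
    # A minimal sketch of the expected dynamic_param layout (hypothetical parameter
    # name): each entry records which datatype of which former step feeds the
    # parameter, so after set_dynamic_step_references(workflow_step, 0) every entry
    # reads from step 0.
    #
    #   workflow_step.dynamic_param = {
    #       "time_series": {WorkflowStepConfiguration.DATATYPE_INDEX_KEY: 0,
    #                       WorkflowStepConfiguration.STEP_INDEX_KEY: 0}}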


    def prepare_next_step(self, last_executed_op_id):
        """
        If the operation with id 'last_executed_op_id' resulted after
        the execution of a workflow step then this method will launch
        the operation corresponding to the next step from the workflow.
        """
        try:
            current_step, next_workflow_step = self._get_data(last_executed_op_id)
            if next_workflow_step is not None:
                operation = dao.get_operation_by_id(next_workflow_step.fk_operation)
                dynamic_param_names = next_workflow_step.dynamic_workflow_param_names
                if len(dynamic_param_names) > 0:
                    op_params = json.loads(operation.parameters)
                    for param_name in dynamic_param_names:
                        dynamic_param = op_params[param_name]
                        former_step = dao.get_workflow_step_by_step_index(next_workflow_step.fk_workflow,
                                                                          dynamic_param[
                                                                              WorkflowStepConfiguration.STEP_INDEX_KEY])
                        if type(dynamic_param[WorkflowStepConfiguration.DATATYPE_INDEX_KEY]) is IntType:
                            datatypes = dao.get_results_for_operation(former_step.fk_operation)
                            op_params[param_name] = datatypes[
                                dynamic_param[WorkflowStepConfiguration.DATATYPE_INDEX_KEY]].gid
                        else:
                            previous_operation = dao.get_operation_by_id(former_step.fk_operation)
                            op_params[param_name] = json.loads(previous_operation.parameters)[
                                dynamic_param[WorkflowStepConfiguration.DATATYPE_INDEX_KEY]]
                    operation.parameters = json.dumps(op_params)
                    operation = dao.store_entity(operation)
                return operation.id
            elif current_step is not None:
                current_workflow = dao.get_workflow_by_id(current_step.fk_workflow)
                current_workflow.status = current_workflow.STATUS_FINISHED
                dao.store_entity(current_workflow)
                burst_entity = dao.get_burst_by_id(current_workflow.fk_burst)
                parallel_workflows = dao.get_workflows_for_burst(burst_entity.id)
                all_finished = True
                for workflow in parallel_workflows:
                    if workflow.status == workflow.STATUS_STARTED:
                        all_finished = False
                if all_finished:
                    self.mark_burst_finished(burst_entity)
            return None
        except Exception as excep:
            self.logger.error(excep)
            self.logger.exception(excep)
            raise WorkflowInterStepsException(excep)


class FilesHelperTest(TransactionalTestCase):
    """
    This class contains tests for the tvb.core.entities.file.files_helper module.
    """
    PROJECT_NAME = "test_proj"

    def setUp(self):
        """
        Set up the context needed by the tests.
        """
        self.files_helper = FilesHelper()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user,
                                                       self.PROJECT_NAME)

    def tearDown(self):
        """ Remove generated project during tests. """
        self.delete_project_folders()

    def test_check_created(self):
        """ Test standard flows for check created. """
        self.files_helper.check_created()
        self.assertTrue(os.path.exists(root_storage), "Storage not created!")

        self.files_helper.check_created(os.path.join(root_storage, "test"))
        self.assertTrue(os.path.exists(root_storage), "Storage not created!")
        self.assertTrue(os.path.exists(os.path.join(root_storage, "test")),
                        "Test directory not created!")

    def test_get_project_folder(self):
        """
        Test the get_project_folder method which should create a folder in case
        it doesn't already exist.
        """
        project_path = self.files_helper.get_project_folder(self.test_project)
        self.assertTrue(os.path.exists(project_path), "Folder doesn't exist")

        folder_path = self.files_helper.get_project_folder(
            self.test_project, "43")
        self.assertTrue(os.path.exists(project_path), "Folder doesn't exist")
        self.assertTrue(os.path.exists(folder_path), "Folder doesn't exist")

    def test_rename_project_structure(self):
        """ Try to rename the folder structure of a project. Standard flow. """
        self.files_helper.get_project_folder(self.test_project)
        path, name = self.files_helper.rename_project_structure(
            self.test_project.name, "new_name")
        self.assertNotEqual(path, name, "Rename didn't take effect.")

    def test_rename_structure_same_name(self):
        """ Try to rename the folder structure of a project. Same name. """
        self.files_helper.get_project_folder(self.test_project)

        self.assertRaises(FileStructureException,
                          self.files_helper.rename_project_structure,
                          self.test_project.name, self.PROJECT_NAME)

    def test_remove_project_structure(self):
        """ Check that remove project structure deletes the corresponding folder. Standard flow. """
        full_path = self.files_helper.get_project_folder(self.test_project)
        self.assertTrue(os.path.exists(full_path), "Folder was not created.")

        self.files_helper.remove_project_structure(self.test_project.name)
        self.assertFalse(os.path.exists(full_path),
                         "Project folder not deleted.")

    def test_write_project_metadata(self):
        """  Write XML for test-project. """
        self.files_helper.write_project_metadata(self.test_project)
        expected_file = self.files_helper.get_project_meta_file_path(
            self.PROJECT_NAME)
        self.assertTrue(os.path.exists(expected_file))
        project_meta = XMLReader(expected_file).read_metadata()
        loaded_project = model.Project(None, None)
        loaded_project.from_dict(project_meta, self.test_user.id)
        self.assertEqual(self.test_project.name, loaded_project.name)
        self.assertEqual(self.test_project.description,
                         loaded_project.description)
        self.assertEqual(self.test_project.gid, loaded_project.gid)
        expected_dict = self.test_project.to_dict()[1]
        del expected_dict['last_updated']
        found_dict = loaded_project.to_dict()[1]
        del found_dict['last_updated']
        self.assertDictContainsSubset(expected_dict, found_dict)
        self.assertDictContainsSubset(found_dict, expected_dict)

    def test_write_operation_metadata(self):
        """
        Test that a correct XML is created for an operation.
        """
        operation = TestFactory.create_operation(
            test_user=self.test_user, test_project=self.test_project)
        expected_file = self.files_helper.get_operation_meta_file_path(
            self.PROJECT_NAME, operation.id)
        self.assertFalse(os.path.exists(expected_file))
        self.files_helper.write_operation_metadata(operation)
        self.assertTrue(os.path.exists(expected_file))
        operation_meta = XMLReader(expected_file).read_metadata()
        loaded_operation = model.Operation(None, None, None, None)
        loaded_operation.from_dict(operation_meta, dao)
        expected_dict = operation.to_dict()[1]
        found_dict = loaded_operation.to_dict()[1]
        for key, value in expected_dict.iteritems():
            self.assertEqual(str(value), str(found_dict[key]))
        # Now validate that operation metaData can be also updated
        self.assertNotEqual("new_group_name", found_dict['user_group'])
        self.files_helper.update_operation_metadata(self.PROJECT_NAME,
                                                    "new_group_name",
                                                    operation.id)
        found_dict = XMLReader(expected_file).read_metadata()
        self.assertEqual("new_group_name", found_dict['user_group'])

    def test_remove_dt_happy_flow(self):
        """
        Happy flow for removing a file related to a DataType.
        """
        folder_path = self.files_helper.get_project_folder(
            self.test_project, "42")
        datatype = MappedType()
        datatype.storage_path = folder_path
        open(datatype.get_storage_file_path(), 'w').close()
        self.assertTrue(os.path.exists(datatype.get_storage_file_path()),
                        "Test file was not created!")
        self.files_helper.remove_datatype(datatype)
        self.assertFalse(os.path.exists(datatype.get_storage_file_path()),
                         "Test file was not deleted!")

    def test_remove_dt_non_existent(self):
        """
        Try to call remove on a dataType with no H5 file.
        Should work.
        """
        folder_path = self.files_helper.get_project_folder(
            self.test_project, "42")
        datatype = MappedType()
        datatype.storage_path = folder_path
        self.assertFalse(os.path.exists(datatype.get_storage_file_path()))
        self.files_helper.remove_datatype(datatype)

    def test_move_datatype(self):
        """
        Make sure associated H5 file is moved to a correct new location.
        """
        folder_path = self.files_helper.get_project_folder(
            self.test_project, "42")
        datatype = MappedType()
        datatype.storage_path = folder_path
        open(datatype.get_storage_file_path(), 'w').close()
        self.assertTrue(os.path.exists(datatype.get_storage_file_path()),
                        "Test file was not created!")
        self.files_helper.move_datatype(datatype, self.PROJECT_NAME + '11',
                                        "43")

        self.assertFalse(os.path.exists(datatype.get_storage_file_path()),
                         "Test file was not moved!")
        datatype.storage_path = self.files_helper.get_project_folder(
            self.PROJECT_NAME + '11', "43")
        self.assertTrue(os.path.exists(datatype.get_storage_file_path()),
                        "Test file was not created!")

    def test_find_relative_path(self):
        """
        Tests that relative path is computed properly.
        """
        rel_path = self.files_helper.find_relative_path(
            "/root/up/to/here/test/it/now", "/root/up/to/here")
        self.assertEqual(rel_path, os.sep.join(["test", "it", "now"]),
                         "Did not extract relative path as expected.")

    def test_remove_files_valid(self):
        """
        Pass a valid list of files and check they are all removed.
        """
        file_list = ["test1", "test2", "test3"]
        for file_n in file_list:
            fp = open(file_n, 'w')
            fp.write('test')
            fp.close()
        for file_n in file_list:
            self.assertTrue(os.path.isfile(file_n))
        self.files_helper.remove_files(file_list)
        for file_n in file_list:
            self.assertFalse(os.path.isfile(file_n))

    def test_remove_folder(self):
        """
        Create a folder, then check that remove_folder deletes it.
        """
        folder_name = "test_folder"
        os.mkdir(folder_name)
        self.assertTrue(os.path.isdir(folder_name),
                        "Folder should be created.")
        self.files_helper.remove_folder(folder_name)
        self.assertFalse(os.path.isdir(folder_name),
                         "Folder should be deleted.")

    def test_remove_folder_non_existing_ignore_exc(self):
        """
        Call remove_folder on a non-existing folder with ignore_errors=True;
        no exception should be raised.
        """
        folder_name = "test_folder"
        self.assertFalse(os.path.isdir(folder_name),
                         "Folder should not exist before call.")
        self.files_helper.remove_folder(folder_name, ignore_errors=True)

    def test_remove_folder_non_existing(self):
        """
        Call remove_folder on a non-existing folder without ignoring errors;
        a FileStructureException is expected.
        """
        folder_name = "test_folder"
        self.assertFalse(os.path.isdir(folder_name),
                         "Folder should not exist before call.")
        self.assertRaises(FileStructureException,
                          self.files_helper.remove_folder, folder_name, False)
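
# The tests above exercise the FilesHelper API end to end. Below is a minimal
# usage sketch based only on the calls those tests make; the `project` entity
# is assumed to already exist.
from tvb.core.entities.file.files_helper import FilesHelper

helper = FilesHelper()
helper.check_created()                          # ensure the storage root exists
project_folder = helper.get_project_folder(project)  # created on first access
helper.write_project_metadata(project)          # write the project XML metadata
helper.remove_project_structure(project.name)   # delete the whole project tree
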
class WorkflowService:
    """
    Service layer for the workflow entity.
    """

    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.file_helper = FilesHelper()


    def persist_operation_state(self, operation, operation_status, message=None):
        """
        Update Operation instance state. Store it in DB and on disk.
        :param operation: Operation instance
        :param operation_status: new status
        :param message: message in case of error
        :return: operation instance changed
        """
        operation.mark_complete(operation_status, unicode(message))
        dao.store_entity(operation)
        operation = dao.get_operation_by_id(operation.id)
        self.file_helper.write_operation_metadata(operation)
        return operation


    @staticmethod
    def store_workflow_step(workflow_step):
        """
        Store a workflow step entity.
        """
        dao.store_entity(workflow_step)
    
    
    @staticmethod
    def create_and_store_workflow(project_id, burst_id, simulator_index, simulator_id, operations):
        """
        Create and store the workflow given the project, user and burst in which the workflow is created.
        :param simulator_index: the index of the simulator in the workflow
        :param simulator_id: the id of the simulator adapter
        :param operations: a list with the operations created for the simulator steps
        """
        workflows = []
        for operation in operations:
            new_workflow = model.Workflow(project_id, burst_id)
            new_workflow = dao.store_entity(new_workflow)
            workflows.append(new_workflow)
            simulation_step = model.WorkflowStep(algorithm_id=simulator_id, workflow_id=new_workflow.id,
                                                 step_index=simulator_index, static_param=operation.parameters)
            simulation_step.fk_operation = operation.id
            dao.store_entity(simulation_step)
        return workflows
        

    @staticmethod
    def set_dynamic_step_references(workflow_step, step_reference):
        """
        :param workflow_step: a valid instance of a workflow_step
        :param step_reference: the step to which every dataType reference index should be set
        
        For each dynamic parameter of the given workflow_step set the 'step_index' at step_reference. 
        """
        dynamic_params = workflow_step.dynamic_param
        for entry in dynamic_params:
            dynamic_params[entry][WorkflowStepConfiguration.STEP_INDEX_KEY] = step_reference
        workflow_step.dynamic_param = dynamic_params
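        # Illustrative example (not part of the original source), assuming
        # WorkflowStepConfiguration.STEP_INDEX_KEY == 'step_index': for a step
        # whose dynamic_param is {"time_series": {"step_index": 0}}, calling
        # set_dynamic_step_references(step, 2) rewrites it to
        # {"time_series": {"step_index": 2}}.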


    def prepare_next_step(self, last_executed_op_id):
        """
        If the operation with id 'last_executed_op_id' resulted after
        the execution of a workflow step then this method will launch
        the operation corresponding to the next step from the workflow.
        """
        try:
            current_step, next_workflow_step = self._get_data(last_executed_op_id)
            if next_workflow_step is not None:
                operation = dao.get_operation_by_id(next_workflow_step.fk_operation)
                dynamic_param_names = next_workflow_step.dynamic_workflow_param_names
                if len(dynamic_param_names) > 0:
                    op_params = json.loads(operation.parameters)
                    for param_name in dynamic_param_names:
                        dynamic_param = op_params[param_name]
                        former_step = dao.get_workflow_step_by_step_index(next_workflow_step.fk_workflow,
                                                                          dynamic_param[
                                                                              WorkflowStepConfiguration.STEP_INDEX_KEY])
                        if type(dynamic_param[WorkflowStepConfiguration.DATATYPE_INDEX_KEY]) is IntType:
                            datatypes = dao.get_results_for_operation(former_step.fk_operation)
                            op_params[param_name] = datatypes[
                                dynamic_param[WorkflowStepConfiguration.DATATYPE_INDEX_KEY]].gid
                        else:
                            previous_operation = dao.get_operation_by_id(former_step.fk_operation)
                            op_params[param_name] = json.loads(previous_operation.parameters)[
                                dynamic_param[WorkflowStepConfiguration.DATATYPE_INDEX_KEY]]
                    operation.parameters = json.dumps(op_params)
                    operation = dao.store_entity(operation)
                return operation.id
            elif current_step is not None:
                current_workflow = dao.get_workflow_by_id(current_step.fk_workflow)
                current_workflow.status = current_workflow.STATUS_FINISHED
                dao.store_entity(current_workflow)
                burst_entity = dao.get_burst_by_id(current_workflow.fk_burst)
                parallel_workflows = dao.get_workflows_for_burst(burst_entity.id)
                all_finished = True
                for workflow in parallel_workflows:
                    if workflow.status == workflow.STATUS_STARTED:
                        all_finished = False
                if all_finished:
                    self.mark_burst_finished(burst_entity)
            return None
        except Exception as excep:
            self.logger.error(excep)
            self.logger.exception(excep)
            raise WorkflowInterStepsException(excep)


    def update_executed_workflow_state(self, operation):
        """
        Used for updating the state of an executed workflow.
        Only if the operation with the specified id resulted from the execution
        of an ExecutedWorkflowStep will the state of the ExecutedWorkflow
        to which that step belongs be updated.
        """
        executed_step, _ = self._get_data(operation.id)
        if executed_step is not None:
            if operation.status == model.STATUS_ERROR:
                all_executed_steps = dao.get_workflow_steps(executed_step.fk_workflow)
                for step in all_executed_steps:
                    if step.step_index > executed_step.step_index:
                        self.logger.debug("Marking unreached operation %s with error." % step.fk_operation)
                        unreached_operation = dao.get_operation_by_id(step.fk_operation)
                        self.persist_operation_state(unreached_operation, model.STATUS_ERROR,
                                                     "Blocked by failure in step %s with message: \n\n%s." % (
                                                         executed_step.step_index, operation.additional_info))

            workflow = dao.get_workflow_by_id(executed_step.fk_workflow)
            burst = dao.get_burst_by_id(workflow.fk_burst)
            self.mark_burst_finished(burst, error_message=operation.additional_info)
            dao.store_entity(burst)


    @staticmethod
    def _get_data(operation_id):
        """
        For a given operation id, return the corresponding WorkflowStep and the NextWorkflowStep to be executed.
        """
        executed_step = dao.get_workflow_step_for_operation(operation_id)
        if executed_step is not None:
            next_workflow_step = dao.get_workflow_step_by_step_index(executed_step.fk_workflow,
                                                                     executed_step.step_index + 1)
            return executed_step, next_workflow_step
        else:
            return None, None
        
        
    def mark_burst_finished(self, burst_entity, burst_status=None, error_message=None):
        """
        Mark Burst status field.
        Also compute the 'weight' of the current burst: number of operations inside, estimated size on disk...
        
        :param burst_entity: BurstConfiguration to be updated, at finish time.
        :param burst_status: BurstConfiguration status. By default BURST_FINISHED
        :param error_message: If given, set the status to error and persist the message.
        """
        if burst_status is None:
            burst_status = model.BurstConfiguration.BURST_FINISHED
        if error_message is not None:
            burst_status = model.BurstConfiguration.BURST_ERROR

        try:
            ### If there are any DataType Groups in current Burst, update their counter.
            burst_dt_groups = dao.get_generic_entity(model.DataTypeGroup, burst_entity.id, "fk_parent_burst")
            for dt_group in burst_dt_groups:
                dt_group.count_results = dao.count_datatypes_in_group(dt_group.id)
                dt_group.disk_size, dt_group.subject = dao.get_summary_for_group(dt_group.id)
                dao.store_entity(dt_group)

            ### Update actual Burst entity fields
            burst_entity.datatypes_number = dao.count_datatypes_in_burst(burst_entity.id)
            burst_entity.workflows_number = dao.get_workflows_for_burst(burst_entity.id, is_count=True)

            burst_entity.status = burst_status
            burst_entity.error_message = error_message
            burst_entity.finish_time = datetime.now()
            dao.store_entity(burst_entity)
        except Exception:
            self.logger.exception("Could not correctly update Burst status and meta-data!")
            burst_entity.status = burst_status
            burst_entity.error_message = "Error when updating Burst Status"
            burst_entity.finish_time = datetime.now()
            dao.store_entity(burst_entity)
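
# prepare_next_step above is meant to be invoked after every operation
# completes, chaining the workflow forward. A hedged sketch of such a driving
# hook follows; launch_operation is a hypothetical launcher (e.g. on an
# OperationService), not part of the class above.
def on_operation_finished(operation):
    service = WorkflowService()
    operation = service.persist_operation_state(operation, model.STATUS_FINISHED)
    next_op_id = service.prepare_next_step(operation.id)
    if next_op_id is not None:
        launch_operation(next_op_id)  # hypothetical: run the next workflow step
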
def _adapt_simulation_monitor_params():
    """
    For previous simulations with an EEG monitor, adapt the operations to the changed input parameters.
    """
    session = SA_SESSIONMAKER()

    param_connectivity = "connectivity"
    param_eeg_proj_old = "monitors_parameters_option_EEG_projection_matrix_data"
    param_eeg_proj_new = "monitors_parameters_option_EEG_projection"
    param_eeg_sensors = "monitors_parameters_option_EEG_sensors"
    param_eeg_rm = "monitors_parameters_option_EEG_region_mapping"

    try:
        all_eeg_ops = session.query(model.Operation).filter(
            model.Operation.parameters.ilike('%"' + param_eeg_proj_old + '"%')).all()
        files_helper = FilesHelper()
        all_bursts = dict()

        for eeg_op in all_eeg_ops:
            try:
                op_params = parse_json_parameters(eeg_op.parameters)
                LOGGER.debug("Updating " + str(op_params))
                old_projection_guid = op_params[param_eeg_proj_old]
                connectivity_guid = op_params[param_connectivity]

                rm = dao.get_generic_entity(RegionMapping, connectivity_guid, "_connectivity")[0]
                dt = dao.get_generic_entity(model.DataType, old_projection_guid, "gid")[0]

                if dt.type == 'ProjectionSurfaceEEG':
                    LOGGER.debug("Previous Prj is surface: " + old_projection_guid)
                    new_projection_guid = old_projection_guid
                else:
                    new_projection_guid = session.execute(text("""SELECT DT.gid
                            FROM "MAPPED_PROJECTION_MATRIX_DATA" PMO, "DATA_TYPES" DTO,
                                 "MAPPED_PROJECTION_MATRIX_DATA" PM, "DATA_TYPES" DT
                            WHERE DTO.id=PMO.id and DT.id=PM.id and PM._sensors=PMO._sensors and
                                  PM._sources='""" + rm._surface + """' and
                                  DTO.gid='""" + old_projection_guid + """';""")).fetchall()[0][0]
                    LOGGER.debug("New Prj is surface: " + str(new_projection_guid))

                sensors_guid = session.execute(text("""SELECT _sensors
                            FROM "MAPPED_PROJECTION_MATRIX_DATA"
                            WHERE id = '""" + str(dt.id) + """';""")).fetchall()[0][0]

                del op_params[param_eeg_proj_old]
                op_params[param_eeg_proj_new] = str(new_projection_guid)
                op_params[param_eeg_sensors] = str(sensors_guid)
                op_params[param_eeg_rm] = str(rm.gid)

                eeg_op.parameters = json.dumps(op_params, cls=MapAsJson.MapAsJsonEncoder)
                LOGGER.debug("New params:" + eeg_op.parameters)
                files_helper.write_operation_metadata(eeg_op)

                burst = dao.get_burst_for_operation_id(eeg_op.id)
                if burst is not None:
                    LOGGER.debug("Updating burst:" + str(burst))
                    burst.prepare_after_load()
                    del burst.simulator_configuration[param_eeg_proj_old]
                    burst.simulator_configuration[param_eeg_proj_new] = {'value': str(new_projection_guid)}
                    burst.simulator_configuration[param_eeg_sensors] = {'value': str(sensors_guid)}
                    burst.simulator_configuration[param_eeg_rm] = {'value': str(rm.gid)}
                    burst._simulator_configuration = json.dumps(burst.simulator_configuration,
                                                                cls=MapAsJson.MapAsJsonEncoder)
                    if not all_bursts.has_key(burst.id):
                        all_bursts[burst.id] = burst

            except Exception:
                LOGGER.exception("Could not process " + str(eeg_op))

        session.add_all(all_eeg_ops)
        session.add_all(all_bursts.values())
        session.commit()

    except Exception:
        LOGGER.exception("Could not update Simulation Monitor Params")
    finally:
        session.close()
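
# The migration above follows a query-mutate-commit pattern over JSON-encoded
# operation parameters. A minimal sketch of that pattern, with illustrative
# names (session_maker and model_cls are assumptions, not TVB API):
import json

def rename_operation_param(session_maker, model_cls, old_key, new_key):
    """Rename a JSON-encoded parameter key on all matching operations."""
    session = session_maker()
    try:
        ops = session.query(model_cls).filter(
            model_cls.parameters.ilike('%"' + old_key + '"%')).all()
        for op in ops:
            params = json.loads(op.parameters)
            params[new_key] = params.pop(old_key)   # move value under new key
            op.parameters = json.dumps(params)
        session.add_all(ops)
        session.commit()
    finally:
        session.close()
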
class FilesHelperTest(TransactionalTestCase):
    """
    This class contains tests for the tvb.core.entities.file.files_helper module.
    """ 
    PROJECT_NAME = "test_proj"
           
           
    def setUp(self):
        """
        Set up the context needed by the tests.
        """
        self.files_helper = FilesHelper()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user, self.PROJECT_NAME)
    
    
    def tearDown(self):
        """ Remove generated project during tests. """
        self.delete_project_folders()
    
    
    def test_check_created(self):
        """ Test standard flows for check created. """
        self.files_helper.check_created()
        self.assertTrue(os.path.exists(root_storage), "Storage not created!")
        
        self.files_helper.check_created(os.path.join(root_storage, "test"))
        self.assertTrue(os.path.exists(root_storage), "Storage not created!")
        self.assertTrue(os.path.exists(os.path.join(root_storage, "test")), "Test directory not created!")
            
    
    def test_get_project_folder(self):
        """
        Test the get_project_folder method which should create a folder in case
        it doesn't already exist.
        """
        project_path = self.files_helper.get_project_folder(self.test_project)
        self.assertTrue(os.path.exists(project_path), "Folder doesn't exist")
        
        folder_path = self.files_helper.get_project_folder(self.test_project, "43")
        self.assertTrue(os.path.exists(project_path), "Folder doesn't exist")
        self.assertTrue(os.path.exists(folder_path), "Folder doesn't exist")
        
   
    def test_rename_project_structure(self):
        """ Try to rename the folder structure of a project. Standard flow. """
        self.files_helper.get_project_folder(self.test_project)
        path, name = self.files_helper.rename_project_structure(self.test_project.name, "new_name")
        self.assertNotEqual(path, name, "Rename didn't take effect.")


    def test_rename_structure_same_name(self):
        """ Try to rename the folder structure of a project. Same name. """
        self.files_helper.get_project_folder(self.test_project)
        
        self.assertRaises(FileStructureException, self.files_helper.rename_project_structure, 
                          self.test_project.name, self.PROJECT_NAME)


    def test_remove_project_structure(self):
        """ Check that remove project structure deletes the corresponding folder. Standard flow. """
        full_path = self.files_helper.get_project_folder(self.test_project)
        self.assertTrue(os.path.exists(full_path), "Folder was not created.")
        
        self.files_helper.remove_project_structure(self.test_project.name)
        self.assertFalse(os.path.exists(full_path), "Project folder not deleted.")
        
    
    def test_write_project_metadata(self):
        """  Write XML for test-project. """
        self.files_helper.write_project_metadata(self.test_project)
        expected_file = self.files_helper.get_project_meta_file_path(self.PROJECT_NAME)
        self.assertTrue(os.path.exists(expected_file))
        project_meta = XMLReader(expected_file).read_metadata()
        loaded_project = model.Project(None, None)
        loaded_project.from_dict(project_meta, self.test_user.id)
        self.assertEqual(self.test_project.name, loaded_project.name)
        self.assertEqual(self.test_project.description, loaded_project.description)
        self.assertEqual(self.test_project.gid, loaded_project.gid)
        expected_dict = self.test_project.to_dict()[1]
        del expected_dict['last_updated']
        found_dict = loaded_project.to_dict()[1]
        del found_dict['last_updated']
        self.assertDictContainsSubset(expected_dict, found_dict)
        self.assertDictContainsSubset(found_dict, expected_dict)
    
    
    def test_write_operation_metadata(self):
        """
        Test that a correct XML is created for an operation.
        """
        operation = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
        expected_file = self.files_helper.get_operation_meta_file_path(self.PROJECT_NAME, operation.id)
        self.assertFalse(os.path.exists(expected_file))
        self.files_helper.write_operation_metadata(operation)
        self.assertTrue(os.path.exists(expected_file))
        operation_meta = XMLReader(expected_file).read_metadata()
        loaded_operation = model.Operation(None, None, None, None)
        loaded_operation.from_dict(operation_meta, dao)
        expected_dict = operation.to_dict()[1]
        found_dict = loaded_operation.to_dict()[1]
        for key, value in expected_dict.iteritems():
            self.assertEqual(str(value), str(found_dict[key]))
        # Now validate that operation metaData can be also updated
        self.assertNotEqual("new_group_name", found_dict['user_group'])
        self.files_helper.update_operation_metadata(self.PROJECT_NAME, "new_group_name", operation.id) 
        found_dict = XMLReader(expected_file).read_metadata()  
        self.assertEqual("new_group_name", found_dict['user_group'])
        
    
    def test_remove_dt_happy_flow(self):
        """
        Happy flow for removing a file related to a DataType.
        """
        folder_path = self.files_helper.get_project_folder(self.test_project, "42")
        datatype = MappedType()
        datatype.storage_path = folder_path
        open(datatype.get_storage_file_path(), 'w').close()
        self.assertTrue(os.path.exists(datatype.get_storage_file_path()), "Test file was not created!")
        self.files_helper.remove_datatype(datatype) 
        self.assertFalse(os.path.exists(datatype.get_storage_file_path()), "Test file was not deleted!")      
        
        
    def test_remove_dt_non_existent(self):
        """
        Try to call remove on a dataType with no H5 file.
        Should work.
        """
        folder_path = self.files_helper.get_project_folder(self.test_project, "42")
        datatype = MappedType()
        datatype.storage_path = folder_path
        self.assertFalse(os.path.exists(datatype.get_storage_file_path()))
        self.files_helper.remove_datatype(datatype)
        

    def test_move_datatype(self):
        """
        Make sure associated H5 file is moved to a correct new location.
        """
        folder_path = self.files_helper.get_project_folder(self.test_project, "42")
        datatype = MappedType()
        datatype.storage_path = folder_path
        open(datatype.get_storage_file_path(), 'w').close()
        self.assertTrue(os.path.exists(datatype.get_storage_file_path()), "Test file was not created!")
        self.files_helper.move_datatype(datatype, self.PROJECT_NAME + '11', "43") 
        
        self.assertFalse(os.path.exists(datatype.get_storage_file_path()), "Test file was not moved!")
        datatype.storage_path = self.files_helper.get_project_folder(self.PROJECT_NAME + '11', "43")
        self.assertTrue(os.path.exists(datatype.get_storage_file_path()), "Test file was not created!")
        
        
    def test_find_relative_path(self):
        """
        Tests that relative path is computed properly.
        """
        rel_path = self.files_helper.find_relative_path("/root/up/to/here/test/it/now", "/root/up/to/here")
        self.assertEqual(rel_path, os.sep.join(["test", "it", "now"]), "Did not extract relative path as expected.")
        
        
    def test_remove_files_valid(self):
        """
        Pass a valid list of files and check they are all removed.
        """
        file_list = ["test1", "test2", "test3"]
        for file_n in file_list:
            fp = open(file_n, 'w')
            fp.write('test')
            fp.close()
        for file_n in file_list:
            self.assertTrue(os.path.isfile(file_n))
        self.files_helper.remove_files(file_list)
        for file_n in file_list:
            self.assertFalse(os.path.isfile(file_n))


    def test_remove_folder(self):
        """
        Create a folder, then check that remove_folder deletes it.
        """
        folder_name = "test_folder"
        os.mkdir(folder_name)
        self.assertTrue(os.path.isdir(folder_name), "Folder should be created.")
        self.files_helper.remove_folder(folder_name)
        self.assertFalse(os.path.isdir(folder_name), "Folder should be deleted.")
        
    def test_remove_folder_non_existing_ignore_exc(self):
        """
        Call remove_folder on a non-existing folder with ignore_errors=True;
        no exception should be raised.
        """
        folder_name = "test_folder"
        self.assertFalse(os.path.isdir(folder_name), "Folder should not exist before call.")
        self.files_helper.remove_folder(folder_name, ignore_errors=True)
        
        
    def test_remove_folder_non_existing(self):
        """
        Call remove_folder on a non-existing folder without ignoring errors;
        a FileStructureException is expected.
        """
        folder_name = "test_folder"
        self.assertFalse(os.path.isdir(folder_name), "Folder should not exist before call.")
        self.assertRaises(FileStructureException, self.files_helper.remove_folder, folder_name, False)
Example #17
class WorkflowService:
    """
    Service layer for the workflow entity.
    """
    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.file_helper = FilesHelper()

    def persist_operation_state(self,
                                operation,
                                operation_status,
                                message=None):
        """
        Update Operation instance state. Store it in DB and on disk.
        :param operation: Operation instance
        :param operation_status: new status
        :param message: message in case of error
        :return: operation instance changed
        """
        operation.mark_complete(operation_status, message)
        dao.store_entity(operation)
        operation = dao.get_operation_by_id(operation.id)
        self.file_helper.write_operation_metadata(operation)
        return operation

    @staticmethod
    def store_workflow_step(workflow_step):
        """
        Store a workflow step entity.
        """
        dao.store_entity(workflow_step)

    @staticmethod
    def create_and_store_workflow(project_id, burst_id, simulator_index,
                                  simulator_id, operations):
        """
        Create and store the workflow given the project, user and burst in which the workflow is created.
        :param simulator_index: the index of the simulator in the workflow
        :param simulator_id: the id of the simulator adapter
        :param operations: a list with the operations created for the simulator steps
        """
        workflows = []
        for operation in operations:
            new_workflow = model.Workflow(project_id, burst_id)
            new_workflow = dao.store_entity(new_workflow)
            workflows.append(new_workflow)
            simulation_step = model.WorkflowStep(
                algorithm_id=simulator_id,
                workflow_id=new_workflow.id,
                step_index=simulator_index,
                static_param=operation.parameters)
            simulation_step.fk_operation = operation.id
            dao.store_entity(simulation_step)
        return workflows

    @staticmethod
    def set_dynamic_step_references(workflow_step, step_reference):
        """
        :param workflow_step: a valid instance of a workflow_step
        :param step_reference: the step to which every dataType reference index should be set
        
        For each dynamic parameter of the given workflow_step set the 'step_index' at step_reference. 
        """
        dynamic_params = workflow_step.dynamic_param
        for entry in dynamic_params:
            dynamic_params[entry][
                WorkflowStepConfiguration.STEP_INDEX_KEY] = step_reference
        workflow_step.dynamic_param = dynamic_params

    def prepare_next_step(self, last_executed_op_id):
        """
        If the operation with id 'last_executed_op_id' resulted after
        the execution of a workflow step then this method will launch
        the operation corresponding to the next step from the workflow.
        """
        try:
            current_step, next_workflow_step = self._get_data(
                last_executed_op_id)
            if next_workflow_step is not None:
                operation = dao.get_operation_by_id(
                    next_workflow_step.fk_operation)
                dynamic_param_names = next_workflow_step.dynamic_workflow_param_names
                if len(dynamic_param_names) > 0:
                    op_params = json.loads(operation.parameters)
                    for param_name in dynamic_param_names:
                        dynamic_param = op_params[param_name]
                        former_step = dao.get_workflow_step_by_step_index(
                            next_workflow_step.fk_workflow, dynamic_param[
                                WorkflowStepConfiguration.STEP_INDEX_KEY])
                        if type(dynamic_param[WorkflowStepConfiguration.
                                              DATATYPE_INDEX_KEY]) is int:
                            datatypes = dao.get_results_for_operation(
                                former_step.fk_operation)
                            op_params[param_name] = datatypes[
                                dynamic_param[WorkflowStepConfiguration.
                                              DATATYPE_INDEX_KEY]].gid
                        else:
                            previous_operation = dao.get_operation_by_id(
                                former_step.fk_operation)
                            op_params[param_name] = json.loads(
                                previous_operation.parameters)[
                                    dynamic_param[WorkflowStepConfiguration.
                                                  DATATYPE_INDEX_KEY]]
                    operation.parameters = json.dumps(op_params)
                    operation = dao.store_entity(operation)
                return operation.id
            elif current_step is not None:
                current_workflow = dao.get_workflow_by_id(
                    current_step.fk_workflow)
                current_workflow.status = current_workflow.STATUS_FINISHED
                dao.store_entity(current_workflow)
                burst_entity = dao.get_burst_by_id(current_workflow.fk_burst)
                parallel_workflows = dao.get_workflows_for_burst(
                    burst_entity.id)
                all_finished = True
                for workflow in parallel_workflows:
                    if workflow.status == workflow.STATUS_STARTED:
                        all_finished = False
                if all_finished:
                    self.mark_burst_finished(burst_entity)
            return None
        except Exception as excep:
            self.logger.error(excep)
            self.logger.exception(excep)
            raise WorkflowInterStepsException(excep)

    def update_executed_workflow_state(self, operation):
        """
        Used for updating the state of an executed workflow.
        Only if the operation with the specified id resulted from the execution
        of an ExecutedWorkflowStep will the state of the ExecutedWorkflow
        to which that step belongs be updated.
        """
        executed_step, _ = self._get_data(operation.id)
        if executed_step is not None:
            if operation.status == model.STATUS_ERROR:
                all_executed_steps = dao.get_workflow_steps(
                    executed_step.fk_workflow)
                for step in all_executed_steps:
                    if step.step_index > executed_step.step_index:
                        self.logger.debug(
                            "Marking unreached operation %s with error." %
                            step.fk_operation)
                        unreached_operation = dao.get_operation_by_id(
                            step.fk_operation)
                        self.persist_operation_state(
                            unreached_operation, model.STATUS_ERROR,
                            "Blocked by failure in step %s with message: \n\n%s."
                            % (executed_step.step_index,
                               operation.additional_info))

            workflow = dao.get_workflow_by_id(executed_step.fk_workflow)
            burst = dao.get_burst_by_id(workflow.fk_burst)
            self.mark_burst_finished(burst,
                                     error_message=operation.additional_info)
            dao.store_entity(burst)

    @staticmethod
    def _get_data(operation_id):
        """
        For a given operation id, return the corresponding WorkflowStep and the NextWorkflowStep to be executed.
        """
        executed_step = dao.get_workflow_step_for_operation(operation_id)
        if executed_step is not None:
            next_workflow_step = dao.get_workflow_step_by_step_index(
                executed_step.fk_workflow, executed_step.step_index + 1)
            return executed_step, next_workflow_step
        else:
            return None, None

    def mark_burst_finished(self,
                            burst_entity,
                            burst_status=None,
                            error_message=None):
        """
        Mark Burst status field.
        Also compute the 'weight' of the current burst: number of operations inside, estimated size on disk...
        
        :param burst_entity: BurstConfiguration to be updated, at finish time.
        :param burst_status: BurstConfiguration status. By default BURST_FINISHED
        :param error_message: If given, set the status to error and persist the message.
        """
        if burst_status is None:
            burst_status = model.BurstConfiguration.BURST_FINISHED
        if error_message is not None:
            burst_status = model.BurstConfiguration.BURST_ERROR

        try:
            ### If there are any DataType Groups in current Burst, update their counter.
            burst_dt_groups = dao.get_generic_entity(model.DataTypeGroup,
                                                     burst_entity.id,
                                                     "fk_parent_burst")
            for dt_group in burst_dt_groups:
                dt_group.count_results = dao.count_datatypes_in_group(
                    dt_group.id)
                dt_group.disk_size, dt_group.subject = dao.get_summary_for_group(
                    dt_group.id)
                dao.store_entity(dt_group)

            ### Update actual Burst entity fields
            burst_entity.datatypes_number = dao.count_datatypes_in_burst(
                burst_entity.id)
            burst_entity.workflows_number = dao.get_workflows_for_burst(
                burst_entity.id, is_count=True)

            burst_entity.status = burst_status
            burst_entity.error_message = error_message
            burst_entity.finish_time = datetime.now()
            dao.store_entity(burst_entity)
        except Exception:
            self.logger.exception(
                "Could not correctly update Burst status and meta-data!")
            burst_entity.status = burst_status
            burst_entity.error_message = "Error when updating Burst Status"
            burst_entity.finish_time = datetime.now()
            dao.store_entity(burst_entity)
Example #18
class OperationService:
    """
    Class responsible for preparing an operation launch. 
    It will prepare parameters, and decide whether the operation is to be
    executed immediately or sent to the cluster.
    """
    ATT_UID = "uid"

    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.workflow_service = WorkflowService()
        self.file_helper = FilesHelper()

    ##########################################################################################
    ######## Methods related to launching operations start here ##############################
    ##########################################################################################

    def initiate_operation(self,
                           current_user,
                           project_id,
                           adapter_instance,
                           visible=True,
                           **kwargs):
        """
        Gets the parameters of the computation from the previous inputs form,
        and launches a computation (on the cluster or locally).
        
        Invoke the custom method on an Adapter instance, and make sure that,
        once the operation has finished, the correct results are stored in the DB.
        """
        if not isinstance(adapter_instance, ABCAdapter):
            self.logger.warning("Inconsistent Adapter Class:" +
                                str(adapter_instance.__class__))
            raise LaunchException("Developer Exception!!")

        ### Store Operation entity.
        algo = adapter_instance.stored_adapter
        algo_category = dao.get_category_by_id(algo.fk_category)

        operations = self.prepare_operations(current_user.id, project_id, algo,
                                             algo_category, {}, visible,
                                             **kwargs)[0]

        if isinstance(adapter_instance, ABCSynchronous):
            if len(operations) > 1:
                raise LaunchException(
                    "Synchronous operations are not supporting ranges!")
            if len(operations) < 1:
                self.logger.warning("No operation was defined")
                raise LaunchException("Invalid empty Operation!!!")
            return self.initiate_prelaunch(operations[0], adapter_instance,
                                           **kwargs)
        else:
            return self._send_to_cluster(operations, adapter_instance,
                                         current_user.username)

    @staticmethod
    def _prepare_metadata(initial_metadata, algo_category, operation_group,
                          submit_data):
        """
        Gather metadata from the submitted fields and the algorithm about to be
        executed. Will populate STATE and GROUP in the metadata.
        """
        metadata = copy(initial_metadata)

        user_group = None
        if DataTypeMetaData.KEY_OPERATION_TAG in submit_data:
            user_group = submit_data[DataTypeMetaData.KEY_OPERATION_TAG]

        if operation_group is not None:
            metadata[DataTypeMetaData.KEY_OPERATION_TAG] = operation_group.name

        if DataTypeMetaData.KEY_TAG_1 in submit_data:
            metadata[DataTypeMetaData.KEY_TAG_1] = submit_data[
                DataTypeMetaData.KEY_TAG_1]

        metadata[DataTypeMetaData.KEY_STATE] = algo_category.defaultdatastate

        return metadata, user_group

    @staticmethod
    def _read_set(values):
        """ Parse a committed UI possible list of values, into a set converted into string."""
        if isinstance(values, list):
            set_values = []
            values_str = ""
            for val in values:
                if val not in set_values:
                    set_values.append(val)
                    values_str = values_str + " " + str(val)
            values = values_str
        return str(values).strip()
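        # Illustrative usage (not part of the original source):
        # _read_set(["alpha", "beta", "alpha"]) deduplicates while preserving
        # order and returns the string "alpha beta".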

    def group_operation_launch(self,
                               user_id,
                               project_id,
                               algorithm_id,
                               category_id,
                               existing_dt_group=None,
                               **kwargs):
        """
        Create and prepare the launch of a group of operations.
        """
        category = dao.get_category_by_id(category_id)
        algorithm = dao.get_algorithm_by_id(algorithm_id)
        ops, _ = self.prepare_operations(user_id,
                                         project_id,
                                         algorithm,
                                         category, {},
                                         existing_dt_group=existing_dt_group,
                                         **kwargs)
        for operation in ops:
            self.launch_operation(operation.id, True)

    def _prepare_metric_operation(self, sim_operation):
        # type: (Operation) -> None
        metric_algo = dao.get_algorithm_by_module(
            TimeseriesMetricsAdapter.__module__,
            TimeseriesMetricsAdapter.__name__)

        time_series_index = dao.get_generic_entity(TimeSeriesIndex,
                                                   sim_operation.id,
                                                   'fk_from_operation')[0]
        ts_metrics_adapter_form = TimeseriesMetricsAdapterForm()
        ts_metrics_adapter_form.fill_from_trait(
            BaseTimeseriesMetricAlgorithm())
        ts_metrics_adapter_form.time_series.data = time_series_index.gid
        op_params = json.dumps(ts_metrics_adapter_form.get_dict())
        range_values = sim_operation.range_values
        metadata = {
            DataTypeMetaData.KEY_BURST: time_series_index.fk_parent_burst
        }
        metadata, user_group = self._prepare_metadata(
            metadata, metric_algo.algorithm_category, None, op_params)
        meta_str = json.dumps(metadata)

        parent_burst = dao.get_generic_entity(
            BurstConfiguration2, time_series_index.fk_parent_burst, 'id')[0]
        metric_operation_group_id = parent_burst.metric_operation_group_id
        metric_operation = Operation(sim_operation.fk_launched_by,
                                     sim_operation.fk_launched_in,
                                     metric_algo.id,
                                     op_params,
                                     meta_str,
                                     op_group_id=metric_operation_group_id,
                                     range_values=range_values)
        metric_operation.visible = False
        operation = dao.store_entity(metric_operation)

        metrics_datatype_group = dao.get_generic_entity(
            DataTypeGroup, metric_operation_group_id, 'fk_operation_group')[0]
        if metrics_datatype_group.fk_from_operation is None:
            metrics_datatype_group.fk_from_operation = metric_operation.id

        return operation

    def prepare_operations(self,
                           user_id,
                           project_id,
                           algorithm,
                           category,
                           metadata,
                           visible=True,
                           existing_dt_group=None,
                           **kwargs):
        """
        Do all the necessary preparations for storing an operation. When a range
        of values is given, create an operation group and one operation for each
        possible instance from the range.
        :param metadata: Initial MetaData with potential Burst identification inside.
        """
        operations = []

        available_args, group = self._prepare_group(project_id,
                                                    existing_dt_group, kwargs)
        if len(available_args) > TvbProfile.current.MAX_RANGE_NUMBER:
            raise LaunchException(
                "Too big range specified. You should limit the"
                " resulting operations to %d" %
                TvbProfile.current.MAX_RANGE_NUMBER)
        else:
            self.logger.debug("Launching a range with %d operations..." %
                              len(available_args))
        group_id = None
        if group is not None:
            group_id = group.id
        metadata, user_group = self._prepare_metadata(metadata, category,
                                                      group, kwargs)

        self.logger.debug("Saving Operation(userId=" + str(user_id) +
                          ",projectId=" + str(project_id) + "," +
                          str(metadata) + ",algorithmId=" + str(algorithm.id) +
                          ", ops_group= " + str(group_id) + ")")

        visible_operation = visible and category.display is False
        meta_str = json.dumps(metadata)
        for (one_set_of_args, range_vals) in available_args:
            range_values = json.dumps(range_vals) if range_vals else None
            operation = Operation(user_id,
                                  project_id,
                                  algorithm.id,
                                  json.dumps(one_set_of_args),
                                  meta_str,
                                  op_group_id=group_id,
                                  user_group=user_group,
                                  range_values=range_values)
            operation.visible = visible_operation
            operations.append(operation)
        operations = dao.store_entities(operations)

        if group is not None:
            burst_id = None
            if DataTypeMetaData.KEY_BURST in metadata:
                burst_id = metadata[DataTypeMetaData.KEY_BURST]
            if existing_dt_group is None:
                datatype_group = DataTypeGroup(
                    group,
                    operation_id=operations[0].id,
                    fk_parent_burst=burst_id,
                    state=metadata[DataTypeMetaData.KEY_STATE])
                dao.store_entity(datatype_group)
            else:
                # Reset count
                existing_dt_group.count_results = None
                dao.store_entity(existing_dt_group)

        return operations, group
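        # Illustrative note (not part of the original source): when kwargs
        # contains range parameters (keys produced by self._range_name),
        # _prepare_group expands available_args into one (args, range_values)
        # tuple per point of the range, and one Operation is stored per tuple
        # above, all sharing the same OperationGroup.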

    def prepare_operations_for_workflowsteps(self, workflow_step_list,
                                             workflows, user_id, burst_id,
                                             project_id, group,
                                             sim_operations):
        """
        Create and store Operation entities from a list of Workflow Steps.
        len(workflows) x len(workflow_step_list) Operations will be generated.
        For every step in workflow_step_list one OperationGroup and one DataTypeGroup will be created 
        (in case of PSE).
        """

        for step in workflow_step_list:
            operation_group = None
            if (group is not None) and not isinstance(step, WorkflowStepView):
                operation_group = OperationGroup(project_id=project_id,
                                                 ranges=group.range_references)
                operation_group = dao.store_entity(operation_group)

            operation = None
            metadata = {DataTypeMetaData.KEY_BURST: burst_id}
            algo_category = dao.get_algorithm_by_id(step.fk_algorithm)
            if algo_category is not None:
                algo_category = algo_category.algorithm_category

            for wf_idx, workflow in enumerate(workflows):
                cloned_w_step = step.clone()
                cloned_w_step.fk_workflow = workflow.id
                dynamic_params = cloned_w_step.dynamic_param
                op_params = cloned_w_step.static_param
                op_params.update(dynamic_params)
                range_values = None
                group_id = None
                if operation_group is not None:
                    group_id = operation_group.id
                    range_values = sim_operations[wf_idx].range_values

                if not isinstance(step, WorkflowStepView):
                    ## For visualization steps, do not create operations, as those are not really needed.
                    metadata, user_group = self._prepare_metadata(
                        metadata, algo_category, operation_group, op_params)
                    operation = Operation(user_id,
                                          project_id,
                                          step.fk_algorithm,
                                          json.dumps(
                                              op_params,
                                              cls=MapAsJson.MapAsJsonEncoder),
                                          meta=json.dumps(metadata),
                                          op_group_id=group_id,
                                          range_values=range_values,
                                          user_group=user_group)
                    operation.visible = step.step_visible
                    operation = dao.store_entity(operation)
                    cloned_w_step.fk_operation = operation.id

                dao.store_entity(cloned_w_step)

            if operation_group is not None and operation is not None:
                datatype_group = DataTypeGroup(
                    operation_group,
                    operation_id=operation.id,
                    fk_parent_burst=burst_id,
                    state=metadata[DataTypeMetaData.KEY_STATE])
                dao.store_entity(datatype_group)

    def initiate_prelaunch(self, operation, adapter_instance, **kwargs):
        """
        Public method.
        This should be the common entry point for calling an adapter method.
        """
        result_msg = ""
        temp_files = []
        try:
            unique_id = None
            if self.ATT_UID in kwargs:
                unique_id = kwargs[self.ATT_UID]
            #TODO: this currently keeps both ways to display forms
            if 'SimulatorAdapter' not in adapter_instance.__class__.__name__:
                if adapter_instance.get_input_tree() is None:
                    filtered_kwargs = adapter_instance.get_form(
                    ).get_form_values()
                else:
                    filtered_kwargs = adapter_instance.prepare_ui_inputs(
                        kwargs)

                params = dict()
                for k, value_ in filtered_kwargs.items():
                    params[str(k)] = value_
                self.logger.debug("Launching operation " + str(operation.id) +
                                  " with " + str(filtered_kwargs))
            else:
                params = kwargs
                self.logger.debug("Launching operation " + str(operation.id) +
                                  " with " + str(kwargs))

            operation = dao.get_operation_by_id(
                operation.id)  # Load Lazy fields

            disk_space_per_user = TvbProfile.current.MAX_DISK_SPACE
            pending_op_disk_space = dao.compute_disk_size_for_started_ops(
                operation.fk_launched_by)
            user_disk_space = dao.compute_user_generated_disk_size(
                operation.fk_launched_by)  # From kB to Bytes
            available_space = disk_space_per_user - pending_op_disk_space - user_disk_space

            result_msg, nr_datatypes = adapter_instance._prelaunch(
                operation, unique_id, available_space, **params)
            operation = dao.get_operation_by_id(operation.id)
            ## Update DB stored kwargs for search purposes, to contain only valuable params (no unselected options)
            operation.parameters = json.dumps(kwargs)
            operation.mark_complete(STATUS_FINISHED)
            if nr_datatypes > 0:
                #### Write operation meta-XML only if some result are returned
                self.file_helper.write_operation_metadata(operation)
            dao.store_entity(operation)
            adapter_form = adapter_instance.get_form()
            try:
                temp_files = adapter_form.temporary_files
            except AttributeError:
                pass

            self._remove_files(temp_files)

        except zipfile.BadZipfile as excep:
            msg = "The uploaded file is not a valid ZIP!"
            self._handle_exception(excep, temp_files, msg, operation)
        except TVBException as excep:
            self._handle_exception(excep, temp_files, excep.message, operation)
        except MemoryError:
            msg = (
                "Could not execute operation because there is not enough free memory."
                + " Please adjust operation parameters and re-launch it.")
            self._handle_exception(Exception(msg), temp_files, msg, operation)
        except Exception as excep1:
            msg = "Could not launch Operation with the given input data!"
            self._handle_exception(excep1, temp_files, msg, operation)

        if operation.fk_operation_group and 'SimulatorAdapter' in operation.algorithm.classname:
            next_op = self._prepare_metric_operation(operation)
            self.launch_operation(next_op.id)
        return result_msg

    def _send_to_cluster(self,
                         operations,
                         adapter_instance,
                         current_username="******"):
        """ Initiate operation on cluster"""
        for operation in operations:
            try:
                BACKEND_CLIENT.execute(str(operation.id), current_username,
                                       adapter_instance)
            except Exception as excep:
                self._handle_exception(excep, {}, "Could not start operation!",
                                       operation)

        return operations

    def launch_operation(self,
                         operation_id,
                         send_to_cluster=False,
                         adapter_instance=None):
        """
        Method exposed for Burst-Workflow related calls.
        It is used for cascading operation in the same workflow.
        """
        if operation_id is not None:
            operation = dao.get_operation_by_id(operation_id)
            if adapter_instance is None:
                algorithm = operation.algorithm
                adapter_instance = ABCAdapter.build_adapter(algorithm)
            parsed_params = utils.parse_json_parameters(operation.parameters)
            if 'SimulatorAdapter' not in adapter_instance.__class__.__name__:
                adapter_form = adapter_instance.get_form()()
                adapter_form.fill_from_post(parsed_params)
                adapter_instance.submit_form(adapter_form)

            if send_to_cluster:
                self._send_to_cluster([operation], adapter_instance,
                                      operation.user.username)
            else:
                self.initiate_prelaunch(operation, adapter_instance,
                                        **parsed_params)

    def _handle_exception(self,
                          exception,
                          temp_files,
                          message,
                          operation=None):
        """
        Common way to treat exceptions:
            - remove temporary files, if any
            - set status ERROR on current operation (if any)
            - log exception
        """
        self.logger.exception(message)
        if operation is not None:
            BurstService2().persist_operation_state(operation, STATUS_ERROR,
                                                    str(exception))
        self._remove_files(temp_files)
        exception.message = message
        raise exception.with_traceback(
            sys.exc_info()[2]
        )  # when rethrowing in python this is required to preserve the stack trace

    def _remove_files(self, file_list):
        """
        Remove any files that exist in the given file_list.
        Currently used to delete temporary files created during an operation.
        """
        for pth in file_list:
            pth = str(pth)
            try:
                if os.path.exists(pth) and os.path.isfile(pth):
                    os.remove(pth)
                    self.logger.debug("We no longer need file:" + pth +
                                      " => deleted")
                else:
                    self.logger.warning("Trying to remove not existent file:" +
                                        pth)
            except OSError:
                self.logger.exception("Could not cleanup file!")

    @staticmethod
    def _range_name(range_no):
        return PARAM_RANGE_PREFIX + str(range_no)

    def _prepare_group(self, project_id, existing_dt_group, kwargs):
        """
        Create and store OperationGroup entity, or return None
        """
        # Standard ranges as accepted from UI
        range1_values = self.get_range_values(kwargs, self._range_name(1))
        range2_values = self.get_range_values(kwargs, self._range_name(2))
        available_args = self.__expand_arguments([(kwargs, None)],
                                                 range1_values,
                                                 self._range_name(1))
        available_args = self.__expand_arguments(available_args, range2_values,
                                                 self._range_name(2))
        is_group = False
        ranges = []
        if self._range_name(1) in kwargs and range1_values is not None:
            is_group = True
            ranges.append(
                json.dumps((kwargs[self._range_name(1)], range1_values)))
        if self._range_name(2) in kwargs and range2_values is not None:
            is_group = True
            ranges.append(
                json.dumps((kwargs[self._range_name(2)], range2_values)))
        # Now for additional ranges which might be the case for the 'model exploration'
        last_range_idx = 3
        ranger_name = self._range_name(last_range_idx)
        while ranger_name in kwargs:
            values_for_range = self.get_range_values(kwargs, ranger_name)
            available_args = self.__expand_arguments(available_args,
                                                     values_for_range,
                                                     ranger_name)
            last_range_idx += 1
            ranger_name = self._range_name(last_range_idx)
        if last_range_idx > 3:
            ranges = []  # Since we only have 3 fields in db for this, just hide it
        if not is_group:
            group = None
        elif existing_dt_group is None:
            group = OperationGroup(project_id=project_id, ranges=ranges)
            group = dao.store_entity(group)
        else:
            group = existing_dt_group.parent_operation_group

        return available_args, group

    def get_range_values(self, kwargs, ranger_name):
        """
        For the ranger given by ranger_name look in kwargs and return
        the array with all the possible values.
        """
        if ranger_name not in kwargs:
            return None
        if str(kwargs[ranger_name]) not in kwargs:
            return None

        range_values = []
        try:
            range_data = json.loads(str(kwargs[str(kwargs[ranger_name])]))
        except Exception:
            try:
                range_data = [
                    x.strip()
                    for x in str(kwargs[str(kwargs[ranger_name])]).split(',')
                    if len(x.strip()) > 0
                ]
                return range_data
            except Exception:
                self.logger.exception("Could not launch operation !")
                raise LaunchException("Could not launch with no data from:" +
                                      str(ranger_name))
        if type(range_data) in (list, tuple):
            return range_data

        if (constants.ATT_MINVALUE in range_data) and (constants.ATT_MAXVALUE
                                                       in range_data):
            lo_val = float(range_data[constants.ATT_MINVALUE])
            hi_val = float(range_data[constants.ATT_MAXVALUE])
            step = float(range_data[constants.ATT_STEP])
            range_values = list(
                Range(lo=lo_val, hi=hi_val,
                      step=step).to_array())  #, mode=Range.MODE_INCLUDE_BOTH))
        else:
            for possible_value in range_data:
                if range_data[possible_value]:
                    range_values.append(possible_value)
        return range_values
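
    # Hedged usage sketch (values below are hypothetical): the UI submits an
    # indirection, where kwargs[ranger_name] holds the *name* of the ranged
    # parameter, and kwargs[<that name>] holds the range spec -- either a JSON
    # dict keyed by constants.ATT_MINVALUE / ATT_MAXVALUE / ATT_STEP, or a
    # comma-separated enumeration (the literal key strings may differ):
    #   kwargs = {'range_1': 'conduction_speed',
    #             'conduction_speed': '{"minValue": 1.0, "maxValue": 3.0, "stepValue": 1.0}'}
    #   self.get_range_values(kwargs, 'range_1')  # -> numeric array built by Range
    #   kwargs = {'range_1': 'model', 'model': 'Generic2dOscillator, Epileptor'}
    #   self.get_range_values(kwargs, 'range_1')  # -> ['Generic2dOscillator', 'Epileptor']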

    @staticmethod
    def __expand_arguments(arguments_list, range_values, range_title):
        """
        Parse the arguments submitted from UI (flattened form).
        If any ranger is found, return a list of arguments for all possible operations.
        """
        if range_values is None:
            return arguments_list
        result = []
        for value in range_values:
            for args, range_ in arguments_list:
                kw_new = copy(args)
                range_new = copy(range_)
                kw_new[kw_new[range_title]] = value
                if range_new is None:
                    range_new = {}
                range_new[kw_new[range_title]] = value
                del kw_new[range_title]
                result.append((kw_new, range_new))
        return result
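
    # Illustrative trace of the expansion above (hypothetical values): starting
    # from a single flattened form and one ranger,
    #   arguments_list = [({'range_1': 'speed', 'speed': None, 'other': 5}, None)]
    #   range_values = [1.0, 2.0], range_title = 'range_1'
    # each range value yields a kwargs copy with the ranged parameter resolved
    # and the 'range_1' indirection key removed:
    #   [({'speed': 1.0, 'other': 5}, {'speed': 1.0}),
    #    ({'speed': 2.0, 'other': 5}, {'speed': 2.0})]
    # Applying the method once per ranger produces the Cartesian product of all ranges.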

    ##########################################################################################
    ######## Methods related to stopping and restarting operations start here ################
    ##########################################################################################

    def stop_operation(self, operation_id):
        """
        Stop the operation given by the operation id.
        """
        return BACKEND_CLIENT.stop_operation(int(operation_id))


class WorkflowService:
    """
    Service layer for the Workflow entity.
    """
    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.file_helper = FilesHelper()

    def persist_operation_state(self,
                                operation,
                                operation_status,
                                message=None):
        """
        Update Operation instance state. Store it in DB and on HDD.
        :param operation: Operation instance
        :param operation_status: new status
        :param message: message in case of error
        :return: operation instance changed
        """
        operation.mark_complete(operation_status, unicode(message))
        dao.store_entity(operation)
        operation = dao.get_operation_by_id(operation.id)
        self.file_helper.write_operation_metadata(operation)
        return operation

    @staticmethod
    def store_workflow_step(workflow_step):
        """
        Store a workflow step entity.
        """
        dao.store_entity(workflow_step)

    @staticmethod
    def create_and_store_workflow(project_id, burst_id, simulator_index,
                                  simulator_id, operations):
        """
        Create and store the workflow given the project and burst in which the workflow is created.
        :param simulator_index: the index of the simulator in the workflow
        :param simulator_id: the id of the simulator adapter
        :param operations: a list with the operations created for the simulator steps
        """
        workflows = []
        for operation in operations:
            new_workflow = model.Workflow(project_id, burst_id)
            new_workflow = dao.store_entity(new_workflow)
            workflows.append(new_workflow)
            simulation_step = model.WorkflowStep(
                algorithm_id=simulator_id,
                workflow_id=new_workflow.id,
                step_index=simulator_index,
                static_param=operation.parameters)
            simulation_step.fk_operation = operation.id
            dao.store_entity(simulation_step)
        return workflows

    @staticmethod
    def set_dynamic_step_references(workflow_step, step_reference):
        """
        :param workflow_step: a valid instance of a workflow_step
        :param step_reference: the step to which every dataType reference index should be set

        For each dynamic parameter of the given workflow_step, set the 'step_index' to step_reference.
        """
        dynamic_params = workflow_step.dynamic_param
        for entry in dynamic_params:
            dynamic_params[entry][
                WorkflowStepConfiguration.STEP_INDEX_KEY] = step_reference
        workflow_step.dynamic_param = dynamic_params
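
    # Hedged sketch of the structure being edited (the parameter name is
    # hypothetical, the keys come from WorkflowStepConfiguration):
    #   workflow_step.dynamic_param = {
    #       'time_series': {WorkflowStepConfiguration.DATATYPE_INDEX_KEY: 0,
    #                       WorkflowStepConfiguration.STEP_INDEX_KEY: 1}}
    # After set_dynamic_step_references(workflow_step, 2) every entry has its
    # STEP_INDEX_KEY set to 2, so its input is resolved from workflow step 2.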

    def prepare_next_step(self, last_executed_op_id):
        """
        If the operation with id 'last_executed_op_id' resulted after
        the execution of a workflow step then this method will launch
        the operation corresponding to the next step from the workflow.
        """
        try:
            current_step, next_workflow_step = self._get_data(
                last_executed_op_id)
            if next_workflow_step is not None:
                operation = dao.get_operation_by_id(
                    next_workflow_step.fk_operation)
                dynamic_param_names = next_workflow_step.dynamic_workflow_param_names
                if len(dynamic_param_names) > 0:
                    op_params = json.loads(operation.parameters)
                    for param_name in dynamic_param_names:
                        dynamic_param = op_params[param_name]
                        former_step = dao.get_workflow_step_by_step_index(
                            next_workflow_step.fk_workflow, dynamic_param[
                                WorkflowStepConfiguration.STEP_INDEX_KEY])
                        if type(dynamic_param[WorkflowStepConfiguration.
                                              DATATYPE_INDEX_KEY]) is IntType:
                            datatypes = dao.get_results_for_operation(
                                former_step.fk_operation)
                            op_params[param_name] = datatypes[
                                dynamic_param[WorkflowStepConfiguration.
                                              DATATYPE_INDEX_KEY]].gid
                        else:
                            previous_operation = dao.get_operation_by_id(
                                former_step.fk_operation)
                            op_params[param_name] = json.loads(
                                previous_operation.parameters)[
                                    dynamic_param[WorkflowStepConfiguration.
                                                  DATATYPE_INDEX_KEY]]
                    operation.parameters = json.dumps(op_params)
                    operation = dao.store_entity(operation)
                return operation.id
            elif current_step is not None:
                current_workflow = dao.get_workflow_by_id(
                    current_step.fk_workflow)
                current_workflow.status = current_workflow.STATUS_FINISHED
                dao.store_entity(current_workflow)
                burst_entity = dao.get_burst_by_id(current_workflow.fk_burst)
                parallel_workflows = dao.get_workflows_for_burst(
                    burst_entity.id)
                all_finished = True
                for workflow in parallel_workflows:
                    if workflow.status == workflow.STATUS_STARTED:
                        all_finished = False
                if all_finished:
                    self.mark_burst_finished(burst_entity)
            return None
        except Exception as excep:
            self.logger.error(excep)
            self.logger.exception(excep)
            raise WorkflowInterStepsException(excep)


class ProjectService:
    """
    Services layer for Project entities.
    """


    def __init__(self):
        self.logger = get_logger(__name__)
        self.structure_helper = FilesHelper()


    def store_project(self, current_user, is_create, selected_id, **data):
        """
        We want to create/update a project entity.
        """
        # Validate Unique Name
        new_name = data["name"]
        if len(new_name) < 1:
            raise ProjectServiceException("Invalid project name!")
        projects_no = dao.count_projects_for_name(new_name, selected_id)
        if projects_no > 0:
            err = {'name': 'Please choose another name, this one is used!'}
            raise formencode.Invalid("Duplicate Name Error", {}, None, error_dict=err)
        started_operations = dao.get_operation_numbers(selected_id)[1]
        if started_operations > 0:
            raise ProjectServiceException("A project can not be renamed while operations are still running!")
        if is_create:
            current_proj = model.Project(new_name, current_user.id, data["description"])
            self.structure_helper.get_project_folder(current_proj)
        else:
            try:
                current_proj = dao.get_project_by_id(selected_id)
            except Exception as excep:
                self.logger.exception("An error has occurred!")
                raise ProjectServiceException(str(excep))
            if current_proj.name != new_name:
                self.structure_helper.rename_project_structure(current_proj.name, new_name)
            current_proj.name = new_name
            current_proj.description = data["description"]
        # Commit to make sure we have a valid ID
        current_proj.refresh_update_date()
        self.structure_helper.write_project_metadata(current_proj)
        current_proj = dao.store_entity(current_proj)

        # Retrieve, to initialize lazy attributes
        current_proj = dao.get_project_by_id(current_proj.id)
        # Update share settings on current Project entity
        visited_pages = []
        prj_admin = current_proj.administrator.username
        if 'visited_pages' in data and data['visited_pages']:
            visited_pages = data['visited_pages'].split(',')
        for page in visited_pages:
            members = UserService.retrieve_all_users(prj_admin, int(page))[0]
            members = [m.id for m in members]
            dao.delete_members_for_project(current_proj.id, members)
        selected_user_ids = data["users"]
        dao.add_members_to_project(current_proj.id, selected_user_ids)
        # Finish operation
        self.logger.debug("Edit/Save OK for project:" + str(current_proj.id) + ' by user:'******'-'
                result["count"] = one_op[2]
                result["gid"] = one_op[13]
                if one_op[3] is not None and one_op[3]:
                    try:
                        operation_group = dao.get_generic_entity(model.OperationGroup, one_op[3])[0]
                        result["group"] = operation_group.name
                        result["group"] = result["group"].replace("_", " ")
                        result["operation_group_id"] = operation_group.id
                        datatype_group = dao.get_datatypegroup_by_op_group_id(one_op[3])
                        result["datatype_group_gid"] = datatype_group.gid
                        result["gid"] = operation_group.gid
                        ## Filter only viewers for current DataTypeGroup entity:
                        result["view_groups"] = FlowService().get_visualizers_for_group(datatype_group.gid)
                    except Exception:
                        self.logger.exception("We will ignore group on entity:" + str(one_op))
                        result["datatype_group_gid"] = None
                else:
                    result['group'] = None
                    result['datatype_group_gid'] = None
                result["algorithm"] = dao.get_algorithm_by_id(one_op[4])
                result["user"] = dao.get_user_by_id(one_op[5])
                if type(one_op[6]) in (str, unicode):
                    result["create"] = string2date(str(one_op[6]))
                else:
                    result["create"] = one_op[6]
                if type(one_op[7]) in (str, unicode):
                    result["start"] = string2date(str(one_op[7]))
                else:
                    result["start"] = one_op[7]
                if type(one_op[8]) in (str, unicode):
                    result["complete"] = string2date(str(one_op[8]))
                else:
                    result["complete"] = one_op[8]

                if result["complete"] is not None and result["start"] is not None:
                    result["duration"] = format_timedelta(result["complete"] - result["start"])
                result["status"] = one_op[9]
                result["additional"] = one_op[10]
                result["visible"] = True if one_op[11] > 0 else False
                result['operation_tag'] = one_op[12]
                result['figures'] = None
                if not result['group']:
                    datatype_results = dao.get_results_for_operation(result['id'])
                    result['results'] = []
                    for dt in datatype_results:
                        dt_loaded = ABCAdapter.load_entity_by_gid(dt.gid)
                        if dt_loaded:
                            result['results'].append(dt_loaded)
                        else:
                            self.logger.warn("Could not retrieve datatype %s" % str(dt))

                    operation_figures = dao.get_figures_for_operation(result['id'])

                    # Compute the full path to the figure / image on disk
                    for figure in operation_figures:
                        figures_folder = self.structure_helper.get_images_folder(figure.project.name)
                        figure_full_path = os.path.join(figures_folder, figure.file_path)
                        # Compute the path available from browser
                        figure.figure_path = utils.path2url_part(figure_full_path)

                    result['figures'] = operation_figures
                else:
                    result['results'] = None
                operations.append(result)
            except Exception:
                ## We got an exception when processing one Operation Row. We will continue with the rest of the rows.
                self.logger.exception("Could not prepare operation for display:" + str(one_op))
        return selected_project, total_ops_nr, operations, pages_no
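
    # Hedged usage sketch (names hypothetical): a controller unpacks the tuple
    # returned above to render one page of the operations board:
    #   project, total_ops, op_rows, pages = service.retrieve_project_full(prj_id)
    #   # op_rows is the list of display dicts built in the loop above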


    def retrieve_projects_for_user(self, user_id, current_page=1):
        """
        Return a list with all Projects visible for current user.
        """
        start_idx = PROJECTS_PAGE_SIZE * (current_page - 1)
        total = dao.get_projects_for_user(user_id, is_count=True)
        available_projects = dao.get_projects_for_user(user_id, start_idx, PROJECTS_PAGE_SIZE)
        pages_no = total // PROJECTS_PAGE_SIZE + (1 if total % PROJECTS_PAGE_SIZE else 0)
        for prj in available_projects:
            fns, sta, err, canceled, pending = dao.get_operation_numbers(prj.id)
            prj.operations_finished = fns
            prj.operations_started = sta
            prj.operations_error = err
            prj.operations_canceled = canceled
            prj.operations_pending = pending
            prj.disk_size = dao.get_project_disk_size(prj.id)
            prj.disk_size_human = format_bytes_human(prj.disk_size)
        self.logger.debug("Displaying " + str(len(available_projects)) + " projects in UI for user " + str(user_id))
        return available_projects, pages_no


    @staticmethod
    def get_linkable_projects_for_user(user_id, data_id):
        """
        Find projects which are visible to the current user, and into which the current datatype hasn't been linked yet.
        """
        return dao.get_linkable_projects_for_user(user_id, data_id)


    @transactional
    def remove_project(self, project_id):
        """
        Remove Project from DB and File Storage.
        """
        try:
            project2delete = dao.get_project_by_id(project_id)

            self.logger.debug("Deleting project: id=" + str(project_id) + ' name=' + project2delete.name)
            project_bursts = dao.get_bursts_for_project(project_id)
            for burst in project_bursts:
                dao.remove_entity(burst.__class__, burst.id)

            project_datatypes = dao.get_datatypes_in_project(project_id)
            for one_data in project_datatypes:
                self.remove_datatype(project_id, one_data.gid, True)

            links = dao.get_links_for_project(project_id)
            for one_link in links:
                dao.remove_entity(model.Links, one_link.id)

            self.structure_helper.remove_project_structure(project2delete.name)
            dao.delete_project(project_id)
            self.logger.debug("Deleted project: id=" + str(project_id) + ' name=' + project2delete.name)

        except RemoveDataTypeException as excep:
            self.logger.exception("Could not execute operation Node Remove!")
            raise ProjectServiceException(str(excep))
        except FileStructureException as excep:
            self.logger.exception("Could not delete because of rights!")
            raise ProjectServiceException(str(excep))
        except Exception as excep:
            self.logger.exception(str(excep))
            raise ProjectServiceException(str(excep))


    # ----------------- Methods for populating Data-Structure Page ---------------

    @staticmethod
    def get_datatype_in_group(group):
        """
        Return all dataTypes that are the result of the same DTgroup.
        """
        return dao.get_datatype_in_group(datatype_group_id=group)


    @staticmethod
    def get_datatypes_from_datatype_group(datatype_group_id):
        """
        Retrieve all dataType which are part from the given dataType group.
        """
        return dao.get_datatypes_from_datatype_group(datatype_group_id)


    @staticmethod
    def load_operation_by_gid(operation_gid):
        """ Retrieve loaded Operation from DB"""
        return dao.get_operation_by_gid(operation_gid)


    @staticmethod
    def get_operation_group_by_id(operation_group_id):
        """ Loads OperationGroup from DB"""
        return dao.get_operationgroup_by_id(operation_group_id)


    @staticmethod
    def get_operation_group_by_gid(operation_group_gid):
        """ Loads OperationGroup from DB"""
        return dao.get_operationgroup_by_gid(operation_group_gid)


    @staticmethod
    def get_operations_in_group(operation_group):
        """ Return all the operations from an operation group. """
        return dao.get_operations_in_group(operation_group.id)


    @staticmethod
    def is_upload_operation(operation_gid):
        """ Returns True only if the operation with the given GID is an upload operation. """
        return dao.is_upload_operation(operation_gid)


    @staticmethod
    def get_all_operations_for_uploaders(project_id):
        """ Returns all finished upload operations. """
        return dao.get_all_operations_for_uploaders(project_id)

    def set_operation_and_group_visibility(self, entity_gid, is_visible, is_operation_group=False):
        """
        Sets the operation visibility.

        If 'is_operation_group' is True then this method will change the visibility for all
        the operations from the OperationGroup with the GID field equal to 'entity_gid'.
        """

        def set_visibility(op):
            # workaround:
            # 'reload' the operation so that it has the project property set.
            # get_operations_in_group does not eager load it and now we're out of a sqlalchemy session
            # write_operation_metadata requires that property
            op = dao.get_operation_by_id(op.id)
            # end hack
            op.visible = is_visible
            self.structure_helper.write_operation_metadata(op)
            dao.store_entity(op)

        def set_group_descendants_visibility(operation_group_id):
            ops_in_group = dao.get_operations_in_group(operation_group_id)
            for group_op in ops_in_group:
                set_visibility(group_op)

        if is_operation_group:
            op_group_id = dao.get_operationgroup_by_gid(entity_gid).id
            set_group_descendants_visibility(op_group_id)
        else:
            operation = dao.get_operation_by_gid(entity_gid)
            # we ensure that if the operation belongs to a group then the visibility will be changed for the entire group
            if operation.fk_operation_group is not None:
                set_group_descendants_visibility(operation.fk_operation_group)
            else:
                set_visibility(operation)


    def get_operation_details(self, operation_gid, is_group):
        """
        :returns: an entity OperationOverlayDetails filled with all information for current operation details.
        """

        if is_group:
            operation_group = self.get_operation_group_by_gid(operation_gid)
            operation = dao.get_operations_in_group(operation_group.id, False, True)
            ## Reload, to make sure all attributes lazy are populated as well.
            operation = dao.get_operation_by_gid(operation.gid)
            no_of_op_in_group = dao.get_operations_in_group(operation_group.id, is_count=True)
            datatype_group = self.get_datatypegroup_by_op_group_id(operation_group.id)
            count_result = dao.count_datatypes_in_group(datatype_group.id)

        else:
            operation = dao.get_operation_by_gid(operation_gid)
            if operation is None:
                return None
            no_of_op_in_group = 1
            count_result = dao.count_resulted_datatypes(operation.id)

        username = dao.get_user_by_id(operation.fk_launched_by).username
        burst = dao.get_burst_for_operation_id(operation.id)
        datatypes_param, all_special_params = self._review_operation_inputs(operation.gid)

        op_pid = dao.get_operation_process_for_operation(operation.id)
        op_details = OperationOverlayDetails(operation, username, len(datatypes_param),
                                             count_result, burst, no_of_op_in_group, op_pid)

        ## Add all parameter which are set differently by the user on this Operation.
        if all_special_params is not None:
            op_details.add_scientific_fields(all_special_params)
        return op_details


    @staticmethod
    def get_filterable_meta():
        """
        Contains all the attributes by which
        the user can structure the tree of DataTypes
        """
        return DataTypeMetaData.get_filterable_meta()


    def get_project_structure(self, project, visibility_filter, first_level, second_level, filter_value):
        """
        Find all DataTypes (including the linked ones and the groups) relevant for the current project.
        In case of a problem, will return an empty list.
        """
        metadata_list = []
        dt_list = dao.get_data_in_project(project.id, visibility_filter, filter_value)

        for dt in dt_list:
            # Prepare the DT results from DB, for usage in controller, by converting into DataTypeMetaData objects
            data = {}
            is_group = False
            group_op = None
            dt_entity = dao.get_datatype_by_gid(dt.gid)
            if dt_entity is None:
                self.logger.warning("Ignored entity (possibly removed DT class)" + str(dt))
                continue
            ## Filter by dt.type, otherwise Links to individual DT inside a group will be mistaken
            if dt.type == "DataTypeGroup" and dt.parent_operation.operation_group is not None:
                is_group = True
                group_op = dt.parent_operation.operation_group

            # All these fields are necessary here for dynamic Tree levels.
            data[DataTypeMetaData.KEY_DATATYPE_ID] = dt.id
            data[DataTypeMetaData.KEY_GID] = dt.gid
            data[DataTypeMetaData.KEY_NODE_TYPE] = dt.type
            data[DataTypeMetaData.KEY_STATE] = dt.state
            data[DataTypeMetaData.KEY_SUBJECT] = str(dt.subject)
            data[DataTypeMetaData.KEY_TITLE] = dt_entity.display_name
            data[DataTypeMetaData.KEY_RELEVANCY] = dt.visible
            data[DataTypeMetaData.KEY_LINK] = dt.parent_operation.fk_launched_in != project.id

            data[DataTypeMetaData.KEY_TAG_1] = dt.user_tag_1 if dt.user_tag_1 else ''
            data[DataTypeMetaData.KEY_TAG_2] = dt.user_tag_2 if dt.user_tag_2 else ''
            data[DataTypeMetaData.KEY_TAG_3] = dt.user_tag_3 if dt.user_tag_3 else ''
            data[DataTypeMetaData.KEY_TAG_4] = dt.user_tag_4 if dt.user_tag_4 else ''
            data[DataTypeMetaData.KEY_TAG_5] = dt.user_tag_5 if dt.user_tag_5 else ''

            # Operation related fields:
            operation_name = CommonDetails.compute_operation_name(
                dt.parent_operation.algorithm.algorithm_category.displayname,
                dt.parent_operation.algorithm.displayname)
            data[DataTypeMetaData.KEY_OPERATION_TYPE] = operation_name
            data[DataTypeMetaData.KEY_OPERATION_ALGORITHM] = dt.parent_operation.algorithm.displayname
            data[DataTypeMetaData.KEY_AUTHOR] = dt.parent_operation.user.username
            data[DataTypeMetaData.KEY_OPERATION_TAG] = group_op.name if is_group else dt.parent_operation.user_group
            data[DataTypeMetaData.KEY_OP_GROUP_ID] = group_op.id if is_group else None

            completion_date = dt.parent_operation.completion_date
            string_year = completion_date.strftime(MONTH_YEAR_FORMAT) if completion_date is not None else ""
            string_month = completion_date.strftime(DAY_MONTH_YEAR_FORMAT) if completion_date is not None else ""
            data[DataTypeMetaData.KEY_DATE] = date2string(completion_date) if (completion_date is not None) else ''
            data[DataTypeMetaData.KEY_CREATE_DATA_MONTH] = string_year
            data[DataTypeMetaData.KEY_CREATE_DATA_DAY] = string_month

            data[DataTypeMetaData.KEY_BURST] = dt._parent_burst.name if dt._parent_burst is not None else '-None-'

            metadata_list.append(DataTypeMetaData(data, dt.invalid))

        return StructureNode.metadata2tree(metadata_list, first_level, second_level, project.id, project.name)


    @staticmethod
    def get_datatype_details(datatype_gid):
        """
        :returns: a tuple. The first entry is an instance of DataTypeOverlayDetails,\
            the second contains all the possible states for the specified dataType,\
            and the third is the DataType entity itself (or None on error).

        """
        meta_atts = DataTypeOverlayDetails()
        states = DataTypeMetaData.STATES
        try:
            datatype_result = dao.get_datatype_details(datatype_gid)
            meta_atts.fill_from_datatype(datatype_result, datatype_result._parent_burst)
            return meta_atts, states, datatype_result
        except Exception:
            ## We ignore exception here (it was logged above, and we want to return no details).
            return meta_atts, states, None


    def _remove_project_node_files(self, project_id, gid, skip_validation=False):
        """
        Delegate removal of a node in the structure of the project.
        In case of a problem will THROW StructureException.
        """
        try:
            project = self.find_project(project_id)
            datatype = dao.get_datatype_by_gid(gid)
            links = dao.get_links_for_datatype(datatype.id)
            if links:
                was_link = False
                for link in links:
                    # This means it's only a link and we need to remove it
                    if link.fk_from_datatype == datatype.id and link.fk_to_project == project.id:
                        dao.remove_entity(model.Links, link.id)
                        was_link = True
                if not was_link:
                    # Create a clone of the operation
                    new_op = model.Operation(dao.get_system_user().id,
                                             links[0].fk_to_project,
                                             datatype.parent_operation.fk_from_algo,
                                             datatype.parent_operation.parameters,
                                             datatype.parent_operation.meta_data,
                                             datatype.parent_operation.status,
                                             datatype.parent_operation.start_date,
                                             datatype.parent_operation.completion_date,
                                             datatype.parent_operation.fk_operation_group,
                                             datatype.parent_operation.additional_info,
                                             datatype.parent_operation.user_group,
                                             datatype.parent_operation.range_values)
                    new_op = dao.store_entity(new_op)
                    to_project = self.find_project(links[0].fk_to_project).name
                    new_op_loaded = dao.get_operation_by_id(new_op.id)
                    self.structure_helper.write_operation_metadata(new_op_loaded)
                    self.structure_helper.move_datatype(datatype, to_project, str(new_op.id))
                    datatype.set_operation_id(new_op.id)
                    datatype.parent_operation = new_op
                    dao.store_entity(datatype)
                    dao.remove_entity(model.Links, links[0].id)
            else:
                specific_remover = get_remover(datatype.type)(datatype)
                specific_remover.remove_datatype(skip_validation)
                self.structure_helper.remove_datatype(datatype)

        except RemoveDataTypeException:
            self.logger.exception("Could not execute operation Node Remove!")
            raise
        except FileStructureException:
            self.logger.exception("Remove operation failed")
            raise StructureException("Remove operation failed for unknown reasons.Please contact system administrator.")


    def remove_operation(self, operation_id):
        """
        Remove a given operation
        """
        operation = dao.try_get_operation_by_id(operation_id)
        if operation is not None:
            self.logger.debug("Deleting operation %s " % operation)
            datatypes_for_op = dao.get_results_for_operation(operation_id)
            for dt in reversed(datatypes_for_op):
                self.remove_datatype(operation.project.id, dt.gid, False)
            dao.remove_entity(model.Operation, operation.id)
            self.logger.debug("Finished deleting operation %s " % operation)
        else:
            self.logger.warning("Attempt to delete operation with id=%s which no longer exists." % operation_id)


    def remove_datatype(self, project_id, datatype_gid, skip_validation=False):
        """
        Method used for removing a dataType. If the given dataType is a DatatypeGroup
        or a dataType from a DataTypeGroup then this method will remove the entire group.
        The operation(s) used for creating the dataType(s) will also be removed.
        """
        datatype = dao.get_datatype_by_gid(datatype_gid)
        if datatype is None:
            self.logger.warning("Attempt to delete DT[%s] which no longer exists." % datatype_gid)
            return

        is_datatype_group = False
        if dao.is_datatype_group(datatype_gid):
            is_datatype_group = True
        elif datatype.fk_datatype_group is not None:
            is_datatype_group = True
            datatype = dao.get_datatype_by_id(datatype.fk_datatype_group)

        operations_set = [datatype.fk_from_operation]

        correct = True

        if is_datatype_group:
            self.logger.debug("Removing datatype group %s" % datatype)
            data_list = dao.get_datatypes_from_datatype_group(datatype.id)
            for adata in data_list:
                self._remove_project_node_files(project_id, adata.gid, skip_validation)
                if adata.fk_from_operation not in operations_set:
                    operations_set.append(adata.fk_from_operation)

            datatype_group = dao.get_datatype_group_by_gid(datatype.gid)
            dao.remove_datatype(datatype_gid)
            correct = correct and dao.remove_entity(model.OperationGroup, datatype_group.fk_operation_group)
        else:
            self.logger.debug("Removing datatype %s" % datatype)
            self._remove_project_node_files(project_id, datatype.gid, skip_validation)

        ## Remove Operation entity in case no other DataType needs them.
        project = dao.get_project_by_id(project_id)
        for operation_id in operations_set:
            dependent_dt = dao.get_generic_entity(model.DataType, operation_id, "fk_from_operation")
            if len(dependent_dt) > 0:
                ### Do not remove Operation in case DataType still exist referring it.
                continue
            correct = correct and dao.remove_entity(model.Operation, operation_id)
            ## Make sure Operation folder is removed
            self.structure_helper.remove_operation_data(project.name, datatype.fk_from_operation)

        if not correct:
            raise RemoveDataTypeException("Could not remove DataType " + str(datatype_gid))


    def update_metadata(self, submit_data):
        """
        Update DataType/ DataTypeGroup metadata
        THROW StructureException when input data is invalid.
        """
        new_data = dict()
        for key in DataTypeOverlayDetails().meta_attributes_list:
            if key in submit_data:
                new_data[key] = submit_data[key]

        if new_data[CommonDetails.CODE_OPERATION_TAG] == '':
            new_data[CommonDetails.CODE_OPERATION_TAG] = None
        try:
            if (CommonDetails.CODE_OPERATION_GROUP_ID in new_data
                    and new_data[CommonDetails.CODE_OPERATION_GROUP_ID]
                    and new_data[CommonDetails.CODE_OPERATION_GROUP_ID] != ''):
                # We need to edit a group
                all_data_in_group = dao.get_datatype_in_group(operation_group_id=
                                                              new_data[CommonDetails.CODE_OPERATION_GROUP_ID])
                if len(all_data_in_group) < 1:
                    raise StructureException("Inconsistent group, can not be updated!")
                datatype_group = dao.get_generic_entity(model.DataTypeGroup, all_data_in_group[0].fk_datatype_group)[0]
                all_data_in_group.append(datatype_group)
                for datatype in all_data_in_group:
                    new_data[CommonDetails.CODE_GID] = datatype.gid
                    self._edit_data(datatype, new_data, True)
            else:
                # Get the required DataType and operation from DB to store changes that will be done in XML.
                gid = new_data[CommonDetails.CODE_GID]
                datatype = dao.get_datatype_by_gid(gid)
                self._edit_data(datatype, new_data)
        except Exception as excep:
            self.logger.exception(excep)
            raise StructureException(str(excep))


    def _edit_data(self, datatype, new_data, from_group=False):
        """
        Private method, used for editing a meta-data XML file and a DataType row
        for a given custom DataType entity with new dictionary of data from UI.
        """
        if isinstance(datatype, MappedType) and not os.path.exists(datatype.get_storage_file_path()):
            if not datatype.invalid:
                datatype.invalid = True
                dao.store_entity(datatype)
            return
        # 1. First update Operation fields:
        #    Update group field if possible
        new_group_name = new_data[CommonDetails.CODE_OPERATION_TAG]
        empty_group_value = (new_group_name is None or new_group_name == "")
        if from_group:
            if empty_group_value:
                raise StructureException("Empty group is not allowed!")

            group = dao.get_generic_entity(model.OperationGroup, new_data[CommonDetails.CODE_OPERATION_GROUP_ID])
            if group and len(group) > 0 and new_group_name != group[0].name:
                group = group[0]
                exists_group = dao.get_generic_entity(model.OperationGroup, new_group_name, 'name')
                if exists_group:
                    raise StructureException("Group '" + new_group_name + "' already exists.")
                group.name = new_group_name
                dao.store_entity(group)
        else:
            operation = dao.get_operation_by_id(datatype.fk_from_operation)
            operation.user_group = new_group_name
            dao.store_entity(operation)

        # 2. Update dataType fields:
        datatype.subject = new_data[DataTypeOverlayDetails.DATA_SUBJECT]
        datatype.state = new_data[DataTypeOverlayDetails.DATA_STATE]
        if DataTypeOverlayDetails.DATA_TAG_1 in new_data:
            datatype.user_tag_1 = new_data[DataTypeOverlayDetails.DATA_TAG_1]
        if DataTypeOverlayDetails.DATA_TAG_2 in new_data:
            datatype.user_tag_2 = new_data[DataTypeOverlayDetails.DATA_TAG_2]
        if DataTypeOverlayDetails.DATA_TAG_3 in new_data:
            datatype.user_tag_3 = new_data[DataTypeOverlayDetails.DATA_TAG_3]
        if DataTypeOverlayDetails.DATA_TAG_4 in new_data:
            datatype.user_tag_4 = new_data[DataTypeOverlayDetails.DATA_TAG_4]
        if DataTypeOverlayDetails.DATA_TAG_5 in new_data:
            datatype.user_tag_5 = new_data[DataTypeOverlayDetails.DATA_TAG_5]

        datatype = dao.store_entity(datatype)
        # 3. Update MetaData in H5 as well.
        datatype.persist_full_metadata()
        # 4. Update the group_name/user_group into the operation meta-data file
        operation = dao.get_operation_by_id(datatype.fk_from_operation)
        self.structure_helper.update_operation_metadata(operation.project.name, new_group_name,
                                                        str(datatype.fk_from_operation), from_group)


    def get_datatype_and_datatypegroup_inputs_for_operation(self, operation_gid, selected_filter):
        """
        Returns the dataTypes that are used as input parameters for the given operation.
        'selected_filter' - is expected to be a visibility filter.

        If any dataType is part of a dataType group then the dataType group will
        be returned instead of that dataType.
        """
        all_datatypes = self._review_operation_inputs(operation_gid)[0]
        datatype_inputs = []
        for datatype in all_datatypes:
            if selected_filter.display_name == StaticFiltersFactory.RELEVANT_VIEW:
                if datatype.visible:
                    datatype_inputs.append(datatype)
            else:
                datatype_inputs.append(datatype)
        datatypes = []
        datatype_groups = dict()
        for data_type in datatype_inputs:
            if data_type.fk_datatype_group is None:
                datatypes.append(data_type)
            elif data_type.fk_datatype_group not in datatype_groups:
                dt_group = dao.get_datatype_by_id(data_type.fk_datatype_group)
                datatype_groups[data_type.fk_datatype_group] = dt_group

        datatypes.extend([v for _, v in six.iteritems(datatype_groups)])
        return datatypes


    def _review_operation_inputs(self, operation_gid):
        """
        :returns: A list of DataTypes that are used as input parameters for the specified operation,
                 and a dictionary with all operation parameters different from the default ones.
        """
        operation = dao.get_operation_by_gid(operation_gid)
        parameters = json.loads(operation.parameters)
        try:
            adapter = ABCAdapter.build_adapter(operation.algorithm)
            return adapter.review_operation_inputs(parameters)

        except IntrospectionException:
            self.logger.warning("Could not find adapter class for operation %s" % operation_gid)
            inputs_datatypes = []
            changed_parameters = dict(Warning="Algorithm was Removed. We can not offer more details")
            for submit_param in parameters.values():
                self.logger.debug("Searching DT by GID %s" % submit_param)
                datatype = ABCAdapter.load_entity_by_gid(str(submit_param))
                if datatype is not None:
                    inputs_datatypes.append(datatype)
            return inputs_datatypes, changed_parameters


    def get_datatypes_inputs_for_operation_group(self, group_id, selected_filter):
        """
        Returns the dataType inputs for an operation group. If more dataTypes
        are part of the same dataType group then only the dataType group will
        be returned instead of them.
        """
        operations_gids = dao.get_operations_in_group(group_id, only_gids=True)
        op_group_inputs = dict()
        for gid in operations_gids:
            op_inputs = self.get_datatype_and_datatypegroup_inputs_for_operation(gid[0], selected_filter)
            for datatype in op_inputs:
                op_group_inputs[datatype.id] = datatype
        return op_group_inputs.values()


    @staticmethod
    def get_results_for_operation(operation_id, selected_filter=None):
        """
        Retrieve the DataTypes entities resulted after the execution of the given operation.
        """
        return dao.get_results_for_operation(operation_id, selected_filter)


    @staticmethod
    def get_operations_for_datatype_group(datatype_group_id, visibility_filter, only_in_groups=False):
        """
        Returns all the operations which use as an input parameter a dataType from the given DataTypeGroup.
        visibility_filter - is a filter used for retrieving all the operations or only the relevant ones.

        If only_in_groups is True then this method will return only the operations that are
        part of an operation group, otherwise it will return only the operations that
        are NOT part of an operation group.
        """
        if visibility_filter.display_name != StaticFiltersFactory.RELEVANT_VIEW:
            return dao.get_operations_for_datatype_group(datatype_group_id, only_relevant=False,
                                                         only_in_groups=only_in_groups)
        return dao.get_operations_for_datatype_group(datatype_group_id, only_in_groups=only_in_groups)


    @staticmethod
    def get_operations_for_datatype(datatype_gid, visibility_filter, only_in_groups=False):
        """
        Returns all the operations which use as an input parameter the dataType with the specified GID.

        If only_in_groups is True then this method will return only the operations that are part
        of an operation group, otherwise it will return only the operations that are NOT part of an operation group.
        """
        if visibility_filter.display_name != StaticFiltersFactory.RELEVANT_VIEW:
            return dao.get_operations_for_datatype(datatype_gid, only_relevant=False, only_in_groups=only_in_groups)
        return dao.get_operations_for_datatype(datatype_gid, only_in_groups=only_in_groups)


    @staticmethod
    def get_datatype_by_id(datatype_id):
        """Retrieve a DataType DB reference by its id."""
        return dao.get_datatype_by_id(datatype_id)


    @staticmethod
    def get_datatypegroup_by_gid(datatypegroup_gid):
        """ Returns the DataTypeGroup with the specified gid. """
        return dao.get_datatype_group_by_gid(datatypegroup_gid)

    @staticmethod
    def count_datatypes_generated_from(datatype_gid):
        """
        Count the datatypes resulted from operations that had as
        input the datatype given by 'datatype_gid'.
        """
        return dao.count_datatypes_generated_from(datatype_gid)


    @staticmethod
    def get_datatypegroup_by_op_group_id(operation_group_id):
        """ Returns the DataTypeGroup with the specified id. """
        return dao.get_datatypegroup_by_op_group_id(operation_group_id)


    @staticmethod
    def set_datatype_visibility(datatype_gid, is_visible):
        """
        Sets the dataType visibility. If the given dataType is a dataType group or it is part of a
        dataType group then this method will set the visibility for each dataType from this group.
        """

        def set_visibility(dt):
            """ set visibility flag, persist in db and h5"""
            dt.visible = is_visible
            dt = dao.store_entity(dt)
            dt.persist_full_metadata()

        def set_group_descendants_visibility(datatype_group_id):
            datatypes_in_group = dao.get_datatypes_from_datatype_group(datatype_group_id)
            for group_dt in datatypes_in_group:
                set_visibility(group_dt)

        datatype = dao.get_datatype_by_gid(datatype_gid)

        if isinstance(datatype, DataTypeGroup):  # datatype is a group
            set_group_descendants_visibility(datatype.id)
        elif datatype.fk_datatype_group is not None:  # datatype is member of a group
            set_group_descendants_visibility(datatype.fk_datatype_group)
            # the datatype to be updated is the parent datatype group
            datatype = dao.get_datatype_by_id(datatype.fk_datatype_group)

        # update the datatype or datatype group.
        set_visibility(datatype)
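
        # Hedged usage sketch (gid value hypothetical): hiding any member of a
        # group cascades to the entire DataTypeGroup and its parent group entity:
        #   ProjectService.set_datatype_visibility(member_gid, False)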


    @staticmethod
    def is_datatype_group(datatype_gid):
        """ Used to check if the dataType with the specified GID is a DataTypeGroup. """
        return dao.is_datatype_group(datatype_gid)


def _adapt_simulation_monitor_params():
    """
    For previous simulations with an EEG monitor, adjust for the change of input parameters.
    """
    session = SA_SESSIONMAKER()

    param_connectivity = "connectivity"
    param_eeg_proj_old = "monitors_parameters_option_EEG_projection_matrix_data"
    param_eeg_proj_new = "monitors_parameters_option_EEG_projection"
    param_eeg_sensors = "monitors_parameters_option_EEG_sensors"
    param_eeg_rm = "monitors_parameters_option_EEG_region_mapping"

    try:
        all_eeg_ops = session.query(model.Operation).filter(
            model.Operation.parameters.ilike('%"' + param_eeg_proj_old + '"%')).all()
        files_helper = FilesHelper()
        all_bursts = dict()

        for eeg_op in all_eeg_ops:
            try:
                op_params = parse_json_parameters(eeg_op.parameters)
                LOGGER.debug("Updating " + str(op_params))
                old_projection_guid = op_params[param_eeg_proj_old]
                connectivity_guid = op_params[param_connectivity]

                rm = dao.get_generic_entity(RegionMapping, connectivity_guid, "_connectivity")[0]
                dt = dao.get_generic_entity(model.DataType, old_projection_guid, "gid")[0]

                if dt.type == 'ProjectionSurfaceEEG':
                    LOGGER.debug("Previous Prj is surface: " + old_projection_guid)
                    new_projection_guid = old_projection_guid
                else:
                    new_projection_guid = session.execute(text("""SELECT DT.gid
                            FROM "MAPPED_PROJECTION_MATRIX_DATA" PMO, "DATA_TYPES" DTO,
                                 "MAPPED_PROJECTION_MATRIX_DATA" PM, "DATA_TYPES" DT
                            WHERE DTO.id=PMO.id and DT.id=PM.id and PM._sensors=PMO._sensors and
                                  PM._sources='""" + rm._surface + """' and
                                  DTO.gid='""" + old_projection_guid + """';""")).fetchall()[0][0]
                    LOGGER.debug("New Prj is surface: " + str(new_projection_guid))

                sensors_guid = session.execute(text("""SELECT _sensors
                            FROM "MAPPED_PROJECTION_MATRIX_DATA"
                            WHERE id = '""" + str(dt.id) + """';""")).fetchall()[0][0]

                del op_params[param_eeg_proj_old]
                op_params[param_eeg_proj_new] = str(new_projection_guid)
                op_params[param_eeg_sensors] = str(sensors_guid)
                op_params[param_eeg_rm] = str(rm.gid)

                eeg_op.parameters = json.dumps(op_params, cls=MapAsJson.MapAsJsonEncoder)
                LOGGER.debug("New params:" + eeg_op.parameters)
                files_helper.write_operation_metadata(eeg_op)

                burst = dao.get_burst_for_operation_id(eeg_op.id)
                if burst is not None:
                    LOGGER.debug("Updating burst:" + str(burst))
                    burst.prepare_after_load()
                    del burst.simulator_configuration[param_eeg_proj_old]
                    burst.simulator_configuration[param_eeg_proj_new] = {'value': str(new_projection_guid)}
                    burst.simulator_configuration[param_eeg_sensors] = {'value': str(sensors_guid)}
                    burst.simulator_configuration[param_eeg_rm] = {'value': str(rm.gid)}
                    burst._simulator_configuration = json.dumps(burst.simulator_configuration,
                                                                cls=MapAsJson.MapAsJsonEncoder)
                    if burst.id not in all_bursts:
                        all_bursts[burst.id] = burst

            except Exception:
                LOGGER.exception("Could not process " + str(eeg_op))

        session.add_all(all_eeg_ops)
        session.add_all(list(all_bursts.values()))
        session.commit()

    except Exception:
        LOGGER.exception("Could not update Simulation Monitor Params")
    finally:
        session.close()
Beispiel #22
class BurstService(object):
    LAUNCH_NEW = 'new'
    LAUNCH_BRANCH = 'branch'

    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.file_helper = FilesHelper()

    def mark_burst_finished(self,
                            burst_entity,
                            burst_status=None,
                            error_message=None):
        """
        Mark Burst status field.
        Also compute the 'weight' for the current burst: number of operations inside, estimated size on disk...

        :param burst_entity: BurstConfiguration to be updated, at finish time.
        :param burst_status: BurstConfiguration status. By default BURST_FINISHED
        :param error_message: If given, set the status to error and perpetuate the message.
        """
        if burst_status is None:
            burst_status = BurstConfiguration.BURST_FINISHED
        if error_message is not None:
            burst_status = BurstConfiguration.BURST_ERROR

        try:
            # If there are any DataType Groups in current Burst, update their counter.
            burst_dt_groups = dao.get_generic_entity(DataTypeGroup,
                                                     burst_entity.id,
                                                     "fk_parent_burst")
            for dt_group in burst_dt_groups:
                dt_group.count_results = dao.count_datatypes_in_group(
                    dt_group.id)
                dt_group.disk_size, dt_group.subject = dao.get_summary_for_group(
                    dt_group.id)
                dao.store_entity(dt_group)

            # Update actual Burst entity fields
            burst_entity.datatypes_number = dao.count_datatypes_in_burst(
                burst_entity.id)

            burst_entity.status = burst_status
            burst_entity.error_message = error_message
            burst_entity.finish_time = datetime.now()
            dao.store_entity(burst_entity)
            self.update_burst_configuration_h5(burst_entity)
        except Exception:
            self.logger.exception(
                "Could not correctly update Burst status and meta-data!")
            burst_entity.status = burst_status
            burst_entity.error_message = "Error when updating Burst Status"
            burst_entity.finish_time = datetime.now()
            dao.store_entity(burst_entity)
            self.update_burst_configuration_h5(burst_entity)
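
    # Usage sketch (hypothetical; assumes `burst` is a persisted BurstConfiguration):
    #
    #     service = BurstService()
    #     service.mark_burst_finished(burst)                         # -> BURST_FINISHED
    #     service.mark_burst_finished(burst, error_message="boom")   # -> BURST_ERROR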

    def persist_operation_state(self,
                                operation,
                                operation_status,
                                message=None):
        """
        Update Operation instance state. Store it in DB and on HDD/
        :param operation: Operation instance
        :param operation_status: new status
        :param message: message in case of error
        :return: operation instance changed
        """
        operation.mark_complete(operation_status, message)
        dao.store_entity(operation)
        operation = dao.get_operation_by_id(operation.id)
        self.file_helper.write_operation_metadata(operation)
        # update burst also
        burst_config = self.get_burst_for_operation_id(operation.id)
        if burst_config is not None:
            burst_status = STATUS_FOR_OPERATION.get(operation_status)
            self.mark_burst_finished(burst_config, burst_status, message)
        return operation
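
    # Usage sketch (hypothetical `op`; STATUS_FINISHED stands for whatever
    # operation-status constant this module imports). Marking an operation
    # complete also closes its parent burst, with the burst status derived
    # from the STATUS_FOR_OPERATION mapping used above:
    #
    #     op = BurstService().persist_operation_state(op, STATUS_FINISHED)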

    def get_burst_for_operation_id(self, operation_id):
        return dao.get_burst_for_operation_id(operation_id)

    def rename_burst(self, burst_id, new_name):
        """
        Rename the burst given by burst_id, setting it's new name to
        burst_name.
        """
        burst = dao.get_burst_by_id(burst_id)
        burst.name = new_name
        dao.store_entity(burst)
        self.update_burst_configuration_h5(burst)

    @staticmethod
    def get_available_bursts(project_id):
        """
        Return all the burst for the current project.
        """
        bursts = dao.get_bursts_for_project(
            project_id, page_size=MAX_BURSTS_DISPLAYED) or []
        # for burst in bursts:
        #     burst.prepare_after_load()
        return bursts

    @staticmethod
    def populate_burst_disk_usage(bursts):
        """
        Adds a disk_usage field to each burst object.
        The disk usage is computed as the sum of the datatypes generated by a burst
        """
        sizes = dao.compute_bursts_disk_size([b.id for b in bursts])
        for b in bursts:
            b.disk_size = format_bytes_human(sizes[b.id])
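
    # e.g. (illustrative): after this call each burst carries a human-readable size:
    #
    #     BurstService.populate_burst_disk_usage(bursts)
    #     bursts[0].disk_size   # something like "1.2 GB", per format_bytes_human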

    def update_history_status(self, id_list):
        """
        For each burst_id received in the id_list read new status from DB and return a list [id, new_status] pair.
        """
        result = []
        for b_id in id_list:
            burst = dao.get_burst_by_id(b_id)
            # burst.prepare_after_load()
            if burst is not None:
                if burst.status == burst.BURST_RUNNING:
                    running_time = datetime.now() - burst.start_time
                else:
                    running_time = burst.finish_time - burst.start_time
                running_time = format_timedelta(running_time,
                                                most_significant2=False)

                if burst.status == burst.BURST_ERROR:
                    msg = 'Check the Operations page for the error message'
                else:
                    msg = ''
                result.append([
                    burst.id, burst.status, burst.is_group, msg, running_time
                ])
            else:
                self.logger.debug("Could not find burst with id=" + str(b_id) +
                                  ". Might have been deleted by user!!")
        return result
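
    # The returned rows have the shape [id, status, is_group, message,
    # running_time]; illustrative value only (the exact time format comes
    # from format_timedelta):
    #
    #     [[42, 'running', False, '', '0:03:10']]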

    # TODO: We should implement these two methods
    def stop_burst(self, burst):
        raise NotImplementedError

    def cancel_or_remove_burst(self, burst_id):
        raise NotImplementedError

    @staticmethod
    def update_simulation_fields(burst_id, op_simulation_id, simulation_gid):
        burst = dao.get_burst_by_id(burst_id)
        burst.fk_simulation = op_simulation_id
        burst.simulator_gid = simulation_gid.hex
        burst = dao.store_entity(burst)
        return burst

    def update_burst_configuration_h5(self, burst_configuration):
        # type: (BurstConfiguration) -> None
        project = dao.get_project_by_id(burst_configuration.fk_project)
        storage_path = self.file_helper.get_project_folder(
            project, str(burst_configuration.fk_simulation))
        self.store_burst_configuration(burst_configuration, storage_path)

    def load_burst_configuration(self, burst_config_id):
        # type: (int) -> BurstConfiguration
        burst_config = dao.get_burst_by_id(burst_config_id)
        return burst_config

    def prepare_burst_for_pse(self, burst_config):
        # type: (BurstConfiguration) -> None
        if burst_config.range2:
            ranges = [burst_config.range1, burst_config.range2]
        else:
            ranges = [burst_config.range1]

        operation_group = OperationGroup(burst_config.fk_project,
                                         ranges=ranges)
        operation_group = dao.store_entity(operation_group)

        metric_operation_group = OperationGroup(burst_config.fk_project,
                                                ranges=ranges)
        metric_operation_group = dao.store_entity(metric_operation_group)

        burst_config.operation_group = operation_group
        burst_config.fk_operation_group = operation_group.id
        burst_config.metric_operation_group = metric_operation_group
        burst_config.fk_metric_operation_group = metric_operation_group.id
        dao.store_entity(burst_config)
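
    # Usage sketch (hypothetical; assumes range1/range2 already hold the
    # serialized parameter ranges of the exploration):
    #
    #     burst_config.range2 = None                  # 1D PSE; set it for a 2D grid
    #     BurstService().prepare_burst_for_pse(burst_config)
    #     # burst_config now references two stored OperationGroups: one for
    #     # the simulation operations and one for the metric operations.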

    def store_burst_configuration(self, burst_config, storage_path):
        bc_path = h5.path_for(storage_path, BurstConfigurationH5,
                              burst_config.gid)
        with BurstConfigurationH5(bc_path) as bc_h5:
            bc_h5.store(burst_config)

    def load_burst_configuration_from_folder(self, simulator_folder, project):
        bc_h5_filename = DirLoader(
            simulator_folder,
            None).find_file_for_has_traits_type(BurstConfiguration)
        burst_config = BurstConfiguration(project.id)
        with BurstConfigurationH5(
                os.path.join(simulator_folder, bc_h5_filename)) as bc_h5:
            bc_h5.load_into(burst_config)
        return burst_config
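
    # The two methods above form an H5 round-trip: store_burst_configuration
    # writes a file named after the type and gid (the exact name comes from
    # h5.path_for), and load_burst_configuration_from_folder locates and
    # reads it back. Sketch (hypothetical paths):
    #
    #     service.store_burst_configuration(burst_config, "/tmp/sim_folder")
    #     restored = service.load_burst_configuration_from_folder("/tmp/sim_folder", project)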

    @staticmethod
    def prepare_name(burst, project_id):
        simulation_number = dao.get_number_of_bursts(project_id) + 1

        if burst.name is None:
            default_simulation_name = 'simulation_' + str(simulation_number)
        else:
            default_simulation_name = burst.name

        return default_simulation_name, simulation_number
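
A short end-to-end sketch of how these helpers compose (illustrative only; `burst` stands for a BurstConfiguration already loaded from the DB, and project id 1 is a placeholder):

    service = BurstService()

    name, number = BurstService.prepare_name(burst, project_id=1)
    service.rename_burst(burst.id, name)

    bursts = BurstService.get_available_bursts(1)
    BurstService.populate_burst_disk_usage(bursts)

    for row in service.update_history_status([b.id for b in bursts]):
        print(row)   # [id, status, is_group, message, running_time]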