Example #1
 def tearDown(self):
     self.__session.rollback()
     SQLManager.instance().get_session().close()
     SQLManager.instance().drop_all()
     PathFinder.dir_content_remove(os.path.join(self.s_root_path, "test/output"))
     OptionManager._drop()
     SQLManager._drop()
Example #2
 def tearDown(self):
     SQLManager.instance().get_session().close()
     SQLManager.instance().drop_all()
     # PathManager.dir_content_remove("outdir")
     shutil.rmtree("outdir", ignore_errors=True)
     OptionManager._drop()
     SQLManager._drop()
Example #3
    def run(self):
        """
        Get the dag then execute it.

        The database is set up here if the workflow-side tables have not been created yet.

        The DAG is obtained with the :meth:`~.wopmars.framework.parsing.Parser.Parser.parse` method of the parser, then pruned by the :meth:`~.wopmars.framework.management.WorkflowManager.WorkflowManager.get_dag_to_exec` method,
        which sets the right DAG to be executed.
        Then, :meth:`~.wopmars.framework.management.WorkflowManager.WorkflowManager.execute_from` is called with no argument to start from the origin nodes.
        """

        # This create_all is supposed to only create workflow-management side tables (called "wom_*")
        SQLManager.instance().create_all()

        if OptionManager.instance()["--clear-history"]:
            Logger.instance().info("Deleting WoPMaRS history...")
            SQLManager.instance().drop_table_content_list(SQLManager.wom_table_names)

        # The following lines create the types 'input' and 'output' in the db if they don't exist.
        self.__session.get_or_create(Type, defaults={"id": 1}, name="input")
        self.__session.get_or_create(Type, defaults={"id": 2}, name="output")
        self.__session.commit()
        # Get the DAG representing the whole workflow
        self.__dag_tools = self.__parser.parse()
        # Build the DAG that will actually be executed, according to the --sourcerule/--targetrule options
        self.get_dag_to_exec()
        # Start the execution at the root nodes
        if OptionManager.instance()["--forceall"] and not OptionManager.instance()["--dry-run"]:
            self.erase_output()
        self.execute_from()
Example #4
    def run(self):
        """
        Get the dag then execute it.

        The database is set up here if the workflow-side tables have not been created yet.

        The DAG is obtained with the :meth:`~.wopmars.framework.parsing.Parser.Parser.parse` method of the parser, then pruned by the :meth:`~.wopmars.framework.management.WorkflowManager.WorkflowManager.get_dag_to_exec` method,
        which sets the right DAG to be executed.
        Then, :meth:`~.wopmars.framework.management.WorkflowManager.WorkflowManager.execute_from` is called with no argument to start from the origin nodes.
        """

        # This create_all is supposed to only create workflow-management side tables (called "wom_*")
        SQLManager.instance().create_all()

        if OptionManager.instance()["--clear-history"]:
            Logger.instance().info("Deleting WoPMaRS history...")
            SQLManager.instance().drop_table_content_list(SQLManager.wom_table_names)

        # The following lines create the types 'input' and 'output' in the db if they don't exist.
        self.__session.get_or_create(Type, defaults={"id": 1}, name="input")
        self.__session.get_or_create(Type, defaults={"id": 2}, name="output")
        self.__session.commit()
        # Get the DAG representing the whole workflow
        self.__dag_tools = self.__parser.parse()
        # Build the DAG that will actually be executed, according to the --sourcerule/--targetrule options
        self.get_dag_to_exec()
        # Start the execution at the root nodes
        if OptionManager.instance()["--forceall"] and not OptionManager.instance()["--dry-run"]:
            self.erase_output()
        self.execute_from()
Example #5
 def tearDown(self):
     SQLManager.instance().get_session().close()
     SQLManager.instance().drop_all()
     shutil.rmtree(os.path.join(self.test_path, "outdir"),
                   ignore_errors=True)
     OptionManager._drop()
     SQLManager._drop()
Example #6
 def tearDown(self):
     SQLManager.instance().get_session().close()
     s_root_path = PathFinder.get_module_path()
     SQLManager.instance().drop_all()
     PathFinder.dir_content_remove(os.path.join(s_root_path, "test/output"))
     OptionManager._drop()
     SQLManager._drop()
Example #7
 def tearDown(self):
     SQLManager.instance().get_session().close()
     SQLManager.instance().drop_all()
     PathFinder.dir_content_remove(
         os.path.join(self.s_root_path, "test/output"))
     OptionManager._drop()
     SQLManager._drop()
Example #8
    def setUp(self):
        OptionManager.initial_test_setup()
        SQLManager.instance().create_all()
        s_root_path = PathFinder.get_module_path()

        self.__s_path_to_example_definition_file_finishing = os.path.join(s_root_path, "test/resource/wopfile/example_def_file.yml")
        self.__s_path_to_example_definition_file_that_end_with_error = os.path.join(s_root_path, "test/resource/wopfile/example_def_file_toolwrapper_never_ready.yml")

        self.__wm = WorkflowManager()
Example #9
    def setUp(self):
        OptionManager.initial_test_setup()  # Set tests arguments
        SQLManager.instance().create_all()  # Create database with tables

        for i in range(10000):
            SQLManager.instance().get_session().add(FooBase(name="foo " + str(i)))
        SQLManager.instance().get_session().commit()
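A hedged, self-contained sketch of the same bulk insert written with session.add_all(), the idiomatic SQLAlchemy call for registering many objects at once (assumes SQLAlchemy 1.4+; FooDemo and the in-memory URL are placeholders, not WoPMaRS classes):

    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.orm import declarative_base, sessionmaker

    Base = declarative_base()

    class FooDemo(Base):  # placeholder model, stands in for FooBase
        __tablename__ = "foo_demo"
        id = Column(Integer, primary_key=True)
        name = Column(String)

    engine = create_engine("sqlite:///:memory:")
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()

    # add_all() registers the whole batch in one call instead of calling add() in a loop
    session.add_all([FooDemo(name="foo " + str(i)) for i in range(10000)])
    session.commit()
    print(session.query(FooDemo).count())  # 10000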
Example #10
    def setUp(self):
        OptionManager.initial_test_setup()
        SQLManager.instance().create_all()
        s_root_path = PathFinder.get_module_path()

        self.__s_path_to_example_definition_file_finishing = os.path.join(s_root_path, "test/resource/wopfile/example_def_file1.yml")
        self.__s_path_to_example_definition_file_that_end_with_error = os.path.join(s_root_path, "test/resource/wopfile/example_def_file5_never_ready.yml")

        self.__wm = WorkflowManager()
Example #11
    def setUp(self):
        OptionManager.initial_test_setup()
        SQLManager.instance().create_all()

        for i in range(10000):
            SQLManager.instance().get_session().add(FooBase(name="foo " + str(i)))
        SQLManager.instance().get_session().commit()
Example #12
    def __init__(self):
        # the top level logger which will distribute messages to different handlers
        self.__logger = logging.getLogger("wopmars")
        self.__logger.setLevel(logging.DEBUG)
        formatter = logging.Formatter(
            '%(asctime)s :: %(levelname)s :: %(message)s')

        # the logger for stdout
        self.__stream_handler = logging.StreamHandler()

        # the logger which will write errors to stdout anyway
        self.__stream_handler_err = logging.StreamHandler()
        self.__stream_handler_err.setLevel(logging.WARNING)

        s_path_log_file = OptionManager.instance()["--log"].rsplit(".", 1)[0]
        # log file in append mode, maximum size 1 MB and 1 backup
        # handler equivalent to stream_handler in terms of logging level but writes to the .log file
        self.__file_handler = RotatingFileHandler(s_path_log_file + ".log",
                                                  'a', 1000000, 1)
        formatter_file = logging.Formatter(
            '%(asctime)s :: %(levelname)s :: %(message)s')
        self.__file_handler.setFormatter(formatter_file)

        # err file in append mode, maximum size 1 MB and 1 backup
        # this handler will write everything to the .err file.
        self.__err_handler = RotatingFileHandler(s_path_log_file + ".err", 'a',
                                                 1000000, 1)
        formatter_err = logging.Formatter('%(asctime)s :: %(message)s')
        self.__err_handler.setFormatter(formatter_err)
        self.__err_handler.setLevel(logging.DEBUG)

        verbosity = int(OptionManager.instance()["-v"])

        # set the verbosity of the stream handler and file handler depending on the needs of the user.
        if verbosity <= 0:
            self.__stream_handler.setLevel(logging.WARNING)
            self.__file_handler.setLevel(logging.WARNING)
        elif verbosity == 1:
            self.__stream_handler.setLevel(logging.INFO)
            self.__file_handler.setLevel(logging.INFO)
        elif verbosity >= 2:
            self.__stream_handler.setLevel(logging.DEBUG)
            self.__file_handler.setLevel(logging.DEBUG)

        # if --printtools is requested, execution details are printed to stdout
        if OptionManager.instance()["--printtools"]:
            self.__logger.addHandler(self.__stream_handler)
        else:
            self.__logger.addHandler(self.__stream_handler_err)
        self.__logger.addHandler(self.__file_handler)
        self.__logger.addHandler(self.__err_handler)

        self.__tw_logger = logging.getLogger("tw")
        self.__tw_streamhandler = logging.StreamHandler()
        self.__tw_logger.addHandler(self.__tw_streamhandler)
        self.__tw_logger.setLevel(logging.DEBUG)
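A minimal standalone sketch of the verbosity-to-level mapping and rotating log file used above, written against the standard logging module only; the logger name "wopmars_demo" and the demo.log path are placeholders:

    import logging
    from logging.handlers import RotatingFileHandler

    verbosity = 2  # e.g. the parsed value of the -v option
    # -v 0 -> WARNING, -v 1 -> INFO, -v 2 or more -> DEBUG, as in the constructor above
    if verbosity <= 0:
        level = logging.WARNING
    elif verbosity == 1:
        level = logging.INFO
    else:
        level = logging.DEBUG

    logger = logging.getLogger("wopmars_demo")
    logger.setLevel(logging.DEBUG)

    stream_handler = logging.StreamHandler()
    stream_handler.setLevel(level)

    # rotating .log file: append mode, 1 MB maximum size, 1 backup
    file_handler = RotatingFileHandler("demo.log", "a", maxBytes=1000000, backupCount=1)
    file_handler.setLevel(level)
    file_handler.setFormatter(logging.Formatter("%(asctime)s :: %(levelname)s :: %(message)s"))

    logger.addHandler(stream_handler)
    logger.addHandler(file_handler)
    logger.info("verbosity %s mapped to %s", verbosity, logging.getLevelName(level))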
Example #13
    def setUp(self):
        OptionManager.initial_test_setup()

        SQLManager.instance().create_all()
        session = SQLManager.instance().get_session()
        session.get_or_create(Type, defaults={"id": 1}, name="input")
        session.get_or_create(Type, defaults={"id": 2}, name="output")
        session.commit()
        self.__s_root_path = PathFinder.get_module_path()
        self.__parser = Parser()
Example #14
    def setUp(self):
        OptionManager.initial_test_setup()

        SQLManager.instance().create_all()
        session = SQLManager.instance().get_session()
        session.get_or_create(Type, defaults={"id": 1}, name="input")
        session.get_or_create(Type, defaults={"id": 2}, name="output")
        session.commit()
        self.__s_root_path = PathFinder.get_module_path()
        self.__parser = Parser()
Example #15
 def setUp(self):
     self.test_path = PathManager.get_test_path()  # Get tests path
     OptionManager.initial_test_setup()  # Set tests arguments
     self.db_url = OptionManager.instance()["--database"]
     self.db = self.db_url[10:]
     self.example_dir_path = os.path.join(PathManager.get_package_path(),
                                          "data/example")
     self.wopfile = os.path.join(self.example_dir_path, "Wopfile.yml")
     self.working_directory = os.path.join(PathManager.get_package_path(),
                                           "data/example")
Example #16
    def setUp(self):

        OptionManager.initial_test_setup()  # Set tests arguments
        SQLManager.instance().create_all()  # Create database with tables
        self.test_path = PathManager.get_test_path()

        self.__s_path_to_example_definition_file_finishing = os.path.join(
            self.test_path, "resource/wopfile/example_def_file1.yml")
        self.__s_path_to_example_definition_file_that_end_with_error = os.path.join(
            self.test_path, "resource/wopfile/example_def_file5_never_ready.yml")

        self.__workflow_manager = WorkflowManager()
Example #17
    def __init__(self):
        # the top level logger which will distribute messages to different handlers
        self.__logger = logging.getLogger("wopmars")
        self.__logger.setLevel(logging.DEBUG)
        formatter = logging.Formatter('%(asctime)s :: %(levelname)s :: %(message)s')

        # the logger for stdout
        self.__stream_handler = logging.StreamHandler()

        # the logger which will write errors to stdout anyway
        self.__stream_handler_err = logging.StreamHandler()
        self.__stream_handler_err.setLevel(logging.WARNING)

        s_path_log_file = OptionManager.instance()["--log"].rsplit(".", 1)[0]
        # log file in append mode, maximum size 1 MB and 1 backup
        # handler equivalent to stream_handler in terms of logging level but writes to the .log file
        self.__file_handler = RotatingFileHandler(s_path_log_file + ".log", 'a', 1000000, 1)
        formatter_file = logging.Formatter('%(asctime)s :: %(levelname)s :: %(message)s')
        self.__file_handler.setFormatter(formatter_file)

        # err file in append mode, maximum size 1 MB and 1 backup
        # this handler will write everything to the .err file.
        self.__err_handler = RotatingFileHandler(s_path_log_file + ".err", 'a', 1000000, 1)
        formatter_err = logging.Formatter('%(asctime)s :: %(message)s')
        self.__err_handler.setFormatter(formatter_err)
        self.__err_handler.setLevel(logging.DEBUG)

        verbosity = int(OptionManager.instance()["-v"])

        # set the verbosity of the stream handler and file handler depending on the needs of the user.
        if verbosity <= 0:
            self.__stream_handler.setLevel(logging.WARNING)
            self.__file_handler.setLevel(logging.WARNING)
        elif verbosity == 1:
            self.__stream_handler.setLevel(logging.INFO)
            self.__file_handler.setLevel(logging.INFO)
        elif verbosity >= 2:
            self.__stream_handler.setLevel(logging.DEBUG)
            self.__file_handler.setLevel(logging.DEBUG)

        # if --printtools is requested, execution details are printed to stdout
        if OptionManager.instance()["--printtools"]:
            self.__logger.addHandler(self.__stream_handler)
        else:
            self.__logger.addHandler(self.__stream_handler_err)
        self.__logger.addHandler(self.__file_handler)
        self.__logger.addHandler(self.__err_handler)

        self.__tw_logger = logging.getLogger("tw")
        self.__tw_streamhandler = logging.StreamHandler()
        self.__tw_logger.addHandler(self.__tw_streamhandler)
        self.__tw_logger.setLevel(logging.DEBUG)
Example #18
    def setUp(self):
        OptionManager.initial_test_setup()
        SQLManager.instance().create_all()
        self.__local_session = SQLManager.instance().get_session()

        SQLManager()
        self.__t1 = ConcurrentCommitingThread()
        self.__t2 = ConcurrentCommitingThread()
        self.__t3 = ConcurrentCommitingThread()

        self.__t4 = ConcurrentRollBackingThread()
        self.__t5 = ConcurrentRollBackingThread()
        self.__t6 = ConcurrentRollBackingThread()
Example #19
    def setUp(self):
        OptionManager.initial_test_setup()
        SQLManager.instance().create_all()
        self.__local_session = SQLManager.instance().get_session()

        SQLManager()
        self.__t1 = ConcurrentCommitingThread()
        self.__t2 = ConcurrentCommitingThread()
        self.__t3 = ConcurrentCommitingThread()

        self.__t4 = ConcurrentRollBackingThread()
        self.__t5 = ConcurrentRollBackingThread()
        self.__t6 = ConcurrentRollBackingThread()
Example #20
 def tearDown(self):
     # pass
     # pip.main(['uninstall', 'wopexamplecar', '-y']) # working in travis
     subprocess.run(
         [sys.executable, '-m', 'pip', 'uninstall', 'example', '-y'])
     shutil.rmtree(os.path.join(self.working_directory, "build"),
                   ignore_errors=True)
     shutil.rmtree(os.path.join(self.working_directory,
                                "wopexamplecar.egg-info"),
                   ignore_errors=True)
     PathManager.unlink(self.db)
     OptionManager._drop()
     SQLManager._drop()
Example #21
    def setUp(self):
        OptionManager.initial_test_setup()

        self.s_root_path = PathFinder.get_module_path()
        s_path_to_example_existing_file = os.path.join(self.s_root_path, "test/resource/input_files/example_existing_file.txt")
        s_path_to_example_existing_file2 = os.path.join(self.s_root_path, "test/resource/input_files/example_existing_file2.txt")
        s_path_to_example_not_existing_file = os.path.join(self.s_root_path, "test/resource/input_files/example_not_existing_file.txt")

        self.__io_file_existing = IOFilePut(name="existing_file", path=s_path_to_example_existing_file)
        self.__io_file_existing2 = IOFilePut(name="existing_file", path=s_path_to_example_existing_file)
        self.__io_file_existing3 = IOFilePut(name="existing_file2", path=s_path_to_example_existing_file)
        self.__io_file_existing4 = IOFilePut(name="existing_file", path=s_path_to_example_existing_file2)
        self.__io_file_not_existing = IOFilePut(name="not_existing_file", path=s_path_to_example_not_existing_file)
Example #22
 def setUp(self):
     OptionManager.initial_test_setup()  # Set tests arguments
     SQLManager.instance().create_all()  # Create database with tables
     session = SQLManager.instance().get_session()
     session.get_or_create(TypeInputOrOutput,
                           defaults={"is_input": True},
                           is_input=True)
     session.get_or_create(TypeInputOrOutput,
                           defaults={"is_input": False},
                           is_input=False)
     session.commit()
     self.__test_path = PathManager.get_test_path()
     # self.__test_path = PathManager.get_package_path()
     self.__parser = Parser()
Example #23
    def setUp(self):
        OptionManager.initial_test_setup()
        SQLManager.instance().create_all()
        session = SQLManager.instance().get_session()
        session.get_or_create(Type, defaults={"id": 1}, name="input")
        session.get_or_create(Type, defaults={"id": 2}, name="output")
        session.commit()
        self.__session = SQLManager.instance().get_session()
        self.__reader = Reader()

        self.__s_root_path = PathFinder.get_module_path()

        # The good -------------------------------:

        self.__s_example_definition_file = os.path.join(
            self.__s_root_path, "test/resource/wopfile/example_def_file.yml")
        self.__s_example_definition_file2 = os.path.join(
            self.__s_root_path, "test/resource/wopfile/example_def_file3.yml")

        # The ugly (malformed file) --------------------:

        self.__s_example_definition_file_duplicate_rule = os.path.join(
            self.__s_root_path,
            "test/resource/wopfile/example_def_file_duplicate_rule.yml")

        self.__list_f_to_exception_init = [
            os.path.join(self.__s_root_path, s_path) for s_path in [
                "test/resource/wopfile/example_def_file_wrong_yaml.yml",
                "test/resource/wopfile/example_def_file_duplicate_rule.yml",
                "test/resource/wopfile/example_def_file_wrong_grammar.yml",
                "test/resource/wopfile/example_def_file_wrong_grammar2.yml",
                "test/resource/wopfile/example_def_file_wrong_grammar3.yml",
                "test/resource/wopfile/example_def_file_wrong_grammar4.yml"
            ]
        ]

        # The bad (invalid file) ----------------------:

        self.__list_s_to_exception_read = [
            os.path.join(self.__s_root_path, s_path) for s_path in [
                "test/resource/wopfile/example_def_file_wrong_content.yml",
                "test/resource/wopfile/example_def_file_wrong_content2.yml",
                "test/resource/wopfile/example_def_file_wrong_content3.yml",
                "test/resource/wopfile/example_def_file_wrong_content4.yml",
                "test/resource/wopfile/example_def_file_wrong_content5.yml",
                "test/resource/wopfile/example_def_file_wrong_class_name.yml",
                "test/resource/wopfile/example_def_file_wrong_rule.yml",
            ]
        ]
Example #24
    def setUp(self):

        OptionManager.initial_test_setup()  # Set tests arguments
        SQLManager.instance().create_all()  # Create database with tables

        self.__local_session = SQLManager.instance().get_session()

        SQLManager()
        self.__t1 = ConcurrentCommitingThread()
        self.__t2 = ConcurrentCommitingThread()
        self.__t3 = ConcurrentCommitingThread()

        self.__t4 = ConcurrentRollBackingThread()
        self.__t5 = ConcurrentRollBackingThread()
        self.__t6 = ConcurrentRollBackingThread()
Example #25
    def notify_success(self, thread_toolwrapper):
        """
        Handle thread_toolwrapper success by continuing the dag.

        :param thread_toolwrapper: ToolWrapper thread that just succeeded
        :type thread_toolwrapper: :class:`~.wopmars.management.ToolWrapperThread.ToolWrapperThread`
        """
        self.__session.add(thread_toolwrapper.get_toolwrapper())
        self.__session.commit()

        dry_status = thread_toolwrapper.get_dry()
        # if not OptionManager.instance()["--dry-run"]:
        #     thread_toolwrapper.get_toolwrapper().set_args_time_and_size("output", dry_status)
        if dry_status is False and not OptionManager.instance()["--dry-run"]:
            Logger.instance(
            ).info("ToolWrapper {} -> {} has succeeded.".format(
                str(thread_toolwrapper.get_toolwrapper().rule_name),
                str(thread_toolwrapper.get_toolwrapper().__class__.__name__)))
        # Continue the dag execution from the tool_python_path that just finished.
        self.__already_runned.add(thread_toolwrapper.get_toolwrapper())
        self.__count_exec -= 1

        if len(self.__list_queue_buffer):
            Logger.instance().debug("Fill the queue with the buffer: " + str([
                t.get_toolwrapper().__class__.__name__
                for t in self.__list_queue_buffer
            ]))
        # Drain the buffer into the execution queue
        for tw_thread in self.__list_queue_buffer:
            self.__queue_exec.put(tw_thread)
        self.__list_queue_buffer = []

        self.execute_from(thread_toolwrapper.get_toolwrapper())
Example #26
    def notify_success(self, thread_toolwrapper):
        """
        Handle thread_toolwrapper success by continuing the dag.

        :param thread_toolwrapper: ToolWrapper thread that just succeeded
        :type thread_toolwrapper: :class:`~.wopmars.management.ToolThread.ToolThread`
        """
        self.__session.add(thread_toolwrapper.get_toolwrapper())
        self.__session.commit()

        dry_status = thread_toolwrapper.get_dry()
        # if not OptionManager.instance()["--dry-run"]:
        #     thread_toolwrapper.get_toolwrapper().set_args_time_and_size("output", dry_status)
        if dry_status is False and not OptionManager.instance()["--dry-run"]:
            Logger.instance().info("Rule " + str(thread_toolwrapper.get_toolwrapper().name) + " -> " + str(thread_toolwrapper.get_toolwrapper().__class__.__name__) + " has succeeded.")
        # Continue the dag execution from the toolwrapper that just finished.
        self.__already_runned.add(thread_toolwrapper.get_toolwrapper())
        self.__count_exec -= 1

        if len(self.__list_queue_buffer):
            Logger.instance().debug("Fill the queue with the Buffer: " +
                                    str([t.get_toolwrapper().__class__.__name__ for t in self.__list_queue_buffer]))
        # Drain the buffer into the execution queue
        for tw_thread in self.__list_queue_buffer:
            self.__queue_exec.put(tw_thread)
        self.__list_queue_buffer = []

        self.execute_from(thread_toolwrapper.get_toolwrapper())
Example #27
    def setUp(self):

        OptionManager.initial_test_setup()  # Set tests arguments
        SQLManager.instance().create_all()  # Create database with tables

        session = SQLManager.instance().get_session()
        session.get_or_create(TypeInputOrOutput, defaults={"is_input": True}, is_input=True)
        session.get_or_create(TypeInputOrOutput, defaults={"is_input": False}, is_input=False)
        session.commit()
        self.__session = SQLManager.instance().get_session()
        self.__reader = Reader()

        self.__testdir_path = PathManager.get_test_path()

        # The good -------------------------------:

        self.__example_def_file1_path = os.path.join(self.__testdir_path, "resource/wopfile/example_def_file1.yml")
        self.__example_def_file3_path = os.path.join(self.__testdir_path, "resource/wopfile/example_def_file3.yml")

        # The ugly (malformed file) --------------------:

        self.__s_example_definition_file_duplicate_rule = os.path.join(self.__testdir_path, "resource/wopfile/example_def_file_duplicate_rule.yml")

        self.__list_f_to_exception_init = [
            os.path.join(self.__testdir_path, s_path) for s_path in [
                "resource/wopfile/example_def_file_wrong_yaml.yml",
                "resource/wopfile/example_def_file_duplicate_rule.yml",
                "resource/wopfile/example_def_file_wrong_grammar.yml",
                "resource/wopfile/example_def_file_wrong_grammar2.yml",
                "resource/wopfile/example_def_file_wrong_grammar3.yml",
                "resource/wopfile/example_def_file_wrong_grammar4.yml"
                ]
        ]

        # The bad (invalid file) ----------------------:

        self.__list_s_to_exception_read = [
            os.path.join(self.__testdir_path, s_path) for s_path in [
                "resource/wopfile/example_def_file1.yml",
                "resource/wopfile/example_def_file_wrong_content2.yml",
                "resource/wopfile/example_def_file_wrong_content3.yml",
                "resource/wopfile/example_def_file_wrong_content4.yml",
                "resource/wopfile/example_def_file_wrong_content5.yml",
                "resource/wopfile/example_def_file_wrong_class_name.yml",
                "resource/wopfile/example_def_file_wrong_rule.yml",
            ]
        ]
Example #28
 def toolwrapper_error(self, msg, tw_name):
     if OptionManager.instance()["--toolwrapper-log"]:
         self.__tw_streamhandler.setFormatter(
             logging.Formatter(
                 ColorPrint.green(
                     tw_name +
                     ' :: %(asctime)s :: %(levelname)s :: %(message)s')))
         self.__tw_logger.error(msg)
Example #29
    def get_command_line(self):
        """
        This creates a string containing the command line for executing the toolwrapper only.

        :return: The string containing the command line
        """
        list_str_inputs_files = [f.name + "': '" + f.path for f in self.__toolwrapper.files if f.type.name == "input"]
        list_str_inputs_tables = [t.tablename + "': '" + t.model for t in self.__toolwrapper.tables if t.type.name == "input"]
        str_input_dict = ""
        str_input_dict_files = ""
        str_input_dict_tables = ""

        if list_str_inputs_files:
            str_input_dict_files = "'file':{'" + "', '".join(list_str_inputs_files) + "'}"
        if list_str_inputs_tables:
            str_input_dict_tables = "'table':{'" + "', '".join(list_str_inputs_tables) + "'}"
        if list_str_inputs_files or list_str_inputs_tables:
            str_input_dict = " -i \"{%s}\"" % (", ".join([s for s in [str_input_dict_files, str_input_dict_tables] if s != ""]))

        list_str_outputs_files = [f.name + "': '" + f.path for f in self.__toolwrapper.files if f.type.name == "output"]
        list_str_outputs_tables = [t.tablename + "': '" + t.model for t in self.__toolwrapper.tables if t.type.name == "output"]
        str_output_dict = ""
        str_output_dict_files = ""
        str_output_dict_tables = ""

        if list_str_outputs_files:
            str_output_dict_files = "'file':{'" + "', '".join(list_str_outputs_files) + "'}"
        if list_str_outputs_tables:
            str_output_dict_tables = "'table':{'" + "', '".join(list_str_outputs_tables) + "'}"
        if list_str_outputs_files or list_str_outputs_tables:
            str_output_dict = " -o \"{%s}\"" % (", ".join([s for s in [str_output_dict_files, str_output_dict_tables] if s != ""]))

        list_str_params = []
        str_params_dict = ""

        if list_str_params:
            str_params_dict = " -P \"{'" + "', '".join(list_str_params) + "'}\""

        consistent_keys = ["--forceall", "--dot", "--log", ]
        s = ""
        s += "wopmars tool " + self.__toolwrapper.toolwrapper + str_input_dict + str_output_dict + str_params_dict + " " + \
             " ".join(str(key) + " " + str(OptionManager.instance()[key]) for key in OptionManager.instance().keys() if key in consistent_keys and OptionManager.instance()[key] is not None and type(OptionManager.instance()[key]) != bool) + \
             " " + " ".join(str(key) for key in OptionManager.instance().keys() if key in consistent_keys and OptionManager.instance()[key] is True and type(OptionManager.instance()[key]) == bool)

        return s
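The tail of get_command_line() keeps only the options listed in consistent_keys, rendering non-boolean values as "key value" pairs and True booleans as bare flags. A hedged sketch of that filtering with an invented options dict:

    options = {"--forceall": True, "--dot": None, "--log": "wopmars.log", "--dry-run": False}
    consistent_keys = ["--forceall", "--dot", "--log"]

    # options with a real (non-None, non-boolean) value become "key value"
    value_opts = " ".join(str(k) + " " + str(v) for k, v in options.items()
                          if k in consistent_keys and v is not None and not isinstance(v, bool))
    # boolean options set to True become bare flags
    flag_opts = " ".join(str(k) for k, v in options.items() if k in consistent_keys and v is True)

    print((value_opts + " " + flag_opts).strip())  # --log wopmars.log --forceall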
Example #30
    def setUp(self):
        OptionManager.initial_test_setup()
        SQLManager.instance().create_all()
        session = SQLManager.instance().get_session()
        session.get_or_create(Type, defaults={"id": 1}, name="input")
        session.get_or_create(Type, defaults={"id": 2}, name="output")
        session.commit()
        self.__session = SQLManager.instance().get_session()
        self.__reader = Reader()

        self.__s_root_path = PathFinder.get_module_path()

        # The good -------------------------------:

        self.__s_example_definition_file = os.path.join(self.__s_root_path, "test/resource/wopfile/example_def_file1.yml")
        self.__s_example_definition_file2 = os.path.join(self.__s_root_path, "test/resource/wopfile/example_def_file3.yml")

        # The ugly (malformed file) --------------------:

        self.__s_example_definition_file_duplicate_rule = os.path.join(self.__s_root_path, "test/resource/wopfile/example_def_file_duplicate_rule.yml")

        self.__list_f_to_exception_init = [
            os.path.join(self.__s_root_path, s_path) for s_path in [
                "test/resource/wopfile/example_def_file_wrong_yaml.yml",
                "test/resource/wopfile/example_def_file_duplicate_rule.yml",
                "test/resource/wopfile/example_def_file_wrong_grammar.yml",
                "test/resource/wopfile/example_def_file_wrong_grammar2.yml",
                "test/resource/wopfile/example_def_file_wrong_grammar3.yml",
                "test/resource/wopfile/example_def_file_wrong_grammar4.yml"
                ]
        ]

        # The bad (invalid file) ----------------------:

        self.__list_s_to_exception_read = [
            os.path.join(self.__s_root_path, s_path) for s_path in [
                "test/resource/wopfile/example_def_file1.yml",
                "test/resource/wopfile/example_def_file_wrong_content2.yml",
                "test/resource/wopfile/example_def_file_wrong_content3.yml",
                "test/resource/wopfile/example_def_file_wrong_content4.yml",
                "test/resource/wopfile/example_def_file_wrong_content5.yml",
                "test/resource/wopfile/example_def_file_wrong_class_name.yml",
                "test/resource/wopfile/example_def_file_wrong_rule.yml",
            ]
        ]
Example #31
    def test_run(self):
        # OptionManager.instance()["--dot"] = None
        #
        # OptionManager.instance()["--wopfile"] = self.__s_path_to_example_definition_file_finishing
        # with self.assertRaises(SystemExit):
        #     self.__wm.run()

        OptionManager.instance()["--wopfile"] = self.__s_path_to_example_definition_file_that_end_with_error
        with self.assertRaises(WopMarsException):
            self.__wm.run()
Example #32
    def test_run(self):
        # OptionManager.instance()["--dot"] = None
        #
        # OptionManager.instance()["--wopfile"] = self.__s_path_to_example_definition_file_finishing
        # with self.assertRaises(SystemExit):
        #     self.__wm.run()

        OptionManager.instance()["--wopfile"] = self.__s_path_to_example_definition_file_that_end_with_error
        with self.assertRaises(WopMarsException):
            self.__wm.run()
Example #33
    def set_args_date_and_size(self, type, dry=False):
        """
        WorkflowManager method:

        The date and size of the files are set according to the actual last modification date and size of the files on the file system.

        The dates of the tables are set according to the last modification date recorded in the modification_table table.
        If the type of IOPut is "output" and the execution is "not dry", the date in modification_table is set to the
        current time.time() datetime.

        # todo modify it to take commits into account instead of the status of 'output' of a table

        :param type: "input" or "output"
        :type type: str
        :param dry: Say if the execution has been simulated.
        :type dry: bool
        """
        session = SQLManager.instance().get_session()
        for f in [f for f in self.files if f.type.name == type]:
            try:
                date = datetime.datetime.fromtimestamp(os.path.getmtime(
                    f.path))
                size = os.path.getsize(f.path)
            except FileNotFoundError as FE:
                # todo ask lionel: without this rollback it bugs, why? The session is empty... as if the query were blocking
                if not OptionManager.instance()["--dry-run"]:
                    session.rollback()
                    raise WopMarsException(
                        "Error during the execution of the workflow",
                        "The " + type + " file " + str(f.path) + " of rule " +
                        str(self.name) + " doesn't exist")
                else:
                    # in dry-run mode, input/output files might not exist
                    date = None
                    size = None
            f.used_at = date
            f.size = size
            session.add(f)
            if type == "input":
                Logger.instance().debug("Input file " + str(f) + " used.")
            elif type == "output" and dry:
                Logger.instance().debug(
                    "Output file " + str(f) +
                    " has been loaded from previous execution.")
            elif type == "output" and not dry:
                Logger.instance().debug("Output file " + str(f) +
                                        " has been created.")
        # this commit is due to a bug I couldn't figure out: the session empties itself between the two loops...
        # this is not good at all since it may lead to inconsistency in the database
        session.commit()

        for t in [t for t in self.tables if t.type.name == type]:
            t.used_at = t.modification.date
            session.add(t)
        session.commit()
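The per-file bookkeeping above boils down to reading the last modification date and size of each path, with a fallback to None when the file is missing in dry-run mode. A small standalone sketch of that pattern; the path is a placeholder:

    import datetime
    import os

    path = "outdir/result.txt"  # placeholder path
    try:
        date = datetime.datetime.fromtimestamp(os.path.getmtime(path))
        size = os.path.getsize(path)
    except FileNotFoundError:
        # in dry-run mode an input/output file may legitimately not exist yet
        date = None
        size = None
    print(date, size)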
Example #34
    def setUp(self):
        self.test_path = PathManager.get_test_path()  # Get tests path
        OptionManager.initial_test_setup()  # Set tests arguments
        self.__db_url = OptionManager.instance()["--database"]

        self.__example_def_file1 = os.path.join(
            self.test_path, "resource/wopfile/example_def_file1.yml")
        self.__example_def_file1_only_database = \
            os.path.join(self.test_path, "resource/wopfile/example_def_file1_only_database.yml")
        self.__example_def_file2_only_files = os.path.join(
            self.test_path,
            "resource/wopfile/example_def_file2_only_files.yml")
        self.__example_def_file4 = os.path.join(
            self.test_path, "resource/wopfile/example_def_file4.yml")
        self.__example_def_file5_never_ready = os.path.join(
            self.test_path,
            "resource/wopfile/example_def_file5_never_ready.yml")
        self.__example_def_file_input_not_ready = os.path.join(
            self.test_path,
            "resource/wopfile/example_def_file_input_not_ready.yml")
Example #35
    def setUp(self):
        self.s_root_path = PathFinder.get_module_path()
        OptionManager.initial_test_setup()
        SQLManager.instance().create_all()
        self.__local_session = SQLManager.instance().get_session()
        try:
            for i in range(10):
                self.__local_session.add(FooBase(name="testIODB " + str(i)))
            self.__local_session.commit()
        except Exception as e:
            self.__local_session.rollback()
            self.__local_session.close()
            raise e

        self.__io_base_existing = IODbPut(model="FooBase", tablename="FooBase")
        self.__io_base_existing.set_table(FooBase)
        self.__io_base_existing2 = IODbPut(model="FooBase", tablename="FooBase")
        self.__io_base_existing2.set_table(FooBase)
        self.__io_base_existing3 = IODbPut(model="FooBase2", tablename="FooBase2")
        self.__io_base_existing3.set_table(FooBase2)
Example #36
    def build_file_architecture_snp(self):
        """
        This builds the snp example.
        """
        cwd = os.path.join(OptionManager.instance()["--directory"], "wopmars_example_snp")
        example_directory = os.path.join(os.path.dirname(os.path.realpath(__file__)), "wopmars_example_snp")

        # copy the folder wopmars_example in cwd
        ExampleBuilder.copy(example_directory, cwd)

        # empty.txt is an empty text file used only so that the output directory is taken into account
        os.remove(os.path.join(cwd, "output/empty.txt"))
Example #37
    def __init__(self):
        """
        The constructor of the SQLManager is supposed to be called once in the whole execution, thanks to the
        SingletonMixin inheritance.

        The constructor creates the engine at the location provided by the user. For SQLite, the "check_same_thread"
        key of "connect_args" is set to False.

        The "PRAGMA foreign_keys=ON" statement is executed here and allows enforcing foreign key constraints.

        The Session attribute is an object of class "Type" and allows making a scoped session bound to the engine on demand.
        autocommit is set to False and autoflush to True.

        The lock is initialized here with the RW lock class and will be used to override the behaviour of SQLite,
        managing access to the database without relying on SQLite's own queue, which waits 4 seconds before raising an error.
        """
        s_database_url = OptionManager.instance()["--database"]
        self.d_database_config = {
            'db_connection': None,
            'db_database': None,
            'db_url': s_database_url
        }
        self.d_database_config['db_connection'] = s_database_url.split(
            "://")[0]
        if self.d_database_config['db_connection'] == "sqlite":
            self.d_database_config['db_database'] = s_database_url.split(
                ":///")[1]
        # echo=False mutes the database log
        # connect_args is necessary because the same objects are accessed from different threads.
        if self.d_database_config['db_connection'] == "sqlite":
            self.__engine = create_engine(
                self.d_database_config['db_url'],
                echo=False,
                connect_args={'check_same_thread': False})
        else:
            self.__engine = create_engine(self.d_database_config['db_url'],
                                          echo=False)
        # Below, between "###", code copy-pasted from this post
        # http://stackoverflow.com/questions/2614984/sqlite-sqlalchemy-how-to-enforce-foreign-keys/7831210#7831210
        # enforce foreign key constraints
        ###
        def _fk_pragma_on_connect(dbapi_con, con_record):
            if self.d_database_config['db_connection'] == "sqlite":
                dbapi_con.execute('pragma foreign_keys=ON')

        event.listen(self.__engine, 'connect', _fk_pragma_on_connect)
        ###

        # I don't know why I have used the autoflush=True
        self.__Session = scoped_session(
            sessionmaker(bind=self.__engine, autoflush=True, autocommit=False))
        # The lock
        self.__lock = RWLock()
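A minimal standalone sketch of the SQLite-specific parts of this constructor, assuming SQLAlchemy 1.x as used here: the check_same_thread connect argument and the connect-event listener that turns foreign key enforcement on. The example.db URL is a placeholder:

    from sqlalchemy import create_engine, event
    from sqlalchemy.orm import scoped_session, sessionmaker

    engine = create_engine("sqlite:///example.db", echo=False,
                           connect_args={'check_same_thread': False})

    def _fk_pragma_on_connect(dbapi_con, con_record):
        # runs on every new DBAPI connection so foreign keys are always enforced
        dbapi_con.execute('pragma foreign_keys=ON')

    event.listen(engine, 'connect', _fk_pragma_on_connect)

    Session = scoped_session(sessionmaker(bind=engine, autoflush=True, autocommit=False))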
Example #38
    def get_dag_to_exec(self):
        """
        Set the DAG to execute according to the --sourcerule and --targetrule options.

        The source rule is checked first (both should not be set, because of the checks at the beginning of the program).

        If sourcerule is set, its successors are searched in the whole DAG.
        Otherwise (targetrule), its predecessors are.

        The set of obtained rules is used to build the "dag_to_exec". The nodes returned by get_all_successors and
        get_all_predecessors are implicitly all related.
        """
        if OptionManager.instance()["--sourcerule"] is not None:
            try:
                # Get the rule asked by the user as 'sourcerule'
                node_from_rule = [n for n in self.__dag_tools if n.name == OptionManager.instance()["--sourcerule"]][0]
            except IndexError:
                raise WopMarsException(
                    "The given rule to start from: " + OptionManager.instance()["--sourcerule"] + " doesn't exist.")

            self.__dag_to_exec = DAG(self.__dag_tools.get_all_successors(node_from_rule))
            Logger.instance().info("Running the workflow from rule " + str(OptionManager.instance()["--sourcerule"]) +
                                   " -> " + node_from_rule.toolwrapper)
        elif OptionManager.instance()["--targetrule"] is not None:
            try:
                # Get the rule asked by the user as 'targetrule'
                node_from_rule = [n for n in self.__dag_tools if n.name == OptionManager.instance()["--targetrule"]][0]
            except IndexError:
                raise WopMarsException(
                    "The given rule to go to: " + OptionManager.instance()["--targetrule"] + " doesn't exist.")
            self.__dag_to_exec = DAG(self.__dag_tools.get_all_predecessors(node_from_rule))
            Logger.instance().info("Running the workflow to the rule " + str(OptionManager.instance()["--targetrule"]) +
                                   " -> " + node_from_rule.toolwrapper)
        else:
            self.__dag_to_exec = self.__dag_tools

        # ???
        # todo checkout what is going on here
        tables = []
        for tw in self.__dag_to_exec.nodes():
            tables.extend(tw.tables)
        IODbPut.set_tables_properties(tables)

        # For the tools that are in the workflow definition file but not in the executed dag, their status is set to
        # "NOT_PLANNED"
        for tw in set(self.__dag_tools.nodes()).difference(set(self.__dag_to_exec.nodes())):
            tw.set_execution_infos(status="NOT_PLANNED")
            self.__session.add(tw)

        self.__session.commit()
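A hedged sketch of the same pruning idea using networkx directly (the DAG class appears to build on it, given the is_directed_acyclic_graph check in the parse() examples below): keep the source rule and its descendants for --sourcerule, or the target rule and its ancestors for --targetrule. The rule names are invented:

    import networkx as nx

    dag = nx.DiGraph([("rule1", "rule2"), ("rule2", "rule3"), ("rule1", "rule4")])

    source = "rule2"
    to_run = {source} | nx.descendants(dag, source)   # --sourcerule: the rule and its successors
    # to_run = {target} | nx.ancestors(dag, target)   # --targetrule: the rule and its predecessors
    dag_to_exec = dag.subgraph(to_run)
    print(sorted(dag_to_exec.nodes()))                # ['rule2', 'rule3']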
Example #39
    def get_dag_to_exec(self):
        """
        Set the DAG to execute according to the --sourcerule and --targetrule options.

        The source rule is checked first (both should not be set, because of the checks at the beginning of the program).

        If sourcerule is set, its successors are searched in the whole DAG.
        Otherwise (targetrule), its predecessors are.

        The set of obtained rules is used to build the "dag_to_exec". The nodes returned by get_all_successors and
        get_all_predecessors are implicitly all related.
        """
        if OptionManager.instance()["--sourcerule"] is not None:
            try:
                # Get the rule asked by the user as 'sourcerule'
                node_from_rule = [n for n in self.__dag_tools if n.name == OptionManager.instance()["--sourcerule"]][0]
            except IndexError:
                raise WopMarsException(
                    "The given rule to start from: " + OptionManager.instance()["--sourcerule"] + " doesn't exist.")

            self.__dag_to_exec = DAG(self.__dag_tools.get_all_successors(node_from_rule))
            Logger.instance().info("Running the workflow from rule " + str(OptionManager.instance()["--sourcerule"]) +
                                   " -> " + node_from_rule.toolwrapper)
        elif OptionManager.instance()["--targetrule"] is not None:
            try:
                # Get the rule asked by the user as 'targetrule'
                node_from_rule = [n for n in self.__dag_tools if n.name == OptionManager.instance()["--targetrule"]][0]
            except IndexError:
                raise WopMarsException(
                    "The given rule to go to: " + OptionManager.instance()["--targetrule"] + " doesn't exist.")
            self.__dag_to_exec = DAG(self.__dag_tools.get_all_predecessors(node_from_rule))
            Logger.instance().info("Running the workflow to the rule " + str(OptionManager.instance()["--targetrule"]) +
                                   " -> " + node_from_rule.toolwrapper)
        else:
            self.__dag_to_exec = self.__dag_tools

        # ???
        # todo checkout what is going on here
        tables = []
        for tw in self.__dag_to_exec.nodes():
            tables.extend(tw.tables)
        IODbPut.set_tables_properties(tables)

        # For the tools that are in the workflow definition file but not in the executed dag, their status is set to
        # "NOT_PLANNED"
        for tw in set(self.__dag_tools.nodes()).difference(set(self.__dag_to_exec.nodes())):
            tw.set_execution_infos(status="NOT_PLANNED")
            self.__session.add(tw)

        self.__session.commit()
Example #40
    def setUp(self):

        self.test_path = PathManager.get_test_path()
        OptionManager.initial_test_setup()  # Set tests arguments
        SQLManager.instance().create_all()  # Create database with tables

        self.__local_session = SQLManager.instance().get_session()
        try:
            for i in range(10):
                self.__local_session.add(FooBase(name="testIODB " + str(i)))
            self.__local_session.commit()
        except Exception as e:
            self.__local_session.rollback()
            self.__local_session.close()
            raise e

        self.__io_base_existing = TableInputOutputInformation(model_py_path="FooBase", table_key="FooBase", table_name="FooBase")
        self.__io_base_existing.set_table(FooBase)
        self.__io_base_existing2 = TableInputOutputInformation(model_py_path="FooBase", table_key="FooBase", table_name="FooBase")
        self.__io_base_existing2.set_table(FooBase)
        self.__io_base_existing3 = TableInputOutputInformation(model_py_path="FooBase2", table_key="FooBase2", table_name="FooBase2")
        self.__io_base_existing3.set_table(FooBase2)
Example #41
    def setUp(self):
        self.test_path = PathManager.get_test_path()  # Get tests path
        OptionManager.initial_test_setup()  # Set tests arguments

        s_path_to_example_existing_file = os.path.join(
            self.test_path, "resource/input_files/example_existing_file.txt")
        s_path_to_example_existing_file2 = os.path.join(
            self.test_path, "resource/input_files/example_existing_file2.txt")
        s_path_to_example_not_existing_file = os.path.join(
            self.test_path,
            "resource/input_files/example_not_existing_file.txt")

        self.__io_file_existing = FileInputOutputInformation(
            file_key="existing_file", path=s_path_to_example_existing_file)
        self.__io_file_existing2 = FileInputOutputInformation(
            file_key="existing_file", path=s_path_to_example_existing_file)
        self.__io_file_existing3 = FileInputOutputInformation(
            file_key="existing_file2", path=s_path_to_example_existing_file)
        self.__io_file_existing4 = FileInputOutputInformation(
            file_key="existing_file", path=s_path_to_example_existing_file2)
        self.__io_file_not_existing = FileInputOutputInformation(
            file_key="not_existing_file",
            path=s_path_to_example_not_existing_file)
Example #42
    def set_args_time_and_size(self, type, dry=False):
        """
        WorkflowManager method:

        The time and size of the files are set according to the actual last modification time and size of the files on the file system.

        The times of the tables are set according to the last modification time recorded in the modification_table table.
        If the type of IOPut is "output" and the execution is "not dry", the time in modification_table is set to the
        current time.time().

        # todo modify it to take commits into account instead of the status of 'output' of a table

        :param type: "input" or "output"
        :type type: str
        :param dry: Say if the execution has been simulated.
        :type dry: bool
        """
        session = SQLManager.instance().get_session()
        for f in [f for f in self.files if f.type.name == type]:
            try:
                time = os_path_getmtime_ms(f.path)
                size = os.path.getsize(f.path)
            except FileNotFoundError as FE:
                # todo ask lionel: without this rollback it bugs, why? The session is empty... as if the query were blocking
                if not OptionManager.instance()["--dry-run"]:
                    session.rollback()
                    raise WopMarsException("Error during the execution of the workflow",
                                           "The " + type + " file " + str(f.path) + " of rule " + str(self.name) +
                                           " doesn't exist")
                else:
                    # in dry-run mode, input/output files might not exist
                    time = None
                    size = None
            f.used_at = time
            f.size = size
            session.add(f)
            if type == "input":
                Logger.instance().debug("Input file " + str(f) + " used.")
            elif type == "output" and dry:
                Logger.instance().debug("Output file " + str(f) + " has been loaded from previous execution.")
            elif type == "output" and not dry:
                Logger.instance().debug("Output file " + str(f) + " has been created.")
        # this commit is due to a bug I couldn't figure out: the session empties itself between the two loops...
        # this is not good at all since it may lead to inconsistency in the database
        session.commit()

        for t in [t for t in self.tables if t.type.name == type]:
            t.used_at = t.modification.time
            session.add(t)
        session.commit()
Example #43
    def setUp(self):
        self.s_root_path = PathFinder.get_module_path()
        OptionManager.initial_test_setup()
        print(OptionManager.instance()["--log"])

        SQLManager.instance().create_all()
        self.__local_session = SQLManager.instance().get_session()
        try:
            for i in range(10):
                self.__local_session.add(FooBase(name="testIODB " + str(i)))
            self.__local_session.commit()
        except Exception as e:
            self.__local_session.rollback()
            self.__local_session.close()
            raise e

        self.__io_base_existing = IODbPut(model="FooBase", tablename="FooBase")
        self.__io_base_existing.set_table(FooBase)
        self.__io_base_existing2 = IODbPut(model="FooBase",
                                           tablename="FooBase")
        self.__io_base_existing2.set_table(FooBase)
        self.__io_base_existing3 = IODbPut(model="FooBase2",
                                           tablename="FooBase2")
        self.__io_base_existing3.set_table(FooBase2)
Example #44
    def setUp(self):
        OptionManager.initial_test_setup()

        self.s_root_path = PathFinder.get_module_path()
        s_path_to_example_existing_file = os.path.join(
            self.s_root_path,
            "test/resource/input_files/example_existing_file.txt")
        s_path_to_example_existing_file2 = os.path.join(
            self.s_root_path,
            "test/resource/input_files/example_existing_file2.txt")
        s_path_to_example_not_existing_file = os.path.join(
            self.s_root_path,
            "test/resource/input_files/example_not_existing_file.txt")

        self.__io_file_existing = IOFilePut(
            name="existing_file", path=s_path_to_example_existing_file)
        self.__io_file_existing2 = IOFilePut(
            name="existing_file", path=s_path_to_example_existing_file)
        self.__io_file_existing3 = IOFilePut(
            name="existing_file2", path=s_path_to_example_existing_file)
        self.__io_file_existing4 = IOFilePut(
            name="existing_file", path=s_path_to_example_existing_file2)
        self.__io_file_not_existing = IOFilePut(
            name="not_existing_file", path=s_path_to_example_not_existing_file)
Example #45
    def run(self):
        """
        Run the tool and fire events.
        :return:
        """

        session_tw = SQLManager.instance().get_session()
        start = time_unix_ms()
        try:
            self.__toolwrapper.set_session(session_tw)
            # if the tool needs to be executed because its output doesn't exist
            if not self.__dry:
                Logger.instance().info(
                    "\n" + str(self.__toolwrapper) + "\n" + "command line: \n\t" + self.get_command_line())
                # if the execution should not be simulated
                if not OptionManager.instance()["--dry-run"]:
                    Logger.instance().info("Rule: " + str(self.__toolwrapper.name) + " -> " + self.__toolwrapper.__class__.__name__ + " started.")
                    # mkdir -p output dir: before running we need output dir
                    output_file_fields = self._ToolThread__toolwrapper.specify_output_file()
                    for out_field in output_file_fields:
                        out_file_path = self._ToolThread__toolwrapper.output_file(out_field)
                        out_dir = os.path.dirname(out_file_path)
                        try:
                            os.makedirs(out_dir)
                        except OSError as exception:
                            if exception.errno != errno.EEXIST:
                                raise
                    # end of mkdir -p output dir
                    self.__toolwrapper.run()
                    session_tw.commit()
                    self.__toolwrapper.set_execution_infos(start, time_unix_ms(), "EXECUTED")
                else:
                    Logger.instance().debug("Dry-run mode enabled. Execution skipped.")
                    self.__toolwrapper.set_execution_infos(status="DRY")
            else:
                Logger.instance().info("Rule: " + str(self.__toolwrapper.name) + " -> " + self.__toolwrapper.__class__.__name__ + " skipped.")
                self.__toolwrapper.set_execution_infos(start, time_unix_ms(), "ALREADY_EXECUTED")
        except Exception as e:
            session_tw.rollback()
            self.__toolwrapper.set_execution_infos(start, time_unix_ms(), "EXECUTION_ERROR")
            raise WopMarsException("Error while executing rule " + self.__toolwrapper.name +
                                   " (ToolWrapper " + self.__toolwrapper.toolwrapper + ")",
                                   "Full stack trace: \n" + str(traceback.format_exc()))
        finally:
            # todo twthread, close the session
            # session_tw.close()
            pass
        self.fire_success()
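The "mkdir -p" block in the middle of run() predates os.makedirs(exist_ok=True); on Python 3.2+ the same guarantee fits in one call. A sketch with a placeholder output path:

    import os

    out_file_path = "outdir/sub/result.txt"  # placeholder
    os.makedirs(os.path.dirname(out_file_path), exist_ok=True)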
Example #46
    def __init__(self):
        """
        The constructor of the SQLManager is supposed to be called once in the whole execution, thanks to the
        SingletonMixin inheritance.

        The constructor creates the engine at the location provided by the user. For SQLite, the "check_same_thread"
        key of "connect_args" is set to False.

        The "PRAGMA foreign_keys=ON" statement is executed here and allows enforcing foreign key constraints.

        The Session attribute is an object of class "Type" and allows making a scoped session bound to the engine on demand.
        autocommit is set to False and autoflush to True.

        The lock is initialized here with the RW lock class and will be used to override the behaviour of SQLite,
        managing access to the database without relying on SQLite's own queue, which waits 4 seconds before raising an error.
        """
        s_database_url = OptionManager.instance()["--database"]

        self.d_database_config = {'db_connection': None, 'db_database': None, 'db_url': s_database_url}
        self.d_database_config['db_connection'] = s_database_url.split("://")[0]
        if self.d_database_config['db_connection'] == "sqlite":
            self.d_database_config['db_database'] = s_database_url.replace("sqlite:///", "")
        # echo=False mutes the database log
        # connect_args is necessary because the same objects are accessed from different threads.
        if self.d_database_config['db_connection'] == "sqlite":
            self.__engine = create_engine(self.d_database_config['db_url'], echo=False, connect_args={'check_same_thread': False})
        else:
            self.__engine = create_engine(self.d_database_config['db_url'], echo=False)
        # Below, between "###", code copy-pasted from this post
        # http://stackoverflow.com/questions/2614984/sqlite-sqlalchemy-how-to-enforce-foreign-keys/7831210#7831210
        # enforce foreign key constraints
        ###
        def _fk_pragma_on_connect(dbapi_con, con_record):
            if self.d_database_config['db_connection'] == "sqlite":
                dbapi_con.execute('pragma foreign_keys=ON')

        event.listen(self.__engine, 'connect', _fk_pragma_on_connect)
        ###

        # I don't know why I have used the autoflush=True
        self.__Session = scoped_session(sessionmaker(bind=self.__engine, autoflush=True, autocommit=False))
        # The lock
        self.__lock = RWLock()
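
For reference, the PRAGMA listener registered above is the usual SQLAlchemy recipe for enforcing foreign keys on SQLite (see the linked StackOverflow post). A minimal, self-contained sketch of the same pattern, using an in-memory database purely as an assumption for illustration:

# Minimal sketch: an SQLite engine whose connections enforce foreign keys,
# plus a scoped session factory, mirroring the constructor above.
from sqlalchemy import create_engine, event, text
from sqlalchemy.orm import scoped_session, sessionmaker

engine = create_engine("sqlite:///:memory:", echo=False,
                       connect_args={'check_same_thread': False})

@event.listens_for(engine, 'connect')
def _fk_pragma_on_connect(dbapi_con, con_record):
    # SQLite disables foreign key enforcement per connection by default
    dbapi_con.execute('pragma foreign_keys=ON')

Session = scoped_session(sessionmaker(bind=engine, autoflush=True))

with engine.connect() as conn:
    # prints 1 once the listener has run on this connection
    print(conn.execute(text("pragma foreign_keys")).scalar())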
Example #47
0
    def parse(self):
        """
        Organize the parsing of the Workflow Definition File, or of a single Tool if only one tool is provided
        through the tool command.

        Call either the "iterate_wopfile_yml_dic_and_insert_rules_in_db()" method or the "load_one_toolwrapper()" method of the
        reader (depending on whether the tool command is used) to insert the set of workflow objects in the database.

        Then, the toolwrappers of the last execution are fetched before the DAG builds itself from the set of tools.

        The DAG is checked to actually be a Directed Acyclic Graph.

        If the "--dot" option is set, the dot and ps files are written here.

        :raise: WopMarsParsingException if the workflow is not a DAG.
        :return: the DAG
        """
        if not OptionManager.instance()["tool"]:
            self.__reader.iterate_wopfile_yml_dic_and_insert_rules_in_db(
                OptionManager.instance()["--wopfile"])
        else:
            self.__reader.load_one_toolwrapper(
                OptionManager.instance()["TOOLWRAPPER"],
                OptionManager.instance()["--input"],
                OptionManager.instance()["--output"],
                OptionManager.instance()["--params"])
        # Get back the set of toolwrappers of the workflow before executing them.
        set_toolwrappers = self.get_set_toolwrappers()
        dag_tools = DAG(set_toolwrappers)
        if not is_directed_acyclic_graph(dag_tools):
            # totodo LucG find out the loop to specify it in the error message
            raise WopMarsException(
                "Error while parsing the configuration file: \n\tThe workflow is malformed:",
                "The specified Workflow cannot be represented as a DAG.")
        s_dot_option = OptionManager.instance()["--dot"]
        if s_dot_option:
            Logger.instance().info(
                "Writing the dot and ps files representing the workflow at " +
                str(s_dot_option))
            dag_tools.write_dot(s_dot_option)
            Logger.instance().debug("Dot and ps files written.")
        return dag_tools
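
The acyclicity check above delegates to networkx's is_directed_acyclic_graph. A minimal sketch with a plain DiGraph (not the WoPMaRS DAG class) and hypothetical rule names shows what passes and what would trigger the WopMarsException:

# Sketch of the acyclicity check using a plain networkx DiGraph;
# the rule names are hypothetical.
import networkx as nx
from networkx import is_directed_acyclic_graph

g = nx.DiGraph()
g.add_edges_from([("rule1", "rule2"), ("rule2", "rule3")])
print(is_directed_acyclic_graph(g))   # True

g.add_edge("rule3", "rule1")          # introduces a cycle
print(is_directed_acyclic_graph(g))   # False -> parse() would raise WopMarsException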
Example #48
0
    def parse(self):
        """
        Organize the parsing of the Workflow Definition File, or of a single Tool if only one tool is provided
        through the tool command.

        Call either the "read()" method or the "load_one_toolwrapper()" method of the reader (depending on whether
        the tool command is used) to insert the set of workflow objects in the database.

        Then, the toolwrappers of the last execution are fetched before the DAG builds itself from the set of tools.

        The DAG is checked to actually be a Directed Acyclic Graph.

        If the "--dot" option is set, the dot and ps files are written here.

        :raise: WopMarsParsingException if the workflow is not a DAG.
        :return: the DAG
        """
        if not OptionManager.instance()["tool"]:
            self.__reader.read(OptionManager.instance()["--wopfile"])
        else:
            self.__reader.load_one_toolwrapper(OptionManager.instance()["TOOLWRAPPER"],
                                               OptionManager.instance()["--input"],
                                               OptionManager.instance()["--output"],
                                               OptionManager.instance()["--params"])
        # Get back the set of toolwrappers of the workflow before executing them.
        set_toolwrappers = self.get_set_toolwrappers()
        dag_tools = DAG(set_toolwrappers)
        if not is_directed_acyclic_graph(dag_tools):
            # todo find out the loop to specify it in the error message
            raise WopMarsException("Error while parsing the configuration file: \n\tThe workflow is malformed:",
                                   "The specified Workflow cannot be represented as a DAG.")
        s_dot_option = OptionManager.instance()["--dot"]
        if s_dot_option:
            Logger.instance().info("Writing the dot and ps files representing the workflow at " + str(s_dot_option))
            dag_tools.write_dot(s_dot_option)
            Logger.instance().debug("Dot and ps files written.")
        return dag_tools
Example #49
0
    def setUp(self):
        OptionManager.initial_test_setup()
        self.s_root_path = PathFinder.get_module_path()
        SQLManager.instance().create_all()

        set_tw_to_add = set()
        self.__session = SQLManager.instance().get_session()

        self.input_entry = Type(name="input")
        self.output_entry = Type(name="output")

        ### Toolwrappers for __eq__ test_bak
        opt1 = Option(name="param1", value="1")

        f1 = IOFilePut(name="input1", path="file1.txt")
        f1.type = self.input_entry

        f2 = IOFilePut(name="output1", path="file2.txt")
        f2.type = self.output_entry

        self.__toolwrapper1 = ToolWrapper(rule_name="rule1")
        self.__toolwrapper1.files.extend([f1, f2])
        self.__toolwrapper1.options.append(opt1)

        opt1 = Option(name="param1", value="1")

        f1 = IOFilePut(name="input1", path="file1.txt")
        f1.type = self.input_entry

        f2 = IOFilePut(name="output1", path="file2.txt")
        f2.type = self.output_entry

        self.__toolwrapper2 = ToolWrapper(rule_name="rule2")
        self.__toolwrapper2.files.extend([f1, f2])
        self.__toolwrapper2.options.append(opt1)

        opt1 = Option(name="param2", value="2")

        f1 = IOFilePut(name="input1", path="file1.txt")
        f1.type = self.input_entry

        f2 = IOFilePut(name="output1", path="file2.txt")
        f2.type = self.output_entry

        self.__toolwrapper3 = ToolWrapper(rule_name="rule3")
        self.__toolwrapper3.files.extend([f1, f2])
        self.__toolwrapper3.options.append(opt1)

        ### ToolWrappers for content_respected
        opt1 = Option(name="param1", value="2")

        f1 = IOFilePut(name="input1", path="file1.txt")
        f1.type = self.input_entry

        f2 = IOFilePut(name="output1", path="file2.txt")
        f2.type = self.output_entry

        t1 = IODbPut(model="FooBase", tablename="FooBase")
        t1.set_table(FooBase)
        t1.table = t1
        t1.type = self.input_entry

        t2 = IODbPut(model="FooBase", tablename="FooBase")
        t2.set_table(FooBase)
        t2.table = t2
        t2.type = self.output_entry

        self.__foowrapper_right_content = FooWrapper3(rule_name="rule1")
        self.__foowrapper_right_content.files.extend([f1, f2])
        self.__foowrapper_right_content.tables.extend([t1, t2])
        self.__foowrapper_right_content.options.append(opt1)

        opt1 = Option(name="param1", value="String")

        f1 = IOFilePut(name="input1", path="file1.txt")
        f1.type = self.input_entry

        f2 = IOFilePut(name="output1", path="file2.txt")
        f2.type = self.output_entry

        t1 = IODbPut(model="FooBase", tablename="FooBase")
        t1.set_table(FooBase)
        t1.table = t1

        t2 = IODbPut(model="FooBase", tablename="FooBase")
        t2.set_table(FooBase)
        t2.table = t2

        self.__foowrapper_wrong_content1 = FooWrapper3(rule_name="rule2")
        self.__foowrapper_wrong_content1.files.extend([f1, f2])
        self.__foowrapper_wrong_content1.tables.extend([t1, t2])
        self.__foowrapper_wrong_content1.options.append(opt1)

        opt1 = Option(name="param2", value="2")

        f1 = IOFilePut(name="input1", path="file1.txt")
        f1.type = self.input_entry

        f2 = IOFilePut(name="output1", path="file2.txt")
        f2.type = self.output_entry

        f3 = IOFilePut(name="input2", path="file2.txt")
        f3.type = self.input_entry

        t1 = IODbPut(model="FooBase", tablename="FooBase")
        t1.set_table(FooBase)
        t1.table = t1

        t2 = IODbPut(model="FooBase", tablename="FooBase")
        t2.set_table(FooBase)
        t2.table = t2

        self.__foowrapper_wrong_content2 = FooWrapper3(rule_name="rule3")
        self.__foowrapper_wrong_content2.files.extend([f1, f2, f3])
        self.__foowrapper_wrong_content2.tables.extend([t1, t2])
        self.__foowrapper_wrong_content2.options.append(opt1)

        opt1 = Option(name="param2", value="2")

        f1 = IOFilePut(name="input1", path="file1.txt")
        f1.type = self.input_entry

        f2 = IOFilePut(name="output1", path="file2.txt")
        f2.type = self.output_entry

        t1 = IODbPut(model="FooBase", tablename="FooBase")
        t1.set_table(FooBase)
        t1.table = t1

        t2 = IODbPut(model="FooBase", tablename="FooBase")
        t2.set_table(FooBase)
        t2.table = t2

        self.__foowrapper_wrong_content3 = FooWrapper3(rule_name="rule3")
        self.__foowrapper_wrong_content3.files.extend([f1, f2])
        self.__foowrapper_wrong_content3.tables.extend([t1, t2])
        self.__foowrapper_wrong_content3.options.append(opt1)

        opt1 = Option(name="param1", value="String")

        f1 = IOFilePut(name="input1", path="file1.txt")
        f1.type = self.input_entry

        f2 = IOFilePut(name="output1", path="file2.txt")
        f2.type = self.output_entry

        t1 = IODbPut(model="FooBase", tablename="FooBase")
        t1.set_table(FooBase)
        t1.table = t1

        t2 = IODbPut(model="FooBase", tablename="FooBase")
        t2.set_table(FooBase)
        t2.table = t2

        self.__foowrapper_wrong_content4 = FooWrapper3(rule_name="rule3")
        self.__foowrapper_wrong_content4.files.extend([f1, f2])
        self.__foowrapper_wrong_content4.tables.extend([t1, t2])
        self.__foowrapper_wrong_content4.options.append(opt1)

        f1 = IOFilePut(name="input1", path="file1.txt")
        f1.type = self.input_entry

        f2 = IOFilePut(name="output1", path="file2.txt")
        f2.type = self.output_entry

        t1 = IODbPut(model="FooBase", tablename="FooBase")
        t1.set_table(FooBase)
        t1.table = t1

        t2 = IODbPut(model="FooBase", tablename="FooBase")
        t2.set_table(FooBase)
        t2.table = t2

        self.__foowrapper_wrong_content5 = FooWrapper3(rule_name="rule3")
        self.__foowrapper_wrong_content5.files.extend([f1, f2])
        self.__foowrapper_wrong_content5.tables.extend([t1, t2])

        ### ToolWrappers for follows

        f1 = IOFilePut(name="input1", path="file1.txt")
        f1.type = self.input_entry

        f2 = IOFilePut(name="output1", path="file2.txt")
        f2.type = self.output_entry

        self.__toolwrapper_first = FooWrapper2(rule_name="rule1")
        self.__toolwrapper_first.files.extend([f1, f2])

        f1 = IOFilePut(name="input1", path="file2.txt")
        f1.type = self.input_entry

        f2 = IOFilePut(name="output1", path="file3.txt")
        f2.type = self.output_entry

        self.__toolwrapper_second = FooWrapper2(rule_name="rule2")
        self.__toolwrapper_second.files.extend([f1, f2])

        ### ToolWrappers for are_input_ready

        s_root_path = PathFinder.get_module_path()

        s_path_to_example_file_that_exists = os.path.join(s_root_path, "test/resource/input_files/input_file1.txt")

        f1 = IOFilePut(name="input1", path=s_path_to_example_file_that_exists)
        f1.type = self.input_entry

        f2 = IOFilePut(name="output1", path="file2.txt")
        f2.type = self.output_entry

        self.__toolwrapper_ready = FooWrapper2(rule_name="rule2")
        self.__toolwrapper_ready.files.extend([f1, f2])

        f1 = IOFilePut(name="input1", path="/not/existent/file")
        f1.type = self.input_entry

        f2 = IOFilePut(name="output1", path="file2.txt")
        f2.type = self.output_entry

        self.__toolwrapper_not_ready = FooWrapper2(rule_name="rule2")
        self.__toolwrapper_not_ready.files.extend([f1, f2])
Example #50
0
 def tearDown(self):
     SQLManager.instance().get_session().close() 
     SQLManager.instance().drop_all()
     OptionManager._drop()
     PathFinder.silentremove("test/output/output_file1.txt")
     SQLManager._drop()
Example #51
0
 def tearDown(self):
     SQLManager.instance().get_session().close()
     SQLManager.instance().drop_all()
     OptionManager._drop()
     SQLManager._drop()
Example #52
0
    def read(self, s_definition_file):
        """
        Reads the given file and inserts the rules of the workflow in the database.

        The definition file is supposed to be properly formed. The validation of the content of the definition is done
        during the instantiation of the tools.

        :param s_definition_file: String containing the path to the definition file.
        :type s_definition_file: str
        :raise WopMarsException: The content is not validated
        """
        self.load_definition_file(s_definition_file)

        session = SQLManager.instance().get_session()

        # The dict_workflow_definition is assumed to be well formed
        try:
            # The same execution entry for the whole workflow-related database entries.
            execution = Execution(started_at=time_unix_ms())
            # get the types database entries that should have been created previously
            input_entry = session.query(Type).filter(Type.name == "input").one()
            output_entry = session.query(Type).filter(Type.name == "output").one()
            set_wrapper = set()
            # Encounter a rule block
            for rule in self.__dict_workflow_definition:
                str_wrapper_name = None
                # the rule name is the token after the "rule" keyword. A trailing ":" shouldn't be there, but stripping it costs nothing.
                str_rule_name = rule.split()[-1].strip(":")
                Logger.instance().debug("Encounter rule " + str_rule_name + ": \n" +
                                        str(DictUtils.pretty_repr(self.__dict_workflow_definition[rule])))
                # The dict of "input"s, "output"s and "params" is re-initialized for each wrapper
                dict_dict_dict_elm = dict(dict_input={"file": {}, "table": {}},
                                          dict_params={},
                                          dict_output={"file": {}, "table": {}})
                for key_second_step in self.__dict_workflow_definition[rule]:
                    # key_second_step is supposed to be "tool", "input", "output" or "params"
                    if type(self.__dict_workflow_definition[rule][key_second_step]) == dict:
                        # if it is a dict, then inputs, outputs or params are coming
                        for key_third_step in self.__dict_workflow_definition[rule][key_second_step]:
                            # todo tabling modification of the indentation levels + appearance of tables in file
                            if key_second_step == "params":
                                key = key_third_step
                                value = self.__dict_workflow_definition[rule][key_second_step][key_third_step]
                                obj_created = Option(name=key,
                                                     value=value)
                                dict_dict_dict_elm["dict_params"][key] = obj_created
                            else:
                                for key_fourth_step in self.__dict_workflow_definition[rule][key_second_step][key_third_step]:
                                    obj_created = None
                                    if key_third_step == "file":
                                        key = key_fourth_step
                                        str_path_to_file = os.path.join(OptionManager.instance()["--directory"],
                                                                        self.__dict_workflow_definition[rule][
                                                                            key_second_step][
                                                                            key_third_step][
                                                                            key])
                                        obj_created = IOFilePut(name=key,
                                                                path=os.path.abspath(str_path_to_file))

                                    elif key_third_step == "table":
                                        key = key_fourth_step
                                        modelname = self.__dict_workflow_definition[rule][
                                            key_second_step][
                                            key_third_step][
                                            key]
                                        obj_created = IODbPut(model=modelname, tablename=key)

                                        dict_dict_dict_elm["dict_" + key_second_step][
                                            key_third_step][
                                            key] = self.__dict_workflow_definition[rule][key_second_step][key_third_step][key]
                                    # all elements of the current rule block are stored in there
                                    # key_second_step is input or output here
                                    dict_dict_dict_elm["dict_" + key_second_step][key_third_step][key] = obj_created
                                    Logger.instance().debug("Object " + key_second_step + " " + key_third_step + ": " +
                                                            key + " created.")
                    else:
                        # if the step is not a dict, then it is supposed to be the "tool" line
                        str_wrapper_name = self.__dict_workflow_definition[rule][key_second_step]
                # At this point, "dict_dict_dict_elm" is like this:
                # {
                #     'dict_params': {
                #         'option1': Option('option1', 'valueofoption1')
                #     },
                #     'dict_input': {
                #         'file' : {
                #             'input1': IOFilePut('input1', 'path/to/input1')
                #         }
                #         'table': {
                #             'table1': IODbPut('table1', 'package.of.table1')
                #         }
                #     },
                # }

                # Instantiate the referred class and add it to the set of objects
                wrapper_entry = self.create_toolwrapper_entry(str_rule_name, str_wrapper_name, dict_dict_dict_elm, input_entry, output_entry)
                # Associating a toolwrapper to an execution
                wrapper_entry.execution = execution
                set_wrapper.add(wrapper_entry)
                Logger.instance().debug("Object toolwrapper: " + str_wrapper_name + " created.")
                # commit/rollback trick to clean the session - SQLAlchemy bug suspected
                session.commit()
                session.rollback()
                # todo set_table_properties outside the rules loop to take into account all the tables at once
                # (error if one tool has a foreign key referring to a table that is not in its I/O put
            IODbPut.set_tables_properties(IODbPut.get_execution_tables())
            session.commit()
            session.rollback()
            # This create_all will create all tables that have been found in the toolwrapper
            # if not SQLManager.instance().d_database_config['db_connection'] == 'postgresql':
            # TODO: this function is not creating the triggers after the table in postgresql so I switched it off
            IODbPut.create_triggers()
            SQLManager.instance().create_all()
            session.add_all(set_wrapper)
            # save all operations done so far.
            session.commit()
            for tw in set_wrapper:
                tw.is_content_respected()

        except NoResultFound as e:
            session.rollback()
            raise WopMarsException("Error while parsing the configuration file. The database has not been setUp Correctly.",
                                   str(e))
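
read() iterates over a nested dictionary obtained by loading the Workflow Definition File as YAML. A small sketch of that structure, built from a Wopfile-like rule block whose concrete content is hypothetical and only meant to show the shape the loop above expects:

# Load a Wopfile-like rule block and show the nested dict read() iterates over.
# The rule content is hypothetical, for illustration only.
import pprint
import yaml

s_yaml = """
rule rule1:
    tool: 'package.FooWrapper'
    input:
        file:
            input1: 'file1.txt'
    output:
        file:
            output1: 'file2.txt'
    params:
        param1: 1
"""

dict_workflow_definition = yaml.safe_load(s_yaml)
pprint.pprint(dict_workflow_definition)
# Roughly:
# {'rule rule1': {'input': {'file': {'input1': 'file1.txt'}},
#                 'output': {'file': {'output1': 'file2.txt'}},
#                 'params': {'param1': 1},
#                 'tool': 'package.FooWrapper'}}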
Example #53
0
    def setUp(self):
        OptionManager.initial_test_setup()
        SQLManager.instance().create_all()
        #        first
        #       /    \
        #   second   third
        #       \    /
        #       fourth
        #

        self.__session = SQLManager.instance().get_session()

        input_entry = Type(name="input")
        output_entry = Type(name="output")

        f1 = IOFilePut(name="input1", path="file1.txt")
        f1.type = input_entry

        f2 = IOFilePut(name="output1", path="file2.txt")
        f2.type = output_entry

        self.__toolwrapper_first = FooWrapper2(rule_name="rule1")
        self.__toolwrapper_first.files.extend([f1, f2])

        f1 = IOFilePut(name="input1", path="file2.txt")
        f1.type = input_entry

        f2 = IOFilePut(name="output1", path="file3.txt")
        f2.type = output_entry

        self.__toolwrapper_second = FooWrapper2(rule_name="rule2")
        self.__toolwrapper_second.files.extend([f1, f2])

        f1 = IOFilePut(name="input1", path="file2.txt")
        f1.type = input_entry

        f2 = IOFilePut(name="output1", path="file4.txt")
        f2.type = output_entry

        self.__toolwrapper_third = FooWrapper2(rule_name="rule3")
        self.__toolwrapper_third.files.extend([f1, f2])

        f1 = IOFilePut(name="input1", path="file3.txt")
        f1.type = input_entry

        f2 = IOFilePut(name="input2", path="file4.txt")
        f2.type = input_entry

        f3 = IOFilePut(name="output1", path="file5.txt")
        f3.type = output_entry

        self.__toolwrapper_fourth = FooWrapper8(rule_name="rule4")
        self.__toolwrapper_fourth.files.extend([f1, f2, f3])

        list_tool = [self.__toolwrapper_first,
                     self.__toolwrapper_second,
                     self.__toolwrapper_third,
                     self.__toolwrapper_fourth]

        self.__set_tool = set(list_tool)

        SQLManager.instance().get_session().add_all(list_tool)
        SQLManager.instance().get_session().commit()
Example #54
0
 def tearDown(self):
     SQLManager.instance().get_session().close()
     SQLManager.instance().drop_all()
     PathFinder.dir_content_remove("test/output")
     OptionManager._drop()
     SQLManager._drop()
Example #55
0
    def test_parse(self):
        OptionManager.initial_test_setup()

        # The good --------------------------:
        input_entry = Type(name="input")
        output_entry = Type(name="output")

        f1 = IOFilePut(name="input1", path="test/resource/input_files/input_file1.txt")
        f1.type = input_entry

        f2 = IOFilePut(name="output1", path="test/output/output_file1.txt")
        f2.type = output_entry

        f3 = IOFilePut(name="input1", path="test/output/output_file1.txt")
        f3.type = input_entry

        f3bis = IOFilePut(name="input1", path="test/output/output_file1.txt")
        f3bis.type = input_entry

        f4 = IOFilePut(name="output1", path="test/output/output_file2.txt")
        f4.type = output_entry

        f5 = IOFilePut(name="output1", path="test/output/output_file3.txt")
        f5.type = output_entry

        f6 = IOFilePut(name="output2", path="test/output/output_file4.txt")
        f6.type = output_entry

        f7 = IOFilePut(name="input1", path="test/output/output_file3.txt")
        f7.type = input_entry

        f8 = IOFilePut(name="input2", path="test/output/output_file2.txt")
        f8.type = input_entry

        f9 = IOFilePut(name="output1", path="test/output/output_file5.txt")
        f9.type = output_entry

        f10 = IOFilePut(name="input1", path="test/output/output_file4.txt")
        f10.type = input_entry

        f11 = IOFilePut(name="output1", path="test/output/output_file6.txt")
        f11.type = output_entry

        f12 = IOFilePut(name="input1", path="test/output/output_file1.txt")
        f12.type = input_entry

        f13 = IOFilePut(name="input2", path="test/output/output_file5.txt")
        f13.type = input_entry

        f14 = IOFilePut(name="input3", path="test/output/output_file6.txt")
        f14.type = input_entry

        f15 = IOFilePut(name="output1", path="test/output/output_file7.txt")
        f15.type = output_entry

        t1 = IODbPut(model="FooBase", tablename="FooBase")
        t1.type = output_entry

        t1bis = IODbPut(model="FooBase", tablename="FooBase")
        t1bis.type = input_entry

        t2 = IODbPut(model="FooBase2", tablename="FooBase2")
        t2.type = output_entry

        t2bis = IODbPut(model="FooBase2", tablename="FooBase2")
        t2bis.type = input_entry

        tw1 = FooWrapper4(rule_name="rule1")
        tw1.files.extend([f1, f2])
        tw2 = FooWrapper5(rule_name="rule2")
        tw2.files.extend([f3, f4])
        tw2.tables.extend([t1])
        tw3 = FooWrapper6(rule_name="rule3")
        tw3.files.extend([f3bis, f5, f6])
        tw4 = FooWrapper7(rule_name="rule4")
        tw4.tables.extend([t1bis, t2])
        tw5 = FooWrapper8(rule_name="rule5")
        tw5.files.extend([f8, f7, f9])
        tw6 = FooWrapper9(rule_name="rule6")
        tw6.files.extend([f10, f11])
        tw6.tables.extend([t2bis])
        tw7 = FooWrapper10(rule_name="rule7")
        tw7.files.extend([f12, f13, f14, f15])

        set_toolwrappers = set([tw1, tw2, tw3, tw4, tw5, tw6, tw7])

        OptionManager.instance()["--dot"] = None

        dag_expected = DAG(set_toolwrappers)
        OptionManager.instance()["--wopfile"] = os.path.join(self.__s_root_path, "test/resource/wopfile/example_def_file1.yml")
        dag_obtained = self.__parser.parse()
        self.assertEqual(dag_expected, dag_obtained)

        OptionManager.instance()["--wopfile"] = os.path.join(self.__s_root_path, "test/resource/wopfile/example_def_file_not_a_dag.yml")
        with self.assertRaises(WopMarsException):
            self.__parser.parse()

        # Verify the dot file ----------------:
        OptionManager.instance()["--wopfile"] = os.path.join(self.__s_root_path, "test/resource/wopfile/example_def_file1.yml")
        #dot_path = os.path.join(self.__s_root_path, "test_bak.dot")
        #OptionManager.instance()["--dot"] = dot_path
        self.__parser.parse()
Example #56
0
    def load_one_toolwrapper(self, s_toolwrapper, s_dict_inputs, s_dict_outputs, s_dict_params):
        """
        Method called when the ``tool`` command is used. It is equivalent to the :meth:`~.wopmars.framework.parsing.Reader.Reader.read` method but creates a workflow
        with only one toolwrapper. The workflow is also stored inside the database.

        :param s_toolwrapper: The name of the toolwrapper (will be imported)
        :type s_toolwrapper: str
        :param s_dict_inputs: A string containing the dict of input files
        :type s_dict_inputs: str
        :param s_dict_outputs: A string containing the dict of output files
        :type s_dict_outputs: str
        :param s_dict_params: A string containing the dict of params
        :type s_dict_params: str

        :raise WopMarsException: There is an error while accessing the database
        """
        session = SQLManager.instance().get_session()
        dict_inputs = dict(eval(s_dict_inputs))
        dict_outputs = dict(eval(s_dict_outputs))
        dict_params = dict(eval(s_dict_params))
        try:
            # The same execution entry for the whole workflow-related database entries.
            execution = Execution(started_at=time_unix_ms())
            # get the types that should have been created previously
            input_entry = session.query(Type).filter(Type.name == "input").one()
            output_entry = session.query(Type).filter(Type.name == "output").one()

            Logger.instance().debug("Loading unique toolwrapper " + s_toolwrapper)
            dict_dict_dict_elm = dict(dict_input={"file": {}, "table": {}},
                                 dict_params={},
                                 dict_output={"file": {}, "table": {}})
            for type in dict_inputs:
                if type == "file":
                    for s_input in dict_inputs[type]:
                        obj_created = IOFilePut(name=s_input,
                                                path=os.path.abspath(os.path.join(OptionManager.instance()["--directory"],
                                                                                  dict_inputs[type][s_input])))
                        dict_dict_dict_elm["dict_input"][type][s_input] = obj_created
                        Logger.instance().debug("Object input file: " + s_input + " created.")
                elif type == "table":
                    for s_input in dict_inputs[type]:
                        obj_created = IODbPut(model=dict_inputs[type][s_input],
                                              tablename=s_input)
                        dict_dict_dict_elm["dict_input"][type][s_input] = obj_created
                        Logger.instance().debug("Object input table: " + s_input + " created.")
            for type in dict_outputs:
                if type == "file":
                    for s_output in dict_outputs[type]:
                        obj_created = IOFilePut(name=s_output,
                                                path=os.path.abspath(os.path.join(OptionManager.instance()["--directory"],
                                                                                  dict_outputs[type][s_output])))
                        dict_dict_dict_elm["dict_output"]["file"][s_output] = obj_created
                        Logger.instance().debug("Object output file: " + s_output + " created.")
                elif type == "table":
                    for s_output in dict_outputs[type]:
                        obj_created = IODbPut(model=dict_outputs[type][s_output],
                                              tablename=s_output)
                        dict_dict_dict_elm["dict_output"]["table"][s_output] = obj_created
                        Logger.instance().debug("Object output table: " + s_output + " created.")
            for s_param in dict_params:
                obj_created = Option(name=s_param,
                                     value=dict_params[s_param])
                dict_dict_dict_elm["dict_params"][s_param] = obj_created
                Logger.instance().debug("Object option: " + s_param + " created.")

            # Instantiate the referred class
            wrapper_entry = self.create_toolwrapper_entry("rule_" + s_toolwrapper, s_toolwrapper,
                                                          dict_dict_dict_elm, input_entry, output_entry)
            wrapper_entry.execution = execution
            Logger.instance().debug("Object toolwrapper: " + s_toolwrapper + " created.")
            session.add(wrapper_entry)
            session.commit()
            session.rollback()
            IODbPut.set_tables_properties(IODbPut.get_execution_tables())
            # commit/rollback trick to clean the session
            # todo ask lionel: have you ever had this problem of not being able to run queries and inserts
            # in the same session?
            session.commit()
            session.rollback()
            # This create_all will create all tables that have been found in the toolwrapper
            # if not SQLManager.instance().d_database_config['db_connection'] == 'postgresql':
            # TODO: this function is not creating the triggers after the table in postgresql so I switched it off
            IODbPut.create_triggers()
            SQLManager.instance().create_all()
            wrapper_entry.is_content_respected()
        except NoResultFound as e:
            session.rollback()
            raise WopMarsException("Error while parsing the configuration file. The database has not been setUp Correctly.",
                                   str(e))
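
The three dict arguments received by load_one_toolwrapper() are string-encoded Python dictionaries taken from the --input, --output and --params options. A short sketch with hypothetical values; ast.literal_eval is shown here as a safer stand-in for the eval() calls above:

# Hypothetical string-encoded dictionaries as they would arrive from the
# command line; ast.literal_eval parses such literals without executing code.
import ast

s_dict_inputs = "{'file': {'input1': 'input_file1.txt'}}"
s_dict_outputs = "{'file': {'output1': 'output_file1.txt'}}"
s_dict_params = "{'param1': 1}"

dict_inputs = ast.literal_eval(s_dict_inputs)
dict_outputs = ast.literal_eval(s_dict_outputs)
dict_params = ast.literal_eval(s_dict_params)

print(dict_inputs["file"]["input1"])    # input_file1.txt
print(dict_outputs["file"]["output1"])  # output_file1.txt
print(dict_params["param1"])            # 1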
Example #57
0
 def setUp(self):
     OptionManager.initial_test_setup()
     self.s_root_path = PathFinder.get_module_path()
     SQLManager.instance().create_all()
     self.__session = SQLManager.instance().get_session()
Example #58
0
    def run_queue(self):
        """
        Call the start() method of all elements of the queue.

        The tools in the queue are taken and their inputs are checked. If they are ready, the tools are started.
        If not, they are put in a buffer list of tools that are either not ready, or ready but lacking the
        necessary resources.

        The start method is called with a dry argument: if the inputs of the ToolWrapper are the same as in a
        previous execution and the outputs are already available, the dry parameter is set to True and the
        start method only simulates the execution.

        After that, the code checks the state of the workflow and gathers the information needed to tell whether
        the workflow is finished, has encountered an error, or is still running.

        :raises WopMarsException: The workflow encountered a problem and must stop.
        """

        #
        # # TODO THIS METHOD IS NOT THREAD-SAFE (maybe it is, to be checked)
        #

        # If no tools have been added to the queue:
        #  - All tools have been executed and the queue is empty, so nothing happens
        #  - There were remaining tools in the queue but they weren't ready, so they are tested again
        while not self.__queue_exec.empty():
            Logger.instance().debug("Queue size: " + str(self.__queue_exec.qsize()))
            Logger.instance().debug("Queue content: " + str(["rule: " + tt.get_toolwrapper().name + "->" +
                                                             tt.get_toolwrapper().toolwrapper for tt in self.__queue_exec.get_queue_tuple()]))
            # get the first element of the queue to execute
            thread_tw = self.__queue_exec.get()
            tw = thread_tw.get_toolwrapper()
            Logger.instance().debug("Current rule: " + tw.name + "->" + tw.toolwrapper)
            # check if the predecessors of a rule have been already executed: a rule shouldn't be executed if
            # its predecessors have not been executed yet
            if not self.all_predecessors_have_run(tw):
                Logger.instance().debug("Predecessors of rule: " + tw.name + " have not been executed yet.")
            # for running, either the inputs have to be ready or the dry-run mode is enabled
            elif tw.are_inputs_ready() or OptionManager.instance()["--dry-run"]:
                # the state of inputs (table and file) are set in the db here.
                tw.set_args_time_and_size("input")
                Logger.instance().debug("ToolWrapper ready: " + tw.toolwrapper)
                dry = False
                # if forceall option, then the tool is reexecuted anyway
                # check if the actual execution of the toolwrapper is necessary
                # every predecessor of the toolwrapper has to have been executed (or simulated)
                if not OptionManager.instance()["--forceall"] and \
                        self.is_this_tool_already_done(tw) and \
                        not bool([node for node in self.__dag_to_exec.predecessors(tw) if node.status != "EXECUTED" and
                                        node.status != "ALREADY_EXECUTED"]):
                    Logger.instance().info("Rule: " + tw.name + " -> " + tw.toolwrapper +
                                           " seems to have already been run" +
                                           " with the same parameters.")
                    dry = True

                # todo twthread resource verification
                thread_tw.subscribe(self)
                self.__count_exec += 1
                # todo twthread start method
                thread_tw.set_dry(dry)
                try:
                    # be careful here: the execution of the toolthreads is recursive, meaning that calls to this function may
                    # be stacked (run -> notify success -> run(next tool) -> notify success(next tool) -> etc.)
                    # todo twthread start method
                    thread_tw.run()
                except Exception as e:
                    # as mentioned above, there may be recursive calls to this function, so any exception can
                    # pass here multiple times: this attribute is used to recognize exceptions that have already been
                    # caught
                    if not hasattr(e, "teb_already_seen"):
                        setattr(e, "teb_already_seen", True)
                        tw.set_execution_infos(status="EXECUTION_ERROR")
                        self.__session.add(tw)
                        self.__session.commit()
                    raise e
            else:
                Logger.instance().debug("ToolWrapper not ready: rule: " + tw.name + " -> " + str(tw.toolwrapper))
                # The buffer contains the ToolWrappers that have inputs which are not ready yet.
                self.__list_queue_buffer.append(thread_tw)

        Logger.instance().debug("Buffer: " + str(["rule: " + t.get_toolwrapper().name + "->" +
                                                  t.get_toolwrapper().toolwrapper for t in self.__list_queue_buffer]))
        Logger.instance().debug("Running rules: " + str(self.__count_exec))

        # There is no more ToolWrapper that are waiting to be executed.
        # Is there some tools that are currently being executed?
        if self.__count_exec == 0:
            # Is there some tools that weren't ready?
            if len(self.__list_queue_buffer) == 0:
                # If there is no tool waiting and no tool being executed, the workflow has finished.
                finished_at = time_unix_ms()
                finished_at_strftime = datetime.datetime.fromtimestamp(finished_at/1000).strftime('%Y-%m-%d %H:%M:%S')
                Logger.instance().info("The workflow has completed. Finished at: " + finished_at_strftime)
                self.set_finishing_informations(finished_at, "FINISHED")
                SQLManager.instance().get_session().close()
                sys.exit(0)
            # only relevant in a multi-threaded environment
            elif not self.check_buffer():
                # If no tool is being executed but some are still waiting for something, the workflow has an issue
                finished_at = time_unix_ms()
                tw_list = [t.get_toolwrapper() for t in self.__list_queue_buffer]
                if len(tw_list) > 0:
                    input_files_not_ready = tw_list[0].get_input_files_not_ready()
                    self.set_finishing_informations(finished_at, "ERROR")
                    raise WopMarsException("The workflow has failed.",
                                           "The inputs '{}' have failed for this tool '{}'".format(input_files_not_ready[0], tw_list[0].name))
Example #59
0
 def toolwrapper_error(self, msg, tw_name):
     if OptionManager.instance()["--toolwrapper-log"]:
         self.__tw_streamhandler.setFormatter(logging.Formatter(ColorPrint.green(tw_name + ' :: %(asctime)s :: %(levelname)s :: %(message)s')))
         self.__tw_logger.error(msg)
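
toolwrapper_error() relies on the standard logging handler/formatter pair: the formatter is swapped at call time so each record is prefixed with the toolwrapper name. A minimal sketch without the WoPMaRS ColorPrint helper; the logger name and toolwrapper name below are hypothetical:

# Sketch of the handler/formatter pattern: the formatter is replaced before
# logging so the record carries the toolwrapper name as a prefix.
import logging

tw_logger = logging.getLogger("toolwrapper_example")
tw_streamhandler = logging.StreamHandler()
tw_logger.addHandler(tw_streamhandler)
tw_logger.setLevel(logging.DEBUG)

tw_name = "rule1"
tw_streamhandler.setFormatter(
    logging.Formatter(tw_name + ' :: %(asctime)s :: %(levelname)s :: %(message)s'))
tw_logger.error("something went wrong in the toolwrapper")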
Example #60
0
    def run(argv):
        """
        Entry-point of the program
        """

        # if the command line is malformed, docopt interrupts the software.
        try:
            if argv[1:] == []: # If there are no arguments, show the help
                argv.append('-h')
            OptionManager.instance().update(docopt(__doc__, argv=argv[1:]))
        except DocoptExit as SE:
            print("Bad argument in the command line: \n\t" + " ".join(argv) + "\n" + str(SE))
            sys.exit(2)
        try:
            schema_option = Schema({
                '--wopfile': Or("Wopfile", str),
                '--database': Use(PathFinder.check_database_valid_url),
                '-v': Or(0, And(int, lambda n: 1 <= n <= 2)),
                '--dot': Or(None, And(Use(PathFinder.check_valid_path), Use(PathFinder.check_pygraphviz))),
                "--log": Use(PathFinder.check_valid_path),
                '--printtools': Use(bool),
                "--sourcerule": Or(None, str),
                "--targetrule": Or(None, str),
                "--forceall": Use(bool),
                "--dry-run": Use(bool),
                "--directory": Use(PathFinder.create_workingdir),
                "--input": Use(DictUtils.str_to_dict),
                "--output": Use(DictUtils.str_to_dict),
                "--params": Use(DictUtils.str_to_dict),
                "TOOLWRAPPER": Or(None, Use(PathFinder.is_in_python_path)),
                "tool": Use(bool),
                "example": Use(bool),
                "example_snp": Use(bool),
                "--clear-history": Use(bool),
                "--toolwrapper-log": Use(bool)
            })
            # The option values are validated using schema library
            OptionManager.instance().validate(schema_option)
            os.chdir(OptionManager.instance()["--directory"])
        except SchemaError as schema_msg:
            Logger.instance().debug("\nCommand line Args:" + str(OptionManager.instance()))
            # regex for the different possible error messages.
            match_open_def = re.match(r"^open\('(.[^\)]+)'\)", str(schema_msg))
            match_dot_def = re.match(r"^check_valid_path\(('.[^\)]+')\)", str(schema_msg))
            match_wrong_key = re.match(r"^Wrong keys ('.[^\)]+')", str(schema_msg))
            match_pygraphviz = re.match(r".*dot.*", str(schema_msg))
            print(match_pygraphviz)
            # Check the different regex..
            if match_open_def:
                Logger.instance().error("The file " + match_open_def.group(1) + " cannot be opened. It may not exist.")
            elif match_dot_def:
                Logger.instance().error("The path " + match_dot_def.group(1) + " is not valid.")
            elif match_wrong_key:
                # Normally never reached
                Logger.instance().error("The option key " + match_wrong_key.group(1) + " is not known.")
            elif match_pygraphviz:
                Logger.instance().error("The dot file path is not valid or the pygraphviz module is not installed. In the second case, install wopmars with pygraphviz: pip install wopmars[pygraphviz]")
            else:
                # Normally never reached
                Logger.instance().error("An unknown error has occured. Message: " + str(schema_msg))
            sys.exit(2)

        Logger.instance().debug("\nCommand line Args:" + str(OptionManager.instance()))

        if OptionManager.instance()["example"]:
            ExampleBuilder().build()
            sys.exit(1)

        if OptionManager.instance()["example_snp"]:
            ExampleBuilder().build_snp()
            sys.exit(1)


        wm = WorkflowManager()
        try:
            wm.run()
        except WopMarsException as WE:
            Logger.instance().error(str(WE))
            session = SQLManager.instance().get_session()
            try:
                finished_at = time_unix_ms()
                Logger.instance().error("The workflow has encountered an error at: " + str(finished_at))
                wm.set_finishing_informations(finished_at, "ERROR")
            except AttributeError:
                session.rollback()
                Logger.instance().error("The execution has not even begun. No information will be stored in the database.")
            except Exception as e:
                Logger.instance().error("An error occurred during the rollback of the database changes; the database may now be unstable: " +
                                        str(e))
            sys.exit(1)
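
The entry point above chains docopt (command-line parsing) with schema (validation and coercion of the option values). A reduced sketch of that chain; the usage string and the two options are simplified placeholders, not WoPMaRS's real interface:

"""Usage: mytool.py [--dry-run] [-v...]"""
# Reduced sketch of the docopt + schema pattern used by run(argv).
import sys

from docopt import docopt, DocoptExit
from schema import And, Or, Schema, SchemaError, Use


def run(argv):
    # docopt raises DocoptExit when the command line does not match the usage string
    try:
        args = docopt(__doc__, argv=argv[1:])
    except DocoptExit as SE:
        print("Bad argument in the command line: \n\t" + " ".join(argv) + "\n" + str(SE))
        sys.exit(2)
    try:
        # validate and coerce the raw docopt values
        args = Schema({
            "--dry-run": Use(bool),
            "-v": Or(0, And(int, lambda n: 1 <= n <= 2)),
        }).validate(args)
    except SchemaError as schema_msg:
        print("Invalid option value: " + str(schema_msg))
        sys.exit(2)
    return args


print(run(["mytool.py", "--dry-run", "-v"]))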