Example #1
class SourcesCmdHandler(SendingCmdHandler):
    def __init__(self, streams_handler: StreamsHandler, io_handler: IOHandler,
                 logger_queue: Queue):
        self.io_handler = io_handler
        self.__log_writer = NamedLogWriter(logger_queue, 'SourcesCmdHandler')
        super().__init__(streams_handler)

    async def handle(self, task: Task, args: list):
        try:
            file = await self.__prepare_sources_file(task)
            await self.send_files(file)

            self.__log_writer.log('Sent files')
        except Exception as e:
            raise CmdHandlerError(Command.SOURCES, '{}'.format(e))

        return task

    async def __prepare_sources_file(self, task: Task):
        """
        Make a zip archive containing all of the task source files

        :param task: relevant task
        :return: path to the zip archive with task sources
        """
        return await self.io_handler.get_sources(task)
Example #2
class ResultsCmdHandler(SendingCmdHandler):
    def __init__(self, streams_handler: StreamsHandler,
                 file_handler: IOHandler, logger_queue: Queue):
        self.file_handler = file_handler
        self.__log_writer = NamedLogWriter(logger_queue, 'ResultsCmdHandler')
        super().__init__(streams_handler)

    async def handle(self, task: Task, args: list):
        try:
            file = await self.__prepare_results_file(task)
            await self.send_files(file)

            self.__log_writer.log('Sent files')
        except Exception as e:
            raise CmdHandlerError(Command.RESULTS, '{}'.format(e))

        return task

    async def __prepare_results_file(self, task: Task):
        """
        Make a zip archive containing all of the result and log files

        :param task: relevant task
        :return: path to the zip archive with execution results
        """
        return await self.file_handler.get_results(task)
Example #3
class HPCStatsCmdHandler(BaseCmdHandler):
    def __init__(self, template_resolver: TemplateResolver,
                 hpc_stats_resolver: HPCStatsResolver, logger_queue: Queue,
                 server_addr: str):
        self.__server_addr = server_addr
        self.__template_resolver = template_resolver
        self.__hpc_stats_resolver = hpc_stats_resolver
        self.__log_writer = NamedLogWriter(logger_queue, 'HPCStatsCmdHandler')
        self.__sender = HTTPSender(self.__server_addr, logger_queue,
                                   self.__template_resolver,
                                   self.__hpc_stats_resolver)

    async def handle(self, task: Task, args: list):
        await self.__process_hpc_stats()

    async def __process_hpc_stats(self):
        """
        Get HPC statistics (e.g. CPU usage) and send them to the server

        :return: None
        """
        response = await self.__sender.get_and_send_hpc_stats()
        self.__log_writer.log(
            'Server response for HPC stats: {}'.format(response))

        return None
Example #4
    @staticmethod
    def build(workdir, scheds_list, logger_queue: Queue):
        """
        Return TemplateResolver with Scheduler model, appropriate for the system

        :param workdir: path to working directory
        :param scheds_list: known Scheduler list
        :param logger_queue: queue that should be used for logging
        :return: TemplateResolver with appropriate Scheduler
        """
        log_writer = NamedLogWriter(logger_queue, 'TemplateResolverBuilder')
        executor = Executor()
        for sched in scheds_list:  # type: Scheduler
            try:
                template_resolver = TemplateResolver(workdir, sched,
                                                     logger_queue)
                cmd = template_resolver.get_hpc_stats_cmd()
                executor.execute_shell_command(cmd)
            except ShellExecutionError as error:
                log_writer.log('Scheduler {} wasn\'t found. Output: {}'.format(
                    sched.get_name(), error))
            else:
                return template_resolver
        raise RuntimeError('Unknown scheduler or no scheduler was found')
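
# Illustration (not from the original code base): a minimal standalone sketch of
# the probing strategy build() uses, namely run each known scheduler's status
# command and keep the first one that succeeds. subprocess stands in for the
# project's Executor, and the probe commands ('sinfo', 'qstat -Q') are hypothetical.
import subprocess

probes = {'slurm': 'sinfo', 'pbs': 'qstat -Q'}
detected = None
for name, cmd in probes.items():
    try:
        # a scheduler is considered present if its status command exits cleanly
        subprocess.run(cmd.split(), check=True, capture_output=True)
    except (OSError, subprocess.CalledProcessError):
        continue
    detected = name
    break
if detected is None:
    raise RuntimeError('Unknown scheduler or no scheduler was found')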
Example #5
class CompilationHandler:
    __cmake_handler = None
    __executor = Executor()
    __cmake_version = ''

    def __init__(self, template_resolver: TemplateResolver,
                 logger_queue: Queue, workdir: str):
        self.__workdir = workdir
        self.__template_resolver = template_resolver
        self.__log_writer = NamedLogWriter(logger_queue,
                                           CompilationHandler.__name__)

        if self.__check_cmake():
            self.__cmake_handler = CMakeHandler(logger_queue, workdir,
                                                self.__cmake_version)

    def __check_cmake(self):
        """
        Check whether the machine supports building projects with CMake

        :return: True if CMake is supported, False otherwise
        """
        cmake_supported = True
        try:
            version = self.__executor.execute_shell_command('cmake --version')
            self.__cmake_version = version.split()[-1]
        except ShellExecutionError:
            cmake_supported = False

        return cmake_supported

    async def handle_compilation(self, task: Task, args: list):
        task.set_bin_name(
            os.path.join(task.path_to_task_bin(self.__workdir),
                         task.get_name() + '.out'))

        compilation_commands = self.__get_compile_cmds(task, args)

        compilation_output = await self.__executor.async_exec_cmds_with_wrapping(
            commands=compilation_commands,
            dir_to_use=task.path_to_task_bin(self.__workdir),
        )

        compilation_log_file = os.path.join(
            task.get_dir_name_for_task(self.__workdir), 'compilation.log')
        with open(compilation_log_file, 'w') as output_file:
            output_file.write(compilation_output)

        return

    @staticmethod
    def __is_c_compiler(compiler):
        c_compiler = 'cc' in compiler \
                     or '++' in compiler \
                     or 'xx' in compiler
        return c_compiler

    def __get_compile_cmds(self, task: Task, args: list):
        """
        Get compilation command to compile all of the task source files.

        :param task: related task
        :param args: additional arguments
        :return: list of compilation commands
        """
        compiler = task.get_compiler()
        if compiler:
            bin_path = task.path_to_task_bin(self.__workdir)
            if not os.path.exists(bin_path):
                os.makedirs(bin_path)

            path = task.path_to_task_src(self.__workdir)
            if task.is_file_archive():
                if self.__cmake_handler is not None and task.uses_cmake():
                    if not self.__cmake_handler.is_cmake_target(path):
                        self.__cmake_handler.create_cmake_lists(task)
                    commands = self.__cmake_handler.get_compilation_commands_using_cmake(
                        task)
                else:
                    commands = self.__no_cmake_compile_cmd(
                        compiler, task, args)
            else:
                commands = self.__get_compile_cmd_for_single_file(
                    compiler, task, args)

            self.__log_writer.log(json.dumps(commands, indent=4),
                                  level=LogLevel.DEBUG)
            return commands
        else:
            raise RuntimeError('No compiler is set for the task: {}'.format(
                task.get_name()))

    def __get_compile_cmd_for_single_file(self, compiler, task, args):
        files = os.path.join(task.path_to_task_src(self.__workdir),
                             task.get_name())

        command = self.__get_compile_cmd_for_args(compiler, files, task, args)
        return [command]

    def __no_cmake_compile_cmd(self, compiler, task: Task, args: list):
        files = self.__get_flat_archive_files(self.__is_c_compiler(compiler),
                                              task)

        command = self.__get_compile_cmd_for_args(compiler, files, task, args)
        return [command]

    @staticmethod
    def __get_compile_cmd_for_args(compiler, files, task, args):
        command = '{} {} -o {} {} >{} '.format(compiler, files,
                                               task.get_bin_name(),
                                               ' '.join(args),
                                               task.get_log_name())
        return command

    def __get_flat_archive_files(self, c_compiler, task: Task):
        """
        Get files list relevant for compilation

        :param c_compiler: whether the program would be compiled using C/C++ compiler
        :param task: task to handle
        :return: source files list
        """
        path = task.path_to_task_src(self.__workdir)
        dirfiles = os.listdir(path)
        if c_compiler:
            files = list(
                filter(lambda x: x.endswith(('.cxx', '.cpp', '.cc', '.c')),
                       dirfiles))
        else:
            files = list(
                filter(lambda x: x.endswith(('.f', '.f90', '.f95')),
                       dirfiles))
        files = list(map(lambda x: os.path.join(path, x), files))
        files = ' '.join(files)
        return files
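
# Illustration (placeholders, not values from the original code): the command
# string produced by __get_compile_cmd_for_args for a hypothetical non-CMake
# task compiled with gcc.
compiler = 'gcc'
files = '/work/task1/src/main.c /work/task1/src/util.c'
bin_name = '/work/task1/bin/task1.out'
args = ['-O2', '-Wall']
log_name = '/work/task1/task1.log'

# same format string as __get_compile_cmd_for_args
command = '{} {} -o {} {} >{} '.format(compiler, files, bin_name,
                                       ' '.join(args), log_name)
# command == 'gcc /work/task1/src/main.c /work/task1/src/util.c'
#            ' -o /work/task1/bin/task1.out -O2 -Wall >/work/task1/task1.log '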
Example #6
class HPCStatsResolver:

    __templ_dct = {
        # HPC statistics-related parameters
        '__irr__':      '__irr__',
        '__avail__':    '__avail__',
        '__total__':    '__total__',
        '__infrep__':   '__infrep__',
    }

    def __init__(self, templ_sign: str, logger_queue: Queue):
        self.templ_sign = templ_sign
        self.__log_writer = NamedLogWriter(logger_queue, 'HPCStatsResolver')

    def resolve_stats(self, pattern: str, hpc_stats: str):
        """
        Parse HPC statistics and get the required information

        :param pattern: template pattern describing the scheduler output format
        :param hpc_stats: string with HPC statistics
        :return: dictionary containing relevant parts of the HPC statistics
        """
        try:
            pattern = pattern.split('\n')
            hpc_stats = hpc_stats.split('\n')

            avail_cpu_num, total_cpu_num = self.__handle_stats(pattern, hpc_stats)
        except Exception as e:
            self.__log_writer.log('{}'.format(e), level=LogLevel.ERROR)
            raise e

        res = {
            'total_cpu_num': total_cpu_num,
            'avail_cpu_num': avail_cpu_num
        }
        return res

    def __handle_stats(self, pattern, hpc_stats):
        pattern = self.__sync_length(hpc_stats, pattern)

        pairs = zip(pattern, hpc_stats)
        relevant = self.__leave_only_relevant(pairs)
        relevant = self.__handle_separators(relevant)
        return self.__compute_stats(relevant)

    def __sync_length(self, hpc_stats: list, pattern: list):
        """
        Pad the template pattern so that its length matches the HPC statistics length

        :param hpc_stats: list with HPC statistics
        :param pattern: list with template pattern
        :return: pattern list padded to the length of the HPC statistics
        """
        infrep = self.__templ_dct['__infrep__']
        can_inf_repeat = infrep in pattern
        length = len(hpc_stats)
        if len(pattern) < length and can_inf_repeat:
            infrep_only = list(filter(lambda x: infrep in x, pattern))[0]
            while len(pattern) < length:
                pattern.append(infrep_only)
        return pattern

    def __is_relevant(self, stat_line: str):
        """
        Determine whether the line is relevant for HPC statistics
        :param stat_line: line from the HPC statistics
        :return: True if relevant, False otherwise
        """
        return self.__templ_dct['__avail__'] in stat_line or self.__templ_dct['__total__'] in stat_line

    def __leave_only_relevant(self, pairs):
        """
        Filter out all of the irrelevant parts of the HPC statistics with respect to template

        :param pairs: list of pairs (template_element, statistics_element)
        :return: list of relevant pairs
        """
        # Leave only relevant lines
        relevant = list(filter(lambda x: self.__is_relevant(x[0]), pairs))

        # Leave only relevant parts of the line
        relevant = list(map(lambda x: zip(x[0].split(' '), x[1].split(' ')), relevant))
        new_relevant = []
        for zips in relevant:
            new_relevant.append(list(filter(lambda x: self.__is_relevant(x[0]), zips)))

        # Get rid of empty parts
        new_relevant = list(filter(lambda x: len(x) > 0, new_relevant))

        # Flatten result so that it would contain only tuples
        new_relevant = flatten_list(new_relevant)
        return new_relevant

    def __extract_separators(self, new_relevant: list):
        """
        Determine symbols that separate relevant parts of HPC statistics

        :param new_relevant: list of relevant pairs
        :return: string of separator characters
        """
        buf = list(filter(lambda x: len(x) > 0, new_relevant[0][0].split(self.templ_sign)))

        stat_related = [
            self.__templ_dct['__avail__'],
            self.__templ_dct['__total__'],
            self.__templ_dct['__infrep__'],
            self.__templ_dct['__irr__']
        ]
        buf = ''.join(list(filter(lambda x: x not in stat_related, buf)))

        return buf

    def __handle_separators(self, relevant: list):
        """
        Get rid of the separators both in template and in HPC statistics element

        :param relevant: list of relevant pairs
        :return: list of relevant pairs with nested lists and no separators
        """
        separators = self.__extract_separators(relevant)
        relevant = list(map(lambda x: (re.split('[{}]'.format(separators), x[0]),
                                       re.split('[{}]'.format(separators), x[1])),
                            relevant))
        return relevant

    def __compute_stats(self, relevant: list):
        """
        Reduce the extracted statistics

        :param relevant: list of relevant parts of the HPC statistics
        :return: number of available CPUs, total number of CPUs
        """
        relevant = list(
            map(
                lambda x: dict(
                    zip(
                        # Remove '@' signs
                        list(map(lambda y: y.strip(self.templ_sign), x[0])),
                        # Convert strings to ints
                        list(map(lambda y: int(y), x[1]))
                    )
                ),
                relevant
            )
        )

        # TODO: if any other part of the HPC statistics becomes relevant, this code should be changed
        total_cpu_num = 0
        avail_cpu_num = 0
        for stat in relevant:
            total_cpu_num += stat.get(self.__templ_dct['__total__'], 0)
            avail_cpu_num += stat.get(self.__templ_dct['__avail__'], 0)

        return avail_cpu_num, total_cpu_num
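
# Illustration (invented pattern and output, not real Scheduler templates): the
# core of resolve_stats on a single relevant line. The template line marks which
# fields hold the total and available CPU counts; the scheduler output is then
# parsed against it.
templ_sign = '@'
pattern_line = '__irr__ @__total__@/@__avail__@'
stats_line = 'node01 16/12'

# pair pattern tokens with output tokens and keep the relevant pair
pairs = [p for p in zip(pattern_line.split(' '), stats_line.split(' '))
         if '__total__' in p[0] or '__avail__' in p[0]]

# split both sides on the separator ('/') and strip the template sign from keys
keys, values = pairs[0]
stats = dict(zip((k.strip(templ_sign) for k in keys.split('/')),
                 (int(v) for v in values.split('/'))))
# stats == {'__total__': 16, '__avail__': 12}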
Example #7
class StreamsHandler:
    __ERROR_DESC_START = 'Output:'

    def __init__(self, input_stream: StreamReader, output_stream: StreamWriter,
                 io_handler: IOHandler, logger_queue: Queue):
        self.__in_stream = input_stream
        self.__out_stream = output_stream
        self.__io_handler = io_handler
        self.__log_writer = NamedLogWriter(logger_queue, 'StreamsHandler')

    async def parse_incoming_data(self):
        incoming = await asyncio.wait_for(self.__in_stream.readuntil(
            NetCommand.separator),
                                          timeout=2.0)
        return NetCommand.parse_net_command(incoming)

    async def recv_file(self, task: Task) -> Task:
        """
        Receive incoming file from the reader channel

        :param task: related task
        :return: task
        """
        bufsize = task.get_filesize()
        in_file = await self.__in_stream.read(bufsize)
        recv_name = task.get_name()

        await self.__io_handler.write_recvd_data(task, recv_name, in_file)

        return task

    async def inform_client(self,
                            task: Task,
                            message: str,
                            error: Exception = None) -> None:
        """
        Send a response to the connected client (the CKD web application)
        over the async writer channel

        :param task: related task
        :param message: message to send
        :param error: related error, if any
        :return: nothing
        """
        to_send = {'task_info': {}, 'message': '', 'error': ''}

        if task is not None:
            # Add info about the received task
            task_msg = {
                'name': task.get_name(),
                'username': task.get_user(),
                'status': task.get_str_status()
            }
            to_send['task_info'] = task_msg

        to_send['message'] = message

        levelname = LogLevel.DEBUG
        if error is not None and error != 'None':
            error = '{}'.format(error)
            idx = error.find(self.__ERROR_DESC_START)
            if idx >= 0:
                error = error[idx + len(self.__ERROR_DESC_START):]

            to_send['error'] = '{}'.format(error)
            to_send['task_info']['status'] = 'ERROR'
            levelname = LogLevel.ERROR
        self.__log_writer.log('Request response: {}'.format(
            json.dumps(to_send, indent=4)),
                              level=levelname)

        try:
            # Sending response to CKD web application
            self.__out_stream.write(json.dumps(to_send, indent=4).encode())
            await self.__out_stream.drain()
        except Exception as e:
            self.__log_writer.log(
                'Error occurred while responding to the CKD application: {}'.
                format(e))
        self.__log_writer.log('Response sent.', level=LogLevel.DEBUG)

    async def write_data(self, data: bytes):
        self.__out_stream.write(data)
        await self.__out_stream.drain()
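
# Illustration (made-up values): the shape of the JSON payload inform_client
# writes back to the web application for a hypothetical failed task.
example_response = {
    'task_info': {
        'name': 'task1',        # task.get_name()
        'username': 'alice',    # task.get_user()
        'status': 'ERROR',      # overridden whenever an error is passed in
    },
    'message': 'Handler Error:',
    'error': ' compilation failed',  # text following the 'Output:' marker, if any
}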
Example #8
class CMakeHandler:
    __C_MAKE_LISTS_TXT__ = 'CMakeLists.txt'

    def __init__(self, logger_queue: Queue, workdir: str, cmake_version):
        self.__workdir = workdir
        self.__log_writer = NamedLogWriter(logger_queue, 'CMakeHandler')
        self.__cmake_version = cmake_version

    def is_cmake_target(self, path: str):
        """
        Checks whether there is a CMakeLists.txt file in the archive

        :param path: path to archive contents
        :return: True if the file is present, False otherwise
        """
        return self.__C_MAKE_LISTS_TXT__ in os.listdir(path)

    def get_compilation_commands_using_cmake(self, task: Task):
        """
        Construct commands to build the CMake project and set the target executable name on the task.
        For now, only building the project with Unix Makefiles is supported.

        :param task: task to handle
        :return: command list to build CMake project
        """
        path = task.path_to_task_src(self.__workdir)
        bin_path = task.path_to_task_bin(self.__workdir)

        if not os.path.exists(bin_path):
            os.makedirs(bin_path)

        commands = [
            # invoking cmake in the 'bin' directory so that all of the generated files are inside 'bin' directory
            'cmake {} --warn-uninitialized --warn-unused-vars -Wno-dev'.format(
                path),
            # invoking build through the CMake build tool using appropriate tool for the system
            'cmake --build .',
        ]
        self.__log_writer.log(json.dumps(commands, indent=4), LogLevel.DEBUG)

        with open(os.path.join(path, self.__C_MAKE_LISTS_TXT__),
                  'r') as cmake_file:
            lines = cmake_file.read().splitlines()

        lines = list(filter(lambda x: 'project' in x, lines))
        # line = 'project(project_name)'
        lines = list(filter(lambda x: len(x) > 0, lines))
        lines = list(map(lambda x: re.split(r'[() ]', x), lines))
        self.__log_writer.log('LINES: {}'.format(lines))
        bin_name = os.path.join(bin_path, lines[0][1])
        task.set_bin_name(bin_name)

        return commands

    def create_cmake_lists(self, task: Task):
        """
        Generate a CMakeLists.txt that turns the extracted archive into a CMake project

        :param task: task to handle
        :return: None
        """
        path = task.path_to_task_src(self.__workdir)
        cmake_lists_filename = os.path.join(path, self.__C_MAKE_LISTS_TXT__)

        main_file = self.__get_main_file_in_project(path)

        task_name = task.get_name()
        file_contents = [
            'cmake_minimum_required(VERSION {})'.format(self.__cmake_version),
            'project({})'.format(task_name),
            'SET(CMAKE_BUILD_TYPE Release)',
        ]

        extension = main_file.split('.')[-1]

        file_contents.append('file(GLOB SOURCES "./*.{}")'.format(extension))

        file_contents.append(
            'add_executable({} ${{SOURCES}})'.format(task_name))

        dirs = self.__filter_dirs(path)

        include_dirs = list(filter(lambda x: 'include' in x, dirs))
        include_dirs = set(include_dirs)

        file_contents.append('include_directories({})'.format(
            ' '.join(include_dirs)))

        dirs = set(dirs) - include_dirs

        for dir_name in dirs:
            file_contents.append('add_subdirectory({})'.format(dir_name))

        file_contents = '\n'.join(file_contents)
        file_contents = correct_line_breaks(file_contents)

        with open(cmake_lists_filename, 'w') as cmake_lists_file:
            cmake_lists_file.write(file_contents)

    @staticmethod
    def __get_main_file_in_project(path):
        # TODO: rework this solution. Maybe search for main() function in the source code files?
        return list(filter(lambda x: 'main' in x, os.listdir(path)))[0]

    @staticmethod
    def __filter_dirs(path):
        files = os.listdir(path)
        files = list(map(lambda x: os.path.join(path, x), files))

        return list(filter(lambda x: os.path.isdir(x), files))
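
# Illustration (hypothetical task and version): roughly the CMakeLists.txt that
# create_cmake_lists would generate for a task named 'task1' whose main file is
# a .cpp source and whose only subdirectory is 'include'.
cmake_version = '3.10.2'
task_name = 'task1'
extension = 'cpp'

lines = [
    'cmake_minimum_required(VERSION {})'.format(cmake_version),
    'project({})'.format(task_name),
    'SET(CMAKE_BUILD_TYPE Release)',
    'file(GLOB SOURCES "./*.{}")'.format(extension),
    'add_executable({} ${{SOURCES}})'.format(task_name),
    'include_directories(include)',
]
print('\n'.join(lines))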
Example #9
class RequestHandler:
    HANDLER_ERROR = 'Handler Error:'

    def __init__(self, template_resolver: TemplateResolver, workdir: str, logger_queue: Queue,
                 writing_end_of_pipe: Connection):
        self.__workdir = workdir

        self.__writing_conn = writing_end_of_pipe
        self.__sources = set()

        self.__template_resolver = template_resolver
        self.__log_writer = NamedLogWriter(logger_queue, 'RequestHandler')
        self.__io_handler = IOHandler(self.__workdir, logger_queue)
        self.__logger_queue = logger_queue

        self.__cmd_handler_factory = CmdHandlerFactory()

    async def __handle(self, reader: StreamReader, writer: StreamWriter) -> None:
        """
        Method for handling an incoming request asynchronously

        :param reader: async reader channel
        :param writer: async writer channel
        :return: nothing
        """

        task = None  # type: Task
        self.__log_writer.log('Async Handling connection', level=LogLevel.DEBUG)
        streams_handler = StreamsHandler(reader, writer, self.__io_handler, self.__logger_queue)
        try:
            source, cmd, task, args = await streams_handler.parse_incoming_data()

            task_desc = task.to_json(indent=4) if task is not None else 'Empty task'
            self.__log_writer.log(
                'Incoming request:\nsource:\t{}\ncmd:\t{}\ntask:\n{}\nargs:\t{}'.format(source, cmd, task_desc, args),
                level=LogLevel.DEBUG
            )

            if self.__template_resolver is None and self.__sources == set():
                self.__template_resolver = self.__create_template_resolver(source)
                self.__send_scheduler_info_through_conn()

            if source is not None and source != 'None':
                if source not in self.__sources:
                    self.__writing_conn.send(source)

                self.__log_writer.log('Web application address: {}'.format(source))
                self.__sources.add(source)

            cmd_handler = self.__cmd_handler_factory.get_handler(cmd, streams_handler, self.__template_resolver,
                                                                 self.__logger_queue, source, self.__workdir)

            args = self.__parse_args(args)
            await cmd_handler.handle(task, args)

        except ValueError as ve:
            await streams_handler.inform_client(
                task,
                self.HANDLER_ERROR,
                RuntimeError('{}. Supported commands: {}'.format(ve, self.__get_supported_cmds())),
            )

        except Exception as e:
            if task is not None and task.exists(self.__workdir):
                task.set_status(TaskStatus.ERROR)
                task.set_error(e)
                await self.__io_handler.save_task(task)
                self.__log_writer.log(task.to_json(indent=4), level=LogLevel.ERROR)
            await streams_handler.inform_client(task, self.HANDLER_ERROR, e)

    def __create_template_resolver(self, source):
        downloader = SchedsDownloader(source)
        scheds_list = downloader.get_scheds_from_db()
        return TemplateResolverBuilder.build(self.__workdir, scheds_list, self.__logger_queue)

    def __send_scheduler_info_through_conn(self):
        sched = self.__template_resolver.get_sched()
        self.__writing_conn.send(self.__workdir)
        self.__writing_conn.send(sched.to_json())

    async def safe_handle(self, reader: StreamReader, writer: StreamWriter) -> None:
        """
        Method for handling an incoming request asynchronously without raising exceptions

        :param reader: async reader channel
        :param writer: async writer channel
        :return: nothing
        """
        try:
            await self.__handle(reader, writer)
        except Exception as e:
            self.__log_writer.log('Safe handle fallback: {}'.format(e), level=LogLevel.ERROR)

    @staticmethod
    def __parse_args(in_args: str) -> list:
        """
        Convert arguments string to list

        :param in_args: arguments string
        :return: arguments as a list of strings
        """
        args = []
        if isinstance(in_args, str) and len(in_args) > 0:
            args = in_args.split(' ')
        return args

    def __get_supported_cmds(self):
        return list(
            map(
                lambda x: x.value,
                self.__cmd_handler_factory.get_supported_commands()
            )
        )
Example #10
class StatusDaemon:
    __looping = None
    __stats_resolver = None

    def __init__(self, server_addr: str, logger_queue: Queue, template_resolver: TemplateResolver,
                 reading_end_of_pipe: Connection, err_threshold=10):
        self.__err_count = 0
        self.__err_threshold = err_threshold

        self.__reading_conn = reading_end_of_pipe

        self.__template_resolver = None
        self.sources = set()
        self.__senders = []
        if server_addr is not None and template_resolver is not None:
            self.__template_resolver = template_resolver
            self.__stats_resolver = HPCStatsResolver(template_resolver.get_templ_sign(), logger_queue)
            self.__senders.append(HTTPSender(server_addr, logger_queue, template_resolver, self.__stats_resolver))
            self.sources.add(server_addr)

        self.__executor = Executor()

        self.__logger_queue = logger_queue
        self.__log_writer = NamedLogWriter(logger_queue, 'StatusDaemon')

        self.__log_writer.log('Created instance of StatusDaemon')

    def main_loop(self):
        self.__log_writer.log('Starting StatusDaemon')
        self.__looping = True

        exception = self.__executor.wrap_async_call(self.__loop)
        self.__log_writer.log('{}'.format(exception), LogLevel.ERROR)

    async def __loop(self):
        self.__log_writer.log('_loop called', LogLevel.DEBUG)
        try:
            while self.__looping:
                try:
                    if self.__reading_conn.poll():
                        self.__handle_incoming_data()

                    response = await asyncio.gather(*[sender.get_and_send_hpc_stats() for sender in self.__senders])
                    self.__log_writer.log(response, LogLevel.DEBUG)
                    self.__err_count = 0
                except Exception as e:
                    self.__handle_exception(e)

                await asyncio.sleep(59)
        finally:
            self.shutdown()

    def __handle_incoming_data(self):
        if self.__template_resolver is None:
            workdir = self.__reading_conn.recv()
            sched_json = self.__reading_conn.recv()
            sched = Scheduler(json.loads(sched_json))
            self.__template_resolver = TemplateResolver(workdir, sched, self.__logger_queue)

        new_server_addr = self.__reading_conn.recv()
        if new_server_addr not in self.sources:
            new_sender = HTTPSender(new_server_addr, self.__logger_queue, self.__template_resolver,
                                    self.__stats_resolver)
            self.__senders.append(new_sender)

    def __handle_exception(self, e):
        self.__log_writer.log(e, LogLevel.ERROR)
        # stop the loop once consecutive errors reach the configured threshold
        self.__err_count += 1
        if self.__err_count >= self.__err_threshold:
            self.__looping = False

    def shutdown(self):
        self.__looping = False
        self.__log_writer.log('Shutting down StatusDaemon')
Example #11
class RunCmdHandler(AbstractCmdHandler):
    def __init__(self, streams_handler: StreamsHandler,
                 template_resolver: TemplateResolver, io_handler: IOHandler,
                 logger_queue: Queue, workdir: str, server_addr: str):

        # for writing files
        self.__io_handler = io_handler

        # for receiving/sending data to the web application using sockets
        self.__streams_handler = streams_handler

        self.__executor = Executor()

        # support classes
        self.__template_resolver = template_resolver
        self.__compilation_handler = CompilationHandler(
            template_resolver, logger_queue, workdir)

        self.__task_stats_handler = StatsCmdHandler(streams_handler,
                                                    template_resolver,
                                                    io_handler, logger_queue)
        self.__hpc_stats_handler = HPCStatsCmdHandler(
            template_resolver=template_resolver,
            hpc_stats_resolver=HPCStatsResolver(
                template_resolver.get_templ_sign(), logger_queue),
            logger_queue=logger_queue,
            server_addr=server_addr)

        self.__workdir = workdir

        self.__log_writer = NamedLogWriter(logger_queue, 'RunCmdHandler')

        super().__init__()

    async def handle(self, task: Task, args: list):
        """
        Method for handling compilation and scheduling of the incoming task
        :param task: incoming task
        :param args: additional arguments for compilation
        :raises CmdHandlerError
        :return: nothing
        """
        self.__set_task_log(task)

        task = await self.__streams_handler.recv_file(task)

        task.set_status(TaskStatus.COMPILING)
        await self.__io_handler.save_task(task)
        await self.__streams_handler.inform_client(task, 'Task is compiling')

        try:
            await self.__compilation_handler.handle_compilation(task, args)
            if self.__template_resolver.sched_uses_desc():
                await self.__create_task_passport(task)
            await self.__run_task(task)

            # Update HPC info on the web application side
            await self.__hpc_stats_handler.handle(task, [])
        except Exception as e:
            raise CmdHandlerError(Command.RUN, e)

    async def __create_task_passport(self, task):
        passport = self.__template_resolver.get_passport(task)
        passport_name = await self.__io_handler.write_task_passport(
            task, passport)
        task.set_passport_name(passport_name)

    async def __run_task(self, task):
        """
        Schedule execution of the compiled task binary using the resolved 'run' command

        :param task: related task
        :raises ShellExecutionError
        :return: None
        """
        run_command = self.__template_resolver.get_run_for_task(task)

        res = await self.__executor.async_exec_cmds_with_wrapping(
            commands=[run_command],
            dir_to_use=task.get_dir_name_for_task(self.__workdir),
        )

        self.__update_jobid(res, task)

        await self.__io_handler.save_task(task)

        return None

    def __set_task_log(self, task):
        """
        Set full path to log for the incoming task

        :param task: relevant task
        :return: nothing
        """
        log_name = os.path.join(task.get_dir_name_for_task(self.__workdir),
                                '{}.log'.format(task.get_name()))
        task.set_log_name(log_name)

    def __update_jobid(self, res, task):
        """
        Updates jobid depending on the command output

        :param res: 'run' command output
        :param task: related task
        :raises MissingTemplateElement
        :return: nothing
        """
        jobid = self.__template_resolver.resolve_jobid_from_output(res)

        task.set_jobid(jobid)
        self.__log_writer.log('Job Id for task {} is {}'.format(
            task.get_name(), task.get_jobid()))
Example #12
class TaskServer:
    __server = None
    __host = None
    __port = None
    __handler = None

    def __init__(self,
                 host_n_port,
                 writing_end_of_pipe,
                 logger_queue: Queue,
                 template_resolver=None,
                 workdir='.'):

        self.__host = host_n_port[0]
        self.__port = host_n_port[1]

        self.__handler = RequestHandler(template_resolver, workdir,
                                        logger_queue, writing_end_of_pipe)
        self.__executor = Executor()

        self.__log_writer = NamedLogWriter(logger_queue, 'TaskServer')
        self.__log_writer.log('Created instance of TaskServer')

    def main_loop(self):
        """
        Synchronous method that starts the async method that will initialize the server

        :return: nothing
        """
        self.__log_writer.log('Starting TaskServer')
        exception = self.__executor.wrap_async_call(self.__loop)
        self.__log_writer.log('{}'.format(exception), LogLevel.ERROR)

    async def __loop(self):
        """
        Async method that actually initializes the server

        :return: nothing
        """
        self.__log_writer.log('_loop called', LogLevel.DEBUG)
        self.__server = await asyncio.start_server(self.__handler.safe_handle,
                                                   self.__host, self.__port)
        await self.__server.wait_closed()
        self.__log_writer.log('_loop exited', LogLevel.DEBUG)

    def shutdown(self):
        if self.__server is not None:
            self.__server.close()
        self.__log_writer.log('Shutting down TaskServer')
Example #13
class HTTPSender:
    def __init__(self, server_addr: str, logger_queue: Queue,
                 template_resolver: TemplateResolver,
                 hpc_stats_handler: HPCStatsResolver):
        self.__server_addr = server_addr
        self.__template_resolver = template_resolver
        self.__hpc_stats_handler = hpc_stats_handler
        self.__log_writer = NamedLogWriter(logger_queue, 'HTTPSender')
        self.__executor = Executor()
        self.__log_writer.log(
            'Created instance with server addr: {}'.format(server_addr))

    def __get_csrf_cookie(self):
        """
        Acquire CSRF cookie from the server

        :return: CSRF cookie
        """
        cookie_handler = request.HTTPCookieProcessor()
        opener = request.build_opener(request.HTTPHandler(), cookie_handler)
        request.install_opener(opener)

        opener.open(self.__server_addr)

        # attempt to get the csrf token from the cookie jar
        csrf_cookie = None
        for cookie in cookie_handler.cookiejar:
            if cookie.name == 'csrftoken':
                csrf_cookie = cookie
                break
        if not csrf_cookie:
            raise IOError('No csrf cookie found')

        self.__log_writer.log('csrf_cookie: {}'.format(csrf_cookie),
                              level=LogLevel.DEBUG)

        return csrf_cookie

    async def get_and_send_hpc_stats(self):
        """
        Get HPC stats (e.g. current CPU workload) and send them to the server

        :return: string representation of the response
        """
        hpc_stats = await self.__executor.async_exec_shell_command(
            self.__template_resolver.get_hpc_stats_cmd())

        hpc_stats = self.__hpc_stats_handler.resolve_stats(
            self.__template_resolver.get_hpc_stats_pattern(), hpc_stats)
        hpc_stats = json.dumps(hpc_stats)

        try:
            cookie = await self.__executor.async_execution(
                self.__get_csrf_cookie)
        except Exception as e:
            raise URLError('while getting cookie: {}'.format(e))

        cookie = cookie.value.encode('ascii')
        encoded = parser.urlencode(
            dict(csrfmiddlewaretoken=cookie, data=hpc_stats))
        encoded = encoded.encode()
        try:
            response = await self.__executor.async_execution(
                request.urlopen, '{}/hpcStats/'.format(self.__server_addr),
                encoded)
        except Exception as e:
            raise URLError('while updating info: {}'.format(e))

        return response.read().decode()
Example #14
class TemplateResolver:
    """
    Class that handles substituting template parameters with real values.

    Manages both Scheduler command templates and Scheduler output template for the HPC statistics.
    """

    __templ_sign = '@'
    __sched = None  # type: Scheduler
    __io_handler = None

    def __init__(self, workdir: str, scheduler: Scheduler,
                 logger_queue: Queue):
        self.workdir = workdir
        self.__sched = scheduler
        self.__hpc_stats_resolver = HPCStatsResolver(self.__templ_sign,
                                                     logger_queue)
        self.__log_writer = NamedLogWriter(logger_queue, 'TemplateResolver')

    def __init_templ_dct(self, templ_dct, task: Task):
        """
        Initialize the template dictionary with values taken from the related Task object

        :param templ_dct: template dictionary to fill
        :param task: related task
        :return: nothing
        """
        templ_dct['__procnum__'] = task.get_procnum()
        templ_dct['__walltime__'] = task.get_walltime()
        templ_dct['__memory__'] = task.get_memory()
        templ_dct['__filename__'] = task.get_filename()
        templ_dct['__descname__'] = task.get_passport_name()
        templ_dct['__jobid__'] = task.get_jobid()
        templ_dct['__name__'] = task.get_name()
        templ_dct['__user__'] = task.get_user()
        templ_dct['__taskdir__'] = task.get_dir_name_for_task(self.workdir)
        templ_dct['__binname__'] = task.get_bin_name()
        templ_dct['__logname__'] = task.get_log_name()
        templ_dct['__workdir__'] = self.__get_workdir()

    def get_sched(self):
        return self.__sched

    def __get_workdir(self):
        return self.workdir

    def __substitute_template_params(self, templ: str, task: Task):
        """
        Main method of the TemplateResolver.
        Substitutes template elements for corresponding values.

        :param templ: template string
        :param task: related task
        :return: string with substituted template elements for real values
        """
        templ = templ.split(self.__templ_sign)
        templ_dct = {}
        res = []
        self.__init_templ_dct(templ_dct, task)
        for part in templ:
            if part in templ_dct.keys():
                to_add = templ_dct[part]
            else:
                to_add = part
            res.append(str(to_add))
        return ''.join(res)

    def sched_uses_desc(self):
        return self.__sched.uses_desc()

    def get_run_for_task(self, task: Task):
        """
        Get the 'run' command with real values instead of template parameters

        :param task: related task
        :return: 'run' command with real values
        """
        return self.__substitute_template_params(self.__sched.get_run_cmd(),
                                                 task)

    def resolve_jobid_from_output(self, cmd_output: str):
        template = self.__sched.get_jobid_template()
        template_list = template.split()
        cmd_output_list = cmd_output.strip('"\' ').split()

        pairs = list(zip(template_list, cmd_output_list))
        pairs = list(filter(lambda x: self.__templ_sign in x[0], pairs))

        if len(pairs) == 0:
            raise MissingTemplateElement(template, cmd_output)

        return pairs[0][1]

    def get_passport(self, task: Task):
        template = self.__sched.get_passport_template()

        template = template.splitlines()
        if 'mpi' in task.get_compiler().lower():
            template = self.__mpi_adapt_passport_template(
                template, '__NO_MPI__', '__MPI__')
        else:
            template = self.__mpi_adapt_passport_template(
                template, '__MPI__', '__NO_MPI__')

        template = '\n'.join(template)
        passport = self.__substitute_template_params(template, task)

        self.__log_writer.log('PASSPORT:\n{}'.format(passport),
                              level=LogLevel.DEBUG)

        return passport

    @staticmethod
    def __mpi_adapt_passport_template(template, to_exclude, to_strip):
        template = list(filter(lambda x: to_exclude not in x, template))

        new_template = []
        for elem in template:
            if to_strip in elem:
                to_add = ' '.join(
                    list(filter(lambda x: to_strip not in x, elem.split())))
            else:
                to_add = elem
            new_template.append(to_add)
        return new_template

    def get_stats_for_task(self, task: Task):
        """
        Get the 'stats' command with real values instead of template parameters

        :param task: related task
        :return: 'stats' command with real values
        """
        return self.__substitute_template_params(self.__sched.get_stats_cmd(),
                                                 task)

    def get_cancel_for_task(self, task: Task):
        """
        Get the 'cancel' command with real values instead of template parameters

        :param task: related task
        :return: 'cancel' command with real values
        """
        return self.__substitute_template_params(self.__sched.get_cancel_cmd(),
                                                 task)

    def get_hpc_stats_cmd(self):
        """
        Return Scheduler command that returns HPC statistics
        :return: Scheduler command that returns HPC statistics
        """
        return self.__sched.get_hpc_stats_cmd()

    def get_hpc_stats_pattern(self):
        return self.__sched.get_hpc_stats_pattern()

    def get_templ_sign(self):
        return self.__templ_sign
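
# Illustration (invented template and values, not a real Scheduler template):
# the substitution scheme used by __substitute_template_params. The template is
# split on the template sign and every known marker is replaced by its value.
templ_sign = '@'
template = 'run -n @__procnum__@ -o @__logname__@ @__binname__@'
values = {'__procnum__': 4,
          '__logname__': 'task1.log',
          '__binname__': 'task1.out'}

parts = template.split(templ_sign)
resolved = ''.join(str(values.get(part, part)) for part in parts)
# resolved == 'run -n 4 -o task1.log task1.out'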