Example #1
    def test_find_history_entry_after_restart(self):
        self.simulate_logging(execution_id='id1')

        new_service = ExecutionLoggingService(test_utils.temp_folder, LogNameCreator())
        entry = new_service.find_history_entry('id1')
        self.assertIsNotNone(entry)
        self.validate_history_entry(entry, id='id1')
Example #2
    def test_find_history_entry_after_restart(self):
        self.simulate_logging(execution_id='id1')

        new_service = ExecutionLoggingService(test_utils.temp_folder)
        entry = new_service.find_history_entry('id1')
        self.assertIsNotNone(entry)
        self.validate_history_entry(entry, id='id1')
    def test_history_entries_after_restart(self):
        self.simulate_logging(execution_id='id1')

        new_service = ExecutionLoggingService(test_utils.temp_folder,
                                              LogNameCreator())
        entry = new_service.get_history_entries()[0]
        self.validate_history_entry(entry, id='id1')
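The tests in Examples #1 and #2 call a validate_history_entry helper that is not shown in these excerpts. A minimal sketch of such an assertion, assuming only that history entries expose the id attribute used elsewhere in these examples (entry.id):

    def validate_history_entry(self, entry, id):
        # Hypothetical helper: the real test base class may verify more fields
        # (script name, user, exit code); here we only assert the id, which is
        # the attribute visible in these excerpts.
        self.assertIsNotNone(entry)
        self.assertEqual(id, entry.id)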
Example #4
def main():
    tool_utils.validate_web_imports_exist(os.getcwd())

    logging_conf_file = os.path.join(CONFIG_FOLDER, 'logging.json')
    with open(logging_conf_file, 'rt') as f:
        log_config = json.load(f)
        file_utils.prepare_folder(os.path.join('logs'))

        logging.config.dictConfig(log_config)

    file_utils.prepare_folder(CONFIG_FOLDER)
    file_utils.prepare_folder(TEMP_FOLDER)

    migrations.migrate.migrate(TEMP_FOLDER, CONFIG_FOLDER)

    server_config = server_conf.from_json(SERVER_CONF_PATH)

    secret = get_secret(TEMP_FOLDER)

    config_service = ConfigService(CONFIG_FOLDER)

    alerts_service = AlertsService(server_config.get_alerts_config())

    execution_logs_path = os.path.join('logs', 'processes')
    log_name_creator = LogNameCreator(
        server_config.logging_config.filename_pattern,
        server_config.logging_config.date_format)
    execution_logging_service = ExecutionLoggingService(
        execution_logs_path, log_name_creator)

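    # Seed the id generator with ids that already appear in the execution
    # history, so that newly generated execution ids do not repeat them.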
    existing_ids = [
        entry.id for entry in execution_logging_service.get_history_entries()
    ]
    id_generator = IdGenerator(existing_ids)

    execution_service = ExecutionService(id_generator)

    execution_logging_initiator = ExecutionLoggingInitiator(
        execution_service, execution_logging_service)
    execution_logging_initiator.start()

    user_file_storage = UserFileStorage(secret)
    file_download_feature = FileDownloadFeature(user_file_storage, TEMP_FOLDER)
    file_download_feature.subscribe(execution_service)
    file_upload_feature = FileUploadFeature(user_file_storage, TEMP_FOLDER)

    alerter_feature = FailAlerterFeature(execution_service, alerts_service)
    alerter_feature.start()

    server.init(server_config, execution_service, execution_logging_service,
                config_service, alerts_service, file_upload_feature,
                file_download_feature, secret)
Example #5
    def setUp(self):
        test_utils.setup()

        self.exit_codes = {}
        self.post_info_provider = _MapBasedPostExecInfo(self.exit_codes)

        self.logging_service = ExecutionLoggingService(test_utils.temp_folder)
    def setUp(self):
        test_utils.setup()

        self.authorizer = Authorizer([], [], ['power_user'], [],
                                     EmptyGroupProvider())
        self.logging_service = ExecutionLoggingService(test_utils.temp_folder,
                                                       LogNameCreator(),
                                                       self.authorizer)
Example #7
def __migrate_user_id(context):
    output_folder = os.path.join(context.log_folder, 'processes')
    if not os.path.exists(output_folder):
        return

    log_files = [
        os.path.join(output_folder, file) for file in os.listdir(output_folder)
        if file.lower().endswith('.log')
    ]

    for log_file in log_files:
        (correct, parameters_text
         ) = ExecutionLoggingService._read_parameters_text(log_file)
        if not correct:
            continue

        parameters = ExecutionLoggingService._parse_history_parameters(
            parameters_text)
        if not parameters or ('user' not in parameters):
            continue

        if ('user_id' in parameters) and ('user_name' in parameters):
            continue

        file_content = file_utils.read_file(log_file, keep_newlines=True)

        file_parts = file_content.split(
            execution.logging.OUTPUT_STARTED_MARKER + os.linesep, 1)
        parameters_text = file_parts[0]

        user = parameters['user']

        if 'user_id' not in parameters:
            parameters_text += 'user_id:' + user + os.linesep

        if 'user_name' not in parameters:
            parameters_text += 'user_name:' + user + os.linesep

        new_content = (parameters_text + execution.logging.OUTPUT_STARTED_MARKER
                       + os.linesep + file_parts[1])
        file_utils.write_file(log_file,
                              new_content.encode(execution.logging.ENCODING),
                              byte_content=True)
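For illustration, the effect of __migrate_user_id on a single log header, assuming a hypothetical file (the key names and the marker come from the code above; <OUTPUT_STARTED_MARKER> stands for whatever execution.logging.OUTPUT_STARTED_MARKER contains):

# Header before migration (hypothetical values):
#     id:1
#     user:admin
#     script:my_script
#     <OUTPUT_STARTED_MARKER>
# After migration, user_id and user_name are derived from 'user' and inserted
# in front of the marker:
#     id:1
#     user:admin
#     script:my_script
#     user_id:admin
#     user_name:admin
#     <OUTPUT_STARTED_MARKER>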
Example #8
    def setUp(self):
        test_utils.setup()

        executor._process_creator = _MockProcessWrapper

        authorizer = Authorizer([], [], [], EmptyGroupProvider())
        self.logging_service = ExecutionLoggingService(test_utils.temp_folder, LogNameCreator(), authorizer)
        self.executor_service = ExecutionService(_IdGeneratorMock())

        self.controller = ExecutionLoggingController(self.executor_service, self.logging_service)
        self.controller.start()
Example #9
def __migrate_user_id(context):
    output_folder = os.path.join(context.log_folder, 'processes')
    if not os.path.exists(output_folder):
        return

    log_files = [os.path.join(output_folder, file)
                 for file in os.listdir(output_folder)
                 if file.lower().endswith('.log')]

    for log_file in log_files:
        (correct, parameters_text) = ExecutionLoggingService._read_parameters_text(log_file)
        if not correct:
            continue

        parameters = ExecutionLoggingService._parse_history_parameters(parameters_text)
        if not parameters or ('user' not in parameters):
            continue

        if ('user_id' in parameters) and ('user_name' in parameters):
            continue

        file_content = file_utils.read_file(log_file, keep_newlines=True)

        file_parts = file_content.split(execution.logging.OUTPUT_STARTED_MARKER + os.linesep, 1)
        parameters_text = file_parts[0]

        user = parameters['user']

        if 'user_id' not in parameters:
            parameters_text += 'user_id:' + user + os.linesep

        if 'user_name' not in parameters:
            parameters_text += 'user_name:' + user + os.linesep

        new_content = parameters_text + execution.logging.OUTPUT_STARTED_MARKER + os.linesep + file_parts[1]
        file_utils.write_file(log_file, new_content.encode(execution.logging.ENCODING), byte_content=True)
Example #10
def __migrate_old_files(context):
    output_folder = os.path.join(context.log_folder, 'processes')
    if not os.path.exists(output_folder):
        return

    log_files = [
        os.path.join(output_folder, file) for file in os.listdir(output_folder)
        if file.lower().endswith('.log')
    ]

    def is_new_format(log_file):
        with open(log_file, 'r') as f:
            first_line = f.readline().strip()

            if not first_line.startswith('id:'):
                return False

            for line in f:
                if line.strip() == execution.logging.OUTPUT_STARTED_MARKER:
                    return True

        return False

    old_files = [
        log_file for log_file in log_files if not is_new_format(log_file)
    ]

    if not old_files:
        return

    existing_ids = set()
    for file in log_files:
        correct, parameters_text = ExecutionLoggingService._read_parameters_text(
            file)
        if not correct:
            continue

        parameters = ExecutionLoggingService._parse_history_parameters(
            parameters_text)
        if not parameters or 'id' not in parameters:
            continue

        existing_ids.add(parameters['id'])

    id_generator = (str(id) for id in itertools.count())
    id_generator = filter(lambda id: id not in existing_ids, id_generator)

    for old_file in old_files:
        log_basename = os.path.basename(old_file)
        filename = os.path.splitext(log_basename)[0]

        match = re.fullmatch(
            r'(.+)_([^_]+)_((\d\d)(\d\d)(\d\d)_(\d\d)(\d\d)(\d\d))', filename)
        if match:
            script_name = match.group(1)
            username = match.group(2)
            start_time = datetime.strptime(match.group(3), '%y%m%d_%H%M%S')
            id = next(id_generator)
        else:
            script_name = 'unknown'
            username = '******'
            start_time = sec_to_datetime(os.path.getctime(old_file))
            id = next(id_generator)

        new_begin = ''
        new_begin += 'id:' + id + '\n'
        new_begin += 'user_name:' + username + '\n'
        new_begin += 'user_id:' + username + '\n'
        new_begin += 'script:' + script_name + '\n'
        new_begin += 'start_time:' + str(to_millis(start_time)) + '\n'
        new_begin += 'command:unknown' + '\n'
        new_begin += execution.logging.OUTPUT_STARTED_MARKER + '\n'

        file_content = file_utils.read_file(old_file)
        file_content = new_begin + file_content
        file_utils.write_file(old_file, file_content)
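The regular expression in this migration targets legacy log names of the form <script>_<username>_<yyMMdd_HHmmss>.log. A small standalone sketch of how such a name is parsed (the concrete file name is made up):

import re
from datetime import datetime

filename = 'my_script_admin_180520_134510'  # hypothetical legacy name, extension stripped

match = re.fullmatch(r'(.+)_([^_]+)_((\d\d)(\d\d)(\d\d)_(\d\d)(\d\d)(\d\d))', filename)
if match:
    script_name = match.group(1)  # 'my_script'
    username = match.group(2)     # 'admin'
    # group(3) is the timestamp part, e.g. '180520_134510'
    start_time = datetime.strptime(match.group(3), '%y%m%d_%H%M%S')  # 2018-05-20 13:45:10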
Example #11
def main():
    try:
        tool_utils.validate_web_build_exists(os.getcwd())
    except InvalidWebBuildException as e:
        print(str(e))
        sys.exit(-1)

    logging_conf_file = os.path.join(CONFIG_FOLDER, 'logging.json')
    with open(logging_conf_file, 'rt') as f:
        log_config = json.load(f)
        file_utils.prepare_folder(LOG_FOLDER)

        logging.config.dictConfig(log_config)

    file_utils.prepare_folder(CONFIG_FOLDER)
    file_utils.prepare_folder(TEMP_FOLDER)

    migrations.migrate.migrate(TEMP_FOLDER, CONFIG_FOLDER, SERVER_CONF_PATH,
                               LOG_FOLDER)

    server_config = server_conf.from_json(SERVER_CONF_PATH, TEMP_FOLDER)

    secret = get_secret(TEMP_FOLDER)

    tornado_client_config.initialize()

    group_provider = create_group_provider(server_config.user_groups,
                                           server_config.authenticator,
                                           server_config.admin_users)

    authorizer = Authorizer(server_config.allowed_users,
                            server_config.admin_users, group_provider)

    config_service = ConfigService(authorizer, CONFIG_FOLDER)

    alerts_service = AlertsService(server_config.get_alerts_config())

    execution_logs_path = os.path.join(LOG_FOLDER, 'processes')
    log_name_creator = LogNameCreator(
        server_config.logging_config.filename_pattern,
        server_config.logging_config.date_format)
    execution_logging_service = ExecutionLoggingService(
        execution_logs_path, log_name_creator)

    existing_ids = [
        entry.id for entry in execution_logging_service.get_history_entries()
    ]
    id_generator = IdGenerator(existing_ids)

    execution_service = ExecutionService(id_generator)

    execution_logging_initiator = ExecutionLoggingInitiator(
        execution_service, execution_logging_service)
    execution_logging_initiator.start()

    user_file_storage = UserFileStorage(secret)
    file_download_feature = FileDownloadFeature(user_file_storage, TEMP_FOLDER)
    file_download_feature.subscribe(execution_service)
    file_upload_feature = FileUploadFeature(user_file_storage, TEMP_FOLDER)

    alerter_feature = FailAlerterFeature(execution_service, alerts_service)
    alerter_feature.start()

    server.init(server_config, server_config.authenticator, authorizer,
                execution_service, execution_logging_service, config_service,
                alerts_service, file_upload_feature, file_download_feature,
                secret)
Example #12
def main():
    tool_utils.validate_web_imports_exist(os.getcwd())

    logging_conf_file = os.path.join(CONFIG_FOLDER, 'logging.json')
    with open(logging_conf_file, "rt") as f:
        log_config = json.load(f)
        file_utils.prepare_folder(os.path.join('logs'))

        logging.config.dictConfig(log_config)

    file_utils.prepare_folder(CONFIG_FOLDER)
    file_utils.prepare_folder(SCRIPT_CONFIGS_FOLDER)

    server_config = server_conf.from_json(SERVER_CONF_PATH)
    ssl_context = None
    if server_config.is_ssl():
        ssl_context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
        ssl_context.load_cert_chain(server_config.get_ssl_cert_path(),
                                    server_config.get_ssl_key_path())

    file_utils.prepare_folder(TEMP_FOLDER)

    settings = {
        "cookie_secret": get_tornado_secret(),
        "login_url": "/login.html"
    }

    auth = TornadoAuth(server_config.authenticator, server_config.authorizer)

    user_file_storage = UserFileStorage(get_tornado_secret())
    file_download_feature = FileDownloadFeature(user_file_storage, TEMP_FOLDER)
    result_files_folder = file_download_feature.get_result_files_folder()

    handlers = [(r"/conf/title", GetServerTitle),
                (r"/scripts/list", GetScripts),
                (r"/scripts/info", GetScriptInfo),
                (r"/scripts/execute", ScriptExecute),
                (r"/scripts/execute/stop", ScriptStop),
                (r"/scripts/execute/io/(.*)", ScriptStreamSocket),
                (r'/admin/execution_log/short', GetShortHistoryEntriesHandler),
                (r'/admin/execution_log/long/(.*)',
                 GetLongHistoryEntryHandler),
                (r'/' + os.path.basename(result_files_folder) + '/(.*)',
                 DownloadResultFile, {'path': result_files_folder}),
                (r"/", ProxiedRedirectHandler, {"url": "/index.html"})]

    if auth.is_enabled():
        handlers.append((r'/login', LoginHandler))
        handlers.append((r'/auth/config', AuthConfigHandler))
        handlers.append((r'/logout', LogoutHandler))

    handlers.append((r"/username", GetUsernameHandler))

    handlers.append((r"/(.*)", AuthorizedStaticFileHandler, {"path": "web"}))

    application = tornado.web.Application(handlers, **settings)

    application.auth = auth

    application.server_title = server_config.title
    application.authorizer = server_config.authorizer

    application.file_download_feature = file_download_feature
    application.file_upload_feature = FileUploadFeature(
        user_file_storage, TEMP_FOLDER)

    alerts_service = AlertsService(server_config.get_alerts_config())
    application.alerts_service = alerts_service

    execution_logs_path = os.path.join('logs', 'processes')
    log_name_creator = LogNameCreator(
        server_config.logging_config.filename_pattern,
        server_config.logging_config.date_format)
    execution_logging_service = ExecutionLoggingService(
        execution_logs_path, log_name_creator)
    application.execution_logging_service = execution_logging_service

    existing_ids = [
        entry.id for entry in execution_logging_service.get_history_entries()
    ]
    id_generator = IdGenerator(existing_ids)

    execution_service = ExecutionService(execution_logging_service,
                                         alerts_service, id_generator)
    application.execution_service = execution_service

    http_server = httpserver.HTTPServer(application, ssl_options=ssl_context)
    http_server.listen(server_config.port, address=server_config.address)

    io_loop = tornado.ioloop.IOLoop.current()

    intercept_stop_when_running_scripts(io_loop, execution_service)

    http_protocol = 'https' if server_config.ssl else 'http'
    print('Server is running on: %s://%s:%s' %
          (http_protocol, server_config.address, server_config.port))
    io_loop.start()
Example #13
def main():
    project_path = os.getcwd()

    try:
        tool_utils.validate_web_build_exists(project_path)
    except InvalidWebBuildException as e:
        print(str(e))
        sys.exit(-1)

    logging_conf_file = os.path.join(CONFIG_FOLDER, 'logging.json')
    with open(logging_conf_file, 'rt') as f:
        log_config = json.load(f)
        file_utils.prepare_folder(LOG_FOLDER)

        logging.config.dictConfig(log_config)

    server_version = tool_utils.get_server_version(project_path)
    logging.info('Starting Script Server' +
                 ((', v' + server_version) if server_version else ' (custom version)'))

    file_utils.prepare_folder(CONFIG_FOLDER)
    file_utils.prepare_folder(TEMP_FOLDER)

    migrations.migrate.migrate(TEMP_FOLDER, CONFIG_FOLDER, SERVER_CONF_PATH,
                               LOG_FOLDER)

    server_config = server_conf.from_json(SERVER_CONF_PATH, TEMP_FOLDER)

    secret = get_secret(server_config.secret_storage_file)

    tornado_client_config.initialize()

    group_provider = create_group_provider(server_config.user_groups,
                                           server_config.authenticator,
                                           server_config.admin_users)

    authorizer = Authorizer(server_config.allowed_users,
                            server_config.admin_users,
                            server_config.full_history_users,
                            server_config.code_editor_users, group_provider)

    config_service = ConfigService(authorizer, CONFIG_FOLDER)

    alerts_service = AlertsService(server_config.alerts_config)

    execution_logs_path = os.path.join(LOG_FOLDER, 'processes')
    log_name_creator = LogNameCreator(
        server_config.logging_config.filename_pattern,
        server_config.logging_config.date_format)
    execution_logging_service = ExecutionLoggingService(
        execution_logs_path, log_name_creator, authorizer)

    existing_ids = [
        entry.id for entry in execution_logging_service.get_history_entries(
            None, system_call=True)
    ]
    id_generator = IdGenerator(existing_ids)

    execution_service = ExecutionService(authorizer, id_generator)

    execution_logging_controller = ExecutionLoggingController(
        execution_service, execution_logging_service)
    execution_logging_controller.start()

    user_file_storage = UserFileStorage(secret)
    file_download_feature = FileDownloadFeature(user_file_storage, TEMP_FOLDER)
    file_download_feature.subscribe(execution_service)
    file_upload_feature = FileUploadFeature(user_file_storage, TEMP_FOLDER)

    alerter_feature = FailAlerterFeature(execution_service, alerts_service)
    alerter_feature.start()

    executions_callback_feature = ExecutionsCallbackFeature(
        execution_service, server_config.callbacks_config)
    executions_callback_feature.start()

    schedule_service = ScheduleService(config_service, execution_service,
                                       CONFIG_FOLDER)

    server.init(server_config, server_config.authenticator, authorizer,
                execution_service, schedule_service, execution_logging_service,
                config_service, alerts_service, file_upload_feature,
                file_download_feature, secret, server_version, CONFIG_FOLDER)
Example #14
    def setUp(self):
        test_utils.setup()

        self.logging_service = ExecutionLoggingService(test_utils.temp_folder,
                                                       LogNameCreator())
Example #15
    def test_history_entries_after_restart(self):
        self.simulate_logging(execution_id='id1')

        new_service = ExecutionLoggingService(test_utils.temp_folder, LogNameCreator())
        entry = new_service.get_history_entries()[0]
        self.validate_history_entry(entry, id='id1')
Example #16
def __migrate_old_files(context):
    output_folder = os.path.join(context.log_folder, 'processes')
    if not os.path.exists(output_folder):
        return

    log_files = [os.path.join(output_folder, file)
                 for file in os.listdir(output_folder)
                 if file.lower().endswith('.log')]

    def is_new_format(log_file):
        with open(log_file, 'r') as f:
            first_line = f.readline().strip()

            if not first_line.startswith('id:'):
                return False

            for line in f:
                if line.strip() == execution.logging.OUTPUT_STARTED_MARKER:
                    return True

        return False

    old_files = [log_file for log_file in log_files if not is_new_format(log_file)]

    if not old_files:
        return

    existing_ids = set()
    for file in log_files:
        correct, parameters_text = ExecutionLoggingService._read_parameters_text(file)
        if not correct:
            continue

        parameters = ExecutionLoggingService._parse_history_parameters(parameters_text)
        if not parameters or 'id' not in parameters:
            continue

        existing_ids.add(parameters['id'])

    id_generator = (str(id) for id in itertools.count())
    id_generator = filter(lambda id: id not in existing_ids, id_generator)

    for old_file in old_files:
        log_basename = os.path.basename(old_file)
        filename = os.path.splitext(log_basename)[0]

        match = re.fullmatch(r'(.+)_([^_]+)_((\d\d)(\d\d)(\d\d)_(\d\d)(\d\d)(\d\d))', filename)
        if match:
            script_name = match.group(1)
            username = match.group(2)
            start_time = datetime.strptime(match.group(3), '%y%m%d_%H%M%S')
            id = next(id_generator)
        else:
            script_name = 'unknown'
            username = '******'
            start_time = sec_to_datetime(os.path.getctime(old_file))
            id = next(id_generator)

        new_begin = ''
        new_begin += 'id:' + id + '\n'
        new_begin += 'user_name:' + username + '\n'
        new_begin += 'user_id:' + username + '\n'
        new_begin += 'script:' + script_name + '\n'
        new_begin += 'start_time:' + str(to_millis(start_time)) + '\n'
        new_begin += 'command:unknown' + '\n'
        new_begin += execution.logging.OUTPUT_STARTED_MARKER + '\n'

        file_content = file_utils.read_file(old_file)
        file_content = new_begin + file_content
        file_utils.write_file(old_file, file_content)
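After this migration every log file begins with the 'new format' header that is_new_format() checks for: an id: line, the remaining key:value lines, then the output marker. A hypothetical migrated file could start like this (values are made up; <OUTPUT_STARTED_MARKER> stands for execution.logging.OUTPUT_STARTED_MARKER):

#     id:42
#     user_name:admin
#     user_id:admin
#     script:my_script
#     start_time:1526822710000
#     command:unknown
#     <OUTPUT_STARTED_MARKER>
#     ...original file content follows...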