Example #1
    def test_unique_id_after_init_with_random_ids(self):
        ids = ['test1', '123', '1', '3', '10', 'my_script#5']
        generator = IdGenerator(ids)

        for index in range(50):
            next_id = generator.next_id()
            self.assertFalse(next_id in ids)
            ids.append(next_id)
Example #2
    def test_3_next_ids(self):
        generator = IdGenerator([])
        id1 = generator.next_id()
        id2 = generator.next_id()
        id3 = generator.next_id()
        self.assertNotEqual(id1, id2)
        self.assertNotEqual(id1, id3)
        self.assertNotEqual(id2, id3)
Example #3
    def __init__(self, config_service: ConfigService,
                 execution_service: ExecutionService, conf_folder):
        self._schedules_folder = os.path.join(conf_folder, 'schedules')
        file_utils.prepare_folder(self._schedules_folder)

        self._config_service = config_service
        self._execution_service = execution_service

        (jobs, ids) = restore_jobs(self._schedules_folder)
        self._scheduled_executions = jobs
        self._id_generator = IdGenerator(ids)
        self.stopped = False

        self.scheduler = sched.scheduler(timefunc=time.time)
        self._start_scheduler()

        for job in jobs.values():
            self.schedule_job(job)
Example #4
def main():
    tool_utils.validate_web_imports_exist(os.getcwd())

    logging_conf_file = os.path.join(CONFIG_FOLDER, 'logging.json')
    with open(logging_conf_file, 'rt') as f:
        log_config = json.load(f)
        file_utils.prepare_folder(os.path.join('logs'))

        logging.config.dictConfig(log_config)

    file_utils.prepare_folder(CONFIG_FOLDER)
    file_utils.prepare_folder(TEMP_FOLDER)

    migrations.migrate.migrate(TEMP_FOLDER, CONFIG_FOLDER)

    server_config = server_conf.from_json(SERVER_CONF_PATH)

    secret = get_secret(TEMP_FOLDER)

    config_service = ConfigService(CONFIG_FOLDER)

    alerts_service = AlertsService(server_config.get_alerts_config())

    execution_logs_path = os.path.join('logs', 'processes')
    log_name_creator = LogNameCreator(
        server_config.logging_config.filename_pattern,
        server_config.logging_config.date_format)
    execution_logging_service = ExecutionLoggingService(
        execution_logs_path, log_name_creator)

    existing_ids = [
        entry.id for entry in execution_logging_service.get_history_entries()
    ]
    id_generator = IdGenerator(existing_ids)

    execution_service = ExecutionService(id_generator)

    execution_logging_initiator = ExecutionLoggingInitiator(
        execution_service, execution_logging_service)
    execution_logging_initiator.start()

    user_file_storage = UserFileStorage(secret)
    file_download_feature = FileDownloadFeature(user_file_storage, TEMP_FOLDER)
    file_download_feature.subscribe(execution_service)
    file_upload_feature = FileUploadFeature(user_file_storage, TEMP_FOLDER)

    alerter_feature = FailAlerterFeature(execution_service, alerts_service)
    alerter_feature.start()

    server.init(server_config, execution_service, execution_logging_service,
                config_service, alerts_service, file_upload_feature,
                file_download_feature, secret)
Example #5
    def test_unique_id_after_init(self):
        generator1 = IdGenerator([])
        ids = []
        for index in range(10):
            ids.append(generator1.next_id())

        generator2 = IdGenerator(ids)
        next_id = generator2.next_id()
        self.assertFalse(next_id in ids)
Example #6
class ScheduleService:
    def __init__(self, config_service: ConfigService,
                 execution_service: ExecutionService, conf_folder):
        self._schedules_folder = os.path.join(conf_folder, 'schedules')
        file_utils.prepare_folder(self._schedules_folder)

        self._config_service = config_service
        self._execution_service = execution_service

        (jobs, ids) = restore_jobs(self._schedules_folder)
        self._scheduled_executions = jobs
        self._id_generator = IdGenerator(ids)
        self.stopped = False

        self.scheduler = sched.scheduler(timefunc=time.time)
        self._start_scheduler()

        for job in jobs.values():
            self.schedule_job(job)

    def create_job(self, script_name, parameter_values,
                   incoming_schedule_config, user: User):
        if user is None:
            raise InvalidUserException('User id is missing')

        config_model = self._config_service.load_config_model(
            script_name, user, parameter_values)
        self.validate_script_config(config_model)

        schedule_config = read_schedule_config(incoming_schedule_config)

        if not schedule_config.repeatable and date_utils.is_past(
                schedule_config.start_datetime):
            raise InvalidScheduleException(
                'Start date should be in the future')

        id = self._id_generator.next_id()

        job = SchedulingJob(id, user, schedule_config, script_name,
                            parameter_values)

        self.save_job(job)

        self.schedule_job(job)

        return id

    @staticmethod
    def validate_script_config(config_model):
        if not config_model.schedulable:
            raise UnavailableScriptException(config_model.name +
                                             ' is not schedulable')

        for parameter in config_model.parameters:
            if parameter.secure:
                raise UnavailableScriptException(
                    'Script contains secure parameters (' +
                    parameter.str_name() + '), this is not supported')

    def schedule_job(self, job: SchedulingJob):
        schedule = job.schedule

        if not schedule.repeatable and date_utils.is_past(
                schedule.start_datetime):
            return

        next_datetime = schedule.get_next_time()
        LOGGER.info('Scheduling ' + job.get_log_name() + ' at ' +
                    next_datetime.astimezone(
                        tz=None).strftime('%H:%M, %d %B %Y'))

        self.scheduler.enterabs(next_datetime.timestamp(), 1,
                                self._execute_job, (job, ))

    def _execute_job(self, job: SchedulingJob):
        LOGGER.info('Executing ' + job.get_log_name())

        script_name = job.script_name
        parameter_values = job.parameter_values
        user = job.user

        try:
            config = self._config_service.load_config_model(
                script_name, user, parameter_values)
            self.validate_script_config(config)

            execution_id = self._execution_service.start_script(
                config, parameter_values, user.user_id, user.audit_names)
            LOGGER.info('Started script #' + str(execution_id) + ' for ' +
                        job.get_log_name())
        except Exception:
            LOGGER.exception('Failed to execute ' + job.get_log_name())

        self.schedule_job(job)

    def save_job(self, job: SchedulingJob):
        user = job.user
        script_name = job.script_name

        filename = file_utils.to_filename(
            '%s_%s_%s.json' % (script_name, user.get_audit_name(), job.id))
        file_utils.write_file(os.path.join(self._schedules_folder, filename),
                              json.dumps(job.as_serializable_dict(), indent=2))

    def _start_scheduler(self):
        def scheduler_loop():
            while not self.stopped:
                try:
                    self.scheduler.run(blocking=False)
                except Exception:
                    LOGGER.exception('Failed to execute scheduled job')

                now = date_utils.now()
                sleep_delta = timedelta(minutes=1) - timedelta(
                    microseconds=now.microsecond, seconds=now.second)
                _sleep(sleep_delta.total_seconds())

        self.scheduling_thread = threading.Thread(daemon=True,
                                                  target=scheduler_loop)
        self.scheduling_thread.start()

    def _stop(self):
        self.stopped = True

        def stopper():
            pass

        # just schedule the next execution to exit thread immediately
        self.scheduler.enter(1, 0, stopper)
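
The constructor above relies on a restore_jobs helper that is not shown in these examples. Judging only from how its return value is used (a dict of jobs to iterate over, plus a list of ids handed to IdGenerator) and from the JSON files written by save_job, a hypothetical sketch could look like the following. SchedulingJob.from_dict is an assumed deserialization counterpart of as_serializable_dict(); the project's real helper may differ.

import json
import os


def restore_jobs(schedules_folder):
    # Hypothetical sketch, not the project's actual implementation.
    # Reads every schedule file persisted by save_job and returns
    # (jobs_by_id, ids) so that existing jobs get re-scheduled and
    # their ids are excluded by IdGenerator.
    jobs = {}
    ids = []

    for filename in os.listdir(schedules_folder):
        if not filename.endswith('.json'):
            continue

        path = os.path.join(schedules_folder, filename)
        with open(path) as f:
            job_dict = json.load(f)

        job = SchedulingJob.from_dict(job_dict)  # assumed helper
        jobs[job.id] = job  # keying by job id is an assumption
        ids.append(job.id)

    return jobs, ids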
Example #7
def main():
    try:
        tool_utils.validate_web_build_exists(os.getcwd())
    except InvalidWebBuildException as e:
        print(str(e))
        sys.exit(-1)

    logging_conf_file = os.path.join(CONFIG_FOLDER, 'logging.json')
    with open(logging_conf_file, 'rt') as f:
        log_config = json.load(f)
        file_utils.prepare_folder(LOG_FOLDER)

        logging.config.dictConfig(log_config)

    file_utils.prepare_folder(CONFIG_FOLDER)
    file_utils.prepare_folder(TEMP_FOLDER)

    migrations.migrate.migrate(TEMP_FOLDER, CONFIG_FOLDER, SERVER_CONF_PATH,
                               LOG_FOLDER)

    server_config = server_conf.from_json(SERVER_CONF_PATH, TEMP_FOLDER)

    secret = get_secret(TEMP_FOLDER)

    tornado_client_config.initialize()

    group_provider = create_group_provider(server_config.user_groups,
                                           server_config.authenticator,
                                           server_config.admin_users)

    authorizer = Authorizer(server_config.allowed_users,
                            server_config.admin_users, group_provider)

    config_service = ConfigService(authorizer, CONFIG_FOLDER)

    alerts_service = AlertsService(server_config.get_alerts_config())

    execution_logs_path = os.path.join(LOG_FOLDER, 'processes')
    log_name_creator = LogNameCreator(
        server_config.logging_config.filename_pattern,
        server_config.logging_config.date_format)
    execution_logging_service = ExecutionLoggingService(
        execution_logs_path, log_name_creator)

    existing_ids = [
        entry.id for entry in execution_logging_service.get_history_entries()
    ]
    id_generator = IdGenerator(existing_ids)

    execution_service = ExecutionService(id_generator)

    execution_logging_initiator = ExecutionLoggingInitiator(
        execution_service, execution_logging_service)
    execution_logging_initiator.start()

    user_file_storage = UserFileStorage(secret)
    file_download_feature = FileDownloadFeature(user_file_storage, TEMP_FOLDER)
    file_download_feature.subscribe(execution_service)
    file_upload_feature = FileUploadFeature(user_file_storage, TEMP_FOLDER)

    alerter_feature = FailAlerterFeature(execution_service, alerts_service)
    alerter_feature.start()

    server.init(server_config, server_config.authenticator, authorizer,
                execution_service, execution_logging_service, config_service,
                alerts_service, file_upload_feature, file_download_feature,
                secret)
Example #8
def main():
    tool_utils.validate_web_imports_exist(os.getcwd())

    logging_conf_file = os.path.join(CONFIG_FOLDER, 'logging.json')
    with open(logging_conf_file, "rt") as f:
        log_config = json.load(f)
        file_utils.prepare_folder(os.path.join('logs'))

        logging.config.dictConfig(log_config)

    file_utils.prepare_folder(CONFIG_FOLDER)
    file_utils.prepare_folder(SCRIPT_CONFIGS_FOLDER)

    server_config = server_conf.from_json(SERVER_CONF_PATH)
    ssl_context = None
    if server_config.is_ssl():
        ssl_context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
        ssl_context.load_cert_chain(server_config.get_ssl_cert_path(),
                                    server_config.get_ssl_key_path())

    file_utils.prepare_folder(TEMP_FOLDER)

    settings = {
        "cookie_secret": get_tornado_secret(),
        "login_url": "/login.html"
    }

    auth = TornadoAuth(server_config.authenticator, server_config.authorizer)

    user_file_storage = UserFileStorage(get_tornado_secret())
    file_download_feature = FileDownloadFeature(user_file_storage, TEMP_FOLDER)
    result_files_folder = file_download_feature.get_result_files_folder()

    handlers = [(r"/conf/title", GetServerTitle),
                (r"/scripts/list", GetScripts),
                (r"/scripts/info", GetScriptInfo),
                (r"/scripts/execute", ScriptExecute),
                (r"/scripts/execute/stop", ScriptStop),
                (r"/scripts/execute/io/(.*)", ScriptStreamSocket),
                (r'/admin/execution_log/short', GetShortHistoryEntriesHandler),
                (r'/admin/execution_log/long/(.*)',
                 GetLongHistoryEntryHandler),
                (r'/' + os.path.basename(result_files_folder) + '/(.*)',
                 DownloadResultFile, {
                     'path': result_files_folder
                 }), (r"/", ProxiedRedirectHandler, {
                     "url": "/index.html"
                 })]

    if auth.is_enabled():
        handlers.append((r'/login', LoginHandler))
        handlers.append((r'/auth/config', AuthConfigHandler))
        handlers.append((r'/logout', LogoutHandler))

    handlers.append((r"/username", GetUsernameHandler))

    handlers.append((r"/(.*)", AuthorizedStaticFileHandler, {"path": "web"}))

    application = tornado.web.Application(handlers, **settings)

    application.auth = auth

    application.server_title = server_config.title
    application.authorizer = server_config.authorizer

    application.file_download_feature = file_download_feature
    application.file_upload_feature = FileUploadFeature(
        user_file_storage, TEMP_FOLDER)

    alerts_service = AlertsService(server_config.get_alerts_config())
    application.alerts_service = alerts_service

    execution_logs_path = os.path.join('logs', 'processes')
    log_name_creator = LogNameCreator(
        server_config.logging_config.filename_pattern,
        server_config.logging_config.date_format)
    execution_logging_service = ExecutionLoggingService(
        execution_logs_path, log_name_creator)
    application.execution_logging_service = execution_logging_service

    existing_ids = [
        entry.id for entry in execution_logging_service.get_history_entries()
    ]
    id_generator = IdGenerator(existing_ids)

    execution_service = ExecutionService(execution_logging_service,
                                         alerts_service, id_generator)
    application.execution_service = execution_service

    http_server = httpserver.HTTPServer(application, ssl_options=ssl_context)
    http_server.listen(server_config.port, address=server_config.address)

    io_loop = tornado.ioloop.IOLoop.current()

    intercept_stop_when_running_scripts(io_loop, execution_service)

    http_protocol = 'https' if server_config.ssl else 'http'
    print('Server is running on: %s://%s:%s' %
          (http_protocol, server_config.address, server_config.port))
    io_loop.start()
Example #9
def main():
    project_path = os.getcwd()

    try:
        tool_utils.validate_web_build_exists(project_path)
    except InvalidWebBuildException as e:
        print(str(e))
        sys.exit(-1)

    logging_conf_file = os.path.join(CONFIG_FOLDER, 'logging.json')
    with open(logging_conf_file, 'rt') as f:
        log_config = json.load(f)
        file_utils.prepare_folder(LOG_FOLDER)

        logging.config.dictConfig(log_config)

    server_version = tool_utils.get_server_version(project_path)
    logging.info('Starting Script Server' +
                 (', v' +
                  server_version if server_version else ' (custom version)'))

    file_utils.prepare_folder(CONFIG_FOLDER)
    file_utils.prepare_folder(TEMP_FOLDER)

    migrations.migrate.migrate(TEMP_FOLDER, CONFIG_FOLDER, SERVER_CONF_PATH,
                               LOG_FOLDER)

    server_config = server_conf.from_json(SERVER_CONF_PATH, TEMP_FOLDER)

    secret = get_secret(server_config.secret_storage_file)

    tornado_client_config.initialize()

    group_provider = create_group_provider(server_config.user_groups,
                                           server_config.authenticator,
                                           server_config.admin_users)

    authorizer = Authorizer(server_config.allowed_users,
                            server_config.admin_users,
                            server_config.full_history_users,
                            server_config.code_editor_users, group_provider)

    config_service = ConfigService(authorizer, CONFIG_FOLDER)

    alerts_service = AlertsService(server_config.alerts_config)

    execution_logs_path = os.path.join(LOG_FOLDER, 'processes')
    log_name_creator = LogNameCreator(
        server_config.logging_config.filename_pattern,
        server_config.logging_config.date_format)
    execution_logging_service = ExecutionLoggingService(
        execution_logs_path, log_name_creator, authorizer)

    existing_ids = [
        entry.id for entry in execution_logging_service.get_history_entries(
            None, system_call=True)
    ]
    id_generator = IdGenerator(existing_ids)

    execution_service = ExecutionService(authorizer, id_generator)

    execution_logging_controller = ExecutionLoggingController(
        execution_service, execution_logging_service)
    execution_logging_controller.start()

    user_file_storage = UserFileStorage(secret)
    file_download_feature = FileDownloadFeature(user_file_storage, TEMP_FOLDER)
    file_download_feature.subscribe(execution_service)
    file_upload_feature = FileUploadFeature(user_file_storage, TEMP_FOLDER)

    alerter_feature = FailAlerterFeature(execution_service, alerts_service)
    alerter_feature.start()

    executions_callback_feature = ExecutionsCallbackFeature(
        execution_service, server_config.callbacks_config)
    executions_callback_feature.start()

    schedule_service = ScheduleService(config_service, execution_service,
                                       CONFIG_FOLDER)

    server.init(server_config, server_config.authenticator, authorizer,
                execution_service, schedule_service, execution_logging_service,
                config_service, alerts_service, file_upload_feature,
                file_download_feature, secret, server_version, CONFIG_FOLDER)
Example #10
    def test_next_id(self):
        generator = IdGenerator([])
        id = generator.next_id()
        self.assertIsNotNone(id)
        self.assertIsInstance(id, str)
        self.assertFalse(is_empty(id))
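
Taken together, the tests in Examples #1, #2, #5 and #10 pin down the IdGenerator contract: it is seeded with the ids already in use, and next_id() must return a non-empty string that never collides with the seed ids or with anything it generated earlier. A minimal sketch that satisfies these tests (not necessarily the project's actual implementation) is shown below.

class IdGenerator:
    # Minimal sketch satisfying the tests above; the real IdGenerator
    # in the project may be implemented differently.

    def __init__(self, existing_ids):
        # Start counting above the largest purely numeric id already in
        # use, so generated ids never collide with existing ones
        # (non-numeric ids such as 'my_script#5' are simply skipped).
        max_numeric = 0
        for existing_id in existing_ids:
            if str(existing_id).isdigit():
                max_numeric = max(max_numeric, int(existing_id))
        self._next_value = max_numeric + 1

    def next_id(self):
        # Every call returns a fresh, non-empty string id.
        value = str(self._next_value)
        self._next_value += 1
        return value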