def _load_runner_files(conf_folder):
    """Read all runner config JSON files under ``conf_folder/runners``.

    Returns a list of ``(path, parsed_json, raw_content)`` tuples (JSON parsed
    with ordered keys), or ``None`` when the runners folder does not exist.
    Broken symlinks are skipped silently; unparseable files are logged and
    skipped so a single bad config does not abort the migration.
    """
    runners_folder = os.path.join(conf_folder, 'runners')
    if not os.path.exists(runners_folder):
        return None

    # `name` instead of `file` to avoid shadowing the (py2) builtin / common name
    conf_files = [
        os.path.join(runners_folder, name)
        for name in os.listdir(runners_folder)
        if name.lower().endswith('.json')
    ]
    conf_files = [f for f in conf_files if not file_utils.is_broken_symlink(f)]

    result = []
    for conf_file in conf_files:
        content = file_utils.read_file(conf_file)
        try:
            # Keep the try body minimal: only the parse can legitimately fail here
            json_object = custom_json.loads(content, object_pairs_hook=OrderedDict)
        except Exception:
            # was a bare `except:`, which would also swallow KeyboardInterrupt/SystemExit
            LOGGER.exception('Failed to load file for migration: ' + conf_file)
            continue
        result.append((conf_file, json_object, content))

    return result
def __introduce_access_config(context):
    """Config migration: gather access-related settings under a single
    top-level 'access' section.

    Relocates 'auth.allowed_users' plus the root-level 'admin_users' and
    'trusted_ips' fields into json_object['access'], rewriting the file
    only when at least one field was actually moved.
    """
    file_path = context.conf_file

    if not os.path.exists(file_path):
        return

    content = file_utils.read_file(file_path)
    json_object = custom_json.loads(content, object_pairs_hook=OrderedDict)

    def relocate(key, source):
        # Create the 'access' section on first use, then transfer the field.
        access_section = json_object.setdefault('access', {})
        access_section[key] = source[key]
        del source[key]

    modified = False

    if 'auth' in json_object:
        auth_section = json_object['auth']
        if 'allowed_users' in auth_section:
            relocate('allowed_users', auth_section)
            modified = True

    for key in ('admin_users', 'trusted_ips'):
        if key in json_object:
            relocate(key, json_object)
            modified = True

    if modified:
        _write_json(file_path, json_object, content)
def test_comments_json(self):
    """Verify that '//' line comments are stripped when parsing server-config JSON."""
    config = _from_json(
        custom_json.loads("""
            {
                // "title": "my server"
                "title": "my server2"
            }""")
    )

    # The commented-out title must be ignored; only the active line counts.
    self.assertEqual('my server2', config.title)
def _validate_config(test_case, expected_filename, expected_body):
    """Assert that a runner config file exists in the temp 'runners' folder
    and that its parsed JSON content equals *expected_body*."""
    runners_dir = os.path.join(test_utils.temp_folder, 'runners')
    config_path = os.path.join(runners_dir, expected_filename)

    # Include the directory listing in the failure message for easier debugging.
    existing = str(os.listdir(runners_dir))
    message = 'Failed to find path ' + config_path + '. Existing paths: ' + existing
    test_case.assertTrue(os.path.exists(config_path), message)

    parsed = custom_json.loads(file_utils.read_file(config_path))
    test_case.assertEqual(expected_body, parsed)
def _path_to_json(self, path):
    """Resolve *path* against the config folder and parse it as JSON.

    Returns the parsed object, or ``None`` when *path* is ``None``, does not
    exist, or cannot be read/parsed (failures are logged, not raised —
    included files are loaded best-effort).
    """
    if path is None:
        return None

    path = file_utils.normalize_path(path, self._config_folder)

    # Guard clause instead of if/else nesting: warn and bail out on missing files.
    if not os.path.exists(path):
        LOGGER.warning(
            'Failed to load included file, path does not exist: ' + path)
        return None

    try:
        file_content = file_utils.read_file(path)
        return custom_json.loads(file_content)
    except Exception:
        # was a bare `except:`, which would also swallow KeyboardInterrupt/SystemExit
        LOGGER.exception('Failed to load included file ' + path)
        return None
def restore_jobs(schedules_folder):
    """Restore scheduling jobs from the '*.json' files in *schedules_folder*.

    Returns a tuple ``(job_dict, ids)``: jobs keyed by id for configs that
    fully loaded, and the list of ALL ids whose JSON could at least be parsed
    — including configs that later failed to build into a job. A failing file
    is logged and skipped so one broken schedule does not block the rest.
    """
    schedule_files = [
        name for name in os.listdir(schedules_folder)
        if name.endswith('.json')
    ]

    job_dict = {}
    ids = []  # list of ALL ids, including broken configs

    for name in schedule_files:
        try:
            content = file_utils.read_file(os.path.join(schedules_folder, name))
            job_json = custom_json.loads(content)
            # id is recorded before from_dict, so builds that fail still count
            ids.append(job_json['id'])
            job = scheduling_job.from_dict(job_json)
            job_dict[job.id] = job
        except Exception:
            # was a bare `except:`, which would also swallow KeyboardInterrupt/SystemExit;
            # also renamed loop var `file` -> `name` to stop shadowing a common builtin name
            LOGGER.exception('Failed to parse schedule file: ' + name)

    return job_dict, ids
def test_json_comments(self):
    """Verify that '//' line comments in script-config JSON are ignored by the parser."""
    config = get_sorted_config(custom_json.loads(
        """{
        // Comment 1
        "parameters": [
            // Comment 2
            {"name": "param2", "description": "desc 1"},
            {"type": "int", "name": "paramA"},
            {"default": "false", "name": "param1", "no_value": true}
        ],
        // Comment 3
        "name": "Conf X"
        }""")
    )

    # get_sorted_config reorders keys, so the comparison is against the
    # canonical (sorted) OrderedDict layout, not the literal input order.
    expected = OrderedDict([
        ('name', 'Conf X'),
        ('parameters', [
            OrderedDict([('name', 'param2'), ('description', 'desc 1')]),
            OrderedDict([('name', 'paramA'), ('type', 'int')]),
            OrderedDict([('name', 'param1'), ('no_value', True), ('default', 'false')])
        ]),
    ])
    self.assertEqual(expected, config)
def post(self, user):
    """Create a scheduled job from the submitted form values.

    Uploaded files are replaced by their temp-file paths in the parameter
    values. The '__schedule_config' pseudo-parameter carries the schedule
    definition and is removed before the values reach the script.
    Responds with ``{"id": <new job id>}``; raises HTTP 422 when the script
    is unavailable, the schedule is invalid, or a parameter value is invalid.
    """
    arguments = self.form_reader.values
    execution_info = external_model.to_execution_info(arguments)

    parameter_values = execution_info.param_values
    if self.form_reader.files:
        for key, value in self.form_reader.files.items():
            parameter_values[key] = value.path

    # The schedule is transported as a regular form parameter; extract it
    # so it is not passed to the script itself.
    schedule_config = custom_json.loads(parameter_values['__schedule_config'])
    del parameter_values['__schedule_config']

    try:
        # renamed from `id`, which shadowed the builtin
        job_id = self.application.schedule_service.create_job(
            execution_info.script,
            parameter_values,
            external_model.parse_external_schedule(schedule_config),
            user)
    except (UnavailableScriptException, InvalidScheduleException) as e:
        raise tornado.web.HTTPError(422, reason=str(e))
    except InvalidValueException as e:
        raise tornado.web.HTTPError(422, reason=e.get_user_message())

    self.write(json.dumps({'id': job_id}))
def from_json(conf_path, temp_folder):
    """Build a ServerConfig from the JSON file at *conf_path*.

    A missing file is treated as an empty config ("{}"). *temp_folder*
    supplies the default location for the secret storage file.
    Raises when an 'auth' section is present but cannot be built, or when
    google_oauth is configured without access.allowed_users.
    """
    if os.path.exists(conf_path):
        file_content = file_utils.read_file(conf_path)
    else:
        file_content = "{}"

    config = ServerConfig()

    json_object = custom_json.loads(file_content)

    address = "0.0.0.0"
    port = 5000

    # SSL: key and cert paths are both mandatory when the 'ssl' section exists;
    # enabling SSL also switches the default port to 5443.
    ssl = json_object.get("ssl")
    if ssl is not None:
        key_path = model_helper.read_obligatory(ssl, 'key_path', ' for ssl')
        cert_path = model_helper.read_obligatory(ssl, 'cert_path', ' for ssl')

        config.ssl = True
        config.ssl_key_path = key_path
        config.ssl_cert_path = cert_path
        port = 5443

    if json_object.get("address"):
        address = json_object.get("address")
    config.address = address

    if json_object.get("port"):
        port = json_object.get("port")
    config.port = port

    if json_object.get('title'):
        config.title = json_object.get('title')
    config.enable_script_titles = read_bool_from_config('enable_script_titles', json_object, default=True)

    access_config = json_object.get('access')
    if access_config:
        allowed_users = access_config.get('allowed_users')
        user_groups = model_helper.read_dict(access_config, 'groups')
        user_header_name = access_config.get('user_header_name')
    else:
        allowed_users = None
        user_groups = {}
        user_header_name = None

    auth_config = json_object.get('auth')
    if auth_config:
        config.authenticator = create_authenticator(auth_config, temp_folder)

        auth_type = config.authenticator.auth_type
        if auth_type == 'google_oauth' and allowed_users is None:
            raise Exception('access.allowed_users field is mandatory for ' + auth_type)

        # With an authenticator configured, nothing is trusted/admin implicitly.
        def_trusted_ips = []
        def_admins = []
    else:
        # No auth: localhost (IPv4 and IPv6) is trusted and admin by default.
        def_trusted_ips = ['127.0.0.1', '::1']
        def_admins = def_trusted_ips

    if access_config:
        trusted_ips = strip(
            read_list(access_config, 'trusted_ips', default=def_trusted_ips))
        admin_users = _parse_admin_users(access_config, default_admins=def_admins)
        full_history_users = _parse_history_users(access_config)
        code_editor_users = _parse_code_editor_users(access_config, admin_users)
    else:
        trusted_ips = def_trusted_ips
        admin_users = def_admins
        full_history_users = []
        code_editor_users = def_admins

    security = model_helper.read_dict(json_object, 'security')

    config.allowed_users = _prepare_allowed_users(allowed_users, admin_users, user_groups)
    config.alerts_config = json_object.get('alerts')
    config.callbacks_config = json_object.get('callbacks')
    config.logging_config = parse_logging_config(json_object)
    config.user_groups = user_groups
    config.admin_users = admin_users
    config.full_history_users = full_history_users
    config.code_editor_users = code_editor_users
    config.user_header_name = user_header_name
    config.ip_validator = TrustedIpValidator(trusted_ips)
    config.max_request_size_mb = read_int_from_config('max_request_size', json_object, default=10)
    config.secret_storage_file = json_object.get(
        'secret_storage_file', os.path.join(temp_folder, 'secret.dat'))
    config.xsrf_protection = _parse_xsrf_protection(security)

    return config
def _load_groups(self, groups_file):
    """Parse the user-groups JSON file; returns an empty dict when the file is absent."""
    if not os.path.exists(groups_file):
        return {}

    return custom_json.loads(file_utils.read_file(groups_file))