def _get_task_template(canary):
  """Returns a (template_revision, template_dict) pair.

  Args:
    canary (bool): if True, prefer the canary task template.

  Returns:
    Tuple (template_revision, template_dict):
      template_revision (str): revision of the template, e.g. commit hash.
      template_dict (dict): parsed template, or None if not found.
        May contain $parameters that must be expanded using format_obj().
  """
  revision, text = None, None
  # The canary template is only a preference: fall through to the regular
  # template below when it is absent.
  if canary:
    logging.warning('using canary swarming task template')
    revision, text = component_config.get_self_config(
        'swarming_task_template_canary.json', store_last_good=True)
  if not text:
    revision, text = component_config.get_self_config(
        'swarming_task_template.json', store_last_good=True)
  parsed = json.loads(text)
  # Strip the informational comment key, it is not part of the template.
  parsed.pop('__comment__', None)
  return revision, parsed
def update_template_configs():
  """Updates the local template configuration from the config service.

  Ensures that all config files are at the same path revision.
  """
  template_revision, template_config = config.get_self_config(
      TEMPLATES_CFG_FILENAME, config_pb2.InstanceTemplateConfig,
      store_last_good=True)
  manager_revision, manager_config = config.get_self_config(
      MANAGERS_CFG_FILENAME, config_pb2.InstanceGroupManagerConfig,
      store_last_good=True)

  if template_revision != manager_revision:
    logging.error('Not updating configuration due to revision mismatch.')
    return

  stored_config = Configuration.cached()
  if stored_config.revision == template_revision:
    # Config is up-to-date, so just report validity; the stored config is
    # always valid by construction.
    for cfg_name in (TEMPLATES_CFG_FILENAME, MANAGERS_CFG_FILENAME):
      metrics.config_valid.set(True, fields={'config': cfg_name})
    return

  # Validate both files, accumulating failures so each one gets reported.
  has_errors = False

  context = config.validation_context.Context.logging()
  if template_config:
    validate_template_config(template_config, context)
  if context.result().has_errors:
    logging.warning(
        'Not updating configuration due to errors in templates.cfg')
    has_errors = True

  context = config.validation_context.Context.logging()
  if manager_config:
    validate_manager_config(manager_config, context)
  if context.result().has_errors:
    logging.error(
        'Not updating configuration due to errors in managers.cfg')
    has_errors = True

  if has_errors:
    return

  logging.info('Updating configuration to %s', template_revision)
  stored_config.modify(
      updated_by='',  # this is called from cron, there's no user here
      manager_config=protobuf.text_format.MessageToString(manager_config),
      revision=template_revision,
      template_config=protobuf.text_format.MessageToString(template_config),
  )
def fetch_script(path):
  """Returns (rev, content) of the bot config script at |path|, memoized.

  Results are stored in the module-level |cached| dict, including negative
  results, so the config service is queried at most once per path.

  Args:
    path: config-relative path of the script to fetch.

  Returns:
    Tuple (rev, content); content is None if the script does not exist.
  """
  if path not in cached:
    rev, content = config.get_self_config(path, store_last_good=True)
    if content:
      logging.info('Using bot config script "%s" at rev %s', path, rev)
    # Cache even when the script is missing (content is None): otherwise the
    # lookup below raises KeyError and every later call refetches the config.
    cached[path] = (rev, content)
  return cached[path]
def get_bootstrap(host_url, bootstrap_token=None):
  """Returns the mangled version of the utility script bootstrap.py.

  Content lookup order:
    - the file imported via luci-config, if present;
    - otherwise the default copy embedded in the tree.

  Returns:
    File instance.
  """
  # Build the header injected at the top of the file.
  if bootstrap_token:
    quoted = urllib.quote_plus(bootstrap_token)
    # The token must be URL-safe as-is, since it is embedded verbatim.
    assert bootstrap_token == quoted, bootstrap_token
  header = (
      '#!/usr/bin/env python\n'
      '# coding: utf-8\n'
      'host_url = %r\n'
      'bootstrap_token = %r\n') % (host_url or '', bootstrap_token or '')

  # Prefer the file imported via luci-config when available.
  rev, cfg = config.get_self_config(
      'scripts/bootstrap.py', store_last_good=True)
  if cfg:
    return File(header + cfg, config.config_service_hostname(), None, rev)

  # Fall back to the copy embedded in the tree.
  fallback = os.path.join(ROOT_DIR, 'swarming_bot', 'config', 'bootstrap.py')
  with open(fallback, 'rb') as f:
    return File(header + f.read(), None, None, None)
def _fetch_bot_groups():
  """Loads bots.cfg and parses it into _BotGroups struct.

  If bots.cfg doesn't exist, returns the default config that allows any caller
  from 'bots' IP whitelist to act as a bot.
  """
  # store_last_good=True makes the config component refresh the file in a cron
  # job; here we only read the datastore copy. On the very first call (or if
  # the config doesn't exist) it yields (None, None).
  rev, cfg = config.get_self_config(
      BOTS_CFG_FILENAME, bots_pb2.BotsCfg, store_last_good=True)
  if not cfg:
    return _DEFAULT_BOT_GROUPS

  # components.config promises the stored config is already validated, so
  # errors below are logged and skipped instead of aborting the service.
  logging.info('Using bots.cfg at rev %s', rev)

  direct_matches = {}
  prefix_matches = []
  default_group = None

  for entry in cfg.bot_group:
    group_cfg = _bot_group_proto_to_tuple(entry, cfg.trusted_dimensions or [])

    for bot_id_expr in entry.bot_id:
      try:
        for bot_id in _expand_bot_id_expr(bot_id_expr):
          if bot_id in direct_matches:
            # Should not happen in a validated config. Log the error, but
            # carry on, since dying here would bring the service offline.
            logging.error(
                'Bot "%s" is specified in two different bot groups', bot_id)
            continue
          direct_matches[bot_id] = group_cfg
      except ValueError as exc:
        logging.error('Invalid bot_id expression "%s": %s', bot_id_expr, exc)

    for bot_id_prefix in entry.bot_id_prefix:
      if not bot_id_prefix:
        logging.error('Skipping empty bot_id_prefix')
        continue
      prefix_matches.append((bot_id_prefix, group_cfg))

    # An entry with neither bot_id nor bot_id_prefix is the default group.
    if not (entry.bot_id or entry.bot_id_prefix):
      if default_group is None:
        default_group = group_cfg
      else:
        logging.error('Default bot group is specified twice')

  return _BotGroups(direct_matches, prefix_matches, default_group)
def update_template_configs():
  """Updates the local template configuration from the config service.

  Ensures that all config files are at the same path revision.
  """
  template_revision, template_config = config.get_self_config(
      TEMPLATES_CFG_FILENAME, config_pb2.InstanceTemplateConfig,
      store_last_good=True)
  manager_revision, manager_config = config.get_self_config(
      MANAGERS_CFG_FILENAME, config_pb2.InstanceGroupManagerConfig,
      store_last_good=True)

  if template_revision != manager_revision:
    logging.error('Not updating configuration due to revision mismatch.')
    return

  stored_config = Configuration.cached()
  if stored_config.revision == template_revision:
    # Already at this revision; nothing to update.
    return

  context = config.validation_context.Context.logging()
  if template_config:
    validate_template_config(template_config, context)
  if context.result().has_errors:
    logging.warning(
        'Not updating configuration due to errors in templates.cfg')
    return

  context = config.validation_context.Context.logging()
  if manager_config:
    validate_manager_config(manager_config, context)
  if context.result().has_errors:
    logging.error(
        'Not updating configuration due to errors in managers.cfg')
    return

  logging.info('Updating configuration to %s', template_revision)
  stored_config.modify(
      manager_config=protobuf.text_format.MessageToString(manager_config),
      revision=template_revision,
      template_config=protobuf.text_format.MessageToString(template_config),
  )
def _get_settings():
  """Returns (rev, cfg) where cfg is a parsed SettingsCfg message.

  If config does not exists, returns (None, None).

  Mock this method in tests to inject changes to the defaults.
  """
  # store_last_good=True makes the config component refresh the file in a
  # cron job; this call only reads the last good copy from the datastore.
  return config.get_self_config(
      _SETTINGS_CFG_FILENAME, config_pb2.SettingsCfg, store_last_good=True)
def _fetch_pools_config():
  """Loads pools.cfg and parses it into a _PoolsCfg instance."""
  # store_last_good=True makes the config component refresh the file in a cron
  # job; here we only read the datastore copy. On the very first call (or when
  # the config is absent) it yields (None, None).
  rev, cfg = config.get_self_config(
      POOLS_CFG_FILENAME, pools_pb2.PoolsCfg, store_last_good=True)
  if not cfg:
    if _LOCAL_FAKE_CONFIG:
      # The fake config is only allowed on a local dev server.
      assert utils.is_local_dev_server()
      return _LOCAL_FAKE_CONFIG
    logging.error('There is no pools.cfg, no task is accepted')
    return _PoolsCfg({}, (None, None))

  # The config is already validated at this point; resolve cross references.
  ctx = validation.Context.logging()
  template_map = _resolve_task_template_inclusions(ctx, cfg.task_template)
  deployment_map = _resolve_task_template_deployments(
      ctx, template_map, cfg.task_template_deployment)
  bot_monitorings = _resolve_bot_monitoring(ctx, cfg.bot_monitoring)

  default_isolate = None
  default_cipd = None
  if cfg.HasField('default_external_services'):
    ext = cfg.default_external_services
    default_isolate = IsolateServer(ext.isolate.server, ext.isolate.namespace)
    default_cipd = CipdServer(ext.cipd.server, ext.cipd.client_version)

  pools = {}
  for msg in cfg.pool:
    for name in msg.name:
      pools[name] = PoolConfig(
          name=name,
          rev=rev,
          scheduling_users=frozenset(
              _to_ident(u) for u in msg.schedulers.user),
          scheduling_groups=frozenset(msg.schedulers.group),
          trusted_delegatees={
              _to_ident(d.peer_id): TrustedDelegatee(
                  peer_id=_to_ident(d.peer_id),
                  required_delegation_tags=frozenset(d.require_any_of.tag))
              for d in msg.schedulers.trusted_delegation
          },
          service_accounts=frozenset(msg.allowed_service_account),
          service_accounts_groups=tuple(msg.allowed_service_account_group),
          task_template_deployment=_resolve_deployment(
              ctx, msg, template_map, deployment_map),
          bot_monitoring=bot_monitorings.get(name),
          external_schedulers=_resolve_external_schedulers(
              msg.external_schedulers),
          default_isolate=default_isolate,
          default_cipd=default_cipd)
  return _PoolsCfg(pools, (default_isolate, default_cipd))
def _get_settings():
  """Returns (rev, cfg) where cfg is a parsed SettingsCfg message.

  If config does not exists, returns (None, <cfg with defaults>).

  The config is cached in the datastore.
  """
  # store_last_good=True makes the config component refresh the file in a
  # cron job; this call only reads from the datastore.
  rev, cfg = config.get_self_config(
      SETTINGS_CFG_FILENAME, config_pb2.SettingsCfg, store_last_good=True)
  if not cfg:
    # Missing config: substitute a message with all defaults.
    cfg = config_pb2.SettingsCfg()
  return rev, cfg
def _desugar_template(proto_cfg):
  """Expands metadata_from_file entries into inline metadata.

  Each metadata_from_file value has the form '<key>:<config path>'. The config
  file at <config path> is fetched and '<key>:<content>' is appended to
  template.metadata; metadata_from_file is then cleared.

  Args:
    proto_cfg: template config proto; mutated in place.

  Returns:
    The same proto_cfg (possibly only partially desugared if an entry is
    malformed, matching the original early-return behavior).
  """
  for template in proto_cfg.templates:
    for value in template.metadata_from_file:
      # Values must look like 'key:path'.
      key, sep, path = value.partition(':')
      if not sep:
        logging.error('Invalid metadata_from_file value: %s', value)
        return proto_cfg
      _, content = config.get_self_config(path, None, store_last_good=True)
      if content is None:
        # The referenced config file is missing. Previously the literal string
        # 'None' was inlined as the value; log and skip the entry instead.
        logging.error('Missing config file for metadata_from_file: %s', value)
        continue
      template.metadata.append('%s:%s' % (key, content))
    del template.metadata_from_file[:]
  return proto_cfg
def _get_settings():
  """Returns (rev, cfg) where cfg is a parsed SettingsCfg message.

  The config is cached in the datastore.
  """
  rev, cfg = config.get_self_config(
      SETTINGS_CFG_FILENAME, config_pb2.SettingsCfg, store_last_good=True)
  if not cfg:
    cfg = config_pb2.SettingsCfg()
  if cfg.mp_server:
    # Keep the stored Machine Provider configuration in sync with settings.cfg.
    current_config = machine_provider.MachineProviderConfiguration.cached()
    if cfg.mp_server != current_config.instance_url:
      logging.info('Updating Machine Provider server to %s', cfg.mp_server)
      current_config.modify(instance_url=cfg.mp_server)
  return rev, cfg
def get_bot_config():
  """Returns the current version of bot_config.py.

  Content lookup order:
    - the file imported via luci-config, if present;
    - otherwise the default copy embedded in the tree.

  Returns:
    File instance.
  """
  # Prefer the file imported via luci-config when available.
  rev, cfg = config.get_self_config(
      'scripts/bot_config.py', store_last_good=True)
  if cfg:
    return File(cfg, config.config_service_hostname(), None, rev)

  # Fall back to the copy embedded in the tree.
  fallback = os.path.join(ROOT_DIR, 'swarming_bot', 'config', 'bot_config.py')
  with open(fallback, 'rb') as f:
    return File(f.read(), None, None, None)
def _get_settings():
  """Returns (rev, cfg) where cfg is a parsed SettingsCfg message.

  If config does not exists, returns (None, <cfg with defaults>).

  The config is cached in the datastore.
  """
  rev, cfg = None, None
  try:
    # store_last_good=True makes the config component refresh the file in a
    # cron job; this call only reads from the datastore.
    rev, cfg = config.get_self_config(
        SETTINGS_CFG_FILENAME, config_pb2.SettingsCfg, store_last_good=True)
  except config.CannotLoadConfigError as ex:
    logging.info('Could not load settings.cfg: %s; using defaults', ex)
  if not cfg:
    # Missing/unloadable config: fall back to hardcoded defaults.
    cfg = config_pb2.SettingsCfg(
        reusable_task_age_secs=7 * 24 * 60 * 60,
        bot_death_timeout_secs=10 * 60)
  return rev, cfg
def _fetch_pools_config():
  """Loads pools.cfg and parses it into a _PoolsCfg instance."""
  # store_last_good=True makes the config component refresh the file in a cron
  # job; here we only read the datastore copy. On the very first call (or when
  # the config is absent) it yields (None, None).
  rev, cfg = config.get_self_config(
      POOLS_CFG_FILENAME, pools_pb2.PoolsCfg, store_last_good=True)
  if not cfg:
    return _PoolsCfg({}, False)

  # The config is already validated at this point; resolve cross references.
  ctx = validation.Context.logging()
  template_map = _resolve_task_template_inclusions(ctx, cfg.task_template)
  deployment_map = _resolve_task_template_deployments(
      ctx, template_map, cfg.task_template_deployment)

  pools = {}
  for msg in cfg.pool:
    for name in msg.name:
      pools[name] = PoolConfig(
          name=name,
          rev=rev,
          scheduling_users=frozenset(
              _to_ident(u) for u in msg.schedulers.user),
          scheduling_groups=frozenset(msg.schedulers.group),
          trusted_delegatees={
              _to_ident(d.peer_id): TrustedDelegatee(
                  peer_id=_to_ident(d.peer_id),
                  required_delegation_tags=frozenset(d.require_any_of.tag))
              for d in msg.schedulers.trusted_delegation
          },
          service_accounts=frozenset(msg.allowed_service_account),
          service_accounts_groups=tuple(msg.allowed_service_account_group),
          task_template_deployment=_resolve_deployment(
              ctx, msg, template_map, deployment_map))
  return _PoolsCfg(pools, cfg.forbid_unknown_pools)
def _fetch_and_expand_bots_cfg(ctx):
  """Fetches bots.cfg with all includes from config service, validating it.

  All validation errors are reported through the given validation context.
  Doesn't stop on a first error; parses as much of the config as possible.

  Args:
    ctx: validation.Context to use for accepting validation errors.

  Returns:
    ExpandedBotsCfg if bots.cfg exists. None if there's no bots.cfg file,
    which is not an error.
  """
  # store_last_good=True has the side effect of returning only configs that
  # passed @validation.self_rule validators — the primary reason it's used.
  rev, cfg = config.get_self_config(
      BOTS_CFG_FILENAME, bots_pb2.BotsCfg, store_last_good=True)
  if not cfg:
    logging.info('No bots.cfg found')
    return None

  logging.info('Expanding and validating bots.cfg at rev %s', rev)
  digest = _DigestBuilder()
  digest.update('ROOT_REV', rev)

  # Pull in all referenced bot config scripts.
  _include_bot_config_scripts(cfg, digest, ctx)
  # TODO(vadimsh): Fetch and expand bot lists.
  # TODO(tandrii): Fetch and expand additional bot annotation includes.

  # Revalidate the fully expanded config: the combination may contain errors
  # not detectable when each file is validated individually.
  _validate_bots_cfg(cfg, ctx)

  return ExpandedBotsCfg(cfg, rev, digest.get())
def test_get_self_config(self):
  """Verifies get_self_config returns the stored revision and parsed proto."""
  revision, cfg = config.get_self_config('bar.cfg', test_config_pb2.Config)
  self.assertEqual('deadbeef', revision)
  self.assertEqual('value', cfg.param)
def fetch_script(path):
  """Returns (rev, content) of the config at |path|, memoized in |cached|."""
  if path in cached:
    return cached[path]
  entry = config.get_self_config(path, store_last_good=True)
  cached[path] = entry
  return entry