def _register_policy(self, pack, policy):
    """Create or update the PolicyDB entry for a single policy metadata file.

    :param pack: Name of the pack the policy belongs to.
    :param policy: Path to the policy metadata file.
    """
    content = self._meta_loader.load(policy)

    # Default the "pack" field to the owning pack and reject mismatches.
    declared_pack = content.get('pack', None)
    if not declared_pack:
        content['pack'] = pack
        declared_pack = pack
    if declared_pack != pack:
        raise Exception('Model is in pack "%s" but field "pack" is different: %s' %
                        (pack, declared_pack))

    # Add in "metadata_file" attribute which stores path to the pack metadata file relative to
    # the pack directory
    content['metadata_file'] = content_utils.get_relative_path_to_pack_file(
        pack_ref=pack, file_path=policy, use_pack_cache=True)

    policy_api = PolicyAPI(**content).validate()
    policy_db = PolicyAPI.to_model(policy_api)

    # Upsert: re-use the id of an existing policy with the same name.
    try:
        policy_db.id = Policy.get_by_name(policy_api.name).id
    except StackStormDBObjectNotFoundError:
        LOG.debug('Policy "%s" is not found. Creating new entry.', policy)

    try:
        policy_db = Policy.add_or_update(policy_db)
        LOG.audit('Policy "%s" is updated.', policy_db.ref, extra={'policy_db': policy_db})
    except Exception:
        LOG.exception('Failed to create policy %s.', policy_api.name)
        raise
def _register_policy(self, pack, policy):
    """Register one policy metadata file as a PolicyDB model (create or update)."""
    content = self._meta_loader.load(policy)

    pack_in_file = content.get("pack", None)
    if not pack_in_file:
        content["pack"] = pack
        pack_in_file = pack

    if pack_in_file != pack:
        msg = 'Model is in pack "%s" but field "pack" is different: %s' % (
            pack,
            pack_in_file,
        )
        raise Exception(msg)

    # Store the path to the metadata file relative to the pack directory.
    content["metadata_file"] = content_utils.get_relative_path_to_pack_file(
        pack_ref=pack, file_path=policy, use_pack_cache=True
    )

    policy_api = PolicyAPI(**content)
    policy_api = policy_api.validate()
    policy_db = PolicyAPI.to_model(policy_api)

    try:
        # Upsert: keep the id of an existing policy with the same name.
        existing_db = Policy.get_by_name(policy_api.name)
        policy_db.id = existing_db.id
    except StackStormDBObjectNotFoundError:
        LOG.debug('Policy "%s" is not found. Creating new entry.', policy)

    try:
        policy_db = Policy.add_or_update(policy_db)
        extra = {"policy_db": policy_db}
        LOG.audit('Policy "%s" is updated.', policy_db.ref, extra=extra)
    except Exception:
        LOG.exception("Failed to create policy %s.", policy_api.name)
        raise
def _get_action_alias_db(self, pack, action_alias, ignore_metadata_file_error=False):
    """
    Retrieve ActionAliasDB object.

    :param ignore_metadata_file_error: True to ignore the error when we can't
                                       infer metadata_file attribute (e.g. inside tests).
    :type ignore_metadata_file_error: ``bool``
    """
    content = self._meta_loader.load(action_alias)

    declared_pack = content.get('pack', None)
    if not declared_pack:
        content['pack'] = pack
        declared_pack = pack
    if declared_pack != pack:
        raise Exception('Model is in pack "%s" but field "pack" is different: %s' %
                        (pack, declared_pack))

    # "metadata_file" stores the metadata file path relative to the pack directory.
    # It is only set when the relative path can actually be resolved.
    try:
        metadata_file = content_utils.get_relative_path_to_pack_file(
            pack_ref=pack, file_path=action_alias, use_pack_cache=True)
    except ValueError as e:
        if not ignore_metadata_file_error:
            raise e
    else:
        content['metadata_file'] = metadata_file

    action_alias_api = ActionAliasAPI(**content)
    action_alias_api.validate()
    return ActionAliasAPI.to_model(action_alias_api)
def _register_sensor_from_pack(self, pack, sensor):
    """Register a single sensor metadata file from a pack.

    :param pack: Name of the pack the sensor belongs to.
    :param sensor: Path to the sensor metadata file.
    :return: Tuple of (sensor_model, altered) where ``altered`` indicates whether
             pack override data modified the sensor definition.
    """
    sensor_metadata_file_path = sensor

    LOG.debug("Loading sensor from %s.", sensor_metadata_file_path)
    content = self._meta_loader.load(file_path=sensor_metadata_file_path)

    pack_field = content.get("pack", None)
    if not pack_field:
        content["pack"] = pack
        pack_field = pack
    if pack_field != pack:
        raise Exception(
            'Model is in pack "%s" but field "pack" is different: %s'
            % (pack, pack_field)
        )

    entry_point = content.get("entry_point", None)
    if not entry_point:
        raise ValueError("Sensor definition missing entry_point")

    # Add in "metadata_file" attribute which stores path to the pack metadata file
    # relative to the pack directory
    metadata_file = content_utils.get_relative_path_to_pack_file(
        pack_ref=pack, file_path=sensor, use_pack_cache=True
    )
    content["metadata_file"] = metadata_file

    # Pass override information
    altered = self._override_loader.override(pack, "sensors", content)

    # The entry point is resolved relative to the directory holding the metadata file.
    sensors_dir = os.path.dirname(sensor_metadata_file_path)
    sensor_file_path = os.path.join(sensors_dir, entry_point)
    artifact_uri = "file://%s" % (sensor_file_path)
    content["artifact_uri"] = artifact_uri
    content["entry_point"] = entry_point

    sensor_api = SensorTypeAPI(**content)
    sensor_model = SensorTypeAPI.to_model(sensor_api)

    sensor_types = SensorType.query(pack=sensor_model.pack, name=sensor_model.name)
    if len(sensor_types) >= 1:
        sensor_type = sensor_types[0]
        LOG.debug(
            "Found existing sensor id:%s with name:%s. Will update it.",
            sensor_type.id,
            sensor_type.name,
        )
        sensor_model.id = sensor_type.id

    try:
        sensor_model = SensorType.add_or_update(sensor_model)
    except Exception:
        # Bug fix: this was a bare "except:" which also swallowed SystemExit and
        # KeyboardInterrupt. Best-effort registration semantics are preserved.
        LOG.exception("Failed creating sensor model for %s", sensor)

    return sensor_model, altered
def test_get_relative_path_to_pack_file(self):
    """Verify get_relative_path_to_pack_file returns pack-relative paths for
    in-pack files and raises ValueError for paths outside the pack directory.
    """
    packs_base_paths = get_fixtures_packs_base_path()

    pack_ref = 'dummy_pack_1'

    # 1. Valid paths
    file_path = os.path.join(packs_base_paths, 'dummy_pack_1/pack.yaml')
    result = get_relative_path_to_pack_file(pack_ref=pack_ref, file_path=file_path)
    self.assertEqual(result, 'pack.yaml')

    file_path = os.path.join(packs_base_paths, 'dummy_pack_1/actions/action.meta.yaml')
    result = get_relative_path_to_pack_file(pack_ref=pack_ref, file_path=file_path)
    self.assertEqual(result, 'actions/action.meta.yaml')

    file_path = os.path.join(packs_base_paths, 'dummy_pack_1/actions/lib/foo.py')
    result = get_relative_path_to_pack_file(pack_ref=pack_ref, file_path=file_path)
    self.assertEqual(result, 'actions/lib/foo.py')

    # Already relative
    file_path = 'actions/lib/foo2.py'
    result = get_relative_path_to_pack_file(pack_ref=pack_ref, file_path=file_path)
    self.assertEqual(result, 'actions/lib/foo2.py')

    # 2. Invalid path - outside pack directory
    # NOTE: assertRaisesRegexp was deprecated since Python 3.2 and removed in
    # Python 3.12 - use assertRaisesRegex instead.
    expected_msg = r'file_path (.*?) is not located inside the pack directory (.*?)'

    file_path = os.path.join(packs_base_paths, 'dummy_pack_2/actions/lib/foo.py')
    self.assertRaisesRegex(ValueError, expected_msg, get_relative_path_to_pack_file,
                           pack_ref=pack_ref, file_path=file_path)

    file_path = '/tmp/foo/bar.py'
    self.assertRaisesRegex(ValueError, expected_msg, get_relative_path_to_pack_file,
                           pack_ref=pack_ref, file_path=file_path)

    file_path = os.path.join(packs_base_paths, '../dummy_pack_1/pack.yaml')
    self.assertRaisesRegex(ValueError, expected_msg, get_relative_path_to_pack_file,
                           pack_ref=pack_ref, file_path=file_path)

    file_path = os.path.join(packs_base_paths, '../../dummy_pack_1/pack.yaml')
    self.assertRaisesRegex(ValueError, expected_msg, get_relative_path_to_pack_file,
                           pack_ref=pack_ref, file_path=file_path)
def _update_pack_model(self, pack_ref, data_files, written_file_paths):
    """
    Update PackDB models (update files list).
    """
    # Convert the newly written paths into paths relative to the pack directory.
    relative_paths = [
        get_relative_path_to_pack_file(pack_ref=pack_ref, file_path=path)
        for path in written_file_paths
    ]

    pack_db = Pack.get_by_ref(pack_ref)

    # Merge new paths into the existing file list, de-duplicating via a set.
    merged_files = set(pack_db.files)
    merged_files.update(relative_paths)
    pack_db.files = list(merged_files)
    pack_db = Pack.add_or_update(pack_db)

    return pack_db
def _register_sensor_from_pack(self, pack, sensor):
    """Register a single sensor metadata file from a pack.

    :param pack: Name of the pack the sensor belongs to.
    :param sensor: Path to the sensor metadata file.
    :return: The created or updated SensorType model.
    """
    sensor_metadata_file_path = sensor

    LOG.debug('Loading sensor from %s.', sensor_metadata_file_path)
    content = self._meta_loader.load(file_path=sensor_metadata_file_path)

    pack_field = content.get('pack', None)
    if not pack_field:
        content['pack'] = pack
        pack_field = pack
    if pack_field != pack:
        raise Exception('Model is in pack "%s" but field "pack" is different: %s' %
                        (pack, pack_field))

    entry_point = content.get('entry_point', None)
    if not entry_point:
        raise ValueError('Sensor definition missing entry_point')

    # Add in "metadata_file" attribute which stores path to the pack metadata file
    # relative to the pack directory
    metadata_file = content_utils.get_relative_path_to_pack_file(pack_ref=pack,
                                                                file_path=sensor,
                                                                use_pack_cache=True)
    content['metadata_file'] = metadata_file

    # The entry point is resolved relative to the directory holding the metadata file.
    sensors_dir = os.path.dirname(sensor_metadata_file_path)
    sensor_file_path = os.path.join(sensors_dir, entry_point)
    artifact_uri = 'file://%s' % (sensor_file_path)
    content['artifact_uri'] = artifact_uri
    content['entry_point'] = entry_point

    sensor_api = SensorTypeAPI(**content)
    sensor_model = SensorTypeAPI.to_model(sensor_api)

    sensor_types = SensorType.query(pack=sensor_model.pack, name=sensor_model.name)
    if len(sensor_types) >= 1:
        sensor_type = sensor_types[0]
        LOG.debug('Found existing sensor id:%s with name:%s. Will update it.',
                  sensor_type.id, sensor_type.name)
        sensor_model.id = sensor_type.id

    try:
        sensor_model = SensorType.add_or_update(sensor_model)
    except Exception:
        # Bug fix: this was a bare "except:" which also swallowed SystemExit and
        # KeyboardInterrupt. Best-effort registration semantics are preserved.
        LOG.exception('Failed creating sensor model for %s', sensor)

    return sensor_model
def _get_action_alias_db(self, pack, action_alias, ignore_metadata_file_error=False):
    """
    Retrieve ActionAliasDB object.

    :param ignore_metadata_file_error: True to ignore the error when we can't
                                       infer metadata_file attribute (e.g. inside tests).
    :type ignore_metadata_file_error: ``bool``
    """
    content = self._meta_loader.load(action_alias)

    pack_in_meta = content.get("pack", None)
    if not pack_in_meta:
        content["pack"] = pack
        pack_in_meta = pack

    if pack_in_meta != pack:
        raise Exception(
            'Model is in pack "%s" but field "pack" is different: %s'
            % (pack, pack_in_meta)
        )

    # "metadata_file" stores the metadata file path relative to the pack directory.
    # It is only set when the relative path can actually be resolved.
    try:
        metadata_file = content_utils.get_relative_path_to_pack_file(
            pack_ref=pack, file_path=action_alias, use_pack_cache=True
        )
    except ValueError as e:
        if not ignore_metadata_file_error:
            raise e
    else:
        content["metadata_file"] = metadata_file

    # Pass override information
    altered = self._override_loader.override(pack, "aliases", content)

    action_alias_api = ActionAliasAPI(**content)
    action_alias_api.validate()
    action_alias_db = ActionAliasAPI.to_model(action_alias_api)

    return action_alias_db, altered
def _register_action(self, pack, action):
    """Validate a single action metadata file and create/update its ActionDB entry.

    :param pack: Name of the pack the action belongs to.
    :param action: Path to the action metadata file.
    """
    content = self._meta_loader.load(action)
    pack_field = content.get('pack', None)
    if not pack_field:
        content['pack'] = pack
        pack_field = pack
    if pack_field != pack:
        raise Exception('Model is in pack "%s" but field "pack" is different: %s' %
                        (pack, pack_field))

    # Add in "metadata_file" attribute which stores path to the pack metadata file
    # relative to the pack directory
    metadata_file = content_utils.get_relative_path_to_pack_file(pack_ref=pack,
                                                                file_path=action,
                                                                use_pack_cache=True)
    content['metadata_file'] = metadata_file

    action_api = ActionAPI(**content)

    try:
        action_api.validate()
    except jsonschema.ValidationError as e:
        # We throw a more user-friendly exception on invalid parameter name
        msg = six.text_type(e)

        is_invalid_parameter_name = 'does not match any of the regexes: ' in msg

        if is_invalid_parameter_name:
            match = re.search(r'\'(.+?)\' does not match any of the regexes', msg)

            if match:
                parameter_name = match.groups()[0]
            else:
                parameter_name = 'unknown'

            # Bug fix: the character class previously read "[a-zA-Z0-0_]" which
            # incorrectly described the set of valid parameter name characters.
            new_msg = ('Parameter name "%s" is invalid. Valid characters for parameter name '
                       'are [a-zA-Z0-9_].' % (parameter_name))
            new_msg += '\n\n' + msg
            raise jsonschema.ValidationError(new_msg)
        raise e

    # Use in-memory cached RunnerTypeDB objects to reduce load on the database
    if self._use_runners_cache:
        runner_type_db = self._runner_type_db_cache.get(action_api.runner_type, None)

        if not runner_type_db:
            runner_type_db = action_validator.get_runner_model(action_api)
            self._runner_type_db_cache[action_api.runner_type] = runner_type_db
    else:
        runner_type_db = None

    action_validator.validate_action(action_api, runner_type_db=runner_type_db)
    model = ActionAPI.to_model(action_api)

    action_ref = ResourceReference.to_string_reference(pack=pack, name=str(content['name']))
    existing = action_utils.get_action_by_ref(action_ref)
    if not existing:
        LOG.debug('Action %s not found. Creating new one with: %s', action_ref, content)
    else:
        LOG.debug('Action %s found. Will be updated from: %s to: %s',
                  action_ref, existing, model)
        model.id = existing.id

    try:
        model = Action.add_or_update(model)
        extra = {'action_db': model}
        LOG.audit('Action updated. Action %s from %s.', model, action, extra=extra)
    except Exception:
        LOG.exception('Failed to write action to db %s.', model.name)
        raise
def _register_action(self, pack, action):
    """Validate a single action metadata file and create/update its ActionDB entry.

    :param pack: Name of the pack the action belongs to.
    :param action: Path to the action metadata file.
    """
    content = self._meta_loader.load(action)
    pack_field = content.get("pack", None)
    if not pack_field:
        content["pack"] = pack
        pack_field = pack
    if pack_field != pack:
        raise Exception(
            'Model is in pack "%s" but field "pack" is different: %s'
            % (pack, pack_field)
        )

    # Add in "metadata_file" attribute which stores path to the pack metadata file
    # relative to the pack directory
    metadata_file = content_utils.get_relative_path_to_pack_file(
        pack_ref=pack, file_path=action, use_pack_cache=True
    )
    content["metadata_file"] = metadata_file

    action_api = ActionAPI(**content)

    try:
        action_api.validate()
    except jsonschema.ValidationError as e:
        # We throw a more user-friendly exception on invalid parameter name
        msg = six.text_type(e)

        is_invalid_parameter_name = "does not match any of the regexes: " in msg

        if is_invalid_parameter_name:
            match = re.search(r"'(.+?)' does not match any of the regexes", msg)

            if match:
                parameter_name = match.groups()[0]
            else:
                parameter_name = "unknown"

            # Bug fix: the character class previously read "[a-zA-Z0-0_]" which
            # incorrectly described the set of valid parameter name characters.
            new_msg = (
                'Parameter name "%s" is invalid. Valid characters for parameter name '
                "are [a-zA-Z0-9_]." % (parameter_name)
            )
            new_msg += "\n\n" + msg
            raise jsonschema.ValidationError(new_msg)
        raise e

    # Use in-memory cached RunnerTypeDB objects to reduce load on the database
    if self._use_runners_cache:
        runner_type_db = self._runner_type_db_cache.get(action_api.runner_type, None)

        if not runner_type_db:
            runner_type_db = action_validator.get_runner_model(action_api)
            self._runner_type_db_cache[action_api.runner_type] = runner_type_db
    else:
        runner_type_db = None

    action_validator.validate_action(action_api, runner_type_db=runner_type_db)
    model = ActionAPI.to_model(action_api)

    action_ref = ResourceReference.to_string_reference(
        pack=pack, name=str(content["name"])
    )

    # NOTE: Here we only retrieve existing object to perform an upsert if it already
    # exists in the database. To do that, we only need access to the "id" attribute
    # (and ref and pack for our ActionDB abstraction). Retrieving only those fields
    # is fast and much more efficient, especially for actions like aws pack ones
    # which contain a lot of parameters.
    existing = action_utils.get_action_by_ref(
        action_ref, only_fields=["id", "ref", "pack"]
    )
    if not existing:
        LOG.debug("Action %s not found. Creating new one with: %s", action_ref, content)
    else:
        LOG.debug(
            "Action %s found. Will be updated from: %s to: %s",
            action_ref,
            existing,
            model,
        )
        model.id = existing.id

    try:
        model = Action.add_or_update(model)
        extra = {"action_db": model}
        LOG.audit("Action updated. Action %s from %s.", model, action, extra=extra)
    except Exception:
        LOG.exception("Failed to write action to db %s.", model.name)
        raise
def _register_rules_from_pack(self, pack, rules):
    """Register all rule metadata files from a pack.

    :param pack: Name of the pack the rules belong to.
    :param rules: List of paths to rule metadata files.
    :return: Number of rules successfully registered.
    """
    registered_count = 0

    # TODO: Refactor this monstrosity
    for rule in rules:
        LOG.debug("Loading rule from %s.", rule)
        try:
            content = self._meta_loader.load(rule)
            pack_field = content.get("pack", None)
            if not pack_field:
                content["pack"] = pack
                pack_field = pack
            if pack_field != pack:
                raise Exception(
                    'Model is in pack "%s" but field "pack" is different: %s'
                    % (pack, pack_field)
                )

            metadata_file = content_utils.get_relative_path_to_pack_file(
                pack_ref=pack, file_path=rule, use_pack_cache=True
            )
            content["metadata_file"] = metadata_file

            rule_api = RuleAPI(**content)
            rule_api.validate()
            rule_db = RuleAPI.to_model(rule_api)

            # Migration from rule without pack to rule with pack.
            # There might be a rule with same name but in pack `default`
            # generated in migration script. In this case, we want to
            # delete so we don't have duplicates.
            if pack_field != DEFAULT_PACK_NAME:
                try:
                    rule_ref = ResourceReference.to_string_reference(
                        name=content["name"], pack=DEFAULT_PACK_NAME
                    )
                    LOG.debug(
                        "Looking for rule %s in pack %s",
                        content["name"],
                        DEFAULT_PACK_NAME,
                    )
                    existing = Rule.get_by_ref(rule_ref)
                    LOG.debug("Existing = %s", existing)
                    if existing:
                        LOG.debug("Found rule in pack default: %s; Deleting.", rule_ref)
                        Rule.delete(existing)
                except Exception:
                    # Bug fix: was a bare "except:" which also swallowed
                    # SystemExit / KeyboardInterrupt.
                    LOG.exception(
                        "Exception deleting rule from %s pack.", DEFAULT_PACK_NAME
                    )

            # Bug fix: "existing" could previously be unbound (or left stale from
            # the default-pack lookup above) when the lookup below raises, causing
            # a NameError / wrong cleanup at the "if existing:" check further down.
            existing = None

            try:
                rule_ref = ResourceReference.to_string_reference(
                    name=content["name"], pack=content["pack"]
                )
                existing = Rule.get_by_ref(rule_ref)
                if existing:
                    rule_db.id = existing.id
                    LOG.debug(
                        "Found existing rule: %s with id: %s", rule_ref, existing.id
                    )
            except StackStormDBObjectNotFoundError:
                LOG.debug("Rule %s not found. Creating new one.", rule)

            try:
                rule_db = Rule.add_or_update(rule_db)
                increment_trigger_ref_count(rule_api=rule_api)
                extra = {"rule_db": rule_db}
                LOG.audit("Rule updated. Rule %s from %s.", rule_db, rule, extra=extra)
            except Exception:
                LOG.exception("Failed to create rule %s.", rule_api.name)

            # If there was an existing rule then the ref count was updated in
            # to_model so it needs to be adjusted down here. Also, update could
            # lead to removal of a Trigger so now is a good time for book-keeping.
            if existing:
                cleanup_trigger_db_for_rule(existing)
        except Exception as e:
            if self._fail_on_failure:
                msg = 'Failed to register rule "%s" from pack "%s": %s' % (
                    rule,
                    pack,
                    six.text_type(e),
                )
                raise ValueError(msg)

            LOG.exception("Failed registering rule from %s.", rule)
        else:
            registered_count += 1

    return registered_count
def _register_action(self, pack, action):
    """Validate a single action metadata file and create/update its ActionDB entry.

    :param pack: Name of the pack the action belongs to.
    :param action: Path to the action metadata file.
    """
    content = self._meta_loader.load(action)
    pack_field = content.get('pack', None)
    if not pack_field:
        content['pack'] = pack
        pack_field = pack
    if pack_field != pack:
        raise Exception(
            'Model is in pack "%s" but field "pack" is different: %s' %
            (pack, pack_field))

    # Add in "metadata_file" attribute which stores path to the pack metadata file
    # relative to the pack directory
    metadata_file = content_utils.get_relative_path_to_pack_file(
        pack_ref=pack, file_path=action, use_pack_cache=True)
    content['metadata_file'] = metadata_file

    action_api = ActionAPI(**content)

    try:
        action_api.validate()
    except jsonschema.ValidationError as e:
        # We throw a more user-friendly exception on invalid parameter name
        msg = six.text_type(e)

        is_invalid_parameter_name = 'does not match any of the regexes: ' in msg

        if is_invalid_parameter_name:
            match = re.search(
                r'\'(.+?)\' does not match any of the regexes', msg)

            if match:
                parameter_name = match.groups()[0]
            else:
                parameter_name = 'unknown'

            # Bug fix: the character class previously read "[a-zA-Z0-0_]" which
            # incorrectly described the set of valid parameter name characters.
            new_msg = (
                'Parameter name "%s" is invalid. Valid characters for parameter name '
                'are [a-zA-Z0-9_].' % (parameter_name))
            new_msg += '\n\n' + msg
            raise jsonschema.ValidationError(new_msg)
        raise e

    # Use in-memory cached RunnerTypeDB objects to reduce load on the database
    if self._use_runners_cache:
        runner_type_db = self._runner_type_db_cache.get(
            action_api.runner_type, None)

        if not runner_type_db:
            runner_type_db = action_validator.get_runner_model(action_api)
            self._runner_type_db_cache[action_api.runner_type] = runner_type_db
    else:
        runner_type_db = None

    action_validator.validate_action(action_api, runner_type_db=runner_type_db)
    model = ActionAPI.to_model(action_api)

    action_ref = ResourceReference.to_string_reference(
        pack=pack, name=str(content['name']))
    existing = action_utils.get_action_by_ref(action_ref)
    if not existing:
        LOG.debug('Action %s not found. Creating new one with: %s',
                  action_ref, content)
    else:
        LOG.debug('Action %s found. Will be updated from: %s to: %s',
                  action_ref, existing, model)
        model.id = existing.id

    try:
        model = Action.add_or_update(model)
        extra = {'action_db': model}
        LOG.audit('Action updated. Action %s from %s.', model, action, extra=extra)
    except Exception:
        LOG.exception('Failed to write action to db %s.', model.name)
        raise
def _register_rules_from_pack(self, pack, rules):
    """Register all rule metadata files from a pack.

    :param pack: Name of the pack the rules belong to.
    :param rules: List of paths to rule metadata files.
    :return: Number of rules successfully registered.
    """
    registered_count = 0

    # TODO: Refactor this monstrosity
    for rule in rules:
        LOG.debug('Loading rule from %s.', rule)
        try:
            content = self._meta_loader.load(rule)
            pack_field = content.get('pack', None)
            if not pack_field:
                content['pack'] = pack
                pack_field = pack
            if pack_field != pack:
                raise Exception('Model is in pack "%s" but field "pack" is different: %s' %
                                (pack, pack_field))

            metadata_file = content_utils.get_relative_path_to_pack_file(
                pack_ref=pack, file_path=rule, use_pack_cache=True)
            content['metadata_file'] = metadata_file

            rule_api = RuleAPI(**content)
            rule_api.validate()
            rule_db = RuleAPI.to_model(rule_api)

            # Migration from rule without pack to rule with pack.
            # There might be a rule with same name but in pack `default`
            # generated in migration script. In this case, we want to
            # delete so we don't have duplicates.
            if pack_field != DEFAULT_PACK_NAME:
                try:
                    rule_ref = ResourceReference.to_string_reference(
                        name=content['name'], pack=DEFAULT_PACK_NAME)
                    LOG.debug('Looking for rule %s in pack %s',
                              content['name'], DEFAULT_PACK_NAME)
                    existing = Rule.get_by_ref(rule_ref)
                    LOG.debug('Existing = %s', existing)
                    if existing:
                        LOG.debug('Found rule in pack default: %s; Deleting.', rule_ref)
                        Rule.delete(existing)
                except Exception:
                    # Bug fix: was a bare "except:" which also swallowed
                    # SystemExit / KeyboardInterrupt.
                    LOG.exception('Exception deleting rule from %s pack.',
                                  DEFAULT_PACK_NAME)

            # Bug fix: "existing" could previously be unbound (or left stale from
            # the default-pack lookup above) when the lookup below raises, causing
            # a NameError / wrong cleanup at the "if existing:" check further down.
            existing = None

            try:
                rule_ref = ResourceReference.to_string_reference(
                    name=content['name'], pack=content['pack'])
                existing = Rule.get_by_ref(rule_ref)
                if existing:
                    rule_db.id = existing.id
                    LOG.debug('Found existing rule: %s with id: %s',
                              rule_ref, existing.id)
            except StackStormDBObjectNotFoundError:
                LOG.debug('Rule %s not found. Creating new one.', rule)

            try:
                rule_db = Rule.add_or_update(rule_db)
                increment_trigger_ref_count(rule_api=rule_api)
                extra = {'rule_db': rule_db}
                LOG.audit('Rule updated. Rule %s from %s.', rule_db, rule, extra=extra)
            except Exception:
                LOG.exception('Failed to create rule %s.', rule_api.name)

            # If there was an existing rule then the ref count was updated in
            # to_model so it needs to be adjusted down here. Also, update could
            # lead to removal of a Trigger so now is a good time for book-keeping.
            if existing:
                cleanup_trigger_db_for_rule(existing)
        except Exception as e:
            if self._fail_on_failure:
                msg = ('Failed to register rule "%s" from pack "%s": %s' %
                       (rule, pack, six.text_type(e)))
                raise ValueError(msg)

            LOG.exception('Failed registering rule from %s.', rule)
        else:
            registered_count += 1

    return registered_count