def load_policy(
    self,
    data,
    config=None,
    session_factory=None,
    validate=C7N_VALIDATE,
    output_dir=None,
    cache=False,
):
    """Construct and validate a c7n Policy from a raw policy mapping.

    :param data: single policy definition dict.
    :param config: optional dict of execution options merged into the Config.
    :param session_factory: factory passed through to the Policy.
    :param validate: when true, check *data* against the generated JSON schema.
    :param output_dir: directory for policy output; a temp dir is created
        when not supplied.
    :param cache: when true and a temp dir is used, enable a file cache there.
    :raises: the first schema validation error, or whatever ``Policy.validate``
        raises for semantically invalid policies.
    :return: a validated :class:`c7n.policy.Policy`.
    """
    if validate:
        # The generated schema is expensive to build; memoize on the instance.
        if not self.custodian_schema:
            self.custodian_schema = generate()
        errors = schema_validate({"policies": [data]}, self.custodian_schema)
        if errors:
            raise errors[0]

    config = config or {}
    if not output_dir:
        temp_dir = self.get_temp_dir()
        config["output_dir"] = temp_dir
        if cache:
            config["cache"] = os.path.join(temp_dir, "c7n.cache")
            config["cache_period"] = 300
    else:
        # Bug fix: a caller-supplied output_dir was previously ignored and
        # never written into the execution config.
        config["output_dir"] = output_dir
    conf = Config.empty(**config)
    p = policy.Policy(data, conf, session_factory)
    p.validate()
    return p
def validate(options):
    """Validate the policy files named in ``options.configs``.

    Each file is checked against the generated JSON schema, for duplicate
    policy names across all files, and — when structurally valid — by
    instantiating and validating every policy under a dry-run config.

    Exits the process with status 1 if any file fails validation; raises
    ``ValueError`` for a missing file or an unsupported extension.
    """
    load_resources()
    if len(options.configs) < 1:
        log.error('no config files specified')
        sys.exit(1)
    used_policy_names = set()
    schm = schema.generate()
    any_invalid = False
    for config_file in options.configs:
        config_file = os.path.expanduser(config_file)
        if not os.path.exists(config_file):
            raise ValueError("Invalid path for config %r" % config_file)
        options.dryrun = True
        fmt = config_file.rsplit('.', 1)[-1]
        with open(config_file) as fh:
            if fmt in ('yml', 'yaml'):
                data = yaml.safe_load(fh.read())
            elif fmt in ('json',):
                data = json.load(fh)
            else:
                log.error("The config file must end in .json, .yml or .yaml.")
                raise ValueError(
                    "The config file must end in .json, .yml or .yaml.")
        # Bug fix: errors were previously accumulated across files
        # (errors += ...), so one invalid file marked every later file
        # invalid and skipped its policy-level validation. Collect per file.
        errors = schema.validate(data, schm)
        conf_policy_names = {
            p.get('name', 'unknown') for p in data.get('policies', ())}
        dupes = conf_policy_names.intersection(used_policy_names)
        if len(dupes) >= 1:
            errors.append(ValueError(
                "Only one policy with a given name allowed, duplicates: %s" % (
                    ", ".join(dupes))))
        used_policy_names = used_policy_names.union(conf_policy_names)
        if not errors:
            # Structural checks passed; run full policy validation dry-run.
            null_config = Config.empty(dryrun=True, account_id='na', region='na')
            for p in data.get('policies', ()):
                try:
                    policy = Policy(p, null_config, Bag())
                    policy.validate()
                except Exception as e:
                    errors.append(
                        "Policy: %s is invalid: %s" % (
                            p.get('name', 'unknown'), e))
        if not errors:
            log.info("Configuration valid: {}".format(config_file))
            continue
        any_invalid = True
        log.error("Configuration invalid: {}".format(config_file))
        for e in errors:
            log.error("%s" % e)
    if any_invalid:
        sys.exit(1)
def validate(options):
    """Validate policy config files (legacy CLI entry point).

    Supports the old ``-c`` single-config option by folding it into
    ``options.configs``. Exits with status 2 when no configs are given,
    and status 1 when any file fails validation; raises ``ValueError``
    for a missing file or an unsupported extension.
    """
    load_resources()
    if options.config is not None:
        # support the old -c option
        options.configs.append(options.config)
    if len(options.configs) < 1:
        # no configs to test
        # We don't have the parser object, so fake ArgumentParser.error
        print('custodian validate: error: no config files specified',
              file=sys.stderr)
        sys.exit(2)
    used_policy_names = set()
    schm = schema.generate()
    any_invalid = False
    for config_file in options.configs:
        config_file = os.path.expanduser(config_file)
        if not os.path.exists(config_file):
            raise ValueError("Invalid path for config %r" % config_file)
        options.dryrun = True
        # Renamed from `format`, which shadowed the builtin.
        fmt = config_file.rsplit('.', 1)[-1]
        with open(config_file) as fh:
            if fmt in ('yml', 'yaml'):
                data = yaml.safe_load(fh.read())
            elif fmt in ('json',):
                data = json.load(fh)
            else:
                # Bug fix: an unrecognized extension previously fell through
                # and crashed with NameError on the undefined `data`.
                raise ValueError(
                    "The config file must end in .json, .yml or .yaml.")
        errors = schema.validate(data, schm)
        # NOTE(review): p['name'] raises KeyError for a nameless policy,
        # unlike the p.get('name', 'unknown') used below — confirm intended.
        conf_policy_names = {p['name'] for p in data.get('policies', ())}
        dupes = conf_policy_names.intersection(used_policy_names)
        if len(dupes) >= 1:
            errors.append(ValueError(
                "Only one policy with a given name allowed, duplicates: %s" % (
                    ", ".join(dupes))))
        used_policy_names = used_policy_names.union(conf_policy_names)
        if not errors:
            null_config = Bag(
                dryrun=True, log_group=None, cache=None, assume_role="na")
            for p in data.get('policies', ()):
                try:
                    Policy(p, null_config, Bag())
                except Exception as e:
                    errors.append(
                        "Policy: %s is invalid: %s" % (
                            p.get('name', 'unknown'), e))
        if not errors:
            log.info("Configuration valid: {}".format(config_file))
            continue
        any_invalid = True
        log.error("Configuration invalid: {}".format(config_file))
        for e in errors:
            log.error(" %s" % e)
    # Bug fix: `errors` was reassigned per file, so previously only the
    # LAST file's errors determined the exit status.
    if any_invalid:
        sys.exit(1)
def test_instance_age(self):
    """An ec2 policy using the instance-age filter validates cleanly."""
    policy = {
        'name': 'ancient-instances',
        'resource': 'ec2',
        'query': [{'instance-state-name': 'running'}],
        'filters': [{'days': 60, 'type': 'instance-age'}],
    }
    ec2_schema = generate(['ec2'])
    found = list(Validator(ec2_schema).iter_errors({'policies': [policy]}))
    self.assertEqual(len(found), 0)
def test_runtime(self):
    """Known lambda runtimes validate; an unknown runtime yields one error."""
    def make_policy(runtime):
        return {
            'policies': [{
                'name': 'test',
                'resource': 's3',
                'mode': {
                    'execution-options': {'metrics_enabled': False},
                    'type': 'periodic',
                    'schedule': 'xyz',
                    'runtime': runtime}}]}

    def count_errors(runtime):
        # Fresh schema/validator per call, matching the original behavior.
        return len(list(Validator(generate()).iter_errors(make_policy(runtime))))

    self.assertEqual(count_errors('python2.7'), 0)
    self.assertEqual(count_errors('python3.6'), 0)
    self.assertEqual(count_errors('python4.5'), 1)
def test_value_filter_short_form_invalid(self):
    # With the standard `valuekv` short form disabled in the schema, a
    # tag short-form filter must produce exactly one validation error.
    # NOTE(review): `resource` is hard-coded to 'elb' while the schema is
    # generated per-rtype — confirm this shouldn't be `rtype` instead.
    for rtype in ["elb", "rds", "ec2"]:
        data = {
            'policies': [
                {'name': 'instance-policy',
                 'resource': 'elb',
                 'filters': [
                     {"tag:Role": "webserver"}]}
            ]}
        schema = generate([rtype])
        # Disable standard value short form
        schema['definitions']['filters']['valuekv'] = {'type': 'number'}
        validator = Validator(schema)
        errors = list(validator.iter_errors(data))
        self.assertEqual(len(errors), 1)
def test_instance_age(self):
    """instance-age filter on a running-instance ec2 query validates."""
    policy = {
        "name": "ancient-instances",
        "resource": "ec2",
        "query": [{"instance-state-name": "running"}],
        "filters": [{"days": 60, "type": "instance-age"}],
    }
    problems = list(
        Validator(generate(["ec2"])).iter_errors({"policies": [policy]}))
    self.assertEqual(len(problems), 0)
def test_mark_for_op(self):
    """mark-for-op action with op and days validates for ebs."""
    mark_action = {"type": "mark-for-op", "op": "delete", "days": 30}
    document = {
        "policies": [{
            "name": "ebs-mark-delete",
            "resource": "ebs",
            "filters": [],
            "actions": [mark_action],
        }]
    }
    errs = list(Validator(generate(["ebs"])).iter_errors(document))
    self.assertEqual(len(errs), 0)
def test_mark_for_op(self):
    """mark-for-op with op/days on ebs passes schema validation."""
    doc = {
        'policies': [{
            'name': 'ebs-mark-delete',
            'resource': 'ebs',
            'filters': [],
            'actions': [
                {'type': 'mark-for-op', 'op': 'delete', 'days': 30}],
        }]
    }
    validator = Validator(generate(['ebs']))
    self.assertEqual(len(list(validator.iter_errors(doc))), 0)
def test_offhours_stop(self):
    """An ec2 offhour policy (maid_downtime tag, hour key) validates."""
    filters = [
        {'tag:aws:autoscaling:groupName': 'absent'},
        {'type': 'offhour',
         'tag': 'maid_downtime',
         'default_tz': 'et',
         'hour': 19},
    ]
    doc = {'policies': [{
        'name': 'ec2-offhours-stop',
        'resource': 'ec2',
        'filters': filters}]}
    found = list(Validator(generate(['ec2'])).iter_errors(doc))
    self.assertEqual(len(found), 0)
def test_value_filter_short_form_invalid(self):
    # With the standard `valuekv` short form disabled in the schema, a
    # tag short-form filter must produce exactly one validation error.
    # NOTE(review): `resource` is hard-coded to "elb" while the schema is
    # generated for each rtype — confirm it shouldn't be rtype.
    for rtype in ["elb", "rds", "ec2"]:
        data = {
            "policies": [
                {
                    "name": "instance-policy",
                    "resource": "elb",
                    "filters": [{"tag:Role": "webserver"}],
                }
            ]
        }
        schema = generate([rtype])
        # Disable standard value short form
        schema["definitions"]["filters"]["valuekv"] = {"type": "number"}
        validator = Validator(schema)
        errors = list(validator.iter_errors(data))
        self.assertEqual(len(errors), 1)
def test_runtime(self):
    """Periodic-mode runtime accepts known python runtimes only."""

    # Idiom fix (PEP 8 E731): the original assigned lambdas to names and
    # silenced the lint warning with NOQA; use real defs instead.
    def policy_data(runtime):
        return {
            "policies": [
                {
                    "name": "test",
                    "resource": "s3",
                    "mode": {
                        "execution-options": {"metrics_enabled": False},
                        "type": "periodic",
                        "schedule": "xyz",
                        "runtime": runtime,
                    },
                }
            ]
        }

    def errors_with(runtime):
        return list(Validator(generate()).iter_errors(policy_data(runtime)))

    self.assertEqual(len(errors_with("python2.7")), 0)
    self.assertEqual(len(errors_with("python3.6")), 0)
    self.assertEqual(len(errors_with("python4.5")), 1)
def test_offhours_stop(self):
    """An ec2 offhour policy (c7n_downtime tag, offhour key) validates."""
    offhour_filter = {
        "type": "offhour",
        "tag": "c7n_downtime",
        "default_tz": "et",
        "offhour": 19,
    }
    doc = {
        "policies": [{
            "name": "ec2-offhours-stop",
            "resource": "ec2",
            "filters": [
                {"tag:aws:autoscaling:groupName": "absent"},
                offhour_filter,
            ],
        }]
    }
    issues = list(Validator(generate(["ec2"])).iter_errors(doc))
    self.assertEqual(len(issues), 0)
def test_schema(self):
    """generate() must yield a valid JSON schema document."""
    try:
        Validator.check_schema(generate())
    except Exception:
        self.fail("Invalid schema")
def setUp(self):
    # Lazily build the shared schema validator: generate() is expensive,
    # so run it only once for the first test that needs it.
    if not self.validator:
        self.validator = Validator(generate())
def test_schema_expand(self):
    """A $ref node expands to the referenced element schema.

    refs should only ever exist in a dictionary by itself.
    """
    test_schema = {'$ref': '#/definitions/filters_common/value_from'}
    result = _expand_schema(test_schema, generate()['definitions'])
    # Bug fix: assertEquals is a deprecated alias, removed in Python 3.12.
    self.assertEqual(result, ValuesFrom.schema)
def test_schema_expand_not_found(self):
    """Expanding a dangling $ref yields None rather than raising."""
    test_schema = {'$ref': '#/definitions/filters_common/invalid_schema'}
    result = _expand_schema(test_schema, generate()['definitions'])
    # Bug fix: assertEquals is a deprecated alias, removed in Python 3.12.
    self.assertEqual(result, None)
def test_schema_serialization(self):
    """The generated schema must be JSON-serializable."""
    try:
        dumps(generate())
    except Exception:
        # Bug fix: a bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; catch only real errors.
        self.fail("Failed to serialize schema")
def test_schema_expand_not_found(self):
    """Resolving a dangling $ref via ElementSchema yields None."""
    dangling = {'$ref': '#/definitions/filters_common/invalid_schema'}
    resolved = ElementSchema.schema(generate()['definitions'], dangling)
    self.assertEqual(resolved, None)
from msrest.serialization import Model from msrest.service_client import ServiceClient from vcr_unittest import VCRTestCase from c7n.config import Config, Bag from c7n.policy import ExecutionContext from c7n.schema import generate from c7n.testing import TestUtils from c7n.utils import local_session from .azure_serializer import AzureSerializer # Ensure the azure provider is loaded. from c7n_azure import provider # noqa BASE_FOLDER = os.path.dirname(__file__) C7N_SCHEMA = generate() DEFAULT_SUBSCRIPTION_ID = 'ea42f556-5106-4743-99b0-c129bfa71a47' CUSTOM_SUBSCRIPTION_ID = '00000000-5106-4743-99b0-c129bfa71a47' DEFAULT_USER_OBJECT_ID = '00000000-0000-0000-0000-000000000002' DEFAULT_TENANT_ID = '00000000-0000-0000-0000-000000000003' DEFAULT_INSTRUMENTATION_KEY = '00000000-0000-0000-0000-000000000004' DEFAULT_STORAGE_KEY = 'DEC0DEDITtVwMoyAuTz1LioKkC+gB/EpRlQKNIaszQEhVidjWyP1kLW1z+jo'\ '/MGFHKc+t+M20PxoraNCslng9w==' GRAPH_RESPONSE = { "value": [ { "NOTE": "THIS RESPONSE FAKED BY AZURE_COMMON.PY", "odata.type": "Microsoft.DirectoryServices.User", "objectType": "User", "objectId": DEFAULT_USER_OBJECT_ID,
def test_empty_with_lazy_load(self):
    """With no cloud providers registered, the policies items schema
    degrades to a bare object schema."""
    self.patch(schema, 'clouds', PluginRegistry('stuff'))
    policy_schema = generate()
    self.assertEqual(
        policy_schema['properties']['policies']['items'],
        {'type': 'object'})
def setUp(self):
    # Lazily create the shared validator; schema generation is expensive
    # and only needs to happen once across the test class.
    if not self.validator:
        self.validator = Validator(generate())
def test_schema_serialization(self):
    """The generated schema must round-trip through the JSON serializer."""
    try:
        dumps(generate())
    except Exception:
        # Bug fix: a bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; catch only real errors.
        self.fail("Failed to serialize schema")
from c7n.resources import load_resources from c7n.utils import CONN_CACHE from .zpill import PillTest logging.getLogger('placebo.pill').setLevel(logging.DEBUG) logging.getLogger('botocore').setLevel(logging.WARNING) load_resources() ACCOUNT_ID = '644160558196' C7N_VALIDATE = bool(os.environ.get('C7N_VALIDATE', '')) C7N_SCHEMA = generate() skip_if_not_validating = unittest.skipIf( not C7N_VALIDATE, reason='We are not validating schemas.') # Set this so that if we run nose directly the tests will not fail if 'AWS_DEFAULT_REGION' not in os.environ: os.environ['AWS_DEFAULT_REGION'] = 'us-east-1' class BaseTest(PillTest): def cleanUp(self): # Clear out thread local session cache CONN_CACHE.session = None
def setUp(self):
    # Lazily build the shared JSON-schema validator; generate() is
    # expensive, so it runs only for the first test that needs it.
    if not self.validator:
        self.validator = JsonSchemaValidator(generate())
def test_schema(self):
    """Meta-validate the full generated policy schema."""
    try:
        full_schema = generate()
        Validator.check_schema(full_schema)
    except Exception:
        self.fail("Invalid schema")