Example #1
0
def apply_default_config(config):
    """Fill *config* in place with monocyte's default settings.

    User-supplied ``cloudwatchlogs`` options are merged over built-in
    defaults and the textual log level is mapped to its ``logging``
    constant; any missing top-level key is backfilled with a default.
    """
    if config.get("cloudwatchlogs"):
        cw_defaults = {
            'region': 'eu-central-1',
            'log_level': 'INFO',
            'groupname': 'monocyte_logs'
        }
        # user-provided values win over the defaults
        cw_config = yamlreader.data_merge(cw_defaults,
                                          config['cloudwatchlogs'])

        level_by_name = {
            'DEBUG': logging.DEBUG,
            'INFO': logging.INFO,
            'WARN': logging.WARN,
            'ERROR': logging.ERROR
        }
        # translate e.g. "info" -> logging.INFO
        cw_config['log_level'] = level_by_name[cw_config['log_level'].upper()]

        config['cloudwatchlogs'] = cw_config

    default_config = {
        "handler_names": [
            "cloudformation.Stack", "ec2.Instance", "ec2.Volume",
            "rds2.Instance", "rds2.Snapshot", "dynamodb.Table", "s3.Bucket",
            "acm.Certificate"
        ],
        "ignored_resources": {
            "cloudformation": ["cloudtrail-logging"]
        },
        "ignored_regions": ["cn-north-1", "us-gov-west-1"],
        "allowed_regions_prefixes": ["eu"]
    }
    # only fill keys the caller did not set
    for key, value in default_config.items():
        config.setdefault(key, value)
Example #2
0
def load_config(global_config_dir=CFGDIR):
    """Load the global configuration and overlay the user's own.

    Reads YAML from *global_config_dir* and from ``~/.afp-cli`` (each
    only if that directory exists) and merges the user settings over
    the global ones.  Returns the merged dict.
    """
    def _read_config_dir(directory):
        # an absent directory simply contributes an empty config
        if os.path.isdir(directory):
            return yamlreader.yaml_load(directory, {})
        return {}

    global_config = _read_config_dir(global_config_dir)
    user_config = _read_config_dir(os.path.expanduser("~/.afp-cli"))

    # data_merge folds the user settings into global_config in place
    yamlreader.data_merge(global_config, user_config)
    return global_config
Example #3
0
def load_config(global_config_dir=CFGDIR):
    """Load the global configuration and overlay the user's own.

    Reads YAML from *global_config_dir* and from ``~/.afp-cli`` (each
    only if that directory exists) and merges the user settings over
    the global ones.  Returns the merged dict.
    """
    global_config = {}
    if os.path.isdir(global_config_dir):
        global_config = yamlreader.yaml_load(global_config_dir, {})

    user_config = {}
    user_config_dir = os.path.expanduser("~/.afp-cli")
    if os.path.isdir(user_config_dir):
        # BUG FIX: this previously assigned to global_config, which
        # discarded the already-loaded global configuration and left
        # user_config empty, making the merge below a no-op.
        user_config = yamlreader.yaml_load(user_config_dir, {})

    yamlreader.data_merge(global_config, user_config)
    return global_config
Example #4
0
def main(arguments):
    """Entry point: build the effective config and run Monocyte.

    Merges the config file, CLI arguments and whitelist (in that
    order, later sources winning), applies defaults, then runs the
    search-and-destroy pass.  Returns Monocyte's exit status, or 1 on
    an unexpected error.
    """
    overrides = convert_arguments_to_config(arguments)
    path = get_config_path_from_args(arguments)
    whitelist = get_whitelist_from_args(arguments)

    merged = yamlreader.data_merge(read_config(path), overrides)
    merged = yamlreader.data_merge(merged, load_whitelist(whitelist))
    apply_default_config(merged)

    monocyte = Monocyte(**merged)

    try:
        return monocyte.search_and_destroy_unwanted_resources()
    except Exception:
        monocyte.logger.exception("Error while running monocyte:")
        return 1
def parse_unknown_args(unknown):
    """Turn a flat key/value argument list into a nested config dict.

    *unknown* is the leftover argv list from ``parse_known_args`` and
    must alternate keys and values.  Each value is YAML-parsed, nested
    under its key via ``get_dict``, and all entries are deep-merged
    into one dict, which is returned.

    Raises:
        ValueError: if *unknown* has an odd number of elements.
    """
    # Explicit validation instead of ``assert``: asserts are stripped
    # under ``python -O``, which would silently mispair the arguments.
    if len(unknown) % 2 != 0:
        raise ValueError(
            "some custom config keys from arguments have no value assigned")

    # Pair consecutive elements: [k1, v1, k2, v2] -> {k1: v1, k2: v2}.
    # Plain zip suffices now that an even length is guaranteed.
    custom_parameters = dict(zip(*[iter(unknown)] * 2))

    formatted_custom_parameters = {}
    for key, value in custom_parameters.items():
        new_dict = safe_load(value)
        # get_dict peels one level off the key per iteration, wrapping
        # new_dict ever deeper until the key is exhausted
        while key:
            new_dict, key = get_dict(new_dict, key)
        formatted_custom_parameters = yamlreader.data_merge(
            formatted_custom_parameters, new_dict)
    return formatted_custom_parameters
Example #6
0
 def __init__(self, user, config, account_config, logger=None):
     """Set up the application for *user* and initialise its provider.

     *config* is deep-merged over built-in AWS/provider defaults;
     *account_config* is stored as-is.  A module logger is used when
     none is supplied.
     """
     self.logger = logger or logging.getLogger(__name__)
     self.user = user
     defaults = {
         'aws': {'access_key': None, 'secret_key': None},
         'provider': {'class': 'Provider'},
     }
     # user configuration wins over the defaults
     self.application_config = data_merge(defaults, config)
     self.account_config = account_config
     self.provider = None
     self._setup_provider()
 def __init__(self, user, config, account_config, logger=None):
     """Initialise the service for *user*.

     The supplied *config* is deep-merged over built-in AWS/provider
     defaults before the provider is set up.
     """
     base_config = {
         'aws': {
             'access_key': None,
             'secret_key': None,
         },
         'provider': {
             'class': 'Provider',
         },
     }
     self.logger = logger or logging.getLogger(__name__)
     self.user = user
     # caller-supplied settings override base_config
     self.application_config = data_merge(base_config, config)
     self.account_config = account_config
     self.provider = None
     self._setup_provider()
Example #8
0
def merge_yamls(configs):
    """Deep-merge the YAML files described by *configs* into one file.

    Each entry supplies a ``path`` and optionally a ``module`` used for
    variable interpolation before parsing.  The merged document is
    written to ``RES_DIR/config.yaml``, which is also queued for later
    cleanup.
    """
    merged = {}
    for entry in configs:
        with open(entry.get('path'), 'r') as source:
            raw = source.read()
            # substitute module-scoped variables before parsing
            raw = interpolate_vars(raw, entry.get('module'))
            parsed = yaml.load(raw, Loader=yaml.FullLoader)
        # an empty YAML file parses to None and contributes nothing
        if parsed is not None:
            merged = data_merge(merged, parsed)

    out_path = os.path.join(RES_DIR, 'config.yaml')
    with open(out_path, 'w') as target:
        dumped = safe_dump(merged,
                           indent=2,
                           default_flow_style=False,
                           canonical=False)
        target.write(dumped)

    CLEANUP_FILES_QUEUE.append(out_path)
Example #9
0
def __main():
    """Command-line entry point: build the config and generate genders.

    Merges an optional YAML config file under the CLI arguments,
    applies the default genders file path and runs
    ``GenerateGenders``.

    Raises:
        RuntimeError: if the config file cannot be read or no input
            directories are configured.
    """
    config_data = vars(__parse_args())
    # drop arguments the user did not supply so they don't shadow the file
    config_data = {key: config_data[key] for key in config_data
                   if config_data[key] is not None}
    if 'config' in config_data:
        try:
            config_data = data_merge(yaml_load(config_data['config']),
                                     config_data)
        except YamlReaderError as exc:
            # BUG FIX: ``raise "..."`` raises a TypeError in Python 3
            # (exceptions must derive from BaseException) — raise a real
            # exception and preserve the original cause.
            raise RuntimeError("Could not read configfile: %s" % exc) from exc
    if len(config_data['input']) == 0:
        # BUG FIX: same string-raise problem; also fixes the
        # "direcotires" typo in the message.
        raise RuntimeError("No input directories configured.")
    if 'gendersfile' not in config_data:
        config_data['gendersfile'] = '/etc/genders'
    genders_generator = GenerateGenders(
        config_data.get('input'),
        config_data.get('gendersfile'),
        config_data.get('domain', {}),
        config_data.get('verbosity')
    )
    genders_generator.generate_genders_file()
Example #10
0
def _gen():
    """Build the swagger/OpenAPI document for all database views.

    Loads the static base YAML files, then renders ``view.tmpl.yml``
    once per accessible view of every connector and deep-merges each
    rendered fragment into the base document.  Registered
    ``tdp-swagger-postprocessor`` extensions get a final pass.
    Returns the merged spec (a dict).
    """
    import io
    from os import path

    from yaml import safe_load
    from yamlreader import data_merge, yaml_load

    here = path.abspath(path.dirname(__file__))

    files = [path.join(here, "swagger", p) for p in ["swagger.yml", "db.yml"]
             ]  # , 'proxy.yml', 'storage.yml']]
    base = yaml_load(files)
    # stable alphabetical path order in the generated document
    base["paths"] = OrderedDict(
        sorted(base["paths"].items(), key=lambda t: t[0]))

    with io.open(path.join(here, "swagger", "view.tmpl.yml"),
                 "r",
                 encoding="utf-8") as f:
        template = Template(str(f.read()))

    tags = base["tags"]

    def to_type(t):
        # Map a Python type to a swagger type name.
        # NOTE(review): falls through (returns None) for any other
        # type, e.g. str — presumably only None/int/float are passed;
        # confirm against dbview argument metadata.
        if t is None:
            return "string"
        if t is int:
            return "integer"
        if t is float:
            return "number"

    # integrate all views using the template
    for database, connector in manager.db.connectors.items():

        db.resolve(database)  # trigger filling up columns

        # add database tag
        tags.append(
            dict(name="db_" + database,
                 description=connector.description or ""))

        for view, dbview in connector.views.items():
            # skip views the current user may not access
            if not dbview.can_access() or dbview.query_type == "private":
                continue
            # if database != u'dummy' or view != u'b_items_verify':
            #  continue

            for tag in dbview.tags:
                if tag not in tags:
                    tags.append(tag)

            # declared arguments of the view
            args = []
            for arg in dbview.arguments:
                info = dbview.get_argument_info(arg)
                args.append(
                    dict(
                        name=arg,
                        type=to_type(info.type),
                        as_list=info.as_list,
                        enum_values=None,
                        description=info.description,
                        example=info.example,
                    ))

            # replacement parameters, except security-sensitive ones
            for arg in (a for a in dbview.replacements
                        if a not in secure_replacements):
                extra = dbview.valid_replacements.get(arg)
                arg_type = "string"
                enum_values = None
                if isinstance(extra, list):
                    # a list of valid values becomes an enum
                    enum_values = extra
                if extra == int or extra == float:
                    arg_type = to_type(extra)
                args.append(
                    dict(
                        name=arg,
                        type=arg_type,
                        as_list=False,
                        enum=enum_values,
                        description="",
                    ))

            filters = set()

            if "where" in dbview.replacements or "and_where" in dbview.replacements:
                # filter possible
                for k in dbview.filters.keys():
                    filters.add(k)
                if not filters:
                    # no explicit filters: every column is filterable
                    for k in list(dbview.columns.keys()):
                        filters.add(k)

            if "agg_score" in dbview.replacements:
                # score query magic handling
                agg_score = connector.agg_score
                args.append(
                    dict(
                        name="agg",
                        type="string",
                        as_list=False,
                        enum=agg_score.valid_replacements.get("agg"),
                    ))

            # response properties derived from the view's columns
            props = []
            for k, prop in dbview.columns.items():
                p = prop.copy()
                p["name"] = k
                # categorical and untyped columns are exposed as strings
                if "type" not in p or p["type"] == "categorical":
                    p["type"] = "string"
                props.append(p)

            if dbview.idtype:
                # assume when id type given then we have ids
                props.append(dict(name="_id", type="integer"))
                if not any((p["name"] == "id" for p in props)):
                    props.append(dict(name="id", type="string"))

            # feature switches consumed by the template
            features = {
                "generic": dbview.query_type in ["generic", "helper", "table"],
                "desc": dbview.query_type in ["table"],
                "lookup": dbview.query_type in ["lookup"],
                "score": dbview.query_type in ["score"],
            }

            # template context for this single view
            keys = {
                "database": database,
                "view": view,
                "type": dbview.query_type,
                "description": dbview.description or "",
                "summary": dbview.summary or "",
                "args": args,
                "empty": not args and not filters,
                "filters": filters,
                "features": features,
                "tags": dbview.tags or [],
                "props": props,
                "propsempty": not props,
            }

            view_yaml = template.render(**keys)
            # _log.info(view_yaml)
            part = safe_load(view_yaml)
            base = data_merge(base, part)

    # post process using extensions
    for p in manager.registry.list("tdp-swagger-postprocessor"):
        base = p.load().factory(base)

    return base
Example #11
0
 def test_merge_dict_to_list(self):
     """Merging a dict into a list appends the dict as one element."""
     result = data_merge([1, 2], {1: 2})
     self.assertEqual(result, [1, 2, {1: 2}])
Example #12
0
 def test_merge_complex_list_to_list(self):
     """Merging list into list concatenates, keeping element order."""
     merged = data_merge([1, 2], [{1: 2}, 1])
     self.assertEqual(merged, [1, 2, {1: 2}, 1])
Example #13
0
 def test_merge_deep_dict_to_deep_dict(self):
     """Nested dicts are merged recursively, not replaced."""
     merged = data_merge({1: 2, 3: {1: 2}}, {3: {2: 5}})
     expected = {1: 2, 3: {1: 2, 2: 5}}
     self.assertEqual(merged, expected)
Example #14
0
 def test_merge_dict_to_dict(self):
     """A scalar merged onto a list-valued key is appended to the list."""
     merged = data_merge({1: 2, 3: [1, 2, 3]}, {3: 6})
     self.assertEqual(merged, {1: 2, 3: [1, 2, 3, 6]})
Example #15
0
 def test_merge_dict_to_list(self):
     """data_merge appends a dict onto a list as a single element."""
     expected = [1, 2, {1: 2}]
     self.assertEqual(data_merge([1, 2], {1: 2}), expected)
def main():
    """CLI entry point for (re)starting policy-gradient training runs.

    Builds the configuration from defaults, named config files, a
    continued task's saved config and any trailing ``key value`` CLI
    overrides, then launches one or more training runs.  The special
    algorithm ``IRL+DAPG`` first trains IRL, then DAPG on top of the
    finished IRL job.
    """
    parser = argparse.ArgumentParser(
        description='Policy gradient algorithms with demonstration data.')
    parser.add_argument('--configs',
                        type=str,
                        nargs='+',
                        help='path to additional config parameters')
    parser.add_argument('--continue_task',
                        type=str,
                        default=None,
                        help='task name to continue training')
    args, unknown = parser.parse_known_args()
    # leftover argv entries are free-form "key value" config overrides
    custom_parameters = parse_unknown_args(unknown)
    if args.continue_task is not None:
        # resume: reuse the config saved with the task (also looked up
        # under the 'Runs' directory)
        config_path = os.path.join(args.continue_task, "config.yaml")
        if not os.path.exists(config_path):
            config_path = os.path.join('Runs', config_path)
        cfg = yamlreader.yaml_load(config_path)
    else:
        configs = None
        if args.configs is not None:
            configs = []
            for config in args.configs:
                # NOTE(review): substring test, not endswith — a name
                # like "a.yaml.bak" is passed through unchanged
                if '.yaml' not in config:
                    config += '.yaml'
                configs.append('../configs/config/' + config)
        cfg = yamlreader.yaml_load(configs,
                                   default_path='../configs/default.yaml')
    # CLI overrides always win
    cfg = yamlreader.data_merge(cfg, custom_parameters)

    # parse multiple runs
    # cfg['runs'] is either the string 'single_run' or a [from, to) pair
    multiple_runs = cfg['runs'] != 'single_run'
    runs_from = cfg['runs'][0] if multiple_runs else 1
    runs_to = cfg['runs'][1] if multiple_runs else 2

    original_seed = cfg['seed']
    for run_no in range(runs_from, runs_to):
        # deterministic but distinct seed per run
        run_seed = original_seed + run_no - 1
        if cfg['algorithm'] == "IRL+DAPG":
            # start IRL
            print("\n----- Starting IRL for DAPG -----\n")
            cfg['algorithm'] = 'IRL'
            if cfg['based_IRL']['IRL_config'] is None:
                IRL_cfg = yamlreader.yaml_load('../configs/config/IRL.yaml')
            else:
                config_path = os.path.join('../configs/config',
                                           cfg['based_IRL']['IRL_config'])
                if '.yaml' not in config_path:
                    config_path += '.yaml'
                IRL_cfg = yamlreader.yaml_load(config_path)
            # IRL config extends the main config; CLI overrides still win
            IRL_cfg = yamlreader.data_merge(cfg, IRL_cfg)
            IRL_cfg = yamlreader.data_merge(IRL_cfg, custom_parameters)
            irl_task = parse_task(IRL_cfg)[1]
            train(IRL_cfg,
                  run_no=run_no,
                  multiple_runs=multiple_runs,
                  seed=run_seed)
            # start DAPG based on IRL job
            print("\n----- Starting DAPG based on finished IRL job -----\n")
            cfg['algorithm'] = 'DAPG_based_IRL'
            cfg['based_IRL']['IRL_job'] = irl_task
            cfg['based_IRL']['IRL_run_no'] = run_no if multiple_runs else None
            cfg['use_DAPG'] = True
            train(cfg,
                  run_no=run_no,
                  multiple_runs=multiple_runs,
                  seed=run_seed)
            # make run with initialisation if dump_paths_percentage is set
            start_from_initialisation = IRL_cfg['IRL'][
                'dump_paths_percentage'] is not None
            if start_from_initialisation:
                cfg['based_IRL']['get_paths_for_initialisation'] = True
                train(cfg,
                      run_no=run_no,
                      multiple_runs=multiple_runs,
                      seed=run_seed)
            # reset variables
            # restore cfg so the next run repeats the whole pipeline
            cfg['algorithm'] = "IRL+DAPG"
            cfg['based_IRL']['IRL_job'] = None
            cfg['based_IRL']['IRL_run_no'] = None
            if start_from_initialisation:
                del cfg['based_IRL']['get_paths_for_initialisation']
        else:
            train(cfg,
                  run_no=run_no,
                  multiple_runs=multiple_runs,
                  seed=run_seed)
Example #17
0
 def test_merge_deep_dict_to_deep_dict(self):
     """Keys of nested dicts are combined recursively."""
     left = {1: 2, 3: {1: 2}}
     right = {3: {2: 5}}
     self.assertEqual(data_merge(left, right), {1: 2, 3: {1: 2, 2: 5}})
Example #18
0
 def test_merge_dict_to_dict(self):
     """Merging a scalar onto a list-valued key appends the scalar."""
     left = {1: 2, 3: [1, 2, 3]}
     self.assertEqual(data_merge(left, {3: 6}), {1: 2, 3: [1, 2, 3, 6]})
Example #19
0
 def test_merge_tuple_to_list(self):
     """A tuple merged into a list is appended as a single element."""
     result = data_merge([1, 2], ({1: 2}, 3))
     self.assertEqual(result, [1, 2, ({1: 2}, 3)])
Example #20
0
 def test_merge_string_to_list(self):
     """A string merged into a list is appended whole, not char-wise."""
     result = data_merge([1, 2], "hello")
     self.assertEqual(result, [1, 2, "hello"])
Example #21
0
 def test_merge_tuple_to_list(self):
     """Tuples are treated as scalars when merged into a list."""
     expected = [1, 2, ({1: 2}, 3)]
     self.assertEqual(data_merge([1, 2], ({1: 2}, 3)), expected)
Example #22
0
 def test_merge_complex_list_to_list(self):
     """List-to-list merge concatenates the right operand's elements."""
     expected = [1, 2, {1: 2}, 1]
     self.assertEqual(data_merge([1, 2], [{1: 2}, 1]), expected)
Example #23
0
 def test_merge_string_to_list(self):
     """Strings merge into lists as one appended element."""
     expected = [1, 2, "hello"]
     self.assertEqual(data_merge([1, 2], "hello"), expected)
Example #24
0
    preproc.debug = args.debug

    meta = {}
    body = ""
    for source_file in source_files:
        raw = ""
        # Read in md file as str
        for line in fileinput.input(source_file):
            raw += line

        # Split into head (yaml) and body (md)
        this_head, this_body = get_yaml_data(raw)
        # Load yaml str into metadata dict
        this_meta = yaml.safe_load(this_head)
        # Merge this metadata dict with others (function imported from pypi package yamlreader)
        meta = data_merge(meta, this_meta)
        # Merge this body str with others
        body = body + this_body

    preproc.init_vars_metadata(meta)
    preproc.init_vars_args(args)

    new_body = preproc.process(body)

    delim = "---" + os.linesep
    # Create new file. The metadata here is ugly but usable
    new_file = delim + yaml.safe_dump(meta) + delim + new_body

    if out_file:
        with open(out_file, "w") as f:
            f.write(new_file)