Пример #1
0
    def test_policy(self):
        """Exercise the 'user_add' check for admin, no-role and wrong-org users."""

        def make_context(roles, subject_org_id, target_org_id):
            # Build the policy context: subject 's' with a has_role callable
            # and an org, plus the target org. Missing keys resolve to the
            # module-level `undefined` sentinel.
            return DefaultMunch.fromDict(
                {
                    's': {
                        'has_role': lambda name: name in roles,
                        'org': {'id': subject_org_id},
                    },
                    'org': {'id': target_org_id},
                },
                default=undefined)

        # An admin of org 'foo' may add users to org 'foo'.
        user_policy = self.policy.derive(
            make_context({'admin', 'org_admin'}, 'foo', 'foo'))
        self.assertEqual(user_policy.check('user_add'), True)

        # A user with no roles may not, even inside their own org.
        user_policy = self.policy.derive(make_context(set(), 'foo', 'foo'))
        self.assertEqual(user_policy.check('user_add'), False)

        # An org admin of 'foo' may not add users to a different org 'bar'.
        user_policy = self.policy.derive(
            make_context({'org_admin'}, 'foo', 'bar'))
        self.assertEqual(user_policy.check('user_add'), False)
Пример #2
0
 def __init__(self, values, level_directory, state, selector):
     """Wrap the raw gate *values* for attribute access and record its origin."""
     self.level_directory = level_directory
     self.state = state
     self.selector = selector
     self.values = DefaultMunch.fromDict(values)
     # Announce the new gate by id and source directory.
     logger.info("New gate %s from %s" %
                 (self.values.gate_id, self.level_directory))
Пример #3
0
    def __init__(self):
        """Constructor method"""
        self.exit = False
        """**default:** ``False``

        Exit flag
        """
        self.rate = 44100
        """**default:** ``44100``

        Number of audio samples collected in 1 second"""
        self.channels = 1
        """**default:** ``1`` (Mono Channel)

        Number of audio channel to listen"""
        self.chunk_size = 1024
        """**default:** ``1024``

        Number of frames in the input audio buffer"""
        self.wait = 0.5
        """**default:** ``0.5``

        Time duration to wait for claps to complete in :meth:`Listener.clapWait()`"""
        self.method = Objectify.fromDict({
            'name': 'threshold',
            'value': 512
        }, False)
        """**default:** :code:`{'name': 'threshold','value': 7000}`

        Detection method used for identifing claps"""
        self.actions = [
            m for m in dir(self) if m.startswith('on') and m.endswith('Claps')
        ]
        """When the class initialised, it collects all the actions defined inside this class as well as any classes where are derived with this class as base class
Пример #4
0
    def delete_method(self):
        """Load merchant configuration from cybs.json, then run this request.

        Errors are reported to stdout rather than raised: API errors print
        the exception, missing keys and files print labelled messages.
        """
        try:
            props = PropertiesUtil()
            props.cybs_path = os.path.join(
                os.getcwd(), "samples/authentication/Resources", "cybs.json")
            details = DefaultMunch.fromDict(props.properties_util())

            merchant_config = MerchantConfiguration()
            merchant_config.set_merchantconfig(details)
            merchant_config.validate_merchant_details(details, merchant_config)

            # Request-specific fields come from this instance.
            merchant_config.request_type_method = self.request_type
            merchant_config.request_target = self.request_target
            merchant_config.url = (self.url + merchant_config.request_host +
                                   merchant_config.request_target)

            self.process(merchant_config)
        except ApiException as e:
            print(e)
        except KeyError as e:
            print(GlobalLabelParameters.NOT_ENTERED + str(e))
        except IOError as e:
            print(GlobalLabelParameters.FILE_NOT_FOUND + str(e.filename))
        except Exception as e:
            print(e)
Пример #5
0
def _read_definition(definition_path):
    """Parse the YAML file at *definition_path* into a DefaultMunch.

    Keys absent from the file resolve to None on the returned object.
    """
    with open(definition_path) as handle:
        raw = safe_load(handle)
    return DefaultMunch.fromDict(raw, None)
Пример #6
0
def _read_config(default_path):
    """Load the YAML config at *default_path* as a DefaultMunch (default None).

    Exits the process with status 1 if the file does not exist.
    """
    # Guard clause: bail out early on a missing file.
    if not os.path.exists(default_path):
        print('Config does not exist: "{}"\nExiting.'.format(default_path))
        exit(1)
    with open(default_path, 'rt') as f:
        return DefaultMunch.fromDict(yaml.safe_load(f.read()), None)
Пример #7
0
 def __init__(self, level_directory, name, selector):
     '''Initialize a Room to defaults'''
     self.name = name
     self.level_directory = level_directory
     self.selector = selector
     # Fresh rooms always start in the New state.
     self.state = state.LevelState.New
     # Begin with an empty brick list, wrapped for attribute access.
     self.values = DefaultMunch.fromDict({"bricks": []})
Пример #8
0
    def create(self, trailer_str):
        """Build a user from *trailer_str*, assign it the next id and store it.

        The user and its nested `adresse` receive the same id (the current
        user count). The 'birthday' ISO string is parsed into a datetime.date.

        :returns: the stored user object
        """
        user_obj = DefaultMunch.fromDict(trailer_str, User())
        # Idiomatic len() instead of calling __len__() directly; computed once
        # since the list is only appended to after both ids are assigned.
        next_id = len(self.users)
        user_obj['id'] = next_id
        user_obj.adresse['id'] = next_id
        user_obj['birthday'] = date.fromisoformat(user_obj['birthday'])

        self.users.append(user_obj)
        return user_obj
Пример #9
0
 def __init__(self, values, level_directory, state, selector):
     """Store the room spec values (attribute-accessible) and its location."""
     self.values = DefaultMunch.fromDict(values)
     self.level_directory = level_directory
     self.state = state
     self.selector = selector
     # Reference to the Room in its directory; instantiated lazily when needed.
     self.room = None
     logger.info("New room_spec %s from %s" %
                 (self.values.room_id, self.level_directory))
Пример #10
0
def get_config():
    """Parse the -config CLI argument and load that YAML file.

    :returns: the configuration as a DefaultMunch whose missing keys
        resolve to None
    """
    parser = argparse.ArgumentParser(description='Trading Engine named Strela')
    parser.add_argument('-config', help='a path to configuration file')

    args = parser.parse_args()
    filename = args.config

    with open(filename, 'r') as ymlfile:
        # yaml.load() without an explicit Loader is deprecated and a
        # TypeError in PyYAML >= 6; safe_load is correct for plain config.
        cfg_dict = yaml.safe_load(ymlfile)
        return DefaultMunch.fromDict(cfg_dict, None)
Пример #11
0
def test_copy_default():
    """copy() must deep-copy nested munches and carry the default through."""
    sentinel = object()
    original = DefaultMunch.fromDict({'urmom': {'sez': {'what': 'what'}}},
                                     sentinel)
    duplicate = original.copy()
    # Every nesting level is a distinct object...
    assert duplicate is not original
    assert duplicate.urmom is not original.urmom
    assert duplicate.urmom.sez is not original.urmom.sez
    # ...yet values and equality are preserved.
    assert duplicate.urmom.sez.what == 'what'
    assert duplicate == original
    # The default sentinel propagates into the copy.
    assert duplicate.urmom.sez.foo is sentinel
    assert duplicate.urmom.sez.__undefined__ is sentinel
Пример #12
0
def test_copy_default():
    """A copied DefaultMunch is structurally independent but equal."""
    undefined = object()
    src = DefaultMunch.fromDict({'urmom': {'sez': {'what': 'what'}}}, undefined)
    dup = src.copy()

    for pair in ((dup, src), (dup.urmom, src.urmom),
                 (dup.urmom.sez, src.urmom.sez)):
        # No nesting level may be shared between copy and source.
        assert pair[0] is not pair[1]

    assert dup.urmom.sez.what == 'what'
    assert dup == src
    # Missing keys on the copy still yield the original default object.
    assert dup.urmom.sez.foo is undefined
    assert dup.urmom.sez.__undefined__ is undefined
Пример #13
0
 def load(self, state):
     """Load this room's bricks for *state* from JSON and keep the values."""
     self.state = state
     logger.info("Load room %s from: %s, state: %s" %
                 (self.name, self.level_directory, self.state))
     # filename_map translates the state into the concrete file name.
     file_name = "/".join(
         [self.level_directory, self.name, self.filename_map[state]])
     logger.info("Load room from: %s, state: %s" % (file_name, self.state))
     values = json_helper.load_and_validate_json(
         file_name, "file_room_bricks.json", decode_hook=self.decode_room())
     logger.info("Has read: %s", values)
     self.values = DefaultMunch.fromDict(values)
     logger.info("Has values: %s", self.values)
Пример #14
0
 def load(self, directory, load_state):
     """Load the level saved under *load_state* from *directory*.

     Reads the saved-state list from <directory>/state.txt, validates the
     level JSON against the file_rooms_logic.json schema, and finishes with
     a structure coherency check.

     Raises Exception if *load_state* is not among the saved states.
     """
     self.directory = directory
     self.state = state.StateList(directory + "/state.txt")
     logger.info("Saved states are: %s" % self.state)
     if not self.state.has_state(load_state):
         raise Exception("Level has no saved state %s" % load_state)
     logger.info(self.status_to_filename[load_state])
     # NOTE(review): decode_level receives the *current* (pre-load)
     # self.status; self.status only advances to load_state afterwards.
     # This ordering looks deliberate -- confirm before reordering.
     obj = json_helper.load_and_validate_json(
         directory + "/" + self.status_to_filename[load_state],
         "file_rooms_logic.json",
         decode_hook=decode_level(directory, self.status, self.selector))
     self.status = load_state
     self.values = DefaultMunch.fromDict(obj)
     # Sanity-check the freshly loaded structure before callers use it.
     self.structure_check_coherency()
Пример #15
0
def test_threshold():
    """The threshold detector fires only strictly above the configured value."""
    proc = SignalProcessor(
        method=Objectify.fromDict({
            'name': 'threshold',
            'value': 100
        }, False))
    assert proc.algorithm.name == DETECTION_ALGORITHMS[
        proc.alg_id], 'Algorithm does not match'
    # Samples at or below the threshold are not claps; above it they are.
    for sample, expected in ((99, False), (100, False), (101, True),
                             (102, True)):
        assert proc.findClap(
            data=array('b', [sample])) == expected, 'Clap Detection Error'
Пример #16
0
 def test_verify(self):
     """verify() must raise when an org admin targets a different org."""
     # Subject is org_admin of 'foo' but the target org is 'bar'.
     context = DefaultMunch.fromDict(
         {
             's': {
                 'has_role': lambda name: name in {'org_admin'},
                 'org': {'id': 'foo'}
             },
             'org': {'id': 'bar'}
         },
         default=undefined)
     user_policy = self.policy.derive(context)
     with self.assertRaises(TestPermissionError):
         user_policy.verify('user_add')
Пример #17
0
def create_document(s3, message, bucket, key):
    """Create a Document for the S3 object at bucket/key and consume its request.

    Looks up the originating request via the object's x-amz-meta-request
    header, creates the Document and deletes the request row atomically,
    then deletes the queue *message*. A missing request row also consumes
    the message but reports it as not found.
    """
    try:
        head = munch.fromDict(s3.head_object(Bucket=bucket, Key=key))
        meta_request = head.ResponseMetadata.HTTPHeaders['x-amz-meta-request']
        document_request = Requests.objects.get(pk=meta_request)

        with transaction.atomic():
            file = '/'.join([S3_BASEURL, settings.S3_BUCKET, key])
            # Create the document and drop its request in one transaction.
            document = Document.objects.create(**document_request.data,
                                               file=file,
                                               request_id=document_request.pk)
            document_request.delete()

        message.delete()
        print_message(DOCUMENT_CREATED_MESSAGE.format(document.pk))

    except Requests.DoesNotExist:
        # Stale queue entry: the request row is gone, so just drop it.
        message.delete()
        print_message(REQUEST_NOT_FOUND_MESSAGE.format(meta_request))
Пример #18
0
 def test_permission(self):
     """permission() must name every reason a cross-org 'user_add' is denied."""
     # Subject is org_admin of 'foo'; the target org is 'bar'.
     context = DefaultMunch.fromDict(
         {
             's': {
                 'has_role': lambda name: name in {'org_admin'},
                 'org': {'id': 'foo'}
             },
             'org': {'id': 'bar'}
         },
         default=undefined)
     permission = self.policy.derive(context).permission('user_add')
     message = str(permission)
     self.assertIn("can't add that user", message)
     self.assertIn("not a member", message)
     self.assertIn("not an administrator", message)
Пример #19
0
    def normalized_as_object(
        self,
        document,
        schema=None,
        always_return_document=False,
        allow_name_conflicts=False,
        callable_numbers=False,
    ):
        """Return normalized_as_dict() wrapped so keys are attribute-callable.

        See the normalized method docstring for more information such as the
        expected parameters.
        """
        normalized = self.normalized_as_dict(
            document,
            schema,
            always_return_document,
            allow_name_conflicts,
            callable_numbers,
        )
        return DefaultMunch.fromDict(normalized)
Пример #20
0
def train_test(run_name, train_path, test_paths, args):
    """Train a FastText model on *train_path* and evaluate on each test path.

    Relies on module-level globals `params` (merged into the run config) and
    `save_model` (whether to keep the output directory) -- neither is defined
    in this snippet; confirm they exist at module scope.

    Returns a list of result dicts, one per entry in *test_paths*.
    """
    config = {}  # NOTE(review): unused in this snippet
    project_root = 'modeling'
    run = {
        'model': 'fasttext',
        'train_data': train_path,
        'test_data': test_paths[0],
        'name': run_name,
        'tmp_path': os.path.join(project_root, 'tmp'),
        'data_path': os.path.join(project_root, 'data'),
        'other_path': os.path.join(project_root, 'other', 'models'),
        'output_path': os.path.join(project_root, 'output', run_name),
        **params
    }
    # Start from a clean output directory for this run.
    if os.path.isdir(run['output_path']):
        shutil.rmtree(run['output_path'])
    os.makedirs(run['output_path'])
    # Persist the run configuration alongside the run's outputs.
    f_path = os.path.join(run['output_path'], 'run_config.json')
    with open(f_path, 'w') as f:
        json.dump(run, f, indent=4)
    run = DefaultMunch.fromDict(run, None)
    model = FastTextModel()
    logger.info(f'Starting train/test for {run_name}...')
    logger.info(f'Training model on {train_path}...')
    model.train(run)
    results = []
    # One evaluation per test set; centroid_days_test pairs with test_paths.
    for test_path, centroid_day in zip(test_paths, args['centroid_days_test']):
        run['test_data'] = test_path
        logger.info(f'Testing model on {test_path}...')
        result = model.test(run)
        logger.info(f"... F1-score: {result['f1_macro']:.3f}")
        result['train_path'] = train_path
        result['test_path'] = test_path
        result['name'] = run_name
        result['centroid_day'] = centroid_day
        # On key clashes the per-run result wins over the shared args.
        result = {**args, **result}
        results.append(result)
    # cleanup
    if not save_model:
        shutil.rmtree(run['output_path'])
    return results
Пример #21
0
    def __init__(self, values, selector):
        """Wrap brick *values* for attribute access and schema-check subparts."""
        self.values = DefaultMunch.fromDict(values)
        self.selector = selector

        # check subparts against schema, if they exist
        v = self.values
        structure_class = v.parameters.structure_class
        logger.info("Some schema checking for %s" % structure_class)
        # Pads carrying an inline definition are validated against the
        # structure class's pad schema.
        if "pads" in v:
            for pad in v.pads:
                if "definition" in pad:
                    schema = "bricks/" + structure_class + "/pad.json"
                    logger.info("Check pad fragment against %s" % schema)
                    json_helper.check_json_fragment(pad.definition, schema)
        # Optional structure parameters get their own schema check.
        if "parameters" in v and "structure_parameters" in v.parameters:
            schema = ("bricks/" + structure_class +
                      "/structure_parameters.json")
            logger.info("Check structure_parameters fragment against %s" %
                        schema)
            json_helper.check_json_fragment(
                v.parameters.structure_parameters, schema)
Пример #22
0
def X(_dict=None, **params):
    '''
    :class:`X` is a `Munch <https://pypi.org/project/munch/>`_.
    :meth:`X` is also a function that returns a new :class:`X`
    from a ``dict`` or a set of parameters.

    :class:`Munch` is "`a dictionary that supports attribute-style access`".
    :meth:`X` offers us a few options::

        user = X(id=1, name='sheila', email='*****@*****.**')
        user                            # we can create a new X from parameters
            X{'email': '*****@*****.**', 'id': 1, 'name': 'sheila'}
        user['id']           # we can index by the usual brace attribute syntax
            1
        user.name                        # or dot syntax if the key is a string
            'sheila'

        book = X({'title': 'thing explainer', 'author': 'randall monroe'})
        book                  # we can also create a new X by wrapping any dict
            X{'author': 'randall monroe', 'title': 'thing explainer'}
        len(book)            # just like a dict, an X's length is its key count
            2
        book.pages                              # we didn't define this one yet
            None

    Missing keys return ``None``; :class:`X` prefers to fail silently.

    :class:`X` also gives us the :meth:`toTOML` and :meth:`fromTOML` methods
    to help serialize to and deserialize from the
    `TOML <https://toml.io/>`_ format.
    Their signatures and behaviour match the :meth:`toJSON`/:meth:`fromJSON`
    and :meth:`toYAML`/:meth:`fromYAML` methods inherited from :class:`Munch`.

    :param _dict: a ``dict`` to :class:`X`-ify (defaults to empty)
    '''

    # A mutable default argument ({}) is a classic Python pitfall: the same
    # dict object would be shared by every call. Use None as the sentinel
    # and substitute a fresh empty dict per call instead.
    if _dict is None:
        _dict = {}
    params.update(_dict)
    return Munch.fromDict(params, None)
Пример #23
0
from munch import Munch, DefaultMunch
import copy

root = os.path.join(os.path.dirname(__file__), '..')


def load(url):
    """Fetch *url* and decode the response body as JSON."""
    with urllib.request.urlopen(url) as response:
        return json.loads(response.read().decode())


# Fetch the Zotero and Juris-M schemas over HTTP; missing keys on the
# resulting object resolve to None.
data = DefaultMunch.fromDict(
    {
        'zotero':
        load('https://api.zotero.org/schema'),
        'jurism':
        load(
            'https://raw.githubusercontent.com/Juris-M/zotero-schema/master/schema-jurism.json'
        )
    }, None)

print('Generating item field metadata...')
# Per-client lookup tables populated by the generation loop that follows.
ValidFields = DefaultMunch(None, {})
ValidTypes = {}
Alias = {}
Itemfields = set()
ItemCreators = {}
for client in data.keys():
    ItemCreators[client] = {}

    for spec in data[client].itemTypes:
Пример #24
0
 def update(self, id, usr2update):
     """Replace the stored user at *id* and echo the raw input back."""
     replacement = DefaultMunch.fromDict(usr2update, User())
     self.users[id] = replacement
     return usr2update
Пример #25
0
from pprint import pformat
from re import sub

from flask_babel import lazy_gettext, lazy_ngettext

from munch import DefaultMunch as Munch

from toml import dumps, loads

from . import dt

# Present DefaultMunch as "X" in reprs and introspection.
Munch.__name__ = 'X'
Munch.__repr__ = lambda self: f'X{pformat(self.__dict__)}'

# TOML round-tripping, mirroring Munch's toJSON/fromJSON helpers.
Munch.toTOML = lambda self: dumps(self)
Munch.fromTOML = lambda data: Munch.fromDict(loads(data))


def X(_dict={}, **params):
    '''
    :class:`X` is a `Munch <https://pypi.org/project/munch/>`_.
    :meth:`X` is also a function that returns a new :class:`X`
    from a ``dict`` or a set of parameters.

    :class:`Munch` is "`a dictionary that supports attribute-style access`".
    :meth:`X` offers us a few options::

        user = X(id=1, name='sheila', email='*****@*****.**')
        user                            # we can create a new X from parameters
            X{'email': '*****@*****.**', 'id': 1, 'name': 'sheila'}
        user['id']           # we can index by the usual brace attribute syntax
Пример #26
0
from mplotter import *
from DDPGv2Agent import Agent, Noise
from collections import deque
from FireflyEnv.env_utils import range_angle
from munch import DefaultMunch

# Rolling window of recent episode rewards.
rewards = deque(maxlen=100)

# read configuration parameters
#from Config import Config
#arg = Config()

filename = '20191231-172726-01081157'  # agent information
# NOTE(review): 'arg' is used on the next line before it is assigned below.
# This only works if an earlier definition outside this snippet provides
# arg.data_path -- the commented-out Config import above is the likely
# original source. Confirm before running standalone.
argument = torch.load(arg.data_path + 'data/' + '20191231-172726_arg.pkl')

arg = DefaultMunch.fromDict(argument)

# fix random seed
import random
random.seed(arg.SEED_NUMBER)
import torch
torch.manual_seed(arg.SEED_NUMBER)
if torch.cuda.is_available():
    torch.cuda.manual_seed(arg.SEED_NUMBER)
import numpy as np
np.random.seed(arg.SEED_NUMBER)
import time

# Deterministic cuDNN so runs are reproducible (at some speed cost).
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False
Пример #27
0
def _main(cli_args, chosen_deployment_name):
    """Deployment entry point.

    :param cli_args: The command-line arguments
    :type cli_args: ``list``
    :param chosen_deployment_name: The deployment file
    :type chosen_deployment_name: ``str``
    :returns: True on success
    :rtype: ``bool``
    """

    config_file = os.path.join(
        OKD_DEPLOYMENTS_DIRECTORY, chosen_deployment_name,
        io.get_deployment_config_filename(chosen_deployment_name))
    if not os.path.isfile(config_file):
        print('Config file does not exist ({})'.format(chosen_deployment_name))
        return False
    with codecs.open(config_file, 'r', 'utf8') as stream:
        # yaml.load() without an explicit Loader is deprecated (and a
        # TypeError in PyYAML >= 6); safe_load is correct for config data.
        deployment = DefaultMunch.fromDict(yaml.safe_load(stream))

    # First check:
    # is the version present
    # and do we support it?
    if 'version' not in deployment:
        print('The deployment configuration has no version.')
        return False
    if deployment.version not in SUPPORTED_DEPLOYMENT_VERSIONS:
        supported_versions = str(SUPPORTED_DEPLOYMENT_VERSIONS[0])
        for version in SUPPORTED_DEPLOYMENT_VERSIONS[1:]:
            supported_versions += ', {}'.format(version)
        print('The deployment configuration file version ({})'
              ' is not supported.'.format(deployment.version))
        print('Supported versions are: {}'.format(supported_versions))
        return False

    # There must be an okd/inventories directory
    inventory_dir = deployment.okd.inventory_dir
    if not os.path.isdir('okd/inventories/{}'.format(inventory_dir)):
        print('Missing "okd/inventories" directory')
        print('Expected to find the inventory directory "{}"'
              ' but it was not there.'.format(inventory_dir))
        print('Every deployment must have an "inventories" directory')
        return False

    # If the cluster SSH user is not defined,
    # insert it.
    if 'ssh_user' not in deployment.cluster:
        print('Setting default SSH user "{}"'.format(
            OKD_DEFAULT_CLUSTER_SSH_USER))
        deployment.cluster.ssh_user = OKD_DEFAULT_CLUSTER_SSH_USER

    # -----
    # Hello
    # -----
    io.banner(deployment.name, full_heading=True, quiet=False)
    if not cli_args.auto_acknowledge and not cli_args.just_plan:

        # Display the orchestration description
        # (if there is one)
        if deployment.description:
            io.description(deployment.description)

        confirmation_word = io.get_confirmation_word()
        target = 'CREATE the Cluster' \
            if cli_args.cluster else 'INSTALL OpenShift/OKD'
        confirmation = input('Enter "{}" to {}: '.format(
            confirmation_word, target))
        if confirmation != confirmation_word:
            print('Phew! That was close!')
            return True

    # Some key information...
    okd_admin_password = os.environ.get(OKD_ADMIN_PASSWORD_ENV)
    if not okd_admin_password:
        io.error('You must define {}'.format(OKD_ADMIN_PASSWORD_ENV))

    okd_api_hostname = deployment.cluster.public_hostname
    okd_api_port = deployment.cluster.api_port

    # -------
    # Ansible (A specific version)
    # -------
    # Install the ansible version name in the deployment file

    cmd = 'pip install --upgrade pip setuptools --user'
    rv, _ = io.run(cmd, '.', cli_args.quiet)
    if not rv:
        return False

    # NOTE(review): this repeats the identical upgrade command. It may be
    # deliberate (the second run executes under the freshly upgraded pip);
    # confirm before removing.
    cmd = 'pip install --upgrade pip setuptools --user'
    rv, _ = io.run(cmd, '.', cli_args.quiet)
    if not rv:
        return False

    cmd = 'pip install ansible=={} --user'. \
        format(deployment.okd.ansible_version)
    rv, _ = io.run(cmd, '.', cli_args.quiet)
    if not rv:
        return False

    t_dir = deployment.cluster.terraform_dir
    if cli_args.cluster:

        # ------
        # Render (jinja2 files)
        # ------
        # Translate content of Jinja2 template files
        # using the deployment configuration's YAML file content.

        if not cli_args.skip_rendering:

            cmd = './render.py {} --ssh-user {}'.\
                format(chosen_deployment_name,
                       deployment.cluster.ssh_user)
            cwd = '.'
            rv, _ = io.run(cmd, cwd, cli_args.quiet)
            if not rv:
                return False

        # If the deployment file has a 'my_machines' section
        # then we assume the user's provided their own cluster
        # and the Terraform step is not needed.
        if 'my_machines' in deployment:

            # -----------------
            # Manual Templating
            # -----------------
            # The user has provided their own cluster
            # and defined it in the my_machines section
            # of their deployment configuration.
            #
            # Here we process the rendered inventory files
            # just as Terraform would do.
            io.banner('Templating ...')
            print('inventory')
            if not templater.render(deployment):
                return False

            print('bastion/inventory')
            file_name = 'ansible/bastion/inventory.yaml.tpl'
            if not templater.\
                    render(deployment,
                           template_file_name=file_name):
                return False

            print('post-okd/inventory')
            file_name = 'ansible/post-okd/inventory.yaml.tpl'
            if not templater. \
                    render(deployment,
                           template_file_name=file_name,
                           admin_password=okd_admin_password):
                return False

        else:

            # ---------
            # Terraform
            # ---------
            # Create compute instances for the cluster.

            cmd = 'terraform init'
            cwd = 'terraform/{}'.format(t_dir)
            rv, _ = io.run(cmd, cwd, cli_args.quiet)
            if not rv:
                return False

            # Plan or Apply?
            action = 'plan' if cli_args.just_plan else 'apply -auto-approve'
            cmd = 'terraform {}' \
                  ' -state=.terraform.{}'.format(action,
                                                 chosen_deployment_name)
            cwd = 'terraform/{}'.format(t_dir)
            rv, _ = io.run(cmd, cwd, cli_args.quiet)
            if not rv:
                return False

        if cli_args.just_plan:
            # Just plan means just that...
            return True

        # -------
        # Ansible
        # -------
        # Run the bastion site file.

        if not cli_args.skip_pre_okd:

            extra_env = ''
            if deployment.okd.certificates:
                if deployment.okd.certificates.generate_api_cert:

                    certbot_email = os.environ.get(OKD_CERTBOT_EMAIL_ENV)
                    if not certbot_email:
                        io.error(
                            'You must define {}'.format(OKD_CERTBOT_EMAIL_ENV))
                        return False

                    extra_env += ' -e master_cert_email="{}"'.\
                        format(certbot_email)
                    extra_env += ' -e public_hostname="{}"'. \
                        format(deployment.cluster.public_hostname)

                elif (deployment.okd.certificates.wildcard_cert
                      or deployment.okd.certificates.master_api_cert):

                    # User-supplied certificates -
                    # expect a vault password file
                    # in the deployment directory
                    extra_env += ' --vault-password-file' \
                                 ' {}/{}/vault-pass.txt'.\
                        format(OKD_DEPLOYMENTS_DIRECTORY,
                               chosen_deployment_name)

            if OKD_DEPLOYMENTS_DIRECTORY != 'deployments':
                extra_env += ' -e deployments_directory="{}"'.\
                    format(OKD_DEPLOYMENTS_DIRECTORY)
            else:
                extra_env += ' -e deployments_directory="../../deployments"'

            keypair_name = os.environ.get(OKD_KEYPAIR_NAME_ENV)
            if not keypair_name:
                io.error('You must define {}'.format(OKD_KEYPAIR_NAME_ENV))
                return False

            cmd = 'ansible-playbook site.yaml' \
                  ' {}' \
                  ' -e keypair_name={}' \
                  ' -e inventory_dir={}' \
                  ' -e cluster_ssh_user={}' \
                  ' -e deployment_name={}'.format(extra_env,
                                                  keypair_name,
                                                  deployment.okd.inventory_dir,
                                                  deployment.cluster.ssh_user,
                                                  chosen_deployment_name)
            cwd = 'ansible/bastion'
            rv, _ = io.run(cmd, cwd, cli_args.quiet)
            if not rv:
                return False

        # Now expose the Bastion's IP...

        if 'my_machines' in deployment:

            # Simulate the final step in Terraform,
            # i.e. exposing the bastion.
            # Doing this simplifies things for the user
            # i.e. "it looks and feels the same"

            io.banner('terraform output ...')
            print('bastion_ip = {}'.format(deployment.my_machines.bastion))

        else:

            cmd = 'terraform output' \
                  ' -state=.terraform.{}'.format(chosen_deployment_name)
            cwd = 'terraform/{}'.format(t_dir)
            rv, _ = io.run(cmd, cwd, cli_args.quiet)
            if not rv:
                return False

        # Leave.
        return True

    # If we get here we're installing OpenShift/OKD
    # (on a cluster that is assumed to exist)...

    # -----
    # Clone (OpenShift Ansible Repo)
    # -----
    # ...and checkout the revision defined by the deployment tag.

    if not cli_args.skip_okd:

        # If the expected clone directory does not exist
        # then clone OpenShift Ansible.
        if not os.path.exists('openshift-ansible'):

            cmd = 'git clone' \
                  ' https://github.com/openshift/openshift-ansible.git' \
                  ' --no-checkout'
            cwd = '.'
            rv, _ = io.run(cmd, cwd, cli_args.quiet)
            if not rv:
                return False

        # Checkout the required OpenShift Ansible TAG
        cmd = 'git checkout tags/{}'. \
            format(deployment.okd.ansible_tag)
        cwd = 'openshift-ansible'
        rv, _ = io.run(cmd, cwd, cli_args.quiet)
        if not rv:
            return False

    # -------
    # Ansible (Pre-OKD)
    # -------

    if not cli_args.skip_pre_okd:

        extra_env = ''
        if deployment.okd.certificates and\
                deployment.okd.certificates.generate_api_cert:
            extra_env += ' -e public_hostname={}'. \
                format(deployment.cluster.public_hostname)
        cmd = 'ansible-playbook site.yaml' \
              ' {}' \
              ' -i ../../okd/inventories/{}/inventory.yaml'.\
            format(extra_env, inventory_dir)
        cwd = 'ansible/pre-okd'
        rv, _ = io.run(cmd, cwd, cli_args.quiet)
        if not rv:
            return False

    # -------
    # Ansible (OKD)
    # -------
    # Deploy using the playbooks named in the deployment
    # (from the checked-out version).

    if not cli_args.skip_okd:

        for play in deployment.okd.play:
            cmd = 'ansible-playbook ../openshift-ansible/playbooks/{}.yml' \
                  ' -i inventories/{}/inventory.yaml'.\
                format(play, inventory_dir)
            cwd = 'okd'
            rv, _ = io.run(cmd, cwd, cli_args.quiet)
            if not rv:
                return False

    # -------
    # Ansible (Post-OKD)
    # -------

    if not cli_args.skip_post_okd:

        # Always run the 'site' playbook.
        # This adds the OKD admin and (optional) developer user accounts
        # and other common things like template deployment.
        #
        # The following variables are made available to all the playbooks: -
        #
        # - okd_api_hostname
        # - okd_admin
        # - okd_admin_password

        extra_env = ''
        dev_password = os.environ.get(OKD_DEVELOPER_PASSWORD_ENV)
        if dev_password:
            extra_env += ' -e okd_developer_password={}'.format(dev_password)
        # The template namespace
        # (optionally defined in the configuration)
        if deployment.okd.template and deployment.okd.template.namespace:
            template_namespace = deployment.okd.template.namespace
            extra_env += ' -e template_namespace={}'.format(template_namespace)
        cmd = 'ansible-playbook site.yaml' \
              '{}' \
              ' -e okd_api_hostname=https://{}:{}' \
              ' -e okd_admin=admin' \
              ' -e okd_admin_password={}' \
              ' -e okd_deployment={}'. \
            format(extra_env,
                   okd_api_hostname, okd_api_port,
                   okd_admin_password, chosen_deployment_name)
        cwd = 'ansible/post-okd'
        rv, _ = io.run(cmd, cwd, cli_args.quiet)
        if not rv:
            return False

        # Now iterate through the plays listed in the cluster's
        # 'post_okd' list...

        if deployment.okd.post_okd:
            for play in deployment.okd.post_okd:
                # Any user-defined 'extra' variables?
                play_vars = ''
                if play.vars:
                    for var in play.vars:
                        play_vars += '-e {} '.format(var)
                    play_vars = play_vars[:-1]
                # Run the user playbook...
                cmd = 'ansible-playbook playbooks/{}/deploy.yaml' \
                    ' -e okd_api_hostname=https://{}:{}' \
                    ' -e okd_admin=admin' \
                    ' -e okd_admin_password={}' \
                    ' -e okd_deployment={}' \
                    ' {}'.\
                    format(play.play,
                           okd_api_hostname, okd_api_port,
                           okd_admin_password, chosen_deployment_name,
                           play_vars)
                cwd = 'ansible/post-okd'
                rv, _ = io.run(cmd, cwd, cli_args.quiet)
                if not rv:
                    return False

    # -------
    # Success
    # -------

    # OK if we get here.
    # Cluster created and OKD installed.
    return True
Пример #28
0
def test_fromDict_default():
    """fromDict must install the default for keys absent from the source."""
    missing = object()
    munched = DefaultMunch.fromDict({'urmom': {'sez': {'what': 'what'}}},
                                    missing)
    # Present keys keep their values; absent keys yield the default object.
    assert munched.urmom.sez.what == 'what'
    assert munched.urmom.sez.foo is missing
Пример #29
0
def test_fromDict_default():
    """The default must propagate into nested DefaultMunch levels."""
    sentinel = object()
    nested = {'urmom': {'sez': {'what': 'what'}}}
    b = DefaultMunch.fromDict(nested, sentinel)
    assert b.urmom.sez.what == 'what'
    # An unknown attribute at any depth resolves to the sentinel.
    assert b.urmom.sez.foo is sentinel
Пример #30
0
# Tuning knobs taken from Django settings.
MAXIMUM_STACK_SIZE = settings.WEBHOOK_MAXIMUM_STACK_SIZE
MAXIMUM_TRIES = settings.WEBHOOK_MAXIMUM_TRIES
QUEUE_NAME = settings.WEBHOOK_QUEUE_NAME

# AWS handles used by the polling loop below.
sqs = boto3.resource('sqs')
s3 = boto3.client('s3')

queue = sqs.get_queue_by_name(QueueName=QUEUE_NAME)

# Pending (bucket, key) records accumulated across poll iterations.
stack = []
stack_size = 0


while True:
	for message in queue.receive_messages(MaxNumberOfMessages=10):
		body = munch.fromDict(json.loads(message.body))

		for record in body.Records:
			stack.append(munch(bucket=record.s3.bucket.name, key=record.s3.object.key))

	previous_stack_size = stack_size
	stack_size = len(stack)

	if previous_stack_size != stack_size and stack_size < MAXIMUM_STACK_SIZE:
		continue	

	# Process SQS document messages
	gevent.wait([gevent.spawn(create_document, s3, message, i.bucket, i.key) for i in stack[:MAXIMUM_STACK_SIZE]])

	# Process documents who should be webhooked
	documents_qs = Document.objects.filter(tries__lt=MAXIMUM_TRIES, is_ready=False)
Пример #31
0
def test_pickle():
    """A DefaultMunch must survive a pickle round trip unchanged."""
    original = DefaultMunch.fromDict({"a": "b"})
    restored = pickle.loads(pickle.dumps(original))
    assert restored == original
Пример #32
0
  # missing variable mapping
  schema.csl.fields.text['volume-title'] = [ 'volumeTitle' ]

  # status is publication status, not legal status
  schema.csl.fields.text.status = [ 'status ']

  with open(os.path.join(root, 'setup/csl-vars.json')) as f:
    csl_vars = set(json.load(f).keys())
    fix_csl_vars(schema.csl.fields.text, 'jurism', csl_vars)
    fix_csl_vars(schema.csl.fields.date, 'jurism', csl_vars)
    fix_csl_vars(schema.csl.names, 'jurism', csl_vars)

  return Munch.toDict(schema)

# Fetch and patch both schemas; the None default makes absent keys read as
# None instead of raising.
data = DefaultMunch.fromDict({
  'zotero': fix_zotero_schema(load('https://api.zotero.org/schema', 'zotero.json')),
  'jurism': fix_jurism_schema(load('https://raw.githubusercontent.com/Juris-M/zotero-schema/master/schema-jurism.json', 'juris-m.json')),
}, None)

class ExtraFields:
  @staticmethod
  def to_json(obj):
    """JSON hook: render a Munch as a dict, dropping None and empty lists."""
    if not isinstance(obj, Munch):
      # Non-Munch values pass through untouched.
      return obj
    return {
      key: value
      for key, value in Munch.toDict(obj).items()
      if value is not None and not (type(value) == list and len(value) == 0)
    }

  def __init__(self):
Пример #33
0
def test_pickle():
    """Pickling then unpickling a DefaultMunch yields an equal object."""
    b = DefaultMunch.fromDict({"a": "b"})
    round_tripped = pickle.loads(pickle.dumps(b))
    assert round_tripped == b