Example #1
def config_logging():
    """
    loads configuration for the logging module
    
    The file that is used to load logging configuration is "./log_config.yaml"    
    """

    yaml = YAML(typ="safe")
    with current_app.open_resource(current_app.config['LOGGING_CONFIG']) as f:
        config = yaml.load(f)

    # enable debug-level output if the VERLOOP_DEBUG environment variable is set
    log_lvl = "INFO"
    if os.environ.get('VERLOOP_DEBUG', False):
        log_lvl = "DEBUG"
    
    update_config = {
        "handlers": {
            "console": {
                "level": log_lvl
            },
            "file": {
                "filename": current_app.config['LOGS']
            }
        }
    }

    pydash.merge(config, update_config)

    logging.config.dictConfig(config)
    LOG.debug("Logger Configured!")
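A minimal sketch of what the pydash.merge call above does, using hypothetical config values: the update dict only needs to carry the keys that change, and merge folds it into the loaded YAML config recursively without discarding sibling keys.

import pydash

config = {
    "handlers": {
        "console": {"class": "logging.StreamHandler", "level": "INFO"},
        "file": {"class": "logging.FileHandler", "filename": "app.log"},
    }
}
update_config = {"handlers": {"console": {"level": "DEBUG"}}}

pydash.merge(config, update_config)
# config["handlers"]["console"] == {"class": "logging.StreamHandler", "level": "DEBUG"}
# config["handlers"]["file"] is left untouched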
    def _convertDictionaryToObject(self, name, dictionary):
        name = re.sub(r"[^\w]", "", name)  # strip non-alphanumeric characters from the name
        name = "random" if name == "" else name  # use "random" if the name is empty after stripping
        if (type(dictionary) == dict):
            invalid_name_for_deletion = []
            new_names = {}
            for item in dictionary:
                if type(dictionary[item]) == dict or type(
                        dictionary[item]) == list:
                    dictionary[item] = self._convertDictionaryToObject(
                        item, dictionary[item])
                # strip name of non-alphanumeric characters
                if re.search(r"[^\w]", item):
                    new_name = re.sub(r"[^\w]", "", item)
                    new_name = "random" if new_name == "" else new_name
                    new_names[new_name] = dictionary[item]
                    invalid_name_for_deletion.append(item)

            if len(invalid_name_for_deletion) > 0:
                for item in invalid_name_for_deletion:
                    del dictionary[item]
                pydash.merge(dictionary, new_names)

            return namedtuple(name, dictionary.keys())(*dictionary.values())
        else:  # this is actually a list
            for index, item in enumerate(dictionary):
                if type(item) == dict or type(item) == list:
                    dictionary[index] = self._convertDictionaryToObject(
                        name, item)
            return dictionary
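The namedtuple construction used in the return statement above can be shown in isolation; this assumes a flat dict whose keys are already valid Python identifiers.

from collections import namedtuple

d = {"host": "localhost", "port": 8080}
Config = namedtuple("Config", d.keys())
obj = Config(*d.values())
# obj.host == "localhost", obj.port == 8080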
Example #3
    def __init__(self, config_path=None):

        self._version = None
        self._data = {}
        dd = self._data
        dd['FLASK'] = {}

        _.merge(dd, yaml.load(_get_default_yaml()))

        if config_path is None:
            config_path = os.path.join(cwd(), 'recordingmonitor.yml')
            log.info('Load configuration from DEFAULT path: %s', config_path)
        else:
            log.info('Load configuration from path: %s', config_path)

        self._config_path = config_path

        try:
            with open(config_path, 'r') as stream:
                file_conf = yaml.load(stream)
                _.merge(dd, file_conf)

        except FileNotFoundError:
            log.debug(
                'There is NO config file, continue using defaults and env')
            print('There is NO config file, continue using defaults and env')

        # LOAD ENVIRONMENT:
        eover = self._gather_from_environment()
        for stg, val in eover.items():
            _.set_(dd, stg, val)

        # set default values for settings which could not be
        # initialized declaratively:
        self.apply_defaults('log.version', 1)
        self.apply_defaults('capture.paths.jobsRoot',
                            os.path.join(cwd(), 'tmp'))

        # list of debugging options
        self.apply_defaults('maintenance.rmdir', True)
        self.apply_defaults('capture.rmdir', True)

        # ----------------------------------------------------------------------
        # CONFIGURE LOGGING:
        # ----------------------------------------------------------------------
        logging.config.dictConfig(dd['log'])

        self._reset_frozendebug()

        if self.get('FLASK.DEBUG'):
            self._test_logging()

        # move connection string to expected place
        dbconfig = self.get('db.connection')
        if dbconfig is not None:
            self.connection_string = dbconfig

        log.debug('main database: %s', self.connection_string)
Example #4
def plot_line(*args, trace_kwargs=None, layout_kwargs=None, **kwargs):
    '''Plot line from df'''
    trace_kwargs = _.merge(dict(), trace_kwargs)
    layout_kwargs = _.merge(dict(), layout_kwargs)
    return plot_go(*args,
                   trace_class='Scatter',
                   trace_kwargs=trace_kwargs,
                   layout_kwargs=layout_kwargs,
                   **kwargs)
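The plotting helpers above all follow the same pattern: ps.merge (or _.merge) layers caller-supplied kwargs on top of per-chart defaults. A hedged sketch with made-up values:

import pydash as ps

defaults = dict(mode='lines', line=dict(width=1))
overrides = dict(line=dict(width=3))  # stands in for a caller-supplied trace_kwargs
trace_kwargs = ps.merge(dict(), defaults, overrides)
# trace_kwargs == {'mode': 'lines', 'line': {'width': 3}}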
Example #5
def plot_scatter(*args, trace_kwargs=None, layout_kwargs=None, **kwargs):
    '''Plot scatter from df'''
    trace_kwargs = ps.merge(dict(mode='markers'), trace_kwargs)
    layout_kwargs = ps.merge(dict(), layout_kwargs)
    return plot_go(*args,
                   trace_class='Scatter',
                   trace_kwargs=trace_kwargs,
                   layout_kwargs=layout_kwargs,
                   **kwargs)
Example #6
def plot_line(*args, trace_kwargs=None, layout_kwargs=None, **kwargs):
    '''Plot line from df'''
    trace_kwargs = ps.merge(dict(mode='lines', line=dict(width=1)),
                            trace_kwargs)
    layout_kwargs = ps.merge(dict(), layout_kwargs)
    return plot_go(*args,
                   trace_class='Scatter',
                   trace_kwargs=trace_kwargs,
                   layout_kwargs=layout_kwargs,
                   **kwargs)
Example #7
def plot_bar(
    *args, barmode='stack', orientation='v',
    trace_kwargs=None, layout_kwargs=None,
        **kwargs):
    '''Plot bar chart from df'''
    trace_kwargs = ps.merge(dict(orientation=orientation), trace_kwargs)
    layout_kwargs = ps.merge(dict(barmode=barmode), layout_kwargs)
    return plot_go(
        *args, trace_class='Bar',
        trace_kwargs=trace_kwargs, layout_kwargs=layout_kwargs,
        **kwargs)
Example #8
def plot_histogram(
    *args, barmode='overlay', xbins=None, histnorm='count', orientation='v',
    trace_kwargs=None, layout_kwargs=None,
        **kwargs):
    '''Plot histogram from df'''
    trace_kwargs = ps.merge(dict(orientation=orientation, xbins={}, histnorm=histnorm), trace_kwargs)
    layout_kwargs = ps.merge(dict(barmode=barmode), layout_kwargs)
    return plot_go(
        *args, trace_class='Histogram',
        trace_kwargs=trace_kwargs, layout_kwargs=layout_kwargs,
        **kwargs)
Example #9
def plot_scatter(
    *args,
    trace_kwargs=None, layout_kwargs=None,
        **kwargs):
    '''Plot scatter from df'''
    trace_kwargs = ps.merge(dict(mode='markers'), trace_kwargs)
    layout_kwargs = ps.merge(dict(), layout_kwargs)
    return plot_go(
        *args, trace_class='Scatter',
        trace_kwargs=trace_kwargs, layout_kwargs=layout_kwargs,
        **kwargs)
Example #10
def plot_line(
    *args,
    trace_kwargs=None, layout_kwargs=None,
        **kwargs):
    '''Plot line from df'''
    trace_kwargs = ps.merge(dict(mode='lines', line=dict(width=1)), trace_kwargs)
    layout_kwargs = ps.merge(dict(), layout_kwargs)
    return plot_go(
        *args, trace_class='Scatter',
        trace_kwargs=trace_kwargs, layout_kwargs=layout_kwargs,
        **kwargs)
def response_for_exception(request, exception):
    response = base_response_for_exception(request, exception)

    if isinstance(exception, ValidationError):
        dont_flash = [
            'password',
            'password_confirmation',
        ]

        old_input = pydash.merge(parser.parse(request.GET.urlencode()),
                                 parser.parse(request.POST.urlencode()))
        errors = exception.message_dict if hasattr(
            exception, 'error_dict') else {
                '__all__': exception.messages
            }

        response = HttpRedirector(request) \
            .back() \
            .with_input(pydash.omit(old_input, dont_flash)) \
            .with_errors(errors)

    if request.match('api/*') and not isinstance(response, JsonResponse):
        message = str(exception)
        data = {}

        if isinstance(exception, ValidationError):
            status_code = 422
            message = _('Invalid data!')
            data['errors'] = exception.message_dict if hasattr(
                exception, 'error_dict') else {
                    '__all__': exception.messages
                }
        elif isinstance(exception, Http404):
            status_code = 404
        elif isinstance(exception, BadRequest):
            status_code = 400
        elif isinstance(exception, AuthenticationException):
            status_code = 401
        elif isinstance(exception, PermissionDenied):
            status_code = 403
        else:
            status_code = 500

            if not settings.DEBUG:
                message = _('Something went wrong')

        if len(message.strip()) == 0:
            message = _(status_codes._codes[status_code][0])

        return JsonResponse(pydash.merge({'message': message}, data),
                            status=status_code)

    return response
Example #14
def plot_area(
    *args, fill='tonexty', stack=False,
    trace_kwargs=None, layout_kwargs=None,
        **kwargs):
    '''Plot area from df'''
    if stack:
        df, y_col = args[:2]
        stack_df = stack_cumsum(df, y_col)
        args = (stack_df,) + args[1:]
    trace_kwargs = ps.merge(dict(fill=fill, mode='lines', line=dict(width=1)), trace_kwargs)
    layout_kwargs = ps.merge(dict(), layout_kwargs)
    return plot_go(
        *args, trace_class='Scatter',
        trace_kwargs=trace_kwargs, layout_kwargs=layout_kwargs,
        **kwargs)
 def _send_x_delete(self, api, headers, params, cookies, auth,
                    genericParameters):
     parameters = {
         "url": re.sub(r"(?<!:)/+", "/", f"{self.baseUrl}/{api}"),
         "headers": headers,
         "cookies": cookies,
         "verify": self.sslverify,
         "params": params
     }
     if auth:
         parameters["auth"] = auth
     pydash.merge(parameters, genericParameters)
     response = self.session.delete(**parameters)
     self.log_action("DELETE", response)
     return response
Example #17
    def _create_match_array(self, request, match_array, agg_query, base_string=""):
        nested_queries = {}
        # is_agg_query_scope = any([key.startswith(
        #     op) for op in self.agg_operators for key in request])
        for f in request:
            f_val = request[f]
            (f, op) = self._extract_operator(f)
            if f_val is None and op is None:
                continue

            if type(f_val) is dict:
                newrequest = f_val
                (f_subclass, ) = self._gather_info_from_toflerdb(
                    f, ['subclass'])
                if collection.intersection(f_subclass, [
                        'to:ComplexRelationalProperty',
                        'to:RelationalProperty']):
                    newrequest = self._prepare_direct_request_body(
                        f, f_val)

                new_agg_query = {}
                child_nested_queries = self._create_match_array(
                    newrequest, match_array, new_agg_query, base_string + f + ".")

                if new_agg_query:
                    agg_query.setdefault("aggs", {}).update(new_agg_query)

                if child_nested_queries:
                    grp_name = base_string.replace(".", "_") if base_string else "root"
                    agg_query.setdefault(grp_name, {
                        "terms": {"field": "id"},
                        "aggs": {}
                    })["aggs"].update(child_nested_queries)

            else:
                field_name = self._get_field_name(f, base_string)
                is_agg_query, is_nested_query, op_query = self._operator_query_builder(
                    op, field_name, f_val, f, base_string)
                if not is_agg_query:
                    match_array.append(op_query)
                else:
                    #for merging if agg belongs to same nested scope
                    if is_nested_query:
                        merge(nested_queries, op_query)
                    else:
                        agg_query.update(op_query)

        return nested_queries
    async def test_remove_status_code(self, *args):
        mock_cache = {
            'endpoint': 'some-endpoint',
            'timeout': 10,
            '_id': 'some-id'
        }
        mock_status_codes = [200]
        mock_id = 'some-value'
        mock_db = MagicMock()
        mock_hgetall = CoroutineMock()
        mock_srem = CoroutineMock()
        mock_hgetall.return_value = mock_cache
        mock_db.hgetall = mock_hgetall
        mock_db.srem = mock_srem

        try:
            await EndpointCacher.remove_status_codes(mock_status_codes,
                                                     mock_id, mock_db)
        except Exception as err:
            mock_hgetall.assert_awaited()
            expect(mock_hgetall.await_args[0][0]).to(equal(mock_id))

        mock_cache = pydash.merge(mock_cache, {'response_codes': 'some-value'})
        await EndpointCacher.remove_status_codes(mock_status_codes, mock_id,
                                                 mock_db)
        expect(mock_srem.await_count).to(equal(len(mock_status_codes)))
 async def test_get_by_id(self, *args):
     response_codes_id = 'some-id'
     expected_cache = {
         'endpoint': 'some-endpoint',
         'timeout': 10,
         'response_codes': response_codes_id,
         '_id': 'some-id'
     }
     expected_response_codes = [200, 300]
     mock_id = 'some-value'
     mock_db = MagicMock()
     mock_hgetall = CoroutineMock()
     mock_smembers = CoroutineMock()
     mock_hgetall.return_value = expected_cache
     mock_smembers.return_value = expected_response_codes
     mock_db.hgetall = mock_hgetall
     mock_db.smembers = mock_smembers
     cache = await EndpointCacher.get_by_id(mock_id, mock_db)
     mock_hgetall.assert_awaited()
     expect(mock_hgetall.await_args[0][0]).to(equal(mock_id))
     mock_smembers.assert_awaited()
     expect(mock_smembers.await_args[0][0]).to(equal(response_codes_id))
     expect(cache).to(
         equal(
             pydash.merge(expected_cache,
                          {'response_codes': expected_response_codes})))
 def delete(self,
            api,
            additionalHeaders={},
            params={},
            cookies={},
            auth=None,
            genericParameters={}):
     headers = {
         "Content-Type": "application/json",
         "Accept": "application/json"
     }
     pydash.merge(headers, additionalHeaders)
     response = self._send_x_delete(api, headers, params, cookies, auth,
                                    genericParameters)
     self._check_response(response)
     return Response(response)
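A hedged sketch of how delete() combines its default headers with caller extras before handing off to _send_x_delete; the header values here are invented:

import pydash

headers = {"Content-Type": "application/json", "Accept": "application/json"}
additionalHeaders = {"Authorization": "Bearer <token>", "Accept": "text/plain"}
pydash.merge(headers, additionalHeaders)
# headers keeps the default Content-Type, gains Authorization,
# and the caller's Accept value wins over the default.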
 async def test_get_all(self, *args):
     with asynctest.patch.object(DB, 'fetch_members') as fetch_members_mock:
         response_codes_id = 'some-id'
         expected_cache = {
             'endpoint': 'some-endpoint',
             'timeout': 10,
             'response_codes': response_codes_id,
             '_id': 'some-id'
         }
         expected_response_codes = [200, 300]
         mock_keys = ['some-id']
         mock_db = MagicMock()
         mock_hgetall = CoroutineMock()
         mock_smembers = CoroutineMock()
         fetch_members_mock.return_value = mock_keys
         mock_hgetall.return_value = expected_cache
         mock_db.hgetall = mock_hgetall
         mock_db.smembers = mock_smembers
         mock_smembers.return_value = expected_response_codes
         caches = await EndpointCacher.get_all(mock_db)
         fetch_members_mock.assert_awaited()
         mock_hgetall.assert_awaited()
         mock_smembers.assert_awaited()
         expect(mock_smembers.await_args[0][0]).to(equal(response_codes_id))
         expect(caches).to(have_len(1))
         expect(caches[0]).to(
             equal(
                 pydash.merge(expected_cache,
                              {'response_codes': expected_response_codes})))
Example #22
 def __init__(self, android_id, auth_token, language='en-US', device={},
              **kwargs):
     self.android_id = android_id
     self.auth_token = auth_token
     self.language = language
     self.device = pydash.merge({}, DEFAULT_DEVICE, device)
     self.request_options_base = kwargs
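A small sketch of the device-defaults merge in the constructor above; the contents of DEFAULT_DEVICE are invented for illustration. Passing an empty dict as the first argument keeps DEFAULT_DEVICE itself unmodified, since merge mutates only its first argument.

import pydash

DEFAULT_DEVICE = {"model": "Pixel", "sdk": 30}  # hypothetical defaults
device = {"sdk": 33}                            # per-instance override
merged = pydash.merge({}, DEFAULT_DEVICE, device)
# merged == {"model": "Pixel", "sdk": 33}; DEFAULT_DEVICE is untouched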
Example #23
def test_merge_no_link_dict():
    case1 = {'foo': {'bar': None}}
    case2 = {'foo': {'bar': False}}
    result = _.merge({}, case1, case2)
    result['foo']['bar'] = True

    assert case1 == {'foo': {'bar': None}}
    assert case2 == {'foo': {'bar': False}}
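The test above pins down that merge deep-copies its sources: mutating the merged result does not write back into case1 or case2. For contrast, a plain shallow copy shares the nested dict (a small sketch, not part of the original test):

import pydash as _  # same alias as in the tests above

src = {'foo': {'bar': None}}
shallow = dict(src)            # shallow copy: nested dict is shared
shallow['foo']['bar'] = True
# src == {'foo': {'bar': True}}   -- the source was mutated

src = {'foo': {'bar': None}}
deep = _.merge({}, src)        # merge copies nested containers
deep['foo']['bar'] = True
# src == {'foo': {'bar': None}}   -- the source is untouched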
Example #25
def test_merge_no_link_dict():
    case1 = {"foo": {"bar": None}}
    case2 = {"foo": {"bar": False}}
    result = _.merge({}, case1, case2)
    result["foo"]["bar"] = True

    assert case1 == {"foo": {"bar": None}}
    assert case2 == {"foo": {"bar": False}}
Example #26
 async def get_matched_paths(path: str, db: AsyncIOMotorCollection):
     matches = []
     async for ctx in db.find({}):
         if pydash.has(ctx, 'path'):
             match = re.match(ctx['path'], path)
             match and matches.append(pydash.merge(
                 ctx, {'regex_groups': match.groups()}))
     return matches
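A hedged sketch of a single iteration of the loop above, with an invented path pattern: when the stored regex matches, the captured groups are merged into the context document.

import re
import pydash

ctx = {'path': r'^/users/(\d+)$'}          # hypothetical stored context
match = re.match(ctx['path'], '/users/42')
match and pydash.merge(ctx, {'regex_groups': match.groups()})
# ctx == {'path': '^/users/(\\d+)$', 'regex_groups': ('42',)}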
Example #28
    def load_config(self):
        """
        Loads configurations from /config/ (Path relative to __nephos_dir__)

        Returns
        -------

        """

        # loading configuration
        self.logging_config = self.load_data("logging.yaml", True)
        self.maintenance_config = self.load_data("maintenance.yaml", True)
        self.modules_config = self.load_data("modules.yaml", True)

        # updating configuration as needed with manual data / environment variables
        config_update = list(self._config_update())
        pydash.merge(self.logging_config, config_update[0])
        pydash.merge(self.modules_config, config_update[1])
Example #29
    async def update(_id: str, ctx: object, db: AioRedis):
        """
        updates an endpoint cache.

        @param _id: (str) id of endpoint cache to update
        @param ctx: (object) data to use for update
        @param db: (object) db connection
        """
        await EndpointCacher._set_indexes(pydash.merge(ctx, {'_id': _id}), db)
        await db.hmset_dict(_id, ctx)
Example #30
 async def generate_token(payload: dict, db):
     sanitized_payload = pydash.merge(
         pydash.omit(payload, 'password', 'token'),
         {'timestamp': repr(time.time())}
     )
     token = Token.generate(sanitized_payload)
     update_ctx = {
         'token': token
     }
     await Admin.update(payload['_id'], update_ctx, db)
Example #31
    def __init__(self, uuid, options, configs, event_loop):

        self._uuid = uuid
        self._event_loop = event_loop

        self._options = options
        self._configs = configs

        pydash.merge(self._options['configs'], self._configs)
        # absolute path of the directory containing virtualworker.py
        dir_path = os.path.dirname(os.path.realpath(__file__))
        self._act_def_paths = dir_path + '/virtual_core/actions'
        self._msg_def_paths = dir_path + '/virtual_core/message_handlers'
        self.load_hooks()

        self.mqtt_client = None

        super().__init__(self._uuid, self._act_def_paths, self._msg_def_paths,
                         self._options, self._event_loop)
Example #32
def get_conf():
    conf = {}
    with open(os.path.join(os.path.dirname(__file__), 'config.yml'), 'r') as f:
        conf = yaml.load(f)
    user_conf_path = os.path.join(os.path.expanduser('~'), '.anydb/config.yml')
    if os.path.exists(user_conf_path) and not os.path.isdir(user_conf_path):
        with open(user_conf_path, 'r') as f:
            conf = _.merge(conf, yaml.load(f))
    conf = munchify(conf)
    conf.data = os.path.expanduser(conf.data)
    return conf
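The layered lookup in get_conf() (package defaults first, then the user's ~/.anydb/config.yml merged on top) can be sketched with in-memory dicts; the values are hypothetical:

import pydash as _

defaults = {'data': '~/.anydb/data', 'log': {'level': 'INFO'}}
user_conf = {'log': {'level': 'DEBUG'}}
conf = _.merge(defaults, user_conf)
# conf == {'data': '~/.anydb/data', 'log': {'level': 'DEBUG'}}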
Example #33
    async def update_entry(_id: str, ctx: object, db: AioRedis):
        """
        Updates a rate limiter entry.

        @param _id: (str) id of rate limiter entry to update
        @param ctx: (object) data to use for update
        @param db: (object) db connection
        """
        await asyncio.gather(
            RateLimiter._set_indexes(pydash.merge(ctx, {'_id': _id}), db),
            db.hmset_dict(_id, ctx)
        )
    def _send_x_post(self, api, payload, headers, params, cookies, auth,
                     genericParameters):
        parameters = {
            "url": re.sub(r"(?<!:)/+", "/",
                          f"{self.baseUrl}/{api}"),  #remove duplicate /
            "headers": headers,
            "cookies": cookies,
            "verify": self.sslverify,
            "params": params
        }
        if type(payload) == dict:
            parameters["json"] = payload
        else:
            parameters["data"] = payload

        if auth:
            parameters["auth"] = auth
        pydash.merge(parameters, genericParameters)
        response = self.session.post(**parameters)
        self.log_action("POST", response)
        return response
Example #35
 def __call__(self, parser, args, values, option_string=None):
     # I run some inspection here so the user can have a little more freedom with
     # their method signature if they want to without having to go to all *args and **kwargs
     inspected_args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, annotations = inspect.getfullargspec(
         func)
     if varkw == None and varargs == None and len(
             inspected_args
     ) == 1 and inspected_args[0] == 'self':
         func(self)
     elif varkw == None:
         params = pick(
             merge(kwargs, vars(args)),
             list(inspect.signature(func).parameters.keys()))
         if func.__name__ in params:
             params[func.__name__] = values
         func(self, *original_args, **params)
     else:
         params = merge(kwargs, vars(args))
         if func.__name__ in params:
             params[func.__name__] = values
         func(self, *original_args, **params)
Example #36
def get_base_repository_info(config):
    base_repositories = config.get('base_repositories')
    base_repository_info_list = []
    for base_repository in base_repositories:
        image_name_components = parse_image_name(base_repository)
        full_repo = image_name_components.get('full_repo')
        registry = image_name_components.get('registry')
        repo = image_name_components.get('repo')
        tag = image_name_components.get('tag')

        pprint(image_name_components)

        i = repo.rfind('/')
        if i != -1:
            base_repository_name = repo[i + 1:]
        else:
            base_repository_name = repo

        registry_config = {}
        if registry is not None:
            registry_config_file = os.path.join(os.getcwd(), 'registries',
                                                registry + '.yml')
            if os.path.exists(registry_config_file):
                registry_config = load_yaml(registry_config_file)
                pprint(registry_config_file)
                pprint(registry_config)
            else:
                registry_config = pydash.get(
                    config, ['base_repository_registries', registry], {})

        if tag is not None:
            tags = [tag]
        else:
            tags = list_repository_tags(
                repo,
                registry=registry,
                username=registry_config.get('username'),
                password=registry_config.get('password'),
                verify=registry_config.get('verify'))

        tags = [tag for tag in tags if tag != 'latest']
        tag_groups = group_tags(tags)

        base_repository_info = pydash.merge(image_name_components, {
            'name': base_repository_name,
            'tags': tags,
            'tag_groups': tag_groups
        })

        base_repository_info_list.append(base_repository_info)

    return base_repository_info_list
Example #37
    def createChartData(self, conjs):
        data = []
        node = {}
        for conj in conjs:
            # breakpoint()
            print(conj)
            conj = conj['values']
            v = conj['root']['tag']
            t = next(
                iter([a for a in self.affopts
                      if a['tag'] == conj['affopt']]))['gloss']
            vb = next(iter([verb for verb in self.verbs if verb['tag'] == v]))
            if 'red' == vb['thematic_relation']:
                p = [
                    p for p in self.pronouns
                    if p['tag'] == conj['pronoun']['agent']
                ][0]['gloss']
            elif 'blue' == vb['thematic_relation']:
                p = [
                    p for p in self.pronouns
                    if p['tag'] == conj['pronoun']['patient']
                ][0]['gloss']
            else:
                p = next(
                    iter([
                        p for p in self.pronouns
                        if p['tag'] == conj['pronoun']['agent']
                    ]))['gloss'] + ' > ' + next(
                        iter([
                            p for p in self.pronouns
                            if p['tag'] == conj['pronoun']['patient']
                        ]))['obj_gloss']
            val = self.returnValue(conj)
            # newconj = {v: { t: { p: val}}}
            newconj = {vb['gloss']: {p: {t: val}}}
            node = merge(node, newconj)

        for verb in node.keys():
            nv = {"name": verb, "children": []}
            for second in node[verb].keys():
                ns = {"name": second, "children": []}
                for third in node[verb][second].keys():
                    nt = {
                        "name": third,
                        "children": [{
                            "name": node[verb][second][third]
                        }]
                    }
                    ns['children'].append(nt)
                nv['children'].append(ns)
            data.append(nv)
        return data
Example #38
 def __init__(self, spec, aeb_space):
     self.spec = spec
     self.aeb_space = aeb_space
     aeb_space.env_space = self
     self.env_spec = spec['env']
     self.info_space = aeb_space.info_space
     self.envs = []
     for e, env_spec in enumerate(self.env_spec):
         env_spec = ps.merge(spec['meta'].copy(), env_spec)
         try:
             env = OpenAIEnv(env_spec, self, e)
         except gym.error.Error:
             env = UnityEnv(env_spec, self, e)
         self.envs.append(env)
Example #39
 def download(self, url, cookies):
     """
     Download file at the given URL with an authentication cookie specified
     in the cookies dictionary. The authentication cookie is typically
     called "MarketDA", but the name is specified in the delivery response,
     so could in principle change.
     """
     download_request_options = {
         "url": url,
         "headers": {"User-Agent": self.download_user_agent(), "Accept-Encoding": "identity"},
         "cookies": cookies,
         "verify": False,
     }
     options = pydash.merge({}, self.request_options_base, download_request_options)
     r = requests.get(**options)
     r.raise_for_status()
     return r.content
Example #40
 def request(self, endpoint, **kwargs):
     request_options_common = {
         "url": BASE_URL + endpoint,
         "headers": {
             "Accept-Language": self.language,
             "Authorization": "GoogleLogin auth=" + self.auth_token,
             "X-DFE-Device-Id": self.android_id,
             "X-DFE-Client-Id": CLIENT_ID,
             "User-Agent": self.user_agent(),
         },
         "verify": False,
     }
     options = pydash.merge({}, self.request_options_base, request_options_common, kwargs)
     r = requests.get(**options)
     r.raise_for_status()
     data = r.content
     message = finsky.protos.response_pb2.ResponseWrapper.FromString(data)
     return message
Example #41
 def __init__(self, android_id, auth_token, language="en-US", device={}, **kwargs):
     self.android_id = android_id
     self.auth_token = auth_token
     self.language = language
     self.device = pydash.merge({}, DEFAULT_DEVICE, device)
     self.request_options_base = kwargs
Example #42
def config(args=None):
    '''
    Parameters
    ----------
    args : argparse.Namespace, optional
        Arguments as parsed by :func:`parse_args`.

    See also
    --------
    :func:`parse_args`

    Returns
    -------
    configobj.ConfigObj
        Parsed (and potentially modified) configuration.
    '''
    if args is None:
        args = parse_args()

    config = md.config.Config(args.config)

    if args.command == 'locate':
        print config.filename
    elif args.command == 'show':
        if args.get:
            data = pydash.get(config.data.dict(), args.get)
        else:
            data = config.data.dict()

        if args.json:
            # Output in JSON.
            json.dump(obj=data, fp=sys.stdout, indent=4)
        elif args.yaml:
            # Output in YAML format.
            print yaml.dump(data, default_flow_style=False),
        elif isinstance(data, dict):
            # Output in `ini` format.
            output = io.BytesIO()
            configobj.ConfigObj(data).write(output)
            print output.getvalue(),
        else:
            print data
    elif args.command == 'edit':
        for action_i in ('append', 'prepend', 'set', 'remove', 'remove_key'):
            if getattr(args, action_i):
                action = action_i
                break

        if action in ('append', 'prepend', 'set', 'remove'):
            # Unpack key and new value.
            key, new_value = getattr(args, action)

            # Look up existing value.
            config_value = pydash.get(config.data, key)

            if action == 'set':
                # Set a key to a string value.

                # Create dictionary structure containing only the specified key
                # and value.
                nested_value = pydash.set_({}, key, new_value)
                # Merge nested value into existing configuration structure.
                pydash.merge(config.data, nested_value)
            else:
                # Action is a list action.

                if config_value is None:
                    # Create dictionary structure containing only empty list for
                    # specified key.
                    config_value = []
                    nested_value = pydash.set_({}, key, config_value)
                    # Merge nested value into existing configuration structure.
                    pydash.merge(config.data, nested_value)
                elif not isinstance(config_value, list):
                    print >> sys.stderr, 'Value at %s is not a list.' % key
                    raise SystemExit(1)

                if new_value in config_value:
                    # Remove value even if we are appending or prepending to
                    # avoid duplicate values.
                    config_value.remove(new_value)

                if args.append:
                    config_value.append(new_value)
                elif args.prepend:
                    config_value.insert(0, new_value)
        elif action == 'remove_key':
            key = getattr(args, action)

            if pydash.get(config.data, key) is not None:
                # Key exists.

                # Split key into levels.
                # Use [negative lookbehind assertion][1] to only split on
                # non-escaped '.' characters.
                #
                # [1]: https://stackoverflow.com/a/21107911/345236
                levels = re.split(r'(?<!\\)\.', key)
                parents = levels[:-1]

                parent = config.data

                for parent_i in parents:
                    parent = parent[parent_i]

                # Delete key from deepest parent.
                del parent[levels[-1]]
        if args.dry_run:
            output = io.BytesIO()
            config.data.write(output)
            print output.getvalue(),
        else:
            config.save()
    return config
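The 'set' branch above builds a nested dict for a dotted key with pydash.set_ and then merges it into the existing configuration. A minimal sketch with a made-up key:

import pydash

config_data = {'server': {'host': 'localhost', 'port': 8080}}
nested_value = pydash.set_({}, 'server.port', 9090)
# nested_value == {'server': {'port': 9090}}
pydash.merge(config_data, nested_value)
# config_data == {'server': {'host': 'localhost', 'port': 9090}}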
Example #43
def test_merge_no_link_list():
    case = {"foo": [{}]}
    result = _.merge({}, case)
    result["foo"][0]["bar"] = True

    assert case == {"foo": [{}]}
Example #44
def test_merge(case, expected):
    assert _.merge(*case) == expected
Example #45
def test_merge_no_link_list():
    case = {'foo': [{}]}
    result = _.merge({}, case)
    result['foo'][0]['bar'] = True

    assert case == {'foo': [{}]}