Example #1
 def test_factory(self):
     D, kw = self.D, self.kw
     assert merge(defaultdict(int, D({1: 2})), D({2: 3})) == {1: 2, 2: 3}
     assert merge(defaultdict(int, D({1: 2})), D({2: 3}), factory=lambda: defaultdict(int)) == defaultdict(
         int, D({1: 2, 2: 3})
     )
     assert not (merge(defaultdict(int, D({1: 2})), D({2: 3}), factory=lambda: defaultdict(int)) == {1: 2, 2: 3})
     assert raises(TypeError, lambda: merge(D({1: 2}), D({2: 3}), factoryy=dict))  # deliberately misspelled keyword must raise
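The test above exercises merge's optional factory keyword, which controls the type of the returned mapping. A minimal standalone sketch of the same behavior with toolz.merge:

from collections import OrderedDict
from toolz import merge

# merge returns a plain dict by default, regardless of the input types.
assert merge({1: 'one'}, {2: 'two'}) == {1: 'one', 2: 'two'}
# factory swaps in a different mapping constructor for the result.
assert isinstance(merge({1: 'one'}, {2: 'two'}, factory=OrderedDict), OrderedDict)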
Example #2
 def test_factory(self):
     D, kw = self.D, self.kw
     assert merge(defaultdict(int, D({1: 2})), D({2: 3})) == {1: 2, 2: 3}
     assert (merge(defaultdict(int, D({1: 2})), D({2: 3}),
                   factory=lambda: defaultdict(int)) ==
             defaultdict(int, D({1: 2, 2: 3})))
     assert not (merge(defaultdict(int, D({1: 2})), D({2: 3}),
                       factory=lambda: defaultdict(int)) == {1: 2, 2: 3})
     assert raises(TypeError, lambda: merge(D({1: 2}), D({2: 3}), factoryy=dict))  # deliberately misspelled keyword must raise
Example #3
def _extract_extra_data(start, stop, d, fields, comp_re, no_fields_filter):
    def _project_header_data(source_data, source_ts, selected_fields, comp_re):
        """Extract values from a header for merging into events

        Parameters
        ----------
        source_data : dict
        source_ts : dict
        selected_fields : set
        comp_re : SRE_Pattern

        Returns
        -------
        data_keys : dict
        data : dict
        timestamps : dict
        """
        fields = (set(filter(comp_re.match, source_data)) - selected_fields)
        data = {k: source_data[k] for k in fields}
        timestamps = {k: source_ts[k] for k in fields}

        return {}, data, timestamps  # no data_keys are extracted here

    if fields:
        event_fields = set(d['data_keys'])
        selected_fields = set(filter(comp_re.match, event_fields))
        discard_fields = event_fields - selected_fields
    else:
        discard_fields = set()
        selected_fields = set(d['data_keys'])

    objs_config = d.get('configuration', {}).values()
    config_data = merge(obj_conf['data'] for obj_conf in objs_config)
    config_ts = merge(obj_conf['timestamps'] for obj_conf in objs_config)
    all_extra_data = {}
    all_extra_ts = {}
    all_extra_dk = {}
    if not no_fields_filter:
        for dt, ts in [(config_data, config_ts),
                       (start, defaultdict(lambda: start['time'])),
                       (stop, defaultdict(lambda: stop['time']))]:
            # Look in the descriptor, then start, then stop.
            l_dk, l_data, l_ts = _project_header_data(dt, ts, selected_fields,
                                                      comp_re)
            all_extra_data.update(l_data)
            all_extra_ts.update(l_ts)
            selected_fields.update(l_data)
            all_extra_dk.update(l_dk)

    return (all_extra_dk, all_extra_data, all_extra_ts, discard_fields)
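Note that this example passes a single generator of dicts to merge: merge accepts either several mappings as positional arguments or one iterable of mappings. A small sketch (the objs_config sample data is hypothetical):

from toolz import merge

objs_config = [{'data': {'motor': 1.0}}, {'data': {'temp': 300.0}}]
# A single iterable argument is treated like unpacked positional dicts.
assert merge(obj_conf['data'] for obj_conf in objs_config) == {'motor': 1.0, 'temp': 300.0}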
Example #4
    def _render_(self, template_name, **extra_args):
        repr_args = merge(self.configuration.copy(), extra_args)

        if self.configuration['visualization_js']:
            repr_args['visualization_js'] = env.get_template(self.configuration['visualization_js']).render(**repr_args)
        else:
            raise Exception('Empty Visualization code!')

        if 'functions_js' in self.configuration and self.configuration['functions_js'] is not None:
            repr_args['functions_js'] = env.get_template(self.configuration['functions_js']).render(**repr_args)

        template = env.get_template(template_name)

        if 'figure_id' not in repr_args or not repr_args['figure_id']:
            repr_args['figure_id'] = 'fig-{0}'.format(uuid.uuid4())

        if 'vis_uuid' not in repr_args or not repr_args['vis_uuid']:
            repr_args['vis_uuid'] = 'matta-vis-{0}'.format(uuid.uuid4())

        if 'define_js_module' not in repr_args:
            repr_args['define_js_module'] = True

        if self.configuration['visualization_css']:
            try:
                repr_args['visualization_css'] = env.get_template(self.configuration['visualization_css']).render(**repr_args)
            except IOError:
                repr_args['visualization_css'] = None

        return template.render(**repr_args)
Example #5
    def _merge(self, matches):
        # get matches up to and including first important_match
        #   but if no important_match, then all matches are important_matches
        relevant_matches_and_values = tuple(
            (match, match.value(self))
            for match in self._first_important_matches(matches))
        for match, value in relevant_matches_and_values:
            if not isinstance(value, Mapping):
                raise InvalidTypeError(self.name, value, match.source,
                                       value.__class__.__name__,
                                       self._type.__name__)

        # mapkeys with important matches
        def key_is_important(match, key):
            return match.valueflags(self).get(key) == ParameterFlag.final

        important_maps = tuple(
            dict((k, v) for k, v in iteritems(match_value)
                 if key_is_important(match, k))
            for match, match_value in relevant_matches_and_values)
        # dump all matches in a dict
        # then overwrite with important matches
        return merge(
            concatv((v for _, v in relevant_matches_and_values),
                    reversed(important_maps)))
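The "dump all matches, then overwrite with important matches" comment relies on merge's last-write-wins semantics. A sketch of the ordering trick with hypothetical maps:

from toolz import concatv, merge

regular = [{'channels': ['a'], 'verbosity': 1}, {'verbosity': 2}]
important = [{'verbosity': 99}]
# Later mappings win, so values from the important maps override the rest.
assert merge(concatv(regular, important)) == {'channels': ['a'], 'verbosity': 99}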
Example #6
    def request(self,
                url,
                method,
                data=None,
                params=None,
                headers=None,
                json=None):
        header = {'Authorization': self.__auth}
        if headers is not None:
            header = merge(header, headers)

        try:
            url = self.build_url(url)
            method = method.upper()
            self.__log('{0} request to: {1}'.format(method, url))
            request = requests.request(method=method,
                                       url=url,
                                       data=data,
                                       json=json,
                                       params=params,
                                       headers=header)
            self.__log('Status code: {0}'.format(request.status_code))
            return request
        except Exception as exc:
            raise Exception('{0} request failed to: {1}'.format(method, url)) from exc
Example #7
    def _render_(self, template_name, **extra_args):
        repr_args = merge(self.configuration.copy(), extra_args)

        if self.configuration['visualization_js']:
            repr_args['visualization_js'] = env.get_template(
                self.configuration['visualization_js']).render(**repr_args)
        else:
            raise Exception('Empty Visualization code!')

        if 'functions_js' in self.configuration and self.configuration[
                'functions_js'] is not None:
            repr_args['functions_js'] = env.get_template(
                self.configuration['functions_js']).render(**repr_args)

        template = env.get_template(template_name)

        if 'figure_id' not in repr_args or not repr_args['figure_id']:
            repr_args['figure_id'] = 'fig-{0}'.format(uuid.uuid4())

        if 'vis_uuid' not in repr_args or not repr_args['vis_uuid']:
            repr_args['vis_uuid'] = 'matta-vis-{0}'.format(uuid.uuid4())

        if 'define_js_module' not in repr_args:
            repr_args['define_js_module'] = True

        if self.configuration['visualization_css']:
            try:
                repr_args['visualization_css'] = env.get_template(
                    self.configuration['visualization_css']).render(
                        **repr_args)
            except IOError:
                repr_args['visualization_css'] = None

        return template.render(**repr_args)
Example #8
 def _transaction(**kwargs):
     return merge(
         {
             "from": eth_tester.get_accounts()[0],
             "to": BURN_ADDRESS,
             "gas": 21000
         },
         kwargs,
     )
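This defaults-plus-overrides pattern works because merge gives precedence to later mappings. A sketch with hypothetical values:

from toolz import merge

defaults = {'from': '0xA', 'to': '0xB', 'gas': 21000}
# Caller-supplied kwargs land last, so they override the defaults.
assert merge(defaults, {'gas': 30000}) == {'from': '0xA', 'to': '0xB', 'gas': 30000}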
Example #9
 def facet_map(self):
     facs = (self.flevels
             .groupby(['facet'])
             .agg({'facet_level': lambda x: x.dropna().drop_duplicates().tolist()})
             .pipe(lambda xf: u.fill_none(xf))
             .to_dict(orient='index'))
     return pipe(facs,
                 curry(valmap)(lambda x: x['facet_level']),
                 curry(keyfilter)(lambda x: x != 'Overall'),
                 lambda x: merge(x, self.flevels_r))
Example #10
 def modifyTransaction(self, transaction_hash, **transaction_params):
     assert_valid_transaction_params(transaction_params)
     current_transaction = get_required_transaction(self.web3,
                                                    transaction_hash)
     current_transaction_params = extract_valid_transaction_params(
         current_transaction)
     new_transaction = merge(current_transaction_params, transaction_params)
     return replace_transaction(self.web3, current_transaction,
                                new_transaction)
Example #11
def test_merge_with_non_dict_mappings():
    class Foo(Mapping):
        def __init__(self, d):
            self.d = d

        def __iter__(self):
            return iter(self.d)

        def __getitem__(self, key):
            return self.d[key]

        def __len__(self):
            return len(self.d)

    d = Foo({1: 1})
    rv = merge(d)

    assert merge(d) is d or merge(d) == {1: 1}
    assert merge_with(sum, d) == {1: 1}
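Alongside merge, this test uses merge_with, which combines colliding values with a function instead of overwriting them. A quick sketch:

from toolz import merge_with

# Values for each key are collected into a list and passed to the function.
assert merge_with(sum, {1: 1, 2: 2}, {1: 10}) == {1: 11, 2: 2}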
Example #12
 def test_merge_iterable_arg(self):
     D, kw = self.D, self.kw
     assert merge([D({1: 1, 2: 2}), D({3: 4})], **kw) == D({1: 1, 2: 2, 3: 4})
Example #13
def merge_genesis_overrides(defaults, overrides):
    allowed_fields = set(defaults.keys())
    override_fields = set(overrides.keys())
    unexpected_fields = tuple(
        sorted(override_fields.difference(allowed_fields)))

    if unexpected_fields:
        err = "The following invalid fields were supplied to override default genesis values: {0}."
        raise ValueError(err.format(unexpected_fields))

    merged_params = merge(defaults, overrides)
    return merged_params
Example #14
    def _render_(self, template_name='base.html', **extra_args):
        repr_args = merge(self.configuration.copy(), extra_args)

        if self.configuration['visualization_js']:
            repr_args['visualization_js'] = env.get_template(
                self.configuration['visualization_js']).render(**repr_args)
        else:
            raise Exception('Empty Visualization code!')

        if 'functions_js' in self.configuration and self.configuration[
                'functions_js'] is not None:
            repr_args['functions_js'] = env.get_template(
                self.configuration['functions_js']).render(**repr_args)

        template = env.get_template(template_name)

        if 'figure_id' not in repr_args or not repr_args['figure_id']:
            repr_args['figure_id'] = 'fig-{0}'.format(uuid.uuid4())

        if 'vis_uuid' not in repr_args or not repr_args['vis_uuid']:
            repr_args['vis_uuid'] = 'datagram-vis-{0}'.format(uuid.uuid4())

        if 'define_js_module' not in repr_args:
            repr_args['define_js_module'] = True

        if self.configuration['visualization_css']:
            try:
                repr_args['visualization_css'] = env.get_template(
                    self.configuration['visualization_css']).render(
                        **repr_args)
            except IOError:
                repr_args['visualization_css'] = None

        # some dependencies have names with invalid characters for variable names in Javascript
        repr_args['requirements_as_args'] = list(
            map(lambda x: x.replace('-', '_'), repr_args['requirements']))

        # if there are defined events, we merge them here
        repr_args['event_names'] = []

        if 'allowed_events' in repr_args['options'] and repr_args['options'][
                'allowed_events']:
            repr_args['event_names'].extend(
                repr_args['options']['allowed_events'])

        repr_args['event_names'].extend(self.datagram_events)
        repr_args['event_names'] = list(set(repr_args['event_names']))

        return template.render(**repr_args)
Example #15
def load_datasets(cache_dir, dbc, dsets, parse_all=False, resume=True):
    import cytoolz.dicttoolz as dz
    cache = dask.cache.Cache(8e9)
    cache.register()
    dask.set_options(get=dask.threaded.get, pool=ThreadPool())
    configs = list(
        map(lambda x: os.path.join('config/data', x),
            os.listdir('config/data')))
    cmap = {k: DatasetConfig.from_yaml(k) for k in configs}
    dsids = dz.valmap(lambda ds: ds.id, cmap)
    cmap = dz.merge(cmap, {d.id: d for d in cmap.values()})
    if parse_all:
        dsets = dsids
    for d in dsets:
        process_dataset(cmap[d], dbc, cache_dir)
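This example imports cytoolz, the Cython implementation of toolz; its dicttoolz module exposes the same merge and valmap API. A minimal sketch:

import cytoolz.dicttoolz as dz

# cytoolz.dicttoolz is a drop-in replacement for toolz.dicttoolz here.
assert dz.merge({'a': 1}, {'b': 2}) == {'a': 1, 'b': 2}
assert dz.valmap(len, {'xs': [1, 2, 3]}) == {'xs': 3}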
Example #16
    def _merge(self, matches):
        # get matches up to and including first important_match
        #   but if no important_match, then all matches are important_matches
        relevant_matches = self._first_important_matches(matches)

        # mapkeys with important matches
        def key_is_important(match, key):
            return match.valueflags(self.__class__).get(key) is ParameterFlag.final
        important_maps = tuple(dict((k, v)
                                    for k, v in iteritems(match.value(self.__class__))
                                    if key_is_important(match, k))
                               for match in relevant_matches)
        # dump all matches in a dict
        # then overwrite with important matches
        return merge(concatv((m.value(self.__class__) for m in relevant_matches),
                             reversed(important_maps)))
Example #17
def _get_wf_call_statuses(metadata):
    calls = metadata['calls'].keys()
    states = set()
    call_stats = {}

    for c in calls:
        tasks = metadata['calls'][c]
        counts = pipe(tasks, map(get('executionStatus')), frequencies)
        states.update(counts)
        call_stats[c] = counts

    base_states = {s: 0 for s in states}

    final_stats = valmap(lambda d: merge(base_states, d), call_stats)
    return (calls, sorted(states), final_stats)
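Here merge(base_states, d) normalizes ragged per-call counters: base_states supplies a zero for every observed state, and each real count dict overrides its own entries. A sketch with hypothetical workflow data:

from toolz import merge, valmap

base_states = {'Done': 0, 'Failed': 0}
call_stats = {'wf.step1': {'Done': 3}, 'wf.step2': {'Failed': 1}}
final = valmap(lambda d: merge(base_states, d), call_stats)
# Every call now reports every state, with zeros filled in.
assert final == {'wf.step1': {'Done': 3, 'Failed': 0},
                 'wf.step2': {'Done': 0, 'Failed': 1}}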
Example #18
def compile_sources(source_bundle: SourceBundle,
                    version_check: bool = True) -> Dict:
    """Compiled solidity contracts for a single source directory"""
    sources = collect_sources(source_bundle=source_bundle)
    source_config = prepare_source_configuration(sources=sources)
    solc_configuration = merge(BASE_COMPILER_CONFIGURATION,
                               dict(sources=source_config))  # does not mutate.

    remappings_config = prepare_remappings_configuration(
        base_path=source_bundle.base_path)
    solc_configuration['settings'].update(remappings_config)

    version: VersionString = VersionString(
        SOLIDITY_COMPILER_VERSION) if version_check else None
    allow_paths = [source_bundle.base_path, *source_bundle.other_paths]
    compiler_output = __execute(compiler_version=version,
                                input_config=solc_configuration,
                                allow_paths=allow_paths)
    return compiler_output
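The "does not mutate" comment points at a useful property: merge always builds a new mapping and leaves its inputs untouched, though the copy is shallow. A sketch:

from toolz import merge

base = {'settings': {'optimizer': True}}
combined = merge(base, {'sources': {}})
assert 'sources' not in base                      # inputs are not mutated
assert combined['settings'] is base['settings']   # but nested values are shared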
Example #19
    def load(self, defaults=None):
        config = defaults or {}

        if os.path.isabs(self.config_path):
            search_paths = [self.config_path]
        else:
            search_paths = map(self._join_path, self.default_paths)

        try:
            valid_paths = filter(lambda p: p and self.isfile(p), search_paths)
            file_path = next(valid_paths)
            with self.open(file_path, 'r') as f:
                loaded = yaml.safe_load(f)
                config = merge(config, loaded)
        except Exception:
            self.logger.error('Error parsing config file "{}"'.format(
                self.config_path))

        return DotDict(config)
Example #20
    def _render_(self, template_name='base.html', **extra_args):
        repr_args = merge(self.configuration.copy(), extra_args)

        if self.configuration['visualization_js']:
            repr_args['visualization_js'] = env.get_template(self.configuration['visualization_js']).render(**repr_args)
        else:
            raise Exception('Empty Visualization code!')

        if 'functions_js' in self.configuration and self.configuration['functions_js'] is not None:
            repr_args['functions_js'] = env.get_template(self.configuration['functions_js']).render(**repr_args)

        template = env.get_template(template_name)

        if 'figure_id' not in repr_args or not repr_args['figure_id']:
            repr_args['figure_id'] = 'fig-{0}'.format(uuid.uuid4())

        if 'vis_uuid' not in repr_args or not repr_args['vis_uuid']:
            repr_args['vis_uuid'] = 'datagram-vis-{0}'.format(uuid.uuid4())

        if 'define_js_module' not in repr_args:
            repr_args['define_js_module'] = True

        if self.configuration['visualization_css']:
            try:
                repr_args['visualization_css'] = env.get_template(self.configuration['visualization_css']).render(**repr_args)
            except IOError:
                repr_args['visualization_css'] = None

        # some dependencies have names with invalid characters for variable names in Javascript
        repr_args['requirements_as_args'] = list(map(lambda x: x.replace('-', '_'), repr_args['requirements']))

        # if there are defined events, we merge them here
        repr_args['event_names'] = []

        if 'allowed_events' in repr_args['options'] and repr_args['options']['allowed_events']:
            repr_args['event_names'].extend(repr_args['options']['allowed_events'])

        repr_args['event_names'].extend(self.datagram_events)
        repr_args['event_names'] = list(set(repr_args['event_names']))

        return template.render(**repr_args)
Example #21
    def _merge(self, matches):
        # get matches up to and including first important_match
        #   but if no important_match, then all matches are important_matches
        relevant_matches_and_values = tuple((match, match.value(self)) for match in
                                            self._first_important_matches(matches))
        for match, value in relevant_matches_and_values:
            if not isinstance(value, Mapping):
                raise InvalidTypeError(self.name, value, match.source, value.__class__.__name__,
                                       self._type.__name__)

        # mapkeys with important matches
        def key_is_important(match, key):
            return match.valueflags(self).get(key) == ParameterFlag.final
        important_maps = tuple(dict((k, v)
                                    for k, v in iteritems(match_value)
                                    if key_is_important(match, k))
                               for match, match_value in relevant_matches_and_values)
        # dump all matches in a dict
        # then overwrite with important matches
        return merge(concatv((v for _, v in relevant_matches_and_values),
                             reversed(important_maps)))
Example #22
    def request(self, url, method, data=None, params=None, headers=None, json=None):
        header = {'Authorization': self.__auth}
        if headers is not None:
            header = merge(header, headers)

        try:
            url = self.build_url(url)
            method = method.upper()
            self.__log('{0} request to: {1}'.format(method, url))
            request = requests.request(
                method=method,
                url=url,
                data=data,
                json=json,
                params=params,
                headers=header
            )
            self.__log('Status code: {0}'.format(request.status_code))
            return request
        except Exception as exc:
            raise Exception('{0} request failed to: {1}'.format(method, url)) from exc
Example #23
transaction_st = st.tuples(  # opening reconstructed; the original snippet began mid-expression
    st.one_of(
        st.fixed_dictionaries({}),
        st.fixed_dictionaries({'to': address}),
    ),
    st.one_of(
        st.fixed_dictionaries({}),
        st.fixed_dictionaries({'value': tx_value}),
    ),
    st.one_of(
        st.fixed_dictionaries({}),
        st.fixed_dictionaries({'gas': tx_gas}),
    ),
    st.one_of(
        st.fixed_dictionaries({}),
        st.fixed_dictionaries({'gas_price': tx_gas_price}),
    ),
).map(lambda parts: merge(*parts))


class EVMStateFuzzer(RuleBasedStateMachine):
    sent_transactions = Bundle('Transactions')

    def __init__(self, *args, **kwargs):
        from eth_tester import (
            EthereumTester,
            PyEthereum16Backend,
        )
        backend = PyEthereum16Backend()
        self.eth_tester = EthereumTester(backend=backend)
        super(EVMStateFuzzer, self).__init__(*args, **kwargs)

    @rule(target=sent_transactions, transaction=transaction_st)
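The strategy above draws a tuple of partial transaction dicts and folds them into one with merge(*parts). A self-contained sketch of that map step, using hypothetical field strategies:

import hypothesis.strategies as st
from toolz import merge

parts_st = st.tuples(
    st.fixed_dictionaries({'to': st.just('0x' + '00' * 20)}),
    st.one_of(st.fixed_dictionaries({}),
              st.fixed_dictionaries({'value': st.integers(min_value=0)})),
)
transaction_st = parts_st.map(lambda parts: merge(*parts))
# Draws either {'to': ...} or {'to': ..., 'value': ...}
print(transaction_st.example())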
Example #24
def test_merge_iterable_arg():
    assert merge([{1: 1, 2: 2}, {3: 4}]) == {1: 1, 2: 2, 3: 4}
Example #25
def test_merge():
    assert merge({1: 1, 2: 2}, {3: 4}) == {1: 1, 2: 2, 3: 4}
Example #26
 def test_merge_iterable_arg(self):
     D, kw = self.D, self.kw
     assert merge([D({1: 1, 2: 2}), D({3: 4})], **kw) == D({1: 1, 2: 2, 3: 4})
Example #27
 def test_merge(self):
     D, kw = self.D, self.kw
     assert merge(D({1: 1, 2: 2}), D({3: 4}), **kw) == D({1: 1, 2: 2, 3: 4})
Example #28
 def modifyTransaction(self, transaction_hash, **transaction_params):
     assert_valid_transaction_params(transaction_params)
     current_transaction = get_required_transaction(self.web3, transaction_hash)
     current_transaction_params = extract_valid_transaction_params(current_transaction)
     new_transaction = merge(current_transaction_params, transaction_params)
     return replace_transaction(self.web3, current_transaction, new_transaction)
Example #29
def get_table(mds, fs, headers, fields=None, stream_name='primary', fill=False,
              convert_times=True, timezone=None, handler_registry=None,
              handler_overrides=None, localize_times=True):
    """
    Make a table (pandas.DataFrame) from given run(s).

    Parameters
    ----------
    mds : MDSRO
    fs : FileStoreRO
    headers : Header or iterable of Headers
        The headers to fetch the events for
    fields : list, optional
        whitelist of field names of interest; if None, all are returned
    stream_name : string, optional
        Get data from a single "event stream." To obtain one comprehensive
        table with all streams, use `stream_name=ALL` (where `ALL` is a
        sentinel class defined in this module). The default name is
        'primary', but if no event stream with that name is found, the
        default reverts to `ALL` (for backward-compatibility).
    fill : bool, optional
        Whether externally-stored data should be filled in. Defaults to False.
    convert_times : bool, optional
        Whether to convert times from float (seconds since 1970) to
        numpy datetime64, using pandas. True by default, returns naive
        datetime64 objects in UTC
    timezone : str, optional
        e.g., 'US/Eastern'
    handler_registry : dict, optional
        mapping filestore specs (strings) to handlers (callable classes)
    handler_overrides : dict, optional
        mapping data keys (strings) to handlers (callable classes)
    localize_times : bool, optional
        If the times should be localized to the 'local' time zone.  If
        True (the default) the time stamps are converted to the localtime
        zone (as configured in mds).

        This is problematic for several reasons:

          - apparent gaps or duplicate times around DST transitions
          - incompatibility with every other time stamp (which is in UTC)

        however, this makes the dataframe repr look nicer

        This implies convert_times.

        Defaults to True to preserve back-compatibility.
    Returns
    -------
    table : pandas.DataFrame
    """
    # A word about the 'fields' argument:
    # Notice that we assume that the same field name cannot occur in
    # more than one descriptor. We could relax this assumption, but
    # we currently enforce it in bluesky, so it is safe for now.
    try:
        headers.items()
    except AttributeError:
        pass
    else:
        headers = [headers]

    if handler_overrides is None:
        handler_overrides = {}
    if handler_registry is None:
        handler_registry = {}

    if fields is None:
        fields = []
    fields = set(fields)
    _check_fields_exist(fields, headers)

    dfs = []
    for header in headers:
        # cache these attribute look-ups for performance
        start = header['start']
        stop = header.get('stop', {})
        descriptors = header['descriptors']
        if stop is None:
            stop = {}

        # shim for back-compat with old data that has no 'primary' descriptor
        if not any(d for d in descriptors if d.get('name') == 'primary'):
            stream_name = ALL

        for descriptor in descriptors:
            descriptor_name = descriptor.get('name')
            if (stream_name is not ALL) and (stream_name != descriptor_name):
                continue
            is_external = _external_keys(descriptor)
            objs_config = descriptor.get('configuration', {}).values()
            config_data = merge(obj_conf['data'] for obj_conf in objs_config)
            discard_fields = set()
            extra_fields = set()
            if fields:
                event_fields = set(descriptor['data_keys'])
                discard_fields = event_fields - fields
                extra_fields = fields - event_fields
            payload = mds.get_events_table(descriptor)
            descriptor, data, seq_nums, times, uids, timestamps = payload
            df = pd.DataFrame(index=seq_nums)
            # if converting to datetime64 (in utc or 'local' tz)
            if convert_times or localize_times:
                times = pd.to_datetime(times, unit='s')
            # make sure this is a series
            times = pd.Series(times, index=seq_nums)

            # if localizing to 'local' time
            if localize_times:
                times = (times
                         .dt.tz_localize('UTC')     # first make tz aware
                         .dt.tz_convert(timezone)   # convert to 'local'
                         .dt.tz_localize(None)      # make naive again
                         )

            df['time'] = times
            for field, values in six.iteritems(data):
                if field in discard_fields:
                    logger.debug('Discarding field %s', field)
                    continue
                df[field] = values
            if list(df.columns) == ['time']:
                # no content
                continue
            for field in df.columns:
                if is_external.get(field) is not None and fill:
                    logger.debug('filling data for %s', field)
                    # TODO someday we will have bulk get_datum in FS
                    datum_uids = df[field]
                    if field not in handler_overrides:
                        with fs.handler_context(handler_registry) as _fs:
                            values = [_fs.get_datum(value)
                                      for value in datum_uids]
                    else:
                        handler = handler_overrides[field]
                        mock_registry = defaultdict(lambda: handler)
                        with fs.handler_context(mock_registry) as _fs:
                            values = [_fs.get_datum(value)
                                      for value in datum_uids]
                    df[field] = values
            for field in extra_fields:
                # Look in the descriptor, then start, then stop.
                # Broadcast any values through the whole df.
                if field in config_data:
                    df[field] = config_data[field]
                elif field in start:
                    df[field] = start[field]
                elif field in stop:
                    df[field] = stop[field]
                # (else omit it from the events of this descriptor)
            dfs.append(df)
    if dfs:
        return pd.concat(dfs)
    else:
        # edge case: no data
        return pd.DataFrame()
Example #30
def get_events(mds, fs, headers, fields=None, stream_name=ALL, fill=False,
               handler_registry=None, handler_overrides=None, plugins=None,
               **kwargs):
    """
    Get Events from given run(s).

    Parameters
    ----------
    mds : MDSRO
    fs : FileStoreRO
    headers : Header or iterable of Headers
        The headers to fetch the events for
    fields : list, optional
        whitelist of field names of interest or regular expression;
        if None, all are returned
    stream_name : string, optional
        Get events from only one "event stream" with this name. Default value
        is special sentinel class, `ALL`, which gets all streams together.
    fill : bool, optional
        Whether externally-stored data should be filled in. Defaults to False.
    handler_registry : dict, optional
        mapping filestore specs (strings) to handlers (callable classes)
    handler_overrides : dict, optional
        mapping data keys (strings) to handlers (callable classes)
    plugins : dict or None, optional
        mapping keyword arguments (strings) to Plugins
    kwargs
        passed through to any plugins

    Yields
    ------
    event : Event
        The event, optionally with non-scalar data filled in

    Raises
    ------
    ValueError if any key in `fields` is not in at least one descriptor per header.
    KeyError if a kwarg is passed without a corresponding plugin.
    """
    # A word about the 'fields' argument:
    # Notice that we assume that the same field name cannot occur in
    # more than one descriptor. We could relax this assumption, but
    # we currently enforce it in bluesky, so it is safe for now.
    try:
        headers.items()
    except AttributeError:
        pass
    else:
        headers = [headers]

    no_fields_filter = False
    if fields is None:
        no_fields_filter = True
        fields = []
    fields = set(fields)
    _check_fields_exist(fields, headers)

    comp_re = _compile_re(fields)

    for k in kwargs:
        if k not in plugins:
            raise KeyError("No plugin was found to handle the keyword "
                           "argument %r" % k)

    for header in headers:
        # cache these attribute look-ups for performance
        start = header['start']
        stop = header.get('stop', {})
        for descriptor in header['descriptors']:
            descriptor_name = descriptor.get('name')
            if (stream_name is not ALL) and (stream_name != descriptor_name):
                continue
            objs_config = descriptor.get('configuration', {}).values()
            config_data = merge(obj_conf['data'] for obj_conf in objs_config)
            config_ts = merge(obj_conf['timestamps']
                              for obj_conf in objs_config)
            if fields:
                event_fields = set(descriptor['data_keys'])
                selected_fields = set(filter(comp_re.match, event_fields))
                discard_fields = event_fields - selected_fields
            else:
                discard_fields = set()
                selected_fields = set()

            all_extra_data = {}
            all_extra_ts = {}

            if not no_fields_filter:
                # Look in the descriptor, then start, then stop.
                config_data_fields = set(filter(comp_re.match, config_data)) - selected_fields
                for field in config_data_fields:
                    selected_fields.add(field)
                    all_extra_data[field] = config_data[field]
                    all_extra_ts[field] = config_ts[field]

                start_fields = set(filter(comp_re.match, start)) - selected_fields
                for field in start_fields:
                    all_extra_data[field] = start[field]
                    all_extra_ts[field] = start['time']

                stop_fields = set(filter(comp_re.match, stop)) - selected_fields
                for field in stop_fields:
                    all_extra_data[field] = stop[field]
                    all_extra_ts[field] = stop['time']

            for event in mds.get_events_generator(descriptor):
                event_data = event.data  # cache for perf
                event_timestamps = event.timestamps
                event_data.update(all_extra_data)
                event_timestamps.update(all_extra_ts)
                for field in discard_fields:
                    del event_data[field]
                    del event_timestamps[field]
                if not event_data:
                    # Skip events that are now empty because they had no
                    # applicable fields.
                    continue
                if fill:
                    fill_event(fs, event, handler_registry, handler_overrides)
                yield event
        # Now yield any events from plugins.
        for k, v in kwargs.items():
            for ev in plugins[k].get_events(header, v):
                yield ev
Example #31
 def __call__(self, *args, **kwargs):
     return dicttoolz.merge(*self._combined(*args, **kwargs))
Example #32
def merge_contract_sources(*compiled_sources):
    # TODO: Handle file-level output aggregation
    return merge(*compiled_sources)
Example #33
 def examine_rec_order(self, **kwargs):
     listed_loans = self.lc_connection.get_listed_loans().json()['loans']
     listed_dict = {x['id']: x for x in listed_loans}
     note_dict = self.get_model_rec(**kwargs)
     return [merge(note_dict[key], listed_dict[key]) for key in note_dict]