Example #1
def test_reduce_raise(case, exception):
    raised = False

    try:
        _.reduce_(*case)
    except exception:
        raised = True

    assert raised
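Note: one of the parametrized cases here is presumably the empty-sequence case; pydash's reduce_ mirrors functools.reduce and raises when given an empty sequence and no initial value. A minimal standalone sketch, assuming the raised exception is TypeError:

import pytest
import pydash as _

def test_reduce_empty_raises():
    # With no accumulator, an empty sequence has nothing to seed the fold.
    with pytest.raises(TypeError):
        _.reduce_([])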
Example #2
def pulse_global(message_groups: List[List[Dict]],
                 interval: int = 5) -> Union[List[Dict], None]:
    if not message_groups:
        return []

    pulses: List[Union[List[Dict], None]] = _.map_(
        message_groups, lambda group: pulse(group, interval))

    def remove_nones(acc: List[List[Dict]],
                     maybe_pulse: Union[List[Dict], None]) -> List[List[Dict]]:
        if maybe_pulse is None:
            return acc

        return _.push(acc, maybe_pulse)

    pulses = _.reduce_(pulses, remove_nones, [])

    def pulses_align(pulses: List[List[Dict]]) -> List[List[Dict]]:
        earliest_time = _.min_by(
            pulses, lambda pulse: pulse[0].get('time'))[0].get('time')
        latest_time = _.max_by(
            pulses, lambda pulse: pulse[-1].get('time'))[-1].get('time')

        return _.map_(
            pulses, lambda pulse: _.concat(
                zero_pulses(start_time=earliest_time,
                            end_time=pulse[0].get('time'),
                            interval=interval), pulse,
                zero_pulses(start_time=pulse[-1].get('time'),
                            end_time=latest_time,
                            interval=interval)))

    pulses = pulses_align(pulses)

    def collapser(collapsed_pulses: List[Dict],
                  next_pulses: List[Dict]) -> List[Dict]:
        if not collapsed_pulses:
            return next_pulses

        def message_adder(index):
            collapsed_pulse = collapsed_pulses[index]
            next_pulse = next_pulses[index]

            return _.assign(
                collapsed_pulse,
                {'rate': collapsed_pulse.get('rate') + next_pulse.get('rate')})

        return _.map_(_.range_(len(collapsed_pulses)), message_adder)

    pulse_clusters = _.reduce_(pulses, collapser, [])
    max_pulse_rate = _.max_by(pulse_clusters, 'rate').get('rate')

    def rate_normalizer(max_rate: int):
        return lambda pulse_dict: _.assign(
            pulse_dict, {'rate': pulse_dict.get('rate') / max_rate})

    return _.map_(pulse_clusters, rate_normalizer(max_pulse_rate))
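pulse_global leans on two helpers, pulse (shown in Example #19) and zero_pulses, that are defined elsewhere. A plausible sketch of zero_pulses, assuming it emits zero-rate points on the interval grid strictly between two grid-aligned times:

from datetime import datetime, timedelta
from typing import Dict, List

def zero_pulses(start_time: datetime, end_time: datetime,
                interval: int = 5) -> List[Dict]:
    # Hypothetical filler: one {'rate': 0, 'time': t} per interval-minute step
    # between start_time (exclusive) and end_time (exclusive).
    pulses = []
    t = start_time + timedelta(minutes=interval)
    while t < end_time:
        pulses.append({'rate': 0, 'time': t})
        t += timedelta(minutes=interval)
    return pulses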
Example #3
def extract_selections(selections, fragments):
    # extract any fragment selection sets into a single array of selections
    def splice_fragment(acc, curr):
        if curr.kind == 'fragment_spread':
            return [*acc, *fragments[curr.name.value].selection_set.selections]
        return [*acc, curr]

    return reduce_(selections, splice_fragment, [])
def param_signature(field_type):
    def fun(acc, f):
        field_inner_type = inner_type(field_type.fields[f].type)
        if f == '_id' or (getattr(field_inner_type, 'ast_node', None) is not None
                          and field_inner_type.ast_node.kind == 'object_type_definition'):
            # TODO: exclude @cypher fields
            # TODO: exclude object types?
            return acc
        return acc + f' {f}: {field_inner_type.name}, '

    return reduce_(list(field_type.fields.keys()), fun, '')
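Both helpers assume an inner_type function that is not shown. A minimal sketch, under the assumption that it unwraps graphql-core NonNull/List wrappers down to the named type:

def inner_type(field_type):
    # Hypothetical: peel NonNull/List wrappers (each exposes .of_type)
    # until the underlying named type is reached.
    while hasattr(field_type, 'of_type'):
        field_type = field_type.of_type
    return field_type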
Example #5
def test_curry_arity_max_from_func():
    def func(data, accum, id):
        accum[id] = _.reduce_(data, lambda total, n: total + n)
        return accum

    ids = [1]
    data = [1, 2]

    curried_func_with_data = _.curry(func)(data)
    result = _.reduce_(ids, curried_func_with_data, {})

    assert result == {1: 3}
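The test relies on reduce_ adapting to the iteratee's arity: pydash inspects the callback (see the getargcount call in the omit_by example below) and passes only as many of (accumulator, value, index) as it accepts. The plain two-argument case in isolation:

import pydash as _

# A two-parameter callback receives (accumulator, value) only.
assert _.reduce_([1, 2, 3], lambda total, n: total + n, 0) == 6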
Example #6
def augment_types(types, schema, sdl):
    """
    #  * Generate type extensions for each type:
    #  *   - add _id field
    #  * @param {string[]} types
    #  * @param schema
    #  * @param {string} sdl
    #  * @returns {string} SDL type extensions
    """
    return reduce_(types,
                   lambda acc, t: acc + '' if t in ['Mutation', 'Query']
                   else acc + f'extend type {t} {{ _id:ID }}', sdl)
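A quick usage sketch (schema is unused by the reduction itself; the type names here are illustrative):

from pydash import reduce_

# Query/Mutation contribute nothing; every other type gets an _id extension.
sdl = augment_types(['Movie', 'Query'], None, '')
assert sdl == 'extend type Movie { _id:ID }'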
Example #8
def most_active_n_users(messages: List[Dict], n: int) -> List[str]:

  def user_scorer(users_with_ranks: Dict, message):
    user = message.get('userid')
    return _.assign(users_with_ranks,
                    {user: users_with_ranks.get(user, 0) + 1})

  users_with_ranks: Dict = _.reduce_(messages, user_scorer, {})
  sorted_users_by_rank: List[str] = sorted(
    users_with_ranks,
    key=users_with_ranks.get,
    reverse=True
  )

  return _.take(sorted_users_by_rank, min(len(users_with_ranks), n))
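A small usage sketch with hypothetical messages (only the userid key matters to the scorer):

messages = [
    {'userid': 'ana', 'content': 'hi'},
    {'userid': 'ana', 'content': 'hello'},
    {'userid': 'bo', 'content': 'hey'},
]
# 'ana' has 2 messages, 'bo' has 1.
assert most_active_n_users(messages, 1) == ['ana']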
def add_mutations_to_schema(schema):
    types = types_to_augment(schema)

    # FIXME: don't use printSchema (custom directives are lost), instead use extend schema
    # FIXME: type extensions are lost
    mutation_schema_sdl = print_schema(schema)

    # TODO: compose augment funcs
    # let mutationSchemaSDLWithTypes = augmentTypes(types, schema, mutationSchemaSDL);

    mutation_schema_sdl_with_types_and_mutations = augment_mutations(types, schema, mutation_schema_sdl)

    def resolve_neo4j(obj, info, **kwargs):
        return neo4j_graphql(obj, info.context, info, **kwargs)

    # console.log(mutationSchemaSDLWithTypesAndMutations);
    def f(acc, t):
        # FIXME: inspect actual mutations, not construct mutation names here
        acc['Mutation'][f'Create{t}'] = resolve_neo4j
        for field_type in types:
            for rel_mutation in add_relationship_mutations(schema.type_map[field_type], True):
                acc['Mutation'][rel_mutation] = resolve_neo4j
        return acc

    resolvers = reduce_(types, f, {'Query': {}, 'Mutation': {}})

    # delegate query resolvers to original schema
    def f2(acc, t):
        acc['Query'][t] = resolve_neo4j
        return acc

    resolvers = reduce_(list(schema.query_type.fields.keys()), f2, resolvers)

    mutation_schema = make_executable_schema(mutation_schema_sdl_with_types_and_mutations, resolvers)

    final_schema = mutation_schema
    return final_schema
Example #10
def omit_by(obj, iteratee=None):
    """
    The opposite of :func:`pick_by`. This method creates an object composed of the string keyed
    properties of object that predicate doesn't return truthy for. The predicate is invoked with two
    arguments: ``(value, key)``.

    Args:
        obj (mixed): Object to process.
        iteratee (mixed, optional): Iteratee used to determine which properties to omit.

    Returns:
        dict: Results of omitting properties.

    Example:

        >>> omit_by({'a': 1, 'b': '2', 'c': 3}, lambda v: isinstance(v, int))
        {'b': '2'}

    .. versionadded:: 4.0.0

    .. versionchanged:: 4.2.0
        Support deep paths for `iteratee`.
    """
    if not callable(iteratee):
        paths = pyd.map_(iteratee, to_path)

        if any(len(path) > 1 for path in paths):
            cloned = clone_deep(obj)
        else:
            cloned = to_dict(obj)

        def _unset(obj, path):
            pyd.unset(obj, path)
            return obj

        ret = pyd.reduce_(paths, _unset, cloned)
    else:
        argcount = getargcount(iteratee, maxargs=2)

        ret = {
            key: value
            for key, value in iterator(obj)
            if not callit(iteratee, value, key, argcount=argcount)
        }

    return ret
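Per the 4.2.0 change note, a non-callable iteratee is treated as a list of paths; following the non-callable branch above, a quick sketch of deep-path omission:

import pydash as pyd

# A deep path triggers clone_deep so nested containers aren't mutated in place.
result = pyd.omit_by({'a': {'b': 1, 'c': 2}}, ['a.b'])
assert result == {'a': {'c': 2}}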
Example #12
    def get_yield_strength_to_composed_data():
        conn = db.get_connection()
        cur = conn.cursor()
        main_query_select = 'SELECT main_info_id, sortament, sigma_t FROM rloveshhenko$mydbtest.mechanical_properties WHERE sigma_t != " " and main_info_id in (SELECT id FROM mydbtest.main_info WHERE classification like "%Сталь%");'

        ids_query_select = 'SELECT distinct main_info_id FROM rloveshhenko$mydbtest.mechanical_properties WHERE sigma_t != " " and main_info_id in (SELECT id FROM mydbtest.main_info WHERE classification like "%Сталь%");'

        cur.execute(main_query_select)
        data = cur.fetchall()
        cur.execute(ids_query_select)
        ids = cur.fetchall()

        sigmas = map_(
            ids, lambda item: {
                'id': item['main_info_id'],
                'sigmas': map_(
                    filter_(data,
                            lambda it: it['main_info_id'] == item['main_info_id']),
                    lambda x: get(x, 'sigma_t'))
            })

        new_sigmas = map_(
            sigmas, lambda item: {
                'id': item['id'],
                'sigma': format(
                    reduce_(item['sigmas'],
                            lambda total, x: float(total) + float(x) / len(item['sigmas']),
                            0), '.2f')
            })

        for item in new_sigmas:
            cur.execute(
                "UPDATE rloveshhenko$mydbtest.composed_data SET sigma_t = %s WHERE id = %s",
                (item['sigma'], item['id']))
            conn.commit()

        cur.close()
        return True
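The reduce_ above computes a running mean: each sigma contributes float(x) / len(item['sigmas']) to the total. The pattern in isolation:

from pydash import reduce_

values = ['10.0', '20.0', '30.0']
mean = reduce_(values,
               lambda total, x: float(total) + float(x) / len(values), 0)
assert format(mean, '.2f') == '20.00'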
Example #13
    async def checkRevision(self, report):

        br = ButtonRequests(self._context)
        proposed_instructions = pydash.get(
            report, 'revision.propositions.instructions')
        if proposed_instructions is None:
            return True

        # TODO: verify this handling is correct
        diff_insts = pydash.difference_with(proposed_instructions,
                                            report['instructions'],
                                            pydash.is_equal)

        for inst in diff_insts:

            def cb(result, value):
                result[value['key']] = value
                return result

            args = pydash.reduce_(inst['action']['args'], cb, {})
            if inst['action']['func_name'] == 'wait':

                if args['param']['value'] == 'success':
                    request = br.find_request_by_button_id(
                        'signal', self._context.worker.uuid)
                    if report:
                        await self._context.api_report.approve_revision(report)
                        br.process(request, 'success')
                    else:
                        print(
                            'found revision to finish wait(signal) but there is no button request for it'
                        )
                else:
                    print('condition is not match on wait')

            else:
                print('cannot handle revision of instruction')

        return True
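The args reduction indexes a list of {'key': ..., ...} dicts by their key field. It needs the {} accumulator: without one, reduce_ seeds the fold with the first list element itself, and the later args['param'] lookup would fail. The pattern in isolation:

import pydash

def cb(result, value):
    result[value['key']] = value
    return result

args = pydash.reduce_([{'key': 'param', 'value': 'success'}], cb, {})
assert args['param']['value'] == 'success'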
Example #14
 def _callback(group):
     return pyd.reduce_(group, callback, None)
Example #15
 def func(data, accum, id):
     accum[id] = _.reduce_(data, lambda total, n: total + n)
     return accum
Example #16
def values_for_records(fields, records):
    return pydash.reduce_(fields, reduce_to_values(records), [])
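reduce_to_values is not shown. A plausible sketch, assuming it plucks each field's values across the records (the helper name and shape come from the call site; the body is guesswork):

import pydash

def reduce_to_values(records):
    def reducer(acc, field):
        # map_ with a string iteratee plucks that property from each record.
        return acc + pydash.map_(records, field)
    return reducer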
Example #17
 def reduce(self, iteratee=None, initial=None):
     """Reduce :meth:`all` using `iteratee`."""
     return pyd.reduce_(self.all(), iteratee, initial)
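Assuming this method belongs to pydash's chain wrapper (its docstring references all()), usage reads:

import pydash as pyd

assert pyd.chain([1, 2, 3]).reduce(lambda total, n: total + n, 0).value() == 6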
Example #19
def pulse(messages: List[Dict], interval: int = 5) -> Union[List[Dict], None]:
    """
    Representative Pulse for a stream of messages

    :param messages: List[{userid:str, content:str, timestamp:datetime}]
    :param interval: Size of a pulse point, in minutes
    :return: List[{rate, time}]
    """
    if len(messages) < 2:
        return None

    round_to_nearest_interval_minutes = functools.partial(
        round_to_nearest_n_minutes, interval, rounder_func=math.ceil)

    def datetime_clusterer(tuplet: Tuple[List[Dict], int],
                           message: Dict) -> Tuple[List, int]:

        message_anchored_time = round_to_nearest_interval_minutes(
            message.get('timestamp'))

        # No cluster: create a cluster
        if not tuplet:
            return [{'rate': 1, 'time': message_anchored_time}], 1

        pulse_clusters, max_pulse_rate = tuplet
        latest_pulse = pulse_clusters.pop()

        # Message fits in cluster - cluster it up!
        if latest_pulse.get('time') == message_anchored_time:

            new_pulse_rate = latest_pulse.get('rate') + 1
            new_pulse_clusters = _.push(
                pulse_clusters, _.assign(latest_pulse,
                                         {'rate': new_pulse_rate}))

            if max_pulse_rate >= new_pulse_rate:
                return new_pulse_clusters, max_pulse_rate
            return new_pulse_clusters, new_pulse_rate

        # Message doesn't fit in cluster
        # lock in latest cluster, create new cluster but also fill
        # in missing clusters in between
        old_pulse_cluster = _.push(pulse_clusters, latest_pulse)
        old_pulse_cluster = _.concat(
            zero_pulses(start_time=latest_pulse.get('time'),
                        end_time=message_anchored_time,
                        interval=interval), old_pulse_cluster)

        return _.push(old_pulse_cluster,
                      _.assign({}, {
                          'rate': 1,
                          'time': message_anchored_time
                      })), max_pulse_rate

    pulse_clusters, max_pulse_rate = _.reduce_(messages, datetime_clusterer,
                                               ())

    def rate_normalizer(max_rate: int):
        return lambda pulse_dict: _.assign(
            pulse_dict, {'rate': pulse_dict.get('rate') / max_rate})

    return _.map_(pulse_clusters, rate_normalizer(max_pulse_rate))
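pulse depends on round_to_nearest_n_minutes, which is not shown. A minimal sketch matching the partial application above (n first, then the datetime, with a pluggable rounder):

import math
from datetime import datetime, timedelta

def round_to_nearest_n_minutes(n: int, when: datetime,
                               rounder_func=round) -> datetime:
    # Hypothetical helper: snap `when` onto an n-minute grid using rounder_func.
    base = when.replace(minute=0, second=0, microsecond=0)
    minutes = (when - base).total_seconds() / 60
    return base + timedelta(minutes=n * rounder_func(minutes / n))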
Example #22
def test_reduce_(case, expected):
    assert _.reduce_(*case) == expected
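The case/expected pairs come from a parametrized fixture; a minimal sketch of such a table, assuming pytest.mark.parametrize and that each case is the argument tuple for reduce_:

import pytest
import pydash as _

@pytest.mark.parametrize('case,expected', [
    (([1, 2, 3], lambda acc, n: acc + n, 0), 6),
    ((['a', 'b', 'c'], lambda acc, s: acc + s, ''), 'abc'),
])
def test_reduce_(case, expected):
    assert _.reduce_(*case) == expected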
Example #23
 def reduce(self, callback=None, initial=None):
     """Reduce :meth:`all` using `callback`."""
     return pyd.reduce_(self.all(), callback, initial)
Example #24
def decimal_from_list(digits):
    # Fold left: each step shifts the accumulator one decimal place.
    return _.reduce_(digits, lambda acc, digit: 10 * acc + digit)
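With no accumulator given, the first digit seeds the fold:

import pydash as _

assert decimal_from_list([1, 2, 3]) == 123  # ((1 * 10 + 2) * 10 + 3)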