Example no. 1
 def resolve(self):
     """Used for testing purposes, otherwise use resolve_one()."""
     data = join(map(self.resolve_one, self.get_keys()))
     logger.trace(  # type: ignore[attr-defined]
         "Resolved dvc.yaml:\n%s", data
     )
     return {STAGES_KWD: data}
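Most of the snippets collected here rely on the same behaviour: funcy's join takes a collection of same-typed collections and fuses them into one, so a generator of per-stage dicts (as in the resolve method above) collapses into a single mapping. A minimal, self-contained sketch of that dict-merging behaviour, with made-up stage names for illustration:

from funcy import join

# Each resolver call yields a one-entry dict; join() merges them in order,
# with later dicts overriding earlier ones on key clashes.
resolved = join({name: {"cmd": "run " + name}} for name in ("build", "test"))
assert resolved == {"build": {"cmd": "run build"}, "test": {"cmd": "run test"}}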
Example no. 2
 def _foreach(self, context: Context, name: str, foreach_data, do_data):
     iterable = self._resolve_foreach_data(context, name, foreach_data)
     args = (context, name, do_data, iterable)
     it = (range(len(iterable))
           if not isinstance(iterable, Mapping) else iterable)
     gen = (self._each_iter(*args, i) for i in it)
     return join(gen)
Example no. 3
def get_closure(func):
    if isinstance(func, type):
        methods = inspect.getmembers(func, predicate=inspect.ismethod)
        return join(get_closure(meth.im_func) for _, meth in methods) or {}

    code = Code.from_code(func.__code__)
    names = _code_names(code)
    return project(func.__globals__, names)
Example no. 4
def as_tuple(obj: DataClass, flatten: bool = False) -> tuple:
    if flatten:
        return funcy.join(
            (
                as_tuple(field, flatten=flatten)
                if is_dataclass_instance(field)
                else (field,)
            )
            for field in values(obj).values()
        )
    else:
        return astuple(obj)
Example no. 5
def as_dict(obj: DataClass, flatten: bool = False) -> Dict[str, Any]:
    if flatten:
        return funcy.join(
            (
                as_dict(field, flatten=flatten)
                if is_dataclass_instance(field)
                else {field_name: field}
            )
            for field_name, field in values(obj).items()
        )
    else:
        return asdict(obj)
Example no. 6
 def modify(*args, **kwargs):
     try:
         service_log.put("Params for %s: %s, %s" % (func.func_name, str(args[1:]), str(kwargs)))
         link_func = unittest.TestCase.__dict__[func.func_name]
         return link_func(*args, **kwargs)
     except Exception as tx:
         limit_print_exc = 10
         msg_line = "#" + ("-"*100)
         service_log.error(tx)
         service_log.put("\n%s\n%s\n%s" % (msg_line, traceback.format_exc(limit_print_exc), msg_line))
         trace_stack_log = funcy.join(traceback.format_stack(limit=limit_print_exc))
         service_log.put("Traceback stack:\n%s\n%s" % (str(trace_stack_log), msg_line))
         raise AssertionError(tx)
Example no. 7
def _introspect(func, seen):
    seen.add(func)

    if inspect.isbuiltin(func) or not hasattr(func, '__module__') or not hasattr(func, '__name__'):
        return {}

    if isinstance(func, type):
        methods = inspect.getmembers(func, predicate=inspect.ismethod)
        return join(_introspect(meth, seen) for _, meth in methods if meth not in seen) or {}

    if not hasattr(func, '__defaults__') or not hasattr(func, '__code__'):
        return {}

    func_name = _full_name(func)
    consts = merge(get_defaults(func), get_assignments(func))
    consts_spec = walk_keys(lambda k: '%s.%s' % (func_name, k), consts)
    consts_spec.update({'%s.%s' % (func.__module__, name): value
                        for name, value in get_closure(func).items()})

    # Recurse
    callables = filter(callable, consts_spec.values())
    recurse_specs = (_introspect(f, seen) for f in callables if f not in seen)
    return merge(join(recurse_specs) or {}, consts_spec)
Example no. 8
 def encode_print(msg):
     """ Вывод строки под кодировку консоли.
     :param msg: строка для вывода
     """
     import traceback
     try:
         if platform.uname()[0] != "Windows":
             msg = msg.encode("cp1251")
             return msg
         else:
             return msg.encode("cp1251")
     except Exception as tx:
         import funcy
         logs = funcy.join(traceback.format_stack(limit=10))
         service_log.put("Trace:   " + str(logs))
         service_log.put("End Trace!!!!")
         raise AssertionError(tx)
Example no. 9
    def _foreach(self, context: Context, name: str, foreach_data, in_data):
        def each_iter(value, key=DEFAULT_SENTINEL):
            c = Context.clone(context)
            c["item"] = value
            if key is not DEFAULT_SENTINEL:
                c["key"] = key
            suffix = str(key if key is not DEFAULT_SENTINEL else value)
            return self._resolve_stage(c, f"{name}-{suffix}", in_data)

        iterable = resolve(foreach_data, context)
        if isinstance(iterable, Sequence):
            gen = (each_iter(v) for v in iterable)
        elif isinstance(iterable, Mapping):
            gen = (each_iter(v, k) for k, v in iterable.items())
        else:
            raise Exception(f"got type of {type(iterable)}")
        return join(gen)
Example no. 10
 def modify(*args, **kwargs):
     try:
         service_log.put("Params for %s: %s, %s" %
                         (func.func_name, str(args[1:]), str(kwargs)))
         link_func = unittest.TestCase.__dict__[func.func_name]
         return link_func(*args, **kwargs)
     except Exception as tx:
         limit_print_exc = 10
         msg_line = "#" + ("-" * 100)
         service_log.error(tx)
         service_log.put(
             "\n%s\n%s\n%s" %
             (msg_line, traceback.format_exc(limit_print_exc), msg_line))
         trace_stack_log = funcy.join(
             traceback.format_stack(limit=limit_print_exc))
         service_log.put("Traceback stack:\n%s\n%s" %
                         (str(trace_stack_log), msg_line))
         raise AssertionError(tx)
Example no. 11
def import_jobs_rss():
    _job_feeds_obj = JobFeed.objects.filter(in_edit=False, is_activated=True)
    job_feeds = list(_job_feeds_obj.values_list('link', flat=True))
    excl = list(RejectedList.objects.values_list('title', flat=True))
    incl = list(AcceptedList.objects.values_list('title', flat=True))

    excl_filter = partial(is_not_excl, excl)
    incl_filter = partial(is_incl, incl)

    items = \
        filter(incl_filter,
               filter(excl_filter,
                      map(make_validate_dict,
                          map(prepare_link_title,
                              filter(is_new_job,
                                     join(
                                         map(get_rss_items, job_feeds)))))))
    for x in items:
        save_job(x)
Example no. 12
def search(request):
    q = request.GET.get('q')
    if not q:
        return {'series': None}

    exclude_tags = keep(silent(int), request.GET.getlist('exclude_tags'))
    serie_tags, tag_series, tag_ids = series_tags_data()

    q_string, q_tags = _parse_query(q)
    q_tags, wrong_tags = split(lambda t: t.lower() in tag_ids, q_tags)
    if wrong_tags:
        message = 'Unknown tag%s %s.' % ('s' if len(wrong_tags) > 1 else '', ', '.join(wrong_tags))
        messages.warning(request, message)
    if not q_string and not q_tags:
        return {'series': None}

    qs = search_series_qs(q_string)
    if q_tags:
        q_tag_ids = keep(tag_ids.get(t.lower()) for t in q_tags)
        include_series = reduce(set.intersection, (tag_series[t] for t in q_tag_ids))
        if include_series:
            qs = qs.filter(id__in=include_series)
        else:
            message = 'No series annotated with %s.' \
                % (q_tags[0] if len(q_tags) == 1 else 'all these tags simultaneously')
            messages.warning(request, message)
            return {'series': []}
    if exclude_tags:
        exclude_series = join(tag_series[t] for t in exclude_tags)
        qs = qs.exclude(id__in=exclude_series)

    series_ids = qs.values_list('id', flat=True)
    tags = distinct(imapcat(serie_tags, series_ids), key=itemgetter('id'))
    # TODO: do not hide excluded tags

    return {
        'series': qs,
        'tags': tags,
        'serie_tags': serie_tags,
    }
Example no. 13
 def resolve(self):
     stages = self.data.get(STAGES_KWD, {})
     data = join(starmap(self._resolve_entry, stages.items()))
     logger.trace("Resolved dvc.yaml:\n%s", data)
     return {STAGES_KWD: data}
Example no. 14
def hash_key(state):
    """Compact representation of board for rewards"""
    return join(map(str, list(state.value.flat)))
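join is not limited to mappings: given an iterable of strings it concatenates them, which is what hash_key above relies on when it maps str over the flattened board. A tiny illustration, with an invented cell list standing in for state.value.flat:

from funcy import join

cells = [1, 0, 0, 2]             # stand-in for the flattened board values
key = join(map(str, cells))      # string elements are concatenated in order
assert key == "1002"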
Example no. 15
def par_compose(aagcollection):
    """
    Parallel composition of a collection of aiger circuits. Inputs are shared and all other attributes are not.
    FIXME: Shared inputs are by aiger index and not by symbol name
    """
    def new_index(aag, lit, par_header, prev_gates, prev_latches):
        """
        Computes a new index for literal lit from aag.
        This new index takes into account the header of the parallel
        composition and offsets from gates and latches from other aags
        """
        lit_kind = lit_type(aag, lit)
        if lit_kind in ["c", "i"]:
            return lit
        elif lit_kind == "l":
            par_offset = 2 * (par_header.num_inputs)
            aag_offset = 2 * (aag.header.num_inputs)
            return lit + par_offset - aag_offset + 2 * prev_latches
        elif lit_kind == "a":
            par_offset = 2 * (par_header.num_inputs + par_header.num_latches)
            aag_offset = 2 * (aag.header.num_inputs + aag.header.num_latches)
            return lit + par_offset - aag_offset + 2 * prev_gates

    par_inputs = list(
        reduce(lambda x, y: x.union(y),
               [set(aag.inputs) for aag in aagcollection]))
    par_inputs.sort()

    num_inputs = len(par_inputs)
    num_latches = sum([len(aag.latches) for aag in aagcollection])
    num_outputs = sum([len(aag.outputs) for aag in aagcollection])
    num_gates = sum([len(aag.gates) for aag in aagcollection])
    max_var_index = len(par_inputs) + num_latches + num_gates

    par_header = Header(max_var_index, num_inputs, num_latches, num_outputs,
                        num_gates)

    par_outputs = []
    par_latches = []
    par_gates = []
    # FIXME: Need to have a nontrivial symbol table. What to do with duplicate output, latch names?
    par_symbols = SymbolTable(
        fn.join([aag.symbols.inputs for aag in aagcollection]), [], [])

    prev_gates = 0
    prev_latches = 0
    for aag in aagcollection:
        # Generate index map. Uncomment to construct the index map explicitly
        # index_kind = {i: lit_type(aag,i) for i in range(aag.header.max_var_index*2 + 2)}
        # index_map = { i: new_index(aag, i, par_header, prev_gates, prev_latches)
        #               for i in range(aag.header.max_var_index*2+2)
        #           }

        # Bind circuit aag and prev_gates, prev_latches parameters to function
        newi = lambda x: new_index(aag, x, par_header,
                                   prev_gates, prev_latches)

        # Add latches, gates, and outputs
        par_latches += [[newi(i[0]), newi(i[1])] for i in aag.latches]
        par_gates += [[newi(i[0]), newi(i[1]), newi(i[2])] for i in aag.gates]
        par_outputs += [newi(i) for i in aag.outputs]

        # Increment offset from adding gates and latches
        prev_gates += len(aag.gates)
        prev_latches += len(aag.latches)

    return AAG(par_header, par_inputs, par_outputs, par_latches, par_gates,
               par_symbols, [''])
Example no. 16
def get_options(default, path):
    return funcy.join([default] + funcy.walk(loadexp, path))
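Because join merges dicts left to right with later values winning, placing the defaults first lets every option set loaded from path override them. A small sketch with invented option dicts (loadexp and the real option keys are not shown in the snippet above):

from funcy import join

defaults = {"verbose": False, "retries": 3}
loaded = [{"verbose": True}, {"timeout": 30}]   # e.g. what loadexp might yield per file
options = join([defaults] + loaded)
assert options == {"verbose": True, "retries": 3, "timeout": 30}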
Example no. 17
 def resolve(self):
     stages = self.data.get(STAGES, {})
     data = join(starmap(self._resolve_entry, stages.items()))
     return {**self.data, STAGES: data}
Example no. 18
 def resolve_all(self):
     return join(map(self.resolve_one, self.normalized_iterable))
Example no. 19
def search(request):
    # Save last specie in session
    specie = request.GET.get('specie')
    if specie != request.session.get('specie'):
        request.session['specie'] = specie

    q = request.GET.get('q')
    if not q:
        return {'series': None}

    exclude_tags = lkeep(silent(int), request.GET.getlist('exclude_tags'))
    series_tags, tag_series, tag_ids = series_tags_data()

    # Parse query
    q_string, q_tags = _parse_query(q)
    q_tags, wrong_tags = lsplit(lambda t: t.lower() in tag_ids, q_tags)
    if wrong_tags:
        message = 'Unknown tag%s %s.' % ('s' if len(wrong_tags) > 1 else '',
                                         ', '.join(wrong_tags))
        messages.warning(request, message)
    if not q_string and not q_tags:
        return {'series': None}

    # Build qs
    qs = search_series_qs(q_string)
    if specie:
        qs = qs.filter(specie=specie)

    if q_tags:
        q_tag_ids = lkeep(tag_ids.get(t.lower()) for t in q_tags)
        include_series = reduce(set.intersection,
                                (tag_series[t] for t in q_tag_ids))
        if include_series:
            qs = qs.filter(id__in=include_series)
        else:
            message = 'No series annotated with %s.' \
                % (q_tags[0] if len(q_tags) == 1 else 'all these tags simultaneously')
            messages.warning(request, message)
            return {'series': []}

    series_ids = qs.values_list('id', flat=True).order_by()
    tags = ldistinct(mapcat(series_tags, series_ids), key=itemgetter('id'))

    if exclude_tags:
        exclude_series = join(tag_series[t] for t in exclude_tags)
        qs = qs.exclude(id__in=exclude_series)

    series = paginate(request, qs, 10)

    # Get annotations statuses
    annos_qs = SeriesAnnotation.objects.filter(series__in=series) \
                               .values_list('series_id', 'tag_id', 'best_cohens_kappa')
    tags_validated = {(s, t): k == 1 for s, t, k in annos_qs}

    return dict(
        {
            'series': series,
            'page': series,
            'tags_validated': tags_validated,
            'tags': tags,
            'series_tags': series_tags,
        }, **_search_stats(qs))
Example no. 20
 def _func_call(self, *args):
     func_args = self.func_args or ()
     func_kwargs = self.func_kwargs or {}
     args = funcy.join((args, func_args))  # py34
     return self.func(*args, **func_kwargs)
Example no. 21
def log(state):
    """Log all rewards to tab-separated file"""
    raw = state.value
    result = join(map(lambda k: k + "\t" + str(raw[k]) + "\n", raw))
    with open('rewards.tsv', 'w') as f:
        f.write(result)