Example #1
0
def removePyc(folder, only_excess=True, show_logs=True):
    """Delete stale ``.pyc`` files (and newly emptied directories) under *folder*.

    Parameters
    ----------
    folder : str
        Root directory to scan; normalized via ``sp`` first.
    only_excess : bool
        When True (default), only remove a ``.pyc`` whose matching ``.py``
        source no longer exists in the same directory; when False, remove
        every ``.pyc`` found.
    show_logs : bool
        When True, log each removal at debug level.
    """
    folder = sp(folder)

    for root, dirs, files in os.walk(folder):

        pyc_files = filter(lambda filename: filename.endswith(".pyc"), files)
        py_files = set(filter(lambda filename: filename.endswith(".py"), files))
        # A .pyc is "excess" when stripping the trailing "c" does not name a
        # .py file that still exists alongside it.
        excess_pyc_files = (
            filter(lambda pyc_filename: pyc_filename[:-1] not in py_files, pyc_files) if only_excess else pyc_files
        )

        for excess_pyc_file in excess_pyc_files:
            full_path = os.path.join(root, excess_pyc_file)
            if show_logs:
                log.debug("Removing old PYC file: %s", full_path)
            try:
                os.remove(full_path)
            except OSError:
                # BUG FIX: lazy %-formatting needs the arguments passed
                # separately; the original passed a single tuple for two
                # placeholders, which breaks the format expansion.
                log.error("Couldn't remove %s: %s", full_path, traceback.format_exc())

        # Prune directories that are (or have become) empty.
        for dir_name in dirs:
            full_path = os.path.join(root, dir_name)
            if len(os.listdir(full_path)) == 0:
                try:
                    os.rmdir(full_path)
                except OSError:
                    log.error("Couldn't remove empty directory %s: %s", full_path, traceback.format_exc())
 def test_load_config(self):
     """Verify that reloading ``settings`` fails loudly when no praw.ini exists.

     Steps: (1) prove the config loads under each config-location environment
     variable; (2) remove every reachable praw.ini, remembering its contents;
     (3) assert the reload raises an exception naming the searched files;
     (4) restore both the environment and the config files.
     """
     environkeys = ("APPDATA", "XDG_CONFIG_HOME", "HOME")
     # Snapshot current values so they can be restored afterwards.
     environkeys = {key: os.getenv(key) for key in environkeys}
     # Pick any one value that is actually set to use as a known-good path.
     knownkey = list(filter(None, environkeys.values()))
     knownkey = knownkey[0] if knownkey else None
     for environkey, oldkey in environkeys.items():
         # NOTE(review): if none of the variables is set, knownkey is None and
         # this assignment raises TypeError -- confirm the test environment
         # always defines at least one of APPDATA/XDG_CONFIG_HOME/HOME.
         os.environ[environkey] = knownkey
         try:
             reload_module(settings)
         except Exception:
             raise AssertionError("Could not load config " "for key {}".format(environkey))
         finally:
             # Undo the override, restoring the original value if there was one.
             del os.environ[environkey]
             if oldkey is not None:
                 os.environ[environkey] = oldkey
     # Clear every config-location variable so no praw.ini is discoverable.
     for environkey in filter(lambda key: key in os.environ, environkeys):
         del os.environ[environkey]
     configfiles = [os.path.join(os.path.dirname(sys.modules[settings.__name__].__file__), "praw.ini"), "praw.ini"]
     configdata = {}
     for cfile in configfiles:
         try:
             with open(cfile, "r") as f:
                 # NOTE(review): the (now closed) file object itself is the
                 # dict key; only its .name is read back below, so this works,
                 # but keying on the path would be clearer.
                 configdata[f] = f.read()
                 os.remove(f.name)
         except IOError:
             pass
     try:
         # With no config files available, the reload must raise and the
         # message must mention the searched file list.
         assertRaisesRegex(self, Exception, re.escape(str(configfiles)), reload_module, settings)
     finally:
         # Restore the environment and rewrite the removed config files.
         for environkey, oldkey in environkeys.items():
             if oldkey is not None:
                 os.environ[environkey] = oldkey
         for fileobj, filedata in configdata.items():
             with open(fileobj.name, "w") as f:
                 f.write(filedata)
Example #3
0
    def process_trade(self, trade_event):
        """Match open orders for the traded sid against *trade_event*.

        Yields the ``(txn, order)`` pairs produced by
        ``process_transactions`` for every order dated at or before the
        event, then drops any orders that processing closed. The sid's
        entry in ``open_orders`` is removed once no orders remain.
        """
        sid = trade_event.sid
        if sid not in self.open_orders:
            return

        # Some stocks trade less often than once per minute, which yields
        # zero-volume trade events; there is nothing to match against those.
        if trade_event.volume < 1:
            return

        orders = self.open_orders[sid]
        orders.sort(key=lambda o: o.dt)
        # Restrict matching to orders placed on or before the event date.
        current_orders = [o for o in orders if o.dt <= trade_event.dt]

        processed = []
        for txn, order in self.process_transactions(trade_event, current_orders):
            processed.append(order)
            yield txn, order

        # Drop closed orders; only just-processed orders can have closed.
        for closed in [o for o in processed if not o.open]:
            orders.remove(closed)

        if not orders:
            del self.open_orders[sid]
Example #4
0
    def process(self):
        """Process the uploaded file and attach the matching products.

        Extracts candidate IDs from the upload, discards those already in
        the range, adds every product whose SKU or UPC matches a remaining
        ID, and records stats (matched count, missing IDs, duplicates).
        """
        all_ids = set(self.extract_ids())
        included = self.range.included_products.all()

        # IDs already attached to the range (blank values discarded).
        existing_skus = {sku for sku in included.values_list("stockrecord__partner_sku", flat=True) if sku}
        existing_upcs = {upc for upc in included.values_list("upc", flat=True) if upc}
        existing_ids = existing_skus | existing_upcs
        new_ids = all_ids - existing_ids

        # Match on either SKU or UPC and attach each hit to the range.
        products = Product._default_manager.filter(
            models.Q(stockrecord__partner_sku__in=new_ids) | models.Q(upc__in=new_ids)
        )
        for product in products:
            self.range.add_product(product)

        # Processing stats: which requested IDs were actually matched.
        found_skus = {sku for sku in products.values_list("stockrecord__partner_sku", flat=True) if sku}
        found_upcs = {upc for upc in products.values_list("upc", flat=True) if upc}
        missing_ids = new_ids - (found_skus | found_upcs)
        dupes = all_ids & existing_ids

        self.mark_as_processed(products.count(), len(missing_ids), len(dupes))
Example #5
0
    def startup(self, group):
        """ Prepare for new run. """

        # Cache the inclusion lists for recording, keyed by system pathname,
        # so later recording only consults this precomputed cache.
        kept_params = [name for name in group.params if self._check_path(name)]
        kept_unknowns = [name for name in group.unknowns if self._check_path(name)]
        kept_resids = [name for name in group.resids if self._check_path(name)]

        self._filtered[group.pathname] = (kept_params, kept_unknowns, kept_resids)
Example #6
0
    def __init__(self, worker_id, conf, namespaces=None, pollster_list=None):
        """Set up the polling agent manager.

        worker_id: identifier handed to the base service class.
        conf: oslo.config configuration object.
        namespaces: polling namespace(s) to load extensions from;
            defaults to ["compute", "central"]. A bare string is accepted.
        pollster_list: glob patterns restricting which pollsters run;
            mutually exclusive with coordination (see below).
        """
        namespaces = namespaces or ["compute", "central"]
        pollster_list = pollster_list or []
        group_prefix = conf.polling.partitioning_group_prefix

        # features of using coordination and pollster-list are exclusive, and
        # cannot be used at one moment to avoid both samples duplication and
        # samples being lost
        if pollster_list and conf.coordination.backend_url:
            raise PollsterListForbidden()

        super(AgentManager, self).__init__(worker_id, conf)

        def _match(pollster):
            """Find out if pollster name matches to one of the list."""
            return any(fnmatch.fnmatch(pollster.name, pattern) for pattern in pollster_list)

        # Accept a single namespace string as well as a list of them.
        if type(namespaces) is not list:
            namespaces = [namespaces]

        # we'll have default ['compute', 'central'] here if no namespaces will
        # be passed
        extensions = (self._extensions("poll", namespace, self.conf).extensions for namespace in namespaces)
        # get the extensions from pollster builder
        extensions_fb = (self._extensions_from_builder("poll", namespace) for namespace in namespaces)
        if pollster_list:
            # Keep only pollsters whose name matches one of the glob patterns.
            extensions = (moves.filter(_match, exts) for exts in extensions)
            extensions_fb = (moves.filter(_match, exts) for exts in extensions_fb)

        self.extensions = list(itertools.chain(*list(extensions))) + list(itertools.chain(*list(extensions_fb)))

        if self.extensions == []:
            raise EmptyPollstersList()

        discoveries = (self._extensions("discover", namespace, self.conf).extensions for namespace in namespaces)
        self.discoveries = list(itertools.chain(*list(discoveries)))
        # Created later when polling actually starts.
        self.polling_periodics = None

        # Coordination: partition work across agents and heartbeat membership.
        self.partition_coordinator = coordination.PartitionCoordinator(self.conf)
        self.heartbeat_timer = utils.create_periodic(
            target=self.partition_coordinator.heartbeat, spacing=self.conf.coordination.heartbeat, run_immediately=True
        )

        # Compose coordination group prefix.
        # We'll use namespaces as the basement for this partitioning.
        namespace_prefix = "-".join(sorted(namespaces))
        self.group_prefix = "%s-%s" % (namespace_prefix, group_prefix) if group_prefix else namespace_prefix

        # Notifier used to publish the polled samples.
        self.notifier = oslo_messaging.Notifier(
            messaging.get_transport(self.conf),
            driver=self.conf.publisher_notifier.telemetry_driver,
            publisher_id="ceilometer.polling",
        )

        # Keystone client and its last failure, both populated lazily.
        self._keystone = None
        self._keystone_last_exception = None
Example #7
0
def group_detail(group_id):
    """Return a JSON description of a group: its users plus the divisions in
    which it holds submit/review/pay permissions. 404s on an unknown id."""
    group = Group.query.get_or_404(group_id)

    def divisions_for(permission_type):
        # Divisions where this group holds the given permission.
        return [p.division for p in group.permissions if p.permission == permission_type]

    resp = {
        u"name": group.name,
        u"users": list(group.users),
        u"divisions": {
            u"submit": list(set(divisions_for(PermissionType.submit))),
            u"review": list(set(divisions_for(PermissionType.review))),
            u"pay": list(set(divisions_for(PermissionType.pay))),
        },
    }
    return jsonify(resp)
 def _get_nodes(self, account=None, type=None):
     """Return affected nodes matching the filters."""
     result = self.affected_nodes
     if account:
         # Allow to filter by multiple accounts
         if not isinstance(account, list):
             account = [account]
         result = filter(lambda n: all([n.affects_account(a) for a in account]), result)
     if type:
         result = filter(lambda n: n.type == type, result)
     return list(result)
Example #9
0
    def getAndAssertTableRowAction(self, response, table_name, action_name, row_id):
        """Fetch the named row action for ``row_id`` from a rendered table.

        Asserts that exactly one row matches ``row_id`` and that exactly one
        of its row actions is named ``action_name``, then returns that action.
        """
        table = response.context[table_name + "_table"]
        matching_rows = [row for row in table.data if row.id == row_id]
        self.assertEqual(1, len(matching_rows), "Did not find a row matching id '%s'" % row_id)

        row_actions = table.get_row_actions(matching_rows[0])
        actions = [action for action in row_actions if action.name == action_name]

        msg_args = (action_name, table_name, row_id)
        self.assertTrue(len(actions) > 0, "No action named '%s' found in '%s' table for id '%s'" % msg_args)

        self.assertEqual(1, len(actions), "Multiple actions named '%s' found in '%s' table for id '%s'" % msg_args)

        return actions[0]
Example #10
0
def user_detail(user_id):
    """Return a JSON description of a user: groups, admin flag, requests,
    and the divisions with submit/review/pay permission. 404s on unknown id."""
    user = User.query.get_or_404(user_id)

    def divisions_for(permission_type):
        # Divisions where this user holds the given permission.
        return [p.division for p in user.permissions if p.permission == permission_type]

    resp = {
        u"name": user.name,
        u"groups": list(user.groups),
        u"divisions": {
            u"submit": list(set(divisions_for(PermissionType.submit))),
            u"review": list(set(divisions_for(PermissionType.review))),
            u"pay": list(set(divisions_for(PermissionType.pay))),
        },
        u"admin": user.admin,
        u"requests": user.requests,
    }
    return jsonify(resp)
Example #11
0
def get_recognizer(args):
    """Get the file recognizer object from the configured options."""
    # Splitting an empty string yields [""], so drop falsy entries to end up
    # with genuinely empty sets.
    skip_dirs = {name for name in args.skip_dirs.split(",") if name}
    skip_exts = {ext for ext in args.skip_exts.split(",") if ext}
    follow = args.follow_symlinks
    return FileRecognizer(
        skip_hidden_files=args.skip_hidden_files,
        skip_backup_files=args.skip_backup_files,
        skip_hidden_dirs=args.skip_hidden_dirs,
        skip_dirs=skip_dirs,
        skip_exts=skip_exts,
        skip_symlink_files=not follow,
        skip_symlink_dirs=not follow,
    )
Example #12
0
 def inner(function):
     # Normalize the ambiguous keyname(s) into a list of raw key tokens,
     # then register `function` for each precise keyname they map to.
     if isinstance(ambiguous_keyname, list):
         raw_keys = ambiguous_keyname
     elif isinstance(ambiguous_keyname, binary_type):
         raw_keys = BLANK.split(ambiguous_keyname)
     elif isinstance(ambiguous_keyname, text_type):
         # Text input is encoded to bytes before splitting.
         raw_keys = BLANK.split(b(ambiguous_keyname))
     else:
         raise (Exception("bad argument error"))
     for raw_key in raw_keys:
         if raw_key:
             precise = self._get_precise_keyname(raw_key)
             self._statusbar_dispatch_table[precise] = function
Example #13
0
    def create_fresh_generator(self):
        """Build a new event generator for this source.

        Replays ``self.event_list`` (re-stamped with this source's id) when
        one was supplied; otherwise synthesizes trade events over the
        configured date range and sids. The stream is then optionally
        restricted to sids in ``self.filter``.
        """
        if self.event_list:
            event_gen = (event for event in self.event_list)
            unfiltered = self.update_source_id(event_gen)

        # Set up iterators for each expected field.
        else:
            if self.concurrent:
                # in this context the count is the number of
                # trades per sid, not the total.
                date_generator = date_gen(start=self.start, end=self.end, delta=self.delta, repeats=len(self.sids))
            else:
                date_generator = date_gen(start=self.start, end=self.end, delta=self.delta)

            source_id = self.get_hash()

            # Synthetic trades: price cycles through 1.0-10.0 and amount
            # through 100-950 as the (date, sid) product is enumerated.
            unfiltered = (
                create_trade(
                    sid=sid, price=float(i % 10) + 1.0, amount=(i * 50) % 900 + 100, datetime=date, source_id=source_id
                )
                for (i, date), sid in itertools.product(enumerate(date_generator), self.sids)
            )

        # If we specified a sid filter, filter out elements that don't
        # match the filter.
        # NOTE: ``self.filter`` is an instance attribute, not the builtin.
        if self.filter:
            filtered = filter(lambda event: event.sid in self.filter, unfiltered)

        # Otherwise just use all events.
        else:
            filtered = unfiltered

        # Return the filtered event stream.
        return filtered
    def exclude(self, iterable):
        """
        Returns a filtered list of applying ~self to the elements of iterable.

        This is a similar API to QuerySet.exclude.
        """
        negated = ~self
        return [item for item in iterable if negated.eval(item)]
Example #15
0
    def getTrailers(self, files):
        """Return the subset of *files* that look like trailer videos.

        A file qualifies when its lowercased name contains "trailer"
        (optionally followed by digits) delimited by non-word characters or
        underscores, and its size is within the configured "trailer" range.
        """
        # BUG FIX: raw string -- in a plain string "\W" and "\d" are invalid
        # escape sequences (DeprecationWarning, a SyntaxError in the future).
        trailer_re = re.compile(r"(^|[\W_])trailer\d*[\W_]")

        def test(s):
            return trailer_re.search(s.lower()) and self.filesizeBetween(s, self.file_sizes["trailer"])

        return set(filter(test, files))
Example #16
0
    def _load_task(self, **kw):
        """Look up a single task by exactly one of: id, uuid, description.

        Returns a ``(line, task)`` pair where ``line`` is the task's 1-based
        position within its status file. For an out-of-range id lookup,
        ``task`` is ``{}`` and ``line`` is the requested id.
        """
        valid_keys = set(["id", "uuid", "description"])
        id_keys = valid_keys.intersection(kw.keys())

        if len(id_keys) != 1:
            raise KeyError("Only 1 ID keyword argument may be specified")

        (key,) = id_keys
        if key not in valid_keys:
            raise KeyError("Argument must be one of %r" % valid_keys)

        line = None
        task = dict()

        if key == "id":
            # IDs only exist for pending tasks (completed tasks lose theirs).
            tasks = self.load_tasks(command=Status.PENDING)
            line = kw[key]
            pending = tasks[Status.PENDING]
            if len(pending) >= line:
                task = pending[line - 1]
        else:
            # Search every task for the requested uuid/description value.
            tasks = self.load_tasks(command=Command.ALL)
            all_tasks = sum(tasks.values(), [])
            matches = [t for t in all_tasks if t.get(key, None) == kw[key]]
            if matches:
                task = matches[0]
                line = tasks[Status.to_file(task["status"])].index(task) + 1

        return line, task
Example #17
0
    def test_get_query(self):
        """Filtering executions by action name must return only the created one."""
        execution_id = self._get_actionexecution_id(self._do_post(LIVE_ACTION_1))

        resp = self.app.get("/v1/executions?action=%s" % LIVE_ACTION_1["action"])
        self.assertEqual(resp.status_int, 200)
        matches = [ae for ae in resp.json if ae["id"] == execution_id]
        self.assertEqual(len(matches), 1, "/v1/executions did not return correct liveaction.")
Example #18
0
def _get_labels_from_paml(tree, relations, pamout, model):
    """
    in case problem in labelling... and of course it is not my fault...
    retrieve node_ids from outfile... from relations line.
    This may occur when loading a model that was run outside ETE.
    """
    from copy import copy

    old2new = {}
    # label leaves from the "#<paml_id>: <name>" lines of the PAML out file.
    # BUG FIX: open the file in a `with` block (and stream line by line)
    # instead of leaking the handle from open(...).readlines().
    with open(pamout, "r") as handle:
        for line in handle:
            if re.search("^#[0-9][0-9]*:", line):
                nam, paml_id = re.sub("#([0-9]+): (.*)", "\\2 \\1", line.strip()).split()
                node = tree & nam
                old2new[node.node_id] = int(paml_id)
                node.add_feature("node_id", int(paml_id))
            if line.startswith("Sums of codon"):
                # End of the section that carries leaf labels.
                break
    # label the root
    tree.add_feature("node_id", int(len(tree) + 1))
    # label other internal nodes: each parent takes the paml id paired with
    # its child in `relations`.
    for node in tree.traverse(strategy="postorder"):
        if node.is_root():
            continue
        paml_id = next(filter(lambda x: x[1] == node.node_id, relations))[0]
        old2new[node.up.node_id] = paml_id
        node.up.node_id = paml_id
    ### change keys in branches dict of model
    branches = copy(model.branches)
    for b in model.branches:
        model.branches[b] = branches[old2new[b]]
Example #19
0
def find_files(options):
    """Return the backup files needed to restore state as of ``options.date``.

    Picks the newest full backup (``.fs``/``.fsz``) not newer than the cutoff
    plus every later incremental up to it, returned in chronological order as
    paths inside the repository. Returns an empty list when nothing matches.
    """
    when = options.date
    if not when:
        when = gen_filename(options, ext="")
    log("looking for files between last full backup and %s...", when)
    # Newest file first. (Renamed from `all`, which shadowed the builtin.)
    candidates = sorted(filter(is_data_file, os.listdir(options.repository)), reverse=True)
    # Find the last full backup before date, then include all the
    # incrementals between that full backup and "when".
    needed = []
    for fname in candidates:
        root, ext = os.path.splitext(fname)
        if root <= when:
            needed.append(fname)
            if ext in (".fs", ".fsz"):
                # A full backup terminates the chain.
                break
    # Make the file names relative to the repository directory
    needed = [os.path.join(options.repository, f) for f in needed]
    # Restore back to chronological order
    needed.reverse()
    if needed:
        log("files needed to recover state as of %s:", when)
        for f in needed:
            log("\t%s", f)
    else:
        log("no files found")
    return needed
Example #20
0
def test_every_widget_exposed():
    """ Is widget exposed by tw2.forms also in tw2.bootstrap? """

    is_widget = lambda obj: isinstance(obj, twc.widgets.WidgetMeta)

    # BUG FIX: materialize the filters. In Python 3 `filter` returns a
    # one-shot iterator; the name comprehension below exhausted
    # `twf_widgets`, so the final `for` loop generated zero checks and the
    # test silently passed without testing anything.
    twf_widgets = list(filter(is_widget, [getattr(twf, attr) for attr in dir(twf)]))
    twb_widgets = list(filter(is_widget, [getattr(twb, attr) for attr in dir(twb)]))
    twf_widget_names = [w.__name__ for w in twf_widgets]
    twb_widget_names = [w.__name__ for w in twb_widgets]

    def _is_widget_exposed(name):
        msg = "%r in tw2.forms but not tw2.bootstrap" % name
        assert name in twb_widget_names, msg

    # Nose-style generator test: one check per tw2.forms widget.
    for twf_widget in twf_widgets:
        yield _is_widget_exposed, twf_widget.__name__
Example #21
0
def _perform_grouping(args):
    """Group (entry, host) pairs by structural similarity of their hosts.

    Repeatedly takes the first unmatched pair as a reference, collects every
    remaining pair whose host fits it under a StructureMatcher, appends the
    matched entries (JSON-encoded) to ``groups``, and continues until no
    pairs remain. ``args`` is a single tuple so this can run via map/Pool.
    """
    (entries_json, hosts_json, ltol, stol, angle_tol, primitive_cell, scale, comparator, groups) = args

    entries = json.loads(entries_json, cls=MontyDecoder)
    hosts = json.loads(hosts_json, cls=MontyDecoder)
    # The matcher only carries configuration, so build it once up front
    # instead of re-creating it for every pairwise comparison.
    matcher = StructureMatcher(
        ltol=ltol,
        stol=stol,
        angle_tol=angle_tol,
        primitive_cell=primitive_cell,
        scale=scale,
        comparator=comparator,
    )
    unmatched = list(zip(entries, hosts))
    while len(unmatched) > 0:
        ref_host = unmatched[0][1]
        logger.info("Reference tid = {}, formula = {}".format(unmatched[0][0].entry_id, ref_host.formula))
        ref_formula = ref_host.composition.reduced_formula
        logger.info("Reference host = {}".format(ref_formula))
        matches = [unmatched[0]]
        for i in range(1, len(unmatched)):
            test_host = unmatched[i][1]
            logger.info("Testing tid = {}, formula = {}".format(unmatched[i][0].entry_id, test_host.formula))
            test_formula = test_host.composition.reduced_formula
            logger.info("Test host = {}".format(test_formula))
            if matcher.fit(ref_host, test_host):
                logger.info("Fit found")
                matches.append(unmatched[i])
        groups.append(json.dumps([m[0] for m in matches], cls=MontyEncoder))
        unmatched = [pair for pair in unmatched if pair not in matches]
        logger.info("{} unmatched remaining".format(len(unmatched)))
Example #22
0
    def find_neighbors_data(self, c, distance, metric=None):
        """ Find node neighbors based on distance between `data` attribute

        Parameters
        -----------
        c : array-like, shape = N,
            `data` array to search around
        distance: float
            Maximum range for inclusion the returned neighbors list
        metric : callable, optional (default : None)
            Metric function for deciding 'closeness' wrt to `data` attribute
            If `None`, Euclidean distance will be used

        Returns
        -------
        neighbors : list of int
            List of node ids in the "neighborhood"

        Notes
        ------
        Includes the query node in the result

        """
        # Fall back to Euclidean distance when no metric is supplied.
        dist_fn = eud if metric is None else metric
        return [node for node in self.G.nodes() if dist_fn(self.gna(node, "data"), c) <= distance]
Example #23
0
    def test_365_calendar(self):
        """A PP field using the 365-day calendar should convert to exactly one
        "time" auxiliary coordinate that keeps that calendar."""
        # Minimal mock PP field: 2013-01-01 12:00 with lbtim (ia=2, ib=1, ic=0).
        f = mock.MagicMock(
            lbtim=SplittableInt(4, {"ia": 2, "ib": 1, "ic": 0}),
            lbyr=2013,
            lbmon=1,
            lbdat=1,
            lbhr=12,
            lbmin=0,
            lbsec=0,
            spec=PPField3,
        )
        # Bind the real time_unit implementation onto the mock so conversion
        # computes units from the mocked header values.
        f.time_unit = six.create_bound_method(PPField3.time_unit, f)
        f.calendar = cf_units.CALENDAR_365_DAY
        (
            factories,
            references,
            standard_name,
            long_name,
            units,
            attributes,
            cell_methods,
            dim_coords_and_dims,
            aux_coords_and_dims,
        ) = convert(f)

        def is_t_coord(coord_and_dims):
            # aux_coords_and_dims holds (coord, dims) pairs; keep only "time".
            coord, dims = coord_and_dims
            return coord.standard_name == "time"

        coords_and_dims = list(filter(is_t_coord, aux_coords_and_dims))
        self.assertEqual(len(coords_and_dims), 1)
        coord, dims = coords_and_dims[0]
        self.assertEqual(guess_coord_axis(coord), "T")
        self.assertEqual(coord.units.calendar, "365_day")
Example #24
0
def _configure(args):
    """Initialize oslo.config for the project from CLI args and config files.

    When a virtualenv is active, <virtualenv>/etc/<file> and
    <virtualenv>/etc/<project>/<file> are considered before the standard
    search path; config_dir defaults to the directory of the most specific
    config file actually found.
    """
    cfg_file = "{0}.conf".format(PROJECT)
    config_files = []

    # if virtualenv is active; then leverage <virtualenv>/etc
    # and <virtualenv>/etc/<project>
    virtual_path = os.getenv("VIRTUAL_ENV")
    if virtual_path:
        etc_dir = os.path.join(virtual_path, "etc")
        config_files.append(os.path.join(etc_dir, cfg_file))
        config_files.append(os.path.join(etc_dir, PROJECT, cfg_file))

    config_files.extend(cfg.find_config_files(project=PROJECT))

    # Only hand real files to oslo.config.
    existing_files = [path for path in config_files if os.path.isfile(path)]
    cfg.CONF(
        args,
        project=PROJECT,
        version=version.version_string(),
        default_config_files=existing_files,
    )

    # if no config_dir was provided then we will set it to the
    # path of the most specific config file found.
    if not cfg.CONF.config_dir:
        cfg.CONF.set_default("config_dir", os.path.dirname(cfg.CONF.config_file[-1]))
Example #25
0
    def parse_args(self, arguments):
        """Parse *arguments* into options plus positionals and validate them.

        Raises ParsingError for unknown options or surplus positional
        arguments; mandatory-parameter validation runs on the combined
        result before it is returned.
        """
        parsed_options, unhandled_args = self.parse_options(copy.copy(arguments))
        unhandled_names = [arg[0] for arg in unhandled_args]

        # Anything left that still looks like an option ('-'/'--') is unknown.
        unknown_options = [name for name in unhandled_names if is_option(name)]
        if unknown_options:
            raise ParsingError("Unknown option: %s" % ",".join(unknown_options))

        # All unhandled arguments have to be positional arguments.
        if len(unhandled_args) > len(self.positional_paramdefs):
            pos_param_names = ", ".join([p.name for p in self.positional_paramdefs])
            raise ParsingError(
                "Tool expects %d positional arguments (%s) but %d (%s) were passed in."
                % (
                    len(self.positional_paramdefs),
                    pos_param_names,
                    len(unhandled_args),
                    ", ".join(unhandled_names),
                )
            )

        parsed_positionals = self.parse_positionals(unhandled_names)
        result = self.combine_results(parsed_options, parsed_positionals, unhandled_args)
        self.validate_mandatory(result)

        return result
Example #26
0
def unique_to_each(*iterables):
    """Return, for each input iterable, the elements found in no other input.

    For example, with packages mapped to dependency sets, this answers
    "which dependencies become removable if a given package is removed?"::

        >>> unique_to_each({'A', 'B'}, {'B', 'C'}, {'B', 'D'})
        [['A'], ['C'], ['D']]

    Duplicates within a single input are preserved, and input order is kept::

        >>> unique_to_each("mississippi", "missouri")
        [['p', 'p'], ['o', 'u', 'r']]

    Elements must be hashable.
    """
    pools = [list(iterable) for iterable in iterables]
    # Count in how many *distinct* inputs each element occurs.
    occurrence = Counter(chain.from_iterable(set(pool) for pool in pools))
    singletons = {element for element, count in occurrence.items() if count == 1}
    return [[element for element in pool if element in singletons] for pool in pools]
Example #27
0
    def test_register_consumer(self):
        """
        The client can register consumers using a decorator or
        calling directly :method register_consumer:. The client also
        can deregister consumers.
        """

        @self.event_hub.register_consumer
        def write_to_file(event_json, *args, **kwargs):
            # BUG FIX: "a+w" is not a valid open() mode in Python 3 (it mixes
            # append and write and raises ValueError); plain append mode with
            # a context manager also guarantees the handle is closed.
            with open("events.log", "a") as f:
                f.write(event_json)

        push_consumer = PushConsumer(self.backend, self.event_hub_id)
        self.event_hub.register_consumer(push_consumer)

        # The previous consumers are indeed registered
        assert self.event_hub.is_registered(push_consumer) is True
        assert self.event_hub.is_registered(write_to_file) is True

        # Registering the same PushConsumer as a sequence of consumers
        repeated_push_consumer = PushConsumer(self.backend, self.event_hub_id)
        self.event_hub.register_consumer(repeated_push_consumer)

        # The previous operation has no effect as the consumer has
        # been previously registered
        registered = list(self.event_hub.registered_consumers)
        assert len(list(filter(self.event_hub.is_registered, registered))) == 2

        # Deregister previous consumers
        for consumer in [write_to_file, push_consumer]:
            self.event_hub.deregister_consumer(consumer)

        assert len(self.event_hub.registered_consumers) == 0
Example #28
0
    def get_cert(self, commonname, sans):
        """Return a ``(cert, privkey, cert_chain)`` tuple for the given name.

        commonname: Common name for the generated certificate. Must be a
        valid, plain-ASCII, IDNA-encoded domain name.

        sans: A list of Subject Alternate Names.

        A cached store entry matching any wildcard form of the name/SANs (or
        the exact pair) is reused; otherwise a dummy certificate is created,
        cached under ``(commonname, sans)``, and returned.
        """
        # Candidate lookup keys: wildcard forms of the name and of every SAN,
        # plus the exact (name, sans) pair itself.
        potential_keys = self.asterisk_forms(commonname)
        for san in sans:
            potential_keys.extend(self.asterisk_forms(san))
        potential_keys.append((commonname, tuple(sans)))

        found = next((key for key in potential_keys if key in self.certs), None)
        if found:
            entry = self.certs[found]
        else:
            entry = CertStoreEntry(
                cert=dummy_cert(self.default_privatekey, self.default_ca, commonname, sans),
                privatekey=self.default_privatekey,
                chain_file=self.default_chain_file,
            )
            self.certs[(commonname, tuple(sans))] = entry

        return entry.cert, entry.privatekey, entry.chain_file
Example #29
0
def all_pairs_matching_predicate(values, pred):
    """
    Return an iterator of all pairs, (v0, v1) from values such that

    `pred(v0, v1) == True`

    Parameters
    ----------
    values : iterable
    pred : function

    Returns
    -------
    pairs_iterator : generator
       Generator yielding pairs matching `pred`.

    Examples
    --------
    >>> from operator import eq, lt
    >>> list(all_pairs_matching_predicate(range(5), eq))
    [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4)]
    >>> list(all_pairs_matching_predicate("abcd", lt))
    [('a', 'b'), ('a', 'c'), ('a', 'd'), ('b', 'c'), ('b', 'd'), ('c', 'd')]
    """
    # Lazily walk the full cartesian square and keep matching pairs.
    return (pair for pair in product(values, repeat=2) if pred(*pair))
Example #30
0
    def get_task(self, **kw):
        """Return ``(id, task)`` for the single pending task matching *kw*.

        Exactly one of ``id``, ``uuid`` or ``description`` must be supplied.
        Raises KeyError for a bad/missing keyword and ValueError when no
        pending task matches.
        """
        valid_keys = ["id", "uuid", "description"]

        if len(kw) != 1:
            raise KeyError("get_task must receive one keyword argument")

        key = next(iter(kw))
        if key not in valid_keys:
            raise KeyError("Argument must be one of %r" % valid_keys)

        pending = self.load_tasks()["pending"]

        if key == "id":
            # IDs are 1-based positions within the pending list.
            task_id = kw[key]
            if len(pending) < task_id:
                raise ValueError("No such pending task with id %i." % task_id)
            task = pending[task_id - 1]
        else:
            matching = [t for t in pending if t[key] == kw[key]]
            if not matching:
                raise ValueError("No such pending task with %s %r." % (key, kw[key]))
            task = matching[0]
            task_id = pending.index(task) + 1

        return task_id, task