Example #1
1
def parseOptions(snakefile, tasks):
    # Break the passed args into sub-lists per supplied task
    # This allows for:
    #    test -w build --all
    # Where -w is only for test and --all is only for build
    args_per_task = OrderedDict()
    cur_task = None
    for arg in sys.argv[1:]:
        if arg in tasks:
            cur_task = arg
            args_per_task[arg] = []
        else:
            assert cur_task is not None, "Tasks must come before args"
            args_per_task[cur_task].append(arg)

    # Parse the args for each task providing nice failures and help text
    parsed_per_task = OrderedDict()
    for task, args in args_per_task.viewitems():
        # Get a parser that knows all of fields in question
        parser = buildArgParser(snakefile, task)

        # Store the options with the task
        # Even if no options were passed in the key must be set so that we
        # know the user selected this task
        parsed_per_task[task] = vars(parser.parse_args(args))

    return parsed_per_task
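
A minimal sketch of the task-splitting idea above, runnable on its own; the argv and task names are made up, and the snakefile-driven buildArgParser step is left out:

from collections import OrderedDict

def split_by_task(argv, tasks):
    # Group arguments under the most recently seen task name.
    args_per_task = OrderedDict()
    cur_task = None
    for arg in argv:
        if arg in tasks:
            cur_task = arg
            args_per_task[arg] = []
        else:
            assert cur_task is not None, "Tasks must come before args"
            args_per_task[cur_task].append(arg)
    return args_per_task

# Hypothetical invocation: test -w build --all
print(split_by_task(["test", "-w", "build", "--all"], {"test", "build"}))
# -> OrderedDict([('test', ['-w']), ('build', ['--all'])])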
Example #2
1
 def _process(self):
     form = VCRoomListFilterForm(request.args)
     results = None
     if request.args and form.validate():
         reverse = form.direction.data == "desc"
         from_dt = as_utc(get_day_start(form.start_date.data)) if form.start_date.data else None
         to_dt = as_utc(get_day_end(form.end_date.data)) if form.end_date.data else None
         results = find_event_vc_rooms(from_dt=from_dt, to_dt=to_dt, distinct=True)
         results = group_list(
             (r for r in results if r.event),
             key=lambda r: r.event.getStartDate().date(),
             sort_by=lambda r: r.event.getStartDate(),
             sort_reverse=reverse,
         )
         results = OrderedDict(sorted(results.viewitems(), key=itemgetter(0), reverse=reverse))
     return WPVCService.render_template(
         "vc_room_list.html", form=form, results=results, action=url_for(".vc_room_list")
     )
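
The grouping above leans on Indico helpers (group_list, as_utc, WPVCService). A rough, standard-library-only sketch of the same group-by-day-then-sort-keys pattern; the sample records are fabricated:

from collections import OrderedDict
from datetime import datetime
from operator import itemgetter

rows = [  # hypothetical (start_dt, room) records
    (datetime(2024, 5, 2, 9, 0), "room-b"),
    (datetime(2024, 5, 1, 14, 0), "room-a"),
    (datetime(2024, 5, 2, 11, 0), "room-c"),
]

grouped = {}
for start_dt, room in sorted(rows):              # sort within each day by start time
    grouped.setdefault(start_dt.date(), []).append(room)

reverse = False                                  # form.direction.data == "desc" in the original
results = OrderedDict(sorted(grouped.items(), key=itemgetter(0), reverse=reverse))
for day, rooms in results.items():
    print("{}: {}".format(day, rooms))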
Example #3
1
class DotMap(OrderedDict):
    def __init__(self, *args, **kwargs):
        self._map = OrderedDict()
        if args:
            d = args[0]
            if type(d) is dict:
                for k, v in self.__call_items(d):
                    if type(v) is dict:
                        v = DotMap(v)
                    self._map[k] = v
        if kwargs:
            for k, v in self.__call_items(kwargs):
                self._map[k] = v

    def __call_items(self, obj):
        if hasattr(obj, "iteritems") and ismethod(getattr(obj, "iteritems")):
            return obj.iteritems()
        else:
            return obj.items()

    def items(self):
        return self.iteritems()

    def iteritems(self):
        return self.__call_items(self._map)

    def __iter__(self):
        return self._map.__iter__()

    def next(self):
        return self._map.next()

    def __setitem__(self, k, v):
        self._map[k] = v

    def __getitem__(self, k):
        if k not in self._map:
            # automatically extend to new DotMap
            self[k] = DotMap()
        return self._map[k]

    def __setattr__(self, k, v):
        if k == "_map":
            super(DotMap, self).__setattr__(k, v)
        else:
            self[k] = v

    def __getattr__(self, k):
        if k == "_map":
            # fall back to normal lookup so a missing _map raises AttributeError
            return super(DotMap, self).__getattribute__(k)
        else:
            return self[k]

    def __delattr__(self, key):
        return self._map.__delitem__(key)

    def __contains__(self, k):
        return self._map.__contains__(k)

    def __str__(self):
        items = []
        for k, v in self.__call_items(self._map):
            items.append("{0}={1}".format(k, repr(v)))
        out = "DotMap({0})".format(", ".join(items))
        return out

    def __repr__(self):
        return str(self)

    def toDict(self):
        d = {}
        for k, v in self.items():
            if type(v) is DotMap:
                v = v.toDict()
            d[k] = v
        return d

    def pprint(self):
        pprint(self.toDict())

    # proper dict subclassing

    def values(self):
        return self._map.values()

    @classmethod
    def parseOther(cls, other):
        if type(other) is DotMap:
            return other._map
        else:
            return other

    def __cmp__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__cmp__(other)

    def __eq__(self, other):
        other = DotMap.parseOther(other)
        if not isinstance(other, dict):
            return False
        return self._map.__eq__(other)

    def __ge__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__ge__(other)

    def __gt__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__gt__(other)

    def __le__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__le__(other)

    def __lt__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__lt__(other)

    def __ne__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__ne__(other)

    def __delitem__(self, key):
        return self._map.__delitem__(key)

    def __len__(self):
        return self._map.__len__()

    def clear(self):
        self._map.clear()

    def copy(self):
        return DotMap(self.toDict())

    def get(self, key, default=None):
        return self._map.get(key, default)

    def has_key(self, key):
        return key in self._map

    def iterkeys(self):
        return self._map.iterkeys()

    def itervalues(self):
        return self._map.itervalues()

    def keys(self):
        return self._map.keys()

    def pop(self, key, default=None):
        return self._map.pop(key, default)

    def popitem(self):
        return self._map.popitem()

    def setdefault(self, key, default=None):
        return self._map.setdefault(key, default)

    def update(self, *args, **kwargs):
        if len(args) != 0:
            self._map.update(*args)
        self._map.update(kwargs)

    def viewitems(self):
        return self._map.viewitems()

    def viewkeys(self):
        return self._map.viewkeys()

    def viewvalues(self):
        return self._map.viewvalues()

    @classmethod
    def fromkeys(cls, seq, value=None):
        d = DotMap()
        d._map = OrderedDict.fromkeys(seq, value)
        return d
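
A short usage sketch, assuming the DotMap class above is importable; it shows attribute access, the automatic creation of missing nested maps in __getitem__, and the round-trip through toDict:

m = DotMap({"server": {"host": "localhost", "port": 8080}})
print(m.server.host)         # "localhost", via __getattr__ -> __getitem__
m.logging.level = "DEBUG"    # a missing key silently becomes a nested DotMap
print(m.toDict())            # plain nested dicts, ready for serialization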
Example #4
1
class SortedDotDict(object):
    def __init__(self, *args, **kwargs):
        super(SortedDotDict, self).__init__(*args, **kwargs)
        self._dict = SortedDict()

    def __contains__(self, *args, **kwargs):
        return self._dict.__contains__(*args, **kwargs)

    def __eq__(self, *args, **kwargs):
        return self._dict.__eq__(*args, **kwargs)

    def __format__(self, *args, **kwargs):
        return self._dict.__format__(*args, **kwargs)

    def __ge__(self, *args, **kwargs):
        return self._dict.__ge__(*args, **kwargs)

    def __getattr__(self, key):
        try:
            return self._dict[key]
        except:
            raise AttributeError(key)

    def __iter__(self):
        vals = list(self.values())
        for k in vals:
            yield k

    def __getitem__(self, key):
        return self._dict[key]

    def __setitem__(self, key, value):
        self._dict[key] = value

    def __delitem__(self, key):
        del self._dict[key]

    def keys(self):
        return list(self._dict.keys())

    def values(self):
        vals = list(self._dict.values())
        vals = [v for v in vals if isinstance(v, (ConfigurationGroup, Value))]
        vals.sort()
        return vals

    def items(self):
        return list(self._dict.items())

    def iterkeys(self):
        return iter(self._dict.keys())

    def itervalues(self):
        return iter(self._dict.values())

    def iteritems(self):
        return iter(self._dict.items())

    def get(self, *args, **kwargs):
        return self._dict.get(*args, **kwargs)

    def clear(self):
        return self._dict.clear()

    def copy(self):
        s = SortedDotDict()
        s._dict = self._dict.copy()
        return s

    def fromkeys(self, seq, value=None):
        return self._dict.fromkeys(seq, value)

    def has_key(self, key):
        return key in self._dict

    def pop(self, *args, **kwargs):
        return self._dict.pop(*args, **kwargs)

    def popitem(self, *args, **kwargs):
        return self._dict.popitem(*args, **kwargs)

    def setdefault(self, key, default):
        return self._dict.setdefault(key, default)

    def update(self, d):
        return self._dict.update(d)

    def viewitems(self, *args, **kwargs):
        return self._dict.viewitems(*args, **kwargs)

    def viewvalues(self, *args, **kwargs):
        return self._dict.viewvalues(*args, **kwargs)
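
SortedDotDict delegates everything to Django's SortedDict and filters values() down to the surrounding framework's ConfigurationGroup/Value objects. A simplified, hypothetical stand-in illustrating just the __getattr__ delegation (not the real satchmo classes):

from collections import OrderedDict

class DotDict(object):
    # stripped-down stand-in for the delegation pattern above
    def __init__(self):
        self._dict = OrderedDict()

    def __getattr__(self, key):
        try:
            return self._dict[key]
        except KeyError:
            # raising AttributeError keeps hasattr()/getattr() semantics intact
            raise AttributeError(key)

    def __setitem__(self, key, value):
        self._dict[key] = value

d = DotDict()
d["SHOP_NAME"] = "demo"
print(d.SHOP_NAME)            # "demo"
print(hasattr(d, "MISSING"))  # False, because the KeyError became an AttributeError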
Example #5
1
class FilterPipe(BCRelay):
    def __init__(self, *argz, **kwz):
        super(FilterPipe, self).__init__(*argz, **kwz)
        self.rules = OrderedDict()
        for name, rule in self.conf.rules.viewitems():
            if "regex" in rule:
                log.noise("Compiling filter (name: {}): {!r}".format(name, rule.regex))
                check = re.compile(rule.regex)
            else:
                check = None  # boolean rule

            try:
                action, optz = rule.action.split("-", 1)
            except ValueError:
                action, optz = rule.action, list()
            else:
                if action == "limit":
                    optz = map(int, optz.split("/"))
                else:
                    optz = [optz]

            self.rules[name] = check, action, optz, rule.get("match")
        self.rule_hits, self.rule_notes, self.rule_drops = dict(), set(), defaultdict(int)

    def dispatch(self, msg):
        for name, (check, action, optz, attr) in self.rules.viewitems():
            try:
                msg_match = msg if not attr else (("{" + attr + "}").format(data=msg.data) or "")
            except Exception as err:
                log.noise("Filtering attr-get error ({}) for message: {!r}".format(err, msg))
                msg_match = ""

            if not (check.search(msg_match) if check is not None else bool(msg_match)):
                if "nomatch" in optz:
                    if action == "allow":
                        return msg
                    elif action == "drop":
                        return
                continue

            if action == "limit":
                if name not in self.rule_hits:
                    self.rule_hits[name] = deque()
                win, ts, (c, t) = self.rule_hits[name], time(), optz
                ts_thresh = ts - t
                win.append(ts)
                while win[0] < ts_thresh:
                    win.popleft()
                rate = len(win)
                if rate > c:
                    log.noise(("Rule ({}) triggering rate" " above threshold ({}/{}): {}").format(name, c, t, rate))
                    self.rule_drops[name] += 1
                    if name not in self.rule_notes:
                        self.rule_notes.add(name)
                        return (
                            "  ...limiting messages matching" " filter-rule {} ({}/{}, dropped (for uptime): {})"
                        ).format(name, c, t, self.rule_drops[name])
                    else:
                        return
                self.rule_notes.discard(name)
                return msg

            elif "nomatch" not in optz:
                if action == "allow":
                    return msg
                elif action == "drop":
                    return

        if self.conf.policy == "allow":
            return msg
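
The "limit" action above is a sliding-window rate limiter: keep a deque of hit timestamps, drop entries older than t seconds, and compare the window size against c. A standalone sketch of just that mechanism (names and numbers are illustrative):

from collections import deque
from time import time

class RateWindow(object):
    def __init__(self, count, seconds):
        self.count, self.seconds, self.hits = count, seconds, deque()

    def allow(self, ts=None):
        ts = time() if ts is None else ts
        self.hits.append(ts)
        while self.hits[0] < ts - self.seconds:   # prune hits outside the window
            self.hits.popleft()
        return len(self.hits) <= self.count

win = RateWindow(count=3, seconds=10)
print([win.allow(ts) for ts in (0, 1, 2, 3, 20)])
# -> [True, True, True, False, True]: the 4th hit exceeds 3/10s, the 5th starts a fresh window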
Example #6
1
                        AND referenced_table_name \
                            IS NOT NULL ORDER BY table_name, column_name;"
)

for table in cur.fetchall():
    if table[1] not in graph[table[0]]:
        graph[table[0]].append(table[1])

graph = OrderedDict(sorted(graph.viewitems(), key=lambda x: len(x[1])))

for item in graph:
    print str(item) + ":" + str(graph[item])

while len(graph) > 0:
    for item in list(graph):  # iterate over a copy of the keys; entries are deleted inside the loop
        if len(graph[item]) == 0:
            tables.append(item)
            dependent_tables = [key for key, value in graph.iteritems() if item in value]
            if dependent_tables:
                for table in dependent_tables:
                    graph[table].remove(item)
            del graph[item]

    graph = OrderedDict(sorted(graph.viewitems(), key=lambda x: len(x[1])))

for table in tables:
    cur.execute("SHOW CREATE TABLE `" + str(table) + "`;")
    table_details = str(cur.fetchone()[1]).replace("CREATE TABLE", "CREATE TABLE IF NOT EXISTS")
    data += "\n" + table_details + ";\n\n"

    cur.execute("SELECT * FROM `" + str(table) + "`;")
    for row in cur.fetchall():
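
The while-loop above is effectively Kahn-style dependency ordering: repeatedly emit tables with no outstanding foreign-key dependencies and strip them from every other table's dependency list. A self-contained sketch with a toy graph (table names are made up):

from collections import OrderedDict

# table -> list of tables it references via foreign keys (toy data)
graph = OrderedDict([("orders", ["users", "products"]), ("users", []), ("products", ["users"])])

tables = []
while graph:
    for item in list(graph):              # copy the keys: entries are deleted inside the loop
        if not graph[item]:
            tables.append(item)
            for deps in graph.values():
                if item in deps:
                    deps.remove(item)
            del graph[item]
    graph = OrderedDict(sorted(graph.items(), key=lambda x: len(x[1])))

print(tables)   # ['users', 'products', 'orders'] -- referenced tables come first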
Example #7
1
class DotMap(MutableMapping, OrderedDict):
    def __init__(self, *args, **kwargs):
        self._map = OrderedDict()
        self._dynamic = True
        if kwargs:
            if "_dynamic" in kwargs:
                self._dynamic = kwargs["_dynamic"]
        if args:
            d = args[0]
            if isinstance(d, dict):
                for k, v in self.__call_items(d):
                    if type(v) is dict:
                        v = DotMap(v, _dynamic=self._dynamic)
                    if type(v) is list:
                        l = []
                        for i in v:
                            n = i
                            if type(i) is dict:
                                n = DotMap(i, _dynamic=self._dynamic)
                            l.append(n)
                        v = l
                    self._map[k] = v
        if kwargs:
            for k, v in self.__call_items(kwargs):
                if k != "_dynamic":
                    self._map[k] = v

    def __call_items(self, obj):
        if hasattr(obj, "iteritems") and ismethod(getattr(obj, "iteritems")):
            return obj.iteritems()
        else:
            return obj.items()

    def items(self):
        return self.iteritems()

    def iteritems(self):
        return self.__call_items(self._map)

    def __iter__(self):
        return self._map.__iter__()

    def next(self):
        return self._map.next()

    def __setitem__(self, k, v):
        self._map[k] = v

    def __getitem__(self, k):
        if k not in self._map and self._dynamic and k != "_ipython_canary_method_should_not_exist_":
            # automatically extend to new DotMap
            self[k] = DotMap()
        return self._map[k]

    def __setattr__(self, k, v):
        if k in {"_map", "_dynamic", "_ipython_canary_method_should_not_exist_"}:
            super(DotMap, self).__setattr__(k, v)
        else:
            self[k] = v

    def __getattr__(self, k):
        if k in {"_map", "_dynamic", "_ipython_canary_method_should_not_exist_"}:
            # fall back to normal lookup so a missing attribute raises AttributeError
            return super(DotMap, self).__getattribute__(k)
        else:
            return self[k]

    def __delattr__(self, key):
        return self._map.__delitem__(key)

    def __contains__(self, k):
        return self._map.__contains__(k)

    def __str__(self):
        items = []
        for k, v in self.__call_items(self._map):
            # bizarre recursive assignment situation (why someone would do this is beyond me)
            if id(v) == id(self):
                items.append("{0}=DotMap(...)".format(k))
            else:
                items.append("{0}={1}".format(k, repr(v)))
        out = "DotMap({0})".format(", ".join(items))
        return out

    def __repr__(self):
        return str(self)

    def toDict(self):
        d = {}
        for k, v in self.items():
            if type(v) is DotMap:
                # bizarre recursive assignment support
                if id(v) == id(self):
                    v = d
                else:
                    v = v.toDict()
            elif type(v) is list:
                l = []
                for i in v:
                    n = i
                    if type(i) is DotMap:
                        n = i.toDict()
                    l.append(n)
                v = l
            d[k] = v
        return d

    def pprint(self):
        pprint(self.toDict())

    def empty(self):
        return not any(self)

    # proper dict subclassing

    def values(self):
        return self._map.values()

    # ipython support

    def __dir__(self):
        return self.keys()

    @classmethod
    def parseOther(cls, other):
        if type(other) is DotMap:
            return other._map
        else:
            return other

    def __cmp__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__cmp__(other)

    def __eq__(self, other):
        other = DotMap.parseOther(other)
        if not isinstance(other, dict):
            return False
        return self._map.__eq__(other)

    def __ge__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__ge__(other)

    def __gt__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__gt__(other)

    def __le__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__le__(other)

    def __lt__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__lt__(other)

    def __ne__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__ne__(other)

    def __delitem__(self, key):
        return self._map.__delitem__(key)

    def __len__(self):
        return self._map.__len__()

    def clear(self):
        self._map.clear()

    def copy(self):
        return DotMap(self.toDict())

    def __copy__(self):
        return self.copy()

    def __deepcopy__(self, memo=None):
        return self.copy()

    def get(self, key, default=None):
        return self._map.get(key, default)

    def has_key(self, key):
        return key in self._map

    def iterkeys(self):
        return self._map.iterkeys()

    def itervalues(self):
        return self._map.itervalues()

    def keys(self):
        return self._map.keys()

    def pop(self, key, default=None):
        return self._map.pop(key, default)

    def popitem(self):
        return self._map.popitem()

    def setdefault(self, key, default=None):
        return self._map.setdefault(key, default)

    def update(self, *args, **kwargs):
        if len(args) != 0:
            self._map.update(*args)
        self._map.update(kwargs)

    def viewitems(self):
        return self._map.viewitems()

    def viewkeys(self):
        return self._map.viewkeys()

    def viewvalues(self):
        return self._map.viewvalues()

    @classmethod
    def fromkeys(cls, seq, value=None):
        d = DotMap()
        d._map = OrderedDict.fromkeys(seq, value)
        return d

    def __getstate__(self):
        return self.__dict__

    def __setstate__(self, d):
        self.__dict__.update(d)
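
A brief usage sketch for this DotMap variant, assuming the class above is available; _dynamic=False disables the auto-creation of missing keys, and dicts nested inside lists are converted as well:

static = DotMap({"a": 1}, _dynamic=False)
print(static.a)              # 1
try:
    static.missing           # no auto-vivification when _dynamic is False
except KeyError as err:
    print("missing key: {}".format(err))

dynamic = DotMap({"servers": [{"host": "a"}, {"host": "b"}]})
print(dynamic.servers[0].host)   # "a": dicts inside lists become DotMaps too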
Example #8
1
def main(args=None):
    import argparse

    parser = argparse.ArgumentParser(
        description="Tool to measure resources consumed"
        " by a group of processes, no matter how hard they fork."
        " Does that by creating a temp cgroup and running passed command there."
    )
    parser.add_argument("cmdline", nargs="+", help="Command to run and any arguments for it.")
    parser.add_argument(
        "-g",
        "--cgroup",
        default="bench/tmp",
        metavar="{ /path | tagged-path }",
        help="Hierarchy path to create temp-cgroup under"
        ' ("/" means root cgroup, default: %(default)s).'
        " Any missing path components will be created."
        " If relative name is specified, it will be interpreted from /tagged path.",
    )
    parser.add_argument(
        "-c",
        "--rcs",
        default="cpuacct, blkio, memory",
        metavar="rc1[,rc2,...]",
        help="Comma-separated list of rc hierarchies to get metrics from (default: %(default)s)."
        " Should have corresponding path mounted under {}.".format(cg_root),
    )
    parser.add_argument(
        "-q", "--quiet", action="store_true", help="Redirect stderr/stdout for started pid to /dev/null."
    )
    parser.add_argument("-d", "--debug", action="store_true", help="Verbose operation mode.")
    opts = parser.parse_args(sys.argv[1:] if args is None else args)

    global log
    import logging

    logging.basicConfig(level=logging.DEBUG if opts.debug else logging.INFO)
    log = logging.getLogger()

    # Check all rc tasks-file paths
    cg_subpath = "tmp.{}".format(cmd_pid)
    cg_tasks, cg_path = OrderedDict(), join("tagged", opts.cgroup).lstrip("/")
    for rc in map(bytes.strip, opts.rcs.split(",")):
        tasks = join(cg_root, rc, cg_path, cg_subpath, "tasks")
        assert "\n" not in tasks, repr(tasks)
        os.makedirs(dirname(tasks))
        assert exists(tasks), tasks
        cg_tasks[rc] = tasks

    # Append cmdline, send data to child
    data = cg_tasks.values()
    if opts.quiet:
        data.append("-")
    data = "\n".join(it.chain(data, ["\0".join(map(lambda arg: arg.encode("hex"), opts.cmdline))]))
    cmd_w.write(struct.pack(len_fmt, len(data)) + data)
    cmd_w.flush()

    # Wait for signal to start counting
    mark = cmd_start_r.read(1)
    ts0 = time()
    assert mark == ".", repr(mark)
    cmd_start_r.close()

    pid, status = os.waitpid(cmd_pid, 0)
    ts1 = time()

    err = status >> 8
    if status & 0xFF:
        print("Unclean exit of child pid due to signal: {}".format((status & 0xFF) >> 1))
        err = err or 1

    # Make sure everything finished running there
    leftovers = set()
    for tasks in cg_tasks.values():
        with open(tasks) as src:
            leftovers.update(map(int, src.read().splitlines()))
    if leftovers:
        print(
            "Main pid has finished, but cgroups have leftover threads"
            " still running: {}".format(", ".join(map(bytes, leftovers))),
            file=sys.stderr,
        )
        err = err or 1

    # Collect/print accounting data
    acct = OrderedDict()
    acct["cmd"] = " ".join(opts.cmdline)
    acct["wall_clock"] = "{:.3f}".format(ts1 - ts0)
    acct["exit_status"] = "{} {}".format(status >> 8, status & 0xFF >> 1)

    acct_srcs = OrderedDict()
    for cg_path in map(dirname, cg_tasks.viewvalues()):
        for p in os.listdir(cg_path):
            acct_srcs[p] = join(cg_path, p)

    acct_nums = OrderedDict(
        [
            ("cpuacct", ["usage", "usage_percpu"]),
            (
                "memory",
                [
                    "max_usage_in_bytes",
                    "memsw.max_usage_in_bytes",
                    "kmem.max_usage_in_bytes",
                    "kmem.tcp.max_usage_in_bytes",
                ],
            ),
        ]
    )
    for rc, metrics in acct_nums.viewitems():
        for p in metrics:
            p = "{}.{}".format(rc, p)
            if p not in acct_srcs:
                continue
            with open(acct_srcs[p]) as src:
                numbers = map(int, src.read().strip().split())
                acct[p] = " ".join(map(num_format, numbers))

    for p in "time sectors io_merged io_serviced io_wait_time".split():
        p = "blkio.{}".format(p)
        try:
            src = acct_srcs[p]
        except KeyError:
            pass
        else:
            with open(src) as src:
                src = src.read().splitlines()
            for line in src:
                line = line.split()
                if not line or line[0] == "Total":
                    continue
                t = None
                try:
                    dev, t, v = line
                except ValueError:
                    dev, v = line
                dev = dev_resolve(*map(int, dev.split(":")))
                if not dev:
                    continue
                label = "{}[{}]".format(p, dev)
                if t:
                    label += "[{}]".format(t)
                acct[label] = num_format(int(v))

    for k, v in acct.viewitems():
        print("{}: {}".format(k, v), file=sys.stderr)

    # Cleanup tmp dirs
    leftovers = set()
    for tasks in cg_tasks.values():
        tasks_dir = dirname(tasks)
        try:
            os.rmdir(tasks_dir)
        except (OSError, IOError):
            leftovers.add(tasks_dir)
    if leftovers:
        print("Leftover cgroup dirs remaining:{}\n".format("\n  ".join([""] + sorted(leftovers))), file=sys.stderr)
        err = err or 1

    return err
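
The blkio section near the end parses stat lines that are either "MAJ:MIN value" or "MAJ:MIN Type value". A tiny standalone sketch of that branch with fabricated input lines and a stubbed dev_resolve (both are assumptions, not part of the original script):

def dev_resolve(major, minor):
    # stub: the real script maps (major, minor) to a block-device name via sysfs
    return "sda" if (major, minor) == (8, 0) else None

sample = ["8:0 Read 120", "8:0 Write 45", "8:0 165", "Total 165"]
acct = {}
for line in sample:
    line = line.split()
    if not line or line[0] == "Total":
        continue
    t = None
    try:
        dev, t, v = line
    except ValueError:
        dev, v = line
    dev = dev_resolve(*map(int, dev.split(":")))
    if not dev:
        continue
    label = "blkio.io_serviced[{}]".format(dev) + ("[{}]".format(t) if t else "")
    acct[label] = int(v)
print(acct)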
Example #9
1
class BaseCache(object):
    """
    BaseCache is a class that saves and operates on an OrderedDict. It has a
    certain capacity, stored in the attribute `maxsize`. Whether this
    capacity has been reached can be checked with the boolean property
    `is_full`. To implement a custom cache, inherit from this class and
    override the methods ``__getitem__`` and ``__setitem__``.
    Call the method `sunpy.database.caching.BaseCache.callback` as soon
    as an item from the cache is removed.
    """

    __metaclass__ = ABCMeta

    def __init__(self, maxsize=float("inf")):
        self.maxsize = maxsize
        self._dict = OrderedDict()

    def get(self, key, default=None):  # pragma: no cover
        """Return the corresponding value to `key` if `key` is in the cache,
        `default` otherwise. This method has no side-effects, multiple calls
        with the same cache and the same passed key must always return the same
        value.

        """
        try:
            return self._dict[key]
        except KeyError:
            return default

    @abstractmethod
    def __getitem__(self, key):
        """abstract method: this method must be overwritten by inheriting
        subclasses. It defines what happens if an item from the cache is
        attempted to be accessed.

        """
        return  # pragma: no cover

    @abstractmethod
    def __setitem__(self, key, value):
        """abstract method: this method must be overwritten by inheriting
        subclasses. It defines what happens if a new value should be assigned
        to the given key. If the given key does already exist in the cache or
        not must be checked by the person who implements this method.
        """

    @abstractproperty
    def to_be_removed(self):
        """The item that will be removed on the next
        :meth:`sunpy.database.caching.BaseCache.remove` call.

        """

    @abstractmethod
    def remove(self):
        """Call this method to manually remove one item from the cache. Which
        item is removed, depends on the implementation of the cache. After the
        item has been removed, the callback method is called.

        """

    def callback(self, key, value):
        """This method should be called (by convention) if an item is removed
        from the cache because it is full. The passed key and value are the
        ones that are removed. By default this method does nothing, but it
        can be customized in a custom cache that inherits from this base class.

        """

    @property
    def is_full(self):
        """True if the number of items in the cache equals :attr:`maxsize`,
        False otherwise.

        """
        return len(self._dict) == self.maxsize

    def __delitem__(self, key):
        self._dict.__delitem__(key)

    def __contains__(self, key):
        return key in self._dict.keys()

    def __len__(self):
        return len(self._dict)

    def __iter__(self):
        for key in self._dict.__iter__():
            yield key

    def __reversed__(self):  # pragma: no cover
        for key in self._dict.__reversed__():
            yield key

    def clear(self):  # pragma: no cover
        return self._dict.clear()

    def keys(self):  # pragma: no cover
        return self._dict.keys()

    def values(self):  # pragma: no cover
        return self._dict.values()

    def items(self):  # pragma: no cover
        return self._dict.items()

    def iterkeys(self):  # pragma: no cover
        return self._dict.iterkeys()

    def itervalues(self):  # pragma: no cover
        for value in self._dict.itervalues():
            yield value

    def iteritems(self):  # pragma: no cover
        for key, value in self._dict.iteritems():
            yield key, value

    def update(self, *args, **kwds):  # pragma: no cover
        self._dict.update(*args, **kwds)

    def pop(self, key, default=MutableMapping._MutableMapping__marker):  # pragma: no cover
        return self._dict.pop(key, default)

    def setdefault(self, key, default=None):  # pragma: no cover
        return self._dict.setdefault(key, default)

    def popitem(self, last=True):  # pragma: no cover
        return self._dict.popitem(last)

    def __reduce__(self):  # pragma: no cover
        return self._dict.__reduce__()

    def copy(self):  # pragma: no cover
        return self._dict.copy()

    def __eq__(self, other):  # pragma: no cover
        return self._dict.__eq__(other)

    def __ne__(self, other):  # pragma: no cover
        return self._dict.__ne__(other)

    def viewkeys(self):  # pragma: no cover
        return self._dict.viewkeys()

    def viewvalues(self):  # pragma: no cover
        return self._dict.viewvalues()

    def viewitems(self):  # pragma: no cover
        return self._dict.viewitems()

    @classmethod
    def fromkeys(cls, iterable, value=None):  # pragma: no cover
        return OrderedDict.fromkeys(iterable, value)

    def __repr__(self):  # pragma: no cover
        return "{0}({1!r})".format(self.__class__.__name__, dict(self._dict))
Example #10
0
class AccumUnits(object):
    """
    make & print human readable lists of quantifiable things like
    time, distance, weight - track them the way computers like
    (as granular as you wish) output them the way humans like
    (without having to think about it more than once)
    todo: add unit conversions
    """

    def __init__(self, unit_names=None, unit_qnts=None, VERBOSE=False):
        """
        Whether passed in or using the defaults, the last unit should have
        quantity 1, as it is the most granular unit in use. Pick an instance
        name that reflects the units being handled.
        """
        default_units = ["year", "month", "day", "hr", "min", "sec"]  # always go big to small
        default_quants = [12, 30, 24, 60, 60, 1]  # don't calculate, just list
        # default_units = ['mile', 'foot', 'inch']                      # just like in physics class
        # default_quants = [5280, 12, 1]
        if unit_names is None:
            if VERBOSE:
                print("using default unit labels:")
                print(default_units)
            self.unit_names = default_units
        else:
            self.unit_names = unit_names
        if unit_qnts is None:
            if VERBOSE:
                print("using default unit quantities:")
                print(default_quants)
            unit_qnts = default_quants
        assert isinstance(self.unit_names, list)
        assert isinstance(unit_qnts, list)
        assert len(set(self.unit_names)) == len(unit_qnts)  # set() to be rid of duplicate names
        self.timeunits = OrderedDict()
        sec = 1
        self.seclist = deque()
        while unit_qnts:  # multiply to get successive units
            xun = unit_qnts.pop()
            self.seclist.appendleft(sec * xun)
            sec *= xun
        for ktm, vtm in zip(self.unit_names, self.seclist):  # zip them into OrderedDict
            self.timeunits[ktm] = vtm
            if VERBOSE:
                print("{:6} : {:10}".format(ktm, vtm))
        self.VERBOSE = VERBOSE

    def breakdown(self, rawseconds):
        """
        Break incoming raw seconds (or whatever the base unit is) into a list of 'quantity unit' strings.
        """
        qt = abs(rawseconds)
        divtime = []
        for plc, (kt, vt) in enumerate(self.timeunits.viewitems()):
            qt, leftover = divmod(qt, vt)
            if qt:
                divtime.append(str(int(qt)) + " " + str(kt))
            if leftover < 1:
                if self.VERBOSE:
                    print("({} = fractional {} from given {})".format(leftover, kt, rawseconds))
                    print("a stringy-list breakdown (joined): ")
                return divtime
            qt = leftover
        return divtime

    def breakdict(self, rawseconds):
        """
        Break incoming raw seconds (or whatever the base unit is) into an
        OrderedDict of {unit_name: quantity}.
        """
        qt = abs(rawseconds)
        divtime = OrderedDict()
        for plc, (kt, vt) in enumerate(self.timeunits.viewitems()):
            qt, leftover = divmod(qt, vt)
            if qt:
                divtime[kt] = int(qt)
            if leftover < 1:
                if self.VERBOSE:
                    print("({} = fractional {} from given {})".format(leftover, kt, rawseconds))
                    print("a dictionary breakdown:")
                return divtime
            qt = leftover
        return divtime

    def timebetween(self, start, end):
        """
        Return a dict of the unit-quantity breakdown between start and end; print it as a string when VERBOSE.
        """
        assert isinstance(start, int) or isinstance(start, float)
        assert isinstance(end, int) or isinstance(end, float)
        quant = end - start
        if self.VERBOSE:
            print("between {0} {2}, and {1} {2}".format(start, end, self.unit_names[-1]))
            print(" : {}".format(", ".join(self.breakdown(quant))))
        return self.breakdict(quant)
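
A quick usage sketch with the default time units (the class above is Python 2, since it relies on viewitems); the expected values follow from the default quantities:

acc = AccumUnits()
print(", ".join(acc.breakdown(90061)))   # 1 day, 1 hr, 1 min, 1 sec
print(acc.breakdict(90061))              # OrderedDict([('day', 1), ('hr', 1), ('min', 1), ('sec', 1)])
print(acc.timebetween(0, 3725))          # OrderedDict([('hr', 1), ('min', 2), ('sec', 5)])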
Example #11
0
def inheritance_check(variants):
    m_gf = "NA12891"
    m_gm = "NA12892"
    d_gf = "NA12889"
    d_gm = "NA12890"
    mom = "NA12878"
    dad = "NA12877"
    # sample_ID:[hom_ref, het, hom_alt]
    # kids = ['NA12879':[0,0,0], 'NA12880':[0,0,0], 'NA12881':[0,0,0], 'NA12882':[0,0,0], 'NA12883':[0,0,0], 'NA12884':[0,0,0], 'NA12885':[0,0,0], 'NA12886':[0,0,0], 'NA12887':[0,0,0], 'NA12888':[0,0,0], 'NA12893':[0,0,0], 'NA12878':[0,0,0],'NA12877':[0,0,0]}
    # load dict with the children sample names
    kids = OrderedDict()
    for i in range(12877, 12889):
        name = "NA" + str(i)
        kids[name] = [0, 0, 0]
    kids["NA12893"] = [0, 0, 0]

    parents = {"NA12878": [0, 0, 0], "NA12877": [0, 0, 0]}
    hits = []
    present_min = 5
    absent_max = 0
    for var in variants:
        # must be present in only one parent, and only one grandparent.
        mom_count = int(var.gts[mom].format["AO"])
        m_gf_count = int(var.gts[m_gf].format["AO"])
        m_gm_count = int(var.gts[m_gm].format["AO"])

        dad_count = int(var.gts[dad].format["AO"])
        d_gf_count = int(var.gts[d_gf].format["AO"])
        d_gm_count = int(var.gts[d_gm].format["AO"])

        # is var a true het in mom? present in mom and one of her parents (and not dad), or dad and one of his parents (and not mom)
        mom_het = mom_count >= present_min and (
            (m_gf_count >= present_min and m_gm_count == absent_max)
            or (m_gf_count == absent_max and m_gm_count >= present_min)
        )
        dad_het = dad_count >= present_min and (
            (d_gf_count >= present_min and d_gm_count == absent_max)
            or (d_gf_count == absent_max and d_gm_count >= present_min)
        )
        # if dad_het and mom_het and var.gts[mom].format['GT'] != "1/1" and var.gts[dad].format['GT'] != "1/1":
        if dad_het and mom_het:
            hits.append(var)
            for kid in kids:
                if var.gts[kid].format["GT"] == "0/0":
                    kids[kid][0] += 1
                elif var.gts[kid].format["GT"] == "0/1":
                    kids[kid][1] += 1
                elif var.gts[kid].format["GT"] == "1/1":
                    kids[kid][2] += 1
    hom_ref = 0
    het = 0
    hom_alt = 0
    for name, counts in kids.viewitems():
        if name == mom or name == dad:
            continue
        hom_ref += counts[0]
        het += counts[1]
        hom_alt += counts[2]
    total = hom_ref + het + hom_alt
    if total <= 0:
        exit("Zero total")
    sys.stderr.write("Sample\t0/0\t0/1\t1/1\n")
    for kid, counts in kids.viewitems():
        if kid == mom:
            kid = "Mom"
        if kid == dad:
            kid = "Dad"
        sys.stderr.write(kid + "\t")
        total = float(sum(counts))
        sys.stderr.write(
            "{0:.2f}\t{1:.2f}\t{2:.2f}\n".format(
                (counts[0] / total) * 100, (counts[1] / total) * 100, (counts[2] / total) * 100
            )
        )

    total = hom_ref + het + hom_alt
    hom_ref = (hom_ref / float(total)) * 100
    het = (het / float(total)) * 100
    hom_alt = (hom_alt / float(total)) * 100
    sys.stderr.write("\nAggregate: ")
    sys.stderr.write(str(len(hits)) + " total variants.\n")
    sys.stderr.write("GT\t%\n")
    sys.stderr.write("0/0\t{0:.2f}\n0/1\t{1:.2f}\n1/1\t{2:.2f}\n".format(hom_ref, het, hom_alt))
    return hits
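
The variant objects here come from a VCF parser, so var.gts[sample].format carries the AO and GT fields. A stripped-down sketch of just the per-child genotype tally and the percentage table, using a fabricated genotype matrix instead of real variants:

from collections import OrderedDict
import sys

# fabricated genotype calls per child across three candidate sites
calls = OrderedDict([("NA12879", ["0/1", "0/0", "0/1"]), ("NA12880", ["1/1", "0/1", "0/0"])])

kids = OrderedDict((name, [0, 0, 0]) for name in calls)   # [hom_ref, het, hom_alt]
for name, gts in calls.items():
    for gt in gts:
        kids[name]["0/0 0/1 1/1".split().index(gt)] += 1

sys.stderr.write("Sample\t0/0\t0/1\t1/1\n")
for kid, counts in kids.items():
    total = float(sum(counts))
    sys.stderr.write("{0}\t{1:.2f}\t{2:.2f}\t{3:.2f}\n".format(
        kid, counts[0] / total * 100, counts[1] / total * 100, counts[2] / total * 100))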
Example #12
0
class Robot(object):
    """
    for initializing and running the interface between cpu and robot firmware via serial link
    """

    def __init__(self, baud="115200", port="/dev/ttyACM0", readtimer=0, nl="\n", LOAD=True):
        self.baud = baud
        self.port = port
        self.con = ser.Serial(port=port, baudrate=baud, timeout=readtimer)
        self.nl = nl
        self.LOADING = LOAD
        self.do = {
            "pickup_pos": "G0 X1",
            "drop_pos": "G0 X52",
            "fan_on": "M106",
            "fan_off": "M107",
            "servo_drop": "M280 S57 P0",
            "servo_up": "M280 S120 P0",
            "end_stop_status": "M119",
            "positions": "M114",
            "stop": "M410",
        }
        self.times = {
            "pickup_pos": 3,
            "drop_pos": 3,
            "fan_on": 0.1,
            "fan_off": 0.1,
            "servo_drop": 0.6,
            "servo_up": 2.0,
            "end_stop_status": 0.1,
            "positions": 0.06,
            "stop": 0.02,
        }
        self.sensor_keys = ["x_min", "y_min", "z_min", "x_max", "y_max"]
        for w in xrange(5):
            print ("waiting {} seconds to init serial".format(5 - w))
            time.sleep(1)
        print ("serial portisOpen={}".format(self.con.isOpen()))
        # physically home X (arm) Y (hopper) and Z (output bin) to zero positions
        self.con.write("G28 XZ" + nl)
        self.con.write("G28 Y" + nl)
        time.sleep(0.5)

        # arm 'X' swing out to allow loading of hopper
        self.con.write(self.do["drop_pos"] + nl + " " + self.do["servo_up"] + nl)
        self.con.write(self.do["fan_off"] + nl)
        self.NEED_DROP = False
        self.CARD_CARRIED = False
        self.ID_DONE = False
        self.PICKING_UP = False

        # adjust sort categories quantity and bin position here:
        self.bins = OrderedDict([("Low", 125), ("High", 247.5), ("NoID", 50.0)])
        self.bin_cuts = OrderedDict([("Low", 0.0), ("High", 0.5), ("NoID", 10000.0)])
        self.bin_sliver = 0.2
        self.LOADING = True
        # tl = self.con.readline()
        # while tl:
        #    print("startup: {}".format(tl.strip()))
        #    tl = self.con.readline()
        r = self.con.read(self.con.inWaiting())
        for p in r.split("echo:"):
            print p

    def dothis(self, instruction):
        """ sends instruction to robot and returns the estimated execution time if available """
        if instruction in self.do.keys():
            self.con.write(self.do[instruction] + self.nl)
            return self.times[instruction]
        self.con.write(instruction + self.nl)
        return 0.0

    def bin_lookup(self, price, binname=None):
        """returns the bin-name the card-price should be sorted into"""
        for bk, bv in self.bin_cuts.viewitems():
            if price >= bv:
                binname = bk
        return binname

    def sensor_stats(self, min_ret=99, retry=0):
        """returns dict of end-stop sensors, keyed by sensor name, with values of 'open' or 'TRIGGERED'"""
        extra_time = 0
        if retry > 5:
            extra_time = 0.1 * retry
            print ("extra time: {} sec".format(extra_time))
        if retry > 32:
            print ("too many retries of sensors = {}".format(retry))
            return {"y_max": "TRIGGERED", "x_max": "y_is_fake!"}

        wait = self.dothis("end_stop_status") + time.time() + extra_time
        while (time.time() < wait) and (self.con.inWaiting() < min_ret):
            pass
        sensordict = dict(
            [
                tuple(chunk.split(": "))
                for chunk in self.con.read(size=self.con.inWaiting()).split(self.nl)
                if (": " in chunk) and (("_min" in chunk) or ("_max" in chunk))
            ]
        )
        skeys = sensordict.keys()
        if all([sk in skeys for sk in self.sensor_keys]):
            return sensordict
        retry += 1
        # print("Retry sensor_stats() #{}".format(retry))
        return self.sensor_stats(min_ret=min_ret, retry=retry)

    def xyz_pos(self, min_ret=59):
        """ returns dict of current stepper DESTINATIONS (in float(mm)) keyed by single-letter axis names"""
        wait = self.dothis("positions") + time.time()
        # start = time.time()
        must_have = ["X", "Y", "Z", "E"]
        xyz_dict = {}
        while time.time() < wait and (self.con.inWaiting() < min_ret):
            pass
        # finalwait = self.con.inWaiting()
        for positions in [
            ps.split(" Count ")[0] for ps in self.con.read(size=self.con.inWaiting()).split(self.nl) if " Count " in ps
        ]:
            if all([axis in positions for axis in must_have]):
                for p in positions.split(" "):
                    if ":" in p:
                        k, v = p.split(":")
                        xyz_dict[k.strip()] = float(v.strip())
        # print("actual speed: {}, ret: {}".format(time.time() - start, finalwait))
        return xyz_dict or self.xyz_pos(min_ret=min_ret - 1)

    def go_xz(self, bin_name, timeconst=0.07, reverse=False):
        """ given a destination bin, position everything for the drop, while decrementing for the next drop into the bin and
        return the estimated time from the present when the drop can happen """
        back = 1 if not reverse else -1
        newz = float(self.bins[bin_name])
        self.bins[bin_name] -= self.bin_sliver * back
        curz = self.xyz_pos()["Z"]
        if not reverse:
            x_spot = self.do["drop_pos"].split(" ")[1]
            x_time = self.times["drop_pos"]
        else:
            x_time, x_spot = 0, ""
        z_time = abs(curz - newz) * timeconst
        self.dothis("G1 Z" + str(newz) + " " + x_spot)
        return z_time if z_time > x_time else x_time

    def hopper_up(self, y_current=None, bite=1.1, timeconst=0.7):
        """ raise the input hopper by a little bit, return the seconds it is estimated to take"""
        if y_current is None:
            try:
                y_current = self.xyz_pos()["Y"]
            except KeyError:
                print ("WARNING: hopper_up couldn't get 'Y' starting position. Moving to zero + 1.")
                y_current = 0
        self.dothis("G0 Y{}".format(y_current + bite))
        return bite * timeconst

    def load_hopper(self, move=10.0, y_top=220):
        """ load cards until bottom switch is triggered, indicating max capacity, but only move
        down while top proximity sensor is triggered. Set self.LOADING false when done"""
        # first move up until proximity sensor is triggered to get the platform up top
        print (" - Initializing hopper upwards (until top sensor triggered) - ")
        self.dothis("G0 Y{}".format(y_top))
        power_warn_time = time.time() + 22.0
        INITIALIZE_UP = True
        while INITIALIZE_UP:
            sensor = self.sensor_stats()
            if time.time() > power_warn_time:
                power_warn_time = time.time() + 2.0
                print ("Is the power-supply on? If not, break and start over.")
                print ("sensors say: {}".format(sensor))
            if "TRIG" in sensor["y_max"]:
                print ("top sensor = {}".format(sensor["y_max"]))
                time.sleep(self.dothis("stop"))
                INITIALIZE_UP = False
        xyz = self.xyz_pos()
        print ("LOAD THE HOPPER. Loading ends when bottom limit switch is triggered.")
        print ("Positions:  {}".format(", ".join([k + ":" + str(v) for k, v in xyz.viewitems()])))
        new_sweep = True
        destination = max((xyz["Y"] - move), 0)
        start = time.time()
        while self.LOADING:
            sensor = self.sensor_stats()
            if "TRIG" in sensor["y_min"]:
                self.dothis("stop")
                self.dothis("G92 Y0")
                self.dothis("G0 Y0")
                self.LOADING = False
                continue
            if "TRIG" in sensor["y_max"] and new_sweep:
                print ("moving down to: Y={}".format(destination))
                self.dothis("G0 Y{}".format(destination))
                start = time.time()
                new_sweep = False
            if "open" in sensor["y_max"] and not new_sweep:
                print ("top sensor Open after {} seconds...".format(time.time() - start))
                new_sweep = True
                xyz = self.xyz_pos()
                if "Y" in xyz.keys():
                    destination = max((xyz["Y"] - move), 0)
                else:
                    print ("BAD XYZ: {}".format(", ".join([k + ":" + str(v) for k, v in xyz.viewitems()])))
        xyz = self.xyz_pos()
        print ("DONE LOADING")
        print ("Positions:  {}".format(", ".join([k + ":" + str(v) for k, v in xyz.viewitems()])))
        nudge_up = True
        wait = 0
        sensor = self.sensor_stats()
        while nudge_up:
            if time.time() > wait:
                wait = self.hopper_up() + time.time()
                sensor = self.sensor_stats()
            if "TRIG" in sensor["y_max"]:
                nudge_up = False
        time.sleep(self.dothis("fan_on"))
        return self.hopper_up(bite=0.2)
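
bin_lookup walks the ordered cutoffs and keeps the last bin whose threshold the price clears. A hardware-free sketch of just that lookup, reusing the cutoff values from the constructor above:

from collections import OrderedDict

bin_cuts = OrderedDict([("Low", 0.0), ("High", 0.5), ("NoID", 10000.0)])

def bin_lookup(price, binname=None):
    for bk, bv in bin_cuts.items():
        if price >= bv:
            binname = bk
    return binname

print(bin_lookup(0.25))    # Low
print(bin_lookup(3.10))    # High
print(bin_lookup(-1.00))   # None -- below every cutoff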
Example #13
0
def main():
    import argparse

    parser = argparse.ArgumentParser(description="Collect and dispatch various metrics to destinations.")
    parser.add_argument(
        "-t",
        "--destination",
        metavar="host[:port]",
        help="host[:port] (default port: 2003, can be overidden"
        " via config file) of sink destination endpoint (e.g. carbon"
        " linereceiver tcp port, by default).",
    )
    parser.add_argument(
        "-i", "--interval", type=int, metavar="seconds", help="Interval between collecting and sending the datapoints."
    )

    parser.add_argument(
        "-e",
        "--collector-enable",
        action="append",
        metavar="collector",
        default=list(),
        help="Enable only the specified metric collectors," " can be specified multiple times.",
    )
    parser.add_argument(
        "-d",
        "--collector-disable",
        action="append",
        metavar="collector",
        default=list(),
        help="Explicitly disable specified metric collectors,"
        " can be specified multiple times. Overrides --collector-enable.",
    )

    parser.add_argument(
        "-s",
        "--sink-enable",
        action="append",
        metavar="sink",
        default=list(),
        help="Enable only the specified datapoint sinks," " can be specified multiple times.",
    )
    parser.add_argument(
        "-x",
        "--sink-disable",
        action="append",
        metavar="sink",
        default=list(),
        help="Explicitly disable specified datapoint sinks,"
        " can be specified multiple times. Overrides --sink-enable.",
    )

    parser.add_argument(
        "-p",
        "--processor-enable",
        action="append",
        metavar="processor",
        default=list(),
        help="Enable only the specified datapoint processors," " can be specified multiple times.",
    )
    parser.add_argument(
        "-z",
        "--processor-disable",
        action="append",
        metavar="processor",
        default=list(),
        help="Explicitly disable specified datapoint processors,"
        " can be specified multiple times. Overrides --processor-enable.",
    )

    parser.add_argument(
        "-c",
        "--config",
        action="append",
        metavar="path",
        default=list(),
        help="Configuration files to process."
        " Can be specified more than once."
        " Values from the latter ones override values in the former."
        " Available CLI options override the values in any config.",
    )

    parser.add_argument(
        "-a",
        "--xattr-emulation",
        metavar="db-path",
        help="Emulate filesystem extended attributes (used in"
        " some collectors like sysstat or cron_log), storing per-path"
        " data in a simple shelve db.",
    )
    parser.add_argument("-n", "--dry-run", action="store_true", help="Do not actually send data.")
    parser.add_argument(
        "--debug-memleaks",
        action="store_true",
        help="Import guppy and enable its manhole to debug memleaks (requires guppy module).",
    )
    parser.add_argument("--debug", action="store_true", help="Verbose operation mode.")
    optz = parser.parse_args()

    # Read configuration files
    cfg = AttrDict.from_yaml("{}.yaml".format(os.path.splitext(os.path.realpath(__file__))[0]))
    for k in optz.config:
        cfg.update_yaml(k)

    # Logging
    import logging

    configure_logging(cfg.logging, logging.DEBUG if optz.debug else logging.WARNING)
    if not cfg.logging.tracebacks:

        class NoTBLogger(logging.Logger):
            def exception(self, *argz, **kwz):
                self.error(*argz, **kwz)

        logging.setLoggerClass(NoTBLogger)
    log = logging.getLogger(__name__)

    # Manholes
    if optz.debug_memleaks:
        import guppy
        from guppy.heapy import Remote

        Remote.on()

        # Fill "auto-detected" blanks in the configuration, CLI overrides
    try:
        if optz.destination:
            cfg.sinks._default.host = optz.destination
        cfg.sinks._default.host = cfg.sinks._default.host.rsplit(":", 1)
        if len(cfg.sinks._default.host) == 1:
            cfg.sinks._default.host = cfg.sinks._default.host[0], cfg.sinks._default.default_port
        else:
            cfg.sinks._default.host[1] = int(cfg.sinks._default.host[1])
    except KeyError:
        pass
    if optz.interval:
        cfg.loop.interval = optz.interval
    if optz.dry_run:
        cfg.debug.dry_run = optz.dry_run
    if optz.xattr_emulation:
        cfg.core.xattr_emulation = optz.xattr_emulation

    # Fake "xattr" module, if requested
    if cfg.core.xattr_emulation:
        import shelve

        xattr_db = shelve.open(cfg.core.xattr_emulation, "c")

        class xattr_path(object):
            def __init__(self, base):
                assert isinstance(base, str)
                self.base = base

            def key(self, k):
                return "{}\0{}".format(self.base, k)

            def __setitem__(self, k, v):
                xattr_db[self.key(k)] = v

            def __getitem__(self, k):
                return xattr_db[self.key(k)]

            def __del__(self):
                xattr_db.sync()

        class xattr_module(object):
            xattr = xattr_path

        sys.modules["xattr"] = xattr_module

        # Override "enabled" collector/sink parameters, based on CLI
    ep_conf = dict()
    for ep, enabled, disabled in [
        ("collectors", optz.collector_enable, optz.collector_disable),
        ("processors", optz.processor_enable, optz.processor_disable),
        ("sinks", optz.sink_enable, optz.sink_disable),
    ]:
        conf = cfg[ep]
        conf_base = conf.pop("_default")
        if "debug" not in conf_base:
            conf_base["debug"] = cfg.debug
        ep_conf[ep] = conf_base, conf, OrderedDict(), enabled, disabled

    # Init global cfg for collectors/sinks' usage
    from graphite_metrics import collectors, sinks, loops

    collectors.cfg = sinks.cfg = loops.cfg = cfg

    # Init pluggable components
    import pkg_resources

    for ep_type in "collector", "processor", "sink":
        ep_key = "{}s".format(ep_type)  # a bit of a hack
        conf_base, conf, objects, enabled, disabled = ep_conf[ep_key]
        ep_dict = dict((ep.name, ep) for ep in pkg_resources.iter_entry_points("graphite_metrics.{}".format(ep_key)))
        eps = OrderedDict(
            (name, (ep_dict.pop(name), subconf or AttrDict())) for name, subconf in conf.viewitems() if name in ep_dict
        )
        eps.update((name, (module, conf_base)) for name, module in ep_dict.viewitems())
        for ep_name, (ep_module, subconf) in eps.viewitems():
            if ep_name[0] == "_":
                log.debug("Skipping {} enty point," " prefixed by underscore: {}".format(ep_type, ep_name))
            subconf.rebase(conf_base)  # fill in "_default" collector parameters
            if enabled:
                if ep_name in enabled:
                    subconf["enabled"] = True
                else:
                    subconf["enabled"] = False
            if disabled and ep_name in disabled:
                subconf["enabled"] = False
            if subconf.get("enabled", True):
                log.debug("Loading {}: {}".format(ep_type, ep_name))
                try:
                    obj = getattr(ep_module.load(), ep_type)(subconf)
                except Exception as err:
                    log.exception("Failed to load/init {} ({}): {}".format(ep_type, ep_name, err))
                    subconf.enabled = False
                    obj = None
                if subconf.get("enabled", True):
                    objects[ep_name] = obj
                else:
                    log.debug(
                        ("{} {} (entry point: {})" " was disabled after init").format(ep_type.title(), obj, ep_name)
                    )
        if ep_type != "processor" and not objects:
            log.fatal("No {}s were properly enabled/loaded, bailing out".format(ep_type))
            sys.exit(1)
        log.debug("{}: {}".format(ep_key.title(), objects))

    loop = dict((ep.name, ep) for ep in pkg_resources.iter_entry_points("graphite_metrics.loops"))
    conf = AttrDict(**cfg.loop)
    if "debug" not in conf:
        conf.debug = cfg.debug
    loop = loop[cfg.loop.name].load().loop(conf)

    collectors, processors, sinks = it.imap(
        op.itemgetter(2), op.itemgetter("collectors", "processors", "sinks")(ep_conf)
    )
    log.debug(
        "Starting main loop: {} ({} collectors, {} processors, {} sinks)".format(
            loop, len(collectors), len(processors), len(sinks)
        )
    )
    loop.start(collectors, processors, sinks)
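
The enable/disable handling for collectors, processors and sinks boils down to: an explicit --X-enable list acts as a whitelist, and --X-disable always wins. A rough standalone sketch of that selection logic (plugin names and configs are made up):

from collections import OrderedDict

def select_plugins(available, enabled, disabled):
    picked = OrderedDict()
    for name, conf in available.items():
        on = conf.get("enabled", True)
        if enabled:                       # whitelist mode: only listed names stay on
            on = name in enabled
        if disabled and name in disabled:
            on = False                    # explicit disable overrides everything
        if on:
            picked[name] = conf
    return picked

available = OrderedDict([("sysstat", {}), ("cron_log", {}), ("cgacct", {"enabled": False})])
print(list(select_plugins(available, enabled=[], disabled=["cron_log"])))   # ['sysstat']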