Example no. 1
def test_get_all_raises_timeout_if_not_all_futures_are_available(futures):
    futures[0].set(0)
    futures[1].set(1)
    # futures[2] is unset

    with pytest.raises(Timeout):
        get_all(futures, timeout=0)
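The pytest-style tests in Examples 1, 5, and 13 rely on a futures fixture that is not shown. Assuming the tests import get_all and Timeout directly from pykka, a minimal sketch of that fixture could be:

import pykka
import pytest


@pytest.fixture
def futures():
    # Three unresolved futures; each test resolves some or all of them.
    return [pykka.ThreadingFuture() for _ in range(3)]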
Example no. 2
 def test_get_all_can_be_called_multiple_times(self):
     self.results[0].set(0)
     self.results[1].set(1)
     self.results[2].set(2)
     result1 = get_all(self.results)
     result2 = get_all(self.results)
     self.assertEqual(result1, result2)
Example no. 3
 def test_get_all_can_be_called_multiple_times(self):
     self.results[0].set(0)
     self.results[1].set(1)
     self.results[2].set(2)
     result1 = get_all(self.results)
     result2 = get_all(self.results)
     self.assertEqual(result1, result2)
Example no. 4
 def make_burger(self, order):
     recipe = self.recipes[order]
     future_ingredients = []
     for ingredient in recipe:
         future_ingredient = self.preparer_proxy.prepare_ingredient(
             ingredient)
         future_ingredients.append(future_ingredient)
     prepared_ingredients = pykka.get_all(pykka.get_all(future_ingredients))
     return Burger(order, prepared_ingredients)
Example no. 5
def test_get_all_can_be_called_multiple_times(futures):
    futures[0].set(0)
    futures[1].set(1)
    futures[2].set(2)

    result1 = get_all(futures)
    result2 = get_all(futures)

    assert result1 == result2
Example no. 6
    def refresh(self, uri=None):
        """
        Refresh library. Limit to URI and below if an URI is given.

        :param uri: directory or track URI
        :type uri: string
        """
        if uri is not None:
            backend = self._get_backend(uri)
            if backend:
                backend.library.refresh(uri).get()
        else:
            futures = [b.library.refresh(uri) for b in self.backends.with_library.values()]
            pykka.get_all(futures)
Example no. 7
    def refresh(self, uri=None):
        """
        Refresh library. Limit to URI and below if an URI is given.

        :param uri: directory or track URI
        :type uri: string
        """
        if uri is not None:
            backend = self._get_backend(uri)
            if backend:
                backend.library.refresh(uri).get()
        else:
            futures = [
                b.library.refresh(uri) for b in self.backends.with_library]
            pykka.get_all(futures)
Example no. 8
def extract_recipes(ingredient_list):
    """
    Extracts recipes for a list of ingredients *Multi-Threaded Solution*
    :param ingredient_list: list of ingredients to serve the initial query
    :return: Dictionary of recipes (includes link and ingredient list for each recipe)
    """
    query = ", ".join(ingredient_list)
    # Initiate the search
    base_url = "http://allrecipes.com"
    entry = base_url + "/search/results/?wt=" + query + "&sort=re"
    start_page = requests.get(entry)
    tree = html.fromstring(start_page.content)
    response = tree.xpath('//article[contains(@class, \'grid-col--fixed-tiles\')]//@href')
    # Extract search result links
    links = set()
    for i in xrange(min(10, len(response))):
        if "recipe" in str(response[i]):
            links.add(base_url + response[i])
    # Spawn workers to process each link
    futures, workers = [], []
    for link in links:
        message = {'link': link}
        actor_ref = Worker.start()
        workers.append(actor_ref)
        futures.append(actor_ref.ask(message, block=False))
    # Collect and merge worker answers
    recipes = dict()
    answers = pykka.get_all(futures)
    for answer in answers:
        recipes[answer['name']] = dict()
        recipes[answer['name']]['ingredients'] = answer['ingredients']
        recipes[answer['name']]['link'] = answer['link']
    for worker in workers:
        worker.stop()
    return recipes
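Examples 8 and 35 depend on a Worker actor that is not part of the snippet. Because actor_ref.ask(message, block=False) returns a future that resolves to the return value of the actor's on_receive method, a plausible sketch is shown below; the page-parsing details (and the 'picture' key that Example 35 also reads) are assumptions, not the original implementation.

import pykka
import requests
from lxml import html


class Worker(pykka.ThreadingActor):
    def on_receive(self, message):
        # Fetch one recipe page and reply with the fields the caller merges.
        link = message['link']
        page = html.fromstring(requests.get(link).content)
        return {
            'name': (page.findtext('.//title') or link).strip(),
            'ingredients': page.xpath('//span[@itemprop="ingredients"]/text()'),
            'link': link,
        }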
Example no. 9
    def search(self, query=None, **kwargs):
        """
        Search the library for tracks where ``field`` contains ``values``.

        Examples::

            # Returns results matching 'a'
            search({'any': ['a']})
            search(any=['a'])

            # Returns results matching artist 'xyz'
            search({'artist': ['xyz']})
            search(artist=['xyz'])

            # Returns results matching 'a' and 'b' and artist 'xyz'
            search({'any': ['a', 'b'], 'artist': ['xyz']})
            search(any=['a', 'b'], artist=['xyz'])

        :param query: one or more queries to search for
        :type query: dict
        :rtype: list of :class:`mopidy.models.SearchResult`
        """
        query = query or kwargs
        futures = [
            b.library.search(**query) for b in self.backends.with_library]
        return [result for result in pykka.get_all(futures) if result]
Example no. 10
 def test_wait_all_is_alias_of_get_all(self):
     self.results[0].set(0)
     self.results[1].set(1)
     self.results[2].set(2)
     result1 = get_all(self.results)
     result2 = wait_all(self.results)
     self.assertEqual(result1, result2)
Example no. 11
    def get_images(self, uris):
        """Lookup the images for the given URIs

        Backends can use this to return image URIs for any URI they know about
        be it tracks, albums, playlists... The lookup result is a dictionary
        mapping the provided URIs to lists of images.

        Unknown URIs or URIs the corresponding backend couldn't find anything
        for will simply return an empty list for that URI.

        :param list uris: list of URIs to find images for
        :rtype: {uri: tuple of :class:`mopidy.models.Image`}

        .. versionadded:: 1.0
        """
        futures = [
            backend.library.get_images(backend_uris)
            for (backend, backend_uris)
            in self._get_backends_to_uris(uris).items() if backend_uris]

        results = {uri: tuple() for uri in uris}
        for r in pykka.get_all(futures):
            for uri, images in r.items():
                results[uri] += tuple(images)
        return results
Example no. 12
    def get_images(self, uris):
        """Lookup the images for the given URIs

        Backends can use this to return image URIs for any URI they know about
        be it tracks, albums, playlists... The lookup result is a dictionary
        mapping the provided URIs to lists of images.

        Unknown URIs or URIs the corresponding backend couldn't find anything
        for will simply return an empty list for that URI.

        :param list uris: list of URIs to find images for
        :rtype: {uri: tuple of :class:`mopidy.models.Image`}

        .. versionadded:: 1.0
        """
        futures = [
            backend.library.get_images(backend_uris)
            for (backend,
                 backend_uris) in self._get_backends_to_uris(uris).items()
            if backend_uris
        ]

        results = {uri: tuple() for uri in uris}
        for r in pykka.get_all(futures):
            for uri, images in r.items():
                results[uri] += tuple(images)
        return results
Example no. 13
def test_get_all_blocks_until_all_futures_are_available(futures):
    futures[0].set(0)
    futures[1].set(1)
    futures[2].set(2)

    result = get_all(futures)

    assert result == [0, 1, 2]
Example no. 14
 def get_playlists(self, include_tracks=True):
     futures = [
         b.playlists.playlists for b in self.backends.with_playlists]
     results = pykka.get_all(futures)
     playlists = list(itertools.chain(*results))
     if not include_tracks:
         playlists = [p.copy(tracks=[]) for p in playlists]
     return playlists
Example no. 15
 def test_get_all_raises_timeout_if_not_all_futures_are_available(self):
     try:
         self.results[0].set(0)
         self.results[2].set(2)
         result = get_all(self.results, timeout=0)
         self.fail('Should timeout')
     except gevent.Timeout:
         pass
Example no. 16
 def _nodes_down(self, size):
     # Make sure to iterate over self.cloud_nodes because what we're
     # counting here are compute nodes that are reported by the cloud
     # provider but are considered "down" by Arvados.
     return sum(1 for down in pykka.get_all(
         rec.actor.in_state('down')
         for rec in self.cloud_nodes.nodes.itervalues()
         if size is None or rec.cloud_node.size.id == size.id) if down)
Example no. 17
 def get_playlists(self, include_tracks=True):
     futures = [
         b.playlists.playlists for b in self.backends.with_playlists]
     results = pykka.get_all(futures)
     playlists = list(itertools.chain(*results))
     if not include_tracks:
         playlists = [p.copy(tracks=[]) for p in playlists]
     return playlists
Example no. 18
 def _node_states(self, size):
     states = pykka.get_all(rec.actor.get_state()
                            for rec in self.cloud_nodes.nodes.itervalues()
                            if ((size is None or rec.cloud_node.size.id == size.id) and
                                rec.shutdown_actor is None))
     states += ['shutdown' for rec in self.cloud_nodes.nodes.itervalues()
                if ((size is None or rec.cloud_node.size.id == size.id) and
                    rec.shutdown_actor is not None)]
     return states
Example no. 19
 def _nodes_missing(self, size):
     return sum(
         1 for arv_node in pykka.get_all(
             rec.actor.arvados_node
             for rec in self.cloud_nodes.nodes.itervalues()
             if rec.cloud_node.size.id == size.id
             and rec.actor.cloud_node.get().id not in self.shutdowns)
         if arv_node
         and cnode.arvados_node_missing(arv_node, self.node_stale_after))
Example no. 20
 def _node_states(self, size):
     proxy_states = []
     states = []
     for rec in self.cloud_nodes.nodes.itervalues():
         if size is None or rec.cloud_node.size.id == size.id:
             if rec.shutdown_actor is None and rec.actor is not None:
                 proxy_states.append(rec.actor.get_state())
             else:
                 states.append("shutdown")
     return states + pykka.get_all(proxy_states)
Example no. 21
 def _node_states(self, size):
     proxy_states = []
     states = []
     for rec in self.cloud_nodes.nodes.itervalues():
         if size is None or rec.cloud_node.size.id == size.id:
             if rec.shutdown_actor is None and rec.actor is not None:
                 proxy_states.append(rec.actor.get_state())
             else:
                 states.append("shutdown")
     return states + pykka.get_all(proxy_states)
Example no. 22
 def _node_states(self, size):
     states = pykka.get_all(
         rec.actor.get_state()
         for rec in self.cloud_nodes.nodes.itervalues()
         if ((size is None or rec.cloud_node.size.id == size.id)
             and rec.shutdown_actor is None))
     states += [
         'shutdown' for rec in self.cloud_nodes.nodes.itervalues()
         if ((size is None or rec.cloud_node.size.id == size.id)
             and rec.shutdown_actor is not None)
     ]
     return states
Example no. 23
    def refresh(self, uri_scheme=None):
        """
        Refresh the playlists in :attr:`playlists`.

        If ``uri_scheme`` is :class:`None`, all backends are asked to refresh.
        If ``uri_scheme`` is an URI scheme handled by a backend, only that
        backend is asked to refresh. If ``uri_scheme`` doesn't match any
        current backend, nothing happens.

        :param uri_scheme: limit to the backend matching the URI scheme
        :type uri_scheme: string
        """
        if uri_scheme is None:
            futures = [b.playlists.refresh() for b in self.backends.with_playlists]
            pykka.get_all(futures)
            listener.CoreListener.send("playlists_loaded")
        else:
            backend = self.backends.with_playlists_by_uri_scheme.get(uri_scheme, None)
            if backend:
                backend.playlists.refresh().get()
                listener.CoreListener.send("playlists_loaded")
Example no. 24
 def make_burger(self, order):
     recipe = self.recipes[order]
     future_ingredients = []
     for ingredient in recipe:
         if is_slice(ingredient):
             slicer = next(self.slicers)
             future_ingredient = slicer.ask_for_sliced(ingredient)
         else:
             griller = next(self.grillers)
             future_ingredient = griller.ask_for_grilled(ingredient)
         future_ingredients.append(future_ingredient)
     prepared_ingredients = pykka.get_all(future_ingredients)
     return Burger(order, prepared_ingredients)
Example no. 25
    def refresh(self, uri_scheme=None):
        """
        Refresh the playlists in :attr:`playlists`.

        If ``uri_scheme`` is :class:`None`, all backends are asked to refresh.
        If ``uri_scheme`` is an URI scheme handled by a backend, only that
        backend is asked to refresh. If ``uri_scheme`` doesn't match any
        current backend, nothing happens.

        :param uri_scheme: limit to the backend matching the URI scheme
        :type uri_scheme: string
        """
        if uri_scheme is None:
            futures = [b.playlists.refresh()
                       for b in self.backends.with_playlists.values()]
            pykka.get_all(futures)
            listener.CoreListener.send('playlists_loaded')
        else:
            backend = self.backends.with_playlists.get(uri_scheme, None)
            if backend:
                backend.playlists.refresh().get()
                listener.CoreListener.send('playlists_loaded')
Example no. 26
def run(pool_size, *ips):
    # Start resolvers
    resolvers = [Resolver.start().proxy() for _ in range(pool_size)]

    # Distribute work by mapping IPs to resolvers (not blocking)
    hosts = []
    for i, ip in enumerate(ips):
        hosts.append(resolvers[i % len(resolvers)].resolve(ip))

    # Gather results (blocking)
    ip_to_host = zip(ips, pykka.get_all(hosts))
    pprint.pprint(list(ip_to_host))

    # Clean up
    pykka.ActorRegistry.stop_all()
Example no. 27
def run(pool_size, *ips):
    # Start resolvers
    resolvers = [Resolver.start().proxy() for _ in range(pool_size)]

    # Distribute work by mapping IPs to resolvers (not blocking)
    hosts = []
    for i, ip in enumerate(ips):
        hosts.append(resolvers[i % len(resolvers)].resolve(ip))

    # Gather results (blocking)
    ip_to_host = zip(ips, pykka.get_all(hosts))
    pprint.pprint(list(ip_to_host))

    # Clean up
    pykka.ActorRegistry.stop_all()
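Examples 26 and 27 assume a Resolver actor; the Pykka documentation ships a similar example, and a sketch along those lines is:

import socket

import pykka


class Resolver(pykka.ThreadingActor):
    def resolve(self, ip):
        # Reverse-resolve one IP address; return None if the lookup fails.
        try:
            return socket.gethostbyaddr(ip)[0]
        except socket.herror:
            return None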
Example no. 28
    def find_exact(self, query=None, uris=None, **kwargs):
        """
        Search the library for tracks where ``field`` is ``values``.

        If the query is empty, and the backend can support it, all available
        tracks are returned.

        If ``uris`` is given, the search is limited to results from within the
        URI roots. For example passing ``uris=['file:']`` will limit the search
        to the local backend.

        Examples::

            # Returns results matching 'a' from any backend
            find_exact({'any': ['a']})
            find_exact(any=['a'])

            # Returns results matching artist 'xyz' from any backend
            find_exact({'artist': ['xyz']})
            find_exact(artist=['xyz'])

            # Returns results matching 'a' and 'b' and artist 'xyz' from any
            # backend
            find_exact({'any': ['a', 'b'], 'artist': ['xyz']})
            find_exact(any=['a', 'b'], artist=['xyz'])

            # Returns results matching 'a' if within the given URI roots
            # "file:///media/music" and "spotify:"
            find_exact(
                {'any': ['a']}, uris=['file:///media/music', 'spotify:'])
            find_exact(any=['a'], uris=['file:///media/music', 'spotify:'])

        :param query: one or more queries to search for
        :type query: dict
        :param uris: zero or more URI roots to limit the search to
        :type uris: list of strings or :class:`None`
        :rtype: list of :class:`mopidy.models.SearchResult`
        """
        query = query or kwargs
        futures = [
            backend.library.find_exact(query=query, uris=backend_uris)
            for (backend,
                 backend_uris) in self._get_backends_to_uris(uris).items()
        ]
        return [result for result in pykka.get_all(futures) if result]
Example no. 29
    def find_exact(self, query=None, uris=None, **kwargs):
        """
        Search the library for tracks where ``field`` is ``values``.

        If the query is empty, and the backend can support it, all available
        tracks are returned.

        If ``uris`` is given, the search is limited to results from within the
        URI roots. For example passing ``uris=['file:']`` will limit the search
        to the local backend.

        Examples::

            # Returns results matching 'a' from any backend
            find_exact({'any': ['a']})
            find_exact(any=['a'])

            # Returns results matching artist 'xyz' from any backend
            find_exact({'artist': ['xyz']})
            find_exact(artist=['xyz'])

            # Returns results matching 'a' and 'b' and artist 'xyz' from any
            # backend
            find_exact({'any': ['a', 'b'], 'artist': ['xyz']})
            find_exact(any=['a', 'b'], artist=['xyz'])

            # Returns results matching 'a' if within the given URI roots
            # "file:///media/music" and "spotify:"
            find_exact(
                {'any': ['a']}, uris=['file:///media/music', 'spotify:'])
            find_exact(any=['a'], uris=['file:///media/music', 'spotify:'])

        :param query: one or more queries to search for
        :type query: dict
        :param uris: zero or more URI roots to limit the search to
        :type uris: list of strings or :class:`None`
        :rtype: list of :class:`mopidy.models.SearchResult`
        """
        query = query or kwargs
        futures = [
            backend.library.find_exact(query=query, uris=backend_uris)
            for (backend, backend_uris)
            in self._get_backends_to_uris(uris).items()]
        return [result for result in pykka.get_all(futures) if result]
Example no. 30
def _load_songs(songs):
    """ A wrapper around _load_song that
    allows for multiple songs to be looked
    up using the same database connection
    """
    loaders = [
        Loader.start().proxy() for _ in range(min(len(songs), cpu_count()))
    ]

    loaded = []
    for i, song in enumerate(songs):
        loaded.append(loaders[i % len(loaders)].load(song))

    segs = _flatten(pykka.get_all(loaded))

    for loader in loaders:
        loader.stop()

    return segs
Example no. 31
    def search(self, **query):
        """
        Search the library for tracks where ``field`` contains ``values``.

        Examples::

            # Returns results matching 'a'
            search(any=['a'])
            # Returns results matching artist 'xyz'
            search(artist=['xyz'])
            # Returns results matching 'a' and 'b' and artist 'xyz'
            search(any=['a', 'b'], artist=['xyz'])

        :param query: one or more queries to search for
        :type query: dict
        :rtype: list of :class:`mopidy.models.Track`
        """
        futures = [
            b.library.search(**query) for b in self.backends.with_library]
        results = pykka.get_all(futures)
        return list(itertools.chain(*results))
Example no. 32
def parallel_run(pool_size, call_limit, queries):
    __logger = log4p.GetLogger(__name__,
                               logging_level=settings.DEBUG_LEVEL,
                               config=settings.LOGGING_CONFIG)
    callers = [Caller.start().proxy() for _ in range(pool_size)]

    json_results = []
    for i, query in enumerate(queries):
        if i < call_limit:
            json_results.append(callers[i %
                                        len(callers)].map_address_call(query))
        else:
            pass

    gathered_results = pykka.get_all(json_results)

    pykka.ActorRegistry.stop_all()

    __logger.logger.debug(gathered_results)

    return gathered_results
Example no. 33
    def get_distinct(self, field, query=None):
        """
        List distinct values for a given field from the library.

        This has mainly been added to support the list commands the MPD
        protocol supports in a more sane fashion. Other frontends are not
        recommended to use this method.

        :param string field: One of ``artist``, ``albumartist``, ``album``,
            ``composer``, ``performer``, ``date`` or ``genre``.
        :param dict query: Query to use for limiting results, see
            :meth:`search` for details about the query format.
        :rtype: set of values corresponding to the requested field type.

        .. versionadded:: 1.0
        """
        futures = [b.library.get_distinct(field, query)
                   for b in self.backends.with_library.values()]
        result = set()
        for r in pykka.get_all(futures):
            result.update(r)
        return result
Example no. 34
    def search(self, **query):
        """
        Search the library for tracks where ``field`` contains ``values``.

        Examples::

            # Returns results matching 'a'
            search(any=['a'])
            # Returns results matching artist 'xyz'
            search(artist=['xyz'])
            # Returns results matching 'a' and 'b' and artist 'xyz'
            search(any=['a', 'b'], artist=['xyz'])

        :param query: one or more queries to search for
        :type query: dict
        :rtype: list of :class:`mopidy.models.Track`
        """
        futures = [
            b.library.search(**query) for b in self.backends.with_library
        ]
        results = pykka.get_all(futures)
        return list(itertools.chain(*results))
Example no. 35
def extract_recipes(ingredient_list, count=50):
    """
    Extracts recipes for a list of ingredients *Multi-Threaded Solution*
    :param count: number of recipes to extract
    :param ingredient_list: list of ingredients to serve the initial query
    :return: Dictionary of recipes (includes link and ingredient list for each recipe)
    """
    query = ", ".join(ingredient_list)
    # Initiate the search
    base_url = "http://allrecipes.com"
    entry = base_url + "/search/results/?wt=" + query + "&sort=re"
    start_page = requests.get(entry)
    tree = html.fromstring(start_page.content)
    response = tree.xpath('//article[contains(@class, \'grid-col--fixed-tiles\')]//@href')
    # Extract search result links
    links = set()
    for r in response:
        if "/recipe/" in str(r):
            links.add(base_url + r)
    # Spawn workers to process each link
    links = list(links)
    shuffle(links)
    futures, workers = [], []
    for i in xrange(min(count, len(links))):
        message = {'link': links[i]}
        actor_ref = Worker.start()
        workers.append(actor_ref)
        futures.append(actor_ref.ask(message, block=False))
    # Collect and merge worker answers
    recipes = dict()
    answers = pykka.get_all(futures)
    for answer in answers:
        recipes[answer['name']] = dict()
        recipes[answer['name']]['ingredients'] = answer['ingredients']
        recipes[answer['name']]['link'] = answer['link']
        recipes[answer['name']]['picture'] = answer['picture']
    for worker in workers:
        worker.stop()
    return recipes
Example no. 36
    def get_distinct(self, field, query=None):
        """
        List distinct values for a given field from the library.

        This has mainly been added to support the list commands the MPD
        protocol supports in a more sane fashion. Other frontends are not
        recommended to use this method.

        :param string field: One of ``track``, ``artist``, ``albumartist``,
            ``album``, ``composer``, ``performer``, ``date`` or ``genre``.
        :param dict query: Query to use for limiting results, see
            :meth:`search` for details about the query format.
        :rtype: set of values corresponding to the requested field type.

        .. versionadded:: 1.0
        """
        futures = [
            b.library.get_distinct(field, query)
            for b in self.backends.with_library.values()
        ]
        result = set()
        for r in pykka.get_all(futures):
            result.update(r)
        return result
Example no. 37
 def _actor_nodes(self, node_actor):
     return pykka.get_all([node_actor.cloud_node, node_actor.arvados_node])
Example no. 38
 def _nodes_busy(self, size):
     return sum(1 for busy in pykka.get_all(
         rec.actor.in_state('busy')
         for rec in self.cloud_nodes.nodes.itervalues()
         if rec.cloud_node.size.id == size.id) if busy)
Example no. 39
 def test_get_all_blocks_until_all_futures_are_available(self):
     self.results[0].set(0)
     self.results[1].set(1)
     self.results[2].set(2)
     result = get_all(self.results)
     self.assertEqual(result, [0, 1, 2])
Example no. 40
    def test_multi_threaded_analog_read(self, multi_threading_test_devices,
                                        seed):
        # Reset the pseudorandom number generator with seed.
        random.seed(seed)

        sample_rate = 10000
        samples_per_read = int(sample_rate / 10)

        number_of_reads = random.randint(200, 500)
        number_of_samples = samples_per_read * number_of_reads

        channels_to_test = []
        for device in multi_threading_test_devices:
            channels_to_test.append(random.choice(device.ai_physical_chans))

        tasks = []
        try:
            for channel_to_test in channels_to_test:
                task = None
                try:
                    task = nidaqmx.Task()
                    task.ai_channels.add_ai_voltage_chan(channel_to_test.name,
                                                         max_val=10,
                                                         min_val=-10)
                    task.timing.cfg_samp_clk_timing(
                        sample_rate, samps_per_chan=number_of_samples)
                except nidaqmx.DaqError:
                    if task is not None:
                        task.close()
                    raise
                else:
                    tasks.append(task)
        except nidaqmx.DaqError:
            for task in tasks:
                task.close()
            raise

        actor_refs = []
        actor_proxies = []
        for task in tasks:
            actor_ref = DAQmxReaderActor.start(task)
            actor_refs.append(actor_ref)
            actor_proxies.append(actor_ref.proxy())

        try:
            for task in tasks:
                task.start()

            read_futures = []
            for actor_proxy in actor_proxies:
                read_futures.append(
                    actor_proxy.read(samples_per_read,
                                     number_of_reads,
                                     timeout=2))

            pykka.get_all(read_futures, (number_of_samples / sample_rate) + 10)

        finally:
            for task in tasks:
                task.close()

            for actor_ref in actor_refs:
                try:
                    actor_ref.stop(timeout=(number_of_samples / sample_rate) +
                                   10)
                except pykka.Timeout:
                    print('Could not stop actor {0} within the specified '
                          'timeout.'.format(actor_ref))
Example no. 41
 def test_get_all_blocks_until_all_futures_are_available(self):
     self.results[0].set(0)
     self.results[1].set(1)
     self.results[2].set(2)
     result = get_all(self.results)
     self.assertEqual(result, [0, 1, 2])
Example no. 42
 def get_uri_schemes(self):
     futures = [b.uri_schemes for b in self.backends]
     results = pykka.get_all(futures)
     uri_schemes = itertools.chain(*results)
     return sorted(uri_schemes)
Example no. 43
 def _nodes_missing(self, size):
     return sum(1 for arv_node in
                pykka.get_all(rec.actor.arvados_node for rec in
                              self.cloud_nodes.nodes.itervalues()
                              if rec.cloud_node.size.id == size.id and rec.actor.cloud_node.get().id not in self.shutdowns)
                if arv_node and cnode.arvados_node_missing(arv_node, self.node_stale_after))
Example no. 44
 def _nodes_busy(self):
     return sum(1 for idle in
                pykka.get_all(rec.actor.in_state('idle') for rec in
                              self.cloud_nodes.nodes.itervalues())
                if idle is False)
Example no. 45
def status(context):
    """
    *musicpd.org, status section:*

        ``status``

        Reports the current status of the player and the volume level.

        - ``volume``: 0-100 or -1
        - ``repeat``: 0 or 1
        - ``single``: 0 or 1
        - ``consume``: 0 or 1
        - ``playlist``: 31-bit unsigned integer, the playlist version
          number
        - ``playlistlength``: integer, the length of the playlist
        - ``state``: play, stop, or pause
        - ``song``: playlist song number of the current song stopped on or
          playing
        - ``songid``: playlist songid of the current song stopped on or
          playing
        - ``nextsong``: playlist song number of the next song to be played
        - ``nextsongid``: playlist songid of the next song to be played
        - ``time``: total time elapsed (of current playing/paused song)
        - ``elapsed``: Total time elapsed within the current song, but with
          higher resolution.
        - ``bitrate``: instantaneous bitrate in kbps
        - ``xfade``: crossfade in seconds
        - ``audio``: sampleRate``:bits``:channels
        - ``updatings_db``: job id
        - ``error``: if there is an error, returns message here

    *Clarifications based on experience implementing*
        - ``volume``: can also be -1 if no output is set.
        - ``elapsed``: Higher resolution means time in seconds with three
          decimal places for millisecond precision.
    """
    futures = {
        'tracklist.length': context.core.tracklist.length,
        'tracklist.version': context.core.tracklist.version,
        'playback.volume': context.core.playback.volume,
        'tracklist.consume': context.core.tracklist.consume,
        'tracklist.random': context.core.tracklist.random,
        'tracklist.repeat': context.core.tracklist.repeat,
        'tracklist.single': context.core.tracklist.single,
        'playback.state': context.core.playback.state,
        'playback.current_tl_track': context.core.playback.current_tl_track,
        'tracklist.index': (
            context.core.tracklist.index(
                context.core.playback.current_tl_track.get())),
        'playback.time_position': context.core.playback.time_position,
    }
    pykka.get_all(futures.values())
    result = [
        ('volume', _status_volume(futures)),
        ('repeat', _status_repeat(futures)),
        ('random', _status_random(futures)),
        ('single', _status_single(futures)),
        ('consume', _status_consume(futures)),
        ('playlist', _status_playlist_version(futures)),
        ('playlistlength', _status_playlist_length(futures)),
        ('xfade', _status_xfade(futures)),
        ('state', _status_state(futures)),
    ]
    if futures['playback.current_tl_track'].get() is not None:
        result.append(('song', _status_songpos(futures)))
        result.append(('songid', _status_songid(futures)))
    if futures['playback.state'].get() in (
            PlaybackState.PLAYING, PlaybackState.PAUSED):
        result.append(('time', _status_time(futures)))
        result.append(('elapsed', _status_time_elapsed(futures)))
        result.append(('bitrate', _status_bitrate(futures)))
    return result
Example no. 46
 def _actor_nodes(self, node_actor):
     return pykka.get_all([node_actor.cloud_node, node_actor.arvados_node])
Example no. 47
    def __init__(self, pantry, n_slicers=8, n_grillers=4, n_builders=16):
        self.slicers = [SlicerActor.start(pantry).proxy() for _ in range(n_slicers)]
        self.grillers = [GrillerActor.start(pantry).proxy() for _ in range(n_grillers)]
        self.builders = [
            BurgerBuilderActor.start(RECIPES, self.slicers, self.grillers).proxy() for _ in range(n_builders)
        ]


if __name__ == "__main__":
    multiplier = int(sys.argv[2]) if len(sys.argv) > 2 else 1
    orders = multiplier * sys.argv[1].split(",")

    kitchen = KitchenActors(Pantry())
    builder_generator = round_robin_generator(kitchen.builders)

    start_time = time.time()
    future_burgers = []
    for order in orders:
        builder = next(builder_generator)
        future_burger = builder.make_burger(order)
        future_burgers.append(future_burger)

    burgers = pykka.get_all(future_burgers)

    for order, burger in zip(orders, burgers):
        burger.taste(RECIPES[order])
        print(f"You can eat your delicious '{burger}'")

    pykka.ActorRegistry.stop_all()
    print(f"Delivered {len(orders)} burgers in {time.time()-start_time}s")
Example no. 48
 def get_playlists(self):
     futures = [b.playlists.playlists for b in self.backends.with_playlists]
     results = pykka.get_all(futures)
     return list(itertools.chain(*results))
Example no. 49
 def get_playlists(self):
     futures = [
         b.playlists.playlists for b in self.backends.with_playlists]
     results = pykka.get_all(futures)
     return list(itertools.chain(*results))
Example no. 50
 def get_uri_schemes(self):
     """Get list of URI schemes we can handle"""
     futures = [b.uri_schemes for b in self.backends]
     results = pykka.get_all(futures)
     uri_schemes = itertools.chain(*results)
     return sorted(uri_schemes)
Example no. 51
def analyze_segments(segs):
    ss = SongSegment()

    count = ss.count()

    allMatches = list(map(lambda x: [], segs))

    matchers = [Matcher.start().proxy() for _ in range(cpu_count())]
    print("Searching through " + str(count // BUCKET_SIZE + 1) +
          " buckets, with " + str(BUCKET_SIZE) + " segments in each")
    for i in range(0, count // BUCKET_SIZE + 1):
        print("Bucket: " + str(i + 1))
        established_segments = list(
            filter(
                lambda x: x['mfcc'] is not None and x['chroma'] is not None and
                x['tempogram'] is not None,
                ss.get_all_in_range(i * BUCKET_SIZE, (i + 1) * BUCKET_SIZE)))
        established_segments = list(
            map(_process_db_segment, established_segments))

        data = np.array(list(map(lambda x: x[3], established_segments)))

        bucket = _create_bucket(data)

        query_object = bucket[1].construct_query_pool()
        query_object.set_num_probes(25)

        matched = []
        for i, seg in enumerate(segs):
            matched.append(matchers[i % len(matchers)].match(
                seg, query_object))

        matches = pykka.get_all(matched)

        for j in range(0, len(matches)):
            allMatches[j].append(
                list(map(lambda x: established_segments[x], matches[j])))

        del data
        del bucket
        del established_segments
        del query_object
        del matches

    for matcher in matchers:
        matcher.stop()

    for i in range(0, len(segs)):
        best = _find_best_matches(_flatten(allMatches[i]), segs[i])

        matches = ss.get_by_ids(list(map(lambda match: match[0][0], best)))
        matches = list(
            map(lambda match: (match['_id'], match['similar']), matches))

        for j in range(0, len(matches)):
            matches[j][1].append(
                dict({
                    'id': segs[i][0],
                    'distance': best[j][1],
                }))

            match_ids = list(set(map(lambda x: x['id'], matches[j][1])))
            innerMatches = list(
                map(
                    lambda match_id: next(x for x in matches[j][1]
                                          if x['id'] == match_id), match_ids))
            innerMatches.sort(key=lambda m: m['distance'])
            ss.update_similar(matches[j][0], innerMatches[:10])

        formatted = []
        for match in best:
            formatted.append(dict({'id': match[0][0], 'distance': match[1]}))

        ss.update_similar(segs[i][0], formatted)

    ss.close()
Example no. 52
 def get_uri_schemes(self):
     """Get list of URI schemes we can handle"""
     futures = [b.uri_schemes for b in self.backends]
     results = pykka.get_all(futures)
     uri_schemes = itertools.chain(*results)
     return sorted(uri_schemes)
Example no. 53
 def _get_actor_attrs(self, actor, *attr_names):
     return pykka.get_all([getattr(actor, name) for name in attr_names])
Example no. 54
 def _get_actor_attrs(self, actor, *attr_names):
     return pykka.get_all([getattr(actor, name) for name in attr_names])
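Examples 53 and 54 fetch several proxy attributes concurrently and wait for all of them at once, instead of calling .get() on each future in turn. A hypothetical call site, using attribute names borrowed from Examples 37 and 46, might look like:

# Both attributes are requested up front and returned as a two-element list.
cloud_node, arvados_node = self._get_actor_attrs(node_actor, 'cloud_node', 'arvados_node')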
Example no. 55
 def get_uri_schemes(self):
     futures = [b.uri_schemes for b in self.backends]
     results = pykka.get_all(futures)
     uri_schemes = itertools.chain(*results)
     return sorted(uri_schemes)
Example no. 56
def status(context):
    """
    *musicpd.org, status section:*

        ``status``

        Reports the current status of the player and the volume level.

        - ``volume``: 0-100 or -1
        - ``repeat``: 0 or 1
        - ``single``: 0 or 1
        - ``consume``: 0 or 1
        - ``playlist``: 31-bit unsigned integer, the playlist version
          number
        - ``playlistlength``: integer, the length of the playlist
        - ``state``: play, stop, or pause
        - ``song``: playlist song number of the current song stopped on or
          playing
        - ``songid``: playlist songid of the current song stopped on or
          playing
        - ``nextsong``: playlist song number of the next song to be played
        - ``nextsongid``: playlist songid of the next song to be played
        - ``time``: total time elapsed (of current playing/paused song)
        - ``elapsed``: Total time elapsed within the current song, but with
          higher resolution.
        - ``bitrate``: instantaneous bitrate in kbps
        - ``xfade``: crossfade in seconds
        - ``audio``: sampleRate``:bits``:channels
        - ``updatings_db``: job id
        - ``error``: if there is an error, returns message here

    *Clarifications based on experience implementing*
        - ``volume``: can also be -1 if no output is set.
        - ``elapsed``: Higher resolution means time in seconds with three
          decimal places for millisecond precision.
    """
    futures = {
        'tracklist.length':
        context.core.tracklist.length,
        'tracklist.version':
        context.core.tracklist.version,
        'playback.volume':
        context.core.playback.volume,
        'tracklist.consume':
        context.core.tracklist.consume,
        'tracklist.random':
        context.core.tracklist.random,
        'tracklist.repeat':
        context.core.tracklist.repeat,
        'tracklist.single':
        context.core.tracklist.single,
        'playback.state':
        context.core.playback.state,
        'playback.current_tl_track':
        context.core.playback.current_tl_track,
        'tracklist.index': (context.core.tracklist.index(
            context.core.playback.current_tl_track.get())),
        'playback.time_position':
        context.core.playback.time_position,
    }
    pykka.get_all(futures.values())
    result = [
        ('volume', _status_volume(futures)),
        ('repeat', _status_repeat(futures)),
        ('random', _status_random(futures)),
        ('single', _status_single(futures)),
        ('consume', _status_consume(futures)),
        ('playlist', _status_playlist_version(futures)),
        ('playlistlength', _status_playlist_length(futures)),
        ('xfade', _status_xfade(futures)),
        ('state', _status_state(futures)),
    ]
    if futures['playback.current_tl_track'].get() is not None:
        result.append(('song', _status_songpos(futures)))
        result.append(('songid', _status_songid(futures)))
    if futures['playback.state'].get() in (PlaybackState.PLAYING,
                                           PlaybackState.PAUSED):
        result.append(('time', _status_time(futures)))
        result.append(('elapsed', _status_time_elapsed(futures)))
        result.append(('bitrate', _status_bitrate(futures)))
    return result
Example no. 57
 def _nodes_busy(self, size):
     return sum(1 for busy in
                pykka.get_all(rec.actor.in_state('busy') for rec in
                              self.cloud_nodes.nodes.itervalues()
                              if rec.cloud_node.size.id == size.id)
                if busy)