Example #1
from google.appengine.ext import ndb

def tasklet(func, args, kwds, ff=True):
    # Wrap a plain generator function as an NDB tasklet and invoke it.
    wrapped = ndb.tasklet(func)
    rv = wrapped(*args, **kwds)

    # Optionally fast-forward the event loop so queued callbacks run;
    # fast_forward_eventloop is assumed to be defined in the same test module.
    if ff:
        fast_forward_eventloop()
    return rv
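A minimal usage sketch for this helper (the tasklet body below is illustrative and not from the original source; ff=False skips the event-loop fast-forward so no fast_forward_eventloop stub is needed):

def _double(x):
    # Yield once so ndb runs this as a generator-based tasklet.
    yield ndb.sleep(0)
    raise ndb.Return(x * 2)

fut = tasklet(_double, args=(21,), kwds={}, ff=False)
assert fut.get_result() == 42  # get_result() drives the event loop to completion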
Example #2
from google.appengine.ext import ndb

def _make_async(method):
    def _async(self, *args, **kw):
        # Method specs attached by the API-declaration machinery
        path = method._request_specs['path']
        args_names = method._request_specs['args']
        defaults = method._request_specs['defaults']
        prop_to_param = method._request_specs['prop_to_param']
        map_params = method._request_specs['map_params']

        # Build the parameter map from the call arguments, then pull the
        # remaining parameters from instance attributes ('api' is a helper
        # module imported elsewhere in the original source)
        args_map = api.map_func_args(args, kw, args_names, defaults)
        for attr_name in prop_to_param:
            args_map[attr_name] = getattr(self, attr_name, None)

        # Translate parameter names and drop parameters with None values
        params = api.translate_args(args_map, map_params)
        params = {k: v for k, v in params.items() if v is not None}

        # Perform the async HTTP call, then hand the raw result to the
        # wrapped method for post-processing
        kw['api_result'] = yield self.api.get_async(path, params=params)
        raise ndb.Return(method(self, *args, **kw))

    return ndb.tasklet(_async)
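A sketch of how this factory might be wired up. The client class, endpoint, and spec values are hypothetical; only the _request_specs layout and the api helper calls are taken from the code above, and the exact shape of the spec values depends on those helpers:

class PhotoClient(object):
    user_id = None  # injected into the request via 'prop_to_param'

    def photos(self, count=10, api_result=None):
        # Synchronous post-processing of the raw API response.
        return api_result['items'][:count]

    # Request metadata consumed by _make_async (hypothetical endpoint).
    photos._request_specs = {
        'path': '/photos',
        'args': ['count'],
        'defaults': {'count': 10},
        'prop_to_param': ['user_id'],
        'map_params': {'user_id': 'uid'},
    }
    photos_async = _make_async(photos)

PhotoClient().photos_async() then returns an ndb.Future whose get_result() yields the post-processed response once the HTTP call completes.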
Example #3
    def _do_update(self, no_rfi):
        # Kick off retrieval of all RawFileInfo entities from the Datastore
        if no_rfi:
            all_rfi_future = ndb.tasklet(lambda: ())()
        else:
            all_rfi_future = RawFileInfo.query().fetch_async()

        # Kick off check for new vim version
        refresh_vim_version_future = self.refresh_vim_version_async()

        # Kick off retrieval of 'runtime/doc' dir listing in github
        docdir_future = vim_github_request_async(
            '/repos/vim/vim/contents/runtime/doc', self._g.docdir_etag)

        # Put all RawFileInfo entities into a map
        rfi_map = {r.key.string_id(): r for r in all_rfi_future.get_result()}

        processor_futures = set()
        processor_futures_by_name = {}

        def processor_futures_add(name, value):
            processor_futures.add(value)
            processor_futures_by_name[name] = value

        def queue_urlfetch(name, url, git_sha=None):
            rfi = rfi_map.get(name)
            etag = rfi.etag if rfi is not None else None
            logging.debug("fetching %s (etag: %s)", name, etag)
            processor_future = ProcessorHTTP.create_async(name, git_sha,
                                                          url=url, etag=etag)
            processor_futures_add(name, processor_future)

        # Kick off FAQ download

        queue_urlfetch(FAQ_NAME, FAQ_BASE_URL + FAQ_NAME)

        # Iterating over 'runtime/doc' dir listing, kick off download for all
        # modified items

        docdir = docdir_future.get_result()

        if docdir.status_code == HTTP_NOT_MOD:
            logging.info("doc dir not modified")
        elif docdir.status_code == HTTP_OK:
            self._g.docdir_etag = docdir.headers.get(HTTP_HDR_ETAG)
            self._g_changed = True
            logging.debug("got doc dir etag %s", self._g.docdir_etag)
            for item in docdir.json:
                name = item['name'].encode()
                if item['type'] == 'file' and DOC_ITEM_RE.match(name):
                    assert name not in processor_futures_by_name
                    git_sha = item['sha'].encode()
                    rfi = rfi_map.get(name)
                    if rfi is not None and rfi.git_sha == git_sha:
                        logging.debug("%s unchanged (sha=%s)", name,
                                      rfi.git_sha)
                        continue
                    elif rfi is None:
                        logging.debug("%s is new (sha=%s)", name, git_sha)
                    else:
                        logging.debug("%s changed (%s != %s)", name,
                                      rfi.git_sha, git_sha)
                    queue_urlfetch(name, item['download_url'], git_sha)

        # Check if we have a new vim version
        is_new_vim_version = refresh_vim_version_future.get_result()

        # If there is no new vim version, and the only file we're downloading
        # is the FAQ, and the FAQ was not modified, then there is nothing for
        # us to do, so bail out now

        if not is_new_vim_version and len(processor_futures) == 1:
            faq_uf = processor_futures_by_name[FAQ_NAME].get_result()
            if faq_uf.http_result().status_code == HTTP_NOT_MOD:
                return

        @ndb.tasklet
        def get_content_async(name):
            processor_future = processor_futures_by_name.get(name)
            # Do we already have retrieval queued?
            if processor_future is not None:
                # If so, wait for that and return the content.
                processor = yield processor_future
                content = yield processor.raw_content_async()
            else:
                # If we don't have retrieval queued, that means we must already
                # have the latest version in the Datastore, so get the content
                # from there.
                rfc = yield RawFileContent.get_by_id_async(name)
                content = rfc.data
            raise ndb.Return(content)

        # Make sure we are retrieving tags, either from HTTP or from Datastore
        tags_future = get_content_async(TAGS_NAME)

        # Make sure we are retrieving FAQ, either from HTTP or from Datastore
        faq_future = get_content_async(FAQ_NAME)

        # If we found a new vim version and we're not already downloading
        # help.txt, kick off its retrieval from the Datastore instead
        # (since we're displaying the current vim version in the rendered
        # help.txt.html)
        if is_new_vim_version and HELP_NAME not in processor_futures_by_name:
            processor_futures_add(HELP_NAME,
                                  ProcessorDB.create_async(HELP_NAME))

        # Construct the vimhelp-to-html converter, providing it the tags file,
        # and adding on the FAQ for extra tags
        h2h = VimH2H(tags_future.get_result(), version=self._g.vim_version)
        h2h.add_tags(FAQ_NAME, faq_future.get_result())

        # Wait for urlfetches and Datastore accesses to return; kick off the
        # processing as they do so

        while len(processor_futures) > 0:
            try:
                future = ndb.Future.wait_any(processor_futures)
                processor = future.get_result()
            except urlfetch.Error as e:
                logging.error(e)
                # If we could not fetch the URL, continue with the others, but
                # set 'self._g_changed' to False so we do not save the
                # 'GlobalInfo' object at the end, so that we will retry at the
                # next run
                self._g_changed = False
            else:  # no exception was raised
                processor.process_async(h2h)
                # Because this method is decorated '@ndb.toplevel', we don't
                # need to keep hold of the future returned by the above line:
                # this method automatically waits for all outstanding futures
                # before returning.
            processor_futures.remove(future)
            # Look up the entry by identity: 'processor' is unbound here when
            # get_result() raised, so we can't use processor.name().
            for name, fut in processor_futures_by_name.items():
                if fut is future:
                    del processor_futures_by_name[name]
                    break
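The while loop above is the standard pattern for draining a set of NDB futures as each one completes. Distilled, with a trivial tasklet standing in for the processors (Python 2, matching the surrounding App Engine code):

from google.appengine.ext import ndb

@ndb.tasklet
def _work(n):
    yield ndb.sleep(0)
    raise ndb.Return(n * n)

futures = set(_work(n) for n in range(5))
while futures:
    done = ndb.Future.wait_any(futures)  # runs the event loop until one completes
    futures.remove(done)
    print done.get_result()  # raises here if the tasklet failed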