Example #1
    def gather_candidates(self, context):
        refresh_boilerplate = False
        if not self.boilerplate:
            bp = cache.retrieve(('boilerplate~',))
            if bp:
                self.boilerplate = bp.completions[:]
                refresh_boilerplate = True
            else:
                # This should be the first time any completion has happened,
                # so `wait` will be True.
                worker.work_queue.put((('boilerplate~',), [], '', 1, 0, '', None))

        line = context['position'][1]
        col = context['complete_position']
        buf = self.vim.current.buffer
        src = buf[:]

        extra_modules = []
        cache_key = None
        cached = None
        refresh = True
        wait = False

        # Inclusion filters for the results
        filters = []

        if re.match(r'^\s*(from|import)\s+', context['input']) \
                and not re.match(r'^\s*from\s+\S+\s+', context['input']):
            # If starting an import, only show module results
            filters.append('module')

        cache_key, extra_modules = cache.cache_context(buf.name, context, src,
                                                       self.extra_path)
        cached = cache.retrieve(cache_key)
        if cached and not cached.refresh:
            modules = cached.modules
            if all([filename in modules for filename in extra_modules]) \
                    and all([utils.file_mtime(filename) == mtime
                             for filename, mtime in modules.items()]):
                # The cache is still valid
                refresh = False

        if cache_key and (cache_key[-1] in ('dot', 'vars', 'import', 'import~') or
                          (cached and cache_key[-1] == 'package' and
                           not len(cached.modules))):
            # Always refresh scoped variables and module imports.  Additionally
            # refresh cached items that did not have associated module files.
            refresh = True

        # Extra options to pass to the server.
        options = {
            'cwd': context.get('cwd'),
            'extra_path': self.extra_path,
            'runtimepath': context.get('runtimepath'),
        }

        if (not cached or refresh) and cache_key and cache_key[-1] == 'package':
            # Create a synthetic completion for a module import as a fallback.
            synthetic_src = ['import {0}; {0}.'.format(cache_key[0])]
            options.update({
                'synthetic': {
                    'src': synthetic_src,
                    'line': 1,
                    'col': len(synthetic_src[0]),
                }
            })

        if not cached:
            wait = True

        # Note: This waits a very short amount of time to give the server or
        # cache a chance to reply.  If there's no reply during this period,
        # empty results are returned and we defer to deoplete's async refresh.
        # The current request's async status is tracked in `_async_keys`.
        # If the async cache result is older than 5 seconds, the completion
        # request goes back to the default behavior of attempting to refresh as
        # needed by the `refresh` and `wait` variables above.
        self.debug('Key: %r, Refresh: %r, Wait: %r, Async: %r', cache_key,
                   refresh, wait, cache_key in self._async_keys)

        context['is_async'] = cache_key in self._async_keys
        if context['is_async']:
            if not cached:
                self.debug('[async] waiting for completions: %r', cache_key)
                return []
            else:
                self._async_keys.remove(cache_key)
                context['is_async'] = False
                if time.time() - cached.time < 5:
                    self.debug('[async] finished: %r', cache_key)
                    return self.finalize_cached(cache_key, filters, cached)
                else:
                    self.debug('[async] outdated: %r', cache_key)

        if cache_key and (not cached or refresh):
            n = time.time()
            wait_complete = False
            worker.work_queue.put((cache_key, extra_modules, '\n'.join(src),
                                   line, col, str(buf.name), options))
            while wait and time.time() - n < 0.25:
                cached = cache.retrieve(cache_key)
                if cached and cached.time >= n:
                    self.debug('Got updated cache, stopped waiting.')
                    wait_complete = True
                    break
                time.sleep(0.01)

            if wait and not wait_complete:
                self._async_keys.add(cache_key)
                context['is_async'] = True
                self.debug('[async] deferred: %r', cache_key)
                return []

        if refresh_boilerplate:
            # This should only occur the first time completions happen.
            # Refresh the boilerplate to ensure it's always up to date (just in
            # case).
            self.debug('Refreshing boilerplate')
            worker.work_queue.put((('boilerplate~',), [], '', 1, 0, '', None))

        return self.finalize_cached(cache_key, filters, cached)
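All five examples hand work to a background worker through `worker.work_queue` and later poll `cache.retrieve` for the result. The sketch below is a minimal model of that handshake, assuming the payload order used in Example #1 (key, extra modules, source, line, column, filename, options); the `CacheEntry` fields and the consumer body are illustrative stand-ins, not the plugin's actual worker or cache implementation.

import queue
import threading
import time

work_queue = queue.Queue()  # stand-in for worker.work_queue
_results = {}               # stand-in for the cache module's store


class CacheEntry:
    """Minimal record with the fields the examples poll on."""

    def __init__(self, completions, modules):
        self.completions = completions
        self.modules = modules   # {filename: mtime} of source files used
        self.refresh = False
        self.time = time.time()  # freshness stamp checked by the caller


def consumer():
    while True:
        # Payload order taken from the put() calls in Example #1.
        key, extra_modules, src, line, col, filename, options = work_queue.get()
        completions = []  # a real worker would run the completer here
        _results[key] = CacheEntry(completions, {})
        work_queue.task_done()


threading.Thread(target=consumer, daemon=True).start()
work_queue.put((('test.py', 'vars'), [], 'x = 1\nx.', 2, 2, 'test.py', None))
work_queue.join()
print(_results[('test.py', 'vars')].completions)  # -> []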
Example #2
    def gather_candidates(self, context):
        refresh_boilerplate = False
        if not self.boilerplate:
            bp = cache.retrieve(('boilerplate~',))
            if bp:
                self.boilerplate = bp.completions[:]
                refresh_boilerplate = True
            else:
                # This should be the first time any completion has happened,
                # so `wait` will be True.
                worker.work_queue.put((('boilerplate~',), [], '', 1, 0, ''))

        line = context['position'][1]
        col = context['complete_position']
        buf = self.vim.current.buffer
        src = buf[:]

        extra_modules = []
        cache_key = None
        cached = None
        refresh = True
        wait = False

        # Inclusion filters for the results
        filters = []

        if re.match(r'^\s*(from|import)\s+', context['input']) \
                and not re.match(r'^\s*from\s+\S+\s+', context['input']):
            # If starting an import, only show module results
            filters.append('module')

        cache_key, extra_modules = cache.cache_context(buf.name, context, src)
        cached = cache.retrieve(cache_key)
        if cached and not cached.refresh:
            modules = cached.modules
            if all([filename in modules for filename in extra_modules]) \
                    and all([utils.file_mtime(filename) == mtime
                             for filename, mtime in modules.items()]):
                # The cache is still valid
                refresh = False

        if cache_key and (cache_key[-1] in ('dot', 'vars', 'import', 'import~')
                          or (cached and cache_key[-1] == 'package'
                              and not len(cached.modules))):
            # Always refresh scoped variables and module imports.  Additionally
            # refresh cached items that did not have associated module files.
            refresh = True

        if (not cached
                or refresh) and cache_key and cache_key[-1] == 'package':
            # Build a synthetic import source for the module to guarantee
            # that the correct completions are produced.
            src = ['from {} import '.format(cache_key[0])]
            self.debug('source: %r', src)
            line = 1
            col = len(src[0])

        if cached is None:
            wait = True

        self.debug('Key: %r, Refresh: %r, Wait: %r', cache_key, refresh, wait)
        if cache_key and (not cached or refresh):
            n = time.time()
            worker.work_queue.put(
                (cache_key, extra_modules, '\n'.join(src), line, col,
                 str(buf.name)))
            while wait and time.time() - n < 2:
                cached = cache.retrieve(cache_key)
                if cached and cached.time >= n:
                    break
                time.sleep(0.01)

        if refresh_boilerplate:
            # This should only occur the first time completions happen.
            # Refresh the boilerplate to ensure it's always up to date (just in
            # case).
            self.debug('Refreshing boilerplate')
            worker.work_queue.put((('boilerplate~',), [], '', 1, 0, ''))

        if cached:
            if cached.completions is None:
                out = self.mix_boilerplate([])
            elif cache_key[-1] == 'vars':
                out = self.mix_boilerplate(cached.completions)
            else:
                out = cached.completions
            if filters:
                out = (x for x in out if x['$type'] in filters)
            return sorted(out, key=sort_key)
        return []
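Both early versions block for up to two seconds in a poll loop (`while wait and time.time() - n < 2`). That pattern generalizes to a small helper; this is a sketch only, assuming results carry a `.time` stamp the way the cache entries above do (`fetch` stands in for `cache.retrieve(cache_key)`).

import time


def wait_for(fetch, newer_than, timeout=2.0, interval=0.01):
    """Poll fetch() until it returns a result stamped at or after
    `newer_than`, or until `timeout` seconds have elapsed.  Returns the
    result, or None if the deadline passes first."""
    deadline = newer_than + timeout
    while time.time() < deadline:
        result = fetch()
        if result is not None and result.time >= newer_than:
            return result
        time.sleep(interval)
    return None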
Example #3
    def gather_candidates(self, context):
        if not self.workers_started:
            if self.python_path and 'VIRTUAL_ENV' not in os.environ:
                cache.python = self.python_path
            worker.start(max(1, self.worker_threads), self.description_length,
                         self.use_short_types, self.show_docstring,
                         self.debug_enabled, self.python_path)
            cache.start_background(worker.comp_queue)
            self.workers_started = True

        refresh_boilerplate = False
        if not self.boilerplate:
            bp = cache.retrieve(('boilerplate~',))
            if bp:
                self.boilerplate = bp.completions[:]
                refresh_boilerplate = True
            else:
                # This should be the first time any completion has happened,
                # so `wait` will be True.
                worker.work_queue.put((('boilerplate~',), [], '', 1, 0, ''))

        line = context['position'][1]
        col = context['complete_position']
        buf = self.vim.current.buffer
        src = buf[:]

        extra_modules = []
        cache_key = None
        cached = None
        refresh = True
        wait = False

        # Inclusion filters for the results
        filters = []

        if re.match(r'^\s*(from|import)\s+', context['input']) \
                and not re.match(r'^\s*from\s+\S+\s+', context['input']):
            # If starting an import, only show module results
            filters.append('module')

        cache_key, extra_modules = cache.cache_context(buf.name, context, src)
        cached = cache.retrieve(cache_key)
        if cached and not cached.refresh:
            modules = cached.modules
            if all([filename in modules for filename in extra_modules]) \
                    and all([utils.file_mtime(filename) == mtime
                             for filename, mtime in modules.items()]):
                # The cache is still valid
                refresh = False

        if cache_key and (cache_key[-1] in ('dot', 'vars', 'import', 'import~') or
                          (cached and cache_key[-1] == 'package' and
                           not len(cached.modules))):
            # Always refresh scoped variables and module imports.  Additionally
            # refresh cached items that did not have associated module files.
            refresh = True

        if (not cached or refresh) and cache_key and cache_key[-1] == 'package':
            # Build a synthetic import source for the module to guarantee
            # that the correct completions are produced.
            src = ['from {} import '.format(cache_key[0])]
            self.debug('source: %r', src)
            line = 1
            col = len(src[0])

        if cached is None:
            wait = True

        self.debug('Key: %r, Refresh: %r, Wait: %r', cache_key, refresh, wait)
        if cache_key and (not cached or refresh):
            n = time.time()
            worker.work_queue.put((cache_key, extra_modules, '\n'.join(src),
                                   line, col, str(buf.name)))
            while wait and time.time() - n < 2:
                cached = cache.retrieve(cache_key)
                if cached and cached.time >= n:
                    break
                time.sleep(0.01)

        if refresh_boilerplate:
            # This should only occur the first time completions happen.
            # Refresh the boilerplate to ensure it's always up to date (just in
            # case).
            self.debug('Refreshing boilerplate')
            worker.work_queue.put((('boilerplate~',), [], '', 1, 0, ''))

        if cached:
            if cached.completions is None:
                out = self.mix_boilerplate([])
            elif cache_key[-1] == 'vars':
                out = self.mix_boilerplate(cached.completions)
            else:
                out = cached.completions
            if filters:
                out = (x for x in out if x['$type'] in filters)
            return sorted(out, key=sort_key)
        return []
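Every version validates its cache the same way: the cached entry must cover all of the request's extra modules, and none of the recorded files may have changed on disk. Below is a standalone sketch of that test, assuming `utils.file_mtime` behaves like `os.path.getmtime` with a fallback for missing files (an assumption, not the plugin's code).

import os


def file_mtime(path):
    """Stand-in for utils.file_mtime: modification time, or 0 if the
    file cannot be stat'd (assumed behavior)."""
    try:
        return os.path.getmtime(path)
    except OSError:
        return 0


def cache_is_valid(cached_modules, extra_modules):
    """cached_modules maps filename -> mtime recorded at cache time."""
    return (all(f in cached_modules for f in extra_modules)
            and all(file_mtime(f) == mtime
                    for f, mtime in cached_modules.items()))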
Example #4
    def gather_candidates(self, context):
        refresh_boilerplate = False
        if not self.boilerplate:
            bp = cache.retrieve(('boilerplate~',))
            if bp:
                self.boilerplate = bp.completions[:]
                refresh_boilerplate = True
            else:
                # This should be the first time any completion has happened,
                # so `wait` will be True.
                worker.work_queue.put((('boilerplate~',), [], '', 1, 0, '', None))

        line = context['position'][1]
        col = context['complete_position']
        buf = self.vim.current.buffer
        src = buf[:]

        extra_modules = []
        cache_key = None
        cached = None
        refresh = True
        wait = False

        # Inclusion filters for the results
        filters = []

        if re.match(r'^\s*(from|import)\s+', context['input']) \
                and not re.match(r'^\s*from\s+\S+\s+', context['input']):
            # If starting an import, only show module results
            filters.append('module')

        cache_key, extra_modules = cache.cache_context(buf.name, context, src,
                                                       self.extra_path)
        cached = cache.retrieve(cache_key)
        if cached and not cached.refresh:
            modules = cached.modules
            if all([filename in modules for filename in extra_modules]) \
                    and all([utils.file_mtime(filename) == mtime
                             for filename, mtime in modules.items()]):
                # The cache is still valid
                refresh = False

        if cache_key and (cache_key[-1] in ('dot', 'vars', 'import', 'import~') or
                          (cached and cache_key[-1] == 'package' and
                           not len(cached.modules))):
            # Always refresh scoped variables and module imports.  Additionally
            # refresh cached items that did not have associated module files.
            refresh = True

        # Extra options to pass to the server.
        options = {
            'cwd': context.get('cwd'),
            'extra_path': self.extra_path,
            'runtimepath': context.get('runtimepath'),
        }

        if (not cached or refresh) and cache_key and cache_key[-1] == 'package':
            # Create a synthetic completion for a module import as a fallback.
            synthetic_src = ['import {0}; {0}.'.format(cache_key[0])]
            options.update({
                'synthetic': {
                    'src': synthetic_src,
                    'line': 1,
                    'col': len(synthetic_src[0]),
                }
            })

        if not cached:
            wait = True

        # Note: This waits a very short amount of time to give the server or
        # cache a chance to reply.  If there's no reply during this period,
        # empty results are returned and we defer to deoplete's async refresh.
        # The current request's async status is tracked in `_async_keys`.
        # If the async cache result is older than 5 seconds, the completion
        # request goes back to the default behavior of attempting to refresh as
        # needed by the `refresh` and `wait` variables above.
        self.debug('Key: %r, Refresh: %r, Wait: %r, Async: %r', cache_key,
                   refresh, wait, cache_key in self._async_keys)

        context['is_async'] = cache_key in self._async_keys
        if context['is_async']:
            if not cached:
                self.debug('[async] waiting for completions: %r', cache_key)
                return []
            else:
                self._async_keys.remove(cache_key)
                context['is_async'] = False
                if time.time() - cached.time < 5:
                    self.debug('[async] finished: %r', cache_key)
                    return self.finalize_cached(cache_key, filters, cached)
                else:
                    self.debug('[async] outdated: %r', cache_key)

        if cache_key and (not cached or refresh):
            n = time.time()
            wait_complete = False
            worker.work_queue.put((cache_key, extra_modules, '\n'.join(src),
                                   line, col, str(buf.name), options))
            while wait and time.time() - n < 0.25:
                cached = cache.retrieve(cache_key)
                if cached and cached.time >= n:
                    self.debug('Got updated cache, stopped waiting.')
                    wait_complete = True
                    break
                time.sleep(0.01)

            if wait and not wait_complete:
                self._async_keys.add(cache_key)
                context['is_async'] = True
                self.debug('[async] deferred: %r', cache_key)
                return []

        if refresh_boilerplate:
            # This should only occur the first time completions happen.
            # Refresh the boilerplate to ensure it's always up to date (just in
            # case).
            self.debug('Refreshing boilerplate')
            worker.work_queue.put((('boilerplate~',), [], '', 1, 0, '', None))

        return self.finalize_cached(cache_key, filters, cached)
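Examples #1 and #4 replace the two-second block with a deferred path: the key is parked in `_async_keys`, deoplete keeps polling while `is_async` is set, and a cached result older than five seconds is treated as stale. That decision logic reduces to a small function; the name, return values, and `discard` call below are illustrative, not the plugin's API.

import time

STALE_AFTER = 5  # seconds, per the comment in Example #4


def resolve_async(cache_key, cached, async_keys, now=None):
    """Return 'wait' (keep polling), 'finish' (hand back the cached
    result), or 'retry' (fall through to a normal refresh)."""
    now = time.time() if now is None else now
    if cache_key not in async_keys:
        return 'retry'             # not a deferred request
    if cached is None:
        return 'wait'              # still nothing; stay in async mode
    async_keys.discard(cache_key)  # a result arrived; leave async mode
    if now - cached.time < STALE_AFTER:
        return 'finish'
    return 'retry'                 # result is outdated; refresh instead


async_keys = {('test.py', 'dot')}
print(resolve_async(('test.py', 'dot'), None, async_keys))  # -> 'wait'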
Example #5
    def gather_candidates(self, context):
        refresh_boilerplate = False
        if not self.boilerplate:
            bp = cache.retrieve(('boilerplate~',))
            if bp:
                self.boilerplate = bp.completions[:]
                refresh_boilerplate = True
            else:
                # This should be the first time any completion has happened,
                # so `wait` will be True.
                worker.work_queue.put((('boilerplate~',), [], '', 1, 0, '', None))

        line = context['position'][1]
        col = context['complete_position']
        buf = self.vim.current.buffer
        src = buf[:]

        extra_modules = []
        cache_key = None
        cached = None
        refresh = True
        wait = False

        # Inclusion filters for the results
        filters = []

        if re.match(r'^\s*(from|import)\s+', context['input']) \
                and not re.match(r'^\s*from\s+\S+\s+', context['input']):
            # If starting an import, only show module results
            filters.append('module')

        cache_key, extra_modules = cache.cache_context(buf.name, context, src,
                                                       self.extra_path)
        cached = cache.retrieve(cache_key)
        if cached and not cached.refresh:
            modules = cached.modules
            if all([filename in modules for filename in extra_modules]) \
                    and all([utils.file_mtime(filename) == mtime
                             for filename, mtime in modules.items()]):
                # The cache is still valid
                refresh = False

        if cache_key and (cache_key[-1] in ('dot', 'vars', 'import', 'import~') or
                          (cached and cache_key[-1] == 'package' and
                           not len(cached.modules))):
            # Always refresh scoped variables and module imports.  Additionally
            # refresh cached items that did not have associated module files.
            refresh = True

        # Extra options to pass to the server.
        options = {
            'cwd': context.get('cwd'),
            'extra_path': self.extra_path,
            'runtimepath': context.get('runtimepath'),
        }

        if (not cached or refresh) and cache_key and cache_key[-1] == 'package':
            # Create a synthetic completion for a module import as a fallback.
            synthetic_src = ['from {} import '.format(cache_key[0])]
            options.update({
                'synthetic': {
                    'src': synthetic_src,
                    'line': 1,
                    'col': len(synthetic_src[0]),
                }
            })

        if cached is None:
            wait = True

        self.debug('Key: %r, Refresh: %r, Wait: %r', cache_key, refresh, wait)
        if cache_key and (not cached or refresh):
            n = time.time()
            worker.work_queue.put((cache_key, extra_modules, '\n'.join(src),
                                   line, col, str(buf.name), options))
            while wait and time.time() - n < 2:
                cached = cache.retrieve(cache_key)
                if cached and cached.time >= n:
                    self.debug('Stopped waiting')
                    break
                time.sleep(0.01)

        if refresh_boilerplate:
            # This should only occur the first time completions happen.
            # Refresh the boilerplate to ensure it's always up to date (just in
            # case).
            self.debug('Refreshing boilerplate')
            worker.work_queue.put((('boilerplate~',), [], '', 1, 0, '', None))

        if cached:
            if cached.completions is None:
                out = self.mix_boilerplate([])
            elif cache_key[-1] == 'vars':
                out = self.mix_boilerplate(cached.completions)
            else:
                out = cached.completions
            if filters:
                out = (x for x in out if x['type'] in filters)
            return [self.finalize(x) for x in sorted(out, key=sort_key)]
        return []
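The tail of Example #5 filters candidates by type and returns them sorted. Isolated, that step looks like the sketch below; the 'type' key matches Example #5, while the key function is an assumed stand-in for the examples' module-level sort_key, which is not shown.

def finalize_candidates(completions, filters):
    """Keep only candidates whose type is allowed (e.g. ['module']
    while typing an import statement), then sort them."""
    if filters:
        completions = [c for c in completions if c.get('type') in filters]
    # Assumed ordering: group by type, then alphabetically by word.
    return sorted(completions,
                  key=lambda c: (c.get('type', ''), c.get('word', '').lower()))


cands = [{'word': 'os', 'type': 'module'},
         {'word': 'x', 'type': 'statement'}]
print(finalize_candidates(cands, ['module']))  # keeps only the module entry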