Example #1
    def on_init(self, context):
        vars = context['vars']

        self.statement_length = vars.get(
            'deoplete#sources#jedi#statement_length', 0)
        self.use_short_types = vars.get('deoplete#sources#jedi#short_types',
                                        False)
        self.show_docstring = vars.get('deoplete#sources#jedi#show_docstring',
                                       False)
        # Only one worker is really needed since deoplete-jedi has a pretty
        # aggressive cache.
        # Two workers may be needed if working with very large source files.
        self.worker_threads = vars.get('deoplete#sources#jedi#worker_threads',
                                       1)
        # Hard coded python interpreter location
        self.python_path = vars.get('deoplete#sources#jedi#python_path', '')
        self.debug_enabled = vars.get('deoplete#sources#jedi#debug_enabled',
                                      False)

        self.workers_started = False
        self.boilerplate = []  # Completions that are included in all results

        if not self.workers_started:
            if self.python_path and 'VIRTUAL_ENV' not in os.environ:
                cache.python_path = self.python_path
            worker.start(max(1, self.worker_threads), self.statement_length,
                         self.use_short_types, self.show_docstring,
                         self.debug_enabled, self.python_path)
            cache.start_background(worker.comp_queue)
            self.workers_started = True
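
Nothing vim-specific is needed to see how these defaults resolve: on_init only reads from context['vars'], which deoplete fills with the user's g: variables. A minimal sketch with a hypothetical settings dict (the sample value is made up):

    # Sketch only: a fake context in the shape on_init expects.
    context = {'vars': {'deoplete#sources#jedi#show_docstring': True}}

    vars = context['vars']   # mirrors the snippet above; shadows the builtin only locally
    statement_length = vars.get('deoplete#sources#jedi#statement_length', 0)
    show_docstring = vars.get('deoplete#sources#jedi#show_docstring', False)

    print(statement_length)  # 0    -- key absent, so the default applies
    print(show_docstring)    # True -- overridden by the user setting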
Example #2
    def on_init(self, context):
        vars = context['vars']

        self.statement_length = vars.get(
            'deoplete#sources#jedi#statement_length', 0)
        self.server_timeout = vars.get(
            'deoplete#sources#jedi#server_timeout', 10)
        self.use_short_types = vars.get(
            'deoplete#sources#jedi#short_types', False)
        self.show_docstring = vars.get(
            'deoplete#sources#jedi#show_docstring', False)
        self.debug_server = vars.get(
            'deoplete#sources#jedi#debug_server', None)
        self.auto_imports = vars.get(
            'deoplete#sources#jedi#auto_imports', ())
        # Only one worker is really needed since deoplete-jedi has a pretty
        # aggressive cache.
        # Two workers may be needed if working with very large source files.
        self.worker_threads = vars.get(
            'deoplete#sources#jedi#worker_threads', 2)
        # Hard coded python interpreter location
        self.python_path = vars.get(
            'deoplete#sources#jedi#python_path', '')
        self.extra_path = vars.get(
            'deoplete#sources#jedi#extra_path', [])

        self.boilerplate = []  # Completions that are included in all results

        log_file = ''
        root_log = logging.getLogger('deoplete')

        if self.debug_server is not None and self.debug_server:
            self.debug_enabled = True
            if isinstance(self.debug_server, str):
                log_file = self.debug_server
            else:
                for handler in root_log.handlers:
                    if isinstance(handler, logging.FileHandler):
                        log_file = handler.baseFilename
                        break

        if not self.debug_enabled:
            child_log = root_log.getChild('jedi')
            child_log.propagate = False

        if not self.workers_started:
            if self.python_path and 'VIRTUAL_ENV' not in os.environ:
                cache.python_path = self.python_path
            worker.start(max(1, self.worker_threads), self.statement_length,
                         self.server_timeout, self.use_short_types, self.show_docstring,
                         (log_file, root_log.level), self.python_path, self.auto_imports)
            cache.start_background(worker.comp_queue)
            self.workers_started = True
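
In this version, g:deoplete#sources#jedi#debug_server drives the logging setup: a string names the log file directly, while any other truthy value borrows the file already attached to the 'deoplete' root logger. A minimal standalone sketch of that resolution (the file names here are hypothetical):

    import logging

    def resolve_log_file(debug_server, root_log):
        # String -> explicit log file path; any other truthy value -> reuse
        # the file of the first FileHandler on the root logger, as above.
        log_file = ''
        if debug_server:
            if isinstance(debug_server, str):
                log_file = debug_server
            else:
                for handler in root_log.handlers:
                    if isinstance(handler, logging.FileHandler):
                        log_file = handler.baseFilename
                        break
        return log_file

    root_log = logging.getLogger('deoplete')
    root_log.addHandler(logging.FileHandler('deoplete-example.log'))

    print(resolve_log_file('jedi-debug.log', root_log))  # -> 'jedi-debug.log'
    print(resolve_log_file(True, root_log))              # -> absolute path of deoplete-example.log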
Example #3
    def on_init(self, context):
        vars = context['vars']

        self.statement_length = vars.get(
            'deoplete#sources#jedi#statement_length', 0)
        self.server_timeout = vars.get('deoplete#sources#jedi#server_timeout',
                                       10)
        self.use_short_types = vars.get('deoplete#sources#jedi#short_types',
                                        False)
        self.show_docstring = vars.get('deoplete#sources#jedi#show_docstring',
                                       False)
        self.debug_server = vars.get('deoplete#sources#jedi#debug_server',
                                     None)
        # Only one worker is really needed since deoplete-jedi has a pretty
        # aggressive cache.
        # Two workers may be needed if working with very large source files.
        self.worker_threads = vars.get('deoplete#sources#jedi#worker_threads',
                                       2)
        # Hard coded python interpreter location
        self.python_path = vars.get('deoplete#sources#jedi#python_path', '')
        self.extra_path = vars.get('deoplete#sources#jedi#extra_path', [])

        self.boilerplate = []  # Completions that are included in all results

        log_file = ''
        root_log = logging.getLogger('deoplete')

        if self.debug_server is not None and self.debug_server:
            self.debug_enabled = True
            if isinstance(self.debug_server, str):
                log_file = self.debug_server
            else:
                for handler in root_log.handlers:
                    if isinstance(handler, logging.FileHandler):
                        log_file = handler.baseFilename
                        break

        if not self.debug_enabled:
            child_log = root_log.getChild('jedi')
            child_log.propagate = False

        if not self.workers_started:
            if self.python_path and 'VIRTUAL_ENV' not in os.environ:
                cache.python_path = self.python_path
            worker.start(max(1, self.worker_threads), self.statement_length,
                         self.server_timeout, self.use_short_types,
                         self.show_docstring, (log_file, root_log.level),
                         self.python_path)
            cache.start_background(worker.comp_queue)
            self.workers_started = True
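
The other side of that setup, shared by examples #2 and #3, is the quieting step: with debugging off, the 'deoplete.jedi' child logger stops propagating, so its records never reach the handlers attached to 'deoplete'. A small sketch of that behaviour (the stream handler and the NullHandler are added here only to make the demo self-contained):

    import logging

    root_log = logging.getLogger('deoplete')
    stream = logging.StreamHandler()
    stream.setFormatter(logging.Formatter('[deoplete handler] %(message)s'))
    root_log.addHandler(stream)

    child_log = root_log.getChild('jedi')    # same object as logging.getLogger('deoplete.jedi')
    child_log.warning('forwarded')           # prints with the [deoplete handler] prefix

    child_log.propagate = False
    child_log.addHandler(logging.NullHandler())  # demo only: keeps logging's last-resort output quiet
    child_log.warning('silenced')                # no longer reaches the deoplete handlers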
Example #4
    def gather_candidates(self, context):
        if not self.workers_started:
            if self.python_path and 'VIRTUAL_ENV' not in os.environ:
                cache.python = self.python_path
            worker.start(max(1, self.worker_threads), self.description_length,
                         self.use_short_types, self.show_docstring,
                         self.debug_enabled, self.python_path)
            cache.start_background(worker.comp_queue)
            self.workers_started = True

        refresh_boilerplate = False
        if not self.boilerplate:
            bp = cache.retrieve(('boilerplate~',))
            if bp:
                self.boilerplate = bp.completions[:]
                refresh_boilerplate = True
            else:
                # This should be the first time any completion happened, so
                # `wait` will be True.
                worker.work_queue.put((('boilerplate~',), [], '', 1, 0, ''))

        line = context['position'][1]
        col = context['complete_position']
        buf = self.vim.current.buffer
        src = buf[:]

        extra_modules = []
        cache_key = None
        cached = None
        refresh = True
        wait = False

        # Inclusion filters for the results
        filters = []

        if re.match(r'^\s*(from|import)\s+', context['input']) \
                and not re.match(r'^\s*from\s+\S+\s+', context['input']):
            # If starting an import, only show module results
            filters.append('module')

        cache_key, extra_modules = cache.cache_context(buf.name, context, src)
        cached = cache.retrieve(cache_key)
        if cached and not cached.refresh:
            modules = cached.modules
            if all([filename in modules for filename in extra_modules]) \
                    and all([utils.file_mtime(filename) == mtime
                             for filename, mtime in modules.items()]):
                # The cache is still valid
                refresh = False

        if cache_key and (cache_key[-1] in ('dot', 'vars', 'import', 'import~') or
                          (cached and cache_key[-1] == 'package' and
                           not len(cached.modules))):
            # Always refresh scoped variables and module imports.  Additionally
            # refresh cached items that did not have associated module files.
            refresh = True

        if (not cached or refresh) and cache_key and cache_key[-1] == 'package':
            # Make a synthetic completion for a module to guarantee the correct
            # completions.
            src = ['from {} import '.format(cache_key[0])]
            self.debug('source: %r', src)
            line = 1
            col = len(src[0])

        if cached is None:
            wait = True

        self.debug('Key: %r, Refresh: %r, Wait: %r', cache_key, refresh, wait)
        if cache_key and (not cached or refresh):
            n = time.time()
            worker.work_queue.put((cache_key, extra_modules, '\n'.join(src),
                                   line, col, str(buf.name)))
            while wait and time.time() - n < 2:
                cached = cache.retrieve(cache_key)
                if cached and cached.time >= n:
                    break
                time.sleep(0.01)

        if refresh_boilerplate:
            # This should only occur the first time completions happen.
            # Refresh the boilerplate to ensure it's always up to date (just in
            # case).
            self.debug('Refreshing boilerplate')
            worker.work_queue.put((('boilerplate~',), [], '', 1, 0, ''))

        if cached:
            if cached.completions is None:
                out = self.mix_boilerplate([])
            elif cache_key[-1] == 'vars':
                out = self.mix_boilerplate(cached.completions)
            else:
                out = cached.completions
            if filters:
                out = (x for x in out if x['$type'] in filters)
            return [x for x in sorted(out, key=sort_key)]
        return []
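
The staleness test in gather_candidates treats a cached entry as valid only if every module file it was built against still has the recorded mtime and every module the current completion references is already tracked. A minimal sketch of that check, with os.path.getmtime standing in for utils.file_mtime (assumed to be a safe wrapper around it):

    import os

    def cache_is_fresh(cached_modules, extra_modules):
        # cached_modules: filename -> mtime recorded when the entry was cached
        # extra_modules:  module files the current completion depends on
        if not all(filename in cached_modules for filename in extra_modules):
            # Built against a different set of imports; force a refresh.
            return False
        # Any tracked file whose mtime changed invalidates the entry.
        return all(os.path.getmtime(filename) == mtime
                   for filename, mtime in cached_modules.items())

    # Roughly: refresh = not cache_is_fresh(cached.modules, extra_modules)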