def async_index_view(file_name, databases, project_folders):
    """Index one saved file against every configured symbol database.

    For each database, the file is indexed only if it lies under one of
    the database's roots (or a project folder, when enabled) and matches
    the optional filename pattern.

    :param file_name: absolute path of the file to index.
    :param databases: list of database config dicts ('roots', 'pattern',
        'include_project_folders', ...).
    :param project_folders: folders of the current project, appended to
        the roots when 'include_project_folders' is set.
    """
    norm_file_name = os.path.normcase(file_name)
    symdb.set_databases(databases)
    for dbi, database in enumerate(databases):
        # Work on a copy: extending the list stored in the config dict
        # would accumulate project_folders across repeated calls.
        roots = list(database.get('roots', []))
        if database.get('include_project_folders'):
            roots.extend(project_folders)
        if roots:
            for root in roots:
                root = os.path.normcase(
                    os.path.normpath(os.path.expandvars(root)))
                if norm_file_name.startswith(root + os.sep):
                    break
            else:
                # File is outside every root of this database.
                continue
        pattern = database.get('pattern')
        if pattern and not re.search(pattern, file_name):
            continue
        processed = symdb.process_file(dbi, file_name)
        # process_file may return False due to syntax error, but still
        # update last read time, so commit anyway.
        symdb.commit()
        if processed:
            ui_worker.schedule(status_message, 'Indexed ' + file_name)
def async_index_view(file_name, databases, project_folders):
    """Index one saved file against every configured symbol database.

    For each database, the file is indexed only if it lies under one of
    the database's roots (or a project folder, when enabled) and matches
    the optional filename pattern.

    :param file_name: absolute path of the file to index.
    :param databases: list of database config dicts ('roots', 'pattern',
        'include_project_folders', ...).
    :param project_folders: folders of the current project, appended to
        the roots when 'include_project_folders' is set.
    """
    norm_file_name = os.path.normcase(file_name)
    symdb.set_databases(databases)
    for dbi, database in enumerate(databases):
        # Work on a copy: extending the list stored in the config dict
        # would accumulate project_folders across repeated calls.
        roots = list(database.get('roots', []))
        if database.get('include_project_folders'):
            roots.extend(project_folders)
        if roots:
            for root in roots:
                root = os.path.normcase(
                    os.path.normpath(os.path.expandvars(root)))
                if norm_file_name.startswith(root + os.sep):
                    break
            else:
                # File is outside every root of this database.
                continue
        pattern = database.get('pattern')
        if pattern and not re.search(pattern, file_name):
            continue
        processed = symdb.process_file(dbi, file_name)
        # process_file may return False due to syntax error, but still
        # update last read time, so commit anyway.
        symdb.commit()
        if processed:
            ui_worker.schedule(status_message, 'Indexed ' + file_name)
def async_process_files_inner(cls, databases, project_folders, rebuild):
    """Walk every database root and (re)index all Python source files.

    Honors ``cls.index_in_progress`` as a cancellation flag between
    files. Commits once per database rather than per file for speed.

    :param databases: list of database config dicts ('path', 'roots',
        'pattern', 'include_project_folders', ...).
    :param project_folders: folders of the current project, appended to
        the roots when 'include_project_folders' is set.
    :param rebuild: when True, delete the database files first and build
        from scratch.
    """
    if rebuild:
        # Helper process should not reference files to be deleted.
        symdb._cleanup()
        # Simply remove associated database files if build from scratch
        # is requested.
        for database in databases:
            try:
                os.remove(os.path.expandvars(database['path']))
            except OSError:
                # Specified database file may not yet exist or is
                # inaccessible.
                pass
    symdb.set_databases(databases)
    for dbi, database in enumerate(databases):
        # Expand into a new list: writing expanded values back into the
        # config dict (and extending it with project folders) would
        # corrupt the caller's configuration across calls.
        roots = [os.path.expandvars(root)
                 for root in database.get('roots', [])]
        if database.get('include_project_folders'):
            roots.extend(project_folders)
        # Loop-invariant: look the pattern up once, not per file.
        pattern = database.get('pattern')
        symdb.begin_file_processing(dbi)
        for symbol_root in roots:
            for root, dirs, files in os.walk(symbol_root):
                for file_name in files:
                    if not cls.index_in_progress:
                        symdb.end_file_processing(dbi)
                        symdb.commit()
                        ui_worker.schedule(status_message,
                                           'Indexing canceled')
                        return
                    if not is_python_source_file(file_name):
                        continue
                    path = os.path.abspath(os.path.join(root, file_name))
                    if not pattern or re.search(pattern, path):
                        if symdb.process_file(dbi, path):
                            ui_worker.schedule(status_message,
                                               'Indexed ' + path)
                        # Do not commit after each file, since it's
                        # very slow.
        symdb.end_file_processing(dbi)
        symdb.commit()
    ui_worker.schedule(status_message, 'Done indexing')
def async_process_files_inner(cls, databases, project_folders, rebuild):
    """Walk every database root and (re)index all Python source files.

    Honors ``cls.index_in_progress`` as a cancellation flag between
    files. Commits once per database rather than per file for speed.

    :param databases: list of database config dicts ('path', 'roots',
        'pattern', 'include_project_folders', ...).
    :param project_folders: folders of the current project, appended to
        the roots when 'include_project_folders' is set.
    :param rebuild: when True, delete the database files first and build
        from scratch.
    """
    if rebuild:
        # Helper process should not reference files to be deleted.
        symdb._cleanup()
        # Simply remove associated database files if build from scratch
        # is requested.
        for database in databases:
            try:
                os.remove(os.path.expandvars(database['path']))
            except OSError:
                # Specified database file may not yet exist or is
                # inaccessible.
                pass
    symdb.set_databases(databases)
    for dbi, database in enumerate(databases):
        # Expand into a new list: writing expanded values back into the
        # config dict (and extending it with project folders) would
        # corrupt the caller's configuration across calls.
        roots = [os.path.expandvars(root)
                 for root in database.get('roots', [])]
        if database.get('include_project_folders'):
            roots.extend(project_folders)
        # Loop-invariant: look the pattern up once, not per file.
        pattern = database.get('pattern')
        symdb.begin_file_processing(dbi)
        for symbol_root in roots:
            for root, dirs, files in os.walk(symbol_root):
                for file_name in files:
                    if not cls.index_in_progress:
                        symdb.end_file_processing(dbi)
                        symdb.commit()
                        ui_worker.schedule(status_message,
                                           'Indexing canceled')
                        return
                    if not is_python_source_file(file_name):
                        continue
                    path = os.path.abspath(os.path.join(root, file_name))
                    if not pattern or re.search(pattern, path):
                        if symdb.process_file(dbi, path):
                            ui_worker.schedule(status_message,
                                               'Indexed ' + path)
                        # Do not commit after each file, since it's
                        # very slow.
        symdb.end_file_processing(dbi)
        symdb.commit()
    ui_worker.schedule(status_message, 'Done indexing')
def async_search(databases):
    """Query the symbol databases for occurrences and hand the results
    to the UI thread via ``handle_results``.

    :param databases: list of database config dicts to search.
    """
    symdb.set_databases(databases)
    ui_worker.schedule(handle_results, symdb.query_occurrences(symbol))
def async_search(databases):
    """Query the symbol databases for occurrences and hand the results
    to the UI thread via ``handle_results``.

    :param databases: list of database config dicts to search.
    """
    symdb.set_databases(databases)
    ui_worker.schedule(handle_results, symdb.query_occurrences(symbol))