def remove_lang(self, lang):
    """Remove the given language from the stdlib zone.

    "lang" is the language whose stdlib CIX resources should be
        dropped from this zone.

    Queues a "remove" todo for every CIX resource in the index whose
    path matches this language's stdlib glob, applies them, and saves
    the zone.
    """
    # Fix: this used to log "update '%s' stdlibs" -- a copy-paste from
    # the update path; this method removes.
    log.debug("remove '%s' stdlibs", lang)

    # Figure out what updates need to be done...
    cix_glob = join(self.stdlibs_dir, safe_lang_from_lang(lang) + "*.cix")
    todo = []
    for area, subpath in self.res_index:
        res = AreaResource(subpath, area)
        if fnmatch.fnmatch(res.path, cix_glob):
            # Reuse `res` rather than constructing a second identical
            # AreaResource(subpath, area).
            todo.append(("remove", res))

    # ... and then do them.
    self._handle_res_todos(lang, todo)
    self.save()
def _res_ids_from_selections(self, selections):
    """Resolve selections to database resource ids.

    Returns a tuple of the database resource ids for the given
    selections and a list of selections that didn't match any
    loaded resources.
    """
    # Lazily build the selector -> [res_id] lookup table. Each
    # resource is reachable both by its normalized path and by its
    # lowercased base name (sans extension).
    if self._res_ids_from_selector_cache is None:
        cache = self._res_ids_from_selector_cache = {}
        for cix_area_path, res_data in self.res_index.items():
            cix_path = AreaResource(cix_area_path).path
            res_id = res_data[0]
            cache[normpath(normcase(cix_path))] = [res_id]
            name = splitext(basename(cix_path))[0].lower()
            cache.setdefault(name, []).append(res_id)
        log.debug("_res_ids_from_selector_cache: %r", cache)

    res_ids = []
    missing_selections = []
    selection_map = self._selection_from_selector(selections)
    for selector, selection in selection_map.items():
        hits = self._res_ids_from_selector_cache.get(selector)
        if hits is None:
            missing_selections.append(selection)
        else:
            res_ids.extend(hits)
    log.debug("_res_ids_from_selections: res_ids=%r", res_ids)
    return tuple(res_ids), missing_selections
def _update_lang_with_ver(self, lang, ver=None, progress_cb=None):
    """Import stdlib data for this lang, if necessary.

    "lang" is the language to update.
    "ver" (optional) is a specific version of the language,
        e.g. (5, 8).
    "progress_cb" (optional) is a callable that is called as
        follows to show the progress of the update:
            progress_cb(<desc>, <value>)
        where <desc> is a short string describing the current step
        and <value> is an integer between 0 and 100 indicating the
        level of completeness.
    """
    log.debug("update '%s' stdlibs", lang)

    # Figure out what updates need to be done...
    if progress_cb:
        try:
            progress_cb("Determining necessary updates...", 5)
        except:
            log.exception("error in progress_cb (ignoring)")

    # The stdlib CIX file is either versioned ("lang-X.Y.cix") or
    # unversioned ("lang.cix").
    safe_lang = safe_lang_from_lang(lang)
    if ver is None:
        cix_path = join(self.stdlibs_dir, "%s.cix" % (safe_lang, ))
    else:
        ver_str = ".".join(map(str, ver))
        cix_path = join(self.stdlibs_dir,
                        "%s-%s.cix" % (safe_lang, ver_str))

    # Need to acquire db lock, as the indexer and main thread may both be
    # calling into _update_lang_with_ver at the same time.
    self.db.acquire_lock()
    try:
        todo = []
        res = AreaResource(cix_path, "ci-pkg-dir")
        if res.area_path not in self.res_index:
            # Not indexed yet: schedule an add.
            todo.append(("add", res))
        else:
            last_updated = self.res_index[res.area_path]
            mtime = os.stat(cix_path).st_mtime
            if last_updated != mtime:  # epsilon? '>=' instead of '!='?
                todo.append(("update", res))

        # ... and then do them.
        self._handle_res_todos(lang, todo, progress_cb)
        self.save()
    finally:
        self.db.release_lock()
def update(self, selections=None, progress_cb=None):
    """Update the catalog as necessary.

    "selections" (optional) is a list of strings of the same form as
        to `.get_lib()' -- used here to filter the catalogs that we
        consider for updating.
    "progress_cb" (optional) is a callable that is called as follows
        to show the progress of the update:
            progress_cb(<desc>, <value>)
        where <desc> is a short string describing the current step
        and <value> is an integer between 0 and 100 indicating the
        level of completeness.
    """
    self._lock.acquire()
    try:
        self._have_updated_at_least_once = True

        # Figure out what updates need to be done...
        if progress_cb:
            try:
                progress_cb("Determining necessary catalog updates...", 5)
            except Exception:
                log.exception("error in progress_cb (ignoring)")

        # Checklist of indexed resources: entries are ticked off as
        # the matching on-disk catalog is seen; whatever remains at
        # the end is obsolete and gets removed.
        res_name_from_res_path = dict(
            (p, v[2]) for p, v in self.res_index.items())
        todos = []
        log.info("updating %s: %d catalog dir(s)", self,
                 len(self.catalog_dirs))
        for catalog_info in self.avail_catalogs(selections):
            cix_path = catalog_info["cix_path"]
            res = AreaResource(cix_path)
            # Check that the update-time is the mtime (i.e. up-to-date).
            try:
                res_id, last_updated, name, res_data \
                    = self.res_index[res.area_path]
            except KeyError:
                # Add this new CIX file.
                todos.append(("add", res, catalog_info["name"]))
            else:
                mtime = os.stat(cix_path).st_mtime
                if last_updated != mtime:  # epsilon? '>=' instead of '!='?
                    # Update with newer version.
                    todos.append(("update", res, catalog_info["name"]))
                del res_name_from_res_path[res.area_path]  # tick it off

        for res_area_path, res_name in res_name_from_res_path.items():
            # Remove this obsolete CIX file.
            try:
                todos.append(
                    ("remove", AreaResource(res_area_path), res_name))
            except ValueError:
                # Skip resources in unknown areas. This is primarily to
                # allow debugging/testing (when the set of registered
                # path_areas may not include the set when running in
                # Komodo.)
                pass

        # Filter todos on selections, if any.
        # (The old unused `before = todos[:]` snapshot was dropped.)
        if selections is not None:
            selection_from_selector = self._selection_from_selector(
                selections)
            todos = [
                todo for todo in todos
                if todo[2].lower() in selection_from_selector
                or normpath(normcase(todo[1].path)) in
                selection_from_selector
            ]

        # ... and then do them.
        if not todos:
            return

        def report_progress(desc, i):
            # One place for the progress/report_event plumbing that was
            # previously triplicated across the action branches.
            # Integer arithmetic (`//`) keeps <value> an int per the
            # documented contract; under Python 3, `95 / len(todos) * i`
            # would be a float.
            if progress_cb:
                try:
                    progress_cb(desc, 5 + 95 * i // len(todos))
                except Exception:
                    log.exception("error in progress_cb (ignoring)")
            else:
                self.db.report_event(desc)

        for i, (action, res, name) in enumerate(todos):
            log.debug("%s `%s' catalog (%s)", action, name, res)
            try:
                if action == "add":
                    report_progress(
                        "Adding '%s' API catalog" % basename(res.subpath),
                        i)
                    self._add_res(res)
                elif action == "remove":
                    report_progress(
                        "Removing '%s' API catalog"
                        % basename(res.subpath), i)
                    self._remove_res(res)
                elif action == "update":
                    report_progress(
                        "Updating '%s' API catalog"
                        % basename(res.subpath), i)
                    # XXX Bad for filesystem. Change this to do it
                    # more intelligently if possible.
                    self._remove_res(res)
                    self._add_res(res)
            except DatabaseError as ex:
                # A failed catalog is skipped, not fatal.
                # (log.warn is deprecated; use warning with lazy args.)
                log.warning("%s (skipping)", ex)

        if progress_cb:
            try:
                progress_cb("Saving catalog indices...", 95)
            except Exception:
                log.exception("error in progress_cb (ignoring)")
        self._res_ids_from_selector_cache = None  # invalidate this cache
        if self._res_index is not None:
            self.db.save_pickle(join(self.base_dir, "res_index"),
                                self._res_index)
        if self._blob_index is not None:
            self.db.save_pickle(join(self.base_dir, "blob_index"),
                                self._blob_index)
        if self._toplevelname_index is not None:
            self.db.save_pickle(join(self.base_dir, "toplevelname_index"),
                                self._toplevelname_index)
        if self._toplevelprefix_index is not None:
            self.db.save_pickle(
                join(self.base_dir, "toplevelprefix_index"),
                self._toplevelprefix_index)
    finally:
        self._lock.release()