def _add_res(self, res, lang, name, ver):
    """Begin adding the stdlib CIX resource `res` for `lang` to the db.

    res -- the CIX resource to load (has a `.path` attribute)
    lang -- the language this stdlib is for
    name -- the stdlib's name (used in log messages)
    ver -- the stdlib version (unused in this visible fragment)

    NOTE(review): this appears to be a truncated duplicate of a fuller
    `_add_res` in this file -- it parses the CIX file and stops.  On a
    parse failure the stdlib is skipped with a warning.

    Bug fixed: `except ET.XMLParserError, ex:` is Python-2-only syntax
    (a SyntaxError on Python 3); use `except ... as ex:` instead.
    """
    log.debug("%s stdlibs: add %s", lang, res)
    cix_path = res.path
    try:
        tree = tree_from_cix_path(cix_path)
    except ET.XMLParserError as ex:
        log.warn("could not load %s stdlib from `%s' (%s): skipping",
                 name, cix_path, ex)
        return
def _add_res(self, res, lang, name, ver):
    """Add the stdlib defined by CIX resource `res` to the database.

    res -- the CIX resource (has `.path` and `.area_path` attributes)
    lang -- the language of this stdlib
    name -- the stdlib name; also used as its directory name under
        'db/stdlibs/'
    ver -- the stdlib version (unused in this method body)

    On a CIX parse failure the stdlib is skipped with a warning.
    Otherwise this writes one '<dbfile>.blob' file per blob plus
    pickled 'blob_index', 'toplevelname_index' and
    'toplevelprefix_index' files into 'db/stdlibs/<name>/', and
    records the CIX file's mtime in `self.res_index`.

    Bug fixed: the "could not remove" log.error call had three '%s'
    placeholders but only two arguments -- the caught exception is now
    passed as the third argument.
    """
    log.debug("%s stdlibs: add %s", lang, res)
    cix_path = res.path
    try:
        tree = tree_from_cix_path(cix_path)
    except ET.XMLParserError as ex:
        log.warn("could not load %s stdlib from `%s' (%s): skipping",
                 name, cix_path, ex)
        return

    dbdir = join(self.base_dir, name)
    if exists(dbdir):
        log.warn("`db/stdlibs/%s' already exists and should not: "
                 "removing it", name)
        try:
            rmdir(dbdir)
        except OSError as ex:
            # NOTE(review): despite saying "skipping", control falls
            # through and keeps the existing dir -- confirm intent.
            log.error("could not remove `%s' to create %s stdlib in "
                      "database (%s): skipping", dbdir, name, ex)
    if not exists(dbdir):
        os.makedirs(dbdir)

    # Create 'blob_index' and 'toplevel*_index' and write out
    # '.blob' files.
    LEN_PREFIX = self.db.LEN_PREFIX
    blob_index = {}            # {blobname -> dbfile}
    toplevelname_index = {}    # {ilk -> toplevelname -> blobnames}
    toplevelprefix_index = {}  # {ilk -> prefix -> toplevelnames}
    for blob in tree.findall("file/scope"):
        assert lang == blob.get("lang"), \
            "Adding %s resource %s to %s blob" % (
                lang, res, blob.get("lang"))
        blobname = blob.get("name")
        dbfile = self.db.bhash_from_blob_info(cix_path, lang, blobname)
        blob_index[blobname] = dbfile
        ET.ElementTree(blob).write(join(dbdir, dbfile + ".blob"))
        for toplevelname, elem in blob.names.items():
            if "__local__" in elem.get("attributes", "").split():
                # This name is internal to the stdlib.
                continue
            ilk = elem.get("ilk") or elem.tag
            bft = toplevelname_index.setdefault(ilk, {})
            if toplevelname not in bft:
                bft[toplevelname] = set([blobname])
            else:
                bft[toplevelname].add(blobname)
            prefix = toplevelname[:LEN_PREFIX]
            tfp = toplevelprefix_index.setdefault(ilk, {})
            if prefix not in tfp:
                tfp[prefix] = set([toplevelname])
            else:
                tfp[prefix].add(toplevelname)

    self.db.save_pickle(join(dbdir, "blob_index"), blob_index)
    self.db.save_pickle(join(dbdir, "toplevelname_index"),
                        toplevelname_index)
    self.db.save_pickle(join(dbdir, "toplevelprefix_index"),
                        toplevelprefix_index)

    mtime = os.stat(cix_path).st_mtime
    self.res_index[res.area_path] = mtime
def _add_res(self, res):
    """Add catalog CIX resource `res` to this zone.

    Parses the CIX file at `res.path`; on a parse error the resource
    is skipped with a warning.  For every blob in the file this writes
    a '<dbfile>.blob' file under the per-language db dir and updates
    `self.blob_index`, `self.toplevelname_index` and
    `self.toplevelprefix_index`.  Finally the resource is recorded in
    `self.res_index` keyed on `res.area_path`.

    Raises DatabaseError if a blob has no 'lang' attribute.
    """
    cix_path = res.path
    try:
        tree = tree_from_cix_path(cix_path)
    except ET.XMLParserError as ex:
        log.warn("could not load `%s' into catalog (skipping): %s",
                 cix_path, ex)
        return

    LEN_PREFIX = self.db.LEN_PREFIX
    res_id = self._new_res_id()
    res_data = {}  # {lang -> blobname -> ilk -> toplevelnames}
    name = tree.get("name") or splitext(basename(cix_path))[0]

    for blob in tree.findall("file/scope"):
        lang = blob.get("lang")
        blobname = blob.get("name")
        if not lang:
            raise DatabaseError("add `%s': no 'lang' attr on %r"
                                % (res, blob))

        # Gather {ilk -> toplevelnames} for this blob into 'res_data'.
        toplevelnames_from_ilk = res_data.setdefault(
            lang, {}).setdefault(blobname, {})
        if lang in self.db.import_everything_langs:
            for toplevelname, elem in six.iteritems(blob.names):
                ilk = elem.get("ilk") or elem.tag
                toplevelnames_from_ilk.setdefault(
                    ilk, set()).add(toplevelname)

        # Fold this blob's toplevel names into the query indices:
        # toplevelname_index:
        #   {lang -> ilk -> toplevelname -> res_id -> blobnames}
        # toplevelprefix_index:
        #   {lang -> ilk -> prefix -> res_id -> toplevelnames}
        name_idx_for_ilk = self.toplevelname_index.setdefault(lang, {})
        prefix_idx_for_ilk = self.toplevelprefix_index.setdefault(lang, {})
        for ilk, toplevelnames in six.iteritems(toplevelnames_from_ilk):
            name_idx = name_idx_for_ilk.setdefault(ilk, {})
            prefix_idx = prefix_idx_for_ilk.setdefault(ilk, {})
            for toplevelname in toplevelnames:
                name_idx.setdefault(toplevelname, {}) \
                    .setdefault(res_id, set()).add(blobname)
                prefix_idx.setdefault(toplevelname[:LEN_PREFIX], {}) \
                    .setdefault(res_id, set()).add(toplevelname)

        # Register the blob in 'blob_index'.
        dbfile_and_res_id_from_blobname \
            = self.blob_index.setdefault(lang, {})
        assert blobname not in dbfile_and_res_id_from_blobname, \
            ("codeintel: %s %r blob in `%s' collides "
             "with existing %s %r blob (from res_id %r) in catalog: "
             "(XXX haven't decided how to deal with that yet)"
             % (lang, blobname, cix_path, lang, blobname,
                dbfile_and_res_id_from_blobname[blobname][1]))
        dbfile = self.db.bhash_from_blob_info(cix_path, lang, blobname)
        dbfile_and_res_id_from_blobname[blobname] = (dbfile, res_id)

        # Write out this blob's '.blob' file.
        dbdir = join(self.base_dir, safe_lang_from_lang(lang))
        if not exists(dbdir):
            log.debug("fs-write: mkdir '%s'", dbdir)
            os.makedirs(dbdir)
        log.debug("fs-write: catalog %s blob '%s'", lang, dbfile)
        ET.ElementTree(blob).write(join(dbdir, dbfile + ".blob"))

    # Record the resource in 'res_index'.
    last_updated = os.stat(cix_path).st_mtime
    self.res_index[res.area_path] \
        = (res_id, last_updated, name, res_data)
def _add_res(self, res):
    """Load catalog CIX resource `res` into this catalog zone.

    Skips (with a warning) any resource whose CIX cannot be parsed.
    Each blob found in the CIX file is hashed, written out as a
    '.blob' file under the per-language db dir, and registered in the
    'blob_index' and 'toplevel*_index' structures; the resource itself
    is then recorded in 'res_index'.

    Raises DatabaseError if a blob carries no 'lang' attribute.
    """
    cix_path = res.path
    try:
        tree = tree_from_cix_path(cix_path)
    except ET.XMLParserError as ex:
        log.warn("could not load `%s' into catalog (skipping): %s",
                 cix_path, ex)
        return

    prefix_len = self.db.LEN_PREFIX
    res_id = self._new_res_id()
    res_data = {}  # {lang -> blobname -> ilk -> toplevelnames}
    name = tree.get("name") or splitext(basename(cix_path))[0]

    for blob in tree.findall("file/scope"):
        lang = blob.get("lang")
        blobname = blob.get("name")
        if not lang:
            raise DatabaseError("add `%s': no 'lang' attr on %r"
                                % (res, blob))

        # Collect this blob's toplevel names by ilk into 'res_data'.
        names_by_ilk = res_data.setdefault(lang, {}).setdefault(
            blobname, {})
        if lang in self.db.import_everything_langs:
            for toplevelname, elem in blob.names.items():
                ilk = elem.get("ilk") or elem.tag
                names_by_ilk.setdefault(ilk, set()).add(toplevelname)

        # Merge into the lookup indices:
        # toplevelname_index:
        #   {lang -> ilk -> toplevelname -> res_id -> blobnames}
        # toplevelprefix_index:
        #   {lang -> ilk -> prefix -> res_id -> toplevelnames}
        lang_name_index = self.toplevelname_index.setdefault(lang, {})
        lang_prefix_index = self.toplevelprefix_index.setdefault(lang, {})
        for ilk, toplevelnames in names_by_ilk.items():
            ilk_name_index = lang_name_index.setdefault(ilk, {})
            ilk_prefix_index = lang_prefix_index.setdefault(ilk, {})
            for toplevelname in toplevelnames:
                blobnames_by_res = ilk_name_index.setdefault(
                    toplevelname, {})
                blobnames_by_res.setdefault(res_id, set()).add(blobname)
                names_by_res = ilk_prefix_index.setdefault(
                    toplevelname[:prefix_len], {})
                names_by_res.setdefault(res_id, set()).add(toplevelname)

        # Register the blob, asserting no name collision.
        dbfile_and_res_id_from_blobname \
            = self.blob_index.setdefault(lang, {})
        assert blobname not in dbfile_and_res_id_from_blobname, \
            ("codeintel: %s %r blob in `%s' collides "
             "with existing %s %r blob (from res_id %r) in catalog: "
             "(XXX haven't decided how to deal with that yet)"
             % (lang, blobname, cix_path, lang, blobname,
                dbfile_and_res_id_from_blobname[blobname][1]))
        dbfile = self.db.bhash_from_blob_info(cix_path, lang, blobname)
        dbfile_and_res_id_from_blobname[blobname] = (dbfile, res_id)

        # Persist the blob to disk.
        dbdir = join(self.base_dir, safe_lang_from_lang(lang))
        if not exists(dbdir):
            log.debug("fs-write: mkdir '%s'", dbdir)
            os.makedirs(dbdir)
        log.debug("fs-write: catalog %s blob '%s'", lang, dbfile)
        ET.ElementTree(blob).write(join(dbdir, dbfile + ".blob"))

    # Finally, record the resource itself.
    last_updated = os.stat(cix_path).st_mtime
    self.res_index[res.area_path] \
        = (res_id, last_updated, name, res_data)
def _add_res(self, res, lang, name, ver):
    """Add the stdlib CIX resource `res` to 'db/stdlibs/<name>/'.

    res -- the CIX resource (has `.path` and `.area_path` attributes)
    lang -- the language of this stdlib; every blob in the CIX file
        must carry the same 'lang'
    name -- the stdlib name, used as its db directory name
    ver -- the stdlib version (unused in this method body)

    On a CIX parse failure the stdlib is skipped with a warning.
    Writes per-blob '.blob' files and pickled 'blob_index',
    'toplevelname_index' and 'toplevelprefix_index' files, then
    records the CIX mtime in `self.res_index`.

    Bug fixed: the "could not remove" log.error call supplied two
    arguments for three '%s' placeholders; the caught OSError is now
    passed as the missing third argument.
    """
    log.debug("%s stdlibs: add %s", lang, res)
    cix_path = res.path
    try:
        tree = tree_from_cix_path(cix_path)
    except ET.XMLParserError as ex:
        log.warn("could not load %s stdlib from `%s' (%s): skipping",
                 name, cix_path, ex)
        return

    dbdir = join(self.base_dir, name)
    if exists(dbdir):
        log.warn(
            "`db/stdlibs/%s' already exists and should not: "
            "removing it", name)
        try:
            rmdir(dbdir)
        except OSError as ex:
            # NOTE(review): the message says "skipping" but execution
            # continues with the existing dir -- confirm intent.
            log.error(
                "could not remove `%s' to create %s stdlib in "
                "database (%s): skipping", dbdir, name, ex)
    if not exists(dbdir):
        os.makedirs(dbdir)

    # Create 'blob_index' and 'toplevel*_index' and write out
    # '.blob' files.
    LEN_PREFIX = self.db.LEN_PREFIX
    blob_index = {}            # {blobname -> dbfile}
    toplevelname_index = {}    # {ilk -> toplevelname -> blobnames}
    toplevelprefix_index = {}  # {ilk -> prefix -> toplevelnames}
    for blob in tree.findall("file/scope"):
        assert lang == blob.get("lang"), \
            "Adding %s resource %s to %s blob" % (
                lang, res, blob.get("lang"))
        blobname = blob.get("name")
        dbfile = self.db.bhash_from_blob_info(cix_path, lang, blobname)
        blob_index[blobname] = dbfile
        ET.ElementTree(blob).write(join(dbdir, dbfile + ".blob"))
        for toplevelname, elem in blob.names.items():
            if "__local__" in elem.get("attributes", "").split():
                # This name is internal to the stdlib.
                continue
            ilk = elem.get("ilk") or elem.tag
            bft = toplevelname_index.setdefault(ilk, {})
            if toplevelname not in bft:
                bft[toplevelname] = set([blobname])
            else:
                bft[toplevelname].add(blobname)
            prefix = toplevelname[:LEN_PREFIX]
            tfp = toplevelprefix_index.setdefault(ilk, {})
            if prefix not in tfp:
                tfp[prefix] = set([toplevelname])
            else:
                tfp[prefix].add(toplevelname)

    self.db.save_pickle(join(dbdir, "blob_index"), blob_index)
    self.db.save_pickle(join(dbdir, "toplevelname_index"),
                        toplevelname_index)
    self.db.save_pickle(join(dbdir, "toplevelprefix_index"),
                        toplevelprefix_index)

    mtime = os.stat(cix_path).st_mtime
    self.res_index[res.area_path] = mtime