Example #1
 def _upgrade_wipe_db(self, curr_ver, result_ver):
     """Sometimes it is justified to just wipe the DB and start over."""
     assert result_ver == self.VERSION
     if exists(self.base_dir):
         log.debug("fs-write: wipe db")
         rmdir(self.base_dir)
     self.create()
Example #2
 def _upgrade_wipe_db_langzones(self, curr_ver, result_ver):
     for lang in self._gen_langs_in_db():
         safe_lang = safe_lang_from_lang(lang)
         langzone_dir = join(self.base_dir, "db", safe_lang)
         if exists(langzone_dir):
             log.debug("fs-write: wipe db/%s", safe_lang)
             rmdir(langzone_dir)
     with open(join(self.base_dir, "VERSION"), 'w') as f:
         f.write(result_ver)
Example #3
    def _add_res(self, res, lang, name, ver):
        log.debug("%s stdlibs: add %s", lang, res)
        cix_path = res.path
        try:
            tree = tree_from_cix_path(cix_path)
        except ET.XMLParserError as ex:
            log.warn("could not load %s stdlib from `%s' (%s): skipping", name, cix_path, ex)
            return

        dbdir = join(self.base_dir, name)
        if exists(dbdir):
            log.warn("`db/stdlibs/%s' already exists and should not: " "removing it", name)
            try:
                rmdir(dbdir)
            except OSError as ex:
                log.error("could not remove `%s' to create %s stdlib in " "database (%s): skipping", dbdir, name)
        if not exists(dbdir):
            os.makedirs(dbdir)

        # Create 'blob_index' and 'toplevel*_index' and write out
        # '.blob' file.
        LEN_PREFIX = self.db.LEN_PREFIX
        is_hits_from_lpath_lang = lang in self.db.import_everything_langs
        blob_index = {}  # {blobname -> dbfile}
        toplevelname_index = {}  # {ilk -> toplevelname -> blobnames}
        toplevelprefix_index = {}  # {ilk -> prefix -> toplevelnames}
        for blob in tree.findall("file/scope"):
            assert lang == blob.get("lang"), "Adding %s resource %s to %s blob" % (lang, res, blob.get("lang"))
            blobname = blob.get("name")
            dbfile = self.db.bhash_from_blob_info(cix_path, lang, blobname)
            blob_index[blobname] = dbfile
            ET.ElementTree(blob).write(join(dbdir, dbfile + ".blob"))
            for toplevelname, elem in blob.names.items():
                if "__local__" in elem.get("attributes", "").split():
                    # this is internal to the stdlib
                    continue
                ilk = elem.get("ilk") or elem.tag
                bft = toplevelname_index.setdefault(ilk, {})
                if toplevelname not in bft:
                    bft[toplevelname] = set([blobname])
                else:
                    bft[toplevelname].add(blobname)
                prefix = toplevelname[:LEN_PREFIX]
                tfp = toplevelprefix_index.setdefault(ilk, {})
                if prefix not in tfp:
                    tfp[prefix] = set([toplevelname])
                else:
                    tfp[prefix].add(toplevelname)

        self.db.save_pickle(join(dbdir, "blob_index"), blob_index)
        self.db.save_pickle(join(dbdir, "toplevelname_index"), toplevelname_index)
        self.db.save_pickle(join(dbdir, "toplevelprefix_index"), toplevelprefix_index)

        mtime = os.stat(cix_path).st_mtime
        self.res_index[res.area_path] = mtime
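
A note on the three dictionaries built above: blob_index maps blob names to their db file names, toplevelname_index groups top-level symbol names by ilk, and toplevelprefix_index groups the first LEN_PREFIX characters of those names by ilk. A minimal standalone sketch of the same indexing shape, using made-up (blobname, toplevelname, ilk) triples instead of a parsed CIX tree and an assumed prefix length of 3:

    import pprint

    LEN_PREFIX = 3  # assumed here; the real value comes from self.db.LEN_PREFIX

    # Made-up symbols standing in for the <scope> elements of a CIX file.
    symbols = [
        ("os", "getcwd", "function"),
        ("os", "sep", "variable"),
        ("sys", "settrace", "function"),
    ]

    toplevelname_index = {}    # {ilk -> toplevelname -> set of blobnames}
    toplevelprefix_index = {}  # {ilk -> prefix -> set of toplevelnames}
    for blobname, toplevelname, ilk in symbols:
        toplevelname_index.setdefault(ilk, {}) \
            .setdefault(toplevelname, set()).add(blobname)
        toplevelprefix_index.setdefault(ilk, {}) \
            .setdefault(toplevelname[:LEN_PREFIX], set()).add(toplevelname)

    pprint.pprint(toplevelname_index)    # e.g. {'function': {'getcwd': {'os'}, ...}, ...}
    pprint.pprint(toplevelprefix_index)  # e.g. {'function': {'get': {'getcwd'}, ...}, ...}
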
Example #4
 def _remove_res(self, res, lang, name, ver):
     log.debug("%s stdlibs: remove %s", lang, res)
     del self.res_index[res.area_path]
     dbdir = join(self.base_dir, name)
     try:
         rmdir(dbdir)
     except OSError as ex:
         try:
             os.rename(dbdir, dbdir+".zombie")
         except OSError as ex2:
             log.error("could not remove %s stdlib database dir `%s' (%s): "
                       "couldn't even rename it to `%s.zombie' (%s): "
                       "giving up", name, dbdir, ex, name, ex2)
Example #5
    def reset(self, backup=True):
        """Move the given database out of the way to make way for a new one.

            "backup" (optional, default True) is a boolean indicating if
                the original database should be backed up. If so, the backup
                is $base_dir+".err".
        """
        self.acquire_lock()
        try:
            if exists(self.base_dir):
                # TODO: make this more bullet proof
                if backup:
                    err_base_dir = self.base_dir + ".err"
                    log.info("backing up db to '%s'", err_base_dir)
                    if os.path.exists(err_base_dir):
                        rmdir(err_base_dir)
                        for i in range(10):  # Try to avoid OSError from slow-deleting NTFS
                            if not os.path.exists(err_base_dir):
                                break
                            time.sleep(1)
                    if os.path.exists(err_base_dir):  # couldn't remove it
                        log.warn("couldn't remove old '%s' (skipping backup)",
                                 err_base_dir)
                        rmdir(self.base_dir)
                    else:
                        os.rename(self.base_dir, err_base_dir)
                else:
                    rmdir(self.base_dir)

            self._catalogs_zone = None
            self._stdlibs_zone = None
            self.create()
        finally:
            self.release_lock()
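
Stripped of the locking and zone bookkeeping, the backup-then-recreate pattern in reset() reduces to the sketch below; reset_dir is a hypothetical helper, and shutil.rmtree/os.makedirs stand in for the rmdir and create() calls above:

    import os
    import shutil
    import time

    def reset_dir(base_dir, backup=True):
        """Move base_dir aside (to base_dir + ".err") or delete it, then recreate it."""
        if os.path.exists(base_dir):
            if backup:
                err_dir = base_dir + ".err"
                if os.path.exists(err_dir):
                    shutil.rmtree(err_dir)
                    # Give slow-deleting filesystems (e.g. NTFS) a moment to catch up.
                    for _ in range(10):
                        if not os.path.exists(err_dir):
                            break
                        time.sleep(1)
                if os.path.exists(err_dir):  # couldn't remove the old backup
                    shutil.rmtree(base_dir)
                else:
                    os.rename(base_dir, err_dir)
            else:
                shutil.rmtree(base_dir)
        os.makedirs(base_dir)
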
Example #6
    def _upgrade_wipe_db_langs(self, curr_ver, result_ver, langs):
        for lang in langs:
            safe_lang = safe_lang_from_lang(lang)
            # stdlibs zone
            self.get_stdlibs_zone().remove_lang(lang)

            # API catalogs zone
            # TODO: CatalogsZone needs a .remove_lang(). Until then we just
            #      remove the whole thing.

            # (multi)langzone
            langzone_dir = join(self.base_dir, "db", safe_lang)
            if exists(langzone_dir):
                log.debug("fs-write: wipe db/%s", safe_lang)
                rmdir(langzone_dir)

        catalog_dir = join(self.base_dir, "db", "catalogs")
        if exists(catalog_dir):
            log.debug("fs-write: wipe db/catalogs")
            rmdir(catalog_dir)

        with open(join(self.base_dir, "VERSION"), 'w') as f:
            f.write(result_ver)
Example #7
    def _add_res(self, res, lang, name, ver):
        log.debug("%s stdlibs: add %s", lang, res)
        cix_path = res.path
        try:
            tree = tree_from_cix_path(cix_path)
        except ET.XMLParserError as ex:
            log.warn("could not load %s stdlib from `%s' (%s): skipping", name, cix_path, ex)
            return

        dbdir = join(self.base_dir, name)
        if exists(dbdir):
            log.warn("`db/stdlibs/%s' already exists and should not: " "removing it", name)
            try:
                rmdir(dbdir)
            except OSError, ex:
                log.error("could not remove `%s' to create %s stdlib in " "database (%s): skipping", dbdir, name)
        if not exists(dbdir):
            os.makedirs(dbdir)

        # Create 'blob_index' and 'toplevel*_index' and write out
        # '.blob' file.
        LEN_PREFIX = self.db.LEN_PREFIX
        is_hits_from_lpath_lang = lang in self.db.import_everything_langs
        blob_index = {}  # {blobname -> dbfile}
        toplevelname_index = {}  # {ilk -> toplevelname -> blobnames}
        toplevelprefix_index = {}  # {ilk -> prefix -> toplevelnames}
        for blob in tree.findall("file/scope"):
            assert lang == blob.get("lang"), "Adding %s resource %s to %s blob" % (lang, res, blob.get("lang"))
            blobname = blob.get("name")
Example #8
 def _upgrade_wipe_db_catalogs(self, curr_ver, result_ver):
     catalog_dir = join(self.base_dir, "db", "catalogs")
     if exists(catalog_dir):
         log.debug("fs-write: wipe db/catalogs")
         rmdir(catalog_dir)
     with open(join(self.base_dir, "VERSION"), 'w') as f:
         f.write(result_ver)
Example #9
        log.debug("%s stdlibs: add %s", lang, res)
        cix_path = res.path
        try:
            tree = tree_from_cix_path(cix_path)
        except ET.XMLParserError as ex:
            log.warn("could not load %s stdlib from `%s' (%s): skipping", name,
                     cix_path, ex)
            return

        dbdir = join(self.base_dir, name)
        if exists(dbdir):
            log.warn(
                "`db/stdlibs/%s' already exists and should not: "
                "removing it", name)
            try:
                rmdir(dbdir)
            except OSError as ex:
                log.error(
                    "could not remove `%s' to create %s stdlib in "
                    "database (%s): skipping", dbdir, name, ex)
        if not exists(dbdir):
            os.makedirs(dbdir)

        # Create 'blob_index' and 'toplevel*_index' and write out
        # '.blob' file.
        LEN_PREFIX = self.db.LEN_PREFIX
        is_hits_from_lpath_lang = lang in self.db.import_everything_langs
        blob_index = {}  # {blobname -> dbfile}
        toplevelname_index = {}  # {ilk -> toplevelname -> blobnames}
        toplevelprefix_index = {}  # {ilk -> prefix -> toplevelnames}
        for blob in tree.findall("file/scope"):
Example #10
    def _add_res(self, res, lang, name, ver):
        log.debug("%s stdlibs: add %s", lang, res)
        cix_path = res.path
        try:
            tree = tree_from_cix_path(cix_path)
        except ET.XMLParserError as ex:
            log.warn("could not load %s stdlib from `%s' (%s): skipping", name,
                     cix_path, ex)
            return

        dbdir = join(self.base_dir, name)
        if exists(dbdir):
            log.warn(
                "`db/stdlibs/%s' already exists and should not: "
                "removing it", name)
            try:
                rmdir(dbdir)
            except OSError as ex:
                log.error(
                    "could not remove `%s' to create %s stdlib in "
                    "database (%s): skipping", dbdir, name)
        if not exists(dbdir):
            os.makedirs(dbdir)

        # Create 'blob_index' and 'toplevel*_index' and write out
        # '.blob' file.
        LEN_PREFIX = self.db.LEN_PREFIX
        is_hits_from_lpath_lang = lang in self.db.import_everything_langs
        blob_index = {}  # {blobname -> dbfile}
        toplevelname_index = {}  # {ilk -> toplevelname -> blobnames}
        toplevelprefix_index = {}  # {ilk -> prefix -> toplevelnames}
        for blob in tree.findall("file/scope"):
            assert lang == blob.get("lang"), \
                "Adding %s resource %s to %s blob" % (
                    lang, res, blob.get("lang"))
            blobname = blob.get("name")
            dbfile = self.db.bhash_from_blob_info(cix_path, lang, blobname)
            blob_index[blobname] = dbfile
            ET.ElementTree(blob).write(join(dbdir, dbfile + ".blob"))
            for toplevelname, elem in blob.names.items():
                if "__local__" in elem.get("attributes", "").split():
                    # this is internal to the stdlib
                    continue
                ilk = elem.get("ilk") or elem.tag
                bft = toplevelname_index.setdefault(ilk, {})
                if toplevelname not in bft:
                    bft[toplevelname] = set([blobname])
                else:
                    bft[toplevelname].add(blobname)
                prefix = toplevelname[:LEN_PREFIX]
                tfp = toplevelprefix_index.setdefault(ilk, {})
                if prefix not in tfp:
                    tfp[prefix] = set([toplevelname])
                else:
                    tfp[prefix].add(toplevelname)

        self.db.save_pickle(join(dbdir, "blob_index"), blob_index)
        self.db.save_pickle(join(dbdir, "toplevelname_index"),
                            toplevelname_index)
        self.db.save_pickle(join(dbdir, "toplevelprefix_index"),
                            toplevelprefix_index)

        mtime = os.stat(cix_path).st_mtime
        self.res_index[res.area_path] = mtime