Example No. 1
    def _isIdentical(self, path1, path2):
        """
        Checks if the files denoted by path1 and path2 are identical according
        to the settings of the object.

        The files must have passed the preliminary test by _preTestIdentity().
        """
        
        stat1 = os.stat(pathEnc(path1))
        stat2 = os.stat(pathEnc(path2))

        if self.modDateIsEnough and stat1.st_mtime == stat2.st_mtime:
            return True

        # End of fast tests, now the whole content must be compared
        
        file1 = file(path1, "rb")    
        file2 = file(path2, "rb")
        
        try:
            while True:
                block1 = file1.read(1024 * 1024)
                block2 = file2.read(1024 * 1024)
                if len(block1) == 0 and len(block2) == 0:
                    # EOF
                    return True
                if len(block1) != len(block2):
                    raise FSException(_(u"File compare error, file not readable or "
                            u"changed during compare"))

                if block1 != block2:
                    return False
        finally:
            file2.close()
            file1.close()
Example No. 2
    def _isIdentical(self, path1, path2):
        """
        Checks if the files denoted by path1 and path2 are identical according
        to the settings of the object.

        The files must have passed the preliminary test by _preTestIdentity().
        """
        
        stat1 = os.stat(pathEnc(path1))
        stat2 = os.stat(pathEnc(path2))

        if self.modDateIsEnough and stat1.st_mtime == stat2.st_mtime:
            return True

        # End of fast tests, now the whole content must be compared
        
        file1 = open(path1, "rb")    
        file2 = open(path2, "rb")
        
        try:
            while True:
                block1 = file1.read(1024 * 1024)
                block2 = file2.read(1024 * 1024)
                if len(block1) == 0 and len(block2) == 0:
                    # EOF
                    return True
                if len(block1) != len(block2):
                    raise FSException(_("File compare error, file not readable or "
                            "changed during compare"))

                if block1 != block2:
                    return False
        finally:
            file2.close()
            file1.close()
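Examples 1 and 2 are the same routine, first in Python 2 (the removed file() builtin, u-prefixed strings) and then in Python 3 (open()). The byte-comparison loop can also stand alone; the sketch below is a minimal standalone version under the assumption that plain paths are fine and that a length mismatch may simply return False instead of raising FSException. The closest stdlib equivalent is noted at the end.

def blocks_identical(path1, path2, blocksize=1024 * 1024):
    # Standalone sketch of the comparison loop above (assumption: no pathEnc()
    # wrapper, no mtime shortcut, no custom exception on length mismatch).
    with open(path1, "rb") as f1, open(path2, "rb") as f2:
        while True:
            block1 = f1.read(blocksize)
            block2 = f2.read(blocksize)
            if not block1 and not block2:
                return True      # both files reached EOF together
            if block1 != block2:
                return False     # covers differing content and differing length

# Roughly equivalent stdlib call, without the custom error handling:
# identical = filecmp.cmp(path1, path2, shallow=False)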
Example No. 3
    def findDestPathNoSource(self, suffix, prefix=u""):
        """
        Find a path to a destination.
        """
        self._ensureStorage()

        if prefix:
            fname = prefix + suffix

            destPath = os.path.join(self.storagePath, fname)
            if not os.path.exists(pathEnc(destPath)):
                return destPath
        else:
            prefix = u""


#         mat = _FILESPLITPAT.match(fname)
#         if mat is None:
#             raise FSException("Internal error: Bad source file name")
# 
#         coreName = mat.group("name")
#         suffix = mat.group("suffix")

        for t in xrange(20):  # Number of tries
            newName = u"%s_%s%s" % (prefix, createRandomString(20), suffix)

            destPath = os.path.join(self.storagePath, newName)
            if not os.path.exists(pathEnc(destPath)):
                return destPath

        # Give up
        return None
Example No. 4
    def findDestPathNoSource(self, suffix, prefix=""):
        """
        Find a path to a destination.
        """
        self._ensureStorage()

        if prefix:
            fname = prefix + suffix

            destPath = os.path.join(self.storagePath, fname)
            if not os.path.exists(pathEnc(destPath)):
                return destPath
        else:
            prefix = ""


#         mat = _FILESPLITPAT.match(fname)
#         if mat is None:
#             raise FSException("Internal error: Bad source file name")
# 
#         coreName = mat.group("name")
#         suffix = mat.group("suffix")

        for t in range(20):  # Number of tries
            newName = "%s_%s%s" % (prefix, createRandomString(20), suffix)

            destPath = os.path.join(self.storagePath, newName)
            if not os.path.exists(pathEnc(destPath)):
                return destPath

        # Give up
        return None
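Examples 3 and 4 (Python 2 and Python 3 versions of the same method) first try the plain prefix + suffix name and then fall back to a bounded number of randomly decorated names. Below is a standalone sketch of that collision-avoidance pattern, with uuid4 used as a stand-in for the createRandomString() helper that is not shown in these excerpts.

import os
import uuid

def find_free_name(storage_dir, suffix, prefix="", tries=20):
    # Try the plain name first, then a handful of randomized names; None = give up.
    if prefix:
        candidate = os.path.join(storage_dir, prefix + suffix)
        if not os.path.exists(candidate):
            return candidate
    for _ in range(tries):
        candidate = os.path.join(storage_dir,
                "%s_%s%s" % (prefix, uuid.uuid4().hex[:20], suffix))
        if not os.path.exists(candidate):
            return candidate
    return None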
Example No. 5
def createWikiDB(wikiName, dataDir, overwrite=False):
    """
    creates the initial db
    Warning: If overwrite is True, a previous file will be deleted!
    """
    dbfile = join(dataDir, "wiki.sli")
    if (not exists(pathEnc(dbfile)) or overwrite):
        if (not exists(pathEnc(dataDir))):
            mkdir(pathEnc(dataDir))
        else:
            if exists(pathEnc(dbfile)) and overwrite:
                unlink(pathEnc(dbfile))

#         if (wikiDocument is not None):
#             dbName = self.wikiDocument.getWikiConfig().get("wiki_db", "db_filename",
#                     u"").strip()
#                     
#             if (dbName == u""):
#                 dbName = u"wikidb"
#         else:

        dbName = u"wikidb"

        # create the database
        connection = gadfly.gadfly()
        connection.startup(dbName, dataDir)
        connwrap = ConnectWrap(connection)

        try:
            for tn in MAIN_TABLES:
                changeTableSchema(connwrap, tn, TABLE_DEFINITIONS[tn])
                
            for key, value in (
                    ("formatver", str(VERSION_DB)),  # Version of database format the data was written
                    ("writecompatver", str(VERSION_WRITECOMPAT)),  # Lowest format version which is write compatible
                    ("readcompatver", str(VERSION_READCOMPAT)),  # Lowest format version which is read compatible
                    ("branchtag", "WikidPad"),  # Tag of the WikidPad branch
                    ("locale", "-") # Locale for cached wordnormcase column. '-': column invalid
                    ):
                setSettingsValue(connwrap, key, value)
    
#             connwrap.executemany("insert or replace into settings(key, value) "+
#                         "values (?, ?)", (
#                     ("formatver", "0"),  # Version of database format the data was written
#                     ("writecompatver", "0"),  # Lowest format version which is write compatible
#                     ("readcompatver", "0"),  # Lowest format version which is read compatible
#                     ("branchtag", "WikidPad")  # Tag of the WikidPad branch
#                     )  )
        
            rebuildIndices(connwrap)
            connwrap.commit()
            
        finally:
            # close the connection
            connwrap.close()

    else:
        raise WikiDBExistsException(
                _(u"database already exists at location: %s") % dataDir)
Example No. 6
def createWikiDB(wikiName, dataDir, overwrite=False):
    """
    creates the initial db
    Warning: If overwrite is True, a previous file will be deleted!
    """
    dbfile = join(dataDir, "wiki.sli")
    if (not exists(pathEnc(dbfile)) or overwrite):
        if (not exists(pathEnc(dataDir))):
            mkdir(pathEnc(dataDir))
        else:
            if exists(pathEnc(dbfile)) and overwrite:
                unlink(pathEnc(dbfile))

        # create the database
        connection = gadfly.gadfly()
        connection.startup("wikidb", dataDir)
        connwrap = ConnectWrap(connection)

        try:
            for tn in MAIN_TABLES:
                changeTableSchema(connwrap, tn, TABLE_DEFINITIONS[tn])

            for key, value in (
                    ("formatver", str(VERSION_DB)),  # Version of database format the data was written
                    ("writecompatver", str(VERSION_WRITECOMPAT)),  # Lowest format version which is write compatible
                    ("readcompatver", str(VERSION_READCOMPAT)),  # Lowest format version which is read compatible
                    ("branchtag", "WikidPad"),  # Tag of the WikidPad branch
                    ("locale", "-")  # Locale for cached wordnormcase column. '-': column invalid
                    ):
                setSettingsValue(connwrap, key, value)


#             connwrap.executemany("insert or replace into settings(key, value) "+
#                         "values (?, ?)", (
#                     ("formatver", "0"),  # Version of database format the data was written
#                     ("writecompatver", "0"),  # Lowest format version which is write compatible
#                     ("readcompatver", "0"),  # Lowest format version which is read compatible
#                     ("branchtag", "WikidPad")  # Tag of the WikidPad branch
#                     )  )

            rebuildIndices(connwrap)
            connwrap.commit()

        finally:
            # close the connection
            connwrap.close()

    else:
        raise WikiDBExistsException(
            _(u"database already exists at location: %s") % dataDir)
Example No. 7
def createWikiDB(wikiName, dataDir, overwrite=False, wikiDocument=None):
    """
    creates the initial db
    Warning: If overwrite is True, a previous file will be deleted!
    """

    if (wikiDocument is not None):
        dbPath = wikiDocument.getWikiConfig().get("wiki_db", "db_filename",
                                                  "").strip()

        if (dbPath == ""):
            dbPath = "wikiovw.sli"
    else:
        dbPath = "wikiovw.sli"

    dbfile = join(dataDir, dbPath)
    if (not exists(pathEnc(dbfile)) or overwrite):
        if (not exists(pathEnc(dataDir))):
            mkdir(pathEnc(dataDir))
        else:
            if exists(pathEnc(dbfile)) and overwrite:
                unlink(pathEnc(dbfile))

        # create the database
        connwrap = ConnectWrapSyncCommit(sqlite.connect(dbfile))

        try:
            for tn in MAIN_TABLES:
                changeTableSchema(connwrap, tn, TABLE_DEFINITIONS[tn])

            connwrap.executemany(
                "insert or replace into settings(key, value) " +
                "values (?, ?)",
                (
                    ("formatver", str(VERSION_DB)
                     ),  # Version of database format the data was written
                    ("writecompatver", str(VERSION_WRITECOMPAT)
                     ),  # Lowest format version which is write compatible
                    ("readcompatver", str(VERSION_READCOMPAT)
                     ),  # Lowest format version which is read compatible
                    ("branchtag", "WikidPad")  # Tag of the WikidPad branch
                    #                     ("locale", "-") # Locale for cached wordnormcase column. '-': column invalid
                ))

            rebuildIndices(connwrap)
            connwrap.syncCommit()

        finally:
            # close the connection
            connwrap.close()

    else:
        raise WikiDBExistsException(
            _("database already exists at location: %s") % dataDir)
Example No. 8
def createWikiDB(wikiName, dataDir, overwrite=False, wikiDocument=None):
    """
    creates the initial db
    Warning: If overwrite is True, a previous file will be deleted!
    """
    
    if (wikiDocument is not None):
        dbPath = wikiDocument.getWikiConfig().get("wiki_db", "db_filename",
                    "").strip()
                    
        if (dbPath == ""):
            dbPath = "wikiovw.sli"
    else:
        dbPath = "wikiovw.sli"

    dbfile = join(dataDir, dbPath)
    if (not exists(pathEnc(dbfile)) or overwrite):
        if (not exists(pathEnc(dataDir))):
            mkdir(pathEnc(dataDir))
        else:
            if exists(pathEnc(dbfile)) and overwrite:
                unlink(pathEnc(dbfile))

        # create the database
        connwrap = ConnectWrapSyncCommit(sqlite.connect(dbfile))

        try:
            for tn in MAIN_TABLES:
                changeTableSchema(connwrap, tn, TABLE_DEFINITIONS[tn])
    
            connwrap.executemany("insert or replace into settings(key, value) "+
                        "values (?, ?)", (
                    ("formatver", str(VERSION_DB)),  # Version of database format the data was written
                    ("writecompatver", str(VERSION_WRITECOMPAT)),  # Lowest format version which is write compatible
                    ("readcompatver", str(VERSION_READCOMPAT)),  # Lowest format version which is read compatible
                    ("branchtag", "WikidPad")  # Tag of the WikidPad branch
#                     ("locale", "-") # Locale for cached wordnormcase column. '-': column invalid
                    )  )

            rebuildIndices(connwrap)
            connwrap.syncCommit()
            
        finally:
            # close the connection
            connwrap.close()

    else:
        raise WikiDBExistsException(
                _("database already exists at location: %s") % dataDir)
Example No. 9
    def _preTestIdentity(self, destpath, srcfname, srcstat):
        """
        Preliminary test of whether the destination file denoted by destpath
        and the source file, described by its name srcfname (without path)
        and its stats srcstat, COULD BE identical.
        If the function returns False, they are definitely different,
        if it returns True, further tests are necessary.
        """
        if not os.path.isfile(destpath):
            # Must be existing file
            return False

        destfname = os.path.basename(destpath)

        if os.path.splitext(srcfname)[1] != os.path.splitext(destfname)[1]:
            # file suffix must match always
            return False

        if self.filenameMustMatch and srcfname != destfname:
            return False

        deststat = os.stat(pathEnc(destpath))
        
        if deststat.st_size != srcstat.st_size:
            # Size must match always
            return False

        if self.modDateMustMatch and deststat.st_mtime != srcstat.st_mtime:
            return False
            
        # This means it COULD BE identical according to the quick tests
        return True
Example No. 10
    def findDestPath(self, srcPath):
        """
        Find a path to a new destination
        of the source file denoted by srcPath. Some settings of
        the object determine how this is done exactly.
        Returns a tuple (path, exists) where path is the destination
        path and exists is True if an identical file exists already
        at the destination.
        If path is None, a new filename couldn't be found.
        """

        if not (os.path.isfile(srcPath) or os.path.isdir(srcPath)):
            raise FSException(_("Path '%s' must point to an existing file") %
                    srcPath)

        self._ensureStorage()
        
        for c in self._getCandidates(srcPath):
            if self._isIdentical(srcPath, c):
                return (c, True)

        # No identical file found, so find a not yet used name for the new file.
        fname = os.path.basename(srcPath)

        if not os.path.exists(pathEnc(os.path.join(self.storagePath, fname))):
            return (os.path.join(self.storagePath, fname), False)

        mat = _FILESPLITPAT.match(fname)
        if mat is None:
            raise FSException(_("Internal error: Bad source file name"))

        coreName = mat.group("name")
        suffix = mat.group("suffix")

        for t in range(10):  # Number of tries
            newName = "%s_%s%s" % (coreName, createRandomString(10), suffix)
            
            if not os.path.exists(pathEnc(os.path.join(
                    self.storagePath, newName))):
                return (os.path.join(self.storagePath, newName), False)

        # Give up
        return (None, False)
Example No. 11
    def findDestPath(self, srcPath):
        """
        Find a path to a new destination
        of the source file denoted by srcPath. Some settings of
        the object determine how this is done exactly.
        Returns a tuple (path, exists) where path is the destination
        path and exists is True if an identical file exists already
        at the destination.
        If path is None, a new filename couldn't be found.
        """

        if not (os.path.isfile(srcPath) or os.path.isdir(srcPath)):
            raise FSException(_(u"Path '%s' must point to an existing file") %
                    srcPath)

        self._ensureStorage()
        
        for c in self._getCandidates(srcPath):
            if self._isIdentical(srcPath, c):
                return (c, True)

        # No identical file found, so find a not yet used name for the new file.
        fname = os.path.basename(srcPath)

        if not os.path.exists(pathEnc(os.path.join(self.storagePath, fname))):
            return (os.path.join(self.storagePath, fname), False)

        mat = _FILESPLITPAT.match(fname)
        if mat is None:
            raise FSException(_(u"Internal error: Bad source file name"))

        coreName = mat.group("name")
        suffix = mat.group("suffix")

        for t in xrange(10):  # Number of tries
            newName = u"%s_%s%s" % (coreName, createRandomString(10), suffix)
            
            if not os.path.exists(pathEnc(os.path.join(
                    self.storagePath, newName))):
                return (os.path.join(self.storagePath, newName), False)

        # Give up
        return (None, False)
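When the plain basename is already taken, findDestPath splits it via _FILESPLITPAT into a core name and a suffix before appending random material. That regular expression is not part of these excerpts; a hypothetical pattern with the same named groups (purely illustrative, not the project's definition) could look like this:

import re

# Hypothetical stand-in for _FILESPLITPAT: core name up to the last dot,
# suffix from the last dot onward (or empty). Not taken from the WikidPad sources.
_FILESPLITPAT = re.compile(r"^(?P<name>.*?)(?P<suffix>(?:\.[^.]*)?)$")

mat = _FILESPLITPAT.match("notes.backup.txt")
print(mat.group("name"), mat.group("suffix"))  # notes.backup .txt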
Example No. 12
    def _getCandidates(self, srcPath):
        """
        Find possible candidates for detailed identity check. The file storage
        must exist already.
        srcPath -- Must be a path to an existing file
        """
        srcfname = os.path.basename(srcPath)
        srcstat = os.stat(pathEnc(srcPath))

        
        ccSameName = set()     # candidate category: same filename
                                    # (only one possible entry)
        ccSameMod = set()     # candidate category: same mod. date
        ccElse = set()     # candidate category: everything else

        samenamepath = os.path.join(self.storagePath, srcfname)
        if self._preTestIdentity(samenamepath, srcfname, srcstat):
            ccSameName.add(samenamepath)

        if self.filenameMustMatch:
            # No other candidates possible
            return list(ccSameName)


        ext = os.path.splitext(srcPath)[1]
        for p in glob.glob(os.path.join(self.storagePath, "*" + ext)):
            if p == samenamepath:
                # Already tested above
                continue

            if self._preTestIdentity(p, srcfname, srcstat):
                deststat = os.stat(pathEnc(p))
                if deststat.st_mtime == srcstat.st_mtime:
                    ccSameMod.add(p)
                else:
                    ccElse.add(p)

        return list(ccSameName) + list(ccSameMod) + list(ccElse)
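Examples 9 through 12 (together with the _isIdentical routine from Examples 1 and 2) form a cheap-to-expensive filter: _preTestIdentity rejects on suffix, size and optionally name or mtime, _getCandidates orders the survivors so that same-name and same-mtime files come first, and only then is the byte-by-byte comparison paid for. A compressed standalone sketch of that ordering idea, with the class settings replaced by plain os.stat comparisons:

import os

def ordered_candidates(src_path, candidate_paths):
    # Order candidates so the most likely matches get the expensive
    # byte-by-byte comparison first: same basename, then same mtime and
    # size, then everything else.
    src_stat = os.stat(src_path)
    src_name = os.path.basename(src_path)

    def rank(path):
        st = os.stat(path)
        if os.path.basename(path) == src_name:
            return 0
        if st.st_mtime == src_stat.st_mtime and st.st_size == src_stat.st_size:
            return 1
        return 2

    return sorted(candidate_paths, key=rank)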
Example No. 13
def updateDatabase(connwrap, dataDir, pagefileSuffix, wikiData):
    """
    Update a database from an older version to current (checkDatabaseFormat()
    should have returned 1 before calling this function)
    """
    connwrap.syncCommit()
    
    # Always remember that there is no automatic unicode<->utf-8 conversion
    # during this function!

    formatver = getSettingsInt(connwrap, "formatver")

    if formatver == 0:
        # Insert in table wikiwords column wordnormcase
        changeTableSchema(connwrap, "wikiwords", 
                TABLE_DEFINITIONS["wikiwords"])

        formatver = 1

    # --- WikiPad 1.8beta1 reached (formatver=1, writecompatver=1,
    #         readcompatver=1) ---


    if formatver == 1:
        # Update "wikiwords" schema and create new tables
        for tn in ("wikiwords", "wikiwordmatchterms", "datablocks",
                "datablocksexternal"):
            changeTableSchema(connwrap, tn, TABLE_DEFINITIONS[tn])

        # Transfer "search_views" data to "datablocks" table
        searches = connwrap.execSqlQuery(
                "select title, datablock from search_views")

        for title, data in searches:
            connwrap.execSql(
                "insert into datablocks(unifiedname, data) "+\
                "values (?, ?)", ("savedsearch/" + title, sqlite.Binary(data)))

        connwrap.execSql("drop table search_views")

        allWords = connwrap.execSqlQuerySingleColumn("select word from wikiwords")

        # Divide into functional and wiki pages
        wikiWords = []
        funcWords = []
        for w in allWords:
            w = w.decode("utf-8")
            if w.startswith(u'['):
                funcWords.append(w)
            else:
                wikiWords.append(w)

        # Fill the new fields in table "wikiwords"
        for wikiWord in wikiWords:
            filename = wikiWord + pagefileSuffix
            fullPath = join(dataDir, filename)
            try:
                # We don't use coarsening here for the FSB because a different
                # coarsening setting can't exist for the old wiki format
                filesig = getFileSignatureBlock(fullPath)
            except (IOError, WindowsError):
                traceback.print_exc()
                continue
            
            connwrap.execSql("update wikiwords set filepath = ?, "
                    "filenamelowercase = ?, filesignature = ? "
                    "where word = ?", (filename.encode("utf-8"),
                    filename.lower().encode("utf-8"), sqlite.Binary(filesig),
                    wikiWord.encode("utf-8")))

        # Move functional pages to new table "datablocksexternal" and rename them
        for funcWord in funcWords:
            if funcWord not in (u"[TextBlocks]", u"[PWL]", u"[CCBlacklist]"):
                continue # Error ?!
            
            unifName = u"wiki/" + funcWord[1:-1]
            fullPath = join(dataDir, funcWord + pagefileSuffix)
            
            icf = iterCompatibleFilename(unifName, u".data")
            
            for i in range(10):  # Actual "while True", but that's too dangerous
                newFilename = icf.next()
                newPath = join(dataDir, newFilename)

                if exists(pathEnc(newPath)):
                    # A file with the designated new name of fn already exists
                    # -> do nothing
                    continue

                try:
                    rename(pathEnc(fullPath), pathEnc(newPath))
                    # We don't use coarsening here for the FSB because a different
                    # coarsening setting can't exist for the old wiki format
                    connwrap.execSql(
                        "insert into datablocksexternal(unifiedname, filepath, "
                        "filenamelowercase, filesignature) values (?, ?, ?, ?)",
                        (unifName.encode("utf-8"), newFilename.encode("utf-8"),
                            newFilename.lower().encode("utf-8"),
                            sqlite.Binary(getFileSignatureBlock(newPath))))
                    connwrap.execSql("delete from wikiwords where word = ?",
                            (funcWord.encode("utf-8"),))
                    break
                except (IOError, OSError):
                    traceback.print_exc()
                    continue

        formatver = 2
        
    # --- WikiPad 2.0alpha1 reached (formatver=2, writecompatver=2,
    #         readcompatver=2) ---
    
    if formatver == 2:
        # Recreate table "todos"
        connwrap.execSql("drop table todos;")
        changeTableSchema(connwrap, "todos", TABLE_DEFINITIONS["todos"])
        connwrap.execSql("update todos set firstcharpos=-1;")

        # Rename table "wikiwordprops" to "wikiwordattrs"
        changeTableSchema(connwrap, "wikiwordattrs", TABLE_DEFINITIONS["wikiwordattrs"])
        connwrap.execSql("insert into wikiwordattrs(word, key, value, "
                "firstcharpos, charlength) select word, key, value, "
                "firstcharpos, -1 from wikiwordprops;")
        connwrap.execSql("drop table wikiwordprops;")

        for tn in ("wikirelations", "wikiwordmatchterms"):
            changeTableSchema(connwrap, tn, TABLE_DEFINITIONS[tn])
            connwrap.execSql("update %s set firstcharpos=-1;" % tn)

        # Mark all wikiwords to need a rebuild
        connwrap.execSql("update wikiwords set metadataprocessed=0;")

        formatver = 3

    # --- WikiPad 2.1alpha1 reached (formatver=3, writecompatver=3,
    #         readcompatver=3) ---


    connwrap.executemany("insert or replace into settings(key, value) "+
                "values (?, ?)", (
            ("formatver", str(VERSION_DB)),  # Version of database format the data was written
            ("writecompatver", str(VERSION_WRITECOMPAT)),  # Lowest format version which is write compatible
            ("readcompatver", str(VERSION_READCOMPAT)),  # Lowest format version which is read compatible
            ("branchtag", "WikidPad")  # Tag of the WikidPad branch
            )  )

    rebuildIndices(connwrap)

    connwrap.syncCommit()
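Example 13 (like the later updateDatabase variants) migrates step by step: each "if formatver == N:" block transforms the schema and then bumps formatver, so an old database is walked forward one version at a time within a single call. A generic sketch of that pattern; the names and the callback layout are assumptions, not WikidPad's code:

def run_migrations(connwrap, current_version, migrations, save_version):
    # migrations maps a version N to a callable that upgrades N -> N + 1;
    # save_version persists the reached version (e.g. into the settings table).
    while current_version in migrations:
        migrations[current_version](connwrap)
        current_version += 1
        save_version(connwrap, current_version)
    return current_version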
Example No. 14
    def _storageExists(self):
        """
        Test if file storage (=directory) exists.
        """
        return os.path.exists(pathEnc(self.storagePath))
Example No. 15
def updateDatabase(connwrap, dataDir, pagefileSuffix):
    """
    Update a database from an older version to current (checkDatabaseFormat()
    should have returned 1 before calling this function)
    """
    connwrap.commit()

    indices = connwrap.execSqlQuerySingleColumn(
        "select INDEX_NAME from __indices__")
    tables = connwrap.execSqlQuerySingleColumn(
        "select TABLE_NAME from __table_names__")

    indices = map(string.upper, indices)
    tables = map(string.upper, tables)

    # updatedTables = []

    if not "SETTINGS" in tables:
        # We are prior WikidPad 1.2beta2 (which writes format version 0)

        if "WIKIWORDPROPS_PKEY" in indices:
            #             print "dropping index wikiwordprops_pkey"
            connwrap.execSql("drop index wikiwordprops_pkey")
#         if "WIKIWORDPROPS_WORD" not in indices:
#             print "creating index wikiwordprops_word"
#             connwrap.execSql("create index wikiwordprops_word on wikiwordprops(word)")
#         if "WIKIRELATIONS_WORD" not in indices:
#             print "creating index wikirelations_word"
#             connwrap.execSql("create index wikirelations_word on wikirelations(word)")
        if "REGISTRATION" in tables:
            connwrap.execSql("drop table registration")

        # Update search_views
        searches = connwrap.execSqlQuerySingleColumn(
            "select search from search_views")

        changeTableSchema(connwrap, "search_views",
                          TABLE_DEFINITIONS["search_views"])

        for search in searches:
            searchOp = SearchReplaceOperation()
            searchOp.searchStr = search
            searchOp.wikiWide = True
            searchOp.booleanOp = True

            try:
                # Raises exception if search is invalid
                searchOp.rebuildSearchOpTree()
            except:
                continue

            datablock = searchOp.getPackedSettings()

            connwrap.execSql(
                "insert into search_views(title, datablock) "+\
                "values (?, ?)", (searchOp.getTitle(), datablock))

        formatver = 0

        changeTableSchema(connwrap, "settings", TABLE_DEFINITIONS["settings"])

        # Write initial format versions
        for key, value in (
                ("formatver", "0"),  # Version of database format the data was written
                ("writecompatver", "0"),  # Lowest format version which is write compatible
                ("readcompatver", "0"),  # Lowest format version which is read compatible
                ("branchtag", "WikidPad")  # Tag of the WikidPad branch
                ):
            setSettingsValue(connwrap, key, value)

        # --- WikiPad 1.20beta2 reached (formatver=0, writecompatver=0,
        #         readcompatver=0) ---

    formatver = getSettingsInt(connwrap, "formatver")

    if formatver == 0:
        # From formatver 0 to 1, all filenames with brackets are renamed
        # to have no brackets
        filenames = glob.glob(join(mbcsEnc(dataDir, "replace")[0], '*.wiki'))
        for fn in filenames:
            fn = mbcsDec(fn, "replace")[0]
            bn = basename(fn)
            newbname = removeBracketsFilename(bn)
            if bn == newbname:
                continue

            newname = mbcsEnc(join(dataDir, newbname), "replace")[0]
            if exists(pathEnc(newname)):
                # A file with the designated new name of fn already exists
                # -> do nothing
                continue

            try:
                rename(fn, newname)
            except (IOError, OSError):
                pass

        formatver = 1

        # --- WikiPad 1.20beta3 reached (formatver=1, writecompatver=1,
        #         readcompatver=1) ---

    if formatver == 1:
        # remove brackets from all wikiwords in database

        # table wikiwords
        dataIn = connwrap.execSqlQuery(
            "select word, created, modified from wikiwords")
        connwrap.execSql("drop table wikiwords")
        connwrap.commit()
        changeTableSchema(connwrap, "wikiwords",
                          TABLE_DEFINITIONS["wikiwords"])
        rebuildIndices(connwrap)

        uniqueCtl = {}
        for w, c, m in dataIn:
            w = oldWikiWordToLabel(w)
            if not uniqueCtl.has_key(w):
                connwrap.execSqlInsert(
                    "wikiwords",
                    ("word", "created", "modified", "presentationdatablock"),
                    (w, c, m, ""))
                #                 connwrap.execSql("insert into wikiwords(word, created, modified) "
                #                         "values (?, ?, ?)", (w, c, m))
                uniqueCtl[w] = None

        # table wikirelations
        dataIn = connwrap.execSqlQuery(
            "select word, relation, created from wikirelations")
        connwrap.execSql("drop table wikirelations")
        connwrap.commit()
        changeTableSchema(connwrap, "wikirelations",
                          TABLE_DEFINITIONS["wikirelations"])
        rebuildIndices(connwrap)

        uniqueCtl = {}
        for w, r, c in dataIn:
            w, r = oldWikiWordToLabel(w), oldWikiWordToLabel(r)
            if not uniqueCtl.has_key((w, r)):
                connwrap.execSqlInsert("wikirelations",
                                       ("word", "relation", "created"),
                                       (w, r, c))
                #                 connwrap.execSql("insert into wikirelations(word, relation, created) "
                #                         "values (?, ?, ?)", (w, r, c))
                uniqueCtl[(w, r)] = None

        # table wikiwordprops
        dataIn = connwrap.execSqlQuery(
            "select word, key, value from wikiwordprops")
        connwrap.execSql("drop table wikiwordprops")
        connwrap.commit()
        changeTableSchema(connwrap, "wikiwordprops",
                          TABLE_DEFINITIONS["wikiwordprops_PRE2_1alpha01"])
        rebuildIndices(connwrap)

        for w, k, v in dataIn:
            connwrap.execSqlInsert("wikiwordprops", ("word", "key", "value"),
                                   (oldWikiWordToLabel(w), k, v),
                                   tableDefault="wikiwordprops_PRE2_1alpha01")
#             connwrap.execSql("insert into wikiwordprops(word, key, value) "
#                     "values (?, ?, ?)", (oldWikiWordToLabel(w), k, v))

        # table todos
        dataIn = connwrap.execSqlQuery("select word, todo from todos")
        connwrap.execSql("drop table todos")
        connwrap.commit()
        changeTableSchema(connwrap, "todos",
                          TABLE_DEFINITIONS["todos_PRE2_1alpha01"])
        rebuildIndices(connwrap)

        for w, t in dataIn:
            connwrap.execSqlInsert("todos", ("word", "todo"),
                                   (oldWikiWordToLabel(w), t),
                                   tableDefault="todos_PRE2_1alpha01")


#             connwrap.execSql("insert into todos(word, todo) "
#                     "values (?, ?)", (oldWikiWordToLabel(w), t))

        formatver = 2

        # --- WikiPad 1.6beta2 reached (formatver=2, writecompatver=2,
        #         readcompatver=2) ---

    if formatver == 2:
        changeTableSchema(connwrap, "wikiwords",
                          TABLE_DEFINITIONS["wikiwords"])

        # --- WikiPad 1.8beta1 reached (formatver=3, writecompatver=3,
        #         readcompatver=2) ---

        formatver = 3

    if formatver == 3:

        # Update "wikiwords" schema and create new tables
        for tn in ("wikiwords", "wikiwordmatchterms", "datablocks",
                   "datablocksexternal", "defaultvalues"):
            changeTableSchema(connwrap, tn, TABLE_DEFINITIONS[tn])

        # (Re)fill "defaultvalues" and read them into connection wrapper
        connwrap.fillDefaultValues()
        connwrap.readDefaultValues()

        # Transfer "search_views" data to "datablocks" table
        searches = connwrap.execSqlQuery(
            "select title, datablock from search_views", strConv=(True, False))

        for title, data in searches:
            connwrap.execSql(
                "insert into datablocks(unifiedname, data) "+\
                "values (?, ?)", (u"savedsearch/" + title, data))

        connwrap.execSql("drop table search_views")

        allWords = connwrap.execSqlQuerySingleColumn(
            "select word from wikiwords")

        # Divide into functional and wiki pages
        wikiWords = []
        funcWords = []
        for w in allWords:
            if w.startswith('['):
                funcWords.append(w)
            else:
                wikiWords.append(w)

        # Fill the new fields in table "wikiwords"
        for wikiWord in wikiWords:
            filename = wikiWord + pagefileSuffix
            fullPath = join(dataDir, filename)
            try:
                # We don't use coarsening here for the FSB because a different
                # coarsening setting can't exist for the old wiki format
                filesig = getFileSignatureBlock(fullPath)
            except (IOError, WindowsError):
                traceback.print_exc()
                continue

            connwrap.execSql(
                "update wikiwords set filepath = ?, "
                "filenamelowercase = ?, filesignature = ? "
                "where word = ?",
                (filename, filename.lower(), filesig, wikiWord))

        # Move functional pages to new table "datablocksexternal" and rename them
        for funcWord in funcWords:
            if funcWord not in (u"[TextBlocks]", u"[PWL]", u"[CCBlacklist]"):
                continue  # Error ?!

            unifName = u"wiki/" + funcWord[1:-1]
            fullPath = join(dataDir, funcWord + pagefileSuffix)

            icf = iterCompatibleFilename(unifName, u".data")

            for i in range(10):  # Actual "while True", but that's too dangerous
                newFilename = icf.next()
                newPath = join(dataDir, newFilename)

                if exists(pathEnc(newPath)):
                    # A file with the designated new name of fn already exists
                    # -> do nothing
                    continue

                try:
                    rename(pathEnc(fullPath), pathEnc(newPath))

                    # We don't use coarsening here for the FSB because a different
                    # coarsening setting can't exist for the old wiki format
                    connwrap.execSqlInsert(
                        "datablocksexternal",
                        ("unifiedname", "filepath", "filenamelowercase",
                         "filesignature"),
                        (unifName, newFilename, newFilename.lower(),
                         getFileSignatureBlock(newPath)))
                    connwrap.execSql("delete from wikiwords where word = ?",
                                     (funcWord, ))
                    break
                except (IOError, OSError):
                    traceback.print_exc()
                    continue

        # --- WikiPad 2.0alpha1 reached (formatver=4, writecompatver=4,
        #         readcompatver=4) ---

        formatver = 4

    if formatver == 4:
        # (Re)fill "defaultvalues" and read them into connection wrapper
        connwrap.fillDefaultValues()
        connwrap.readDefaultValues()

        # Recreate table "todos" with new schema
        connwrap.execSql("drop table todos")
        changeTableSchema(connwrap, "todos", TABLE_DEFINITIONS["todos"])

        # Rename table "wikiwordprops" to "wikiwordattrs"
        changeTableSchema(connwrap, "wikiwordattrs",
                          TABLE_DEFINITIONS["wikiwordattrs"])
        connwrap.execSql("insert into wikiwordattrs(word, key, value) "
                         "select word, key, value from wikiwordprops")
        connwrap.execSql("drop table wikiwordprops")

        for tn in ("wikirelations", "wikiwordmatchterms"):
            changeTableSchema(connwrap, tn, TABLE_DEFINITIONS[tn])

        # Mark all wikiwords to need a rebuild
        connwrap.execSql("update wikiwords set metadataprocessed=0")

        formatver = 5

    # --- WikiPad 2.1alpha.1 reached (formatver=5, writecompatver=5,
    #         readcompatver=5) ---

    # Write format information
    for key, value in (
            ("formatver", str(VERSION_DB)),  # Version of database format the data was written
            ("writecompatver", str(VERSION_WRITECOMPAT)),  # Lowest format version which is write compatible
            ("readcompatver", str(VERSION_READCOMPAT)),  # Lowest format version which is read compatible
            ("branchtag", "WikidPad"),  # Tag of the WikidPad branch
            ("locale", "-")  # Locale for cached wordnormcase column. '-': column invalid
            ):
        setSettingsValue(connwrap, key, value)

    rebuildIndices(connwrap)

    connwrap.commit()
Example No. 16
def updateDatabase(connwrap, dataDir, pagefileSuffix):
    """
    Update a database from an older version to current (checkDatabaseFormat()
    should have returned 1 before calling this function)
    """
    connwrap.syncCommit()

    # Always remember that there is no automatic unicode<->utf-8 conversion
    # during this function!

    formatver = getSettingsInt(connwrap, "formatver")

    if formatver == 0:
        # Insert in table wikiwords column wordnormcase
        changeTableSchema(connwrap, "wikiwords",
                          TABLE_DEFINITIONS["wikiwords"])

        formatver = 1

    # --- WikiPad 1.8beta1 reached (formatver=1, writecompatver=1,
    #         readcompatver=1) ---

    if formatver == 1:
        # Update "wikiwords" schema and create new tables
        for tn in ("wikiwords", "wikiwordmatchterms", "datablocks",
                   "datablocksexternal"):
            changeTableSchema(connwrap, tn, TABLE_DEFINITIONS[tn])

        # Transfer "search_views" data to "datablocks" table
        searches = connwrap.execSqlQuery(
            "select title, datablock from search_views")

        for title, data in searches:
            connwrap.execSql(
                "insert into datablocks(unifiedname, data) "+\
                "values (?, ?)", ("savedsearch/" + title, sqlite.Binary(data)))

        connwrap.execSql("drop table search_views")

        allWords = connwrap.execSqlQuerySingleColumn(
            "select word from wikiwords")

        # Divide into functional and wiki pages
        wikiWords = []
        funcWords = []
        for w in allWords:
            w = w.decode("utf-8")
            if w.startswith('['):
                funcWords.append(w)
            else:
                wikiWords.append(w)

        # Fill the new fields in table "wikiwords"
        for wikiWord in wikiWords:
            filename = wikiWord + pagefileSuffix
            fullPath = join(dataDir, filename)
            try:
                # We don't use coarsening here for the FSB because a different
                # coarsening setting can't exist for the old wiki format
                filesig = getFileSignatureBlock(fullPath)
            except (IOError, WindowsError):
                traceback.print_exc()
                continue

            connwrap.execSql(
                "update wikiwords set filepath = ?, "
                "filenamelowercase = ?, filesignature = ? "
                "where word = ?",
                (filename.encode("utf-8"), filename.lower().encode("utf-8"),
                 sqlite.Binary(filesig), wikiWord.encode("utf-8")))

        # Move functional pages to new table "datablocksexternal" and rename them
        for funcWord in funcWords:
            if funcWord not in ("[TextBlocks]", "[PWL]", "[CCBlacklist]"):
                continue  # Error ?!

            unifName = "wiki/" + funcWord[1:-1]
            fullPath = join(dataDir, funcWord + pagefileSuffix)

            icf = iterCompatibleFilename(unifName, ".data")

            for i in range(10):  # Actual "while True", but that's too dangerous
                newFilename = next(icf)
                newPath = join(dataDir, newFilename)

                if exists(pathEnc(newPath)):
                    # A file with the designated new name of fn already exists
                    # -> do nothing
                    continue

                try:
                    rename(pathEnc(fullPath), pathEnc(newPath))
                    # We don't use coarsening here for the FSB because a different
                    # coarsening setting can't exist for the old wiki format
                    connwrap.execSql(
                        "insert into datablocksexternal(unifiedname, filepath, "
                        "filenamelowercase, filesignature) values (?, ?, ?, ?)",
                        (unifName.encode("utf-8"), newFilename.encode("utf-8"),
                         newFilename.lower().encode("utf-8"),
                         sqlite.Binary(getFileSignatureBlock(newPath))))
                    connwrap.execSql("delete from wikiwords where word = ?",
                                     (funcWord.encode("utf-8"), ))
                    break
                except (IOError, OSError):
                    traceback.print_exc()
                    continue

        formatver = 2

    # --- WikiPad 2.0alpha1 reached (formatver=2, writecompatver=2,
    #         readcompatver=2) ---

    if formatver == 2:
        # Recreate table "todos"
        connwrap.execSql("drop table todos;")
        changeTableSchema(connwrap, "todos", TABLE_DEFINITIONS["todos"])
        connwrap.execSql("update todos set firstcharpos=-1;")

        # Rename table "wikiwordprops" to "wikiwordattrs"
        changeTableSchema(connwrap, "wikiwordattrs",
                          TABLE_DEFINITIONS["wikiwordattrs"])
        connwrap.execSql("insert into wikiwordattrs(word, key, value, "
                         "firstcharpos, charlength) select word, key, value, "
                         "firstcharpos, -1 from wikiwordprops;")
        connwrap.execSql("drop table wikiwordprops;")

        for tn in ("wikirelations", "wikiwordmatchterms"):
            changeTableSchema(connwrap, tn, TABLE_DEFINITIONS[tn])
            connwrap.execSql("update %s set firstcharpos=-1;" % tn)

        # Mark all wikiwords to need a rebuild
        connwrap.execSql("update wikiwords set metadataprocessed=0;")

        formatver = 3

    # --- WikiPad 2.1alpha1 reached (formatver=3, writecompatver=3,
    #         readcompatver=3) ---

    connwrap.executemany(
        "insert or replace into settings(key, value) " + "values (?, ?)",
        (
            ("formatver", str(VERSION_DB)),  # Version of database format the data was written
            ("writecompatver", str(VERSION_WRITECOMPAT)),  # Lowest format version which is write compatible
            ("readcompatver", str(VERSION_READCOMPAT)),  # Lowest format version which is read compatible
            ("branchtag", "WikidPad")  # Tag of the WikidPad branch
        ))

    rebuildIndices(connwrap)

    connwrap.syncCommit()
Example No. 17
def updateDatabase(connwrap, dataDir, pagefileSuffix):
    """
    Update a database from an older version to current (checkDatabaseFormat()
    should have returned 1 before calling this function)
    """
    connwrap.commit()

    indices = connwrap.execSqlQuerySingleColumn("select INDEX_NAME from __indices__")
    tables = connwrap.execSqlQuerySingleColumn("select TABLE_NAME from __table_names__")

    indices = map(string.upper, indices)
    tables = map(string.upper, tables)
    
    # updatedTables = []
    
    if not "SETTINGS" in tables:
        # We are prior WikidPad 1.2beta2 (which writes format version 0)
        
        if "WIKIWORDPROPS_PKEY" in indices:
#             print "dropping index wikiwordprops_pkey"
            connwrap.execSql("drop index wikiwordprops_pkey")
#         if "WIKIWORDPROPS_WORD" not in indices:
#             print "creating index wikiwordprops_word"
#             connwrap.execSql("create index wikiwordprops_word on wikiwordprops(word)")
#         if "WIKIRELATIONS_WORD" not in indices:
#             print "creating index wikirelations_word"
#             connwrap.execSql("create index wikirelations_word on wikirelations(word)")
        if "REGISTRATION" in tables:
            connwrap.execSql("drop table registration")

        # Update search_views
        searches = connwrap.execSqlQuerySingleColumn(
                "select search from search_views")
                
        changeTableSchema(connwrap, "search_views", 
                TABLE_DEFINITIONS["search_views"])
        
        for search in searches:
            searchOp = SearchReplaceOperation()
            searchOp.searchStr = search
            searchOp.wikiWide = True
            searchOp.booleanOp = True

            try:
                # Raises exception if search is invalid
                searchOp.rebuildSearchOpTree()
            except:
                continue

            datablock = searchOp.getPackedSettings()

            connwrap.execSql(
                "insert into search_views(title, datablock) "+\
                "values (?, ?)", (searchOp.getTitle(), datablock))

        formatver = 0
        
        changeTableSchema(connwrap, "settings", 
                TABLE_DEFINITIONS["settings"])
        
        # Write initial format versions
        for key, value in (
                ("formatver", "0"),  # Version of database format the data was written
                ("writecompatver", "0"),  # Lowest format version which is write compatible
                ("readcompatver", "0"),  # Lowest format version which is read compatible
                ("branchtag", "WikidPad")  # Tag of the WikidPad branch
                ):
            setSettingsValue(connwrap, key, value)


        # --- WikiPad 1.20beta2 reached (formatver=0, writecompatver=0,
        #         readcompatver=0) ---


    formatver = getSettingsInt(connwrap, "formatver")
    
    if formatver == 0:
        # From formatver 0 to 1, all filenames with brackets are renamed
        # to have no brackets
        filenames = glob.glob(join(mbcsEnc(dataDir, "replace")[0], '*.wiki'))
        for fn in filenames:
            fn = mbcsDec(fn, "replace")[0]
            bn = basename(fn)
            newbname = removeBracketsFilename(bn)
            if bn == newbname:
                continue
                    
            newname = mbcsEnc(join(dataDir, newbname), "replace")[0]
            if exists(pathEnc(newname)):
                # A file with the designated new name of fn already exists
                # -> do nothing
                continue
            
            try:
                rename(fn, newname)
            except (IOError, OSError):
                pass
        
        formatver = 1
        
        # --- WikiPad 1.20beta3 reached (formatver=1, writecompatver=1,
        #         readcompatver=1) ---

    if formatver == 1:
        # remove brackets from all wikiwords in database
        
        # table wikiwords
        dataIn = connwrap.execSqlQuery(
                "select word, created, modified from wikiwords")
        connwrap.execSql("drop table wikiwords")
        connwrap.commit()
        changeTableSchema(connwrap, "wikiwords", 
                TABLE_DEFINITIONS["wikiwords"])
        rebuildIndices(connwrap)
        
        uniqueCtl = {}
        for w, c, m in dataIn:
            w = oldWikiWordToLabel(w)
            if not uniqueCtl.has_key(w):
                connwrap.execSqlInsert("wikiwords", ("word", "created", 
                        "modified", "presentationdatablock"),
                        (w, c, m, ""))
#                 connwrap.execSql("insert into wikiwords(word, created, modified) "
#                         "values (?, ?, ?)", (w, c, m))
                uniqueCtl[w] = None

        # table wikirelations
        dataIn = connwrap.execSqlQuery(
                "select word, relation, created from wikirelations")
        connwrap.execSql("drop table wikirelations")
        connwrap.commit()
        changeTableSchema(connwrap, "wikirelations", 
                TABLE_DEFINITIONS["wikirelations"])
        rebuildIndices(connwrap)

        uniqueCtl = {}
        for w, r, c in dataIn:
            w, r = oldWikiWordToLabel(w), oldWikiWordToLabel(r)
            if not uniqueCtl.has_key((w, r)):
                connwrap.execSqlInsert("wikirelations", ("word", "relation", 
                        "created"), (w, r, c))
#                 connwrap.execSql("insert into wikirelations(word, relation, created) "
#                         "values (?, ?, ?)", (w, r, c))
                uniqueCtl[(w, r)] = None

        # table wikiwordprops
        dataIn = connwrap.execSqlQuery(
                "select word, key, value from wikiwordprops")
        connwrap.execSql("drop table wikiwordprops")
        connwrap.commit()
        changeTableSchema(connwrap, "wikiwordprops", 
                TABLE_DEFINITIONS["wikiwordprops_PRE2_1alpha01"])
        rebuildIndices(connwrap)

        for w, k, v in dataIn:
            connwrap.execSqlInsert("wikiwordprops", ("word", "key", 
                    "value"), (oldWikiWordToLabel(w), k, v),
                    tableDefault="wikiwordprops_PRE2_1alpha01")
#             connwrap.execSql("insert into wikiwordprops(word, key, value) "
#                     "values (?, ?, ?)", (oldWikiWordToLabel(w), k, v))

        # table todos
        dataIn = connwrap.execSqlQuery(
                "select word, todo from todos")
        connwrap.execSql("drop table todos")
        connwrap.commit()
        changeTableSchema(connwrap, "todos", 
                TABLE_DEFINITIONS["todos_PRE2_1alpha01"])
        rebuildIndices(connwrap)

        for w, t in dataIn:
            connwrap.execSqlInsert("todos", ("word", "todo"),
                    (oldWikiWordToLabel(w), t),
                    tableDefault="todos_PRE2_1alpha01")
#             connwrap.execSql("insert into todos(word, todo) "
#                     "values (?, ?)", (oldWikiWordToLabel(w), t))

        formatver = 2

        # --- WikiPad 1.6beta2 reached (formatver=2, writecompatver=2,
        #         readcompatver=2) ---

    if formatver == 2:
        changeTableSchema(connwrap, "wikiwords", 
                TABLE_DEFINITIONS["wikiwords"])
                
        # --- WikiPad 1.8beta1 reached (formatver=3, writecompatver=3,
        #         readcompatver=2) ---

        formatver = 3
        

    if formatver == 3:

        # Update "wikiwords" schema and create new tables
        for tn in ("wikiwords", "wikiwordmatchterms", "datablocks",
                "datablocksexternal", "defaultvalues"):
            changeTableSchema(connwrap, tn, TABLE_DEFINITIONS[tn])
        
        # (Re)fill "defaultvalues" and read them into connection wrapper
        connwrap.fillDefaultValues()
        connwrap.readDefaultValues()


        # Transfer "search_views" data to "datablocks" table
        searches = connwrap.execSqlQuery(
                "select title, datablock from search_views",
                strConv=(True, False))

        for title, data in searches:
            connwrap.execSql(
                "insert into datablocks(unifiedname, data) "+\
                "values (?, ?)", (u"savedsearch/" + title, data))

        connwrap.execSql("drop table search_views")

        allWords = connwrap.execSqlQuerySingleColumn("select word from wikiwords")
        
        # Divide into functional and wiki pages
        wikiWords = []
        funcWords = []
        for w in allWords:
            if w.startswith('['):
                funcWords.append(w)
            else:
                wikiWords.append(w)

        # Fill the new fields in table "wikiwords"
        for wikiWord in wikiWords:
            filename = wikiWord + pagefileSuffix
            fullPath = join(dataDir, filename)
            try:
                # We don't use coarsening here for the FSB because a different
                # coarsening setting can't exist for the old wiki format
                filesig = getFileSignatureBlock(fullPath)
            except (IOError, WindowsError):
                traceback.print_exc()
                continue

            connwrap.execSql("update wikiwords set filepath = ?, "
                    "filenamelowercase = ?, filesignature = ? "
                    "where word = ?", (filename, filename.lower(), filesig,
                    wikiWord))

        # Move functional pages to new table "datablocksexternal" and rename them
        for funcWord in funcWords:
            if funcWord not in (u"[TextBlocks]", u"[PWL]", u"[CCBlacklist]"):
                continue # Error ?!
            
            unifName = u"wiki/" + funcWord[1:-1]
            fullPath = join(dataDir, funcWord + pagefileSuffix)
            
            icf = iterCompatibleFilename(unifName, u".data")
            
            for i in range(10):  # Actual "while True", but that's too dangerous
                newFilename = icf.next()
                newPath = join(dataDir, newFilename)

                if exists(pathEnc(newPath)):
                    # A file with the designated new name of fn already exists
                    # -> do nothing
                    continue

                try:
                    rename(pathEnc(fullPath), pathEnc(newPath))

                    # We don't use coarsening here for the FSB because a different
                    # coarsening setting can't exist for the old wiki format
                    connwrap.execSqlInsert("datablocksexternal", ("unifiedname",
                            "filepath", "filenamelowercase", "filesignature"),
                            (unifName, newFilename, newFilename.lower(),
                            getFileSignatureBlock(newPath)))
                    connwrap.execSql("delete from wikiwords where word = ?",
                            (funcWord,))
                    break
                except (IOError, OSError):
                    traceback.print_exc()
                    continue


        # --- WikiPad 2.0alpha1 reached (formatver=4, writecompatver=4,
        #         readcompatver=4) ---

        formatver = 4
        
    if formatver == 4:
        # (Re)fill "defaultvalues" and read them into connection wrapper
        connwrap.fillDefaultValues()
        connwrap.readDefaultValues()

        # Recreate table "todos" with new schema
        connwrap.execSql("drop table todos")
        changeTableSchema(connwrap, "todos", TABLE_DEFINITIONS["todos"])

        # Rename table "wikiwordprops" to "wikiwordattrs"
        changeTableSchema(connwrap, "wikiwordattrs", TABLE_DEFINITIONS["wikiwordattrs"])
        connwrap.execSql("insert into wikiwordattrs(word, key, value) "
                "select word, key, value from wikiwordprops")
        connwrap.execSql("drop table wikiwordprops")

        for tn in ("wikirelations", "wikiwordmatchterms"):
            changeTableSchema(connwrap, tn, TABLE_DEFINITIONS[tn])

        # Mark all wikiwords to need a rebuild
        connwrap.execSql("update wikiwords set metadataprocessed=0")

        formatver = 5

    # --- WikiPad 2.1alpha.1 reached (formatver=5, writecompatver=5,
    #         readcompatver=5) ---



    # Write format information
    for key, value in (
            ("formatver", str(VERSION_DB)),  # Version of database format the data was written
            ("writecompatver", str(VERSION_WRITECOMPAT)),  # Lowest format version which is write compatible
            ("readcompatver", str(VERSION_READCOMPAT)),  # Lowest format version which is read compatible
            ("branchtag", "WikidPad"),  # Tag of the WikidPad branch
            ("locale", "-") # Locale for cached wordnormcase column. '-': column invalid
            ):
        setSettingsValue(connwrap, key, value)

    rebuildIndices(connwrap)
    
    connwrap.commit()
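Most of the examples above go through getSettingsInt() and setSettingsValue(), which are not included in these excerpts. Assuming they simply wrap the settings key/value table shown earlier, minimal illustrative versions (not the project's actual helpers) might look like the following; the select/update/insert split avoids "insert or replace" so the sketch is not tied to an SQLite backend.

def setSettingsValue(connwrap, key, value):
    # Illustrative upsert against the settings table (assumed schema: key, value).
    existing = connwrap.execSqlQuerySingleColumn(
            "select value from settings where key = ?", (key,))
    if existing:
        connwrap.execSql("update settings set value = ? where key = ?",
                (str(value), key))
    else:
        connwrap.execSql("insert into settings(key, value) values (?, ?)",
                (key, str(value)))

def getSettingsInt(connwrap, key, default=0):
    rows = connwrap.execSqlQuerySingleColumn(
            "select value from settings where key = ?", (key,))
    return int(rows[0]) if rows else default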