Example #1
def decodeObject(toDecode):
    """
    Take a string and turn it into an object
    """
    decoded = decodeObjectRaw(toDecode)
    doUpgrade()
    return decoded
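Most of the snippets collected on this page revolve around the same upgrade-on-unpickle pattern from twisted.persisted.styles. The sketch below shows that pattern in isolation, assuming only that twisted.persisted.styles is importable; the Record class, its payload attribute and the migrated flag are invented for the illustration and do not come from any project quoted here.

import pickle

from twisted.persisted import styles


class Record(styles.Versioned):
    # Hypothetical class, persisted while its schema was at version 1.
    persistenceVersion = 1

    def __init__(self):
        self.payload = "data"


blob = pickle.dumps(Record())  # serialized at version 1

# Later the schema evolves: bump the version and describe the migration.
Record.persistenceVersion = 2
Record.upgradeToVersion2 = lambda self: setattr(self, "migrated", True)

obj = pickle.loads(blob)  # old-format instance, not upgraded yet
styles.doUpgrade()        # runs the pending upgradeToVersionN methods
assert obj.migrated

Because the upgraded object carries the new version number when it is pickled again, a later load followed by another doUpgrade() leaves it alone, which is exactly what the "upgraded unnecessarily" assertions in the test examples below check.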
Example #2
 def test_sourcestamp_version3(self):
     pkl = textwrap.dedent("""\
         (ibuildbot.sourcestamp
         SourceStamp
         p1
         (dp2
         S'project'
         p3
         S''
         sS'ssid'
         p4
         I10
         sS'repository'
         p5
         S''
         sS'patch_info'
         p6
         NsS'buildbot.sourcestamp.SourceStamp.persistenceVersion'
         p7
         I2
         sS'patch'
         Nsb.""")
     ss = cPickle.loads(pkl)
     styles.doUpgrade()
     self.assertEqual(ss.sourcestampsetid, 10)
     self.assertEqual(ss.codebase, '')
Example #3
    def testVersionUpgrade(self):
        global MyVersioned
        class MyVersioned(styles.Versioned):
            persistenceVersion = 2
            v3 = 0
            v4 = 0

            def __init__(self):
                self.somedata = 'xxx'

            def upgradeToVersion3(self):
                self.v3 = self.v3 + 1

            def upgradeToVersion4(self):
                self.v4 = self.v4 + 1
        mv = MyVersioned()
        assert not (mv.v3 or mv.v4), "hasn't been upgraded yet"
        pickl = cPickle.dumps(mv)
        MyVersioned.persistenceVersion = 4
        obj = cPickle.loads(pickl)
        styles.doUpgrade()
        assert obj.v3, "didn't do version 3 upgrade"
        assert obj.v4, "didn't do version 4 upgrade"
        pickl = cPickle.dumps(obj)
        obj = cPickle.loads(pickl)
        styles.doUpgrade()
        assert obj.v3 == 1, "upgraded unnecessarily"
        assert obj.v4 == 1, "upgraded unnecessarily"
Example #4
    def loadBuildFromFile(self, number):
        filename = self.makeBuildFilename(number)
        try:
            log.msg("Loading builder %s's build %d from on-disk pickle" %
                    (self.name, number))
            with open(filename, "rb") as f:
                build = load(f)
            build.setProcessObjects(self, self.master)

            # (bug #1068) if we need to upgrade, we probably need to rewrite
            # this pickle, too.  We determine this by looking at the list of
            # Versioned objects that have been unpickled, and (after doUpgrade)
            # checking to see if any of them set wasUpgraded.  The Versioneds'
            # upgradeToVersionNN methods all set this.
            versioneds = styles.versionedsToUpgrade
            styles.doUpgrade()
            if True in [
                    hasattr(o, 'wasUpgraded') for o in versioneds.values()
            ]:
                log.msg("re-writing upgraded build pickle")
                build.saveYourself()

            # check that logfiles exist
            build.checkLogfiles()
            return build
        except IOError:
            raise IndexError("no such build %d" % number)
        except EOFError:
            raise IndexError("corrupted build pickle %d" % number)
Example #5
def load(filename, style, passphrase=None):
    """Load an object from a file.

    Deserialize an object from a file. The file can be encrypted.

    @param filename: string
    @param style: string (one of 'source', 'xml' or 'pickle')
    @param passphrase: string
    """
    mode = "r"
    if style == "source":
        from twisted.persisted.aot import unjellyFromSource as load
    elif style == "xml":
        from twisted.persisted.marmalade import unjellyFromXML as load
    else:
        load, mode = pickle.load, "rb"
    if passphrase:
        fp = StringIO.StringIO(_decrypt(passphrase, open(filename, "rb").read()))
    else:
        fp = open(filename, mode)
    mainMod = sys.modules["__main__"]
    ee = _EverythingEphemeral()
    sys.modules["__main__"] = ee
    ee.initRun = 1
    value = load(fp)
    sys.modules["__main__"] = mainMod
    styles.doUpgrade()
    ee.initRun = 0
    persistable = IPersistable(value, None)
    if persistable is not None:
        persistable.setStyle(style)
    return value
Example #6
def load(filename, style):
    """Load an object from a file.

    Deserialize an object from a file. The file can be encrypted.

    @param filename: string
    @param style: string (one of 'pickle' or 'source')
    """
    mode = 'r'
    if style=='source':
        from twisted.persisted.aot import unjellyFromSource as _load
    else:
        _load, mode = pickle.load, 'rb'

    fp = open(filename, mode)
    ee = _EverythingEphemeral(sys.modules['__main__'])
    sys.modules['__main__'] = ee
    ee.initRun = 1
    with fp:
        try:
            value = _load(fp)
        finally:
            # restore __main__ if an exception is raised.
            sys.modules['__main__'] = ee.mainMod

    styles.doUpgrade()
    ee.initRun = 0
    persistable = IPersistable(value, None)
    if persistable is not None:
        persistable.setStyle(style)
    return value
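The load() implementations in Examples #5 and #6 (and #8 below) wrap the actual unpickling in a swap of sys.modules['__main__']: objects that were pickled from a script may reference classes defined in that script's __main__, and the temporary placeholder keeps those lookups from failing outright while the file is read. The sketch below shows only the swap-and-restore structure; _FallbackMain is a simplified stand-in for Twisted's private _EverythingEphemeral (which additionally fabricates Ephemeral placeholders for names it cannot resolve), not its real implementation.

import pickle
import sys


class _FallbackMain(object):
    # Simplified: only forwards attribute lookups to the real __main__.
    def __init__(self, mainMod):
        self.mainMod = mainMod

    def __getattr__(self, name):
        return getattr(self.mainMod, name)


def load_pickle(path):
    realMain = sys.modules["__main__"]
    sys.modules["__main__"] = _FallbackMain(realMain)
    try:
        with open(path, "rb") as fp:
            return pickle.load(fp)
    finally:
        # Restore __main__ even if unpickling raises, as load() does above.
        sys.modules["__main__"] = realMain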
Example #7
 def load(filename):
     """
     Load package from disk, returns a package
     """
     if not zipfile.is_zipfile(filename):
         return None
     zippedFile = zipfile.ZipFile(filename, "r", zipfile.ZIP_DEFLATED)
     toDecode   = zippedFile.read(u"content.data")
     try:
         newPackage = decodeObjectRaw(toDecode)
         newPackage.afterUpgradeHandlers = []
         doUpgrade()
     except:
         import traceback
         traceback.print_exc()
         raise
     newPackage.filename = Path(filename)
     newPackage.resourceDir = TempDirPath()
     for filename in zippedFile.namelist():
         if unicode(filename, 'utf8') != u"content.data":
             outFile = open(newPackage.resourceDir/filename, "wb")
             outFile.write(zippedFile.read(filename))
     for handler in newPackage.afterUpgradeHandlers:
         handler()
     del newPackage.afterUpgradeHandlers
     return newPackage
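The afterUpgradeHandlers loop above is eXe-specific bookkeeping on top of doUpgrade(): upgrade steps queue callables that must only run once the whole object graph has been upgraded, and the loader runs them and then discards the list. The sketch below reproduces just that idea with twisted.persisted.styles; Package, indexed and the handler body are invented for the illustration and are not eXe's real API.

import pickle

from twisted.persisted import styles


class Package(styles.Versioned):
    # Illustrative stand-in for an eXe package object.
    persistenceVersion = 1

    def __init__(self):
        self.title = "demo"


blob = pickle.dumps(Package())

Package.persistenceVersion = 2


def upgradeToVersion2(self):
    # Defer work that needs the fully upgraded object graph.
    self.afterUpgradeHandlers = [lambda: setattr(self, "indexed", True)]


Package.upgradeToVersion2 = upgradeToVersion2

pkg = pickle.loads(blob)
styles.doUpgrade()  # runs upgradeToVersion2, which queues the handler
for handler in pkg.afterUpgradeHandlers:
    handler()       # post-upgrade pass, mirroring load() above
del pkg.afterUpgradeHandlers
assert pkg.indexed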
Example #8
def load(filename, style):
    """Load an object from a file.

    Deserialize an object from a file. The file can be encrypted.

    @param filename: string
    @param style: string (one of 'pickle' or 'source')
    """
    mode = 'r'
    if style == 'source':
        from twisted.persisted.aot import unjellyFromSource as _load
    else:
        _load, mode = pickle.load, 'rb'

    fp = open(filename, mode)
    ee = _EverythingEphemeral(sys.modules['__main__'])
    sys.modules['__main__'] = ee
    ee.initRun = 1
    with fp:
        try:
            value = _load(fp)
        finally:
            # restore __main__ if an exception is raised.
            sys.modules['__main__'] = ee.mainMod

    styles.doUpgrade()
    ee.initRun = 0
    persistable = IPersistable(value, None)
    if persistable is not None:
        persistable.setStyle(style)
    return value
Example #9
    def test_versionUpgrade(self):
        global MyVersioned
        class MyVersioned(styles.Versioned):
            persistenceVersion = 2
            persistenceForgets = ['garbagedata']
            v3 = 0
            v4 = 0

            def __init__(self):
                self.somedata = 'xxx'
                self.garbagedata = lambda q: 'cant persist'

            def upgradeToVersion3(self):
                self.v3 += 1

            def upgradeToVersion4(self):
                self.v4 += 1
        mv = MyVersioned()
        assert not (mv.v3 or mv.v4), "hasn't been upgraded yet"
        pickl = pickle.dumps(mv)
        MyVersioned.persistenceVersion = 4
        obj = pickle.loads(pickl)
        styles.doUpgrade()
        assert obj.v3, "didn't do version 3 upgrade"
        assert obj.v4, "didn't do version 4 upgrade"
        pickl = pickle.dumps(obj)
        obj = pickle.loads(pickl)
        styles.doUpgrade()
        assert obj.v3 == 1, "upgraded unnecessarily"
        assert obj.v4 == 1, "upgraded unnecessarily"
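Besides the version bump, test_versionUpgrade above also exercises persistenceForgets: attributes listed there are dropped from the state dict when a Versioned instance is pickled, which is what lets the unpicklable garbagedata lambda live on the in-memory object. A minimal sketch of just that behaviour, with invented Session/user/callback names:

import pickle

from twisted.persisted import styles


class Session(styles.Versioned):
    persistenceVersion = 1
    persistenceForgets = ["callback"]  # stripped from the pickled state

    def __init__(self):
        self.user = "alice"
        self.callback = lambda: None  # unpicklable, and not worth persisting


restored = pickle.loads(pickle.dumps(Session()))
styles.doUpgrade()  # nothing to upgrade here; just clears the registry
assert restored.user == "alice"
assert not hasattr(restored, "callback")  # forgotten during serialization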
Example #10
    def test_nonIdentityHash(self):
        global ClassWithCustomHash

        class ClassWithCustomHash(styles.Versioned):
            def __init__(self, unique, hash):
                self.unique = unique
                self.hash = hash

            def __hash__(self):
                return self.hash

        v1 = ClassWithCustomHash("v1", 0)
        v2 = ClassWithCustomHash("v2", 0)

        pkl = pickle.dumps((v1, v2))
        del v1, v2
        ClassWithCustomHash.persistenceVersion = 1
        ClassWithCustomHash.upgradeToVersion1 = lambda self: setattr(
            self, "upgraded", True)
        v1, v2 = pickle.loads(pkl)
        styles.doUpgrade()
        self.assertEqual(v1.unique, "v1")
        self.assertEqual(v2.unique, "v2")
        self.assertTrue(v1.upgraded)
        self.assertTrue(v2.upgraded)
Example #11
    def test_upgradeDeserializesObjectsRequiringUpgrade(self):
        global ToyClassA, ToyClassB

        class ToyClassA(styles.Versioned):
            pass

        class ToyClassB(styles.Versioned):
            pass

        x = ToyClassA()
        y = ToyClassB()
        pklA, pklB = pickle.dumps(x), pickle.dumps(y)
        del x, y
        ToyClassA.persistenceVersion = 1

        def upgradeToVersion1(self):
            self.y = pickle.loads(pklB)
            styles.doUpgrade()

        ToyClassA.upgradeToVersion1 = upgradeToVersion1
        ToyClassB.persistenceVersion = 1

        def setUpgraded(self):
            setattr(self, "upgraded", True)

        ToyClassB.upgradeToVersion1 = setUpgraded

        x = pickle.loads(pklA)
        styles.doUpgrade()
        self.assertTrue(x.y.upgraded)
Example #12
 def test_sourcestamp_version3(self):
     pkl = textwrap.dedent("""\
         (ibuildbot.sourcestamp
         SourceStamp
         p1
         (dp2
         S'project'
         p3
         S''
         sS'ssid'
         p4
         I10
         sS'repository'
         p5
         S''
         sS'patch_info'
         p6
         NsS'buildbot.sourcestamp.SourceStamp.persistenceVersion'
         p7
         I2
         sS'patch'
         Nsb.""")
     ss = cPickle.loads(pkl)
     styles.doUpgrade()
     self.assertEqual(ss.sourcestampsetid, 10)
     self.assertEqual(ss.codebase, '')
Example #13
def load(filename, style, passphrase=None):
    """Load an object from a file.

    Deserialize an object from a file. The file can be encrypted.

    @param filename: string
    @param style: string (one of 'source', 'xml' or 'pickle')
    @param passphrase: string
    """
    mode = 'r'
    if style == 'source':
        from twisted.persisted.aot import unjellyFromSource as load
    elif style == 'xml':
        from twisted.persisted.marmalade import unjellyFromXML as load
    else:
        load, mode = pickle.load, 'rb'
    if passphrase:
        fp = StringIO.StringIO(
            _decrypt(passphrase,
                     open(filename, 'rb').read()))
    else:
        fp = open(filename, mode)
    mainMod = sys.modules['__main__']
    ee = _EverythingEphemeral()
    sys.modules['__main__'] = ee
    ee.initRun = 1
    value = load(fp)
    sys.modules['__main__'] = mainMod
    styles.doUpgrade()
    ee.initRun = 0
    persistable = IPersistable(value, None)
    if persistable is not None:
        persistable.setStyle(style)
    return value
Example #14
    def testUpgradeDeserializesObjectsRequiringUpgrade(self):
        global ToyClassA, ToyClassB

        class ToyClassA(styles.Versioned):
            pass

        class ToyClassB(styles.Versioned):
            pass

        x = ToyClassA()
        y = ToyClassB()
        pklA, pklB = pickle.dumps(x), pickle.dumps(y)
        del x, y
        ToyClassA.persistenceVersion = 1

        def upgradeToVersion1(self):
            self.y = pickle.loads(pklB)
            styles.doUpgrade()

        ToyClassA.upgradeToVersion1 = upgradeToVersion1
        ToyClassB.persistenceVersion = 1
        ToyClassB.upgradeToVersion1 = lambda self: setattr(
            self, 'upgraded', True)

        x = pickle.loads(pklA)
        styles.doUpgrade()
        self.failUnless(x.y.upgraded)
Example #15
 def decode(filename, data):
     log.msg("Loading %s..." % (filename,))
     sys.modules['__main__'] = EverythingEphemeral()
     application = pickle.loads(data)
     sys.modules['__main__'] = mainMod
     styles.doUpgrade()
     return application
Example #16
    def test_versionUpgrade(self):
        global MyVersioned
        class MyVersioned(styles.Versioned):
            persistenceVersion = 2
            persistenceForgets = ['garbagedata']
            v3 = 0
            v4 = 0

            def __init__(self):
                self.somedata = 'xxx'
                self.garbagedata = lambda q: 'cant persist'

            def upgradeToVersion3(self):
                self.v3 += 1

            def upgradeToVersion4(self):
                self.v4 += 1
        mv = MyVersioned()
        assert not (mv.v3 or mv.v4), "hasn't been upgraded yet"
        pickl = pickle.dumps(mv)
        MyVersioned.persistenceVersion = 4
        obj = pickle.loads(pickl)
        styles.doUpgrade()
        assert obj.v3, "didn't do version 3 upgrade"
        assert obj.v4, "didn't do version 4 upgrade"
        pickl = pickle.dumps(obj)
        obj = pickle.loads(pickl)
        styles.doUpgrade()
        assert obj.v3 == 1, "upgraded unnecessarily"
        assert obj.v4 == 1, "upgraded unnecessarily"
Example #17
    def testNonIdentityHash(self):
        global ClassWithCustomHash

        class ClassWithCustomHash(styles.Versioned):
            def __init__(self, unique, hash):
                self.unique = unique
                self.hash = hash

            def __hash__(self):
                return self.hash

        v1 = ClassWithCustomHash('v1', 0)
        v2 = ClassWithCustomHash('v2', 0)

        pkl = pickle.dumps((v1, v2))
        del v1, v2
        ClassWithCustomHash.persistenceVersion = 1
        ClassWithCustomHash.upgradeToVersion1 = lambda self: setattr(
            self, 'upgraded', True)
        v1, v2 = pickle.loads(pkl)
        styles.doUpgrade()
        self.assertEquals(v1.unique, 'v1')
        self.assertEquals(v2.unique, 'v2')
        self.failUnless(v1.upgraded)
        self.failUnless(v2.upgraded)
Example #18
 def load(filename):
     """
     Load package from disk, returns a package.
     """
     if not zipfile.is_zipfile(filename):
         return None
     zippedFile = zipfile.ZipFile(filename, "r", zipfile.ZIP_DEFLATED)
     toDecode   = zippedFile.read(u"content.data")
     resourceDir = TempDirPath()
     for fn in zippedFile.namelist():
         if unicode(fn, 'utf8') != u"content.data":
             outFile = open(resourceDir/fn, "wb")
             outFile.write(zippedFile.read(fn))
     try:
         newPackage = decodeObjectRaw(toDecode)
         newPackage.afterUpgradeHandlers = []
         newPackage.resourceDir = resourceDir
         doUpgrade()
     except:
         import traceback
         traceback.print_exc()
         raise
     if newPackage.tempFile:
         newPackage.tempFile = False
     else:
         newPackage.filename = Path(filename)
     for handler in newPackage.afterUpgradeHandlers:
         handler()
     del newPackage.afterUpgradeHandlers
     newPackage.updateRecentDocuments(newPackage.filename)
     newPackage.isChanged = False
     return newPackage
Example #19
def decodeObject(toDecode):
    """
    Take a string and turn it into an object
    """
    decoded = decodeObjectRaw(toDecode)
    doUpgrade()
    return decoded
Example #20
 def decode(self):
     from twisted.persisted.marmalade import unjellyFromXML
     sys.modules['__main__'] = EverythingEphemeral()
     application = unjellyFromXML(StringIO.StringIO(self.data))
     sys.modules['__main__'] = mainMod
     styles.doUpgrade()
     return application
Example #21
    def loadBuildFromFile(self, number):
        filename = self.makeBuildFilename(number)
        try:
            log.msg("Loading builder %s's build %d from on-disk pickle"
                    % (self.name, number))
            with open(filename, "rb") as f:
                build = pickle.load(f)
            build.setProcessObjects(self, self.master)

            # (bug #1068) if we need to upgrade, we probably need to rewrite
            # this pickle, too.  We determine this by looking at the list of
            # Versioned objects that have been unpickled, and (after doUpgrade)
            # checking to see if any of them set wasUpgraded.  The Versioneds'
            # upgradeToVersionNN methods all set this.
            versioneds = styles.versionedsToUpgrade
            styles.doUpgrade()
            if True in [hasattr(o, 'wasUpgraded') for o in versioneds.values()]:
                log.msg("re-writing upgraded build pickle")
                build.saveYourself()

            # check that logfiles exist
            build.checkLogfiles()
            return build
        except IOError:
            raise IndexError("no such build %d" % number)
        except EOFError:
            raise IndexError("corrupted build pickle %d" % number)
Example #22
    def builderAdded(self, name, basedir, category=None, friendly_name=None, description=None, project=None):
        """
        @rtype: L{BuilderStatus}
        """
        filename = os.path.join(self.basedir, basedir, "builder")
        log.msg("trying to load status pickle from %s" % filename)
        builder_status = None

        if friendly_name is None:
            friendly_name = name

        try:
            with open(filename, "rb") as f:
                builder_status = load(f)
            builder_status.master = self.master
            builder_status.basedir = os.path.join(self.basedir, basedir)

            # (bug #1068) if we need to upgrade, we probably need to rewrite
            # this pickle, too.  We determine this by looking at the list of
            # Versioned objects that have been unpickled, and (after doUpgrade)
            # checking to see if any of them set wasUpgraded.  The Versioneds'
            # upgradeToVersionNN methods all set this.
            versioneds = styles.versionedsToUpgrade
            styles.doUpgrade()
            if True in [ hasattr(o, 'wasUpgraded') for o in versioneds.values() ]:
                log.msg("re-writing upgraded builder pickle")
                builder_status.saveYourself()

        except IOError:
            log.msg("no saved status pickle, creating a new one")
        except:
            log.msg("error while loading status pickle, creating a new one")
            log.msg("error follows:")
            klog.err_json()
        if not builder_status:
            builder_status = builder.BuilderStatus(name, category, self.master, friendly_name,
                                                   description, project=project)
            builder_status.addPointEvent(["builder", "created"])
        log.msg("added builder %s in category %s" % (name, category))
        # an unpickled object might not have category set from before,
        # so set it here to make sure
        builder_status.category = category
        builder_status.description = description
        builder_status.master = self.master
        builder_status.basedir = os.path.join(self.basedir, basedir)
        builder_status.name = name # it might have been updated
        builder_status.setStatus(self)
        builder_status.friendly_name = friendly_name

        if not os.path.isdir(builder_status.basedir):
            os.makedirs(builder_status.basedir)
        builder_status.determineNextBuildNumber()

        builder_status.setBigState("offline")

        for t in self.watchers:
            self.announceNewBuilder(t, name, builder_status)

        return builder_status
Example #23
 def decode(self):
     from twisted.persisted.aot import unjellyFromSource
     sys.modules['__main__'] = EverythingEphemeral()
     application = unjellyFromSource(StringIO.StringIO(self.data))
     application.persistStyle = "aot"
     sys.modules['__main__'] = mainMod
     styles.doUpgrade()
     return application
Example #24
 def decode(filename, data):
     from twisted.persisted.aot import unjellyFromSource
     log.msg("Loading %s..." % (filename,))
     sys.modules['__main__'] = EverythingEphemeral()
     application = unjellyFromSource(StringIO.StringIO(data))
     application.persistStyle = 'aot'
     sys.modules['__main__'] = mainMod
     styles.doUpgrade()
     return application
Example #25
 def decode(filename, data):
     from twisted.persisted.marmalade import unjellyFromXML
     log.msg('<Loading file="%s" />' % (filename,))
     sys.modules['__main__'] = EverythingEphemeral()
     application = unjellyFromXML(StringIO.StringIO(data))
     application.persistStyle = 'xml'
     sys.modules['__main__'] = mainMod
     styles.doUpgrade()
     return application
Example #26
 def test_upgrade(self):
     self.patch(
         BuildStepStatus, "upgradeToVersion1", lambda _: self.fail("BuildStepStatus.upgradeToVersion1 called")
     )
     self.patch(BuildStatus, "upgradeToVersion1", lambda _: self.fail("BuildStatus.upgradeToVersion1 called"))
     self.patch(BuilderStatus, "upgradeToVersion1", lambda _: self.fail("BuilderStatus.upgradeToVersion1 called"))
     pkl_result = pickle.loads(self.pickle_data)
     styles.doUpgrade()
     del pkl_result
Example #27
 def testNullVersionUpgrade(self):
     global NullVersioned
     class NullVersioned:
         ok = 0
     pkcl = pickle.dumps(NullVersioned())
     class NullVersioned(styles.Versioned):
         def upgradeToVersion1(self):
             self.ok = 1
     mnv = pickle.loads(pkcl)
     styles.doUpgrade()
     assert mnv.ok, "initial upgrade not run!"
Example #28
 def testNullVersionUpgrade(self):
     global NullVersioned
     class NullVersioned:
         ok = 0
     pkcl = cPickle.dumps(NullVersioned())
     class NullVersioned(styles.Versioned):
         def upgradeToVersion1(self):
             self.ok = 1
     mnv = cPickle.loads(pkcl)
     styles.doUpgrade()
     assert mnv.ok, "initial upgrade not run!"
Example #29
    def builderAdded(self, name, basedir, category=None, description=None):
        """
        @rtype: L{BuilderStatus}
        """
        filename = os.path.join(self.basedir, basedir, "builder")
        log.msg("trying to load status pickle from %s" % filename)
        builder_status = None
        try:
            with open(filename, "rb") as f:
                builder_status = load(f)
            builder_status.master = self.master

            # (bug #1068) if we need to upgrade, we probably need to rewrite
            # this pickle, too.  We determine this by looking at the list of
            # Versioned objects that have been unpickled, and (after doUpgrade)
            # checking to see if any of them set wasUpgraded.  The Versioneds'
            # upgradeToVersionNN methods all set this.
            versioneds = styles.versionedsToUpgrade
            styles.doUpgrade()
            if True in [hasattr(o, 'wasUpgraded') for o in versioneds.values()]:
                log.msg("re-writing upgraded builder pickle")
                builder_status.saveYourself()

        except IOError:
            log.msg("no saved status pickle, creating a new one")
        except:
            log.msg("error while loading status pickle, creating a new one")
            log.msg("error follows:")
            log.err()
        if not builder_status:
            builder_status = builder.BuilderStatus(name, category, self.master,
                                                   description)
            builder_status.addPointEvent(["builder", "created"])
        log.msg("added builder %s in category %s" % (name, category))
        # an unpickled object might not have category set from before,
        # so set it here to make sure
        builder_status.category = category
        builder_status.description = description
        builder_status.master = self.master
        builder_status.basedir = os.path.join(self.basedir, basedir)
        builder_status.name = name  # it might have been updated
        builder_status.status = self

        if not os.path.isdir(builder_status.basedir):
            os.makedirs(builder_status.basedir)
        builder_status.determineNextBuildNumber()

        builder_status.setBigState("offline")

        for t in self.watchers:
            self.announceNewBuilder(t, name, builder_status)

        return builder_status
Example #30
 def loadChanges(self):
     filename = os.path.join(self.basedir, "changes.pck")
     try:
         changes = load(open(filename, "rb"))
         styles.doUpgrade()
     except IOError:
         log.msg("changes.pck missing, using new one")
         changes = ChangeMaster()
     except EOFError:
         log.msg("corrupted changes.pck, using new one")
         changes = ChangeMaster()
     self.useChanges(changes)
Example #31
 def loadChanges(self):
     filename = os.path.join(self.basedir, "changes.pck")
     try:
         changes = load(open(filename, "rb"))
         styles.doUpgrade()
     except IOError:
         log.msg("changes.pck missing, using new one")
         changes = ChangeMaster()
     except EOFError:
         log.msg("corrupted changes.pck, using new one")
         changes = ChangeMaster()
     self.useChanges(changes)
Example #32
 def test_upgrade(self):
     self.patch(
         BuildStepStatus, 'upgradeToVersion1',
         lambda _: self.fail("BuildStepStatus.upgradeToVersion1 called"))
     self.patch(BuildStatus, 'upgradeToVersion1',
                lambda _: self.fail("BuildStatus.upgradeToVersion1 called"))
     self.patch(
         BuilderStatus, 'upgradeToVersion1',
         lambda _: self.fail("BuilderStatus.upgradeToVersion1 called"))
     pkl_result = cPickle.loads(self.pickle_data)
     styles.doUpgrade()
     del pkl_result
Example #33
 def test_nullVersionUpgrade(self):
     global NullVersioned
     class NullVersioned(object):
         def __init__(self):
             self.ok = 0
     pkcl = pickle.dumps(NullVersioned())
     class NullVersioned(styles.Versioned, object):
         persistenceVersion = 1
         def upgradeToVersion1(self):
             self.ok = 1
     mnv = pickle.loads(pkcl)
     styles.doUpgrade()
     assert mnv.ok, "initial upgrade not run!"
Example #34
 def test_nullVersionUpgrade(self):
     global NullVersioned
     class NullVersioned(object):
         def __init__(self):
             self.ok = 0
     pkcl = pickle.dumps(NullVersioned())
     class NullVersioned(styles.Versioned, object):
         persistenceVersion = 1
         def upgradeToVersion1(self):
             self.ok = 1
     mnv = pickle.loads(pkcl)
     styles.doUpgrade()
     assert mnv.ok, "initial upgrade not run!"
Example #35
    def builderAdded(self, name, basedir, tags=None, description=None):
        """
        @rtype: L{BuilderStatus}
        """
        filename = os.path.join(self.basedir, basedir, "builder")
        log.msg("trying to load status pickle from %s" % filename)
        builder_status = None
        try:
            with open(filename, "rb") as f:
                builder_status = pickle.load(f)
            builder_status.master = self.master

            # (bug #1068) if we need to upgrade, we probably need to rewrite
            # this pickle, too.  We determine this by looking at the list of
            # Versioned objects that have been unpickled, and (after doUpgrade)
            # checking to see if any of them set wasUpgraded.  The Versioneds'
            # upgradeToVersionNN methods all set this.
            versioneds = styles.versionedsToUpgrade
            styles.doUpgrade()
            if True in [
                    hasattr(o, 'wasUpgraded') for o in itervalues(versioneds)
            ]:
                log.msg("re-writing upgraded builder pickle")
                builder_status.saveYourself()

        except IOError:
            log.msg("no saved status pickle, creating a new one")
        except Exception:
            log.err("error while loading status pickle, creating a new one")
        if not builder_status:
            builder_status = builder.BuilderStatus(name, tags, self.master,
                                                   description)
            builder_status.addPointEvent(["builder", "created"])
        log.msg("added builder %s with tags %r" % (name, tags))
        # an unpickled object might not have tags set from before,
        # so set it here to make sure
        builder_status.setTags(tags)
        builder_status.description = description
        builder_status.master = self.master
        builder_status.basedir = os.path.join(self.basedir, basedir)
        builder_status.name = name  # it might have been updated
        builder_status.status = self

        builder_status.setBigState("offline")

        for t in self.watchers:
            self.announceNewBuilder(t, name, builder_status)

        return builder_status
Example #36
 def loadOID(self, oid):
     
     # maintenance note: when implementing future, truly async loadOID
     # methods, it may be useful to keep a dictionary around of
     # previously-loaded OIDs during recursive loads to make sure that we
     # don't send multiple requests to the DB for the same OID in the same
     # actual request.
     
     f = open(os.path.join(self.dirname, str(oid)))
     up = pickle.Unpickler(f)
     up.persistent_load = self.persistentLoad
     obj = up.load()
     # cheating...
     from twisted.persisted.styles import doUpgrade
     doUpgrade()
     return defer.succeed(obj)
Example #37
    def migrate_changes(self):
        # if we still have a changes.pck, then we need to migrate it
        changes_pickle = os.path.join(self.basedir, "changes.pck")
        if os.path.exists(changes_pickle):
            if not self.quiet: print "migrating changes.pck to database"

            # 'source' will be an old b.c.changes.ChangeMaster instance, with a
            # .changes attribute
            source = cPickle.load(open(changes_pickle, "rb"))
            styles.doUpgrade()

            if not self.quiet:
                print " (%d Change objects)" % len(source.changes)

            # first, scan for changes without a number.  If we find any, then we'll
            # renumber the changes sequentially
            have_unnumbered = False
            for c in source.changes:
                if c.revision and c.number is None:
                    have_unnumbered = True
                    break
            if have_unnumbered:
                n = 1
                for c in source.changes:
                    if c.revision:
                        c.number = n
                        n = n + 1

            # insert the changes
            cursor = self.conn.cursor()
            for c in source.changes:
                if not c.revision:
                    continue
                self._addChangeToDatabase(c, cursor)

            # update next_changeid
            max_changeid = max(
                [c.number for c in source.changes if c.revision] + [0])
            cursor.execute("""INSERT into changes_nextid VALUES (%d)""" %
                           (max_changeid + 1))

            if not self.quiet:
                print "moving changes.pck to changes.pck.old; delete it or keep it as a backup"
            os.rename(changes_pickle, changes_pickle + ".old")
        else:
            c = self.conn.cursor()
            c.execute("""INSERT into changes_nextid VALUES (1)""")
Example #38
    def migrate_changes(self):
        # if we still have a changes.pck, then we need to migrate it
        changes_pickle = os.path.join(self.basedir, "changes.pck")
        if os.path.exists(changes_pickle):
            if not self.quiet:
                print "migrating changes.pck to database"

            # 'source' will be an old b.c.changes.ChangeMaster instance, with a
            # .changes attribute
            source = cPickle.load(open(changes_pickle, "rb"))
            styles.doUpgrade()

            if not self.quiet:
                print " (%d Change objects)" % len(source.changes)

            # first, scan for changes without a number.  If we find any, then we'll
            # renumber the changes sequentially
            have_unnumbered = False
            for c in source.changes:
                if c.revision and c.number is None:
                    have_unnumbered = True
                    break
            if have_unnumbered:
                n = 1
                for c in source.changes:
                    if c.revision:
                        c.number = n
                        n = n + 1

            # insert the changes
            cursor = self.conn.cursor()
            for c in source.changes:
                if not c.revision:
                    continue
                self._addChangeToDatabase(c, cursor)

            # update next_changeid
            max_changeid = max([c.number for c in source.changes if c.revision] + [0])
            cursor.execute("""INSERT into changes_nextid VALUES (%d)""" % (max_changeid + 1))

            if not self.quiet:
                print "moving changes.pck to changes.pck.old; delete it or keep it as a backup"
            os.rename(changes_pickle, changes_pickle + ".old")
        else:
            c = self.conn.cursor()
            c.execute("""INSERT into changes_nextid VALUES (1)""")
Example #39
    def getBuildByNumber(self, number):
        # first look in currentBuilds
        for b in self.currentBuilds:
            if b.number == number:
                return self.touchBuildCache(b)

        # then in the buildCache
        try:
            b = self.buildCache[number]
        except KeyError:
            metrics.MetricCountEvent.log("buildCache.misses", 1)
        else:
            metrics.MetricCountEvent.log("buildCache.hits", 1)
            return self.touchBuildCache(b)

        # then fall back to loading it from disk
        filename = self.makeBuildFilename(number)
        try:
            log.msg("Loading builder %s's build %d from on-disk pickle" %
                    (self.name, number))
            with open(filename, "rb") as f:
                build = load(f)
            build.setProcessObjects(self, self.master)

            # (bug #1068) if we need to upgrade, we probably need to rewrite
            # this pickle, too.  We determine this by looking at the list of
            # Versioned objects that have been unpickled, and (after doUpgrade)
            # checking to see if any of them set wasUpgraded.  The Versioneds'
            # upgradeToVersionNN methods all set this.
            versioneds = styles.versionedsToUpgrade
            styles.doUpgrade()
            if True in [
                    hasattr(o, 'wasUpgraded') for o in versioneds.values()
            ]:
                log.msg("re-writing upgraded build pickle")
                build.saveYourself()

            # check that logfiles exist
            build.checkLogfiles()
            return self.touchBuildCache(build)
        except IOError:
            raise IndexError("no such build %d" % number)
        except EOFError:
            raise IndexError("corrupted build pickle %d" % number)
Example #40
    def getBuildByNumber(self, number):
        # first look in currentBuilds
        for b in self.currentBuilds:
            if b.number == number:
                return self.touchBuildCache(b)

        # then in the buildCache
        try:
            b = self.buildCache[number]
        except KeyError:
            metrics.MetricCountEvent.log("buildCache.misses", 1)
        else:
            metrics.MetricCountEvent.log("buildCache.hits", 1)
            return self.touchBuildCache(b)

        # then fall back to loading it from disk
        filename = self.makeBuildFilename(number)
        try:
            log.msg("Loading builder %s's build %d from on-disk pickle"
                % (self.name, number))
            with open(filename, "rb") as f:
                build = load(f)
            build.setProcessObjects(self, self.master)

            # (bug #1068) if we need to upgrade, we probably need to rewrite
            # this pickle, too.  We determine this by looking at the list of
            # Versioned objects that have been unpickled, and (after doUpgrade)
            # checking to see if any of them set wasUpgraded.  The Versioneds'
            # upgradeToVersionNN methods all set this.
            versioneds = styles.versionedsToUpgrade
            styles.doUpgrade()
            if True in [ hasattr(o, 'wasUpgraded') for o in versioneds.values() ]:
                log.msg("re-writing upgraded build pickle")
                build.saveYourself()

            # check that logfiles exist
            build.checkLogfiles()
            return self.touchBuildCache(build)
        except IOError:
            raise IndexError("no such build %d" % number)
        except EOFError:
            raise IndexError("corrupted build pickle %d" % number)
Example #41
 def testUpgradeDeserializesObjectsRequiringUpgrade(self):
     global ToyClassA, ToyClassB
     class ToyClassA(styles.Versioned):
         pass
     class ToyClassB(styles.Versioned):
         pass
     x = ToyClassA()
     y = ToyClassB()
     pklA, pklB = pickle.dumps(x), pickle.dumps(y)
     del x, y
     ToyClassA.persistenceVersion = 1
     def upgradeToVersion1(self):
         self.y = pickle.loads(pklB)
         styles.doUpgrade()
     ToyClassA.upgradeToVersion1 = upgradeToVersion1
     ToyClassB.persistenceVersion = 1
     ToyClassB.upgradeToVersion1 = lambda self: setattr(self, 'upgraded', True)
     x = pickle.loads(pklA)
     styles.doUpgrade()
     self.failUnless(x.y.upgraded)
Example #42
 def testNonIdentityHash(self):
     global ClassWithCustomHash
     class ClassWithCustomHash(styles.Versioned):
         def __init__(self, unique, hash):
             self.unique = unique
             self.hash = hash
         def __hash__(self):
             return self.hash
     v1 = ClassWithCustomHash('v1', 0)
     v2 = ClassWithCustomHash('v2', 0)
     pkl = pickle.dumps((v1, v2))
     del v1, v2
     ClassWithCustomHash.persistenceVersion = 1
     ClassWithCustomHash.upgradeToVersion1 = lambda self: setattr(self, 'upgraded', True)
     v1, v2 = pickle.loads(pkl)
     styles.doUpgrade()
     self.assertEquals(v1.unique, 'v1')
     self.assertEquals(v2.unique, 'v2')
     self.failUnless(v1.upgraded)
     self.failUnless(v2.upgraded)
Example #43
def load(filename, style, passphrase=None):
    """Load an object from a file.

    Deserialize an object from a file. The file can be encrypted.

    @param filename: string
    @param style: string (one of 'source', 'xml' or 'pickle')
    @param passphrase: string
    """
    mode = 'r'
    if style=='source':
        from twisted.persisted.aot import unjellyFromSource as _load
    elif style=='xml':
        from twisted.persisted.marmalade import unjellyFromXML as _load
    else:
        _load, mode = pickle.load, 'rb'
    if passphrase:
        fp = StringIO.StringIO(_decrypt(passphrase,
                                        open(filename, 'rb').read()))
    else:
        fp = open(filename, mode)
    ee = _EverythingEphemeral(sys.modules['__main__'])
    sys.modules['__main__'] = ee
    ee.initRun = 1
    try:
        value = _load(fp)
    finally:
        # restore __main__ if an exception is raised.
        sys.modules['__main__'] = ee.mainMod

    styles.doUpgrade()
    ee.initRun = 0
    persistable = IPersistable(value, None)
    if persistable is not None:
        persistable.setStyle(style)
    return value
Example #44
 def upgradeToVersion1(self):
     self.y = pickle.loads(pklB)
     styles.doUpgrade()
Example #45
def import_changes(migrate_engine):
    # get the basedir from the engine - see model.py if you're wondering
    # how it got there
    basedir = migrate_engine.buildbot_basedir

    # strip None from any of these values, just in case
    def remove_none(x):
        if x is None:
            return u""
        elif isinstance(x, str):
            return x.decode("utf8")
        else:
            return x

    # if we still have a changes.pck, then we need to migrate it
    changes_pickle = os.path.join(basedir, "changes.pck")
    if not os.path.exists(changes_pickle):
        migrate_engine.execute(changes_nextid.insert(), next_changeid=1)
        return

    # if not quiet: print "migrating changes.pck to database"

    # 'source' will be an old b.c.changes.ChangeMaster instance, with a
    # .changes attribute.  Note that we use 'r', and not 'rb', because these
    # pickles were written using the old text pickle format, which requires
    # newline translation
    with open(changes_pickle, "r") as f:
        source = pickle.load(f)
    styles.doUpgrade()

    # if not quiet: print " (%d Change objects)" % len(source.changes)

    # first, scan for changes without a number.  If we find any, then we'll
    # renumber the changes sequentially
    have_unnumbered = False
    for c in source.changes:
        if c.revision and c.number is None:
            have_unnumbered = True
            break
    if have_unnumbered:
        n = 1
        for c in source.changes:
            if c.revision:
                c.number = n
                n = n + 1

    # insert the changes
    for c in source.changes:
        if not c.revision:
            continue
        try:
            values = dict(changeid=c.number,
                          author=c.who,
                          comments=c.comments,
                          is_dir=0,
                          branch=c.branch,
                          revision=c.revision,
                          revlink=c.revlink,
                          when_timestamp=c.when,
                          category=c.category)

            values = dict([(k, remove_none(v)) for k, v in iteritems(values)])
        except UnicodeDecodeError, e:
            raise UnicodeError(
                "Trying to import change data as UTF-8 failed.  Please look at contrib/fix_changes_pickle_encoding.py: %s"
                % str(e))

        migrate_engine.execute(changes.insert(), **values)

        # NOTE: change_links is not populated, since it is deleted in db
        # version 20.  The table is still created, though.

        # sometimes c.files contains nested lists -- why, I do not know!  But we deal with
        # it all the same - see bug #915. We'll assume for now that c.files contains *either*
        # lists of filenames or plain filenames, not both.
        def flatten(l):
            if l and isinstance(l[0], list):
                rv = []
                for e in l:
                    if isinstance(e, list):
                        rv.extend(e)
                    else:
                        rv.append(e)
                return rv
            else:
                return l

        for filename in flatten(c.files):
            migrate_engine.execute(change_files.insert(),
                                   changeid=c.number,
                                   filename=filename)

        for propname, propvalue in iteritems(c.properties.properties):
            encoded_value = json.dumps(propvalue)
            migrate_engine.execute(change_properties.insert(),
                                   changeid=c.number,
                                   property_name=propname,
                                   property_value=encoded_value)
Example #46
    def load(filename, newLoad=True, destinationPackage=None):
        """
        Load package from disk, returns a package.
        """
        if not zipfile.is_zipfile(filename):
            return None

        zippedFile = zipfile.ZipFile(filename, "r")
        
        try:
            # Get the jellied package data
            toDecode   = zippedFile.read(u"content.data")
        except KeyError:
            log.info("no content.data, trying Common Cartridge/Content Package")
            newPackage = loadCC(zippedFile, filename)
            newPackage.tempFile = False
            newPackage.isChanged = False
            newPackage.filename = Path(filename)

            return newPackage
            
        # Need to add a TempDirPath because it is a nonpersistant member
        resourceDir = TempDirPath()

        # Extract resource files from package to temporary directory
        for fn in zippedFile.namelist():
            if unicode(fn, 'utf8') != u"content.data":
                outFile = open(resourceDir/fn, "wb")
                outFile.write(zippedFile.read(fn))
                outFile.flush()
                outFile.close()

        try:
            newPackage = decodeObjectRaw(toDecode)
            G.application.afterUpgradeHandlers = []
            newPackage.resourceDir = resourceDir
            G.application.afterUpgradeZombies2Delete = []

            if newLoad: 
                # provide newPackage to doUpgrade's versionUpgrade() to
                # correct old corrupt extracted packages by setting the
                # any corrupt package references to the new package:

                log.debug("load() about to doUpgrade newPackage \"" 
                        + newPackage._name + "\" " + repr(newPackage) )
                if hasattr(newPackage, 'resourceDir'):
                    log.debug("newPackage resourceDir = "
                            + newPackage.resourceDir)
                else:
                    # even though it was just set above? should not get here:
                    log.error("newPackage resourceDir has NO resourceDir!")

                doUpgrade(newPackage)

                # after doUpgrade, compare the largest found field ID:
                if G.application.maxFieldId >= Field.nextId:
                    Field.nextId = G.application.maxFieldId + 1

            else: 
                # and when merging, automatically set package references to
                # the destinationPackage, into which this is being merged:

                log.debug("load() about to merge doUpgrade newPackage \"" 
                        + newPackage._name + "\" " + repr(newPackage)
                        + " INTO destinationPackage \"" 
                        + destinationPackage._name + "\" " 
                        + repr(destinationPackage))
                
                log.debug("using their resourceDirs:")
                if hasattr(newPackage, 'resourceDir'):
                    log.debug("   newPackage resourceDir = " 
                            + newPackage.resourceDir)
                else:
                    log.error("newPackage has NO resourceDir!")
                if hasattr(destinationPackage, 'resourceDir'):
                    log.debug("   destinationPackage resourceDir = " 
                            + destinationPackage.resourceDir)
                else:
                    log.error("destinationPackage has NO resourceDir!")

                doUpgrade(destinationPackage, 
                        isMerge=True, preMergePackage=newPackage)

                # after doUpgrade, compare the largest found field ID:
                if G.application.maxFieldId >= Field.nextId:
                    Field.nextId = G.application.maxFieldId + 1

        except:
            import traceback
            traceback.print_exc()
            raise

        if newPackage.tempFile:
            # newPackage.filename was stored as its original filename
            newPackage.tempFile = False
        else:
            # newPackage.filename is the name that the package was last loaded from
            # or saved to
            newPackage.filename = Path(filename)

        # Let idevices and nodes handle any resource upgrading they may need to
        # Note: Package afterUpgradeHandlers *must* be done after Resources'
        # and the package should be updated before everything else,
        # so, prioritize with a 3-pass, 3-level calling setup
        # in order of: 1) resources, 2) package, 3) anything other objects
        for handler_priority in range(3):
          for handler in G.application.afterUpgradeHandlers:

            if handler_priority == 0 and \
            repr(handler.im_class)=="<class 'exe.engine.resource.Resource'>":
                # level-0 handlers: Resource
                handler()

            elif handler_priority == 1 and \
            repr(handler.im_class)=="<class 'exe.engine.package.Package'>":
                # level-1 handlers: Package (requires resources first)
                if handler.im_self == newPackage: 
                    handler()
                else:
                    log.warn("Extra package object found, " \
                       + "ignoring its afterUpgradeHandler: " \
                       + repr(handler))

            elif handler_priority == 2 and \
            repr(handler.im_class)!="<class 'exe.engine.resource.Resource'>" \
            and \
            repr(handler.im_class)!="<class 'exe.engine.package.Package'>":
                # level-2 handlers: all others
                handler()

        G.application.afterUpgradeHandlers = []

        num_zombies = len(G.application.afterUpgradeZombies2Delete)
        for i in range(num_zombies-1, -1, -1):
            zombie = G.application.afterUpgradeZombies2Delete[i]
            # now, the zombie list can contain nodes OR resources to delete.
            # if zombie is a node, then also pass in a pruning parameter..
            zombie_is_node = False
            if isinstance(zombie, Node):
                zombie_is_node = True

            if zombie_is_node: 
                zombie.delete(pruningZombies=True) 
            else:
                zombie.delete() 
            del zombie
        G.application.afterUpgradeZombies2Delete = []

        newPackage.updateRecentDocuments(newPackage.filename)
        newPackage.isChanged = False
        return newPackage
Example #47
def import_changes(migrate_engine):
    # get the basedir from the engine - see model.py if you're wondering
    # how it got there
    basedir = migrate_engine.buildbot_basedir

    # strip None from any of these values, just in case
    def remove_none(x):
        if x is None: return u""
        elif isinstance(x, str):
            return x.decode("utf8")
        else:
            return x

    # if we still have a changes.pck, then we need to migrate it
    changes_pickle = os.path.join(basedir, "changes.pck")
    if not os.path.exists(changes_pickle):
        migrate_engine.execute(changes_nextid.insert(),
                next_changeid=1)
        return

    #if not quiet: print "migrating changes.pck to database"

    # 'source' will be an old b.c.changes.ChangeMaster instance, with a
    # .changes attribute.  Note that we use 'r', and not 'rb', because these
    # pickles were written using the old text pickle format, which requires
    # newline translation
    source = cPickle.load(open(changes_pickle,"r"))
    styles.doUpgrade()

    #if not quiet: print " (%d Change objects)" % len(source.changes)

    # first, scan for changes without a number.  If we find any, then we'll
    # renumber the changes sequentially
    have_unnumbered = False
    for c in source.changes:
        if c.revision and c.number is None:
            have_unnumbered = True
            break
    if have_unnumbered:
        n = 1
        for c in source.changes:
            if c.revision:
                c.number = n
                n = n + 1

    # insert the changes
    for c in source.changes:
        if not c.revision:
            continue
        try:
            # Trim long comment fields to 1024 characters, but preserve header
            # and footer with important tags such as Cr-Commit-Position.
            trimmed_comments = c.comments
            if len(trimmed_comments) > 1024:
              header, footer = trimmed_comments[:506], trimmed_comments[-506:]
              trimmed_comments = '%s\n...skip...\n%s' % (header, footer)

            values = dict(
                    changeid=c.number,
                    author=c.who[:256],
                    comments=trimmed_comments,
                    is_dir=c.isdir,
                    branch=c.branch[:256],
                    revision=c.revision[:256],
                    revlink=c.revlink[:256],
                    when_timestamp=c.when,
                    category=c.category)
            values = dict([ (k, remove_none(v)) for k, v in values.iteritems() ])
        except UnicodeDecodeError, e:
            raise UnicodeError("Trying to import change data as UTF-8 failed.  Please look at contrib/fix_changes_pickle_encoding.py: %s" % str(e))

        migrate_engine.execute(changes.insert(), **values)

        for link in c.links:
            migrate_engine.execute(change_links.insert(),
                    changeid=c.number, link=link)

        # sometimes c.files contains nested lists -- why, I do not know!  But we deal with
        # it all the same - see bug #915. We'll assume for now that c.files contains *either*
        # lists of filenames or plain filenames, not both.
        def flatten(l):
            if l and type(l[0]) == list:
                rv = []
                for e in l:
                    if type(e) == list:
                        rv.extend(e)
                    else:
                        rv.append(e)
                return rv
            else:
                return l
        for filename in flatten(c.files):
            migrate_engine.execute(change_files.insert(),
                    changeid=c.number,
                    filename=filename)

        for propname,propvalue in c.properties.properties.items():
            encoded_value = json.dumps(propvalue)
            migrate_engine.execute(change_properties.insert(),
                    changeid=c.number,
                    property_name=propname,
                    property_value=encoded_value)
Example #48
 def load(filename, newLoad=True, destinationPackage=None):
     """
     Load package from disk, returns a package.
     """
     if not zipfile.is_zipfile(filename):
         return None
     zippedFile = zipfile.ZipFile(filename, "r")
     toDecode   = zippedFile.read(u"content.data")
     resourceDir = TempDirPath()
     for fn in zippedFile.namelist():
         if unicode(fn, 'utf8') != u"content.data":
             outFile = open(resourceDir/fn, "wb")
             outFile.write(zippedFile.read(fn))
             outFile.flush()
             outFile.close()
     try:
         newPackage = decodeObjectRaw(toDecode)
         G.application.afterUpgradeHandlers = []
         newPackage.resourceDir = resourceDir
         G.application.afterUpgradeZombies2Delete = []
         if newLoad: 
             log.debug("load() about to doUpgrade newPackage \"" 
                     + newPackage._name + "\" " + repr(newPackage) )
             if hasattr(newPackage, 'resourceDir'):
                 log.debug("newPackage resourceDir = "
                         + newPackage.resourceDir)
             else:
                 log.error("newPackage resourceDir has NO resourceDir!")
             doUpgrade(newPackage)
         else: 
             log.debug("load() about to merge doUpgrade newPackage \"" 
                     + newPackage._name + "\" " + repr(newPackage)
                     + " INTO destinationPackage \"" 
                     + destinationPackage._name + "\" " 
                     + repr(destinationPackage))
             log.debug("using their resourceDirs:")
             if hasattr(newPackage, 'resourceDir'):
                 log.debug("   newPackage resourceDir = " 
                         + newPackage.resourceDir)
             else:
                 log.error("newPackage has NO resourceDir!")
             if hasattr(destinationPackage, 'resourceDir'):
                 log.debug("   destinationPackage resourceDir = " 
                         + destinationPackage.resourceDir)
             else:
                 log.error("destinationPackage has NO resourceDir!")
             doUpgrade(destinationPackage, 
                     isMerge=True, preMergePackage=newPackage)
     except:
         import traceback
         traceback.print_exc()
         raise
     if newPackage.tempFile:
         newPackage.tempFile = False
     else:
         newPackage.filename = Path(filename)
     for handler_priority in range(3):
       for handler in G.application.afterUpgradeHandlers:
         if handler_priority == 0 and \
         repr(handler.im_class)=="<class 'exe.engine.resource.Resource'>":
             handler()
         elif handler_priority == 1 and \
         repr(handler.im_class)=="<class 'exe.engine.package.Package'>":
             if handler.im_self == newPackage: 
                 handler()
             else:
                 log.warn("Extra package object found, " \
                    + "ignoring its afterUpgradeHandler: " \
                    + repr(handler))
         elif handler_priority == 2 and \
         repr(handler.im_class)!="<class 'exe.engine.resource.Resource'>" \
         and \
         repr(handler.im_class)!="<class 'exe.engine.package.Package'>":
             handler()
     G.application.afterUpgradeHandlers = []
     num_zombies = len(G.application.afterUpgradeZombies2Delete)
     for i in range(num_zombies-1, -1, -1):
         zombie = G.application.afterUpgradeZombies2Delete[i]
         zombie_is_node = False
         if isinstance(zombie, Node):
             zombie_is_node = True
         if zombie_is_node: 
             zombie.delete(pruningZombies=True) 
         else:
             zombie.delete() 
         del zombie
     G.application.afterUpgradeZombies2Delete = []
     newPackage.updateRecentDocuments(newPackage.filename)
     newPackage.isChanged = False
     return newPackage
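The three nested passes near the end of this example run the collected afterUpgradeHandlers in a fixed order: Resource handlers first, then the handlers belonging to the package just loaded, then everything else. A compact, generic sketch of that dispatch, written against the same Python 2 bound-method attributes the example uses; the function name is illustrative, and only the two class-repr strings come from the code above:

def run_after_upgrade_handlers(handlers, package):
    # Decide which of the three passes a bound handler belongs to,
    # mirroring the repr() comparisons in the example above.
    def handler_pass(handler):
        cls = repr(handler.im_class)
        if cls == "<class 'exe.engine.resource.Resource'>":
            return 0
        if cls == "<class 'exe.engine.package.Package'>":
            return 1
        return 2

    for wanted in range(3):
        for handler in handlers:
            if handler_pass(handler) != wanted:
                continue
            # Package-level handlers only run for the package being loaded.
            if wanted == 1 and handler.im_self != package:
                continue
            handler()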
Exemple #49
0
def import_changes(migrate_engine):
    # get the basedir from the engine - see model.py if you're wondering
    # how it got there
    basedir = migrate_engine.buildbot_basedir

    # strip None from any of these values, just in case
    def remove_none(x):
        if x is None: return u""
        elif isinstance(x, str):
            return x.decode("utf8")
        else:
            return x

    # if we still have a changes.pck, then we need to migrate it
    changes_pickle = os.path.join(basedir, "changes.pck")
    if not os.path.exists(changes_pickle):
        migrate_engine.execute(changes_nextid.insert(),
                next_changeid=1)
        return

    #if not quiet: print "migrating changes.pck to database"

    # 'source' will be an old b.c.changes.ChangeMaster instance, with a
    # .changes attribute.  Note that we use 'r', and not 'rb', because these
    # pickles were written using the old text pickle format, which requires
    # newline translation
    source = cPickle.load(open(changes_pickle,"r"))
    styles.doUpgrade()

    #if not quiet: print " (%d Change objects)" % len(source.changes)

    # first, scan for changes without a number.  If we find any, then we'll
    # renumber the changes sequentially
    have_unnumbered = False
    for c in source.changes:
        if c.revision and c.number is None:
            have_unnumbered = True
            break
    if have_unnumbered:
        n = 1
        for c in source.changes:
            if c.revision:
                c.number = n
                n = n + 1

    # insert the changes
    for c in source.changes:
        if not c.revision:
            continue
        try:
            values = dict(
                    changeid=c.number,
                    author=c.who,
                    comments=c.comments,
                    is_dir=c.isdir,
                    branch=c.branch,
                    revision=c.revision,
                    revlink=c.revlink,
                    when_timestamp=c.when,
                    category=c.category)
            values = dict([ (k, remove_none(v)) for k, v in values.iteritems() ])
        except UnicodeDecodeError, e:
            raise UnicodeError("Trying to import change data as UTF-8 failed.  Please look at contrib/fix_changes_pickle_encoding.py: %s" % str(e))

        migrate_engine.execute(changes.insert(), **values)

        for link in c.links:
            migrate_engine.execute(change_links.insert(),
                    changeid=c.number, link=link)

        # sometimes c.files contains nested lists -- why, I do not know!  But we deal with
        # it all the same - see bug #915. We'll assume for now that c.files contains *either*
        # lists of filenames or plain filenames, not both.
        def flatten(l):
            if l and type(l[0]) == list:
                rv = []
                for e in l:
                    if type(e) == list:
                        rv.extend(e)
                    else:
                        rv.append(e)
                return rv
            else:
                return l
        for filename in flatten(c.files):
            migrate_engine.execute(change_files.insert(),
                    changeid=c.number,
                    filename=filename)

        for propname,propvalue in c.properties.properties.items():
            encoded_value = json.dumps(propvalue)
            migrate_engine.execute(change_properties.insert(),
                    changeid=c.number,
                    property_name=propname,
                    property_value=encoded_value)
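The flatten helper above only inspects the first element: when that is a list, one level of nesting is removed; when it is a plain filename, the whole list passes through untouched. A tiny stand-alone check of both shapes (file names are made up):

def flatten(l):
    # Same rule as above: flatten one level only when the first element is a list.
    if l and type(l[0]) == list:
        rv = []
        for e in l:
            if type(e) == list:
                rv.extend(e)
            else:
                rv.append(e)
        return rv
    return l

assert flatten([["Makefile", "setup.py"], "README"]) == ["Makefile", "setup.py", "README"]
assert flatten(["Makefile", "README"]) == ["Makefile", "README"]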
Exemple #50
0
 def decode(self):
     sys.modules['__main__'] = EverythingEphemeral()
     application = pickle.load(StringIO.StringIO(self.data))
     sys.modules['__main__'] = mainMod
     styles.doUpgrade()
     return application
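decode() above briefly points sys.modules['__main__'] at an EverythingEphemeral stand-in, so that classes pickled against the original script's __main__ resolve to placeholders rather than the real module, then puts the real module back. A generic sketch of that swap-and-restore pattern, with a try/finally added so the real __main__ is restored even if unpickling raises; the function and parameter names are illustrative, not Twisted's:

import sys

def unpickle_with_fake_main(data, fake_main, unpickler):
    # Temporarily substitute a stand-in for the __main__ module while the
    # pickle is loaded, then restore the real one no matter what happens.
    real_main = sys.modules['__main__']
    sys.modules['__main__'] = fake_main
    try:
        return unpickler(data)
    finally:
        sys.modules['__main__'] = real_main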
Exemple #51
0
    def load(filename, newLoad=True, destinationPackage=None):
        """
        Load package from disk, returns a package.
        """
        if not zipfile.is_zipfile(filename):
            return None

        zippedFile = zipfile.ZipFile(filename, "r")

        try:
            # Get the jellied package data
            toDecode = zippedFile.read(u"content.data")
        except KeyError:
            log.info("no content.data, trying Common Cartridge/Content Package")
            newPackage = loadCC(zippedFile, filename)
            newPackage.tempFile = False
            newPackage.isChanged = False
            newPackage.filename = Path(filename)

            return newPackage

        # Need to add a TempDirPath because it is a nonpersistant member
        resourceDir = TempDirPath()

        # Extract resource files from package to temporary directory
        for fn in zippedFile.namelist():
            if unicode(fn, "utf8") != u"content.data":
                outFile = open(resourceDir / fn, "wb")
                outFile.write(zippedFile.read(fn))
                outFile.flush()
                outFile.close()

        try:
            newPackage = decodeObjectRaw(toDecode)
            G.application.afterUpgradeHandlers = []
            newPackage.resourceDir = resourceDir
            G.application.afterUpgradeZombies2Delete = []

            if newLoad:
                # provide newPackage to doUpgrade's versionUpgrade() to
                # correct old corrupt extracted packages by setting the
                # any corrupt package references to the new package:

                log.debug('load() about to doUpgrade newPackage "' + newPackage._name + '" ' + repr(newPackage))
                if hasattr(newPackage, "resourceDir"):
                    log.debug("newPackage resourceDir = " + newPackage.resourceDir)
                else:
                    # even though it was just set above? should not get here:
                    log.error("newPackage resourceDir has NO resourceDir!")

                doUpgrade(newPackage)

                # after doUpgrade, compare the largest found field ID:
                if G.application.maxFieldId >= Field.nextId:
                    Field.nextId = G.application.maxFieldId + 1

            else:
                # and when merging, automatically set package references to
                # the destinationPackage, into which this is being merged:

                log.debug(
                    'load() about to merge doUpgrade newPackage "'
                    + newPackage._name
                    + '" '
                    + repr(newPackage)
                    + ' INTO destinationPackage "'
                    + destinationPackage._name
                    + '" '
                    + repr(destinationPackage)
                )

                log.debug("using their resourceDirs:")
                if hasattr(newPackage, "resourceDir"):
                    log.debug("   newPackage resourceDir = " + newPackage.resourceDir)
                else:
                    log.error("newPackage has NO resourceDir!")
                if hasattr(destinationPackage, "resourceDir"):
                    log.debug("   destinationPackage resourceDir = " + destinationPackage.resourceDir)
                else:
                    log.error("destinationPackage has NO resourceDir!")

                doUpgrade(destinationPackage, isMerge=True, preMergePackage=newPackage)

                # after doUpgrade, compare the largest found field ID:
                if G.application.maxFieldId >= Field.nextId:
                    Field.nextId = G.application.maxFieldId + 1

        except:
            import traceback

            traceback.print_exc()
            raise

        if newPackage.tempFile:
            # newPackage.filename was stored as its original filename
            newPackage.tempFile = False
        else:
            # newPackage.filename is the name that the package was last loaded from
            # or saved to
            newPackage.filename = Path(filename)

        # Let idevices and nodes handle any resource upgrading they may need to
        # Note: Package afterUpgradeHandlers *must* be done after Resources'
        # and the package should be updated before everything else,
        # so, prioritize with a 3-pass, 3-level calling setup
        # in order of: 1) resources, 2) package, 3) anything other objects
        for handler_priority in range(3):
            for handler in G.application.afterUpgradeHandlers:

                if handler_priority == 0 and repr(handler.im_class) == "<class 'exe.engine.resource.Resource'>":
                    # level-0 handlers: Resource
                    handler()

                elif handler_priority == 1 and repr(handler.im_class) == "<class 'exe.engine.package.Package'>":
                    # level-1 handlers: Package (requires resources first)
                    if handler.im_self == newPackage:
                        handler()
                    else:
                        log.warn("Extra package object found, " + "ignoring its afterUpgradeHandler: " + repr(handler))

                elif (
                    handler_priority == 2
                    and repr(handler.im_class) != "<class 'exe.engine.resource.Resource'>"
                    and repr(handler.im_class) != "<class 'exe.engine.package.Package'>"
                ):
                    # level-2 handlers: all others
                    handler()

        G.application.afterUpgradeHandlers = []

        num_zombies = len(G.application.afterUpgradeZombies2Delete)
        for i in range(num_zombies - 1, -1, -1):
            zombie = G.application.afterUpgradeZombies2Delete[i]
            # now, the zombie list can contain nodes OR resources to delete.
            # if zombie is a node, then also pass in a pruning parameter..
            zombie_is_node = False
            if isinstance(zombie, Node):
                zombie_is_node = True

            if zombie_is_node:
                zombie.delete(pruningZombies=True)
            else:
                zombie.delete()
            del zombie
        G.application.afterUpgradeZombies2Delete = []

        newPackage.updateRecentDocuments(newPackage.filename)
        newPackage.isChanged = False
        return newPackage
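The extraction loop above copies every archive member except content.data into a temporary directory before the serialized package data is decoded. A stand-alone sketch of just that step using only the standard library; TempDirPath is eXe's own helper, so tempfile.mkdtemp stands in for it here, and the caller is left to clean the directory up:

import tempfile
import zipfile

def extract_resources(filename):
    # Read the serialized package data and unpack every other member into a
    # fresh temporary directory, preserving any sub-paths inside the archive.
    zipped = zipfile.ZipFile(filename, "r")
    to_decode = zipped.read("content.data")    # raises KeyError if missing
    resource_dir = tempfile.mkdtemp()
    for fn in zipped.namelist():
        if fn != "content.data":
            zipped.extract(fn, resource_dir)
    return to_decode, resource_dir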
Exemple #52
0
 def upgradeToVersion1(self):
     self.y = pickle.loads(pklB)
     styles.doUpgrade()