def loadBuildFromFile(self, number):
    """Load one build's status from its on-disk pickle.

    Returns the unpickled build object, re-saving it first if the
    unpickling triggered a Versioned upgrade.

    Raises IndexError when the pickle file is missing (IOError) or
    truncated (EOFError), so callers can treat both as "no such build".
    """
    filename = self.makeBuildFilename(number)
    try:
        log.msg("Loading builder %s's build %d from on-disk pickle"
                % (self.name, number))
        with open(filename, "rb") as f:
            build = pickle.load(f)
        build.setProcessObjects(self, self.master)

        # (bug #1068) if we need to upgrade, we probably need to rewrite
        # this pickle, too.  We determine this by looking at the list of
        # Versioned objects that have been unpickled, and (after doUpgrade)
        # checking to see if any of them set wasUpgraded.  The Versioneds'
        # upgradeToVersionNN methods all set this.
        versioneds = styles.versionedsToUpgrade
        styles.doUpgrade()
        # any() with a generator short-circuits instead of building the
        # full list that "True in [...]" required
        if any(hasattr(o, 'wasUpgraded') for o in itervalues(versioneds)):
            log.msg("re-writing upgraded build pickle")
            build.saveYourself()

        # check that logfiles exist
        build.checkLogfiles()
        return build
    except IOError:
        raise IndexError("no such build %d" % number)
    except EOFError:
        raise IndexError("corrupted build pickle %d" % number)
def loadBuildFromFile(self, number):
    """Load one build's status from its on-disk pickle.

    Returns the unpickled build object, re-saving it first if the
    unpickling triggered a Versioned upgrade.

    Raises IndexError when the pickle file is missing (IOError) or
    truncated (EOFError), so callers can treat both as "no such build".
    """
    filename = self.makeBuildFilename(number)
    try:
        log.msg("Loading builder %s's build %d from on-disk pickle"
                % (self.name, number))
        with open(filename, "rb") as f:
            build = pickle.load(f)
        build.setProcessObjects(self, self.master)

        # (bug #1068) if we need to upgrade, we probably need to rewrite
        # this pickle, too.  We determine this by looking at the list of
        # Versioned objects that have been unpickled, and (after doUpgrade)
        # checking to see if any of them set wasUpgraded.  The Versioneds'
        # upgradeToVersionNN methods all set this.
        versioneds = styles.versionedsToUpgrade
        styles.doUpgrade()
        # any() with a generator short-circuits instead of building the
        # full list that "True in [...]" required
        if any(hasattr(o, 'wasUpgraded') for o in versioneds.values()):
            log.msg("re-writing upgraded build pickle")
            build.saveYourself()

        # check that logfiles exist
        build.checkLogfiles()
        return build
    except IOError:
        raise IndexError("no such build %d" % number)
    except EOFError:
        raise IndexError("corrupted build pickle %d" % number)
def fix_pickle_encoding(self, old_encoding):
    """Recode the on-disk changes pickle in place.

    Does the equivalent of master/contrib/fix_pickle_encoding.py: load
    changes.pck, recode its bytestrings from old_encoding, write it back.
    """
    path = os.path.join(self.basedir, "changes.pck")
    with open(path) as source:
        manager = pickle.load(source)
    manager.recode_changes(old_encoding, quiet=True)
    with open(path, "w") as sink:
        pickle.dump(manager, sink)
def builderAdded(self, name, basedir, tags=None, description=None):
    """Register a builder, loading its saved status pickle if present.

    Falls back to a fresh BuilderStatus when the pickle is missing or
    unreadable.

    @rtype: L{BuilderStatus}
    """
    filename = os.path.join(self.basedir, basedir, "builder")
    log.msg("trying to load status pickle from %s" % filename)
    builder_status = None
    try:
        with open(filename, "rb") as f:
            builder_status = pickle.load(f)
        builder_status.master = self.master

        # (bug #1068) if we need to upgrade, we probably need to rewrite
        # this pickle, too.  We determine this by looking at the list of
        # Versioned objects that have been unpickled, and (after doUpgrade)
        # checking to see if any of them set wasUpgraded.  The Versioneds'
        # upgradeToVersionNN methods all set this.
        versioneds = styles.versionedsToUpgrade
        styles.doUpgrade()
        # any() with a generator short-circuits instead of building the
        # full list that "True in [...]" required
        if any(hasattr(o, 'wasUpgraded') for o in versioneds.values()):
            log.msg("re-writing upgraded builder pickle")
            builder_status.saveYourself()

    except IOError:
        log.msg("no saved status pickle, creating a new one")
    except Exception:
        log.err("error while loading status pickle, creating a new one")

    if not builder_status:
        builder_status = builder.BuilderStatus(name, tags, self.master,
                                               description)
        builder_status.addPointEvent(["builder", "created"])
    log.msg("added builder %s with tags %r" % (name, tags))
    # an unpickled object might not have tags set from before,
    # so set it here to make sure
    builder_status.setTags(tags)
    builder_status.description = description
    builder_status.master = self.master
    builder_status.basedir = os.path.join(self.basedir, basedir)
    builder_status.name = name  # it might have been updated
    builder_status.status = self

    if not os.path.isdir(builder_status.basedir):
        os.makedirs(builder_status.basedir)
    builder_status.determineNextBuildNumber()

    builder_status.setBigState("offline")

    for t in self.watchers:
        self.announceNewBuilder(t, name, builder_status)

    return builder_status
def check_pickles(_):
    # unpickle the builder status and walk all the way down to one of
    # its logfiles, to prove the whole on-disk chain is still readable
    pickle_path = os.path.join(self.basedir, 'builder', 'builder')
    with open(pickle_path, "rb") as fp:
        builder_status = pickle.load(fp)
    builder_status.master = self.master
    builder_status.basedir = os.path.join(self.basedir, 'builder')

    build = builder_status.loadBuildFromFile(0)
    first_log = build.getLogs()[0]
    self.assertIn('HEAD is now at', first_log.old_getText())
def builderAdded(self, name, basedir, tags=None, description=None):
    """Register a builder, loading its saved status pickle if present.

    Falls back to a fresh BuilderStatus when the pickle is missing or
    unreadable.

    @rtype: L{BuilderStatus}
    """
    filename = os.path.join(self.basedir, basedir, "builder")
    log.msg("trying to load status pickle from %s" % filename)
    builder_status = None
    try:
        with open(filename, "rb") as f:
            builder_status = pickle.load(f)
        builder_status.master = self.master

        # (bug #1068) if we need to upgrade, we probably need to rewrite
        # this pickle, too.  We determine this by looking at the list of
        # Versioned objects that have been unpickled, and (after doUpgrade)
        # checking to see if any of them set wasUpgraded.  The Versioneds'
        # upgradeToVersionNN methods all set this.
        versioneds = styles.versionedsToUpgrade
        styles.doUpgrade()
        # any() with a generator short-circuits instead of building the
        # full list that "True in [...]" required
        if any(hasattr(o, 'wasUpgraded') for o in versioneds.values()):
            log.msg("re-writing upgraded builder pickle")
            builder_status.saveYourself()

    except IOError:
        log.msg("no saved status pickle, creating a new one")
    except Exception:
        # was a bare "except:", which would also have swallowed
        # KeyboardInterrupt and SystemExit
        log.msg("error while loading status pickle, creating a new one")
        log.msg("error follows:")
        log.err()

    if not builder_status:
        builder_status = builder.BuilderStatus(name, tags, self.master,
                                               description)
        builder_status.addPointEvent(["builder", "created"])
    log.msg("added builder %s with tags %r" % (name, tags))
    # an unpickled object might not have tags set from before,
    # so set it here to make sure
    builder_status.setTags(tags)
    builder_status.description = description
    builder_status.master = self.master
    builder_status.basedir = os.path.join(self.basedir, basedir)
    builder_status.name = name  # it might have been updated
    builder_status.status = self

    if not os.path.isdir(builder_status.basedir):
        os.makedirs(builder_status.basedir)
    builder_status.determineNextBuildNumber()

    builder_status.setBigState("offline")

    for t in self.watchers:
        self.announceNewBuilder(t, name, builder_status)

    return builder_status
def check_pickles(_):
    # unpickle the builder status and walk all the way down to one of
    # its logfiles, to prove the whole on-disk chain is still readable
    pickle_path = os.path.join(self.basedir, 'builder', 'builder')
    with open(pickle_path, "rb") as fp:
        builder_status = pickle.load(fp)
    builder_status.master = self.master
    builder_status.basedir = os.path.join(self.basedir, 'builder')

    build = builder_status.loadBuildFromFile(1)
    first_log = build.getLogs()[0]
    self.assertIn('HEAD is now at', first_log.old_getText())

    # loading the same build a second time must yield the same metadata
    reloaded = builder_status.loadBuildFromFile(1)
    self.assertEqual(
        reloaded.getReason(),
        "The web-page 'rebuild' button was pressed by '<unknown>': \n")
def import_changes(migrate_engine):
    """Migrate a pre-database changes.pck pickle into the changes tables.

    When no pickle exists, only seeds the changes_nextid counter.
    Raises UnicodeError when a bytestring in the pickle is not valid
    UTF-8 (pointing the user at the recoding helper script).
    """
    # get the basedir from the engine - see model.py if you're wondering
    # how it got there
    basedir = migrate_engine.buildbot_basedir

    # strip None from any of these values, just in case
    def remove_none(x):
        if x is None:
            return u""
        elif isinstance(x, str):
            # (py2) bytestrings must already be valid UTF-8 here; a
            # failure propagates as UnicodeDecodeError, handled below
            return x.decode("utf8")
        else:
            return x

    # sometimes c.files contains nested lists -- why, I do not know! But we
    # deal with it all the same - see bug #915. We'll assume for now that
    # c.files contains *either* lists of filenames or plain filenames, not
    # both.  (Hoisted out of the per-change loop below, where the original
    # re-created this function on every iteration.)
    def flatten(l):
        if l and isinstance(l[0], list):
            rv = []
            for e in l:
                if isinstance(e, list):
                    rv.extend(e)
                else:
                    rv.append(e)
            return rv
        else:
            return l

    # if we still have a changes.pck, then we need to migrate it
    changes_pickle = os.path.join(basedir, "changes.pck")
    if not os.path.exists(changes_pickle):
        migrate_engine.execute(changes_nextid.insert(), next_changeid=1)
        return

    # if not quiet: print "migrating changes.pck to database"

    # 'source' will be an old b.c.changes.ChangeMaster instance, with a
    # .changes attribute.  Note that we use 'r', and not 'rb', because these
    # pickles were written using the old text pickle format, which requires
    # newline translation
    with open(changes_pickle, "r") as f:
        source = pickle.load(f)
    styles.doUpgrade()

    # if not quiet: print " (%d Change objects)" % len(source.changes)

    # first, scan for changes without a number.  If we find any, then we'll
    # renumber the changes sequentially
    have_unnumbered = False
    for c in source.changes:
        if c.revision and c.number is None:
            have_unnumbered = True
            break
    if have_unnumbered:
        n = 1
        for c in source.changes:
            if c.revision:
                c.number = n
                n = n + 1

    # insert the changes
    for c in source.changes:
        if not c.revision:
            continue
        try:
            values = dict(changeid=c.number,
                          author=c.who,
                          comments=c.comments,
                          is_dir=0,
                          branch=c.branch,
                          revision=c.revision,
                          revlink=c.revlink,
                          when_timestamp=c.when,
                          category=c.category)
            # generator instead of an intermediate list inside dict()
            values = dict((k, remove_none(v)) for k, v in iteritems(values))
        except UnicodeDecodeError as e:
            # "except X, e" is Python-2-only syntax; "as" works on 2.6+
            raise UnicodeError(
                "Trying to import change data as UTF-8 failed. Please look "
                "at contrib/fix_changes_pickle_encoding.py: %s" % str(e))

        migrate_engine.execute(changes.insert(), **values)

        for filename in flatten(c.files):
            migrate_engine.execute(change_files.insert(),
                                   changeid=c.number,
                                   filename=filename)

        for propname, propvalue in iteritems(c.properties.properties):
            encoded_value = json.dumps(propvalue)
            migrate_engine.execute(change_properties.insert(),
                                   changeid=c.number,
                                   property_name=propname,
                                   property_value=encoded_value)

    # NOTE: change_links is not populated, since it is deleted in db
    # version 20.  The table is still created, though.
def test_load(self):
    """A simple pickle round-trips through pickle.load correctly."""
    stream = cStringIO.StringIO(self.simplePickle)
    loaded = pickle.load(stream)
    self.assertSimplePickleContents(loaded)
def import_changes(migrate_engine):
    """Migrate a pre-database changes.pck pickle into the changes tables.

    When no pickle exists, only seeds the changes_nextid counter.
    Raises UnicodeError when a bytestring in the pickle is not valid
    UTF-8 (pointing the user at the recoding helper script).
    """
    # get the basedir from the engine - see model.py if you're wondering
    # how it got there
    basedir = migrate_engine.buildbot_basedir

    # strip None from any of these values, just in case
    def remove_none(x):
        if x is None:
            return u""
        elif isinstance(x, str):
            # (py2) bytestrings must already be valid UTF-8 here; a
            # failure propagates as UnicodeDecodeError, handled below
            return x.decode("utf8")
        else:
            return x

    # sometimes c.files contains nested lists -- why, I do not know! But we
    # deal with it all the same - see bug #915. We'll assume for now that
    # c.files contains *either* lists of filenames or plain filenames, not
    # both.  (Hoisted out of the per-change loop below, where the original
    # re-created this function on every iteration.)
    def flatten(l):
        if l and isinstance(l[0], list):
            rv = []
            for e in l:
                if isinstance(e, list):
                    rv.extend(e)
                else:
                    rv.append(e)
            return rv
        else:
            return l

    # if we still have a changes.pck, then we need to migrate it
    changes_pickle = os.path.join(basedir, "changes.pck")
    if not os.path.exists(changes_pickle):
        migrate_engine.execute(changes_nextid.insert(), next_changeid=1)
        return

    # if not quiet: print "migrating changes.pck to database"

    # 'source' will be an old b.c.changes.ChangeMaster instance, with a
    # .changes attribute.  Note that we use 'r', and not 'rb', because these
    # pickles were written using the old text pickle format, which requires
    # newline translation
    with open(changes_pickle, "r") as f:
        source = pickle.load(f)
    styles.doUpgrade()

    # if not quiet: print " (%d Change objects)" % len(source.changes)

    # first, scan for changes without a number.  If we find any, then we'll
    # renumber the changes sequentially
    have_unnumbered = False
    for c in source.changes:
        if c.revision and c.number is None:
            have_unnumbered = True
            break
    if have_unnumbered:
        n = 1
        for c in source.changes:
            if c.revision:
                c.number = n
                n = n + 1

    # insert the changes
    for c in source.changes:
        if not c.revision:
            continue
        try:
            values = dict(changeid=c.number,
                          author=c.who,
                          comments=c.comments,
                          is_dir=0,
                          branch=c.branch,
                          revision=c.revision,
                          revlink=c.revlink,
                          when_timestamp=c.when,
                          category=c.category)
            # generator instead of an intermediate list inside dict()
            values = dict((k, remove_none(v)) for k, v in values.iteritems())
        except UnicodeDecodeError as e:
            # "except X, e" is Python-2-only syntax; "as" works on 2.6+
            raise UnicodeError(
                "Trying to import change data as UTF-8 failed. Please look "
                "at contrib/fix_changes_pickle_encoding.py: %s" % str(e))

        migrate_engine.execute(changes.insert(), **values)

        for filename in flatten(c.files):
            migrate_engine.execute(change_files.insert(),
                                   changeid=c.number,
                                   filename=filename)

        for propname, propvalue in c.properties.properties.items():
            encoded_value = json.dumps(propvalue)
            migrate_engine.execute(change_properties.insert(),
                                   changeid=c.number,
                                   property_name=propname,
                                   property_value=encoded_value)

    # NOTE: change_links is not populated, since it is deleted in db
    # version 20.  The table is still created, though.
# Resolve the changes file and the encoding from the positional args.
if len(args) == 2:
    changes_file = args[0]
    old_encoding = args[1]
elif len(args) == 1:
    changes_file = "changes.pck"
    old_encoding = args[0]
else:
    parser.error("Need at least one argument")

# single-argument print() is valid in both Python 2 and 3
print("opening %s" % (changes_file,))
try:
    fp = open(changes_file)
except IOError as e:  # "except IOError, e" is Python-2-only syntax
    parser.error("Couldn't open %s: %s" % (changes_file, str(e)))
with fp:
    changemgr = load(fp)

print("decoding bytestrings in %s using %s" % (changes_file, old_encoding))
changemgr.recode_changes(old_encoding)

# pick a backup filename that doesn't already exist
changes_backup = changes_file + ".old"
i = 0
while os.path.exists(changes_backup):
    i += 1
    changes_backup = changes_file + ".old.%i" % i
print("backing up %s to %s" % (changes_file, changes_backup))
os.rename(changes_file, changes_backup)

# write the recoded pickle; the context manager flushes and closes the
# handle even on error (the original leaked the open file object)
with open(changes_file, "w") as out:
    dump(changemgr, out)
# Resolve the changes file and the encoding from the positional args.
if len(args) == 2:
    changes_file = args[0]
    old_encoding = args[1]
elif len(args) == 1:
    changes_file = "changes.pck"
    old_encoding = args[0]
else:
    parser.error("Need at least one argument")

# single-argument print() is valid in both Python 2 and 3
print("opening %s" % (changes_file, ))
try:
    fp = open(changes_file)
except IOError as e:  # "except IOError, e" is Python-2-only syntax
    parser.error("Couldn't open %s: %s" % (changes_file, str(e)))
with fp:
    changemgr = load(fp)

print("decoding bytestrings in %s using %s" % (changes_file, old_encoding))
changemgr.recode_changes(old_encoding)

# pick a backup filename that doesn't already exist
changes_backup = changes_file + ".old"
i = 0
while os.path.exists(changes_backup):
    i += 1
    changes_backup = changes_file + ".old.%i" % i
print("backing up %s to %s" % (changes_file, changes_backup))
os.rename(changes_file, changes_backup)

# write the recoded pickle; the context manager flushes and closes the
# handle even on error (the original leaked the open file object)
with open(changes_file, "w") as out:
    dump(changemgr, out)