def thd():
    c = self.connectLdap()
    infos = {'username': username}
    pattern = self.accountPattern % dict(username=username)
    res = self.search(c, self.accountBase, pattern,
                      attributes=[self.accountEmail, self.accountFullName, 'dn'] +
                      self.accountExtraFields)
    if len(res) != 1:
        raise KeyError("ldap search \"%s\" returned %d results" % (pattern, len(res)))
    dn, ldap_infos = res[0]['dn'], res[0]['raw_attributes']
    infos['full_name'] = ldap_infos[self.accountFullName]
    infos['email'] = ldap_infos[self.accountEmail]
    for f in self.accountExtraFields:
        if f in ldap_infos:
            infos[f] = ldap_infos[f]
    # needs double quoting of backslashing
    pattern = self.groupMemberPattern % dict(dn=dn)
    res = self.search(c, self.groupBase, pattern, attributes=[self.groupName])
    infos['groups'] = flatten([group_infos['raw_attributes'][self.groupName]
                               for group_infos in res])
    return infos
def makeRemoteShellCommand(self, collectStdout=False, collectStderr=False,
                           stdioLogName='stdio',
                           **overrides):
    kwargs = dict([(arg, getattr(self, arg)) for arg in self._shellMixinArgs])
    kwargs.update(overrides)
    stdio = None
    if stdioLogName is not None:
        stdio = yield self.addLog(stdioLogName)
    kwargs['command'] = flatten(kwargs['command'], (list, tuple))

    # check for the usePTY flag
    if kwargs['usePTY'] != 'slave-config':
        if self.slaveVersionIsOlderThan("shell", "2.7"):
            if stdio is not None:
                yield stdio.addHeader(
                    "NOTE: slave does not allow master to override usePTY\n")
            del kwargs['usePTY']

    # check for the interruptSignal flag
    if kwargs["interruptSignal"] and self.slaveVersionIsOlderThan("shell", "2.15"):
        if stdio is not None:
            yield stdio.addHeader(
                "NOTE: slave does not allow master to specify interruptSignal\n")
        del kwargs['interruptSignal']

    # lazylogfiles are handled below
    del kwargs['lazylogfiles']

    # merge the builder's environment with that supplied here
    builderEnv = self.build.builder.config.env
    kwargs['env'] = yield self.build.render(builderEnv)
    kwargs['env'].update(self.env)
    kwargs['stdioLogName'] = stdioLogName

    # default the workdir appropriately
    if not self.workdir:
        if callable(self.build.workdir):
            kwargs['workdir'] = self.build.workdir(self.build.sources)
        else:
            kwargs['workdir'] = self.build.workdir

    # the rest of the args go to RemoteShellCommand
    cmd = remotecommand.RemoteShellCommand(**kwargs)

    # set up logging
    if stdio is not None:
        cmd.useLog(stdio, False)
    for logname, remotefilename in self.logfiles.items():
        if self.lazylogfiles:
            # it's OK if this does, or does not, return a Deferred
            callback = lambda cmd_arg, logname=logname: self.addLog(logname)
            cmd.useLogDelayed(logname, callback, True)
        else:
            # tell the BuildStepStatus to add a LogFile
            newlog = yield self.addLog(logname)
            # and tell the RemoteCommand to feed it
            cmd.useLog(newlog, False)

    defer.returnValue(cmd)
def thd():
    c = self.connectLdap()
    infos = {'username': username}
    pattern = self.accountPattern % dict(username=username)
    res = self.search(c, self.accountBase, pattern,
                      attributes=[self.accountEmail, self.accountFullName] +
                      self.accountExtraFields)
    if len(res) != 1:
        raise KeyError("ldap search \"%s\" returned %d results" % (pattern, len(res)))
    dn, ldap_infos = res[0]['dn'], res[0]['raw_attributes']

    if isinstance(dn, bytes):
        dn = dn.decode('utf-8')

    def getLdapInfo(x):
        if isinstance(x, list):
            return x[0]
        return x

    infos['full_name'] = getLdapInfo(ldap_infos[self.accountFullName])
    infos['email'] = getLdapInfo(ldap_infos[self.accountEmail])
    for f in self.accountExtraFields:
        if f in ldap_infos:
            infos[f] = getLdapInfo(ldap_infos[f])
    # needs double quoting of backslashing
    pattern = self.groupMemberPattern % dict(dn=dn)
    res = self.search(c, self.groupBase, pattern, attributes=[self.groupName])
    infos['groups'] = flatten([group_infos['raw_attributes'][self.groupName]
                               for group_infos in res])
    return infos
def globSources(sources):
    results = yield defer.gatherResults([
        self.runGlob(os.path.join(self.workdir, source), abandonOnFailure=False)
        for source in sources
    ])
    results = [self.workerPathToMasterPath(p) for p in flatten(results)]
    return results
def thd():
    c = self.connectLdap()
    infos = {'username': username}
    pattern = self.accountPattern % dict(username=username)
    res = self.search(c, self.accountBase, pattern,
                      attributes=[self.accountEmail, self.accountFullName] +
                      self.accountExtraFields)
    if len(res) != 1:
        raise KeyError("ldap search \"{}\" returned {} results".format(pattern, len(res)))
    dn, ldap_infos = res[0]['dn'], res[0]['attributes']

    def getFirstLdapInfo(x):
        if isinstance(x, list):
            x = x[0] if x else None
        return x

    infos['full_name'] = getFirstLdapInfo(ldap_infos[self.accountFullName])
    infos['email'] = getFirstLdapInfo(ldap_infos[self.accountEmail])
    for f in self.accountExtraFields:
        if f in ldap_infos:
            infos[f] = getFirstLdapInfo(ldap_infos[f])

    if self.groupMemberPattern is None:
        infos['groups'] = []
        return infos

    # needs double quoting of backslashing
    pattern = self.groupMemberPattern % dict(dn=dn)
    res = self.search(c, self.groupBase, pattern, attributes=[self.groupName])
    infos['groups'] = flatten([group_infos['attributes'][self.groupName]
                               for group_infos in res])
    return infos
def getDetailsForBuildset(master, bsid, want_properties=False, want_steps=False,
                          want_previous_build=False, want_logs=False,
                          want_logs_content=False):
    # Here we will do a bunch of data api calls on behalf of the reporters
    # We do try to make *some* calls in parallel with the help of gatherResults,
    # but don't commit too much in that. The idea is to do parallelism while
    # keeping the code readable and maintainable.

    # first, just get the buildset and all build requests for our buildset id
    dl = [master.data.get(("buildsets", bsid)),
          master.data.get(('buildrequests', ),
                          filters=[resultspec.Filter('buildsetid', 'eq', [bsid])])]
    (buildset, breqs) = yield defer.gatherResults(dl)

    # next, get the bdictlist for each build request
    dl = [master.data.get(("buildrequests", breq['buildrequestid'], 'builds'))
          for breq in breqs]

    builds = yield defer.gatherResults(dl)
    builds = flatten(builds, types=(list, UserList))

    if builds:
        yield getDetailsForBuilds(master, buildset, builds,
                                  want_properties=want_properties, want_steps=want_steps,
                                  want_previous_build=want_previous_build,
                                  want_logs=want_logs,
                                  want_logs_content=want_logs_content)

    return dict(buildset=buildset, builds=builds)
def globSources(sources):
    results = yield defer.gatherResults([
        self.runGlob(os.path.join(self.workdir, source), abandonOnFailure=False)
        for source in sources
    ])
    results = [self.workerPathToMasterPath(p) for p in flatten(results)]
    defer.returnValue(results)
def makeRemoteShellCommand(self, collectStdout=False, collectStderr=False,
                           stdioLogName="stdio", **overrides):
    kwargs = dict([(arg, getattr(self, arg)) for arg in self._shellMixinArgs])
    kwargs.update(overrides)
    stdio = None
    if stdioLogName is not None:
        # Reuse an existing log if possible; otherwise, create one.
        try:
            stdio = yield self.getLog(stdioLogName)
        except KeyError:
            stdio = yield self.addLog(stdioLogName)

    kwargs["command"] = flatten(kwargs["command"], (list, tuple))

    # store command away for display
    self.command = kwargs["command"]

    # check for the usePTY flag
    if kwargs["usePTY"] != "slave-config":
        if self.slaveVersionIsOlderThan("shell", "2.7"):
            if stdio is not None:
                yield stdio.addHeader("NOTE: slave does not allow master to override usePTY\n")
            del kwargs["usePTY"]

    # check for the interruptSignal flag
    if kwargs["interruptSignal"] and self.slaveVersionIsOlderThan("shell", "2.15"):
        if stdio is not None:
            yield stdio.addHeader("NOTE: slave does not allow master to specify interruptSignal\n")
        del kwargs["interruptSignal"]

    # lazylogfiles are handled below
    del kwargs["lazylogfiles"]

    # merge the builder's environment with that supplied here
    builderEnv = self.build.builder.config.env
    kwargs["env"] = yield self.build.render(builderEnv)
    kwargs["env"].update(self.env)
    kwargs["stdioLogName"] = stdioLogName

    if not kwargs.get("workdir"):
        kwargs["workdir"] = self.workdir

    # the rest of the args go to RemoteShellCommand
    cmd = remotecommand.RemoteShellCommand(collectStdout=collectStdout,
                                           collectStderr=collectStderr,
                                           **kwargs)

    # set up logging
    if stdio is not None:
        cmd.useLog(stdio, False)
    for logname, remotefilename in self.logfiles.items():
        if self.lazylogfiles:
            # it's OK if this does, or does not, return a Deferred
            callback = lambda cmd_arg, logname=logname: self.addLog(logname)
            cmd.useLogDelayed(logname, callback, True)
        else:
            # add a LogFile
            newlog = yield self.addLog(logname)
            # and tell the RemoteCommand to feed it
            cmd.useLog(newlog, False)

    defer.returnValue(cmd)
def _describe(self, done=False):
    """Return a list of short strings to describe this step, for the
    status display. This uses the first few words of the shell command.
    You can replace this by setting .description in your subclass, or by
    overriding this method to describe the step better.

    @type  done: boolean
    @param done: whether the command is complete or not, to improve the
                 way the command is described. C{done=False} is used
                 while the command is still running, so a single
                 imperfect-tense verb is appropriate ('compiling',
                 'testing', ...) C{done=True} is used when the command
                 has finished, and the default getText() method adds some
                 text, so a simple noun is appropriate ('compile',
                 'tests' ...)
    """

    try:
        if done and self.descriptionDone is not None:
            return self.descriptionDone
        if self.description is not None:
            return self.description

        # we may have no command if this is a step that sets its command
        # name late in the game (e.g., in start())
        if not self.command:
            return ["???"]

        words = self.command
        if isinstance(words, (str, unicode)):
            words = words.split()

        try:
            len(words)
        except (AttributeError, TypeError):
            # WithProperties and Property don't have __len__
            # For old-style classes instances AttributeError raised,
            # for new-style classes instances - TypeError.
            return ["???"]

        # flatten any nested lists
        words = flatten(words, (list, tuple))

        # strip instances and other detritus (which can happen if a
        # description is requested before rendering)
        words = [w for w in words if isinstance(w, (str, unicode))]

        if len(words) < 1:
            return ["???"]
        if len(words) == 1:
            return ["'%s'" % words[0]]
        if len(words) == 2:
            return ["'%s" % words[0], "%s'" % words[1]]
        return ["'%s" % words[0], "%s" % words[1], "...'"]
    except:
        log.err(failure.Failure(), "Error describing step")
        return ["???"]
def getDetailsForBuilds(master, buildset, builds, wantProperties=False,
                        wantSteps=False, wantPreviousBuild=False, wantLogs=False):

    builderids = set([build['builderid'] for build in builds])

    builders = yield defer.gatherResults(
        [master.data.get(("builders", _id)) for _id in builderids])

    buildersbyid = dict([(builder['builderid'], builder)
                         for builder in builders])

    if wantProperties:
        buildproperties = yield defer.gatherResults([
            master.data.get(("builds", build['buildid'], 'properties'))
            for build in builds
        ])
    else:
        # we still need a list for the big zip
        buildproperties = range(len(builds))

    if wantPreviousBuild:
        prev_builds = yield defer.gatherResults(
            [getPreviousBuild(master, build) for build in builds])
    else:
        # we still need a list for the big zip
        prev_builds = range(len(builds))

    if wantSteps:
        buildsteps = yield defer.gatherResults([
            master.data.get(("builds", build['buildid'], 'steps'))
            for build in builds
        ])
        if wantLogs:
            for s in flatten(buildsteps, types=(list, UserList)):
                s['logs'] = yield master.data.get(("steps", s['stepid'], 'logs'))
                for l in s['logs']:
                    l['content'] = yield master.data.get(("logs", l['logid'], 'contents'))
    else:
        # we still need a list for the big zip
        buildsteps = range(len(builds))

    # a big zip to connect everything together
    for build, properties, steps, prev in zip(builds, buildproperties,
                                              buildsteps, prev_builds):
        build['builder'] = buildersbyid[build['builderid']]
        build['buildset'] = buildset
        if wantProperties:
            build['properties'] = properties
        if wantSteps:
            build['steps'] = steps
        if wantPreviousBuild:
            build['prev_build'] = prev
def getDetailsForBuilds(master, buildset, builds, wantProperties=False,
                        wantSteps=False, wantPreviousBuild=False, wantLogs=False):

    builderids = set([build['builderid'] for build in builds])

    builders = yield defer.gatherResults(
        [master.data.get(("builders", _id)) for _id in builderids])

    buildersbyid = dict([(builder['builderid'], builder)
                         for builder in builders])

    if wantProperties:
        buildproperties = yield defer.gatherResults(
            [master.data.get(("builds", build['buildid'], 'properties'))
             for build in builds])
    else:
        # we still need a list for the big zip
        buildproperties = lrange(len(builds))

    if wantPreviousBuild:
        prev_builds = yield defer.gatherResults(
            [getPreviousBuild(master, build) for build in builds])
    else:
        # we still need a list for the big zip
        prev_builds = lrange(len(builds))

    if wantSteps:
        buildsteps = yield defer.gatherResults(
            [master.data.get(("builds", build['buildid'], 'steps'))
             for build in builds])
        if wantLogs:
            for s in flatten(buildsteps, types=(list, UserList)):
                logs = yield master.data.get(("steps", s['stepid'], 'logs'))
                s['logs'] = list(logs)
                for l in s['logs']:
                    l['content'] = yield master.data.get(("logs", l['logid'], 'contents'))
    else:
        # we still need a list for the big zip
        buildsteps = lrange(len(builds))

    # a big zip to connect everything together
    for build, properties, steps, prev in zip(builds, buildproperties,
                                              buildsteps, prev_builds):
        build['builder'] = buildersbyid[build['builderid']]
        build['buildset'] = buildset
        build['url'] = getURLForBuild(master, build['builderid'], build['number'])
        if wantProperties:
            build['properties'] = properties
        if wantSteps:
            build['steps'] = list(steps)
        if wantPreviousBuild:
            build['prev_build'] = prev
def globSources(sources):
    dl = defer.DeferredList([
        self.runGlob(os.path.join(self.workdir, source), abandonOnFailure=False)
        for source in sources
    ])
    results = yield dl
    results = [result[1] for result in filter(lambda result: result[0], results)]
    results = flatten(results)
    defer.returnValue(results)
def run(self):
    self.checkWorkerHasCommand("uploadDirectory")
    self.checkWorkerHasCommand("uploadFile")
    self.checkWorkerHasCommand("stat")

    self.stdio_log = yield self.addLog("stdio")

    masterdest = os.path.expanduser(self.masterdest)
    sources = self.workersrcs if isinstance(self.workersrcs, list) else [self.workersrcs]

    if self.keepstamp and self.workerVersionIsOlderThan("uploadFile", "2.13"):
        m = ("This worker ({}) does not support preserving timestamps. "
             "Please upgrade the worker.").format(self.build.workername)
        raise WorkerTooOldError(m)

    if not sources:
        return SKIPPED

    if self.glob:
        results = yield defer.gatherResults([
            self.runGlob(os.path.join(self.workdir, source), abandonOnFailure=False)
            for source in sources
        ])
        sources = [self.workerPathToMasterPath(p) for p in flatten(results)]

    log.msg("MultipleFileUpload started, from worker {!r} to master {!r}".format(
        sources, masterdest))

    self.descriptionDone = ['uploading', str(len(sources)),
                            'file' if len(sources) == 1 else 'files']

    if not sources:
        result = SKIPPED
    else:
        result = SUCCESS
        for source in sources:
            result_single = yield self.startUpload(source, masterdest)
            if result_single == FAILURE:
                result = FAILURE
                break

    yield self.allUploadsDone(result, sources, masterdest)

    return result
def _describe(self, done=False):
    try:
        if done and self.descriptionDone is not None:
            return self.descriptionDone
        if self.description is not None:
            return self.description

        # if self.cmd is set, then use the RemoteCommand's info
        if self.cmd:
            command = self.cmd.command
        # otherwise, if we were configured with a command, use that
        elif self.command:
            command = self.command
        else:
            return super(ShellMixin, self)._describe(done)

        words = command
        if isinstance(words, (str, unicode)):
            words = words.split()

        try:
            len(words)
        except (AttributeError, TypeError):
            # WithProperties and Property don't have __len__
            # For old-style classes instances AttributeError raised,
            # for new-style classes instances - TypeError.
            return super(ShellMixin, self)._describe(done)

        # flatten any nested lists
        words = flatten(words, (list, tuple))

        # strip instances and other detritus (which can happen if a
        # description is requested before rendering)
        words = [w for w in words if isinstance(w, (str, unicode))]

        if len(words) < 1:
            return super(ShellMixin, self)._describe(done)
        if len(words) == 1:
            return ["'%s'" % words[0]]
        if len(words) == 2:
            return ["'%s" % words[0], "%s'" % words[1]]
        return ["'%s" % words[0], "%s" % words[1], "...'"]
    except Exception:
        log.err(failure.Failure(), "Error describing step")
        return super(ShellMixin, self)._describe(done)
def buildCommandKwargs(self, warnings):
    kwargs = buildstep.LoggingBuildStep.buildCommandKwargs(self)
    kwargs.update(self.remote_kwargs)
    kwargs['command'] = flatten(self.command, (list, tuple))

    # check for the usePTY flag
    if 'usePTY' in kwargs and kwargs['usePTY'] != 'slave-config':
        if self.slaveVersionIsOlderThan("svn", "2.7"):
            warnings.append("NOTE: slave does not allow master to override usePTY\n")
            del kwargs['usePTY']

    # check for the interruptSignal flag
    if "interruptSignal" in kwargs and self.slaveVersionIsOlderThan("shell", "2.15"):
        warnings.append("NOTE: slave does not allow master to specify interruptSignal\n")
        del kwargs['interruptSignal']

    return kwargs
def thd():
    infos = {'username': username}
    l = ldap.initialize(self.uri)
    l.simple_bind_s(self.bindUser, self.bindPw)
    pattern = self.accountPattern % dict(username=username)
    res = l.search_s(self.accountBase, ldap.SCOPE_SUBTREE, pattern,
                     [self.accountEmail, self.accountFullName, 'dn'] +
                     self.accountExtraFields)
    if len(res) != 1:
        raise KeyError("ldap search \"%s\" returned %d results" % (pattern, len(res)))
    dn, ldap_infos = res[0]
    infos['full_name'] = ldap_infos[self.accountFullName][0]
    infos['email'] = ldap_infos[self.accountEmail][0]
    for f in self.accountExtraFields:
        if f in ldap_infos:
            infos[f] = ldap_infos[f][0]
    # needs double quoting of backslashing
    pattern = self.groupMemberPattern % dict(dn=dn.replace('\\', '\\\\'))
    res = l.search_s(self.groupBase, ldap.SCOPE_SUBTREE, pattern,
                     [self.groupName])
    infos['groups'] = flatten([group_infos[self.groupName]
                               for gdn, group_infos in res])
    return infos
def buildCommandKwargs(self, warnings):
    kwargs = super().buildCommandKwargs()
    kwargs.update(self.remote_kwargs)
    kwargs['workdir'] = self.workdir
    kwargs['command'] = flatten(self.command, (list, tuple))

    # check for the usePTY flag
    if 'usePTY' in kwargs and kwargs['usePTY'] is not None:
        if self.workerVersionIsOlderThan("shell", "2.7"):
            warnings.append(
                "NOTE: worker does not allow master to override usePTY\n")
            del kwargs['usePTY']

    # check for the interruptSignal flag
    if "interruptSignal" in kwargs and self.workerVersionIsOlderThan("shell", "2.15"):
        warnings.append(
            "NOTE: worker does not allow master to specify interruptSignal\n")
        del kwargs['interruptSignal']

    return kwargs
def recode_changes(self, old_encoding, quiet=False):
    """Processes the list of changes, with the change attributes re-encoded
    as unicode objects"""
    nconvert = 0
    for c in self.changes:
        # give revision special handling, in case it is an integer
        if isinstance(c.revision, int):
            c.revision = unicode(c.revision)

        for attr in ("who", "comments", "revlink", "category", "branch", "revision"):
            a = getattr(c, attr)
            if isinstance(a, str):
                try:
                    setattr(c, attr, a.decode(old_encoding))
                    nconvert += 1
                except UnicodeDecodeError:
                    raise UnicodeError("Error decoding %s of change #%s as %s:\n%r" %
                                       (attr, c.number, old_encoding, a))

        # filenames are a special case, but in general they'll have the same encoding
        # as everything else on a system. If not, well, hack this script to do your
        # import!
        newfiles = []
        for filename in util.flatten(c.files):
            if isinstance(filename, str):
                try:
                    filename = filename.decode(old_encoding)
                    nconvert += 1
                except UnicodeDecodeError:
                    raise UnicodeError("Error decoding filename '%s' of change #%s as %s:\n%r" %
                                       (filename.decode('ascii', 'replace'),
                                        c.number, old_encoding, a))
            newfiles.append(filename)
        c.files = newfiles
    if not quiet:
        print "converted %d strings" % nconvert
def getDetailsForBuildset(master, bsid, wantProperties=False, wantSteps=False,
                          wantPreviousBuild=False, wantLogs=False):
    # Here we will do a bunch of data api calls on behalf of the reporters
    # We do try to make *some* calls in parallel with the help of gatherResults,
    # but don't commit too much in that. The idea is to do parallelism while
    # keeping the code readable and maintainable.

    # first, just get the buildset and all build requests for our buildset id
    dl = [master.data.get(("buildsets", bsid)),
          master.data.get(('buildrequests', ),
                          filters=[resultspec.Filter('buildsetid', 'eq', [bsid])])]
    (buildset, breqs) = yield defer.gatherResults(dl)

    # next, get the bdictlist for each build request
    dl = [master.data.get(("buildrequests", breq['buildrequestid'], 'builds'))
          for breq in breqs]

    builds = yield defer.gatherResults(dl)
    builds = flatten(builds, types=(list, UserList))

    if builds:
        yield getDetailsForBuilds(master, buildset, builds,
                                  wantProperties=wantProperties, wantSteps=wantSteps,
                                  wantPreviousBuild=wantPreviousBuild, wantLogs=wantLogs)

    defer.returnValue(dict(buildset=buildset, builds=builds))
def test_simple(self):
    self.assertEqual(util.flatten([1, 2, 3]), [1, 2, 3])
def flat(r):
    return flatten(r, self.types)
def setups(self):
    return flatten([self.get('setups', self.get('setup', []))])
def commands_for_key(self, key):
    return flatten([self.get(key)])
def makeRemoteShellCommand(self, collectStdout=False, collectStderr=False,
                           stdioLogName='stdio',
                           **overrides):
    kwargs = dict([(arg, getattr(self, arg)) for arg in self._shellMixinArgs])
    kwargs.update(overrides)
    stdio = None
    if stdioLogName is not None:
        # Reuse an existing log if possible; otherwise, create one.
        try:
            stdio = yield self.getLog(stdioLogName)
        except KeyError:
            stdio = yield self.addLog(stdioLogName)

    kwargs['command'] = flatten(kwargs['command'], (list, tuple))

    # store command away for display
    self.command = kwargs['command']

    # check for the usePTY flag
    if kwargs['usePTY'] is not None:
        if self.workerVersionIsOlderThan("shell", "2.7"):
            if stdio is not None:
                yield stdio.addHeader(
                    "NOTE: worker does not allow master to override usePTY\n")
            del kwargs['usePTY']

    # check for the interruptSignal flag
    if kwargs["interruptSignal"] and self.workerVersionIsOlderThan("shell", "2.15"):
        if stdio is not None:
            yield stdio.addHeader(
                "NOTE: worker does not allow master to specify interruptSignal\n")
        del kwargs['interruptSignal']

    # lazylogfiles are handled below
    del kwargs['lazylogfiles']

    # merge the builder's environment with that supplied here
    builderEnv = self.build.builder.config.env
    kwargs['env'] = yield self.build.render(builderEnv)
    kwargs['env'].update(self.env)
    kwargs['stdioLogName'] = stdioLogName

    # default the workdir appropriately
    if not kwargs.get('workdir') and not self.workdir:
        if callable(self.build.workdir):
            kwargs['workdir'] = self.build.workdir(self.build.sources)
        else:
            kwargs['workdir'] = self.build.workdir

    # the rest of the args go to RemoteShellCommand
    cmd = remotecommand.RemoteShellCommand(
        collectStdout=collectStdout,
        collectStderr=collectStderr,
        **kwargs
    )

    # set up logging
    if stdio is not None:
        cmd.useLog(stdio, False)
    for logname, remotefilename in iteritems(self.logfiles):
        if self.lazylogfiles:
            # it's OK if this does, or does not, return a Deferred
            def callback(cmd_arg, local_logname=logname):
                return self.addLog(local_logname)
            cmd.useLogDelayed(logname, callback, True)
        else:
            # add a LogFile
            newlog = yield self.addLog(logname)
            # and tell the RemoteCommand to feed it
            cmd.useLog(newlog, False)

    defer.returnValue(cmd)
def makeRemoteShellCommand(self, collectStdout=False, collectStderr=False,
                           stdioLogName='stdio',
                           **overrides):
    kwargs = {arg: getattr(self, arg) for arg in self._shellMixinArgs}
    kwargs.update(overrides)
    stdio = None
    if stdioLogName is not None:
        # Reuse an existing log if possible; otherwise, create one.
        try:
            stdio = yield self.getLog(stdioLogName)
        except KeyError:
            stdio = yield self.addLog(stdioLogName)

    kwargs['command'] = flatten(kwargs['command'], (list, tuple))

    # store command away for display
    self.command = kwargs['command']

    # check for the usePTY flag
    if kwargs['usePTY'] is not None:
        if self.workerVersionIsOlderThan("shell", "2.7"):
            if stdio is not None:
                yield stdio.addHeader(
                    "NOTE: worker does not allow master to override usePTY\n")
            del kwargs['usePTY']

    # check for the interruptSignal flag
    if kwargs["interruptSignal"] and self.workerVersionIsOlderThan("shell", "2.15"):
        if stdio is not None:
            yield stdio.addHeader(
                "NOTE: worker does not allow master to specify interruptSignal\n")
        del kwargs['interruptSignal']

    # lazylogfiles are handled below
    del kwargs['lazylogfiles']

    # merge the builder's environment with that supplied here
    builderEnv = self.build.builder.config.env
    kwargs['env'] = yield self.build.render(builderEnv)
    kwargs['env'].update(self.env)
    kwargs['stdioLogName'] = stdioLogName

    # default the workdir appropriately
    if not kwargs.get('workdir') and not self.workdir:
        if callable(self.build.workdir):
            kwargs['workdir'] = self.build.workdir(self.build.sources)
        else:
            kwargs['workdir'] = self.build.workdir

    # the rest of the args go to RemoteShellCommand
    cmd = remotecommand.RemoteShellCommand(collectStdout=collectStdout,
                                           collectStderr=collectStderr,
                                           **kwargs)

    # set up logging
    if stdio is not None:
        cmd.useLog(stdio, False)
    for logname, remotefilename in iteritems(self.logfiles):
        if self.lazylogfiles:
            # it's OK if this does, or does not, return a Deferred
            def callback(cmd_arg, local_logname=logname):
                return self.addLog(local_logname)
            cmd.useLogDelayed(logname, callback, True)
        else:
            # add a LogFile
            newlog = yield self.addLog(logname)
            # and tell the RemoteCommand to feed it
            cmd.useLog(newlog, False)

    defer.returnValue(cmd)
def test_dict(self):
    d = {'a': [5, 6, 7], 'b': [7, 8, 9]}
    self.assertEqual(util.flatten(d), d)
def test_tuples(self):
    self.assertEqual(util.flatten([(1, 2), 3]), [(1, 2), 3])
def test_deep(self):
    self.assertEqual(util.flatten([[1, 2], 3, [[4]]]), [1, 2, 3, 4])
def test_string(self):
    self.assertEqual(util.flatten("abc"), "abc")
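# The tests above pin down the behaviour the call sites in this listing rely on:
# flatten() recursively expands only the container types it is told about (lists
# by default, optionally tuples or UserList), and passes strings, dicts, and
# everything else through untouched. The sketch below is a minimal illustration
# of that contract, not the actual buildbot.util.flatten implementation.
def flatten(obj, types=(list,)):
    """Recursively expand nested containers of the given types; leave
    everything else (strings, dicts, and tuples by default) untouched."""
    if not isinstance(obj, types):
        # non-container (or a type we were not asked to flatten): pass through
        return obj
    result = []
    for item in obj:
        if isinstance(item, types):
            # item is itself a container of a flattened type: recurse into it
            result.extend(flatten(item, types))
        else:
            result.append(item)
    return result

# Examples matching the tests above:
#   flatten([1, 2, 3])           -> [1, 2, 3]
#   flatten([(1, 2), 3])         -> [(1, 2), 3]    (tuples kept by default)
#   flatten([[1, 2], 3, [[4]]])  -> [1, 2, 3, 4]
#   flatten("abc")               -> "abc"
#   flatten({'a': [5, 6, 7]})    -> {'a': [5, 6, 7]}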