def merge_with(fn, *dicts):
    """
    Merge dictionaries and apply function to combined values

    A key may occur in more than one dict, and all values mapped from the
    key will be passed to the function as a list, such as
    fn([val1, val2, ...]).

    >>> merge_with(sum, {1: 1, 2: 2}, {1: 10, 2: 20})
    {1: 11, 2: 22}

    >>> merge_with(first, {1: 1, 2: 2}, {2: 20, 3: 30})  # doctest: +SKIP
    {1: 1, 2: 2, 3: 30}

    See Also:
        merge
    """
    # Also accept a single iterable of dicts: merge_with(fn, [d1, d2, ...])
    if len(dicts) == 1 and not isinstance(dicts[0], dict):
        dicts = dicts[0]

    # Group all values by key, then apply fn to each value list.
    # setdefault replaces the original bare `except:`, which could mask
    # unrelated errors (KeyboardInterrupt, unhashable keys, ...).
    result = dict()
    for d in dicts:
        for k, v in d.items():
            result.setdefault(k, []).append(v)
    return dict((k, fn(v)) for k, v in result.items())
def __init__(self, session: Session, config, loop=None):
    """Bind this protocol handler to an MQTT *session*.

    :param session: active Session whose reader/writer streams are used
    :param config: handler configuration (stored as-is, not inspected here)
    :param loop: asyncio event loop; defaults to the current event loop
    """
    self.logger = logging.getLogger(__name__)
    self.session = session
    self.config = config
    if loop is None:
        self._loop = asyncio.get_event_loop()
    else:
        self._loop = loop
    # Background coroutines are created later; None means "not started".
    self._reader_task = None
    self._writer_task = None
    self._inflight_task = None
    # NOTE(review): the explicit loop= argument to Event/Condition was
    # deprecated in Python 3.8 and removed in 3.10 -- confirm the targeted
    # Python version before upgrading.
    self._reader_ready = asyncio.Event(loop=self._loop)
    self._writer_ready = asyncio.Event(loop=self._loop)
    self._inflight_ready = asyncio.Event(loop=self._loop)
    self._inflight_changed = asyncio.Condition(loop=self._loop)
    self._running = False
    # Record the local endpoint of the session's transport.
    self.session.local_address, self.session.local_port = self.session.writer.get_extra_info("sockname")
    # One inbound queue per packet type, plus a single outbound queue.
    self.incoming_queues = dict()
    for p in PacketType:
        self.incoming_queues[p] = asyncio.Queue()
    self.outgoing_queue = asyncio.Queue()
    # in-flight (unacknowledged) messages, keyed per project convention
    self.inflight_messages = dict()
def test_mode_full_timeout(self):
    # full/fresh checkout with an explicit timeout: every remote shell
    # command must carry timeout=1.
    self.setupStep(
        bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk',
                mode='full', method='fresh', timeout=1))
    self.expectCommands(
        ExpectShell(workdir='wkdir', timeout=1,
                    command=['bzr', '--version'])
        + 0,
        # no pending patch (stat fails, rc 1) ...
        Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True))
        + 1,
        # ... but an existing .bzr dir, so fresh = clean-tree + update.
        Expect('stat', dict(file='wkdir/.bzr', logEnviron=True))
        + 0,
        ExpectShell(workdir='wkdir', timeout=1,
                    command=['bzr', 'clean-tree', '--force'])
        + 0,
        ExpectShell(workdir='wkdir', timeout=1,
                    command=['bzr', 'update'])
        + 0,
        ExpectShell(workdir='wkdir', timeout=1,
                    command=['bzr', 'version-info', '--custom', "--template='{revno}"])
        + ExpectShell.log('stdio', stdout='100')
        + 0,
    )
    self.expectOutcome(result=SUCCESS, status_text=["update"])
    self.expectProperty('got_revision', '100', 'Bzr')
    return self.runStep()
def create_filetree(path=None, depth=0, max_depth=0):
    """Build a nested dict describing the directory tree under *path*.

    Directories whose name ends in ``Compiled`` are skipped; only files
    with .m/.def/.txt/.csv extensions are listed, with the current working
    directory prefix stripped from their names.

    :param path: root directory (defaults to the current working directory)
    :param depth: current recursion depth (internal)
    :param max_depth: maximum depth; 0 means unlimited
    :return: ``{'name': ..., 'children': [...]}`` or None past max_depth
    """
    tree = None
    if max_depth == 0 or depth < max_depth:
        if path is None:
            path = os.getcwd()
        tree = dict(name=os.path.basename(path), children=[])
        try:
            lst = os.listdir(path)
        except OSError:
            pass  # ignore unreadable directories
        else:
            for name in lst:
                fn = os.path.join(path, name)
                if (os.path.isdir(fn)
                        and re.match(r'^.*(Compiled)$', fn) is None):
                    child = create_filetree(fn, depth + 1, max_depth)
                    if child is not None:
                        tree['children'].append(child)
                # Raw strings: the original '\.' was an invalid escape
                # sequence (DeprecationWarning / SyntaxWarning on modern
                # Python); behavior is otherwise unchanged.
                elif re.match(r'^.*\.(m|def|txt|csv)$', fn) is not None:
                    tree['children'].append(dict(name=fn.replace(
                        os.getcwd() + os.path.sep, "")))
    return tree
def site():
    """ Site handler: create, upload or install a web2py application. """
    myversion = request.env.web2py_version

    # Shortcut to make the elif statements more legible
    file_or_appurl = 'file' in request.vars or 'appurl' in request.vars

    if DEMO_MODE:
        pass
    elif request.vars.filename and 'file' not in request.vars:
        # create a new application
        appname = cleanpath(request.vars.filename).replace('.', '_')
        if app_create(appname, request):
            if MULTI_USER_MODE:
                db.app.insert(name=appname, owner=auth.user.id)
            session.flash = T('new application "%s" created', appname)
            redirect(URL('design', args=appname))
        else:
            session.flash = \
                T('unable to create application "%s" (it may exist already)',
                  request.vars.filename)
            redirect(URL(r=request))
    elif file_or_appurl and not request.vars.filename:
        # can't do anything without an app name
        msg = 'you must specify a name for the uploaded application'
        response.flash = T(msg)
    elif file_or_appurl and request.vars.filename:
        # fetch an application via URL or file upload
        f = None
        # BUG FIX: the original used `is not ''`, an identity test that only
        # worked through CPython string interning; use != for equality.
        if request.vars.appurl != '':
            try:
                f = urllib.urlopen(request.vars.appurl)
            except Exception as e:  # py2.6+/py3 syntax, was `Exception, e`
                session.flash = DIV(T('Unable to download app because:'),
                                    PRE(str(e)))
                redirect(URL(r=request))
            fname = request.vars.appurl
        elif request.vars.file != '':
            f = request.vars.file.file
            fname = request.vars.file.filename
        if f:
            appname = cleanpath(request.vars.filename).replace('.', '_')
            installed = app_install(appname, f, request, fname,
                                    overwrite=request.vars.overwrite_check)
        if f and installed:
            msg = 'application %(appname)s installed with md5sum: %(digest)s'
            session.flash = T(msg, dict(appname=appname,
                                        digest=md5_hash(installed)))
        elif f and request.vars.overwrite_check:
            msg = 'unable to install application "%(appname)s"'
            session.flash = T(msg, dict(appname=request.vars.filename))
        else:
            msg = 'unable to install application "%(appname)s"'
            session.flash = T(msg, dict(appname=request.vars.filename))
        redirect(URL(r=request))
def _sourcedirIsUpdatable(self):
    # inlineCallbacks-style generator: download CVS/Root and CVS/Repository
    # from the slave's workdir and compare them against the configured
    # cvsroot/cvsmodule.  Result delivered via defer.returnValue.
    myFileWriter = StringFileWriter()
    args = {
        'workdir': self.build.path_module.join(self.workdir, 'CVS'),
        'writer': myFileWriter,
        'maxsize': None,
        'blocksize': 32*1024,
    }

    # 1) CVS/Root must match self.cvsroot
    cmd = buildstep.RemoteCommand('uploadFile',
                                  dict(slavesrc='Root', **args),
                                  ignore_updates=True)
    yield self.runCommand(cmd)
    if cmd.rc is not None and cmd.rc != 0:
        # upload failed -> no usable checkout present
        defer.returnValue(False)
        return
    if myFileWriter.buffer.strip() != self.cvsroot:
        defer.returnValue(False)
        return

    # reuse the same writer for the second file
    myFileWriter.buffer = ""

    # 2) CVS/Repository must match self.cvsmodule
    cmd = buildstep.RemoteCommand('uploadFile',
                                  dict(slavesrc='Repository', **args),
                                  ignore_updates=True)
    yield self.runCommand(cmd)
    if cmd.rc is not None and cmd.rc != 0:
        defer.returnValue(False)
        return
    if myFileWriter.buffer.strip() != self.cvsmodule:
        defer.returnValue(False)
        return

    # both files match: an incremental update is possible
    defer.returnValue(True)
def check(cmd, mf):
    """py2app recipe check for PyQt5.

    Returns a recipe dict when PyQt5 is used and not already handled by the
    sip recipe; otherwise None.
    """
    m = mf.findNode('PyQt5')
    if m and not isinstance(m, MissingModule):
        try:
            # PyQt5 with sipconfig module, handled
            # by sip recipe
            import sipconfig
            return None
        except ImportError:
            pass

        # All imports are done from C code, hence not visible
        # for modulegraph
        # 1. Use of 'sip'
        # 2. Use of other modules, datafiles and C libraries
        #    in the PyQt5 package.
        mf.import_hook('sip', m)
        # BUG FIX: the original tested `sys.version[0] != 2`, comparing the
        # *character* '2'/'3' against the int 2 -- always True, making the
        # Python 2 branch unreachable.  sys.version_info gives the int.
        if sys.version_info[0] != 2:
            return dict(packages=['PyQt5'],
                        expected_missing_imports=set(
                            ['copy_reg', 'cStringIO', 'StringIO']))
        else:
            return dict(packages=['PyQt5'])

    return None
def __compute_alternative_params(self):
    # Propose an alternative candidate point when the optimizer's own
    # suggestion collides with already-leased parameters.
    # Copied directly from skopt
    transformed_bounds = np.array(self.__opt.space.transformed_bounds)
    est = clone(self.__opt.base_estimator)
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        # refit surrogate on all observed (Xi, yi)
        est.fit(self.__opt.space.transform(self.__opt.Xi), self.__opt.yi)
    # Sample candidate points and score them with Expected Improvement.
    X = self.__opt.space.transform(self.__opt.space.rvs(
        n_samples=self.__opt.n_points, random_state=self.__opt.rng))
    values = _gaussian_acquisition(X=X, model=est,
                                   y_opt=np.min(self.__opt.yi),
                                   acq_func='EI',
                                   acq_func_kwargs=dict(n_points=10000))
    print('original point ei: %s' % np.min(values))
    # Penalize candidates near leased params; keep shrinking the penalty
    # width until some candidate reaches a usefully negative EI (EI values
    # here are negated: more negative = better) or the width bottoms out.
    discount_width = .5
    values = self.__discount_leased_params(X, values, discount_width)
    while np.min(values) > -1e-5 and discount_width > 1e-2:
        discount_width *= .9
        # recompute the undiscounted EI, then re-apply the narrower penalty
        values = _gaussian_acquisition(X=X, model=est,
                                       y_opt=np.min(self.__opt.yi),
                                       acq_func='EI',
                                       acq_func_kwargs=dict(n_points=10000))
        values = self.__discount_leased_params(X, values, discount_width)

    next_x = X[np.argmin(values)]
    print('new point ei: %s' % np.min(values))
    # Clip back into the transformed bounds (not meaningful for purely
    # categorical spaces).
    if not self.__opt.space.is_categorical:
        next_x = np.clip(next_x, transformed_bounds[:, 0],
                         transformed_bounds[:, 1])
    return self.__opt.space.inverse_transform(next_x.reshape((1, -1)))[0]
def parse(self, basefile):
    """Create skeleton entries for resources that are referenced as
    objects in the graph but never described as subjects."""
    # Find out possible skeleton entries by loading the entire
    # graph of resource references, and find resources that only
    # exist as objects.
    #
    # Note: if we used download_from_triplestore we know that this list
    # is clean -- we could just iterate the graph w/o filtering
    g = Graph()
    self.log.info("Parsing %s" % basefile)
    g.parse(self.store.downloaded_path(basefile), format="nt")
    self.log.info("Compiling object set")
    # Fragment identifiers are stripped so each resource counts once.
    # BUG FIX: the original built `dict(zip([...], True))`, which raises
    # TypeError (True is not iterable).  Sets provide the intended
    # membership test and length.
    objects = set(str(o).split("#")[0] for (s, p, o) in g)
    self.log.info("Compiling subject set")
    subjects = set(str(s).split("#")[0] for (s, p, o) in g)
    self.log.info("%s objects, %s subjects. Iterating through existing objects" %
                  (len(objects), len(subjects)))

    for o in objects:
        if not o.startswith(self.config.url):
            continue
        if '9999:999' in o:
            continue
        if o in subjects:
            continue
        for repo in otherrepos:
            # NOTE(review): the original passed `repo` itself to
            # basefile_from_uri; the URI `o` is what the name implies and
            # matches triples_from_uri(o) below -- confirm against the API.
            skelbase = repo.basefile_from_uri(o)
            if skelbase:
                skel = repo.triples_from_uri(o)  # need to impl
                with self.store.open_distilled(skelbase, "wb") as fp:
                    fp.write(skel.serialize(format="pretty-xml"))
                self.log.info("Created skel for %s" % o)
def close(self): """ Shut down the UnitManager, and all umgr components. """ # we do not cancel units at this point, in case any component or pilot # wants to continue to progress unit states, which should indeed be # independent from the umgr life cycle. if self._closed: return self._terminate.set() self.stop() self._rep.info('<<close unit manager') # we don't want any callback invokations during shutdown # FIXME: really? with self._cb_lock: self._callbacks = dict() for m in rpt.UMGR_METRICS: self._callbacks[m] = dict() self._log.info("Closed UnitManager %s." % self._uid) self._closed = True self._rep.ok('>>ok\n')
def testGetSetProperties(self):
    # Round-trip engine properties through the multiengine interface:
    # first with blocking calls, then with block=False where results are
    # fetched via get_pending_deferred.  All 4 engines must agree.
    self.addEngine(4)
    dikt = dict(a=5, b='asdf', c=True, d=None, e=list(range(5)))
    d = self.multiengine.set_properties(dikt)
    # full dict comes back from every engine
    d.addCallback(lambda r: self.multiengine.get_properties())
    d.addCallback(lambda r: self.assertEquals(r, 4*[dikt]))
    # selecting a subset of keys
    d.addCallback(lambda r: self.multiengine.get_properties(('c',)))
    d.addCallback(lambda r: self.assertEquals(r, 4*[{'c': dikt['c']}]))
    # overwriting a single key
    d.addCallback(lambda r: self.multiengine.set_properties(dict(c=False)))
    d.addCallback(lambda r: self.multiengine.get_properties(('c', 'd')))
    d.addCallback(lambda r: self.assertEquals(r, 4*[dict(c=False, d=None)]))

    #Non-blocking
    d.addCallback(lambda r: self.multiengine.set_properties(dikt, block=False))
    d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
    d.addCallback(lambda r: self.multiengine.get_properties(block=False))
    d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
    d.addCallback(lambda r: self.assertEquals(r, 4*[dikt]))
    d.addCallback(lambda r: self.multiengine.get_properties(('c',), block=False))
    d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
    d.addCallback(lambda r: self.assertEquals(r, 4*[{'c': dikt['c']}]))
    d.addCallback(lambda r: self.multiengine.set_properties(dict(c=False), block=False))
    d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
    d.addCallback(lambda r: self.multiengine.get_properties(('c', 'd'), block=False))
    d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
    d.addCallback(lambda r: self.assertEquals(r, 4*[dict(c=False, d=None)]))
    return d
def test_existing_lcd_partial(self):
    # Seed a LastCommit cache entry covering commit3, then compute the
    # LCD for commit4: cached entries for untouched files must be reused,
    # while files changed by commit4 (file2, file4) point at commit4.
    commit1 = self._add_commit('Commit 1', ['file1'])
    commit2 = self._add_commit('Commit 2', ['file1', 'file2'], ['file2'], [commit1])
    commit3 = self._add_commit('Commit 3', ['file1', 'file2', 'file3'], ['file3'], [commit2])
    commit4 = self._add_commit('Commit 4', ['file1', 'file2', 'file3', 'file4'], ['file2', 'file4'], [commit3])
    # pre-existing (partial) cache document for commit3
    prev_lcd = M.repository.LastCommit(
        path='',
        commit_id=commit3._id,
        entries=[
            dict(
                name='file1',
                commit_id=commit1._id),
            dict(
                name='file2',
                commit_id=commit2._id),
            dict(
                name='file3',
                commit_id=commit3._id),
        ],
    )
    session(prev_lcd).flush()
    lcd = M.repository.LastCommit.get(commit4.tree)
    self.assertEqual(self.repo._commits[lcd.commit_id].message, commit4.message)
    self.assertEqual(lcd.path, '')
    self.assertEqual(len(lcd.entries), 4)
    # file1/file3 reused from cache; file2/file4 recomputed to commit4
    self.assertEqual(lcd.by_name['file1'], commit1._id)
    self.assertEqual(lcd.by_name['file2'], commit4._id)
    self.assertEqual(lcd.by_name['file3'], commit3._id)
    self.assertEqual(lcd.by_name['file4'], commit4._id)
def get_sortable_columns(self):
    """
    Returns a dictionary of the sortable columns. Key is a model
    field name and value is sort column (for example - attribute).

    If `column_sortable_list` is set, will use it. Otherwise, will call
    `scaffold_sortable_columns` to get them from the model.
    """
    self._sortable_joins = dict()

    # No explicit list configured: derive sortable columns from the model.
    if self.column_sortable_list is None:
        return self.scaffold_sortable_columns()

    result = dict()
    for entry in self.column_sortable_list:
        # An entry is either (display_name, field), a bound attribute,
        # or a plain field name string.
        if isinstance(entry, tuple):
            column, join_path = self._get_field_with_path(entry[1])
            display_name = entry[0]
        else:
            column, join_path = self._get_field_with_path(entry)
            display_name = str(entry) if isinstance(entry, InstrumentedAttribute) else entry

        result[display_name] = column
        # Remember any join path needed to sort on a related model.
        if join_path:
            self._sortable_joins[display_name] = join_path

    return result
def save(self, request, datastream_rev=None, visualization_rev=None):
    """Create or edit a visualization revision from this form's data.

    With ``datastream_rev`` a brand-new visualization is created; with
    ``visualization_rev`` the referenced revision is edited.

    NOTE(review): if neither argument is truthy the method implicitly
    returns None -- confirm callers always pass exactly one of them.
    """
    if datastream_rev:
        # create path: new visualization derived from the datastream rev
        lifecycle = VisualizationLifeCycleManager(user=request.user)
        visualization_rev = lifecycle.create(datastream_rev,
                                             language=request.user.language,
                                             **self.cleaned_data)
        return dict(
            status='ok',
            revision_id=visualization_rev.id,
            messages=[ugettext('APP-VISUALIZATION-CREATEDSUCCESSFULLY-TEXT')]
        )
    elif visualization_rev:
        # edit path: update the existing revision with changed fields only
        lifecycle = VisualizationLifeCycleManager(
            user=request.user,
            visualization_revision_id=visualization_rev['visualization_revision_id']
        )
        visualization_rev = lifecycle.edit(
            language=request.auth_manager.language,
            changed_fields=self.changed_data,
            **self.cleaned_data
        )
        # NOTE(review): the edit path reuses the "created successfully"
        # message key -- looks like a copy/paste; confirm intent.
        return dict(
            status='ok',
            revision_id=visualization_rev.id,
            messages=[ugettext('APP-VISUALIZATION-CREATEDSUCCESSFULLY-TEXT')]
        )
def _compute_graph(self):
    # Build the automaton's transition graph: states are item sets
    # (reached via self.transition), edges are grammar symbols.
    # Results are stored in self._graph plus the bidirectional
    # state <-> numeric-id maps self._index / self._rindex.
    stack = [self.begin]
    index = dict()     # state -> numeric id
    rindex = dict()    # numeric id -> state
    count = Counter()  # NOTE(review): .get() is called with no arguments
                       # below, so this must be a project-local sequential
                       # counter, not collections.Counter -- confirm.
    graph = dict()

    def enter(state):
        # Assign the next free id to a newly discovered state.
        n = count.get()
        index[state] = n
        rindex[n] = state

    enter(self.begin)
    # Depth-first worklist over unexplored states.
    while stack:
        I = stack.pop()
        table = dict()
        for X in self.grammar.symbols:
            transition = self.transition(I, X)
            if transition:
                table[X] = transition
                # register and schedule states seen for the first time
                if transition not in index:
                    enter(transition)
                    stack.append(transition)
        graph[I] = table

    self._graph = graph
    self._index = index
    self._rindex = rindex
def test_height(self):
    """Thumbnails generated with a fixed height must come out 50px tall."""
    prefix = 'thumb_'
    meta = dict(thumbnails=[dict(height=50, prefix=prefix, include=['*.jpg'])])
    self._test_generic_thumbnails(meta)
    for filename in IMAGES:
        # width is unconstrained; only the height is pinned by the config
        _, height = Image.open(self._deployed_image(prefix, filename)).size
        assert height == 50
def __init__(self, *args, **kwargs):
    """Constructor to resolve values for all Parameters.

    For example, the Task::

        class MyTask(luigi.Task):
            count = luigi.IntParameter()

    can be instantiated as ``MyTask(count=10)``.
    """
    params = self.get_params()
    param_values = self.get_param_values(params, args, kwargs)

    # Set all values on class instance
    for key, value in param_values:
        setattr(self, key, value)

    # Register args and kwargs as an attribute on the class. Might be useful
    self.param_args = tuple(value for key, value in param_values)
    self.param_kwargs = dict(param_values)

    # Build up task id from the significant parameters only.
    # (The original rebuilt dict(params) on every loop iteration even
    # though param_objs already held that mapping -- hoisted out here.)
    task_id_parts = []
    param_objs = dict(params)
    for param_name, param_value in param_values:
        if param_objs[param_name].significant:
            task_id_parts.append('%s=%s' % (
                param_name, param_objs[param_name].serialize(param_value)))

    self.task_id = '%s(%s)' % (self.task_family, ', '.join(task_id_parts))
    self.__hash = hash(self.task_id)
def test_ignore_multiple_matches(self):
    # Two nodes hit the same bootstrap error: only the first match should
    # be reported (with partial=True) and only the first log actually read.
    self.assertEqual(
        self.interpret_bootstrap_stderr([
            dict(
                action_num=0,
                node_id='i-b659f519',
                path=('s3://bucket/tmp/logs/j-1EE0CL1O7FDXU/node/'
                      'i-b659f519/bootstrap-actions/1/stderr.gz'),
            ),
            dict(
                action_num=0,
                node_id='i-e647eb49',
                path=('s3://bucket/tmp/logs/j-1EE0CL1O7FDXU/node/'
                      'i-e647eb49/bootstrap-actions/1/stderr.gz'),
            ),
        ]),
        dict(
            errors=[dict(
                action_num=0,
                node_id='i-b659f519',
                task_error=dict(
                    message='BOOM!\n',
                    path=('s3://bucket/tmp/logs/j-1EE0CL1O7FDXU/node/'
                          'i-b659f519/bootstrap-actions/1/stderr.gz'),
                ),
            )],
            partial=True,
        )
    )

    # BUG FIX: the original called `called_once_with`, which is not a Mock
    # assertion method -- it silently creates a child mock and checks
    # nothing.  assert_called_once_with actually performs the check.
    self.mock_cat_log.assert_called_once_with(
        self.mock_fs,
        ('s3://bucket/tmp/logs/j-1EE0CL1O7FDXU/node/'
         'i-b659f519/bootstrap-actions/1/stderr.gz'))
def list_available(bitcoind):
    """Summarize spendable funds per address.

    Returns a dict mapping address -> {"total", "outputs", "account"} built
    from bitcoind's unspent outputs (0-conf included).
    """
    # address -> account, from the wallet's receive list
    account_of = dict()
    for info in bitcoind.listreceivedbyaddress(0):
        account_of[info["address"]] = info["account"]

    summary = dict()
    for output in bitcoind.listunspent(0):
        # listunspent doesn't give addresses, so look up the raw tx:
        raw_tx = bitcoind.getrawtransaction(output['txid'], 1)
        vout = raw_tx["vout"][output['vout']]
        script = vout["scriptPubKey"]

        # This code only deals with ordinary pay-to-bitcoin-address
        # or pay-to-script-hash outputs right now; anything exotic is ignored.
        if script["type"] not in ("pubkeyhash", "scripthash"):
            continue

        address = script["addresses"][0]
        entry = summary.get(address)
        if entry is None:
            summary[address] = {
                "total": vout["value"],
                "outputs": [output],
                "account": account_of.get(address, ""),
            }
        else:
            entry["total"] += vout["value"]
            entry["outputs"].append(output)

    return summary
def when_i_post_put_to_url_with_params(context, name, password, method, url, params=SKIP, admin=False, content_type="application/x-www-form-urlencoded"):
    """Behave step helper: issue an authenticated request with form params.

    Chooses the app or admin URL prefix, urlencodes params for form posts,
    sets the content-type header for body-carrying methods, and stores the
    response on the context.
    """
    full_url = "%s%s" % (context.app_config.app_url_prefix, url) if admin is False else "%s%s" % (context.app_config.admin_url_prefix, url)

    h = httplib2.Http()
    h.add_credentials(name, password)

    # convert string params to dictionary
    form_params = {}
    if params is not SKIP:
        form_params = dict(urlparse.parse_qsl(params))

    # handle both
    # NOTE(review): when params is SKIP this .encode() call presumes SKIP
    # supports encode -- confirm the sentinel's type.
    content = params.encode("utf-8")
    # BUG FIX: the original used `content_type is "..."`, an identity test
    # that only worked through CPython string interning; use == equality.
    if content_type == "application/x-www-form-urlencoded":
        content = urllib.urlencode(form_params)

    if method in {"PUT", "POST", "PATCH"}:
        context.http_headers["content-type"] = content_type

    # encode http headers (httplib2 wants byte strings, not unicode)
    context.http_headers = dict(
        (k.encode('ascii') if isinstance(k, unicode) else k,
         v.encode('ascii') if isinstance(v, unicode) else v)
        for k, v in context.http_headers.items())

    context.resp, context.content = h.request(full_url.encode("utf-8"),
                                              method.encode("utf-8"),
                                              content,
                                              headers=context.http_headers)
    # headers are per-request: reset for the next step
    context.http_headers = {}
def testCleanUp(self):
    # addCleanup must record (func, args, kwargs) in registration order,
    # and doCleanups must execute them in LIFO order.
    class TestableTest(unittest2.TestCase):
        def testNothing(self):
            pass

    test = TestableTest('testNothing')
    # a fresh TestCase starts with no registered cleanups
    self.assertEqual(test._cleanups, [])

    cleanups = []

    def cleanup1(*args, **kwargs):
        cleanups.append((1, args, kwargs))

    def cleanup2(*args, **kwargs):
        cleanups.append((2, args, kwargs))

    test.addCleanup(cleanup1, 1, 2, 3, four='hello', five='goodbye')
    test.addCleanup(cleanup2)

    # internal registry keeps registration order with args/kwargs intact
    self.assertEqual(test._cleanups,
                     [(cleanup1, (1, 2, 3), dict(four='hello', five='goodbye')),
                      (cleanup2, (), {})])

    # doCleanups returns True when every cleanup ran without error
    result = test.doCleanups()
    self.assertTrue(result)

    # LIFO: cleanup2 (registered last) runs before cleanup1
    self.assertEqual(cleanups,
                     [(2, (), {}),
                      (1, (1, 2, 3), dict(four='hello', five='goodbye'))])
def backup(self, context, instance, name, backup_type, rotation, extra_properties=None):
    """Fake backup call: records extra_properties for later inspection and
    returns a stub image dict with the merged properties."""
    self.extra_props_last_call = extra_properties
    properties = {'backup_type': backup_type, 'rotation': rotation}
    properties.update(extra_properties or {})
    return {'id': '123',
            'status': 'ACTIVE',
            'name': name,
            'properties': properties}
def main_interface():
    # List currently-open, non-hidden outages together with the affected
    # DSL users from our own customer base.
    cur = g.db.execute('select id, provider, num_affected, end_time, flagged from outages where start_time<? and (end_time is null or end_time>? ) and hidden==0 order by start_time desc',
                       [datetime.datetime.now(), datetime.datetime.now()])
    outages = [dict(id=row[0], provider=row[1], num_affected=row[2], end_time=row[3], flagged=row[4])
               for row in cur.fetchall()]
    for outage in outages:
        # provider's own reference id for this outage (or None)
        cur = g.db.execute('select provider_ref_id from outage_texts where rowid==?', [outage['id']])
        outage['provider_ref'] = cur.fetchone()
        cur = g.db.execute('select dslusers_id from outages_dslusers_rel where outages_id==?', [outage['id']])
        # Build an "(id1, id2, ...)" literal for the IN clause below.
        # NOTE(review): ids come from our own DB (ints), so no injection
        # risk, but with zero affected users this produces "()", which is
        # invalid SQLite -- confirm every outage has at least one user.
        dsluser_ids = '('
        for row in cur.fetchall():
            dsluser_ids += str(row[0]) + ', '
        dsluser_ids = dsluser_ids.rstrip().rstrip(',')
        dsluser_ids += ')'
        cur = g.db.execute('select id, asid, account_name, phone_number, user_name from dslusers where id IN ' + dsluser_ids)
        outage['users'] = [dict(id=row[0], asid=row[1], account_name=row[2], phone_number=row[3], user_name=row[4])
                           for row in cur.fetchall()]
        # how many of *our* users this outage hits
        cur = g.db.execute('select count(*) from outages_dslusers_rel where outages_id==?', [outage['id']])
        outage['our_affected'] = cur.fetchone()[0]
    return render_template('base.html', queues=queues, notes=notes, outages=outages)
def test_failed_overwrite():
    # put_item with overwrite=True replaces an item; a subsequent default
    # (conditional) put on the same hash key must raise and leave the
    # stored value untouched.
    table = Table.create('messages', schema=[
        HashKey('id'),
    ], throughput={
        'read': 7,
        'write': 3,
    })

    data1 = {'id': '123', 'data': '678'}
    table.put_item(data=data1)

    # explicit overwrite succeeds
    data2 = {'id': '123', 'data': '345'}
    table.put_item(data=data2, overwrite=True)

    # default put on an existing key fails the conditional check
    data3 = {'id': '123', 'data': '812'}
    table.put_item.when.called_with(data=data3).should.throw(ConditionalCheckFailedException)

    # the failed write must not clobber the previous value
    returned_item = table.lookup('123')
    dict(returned_item).should.equal(data2)

    # a different hash key is a plain insert and succeeds
    data4 = {'id': '124', 'data': 812}
    table.put_item(data=data4)
    returned_item = table.lookup('124')
    dict(returned_item).should.equal(data4)
def user():
    """Auth form handler; only active in multi-user deployments."""
    if not MULTI_USER_MODE:
        return dict(form=T("Disabled"))
    # The very first registration needs no approval (bootstrap admin).
    if not db(db.auth_user).count():
        auth.settings.registration_requires_approval = False
    return dict(form=auth())
def test_item_add_and_describe_and_update():
    # put_item -> get_item round-trip, then an in-place update via save():
    # the stored item must reflect the modified attribute afterwards.
    table = create_table()

    data = {
        'forum_name': 'LOLCat Forum',
        'Body': 'http://url_to_lolcat.gif',
        'SentBy': 'User A',
    }

    table.put_item(data=data)
    returned_item = table.get_item(forum_name="LOLCat Forum")
    returned_item.should_not.be.none

    # all attributes round-trip unchanged
    dict(returned_item).should.equal({
        'forum_name': 'LOLCat Forum',
        'Body': 'http://url_to_lolcat.gif',
        'SentBy': 'User A',
    })

    # mutate one attribute and persist it with overwrite
    returned_item['SentBy'] = 'User B'
    returned_item.save(overwrite=True)

    # re-fetch: only SentBy should have changed
    returned_item = table.get_item(
        forum_name='LOLCat Forum'
    )
    dict(returned_item).should.equal({
        'forum_name': 'LOLCat Forum',
        'Body': 'http://url_to_lolcat.gif',
        'SentBy': 'User B',
    })
def delete_plugin():
    """ Object delete handler """
    # Removes every file/directory named plugin_<name>[.*] across the
    # standard web2py application folders after user confirmation.
    app = request.args(0)
    plugin = request.args(1)
    plugin_name = 'plugin_' + plugin
    if 'nodelete' in request.vars:
        # user cancelled the confirmation dialog
        redirect(URL('design', args=app))
    elif 'delete' in request.vars:
        try:
            for folder in ['models', 'views', 'controllers', 'static', 'modules']:
                path = os.path.join(apath(app, r=request), folder)
                for item in os.listdir(path):
                    # match both plugin_<name>.<ext> files and
                    # plugin_<name> directories
                    if item.rsplit('.', 1)[0] == plugin_name:
                        filename = os.path.join(path, item)
                        if os.path.isdir(filename):
                            shutil.rmtree(filename)
                        else:
                            os.unlink(filename)
            session.flash = T('plugin "%(plugin)s" deleted',
                              dict(plugin=plugin))
        except Exception:
            # best effort: a partial delete just reports failure to the user
            session.flash = T('unable to delete file plugin "%(plugin)s"',
                              dict(plugin=plugin))
        redirect(URL('design', args=request.args(0)))
    # no decision yet: render the confirmation view
    return dict(plugin=plugin)
def edit_wiki_settings(node, auth, **kwargs):
    """Toggle a node's wiki between publicly-editable and private."""
    wiki_settings = node.get_addon('wiki')
    permissions = request.get_json().get('permission', None)

    # No wiki addon on this node: nothing to configure.
    if not wiki_settings:
        raise HTTPError(http.BAD_REQUEST, data=dict(
            message_short='Invalid request',
            message_long='Cannot change wiki settings without a wiki'
        ))

    # Map the request flag to the boolean set_editing expects.
    flag_map = {'public': True, 'private': False}
    if permissions not in flag_map:
        raise HTTPError(http.BAD_REQUEST, data=dict(
            message_short='Invalid request',
            message_long='Permissions flag used is incorrect.'
        ))
    permissions = flag_map[permissions]

    try:
        wiki_settings.set_editing(permissions, auth, log=True)
    except NodeStateError as e:
        raise HTTPError(http.BAD_REQUEST, data=dict(
            message_short="Can't change privacy",
            message_long=e.message
        ))

    return {
        'status': 'success',
        'permissions': permissions,
    }
def _get_store_model(self, imsgstore):
    """Build a Store model from a MAPI message store object.

    Reads size / record key / last-logon / owner-entry-id properties, and
    when an owner entry id is present resolves the owner to obtain the
    account name for ``storeName``.
    """
    dct = {}
    # model-attribute -> MAPI property tag mapping
    props = dict(
        size=tags.PR_MESSAGE_SIZE_EXTENDED,
        guid=tags.PR_RECORD_KEY,
        lastLogon=tags.PR_LAST_LOGON_TIME,
        userEntryId=tags.PR_MAILBOX_OWNER_ENTRYID,
    )
    storemodel = MapiObject(imsgstore, props)
    dct.update(dict(
        lastLogon=storemodel.lastLogon,
        size=storemodel.size,
        zarafaId=binguid_to_hexstr(storemodel.guid),
    ))
    if storemodel.userEntryId:
        # resolve the mailbox owner to get its account name
        imailuser = self.session.OpenEntry(storemodel.userEntryId, None, 0)
        usermodel = MapiObject(imailuser, dict(userid=tags.PR_ACCOUNT_W))
        dct.update(dict(
            storeName=usermodel.userid,
        ))
    return Store(initial=dct)
def test_bad_revparse(self):
    # A bogus revno from `bzr version-info` must fail the step even though
    # every shell command itself exited 0.
    self.setupStep(
        bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk',
                mode='incremental'))
    self.expectCommands(
        ExpectShell(workdir='wkdir',
                    command=['bzr', '--version'])
        + 0,
        # no pending patch and no existing .bzr dir -> fresh checkout
        Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True))
        + 1,
        Expect('stat', dict(file='wkdir/.bzr', logEnviron=True))
        + 1,
        ExpectShell(workdir='wkdir',
                    command=['bzr', 'checkout',
                             'http://bzr.squid-cache.org/bzr/squid3/trunk', '.'])
        + 0,
        ExpectShell(workdir='wkdir',
                    command=['bzr', 'version-info', '--custom', "--template='{revno}"])
        + ExpectShell.log('stdio', stdout='oiasdfj010laksjfd')  # not a number
        + 0,
    )
    self.expectOutcome(result=FAILURE, status_text=["updating"])
    return self.runStep()