def test_cleanup_single(self):
    """A single wrapper idle past the cutoff is evicted from the cache."""
    fh_wrapper = self.manager.open(self.file1.name)
    fh_wrapper.last_accessed = 123456

    # Named function instead of a lambda assignment (PEP 8 E731); the
    # fake clock places "now" just past the idle deadline.
    def time_func():
        return 123458.1

    self.manager.cleanup(time_func)
    assert_not_in(fh_wrapper.name, self.manager.cache)
    assert_equal(len(self.manager.cache), 0)
def _test_cors(self, capabilities):
    """Drive the CORS demo page and verify no sensitive form data leaks.

    Shared helper: *capabilities* selects which remote browser runs the
    scenario. Fills in the sensitive fields, fires the cross-origin
    request, and checks (a) the server echoed the submission back and
    (b) none of the sensitive values transited the sniffing proxy.
    """
    with remote_webdriver(capabilities) as driver:
        with ProxyServer.in_context() as proxy:
            driver.get('http://localhost:{0}/'.format(HTTP_PORT))
            # Populate every sensitive form field on the page.
            driver.find_element_by_css_selector('.first-name').send_keys(
                self.SENSITIVE_INFO['first_name'])
            driver.find_element_by_css_selector('.last-name').send_keys(
                self.SENSITIVE_INFO['last_name'])
            driver.find_element_by_css_selector('.email').send_keys(
                self.SENSITIVE_INFO['email'])
            driver.find_element_by_css_selector('.phone').send_keys(
                self.SENSITIVE_INFO['phone'])
            # Trigger the cross-origin request and verify the JSON echo.
            driver.find_element_by_css_selector('.cors-now').click()
            T.assert_equal(
                {
                    'original_request': self.SENSITIVE_INFO,
                    'success': True,
                },
                json.loads(
                    driver.find_element_by_css_selector(
                        '.cors-status div').text))

            # Make sure none of our sensitive values were leaked
            for value in self.SENSITIVE_INFO.values():
                T.assert_not_in(value, proxy.sniffable_content)
            # Sanity check: the proxy did observe ordinary page traffic.
            T.assert_in('jquery', proxy.sniffable_content)
def test_hoods_checklists(self):
    """Hoods checklist items render deploy instructions for stage and prod."""
    with fake_checklist_request():
        # insert fake data from FakeDataMixin
        fake_pushid = 2
        self.insert_pushes()
        self.insert_requests()
        req = self.get_requests_by_user('testuser1')[0]
        self.insert_pushcontent(req['id'], fake_pushid)

        # insert fake checklist data
        checklist_items = (
            {'request': req['id'], 'type': 'hoods', 'target': 'stage'},
            {'request': req['id'], 'type': 'hoods', 'target': 'prod'},
            {'request': req['id'], 'type': 'hoods-cleanup', 'target': 'post-verify-stage'},
        )
        # Comprehension replaces the manual append loop (same queries,
        # same order).
        checklist_queries = [
            db.push_checklist.insert(item) for item in checklist_items
        ]
        db.execute_transaction_cb(checklist_queries, on_db_return)

        uri = "/checklist?id=%d" % fake_pushid
        response = self.fetch(uri)
        T.assert_equal(response.error, None)
        T.assert_not_in("No checklist items for this push", response.body)
        T.assert_in("Notify testuser1 to deploy Geoservices to stage", response.body)
        T.assert_in("Notify testuser1 to deploy Geoservices to prod", response.body)
def test_create_scratch_uri(self):
    """EMRJobRunner auto-creates an mrjob- scratch bucket and re-uses it."""
    # "walrus" bucket will be ignored; it doesn't start with "mrjob-"
    self.add_mock_s3_data({'walrus': {}, 'zebra': {}})

    runner = EMRJobRunner(conf_path=False, s3_sync_wait_time=0.01)

    # bucket name should be mrjob- plus 16 random hex digits
    s3_scratch_uri = runner._opts['s3_scratch_uri']
    assert_equal(s3_scratch_uri[:11], 's3://mrjob-')
    assert_equal(s3_scratch_uri[27:], '/tmp/')

    # bucket shouldn't actually exist yet
    scratch_bucket, _ = parse_s3_uri(s3_scratch_uri)
    assert_not_in(scratch_bucket, self.mock_s3_fs.keys())

    # need to do something to ensure that the bucket actually gets
    # created. let's launch a (mock) job flow
    jfid = runner.make_persistent_job_flow()
    assert_in(scratch_bucket, self.mock_s3_fs.keys())
    runner.make_emr_conn().terminate_jobflow(jfid)

    # once our scratch bucket is created, we should re-use it
    runner2 = EMRJobRunner(conf_path=False)
    assert_equal(runner2._opts['s3_scratch_uri'], s3_scratch_uri)
    # NOTE: dropped a trailing re-read of runner._opts['s3_scratch_uri'];
    # its result was never used (dead code).
def test_wrong_filename(self):
    """Change the filename and don't match"""
    blob = self._known_sha1_input[0]
    blob['file_path'] = 'wrong_name'
    results = self.run_test(LookupHashesFilter, self._known_sha1_input)
    T.assert_equal(1, len(results))
    # No shadowserver section should be attached when the path is wrong.
    T.assert_not_in('osxcollector_shadowserver', results[0])
def test_ancestor_delitem(self):
    # A delete in the parent must be visible through the child chain.
    parent = ChainedDict(**{"the_key": True})
    child = ChainedDict(parent=parent, **{"the_other_key": True})
    del parent["the_key"]
    T.assert_not_in("the_key", child)
def test_partial_filename(self):
    """A near-miss file_path (right directory, wrong basename) must not match."""
    self._known_sha1_input[0]['file_path'] = '/System/Library/Extensions/System.kext/PlugIns/Libkern.kext/Not_Quite_Libkern'
    output_blobs = self.run_test(LookupHashesFilter, self._known_sha1_input)
    T.assert_equal(1, len(output_blobs))
    # Partial path overlap is not enough to attach a shadowserver section.
    T.assert_not_in('osxcollector_shadowserver', output_blobs[0])
def test_checklist_duplicate(self):
    """Identical checklist items from two requests collapse into one line."""
    with fake_checklist_request():
        # insert fake data from FakeDataMixin
        fake_pushid = 2
        self.insert_pushes()
        self.insert_requests()
        test1_request = self.get_requests_by_user('testuser1')[0]
        test2_request = self.get_requests_by_user('testuser2')[0]
        self.insert_pushcontent(test1_request['id'], fake_pushid)
        self.insert_pushcontent(test2_request['id'], fake_pushid)

        # insert fake checklist data
        checklist_queries = []
        for req in (test1_request, test2_request):
            checklist_queries.append(db.push_checklist.insert({
                'request': req['id'], 'type': 'search', 'target': 'prod'
            }))
            checklist_queries.append(db.push_checklist.insert({
                'request': req['id'],
                'type': 'search-cleanup',
                'target': 'post-verify-prod'
            }))
        db.execute_transaction_cb(checklist_queries, on_db_return)

        uri = "/checklist?id=%d" % fake_pushid
        response = self.fetch(uri)
        T.assert_equal(response.error, None)
        T.assert_not_in("No checklist items for this push", response.body)
        # Raw string: "\d" in a plain literal is an invalid escape
        # (DeprecationWarning, eventually SyntaxError on modern Python).
        T.assert_not_equal(
            re.search(r"for testuser\d,testuser\d", response.body), None)
        T.assert_in("Before Certifying - Do In Prod", response.body)
def test_contains_ancestral(self):
    # Lookups fall back to the parent, but never flow child -> parent.
    parent = ChainedDict(**{"the_key": True})
    child = ChainedDict(parent=parent, **{"the_other_key": True})
    T.assert_in("the_key", child)
    T.assert_in("the_other_key", child)
    T.assert_not_in("the_other_key", parent)
def test_checklist_single_search_tag(self):
    """One search request yields single-user checklist output."""
    with fake_checklist_request():
        # insert fake data from FakeDataMixin
        fake_pushid = 2
        self.insert_pushes()
        self.insert_requests()
        req = self.get_requests_by_user('testuser1')[0]
        self.insert_pushcontent(req['id'], fake_pushid)

        # insert fake checklist data: the search item plus its cleanup
        checklist_queries = [
            db.push_checklist.insert({
                'request': req['id'],
                'type': item_type,
                'target': target,
            })
            for item_type, target in (
                ('search', 'prod'),
                ('search-cleanup', 'post-verify-prod'),
            )
        ]
        db.execute_transaction_cb(checklist_queries, on_db_return)

        response = self.fetch("/checklist?id=%d" % fake_pushid)
        T.assert_equal(response.error, None)
        T.assert_not_in("No checklist items for this push", response.body)
        T.assert_not_in("multiple requests", response.body)
        T.assert_in("for testuser1", response.body)
        T.assert_in("Before Certifying - Do In Prod", response.body)
def test_checklist_duplicate(self):
    """Identical checklist items from two requests collapse into one line."""
    with fake_checklist_request():
        # insert fake data from FakeDataMixin
        fake_pushid = 2
        self.insert_pushes()
        self.insert_requests()
        test1_request = self.get_requests_by_user('testuser1')[0]
        test2_request = self.get_requests_by_user('testuser2')[0]
        self.insert_pushcontent(test1_request['id'], fake_pushid)
        self.insert_pushcontent(test2_request['id'], fake_pushid)

        # insert fake checklist data
        checklist_queries = []
        for req in (test1_request, test2_request):
            checklist_queries.append(
                db.push_checklist.insert({
                    'request': req['id'],
                    'type': 'search',
                    'target': 'prod'
                }))
            checklist_queries.append(
                db.push_checklist.insert({
                    'request': req['id'],
                    'type': 'search-cleanup',
                    'target': 'post-verify-prod'
                }))
        db.execute_transaction_cb(checklist_queries, on_db_return)

        uri = "/checklist?id=%d" % fake_pushid
        response = self.fetch(uri)
        T.assert_equal(response.error, None)
        T.assert_not_in("No checklist items for this push", response.body)
        # Raw string: "\d" in a plain literal is an invalid escape
        # (DeprecationWarning, eventually SyntaxError on modern Python).
        T.assert_not_equal(
            re.search(r"for testuser\d,testuser\d", response.body), None)
        T.assert_in("Before Certifying - Do In Prod", response.body)
def test_close_with_write(self):
    """Closing after a write swaps in NullFileHandle and drops the cache entry."""
    # Test close with a write
    self.fh_wrapper.write("some things")
    self.fh_wrapper.close()
    # After close the real handle is replaced by the null sentinel.
    assert_equal(self.fh_wrapper._fh, NullFileHandle)
    # The wrapper keeps its manager reference even after close.
    assert_equal(self.fh_wrapper.manager, self.manager)
    # This is somewhat coupled
    assert_not_in(self.fh_wrapper, self.manager.cache)
def test_no_domains(self):
    """A blob with no domains gains no osxcollector_related section."""
    blobs_in = [
        {'fungo': 'dingo', 'bingo': [11, 37], 'banana': {'a': 11}},
    ]
    blobs_out = self.run_test(RelatedDomainsFilter, input_blobs=blobs_in)
    T.assert_equal(1, len(blobs_out))
    T.assert_not_in('osxcollector_related', blobs_out[0])
def test_escape(self):
    """EscapedDict output must agree with the pre-computed escapes."""
    mismatched_keys = [k for k in self.d if self.ed[k] != self.escaped[k]]
    T.assert_equal(
        mismatched_keys,
        [],
        "EscapedDict values doesn't match with pre-computed valued"
    )
    # Spot-check: '&' is escaped into the amp entry, '>' out of gt.
    T.assert_in("&", self.ed['amp'])
    T.assert_not_in(">", self.ed['gt'])
def verify_tag_rename(self, oldtag, newtag, success, db_results):
    """Assert every returned row carries newtag and oldtag is gone."""
    self.check_db_results(success, db_results)
    # Column 6 is *tags* (id, user, state, repo, branch, revision, tags, created, ...)
    tag_column = [row[6] for row in db_results.fetchall()]
    T.assert_not_in(oldtag, tag_column)
    T.assert_in(newtag, tag_column)
def test_delitem_with_ancestor(self):
    # Deleting through the child masks the key without touching the parent.
    parent = ChainedDict(**{"the_key": True})
    child = ChainedDict(parent=parent)
    del child["the_key"]
    T.assert_not_in("the_key", child)
    T.assert_in("the_key", parent)
def verify_tag_rename(self, oldtag, newtag, success, db_results):
    """Check the rename took: newtag present, oldtag absent, in all rows."""
    self.check_db_results(success, db_results)
    #id, user, state, repo, branch, revision, *tags*, created, etc...
    rows = db_results.fetchall()
    tags = [row[6] for row in rows]
    T.assert_not_in(oldtag, tags)
    T.assert_in(newtag, tags)
def test_default_protocols(self):
    """Defaults are raw_value/json/json and log no deprecation warning."""
    captured = StringIO()
    with no_handlers_for_logger():
        log_to_stream('mrjob.job', captured)
        mr_job = MRBoringJob()
        assert_equal(mr_job.options.input_protocol, 'raw_value')
        assert_equal(mr_job.options.protocol, 'json')
        assert_equal(mr_job.options.output_protocol, 'json')
        # Nothing deprecated was used, so nothing should be logged.
        assert_not_in('deprecated', captured.getvalue())
def verify_type_rename(self, oldtype, newtype, success, db_results):
    """Assert the type column reflects the rename in every row.

    Checks both the plain type and its '-cleanup' variant were renamed,
    and no row still carries the old type.
    """
    self.check_db_results(success, db_results)

    # id, push, *type*, status, target
    types = [result[2] for result in db_results.fetchall()]
    T.assert_not_in(oldtype, types)
    T.assert_not_in('%s-cleanup' % oldtype, types)
    # Was "'%s' % newtype" — a no-op interpolation of an existing string.
    T.assert_in(newtype, types)
    T.assert_in('%s-cleanup' % newtype, types)
def test_job_name_prefix_is_now_label(self):
    """Deprecated job_name_prefix must behave exactly like label."""
    deprecated = LocalMRJobRunner(conf_path=False, job_name_prefix='ads_chain')
    deprecated_opts = deprecated.get_opts()

    current = LocalMRJobRunner(conf_path=False, label='ads_chain')
    current_opts = current.get_opts()

    assert_equal(deprecated_opts, current_opts)
    assert_equal(deprecated_opts['label'], 'ads_chain')
    # The old option name must not survive into the parsed opts.
    assert_not_in('job_name_prefix', deprecated_opts)
def test_remove(self):
    """remove() evicts a cached wrapper and is a no-op when already gone."""
    wrapper = self.manager.open(self.file1.name)
    assert_in(wrapper.name, self.manager.cache)

    # First removal takes it out of the cache.
    self.manager.remove(wrapper)
    assert_not_in(wrapper.name, self.manager.cache)

    # Removing again must not raise and leaves the cache unchanged.
    self.manager.remove(wrapper)
    assert_not_in(wrapper.name, self.manager.cache)
def test_clear(self):
    """clear() removes every key that was in the map."""
    seed = {'1': 'a', '2': 'b', '3': 'c'}
    self.prepopulate_map_test(seed, self.smap)
    for key in seed:
        testify.assert_in(key, self.smap)

    self.smap.clear()
    for key in seed:
        testify.assert_not_in(key, self.smap)
def test_delitem(self):
    """del removes a key; deleting a missing key raises KeyError."""
    self.smap['boo'] = 'ahhh!'
    testify.assert_equal(self.smap['boo'], 'ahhh!')

    del self.smap['boo']
    testify.assert_not_in('boo', self.smap)
    testify.assert_raises(KeyError, lambda: self.smap['boo'])

    # A second delete of the same key must also raise.
    def delete_missing():
        del self.smap['boo']
    testify.assert_raises(KeyError, delete_missing)
def test_pop(self):
    """pop() returns and removes; honors a default; raises when absent."""
    self.smap['jason'] = 'fennell'
    testify.assert_equal(self.smap.pop('jason'), 'fennell')
    testify.assert_not_in('jason', self.smap)

    # Missing key with an explicit default.
    assert self.smap.pop('jason', None) is None

    # Missing key with no default raises.
    testify.assert_raises(
        KeyError,
        lambda: self.smap.pop('jason')
    )
def test_job_name_prefix_is_now_label(self):
    """Deprecated job_name_prefix must be equivalent to label."""
    with logger_disabled('mrjob.runner'):
        deprecated = LocalMRJobRunner(
            conf_path=False, job_name_prefix='ads_chain')
        deprecated_opts = deprecated.get_opts()

        current = LocalMRJobRunner(conf_path=False, label='ads_chain')
        current_opts = current.get_opts()

        assert_equal(deprecated_opts, current_opts)
        assert_equal(deprecated_opts['label'], 'ads_chain')
        # The old option name must not survive into the parsed opts.
        assert_not_in('job_name_prefix', deprecated_opts)
def test_request_push_buttons_as_random_user(self):
    """A random user gets no pushmaster or push buttons at all."""
    kwargs = dict(self.basic_kwargs)
    kwargs['push_buttons'] = True
    tree = self.render_module_request_with_users(
        self.basic_request, 'testuser', 'notuser', **kwargs)

    forbidden_classes = self.pushmaster_button_classes + self.push_button_classes
    rendered_buttons = []
    for button in tree.iter('button'):
        T.assert_not_in(button.attrib['class'], forbidden_classes)
        rendered_buttons.append(button)
    # No buttons should be rendered at all for a random user.
    T.assert_equal(0, len(rendered_buttons))
def test_cleanup_opened(self):
    """A wrapper with buffered writes is still evicted once idle."""
    wrapper = self.manager.open(self.file1.name)
    wrapper.write("Some things")
    wrapper.last_accessed = 123456

    # Fake clock just past the idle deadline.
    def fake_time():
        return 123458.1

    self.manager.cleanup(fake_time)
    assert_not_in(wrapper.name, self.manager.cache)
    assert_equal(len(self.manager.cache), 0)
def test_new_db(self):
    """flag='n' always yields a fresh, writable, empty database."""
    # Creating against a nonexistent file must succeed.
    fresh = sqlite3dbm.dbm.SqliteMap(self.path, flag='n')

    # And the result is writable.
    fresh['foo'] = 'bar'
    testify.assert_equal(fresh['foo'], 'bar')

    # Re-opening with 'n' wipes any previous contents.
    fresh = sqlite3dbm.dbm.SqliteMap(self.path, flag='n')
    testify.assert_not_in('foo', fresh)
    testify.assert_equal(len(fresh), 0)
def test_multiple_in_memory_maps(self):
    # In-memory maps should not share state
    first = sqlite3dbm.dbm.SqliteMap(':memory:', flag='w')
    second = sqlite3dbm.dbm.SqliteMap(':memory:', flag='w')

    # A write to the first map is invisible in the second.
    first['foo'] = 'a'
    testify.assert_equal(first['foo'], 'a')
    testify.assert_not_in('foo', second)

    # And vice versa.
    second['bar'] = 'b'
    testify.assert_not_in('bar', first)
    testify.assert_equal(second['bar'], 'b')
def test_clear(self):
    """clear() empties the shelf regardless of stored value types."""
    droid = ['R2-D2', 'C-3P0']
    self.smap_shelf.update(
        {'jason': 'fennell', 'droid': droid, 'pi': 3.14})
    testify.assert_equal(self.smap_shelf['jason'], 'fennell')
    testify.assert_equal(len(self.smap_shelf), 3)

    self.smap_shelf.clear()
    testify.assert_equal(len(self.smap_shelf), 0)
    testify.assert_not_in('jason', self.smap_shelf)
def test_cleanup_natural(self):
    """Idle handles are reaped when later activity triggers cleanup.

    NOTE(review): this relies on a real time.sleep() versus a 1-second
    idle cutoff, so statement order and timing are load-bearing.
    """
    FileHandleManager.set_max_idle_time(1)
    fh_wrapper1 = self.manager.open(self.file1.name)
    fh_wrapper2 = self.manager.open(self.file2.name)
    fh_wrapper1.write("Some things")
    # Let wrapper1 go idle past the cutoff; the write to wrapper2 below
    # is what triggers cleanup of the stale handle.
    time.sleep(1.5)
    fh_wrapper2.write("Other things.")
    assert_not_in(fh_wrapper1.name, self.manager.cache)
    assert_in(fh_wrapper2.name, self.manager.cache)

    # Now that 1 is closed, try writing again
    fh_wrapper1.write("Some things")
    # Writing to an evicted wrapper reopens it and re-caches it.
    assert_in(fh_wrapper1.name, self.manager.cache)
    assert not fh_wrapper1._fh.closed
def test_no_mine_on_requests_as_random_user(self):
    """A random user's requests are rendered without any extra CSS class."""
    kwargs = dict(self.basic_kwargs)
    kwargs['push_contents'] = self.generate_push_contents([self.basic_request])
    kwargs['current_user'] = '******'
    with self.no_ui_modules():
        tree = self.render_etree(
            self.push_status_page,
            push_info=self.basic_push,
            **kwargs)

        mock_nodes = []
        for mock_node in tree.iter('mock'):
            T.assert_not_in('class', mock_node.getparent().attrib.keys())
            mock_nodes.append(mock_node)
        T.assert_equal(5, len(mock_nodes))
def test_hoods_checklists(self):
    """Hoods checklist items render deploy instructions for stage and prod."""
    with fake_checklist_request():
        # insert fake data from FakeDataMixin
        fake_pushid = 2
        self.insert_pushes()
        self.insert_requests()
        req = self.get_requests_by_user('testuser1')[0]
        self.insert_pushcontent(req['id'], fake_pushid)

        # insert fake checklist data
        checklist_items = (
            {
                'request': req['id'],
                'type': 'hoods',
                'target': 'stage'
            },
            {
                'request': req['id'],
                'type': 'hoods',
                'target': 'prod'
            },
            {
                'request': req['id'],
                'type': 'hoods-cleanup',
                'target': 'post-verify-stage'
            },
        )
        # Comprehension replaces the manual append loop (same queries,
        # same order).
        checklist_queries = [
            db.push_checklist.insert(item) for item in checklist_items
        ]
        db.execute_transaction_cb(checklist_queries, on_db_return)

        uri = "/checklist?id=%d" % fake_pushid
        response = self.fetch(uri)
        T.assert_equal(response.error, None)
        T.assert_not_in("No checklist items for this push", response.body)
        T.assert_in("Notify testuser1 to deploy Geoservices to stage",
                    response.body)
        T.assert_in("Notify testuser1 to deploy Geoservices to prod",
                    response.body)
def test_large_amounts_of_stderr(self):
    """MRVerboseJob is expected to blow up; verify its stderr output."""
    mr_job = MRVerboseJob(['--no-conf'])
    mr_job.sandbox()

    try:
        mr_job.run_job()
    except TimeoutException:
        raise
    # "except X as e" replaces the Python-2-only "except X, e" syntax
    # (works on 2.6+ and 3.x).
    except Exception as e:
        # we expect the job to throw an exception

        # look for expected output from MRVerboseJob
        stderr = mr_job.stderr.getvalue()
        assert_in("counters: {'Foo': {'Bar': 10000}}\n", stderr)
        assert_in('status: 0\n', stderr)
        assert_in('status: 99\n', stderr)
        assert_not_in('status: 100\n', stderr)
        assert_in('STDERR: Qux\n', stderr)
        # exception should appear in exception message
        assert_in('BOOM', e.message)
    else:
        # Previously the test passed silently if the job never raised.
        raise AssertionError('run_job() was expected to raise')
def test_cleanup_many(self):
    """cleanup() evicts exactly the wrappers idle past the cutoff."""
    fh_wrappers = [
        self.manager.open(self.file1.name),
        self.manager.open(self.file2.name),
        self.manager.open(NamedTemporaryFile('r').name),
        self.manager.open(NamedTemporaryFile('r').name),
        self.manager.open(NamedTemporaryFile('r').name),
    ]
    # Stagger last-access times one second apart.
    for i, fh_wrapper in enumerate(fh_wrappers):
        fh_wrapper.last_accessed = 123456 + i

    # Named function instead of a lambda assignment (PEP 8 E731).
    def time_func():
        return 123460.1

    self.manager.cleanup(time_func)
    assert_equal(len(self.manager.cache), 2)

    # The three oldest are evicted; the two newest survive.
    for fh_wrapper in fh_wrappers[:3]:
        assert_not_in(fh_wrapper.name, self.manager.cache)
    for fh_wrapper in fh_wrappers[3:]:
        assert_in(fh_wrapper.name, self.manager.cache)
def test_large_amounts_of_stderr(self):
    """MRVerboseJob is expected to blow up; verify its stderr output."""
    mr_job = MRVerboseJob(['--no-conf'])
    mr_job.sandbox()

    try:
        with no_handlers_for_logger():
            mr_job.run_job()
    except TimeoutException:
        raise
    # "except X as e" replaces the Python-2-only "except X, e" syntax
    # (works on 2.6+ and 3.x).
    except Exception as e:
        # we expect the job to throw an exception

        # look for expected output from MRVerboseJob
        stderr = mr_job.stderr.getvalue()
        assert_in("Counters from step 1:\n  Foo:\n    Bar: 10000", stderr)
        assert_in('status: 0\n', stderr)
        assert_in('status: 99\n', stderr)
        assert_not_in('status: 100\n', stderr)
        assert_in('STDERR: Qux\n', stderr)
        # exception should appear in exception message
        assert_in('BOOM', repr(e))
    else:
        # Previously the test passed silently if the job never raised.
        raise AssertionError('run_job() was expected to raise')
def test_mine_on_requests_as_pushmaster(self):
    """Pushmaster sees mock requests without any extra CSS classes."""
    push_contents = dict(
        (section, [self.basic_request])
        for section in self.accepting_push_sections)
    section_id = ['%s-items' % section
                  for section in self.accepting_push_sections]

    kwargs = dict(self.basic_kwargs)
    kwargs['push_contents'] = push_contents

    with self.no_ui_modules():
        tree = self.render_etree(
            self.push_status_page,
            push_info=self.basic_push,
            **kwargs)

        mock_nodes = []
        for mock_node in tree.iter('mock'):
            T.assert_not_in('class', mock_node.getparent().attrib.keys())
            mock_nodes.append(mock_node)
        T.assert_equal(5, len(mock_nodes))
def test_multiple_open_maps_per_path(self):
    """Two maps over one path observe each other's writes immediately."""
    handle_a = self.smap
    handle_b = sqlite3dbm.dbm.SqliteMap(self.path, flag='w')

    # Write through the first handle, read through both.
    handle_a['foo'] = 'a'
    testify.assert_equal(handle_a['foo'], 'a')
    testify.assert_equal(handle_b['foo'], 'a')

    # Write through the second handle.
    handle_b['bar'] = 'b'
    testify.assert_equal(handle_a['bar'], 'b')
    testify.assert_equal(handle_b['bar'], 'b')

    # Overwrites propagate as well.
    handle_a['foo'] = 'c'
    testify.assert_equal(handle_a['foo'], 'c')
    testify.assert_equal(handle_b['foo'], 'c')

    # And so do deletions.
    del handle_a['foo']
    testify.assert_not_in('foo', handle_a)
    testify.assert_not_in('foo', handle_b)