def test_directory_owner(self):
    """Setting a directory's owner twice leaves the most recent owner in place."""
    owner = 'newowner'
    self.client._mkdirs('foo')
    self.client.set_owner('foo', 'oldowner')
    self.client.set_owner('foo', owner)
    eq_(self.client.status('foo')['owner'], owner)
def test_flag_on(self):
    """With the waffle flag enabled, fetch_snippets must delegate to
    fetch_pregenerated_snippets and return its result unchanged."""
    Flag.objects.create(name='serve_pregenerated_snippets', everyone=True)
    with patch.object(views, 'fetch_pregenerated_snippets') as fetch_pregenerated_snippets:
        eq_(views.fetch_snippets(self.request, foo='bar'),
            fetch_pregenerated_snippets.return_value)
        # extra kwargs must be forwarded verbatim
        fetch_pregenerated_snippets.assert_called_with(self.request, foo='bar')
def test_composer_tool_install_latest(self):
    """COMPOSER_VERSION=latest should install PHP first, then download
    composer.phar from getcomposer.org."""
    ctx = utils.FormattedDict({
        'DOWNLOAD_URL': 'http://server/bins',
        'PHP_VM': 'will_default_to_php_strategy',
        'BUILD_DIR': '/build/dir',
        'CACHE_DIR': '/cache/dir',
        'COMPOSER_VERSION': 'latest',
        'BP_DIR': ''
    })
    builder = Dingus(_ctx=ctx)
    installer = Dingus()
    cfInstaller = Dingus()
    builder.install = Dingus(_installer=cfInstaller, return_value=installer)
    ct = self.extension_module.ComposerExtension(ctx)
    ct._builder = builder
    ct.install()
    # install() must drive the builder twice (PHP + composer)
    eq_(2, len(builder.install.calls()))
    # make sure PHP is installed
    assert installer.package.calls().once()
    eq_('PHP', installer.package.calls()[0].args[0])
    call = installer.package.calls()[0]
    assert call.return_value.calls().once()
    assert installer.calls().once()
    # make sure composer is installed
    assert installer._installer.calls().once()
    assert installer._installer.calls()[0].args[0] == \
        'https://getcomposer.org/composer.phar', \
        "was %s" % installer._installer.calls()[0].args[0]
def test_file_owner(self):
    """Setting a file's owner twice leaves the most recent owner in place."""
    owner = 'newowner'
    self.client.write('foo', 'hello, world!')
    self.client.set_owner('foo', 'oldowner')
    self.client.set_owner('foo', owner)
    eq_(self.client.status('foo')['owner'], owner)
def test_valid_disabled_snippet_authenticated(self):
    """Test disabled snippet returns 200 to authenticated users."""
    snippet = SnippetFactory.create(disabled=True)
    # log in as a superuser so the disabled snippet is still visible
    User.objects.create_superuser('admin', '*****@*****.**', 'asdf')
    self.client.login(username='******', password='******')
    response = self.client.get(reverse('base.show',
                                       kwargs={'snippet_id': snippet.id}))
    eq_(response.status_code, 200)
def test_pipeline_empty():
    """Pipelines should be empty after a send() call."""
    sc = _client()
    with sc.pipeline() as pipe:
        pipe.incr('foo')
        eq_(1, len(pipe._stats))
    # leaving the context manager triggers send(), which flushes the stats
    eq_(0, len(pipe._stats))
def test_with_oddly_formatted_composer_file(self):
    """An unusually formatted composer.json still yields its extension list."""
    config = self.extension_module.ComposerConfiguration({'BUILD_DIR': ''})
    exts = config.read_exts_from_path(
        'tests/data/composer/composer-format.json')
    eq_(1, len(exts))
    eq_('mysqli', exts[0])
def test_answer_welcome_email(self):
    """Only first-time answerers (not the asker) get the welcome email."""
    u1 = profile().user
    u2 = profile(first_answer_email_sent=True).user
    u3 = profile().user
    two_days = datetime.now() - timedelta(hours=48)
    q = question(creator=u1, save=True)
    answer(question=q, creator=u1, created=two_days, save=True)
    answer(question=q, creator=u2, created=two_days, save=True)
    answer(question=q, creator=u3, created=two_days, save=True)

    # Clear out the notifications that were sent
    mail.outbox = []

    # Send email(s) for welcome messages
    cron.send_welcome_emails()

    # There should be an email for u3 only.
    # u1 was the asker, and so did not make a contribution.
    # u2 has already received the email.
    eq_(len(mail.outbox), 1)
    attrs_eq(mail.outbox[0], to=[u3.email])

    # u3's flag should now be set.
    u3 = User.objects.get(id=u3.id)
    eq_(u3.profile.first_answer_email_sent, True)
def __test(hop_length, fmin, n_bins, bins_per_octave,
           tuning, resolution, norm, sparsity):
    """hybrid_cqt should match cqt in shape and (approximately) in values.

    NOTE(review): relies on `y` and `sr` from the enclosing scope —
    this looks like a nested parameterized-test helper.
    """
    C2 = librosa.hybrid_cqt(y, sr=sr,
                            hop_length=hop_length,
                            fmin=fmin,
                            n_bins=n_bins,
                            bins_per_octave=bins_per_octave,
                            tuning=tuning,
                            resolution=resolution,
                            norm=norm,
                            sparsity=sparsity)
    C1 = librosa.cqt(y, sr=sr,
                     hop_length=hop_length,
                     fmin=fmin,
                     n_bins=n_bins,
                     bins_per_octave=bins_per_octave,
                     tuning=tuning,
                     resolution=resolution,
                     norm=norm,
                     sparsity=sparsity)

    eq_(C1.shape, C2.shape)

    # Check for numerical comparability
    idx1 = (C1 > 1e-4 * C1.max())
    idx2 = (C2 > 1e-4 * C2.max())

    perc = 0.99
    thresh = 1e-3

    idx = idx1 | idx2

    # the 99th percentile of |C1 - C2| over significant bins must be small
    # relative to the overall magnitude
    assert np.percentile(np.abs(C1[idx] - C2[idx]),
                         perc) < thresh * max(C1.max(), C2.max())
def test_delete_mid_read(self):
    """Reading a selected file that vanished on disk yields '' and a message."""
    self.viewer.extract()
    self.viewer.select('install.js')
    os.remove(os.path.join(self.viewer.dest, 'install.js'))
    content = self.viewer.read_file()
    eq_(content, '')
    assert self.viewer.selected['msg'].startswith('That file no')
def test_sample_info_genotype():
    """Both multiallelic variants should carry a 0/1 metastasis genotype."""
    variants = load_vcf(data_path("multiallelic.vcf"))
    assert len(variants) == 2, "Expected 2 variants but got %s" % variants
    for variant in variants:
        eq_(variants.metadata[variant]['sample_info']['metastasis']['GT'],
            '0/1')
def test_bom(self):
    """A UTF-16 (BOM-prefixed) file should decode back to plain text."""
    dest = os.path.join(settings.TMP_PATH, 'test_bom')
    # NOTE(review): Python 2 semantics — writing encoded bytes through a
    # text-mode file; confirm before porting to Python 3.
    open(dest, 'w').write('foo'.encode('utf-16'))
    self.viewer.select('foo')
    self.viewer.selected = {'full': dest, 'size': 1}
    eq_(self.viewer.read_file(), u'foo')
    os.remove(dest)
def test_file_size(self):
    """Oversized files read back empty with an explanatory size message."""
    self.viewer.extract()
    self.viewer.get_files()
    self.viewer.select('install.js')
    content = self.viewer.read_file()
    eq_(content, '')
    assert self.viewer.selected['msg'].startswith('File size is')
def test_correlations_signatures(self, rget):
    """CorrelationsSignatures.get hits /correlations/signatures and parses
    totals, with or without the optional platforms argument."""
    model = models.CorrelationsSignatures
    api = model()

    def mocked_get(url, **options):
        # mocked transport: assert the URL shape, return canned JSON
        assert '/correlations/signatures' in url
        ok_('/report_type/core-counts' in url)
        return Response("""
            {
                "hits": ["FakeSignature1", "FakeSignature2"],
                "total": 2
            }
        """)

    rget.side_effect = mocked_get
    r = api.get(report_type='core-counts',
                product='WaterWolf',
                version='1.0a1',
                platforms=['Windows NT', 'Linux'])
    eq_(r['total'], 2)
    # platforms is optional
    r = api.get(report_type='core-counts',
                product='WaterWolf',
                version='1.0a1')
    eq_(r['total'], 2)
def test_channel_edit_child(self):
    """Editing a child channel: the parent choices must exclude the channel
    itself, and a successful edit re-parents it."""
    channel = Channel.objects.get(slug='testing')
    response = self.client.get(
        reverse('manage:channel_edit', args=(channel.pk,)),
    )
    eq_(response.status_code, 200)
    # crude extraction of the <select name="parent"> option markup
    choices = (
        response.content
        .split('name="parent"')[1]
        .split('</select>')[0]
    )
    ok_('Main' in choices)
    # you should not be able to self-reference
    ok_('Testing' not in choices)
    main = Channel.objects.get(slug='main')
    response = self.client.post(
        reverse('manage:channel_edit', args=(channel.pk,)),
        {
            'name': 'Different',
            'slug': 'different',
            'description': '<p>Other things</p>',
            'parent': main.pk,
            'feed_size': 10,
        }
    )
    eq_(response.status_code, 302)
    channel = Channel.objects.get(slug='different')
    eq_(channel.parent, main)

    # now expect two links to "Main" on the channels page
    response = self.client.get(reverse('manage:channels'))
    eq_(response.status_code, 200)
    view_url = reverse('main:home_channels', args=(main.slug,))
    eq_(response.content.count(view_url), 2)
def test_signature_urls(self, rget):
    """SignatureURLs.get builds a /signatureurls/ query (URL-quoting the
    product:version pairs) and parses the hits."""
    model = models.SignatureURLs
    api = model()

    def mocked_get(**options):
        assert '/signatureurls/' in options['url']
        # product:version pairs must be URL-quoted
        ok_(urllib.quote('WaterWolf:1.0') in options['url'])
        return Response("""{
            "hits": [{"url": "http://farm.ville", "crash_count":123}],
            "total": 1
        }
        """)

    rget.side_effect = mocked_get
    today = datetime.datetime.utcnow()
    response = api.get(
        signature='FakeSignature',
        products=['WaterWolf'],
        versions=['WaterWolf:1.0'],
        start_date=today - datetime.timedelta(days=1),
        end_date=today,
    )
    eq_(response['total'], 1)
    eq_(response['hits'][0], {'url': 'http://farm.ville',
                              'crash_count': 123})
def test_current_versions(self, rget):
    """CurrentVersions.get flattens the /products/ hits into a list of
    per-product dicts."""
    model = models.CurrentVersions
    api = model()

    def mocked_get(**options):
        assert '/products/' in options['url']
        return Response("""
            {"hits": {
               "SeaMonkey": [{
                 "product": "SeaMonkey",
                 "throttle": "100.00",
                 "end_date": "2012-05-10 00:00:00",
                 "start_date": "2012-03-08 00:00:00",
                 "featured": true,
                 "version": "2.1.3pre",
                 "release": "Beta",
                 "id": 922}]
              },
              "products": ["SeaMonkey"]
            }
        """)

    rget.side_effect = mocked_get
    info = api.get()
    ok_(isinstance(info, list))
    ok_(isinstance(info[0], dict))
    eq_(info[0]['product'], 'SeaMonkey')
def test_combined_inputs():
    """combined_inputs merges the children of each document under one root."""
    inputs = ['<a><b>foo</b></a>', '<a><b>bar</b></a>']
    combined = combined_inputs(inputs)
    eq_(etree.tostring(combined), b'<a><b>foo</b><b>bar</b></a>')
def test_from_revision_none(self):
    """Diffing against a None 'from' revision must not raise AttributeError;
    it should degrade to an 'unavailable' message."""
    rev = revision()
    try:
        diff = revisions_unified_diff(None, rev)
    except AttributeError:
        self.fail("Should not throw AttributeError")
    eq_("Diff is unavailable.", diff)
def test_py_lang_files_defined(self):
    """
    If `LANG_FILES` is defined a list of the values should be returned.
    """
    path = ('lib/l10n_utils/tests/test_files/'
            'extract_me_with_langfiles.py')
    eq_(langfiles_for_path(path), ['lebowski', 'dude'])
def test_composer_tool_install_latest(self):
    """COMPOSER_VERSION=latest should install PHP first, then download
    composer.phar from getcomposer.org."""
    ctx = utils.FormattedDict(
        {
            "PHP_VM": "will_default_to_php_strategy",
            "BUILD_DIR": "/build/dir",
            "CACHE_DIR": "/cache/dir",
            "COMPOSER_VERSION": "latest",
            "BP_DIR": "",
            "WEBDIR": "",
        }
    )
    builder = Dingus(_ctx=ctx)
    installer = Dingus()
    cfInstaller = Dingus()
    builder.install = Dingus(_installer=cfInstaller, return_value=installer)
    ct = self.extension_module.ComposerExtension(ctx)
    ct._builder = builder
    ct.install()
    # install() must drive the builder twice (PHP + composer)
    eq_(2, len(builder.install.calls()))
    # make sure PHP is installed
    assert installer.package.calls().once()
    eq_("PHP", installer.package.calls()[0].args[0])
    call = installer.package.calls()[0]
    assert call.return_value.calls().once()
    assert installer.calls().once()
    # make sure composer is installed
    assert installer._installer.calls().once()
    assert installer._installer.calls()[0].args[0] == "https://getcomposer.org/composer.phar", (
        "was %s" % installer._installer.calls()[0].args[0]
    )
def test_some_pin(self):
    """When the buyer record has a PIN, the endpoint reports pin=True."""
    buyer_api = self.solitude.generic.buyer
    buyer_api.get_object_or_404.return_value = {'pin': True}
    res = self.client.get(self.url)
    buyer_api.get_object_or_404.assert_called_with(headers={}, uuid='a')
    eq_(json.loads(res.content)['pin'], True)
def test_solid_paletted_image(self):
    """A single-color paletted image is reported as its palette RGB triple."""
    img = Image.new('P', (100, 100), color=20)
    # palette entry i maps to (i, i // 2, i % 3); entry 20 -> (20, 10, 2)
    palette = [channel for i in range(256)
               for channel in (i, i // 2, i % 3)]
    img.putpalette(palette)
    eq_(is_single_color_image(img), (20, 10, 2))
def test_seek_iter(self):
    """Iterating after seek(0) yields the whole buffer — repeatably."""
    for _ in range(2):
        self.rbuf_wrapper.seek(0)
        eq_(list(self.rbuf_wrapper), ['Hello World!'])
def test_solid_merge(self):
    """Merging opaque solid tiles keeps the color of the last (top) layer."""
    bottom = ImageSource(Image.new('RGB', (10, 10), (255, 0, 255)))
    top = ImageSource(Image.new('RGB', (10, 10), (0, 255, 255)))
    merged = merge_images([bottom, top], ImageOptions(transparent=False))
    eq_(merged.as_image().getpixel((0, 0)), (0, 255, 255))
def test_file_for_group(self):
    """Setting a file's group twice leaves the most recent group in place."""
    group = 'newgroup'
    self.client.write('foo', 'hello, world!')
    self.client.set_owner('foo', group='oldgroup')
    self.client.set_owner('foo', group=group)
    eq_(self.client.status('foo')['group'], group)
def test_dir_with_status(self):
    """list(status=True) pairs each entry name with its full status dict."""
    self.client.write('foo/bar', 'hello, world!')
    statuses = self.client.list('foo', status=True)
    eq_(len(statuses), 1)
    status = self.client.status('foo/bar')
    # a direct status() call lacks pathSuffix; listing entries carry it
    status['pathSuffix'] = 'bar'
    eq_(statuses[0], ('bar', status))
def test_from_paletted(self):
    """make_transparent accepts paletted input and converts it to RGBA."""
    img = self._make_test_image().quantize(256)
    img = make_transparent(img, (130, 150, 120), tolerance=5)

    assert img.mode == 'RGBA'
    assert img.size == (50, 50)

    colors = img.getcolors()
    # the near-match color becomes fully transparent (alpha 0),
    # the out-of-tolerance color stays opaque (alpha 255)
    eq_(colors, [(1600, (130, 140, 120, 255)), (900, (130, 150, 120, 0))])
def test_directory_for_group(self):
    """Setting a directory's group twice leaves the most recent group."""
    group = 'newgroup'
    self.client._mkdirs('foo')
    self.client.set_owner('foo', group='oldgroup')
    self.client.set_owner('foo', group=group)
    eq_(self.client.status('foo')['group'], group)
def test_no_user(self):
    """Posting a PIN for an unknown buyer creates the buyer record (201)."""
    self.solitude.generic.buyer.get_object_or_404.side_effect = (
        ObjectDoesNotExist)
    res = self.client.post(self.url, {'pin': '1234'})
    self.solitude.generic.buyer.post.assert_called_with({'uuid': 'a',
                                                         'pin': '1234'})
    eq_(res.status_code, 201)
def test_reference_names():
    """Every variant in the fixture collection is annotated against GRCh37."""
    eq_(ov_wustle_variants.reference_names(), {"GRCh37"})
def test_good(self, receipt, cert_response):
    """A crackable receipt plus a healthy cert endpoint yields no error
    message (signer()'s first element is empty)."""
    receipt.crack.return_value = self._make_receipt()
    cert_response.return_value.ok = True
    cert_response.return_value.json = lambda: {'jwk': []}
    eq_(signer()[0], '')
def test_expire(self, receipt):
    """A cert expiring within 12 hours triggers the expiry warning."""
    soon = time.time() + (3600 * 12)
    receipt.crack.return_value = [{'exp': soon}, '']
    eq_(signer()[0][:21], 'Cert will expire soon')
def test_crack_fails(self, receipt):
    """An uncrackable receipt reports a cracking error."""
    receipt.crack.side_effect = ValueError
    message = signer()[0]
    eq_(message[:25], 'Error on cracking receipt')
def test_sign_fails(self, receipt):
    """A SigningError from the crypto layer reports a signing error."""
    # local import keeps the crypto dependency out of module import time
    from lib.crypto.receipt import SigningError
    receipt.sign.side_effect = SigningError
    eq_(signer()[0][:16], 'Error on signing')
def test_repr(self):
    """The wrapper's repr must match the wrapped object's repr exactly.

    NOTE(review): the explicit __repr__() calls are deliberate — on a proxy
    they may resolve differently from repr(); do not "simplify" to repr().
    """
    eq_(self.instance.__repr__(), self.instance.__wrapped__.__repr__())
def test_tag_pq_branch(self, repo):
    """--git-tag-only from a patch-queue branch tags the base branch and
    leaves us on the patch-queue branch."""
    ret = pq(['argv0', 'import'])
    eq_(repo.rev_parse('master'), repo.rev_parse('debian/2.8-1^{}'))
    eq_(ret, 0)
    eq_(repo.branch, 'patch-queue/master')
    self.add_file(repo, 'foo.txt')
    ret = buildpackage(
        ['argv0', '--git-tag-only', '--git-retag', '--git-ignore-branch'])
    eq_(ret, 0)
    eq_(repo.branch, 'patch-queue/master')
    # the tag must point at the parent of the patch-queue tip
    eq_(repo.rev_parse('patch-queue/master^{}^'),
        repo.rev_parse('debian/2.8-1^{}'))
def test_variant_collection_groupby_gene(): genes = ov_wustle_variants.groupby_gene().keys() # make sure that the IDs attached to Gene objects are the same as IDs # of groupby_gene_id gene_ids = set(ov_wustle_variants.groupby_gene_id().keys()) eq_({gene.id for gene in genes}, gene_ids)
def test_variant_collection_intersection():
    """Intersecting disjoint collections keeps both sources, zero variants."""
    combined = ov_wustle_variants.intersection(tcga_ov_variants)
    expected_sources = {ov_wustle_variants.source, tcga_ov_variants.source}
    eq_(set(combined.sources), expected_sources)
    eq_(len(combined), 0)
def test_variant_collection_groupby_gene_name():
    """Grouping by gene name yields exactly the expected gene symbols."""
    expected = {"AGL", "SASS6", "LRRC39", "UBE4B", "COL11A1"}
    eq_(set(ov_wustle_variants.groupby_gene_name().keys()), expected)
def test_merged_variant_collection_serialization():
    """Intersection and union results must round-trip through pickle."""
    for merged in (ov_wustle_variants.intersection(tcga_ov_variants),
                   ov_wustle_variants.union(tcga_ov_variants)):
        eq_(merged, pickle.loads(pickle.dumps(merged)))
def test_variant_collection_gene_counts():
    """Each gene must be counted exactly once."""
    gene_counts = ov_wustle_variants.gene_counts()
    expected = [1] * len(gene_counts)
    eq_(list(gene_counts.values()), expected)
def test_update_counts_from_file(self):
    """update_counts_from_file parses a hive dump into one UpdateCount row
    with the expected per-field breakdowns."""
    management.call_command('update_counts_from_file', hive_folder,
                            date=self.date)
    eq_(UpdateCount.objects.all().count(), 1)
    update_count = UpdateCount.objects.last()
    eq_(update_count.count, 5)
    eq_(update_count.date, date(2014, 7, 10))
    eq_(update_count.versions, {u'3.8': 2, u'3.7': 3})
    eq_(update_count.statuses, {u'userEnabled': 5})
    application = u'{ec8030f7-c20a-464f-9b0e-13a3a9e97384}'
    eq_(update_count.applications[application], {u'3.6': 18})
    eq_(update_count.oses, {u'WINNT': 5})
    # locale casing is preserved as found in the dump
    eq_(update_count.locales, {u'en-us': 1, u'en-US': 4})
def test_variant_collection_union():
    """Union keeps both sources and sums the variant counts."""
    combined = ov_wustle_variants.union(tcga_ov_variants)
    expected_sources = {ov_wustle_variants.source, tcga_ov_variants.source}
    eq_(set(combined.sources), expected_sources)
    eq_(len(combined), len(ov_wustle_variants) + len(tcga_ov_variants))
def test_upload_field():
    """FileUploadField: upload, replace, delete, and overwrite protection."""
    app = Flask(__name__)

    path = _create_temp()

    def _remove_testfiles():
        safe_delete(path, 'test1.txt')
        safe_delete(path, 'test2.txt')

    class TestForm(form.BaseForm):
        upload = form.FileUploadField('Upload', base_path=path)

    class TestNoOverWriteForm(form.BaseForm):
        upload = form.FileUploadField('Upload', base_path=path,
                                      allow_overwrite=False)

    class Dummy(object):
        pass

    my_form = TestForm()
    eq_(my_form.upload.base_path, path)

    _remove_testfiles()

    dummy = Dummy()

    # Check upload
    with app.test_request_context(
            method='POST',
            data={'upload': (BytesIO(b'Hello World 1'), 'test1.txt')}):
        my_form = TestForm(helpers.get_form_data())
        ok_(my_form.validate())
        my_form.populate_obj(dummy)
        eq_(dummy.upload, 'test1.txt')
        ok_(op.exists(op.join(path, 'test1.txt')))

    # Check replace: the old file is removed, the new one saved
    with app.test_request_context(
            method='POST',
            data={'upload': (BytesIO(b'Hello World 2'), 'test2.txt')}):
        my_form = TestForm(helpers.get_form_data())
        ok_(my_form.validate())
        my_form.populate_obj(dummy)
        eq_(dummy.upload, 'test2.txt')
        ok_(not op.exists(op.join(path, 'test1.txt')))
        ok_(op.exists(op.join(path, 'test2.txt')))

    # Check delete via the _upload-delete checkbox
    with app.test_request_context(method='POST',
                                  data={'_upload-delete': 'checked'}):
        my_form = TestForm(helpers.get_form_data())
        ok_(my_form.validate())
        my_form.populate_obj(dummy)
        eq_(dummy.upload, None)
        ok_(not op.exists(op.join(path, 'test2.txt')))

    # Check overwrite protection (allow_overwrite=False)
    _remove_testfiles()
    my_form_ow = TestNoOverWriteForm()
    with app.test_request_context(
            method='POST',
            data={'upload': (BytesIO(b'Hullo'), 'test1.txt')}):
        my_form_ow = TestNoOverWriteForm(helpers.get_form_data())
        ok_(my_form_ow.validate())
        my_form_ow.populate_obj(dummy)
        eq_(dummy.upload, 'test1.txt')
        ok_(op.exists(op.join(path, 'test1.txt')))

    with app.test_request_context(
            method='POST',
            data={'upload': (BytesIO(b'Hullo'), 'test1.txt')}):
        my_form_ow = TestNoOverWriteForm(helpers.get_form_data())
        # uploading the same filename again must fail validation
        ok_(not my_form_ow.validate())

    _remove_testfiles()
def test_variant_collection_serialization():
    """VariantCollection round-trips through pickle and JSON, metadata
    included."""
    variant_list = [
        Variant(
            1, start=10, ref="AA", alt="AAT", ensembl=77),
        Variant(10, start=15, ref="A", alt="G"),
        Variant(20, start=150, ref="", alt="G"),
    ]
    original = VariantCollection(
        variant_list,
        source_to_metadata_dict={
            "test_data":
            {variant: {"a": "b", "bar": 2} for variant in variant_list}})

    # This causes the variants' ensembl objects to make a SQL connection,
    # which makes the ensembl object non-serializable. By calling this
    # method, we are checking that we don't attempt to directly serialize
    # the ensembl object.
    original.effects()

    original_first_variant = original[0]
    original_metadata = original.metadata

    # Test pickling
    reconstructed = pickle.loads(pickle.dumps(original))
    eq_(original, reconstructed)
    eq_(reconstructed[0], original_first_variant)
    eq_(reconstructed.metadata[original_first_variant],
        original_metadata[original_first_variant])

    merged = original.intersection(original)
    merged_reconstructed = pickle.loads(pickle.dumps(merged))
    eq_(merged, merged_reconstructed)

    # Test JSON serialization
    variants_from_json = VariantCollection.from_json(original.to_json())
    eq_(original, variants_from_json)
    eq_(variants_from_json[0], original_first_variant)

    # pylint: disable=no-member
    eq_(variants_from_json.metadata[original_first_variant],
        original_metadata[original_first_variant])
def test_invalid_args(self):
    """See that import-srpm fails gracefully if called with invalid args"""
    # no args at all: exit status 1, no traceback
    eq_(mock_import([]), 1)
    # an unknown option makes the arg parser bail out with SystemExit
    with assert_raises(SystemExit):
        mock_import(['--invalid-arg=123'])
def test_update_counts_from_file_bug_1093699(self):
    """Regression test for bug 1093699: theme update counts parse correctly.

    Fix: `date(2014, 11, 06)` used a leading-zero integer literal — the
    legacy octal form (same value 6 in Python 2, but a SyntaxError in
    Python 3). Changed to plain `6`; behavior is identical.
    """
    Addon.objects.create(guid='{fe9e9f88-42f0-40dc-970b-4b0e6b7a3d0b}',
                         type=amo.ADDON_THEME)
    management.call_command('update_counts_from_file', hive_folder,
                            date=self.date)
    eq_(UpdateCount.objects.all().count(), 1)
    uc = UpdateCount.objects.last()
    eq_(uc.count, 1320)
    eq_(uc.date, date(2014, 11, 6))
    eq_(uc.versions,
        {u'1.7.16': 1, u'userEnabled': 3, u'1.7.13': 2, u'1.7.11': 3,
         u'1.6.0': 1, u'1.7.14': 1304, u'1.7.6': 6})
    eq_(uc.statuses,
        {u'Unknown': 3, u'userEnabled': 1259, u'userDisabled': 58})
    eq_(uc.oses, {u'WINNT': 1122, u'Darwin': 114, u'Linux': 84})
    eq_(uc.locales[u'es-ES'], 20)
    eq_(uc.applications[u'{92650c4d-4b8e-4d2a-b7eb-24ecf4f6b63a}'],
        {u'2.0': 3})
def test_misc_options(self):
    """Test various options of git-import-srpm"""
    srpm = os.path.join(DATA_DIR, 'gbp-test2-2.0-0.src.rpm')

    eq_(
        mock_import([
            '--no-pristine-tar',
            '--packaging-branch=pack',
            '--upstream-branch=orig',
            '--packaging-dir=packaging',
            '--packaging-tag=ver_%(upstreamversion)s-rel_%(release)s',
            '--upstream-tag=orig/%(upstreamversion)s',
            '--author-is-committer',
            srpm
        ]), 0)
    # Check repository state
    repo = GitRepository('gbp-test2')
    files = {
        'Makefile', 'README', 'dummy.sh', 'packaging/bar.tar.gz',
        'packaging/foo.txt', 'packaging/gbp-test2.spec',
        'packaging/gbp-test2-alt.spec', 'packaging/my.patch',
        'packaging/my2.patch', 'packaging/my3.patch'
    }
    self._check_repo_state(repo, 'pack', ['pack', 'orig'], files)
    eq_(len(repo.get_commits()), 2)
    # Check packaging dir
    eq_(len(repo.get_commits(paths='packaging')), 1)
    # Check tags
    tags = repo.get_tags()
    eq_(set(tags), set(['orig/2.0', 'ver_2.0-rel_0']))
    # Check git committer/author (--author-is-committer makes them equal)
    info = repo.get_commit_info('pack')
    eq_(info['author'].name, 'Markus Lehtonen')
    eq_(info['author'].email, '*****@*****.**')
    eq_(info['author'].name, info['committer'].name)
    eq_(info['author'].email, info['committer'].email)
def test_image_upload_field():
    """ImageUploadField: upload with thumbnail, replace, delete, no-resize,
    auto-resize, allowed extensions, and extension case-insensitivity.

    NOTE(review): _remove_testimages deletes 'test1_thumb.jpg' /
    'test2_thumb.jpg', but the assertions below check thumbnails named
    '*_thumb.png' — the cleanup may leave stale .png thumbs; confirm the
    thumbnail naming against ImageUploadField before changing.
    """
    app = Flask(__name__)

    path = _create_temp()

    def _remove_testimages():
        safe_delete(path, 'test1.png')
        safe_delete(path, 'test1_thumb.jpg')
        safe_delete(path, 'test2.png')
        safe_delete(path, 'test2_thumb.jpg')
        safe_delete(path, 'test1.jpg')
        safe_delete(path, 'test1.jpeg')
        safe_delete(path, 'test1.gif')
        safe_delete(path, 'test1.png')
        safe_delete(path, 'test1.tiff')

    class TestForm(form.BaseForm):
        upload = form.ImageUploadField('Upload', base_path=path,
                                       thumbnail_size=(100, 100, True))

    class TestNoResizeForm(form.BaseForm):
        upload = form.ImageUploadField('Upload', base_path=path,
                                       endpoint='test')

    class TestAutoResizeForm(form.BaseForm):
        upload = form.ImageUploadField('Upload', base_path=path,
                                       max_size=(64, 64, True))

    class Dummy(object):
        pass

    my_form = TestForm()
    eq_(my_form.upload.base_path, path)
    eq_(my_form.upload.endpoint, 'static')

    _remove_testimages()

    dummy = Dummy()

    # Check upload (with thumbnail generation)
    filename = op.join(op.dirname(__file__), 'data', 'copyleft.png')
    with open(filename, 'rb') as fp:
        with app.test_request_context(method='POST',
                                      data={'upload': (fp, 'test1.png')}):
            my_form = TestForm(helpers.get_form_data())
            ok_(my_form.validate())
            my_form.populate_obj(dummy)
            eq_(dummy.upload, 'test1.png')
            ok_(op.exists(op.join(path, 'test1.png')))
            ok_(op.exists(op.join(path, 'test1_thumb.png')))

    # Check replace: old image and thumbnail are removed
    with open(filename, 'rb') as fp:
        with app.test_request_context(method='POST',
                                      data={'upload': (fp, 'test2.png')}):
            my_form = TestForm(helpers.get_form_data())
            ok_(my_form.validate())
            my_form.populate_obj(dummy)
            eq_(dummy.upload, 'test2.png')
            ok_(op.exists(op.join(path, 'test2.png')))
            ok_(op.exists(op.join(path, 'test2_thumb.png')))
            ok_(not op.exists(op.join(path, 'test1.png')))
            ok_(not op.exists(op.join(path, 'test1_thumb.jpg')))

    # Check delete
    with app.test_request_context(method='POST',
                                  data={'_upload-delete': 'checked'}):
        my_form = TestForm(helpers.get_form_data())
        ok_(my_form.validate())
        my_form.populate_obj(dummy)
        eq_(dummy.upload, None)
        ok_(not op.exists(op.join(path, 'test2.png')))
        ok_(not op.exists(op.join(path, 'test2_thumb.png')))

    # Check upload no-resize (no thumbnail should be generated)
    with open(filename, 'rb') as fp:
        with app.test_request_context(method='POST',
                                      data={'upload': (fp, 'test1.png')}):
            my_form = TestNoResizeForm(helpers.get_form_data())
            ok_(my_form.validate())
            my_form.populate_obj(dummy)
            eq_(dummy.upload, 'test1.png')
            ok_(op.exists(op.join(path, 'test1.png')))
            ok_(not op.exists(op.join(path, 'test1_thumb.png')))

    # Check upload, auto-resize
    filename = op.join(op.dirname(__file__), 'data', 'copyleft.png')
    with open(filename, 'rb') as fp:
        with app.test_request_context(method='POST',
                                      data={'upload': (fp, 'test1.png')}):
            my_form = TestAutoResizeForm(helpers.get_form_data())
            ok_(my_form.validate())
            my_form.populate_obj(dummy)
            eq_(dummy.upload, 'test1.png')
            ok_(op.exists(op.join(path, 'test1.png')))

    # TIFF input is converted to JPEG on auto-resize
    filename = op.join(op.dirname(__file__), 'data', 'copyleft.tiff')
    with open(filename, 'rb') as fp:
        with app.test_request_context(method='POST',
                                      data={'upload': (fp, 'test1.tiff')}):
            my_form = TestAutoResizeForm(helpers.get_form_data())
            ok_(my_form.validate())
            my_form.populate_obj(dummy)
            eq_(dummy.upload, 'test1.jpg')
            ok_(op.exists(op.join(path, 'test1.jpg')))

    # check allowed extensions
    for extension in ('gif', 'jpg', 'jpeg', 'png', 'tiff'):
        filename = 'copyleft.' + extension
        filepath = op.join(op.dirname(__file__), 'data', filename)
        with open(filepath, 'rb') as fp:
            with app.test_request_context(method='POST',
                                          data={'upload': (fp, filename)}):
                my_form = TestNoResizeForm(helpers.get_form_data())
                ok_(my_form.validate())
                my_form.populate_obj(dummy)
                eq_(dummy.upload, my_form.upload.data.filename)

    # check case-sensitivity for extensions
    filename = op.join(op.dirname(__file__), 'data', 'copyleft.jpg')
    with open(filename, 'rb') as fp:
        with app.test_request_context(method='POST',
                                      data={'upload': (fp, 'copyleft.JPG')}):
            my_form = TestNoResizeForm(helpers.get_form_data())
            ok_(my_form.validate())
def test_twoPeriodTopCrasherComparison(self):
    """twoPeriodTopCrasherComparison: both signatures are new this period,
    so ranks/percent deltas are 'new' and previous values are 'null'.

    NOTE: Python 2 — the `L` suffixes (long literals) in the expected
    values are intentional.
    """
    lastweek = self.now - datetime.timedelta(days=7)
    lastweek_str = datetimeutil.date_to_string(lastweek.date())
    two_weeks = datetimeutil.date_to_string(self.now.date() -
                                            datetime.timedelta(days=14))
    res = tcbs.twoPeriodTopCrasherComparison(
        self.connection,
        self.params
    )
    res_expected = {
        'totalPercentage': 1.0,
        'end_date': lastweek_str,
        'start_date': two_weeks,
        'crashes': [{
            'count': 14L,
            'mac_count': 1L,
            'content_count': 0,
            'first_report': lastweek_str,
            'previousRank': 'null',
            'currentRank': 0,
            'startup_percent': None,
            'versions': 'plugin1, plugin2',
            'first_report_exact': lastweek_str + ' 00:00:00',
            'percentOfTotal': 0.58333333333333304,
            'changeInRank': 'new',
            'is_gc_count': 1L,
            'win_count': 12L,
            'changeInPercentOfTotal': 'new',
            'linux_count': 1L,
            'hang_count': 0L,
            'signature': 'Fake Signature #1',
            'versions_count': 2,
            'previousPercentOfTotal': 'null',
            'plugin_count': 0
        }, {
            'count': 10L,
            'mac_count': 2L,
            'content_count': 0,
            'first_report': lastweek_str,
            'previousRank': 'null',
            'currentRank': 1,
            'startup_percent': None,
            'versions': 'plugin1, plugin2, plugin3, '
                        'plugin4, plugin5, plugin6',
            'first_report_exact': lastweek_str + ' 00:00:00',
            'percentOfTotal': 0.41666666666666702,
            'changeInRank': 'new',
            'is_gc_count': 3L,
            'win_count': 7L,
            'changeInPercentOfTotal': 'new',
            'linux_count': 1L,
            'hang_count': 0L,
            'signature': 'Fake Signature #2',
            'versions_count': 6,
            'previousPercentOfTotal': 'null',
            'plugin_count': 0
        }],
        'totalNumberOfCrashes': 24L
    }
    eq_(res, res_expected)
def test_import_spec(self):
    """Test importing of spec file"""
    specfile = 'gbp-test2-2.0-0-unpack/gbp-test2.spec'
    eq_(mock_import([specfile]), 0)
    # Check repository state: the import must leave a clean work tree
    ok_(GitRepository('gbp-test2').is_clean())
def test_getListOfTopCrashersBySignature(self):
    """Generator yields signatures ordered by crash share; bad params raise.

    Note: `params` aliases `self.params`, so mutating it also affects the
    later call that passes `self.params`/`params`.
    """
    lastweek = self.now.date() - datetime.timedelta(days=7)
    params = self.params
    params.startDate = self.now.date() - datetime.timedelta(days=8)
    res = tcbs.getListOfTopCrashersBySignature(
        self.connection,
        self.params
    )
    sig_1 = res.next()
    sig_2 = res.next()
    eq_(sig_1[0], "Fake Signature #1")
    eq_(sig_2[0], "Fake Signature #2")
    eq_(sig_1[8].date(), lastweek)
    eq_(sig_2[8].date(), lastweek)
    eq_(sig_1[10], 0.58333333333333304)
    eq_(sig_2[10], 0.41666666666666702)
    # only two signatures in the fixture data
    assert_raises(StopIteration, res.next)
    # Test if raises ValueError when are passed wrong parameters
    params.product = None
    assert_raises(
        BadArgumentError,
        tcbs.getListOfTopCrashersBySignature,
        self.connection,
        params
    )
def test_multiple_versions(self):
    """Test importing of multiple versions"""
    srpms = [
        os.path.join(DATA_DIR, x) for x in [
            'gbp-test-1.0-1.src.rpm',
            'gbp-test-1.0-1.other.src.rpm',
            'gbp-test-1.1-1.src.rpm'
        ]
    ]
    eq_(mock_import(['--no-pristine-tar', srpms[0]]), 0)
    repo = GitRepository('gbp-test')
    self._check_repo_state(repo, 'master', ['master', 'upstream'])
    eq_(len(repo.get_commits()), 2)
    # Try to import same version again
    eq_(mock_import([srpms[1]]), 0)
    eq_(len(repo.get_commits()), 2)
    eq_(len(repo.get_commits(until='upstream')), 1)
    eq_(
        mock_import(
            ['--no-pristine-tar', '--allow-same-version', srpms[1]]), 0)
    # Added new version of packaging
    eq_(len(repo.get_commits()), 3)
    eq_(len(repo.get_commits(until='upstream')), 1)
    # Import new version
    eq_(mock_import(['--no-pristine-tar', srpms[2]]), 0)
    files = {
        'Makefile', 'README', 'bar.tar.gz', 'dummy.sh', 'foo.txt',
        'gbp-test.spec', 'my.patch', 'my2.patch', 'my3.patch'
    }
    self._check_repo_state(repo, 'master', ['master', 'upstream'], files)
    eq_(len(repo.get_commits()), 5)
    eq_(len(repo.get_commits(until='upstream')), 2)
    # Check number of tags
    eq_(len(repo.get_tags('upstream/*')), 2)
    eq_(len(repo.get_tags('packaging/*')), 3)
def test_delete_orphaned_products(self):
    """Products without advisories are deleted; referenced ones survive."""
    make_mfsa('2015-100')
    for name in ('Firefox 43.0.2', 'Firefox 43.0.3'):
        Product.objects.create(name=name)
    eq_(update_security_advisories.delete_orphaned_products(), 2)
    eq_(Product.objects.get().name, 'Firefox 43.0.1')
def test_rangeOfQueriesGenerator(self):
    """rangeOfQueriesGenerator wraps the query function and yields one
    generator per date range; each yields the ranked signatures."""
    lastweek = self.now.date() - datetime.timedelta(days=7)
    query_list = tcbs.getListOfTopCrashersBySignature
    res = tcbs.rangeOfQueriesGenerator(
        self.connection,
        self.params,
        query_list
    )
    generate = res.next()
    sig_1 = generate.next()
    sig_2 = generate.next()
    eq_(sig_1[0], "Fake Signature #1")
    eq_(sig_2[0], "Fake Signature #2")
    eq_(sig_1[8].date(), lastweek)
    eq_(sig_2[8].date(), lastweek)
    eq_(sig_1[10], 0.625)
    eq_(sig_2[10], 0.375)
    # only two signatures in the fixture data
    assert_raises(StopIteration, generate.next)
def test_discussion(self):
    """Suggested-event discussion form: disable, validate moderator emails,
    de-duplicate (case-insensitively), and auto-create missing users."""
    location, = Location.objects.filter(name='Mountain View')
    today = timezone.now()
    event = SuggestedEvent.objects.create(
        user=self.user,
        title='Cool Title',
        slug='cool-title',
        short_description='Short Description',
        description='Description',
        start_time=today,
        location=location)
    discussion = SuggestedDiscussion.objects.create(
        event=event,
        enabled=True,
        notify_all=True,
        moderate_all=True,
    )
    discussion.moderators.add(self.user)
    url = reverse('suggest:discussion', args=(event.pk, ))
    response = self.client.get(url)
    eq_(response.status_code, 200)
    data = {
        'enabled': True,
        'moderate_all': True,
        'emails': self.user.email
    }
    # disable it
    response = self.client.post(url, dict(data, enabled=False))
    eq_(response.status_code, 302)
    next_url = reverse('suggest:placeholder', args=(event.pk, ))
    self.assertRedirects(response, next_url)
    discussion = SuggestedDiscussion.objects.get(pk=discussion.pk)
    ok_(not discussion.enabled)
    # reset that
    discussion.enabled = True

    # try to add something that doesn't look like a valid email address
    response = self.client.post(url, dict(data, emails='not an email'))
    eq_(response.status_code, 200)

    # add two new emails one of which we don't already have a user for
    bob = User.objects.create_user('bob', '*****@*****.**', 'secret')
    # note the deliberate duplicate only different in case
    emails = ' %s , %s, %s, [email protected] ' % (
        self.user.email,
        bob.email,
        self.user.email.upper()
    )
    response = self.client.post(url, dict(data, emails=emails))
    eq_(response.status_code, 302)
    self.assertRedirects(response, next_url)
    eq_(discussion.moderators.all().count(), 3)
    # this should have created a new user
    new_user = User.objects.get(email='*****@*****.**')
    ok_(not new_user.has_usable_password())

    # if you now open the form again these emails should be in there
    # already
    response = self.client.get(url)
    eq_(response.status_code, 200)
    # it can't trust the sort order on these expected email addresses
    ok_('*****@*****.**' in response.content)
    ok_('*****@*****.**' in response.content)
    ok_('*****@*****.**' in response.content)
'first_report_exact': lastweek_str + ' 00:00:00', 'percentOfTotal': 0.375, 'changeInRank': 0, 'is_gc_count': 1L, 'win_count': 1L, 'changeInPercentOfTotal': -0.041666666666667018, 'linux_count': 1L, 'hang_count': 0L, 'signature': 'Fake Signature #2', 'versions_count': 6, 'previousPercentOfTotal': 0.41666666666666702, 'plugin_count': 0, 'total_crashes': 8 }]] eq_(res, res_expected) #-------------------------------------------------------------------------- def test_latestEntryBeforeOrEqualTo(self): product = 'Firefox' version = '8.0' now = self.now.date() to_date = now - datetime.timedelta(days=1) lastweek = now - datetime.timedelta(days=7) res = tcbs.latestEntryBeforeOrEqualTo( self.connection, to_date, product, version
def test_details_disbled_location_options(self): mv = Location.objects.get(name='Mountain View') # create two other locations Location.objects.create(name='Atlantis', timezone='US/Pacific', is_active=False) babylon = Location.objects.create(name='Babylon', timezone='US/Pacific') event = SuggestedEvent.objects.create( user=self.user, title='Cool Title', slug='cool-title', description='Some long description', short_description='') url = reverse('suggest:details', args=(event.pk, )) response = self.client.get(url) eq_(response.status_code, 200) ok_('Atlantis' not in response.content) ok_('Babylon' in response.content) # one of the fixtures ok_('Mountain View' in response.content) channel = Channel.objects.create(name='Security', slug='security') data = { 'start_time': '2021-01-01 12:00:00', 'estimated_duration': '3600', 'timezone': 'US/Pacific', 'location': babylon.pk, 'privacy': Event.PRIVACY_CONTRIBUTORS, 'channels': channel.pk, 'enable_discussion': True } response = self.client.post(url, data) eq_(response.status_code, 302) # Now suppose Babylon becomes inactive too babylon.is_active = False babylon.save() # go back to edit again response = self.client.get(url) eq_(response.status_code, 200) ok_('Atlantis' not in response.content) # available because it was chosen ok_('Babylon' in response.content) ok_('Mountain View' in response.content) # but suppose we now switch to Mountain View data['location'] = mv.pk response = self.client.post(url, data) eq_(response.status_code, 302) # now we can't go back to Babylon again response = self.client.get(url) eq_(response.status_code, 200) ok_('Atlantis' not in response.content) ok_('Babylon' not in response.content) ok_('Mountain View' in response.content)
def test_autocomplete_email(self):
    """Email autocomplete: requires q, matches users, falls back to a
    @mozilla.com suggestion, and echoes valid unknown addresses."""
    url = reverse('suggest:autocomplete_emails')
    response = self.client.get(url)
    # the q parameter is required
    eq_(response.status_code, 400)
    response = self.client.get(url, {'q': ''})
    eq_(response.status_code, 200)
    emails = json.loads(response.content)['emails']
    eq_(emails, [])

    # [email protected] is the user set up in the fixtures
    response = self.client.get(url, {'q': 'fake'})
    emails = json.loads(response.content)['emails']
    eq_(emails, ['*****@*****.**'])

    # searching for something that isn't an email address
    # should suggest <q>@mozilla.com
    response = self.client.get(url, {'q': 'start'})
    emails = json.loads(response.content)['emails']
    eq_(emails, ['*****@*****.**'])

    # searching for something that doesn't exist and isn't a valid
    # email, nothing should be found
    response = self.client.get(url, {'q': 'afweef@asd'})
    emails = json.loads(response.content)['emails']
    eq_(emails, [])

    # searching for a valid email address should return it
    response = self.client.get(url, {'q': '*****@*****.**'})
    emails = json.loads(response.content)['emails']
    eq_(emails, ['*****@*****.**'])