def parse_attachment(message_part):
    """Return an attachment pseudo-file extracted from an email part.

    The result is a StringIO with extra attributes (content_type, size,
    name, create_date, mod_date, read_date), or None when *message_part*
    does not carry an ``attachment`` Content-Disposition.
    """
    content_disposition = message_part.get("Content-Disposition", None)
    if not content_disposition:
        return None
    dispositions = content_disposition.strip().split(";")
    if dispositions[0].lower().strip() != "attachment":
        return None
    file_data = message_part.get_payload(decode=True)
    if file_data is None:
        # multipart containers have no decodable payload
        file_data = ""
    attachment = StringIO(file_data)
    attachment.content_type = message_part.get_content_type()
    attachment.size = len(file_data)
    attachment.name = None
    attachment.create_date = None
    attachment.mod_date = None
    attachment.read_date = None
    for param in dispositions[1:]:
        if "=" not in param:
            # malformed/bare token; the old code raised ValueError here
            continue
        # split on the first '=' only so values containing '=' survive;
        # strip whitespace from the name (headers are "; filename=...")
        # and surrounding quotes from the value
        name, value = param.split("=", 1)
        name = name.lower().strip()
        value = value.strip().strip('"')
        if name == "filename":
            attachment.name = value
        elif name == "create-date":
            attachment.create_date = value  # TODO: parse to datetime
        elif name == "modification-date":
            attachment.mod_date = value  # TODO: parse to datetime
        elif name == "read-date":
            attachment.read_date = value  # TODO: parse to datetime
    return attachment
def test_host_tests_with_multiple_testplans_perpackage(self):
    """Host test plan based - two test plans, one conductor command per plan."""
    # Two in-memory XML test plans under the "perpackage" distribution
    # model; get_commands must emit a separate conductor command for each.
    distribution_model = "perpackage"
    image_url = 'http://image/url/image.bin'
    test_plan = StringIO("hulapaloo")
    test_plan.name = "testplan.xml"
    test_plan2 = StringIO("hulapaloo2")
    test_plan2.name = "testplan2.xml"
    test_list = {'host_testplans': [test_plan, test_plan2]}
    testrun_id = ""
    storage_address = ""
    test_filter = ""
    timeout = "30"
    rootstrap = ""
    commands = get_commands(distribution_model,
                            image_url,
                            rootstrap,
                            test_list,
                            testrun_id,
                            storage_address,
                            test_filter,
                            timeout)
    expected_cmd_1 = ['conductor',
                      '-u', 'http://image/url/image.bin',
                      '-m', '30',
                      '-p', 'testplan.xml',
                      '-o']
    expected_cmd_2 = ['conductor',
                      '-u', 'http://image/url/image.bin',
                      '-m', '30',
                      '-p', 'testplan2.xml',
                      '-o']
    self.assertEquals(len(commands), 2)
    self.assertEquals(commands[0].command, expected_cmd_1)
    self.assertEquals(commands[1].command, expected_cmd_2)
def parse_attachments_poptres(self, content_disposition, part):
    """Extract an attachment MIME *part* and persist it as an Attachment.

    Returns the saved Attachment owned by ``self.usr``, or None when the
    disposition is not ``attachment``.
    """
    dispositions = content_disposition.strip().split(";")
    if dispositions[0].lower().strip() != "attachment":
        return None
    file_data = part.get_payload(decode=True)
    attachment = StringIO(file_data)
    attachment.content_type = part.get_content_type()
    attachment.size = len(file_data)
    attachment.name = None
    attachment.create_date = None
    attachment.mod_date = None
    attachment.read_date = None
    for param in dispositions[1:]:
        if "=" not in param:
            # malformed/bare parameter token; previously raised ValueError
            continue
        # split on the first '=' only so values containing '=' stay intact
        name, value = param.split("=", 1)
        name = name.lower().strip()
        value = value.replace('"', '').strip()
        if name == "filename":
            attachment.name = value
        elif name == "create-date":
            attachment.create_date = value
        elif name == "modification-date":
            attachment.mod_date = value
        elif name == "read-date":
            attachment.read_date = value
    # position at EOF so tell() reports the total size for the upload wrapper
    attachment.seek(0, 2)
    f = InMemoryUploadedFile(attachment, "", attachment.name,
                             attachment.content_type, attachment.tell(), None)
    atch = Attachment()
    atch.user = self.usr
    atch.file.save(attachment.name, f)
    atch.save()
    return atch
def parse_attachment(self, message_part):
    """Return an attachment pseudo-file parsed from *message_part*.

    The result is a StringIO with extra attributes (content_type, size,
    name, create_date, mod_date, read_date), or None when the part is not
    an attachment.  Inline parts that carry a filename are treated as
    attachments too.
    """
    content_disposition = message_part.get("Content-Disposition", None)
    if content_disposition:
        dispo_type, dispo_dict = self.parse_dispositions(content_disposition)
        if dispo_type == "attachment" or (dispo_type == 'inline' and 'filename' in dispo_dict):
            file_data = message_part.get_payload(decode=True)
            # multipart containers decode to None; normalise to empty payload
            if file_data is None:
                file_data = ""
            attachment = StringIO(file_data)
            attachment.content_type = message_part.get_content_type()
            attachment.size = len(file_data)
            attachment.name = None
            attachment.create_date = None
            attachment.mod_date = None
            attachment.read_date = None
            if "filename" in dispo_dict:
                attachment.name = dispo_dict['filename']
            else:
                # fall back to the name= parameter of the Content-Type header
                content_type = message_part.get("Content-Type", None)
                if content_type:
                    _, content_dict = self.parse_dispositions(content_type)
                    if 'name' in content_dict:
                        attachment.name = content_dict['name']
            if "create-date" in dispo_dict:
                attachment.create_date = dispo_dict['create-date']  # TODO: datetime
            if "modification-date" in dispo_dict:
                attachment.mod_date = dispo_dict['modification-date']  # TODO: datetime
            if "read-date" in dispo_dict:
                attachment.read_date = dispo_dict['read-date']  # TODO: datetime
            return attachment
    return None
def parse_attachment(self, message_part):
    """Return an attachment pseudo-file extracted from an email part.

    The result is a StringIO (rewound to the start) with extra attributes
    (content_type, size, name, ...), or None when *message_part* does not
    carry an ``attachment`` Content-Disposition.
    """
    content_disposition = message_part.get("Content-Disposition", None)
    if not content_disposition:
        return None
    dispositions = content_disposition.strip().split(";")
    if dispositions[0].lower().strip() != "attachment":
        return None
    file_data = message_part.get_payload(decode=True)
    # Construct directly from the payload: the old write()-based version
    # left the stream positioned at EOF, so readers got an empty string.
    attachment = StringIO(file_data)
    attachment.content_type = message_part.get_content_type()
    attachment.size = len(file_data)
    attachment.name = None
    attachment.create_date = None
    attachment.mod_date = None
    attachment.read_date = None
    for param in dispositions[1:]:
        if "=" not in param:
            continue
        # split on the first '=' only so values containing '=' survive
        name, value = param.split("=", 1)
        name = name.lower().strip()
        # Only the filename parameter names the attachment; the old loop
        # clobbered .name with the value of EVERY disposition parameter.
        if name == "filename":
            attachment.name = value.replace('"', '').strip()
    return attachment
def run(self, fname):
    """Render waveform and spectrum PNG strips for the recording's WAV file.

    For each zoom level the WAV is split into zoom-second chunks, each
    rendered to one waveform and one spectrum image.  Returns a dict of
    ``waveform<zoom>``/``spectrum<zoom>`` -> list of PNG bytes, plus a
    ``smallfull`` overview image.
    """
    baseFname, ext = os.path.splitext(os.path.basename(fname))
    wavfname, created = util.docserver_get_wav_filename(self.musicbrainz_id)
    panelWidth = 900  # pixels
    panelHeight = 255  # pixels
    zoomlevels = [4, 8, 16, 32]  # seconds
    # NOTE(review): attributes are set on the namedtuple *class*, which is
    # then used as a plain options bag by w2png - confirm this is intended.
    options = coll.namedtuple('options', 'image_height fft_size image_width')
    options.image_height = panelHeight
    options.image_width = panelWidth
    options.fft_size = 31
    ret = {}
    for zoom in zoomlevels:
        # Re-open the WAV for each zoom level so reading restarts at frame 0.
        wvFile = wave.Wave_read(wavfname)
        framerate = wvFile.getframerate()
        totalframes = wvFile.getnframes()
        # We want this many frames per file at this zoom level.
        framesperimage = framerate * zoom
        wfname = "waveform%s" % zoom
        specname = "spectrum%s" % zoom
        wfdata = []
        specdata = []
        sumframes = 0
        while sumframes < totalframes:
            # Write the current chunk to a temp WAV for w2png to consume.
            fp, smallname = tempfile.mkstemp(".wav")
            os.close(fp)
            data = wvFile.readframes(framesperimage)
            wavout = wave.open(smallname, "wb")
            # This will set nframes, but writeframes resets it
            wavout.setparams(wvFile.getparams())
            wavout.writeframes(data)
            wavout.close()
            sumframes += framesperimage
            specio = StringIO()
            # Set the name attr so that PIL gets the filetype hint
            specio.name = "spec.png"
            wavio = StringIO()
            wavio.name = "wav.png"
            w2png.genimages(smallname, wavio, specio, options)
            os.unlink(smallname)
            specdata.append(specio.getvalue())
            wfdata.append(wavio.getvalue())
        ret[wfname] = wfdata
        ret[specname] = specdata
    ret["smallfull"] = self.make_mini(wavfname)
    # Only delete the WAV if docserver_get_wav_filename created it for us.
    if created:
        os.unlink(wavfname)
    return ret
def test_subsequent_submission(self):
    """Submitting a new revision of a status change: reject binary uploads,
    accept text, bump the revision, and keep the old revision viewable."""
    doc = Document.objects.get(name='status-change-imaginary-mid-review')
    url = urlreverse('status_change_submit',kwargs=dict(name=doc.name))
    login_testing_unauthorized(self, "ad", url)
    # A little additional setup
    # doc.rev is u'00' per the test setup - double-checking that here - if it fails, the breakage is in setUp
    self.assertEqual(doc.rev,u'00')
    path = os.path.join(settings.STATUS_CHANGE_PATH, '%s-%s.txt' % (doc.canonical_name(), doc.rev))
    with open(path,'w') as f:
        f.write('This is the old proposal.')
        f.close()
    # Put the old proposal into IESG review (exercises ballot tab when looking at an older revision below)
    state_change_url = urlreverse('status_change_change_state',kwargs=dict(name=doc.name))
    iesgeval_pk = str(State.objects.get(slug='iesgeval',type__slug='statchg').pk)
    r = self.client.post(state_change_url,dict(new_state=iesgeval_pk))
    self.assertEqual(r.status_code, 302)
    # normal get
    r = self.client.get(url)
    self.assertEqual(r.status_code,200)
    q = PyQuery(r.content)
    self.assertTrue(q('textarea')[0].text.strip().startswith("This is the old proposal."))
    # faulty posts trying to use file upload
    # Copied from wgtracker tests - is this really testing the server code, or is it testing
    # how client.post populates Content-Type?
    test_file = StringIO("\x10\x11\x12")  # post binary file
    test_file.name = "unnamed"
    r = self.client.post(url, dict(txt=test_file,submit_response="1"))
    self.assertEqual(r.status_code, 200)
    self.assertTrue("does not appear to be a text file" in unicontent(r))
    # sane post uploading a file
    test_file = StringIO("This is a new proposal.")
    test_file.name = "unnamed"
    r = self.client.post(url,dict(txt=test_file,submit_response="1"))
    self.assertEqual(r.status_code, 302)
    # re-fetch: the successful upload must have bumped the revision to 01
    doc = Document.objects.get(name='status-change-imaginary-mid-review')
    self.assertEqual(doc.rev,u'01')
    path = os.path.join(settings.STATUS_CHANGE_PATH, '%s-%s.txt' % (doc.canonical_name(), doc.rev))
    with open(path) as f:
        self.assertEqual(f.read(),"This is a new proposal.")
        f.close()
    self.assertTrue( "mid-review-01" in doc.latest_event(NewRevisionDocEvent).desc)
    # verify reset text button works
    r = self.client.post(url,dict(reset_text="1"))
    self.assertEqual(r.status_code, 200)
    q = PyQuery(r.content)
    self.assertTrue(q('textarea')[0].text.strip().startswith("Provide a description"))
    # make sure we can see the old revision
    url = urlreverse('doc_view',kwargs=dict(name=doc.name,rev='00'))
    r = self.client.get(url)
    self.assertEqual(r.status_code,200)
    self.assertTrue("This is the old proposal." in unicontent(r))
def test_multiple_upload_failure(self):
    """Uploading more than one file in a single request must be rejected."""
    self.login("*****@*****.**")
    first = StringIO("bah!")
    first.name = "a.txt"
    second = StringIO("pshaw!")
    second.name = "b.txt"
    response = self.client.post("/json/upload_file",
                                {"f1": first, "f2": second})
    self.assert_json_error(response, "You may only upload one file at a time")
def make_mini(self, wavfname):
    """Render the small full-length waveform overview and return its PNG bytes."""
    opts = coll.namedtuple('options', 'image_height fft_size image_width')
    opts.fft_size = 4096
    opts.image_height = 65
    opts.image_width = 900
    wav_buf = StringIO()
    wav_buf.name = "wav.png"
    # The spectrogram output is discarded, but w2png requires a target for it.
    spec_buf = StringIO()
    spec_buf.name = "spec.png"
    w2png.genimages(wavfname, wav_buf, spec_buf, opts)
    return wav_buf.getvalue()
def test_import_export(self):
    """ tests that the results of an export can be imported """
    assert Place.objects.all().count() == 0
    assert test_client_login(self.client, username='******', password='******') == True
    # Four CSV rows: two valid places plus two rows with missing fields
    # that the importer must skip.
    csv_file = StringIO("Donut Mountain,123 Fakey St.,1.0,2.0\nFlapper Jacks,,,,\nDonut House,124 Fakey St.,1.001,2.001,http://www.example.org/bs\nSoup Sacks,,,")
    csv_file.name = 'test.csv'
    response = self.client.post(self.import_url, {'place_type': '1', 'csv_file': csv_file})
    assert response.status_code == 200
    assert Place.objects.all().count() == 2
    place = Place.objects.get(normalized_name='DONUT MOUNTAIN')
    assert place.address == '123 Fakey St.'
    assert place.location.x == 2.0
    assert place.location.y == 1.0
    place = Place.objects.get(normalized_name='DONUT HOUSE')
    assert place.address == '124 Fakey St.'
    assert place.location.x == 2.001
    assert place.location.y == 1.001
    assert place.url == 'http://www.example.org/bs'
    # Export what was just imported...
    response = self.client.post(self.export_url, {'place_type': place.place_type.id})
    assert response.status_code == 200
    Place.objects.all().delete()
    assert Place.objects.all().count() == 0
    # ...then re-import the exported CSV; both places must round-trip intact.
    csv_file = StringIO(response.content)
    csv_file.name = 'test.csv'
    response = self.client.post(self.import_url, {'place_type': '1', 'csv_file': csv_file})
    assert response.status_code == 200
    assert Place.objects.all().count() == 2
    place = Place.objects.get(normalized_name='DONUT MOUNTAIN')
    assert place.address == '123 Fakey St.'
    assert place.location.x == 2.0
    assert place.location.y == 1.0
    place = Place.objects.get(normalized_name='DONUT HOUSE')
    assert place.address == '124 Fakey St.'
    assert place.location.x == 2.001
    assert place.location.y == 1.001
    assert place.url == 'http://www.example.org/bs'
def test_upload(self):
    """Covariate upload view: requires login, re-renders on bad input,
    and creates CovariateType/Covariate rows from a valid CSV."""
    c = Client()
    url = reverse('gbd.covariate_data_server.views.covariate_upload')
    # anonymous access must redirect to the login page
    response = c.get(url)
    self.assertRedirects(response, '/accounts/login/?next=%s'%url)
    # then login and do functional tests
    c.login(username='******', password='******')
    response = c.get(url)
    self.assertTemplateUsed(response, 'covariate_upload.html')
    # an empty POST re-renders the form instead of redirecting
    response = c.post(url, {})
    self.assertTemplateUsed(response, 'covariate_upload.html')
    # now do it right, and make sure that data and datasets are added
    from StringIO import StringIO
    f = StringIO(',iso3,year,LDI_id,LDI_usd\n1,ABW,1950,1533.743774,1105.747437\n1,ABW,1951,1533.843774,1105.87437\n')
    f.name = 'LDI.csv'
    response = c.post(url, {'file':f, 'type': 'LDI_id', 'source': 'web', 'notes': 'description', 'uploader': 'red', 'yearStart': 1950, 'yearEnd': 2010})
    self.assertRedirects(response, reverse('gbd.covariate_data_server.views.covariate_type_list_show'))
    self.assertEqual(CovariateType.objects.filter(slug='LDI_id').count(), 1)
    # two data rows, fanned out to male/female/total covariates
    self.assertEqual(Covariate.objects.filter(type__slug='LDI_id', sex='male').count(), 2)
    self.assertEqual(Covariate.objects.filter(type__slug='LDI_id', sex='female').count(), 2)
    self.assertEqual(Covariate.objects.filter(type__slug='LDI_id', sex='total').count(), 2)
def test_import_template(self):
    """An exported statistic template with an added 2012 column should
    import and produce the matching StatValue."""
    country = self.create(Country, name='Zambia')
    statistic = G(StatDescription)
    # Export template
    data = self.assert_export_template(statistic=statistic)
    # Add new column for 2012
    data.append_col(lambda _: '111.11', header='2012')
    csv_file = StringIO(data.csv)
    csv_file.name = "statistic.csv"
    response = self.fast_dispatch('admin:country-stats',
                                  method='post',
                                  post_params={
                                      'statistic': str(statistic.id),
                                      'upload': csv_file,
                                  },
                                  request_extras=dict(csrf_processing_done=True)
                                  )
    self.assert_redirected_mini(response, reverse('admin:country-stats'))
    value = StatValue.objects.get(
        year=2012,
        country=country,
        description=statistic
    )
    self.assertEqual(value.value, Decimal('111.11'))
def getImage(self):
    """Create a 50x50 solid red RGBA PNG in memory and store it (rewound,
    with a filename) in self.secpictodata['image']."""
    file_obj = StringIO()
    # 255 is the maximum 8-bit channel value; the previous 256 overflowed
    # the 0-255 range and did not produce the intended pure red.
    image = Image.new("RGBA", size=(50, 50), color=(255, 0, 0))
    image.save(file_obj, 'png')
    file_obj.name = 'test.png'
    # rewind so consumers can read the PNG bytes from the start
    file_obj.seek(0)
    self.secpictodata['image'] = file_obj
def test_file_upload(self):
    """The page should have the correct number of errors in the header when
    using the pfif_xml_file_1 POST variable to send PFIF XML."""
    upload = StringIO(PfifXml.XML_TWO_DUPLICATE_NO_CHILD)
    upload.name = 'two_duplicate_no_child.xml'
    response = self.make_request({'pfif_xml_file_1': upload})
    self.assertTrue("3 Messages" in response.content)
def test_diff(self):
    """The diff results page should have a header and a div for each message."""
    xml_file = StringIO(PfifXml.XML_ADDED_DELETED_CHANGED_1)
    xml_file.name = 'added_deleted_changed_1.xml'
    # The second document is supplied via a faked URL fetch.
    utils.set_file_for_test(StringIO(PfifXml.XML_ADDED_DELETED_CHANGED_2))
    post_dict = {
        'pfif_xml_file_1' : xml_file,
        'pfif_xml_url_2' : 'fake_url',
        'options' : ['text_is_case_sensitive']}
    response = self.make_request(post_dict, path='/diff/results')
    response_str = response.content
    # set the test file again because the first one will be at the end, and the
    # xml parser doesn't have to seek(0) on it.
    utils.set_file_for_test(StringIO(PfifXml.XML_ADDED_DELETED_CHANGED_2))
    post_dict['options'].append('group_messages_by_record')
    grouped_response = self.make_request(post_dict, path='/diff/results')
    grouped_response_str = grouped_response.content
    # The header should have 'Diff' and 'Messages' in it along with the filename
    # or url.
    # The body should have each of five message types from pfif_object_diff
    for message in ['Diff', 'Messages', 'added_deleted_changed_1.xml',
                    'fake_url', 'extra', 'missing', 'field', 'record',
                    'Value', 'changed', 'A', 'B']:
        self.assertTrue(
            message in response_str and message in grouped_response_str,
            'The diff was missing the '
            'following message: ' + message + '. The diff: ' + response_str)
def test_add_products_error(self): self.assertTrue(self.client.login(email="*****@*****.**")) # Check error when submitting form with not enough fields data = { "form-TOTAL_FORMS": "1", "form-INITIAL_FORMS": "0", "form-0-name": u"тестовый товар", "form-0-category": 1, "form-0-price": 100, #'form-0-quantity': 100, - NO QUANTITY "form-0-description": u"тест", "form-0-order": 0, } response = self.client.post(reverse("add_data", args=[1]), data=data) result = simplejson.loads(response.content) self.assertEquals(result["result"], "error") self.assertEquals(result["response"], {"form-0-quantity": [_("This field is required.")]}) # Check error when submitting form with CSV csv_file = StringIO("test product,home,ERROR,120,test description") csv_file.name = "prestashop.csv" data = {"0": csv_file} self.client.post(reverse("upload_add_csv"), data=data) data = {"form-TOTAL_FORMS": "1", "form-INITIAL_FORMS": "0"} response = self.client.post(reverse("add_data", args=[1]), data=data) result = simplejson.loads(response.content) self.assertEquals(result["result"], "success") self.assertEquals(result["response"], "14_update_permission_or_csv")
def test_do_mission_incorrectly_revdiff(self):
    """Submitting a reverse patch must be rejected and leave the mission
    step incomplete."""
    orig_response = self.client.get(
        reverse(views.diffrecursive_get_original_tarball))
    tfile = tarfile.open(
        fileobj=StringIO(orig_response.content), mode='r:gz')
    diff = StringIO()
    for fileinfo in tfile:
        if not fileinfo.isfile():
            continue
        oldlines = tfile.extractfile(fileinfo).readlines()
        newlines = []
        for line in oldlines:
            for old, new in view_helpers.DiffRecursiveMission.SUBSTITUTIONS:
                line = line.replace(old, new)
            newlines.append(line)
        # We're very similar to test_do_mission-correctly, but here we
        # switch newlines and oldlines, to create a reverse patch
        diff.writelines(
            difflib.unified_diff(newlines, oldlines,
                                 'orig-' + fileinfo.name, fileinfo.name))
    diff.seek(0)
    diff.name = 'foo.patch'
    # Submit, and see if we get the same error message we expect.
    error = self.client.post(
        reverse(views.diffrecursive_submit), {'diff': diff})
    self.assert_(
        'You submitted a patch that would revert the correct changes back to the originals. You may have mixed the parameters for diff, or performed a reverse patch.' in utf8(error))
    # The step must NOT be marked completed.
    paulproteus = Person.objects.get(user__username='******')
    self.assertEqual(len(StepCompletion.objects.filter(
        step__name='diffpatch_diffrecursive', person=paulproteus)), 0)
def mkfile(self, path, content=None):
    """Create a document at *path* (with optional *content*) in the CMIS repo."""
    remote_path = self.get_remote_path(path)
    parent_path, name = os.path.split(remote_path)
    folder = self.repo.getObjectByPath(parent_path)
    body = StringIO(content)
    # createDocument reads the filename from the stream's name attribute
    body.name = path.rsplit('/', 1)[-1]
    folder.createDocument(name, contentFile=body)
def test_process_id_map(self):
    """process_id_map should return correct results on small test map"""
    # Tab-delimited map: header row, one comment row, three sample rows.
    # Quoted fields ("AC", "sample_y") must be unquoted by the parser.
    s = """#SampleID\tBarcodeSequence\tLinkerPrimerSequence\tX\tDescription
#fake data
x\tAA\tACGT\t3\tsample_x
y\t"AC"\tACGT\t4\t"sample_y"
z\tGG\tACGT\t5\tsample_z"""
    f = StringIO(s)
    f.name='test.xls'
    headers, id_map, description_map, run_description, errors, warnings = \
        process_id_map(f)
    self.assertEqual(headers, ['BarcodeSequence', 'LinkerPrimerSequence', \
        'X'])
    self.assertEqual(id_map, {'y': {'X': '4', 'LinkerPrimerSequence': \
        'ACGT', 'BarcodeSequence': 'AC'}, 'x': {'X': '3', \
        'LinkerPrimerSequence': 'ACGT', 'BarcodeSequence': 'AA'}, 'z': \
        {'X': '5', 'LinkerPrimerSequence': 'ACGT', 'BarcodeSequence': 'GG'}})
    self.assertEqual(description_map, {
        'x':'sample_x',
        'y':'sample_y',
        'z':'sample_z',
    })
    self.assertEqual(run_description, ['fake data'])
    self.assertEqual(errors, [])
    self.assertEqual(warnings, [])
def test_condition(self):
    """Exercise #ifdef/#if/#else/#endif handling, including spaced
    directives ('# if') and negation of a defined-but-false macro."""
    pp = CompilerPreprocessor({
        'A': 1,
        'B': '2',
        'C': '0L',
    })
    pp.out = StringIO()
    input = StringIO(dedent('''\
        #ifdef A
        IFDEF_A
        #endif
        #if A
        IF_A
        #endif
        # if B
        IF_B
        # else
        IF_NOT_B
        # endif
        #if !C
        IF_NOT_C
        #else
        IF_C
        #endif
        '''))
    # do_include reports errors against the stream's name attribute
    input.name = 'foo'
    pp.do_include(input)
    self.assertEquals('IFDEF_A\nIF_A\nIF_B\nIF_NOT_C\n', pp.out.getvalue())
def test_generate_playbook(self):
    """generate_playbook should write the plays (with 'hosts: all' added)
    to a temp file created via NamedTemporaryFile."""
    playbook = [
        dict(
            roles=['role1', 'role2'],
        ),
    ]
    self.task_config.update(dict(
        playbook=playbook
    ))
    task = self.klass(self.ctx, self.task_config)
    playbook_file_path = '/my/playbook/file'
    playbook_file_obj = StringIO()
    playbook_file_obj.name = playbook_file_path
    # Substitute an in-memory buffer for the temp file the task creates.
    with patch.object(ansible, 'NamedTemporaryFile') as m_NTF:
        m_NTF.return_value = playbook_file_obj
        task.get_playbook()
        task.generate_playbook()
        m_NTF.assert_called_once_with(prefix="teuth_ansible_playbook_",
                                      delete=False)
    assert task.generated_playbook is True
    assert task.playbook_file == playbook_file_obj
    playbook_file_obj.seek(0)
    playbook_result = yaml.safe_load(playbook_file_obj)
    # The task is expected to add hosts: all to every play it writes.
    for play in playbook:
        play['hosts'] = 'all'
    assert playbook_result == playbook
def run(self):
    """Generate (or read) XML library documentation for each configured
    library and upload it, collecting failures without aborting the batch."""
    failed = []
    for library in self._options.libraries:
        try:
            xml_doc = StringIO()
            # LibraryDocumentation().save() calls close() for the underlying
            # file but closing StringIO object discards its data.
            # This is why close() is overridden below.
            xml_doc.original_close = xml_doc.close
            try:
                try:
                    if library.endswith('.xml'):
                        # already-generated libdoc XML: copy it verbatim
                        with open(library) as xml_file:
                            xml_doc.write(xml_file.read())
                    else:
                        xml_doc.close = lambda: None
                        LibraryDocumentation(library).save(xml_doc, 'xml')
                except DataError, e:
                    message = "Library not found" if 'ImportError' in e.message else e.message
                    failed.append(library)
                    sys.stderr.write("Skipping '%s' due to an error: %s.\n" % (library, message))
                    continue
                # the uploader reads the filename from the stream's name attr
                xml_doc.name = library
                self._uploader.upload_file(xml_doc, self._options.lib_name, self._options.lib_version)
                sys.stdout.write("Updated documentation for '%s'.\n" % library)
            finally:
                # always release the buffer via the real close
                xml_doc.original_close()
        except DataError, e:
            failed.append(library)
            sys.stderr.write('%s: Remote error: %s\n' % (os.path.basename(__file__), e.message))
def test_upload_asset(self, data, file_=False, side_effect=None, error=False, code=200, msg="Created"):
    """Drive the upload-asset view with JSON or multipart content and
    validate the call into the asset manager plus the JSON response."""
    content_type = "application/json"
    if file_:
        f = StringIO()
        f.name = "test_file.txt"
        f.write("test file")
        # NOTE(review): f is not rewound before posting - confirm the test
        # client/view does not need to read the payload from the start.
        content = {"json": json.dumps(data), "file": f}
        content_type = MULTIPART_CONTENT
    else:
        content = json.dumps(data)
    self.am_instance.upload_asset.return_value = "http://locationurl.com/"
    def validator(request, body_response):
        if not error:
            # Check correct call
            if not file_:
                self.am_instance.upload_asset.assert_called_once_with(self.user, data)
            else:
                expected_file = request.FILES["file"]
                self.am_instance.upload_asset.assert_called_once_with(self.user, data, file_=expected_file)
            self.assertEquals(
                body_response, {"contentType": "application/zip", "content": "http://locationurl.com/"}
            )
        else:
            self.assertEqual(body_response, {"error": msg, "result": "error"})
    self._test_post_api(views.UploadCollection, content, content_type, side_effect, code, validator)
def save(self, name, content):
    """Store *content* in the S3 bucket under a cleaned version of *name*,
    optionally gzipping it, and return the cleaned name actually used."""
    if isinstance(content, (str, unicode)):
        # Support saving of string values
        content = StringIO(content)
    elif isinstance(content, file):
        # file objects don't support setting the 'name'
        # property. So wrap it in a FileProxyMixin
        f = FileProxyMixin()
        f.file = content
        content = f
    cleaned_name = self._clean_name(name)
    cleaned_name = self.get_available_name(cleaned_name)
    name = self._normalize_name(cleaned_name)
    headers = self.headers.copy()
    # Prefer an explicit content_type on the file, else guess from the name.
    content_type = getattr(content, 'content_type', mimetypes.guess_type(name)[0] or Key.DefaultContentType)
    if self.gzip and content_type in self.gzip_content_types:
        content = self._compress_content(content)
        headers.update({'Content-Encoding': 'gzip'})
    content.name = cleaned_name
    encoded_name = self._encode_name(name)
    key = self.bucket.get_key(encoded_name)
    if not key:
        key = self.bucket.new_key(encoded_name)
    if self.preload_metadata:
        # cache the key so later metadata lookups avoid an S3 round-trip
        self._entries[encoded_name] = key
    key.set_metadata('Content-Type', content_type)
    key.set_contents_from_file(content, headers=headers, policy=self.acl,
                               reduced_redundancy=self.reduced_redundancy,
                               rewind=True)
    return cleaned_name
def test_execute_playbook(self):
    """execute_playbook should hand the built CLI args to pexpect.run."""
    playbook = '/my/playbook'
    self.task_config.update(dict(
        playbook=playbook
    ))
    fake_playbook = [dict(fake_playbook=True)]
    fake_playbook_obj = StringIO(yaml.safe_dump(fake_playbook))
    fake_playbook_obj.name = playbook
    task = self.klass(self.ctx, self.task_config)
    # setup() opens the playbook; feed it the in-memory copy instead.
    with patch('teuthology.task.ansible.file', create=True) as m_file:
        m_file.return_value = fake_playbook_obj
        task.setup()
    args = task._build_args()
    logger = StringIO()
    with patch.object(ansible.pexpect, 'run') as m_run:
        m_run.return_value = ('', 0)
        with patch.object(Remote, 'reconnect') as m_reconnect:
            m_reconnect.return_value = True
            task.execute_playbook(_logfile=logger)
    m_run.assert_called_once_with(
        ' '.join(args),
        logfile=logger,
        withexitstatus=True,
        timeout=None,
    )
def _set_csv_file(self, sync_type, quantity=30, price=100): if sync_type == "EAN13": csv_file = StringIO("1000000000002,%d,%d" % (quantity, price)) else: csv_file = StringIO("20,%d,%d" % (quantity, price)) csv_file.name = "prestashop.csv" return csv_file
def test_do_mission_correctly_with_old_filenames(self):
    """A correct patch built against the pre-rename filenames should still
    complete the mission step."""
    orig_response = self.client.get(
        reverse(views.diffrecursive_get_original_tarball))
    tfile = tarfile.open(
        fileobj=StringIO(orig_response.content), mode='r:gz')
    diff = StringIO()
    for fileinfo in tfile:
        if not fileinfo.isfile():
            continue
        # calculate the old (pre-rename) name
        transformed_name = view_helpers.DiffRecursiveMission.name_new2old(
            fileinfo.name)
        oldlines = tfile.extractfile(fileinfo).readlines()
        newlines = []
        for line in oldlines:
            for old, new in view_helpers.DiffRecursiveMission.SUBSTITUTIONS:
                line = line.replace(old, new)
            newlines.append(line)
        diff.writelines(difflib.unified_diff(oldlines, newlines,
                                             'orig-' + transformed_name,
                                             transformed_name))
    diff.seek(0)
    diff.name = 'foo.patch'
    submit_response = self.client.post(
        reverse(views.diffrecursive_submit), {'diff': diff})
    self.assert_(submit_response.context['diffrecursive_success'])
    # The step must be recorded as completed exactly once.
    paulproteus = Person.objects.get(user__username='******')
    self.assertEqual(len(StepCompletion.objects.filter(
        step__name='diffpatch_diffrecursive', person=paulproteus)), 1)
def test(Text):
    """Parse *Text* as a 'counter' section and print the resulting count
    command map, tagging output with a global test counter."""
    global count_n
    count_n += 1
    # single-line inputs are printed inline; multi-line inputs in a block
    if Text.find("\n") == -1:
        print "(%i) |%s|\n" % (count_n, Text)
    else:
        print "(%i)\n::\n%s\n::\n" % (count_n, Text)
    sh = StringIO(Text)
    # the parser reports errors against the stream's name attribute
    sh.name = "test_string"
    descr = None
    # descr = counter.parse_line_column_counter(sh)
    try:
        descr = counter.parse_line_column_counter(sh)
        pass
    except EndOfStreamException:
        error.log("End of file reached while parsing 'counter' section.",
                  sh, DontExitF=True)
    except:
        print "Exception!"
    if descr is not None:
        print descr.count_command_map
    print
def test_comment_on_review(self):
    """Posting a comment with a photo attachment should create the comment,
    store the attachment, and notify everyone except the commenter."""
    fh = StringIO('testing')
    fh.name = 'test_file_1'
    self.assertEqual(len(mail.outbox), 0)
    response = self.client.post(reverse('review-detail', args=[self.review.pk]), {
        'comment': "Oh nice demo!",
        'attachment': fh,
        'attachment_type': 'photo',
        'description': 'Test Description',
    })
    self.assertStatusCode(response, 302)
    rev = self.review.revision
    self.assertEqual(rev.commentthread_set.count(), 2)
    self.assertEqual(
        rev.commentthread_set.latest().comment_set.count(), 1
    )
    comment = rev.commentthread_set.latest().comment_set.get()
    self.assertEqual(comment.commenter, self.user)
    self.assertEqual(comment.comment, 'Oh nice demo!')
    self.assertEqual(comment.attachments.count(), 1)
    attachment = comment.attachments.get()
    self.assertEqual(attachment.attachment_type, models.Attachment.PHOTO)
    self.assertEqual(attachment.description, 'Test Description')
    self.assertEqual(
        models.Message.objects.filter(title__contains='New Comment').count(),
        6
    )
    # the commenter must not be notified about their own comment
    self.assertFalse(
        models.Message.objects.filter(receipient=self.user).exists()
    )
    self.assertEqual(len(mail.outbox), 3)
def _calculate_correct_recursive_diff(self, dos_line_endings=False):
    """Build the correct recursive diff for the mission tarball.

    Applies every mission substitution to each file and returns a rewound
    StringIO named 'foo.patch'; with dos_line_endings=True the patch text
    uses CRLF line endings.
    """
    orig_response = self.client.get(
        reverse(views.diffrecursive_get_original_tarball))
    tfile = tarfile.open(
        fileobj=StringIO(orig_response.content), mode='r:gz')
    diff = StringIO()
    for fileinfo in tfile:
        if not fileinfo.isfile():
            continue
        oldlines = tfile.extractfile(fileinfo).readlines()
        newlines = []
        for line in oldlines:
            for old, new in view_helpers.DiffRecursiveMission.SUBSTITUTIONS:
                line = line.replace(old, new)
            newlines.append(line)
        lines_for_output = list(
            difflib.unified_diff(oldlines, newlines,
                                 'orig-' + fileinfo.name, fileinfo.name))
        bytes_for_output = ''.join(lines_for_output)
        if dos_line_endings:
            bytes_for_output = bytes_for_output.replace('\n', '\r\n')
        diff.write(bytes_for_output)
    diff.seek(0)
    diff.name = 'foo.patch'
    return diff
def test_autodetect_import_csv_raises_on_null_bytes(self):
    """A CSV upload containing NUL bytes must raise DirigibleImportError."""
    payload = StringIO()
    payload.write("\xFF\x00\xFF")
    payload.name = 'filename'
    payload.size = 10
    payload.seek(0)
    self.assertRaises(
        DirigibleImportError,
        lambda: worksheet_from_csv(Worksheet(), payload, 1, 1, False))
def test_playbook_file(self):
    """get_playbook should expose the opened playbook under its given name."""
    playbook_name = 'cephlab.yml'
    playbook_obj = StringIO(yaml.safe_dump([dict(fake_playbook=True)]))
    playbook_obj.name = playbook_name
    task = self.klass(self.ctx, dict())
    task.repo_path = '/tmp/fake/repo'
    self.mocks['file'].return_value = playbook_obj
    task.get_playbook()
    assert task.playbook_file.name == playbook_name
def test_override_l10n(self):
    """With an l10n base set, makeJar should resolve relativesrcdir entries
    against the base directory."""
    jm = self.jm
    jm.l10nbase = '/L10N_BASE'
    jm.outputFormat = 'flat'
    # doesn't touch chrome dir without files
    jarcontents = StringIO('''en-US.jar:
relativesrcdir dom/locales:
''')
    jarcontents.name = 'override.mn'
    jm.makeJar(jarcontents, '/NO_OUTPUT_REQUIRED')
    self.assertEquals(jm.localedirs, [os.path.join('/L10N_BASE', 'dom')])
def test_upload_qualification_document(self):
    """Uploading a qualification document should echo its title and a fresh id."""
    setup_routing(self.app, routs=["tender_subpage_document_create"])
    doc_file = StringIO()
    doc_file.name = 'test_document.txt'
    doc_file.write("test upload qualification document text data")
    doc_file.seek(0)
    result = self.client.upload_qualification_document(
        doc_file, self.tender, TEST_KEYS.qualification_id)
    self.assertEqual(result.data.title, doc_file.name)
    self.assertEqual(result.data.id, TEST_KEYS.new_document_id)
    doc_file.close()
def post_it(self, user_id=None, form_id=XFORM_ID):
    """POST the templated XForm XML as a multipart form submission."""
    user_id = user_id or self.couch_user._id
    xml = XML_DATA.format(
        user_id=user_id,
        xmlns=XMLNS,
        xform_id=form_id,
    )
    submission = StringIO(xml)
    submission.name = 'form.xml'
    return self.client.post(self.url, {'xml_submission_file': submission})
def test_upload_contract_document(self):
    """Uploading a contract document should echo its title and a fresh id."""
    setup_routing(self.app, routs=["contract_document_create"])
    doc_file = StringIO()
    doc_file.name = 'test_document.txt'
    doc_file.write("test upload contract document text data")
    doc_file.seek(0)
    result = self.client.upload_document(doc_file, self.contract)
    self.assertEqual(result.data.title, doc_file.name)
    self.assertEqual(result.data.id, TEST_CONTRACT_KEYS.new_document_id)
    doc_file.close()
def test_update_cancellation_document(self):
    """Updating a cancellation document should keep its id and echo the title."""
    setup_routing(self.app, routs=["tender_subpage_document_update"])
    doc_file = StringIO()
    doc_file.name = 'test_document.txt'
    doc_file.write("test upload tender document text data")
    doc_file.seek(0)
    result = self.client.update_cancellation_document(
        doc_file,
        self.limited_tender,
        TEST_KEYS_LIMITED.cancellation_id,
        TEST_KEYS_LIMITED.cancellation_document_id)
    self.assertEqual(result.data.title, doc_file.name)
    self.assertEqual(result.data.id, TEST_KEYS_LIMITED.cancellation_document_id)
    doc_file.close()
def extract_templetor(fileobj, keywords, comment_tags, options):
    """Extract i18n messages from web.py templates.
    """
    try:
        # Compile the template to Python source so the standard python
        # extractor can scan it for translatable strings.
        code = web.template.Template.generate_code(fileobj.read(), fileobj.name)
        f = StringIO(code)
        f.name = fileobj.name
    except Exception, e:
        print >> web.debug, fileobj.name + ':', str(e)
        return []
    # NOTE(review): as captured here the function only returns [] on error
    # and otherwise falls through returning None; presumably the full module
    # continues by delegating `f` to Babel's python extractor - confirm.
def _run_xml(self):
    """Feed self.code through Odoo's XML import machinery."""
    self.ensure_one()
    # Patch StringIO object with missing name attribute:
    # convert_xml_import requires a .name, which plain StringIO lacks.
    xml_stream = StringIO(self.code)
    xml_stream.name = self.name
    convert_xml_import(self._cr, __package__, xml_stream)
    return 'No expected result'
def test_upload_award_document(self):
    """Uploading an award document should echo its title and a fresh id."""
    setup_routing(self.app, routes=["tender_subpage_document_create"])
    award_file = StringIO()
    award_file.name = 'test_award_document.txt'
    award_file.write("test upload award document text data")
    award_file.seek(0)
    result = self.client.upload_award_document(
        award_file, self.tender, TEST_TENDER_KEYS.award_id)
    self.assertEqual(result.data.title, award_file.name)
    self.assertEqual(result.data.id, TEST_TENDER_KEYS.new_document_id)
def test_validator_options(self):
    """The validator results page should have a span or div for each print option."""
    xml_file = StringIO(PfifXml.XML_EXPIRE_99_EMPTY_DATA)
    xml_file.name = 'xml_expire_99_empty_data.xml'
    post_dict = {
        'pfif_xml_file_1': xml_file,
        'print_options': ['show_errors']
    }
    # Enable print options one at a time; each request must surface the
    # marker for the newly enabled option.
    response = self.make_request(post_dict)
    self.assertTrue('ERROR' in response.content)
    self.assertTrue('message_type' in response.content)
    self.assertTrue('message_category' in response.content)
    post_dict['print_options'].append('show_warnings')
    response = self.make_request(post_dict)
    self.assertTrue('WARNING' in response.content)
    post_dict['print_options'].append('show_line_numbers')
    response = self.make_request(post_dict)
    self.assertTrue('message_line_number' in response.content)
    post_dict['print_options'].append('show_record_ids')
    response = self.make_request(post_dict)
    self.assertTrue('record_id' in response.content)
    post_dict['print_options'].append('show_full_line')
    response = self.make_request(post_dict)
    self.assertTrue('message_xml_full_line' in response.content)
    # EXPIRE_99 doesn't have any errors with xml element text or tag, so we use
    # a different XML file
    xml_file = StringIO(PfifXml.XML_INCORRECT_FORMAT_11)
    xml_file.name = 'xml_incorrect_format_11.xml'
    post_dict['pfif_xml_file_1'] = xml_file
    post_dict['print_options'].append('show_xml_tag')
    response = self.make_request(post_dict)
    self.assertTrue('message_xml_tag' in response.content)
    post_dict['print_options'].append('show_xml_text')
    response = self.make_request(post_dict)
    self.assertTrue('message_xml_text' in response.content)
def test_upload_update_csv(self):
    """Upload csv file HTML5 drag drop"""
    url = reverse('upload_update_csv')

    # A well-formed two-line CSV is accepted
    good_file = StringIO('25,240\n20,30')
    good_file.name = 'prestashop.csv'
    payload = {'0': good_file}
    result = simplejson.loads(self.client.post(url, data=payload).content)
    self.assertEquals(result['response'], _('File is valid'))
    self.assertEquals(result['lines'], 2)

    # Malformed content is rejected
    bad_file = StringIO('LOL! Invalid!')
    bad_file.name = 'prestashop.csv'
    payload['0'] = bad_file
    result = simplejson.loads(self.client.post(url, data=payload).content)
    self.assertEquals(result['response'], _('File is not valid'))
def test_parse_exceptions_include_stream_name(self):
    """Verify that parse exceptions include the stream name."""
    source = StringIO('''name: some.schema.1''')
    source.name = 'schema.yaml'
    expected_pattern = ('^'
                        'SchemaClassesUndefinedError: '
                        'Schema "some.schema.1": no classes defined'
                        '$')
    self.assertRaisesRegexp(
        parsers.ParserPhaseError, expected_pattern,
        self.parser.parse, source)
def test_playbook_file(self):
    """A playbook file in the repo should be picked up by get_playbook()."""
    playbook_name = 'cephlab.yml'
    playbook_stream = StringIO(yaml.safe_dump([dict(fake_playbook=True)]))
    playbook_stream.name = playbook_name
    task = self.klass(self.ctx, dict())
    task.repo_path = '/tmp/fake/repo'
    with patch('teuthology.task.ansible.file', create=True) as m_file:
        m_file.return_value = playbook_stream
        task.get_playbook()
        assert task.playbook_file.name == playbook_name
def test_update_bid_eligibility_document(self):
    """Updating a bid eligibility document should echo its title and id."""
    setup_routing(self.app, routs=["tender_subpage_document_update"])
    upload = StringIO()
    upload.name = 'test_document.txt'
    upload.write("test upload tender eligibility_document text data")
    upload.seek(0)
    document_type = "eligibility_documents"
    doc = self.client.update_bid_document(
        upload, self.tender, TEST_KEYS.bid_id,
        TEST_KEYS.bid_eligibility_document_id, document_type)
    self.assertEqual(doc.data.title, upload.name)
    self.assertEqual(doc.data.id, TEST_KEYS.bid_eligibility_document_id)
    upload.close()
def extract_templetor(fileobj, keywords, comment_tags, options): """Extract i18n messages from web.py templates.""" try: # Replace/remove inline js '\$' which interferes with the Babel python parser: code = web.template.Template.generate_code( fileobj.read().replace('\$', ''), fileobj.name) f = StringIO(code) f.name = fileobj.name except Exception, e: print >> web.debug, fileobj.name + ':', str(e) return []
def return_werkzeug_filestorage(request, filename, maxsize=(320, 240)):
    """Wrap the raw request body in a werkzeug FileStorage.

    The extension is derived from the request's content-type and must be
    listed in ALLOWED_EXTENSIONS; otherwise UploadNotAllowed is raised.
    """
    extension = request.headers['content-type'].split('/')[-1]
    if extension not in current_app.config['ALLOWED_EXTENSIONS']:
        raise UploadNotAllowed("Unsupported file format")
    target_name = '%s.%s' % (filename, extension)
    body_buffer = StringIO(buf=request.content)
    body_buffer.name = target_name
    return FileStorage(body_buffer,
                       filename=target_name,
                       content_type=request.headers['content-type'])
def test_autodetect_can_handle_japanese_utf8(self):
    """UTF-8 encoded Japanese text should survive the CSV import."""
    some_kanji = u'\u65b0\u4e16\u7d00\u30a8\u30f4\u30a1\u30f3\u30b2\u30ea\u30aa\u30f3'
    csv_input = StringIO()
    csv_input.write(some_kanji.encode('utf-8'))
    csv_input.name = 'filename'
    csv_input.size = 10
    csv_input.seek(0)
    worksheet = worksheet_from_csv(Worksheet(), csv_input, 1, 1, False)
    self.assertEquals(worksheet.A1.formula, some_kanji)
def spoof_submission(submit_url, body):
    """POST *body* to *submit_url* as an XForm submission file.

    Returns the value of the X-CommCareHQ-FormID response header, or
    None when the header is absent.
    """
    test_client = Client()
    submission = StringIO(body.encode('utf-8'))
    submission.name = 'form.xml'
    response = test_client.post(submit_url, {
        'xml_submission_file': submission,
    })
    try:
        return response['X-CommCareHQ-FormID']
    except KeyError:
        return None
def test_image_in_question(self):
    """A question with a figure should render an <img> tag on the page."""
    gif_bytes = ('GIF87a\x01\x00\x01\x00\x80\x01\x00\x00\x00\x00ccc,'
                 '\x00\x00\x00\x00\x01\x00\x01\x00\x00\x02\x02D\x01\x00;')
    imgfile = StringIO(gif_bytes)
    imgfile.name = 'test_img_file.gif'
    self.question1.figure.save('image', ContentFile(imgfile.read()))
    response = self.client.get('/tq1/take/')
    self.assertContains(response, '<img src=')
    self.assertContains(response, 'alt="' + str(self.question1.content))
def make_mini(self, wavfname):
    """Render a small waveform PNG for *wavfname* and return its bytes.

    Fix: ``f_max`` was assigned twice; the duplicate statement is removed.
    NOTE(review): the namedtuple *class* is used as a plain attribute bag
    (fields are set as class attributes, never instantiated); it works,
    but a simple object or dict would be clearer — confirm before changing.
    """
    smallfulloptions = coll.namedtuple(
        'options',
        'image_height fft_size image_width f_min f_max scale_exp pallete')
    smallfulloptions.fft_size = 4096
    smallfulloptions.image_height = 65
    smallfulloptions.image_width = 900
    smallfulloptions.f_min = None
    smallfulloptions.f_max = None
    smallfulloptions.pallete = None
    smallfulloptions.scale_exp = None

    smallfullio = StringIO()
    smallfullio.name = "wav.png"
    # We don't use the spectogram, but need to provide it anyway
    smallfullspecio = StringIO()
    smallfullspecio.name = "spec.png"
    invmfccio = StringIO()
    invmfccio.name = "spec.png"
    w2png.genimages(wavfname, smallfullio, smallfullspecio, invmfccio,
                    smallfulloptions)
    return smallfullio.getvalue()
def ensure(server=None):
    """Install certbot and, when *server* is given, install a renewal
    deploy hook that reloads that service after each certificate renewal.

    Fixes two bugs in the hook upload: the script's two lines were written
    without newline separators (yielding the unrunnable
    ``#!/bin/shsystemctl reload ...``), and the in-memory file was never
    rewound before ``put``, so the uploaded hook could be empty.
    """
    with settings(user='******'):
        apt.add_repository('ppa:certbot/certbot')
        apt.ensure('certbot')
        if exists('/etc/letsencrypt/renewal-hooks/deploy'
                  ) and server is not None:
            hook = StringIO()
            hook.name = server
            hook.write('#!/bin/sh\n')
            hook.write('systemctl reload %s\n' % server)
            hook.seek(0)  # rewind so put() uploads the full script
            put(hook, '/etc/letsencrypt/renewal-hooks/deploy/%s' % server)
def test_submit_charter(self):
    """Submitting a charter text file creates a new document revision.

    Covers three cases: the plain GET of the submit form, a POST of a
    binary (non-text) file that must be rejected, and a POST of a
    Latin-1 text file with mixed line endings.
    """
    make_test_data()
    group = Group.objects.get(acronym="mars")
    charter = group.charter
    url = urlreverse('charter_submit', kwargs=dict(name=charter.name))
    login_testing_unauthorized(self, "secretary", url)

    # normal get
    r = self.client.get(url)
    self.assertEquals(r.status_code, 200)
    q = PyQuery(r.content)
    self.assertEquals(len(q('form input[name=txt]')), 1)

    # faulty post
    test_file = StringIO("\x10\x11\x12")  # post binary file
    test_file.name = "unnamed"
    r = self.client.post(url, dict(txt=test_file))
    self.assertEquals(r.status_code, 200)
    self.assertTrue("does not appear to be a text file" in r.content)

    # post
    prev_rev = charter.rev
    # \xe5 is "å" in Latin-1; \xc3\xa5 is the same character in UTF-8,
    # so the final assertion checks that the upload is transcoded to UTF-8.
    latin_1_snippet = '\xe5' * 10
    utf_8_snippet = '\xc3\xa5' * 10
    test_file = StringIO("Windows line\r\nMac line\rUnix line\n" + latin_1_snippet)
    test_file.name = "unnamed"
    r = self.client.post(url, dict(txt=test_file))
    self.assertEquals(r.status_code, 302)
    charter = Document.objects.get(name="charter-ietf-%s" % group.acronym)
    self.assertEquals(charter.rev, next_revision(prev_rev))
    self.assertTrue("new_revision" in charter.latest_event().type)
    # the stored file should have normalised (Unix) line endings
    with open(os.path.join(self.charter_dir, charter.canonical_name() + "-" + charter.rev + ".txt")) as f:
        self.assertEquals(f.read(), "Windows line\nMac line\nUnix line\n" + utf_8_snippet)
def test_autodetect_csv_import_handles_carriage_returns_in_cells(self):
    """A quoted cell containing a newline must not be split into rows."""
    csv_input = StringIO()
    for line in (u'"carriage\nreturn!"\r\n', u"normal line\r\n"):
        csv_input.write(line.encode('utf-8'))
    csv_input.name = 'filename'
    csv_input.size = 10
    csv_input.seek(0)
    worksheet = worksheet_from_csv(Worksheet(), csv_input, 2, 3, False)
    self.assertEquals(worksheet.B3.formula, "carriage\nreturn!")
    self.assertEquals(worksheet.B4.formula, "normal line")
def test_excel_csv_import_recognises_accents_and_currency_symbols(self):
    """windows-1252 input (accent, pound, euro) should decode correctly."""
    csv_input = StringIO()
    for ch in (u"\xe9", u"\xa3", u"\u20ac"):
        csv_input.write(ch.encode('windows-1252'))
    csv_input.name = 'filename'
    csv_input.size = 10
    csv_input.seek(0)
    worksheet = worksheet_from_csv(Worksheet(), csv_input, 3, 4, True)
    self.assertEquals(worksheet.C4.formula, u"\xe9\xa3\u20ac")
def create_video_grains_list(self):
    """Build a Grain object for every video file in the grain directory.

    Files are processed in sorted name order and renamed to
    ``video_grain<i>.ogv``.  Fix: the source files opened with
    ``open(...).read()`` were never closed (handle leak); they are now
    read inside a ``with`` block.
    """
    grains = []
    video_names = os.listdir(self.temporaryPathGrain)
    video_names.sort()
    for i, video in enumerate(video_names):
        filename = "video_grain" + str(i) + ".ogv"
        with open(os.path.join(self.temporaryPathGrain, video)) as src:
            content = StringIO(src.read())
        content.name = filename
        content.filename = filename
        grains.append(Grain(id=filename, content=content, graintype='nsifile'))
    return grains
def test_expansion(self):
    """Defined macros separated by non-macro tokens expand individually."""
    pp = CompilerPreprocessor({
        'A': 1,
        'B': '2',
        'C': 'c',
    })
    pp.out = StringIO()
    source = StringIO('A.B.C')
    source.name = 'foo'
    pp.do_include(source)
    self.assertEquals(pp.out.getvalue(), '1 . 2 . c')
def tokenize(s):
    """Tokenize the string *s*, returning the token list (EOF excluded)."""
    from StringIO import StringIO
    stream = StringIO(s)
    stream.name = '<string>'
    tokenizer = FileTokenizer(TrackingStream(stream))
    tokens = []
    while True:
        token = tokenizer.next()
        if token is EOF:
            break
        tokens.append(token)
    return tokens
def test_make_multipart_file_name(self):
    """make_multipart with with_filenames=True should emit the file part."""
    # post one file with filename
    upload = StringIO("This is a test file.")
    upload.name = "test.txt"
    context = dict(MIMEMultipart=Mock.FakeMIMEMultipart())
    with injected(util.make_multipart, **context):
        payload = dict(a="foo", b=upload)
        (headers, multi) = util.make_multipart(payload, with_filenames=True)
        assert headers["Content-Type"] == \
            'multipart/form-data; boundary="foobar"'
        assert multi.strip() == MIME_FILE_NAME.format(newline_if_py3)
def test_override(self):
    """A jar.mn override entry without real files should only register
    the locale directory, not write into the chrome dir."""
    jm = self.jm
    jm.outputFormat = 'flat'
    # doesn't touch chrome dir without files
    # NOTE(review): this manifest literal was likely multi-line in the
    # original source — confirm the exact jar.mn line breaks.
    jarcontents = StringIO('''en-US.jar: relativesrcdir dom/locales: ''')
    jarcontents.name = 'override.mn'
    jm.makeJar(jarcontents, '/NO_OUTPUT_REQUIRED')
    # only the en-US locale dir under dom/locales should be recorded
    self.assertEquals(jm.localedirs, [
        os.path.join(os.path.abspath('/TOPSOURCEDIR'), 'dom/locales', 'en-US')
    ])