def test_notification_two_attaches():
    """Posting two attachments lists both files in the e-mail notification."""
    discussion = M.Discussion(shortname='test', name='test')
    thread = M.Thread.new(discussion_id=discussion._id,
                          subject='Test comment notification')
    uploads = []
    for fname in ('fake.txt', 'fake2.txt'):
        upload = FieldStorage()
        upload.name = 'file_info'
        upload.filename = fname
        upload.type = 'text/plain'
        upload.file = StringIO('this is the content of the fake file\n')
        uploads.append(upload)
    post = thread.post(text=u'test message', forum=None, subject='',
                       file_info=uploads)
    ThreadLocalORMSession.flush_all()
    notification = M.Notification.query.get(
        subject=u'[test:wiki] Test comment notification')
    base_url = h.absurl('{}attachment/'.format(post.url()))
    assert_in(
        '\nAttachments:\n\n'
        '- [fake.txt]({0}fake.txt) (37 Bytes; text/plain)\n'
        '- [fake2.txt]({0}fake2.txt) (37 Bytes; text/plain)'.format(base_url),
        notification.text)
def test_user_manual_form(wtfs_app, pdf_1, pdf_2):
    """Exercise add / replace / delete of the user-manual PDF via the form."""
    # Load both fixture PDFs fully into memory so they can be re-read.
    with open(pdf_1, 'rb') as file:
        pdf_1 = BytesIO(file.read())
    with open(pdf_2, 'rb') as file:
        pdf_2 = BytesIO(file.read())

    user_manual = UserManual(wtfs_app)
    form = UserManualForm()
    form.apply_model(user_manual)
    # No manual stored yet, so the form field starts empty.
    assert form.pdf.data is None

    # Add
    field_storage = FieldStorage()
    field_storage.file = pdf_1
    field_storage.type = 'application/pdf'
    field_storage.filename = 'example_1.pdf'
    form.pdf.process(PostData({'pdf': field_storage}))
    form.update_model(user_manual)
    pdf_1.seek(0)
    assert user_manual.pdf == pdf_1.read()

    # The stored file is always exposed under the canonical name, with the
    # fixture's actual size (8130 bytes for pdf_1).
    form.apply_model(user_manual)
    assert form.pdf.data == {
        'filename': 'user_manual.pdf',
        'size': 8130,
        'mimetype': 'application/pdf'
    }

    # Replace
    field_storage = FieldStorage()
    field_storage.file = pdf_2
    field_storage.type = 'application/pdf'
    field_storage.filename = 'example_2.pdf'
    form.pdf.process(PostData({'pdf': field_storage}))
    form.update_model(user_manual)
    pdf_2.seek(0)
    assert user_manual.pdf == pdf_2.read()

    form.apply_model(user_manual)
    assert form.pdf.data == {
        'filename': 'user_manual.pdf',
        'size': 9115,
        'mimetype': 'application/pdf'
    }

    # Delete
    form.pdf.action = 'delete'
    form.update_model(user_manual)
    assert not user_manual.exists
def test_attachment_methods():
    """Attachments resolve their parent post/thread/discussion and show up in mail."""
    discussion = M.Discussion(shortname='test', name='test')
    thread = M.Thread.new(discussion_id=discussion._id, subject='Test Thread')
    post = thread.post('This is a post')
    att_post = post.attach('foo.text', StringIO('Hello, world!'),
                           discussion_id=discussion._id,
                           thread_id=thread._id,
                           post_id=post._id)
    att_thread = post.attach('foo2.text', StringIO('Hello, thread!'),
                             discussion_id=discussion._id,
                             thread_id=thread._id)
    att_disc = post.attach('foo3.text', StringIO('Hello, discussion!'),
                           discussion_id=discussion._id)
    ThreadLocalORMSession.flush_all()
    assert att_post.post == post
    assert att_post.thread == thread
    assert att_post.discussion == discussion
    for attachment in (att_post, att_thread, att_disc):
        assert 'wiki/_discuss' in attachment.url()
        assert 'attachment/' in attachment.url()
    # Test notification in mail
    thread = M.Thread.new(discussion_id=discussion._id,
                          subject='Test comment notification')
    upload = FieldStorage()
    upload.name = 'file_info'
    upload.filename = 'fake.txt'
    upload.type = 'text/plain'
    upload.file = StringIO('this is the content of the fake file\n')
    post = thread.post(text=u'test message', forum=None, subject='',
                       file_info=upload)
    ThreadLocalORMSession.flush_all()
    notification = M.Notification.query.get(
        subject=u'[test:wiki] Test comment notification')
    assert '\nAttachment: fake.txt (37 Bytes; text/plain)' in notification.text
def test_ms_data_as_file(self):
    """add_ms_data with an uploaded file should stage it and build the read script."""
    import tempfile
    from cgi import FieldStorage
    # Fake mzXML upload backed by an anonymous temp file.
    msfile = tempfile.TemporaryFile()
    msfile.write('foo')
    msfile.flush()
    msfield = FieldStorage()
    msfield.file = msfile
    params = {'ms_data_format': 'mzxml',
              'ms_data_file': msfield,
              'max_ms_level': 3,
              'abs_peak_cutoff': 1000,
              'precursor_mz_precision': 0.005,
              'mz_precision': 5.0,
              'mz_precision_abs': 0.001,
              'ionisation_mode': 1,
              }
    query = self.jobquery.add_ms_data(params)
    # Expected shell fragment; NOTE(review): the {magma}/{db} placeholders
    # are presumably substituted when the job runs -- confirm in JobQuery.
    script = "{magma} read_ms_data --ms_data_format 'mzxml'"
    script += " -i '1' -m '3' -a '1000.0'"
    script += " -p '5.0' -q '0.001' --precursor_mz_precision '0.005'"
    script += " --call_back_url '/' ms_data.dat {db}\n"
    expected_query = JobQuery(directory=self.jobdir,
                              prestaged=['ms_data.dat'],
                              script=script,
                              status_callback_url='/',
                              )
    self.assertEqual(query, expected_query)
    # The uploaded content must have been copied into the job directory.
    self.assertMultiLineEqual('foo', self.fetch_file('ms_data.dat'))
def test_export_with_attachments(self):
    """Bulk export writes comment attachments to disk and records their paths."""
    project = M.Project.query.get(shortname='test')
    blog = project.app_instance('blog')
    with h.push_context('test', 'blog', neighborhood='Projects'):
        post = BM.BlogPost.new(
            title='Test title',
            text='test post',
            labels=['the firstlabel', 'the second label'],
            delete=None)
        ThreadLocalORMSession.flush_all()
        upload = FieldStorage()
        upload.name = 'file_info'
        upload.filename = 'test_file'
        upload.file = BytesIO(b'test file1\n')
        comment = post.discussion_thread.add_post(text='test comment')
        comment.add_multiple_attachments(upload)
        ThreadLocalORMSession.flush_all()
        export_file = tempfile.TemporaryFile()
        temp_dir = tempfile.mkdtemp()
        blog.bulk_export(export_file, temp_dir, True)
        export_file.seek(0)
        exported = json.loads(export_file.read())
        exported['posts'] = sorted(
            exported['posts'], key=lambda entry: entry['title'], reverse=True)
        file_path = 'blog/{}/{}/{}/test_file'.format(
            post._id,
            post.discussion_thread._id,
            list(post.discussion_thread.post_class().query.find())[0].slug)
        assert_equal(
            exported['posts'][0]['discussion_thread']['posts'][0]
            ['attachments'][0]['path'],
            file_path)
        assert os.path.exists(os.path.join(temp_dir, file_path))
def test_attachment_methods():
    """Attachment parents resolve correctly; a posted file appears in the notification."""
    disc = M.Discussion(shortname='test', name='test')
    thr = M.Thread.new(discussion_id=disc._id, subject='Test Thread')
    parent_post = thr.post('This is a post')
    post_attachment = parent_post.attach('foo.text', StringIO('Hello, world!'),
                                         discussion_id=disc._id,
                                         thread_id=thr._id,
                                         post_id=parent_post._id)
    thread_attachment = parent_post.attach('foo2.text',
                                           StringIO('Hello, thread!'),
                                           discussion_id=disc._id,
                                           thread_id=thr._id)
    disc_attachment = parent_post.attach('foo3.text',
                                         StringIO('Hello, discussion!'),
                                         discussion_id=disc._id)
    ThreadLocalORMSession.flush_all()
    assert post_attachment.post == parent_post
    assert post_attachment.thread == thr
    assert post_attachment.discussion == disc
    for att in (post_attachment, thread_attachment, disc_attachment):
        url = att.url()
        assert 'wiki/_discuss' in url
        assert 'attachment/' in url
    # Test notification in mail
    thr = M.Thread.new(discussion_id=disc._id,
                       subject='Test comment notification')
    upload = FieldStorage()
    upload.name = 'file_info'
    upload.filename = 'fake.txt'
    upload.type = 'text/plain'
    upload.file = StringIO('this is the content of the fake file\n')
    parent_post = thr.post(text=u'test message', forum=None, subject='',
                           file_info=upload)
    ThreadLocalORMSession.flush_all()
    mail = M.Notification.query.get(
        subject=u'[test:wiki] Test comment notification')
    assert '\nAttachment: fake.txt (37 Bytes; text/plain)' in mail.text
def test_export_with_attachments(self):
    """Bulk export must record the attachment path and write the file to disk."""
    project = M.Project.query.get(shortname='test')
    discussion = project.app_instance('discussion')
    post = Forum.query.get(shortname='general').sorted_threads[0].first_post
    test_file1 = FieldStorage()
    test_file1.name = 'file_info'
    test_file1.filename = 'test_file'
    test_file1.file = StringIO('test file1\n')
    post.add_attachment(test_file1)
    ThreadLocalORMSession.flush_all()
    f = tempfile.TemporaryFile()
    temp_dir = tempfile.mkdtemp()
    discussion.bulk_export(f, temp_dir, True)
    f.seek(0)
    discussion = json.loads(f.read())
    forums = sorted(discussion['forums'], key=lambda x: x['name'])
    threads = sorted(forums[0]['threads'], key=lambda x: x['subject'])
    file_path = os.path.join(
        'discussion',
        str(post.discussion_id),
        str(post.thread_id),
        post.slug,
        'test_file'
    )
    assert_equal(threads[0]['posts'][0]['attachments'][0]['path'], file_path)
    # BUG FIX: the os.path.exists() result was previously discarded (bare
    # expression statement), and the relative path was tested instead of the
    # actual export location under temp_dir.
    assert os.path.exists(os.path.join(temp_dir, file_path))
def test_binary_create(self):
    """A binary upload wrapped in a FieldStorage can be persisted."""
    from io import BytesIO
    upload = FieldStorage()
    upload.file = BytesIO(b'fake_content')
    self.provider.create(File, {'data': upload})
def test_upload_release_already_exists(self):
    """Uploading a release file that already exists must answer 409 Conflict."""
    from papaye.models import Root, Package, Release, ReleaseFile
    from papaye.views.simple import UploadView
    # Create a fake test file
    fs = FieldStorage()
    fs.filename = "foo.tar.gz"
    fs.file = io.BytesIO(b"content")
    self.request.POST = {
        "content": fs,
        "some_metadata": "Fake Metadata",
        "version": "1.0",
        "name": "my_package",
        ":action": "file_upload",
    }
    # Create initial release
    root = Root()
    package = Package("my_package")
    package["1.0"] = Release("1.0", "1.0", metadata={})
    package["1.0"]["foo.tar.gz"] = ReleaseFile("foo.tar.gz", b"")
    root["my-package"] = package
    result = UploadView(root, self.request)()
    self.assertIsInstance(result, Response)
    self.assertEqual(result.status_int, 409)
def test_export_with_attachments(self):
    """Exported JSON records the attachment path and the file exists on disk."""
    project = M.Project.query.get(shortname='test')
    blog = project.app_instance('blog')
    with h.push_context('test', 'blog', neighborhood='Projects'):
        post = BM.BlogPost.new(
            title='Test title',
            text='test post',
            labels=['the firstlabel', 'the second label'],
            delete=None
        )
        ThreadLocalORMSession.flush_all()
        upload = FieldStorage()
        upload.name = 'file_info'
        upload.filename = 'test_file'
        upload.file = StringIO('test file1\n')
        comment = post.discussion_thread.add_post(text='test comment')
        comment.add_multiple_attachments(upload)
        ThreadLocalORMSession.flush_all()
        export_file = tempfile.TemporaryFile()
        temp_dir = tempfile.mkdtemp()
        blog.bulk_export(export_file, temp_dir, True)
        export_file.seek(0)
        exported = json.loads(export_file.read())
        exported['posts'] = sorted(
            exported['posts'], key=lambda entry: entry['title'], reverse=True)
        first_comment = list(
            post.discussion_thread.post_class().query.find())[0]
        file_path = 'blog/{}/{}/{}/test_file'.format(
            post._id, post.discussion_thread._id, first_comment.slug
        )
        assert_equal(exported['posts'][0]['discussion_thread']['posts'][0]
                     ['attachments'][0]['path'], file_path)
        assert os.path.exists(os.path.join(temp_dir, file_path))
def test_attachment_methods():
    """Attachments know their parents; posted files appear in the notification text."""
    discussion = M.Discussion(shortname="test", name="test")
    thread = M.Thread.new(discussion_id=discussion._id, subject="Test Thread")
    post = thread.post("This is a post")
    by_post = post.attach("foo.text", StringIO("Hello, world!"),
                          discussion_id=discussion._id,
                          thread_id=thread._id,
                          post_id=post._id)
    by_thread = post.attach("foo2.text", StringIO("Hello, thread!"),
                            discussion_id=discussion._id,
                            thread_id=thread._id)
    by_discussion = post.attach("foo3.text", StringIO("Hello, discussion!"),
                                discussion_id=discussion._id)
    ThreadLocalORMSession.flush_all()
    assert by_post.post == post
    assert by_post.thread == thread
    assert by_post.discussion == discussion
    for attachment in (by_post, by_thread, by_discussion):
        assert "wiki/_discuss" in attachment.url()
        assert "attachment/" in attachment.url()
    # Test notification in mail
    thread = M.Thread.new(discussion_id=discussion._id,
                          subject="Test comment notification")
    upload = FieldStorage()
    upload.name = "file_info"
    upload.filename = "fake.txt"
    upload.type = "text/plain"
    upload.file = StringIO("this is the content of the fake file\n")
    post = thread.post(text=u"test message", forum=None, subject="",
                       file_info=upload)
    ThreadLocalORMSession.flush_all()
    notification = M.Notification.query.get(
        subject=u"[test:wiki] Test comment notification")
    assert "\nAttachment: fake.txt (37 Bytes; text/plain)" in notification.text
def test_upload_release_with_spaces(self):
    """A package name containing a space is stored under the dashed key."""
    from papaye.models import Root, Package, Release, ReleaseFile
    from papaye.views.simple import UploadView
    # Create a fake test file
    fs = FieldStorage()
    fs.filename = "foo.tar.gz"
    fs.file = io.BytesIO(b"content")
    self.request.POST = {
        "content": fs,
        "some_metadata": "Fake Metadata",
        "version": "1.0",
        "name": "my package",
        ":action": "file_upload",
        "md5_digest": "Fake MD5",
    }
    root = Root()
    self.request.root = root
    result = UploadView(root, self.request)()
    self.assertIsInstance(result, Response)
    self.assertEqual(result.status_int, 200)
    self.assertTrue("my-package" in root)
    self.assertIsInstance(root["my-package"], Package)
    self.assertTrue(root["my-package"].releases.get("1.0", False))
    release = root["my-package"]["1.0"]
    self.assertIsInstance(release, Release)
    self.assertTrue(release.release_files.get("foo.tar.gz", b""))
    self.assertIsInstance(release["foo.tar.gz"], ReleaseFile)
    self.assertEqual(release["foo.tar.gz"].md5_digest, "Fake MD5")
    self.assertIsNotNone(release.metadata)
    self.assertIsInstance(release.metadata, dict)
    self.assertEqual(release.release_files.get("foo.tar.gz", b"").size, 7)
def test(self):
    """WebpyFileField exposes the wrapped filename and yields the content in chunks."""
    raw = FieldStorage()
    raw.filename = 'aaa'
    raw.file = StringIO('abc')
    wrapped = WebpyFileField(raw)
    self.assertEqual('aaa', wrapped.filename)
    self.assertEqual(['abc'], list(wrapped.chunks()))
def test_upload_release_already_exists(self):
    """Re-uploading an existing release file must answer 409 Conflict."""
    from papaye.models import Root, Package, Release, ReleaseFile
    from papaye.views.simple import UploadView
    # Create a fake test file
    fs = FieldStorage()
    fs.filename = 'foo.tar.gz'
    fs.file = io.BytesIO(b"content")
    self.request.POST = {
        "content": fs,
        "some_metadata": "Fake Metadata",
        "version": "1.0",
        "name": "my_package",
        ":action": "file_upload",
    }
    # Create initial release
    root = Root()
    package = Package('my_package')
    package['1.0'] = Release('1.0', '1.0', metadata={})
    package['1.0']['foo.tar.gz'] = ReleaseFile('foo.tar.gz', b'')
    root['my_package'] = package
    result = UploadView(root, self.request)()
    self.assertIsInstance(result, Response)
    self.assertEqual(result.status_int, 409)
def test_export_with_attachments(self):
    """Bulk export must record the attachment path and write the file to disk."""
    project = M.Project.query.get(shortname='test')
    discussion = project.app_instance('discussion')
    thread = sorted(Forum.query.get(shortname='general').threads,
                    key=attrgetter('last_post_date'))[-1]
    post = thread.first_post
    test_file1 = FieldStorage()
    test_file1.name = 'file_info'
    test_file1.filename = 'test_file'
    test_file1.file = BytesIO(b'test file1\n')
    post.add_attachment(test_file1)
    ThreadLocalORMSession.flush_all()
    f = tempfile.TemporaryFile('w+')
    temp_dir = tempfile.mkdtemp()
    discussion.bulk_export(f, temp_dir, True)
    f.seek(0)
    discussion = json.loads(f.read())
    forums = sorted(discussion['forums'], key=lambda x: x['name'])
    threads = sorted(forums[0]['threads'], key=lambda x: x['subject'])
    file_path = os.path.join('discussion', str(post.discussion_id),
                             str(post.thread_id), post.slug, 'test_file')
    assert_equal(threads[0]['posts'][0]['attachments'][0]['path'], file_path)
    # BUG FIX: the os.path.exists() result was previously discarded (bare
    # expression statement), and the relative path was tested instead of the
    # actual export location under temp_dir.
    assert os.path.exists(os.path.join(temp_dir, file_path))
def create_file(mimetype, filename, content):
    """Return a FieldStorage whose file holds *content*, rewound to the start."""
    storage = FieldStorage()
    storage.type = mimetype
    storage.filename = filename
    storage.file = TemporaryFile("wb+")
    storage.file.write(content)
    storage.file.seek(0)
    return storage
def test_notification_two_attaches():
    """Both posted attachments are listed on the notification's attachment line."""
    discussion = M.Discussion(shortname='test', name='test')
    thread = M.Thread.new(discussion_id=discussion._id,
                          subject='Test comment notification')
    uploads = []
    for fname in ('fake.txt', 'fake2.txt'):
        upload = FieldStorage()
        upload.name = 'file_info'
        upload.filename = fname
        upload.type = 'text/plain'
        upload.file = StringIO('this is the content of the fake file\n')
        uploads.append(upload)
    thread.post(text=u'test message', forum=None, subject='',
                file_info=uploads)
    ThreadLocalORMSession.flush_all()
    notification = M.Notification.query.get(
        subject=u'[test:wiki] Test comment notification')
    assert '\nAttachment: fake.txt (37 Bytes; text/plain) fake2.txt (37 Bytes; text/plain)' in notification.text
def test_should_provide_simple_wrapper(self):
    """The wrapper passes through the underlying file and filename unchanged."""
    raw = CGIFieldStorage()
    raw.file = 'foo'
    raw.filename = 'bar'
    wrapped = FieldStorage(raw)
    self.assertEqual(raw.file, wrapped.file)
    self.assertEqual(raw.filename, wrapped.filename)
def test_without_metabolize(self):
    """allinone() without metabolize chains read_ms_data, add_structures, annotate."""
    self.maxDiff = 100000
    import tempfile
    from cgi import FieldStorage
    # Fake mzXML upload backed by a named temp file.
    ms_data_file = tempfile.NamedTemporaryFile()
    ms_data_file.write('foo')
    ms_data_file.flush()
    msfield = FieldStorage()
    msfield.file = ms_data_file
    params = MultiDict(ionisation_mode=1,
                       ms_intensity_cutoff=200000,
                       msms_intensity_cutoff=10,
                       abs_peak_cutoff=1000,
                       precursor_mz_precision=0.005,
                       max_broken_bonds=4,
                       max_water_losses=1,
                       mz_precision=5.0,
                       mz_precision_abs=0.001,
                       scenario=[{'type': 'phase1', 'steps': '2'},
                                 {'type': 'phase2', 'steps': '1'}],
                       max_ms_level=3,
                       structures='C1CCCC1 comp1',
                       ms_data_file=msfield,
                       structure_format='smiles',
                       ms_data_format='mzxml',
                       structure_database='',
                       min_refscore=1,
                       max_mz=9999,
                       )
    query = self.jobquery.allinone(params)
    # Expected shell script; NOTE(review): the {magma}/{db} placeholders are
    # presumably substituted when the job runs -- confirm in JobQuery.
    expected_script = "{magma} read_ms_data --ms_data_format 'mzxml'"
    expected_script += " -i '1' -m '3' -a '1000.0'"
    expected_script += " -p '5.0' -q '0.001'"
    expected_script += " --precursor_mz_precision '0.005'"
    expected_script += " --call_back_url '/'"
    expected_script += " ms_data.dat {db}\n"
    expected_script += "{magma} add_structures -g -t 'smiles'"
    expected_script += " structures.dat {db}\n"
    expected_script += "{magma} annotate -c '200000.0'"
    expected_script += " -d '10.0'"
    expected_script += " -b '4'"
    expected_script += " --max_water_losses '1' --ncpus '1' --call_back_url '/'"
    expected_script += " --fast {db}\n"
    expected_query = JobQuery(directory=self.jobdir,
                              prestaged=['ms_data.dat', 'structures.dat'],
                              script=expected_script,
                              status_callback_url='/',
                              )
    self.assertEqual(query, expected_query)
    # Both inputs were staged into the job directory.
    self.assertMultiLineEqual(params['structures'],
                              self.fetch_file('structures.dat'))
    self.assertMultiLineEqual('foo', self.fetch_file('ms_data.dat'))
def test_accept_multipart_content(self, result_request, tool_definition):
    """A multipart upload for an *_path argument is saved to a readable file."""
    upload = FieldStorage()
    upload.filename = 'x.txt'
    upload.file = StringIO('whee')
    tool_definition['argument_names'] = ('x_path',)
    result = result_request.prepare_arguments(
        tool_definition, {'x': upload})
    assert open(result.arguments['x_path']).read() == 'whee'
def test_accept_multipart_content(self, result_request, tool_definition):
    """Uploaded multipart content lands in the file named by the *_path argument."""
    posted = FieldStorage()
    posted.filename = 'x.txt'
    posted.file = StringIO('whee')
    tool_definition['argument_names'] = ('x_path', )
    raw_arguments = {'x': posted}
    prepared = result_request.prepare_arguments(tool_definition,
                                                raw_arguments)
    assert open(prepared.arguments['x_path']).read() == 'whee'
def test_binary_update(self):
    """A created binary entity can be updated by its file_id."""
    upload = FieldStorage()
    upload.file = BytesIO(b'fake_content')
    entity = self.provider.create(File, {'data': upload})
    self.provider.update(File, {'data': upload, 'file_id': entity.file_id})
def to_fieldstorage(f):
    """Adapt an uploaded file object (with mimetype/mimetype_params/filename
    attributes) to a FieldStorage.

    NOTE(review): mutates f.mimetype_params by inserting 'filename', and the
    same dict is shared as both disposition_options and type_options.
    """
    storage = FieldStorage()
    storage.file = f
    storage.type = f.mimetype
    options = f.mimetype_params
    options['filename'] = f.filename
    storage.disposition_options = options
    storage.type_options = options
    return storage
def upload_request(self, files: dict, metadata=None, user=None):
    """Build a POST request carrying *files* ({name: bytes}) plus metadata."""
    metadata = '{}' if metadata is None else metadata
    fields = MultiDict({'metadata': metadata})
    for filename, payload in files.items():
        upload = FieldStorage()
        upload.file = BytesIO(payload)
        upload.filename = filename
        fields.add('file', upload)
    return self.generic_request(post=fields, user=user)
def test_valid_imagefile_upload(self):
    """Non-image bytes posing as a .png must be rejected with Invalid."""
    from adhocracy.forms.common import ValidImageFileUpload
    from formencode import Invalid
    from cgi import FieldStorage
    from io import BytesIO
    upload = FieldStorage()
    upload.file = BytesIO(b"binarydata")
    upload.filename = u"test.png"
    upload.name = u"thumbs"
    self.assertRaises(Invalid, ValidImageFileUpload.to_python, upload)
def test_multiple_attachments():
    """add_multiple_attachments stores every file passed in the list."""
    uploads = []
    for fname, body in (('test1.txt', 'test file1\n'),
                        ('test2.txt', 'test file2\n')):
        upload = FieldStorage()
        upload.name = 'file_info'
        upload.filename = fname
        upload.type = 'text/plain'
        upload.file = StringIO(body)
        uploads.append(upload)
    discussion = M.Discussion(shortname='test', name='test')
    thread = M.Thread.new(discussion_id=discussion._id, subject='Test Thread')
    post = thread.post('test post')
    post.add_multiple_attachments(uploads)
    ThreadLocalORMSession.flush_all()
    assert_equals(len(post.attachments), 2)
    filenames = [attachment.filename for attachment in post.attachments]
    assert 'test1.txt' in filenames
    assert 'test2.txt' in filenames
def import_stage(self, harvest_object):
    """Import a harvested package: filter its fields, sync relationships,
    and re-upload any file-backed resources into this CKAN instance.

    Returns the parent class's import result (falsy aborts the extra work).
    """
    package_dict = json.loads(harvest_object.content)
    if not self._should_import_local(package_dict):
        package_dict['state'] = 'deleted'
    else:
        package_dict = self._apply_package_extras_white_list(package_dict)
        package_dict = self._apply_package_resource_extras_black_list(package_dict)
        package_dict = self._fix_date_in_fields(package_dict)
        package_dict = self._set_license(package_dict)
        package_dict = self._pop_black_list_resources_by_type(package_dict)
    harvest_object.content = json.dumps(package_dict)
    # Upload-type resources are imported separately after the base import.
    upload_resources = self._pop_upload_resources(package_dict)
    import_stage_result = super(GuiaHarvesterPlugin, self).import_stage(harvest_object)
    if import_stage_result:
        package_dict = json.loads(harvest_object.content)
        harvested_rels = package_dict.get('relationships', [])
        try:
            this_package = model.Package.get(package_dict['name'])
            if not this_package:
                raise logic.NotFound()
        except logic.NotFound as nf:
            log.info('import_stage(): could not find package "{0}"; relationships not updated: {1}'.format(package_dict['name'], nf))
            return import_stage_result
        existing_rels = this_package.get_relationships()
        self._update_relationships(existing_rels, harvested_rels)
        for resource_dict in upload_resources:
            resource_url = resource_dict['url']
            resource_filename = resource_url.split('/')[-1]
            try:
                response = requests.get(resource_url)
                resource_file = StringIO(response.content)
            # BUG FIX: was the Python-2-only `except Exception,e` syntax and
            # the message literal contained a raw newline (a syntax error).
            except Exception as e:
                self._save_object_error('Resource not harvested for package "{0}". Unable to fetch resource from "{1}": {2}'.format(package_dict['name'], resource_url, e), harvest_object, 'Import')
                continue
            cfs = FieldStorage()
            cfs.file = resource_file
            cfs.filename = resource_filename
            resource_dict['upload'] = cfs
            # Drop server-managed fields before re-creating the resource.
            for stale_key in ('created', 'last_modified', 'api'):
                if stale_key in resource_dict:
                    del resource_dict[stale_key]
            try:
                toolkit.get_action('resource_create')(data_dict=resource_dict)
            except Exception as e:
                self._save_object_error('Resource not harvested for package "{0}". Unable to import the resource originally from "{1}": {2}'.format(package_dict['name'], resource_url, e), harvest_object, 'Import')
                continue
def process(content, **kwargs):
    """Bind an upload field to the enclosing form and feed it *content* as a CSV."""
    field = MunicipalityDataUploadField(**kwargs).bind(form, 'upload')
    upload = FieldStorage()
    upload.file = BytesIO(content)
    upload.type = 'text/plain'
    upload.filename = 'test.csv'
    field.process(PostData({'upload': upload}))
    return field
def test_create(self):
    """The view accepts coordinates plus an image upload and creates the record."""
    view = self.get_view()
    image = FieldStorage()
    image.file = pkg_resources.resource_stream(
        'velo', 'static/img/trollface.png'
    )
    view.request.POST.update({
        'longitude': 12.3,
        'latitude': -45.4,
        'content': image,
    })
    view.create()
def apply_model(self, model):
    """Populate this form's fields from *model*, wrapping any stored
    organigram file in a FieldStorage for the file field."""
    self.title.data = model.title
    self.portrait.data = model.portrait
    self.export_fields.data = model.export_fields
    organigram = model.organigram_file
    if organigram:
        upload = FieldStorage()
        upload.file = BytesIO(organigram.read())
        upload.type = organigram.content_type
        upload.filename = organigram.filename
        self.organigram.data = self.organigram.process_fieldstorage(upload)
    # The field only exists for privileged forms.
    if hasattr(self, 'is_hidden_from_public'):
        self.is_hidden_from_public.data = model.is_hidden_from_public
    self.reorder_export_fields()
def call_action(self, action, data_dict=None, context=None, apikey=None, files=None, requests_kwargs=None): """ :param action: the action name, e.g. 'package_create' :param data_dict: the dict to pass to the action, defaults to {} :param context: an override for the context to use for this action, remember to include a 'user' when necessary :param apikey: not supported :param files: None or {field-name: file-to-be-sent, ...} :param requests_kwargs: ignored for LocalCKAN (requests not used) """ # copy dicts because actions may modify the dicts they are passed # (CKAN...you so crazy) data_dict = dict(data_dict or []) context = dict(self.context if context is None else context) if apikey: # FIXME: allow use of apikey to set a user in context? raise CKANAPIError( "LocalCKAN.call_action does not support " "use of apikey parameter, use context['user'] instead") to_close = [] try: for fieldname in files or []: f = files[fieldname] if isinstance(f, tuple): # requests accepts (filename, file...) tuples filename, f = f[:2] else: filename = f.name try: f.seek(0) except (AttributeError, IOError): f = _write_temp_file(f) to_close.append(f) field_storage = FieldStorage() field_storage.file = f field_storage.filename = filename data_dict[fieldname] = field_storage return self._get_action(action)(context, data_dict) finally: for f in to_close: f.close()
def test_add_attachment():
    """A single attachment is stored with its filename and content type."""
    upload = FieldStorage()
    upload.name = "file_info"
    upload.filename = "test.txt"
    upload.type = "text/plain"
    upload.file = StringIO("test file\n")
    discussion = M.Discussion(shortname="test", name="test")
    thread = M.Thread.new(discussion_id=discussion._id, subject="Test Thread")
    post = thread.post("test post")
    post.add_attachment(upload)
    ThreadLocalORMSession.flush_all()
    assert post.attachments.count() == 1, post.attachments.count()
    attachment = post.attachments.first()
    assert attachment.filename == "test.txt", attachment.filename
    assert attachment.content_type == "text/plain", attachment.content_type
def test_add_attachment():
    """Attaching one file records its name and MIME type on the post."""
    fs = FieldStorage()
    fs.name = 'file_info'
    fs.filename = 'test.txt'
    fs.type = 'text/plain'
    fs.file = StringIO('test file\n')
    disc = M.Discussion(shortname='test', name='test')
    thr = M.Thread.new(discussion_id=disc._id, subject='Test Thread')
    created_post = thr.post('test post')
    created_post.add_attachment(fs)
    ThreadLocalORMSession.flush_all()
    count = created_post.attachments.count()
    assert count == 1, count
    stored = created_post.attachments.first()
    assert stored.filename == 'test.txt', stored.filename
    assert stored.content_type == 'text/plain', stored.content_type
def setup_with_tools(self):
    """Create two tickets, comment on one, and attach a file to the comment."""
    super(TestBulkExport, self).setup_with_tools()
    self.project = M.Project.query.get(shortname='test')
    self.tracker = self.project.app_instance('bugs')
    self.new_ticket(summary='foo', _milestone='1.0')
    self.new_ticket(summary='bar', _milestone='2.0')
    self.ticket = TM.Ticket.query.find(dict(summary='foo')).first()
    self.post = self.ticket.discussion_thread.add_post(text='silly comment')
    ThreadLocalORMSession.flush_all()
    upload = FieldStorage()
    upload.name = 'file_info'
    upload.filename = 'test_file'
    upload.file = StringIO('test file1\n')
    self.post.add_attachment(upload)
    ThreadLocalORMSession.flush_all()
def test_add_attachment():
    """The attachment list contains exactly the one uploaded file."""
    upload = FieldStorage()
    upload.name = 'file_info'
    upload.filename = 'test.txt'
    upload.type = 'text/plain'
    upload.file = StringIO('test file\n')
    discussion = M.Discussion(shortname='test', name='test')
    thread = M.Thread.new(discussion_id=discussion._id, subject='Test Thread')
    post = thread.post('test post')
    post.add_attachment(upload)
    ThreadLocalORMSession.flush_all()
    assert_equals(len(post.attachments), 1)
    attachment = post.attachments[0]
    assert attachment.filename == 'test.txt', attachment.filename
    assert attachment.content_type == 'text/plain', attachment.content_type
def setup_with_tools(self):
    """Seed two tickets and attach a file to a comment on the first one."""
    super(TestBulkExport, self).setup_with_tools()
    self.project = M.Project.query.get(shortname='test')
    self.tracker = self.project.app_instance('bugs')
    self.new_ticket(summary='foo', _milestone='1.0')
    self.new_ticket(summary='bar', _milestone='2.0')
    self.ticket = TM.Ticket.query.find(dict(summary='foo')).first()
    self.post = self.ticket.discussion_thread.add_post(
        text='silly comment')
    ThreadLocalORMSession.flush_all()
    upload = FieldStorage()
    upload.name = 'file_info'
    upload.filename = 'test_file'
    upload.file = BytesIO(b'test file1\n')
    self.post.add_attachment(upload)
    ThreadLocalORMSession.flush_all()
def test_uploaddb_post(self):
    """Uploading a db file delegates to fromDb and redirects to the results page."""
    self.config.add_route('results', '/results/{jobid}')
    from cgi import FieldStorage
    dbfile = FieldStorage()
    dbfile.file = StringIO()
    request = testing.DummyRequest(post={'db_file': dbfile})
    request.user = User('bob', 'Bob Example', '*****@*****.**')
    views = Views(request)
    views.job_factory = Mock(JobFactory)
    views.job_factory.fromDb.return_value = self.fake_job()
    response = views.uploaddb()
    views.job_factory.fromDb.assert_called_with(dbfile.file, 'bob')
    self.assertEqual(response.location, 'http://example.com/results/foo')
def test_with_structure_as_string_and_file(self):
    """When both a structures string and a file are supplied, the file wins."""
    import tempfile
    from cgi import FieldStorage
    structures_file = tempfile.TemporaryFile()
    structures_file.write('foo')
    structures_file.flush()
    field = FieldStorage()
    field.file = structures_file
    params = {
        'structure_format': 'smiles',
        'structures_file': field,
        'structures': 'bar'
    }
    self.jobquery.add_structures(params)
    # File is kept and string is ignored
    self.assertMultiLineEqual('foo', self.fetch_file('structures.dat'))
def call_action(self, action, data_dict=None, context=None, apikey=None, files=None): """ :param action: the action name, e.g. 'package_create' :param data_dict: the dict to pass to the action, defaults to {} :param context: an override for the context to use for this action, remember to include a 'user' when necessary :param apikey: not supported :param files: None or {field-name: file-to-be-sent, ...} """ # copy dicts because actions may modify the dicts they are passed # (CKAN...you so crazy) data_dict = dict(data_dict or []) context = dict(self.context if context is None else context) if apikey: # FIXME: allow use of apikey to set a user in context? raise CKANAPIError("LocalCKAN.call_action does not support " "use of apikey parameter, use context['user'] instead") to_close = [] try: for fieldname in files or []: f = files[fieldname] if isinstance(f, tuple): # requests accepts (filename, file...) tuples filename, f = f[:2] else: filename = f.name try: f.seek(0) except (AttributeError, IOError): f = _write_temp_file(f) to_close.append(f) field_storage = FieldStorage() field_storage.file = f field_storage.filename = filename data_dict[fieldname] = field_storage return self._get_action(action)(context, data_dict) finally: for f in to_close: f.close()
def test_structures_as_file(self):
    """add_structures with a file stages it and builds the add_structures script."""
    import tempfile
    from cgi import FieldStorage
    structures_file = tempfile.TemporaryFile()
    structures_file.write('foo')
    structures_file.flush()
    field = FieldStorage()
    field.file = structures_file
    query = self.jobquery.add_structures(
        {'structure_format': 'smiles', 'structures_file': field})
    script = "{magma} add_structures -g -t 'smiles' structures.dat {db}\n"
    expected_query = JobQuery(directory=self.jobdir,
                              prestaged=['structures.dat'],
                              script=script,
                              status_callback_url='/',
                              )
    self.assertEqual(query, expected_query)
    self.assertMultiLineEqual('foo', self.fetch_file('structures.dat'))
def test_update_dataset_form(session): request = DummyRequest(session, DummyPrincipal()) # Validate form = UpdateDatasetForm() form.request = request assert not form.validate() file = BytesIO() workbook = Workbook(file) worksheet = workbook.add_worksheet('DATA') workbook.add_worksheet('CITATION') worksheet.write_row(0, 0, ColumnMapper().columns.values()) worksheet.write_row( 1, 0, [ 100.1, # anr / NUMERIC '1.2.2008', # datum / DATE 1, # legislatur / INTEGER '2004-2008', # legisjahr / INT4RANGE 'kurztitel de', # titel_kurz_d 'kurztitel fr', # titel_kurz_f 'titel de', # titel_off_d 'titel fr', # titel_off_f 'stichwort', # stichwort / TEXT 2, # anzahl / INTEGER 3, # rechtsform ]) workbook.close() file.seek(0) field_storage = FieldStorage() field_storage.file = file field_storage.type = 'application/excel' field_storage.filename = 'test.xlsx' form.dataset.process(DummyPostData({'dataset': field_storage})) assert form.validate()
def test_upload_release_with_spaces(self):
    """A package name with a space is normalized to a dashed repository key."""
    from papaye.models import Root, Package, Release, ReleaseFile
    from papaye.views.simple import UploadView
    # Create a fake test file
    uploaded_file = io.BytesIO(b"content")
    storage = FieldStorage()
    storage.filename = 'foo.tar.gz'
    storage.file = uploaded_file
    self.request.POST = {
        "content": storage,
        "some_metadata": "Fake Metadata",
        "version": "1.0",
        "name": "my package",
        ":action": "file_upload",
        "md5_digest": "Fake MD5"
    }
    root = Root()
    self.request.root = root
    view = UploadView(root, self.request)
    result = view()
    self.assertIsInstance(result, Response)
    self.assertEqual(result.status_int, 200)
    # 'my package' must be stored under the dashed key 'my-package'.
    self.assertTrue('my-package' in root)
    self.assertIsInstance(root['my-package'], Package)
    self.assertTrue(root['my-package'].releases.get('1.0', False))
    self.assertIsInstance(root['my-package']['1.0'], Release)
    self.assertTrue(root['my-package']['1.0'].release_files.get(
        'foo.tar.gz', b''))
    self.assertIsInstance(root['my-package']['1.0']['foo.tar.gz'],
                          ReleaseFile)
    self.assertEqual(root['my-package']['1.0']['foo.tar.gz'].md5_digest,
                     "Fake MD5")
    self.assertIsNotNone(root['my-package']['1.0'].metadata)
    self.assertIsInstance(root['my-package']['1.0'].metadata, dict)
    # Size matches len(b"content") == 7.
    self.assertEqual(
        root['my-package']['1.0'].release_files.get('foo.tar.gz', b'').size,
        7)
def create_resource(filepath, package_id, api_key, name="Default"):
    """Create a CKAN resource by uploading a local file.

    Uses the plugins toolkit action API directly instead of an HTTP
    request, so no authentication round-trip is needed.

    :param filepath: path of the file to upload as the resource payload.
    :param package_id: id or name of the dataset the resource belongs to.
    :param api_key: unused; kept for backward compatibility with callers
        that previously authenticated over HTTP.
    :param name: display name of the new resource, also used as the
        uploaded file name.
    :return: the created resource dict as returned by ``resource_create``.
    """
    with open(filepath, 'rb') as f:
        # CKAN's upload machinery expects a cgi.FieldStorage-like object
        # exposing ``file`` and ``filename`` attributes.
        field_storage = FieldStorage()
        field_storage.file = f
        field_storage.filename = name
        new_resource = toolkit.get_action('resource_create')({}, {
            'package_id': package_id,
            'upload': field_storage,
            "name": name
        })
    return new_resource
def test_import_municipality_data_form(session):
    """Applying and validating the municipality data import form."""
    municipalities = MunicipalityCollection(session)
    municipalities.add(name="Boppelsen", bfs_number=82)
    municipalities.add(name="Adlikon", bfs_number=21)

    # Applying parsed file data only touches known BFS numbers (241 is
    # not in the collection and must be ignored).
    form = ImportMunicipalityDataForm()
    form.request = Request(session)
    form.file.data = {
        21: {'dates': [date(2019, 1, 1), date(2019, 1, 7)]},
        241: {'dates': [date(2019, 1, 3), date(2019, 1, 9)]},
        82: {'dates': [date(2019, 1, 4), date(2019, 1, 10)]}
    }
    form.update_model(municipalities)
    result = [
        (municipality.bfs_number,
         [pickup.date for pickup in municipality.pickup_dates])
        for municipality in municipalities.query()
    ]
    assert result == [
        (21, [date(2019, 1, 1), date(2019, 1, 7)]),
        (82, [date(2019, 1, 4), date(2019, 1, 10)])
    ]

    # An empty form does not validate; a CSV upload does.
    form = ImportMunicipalityDataForm()
    form.request = Request(session)
    assert not form.validate()

    upload = FieldStorage()
    upload.file = BytesIO(
        "Adlikon;21;-1;Normal;12.2.2015".encode('cp1252'))
    upload.type = 'text/csv'
    upload.filename = 'test.csv'
    form.file.process(PostData({'file': upload}))
    assert form.validate()
def test_with_ms_data_as_string_and_file(self):
    """When both a file and an inline string are supplied, the file wins."""
    import tempfile
    from cgi import FieldStorage

    ms_file = tempfile.TemporaryFile()
    ms_file.write('foo')
    ms_file.flush()
    ms_field = FieldStorage()
    ms_field.file = ms_file

    params = {
        'ms_data_format': 'mzxml',
        'ms_data_file': ms_field,
        'ms_data': 'bar',
        'max_ms_level': 3,
        'abs_peak_cutoff': 1000,
        'precursor_mz_precision': 0.005,
        'mz_precision': 5.0,
        'mz_precision_abs': 0.001,
        'ionisation_mode': 1,
    }
    self.jobquery.add_ms_data(params)

    # File content ('foo') is kept; the 'ms_data' string ('bar') is ignored.
    self.assertMultiLineEqual('foo', self.fetch_file('ms_data.dat'))
def test_fileupload_handle(self):
    """Exercise the abstract and concrete file upload handles end to end:
    GET listing, POST upload (abstract error path and concrete success),
    re-reading existing files, and the delete handle.
    """
    # Abstract file upload handle
    container = ContainerNode(name='container')
    request = self.layer.new_request()
    abstract_upload_handle = FileUploadHandle(container, request)
    # If request method is GET, existing files are read. Abstract
    # implementation returns empty result
    self.assertEqual(abstract_upload_handle(), {'files': []})
    # If request method is POST, a file upload is assumed
    filedata = FieldStorage()
    filedata.type = 'text/plain'
    filedata.filename = 'test.txt'
    filedata.file = StringIO('I am the payload')
    request.method = 'POST'
    request.params['file'] = filedata
    # NOTE(review): '_LOCALE_' is removed so the POST params contain only
    # the uploaded file — presumably required by the handle; confirm.
    del request.params['_LOCALE_']
    res = abstract_upload_handle()
    # The abstract handle reports an error entry instead of storing a file.
    self.assertEqual(res['files'][0]['name'], 'test.txt')
    self.assertEqual(res['files'][0]['size'], 0)
    self.assertEqual(
        res['files'][0]['error'],
        'Abstract ``FileUploadHandle`` does not implement ``create_file``'
    )
    # Concrete implementation of file upload handle
    upload_handle = ContainerFileUploadHandle(container, request)
    # Upload file
    res = upload_handle()
    self.assertEqual(res['files'], [{
        'url': '/test.txt',
        'deleteType': 'GET',
        'deleteUrl': '/test.txt/filedelete_handle',
        'name': 'test.txt',
        'size': 16
    }])
    self.checkOutput("""
    <class 'cone.fileupload.tests.ContainerNode'>: container
      <class 'cone.fileupload.tests.File'>: test.txt
        body: 'I am the payload'
    """, container.treerepr())
    # Read existing files
    request = self.layer.new_request()
    upload_handle = ContainerFileUploadHandle(container, request)
    self.assertEqual(upload_handle()['files'], [{
        'url': '/test.txt',
        'deleteType': 'GET',
        'deleteUrl': '/test.txt/filedelete_handle',
        'name': 'test.txt',
        'size': 16
    }])
    # Test file delete handle
    file = container['test.txt']
    request = self.layer.new_request()
    self.assertEqual(
        filedelete_handle(file, request),
        {'files': [{'test.txt': True}]}
    )
    # After deletion the container holds no children.
    self.checkOutput("""
    <class 'cone.fileupload.tests.ContainerNode'>: container
    """, container.treerepr())
def testQuizAsXML(self):
    """Round-trip a quiz through the IQuizAsXML adapter: import a QML
    file and verify the resulting object tree, then export it back and
    compare the generated XML line by line with the source file.
    """
    # test parsing
    quiz = self.root['quiz']
    path = os.path.join(os.getcwd(), 'xml_example_word.qml')
    xml_example = open(path)
    # Wrap the open file the way Zope hands uploads to views.
    field_storage = FieldStorage()
    field_storage.file = xml_example
    field_storage.filename = 'xml_example_word.qml'
    file_upload = FileUpload(field_storage)
    qax = interfaces.IQuizAsXML(self.root['quiz'])
    status = qax.generateQuiz(file_upload)
    #status = qax.generateQuiz(xml_example)
    # A non-None status signals an import failure; bail out early.
    if status is not None:
        print status
        return
    self.assertEqual(quiz.title, u'Название теста. Вот как.')
    self.assertEqual(quiz.body, u'Contents of quiz')
    self.assertEqual(IAnnotations(quiz).get(PAGES_KEY), 1)
    self.assertEqual(interfaces.IScaleQuiz.providedBy(quiz), True)
    self.assertEqual(interfaces.ISlotQuiz.providedBy(quiz), False)
    # Result objects imported from the QML file.
    results = [r for r in quiz.values()
               if interfaces.IQuizResult.providedBy(r)]
    self.assertEqual(results[0].title, u'Title of first result')
    self.assertEqual(results[1].title, u'Title of second result')
    self.assertEqual(results[0].body, results[1].body, u'Contents of result')
    self.assertEqual(IAnnotations(results[0]).get(INTERVAL_KEY), 10)
    self.assertEqual(IAnnotations(results[1]).get(INTERVAL_KEY), 20)
    # Question objects and their nested answers/depends/leads.
    questions = [q for q in quiz.values()
                 if interfaces.IQuizQuestion.providedBy(q)]
    self.assertEqual(questions[0].title, u'Title of first question')
    self.assertEqual(questions[1].title, u'Title of second question')
    for q in questions:
        answers = [a for a in q.values()
                   if interfaces.IQuizAnswer.providedBy(a)]
        self.assertEqual(answers[0].title, u'Title of first answer')
        self.assertEqual(answers[1].title, u'Title of second answer')
        self.assertEqual(answers[0].body, answers[1].body,
                         u'Contents of answer')
        depends = [[d for d in answer.values()
                    if interfaces.IAnswerDepends.providedBy(d)]
                   for answer in answers]
        flatten = getUtility(interfaces.IQreatureUtility, name="Flatten")
        int_ids = getUtility(interfaces.IQreatureIntIds, context=quiz)
        depends = [d for d in flatten(depends)]
        leads = [[l for l in answer.values() if
                  interfaces.IAnswerLeads.providedBy(l)]
                 for answer in answers]
        if q.title == u'Title of first question':
            leads = [l for l in flatten(leads)]
            self.assertEqual(len(leads), 1)
        # NOTE(review): depends are assumed to alternate between the first
        # and second result (odd/even position) — confirm against the QML.
        i = 0
        for depend in depends:
            i += 1
            if divmod(i, 2)[1] == 1:
                self.assertEqual(int_ids.getObject(depend.result_id).title,
                                 u'Title of first result')
            elif divmod(i, 2)[1] == 0:
                self.assertEqual(int_ids.getObject(depend.result_id).title,
                                 u'Title of second result')
            self.assertEqual(depend.depend_value, 100)
    self.assertTrue(interfaces.IScaleQuiz.providedBy(quiz))
    self.assertTrue(interfaces.ILeadedQuiz.providedBy(quiz))
    xml_example.close()
    # test generation. to generate depends, provide the quiz with ISlotLayer also
    xml_example = open(path)
    example_lines = xml_example.readlines()
    # check for BOM also
    example_lines = ([example_lines[0].lstrip(codecs.BOM_UTF8)]
                     + example_lines[1:])
    alsoProvides(quiz, interfaces.ISlotQuiz)
    xml_generated = qax.generateXML()
    generated_lines = xml_generated.data.split('\n')
    # Compare pairwise; readlines() keeps the trailing newline, split() does
    # not, hence the '+ "\n"' on the generated side.
    lines_to_compare = zip(example_lines, generated_lines)
    for example_line, generated_line in lines_to_compare:
        self.assertEqual(example_line, generated_line + '\n')
    xml_example.close()
def _upload_test_data(self):
    """
    Upload test datasets, which are defined in the <test></test>
    section of the provided tool.

    Populates ``self._data_inputs`` (tool inputs of type 'data') and
    ``self._hids`` (input name -> uploaded dataset hid). Sets
    ``self._use_datasets = False`` and returns when the test defines no
    data inputs. Raises ValueError when the tool has no tests, uses
    unsupported input types, a test dataset file is missing, or an
    upload job fails.
    """
    if not self._tool.tests:
        raise ValueError('Tests are not defined.')
    self._test = self._tool.tests[0]
    # All inputs with the type 'data'
    self._data_inputs = {
        x.name: x for x in self._tool.input_params if x.type == 'data'
    }
    # Datasets from the <test></test> section
    test_datasets = {
        input_name: self._test.inputs[input_name][0]
        for input_name in self._data_inputs.keys()
        if input_name in self._test.inputs.keys()
    }
    # Conditional datasets: test inputs named 'cond|param' map onto the
    # plain data input 'param'.
    for name, value in self._test.inputs.items():
        if '|' in name:
            input_name = name.split('|')[1]
            if input_name in self._data_inputs.keys():
                test_datasets.update(
                    {input_name: self._test.inputs[name][0]})
    if not test_datasets.keys():
        not_supported_input_types = [
            k for k, v in self._tool.inputs.items()
            if v.type == 'repeat' or v.type == 'data_collection'
        ]
        if not_supported_input_types:
            raise ValueError('Not supported input types.')
        else:
            # Some tests don't have data inputs at all,
            # so we can generate a tour without them
            self._use_datasets = False
            return
    # Candidate directories to search for the dataset files: the local
    # 'test-data' dir, the (recursive) test-data repo cache, and - for
    # tool shed tools - the tool's own 'test-data' directory.
    test_data_paths = [os.path.abspath('test-data')]
    test_data_cache_dir = os.path.abspath(
        os.environ.get('GALAXY_TEST_DATA_REPO_CACHE', 'test-data-cache'))
    test_data_paths.extend(
        [x[0] for x in os.walk(test_data_cache_dir) if '.git' not in x[0]])
    if self._tool.tool_shed:
        test_data_paths.append(
            os.path.abspath(os.path.join(self._tool.tool_dir, 'test-data')))
    # Upload all test datasets
    for input_name, input in self._data_inputs.items():
        if input_name in test_datasets.keys():
            # Locate the dataset file in one of the candidate directories.
            for i, data_path in enumerate(test_data_paths):
                input_path = os.path.join(data_path,
                                          test_datasets[input_name])
                if os.path.exists(input_path):
                    break
                elif i + 1 == len(test_data_paths):  # the last path
                    raise ValueError('Test dataset "%s" doesn\'t exist.'
                                     % input_name)
            upload_tool = self._trans.app.toolbox.get_tool('upload1')
            filename = os.path.basename(input_path)
            # NOTE(review): file is read in text mode ('r') — presumably
            # test data is textual; confirm for binary datasets.
            with open(input_path, 'r') as f:
                content = f.read()
                # Mimic a multipart form upload for the 'upload1' tool.
                headers = {
                    'content-disposition':
                        'form-data; name="{}"; filename="{}"'.format(
                            'files_0|file_data', filename),
                }
                input_file = FieldStorage(headers=headers)
                input_file.file = input_file.make_file()
                input_file.file.write(content)
                inputs = {
                    'dbkey': '?',  # is it always a question mark?
                    'file_type': input.extensions[0],
                    'files_0|type': 'upload_dataset',
                    'files_0|space_to_tab': None,
                    'files_0|to_posix_lines': 'Yes',
                    'files_0|file_data': input_file,
                }
                params = Params(inputs, sanitize=False)
                incoming = params.__dict__
                output = upload_tool.handle_input(self._trans, incoming,
                                                  history=None)
                job_errors = output.get('job_errors', [])
                if job_errors:
                    # self._errors.extend(job_errors)
                    raise ValueError('Cannot upload a dataset.')
                else:
                    # Remember the hid of the uploaded dataset for the tour.
                    self._hids.update(
                        {input_name: output['out_data'][0][1].hid})
def test_binary_create(self):
    """A binary column accepts a FieldStorage-wrapped upload on create."""
    upload = FieldStorage()
    upload.file = BytesIO(b'fake_content')
    self.provider.create(File, {'data': upload})
def test_binary_create(self):
    """A binary column accepts a FieldStorage-wrapped upload on create."""
    upload = FieldStorage()
    upload.file = StringIO('fake_content')
    self.provider.create(File, {'data': upload})