def test_notification_two_attaches():
    """Posting with two attachments lists both files in the notification body."""
    discussion = M.Discussion(shortname='test', name='test')
    thread = M.Thread.new(discussion_id=discussion._id,
                          subject='Test comment notification')

    def _fake_upload(filename):
        # Build a minimal multipart-style upload for the given filename.
        fs = FieldStorage()
        fs.name = 'file_info'
        fs.filename = filename
        fs.type = 'text/plain'
        fs.file = StringIO('this is the content of the fake file\n')
        return fs

    post = thread.post(text=u'test message', forum=None, subject='',
                       file_info=[_fake_upload('fake.txt'),
                                  _fake_upload('fake2.txt')])
    ThreadLocalORMSession.flush_all()
    notification = M.Notification.query.get(
        subject=u'[test:wiki] Test comment notification')
    base_url = h.absurl('{}attachment/'.format(post.url()))
    assert_in(
        '\nAttachments:\n\n'
        '- [fake.txt]({0}fake.txt) (37 Bytes; text/plain)\n'
        '- [fake2.txt]({0}fake2.txt) (37 Bytes; text/plain)'.format(base_url),
        notification.text)
def test_user_manual_form(wtfs_app, pdf_1, pdf_2):
    """Exercise add / replace / delete of the user-manual PDF via the form."""
    with open(pdf_1, 'rb') as file:
        pdf_1 = BytesIO(file.read())
    with open(pdf_2, 'rb') as file:
        pdf_2 = BytesIO(file.read())

    user_manual = UserManual(wtfs_app)
    form = UserManualForm()
    form.apply_model(user_manual)
    assert form.pdf.data is None

    def _upload(stream, filename):
        # Wrap a PDF stream as a FieldStorage upload.
        fs = FieldStorage()
        fs.file = stream
        fs.type = 'application/pdf'
        fs.filename = filename
        return fs

    # Add
    form.pdf.process(PostData({'pdf': _upload(pdf_1, 'example_1.pdf')}))
    form.update_model(user_manual)
    pdf_1.seek(0)
    assert user_manual.pdf == pdf_1.read()
    form.apply_model(user_manual)
    assert form.pdf.data == {
        'filename': 'user_manual.pdf',
        'size': 8130,
        'mimetype': 'application/pdf'
    }

    # Replace
    form.pdf.process(PostData({'pdf': _upload(pdf_2, 'example_2.pdf')}))
    form.update_model(user_manual)
    pdf_2.seek(0)
    assert user_manual.pdf == pdf_2.read()
    form.apply_model(user_manual)
    assert form.pdf.data == {
        'filename': 'user_manual.pdf',
        'size': 9115,
        'mimetype': 'application/pdf'
    }

    # Delete
    form.pdf.action = 'delete'
    form.update_model(user_manual)
    assert not user_manual.exists
def test_upload_release_already_exists(self):
    """Uploading a release file that already exists must yield HTTP 409."""
    from papaye.models import Root, Package, Release, ReleaseFile
    from papaye.views.simple import UploadView

    storage = FieldStorage()
    storage.filename = 'foo.tar.gz'
    storage.file = io.BytesIO(b"content")  # fake uploaded archive
    self.request.POST = {
        "content": storage,
        "some_metadata": "Fake Metadata",
        "version": "1.0",
        "name": "my_package",
        ":action": "file_upload",
    }

    # Pre-populate the repository with the same release file.
    root = Root()
    package = Package('my_package')
    package['1.0'] = Release('1.0', '1.0', metadata={})
    package['1.0']['foo.tar.gz'] = ReleaseFile('foo.tar.gz', b'')
    root['my_package'] = package

    result = UploadView(root, self.request)()
    self.assertIsInstance(result, Response)
    self.assertEqual(result.status_int, 409)
def test_export_with_attachments(self):
    """bulk_export must record the attachment path and write the file to disk."""
    project = M.Project.query.get(shortname='test')
    blog = project.app_instance('blog')
    with h.push_context('test', 'blog', neighborhood='Projects'):
        post = BM.BlogPost.new(
            title='Test title',
            text='test post',
            labels=['the firstlabel', 'the second label'],
            delete=None)
        ThreadLocalORMSession.flush_all()
        upload = FieldStorage()
        upload.name = 'file_info'
        upload.filename = 'test_file'
        upload.file = BytesIO(b'test file1\n')
        comment = post.discussion_thread.add_post(text='test comment')
        comment.add_multiple_attachments(upload)
        ThreadLocalORMSession.flush_all()
        # Export and inspect the generated JSON.
        f = tempfile.TemporaryFile()
        temp_dir = tempfile.mkdtemp()
        blog.bulk_export(f, temp_dir, True)
        f.seek(0)
        blog = json.loads(f.read())
        blog['posts'] = sorted(
            blog['posts'], key=lambda x: x['title'], reverse=True)
        file_path = 'blog/{}/{}/{}/test_file'.format(
            post._id,
            post.discussion_thread._id,
            list(post.discussion_thread.post_class().query.find())[0].slug)
        assert_equal(
            blog['posts'][0]['discussion_thread']['posts'][0]
            ['attachments'][0]['path'],
            file_path)
        assert os.path.exists(os.path.join(temp_dir, file_path))
def test_attachment_methods():
    """Attachments resolve their parent objects and show up in notifications."""
    discussion = M.Discussion(shortname='test', name='test')
    thread = M.Thread.new(discussion_id=discussion._id, subject='Test Thread')
    post = thread.post('This is a post')
    post_att = post.attach('foo.text', StringIO('Hello, world!'),
                           discussion_id=discussion._id,
                           thread_id=thread._id,
                           post_id=post._id)
    thread_att = post.attach('foo2.text', StringIO('Hello, thread!'),
                             discussion_id=discussion._id,
                             thread_id=thread._id)
    disc_att = post.attach('foo3.text', StringIO('Hello, discussion!'),
                           discussion_id=discussion._id)
    ThreadLocalORMSession.flush_all()
    assert post_att.post == post
    assert post_att.thread == thread
    assert post_att.discussion == discussion
    for att in (post_att, thread_att, disc_att):
        assert 'wiki/_discuss' in att.url()
        assert 'attachment/' in att.url()

    # Posting with an attachment should mention it in the notification mail.
    thread = M.Thread.new(discussion_id=discussion._id,
                          subject='Test comment notification')
    fs = FieldStorage()
    fs.name = 'file_info'
    fs.filename = 'fake.txt'
    fs.type = 'text/plain'
    fs.file = StringIO('this is the content of the fake file\n')
    thread.post(text=u'test message', forum=None, subject='', file_info=fs)
    ThreadLocalORMSession.flush_all()
    notification = M.Notification.query.get(
        subject=u'[test:wiki] Test comment notification')
    assert '\nAttachment: fake.txt (37 Bytes; text/plain)' in notification.text
def test_export_with_attachments(self):
    """bulk_export must record the attachment path and write the file to disk."""
    project = M.Project.query.get(shortname='test')
    discussion = project.app_instance('discussion')
    # Pick the thread with the newest activity in the 'general' forum.
    thread = sorted(Forum.query.get(shortname='general').threads,
                    key=attrgetter('last_post_date'))[-1]
    post = thread.first_post
    test_file1 = FieldStorage()
    test_file1.name = 'file_info'
    test_file1.filename = 'test_file'
    test_file1.file = BytesIO(b'test file1\n')
    post.add_attachment(test_file1)
    ThreadLocalORMSession.flush_all()
    f = tempfile.TemporaryFile('w+')
    temp_dir = tempfile.mkdtemp()
    discussion.bulk_export(f, temp_dir, True)
    f.seek(0)
    discussion = json.loads(f.read())
    forums = sorted(discussion['forums'], key=lambda x: x['name'])
    threads = sorted(forums[0]['threads'], key=lambda x: x['subject'])
    file_path = os.path.join('discussion', str(post.discussion_id),
                             str(post.thread_id), post.slug, 'test_file')
    assert_equal(threads[0]['posts'][0]['attachments'][0]['path'], file_path)
    # BUG FIX: the original bare `os.path.exists(file_path)` discarded its
    # result (asserted nothing) and tested a CWD-relative path; assert the
    # exported file really exists under the export directory instead.
    assert os.path.exists(os.path.join(temp_dir, file_path))
def test_attachment_methods():
    """Attachments resolve post/thread/discussion parents; mail mentions them."""
    d = M.Discussion(shortname="test", name="test")
    t = M.Thread.new(discussion_id=d._id, subject="Test Thread")
    p = t.post("This is a post")
    attachments = [
        p.attach("foo.text", StringIO("Hello, world!"),
                 discussion_id=d._id, thread_id=t._id, post_id=p._id),
        p.attach("foo2.text", StringIO("Hello, thread!"),
                 discussion_id=d._id, thread_id=t._id),
        p.attach("foo3.text", StringIO("Hello, discussion!"),
                 discussion_id=d._id),
    ]
    ThreadLocalORMSession.flush_all()
    p_att, t_att, d_att = attachments
    assert p_att.post == p
    assert p_att.thread == t
    assert p_att.discussion == d
    for att in attachments:
        assert "wiki/_discuss" in att.url()
        assert "attachment/" in att.url()

    # The notification mail must mention the attached file.
    t = M.Thread.new(discussion_id=d._id, subject="Test comment notification")
    fs = FieldStorage()
    fs.name = "file_info"
    fs.filename = "fake.txt"
    fs.type = "text/plain"
    fs.file = StringIO("this is the content of the fake file\n")
    t.post(text=u"test message", forum=None, subject="", file_info=fs)
    ThreadLocalORMSession.flush_all()
    n = M.Notification.query.get(subject=u"[test:wiki] Test comment notification")
    assert "\nAttachment: fake.txt (37 Bytes; text/plain)" in n.text
def test_upload_release_already_exists(self):
    """Re-uploading an existing release file must return HTTP 409 Conflict."""
    from papaye.models import Root, Package, Release, ReleaseFile
    from papaye.views.simple import UploadView

    storage = FieldStorage()
    storage.filename = "foo.tar.gz"
    storage.file = io.BytesIO(b"content")  # fake uploaded archive
    self.request.POST = {
        "content": storage,
        "some_metadata": "Fake Metadata",
        "version": "1.0",
        "name": "my_package",
        ":action": "file_upload",
    }

    # Seed the repository with the same release file, stored under the
    # normalized (dash-separated) package name.
    root = Root()
    package = Package("my_package")
    package["1.0"] = Release("1.0", "1.0", metadata={})
    package["1.0"]["foo.tar.gz"] = ReleaseFile("foo.tar.gz", b"")
    root["my-package"] = package

    result = UploadView(root, self.request)()
    self.assertIsInstance(result, Response)
    self.assertEqual(result.status_int, 409)
def test_attachment_methods():
    """Check attachment parent lookups, URLs, and the notification text."""
    d = M.Discussion(shortname='test', name='test')
    t = M.Thread.new(discussion_id=d._id, subject='Test Thread')
    p = t.post('This is a post')
    p_att = p.attach('foo.text', StringIO('Hello, world!'),
                     discussion_id=d._id, thread_id=t._id, post_id=p._id)
    t_att = p.attach('foo2.text', StringIO('Hello, thread!'),
                     discussion_id=d._id, thread_id=t._id)
    d_att = p.attach('foo3.text', StringIO('Hello, discussion!'),
                     discussion_id=d._id)
    ThreadLocalORMSession.flush_all()

    # Parent back-references.
    assert p_att.post == p
    assert p_att.thread == t
    assert p_att.discussion == d
    # URL shape.
    for attachment in (p_att, t_att, d_att):
        url = attachment.url()
        assert 'wiki/_discuss' in url
        assert 'attachment/' in url

    # Notification mail must mention the attached file.
    t = M.Thread.new(discussion_id=d._id, subject='Test comment notification')
    fs = FieldStorage()
    fs.name = 'file_info'
    fs.filename = 'fake.txt'
    fs.type = 'text/plain'
    fs.file = StringIO('this is the content of the fake file\n')
    t.post(text=u'test message', forum=None, subject='', file_info=fs)
    ThreadLocalORMSession.flush_all()
    n = M.Notification.query.get(
        subject=u'[test:wiki] Test comment notification')
    assert '\nAttachment: fake.txt (37 Bytes; text/plain)' in n.text
def test_export_with_attachments(self):
    """bulk_export must record the attachment path and write the file to disk."""
    project = M.Project.query.get(shortname='test')
    discussion = project.app_instance('discussion')
    post = Forum.query.get(shortname='general').sorted_threads[0].first_post
    test_file1 = FieldStorage()
    test_file1.name = 'file_info'
    test_file1.filename = 'test_file'
    test_file1.file = StringIO('test file1\n')
    post.add_attachment(test_file1)
    ThreadLocalORMSession.flush_all()
    f = tempfile.TemporaryFile()
    temp_dir = tempfile.mkdtemp()
    discussion.bulk_export(f, temp_dir, True)
    f.seek(0)
    discussion = json.loads(f.read())
    forums = sorted(discussion['forums'], key=lambda x: x['name'])
    threads = sorted(forums[0]['threads'], key=lambda x: x['subject'])
    file_path = os.path.join(
        'discussion', str(post.discussion_id), str(post.thread_id),
        post.slug, 'test_file')
    assert_equal(threads[0]['posts'][0]['attachments'][0]['path'], file_path)
    # BUG FIX: the original bare `os.path.exists(file_path)` discarded its
    # result (asserted nothing) and tested a CWD-relative path; assert the
    # exported file really exists under the export directory instead.
    assert os.path.exists(os.path.join(temp_dir, file_path))
def test_upload_release_with_spaces(self):
    """A package name containing spaces is normalized to dashes on upload."""
    from papaye.models import Root, Package, Release, ReleaseFile
    from papaye.views.simple import UploadView

    storage = FieldStorage()
    storage.filename = "foo.tar.gz"
    storage.file = io.BytesIO(b"content")  # fake uploaded archive
    self.request.POST = {
        "content": storage,
        "some_metadata": "Fake Metadata",
        "version": "1.0",
        "name": "my package",
        ":action": "file_upload",
        "md5_digest": "Fake MD5",
    }
    root = Root()
    self.request.root = root

    result = UploadView(root, self.request)()

    self.assertIsInstance(result, Response)
    self.assertEqual(result.status_int, 200)
    # Stored under the normalized name "my-package".
    self.assertTrue("my-package" in root)
    self.assertIsInstance(root["my-package"], Package)
    self.assertTrue(root["my-package"].releases.get("1.0", False))
    release = root["my-package"]["1.0"]
    self.assertIsInstance(release, Release)
    self.assertTrue(release.release_files.get("foo.tar.gz", b""))
    self.assertIsInstance(release["foo.tar.gz"], ReleaseFile)
    self.assertEqual(release["foo.tar.gz"].md5_digest, "Fake MD5")
    self.assertIsNotNone(release.metadata)
    self.assertIsInstance(release.metadata, dict)
    self.assertEqual(release.release_files.get("foo.tar.gz", b"").size, 7)
def test_export_with_attachments(self):
    """bulk_export must record the attachment path and write the file to disk."""
    project = M.Project.query.get(shortname='test')
    blog = project.app_instance('blog')
    with h.push_context('test', 'blog', neighborhood='Projects'):
        post = BM.BlogPost.new(
            title='Test title',
            text='test post',
            labels=['the firstlabel', 'the second label'],
            delete=None)
        ThreadLocalORMSession.flush_all()
        upload = FieldStorage()
        upload.name = 'file_info'
        upload.filename = 'test_file'
        upload.file = StringIO('test file1\n')
        comment = post.discussion_thread.add_post(text='test comment')
        comment.add_multiple_attachments(upload)
        ThreadLocalORMSession.flush_all()
        # Export and inspect the generated JSON.
        f = tempfile.TemporaryFile()
        temp_dir = tempfile.mkdtemp()
        blog.bulk_export(f, temp_dir, True)
        f.seek(0)
        blog = json.loads(f.read())
        blog['posts'] = sorted(
            blog['posts'], key=lambda x: x['title'], reverse=True)
        file_path = 'blog/{}/{}/{}/test_file'.format(
            post._id,
            post.discussion_thread._id,
            list(post.discussion_thread.post_class().query.find())[0].slug)
        assert_equal(blog['posts'][0]['discussion_thread']['posts'][0]
                     ['attachments'][0]['path'], file_path)
        assert os.path.exists(os.path.join(temp_dir, file_path))
def test(self):
    """WebpyFileField exposes the wrapped filename and content chunks."""
    raw = FieldStorage()
    raw.filename = 'aaa'
    raw.file = StringIO('abc')
    wrapped = WebpyFileField(raw)
    self.assertEqual('aaa', wrapped.filename)
    self.assertEqual(['abc'], list(wrapped.chunks()))
def test_should_provide_simple_wrapper(self):
    """The FieldStorage wrapper must pass through `file` and `filename`."""
    wrapped = CGIFieldStorage()
    wrapped.file = 'foo'
    wrapped.filename = 'bar'
    wrapper = FieldStorage(wrapped)
    self.assertEqual(wrapped.file, wrapper.file)
    self.assertEqual(wrapped.filename, wrapper.filename)
def create_file(mimetype, filename, content):
    """Return a FieldStorage backed by a temp file containing *content*."""
    fs = FieldStorage()
    fs.type = mimetype
    fs.filename = filename
    fs.file = TemporaryFile("wb+")
    fs.file.write(content)
    fs.file.seek(0)  # rewind so consumers read from the start
    return fs
def test_notification_two_attaches():
    """Two attachments are listed together on the notification's line."""
    discussion = M.Discussion(shortname='test', name='test')
    thread = M.Thread.new(discussion_id=discussion._id,
                          subject='Test comment notification')

    def _fake_upload(filename):
        # Minimal multipart-style upload for the given filename.
        fs = FieldStorage()
        fs.name = 'file_info'
        fs.filename = filename
        fs.type = 'text/plain'
        fs.file = StringIO('this is the content of the fake file\n')
        return fs

    thread.post(text=u'test message', forum=None, subject='',
                file_info=[_fake_upload('fake.txt'), _fake_upload('fake2.txt')])
    ThreadLocalORMSession.flush_all()
    notification = M.Notification.query.get(
        subject=u'[test:wiki] Test comment notification')
    assert ('\nAttachment: fake.txt (37 Bytes; text/plain) '
            'fake2.txt (37 Bytes; text/plain)') in notification.text
def test_accept_multipart_content(self, result_request, tool_definition):
    """A FieldStorage argument is saved to disk and exposed as *_path."""
    upload = FieldStorage()
    upload.filename = 'x.txt'
    upload.file = StringIO('whee')
    tool_definition['argument_names'] = ('x_path', )
    result = result_request.prepare_arguments(tool_definition, {'x': upload})
    assert open(result.arguments['x_path']).read() == 'whee'
def test_accept_multipart_content(self, result_request, tool_definition):
    """A FieldStorage argument is saved to disk and exposed as *_path."""
    field_storage = FieldStorage()
    field_storage.filename = 'x.txt'
    field_storage.file = StringIO('whee')
    tool_definition['argument_names'] = ('x_path',)
    raw_arguments = {'x': field_storage}
    prepared = result_request.prepare_arguments(tool_definition, raw_arguments)
    saved_path = prepared.arguments['x_path']
    assert open(saved_path).read() == 'whee'
def test_valid_imagefile_upload(self):
    """Non-image binary data posing as a PNG must be rejected as Invalid."""
    from adhocracy.forms.common import ValidImageFileUpload
    from formencode import Invalid
    from cgi import FieldStorage
    from io import BytesIO

    upload = FieldStorage()
    upload.file = BytesIO(b"binarydata")
    upload.filename = u"test.png"
    upload.name = u"thumbs"
    self.assertRaises(Invalid, ValidImageFileUpload.to_python, upload)
def upload_request(self, files: dict, metadata=None, user=None):
    """Build a generic POST request uploading *files* plus JSON *metadata*.

    :param files: mapping of filename -> raw bytes content
    :param metadata: JSON string; defaults to an empty object
    """
    fields = MultiDict(
        {'metadata': metadata if metadata is not None else '{}'})
    for name, content in files.items():
        storage = FieldStorage()
        storage.file = BytesIO(content)
        storage.filename = name
        fields.add('file', storage)
    return self.generic_request(post=fields, user=user)
def test_multiple_attachments():
    """add_multiple_attachments should attach every supplied file."""
    def _upload(filename, body):
        fs = FieldStorage()
        fs.name = 'file_info'
        fs.filename = filename
        fs.type = 'text/plain'
        fs.file = StringIO(body)
        return fs

    file1 = _upload('test1.txt', 'test file1\n')
    file2 = _upload('test2.txt', 'test file2\n')
    discussion = M.Discussion(shortname='test', name='test')
    thread = M.Thread.new(discussion_id=discussion._id, subject='Test Thread')
    post = thread.post('test post')
    post.add_multiple_attachments([file1, file2])
    ThreadLocalORMSession.flush_all()
    assert_equals(len(post.attachments), 2)
    names = [att.filename for att in post.attachments]
    assert 'test1.txt' in names
    assert 'test2.txt' in names
def process(content, **kwargs):
    """Bind a MunicipalityDataUploadField and feed it *content* as a CSV upload."""
    field = MunicipalityDataUploadField(**kwargs).bind(form, 'upload')
    upload = FieldStorage()
    upload.file = BytesIO(content)
    upload.type = 'text/plain'
    upload.filename = 'test.csv'
    field.process(PostData({'upload': upload}))
    return field
def import_stage(self, harvest_object):
    # Filter/normalize the harvested package before delegating to the base
    # harvester, then sync package relationships and re-create upload-type
    # resources by fetching and re-uploading their files.
    package_dict = json.loads(harvest_object.content)
    if not self._should_import_local(package_dict):
        # Not importable locally: mark deleted rather than dropping it.
        package_dict['state'] = 'deleted'
    else:
        # Apply the plugin's white/black lists and field fix-ups.
        package_dict = self._apply_package_extras_white_list(package_dict)
        package_dict = self._apply_package_resource_extras_black_list(package_dict)
        package_dict = self._fix_date_in_fields(package_dict)
        package_dict = self._set_license(package_dict)
        package_dict = self._pop_black_list_resources_by_type(package_dict)
    harvest_object.content = json.dumps(package_dict)
    # Upload-type resources are removed here and re-created manually below.
    upload_resources = self._pop_upload_resources(package_dict)
    import_stage_result = super(GuiaHarvesterPlugin, self).import_stage(harvest_object)
    if import_stage_result:
        package_dict = json.loads(harvest_object.content)
        harvested_rels = package_dict.get('relationships', [])
        try:
            this_package = model.Package.get(package_dict['name'])
            if not this_package:
                raise logic.NotFound()
        except logic.NotFound as nf:
            log.info('import_stage(): could not find package "{0}"; relationships not updated: {1}'.format(package_dict['name'], nf))
            return import_stage_result
        existing_rels = this_package.get_relationships()
        self._update_relationships(existing_rels, harvested_rels)
        for resource_dict in upload_resources:
            resource_url = resource_dict['url']
            resource_filename = resource_url.split('/')[-1]
            try:
                # Fetch the remote file so it can be re-uploaded locally.
                response = requests.get(resource_url)
                resource_file = StringIO(response.content)
            except Exception,e:  # Python 2 except syntax; this module targets py2
                self._save_object_error('Resource not harvested for package "{0}". \nUnable to fetch resource from "{1}": {2}'.format(package_dict['name'], resource_url, e), harvest_object, 'Import')
                continue
            # resource_create expects a cgi.FieldStorage-like 'upload' object.
            cfs = FieldStorage()
            cfs.file = resource_file
            cfs.filename = resource_filename
            resource_dict['upload'] = cfs
            # Timestamps/api fields are server-assigned; drop stale values.
            if 'created' in resource_dict:
                del resource_dict['created']
            if 'last_modified' in resource_dict:
                del resource_dict['last_modified']
            if 'api' in resource_dict:
                del resource_dict['api']
            try:
                the_resource = toolkit.get_action('resource_create')(data_dict=resource_dict)
            except Exception,e:
                self._save_object_error('Resource not harvested for package "{0}". Unable to import the resource originally from "{1}": {2}'.format(package_dict['name'], resource_url, e), harvest_object, 'Import')
                continue
    # NOTE(review): no trailing return is visible here; only the NotFound
    # branch explicitly returns import_stage_result. Confirm a final
    # `return import_stage_result` was not lost.
def apply_model(self, model):
    """Copy *model*'s fields (including any organigram file) into the form."""
    self.title.data = model.title
    self.portrait.data = model.portrait
    self.export_fields.data = model.export_fields
    if model.organigram_file:
        # Wrap the stored file in a FieldStorage so the field can consume it.
        storage = FieldStorage()
        storage.type = model.organigram_file.content_type
        storage.filename = model.organigram_file.filename
        storage.file = BytesIO(model.organigram_file.read())
        self.organigram.data = self.organigram.process_fieldstorage(storage)
    if hasattr(self, 'is_hidden_from_public'):
        self.is_hidden_from_public.data = model.is_hidden_from_public
    self.reorder_export_fields()
def call_action(self, action, data_dict=None, context=None, apikey=None, files=None, requests_kwargs=None): """ :param action: the action name, e.g. 'package_create' :param data_dict: the dict to pass to the action, defaults to {} :param context: an override for the context to use for this action, remember to include a 'user' when necessary :param apikey: not supported :param files: None or {field-name: file-to-be-sent, ...} :param requests_kwargs: ignored for LocalCKAN (requests not used) """ # copy dicts because actions may modify the dicts they are passed # (CKAN...you so crazy) data_dict = dict(data_dict or []) context = dict(self.context if context is None else context) if apikey: # FIXME: allow use of apikey to set a user in context? raise CKANAPIError( "LocalCKAN.call_action does not support " "use of apikey parameter, use context['user'] instead") to_close = [] try: for fieldname in files or []: f = files[fieldname] if isinstance(f, tuple): # requests accepts (filename, file...) tuples filename, f = f[:2] else: filename = f.name try: f.seek(0) except (AttributeError, IOError): f = _write_temp_file(f) to_close.append(f) field_storage = FieldStorage() field_storage.file = f field_storage.filename = filename data_dict[fieldname] = field_storage return self._get_action(action)(context, data_dict) finally: for f in to_close: f.close()
def test_add_attachment():
    """add_attachment stores the file with its name and content type."""
    upload = FieldStorage()
    upload.name = "file_info"
    upload.filename = "test.txt"
    upload.type = "text/plain"
    upload.file = StringIO("test file\n")
    discussion = M.Discussion(shortname="test", name="test")
    thread = M.Thread.new(discussion_id=discussion._id, subject="Test Thread")
    post = thread.post("test post")
    post.add_attachment(upload)
    ThreadLocalORMSession.flush_all()
    assert post.attachments.count() == 1, post.attachments.count()
    attachment = post.attachments.first()
    assert attachment.filename == "test.txt", attachment.filename
    assert attachment.content_type == "text/plain", attachment.content_type
def test_add_attachment():
    """add_attachment stores the file with its name and content type."""
    upload = FieldStorage()
    upload.name = 'file_info'
    upload.filename = 'test.txt'
    upload.type = 'text/plain'
    upload.file = StringIO('test file\n')
    discussion = M.Discussion(shortname='test', name='test')
    thread = M.Thread.new(discussion_id=discussion._id, subject='Test Thread')
    post = thread.post('test post')
    post.add_attachment(upload)
    ThreadLocalORMSession.flush_all()
    assert post.attachments.count() == 1, post.attachments.count()
    attachment = post.attachments.first()
    assert attachment.filename == 'test.txt', attachment.filename
    assert attachment.content_type == 'text/plain', attachment.content_type
def test_add_attachment():
    """add_attachment stores the file with its name and content type."""
    upload = FieldStorage()
    upload.name = 'file_info'
    upload.filename = 'test.txt'
    upload.type = 'text/plain'
    upload.file = StringIO('test file\n')
    discussion = M.Discussion(shortname='test', name='test')
    thread = M.Thread.new(discussion_id=discussion._id, subject='Test Thread')
    post = thread.post('test post')
    post.add_attachment(upload)
    ThreadLocalORMSession.flush_all()
    assert_equals(len(post.attachments), 1)
    attachment = post.attachments[0]
    assert attachment.filename == 'test.txt', attachment.filename
    assert attachment.content_type == 'text/plain', attachment.content_type
def setup_with_tools(self):
    """Create two tickets, comment on one, and attach a file to the comment."""
    super(TestBulkExport, self).setup_with_tools()
    self.project = M.Project.query.get(shortname='test')
    self.tracker = self.project.app_instance('bugs')
    self.new_ticket(summary='foo', _milestone='1.0')
    self.new_ticket(summary='bar', _milestone='2.0')
    self.ticket = TM.Ticket.query.find(dict(summary='foo')).first()
    self.post = self.ticket.discussion_thread.add_post(text='silly comment')
    ThreadLocalORMSession.flush_all()
    attachment = FieldStorage()
    attachment.name = 'file_info'
    attachment.filename = 'test_file'
    attachment.file = StringIO('test file1\n')
    self.post.add_attachment(attachment)
    ThreadLocalORMSession.flush_all()
def setup_with_tools(self):
    """Create two tickets, comment on one, and attach a file to the comment."""
    super(TestBulkExport, self).setup_with_tools()
    self.project = M.Project.query.get(shortname='test')
    self.tracker = self.project.app_instance('bugs')
    self.new_ticket(summary='foo', _milestone='1.0')
    self.new_ticket(summary='bar', _milestone='2.0')
    self.ticket = TM.Ticket.query.find(dict(summary='foo')).first()
    self.post = self.ticket.discussion_thread.add_post(text='silly comment')
    ThreadLocalORMSession.flush_all()
    attachment = FieldStorage()
    attachment.name = 'file_info'
    attachment.filename = 'test_file'
    attachment.file = BytesIO(b'test file1\n')
    self.post.add_attachment(attachment)
    ThreadLocalORMSession.flush_all()
def test_addmsdata_as_file(self):
    """add_ms_data with an uploaded file submits a job and keeps the filename."""
    from cgi import FieldStorage
    ms_file = FieldStorage()
    # BUG FIX: use a raw string — in the original non-raw literal
    # 'c:\bla\bla\F1234.mzxml' the '\b' sequences were backspace escapes,
    # silently corrupting the Windows path (the sibling test
    # test_allinone_with_ms_data_as_file already uses the raw form).
    # Both the input and the expectation change together, so the test's
    # semantics are preserved.
    ms_file.filename = r'c:\bla\bla\F1234.mzxml'
    post = {'ms_data_file': ms_file}
    self.rpc.request.POST = post
    self.jobquery.add_ms_data.return_value = self.jq
    response = self.rpc.add_ms_data()
    self.jobquery.add_ms_data.assert_called_with(post, True)
    self.job2.db.moleculesTotalCount.assert_called_with()
    self.assertEqual(self.job2.ms_filename, r'c:\bla\bla\F1234.mzxml')
    self.rpc.new_job.assert_called_with()
    self._assert_status_callback_url()
    self.rpc.job_factory.submitQuery.assert_called_with(self.jq, self.job2)
    self.assertEquals(response, {'success': True, 'jobid': self.jobid2})
def call_action(self, action, data_dict=None, context=None, apikey=None, files=None): """ :param action: the action name, e.g. 'package_create' :param data_dict: the dict to pass to the action, defaults to {} :param context: an override for the context to use for this action, remember to include a 'user' when necessary :param apikey: not supported :param files: None or {field-name: file-to-be-sent, ...} """ # copy dicts because actions may modify the dicts they are passed # (CKAN...you so crazy) data_dict = dict(data_dict or []) context = dict(self.context if context is None else context) if apikey: # FIXME: allow use of apikey to set a user in context? raise CKANAPIError("LocalCKAN.call_action does not support " "use of apikey parameter, use context['user'] instead") to_close = [] try: for fieldname in files or []: f = files[fieldname] if isinstance(f, tuple): # requests accepts (filename, file...) tuples filename, f = f[:2] else: filename = f.name try: f.seek(0) except (AttributeError, IOError): f = _write_temp_file(f) to_close.append(f) field_storage = FieldStorage() field_storage.file = f field_storage.filename = filename data_dict[fieldname] = field_storage return self._get_action(action)(context, data_dict) finally: for f in to_close: f.close()
def test_update_dataset_form(session):
    """An empty form fails validation; a well-formed XLSX upload passes."""
    request = DummyRequest(session, DummyPrincipal())

    # An unfilled form must not validate.
    form = UpdateDatasetForm()
    form.request = request
    assert not form.validate()

    # Build a minimal two-sheet workbook matching the expected columns.
    buffer = BytesIO()
    workbook = Workbook(buffer)
    worksheet = workbook.add_worksheet('DATA')
    workbook.add_worksheet('CITATION')
    worksheet.write_row(0, 0, ColumnMapper().columns.values())
    worksheet.write_row(1, 0, [
        100.1,           # anr / NUMERIC
        '1.2.2008',      # datum / DATE
        1,               # legislatur / INTEGER
        '2004-2008',     # legisjahr / INT4RANGE
        'kurztitel de',  # titel_kurz_d
        'kurztitel fr',  # titel_kurz_f
        'titel de',      # titel_off_d
        'titel fr',      # titel_off_f
        'stichwort',     # stichwort / TEXT
        2,               # anzahl / INTEGER
        3,               # rechtsform
    ])
    workbook.close()
    buffer.seek(0)

    upload = FieldStorage()
    upload.file = buffer
    upload.type = 'application/excel'
    upload.filename = 'test.xlsx'
    form.dataset.process(DummyPostData({'dataset': upload}))
    assert form.validate()
def test_upload_release_with_spaces(self):
    """A package name containing spaces is normalized to dashes on upload."""
    from papaye.models import Root, Package, Release, ReleaseFile
    from papaye.views.simple import UploadView

    storage = FieldStorage()
    storage.filename = 'foo.tar.gz'
    storage.file = io.BytesIO(b"content")  # fake uploaded archive
    self.request.POST = {
        "content": storage,
        "some_metadata": "Fake Metadata",
        "version": "1.0",
        "name": "my package",
        ":action": "file_upload",
        "md5_digest": "Fake MD5"
    }
    root = Root()
    self.request.root = root

    result = UploadView(root, self.request)()

    self.assertIsInstance(result, Response)
    self.assertEqual(result.status_int, 200)
    # Stored under the normalized name 'my-package'.
    self.assertTrue('my-package' in root)
    self.assertIsInstance(root['my-package'], Package)
    self.assertTrue(root['my-package'].releases.get('1.0', False))
    release = root['my-package']['1.0']
    self.assertIsInstance(release, Release)
    self.assertTrue(release.release_files.get('foo.tar.gz', b''))
    self.assertIsInstance(release['foo.tar.gz'], ReleaseFile)
    self.assertEqual(release['foo.tar.gz'].md5_digest, "Fake MD5")
    self.assertIsNotNone(release.metadata)
    self.assertIsInstance(release.metadata, dict)
    self.assertEqual(
        release.release_files.get('foo.tar.gz', b'').size, 7)
def test_import_municipality_data_form(session):
    """update_model applies dates to known municipalities; a CSV upload validates."""
    municipalities = MunicipalityCollection(session)
    municipalities.add(name="Boppelsen", bfs_number=82)
    municipalities.add(name="Adlikon", bfs_number=21)

    # Applying data: unknown BFS numbers (241) are silently ignored.
    form = ImportMunicipalityDataForm()
    form.request = Request(session)
    form.file.data = {
        21: {'dates': [date(2019, 1, 1), date(2019, 1, 7)]},
        241: {'dates': [date(2019, 1, 3), date(2019, 1, 9)]},
        82: {'dates': [date(2019, 1, 4), date(2019, 1, 10)]},
    }
    form.update_model(municipalities)
    assert [
        (m.bfs_number, [d.date for d in m.pickup_dates])
        for m in municipalities.query()
    ] == [
        (21, [date(2019, 1, 1), date(2019, 1, 7)]),
        (82, [date(2019, 1, 4), date(2019, 1, 10)]),
    ]

    # Validation: an empty form fails, a CSV upload passes.
    form = ImportMunicipalityDataForm()
    form.request = Request(session)
    assert not form.validate()
    upload = FieldStorage()
    upload.file = BytesIO("Adlikon;21;-1;Normal;12.2.2015".encode('cp1252'))
    upload.type = 'text/csv'
    upload.filename = 'test.csv'
    form.file.process(PostData({'file': upload}))
    assert form.validate()
def create_resource(filepath, package_id, api_key, name="Default"):
    """Create a CKAN resource for *filepath* via the `resource_create` action.

    NOTE: api_key is currently unused — the action is invoked in-process
    rather than over the HTTP API. TODO: accept **kwargs for extra fields.
    """
    with open(filepath, 'rb') as f:
        field_storage = FieldStorage()
        field_storage.file = f
        field_storage.filename = name
        return toolkit.get_action('resource_create')({}, {
            'package_id': package_id,
            'upload': field_storage,
            "name": name
        })
def test_allinone_with_ms_data_as_file(self):
    """allinone with an uploaded file submits the job and keeps the filename."""
    from cgi import FieldStorage
    ms_file = FieldStorage()
    ms_file.filename = r'c:\bla\bla\F1234.mzxml'
    post = {'ms_data_file': ms_file}
    request = testing.DummyRequest(post=post)
    request.user = User('bob', 'Bob Example', '*****@*****.**')

    # Wire up a fake job and factory so no real submission happens.
    job = self.fake_job()
    jobquery = Mock(JobQuery)
    job.jobquery.return_value = jobquery
    views = Views(request)
    views.job_factory = Mock(JobFactory)
    views.job_factory.fromScratch = Mock(return_value=job)

    response = views.allinone()

    views.job_factory.fromScratch.assert_called_with('bob')
    jobquery.allinone.assert_called_with(post)
    views.job_factory.submitQuery.assert_called_with(jobquery.allinone(), job)
    self.assertEqual(response, {'success': True, 'jobid': 'foo'})
    self.assertEqual(job.ms_filename, r'c:\bla\bla\F1234.mzxml')
    job.jobquery.assert_called_with('http://example.com/status/foo.json',
                                    False, 1)
# NOTE(review): `io` and `FieldStorage` are used below but not imported in
# this chunk — presumably imported earlier in the file (e.g. `import io` and
# webob's/cgi's FieldStorage). TODO confirm.
import pretend
import pytest
from pyramid.httpexceptions import HTTPNotFound
from sqlalchemy.orm.exc import NoResultFound
from webob.multidict import MultiDict

from warehouse.admin.interfaces import ISponsorLogoStorage
from warehouse.admin.views import sponsors as views
from warehouse.sponsors.models import Sponsor

from ....common.db.sponsors import SponsorFactory

# Shared fixture: a 1x1 PNG uploaded as the sponsor's color logo.
COLOR_LOGO_FILE = FieldStorage()
COLOR_LOGO_FILE.filename = "colorlogo.png"
COLOR_LOGO_FILE.file = io.BytesIO((
    b"\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x01\x00\x00\x00\x01\x08\x06"
    b"\x00\x00\x00\x1f\x15\xc4\x89\x00\x00\x00\rIDATx\xdac\xfc\xcf\xc0P\x0f\x00"
    b"\x04\x85\x01\x80\x84\xa9\x8c!\x00\x00\x00\x00IEND\xaeB`\x82"))
COLOR_LOGO_FILE.type = "image/png"

# Shared fixture: the same 1x1 PNG uploaded as the white logo variant.
WHITE_LOGO_FILE = FieldStorage()
WHITE_LOGO_FILE.filename = "whitelogo.png"
WHITE_LOGO_FILE.file = io.BytesIO((
    b"\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x01\x00\x00\x00\x01\x08\x06"
    b"\x00\x00\x00\x1f\x15\xc4\x89\x00\x00\x00\rIDATx\xdac\xfc\xcf\xc0P\x0f\x00"
    b"\x04\x85\x01\x80\x84\xa9\x8c!\x00\x00\x00\x00IEND\xaeB`\x82"))
WHITE_LOGO_FILE.type = "image/png"
def prepare_field_storage(name, x):
    """Wrap *x* (converted via prepare_file) in a FieldStorage named *name*."""
    storage = FieldStorage()
    storage.file = prepare_file(x)
    storage.filename = name
    return storage
def index(self, id):
    # Controller action for the dataset-actions page.
    # POST with action-type=clone: duplicates the dataset (including uploaded
    # resource files) under a new name/title.
    # GET with action-type=export: streams a zip export of the dataset.
    # Otherwise renders the form.
    print 'index'
    context = {
        'model': ckan.model,
        'session': ckan.model.Session,
        'user': pylons.c.user or pylons.c.author
    }
    try:
        plugins.toolkit.c.pkg_dict = plugins.toolkit.get_action(
            'package_show')(context, {
                'id': id
            })
        # 'package_show' populates context['package'] as a side effect
        plugins.toolkit.c.pkg = context['package']
        plugins.toolkit.c.resources_json = h.json.dumps(
            plugins.toolkit.c.pkg_dict.get('resources', []))
    except plugins.toolkit.ObjectNotFound:
        plugins.toolkit.abort(404, plugins.toolkit._('Dataset not found'))
    except plugins.toolkit.NotAuthorized:
        plugins.toolkit.abort(
            401,
            plugins.toolkit._('Unauthorized to read package %s') % id)
    vars = {
        'errors': {},
        'data': {
            'title': '',  #plugins.toolkit._('Clone of {dataset}').format(dataset=plugins.toolkit.c.pkg_dict['title'])'
            'name': ''
        }
    }
    if plugins.toolkit.request.method == 'POST':
        post_data = plugins.toolkit.request.POST
        if post_data['action-type'] == 'clone':
            # Fresh context for the create/update permission checks
            context = {
                'model': ckan.model,
                'session': ckan.model.Session,
                'user': pylons.c.user or pylons.c.author
            }
            try:
                plugins.toolkit.check_access('package_create', context)
                plugins.toolkit.check_access('package_update', context,
                                             {'id': id})
                # drop the cached package so later actions don't reuse it
                del context['package']
            except plugins.toolkit.NotAuthorized:
                plugins.toolkit.abort(
                    401,
                    plugins.toolkit._('Unauthorized to clone this package'))
            #get current package...
            # NOTE(review): context is None here (unlike the calls above) -
            # confirm this is intentional and not meant to reuse `context`
            pkg_dict = plugins.toolkit.get_action('package_show')(None, {
                'id': id
            })
            #update necessary fields
            title = ckan.plugins.toolkit.request.params.getone('title')
            name = ckan.plugins.toolkit.request.params.getone('name')
            dt = datetime.datetime.now()
            pkg_dict['title'] = title
            pkg_dict['name'] = name
            pkg_dict['metadata_created'] = dt
            pkg_dict['metadata_modified'] = dt
            # strip identifiers so package_create mints new ones
            del pkg_dict['id']
            del pkg_dict['revision_id']
            del pkg_dict['revision_timestamp']
            resources = pkg_dict['resources']
            for resource in resources:
                if resource['url_type'] == 'upload':
                    #copy file
                    # Re-upload the stored file by wrapping the on-disk copy
                    # in a FieldStorage, as resource_create expects.
                    upload = uploader.ResourceUpload(resource)
                    filepath = upload.get_path(resource['id'])
                    cfs = FieldStorage()
                    cfs.file = open(filepath)
                    cfs.filename = resource['url'].split('/')[-1]
                    resource['upload'] = cfs
                resource['created'] = dt
                del resource['id']
                del resource['revision_id']
                del resource['revision_timestamp']
            # resources are created individually below, not via package_create
            del pkg_dict['resources']
            #create a new one based on existing one...
            try:
                #for some reason, the pkg_dict given to 'package_create' still has the old id
                pkg_dict_new = plugins.toolkit.get_action(
                    'package_create')(context, pkg_dict)
                for resource in resources:
                    resource['package_id'] = pkg_dict_new['id']
                    plugins.toolkit.get_action('resource_create')(context,
                                                                  resource)
                #if package already has a review date set, return it...
                if pkg_dict.get('next_review_date'):
                    package_review = get_package_review(
                        ckan.model.Session, pkg_dict_new['id'])
                    if package_review:
                        package_review.next_review_date = pkg_dict.get(
                            'next_review_date')
                        update_package_review(context['session'],
                                              package_review)
                    else:
                        add_package_review(
                            context['session'], pkg_dict_new['id'],
                            pkg_dict.get('next_review_date'))
            except plugins.toolkit.ValidationError as ve:
                # Re-render the form with the validation errors; name errors
                # are shown separately from the rest ('errorsOther').
                plugins.toolkit.c.pkg_dict = plugins.toolkit.get_action(
                    'package_show')(context, {
                        'id': id
                    })
                plugins.toolkit.c.pkg = context['package']
                plugins.toolkit.c.resources_json = h.json.dumps(
                    plugins.toolkit.c.pkg_dict.get('resources', []))
                errorsOther = dict(ve.error_dict)
                if 'name' in errorsOther:
                    del errorsOther['name']
                vars = {
                    'errors': ve.error_dict,
                    'errorsOther': errorsOther,
                    'data': {
                        'title': title,
                        'name': name
                    }
                }
                return plugins.toolkit.render("dsaction-index.html",
                                              extra_vars=vars)
            ckan.plugins.toolkit.redirect_to(controller="package",
                                             action="edit",
                                             id=pkg_dict_new['id'])
    else:
        get_data = plugins.toolkit.request.GET
        if 'action-type' in get_data and get_data[
                'action-type'] == 'export':
            print 'export'
            #task 1: work out if the dataset has items in filestore
            #get package
            pid = convert_to_id(id, context)
            query = ckan.model.Session.query(
                ckan.model.Package).filter(ckan.model.Package.id == pid)
            file_zip_path = exportPackages(query)
            #serve zip file
            fileapp = paste.fileapp.FileApp(file_zip_path)
            fileapp.content_disposition(filename='%s.zip' % id)
            status, headers, app_iter = request.call_application(fileapp)
            response.headers.update(dict(headers))
            content_type = 'application/zip'
            response.headers['Content-Type'] = content_type
            response.status = status
            #remove tmp zip file - not sure if this will cause issues deleting the file before it has been fully served?
            os.remove(file_zip_path)
            return app_iter
    return plugins.toolkit.render("dsaction-index.html", extra_vars=vars)
def _generate_resources_from_folder(self, dataset):
    '''
    Given a dataset folder, it'll return a list of resource metadata
    '''
    resources = []

    # All plain files under <data_path>/<dataset>/<metafile_dir>
    file_list = [
        f for f in os.listdir(os.path.join(
            self.config['data_path'],
            dataset,
            self.config['metafile_dir']
        ))
        if os.path.isfile(os.path.join(
            self.config['data_path'],
            dataset,
            self.config['metafile_dir'],
            f
        ))
    ]
    resource_files = self._remove_hidden_files(file_list)
    log.debug(resource_files)

    # for resource_file in resource_files:
    # meta.xml describes the dataset itself, so it is never a resource
    for resource_file in (x for x in resource_files if x != 'meta.xml'):
        resource_path = os.path.join(
            self.config['data_path'],
            dataset,
            self.config['metafile_dir'],
            resource_file
        )
        if resource_file == 'link.xml':
            # link.xml lists external URLs -> build API-type resources
            with retry_open_file(resource_path, 'r') as links_xml:
                links = (
                    etree.parse(links_xml)
                    .findall('link')
                )
                for link in links:
                    url = self._get(link, 'url')
                    if url:
                        # generate hash for URL
                        md5 = hashlib.md5()
                        md5.update(url)
                        resources.append({
                            'url': url,
                            'zh_hash': md5.hexdigest(),
                            # NOTE(review): 'lable' looks like a typo for
                            # 'label' but must match the source XML tag -
                            # confirm against the feed before changing
                            'name': self._get(link, 'lable'),
                            'description': self._get(link, 'description'),
                            'format': self._get(link, 'type'),
                            'resource_type': 'api',
                        })
        else:
            resource_file = self._validate_filename(resource_file)
            if resource_file:
                resource_dict = {
                    'name': resource_file,
                    'url': '',
                    'format': resource_file.split('.')[-1],
                    'resource_type': 'file'
                }

                # calculate the hash of this file
                BUF_SIZE = 65536  # lets read stuff in 64kb chunks!
                md5 = hashlib.md5()
                with retry_open_file(resource_path, 'rb') as f:
                    while True:
                        data = f.read(BUF_SIZE)
                        if not data:
                            break
                        md5.update(data)
                resource_dict['zh_hash'] = md5.hexdigest()

                # add file to FieldStorage
                # close=False keeps the handle open: CKAN's upload code
                # reads from it after this function returns
                with retry_open_file(resource_path, 'r', close=False) as f:  # noqa
                    field_storage = FieldStorage()
                    field_storage.file = f
                    field_storage.filename = f.name
                    resource_dict['upload'] = field_storage
                resources.append(resource_dict)

    # Python 2 cmp-based sort; ordering is delegated to _sort_resource
    sorted_resources = sorted(
        resources, cmp=lambda x, y: self._sort_resource(x, y)
    )
    return sorted_resources
def test_fileupload_handle(self):
    # Exercises the abstract FileUploadHandle (GET read, POST upload error)
    # and a concrete container-backed implementation, plus file deletion.

    # Abstract file upload handle
    container = ContainerNode(name='container')
    request = self.layer.new_request()
    abstract_upload_handle = FileUploadHandle(container, request)

    # If request method is GET, existing files are read. Abstract
    # implementation returns empty result
    self.assertEqual(abstract_upload_handle(), {'files': []})

    # If request method is POST, a file upload is assumed
    filedata = FieldStorage()
    filedata.type = 'text/plain'
    filedata.filename = 'test.txt'
    filedata.file = StringIO('I am the payload')
    request.method = 'POST'
    request.params['file'] = filedata
    # locale param would interfere with the upload request handling
    del request.params['_LOCALE_']
    res = abstract_upload_handle()
    self.assertEqual(res['files'][0]['name'], 'test.txt')
    # abstract handle never stored anything, hence size 0
    self.assertEqual(res['files'][0]['size'], 0)
    self.assertEqual(
        res['files'][0]['error'],
        'Abstract ``FileUploadHandle`` does not implement ``create_file``'
    )

    # Concrete implementation of file upload handle
    upload_handle = ContainerFileUploadHandle(container, request)

    # Upload file
    res = upload_handle()
    self.assertEqual(res['files'], [{
        'url': '/test.txt',
        'deleteType': 'GET',
        'deleteUrl': '/test.txt/filedelete_handle',
        'name': 'test.txt',
        'size': 16
    }])
    self.checkOutput("""
    <class 'cone.fileupload.tests.ContainerNode'>: container
      <class 'cone.fileupload.tests.File'>: test.txt
        body: 'I am the payload'
    """, container.treerepr())

    # Read existing files
    request = self.layer.new_request()
    upload_handle = ContainerFileUploadHandle(container, request)
    self.assertEqual(upload_handle()['files'], [{
        'url': '/test.txt',
        'deleteType': 'GET',
        'deleteUrl': '/test.txt/filedelete_handle',
        'name': 'test.txt',
        'size': 16
    }])

    # Test file delete handle
    file = container['test.txt']
    request = self.layer.new_request()
    self.assertEqual(
        filedelete_handle(file, request),
        {'files': [{'test.txt': True}]}
    )
    self.checkOutput("""
    <class 'cone.fileupload.tests.ContainerNode'>: container
    """, container.treerepr())
def testQuizAsXML(self):
    # Round-trip test: parse a quiz from QML XML, check the resulting
    # object graph, then regenerate XML and compare it line-by-line with
    # the original file.

    #test parsing
    quiz = self.root['quiz']
    path = os.path.join(os.getcwd(), 'xml_example_word.qml')
    xml_example = open(path)
    field_storage = FieldStorage()
    field_storage.file = xml_example
    field_storage.filename = 'xml_example_word.qml'
    file_upload = FileUpload(field_storage)
    qax = interfaces.IQuizAsXML(self.root['quiz'])
    status = qax.generateQuiz(file_upload)
    #status = qax.generateQuiz(xml_example)
    # a non-None status signals a parse failure; bail out early
    if status is not None:
        print status
        return
    self.assertEqual(quiz.title, u'Название теста. Вот как.')
    self.assertEqual(quiz.body, u'Contents of quiz')
    self.assertEqual(IAnnotations(quiz).get(PAGES_KEY), 1)
    self.assertEqual(interfaces.IScaleQuiz.providedBy(quiz), True)
    self.assertEqual(interfaces.ISlotQuiz.providedBy(quiz), False)
    results = [r for r in quiz.values()
               if interfaces.IQuizResult.providedBy(r)]
    self.assertEqual(results[0].title, u'Title of first result')
    self.assertEqual(results[1].title, u'Title of second result')
    # NOTE(review): the third argument here is assertEqual's failure
    # *message*, not a compared value - probably meant to compare each
    # body against u'Contents of result'; confirm intent
    self.assertEqual(results[0].body, results[1].body, u'Contents of result')
    self.assertEqual(IAnnotations(results[0]).get(INTERVAL_KEY), 10)
    self.assertEqual(IAnnotations(results[1]).get(INTERVAL_KEY), 20)
    questions = [q for q in quiz.values()
                 if interfaces.IQuizQuestion.providedBy(q)]
    self.assertEqual(questions[0].title, u'Title of first question')
    self.assertEqual(questions[1].title, u'Title of second question')
    for q in questions:
        answers = [a for a in q.values()
                   if interfaces.IQuizAnswer.providedBy(a)]
        self.assertEqual(answers[0].title, u'Title of first answer')
        self.assertEqual(answers[1].title, u'Title of second answer')
        # same 3-arg assertEqual pattern as above (third arg is the msg)
        self.assertEqual(answers[0].body, answers[1].body,
                         u'Contents of answer')
        depends = [[d for d in answer.values()
                    if interfaces.IAnswerDepends.providedBy(d)]
                   for answer in answers]
        flatten = getUtility(interfaces.IQreatureUtility, name="Flatten")
        int_ids = getUtility(interfaces.IQreatureIntIds, context=quiz)
        depends = [d for d in flatten(depends)]
        leads = [[l for l in answer.values()
                  if interfaces.IAnswerLeads.providedBy(l)]
                 for answer in answers]
        if q.title == u'Title of first question':
            leads = [l for l in flatten(leads)]
            self.assertEqual(len(leads), 1)
        # depends alternate between the first and second result
        i = 0
        for depend in depends:
            i += 1
            if divmod(i, 2)[1] == 1:
                self.assertEqual(int_ids.getObject(depend.result_id).title,
                                 u'Title of first result')
            elif divmod(i, 2)[1] == 0:
                self.assertEqual(int_ids.getObject(depend.result_id).title,
                                 u'Title of second result')
            self.assertEqual(depend.depend_value, 100)
    self.assertTrue(interfaces.IScaleQuiz.providedBy(quiz))
    self.assertTrue(interfaces.ILeadedQuiz.providedBy(quiz))
    xml_example.close()

    #test generation. to generate depends, provide the quiz with ISlotLayer also
    xml_example = open(path)
    example_lines = xml_example.readlines()
    #check for BOM also
    example_lines = ([example_lines[0].lstrip(codecs.BOM_UTF8)]
                     + example_lines[1:])
    alsoProvides(quiz, interfaces.ISlotQuiz)
    xml_generated = qax.generateXML()
    generated_lines = xml_generated.data.split('\n')
    lines_to_compare = zip(example_lines, generated_lines)
    for example_line, generated_line in lines_to_compare:
        # readlines keeps the trailing newline, split('\n') strips it
        self.assertEqual(example_line, generated_line + '\n')
    xml_example.close()
def import_stage(self, harvest_object):
    # Harvester import: sanitize the harvested package dict, delegate the
    # actual import to the parent class, then sync relationships and
    # re-upload file-backed resources fetched from the source instance.
    package_dict = json.loads(harvest_object.content)

    if not self._should_import_local(package_dict):
        # mark for deletion instead of importing
        package_dict['state'] = 'deleted'
    else:
        package_dict = self._apply_package_extras_white_list(package_dict)
        package_dict = self._apply_package_resource_extras_black_list(
            package_dict)
        package_dict = self._fix_date_in_fields(package_dict)
        package_dict = self._set_license(package_dict)
        package_dict = self._pop_black_list_resources_by_type(package_dict)
        # write the cleaned dict back so the parent import sees it
        harvest_object.content = json.dumps(package_dict)

    # upload-type resources are removed here and re-created after import
    upload_resources = self._pop_upload_resources(package_dict)

    import_stage_result = super(GuiaHarvesterPlugin,
                                self).import_stage(harvest_object)

    if import_stage_result:
        package_dict = json.loads(harvest_object.content)
        harvested_rels = package_dict.get('relationships', [])
        try:
            this_package = model.Package.get(package_dict['name'])
            if not this_package:
                raise logic.NotFound()
        except logic.NotFound as nf:
            log.info(
                'import_stage(): could not find package "{0}"; relationships not updated: {1}'
                .format(package_dict['name'], nf))
            return import_stage_result
        existing_rels = this_package.get_relationships()
        self._update_relationships(existing_rels, harvested_rels)

        for resource_dict in upload_resources:
            resource_url = resource_dict['url']
            resource_filename = resource_url.split('/')[-1]
            try:
                # download the file from the remote instance into memory
                response = requests.get(resource_url)
                resource_file = StringIO(response.content)
            except Exception, e:
                self._save_object_error(
                    'Resource not harvested for package "{0}". '
                    'Unable to fetch resource from "{1}": {2}'
                    .format(package_dict['name'], resource_url, e),
                    harvest_object, 'Import')
                continue

            # wrap the downloaded bytes so resource_create treats it
            # as a file upload
            cfs = FieldStorage()
            cfs.file = resource_file
            cfs.filename = resource_filename
            resource_dict['upload'] = cfs
            # drop fields the local instance will regenerate
            if 'created' in resource_dict:
                del resource_dict['created']
            if 'last_modified' in resource_dict:
                del resource_dict['last_modified']
            if 'api' in resource_dict:
                del resource_dict['api']
            try:
                the_resource = toolkit.get_action('resource_create')(
                    data_dict=resource_dict)
            except Exception, e:
                self._save_object_error(
                    'Resource not harvested for package "{0}". Unable to import the resource originally from "{1}": {2}'
                    .format(package_dict['name'], resource_url, e),
                    harvest_object, 'Import')
                continue
    # NOTE(review): on the success path this falls through and returns
    # None rather than import_stage_result - confirm callers don't rely
    # on the return value here
def test_attachments_form(swissvotes_app, attachments):
    # End-to-end test of AttachmentsForm: apply/update round-trips, per-locale
    # (de_CH vs fr_CH) attachment storage, replace and delete actions.
    session = swissvotes_app.session()
    names = list(attachments.keys())

    # Test apply / update
    votes = SwissVoteCollection(swissvotes_app)
    vote = votes.add(
        id=1,
        bfs_number=Decimal('1'),
        date=date(1990, 6, 2),
        legislation_number=4,
        legislation_decade=NumericRange(1990, 1994),
        title_de="Vote DE",
        title_fr="Vote FR",
        short_title_de="V D",
        short_title_fr="V F",
        votes_on_same_day=2,
        _legal_form=1,
    )

    form = AttachmentsForm()
    form.request = DummyRequest(session, DummyPrincipal())

    # ... empty
    form.apply_model(vote)
    assert all([getattr(form, name).data is None for name in names])
    form.update_model(vote)
    assert all([getattr(vote, name) is None for name in names])

    # ... add attachments (de_CH)
    for name, attachment in attachments.items():
        setattr(vote, name, attachment)
    session.flush()

    # ... not present with fr_CH
    vote.session_manager.current_locale = 'fr_CH'
    form.apply_model(vote)
    assert all([getattr(form, name).data is None for name in names])

    # ... present with de_CH
    vote.session_manager.current_locale = 'de_CH'
    form.apply_model(vote)
    for name in names:
        data = getattr(form, name).data
        assert data['size']
        assert data['filename'] == name
        assert data['mimetype'] in (
            'application/pdf',
            'application/zip',
            'application/vnd.ms-office',
            'application/octet-stream',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        )
    form.update_model(vote)
    for name in names:
        file = getattr(vote, name)
        assert file == attachments[name]
        assert file.reference.filename == name
        assert file.reference.content_type in (
            'application/pdf',
            'application/zip',
            'application/vnd.ms-office',
            'application/octet-stream',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        )

    # ... replace all de_CH
    for name in names:
        field_storage = FieldStorage()
        field_storage.file = BytesIO(f'{name}-1'.encode())
        field_storage.type = 'image/png'  # ignored
        field_storage.filename = f'{name}.png'  # extension removed
        getattr(form, name).process(DummyPostData({name: field_storage}))
    form.update_model(vote)
    assert all([
        getattr(vote, name).reference.file.read() == f'{name}-1'.encode()
        for name in names
    ])

    # ... add all fr_CH
    vote.session_manager.current_locale = 'fr_CH'
    for name in names:
        field_storage = FieldStorage()
        field_storage.file = BytesIO(f'{name}-fr'.encode())
        field_storage.type = 'image/png'  # ignored
        field_storage.filename = f'{name}.png'  # extension removed
        getattr(form, name).process(DummyPostData({name: field_storage}))
    form.update_model(vote)
    assert all([
        getattr(vote, name).reference.file.read() == f'{name}-fr'.encode()
        for name in names
    ])

    # ... delete all fr_CH
    for name in names:
        getattr(form, name).action = 'delete'
    form.update_model(vote)
    assert all([getattr(vote, name) is None for name in names])
    # deleting the fr_CH variants must not touch the de_CH ones
    vote.session_manager.current_locale = 'de_CH'
    assert all([getattr(vote, name) is not None for name in names])

    # ... delete all de_CH
    for name in names:
        getattr(form, name).action = 'delete'
    form.update_model(vote)
    assert vote.files == []

    # Test validation
    form = AttachmentsForm()
    assert form.validate()
def test_fileupload_handle(self):
    # Same scenario as the sibling test but against the newer file-entry
    # schema (view_url/download_url/delete_url keys instead of url/deleteUrl).

    # Abstract file upload handle
    container = ContainerNode(name='container')
    request = self.layer.new_request()
    abstract_upload_handle = FileUploadHandle(container, request)

    # If request method is GET, existing files are read. Abstract
    # implementation returns empty result
    self.assertEqual(abstract_upload_handle(), {'files': []})

    # If request method is POST, a file upload is assumed
    filedata = FieldStorage()
    filedata.type = 'text/plain'
    filedata.filename = 'test.txt'
    filedata.file = StringIO('I am the payload')
    request.method = 'POST'
    request.params['file'] = filedata
    # locale param would interfere with the upload request handling
    del request.params['_LOCALE_']
    res = abstract_upload_handle()
    self.assertEqual(res['files'][0]['name'], 'test.txt')
    # abstract handle never stored anything, hence size 0
    self.assertEqual(res['files'][0]['size'], 0)
    self.assertEqual(
        res['files'][0]['error'],
        'Abstract ``FileUploadHandle`` does not implement ``create_file``'
    )

    # Concrete implementation of file upload handle
    upload_handle = ContainerFileUploadHandle(container, request)

    # Upload file
    res = upload_handle()
    self.assertEqual(res['files'], [{
        'name': 'test.txt',
        'size': 16,
        'view_url': '/test.txt',
        'download_url': '/test.txt/download',
        'delete_url': '/test.txt/filedelete_handle',
        'delete_type': 'GET'
    }])
    self.checkOutput("""
    <class 'cone.fileupload.tests.ContainerNode'>: container
      <class 'cone.fileupload.tests.File'>: test.txt
        body: 'I am the payload'
    """, container.treerepr())

    # Read existing files
    request = self.layer.new_request()
    upload_handle = ContainerFileUploadHandle(container, request)
    self.assertEqual(upload_handle()['files'], [{
        'name': 'test.txt',
        'size': 16,
        'view_url': '/test.txt',
        'download_url': '/test.txt/download',
        'delete_url': '/test.txt/filedelete_handle',
        'delete_type': 'GET'
    }])

    # Test file delete handle
    file = container['test.txt']
    request = self.layer.new_request()
    self.assertEqual(
        filedelete_handle(file, request),
        {'files': [{'test.txt': True}]}
    )
    self.checkOutput("""
    <class 'cone.fileupload.tests.ContainerNode'>: container
    """, container.treerepr())
def _generate_resources_from_folder(self, dataset):
    '''
    Given a dataset folder, it'll return a list of resource metadata
    '''
    resources = []

    # All plain files under <data_path>/<dataset>/<metafile_dir>
    file_list = [
        f for f in os.listdir(os.path.join(
            self.config['data_path'],
            dataset,
            self.config['metafile_dir']
        ))
        if os.path.isfile(os.path.join(
            self.config['data_path'],
            dataset,
            self.config['metafile_dir'],
            f
        ))
    ]
    resource_files = self._remove_hidden_files(file_list)
    log.debug(resource_files)

    # for resource_file in resource_files:
    # meta.xml describes the dataset itself, so it is never a resource
    for resource_file in (x for x in resource_files if x != 'meta.xml'):
        resource_path = os.path.join(
            self.config['data_path'],
            dataset,
            self.config['metafile_dir'],
            resource_file
        )
        if resource_file == 'link.xml':
            # link.xml lists external URLs -> build API-type resources
            with retry_open_file(resource_path, 'r') as links_xml:
                links = (
                    etree.parse(links_xml)
                    .findall('link')
                )
                for link in links:
                    url = self._get(link, 'url')
                    if url:
                        # generate hash for URL
                        md5 = hashlib.md5()
                        md5.update(url)
                        resources.append({
                            'url': url,
                            'zh_hash': md5.hexdigest(),
                            # NOTE(review): 'lable' looks like a typo for
                            # 'label' but must match the source XML tag -
                            # confirm against the feed before changing
                            'name': self._get(link, 'lable'),
                            'description': self._get(link, 'description'),
                            'format': self._get(link, 'type'),
                            'resource_type': 'api',
                        })
        else:
            resource_file = self._validate_filename(resource_file)
            if resource_file:
                # unlike the sibling variant, this one marks the resource
                # as an upload and uses the filename as its url
                resource_dict = {
                    'name': resource_file,
                    'url': resource_file,
                    'url_type': 'upload',
                    'format': resource_file.split('.')[-1],
                    'resource_type': 'file'
                }

                # calculate the hash of this file
                BUF_SIZE = 65536  # lets read stuff in 64kb chunks!
                md5 = hashlib.md5()
                with retry_open_file(resource_path, 'rb') as f:
                    while True:
                        data = f.read(BUF_SIZE)
                        if not data:
                            break
                        md5.update(data)
                resource_dict['zh_hash'] = md5.hexdigest()

                # add file to FieldStorage
                # close=False keeps the handle open: CKAN's upload code
                # reads from it after this function returns
                with retry_open_file(resource_path, 'r', close=False) as f:  # noqa
                    field_storage = FieldStorage()
                    field_storage.file = f
                    field_storage.filename = f.name
                    resource_dict['upload'] = field_storage
                resources.append(resource_dict)

    # Python 2 cmp-based sort; ordering is delegated to _sort_resource
    sorted_resources = sorted(
        resources, cmp=lambda x, y: self._sort_resource(x, y)
    )
    return sorted_resources