def get_tokens(self, text, unfiltered=False):
    """
    Return an iterable of (tokentype, value) pairs generated from
    `text`. If `unfiltered` is set to `True`, the filtering mechanism
    is bypassed even if filters are defined.

    Also preprocess the text, i.e. expand tabs and strip it if
    wanted and applies registered filters.

    :param text: string or file-like object to tokenize
    :param unfiltered: when True, skip `apply_filters` entirely
    :return: generator of (tokentype, value) pairs
    """
    if isinstance(text, basestring):
        # Whitespace preprocessing only applies to in-memory strings,
        # not to file-like input.
        if self.stripall:
            text = text.strip()
        elif self.stripnl:
            text = text.strip('\n')
        if sys.version_info[0] < 3 and isinstance(text, unicode):
            # Py2 unicode: re-encode to a UTF-8 byte stream and record
            # the encoding so downstream consumers decode consistently.
            text = StringIO(text.encode('utf-8'))
            self.encoding = 'utf-8'
        else:
            text = StringIO(text)

    def streamer():
        # Drop the position index `i`, keeping only (tokentype, value).
        for i, t, v in self.get_tokens_unprocessed(text):
            yield t, v

    stream = streamer()
    if not unfiltered:
        stream = apply_filters(stream, self.filters, self)
    return stream
def test_file_input(self):
    """
    Test file input

    :return: void
    """
    uploads = [
        ('file_argument_1', Encoding.normalize('')),
        ('file_argument_2', self.rand_str(5)),
        ('file_argument_3', self.rand_str(10)),
    ]
    rule = '/' + self.rand_str(20)

    # Build the multipart POST payload: each entry is (stream, filename).
    post_data = {}
    for name, contents in uploads:
        if sys.version_info < (3, 0):
            stream = StringIO(contents)
        else:
            stream = BytesIO(contents.encode())
        post_data[name] = (stream, name + '.txt')

    # Issue the request and verify each file round-trips intact.
    with self.app.test_request_context(rule, method='POST', data=post_data):
        input = Input(request)
        self.assert_equal(len(uploads), len(input))
        for name, contents in uploads:
            self.assert_in(name, input)
            self.assert_is_instance(input[name], FileStorage)
            self.assert_equal(name + '.txt', Encoding.normalize(input[name].filename))
            self.assert_equal(contents, Encoding.normalize(input[name].stream.read()))
def __init__(self, data):
    """
    Build a reader over XSON input.

    :param data: bytes, text (py2 ``unicode``), or an open file-like
        object; strings are wrapped in an in-memory stream first.
    """
    # isinstance() instead of exact `type(...) is` checks: accepts
    # subclasses of bytes/unicode and is the idiomatic type test.
    if isinstance(data, bytes):
        data = StringIO(data)
    elif isinstance(data, unicode):
        # NOTE(review): py2-style text handling (`unicode` is a NameError
        # on py3 for non-bytes input) — confirm this module is py2-only.
        data = StringIO(data.encode('utf-8'))
    stream = _xson._stream_from_fileobj(data)
    self.reader = _xson._reader_from_stream(stream)
def _write_restore_blob(self, restore, db):
    """
    Store a restore payload in the blob database and record its
    identifier and length on this object.

    :param restore: text, bytes, or a file-like object
    :param db: blob database exposing ``put``
    """
    # Coerce text/bytes into a file-like object before handing to db.put.
    if isinstance(restore, unicode):
        stream = StringIO(restore.encode("utf-8"))
    elif isinstance(restore, bytes):
        stream = StringIO(restore)
    else:
        stream = restore
    info = db.put(stream)
    self.restore_blob_id = info.identifier
    self.content_length = info.length
def _write_restore_blob(self, restore, db):
    """
    Store a restore payload in the blob database under a random URL id
    and record its identifier and length on this object.

    :param restore: text, bytes, or a file-like object
    :param db: blob database exposing ``put``
    """
    # Wrap plain text/bytes in an in-memory stream; pass file objects through.
    if isinstance(restore, six.text_type):
        blob = StringIO(restore.encode("utf-8"))
    elif isinstance(restore, bytes):
        blob = StringIO(restore)
    else:
        blob = restore
    info = db.put(blob, random_url_id(16))
    self.restore_blob_id = info.identifier
    self.content_length = info.length
def get_output(line):
    """
    Parse an XML document given as a string and return the corrected
    relation structure derived from its element tree.

    :param line: XML document as a text string
    :return: result of ``correct`` applied to the derived relations
    """
    source = StringIO(line.encode('utf-8'))
    root = ET.parse(source).getroot()
    # Progressively refine the tree into parent/child relations.
    nodes = get_nodes_iter(root, [])
    child_relation = get_child_relation(get_cchild(nodes))
    parents = get_parents(get_children(child_relation))
    return correct(root, relations(parents))
def do_POST(self): """Serve a POST request.""" #print r, info, "by: ", self.client_address if sys.version_info.major == 2: r, info = self.deal_post_data_2() f = StringIO() f.write('<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">') f.write("<html>\n<title>Upload Result Page</title>\n") f.write("<body>\n<h2>Upload Result Page</h2>\n") f.write("<hr>\n") if r: f.write("<strong>Success:</strong>") else: f.write("<strong>Failed:</strong>") f.write(info) f.write("<br><a href=\"%s\">back</a>" % self.headers['referer']) f.write("<hr><small>Powered By: bones7456, check new version at ") f.write("<a href=\"http://li2z.cn/?s=SimpleHTTPServerWithUpload\">") f.write("here</a>.</small></body>\n</html>\n") length = f.tell() f.seek(0) else: r, info = self.deal_post_data_3() info = info.replace('\n', '<br>') f = ('<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">') +\ ('<html><head>') +\ ('<meta http-equiv="Content-Type" content="text/html; charset=utf-8">') +\ ('<title>Upload Result Page</title>') +\ ('</head><body>') +\ ('<h1>Upload Result Page</h1>') +\ ('<hr>') if r: f = f + ('<strong>Success:<strong><br/>') + info else: f = f + ('<strong>Failed:<strong>') + info f = f + '<br><a href="%s">back</a>' % self.headers['referer'] +\ '</body></html>' f = f.encode('utf-8') length = len(f) self.send_response(200) self.send_header("Content-type", "text/html") self.send_header("Content-Length", str(length)) self.end_headers() if f: if sys.version_info.major == 2: self.copyfile(f, self.wfile) f.close() else: self.wfile.write(f)
def users(self, request, pk=None, **kwargs):
    """Bulk-create tenant users from CSV (POST/PUT) or export them (GET).

    :param request: DRF request; POST/PUT must carry a 'csv' entry
        (file upload or JSON string).
    :param pk: tenant lookup key, resolved via ``get_object``.
    :return: DRF Response (POST/PUT) or a CSV HttpResponse (GET).
    """
    tenant = self.get_object()
    if request.method in ('POST', 'PUT'):
        if 'csv' not in request.data:
            return Response(
                "Expecting 'csv' parameter as a file or JSON string",
                status=HTTP_400_BAD_REQUEST)
        csvfile = request.data['csv']
        # A JSON-string payload arrives as text; wrap it as a file object
        # so the CSV reader can consume it.
        if isinstance(csvfile, basestring):
            csvfile = StringIO(csvfile.encode('utf-8'))
        reader = UnicodeDictReader(csvfile)
        tenant_url = self.get_serializer(instance=tenant).data['url']
        # Attach the tenant URL to every row so the serializer can link users.
        data = [dict(tenant=tenant_url, **row) for row in reader]
        serializer = serializers.UserSerializer(
            data=data, many=True, context={'request': request})
        serializer.is_valid(raise_exception=True)
        for user in serializer.validated_data:
            # The task derives the tenant from tenant_uuid; drop the URL field.
            del user['tenant']
            # User creation is asynchronous via the task queue.
            send_task('exchange', 'create_user')(tenant_uuid=tenant.uuid.hex,
                                                 notify=user.pop('notify'),
                                                 **user)
        return Response("%s users scheduled for creation" % len(serializer.validated_data))
    elif request.method == 'GET':
        users = models.User.objects.filter(tenant=tenant)
        serializer = serializers.UserSerializer(
            instance=users, many=True, context={'request': request})
        response = HttpResponse(content_type='text/csv')
        response[
            'Content-Disposition'] = 'attachment; filename="%s_users.csv"' % tenant.backend_id
        # Internal/bookkeeping fields that should not appear in the export.
        exclude = ('url', 'tenant', 'tenant_uuid', 'tenant_domain', 'manager',
                   'notify')
        headers = [
            f for f in serializers.UserSerializer.Meta.fields if f not in exclude
        ]
        writer = UnicodeDictWriter(response, fieldnames=headers)
        writer.writeheader()
        writer.writerows(serializer.data)
        return response
def put_attachment(self, content, name=None, content_type=None, content_length=None):
    """Put attachment in blob database

    See `get_short_identifier()` for restrictions on the upper
    bound for number of attachments per object.

    :param content: String or file object.
    :param name: attachment name; falls back to ``content.name``.
    :param content_type: MIME type stored in the blob metadata.
    :param content_length: currently unused here; length comes from
        the blob db's put result.
    :raises InvalidAttachment: if no name can be determined.
    :return: True on success.
    """
    db = get_blob_db()
    if name is None:
        name = getattr(content, "name", None)
    if name is None:
        raise InvalidAttachment("cannot save attachment without name")
    # Remember any blob previously stored under this name so it can be
    # deleted (or deferred for deletion) after the new one is saved.
    old_meta = self.blobs.get(name)
    # Normalize text/bytes into file-like objects for the blob db.
    if isinstance(content, unicode):
        content = StringIO(content.encode("utf-8"))
    elif isinstance(content, bytes):
        content = StringIO(content)
    bucket = self._blobdb_bucket()
    # do we need to worry about BlobDB reading beyond content_length?
    info = db.put(content, get_short_identifier(), bucket=bucket)
    self.external_blobs[name] = BlobMeta(
        id=info.identifier,
        content_type=content_type,
        content_length=info.length,
        digest=info.digest,
    )
    # A blob entry supersedes any legacy couch attachment of the same name.
    if self.migrating_blobs_from_couch and self._attachments:
        self._attachments.pop(name, None)
    if self._atomic_blobs is None:
        # Not in an atomic-blobs context: persist immediately, then
        # remove the replaced blob.
        self.save()
        if old_meta and old_meta.id:
            db.delete(old_meta.id, bucket)
    elif old_meta and old_meta.id:
        # In an atomic context: defer deletion of the old blob until
        # the context commits.
        self._atomic_blobs[name].append(old_meta)
    return True
def put_attachment(self, content, name=None, content_type=None, content_length=None):
    """Put attachment in blob database

    :param content: String or file object.
    :param name: attachment name; falls back to ``content.name``.
    :param content_type: MIME type stored in the blob metadata.
    :param content_length: currently unused here; length comes from
        the blob db's put result.
    :raises InvalidAttachment: if no name can be determined.
    :return: True on success.
    """
    db = get_blob_db()
    if name is None:
        name = getattr(content, "name", None)
    if name is None:
        raise InvalidAttachment("cannot save attachment without name")
    # Remember any blob previously stored under this name so it can be
    # deleted (or deferred for deletion) after the new one is saved.
    old_meta = self.blobs.get(name)
    # Normalize text/bytes into file-like objects for the blob db.
    if isinstance(content, unicode):
        content = StringIO(content.encode("utf-8"))
    elif isinstance(content, bytes):
        content = StringIO(content)
    bucket = self._blobdb_bucket()
    # do we need to worry about BlobDB reading beyond content_length?
    info = db.put(content, name, bucket)
    self.external_blobs[name] = BlobMeta(
        # NOTE(review): id is taken from info.name here — confirm the
        # blob db's put() returns the stored identifier under .name and
        # not under .identifier.
        id=info.name,
        content_type=content_type,
        content_length=info.length,
        digest=info.digest,
    )
    # A blob entry supersedes any legacy couch attachment of the same name.
    if self.migrating_blobs_from_couch and self._attachments:
        self._attachments.pop(name, None)
    if self._atomic_blobs is None:
        # Not in an atomic-blobs context: persist immediately, then
        # remove the replaced blob.
        self.save()
        if old_meta and old_meta.id:
            db.delete(old_meta.id, bucket)
    elif old_meta and old_meta.id:
        # In an atomic context: defer deletion of the old blob until
        # the context commits.
        self._atomic_blobs[name].append(old_meta)
    return True
def test_combined_input(self):
    """
    Test combined input

    :return: void
    """
    get_arguments = [
        ('get_argument_string_1', ''),
        ('get_argument_string_2', self.rand_str(5)),
        ('get_argument_string_3', self.rand_str(10)),
        ('get_argument_integer_1', 0),
        ('get_argument_integer_2', self.rand_int(1, 10)),
        ('get_argument_integer_3', self.rand_int(20, 100)),
    ]
    post_arguments = [
        ('post_argument_string_1', ''),
        ('post_argument_string_2', self.rand_str(5)),
        ('post_argument_string_3', self.rand_str(10)),
        ('post_argument_integer_1', 0),
        ('post_argument_integer_2', self.rand_int(1, 10)),
        ('post_argument_integer_3', self.rand_int(20, 100)),
    ]
    file_arguments = [
        ('file_argument_1', ''),
        ('file_argument_2', self.rand_str(5)),
        ('file_argument_3', self.rand_str(10)),
    ]
    rule = self._get_url_with_arguments('/' + self.rand_str(20), get_arguments)

    # Form fields go straight into the POST body; files become
    # (stream, filename) tuples.
    data = dict(post_arguments)
    for name, contents in file_arguments:
        if sys.version_info < (3, 0):
            stream = StringIO(contents)
        else:
            stream = BytesIO(contents.encode())
        data[name] = (stream, name + '.txt')

    # Fire the request and check that query, form and file arguments
    # are all visible through Input.
    with self.app.test_request_context(rule, method='POST', data=data):
        input = Input(request)
        expected_count = len(get_arguments) + len(post_arguments) + len(file_arguments)
        self.assert_equal(expected_count, len(input))
        for name, contents in get_arguments + post_arguments:
            self.assert_in(name, input)
            self.assert_equal(str(contents), Encoding.normalize(input[name]))
        for name, contents in file_arguments:
            self.assert_in(name, input)
            self.assert_is_instance(input[name], FileStorage)
            self.assert_equal(name + '.txt', Encoding.normalize(input[name].filename))
            self.assert_equal(contents, Encoding.normalize(input[name].stream.read()))
def users(self, request, pk=None, **kwargs):
    """Bulk-create tenant users from CSV (POST/PUT) or export them (GET).

    POST/PUT: accepts a 'csv' entry (file upload or JSON string),
    validates rows and the tenant's mailbox-size quota, then schedules
    one asynchronous creation task per user.
    GET: streams the tenant's users back as a CSV attachment.

    :param request: DRF request object.
    :param pk: tenant lookup key, resolved via ``get_object``.
    :return: DRF Response (POST/PUT) or a CSV HttpResponse (GET).
    """
    tenant = self.get_object()
    if request.method in ('POST', 'PUT'):
        if 'csv' not in request.data:
            return Response(
                {
                    'detail':
                    "Expecting 'csv' parameter as a file or JSON string"
                },
                status=HTTP_400_BAD_REQUEST)
        csvfile = request.data['csv']
        # A JSON-string payload arrives as text; wrap it as a file object
        # so the CSV reader can consume it.
        if isinstance(csvfile, basestring):
            csvfile = StringIO(csvfile.encode('utf-8'))
        reader = UnicodeDictReader(csvfile)
        tenant_url = self.get_serializer(instance=tenant).data['url']
        try:
            # Attach the tenant URL to every row so the serializer can
            # link users to this tenant.
            data = [dict(tenant=tenant_url, **row) for row in reader]
        # A wide exception as CSV is such a great module.
        # FIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed to Exception.
        except Exception:
            return Response({'detail': "Could not parse CSV payload"},
                            status=HTTP_400_BAD_REQUEST)
        serializer = serializers.UserSerializer(
            data=data, many=True, context={'request': request})
        serializer.is_valid(raise_exception=True)
        # Reject the whole batch if the combined mailbox sizes would
        # overflow the tenant's quota.
        mailbox_size_sum = sum(
            [user['mailbox_size'] for user in serializer.validated_data])
        mailbox_quota = tenant.quotas.get(name='mailbox_size')
        if mailbox_quota.is_exceeded(mailbox_size_sum):
            return Response(
                {
                    'detail':
                    "Size of users' mailboxes exceeds tenant's quota %s" %
                    mailbox_quota.limit
                },
                status=HTTP_400_BAD_REQUEST)
        # check if global notification has been requested
        notify_user = request.data.get('notify', False)
        for user in serializer.validated_data:
            # The task derives the tenant from tenant_uuid; drop the URL field.
            del user['tenant']
            send_task('exchange', 'create_user')(tenant_uuid=tenant.uuid.hex,
                                                 notify=notify_user,
                                                 **user)
        return Response({
            'status':
            "%s users scheduled for creation" % len(serializer.validated_data)
        })
    elif request.method == 'GET':
        users = models.User.objects.filter(tenant=tenant)
        serializer = serializers.UserSerializer(
            instance=users, many=True, context={'request': request})
        response = HttpResponse(content_type='text/csv')
        response[
            'Content-Disposition'] = 'attachment; filename="%s_users.csv"' % tenant.backend_id
        # Internal/bookkeeping fields that should not appear in the export.
        exclude = ('url', 'tenant', 'tenant_uuid', 'tenant_domain', 'manager',
                   'notify', 'send_on_behalf_members', 'send_as_members',
                   'quotas')
        headers = [
            f for f in serializers.UserSerializer.Meta.fields
            if f not in exclude
        ]
        writer = UnicodeDictWriter(response, fieldnames=headers)
        writer.writeheader()
        writer.writerows(serializer.data)
        return response