def test_large_file():
    """Large datasets parse correctly when delivered in one data_received call."""
    datasets = (
        'image-500k.png',
        'image-2560x1600.png',
        'image-600x400.png',
        'image-high-res.jpg',
    )

    for name in datasets:
        with open_dataset(name) as fd:
            expected = fd.read()

        content_type, payload = encoded_dataset(name)

        target = ValueTarget()
        parser = StreamingFormDataParser(headers={'Content-Type': content_type})
        parser.register(name, target)
        parser.data_received(payload)

        assert target.value == expected
def test_file_content_multiple():
    """Feeding the encoded body in 50-byte chunks yields the full value."""
    with open_dataset('file.txt') as fd:
        expected = fd.read()

    content_type, body = encoded_dataset('file.txt')

    target = ValueTarget()
    parser = StreamingFormDataParser(headers={'Content-Type': content_type})
    parser.register('file.txt', target)

    # Stream the body to the parser 50 bytes at a time, in order.
    chunk_size = 50
    for offset in range(0, len(body), chunk_size):
        parser.data_received(body[offset:offset + chunk_size])

    assert target.value == expected
def test_special_filenames(self):
    """Filenames containing quotes, semicolons and backslashes are parsed."""
    template = '''\
--1234
Content-Disposition: form-data; name=files; filename={}

Foo
--1234--'''

    for filename in (
        'a;b.txt',
        'a"b.txt',
        'a";b.txt',
        'a;"b.txt',
        'a";";.txt',
        'a\\"b.txt',
        'a\\b.txt',
    ):
        body = template.format(filename).replace('\n', '\r\n').encode('utf-8')

        target = ValueTarget()
        parser = StreamingFormDataParser(
            headers={'Content-Type': 'multipart/form-data; boundary=1234'})
        parser.register('files', target)
        parser.data_received(body)

        self.assertEqual(target.value, b'Foo')
def test_file_content_multiple(self):
    """Chunked delivery (50 bytes at a time) reassembles the file contents."""
    with open(data_file_path('file.txt'), 'rb') as fd:
        expected = fd.read()

    content_type, body = load_file(data_file_path('file.txt'))

    target = ValueTarget()
    parser = StreamingFormDataParser(
        headers={'Content-Type': content_type})
    parser.register('file.txt', target)

    # Replay the body through the parser in consecutive 50-byte slices.
    step = 50
    for start in range(0, len(body), step):
        parser.data_received(body[start:start + step])

    self.assertEqual(target.value, expected)
def test_content_type_passed_to_target():
    """The part's Content-Type header is forwarded to the registered target."""
    name = 'image-600x400.png'

    with open_dataset(name) as fd:
        expected = fd.read()

    target = ValueTarget()

    with open_dataset(name) as fd:
        encoder = MultipartEncoder(fields={name: (name, fd, 'image/png')})

        parser = StreamingFormDataParser(
            headers={'Content-Type': encoder.content_type})
        parser.register(name, target)
        parser.data_received(encoder.to_string())

    assert target.value == expected
    assert target.multipart_content_type == 'image/png'
def test_file_upload():
    """A single-file upload sets value, filename and the start/finish flags."""
    payload = b'''\
--1234
Content-Disposition: form-data; name="files"; filename="ab.txt"

Foo
--1234--'''.replace(b'\n', b'\r\n')

    target = ValueTarget()

    parser = StreamingFormDataParser(
        headers={'Content-Type': 'multipart/form-data; boundary=1234'}
    )
    parser.register('files', target)
    parser.data_received(payload)

    assert target.multipart_filename == 'ab.txt'
    assert target.value == b'Foo'
    assert target._started
    assert target._finished
def test_extra_headers():
    """Extra per-part headers (Content-Type, Content-Transfer-Encoding) are tolerated."""
    # example from https://tools.ietf.org/html/rfc2388
    payload = b'''\
--1234
Content-Disposition: form-data; name="files"
Content-Type: text/plain;charset=windows-1250
Content-Transfer-Encoding: quoted-printable

Joe owes =80100.
--1234--'''.replace(b'\n', b'\r\n')

    target = ValueTarget()

    parser = StreamingFormDataParser(
        headers={'Content-Type': 'multipart/form-data; boundary=1234'}
    )
    parser.register('files', target)
    parser.data_received(payload)

    assert target.value == b'Joe owes =80100.'
def test_missing_filename_directive():
    """A part without filename= still parses; multipart_filename stays unset."""
    payload = b'''\
--1234
Content-Disposition: form-data; name="files"

Foo
--1234--
'''.replace(b'\n', b'\r\n')

    target = ValueTarget()
    assert not target.multipart_filename

    parser = StreamingFormDataParser(
        headers={'Content-Type': 'multipart/form-data; boundary=1234'}
    )
    parser.register('files', target)
    parser.data_received(payload)

    assert target.value == b'Foo'
    assert not target.multipart_filename
def test_basic(self):
    """start/data_received/finish accumulate all chunks into .value."""
    target = ValueTarget()
    self.assertEqual(target.value, b'')

    target.multipart_filename = None
    target.start()

    self.assertIsNone(target.multipart_filename)
    self.assertEqual(target.value, b'')

    for chunk in (b'hello', b' ', b'world'):
        target.data_received(chunk)

    target.finish()

    self.assertIsNone(target.multipart_filename)
    self.assertEqual(target.value, b'hello world')
colNum = 0 for value in row: # if this is a blob type, handle differently if data_types[colNum] == cx_Oracle.DB_TYPE_BLOB: value2 = value.read() rowvalues.append(value2) files = { 'file': io.BytesIO(value2), 'context': contextfile } with s.post(urlfile, files=files, stream=True) as r: if r.status_code >= 300: raise Exception( f"Failed with status {r.status_code}:\n\n{r.json()}" ) fileval = ValueTarget() parser = StreamingFormDataParser(headers=r.headers) parser.register('file', fileval) #parser.register('results', FileTarget("resultsblob{}_{}.json".format(count,colNum))) for chunk in r.iter_content(chunk_size=4096): parser.data_received(chunk) query = "UPDATE {} SET {} = :1 WHERE {} = :2".format( table, field_names[colNum], field_names[firstNum]) if len(fileval.value) > 0: rowmaskedvalues.append(fileval.value) else: rowmaskedvalues.append(value) else: rowvalues.append(value) # try to get a value to use as a reference for update statements. Hopefully a unique row id. if firstval == '':
for index, document in enumerate(data, 1): files = { 'file': ('document.json', json_util.dumps(document), 'application/json'), 'context': context } logging.info(f"POST: sending document {index} to {url}") with session.post(url, files=files, stream=True) as r: if r.status_code >= 300: raise Exception( f"Failed with status {r.status_code}:\n\n{r.json()}" ) logging.info( f"Placing results into 'results/results{index}.json'.") parser = StreamingFormDataParser(headers=r.headers) output = ValueTarget() parser.register('file', output) parser.register('results', FileTarget(f'results/results{index}.json')) for chunk in r.iter_content(4096): parser.data_received(chunk) logging.info( f"Inserting masked document {index} into darkshield.masked..." ) out_coll.insert_one(json_util.loads(output.value)) finally: teardown(session)
def do_POST(self):
    """Handle POST requests for the soundboard web UI.

    Routes on self.path:
      * '/upload-sound/' — receive a sound file upload (plus alias/GPIO form
        fields), store it under cfg.SOUNDS_DIR and register it.
      * '/edit-sound/'   — update aliases / GPIO pin of an existing sound.

    The whole body is buffered (Content-Length bytes) and replayed through
    StreamingFormDataParser in one call.
    """
    if cfg.VERBOSE_LOGGING:
        print("Got request from {0}".format(self.client_address))
    try:
        # Auth check sends its own error response when it fails.
        if not self.check_auth(True):
            return
        # Response headers go out before the body is even parsed; all later
        # outcomes are reported via plain-text body writes on this 200.
        self.send_response(200)
        self.send_header('Content-type', "text/html")
        self.end_headers()
        self.wfile.write(b"")
        # Buffer the entire multipart body into memory.
        content_length = int(self.headers['Content-Length'])
        post_data = self.rfile.read(content_length)
        if self.path == "/upload-sound/":
            # Upload goes into a temp file first; it is moved into place
            # only after the mimetype check passes.
            f = tempfile.NamedTemporaryFile(delete=False)
            f.close()
            # NOTE(review): 'file' shadows the builtin of the same name.
            file = FileTarget(f.name)
            aliases = ValueTarget()
            use_gpio_pin = ValueTarget()
            gpio_pin = ValueTarget()
            parser = StreamingFormDataParser(headers=self.headers)
            parser.register("aliases", aliases)
            parser.register("use-gpio-pin", use_gpio_pin)
            parser.register("gpio-pin", gpio_pin)
            parser.register("file", file)
            parser.data_received(post_data)
            # Reject duplicates by original upload filename.
            if SOUNDBOARD.contains_sound_file(file.multipart_filename):
                print("Sound file is already registered, not going to add it again")
                self.wfile.write(b"Could not add duplicate sound file")
                return
            # Guess the mimetype from the uploaded name, not the content.
            mime, encoding = mimetypes.guess_type(file.multipart_filename)
            print("Received file '{0}' with mimetype '{1}'".format(file.multipart_filename, mime))
            if str(mime).startswith("audio"):
                save_path = cfg.SOUNDS_DIR + "/" + file.multipart_filename
                print("Saving file to '{0}'".format(save_path))
                shutil.move(f.name, save_path)
                self.wfile.write(b"Sound saved")
                # NOTE(review): 'aliases' is rebound from ValueTarget to a
                # list of stripped, comma-separated strings here.
                aliases = aliases.value.decode("utf-8")
                aliases = [x.strip() for x in aliases.split(',')]
                # Checkbox fields arrive as b"on" when ticked.
                if use_gpio_pin.value == b"on":
                    SOUNDBOARD.add_sound(file.multipart_filename, aliases, int(gpio_pin.value.decode("utf-8")))
                else:
                    SOUNDBOARD.add_sound(file.multipart_filename, aliases)
            else:
                # Not audio: drop the temp file.
                os.remove(f.name)
                self.wfile.write(b"Not a sound file!")
        elif self.path == "/edit-sound/":
            sound = ValueTarget()
            aliases = ValueTarget()
            use_gpio_pin = ValueTarget()
            gpio_pin = ValueTarget()
            parser = StreamingFormDataParser(headers=self.headers)
            parser.register("sound", sound)
            parser.register("aliases", aliases)
            parser.register("use-gpio-pin", use_gpio_pin)
            parser.register("gpio-pin", gpio_pin)
            parser.data_received(post_data)
            print("Got a request for editing sound file '{0}'".format(sound.value.decode("utf-8")))
            s = SOUNDBOARD.get_sound_by_name(sound.value.decode("utf-8"))
            if not s:
                print("Sound to edit does not exist")
                self.wfile.write(b"Sound does not exist")
            else:
                print("Editing data for '{0}'".format(s["file"]))
                # NOTE(review): unlike the upload branch, aliases are not
                # stripped of surrounding whitespace here — confirm intended.
                s["aliases"] = aliases.value.decode("utf-8").split(",")
                if use_gpio_pin.value == b"on":
                    s["GPIO_pin"] = int(gpio_pin.value.decode("utf-8"))
                else:
                    s.pop("GPIO_pin", None)
                if cfg.VERBOSE_LOGGING:
                    print(s)
                SOUNDBOARD.write_to_config()
                self.wfile.write(b"Saved changes successfully")
    except Exception as ex:
        # 200 headers were already sent above, so this 500 is best-effort.
        print("ERROR: {0}".format(ex))
        self.send_response(500)
        self.end_headers()
        if cfg.VERBOSE_LOGGING:
            raise ex
def on_post(self, req, resp):
    """Submit a leave request sent as multipart/form-data.

    Expects a 'leavesData' part (JSON) and an optional 'proofImage' part.
    Raises falcon.HTTPBadRequest (codes 400-403, 406) on malformed input
    and falcon.HTTPForbidden (410-412) on business-rule rejections; sets
    resp.status to 200 on success. Interface unchanged.
    """
    payload = req.context['user']['user']

    # Require a multipart/form-data request body.
    content_type = req.get_header('Content-Type')
    if content_type is None:
        raise falcon.HTTPBadRequest(code=406,
                                    description='not found Content-Type. ')
    if content_type[0:19] != 'multipart/form-data':
        raise falcon.HTTPBadRequest(
            code=400,
            description=
            'wrong Content-Type, only support multipart/form-data ')

    def convert_lowercase_mutlipart(req_bytes_data):
        # Normalize lower-case MIME header names to their canonical form so
        # cgi-style clients are parsed the same as standards-compliant ones.
        data = {
            'content-disposition': 'Content-Disposition',
            'content-type': 'Content-Type'
        }
        for k, v in data.items():
            req_bytes_data = req_bytes_data.replace(
                bytes(k, encoding='utf-8'), bytes(v, encoding='utf-8'))
        return req_bytes_data

    parser = StreamingFormDataParser(headers=req.headers)
    leave_data_bytes = ValueTarget()
    parser.register('leavesData', leave_data_bytes)
    # save in memory don't do anything to it !
    leave_proof_image_bytes = ValueTarget()
    parser.register('proofImage', leave_proof_image_bytes)

    # load request
    parser.data_received(convert_lowercase_mutlipart(req.stream.read()))

    # BUGFIX: the original guard was `leave_proof_image_bytes != None`,
    # which is always true for a freshly constructed ValueTarget; when no
    # proofImage part was uploaded, multipart_filename is None and the
    # extension slice below raised TypeError (-> 500). Validate only when
    # a file was actually received.
    if leave_proof_image_bytes.multipart_filename is not None:
        if (leave_proof_image_bytes.multipart_filename[-3:] not in [
                'png', 'jpg', 'PNG', "JPG"
        ]) and (leave_proof_image_bytes.multipart_filename[-4:]
                not in ["jpeg", "JPEG"]):
            raise falcon.HTTPBadRequest(
                code=401, description='file type not support')
        # len() is the actual upload size; sys.getsizeof() (used before)
        # also counted interpreter object overhead.
        if len(leave_proof_image_bytes.value
               ) > config.LEAVE_PROOF_IMAGE_SIZE_LIMIT:
            raise falcon.HTTPBadRequest(
                code=402, description='file size over limit.')

    try:
        leave_data = json.loads(leave_data_bytes.value.decode('utf-8'))
    except json.decoder.JSONDecodeError:
        raise falcon.HTTPBadRequest(
            code=403, description='leavesData JSONDecodeError ')

    # NOTE(review): the whole ValueTarget (not .value) is forwarded here,
    # matching the original behavior — confirm submit_leave expects it.
    submit_status = leave_cache.submit_leave(
        username=payload['username'],
        leave_data=leave_data,
        leave_proof=leave_proof_image_bytes)

    if isinstance(submit_status, bool):
        if submit_status is True:
            resp.status = falcon.HTTP_200
            return True
    elif isinstance(submit_status, int):
        if submit_status == error_code.LEAVE_SUBMIT_WRONG_DATE:
            raise falcon.HTTPForbidden(
                code=410, description="leave date not accept.")
        elif submit_status == error_code.LEAVE_SUBMIT_NEED_PROOF:
            raise falcon.HTTPForbidden(code=411,
                                       description='need proof image')
        elif submit_status == error_code.LEAVE_SUBMIT_DATE_CONFLICT:
            raise falcon.HTTPForbidden(
                code=412,
                description='request leave date, is already submitted.')
        elif submit_status == error_code.LEAVE_SUBMIT_SOMETHING_ERROR:
            pass
    # Anything else (including unknown status codes) is a server error.
    raise falcon.HTTPInternalServerError()
def test_value_target_basic():
    """start/data_received/finish accumulate all chunks into .value."""
    target = ValueTarget()
    assert target.value == b''

    target.multipart_filename = None
    target.start()

    assert target.multipart_filename is None
    assert target.value == b''

    for chunk in (b'hello', b' ', b'world'):
        target.data_received(chunk)

    target.finish()

    assert target.multipart_filename is None
    assert target.value == b'hello world'
def do_POST(self):
    """Handle POST requests for the restaurants CRUD UI.

    Routes on self.path:
      * '/restaurants/new'      — create a restaurant from the form data.
      * '/restaurants/<id>/edit'   — rename restaurant <id>.
      * '/restaurants/<id>/delete' — delete restaurant <id>.
    Successful requests answer 303 with a redirect back to /restaurants.
    """
    try:
        # New-restaurant request
        if self.path == '/restaurants/new':
            # Read the request body (Content-Length bytes)
            content_len = int(self.headers.get('Content-Length', '0'))
            data_b = self.rfile.read(content_len)
            # Parse the form data
            form_parser = StreamingFormDataParser(headers=self.headers)
            restaurant_name = ValueTarget()
            form_parser.register('restaurant_name', restaurant_name)
            form_parser.data_received(data_b)
            # Create the DB entry (empty names are silently ignored)
            name_str = restaurant_name.value.decode()
            if name_str:
                RESTAURANTS_DB.insertNewRestaurant(name_str)
            # Reply with a redirect to the main page
            self.send_response(303, 'Redirecto to main page')
            self.send_header('Location', '/restaurants')
            self.end_headers()
        # Edit-restaurant request
        if self.path.endswith('/edit'):
            # Read the request body
            content_len = int(self.headers.get('Content-Length', '0'))
            data_b = self.rfile.read(content_len)
            # Parse the form data
            form_parser = StreamingFormDataParser(headers=self.headers)
            restaurant_name = ValueTarget()
            form_parser.register('restaurant_name', restaurant_name)
            form_parser.data_received(data_b)
            # Extract the restaurant id from the URL path
            # NOTE(review): 'id' shadows the builtin of the same name.
            try:
                id = int(self.path.split('/')[-2])
            except ValueError:
                self.send_response(400, 'Bad request')
                return
            # Update the restaurant in the DB (empty names are ignored)
            new_name = restaurant_name.value.decode()
            if new_name:
                print("Setting restaurant name")
                RESTAURANTS_DB.setRestaurantNameById(id, new_name)
            # Reply with a redirect to the main page
            self.send_response(303, 'Redirect to main page')
            self.send_header('Location', '/restaurants')
            self.end_headers()
        if self.path.endswith('/delete'):
            # Extract the restaurant id from the URL path
            try:
                id = int(self.path.split('/')[-2])
            except ValueError:
                self.send_response(400, 'Bad request')
                return
            # Delete the restaurant from the DB
            RESTAURANTS_DB.deleteRestaurantById(id)
            # Reply with a redirect to the main page
            self.send_response(303, 'Redirect to main page')
            self.send_header('Location', '/restaurants')
            self.end_headers()
    except IOError:
        self.send_response(500, 'Server error while attending the request')
def test_not_sent(self):
    """A target that never received data stays empty with no filename."""
    fresh = ValueTarget()

    self.assertEqual(fresh.value, b'')
    self.assertIsNone(fresh.multipart_filename)
# This code will read the table sequentially, however parallel processing is possible # for faster scans, see # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb.html#DynamoDB.Table.scan while not done: batch_name = f'batch{batch_index}' batch_index += 1 files = {'file': (batch_name, json.dumps(items), 'application/json'), 'context': context} logging.info(f"POST: sending '{batch_name}' to {url}") with session1.post(url, files=files, stream=True) as r: if r.status_code >= 300: raise Exception(f"Failed {batch_name} with status {r.status_code}:\n\n{r.json()}") results_file_name = f'{batch_name}-results.json' logging.info(f"Extracting '{results_file_name}' into the 'results' folder.") parser = StreamingFormDataParser(headers=r.headers) masked_batch = ValueTarget() parser.register('file', masked_batch) parser.register('results', FileTarget(f'results/{results_file_name}')) for chunk in r.iter_content(): parser.data_received(chunk) masked_batch = json.loads(masked_batch.value) # The batch writer will automatically handle buffering and sending items in batches. # In addition, the batch writer will also automatically handle any unprocessed items and resend them as needed. with target_table.batch_writer() as batch: for item in masked_batch: batch.put_item(Item=item) start_key = response.get('LastEvaluatedKey', None) done = start_key is None scan_kwargs['ExclusiveStartKey'] = start_key
def test_value_target_not_set():
    """A freshly constructed ValueTarget has no filename and an empty value."""
    fresh = ValueTarget()

    assert fresh.multipart_filename is None
    assert fresh.value == b''