def send():
    """Upload the file at ``file_path`` to the fileSearchContext.mask endpoint
    and stream the masked file plus the results JSON into ``folder_name``.

    Relies on module-level ``host``, ``file_path``, ``folder_name``,
    ``media_type``, ``context``, ``session`` and ``chunk_size``.

    Raises:
        Exception: if the server responds with a status >= 300.
    """
    url = f'{host}/files/fileSearchContext.mask'
    # (A stray ``headers`` dict with Accept-Encoding/Transfer-Encoding was
    # built here but never sent; it has been removed as dead code.)
    extension = os.path.splitext(file_path)[1]
    os.makedirs(folder_name, exist_ok=True)
    with open(file_path, 'rb') as f:
        # Only attach an explicit media type when one was configured.
        if len(media_type) > 0:
            fields = {
                'context': ('context', context, 'application/json'),
                'file': ('file', f, media_type),
            }
        else:
            fields = {
                'context': ('context', context, 'application/json'),
                'file': ('file', f),
            }
        encoder = MultipartEncoder(fields=fields)
        with session.post(url, data=encoder, stream=True,
                          headers={'Content-Type': encoder.content_type}) as r:
            if r.status_code >= 300:
                raise Exception(f"Failed with status {r.status_code}:\n\n{r.json()}")
            # The response is multipart: the masked file and a results JSON.
            parser = StreamingFormDataParser(headers=r.headers)
            parser.register('file', FileTarget(f'{folder_name}/masked{extension}'))
            parser.register('results', FileTarget(f'{folder_name}/results.json'))
            for chunk in r.iter_content(chunk_size):
                parser.data_received(chunk)
def prepare(self):
    """Wire up streaming multipart targets before the request body arrives."""
    self.value = ValueTarget()
    self.file_ = FileTarget('/tmp/file-{}.dat'.format(int(time())))
    parser = StreamingFormDataParser(headers=self.request.headers)
    parser.register('name', self.value)
    parser.register('file', self.file_)
    self._parser = parser
def test_file_target_not_set():
    """A FileTarget that never receives data must not create its file."""
    path = os.path.join(tempfile.gettempdir(), 'file_not_sent.txt')
    target = FileTarget(path)

    assert not os.path.exists(path)
    assert target.filename == path
    assert target.multipart_filename is None
def test_not_sent(self):
    """A target that never receives data should not create its file."""
    path = os.path.join(tempfile.gettempdir(), 'file_not_sent.txt')
    target = FileTarget(path)
    self.assertFalse(os.path.exists(path))
    self.assertEqual(target.filename, path)
    self.assertIsNone(target.multipart_filename)
def prepare(self):
    """Register streaming targets for the 'name' field and the uploaded file."""
    upload_name = 'uploaded-file-tornado-{}.dat'.format(int(time()))
    self.value = ValueTarget()
    self.file_ = FileTarget(os.path.join(tempfile.gettempdir(), upload_name))
    self._parser = StreamingFormDataParser(headers=self.request.headers)
    self._parser.register('name', self.value)
    self._parser.register('file', self.file_)
def prepare(self):
    """Raise tornado's body-size cap to 100 GiB and register parse targets."""
    self.request.connection.set_max_body_size(100 * 1024 ** 3)
    upload_name = 'uploaded-file-tornado-{}.dat'.format(int(time()))
    self.value = ValueTarget()
    self.file_ = FileTarget(os.path.join(tempfile.gettempdir(), upload_name))
    self._parser = StreamingFormDataParser(headers=self.request.headers)
    self._parser.register('name', self.value)
    self._parser.register('file', self.file_)
def mask(filename):
    """POST *filename* to the masking service and stream the masked file and
    the results JSON into the 'masked' folder.

    Relies on module-level ``context``, ``url`` and session ``s``.

    Bug fix: the upload handle was opened with a bare ``open`` and never
    closed; it is now managed by a ``with`` block.
    """
    basename = ntpath.basename(filename)
    process_files = [(filename, 'masked')]
    for file_name, masked_folder in process_files:
        with open(file_name, 'rb') as upload:
            files = {'file': upload, 'context': context}
            logging.info(f"POST: sending '{file_name}' to {url}")
            with s.post(url, files=files, stream=True) as r:
                if r.status_code >= 300:
                    logging.info(
                        f"Failed with status {r.status_code}:\n\n{r.json()}")
                    break
                logging.info(
                    f"Extracting 'masked_{basename}' and 'masked_{basename}_results.json' into {masked_folder}."
                )
                # The response is multipart: the masked file plus results JSON.
                parser = StreamingFormDataParser(headers=r.headers)
                parser.register(
                    'file', FileTarget(f'{masked_folder}/masked_{basename}'))
                parser.register(
                    'results',
                    FileTarget(f'{masked_folder}/masked_{basename}_results.json'))
                for chunk in r.iter_content(4096):
                    parser.data_received(chunk)
def prepare(self):
    """For POST requests, raise the body-size cap and register streaming
    multipart targets ('root', 'print', 'path' values plus the upload file)."""
    if self.request.method != "POST":
        return
    self.request.connection.set_max_body_size(self.max_upload_size)
    tmpname = self.file_manager.gen_temp_upload_path()
    self._targets = {field: ValueTarget() for field in ('root', 'print', 'path')}
    self._file = FileTarget(tmpname)
    self._parser = StreamingFormDataParser(self.request.headers)
    self._parser.register('file', self._file)
    for field, target in self._targets.items():
        self._parser.register(field, target)
def upload_page():
    """Stream-parse a multipart upload from the WSGI input and return the
    submitted name plus the temp path the file was written to."""
    name_target = ValueTarget()
    dest = 'uploaded-file-tornado-{}.dat'.format(int(time()))
    file_target = FileTarget(os.path.join(tempfile.gettempdir(), dest))
    parser = StreamingFormDataParser(headers=bottle.request.headers)
    parser.register('name', name_target)
    parser.register('file', file_target)
    stream = bottle.request.environ['wsgi.input']
    while True:
        chunk = stream.read(8192)
        if not chunk:
            break
        parser.data_received(chunk)
    return {'name': name_target.value, 'filename': file_target.filename}
def test_file_target_exceeds_max_size(self):
    """Feeding more bytes than MaxSizeValidator allows must raise
    ValidationError, while the target still sees start/finish callbacks."""
    # CRLF-delimited multipart body written out explicitly instead of the
    # newline-replace trick.
    body = (b'--1234\r\n'
            b'Content-Disposition: form-data; '
            b'name="files"; filename="ab.txt"\r\n'
            b'\r\n'
            b'Foo\r\n'
            b'--1234--')
    target = FileTarget('/tmp/file.txt', validator=MaxSizeValidator(1))
    parser = StreamingFormDataParser(
        headers={'Content-Type': 'multipart/form-data; boundary=1234'})
    parser.register('files', target)
    self.assertRaises(ValidationError, parser.data_received, body)
    self.assertTrue(target._started)
    self.assertTrue(target._finished)
def prepare(self) -> None:
    """POST only: raise the upload size cap, then register value targets plus
    both a file target and a SHA-256 digest target for the 'file' part."""
    super(FileUploadHandler, self).prepare()
    if self.request.method != "POST":
        return
    assert isinstance(self.request.connection, HTTP1Connection)
    self.request.connection.set_max_body_size(self.max_upload_size)
    tmpname = self.file_manager.gen_temp_upload_path()
    self._targets = {
        field: ValueTarget()
        for field in ('root', 'print', 'path', 'checksum')
    }
    self._file = FileTarget(tmpname)
    self._sha256_target = SHA256Target()
    self._parser = StreamingFormDataParser(self.request.headers)
    # Both targets receive the same 'file' part: one writes to disk, the
    # other accumulates the digest.
    self._parser.register('file', self._file)
    self._parser.register('file', self._sha256_target)
    for field, target in self._targets.items():
        self._parser.register(field, target)
def upload_file(project_name='default'):
    """Stream an uploaded zip to a temp file, process it, and forward the
    extracted image data to the project's images endpoint.

    Bug fix: the zip was written under ``tempfile.gettempdir()`` but then
    read from the hard-coded path '/tmp/temp.zip'; both now use the same
    path, so this works even when the temp dir is not /tmp.

    Raises:
        FileNotFoundError: when ``process_zip`` reports failure.
    """
    zip_path = os.path.join(tempfile.gettempdir(), 'temp.zip')
    file_ = FileTarget(zip_path)
    parser = StreamingFormDataParser(headers=request.headers)
    parser.register('file', file_)
    while True:
        chunk = request.stream.read(8192)
        if not chunk:
            break
        parser.data_received(chunk)
    status, data = process_zip(zip_path, project_name, app.basedir)
    if not status:
        raise FileNotFoundError
    resp = r.post(f'{app.base_url}{project_name}/images', json=data)
    return jsonify(resp.json())
def upload_file():
    """Flask view: on POST, stream the upload to a temp file and report the
    filename, content type and elapsed reception time; otherwise show the
    upload page."""
    if request.method != 'POST':
        return page
    file_ = FileTarget(os.path.join(tempfile.gettempdir(), 'test'))
    parser = StreamingFormDataParser(headers=request.headers)
    parser.register('file', file_)
    started = time.perf_counter()
    while True:
        chunk = request.stream.read(8192)
        if not chunk:
            break
        parser.data_received(chunk)
    elapsed = time.perf_counter() - started
    return dedent(
        '''
        <!doctype html>
        <head>
            <title>Done!</title>
        </head>
        <body>
            <h1>
                {file_name} ({content_type}): upload done
            </h1>
            <h2>
                Time spent on file reception: {duration}s
            </h2>
        </body>
        '''.format(
            file_name=file_.multipart_filename,
            content_type=file_.multipart_content_type,
            duration=elapsed,
        )
    )
def upload_file():
    """Flask view: stream the upload to a random temp name, then move it into
    MY_UPLOAD_DIR under its sanitized client filename.

    Bug fix: the final size was read from ``UPLOAD_FOLDER + finalfilename``
    (raw string concatenation against a different constant) instead of the
    file that was just renamed; it now stats the renamed file itself.
    """
    if request.method != 'POST':
        return render_template('upload.html')
    rand_name = "tmp-" + secrets.token_hex(4)
    # NOTE(review): the upload is written under tempfile.gettempdir() but
    # moved from MY_UPLOAD_DIR below — these must point at the same
    # directory; confirm against the app's configuration.
    file_ = FileTarget(os.path.join(tempfile.gettempdir(), rand_name))
    parser = StreamingFormDataParser(headers=request.headers)
    parser.register('file', file_)
    while True:
        chunk = request.stream.read(8192)
        if not chunk:
            break
        parser.data_received(chunk)
    old_file = os.path.join(MY_UPLOAD_DIR, rand_name)
    finalfilename = secure_filename(file_.multipart_filename)
    if not finalfilename:
        # No usable client filename: discard the temp file.
        os.remove(old_file)
        return """<!doctype html><style>body{color: #f8a830;background-color: #000000;}</style><head><title>File missing</title></head><body><h1>No file selected</h1></body>"""
    new_file = os.path.join(MY_UPLOAD_DIR, finalfilename)
    os.rename(old_file, new_file)
    filesize = os.path.getsize(new_file)
    return render_template("success.html", bytes_rcvd=filesize)
def upload_file():
    """On POST, stream the upload into a fixed temp file, timing reception;
    otherwise show the upload page."""
    if request.method != 'POST':
        return page
    file = FileTarget(os.path.join(tempfile.gettempdir(), "test"))
    # Flatten the header pairs into a plain dict for the parser.
    hdict = {key: value for key, value in request.headers}
    parser = StreamingFormDataParser(headers=hdict)
    parser.register('file', file)
    start = time.perf_counter()
    while True:
        chunk = request.stream.read(8192)
        if not chunk:
            break
        parser.data_received(chunk)
    print("time spent on file reception: %fs" % (time.perf_counter() - start))
    return file.multipart_filename + ": upload done"
# https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb.html#DynamoDB.Table.scan while not done: batch_name = f'batch{batch_index}' batch_index += 1 files = {'file': (batch_name, json.dumps(items), 'application/json'), 'context': context} logging.info(f"POST: sending '{batch_name}' to {url}") with session1.post(url, files=files, stream=True) as r: if r.status_code >= 300: raise Exception(f"Failed {batch_name} with status {r.status_code}:\n\n{r.json()}") results_file_name = f'{batch_name}-results.json' logging.info(f"Extracting '{results_file_name}' into the 'results' folder.") parser = StreamingFormDataParser(headers=r.headers) masked_batch = ValueTarget() parser.register('file', masked_batch) parser.register('results', FileTarget(f'results/{results_file_name}')) for chunk in r.iter_content(): parser.data_received(chunk) masked_batch = json.loads(masked_batch.value) # The batch writer will automatically handle buffering and sending items in batches. # In addition, the batch writer will also automatically handle any unprocessed items and resend them as needed. with target_table.batch_writer() as batch: for item in masked_batch: batch.put_item(Item=item) start_key = response.get('LastEvaluatedKey', None) done = start_key is None scan_kwargs['ExclusiveStartKey'] = start_key if not done: response = table.scan(**scan_kwargs)
def test_basic(self):
    """FileTarget writes received chunks to disk and keeps its metadata."""
    path = os.path.join(tempfile.gettempdir(), 'file.txt')
    target = FileTarget(path)
    target.multipart_filename = 'file001.txt'

    target.start()
    self.assertEqual(target.filename, path)
    self.assertEqual(target.multipart_filename, 'file001.txt')
    self.assertTrue(os.path.exists(path))

    for piece in (b'hello', b' ', b'world'):
        target.data_received(piece)
    target.finish()

    self.assertTrue(os.path.exists(path))
    self.assertEqual(target.filename, path)
    self.assertEqual(target.multipart_filename, 'file001.txt')
    with open(path, 'rb') as file_:
        self.assertEqual(file_.read(), b'hello world')
# Prompt for a file via the Tk file dialog; an empty selection exits the
# enclosing loop (the surrounding while/try begins before this excerpt).
filename = askopenfilename()
if not filename:
    break
basename = ntpath.basename(filename)
process_files = [(filename, 'masked')]
for file_name, masked_folder in process_files:
    # NOTE(review): the file handle opened here is never closed explicitly.
    files = {'file': open(file_name, 'rb'), 'context': context}
    logging.info(f"POST: sending '{file_name}' to {url}")
    with s.post(url, files=files, stream=True) as r:
        if r.status_code >= 300:
            logging.info(
                f"Failed with status {r.status_code}:\n\n{r.json()}"
            )
            break
        logging.info(
            f"Extracting 'masked_{basename}' and 'masked_{basename}_results.json' into {masked_folder}."
        )
        # Stream the multipart response: the masked file plus results JSON.
        parser = StreamingFormDataParser(headers=r.headers)
        parser.register(
            'file', FileTarget(f'{masked_folder}/masked_{basename}'))
        parser.register(
            'results', FileTarget(
                f'{masked_folder}/masked_{basename}_results.json'))
        for chunk in r.iter_content(4096):
            parser.data_received(chunk)
# Closes a try-block that begins before this excerpt; always release the session.
finally:
    teardown(s)
def test_basic(self):
    """start/data_received/finish should materialize the bytes on disk."""
    filename = '/tmp/file.txt'
    target = FileTarget(filename)

    target.start()
    self.assertTrue(os.path.exists(filename))

    for piece in (b'hello', b' ', b'world'):
        target.data_received(piece)
    target.finish()

    self.assertTrue(os.path.exists(filename))
    with open(filename, 'rb') as handle:
        self.assertEqual(handle.read(), b'hello world')
def test_file_target_basic():
    """FileTarget writes received chunks to its path and keeps metadata."""
    path = os.path.join(tempfile.gettempdir(), 'file.txt')
    target = FileTarget(path)
    target.multipart_filename = 'file001.txt'

    target.start()
    assert target.filename == path
    assert target.multipart_filename == 'file001.txt'
    assert os.path.exists(path)

    for piece in (b'hello', b' ', b'world'):
        target.data_received(piece)
    target.finish()

    assert target.filename == path
    assert target.multipart_filename == 'file001.txt'
    assert os.path.exists(path)
    with open(path, 'rb') as file_:
        assert file_.read() == b'hello world'
# Mask each MongoDB document individually: POST it as a JSON "file", keep the
# masked document in memory, stream the audit results to disk, then insert
# the masked document into the target collection.  Relies on ``data``,
# ``context``, ``url``, ``session`` and ``out_coll`` set up before this
# excerpt.
for index, document in enumerate(data, 1):
    files = {
        'file': ('document.json', json_util.dumps(document),
                 'application/json'),
        'context': context
    }
    logging.info(f"POST: sending document {index} to {url}")
    with session.post(url, files=files, stream=True) as r:
        if r.status_code >= 300:
            raise Exception(
                f"Failed with status {r.status_code}:\n\n{r.json()}"
            )
        logging.info(
            f"Placing results into 'results/results{index}.json'.")
        parser = StreamingFormDataParser(headers=r.headers)
        output = ValueTarget()  # masked document accumulates in memory
        parser.register('file', output)
        parser.register('results',
                        FileTarget(f'results/results{index}.json'))
        for chunk in r.iter_content(4096):
            parser.data_received(chunk)
        logging.info(
            f"Inserting masked document {index} into darkshield.masked..."
        )
        out_coll.insert_one(json_util.loads(output.value))
# Closes a try-block that begins before this excerpt; always release the session.
finally:
    teardown(session)
def do_POST(self):
    """Handle POSTs to the soundboard HTTP server.

    '/upload-sound/' registers a new sound file (spooled via a temp file and
    moved into cfg.SOUNDS_DIR); '/edit-sound/' updates the aliases / GPIO
    settings of an existing sound.  The whole multipart body is read into
    memory and fed to StreamingFormDataParser in a single call.
    """
    if cfg.VERBOSE_LOGGING:
        print("Got request from {0}".format(self.client_address))
    try:
        if not self.check_auth(True):
            return
        # Headers are sent up-front; the body written below is the outcome text.
        self.send_response(200)
        self.send_header('Content-type', "text/html")
        self.end_headers()
        self.wfile.write(b"")
        # Read the entire request body at once (no streaming from the socket).
        content_length = int(self.headers['Content-Length'])
        post_data = self.rfile.read(content_length)
        if self.path == "/upload-sound/":
            # Named temp file gives us a path the parser can write into and
            # that we can later move; closed immediately so FileTarget owns it.
            f = tempfile.NamedTemporaryFile(delete=False)
            f.close()
            file = FileTarget(f.name)
            aliases = ValueTarget()
            use_gpio_pin = ValueTarget()
            gpio_pin = ValueTarget()
            parser = StreamingFormDataParser(headers=self.headers)
            parser.register("aliases", aliases)
            parser.register("use-gpio-pin", use_gpio_pin)
            parser.register("gpio-pin", gpio_pin)
            parser.register("file", file)
            parser.data_received(post_data)
            if SOUNDBOARD.contains_sound_file(file.multipart_filename):
                print("Sound file is already registered, not going to add it again")
                self.wfile.write(b"Could not add duplicate sound file")
                return
            # Accept only files whose guessed MIME type is audio/*.
            mime, encoding = mimetypes.guess_type(file.multipart_filename)
            print("Received file '{0}' with mimetype '{1}'".format(file.multipart_filename, mime))
            if str(mime).startswith("audio"):
                save_path = cfg.SOUNDS_DIR + "/" + file.multipart_filename
                print("Saving file to '{0}'".format(save_path))
                shutil.move(f.name, save_path)
                self.wfile.write(b"Sound saved")
                # Comma-separated aliases -> stripped list.
                aliases = aliases.value.decode("utf-8")
                aliases = [x.strip() for x in aliases.split(',')]
                if use_gpio_pin.value == b"on":
                    SOUNDBOARD.add_sound(file.multipart_filename, aliases, int(gpio_pin.value.decode("utf-8")))
                else:
                    SOUNDBOARD.add_sound(file.multipart_filename, aliases)
            else:
                # Not audio: discard the temp file.
                os.remove(f.name)
                self.wfile.write(b"Not a sound file!")
        elif self.path == "/edit-sound/":
            sound = ValueTarget()
            aliases = ValueTarget()
            use_gpio_pin = ValueTarget()
            gpio_pin = ValueTarget()
            parser = StreamingFormDataParser(headers=self.headers)
            parser.register("sound", sound)
            parser.register("aliases", aliases)
            parser.register("use-gpio-pin", use_gpio_pin)
            parser.register("gpio-pin", gpio_pin)
            parser.data_received(post_data)
            print("Got a request for editing sound file '{0}'".format(sound.value.decode("utf-8")))
            s = SOUNDBOARD.get_sound_by_name(sound.value.decode("utf-8"))
            if not s:
                print("Sound to edit does not exist")
                self.wfile.write(b"Sound does not exist")
            else:
                # Mutate the sound's config entry in place, then persist.
                print("Editing data for '{0}'".format(s["file"]))
                s["aliases"] = aliases.value.decode("utf-8").split(",")
                if use_gpio_pin.value == b"on":
                    s["GPIO_pin"] = int(gpio_pin.value.decode("utf-8"))
                else:
                    s.pop("GPIO_pin", None)
                if cfg.VERBOSE_LOGGING:
                    print(s)
                SOUNDBOARD.write_to_config()
                self.wfile.write(b"Saved changes successfully")
    except Exception as ex:
        # NOTE(review): a 200 status has usually been sent already by the time
        # an error occurs here, so this 500 may arrive mid-response.
        print("ERROR: {0}".format(ex))
        self.send_response(500)
        self.end_headers()
        if cfg.VERBOSE_LOGGING:
            raise ex
# For every (file, media type, output folder) tuple, upload the file to the
# masking endpoint and stream back the masked copy plus 'results.json'.
# Relies on ``process_files``, ``context``, ``url`` and ``session`` set up
# before this excerpt.
for file_name, media_type, masked_folder in process_files:
    with open(file_name, 'rb') as f:
        os.makedirs(masked_folder, exist_ok=True)
        # Streaming multipart upload keeps large files out of memory.
        encoder = MultipartEncoder(
            fields={
                'context': ('context', context, 'application/json'),
                'file': (file_name, f, media_type)
            })
        logging.info(f"POST: sending '{file_name}' to {url}")
        with session.post(
                url, data=encoder, stream=True,
                headers={'Content-Type': encoder.content_type}) as r:
            if r.status_code >= 300:
                raise Exception(
                    f"Failed with status {r.status_code}:\n\n{r.json()}"
                )
            logging.info(
                f"Extracting '{file_name}' and 'results.json' into {masked_folder}."
            )
            # The response is multipart: the masked file plus a results JSON.
            parser = StreamingFormDataParser(headers=r.headers)
            parser.register('file', FileTarget(f'{masked_folder}/{file_name}'))
            parser.register(
                'results', FileTarget(f'{masked_folder}/results.json'))
            for chunk in r.iter_content(4096):
                parser.data_received(chunk)
# Closes a try-block that begins before this excerpt; always release the session.
finally:
    teardown(session)
try:
    # Prepare the HTTP session / masking contexts, then mask each sample file.
    setup(session)
    url = 'http://localhost:8080/api/darkshield/files/fileSearchContext.mask'
    context = json.dumps({
        "fileSearchContextName": file_search_context_name,
        "fileMaskContextName": file_mask_context_name
    })
    process_files = [('example.jpeg', 'image/jpeg', 'jpeg-masked'),
                     ('example.pdf', 'application/pdf', 'pdf-masked')]
    for file_name, media_type, masked_folder in process_files:
        with open(file_name, 'rb') as f:
            os.makedirs(masked_folder, exist_ok=True)
            # Streaming multipart upload keeps large files out of memory.
            encoder = MultipartEncoder(fields={
                'context': ('context', context, 'application/json'),
                'file': (file_name, f, media_type)
            })
            logging.info(f"POST: sending '{file_name}' to {url}")
            with session.post(url, data=encoder, stream=True,
                              headers={'Content-Type': encoder.content_type}) as r:
                if r.status_code >= 300:
                    raise Exception(f"Failed with status {r.status_code}:\n\n{r.json()}")
                logging.info(f"Extracting '{file_name}' and 'results.json' into {masked_folder}.")
                # The response is multipart: the masked file plus a results JSON.
                parser = StreamingFormDataParser(headers=r.headers)
                parser.register('file', FileTarget(f'{masked_folder}/{file_name}'))
                parser.register('results', FileTarget(f'{masked_folder}/results.json'))
                for chunk in r.iter_content(4096):
                    parser.data_received(chunk)
finally:
    # Always release session resources, even when masking fails.
    teardown(session)