def _upload_video(self, video_file, callback, session, proxy):
    """Upload a video file to the nginx uploader endpoint.

    Streams the file through a multipart monitor so ``callback`` (if given)
    receives upload-progress events.

    :param video_file: path to the local video file
    :param callback: optional progress callable for the monitor
    :param session: requests session (falls back to ``self.http_settings.session``)
    :param proxy: proxies mapping (falls back to ``self.http_settings.proxy``)
    :return: the server-side filename hash used for the upload
    """
    session = session or self.http_settings.session
    proxy = proxy or self.http_settings.proxy
    filename = path.Path(video_file).name
    filehash = self._generate_file_hash(filename)
    # Close the file handle once the POST completes (the original leaked it).
    with open(video_file, 'rb') as video_fh:
        fields = [
            ('filename', filehash),
            ('content', (filename, video_fh)),
        ]
        encoder = MultipartEncoder(fields)
        if callback:
            monitor = MultipartEncoderMonitor(encoder, callback)
        else:
            monitor = MultipartEncoderMonitor(encoder)
        url = '{nginx_uploader_url}?filename={filehash}' \
              '&X-Progress-ID={filehash}'.format(
                  nginx_uploader_url=self.nginx_uploader_url,
                  filehash=filehash)
        # Prime the server-side progress tracker before the upload starts.
        self._get_upload_progress2(filehash)
        session.post(
            url,
            data=monitor,
            proxies=proxy,
            headers={'Content-Type': monitor.content_type})
    return filehash
def create_multipart_monitor(encoder, callback=None):
    """Wrap *encoder* in a :class:`MultipartEncoderMonitor`.

    :param encoder: the MultipartEncoder to monitor
    :param callback: optional progress callable invoked on each read
    :raises InvalidUploadCallback: if *callback* is truthy but not callable
    :return: a monitor around *encoder*
    """
    if callback:
        # `callable()` is the idiomatic check; `hasattr(x, '__call__')`
        # is a Python 2 relic.
        if not callable(callback):
            raise InvalidUploadCallback(
                'Callback {c} needs to be callable'.format(c=callback))
        return MultipartEncoderMonitor(encoder, callback)
    return MultipartEncoderMonitor(encoder)
def f(m: MultipartEncoderMonitor):
    """Progress callback: flush accumulated bytes to the bar every ~100 KB."""
    delta = m.bytes_read - m.prev_bytes_read
    m.buf_bytes_read += delta
    m.prev_bytes_read = m.bytes_read
    if m.buf_bytes_read >= 1e5:
        bar.update(m.buf_bytes_read)
        m.buf_bytes_read = 0
def post_files(self, files: FileList, **data: JSON) -> JSON:
    """Upload *files* plus a JSON payload using the custom json-data protocol.

    Files are gzip-compressed before upload; a progress bar is shown for
    payloads over 1 MB.

    :raises ReportTooLargeError: when the encoded payload exceeds the
        public/private size limit.
    :return: the processed server response
    """
    file_header = {"Content-Encoding": "gzip"}

    def mk_file_fields(field_name: str, f: Path):
        # compress the file, in-place
        # TODO - disable compression where unneeded, e.g. .gz, .zip, .png, etc
        with compress_file(f) as f_gz:
            return (
                field_name,
                (f.name, open(f_gz, "rb"), guess_type(f), file_header),
            )

    fields = [mk_file_fields(k, x) for (k, v) in files.items() for x in v]
    fields.append(("json_data", json.dumps(data)))
    e = MultipartEncoder(fields=fields)
    extra_headers = {"Content-Type": f"{e.content_type}; dp-files=True"}
    max_size = 25 if c.config.is_public else 100
    if e.len > max_size * SIZE_1_MB:
        # BUG FIX: the original message had a stray 'f' before the size
        # ("over f25 MB") — a typo'd f-string prefix inside the string.
        raise ReportTooLargeError(
            f"Report and attachments over {max_size} MB after compression (~{e.len/SIZE_1_MB:.1f} MB) - please reduce the size of your charts/plots"
        )
    elif e.len > SIZE_1_MB:
        log.debug("Using upload monitor")
        fill_char = click.style("=", fg="yellow")
        with click.progressbar(
            length=e.len,
            width=0,
            show_eta=True,
            label="Uploading files",
            fill_char=fill_char,
        ) as bar:

            def f(m: MultipartEncoderMonitor):
                # update every 100KB
                m.buf_bytes_read += m.bytes_read - m.prev_bytes_read
                m.prev_bytes_read = m.bytes_read
                if m.buf_bytes_read >= 1e5:
                    bar.update(m.buf_bytes_read)
                    m.buf_bytes_read = 0

            m = MultipartEncoderMonitor(e, callback=f)
            m.buf_bytes_read = 0
            m.prev_bytes_read = 0
            r = self.session.post(self.url, data=m, headers=extra_headers, timeout=self.timeout)
    else:
        r = self.session.post(self.url, data=e, headers=extra_headers, timeout=self.timeout)
    return _process_res(r)
def upContent(self, request):
    """
    Upload thumbnail and content files to the datastore server, then
    remove them (and their directory) from local storage.

    :param request: mapping with '_id', 'thumbnail_key', 'content_key'
        and 'size' entries describing the stored item.
    """
    oid = request['_id']['$oid']
    thumbkey = request['thumbnail_key']
    contentkey = request['content_key']
    thumbpath = self.dataDIR + oid + '/' + thumbkey
    contentpath = self.dataDIR + oid + '/' + contentkey
    thumburl = self.localServerURLformat % ('image/new')
    contenturl = self.localServerURLformat % ('content/new')
    thumbfid = 'thumb_' + oid
    contentfid = 'content_' + oid
    # Upload thumbnail (no progress callback needed — it is small).
    thumbfile = open(thumbpath, 'rb')
    enc = MultipartEncoder({thumbfid: (thumbkey, thumbfile)})
    mon = MultipartEncoderMonitor(enc)
    r = requests.post(thumburl, data=mon,
                      headers={'Content-Type': mon.content_type})
    thumbfile.close()
    # Upload content
    contentsize = int(request['size'])

    def contentmonitor(monitor):
        # Report fractional progress plus current device status.
        progress = float(monitor.bytes_read) / contentsize
        mdisp.updateMsg(progress, self.getStorage(), self.getNetwork(),
                        self.getPower())
        return

    contentfile = open(contentpath, 'rb')
    enc = MultipartEncoder({contentfid: (contentkey, contentfile)})
    mon = MultipartEncoderMonitor(enc, contentmonitor)
    r = requests.post(contenturl, data=mon,
                      headers={'Content-Type': mon.content_type})
    contentfile.close()
    # Trash content once uploaded
    os.remove(thumbpath)
    os.remove(contentpath)
    try:
        os.rmdir(self.dataDIR + oid)
    except OSError:
        # Narrowed from a bare `except:` — rmdir only raises OSError
        # (e.g. directory not empty / missing).
        mdisp.errMsg(1234, 'rmdir fail')
    # Final status update
    mdisp.updateMsg(1.0, self.getStorage(), self.getNetwork(),
                    self.getPower())
    return
def upload(self, video_file, title, tags, description, tag_ids,
           upload_id=None, callback=None, session=None, proxy=None):
    """Upload a video with its metadata via the CGI uploader.

    :param video_file: path to the local video file
    :param title: video title
    :param tags: iterable of keyword strings (joined with spaces)
    :param description: video description
    :param tag_ids: single channel id or list/tuple of channel ids
    :param upload_id: reuse an existing upload id, or initiate a new one
    :param callback: optional progress callable for the monitor
    :param session: requests session (defaults to ``self.http_settings.session``)
    :param proxy: proxies mapping (defaults to ``self.http_settings.proxy``)
    :return: the upload id
    """
    if isinstance(tag_ids, (list, tuple)):
        tag_ids = [("listch", str(t)) for t in tag_ids]
    else:
        tag_ids = [("listch", str(tag_ids))]
    upload_id = upload_id or self._initiate_new_upload()
    session = session or self.http_settings.session
    proxy = proxy or self.http_settings.proxy
    main_url = self._get_path_to_cgi()
    # NOTE: removed unused `now = arrow.utcnow().timestamp` (dead code; it
    # also referenced the method as an attribute without calling it).
    q = "upload_id={upload_id}".format(upload_id=upload_id)
    url = "{main_url}ubr_upload.pl?{q}".format(main_url=main_url, q=q)
    # Close the file handle when the POST completes (the original leaked it).
    with open(video_file, 'rb') as video_fh:
        fields = [
            ('MAX_FILE_SIZE', str(os.path.getsize(video_file))),
            ('upload_range', str(1)),
            ('adult', ''),
            ('field_myvideo_keywords', " ".join(tags)),
            ('field_myvideo_title', title),
            ('field_myvideo_descr', description),
            ('upfile_0', (path.Path(video_file).name, video_fh)),
        ]
        fields.extend(tag_ids)
        multipart_encoder = MultipartEncoder(fields)
        if callback:
            monitor = MultipartEncoderMonitor(multipart_encoder, callback)
        else:
            monitor = MultipartEncoderMonitor(multipart_encoder)
        session.post(url,
                     data=monitor,
                     headers={'Content-Type': monitor.content_type},
                     proxies=proxy)
    self._start_progress_tracker(upload_id=upload_id)
    return upload_id
def _get_sequence_data_pkg(self, sequence_file, upload_id):
    """
    Build the multipart encoder for *sequence_file* and wrap it in a
    monitor so upload progress is reported via ``self._send_file_callback``.
    """
    encoder = self._get_multipart_encoder(sequence_file, upload_id)
    monitor = MultipartEncoderMonitor(encoder, self._send_file_callback)

    # httplib reads uploads in hard-coded 8192-byte chunks; replace the
    # monitor's read with one that always pulls 1 MiB regardless of the
    # size the caller requested.
    # More details: https://github.com/requests/toolbelt/issues/75#issuecomment-237189952
    monitor._read = monitor.read

    def _read_one_mebibyte(size):
        return monitor._read(1024 * 1024)

    monitor.read = _read_one_mebibyte
    return monitor
def upload(url, filename, check=True):
    """POST *filename* as a product file to the Ops Manager endpoint *url*.

    :param url: endpoint path, appended to the configured opsmgr base url
    :param filename: local file to upload
    :param check: when True, validate the response via check_response
    :return: the requests response
    """
    creds = get_credentials()
    url = creds.get('opsmgr').get('url') + url
    # Close the file handle once the POST completes (the original leaked it).
    with open(filename, 'rb') as fh:
        multipart = MultipartEncoderMonitor.from_fields(
            fields={
                'product[file]':
                    ('product[file]', fh, 'application/octet-stream')
            },
            callback=ProgressBar().update
        )
        response = requests.post(
            url,
            auth=auth(creds),
            verify=False,
            data=multipart,
            headers={'Content-Type': multipart.content_type}
        )
    sys.stdout.write('.100%\n')
    sys.stdout.flush()
    if response.status_code == 422:
        errors = response.json()["errors"]
        try:
            product = errors.get('product', [])
            for reason in product:
                if reason.startswith('Metadata already exists for'):
                    print('-', 'version already uploaded')
                    return response
        except Exception:
            # Narrowed from a bare `except:` — best-effort parse of the
            # error payload; fall through to the normal response check.
            pass
    check_response(response, check)
    return response
def upload_file(upload_url, upload_fields, filepath, callback=None):
    """Upload a pre-signed file to Cloudsmith.

    :param upload_url: pre-signed URL to POST the file to
    :param upload_fields: mapping of pre-signed form fields
    :param filepath: local file to upload
    :param callback: optional progress callable for the monitor
    :raises ApiException: when the upload response is an HTTP error
    """
    # BUG FIX: dict.items() returns a view in Python 3, which has no
    # .append(); materialise it into a list first.
    upload_fields = list(upload_fields.items())
    upload_fields.append(
        ('file', (os.path.basename(filepath), click.open_file(filepath, 'rb'))))
    encoder = MultipartEncoder(upload_fields)
    monitor = MultipartEncoderMonitor(encoder, callback=callback)

    config = cloudsmith_api.Configuration()
    if config.proxy:
        proxies = {'http': config.proxy, 'https': config.proxy}
    else:
        proxies = None

    headers = {'content-type': monitor.content_type}
    client = get_files_api()
    headers['user-agent'] = client.api_client.user_agent
    resp = requests.post(upload_url, data=monitor, headers=headers,
                         proxies=proxies)

    try:
        resp.raise_for_status()
    except requests.RequestException as exc:
        raise ApiException(resp.status_code,
                           headers=exc.response.headers,
                           body=exc.response.content)
def submit(self):
    """Zip the student files and POST them to the submission endpoint.

    Stores the server's JSON response in ``self.submission``.
    :raises RuntimeError: with a user-facing message on 403/404/429/500.
    """
    self.submit_url = self._get_submit_url()
    mkzip(os.path.dirname(sys.argv[0]), SUBMISSION_FILENAME,
          self.filenames, self.max_zip_size)
    # `with` guarantees the zip handle is closed even when the request
    # raises (the original leaked it on every error path).
    with open(SUBMISSION_FILENAME, "rb") as fd:
        m = MultipartEncoder(
            fields={'zipfile': ('student.zip', fd, 'application/zip')})
        monitor = MultipartEncoderMonitor(m, self.upload_progress_callback)
        try:
            r = self.s.post(self.submit_url, data=monitor,
                            headers={'Content-Type': monitor.content_type})
            r.raise_for_status()
        except requests.exceptions.HTTPError as e:
            if r.status_code == 403:
                raise RuntimeError("You don't have access to this quiz.")
            elif r.status_code in [404, 429, 500]:
                try:
                    response_json = r.json()
                    message = response_json.get("message") or "An internal server error occurred."
                except Exception:
                    # Narrowed from a bare `except:` — body was not JSON
                    # (or not a mapping); fall back to a generic message.
                    message = "An unknown error occurred"
                raise RuntimeError(message)
            else:
                raise
    self.submission = r.json()
def upload_file_to_device(self):
    """Upload the file selected in the left pane to the GeoShark device.

    Shows a progress dialog; on success refreshes the device file list.
    """
    if not self.parent.geoshark_widget.device_on_connect or self.server is None:
        return
    file = self.left_file_model.filePath(self.lefttableview.currentIndex())
    filename = file.split('/')[-1]

    url = 'http://{}/data/{}'.format(self.server,
                                     '/'.join(self.right_file_model_path))
    filesize = os.path.getsize(file)
    if filesize == 0:
        show_error(_('File error'), _('File size must be non zero.'))
        return

    progress = ProgressBar(text=_('Upload File Into GeoShark'),
                           window_title=_('Upload file to GeoShark'))
    # Close the file handle when the POST finishes (the original leaked it).
    with open(file, 'rb') as fh:
        encoder = MultipartEncoder(
            fields={'upload_file': (filename, fh)}  # added mime-type here
        )
        # Scale progress to 99%; the final 1% is set after a successful response.
        data = MultipartEncoderMonitor(
            encoder,
            lambda monitor: progress.update((monitor.bytes_read / filesize) * 99))
        try:
            res = requests.post(url, data=data,
                                headers={'Content-Type': encoder.content_type},
                                timeout=5)
        except requests.exceptions.RequestException:
            progress.close()
            show_error(_('GeoShark error'), _('GeoShark is not responding.'))
            return
    if res.ok:
        progress.update(100)
        self.right_file_model_update()
def multipart_post(data):
    """POST *data* to the catbox.moe API as streamed multipart form data."""
    monitor = MultipartEncoderMonitor(MultipartEncoder(fields=data))
    headers = {'Content-Type': monitor.content_type}
    return requests.post("https://catbox.moe/user/api.php",
                         data=monitor, headers=headers)
def _multipart_post(self, data):
    """POST *data* to the configured file host as streamed multipart form data."""
    monitor = MultipartEncoderMonitor(MultipartEncoder(fields=data))
    headers = {'Content-Type': monitor.content_type}
    return requests.post(self.file_host_url, data=monitor, headers=headers)
def upload(path, fname):
    """Upload *path* to zippyshare as *fname*, showing a tqdm progress bar.

    :param path: local file path
    :param fname: filename to present to the server
    :return: the raw response body (HTML) from the upload endpoint
    """
    server = get_server()
    total = check_size(path)
    url = "https://{}.zippyshare.com/upload".format(server)
    pb = tqdm(total=total, unit='B', unit_scale=True)
    # Close the file handle when done (the original leaked it).
    with open(path, 'rb') as fh:
        data = {
            'name': fname,
            'file': (fname, fh, get_mime_type(fname))
        }
        if cfg.private == True:
            data['private'] = "true"
        else:
            data['notprivate'] = "true"
        multi = MultipartEncoder(fields=data)
        try:
            # The callback receives the monitor itself; the original lambda
            # parameter confusingly shadowed the encoder's name.
            monitor = MultipartEncoderMonitor(
                multi, lambda mon: pb.update(monitor.bytes_read - pb.n))
            s.headers.update({'Content-Type': monitor.content_type})
            r = s.post(url, data=monitor)
            r.raise_for_status()
        finally:
            if s.headers.get('Content-Type'):
                del s.headers['Content-Type']
            pb.close()
    return r.text
def upload(self):
    """Upload the .ipa package to pgyer and print the resulting download URL."""
    self.progress = 0
    self.fileName = urlPath + scheme_Name + '.ipa'
    self.fileName = '/Users/Jerry/Desktop/Demo/test/test.ipa'
    self.file_size = os.path.getsize(self.fileName) / 1024 / 1024.0
    # NOTE(review): the API key / user key are hard-coded here — consider
    # moving them to configuration.
    url = "http://www.pgyer.com/apiv2/app/upload?" \
          "_api_key=2e8571d626b9a8c8b752e59624481847&" \
          "userKey=e34bf7d5e70d78339d60ea7bb68867c8&" \
          "enctype=multipart/form-data"
    # BUG FIX: the .ipa is binary — it must be opened in 'rb' (the original
    # opened it in text mode) — and the handle must be closed afterwards.
    with open(self.fileName, 'rb') as file:
        filejson = {'file': ('doctor-iphone', file)}  # other params go alongside
        e = MultipartEncoder(fields=filejson)
        m = MultipartEncoderMonitor(e, callback=self.callBack)
        r = requests.post(url, data=m, headers={'Content-Type': m.content_type})
    j = r.json()
    if j:
        jsReult = j
        print('\n')
        if j['code'] == '0':
            print('**upload success!\n ipa下载地址:' + jsReult['buildShortcutUrl'] + '**')
        else:
            print('**fail:' + jsReult['message'] + '**')
def upload(url, input_path):
    """Upload *input_path* to *url*, printing percentage progress.

    Exits the process when the file is missing (code 2) or the server
    reports failure (code 1).

    :return: (id, mp4_filename) from the server's JSON response
    """
    if not os.path.exists(input_path):
        sys.exit(2)
    filesize = os.stat(input_path).st_size
    filename = os.path.basename(input_path)
    filename = urllib.parse.quote_plus(filename)

    def callback(monitor):
        print('\ruploading: ', end="")
        print('\ruploading: ' + str(round(100 * monitor.bytes_read / filesize)) + '%', end="")

    # Close the file handle when done (the original leaked it).
    with open(input_path, 'rb') as fh:
        m = MultipartEncoderMonitor.from_fields(
            fields={'file': (filename, fh)},
            callback=callback
        )
        # BUG FIX(review): the original posted to `upload_url`, which is not
        # defined in this function — the target is the `url` parameter
        # (otherwise `url` is never used). Confirm no module-level
        # `upload_url` global was intended.
        r = requests.post(url, data=m, headers={'Content-Type': m.content_type})
    d = r.json()
    if d['result'] == 'ng':
        sys.stderr.write('upload fail \n')
        sys.exit(1)
    print("")
    print('id=' + str(d['id']) + ', mp4_filename=' + d['mp4_filename'])
    return d['id'], d['mp4_filename']
def handle(self, *args, **options):
    """Zip the local sqlite database in memory and upload it to the central server."""
    self.stderr.write(
        "`syncdata` command is deprecated and will be removed in 0.13.0 in favor of `sync`. Type `kolibri manage sync --help` for available options."
    )
    self.stdout.write("Uploading database to central server...\n")
    buff = io.BytesIO()
    zip_archive = zipfile.ZipFile(buff, mode="w", compression=zipfile.ZIP_DEFLATED)
    zip_archive.write(DB_PATH, "db.sqlite3")
    zip_archive.close()
    # BUG FIX: rewind the buffer. After writing the zip, the stream position
    # is at EOF, so the encoder would otherwise read — and upload — zero bytes.
    buff.seek(0)
    encoder = MultipartEncoder({
        "project": options["project"],
        "file": ("db.sqlite3.zip", buff, "application/octet-stream"),
    })
    monitor = MultipartEncoderMonitor(encoder, create_callback(encoder))
    r = requests.post(
        CENTRAL_SERVER_DB_UPLOAD_URL,
        data=monitor,
        headers={"Content-Type": monitor.content_type},
    )
    print("\nUpload finished! (Returned status {0} {1})".format(
        r.status_code, r.reason))
def _request_fn(self, url, data, headers, files, requests_kwargs, progress):
    """POST *data* (and optional *files*) to *url*.

    When files are present they are streamed via a MultipartEncoder, with
    an optional progress monitor.

    :raises CKANAPIError: if a file field name collides with a data field
    :return: (status_code, response_text)
    """
    if files:
        # use streaming
        # dict comprehension instead of dict([...]) — same mapping, clearer.
        newfiles = {
            k: (getattr(files[k], 'name', 'upload_filename'), files[k])
            for k in files
        }
        intersect = set(data.keys()) & set(newfiles.keys())
        if intersect:
            raise CKANAPIError('field-name for files ("{}")'.format(
                ', '.join(list(intersect))) +
                ' cannot also be field name in data_dict.')
        data.update(newfiles)
        m = MultipartEncoder(data)
        if progress:
            m = MultipartEncoderMonitor(m, progress(m))
        headers.update({'Content-Type': m.content_type})
        r = self.session.post(url, data=m, headers=headers,
                              allow_redirects=False, **requests_kwargs)
    else:
        r = self.session.post(url, data=data, headers=headers, files=files,
                              allow_redirects=False, **requests_kwargs)
    # allow_redirects=False because: if a post is redirected (e.g. 301 due
    # to a http to https redirect), then the second request is made to the
    # new URL, but *without* the data. This gives a confusing "No request
    # body data" error. It is better to just return the 301 to the user, so
    # we disallow redirects.
    return r.status_code, r.text
def _post_stream(self, path, files=None, form_data=None):
    """POST *files* and *form_data* as streamed multipart, printing progress.

    :param path: URL path passed to self._make_url
    :param files: optional mapping of file fields
    :param form_data: optional mapping of plain form fields
    :return: the parsed response
    """
    from requests_toolbelt import MultipartEncoder, MultipartEncoderMonitor

    def my_callback(monitor):
        progress = (monitor.bytes_read / monitor.len) * 100
        print("\r uploading...:%d%%(%d/%d)" % (progress, monitor.bytes_read, monitor.len), end=" ")

    url = self._make_url(path)
    headers = self._get_headers()
    # BUG FIX: guard against the default None values — `{**None}` raised a
    # TypeError whenever either argument was omitted.
    fields = {**(files or {}), **(form_data or {})}
    m = MultipartEncoderMonitor(MultipartEncoder(fields=fields), my_callback)
    res = requests.post(url, data=m, headers={
        **{'Content-Type': m.content_type},
        **headers
    })
    return self._parse_response(res)
def upload(url, filename, check=True):
    """POST *filename* as a product file to the Ops Manager endpoint *url*.

    :param url: endpoint path, appended to the configured opsmgr base url
    :param filename: local file to upload
    :param check: when True, validate the response via check_response
    :return: the requests response
    """
    creds = get_credentials()
    url = creds.get('opsmgr').get('url') + url
    # Close the file handle once the POST completes (the original leaked it).
    with open(filename, 'rb') as fh:
        multipart = MultipartEncoderMonitor.from_fields(
            fields={
                'product[file]':
                    ('product[file]', fh, 'application/octet-stream')
            },
            callback=ProgressBar().update)
        response = requests.post(
            url,
            auth=auth(creds),
            verify=False,
            data=multipart,
            headers={'Content-Type': multipart.content_type})
    sys.stdout.write('.100%\n')
    sys.stdout.flush()
    if response.status_code == 422:
        errors = response.json()["errors"]
        try:
            product = errors.get('product', [])
            for reason in product:
                if reason.startswith('Metadata already exists for'):
                    print('-', 'version already uploaded')
                    return response
        except Exception:
            # Narrowed from a bare `except:` — best-effort parse of the
            # error payload; fall through to the normal response check.
            pass
    check_response(response, check)
    return response
def validate(self, project_id, fims_metadata, expedition_code, upload, is_public):
    """Submit the FIMS metadata file for validation (and optional upload).

    Exits the process when the server responds with an error status;
    otherwise returns the parsed JSON response.
    """
    with open(fims_metadata, 'rb') as fh:
        encoder = MultipartEncoder({
            'fimsMetadata': (fims_metadata, fh, "application/octet-stream"),
            'upload': str(upload),
            'projectId': project_id,
            'expeditionCode': expedition_code,
            'public': str(is_public),
        })
        monitor = MultipartEncoderMonitor(encoder, self.upload_progress)
        print("0% uploaded")
        response = self.session.post(
            self.rest_root + self.validate_url,
            allow_redirects=False,
            headers={'Content-Type': monitor.content_type},
            data=monitor)
    if response.status_code > 299:
        print('status code: %s' % response.status_code)
        print(response.json()['usrMessage'] or 'Server Error')
        sys.exit()
    return response.json()
def upload(ymlfile): infourl = 'http://fir.im/api/v2/app/info/%s?token=%s&type=%s' uploadurl = 'http://up.qiniu.com/' updateinfourl = 'http://fir.im/api/v2/app/%s?token=%s' if len(ymlfile) == 0: f = open('config.yml') else: f = open(ymlfile[0]) x = yaml.load(f) infourl = infourl % (x['appid'], x['token'], x['type']) print '===== do get info from ', infourl, ' =====' response = requests.get(infourl) print json.loads(response.text) pkgkey = json.loads(response.text)['bundle']['pkg']['key'] pkgtoken = json.loads(response.text)['bundle']['pkg']['token'] print pkgkey print pkgtoken print '===== do upload file',x['filepath'], 'to', uploadurl, ' =====' e = MultipartEncoder( fields={'key': pkgkey, 'token': pkgtoken, 'file': ('filename', open(glob(x['filepath'])[0], 'rb'), 'text/plain')} ) callback = my_callback(e) m = MultipartEncoderMonitor(e, callback) uploadresponse = requests.post(uploadurl, data=m, headers={'Content-Type': m.content_type}) # print uploadresponse.text appOid = json.loads(uploadresponse.text)['appOid'] updateinfourl = updateinfourl % (appOid, x['token']) print '===== do update app info ', updateinfourl, ' =====' updateinforesponse = requests.put(updateinfourl, x)
def post_file(api: Api, name, fp):
    """Upload the contents of *fp* under *name* via the API's upload URL.

    Shows a tqdm progress bar while streaming.

    :raises IOError: if the number of bytes sent does not match the file size
    :return: the upload id
    """
    # Determine the file size by seeking to the end, then rewind.
    fp.seek(0, 2)
    file_size = fp.tell()
    fp.seek(0, 0)
    upload = api.post_uploads(name, file_size)
    progress_bar = None

    def update_callback(monitor):
        # progress_bar is assigned below, before the PUT can fire callbacks.
        progress_bar.n = monitor.bytes_read
        progress_bar.refresh()

    data = MultipartEncoder(fields={'file': (name, fp)})
    monitor = MultipartEncoderMonitor(data, update_callback)
    progress_bar = tqdm(desc=name, total=data.len, unit='iB', unit_scale=True)
    try:
        headers = {
            'Content-Type': monitor.content_type,
            'Content-Length': str(data.len)
        }
        res = api.session.put(upload['upload_url'], data=monitor, headers=headers)
        if progress_bar.n != data.len:
            raise IOError('file size mismatches')
    finally:
        # Close the bar even when the PUT or the size check raises, so the
        # terminal is not left with a dangling bar (the original leaked it).
        progress_bar.close()
    res.raise_for_status()
    return upload['id']
def upload_with_progress_bar(data, url, kwargs, label=None, token=None):
    """
    Uses multipart data to show progress of upload to the user.
    Requires request.files['data'].read() instead of request.data on the backend site.
    :param data: path to file to upload
    :param url: target url
    :param kwargs: additional args for the post request
    :param label: label of progress bar
    :param token: token to authorize the request
    :return: response from server
    """
    encoder = MultipartEncoder({'data': ('data', data, 'text/plain')})
    bar_options = dict(desc=label, total=encoder.len, disable=not label,
                       dynamic_ncols=True, unit='B', unit_scale=True,
                       unit_divisor=1024)
    with tqdm(**bar_options) as bar:
        # Advance the bar by the bytes read since the last callback.
        def advance(monitor):
            bar.update(monitor.bytes_read - bar.n)

        multipart_monitor = MultipartEncoderMonitor(encoder, advance)
        headers = {
            'Content-Type': multipart_monitor.content_type,
            'X-Token': token
        }
        r = requests.post(url, data=multipart_monitor,
                          headers=headers, params=kwargs)
    return r
def push(self, filepath):
    """Push the bytes from filepath to the Storage."""
    logger.debug("Starting to push %r", str(filepath))

    def _progress(monitor):
        # XXX Facundo 2020-07-01: use a real progress bar
        if monitor.bytes_read <= monitor.len:
            pct = 100 * monitor.bytes_read / monitor.len
            print("Uploading... {:.2f}%\r".format(pct), end="", flush=True)

    with filepath.open("rb") as fh:
        fields = {"binary": (filepath.name, fh, "application/octet-stream")}
        # create a monitor (so that progress can be displayed) as call the real pusher
        monitor = MultipartEncoderMonitor(MultipartEncoder(fields=fields), _progress)
        response = _storage_push(monitor, self.storage_base_url)

    if not response.ok:
        raise CommandError(
            "Failure while pushing file: [{}] {!r}".format(
                response.status_code, response.content
            )
        )

    result = response.json()
    if not result["successful"]:
        raise CommandError("Server error while pushing file: {}".format(result))

    upload_id = result["upload_id"]
    logger.debug("Uploading bytes ended, id %s", upload_id)
    return upload_id
def test_storage_push_succesful():
    """Bytes are properly pushed to the Storage."""
    encoder = MultipartEncoder(
        fields={"binary": ("filename", "somefile", "application/octet-stream")})
    test_monitor = MultipartEncoderMonitor(encoder)

    with patch("requests.Session") as mock:
        _storage_push(test_monitor, "http://test.url:0000")
    cm_session_mock = mock().__enter__()

    # the POST must hit the unscanned-upload endpoint with the right headers
    expected_headers = {
        "Content-Type": test_monitor.content_type,
        "Accept": "application/json",
        "User-Agent": build_user_agent(),
    }
    cm_session_mock.post.assert_called_once_with(
        "http://test.url:0000/unscanned-upload/",
        headers=expected_headers,
        data=test_monitor,
    )

    # check the retries were properly setup
    (protocol, adapter), _ = cm_session_mock.mount.call_args
    assert protocol == "https://"
    assert isinstance(adapter, HTTPAdapter)
    assert adapter.max_retries.backoff_factor == 2
    assert adapter.max_retries.total == 5
    assert adapter.max_retries.status_forcelist == [500, 502, 503, 504]
def upload(filepath, comment, url, headers):
    """Upload a file with progress bar.

    Parameters
    ----------
    filepath : string
        The local filepath of the file to upload.
    comment : string
        The comment for the file to upload.
    url : string
        The url of the file to upload.
    headers : dictionary
        The headers of the upload's request.

    Returns
    -------
    response : requests.Response
        The response of the upload's request.
    """
    filename = (os.sep).join(filepath.split(os.sep)[-2:])
    # Close the file handle after the POST (the original leaked it).
    with open(filepath, 'rb') as fh:
        encoder = MultipartEncoder({'file': (filename, fh, 'text/plain'),
                                    "comment": comment})
        with tqdm(desc="Submit (unknown)",
                  total=encoder.len,
                  ncols=100,
                  unit='o',
                  unit_scale=True,
                  unit_divisor=1024
                  ) as progress_bar:
            multipart_monitor = MultipartEncoderMonitor(
                encoder,
                lambda monitor: progress_bar.update(monitor.bytes_read - progress_bar.n))
            headers = {**headers,
                       'Content-Type': multipart_monitor.content_type}
            response = requests.post(url,
                                     data=multipart_monitor,
                                     headers=headers,
                                     )
    return response
def silence_upload(endpoint, data, callback=None, return_value=None):
    """POST *data* as multipart to the API, optionally reporting progress.

    Best-effort: failures are printed, not raised.

    :param endpoint: API path appended to Api.apiUrl
    :param data: mapping of multipart form fields
    :param callback: optional progress callable for the monitor
    :param return_value: optional holder whose .value receives the response
    :return: the response on success, False when no bearer token is set
        or the request fails
    """
    if Api.bearer is None:
        return False
    m = MultipartEncoder(fields=data)
    if callback is not None:
        m = MultipartEncoderMonitor(m, callback)
    try:
        bearer = Config.load_token()
        resp = requests.post(Api.apiUrl + endpoint, data=m, headers={
            "Content-Type": m.content_type,
            "Accept": "application/json",
            "Authorization": "Bearer " + bearer
        })
        if return_value:
            return_value.value = resp
        return resp
    except Exception:
        # Narrowed from a bare `except:`; deliberate best-effort — report
        # and return False instead of raising. (Removed the unreachable
        # `pass` that followed the returns.)
        print("Failed", endpoint, data)
        return False
def post_request(self, path, json=None, files=None, log=log_responses, bar=False):
    """POST to *path* on the server, optionally streaming *files* with a
    tqdm progress bar.

    :param path: URL path joined onto the server address
    :param json: optional JSON body
    :param files: optional mapping of field name -> file object
    :param log: whether to log the response
    :param bar: show an upload progress bar (only when files is given)
    :raises ServerError: on any non-200 response
    :return: the requests.Response
    """
    if files is not None and bar:
        _files_dict = {
            key: (key, file_, 'text/plain')
            for key, file_ in files.items()}
        encoder = MultipartEncoder(_files_dict)
        with tqdm(total=encoder.len, unit_scale=True, desc='Uploading') as tqdm_bar:
            monitor = MultipartEncoderMonitor(
                encoder,
                lambda monitor: tqdm_bar.update(monitor.bytes_read - tqdm_bar.n))
            response = requests.post(
                urljoin(self.server_address, path),
                json=json,
                data=monitor,
                headers={"Access-Token": self.access_token,
                         "Content-Type": monitor.content_type})
    else:
        response = requests.post(
            urljoin(self.server_address, path),
            json=json,
            files=files,
            headers={"Access-Token": self.access_token})
    if log:
        log_response(response)
    if response.status_code != 200:
        message = response.status_code
        try:
            # Distinct name: the original rebound the `json` parameter here.
            body = response.json()
            if 'message' in body:
                message = body['message']
        except JSONDecodeError:
            message = 'No json to retrieve message from'
        raise ServerError(message, response.content)
    return response
def upload_large_file(url, file_path, data, chunk_callback=None):
    """Stream-upload *file_path* plus extra *data* fields to *url*.

    :param url: upload endpoint
    :param file_path: local file to upload
    :param data: mapping of extra form fields (mutated: a 'file' entry is added)
    :param chunk_callback: optional callable receiving the number of
        newly-read bytes on each encoder read
    :return: the response, or None when the file is missing or after a
        401 retry attempt
    """
    if not path.isfile(file_path):
        print(f'File {file_path} not found')
        return
    base_name = path.basename(file_path)
    # Close the handle when the request finishes (the original leaked it).
    with open(file_path, 'rb') as file:
        data.update({'file': (base_name, file, get_mime_type(file_path))})
        encoder = MultipartEncoder(data)
        prev_bytes_read = 0

        def progress_callback(x):
            # Report only the delta since the last callback.
            nonlocal prev_bytes_read
            if chunk_callback:
                chunk_callback(x.bytes_read - prev_bytes_read)
            prev_bytes_read = x.bytes_read

        monitor = MultipartEncoderMonitor(encoder, progress_callback)
        headers = {
            'Content-Type': monitor.content_type,
            'Authorization': session.token
        }
        with requests.post(url, data=monitor, headers=headers, stream=True) as res:
            if res.status_code == 401:
                login_and_retry()
            return res
def upload_file(
    self,
    io: IO,
    remotepath: str,
    ondup="overwrite",
    callback: Callable[[MultipartEncoderMonitor], None] = None,
):
    """Upload the content of io to remotepath

    WARNING: This api can not set local_ctime and local_mtime
    """
    assert remotepath.startswith(
        "/"), "`remotepath` must be an absolute path"

    target = Path(remotepath)
    params = {
        "method": "upload",
        "ondup": ondup,
        "dir": target.parent.as_posix(),
        "filename": target.name,
        "BDUSS": self._bduss,
    }
    encoder = MultipartEncoder(fields={"file": ("file", io, "")})
    monitor = MultipartEncoderMonitor(encoder, callback=callback)
    resp = self._request(
        Method.Post, PcsNode.File.url(), params=params, data=monitor)
    return resp.json()
def provider_upload(self, name, version, provider, file):
    """Upload a box *file* for the given box name/version/provider.

    Fetches the pre-signed upload path from the API, then PUTs the file
    with a progress bar.
    """
    r = self.api_get(self.api['provider']['upload'], args={
        'username': self.username,
        'boxname': name,
        'version': version,
        'provider': provider
    })
    data = r.json()
    # Close the file handle after the upload (the original leaked it).
    with open(file, 'rb') as fh:
        encoder = MultipartEncoder({
            'file': (file, fh, 'application/octet-stream')
        })
        bar = ProgressBar(expected_size=encoder.len, filled_char='=')

        def callback(monitor):
            bar.show(monitor.bytes_read)

        monitor = MultipartEncoderMonitor(encoder, callback)
        self.api_put(
            data['upload_path'],
            monitor,
            isjson=False,
            anonymous=True,
            headers={
                'Content-Type': monitor.content_type
            }
        )
    print('')
def upload_file(self, file_object, path, callback_func):
    """Upload *file_object* to Yandex.Disk at *path*.

    Requests a pre-signed upload href from the Disk API, then PUTs the
    file through a monitor so *callback_func* receives progress events.

    :return: True when the upload returned 201 Created, else False
    """
    r = requests.get('https://cloud-api.yandex.net/v1/disk/resources/upload',
                     headers=self.api_key, params={'path': path})
    upload_url = r.json().get('href')
    m = MultipartEncoderMonitor.from_fields(fields={'file': file_object},
                                            callback=callback_func)
    r = requests.put(upload_url, data=m,
                     headers={'Content-Type': m.content_type})
    # The comparison is already a bool; no `True if ... else False` needed.
    return r.status_code == 201