def upload_temporary_file(api, project, path, lookup=None, hours=24, name=None,
                          chunk_size=100 * 1024 * 1024):
    """ Upload a file to the temporary file storage location.

    Example:

    .. code-block:: python

        api = tator.get_api(host, token)
        for progress, response in tator.util.upload_temporary_file(api, project_id, path):
            print(f"Upload progress: {progress}%")
        print(response.message)

    :param api: :class:`tator.TatorApi` object.
    :param project: Unique integer identifying a project.
    :param path: Path to the file.
    :param lookup: [Optional] md5hash of lookup parameters.
    :param hours: [Optional] Number of hours file is kept alive. Default is 24.
    :param name: [Optional] Name of temporary file in database. Defaults to basename of path.
    :param chunk_size: [Optional] Chunk size in bytes. Default is 100MB.
    :returns: Generator that yields tuple containing progress (0-100) and a response. The
        response is `None` until the last yield, when the response is the response object
        from :meth:`tator.util.TatorApi.create_temporary_file`.
    """
    if name is None:
        name = os.path.basename(path)
    if lookup is None:
        lookup = name
    host = api.api_client.configuration.host
    tusURL = urljoin(host, "files/")
    tus = TusClient(tusURL)
    uploader = tus.uploader(path, chunk_size=chunk_size, retries=10, retry_delay=15)
    last_progress = 0
    yield (last_progress, None)
    num_chunks = math.ceil(uploader.get_file_size() / chunk_size)
    for chunk_count in range(num_chunks):
        uploader.upload_chunk()
        # Fix: progress is based on *completed* chunks (chunk_count + 1); the
        # previous 0-based numerator lagged one chunk behind.
        this_progress = round(((chunk_count + 1) / num_chunks) * 100, 1)
        if this_progress != last_progress:
            yield (this_progress, None)
            last_progress = this_progress
    response = api.create_temporary_file(project, temporary_file_spec={
        "url": uploader.url,
        "name": name,
        "lookup": lookup,
        # Fix: was hard-coded to 24, silently ignoring the `hours` parameter.
        "hours": hours,
    })
    yield (100, response)
def test_upload_file(self, live_server):
    """Chunked upload of a small fixture file should complete with HTTP 204."""
    endpoint = live_server.url + reverse('tus_upload')
    client = TusClient(endpoint)
    uploader = client.uploader('tests/files/hello_world.txt', chunk_size=200)
    uploader.upload()
    assert uploader.request.status_code == 204
def upload_file(path, tus_url):
    """Upload *path* to the given tus endpoint and return the resulting upload URL."""
    logger.info(f"Uploading file {path}...")
    chunk_bytes = 1 * 1024 * 1024  # 1 Mb
    uploader = TusClient(tus_url).uploader(path, chunk_size=chunk_bytes)
    total_chunks = math.ceil(uploader.file_size / chunk_bytes)
    # Upload chunk by chunk so the progress bar advances per chunk.
    for _ in progressbar(range(total_chunks)):
        uploader.upload_chunk()
    return uploader.url
def uploadFile_v2(self, filePath, typeId, md5=None, section=None, fname=None,
                  upload_gid=None, upload_uid=None, chunk_size=2*1024*1024):
    """ Upload a media file via tus, then start a transcode (video) or image save.

    Generator that yields progress (0-100) as chunks upload; the final yield is
    100 after the transcode/save request has been accepted.

    :param filePath: Path to the file to upload.
    :param typeId: Media type ID for the new media.
    :param md5: [Optional] md5 sum of the file; computed if omitted.
    :param section: [Optional] Media section name. Defaults to "New Files".
    :param fname: [Optional] Name stored server-side; defaults to basename of filePath.
    :param upload_gid: [Optional] Group ID of the upload; generated if omitted.
    :param upload_uid: [Optional] Unique ID of the upload; generated if omitted.
    :param chunk_size: [Optional] Chunk size in bytes. Default is 2MB.
    """
    if md5 is None:
        md5 = md5_sum(filePath)
    if upload_uid is None:
        upload_uid = str(uuid1())
    if upload_gid is None:
        upload_gid = str(uuid1())
    if fname is None:
        fname = os.path.basename(filePath)
    if section is None:
        section = "New Files"
    tus = TusClient(self.tusURL)
    uploader = tus.uploader(filePath, chunk_size=chunk_size, retries=10, retry_delay=15)
    num_chunks = math.ceil(uploader.get_file_size() / chunk_size)
    last_progress = 0
    yield last_progress
    for chunk_count in range(num_chunks):
        uploader.upload_chunk()
        # Fix: use completed chunk count; the 0-based numerator lagged by one.
        this_progress = round(((chunk_count + 1) / num_chunks) * 100, 1)
        if this_progress != last_progress:
            yield this_progress
            last_progress = this_progress
    mime, _ = mimetypes.guess_type(fname)
    # Fix: guess_type returns None for unknown extensions; treat those as
    # images instead of crashing on None.find().
    if mime is not None and 'video' in mime:
        endpoint = 'Transcode'
    else:
        endpoint = 'SaveImage'
    # Initiate transcode.
    out = requests.post(f'{self.url}/{endpoint}/{self.project}',
                        headers=self.headers,
                        json={
                            'type': typeId,
                            'uid': upload_uid,
                            'gid': upload_gid,
                            'url': uploader.url,
                            'name': fname,
                            'section': section,
                            'md5': md5,
                        })
    print("{}, {}".format(fname, out.json()['message']))
    out.raise_for_status()
    yield 100
def tus_upload(self, upload_token, fullpath, filename):
    """ Upload *fullpath* to the new-package tus endpoint.

    :param upload_token: Token authorizing the upload; sent as tus metadata.
    :param fullpath: Local path of the file to upload.
    :param filename: Name recorded server-side for the uploaded file.
    :raises Exit: When tus communication fails (after logging the exception).
    """
    tus = TusClient(f"{self.tus_url}/new-package/tus/")
    try:
        uploader = tus.uploader(
            fullpath,
            chunk_size=UPLOAD_CHUNK_SIZE,
            metadata={"filename": filename, "upload-token": upload_token},
        )
        uploader.upload()
    except TusCommunicationError:
        # Fix: message was an f-string with no placeholder; include the actual
        # path so failures are diagnosable.
        log.exception(f"Failed to upload file '{fullpath}'")
        raise Exit
def upload_file(path, tus_url):
    """Upload *path* to the tus endpoint (normalizing a missing trailing slash) and return the upload URL."""
    # tuspy resolves the upload location relative to the endpoint, so the URL
    # must end with a slash.
    tus_url = tus_url if tus_url.endswith('/') else tus_url + '/'
    logger.info(f"Uploading file {path}...")
    chunk_bytes = 1 * 1024 * 1024  # 1 Mb
    uploader = TusClient(tus_url).uploader(path, chunk_size=chunk_bytes)
    total_chunks = math.ceil(uploader.get_file_size() / chunk_bytes)
    for _ in progressbar(range(total_chunks)):
        uploader.upload_chunk()
    return uploader.url
async def handle_file_upload(step):
    """ Handle a "file-upload" regression step by uploading the referenced file via tus.

    :param step: Step dict with required key "file-upload" (path relative to the
        current regression file's directory) and optional "name" (server-side name).
    :raises RegressionFailure: When the tus uploader cannot be created.
    """
    validate_keys(step, ["file-upload", "name"])
    filename = step["file-upload"]
    # Resolve the upload path relative to the directory of the regression file.
    fullpath = "/".join(current_regression.split("/")[:-1])
    # Fix: the path previously interpolated a literal placeholder instead of
    # the requested file name.
    fullpath = f"{fullpath}/{step['file-upload']}"
    if "name" in step:
        # Optional override for the name recorded server-side.
        filename = step["name"]
    tus = TusClient("http://127.0.0.1:1080/new-package/tus/")
    try:
        uploader = tus.uploader(
            fullpath,
            chunk_size=5 * 1024 * 1024,
            metadata={"filename": filename, "upload-token": token},
        )
    except Exception:
        # Fix: include the actual path (was a literal '(unknown)' placeholder).
        raise RegressionFailure(f"Couldn't upload file '{fullpath}'")
    uploader.upload()
def uploadFile(self, filePath, lookup=None, hours=24, name=None):
    """ Upload a file to the temporary file storage location.

    :param filePath: Path to the file to upload.
    :param lookup: [Optional] Lookup key stored with the file; defaults to name.
    :param hours: [Optional] Number of hours the file is kept alive. Default is 24.
    :param name: [Optional] Name stored in the database; defaults to basename of filePath.
    :returns: Result of registering the temporary file via ``self.new``.
    """
    if name is None:
        name = os.path.basename(filePath)
    if lookup is None:
        lookup = name
    tus = TusClient(self.tusURL)
    chunk_size = 100 * 1024 * 1024  # 100 Mb
    uploader = tus.uploader(filePath, chunk_size=chunk_size, retries=10, retry_delay=15)
    num_chunks = math.ceil(uploader.get_file_size() / chunk_size)
    for _ in range(num_chunks):
        uploader.upload_chunk()
    # Fix: "hours" was hard-coded to 24, silently ignoring the parameter.
    return self.new({"url": uploader.url, "name": name, "lookup": lookup, "hours": hours})
def uploadFiles(self, fileList, section=None, chunk_size=2*1024*1024):
    """ Upload multiple files as a single in-memory tar archive and start a transcode.

    Generator yielding progress (0-100); 100 is yielded after the transcode
    request succeeds.

    :param fileList: Iterable of file paths to bundle into the archive.
    :param section: [Optional] Media section name passed to the transcode request.
    :param chunk_size: [Optional] tus chunk size in bytes. Default is 2MB.
    """
    upload_uid = str(uuid1())
    upload_gid = str(uuid1())
    # Build the tar archive entirely in memory.
    in_mem_buf = io.BytesIO()
    tus = TusClient(self.tusURL)
    in_mem_tar = tarfile.TarFile(mode='w', fileobj=in_mem_buf)
    for idx, fp in enumerate(fileList):
        # Each file is stored under its basename (directory structure dropped).
        in_mem_tar.add(fp, os.path.basename(fp))
    # NOTE(review): the tar is never close()d and the buffer is not rewound
    # before handing it to the uploader — confirm the tus uploader tolerates
    # this (the sibling upload_media_archive has the same pattern).
    uploader = tus.uploader(file_stream=in_mem_buf, chunk_size=chunk_size,
                            retries=10, retry_delay=15)
    last_progress = 0
    num_chunks = math.ceil(uploader.get_file_size()/chunk_size)
    yield last_progress
    for chunk_count in range(num_chunks):
        uploader.upload_chunk()
        # chunk_count is 0-based, so reported progress lags one chunk behind.
        this_progress = round((chunk_count / num_chunks) * 100, 1)
        if this_progress != last_progress:
            yield this_progress
            last_progress = this_progress
    # Initiate transcode.
    out = requests.post(f'{self.url}/Transcode/{self.project}',
                        headers=self.headers,
                        json={
                            'type': -1,  # Tar-based import
                            'uid': upload_uid,
                            'gid': upload_gid,
                            'url': uploader.url,
                            'name': "archive.tar",
                            'section': section,
                            'md5': "N/A",
                        })
    out.raise_for_status()
    yield 100
def uploadFile(self, typeId, filePath, waitForTranscode=True, progressBars=True,
               md5=None, section=None):
    """ Upload a single media file and optionally poll until its transcode completes.

    :param typeId: Media type ID for the new media.
    :param filePath: Path to the file to upload.
    :param waitForTranscode: When True, poll by md5 until the media appears.
    :param progressBars: Show progress bars; uses a NullBar when False.
    :param md5: [Optional] md5 sum of the file; computed if omitted.
    :param section: [Optional] Media section name. Defaults to "New Files".
    :returns: True on success; False when the md5 already exists in the database
        or the server rejects the request.
    """
    if md5 == None:
        md5 = md5_sum(filePath)
    upload_uid = str(uuid1())
    upload_gid = str(uuid1())
    fname = os.path.basename(filePath)
    if section is None:
        section = "New Files"
    # Dedupe: skip the upload if a media record with this md5 already exists.
    found = self.byMd5(md5)
    if found:
        print(f"File with {md5} found in db ({found['name']})")
        return False
    tus = TusClient(self.tusURL)
    chunk_size = 1 * 1024 * 1024  # 1 Mb
    uploader = tus.uploader(filePath, chunk_size=chunk_size)
    num_chunks = math.ceil(uploader.file_size / chunk_size)
    if progressBars:
        bar = progressbar.ProgressBar(prefix="Upload", redirect_stdout=True)
    else:
        bar = progressbar.NullBar()
    for _ in bar(range(num_chunks)):
        uploader.upload_chunk()
    # Initiate transcode.
    out = requests.post(self.url + '/Transcode' + "/" + self.project,
                        headers=self.headers,
                        json={
                            'type': typeId,
                            'uid': upload_uid,
                            'gid': upload_gid,
                            'url': uploader.url,
                            'name': fname,
                            'section': section,
                            'md5': md5,
                        })
    try:
        # NOTE(review): out.json() may itself raise before raise_for_status on
        # a non-JSON error body; both paths land in the except below.
        print("{}, {}".format(fname, out.json()['message']))
        out.raise_for_status()
    except Exception as e:
        print("Error: '{}'".format(out.text))
        return False
    if waitForTranscode == True:
        # Poll for the media being created every 5 seconds
        if progressBars:
            bar = progressbar.ProgressBar(prefix="Transcode", redirect_stdout=True)
        else:
            bar = progressbar.NullBar()
        # check quickly for the 1st half second then go slow
        for i in bar(count()):
            if i % 2 == 0:
                media = self.byMd5(md5)
                if media:
                    bar.finish()
                    break
            else:
                if i < 20:
                    time.sleep(0.1)
                else:
                    print("Waiting for transcode...")
                    time.sleep(2.5)
    return True
def upload_media_archive(api, project, paths, section="Test Section",
                         chunk_size=2 * 1024 * 1024):
    """ Uploads multiple media files as an archive.

    Example:

    .. code-block:: python

        api = tator.get_api(host, token)
        for progress, response in tator.util.upload_media_archive(api, project_id, paths):
            print(f"Upload progress: {progress}%")
        print(response.message)

    :param api: :class:`tator.TatorApi` object.
    :param project: Unique integer identifying a project.
    :param paths: List of paths to the media files.
    :param section: [Optional] Media section to upload to.
    :param chunk_size: [Optional] Chunk size in bytes. Default is 2MB.
    :returns: Generator that yields tuple containing progress (0-100) and a response.
        The response is `None` until the last yield, when the response is the response
        object from :meth:`tator.TatorApi.transcode`.
    """
    upload_uid = str(uuid1())
    upload_gid = str(uuid1())
    # Build the tar archive entirely in memory.
    in_mem_buf = io.BytesIO()
    host = api.api_client.configuration.host
    tusURL = urljoin(host, "files/")
    tus = TusClient(tusURL)
    in_mem_tar = tarfile.TarFile(mode='w', fileobj=in_mem_buf)
    for fp in paths:
        # Each file is stored under its basename (directory structure dropped).
        in_mem_tar.add(fp, os.path.basename(fp))
    uploader = tus.uploader(file_stream=in_mem_buf, chunk_size=chunk_size,
                            retries=10, retry_delay=15)
    last_progress = 0
    num_chunks = math.ceil(uploader.get_file_size() / chunk_size)
    yield (last_progress, None)
    for chunk_count in range(num_chunks):
        uploader.upload_chunk()
        # Fix: progress is based on *completed* chunks (chunk_count + 1); the
        # previous 0-based numerator lagged one chunk behind.
        this_progress = round(((chunk_count + 1) / num_chunks) * 100, 1)
        if this_progress != last_progress:
            yield (this_progress, None)
            last_progress = this_progress
    # Initiate transcode.
    spec = {
        'type': -1,  # Tar-based import
        'uid': upload_uid,
        'gid': upload_gid,
        'url': uploader.url,
        'name': "archive.tar",
        'section': section,
        'md5': "N/A",
    }
    response = api.transcode(project, transcode_spec=spec)
    yield (100, response)
def uploadFile(self, typeId, filePath, waitForTranscode=True, progressBars=True,
               md5=None, section=None, fname=None):
    """ Upload a new file to Tator.

    Videos (by media type dtype) go through the Transcode endpoint; everything
    else is saved via SaveImage.

    :param typeId: Media type ID for the new media.
    :param filePath: Path to the file to upload.
    :param waitForTranscode: When True (and the file is a video), poll by md5
        until the transcoded media appears.
    :param progressBars: Show progress bars; uses a NullBar when False.
    :param md5: [Optional] md5 sum of the file; computed if omitted.
    :param section: [Optional] Media section name. Defaults to "New Files".
    :param fname: [Optional] Name stored server-side; defaults to basename.
    :returns: Media ID when a transcode was waited on; True for async calls;
        False when the server rejects the request.
    """
    if md5==None:
        md5 = md5_sum(filePath)
    upload_uid = str(uuid1())
    upload_gid = str(uuid1())
    if fname is None:
        fname=os.path.basename(filePath)
    if section is None:
        section="New Files"
    tus = TusClient(self.tusURL)
    chunk_size=100*1024*1024 # 100 Mb
    uploader = tus.uploader(filePath, chunk_size=chunk_size, retries=10, retry_delay=15)
    num_chunks=math.ceil(uploader.get_file_size()/chunk_size)
    if progressBars:
        bar=progressbar.ProgressBar(prefix="Upload",redirect_stdout=True)
    else:
        bar=progressbar.NullBar()
    for _ in bar(range(num_chunks)):
        uploader.upload_chunk()
    # Route by the media type's dtype: videos are transcoded, others saved.
    mediaType = self.mediaTypeApi.get(typeId)
    if mediaType['dtype'] == 'video':
        endpoint = 'Transcode'
    else:
        endpoint = 'SaveImage'
    # Initiate transcode.
    out = requests.post(f'{self.url}/{endpoint}/{self.project}',
                        headers=self.headers,
                        json={
                            'type': typeId,
                            'uid': upload_uid,
                            'gid': upload_gid,
                            'url': uploader.url,
                            'name': fname,
                            'section': section,
                            'md5': md5,
                        })
    try:
        print("{}, {}".format(fname, out.json()['message']))
        out.raise_for_status()
    except Exception as e:
        print("Error: '{}'".format(out.text))
        return False
    if (waitForTranscode == True) and (endpoint == 'Transcode'):
        # Poll for the media being created every 5 seconds
        if progressBars:
            bar=progressbar.ProgressBar(prefix="Transcode",redirect_stdout=True)
        else:
            bar=progressbar.NullBar()
        # check quickly for the 1st half second then go slow
        for i in bar(count()):
            if i % 2 == 0:
                media=self.byMd5(md5)
                if media:
                    bar.finish()
                    break
            else:
                if i < 20:
                    time.sleep(0.1)
                else:
                    print("Waiting for transcode...")
                    time.sleep(2.5)
        # return media id if we waited for it
        return media['id']
    # return true for async calls
    # TODO: Make this less hokey
    return True
def upload_media(api, type_id, path, md5=None, section=None, fname=None,
                 upload_gid=None, upload_uid=None, chunk_size=2*1024*1024):
    """ Uploads a single media file.

    Example:

    .. code-block:: python

        api = tator.get_api(host, token)
        for progress, response in tator.util.upload_media(api, type_id, path):
            print(f"Upload progress: {progress}%")
        print(response.message)

    :param api: :class:`tator.TatorApi` object.
    :param type_id: Unique integer identifying a media type.
    :param path: Path to the media file.
    :param md5: [Optional] md5 sum of the media.
    :param section: [Optional] Media section to upload to.
    :param fname: [Optional] Filename to use for upload.
    :param upload_gid: [Optional] Group ID of the upload.
    :param upload_uid: [Optional] Unique ID of the upload.
    :param chunk_size: [Optional] Chunk size in bytes. Default is 2MB.
    :returns: Generator that yields tuple containing progress (0-100) and a response.
        The response is `None` until the last yield, when the response is the response
        object from :meth:`tator.TatorApi.transcode` (videos) or
        :meth:`tator.TatorApi.create_media` (images).
    """
    if md5 is None:
        md5 = md5sum(path)
    if upload_uid is None:
        upload_uid = str(uuid1())
    if upload_gid is None:
        upload_gid = str(uuid1())
    if fname is None:
        fname = os.path.basename(path)
    if section is None:
        section = "New Files"
    host = api.api_client.configuration.host
    tusURL = urljoin(host, "files/")
    tus = TusClient(tusURL)
    uploader = tus.uploader(path, chunk_size=chunk_size, retries=10, retry_delay=15)
    num_chunks = math.ceil(uploader.get_file_size() / chunk_size)
    last_progress = 0
    yield (last_progress, None)
    for chunk_count in range(num_chunks):
        uploader.upload_chunk()
        # Fix: progress is based on *completed* chunks (chunk_count + 1); the
        # previous 0-based numerator lagged one chunk behind.
        this_progress = round(((chunk_count + 1) / num_chunks) * 100, 1)
        if this_progress != last_progress:
            yield (this_progress, None)
            last_progress = this_progress
    mime, _ = mimetypes.guess_type(fname)
    response = api.get_media_type(type_id)
    project_id = response.project
    spec = {
        'type': type_id,
        'uid': upload_uid,
        'gid': upload_gid,
        'url': uploader.url,
        'name': fname,
        'section': section,
        'md5': md5,
    }
    # Initiate transcode or save image. Fix: guess_type returns None for
    # unknown extensions; treat those as images instead of crashing on
    # None.find().
    if mime is not None and 'video' in mime:
        response = api.transcode(project_id, transcode_spec=spec)
    else:
        response = api.create_media(project_id, media_spec=spec)
    yield (100, response)