Example #1
0
    def do_chunked_upload(self, path):
        """Upload the file at *path* as a set of base64-encoded chunk docs.

        Creates a parent media folder via the API, partitions the file into
        document-sized chunks, and uploads the chunks concurrently with a
        process pool while reporting progress.

        Args:
            path: Filesystem path of the file to upload.
        """
        # Prepare media file metadata.
        size = os.stat(path).st_size
        # Base64 inflates the payload by roughly 4/3.
        encoded_size = size * (4 / 3)

        root = self.api.get_base_folder()['id']

        media = UDSFile(ntpath.basename(path),
                        None,
                        MimeTypes().guess_type(
                            urllib.request.pathname2url(path))[0],
                        Format.format(size),
                        Format.format(encoded_size),
                        parents=[root],
                        size_numeric=size)

        parent = self.api.create_media_folder(media)
        print("Created parent folder with ID %s" % (parent['id']))

        # Reads from disk vs. docs needed to store them — should be the same.
        no_chunks = math.ceil(size / CHUNK_READ_LENGTH_BYTES)
        no_docs = math.ceil(encoded_size / MAX_DOC_LENGTH)
        print("Requires %s chunks to read and %s docs to store." %
              (no_chunks, no_docs))

        # One Chunk descriptor per destination doc.
        chunk_list = [
            Chunk(path, i, size, media=media, parent=parent['id'])
            for i in range(no_docs)
        ]

        # Begin timing run
        start_time = time.time()

        total = 0
        # Concurrently execute chunk uploads and report back when each is done.
        with concurrent.futures.ProcessPoolExecutor(
                max_workers=MAX_WORKERS_ALLOWED) as executor:
            for file in executor.map(ext_upload_chunked_part, chunk_list):
                # Accumulate whatever each worker returns — presumably bytes
                # uploaded, since it is compared against `size` below.
                total = total + file
                elapsed_time = round(time.time() - start_time, 2)
                # Guard against elapsed_time rounding to 0.0 for a very fast
                # first chunk, which would raise ZeroDivisionError.
                if elapsed_time > 0:
                    current_speed = round(total / (elapsed_time * 1024 * 1024),
                                          2)
                else:
                    current_speed = 0.0
                progress_bar(
                    "Uploading %s at %sMB/s" % (media.name, current_speed),
                    total, size)

        finish_time = round(time.time() - start_time, 1)

        progress_bar("Uploaded %s in %ss" % (media.name, finish_time), 1, 1)
Example #2
0
    def do_chunked_upload(self, path):
        """Upload the file at *path* as a set of base64-encoded chunk docs.

        Hashes the file, creates a parent media folder via the API, then
        uploads the chunks sequentially while showing tqdm progress bars,
        and finally prints a summary table of the new file.

        Args:
            path: Filesystem path of the file to upload.
        """
        # Prepare media file metadata.
        size = os.stat(path).st_size
        file_hash = self.hash_file(path)

        # Base64 inflates the payload by roughly 4/3.
        encoded_size = size * (4/3)

        root = self.api.get_base_folder()['id']

        media = UDSFile(ntpath.basename(path), None, MimeTypes().guess_type(urllib.request.pathname2url(path))[0],
                        Format.format(size), Format.format(encoded_size), parents=[root], size_numeric=size, sha256=file_hash)

        parent = self.api.create_media_folder(media)

        # One Chunk descriptor per destination doc.
        no_docs = math.ceil(encoded_size / MAX_DOC_LENGTH)
        chunk_list = [
            Chunk(path, i, size, media=media, parent=parent['id'])
            for i in range(no_docs)
        ]

        total_chunks = len(chunk_list)
        progress_bar_chunks = tqdm(total=total_chunks,
                                   unit='chunks', dynamic_ncols=True, position=0)
        progress_bar_speed = tqdm(total=total_chunks * CHUNK_READ_LENGTH_BYTES, unit_scale=1,
                                  unit='B', dynamic_ncols=True, position=1)

        # Upload sequentially; close the bars even if an upload raises so
        # the terminal is left in a sane state.
        try:
            for chunk in chunk_list:
                self.upload_chunked_part(chunk)
                progress_bar_speed.update(CHUNK_READ_LENGTH_BYTES)
                progress_bar_chunks.update(1)
        finally:
            progress_bar_speed.close()
            progress_bar_chunks.close()

        print("\n")
        # Print new file output
        table = [[media.name, media.size, media.encoded_size, parent['id']]]
        print(tabulate(table, headers=[
            'Name', 'Size', 'Encoded', 'ID', ]))