Example 1
 def f(m: MultipartEncoderMonitor):
     # update every 100KB
     m.buf_bytes_read += m.bytes_read - m.prev_bytes_read
     m.prev_bytes_read = m.bytes_read
     if m.buf_bytes_read >= 1e5:
         # print(f"{m.buf_bytes_read=}, {m.prev_bytes_read=}")
         bar.update(m.buf_bytes_read)
         m.buf_bytes_read = 0
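
The callback above is meant to be attached to a requests_toolbelt MultipartEncoderMonitor, with the click progress bar and the two counters it updates living outside the function. A minimal wiring sketch, assuming an already-built MultipartEncoder e with placeholder fields (the full pattern appears in the later examples):

import click
from requests_toolbelt import MultipartEncoder, MultipartEncoderMonitor

e = MultipartEncoder(fields=[("json_data", "{}")])  # placeholder fields for illustration

with click.progressbar(length=e.len, label="Uploading files") as bar:

    def f(m: MultipartEncoderMonitor):
        # update every 100KB
        m.buf_bytes_read += m.bytes_read - m.prev_bytes_read
        m.prev_bytes_read = m.bytes_read
        if m.buf_bytes_read >= 1e5:
            bar.update(m.buf_bytes_read)
            m.buf_bytes_read = 0

    # the callback keeps its counters on the monitor instance itself,
    # so they must be initialised before the upload starts
    m = MultipartEncoderMonitor(e, callback=f)
    m.buf_bytes_read = 0
    m.prev_bytes_read = 0
    # requests would then stream the monitor, e.g. requests.post(url, data=m, ...)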
Example 2
    def post_files(self, files: FileList, **data: JSON) -> JSON:
        # upload files using custom json-data protocol
        # build the fields
        file_header = {"Content-Encoding": "gzip"}

        def mk_file_fields(field_name: str, f: Path):
            # compress the file, in-place
            # TODO - disable compression where unneeded, e.g. .gz, .zip, .png, etc
            with compress_file(f) as f_gz:
                return (
                    field_name,
                    (f.name, open(f_gz, "rb"), guess_type(f), file_header),
                )

        fields = [mk_file_fields(k, x) for (k, v) in files.items() for x in v]
        fields.append(("json_data", json.dumps(data)))

        e = MultipartEncoder(fields=fields)
        extra_headers = {"Content-Type": f"{e.content_type}; dp-files=True"}

        max_size = 25 if c.config.is_public else 100
        if e.len > max_size * SIZE_1_MB:
            raise ReportTooLargeError(
                f"Report and attachments over f{max_size} MB after compression (~{e.len/SIZE_1_MB:.1f} MB) - please reduce the size of your charts/plots"
            )
        elif e.len > SIZE_1_MB:
            log.debug("Using upload monitor")
            fill_char = click.style("=", fg="yellow")
            with click.progressbar(
                    length=e.len,
                    width=0,
                    show_eta=True,
                    label="Uploading files",
                    fill_char=fill_char,
            ) as bar:

                def f(m: MultipartEncoderMonitor):
                    # update every 100KB
                    m.buf_bytes_read += m.bytes_read - m.prev_bytes_read
                    m.prev_bytes_read = m.bytes_read
                    if m.buf_bytes_read >= 1e5:
                        # print(f"{m.buf_bytes_read=}, {m.prev_bytes_read=}")
                        bar.update(m.buf_bytes_read)
                        m.buf_bytes_read = 0

                m = MultipartEncoderMonitor(e, callback=f)
                m.buf_bytes_read = 0
                m.prev_bytes_read = 0
                r = self.session.post(self.url,
                                      data=m,
                                      headers=extra_headers,
                                      timeout=self.timeout)
        else:
            r = self.session.post(self.url,
                                  data=e,
                                  headers=extra_headers,
                                  timeout=self.timeout)
        return _process_res(r)
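
compress_file is a helper from the surrounding codebase and is not shown in these examples. A rough sketch of what such a context manager could look like, under the assumption that it gzips the file to a sibling path and yields that path (the real helper may differ, e.g. by genuinely compressing in place as the comment says):

import gzip
import shutil
from contextlib import contextmanager
from pathlib import Path

@contextmanager
def compress_file(f: Path):
    # assumed behaviour: write a gzipped copy next to the original and yield its path
    f_gz = f.with_suffix(f.suffix + ".gz")
    with open(f, "rb") as src, gzip.open(f_gz, "wb") as dst:
        shutil.copyfileobj(src, dst)
    yield f_gz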
Example 3
    def post_files(self, files: FileList, **data: JSON) -> JSON:
        # upload files using custom json-data protocol
        # build the fields
        file_header = {"Content-Encoding": "gzip"}

        def mk_file_fields(field_name: str, f: Path):
            # compress the file, in-place
            # TODO - disable compression where unneeded, e.g. .gz, .zip, .png, etc
            with compress_file(f) as f_gz:
                return (field_name, (f.name, open(f_gz, "rb"), guess_type(f), file_header))

        fields = [mk_file_fields(k, x) for (k, v) in files.items() for x in v]
        fields.append(("json_data", json.dumps(data)))

        e = MultipartEncoder(fields=fields)
        extra_headers = {"Content-Type": f"{e.content_type}; dp-files=True"}
        if e.len > 1e6:  # 1 MB
            log.debug("Using upload monitor")
            fill_char = click.style("=", fg="yellow")
            with click.progressbar(
                length=e.len, width=0, show_eta=True, label="Uploading files", fill_char=fill_char
            ) as bar:

                def f(m: MultipartEncoderMonitor):
                    # update every 100KB
                    m.buf_bytes_read += m.bytes_read - m.prev_bytes_read
                    m.prev_bytes_read = m.bytes_read
                    if m.buf_bytes_read >= 1e5:
                        # print(f"{m.buf_bytes_read=}, {m.prev_bytes_read=}")
                        bar.update(m.buf_bytes_read)
                        m.buf_bytes_read = 0

                m = MultipartEncoderMonitor(e, callback=f)
                m.buf_bytes_read = 0
                m.prev_bytes_read = 0
                r = self.session.post(self.url, data=m, headers=extra_headers, timeout=self.timeout)
        else:
            r = self.session.post(self.url, data=e, headers=extra_headers, timeout=self.timeout)
        return self._process_res(r)
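
For orientation, a hedged sketch of how post_files might be invoked; the client construction, field names, and keyword arguments below are illustrative assumptions, not taken from the examples. FileList maps a form field name to a list of paths, and any extra keyword arguments are serialised into the json_data field:

from pathlib import Path

# hypothetical client object exposing the post_files method shown above
client = make_client()  # assumed factory; construction details are not part of the examples

files = {"attachments": [Path("chart.html"), Path("data.csv")]}  # FileList: field name -> list of paths
result = client.post_files(files, name="My Report", description="Weekly KPIs")
# -> each path is gzipped and added as a multipart file part; the kwargs travel as json_data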