Example #1
0
 def _simple_upload(self):
     """One-shot multipart upload for payloads under 5MB.

     Reads the whole buffer, wraps it together with the JSON object
     metadata in a multipart/related body, and POSTs it in a single
     request.  Depending on ``self.consistency``, verifies the stored
     object's reported size or MD5 against what was sent.
     """
     self.buffer.seek(0)
     data = self.buffer.read()
     path = ('https://www.googleapis.com/upload/storage/v1/b/%s/o' %
             quote_plus(self.bucket))
     metadata = {'name': self.key}
     if self.metadata is not None:
         metadata['metadata'] = self.metadata
     metadata = json.dumps(metadata)
     # Build the multipart body in a separate variable so `data` keeps
     # referring to the raw object content: the MD5 consistency check
     # below must hash only the object bytes, not the multipart
     # envelope (GCS's md5Hash covers the object content alone).
     payload = ('--==0=='
                '\nContent-Type: application/json; charset=UTF-8'
                '\n\n' + metadata + '\n--==0=='
                '\nContent-Type: application/octet-stream'
                '\n\n').encode() + data + b'\n--==0==--'
     r = self.gcsfs._call(
         'POST',
         path,
         uploadType='multipart',
         headers={'Content-Type': 'multipart/related; boundary="==0=="'},
         data=payload)
     # Parse the response body once instead of twice.
     out = r.json()
     size, md5 = int(out['size']), out['md5Hash']
     if self.consistency == 'size':
         # buffer position is at EOF after read(), i.e. the content length
         assert size == self.buffer.tell(), "Size mismatch"
     if self.consistency == 'md5':
         self.md5.update(data)
         assert b64encode(
             self.md5.digest()) == md5.encode(), "MD5 checksum failed"
Example #2
0
File: core.py — Project: matthewtberry/gcsfs
 def _simple_upload(self):
     """One-shot multipart upload for payloads under 5MB.

     Wraps the buffered content plus JSON object metadata into a single
     multipart/related POST.  Depending on ``self.consistency``, checks
     the stored object's reported size or MD5 against what was sent.
     """
     self.buffer.seek(0)
     data = self.buffer.read()
     path = "https://www.googleapis.com/upload/storage/v1/b/%s/o" % quote_plus(
         self.bucket)
     metadata = {"name": self.key}
     if self.metadata is not None:
         metadata["metadata"] = self.metadata
     metadata = json.dumps(metadata)
     # Keep the raw object bytes in `data`; the multipart envelope goes
     # into `payload` so the MD5 consistency check hashes only the
     # object content (which is what GCS's md5Hash covers), not the
     # multipart framing.
     payload = (("--==0=="
                 "\nContent-Type: application/json; charset=UTF-8"
                 "\n\n" + metadata + "\n--==0=="
                 "\nContent-Type: {0}"
                 "\n\n").format(self.content_type).encode() + data +
                b"\n--==0==--")
     r = self.gcsfs._call(
         "POST",
         path,
         uploadType="multipart",
         headers={"Content-Type": 'multipart/related; boundary="==0=="'},
         data=payload,
     )
     # Parse the response body once instead of twice.
     out = r.json()
     size, md5 = int(out["size"]), out["md5Hash"]
     if self.consistency == "size":
         # buffer position is at EOF after read(), i.e. the content length
         assert size == self.buffer.tell(), "Size mismatch"
     if self.consistency == "md5":
         self.md5.update(data)
         assert b64encode(
             self.md5.digest()) == md5.encode(), "MD5 checksum failed"
Example #3
0
File: core.py — Project: rafa-guedes/gcsfs
    def _upload_chunk(self, final=False):
        """ Write one part of a multi-block file upload

        Parameters
        ----------
        final: bool
            Complete and commit upload

        Returns
        -------
        bool
            False when the write was deferred (non-autocommit, below
            minimum size) or the server accepted only part of the chunk;
            True otherwise.
        """
        self.buffer.seek(0)
        data = self.buffer.getvalue()
        head = {}
        length = len(data)
        if final and self.autocommit:
            if length:
                # Final chunk: Content-Range carries the total object size.
                head['Content-Range'] = 'bytes %i-%i/%i' % (
                    self.offset, self.offset + length - 1,
                    self.offset + length)
            else:
                # closing when buffer is empty
                head['Content-Range'] = 'bytes */%i' % self.offset
                data = None
        else:
            if length < GCS_MIN_BLOCK_SIZE:
                if not self.autocommit:
                    # Defer: too little data and nothing forces a flush.
                    # Return False (not None) so callers always get a bool.
                    return False
                elif not final:
                    raise ValueError("Non-final chunk write below min size.")
            # Intermediate chunk: total size still unknown ("*").
            head['Content-Range'] = 'bytes %i-%i/*' % (
                self.offset, self.offset + length - 1)
        head.update({'Content-Type': 'application/octet-stream',
                     'Content-Length': str(length)})
        r = self.gcsfs._call('POST', self.location,
                             uploadType='resumable', headers=head, data=data)
        if 'Range' in r.headers:
            # Server reports the last byte it actually stored; anything
            # beyond that is a shortfall we must resend.
            end = int(r.headers['Range'].split('-')[1])
            shortfall = (self.offset + length - 1) - end
            if shortfall:
                if self.consistency == 'md5':
                    self.md5.update(data[:-shortfall])
                # Retain the unaccepted tail for the next attempt.
                self.buffer = io.BytesIO(data[-shortfall:])
                self.buffer.seek(shortfall)
                self.offset += length - shortfall
                return False
            else:
                if self.consistency == 'md5':
                    self.md5.update(data)
        elif length:
            # No Range header: the upload has been committed.
            assert final, "Response looks like upload is over"
            out = r.json()
            size, md5 = int(out['size']), out['md5Hash']
            if self.consistency == 'size':
                # Account for the final chunk before comparing sizes;
                # without this the assert compares against the pre-chunk
                # offset and fails on any non-empty final chunk.
                self.offset += length
                assert size == self.buffer.tell() + self.offset, "Size mismatch"
            if self.consistency == 'md5':
                # Fold the final chunk into the digest before comparing;
                # otherwise the local MD5 is missing the last chunk.
                self.md5.update(data)
                assert b64encode(
                    self.md5.digest()) == md5.encode(), "MD5 checksum failed"
        else:
            # Empty final flush: the response should indicate completion.
            assert final, "Response looks like upload is over"
        return True
Example #4
0
File: core.py — Project: jhamman/gcsfs
 def _upload_chunk(self, final=False):
     """Send the buffered bytes as one chunk of a resumable upload.

     When ``final`` is true, the Content-Range header carries the total
     object size (or ``bytes */total`` when the buffer is empty) so the
     server commits the upload; otherwise the total is left open.
     Consistency checks (size/md5) run once the server reports the
     upload as complete.
     """
     self.buffer.seek(0)
     chunk = self.buffer.read()
     head = self.gcsfs.header.copy()
     # read() leaves the buffer position at EOF, i.e. the chunk length
     nbytes = self.buffer.tell()
     if not final:
         head['Content-Range'] = 'bytes %i-%i/*' % (self.offset,
                                                    self.offset + nbytes - 1)
     elif nbytes:
         head['Content-Range'] = 'bytes %i-%i/%i' % (
             self.offset, self.offset + nbytes - 1, self.offset + nbytes)
     else:
         # closing when buffer is empty
         head['Content-Range'] = 'bytes */%i' % self.offset
         chunk = None
     head['Content-Type'] = 'application/octet-stream'
     head['Content-Length'] = str(nbytes)
     r = requests.post(self.location,
                       params={'uploadType': 'resumable'},
                       headers=head,
                       data=chunk)
     validate_response(r, self.location)
     if 'Range' in r.headers:
         assert not final, "Response looks like upload is partial"
         last_byte = int(r.headers['Range'].split('-')[1])
         shortfall = (self.offset + nbytes - 1) - last_byte
         if shortfall:
             if self.consistency == 'md5':
                 self.md5.update(chunk[:-shortfall])
             # keep the unaccepted tail for the next attempt
             self.buffer = io.BytesIO(chunk[-shortfall:])
             self.buffer.seek(shortfall)
         else:
             if self.consistency == 'md5':
                 self.md5.update(chunk)
             self.buffer = io.BytesIO()
         self.offset += nbytes - shortfall
     else:
         assert final, "Response looks like upload is over"
         out = r.json()
         size, md5 = int(out['size']), out['md5Hash']
         if self.consistency == 'size':
             # old buffer's tell() is still the chunk length here
             assert size == self.buffer.tell() + self.offset, "Size mismatch"
         if self.consistency == 'md5':
             assert b64encode(
                 self.md5.digest()) == md5.encode(), "MD5 checksum failed"
         self.buffer = io.BytesIO()
         self.offset += nbytes
Example #5
0
File: core.py — Project: kayibal/gcsfs
 def _simple_upload(self):
     """Upload the whole buffer with a single media POST (payloads <5MB)."""
     self.buffer.seek(0)
     payload = self.buffer.read()
     endpoint = ('https://www.googleapis.com/upload/storage/v1/b/%s/o' %
                 quote_plus(self.bucket))
     params = {'uploadType': 'media', 'name': self.key}
     r = self.gcsfs.session.post(endpoint, params=params, data=payload)
     validate_response(r, endpoint)
     out = r.json()
     size, md5 = int(out['size']), out['md5Hash']
     if self.consistency == 'size':
         # buffer position is at EOF after read(), i.e. the content length
         assert size == self.buffer.tell(), "Size mismatch"
     if self.consistency == 'md5':
         self.md5.update(payload)
         assert b64encode(
             self.md5.digest()) == md5.encode(), "MD5 checksum failed"
Example #6
0
    def _upload_chunk(self, final=False):
        """ Write one part of a multi-block file upload

        Parameters
        ----------
        final: bool
            Complete and commit upload

        Returns
        -------
        bool
            False when the write was deferred (non-autocommit, below the
            minimum block size) or the server accepted only part of the
            chunk; True otherwise.
        """
        self.buffer.seek(0)
        data = self.buffer.getvalue()
        head = {}
        l = len(data)
        if final and self.autocommit:
            if l:
                # Final chunk: Content-Range includes the total object size.
                head["Content-Range"] = "bytes %i-%i/%i" % (
                    self.offset,
                    self.offset + l - 1,
                    self.offset + l,
                )
            else:
                # closing when buffer is empty
                head["Content-Range"] = "bytes */%i" % self.offset
                data = None
        else:
            if l < GCS_MIN_BLOCK_SIZE:
                if not self.autocommit:
                    # Too little data and no forced flush: defer the write.
                    return False
                elif not final:
                    raise ValueError("Non-final chunk write below min size.")
            # Intermediate chunk: total size still unknown ("*").
            head["Content-Range"] = "bytes %i-%i/*" % (self.offset, self.offset + l - 1)
        head.update({"Content-Type": self.content_type, "Content-Length": str(l)})
        r = self.gcsfs._call(
            "POST", self.location, uploadType="resumable", headers=head, data=data
        )
        if "Range" in r.headers:
            # Server reports the last byte it actually stored; anything
            # beyond that is a shortfall that must be resent.
            end = int(r.headers["Range"].split("-")[1])
            shortfall = (self.offset + l - 1) - end
            if shortfall:
                if self.consistency == "md5":
                    self.md5.update(data[:-shortfall])
                # Retain the unaccepted tail for the next attempt; seek to
                # the end so subsequent writes append after it.
                self.buffer = io.BytesIO(data[-shortfall:])
                self.buffer.seek(shortfall)
                self.offset += l - shortfall
                return False
            else:
                if self.consistency == "md5":
                    self.md5.update(data)
        elif l:
            # No Range header with data sent: the upload has committed.
            assert final, "Response looks like upload is over"
            size, md5 = int(r.json()["size"]), r.json()["md5Hash"]
            if self.consistency == "size":
                # update offset with final chunk of data
                self.offset += l
                assert size == self.buffer.tell() + self.offset, "Size mismatch"
            if self.consistency == "md5":
                # update md5 with final chunk of data
                self.md5.update(data)
                assert (
                    b64encode(self.md5.digest()) == md5.encode()
                ), "MD5 checksum failed"
        else:
            # Empty final flush: the response should indicate completion.
            assert final, "Response looks like upload is over"
        return True