Example #1
def _upload_bytes(self, data: IO):
    print(
        "You are using the LocalModelProvider. Therefore no upload is done!"
    )
    filename = datetime.datetime.now().strftime("data_%Y%m%d_%H%M%S.npz")
    with open(os.path.join(self.upload_dir, filename), 'wb') as f:
        f.write(data.getbuffer())
    print('File saved')
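
Example #1 is a no-op provider that dumps the buffer to a timestamped .npz file instead of uploading it. Below is a minimal, self-contained usage sketch; the LocalModelProvider wrapper class and the upload_dir attribute are hypothetical reconstructions, since only the method body is shown in the snippet.

import datetime
import io
import os
from typing import IO


class LocalModelProvider:
    # Hypothetical minimal host class; only upload_dir is needed by _upload_bytes.
    def __init__(self, upload_dir: str):
        self.upload_dir = upload_dir
        os.makedirs(upload_dir, exist_ok=True)

    def _upload_bytes(self, data: IO):
        print(
            "You are using the LocalModelProvider. Therefore no upload is done!"
        )
        filename = datetime.datetime.now().strftime("data_%Y%m%d_%H%M%S.npz")
        with open(os.path.join(self.upload_dir, filename), 'wb') as f:
            f.write(data.getbuffer())
        print('File saved')


if __name__ == "__main__":
    provider = LocalModelProvider(upload_dir="local_uploads")
    buffer = io.BytesIO(b"example payload")  # in practice, e.g. np.savez(buffer, ...)
    provider._upload_bytes(buffer)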
Example #2
def _upload_bytes(self, data: IO):
    # Note: we don't pass data directly to requests because the byte stream is not at the start.
    # Use getbuffer or getvalue instead. See https://github.com/psf/requests/issues/2589
    print("Uploading data")
    filename = datetime.datetime.now().strftime("data_%Y%m%d_%H%M%S.npz")
    filename_out = os.path.join(self.temp_upload_dir, filename)
    with open(filename_out, 'wb') as f:
        f.write(data.getbuffer())
    upload_thread = threading.Thread(target=upload_data, args=(self.url_base, filename_out, self.api_key))
    upload_thread.start()
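
The linked requests issue is about stream position: after the buffer has been written, its read position sits at the end, so handing the stream object straight to requests would send an empty body. The snippet sidesteps this by writing data.getbuffer() to a temp file and posting that file from a background thread (upload_data, self.url_base, self.temp_upload_dir and self.api_key are defined elsewhere and not shown). A short, runnable illustration of the underlying pitfall:

import io

buf = io.BytesIO()
buf.write(b"model weights ...")  # after the write, the stream position is at the end

# Anything that reads from the current position (requests included) now sees no data.
print(buf.read())        # b''

# Workaround 1: rewind the stream before handing it over.
buf.seek(0)
print(buf.read())        # b'model weights ...'

# Workaround 2: getvalue()/getbuffer() return the full contents regardless of position,
# which is what the snippet above relies on.
print(buf.getvalue())    # b'model weights ...'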
Example #3
def _put_file(self, key: str, file: IO) -> str:
    blob = self._bucket.blob(key)
    with map_gcloud_exceptions(key):
        if isinstance(file, io.BytesIO):
            # Not passing a size triggers a resumable upload, which avoids trying to
            # upload large files in a single request.
            # For BytesIO, getting the size is cheap, therefore we pass it.
            blob.upload_from_file(file_obj=file,
                                  size=file.getbuffer().nbytes)
        else:
            blob.upload_from_file(file_obj=file)
    return key
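
Example #3 targets Google Cloud Storage: self._bucket is a google.cloud.storage bucket, and map_gcloud_exceptions is a context manager (not shown) that translates gcloud errors for the store. Below is a sketch of the same single-request-vs-resumable decision made against the client library directly; the bucket name and object key are placeholders, and it assumes application default credentials are configured.

import io

from google.cloud import storage  # pip install google-cloud-storage

client = storage.Client()                      # uses application default credentials
bucket = client.bucket("my-example-bucket")    # placeholder bucket name
blob = bucket.blob("models/data_example.npz")  # placeholder object key

payload = io.BytesIO(b"...")

# Per the comment in the snippet: omitting size makes the client fall back to a
# resumable (chunked) upload, while passing a known size lets it send small
# objects in a single request. For BytesIO the size is cheap to compute.
blob.upload_from_file(payload, size=payload.getbuffer().nbytes)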