def test_media_io_base_stream_unlimited_chunksize_resume(self):
    """A failed single-chunk upload resumes from the range the server reports."""
    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
    zoo = build('zoo', 'v1', http=self.http)

    # Seekable stream, uploaded as one chunk (chunksize=-1 means unlimited).
    stream = BytesIO(b'01234"56789"')
    upload = MediaIoBaseUpload(
        fd=stream, mimetype='text/plain', chunksize=-1, resumable=True)
    request = zoo.animals().insert(media_body=upload, body=None)

    # The single chunk fails; the 308 reports bytes 0-4 received, so the
    # retry must restart at the right point.
    mock_http = HttpMockSequence([
        ({'status': '200', 'location': 'http://upload.example.com'}, ''),
        ({'status': '308',
          'location': 'http://upload.example.com/2',
          'range': '0-4'}, ''),
        ({'status': '200'}, 'echo_request_body'),
    ])
    body = request.execute(http=mock_http)
    self.assertEqual('56789', body)
def test_resumable_media_handle_uploads_of_unknown_size_eof(self):
    """At EOF the final chunk carries the now-known total size (14 bytes)."""
    mock_http = HttpMockSequence([
        ({'status': '200', 'location': 'http://upload.example.com'}, ''),
        ({'status': '200'}, 'echo_request_headers_as_json'),
    ])
    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
    zoo = build('zoo', 'v1', http=self.http)

    # The upload does not know the full size of the media up front.
    stream = BytesIO(b'data goes here')
    upload = MediaIoBaseUpload(
        fd=stream, mimetype='image/png', chunksize=15, resumable=True)
    request = zoo.animals().insert(media_body=upload, body=None)

    status, body = request.next_chunk(http=mock_http)
    self.assertEqual(body, {
        'Content-Range': 'bytes 0-13/14',
        'Content-Length': '14',
    })
def save_byte_file(self, content, file_id, filename="results"):
    """Save byte file in Gdrive.

    :param content: the content of the file (string)
    :param file_id: the file id if file exist
    :param filename: the filename if fileid does not exist
    """
    mime_type = "text/json"
    # JSON-serialize the content, then base64-encode it before uploading.
    data = io.BytesIO(base64.b64encode(json.dumps(content).encode()))
    media = MediaIoBaseUpload(data, mimetype=mime_type)

    if file_id is None:
        # No existing file: create a new one and report its id/name.
        metadata = {
            "name": filename,
            "mimeType": mime_type,
        }
        created = self.service.files().create(
            body=metadata, media_body=media, fields="id, name").execute()
        print("Uploaded File '{}' with ID: {}".format(
            created.get("name"), created.get("id")))
    else:
        # Existing file: replace its content in place.
        self.service.files().update(
            body={'mimeType': mime_type},
            fileId=file_id,
            media_mime_type=mime_type,
            media_body=media).execute()
def save_new_file(gds: Resource, file_data: BytesIO, mimetype: str,
                  folder_id: str, filename: str) -> dict:
    """Uploads a new file to Google Drive.

    Args:
        gds (Resource): google drive service.
        file_data (BytesIO): file content as a buffer.
        mimetype (str): MIME type of the file.
        folder_id (str): Google Drive's id of the folder.
        filename (str): filename of the file.

    Returns:
        dict: metadata of the uploaded file (the new file's id).
    """
    log("Saving new file: %s", filename)
    metadata = {
        "name": filename,
        "mimeType": mimetype,
        "parents": [folder_id],
    }
    media = MediaIoBaseUpload(file_data, mimetype=mimetype)
    return gds.files().create(
        body=metadata, media_body=media, fields="id").execute()
def test_media_io_base_stream_chunksize_resume(self):
    """A failed 5-byte chunk surfaces exactly the bytes sent in the HttpError."""
    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
    zoo = build('zoo', 'v1', http=self.http)

    # Seekable stream, uploaded in 5-byte chunks.
    stream = BytesIO(b'0123456789')
    upload = MediaIoBaseUpload(
        fd=stream, mimetype='text/plain', chunksize=5, resumable=True)
    request = zoo.animals().insert(media_body=upload, body=None)

    # The first chunk fails with a 400; pull the content sent out of the
    # exception.
    mock_http = HttpMockSequence([
        ({'status': '200', 'location': 'http://upload.example.com'}, ''),
        ({'status': '400'}, 'echo_request_body'),
    ])
    try:
        request.execute(http=mock_http)
    except HttpError as e:
        self.assertEqual(b'01234', e.content)
def uploadFileToBucket(path, bucket_name, dest_path=None):
    '''Upload file to the bucket'''
    from googleapiclient.http import MediaIoBaseUpload

    storage = _getStorage()
    body = {'name': dest_path or path}
    # if the plugin was called from a windows OS, we need to convert the path separators for gsutil
    if platform.system() == 'Windows':
        body['name'] = pathlib.PurePath(body['name']).as_posix()
    print('INFO: Uploading file to "gs://%s/%s"...' % (bucket_name, body['name']))
    with open(path, 'rb') as f:
        # TODO: make sure file uploaded or there is an isssue
        upload = MediaIoBaseUpload(f, 'application/octet-stream',
                                   chunksize=8 * 1024 * 1024)
        storage.objects().insert(
            bucket=bucket_name,
            body=body,
            media_body=upload,
        ).execute()
    return True
def upload_file(self, filename: str, text: str):
    """Upload a CSV-formatted text file to Google Drive.

    Parameters
    ----------
    filename : str
        Name to give the uploaded file.
    text : str
        File contents (CSV-formatted stock data).

    Returns
    -------
    str
        The Drive file id of the created file.
    """
    buffer = io.BytesIO(text.encode('utf8'))
    metadata = {
        "name": filename,
        "mimeType": self.MIME_TYPE,
        "parents": [self.file_key],
    }
    media = MediaIoBaseUpload(buffer, mimetype=self.MIME_TYPE, resumable=True)
    created = self.service.files().create(
        body=metadata, media_body=media, fields='id').execute()
    return created['id']
def object_put(auth, path, data, mimetype='application/octet-stream'):
    """Upload a stream to Cloud Storage with chunked, retried resumable upload.

    Args:
        auth: credentials/auth mode passed through to get_service.
        path: '<bucket>:<object name>' string; split on the first ':'.
        data: readable stream handed to MediaIoBaseUpload.
        mimetype: MIME type of the object being uploaded.

    Raises:
        HttpError: immediately for non-5xx API errors, or the last error
            after RETRIES consecutive retryable failures.
    """
    bucket, filename = path.split(':', 1)
    service = get_service('storage', 'v1', auth)
    media = MediaIoBaseUpload(data, mimetype=mimetype, chunksize=CHUNKSIZE, resumable=True)
    request = service.objects().insert(bucket=bucket, name=filename, media_body=media)
    response = None
    errors = 0  # count of *consecutive* failed chunks
    while response is None:
        error = None
        try:
            # next_chunk returns (status, None) until the final chunk,
            # then (status, response) — which ends the loop.
            status, response = request.next_chunk()
            if project.verbose and status:
                print("Uploaded %d%%." % int(status.progress() * 100))
        except HttpError as e:
            # Only 5xx responses are considered retryable.
            if e.resp.status < 500:
                raise
            error = e
        except (httplib2.HttpLib2Error, IOError) as e:
            # Transport-level failures are always retryable.
            error = e
        # Reset the counter on any successful chunk; bail out after too
        # many consecutive failures.
        errors = (errors + 1) if error else 0
        if errors > RETRIES:
            raise error
    if project.verbose:
        print("Uploaded 100%.")
def upload_revision(self, document_name, document, folder_id, original_format,
                    title='Untitled', target_format='*/*'):
    """Upload file to a Google Drive folder.

    Args:
        document_name: Name of the document.
        document: Content of the document to upload (bytes).
        folder_id: Id of the Google Drive folder.
        original_format: File format of the document content.
        title: Document title.
        target_format: File format that the uploaded file will transform
            into.

    Returns:
        A string representing the uploaded file's id.
    """
    metadata = {
        'name': document_name,
        'title': title,
        'parents': [folder_id],
        'mimeType': target_format,
    }
    stream = BytesIO(document)
    media = MediaIoBaseUpload(stream, mimetype=original_format, resumable=True)
    uploaded = self._service.files().insert(
        body=metadata, media_body=media, convert=True, fields='id').execute()
    print('File ID: ' + uploaded.get('id'))
    return uploaded.get('id')
def create_file_in_folder(self, folder_id, filename, file_stream, mimetype):
    """Creates a new file in the specified folder.

    Args:
        folder_id (str): google resource ID for the drive folder to put the
            file into.
        filename (str): name of the uploaded file.
        file_stream (file-like/stream): contents of the file to upload.
        mimetype (str): mimetype of the given file.

    Returns:
        file ID (str).

    Throws:
        googleapiclient.errors.HttpError: For some non-retryable 4xx or 5xx
            error. See the full list here:
            https://developers.google.com/drive/api/v3/handle-errors
    """
    media = MediaIoBaseUpload(file_stream, mimetype=mimetype)
    metadata = {
        'name': filename,
        'parents': [folder_id],
    }
    uploaded = self._client.files().create(  # pylint: disable=no-member
        body=metadata, media_body=media, fields='id').execute()
    file_id = uploaded.get('id')
    LOG.info(u'File uploaded: ID="{}", name="{}"'.format(
        file_id, filename).encode('utf-8'))
    return file_id
def ext_upload_chunked_part(chunk):
    """Encode one mmap'ed slice of a file and upload it as a Drive document.

    Returns the number of raw (pre-encoding) bytes in the slice.
    """
    api = GoogleAPI()
    with open(chunk.path, "r") as fd:
        mapped = mmap.mmap(fd.fileno(), 0, access=mmap.ACCESS_READ)
        raw = mapped[chunk.range_start:chunk.range_end]
        encoded = Encoder.encode(raw)
        metadata = {
            'name': chunk.media.name + str(chunk.part),
            'mimeType': 'application/vnd.google-apps.document',
            'parents': [chunk.parent],
            'properties': {
                'part': str(chunk.part)
            }
        }
        # The encoded payload is text, so wrap it in a StringIO for upload.
        media = MediaIoBaseUpload(io.StringIO(encoded), mimetype='text/plain')
        api.upload_single_file(media, metadata)
        return len(raw)
def push(bucket, local_file, metadata=None): objconn = JBoxGS.connect().objects() fh = open(local_file, "rb") media = MediaIoBaseUpload(fh, JBoxGS._get_mime_type(local_file), resumable=True, chunksize=JBoxGS.CHUNK_SIZE*1024*1024) uploader = None if metadata: uploader = objconn.insert(bucket=bucket, media_body=media, name=os.path.basename(local_file), body={"metadata": metadata}) else: uploader = objconn.insert(bucket=bucket, media_body=media, name=os.path.basename(local_file)) done = False num_retries = 0 while not done: try: _, done = uploader.next_chunk() except HttpError, err: num_retries += 1 if num_retries > JBoxGS.MAX_RETRIES: fh.close() raise if err.resp.status in JBoxGS.RETRYABLE_ERRORS: backoff = min(JBoxGS.BACKOFF_FACTOR ** (num_retries - 1), JBoxGS.MAX_BACKOFF) sleep(backoff + random()) else: sleep(JBoxGS.SLEEP_TIME) except:
def make_backup_google_drive(self, ts, name, dump_stream, info_file,
                             info_file_content, cloud_params):
    """Upload the database dump and its info file to Google Drive."""
    GoogleDriveService = self.env[
        "ir.config_parameter"].get_google_drive_service()
    folder_id = self.env["ir.config_parameter"].get_param(
        "odoo_backup_sh_google_disk.google_disk_folder_id")
    # (stream, mimetype, metadata) for each of the two backup objects.
    uploads = [
        (dump_stream, "application/zip", {
            "name": compute_backup_filename(
                name, ts, info_file_content.get("encrypted")),
            "parents": [folder_id],
        }),
        (info_file, "text/plain", {
            "name": compute_backup_info_filename(name, ts),
            "parents": [folder_id],
        }),
    ]
    for stream, mimetype, metadata in uploads:
        # Rewind before upload: the streams have already been written to.
        stream.seek(0)
        media = MediaIoBaseUpload(stream, mimetype, resumable=True)
        GoogleDriveService.files().create(
            body=metadata, media_body=media, fields="id").execute()
def write_file(self, bucket_name, file_name, content, content_type):
    """Upload raw bytes as an object in the given bucket and return the API response."""
    upload = MediaIoBaseUpload(io.BytesIO(content), content_type)
    return self.objects().insert(
        bucket=bucket_name, name=file_name, media_body=upload).execute()
def test_media_io_base_next_chunk_no_retry_403_not_configured(self):
    """A 403 'not configured' response must fail immediately, never retry."""
    stream = BytesIO(b"i am png")
    upload = MediaIoBaseUpload(
        fd=stream, mimetype='image/png', chunksize=500, resumable=True)
    http = HttpMockSequence([
        ({'status': '403'}, NOT_CONFIGURED_RESPONSE),
        ({'status': '200'}, '{}'),
    ])
    model = JsonModel()
    request = HttpRequest(
        http,
        model.response,
        u'https://www.googleapis.com/someapi/v1/upload/?foo=bar',
        method=u'POST',
        headers={},
        resumable=upload)
    request._rand = lambda: 1.0
    request._sleep = mock.MagicMock()
    with self.assertRaises(HttpError):
        request.execute(num_retries=3)
    # Non-retryable failure: the backoff sleep must never have run.
    request._sleep.assert_not_called()
def _save(self, name, content):
    """Upload *content* to Google Drive under *name* and apply permissions.

    Creates (or reuses) the folder hierarchy implied by *name*, uploads the
    file into it, then grants each configured permission.

    Args:
        name: storage-relative path of the file; joined onto
            GOOGLE_DRIVE_STORAGE_MEDIA_ROOT.
        content: Django File-like object; its ``.file`` stream is uploaded.

    Returns:
        The stored file name reported by Drive (originalFilename, falling
        back to name).
    """
    name = os.path.join(settings.GOOGLE_DRIVE_STORAGE_MEDIA_ROOT, name)
    folder_path = os.path.sep.join(self._split_path(name)[:-1])
    folder_data = self._get_or_create_folder(folder_path)
    parent_id = None if folder_data is None else folder_data['id']
    # Now we had created (or obtained) folder on GDrive
    # Upload the file.
    # BUG FIX: guess_type() returns a (type, encoding) tuple; the original
    # passed the whole tuple as the mimetype whenever the guess succeeded.
    # Use only the type string, falling back to the unknown-mimetype marker.
    mime_type = mimetypes.guess_type(name)[0]
    if mime_type is None:
        mime_type = self._UNKNOWN_MIMETYPE_
    media_body = MediaIoBaseUpload(content.file, mime_type,
                                   resumable=True, chunksize=1024 * 512)
    body = {
        'name': self._split_path(name)[-1],
        'mimeType': mime_type
    }
    # Set the parent folder.
    if parent_id:
        body['parents'] = [parent_id]
    file_data = self._drive_service.files().create(
        body=body, media_body=media_body).execute()
    # Setting up permissions
    for p in self._permissions:
        self._drive_service.permissions().create(
            fileId=file_data["id"], body={**p.raw}).execute()
    return file_data.get(u'originalFilename', file_data.get(u'name'))
def create_file(fileName, parentID=None, drive_service=None, raw_data=None):
    '''Create a file on Drive and make it public; returns the newly created file's ID.'''
    if not drive_service:
        drive_service = create_drive_service()
    body = {
        'name': fileName,
        'mimeType': "text/plain",
    }
    if parentID:
        body['parents'] = [parentID]
    if raw_data:
        fh = BytesIO(raw_data)
        media = MediaIoBaseUpload(fh, mimetype='text/plain',
                                  chunksize=1024 * 1024, resumable=True)
    else:
        # BUG FIX: the original assigned `media_body = None` here but then
        # referenced `media` below, raising NameError whenever raw_data was
        # empty. Use one consistent name so a metadata-only file is created.
        media = None
    file = drive_service.files().create(body=body, fields='id',
                                        media_body=media).execute()
    logging.info('created folder %s' % file['id'])
    make_file_public(drive_service, file['id'])
    return file['id']
def upload_video(video: BytesIO, tags: list, title: str, desc: str,
                 youtube: googleapiclient.discovery.Resource):
    """Uploads the video, returns response from `youtube` service."""
    snippet = {
        'title': truncate('Nightcore - ' + title.strip(), 100),
        'description': desc,
        'tags': create_tags(tags),
        'categoryId': YT_CATEGORY,
    }
    body = {
        'snippet': snippet,
        'status': {'privacyStatus': 'public'},
    }
    media = MediaIoBaseUpload(video, VIDEO_MIME, chunksize=1024 * 1024,
                              resumable=True)
    logging.info('Uploading video to YouTube')
    req = youtube.videos().insert(
        part=','.join(body.keys()) + ',id',
        body=body,
        media_body=media,
    )
    logging.debug('Response from YouTube: %s',
                  json.dumps(req.execute(), indent=None))
def googledrive_upload(self, filename, file_handle, parent_id=None):
    """Upload *file_handle* to Drive as *filename*, creating or updating.

    If a file with the same name already exists under *parent_id* it is
    updated in place; otherwise a new file is created.
    """
    mime = MimeTypes()
    guessed_type = mime.guess_type(filename)[0]
    file_metadata = {gdu.NAME: filename}
    if self.write_as_google_doc and guessed_type == gdu.CSV:
        # Store CSVs as native Google Sheets when configured to do so.
        file_metadata[gdu.MIME_TYPE] = gdu.SPREADSHEET
        # NOTE(review): nesting reconstructed from a flattened source — this
        # rename appears to apply only within the Sheets-conversion branch;
        # confirm against the original layout.
        if filename.lower().endswith(".csv"):
            file_metadata[gdu.NAME] = filename + ".csv"
    if guessed_type is None:
        # Fall back to a generic binary MIME type when guessing fails.
        guessed_type = gdu.BINARY_STREAM
    media = MediaIoBaseUpload(file_handle, mimetype=guessed_type,
                              resumable=True)
    # Look for an existing, non-trashed file of the same name under the parent.
    query = gdu.query_parents_in([parent_id], name=filename, trashed=False)
    files = self.googledrive_list(query)
    if len(files) == 0:
        self.googledrive_create(body=file_metadata, media_body=media,
                                parent_id=parent_id)
    else:
        self.googledrive_update(file_id=gdu.get_id(files[0]),
                                body=file_metadata, media_body=media,
                                parent_id=parent_id)
def export_to_drive(request, credential, sheet_id):
    """ Export a sheet to Google Drive. """
    http = credential.authorize(httplib2.Http())
    service = build('drive', 'v3', http=http, cache_discovery=False)

    sheet = get_sheet(sheet_id)
    if 'error' in sheet:
        return jsonResponse({'error': {'message': sheet["error"]}})

    # Upload the rendered HTML; Drive converts it to a Google Doc.
    metadata = {
        'name': strip_tags(sheet['title'].strip()),
        'mimeType': 'application/vnd.google-apps.document',
    }
    html_bytes = bytes(sheet_to_html_string(sheet), "utf8")
    media = MediaIoBaseUpload(BytesIO(html_bytes), mimetype='text/html',
                              resumable=True)
    new_file = service.files().create(body=metadata, media_body=media,
                                      fields='webViewLink').execute()
    return jsonResponse(new_file)
def upload_chunked_part(self, chunk, api=None):
    """Upload a chunked part to drive and return the size of the chunk.

    Args:
        chunk: object describing the slice (path, range_start, range_end,
            media.name, part, parent).
        api: optional API client; defaults to ``self.api``.

    Returns:
        Number of raw (pre-encoding) bytes in the chunk.
    """
    if not api:
        api = self.api
    with open(chunk.path, "r") as fd:
        mm = mmap.mmap(fd.fileno(), 0, access=mmap.ACCESS_READ)
        chunk_bytes = mm[chunk.range_start:chunk.range_end]
        encoded_chunk = encoder.encode(chunk_bytes)
        file_metadata = {
            'name': chunk.media.name + str(chunk.part),
            'mimeType': 'application/vnd.google-apps.document',
            'parents': [chunk.parent],
            'properties': {
                'part': str(chunk.part)
            }
        }
        mediaio_file = MediaIoBaseUpload(io.StringIO(encoded_chunk),
                                         mimetype='text/plain')
        # BUG FIX: the original resolved the `api` parameter above but then
        # ignored it and called self.api directly, so a caller-supplied
        # client was never used.
        api.upload_single_file(mediaio_file, file_metadata)
        return len(chunk_bytes)
def upload_file(self, temporary_file_name, filename, file_type, vid, parent_id):
    """Upload a downloaded recording asset to Drive and return its file id.

    Args:
        temporary_file_name: local path of the downloaded file; deleted
            after a successful upload.
        filename: ignored — overwritten per file_type below (kept for
            interface compatibility).
        file_type: one of 'MP4', 'M4A', 'CHAT', 'TRANSCRIPT'.
        vid: response object; its content-length header is validated here.
        parent_id: Drive folder id to upload into.
    """
    # Raises early if the response carries no content-length header.
    total_size = int(vid.headers.get('content-length'))
    file_id = None
    mimetype = ""
    if file_type == 'MP4':
        mimetype = "video/mp4"
        filename = f'(unknown).mp4'
    elif file_type == 'M4A':
        mimetype = "audio/m4a"
        filename = f'(unknown).m4a'
    elif file_type == 'CHAT':
        mimetype = 'text/plain'
        filename = f'(unknown).txt'
    elif file_type == 'TRANSCRIPT':
        filename = f'(unknown).vtt'
        mimetype = 'text/vtt'
    with open(temporary_file_name, 'rb') as temporary_file:
        chunk_size = 1024 * 1024
        media = MediaIoBaseUpload(temporary_file, mimetype,
                                  resumable=True, chunksize=chunk_size)
        body = {
            "name": filename,
            "parents": [parent_id],
            # BUG FIX: the Drive API metadata key is "mimeType"; the
            # original "mimetype" key was silently ignored by the API.
            "mimeType": mimetype
        }
        res = self.drive_service.files().create(
            body=body, media_body=media, fields='id').execute()
    # Remove the temp file only after the handle is closed (required on
    # Windows) and the upload has succeeded.
    os.remove(temporary_file_name)
    file_id = res.get('id')
    logger.info(f'**** uploaded file (unknown) in drive folder_id {parent_id}')
    return file_id
def createFile(self, userFolderId, filename, filecontent, mimeType):
    """takes file content in string as input and stores it into a file with
    name filename in the folder for the user

    Returns the id of the created Drive file.
    """
    # Encode string content to bytes; bytes input is passed through as-is.
    if isinstance(filecontent, str):
        filecontent = filecontent.encode()
    if userFolderId is not None:
        file_metadata = {
            'name': filename,
            'parents': [userFolderId],
            'mimeType': mimeType
        }
    else:
        file_metadata = {
            'name': filename,
            'mimeType': mimeType
        }
    fh = io.BytesIO(filecontent)
    data = MediaIoBaseUpload(fh, mimetype=mimeType, resumable=True)
    file = self.service.files().create(body=file_metadata,
                                       media_body=data,
                                       fields='id').execute()
    return file.get('id')
def upload_file(service, filename, filedata):
    """Upload *filedata* into a per-prefix subfolder (first 3 chars of the name)."""
    # Find an existing prefix folder under the upload root, if any.
    results = service.files().list(
        q=(f"parents in '{UPLOAD_ID}' and name='{filename[:3]}'"
           " and mimeType='application/vnd.google-apps.folder'"),
        pageSize=1, fields='files(id)').execute()
    folder = results['files'][0] if len(results['files']) else None
    if not folder:
        # create folder
        folder_metadata = {
            'name': filename[:3],
            'parents': [UPLOAD_ID],
            'mimeType': 'application/vnd.google-apps.folder'
        }
        folder = service.files().create(body=folder_metadata,
                                        fields='id').execute()
    media = MediaIoBaseUpload(io.BytesIO(filedata),
                              mimetype='application/octet-stream',
                              resumable=False)
    created = service.files().create(
        body={'name': filename, 'parents': [folder['id']]},  # upload file
        media_body=media,
        fields='id').execute()
    if not created.get('id'):
        raise RuntimeError('Error uploading file!')
def test_media_io_base_next_chunk_retries(self):
    """5xx responses on both upload creation and chunks trigger backoff retries."""
    try:
        import io
    except ImportError:
        return
    # BUG FIX: open the PNG fixture in binary mode — in text mode ('r'),
    # read() returns str, which io.BytesIO rejects on Python 3 (and newline
    # translation would corrupt the binary data anyway). The context manager
    # also guarantees the handle is closed.
    with open(datafile('small.png'), 'rb') as f:
        fd = io.BytesIO(f.read())
    upload = MediaIoBaseUpload(fd=fd, mimetype='image/png', chunksize=500,
                               resumable=True)
    # Simulate 5XXs for both the request that creates the resumable upload and
    # the upload itself.
    http = HttpMockSequence([
        ({'status': '500'}, ''),
        ({'status': '500'}, ''),
        ({'status': '503'}, ''),
        ({'status': '200', 'location': 'location'}, ''),
        ({'status': '500'}, ''),
        ({'status': '500'}, ''),
        ({'status': '503'}, ''),
        ({'status': '200'}, '{}'),
    ])
    model = JsonModel()
    uri = u'https://www.googleapis.com/someapi/v1/upload/?foo=bar'
    method = u'POST'
    request = HttpRequest(http, model.response, uri, method=method,
                          headers={}, resumable=upload)
    sleeptimes = []
    request._sleep = lambda x: sleeptimes.append(x)
    request._rand = lambda: 10
    request.execute(num_retries=3)
    # Three exponential backoffs (10*2, 10*4, 10*8) per failing phase.
    self.assertEqual([20, 40, 80, 20, 40, 80], sleeptimes)
def main(): creds = None # The file token.pickle stores the user's access and refresh tokens, and is # created automatically when the authorization flow completes for the first # time. if os.path.exists('token.pickle'): with open('token.pickle', 'rb') as token: creds = pickle.load(token) # If there are no (valid) credentials available, let the user log in. if not creds or not creds.valid: if creds and creds.expired and creds.refresh_token: creds.refresh(Request()) else: flow = InstalledAppFlow.from_client_secrets_file( 'credentials.json', SCOPES) creds = flow.run_local_server(port=0) # Save the credentials for the next run with open('token.pickle', 'wb') as token: pickle.dump(creds, token) service = build('drive', 'v3', credentials=creds) # ------ search for upload folder, or create it ------ results = service.files().list( q= f"mimeType='application/vnd.google-apps.folder' and name='{sys.argv[1]}'", spaces='drive', pageSize=10, fields="nextPageToken, files(id, name)").execute() items = results.get('files', []) if not items: print('Folder not found, creating it...') file_metadata = { 'name': sys.argv[1], 'mimeType': 'application/vnd.google-apps.folder' } file = service.files().create(body=file_metadata, fields='id').execute() folder_id = file.get('id') else: folder_id = items[0]['id'] print(f'Folder ID: {folder_id}') # ------ ZIP folder ------ zip_file = zipdir(sys.argv[2]) # ------ upload file ------ file_metadata = { 'name': os.path.basename(sys.argv[3]), 'parents': [folder_id] } media = MediaIoBaseUpload(zip_file, mimetype="application/zip", resumable=True) file = service.files().create(body=file_metadata, media_body=media, fields='id').execute() print(f'Upload success. File ID: {file.get("id")}')
def test_media_io_base_upload_streamable(self):
    """A BytesIO-backed upload reports and exposes its underlying stream."""
    stream = BytesIO(b'stuff')
    upload = MediaIoBaseUpload(
        fd=stream, mimetype='image/png', chunksize=500, resumable=True)
    self.assertEqual(True, upload.has_stream())
    self.assertEqual(stream, upload.stream())
def create_file(self, file):
    """Upload *file* (a readable stream) to Drive as 'photo.jpg'; returns {'id': ...}."""
    metadata = {'name': 'photo.jpg'}
    media = MediaIoBaseUpload(file, mimetype='image/jpeg')
    created = self.service.files().create(
        body=metadata, media_body=media, fields='id').execute()
    return created
def upload(loc_file):
    """Replace the content of an existing Drive file with the local file.

    Args:
        loc_file: dict with 'file' (local path) and 'id' (Drive file id).
    """
    service = connect()
    # BUG FIX: the encoding was written as "utf-8-sig)" (stray paren), which
    # raises LookupError("unknown encoding") the moment open() is called.
    with open(loc_file["file"], "r", encoding="utf-8-sig") as fh:
        media_body = MediaIoBaseUpload(fh, mimetype="text/plain")
        body = {"title": loc_file["file"]}
        service.files().update(fileId=loc_file["id"], body=body,
                               media_body=media_body).execute()
    # NOTE(review): MediaIoBaseUpload generally expects a bytes stream; a
    # text-mode handle may fail inside the client library — confirm whether
    # this should open in 'rb' instead.
def test_media_io_base_upload_serializable(self):
    """to_json() on a stream-backed upload must raise NotImplementedError."""
    # BUG FIX: the original leaked the file handle (never closed) and bound
    # the result to a local named `json`, shadowing the module name.
    with open(datafile('small.png'), 'rb') as f:
        upload = MediaIoBaseUpload(fd=f, mimetype='image/png')
        try:
            serialized = upload.to_json()
            self.fail('MediaIoBaseUpload should not be serializable.')
        except NotImplementedError:
            pass