def close(self):
    """Flush the buffered backup data to GCS and verify it by MD5.

    Uploads ``self.data`` as a new object in ``self.bucket``, then
    compares the ``md5Hash`` reported by GCS against a locally
    computed MD5 of the same data.

    Returns:
        The base64-encoded local MD5 on success.

    Raises:
        exception.InvalidBackup: if the remote and local MD5s differ.
    """
    media = http.MediaIoBaseUpload(six.BytesIO(self.data),
                                   'application/octet-stream',
                                   chunksize=self.chunk_size,
                                   resumable=self.resumable)
    resp = self.conn.objects().insert(
        bucket=self.bucket,
        name=self.object_name,
        body={},
        media_body=media).execute(num_retries=self.num_retries)
    # GCS reports the object's MD5 as a base64-encoded string.
    etag = resp['md5Hash']
    md5 = base64.b64encode(hashlib.md5(self.data).digest())
    if six.PY3:
        # b64encode() already yields bytes; encode the (str) etag so
        # both sides compare as bytes.  The original code called
        # .encode() on the raw digest, which raises AttributeError on
        # Python 3 because bytes has no encode().
        etag = etag.encode('utf-8')
    if etag != md5:
        err = _('MD5 of object: %(object_name)s before: '
                '%(md5)s and after: %(etag)s is not same.') % {
            'object_name': self.object_name,
            'md5': md5,
            'etag': etag, }
        raise exception.InvalidBackup(reason=err)
    else:
        LOG.debug('MD5 before: %(md5)s and after: %(etag)s '
                  'writing object: %(object_name)s in GCS.',
                  {'etag': etag, 'md5': md5,
                   'object_name': self.object_name, })
        return md5
def upload_creative_asset(self, asset_type, filename, asset_key,
                          advertiser_id, retry_count=0):
    """Upload a creative asset stored in blobstore to the ad service.

    Args:
        asset_type: Asset type string for the assetIdentifier.
        filename: Name to register the asset under.
        asset_key: Blobstore key of the asset's content.
        advertiser_id: Advertiser the asset belongs to.
        retry_count: Internal recursion counter; callers omit it.

    Returns:
        The API response dict from creativeAssets().insert().

    Raises:
        http.HttpError: for non-retryable statuses, or after
            MAX_RETRIES retryable failures.
    """
    try:
        creative_asset = {
            'assetIdentifier': {
                'name': filename,
                'type': asset_type
            }
        }
        asset_file = blobstore.BlobReader(asset_key)
        mimetype = blobstore.BlobInfo(asset_key).content_type
        media = http.MediaIoBaseUpload(asset_file, mimetype=mimetype,
                                       resumable=False)
        return self.service.creativeAssets().insert(
            advertiserId=advertiser_id,
            profileId=self.profile_id,
            media_body=media,
            body=creative_asset).execute()
    # 'except X, e' is Python-2-only syntax (SyntaxError on Python 3);
    # 'as e' is valid on 2.6+ and 3.x alike.
    except http.HttpError as e:
        # Retry only transient statuses (rate-limit / server errors).
        if (e.resp.status in [403, 500, 503]
                and retry_count < self.MAX_RETRIES):
            return self.upload_creative_asset(asset_type, filename,
                                              asset_key, advertiser_id,
                                              retry_count + 1)
        else:
            raise
def add_string_as_docs_file_to_folder(service, parent_folder_id, file_name,
                                      text, update=False):
    """Create a Google Docs file in a Drive folder from a string.

    If a file with the same name already exists in the folder it is
    returned unchanged, unless ``update=True``, in which case the
    existing file(s) are deleted and a new one is created.

    Args:
        service: Authorized Drive v3 service object.
        parent_folder_id: ID of the folder to create the file in.
        file_name: Name of the Docs file.
        text: Plain-text content imported as a Docs document.
        update: Replace an existing file instead of keeping it.

    Returns:
        The file resource dict (with at least 'id' and 'name') of the
        existing or newly created file.
    """
    # Check whether a document with this name already exists in the folder.
    results = service.files().list(
        fields="nextPageToken, files(id, name)",
        q="mimeType = 'application/vnd.google-apps.document' and '{0}' in parents and name = '{1}' and trashed = False"
        .format(parent_folder_id.replace("'", r"\'"),
                file_name.replace("'", r"\'"))).execute()
    items = results.get('files', [])
    if items:
        print("File {0} already exists".format(file_name))
        if update:
            # If update, delete each old file before creating the new one.
            for item in items:
                # Fixed: the original passed items.get('id') (lists have
                # no .get), omitted the fileId= keyword, and never
                # executed the delete request.
                service.files().delete(fileId=item.get('id')).execute()
        else:
            return items[0]
    temp_f = StringIO(text)
    file_metadata = {
        'name': file_name,
        'mimeType': 'application/vnd.google-apps.document',
        'parents': [parent_folder_id]
    }
    # chunksize=-1 uploads in a single request; resumable lets the
    # client retry that request.
    media = http.MediaIoBaseUpload(temp_f,
                                   mimetype='text/plain;charset=utf-8',
                                   chunksize=-1, resumable=True)
    f = service.files().create(body=file_metadata, media_body=media,
                               fields="id, name").execute()
    print("File {0} added".format(file_name))
    return f
def put(service):
    """Download the monthly CSV and upload it to Cloud Storage.

    Args:
        service: Authorized Cloud Storage API service object.
    """
    start_time = time.time()
    csvstr = downloadData()
    media = http.MediaIoBaseUpload(io.BytesIO(csvstr), 'text/csv')
    req = service.objects().insert(bucket=_BUCKET_NAME,
                                   name='all_month1.csv',
                                   media_body=media)
    resp = req.execute()
    # Parenthesized single-argument print works on Python 2 and 3;
    # the original py2 print statements are a SyntaxError on 3.x.
    print(json.dumps(resp, indent=2))
    print("---Time %s in seconds to Upload file to Cloud ---"
          % (time.time() - start_time))
    # NOTE(review): this reuses the same start_time, so it reports the
    # same interval as the upload message — confirm intent.
    print("---Time %s in seconds to insert data to CloudSQL ---"
          % (time.time() - start_time))
    # Typo fixed: "Succeesfully" -> "Successfully".
    print("File is Successfully uploaded")
def InsertRowsAsync(self, dataset_id, table_id, rows, truncate=False,
                    num_retries=5):
    """Start BigQuery load jobs inserting rows into a table.

    Rows are uploaded in chunks of INSERTION_MAX_ROWS as
    newline-delimited JSON.  When truncate is True only the FIRST
    chunk truncates the table; later chunks append, so the overall
    call behaves as a replace-then-extend.

    Args:
        dataset_id: Target dataset ID.
        table_id: Target table ID.
        rows: Sequence of JSON-serializable row dicts.
        truncate: If True, replace the table's contents.
        num_retries: Retries per job-insert request.

    Returns:
        A list of job-insert API responses, one per chunk.
    """
    responses = []
    # range works on both Python 2 and 3 (xrange is 2-only); the
    # chunked step keeps the sequence tiny either way.
    for i in range(0, len(rows), INSERTION_MAX_ROWS):
        rows_chunk = rows[i:i + INSERTION_MAX_ROWS]
        logging.info('Inserting %d rows into %s.%s.',
                     len(rows_chunk), dataset_id, table_id)
        body = {
            'configuration': {
                'jobReference': {
                    'projectId': self._project_id,
                    'jobId': str(uuid.uuid4()),
                },
                'load': {
                    'destinationTable': {
                        'projectId': self._project_id,
                        'datasetId': dataset_id,
                        'tableId': table_id,
                    },
                    'sourceFormat': 'NEWLINE_DELIMITED_JSON',
                    'writeDisposition':
                        'WRITE_TRUNCATE' if truncate else 'WRITE_APPEND',
                }
            }
        }
        # Format rows as newline-delimited JSON.  The original used
        # json.dump() into a BytesIO (TypeError on Python 3: text into
        # a binary buffer) and the py2-only 'print >> f' for the
        # newline; serialize to text and encode explicitly instead.
        media_buffer = io.BytesIO()
        for row in rows_chunk:
            media_buffer.write(
                json.dumps(row, separators=(',', ':')).encode('utf-8'))
            media_buffer.write(b'\n')
        media_body = http.MediaIoBaseUpload(
            media_buffer, mimetype='application/octet-stream')
        responses.append(self._service.jobs().insert(
            projectId=self._project_id,
            body=body,
            media_body=media_body).execute(num_retries=num_retries))
        # Only truncate on the first insert!
        truncate = False
    # TODO(dtu): Return a Job object.
    return responses
def write(self, data):
    """Upload *data* to Drive, updating the tracked file if one exists.

    Returns:
        A dict with the file's 'id', 'name', and a 'status' of either
        'updated' or 'created'.
    """
    type_map = File.MEME_TYPES[self.mime_type]
    metadata = {
        'name': self.file_name,
        'mimeType': type_map['drive'],
    }
    upload = http.MediaIoBaseUpload(data,
                                    mimetype=type_map['mime'],
                                    chunksize=1024 * 1024,
                                    resumable=True)
    files_api = self.service.files()

    # A remembered id means we update the existing file in place.
    if self.ids:
        files_api.update(fileId=self.ids[0],
                         body=metadata,
                         media_body=upload).execute(
                             http=self.http.get('update'))
        return {'id': self.ids[0],
                'name': self.names[0],
                'status': 'updated'}

    # Otherwise create a fresh file, optionally under a parent folder.
    if self.parent_id:
        metadata['parents'] = [self.parent_id]
    created = files_api.create(body=metadata,
                               media_body=upload,
                               fields='name,id').execute(
                                   http=self.http.get('create'))
    self.ids = [created.get('id')]
    self.names = [created.get('name')]
    return {'id': created.get('id'),
            'name': created.get('name'),
            'status': 'created'}
def upload_data(self, data, to_paths, to_filename, mime_type="text/csv",
                parent_id='root', max_retry=10, retry_interval=10,
                overwrite=True):
    """ Upload data in memory.

    :param data: bytes or an io.BytesIO holding the content to upload
    :param to_paths: A list of path [[path, to, dir], [path, to, dir], ...]
    :param to_filename: Filename
    :param mime_type: MIME type of the uploaded content
    :param parent_id: Parent folder ID
    :param max_retry: Max number of retry
    :param retry_interval: Interval second between retries
    :param overwrite: Bool
    :raises ValueError: if data is empty, to_paths is not a list, or
        to_filename is missing
    :return: result of the retried upload call
    """
    # Validations.  ValueError is more specific than a bare Exception
    # and stays backward compatible (it subclasses Exception) for
    # callers catching the old type.
    if not data:
        raise ValueError("Invalid data: %s" % data)
    if not isinstance(to_paths, list):
        raise ValueError("Invalid to_paths: %s" % to_paths)
    if not to_filename:
        raise ValueError("filename not found: %s" % to_filename)

    # media: wrap raw bytes in a BytesIO; pass an existing stream through.
    if isinstance(data, io.BytesIO):
        fh = data
    else:
        fh = io.BytesIO(data)
    media = http.MediaIoBaseUpload(fh, mimetype=mime_type,
                                   chunksize=1024 * 1024, resumable=True)
    return call_with_retry(
        self._upload,
        (media, to_paths, to_filename, mime_type, parent_id, overwrite),
        max_retry, retry_interval)
def _upload_new_playstore_apk(play, package_name, apk, info):
    """Upload an APK to the Play Store track and commit the edit.

    Opens a new edit session, uploads the APK, points the configured
    track at the uploaded version code, then commits the edit.
    """
    edit_id = play.edits().insert(
        body={}, packageName=package_name).execute()['id']

    apk_media = googhttp.MediaIoBaseUpload(apk, mimetype=_APK_MIME)
    uploaded = play.edits().apks().upload(
        editId=edit_id,
        packageName=package_name,
        media_body=apk_media).execute()

    release = {
        'name': info['shortrev'],
        'versionCodes': [str(uploaded['versionCode'])],
        'status': 'completed',
    }
    play.edits().tracks().update(
        editId=edit_id,
        packageName=package_name,
        track=_PLAYSTORE_TRACK,
        body={'releases': [release]}).execute()

    play.edits().commit(editId=edit_id, packageName=package_name).execute()
def CreateFile(self, file_name, content, parent_folder=None):
    """ Creates a file with the given name and content

    Args:
        file_name: The name of the file to create.
        content: The content of the new file.
        parent_folder (optional): The folder to copy the file into.
            Defaults to root folder.

    Returns:
        The file id.
    """
    # BytesIO requires bytes on Python 3, where BytesIO(str(...))
    # raises TypeError.  On Python 2, bytes IS str, so the bytes
    # branch matches the original behavior exactly.
    if isinstance(content, bytes):
        fh = BytesIO(content)
    else:
        fh = BytesIO(str(content).encode('utf-8'))
    media = http.MediaIoBaseUpload(fh, mimetype='text/csv', resumable=True)
    file_metadata = {
        'title': file_name,
        'mimeType': 'text/csv',
        'parents': [{
            'id': parent_folder
        }]
    }
    # Renamed local 'file' -> 'file_obj' to avoid shadowing the builtin.
    file_obj = self.service.files().insert(body=file_metadata,
                                           media_body=media).execute()
    return file_obj['id']