Code Example #1
File: docfile.py Project: JR-Mitchell/DriveFinance
    def write_from_string(self, string, mimetype="text/plain"):
        """ Writes a string to the Google Docs file

        :param string: the information to write to the file
        :type string: str
        :param mimetype: the mimetype to interpret the string as,
            defaults to "text/plain"
        :type mimetype: str, optional

        :raises Exception: Exception if the file has been modified since
            the object initialisation
        """
        my_file = StringIO.StringIO()
        my_file.write(string)
        media_body = media.MediaIoBaseUpload(my_file,
                                             mimetype=mimetype,
                                             resumable=True)
        self.assert_no_changes()
        request = self.service.files().update(
            fileId=self.file_id,
            media_body=media_body)  #Google inconsistent with kwarg casing
        response = None
        while response is None:
            status, response = request.next_chunk()
        my_file.close()
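As a quick usage sketch (hypothetical: the DocFile class name and constructor arguments below are assumptions, not part of the snippet above):

# Hypothetical caller; DocFile and its constructor are assumed names.
doc = DocFile(service, file_id)
doc.write_from_string("hello world")                      # plain text
doc.write_from_string("a,b\n1,2\n", mimetype="text/csv")  # any other mimetype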
Code Example #2
    def upload_object(self,
                      file_path,
                      file_name=None,
                      predefinedAcl='authenticatedRead',
                      readers=[],
                      owners=[]):
        """
            upload object: pass file path to upload into GCS.
            file_path: file path of which needs to uploaded.
            file_name: Name of file to store in the GCS.
            predefinedAcl: by default 'authenticatedRead'.
            readers: if needed can be defined the person for read access.
            owner: if needed can be defined for the ownership.
        """

        service = self.__create_service()
        # This is the request body as specified:
        # http://g.co/cloud/storage/docs/json_api/v1/objects/insert#request
        if not file_name:
            file_name = file_path
        # Only object-resource fields belong in the body; predefinedAcl is a
        # query parameter and is passed to insert() below instead.
        body = {
            'name': file_name,
        }

        # If specified, create the access control objects and add them to the
        # request body

        if readers or owners:
            body['acl'] = []

        for r in readers:
            body['acl'].append({
                'entity': 'user-%s' % r,
                'role': 'READER',
                'email': r
            })

        for o in owners:
            body['acl'].append({
                'entity': 'user-%s' % o,
                'role': 'OWNER',
                'email': o
            })
        # Now insert them into the specified bucket as a media insertion.

        # http://g.co/dev/resources/api-libraries/documentation/storage/v1/python/latest/storage_v1.objects.html#insert
        with open(file_path, 'rb') as f:
            extra = {}
            if 'acl' not in body:
                # Fall back to the predefined ACL only when no explicit acl
                # entries were built above, so the two settings do not conflict.
                extra['predefinedAcl'] = predefinedAcl
            req = service.objects().insert(bucket=self.BUCKET_NAME,
                                           body=body,
                                           media_body=http.MediaIoBaseUpload(
                                               f, 'application/octet-stream'),
                                           **extra)
            # You can also just set media_body=filename, but for the sake of
            # demonstration, pass in the more generic file handle, which could
            # very well be a StringIO or similar.
            resp = req.execute()
        return resp['mediaLink']
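A minimal call sketch, assuming the enclosing class is instantiated elsewhere with credentials and BUCKET_NAME configured (the class name and addresses are placeholders, not from the snippet):

# Hypothetical usage; GCSUploader stands in for whatever class defines upload_object.
client = GCSUploader()
link = client.upload_object('/tmp/report.csv',
                            file_name='reports/report.csv',
                            readers=['alice@example.com'])
print(link)  # mediaLink returned by the API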
Code Example #3
def put_object(bucket, filename, contents):
    if test_mode:
        return
    service = _create_service()

    # This is the request body as specified:
    # http://g.co/cloud/storage/docs/json_api/v1/objects/insert#request
    body = {
        'name': filename,
    }

    # Now insert them into the specified bucket as a media insertion.
    # http://g.co/dev/resources/api-libraries/documentation/storage/v1/python/latest/storage_v1.objects.html#insert
    f = io.BytesIO(contents)
    req = service.objects().insert(
        bucket=bucket,
        body=body,
        # You can also just set media_body=filename, but for the sake of
        # demonstration, pass in the more generic file handle, which could
        # very well be a StringIO or similar.
        media_body=http.MediaIoBaseUpload(f, 'application/octet-stream')
    )
    try:
        resp = req.execute()
    except errors.HttpError as e:
        if e.resp.status in [400, 404]:
            raise NotFoundError()
        raise
    return resp
Code Example #4
    def _upload_io(self, io_obj, dest_uri, metadata=False):
        bucket, name = parse_gcs_uri(dest_uri)
        if self.exists(dest_uri):
            raise Exception("File already exists: " + dest_uri)

        mimetype, _ = mimetypes.guess_type(dest_uri)
        mimetype = mimetype or _BINARY_MIMETYPE

        # Chunked file upload
        media = google_http.MediaIoBaseUpload(io_obj, mimetype, resumable=True)
        upload_req = self.api_client.objects().insert(bucket=bucket,
                                                      name=name,
                                                      media_body=media)

        upload_resp = None
        while upload_resp is None:
            status, upload_resp = upload_req.next_chunk()
            if status:
                log.debug("Uploaded %d%%." % int(status.progress() * 100))

        log.debug('Upload Complete! %s', dest_uri)

        if metadata:
            return self.api_client.objects().get(bucket=bucket,
                                                 object=name).execute()
Code Example #5
def upload_object(bucket, filename, readers=(), owners=()):
    service = create_service()

    body = {
        'name': filename,
    }

    if readers or owners:
        body['acl'] = []

    for r in readers:
        body['acl'].append({
            'entity': 'user-%s' % r,
            'role': 'READER',
            'email': r
        })
    for o in owners:
        body['acl'].append({
            'entity': 'user-%s' % o,
            'role': 'OWNER',
            'email': o
        })

    with open(filename, 'rb') as f:
        req = service.objects().insert(bucket=bucket,
                                       body=body,
                                       media_body=http.MediaIoBaseUpload(
                                           f, 'video/ogg'))
        resp = req.execute()

    return resp
Code Example #6
 def put_string(self, contents, dest_path, mimetype=None):
     mimetype = mimetype or mimetypes.guess_type(dest_path)[0] or DEFAULT_MIMETYPE
     assert isinstance(mimetype, six.string_types)
     if not isinstance(contents, six.binary_type):
         contents = contents.encode("utf-8")
     media = http.MediaIoBaseUpload(six.BytesIO(contents), mimetype, resumable=bool(contents))
     self._do_put(media, dest_path)
Code Example #7
File: gcs.py Project: zhanghaijie01/cinder
 def close(self):
     media = http.MediaIoBaseUpload(six.BytesIO(self.data),
                                    'application/octet-stream',
                                    chunksize=self.chunk_size,
                                    resumable=self.resumable)
     resp = self.conn.objects().insert(
         bucket=self.bucket,
         name=self.object_name,
         body={},
         media_body=media).execute(num_retries=self.num_retries)
     etag = resp['md5Hash']
     md5 = base64.b64encode(hashlib.md5(self.data).digest())
     if six.PY3:
         # The response value is text; encode it so the comparison below is
         # bytes-to-bytes (a bytes digest has no .encode()).
         etag = etag.encode('utf-8')
     if etag != md5:
         err = _('MD5 of object: %(object_name)s before: '
                 '%(md5)s and after: %(etag)s is not the same.') % {
                     'object_name': self.object_name,
                     'md5': md5,
                     'etag': etag,
                 }
         raise exception.InvalidBackup(reason=err)
     else:
         LOG.debug(
             'MD5 before: %(md5)s and after: %(etag)s '
             'writing object: %(object_name)s in GCS.', {
                 'etag': etag,
                 'md5': md5,
                 'object_name': self.object_name,
             })
         return md5
Code Example #8
File: storage.py Project: jmelloy/automation
def put_object(file,
               bucket,
               key,
               mimetype="application/octet-stream",
               check_size=True,
               gs=None):
    """Uploads a file to google cloud storage.  Retries once automatically if 503 returned"""
    gs = gs or service("storage", scope="devstorage.read_write")

    logger.info("[upload_cloud_store] Uploading %s to %s" % (key, bucket))

    if check_size:
        exists, data = cs_get_object_info(bucket, key)
        # md5 could be used for this: Google's 'md5Hash' field, e.g.
        #  u'md5Hash': u'n2j1RoJz0ewlq7khTTCdwg==', is the base64-encoded
        #  binary MD5 digest of the object contents.
        if exists and data["size"] == check_size:
            logger.info("[upload_cloud_store] Skipping upload for %s" %
                        (key, ))
            return data

    upload = http.MediaIoBaseUpload(file, mimetype=mimetype, resumable=True)

    resp = gs.objects().insert(bucket=bucket, name=key,
                               media_body=upload).execute()

    logger.debug(resp)

    return resp
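On the md5Hash question in the comment above: GCS reports md5Hash as the base64 encoding of the binary MD5 digest of the object contents, so a local check could look like this sketch (standard library only, not part of the original module):

import base64
import hashlib

def matches_gcs_md5(local_bytes, gcs_md5hash):
    # 'md5Hash' from the objects API (e.g. u'n2j1RoJz0ewlq7khTTCdwg==')
    # is base64(md5 digest), not a hex digest.
    local = base64.b64encode(hashlib.md5(local_bytes).digest()).decode('ascii')
    return local == gcs_md5hash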
Code Example #9
    def upload_file(cls, name, directory_id, data, mimetype):
        """Upload file data to provided google drive directory."""
        file_metadata = {
            "name": name,
            "parents": [directory_id],
            "mimeType": mimetype
        }

        try:
            credentials = cls.get_credentials()
        except FileNotFoundError:
            LOGGER.error("Failed to load credentials from file: %s", APP_CONFIG.GOOGLE_CREDENTIALS_FILE)
            return None

        try:
            google_drive = discovery.build(
                cls.GOOGLE_DRIVE_SERVICE,
                cls.GOOGLE_DRIVE_SERVICE_VERSION,
                credentials=credentials
            )
            file_media = http.MediaIoBaseUpload(data, mimetype=mimetype, resumable=True)
            file_id = google_drive.files().create(body=file_metadata, media_body=file_media, fields="id").execute()
        except errors.Error as err:
            LOGGER.error("Failed to upload file into google drive: %s", err)
            return None

        return file_id["id"]
Code Example #10
	def upload_object(self,content):
		print "FUNCTION: upload_object"
		Logger.log_writer("FUNCTION: upload_object")
		body = {
		'name': self.uploadFolderName+"_output"+"/"+self.uploadFileName+".txt",
		}

		stream = e.BytesIO()
		for line in content:
			stream.write(line+'\n')

		'''
		if readers or owners:
			body['acl'] = []

		for r in readers:
			body['acl'].append({
			    'entity': 'user-%s' % r,
			    'role': 'READER',
			    'email': r
			})
		for o in owners:
			body['acl'].append({
			    'entity': 'user-%s' % o,
			    'role': 'OWNER',
			    'email': o
			})
		'''
		req = self.gcs_client.objects().insert(bucket=self.bucket,body=body,media_body=e.MediaIoBaseUpload(stream, 'text/plain'))
		resp = req.execute()
		return resp
Code Example #11
def gcs_blob_upload(fname, bucket, media, mimetype):
    'upload an object to a Google Cloud Storage bucket'

    # build blob metadata and upload via GCS API
    body = {'name': fname, 'uploadType': 'multipart', 'contentType': mimetype}
    return GCS.objects().insert(bucket=bucket, body=body,
            media_body=http.MediaIoBaseUpload(io.BytesIO(media), mimetype),
            fields='bucket,name').execute()
Code Example #12
 def upload_object(self, bucket, file_object):
     body = {
         'name': 'storage-api-client-sample-file.txt',
     }
     req = storage.objects().insert(
         bucket=bucket, body=body, media_body=http.MediaIoBaseUpload(
             file_object, 'application/octet-stream'))
     resp = req.execute()
     return resp
Code Example #13
	def upload_XMLobject(self,content):
		print "FUNCTION: upload_XMLobject"
		Logger.log_writer("FUNCTION: upload_XMLobject")
		body = {
			'name': self.uploadFolderName+"_output"+"/"+self.uploadFileName+".xml",
		}
		stream = e.BytesIO()
		content.write(stream)
		req = self.gcs_client.objects().insert(bucket=self.bucket,body=body,media_body=e.MediaIoBaseUpload(stream, 'text/xml'))
		resp = req.execute()
		return resp
Code Example #14
 def append(self, new: typing.List[PlaylistItem]) -> None:
     b = json.dumps([i._asdict() for i in new],
                    ensure_ascii=False).encode("utf-8")
     file = http.MediaIoBaseUpload(io.BytesIO(b),
                                   mimetype="application/json")
     self.__drive_svc.files().create(body={
         "name":
         datetime.now().date().isoformat() + ".json",
         "parents": [self.__folder_id]
     },
                                     media_body=file).execute()
Code Example #15
File: gee.py Project: jkachika/columbus-worker
def upload_object(bucket, filename, readers, owners, user_settings=None, access=ServiceAccount.GCS):
    """
    Uploads the specified file to the specified bucket. The object path in the bucket is the same as
    the path of the file specified.

    :param str bucket: Name of the cloud storage bucket
    :param str filename: fully qualified name of the file to upload
    :param list(str) readers: list of email addresses
    :param list(str) owners: list of email addresses
    :param dict user_settings: optional, a dictionary of user credentials for appropriate services.
                            If one is not provided, then this method must be invoked by an EngineThread
                            which defines the settings
    :param str access: must be 'storage'. Other values are for internal use only

    :return: Returns the response obtained from the API by uploading the object
    """
    if access == ServiceAccount.EARTH_ENGINE:
        service = CredentialManager.get_ee_storage_service(user_settings)
    else:
        service = CredentialManager.get_server_storage_service(user_settings)
    # This is the request body as specified:
    # http://g.co/cloud/storage/docs/json_api/v1/objects/insert#request
    body = {
        'name': filename[1:] if filename[0] == '/' else filename,
    }
    # If specified, create the access control objects and add them to the
    # request body
    if readers or owners:
        body['acl'] = []

    for r in readers:
        body['acl'].append({
            'entity': 'user-%s' % r,
            'role': 'READER',
            'email': r
        })
    for o in owners:
        body['acl'].append({
            'entity': 'user-%s' % o,
            'role': 'OWNER',
            'email': o
        })
    # Now insert them into the specified bucket as a media insertion.
    # http://g.co/dev/resources/api-libraries/documentation/storage/v1/python/latest/storage_v1.objects.html#insert
    with open(filename, 'rb') as f:
        req = service.objects().insert(
            bucket=bucket, body=body,
            # You can also just set media_body=filename, but for the sake of
            # demonstration, pass in the more generic file handle, which could
            # very well be a StringIO or similar.
            media_body=http.MediaIoBaseUpload(f, 'application/octet-stream'))
        resp = req.execute(num_retries=3)
    return resp
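For context, a hypothetical invocation of the function above (the bucket name, path, and addresses are placeholders; CredentialManager and ServiceAccount come from the surrounding project):

# Hypothetical call; values are illustrative only.
resp = upload_object('my-results-bucket',
                     '/outputs/run-42/result.tif',
                     readers=['viewer@example.com'],
                     owners=['owner@example.com'])
print(resp.get('mediaLink'))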
Code Example #16
def insertStorageObject(auths, file, data):
    credentials = AccessTokenCredentials(auths["access_token"], "MyAgent/1.0",
                                         None)
    storage = discovery.build("storage", "v1", credentials=credentials)

    # The BytesIO object may be replaced with any io.Base instance.
    media = http.MediaIoBaseUpload(
        io.BytesIO(data.replace("'", "\"").replace("u\"", "\"")), "text/plain")
    return json.dumps(storage.objects().insert(bucket=WebConfig["BucketName"],
                                               name=file,
                                               media_body=media).execute(),
                      indent=2)
Code Example #17
File: gcloud_util.py Project: yhppark902/makani
 def UploadStream(self,
                  io_base,
                  gcs_file_name,
                  mimetype='application/octet-stream'):
     # Only make the request resumable if the stream is nonempty.
     position = io_base.tell()
     resumable = bool(io_base.readline(1))
     io_base.seek(position)
     media = gapi_http.MediaIoBaseUpload(io_base,
                                         mimetype,
                                         chunksize=1024 * 1024,
                                         resumable=resumable)
     return self._UploadWithProgress(media, gcs_file_name)
Code Example #18
def upload_large_object(bucket, filename, gckey):
    service = create_service()
    body = {
        'name': gckey,
    }
    with open(filename, 'rb') as f:
        req = service.objects().insert(
            bucket=bucket,
            body=body,
            # You can also just set media_body=filename, but for the sake of
            # demonstration, pass in the more generic file handle, which could
            # very well be a StringIO or similar.
            # resumable=True uploads the file in chunks, which suits a large object.
            media_body=http.MediaIoBaseUpload(f, 'application/octet-stream',
                                              resumable=True))
        resp = req.execute()
    return resp
Code Example #19
File: gcpclient.py Project: ellis-wu/GCP-traning
def gs_object_upload(bucket, file, filename):
    try:
        body = {
            'name': filename,
        }
        mime = magic.Magic(mime=True)
        fileType = mime.from_file(file)
        with open(file, 'rb') as f:
            request = gcp_credential('storage').objects()
            response = request.insert(bucket=bucket, body=body,
                                      media_body=http.MediaIoBaseUpload(f, fileType)).execute()
        print("upload [%s] to [%s] success" % (filename, bucket))
    except Exception as e:
        sys.exit('failed to upload file: %s' % e)
Code Example #20
	def upload_image(self,link):
		print "FUNCTION: upload_image"
		Logger.log_writer("FUNCTION: upload_image")
		parts = link.split(".")
		ext = parts[len(parts)-1]
		body = {
		'name': self.uploadFolderName+"_output"+"/"+self.uploadFileName+"."+str(ext),
		}

		stream = cStringIO.StringIO(urllib.urlopen(link).read())
		req = self.gcs_client.objects().insert(bucket=self.bucket,body=body,media_body=e.MediaIoBaseUpload(stream, "image/jpeg"))
		resp = req.execute()
		#Logger.log_writer("Response:{}".format(resp))
		return resp
Code Example #21
	def upload_local_image(self,filename):
		print "FUNCTION: Process.upload_local_image"
		Logger.log_writer("FUNCTION: Process.upload_local_image")
		
		body = {
			'name': self.uploadFolderName+"_output"+"/"+self.uploadFileName+".jpg",
		    }
		
		with open(filename, 'rb') as f:
			req = self.gcs_client.objects().insert(
			    bucket=self.bucket, body=body,media_body=e.MediaIoBaseUpload(
				f, "image/jpeg"))
			resp = req.execute()
		return resp
Code Example #22
    def put(self, bytes, path, overwrite=True, delay=False):
        mime = mimetypes.guess_type(path)[0]
        if mime is None:
            mime = 'application/octet-stream'

        fh = io.BytesIO(bytes)
        media = http.MediaIoBaseUpload(fh, mimetype=mime)
        file_metadata = {'name': path[1:]}

        try:
            ret = self.service.files().create(body=file_metadata,
                                              media_body=media).execute()
            self.items[ret['name']] = (ret['id'], 0, None, None)
            return True
        except Exception:
            return False
Code Example #23
    def write(self, path, buf, offset, fh):
        fh.seek(offset)
        fh.write(buf)

        mime = mimetypes.guess_type(path[1:])[0]
        if mime is None:
            mime = 'application/octet-stream'

        media = http.MediaIoBaseUpload(fh, mimetype=mime, resumable=True)
        file_metadata = {'name': path[1:]}
        self.service.files().update(fileId=self.items[path[1:]][0],
                                    body=file_metadata,
                                    media_body=media).execute()
        tmp = list(self.items[path[1:]])
        tmp[1] += len(buf)
        self.items[path[1:]] = tuple(tmp)
        # A FUSE-style write handler is expected to return the number of bytes written.
        return len(buf)
Code Example #24
def upload_objects(bucket, paths):
    service = create_service()

    for path in paths:

        body = {
            'name': 'data/' + '/'.join(str(path).split('/')[3:]),
        }

        with open(path, 'rb') as f:
            req = service.objects().insert(bucket=bucket,
                                           body=body,
                                           media_body=http.MediaIoBaseUpload(
                                               f, 'application/octet-stream'))

            resp = req.execute()
Code Example #25
 def _do_upload(self, file_obj, folder_id, filename, content_type):
     mime_type = content_type or guess_mime_type(filename)
     media = gah.MediaIoBaseUpload(file_obj,
                                   mime_type,
                                   chunksize=self.chunk_size,
                                   resumable=True)
     body = {
         "name": filename,
         "mimeType": mime_type,
         "parents": [folder_id],
     }
     stored_file = self._service.files().create(body=body,
                                                media_body=media,
                                                supportsTeamDrives=True,
                                                fields="id").execute()
     return stored_file["id"]
Code Example #26
    def put_text_file(self, local_file_path, full_bucket_path):
        """Put a text object into a bucket.

        Args:
            local_file_path: The local path of the file to upload.
            full_bucket_path: The full GCS path for the output.
        """
        storage_service = self.service
        bucket, object_path = get_bucket_and_path_from(full_bucket_path)

        req_body = {'name': object_path}
        with open(local_file_path, 'rb') as f:
            req = storage_service.objects().insert(
                bucket=bucket,
                body=req_body,
                media_body=http.MediaIoBaseUpload(f,
                                                  'application/octet-stream'))
            _ = req.execute()
Code Example #27
    def save(self, buffer):
        path = urljoin(self.prefix, buffer.path)

        file_obj = buffer.get_rewound_file()
        md5_base64 = base64.b64encode(buffer.md5).decode('ascii')
        self.objects.insert(
            media_body=http.MediaIoBaseUpload(
                file_obj, 'application/octet-stream'
            ),
            name=path,
            body={
                'md5Hash': md5_base64,
                'metadata': {
                    'count': str(buffer.count),
                },
            },
            bucket=self.bucket,
        ).execute()
Code Example #28
def upload_object(bucket, filename, encryption_key, key_hash):
    """Uploads an object, specifying a custom encryption key."""
    service = create_service()

    with open(filename, 'rb') as f:
        request = service.objects().insert(
            bucket=bucket,
            name=filename,
            # You can also just set media_body=filename, but for the sake of
            # demonstration, pass in the more generic file handle, which could
            # very well be a StringIO or similar.
            media_body=http.MediaIoBaseUpload(f, 'application/octet-stream'))
        request.headers['x-goog-encryption-algorithm'] = 'AES256'
        request.headers['x-goog-encryption-key'] = encryption_key
        request.headers['x-goog-encryption-key-sha256'] = key_hash

        resp = request.execute()

    return resp
Code Example #29
def put(name,
        input_handle,
        readers=[],
        owners=[],
        mime_type='application/octet-stream'):
    input_handle.seek(0)
    (bucket_name, file_name) = split_bucket_and_name(name)

    # This is the request body as specified:
    # http://g.co/cloud/storage/docs/json_api/v1/objects/insert#request
    body = {
        'name': file_name,
    }

    # If specified, create the access control objects and add them to the
    # request body
    if readers or owners:
        body['acl'] = []

    for r in readers:
        body['acl'].append({
            'entity': 'user-%s' % r,
            'role': 'READER',
            'email': r
        })
    for o in owners:
        body['acl'].append({
            'entity': 'user-%s' % o,
            'role': 'OWNER',
            'email': o
        })

    # Now insert them into the specified bucket as a media insertion.
    req = get_service().objects().insert(
        bucket=bucket_name,
        body=body,
        # You can also just set media_body=filename, but for the sake of
        # demonstration, pass in the more generic file handle, which could
        # very well be a StringIO or similar.
        media_body=http.MediaIoBaseUpload(input_handle, mime_type))
    resp = req.execute()

    return resp
Code Example #30
def google_bucket_upload_object(bucket, filepath, pathSaveBucket):
    service = google_bucket_create_service()

    # This is the request body as specified:
    # http://g.co/cloud/storage/docs/json_api/v1/objects/insert#request

    filename = get_file_name(filepath)

    body = {
        'name': pathSaveBucket,
    }

    # Create the access control objects and add them to the request body.

    # https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls#resource-representations
    body['acl'] = []
    body['acl'].append({
        'entity': 'allUsers',
        'role': 'READER',
        'name': 'allUsers'
    })

    body['acl'].append({
        'entity': '*****@*****.**',
        'role': 'OWNER',
        'email': '*****@*****.**'
    })

    # Now insert them into the specified bucket as a media insertion.
    # http://g.co/dev/resources/api-libraries/documentation/storage/v1/python/latest/storage_v1.objects.html#insert

    with open(filepath, 'rb') as f:
        req = service.objects().insert(
            bucket=bucket,
            body=body,
            # You can also just set media_body=filename, but for the sake of
            # demonstration, pass in the more generic file handle, which could
            # very well be a StringIO or similar.
            media_body=http.MediaIoBaseUpload(f, get_mime_type(filepath)))
        resp = req.execute()

    return resp