Example #1
def cloudstorage_upload(service, project_id, bucket, source_file, dest_file, show_status_messages=True):
    """Upload a local file to a Cloud Storage bucket.

    Args:
        service: Authenticated Cloud Storage service object.  Example: service = build('storage', 'v1', http=http)
        project_id: string, Name of the Google Cloud project to upload to
        bucket: string, Name of the Cloud Storage bucket (exclude the "gs://" prefix)
        source_file: string, Path to the local file to upload
        dest_file: string, Name to give the file on Cloud Storage
        show_status_messages: boolean, Whether to print progress messages

    Returns:
        Response of the upload in a JSON format
    """
    # Starting code for this function is a combination from these sources:
    #   https://code.google.com/p/google-cloud-platform-samples/source/browse/file-transfer-json/chunked_transfer.py?repo=storage
    #   https://developers.google.com/api-client-library/python/guide/media_upload
    filename = source_file
    bucket_name = bucket
    object_name = dest_file
    assert bucket_name and object_name

    if show_status_messages:
        print('Upload request for {0}'.format(source_file))
    media = MediaFileUpload(filename, chunksize=CHUNKSIZE, resumable=True)
    if not media.mimetype():
        media = MediaFileUpload(filename, DEFAULT_MIMETYPE, resumable=True)
    request = service.objects().insert(bucket=bucket_name, name=object_name,
                                       media_body=media)

    response = request.execute()

    if show_status_messages:
        print('Upload complete')

    return response
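A note on usage: this function leans on two module-level constants, CHUNKSIZE and DEFAULT_MIMETYPE, and on a service object that has already been authenticated, none of which appear in the snippet. A minimal usage sketch, with assumed constant values and an assumed googleapiclient import path:

# Minimal usage sketch; constant values, project and bucket names are
# illustrative, not from the original snippet.
from googleapiclient.discovery import build
from googleapiclient.http import MediaFileUpload

CHUNKSIZE = 2 * 1024 * 1024  # 2 MB per resumable chunk
DEFAULT_MIMETYPE = 'application/octet-stream'

def upload_example(http):
    # `http` is assumed to be an already-authorized httplib2.Http instance.
    service = build('storage', 'v1', http=http)
    return cloudstorage_upload(service, 'my-project', 'my-bucket',
                               '/tmp/data.csv', 'data.csv')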
Example #2
 def upload(self, filename, bucket_name, object_name):
     assert bucket_name and object_name
     print 'Building upload request...'
     media = MediaFileUpload(filename, chunksize=CHUNKSIZE, resumable=True)
     if not media.mimetype():
         media = MediaFileUpload(filename, DEFAULT_MIMETYPE, resumable=True)
     request = self.cloud.objects().insert(bucket=bucket_name,
                                           name=object_name,
                                           media_body=media)
     print 'Uploading file: %s to bucket: %s object: %s ' % (filename,
                                                             bucket_name,
                                                             object_name)
     progressless_iters = 0
     response = None
     while response is None:
         error = None
         try:
             progress, response = request.next_chunk()
             if progress:
                 self.print_with_carriage_return(
                     'Upload %d%%' % (100 * progress.progress()))
         except HttpError, err:
             error = err
             if err.resp.status < 500:
                 raise
         except RETRYABLE_ERRORS, err:
             error = err
Example #3
File: gme.py Project: keithmoss/Hodor
def upload_file_init(ctx, asset_id, asset_type, filepath):
  """Upload the first 256KB of a given file to an asset.
  This forces it into an "uploading" state which prevents processing from
  occurring until all files are uploaded.

  Built as an experiment and abandoned in favour of multithreaded uploading.

  Parameters
  ----------
  ctx : Context
    A Click Context object.
  asset_id : str
    The Id of a valid raster or vector asset.
  asset_type : str
    The type of asset being represented. Possible values: table, raster
  filepath : str
    The absolute path to the file.
  """
  @retries(1000)
  def next_chunk(ctx, request):
    return request.next_chunk()

  chunk_size = 262144 # 256KB - smallest possible chunk size for resumable upload
  media = MediaFileUpload(filepath, chunksize=chunk_size, resumable=True)
  if not media.mimetype():
    media = MediaFileUpload(filepath, mimetype='application/octet-stream', chunksize=chunk_size, resumable=True)

  resource = ctx.service().tables() if asset_type == "vector" else ctx.service().rasters()
  request = resource.files().insert(id=asset_id, filename=os.path.basename(filepath), media_body=media)

  try:
    next_chunk(ctx, request)
  except NoContent as e:
    pass
  ctx.log("Init uploading %s" % (os.path.basename(filepath)))
Example #4
def put(service):
    #User inputs the file name that needs to be uploaded.
    fileName = raw_input('Enter file name to be uploaded to Cloud:\n')
    #Validate the file name before uploading
    if not fileName or not os.path.isfile(fileName):
        print 'Invalid file name or file not found. Terminating!'
        return
        
    directory, f_name = os.path.split(fileName)
    #Upload the file to Bucket
    try:
        media = MediaFileUpload(fileName, chunksize=_CHUNK_SIZE, resumable=True)
        if not media.mimetype():
            media = MediaFileUpload(fileName, _DEFAULT_MIMETYPE, resumable=True)
        request = service.objects().insert(bucket=_BUCKET_NAME, name=f_name,
                                           media_body=media)

        response = None
        start = datetime.datetime.now()
        while response is None:
            status, response = request.next_chunk()
            if status:
                print "Uploaded %d%%." % int(status.progress() * 100)
        print "Upload Complete!"

        end = datetime.datetime.now()
        duration = end - start
        print ('Upload took {} seconds'.format(duration.seconds))
        #Removes references to the uploaded file
        media = request = None

    except client.AccessTokenRefreshError:
        print ("Error in the credentials")
Example #5
def upload(filename, bucketName, clientEmail, keyFile, **kwargs):
    service = Google_Service_Builder.buildService(clientEmail, keyFile, 
                                                  domain="devstorage.read_write",
                                                  service="storage", 
                                                  version="v1", **kwargs)

    print 'Building upload request...'
    media = MediaFileUpload(filename, chunksize=CHUNKSIZE, resumable=True)
    if not media.mimetype():
        media = MediaFileUpload(filename, DEFAULT_MIMETYPE, resumable=True)
    request = service.objects().insert(bucket=bucketName, name=filename,
                                                            media_body=media)

    print 'Uploading file: %s to: %s/%s' % (filename, bucketName, filename)

    progressless_iters = 0
    response = None
    while response is None:
        error = None
        try:
            progress, response = request.next_chunk()
            if progress:
                print 'Upload progress: %.2f%%' % (100.0 * progress.progress())
        except HttpError, err:
            error = err
            if err.resp.status < 500:
                raise
        except RETRYABLE_ERRORS, err:
            error = err
Example #6
File: cli.py Project: ruo91/Hodor
    def upload_file(self, file, id, resource):
      # Retry transport and file IO errors.
      RETRYABLE_ERRORS = (httplib2.HttpLib2Error, IOError)
      chunk_size = getattr(self, 'chunk_size', -1)

      self.log("Uploading file '%s'" % (file))
      start_time = time.time()

      media = MediaFileUpload(file, chunksize=chunk_size, resumable=True)
      if not media.mimetype():
        # media = MediaFileUpload(file, 'application/octet-stream', resumable=True)
        raise Exception("Could not determine mime-type. Please make lib mimetypes aware of it.")
      request = resource.files().insert(id=id, filename=os.path.basename(file), media_body=media)

      progressless_iters = 0
      response = None
      while response is None:
        error = None
        try:
          start_time_chunk = time.time()
          progress, response = request.next_chunk()
          if progress:
            Mbps = ((chunk_size / (time.time() - start_time_chunk)) * 0.008 * 0.001)
            print "%s%% (%s/Mbps)" % (round(progress.progress() * 100), round(Mbps, 2))
        except HttpError, err:
          # Contrary to the documentation, GME doesn't return 201/200 for the last chunk
          if err.resp.status == 204:
            response = ""
          else:
            error = err
            if err.resp.status < 500 and err.resp.status != 410:
              raise
        except RETRYABLE_ERRORS, err:
          error = err
Example #7
def upload(argv):
  filename = argv[1]
  bucket_name, object_name = argv[2][5:].split('/', 1)
  assert bucket_name and object_name

  service = get_authenticated_service(RW_SCOPE)

  print 'Building upload request...'
  media = MediaFileUpload(filename, chunksize=CHUNKSIZE, resumable=True)
  if not media.mimetype():
    media = MediaFileUpload(filename, DEFAULT_MIMETYPE, resumable=True)
  request = service.objects().insert(bucket=bucket_name, name=object_name,
                                     media_body=media)

  print 'Uploading file: %s to bucket: %s object: %s ' % (filename, bucket_name,
                                                          object_name)

  progressless_iters = 0
  response = None
  while response is None:
    error = None
    try:
      progress, response = request.next_chunk()
      if progress:
        print_with_carriage_return('Upload %d%%' % (100 * progress.progress()))
    except HttpError, err:
      error = err
      if err.resp.status < 500:
        raise
    except RETRYABLE_ERRORS, err:
      error = err
Example #8
File: GDrive.py Project: kurik/scripts
 def upload(self, filename, gdrivename=None, parent_folder="root"):
     logging.debug(
         "Going to upload file to GDrive. filename=%s , gdrivename=%s , parent_folder=%s"
         % (filename, gdrivename, parent_folder)
     )
     # Convert the name of the file on GDrive in case it is not provided
     if gdrivename is None or gdrivename == "":
         gdrivename = filename.split("/")[-1]
     # Check whether the file does not already exists
     try:
         self.get_id(gdrivename, parent_folder)
     except:
         pass
     else:
         logging.error("The file to upload %s already exists" % gdrivename)
         raise FileExistsError(gdrivename)
     # Prepare for the file upload
     logging.debug("Creating the media object for uploading from %s" % filename)
     media = MediaFileUpload(filename, chunksize=CHUNKSIZE, resumable=True)
     if not media.mimetype():
         logging.debug("MIME type of the file has not been recognized, using the default %s" % DEFAULT_MIMETYPE)
         media = MediaFileUpload(filename, mimetype=DEFAULT_MIMETYPE, chunksize=CHUNKSIZE, resumable=True)
     body = {
         "name": gdrivename,
         #'parents': [{"id": parent_folder}],
         "parents": [parent_folder],
     }
     logging.debug("Starting upload of the %s file as %s" % (filename, gdrivename))
     request = self.service.files().create(body=body, media_body=media, fields="id")
     retry = 5
     while retry > 0:
         try:
             response = None
             while response is None:
                 status, response = request.next_chunk()
                 if status:
                     logging.info("Uploaded %d%%." % int(status.progress() * 100))
             logging.info("Upload has been completed")
             # No need for a retry
             retry = -1
         except apiclient.errors.HttpError as e:
             if e.resp.status in [404]:
                 # Start the upload all over again.
                 request = self.service.files().create(body=body, media_body=media, fields="id")
             elif e.resp.status in [500, 502, 503, 504]:
                 # Call next_chunk() again, but use an exponential backoff for repeated errors.
                 logging.warning("Upload of a chunk has failed, retrying ...")
                 retry -= 1
                 time.sleep(3)
             else:
                 # Do not retry. Log the error and fail.
                 logging.error("The upload has failed: %s" % str(e))
                 raise
     if retry == 0:
         logging.error("The upload has failed.")
         raise ConnectionError
     fid = response.get("id")
     self.cache[fid] = (gdrivename, parent_folder)
     return fid
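The 5xx branch above says "use an exponential backoff for repeated errors" but then sleeps a fixed three seconds. A sketch of the jittered backoff that other examples on this page use (compare handle_progressless_iter in Example #12), written as a drop-in helper:

# Sketch of a jittered exponential backoff for the 5xx retry branch above;
# `attempt` counts consecutive failures, `cap` bounds the sleep in seconds.
import random
import time

def backoff_sleep(attempt, cap=64):
    time.sleep(random.random() * min(2 ** attempt, cap))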
Example #9
    def update(self, new_path=None, parent_id='root'):
        try:
            if not hasattr(self, 'id'):
                return self.create(parent_id)

            existing_file = self.get_file(self.id)

            if new_path is not None:
                self.path = new_path

            mime_type = defaul_mime_type
            media_body = None

            if not os.path.isdir(self.path):
                media_body = MediaFileUpload(self.path, resumable=True)
                if media_body.size() == 0:
                    logger.error('cannot update no content file %s', self.path)
                    return None
                if media_body.mimetype() is not None:
                    mime_type = media_body.mimetype()
                else:
                    media_body._mimetype = mime_type
            else:
                mime_type = folder_mime_type

            existing_file['title'] = os.path.basename(self.path)
            existing_file['parents'] = [{'id': parent_id}]
            existing_file['mimeType'] = mime_type

            logger.info('updated %s', self.path)
            with drive.lock:
                metadata = drive.service.files().update(
                    fileId=self.id,
                    body=existing_file,
                    media_body=media_body).execute()

            self.id = metadata['id']
            if metadata.has_key('downloadUrl'):
                self.download_url = metadata['downloadUrl']
            if metadata.has_key('md5Checksum'):
                self.md5Checksum = metadata['md5Checksum']
            return metadata
        except errors.HttpError, error:
            logger.error('an error occurred: %s', error)
            return None
Example #10
def upload_zip_to_gcs(server_key, archive_file, backup=False):
    name = get_gcs_archive_name(server_key)
    credentials = gce.AppAssertionCredentials(scope=STORAGE_API_SCOPE)
    http = credentials.authorize(httplib2.Http())
    service = build('storage', STORAGE_API_VERSION, http=http)
    retry = True
    while retry:
        media = MediaFileUpload(archive_file, chunksize=CHUNKSIZE, resumable=True)
        if not media.mimetype():
            media = MediaFileUpload(archive_file, 'application/zip', resumable=True)
        request = service.objects().insert(bucket=app_bucket, name=name, media_body=media)
        progress = previous_progress = None
        tries = 0
        response = None
        while response is None:
            try:
                status, response = request.next_chunk()
                tries = 0
                progress = int(status.progress() * 100) if status is not None else 0
                if response is not None:  # Done
                    retry = False
                    progress = 100
                if progress != previous_progress:
                    if progress % 10 == 0:
                        logger.info("Server {0} archive is {1}% uploaded".format(server_key, progress))
                    if not backup:
                        try:
                            client.post_event(server_key, STOP_EVENT, progress)
                        except Exception as e:
                            logger.exception(
                                "Error sending controller save event for server [{0}]: {1}".format(
                                    server_key, e
                                )
                            )
                previous_progress = progress
            except HttpError as e:
                if e.resp.status in [404]:  # Start the upload all over again
                    logger.error(
                        "Error ({0}) uploading archive for server {1}. Retrying....".format(
                            str(e), server_key
                        )
                    )
                    break  # exit the inner loop so the outer loop rebuilds the request
                elif e.resp.status in [500, 502, 503, 504]:  # Retry with backoff
                    tries += 1
                    if tries > NUM_RETRIES:
                        raise
                    sleeptime = 2**min(tries, 4)
                    logger.error(
                        "Error ({0}) uploading archive for server {1}. Sleeping {2} seconds.".format(
                            str(e), server_key, sleeptime
                        )
                    )
                    time.sleep(sleeptime)
                else:
                    raise
    os.remove(archive_file)
Example #11
    def create(self, parent_id='root'):
        mime_type = defaul_mime_type
        media_body = None

        if not os.path.isdir(self.path):
            media_body = MediaFileUpload(self.path, resumable=True)
            if media_body.size() == 0:
                logger.error('cannot create no content file %s', self.path)
                return None
            if media_body.mimetype() is not None:
                mime_type = media_body.mimetype()
            else:
                media_body._mimetype = mime_type
        else:
            mime_type = folder_mime_type

        body = {
            'title': os.path.basename(self.path),
            'mimeType': mime_type,
            'parents': [{'id': parent_id}]
        }

        try:
            with drive.lock:
                metadata = drive.service.files().insert(
                    body=body,
                    media_body=media_body).execute()

            logger.info('created %s, %s', self.path, body['mimeType'])

            self.id = metadata['id']
            if metadata.has_key('downloadUrl'):
                self.download_url = metadata['downloadUrl']
            if metadata.has_key('md5Checksum'):
                self.md5Checksum = metadata['md5Checksum']
            return metadata
        except errors.HttpError, error:
            logger.error('an error occurred: %s', error)
            return None
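Both this create() and the update() in Example #9 write to the private _mimetype attribute when mimetype detection fails. A sketch of the same fallback using only the public constructor, in the style the other examples on this page follow:

# Public-API sketch of the mimetype fallback used above: rebuild the
# MediaFileUpload with an explicit mimetype instead of poking _mimetype.
from googleapiclient.http import MediaFileUpload

def build_media(path, fallback='application/octet-stream'):
    media = MediaFileUpload(path, resumable=True)
    if media.mimetype() is None:
        media = MediaFileUpload(path, mimetype=fallback, resumable=True)
    return media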
Example #12
    def upload_file_to_bucket(self, bucket_name, file_path):
        def handle_progressless_iter(error, progressless_iters):
            if progressless_iters > NUM_RETRIES:
                self.logger.info('Failed to make progress for too many consecutive iterations.')
                raise error

            sleeptime = random.random() * (2 ** progressless_iters)
            self.logger.info(
                'Caught exception ({}). Sleeping for {} seconds before retry #{}.'.format(
                    str(error), sleeptime, progressless_iters))

            time.sleep(sleeptime)

        self.logger.info('Building upload request...')
        media = MediaFileUpload(file_path, chunksize=CHUNKSIZE, resumable=True)
        if not media.mimetype():
            media = MediaFileUpload(file_path, DEFAULT_MIMETYPE, resumable=True)

        blob_name = os.path.basename(file_path)
        if not self.bucket_exists(bucket_name):
            self.logger.error("Bucket {} doesn't exists".format(bucket_name))
            raise "Bucket doesn't exist"

        request = self._storage.objects().insert(
            bucket=bucket_name, name=blob_name, media_body=media)
        self.logger.info('Uploading file: {}, to bucket: {}, blob: {}'.format(
            file_path, bucket_name, blob_name))

        progressless_iters = 0
        response = None
        while response is None:
            error = None
            try:
                progress, response = request.next_chunk()
                if progress:
                    self.logger.info('Upload {}%'.format(100 * progress.progress()))
            except errors.HttpError as err:
                error = err
                if err.resp.status < 500:
                    raise
            except RETRYABLE_ERRORS as err:
                error = err

            if error:
                progressless_iters += 1
                handle_progressless_iter(error, progressless_iters)
            else:
                progressless_iters = 0

        self.logger.info('Upload complete!')
        self.logger.info('Uploaded Object:')
        self.logger.info(json_dumps(response, indent=2))
        return (True, blob_name)
Example #13
    def test_media_file_upload_to_from_json(self):
        upload = MediaFileUpload(datafile("small.png"), chunksize=500, resumable=True)
        self.assertEqual("image/png", upload.mimetype())
        self.assertEqual(190, upload.size())
        self.assertEqual(True, upload.resumable())
        self.assertEqual(500, upload.chunksize())
        self.assertEqual("PNG", upload.getbytes(1, 3))

        json = upload.to_json()
        new_upload = MediaUpload.new_from_json(json)

        self.assertEqual("image/png", new_upload.mimetype())
        self.assertEqual(190, new_upload.size())
        self.assertEqual(True, new_upload.resumable())
        self.assertEqual(500, new_upload.chunksize())
        self.assertEqual("PNG", new_upload.getbytes(1, 3))
Example #14
def upload_creative_asset(service, profile_id, advertiser_id, asset_name, path_to_asset_file, asset_type):
    """Uploads a creative asset and returns an assetIdentifier."""
    # Construct the creative asset metadata
    creative_asset = {"assetIdentifier": {"name": asset_name, "type": asset_type}}

    media = MediaFileUpload(path_to_asset_file)
    if not media.mimetype():
        media = MediaFileUpload(path_to_asset_file, "application/octet-stream")

    response = (
        service.creativeAssets()
        .insert(advertiserId=advertiser_id, profileId=profile_id, media_body=media, body=creative_asset)
        .execute()
    )

    return response["assetIdentifier"]
Example #15
File: gme.py Project: keithmoss/Hodor
def upload_file_worker(ctx, asset_id, asset_type, filepath, chunk_size):
  print "upload_file_worker %s" % (filepath)
  """Upload a given file to an asset in its own thread as
  part of upload_files_multithreaded().

  Parameters
  ----------
  ctx : Context
    A Click Context object.
  asset_id : str
    The Id of a valid raster or vector asset.
  asset_type : int
    A GME asset type defined by the Asset class.
  filepath : str
    The absolute path to the file.
  chunk_size : int
    The size of each upload chunk (must be a multiple of 256KB). Defaults to -1 (native Python streaming)
  """
  @retries(1000)
  def next_chunk(ctx, request):
    return request.next_chunk()

  ctx.log("Begun uploading %s" % (os.path.basename(filepath)))
  start_time = time.time()

  media = MediaFileUpload(filepath, chunksize=chunk_size, resumable=True)
  if not media.mimetype():
    media = MediaFileUpload(filepath, mimetype='application/octet-stream', chunksize=chunk_size, resumable=True)

  resource = get_asset_resource(ctx.service(ident=current_process().ident), asset_type)
  request = resource.files().insert(id=asset_id, filename=os.path.basename(filepath), media_body=media)
  response = None
  while response is None:
    try:
      start_time_chunk = time.time()
      progress, response = next_chunk(ctx, request)
      # Dodgy math is dodgy
      # if progress:
      #   Mbps = ((chunk_size / (time.time() - start_time_chunk)) * 0.008 * 0.001)
      #   ctx.log("%s%% (%s/Mbps)" % (round(progress.progress() * 100), round(Mbps, 2)))
    except NoContent as e:
      # File uploads return a 204 No Content "error" that actually means it's finished successfully.
      response = ""

  ctx.log("Finished uploading %s (%s mins)" % (os.path.basename(filepath), round((time.time() - start_time) / 60, 2)))
Example #16
File: gme.py Project: keithmoss/Hodor
def upload_file(ctx, asset_id, asset_type, filepath, chunk_size=-1):
  """Upload a given file to an asset.

  Parameters
  ----------
  ctx : Context
    A Click Context object.
  asset_id : str
    The Id of a valid raster or vector asset.
  asset_type : str
    The type of asset being represented. Possible values: table, raster
  filepath : str
    The absolute path to the file.
  chunk_size : int
    The size of each upload chunk (must be a multiple of 256KB). Defaults to -1 (native Python streaming)
  """
  @retries(1000)
  def next_chunk(ctx, request):
    return request.next_chunk()

  ctx.log("Begun uploading %s" % (os.path.basename(filepath)))
  start_time = time.time()

  media = MediaFileUpload(filepath, chunksize=chunk_size, resumable=True)
  if not media.mimetype():
    media = MediaFileUpload(filepath, mimetype='application/octet-stream', chunksize=chunk_size, resumable=True)

  resource = ctx.service().tables() if asset_type == "vector" else ctx.service().rasters()
  request = resource.files().insert(id=asset_id, filename=os.path.basename(filepath), media_body=media)
  response = None
  while response is None:
    try:
      start_time_chunk = time.time()
      progress, response = next_chunk(ctx, request)
      # Dodgy math is dodgy
      if progress:
        Mbps = ((chunk_size / (time.time() - start_time_chunk)) * 0.008 * 0.001)
        ctx.log("%s%% (%s/Mbps)" % (round(progress.progress() * 100), round(Mbps, 2)))
    except NoContent as e:
      # File uploads return a 204 No Content "error" that actually means it's finished successfully.
      response = ""

  ctx.log("Finished uploading %s (%s mins)" % (os.path.basename(filepath), round((time.time() - start_time) / 60, 2)))
Example #17
def upload(argv):
  filename = argv[1]
  bucket_name, object_name = argv[2][5:].split('/', 1)
  assert bucket_name and object_name

  service = get_authenticated_service(SCOPES)

  print 'Building upload request...'
  media = MediaFileUpload(filename)
  if not media.mimetype():
    media = MediaFileUpload(filename, DEFAULT_MIMETYPE)
  request = service.objects().insert(bucket=bucket_name, name=object_name, media_body=media)

  print 'Uploading file: %s to bucket: %s object: %s ' % (filename, bucket_name,
                                                          object_name)
  response = request.execute()

  print 'Uploaded Object:'
  print json_dumps(response, indent=2)
Example #18
def upload_creative_asset(service, profile_id, advertiser_id, asset_name,
                          path_to_asset_file, asset_type):
    """Uploads a creative asset and returns an assetIdentifier."""
    # Construct the creative asset metadata
    creative_asset = {
        'assetIdentifier': {
            'name': asset_name,
            'type': asset_type
        }
    }

    media = MediaFileUpload(path_to_asset_file)
    if not media.mimetype():
        media = MediaFileUpload(path_to_asset_file, 'application/octet-stream')

    response = service.creativeAssets().insert(advertiserId=advertiser_id,
                                               profileId=profile_id,
                                               media_body=media,
                                               body=creative_asset).execute()

    return response['assetIdentifier']
Example #19
File: cli.py Project: ruo91/Hodor
    def upload_file(self, file, id, resource):
        # Retry transport and file IO errors.
        RETRYABLE_ERRORS = (httplib2.HttpLib2Error, IOError)
        chunk_size = getattr(self, 'chunk_size', -1)

        self.log("Uploading file '%s'" % (file))
        start_time = time.time()

        media = MediaFileUpload(file, chunksize=chunk_size, resumable=True)
        if not media.mimetype():
            # media = MediaFileUpload(file, 'application/octet-stream', resumable=True)
            raise Exception(
                "Could not determine mime-type. Please make lib mimetypes aware of it."
            )
        request = resource.files().insert(id=id,
                                          filename=os.path.basename(file),
                                          media_body=media)

        progressless_iters = 0
        response = None
        while response is None:
            error = None
            try:
                start_time_chunk = time.time()
                progress, response = request.next_chunk()
                if progress:
                    Mbps = ((chunk_size / (time.time() - start_time_chunk)) *
                            0.008 * 0.001)
                    print "%s%% (%s/Mbps)" % (round(
                        progress.progress() * 100), round(Mbps, 2))
            except HttpError, err:
                # Contrary to the documentation, GME doesn't return 201/200 for the last chunk
                if err.resp.status == 204:
                    response = ""
                else:
                    error = err
                    if err.resp.status < 500 and err.resp.status != 410:
                        raise
            except RETRYABLE_ERRORS, err:
                error = err
Example #20
def upload_creative_asset(
    service, profile_id, advertiser_id, asset_name, path_to_asset_file,
    asset_type):
  """Uploads a creative asset and returns a creative asset metadata object."""
  # Construct the creative asset metadata
  creative_asset = {
      'assetIdentifier': {
          'name': asset_name,
          'type': asset_type
      }
  }

  media = MediaFileUpload(path_to_asset_file)
  if not media.mimetype():
    media = MediaFileUpload(path_to_asset_file, 'application/octet-stream')

  response = service.creativeAssets().insert(
      advertiserId=advertiser_id,
      profileId=profile_id,
      media_body=media,
      body=creative_asset).execute()

  return response
Example #21
        def method(self, **kwargs):
            for name in kwargs.iterkeys():
                if name not in argmap:
                    raise TypeError('Got an unexpected keyword argument "%s"' %
                                    name)

            for name in required_params:
                if name not in kwargs:
                    raise TypeError('Missing required parameter "%s"' % name)

            for name, regex in pattern_params.iteritems():
                if name in kwargs:
                    if isinstance(kwargs[name], basestring):
                        pvalues = [kwargs[name]]
                    else:
                        pvalues = kwargs[name]
                    for pvalue in pvalues:
                        if re.match(regex, pvalue) is None:
                            raise TypeError(
                                'Parameter "%s" value "%s" does not match the pattern "%s"'
                                % (name, pvalue, regex))

            for name, enums in enum_params.iteritems():
                if name in kwargs:
                    if kwargs[name] not in enums:
                        raise TypeError(
                            'Parameter "%s" value "%s" is not an allowed value in "%s"'
                            % (name, kwargs[name], str(enums)))

            actual_query_params = {}
            actual_path_params = {}
            for key, value in kwargs.iteritems():
                to_type = param_type.get(key, 'string')
                # For repeated parameters we cast each member of the list.
                if key in repeated_params and type(value) == type([]):
                    cast_value = [_cast(x, to_type) for x in value]
                else:
                    cast_value = _cast(value, to_type)
                if key in query_params:
                    actual_query_params[argmap[key]] = cast_value
                if key in path_params:
                    actual_path_params[argmap[key]] = cast_value
            body_value = kwargs.get('body', None)
            media_filename = kwargs.get('media_body', None)

            if self._developerKey:
                actual_query_params['key'] = self._developerKey

            model = self._model
            # If there is no schema for the response then presume a binary blob.
            if 'response' not in methodDesc:
                model = RawModel()

            headers = {}
            headers, params, query, body = model.request(
                headers, actual_path_params, actual_query_params, body_value)

            expanded_url = uritemplate.expand(pathUrl, params)
            url = urlparse.urljoin(self._baseUrl, expanded_url + query)

            resumable = None
            multipart_boundary = ''

            if media_filename:
                # Convert a simple filename into a MediaUpload object.
                if isinstance(media_filename, basestring):
                    (media_mime_type,
                     encoding) = mimetypes.guess_type(media_filename)
                    if media_mime_type is None:
                        raise UnknownFileType(media_filename)
                    if not mimeparse.best_match([media_mime_type],
                                                ','.join(accept)):
                        raise UnacceptableMimeTypeError(media_mime_type)
                    media_upload = MediaFileUpload(media_filename,
                                                   media_mime_type)
                elif isinstance(media_filename, MediaUpload):
                    media_upload = media_filename
                else:
                    raise TypeError(
                        'media_filename must be str or MediaUpload.')

                if media_upload.resumable():
                    resumable = media_upload

                # Check the maxSize
                if maxSize > 0 and media_upload.size() > maxSize:
                    raise MediaUploadSizeError("Media larger than: %s" %
                                               maxSize)

                # Use the media path uri for media uploads
                if media_upload.resumable():
                    expanded_url = uritemplate.expand(mediaResumablePathUrl,
                                                      params)
                else:
                    expanded_url = uritemplate.expand(mediaPathUrl, params)
                url = urlparse.urljoin(self._baseUrl, expanded_url + query)

                if body is None:
                    # This is a simple media upload
                    headers['content-type'] = media_upload.mimetype()
                    expanded_url = uritemplate.expand(mediaResumablePathUrl,
                                                      params)
                    if not media_upload.resumable():
                        body = media_upload.getbytes(0, media_upload.size())
                else:
                    # This is a multipart/related upload.
                    msgRoot = MIMEMultipart('related')
                    # msgRoot should not write out its own headers
                    setattr(msgRoot, '_write_headers', lambda self: None)

                    # attach the body as one part
                    msg = MIMENonMultipart(*headers['content-type'].split('/'))
                    msg.set_payload(body)
                    msgRoot.attach(msg)

                    # attach the media as the second part
                    msg = MIMENonMultipart(*media_upload.mimetype().split('/'))
                    msg['Content-Transfer-Encoding'] = 'binary'

                    if media_upload.resumable():
                        # This is a multipart resumable upload, where a multipart payload
                        # looks like this:
                        #
                        #  --===============1678050750164843052==
                        #  Content-Type: application/json
                        #  MIME-Version: 1.0
                        #
                        #  {'foo': 'bar'}
                        #  --===============1678050750164843052==
                        #  Content-Type: image/png
                        #  MIME-Version: 1.0
                        #  Content-Transfer-Encoding: binary
                        #
                        #  <BINARY STUFF>
                        #  --===============1678050750164843052==--
                        #
                        # In the case of resumable multipart media uploads, the <BINARY
                        # STUFF> is large and will be spread across multiple PUTs.  What we
                        # do here is compose the multipart message with a random payload in
                        # place of <BINARY STUFF> and then split the resulting content into
                        # two pieces, text before <BINARY STUFF> and text after <BINARY
                        # STUFF>. The text after <BINARY STUFF> is the multipart boundary.
                        # In apiclient.http the HttpRequest will send the text before
                        # <BINARY STUFF>, then send the actual binary media in chunks, and
                        # then will send the multipart delimiter.

                        payload = hex(random.getrandbits(300))
                        msg.set_payload(payload)
                        msgRoot.attach(msg)
                        body = msgRoot.as_string()
                        body, _ = body.split(payload)
                        resumable = media_upload
                    else:
                        payload = media_upload.getbytes(0, media_upload.size())
                        msg.set_payload(payload)
                        msgRoot.attach(msg)
                        body = msgRoot.as_string()

                    multipart_boundary = msgRoot.get_boundary()
                    headers['content-type'] = (
                        'multipart/related; '
                        'boundary="%s"') % multipart_boundary

            logging.info('URL being requested: %s' % url)
            return self._requestBuilder(self._http,
                                        model.response,
                                        url,
                                        method=httpMethod,
                                        body=body,
                                        headers=headers,
                                        methodId=methodId,
                                        resumable=resumable)
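The block comment above is the clearest statement on this page of how resumable multipart bodies are assembled: serialize the envelope with a random placeholder standing in for the binary payload, then split the serialized text on that placeholder. A standalone sketch of just that step (the function name is illustrative, and this is independent of the apiclient internals):

# Standalone sketch of the placeholder-splitting trick described in the
# comment above; not part of the library code.
import random
from email.mime.multipart import MIMEMultipart
from email.mime.nonmultipart import MIMENonMultipart

def split_multipart_shell(metadata_json, media_mimetype):
    root = MIMEMultipart('related')
    # Keep the envelope from emitting its own headers, as the code above does.
    setattr(root, '_write_headers', lambda self: None)

    # First part: the JSON metadata body.
    meta = MIMENonMultipart('application', 'json')
    meta.set_payload(metadata_json)
    root.attach(meta)

    # Second part: a random placeholder where the binary media belongs.
    media = MIMENonMultipart(*media_mimetype.split('/'))
    media['Content-Transfer-Encoding'] = 'binary'
    placeholder = hex(random.getrandbits(300))
    media.set_payload(placeholder)
    root.attach(media)

    # Text before the placeholder is sent first; the trailing piece is the
    # multipart delimiter sent after the binary chunks.
    before, after = root.as_string().split(placeholder)
    return before, after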
Example #22
File: client.py Project: z123/GCS-Python
 def upload_from_filename(self, filename=None, bucket_id=None, blob_location=None):
     media = MediaFileUpload(filename)
     if not media.mimetype():
         media = MediaFileUpload(filename, DEFAULT_MIMETYPE)
     self.gcs.objects().insert(bucket=bucket_id, name=blob_location, media_body=media).execute()
Example #23
    def method(self, **kwargs):
        # Don't bother with doc string, it will be over-written by createMethod.

        for name in kwargs.iterkeys():
            if name not in parameters.argmap:
                raise TypeError('Got an unexpected keyword argument "%s"' %
                                name)

        # Remove args that have a value of None.
        keys = kwargs.keys()
        for name in keys:
            if kwargs[name] is None:
                del kwargs[name]

        for name in parameters.required_params:
            if name not in kwargs:
                raise TypeError('Missing required parameter "%s"' % name)

        for name, regex in parameters.pattern_params.iteritems():
            if name in kwargs:
                if isinstance(kwargs[name], basestring):
                    pvalues = [kwargs[name]]
                else:
                    pvalues = kwargs[name]
                for pvalue in pvalues:
                    if re.match(regex, pvalue) is None:
                        raise TypeError(
                            'Parameter "%s" value "%s" does not match the pattern "%s"'
                            % (name, pvalue, regex))

        for name, enums in parameters.enum_params.iteritems():
            if name in kwargs:
                # We need to handle the case of a repeated enum
                # name differently, since we want to handle both
                # arg='value' and arg=['value1', 'value2']
                if (name in parameters.repeated_params
                        and not isinstance(kwargs[name], basestring)):
                    values = kwargs[name]
                else:
                    values = [kwargs[name]]
                for value in values:
                    if value not in enums:
                        raise TypeError(
                            'Parameter "%s" value "%s" is not an allowed value in "%s"'
                            % (name, value, str(enums)))

        actual_query_params = {}
        actual_path_params = {}
        for key, value in kwargs.iteritems():
            to_type = parameters.param_types.get(key, 'string')
            # For repeated parameters we cast each member of the list.
            if key in parameters.repeated_params and type(value) == type([]):
                cast_value = [_cast(x, to_type) for x in value]
            else:
                cast_value = _cast(value, to_type)
            if key in parameters.query_params:
                actual_query_params[parameters.argmap[key]] = cast_value
            if key in parameters.path_params:
                actual_path_params[parameters.argmap[key]] = cast_value
        body_value = kwargs.get('body', None)
        media_filename = kwargs.get('media_body', None)

        if self._developerKey:
            actual_query_params['key'] = self._developerKey

        model = self._model
        if methodName.endswith('_media'):
            model = MediaModel()
        elif 'response' not in methodDesc:
            model = RawModel()

        headers = {}
        headers, params, query, body = model.request(headers,
                                                     actual_path_params,
                                                     actual_query_params,
                                                     body_value)

        expanded_url = uritemplate.expand(pathUrl, params)
        url = _urljoin(self._baseUrl, expanded_url + query)

        resumable = None
        multipart_boundary = ''

        if media_filename:
            # Ensure we end up with a valid MediaUpload object.
            if isinstance(media_filename, basestring):
                (media_mime_type,
                 encoding) = mimetypes.guess_type(media_filename)
                if media_mime_type is None:
                    raise UnknownFileType(media_filename)
                if not mimeparse.best_match([media_mime_type],
                                            ','.join(accept)):
                    raise UnacceptableMimeTypeError(media_mime_type)
                media_upload = MediaFileUpload(media_filename,
                                               mimetype=media_mime_type)
            elif isinstance(media_filename, MediaUpload):
                media_upload = media_filename
            else:
                raise TypeError('media_filename must be str or MediaUpload.')

            # Check the maxSize
            if maxSize > 0 and media_upload.size() > maxSize:
                raise MediaUploadSizeError("Media larger than: %s" % maxSize)

            # Use the media path uri for media uploads
            expanded_url = uritemplate.expand(mediaPathUrl, params)
            url = _urljoin(self._baseUrl, expanded_url + query)
            if media_upload.resumable():
                url = _add_query_parameter(url, 'uploadType', 'resumable')

            if media_upload.resumable():
                # This is all we need to do for resumable, if the body exists it gets
                # sent in the first request, otherwise an empty body is sent.
                resumable = media_upload
            else:
                # A non-resumable upload
                if body is None:
                    # This is a simple media upload
                    headers['content-type'] = media_upload.mimetype()
                    body = media_upload.getbytes(0, media_upload.size())
                    url = _add_query_parameter(url, 'uploadType', 'media')
                else:
                    # This is a multipart/related upload.
                    msgRoot = MIMEMultipart('related')
                    # msgRoot should not write out its own headers
                    setattr(msgRoot, '_write_headers', lambda self: None)

                    # attach the body as one part
                    msg = MIMENonMultipart(*headers['content-type'].split('/'))
                    msg.set_payload(body)
                    msgRoot.attach(msg)

                    # attach the media as the second part
                    msg = MIMENonMultipart(*media_upload.mimetype().split('/'))
                    msg['Content-Transfer-Encoding'] = 'binary'

                    payload = media_upload.getbytes(0, media_upload.size())
                    msg.set_payload(payload)
                    msgRoot.attach(msg)
                    # encode the body: note that we can't use `as_string`, because
                    # it plays games with `From ` lines.
                    fp = StringIO.StringIO()
                    g = Generator(fp, mangle_from_=False)
                    g.flatten(msgRoot, unixfrom=False)
                    body = fp.getvalue()

                    multipart_boundary = msgRoot.get_boundary()
                    headers['content-type'] = (
                        'multipart/related; '
                        'boundary="%s"') % multipart_boundary
                    url = _add_query_parameter(url, 'uploadType', 'multipart')

        logger.info('URL being requested: %s %s' % (httpMethod, url))
        return self._requestBuilder(self._http,
                                    model.response,
                                    url,
                                    method=httpMethod,
                                    body=body,
                                    headers=headers,
                                    methodId=methodId,
                                    resumable=resumable)
Example #24
    def method(self, **kwargs):
      for name in six.iterkeys(kwargs):
        if name not in argmap:
          raise TypeError('Got an unexpected keyword argument "%s"' % name)

      for name in required_params:
        if name not in kwargs:
          raise TypeError('Missing required parameter "%s"' % name)

      for name, regex in six.iteritems(pattern_params):
        if name in kwargs:
          if isinstance(kwargs[name], six.string_types):
            pvalues = [kwargs[name]]
          else:
            pvalues = kwargs[name]
          for pvalue in pvalues:
            if re.match(regex, pvalue) is None:
              raise TypeError(
                  'Parameter "%s" value "%s" does not match the pattern "%s"' %
                  (name, pvalue, regex))

      for name, enums in six.iteritems(enum_params):
        if name in kwargs:
          if kwargs[name] not in enums:
            raise TypeError(
                'Parameter "%s" value "%s" is not an allowed value in "%s"' %
                (name, kwargs[name], str(enums)))

      actual_query_params = {}
      actual_path_params = {}
      for key, value in six.iteritems(kwargs):
        to_type = param_type.get(key, 'string')
        # For repeated parameters we cast each member of the list.
        if key in repeated_params and type(value) == type([]):
          cast_value = [_cast(x, to_type) for x in value]
        else:
          cast_value = _cast(value, to_type)
        if key in query_params:
          actual_query_params[argmap[key]] = cast_value
        if key in path_params:
          actual_path_params[argmap[key]] = cast_value
      body_value = kwargs.get('body', None)
      media_filename = kwargs.get('media_body', None)

      if self._developerKey:
        actual_query_params['key'] = self._developerKey

      model = self._model
      # If there is no schema for the response then presume a binary blob.
      if 'response' not in methodDesc:
        model = RawModel()

      headers = {}
      headers, params, query, body = model.request(headers,
          actual_path_params, actual_query_params, body_value)

      expanded_url = uritemplate.expand(pathUrl, params)
      url = six.moves.urllib.parse.urljoin(self._baseUrl, expanded_url + query)

      resumable = None
      multipart_boundary = ''

      if media_filename:
        # Ensure we end up with a valid MediaUpload object.
        if isinstance(media_filename, six.string_types):
          (media_mime_type, encoding) = mimetypes.guess_type(media_filename)
          if media_mime_type is None:
            raise UnknownFileType(media_filename)
          if not mimeparse.best_match([media_mime_type], ','.join(accept)):
            raise UnacceptableMimeTypeError(media_mime_type)
          media_upload = MediaFileUpload(media_filename, media_mime_type)
        elif isinstance(media_filename, MediaUpload):
          media_upload = media_filename
        else:
          raise TypeError('media_filename must be str or MediaUpload.')

        # Check the maxSize
        if maxSize > 0 and media_upload.size() > maxSize:
          raise MediaUploadSizeError("Media larger than: %s" % maxSize)

        # Use the media path uri for media uploads
        if media_upload.resumable():
          expanded_url = uritemplate.expand(mediaResumablePathUrl, params)
        else:
          expanded_url = uritemplate.expand(mediaPathUrl, params)
        url = six.moves.urllib.parse.urljoin(self._baseUrl, expanded_url + query)

        if media_upload.resumable():
          # This is all we need to do for resumable, if the body exists it gets
          # sent in the first request, otherwise an empty body is sent.
          resumable = media_upload
        else:
          # A non-resumable upload
          if body is None:
            # This is a simple media upload
            headers['content-type'] = media_upload.mimetype()
            body = media_upload.getbytes(0, media_upload.size())
          else:
            # This is a multipart/related upload.
            msgRoot = MIMEMultipart('related')
            # msgRoot should not write out its own headers
            setattr(msgRoot, '_write_headers', lambda self: None)

            # attach the body as one part
            msg = MIMENonMultipart(*headers['content-type'].split('/'))
            msg.set_payload(body)
            msgRoot.attach(msg)

            # attach the media as the second part
            msg = MIMENonMultipart(*media_upload.mimetype().split('/'))
            msg['Content-Transfer-Encoding'] = 'binary'

            payload = media_upload.getbytes(0, media_upload.size())
            msg.set_payload(payload)
            msgRoot.attach(msg)
            body = msgRoot.as_string()

            multipart_boundary = msgRoot.get_boundary()
            headers['content-type'] = ('multipart/related; '
                                       'boundary="%s"') % multipart_boundary

      logging.info('URL being requested: %s' % url)
      return self._requestBuilder(self._http,
                                  model.response,
                                  url,
                                  method=httpMethod,
                                  body=body,
                                  headers=headers,
                                  methodId=methodId,
                                  resumable=resumable)
Example #25
fields_to_return = 'nextPageToken,items(name,size,contentType,metadata(my-key))'
req = service.objects().list(bucket=justa_file_name, fields=fields_to_return)

while req is not None:
    resp = req.execute()
    items = resp["items"]
    for item in items:
        if item["name"] == file_name:
            # delete existing file
            service.objects().delete(bucket=bucket_name, object=file_name).execute()
            logging.info('Deleted existing file: %s from bucket %s' % (file_name, bucket_name))
    req = service.objects().list_next(req, resp)

logging.info('Building upload request...')
media = MediaFileUpload(local_file, chunksize=CHUNKSIZE, resumable=True)
if not media.mimetype():
    media = MediaFileUpload(local_file, DEFAULT_MIMETYPE, resumable=True)
request = service.objects().insert(bucket=bucket_name, name=blob_name,
                                   media_body=media)

logging.info('Uploading file: %s, to bucket: %s, blob: %s ' % (local_file, bucket_name,
                                                               blob_name))

progressless_iters = 0
response = None
while response is None:
    error = None
    try:
        progress, response = request.next_chunk()
        if progress:
            logging.info('Upload %d%%' % (100 * progress.progress()))
Example #26
        def method(self, **kwargs):
            for name in kwargs.iterkeys():
                if name not in argmap:
                    raise TypeError('Got an unexpected keyword argument "%s"' % name)

            for name in required_params:
                if name not in kwargs:
                    raise TypeError('Missing required parameter "%s"' % name)

            for name, regex in pattern_params.iteritems():
                if name in kwargs:
                    if isinstance(kwargs[name], basestring):
                        pvalues = [kwargs[name]]
                    else:
                        pvalues = kwargs[name]
                    for pvalue in pvalues:
                        if re.match(regex, pvalue) is None:
                            raise TypeError(
                                'Parameter "%s" value "%s" does not match the pattern "%s"' % (name, pvalue, regex)
                            )

            for name, enums in enum_params.iteritems():
                if name in kwargs:
                    if kwargs[name] not in enums:
                        raise TypeError(
                            'Parameter "%s" value "%s" is not an allowed value in "%s"'
                            % (name, kwargs[name], str(enums))
                        )

            actual_query_params = {}
            actual_path_params = {}
            for key, value in kwargs.iteritems():
                to_type = param_type.get(key, "string")
                # For repeated parameters we cast each member of the list.
                if key in repeated_params and type(value) == type([]):
                    cast_value = [_cast(x, to_type) for x in value]
                else:
                    cast_value = _cast(value, to_type)
                if key in query_params:
                    actual_query_params[argmap[key]] = cast_value
                if key in path_params:
                    actual_path_params[argmap[key]] = cast_value
            body_value = kwargs.get("body", None)
            media_filename = kwargs.get("media_body", None)

            if self._developerKey:
                actual_query_params["key"] = self._developerKey

            model = self._model
            # If there is no schema for the response then presume a binary blob.
            if "response" not in methodDesc:
                model = RawModel()

            headers = {}
            headers, params, query, body = model.request(headers, actual_path_params, actual_query_params, body_value)

            expanded_url = uritemplate.expand(pathUrl, params)
            url = urlparse.urljoin(self._baseUrl, expanded_url + query)

            resumable = None
            multipart_boundary = ""

            if media_filename:
                # Ensure we end up with a valid MediaUpload object.
                if isinstance(media_filename, basestring):
                    (media_mime_type, encoding) = mimetypes.guess_type(media_filename)
                    if media_mime_type is None:
                        raise UnknownFileType(media_filename)
                    if not mimeparse.best_match([media_mime_type], ",".join(accept)):
                        raise UnacceptableMimeTypeError(media_mime_type)
                    media_upload = MediaFileUpload(media_filename, media_mime_type)
                elif isinstance(media_filename, MediaUpload):
                    media_upload = media_filename
                else:
                    raise TypeError("media_filename must be str or MediaUpload.")

                # Check the maxSize
                if maxSize > 0 and media_upload.size() > maxSize:
                    raise MediaUploadSizeError("Media larger than: %s" % maxSize)

                # Use the media path uri for media uploads
                if media_upload.resumable():
                    expanded_url = uritemplate.expand(mediaResumablePathUrl, params)
                else:
                    expanded_url = uritemplate.expand(mediaPathUrl, params)
                url = urlparse.urljoin(self._baseUrl, expanded_url + query)

                if media_upload.resumable():
                    # This is all we need to do for resumable, if the body exists it gets
                    # sent in the first request, otherwise an empty body is sent.
                    resumable = media_upload
                else:
                    # A non-resumable upload
                    if body is None:
                        # This is a simple media upload
                        headers["content-type"] = media_upload.mimetype()
                        body = media_upload.getbytes(0, media_upload.size())
                    else:
                        # This is a multipart/related upload.
                        msgRoot = MIMEMultipart("related")
                        # msgRoot should not write out its own headers
                        setattr(msgRoot, "_write_headers", lambda self: None)

                        # attach the body as one part
                        msg = MIMENonMultipart(*headers["content-type"].split("/"))
                        msg.set_payload(body)
                        msgRoot.attach(msg)

                        # attach the media as the second part
                        msg = MIMENonMultipart(*media_upload.mimetype().split("/"))
                        msg["Content-Transfer-Encoding"] = "binary"

                        payload = media_upload.getbytes(0, media_upload.size())
                        msg.set_payload(payload)
                        msgRoot.attach(msg)
                        body = msgRoot.as_string()

                        multipart_boundary = msgRoot.get_boundary()
                        headers["content-type"] = ("multipart/related; " 'boundary="%s"') % multipart_boundary

            logging.info("URL being requested: %s" % url)
            return self._requestBuilder(
                self._http,
                model.response,
                url,
                method=httpMethod,
                body=body,
                headers=headers,
                methodId=methodId,
                resumable=resumable,
            )
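For context, a minimal usage sketch of calling a method generated this way. The `service` object, bucket, and file names are illustrative assumptions, not part of the example above:

from googleapiclient.http import MediaFileUpload  # apiclient.http in older releases

# The generated method validates and casts kwargs, picks the media upload URL,
# and returns an HttpRequest; execute() then performs the upload.
media = MediaFileUpload('report.csv', mimetype='text/csv')
request = service.objects().insert(bucket='my-bucket', name='report.csv',
                                   media_body=media)
response = request.execute()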
Example #27
    def method(self, **kwargs):
        # Don't bother with doc string, it will be over-written by createMethod.

        for name in kwargs.iterkeys():
            if name not in parameters.argmap:
                raise TypeError('Got an unexpected keyword argument "%s"' % name)

        # Remove args that have a value of None.
        keys = kwargs.keys()
        for name in keys:
            if kwargs[name] is None:
                del kwargs[name]

        for name in parameters.required_params:
            if name not in kwargs:
                raise TypeError('Missing required parameter "%s"' % name)

        for name, regex in parameters.pattern_params.iteritems():
            if name in kwargs:
                if isinstance(kwargs[name], basestring):
                    pvalues = [kwargs[name]]
                else:
                    pvalues = kwargs[name]
                for pvalue in pvalues:
                    if re.match(regex, pvalue) is None:
                        raise TypeError(
                            'Parameter "%s" value "%s" does not match the pattern "%s"' %
                            (name, pvalue, regex))

        for name, enums in parameters.enum_params.iteritems():
            if name in kwargs:
                # We need to handle the case of a repeated enum
                # name differently, since we want to handle both
                # arg='value' and arg=['value1', 'value2']
                if (name in parameters.repeated_params and
                        not isinstance(kwargs[name], basestring)):
                    values = kwargs[name]
                else:
                    values = [kwargs[name]]
                for value in values:
                    if value not in enums:
                        raise TypeError(
                            'Parameter "%s" value "%s" is not an allowed value in "%s"' %
                            (name, value, str(enums)))

        actual_query_params = {}
        actual_path_params = {}
        for key, value in kwargs.iteritems():
            to_type = parameters.param_types.get(key, 'string')
            # For repeated parameters we cast each member of the list.
            if key in parameters.repeated_params and type(value) == type([]):
                cast_value = [_cast(x, to_type) for x in value]
            else:
                cast_value = _cast(value, to_type)
            if key in parameters.query_params:
                actual_query_params[parameters.argmap[key]] = cast_value
            if key in parameters.path_params:
                actual_path_params[parameters.argmap[key]] = cast_value
        body_value = kwargs.get('body', None)
        media_filename = kwargs.get('media_body', None)

        if self._developerKey:
            actual_query_params['key'] = self._developerKey

        model = self._model
        if methodName.endswith('_media'):
            model = MediaModel()
        elif 'response' not in methodDesc:
            model = RawModel()

        headers = {}
        headers, params, query, body = model.request(headers,
                                                     actual_path_params, actual_query_params, body_value)

        expanded_url = uritemplate.expand(pathUrl, params)
        url = urlparse.urljoin(self._baseUrl, expanded_url + query)

        resumable = None
        multipart_boundary = ''

        if media_filename:
            # Ensure we end up with a valid MediaUpload object.
            if isinstance(media_filename, basestring):
                (media_mime_type, encoding) = mimetypes.guess_type(media_filename)
                if media_mime_type is None:
                    raise UnknownFileType(media_filename)
                if not mimeparse.best_match([media_mime_type], ','.join(accept)):
                    raise UnacceptableMimeTypeError(media_mime_type)
                media_upload = MediaFileUpload(media_filename,
                                               mimetype=media_mime_type)
            elif isinstance(media_filename, MediaUpload):
                media_upload = media_filename
            else:
                raise TypeError('media_filename must be str or MediaUpload.')

            # Check the maxSize
            if maxSize > 0 and media_upload.size() > maxSize:
                raise MediaUploadSizeError("Media larger than: %s" % maxSize)

            # Use the media path uri for media uploads
            expanded_url = uritemplate.expand(mediaPathUrl, params)
            url = urlparse.urljoin(self._baseUrl, expanded_url + query)
            if media_upload.resumable():
                url = _add_query_parameter(url, 'uploadType', 'resumable')

            if media_upload.resumable():
                # This is all we need to do for resumable, if the body exists it gets
                # sent in the first request, otherwise an empty body is sent.
                resumable = media_upload
            else:
                # A non-resumable upload
                if body is None:
                    # This is a simple media upload
                    headers['content-type'] = media_upload.mimetype()
                    body = media_upload.getbytes(0, media_upload.size())
                    url = _add_query_parameter(url, 'uploadType', 'media')
                else:
                    # This is a multipart/related upload.
                    msgRoot = MIMEMultipart('related')
                    # msgRoot should not write out its own headers
                    setattr(msgRoot, '_write_headers', lambda self: None)

                    # attach the body as one part
                    msg = MIMENonMultipart(*headers['content-type'].split('/'))
                    msg.set_payload(body)
                    msgRoot.attach(msg)

                    # attach the media as the second part
                    msg = MIMENonMultipart(*media_upload.mimetype().split('/'))
                    msg['Content-Transfer-Encoding'] = 'binary'

                    payload = media_upload.getbytes(0, media_upload.size())
                    msg.set_payload(payload)
                    msgRoot.attach(msg)
                    body = msgRoot.as_string()

                    multipart_boundary = msgRoot.get_boundary()
                    headers['content-type'] = ('multipart/related; '
                                               'boundary="%s"') % multipart_boundary
                    url = _add_query_parameter(url, 'uploadType', 'multipart')

        logger.info('URL being requested: %s' % url)
        return self._requestBuilder(self._http,
                                    model.response,
                                    url,
                                    method=httpMethod,
                                    body=body,
                                    headers=headers,
                                    methodId=methodId,
                                    resumable=resumable)
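The `_add_query_parameter` helper used above belongs to the client library and is not shown in the example. A minimal sketch of the behavior it needs, written against the Python 2 standard library to match the surrounding code (the real implementation may differ):

import urllib
import urlparse

def _add_query_parameter(url, name, value):
    # Append name=value to url's query string, leaving the rest of the URL intact.
    if value is None:
        return url
    parsed = list(urlparse.urlparse(url))
    query = urlparse.parse_qsl(parsed[4])
    query.append((name, value))
    parsed[4] = urllib.urlencode(query)
    return urlparse.urlunparse(parsed)

So `_add_query_parameter(url, 'uploadType', 'resumable')` turns a bare upload URL into one ending in `?uploadType=resumable`.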
Example #28
    def method(self, **kwargs):
      for name in kwargs.iterkeys():
        if name not in argmap:
          raise TypeError('Got an unexpected keyword argument "%s"' % name)

      for name in required_params:
        if name not in kwargs:
          raise TypeError('Missing required parameter "%s"' % name)

      for name, regex in pattern_params.iteritems():
        if name in kwargs:
          if isinstance(kwargs[name], basestring):
            pvalues = [kwargs[name]]
          else:
            pvalues = kwargs[name]
          for pvalue in pvalues:
            if re.match(regex, pvalue) is None:
              raise TypeError(
                  'Parameter "%s" value "%s" does not match the pattern "%s"' %
                  (name, pvalue, regex))

      for name, enums in enum_params.iteritems():
        if name in kwargs:
          if kwargs[name] not in enums:
            raise TypeError(
                'Parameter "%s" value "%s" is not an allowed value in "%s"' %
                (name, kwargs[name], str(enums)))

      actual_query_params = {}
      actual_path_params = {}
      for key, value in kwargs.iteritems():
        to_type = param_type.get(key, 'string')
        # For repeated parameters we cast each member of the list.
        if key in repeated_params and type(value) == type([]):
          cast_value = [_cast(x, to_type) for x in value]
        else:
          cast_value = _cast(value, to_type)
        if key in query_params:
          actual_query_params[argmap[key]] = cast_value
        if key in path_params:
          actual_path_params[argmap[key]] = cast_value
      body_value = kwargs.get('body', None)
      media_filename = kwargs.get('media_body', None)

      if self._developerKey:
        actual_query_params['key'] = self._developerKey

      model = self._model
      # If there is no schema for the response then presume a binary blob.
      if 'response' not in methodDesc:
        model = RawModel()

      headers = {}
      headers, params, query, body = model.request(headers,
          actual_path_params, actual_query_params, body_value)

      expanded_url = uritemplate.expand(pathUrl, params)
      url = urlparse.urljoin(self._baseUrl, expanded_url + query)

      resumable = None
      multipart_boundary = ''

      if media_filename:
        # Convert a simple filename into a MediaUpload object.
        if isinstance(media_filename, basestring):
          (media_mime_type, encoding) = mimetypes.guess_type(media_filename)
          if media_mime_type is None:
            raise UnknownFileType(media_filename)
          if not mimeparse.best_match([media_mime_type], ','.join(accept)):
            raise UnacceptableMimeTypeError(media_mime_type)
          media_upload = MediaFileUpload(media_filename, media_mime_type)
        elif isinstance(media_filename, MediaUpload):
          media_upload = media_filename
        else:
          raise TypeError('media_filename must be str or MediaUpload.')

        if media_upload.resumable():
          resumable = media_upload

        # Check the maxSize
        if maxSize > 0 and media_upload.size() > maxSize:
          raise MediaUploadSizeError("Media larger than: %s" % maxSize)

        # Use the media path uri for media uploads
        if media_upload.resumable():
          expanded_url = uritemplate.expand(mediaResumablePathUrl, params)
        else:
          expanded_url = uritemplate.expand(mediaPathUrl, params)
        url = urlparse.urljoin(self._baseUrl, expanded_url + query)

        if body is None:
          # This is a simple media upload
          headers['content-type'] = media_upload.mimetype()
          expanded_url = uritemplate.expand(mediaResumablePathUrl, params)
          if not media_upload.resumable():
            body = media_upload.getbytes(0, media_upload.size())
        else:
          # This is a multipart/related upload.
          msgRoot = MIMEMultipart('related')
          # msgRoot should not write out its own headers
          setattr(msgRoot, '_write_headers', lambda self: None)

          # attach the body as one part
          msg = MIMENonMultipart(*headers['content-type'].split('/'))
          msg.set_payload(body)
          msgRoot.attach(msg)

          # attach the media as the second part
          msg = MIMENonMultipart(*media_upload.mimetype().split('/'))
          msg['Content-Transfer-Encoding'] = 'binary'

          if media_upload.resumable():
            # This is a multipart resumable upload, where a multipart payload
            # looks like this:
            #
            #  --===============1678050750164843052==
            #  Content-Type: application/json
            #  MIME-Version: 1.0
            #
            #  {'foo': 'bar'}
            #  --===============1678050750164843052==
            #  Content-Type: image/png
            #  MIME-Version: 1.0
            #  Content-Transfer-Encoding: binary
            #
            #  <BINARY STUFF>
            #  --===============1678050750164843052==--
            #
            # In the case of resumable multipart media uploads, the <BINARY
            # STUFF> is large and will be spread across multiple PUTs.  What we
            # do here is compose the multipart message with a random payload in
            # place of <BINARY STUFF> and then split the resulting content into
            # two pieces, text before <BINARY STUFF> and text after <BINARY
            # STUFF>. The text after <BINARY STUFF> is the multipart boundary.
            # In apiclient.http the HttpRequest will send the text before
            # <BINARY STUFF>, then send the actual binary media in chunks, and
            # then will send the multipart delimiter.

            payload = hex(random.getrandbits(300))
            msg.set_payload(payload)
            msgRoot.attach(msg)
            body = msgRoot.as_string()
            body, _ = body.split(payload)
            resumable = media_upload
          else:
            payload = media_upload.getbytes(0, media_upload.size())
            msg.set_payload(payload)
            msgRoot.attach(msg)
            body = msgRoot.as_string()

          multipart_boundary = msgRoot.get_boundary()
          headers['content-type'] = ('multipart/related; '
                                     'boundary="%s"') % multipart_boundary

      logging.info('URL being requested: %s' % url)
      return self._requestBuilder(self._http,
                                  model.response,
                                  url,
                                  method=httpMethod,
                                  body=body,
                                  headers=headers,
                                  methodId=methodId,
                                  resumable=resumable)
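The long comment above describes composing the multipart message around a random placeholder and splitting the result on it. The trick can be demonstrated standalone; a sketch, with illustrative metadata and MIME types:

import random
from email.mime.multipart import MIMEMultipart
from email.mime.nonmultipart import MIMENonMultipart

msg_root = MIMEMultipart('related')
# Same header-suppression trick as above; the real headers are sent separately.
setattr(msg_root, '_write_headers', lambda self: None)

metadata = MIMENonMultipart('application', 'json')
metadata.set_payload('{"foo": "bar"}')
msg_root.attach(metadata)

media = MIMENonMultipart('image', 'png')
media['Content-Transfer-Encoding'] = 'binary'
placeholder = hex(random.getrandbits(300))  # vanishingly unlikely to collide
media.set_payload(placeholder)
msg_root.attach(media)

body = msg_root.as_string()
head, tail = body.split(placeholder)
# head (everything before <BINARY STUFF>) goes in the first request; the real
# media is then streamed in chunks, and the closing boundary is sent last.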
Example #29
def LoadCsvIntoGcsBucket(file_path_name, gcs_client, file_name, entity_name):
    try:
        if not isNotEmpty(file_path_name):   
            logger.info('LoadCsvIntoGcsBucket - file_path_name is empty') 
            return None
        if not gcs_client:   
            logger.info('LoadCsvIntoGcsBucket - gcs_client is empty') 
            return None
        if not isNotEmpty(file_name):   
            logger.info('LoadCsvIntoGcsBucket - file_name is empty') 
            return None
        if not isNotEmpty(entity_name):  
            logger.info('LoadCsvIntoGcsBucket - entity_name is empty') 
            return None     
                
        logger.info('LoadCsvIntoGcsBucket - file_path_name: %s\n' % (str(file_path_name)))
        blob_name = file_name
        logger.info('LoadCsvIntoGcsBucket - file name: %s, mapped to blob name: %s\n'%(file_name,blob_name))
        bucket_name="wf_"+entity_name.lower()
        logger.info('LoadCsvIntoGcsBucket - file name: %s, mapped to gcs bucket name: %s\n'%(file_name,bucket_name))                            
        '''
        determine if bucket exists
        '''
        bucket_exists=False
        fields_to_return = 'nextPageToken,items(name,location,timeCreated)'
        req = gcs_client.buckets().list(
                project=config._PROJECT_ID,
                fields=fields_to_return,  # optional
                maxResults=42)            # optional
        
        '''
        If you have too many items to list in one request, list_next() will
        automatically handle paging with the pageToken. 
        The list is limited to 52 files per request.
        '''
        while req is not None:
            resp = req.execute()
            items = resp.get('items', [])  # 'items' is absent when there are no results
            for item in items:
                if item["name"] == bucket_name:
                    bucket_exists=True
            req = gcs_client.buckets().list_next(req, resp)
                   
        if bucket_exists:
            logger.info('LoadCsvIntoGcsBucket - bucket: %s, exists\n'%(bucket_name))
            if _TEST:
                print('LoadCsvIntoGcsBucket - file name: %s, mapped to gcs bucket name: %s\n'%(file_name,bucket_name)) 
                
            '''
            If file already exists in bucket, then delete the file.
            Create a request to objects.list to retrieve a list of objects inside the bucket.
            
            '''
            fields_to_return = 'nextPageToken,items(name,size,contentType,metadata(my-key))'
            req = gcs_client.objects().list(bucket=bucket_name, fields=fields_to_return)
        
            while req is not None:
                resp = req.execute()
                #logger.info( '\n\n resp: %s\n'%(resp))
                if resp:
                    if resp.get('items'):  # 'items' is absent when the bucket is empty
                        items = resp['items']
                        for item in items:
                            if item["name"] == blob_name:
                                logger.info('LoadCsvIntoGcsBucket - blob: %s found in bucket %s\n'%(blob_name,bucket_name))
                                # delete existing file
                                response = gcs_client.objects().delete(bucket=bucket_name, object=blob_name).execute()
                                if 'error' in response:
                                    logger.error('LoadCsvIntoGcsBucket - delete of blob %s from bucket %s failed.'%(blob_name,bucket_name))
                                else:
                                    logger.info('LoadCsvIntoGcsBucket - deleted blob %s from bucket %s\n'%(blob_name,bucket_name))   
                            #else:
                            #    logger.info('LoadCsvIntoGcsBucket - bucket %s is empty\n' % (bucket_name))
                
                req = gcs_client.objects().list_next(req, resp)
                                    
            logger.info('LoadCsvIntoGcsBucket - building upload CSV request to GCS ...')
            media = MediaFileUpload(file_path_name, chunksize=config._CHUNKSIZE, resumable=True)
            if not media.mimetype():
                media = MediaFileUpload(file_path_name, config._DEFAULT_MIMETYPE, resumable=True)
            request = gcs_client.objects().insert(bucket=bucket_name, name=blob_name,
                                               media_body=media)
            
            logger.info('LoadCsvIntoGcsBucket - uploading CSV file: %s, to bucket: %s, as blob: %s\n' % (file_path_name, bucket_name,
                                                                    blob_name))
            
            progressless_iters = 0
            response = None
            while response is None:
                error = None
                try:
                    progress, response = request.next_chunk()
                    if progress:
                        logger.info('LoadCsvIntoGcsBucket - upload %d%%' % (100 * progress.progress()))
                except HttpError, err:
                    error = err
                    if err.resp.status < 500:
                        raise
                except logger._RETRYABLE_ERRORS, err:
                    error = err
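Example #29 breaks off inside its retry loop. A plausible completion, sketched after the common chunked-upload retry pattern; the retry cap, the backoff formula, and the availability of `random` and `time` are assumptions:

                if error:
                    progressless_iters += 1
                    if progressless_iters > 5:  # assumed retry cap
                        logger.error('LoadCsvIntoGcsBucket - upload failed: %s' % error)
                        raise error
                    sleeptime = random.random() * (2 ** progressless_iters)
                    logger.info('LoadCsvIntoGcsBucket - retrying in %.1f seconds' % sleeptime)
                    time.sleep(sleeptime)
                else:
                    progressless_iters = 0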
Example #31
    webbrowser.open(authorize_url, new=2)

  code = raw_input('Enter verification code: ').strip()
  credentials = flow.step2_exchange(code)

  # Create an httplib2.Http object and authorize it with our credentials
  http = httplib2.Http()
  http = credentials.authorize(http)

  drive_service = build('drive', 'v2', http=http)

  # Insert a file
  #  Prepare for upload and detect mime type
  #  (files without mime type are not accepted - see issue 310)
  #  https://code.google.com/p/google-api-python-client/issues/detail?id=310
  media_body = MediaFileUpload(FILENAME, resumable=True)
  if media_body.mimetype() is None:
    media_body = MediaFileUpload(FILENAME,
        mimetype='application/octet-stream', resumable=True)

  body = {
    'title': osp.basename(FILENAME),
  }

  file = drive_service.files().insert(body=body, media_body=media_body).execute()
  pprint.pprint(file)

# ---[ /boilerplate ]---
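Because `media_body` is resumable, `execute()` drives the chunked upload to completion internally. To surface progress, the same request can instead be driven manually with `next_chunk()`; a sketch reusing the names above:

request = drive_service.files().insert(body=body, media_body=media_body)
response = None
while response is None:
  status, response = request.next_chunk()
  if status:
    print 'Uploaded %d%%' % int(status.progress() * 100)
pprint.pprint(response)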


Example #32
def upload_zip_to_gcs(server_key, archive_file, backup=False):
    name = get_gcs_archive_name(server_key)
    credentials = gce.AppAssertionCredentials(scope=STORAGE_API_SCOPE)
    http = credentials.authorize(httplib2.Http())
    service = build('storage', STORAGE_API_VERSION, http=http)
    retry = True
    while retry:
        media = MediaFileUpload(archive_file,
                                chunksize=CHUNKSIZE,
                                resumable=True)
        if not media.mimetype():
            media = MediaFileUpload(archive_file,
                                    'application/zip',
                                    resumable=True)
        request = service.objects().insert(bucket=app_bucket,
                                           name=name,
                                           media_body=media)
        progress = previous_progress = None
        tries = 0
        response = None
        while response is None:
            try:
                status, response = request.next_chunk()
                tries = 0
                progress = int(status.progress() *
                               100) if status is not None else 0
                if response is not None:  # Done
                    retry = False
                    progress = 100
                if progress != previous_progress:
                    if progress % 10 == 0:
                        logger.info(
                            "Server {0} archive is {1}% uploaded".format(
                                server_key, progress))
                    if not backup:
                        try:
                            client.post_event(server_key, STOP_EVENT, progress)
                        except Exception as e:
                            logger.exception(
                                "Error sending controller save event for server [{0}]: {1}"
                                .format(server_key, e))
                previous_progress = progress
            except HttpError as e:
                if e.resp.status in [404]:  # Start upload all over again
                    response = None
                    logger.error(
                        "Error ({0}) uploading archive for server {1}. Retrying...."
                        .format(str(e), server_key))
                elif e.resp.status in [500, 502, 503,
                                       504]:  # Retry with backoff
                    tries += 1
                    if tries > NUM_RETRIES:
                        raise
                    sleeptime = 2**min(tries, 4)
                    logger.error(
                        "Error ({0}) uploading archive for server {1}. Sleeping {2} seconds."
                        .format(str(e), server_key, sleeptime))
                    time.sleep(sleeptime)
                else:
                    raise
    os.remove(archive_file)
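The backoff in Example #32 doubles the sleep per retry but caps the exponent at 4, so waits run 2, 4, 8, 16, then stay at 16 seconds. The same policy as a standalone helper (the name is illustrative):

import time

def backoff_sleep(tries, cap_exp=4):
    # Sleep 2 ** min(tries, cap_exp) seconds: 2, 4, 8, 16, 16, ...
    time.sleep(2 ** min(tries, cap_exp))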