Example #1
    def upload_file(self, file, id, resource):
      # Retry transport and file IO errors.
      RETRYABLE_ERRORS = (httplib2.HttpLib2Error, IOError)
      chunk_size = getattr(self, 'chunk_size', -1)

      self.log("Uploading file '%s'" % (file))
      start_time = time.time()

      media = MediaFileUpload(file, chunksize=chunk_size, resumable=True)
      if not media.mimetype():
        # media = MediaFileUpload(file, 'application/octet-stream', resumable=True)
        raise Exception("Could not determine mime-type. Please make lib mimetypes aware of it.")
      request = resource.files().insert(id=id, filename=os.path.basename(file), media_body=media)

      progressless_iters = 0
      response = None
      while response is None:
        error = None
        try:
          start_time_chunk = time.time()
          progress, response = request.next_chunk()
          if progress:
            Mbps = ((chunk_size / (time.time() - start_time_chunk)) * 0.008 * 0.001)
            print "%s%% (%s/Mbps)" % (round(progress.progress() * 100), round(Mbps, 2))
        except HttpError, err:
          # Contrary to the documentation, GME doesn't return 200/201 for the last chunk.
          if err.resp.status == 204:
            response = ""
          else:
            error = err
            if err.resp.status < 500 and err.resp.status != 410:
              raise
        except RETRYABLE_ERRORS, err:
          error = err
Example #2
 def upload(self, filename, bucket_name, object_name):
     assert bucket_name and object_name
     print 'Building upload request...'
     media = MediaFileUpload(filename, chunksize=CHUNKSIZE, resumable=True)
     if not media.mimetype():
         media = MediaFileUpload(filename, DEFAULT_MIMETYPE, resumable=True)
     request = self.cloud.objects().insert(bucket=bucket_name,
                                           name=object_name,
                                           media_body=media)
     print 'Uploading file: %s to bucket: %s object: %s ' % (filename,
                                                             bucket_name,
                                                             object_name)
     progressless_iters = 0
     response = None
     while response is None:
         error = None
         try:
             progress, response = request.next_chunk()
             if progress:
                 self.print_with_carriage_return(
                     'Upload %d%%' % (100 * progress.progress()))
         except HttpError, err:
             error = err
             if err.resp.status < 500:
                 raise
         except RETRYABLE_ERRORS, err:
             error = err
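Most of the Cloud Storage snippets here reference module-level names (CHUNKSIZE, DEFAULT_MIMETYPE, RETRYABLE_ERRORS, NUM_RETRIES) without defining them. A minimal sketch of the kind of definitions they assume; the concrete values are illustrative, not taken from the original projects:

import httplib2

# Assumed values -- tune for your own transfers.
CHUNKSIZE = 2 * 1024 * 1024                     # 2 MB per resumable chunk
DEFAULT_MIMETYPE = 'application/octet-stream'   # fallback when the type cannot be guessed
NUM_RETRIES = 5                                 # give up after this many progressless iterations
RETRYABLE_ERRORS = (httplib2.HttpLib2Error, IOError)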
Example #3
    def test_http_request_to_from_json(self):
        def _postproc(*kwargs):
            pass

        http = httplib2.Http()
        media_upload = MediaFileUpload(datafile("small.png"), chunksize=500, resumable=True)
        req = HttpRequest(
            http,
            _postproc,
            "http://example.com",
            method="POST",
            body="{}",
            headers={"content-type": 'multipart/related; boundary="---flubber"'},
            methodId="foo",
            resumable=media_upload,
        )

        json = req.to_json()
        new_req = HttpRequest.from_json(json, http, _postproc)

        self.assertEqual({"content-type": 'multipart/related; boundary="---flubber"'}, new_req.headers)
        self.assertEqual("http://example.com", new_req.uri)
        self.assertEqual("{}", new_req.body)
        self.assertEqual(http, new_req.http)
        self.assertEqual(media_upload.to_json(), new_req.resumable.to_json())
Example #4
def upload_file_init(ctx, asset_id, asset_type, filepath):
  """Upload the first 256KB of a given file to an asset.
  This forces it into an "uploading" state which prevents processing from
  occurring until all files are uploaded.

  Built as an experiment and abandoned in favour of multithreaded uploading.

  Parameters
  ----------
  ctx : Context
    A Click Context object.
  asset_id : str
    The Id of a valid raster or vector asset.
  asset_type : str
    The type of asset being represented. Possible values: table, raster
  filepath : str
    The absolute path to the file.
  """
  @retries(1000)
  def next_chunk(ctx, request):
    return request.next_chunk()

  chunk_size = 262144 # 256KB - smallest possible chunk size for resumable upload
  media = MediaFileUpload(filepath, chunksize=chunk_size, resumable=True)
  if not media.mimetype():
    media = MediaFileUpload(filepath, mimetype='application/octet-stream', chunksize=chunk_size, resumable=True)

  resource = ctx.service().tables() if asset_type == "vector" else ctx.service().rasters()
  request = resource.files().insert(id=asset_id, filename=os.path.basename(filepath), media_body=media)

  try:
    next_chunk(ctx, request)
  except NoContent as e:
    pass
  ctx.log("Init uploading %s" % (os.path.basename(filepath)))
Example #5
def upload(filename, bucketName, clientEmail, keyFile, **kwargs):
    service = Google_Service_Builder.buildService(clientEmail, keyFile, 
                                                  domain="devstorage.read_write",
                                                  service="storage", 
                                                  version="v1", **kwargs)

    print 'Building upload request...'
    media = MediaFileUpload(filename, chunksize=CHUNKSIZE, resumable=True)
    if not media.mimetype():
        media = MediaFileUpload(filename, DEFAULT_MIMETYPE, resumable=True)
    request = service.objects().insert(bucket=bucketName, name=filename,
                                                            media_body=media)

    print 'Uploading file: %s to: %s/%s' % (filename, bucketName, filename)

    progressless_iters = 0
    response = None
    while response is None:
        error = None
        try:
            progress, response = request.next_chunk()
            if progress:
                print 'Upload progress: %.2f%%' % (100.0 * progress.progress())
        except HttpError, err:
            error = err
            if err.resp.status < 500:
                raise
        except RETRYABLE_ERRORS, err:
            error = err
Example #6
def put(service):
    #User inputs the file name that needs to be uploaded.
    fileName = raw_input('Enter file name to be uploaded to Cloud:\n')
    #Validate the file name before attempting the upload
    if not fileName or not os.path.isfile(fileName):
        print 'Invalid file name or file not found. Terminating!'
        return
        
    directory, f_name = os.path.split(fileName)
    #Upload the file to Bucket
    try:
        media = MediaFileUpload(fileName, chunksize=_CHUNK_SIZE, resumable=True)
        if not media.mimetype():
            media = MediaFileUpload(fileName, _DEFAULT_MIMETYPE, resumable=True)
        request = service.objects().insert(bucket=_BUCKET_NAME, name=f_name,
                                           media_body=media)

        response = None
        start = datetime.datetime.now()
        while response is None:
            status, response = request.next_chunk()
            if status:
                print "Uploaded %d%%." % int(status.progress() * 100)
        print "Upload Complete!"

        end = datetime.datetime.now()
        duration = end - start
        print ('Upload took {} seconds'.format(duration.seconds))
        #Removes references to the uploaded file
        media = request = None

    except client.AccessTokenRefreshError:
        print ("Error in the credentials")
Example #7
  def test_http_request_to_from_json(self):

    def _postproc(*kwargs):
      pass

    http = httplib2.Http()
    media_upload = MediaFileUpload(
        datafile('small.png'), chunksize=500, resumable=True)
    req = HttpRequest(
        http,
        _postproc,
        'http://example.com',
        method='POST',
        body='{}',
        headers={'content-type': 'multipart/related; boundary="---flubber"'},
        methodId='foo',
        resumable=media_upload)

    json = req.to_json()
    new_req = HttpRequest.from_json(json, http, _postproc)

    self.assertEquals(new_req.headers,
                      {'content-type':
                       'multipart/related; boundary="---flubber"'})
    self.assertEquals(new_req.uri, 'http://example.com')
    self.assertEquals(new_req.body, '{}')
    self.assertEquals(new_req.http, http)
    self.assertEquals(new_req.resumable.to_json(), media_upload.to_json())
    self.assertEquals(new_req.multipart_boundary, '---flubber--')
Example #8
  def test_resumable_media_good_upload_from_execute(self):
    """Not a multipart upload."""
    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
    zoo = build('zoo', 'v1', http=self.http)

    media_upload = MediaFileUpload(datafile('small.png'), resumable=True)
    request = zoo.animals().insert(media_body=media_upload, body=None)
    assertUrisEqual(self,
        'https://www.googleapis.com/upload/zoo/v1/animals?uploadType=resumable&alt=json',
        request.uri)

    http = HttpMockSequence([
      ({'status': '200',
        'location': 'http://upload.example.com'}, ''),
      ({'status': '308',
        'location': 'http://upload.example.com/2',
        'range': '0-12'}, ''),
      ({'status': '308',
        'location': 'http://upload.example.com/3',
        'range': '0-%d' % media_upload.size()}, ''),
      ({'status': '200'}, '{"foo": "bar"}'),
      ])

    body = request.execute(http=http)
    self.assertEquals(body, {"foo": "bar"})
Example #9
def cloudstorage_upload(service, project_id, bucket, source_file, dest_file, show_status_messages=True):
    """Upload a local file to a Cloud Storage bucket.

    Args:
        service: Authenticated Cloud Storage service object.  Example: service = build('storage', 'v1', http=http)
        project_id: string, Name of Google project to upload to
        bucket: string, Name of Cloud Storage bucket (exclude the "gs://" prefix)
        source_file: string, Path to the local file to upload
        dest_file: string, Name to give the file on Cloud Storage

    Returns:
        Response of the upload in a JSON format
    """
    # Starting code for this function is a combination from these sources:
    #   https://code.google.com/p/google-cloud-platform-samples/source/browse/file-transfer-json/chunked_transfer.py?repo=storage
    #   https://developers.google.com/api-client-library/python/guide/media_upload
    filename = source_file
    bucket_name = bucket
    object_name = dest_file
    assert bucket_name and object_name

    if show_status_messages:
        print('Upload request for {0}'.format(source_file))
    media = MediaFileUpload(filename, chunksize=CHUNKSIZE, resumable=True)
    if not media.mimetype():
        media = MediaFileUpload(filename, DEFAULT_MIMETYPE, resumable=True)
    request = service.objects().insert(bucket=bucket_name, name=object_name,
                                       media_body=media)

    response = request.execute()

    if show_status_messages:
        print('Upload complete')

    return response
Example #10
def upload(argv):
  filename = argv[1]
  bucket_name, object_name = argv[2][5:].split('/', 1)
  assert bucket_name and object_name

  service = get_authenticated_service(RW_SCOPE)

  print 'Building upload request...'
  media = MediaFileUpload(filename, chunksize=CHUNKSIZE, resumable=True)
  if not media.mimetype():
    media = MediaFileUpload(filename, DEFAULT_MIMETYPE, resumable=True)
  request = service.objects().insert(bucket=bucket_name, name=object_name,
                                     media_body=media)

  print 'Uploading file: %s to bucket: %s object: %s ' % (filename, bucket_name,
                                                          object_name)

  progressless_iters = 0
  response = None
  while response is None:
    error = None
    try:
      progress, response = request.next_chunk()
      if progress:
        print_with_carriage_return('Upload %d%%' % (100 * progress.progress()))
    except HttpError, err:
      error = err
      if err.resp.status < 500:
        raise
    except RETRYABLE_ERRORS, err:
      error = err
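The upload loops in this and the neighbouring snippets break off right after recording `error`; in the canonical chunked-transfer sample the loop tail counts progressless iterations and backs off before retrying, as the upload_file_to_bucket example further down does with handle_progressless_iter. A sketch of that tail and helper, assuming the NUM_RETRIES constant sketched earlier:

# Inside `while response is None:`, after the try/except blocks:
#
#     if error:
#         progressless_iters += 1
#         handle_progressless_iter(error, progressless_iters)
#     else:
#         progressless_iters = 0

import random
import time

def handle_progressless_iter(error, progressless_iters):
    """Sleep with randomized exponential backoff, or give up after NUM_RETRIES tries."""
    if progressless_iters > NUM_RETRIES:
        print('Failed to make progress for too many consecutive iterations.')
        raise error
    sleeptime = random.random() * (2 ** progressless_iters)
    print('Caught exception (%s). Sleeping for %.1f seconds before retry #%d.'
          % (str(error), sleeptime, progressless_iters))
    time.sleep(sleeptime)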
Example #11
 def upload(self, filename, gdrivename=None, parent_folder="root"):
     logging.debug(
         "Going to upload file to GDrive. filename=%s , gdrivename=%s , parent_folder=%s"
         % (filename, gdrivename, parent_folder)
     )
     # Convert the name of the file on GDrive in case it is not provided
     if gdrivename is None or gdrivename == "":
         gdrivename = filename.split("/")[-1]
     # Check whether the file does not already exists
     try:
         self.get_id(gdrivename, parent_folder)
     except:
         pass
     else:
         logging.error("The file to upload %s already exists" % gdrivename)
         raise FileExistsError(gdrivename)
     # Prepare for the file upload
     logging.debug("Creating the media object for uploading from %s" % filename)
     media = MediaFileUpload(filename, chunksize=CHUNKSIZE, resumable=True)
     if not media.mimetype():
         logging.debug("MIME type of the file has not been recognized, using the default %s" % DEFAULT_MIMETYPE)
         media = MediaFileUpload(filename, mimetype=DEFAULT_MIMETYPE, chunksize=CHUNKSIZE, resumable=True)
     body = {
         "name": gdrivename,
         #'parents': [{"id": parent_folder}],
         "parents": [parent_folder],
     }
     logging.debug("Starting upload of the %s file as %s" % (filename, gdrivename))
     request = self.service.files().create(body=body, media_body=media, fields="id")
     retry = 5
     while retry > 0:
         try:
             response = None
             while response is None:
                 status, response = request.next_chunk()
                 if status:
                     logging.info("Uploaded %d%%." % int(status.progress() * 100))
             logging.info("Upload has been completed")
             # No need for a retry
             retry = -1
         except apiclient.errors.HttpError as e:
             if e.resp.status in [404]:
                 # Start the upload all over again.
                 request = self.service.files().create(body=body, media_body=media, fields="id")
             elif e.resp.status in [500, 502, 503, 504]:
                 # Call next_chunk() again, but use an exponential backoff for repeated errors.
                 logging.warning("Upload of a chunk has failed, retrying ...")
                 retry -= 1
                 time.sleep(3)
             else:
                 # Do not retry. Log the error and fail.
                 logging.error("The upload has failed: %s" % str(e))
                 raise
     if retry == 0:
         logging.error("The upload has failed.")
         raise ConnectionError
     fid = response.get("id")
     self.cache[fid] = (gdrivename, parent_folder)
     return fid
Example #12
def upload_zip_to_gcs(server_key, archive_file, backup=False):
    name = get_gcs_archive_name(server_key)
    credentials = gce.AppAssertionCredentials(scope=STORAGE_API_SCOPE)
    http = credentials.authorize(httplib2.Http())
    service = build('storage', STORAGE_API_VERSION, http=http)
    retry = True
    while retry:
        media = MediaFileUpload(archive_file, chunksize=CHUNKSIZE, resumable=True)
        if not media.mimetype():
            media = MediaFileUpload(archive_file, 'application/zip', resumable=True)
        request = service.objects().insert(bucket=app_bucket, name=name, media_body=media)
        progress = previous_progress = None
        tries = 0
        response = None
        while response is None:
            try:
                status, response = request.next_chunk()
                tries = 0
                progress = int(status.progress() * 100) if status is not None else 0
                if response is not None:  # Done
                    retry = False
                    progress = 100
                if progress != previous_progress:
                    if progress % 10 == 0:
                        logger.info("Server {0} archive is {1}% uploaded".format(server_key, progress))
                    if not backup:
                        try:
                            client.post_event(server_key, STOP_EVENT, progress)
                        except Exception as e:
                            logger.exception(
                                "Error sending controller save event for server [{0}]: {1}".format(
                                    server_key, e
                                )
                            )
                previous_progress = progress
            except HttpError as e:
                if e.resp.status in [404]:  # Start upload all over again
                    response = None
                    logging.error(
                        "Error ({0}) uploading archive for server {1}. Retrying....".format(
                            str(e), server_key
                        )
                    )
                elif e.resp.status in [500, 502, 503, 504]:  # Retry with backoff
                    tries += 1
                    if tries > NUM_RETRIES:
                        raise
                    sleeptime = 2**min(tries, 4)
                    logger.error(
                        "Error ({0}) uploading archive for server {1}. Sleeping {2} seconds.".format(
                            str(e), server_key, sleeptime
                        )
                    )
                    time.sleep(sleeptime)
                else:
                    raise
    os.remove(archive_file)
Example #13
    def test_resumable_media_good_upload(self):
        """Not a multipart upload."""
        self.http = HttpMock(datafile("zoo.json"), {"status": "200"})
        zoo = build("zoo", "v1", http=self.http)

        media_upload = MediaFileUpload(datafile("small.png"), resumable=True)
        request = zoo.animals().insert(media_body=media_upload, body=None)
        self.assertEqual(media_upload, request.resumable)

        # TODO: Google API does not recognize the PNG content type
        # self.assertEqual('image/png', request.resumable.mimetype())

        # self.assertEqual(request.body, None)
        # self.assertEqual(request.resumable_uri, None)

        http = HttpMockSequence(
            [
                ({"status": "200", "location": "http://upload.example.com"}, ""),
                ({"status": "308", "location": "http://upload.example.com/2", "range": "0-12"}, ""),
                (
                    {
                        "status": "308",
                        "location": "http://upload.example.com/3",
                        "range": "0-%d" % (media_upload.size() - 2),
                    },
                    "",
                ),
                ({"status": "200"}, '{"foo": "bar"}'),
            ]
        )

        status, body = request.next_chunk(http=http)
        self.assertEqual(None, body)
        self.assertTrue(isinstance(status, MediaUploadProgress))
        self.assertEqual(13, status.resumable_progress)

        # Two requests should have been made and the resumable_uri should have been
        # updated for each one.
        self.assertEqual(request.resumable_uri, "http://upload.example.com/2")

        self.assertEqual(media_upload, request.resumable)
        self.assertEqual(13, request.resumable_progress)

        status, body = request.next_chunk(http=http)
        self.assertEqual(request.resumable_uri, "http://upload.example.com/3")
        self.assertEqual(media_upload.size() - 1, request.resumable_progress)
        self.assertEqual(request.body, None)

        # Final call to next_chunk should complete the upload.
        status, body = request.next_chunk(http=http)
        self.assertEqual(body, {"foo": "bar"})
        self.assertEqual(status, None)
Example #14
    def upload_file_to_bucket(self, bucket_name, file_path):
        def handle_progressless_iter(error, progressless_iters):
            if progressless_iters > NUM_RETRIES:
                self.logger.info('Failed to make progress for too many consecutive iterations.')
                raise error

            sleeptime = random.random() * (2 ** progressless_iters)
            self.logger.info(
                'Caught exception ({}). Sleeping for {} seconds before retry #{}.'.format(
                    str(error), sleeptime, progressless_iters))

            time.sleep(sleeptime)

        self.logger.info('Building upload request...')
        media = MediaFileUpload(file_path, chunksize=CHUNKSIZE, resumable=True)
        if not media.mimetype():
            media = MediaFileUpload(file_path, DEFAULT_MIMETYPE, resumable=True)

        blob_name = os.path.basename(file_path)
        if not self.bucket_exists(bucket_name):
            self.logger.error("Bucket {} doesn't exist".format(bucket_name))
            raise ValueError("Bucket {} doesn't exist".format(bucket_name))

        request = self._storage.objects().insert(
            bucket=bucket_name, name=blob_name, media_body=media)
        self.logger.info('Uploading file: {}, to bucket: {}, blob: {}'.format(
            file_path, bucket_name, blob_name))

        progressless_iters = 0
        response = None
        while response is None:
            error = None
            try:
                progress, response = request.next_chunk()
                if progress:
                    self.logger.info('Upload {}%'.format(100 * progress.progress()))
            except errors.HttpError as error:
                if error.resp.status < 500:
                    raise
            except RETRYABLE_ERRORS as error:
                if error:
                    progressless_iters += 1
                    handle_progressless_iter(error, progressless_iters)
                else:
                    progressless_iters = 0

        self.logger.info('Upload complete!')
        self.logger.info('Uploaded Object:')
        self.logger.info(json_dumps(response, indent=2))
        return (True, blob_name)
Example #15
  def test_resumable_multipart_media_good_upload(self):
    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
    zoo = build('zoo', 'v1', http=self.http)

    media_upload = MediaFileUpload(datafile('small.png'), resumable=True)
    request = zoo.animals().insert(media_body=media_upload, body={})
    self.assertTrue(request.headers['content-type'].startswith(
        'application/json'))
    self.assertEquals('{"data": {}}', request.body)
    self.assertEquals(media_upload, request.resumable)

    # TODO: Google API does not recognize the PNG content type
    #self.assertEquals('image/png', request.resumable.mimetype())
    #self.assertNotEquals(request.body, None)
    #self.assertEquals(request.resumable_uri, None)

    http = HttpMockSequence([
      ({'status': '200',
        'location': 'http://upload.example.com'}, ''),
      ({'status': '308',
        'location': 'http://upload.example.com/2',
        'range': '0-12'}, ''),
      ({'status': '308',
        'location': 'http://upload.example.com/3',
        'range': '0-%d' % (media_upload.size() - 2)}, ''),
      ({'status': '200'}, '{"foo": "bar"}'),
      ])

    status, body = request.next_chunk(http=http)
    self.assertEquals(None, body)
    self.assertTrue(isinstance(status, MediaUploadProgress))
    self.assertEquals(13, status.resumable_progress)

    # Two requests should have been made and the resumable_uri should have been
    # updated for each one.
    self.assertEquals(request.resumable_uri, 'http://upload.example.com/2')

    self.assertEquals(media_upload, request.resumable)
    self.assertEquals(13, request.resumable_progress)

    status, body = request.next_chunk(http=http)
    self.assertEquals(request.resumable_uri, 'http://upload.example.com/3')
    self.assertEquals(media_upload.size()-1, request.resumable_progress)
    self.assertEquals('{"data": {}}', request.body)

    # Final call to next_chunk should complete the upload.
    status, body = request.next_chunk(http=http)
    self.assertEquals(body, {"foo": "bar"})
    self.assertEquals(status, None)
Example #16
 def uploadFile(self, path, filename, parentId, fileId=None):
   # body = {'title': filename, 'mimeType': mimetype, 'description': filename}
   body = {'title': filename, 'description': filename}
   body['parents'] = [{'id': parentId}]
   filepath = os.path.join(path, filename)
   # mbody = MediaFileUpload(filepath, mimetype=mimetype, resumable=True)
   mbody = MediaFileUpload(filepath, resumable=True)
   if mbody._mimetype is None: mbody._mimetype = 'application/octet-stream'
   if fileId is None:
     fileObj = self.service.files().insert(
       body=body, media_body=mbody).execute()
   else:
     fileObj = self.service.files().update(
       fileId=fileId, body=body, media_body=mbody).execute()
   return (fileObj['id'], fileObj)
Example #17
def upload_creative_asset(service, profile_id, advertiser_id, asset_name, path_to_asset_file, asset_type):
    """Uploads a creative asset and returns an assetIdentifier."""
    # Construct the creative asset metadata
    creative_asset = {"assetIdentifier": {"name": asset_name, "type": asset_type}}

    media = MediaFileUpload(path_to_asset_file)
    if not media.mimetype():
        media = MediaFileUpload(path_to_asset_file, "application/octet-stream")

    response = (
        service.creativeAssets()
        .insert(advertiserId=advertiser_id, profileId=profile_id, media_body=media, body=creative_asset)
        .execute()
    )

    return response["assetIdentifier"]
Example #18
    def test_resumable_media_good_upload_from_execute(self):
        """Not a multipart upload."""
        self.http = HttpMock(datafile("zoo.json"), {"status": "200"})
        zoo = build("zoo", "v1", http=self.http)

        media_upload = MediaFileUpload(datafile("small.png"), resumable=True)
        request = zoo.animals().insert(media_body=media_upload, body=None)
        assertUrisEqual(
            self, "https://www.googleapis.com/upload/zoo/v1/animals?uploadType=resumable&alt=json", request.uri
        )

        http = HttpMockSequence(
            [
                ({"status": "200", "location": "http://upload.example.com"}, ""),
                ({"status": "308", "location": "http://upload.example.com/2", "range": "0-12"}, ""),
                (
                    {"status": "308", "location": "http://upload.example.com/3", "range": "0-%d" % media_upload.size()},
                    "",
                ),
                ({"status": "200"}, '{"foo": "bar"}'),
            ]
        )

        body = request.execute(http=http)
        self.assertEqual(body, {"foo": "bar"})
Example #19
    def update(self, new_path=None, parent_id='root'):
        try:
            if not hasattr(self, 'id'):
                return self.create(parent_id)

            existing_file = self.get_file(self.id)

            if new_path is not None:
                self.path = new_path

            mime_type = defaul_mime_type
            media_body = None

            if not os.path.isdir(self.path):
                media_body = MediaFileUpload(self.path, resumable=True)
                if media_body.size() == 0:
                    logger.error('cannot update no content file %s', self.path)
                    return None
                if media_body.mimetype() is not None:
                    mime_type = media_body.mimetype()
                else:
                    media_body._mimetype = mime_type
            else:
                mime_type = folder_mime_type

            existing_file['title'] = os.path.basename(self.path)
            existing_file['parents'] = [{'id': parent_id}]
            existing_file['mimeType'] = mime_type

            logger.info('updated %s', self.path)
            with drive.lock:
                metadata = drive.service.files().update(
                    fileId=self.id,
                    body=existing_file,
                    media_body=media_body).execute()

            self.id = metadata['id']
            if metadata.has_key('downloadUrl'):
                self.download_url = metadata['downloadUrl']
            if metadata.has_key('md5Checksum'):
                self.md5Checksum = metadata['md5Checksum']
            return metadata
        except errors.HttpError, error:
            logger.error('an error occurred: %s', error)
            return None
Example #20
def upload_file_worker(ctx, asset_id, asset_type, filepath, chunk_size):
  print "upload_file_worker %s" % (filepath)
  """Upload a given file to an asset in its own thread as
  part of upload_files_multithreaded().

  Parameters
  ----------
  ctx : Context
    A Click Context object.
  asset_id : str
    The Id of a valid raster or vector asset.
  asset_type : int
    A GME asset type defined by the Asset class.
  filepath : str
    The absolute path to the file.
  chunk_size : int
    The size of each upload chunk (must be a multiple of 256KB). Defaults to -1 (native Python streaming)
  """
  @retries(1000)
  def next_chunk(ctx, request):
    return request.next_chunk()

  ctx.log("Begun uploading %s" % (os.path.basename(filepath)))
  start_time = time.time()

  media = MediaFileUpload(filepath, chunksize=chunk_size, resumable=True)
  if not media.mimetype():
    media = MediaFileUpload(filepath, mimetype='application/octet-stream', chunksize=chunk_size, resumable=True)

  resource = get_asset_resource(ctx.service(ident=current_process().ident), asset_type)
  request = resource.files().insert(id=asset_id, filename=os.path.basename(filepath), media_body=media)
  response = None
  while response is None:
    try:
      start_time_chunk = time.time()
      progress, response = next_chunk(ctx, request)
      # Dodgy math is dodgy
      # if progress:
      #   Mbps = ((chunk_size / (time.time() - start_time_chunk)) * 0.008 * 0.001)
      #   ctx.log("%s%% (%s/Mbps)" % (round(progress.progress() * 100), round(Mbps, 2)))
    except NoContent as e:
      # File uploads return a 204 No Content "error" that actually means the upload finished successfully.
      response = ""

  ctx.log("Finished uploading %s (%s mins)" % (os.path.basename(filepath), round((time.time() - start_time) / 60, 2)))
Example #21
def upload_file(ctx, asset_id, asset_type, filepath, chunk_size=-1):
  """Upload a given file to an asset.

  Parameters
  ----------
  ctx : Context
    A Click Context object.
  asset_id : str
    The Id of a valid raster or vector asset.
  asset_type : str
    The type of asset being represented. Possible values: table, raster
  filepath : str
    The absolute path to the file.
  chunk_size : int
    The size of each upload chunk (must be a multiple of 256KB). Defaults to -1 (native Python streaming)
  """
  @retries(1000)
  def next_chunk(ctx, request):
    return request.next_chunk()

  ctx.log("Begun uploading %s" % (os.path.basename(filepath)))
  start_time = time.time()

  media = MediaFileUpload(filepath, chunksize=chunk_size, resumable=True)
  if not media.mimetype():
    media = MediaFileUpload(filepath, mimetype='application/octet-stream', chunksize=chunk_size, resumable=True)

  resource = ctx.service().tables() if asset_type == "vector" else ctx.service().rasters()
  request = resource.files().insert(id=asset_id, filename=os.path.basename(filepath), media_body=media)
  response = None
  while response is None:
    try:
      start_time_chunk = time.time()
      progress, response = next_chunk(ctx, request)
      # Dodgy math is dodgy
      if progress:
        Mbps = ((chunk_size / (time.time() - start_time_chunk)) * 0.008 * 0.001)
        ctx.log("%s%% (%s/Mbps)" % (round(progress.progress() * 100), round(Mbps, 2)))
    except NoContent as e:
      # File uploads return a 204 No Content "error" that actually means the upload finished successfully.
      response = ""

  ctx.log("Finished uploading %s (%s mins)" % (os.path.basename(filepath), round((time.time() - start_time) / 60, 2)))
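The @retries(1000) decorator applied to next_chunk in the GME helpers is not shown anywhere in these snippets. A minimal sketch of such a decorator that retries only on transient transport errors (an assumption; the original helper may back off differently):

import functools
import time

def retries(max_tries, delay=1):
    """Retry the wrapped call on RETRYABLE_ERRORS, up to max_tries attempts."""
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            for attempt in range(1, max_tries + 1):
                try:
                    return func(*args, **kwargs)
                except RETRYABLE_ERRORS:  # assumed constant, see the sketch near the top
                    if attempt == max_tries:
                        raise
                    time.sleep(delay)
        return wrapper
    return decorator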
Example #22
def upload(argv):
  filename = argv[1]
  bucket_name, object_name = argv[2][5:].split('/', 1)
  assert bucket_name and object_name

  service = get_authenticated_service(SCOPES)

  print 'Building upload request...'
  media = MediaFileUpload(filename)
  if not media.mimetype():
    media = MediaFileUpload(filename, DEFAULT_MIMETYPE)
  request = service.objects().insert(bucket=bucket_name, name=object_name, media_body=media)

  print 'Uploading file: %s to bucket: %s object: %s ' % (filename, bucket_name,
                                                          object_name)
  response = request.execute()

  print 'Uploaded Object:'
  print json_dumps(response, indent=2)
Example #23
def updateFile(service, fileID, path, createRevision):
    log_msg("Updating file: " + path + ". Progress: ", newline="", color=Fore.CYAN)

    file = service.files().get(fileId=fileID).execute()
    media_body = MediaFileUpload(path, resumable=True)
    if '"_mimetype": null' in media_body.to_json(): #unrecognized mimetype: set it manually
        media_body = MediaFileUpload(path, mimetype=getMimeType(path), resumable=True)

    request = service.files().update(fileId=fileID, body=file, newRevision=createRevision, media_body=media_body)

    while True:
        status, done = request.next_chunk()
        if status:
            progress = int(status.progress() * 100)
            if progress < 100:
                log_msg("%d%%" % progress, newline=" ... ")
        if done:
            log_msg("100%", newline=" ")
            log_msg("Done!", color=Fore.CYAN)
            return
Example #24
 def updateFile(self, localFilePath, remoteFile, timestamp):
     body = {
         "parents": [{"id": self.id}],
         "title": remoteFile.name,
         "modifiedDate": datetime.utcfromtimestamp(timestamp).isoformat() + ".000Z",
     }
     media_body = MediaFileUpload(localFilePath, mimetype="*/*", resumable=True)
     if media_body is not None and media_body.size() > 0:
         request = self.service.files().update(
             fileId=remoteFile.id, body=body, media_body=media_body, setModifiedDate=True
         )
         response = None
         while response is None:
             status, response = request.next_chunk()
         item = response
         return self.getFileFromItem(item)
     else:
         request = self.service.files().update(fileId=remoteFile.id, body=body, setModifiedDate=True)
         response = request.execute()
         return self.getFileFromItem(response)
Example #25
    def create(self, parent_id='root'):
        mime_type = defaul_mime_type
        media_body = None

        if not os.path.isdir(self.path):
            media_body = MediaFileUpload(self.path, resumable=True)
            if media_body.size() == 0:
                logger.error('cannot create no content file %s', self.path)
                return None
            if media_body.mimetype() is not None:
                mime_type = media_body.mimetype()
            else:
                media_body._mimetype = mime_type
        else:
            mime_type = folder_mime_type

        body = {
            'title': os.path.basename(self.path),
            'mimeType': mime_type,
            'parents': [{'id': parent_id}]
        }

        try:
            with drive.lock:
                metadata = drive.service.files().insert(
                    body=body,
                    media_body=media_body).execute()

            logger.info('created %s, %s', self.path, body['mimeType'])

            self.id = metadata['id']
            if metadata.has_key('downloadUrl'):
                self.download_url = metadata['downloadUrl']
            if metadata.has_key('md5Checksum'):
                self.md5Checksum = metadata['md5Checksum']
            return metadata
        except errors.HttpError, error:
            logger.error('an error occurred: %s', error)
            return None
Example #26
def upload_creative_asset(
    service, profile_id, advertiser_id, asset_name, path_to_asset_file,
    asset_type):
  """Uploads a creative asset and returns a creative asset metadata object."""
  # Construct the creative asset metadata
  creative_asset = {
      'assetIdentifier': {
          'name': asset_name,
          'type': asset_type
      }
  }

  media = MediaFileUpload(path_to_asset_file)
  if not media.mimetype():
    media = MediaFileUpload(path_to_asset_file, 'application/octet-stream')

  response = service.creativeAssets().insert(
      advertiserId=advertiser_id,
      profileId=profile_id,
      media_body=media,
      body=creative_asset).execute()

  return response
Example #27
def uploadFile(service, parentID, path):
    log_msg("Uploading file: " + path + ". Progress: ", newline="", color=Fore.GREEN)

    media_body = MediaFileUpload(path, resumable=True)
    if '"_mimetype": null' in media_body.to_json(): #unrecognized mimetype: set it manually
        media_body = MediaFileUpload(path, mimetype=getMimeType(path), resumable=True)

    body = {"title": os.path.basename(path)}
    if parentID: #set the parent folder
        body["parents"] = [{"id": parentID}]

    request = service.files().insert(media_body=media_body, body=body)

    while True:
        status, done = request.next_chunk()
        if status:
            progress = int(status.progress() * 100)
            if progress < 100:
                log_msg("%d%%" % progress, newline=" ... ")
        if done:
            id = request.execute()["id"]
            log_msg("100%", newline=" ")
            log_msg("Done! ID: " + id, color=Fore.GREEN)
            return id
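Both of the Drive snippets above fall back to getMimeType(path) when MediaFileUpload cannot guess a type, but that helper is not shown. A plausible implementation on top of the standard mimetypes module (an assumption, not the original project's code):

import mimetypes

def getMimeType(path):
    """Guess a MIME type from the file extension, defaulting to a generic binary type."""
    mime_type, _ = mimetypes.guess_type(path)
    return mime_type or 'application/octet-stream'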
Example #28
def upload_line_inventory(line):
    folder_id = "11eI6k0sNr7uahvcaUdLCZIKZWLAQQnQM"
    file_metadata = {
        "name": line + "_inventory_count.csv",
        "mimeType": "application/vnd.google-apps.spreadsheet",
        "parents": [folder_id]
    }

    media = MediaFileUpload("inventory_count.csv",
                            mimetype="text/csv",
                            resumable=True)

    file = service.files().create(body=file_metadata,
                                  media_body=media,
                                  fields='id').execute()
    return None
Example #29
def upload_GDrive(config_filepath,
                  local_filepath,
                  remote_filename,
                  remote_folder_id=None,
                  mimetype='application/zip',
                  verbose=True):
    if verbose:
        print('Uploading {} to GDrive:{} ...'.format(local_filepath,
                                                     remote_filename))
    drive_service = get_drive_service(config_filepath)
    file_metadata = {'name': remote_filename}
    if remote_folder_id is not None:
        file_metadata['parents'] = [remote_folder_id]
    media = MediaFileUpload(local_filepath, mimetype=mimetype, resumable=True)
    file = drive_service.files().create(body=file_metadata,
                                        media_body=media).execute()
Example #30
def upload_file(name, mime):
    store = file.Storage('token.json')
    creds = store.get()
    if not creds or creds.invalid:
        flow = client.flow_from_clientsecrets('auth/client_secret.json',
                                              SCOPES)
        creds = tools.run_flow(flow, store)
    service = build('drive', 'v3', http=creds.authorize(Http()))

    file_metadata = {'name': name, 'writersCanShare': True}
    media = MediaFileUpload('files/' + name, mimetype=mime)
    files = service.files().create(body=file_metadata,
                                   media_body=media,
                                   fields='id, webContentLink').execute()

    return set_sharing_permission(files['id'], service)
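The token.json snippet above leans on the classic oauth2client quickstart helpers without showing its imports or SCOPES. A likely header for it (the scope value is an assumption):

from googleapiclient.discovery import build
from googleapiclient.http import MediaFileUpload
from httplib2 import Http
from oauth2client import client, file, tools

SCOPES = 'https://www.googleapis.com/auth/drive.file'  # assumed scope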
Example #31
def upload(filename):
    memt = 'text/plain'
    if(filename.split('.')[-1] == 'csv'):
        memt = 'text/csv'
        

    creds = flowCreate()
    drive_service = build('drive', 'v3', credentials=creds)

    file_metadata = {'name': filename }
    media = MediaFileUpload('dataset/' + filename, mimetype=memt)

    file = drive_service.files().create(body=file_metadata, media_body=media, fields='id').execute()

    print ('File ID: %s' % file.get('id'))
    return True
Example #32
 def driveupload(self, drive):
     folder_id = '1E8IWN4ROK2bICbOvwsc8GZw2bgj96wBy'
     file_metadata = {
         'name': self.name,
         'mimeType': 'application/pdf',
         'unit': self.unit,
         'parents': [folder_id]
     }
     media = MediaFileUpload(f'{self.file}',
                             mimetype='application/pdf',
                             resumable=True)
     id = drive.files().create(body=file_metadata,
                               media_body=media,
                               fields='id').execute()
     self.id = id
     return
Example #33
    def upload_to_folder(self, file, folder_name):
        """Uploads a file to the give folder"""
        file_metadata = { 
            'name': file['name'], 
            'parents':[self.get_id(folder_name, mime_type=FOLDER_MIME_TYPE)]
        }

        media = MediaFileUpload(file['full_path'])

        result = self._service.files().create(
            body=file_metadata,
            media_body=media,
            fields='id'
        ).execute()

        return result
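Note that upload_to_folder takes a small dict describing the file rather than a file object. A hypothetical call, where the instance name and paths are made up for illustration:

uploader = DriveWrapper()  # hypothetical class exposing upload_to_folder
uploader.upload_to_folder({'name': 'report.pdf', 'full_path': '/tmp/report.pdf'}, 'Reports')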
Example #34
def upload_sbs_radio_file():
    service = get_service()
    filename = time.strftime("%d%m") + ".mp3"
    file_metadata = {
      'name' : filename,
      'mimeType' : 'audio/mpeg'
    }
    media = MediaFileUpload(filename,
                            mimetype='audio/mpeg',
                            resumable=True)
    file = service.files().create(body=file_metadata,
                                        media_body=media,
                                        fields='id').execute()
    # remove file
    os.remove(filename)
    print ('Uploaded %s' % filename)
Example #35
def put_csv(file_path, title, description="", service=None):
    """
    Upload and convert csv file to spreadsheet on Drive
    """

    if not service:
        service = get_drive_service()

    mediaBody = MediaFileUpload(file_path, mimetype='text/csv')
    body = {'title': title, 'description': description, 'mimeType': 'text/csv'}

    request = service.files().insert(media_body=mediaBody,
                                     body=body,
                                     convert=True)
    response = request.execute()
    return response
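A hypothetical call to put_csv, assuming get_drive_service() returns an authenticated Drive v2 service; files().insert with convert=True responds with the metadata of the converted spreadsheet:

sheet = put_csv('stats.csv', title='Weekly stats', description='Imported nightly')
print(sheet.get('alternateLink'))  # link to the converted spreadsheet in the returned metadata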
Example #36
def update_file(file_path, fileId):
    """
    Modifies an already existing file on the Drive.
    :param file_path: The path of the source file on local storage.
    :type file_path: str
    :param fileId: The ID of the file to be modified.
    :type fileId: str
    :returns: A dictionary containing the ID of the file modified.
    """
    media_body = MediaFileUpload(file_path)

    results = file_service.update(fileId=fileId,
                                  media_body=media_body,
                                  fields="id").execute()

    return results
Example #37
def upload_file(credentials, local, folder_id=None, file_name=None, file_id=None):
    assert file_name
    media_body = MediaFileUpload(local, chunksize=1024*256, resumable=True)
    metadata = {'name': file_name, 'mimeType': 'text/csv'}
    http = credentials.get().authorize(httplib2.Http())
    client = discovery.build('drive', 'v3', http=http).files()
    if file_id:
        res = client.update(fileId=file_id, body=metadata, media_body=media_body).execute()
    else:
        if folder_id:
            metadata['parents'] = [folder_id]
        res = client.create(body=metadata, media_body=media_body).execute()
        if res:
            print('Uploaded "{}" to "{}"'.format(file_name, res['id']))
    if not res:
        raise Exception('Failed to upload "{}"'.format(file_name))
Example #38
    def image(self, flags):
        """Uploads the given image for the achievement/leaderboard."""

        if flags.type == 'achievement':
            img_type = 'ACHIEVEMENT_ICON'
        else:
            img_type = 'LEADERBOARD_ICON'

        media = MediaFileUpload(flags.image, chunksize=-1, resumable=True)
        req = self.image_api.upload(resourceId=flags.appId,
                                    imageType=img_type,
                                    media_body=media)

        rsp = self.resumable_upload(req)

        print rsp['url']
Example #39
 def upload_file(self, local_file, parent=None):
     """Uploads file stored in local directory to the folder specified."""
     mime = MimeTypes()
     name = self._get_filename_from_path(local_file)
     parent = [self.search(i) for i in parent] if parent else 'root'
     file_metadata = {'name': name, 'parents': parent}
     media = MediaFileUpload(name,
                             mimetype=mime.guess_type(name)[0],
                             resumable=True)
     try:
         file = self.__drive.files().create(body=file_metadata,
                                            media_body=media,
                                            fields='id').execute()
     except Exception as e:
         print("An error occurred while uploading file to Google Drive")
     return True
Example #40
def uploadFileToFolder(service, folderID, fileName):
    """Uploads the file to the specified folder ID on Google Drive.

    Returns:
        fileID, a string of the ID of the uploaded file.
    """
    print("Uploading file to: " + folderID)
    file_metadata = {'name': fileName, 'parents': [folderID]}
    media = MediaFileUpload(fileName, resumable=True)
    file = service.files().create(body=file_metadata,
                                  media_body=media,
                                  fields='name,id').execute()
    fileID = file.get('id')
    print('File ID: %s ' % fileID)
    print('File Name: %s \n' % file.get('name'))

    return fileID
Example #41
 def uploadFile(self, filename, cloud_path, mimeType):
     files = self.print_root_directory()
     file_metadata = 0
     for folder in files:
         if folder["name"] == cloud_path and (folder["mimeType"] == "application/vnd.google-apps.shortcut" or folder["mimeType"] == "application/vnd.google-apps.folder"):
             if folder["mimeType"] == "application/vnd.google-apps.shortcut":
                 shortcuts = self.drive.files().list(q= "mimeType='application/vnd.google-apps.shortcut'",fields='*',includeItemsFromAllDrives=True,supportsAllDrives=True).execute().get('files',[])
                 for shortcut in shortcuts:
                     if shortcut['name'] == cloud_path:
                         print(shortcut['shortcutDetails'])
                 file_metadata = {'name' : filename, 'parents': [shortcut['shortcutDetails']['targetId']]}
             else:
                 file_metadata = {'name' : filename, 'parents': [folder['id']]}
             media = MediaFileUpload(filename, mimetype=mimeType,resumable=True)
             file = self.drive.files().create(body=file_metadata,media_body=media,fields='id', supportsAllDrives = True).execute()
             break
Example #42
def upload(user, path, name, folder, msg):
    service = login(user)

    mimetype = MIME_TYPES.get('.' + path.split('.')[-1], 'text/plain')
    media = MediaFileUpload(path, mimetype=mimetype, resumable=True)
    request = service.files().create(media_body=media, body={'name': name, 'parents': [folder]})

    uploaded = False
    while not uploaded:
        status, uploaded = request.next_chunk()
        if uploaded:
            break
        msg.edit(user.getstr('drive_uploading_progress').format(p=int(status.progress() * 100)))

    msg.edit(user.getstr('drive_uploading_done'))  # TODO: Move in correct folder
    os.remove(path)
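The MIME_TYPES mapping consulted above is not part of the snippet. An illustrative extension-to-type table (an assumption; extend as needed):

MIME_TYPES = {
    '.png': 'image/png',
    '.jpg': 'image/jpeg',
    '.pdf': 'application/pdf',
    '.mp4': 'video/mp4',
    '.mp3': 'audio/mpeg',
    '.zip': 'application/zip',
}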
Example #43
def put_file(FILENAME, title, parent_id, desc='A test document'):
    # Insert a file
    media_body = MediaFileUpload(FILENAME,
                                 mimetype='text/plain',
                                 resumable=True)
    body = {
        'title': title,
        'description': desc,
        'parents': [{
            'id': parent_id
        }],
        'mimeType': 'text/plain'
    }

    file = drive_service.files().insert(body=body,
                                        media_body=media_body).execute()
Example #44
def upload_file_to_drive(service, file_name, path_to_file):
    file_metadata = {
        'name': file_name,
        'parents': ['0B_qHZ9yaJLRnd1RSUTNZazdicGs']
    }
    media = MediaFileUpload(f'{path_to_file}\\{file_name}', resumable=True)
    request = service.files().create(body=file_metadata,
                                     media_body=media,
                                     fields='id')
    response = None

    while response is None:
        status, response = request.next_chunk()
        if status:
            sys.stdout.write(("Uploaded %d%%. \r" % int(status.progress() * 100)))
            sys.stdout.flush()
Example #45
def upload_notebook(service, f, folder_id):
    print("  Uploading {}".format(f))

    notebook_name = os.path.basename(f)
    file_metadata = {
        "name": notebook_name,
        "mimeType": "application/vnd.google.colab",
        "parents": [folder_id]
    }
    media = MediaFileUpload(f,
                            mimetype="application/x-ipynb+json",
                            resumable=True)
    file = service.files().create(body=file_metadata,
                                  media_body=media,
                                  fields="id").execute()
    print("    File ID: {}".format(file.get("id")))
Example #46
def writeToGDrive(filename,source,folder_id):
    file_metadata = {'name': filename,'parents': [folder_id],
    'mimeType': 'application/vnd.google-apps.spreadsheet'}
    media = MediaFileUpload(source,
                            mimetype='application/vnd.ms-excel')

    if fileInGDrive(filename) is False:
        file = SERVICE.files().create(body=file_metadata,
                                            media_body=media,
                                            fields='id').execute()
        print('Upload Success!')
        print('File ID:', file.get('id'))
        return file.get('id')

    else:
        print('File already exists as', filename)
Example #47
def main():
    """Builds a Google Drive API service object and uploads 'video.avi',
    printing the ID of the created file.
    """
    credentials = get_credentials()
    http = credentials.authorize(httplib2.Http())
    service = discovery.build('drive', 'v3', http=http)

    file_metadata = {'name': 'video.avi'}
    media = MediaFileUpload('video.avi', mimetype='video/avi')
    file = service.files().create(body=file_metadata,
                                  media_body=media,
                                  fields='id').execute()
    print('File ID: %s' % file.get('id'))
Example #48
 def youtube_request(self):
     tags = None
     if self.args.keywords:
         tags = self.args.keywords.split(",")
     body = dict(snippet=dict(title=self.args.title,
                              description=self.args.description,
                              tags=tags,
                              categoryId=self.args.category),
                 status=dict(privacyStatus=self.args.privacyStatus))
     request = self.youtube.videos().insert(part=",".join(body.keys()),
                                            body=body,
                                            media_body=MediaFileUpload(
                                                self.args.file,
                                                chunksize=-1,
                                                resumable=True))
     return request
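youtube_request only builds the insert request; something else has to drive it chunk by chunk, much like the resumable_upload call in the image-upload snippet earlier. A sketch of such a loop, assuming HttpError comes from the API client package:

import time
from apiclient.errors import HttpError  # googleapiclient.errors in newer releases

def resumable_upload(request, max_retries=10):
    """Run request.next_chunk() until the upload completes, retrying transient 5xx errors."""
    response = None
    retries = 0
    while response is None:
        try:
            status, response = request.next_chunk()
            if status:
                print('Uploaded %d%%.' % int(status.progress() * 100))
        except HttpError as err:
            if err.resp.status in (500, 502, 503, 504) and retries < max_retries:
                retries += 1
                time.sleep(2 ** retries)
            else:
                raise
    return response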
Example #49
    def test_media_file_upload_to_from_json(self):
        upload = MediaFileUpload(datafile('small.png'),
                                 chunksize=500,
                                 resumable=True)
        self.assertEqual('image/png', upload.mimetype())
        self.assertEqual(190, upload.size())
        self.assertEqual(True, upload.resumable())
        self.assertEqual(500, upload.chunksize())
        self.assertEqual('PNG', upload.getbytes(1, 3))

        json = upload.to_json()
        new_upload = MediaUpload.new_from_json(json)

        self.assertEqual('image/png', new_upload.mimetype())
        self.assertEqual(190, new_upload.size())
        self.assertEqual(True, new_upload.resumable())
        self.assertEqual(500, new_upload.chunksize())
        self.assertEqual('PNG', new_upload.getbytes(1, 3))
Example #50
    def upload_to_drive(self, parent_id, filename):
        """Uploads the file to Google Drive.

        filename: A filepath to a file like '123abc.MP4'.

        parent_id: Google Drive document id of the parent folder

        Returns True or False representing successful vs unsuccessful upload
        """
        try:
            media_body = MediaFileUpload(
                    filename,
                    mimetype='application/octet-stream',
                    chunksize=1024 * 256,
                    resumable=True)
            body = {
                'name': filename,
                'description': "Zoom Recording",
                'parents': [parent_id],
                'mimeType': 'application/octet-stream'
            }
        except IOError as e:
            log("Couldn't generate upload for {0}. {1}".format(filename, e.strerror))
            return False

        retries = 0
        request = self.drive.files().create(body=body, media_body=media_body)
        response = None
        # Upload the file
        while response is None:
            try:
                status, response = request.next_chunk()
                if status:
                    retries = 0
            except errors.HttpError, e:
                if e.resp.status == 404:
                    log("Error 404 - Aborting the upload of {0}".format(filename))
                    return False
                else:
                    if retries > 10:
                        log("Retries limit exceeded! Aborting")
                        return False
                    else:
                        retries += 1
                        time.sleep(2 ** retries)
                        print "Error ({0})({1})... retrying.".format(e.resp.status, e.message)
                        continue
Example #51
def upload(inputPath, uploadType, foldername=None):
    """Upload a single file, or every file in a folder, to Google Drive."""
    credentials = get_credentials()
    http = credentials.authorize(httplib2.Http())
    service = discovery.build('drive', 'v3', http=http)

    if uploadType != 'folder':
        filename = os.path.basename(inputPath)
        file_metadata = {'name': filename}
        if uploadType == 'video':
            media = MediaFileUpload(inputPath, mimetype='video/mp4')
        elif uploadType == 'image':
            media = MediaFileUpload(inputPath, mimetype='image/png')
        elif uploadType == 'audio':
            media = MediaFileUpload(inputPath, mimetype='audio/mp3')
        elif uploadType == 'json':
            media = MediaFileUpload(inputPath, mimetype='text/json')
        elif uploadType == 'text':
            media = MediaFileUpload(inputPath, mimetype='text/text')

        file = service.files().create(body=file_metadata,
                                      media_body=media,
                                      fields='id').execute()
    else:
        foldername = foldername
        file_metadata = {
            'name': foldername,
            'mimeType': 'application/vnd.google-apps.folder'
        }
        file = service.files().create(body=file_metadata,
                                      fields='id').execute()
        folder_id = file.get('id')
        for file in os.listdir(inputPath):
            # determine mimetype
            if file.endswith('.mp4'):
                mimetype = 'video/mp4'
            elif file.endswith('.mp3'):
                mimetype = 'audio/mp3'
            elif file.endswith('.json'):
                mimetype = 'text/json'
            elif file.endswith('.png'):
                mimetype = 'image/png'
            elif file.endswith('.txt'):
                mimetype = 'text/text'
            else:
                mimetype = 'text/text'
            filepath = os.path.join(inputPath, file)
            file_metadata = {'name': file, 'parents': [folder_id]}
            media = MediaFileUpload(filepath, mimetype=mimetype)
            file = service.files().create(body=file_metadata,
                                          media_body=media,
                                          fields='id').execute()
Example #52
def uploadFile():
    """Uploads a file to the Drive folder specified by FOLDER ID.
    Must be modified for the file metadata and mimetype.
    """
    folder_id = '<FOLDER ID>'
    file_metadata = {
        'name': '<NAME>',
        'mimeType': 'application/vnd.google-apps.spreadsheet',
        'parents': [folder_id]
    }
    media = MediaFileUpload('<NAME>',
                            mimetype='application/vnd.google-apps.spreadsheet',
                            resumable=True)
    file = drive_service.files().create(body=file_metadata,
                                        media_body=media,
                                        fields='id').execute()
    print('File ID: %s ' % file.get('id'))
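A sketch of the same upload with the placeholders turned into parameters, assuming drive_service is an authorised Drive v3 client and the local file is a CSV to be converted into a Google Sheet (the function and argument names are illustrative, not part of the original):

import os
from googleapiclient.http import MediaFileUpload

def upload_to_folder(drive_service, local_path, folder_id, name=None):
    """Upload local_path into folder_id, converting it to a Google Sheet."""
    file_metadata = {
        'name': name or os.path.basename(local_path),
        'mimeType': 'application/vnd.google-apps.spreadsheet',  # request conversion
        'parents': [folder_id],
    }
    media = MediaFileUpload(local_path, mimetype='text/csv', resumable=True)
    created = drive_service.files().create(body=file_metadata,
                                           media_body=media,
                                           fields='id').execute()
    return created.get('id')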
Exemple #53
0
    def insert_file(self,
                    title,
                    description,
                    parent_id,
                    mime_type,
                    filename,
                    folder=False):
        """Insert new file.

        Args:
            title: Title of the file to insert, including the extension.
            description: Description of the file to insert.
            parent_id: Parent folder's ID.
            mime_type: MIME type of the file to insert.
            filename: Filename of the file to insert.
            folder: If True, create a folder instead of uploading a file.
        Returns:
            Inserted file metadata if successful, None otherwise.
        """
        if not folder:
            media_body = MediaFileUpload(filename,
                                         mimetype=mime_type,
                                         resumable=True)
        body = {
            'title': title,
            'description': description,
            'mimeType': mime_type
        }
        # Set the parent folder.
        if parent_id:
            body['parents'] = [{'id': parent_id}]

        try:
            if not folder:
                file = self.service.files().insert(
                    body=body, media_body=media_body).execute()
            else:
                file = self.service.files().insert(body=body).execute()

            # Print the ID of the newly created file.
            print 'File ID: %s' % file['id']

            return file
        except errors.HttpError, error:
            print 'An error occurred: %s' % error
            return None
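A hypothetical call, assuming uploader is an instance of the surrounding class constructed with an authorised Drive v2 service (names and IDs are placeholders):

uploaded = uploader.insert_file(title='report.pdf',
                                description='Monthly report',
                                parent_id='0BxxPLACEHOLDERxx',
                                mime_type='application/pdf',
                                filename='/tmp/report.pdf')
if uploaded:
    print('Inserted file %s' % uploaded['id'])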
Exemple #54
0
def uploadVideo(title, path):
    credentials = get_credentials()
    http = credentials.authorize(httplib2.Http())
    service = discovery.build('drive', 'v3', http=http)
    try:
        media = MediaFileUpload(path, mimetype='video/mp4', resumable=True)
    except Exception as e:
        return dict(success=False, message=str(e))
    try:
        file = service.files().create(body={
            'name': title
        },
                                      media_body=media,
                                      fields='id').execute()
    except Exception as e:
        return dict(success=False, message=str(e))
    return dict(success=True, message=file.get('id'), service=service)
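A short usage example for uploadVideo(); the title and path are placeholders:

result = uploadVideo('Holiday clip', '/tmp/holiday.mp4')
if result['success']:
    print('Uploaded, file ID: %s' % result['message'])
else:
    print('Upload failed: %s' % result['message'])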
 def handle(self, *args, **options):
     # TODO: specify which credential to use
     credential = DriveCredential.objects.latest('id').credential
     http = credential.authorize(httplib2.Http())
     service = build('drive', 'v2', http=http)
     mime_type = 'text/csv'
     for filename in args:
         logging.info('uploading %s' % filename)
         media_body = MediaFileUpload(filename,
                                      mimetype=mime_type,
                                      resumable=True)
         upload = service.files().insert(body=dict(title=filename,
                                                   mimeType=mime_type),
                                         media_body=media_body,
                                         convert=True).execute()
         logging.info('https://docs.google.com/spreadsheet/ccc?key=%s' %
                      upload['id'])
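Assuming the handle() above belongs to a management command registered as upload_csv (the command name is a placeholder), it can be driven from code as well as from manage.py:

from django.core.management import call_command

# Equivalent to: python manage.py upload_csv reports/january.csv
call_command('upload_csv', 'reports/january.csv')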
Exemple #56
0
def main():
    http = get_credentials().authorize(httplib2.Http())
    service = discovery.build('drive', 'v3', http=http)

    if not update_credential:
        parser = argparse.ArgumentParser()
        parser.add_argument("-p", "--ip", default=None)
        parser.add_argument("file_id")
        args = parser.parse_args()
        filename = get_script_path() + "/data"
        ip = args.ip if args.ip is not None else get_public_ip()
        gen_time_file(filename, ip)
        service.files().update(body={
            "name": "ip_history.txt"
        },
                               fileId=args.file_id,
                               media_body=MediaFileUpload(filename)).execute()
Exemple #57
0
def initialize_upload(youtube, filename, title, description, category,
                      keywords, privacy_status):
    print(f'Beginning upload of {filename} with title {title}...')
    body = dict(snippet=dict(title=title,
                             description=description,
                             tags=keywords,
                             categoryId=category),
                status=dict(privacyStatus=privacy_status.value,
                            selfDeclaredMadeForKids=False))
    insert_request = youtube.videos().insert(part=",".join(body.keys()),
                                             body=body,
                                             media_body=MediaFileUpload(
                                                 filename,
                                                 chunksize=-1,
                                                 resumable=True))

    return resumable_upload(insert_request)
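initialize_upload() hands the request to a resumable_upload() helper that is not shown in this snippet. A minimal sketch of the usual next_chunk() polling pattern such a helper tends to implement (an assumption, not the author's code):

def resumable_upload(insert_request):
    """Poll next_chunk() until the upload finishes; return the new video ID."""
    response = None
    while response is None:
        status, response = insert_request.next_chunk()
        if status:
            print('Uploaded %d%%' % int(status.progress() * 100))
    video_id = response.get('id')
    print('Upload complete, video ID: %s' % video_id)
    return video_id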
Exemple #58
0
    def test_media_file_upload_to_from_json(self):
        upload = MediaFileUpload(datafile("small.png"), chunksize=500, resumable=True)
        self.assertEqual("image/png", upload.mimetype())
        self.assertEqual(190, upload.size())
        self.assertEqual(True, upload.resumable())
        self.assertEqual(500, upload.chunksize())
        self.assertEqual("PNG", upload.getbytes(1, 3))

        json = upload.to_json()
        new_upload = MediaUpload.new_from_json(json)

        self.assertEqual("image/png", new_upload.mimetype())
        self.assertEqual(190, new_upload.size())
        self.assertEqual(True, new_upload.resumable())
        self.assertEqual(500, new_upload.chunksize())
        self.assertEqual("PNG", new_upload.getbytes(1, 3))
Exemple #59
0
 def upload_from_filename(self, filename=None, bucket_id=None, blob_location=None):
     media = MediaFileUpload(filename)
     if not media.mimetype():
         media = MediaFileUpload(filename, DEFAULT_MIMETYPE)
     self.gcs.objects().insert(bucket=bucket_id, name=blob_location, media_body=media).execute()
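A hypothetical call, assuming storage is an instance of the enclosing class holding an authorised Cloud Storage JSON API client on self.gcs (bucket and object names are placeholders):

storage.upload_from_filename(filename='/tmp/report.csv',
                             bucket_id='my-bucket',
                             blob_location='reports/2020/report.csv')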
Exemple #60
0
    def method(self, **kwargs):
        # Don't bother with doc string, it will be over-written by createMethod.

        for name in kwargs.iterkeys():
            if name not in parameters.argmap:
                raise TypeError('Got an unexpected keyword argument "%s"' % name)

        # Remove args that have a value of None.
        keys = kwargs.keys()
        for name in keys:
            if kwargs[name] is None:
                del kwargs[name]

        for name in parameters.required_params:
            if name not in kwargs:
                raise TypeError('Missing required parameter "%s"' % name)

        for name, regex in parameters.pattern_params.iteritems():
            if name in kwargs:
                if isinstance(kwargs[name], basestring):
                    pvalues = [kwargs[name]]
                else:
                    pvalues = kwargs[name]
                for pvalue in pvalues:
                    if re.match(regex, pvalue) is None:
                        raise TypeError(
                            'Parameter "%s" value "%s" does not match the pattern "%s"' %
                            (name, pvalue, regex))

        for name, enums in parameters.enum_params.iteritems():
            if name in kwargs:
                # We need to handle the case of a repeated enum
                # name differently, since we want to handle both
                # arg='value' and arg=['value1', 'value2']
                if (name in parameters.repeated_params and
                        not isinstance(kwargs[name], basestring)):
                    values = kwargs[name]
                else:
                    values = [kwargs[name]]
                for value in values:
                    if value not in enums:
                        raise TypeError(
                            'Parameter "%s" value "%s" is not an allowed value in "%s"' %
                            (name, value, str(enums)))

        actual_query_params = {}
        actual_path_params = {}
        for key, value in kwargs.iteritems():
            to_type = parameters.param_types.get(key, 'string')
            # For repeated parameters we cast each member of the list.
            if key in parameters.repeated_params and type(value) == type([]):
                cast_value = [_cast(x, to_type) for x in value]
            else:
                cast_value = _cast(value, to_type)
            if key in parameters.query_params:
                actual_query_params[parameters.argmap[key]] = cast_value
            if key in parameters.path_params:
                actual_path_params[parameters.argmap[key]] = cast_value
        body_value = kwargs.get('body', None)
        media_filename = kwargs.get('media_body', None)

        if self._developerKey:
            actual_query_params['key'] = self._developerKey

        model = self._model
        if methodName.endswith('_media'):
            model = MediaModel()
        elif 'response' not in methodDesc:
            model = RawModel()

        headers = {}
        headers, params, query, body = model.request(headers,
                                                     actual_path_params, actual_query_params, body_value)

        expanded_url = uritemplate.expand(pathUrl, params)
        url = urlparse.urljoin(self._baseUrl, expanded_url + query)

        resumable = None
        multipart_boundary = ''

        if media_filename:
            # Ensure we end up with a valid MediaUpload object.
            if isinstance(media_filename, basestring):
                (media_mime_type, encoding) = mimetypes.guess_type(media_filename)
                if media_mime_type is None:
                    raise UnknownFileType(media_filename)
                if not mimeparse.best_match([media_mime_type], ','.join(accept)):
                    raise UnacceptableMimeTypeError(media_mime_type)
                media_upload = MediaFileUpload(media_filename,
                                               mimetype=media_mime_type)
            elif isinstance(media_filename, MediaUpload):
                media_upload = media_filename
            else:
                raise TypeError('media_filename must be str or MediaUpload.')

            # Check the maxSize
            if maxSize > 0 and media_upload.size() > maxSize:
                raise MediaUploadSizeError("Media larger than: %s" % maxSize)

            # Use the media path uri for media uploads
            expanded_url = uritemplate.expand(mediaPathUrl, params)
            url = urlparse.urljoin(self._baseUrl, expanded_url + query)
            if media_upload.resumable():
                url = _add_query_parameter(url, 'uploadType', 'resumable')

            if media_upload.resumable():
                # This is all we need to do for resumable, if the body exists it gets
                # sent in the first request, otherwise an empty body is sent.
                resumable = media_upload
            else:
                # A non-resumable upload
                if body is None:
                    # This is a simple media upload
                    headers['content-type'] = media_upload.mimetype()
                    body = media_upload.getbytes(0, media_upload.size())
                    url = _add_query_parameter(url, 'uploadType', 'media')
                else:
                    # This is a multipart/related upload.
                    msgRoot = MIMEMultipart('related')
                    # msgRoot should not write out its own headers
                    setattr(msgRoot, '_write_headers', lambda self: None)

                    # attach the body as one part
                    msg = MIMENonMultipart(*headers['content-type'].split('/'))
                    msg.set_payload(body)
                    msgRoot.attach(msg)

                    # attach the media as the second part
                    msg = MIMENonMultipart(*media_upload.mimetype().split('/'))
                    msg['Content-Transfer-Encoding'] = 'binary'

                    payload = media_upload.getbytes(0, media_upload.size())
                    msg.set_payload(payload)
                    msgRoot.attach(msg)
                    body = msgRoot.as_string()

                    multipart_boundary = msgRoot.get_boundary()
                    headers['content-type'] = ('multipart/related; '
                                               'boundary="%s"') % multipart_boundary
                    url = _add_query_parameter(url, 'uploadType', 'multipart')

        logger.info('URL being requested: %s' % url)
        return self._requestBuilder(self._http,
                                    model.response,
                                    url,
                                    method=httpMethod,
                                    body=body,
                                    headers=headers,
                                    methodId=methodId,
                                    resumable=resumable)
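From the caller's side, the media handling above means a generated method accepts either a plain filename or a MediaUpload object as media_body. A sketch against a Drive v2 files().insert method produced by this builder (service construction omitted; file names are placeholders):

from googleapiclient.http import MediaFileUpload

# Simple or multipart upload: the builder guesses the MIME type from the name.
service.files().insert(body={'title': 'photo.png'},
                       media_body='photo.png').execute()

# Resumable upload: pass a resumable MediaFileUpload instead of a filename.
media = MediaFileUpload('video.mp4', mimetype='video/mp4',
                        chunksize=1024 * 1024, resumable=True)
request = service.files().insert(body={'title': 'video.mp4'}, media_body=media)
response = None
while response is None:
    status, response = request.next_chunk()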