Example #1
def uploadToGCS(filename, file, file_type, bucket_name, object_name):
    log('Building upload request...', True)
    media = MediaIoBaseUpload(io.BytesIO(file), file_type, chunksize=CHUNKSIZE, resumable=True)
    if not media.mimetype():
        media = MediaIoBaseUpload(io.BytesIO(file), DEFAULT_MIMETYPE, chunksize=CHUNKSIZE, resumable=True)
    request = service.objects().insert(bucket=bucket_name, name=object_name, media_body=media)

    log('Uploading file: %s to bucket: %s object: %s ' % (filename, bucket_name, object_name))

    progressless_iters = 0
    response = None
    while response is None:
        error = None
        try:
            progress, response = request.next_chunk()
            if progress:
                log('Upload %d%%' % (100 * progress.progress()))
        except HttpError as err:
            error = err
            if err.resp.status < 500:
                raise
        except RETRYABLE_ERRORS as err:
            error = err

        if error:
            progressless_iters += 1
            handle_progressless_iter(error, progressless_iters)
        else:
            progressless_iters = 0
    #print('\n')
    log('Upload complete!')
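Note: the uploadToGCS snippet above refers to several module-level names that are not shown (log, service, CHUNKSIZE, DEFAULT_MIMETYPE, RETRYABLE_ERRORS, handle_progressless_iter, and the HttpError class caught in the loop). A minimal sketch of plausible definitions, loosely modeled on Google's chunked-transfer sample; all names and values here are assumptions, not part of the original example:

import random
import time

import httplib2
from googleapiclient.errors import HttpError  # the error type caught in the upload loop above

# Illustrative values; tune chunk size and retry count for your workload.
CHUNKSIZE = 2 * 1024 * 1024
DEFAULT_MIMETYPE = 'application/octet-stream'
RETRYABLE_ERRORS = (httplib2.HttpLib2Error, IOError)
NUM_RETRIES = 5


def handle_progressless_iter(error, progressless_iters):
    # Give up after NUM_RETRIES consecutive iterations without progress,
    # otherwise sleep with jittered exponential backoff and let the caller retry.
    if progressless_iters > NUM_RETRIES:
        raise error
    time.sleep(random.random() * (2 ** progressless_iters))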
Example #2
  def test_media_io_base_upload_serializable(self):
    f = open(datafile('small.png'), 'r')
    upload = MediaIoBaseUpload(fd=f, mimetype='image/png')

    try:
      json = upload.to_json()
      self.fail('MediaIoBaseUpload should not be serializable.')
    except NotImplementedError:
      pass
Example #3
    def test_media_io_base_upload_serializable(self):
        f = open(datafile('small.png'), 'r')
        upload = MediaIoBaseUpload(fd=f, mimetype='image/png')

        try:
            json = upload.to_json()
            self.fail('MediaIoBaseUpload should not be serializable.')
        except NotImplementedError:
            pass
Example #4
    def test_media_io_base_upload_serializable(self):
        f = open(datafile("small.png"), "r")
        upload = MediaIoBaseUpload(fh=f, mimetype="image/png")

        try:
            json = upload.to_json()
            self.fail("MediaIoBaseUpload should not be serializable.")
        except NotImplementedError:
            pass
Example #5
  def test_media_io_base_upload_streamable(self):
    try:
      import io

      fd = io.BytesIO('stuff')
      upload = MediaIoBaseUpload(
          fd=fd, mimetype='image/png', chunksize=500, resumable=True)
      self.assertEqual(True, upload.has_stream())
      self.assertEqual(fd, upload.stream())
    except ImportError:
      pass
Example #6
  def test_media_io_base_upload_streamable(self):
    try:
      import io

      fd = io.BytesIO('stuff')
      upload = MediaIoBaseUpload(
          fd=fd, mimetype='image/png', chunksize=500, resumable=True)
      self.assertEqual(True, upload.has_stream())
      self.assertEqual(fd, upload.stream())
    except ImportError:
      pass
Example #7
  def test_resumable_media_handle_resume_of_upload_of_unknown_size(self):
    http = HttpMockSequence([
      ({'status': '200',
        'location': 'http://upload.example.com'}, ''),
      ({'status': '400'}, ''),
      ])

    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
    zoo = build('zoo', 'v1', http=self.http)

    # Create an upload that doesn't know the full size of the media.
    fd = io.StringIO('data goes here')

    upload = MediaIoBaseUpload(
        fd=fd, mimetype='image/png', chunksize=500, resumable=True)

    request = zoo.animals().insert(media_body=upload, body=None)

    # Put it in an error state.
    self.assertRaises(HttpError, request.next_chunk, http=http)

    http = HttpMockSequence([
      ({'status': '400',
        'range': '0-5'}, 'echo_request_headers_as_json'),
      ])
    try:
      # Should resume the upload by first querying the status of the upload.
      request.next_chunk(http=http)
    except HttpError as e:
      expected = {
          'Content-Range': 'bytes */14',
          'content-length': '0'
          }
      self.assertEqual(expected, simplejson.loads(e.content),
        'Should send an empty body when requesting the current upload status.')
Example #8
def update_file(service, file_id, filecontent, mime_type):
    """Insert new file.

  Args:
    service: Drive API service instance.
    title: Title of the file to insert, including the extension.
    description: Description of the file to insert.
    parent_id: Parent folder's ID.
    mime_type: MIME type of the file to insert.
    filename: Filename of the file to insert.
  Returns:
    Inserted file metadata if successful, None otherwise.
  """
    media_body = MediaIoBaseUpload(filecontent,
                                   mimetype=mime_type,
                                   resumable=True)
    try:
        file = service.files().update(
            fileId=file_id,
            media_body=media_body,
            newRevision=True,
        ).execute()

        # Uncomment the following line to print the File ID
        # print 'File ID: %s' % file['id']

        return file
    except errors.HttpError, error:
        print 'An error occurred: %s' % error
        return None
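A hypothetical call to update_file, assuming service is an authorized Drive API client and FILE_ID is the ID of an existing file (both placeholders):

import io

new_content = io.BytesIO(b'updated file body')
updated = update_file(service, FILE_ID, new_content, 'text/plain')
if updated:
    print('Updated file: %s' % updated.get('id'))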
Example #9
 def _handle_timeline_notification(self, data):
   """Handle timeline notification."""
   for user_action in data.get('userActions', []):
     if user_action.get('type') == 'SHARE':
       # Fetch the timeline item.
       item = self.mirror_service.timeline().get(id=data['itemId']).execute()
       attachments = item.get('attachments', [])
       media = None
       if attachments:
         # Get the first attachment on that timeline item and do stuff with it.
         attachment = self.mirror_service.timeline().attachments().get(
             itemId=data['itemId'],
             attachmentId=attachments[0]['id']).execute()
         resp, content = self.mirror_service._http.request(
             attachment['contentUrl'])
         if resp.status == 200:
           media = MediaIoBaseUpload(
               io.BytesIO(content), attachment['contentType'],
               resumable=True)
         else:
           logging.info('Unable to retrieve attachment: %s', resp.status)
       body = {
           'text': 'Echoing your shared item: %s' % item.get('text', ''),
           'notification': {'level': 'DEFAULT'}
       }
       self.mirror_service.timeline().insert(
           body=body, media_body=media).execute()
       # Only handle the first successful action.
       break
     elif user_action.get('type') == 'CUSTOM' and user_action.get('payload') == 'social-stream':
       self._insert_social_stream()
     else:
       logging.info(
           "I don't know what to do with this notification: %s", user_action)
Example #10
def upload_to_bucket(file, bucket, mimetype, id):
    """
    SUCCESS: return None
    ERROR: return http error code
    """
    credentials = GoogleCredentials.get_application_default()
    service = apiclient.discovery.build('storage',
                                        'v1',
                                        credentials=credentials)
    image = MediaIoBaseUpload(file, mimetype=mimetype, resumable=True)
    req = service.objects().insert(bucket=bucket, media_body=image, name=id)
    # PROCESS UPLOAD IN CHUNKS
    resp = None
    serverErrorCount = 0  # to prevent infinite loop
    while resp is None:
        try:
            status, resp = req.next_chunk()
        # IF SERVER ERROR, TRY CHUNK AGAIN
        except apiclient.errors.HttpError as e:
            # IF 5xx ERROR RETRY
            if e.resp.status in [500, 502, 503, 504]:
                # Check how many retries have happened
                if serverErrorCount > 8:
                    return e.resp.status
                else:
                    time.sleep(1 + (serverErrorCount * 0.3))  # sleep it off
                    serverErrorCount += 1
                    continue
            # If OTHER ERROR, RETURN IT IMMEDIATELY
            else:
                return e.resp.status
    # IF NO ERROR, RETURN NONE
    return None
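A short usage sketch for upload_to_bucket; the bucket name, object name, and file path are placeholders:

# Returns None on success, or the HTTP status code of a non-retryable error.
with open('photo.jpg', 'rb') as fh:
    status = upload_to_bucket(fh, 'my-example-bucket', 'image/jpeg', 'photos/photo.jpg')
if status is not None:
    print('Upload failed with HTTP status %s' % status)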
Example #11
    def _save(self, name, content):
        folder_path = os.path.sep.join(self._split_path(name)[:-1])
        folder_data = self._get_or_create_folder(folder_path)
        parent_id = None if folder_data is None else folder_data['id']
        # Now we had created (or obtained) folder on GDrive
        # Upload the file
        # guess_type() returns a (type, encoding) tuple; keep only the type string.
        mime_type = mimetypes.guess_type(name)[0]
        if mime_type is None:
            mime_type = self._UNKNOWN_MIMETYPE_
        media_body = MediaIoBaseUpload(content.file,
                                       mime_type,
                                       resumable=True,
                                       chunksize=1024 * 512)
        body = {'name': self._split_path(name)[-1], 'mimeType': mime_type}
        # Set the parent folder.
        if parent_id:
            body['parents'] = [parent_id]
        file_data = self._drive_service.files().create(
            body=body, media_body=media_body).execute()

        # Setting up permissions
        for p in self._permissions:
            self._drive_service.permissions().create(fileId=file_data["id"],
                                                     body={
                                                         **p.raw
                                                     }).execute()

        return file_data.get(u'originalFilename', file_data.get(u'name'))
Example #12
    def _save(self, name, content):
        folder_path = os.path.sep.join(self._split_path(name)[:-1])
        folder_data = self._get_or_create_folder(folder_path)
        parent_id = None if folder_data is None else folder_data['id']
        # Now we had created (or obtained) folder on GDrive
        # Upload the file
        fd = BytesIO(content.file.read())
        # guess_type() returns a (type, encoding) tuple; keep only the type string.
        mime_type = mimetypes.guess_type(name)[0]
        if mime_type is None:
            mime_type = self._UNKNOWN_MIMETYPE_
        media_body = MediaIoBaseUpload(fd, mime_type, resumable=True)
        body = {
            'title': name,
            'mimeType': mime_type
        }
        # Set the parent folder.
        if parent_id:
            body['parents'] = [{'id': parent_id}]
        file_data = self._drive_service.files().insert(
            body=body,
            media_body=media_body).execute()

        # Setting up public permission
        public_permission = {
            'type': 'anyone',
            'role': 'reader'
        }
        self._drive_service.permissions().insert(fileId=file_data["id"], body=public_permission).execute()

        return file_data[u'originalFilename']
Example #13
    def upload(self, file_p, filename, mime_type="application/octet-stream", parent=None):

        """
        Upload file

        name = Name of the file
        source_location = absolutale location of the file where it is saved
        mime_type - Type of the file/folder
        parent - Id of the parent folder. Default is None
        """

        if parent is None:
            _file_metadata = {
                'name': filename,
            }
        else:
            _file_metadata = {
                'name': filename,
                'parents': [parent]
            }

        # _media = MediaFileUpload(file_p, mimetype=mime_type, resumable=True)
        _media = MediaIoBaseUpload(fd=file_p, mimetype=mime_type, resumable=False)

        return self._service.files().create(body=_file_metadata, media_body=_media, fields='id').execute()
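A hypothetical usage of the upload method above, assuming drive is an instance of the class that defines it and FOLDER_ID is an existing Drive folder ID:

with open('report.pdf', 'rb') as fh:
    created = drive.upload(fh, 'report.pdf',
                           mime_type='application/pdf',
                           parent='FOLDER_ID')
print(created['id'])  # the request asks only for the 'id' field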
Example #14
 def upload_revision(self, document_name, document, folder_id, original_format, title='Untitled', target_format='*/*'):
     """Upload file to a Google Drive folder.
     
     Args:
         document_name: Name of the document
         document: content of the document to upload.
         folder_id: id of the Google Drive folder
         original_format: file format of the document content
         title: document title
         target_format: file format that the uploaded file will be converted to.
         
     Returns:
         A string to represent the uploaded file's id.
     """
     file_metadata = {
         'name': document_name,
         'title': title,
         'parents': [folder_id],
         'mimeType': target_format
     }
     fh = BytesIO(document)
     media = MediaIoBaseUpload(fh, mimetype=original_format, resumable=True) 
     file = self._drive_service.files().insert(body=file_metadata,
                                 media_body=media,
                                 convert=True,
                                 fields='id').execute()
     print ('File ID: ' + file.get('id'))
     return file.get('id')
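A sketch of how upload_revision might be called, assuming uploader is an instance of the class that defines it; the folder ID and MIME types are placeholders:

doc_id = uploader.upload_revision(
    'meeting-notes.txt',
    b'Notes from today',
    'FOLDER_ID',
    original_format='text/plain',
    title='Meeting notes',
    target_format='application/vnd.google-apps.document')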
Example #15
  def test_media_io_base_stream_unlimited_chunksize_resume(self):
    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
    zoo = build('zoo', 'v1', http=self.http)

    try:
      import io

      # Set up a seekable stream and try to upload in single chunk.
      fd = io.BytesIO(b'01234"56789"')
      media_upload = MediaIoBaseUpload(
          fd=fd, mimetype='text/plain', chunksize=-1, resumable=True)

      request = zoo.animals().insert(media_body=media_upload, body=None)

      # The single chunk fails, restart at the right point.
      http = HttpMockSequence([
        ({'status': '200',
          'location': 'http://upload.example.com'}, ''),
        ({'status': '308',
          'location': 'http://upload.example.com/2',
          'range': '0-4'}, ''),
        ({'status': '200'}, 'echo_request_body'),
        ])

      body = request.execute(http=http)
      self.assertEqual('56789', body)

    except ImportError:
      pass
Example #16
    def _save(self, name, content):
        folder_path = os.path.sep.join(self._split_path(name)[:-1])
        folder_data = self._get_or_create_folder(folder_path)
        parent_id = None if folder_data is None else folder_data['id']
        # Now we had created (or obtained) folder on GDrive
        # Upload the file
        fd = BytesIO(content.file.read())
        # guess_type() returns a (type, encoding) tuple; keep only the type string.
        mime_type = mimetypes.guess_type(name)[0]
        if mime_type is None:
            mime_type = self._UNKNOWN_MIMETYPE_
        media_body = MediaIoBaseUpload(fd, mime_type, resumable=True)
        body = {
            'title': name,
            'mimeType': mime_type
        }
        # Set the parent folder.
        if parent_id:
            body['parents'] = [{'id': parent_id}]
        file_data = self._drive_service.files().insert(
            body=body,
            media_body=media_body).execute()

        # Setting up permissions
        for p in self._permissions:
            time.sleep(0.25)  # avoid HTTP 500 "An internal error has occurred which prevented the sharing of these item(s)"
            self._drive_service.permissions().insert(fileId=file_data["id"], body=p.raw).execute()

        return file_data.get(u'originalFilename', file_data.get(u'title'))
Example #17
    def create_drive_file_from_io(self, name, parent_ids, io_bytes, mime_type, description=None, propertyDict=None):
        file_metadata = {'name': name,
                         'description': description,
                         'mimeType': mime_type,
                         'parents': parent_ids}

        media_body = MediaIoBaseUpload(io_bytes,
                                       chunksize=-1,
                                       mimetype=mime_type,
                                       resumable=True)
        request = self.service.files().create(body=file_metadata,
                                              media_body=media_body,
                                              fields="id")

        response = None
        backoff = 1
        while response is None:
            try:
                status, response = request.next_chunk()
                # if status:
                #     print('{} percent {}'.format(name, int(status.progress() * 100)))
            except errors.HttpError, e:
                if e.resp.status in [404]:
                    # Start the upload all over again or error.
                    raise Exception('Upload Failed 404')
                elif e.resp.status in [500, 502, 503, 504]:
                    if backoff > 8:
                        raise Exception('Upload Failed: {}'.format(e))
                    print 'Retrying upload in: {} seconds'.format(backoff)
                    sleep(backoff + uniform(.001, .999))
                    backoff += backoff
                else:
                    msg = 'Upload Failed\n{}'.format(e)
                    raise Exception(msg)
Example #18
 def upload_string(self,
                   contents,
                   mime_type,
                   title,
                   description=None,
                   parent_ids=None):
     """Upload a string."""
     if description is None:
         description = title
     body = {
         'title': title,
         'description': description,
         'mimeType': mime_type
     }
     if parent_ids is not None:
         if isinstance(parent_ids, list):
             body['parents'] = [{
                 'id': parent_id
             } for parent_id in parent_ids
                                if isinstance(parent_id, basestring)]
         elif isinstance(parent_ids, basestring):
             body['parents'] = [{'id': parent_ids}]
     media_body = MediaIoBaseUpload(StringIO(contents), mimetype=mime_type)
     return self.service.files().insert(body=body,
                                        media_body=media_body,
                                        convert=True).execute()
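For example, a CSV string can be inserted and converted in one call (client is assumed to be an instance of the class defining upload_string; the folder ID is a placeholder):

csv_text = 'name,score\nalice,10\nbob,7\n'
created = client.upload_string(csv_text, 'text/csv', 'Scores',
                               parent_ids=['FOLDER_ID'])
print(created['id'])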
Example #19
 def _upload(self):
   with gcs.open(self._file_name, read_buffer_size=self._BUFFER_SIZE) as f:
     media = MediaIoBaseUpload(f, mimetype='application/octet-stream',
                               chunksize=self._BUFFER_SIZE, resumable=True)
     request = self._ga_client.management().uploads().uploadData(
         accountId=self._account_id,
         webPropertyId=self._params['property_id'],
         customDataSourceId=self._params['dataset_id'],
         media_body=media)
     response = None
     tries = 0
     milestone = 0
     while response is None and tries < 5:
       try:
         status, response = request.next_chunk()
       except HttpError, e:
         if e.resp.status in [404, 500, 502, 503, 504]:
           tries += 1
           delay = 5 * 2 ** (tries + random())
           self.log_warn('%s, Retrying in %.1f seconds...', e, delay)
           time.sleep(delay)
         else:
           raise WorkerException(e)
       else:
         tries = 0
       if status:
         progress = int(status.progress() * 100)
         if progress >= milestone:
           self.log_info('Uploaded %d%%.', int(status.progress() * 100))
           milestone += 20
     self.log_info('Upload Complete.')
Example #20
    def put(self, local_path, remote_path, report_to=None):
        LOG.debug('Uploading %s to cloud storage (remote path: %s)', local_path, remote_path)
        filename = os.path.basename(local_path)
        bucket, name = self._parse_url(remote_path)
        if name.endswith("/"):
            name = os.path.join(name, filename)

        buckets = self._list_buckets()
        if bucket not in buckets:
            self._create_bucket(bucket)

        fd = open(local_path, 'rb')
        try:
            media = MediaIoBaseUpload(fd,
                    'application/octet-stream',
                    resumable=True)
            req = self.cloudstorage.objects().insert(
                    bucket=bucket, name=name, media_body=media
            )
            last_progress = 0
            response = None
            while response is None:
                status, response = req.next_chunk()
                if status:
                    percentage = int(status.progress() * 100)
                    if percentage - last_progress >= self.report_interval:
                        if report_to:
                            report_to(status.resumable_progress, status.total_size)
                        last_progress = percentage
        finally:
            fd.close()
        LOG.debug("Finished uploading %s", os.path.basename(local_path))
        return self._format_url(bucket, name)
Example #21
	def upload(self, data, mime_type=None, as_binary=False, **body):
		if not hasattr(self, "service"): return None
		
		if not as_binary:
			try:
				with open(data, 'rb') as d: data = d.read()
			except IOError as e:
				if DEBUG: print e
				return False
		
		import io, sys
		from apiclient.http import MediaIoBaseUpload
		
		if mime_type is None:
			mime_type = "application/octet-stream"
			
		chunk_size = 1024*1024	# unless data is tiny. check first
		data = io.BytesIO(data)

		if sys.getsizeof(data) < chunk_size:
			chunk_size = -1
		
		media_body = MediaIoBaseUpload(data, mimetype=mime_type,
			chunksize=chunk_size, resumable=True)
		
		try:
			upload = self.service.files().insert(
				body=body, media_body=media_body).execute()
			
			return upload
		except errors.HttpError as e:
			if DEBUG: print e
		
		return None
Example #22
    def test_resumable_media_handle_uploads_of_unknown_size_eof(self):
        http = HttpMockSequence([
            ({
                'status': '200',
                'location': 'http://upload.example.com'
            }, ''),
            ({
                'status': '200'
            }, 'echo_request_headers_as_json'),
        ])

        self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
        zoo = build('zoo', 'v1', http=self.http)

        fd = io.StringIO('data goes here')

        # Create an upload that doesn't know the full size of the media.
        upload = MediaIoBaseUpload(fd=fd,
                                   mimetype='image/png',
                                   chunksize=15,
                                   resumable=True)

        request = zoo.animals().insert(media_body=upload, body=None)
        status, body = request.next_chunk(http=http)
        self.assertEqual(body, {
            'Content-Range': 'bytes 0-13/14',
            'Content-Length': '14',
        })
Example #23
    def upload_csv_as_sheet(self, sheetName='GooGIS', body = {}, csv_file_obj = None, csv_path = None, update_sheetId = None):
        '''
        Method to upload to Google drive a csv file (or a path to a csv file) as a google-apps.spreadsheet
        :param sheetName:
        :param body:
        :param csv_file_obj:
        :param csv_path:
        :param update_sheetId:
        :return: response object
        '''
        body['mimeType'] = 'application/vnd.google-apps.spreadsheet'

        if csv_path or csv_file_obj:
            if csv_path:
                media_body = MediaFileUpload(csv_path, mimetype='text/csv', resumable=None)
            elif csv_file_obj:
                media_body = MediaIoBaseUpload(csv_file_obj, mimetype='text/csv', resumable=None)
            if update_sheetId:
                return self.service.files().update(fileId=update_sheetId, media_body=media_body).execute()
            else:
                body['description'] = 'GooGIS sheet'
                body['name'] = sheetName
                return self.service.files().create(body=body, media_body=media_body).execute()
        else:
            return None
Example #24
def credentails_put(cloud_path, credentials):
  service = get_service()
  bucket, filename = cloud_path.split(':',1)
  data = auth_encode(credentials)
  media = MediaIoBaseUpload(BytesIO(str(data)), mimetype="text/json")
  service.objects().insert(bucket=bucket, name=filename, media_body=media).execute()
  return filename
Example #25
def object_put(auth, path, data, mimetype='application/octet-stream'):
    bucket, filename = path.split(':', 1)
    service = get_service('storage', 'v1', auth)

    media = MediaIoBaseUpload(data,
                              mimetype=mimetype,
                              chunksize=CHUNKSIZE,
                              resumable=True)
    request = service.objects().insert(bucket=bucket,
                                       name=filename,
                                       media_body=media)

    response = None
    errors = 0
    while response is None:
        error = None
        try:
            status, response = request.next_chunk()
            if project.verbose and status:
                print "Uploaded %d%%." % int(status.progress() * 100)
        except HttpError, e:
            if e.resp.status < 500: raise
            error = e
        except (httplib2.HttpLib2Error, IOError), e:
            error = e
Example #26
    def upload(self, bucket, object, filename=None, file_handle=None,
               mime_type='application/octet-stream'):
        """
        Uploads a local file to Google Cloud Storage.

        :param bucket: The bucket to upload to.
        :type bucket: string
        :param object: The object name to set when uploading the local file.
        :type object: string
        :param filename: The local file path to the file to be uploaded.
        :type filename: string
        :param file_handle: The source of the bytes to upload.
        :type file_handle: io.Base or file object
        :param mime_type: The MIME type to set when uploading the file.
        :type mime_type: string
        """
        service = self.get_conn()
        if not (filename or file_handle):
            raise ValueError('Supply either filename or file_handle!')
        if filename:
            media = MediaFileUpload(filename, mime_type)
        else:
            media = MediaIoBaseUpload(file_handle, mime_type)
        try:
            service \
                .objects() \
                .insert(bucket=bucket, name=object, media_body=media) \
                .execute()
            return True
        except errors.HttpError as ex:
            if ex.resp['status'] == '404':
                return False
            raise
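A hedged usage sketch, assuming hook is an instance of the class that defines this upload method; bucket and object names are placeholders:

import io

# Upload from a local path...
hook.upload('my-bucket', 'backups/dump.sql', filename='/tmp/dump.sql')
# ...or from an in-memory file handle.
hook.upload('my-bucket', 'notes/readme.txt',
            file_handle=io.BytesIO(b'hello'), mime_type='text/plain')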
Example #27
    def test_media_io_base_stream_chunksize_resume(self):
        self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
        zoo = build('zoo', 'v1', http=self.http)

        try:
            import io

            # Set up a seekable stream and try to upload in chunks.
            fd = io.BytesIO(b'0123456789')
            media_upload = MediaIoBaseUpload(fd=fd,
                                             mimetype='text/plain',
                                             chunksize=5,
                                             resumable=True)

            request = zoo.animals().insert(media_body=media_upload, body=None)

            # The single chunk fails, pull the content sent out of the exception.
            http = HttpMockSequence([
                ({
                    'status': '200',
                    'location': 'http://upload.example.com'
                }, ''),
                ({
                    'status': '400'
                }, 'echo_request_body'),
            ])

            try:
                body = request.execute(http=http)
            except HttpError as e:
                self.assertEqual('01234', e.content)

        except ImportError:
            pass
Example #28
    def test_media_io_base_next_chunk_retries(self):
        try:
            import io
        except ImportError:
            return

        f = open(datafile('small.png'), 'r')
        fd = io.BytesIO(f.read())
        upload = MediaIoBaseUpload(fd=fd,
                                   mimetype='image/png',
                                   chunksize=500,
                                   resumable=True)

        # Simulate 5XXs for both the request that creates the resumable upload and
        # the upload itself.
        http = HttpMockSequence([
            ({
                'status': '500'
            }, ''),
            ({
                'status': '500'
            }, ''),
            ({
                'status': '503'
            }, ''),
            ({
                'status': '200',
                'location': 'location'
            }, ''),
            ({
                'status': '500'
            }, ''),
            ({
                'status': '500'
            }, ''),
            ({
                'status': '503'
            }, ''),
            ({
                'status': '200'
            }, '{}'),
        ])

        model = JsonModel()
        uri = u'https://www.googleapis.com/someapi/v1/upload/?foo=bar'
        method = u'POST'
        request = HttpRequest(http,
                              model.response,
                              uri,
                              method=method,
                              headers={},
                              resumable=upload)

        sleeptimes = []
        request._sleep = lambda x: sleeptimes.append(x)
        request._rand = lambda: 10

        request.execute(num_retries=3)
        self.assertEqual([20, 40, 80, 20, 40, 80], sleeptimes)
Example #29
    def InsertRowData(self, tableObject, rowDictionary):

        sump = io.StringIO()
        w = csv.writer(sump, quoting=csv.QUOTE_ALL)
        w.writerows(rowDictionary)
        mu = MediaIoBaseUpload(sump, mimetype="application/octet-stream")
        return self.service.table().importRows(tableId=tableObject["tableId"],
                                               media_body=mu).execute()
Example #30
 def execute(self):
     media_body = MediaIoBaseUpload(self.reader,
                                    mimetype=mimetypes.guess_type(
                                        self.args["name"])[0])
     self.service.files().create(
         body=self.args,
         media_body=media_body,
     ).execute()
Example #31
 def upload_csv_under_folder(self, csv_upload_name, file_in_memory,
                             folder_id):
     file_metadata = dict(name=csv_upload_name, parents=[folder_id])
     media_body = MediaIoBaseUpload(io.BytesIO(
         file_in_memory.getvalue().encode('utf-8')),
                                    mimetype='text/csv')
     self.google_drive_api.files().create(body=file_metadata,
                                          media_body=media_body).execute()
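The method encodes the buffer's contents to UTF-8 itself, so callers pass a text-mode buffer. A sketch, assuming client is an instance of the wrapping class and FOLDER_ID is a Drive folder ID:

import csv
import io

buf = io.StringIO()
csv.writer(buf).writerows([['name', 'score'], ['alice', '10']])
client.upload_csv_under_folder('scores.csv', buf, 'FOLDER_ID')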
Example #32
 def _upload_image(self, url):
     """
     @return: media_object
     """
     resp = urlfetch.fetch(url, deadline=20)
     media = MediaIoBaseUpload(io.BytesIO(resp.content),
                               mimetype='image/jpeg',
                               resumable=True)
     return media
Example #33
def initialize_upload(youtube,
                      file,
                      mimetype,
                      title,
                      description,
                      keywords=None,
                      category=22,
                      privacyStatus='private'):
    tags = None
    if keywords:
        tags = keywords.split(",")

    body = dict(snippet=dict(title=title,
                             description=description,
                             tags=tags,
                             categoryId=category),
                status=dict(privacyStatus=privacyStatus))

    fh = io.BytesIO()
    writer = io.BufferedWriter(fh)

    # try:
    #     os.mkfifo('my_fifo')
    # except FileExistsError:
    #     pass
    # fh = open('my_fifo', 'w', os.O_WRONLY|os.O_NONBLOCK)
    downloader = MediaIoBaseDownload(writer, file, chunksize=1024 * 1024)
    #o = os.open('my_fifo', os.O_RDONLY | os.O_NONBLOCK)
    reader = io.BufferedReader(fh)

    # Call the API's videos.insert method to create and upload the video.
    insert_request = youtube.videos().insert(
        part=",".join(body.keys()),
        body=body,
        # The chunksize parameter specifies the size of each chunk of data, in
        # bytes, that will be uploaded at a time. Set a higher value for
        # reliable connections as fewer chunks lead to faster uploads. Set a lower
        # value for better recovery on less reliable connections.
        #
        # Setting "chunksize" equal to -1 in the code below means that the entire
        # file will be uploaded in a single HTTP request. (If the upload fails,
        # it will still be retried where it left off.) This is usually a best
        # practice, but if you're using Python older than 2.6 or if you're
        # running on App Engine, you should set the chunksize to something like
        # 1024 * 1024 (1 megabyte).
        media_body=MediaIoBaseUpload(reader,
                                     chunksize=1024 * 1024,
                                     resumable=True,
                                     mimetype=mimetype))

    if "id" in insert_request:
        print("Video id", insert_request["id"])
    else:
        print("id not in insert_request")

    resumable_upload(insert_request, downloader, writer)
Example #34
 def test_media_io_base_upload_from_file_object(self):
     f = open(datafile("small.png"), "r")
     upload = MediaIoBaseUpload(fh=f, mimetype="image/png", chunksize=500, resumable=True)
     self.assertEqual("image/png", upload.mimetype())
     self.assertEqual(190, upload.size())
     self.assertEqual(True, upload.resumable())
     self.assertEqual(500, upload.chunksize())
     self.assertEqual("PNG", upload.getbytes(1, 3))
     f.close()
Example #35
 def test_media_io_base_upload_from_file_object(self):
   f = open(datafile('small.png'), 'r')
   upload = MediaIoBaseUpload(
       fd=f, mimetype='image/png', chunksize=500, resumable=True)
   self.assertEqual('image/png', upload.mimetype())
   self.assertEqual(190, upload.size())
   self.assertEqual(True, upload.resumable())
   self.assertEqual(500, upload.chunksize())
   self.assertEqual('PNG', upload.getbytes(1, 3))
   f.close()
Example #36
    def test_media_io_base_upload_from_string_io(self):
        f = open(datafile("small.png"), "r")
        fh = StringIO.StringIO(f.read())
        f.close()

        upload = MediaIoBaseUpload(fh=fh, mimetype="image/png", chunksize=500, resumable=True)
        self.assertEqual("image/png", upload.mimetype())
        self.assertEqual(None, upload.size())
        self.assertEqual(True, upload.resumable())
        self.assertEqual(500, upload.chunksize())
        self.assertEqual("PNG", upload.getbytes(1, 3))
        f.close()
Example #37
    def test_media_io_base_upload_from_file_io(self):
        try:
            import io

            fh = io.FileIO(datafile("small.png"), "r")
            upload = MediaIoBaseUpload(fh=fh, mimetype="image/png", chunksize=500, resumable=True)
            self.assertEqual("image/png", upload.mimetype())
            self.assertEqual(190, upload.size())
            self.assertEqual(True, upload.resumable())
            self.assertEqual(500, upload.chunksize())
            self.assertEqual("PNG", upload.getbytes(1, 3))
        except ImportError:
            pass
Example #38
  def test_media_io_base_upload_from_string_io(self):
    f = open(datafile('small.png'), 'r')
    fd = StringIO.StringIO(f.read())
    f.close()

    upload = MediaIoBaseUpload(
        fd=fd, mimetype='image/png', chunksize=500, resumable=True)
    self.assertEqual('image/png', upload.mimetype())
    self.assertEqual(None, upload.size())
    self.assertEqual(True, upload.resumable())
    self.assertEqual(500, upload.chunksize())
    self.assertEqual('PNG', upload.getbytes(1, 3))
    f.close()
  def test_media_io_base_upload_from_file_io(self):
    try:
      import io

      fd = io.FileIO(datafile('small.png'), 'r')
      upload = MediaIoBaseUpload(
          fd=fd, mimetype='image/png', chunksize=500, resumable=True)
      self.assertEqual('image/png', upload.mimetype())
      self.assertEqual(190, upload.size())
      self.assertEqual(True, upload.resumable())
      self.assertEqual(500, upload.chunksize())
      self.assertEqual('PNG', upload.getbytes(1, 3))
    except ImportError:
      pass