  def test_media_io_base_upload_serializable(self):
    f = open(datafile('small.png'), 'rb')
    upload = MediaIoBaseUpload(fd=f, mimetype='image/png')

    try:
      json = upload.to_json()
      self.fail('MediaIoBaseUpload should not be serializable.')
    except NotImplementedError:
      pass
  def test_media_io_base_upload_streamable(self):
    try:
      import io

      fd = io.BytesIO(b'stuff')
      upload = MediaIoBaseUpload(
          fd=fd, mimetype='image/png', chunksize=500, resumable=True)
      self.assertEqual(True, upload.has_stream())
      self.assertEqual(fd, upload.stream())
    except ImportError:
      pass
  def test_media_io_base_upload_from_file_io(self):
    fd = FileIO(datafile('small.png'), 'r')
    upload = MediaIoBaseUpload(
        fd=fd, mimetype='image/png', chunksize=500, resumable=True)
    self.assertEqual('image/png', upload.mimetype())
    self.assertEqual(190, upload.size())
    self.assertEqual(True, upload.resumable())
    self.assertEqual(500, upload.chunksize())
    self.assertEqual(b'PNG', upload.getbytes(1, 3))
  def test_media_io_base_upload_from_string_io(self):
    f = open(datafile('small.png'), 'rb')
    fd = StringIO.StringIO(f.read())
    f.close()

    upload = MediaIoBaseUpload(
        fd=fd, mimetype='image/png', chunksize=500, resumable=True)
    self.assertEqual('image/png', upload.mimetype())
    self.assertEqual(190, upload.size())
    self.assertEqual(True, upload.resumable())
    self.assertEqual(500, upload.chunksize())
    self.assertEqual('PNG', upload.getbytes(1, 3))
  def test_media_io_base_upload_from_bytes(self):
    try:
      import io

      f = open(datafile('small.png'), 'rb')
      fd = io.BytesIO(f.read())
      f.close()
      upload = MediaIoBaseUpload(
          fd=fd, mimetype='image/png', chunksize=500, resumable=True)
      self.assertEqual('image/png', upload.mimetype())
      self.assertEqual(190, upload.size())
      self.assertEqual(True, upload.resumable())
      self.assertEqual(500, upload.chunksize())
      self.assertEqual(b'PNG', upload.getbytes(1, 3))
    except ImportError:
      pass
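These tests only construct and inspect the upload object; a minimal sketch of driving one against the Drive v3 API follows, assuming `creds` holds valid credentials (the service, file name, and payload are illustrative assumptions, not part of the test suite):

import io

from googleapiclient.discovery import build
from googleapiclient.http import MediaIoBaseUpload

service = build('drive', 'v3', credentials=creds)  # assumed credentials

fd = io.BytesIO(b'example payload')
media = MediaIoBaseUpload(fd, mimetype='application/octet-stream',
                          chunksize=1024 * 1024, resumable=True)
request = service.files().create(body={'name': 'example.bin'},
                                 media_body=media)

response = None
while response is None:
    # next_chunk() returns (status, response); response stays None until the
    # last chunk is accepted.
    status, response = request.next_chunk()
    if status:
        print('Uploaded %d%%' % int(status.progress() * 100))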
Example #7
 def test_media_io_base_upload_streamable(self):
   fd = BytesIO(b'stuff')
   upload = MediaIoBaseUpload(
       fd=fd, mimetype='image/png', chunksize=500, resumable=True)
   self.assertEqual(True, upload.has_stream())
   self.assertEqual(fd, upload.stream())
Example #8
    def close(self):
        super().close()  # close the file so that it's readable for upload
        if self.parsedMode.writing:
            # google doesn't accept the fractional second part
            now = datetime.utcnow().replace(microsecond=0).isoformat() + "Z"
            onlineMetadata = {"modifiedTime": now}

            with open(self.localPath, "rb") as f:
                dataToWrite = f.read()
            debug(f"About to upload data: {dataToWrite}")

            if len(dataToWrite) > 0:
                upload = MediaFileUpload(self.localPath, resumable=True)
                if self.thisMetadata is None:
                    debug("Creating new file")
                    onlineMetadata.update({
                        "name": basename(self.path),
                        "parents": [self.parentMetadata["id"]],
                        "createdTime": now,
                    })
                    request = self.fs.drive.files().create(body=onlineMetadata,
                                                           media_body=upload)
                else:
                    debug("Updating existing file")
                    request = self.fs.drive.files().update(
                        fileId=self.thisMetadata["id"],
                        body={},
                        media_body=upload)

                response = None
                while response is None:
                    status, response = request.next_chunk()
                    debug(f"{status}: {response}")
                # MediaFileUpload doesn't close its file handle, so we have to work around it (https://github.com/googleapis/google-api-python-client/issues/575)
                upload._fd.close()  # pylint: disable=protected-access
            else:
                fh = BytesIO(b"")
                media = MediaIoBaseUpload(fh,
                                          mimetype="application/octet-stream",
                                          chunksize=-1,
                                          resumable=False)
                if self.thisMetadata is None:
                    onlineMetadata.update({
                        "name": basename(self.path),
                        "parents": [self.parentMetadata["id"]],
                        "createdTime": now,
                    })
                    createdFile = self.fs.drive.files().create(
                        body=onlineMetadata, media_body=media).execute(
                            num_retries=self.fs.retryCount)
                    debug(f"Created empty file: {createdFile}")
                else:
                    updatedFile = self.fs.drive.files().update(
                        fileId=self.thisMetadata["id"],
                        body={},
                        media_body=media).execute(
                            num_retries=self.fs.retryCount)
                    debug(f"Updated file to empty: {updatedFile}")
        remove(self.localPath)
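The `upload._fd.close()` line above pokes at a private attribute. A hedged alternative sketch: open the handle yourself and pass it to MediaIoBaseUpload, so a `with` block owns closing it (`self.fs.drive`, `self.localPath`, and `self.thisMetadata` mirror the method above):

from googleapiclient.http import MediaIoBaseUpload

with open(self.localPath, 'rb') as fd:
    upload = MediaIoBaseUpload(fd, mimetype='application/octet-stream',
                               resumable=True)
    request = self.fs.drive.files().update(
        fileId=self.thisMetadata['id'], body={}, media_body=upload)
    response = None
    while response is None:
        status, response = request.next_chunk()
# the handle is closed here regardless of how the upload ended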
Example #9
def file_create(auth, name, filename, data, parent=None):
  """ Checks if file with name already exists ( outside of trash ) and 
    if not, uploads the file.  Determines filetype based on filename extension
    and attempts to map to Google native such as Docs, Sheets, Slides, etc...

    For example:
    -  ```file_create('user', 'Sample Document', 'sample.txt', BytesIO('File contents'))``` 
    -  Creates a Google Document object in the user's drive.

    -  ```file_Create('user', 'Sample Sheet', 'sample.csv', BytesIO('col1,col2\nrow1a,row1b\n'))````
    -  Creates a Google Sheet object in the user's drive.

    See: https://developers.google.com/drive/api/v3/manage-uploads 

    ### Args:
    -  * auth: (string) specify 'service' or 'user' to toggle between credentials used to access
    -  * name: (string) name of file to create, used as key to check if file exists
    -  * filename: ( string) specified as "file.extension" only to automate detection of mime type.
    -  * data: (BytesIO) any file like object that can be read from
    -  * parent: (string) the Google Drive to upload the file to

    ### Returns:
    -  * JSON specification of file created or existing.

    """

  # attempt to find the file by name ( not in trash )
  drive_file = file_find(auth, name, parent)

  # if file exists, return it, prevents obliterating user changes
  if drive_file:
    if project.verbose: print('Drive: File exists.')

  # if file does not exist, create it
  else:
    if project.verbose: print('Drive: Creating file.')

    # file mime is used for upload / fallback
    # drive mime attempts to map to a native Google format
    file_mime = mimetypes.guess_type(filename, strict=False)[0]
    drive_mime = about('importFormats')['importFormats'].get(file_mime, file_mime)[0]

    if project.verbose: print('Drive Mimes:', file_mime, drive_mime)

    # construct upload object, and stream upload in chunks
    body = {
      'name':name, 
      'parents' : [parent] if parent else [],
      'mimeType': drive_mime,
    }
  
    media = MediaIoBaseUpload(
      BytesIO(data or b' '), # uploading empty data causes a BAD REQUEST error
      mimetype=file_mime,
      chunksize=CHUNKSIZE,
      resumable=True
    )

    drive_file = API_Drive(auth).files().create(
      body=body,
      media_body=media,
      fields='id'
    ).execute()
  
  return drive_file
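A hedged usage sketch, assuming a configured StarThinker `project` and the helpers above in scope; the names and CSV contents are placeholders:

# create (or fetch, if one already exists outside the trash) a Google Sheet
sheet = file_create(
  'user',                        # use user credentials
  'Sample Sheet',                # Drive name, also the existence key
  'sample.csv',                  # extension drives the mime mapping
  b'col1,col2\nrow1a,row1b\n',   # raw bytes to stream
)
print(sheet['id'])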
Example #10
 def _photo_file_upload(self, f_io):
     media = MediaIoBaseUpload(f_io, mimetype='image/jpeg')
     return media
Example #11
def upload_docx(drive, doc_id: str, body: bytes):
    drive.files().update(
        fileId=doc_id,
        media_body=MediaIoBaseUpload(io.BytesIO(body), mimetype=DOCX_MIMETYPE),
    ).execute()
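A hedged usage sketch; `drive` is assumed to be a built Drive v3 resource, and DOCX_MIMETYPE is assumed to hold the standard .docx MIME type:

import io

# assumed value of the module constant
DOCX_MIMETYPE = ('application/vnd.openxmlformats-officedocument'
                 '.wordprocessingml.document')

with open('report.docx', 'rb') as f:  # placeholder local file
    payload = f.read()
upload_docx(drive, 'TARGET_FILE_ID', payload)  # placeholder Drive file id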
Example #12
def io_to_table(auth,
                project_id,
                dataset_id,
                table_id,
                data_bytes,
                source_format='CSV',
                schema=None,
                skip_rows=0,
                disposition='WRITE_TRUNCATE',
                wait=True):

    # if data exists, write data to table
    data_bytes.seek(0, 2)
    if data_bytes.tell() > 0:
        data_bytes.seek(0)

        media = MediaIoBaseUpload(data_bytes,
                                  mimetype='application/octet-stream',
                                  resumable=True,
                                  chunksize=BIGQUERY_CHUNKSIZE)

        body = {
            'configuration': {
                'load': {
                    'destinationTable': {
                        'projectId': project_id,
                        'datasetId': dataset_id,
                        'tableId': table_id,
                    },
                    'sourceFormat': source_format,  # CSV, NEWLINE_DELIMITED_JSON
                    'writeDisposition': disposition,  # WRITE_TRUNCATE, WRITE_APPEND, WRITE_EMPTY
                    'autodetect': True,
                    'allowJaggedRows': True,
                    'allowQuotedNewlines': True,
                    'ignoreUnknownValues': True,
                }
            }
        }

        if schema:
            body['configuration']['load']['schema'] = {'fields': schema}
            body['configuration']['load']['autodetect'] = False

        if disposition == 'WRITE_APPEND':
            body['configuration']['load']['autodetect'] = False

        if source_format == 'CSV':
            body['configuration']['load']['skipLeadingRows'] = skip_rows

        job = API_BigQuery(auth).jobs().insert(
            projectId=project.id, body=body,
            media_body=media).execute(run=False)
        execution = job.execute()

        response = None
        while response is None:
            status, response = job.next_chunk()
            if project.verbose and status:
                print('Uploaded %d%%.' % int(status.progress() * 100))
        if project.verbose:
            print('Uploaded 100%')

        if wait:
            job_wait(auth, execution)
        else:
            return execution

    # if there is no data and the disposition is WRITE_TRUNCATE, clear the table
    elif disposition == 'WRITE_TRUNCATE':
        if project.verbose:
            print('BIGQUERY: No data, clearing table.')
        table_create(auth, project_id, dataset_id, table_id, schema)
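A hedged usage sketch, assuming a configured StarThinker `project` and 'service' credentials; ids and rows are placeholders:

from io import BytesIO

rows = BytesIO(b'name,count\nalpha,1\nbeta,2\n')
io_to_table(
    'service',                     # auth mode
    'my-project',                  # placeholder project id
    'my_dataset',                  # placeholder dataset id
    'my_table',                    # placeholder table id
    rows,
    source_format='CSV',
    skip_rows=1,                   # skip the header row
    disposition='WRITE_TRUNCATE',  # replace any existing rows
)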
Example #13

def io_to_table(auth,
                project_id,
                dataset_id,
                table_id,
                data_bytes,
                source_format='CSV',
                schema=None,
                skip_rows=0,
                disposition='WRITE_TRUNCATE',
                wait=True):

    # if data exists, write data to table
    data_bytes.seek(0, 2)
    if data_bytes.tell() > 0:
        data_bytes.seek(0)

        media = MediaIoBaseUpload(data_bytes,
                                  mimetype='application/octet-stream',
                                  resumable=True,
                                  chunksize=BIGQUERY_CHUNKSIZE)

        body = {
            'configuration': {
                'load': {
                    'destinationTable': {
                        'projectId': project_id,
                        'datasetId': dataset_id,
                        'tableId': table_id,
                    },
                    'sourceFormat': source_format,
                    'writeDisposition': disposition,
                    'autodetect': True,
                    'allowJaggedRows': True,
                    'allowQuotedNewlines': True,
                    'ignoreUnknownValues': True,
                }
            }
        }

        if schema:
            body['configuration']['load']['schema'] = {'fields': schema}
            body['configuration']['load']['autodetect'] = False

        if disposition == 'WRITE_APPEND':
            body['configuration']['load']['autodetect'] = False

        if source_format == 'CSV':
            body['configuration']['load']['skipLeadingRows'] = skip_rows

        job = API_BigQuery(auth).jobs().insert(
            projectId=project.id, body=body,
            media_body=media).execute(run=False)
        execution = job.execute()

        response = None
        while response is None:
            status, response = job.next_chunk()
            if project.verbose and status:
                print("Uploaded %d%%." % int(status.progress() * 100))
        if project.verbose: print("Uploaded 100%")
        if wait: job_wait(auth, job.execute())
        else: return job

    # if there is no data and the disposition is WRITE_TRUNCATE, clear the table
    elif disposition == 'WRITE_TRUNCATE':
        if project.verbose: print("BIGQUERY: No data, clearing table.")

        body = {
            "tableReference": {
                "projectId": project_id,
                "datasetId": dataset_id,
                "tableId": table_id
            },
            "schema": {
                "fields": schema
            }
        }
        # change project_id to be project.id, better yet project.cloud_id from JSON
        API_BigQuery(auth).tables().insert(projectId=project.id,
                                           datasetId=dataset_id,
                                           body=body).execute()
Example #14

    def close(self):
        super().close()  # close the file so that it's readable for upload
        if self.parsedMode.writing:
            # google doesn't accept the fractional second part
            now = datetime.utcnow().replace(microsecond=0).isoformat() + 'Z'
            uploadMetadata = {'modifiedTime': now}
            if self.thisMetadata is None:
                uploadMetadata.update({
                    'name': basename(self.path),
                    'parents': [self.parentMetadata['id']],
                    'createdTime': now
                })
                if 'createdDateTime' in self.options:
                    uploadMetadata.update({
                        'createdTime': self.options['createdDateTime'].replace(
                            microsecond=0).isoformat() + 'Z'
                    })

            with open(self.localPath, 'rb') as f:
                dataToWrite = f.read()
            _log.debug(f'About to upload data: {dataToWrite}')

            if len(dataToWrite) > 0:
                upload = MediaFileUpload(self.localPath, resumable=True)
                if self.thisMetadata is None:
                    _log.debug('Creating new file')
                    request = self.fs.google_resource().files().create(
                        body=uploadMetadata,
                        media_body=upload,
                        **self.fs._file_kwargs,  # pylint: disable=protected-access
                    )
                else:
                    _log.debug('Updating existing file')
                    request = self.fs.google_resource().files().update(
                        fileId=self.thisMetadata['id'],
                        body={},
                        media_body=upload,
                        **self.fs._file_kwargs,  # pylint: disable=protected-access
                    )

                response = None
                while response is None:
                    status, response = request.next_chunk()
                    _log.debug(f'{status}: {response}')
            else:
                fh = BytesIO(b'')
                media = MediaIoBaseUpload(fh,
                                          mimetype='application/octet-stream',
                                          chunksize=-1,
                                          resumable=False)
                if self.thisMetadata is None:
                    createdFile = self.fs.google_resource().files().create(
                        body=uploadMetadata,
                        media_body=media,
                        **self.fs._file_kwargs,  # pylint: disable=protected-access
                    ).execute(num_retries=self.fs.retryCount)
                    _log.debug(f'Created empty file: {createdFile}')
                else:
                    updatedFile = self.fs.google_resource().files().update(
                        fileId=self.thisMetadata['id'],
                        body={},
                        media_body=media,
                        **self.fs._file_kwargs,  # pylint: disable=protected-access
                    ).execute(num_retries=self.fs.retryCount)
                    _log.debug(f'Updated file to empty: {updatedFile}')
        remove(self.localPath)
Example #15
 def store_file_from_memory(self, key, memstring, metadata=None, extra_props=None,  # pylint: disable=arguments-differ
                            cache_control=None, mimetype=None):
     upload = MediaIoBaseUpload(
         BytesIO(memstring), mimetype or "application/octet-stream", chunksize=UPLOAD_CHUNK_SIZE, resumable=True
     )
     return self._upload(upload, key, self.sanitize_metadata(metadata), extra_props, cache_control=cache_control)
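A hedged usage sketch; `transfer` stands in for an instance of the storage class this method belongs to:

transfer.store_file_from_memory(
    'backups/state.json',            # object key (placeholder)
    b'{"ok": true}',                 # raw bytes to upload
    metadata={'source': 'example'},
    mimetype='application/json',
)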
Example #16
 def upload_file(self, io_stream, mime_type, name):
     file_metadata = {"name": name}
     media = MediaIoBaseUpload(io_stream, mime_type, -1)
     return self.service.files().create(
         body=file_metadata, media_body=media,
         fields='id').execute().get('id')
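A hedged usage sketch; `client` stands in for an instance of the wrapper class above:

import io

file_id = client.upload_file(
    io.BytesIO(b'hello world'),  # any readable stream works
    'text/plain',
    'hello.txt',
)
print(file_id)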
Example #17
 def _video_file_upload(self, f_io):
     media = MediaIoBaseUpload(f_io, mimetype='video/mp4')
     return media
Example #18
        with open(shop_config["json_path"], "wb+") as f:
            f.write(shop_json)

    if "tinfoil_path" in shop_config and shop_tinfoil:
        print("  Tinfoil:\n    Location: {}\n    MD5: {}".format(
            shop_config["tinfoil_path"],
            md5(shop_tinfoil).hexdigest()))
        with open(shop_config["tinfoil_path"], "wb+") as f:
            f.write(shop_tinfoil)

    if "file_id" in shop_config or args.c is not None:
        mimetype = "application/json"
        if shop_tinfoil:
            mimetype = "application/tinfoil"

        media = MediaIoBaseUpload(BytesIO(shop_bytes), mimetype=mimetype)
        if args.c is not None:
            body = {
                "name": "index.json",
                "mimeType": mimetype,
                "contentHints": {
                    "thumbnail": {
                        "image":
                        urlsafe_b64encode(open("tinfoil.png",
                                               "rb").read()).decode("utf8"),
                        "mimeType":
                        "image/png",
                    }
                },
            }