def get_entry(self, entry_id):
    """Fetch the metadata for a single entry by its ID.

    Asserts that the response is a 'drive#file' resource, then wraps it
    in a NormalEntry and returns it.
    """

    gd_client = self.__auth.get_client()

    response = gd_client.files().get(fileId=entry_id).execute()
    self.__assert_response_kind(response, 'drive#file')

    return gdrivefs.normal_entry.NormalEntry('direct_read', response)
def remove_entry(self, normalized_entry):
    """Delete the given entry from Drive.

    A "File not found" HttpError is translated into NameError carrying
    the entry's ID, so callers can distinguish a missing entry from
    other failures; any other exception is logged and re-raised.
    """

    _logger.info("Removing entry with ID [%s].", normalized_entry.id)

    client = self.__auth.get_client()

    try:
        # The delete response carries no useful payload, so it is
        # intentionally discarded.
        client.files().delete(fileId=normalized_entry.id).execute()
    except Exception as e:
        # Match by class-name so we don't have to import the apiclient
        # exception type here.
        if e.__class__.__name__ == 'HttpError' and \
           'File not found' in str(e):
            raise NameError(normalized_entry.id)

        _logger.exception("Could not send delete for entry with ID [%s].",
                          normalized_entry.id)

        raise

    _logger.info("Entry deleted successfully.")
def remove_entry(self, normalized_entry):
    """Delete the entry identified by the given normalized entry.

    Raises NameError(entry-ID) when the server reports "File not found";
    all other delete failures are logged and re-raised unchanged.
    """

    _logger.info("Removing entry with ID [%s].", normalized_entry.id)

    client = self.__auth.get_client()

    args = {'fileId': normalized_entry.id}

    try:
        # The API returns an empty body on delete; nothing to keep.
        client.files().delete(**args).execute()
    except Exception as e:
        # Compare by class-name to avoid importing the apiclient
        # exception type in this module.
        if e.__class__.__name__ == 'HttpError' and \
           'File not found' in str(e):
            raise NameError(normalized_entry.id)

        _logger.exception("Could not send delete for entry with ID [%s].",
                          normalized_entry.id)

        raise

    _logger.info("Entry deleted successfully.")
def truncate(self, normalized_entry):
    """Reduce the entry's content to zero bytes, keeping its mime-type.

    Returns the raw 'drive#file' response from the update call.
    """

    _logger.info("Truncating entry [%s].", normalized_entry.id)

    client = self.__auth.get_client()

    # Uploading /dev/null replaces the remote content with nothing.
    # NOTE(review): assumes a POSIX /dev/null exists — confirm if this
    # ever needs to run on Windows.
    empty_media = \
        apiclient.http.MediaFileUpload(
            '/dev/null',
            mimetype=normalized_entry.mime_type)

    response = client.files().update(
                fileId=normalized_entry.id,
                # TODO(dustin): Can we omit 'body'?
                body={},
                media_body=empty_media).execute()

    self.__assert_response_kind(response, 'drive#file')

    _logger.debug("Truncate complete: [%s]", normalized_entry.id)

    return response
def update_entry(self, normalized_entry, filename=None, data_filepath=None,
                 mime_type=None, parents=None, modified_datetime=None,
                 accessed_datetime=None, is_hidden=False, description=None):
    """Push metadata (and, optionally, new content) for an existing entry.

    When data_filepath is given, the content is sent as a resumable
    upload. Returns a fresh NormalEntry built from the server response.
    """

    _logger.info("Updating entry [%s].", normalized_entry)

    client = self.__auth.get_client()

    # Assemble the request body.

    if mime_type is None:
        mime_type = normalized_entry.mime_type

    body = {'mimeType': mime_type}

    if filename is not None:
        body['title'] = filename

    if parents is not None:
        body['parents'] = parents

    # NOTE(review): the default (False) is not None, so labels are sent
    # on every call unless the caller explicitly passes is_hidden=None.
    if is_hidden is not None:
        body['labels'] = {"hidden": is_hidden}

    if description is not None:
        body['description'] = description

    # The mtime is always written: either the caller's value or "now".
    set_mtime = True
    if modified_datetime is not None:
        body['modifiedDate'] = modified_datetime
    else:
        body['modifiedDate'] = \
            gdrivefs.time_support.get_flat_normal_fs_time_from_dt()

    set_atime = accessed_datetime is not None
    if set_atime:
        body['lastViewedByMeDate'] = accessed_datetime

    # Assemble the request arguments.

    args = {
        'fileId': normalized_entry.id,
        'body': body,
        'setModifiedDate': set_mtime,
        'updateViewedDate': set_atime,
    }

    if data_filepath is not None:
        _logger.debug("We'll be sending a file in the update: [%s] [%s]",
                      normalized_entry.id, data_filepath)

        # We can only upload large files using resumable-uploads.
        args['media_body'] = \
            apiclient.http.MediaFileUpload(
                data_filepath,
                mimetype=mime_type,
                resumable=True,
                chunksize=_DEFAULT_UPLOAD_CHUNK_SIZE_B)

        # TODO(dustin): Documented, but does not exist.
        # 'uploadType': 'resumable',

    _logger.debug("Sending entry update: [%s]", normalized_entry.id)

    request = client.files().update(**args)

    result = self.__finish_upload(
                normalized_entry.title,
                request,
                data_filepath is not None)

    normalized_entry = \
        gdrivefs.normal_entry.NormalEntry('update_entry', result)

    _logger.debug("Entry updated: [%s]", normalized_entry)

    return normalized_entry
def __insert_entry(self, is_file, filename, parents, mime_type,
                   data_filepath=None, modified_datetime=None,
                   accessed_datetime=None, is_hidden=False,
                   description=None):
    """Create a new entry on Drive and return it as a NormalEntry.

    NOTE(review): is_file is not referenced anywhere in this body — the
    entry type appears to be driven entirely by mime_type; confirm
    against callers before relying on it.
    """

    if parents is None:
        parents = []

    # Default both timestamps to the same "now" phrase.
    now_phrase = gdrivefs.time_support.get_flat_normal_fs_time_from_dt()

    if modified_datetime is None:
        modified_datetime = now_phrase

    if accessed_datetime is None:
        accessed_datetime = now_phrase

    _logger.info("Creating entry with filename [%s] under parent(s) "
                 "[%s] with mime-type [%s]. MTIME=[%s] ATIME=[%s] "
                 "DATA_FILEPATH=[%s]",
                 filename, ', '.join(parents), mime_type,
                 modified_datetime, accessed_datetime, data_filepath)

    client = self.__auth.get_client()

    ## Build the request body.

    body = {
        'title': filename,
        'parents': [dict(id=parent) for parent in parents],
        'labels': {"hidden": is_hidden},
        'mimeType': mime_type,
    }

    if description is not None:
        body['description'] = description

    # Both timestamps are always set by this point (defaulted above);
    # the guards mirror the optional nature of the arguments.
    if modified_datetime is not None:
        body['modifiedDate'] = modified_datetime

    if accessed_datetime is not None:
        body['lastViewedByMeDate'] = accessed_datetime

    ## Build the request arguments.

    args = {'body': body}

    if data_filepath:
        # Large content must go through a resumable upload.
        args['media_body'] = \
            apiclient.http.MediaFileUpload(
                data_filepath,
                mimetype=mime_type,
                resumable=True,
                chunksize=_DEFAULT_UPLOAD_CHUNK_SIZE_B)

        # TODO(dustin): Documented, but does not exist.
        # 'uploadType': 'resumable',

    if gdrivefs.config.IS_DEBUG is True:
        _logger.debug("Doing file-insert with:\n%s", pprint.pformat(args))

    request = client.files().insert(**args)

    response = self.__finish_upload(
                filename,
                request,
                data_filepath is not None)

    self.__assert_response_kind(response, 'drive#file')

    normalized_entry = \
        gdrivefs.normal_entry.NormalEntry('insert_entry', response)

    _logger.info("New entry created with ID [%s].", normalized_entry.id)

    return normalized_entry
def list_files(self, query_contains_string=None, query_is_string=None,
               parent_id=None):
    """Enumerate files, optionally filtered by title and/or parent.

    Pages through the complete result set and returns a list of
    NormalEntry objects.
    """

    _logger.info("Listing all files. CONTAINS=[%s] IS=[%s] "
                 "PARENT_ID=[%s]",
                 query_contains_string
                    if query_contains_string is not None
                    else '<none>',
                 query_is_string
                    if query_is_string is not None
                    else '<none>',
                 parent_id if parent_id is not None else '<none>')

    client = self.__auth.get_client()

    query_components = []

    if parent_id:
        query_components.append("'%s' in parents" % (parent_id))

    # An exact-title match takes precedence over a substring match.
    if query_is_string:
        query_components.append(
            "title='%s'" %
            (gdrivefs.fsutility.escape_filename_for_query(
                query_is_string)))
    elif query_contains_string:
        query_components.append(
            "title contains '%s'" %
            (gdrivefs.fsutility.escape_filename_for_query(
                query_contains_string)))

    # Make sure that we don't get any entries that we would have to ignore.
    hidden_flags = gdrivefs.conf.Conf.get('hidden_flags_list_remote')
    if hidden_flags:
        for hidden_flag in hidden_flags:
            query_components.append("%s = false" % (hidden_flag))

    query = ' and '.join(query_components) if query_components else None

    entries = []
    page_token = None
    page_num = 0

    while True:
        _logger.debug("Doing request for listing of files with page-"
                      "token [%s] and page-number (%d): %s",
                      page_token, page_num, query)

        result = \
            client.files().list(q=query, pageToken=page_token).execute()

        self.__assert_response_kind(result, 'drive#fileList')

        _logger.debug("(%d) entries were presented for page-number "
                      "(%d).", len(result['items']), page_num)

        for entry_raw in result['items']:
            entries.append(
                gdrivefs.normal_entry.NormalEntry('list_files', entry_raw))

        if 'nextPageToken' not in result:
            _logger.debug("No more pages in file listing.")
            break

        page_token = result['nextPageToken']

        _logger.debug("Next page-token in file-listing is [%s].",
                      page_token)

        page_num += 1

    return entries
def update_entry(self, normalized_entry, filename=None, data_filepath=None,
                 mime_type=None, parents=None, modified_datetime=None,
                 accessed_datetime=None, is_hidden=False, description=None):
    """Send metadata changes — and, optionally, new file content — for an
    existing entry, returning the updated entry as a NormalEntry.
    """

    _logger.info("Updating entry [%s].", normalized_entry)

    client = self.__auth.get_client()

    # Compose the metadata body for the update.

    effective_mime_type = \
        normalized_entry.mime_type \
        if mime_type is None \
        else mime_type

    body = {'mimeType': effective_mime_type}

    if filename is not None:
        body['title'] = filename

    if parents is not None:
        body['parents'] = parents

    # NOTE(review): default is False (not None), so this branch is taken
    # unless the caller passes is_hidden=None explicitly.
    if is_hidden is not None:
        body['labels'] = {"hidden": is_hidden}

    if description is not None:
        body['description'] = description

    # The modified-date is unconditionally written; fall back to "now"
    # when the caller did not supply one.
    set_mtime = True
    body['modifiedDate'] = \
        modified_datetime \
        if modified_datetime is not None \
        else gdrivefs.time_support.get_flat_normal_fs_time_from_dt()

    if accessed_datetime is not None:
        set_atime = True
        body['lastViewedByMeDate'] = accessed_datetime
    else:
        set_atime = False

    # Compose the call arguments.

    args = {
        'fileId': normalized_entry.id,
        'body': body,
        'setModifiedDate': set_mtime,
        'updateViewedDate': set_atime,
    }

    sending_data = data_filepath is not None
    if sending_data:
        _logger.debug("We'll be sending a file in the update: [%s] [%s]",
                      normalized_entry.id, data_filepath)

        # We can only upload large files using resumable-uploads.
        args.update({
            'media_body':
                apiclient.http.MediaFileUpload(
                    data_filepath,
                    mimetype=effective_mime_type,
                    resumable=True,
                    chunksize=_DEFAULT_UPLOAD_CHUNK_SIZE_B),

            # TODO(dustin): Documented, but does not exist.
            # 'uploadType': 'resumable',
        })

    _logger.debug("Sending entry update: [%s]", normalized_entry.id)

    request = client.files().update(**args)

    result = self.__finish_upload(
                normalized_entry.title, request, sending_data)

    normalized_entry = \
        gdrivefs.normal_entry.NormalEntry('update_entry', result)

    _logger.debug("Entry updated: [%s]", normalized_entry)

    return normalized_entry
def __insert_entry(self, is_file, filename, parents, mime_type,
                   data_filepath=None, modified_datetime=None,
                   accessed_datetime=None, is_hidden=False,
                   description=None):
    """Insert a new entry (with optional content upload) and return it
    as a NormalEntry.

    NOTE(review): is_file is never used in this body — presumably the
    mime-type alone decides whether a folder or file is created; verify
    against callers.
    """

    if parents is None:
        parents = []

    # Both timestamps default to the same "now" value.
    now_phrase = gdrivefs.time_support.get_flat_normal_fs_time_from_dt()

    if modified_datetime is None:
        modified_datetime = now_phrase

    if accessed_datetime is None:
        accessed_datetime = now_phrase

    _logger.info("Creating entry with filename [%s] under parent(s) "
                 "[%s] with mime-type [%s]. MTIME=[%s] ATIME=[%s] "
                 "DATA_FILEPATH=[%s]",
                 filename, ', '.join(parents), mime_type,
                 modified_datetime, accessed_datetime, data_filepath)

    client = self.__auth.get_client()

    ## Compose the request body.

    body = {
        'title': filename,
        'parents': [dict(id=parent) for parent in parents],
        'labels': {"hidden": is_hidden},
        'mimeType': mime_type,
    }

    if description is not None:
        body['description'] = description

    # Always true at this point (defaulted above); guards kept to mirror
    # the optional arguments.
    if modified_datetime is not None:
        body['modifiedDate'] = modified_datetime

    if accessed_datetime is not None:
        body['lastViewedByMeDate'] = accessed_datetime

    ## Compose the call arguments.

    args = {'body': body}

    uploading_content = bool(data_filepath)
    if uploading_content:
        # Content has to be sent as a resumable upload.
        args.update({
            'media_body':
                apiclient.http.MediaFileUpload(
                    data_filepath,
                    mimetype=mime_type,
                    resumable=True,
                    chunksize=_DEFAULT_UPLOAD_CHUNK_SIZE_B),

            # TODO(dustin): Documented, but does not exist.
            # 'uploadType': 'resumable',
        })

    if gdrivefs.config.IS_DEBUG is True:
        _logger.debug("Doing file-insert with:\n%s", pprint.pformat(args))

    request = client.files().insert(**args)

    response = self.__finish_upload(
                filename, request, data_filepath is not None)

    self.__assert_response_kind(response, 'drive#file')

    normalized_entry = \
        gdrivefs.normal_entry.NormalEntry('insert_entry', response)

    _logger.info("New entry created with ID [%s].", normalized_entry.id)

    return normalized_entry
def list_files(self, query_contains_string=None, query_is_string=None,
               parent_id=None):
    """List files matching the optional title/parent filters.

    Follows 'nextPageToken' until the listing is exhausted and returns
    all results as NormalEntry objects.
    """

    _logger.info("Listing all files. CONTAINS=[%s] IS=[%s] "
                 "PARENT_ID=[%s]",
                 query_contains_string
                    if query_contains_string is not None
                    else '<none>',
                 query_is_string
                    if query_is_string is not None
                    else '<none>',
                 parent_id if parent_id is not None else '<none>')

    client = self.__auth.get_client()

    clauses = []

    if parent_id:
        clauses.append("'%s' in parents" % (parent_id))

    # Exact title beats substring when both are supplied.
    if query_is_string:
        escaped = \
            gdrivefs.fsutility.escape_filename_for_query(query_is_string)

        clauses.append("title='%s'" % (escaped))
    elif query_contains_string:
        escaped = \
            gdrivefs.fsutility.escape_filename_for_query(
                query_contains_string)

        clauses.append("title contains '%s'" % (escaped))

    # Make sure that we don't get any entries that we would have to ignore.
    hidden_flags = gdrivefs.conf.Conf.get('hidden_flags_list_remote')
    if hidden_flags:
        for hidden_flag in hidden_flags:
            clauses.append("%s = false" % (hidden_flag))

    query = ' and '.join(clauses) if clauses else None

    entries = []
    page_token = None
    page_num = 0

    while True:
        _logger.debug("Doing request for listing of files with page-"
                      "token [%s] and page-number (%d): %s",
                      page_token, page_num, query)

        result = \
            client.files().list(q=query, pageToken=page_token).execute()

        self.__assert_response_kind(result, 'drive#fileList')

        _logger.debug("(%d) entries were presented for page-number "
                      "(%d).", len(result['items']), page_num)

        for entry_raw in result['items']:
            entry = \
                gdrivefs.normal_entry.NormalEntry('list_files', entry_raw)

            entries.append(entry)

        if 'nextPageToken' not in result:
            _logger.debug("No more pages in file listing.")
            break

        _logger.debug("Next page-token in file-listing is [%s].",
                      result['nextPageToken'])

        page_token = result['nextPageToken']
        page_num += 1

    return entries