def get_entry(self, entry_id):
    """Fetch a single entry by ID and return it as a NormalEntry.

    Logs and re-raises any failure from client acquisition, the API call,
    or normalization.
    """

    try:
        client = self.get_client()
    # Narrowed from a bare `except:` so that KeyboardInterrupt/SystemExit
    # are not swallowed into a logged "error" path.
    except Exception:
        self.__log.exception(
            "There was an error while acquiring the Google "
            "Drive client (get_entry).")
        raise

    try:
        entry_raw = client.files().get(fileId=entry_id).execute()
    except Exception:
        self.__log.exception("Could not get the file with ID [%s]." %
                             (entry_id))
        raise

    try:
        entry = NormalEntry('direct_read', entry_raw)
    except Exception:
        self.__log.exception("Could not normalize raw-data for entry with "
                             "ID [%s]." % (entry_id))
        raise

    return entry
def get_entry(self, entry_id):
    """Retrieve the file identified by `entry_id` and return it normalized.

    The raw response is asserted to be a 'drive#file' resource before
    normalization.
    """

    drive_client = self.__auth.get_client()

    raw_response = drive_client.files().get(fileId=entry_id).execute()
    self.__assert_response_kind(raw_response, 'drive#file')

    return NormalEntry('direct_read', raw_response)
def list_changes(self, start_change_id=None, page_token=None):
    """Get a list of the most recent changes from GD, with the earliest
    changes first. This only returns one page at a time.

    start_change_id doesn't have to be valid.. It's just the lower limit to
    what you want back. Change-IDs are integers, but are not necessarily
    sequential.

    Returns a (largest_change_id, next_page_token, changes) tuple, where
    `changes` maps change-ID -> (entry_id, was_deleted, normalized_entry).
    """

    self.__log.info("Listing changes starting at ID [%s] with page_token "
                    "[%s]." % (start_change_id, page_token))

    try:
        client = self.get_client()
    # Narrowed from bare `except:` so control-flow exceptions pass through.
    except Exception:
        self.__log.exception("There was an error while acquiring the "
                             "Google Drive client (list_changes).")
        raise

    # TODO: We expected that this reports all changes to all files. If this
    # is the case, than what's the point of the watch() call in Files?
    try:
        response = client.changes().list(
                    pageToken=page_token,
                    startChangeId=start_change_id).execute()
    except Exception:
        self.__log.exception("Problem while listing changes. Reverting to "
                             "saying that there were NO changes.")
        raise

    items = response[u'items']
    largest_change_id = int(response[u'largestChangeId'])

    # Absent on the final page of results.
    next_page_token = response.get(u'nextPageToken')

    changes = OrderedDict()
    last_change_id = None
    for item in items:
        change_id = int(item[u'id'])
        entry_id = item[u'fileId']
        was_deleted = item[u'deleted']
        entry = None if item[u'deleted'] else item[u'file']

        # We require the server to present changes in ascending
        # change-ID order.
        if last_change_id and change_id <= last_change_id:
            message = "Change-ID (%d) being processed is less-than the " \
                      "last change-ID (%d) to be processed." % \
                      (change_id, last_change_id)
            self.__log.error(message)
            raise Exception(message)

        try:
            normalized_entry = None if was_deleted \
                                    else NormalEntry('list_changes', entry)
        except Exception:
            self.__log.exception("Could not normalize entry embedded in "
                                 "change with ID (%d)." % (change_id))
            raise

        changes[change_id] = (entry_id, was_deleted, normalized_entry)
        last_change_id = change_id

    return (largest_change_id, next_page_token, changes)
def __insert_entry(self, filename, mime_type, parents, data_filepath=None,
                   modified_datetime=None, accessed_datetime=None,
                   is_hidden=False, description=None):
    """Create a new file on GD and return the normalized entry.

    Timestamps default to "now" when not given. `data_filepath`, when
    provided, is uploaded as the file's content.
    """

    if parents is None:
        parents = []

    now_phrase = get_flat_normal_fs_time_from_dt()

    if modified_datetime is None:
        modified_datetime = now_phrase

    if accessed_datetime is None:
        accessed_datetime = now_phrase

    self.__log.info("Creating file with filename [%s] under parent(s) "
                    "[%s] with mime-type [%s], mtime= [%s], atime= [%s]." %
                    (filename, ', '.join(parents), mime_type,
                     modified_datetime, accessed_datetime))

    client = self.__auth.get_client()

    body = {
        'title': filename,
        'parents': [dict(id=parent) for parent in parents],
        'mimeType': mime_type,
        'labels': {"hidden": is_hidden},
    }

    # Only send a description when one was actually given, rather than
    # always including a possibly-None value in the request-body.
    if description is not None:
        body['description'] = description

    if modified_datetime is not None:
        body['modifiedDate'] = modified_datetime

    if accessed_datetime is not None:
        body['lastViewedByMeDate'] = accessed_datetime

    args = {'body': body}

    if data_filepath:
        args['media_body'] = MediaFileUpload(filename=data_filepath,
                                             mimetype=mime_type)

    self.__log.debug("Doing file-insert with:\n%s" % (args))

    try:
        result = client.files().insert(**args).execute()
    # Narrowed from bare `except:`.
    except Exception:
        self.__log.exception("Could not insert file [%s]." % (filename))
        raise

    normalized_entry = NormalEntry('insert_entry', result)

    self.__log.info("New entry created with ID [%s]." %
                    (normalized_entry.id))

    return normalized_entry
def update_entry(self, normalized_entry, filename=None, data_filepath=None,
                 mime_type=None, parents=None, modified_datetime=None,
                 accessed_datetime=None, is_hidden=False, description=None):
    """Update an existing entry's metadata and, optionally, its content.

    Returns the updated entry, re-normalized from the server response.
    """

    if not mime_type:
        mime_type = normalized_entry.mime_type

    self.__log.debug("Updating entry [%s].", normalized_entry)

    client = self.__auth.get_client()

    body = {'mimeType': mime_type}

    if filename is not None:
        body['title'] = filename

    if parents is not None:
        body['parents'] = parents

    if is_hidden is not None:
        body['labels'] = {"hidden": is_hidden}

    if description is not None:
        body['description'] = description

    # We always push a modified-time, defaulting to "now".
    set_mtime = True
    if modified_datetime is not None:
        body['modifiedDate'] = modified_datetime
    else:
        body['modifiedDate'] = get_flat_normal_fs_time_from_dt()

    # Only touch the viewed-date when one was explicitly provided. Use
    # real booleans for the API flags (was 1/0 ints), matching the
    # boolean used for set_mtime.
    if accessed_datetime is not None:
        set_atime = True
        body['lastViewedByMeDate'] = accessed_datetime
    else:
        set_atime = False

    args = {
        'fileId': normalized_entry.id,
        'body': body,
        'setModifiedDate': set_mtime,
        'updateViewedDate': set_atime,
    }

    if data_filepath:
        args['media_body'] = MediaFileUpload(data_filepath,
                                             mimetype=mime_type)

    result = client.files().update(**args).execute()
    normalized_entry = NormalEntry('update_entry', result)

    self.__log.debug("Entry with ID [%s] updated." % (normalized_entry.id))

    return normalized_entry
def list_changes(self, start_change_id=None, page_token=None):
    """Get a list of the most recent changes from GD, with the earliest
    changes first. This only returns one page at a time.

    start_change_id doesn't have to be valid.. It's just the lower limit to
    what you want back. Change-IDs are integers, but are not necessarily
    sequential.
    """

    gd_client = self.__auth.get_client()

    response = gd_client.changes().list(
                pageToken=page_token,
                startChangeId=start_change_id).execute()

    self.__assert_response_kind(response, 'drive#changeList')

    raw_changes = response[u'items']
    if raw_changes:
        _logger.debug("We received (%d) changes to apply.",
                      len(raw_changes))

    largest_change_id = int(response[u'largestChangeId'])
    next_page_token = response.get(u'nextPageToken')

    changes = []
    last_change_id = None
    for raw_change in raw_changes:
        change_id = int(raw_change[u'id'])
        entry_id = raw_change[u'fileId']
        was_deleted = raw_change[u'deleted']

        if was_deleted:
            entry = None
            _logger.debug("CHANGE: [%s] (DELETED)", entry_id)
        else:
            entry = raw_change[u'file']
            _logger.debug("CHANGE: [%s] [%s] (UPDATED)",
                          entry_id, entry[u'title'])

        if was_deleted:
            normalized_entry = None
        else:
            normalized_entry = NormalEntry('list_changes', entry)

        changes.append((change_id,
                        (entry_id, was_deleted, normalized_entry)))

        last_change_id = change_id

    return (largest_change_id, next_page_token, changes)
def get_entry(self, entry_id):
    """Fetch a single entry by ID and return it as a NormalEntry.

    Failures from the API call or normalization are logged and re-raised.
    """

    client = self.__auth.get_client()

    try:
        entry_raw = client.files().get(fileId=entry_id).execute()
    # Narrowed from a bare `except:` so that KeyboardInterrupt/SystemExit
    # are not swallowed into a logged "error" path.
    except Exception:
        self.__log.exception("Could not get the file with ID [%s]." %
                             (entry_id))
        raise

    try:
        entry = NormalEntry('direct_read', entry_raw)
    except Exception:
        self.__log.exception("Could not normalize raw-data for entry with "
                             "ID [%s]." % (entry_id))
        raise

    return entry
def update_entry(self, normalized_entry, filename=None, data_filepath=None,
                 mime_type=None, parents=None, modified_datetime=None,
                 accessed_datetime=None, is_hidden=False, description=None):
    """Update an existing entry's metadata and, optionally, its content.

    Returns the updated entry, re-normalized from the server response.
    """

    if not mime_type:
        mime_type = normalized_entry.mime_type

    self.__log.info("Updating entry [%s]." % (normalized_entry))

    try:
        client = self.get_client()
    # Narrowed from bare `except:` so control-flow exceptions pass through.
    except Exception:
        self.__log.exception("There was an error while acquiring the "
                             "Google Drive client (update_entry).")
        raise

    body = {'mimeType': mime_type}

    if filename is not None:
        body['title'] = filename

    if parents is not None:
        body['parents'] = parents

    if is_hidden is not None:
        body['labels'] = {"hidden": is_hidden}

    if description is not None:
        body['description'] = description

    # We always push a modified-time, defaulting to "now".
    set_mtime = True
    if modified_datetime is not None:
        body['modifiedDate'] = modified_datetime
    else:
        body['modifiedDate'] = get_flat_normal_fs_time_from_dt()

    # Only touch the viewed-date when one was explicitly provided. Use
    # real booleans for the API flags (was 1/0 ints).
    if accessed_datetime is not None:
        set_atime = True
        body['lastViewedByMeDate'] = accessed_datetime
    else:
        set_atime = False

    args = {
        'fileId': normalized_entry.id,
        'body': body,
        'setModifiedDate': set_mtime,
        'updateViewedDate': set_atime,
    }

    if data_filepath:
        args['media_body'] = MediaFileUpload(data_filepath, mime_type)

    try:
        result = client.files().update(**args).execute()
    except Exception:
        self.__log.exception("Could not send update for file [%s]." %
                             (filename))
        raise

    try:
        normalized_entry = NormalEntry('update_entry', result)
    except Exception:
        self.__log.exception("Could not normalize updated entry.")
        raise

    self.__log.info("Entry with ID [%s] updated." % (normalized_entry.id))

    return normalized_entry
def list_files(self, query_contains_string=None, query_is_string=None,
               parent_id=None):
    """List files, optionally filtered by exact title, title-substring,
    and/or parent-ID. Returns a list of NormalEntry objects accumulated
    across every result-page.
    """

    self.__log.info(
        "Listing all files. CONTAINS=[%s] IS=[%s] "
        "PARENT_ID=[%s]" %
        (query_contains_string
            if query_contains_string is not None
            else '<none>',
         query_is_string
            if query_is_string is not None
            else '<none>',
         parent_id if parent_id is not None else '<none>'))

    try:
        client = self.get_client()
    # Narrowed from bare `except:` so control-flow exceptions pass through.
    except Exception:
        self.__log.exception("There was an error while acquiring the "
                             "Google Drive client (list_files).")
        raise

    query_components = []

    if parent_id:
        query_components.append("'%s' in parents" % (parent_id))

    # An exact title-match takes precedence over a substring-match.
    if query_is_string:
        query_components.append(
            "title='%s'" % (escape_filename_for_query(query_is_string)))
    elif query_contains_string:
        query_components.append(
            "title contains '%s'" %
            (escape_filename_for_query(query_contains_string)))

    # Make sure that we don't get any entries that we would have to ignore.

    hidden_flags = Conf.get('hidden_flags_list_remote')
    if hidden_flags:
        for hidden_flag in hidden_flags:
            query_components.append("%s = false" % (hidden_flag))

    query = ' and '.join(query_components) if query_components else None

    page_token = None
    page_num = 0
    entries = []
    while True:
        self.__log.debug("Doing request for listing of files with page-"
                         "token [%s] and page-number (%d): %s" %
                         (page_token, page_num, query))

        try:
            result = client.files().list(q=query,
                                         pageToken=page_token).execute()
        except Exception:
            self.__log.exception("Could not get the list of files.")
            raise

        self.__log.debug("(%d) entries were presented for page-number "
                         "(%d)." % (len(result[u'items']), page_num))

        for entry_raw in result[u'items']:
            try:
                entry = NormalEntry('list_files', entry_raw)
            except Exception:
                self.__log.exception(
                    "Could not normalize raw-data for entry "
                    "with ID [%s]." % (entry_raw[u'id']))
                raise

            entries.append(entry)

        if u'nextPageToken' not in result:
            self.__log.debug("No more pages in file listing.")
            break

        self.__log.debug("Next page-token in file-listing is [%s]." %
                         (result[u'nextPageToken']))

        page_token = result[u'nextPageToken']
        page_num += 1

    return entries
def update_entry(self, normalized_entry, filename=None, data_filepath=None,
                 mime_type=None, parents=None, modified_datetime=None,
                 accessed_datetime=None, is_hidden=False, description=None):
    """Push updated metadata for an existing entry, optionally uploading
    new content from `data_filepath`, and return the re-normalized result.
    """

    _logger.info("Updating entry [%s].", normalized_entry)

    client = self.__auth.get_client()

    # Build request-body.

    if mime_type is not None:
        effective_mime_type = mime_type
    else:
        effective_mime_type = normalized_entry.mime_type

    body = {'mimeType': effective_mime_type}

    for key, value in (('title', filename),
                       ('parents', parents),
                       ('description', description)):
        if value is not None:
            body[key] = value

    if is_hidden is not None:
        body['labels'] = {"hidden": is_hidden}

    # The modified-time is always pushed, defaulting to "now".
    set_mtime = True
    body['modifiedDate'] = modified_datetime \
                            if modified_datetime is not None \
                            else get_flat_normal_fs_time_from_dt()

    set_atime = accessed_datetime is not None
    if set_atime:
        body['lastViewedByMeDate'] = accessed_datetime

    # Build request-arguments.

    args = {
        'fileId': normalized_entry.id,
        'body': body,
        'setModifiedDate': set_mtime,
        'updateViewedDate': set_atime,
    }

    if data_filepath is not None:
        _logger.debug("We'll be sending a file in the update: [%s] [%s]",
                      normalized_entry.id, data_filepath)

        # We can only upload large files using resumable-uploads.
        args['media_body'] = \
            MediaFileUpload(data_filepath,
                            mimetype=mime_type,
                            resumable=True,
                            chunksize=_DEFAULT_UPLOAD_CHUNK_SIZE_B)
        # TODO(dustin): Documented, but does not exist.
        # args['uploadType'] = 'resumable'

    _logger.debug("Sending entry update: [%s]", normalized_entry.id)

    update_request = client.files().update(**args)
    raw_result = self.__finish_upload(normalized_entry.title,
                                      update_request,
                                      data_filepath is not None)

    updated_entry = NormalEntry('update_entry', raw_result)

    _logger.debug("Entry updated: [%s]", str(updated_entry))

    return updated_entry
def __insert_entry(self, is_file, filename, parents, mime_type,
                   data_filepath=None, modified_datetime=None,
                   accessed_datetime=None, is_hidden=False,
                   description=None):
    """Create a new entry under the given parents, optionally uploading
    content from `data_filepath`, and return the normalized result.
    """

    if parents is None:
        parents = []

    # Both timestamps default to "now".
    default_stamp = get_flat_normal_fs_time_from_dt()

    if modified_datetime is None:
        modified_datetime = default_stamp

    if accessed_datetime is None:
        accessed_datetime = default_stamp

    _logger.info(
        "Creating entry with filename [%s] under parent(s) "
        "[%s] with mime-type [%s]. MTIME=[%s] ATIME=[%s] "
        "DATA_FILEPATH=[%s]",
        filename, ', '.join(parents), mime_type, modified_datetime,
        accessed_datetime, data_filepath)

    client = self.__auth.get_client()

    ## Create request-body.

    body = {
        'title': filename,
        'parents': [{'id': parent} for parent in parents],
        'labels': {"hidden": is_hidden},
        'mimeType': mime_type,
    }

    for key, value in (('description', description),
                       ('modifiedDate', modified_datetime),
                       ('lastViewedByMeDate', accessed_datetime)):
        if value is not None:
            body[key] = value

    ## Create request-arguments.

    args = {'body': body}

    if data_filepath:
        args['media_body'] = \
            MediaFileUpload(data_filepath,
                            mimetype=mime_type,
                            resumable=True,
                            chunksize=_DEFAULT_UPLOAD_CHUNK_SIZE_B)
        # TODO(dustin): Documented, but does not exist.
        # args['uploadType'] = 'resumable'

    if gdrivefs.config.IS_DEBUG is True:
        _logger.debug("Doing file-insert with:\n%s", pprint.pformat(args))

    insert_request = client.files().insert(**args)

    response = self.__finish_upload(filename,
                                    insert_request,
                                    data_filepath is not None)

    self.__assert_response_kind(response, 'drive#file')

    new_entry = NormalEntry('insert_entry', response)

    _logger.info("New entry created with ID [%s].", new_entry.id)

    return new_entry
def __insert_entry(self, filename, mime_type, parents, data_filepath=None,
                   modified_datetime=None, accessed_datetime=None,
                   is_hidden=False, description=None):
    """Create a new file on GD and return the normalized entry.

    Timestamps default to "now" (UTC, RFC 3339). `data_filepath`, when
    provided, is uploaded as the file's content.
    """

    if parents is None:
        parents = []

    now_obj = datetime.now().replace(tzinfo=tzlocal()).astimezone(tzutc())
    now_phrase = build_rfc3339_phrase(now_obj)

    if modified_datetime is None:
        modified_datetime = now_phrase

    if accessed_datetime is None:
        accessed_datetime = now_phrase

    self.__log.info("Creating file with filename [%s] under parent(s) "
                    "[%s] with mime-type [%s], mtime= [%s], atime= [%s]." %
                    (filename, ', '.join(parents), mime_type,
                     modified_datetime, accessed_datetime))

    try:
        client = self.get_client()
    # Narrowed from bare `except:` so control-flow exceptions pass through.
    except Exception:
        self.__log.exception("There was an error while acquiring the "
                             "Google Drive client (insert_entry).")
        raise

    body = {
        'title': filename,
        'parents': [dict(id=parent) for parent in parents],
        'mimeType': mime_type,
        'labels': {"hidden": is_hidden},
    }

    # Only send a description when one was actually given, rather than
    # always including a possibly-None value in the request-body.
    if description is not None:
        body['description'] = description

    if modified_datetime is not None:
        body['modifiedDate'] = modified_datetime

    if accessed_datetime is not None:
        body['lastViewedByMeDate'] = accessed_datetime

    args = {'body': body}

    if data_filepath:
        args['media_body'] = MediaFileUpload(filename=data_filepath,
                                             mimetype=mime_type)

    self.__log.debug("Doing file-insert with:\n%s" % (args))

    try:
        result = client.files().insert(**args).execute()
    except Exception:
        self.__log.exception("Could not insert file [%s]." % (filename))
        raise

    try:
        normalized_entry = NormalEntry('insert_entry', result)
    except Exception:
        self.__log.exception("Could not normalize created entry.")
        raise

    self.__log.info("New entry created with ID [%s]." %
                    (normalized_entry.id))

    return normalized_entry