def __update_attrs(self, path, src):
    """Collect sync attributes from the source file and apply them to path.

    No-op when src is None.  Which attributes are copied is governed by
    the GsyncOptions.perms/owner/group/times flags.
    """
    if src is None:
        return

    src_info = src.get_info()
    src_stat_info = src_info.statInfo
    attrs = SyncFileAttrs()
    debug("Updating: %s" % repr(path))

    if src_stat_info is not None:
        if GsyncOptions.perms:
            attrs.mode = src_stat_info.st_mode
        if GsyncOptions.owner:
            attrs.uid = src_stat_info.st_uid
        if GsyncOptions.group:
            attrs.gid = src_stat_info.st_gid

    # Preserve the source modification time with --times, otherwise "now".
    if GsyncOptions.times:
        attrs.mtime = float(src_info.modifiedDate)
    else:
        attrs.mtime = float(time.time())

    # Access time falls back to mtime when no stat info is available.
    if src_stat_info is not None:
        attrs.atime = src_stat_info.st_atime
    else:
        attrs.atime = attrs.mtime

    self._update_attrs(path, src, attrs)
def _create_symlink(self, path, src):
    """Create a remote symlink at path mirroring src.

    Currently a stub: only logs the intent.  Symlink creation on the
    remote side is not yet implemented.
    """
    debug("Creating remote symlink: %s" % repr(path))
    if GsyncOptions.dry_run:
        return
    # TODO: implement remote symlink creation from the src link target.
def _obtain_credentials(self):
    """
    Prompts the user for authentication tokens to create a local ticket
    or token, that can be used for all future Google Drive requests.

    Raises:
        NoTTYError: when stdin is not an interactive terminal.
    """
    self._credentials = None

    # In order to gain authorization, we need to be running on a TTY.
    # Let's make sure before potentially hanging the process waiting for
    # input from a non existent user.
    if not sys.stdin.isatty():
        raise NoTTYError

    # Locate the client.json file.
    client_json = self._get_config_file("client.json")

    # Create the client.json file if not present.
    if not os.path.exists(client_json):
        try:
            from libgsync.drive.client_json import client_obj
            with open(client_json, "w") as fd:
                fd.write(json.dumps(client_obj))
        # Fix: 'except Exception, ex' is Python-2-only syntax; use the
        # 'as' form which works on both Python 2.6+ and Python 3.
        except Exception as ex:
            debug("Exception: %s" % repr(ex))
            raise
def _updateStats(self, path, src, mode, uid, gid, mtime, atime):
    """Push mode/uid/gid/atime into the remote file's stat info and set
    its modification date via the Drive API.

    Any of mode/uid/gid/atime may be None, meaning "leave unchanged".
    """
    debug("Updating remote file stats: %s" % repr(path))
    if GsyncOptions.dry_run:
        return

    info = self.getInfo(path)
    if not info:
        return

    stats = list(tuple(info.statInfo))
    # Stat tuple slots: 0=st_mode, 4=st_uid, 5=st_gid, 7=st_atime.
    for slot, value in ((0, mode), (4, uid), (5, gid), (7, atime)):
        if value is not None:
            stats[slot] = value
    info._setStatInfo(stats)

    mtime_utc = datetime.datetime.utcfromtimestamp(mtime).isoformat()
    Drive().update(
        path,
        properties={
            'description': info.description,
            'modifiedDate': mtime_utc,
        },
        options={'setModifiedDate': GsyncOptions.times},
    )
def getInfo(self, path = None):
    """Return a SyncFileInfo describing the local file at path, or None
    if the file does not exist.

    @param {str} path (default: None) path relative to this SyncFile.
    """
    path = self.getPath(path)
    debug("Fetching local file metadata: %s" % repr(path))
    try:
        # Obtain the file info, following the link
        st_info = os.stat(path)
        dirname, filename = os.path.split(path)
        if os.path.isdir(path):
            mimeType = MimeTypes.FOLDER
        else:
            mimeType = MimeTypes.get(path)
        md5Checksum = None
        if GsyncOptions.checksum:
            md5Checksum = self._md5Checksum(path)
        info = SyncFileInfo(
            None,
            filename,
            datetime.datetime.utcfromtimestamp(
                st_info.st_mtime
            ).isoformat(),
            mimeType,
            description = st_info,
            fileSize = st_info.st_size,
            md5Checksum = md5Checksum,
            path=path
        )
        # Fix: info was constructed but never returned, so callers always
        # received None even for existing files.
        return info
    # Fix: py2-only 'except OSError, e' syntax; the bound name was unused.
    except OSError:
        debug("File not found: %s" % repr(path))
        return None
def _updateFile(self, path, src):
    """Upload new content for an existing remote file, tracking progress
    in self.bytesWritten.
    """
    debug("Updating remote file: %s" % repr(path))
    totalBytesWritten = self.bytesWritten
    bytesWritten = 0
    info = src.getInfo()

    def _callback(status):
        # Progress callback: mirror the uploader's resumable progress
        # into the running byte counter on self.
        bytesWritten = int(status.resumable_progress)
        self.bytesWritten = totalBytesWritten + bytesWritten

    progress = Progress(GsyncOptions.progress, _callback)

    if GsyncOptions.dry_run:
        bytesWritten = info.fileSize
        progress(MediaUploadProgress(bytesWritten, bytesWritten))
    else:
        drive = Drive()
        info = drive.update(path, info, src.getUploader(), progress)
        if info is not None:
            # Fix: long() is Python-2-only; int() is unbounded on
            # Python 3 and handles large file sizes on Python 2 too.
            bytesWritten = int(info.get('fileSize', '0'))
        else:
            debug("Update failed")

    progress.complete(bytesWritten)
    self.bytesWritten = totalBytesWritten + bytesWritten
def create(self, path, properties):
    """Create a remote file at path with the given properties dict,
    replacing any existing file.  Returns the new entity dict, or None
    when the parent directory does not exist or the insert fails.
    """
    # Get the parent directory.
    dirname, basename = os.path.split(path)
    info = self.stat(dirname)
    if info is None:
        return None
    parentId = info.id

    # Get the file info and delete existing file.
    info = self.stat(path)
    if info is not None:
        self.delete(path)

    body = {}
    # Fix: iteritems() is Python-2-only; items() works on both.
    for k, v in properties.items():
        body[k] = str(v)
    if parentId:
        body['parents'] = [{'id': parentId}]

    try:
        ent = self.service().files().insert(
            body = body,
            media_body = ""
        ).execute()
        # Clear the cache and update the path cache
        self._clearCache(path)
        self._pcache[path] = ent
        return ent
    # Fix: py2-only 'except Exception, e' syntax.
    except Exception as e:
        debug("Creation failed: %s" % str(e))
def _update_attrs(self, path, src, attrs):
    """Apply a SyncFileAttrs bundle to the remote file at path.

    Attributes that are None are left untouched.  The modification date
    is pushed through the Drive API as an ISO-8601 UTC timestamp.
    """
    debug("Updating remote file attrs: %s" % repr(path))
    if GsyncOptions.dry_run:
        return

    info = self.get_info(path)
    if not info:
        return

    stat_list = list(tuple(info.statInfo))
    # Stat tuple slots: 0=st_mode, 4=st_uid, 5=st_gid, 7=st_atime.
    for slot, value in (
            (0, attrs.mode), (4, attrs.uid),
            (5, attrs.gid), (7, attrs.atime)):
        if value is not None:
            stat_list[slot] = value
    info.set_stat_info(stat_list)

    mtime_utc = datetime.datetime.utcfromtimestamp(
        attrs.mtime).isoformat()
    Drive().update(path, properties = {
        'description': info.description,
        'modifiedDate': mtime_utc,
    }, options = {
        'setModifiedDate': GsyncOptions.times
    })
def _create_symlink(self, path, src):
    """Stub for remote symlink creation: logs the request and returns.

    The actual symlink logic has not been implemented yet.
    """
    debug("Creating remote symlink: %s" % repr(path))
    if not GsyncOptions.dry_run:
        # TODO: resolve the link target from src and create the link.
        pass
def _update_attrs(self, path, src, attrs):
    """Apply a SyncFileAttrs bundle to the remote file at path.

    The modification time is rendered as a timezone-aware UTC timestamp
    (strftime with %z) rather than a naive isoformat() string.
    """
    debug("Updating remote file attrs: %s" % repr(path))
    if GsyncOptions.dry_run:
        return

    info = self.get_info(path)
    if not info:
        return

    stat_list = list(tuple(info.statInfo))
    # Stat tuple slots: 0=st_mode, 4=st_uid, 5=st_gid, 7=st_atime.
    for slot, value in (
            (0, attrs.mode), (4, attrs.uid),
            (5, attrs.gid), (7, attrs.atime)):
        if value is not None:
            stat_list[slot] = value
    info.set_stat_info(stat_list)

    mtime_utc = datetime.datetime.utcfromtimestamp(
        attrs.mtime).replace(
            tzinfo=tzutc()).strftime("%Y-%m-%dT%H:%M:%S.%f%z")

    Drive().update(path, properties={
        'description': info.description,
        'modifiedDate': mtime_utc,
    }, options={'setModifiedDate': GsyncOptions.times})
def _updateStats(self, path, src, mode, uid, gid, mtime, atime):
    """Update the remote file's stat info and modification date.

    None values mean "do not change".  mtime must be a POSIX timestamp;
    it is converted to an ISO-8601 UTC string for the Drive API.
    """
    debug("Updating remote file stats: %s" % repr(path))
    if GsyncOptions.dry_run:
        return

    info = self.getInfo(path)
    if not info:
        return

    new_stats = list(tuple(info.statInfo))
    if mode is not None:
        new_stats[0] = mode      # st_mode
    if uid is not None:
        new_stats[4] = uid       # st_uid
    if gid is not None:
        new_stats[5] = gid       # st_gid
    if atime is not None:
        new_stats[7] = atime     # st_atime
    info._setStatInfo(new_stats)

    mtime_utc = datetime.datetime.utcfromtimestamp(mtime).isoformat()
    Drive().update(path, properties = {
        'description': info.description,
        'modifiedDate': mtime_utc,
    }, options = {
        'setModifiedDate': GsyncOptions.times
    })
def __updateStats(self, path, src):
    """Gather mode/uid/gid/times from the source file and forward them
    to _updateStats for application to path.

    No-op when src is None.  Which attributes are gathered is governed
    by the GsyncOptions.perms/owner/group/times flags.
    """
    if src is None:
        return

    srcInfo = src.getInfo()
    debug("srcInfo = %s" % srcInfo)
    srcStatInfo = srcInfo.statInfo

    mode, uid, gid, atime, mtime = None, None, None, None, None

    if srcStatInfo is not None:
        if GsyncOptions.perms:
            mode = srcStatInfo.st_mode
        if GsyncOptions.owner:
            uid = srcStatInfo.st_uid
            if uid is not None:
                debug("Updating with uid: %d" % uid)
        if GsyncOptions.group:
            gid = srcStatInfo.st_gid
            if gid is not None:
                debug("Updating with gid: %d" % gid)

    if GsyncOptions.times:
        if srcStatInfo is not None:
            mtime = srcStatInfo.st_mtime
            atime = srcStatInfo.st_atime
        else:
            mtime = float(srcInfo.modifiedDate)
            atime = mtime
        debug("Updating with mtime: %0.2f" % mtime)
        debug("Updating with atime: %0.2f" % atime)

    # Fix: _updateStats is declared as (..., mtime, atime) but was being
    # called with (..., atime, mtime), silently swapping the two times.
    self._updateStats(path, src, mode, uid, gid, mtime, atime)
def _save_credentials(self):
    """Persist the cached OAuth credentials to storage, if any exist."""
    debug("Saving credentials...")
    if not self._credentials:
        return
    storage = self._get_credential_storage()
    if storage is not None:
        storage.put(self._credentials)
def _update_attrs(self, path, src, attrs):
    """Apply ownership, mode and timestamps from attrs to the local file.

    chown failures (e.g. insufficient privileges) are tolerated; a
    missing atime or mtime falls back to the other value.
    """
    debug("Updating local file stats: %s" % repr(path))
    if GsyncOptions.dry_run:
        return

    def _chown(uid, gid):
        # Best effort; ignore permission errors.
        try:
            os.chown(path, uid, gid)
        except OSError:  # pragma: no cover
            pass

    if attrs.uid is not None:
        _chown(attrs.uid, -1)
    if attrs.gid is not None:
        _chown(-1, attrs.gid)

    if attrs.mode is not None:
        os.chmod(path, attrs.mode)

    # Fill in whichever timestamp is missing from the other.
    if attrs.atime is None:
        attrs.atime = attrs.mtime
    if attrs.mtime is None:
        attrs.mtime = attrs.atime
    if attrs.mtime is not None:
        os.utime(path, (attrs.atime, attrs.mtime))
def _devCheck(self, dev, path):
    """Return True when path lives on device dev (or dev is None)."""
    if dev is None:
        return True
    if os.stat(path).st_dev != dev:
        debug("Not on same dev: %s" % path)
        return False
    return True
def _devCheck(self, dev, path):
    """Return True when path lives on device dev (or dev is None)."""
    same_device = True
    if dev is not None:
        same_device = os.stat(path).st_dev == dev
        if not same_device:
            debug("Not on same dev: %s" % repr(path))
    return same_device
def _get_config_file(self, name):
    """Returns the path to the gsync config file.

    An environment variable named GSYNC_<NAME> (non-alphanumerics mapped
    to underscores) overrides the default location in the config dir.
    """
    envname = re.sub(r'[^0-9A-Z]', '_', 'GSYNC_%s' % name.upper())
    default = os.path.join(self._get_config_dir(), name)
    val = os.getenv(envname, default)
    debug("Environment: %s=%s" % (envname, val))
    return val
def relativeTo(self, relpath):
    """Return relpath expressed relative to this object's base path."""
    path = self._path
    if path[-1] != "/":
        path += "/"
    # Fix: the base path was interpolated into the regex unescaped, so a
    # path containing metacharacters ('.', '+', '(', ...) could match the
    # wrong prefix or raise re.error.  Escape it first; the trailing '+'
    # still applies to the final '/' so repeated slashes are stripped.
    expr = r'^%s+' % re.escape(path)
    relpath = self.normpath(relpath)
    debug("Creating relative path from %s and %s" % (expr, relpath))
    return os.path.normpath(re.sub(expr, "", relpath + "/"))
def __del__(self):  # pragma: no cover
    """Destructor: persist any cached credentials before teardown."""
    debug("Saving credentials...")
    if self._credentials:
        storage = self._get_credential_storage()
        if storage is not None:
            storage.put(self._credentials)
    debug("My pid = %d" % os.getpid())
def __del__(self):
    """Destructor: persist any cached credentials before teardown."""
    debug("Saving credentials...")
    if self._credentials:
        storage = self._getCredentialStorage()
        if storage is not None:
            storage.put(self._credentials)
    debug("My pid = %d" % os.getpid())
def _updateStats(self, path, src, mode, uid, gid, mtime, atime):
    """Update local file stats for path.

    NOTE(review): this version only applies uid; the mode/gid/time
    parameters are accepted but unused here — presumably handled by a
    fuller variant elsewhere.  Verify against callers.
    """
    debug("Updating local file stats: %s" % repr(path))
    if GsyncOptions.dry_run:
        return
    if uid is not None:
        # Best-effort chown; ignore permission errors.
        try:
            os.chown(path, uid, -1)
        # Fix: py2-only 'except OSError, e' syntax; bound name was unused.
        except OSError:
            pass
def __init__(self):
    """Initialise an empty Drive wrapper with a fresh path cache."""
    debug("Initialising drive")
    self._service = None
    self._http = None
    self._credentials = None
    self._credential_storage = None
    self._pcache = DrivePathCache()
    debug("Initialisation complete")
def _findEntity(self, name, ents):
    """Return the first entity in ents whose 'title' equals name, else None."""
    debug("Iterating %d entities to find %s" % (len(ents), name))
    for ent in ents:
        if ent.get('title', "") == name:
            debug("Found %s" % name)
            return ent
    return None
def _createFile(self, path, src):
    """Create a remote file at path using the source file's metadata."""
    debug("Creating remote file: %s" % repr(path))
    if GsyncOptions.dry_run:
        return
    if Drive().create(path, src.getInfo()) is None:
        debug("Creation failed")
def relativeTo(self, relpath):
    """Return relpath expressed relative to this object's base path."""
    base = self._path
    if base[-1] != "/":
        base += "/"
    # Anchor pattern: the base path followed by any extra slashes.
    expr = r'^%s+' % base
    normalised = self.normpath(relpath)
    debug("Creating relative path from %s and %s" % (
        repr(expr), repr(normalised)))
    return os.path.normpath(re.sub(expr, "", normalised + "/"))
def _update_data(self, path, src):
    """Stream the source file's content into the local file at path.

    Reads chunks from the source's uploader and writes them to disk
    (unless dry_run), updating self.bytes_written and the progress
    display as it goes.  Raises if fewer bytes than expected arrive.
    """
    path = self.get_path(path)
    self.get_info(path)
    debug("Updating local file %s" % repr(path))
    uploader = src.get_uploader()
    fd = None
    bytes_written = 0
    chunk_size = uploader.chunksize()
    file_size = uploader.size()
    try:
        if not GsyncOptions.dry_run:
            # Binary mode: chunks from the uploader are raw bytes.
            fd = open(path, "wb")
        progress = Progress(GsyncOptions.progress)
        while bytes_written < file_size:
            chunk = uploader.getbytes(bytes_written, chunk_size)
            debug("len(chunk) = %d" % len(chunk))
            if not chunk:
                # Source exhausted early; the size check below reports it.
                break
            if fd is not None:
                fd.write(chunk)
            chunk_len = len(chunk)
            bytes_written += chunk_len
            self.bytes_written += chunk_len
            progress(MediaUploadProgress(bytes_written, file_size))
        debug(" Written %d bytes" % bytes_written)
        progress.complete(bytes_written)
        if bytes_written < file_size:  # pragma: no cover
            raise Exception("Got %d bytes, expected %d bytes" % (
                bytes_written, file_size
            ))
    except KeyboardInterrupt:  # pragma: no cover
        debug("Interrupted")
        raise
    except Exception as ex:  # pragma: no cover
        debug("Write failed: %s" % repr(ex))
        raise
    finally:
        # Always release the file handle, even on error paths.
        if fd is not None:
            fd.close()
def _query(self, **kwargs):
    """Query the Drive file listing, with per-parent result caching.

    Keyword args: parentId, mimeType, id, includeTrash (default False).
    Returns a list of file entity dicts.  Results for a parentId are
    served from self._gcache when present; otherwise all result pages
    are fetched and the cache is refreshed.
    """
    parentId = kwargs.get("parentId")
    mimeType = kwargs.get("mimeType")
    fileId = kwargs.get("id")
    includeTrash = kwargs.get("includeTrash", False)
    result = []
    if parentId is not None:
        debug("Checking gcache for parentId: %s" % parentId)
        cached = self._gcache.get(parentId, None)
        if cached is not None:
            # Cache hit: skip the remote query entirely.
            result.extend(cached)
            return result
    page_token = None
    service = self.service()
    query, ents = [], []
    param = {}
    # Build the Drive query string from whichever filters were given.
    if fileId is not None:
        query.append('id = "%s"' % fileId)
    elif parentId is not None:
        query.append('"%s" in parents' % parentId)
    if mimeType is not None:
        query.append('mimeType = "%s"' % mimeType)
    if not includeTrash:
        query.append('trashed = false')
    if len(query) > 0:
        param['q'] = ' and '.join(query)
    # Page through the full result set.
    while True:
        if page_token:
            param['pageToken'] = page_token
        debug("Executing query: %s" % str(param))
        files = service.files().list(**param).execute()
        debug("Query returned %d files" % len(files))
        ents.extend(files['items'])
        page_token = files.get('nextPageToken')
        if not page_token:
            break
    # NOTE(review): this caches even when parentId is None (under the
    # key None) — confirm that is intended.
    debug("Updating google cache: %s (%d items)" % (parentId, len(ents)))
    self._gcache[parentId] = ents
    debug("My pid = %d" % os.getpid())
    return ents
def _obtain_credentials(self):
    """
    Prompts the user for authentication tokens to create a local ticket
    or token, that can be used for all future Google Drive requests.

    Returns the obtained credentials object (also stored on
    self._credentials).  Raises NoTTYError when not interactive,
    FileNotFoundError when client.json cannot be created, and
    ExchangeError when the OAuth code exchange yields no credentials.
    """
    self._credentials = None

    # In order to gain authorization, we need to be running on a TTY.
    # Let's make sure before potentially hanging the process waiting for
    # input from a non existent user.
    if not sys.stdin.isatty():
        raise NoTTYError

    # Locate the client.json file.
    client_json = self._get_config_file("client.json")

    # Create the client.json file if not present.
    if not os.path.exists(client_json):
        try:
            from libgsync.drive.client_json import client_obj
            with open(client_json, "w") as fd:
                fd.write(json.dumps(client_obj))
        except Exception as ex:
            debug("Exception: %s" % repr(ex))
            raise

    if not os.path.exists(client_json):
        raise FileNotFoundError(client_json)

    # Reresh token not available through config, so let's request a new
    # one using the app client ID and secret.  Here, we need to obtain an
    # auth URL that the user will need to visit to obtain the user code
    # needed to allow us to obtain a refresh token.
    flow = flow_from_clientsecrets(
        client_json,
        scope='https://www.googleapis.com/auth/drive',
        redirect_uri='urn:ietf:wg:oauth:2.0:oob')

    auth_uri = flow.step1_get_authorize_url()
    print("Authorization is required to access your Google Drive.")
    print("Navigate to the following URL:\n%s" % auth_uri)

    # Block until the user pastes a non-empty code.
    code = ""
    while not code:
        code = six.moves.input("Type in the received code: ")

    credentials = flow.step2_exchange(code)
    if credentials is None:
        raise ExchangeError

    self._credentials = credentials
    return credentials
def get_path(self, path = None):
    """Returns the path of the SyncFile instance, or the path joined
    with the path provided.

    @param {str} path (default: None)
    """
    if path:
        debug("Joining: %s with %s" % (repr(self._path), repr(path)))
        return os.path.join(self._path, path)
    return self._path
def __init__(self):
    """Initialise the Drive wrapper and delegate setup to reinit()."""
    debug("Initialising drive")
    # Fix: self._service was assigned None twice; the duplicate
    # assignment has been removed.
    self._service = None
    self._http = None
    self._credentials = None
    self._credentialStorage = None
    self.reinit()
    debug("Initialisation complete")
def _createFile(self, path, src):
    """Create an empty local file at path (no-op under dry_run)."""
    path = self.getPath(path)
    debug("Creating local file: %s" % repr(path))
    try:
        if not GsyncOptions.dry_run:
            # Fix: the handle from open() was never closed (leaked until
            # GC); a context manager creates the file and closes it.
            with open(path, "w"):
                pass
    # Fix: py2-only 'except Exception, e' syntax.
    except Exception as e:
        debug("Creation failed: %s" % repr(e))
def get_path(self, path=None):
    """Returns the path of the SyncFile instance, or the path joined
    with the path provided.

    @param {str} path (default: None)
    """
    if not path:
        # No relative component supplied: return the base path as-is.
        return self._path
    debug("Joining: %s with %s" % (repr(self._path), repr(path)))
    joined = os.path.join(self._path, path)
    return joined
def __setattr__(self, name, value):
    """Route attribute writes: '_dict' goes to the real instance dict,
    stat-related names go through _setStatInfo, everything else lands
    in the backing _dict.
    """
    if name == "_dict":
        object.__setattr__(self, name, value)
    elif name in ("description", "statInfo"):
        self._setStatInfo(value)
    else:
        debug("Setting: %s = %s" % (name, str(value)))
        self._dict[name] = value
def __setattr__(self, name, value):
    """Route attribute writes: '_dict' goes to the real instance dict,
    stat-related names go through set_stat_info, everything else lands
    in the backing _dict.
    """
    if name == "_dict":
        object.__setattr__(self, name, value)
    elif name in ("description", "statInfo"):
        self.set_stat_info(value)
    else:
        debug("Setting: %s = %s" % (repr(name), repr(value)))
        self._dict[name] = value
def getPath(self, path = None):
    """Return the base path, or the normalised join of the stripped
    base path with the stripped relative path provided.
    """
    if path is None or path == "":
        return self._path
    base = self.strippath(self._path)
    rel = self.strippath(path)
    debug("Joining: %s with %s" % (repr(base), repr(rel)))
    joined = self.normpath(os.path.join(base, rel))
    debug(" * got: %s" % repr(joined))
    return joined
def getPath(self, path=None):
    """Return the base path, or the normalised join of the stripped
    base path with the stripped relative path provided.
    """
    if path is None or path == "":
        return self._path
    stripped_base = self.strippath(self._path)
    stripped_rel = self.strippath(path)
    debug("Joining: %s with %s" % (
        repr(stripped_base), repr(stripped_rel)))
    result = self.normpath(os.path.join(stripped_base, stripped_rel))
    debug(" * got: %s" % repr(result))
    return result
def _create_file(self, path, src):
    """Create an empty local file at path (no-op under dry_run)."""
    path = self.get_path(path)
    debug("Creating local file: %s" % repr(path))
    try:
        if not GsyncOptions.dry_run:
            # Fix: the handle from open() was never closed (leaked until
            # GC); a context manager creates the file and closes it.
            with open(path, "w"):
                pass
    # Fix: py2-only 'except Exception, ex' syntax.
    except Exception as ex:  # pragma: no cover
        debug("Creation failed: %s" % repr(ex))
def relative_to(self, relpath):
    """Returns a path that is relative to this object"""
    base = self._path
    if base[-1] != "/":
        base += "/"
    # Anchor pattern: the base path followed by any extra slashes.
    pattern = r'^%s+' % base
    normalised = self.normpath(relpath)
    debug("Creating relative path from %s and %s" % (
        repr(pattern), repr(normalised)))
    return os.path.normpath(re.sub(pattern, "", normalised + "/"))
def _md5Checksum(self, path):
    """Return the hex MD5 digest of the file at path, or None on error."""
    try:
        import hashlib
        digest = hashlib.md5()
        # Fix: the file was opened in text mode ("r"), which corrupts
        # binary data on some platforms and yields str (not bytes) on
        # Python 3; hash raw bytes instead.  Also read in chunks rather
        # than loading the entire file into memory.
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(65536), b""):
                digest.update(chunk)
        return digest.hexdigest()
    # Fix: py2-only 'except Exception, e' syntax.
    except Exception as e:
        debug.exception()
        debug("Exception: %s" % repr(e))
        return None
def _update_data(self, path, src):
    """Stream the source file's content into the local file at path.

    Reads chunks from the source's uploader and writes them to disk
    (unless dry_run), updating self.bytes_written and the progress
    display as it goes.  Raises if fewer bytes than expected arrive.
    """
    path = self.get_path(path)
    self.get_info(path)
    debug("Updating local file %s" % repr(path))
    uploader = src.get_uploader()
    fd = None
    bytes_written = 0
    chunk_size = uploader.chunksize()
    file_size = uploader.size()
    try:
        if not GsyncOptions.dry_run:
            # Fix: was opened in text mode ("w"); the uploader yields raw
            # bytes, so binary mode is required (and matches the sibling
            # implementation of this method elsewhere in the project).
            fd = open(path, "wb")
        progress = Progress(GsyncOptions.progress)
        while bytes_written < file_size:
            chunk = uploader.getbytes(bytes_written, chunk_size)
            debug("len(chunk) = %d" % len(chunk))
            if not chunk:
                break
            if fd is not None:
                fd.write(chunk)
            chunk_len = len(chunk)
            bytes_written += chunk_len
            self.bytes_written += chunk_len
            progress(MediaUploadProgress(bytes_written, file_size))
        debug(" Written %d bytes" % bytes_written)
        progress.complete(bytes_written)
        if bytes_written < file_size:  # pragma: no cover
            raise Exception("Got %d bytes, expected %d bytes" % (
                bytes_written, file_size
            ))
    except KeyboardInterrupt:  # pragma: no cover
        debug("Interrupted")
        raise
    # Fix: py2-only 'except Exception, ex' syntax.
    except Exception as ex:  # pragma: no cover
        debug("Write failed: %s" % repr(ex))
        raise
    finally:
        # Fix: the file handle was never closed; close it on all paths.
        if fd is not None:
            fd.close()
def _walk(self, path, generator, dev):
    """Walk the tree rooted at path using the supplied generator and
    synchronise each directory and file encountered.

    Entries on a different device than dev are skipped (rsync-style
    one-file-system behaviour); without --recursive only the first
    directory level is processed.
    """
    for d, dirs, files in generator(path):
        debug("Walking: %s" % repr(d))
        if not self._devCheck(dev, d):
            debug("Not on same device: %s" % repr(d))
            continue
        if not GsyncOptions.force_dest_file:
            if GsyncOptions.dirs or GsyncOptions.recursive:
                # Sync the directory but not its contents
                debug("Synchronising directory: %s" % repr(d))
                self._sync(d)
            else:
                # Neither --dirs nor --recursive: report and stop the walk.
                sys.stdout.write("skipping directory %s\n" % d)
                break
        for f in files:
            f = os.path.join(d, f)
            if not self._devCheck(dev, f):
                continue
            debug("Synchronising file: %s" % repr(f))
            self._sync(f)
        if not GsyncOptions.recursive:
            # Only the top level was requested.
            break
def run(self):
    """Split the source into base path and relative path, then walk and
    synchronise the source tree into the destination.
    """
    srcpath = self._src
    basepath, path = os.path.split(srcpath)

    if self._drive.is_drivepath(self._src):
        basepath = self._drive.normpath(basepath)

    debug("Source srcpath: %s" % srcpath)
    debug("Source basepath: %s" % basepath)
    debug("Source path: %s" % path)

    if GsyncOptions.relative:
        # Supports the foo/./bar notation in rsync.
        path = re.sub(r'^.*/\./', "", path)

    self._sync = Sync(basepath, self._dst)

    debug("Enumerating: %s" % srcpath)

    try:
        self._walk(srcpath, self._walkCallback, self._dev)
    # Fix: py2-only 'except KeyboardInterrupt, e' syntax; bound name unused.
    except KeyboardInterrupt:
        print("\nInterrupted")
        raise
def run(self):
    """
    Worker method called synchronously or as part of an asynchronous
    thread or subprocess.
    """
    srcpath = self._src
    basepath, path = os.path.split(srcpath)

    if self._drive.is_drivepath(self._src):
        basepath = self._drive.normpath(basepath)

    debug("Source srcpath: %s" % repr(srcpath))
    debug("Source basepath: %s" % repr(basepath))
    debug("Source path: %s" % repr(path))

    if GsyncOptions.relative:
        # Supports the foo/./bar notation in rsync.
        path = re.sub(r'^.*/\./', "", path)

    self._sync = Sync(basepath, self._dst)

    debug("Enumerating: %s" % repr(srcpath))

    try:
        self._walk(srcpath, self._walk_callback, self._dev)
    # Fix: py2-only 'except KeyboardInterrupt, ex' syntax; name unused.
    except KeyboardInterrupt:
        print("\nInterrupted")
        raise
def getInfo(self, path=None):
    """Return a SyncFileInfo describing the local file at path, or None
    if the file does not exist.

    @param {str} path (default: None) path relative to this SyncFile.
    """
    path = self.getPath(path)
    debug("Fetching local file metadata: %s" % repr(path))
    try:
        # Obtain the file info, following the link
        st_info = os.stat(path)
        dirname, filename = os.path.split(path)
        if os.path.isdir(path):
            mimeType = MimeTypes.FOLDER
        else:
            mimeType = MimeTypes.get(path)
        md5Checksum = None
        if GsyncOptions.checksum:
            md5Checksum = self._md5Checksum(path)
        info = SyncFileInfo(None, filename,
                            datetime.datetime.utcfromtimestamp(
                                st_info.st_mtime).isoformat(),
                            mimeType,
                            description=st_info,
                            fileSize=st_info.st_size,
                            md5Checksum=md5Checksum,
                            path=path)
        debug("Local file = %s" % repr(info), 3)
        debug("Local mtime: %s" % repr(info.modifiedDate))
        # Fix: info was constructed but never returned, so callers always
        # received None even for existing files.
        return info
    # Fix: py2-only 'except OSError, e' syntax; the bound name was unused.
    except OSError:
        debug("File not found: %s" % repr(path))
        return None
def run(self):
    """Split the source into base path and relative path, then walk and
    synchronise the source tree into the destination.
    """
    srcpath = self._src
    basepath, path = os.path.split(srcpath)

    if self._drive.is_drivepath(self._src):
        basepath = self._drive.normpath(basepath)

    debug("Source srcpath: %s" % repr(srcpath))
    debug("Source basepath: %s" % repr(basepath))
    debug("Source path: %s" % repr(path))

    if GsyncOptions.relative:
        # Supports the foo/./bar notation in rsync.
        path = re.sub(r'^.*/\./', "", path)

    self._sync = Sync(basepath, self._dst)

    debug("Enumerating: %s" % repr(srcpath))

    try:
        self._walk(srcpath, self._walkCallback, self._dev)
    # Fix: py2-only 'except KeyboardInterrupt, e' syntax; name unused.
    except KeyboardInterrupt:
        print("\nInterrupted")
        raise
def walk(self, top, topdown = True, onerror = None, followlinks = False):
    """os.walk-style traversal over this object's listdir.

    NOTE(review): this view of the function ends after the error path;
    the directory-recursion body is presumably elsewhere — confirm.
    """
    join = os.path.join
    names = None
    debug("Walking: %s" % repr(top))

    try:
        names = self.listdir(top)
    # Fix: py2-only 'except Exception, e' syntax.
    except Exception as e:
        debug.exception()
        debug("Exception: %s" % repr(e))
        # Mirror os.walk: report via onerror and stop this subtree.
        if onerror is not None:
            onerror(e)
        return
def get_uploader(self, path=None):
    """Open the remote file for reading and wrap it in a resumable
    MediaIoBaseUpload.  Raises when the file info or handle cannot
    be obtained.
    """
    info = self.get_info(path)
    if info is None:
        raise Exception("Could not obtain file information: %s" % path)

    path = self.get_path(path)
    remote = Drive()
    debug("Opening remote file for reading: %s" % repr(path))
    stream = remote.open(path, "r")
    if stream is None:
        raise Exception("Open failed: %s" % path)

    return MediaIoBaseUpload(stream, info.mimeType, resumable=True)
def update(self, path, properties, **kwargs):
    """
    Updates the content and attributes of a remote file.

    Raises FileNotFoundError when path does not exist remotely.
    Returns the updated file resource dict (or None on upload failure).
    """
    progress_callback = kwargs.get('progress_callback')
    options = kwargs.get('options', {})
    info = self.stat(path)
    if not info:
        raise FileNotFoundError(path)

    debug("Updating: %s" % repr(path))

    # Merge properties
    # Fix: iteritems() is Python-2-only; items() works on both.
    for key, val in properties.items():
        # Do not update the ID, always use the path obtained ID.
        if key == 'id':
            continue
        debug(" * with: %s = %s" % (repr(key), repr(val)))
        setattr(info, key, Drive.utf8(val))

    with self.service() as service:
        res = None
        req = service.files().update(
            fileId=info.id,
            body=info.copy(),
            setModifiedDate=options.get('setModifiedDate', False),
            newRevision=True,
            media_body=kwargs.get('media_body')
        )
        if progress_callback is None:
            res = req.execute()
        else:
            try:
                # Chunked upload: report each chunk's progress, and a
                # final 100% once the response arrives.
                while res is None:
                    debug(" * uploading next chunk...")
                    status, res = req.next_chunk()
                    if status:
                        progress_callback(status)
                    elif res:
                        file_size = int(res['fileSize'])
                        progress_callback(
                            MediaUploadProgress(file_size, file_size)
                        )
            # Fix: py2-only 'except Exception, ex' syntax.
            except Exception as ex:
                debug("Exception: %s" % str(ex))
                debug.exception()

        # Refresh the cache with the latest revision
        self._pcache.put(path, res)
        return res
def _get_config_dir(subdir=None):
    """Returns the path to the gsync config directory"""
    configdir = os.getenv(
        'GSYNC_CONFIG_DIR',
        os.path.join(os.getenv('HOME', '~'), '.gsync')
    )
    debug("Config dir = %s" % configdir)

    def _ensure(directory):
        # Create the directory with owner-only permissions if missing.
        if not os.path.exists(directory):
            os.mkdir(directory, 0o0700)

    _ensure(configdir)
    if subdir is not None:
        configdir = os.path.join(configdir, subdir)
        _ensure(configdir)
    return configdir
def _getConfigDir(self, subdir = None):
    """Returns the path to the gsync config directory, creating it (and
    the optional subdir) with owner-only permissions if missing.
    """
    configdir = os.getenv('GSYNC_CONFIG_DIR',
        os.path.join(os.getenv('HOME', '~'), '.gsync')
    )
    debug("Config dir = %s" % configdir)

    # Fix: the bare octal literal 0700 is Python-2-only syntax (a
    # SyntaxError on Python 3); 0o700 is the same value on both.
    if not os.path.exists(configdir):
        os.mkdir(configdir, 0o700)

    if subdir is not None:
        configdir = os.path.join(configdir, subdir)
        if not os.path.exists(configdir):
            os.mkdir(configdir, 0o700)

    return configdir