def uploadFile(url, filePath, job, uploadType='upload'):
    """Upload a local file to Google Drive as a multipart request.

    Args:
        url: full Drive API endpoint to hit.
        filePath: absolute path of the local file to send.
        job: queue-job dict; only ``title`` is read here.
        uploadType: 'upload' issues a POST (create), 'update' issues a PUT
            (new revision of an existing file).

    Returns:
        None. Directories are skipped outright (empty-folder uploads are
        not supported by this endpoint).
    """
    if os.path.isdir(filePath):
        # ignore empty folder uploads
        return None
    # make sure all the parent folders are in drive, if not, insert them
    # (parent resolution itself is still TODO — see `para` below)
    queue = store.getState().get('driveFiles')
    pprint(queue)
    para = {
        "title": job.get('title'),
        # TODO implement nested file upload
        # "parents": [{"id": "root"}]
    }
    # `with` guarantees the handle is closed even if doRequest raises
    # (the original leaked the open file object).
    with open(filePath, 'rb') as fileHandle:
        files = {
            "data": ("metadata", json.dumps(para),
                     "application/json; charset=UTF-8"),
            "file": fileHandle,
        }
        metadata = {"files": files}
        if uploadType == 'upload':
            doRequest(url, {}, 'post', False, metadata)
        elif uploadType == 'update':
            doRequest(url, {}, 'put', False, metadata)
def create(gFile):
    """Materialise one Drive entry (file or folder) on the local disk.

    For a non-folder entry, ensures its parent directory exists and chdirs
    into it (the downloader writes into the CWD); for a folder entry, just
    creates the directory. Supported mime types are then streamed down via
    the Drive v3 media endpoint.

    Args:
        gFile: dict with at least 'mimeType', 'path', 'id' and 'title'.
    """
    gDriveDir = IOHelpers.googleDriveDirectory
    # types that can't be downloaded since they should be converted
    # TODO add support for these files
    pprint(gFile)
    # if its not a folder, we need to change the cwd
    if gFile.get('mimeType') != getMimeType('folder'):
        _path = os.path.join(gDriveDir, gFile.get('path'))
        os.makedirs(_path, exist_ok=True)
        os.chdir(_path)
    if gFile.get('mimeType') == getMimeType('folder'):
        # exist_ok=True: re-syncing an already-present folder must not raise
        # FileExistsError (matches the non-folder branch above).
        os.makedirs(os.path.join(gDriveDir, gFile.get('path')),
                    exist_ok=True)
    if getIfMimeTypeIsSupported(gFile.get('mimeType')):
        uri = ('https://www.googleapis.com/drive/v3/files/'
               + gFile.get('id') + '?alt=media')
        downloadFile(uri, gFile.get('title'), gFile.get('mimeType'))
def getFilePath(self):
    """Return this file's drive-relative path as 'a/b/c'.

    Walks the parent chain via getAllParentTitles, reverses it into
    root-first order, and appends the file's own title when it is a
    folder. Returns '' for items with no parents (root-level).
    """
    try:
        if len(self.file['parents']) <= 0:
            return ''
        gFilePath = self.getAllParentTitles(self.file, [])
        gFilePath.reverse()
        if self.file['mimeType'] == getMimeType('folder'):
            gFilePath.append(self.file['title'])
        pprint(gFilePath)
        return '/'.join(gFilePath)
    except (KeyError, TypeError):
        # Original read `except expression as identifier: pass` — a broken
        # editor-snippet placeholder that raised NameError on any failure.
        # Catch the realistic lookup failures and fall back to no path.
        return ''
def checkForError(request):
    """Log details of a failed HTTP response (status >= 400).

    Args:
        request: a response object exposing status_code, text and url.

    Returns:
        None; output goes to stdout via pprint only.
    """
    if request.status_code >= 400:
        pprint('--- DO REQUEST ERROR ---')
        pprint(request.text)
        # pprint(request.json())
        pprint(request.url)
        # Original also pprint'ed bare `url` and `metadata`, which are not
        # defined in this scope — the error path itself raised NameError.
        pprint('--- END ---')
def filterFiles(files, cache, remainderStartPoint, filterType, debug=False):
    """Return a copy of remainderStartPoint with stale entries removed.

    For every cache entry also present in `files` (by path match), drop
    from the remainder every item matching both that cache entry and the
    matched file.

    NOTE(review): this function references `self` but takes no `self`
    parameter — as written at module level it raises NameError when the
    match loop runs. It appears to be a copy of the closure nested inside
    scanDifferences (where `self` is captured); confirm which copy callers
    actually use. `filterType` is currently unused.
    """
    _r = remainderStartPoint.copy()
    _c = cache.copy()
    _f = files.copy()
    if debug:
        pprint('---')
        pprint('files')
        pprint(_f)
        pprint('---')
        pprint('cache')
        pprint(_c)
        pprint('---')
        pprint('remainder start point')
        pprint(_r)
        pprint('---')
    for c in cache:
        isInFiles = False
        item = None
        for x in files:
            if self.getIfPathMatch(c, x):
                isInFiles = True
                item = x
        if isInFiles:
            # Rebuild instead of popping while enumerating: the original
            # `_r.pop(idx)` inside `enumerate(_r)` skipped the element
            # immediately following every removal.
            _r = [
                x for x in _r
                if not (self.getIfPathMatch(x, c)
                        and self.getIfPathMatch(x, item))
            ]
    return _r
def scanDifferences(self, reference):
    """Reconcile local disk state with the Drive file store.

    Args:
        reference: 'firstRun' materialises every Drive entry locally and
            marks the full sync complete; 'compare' diffs local vs Drive
            state and enqueues download/upload jobs, then executes them.

    Raises:
        ValueError: in 'compare' mode when the initial full sync never
            finished.
    """
    # Collect every local path (files and dirs) relative to the drive root.
    # TODO(review): localPaths is currently unused below — confirm whether
    # returnLocalFilesInStore() supersedes it or it should feed the diff.
    localPaths = []
    for root, dirs, files in os.walk(IOHelpers.googleDriveDirectory,
                                     topdown=True):
        for name in files:
            lPath = (os.path.join(root, name)).replace(
                IOHelpers.googleDriveDirectory, '')
            localPaths.append(lPath)
        for name in dirs:
            lPath = (os.path.join(root, name)).replace(
                IOHelpers.googleDriveDirectory, '')
            localPaths.append(lPath)
    # since first run is explicit there's a special case for first time run.
    if reference == 'firstRun':
        _filePaths = self.returnDriveFilesInStore()
        for filePath in _filePaths:
            # so if the directory doesn't already exist, create it include
            # sub folders (and files)
            _path = os.path.join(IOHelpers.googleDriveDirectory,
                                 filePath.get('path'))
            # != folder is because files self don't get a direct path, just
            # that of their parent for using in changing CWD
            if not IOHelpers.exists(_path) or filePath.get(
                    'mimeType') != getMimeType('folder'):
                IoOperations.create(filePath)
        # complete! if errors then we already crashed. True in string
        updateJsonFile('json_data/settings.json', {'fullSync': "True"})
    elif reference == 'compare':
        _localFilePaths = self.returnLocalFilesInStore()
        _filePaths = self.returnDriveFilesInStore()
        lastSyncTime = loadJSONFile('json_data/settings.json').get(
            'lastSyncTime')
        # The setting is stored as the *string* "True"; the original
        # bool(...) cast was truthy for any non-empty string (including
        # "False"), so compare it explicitly.
        fullSync = loadJSONFile('json_data/settings.json').get(
            'fullSync') == "True"
        self.updateOperationsQueue = []
        # TODO(review): these copies are currently unused.
        googleDriveFilesCopy = _filePaths.copy()
        localFilesCopy = _localFilePaths.copy()
        if fullSync is False:
            raise ValueError(
                "We haven't completed full sync, please delete your google drive directory and let the initial syncing finish before closing the program."
            )
        googleDriveFiles = self.returnDriveFilesInStore()
        localFiles = self.returnLocalFilesInStore()
        store = loadJSONFile('json_data/store.json')
        localCache = store.get('localCache')
        googleDriveCache = store.get('googleDriveCache')

        def filterFiles(files, cache, remainderStartPoint, filterType,
                        debug=False):
            # Drop from the remainder anything matching a cache entry that
            # is also present in `files` (path-matched via self closure).
            _r = remainderStartPoint.copy()
            _c = cache.copy()
            _f = files.copy()
            if debug:
                pprint('---')
                pprint('files')
                pprint(_f)
                pprint('---')
                pprint('cache')
                pprint(_c)
                pprint('---')
                pprint('remainder start point')
                pprint(_r)
                pprint('---')
            for c in cache:
                isInFiles = False
                item = None
                for x in files:
                    if self.getIfPathMatch(c, x):
                        isInFiles = True
                        item = x
                if isInFiles:
                    # Rebuild rather than pop-while-enumerating (the
                    # original skipped the element after each removal).
                    _r = [
                        x for x in _r
                        if not (self.getIfPathMatch(x, c)
                                and self.getIfPathMatch(x, item))
                    ]
            return _r

        # filesToRemoveFromDrive = filterFiles(localFiles, localCache, localCache, 'deleteDriveFile')
        # filesToRemoveFromLocal = filterFiles(googleDriveFiles, googleDriveCache, googleDriveCache, 'deleteLocalFile')
        # download entire drive
        # download new revisions from drive or upload new revisions
        filesToDownload = filterFiles(googleDriveFiles, localFiles,
                                      googleDriveFiles, 'downloadFromDrive')
        filesToUpload = filterFiles(localFiles, googleDriveFiles, localFiles,
                                    'uploadToDrive')
        # download new copy of file to local from drive
        for googleDriveFile in googleDriveFiles:
            if googleDriveFile.get(
                    'lastModified') > lastSyncTime and \
                    getIfMimeTypeIsSupported(googleDriveFile.get('mimeType')):
                self.createJob(googleDriveFile, 'localFileUpdate')
        # upload new copy of file to drive from local
        for localFile in localFiles:
            if localFile.get('lastModified') > lastSyncTime and \
                    getIfMimeTypeIsSupported(localFile.get('mimeType')):
                self.createJob(localFile, 'driveFileUpdate')

        def addToQueue(files, jobType, priority=0):
            for f in files:
                self.createJob(f, jobType, priority)

        # addToQueue(filesToRemoveFromDrive, 'deleteDriveFile', 2)
        # addToQueue(filesToRemoveFromLocal, 'deleteLocalFile', 2)
        addToQueue(filesToDownload, 'downloadFromDrive', 1)
        addToQueue(filesToUpload, 'uploadToDrive', 1)
        # jobs with priority 1 go first: when two queued jobs share a path,
        # drop the lower-priority duplicate. Collect indices first — the
        # original popped from the queue while enumerating it, which skips
        # the element following every removal.
        toDrop = set()
        for idx, _job in enumerate(self.updateOperationsQueue):
            for _job2 in self.updateOperationsQueue:
                if self.getIfPathMatch(_job, _job2):
                    pair = (_job2.get('priority'), _job.get('priority'))
                    if pair == (2, 1) or pair == (1, 0):
                        toDrop.add(idx)
        self.updateOperationsQueue = [
            j for i, j in enumerate(self.updateOperationsQueue)
            if i not in toDrop
        ]
        pprint('update queue')
        pprint(self.updateOperationsQueue)
        for x in self.updateOperationsQueue:
            IoOperations.handleJob(x)
        self.saveCache()
def update(job):
    """Push a new revision of an existing Drive file via multipart PUT.

    Args:
        job: queue-job dict; reads 'id' (Drive file id) and 'path'
            (local file path) and forwards the whole job to uploadFile.
    """
    # Loud debug banner around the job being updated.
    for _ in range(3):
        pprint('UPDATE')
    pprint(job)
    for _ in range(3):
        pprint('UPDATE')
    fileId = job.get('id')
    uri = ('https://www.googleapis.com/upload/drive/v2/files/'
           '%s?uploadType=multipart') % (fileId)
    uploadFile(uri, job.get('path'), job, uploadType='update')