def uploadFilesFromList(dataInterface, fileList: List[str], outputDir: str, srcDirPrefix=None) -> None:
    """
    Copies files in fileList from the remote onto the system where this call is being made.

    Args:
        dataInterface: object providing getFile() to read a file from the remote side
        fileList: full remote paths of the files to copy
        outputDir: local directory to write the copied files under
        srcDirPrefix: optional directory prefix; the portion of each file's
            directory after this prefix is recreated as a subdirectory of outputDir
    """
    for file in fileList:
        fileDir, filename = os.path.split(file)
        if srcDirPrefix is not None and fileDir.startswith(srcDirPrefix):
            # Get just the part of fileDir after the srcDirPrefix.
            # Slice rather than str.replace(): replace() would also strip any
            # later occurrence of the prefix string inside the path.
            subDir = fileDir[len(srcDirPrefix):]
        else:
            subDir = ''
        try:
            data = dataInterface.getFile(file)
        except Exception as err:
            # Skip directories; some remote interfaces only report the
            # directory error as text in the exception message.
            if isinstance(err, IsADirectoryError) or 'IsADirectoryError' in str(err):
                continue
            # bare raise preserves the original traceback
            raise
        outputFilename = os.path.normpath(outputDir + '/' + subDir + '/' + filename)
        logging.info('upload: {} --> {}'.format(file, outputFilename))
        utils.writeFile(outputFilename, data)
def uploadFiles(request):
    """
    Handle an 'uploadFiles' websocket request: list the matching remote files,
    fetch each one via a data request, write it under CommonOutputDir, and send
    per-file 'uploadProgress' messages back to the browser.

    Args:
        request: dict with 'cmd' == 'uploadFiles', plus 'srcFile' (pattern of
            files to fetch) and 'compress' (bool) keys.
    Raises:
        StateError: if the request's 'cmd' field is not 'uploadFiles'.
    """
    if 'cmd' not in request or request['cmd'] != "uploadFiles":
        raise StateError('uploadFiles: incorrect cmd request: {}'.format(request))
    if Web.wsDataConn is None:
        # A remote fileWatcher hasn't connected yet
        errStr = 'Waiting for fileWatcher to attach, please try again momentarily'
        Web.setUserError(errStr)
        return
    try:
        srcFile = request['srcFile']
        compress = request['compress']
    except KeyError as err:
        Web.setUserError("UploadFiles request missing a parameter: {}".format(err))
        return
    # get the list of files to upload from the remote side
    cmd = listFilesReqStruct(srcFile)
    response = Web.sendDataMsgFromThread(cmd, timeout=10)
    if response.get('status') != 200:
        Web.setUserError("Error listing files {}: {}".
                         format(srcFile, response.get('error')))
        return
    fileList = response.get('fileList')
    if type(fileList) is not list:
        Web.setUserError("Invalid fileList reponse type {}: expecting list".
                         format(type(fileList)))
        return
    if len(fileList) == 0:
        # nothing matched; tell the browser and stop
        response = {'cmd': 'uploadProgress', 'file': 'No Matching Files'}
        Web.sendUserMsgFromThread(json.dumps(response))
        return
    for file in fileList:
        try:
            cmd = getFileReqStruct(file, compress=compress)
            data = handleDataRequest(cmd)
            # write the returned data out to a file
            # NOTE(review): 'response' here is still the listFiles response from
            # above, which has no 'filename' field — this looks like it should
            # come from the getFile request/response instead; verify intent.
            filename = response.get('filename')
            if filename is None:
                if 'data' in response:
                    del response['data']
                raise StateError('sendDataRequestToFile: filename field not in response: {}'.format(response))
            # prepend with common output path and write out file
            # note: can't just use os.path.join() because if two or more elements
            # have an aboslute path it discards the earlier elements
            global CommonOutputDir
            outputFilename = os.path.normpath(CommonOutputDir + filename)
            dirName = os.path.dirname(outputFilename)
            if not os.path.exists(dirName):
                os.makedirs(dirName)
            writeFile(outputFilename, data)
            response['filename'] = outputFilename
        except Exception as err:
            Web.setUserError(
                "Error uploading file {}: {}".format(file, str(err)))
            return
        # progress update for this file
        response = {'cmd': 'uploadProgress', 'file': file}
        Web.sendUserMsgFromThread(json.dumps(response))
    # final completion message for the browser
    response = {'cmd': 'uploadProgress', 'file': '------upload complete------'}
    Web.sendUserMsgFromThread(json.dumps(response))
def runUploadDownloadTest(dataInterface):
    """
    Round-trip test for downloadFilesFromCloud / uploadFilesToCloud: seed
    /tmp/d1 with known files, download them to /tmp/d2, upload them to
    /tmp/d3, and check the /tmp/d3 copies match the originals.
    """
    assert dataInterface.isRunningRemote() is True
    # start from a clean slate: drop results of any previous run
    for staleDir in ('/tmp/d2', '/tmp/d3'):
        shutil.rmtree(staleDir, ignore_errors=True)
    # seed /tmp/d1 with two text files and two binary files
    text1 = 'test file 1'
    text2 = 'test file 2'
    bindata1 = b'\xFE\xED\x01\x23'
    bindata2 = b'\xAA\xBB\xCC\xDD'
    utils.writeFile('/tmp/d1/test1.txt', text1, binary=False)
    utils.writeFile('/tmp/d1/test2.txt', text2, binary=False)
    utils.writeFile('/tmp/d1/test3.bin', bindata1)
    utils.writeFile('/tmp/d1/test4.bin', bindata2)
    # pull the files down from the cloud side into /tmp/d2
    downloadFilesFromCloud(dataInterface, '/tmp/d1/test*.txt', '/tmp/d2')
    downloadFilesFromCloud(dataInterface, '/tmp/d1/test*.bin', '/tmp/d2')
    # push them back up into /tmp/d3
    uploadFilesToCloud(dataInterface, '/tmp/d2/test*.txt', '/tmp/d3')
    uploadFilesToCloud(dataInterface, '/tmp/d2/test*.bin', '/tmp/d3')
    # after the round trip, /tmp/d3 contents must equal the /tmp/d1 originals
    assert utils.readFile('/tmp/d3/test1.txt', binary=False) == text1
    assert utils.readFile('/tmp/d3/test2.txt', binary=False) == text2
    assert utils.readFile('/tmp/d3/test3.bin') == bindata1
    assert utils.readFile('/tmp/d3/test4.bin') == bindata2
def uploadFilesFromList(fileInterface, fileList, outputDir, srcDirPrefix=None):
    """
    Copy each file in fileList from the remote (via fileInterface) to the
    local system, writing them under outputDir.

    Args:
        fileInterface: object providing getFile() to read a remote file
        fileList: full remote paths of the files to copy
        outputDir: local directory to write the copied files under
        srcDirPrefix: optional directory prefix; the portion of each file's
            directory after this prefix is recreated under outputDir
    """
    for file in fileList:
        fileDir, filename = os.path.split(file)
        if srcDirPrefix is not None and fileDir.startswith(srcDirPrefix):
            # Get just the part of fileDir after the srcDirPrefix.
            # Slice rather than str.replace(): replace() would also strip any
            # later occurrence of the prefix string inside the path.
            subDir = fileDir[len(srcDirPrefix):]
        else:
            subDir = ''
        try:
            data = fileInterface.getFile(file)
        except Exception as err:
            # Skip directories; some remote interfaces only report the
            # directory error as text in the exception message.
            if isinstance(err, IsADirectoryError) or 'IsADirectoryError' in str(err):
                continue
            # bare raise preserves the original traceback
            raise
        outputFilename = os.path.normpath(outputDir + '/' + subDir + '/' + filename)
        logging.info('upload: {} --> {}'.format(file, outputFilename))
        utils.writeFile(outputFilename, data)
def test_delete(self):
    """Exercise utils.deleteFilesFromList, deleteFolder and deleteFolderFiles."""
    testFiles = [
        '/tmp/testdir/d1/test1.txt',
        '/tmp/testdir/d1/d2/test2.txt',
        '/tmp/testdir/d1/d2/d3/test3.txt',
        '/tmp/testdir/d1/d2/d3/test4.txt'
    ]

    def populate():
        # (re)create every test file with a small known payload
        for path in testFiles:
            utils.writeFile(path, 'hello', binary=False)

    # deleteFilesFromList removes the files but leaves the directories behind
    populate()
    assert os.path.exists(testFiles[-1])
    utils.deleteFilesFromList(testFiles)
    assert not os.path.exists(testFiles[-1])
    assert os.path.isdir('/tmp/testdir/d1/d2/d3')
    # deleteFolder removes the whole tree
    populate()
    utils.deleteFolder('/tmp/testdir/d1')
    assert not os.path.isdir('/tmp/testdir/d1')
    # deleteFolderFiles removes files recursively but keeps folders in place
    populate()
    utils.deleteFolderFiles('/tmp/testdir/d1')
    assert os.path.isdir('/tmp/testdir/d1/d2/d3')
def retrieveControlRoomFileAndSaveToCloud(controlRoomFilePath, pathToSaveOnCloud, dataInterface):
    """Fetch a control-room file through dataInterface and write it to the cloud path."""
    fileContents = dataInterface.getFile(controlRoomFilePath)
    utils.writeFile(pathToSaveOnCloud, fileContents)
def retrieveIntelFileAndSaveToCloud(intelFilePath, pathToSaveOnCloud, fileInterface):
    """Fetch the intel file through fileInterface and write it to the cloud path."""
    intelContents = fileInterface.getFile(intelFilePath)
    writeFile(pathToSaveOnCloud, intelContents)
def test_fileInterface(self, bigTestfile):
    """
    End-to-end test of FileInterface over a project connection: round-trips a
    large file (getFile/putBinaryFile), checks allowedFileTypes and listFiles
    against local results, then verifies the downloadFilesFromCloud /
    uploadFilesToCloud round trip through /tmp/d1 -> /tmp/d2 -> /tmp/d3.
    """
    projectComm = projUtils.initProjectComm(None, True)
    fileInterface = FileInterface(filesremote=True, commPipes=projectComm)
    # Read in original data
    with open(bigTestfile, 'rb') as fp:
        data = fp.read()
    # Read via fileClient and confirm it matches the on-disk contents
    startTime = time.time()
    try:
        responseData = fileInterface.getFile(bigTestfile)
    except Exception as err:
        assert False, str(err)
    assert responseData == data
    print('Read Bigfile time: {}'.format(time.time() - startTime))
    # Write bigFile back through the interface
    startTime = time.time()
    try:
        fileInterface.putBinaryFile(bigTestfile, data)
    except Exception as err:
        assert False, str(err)
    print('Write Bigfile time: {}'.format(time.time() - startTime))
    # Read back written data and compare to original
    # (putBinaryFile output lands under CommonOutputDir — presumably the
    # configured output root; verify against the interface implementation)
    writtenPath = os.path.join(CommonOutputDir, bigTestfile)
    with open(writtenPath, 'rb') as fp:
        writtenData = fp.read()
    assert writtenData == data
    # test get allowedFileTypes
    allowedTypes = fileInterface.allowedFileTypes()
    assert allowedTypes == fileTypeList
    # test list files: remote listing must match a local glob of the pattern
    filepattern = os.path.join(testDir, 'test_input', '*.dcm')
    try:
        filelist = fileInterface.listFiles(filepattern)
    except Exception as err:
        assert False, str(err)
    # get list locally
    filelist2 = [x for x in glob.iglob(filepattern)]
    filelist.sort()
    filelist2.sort()
    assert filelist == filelist2
    # test downloadFilesFromCloud and uploadFilesToCloud
    # 0. remove any previous test directories
    shutil.rmtree('/tmp/d2', ignore_errors=True)
    shutil.rmtree('/tmp/d3', ignore_errors=True)
    # 1. create a tmp sub-dir with some files in it
    text1 = 'test file 1'
    text2 = 'test file 2'
    bindata1 = b'\xFE\xED\x01\x23'
    bindata2 = b'\xAA\xBB\xCC\xDD'
    utils.writeFile('/tmp/d1/test1.txt', text1, binary=False)
    utils.writeFile('/tmp/d1/test2.txt', text2, binary=False)
    utils.writeFile('/tmp/d1/test3.bin', bindata1)
    utils.writeFile('/tmp/d1/test4.bin', bindata2)
    # 2. download files from cloud
    projUtils.downloadFilesFromCloud(fileInterface, '/tmp/d1/test*.txt', '/tmp/d2')
    projUtils.downloadFilesFromCloud(fileInterface, '/tmp/d1/test*.bin', '/tmp/d2')
    # 3. upload files to cloud
    projUtils.uploadFilesToCloud(fileInterface, '/tmp/d2/test*.txt', '/tmp/d3')
    projUtils.uploadFilesToCloud(fileInterface, '/tmp/d2/test*.bin', '/tmp/d3')
    # check that all files in d1 are same as files in d3
    d3text1 = utils.readFile('/tmp/d3/test1.txt', binary=False)
    d3text2 = utils.readFile('/tmp/d3/test2.txt', binary=False)
    d3bin1 = utils.readFile('/tmp/d3/test3.bin')
    d3bin2 = utils.readFile('/tmp/d3/test4.bin')
    assert d3text1 == text1
    assert d3text2 == text2
    assert d3bin1 == bindata1
    assert d3bin2 == bindata2