def test_sftp_download(self):
    """Download one password-auth and one key-auth SFTP file, then verify
    both files exist and match their expected MD5 checksums.
    """
    q = Queue()
    q2 = Queue()
    q.put((0, {"url": test_urls["sftp_small"]["url"], "dest": test_dest}))
    # BUG FIX: "passphrase" was wired to the key_filename fixture entry
    # (copy-paste error), so the key's passphrase was never supplied.
    # Use .get() so a missing "passphrase" entry yields None (unencrypted key).
    q.put((1, {"url": test_urls["sftp_pkey_small"]["url"],
               "dest": test_dest,
               "key_filename": test_urls["sftp_pkey_small"]["key_filename"],
               "passphrase": test_urls["sftp_pkey_small"].get("passphrase")}))
    worker = Worker({"wait_task": 0}, q, q2)
    worker.start()
    worker.join()
    # All queued work must have been consumed
    self.assertTrue(q.empty())
    self.assertTrue(os.path.exists(test_urls["sftp_small"]["path"]))
    self.assertTrue(os.path.exists(test_urls["sftp_pkey_small"]["path"]))
    # Verify file contents against the known checksums
    with open(test_urls["sftp_small"]["path"], "rb") as f:
        data = f.read()
    self.assertEqual(hashlib.md5(data).hexdigest(),
                     test_urls["sftp_small"]["md5"])
    with open(test_urls["sftp_pkey_small"]["path"], "rb") as f:
        data = f.read()
    self.assertEqual(hashlib.md5(data).hexdigest(),
                     test_urls["sftp_pkey_small"]["md5"])
    # Clean up downloaded artifacts
    FileManager.remove_file(test_urls["sftp_small"]["path"])
    FileManager.remove_file(test_urls["sftp_pkey_small"]["path"])
def parse_csv_with_name(self, filename):
    """Parse a space-delimited CSV from the statistics directory into a DTO.

    filename: name of the CSV file (resolved against DirPath.STATISTICS)
    returns: DTO(filename, path, column_names, {column -> list of values})
    """
    manager = FileManager()
    parser = ConfigParser()
    config_dto = parser.parse_config()
    path_to_file = manager.get_path_to_file(DirPath.STATISTICS)
    column_names = config_dto.column_names
    # One value list per configured column
    csv_dict = {name: [] for name in column_names}
    # BUG FIX: the file handle was opened but never closed; use a context
    # manager so it is released even if parsing raises.
    with open(os.path.join(path_to_file, filename), 'rt') as csv_file:
        csv_reader = csv.DictReader(csv_file, fieldnames=column_names,
                                    delimiter=' ', quotechar='"')
        next(csv_reader)  # skip the header row
        for row in csv_reader:
            for key in row:
                csv_dict[key].append(row[key])
    return DTO(filename, path_to_file, column_names, csv_dict)
def test_many_download(self):
    """Queue ten randomly chosen downloads and verify each resulting file
    exists (with the incremental-prefix naming for duplicates) and matches
    its expected MD5 checksum.
    """
    work_queue = Queue()
    progress_queue = Queue()
    picked = {}
    for task_id in range(10):
        key = random.choice(list(test_urls.keys()))
        # Count how many times each URL was selected
        picked[key] = picked.get(key, 0) + 1
        work_queue.put((task_id, {"url": test_urls[key]["url"],
                                  "dest": test_dest}))
    worker = Worker({"wait_task": 1, "wait_retry": 0, "max_retry": 1},
                    work_queue, progress_queue)
    worker.start()
    worker.join()
    self.assertTrue(work_queue.empty())
    for key, count in picked.items():
        base_path = test_urls[key]["path"]
        dirname = FileManager.get_dirname(base_path)
        basename = FileManager.get_basename(base_path)
        for idx in range(count):
            # First copy keeps the plain name; duplicates get "N_" prefixes
            if idx == 0:
                filepath = base_path
            else:
                filepath = os.path.join(dirname,
                                        "{}_{}".format(idx, basename))
            self.assertTrue(os.path.exists(filepath))
            with open(filepath, "rb") as handle:
                contents = handle.read()
            self.assertEqual(hashlib.md5(contents).hexdigest(),
                             test_urls[key]["md5"])
            FileManager.remove_file(filepath)
def run(self):
    """Worker main loop: drain the work queue, downloading each item with
    the protocol implied by its URL scheme.

    Each work item is a (index, info) tuple where info carries "url",
    "dest", and optionally "key_filename"/"passphrase" for SFTP.
    Failures are reported on the progresses queue; success reporting
    presumably happens inside the per-protocol download methods — not
    visible here, confirm in http_download/ftp_download/sftp_download.
    """
    while not self.works.empty():
        # Get work
        work = self.works.get()
        info = work[1]
        self.i = work[0]
        self.progress = {}
        try:
            url = info.get("url")
            directory = info.get("dest")
            if not url:
                raise NoURLException("file {} does not have url".format(self.i))
            if not directory:
                raise NoDestinationPathException("file {} does not have dest".format(self.i))
            # Get filename and protocol from url
            split = urllib.parse.urlparse(url)
            protocol = split.scheme
            filename = FileManager.get_basename(split.path)
            # Create directory if the directory does not exist
            # Check whether directory has write permission
            if FileManager.is_path_creatable(directory):
                FileManager.create_directory(directory)
            else:
                # Local import suggests os is not imported at module level
                # in this file — verify before hoisting.
                import os
                raise OSError(errno.EACCES, os.strerror(errno.EACCES), directory)
            # Download file with random filename to prevent overwritten file with same name
            filepath = FileManager.random_filepath(directory)
            self.progress["filepath"] = filepath
            self.progress["filename"] = filename
            # Dispatch on URL scheme
            if protocol in ["http", "https"]:
                self.http_download(url, filepath)
            elif protocol in ["ftp", "ftps"]:
                self.ftp_download(split, filepath)
            elif protocol in ["sftp"]:
                self.sftp_download(split, info.get("key_filename"), info.get("passphrase"), filepath)
            else:
                raise UnsupportedProtocolException("file {}({}) uses unsupported protocol".format(self.i, filename))
        except Exception as e:
            # Notify failed work
            self.progress["state"] = "Failed"
            self.progress["error"] = e
            self.progresses.put((self.i, self.progress.copy()))
        # Mark the item done whether it succeeded or failed, then pause
        # before picking up the next task (configurable, default 3s).
        self.works.task_done()
        time.sleep(self.config.get("wait_task", 3))
    return
def transform_to_csv_dto(self, generated_stat, config):
    """Package generated statistics as a DTO aimed at the statistics
    directory, named after the source config's filename.
    """
    target_dir = FileManager().get_path_to_file(DirPath.STATISTICS)
    stat_rows = self.transform_generated_values(config.column_names,
                                                generated_stat)
    return DTO("Statistics for " + config.filename, target_dir,
               config.column_names, stat_rows)
def __init__(self):
    """Prompt the user for document data, write it as markdown, and open
    the resulting file in iA Writer via an AppleScript helper.
    """
    # NOTE: plist-backed state management intentionally disabled for now:
    # self.plManager = StateSettingsManager(plistFilePath)
    # self.state = self.plManager.getStateOrEmptyState()
    yml_generator = UserInputManager()
    file_manager = FileManager(markdown_dir)
    data = yml_generator.prompt()
    path = file_manager.write(data)
    dir_path = os.path.dirname(os.path.realpath(__file__))
    # BUG FIX: os.system with an unquoted f-string breaks when either path
    # contains spaces (and is shell-injectable); invoke osascript directly
    # with an argument list instead of going through a shell.
    import subprocess
    subprocess.run(
        ["osascript", os.path.join(dir_path, "open-in-iawriter.scpt"), path],
        check=False)
def remove_incomplete(self, dest):
    """
    Report the work item as failed, then remove the partial file.

    dest: destination path of the partially downloaded file
    """
    # Notify failed work first so progress is reported even if cleanup
    # keeps failing.
    self.progress["state"] = "Failed"
    self.progresses.put((self.i, self.progress.copy()))
    # Retry loop if file cannot be removed (e.g. still held by the OS)
    for _ in range(self.config.get("max_retry", 3)):
        try:
            FileManager.remove_file(dest)
            break
        except Exception:
            # BUG FIX: a bare `except:` also swallowed KeyboardInterrupt
            # and SystemExit; catch Exception instead.
            # Consistency: honour the "wait_retry" config key used by the
            # rest of the worker (default unchanged at 3 seconds).
            time.sleep(self.config.get("wait_retry", 3))
def test_success_download(self):
    """Run four daemon workers over all test URLs, assert every download
    succeeds, and verify each file's MD5 checksum.
    """
    works = Queue(maxsize=0)
    progresses = Queue(maxsize=0)
    for i, key in enumerate(test_urls):
        works.put((i + 1, {
            "url": test_urls[key]["url"],
            "dest": test_dest
        }))
    for i in range(4):
        worker = Worker({
            "wait_task": 1,
            "wait_retry": 0,
            "max_retry": 1
        }, works, progresses, name="worker{}".format(i))
        # BUG FIX: Thread.setDaemon() is deprecated; assign the
        # daemon attribute directly.
        worker.daemon = True
        worker.start()
    visualizer = Visualizer(4, progresses, name="visualizer")
    visualizer.start()
    works.join()
    visualizer.join()
    # Both queues must be fully drained
    self.assertTrue(works.empty())
    self.assertTrue(progresses.empty())
    # Every task succeeded and no failure results were recorded
    self.assertEqual(visualizer.success, 4)
    self.assertEqual(visualizer.success, visualizer.task)
    self.assertTrue(not visualizer.results)
    for key in test_urls:
        self.assertTrue(os.path.exists(test_urls[key]["path"]))
        with open(test_urls[key]["path"], "rb") as f:
            data = f.read()
        self.assertEqual(
            hashlib.md5(data).hexdigest(), test_urls[key]["md5"])
        FileManager.remove_file(test_urls[key]["path"])
def parse_config(self, name):
    """Parse an INI config file from the configs directory.

    name: config filename, or None to skip parsing
    returns: DTO(name, path, column_names, {section -> {option: value}}),
             or None when name is None
    """
    # Guard clause: nothing to parse
    if name is None:
        return None
    manager = FileManager()
    path_to_file = manager.get_path_to_file(DirPath.CONFIGS)
    config = configparser.ConfigParser()
    with open(os.path.join(path_to_file, name)) as cfg_file:
        # BUG FIX: readfp() is deprecated and removed in Python 3.12;
        # read_file() is the drop-in replacement.
        config.read_file(cfg_file)
    # Flatten the parser into plain nested dicts
    sections = {}
    for section in config.sections():
        sections[section] = dict(config.items(section))
    column_names = self.get_column_names(sections)
    return DTO(name, path_to_file, column_names, sections)
def __btnOpenImgOnClick(self):
    """Let the user pick a .bmp image, convert it to grayscale, and show
    the original in its tab.
    """
    # Ask for the image path
    selected = QtWidgets.QFileDialog.getOpenFileName(
        self, 'Open image', '', 'Image files (*.bmp)')
    self.imgpath = selected[0]
    # Echo the chosen path in the text edit
    self.teImage.setText(self.imgpath)
    # Writes the grayscale copy used below
    FileManager().rgb2gray(self.imgpath)
    # Display the (scaled) original image
    scaled = QtGui.QPixmap('./temp/original.bmp').scaled(
        640, 480, QtCore.Qt.KeepAspectRatio)
    original_label = self.tabOriginal.findChild(QtWidgets.QLabel,
                                                'lbOriginal')
    original_label.setPixmap(QtGui.QPixmap(scaled))
def __filter(self):
    """Run the external assembly sharpening filters over the loaded image
    and display both results in their tabs.
    """
    # Ensure the temp directory for intermediate files exists
    if not os.path.exists(self.__TEMP_DIR) and not os.path.isdir(
            self.__TEMP_DIR):
        os.mkdir(self.__TEMP_DIR)
    # Dump the loaded image to the text format the asm tools consume
    print('Converting image to file...')
    FileManager().img2file(self.imgpath)
    # Sharpening pass
    print('Applying sharpening kernel...')
    os.system('cd asm && ./sharpening')
    print('Converting to an image...')
    FileManager().file2img('./temp/sharpening.txt')
    # Oversharpening pass
    print('Applying oversharpening kernel...')
    os.system('cd asm && ./oversharpening')
    print('Converting to an image...')
    FileManager().file2img('./temp/oversharpening.txt')
    # Show both processed images, scaled to fit
    sharp_pixmap = QtGui.QPixmap('./temp/sharpening.bmp').scaled(
        640, 480, QtCore.Qt.KeepAspectRatio)
    sharp_label = self.tabSharpening.findChild(QtWidgets.QLabel,
                                               'lbSharpening')
    sharp_label.setPixmap(sharp_pixmap)
    oversharp_pixmap = QtGui.QPixmap('./temp/oversharpening.bmp').scaled(
        640, 480, QtCore.Qt.KeepAspectRatio)
    oversharp_label = self.tabOverSharpening.findChild(
        QtWidgets.QLabel, 'lbOverSharpening')
    oversharp_label.setPixmap(oversharp_pixmap)
    self.lbStatus.setText('Ready!')
def rename_file(self, dest):
    """
    Rename a downloaded file from its temporary random name to the name
    identified in the URL; if that name is taken, an incremental number
    is prefixed to the filename.

    dest: destination path
    """
    wanted_name = self.progress.get("filename")
    target = FileManager.join_path(FileManager.get_dirname(dest),
                                   wanted_name)
    # De-duplicate: generate a numbered variant when the name is taken
    if FileManager.is_path_exists(target):
        target = FileManager.generate_filepath(target)
    FileManager.rename_file(dest, target)
    # Notify success work with the final filename
    self.progress["filename"] = FileManager.get_basename(target)
    self.progress["state"] = "Success"
    self.progresses.put((self.i, self.progress.copy()))
def parse_csv(self):
    """Let the user pick a CSV from the statistics directory and parse it."""
    chosen = FileManager().select_file(DirPath.STATISTICS, FileType.CSV)
    return self.parse_csv_with_name(chosen)
# Parse command line arguments parser = argparse.ArgumentParser( description= 'Download files from different sources with different protocols.') parser.add_argument('--input', required=True, metavar="/path/to/input/", help='Path to input .yml file') parser.add_argument('--config', required=True, metavar="/path/to/config/", help='Path to configurate .yml file') args = parser.parse_args() try: if not FileManager.is_path_exists(args.input): raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), args.input) if not FileManager.is_path_exists(args.config): raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), args.config) inputs = get_input(args.input) config = get_config(args.config) # Quit if inputs is empty if not inputs: raise Exception("No inputs given") works = Queue(maxsize=0) progresses = Queue(maxsize=0)