def _build_cmd(self):
    """Build the shell command that normalizes the first input file.

    Depending on ``self.input[0]``'s type this produces one of:
      * media        -> ffmpeg re-encode to .mp4 (x264 + fdk-aac),
      * presentation -> libreoffice conversion to PDF (skipped if already .pdf),
      * slide with an odd width or height -> ffmpeg crop to even dimensions
        (libx264 requires even frame sizes),
      * anything else -> no command; the input passes through unchanged.

    Side effect: sets ``self.out`` to the resulting File.
    Returns the command string ('' when nothing needs to run).
    """
    file = self.input[0]
    if file.type == 'media':
        self.out = File(file.filename,
                        path=os.path.join(file.path, 'tmp'),
                        extension='.mp4',
                        ts=file.ts)
        cmd = 'ffmpeg -i %s ' % file.fullname
        cmd += '-filter_complex "[0:v]setpts=PTS-STARTPTS" '
        cmd += '-c:v libx264 -keyint_min 15 -g 15 '
        cmd += '-c:a libfdk_aac '
        cmd += self.out.fullname
    elif file.type == 'presentation' and file.extension != '.pdf':
        cmd = 'libreoffice --headless --invisible --convert-to pdf --outdir %s %s' % (
            file.path, file.fullname)
        self.out = File(file.filename, path=file.path, extension='.pdf',
                        ts=file.ts)
    elif file.type == 'slide' and (file.meta['width'] % 2 == 1
                                   or file.meta['height'] % 2 == 1):
        self.out = File(file.filename + 'even', path=file.path,
                        extension='.png', ts=file.ts, duration=file.duration)
        cmd = 'ffmpeg -i %s ' % file.fullname
        # Bug fix: use floor division.  With '/', an odd dimension yields a
        # float that '%d' merely truncates (3 / 2 * 2 == 3.0 -> "3"), so the
        # crop target stayed odd.  3 // 2 * 2 == 2 as intended.
        cmd += '-filter_complex "[0:v]crop=%d:%d" ' % (
            file.meta['width'] // 2 * 2, file.meta['height'] // 2 * 2)
        cmd += self.out.fullname
    else:
        cmd = ''
        self.out = file
    return cmd
async def add_files(self, ctx, rewrite: typing.Optional[str]):
    """Add every attachment of the invoking message to the current branch.

    :param ctx: command context carrying the message and the send() channel
    :param rewrite: pass '-r' to overwrite files that already exist

    Without '-r', existing files are reported instead of replaced, and a
    hint about the '-r' flag is sent at the end.
    """
    if len(ctx.message.attachments) == 0:
        await ctx.send('No files attached.'
                       ' Attach them to add to branch')
        return
    # `rewrite and rewrite == '-r'` was redundant; equality implies truthiness.
    overwrite = rewrite == '-r'
    if overwrite:
        await ctx.send('Files will be overridden, if exist')
    has_files = False
    # Single loop replaces the two near-identical loops of the original.
    for attach in ctx.message.attachments:
        if not overwrite and self.curr_branch.has_element(attach.filename):
            has_files = True
            await ctx.send(f'File: {attach.filename}'
                           ' already exists')
        else:
            self.curr_branch.add_element(File(attach, self.curr_branch))
            await ctx.send(f'File: {attach.filename} was added')
    if has_files:
        await ctx.send('To override files, set "-r" flag')
    return
def __populate(self, metainfo):
    """Fill in torrent metadata fields from a decoded metainfo dict.

    A torrent may carry a single tracker ('announce'), a tracker list
    ('announce-list'), or both — the single reference is kept for
    backwards compatibility.  Also reads optional descriptive fields and
    the file list (single- or multi-file mode).
    """
    info = metainfo['info']
    if 'announce' in metainfo:
        # single tracker
        self.trackerURLs.add(metainfo['announce'])
    if 'announce-list' in metainfo:
        # multiple trackers (nested lists of tiers)
        self.trackerURLs |= set(utilities.flatten(metainfo['announce-list']))
    # Optional descriptive fields, copied verbatim when present.
    for key, attr in (('created by', 'createdBy'),
                      ('comment', 'comment'),
                      ('encoding', 'encoding')):
        if key in metainfo:
            setattr(self, attr, metainfo[key])
    if 'creation date' in metainfo:
        self.creationDate = datetime.datetime.fromtimestamp(
            metainfo['creation date'])
    if 'files' in info:
        # Multi-file mode: one File per entry under a shared root name.
        self.singleFile = False
        self.name = info['name']
        self.files = [File(entry['path'], entry['length'])
                      for entry in info['files']]
    if 'length' in info:
        # Single-file mode.
        self.singleFile = True
        self.name = info['name']
        self.files = [File([info['name']], info['length'])]
def InitOther():
    """Load the auxiliary Diacrisis lookup tables from disk.

    Populates the class attributes ``delChars``, ``usedWords``,
    ``replaceWords`` and ``stdICD``, then builds the IDF weight table.
    # NOTE(review): assumes File(...).Read() returns a string for delChars
    # and DataFrames for the other paths — confirm against the File class.
    """
    # Load the word-deletion lexicon.
    Diacrisis.delChars = File(Diacrisis.delPath).Read()
    if not Diacrisis.delChars:
        print('读取失败')
    # Load the table of frequently used diagnoses (diagnosis -> code).
    used_df = File(Diacrisis.usedPath).Read()
    print(len(used_df))
    if len(used_df) > 0:
        Diacrisis.usedWords = pd.Series(index=list(used_df['诊断'].values),
                                        data=list(used_df['编码'].values))
    # Load the word-replacement table (ToReplace -> truly).
    re_df = File(Diacrisis.replacePath).Read()
    print(len(re_df))
    if len(re_df) > 0:
        Diacrisis.replaceWords = pd.Series(
            index=list(re_df['ToReplace'].values),
            data=list(re_df['truly'].values))
    # Load the standard ICD table and pre-segment the disease names.
    Diacrisis.stdICD = File(Diacrisis.stdICD_path).Read()
    print(len(Diacrisis.stdICD))
    Diacrisis.stdICD['std-cut'] = Diacrisis.CleanStdICD(
        Diacrisis.stdICD['疾病名称'])[1]
    #Diacrisis.stdICD.to_csv(Diacrisis.stdICD_path, encoding = 'utf8', index = False)
    # Build the IDF weight table.
    Diacrisis.Idf()
def __init__(self, generator=None):
    """Create a task that runs a python function remotely via ``_run_.py``.

    Sets up the JSON files used to ship the RPC arguments and result,
    registers pre/post transfer actions and success/submit callbacks.

    :param generator: forwarded to the base task class
    """
    super(PythonTask, self).__init__(generator)

    # Deferred until .call() supplies the function and its arguments.
    self._python_import = None
    self._python_source_files = None
    self._python_function_name = None
    self._python_args = None
    self._python_kwargs = None

    # self.executable = 'python'
    # self.arguments = '_run_.py'

    self.then_func_name = 'then_func'

    # JSON files exchanging the RPC arguments/result, named by task uuid.
    self._rpc_input_file = \
        JSONFile('file://_rpc_input_%s.json' % hex(self.__uuid__))
    self._rpc_output_file = \
        JSONFile('file://_rpc_output_%s.json' % hex(self.__uuid__))

    # input args -> input.json
    self.pre.append(self._rpc_input_file.transfer('input.json'))
    # output args -> output.json
    self.post.append(File('output.json').transfer(self._rpc_output_file))

    # Link the staged runner script into the working directory.
    f = File('staging:///_run_.py')
    self.pre.append(f.link())

    self.add_cb('success', self.__class__._cb_success)
    self.add_cb('submit', self.__class__._cb_submit)

    # if True the RPC result will be stored in the DB with the task
    self.store_output = True
def add_url(self, url):
    """Parse a mega.nz share link and append the referenced file(s) to ``self.files``.

    Supports folder links (``F!<id>!<key>``) and single-file links
    (``!<id>!<key>`` / ``N!<id>!<key>``); pre-2020 URL formats are
    rewritten to the new scheme first.  An optional ``|<handle>`` suffix
    restricts a folder link to one contained file.

    Raises Exception for any link that does not match these shapes.
    """
    # Normalize old-style URLs to the current fragment scheme.
    url = url.replace('/embed#', '/#')
    url = url.replace('/#!#!', '/#!')  # old2new
    url = url.split("/#")[1]
    url = url.replace('#', '!')  # old2new
    id_video = None
    if "|" in url:
        # Folder link pinned to one specific file handle.
        url, id_video = url.split("|")
    if url.startswith("F!"):
        if len(url.split("!")) == 3:
            folder_id = url.split("!")[1]
            folder_key = url.split("!")[2]
            master_key = self.base64_to_a32(folder_key)
            # List the folder contents ("a": "f" request).
            files = self.api_req({
                "a": "f",
                "c": 1,
                "r": 1
            }, "&n=" + folder_id)
            for file in files["f"]:
                if file["t"] == 0:  # t == 0: regular file node
                    if id_video and id_video != file["h"]:
                        continue
                    # Per-file key is encrypted with the folder master key.
                    key = file['k'][file['k'].index(':') + 1:]
                    key = self.decrypt_key(self.base64_to_a32(key),
                                           master_key)
                    # Fold the node key down to the 128-bit AES key.
                    k = (key[0] ^ key[4], key[1] ^ key[5],
                         key[2] ^ key[6], key[3] ^ key[7])
                    attributes = self.base64urldecode(file['a'])
                    attributes = self.dec_attr(attributes, k)
                    self.files.append(
                        File(info=attributes,
                             file_id=file["h"],
                             key=key,
                             folder_id=folder_id,
                             file=file,
                             client=self))
        else:
            raise Exception("Enlace no válido")
    elif url.startswith("!") or url.startswith("N!"):
        if len(url.split("!")) == 3:
            file_id = url.split("!")[1]
            file_key = url.split("!")[2]
            # Fetch file metadata ("a": "g" request).
            file = self.api_req({'a': 'g', 'g': 1, 'p': file_id})
            key = self.base64_to_a32(file_key)
            k = (key[0] ^ key[4], key[1] ^ key[5],
                 key[2] ^ key[6], key[3] ^ key[7])
            attributes = self.base64urldecode(file['at'])
            attributes = self.dec_attr(attributes, k)
            self.files.append(
                File(info=attributes,
                     file_id=file_id,
                     key=key,
                     file=file,
                     client=self))
        else:
            raise Exception("Enlace no válido")
    else:
        raise Exception("Enlace no válido")
def move_files(self, files, directory=""):
    """Move *files* into a target directory, de-duplicating names.

    :param files: iterable of file paths to move
    :param directory: target directory; when empty, each file's target is
        chosen per-file via ``_create_or_define``

    Files already under the target directory are left in place.  Name
    clashes get an ``(n)`` ordinal suffix; spaces become underscores when
    ``self.underscore_flag`` is set.  With ``self.dry_run`` nothing is
    moved — intended actions are only printed.  Moves are logged via
    ``log_result``.
    """
    result = []
    for file in files:
        if directory == "":
            # Per-file mode: pick (or create) a directory for this file.
            temp_file = File(file)
            new_directory = self._create_or_define(temp_file)
            origin_folder = ""
        else:
            new_directory = directory
            origin_folder = os.path.basename(os.path.dirname(file))
            temp_file = File(os.path.basename(file))
        if not file.startswith(new_directory):
            if temp_file.get_extension():
                temp_extension = "." + temp_file.get_extension()
            else:
                temp_extension = ""
            # How many same-named objects already exist in the target.
            ordinal_number = self.check_same_objects(
                new_directory, temp_file)
            target_name = temp_file.get_just_name() + temp_extension
            if ordinal_number:
                # e.g. "name (1).ext" for the second occurrence.
                formatted_ordinal_number = f" ({ordinal_number - 1})"
                target_name = (temp_file.get_just_name() +
                               formatted_ordinal_number +
                               temp_extension)
            if self.underscore_flag:
                target_name = target_name.replace(" ", "_")
            new_position = os.path.join(self.directory, new_directory,
                                        target_name)
            file_position = os.path.join(self.directory, origin_folder,
                                         str(temp_file))
            # Skip when source and destination resolve to the same path.
            if file_position != os.path.join(
                    self.directory,
                    new_directory,
                    temp_file.get_just_name() + temp_extension,
            ):
                result.append(os.path.join(origin_folder, str(temp_file)))
                self.possibilities[new_directory].files.append(temp_file)
                if not self.dry_run:
                    os.rename(file_position, new_position)
                else:
                    print(
                        f"{file_position} would be moved to {new_position}"
                    )
            elif self.dry_run:
                print(
                    f"{file_position} won't be move since the location is the same"
                )
    self.log_result(result, directory)
def main():
    """Demo scenario exercising the File permission API end to end."""
    # Show the set of permissions the File class understands.
    print('-----File permissions: {}'.format(File.FILE_PERMISSIONS))

    # Three fixture files with different owners / sizes / initial rights.
    poem = File('poem.txt', 'joe')
    readme = File('readme.txt', 'max', 1000, 'r')
    secret = File('secret.txt', 'fred', 100)

    # Initial state of every file.
    print('-----File details...')
    for demo_file in (poem, readme, secret):
        print(demo_file)

    # Access checks before any permission changes.
    print('-----Access rights...')
    print(secret.has_access('fred', 'r'))
    print(secret.has_access('mary', 'x'))

    # Grant some rights.
    print('-----Enabling permissions...')
    secret.enable_permission('fred', 'x')
    secret.enable_permission('mary', 'w')

    # Re-check after granting.
    print('-----Access rights...')
    print(secret.has_access('mary', 'x'))

    # Revoke the rights again.
    print('-----Disabling permissions...')
    secret.disable_permission('fred', 'x')
    secret.disable_permission('mary', 'w')

    # Re-check after revoking.
    print('-----Access rights...')
    print(secret.has_access('mary', 'x'))
    print(secret.has_access('vera', 'w'))

    # Grant a final batch of rights on two files.
    print('-----Enabling permissions...')
    secret.enable_permission('fred', 'r')
    secret.enable_permission('fred', 'x')
    readme.enable_permission('max', 'w')
    readme.enable_permission('max', 'x')

    # Dump the resulting permission tables.
    print('-----Permissions: {}'.format(secret.get_permissions()))
    print('-----Permissions: {}'.format(readme.get_permissions()))

    # Final state of the most-modified file.
    print('-----File details...')
    print(secret)
def process_new_file(self, source):
    """Decode a serialized file description and register it in storage."""
    details = json.loads(self.break_source_into_substrings(source))
    new_file = File(details['file_name'],
                    details['file_path'],
                    details['file_extension'],
                    details['absolute_path'],
                    DetectionIndicators())
    self.file_storage.add_new_file(new_file)
    print("new file created: " + str(details['absolute_path']))
def get_file(self, filenum=0):
    """Return the cached File for *filenum*, creating and caching it on first use.

    :param filenum: numeric file identifier (defaults to 0)
    """
    # EAFP: one dict lookup on the hot (cached) path instead of the
    # original membership-test-plus-lookup pair.
    try:
        return self.files[filenum]
    except KeyError:
        # Construct lazily so File() runs only for unseen numbers.
        file = File(self, filenum)
        self.files[filenum] = file
        return file
def read(self): if self.loaded: pass entries = ((self.data[0] & 0xff) << 8) + (self.data[1] & 0xff) offset = entries * 10 + 2 for entry in range(entries): pos = entry * 10 + 2 namehash = ((self.data[pos] & 0xff) << 24) + ((self.data[pos + 1] & 0xff) << 16) + \ ((self.data[pos + 2] & 0xff) << 8) + (self.data[pos + 3] & 0xff) size = ((self.data[pos + 4] & 0xff) << 16) + ((self.data[pos + 5] & 0xff) << 8) + \ (self.data[pos + 6] & 0xff) sizec = ((self.data[pos + 7] & 0xff) << 16) + ((self.data[pos + 8] & 0xff) << 8) + \ (self.data[pos + 9] & 0xff) filedata = bytearray(size) if size != sizec: filedata = bz2.decompress(prep_data(self.data, sizec, offset)) # untested else: for i in range(size): filedata[i] = self.data[offset + i] self.add_file(File(filedata, namehash)) offset += sizec del self.data print "files:", len(self.files) self.loaded = True
def __init__(self):
    """Initialise the interface: load startup data from the init file.

    # NOTE(review): the [False, False, True] flags passed to File are
    # opaque here — confirm their meaning against the File class.
    """
    print("Initializing interface...")
    self.init_file = File('', [False, False, True])
    # Data read from the init file at startup.
    self.init_data = self.init_file.open_file()
    print("Interface initialized!")
    # Tiles are populated later; start empty.
    self.tile_arr = []
def select_input(self):
    """
    Save the name of the input file, clear the output label, reset the
    progress bar and actualize the input label.  Invoked when the
    Select Input button is triggered.
    """
    full_paths = QFileDialog.getOpenFileNames(
        None, 'Open File', '',
        '*.jpg *.jpeg *.png *.bmp *.mp4 *.avi *.wmv *.mov *.mkv')
    # NOTE(review): with Qt5, getOpenFileNames returns a (paths, filter)
    # tuple that is always truthy — a cancelled dialog would reach
    # full_path[0] with an empty list.  Confirm the Qt binding in use.
    if full_paths:
        # Clear input files list
        self.files.clear()
        self.output_name = None
        full_path = full_paths[0]
        print(full_paths)
        file = File(self.tmp_dir, full_path[0])
        # Clear output label image, progress bar and label text
        self.output_label.clear()
        self.progress_bar.setValue(0)
        self.path_label.setText(file.get_output_name(self.output_type))
        # Actualize input label image
        if file.type == InputType.IMAGE:
            image = file.full_path
        elif file.type == InputType.VIDEO:
            # Use the first video frame as the preview image.
            ret, image = cv2.VideoCapture(file.full_path).read()
            if not ret:
                raise ValueError
            image = npimg_to_pixmap(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))
        else:
            raise ValueError
        self.actualize_input_label(image)
        # Add input file to a list of files
        self.files.append(file)
def searchcontroller(self):
    """Walk ``self.workfolder`` and scan every project file for matches.

    Source files (``self.src_filetypes``) are matched with the
    code-offset/importance settings; database files
    (``Project.db_filetypes``) with the db matcher.  Both maps are merged
    into ``self.all_files`` and rendered into an HTML report tree stored
    on ``self.tree_object``.
    """
    for root, directories, filenames in os.walk(self.workfolder):
        for filename in filenames:
            full_file_name = os.path.join(root, filename)
            if (full_file_name.lower().endswith(tuple(self.src_filetypes))):
                self.src_files[full_file_name] = File(full_file_name)
                self.src_files[full_file_name].find_matches_in_src_file(
                    self.code_offset, self.query_importance)
            elif (full_file_name.lower().endswith(tuple(Project.db_filetypes))):
                self.db_files[full_file_name] = File(full_file_name)
                self.db_files[full_file_name].find_matches_in_db_file()
    # db results first, so src entries win on any key collision.
    self.all_files.update(self.db_files)
    self.all_files.update(self.src_files)
    # NOTE(review): local import — presumably avoids a circular import at
    # module load time; confirm.
    from report_html import Report_html
    treeview = Report_html.Tree_builder(self, "")
    self.tree_object = treeview.return_tree_object()
def call(self, command, **kwargs):
    """
    Set the python function to be called with its arguments

    Parameters
    ----------
    command : function
        a python function defined inside a package or a function. If in a
        package then the package needs to be installed on the cluster to
        be called. A function defined in a local file can be called as
        long as dependencies are installed.
    kwargs : ``**kwargs``
        named arguments to the function
    """
    # NOTE(review): ``func_name`` is Python-2-only; a Python 3 port would
    # need ``command.__name__``.
    self._python_function_name = '.'.join([command.__module__,
                                           command.func_name])
    self._python_kwargs = kwargs
    # Record the imports and source files the function depends on so they
    # can be shipped to the worker alongside the task.
    self._python_import, self._python_source_files = \
        get_function_source(command)
    for f in self._python_source_files:
        self.pre.append(File('file://' + f).load().transfer())
    # call the helper script to execute the function call
    self.append('python _run_.py')
def js(self):
    """Return the PyV8 JS context, creating and initialising it on first use."""
    if not self._ctx:
        # Glue type must match the source strings PyV8 will receive.
        glue = u'\n' if self._use_unicode else '\n'
        core_src = [
            self.read_js_file(make_path(f)) for f in self._core_files
        ]
        self._ctx = PyV8.JSContext()
        self._ctx.enter()
        # Evaluate all core files as one concatenated script.
        self._ctx.eval(glue.join(core_src))
        # load default snippets
        self._ctx.locals.pyLoadSystemSnippets(
            self.read_js_file(make_path('snippets.json')))
        # expose some methods
        self._ctx.locals.log = js_log
        self._ctx.locals.pyFile = File()
        if self._contrib:
            # User-contributed globals injected into the JS namespace.
            for k in self._contrib:
                self._ctx.locals[k] = self._contrib[k]
    # Reload extensions whenever the flag was raised since the last call.
    if self._should_load_extension:
        self._ctx.locals.pyResetUserData()
        self.load_extensions()
        self._should_load_extension = False
    return self._ctx
def add_aap_pdfs(repos, gh):
    """Attach PDFs from the awesome-ap-pdfs mirror to the matching repos.

    Mirrors NicoWeio/awesome-ap-pdfs, then for every repo looks up its
    directory there (with '/' replaced by U+2215 in the name), reads each
    numbered experiment ("Versuch") subfolder and extends that
    experiment's 'pdfs' list with non-user-generated File objects.
    Experiments present only in the mirror are warned about.
    """
    mirror = Repo({'name': 'NicoWeio/awesome-ap-pdfs'}, gh)
    mirror.update_repo_mirror()
    for repo in repos:
        # Directory names in the mirror encode '/' as U+2215.
        mirror_dir = mirror.cwd_path / repo.full_name.replace('/', '∕')
        if not mirror_dir.exists():
            # The mirror provides no PDFs for this repo — skip it.
            continue
        for versuch_dir in (d for d in mirror_dir.iterdir() if d.is_dir()):
            # TODO: fragile assumption about the awesome-ap-pdfs folder layout
            versuch = int(versuch_dir.stem)
            pdf_files = [
                File(Path(entry), mirror, is_user_generated=False)
                for entry in versuch_dir.iterdir()
                if entry.suffix == '.pdf'
            ]
            if versuch in repo.versuche:
                repo.versuche[versuch].setdefault('pdfs', []).extend(pdf_files)
            else:
                warn(
                    f'Versuch {versuch} existiert nicht in {repo.full_name}, aber in awesome-ap-pdfs.'
                )
    return repos
def parse_file_info(self, p): """ Parse file info packet (type 3) """ # Create file object if new UID if not self.file_exists(p): self.files[p.uid] = File() # Check for existing file info try: self.files[p.uid].name except AttributeError: # Set file info properties self.files[p.uid].info(name=self.get_string(p.data[84:]), path=self.get_string(p.data[188:]), parts=self.get_int(p.data[72:74]), length=self.get_int(p.data[172:176]), time_a=self.get_time(p.data[164:168]), time_b=self.get_time(p.data[60:64])) # Set ignored file flag self.files[p.uid].ignored = self.is_ignored(self.files[p.uid]) # Print file info if self.config.verbose: print( f"\n[INFO] {self.to_hex(p.uid, 4)} \"{self.files[p.uid].name}\" ", end='') print( f"{round(self.files[p.uid].length/1024, 1)} kB IN {self.files[p.uid].parts} PARTS" )
def _download(self, content):
    """Fetch IPFS objects listed in *content*, restore names and decrypt.

    :param content: mapping of multihash -> base64-encoded original name

    Downloads every object into the working dir, renames it back to its
    real name, registers it in ``self._content``, decrypts it into the
    root dir and finally removes the encrypted copy.  The process cwd is
    restored on completion.
    """
    cwd = os.getcwd()
    # ipfs get writes into the current directory.
    os.chdir(self._working_dir)
    for multihash in content.keys():
        self._ipfs_client.get(multihash)
    for multihash in os.listdir(self._working_dir):
        # Stored names are base64 of the original file name.
        name = base64.b64decode(content[multihash]).decode()
        os.rename(multihash, name)
        full_path = os.path.join(self._working_dir, name)
        self._content.add(full_path, multihash)
        if os.path.isfile(full_path):
            File(full_path).decrypt_content(cipher=self._cipher,
                                            dst_dir=self._root_dir)
            # NOTE(review): the sleep presumably lets the decrypt finish
            # flushing before the encrypted copy is deleted — confirm.
            time.sleep(0.1)
            os.remove(full_path)
        elif os.path.isdir(full_path):
            Directory(full_path).decrypt_content(cipher=self._cipher,
                                                 dst_dir=self._root_dir)
            time.sleep(0.1)
            shutil.rmtree(full_path)
    os.chdir(cwd)
def updateFileRecords(self, path, localFileSet, remoteFileSet):
    """Upload records for local files missing from the remote set (Python 2).

    For every file present locally but not remotely: lazily ensure the
    remote path row exists, make the file streamable, then POST a new
    File record.  Errors are logged and processing continues with the
    next file.
    """
    pathid = None
    for localFile in localFileSet:
        if localFile not in remoteFileSet:
            try:
                # Create the remote path row only once, on first need.
                if not pathid:
                    pathid = self.getOrCreateRemotePath(path)
                log.debug("Attempting to add %s" % (localFile, ))
                fullPath = stripUnicode(localFile, path=path)
                try:
                    fullPath = makeFileStreamable(fullPath,
                                                  appendSuffix=True,
                                                  removeOriginal=True,
                                                  dryRun=False)
                except Exception, e:
                    log.error(e)
                    log.error(
                        "Something bad happened. Attempting to continue")
                # Only post a record if the (possibly renamed) file exists.
                if os.path.exists(fullPath):
                    newFile = File(
                        os.path.basename(fullPath),
                        pathid,
                        os.path.getsize(fullPath),
                        True,
                    )
                    newFile.post()
            except Exception, e:
                log.error(e)
                continue
def _test_naming_util(my):
    """Verify NamingUtil renders equivalent naming expressions identically.

    Builds a virtual (uncommitted) snapshot for process 'light', context
    'light/special', version 2, plus a file object, then checks that both
    the legacy ``{$VAR}`` and modern ``{object.attr}`` spellings of the
    same expression produce the expected file and directory names.
    """
    #my.clear_naming()
    naming_util = NamingUtil()

    # these should evaluate to be the same
    file_naming_expr1 = ['{$PROJECT}__{context[0]}__hi_{$BASEFILE}.{$EXT}',
                         '{project.code}__{context[0]}__hi_{basefile}.{ext}']
    dir_naming_expr2 = ['{$PROJECT}/{context[1]}/somedir/{@GET(.name_first)}',
                        '{project.code}/{snapshot.context[1]}/somedir/{sobject.name_first}']

    process = 'light'
    context = 'light/special'
    type = 'ma'
    version = 2

    # Build an in-memory snapshot that is not committed to the database.
    virtual_snapshot = Snapshot.create_new()
    virtual_snapshot_xml = '<snapshot process=\'%s\'><file type=\'%s\'/></snapshot>' % (process, type)
    virtual_snapshot.set_value("snapshot", virtual_snapshot_xml)
    virtual_snapshot.set_value("process", process)
    virtual_snapshot.set_value("context", context)
    virtual_snapshot.set_value("snapshot_type", 'file')
    virtual_snapshot.set_sobject(my.person)
    virtual_snapshot.set_value("version", version)

    file_name = "abc.txt"
    file_obj = File(File.SEARCH_TYPE)
    file_obj.set_value("file_name", file_name)

    for naming_expr in file_naming_expr1:
        file_name = naming_util.naming_to_file(naming_expr, my.person,
                                               virtual_snapshot,
                                               file=file_obj,
                                               file_type="main")
        my.assertEquals(file_name, 'unittest__light__hi_abc.txt')

    for naming_expr in dir_naming_expr2:
        dir_name = naming_util.naming_to_dir(naming_expr, my.person,
                                             virtual_snapshot,
                                             file=file_obj,
                                             file_type="main")
        my.assertEquals(dir_name, 'unittest/special/somedir/Philip')
def makeNewDataset(self):
    """Build a reduced dataset keeping the best samples per class window.

    Loads the joined dataset, and for each of the 5 classes walks its
    samples in windows of ``multiple`` (5), keeping ``add`` (2) best
    samples per window via ``bestsSamples``.  The result is saved under
    ``self.output_dataset_name``.
    """
    # NOTE(review): 'joined_datasaet' typo is part of Dataset's keyword
    # API and must stay spelled this way.
    dataset = Dataset(self.dataset_dir, self.dataset_name,
                      self.dataset_output_dir, self.output_dataset_name,
                      joined_datasaet=True)
    dataset.loadDataset()
    new_dataset = pd.DataFrame(columns=dataset.dataset.columns)
    multiple = 5  # window size per selection round
    add = 2       # samples kept per window
    for classes in range(5):
        # All samples belonging to the current class.
        filter_class = dataset.dataset[
            self.getSamplesByClasses(dataset.dataset, classes)]
        for sample in range(0, len(filter_class), multiple):
            print('Class {0} sample {1}'.format(classes, sample))
            subs = None
            if sample == 0:
                subs = filter_class[sample: multiple]
            else:
                subs = filter_class[sample: sample + multiple]
            # Pick the best `add` samples out of this window.
            result_samples = self.bestsSamples(subs, add)
            for result in result_samples:
                new_dataset = new_dataset.append(result)
    f = File(self.dataset_dir)
    f.saveFileAllDataset(new_dataset, self.output_dataset_name)
def setUp(self):
    """Build a File fixture and a FileFuzzyHash instance for the tests.

    All metadata fields are mock values except the absolute path, which
    points at a (potential) file in the current working directory.
    """
    mock_name = "fuzzyhashtest.txt"
    mock_path = "file_path"
    mock_extension = "extension"
    mock_absolute = os.path.join(os.getcwd(), "fuzzyhashtest.txt")
    indicators = DetectionIndicators()
    # File under test, assembled from the mock metadata above.
    self.file = File(mock_name, mock_path, mock_extension,
                     mock_absolute, indicators)
    # Hashing helper under test; takes no constructor arguments.
    self.file_fuzzy_hash = FileFuzzyHash()
def setData(self):
    """Populate the table model from the file at ``self.fpath``.

    Reads the file into a DataFrame, installs non-editable header items,
    and fills one cell per value while emitting progress-bar signals.
    """
    df = File(self.fpath).Read()
    cols = list(df.columns)
    data = df.values
    self.progressBarRangeSignal.emit(0, len(df) * len(df.columns))
    # One spare row/column beyond the data.
    self.setRowCount(len(data) + 1)
    self.setColumnCount(len(cols) + 1)
    #self.setHorizontalHeaderLabels(cols)
    for i in range(len(cols)):
        #header = QtGui.QStandardItem(cols[i])
        header = HeaderItem(str(cols[i]))
        header.setEditable(False)
        header.setSelectable(True)
        header.setEnabled(True)
        header.setFlags(QtCore.Qt.ItemIsSelectable)
        header.setFlags(QtCore.Qt.ItemIsEnabled)
        self.setHorizontalHeaderItem(i, header)
    for i in range(len(data)):
        for j in range(len(cols)):
            self.progressBarValueSignal.emit(len(cols) * i + j + 1)
            # Skip missing values.  The original also tested
            # ``data[i][j] == np.nan``, which is dead code: NaN never
            # compares equal to anything.  NaN arrives from pandas as a
            # float and is skipped by this type check, so removing the
            # dead test preserves behavior.
            if type(data[i][j]) == float:
                continue
            val = str(data[i][j]).encode('utf8').decode('utf8')
            item = QtGui.QStandardItem(val)
            self.setItem(i, j, item)
    self.progressBarDelSignal.emit()
def generate(self):
    """Materialise train/val/test image folders for the output dataset.

    Splits the dataset 70/15/15, then for each split copies (and
    optionally resizes) the selected source images into
    ``<dst>/<name>/<split>/<name>/images/`` and saves the per-split
    metadata file.
    """
    self.makeDirDataset()
    Dataset.splitDataset(self, 0.7, 0.15, 0.15)
    # folder is a (split_name, split_dataframe) pair.
    for folder in zip(self.folders, self.sets):
        aux_dataset = []
        print("Initialized -> ", folder[0])
        dataset_file = File(os.path.join(self.dst, self.output_dataset_name,
                                         folder[0],
                                         self.output_dataset_name))
        for sample in folder[1].values:
            # NOTE(review): `repeted` is not defined in this method; it
            # presumably comes from module scope — confirm.
            if (not self.check(aux_dataset, sample[1]) or repeted):
                img = cv.imread(os.path.join(self.src, sample[0][:]),
                                cv.IMREAD_UNCHANGED)
                if (self.resize):
                    resized = cv.resize(img, self.dimension,
                                        interpolation=cv.INTER_AREA)
                    cv.imwrite(
                        os.path.join(self.dst, self.output_dataset_name,
                                     folder[0], self.output_dataset_name,
                                     "images/", sample[0]), resized)
                else:
                    # Bug fix: was ``self.folder[0]`` — an attribute that
                    # does not exist — instead of the loop's folder name.
                    cv.imwrite(
                        os.path.join(self.dst, self.output_dataset_name,
                                     folder[0], self.output_dataset_name,
                                     "images/", sample[0]), img)
                aux_dataset.append(sample)
        dataset_file.saveFile(aux_dataset, folder)
def __init__(self, url, path='', name='pres', extension='.pdf'):
    """Prepare (but do not run) a wget download of *url* into a presentation File.

    :param url: source URL to fetch
    :param path: destination directory for the output file
    :param name: base name of the output file
    :param extension: output file extension
    """
    # Nothing has been executed yet.
    self.performed = False
    # Target file the command will write to.
    self.out = File(name, path=path, extension=extension,
                    type='presentation')
    self.cmd = 'wget %s -O %s' % (url, self.out.fullname)
def showData(self):
    """Render the file at ``self.fpath`` into this table widget.

    Loads a DataFrame, sizes the table with a few spare rows/columns,
    sets the column headers, and fills one cell per value while emitting
    progress-bar signals.
    """
    print('showData')
    df = File(self.fpath).Read()
    cols = list(df.columns)
    data = df.values
    self.progressBarRangeSignal.emit(0, len(data) * len(cols))
    # A few spare rows/columns beyond the data.
    self.setRowCount(len(data) + 5)
    self.setColumnCount(len(cols) + 5)
    #self.setHorizontalHeaderLabels(cols)
    for i in range(len(cols)):
        header = QtWidgets.QTableWidgetItem(str(cols[i]))
        self.setHorizontalHeaderItem(i, header)
    for i in range(len(data)):
        for j in range(len(cols)):
            self.progressBarValueSignal.emit(len(cols) * i + j + 1)
            # Skip missing values.  The original also tested
            # ``data[i][j] == np.nan``, which is dead code: NaN never
            # compares equal to anything.  NaN arrives from pandas as a
            # float and is skipped by this type check, so removing the
            # dead test preserves behavior.
            if type(data[i][j]) == float:
                continue
            val = str(data[i][j]).encode('utf8').decode('utf8')
            item = QtWidgets.QTableWidgetItem(val)
            self.setItem(i, j, item)
    self.progressBarDelSignal.emit()
def create_file(data, parent, isdir=False):
    """
    Create a File object representing either a file or directory.

    :param data: either a plain name (a "fake" item with no database
        entry yet) or a db record dict with 'type', 'id' and 'name' keys
    :param parent: the parent File, or the media object for the root
    :param isdir: True when the item is a directory
    :raises ValueError: when *data*/*parent* match none of the cases
    """
    if isinstance(data, str):
        # Fake item: no database entry exists yet.
        id = None
        filename = parent.filename + data
        data = { 'name': data }
        if parent and parent._beacon_id:
            data['parent_type'], data['parent_id'] = parent._beacon_id
        media = parent._beacon_media
        if isdir:
            filename += '/'
    elif isinstance(parent, File):
        # db data
        id = (data['type'], data['id'])
        media = parent._beacon_media
        filename = parent.filename + data['name']
        if isdir:
            filename += '/'
    elif not data['name']:
        # Root directory: parent is the media itself.
        id = (data['type'], data['id'])
        media = parent
        parent = None
        filename = media.mountpoint
    else:
        # Bug fix: the original passed `data` as a second ValueError
        # argument instead of applying the %-format, so the message was
        # never interpolated.
        raise ValueError('unable to create File item from %s' % (data,))
    return File(id, filename, data, parent, media, isdir)
def __ReadDatabase(self):
    """Parse the XML database and index its File entries by file name."""
    tree = ElementTree()
    tree.parse(self.__database)
    # Every element holds a file name as text and its CRC as attribute.
    for node in tree.findall(".//*"):
        entry = File(node.text, node.attrib["crc"])
        self.__crcDatabase[entry.FileName] = entry
def send(self):
    """ Send written messages

    Reads the cipher mode and receiver address from the UI, saves the
    composed message to ``files/message.txt``, encrypts it with the
    generated key/IV and ships it on a background SendThread.  Aborts
    with a console message when keys or the address are missing.
    """
    mode = self._mode_chooser.get_active()
    host = self._receiver_address.get()
    # Both key and IV must have been generated beforehand.
    if not (self._key.key and self._iv.key):
        print('Keys are not generated')
        return
    if host:
        path = 'files/message.txt'
        self._save_message_to_file(path)
        message_file = File(path)
        message_file.encrypt(self._key.key, self._iv.key, mode=mode,
                             progress_func=self._progress_func)
        # NOTE(review): the sleep presumably lets the progress UI settle
        # before the transfer thread starts — confirm.
        time.sleep(.1)
        send_thread = SendThread(message_file, mode=mode, host=host,
                                 show_progress_func=self._progress_func)
        send_thread.start()
    else:
        print('You have to specify receiver IP address')