async def add_files(self, ctx, rewrite: typing.Optional[str]):
    """Add the message's attachments to the current branch.

    :param ctx: Command context whose message carries the attachments.
    :param rewrite: When '-r', existing files are overwritten; otherwise
        duplicates are reported and skipped.
    """
    if len(ctx.message.attachments) == 0:
        await ctx.send('No files attached.'
                       ' Attach them to add to branch')
        return
    has_files = False
    if rewrite and rewrite == '-r':
        # Overwrite mode: add every attachment unconditionally.
        await ctx.send(f'Files will be overridden, if exist')
        for attach in ctx.message.attachments:
            self.curr_branch.add_element(File(attach, self.curr_branch))
            await ctx.send(f'File: {attach.filename} was added')
    else:
        # Default mode: skip attachments that already exist on the branch.
        for attach in ctx.message.attachments:
            if self.curr_branch.has_element(attach.filename):
                has_files = True
                await ctx.send(f'File: {attach.filename}'
                               ' already exists')
            else:
                self.curr_branch.add_element(File(attach, self.curr_branch))
                await ctx.send(f'File: {attach.filename} was added')
    # Tell the user how to force-overwrite when duplicates were skipped.
    if has_files:
        await ctx.send('To override files, set "-r" flag')
    return
class FileHSV:
    """Loads HSV threshold values for a target from a backing file."""

    def __init__(self, name):
        """Create the handler and read the stored values.

        :param name: Target name
        """
        default_ranges = {'H': (0, 255), 'S': (0, 255), 'V': (0, 255)}
        self.file = File(name, default_ranges, 'hsv', 'json')
        self.hsv_values = self.file.load_file()

    def save_hsv_values(self):
        """Do nothing: this handler never writes values back."""
        pass

    def reload(self):
        """Re-read the HSV values from the backing file."""
        self.hsv_values = self.file.load_file()

    def get_hsv(self) -> dict:
        """Return the cached HSV values.

        Shares an interface with Trackbars so either can be used.

        :return: Cached HSV values
        """
        return self.hsv_values
def updateFileRecords(self, path, localFileSet, remoteFileSet):
    """Post a remote record for every local file missing from the remote set.

    Fix: the legacy `except Exception, e` form is a syntax error on
    Python 3; `except Exception as e` works on 2.6+ and 3.x alike.

    :param path: Local directory whose files are being synchronised.
    :param localFileSet: Filenames present locally.
    :param remoteFileSet: Filenames already known remotely.
    """
    pathid = None
    for localFile in localFileSet:
        if localFile not in remoteFileSet:
            try:
                # Create the remote path lazily, only when needed.
                if not pathid:
                    pathid = self.getOrCreateRemotePath(path)
                log.debug("Attempting to add %s" % (localFile, ))
                fullPath = stripUnicode(localFile, path=path)
                try:
                    fullPath = makeFileStreamable(fullPath,
                                                  appendSuffix=True,
                                                  removeOriginal=True,
                                                  dryRun=False)
                except Exception as e:
                    log.error(e)
                    log.error(
                        "Something bad happened. Attempting to continue")
                if os.path.exists(fullPath):
                    newFile = File(
                        os.path.basename(fullPath),
                        pathid,
                        os.path.getsize(fullPath),
                        True,
                    )
                    newFile.post()
            except Exception as e:
                # Best-effort sync: log and continue with the next file.
                log.error(e)
                continue
def send(self):
    """Encrypt the written message and send it to the receiver host.

    Requires generated key/IV and a receiver address; otherwise reports
    the problem and returns without sending.
    """
    # Chosen cipher mode and target host from the UI widgets.
    mode = self._mode_chooser.get_active()
    host = self._receiver_address.get()
    if not (self._key.key and self._iv.key):
        print('Keys are not generated')
        return
    if host:
        path = 'files/message.txt'
        self._save_message_to_file(path)
        message_file = File(path)
        message_file.encrypt(self._key.key, self._iv.key, mode=mode,
                             progress_func=self._progress_func)
        # Brief pause before starting the sender thread —
        # presumably to let the progress UI settle; TODO confirm.
        time.sleep(.1)
        send_thread = SendThread(message_file, mode=mode, host=host,
                                 show_progress_func=self._progress_func)
        send_thread.start()
    else:
        print('You have to specify receiver IP address')
def on_modified(self, event):
    """Handle a filesystem 'modified' event: re-add the file to IPFS and
    re-link it from its parent directory chain.

    :param event: Watchdog event carrying the modified path.
    """
    # Directories are handled elsewhere; only act on files.
    if os.path.isdir(event.src_path):
        return
    file = File(event.src_path.replace(os.sep, '/'))
    self._logger.info("Modified %s" % file.path)
    parent_dir_path = os.path.dirname(file.path)
    file_name = os.path.basename(file.path)
    # Add modified file to IPFS and pin it on the cluster.
    file.multihash = self._ipfs_client.add_file(file)
    self._ipfs_cluster.pin(file.multihash)
    # Replace modified file's hash in the content map.
    self._content.add(file.path, file.multihash)
    if self._content[parent_dir_path]:
        # Remove link from parent to the file before being modified.
        self._ipfs_client.rm_link(self._content[parent_dir_path], file_name)
        # Add link from parent to the file after being modified.
        new_parent_dir_hash = self._ipfs_client.add_link(
            self._content[parent_dir_path], file_name, file.multihash)
        # Record the parent directory's new hash.
        self._content.add(parent_dir_path, new_parent_dir_hash)
        # Propagate the updated parent hash up through ancestor dirs.
        self._add_links_to_parent_dirs(parent_dir_path,
                                       self._content[parent_dir_path])
def __init__(self):
    """Initialise the interface by loading data from the init file."""
    print("Initializing interface...")
    self.init_file = File('', [False, False, True])
    self.init_data = self.init_file.open_file()
    print("Interface initialized!")
    # Tiles are populated later; start with an empty collection.
    self.tile_arr = []
def open(self, mode="a", buffering=None):
    """open the file

    Args:
        mode: file open mode for the first file to be opened.
            other files will be opened in "w" mode
        buffering: same as in the built-in "open" command
    Returns:
        None
    Raises:
        IOError: same as in the built-in "open" command
        a.infra.lock.multi_process.LockError - when lock is not taken when
            required (only working in multi processes mode).
    """
    self._verifyLockTakenIfNeeded()
    self._buffering = buffering
    # Rotate before opening: stage the enforcer and move any current
    # file aside so we start on a fresh "current" file.
    self._rotatingFileSizeEnforcer.prepare()
    self._rotatingFileSizeEnforcer.moveCurrentToPending()
    newFileName = self._rotatingFileSizeEnforcer.getCurrentFileName()
    self._log("open").debug1("first file name '%s'", newFileName)
    # Explicit base-class init (re)binds this object to the new file name.
    File.__init__(self, logger=self._log, fileName=newFileName, mode=mode,
                  buffering=buffering)
    self._wasOpened = True
def send_thread(filename, server_address, progress, lock):
    """Send *filename* over UDP using a stop-and-wait protocol.

    Fix: after the FILE NOT FOUND message the function previously fell
    through and used the undefined ``file_obj`` (NameError); it now
    returns. The bare ``except`` around the retry is narrowed to
    ``Exception`` so SystemExit/KeyboardInterrupt still propagate.

    :param filename: Name of the file inside the source directory.
    :param server_address: (host, port) tuple of the receiver.
    :param progress: Shared value updated with percent complete.
    :param lock: Lock guarding *progress*.
    """
    stop = False
    clientSocket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    clientSocket.settimeout(1)
    counter = 0
    retry_counter = 0
    path = '{}/' + filename
    file_path = path.format(file.get_source_directory())
    try:
        file_obj = File(file_path)
    except FileNotFoundError:
        print('\n\n<<< FILE NOT FOUND >>>\n')
        return  # bug fix: previously continued with file_obj undefined
    chunk_generator = file_obj.get_chunks_generator()
    num_of_chunk = file_obj.calculate_chunks_number()
    packet_class = Packet(counter)
    # Handshake: send the file name, receive the data port to use.
    clientSocket.sendto(filename.encode('utf-8').strip(), server_address)
    server_port = clientSocket.recv(2)
    receiver_port = int.from_bytes(server_port, byteorder='little')
    for chunk in chunk_generator:
        counter += 1
        success = False
        # The final chunk is marked so the receiver knows to stop.
        if (counter == num_of_chunk):
            packet = packet_class.create_last_packet(chunk)
        else:
            packet = packet_class.create_packet(chunk)
        while not (success):
            if (retry_counter > 9):
                print("\n\nServer not responding")
                stop = True
                break
            try:
                clientSocket.sendto(packet,
                                    (server_address[0], receiver_port))
                acknowledgement = int.from_bytes(clientSocket.recv(1024),
                                                 byteorder='little')
                success = True
                retry_counter = 0
            except Exception:
                # Timeout or transient socket error: resend the packet.
                print('\n\n<<< RETRYING >>>', end='\r')
                retry_counter += 1
        if (stop):
            break
        progress_percent = ceil(counter / num_of_chunk * 100.0)
        with lock:
            progress.value = progress_percent
def generate(self):
    """Split the dataset into train/val/test and write resized copies of
    each selected sample plus a per-split dataset file.

    NOTE(review): ``repeted`` is referenced but never defined in this
    scope, and the else-branch uses ``self.folder[0]`` where the
    if-branch uses ``folder[0]`` — both look like latent bugs; confirm
    before relying on either path.
    """
    self.makeDirDataset()
    # 70% train, 15% validation, 15% test.
    Dataset.splitDataset(self, 0.7, 0.15, 0.15)
    for folder in zip(self.folders, self.sets):
        aux_dataset = []
        print("Initialized -> ", folder[0])
        dataset_file = File(os.path.join(self.dst, self.output_dataset_name,
                                         folder[0],
                                         self.output_dataset_name))
        for sample in folder[1].values:
            # print(sample)
            if (not self.check(aux_dataset, sample[1]) or repeted):
                # print(os.path.join(self.src, sample[0][:]))
                img = cv.imread(os.path.join(self.src, sample[0][:]),
                                cv.IMREAD_UNCHANGED)
                if (self.resize):
                    # print(folder[0], sample[0])
                    resized = cv.resize(img, self.dimension,
                                        interpolation=cv.INTER_AREA)
                    cv.imwrite(
                        os.path.join(self.dst, self.output_dataset_name,
                                     folder[0], self.output_dataset_name,
                                     "images/", sample[0]), resized)
                else:
                    cv.imwrite(
                        os.path.join(self.dst, self.output_dataset_name,
                                     self.folder[0],
                                     self.output_dataset_name,
                                     "images/", sample[0]), img)
                aux_dataset.append(sample)
        # print(folder)
        # os.system("pause")
        dataset_file.saveFile(aux_dataset, folder)
def __init__(self):
    """Load the ports file and initialise domain lookup state."""
    print('Ports.__init_()')
    self.file = File('data/ports.json')
    # Mapping and current-domain state, filled in by set_map().
    self.map = {}
    self.domain = ''
    self.domain_code = ''
    self.set_map()
def InitOther():
    """Load the auxiliary lexicons and lookup tables used for diagnosis
    matching, then build the IDF weights."""
    # Load the deletion-word lexicon.
    Diacrisis.delChars = File(Diacrisis.delPath).Read()
    if not Diacrisis.delChars:
        print('读取失败')
    # Load the table of frequently used diagnoses
    # (columns: '诊断' = diagnosis, '编码' = code).
    used_df = File(Diacrisis.usedPath).Read()
    print(len(used_df))
    if len(used_df) > 0:
        Diacrisis.usedWords = pd.Series(index=list(used_df['诊断'].values),
                                        data=list(used_df['编码'].values))
    # Load the word-replacement table.
    re_df = File(Diacrisis.replacePath).Read()
    print(len(re_df))
    if len(re_df) > 0:
        Diacrisis.replaceWords = pd.Series(
            index=list(re_df['ToReplace'].values),
            data=list(re_df['truly'].values))
    # Load the standard ICD table and pre-clean its disease names
    # ('疾病名称' = disease name).
    Diacrisis.stdICD = File(Diacrisis.stdICD_path).Read()
    print(len(Diacrisis.stdICD))
    Diacrisis.stdICD['std-cut'] = Diacrisis.CleanStdICD(
        Diacrisis.stdICD['疾病名称'])[1]
    #Diacrisis.stdICD.to_csv(Diacrisis.stdICD_path, encoding = 'utf8', index = False)
    # Build the IDF weight table.
    Diacrisis.Idf()
def owl(message):
    """Reply with an owl picture URL, or a fallback message on failure.

    :param message: Incoming chat message used to send the reply.
    """
    try:
        file = File()
        row = file.get_by_animal(animal.OWL)
        message.send(row.url)
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt
        # still propagate; any lookup failure yields the fallback text.
        message.send(NO_PICTURE_MESSAGE)
def getInfoFile(parent, fname):
    """Build a File record for *fname* inside *parent*'s directory,
    attaching its stat metadata and raw content.

    Fixes: the opened handle was never closed (leak) — reads now happen
    in a context manager; the bare except is narrowed to OSError; the
    shadowed builtins ``type``/``file`` are renamed.

    :param parent: Object exposing dirPath and dirId.
    :param fname: File name within the parent directory.
    :return: Populated File record, or None when the file can't be read.
    """
    fullPath = parent.dirPath + '/' + fname
    out = subprocess.Popen(['stat', fullPath],
                           stdout=subprocess.PIPE,
                           stderr=subprocess.STDOUT)
    stdout, stderr = out.communicate()
    res = stdout.decode("utf-8")
    # Positional fields of `stat` output: permission, inode, owner, group, size.
    attrs = res.split()
    permission = attrs[2]
    fileNode = int(attrs[3])
    fileOwner = attrs[4]
    groupOwner = attrs[5]
    fileSize = int(attrs[7])
    # Derive a type from the extension; dot-files count as 'hidden'.
    parts = fname.split('.')
    fileType = 'hidden'
    if len(parts) > 1 and len(parts[0]) > 0:
        fileType = parts[-1]
    date = formatDate(res)
    record = File(parent.dirId, fname, permission, fileOwner, groupOwner,
                  fileType, fileSize, fileNode, date, fullPath)
    try:
        # Bug fix: the handle was previously never closed.
        with open(fullPath, "rb") as f:
            record.content = f.read()
    except OSError:
        return None
    return record
def hedgehog(message):
    """Reply with a hedgehog picture URL, or a fallback message on failure.

    :param message: Incoming chat message used to send the reply.
    """
    try:
        file = File()
        row = file.get_by_animal(animal.HEDGEHOG)
        message.send(row.url)
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt
        # still propagate; any lookup failure yields the fallback text.
        message.send(NO_PICTURE_MESSAGE)
def chinchilla(message):
    """Reply with a chinchilla picture URL, or a fallback message on failure.

    :param message: Incoming chat message used to send the reply.
    """
    try:
        file = File()
        row = file.get_by_animal(animal.CHINCHILLA)
        message.send(row.url)
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt
        # still propagate; any lookup failure yields the fallback text.
        message.send(NO_PICTURE_MESSAGE)
def iyashi(message):
    """Reply with a random picture URL, or a fallback message on failure.

    :param message: Incoming chat message used to send the reply.
    """
    try:
        file = File()
        row = file.get_by_random()
        message.send(row.url)
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt
        # still propagate; any lookup failure yields the fallback text.
        message.send(NO_PICTURE_MESSAGE)
class Config:
    """Application configuration backed by data/config.json."""

    def __init__(self):
        """Read the config file, defaulting to an empty mapping, and make
        sure the 'domains' section exists."""
        print('Config.__init__()')
        self.file = File('data/config.json')
        config_json = self.file.read()
        if config_json == '':
            self.all = {}
        else:
            self.all = json.loads(config_json)
        #pprint(self.all)
        #pprint(self.all.keys())
        if 'domains' not in self.all.keys():
            UI.print('domains does not exist in config - creating it')
            self.all['domains'] = {}
        #if 'ports' not in self.all.keys():
        #    UI.print('ports does not exist in config - creating it');
        #    self.all['ports'] = {}
        #pprint(self.all)

    def save(self):
        """Write the configuration back as pretty-printed, sorted JSON."""
        #self.file.write(json.dumps(self.all), 'w')
        self.file.write(json.dumps(self.all, indent=4, sort_keys=True), 'w')
        #json.dumps(parsed) '''
def makeNewDataset(self):
    """Build a rebalanced dataset by picking the best ``add`` samples out
    of every window of ``multiple`` consecutive samples per class.

    NOTE(review): ``DataFrame.append`` is deprecated in modern pandas
    (use ``pd.concat``); left unchanged here.
    """
    dataset = Dataset(self.dataset_dir, self.dataset_name,
                      self.dataset_output_dir, self.output_dataset_name,
                      joined_datasaet=True)
    dataset.loadDataset()
    new_dataset = pd.DataFrame(columns=dataset.dataset.columns)
    # Window size and how many "best" samples to keep per window.
    multiple = 5
    add = 2
    # Classes are assumed to be labelled 0..4 — TODO confirm.
    for classes in range(5):
        filter_class = dataset.dataset[
            self.getSamplesByClasses(dataset.dataset, classes)]
        for sample in range(0, len(filter_class), multiple):
            print('Class {0} sample {1}'.format(classes, sample))
            subs = None
            if sample == 0:
                subs = filter_class[sample: multiple]
            else:
                subs = filter_class[sample: sample + multiple]
            result_samples = self.bestsSamples(subs, add)
            for result in result_samples:
                new_dataset = new_dataset.append(result)
    f = File(self.dataset_dir)
    f.saveFileAllDataset(new_dataset, self.output_dataset_name)
def create(self):
    """Generate the class header from its template and write it out.

    Fix: the destination path was computed into ``path`` but then the
    same expression was duplicated inline in the write call; the local
    is now actually used.
    """
    path = self.path.to_include_directory / (self.class_name + ".h")
    template = InterfaceHeader(
        File.read(self.path.to_class_header_template),
        self.create_license_header())
    File.write(path, template.instantiate_with(self.class_name))
def __populate(self, metainfo):
    """Populate this torrent's fields from the decoded metainfo dict.

    :param metainfo: Bencode-decoded .torrent dictionary.
    """
    # A torrent file may have a single tracker, multiple trackers,
    # or both a reference to a single tracker and multiple trackers
    # (for backwards compatibility).
    if 'announce' in metainfo:
        # single tracker
        self.trackerURLs.add(metainfo['announce'])
    if 'announce-list' in metainfo:
        # multiple trackers (a list of tiers, hence the flatten)
        self.trackerURLs |= set(
            utilities.flatten(metainfo['announce-list']))
    # Optional informational fields.
    if 'created by' in metainfo:
        self.createdBy = metainfo['created by']
    if 'comment' in metainfo:
        self.comment = metainfo['comment']
    if 'encoding' in metainfo:
        self.encoding = metainfo['encoding']
    if 'creation date' in metainfo:
        self.creationDate = datetime.datetime.fromtimestamp(
            metainfo['creation date'])
    if 'files' in metainfo['info']:
        # multi file mode
        self.singleFile = False
        self.name = metainfo['info']['name']
        self.files = []
        for file in metainfo['info']['files']:
            self.files.append(File(file['path'], file['length']))
    if 'length' in metainfo['info']:
        # single file mode
        self.singleFile = True
        self.name = metainfo['info']['name']
        self.files = [
            File([metainfo['info']['name']], metainfo['info']['length'])
        ]
def added(self, path, client):
    """Handle a newly added file: register it with the server, then
    upload its chunk metadata under the server-assigned id.

    :param path: Filesystem path of the added entry.
    :param client: Client session providing the user's email.
    """
    path = path.replace("\\", "/")
    file_extension_pattern = self.path_to_watch + "/chunks"
    # Ignore Windows thumbnail caches.
    if (re.search("Thumbs.db", path) == None):
        # Checks if folder is chunks, only if it isn't it will send modifications
        if (re.search(file_extension_pattern, path) == None):
            # Prints message of file added
            print_message("Added: " + path)
            if (os.path.isfile(path)):
                # Creates file information
                f = File(path, client)
                f.generate_file_id()
                f.get_salt()
                file_size = f.get_file_size()
                # Gets relative_path location
                relative_path = path.split(self.path_to_watch)[1].replace("\\", "/")
                # ISO-8601-ish timestamp: 'YYYY-MM-DDTHH:MM:SSZ'.
                modification_date = f.get_modification_date().replace(" ", "T").split(".")[0] + "Z"
                # Sends request to server with information about the new file
                url = self.api + 'files/create.php'
                values = {'apikey': '12',
                          'path': relative_path,
                          'user': client.get_email(),
                          'modification': f.get_file_id(),
                          'dateModified': modification_date,
                          'size': str(int(file_size))}
                response = json_request(url, values)
                if (response['result'] == 'notEnoughSpace'):
                    print_message("Not enough space. Space left to use " + str(response['spaceLeft']))
                    # Pop a GUI error dialog without showing a main window.
                    window = Tkinter.Tk()
                    window.wm_withdraw()
                    tkMessageBox.showerror(title="Budibox", message="Not enough space! Space left to use " + str(response['spaceLeft']) + "bytes !")
                    return
                if (response['result'] != 'ok'):
                    print_message("Error sending information created about file " + path)
                    return
                print_message("Created file " + path + " successfully")
                # Look up the database id the server assigned to the file.
                url = self.api + 'files/getId.php'
                values = {'apikey': '12',
                          'path': relative_path,
                          'user': client.get_email()}
                response = json_request(url, values)
                if (response['result'] != 'ok'):
                    print_message("Error getting fileId of " + path)
                    return
                print_message("Get fileId of " + path + "successfully")
                # Send information about chunks to server
                db_file_id = response['id']
                f.generate_chunks(db_file_id)
def cities(number, year):
    """Return the requested city attributes for *year* as a JSON response."""
    payload = json.loads(request.form['data'])
    cities_csv = File('../data/cidades.csv')
    selection = cities_csv.select(payload['attributes'], year)
    return jsonify(selection)
def updateFileRecords(self, path, localFileSet, remoteFileSet):
    """Post a remote record for every local file missing from the remote set.

    Fix: the legacy `except Exception, e` form is a syntax error on
    Python 3; `except Exception as e` works on 2.6+ and 3.x alike.

    :param path: Local directory whose files are being synchronised.
    :param localFileSet: Filenames present locally.
    :param remoteFileSet: Filenames already known remotely.
    """
    pathid = None
    for localFile in localFileSet:
        if localFile not in remoteFileSet:
            try:
                # Create the remote path lazily, only when needed.
                if not pathid:
                    pathid = self.getOrCreateRemotePath(path)
                log.debug("Attempting to add %s" % (localFile,))
                fullPath = stripUnicode(localFile, path=path)
                try:
                    fullPath = makeFileStreamable(fullPath,
                                                  appendSuffix=True,
                                                  removeOriginal=True,
                                                  dryRun=False)
                except Exception as e:
                    log.error(e)
                    log.error("Something bad happened. Attempting to continue")
                if os.path.exists(fullPath):
                    newFile = File(os.path.basename(fullPath),
                                   pathid,
                                   os.path.getsize(fullPath),
                                   True,
                                   )
                    newFile.post()
            except Exception as e:
                # Best-effort sync: log and continue with the next file.
                log.error(e)
                continue
def _build_cmd(self):
    """Build the shell command that normalises the first input file.

    - media: re-encode to H.264/AAC .mp4 in a tmp subdirectory
    - presentation (non-PDF): convert to PDF via headless LibreOffice
    - slide with an odd width or height: crop to even dimensions
    - anything else: pass through unchanged with an empty command

    Sets ``self.out`` to the resulting File and returns the command
    string ('' when nothing needs to be done).
    """
    file = self.input[0]
    if file.type == 'media':
        self.out = File(file.filename,
                        path=os.path.join(file.path, 'tmp'),
                        extension='.mp4', ts=file.ts)
        cmd = 'ffmpeg -i %s ' % file.fullname
        cmd += '-filter_complex "[0:v]setpts=PTS-STARTPTS" '
        cmd += '-c:v libx264 -keyint_min 15 -g 15 '
        cmd += '-c:a libfdk_aac '
        cmd += self.out.fullname
    elif file.type == 'presentation' and file.extension != '.pdf':
        cmd = 'libreoffice --headless --invisible --convert-to pdf --outdir %s %s' % (
            file.path, file.fullname)
        self.out = File(file.filename, path=file.path, extension='.pdf',
                        ts=file.ts)
    elif file.type == 'slide' and (file.meta['width'] % 2 == 1
                                   or file.meta['height'] % 2 == 1):
        self.out = File(file.filename + 'even', path=file.path,
                        extension='.png', ts=file.ts,
                        duration=file.duration)
        cmd = 'ffmpeg -i %s ' % file.fullname
        # Bug fix: use floor division. Under Python 3 true division,
        # `5 / 2 * 2` is 5.0, so odd dimensions were never rounded down
        # to the even size the crop is meant to produce.
        cmd += '-filter_complex "[0:v]crop=%d:%d" ' % (
            file.meta['width'] // 2 * 2, file.meta['height'] // 2 * 2)
        cmd += self.out.fullname
    else:
        cmd = ''
        self.out = file
    return cmd
def save_data_2_file(self, blog_list, file_name='blog_info',):
    """Write each blog's name and URL to *file_name*, one per line.

    :param blog_list: Iterable of dicts with 'blog_name' and 'blog_url'.
    :param file_name: Base name of the output file.
    """
    file = File()
    # NOTE(review): 'opne_file' looks like a typo for 'open_file', but the
    # method is defined on the File class — it must be renamed there, not here.
    file_write_file = file.opne_file(file_name, "w")
    for blog in blog_list:
        content = blog['blog_name'] + '------' + blog['blog_url'] + '\n'
        file.save_file(file_write_file, file_name, content)
    file_write_file.close()
def __init__(self, generator=None):
    """Create a Python RPC task.

    Sets up per-task JSON files that carry the serialized call arguments
    in and the result back out, and links the staged ``_run_.py`` runner.

    :param generator: Optional task generator, forwarded to the base class.
    """
    super(PythonTask, self).__init__(generator)
    # Deferred call description: module/source, function name, arguments.
    self._python_import = None
    self._python_source_files = None
    self._python_function_name = None
    self._python_args = None
    self._python_kwargs = None
    # self.executable = 'python'
    # self.arguments = '_run_.py'
    self.then_func_name = 'then_func'
    # Unique per-task JSON files, keyed by this task's uuid.
    self._rpc_input_file = \
        JSONFile('file://_rpc_input_%s.json' % hex(self.__uuid__))
    self._rpc_output_file = \
        JSONFile('file://_rpc_output_%s.json' % hex(self.__uuid__))
    # input args -> input.json
    self.pre.append(self._rpc_input_file.transfer('input.json'))
    # output args -> output.json
    self.post.append(File('output.json').transfer(self._rpc_output_file))
    # Link the staged runner script into the working directory.
    f = File('staging:///_run_.py')
    self.pre.append(f.link())
    self.add_cb('success', self.__class__._cb_success)
    self.add_cb('submit', self.__class__._cb_submit)
    # if True the RPC result will be stored in the DB with the task
    self.store_output = True
class GameStats:
    """Stores statistic data of alien invasion."""

    def __init__(self, game_set):
        """Initiate the statistic data.

        :param game_set: Settings object providing ship_limit.
        """
        self.game_set = game_set
        self.reset_stats()
        self.file = File()
        # Start the alien invasion in an inactive state.
        self.game_active = False
        # The high score can never be reset. Read the saved score once
        # (fix: read_file() was previously called twice — once for the
        # truthiness test and again for the assignment).
        saved_high_score = self.file.read_file()
        self.high_score = saved_high_score if saved_high_score else 0

    def reset_stats(self):
        """Reset the data that can change during the game."""
        self.ships_left = self.game_set.ship_limit
        self.score = 0
        self.level = 1
        # Flag to know if the alien invasion is paused or not.
        self.game_paused = False
def _test_naming_util(my):
    """Exercise NamingUtil: equivalent old-style ($-token) and new-style
    (dotted) naming expressions must produce identical file and directory
    names for an uncommitted, in-memory snapshot."""
    #my.clear_naming()
    naming_util = NamingUtil()
    # these should evaluate to be the same
    file_naming_expr1 = ['{$PROJECT}__{context[0]}__hi_{$BASEFILE}.{$EXT}',
                         '{project.code}__{context[0]}__hi_{basefile}.{ext}']
    dir_naming_expr2 = ['{$PROJECT}/{context[1]}/somedir/{@GET(.name_first)}',
                        '{project.code}/{snapshot.context[1]}/somedir/{sobject.name_first}']
    process = 'light'
    context = 'light/special'
    type = 'ma'
    version = 2
    # Build a virtual snapshot that is never committed to the database.
    virtual_snapshot = Snapshot.create_new()
    virtual_snapshot_xml = '<snapshot process=\'%s\'><file type=\'%s\'/></snapshot>' % (process, type)
    virtual_snapshot.set_value("snapshot", virtual_snapshot_xml)
    virtual_snapshot.set_value("process", process)
    virtual_snapshot.set_value("context", context)
    virtual_snapshot.set_value("snapshot_type", 'file')
    virtual_snapshot.set_sobject(my.person)
    virtual_snapshot.set_value("version", version)
    file_name = "abc.txt"
    file_obj = File(File.SEARCH_TYPE)
    file_obj.set_value("file_name", file_name)
    for naming_expr in file_naming_expr1:
        file_name = naming_util.naming_to_file(naming_expr, my.person,
                                               virtual_snapshot,
                                               file=file_obj,
                                               file_type="main")
        my.assertEquals(file_name, 'unittest__light__hi_abc.txt')
    for naming_expr in dir_naming_expr2:
        dir_name = naming_util.naming_to_dir(naming_expr, my.person,
                                             virtual_snapshot,
                                             file=file_obj,
                                             file_type="main")
        my.assertEquals(dir_name, 'unittest/special/somedir/Philip')
def select_input(self):
    """
    Save the name of the input file, clear output label, set progress bar
    to zero and actualize input label. When Select Input button is
    triggered the method is invoked.
    """
    full_paths = QFileDialog.getOpenFileNames(
        None, 'Open File', '',
        '*.jpg *.jpeg *.png *.bmp *.mp4 *.avi *.wmv *.mov *.mkv')
    if full_paths:
        # Clear input files list
        self.files.clear()
        self.output_name = None
        # getOpenFileNames returns (paths, selected_filter); use the paths.
        full_path = full_paths[0]
        print(full_paths)
        file = File(self.tmp_dir, full_path[0])
        # Clear output label image, progress bar and label text
        self.output_label.clear()
        self.progress_bar.setValue(0)
        self.path_label.setText(file.get_output_name(self.output_type))
        # Actualize input label image
        if file.type == InputType.IMAGE:
            image = file.full_path
        elif file.type == InputType.VIDEO:
            # Preview videos with their first frame.
            ret, image = cv2.VideoCapture(file.full_path).read()
            if not ret:
                raise ValueError
            image = npimg_to_pixmap(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))
        else:
            raise ValueError
        self.actualize_input_label(image)
        # Add input file to a list of files
        self.files.append(file)
def __init__(self, name: str):
    """
    Create a networktables server, and create the vision table.

    :param name: The name of the target.
    """
    self.name = name
    self.prefix = '/vision/' + self.name + '_'
    self.team_number = 5987
    # The values file for the target, with a default value for when no
    # such file exists.
    # Bug fix: .format(...) was previously called on the extension
    # literal 'nt' instead of on the default-contents template, so the
    # '{}' placeholders were never filled in.
    self.file = File(
        self.name,
        '[NetworkTables Storage 3.0]\nstring "/vision/{}_name"={}'.format(
            self.name, self.name),
        'values', 'nt')
    # Server IP returned by get_nt_server()
    server = self.get_nt_server()
    # Set update rate as defined in constants.py
    NetworkTables.setUpdateRate(constants.NT_RATE)
    logging.info(
        'Initiating network tables connection with {}'.format(server))
    NetworkTables.initialize(server=server)
    NetworkTables.addConnectionListener(self.connection_listener,
                                        immediateNotify=True)
    # Create individual table instead of clogging SmartDashboard
    self.table = NetworkTables.getTable('vision')
def add_url(self, url):
    """Register a Mega share URL (file or folder), decrypting node
    attributes and appending File objects to self.files.

    :param url: Mega share URL (old or new fragment format).
    :raises Exception: when the link does not match a known format.
    """
    # Normalise old-style URL variants before parsing the fragment.
    url = url.replace('/embed#', '/#')
    url = url.replace('/#!#!', '/#!')  # old2new
    url = url.split("/#")[1]
    url = url.replace('#', '!')  # old2new
    id_video = None
    # An optional '|<node-id>' suffix selects a single file in a folder.
    if "|" in url:
        url, id_video = url.split("|")
    if url.startswith("F!"):
        # Folder link: F!<folder-id>!<folder-key>
        if len(url.split("!")) == 3:
            folder_id = url.split("!")[1]
            folder_key = url.split("!")[2]
            master_key = self.base64_to_a32(folder_key)
            files = self.api_req({
                "a": "f",
                "c": 1,
                "r": 1
            }, "&n=" + folder_id)
            for file in files["f"]:
                if file["t"] == 0:  # node type 0 == regular file
                    if id_video and id_video != file["h"]:
                        continue
                    # Per-file key is stored as '<owner>:<encrypted-key>'.
                    key = file['k'][file['k'].index(':') + 1:]
                    key = self.decrypt_key(self.base64_to_a32(key),
                                           master_key)
                    # Fold the 8-word node key down to 4 words
                    # (Mega's key derivation) — presumably the AES key;
                    # verify against the download code.
                    k = (key[0] ^ key[4], key[1] ^ key[5], key[2] ^ key[6],
                         key[3] ^ key[7])
                    attributes = self.base64urldecode(file['a'])
                    attributes = self.dec_attr(attributes, k)
                    self.files.append(
                        File(info=attributes, file_id=file["h"], key=key,
                             folder_id=folder_id, file=file, client=self))
        else:
            raise Exception("Enlace no válido")
    elif url.startswith("!") or url.startswith("N!"):
        # Single-file link: !<file-id>!<file-key>
        if len(url.split("!")) == 3:
            file_id = url.split("!")[1]
            file_key = url.split("!")[2]
            file = self.api_req({'a': 'g', 'g': 1, 'p': file_id})
            key = self.base64_to_a32(file_key)
            k = (key[0] ^ key[4], key[1] ^ key[5], key[2] ^ key[6],
                 key[3] ^ key[7])
            attributes = self.base64urldecode(file['at'])
            attributes = self.dec_attr(attributes, k)
            self.files.append(
                File(info=attributes, file_id=file_id, key=key, file=file,
                     client=self))
        else:
            raise Exception("Enlace no válido")
    else:
        raise Exception("Enlace no válido")
class Trackbars:
    """This class handles the trackbar window that allows us to change and
    set the HSV values."""

    def __init__(self, name):
        self.name = name
        self.window = cv2.namedWindow('HSV')  # Create window
        self.callback = lambda v: None  # Dry callback for trackbars since it's not needed
        # Backing file with full-range defaults for when no file exists.
        # NOTE(review): the default H range is (0, 255) while the H
        # trackbars are capped at 179 (OpenCV's hue range) — confirm.
        self.file = File(self.name, {
            'H': (0, 255),
            'S': (0, 255),
            'V': (0, 255)
        }, 'hsv', 'json')
        self.create_trackbars()

    def save_hsv_values(self):
        """Save HSV values to correct file."""
        self.file.save_file(self.get_hsv())

    def reload_trackbars(self):
        """Reloads the trackbars from the file."""
        hsv = self.file.load_file()
        cv2.setTrackbarPos('lowH', 'HSV', hsv['H'][0])
        cv2.setTrackbarPos('highH', 'HSV', hsv['H'][1])
        cv2.setTrackbarPos('lowS', 'HSV', hsv['S'][0])
        cv2.setTrackbarPos('highS', 'HSV', hsv['S'][1])
        cv2.setTrackbarPos('lowV', 'HSV', hsv['V'][0])
        cv2.setTrackbarPos('highV', 'HSV', hsv['V'][1])

    def create_trackbars(self):
        """Create the trackbars initially with the value from the file."""
        hsv = self.file.load_file()
        # Create trackbars for color change
        cv2.createTrackbar('lowH', 'HSV', hsv['H'][0], 179, self.callback)
        cv2.createTrackbar('highH', 'HSV', hsv['H'][1], 179, self.callback)
        cv2.createTrackbar('lowS', 'HSV', hsv['S'][0], 255, self.callback)
        cv2.createTrackbar('highS', 'HSV', hsv['S'][1], 255, self.callback)
        cv2.createTrackbar('lowV', 'HSV', hsv['V'][0], 255, self.callback)
        cv2.createTrackbar('highV', 'HSV', hsv['V'][1], 255, self.callback)

    @staticmethod
    def get_hsv() -> dict:
        """
        Gets HSV values from trackbars.

        :return: HSV values
        """
        low_h = cv2.getTrackbarPos('lowH', 'HSV')
        high_h = cv2.getTrackbarPos('highH', 'HSV')
        low_s = cv2.getTrackbarPos('lowS', 'HSV')
        high_s = cv2.getTrackbarPos('highS', 'HSV')
        low_v = cv2.getTrackbarPos('lowV', 'HSV')
        high_v = cv2.getTrackbarPos('highV', 'HSV')
        return {
            'H': (low_h, high_h),
            'S': (low_s, high_s),
            'V': (low_v, high_v)
        }
def test_load(self):
    """
    description: Load some file.
    """
    loaded = File()
    loaded.load('file.py', 'gcode')
    # A successfully loaded file must yield non-empty content.
    self.assertNotEqual(loaded.get(), '')
def getProjectfileTimestamps(name):
    """Return creation/modification timestamps for *name*'s project file.

    :param name: Project name used to locate the project file.
    :return: Dict with 'creation' and 'modification' timestamps.
    """
    projectfile = File.projectfileFromName(name)
    return {
        "creation": File.fileCreationTimestamp(projectfile),
        "modification": File.fileModificationTimestamp(projectfile),
    }
def get_rotation(self, path_to_file):
    """Return the rotation recorded in the file's metadata, or 0 when the
    file's rotation tag is absent.

    Fix: the initial ``metadata = {}`` assignment was dead code — it was
    unconditionally overwritten inside the context manager.

    :param path_to_file: Path of the media file to inspect.
    :return: Rotation value as an int (0 when not present).
    """
    with exiftool.ExifTool() as et:
        metadata = et.get_metadata(path_to_file)
    f = File(path_to_file)
    # The tag name depends on the media type (image vs video).
    tag = f.rotation_tag()
    if tag not in metadata:
        return 0
    return int(metadata[tag])
def __arguments():
    """Parse command-line arguments for rtlsdr_scan (Python 2 code).

    Returns (isGui, args): isGui is False only when start, end and an
    output file are all supplied, i.e. a headless sweep was requested.
    """
    parser = argparse.ArgumentParser(prog="rtlsdr_scan.py", description='''
        Scan a range of frequencies and save the results to a file''')
    parser.add_argument("-s", "--start", help="Start frequency (MHz)",
                        type=int)
    parser.add_argument("-e", "--end", help="End frequency (MHz)", type=int)
    parser.add_argument("-w", "--sweeps", help="Number of sweeps", type=int,
                        default=1)
    parser.add_argument("-p", "--delay", help="Delay between sweeps (s)",
                        type=int, default=0)
    parser.add_argument("-g", "--gain", help="Gain (dB)", type=float,
                        default=0)
    parser.add_argument("-d", "--dwell", help="Dwell time (seconds)",
                        type=float, default=0.1)
    parser.add_argument("-f", "--fft", help="FFT bins", type=int,
                        default=1024)
    parser.add_argument("-l", "--lo", help="Local oscillator offset",
                        type=int, default=0)
    parser.add_argument("-c", "--conf", help="Load a config file",
                        default=None)
    # Local device index and a remote server are mutually exclusive.
    group = parser.add_mutually_exclusive_group()
    group.add_argument("-i", "--index", help="Device index (from 0)",
                       type=int, default=0)
    group.add_argument("-r", "--remote", help="Server IP and port", type=str)
    # Accepted output types are whatever File can save or plot.
    types = File.get_type_pretty(File.Types.SAVE)
    types += File.get_type_pretty(File.Types.PLOT)
    help = 'Output file (' + types + ')'
    parser.add_argument("file", help=help, nargs='?')
    args = parser.parse_args()
    error = None
    isGui = True
    # Headless mode needs start + end + output file; report whichever
    # of the three is missing.
    if args.start is not None or args.end is not None:
        if args.start is not None:
            if args.end is not None:
                if args.file is not None:
                    isGui = False
                else:
                    error = "No filename specified"
            else:
                error = "No end frequency specified"
        else:
            error = "No start frequency specified"
    elif args.file is not None:
        args.dirname, args.filename = os.path.split(args.file)
    if error is not None:
        print "Error: {}".format(error)
        parser.exit(1)
    return isGui, (args)
def consume(lock, graphs, done, critical):
    """Consume graphs from the queue until *done* signals, saving any
    graph judged (approximately) critical and queueing its file path.

    :param lock: Unused here; part of the worker signature.
    :param graphs: Queue of candidate graphs.
    :param done: Queue acting as a stop flag (non-empty means stop).
    :param critical: Queue receiving saved file paths of critical graphs.
    """
    while done.empty():
        try:
            candidate = graphs.get(block=True, timeout=5)
        except Empty:
            # Nothing arrived within 5s; loop back and re-check `done`.
            continue
        checker = DalGraph(graph=candidate, logger=LOGGER)
        if checker.critical_aprox():
            saved_path = File(DIRECTORY, G=candidate, logger=LOGGER).save()
            if saved_path is not None:
                critical.put(saved_path)
    return
def playVideoKodiFolder(query):
    """Decode *query* and play the Kodi folder it references."""
    from paths import play_video_kodi_folder
    from src.file import File
    folder_path, parse_method = _decodedUrl(query)
    folder_file = File.fromFullpath(folder_path)
    play_video_kodi_folder.play(folder_file, parse_method)
class IMDBScraper:
    """Iterates an IMDB actors data file, yielding Actor objects with
    their films attached.

    NOTE(review): the generator only yields an actor when the NEXT
    actor-name line is reached, so the first value yielded is the dummy
    Actor("init", "actor") placeholder and the final actor in the file
    is never yielded — confirm this is the intended behaviour.
    """

    def __init__(self, file_name):
        self.open_file = File(file_name)
        # Skip the file header so iteration starts at the data section.
        self.open_file.skipToData()

    def next(self):
        # Accumulator for the actor currently being parsed.
        actor = Actor("init", "actor")
        for line in self.open_file.next():
            if (line.containsActorName()):
                # A new actor begins: emit the previous one, start the next.
                yield actor
                actor, film = line.getActorAndFilm()
                actor.addFilm(film)
            else:
                # Continuation line: may carry another film for the actor.
                film = line.getFilm()
                if not film is None:
                    actor.addFilm(film)
def __init__(self):
    """Load configuration, font, file handle and clock, then start."""
    self.config = Config()
    self.config.load('config.yaml')
    # Small font used to render individual cells.
    self.cellfont = pygame.font.SysFont('arial', 10)
    self.file = File()
    self.fpsClock = pygame.time.Clock()
    self.load()
def addToCollection(query):
    """Decode *query* and add the referenced video source to a collection."""
    from paths import add_to_collection
    from src.file import File
    source_id, source_path, source_type = _decodedUrl(query)
    source_file = File.fromFullpath(source_path)
    add_to_collection.add(source_id, source_file, source_type)
def __process_file(self):
    """Calls all methods to process a file.

    Hashes the input file, optionally matches it against the DAT file
    and sorts it into matched/unmatched directories, and emits the
    populated template report when no match was recorded.

    Fix: the get/populate/write template trio was duplicated in two
    branches; it is now a single private helper.

    Return:
        None
    """
    self.file_data = File(self.input_path)
    self.file_data.get_hashes()
    if not self.datfile_path:
        # No DAT file to match against: just produce the report.
        self.__generate_report()
        return
    rom_found_in_datfile = self.search_in_datfile(
        self.search_type, getattr(self.file_data, self.search_type))
    if rom_found_in_datfile:
        self.__print_match_found_in_datfile(rom_found_in_datfile)
        if self.matched_dir:
            self._move_or_copy_file(self.matched_dir)
    else:
        if self.unmatched_dir:
            self._move_or_copy_file(self.unmatched_dir)
        self.__generate_report()

def __generate_report(self):
    """Fill the template with the file's data and write it out."""
    self.__get_template_content()
    self.__populate_template()
    self.__write_populated_template()
def playVideoSource(query):
    """Decode *query* and play the selected source of a video."""
    from file import File
    from paths import play_video_source
    video_id, source_id, collection_path = _decodeQuery(query)
    collection_file = File.fromQuery(collection_path)
    play_video_source.play(video_id, source_id, collection_file)
def browseSource(query):
    """Decode *query* and browse the referenced source's collection."""
    from file import File
    from paths import browse_source
    source_id, collection_path = _decodeQuery(query)
    collection_file = File.fromQuery(collection_path)
    browse_source.browse(source_id, collection_file)
class TestFile(unittest.TestCase): def setUp(self): self.file = File('testfilename', 123, 234, True) @mock.patch('file.requests') def test_getFileSet(self, mock_requests): test_data = {'next': None, 'results': [{'filename': 'file1'}, {'filename': 'file2'}, {'filename': 'file3'}], } mock_request = mock.MagicMock() mock_request.json.return_value = test_data mock_requests.get.return_value = mock_request expectedSet = set(['file1', 'file2', 'file3', ]) self.assertEquals(expectedSet, self.file.getFileSet(1))
def __on_export_geo(self, _event):
    """Export the current scan as geographic data (map image or CSV)."""
    dlgGeo = DialogExportGeo(self, self.spectrum, self.locations,
                             self.settings)
    if dlgGeo.ShowModal() == wx.ID_OK:
        self.status.set_general("Exporting...")
        extent = dlgGeo.get_extent()
        dlgFile = wx.FileDialog(
            self,
            "Export map to file",
            self.settings.dirExport,
            self.filename,
            File.get_type_filters(File.Types.GEO),
            wx.SAVE | wx.OVERWRITE_PROMPT,
        )
        dlgFile.SetFilterIndex(File.GeoType.KMZ)
        if dlgFile.ShowModal() == wx.ID_OK:
            fileName = dlgFile.GetFilename()
            dirName = dlgFile.GetDirectory()
            # Remember the export directory for next time.
            self.settings.dirExport = dirName
            fileName = extension_add(fileName, dlgFile.GetFilterIndex(),
                                     File.Types.GEO)
            fullName = os.path.join(dirName, fileName)
            exportType = dlgFile.GetFilterIndex()
            image = None
            xyz = None
            # CSV export wants the raw xyz samples; other types want
            # the rendered image.
            if exportType == File.GeoType.CSV:
                xyz = dlgGeo.get_xyz()
            else:
                image = dlgGeo.get_image()
            export_map(fullName, exportType, extent, image, xyz)
            self.status.set_general("Finished")
        dlgFile.Destroy()
    dlgGeo.Destroy()
class FileMenuInit:
    """Wires up the File menu (open/quit) handlers for the main window.

    Fixes: comparisons to None now use identity (`is not None`, PEP 8)
    and the local `file` no longer shadows the builtin.
    """

    def __init__(self, builder):
        self.builder = builder
        self.top_level = builder.get_object("top_level")
        self.fo = None
        self.rc = RcFile()
        open_menu_item = self.builder.get_object("open_menu_item")
        open_menu_item.connect("activate", self.on_open_menu_item_activate)
        quit_menu_item = self.builder.get_object("quit_menu_item")
        quit_menu_item.connect("activate", self.on_quit_menu_item_activate)
        open_toolbar_button = self.builder.get_object("open_toolbar_button")
        open_toolbar_button.connect("clicked",
                                    self.on_open_menu_item_activate)
        self.fo = File(self.builder)

    def on_open_menu_item_activate(self, menuitem, data=None):
        """Prompt for a file, retitle the window, remember the directory."""
        chosen_file, chosen_dir = self.fo.load_file(
            self.rc.rc_hash["OPEN_DIR"])
        if chosen_file is not None:
            self.top_level.set_title(os.path.basename(chosen_file))
        if chosen_dir is not None:
            self.rc.UpdateRcValue("OPEN_DIR", chosen_dir)

    def on_quit_menu_item_activate(self, menuitem, data=None):
        """Persist the rc file and quit the GTK main loop."""
        RcFile().WriteRcFile()
        gtk.main_quit()
def __on_export_image(self, _event):
    """Export the current plot to an image file.

    Shows a save-file dialog, then an image-size dialog; cancelling the
    size dialog aborts the export.
    """
    dlgFile = wx.FileDialog(
        self,
        "Export image to file",
        self.settings.dirExport,
        self.filename,
        File.get_type_filters(File.Types.IMAGE),
        wx.SAVE | wx.OVERWRITE_PROMPT,
    )
    # Pre-select PNG as the default export format.
    dlgFile.SetFilterIndex(File.ImageType.PNG)
    if dlgFile.ShowModal() == wx.ID_OK:
        dlgImg = DialogImageSize(self, self.settings)
        if dlgImg.ShowModal() != wx.ID_OK:
            # User cancelled the size dialog: clean up and abort.
            dlgFile.Destroy()
            return
        self.status.set_general("Exporting...")
        fileName = dlgFile.GetFilename()
        dirName = dlgFile.GetDirectory()
        # Remember the chosen directory for the next export.
        self.settings.dirExport = dirName
        # Make sure the filename carries the extension matching the
        # selected filter.
        fileName = extension_add(fileName, dlgFile.GetFilterIndex(),
                                 File.Types.IMAGE)
        fullName = os.path.join(dirName, fileName)
        exportType = dlgFile.GetFilterIndex()
        export_image(fullName, exportType,
                     self.graph.get_figure(),
                     self.settings)
        self.status.set_general("Finished")
    dlgFile.Destroy()
def test_it_should_tell_the_truth_about_JPEG_files(self):
    """A .JPEG path is classified as an image with EXIF metadata tags."""
    subject = File('/path/to/file.JPEG')
    self.assertTrue(subject.is_image())
    self.assertFalse(subject.is_video())
    # Pin every accessor against its expected value in one pass.
    expectations = [
        (subject.filename(), '/path/to/file.JPEG'),
        (subject.ext(), '.jpeg'),
        (subject.media_type(), 'JPEG'),
        (subject.date_taken_tag(), 'EXIF:DateTimeOriginal'),
        (subject.rotation_tag(), 'EXIF:Orientation'),
    ]
    for actual, expected in expectations:
        self.assertEqual(actual, expected)
def test_it_should_tell_the_truth_about_MP4_files(self):
    """A .MP4 path is classified as a video with QuickTime/Composite tags."""
    subject = File('/path/to/file.MP4')
    self.assertFalse(subject.is_image())
    self.assertTrue(subject.is_video())
    # Pin every accessor against its expected value in one pass.
    expectations = [
        (subject.filename(), '/path/to/file.MP4'),
        (subject.ext(), '.mp4'),
        (subject.media_type(), 'MP4'),
        (subject.date_taken_tag(), 'QuickTime:CreateDate'),
        (subject.rotation_tag(), 'Composite:Rotation'),
    ]
    for actual, expected in expectations:
        self.assertEqual(actual, expected)
def test_it_should_tell_the_truth_about_MTS_files(self):
    """A .MTS path is classified as a video with H264/Composite tags."""
    subject = File('/path/to/file.MTS')
    self.assertFalse(subject.is_image())
    self.assertTrue(subject.is_video())
    # Pin every accessor against its expected value in one pass.
    expectations = [
        (subject.filename(), '/path/to/file.MTS'),
        (subject.ext(), '.mts'),
        (subject.media_type(), 'MTS'),
        (subject.date_taken_tag(), 'H264:DateTimeOriginal'),
        (subject.rotation_tag(), 'Composite:Rotation'),
    ]
    for actual, expected in expectations:
        self.assertEqual(actual, expected)
def walk_through_folders(parent_folder, configuration_folder_structure,
                         root_folder_name):
    """Recursively build a Folder tree from *parent_folder* on disk.

    Subfolders are recursed into only if the configuration lists them and
    does not mark them excluded.  Files are kept only when their extension
    is listed in the configuration and the file itself is not excluded.

    :param parent_folder: Filesystem path to scan.
    :param configuration_folder_structure: ConfigurationFolder describing
        which subfolders/files/extensions to include.
    :param root_folder_name: Name given to the returned root Folder.
    :return: Folder populated with the matching subfolders and files.
    :raises TypeError: If any argument has the wrong type (was a bare
        Exception; TypeError is a subclass so existing handlers still work).
    """
    if not isinstance(parent_folder, str):
        raise TypeError('parent_folder must be a str')
    if not isinstance(root_folder_name, str):
        raise TypeError('root_folder_name must be a str')
    if type(configuration_folder_structure) is not ConfigurationFolder:
        raise TypeError(
            'configuration_folder_structure must be a ConfigurationFolder')

    current_folder = Folder(root_folder_name)
    for entry in os.listdir(parent_folder):
        path = os.path.join(parent_folder, entry)
        if os.path.isdir(path):
            folder = Folder(entry)
            # Recurse only into configured, non-excluded subfolders.
            for conf_folder in configuration_folder_structure.all_folders():
                if str(folder) == str(conf_folder):
                    if not conf_folder.is_excluded():
                        current_folder.add_folder(
                            walk_through_folders(path, conf_folder,
                                                 str(folder)))
                    break
        if os.path.isfile(path):
            found_file = File(entry)
            # Keep the file only if its extension is configured...
            extension_allowed = (
                found_file.extension()
                in configuration_folder_structure.all_extensions())
            # ...and it is not explicitly excluded by name.
            excluded_by_name = any(
                str(found_file) == str(conf_file) and conf_file.is_excluded()
                for conf_file in configuration_folder_structure.all_files())
            if extension_allowed and not excluded_by_name:
                current_folder.add_file(found_file)
    return current_folder
def set_file(self, f): """ Función encargada de seleccionar el archivo que vamos a servir y por tanto, priorizar su descarga """ #Seleccionamos el archivo que vamos a servir fmap=self.meta.map_file(f.index, 0,1) self.file=File(f.path, self.temp_path, f.index, f.size, fmap, self.meta.piece_length(), self) self.prioritize_file()
def browseYoutubePlaylist(query):
    """Decode *query* and open the YouTube playlist browser at that page."""
    from src.file import File
    from paths import browse_youtube_playlist
    playlist_path, page_num = _decodedUrl(query)
    browse_youtube_playlist.browse(File.fromFullpath(playlist_path),
                                   page_num)
def searchYoutube(query):
    """Decode *query* and run a YouTube search of the requested type."""
    from src.file import File
    from paths import search_youtube
    search_type, search_target, page_num = _decodedUrl(query)
    # A falsy target (e.g. empty) is passed through untouched.
    search_target = (File.fromFullpath(search_target)
                     if search_target else search_target)
    search_youtube.search(search_type, search_target, page_num)
def removeFromCollection(query):
    """Decode *query* and remove a source from a collection file."""
    from src.file import File
    from paths import remove_from_collection
    (coll_path, source_id, show_confirm,
     show_success, refresh) = _decodedUrl(query)
    remove_from_collection.remove(File.fromFullpath(coll_path), source_id,
                                  show_confirm, show_success, refresh)
def deleteCollection(query):
    """Decode *query* and delete the referenced collection file."""
    from src.file import File
    from paths import delete_collection
    coll_path, show_confirm, show_success, refresh = _decodedUrl(query)
    delete_collection.delete(File.fromFullpath(coll_path),
                             show_confirm, show_success, refresh)
def playVideoSpecial(query):
    """Decode *query* and play a video in its collection context."""
    from paths import play_video_special
    from file import File
    # NOTE: this route uses _decodeQuery/File.fromQuery, unlike the
    # _decodedUrl/fromFullpath routes elsewhere in this module.
    video_id, coll_query = _decodeQuery(query)
    play_video_special.play(video_id, File.fromQuery(coll_query))
def get_date_taken(self, path_to_file):
    """Return the moment the media file was taken as a Unix timestamp.

    Reads the file's metadata with exiftool, selects the media-type
    specific "date taken" tag, massages the EXIF date format into
    something dateutil can parse, and converts it to epoch seconds.

    :param path_to_file: Path to the media file on disk.
    :return: Seconds since the epoch, as an int.
    """
    # (Removed a dead `metadata = {}` pre-assignment: the value was
    # always overwritten here, and on failure the exception propagates.)
    with exiftool.ExifTool() as et:
        metadata = et.get_metadata(path_to_file)
    f = File(path_to_file)
    tag = f.date_taken_tag()
    # Validates that the expected tag is present for this file.
    self.__check_tag(tag, metadata, path_to_file)
    raw_date_taken_string = metadata[tag]
    # EXIF dates look like "YYYY:MM:DD HH:MM:SS"; replace only the first
    # date-part colons with dashes so the parser reads it correctly.
    massaged_date_taken_string = raw_date_taken_string.replace(
        COLON, DASH, COLONS_IN_YMD)
    dt_object = parser.parse(massaged_date_taken_string)
    # NOTE(review): mktime interprets the naive datetime in local time —
    # confirm that is the intended timezone semantics.
    timestamp = mktime(dt_object.timetuple())
    return int(timestamp)