def process_all(directory="dataset/", purpose="train"):
    """Convert every labelled CSV record under agg/<purpose> into images.

    directory -- root of the image dataset to create (default "dataset/")
    purpose   -- data split to process: "train" or "test"

    Creates one sub-directory per category, then processes each CSV file's
    records in parallel with a process pool.
    """
    # Create required sub-directories; exist_ok avoids the original
    # check-then-create race between is_dir() and makedirs().
    for category in CAT:
        subdirectory = directory + purpose + "/" + category
        os.makedirs(subdirectory, exist_ok=True)
    # Directory containing all csv files for this purpose.
    parent_dir = path('agg/' + purpose)
    # All csv files which contain labelled data.
    all_csv_files = [
        x for x in parent_dir.iterdir() if str(x).endswith('.csv')
    ]
    for file in all_csv_files:
        print("Processing for " + str(file) + " file starts: ")
        with open(file) as f:
            # NOTE(review): split() breaks on ANY whitespace, not just
            # newlines — assumes records contain no spaces; confirm.
            all_lines = f.read().split()
        start = time.time()
        with ProcessPoolExecutor(max_workers=MAX_WORKERS) as executor:
            # Process each record in parallel.
            worker = create_img_train if purpose == "train" else create_img_test
            executor.map(worker, all_lines, chunksize=CHUNKSIZE)
        end = time.time()
        print("Time taken for " + str(file) + " file: " + str(end - start))
    return
def main(trainRatio=1, testRatio=0):
    """Locate the 'agg' data set (directory or zip), fetch the VGG16
    weights, and segment the data into train/test splits.

    trainRatio / testRatio -- fractions forwarded to segment_data().
    Prints download instructions when neither agg/ nor agg.zip exists.
    """

    def _download_vgg16_weights():
        # keras caches the file under ~/.keras/models, so repeated calls
        # are cheap no-ops; hoisted here to remove the copy-paste in both
        # branches below.
        keras.utils.get_file('vgg16_weights_tf_dim_ordering_tf_kernels.h5',
                             WEIGHTS_PATH,
                             cache_subdir='models',
                             file_hash='64373286793e3c8b2b4e3219cbf3544b')

    try:
        if path('agg').is_dir():
            _download_vgg16_weights()
            # segment data set
            segment_data(trainRatio, testRatio)
        elif path('agg.zip').is_file():
            _download_vgg16_weights()
            # Unzip into a fresh 'agg' subdirectory. The context manager
            # guarantees the archive handle is closed even on error
            # (the original closed it manually and could leak on raise).
            os.mkdir('agg')
            with zipfile.ZipFile(path('agg.zip'), 'r') as agg_file:
                agg_file.extractall(path('agg'))
            # segment data set
            segment_data(trainRatio, testRatio)
        else:
            raise NoDataSetFoundException
    except NoDataSetFoundException:
        print("Required data set cannot be found.")
        print(
            "Download 'agg.zip' here: http://www.cs.rochester.edu/u/qyou/deepemotion/"
        )
def rename():
    """Rename the just-downloaded imgur image to '<filesName><x><ext>',
    picking the next unused integer suffix x.

    NOTE(review): relies on module-level globals imageName, filesName,
    fullLink and the counter x — confirm callers set them beforehand.
    """
    renamePath = r'C:\Users\Administrator\Desktop\pic_downloader\imgur'
    global x
    fullPath = path(imageName)  # get image full path
    fileType = fullPath.suffix  # get the file extension
    fileNewname = path(r'C:\Users\Administrator\Desktop\pic_downloader\imgur'
                       ) / f'{filesName}{x}{fileType}'
    # If the file name already exists (e.g. the user downloaded the pic
    # before), bump x until an unused name is found.
    while os.path.exists(fileNewname):
        x += 1  # add 1 to x until it passes the last used file name
        try:
            # Build a candidate name with the incremented x.
            fileNewname = path(
                r'C:\Users\Administrator\Desktop\pic_downloader\imgur'
            ) / f'{filesName}{x}{fileType}'
        except:
            # NOTE(review): bare except around pure path construction —
            # nothing here is expected to raise; likely dead code.
            pass
        if not os.path.exists(fileNewname):
            # Found an unused name, exit the loop.
            break
    print(
        f'\n\nRenameing C:\\Users\\Administrator\\Desktop\\pic_downloader\\imgur\\{os.path.basename(fullLink)} to {fileNewname}\n\n'
    )
    os.rename(
        f'C:\\Users\\Administrator\\Desktop\\pic_downloader\\imgur\\{os.path.basename(fullLink)}',
        fileNewname)
    x += 1  # leave x pointing at the next free suffix
def move(folder):
    """Template: walk *folder* recursively, report (and once un-commented,
    copy) every .png/.jpg file to a destination folder, then rename the
    destination's files to Picture(N)<ext>.

    NOTE(review): the destination paths below are placeholder text that
    must be filled in before this function can actually run.
    """
    folder = os.path.abspath(folder)
    # Fix: the original pattern r'.*.png|jpg' parsed as "(.*.png)|(jpg)",
    # so it matched any name containing "jpg" anywhere, and ".*.png"
    # matched any extension preceded by one character. Anchor the intent:
    # names ending in .png or .jpg.
    fileRegex = re.compile(r'.*\.(png|jpg)$')
    destinsion1 = path(r'Where you want to put your files')
    folWalk = os.listdir(
        path(
            r'the folder that contain the files you want to rename(same as destinsion1)'
        ))  # all file names in the destination (used for renaming below)
    for folders, subfolders, files in os.walk(
            folder):  # walk all folders/subfolders/files in the given dir
        for file in files:  # walk through every file in the files list
            file = folders + '\\' + file  # full path for the file
            fileBaseName = os.path.basename(file)
            des = destinsion1 / fileBaseName
            mo = fileRegex.search(file)
            if not os.path.exists(file):
                continue
            if not mo:
                continue
            # Copying files (dead `if mo:` guard removed — mo is always
            # truthy past the continue above).
            print(f"Moving {fileBaseName} to {des}\n\n")
            #shutil.copy(file,f"{destinsion1}")uncomment this after check
    # Renaming files in the destination folder.
    x = 1
    for i in folWalk:
        print(folWalk)
        fullPath = destinsion1 / i
        fileType = fullPath.suffix  # get the file extension
        fileNewname = destinsion1 / f'Picture({x}){fileType}'
        x += 1
        print(f'Renameing {i} to Picture{x}\n\n')
def set_alias(shell):
    """Install an alias j='<rc file>' into the user's shell rc file.

    shell -- value of $SHELL; only '/bin/zsh' and '/bin/bash' are
    supported. The alias is appended only if not already present.
    The zsh/bash branches were copy-paste duplicates; extracted into one
    helper so the two code paths cannot drift apart.
    """
    pattern = "alias j='"
    home_folder = path('~').expanduser()
    bashrc = path(f'{home_folder}/.bashrc').resolve()
    zshrc = path(f'{home_folder}/.zshrc').resolve()

    def _install(rc, shell_name):
        # 'r+' reads the whole file first, leaving the position at EOF,
        # so the write below appends.
        with open(rc, 'r+') as f:
            if pattern not in f.read():
                f.write(pattern + f"{rc}'")
                print(f'Alias set in {rc}. Source or restart {shell_name} '
                      '& hit "j" to start program')
            else:
                print("'j' is already an alias. You'll have to run the "
                      'program manually')

    if shell == '/bin/zsh' and zshrc.is_file():
        _install(zshrc, 'zsh')
    elif shell == '/bin/bash' and bashrc.is_file():
        _install(bashrc, 'bash')
    else:
        print("Can't find $SHELL or shell run commands. Can't set alias")
def _play(self, n):
    """Start playback for the modulator slot described by JSON payload *n*.

    n -- JSON string; keys read here: slot, docker, rf, file, bw, const,
         coderate, guard, tx, cellid, playrate, player.
    Either launches the slot's docker container (when docker is truthy)
    or runs ModulatorConsole and records the playing state on disk.
    """
    n = json.loads(n)
    slot, docker, rf = n["slot"], n["docker"], n['rf']
    # Build the ModulatorConsole command line; rf is multiplied by 10**6
    # for --rf (presumably MHz -> Hz — confirm against the console docs).
    cmd = (f"{self.config.modhome}/ModulatorConsole "
           f"dvb-t "
           f"-r {self.config.ipaddr} "
           f"--slot {slot} "
           f"--playback play "
           f"--mode loop "
           f"--file \"{n['file']}\" "
           f"--bw {n['bw']} "
           f"--const {n['const']} "
           f"--coderate {n['coderate']} "
           f"--guard {n['guard']} "
           f"--tx {n['tx']} "
           f"--cell-id {n['cellid']} "
           f"--playrate {n['playrate']} "
           f"--rf {int(rf) * 10**6} ")
    if int(docker):
        print("docker is enabled")
        result = self.docker.run(int(n['slot']))
        print('docker result:', result)
        socketio.emit('report_docker_start', {
            'status': result,
            'slot': slot
        })
    else:
        res = self.util._execute(cmd)
        # Persist which player owns this slot; _watch() reads this marker
        # file back to report playback state.
        path(f"modulator.playing.{slot}").write_text(n["player"])
        self.util._dump_json(slot, n)
        socketio.emit(
            'response',
            {'status': ['PLAYING', int(n['slot']), n['player'], n['docker']]})
def run_cases(self, generator):
    """Run every problem produced by *generator* through the rider binary
    and write each timing series to its configured output file."""
    rider_path = path(self.rider)
    if not rider_path.is_file():
        raise RuntimeError(f"Unable to find (dyna-)rider: {self.rider}")
    for problem in generator.generate_problems():
        timings = perflib.rider.run(self.rider,
                                    problem.length,
                                    direction=problem.direction,
                                    real=problem.real,
                                    inplace=problem.inplace,
                                    precision=problem.precision,
                                    nbatch=problem.nbatch,
                                    ntrial=self.ntrial,
                                    device=self.device,
                                    libraries=self.lib,
                                    verbose=self.verbose,
                                    timeout=self.timeout)
        # One timing series per output file, in matching order.
        for index, samples in enumerate(timings):
            target = path(self.out[index])
            logging.info("output: " + str(target))
            meta = {'title': problem.tag}
            meta.update(problem.meta)
            perflib.utils.write_dat(target, problem.length, problem.nbatch,
                                    samples, meta)
def get_plot_savepath(file_name, base_folder="/Users/enricobertolotti/PycharmProjects/BScAssignment/RaMOutput/"):
    """Build (and create) the save-path prefix for a rotation-analysis plot.

    file_name   -- base name of the plot file (no extension)
    base_folder -- output root ending in '/'; the hard-coded machine path
                   is kept as the default for backward compatibility.
    Returns '<base>/Analysis_Results/RoundN/RotationAnalysis/<file_name>_rot_anal_roundN'
    where N is one past the number of existing RoundN directories.
    """
    global_folder = base_folder + "Analysis_Results/"
    # The number of existing subdirectories determines the next round.
    current_round = len(next(os.walk(global_folder))[1]) + 1
    abs_path = global_folder + "Round" + str(current_round) + "/RotationAnalysis/"
    path(abs_path).mkdir(parents=True, exist_ok=True)
    return abs_path + str(file_name) + "_rot_anal_round" + str(current_round)
def run_cases(self):
    """Run the rider binary over every generated problem size, appending
    timing rows to each configured output file (plus a .log twin that
    receives the same header).
    """
    # Dynamic-load mode when client libraries were supplied.
    dload = len(self.lib) > 0
    prog = path(self.prog)
    if not prog.is_file():
        print("**** Error: unable to find " + self.prog)
        sys.exit(1)
    # Header written at the top of every output/log file; column names
    # depend on the problem dimension.
    metadatastring = "# " + str(self) + "\n"
    metadatastring += "# "
    metadatastring += "dimension"
    metadatastring += "\txlength"
    if (self.dimension > 1):
        metadatastring += "\tylength"
    if (self.dimension > 2):
        metadatastring += "\tzlength"
    metadatastring += "\tnbatch"
    metadatastring += "\tnsample"
    metadatastring += "\tsamples ..."
    metadatastring += "\n"
    # The log file is stored alongside each data output file.
    for out in self.out:
        out = path(out)
        out.parent.mkdir(parents=True, exist_ok=True)
        out.write_text(metadatastring)
        log = out.with_suffix('.log')
        log.write_text(metadatastring)
    # Problem sizes come either from an explicit problem file or from a
    # radix-based sweep over the configured min/max extents.
    if self.problem_file:
        length_gen = problem_file_size_generator(self.problem_file,
                                                 self.dimension)
    else:
        length_gen = radix_size_generator(self.xmin, self.ymin, self.zmin,
                                          self.xmax, self.ymax, self.zmax,
                                          self.dimension, self.radix)
    for length in length_gen:
        N = self.ntrial
        seconds = run_rider(self.prog, dload, self.lib, length,
                            self.direction, self.real, self.inplace, N,
                            self.precision, self.nbatch, self.device,
                            self.log)
        #print(seconds)
        # Append one tab-separated row per output file: dimension,
        # lengths, nbatch, sample count, then the raw samples.
        for idx, vals in enumerate(seconds):
            with open(self.out[idx], 'a') as outfile:
                outfile.write(str(self.dimension))
                outfile.write("\t")
                outfile.write("\t".join([str(val) for val in length]))
                outfile.write("\t")
                outfile.write(str(self.nbatch))
                outfile.write("\t")
                outfile.write(str(len(seconds[idx])))
                for second in seconds[idx]:
                    outfile.write("\t")
                    outfile.write(str(second))
                outfile.write("\n")
def get_code() -> list:
    """Read the Python source corresponding to the first CLI argument.

    The argument's extension is ignored: '<stem>.py' in the current
    directory is read. Returns the file's lines; returns an empty list
    (after printing a message) when the file does not exist — the
    original implicitly returned None there, contradicting the annotated
    return type.
    """
    argv_file = path(path(argv[1]).stem + '.py')
    if argv_file.exists():
        with open(str(argv_file)) as f:
            return f.readlines()
    print('Sorry, that file does not exist')
    return []
async def test_read_allows_unrelated_read(self):
    """Two read locks on disjoint paths must both acquire immediately."""
    lock = TreeLock()
    acquired_history = await mutate_tasks_in_sequence(create_tree_tasks(
        lock(read=[path('/a/b/c')], write=[]),
        lock(read=[path('/a/b/e')], write=[]),
    ), complete(0), complete(1))
    # Both tasks hold their read locks at the first step.
    self.assertEqual(acquired_history[0], [True, True])
async def test_blocked_read_root_and_write_block_write(self):
    """A lock reading the root while writing /a/b/c must block a later
    write to the sibling /a/b/d until the first lock completes."""
    lock = TreeLock()
    acquired_history = await mutate_tasks_in_sequence(create_tree_tasks(
        lock(read=[path('/')], write=[path('/a/b/c')]),
        lock(read=[], write=[path('/a/b/d')]),
    ), complete(0), complete(1))
    # Step 0: only the first lock holds; step 1: once it completes, the
    # queued write acquires.
    self.assertEqual(acquired_history[0], [True, False])
    self.assertEqual(acquired_history[1], [True, True])
async def test_cancellation_after_acquisition_unblocks_read(self):
    """Cancelling a held write lock must release it so a blocked read on
    a descendant path can proceed."""
    lock = TreeLock()
    acquired_history = await mutate_tasks_in_sequence(create_tree_tasks(
        lock(read=[], write=[path('/a/b/c')]),
        lock(read=[path('/a/b/c/d')], write=[]),
    ), cancel(0), complete(1))
    # cancel(0) releases the write on /a/b/c; the read on /a/b/c/d then
    # acquires at the next step.
    self.assertEqual(acquired_history[0], [True, False])
    self.assertEqual(acquired_history[1], [True, True])
def collect_config(src_directory):
    """Collect emacs config files (.emacs and .emacs.d) from
    *src_directory* into the local ./res/ directory, replacing any
    previous copy.

    Converted from Python-2 print statements to print() so the block is
    consistent with the rest of this (Python 3) file.
    """
    src = path(src_directory)
    des = path('./res/')
    print('Ready to move configs from %s to %s.' % (str(src), str(des)))
    if des.exists() and des.is_dir():
        # Drop the stale copy so copytree below cannot collide.
        shutil.rmtree(str(des))
    des.mkdir()
    shutil.copy(str(path(src) / '.emacs'), str(des))
    shutil.copytree(str(path(src) / '.emacs.d'), str(des / '.emacs.d'), False)
def rename(directory=r'C:\Users\Administrator\Desktop\pic1'):
    """Rename every file in *directory* to Picture(N)<ext>, N = 1, 2, ...

    directory -- folder whose files are renamed; the original hard-coded
    path is kept as the default for backward compatibility.
    Fixes the progress message, which incremented the counter before
    printing and so always reported one number too high.
    """
    filePath = path(directory)
    folWalk = os.listdir(filePath)  # files to rename
    x = 1
    for name in folWalk:
        fullPath = filePath / name
        fileType = fullPath.suffix  # keep the original extension
        fileNewname = filePath / f'Picture({x}){fileType}'
        # Report before incrementing so the printed number matches the
        # name actually assigned.
        print(f'Renameing {name} to Picture ({x})\n\n')
        os.rename(filePath / name, fileNewname)
        x += 1
def create_imageset(coco_annotation, dst):
    """Append the (renamed) file name of every instance found in a COCO
    annotation file to the image-set list at *dst*, one name per line.

    NOTE(review): .expand() and write_text(..., append=True) are path.py
    APIs, not pathlib — `path` here must be path.py's Path class; confirm
    against this module's imports.
    """
    _, instances = get_instances(coco_annotation)
    dst = path(dst).expand()
    for instance in instances:
        # rename() is a project helper mapping the COCO file name to the
        # image-set entry name.
        name = rename(instance['file_name'])
        dst.write_text('{}\n'.format(name), append=True)
def run(self):
    """Aggregate the towns served per route for every weekday, reading
    all dated route files (YYYY-MM-DD.txt) under self.root, then print
    the result as indented JSON."""
    day_keys = ('dl.', 'dt.', 'dc.', 'dj.', 'dv.', 'ds.', 'dg.')
    weekdays = {key: {} for key in day_keys}
    dated_name = re.compile(r"\d{4}-\d{2}-\d{2}\.txt$")
    for item in path(self.root).rglob('*.txt'):
        if not dated_name.search(item.name):
            continue
        with open(item.absolute(), 'r', encoding=_encoding) as handle:
            contents = handle.read()
        day = DateHandler(item.name.rstrip("tx.")).get_weekday()
        routes = Route().fetch_towns(contents)
        if not weekdays[day]:
            # First file for this weekday: adopt its routes wholesale.
            weekdays[day] = routes
            continue
        # Merge: union the town sets of already-known routes, adopt new ones.
        for route_name, towns in routes.items():
            if weekdays[day].get(route_name):
                weekdays[day][route_name].update(towns)
            else:
                weekdays[day][route_name] = towns
    # Sets are not JSON-serializable; convert each to a list first.
    for day in weekdays:
        for route_name in weekdays[day]:
            weekdays[day][route_name] = list(weekdays[day][route_name])
    print(json.dumps(weekdays, indent=4))
def construcao(self, file):
    """Create the working environment directory tree (self._path /
    self._dirwork plus its sub-folders) and copy *file* into the two
    configured data folders.

    file -- dataset file to copy in; None/'' means no dataset yet.
    NOTE(review): user-facing messages are intentionally left in
    Portuguese; 'Ambiente inesistente' = environment does not exist.
    """
    ambiente = None  # tracks whether the environment is usable
    self._criacao = self._path / self._dirwork
    # path.is_dir / path.mkdir are called unbound with a Path argument —
    # equivalent to self._criacao.is_dir() / .mkdir().
    if not path.is_dir(self._criacao):
        print('Ambiente inesistente')
        print('Criando ambiente...')
        try:
            path.mkdir(self._criacao)
            ambiente = True
        except FileNotFoundError as e:
            # Parent path missing: report and abort environment creation.
            print('[WinError 3] O sistema não consegue encontrar o caminho especificado em:')
            print(self._criacao)
            ambiente = False
    else:
        ambiente = True
    if file == None or file == '':
        print('Não há datasets alocados neste ambiente')
        ambiente = True
    if ambiente:
        chdir(self.get_criacao())
        # Create each missing sub-folder, then copy the dataset into the
        # two data directories named in self._diretorios.
        for pasta in self.get_dir():
            if not path(self._criacao / pasta).exists():
                path.mkdir(self._criacao / pasta)
            if pasta == self._diretorios[0]:
                local = self._criacao / self._diretorios[0]
                chdir(local)
                self.copy_file(file, local)
            elif pasta == self.get_dir()[1]:
                local = self._criacao / self._diretorios[1]
                chdir(local)
                self.copy_file(file, local)
        print('Ambiente criado com sucesso em: \n', self._criacao)
        # Return to the desktop so relative paths elsewhere keep working.
        chdir(self.get_desktop())
def deploy_config(des_directory):
    """Deploy the collected emacs configs from ./res/ into *des_directory*.

    Converted from Python-2 print statements to print(); the original
    status message also had its from/to operands swapped — this function
    copies FROM ./res/ TO the destination.
    """
    des = path(des_directory)
    src = path('./res/')
    print('Ready to deploy configs from %s to %s.' % (str(src), str(des)))
    if not des.exists():
        des.mkdir()
    if (des / '.emacs.d/').exists():
        # copytree refuses to overwrite an existing tree, so remove it.
        shutil.rmtree(str(des / '.emacs.d'))
    shutil.copy(str(path(src) / '.emacs'), str(des))
    shutil.copytree(str(path(src) / '.emacs.d'), str(des / '.emacs.d'), False)
    print('Finish deploying configs')
def _upload(self):
    """Handle a POST file upload: sanitize the client-supplied file name
    and save the payload under the configured upload directory."""
    if request.method == 'POST':
        file = request.files['file']
        if file:
            # secure_filename strips path separators and unsafe chars so
            # a hostile name cannot escape the upload directory.
            filename = path(secure_filename(file.filename))
            uppath = Config().updir / filename
            file.save(os.fspath(uppath))
async def test_exception_after_acquisition_unblocks_write(self):
    """A lock holder that raises must still release its lock, unblocking
    a queued write on a descendant path."""
    lock = TreeLock()
    tasks = create_tree_tasks(
        lock(read=[], write=[path('/a/b/c')]),
        lock(read=[], write=[path('/a/b/c/d')]),
    )
    exp = Exception('Raised exception')
    acquired_history = await mutate_tasks_in_sequence(
        tasks, exception(0, exp), complete(1))
    # The injected exception surfaces on the first task unchanged...
    self.assertEqual(tasks[0].task.exception(), exp)
    # ...and the second lock acquires once the first has released.
    self.assertEqual(acquired_history[0], [True, False])
    self.assertEqual(acquired_history[1], [True, True])
class Data(object):
    """Static registry of data-set file locations, rooted at
    SETTING_BASE_DIR (defined elsewhere in the project)."""
    # Sample submission templates.
    submit_trajectories = path(SETTING_BASE_DIR + '/../data/dataSets_p1/submission_sample_travelTime.csv')
    submit_volume = path(SETTING_BASE_DIR + '/../data/dataSets_p1/submission_sample_volume.csv')

    class Test(object):
        # Phase-1 testing inputs.
        trajectories = path(SETTING_BASE_DIR + '/../data/dataSets_p1/testing_phase1/trajectories_table5_test1.csv')
        volume = path(SETTING_BASE_DIR + '/../data/dataSets_p1/testing_phase1/volume_table6_test1.csv')
        weather = path(SETTING_BASE_DIR + '/../data/dataSets_p1/testing_phase1/weather_table7_test1.csv')

    class Train(object):
        # Phase-1 training inputs.
        links = path(SETTING_BASE_DIR + '/../data/dataSets_p1/training/links_table3.csv')
        routes = path(SETTING_BASE_DIR + '/../data/dataSets_p1/training/routes_table4.csv')
        trajectories = path(SETTING_BASE_DIR + '/../data/dataSets_p1/training/trajectories_table5_training.csv')
        volume = path(SETTING_BASE_DIR + '/../data/dataSets_p1/training/volume_table6_training.csv')
        weather = path(SETTING_BASE_DIR + '/../data/dataSets_p1/training/weather_table7_training.csv')
def move(folder):
    """Walk *folder* recursively and report (and, once the copy line is
    uncommented, copy) every .png/.jpg file into the destination folder.
    """
    folder = os.path.abspath(folder)
    # Fix: the original r'.*.png|jpg' parsed as "(.*.png)|(jpg)" — it
    # matched any name merely containing "jpg", and ".*.png" matched any
    # extension preceded by one character. Anchor the real intent.
    fileRegex = re.compile(r'.*\.(png|jpg)$')
    destinsion1 = path(r'C:\Users\Administrator\Desktop\pic1'
                       )  # where you want to put your files
    for folders, subfolders, files in os.walk(
            folder):  # walk all folders/subfolders/files in the given dir
        for file in files:  # walk through every file in the files list
            # NOTE(review): Windows-only path join with '\\'.
            file = folders + '\\' + file  # full path for the file
            fileBaseName = os.path.basename(file)
            des = destinsion1 / fileBaseName
            mo = fileRegex.search(file)
            print(file)
            if not os.path.exists(file):
                continue
            if not mo:
                continue
            # Copying files — the dead `if mo: pass` guard was removed
            # (mo is always truthy past the continue above); the copy is
            # still a dry run until the line below is uncommented.
            #print(f"Moving {fileBaseName} to {des}\n\n")
            #shutil.copy(file,f"{destinsion1}")#uncomment this after check
def rpf_signals(bamfilename, min_length, max_length, verbose):
    """Demultiplex a BAM file by aligned read length.

    Reads whose aligned length (rpf_length) falls within
    [min_length, max_length] are written to
    <parent>/seg_by_align_size/<len>.all.bam — one output file per
    distinct length, opened lazily on first use.
    NOTE(review): the `verbose` parameter is currently unused.
    """
    output_files = {}  # aligned length -> open output AlignmentFile
    file_path = path(bamfilename)
    # file_name_rgx = "(.+/)(.+)\.[a-z]{3}\.bam"
    # c_file_rgx = re.compile(file_name_rgx)
    with AlignmentFile(bamfilename, 'rb') as alignfile:
        for aseg in alignfile.fetch():
            # calculate length of aligned read
            alen = rpf_length(aseg)
            if alen >= min_length and alen <= max_length:
                # check if a file for that length exists
                if alen not in output_files.keys():
                    # if it doesn't exist create it
                    # path, outfile = c_file_rgx.findall(bamfilename)[0]
                    output_filename = "%s/seg_by_align_size/%s.all.bam" % (
                        str(file_path.parent), alen)
                    output_files[alen] = AlignmentFile(
                        output_filename, 'wb', template=alignfile)
                    # print >>sys.stderr, '>> opening file %s' %output_filename
                # fetch file_handle for that length and write the segment
                output_files[alen].write(aseg)
    # Flush and close every per-length output file.
    for file in output_files.values():
        file.close()
def get_frame(video_filepath, time):
    """Extract the frame at *time* seconds from a video file.

    Returns a (success: bool, image) tuple where image is the frame as a
    numpy array (as loaded by cv2.imread) or None on failure. Kept as a
    tuple for compatibility with existing callers.
    """
    # nornalize $video_filepath to avoid weird errors
    video_filepath = str(path(video_filepath))
    # make sure $time is actually within the video
    if time > get_length(video_filepath):
        return (False, None)
    if time < 0:
        return (False, None)
    # Use ffmpeg rather than cv2 deliberately: cv2 shifts the colors a
    # bit (possibly a limited- vs full-range RGB issue), while ffmpeg is
    # color-accurate at the cost of a little extraction time.
    result = subprocess.run([
        'ffmpeg', '-y', '-loglevel', 'error', '-ss',
        str(time), '-i', video_filepath, '-vframes', '1', 'tmpframe.png'
    ],
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT)
    # Round-trip through a temp file; imread returns None if ffmpeg failed.
    img = cv2.imread('tmpframe.png')
    delete_file('tmpframe.png')
    if img is None:
        return (False, None)
    # save image using cv2.imwrite('image.png', get_frame(...)[1])
    return (True, img)
def file_data_ambiente(self):
    """Change into the environment's data directory and return its file
    listing via self.local_file; prints guidance (and returns None) when
    the environment has not been created yet.

    Fix: the original read the module-level name `_diretorios`, while the
    sibling construcao() uses self._diretorios — now reads the instance
    attribute for consistency.
    """
    dir_file = self._criacao / self._diretorios[1]
    if not path(dir_file).exists():
        print('E nescessário criar o ambiente.\nUtilize o método: def construcao(file), para criar o ambiente.')
    else:
        chdir(dir_file)
        return self.local_file(path.cwd())
def find_default_configuration() -> str:
    '''
    search the default configuration file named <easymake.yml> or <emake.yml>
    '''
    # Check the candidates in priority order; first existing regular
    # file wins.
    for candidate_name in ('./easymake.yml', './emake.yml'):
        candidate = path(candidate_name)
        if candidate.exists() and candidate.is_file():
            return str(candidate)
    raise DefaultConfigNotExistException(
        0, "Error: Cannot find the default configuration")
def ml_export_image(image, image_id, num_slices, ring_pos, image_type="mask", debug=False):
    """Export slices of a 3-D image volume as PNG training data.

    image      -- array indexed [depth, ...]; assumes depth-first layout
                  — TODO confirm against callers.
    image_id   -- prefix for the exported file names.
    num_slices -- number of slices starting at the mid-depth; <= 0 means
                  export every slice.
    ring_pos   -- sequence whose first element is presumably the ring's
                  depth extent (halved for the mid-point) — confirm.
    image_type -- "mask" selects the Masks folder and "clean" prefix,
                  anything else the Images folder and "base" prefix.
    NOTE(review): `debug` is currently unused; `default_folder` is a
    module-level global.
    """
    # Folder & Path preparations:
    subfolder = "ML_Training_Data/Masks/" if "mask" in image_type else "ML_Training_Data/Images/"
    full_path = default_folder + subfolder
    path_obj = path(full_path)
    path_obj.mkdir(parents=True, exist_ok=True)
    # Get the export-image
    # Create a list of depth indexes for the slices to be exported
    z_mid = int(ring_pos[0] / 2)
    slices = list(
        range(z_mid, z_mid + num_slices) if num_slices > 0 else list(range(image.shape[0])))
    for slice_depth in slices:
        # Define image name & path
        prefix = "_clean_ring_slice_ml_training_" if "mask" in image_type else "_base_ring_slice_ml_training_"
        img_name = str(image_id) + prefix + str(slice_depth)
        file_path = full_path + img_name + ".png"
        # Create and save image
        test_slice = image[slice_depth, :]
        img = Image.fromarray(test_slice)
        img.save(file_path)
        print("\rSlice Exported:\t" + str(slice_depth), end="")
    print("\n")
def extractArchive(fileName):
    """Extract *fileName* into the current working directory.

    '.tar.gz' archives go through tarfile; everything else is treated as
    a zip archive. Fixes the original `zipfile(fileName, 'r')`, which
    raised TypeError because `zipfile` is the module, not a class; both
    branches now also close their archive via a context manager.
    """
    ext = ''.join(path(fileName).suffixes)
    if ext == ".tar.gz":
        with tarfile.open(fileName) as archive:
            archive.extractall()
    else:
        with zipfile.ZipFile(fileName, 'r') as archive:
            archive.extractall()
def _watch(self):
    """Emit the playback and docker state of every modulator slot, plus
    the current player name for each playing slot."""
    cnts = self.config.nodecounts
    docker_state = ['0' for i in range(cnts)]
    # A slot counts as "playing" when its modulator.playing.<slot>
    # marker file exists (written by _play()).
    state = [path(f"modulator.playing.{b}").exists() for b in range(cnts)]
    for b in range(cnts):
        d = self._load_configuration_dict(str(b))
        docker_state[b] = d['docker']
    socketio.emit('report_watch', {
        'state': state,
        'docker_state': docker_state
    })
    # For each playing slot, report the owning player's name (slot i is
    # surfaced to clients as player{i+1}).
    for i, b in enumerate(state):
        if b:
            player = f"player{i+1}"
            socketio.emit(
                f"{player}_name",
                {player: path(f"modulator.playing.{i}").read_text()})
async def test_lock_modes_can_be_reused(self):
    """A lock(...) object is reusable: a second acquire/complete cycle
    must behave exactly like the first."""
    lock = TreeLock()
    lock_write_1 = lock(read=[], write=[path('/a/b/c')])
    lock_write_2 = lock(read=[], write=[path('/a/b/c')])
    # First cycle: writer 2 blocks until writer 1 completes.
    acquired_history = await mutate_tasks_in_sequence(create_tree_tasks(
        lock_write_1, lock_write_2), complete(0), complete(1))
    self.assertEqual(acquired_history[0], [True, False])
    self.assertEqual(acquired_history[1], [True, True])
    # Second cycle with the SAME lock objects: identical behaviour.
    acquired_history = await mutate_tasks_in_sequence(create_tree_tasks(
        lock_write_1, lock_write_2), complete(0), complete(1))
    self.assertEqual(acquired_history[0], [True, False])
    self.assertEqual(acquired_history[1], [True, True])
def get_file_and_line(up=2):
    """Get file and file number of frame 'up'-steps up in the stack."""
    frame = inspect.currentframe()
    # Walk up the call stack; stop early if it is shallower than 'up'.
    while up > 0 and frame is not None:
        frame = frame.f_back
        up -= 1
    if frame is None:
        return None, None
    info = inspect.getframeinfo(frame)
    return path(info.filename).name, info.lineno
def nginx_htpasswd(self):
    """Path of this app's nginx htpasswd file."""
    return path('/etc/nginx/locations.d') / ('%s.htpasswd' % self.name)
def nginx_site(self):
    """Path of this app's enabled nginx site file."""
    return path('/etc/nginx/sites-enabled') / self.name
def nginx_location(self):
    """Path of this app's nginx location config file."""
    return path('/etc/nginx/locations.d') / ('%s.conf' % self.name)
def venv(self):
    """directory containing virtualenvs for clld apps.
    """
    return path('/usr/venvs') / self.name
def home(self):
    """home directory of the user running the app.
    """
    return path('/home') / self.name
def logs(self):
    """directory containing the app's logfiles.
    """
    return path('/var/log') / self.name
def supervisor(self):
    """Path of this app's supervisor config file."""
    return path('/etc/supervisor/conf.d') / ('%s.conf' % self.name)
intended to be conveniently used by wsgi servers. :copyright: Copyright (C) 2015 by the respective authors; see AUTHORS. :license: GPLv3, see LICENSE for details. """ # XXX: werkzeug is garbage: https://github.com/pallets/werkzeug/issues/461 # because both writing my own reloader and replacing werkzeug with something not # as terrible are too hard, just hack at sys.path if __name__ == '__main__': # remove the pb module from sys.path; it should never be in there import sys, __main__ from pathlib import Path as path _path = path(__main__.__file__).resolve().parent for index, p in enumerate(sys.path): p = path(p).resolve() if p == _path: assert path.cwd().resolve() != p sys.path[index] = str(path.cwd().resolve()) import os from werkzeug.serving import run_simple from pb import db from pb.pb import create_app from pb.runonce import add_indexes