def remove(self):
    rm_rf(self.entry_path)
    for fn in os.listdir(appdir):
        if fn.startswith(self.name_):
            # found one shortcut, so don't remove the name from menu
            return
    self._remove_this_menu()
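# --- Hypothetical sketch (not from any of the projects above): every snippet in this
# collection calls an rm_rf() helper whose definition is not shown here. Assuming it
# mirrors `rm -rf` (remove a file or a whole directory tree, silently skipping paths
# that do not exist), a minimal implementation could look like this:
import os
import shutil

def rm_rf(path):
    """Remove a file, symlink, or entire directory tree; ignore missing paths."""
    if os.path.islink(path) or os.path.isfile(path):
        os.unlink(path)
    elif os.path.isdir(path):
        shutil.rmtree(path)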
def waveform(sound):
    startTime = request.args.get('startTime', '0')
    endTime = request.args.get('endTime', '0')
    if float(startTime) > float(endTime):
        endTime = startTime

    script = praat._scripts_dir + 'drawWave'
    params = [sound, startTime, endTime, praat._sounds_dir, praat._images_dir]

    ts = datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S_")
    if "wav" not in sound:
        image = praat._images_dir + ts + str(sound.replace("mp3", "png"))
    else:
        image = praat._images_dir + ts + str(sound.replace("wav", "png"))

    # Add image name to params list
    params.append(image)

    # If image does not exist, run script
    #if not os.path.isfile(image):
    praat.runScript(script, params)
    utils.resizeImage(image, (1280, 640))
    #utils.cropImage(image)

    # Image should be available now, generated or cached
    with open(image, 'rb') as fp:  # binary mode so the PNG bytes are returned intact
        resp = app.make_response(fp.read())
    utils.rm_rf(image)
    resp.content_type = "image/png"
    return resp
def write_arcname(self, arcname):
    if arcname.endswith('/') or arcname.startswith('.unused'):
        return
    m = self.py_pat.match(arcname)
    if m and (m.group(1) + self.py_obj) in self.arcnames:
        # .py, .pyc, .pyo next to .so are not written
        return
    path = self.get_dst(arcname)
    dn, fn = os.path.split(path)
    data = self.z.read(arcname)

    if fn in ['__init__.py', '__init__.pyc']:
        tmp = arcname.rstrip('c')
        if tmp in self.arcnames and NS_PKG_PAT.match(self.z.read(tmp)):
            if fn == '__init__.py':
                data = ''
            if fn == '__init__.pyc':
                return

    self.files.append(path)
    if not isdir(dn):
        os.makedirs(dn)
    rm_rf(path)
    fo = open(path, 'wb')
    fo.write(data)
    fo.close()

    if (arcname.startswith(('EGG-INFO/usr/bin/', 'EGG-INFO/scripts/')) or
            fn.endswith(('.dylib', '.pyd', '.so')) or
            (arcname.startswith('EGG-INFO/usr/lib/') and
             self.so_pat.match(fn))):
        os.chmod(path, 0755)
def generate_csv():
    if os.path.exists(output_csv_file_path):
        timer = TimeCounter()
        rm_rf(output_csv_file_path)
        print timer.stop(), 'clear old output csv files'

    timer = TimeCounter()
    os.mkdir(output_csv_file_path)

    xml_file_names = []
    for d, dirs, files in os.walk(output_zip_file_path):
        for f in files:
            if f[-3:] == "xml":
                xml_file_names.append(os.path.join(d, f))

    timer = TimeCounter()
    run_tasks(get_xml_data, iter(xml_file_names), final_get_xml_data_callback)
def rm_egg_dirs():
    "remove egg directories"
    sp_dir = environ.sp_dir
    egg_dirs = glob(join(sp_dir, '*-py*.egg'))
    for egg_dir in egg_dirs:
        print 'moving egg dir:', egg_dir
        try:
            os.rename(join(egg_dir, 'EGG-INFO/PKG-INFO'), egg_dir + '-info')
        except OSError:
            pass
        utils.rm_rf(join(egg_dir, 'EGG-INFO'))
        for fn in os.listdir(egg_dir):
            os.rename(join(egg_dir, fn), join(sp_dir, fn))
    utils.rm_rf(join(sp_dir, 'easy-install.pth'))
def provide(recipe_dir, meta, patch=True):
    """
    given a recipe_dir:
      - download (if necessary)
      - unpack
      - apply patches (if any)
    """
    rm_rf(WORK_DIR)
    if 'fn' in meta:
        unpack(meta)
    elif 'git_url' in meta:
        git_source(meta)
    else:  # no source
        os.makedirs(WORK_DIR)

    if patch:
        src_dir = get_dir()
        for patch in meta.get('patches', []):
            apply_patch(src_dir, join(recipe_dir, patch))
def remove(self):
    if not isdir(self.meta_dir):
        print "Error: Can't find meta data for:", self.cname
        return

    self.read_meta()
    n = 0
    getLogger('progress.start').info(dict(
        amount=len(self.files),  # number of files
        disp_amount=human_bytes(self.installed_size),
        filename=self.fn,
        action='removing'))

    self.install_app(remove=True)
    self.run('pre_egguninst.py')

    for p in self.files:
        n += 1
        getLogger('progress.update').info(n)
        if self.hook and not p.startswith(self.pkgs_dir):
            continue
        rm_rf(p)
        if p.endswith('.py'):
            rm_rf(p + 'c')

    self.rm_dirs()
    rm_rf(self.meta_dir)
    if self.hook:
        rm_empty_dir(self.pkg_dir)
    else:
        rm_empty_dir(self.egginfo_dir)
    getLogger('progress.stop').info(None)
def remove(self):
    if not isdir(self.meta_dir):
        print "Error: Can't find meta data for:", self.cname
        return

    if self.evt_mgr:
        from encore.events.api import ProgressManager
    else:
        from console import ProgressManager

    self.read_meta()
    n = 0
    progress = ProgressManager(
        self.evt_mgr, source=self,
        operation_id=uuid4(),
        message="removing egg",
        steps=len(self.files),
        # ---
        progress_type="removing", filename=self.fn,
        disp_amount=human_bytes(self.installed_size),
        super_id=getattr(self, 'super_id', None))

    self.install_app(remove=True)
    self.run('pre_egguninst.py')

    with progress:
        for p in self.files:
            n += 1
            progress(step=n)
            rm_rf(p)
            if p.endswith('.py'):
                rm_rf(p + 'c')

    self.rm_dirs()
    rm_rf(self.meta_dir)
    rm_empty_dir(self.egginfo_dir)
def remove(self):
    if not isdir(self.meta_dir):
        print "Error: Can't find meta data for:", self.cname
        return

    self.read_meta()
    n = 0
    nof = len(self.files)  # number of files
    self.progress_callback(0, self.installed_size)

    self.install_app(remove=True)
    self.run('pre_egguninst.py')

    for p in self.files:
        n += 1
        self.progress_callback(n, nof)
        rm_rf(p)
        if p.endswith('.py'):
            rm_rf(p + 'c')

    self.rm_dirs()
    rm_rf(self.meta_dir)
    if self.hook:
        rm_empty_dir(self.pkg_dir)
    else:
        rm_empty_dir(self.egginfo_dir)
def import_db(zip_url):
    """Dev function to download example data and import"""
    # download
    import_dir = "/tmp/import"
    rm_rf(import_dir)
    mkdir_p(import_dir)
    zip_url = zip_url
    zip_file = zip_url.split('/')[-1]
    os.system("cd {} && curl {} > {}".format(import_dir, zip_url, zip_file))
    os.system("cd {} && unzip {}".format(import_dir, zip_file))

    # find files
    dcms = find_recursively(import_dir, lambda x: x.endswith(".dcm"))

    # import
    for i, filename in enumerate(dcms):
        status_line = "Importing {} of {}".format(i, len(dcms))
        sys.stdout.write(status_line)
        # TODO: contrived way to upload data using werkzeug so UploadSet can be
        # used to store the file
        builder = EnvironBuilder(method='POST', data={
            'dicom': (StringIO(filename).read(), filename.split('/')[-1])
        })
        env = builder.get_environ()
        request = Request(env)
        upload_filename = uploaded_dicoms.save(request.files['dicom'])
        rec = Dicom(filename=upload_filename)
        db.session.add(rec)
        sys.stdout.write(len(status_line) * '\r')
    db.session.commit()

    # get rid of temp dir
    rm_rf(import_dir)
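# --- Hypothetical sketch (not from the original project): the snippet above calls a
# find_recursively(root, predicate) helper that is not shown. A minimal implementation
# consistent with how it is used there might look like this:
import os

def find_recursively(root, predicate):
    """Walk `root` and return every file path for which `predicate(path)` is True."""
    matches = []
    for dirpath, dirnames, filenames in os.walk(root):
        for name in filenames:
            path = os.path.join(dirpath, name)
            if predicate(path):
                matches.append(path)
    return matches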
def ensure_menu_file():
    # ensure any existing version is a file
    if exists(menu_file) and not isfile(menu_file):
        rm_rf(menu_file)

    # ensure any existing file is actually a menu file
    if isfile(menu_file):
        # make a backup of the menu file to be edited
        cur_time = time.strftime('%Y-%m-%d_%Hh%Mm%S')
        backup_menu_file = "%s.%s" % (menu_file, cur_time)
        shutil.copyfile(menu_file, backup_menu_file)

        if not is_valid_menu_file():
            os.remove(menu_file)

    # create a new menu file if one doesn't yet exist
    if not isfile(menu_file):
        fo = open(menu_file, 'w')
        if mode == 'user':
            merge = '<MergeFile type="parent">%s</MergeFile>' % sys_menu_file
        else:
            merge = ''
        fo.write("<Menu><Name>Applications</Name>%s</Menu>\n" % merge)
        fo.close()
def remove(self):
    if not isdir(self.meta_dir):
        print "Error: Can't find meta data for:", self.cname
        return

    if self.evt_mgr:
        from encore.events.api import ProgressManager
    else:
        from console import ProgressManager

    self.read_meta()
    n = 0
    progress = ProgressManager(
        self.evt_mgr, source=self,
        operation_id=uuid4(),
        message="removing egg",
        steps=len(self.files),
        # ---
        progress_type="removing", filename=self.fn,
        disp_amount=human_bytes(self.installed_size),
        super_id=getattr(self, 'super_id', None))

    self.install_app(remove=True)
    self.run('pre_egguninst.py')

    with progress:
        for p in self.files:
            n += 1
            progress(step=n)
            if self.hook and not p.startswith(self.pkgs_dir):
                continue
            rm_rf(p)
            if p.endswith('.py'):
                rm_rf(p + 'c')

    self.rm_dirs()
    rm_rf(self.meta_dir)
    if self.hook:
        rm_empty_dir(self.pkg_dir)
    else:
        rm_empty_dir(self.egginfo_dir)
def create(self, remove=False):
    if "pywscript" in self.shortcut:
        cmd = join(self.prefix, 'pythonw.exe')
        args = self.shortcut["pywscript"].split()
    elif "pyscript" in self.shortcut:
        cmd = join(self.prefix, 'python.exe')
        args = self.shortcut["pyscript"].split()
    elif "webbrowser" in self.shortcut:
        cmd = join(self.prefix, 'pythonw.exe')
        args = ['-m', 'webbrowser', '-t', self.shortcut['webbrowser']]
    else:
        raise Exception("Nothing to do: %r" % self.shortcut)

    workdir = self.shortcut.get('workdir', '')
    icon = self.shortcut.get('icon', '')
    for a, b in [
            ('${PYTHON_SCRIPTS}', join(self.prefix, 'Scripts')),
            ('${MENU_DIR}', join(self.prefix, 'Menu')),
            ('${PERSONALDIR}', get_folder('CSIDL_PERSONAL')),
            ('${USERPROFILE}', get_folder('CSIDL_PROFILE')),
            ]:
        args = [s.replace(a, b) for s in args]
        workdir = workdir.replace(a, b)
        icon = icon.replace(a, b)

    # Fix up the '/' to '\'
    workdir = workdir.replace('/', '\\')
    icon = icon.replace('/', '\\')

    # Create the working directory if it doesn't exist
    if workdir:
        if not isdir(workdir):
            os.makedirs(workdir)
    else:
        workdir = expanduser('~')

    # Menu link
    dst_dirs = [self.menu.path]

    # Desktop link
    if self.shortcut.get('desktop'):
        dst_dirs.append(desktop_dir)

    # Quicklaunch link
    if self.shortcut.get('quicklaunch'):
        dst_dirs.append(quicklaunch_dir)

    for dst_dir in dst_dirs:
        dst = join(dst_dir, self.shortcut['name'] + '.lnk')
        if remove:
            rm_rf(dst)
        else:
            # The API for the call to 'wininst.create_shortcut' has 3
            # required arguments (path, description and filename)
            # and 4 optional ones (args, working_dir, icon_path and
            # icon_index).
            wininst.create_shortcut(
                quoted(cmd), self.shortcut['name'], dst,
                ' '.join(quoted(arg) for arg in args),
                workdir, icon,
            )
def remove(self):
    for ext in ('.desktop', 'KDE.desktop'):
        path = self.path + ext
        rm_rf(path)
def send(message):
    Router.print_message(message)
    if message == Router.State.initial_clean_old_files:
        Router.start_timer()
        rm_rf(input_zip_file_path)
        rm_rf(output_zip_file_path)
        Router.send(Router.State.initial_clean_old_files_done)
    elif message == Router.State.initial_clean_old_files_done:
        Router.stop_timer("ROUTER: initial input & output temp data removed")
        Router.send(Router.State.read_xml_template)
    elif message == Router.State.read_xml_template:
        Router.start_total_timer_if_not_launched()
        Router.start_timer()
        createtask.read_xml_template()
    elif message == Router.State.read_xml_template_done:
        Router.start_total_timer_if_not_launched()
        Router.stop_timer("ROUTER: xml template read")
        Router.send(Router.State.create_zip_files)
    elif message == Router.State.create_zip_files:
        Router.start_total_timer_if_not_launched()
        Router.start_timer()
        createtask.create_zip_files()
    elif message == Router.State.create_zip_files_done:
        Router.start_total_timer_if_not_launched()
        Router.stop_timer("ROUTER: zip files created")
        Router.send(Router.State.extract_zip_files)
    elif message in Router.State.extract_zip_files:
        Router.start_total_timer_if_not_launched()
        Router.timer = TimeCounter()
        analyzertask.extract_zip_files()
    elif message == Router.State.extract_zip_files_done:
        Router.start_total_timer_if_not_launched()
        Router.stop_timer("ROUTER: zip files extracted")
        Router.send(Router.State.generate_csv_data)
    elif message in Router.State.generate_csv_data:
        Router.start_total_timer_if_not_launched()
        Router.timer = TimeCounter()
        analyzertask.generate_csv()
    elif message == Router.State.generate_csv_data_done:
        Router.start_total_timer_if_not_launched()
        Router.stop_timer("ROUTER: csv files data collected")
        Router.send(Router.State.save_csv_data)
    elif message == Router.State.save_csv_data:
        Router.start_total_timer_if_not_launched()
        Router.print_message("ROUTER: write data to csv")
        Router.timer = TimeCounter()
        analyzertask.write_data_to_csv()
    elif message == Router.State.save_csv_data_done:
        Router.start_total_timer_if_not_launched()
        Router.stop_timer("ROUTER: csv data saved")
        Router.stop_total_timer("ROUTER: TEST COMPLETED")
        Router.send(Router.State.final_clean_old_files)
    elif message == Router.State.final_clean_old_files:
        Router.start_total_timer_if_not_launched()
        rm_rf(input_zip_file_path)
        rm_rf(output_zip_file_path)
        Router.send(Router.State.final_clean_old_files_done)
    elif message == Router.State.final_clean_old_files_done:
        Router.stop_timer("ROUTER: final input & output temp data removed")
    else:
        Router.start_total_timer_if_not_launched()
        Router.print_message("ERROR: unknown message: %s" % message)
        Router.stop_total_timer("ROUTER: stopped on error")
path = self.path
if tp == 'gnome':
    filebrowser = 'gnome-open'
    path += '.desktop'
elif tp == 'kde':
    filebrowser = 'kfmclient openURL'
    path += 'KDE.desktop'

cmd = self.cmd
if cmd[0] == '{{FILEBROWSER}}':
    cmd[0] = filebrowser
elif cmd[0] == '{{WEBBROWSER}}':
    import webbrowser
    executable = get_executable(self.prefix)
    cmd[0:1] = [executable, webbrowser.__file__, '-t']

spec['cmd'] = cmd
spec['path'] = path

# create the shortcuts
make_desktop_entry(spec)


if __name__ == '__main__':
    rm_rf(menu_file)
    Menu('Foo').create()
    Menu('Bar').create()
    Menu('Foo').remove()
    Menu('Foo').remove()
def remove(self):
    rm_rf(self.path)
def _create_dirs(self):
    rm_rf(self.app_path)
    os.makedirs(self.resources_dir)
    os.makedirs(self.macos_dir)
def delete(self, key):
    subdir = os.path.join(self.__root, utils.generate_id(key))
    utils.rm_rf(subdir)
def train(config, debug=False):
    if debug and not run_debug_initial(config=config):
        # `run_debug_initial` should return True if the program is meant to
        # continue afterward
        return

    root = config['project_root']
    train_params = config['train']
    input_params, output_params = config['inputs'], config['outputs']

    Model = getattr(models, train_params['model'])
    assert isinstance(Model, type)
    lr = train_params.get('lr', 0.1)
    model = Model(input_params, output_params)
    optimizer = tf.train.GradientDescentOptimizer(lr)
    model.train_step = optimizer.minimize(model.loss)

    outfolder = os.path.join(root, train_params['outfolder'])
    if not os.path.exists(outfolder):
        os.makedirs(outfolder)
    else:
        for _, _, files in os.walk(outfolder):
            if files:
                utils.rm_rf(outfolder, require_confirmation=False)
    with open(os.path.join(outfolder, 'config_in.yaml'), 'w') as f:
        yaml.dump(config, f, default_flow_style=False)

    checkpoint_freq, report_freq = train_params['checkpoint_freq'], train_params['report_freq']

    sess = tf.Session()
    sess.run(tf.global_variables_initializer())

    restore_itr = train_params.get('restore_itr', None)
    if type(restore_itr) == int:
        # TODO: this won't work, because the checkpoint was just deleted
        model.restore(sess, restore_itr, outfolder)

    data_inputs, data_labels = load_data(model.inputs, model.labels, config)
    batch_indices = index_generator(train_params['batch_size'],
                                    data_labels.values()[0].shape[0])

    for step in range(train_params['max_steps'] + 1):
        indices = next(batch_indices)
        input_feed = {input_tensor: data_inputs[name][indices]
                      for name, input_tensor in model.inputs.items()}
        label_feed = {label_tensor: data_labels[name][indices]
                      for name, label_tensor in model.labels.items()}
        feed_dict = input_feed.copy()
        feed_dict.update(label_feed)

        fetches = [model.train_step, model.loss, model.outputs['winner']['output']]
        _, loss, output = sess.run(fetches, feed_dict=feed_dict)

        if step % checkpoint_freq == 0:
            model.save(sess, step, outfolder)
        if step % report_freq == 0:
            output = np.squeeze(output)
            mean, std = np.mean(output), np.std(output)  # type: float
            print('[o] iteration %d | training loss %.3f | mean %.3f | std: %.3f'
                  % (step, loss, mean, std))

    print('[+] Training complete.')
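# --- Hypothetical sketch (not from the original project): the training loop above
# draws mini-batch indices from an index_generator(batch_size, num_examples) helper
# that is not shown. One plausible implementation, yielding random index batches
# indefinitely so that next(batch_indices) never runs out:
import numpy as np

def index_generator(batch_size, num_examples):
    """Yield arrays of `batch_size` random indices into a dataset of `num_examples` rows."""
    while True:
        yield np.random.randint(0, num_examples, size=batch_size)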
utils.mkdir(args.tempbase)

## preprocess for input file
input_ext = os.path.splitext(args.input)[1]

## node--
args.nodes = max(args.nodes - 1, 1)

if input_ext != ".myf":
    utils.logging("[INFO] Transform input file to myf. (<- %s)" % input_ext, args)
    myf_input = os.path.join(args.output, "input.myf")
    utils.convert_to_myf(args.input, myf_input)
    args.input = myf_input

bc_args = sc.broadcast(args)

utils.logging("[INFO] Start BiSpark.", args)
start_time = time.time()

result_path = align(sc, bc_args.value)

end_time = time.time()
utils.logging("[INFO] BiSpark took : " + str(end_time - start_time), args)

# remove temp files
utils.rm_rf(args.tempbase)

# for DEBUG
if args.local_save != "":
    utils.logging("[INFO] Save to local.", args)
    utils.merge_hdfs(result_path, args.local_save)
def generic_audio_ops(user, group, method, audio=None, params=None):
    if group is None:
        return "Group does not exist"

    storage_svc = get_storage_service(praat.app.config)

    if method == 'GET':
        #g_info = group.details()
        #return jsonify(g_info['details']['audios'])
        audios = []
        for audio in group.audios:
            info = audio.summary()
            if utils.is_true(params.get('show_versions')):
                info['versions'] = storage_svc.show_versions(audio.id)
            audios.append(info)
        resp = {"status": "success", "audios": audios}
        print resp
        return jsonify(resp)

    if not audio or not audio.filename:
        # If no audio file, stop
        status = "No audio file"
        audioName = ""
    elif not utils.isSound(audio.filename):
        # Stop if the uploaded file is not an audio file
        status = "Unknown file type"
        audioName = audio.filename
    else:
        audioName = audio.filename
        data = audio.read()
        key_seed = group.id + audioName
        key = utils.generate_id(group.id + audioName)
        attrs = {
            'created_by': user.email,
        }
        audioObj = praat.Audio.query.get(key)
        retval = storage_svc.put(key, data, attrs)

        # save waveform
        temp_dir = '/tmp/waveform' + key + retval['version'] + '/'
        waveform_name = key + retval['version'] + '.png'
        utils.mkdir_p(temp_dir)
        with open(temp_dir + audioName, 'w') as fp:
            fp.write(data)
        script = praat._scripts_dir + 'drawWaveV2'
        params = [temp_dir + audioName, temp_dir + waveform_name]
        praat.runScript(script, params)
        with open(temp_dir + waveform_name, 'r') as fp:
            data = fp.read()
        attrs = {'name': audioName}
        attrs.update(retval)
        storage_svc.put(waveform_name, data, attrs)
        utils.rm_rf(temp_dir)

        if audioObj is None:
            print 'Creating new audio file'
            audioObj = praat.Audio(audioName, user, group, key_seed)
            praat.db_session.add(audioObj)
            praat.db_session.commit()
        else:
            print audioObj.summary()
            audioObj.updated_at = datetime.datetime.utcnow()
            praat.db_session.commit()
            print 'Updating existing audio file'
        status = "Success"

    result = {
        "status": status,
        "audio": audioName
    }
    #return jsonify(result)
    return redirect('/?context=workspace')