def open(self, path, mode='r'):
    """Context manager yielding a file-like stream for the Azure file at *path*.

    Read modes fetch the file contents up front; write modes buffer in
    memory and upload on exit.  NOTE(review): the ``yield`` implies this is
    decorated with ``contextlib.contextmanager`` at the definition site --
    confirm.
    """
    container, subpath = self._splitAzurePath(path)
    if mode == 'r':
        # os.dirname/os.basename do not exist; use the os.path equivalents.
        _file = self._service.get_file_to_text(
            container, os.path.dirname(subpath), os.path.basename(subpath))
        stream = NamedStringIO(_file.content, name=_file.name)
    elif mode == 'rb':
        _file = self._service.get_file_to_bytes(
            container, os.path.dirname(subpath), os.path.basename(subpath))
        stream = NamedBytesIO(_file.content, name=_file.name)
    elif mode == 'w':
        stream = NamedStringIO(name=subpath)
    elif mode == 'wb':
        stream = NamedBytesIO(name=subpath)
    else:
        raise NotImplementedError(mode + ' is not supported')
    try:
        yield stream
    finally:
        # Only the write modes flush the buffered stream back to Azure.
        if mode == 'w':
            file_write = self._service.create_file_from_text
        elif mode == 'wb':
            file_write = self._service.create_file_from_bytes
        else:
            return
        self.makedirs(os.path.dirname(path))
        file_write(container, os.path.dirname(subpath),
                   os.path.basename(subpath), stream.getvalue())
def getStubPymelCore():
    """Import and return the stub ``pymel.core`` module.

    Temporarily removes any real pymel/maya modules from sys.modules, imports
    the stub package from the stubs directory, then restores the originals.
    (Python 2 module -- uses dict.iteritems.)
    """
    if 'PYMEL_STUBS' in os.environ:
        stubPath = os.environ['PYMEL_STUBS']
    else:
        # this file should be in:
        #    pymel/tools/removePymelAll.py
        # while stubs usually at:
        #    extras/completion/py
        # os.dirname does not exist; the correct call is os.path.dirname.
        stubPath = os.path.join(os.path.dirname(os.path.dirname(THIS_DIR)),
                                'extras', 'completion', 'py')
    # make sure you've got the stubs loaded!
    sys.path.insert(0, stubPath)
    try:
        oldModules = {}
        for moduleName, mod in sys.modules.items():
            if (moduleName == 'pymel' or moduleName.startswith('pymel.')
                    or moduleName == 'maya' or moduleName.startswith('maya.')):
                oldModules[moduleName] = mod
                del sys.modules[moduleName]
        try:
            import pymel.core as pm
            return pm
        finally:
            # Drop the stub modules and restore whatever was loaded before.
            for moduleName, mod in sys.modules.items():
                if (moduleName == 'pymel' or moduleName.startswith('pymel.')
                        or moduleName == 'maya' or moduleName.startswith('maya.')):
                    del sys.modules[moduleName]
            for moduleName, mod in oldModules.iteritems():
                sys.modules[moduleName] = mod
    finally:
        del sys.path[0]
def save_variables(self, save_file):
    """Save the TF session's variables to *save_file* (no-op when None)."""
    if save_file is not None:
        # os.dirname does not exist; use os.path.dirname.
        if not os.path.isdir(os.path.dirname(save_file)):
            os.mkdir(os.path.dirname(save_file))
        print("Saving checkpoint to %s" % save_file, file=sys.stderr)
        self.saver.save(self.session, save_file)
    # NOTE(review): this local Saver is never used; presumably left over from
    # debugging or meant to be assigned to self.saver -- confirm.
    saver = tf.train.Saver()
def change_res(res, option_menu, self):
    """Update the global resolution selection and resolve its full path.

    The two branches were identical except for the filename, so the
    resolution logic is factored into a local helper.
    """
    global resolution
    global fresolution
    resolution = "%s" % (var.get())

    def _resolve(fname):
        # Resolve *fname* relative to the PyInstaller bundle dir or script dir.
        if hasattr(sys, '_MEIPASS'):  # PyInstaller >= 1.6
            os.chdir(sys._MEIPASS)
            return os.path.join(sys._MEIPASS, fname)
        elif '_MEIPASS2' in os.environ:  # PyInstaller < 1.6 (tested on 1.5 only)
            os.chdir(os.environ['_MEIPASS2'])
            return os.path.join(os.environ['_MEIPASS2'], fname)
        else:
            # os.dirname does not exist; use os.path.dirname.
            # NOTE(review): dirname may be '' when launched as `python app.py`,
            # making os.chdir('') fail -- present in the original too; confirm.
            os.chdir(os.path.dirname(sys.argv[0]))
            return os.path.join(os.path.dirname(sys.argv[0]), fname)

    if resolution == 'E00 - Blank 640x480.xml':
        fresolution = _resolve('E00 - Blank 640x480.xml')
    elif resolution == 'E00 - Blank 1280x800.xml':
        fresolution = _resolve('E00 - Blank 1280x800.xml')
def text_speech(text):
    """Synthesize *text* to a WAV file via Watson TTS and return the file path."""
    # os.envion / os.join / os.dirname are typos for os.environ,
    # os.path.join and os.path.dirname.
    u = os.environ.get("TS_USERNAME")
    p = os.environ.get("TS_PASSWORD")
    text_to_speech = TextToSpeechV1(username=u, password=p,
                                    x_watson_learning_opt_out=True)  # Optional flag
    filepath = os.path.join(os.path.dirname(os.path.dirname(__file__)),
                            'static/audio/output.wav')
    with open(filepath, 'wb') as audio_file:
        audio_file.write(text_to_speech.synthesize(
            text, accept='audio/wav', voice="en-US_AllisonVoice"))
    return filepath
def poem(name=None):
    """Render poem.html; collects the dirnames of files under the music path."""
    a = []
    for fn in glob.glob('static/3130000547/forkmusics/music.png' + os.sep + '*'):
        # os.dirname does not exist; use os.path.dirname.
        a.append(os.path.dirname(fn))
    return render_template('poem.html', name=name)
def illustation(name=None):
    """Render illustration.html; collects dirnames of files under the illustration path."""
    a = []
    for fn in glob.glob('static/3130000547/illusration/illustration.png' + os.sep + '*'):
        # os.dirname does not exist; use os.path.dirname.
        a.append(os.path.dirname(fn))
    return render_template('illustration.html', name=name)
def add(name, uid=None, gid=None, groups=None, home=False, shell='/bin/false'):
    '''
    Add a user to the minion

    CLI Example::

        salt '*' user.add name <uid> <gid> <groups> <home> <shell>
    '''
    cmd = 'pw useradd -s {0} '.format(shell)
    if uid:
        cmd += '-u {0} '.format(uid)
    if gid:
        cmd += '-g {0} '.format(gid)
    if groups:
        cmd += '-G {0} '.format(groups)
    if home:
        # os.dirname does not exist; use os.path.dirname.
        cmd += '-m -b {0} '.format(os.path.dirname(home))
    cmd += '-n {0}'.format(name)
    ret = __salt__['cmd.run_all'](cmd)
    # pw returns 0 on success.
    return not ret['retcode']
def gen_log2cna(exports_config: Config.Config, study_config: Config.Config,
                janus_path, verb):
    # TODO janus_path argument is not used, can remove; replace verb with logger
    """Run the seg2gene R script to generate log2CNA data from the SEG file."""
    helper.working_on(verb, message='Gathering files ...')
    seg_file = os.path.join(
        study_config.config_map['output_folder'],
        'data_{}.txt'.format(constants.config2name_map['SEG']))
    bed_file = exports_config.config_map['bed_file']
    l_o_file = os.path.join(
        study_config.config_map['output_folder'],
        'data_{}.txt'.format(
            constants.config2name_map[exports_config.type_config]))
    helper.working_on(verb, message='Generating log2CNA...')
    executable = 'Rscript'
    # os.dirname does not exist; use os.path.dirname.
    r_script_path = os.path.join(os.path.dirname(__file__), R_SCRIPT_DIRNAME,
                                 'seg2gene.r')
    if os.path.exists(r_script_path):
        # Pass the command as an argument list: the previous ', '.join built a
        # single comma-separated string that subprocess cannot execute.
        cmd = [executable, r_script_path, seg_file, bed_file, l_o_file]
        logger.debug('Running R script command: ' + ' '.join(cmd))
        rc = subprocess.call(cmd)
        if rc != 0:
            msg = "Non-zero exit code %i from R script command '%s'" % (
                rc, ' '.join(cmd))
            raise ValueError(msg)
    else:
        raise FileNotFoundError(
            'Cannot find R script path {}'.format(r_script_path))
def add(name, uid=None, gid=None, groups=None, home=True, shell=None, **kwargs):
    '''
    Add a user to the minion

    CLI Example::

        salt '*' user.add name <uid> <gid> <groups> <home> <shell>
    '''
    # Python 2 module (basestring): accept a comma-separated group string.
    if isinstance(groups, basestring):
        groups = groups.split(',')
    cmd = 'pw useradd '
    if shell:
        cmd += '-s {0} '.format(shell)
    if uid:
        cmd += '-u {0} '.format(uid)
    if gid:
        cmd += '-g {0} '.format(gid)
    if groups:
        cmd += '-G {0} '.format(','.join(groups))
    if home:
        if home is True:
            cmd += '-m '
        else:
            # os.dirname does not exist; use os.path.dirname.
            cmd += '-m -b {0} '.format(os.path.dirname(home))
    cmd += '-n {0}'.format(name)
    ret = __salt__['cmd.run_all'](cmd)
    return not ret['retcode']
def get_info(index):
    """Return a dict of president data for the 1-based *index* from presidents.txt."""
    pres_data = {}
    # Two fixes: the trailing comma made file_name a 1-tuple (which open()
    # rejects), and os.dirname does not exist.
    file_name = os.path.join(os.path.dirname(__file__), 'presidents.txt')
    with open(file_name) as pres_in:
        for line in pres_in:
            # Strip the trailing newline, then split the colon-delimited record.
            flds = line[:-1].split(":")
            if int(flds[0]) == index:
                pres_data["lastname"] = flds[1]
                pres_data["firstname"] = flds[2]
                pres_data["birthdate"] = mkdate(flds[3])
                pres_data["deathdate"] = mkdate(flds[4])
                pres_data["birthplace"] = flds[5]
                pres_data["birthstate"] = flds[6]
                pres_data["termstart"] = mkdate(flds[7])
                pres_data["termend"] = mkdate(flds[8])
                pres_data["party"] = flds[9]
                break
    return pres_data
def trans_package(self):
    """Transfer packages from filesystem format on DVD to datastream format on disk."""
    for i in pkg_list:
        # os.dirname/os.basename do not exist; use the os.path equivalents.
        pkg_src = os.path.dirname(i)
        pkg_name = os.path.basename(i)
        os.system('pkgtrans -s %s %s %s' % (i, pkg_src, pkg_name))
def install_dap(name, version='', update=False):
    '''Install a dap from dapi

    If update is True, it will remove previously installed daps of the same name'''
    m, d = _get_metadap_dap(name, version)
    if update:
        available = d['version']
        current = get_installed_version_of(name)
        if not current:
            raise Exception('Cannot update not yet installed dap')
        if dapver.compare(available, current) < 0:
            raise Exception(
                'Currently installed version {c} is newer than version {a} available on Dapi'
                .format(c=current, a=available))
        if dapver.compare(available, current) == 0:
            raise Exception(
                'Currently installed version {c} is the same as the version available on Dapi'
                .format(c=current))
    path, remove_dir = download_dap(name, d=d)
    install_dap_from_path(path, update=update)
    # Best-effort cleanup of the downloaded artifact.
    try:
        if remove_dir:
            # os.dirname does not exist; use os.path.dirname.
            shutil.rmtree(os.path.dirname(path))
        else:
            os.remove(path)
    except OSError:
        # Narrowed from a bare except: only filesystem errors are expected.
        pass
def extract_pictures_from_pdf(self):
    """Extract every embedded image from each listed PDF into the target folder."""
    for i in range(self.extract_from_pics_list_main.count()):
        file = self.extract_from_pics_list_main.item(i).text()
        file_short = os.path.basename(file)[:-4]
        if self.extract_from_pics_end_name.text():
            end_path = self.extract_from_pics_end_name.text()
        else:
            # os.dirname does not exist; use os.path.dirname.
            end_path = os.path.dirname(file)
        pdf_file = fitz.open(file)
        for page_index in range(len(pdf_file)):
            page = pdf_file[page_index]
            image_list = page.getImageList()
            if not image_list:
                continue
            for image_index, img in enumerate(image_list, start=1):
                xref = img[0]
                base_image = pdf_file.extractImage(xref)
                image_bytes = base_image["image"]
                image_ext = base_image["ext"]
                image = Image.open(io.BytesIO(image_bytes))
                # Save by path so PIL infers the format and the previously
                # leaked open() handle is no longer needed.
                image.save(end_path + '\\' + file_short + str(page_index + 1) +
                           '_' + str(image_index) + '.' + image_ext)
def does_file_exist():
    """Adds localhost:8080/sftp/does_file_exist/

    Check to see if a file exists.
    SFTP will throw an error if the directory doesn't exist
    so we wrap this in a try/catch block.
    """
    request_data = json.loads(request.data.decode('utf-8'))
    file = request_data.get('file')
    # os.dirname does not exist; use os.path.dirname.  Renamed the local
    # from `dir` to avoid shadowing the builtin.
    directory = os.path.dirname(file)
    ssh, sftp = initialize_ssh('gencore', 'dalma.abudhabi.nyu.edu')
    results = {}
    results['dir_exists'] = False
    files = []
    try:
        files = sftp.listdir(directory)
        results['dir_exists'] = True
    except FileNotFoundError as e:
        files = []
        results['dir_exists'] = False
        results['error'] = str(e)
    results['file_exists'] = file in files
    return jsonify({'results': results,
                    'context': {'endpoint': '/sftp/does_file_exist',
                                'args': request_data}})
def install_dap(name, version='', update=False, update_allpaths=False, first=True,
                force=False, nodeps=False, reinstall=False):
    '''Install a dap from dapi

    If update is True, it will remove previously installed daps of the same name'''
    m, d = _get_metadap_dap(name, version)
    if update:
        available = d['version']
        current = get_installed_version_of(name)
        if not current:
            raise Exception('Cannot update not yet installed DAP.')
        # Nothing to do when the available version is not strictly newer.
        if dapver.compare(available, current) <= 0:
            return []
    path, remove_dir = download_dap(name, d=d)
    ret = install_dap_from_path(path, update=update, update_allpaths=update_allpaths,
                                first=first, force=force, nodeps=nodeps,
                                reinstall=reinstall)
    # Best-effort cleanup of the downloaded artifact.
    try:
        if remove_dir:
            # os.dirname does not exist; use os.path.dirname.
            shutil.rmtree(os.path.dirname(path))
        else:
            os.remove(path)
    except OSError:
        # Narrowed from a bare except: only filesystem errors are expected.
        pass
    return ret
async def test_migration_future_chain(downloaded_update_file, loop, monkeypatch,
                                      state_partition, data_partition,
                                      resin_data_dir, unused_sysroot):
    """Drive an update session through validation/writing, asserting progress."""
    # os.dirname does not exist; use os.path.dirname.
    dl_path = os.path.dirname(downloaded_update_file)
    session = otupdate.buildroot.update_session.UpdateSession(dl_path)
    fut = otupdate.migration.endpoints._begin_validation(
        session, loop, dl_path, 'my-robot-name')
    assert session.stage == otupdate.buildroot.update_session.Stages.VALIDATING
    assert session.current_task == fut
    last_progress = 0.0
    while not fut.done():
        # Progress must be monotonically non-decreasing during validation.
        assert session.state['progress'] >= last_progress
        assert session.state['stage'] == 'validating'
        assert session.stage\
            == otupdate.buildroot.update_session.Stages.VALIDATING
        last_progress = session.state['progress']
        await asyncio.sleep(0.01)
    await fut
    yield  # This yield needs to be here to let the loop spin
    # NOTE(review): comparing state['stage'] against state['writing'] looks
    # suspicious (probably meant the literal 'writing') -- confirm intent.
    while session.state['stage'] == session.state['writing']:
        assert session.state['progress'] >= last_progress
        assert session.stage\
            == otupdate.buildroot.update_session.Stages.VALIDATING
        assert session.state['stage'] == 'writing'
        last_progress = session.state['progress']
        await asyncio.sleep(0.1)
    assert session.state['stage']\
        == otupdate.buildroot.update_session.Stages.DONE
    with zipfile.ZipFile(downloaded_update_file, 'r') as zf:
        assert open(unused_sysroot, 'rb').read() == \
            zf.read('rootfs.ext4')
def __setitem__(self, path, val):
    """Write *val* at *path*, first ensuring its parent directory entry exists."""
    # os.dirname does not exist; use os.path.dirname.
    dirname = os.path.dirname(path)
    if dirname != '/':
        self.conn.write(dirname, dir=True)
    self._writeval(path, val)
def illustation(name=None):
    """Render illustration.html; collects dirnames of files under the illustration path."""
    a = []
    for fn in glob.glob('static/3130000547/illusration/illustration.png' + os.sep + '*'):
        # os.dirname does not exist; use os.path.dirname.
        a.append(os.path.dirname(fn))
    return render_template('illustration.html', name=name)
def poem(name=None):
    """Render poem.html; collects dirnames of files under the music path."""
    a = []
    for fn in glob.glob('static/3130000547/forkmusics/music.png' + os.sep + '*'):
        # os.dirname does not exist; use os.path.dirname.
        a.append(os.path.dirname(fn))
    return render_template('poem.html', name=name)
def get_web_cache_path(sub_dir):
    """Return (creating if needed) the web_cache directory for *sub_dir*."""
    if not running_on_linux():
        # os.dirname does not exist; use os.path.dirname.
        ret = os.path.join(os.path.dirname(materials_base_path), "web_cache", sub_dir)
    else:
        ret = os.path.join(get_home_path(), "web_cache", sub_dir)
    if not os.path.isdir(ret):
        os.makedirs(ret)
    return ret
def setUp(self):
    """Load config.ini and import the rpcauth module from the configured path."""
    config = configparser.ConfigParser()
    # os.dirname does not exist; use os.path.dirname.
    config_path = os.path.abspath(
        os.path.join(os.sep, os.path.abspath(os.path.dirname(__file__)),
                     "../config.ini"))
    with open(config_path, encoding="utf8") as config_file:
        config.read_file(config_file)
    # Make the configured rpcauth script importable by module name.
    sys.path.insert(0, os.path.dirname(config['environment']['RPCAUTH']))
    self.rpcauth = importlib.import_module('rpcauth')
def clean_csvs(dialogpath=None):
    """ Translate non-ASCII characters to spaces or equivalent ASCII characters """
    # os.dirname does not exist; use os.path.dirname.  *dialogpath* may be a
    # single file or a directory of files.
    dialogdir = os.path.dirname(dialogpath) if os.path.isfile(dialogpath) else dialogpath
    filenames = ([dialogpath.split(os.path.sep)[-1]]
                 if os.path.isfile(dialogpath) else os.listdir(dialogpath))
    for filename in filenames:
        filepath = os.path.join(dialogdir, filename)
        df = clean_df(filepath)
        # Rewrite the cleaned frame in place.
        df.to_csv(filepath, header=None)
    return filenames
def __init__(self, uri, session=None):
    """Initialise the client, creating an SFTP session if none was given."""
    super(BaseClient, self).__init__(uri, session)
    # os.dirname does not exist; use os.path.dirname.
    self.session = self.session or (SFTPClient(
        username=self.username,
        password=self.password,
        port=self.port or 22,  # default SFTP port
        default_path=os.path.dirname(self.path),
        private_key=self.query.get('private_key', None),
        cnopts=self.query))
def write(self, entry_name, content):
    """Write *content* under file_path; names ending in '/' create directories."""
    path = os.path.join(self.file_path, entry_name)
    # os.dirname does not exist; use os.path.dirname.
    if not mkdirs(os.path.dirname(path)):
        raise IOError("Directory can not be created to write file: " + path)
    if entry_name.endswith("/"):
        os.mkdir(path)
    else:
        with open(path, "w") as f:
            f.write(content)
def convert_djvu_to_pdf(self):
    """Convert every listed DjVu file to PDF via the ddjvu tool.

    The output folder is taken from the end-name field when set, otherwise
    each file's own folder is used (the two original branches differed only
    in that choice).
    """
    for i in range(self.convert_djvu_list_main.count()):
        item = self.convert_djvu_list_main.item(i).text()
        # os.dirname does not exist; use os.path.dirname.
        if self.convert_djvu_end_name.text():
            result_folder = os.path.dirname(self.convert_djvu_end_name.text()) + '\\'
        else:
            result_folder = os.path.dirname(item) + '\\'
        # Strip the '.djvu' extension and append '.pdf'.
        result_path = result_folder + os.path.basename(item)[:-5] + '.pdf'
        os.system('ddjvu -format=pdf "' + item + '" "' + result_path + '"')
def key(self):
    """Return the debug keystore path, generating the keystore if missing."""
    path = os.path.join(os.environ['HOME'], '.android', 'debug.keystore')
    if os.path.exists(path):
        return path
    # os.dirname does not exist; use os.path.dirname.  exist_ok covers the
    # case where ~/.android exists but the keystore does not.
    os.makedirs(os.path.dirname(path), exist_ok=True)
    log.info("generating key for repackaging")
    os.system('keytool -genkey -v -keystore %(path)s -alias androiddebugkey -dname "CN=Android Debug, O=Android, C=US" -storepass android -keypass android -keyalg RSA -keysize 2048 -validity 10000' % dict(path=path))
    return path
def get_music_rename_filename(new_path, old_path):
    """Validate that *new_path* is safe to move to and return it.

    Backs up a same-stem sibling when upgrading to FLAC; raises if the
    destination (or a conflicting sibling) already exists.
    """
    # We only allow moving if the file doesn't already exist
    if os.path.exists(new_path):
        if old_path and not samefile(old_path, new_path):
            raise Exception(f"File '{new_path}' already exists.")
    # If another file matches with a different extension, try to rename it.
    # os.dirname does not exist; use os.path.dirname.
    old_dirname = os.path.dirname(old_path)
    dirname = os.path.dirname(new_path)
    basename = os.path.basename(new_path)
    filename, ext = os.path.splitext(basename)
    ext_lower = ext.lower()
    for list_basename in os.listdir(dirname):
        list_path = os.path.join(dirname, list_basename)
        list_filename, list_ext = os.path.splitext(list_basename)
        list_ext_lower = list_ext.lower()
        # We only care about audio extensions.  splitext keeps the leading
        # dot, so the comparison set must include it -- the original compared
        # against dot-less strings and therefore never matched anything.
        if list_ext_lower not in ('.mp3', '.flac', '.m4a', '.wav'):
            continue
        # We only care about different extensions
        if ext_lower == list_ext_lower:
            continue
        # Warn if different extension and different filename
        if list_filename != filename:
            log.warn(f"Found file with different extension '{list_path}'.")
            continue
        # Fail if different extension and same name, and we aren't upgrading to FLAC
        if ext_lower != ".flac" or list_ext_lower == ".flac":
            raise Exception(
                f"File with different extension '{list_path}' already exists.")
        # Otherwise backup the old file, and proceed
        backup_path = os.path.join(old_dirname, list_basename + '.bak')
        log.warn("Backing up different extension %r => %r", list_path, backup_path)
        os.rename(list_path, backup_path)
    return new_path
def parseFile(filename, envin, envout = {}):
    # Execute *filename* inside the namespace *envin* (Python 2 exec-statement
    # syntax throughout) and return a dict of the *envout* keys extracted
    # from that namespace afterwards.
    # NOTE(review): `envout={}` is a mutable default argument -- harmless here
    # since it is only iterated, but worth confirming.
    exec "from sys import path" in envin
    # Make imports relative to the parsed file's directory resolvable while
    # the file executes.
    exec "path.append(\"" + dirname(filename) + "\")" in envin
    # Drop the helper name so it does not leak into the parsed namespace.
    envin.pop("path")
    lines = open(filename, 'r').read()
    exec lines in envin
    returndict = {}
    for key in envout:
        # Copy out only the requested result variables.
        returndict[key] = envin[key]
    return returndict
def get_deep_directory_exists(d):
    """Return the deepest existing ancestor directory of *d*.

    Walks up via os.path.dirname until an existing directory is found;
    '/' and '' are fixed points of dirname and are returned as-is.
    """
    if os.path.isdir(d):
        return d
    # os.dirname does not exist; use os.path.dirname.
    newd = os.path.dirname(d)
    # special values '/' and ''
    if newd == d:
        return newd
    return get_deep_directory_exists(newd)
def __init__(self, experiment, dev=None):
    """Connect to an EVT2 device via the .NET EventExchanger assemblies."""
    try:
        import clr
        clr.AddReference("System.Reflection")
        from System.Reflection import Assembly
        directory = os.getcwd()
        print(directory)
        # os.dirname does not exist; use os.path.dirname.  NOTE(review):
        # pluginfolder is computed but never used -- confirm whether the
        # Assembly loads below should be relative to it instead of cwd.
        pluginfolder = os.path.dirname(__file__)
        # Raw strings so the backslashes are never treated as escapes.
        Assembly.UnsafeLoadFrom(directory + r'\share\opensesame_plugins\Pulse_EVT2\HidSharp.dll')
        Assembly.UnsafeLoadFrom(directory + r'\share\opensesame_plugins\Pulse_EVT2\HidSharp.DeviceHelpers.dll')
        Assembly.UnsafeLoadFrom(directory + r'\share\opensesame_plugins\Pulse_EVT2\EventExchanger.dll')
    except Exception as e:
        print(e.Message)
        print(e.Source)
    try:
        self._EVT2 = clr.ID.EventExchanger()
        print(self._EVT2.Attached())
        # Attached() returns a '/'-separated list; count the entries.
        self._nEVT2 = self._EVT2.Attached().count('/') + 1
    except Exception as e:
        raise osexception('EVT2 Error')
    # print(self._EVT2.Attached())
    self.experiment = experiment
    # If a device has not been specified, autodetect
    if dev in (None, "", "autodetect"):
        try:
            if self._nEVT2 == 1:
                print("Starting the only attached EVT")
                self._EVT2.Start()
            else:
                print(("Starting device %s" % self._EVT2.Attached().partition('/')[0]))
                self._EVT2.Start(self._EVT2.Attached().partition('/')[0])
        except Exception as e:
            print(e.Message)
            raise osexception(
                "libEVT2 does not know how to auto-detect the EVT on your platform. Please specify a device.")
    else:
        try:
            self._EVT2.Start(str(dev))
        except Exception as e:
            raise osexception(
                "Failed to open device port '%s' in libEVT2: '%s'" \
                % (dev, e))
    if self._EVT2 is None:
        raise osexception(
            "libEVT2 failed to auto-detect a unique instance of the EVT. Please specify a device.")
    debug.msg("using device %s" % dev)
def test_autocorrs(self):
    """Run autocorr_file over every .txt file in ~/src/SJ (or cwd's parent)."""
    usual_place = os.path.join(os.getenv("HOME"), "src", "SJ")
    if os.path.exists(usual_place):
        parent_dir = usual_place
    else:
        # os.dirname does not exist; use os.path.dirname.
        parent_dir = os.path.dirname(os.getcwd())
    files = [f for f in os.listdir(parent_dir) if f.endswith(".txt")]
    # Deterministic processing order.
    files.sort()
    for f in files:
        autocorr_file(os.path.join(parent_dir, f))
def __init__(self, top_manager, opened_path):
    """Constructor for the EditorScreenManager."""
    super().__init__(top_manager, 'editor')
    if os.path.isdir(opened_path):
        self.opened_path = opened_path
    else:
        # A file was opened: keep its containing directory.
        # os.dirname does not exist; use os.path.dirname.
        self.opened_path = os.path.dirname(opened_path)
    self.new_file_open = False
def build_sqlite3(des=None):
    """Populate (creating if needed) a module descriptor for sqlite3."""
    if des is None:
        des = ModuleDescriptor('sqlite3')
    des.add_module('sqlite3', ignore=['test'])
    des.add_module('_sqlite3')
    des.add_dll('sqlite3')
    import _sqlite3
    # os.dirname does not exist; use os.path.dirname.
    des.add_dll_search_path(os.path.dirname(_sqlite3.__file__))
    # des.add_dependency('datetime')
    return des
def transferFile(self, params):
    """Write *data* at *offset* into outFile, creating parent dirs as needed.

    (Python 2 module: uses the `file` builtin.)
    """
    outFile, data, offset, numBytes = params
    if os.path.exists(outFile):
        out = file(outFile, 'ab')
    else:
        # os.dirname does not exist; use os.path.dirname.
        dirPath = os.path.dirname(outFile)
        os.makedirs(dirPath)
        out = file(outFile, 'wb')
    out.seek(offset)
    out.write(data)
    out.close()
    return Result(Boolean, 'true')
def get_gsi_version(cfg):
    '''
    Determine the version of GSI used
    '''
    # First, look for a '.version' in the directory with a dirname
    if os.path.exists(os.path.join(cfg.gsi_root_dir, '.version')):
        return open(os.path.join(cfg.gsi_root_dir, '.version')).readline().strip()
    log.warn(
        'No version string found in $GSI_ROOT/.version. Attempting to guess')
    # for comGSI, I name the directories such that the dirname is the version number
    # str has no .contains() method -- use the `in` operator.  os.dirname
    # does not exist; use os.path.dirname.
    if 'comgsi' in cfg.gsi_root_dir:
        gsi_version = os.path.dirname(cfg.gsi_root_dir)
        # Return the guessed version (the original fell through, implicitly
        # returning None even after computing it).
        return gsi_version
def show_image_det_from_class(img, objects, type_names=None, ignored_type_names=None,
                              occlusion=-1, truncation=-1, saveto=-1):
    '''
    (needs dataset class and object class from frustum-pointnets.)
    show image with labels from objects,bbox,occlusion,truncation
    :param img(ndarray):
    :param objects(list of class): type,xmin,ymin,xmax,ymax,occlusion,truncation
    :param type_names(tuple): like('Car','Pedestrian','Cyclist')
    :param ignored_type_names(tuple): like ('DontCare')
    :param occlusion(int): >0:show occlusion(0,1,2,3)
    :param truncation(int): >0:show truncation(0~1)
    :param saveto(str):str:save else: show
    :return: None
    '''
    # A list has no .type_names attribute; the guard is meant to require the
    # type_names argument itself.
    assert type_names is not None
    for obj in objects:
        if obj.type in type_names:
            try:
                cv2.rectangle(img, (int(obj.xmin), int(obj.ymin)),
                              (int(obj.xmax), int(obj.ymax)), (0, 255, 0), 1)
                cv2.putText(img, obj.type[:3], (int(obj.xmin), int(obj.ymin - 15)),
                            cv2.FONT_HERSHEY_DUPLEX, 0.5, (0, 255, 0), 1)
            except AttributeError:
                print("No object.xmin/ymin/xmax/ymax/type(str)")
            if occlusion > 0:
                try:
                    cv2.putText(img, 'o' + str(obj.occlusion),
                                (int(obj.xmin), int(obj.ymin + 15)),
                                cv2.FONT_HERSHEY_DUPLEX, 0.5, (0, 255, 0), 1)
                except AttributeError:
                    print("No object.occlusion but you set occlusion!=-1")
            if truncation > 0:
                try:
                    cv2.putText(img, 't' + str(obj.truncation),
                                (int(obj.xmin), int(obj.ymin)),
                                cv2.FONT_HERSHEY_DUPLEX, 0.5, (0, 255, 0), 1)
                except AttributeError:
                    print("No object.occlusion but you set truncation!=-1")
        elif obj.type in ignored_type_names:
            cv2.rectangle(img, (int(obj.xmin), int(obj.ymin)),
                          (int(obj.xmax), int(obj.ymax)), (0, 100, 100), 1)
            cv2.putText(img, 'ignored', (int(obj.xmin), int(obj.ymin - 15)),
                        cv2.FONT_HERSHEY_DUPLEX, 0.5, (0, 100, 100), 1)
    if isinstance(saveto, str):
        # os.abspath/os.dirname do not exist; use the os.path equivalents.
        save_dir = os.path.abspath(os.path.dirname(saveto))
        if not os.path.exists(save_dir):
            os.makedirs(save_dir)
        # An ndarray has no .save(); write the image through OpenCV.
        cv2.imwrite(saveto, img)
    else:
        # cv2.imshow requires a window name as first argument.
        cv2.imshow('image', img)
        cv2.waitKey(0)
def plotLearningCurve(entries, output_file):
    """Plot train/test loss curves per entry (Python 2 module: has_key/map-as-list)."""
    color_id = 0
    for entry in entries:
        train_values = map(float, entry['train'].split('|'))
        test_values = map(float, entry['test'].split('|')) if entry.has_key('test') else None
        # Cycle through the preset palette, then fall back to random colors.
        color = color_list[color_id] if color_id < len(color_list) else random_color()
        color_id += 1
        handler1, = plt.plot(range(1, len(train_values) + 1), train_values,
                             color=color, linewidth=3, label=entry['title'])
        if test_values != None:
            handler2, = plt.plot(range(1, len(test_values) + 1), test_values,
                                 color=color, linewidth=1)
    plt.legend()
    plt.xlabel('Epoch')
    plt.ylabel('Loss')
    if output_file == sys.stdout:
        plt.show()
    else:
        # os.dirname does not exist; use os.path.dirname.
        if not os.path.exists(os.path.dirname(output_file)):
            os.makedirs(os.path.dirname(output_file))
        fig = plt.gcf()
        fig.set_size_inches(18, 13)
        fig.savefig(output_file)
def setup_tag_project(project_name='tag_project', app_name='tag_app'):
    """Create a Django project+app skeleton with template and templatetags dirs."""
    django.core.management.call_command('startproject', project_name)
    django.core.management.call_command('startapp', app_name)
    root = os.path.join(os.getcwd(), project_name)
    template_dir = os.path.join(root, 'templates')
    tags_dir = os.path.join(root, app_name, 'templatetags')
    add_template_dir_to_settings(settings_file, template_dir)
    os.mkdir(template_dir)
    # os.copytree/os.dirname do not exist: copytree lives in shutil, and it
    # creates tags_dir itself (failing if it already exists), so the original
    # pre-mkdir of tags_dir is dropped.
    import shutil
    shutil.copytree(os.path.join(os.path.dirname(__file__), 'templatetags'),
                    tags_dir)
    return template_dir, tags_dir
def plotGradient(pkl_file, output_file, batch_per_epoch=-1):
    """Plot per-layer gradient magnitudes from a pickle (Python 2: cPickle/xrange)."""
    gradient_info_list = cPickle.load(open(pkl_file, 'rb'))
    layer_num = len(gradient_info_list[0]['gU'])
    # X axis is either plain sample index or epoch*batches+index.
    x_axis_idx = range(len(gradient_info_list)) if batch_per_epoch <= 0 else \
        map(lambda x: batch_per_epoch * x['epoch'] + x['index'], gradient_info_list)
    for layer_idx in xrange(layer_num):
        plt.plot(x_axis_idx, map(lambda x: x['gU'][layer_idx], gradient_info_list),
                 label='gU%d' % (layer_idx + 1))
        plt.plot(x_axis_idx, map(lambda x: x['gW'][layer_idx], gradient_info_list),
                 label='gW%d' % (layer_idx + 1))
        plt.plot(x_axis_idx, map(lambda x: x['gs'][layer_idx], gradient_info_list),
                 label='gs%d' % (layer_idx + 1))
    plt.plot(x_axis_idx, map(lambda x: x['gV'], gradient_info_list), label='gV')
    plt.legend()
    plt.title('gradient information loaded from %s' % pkl_file)
    plt.xlabel('Epoch/Batches')
    plt.ylabel('Gradient Avg Abs Value')
    if output_file == sys.stdout:
        plt.show()
    else:
        # os.dirname does not exist; use os.path.dirname.
        if not os.path.exists(os.path.dirname(output_file)):
            os.makedirs(os.path.dirname(output_file))
        fig = plt.gcf()
        fig.set_size_inches(18, 13)
        fig.savefig(output_file)
def start_record(self):
    """Open the bag file and begin recording the subscribed topics."""
    if not os.path.exists(os.path.dirname(self.bag_path)):
        if self.make_path:
            # os.dirname does not exist; use os.path.dirname.
            os.makedirs(os.path.dirname(self.bag_path))
        else:
            raise PathNotPresent("Hosting directory does not exist.")
    if os.path.exists(self.bag_path):
        raise BagAlreadyStarted("Bag file with path " + self.bag_path + " already exists.")
    rospy.loginfo("Starting bag " + self.bag_path)
    self.bag_file = rosbag.Bag(self.bag_path, "w")
    self.subscribe_topics()
    self.start_time = rospy.Time.now()
    rospy.loginfo("Bag " + self.bag_path + " fully recording.")
def main():
    """Emit intergenic regions and gene features from a GFF database."""
    opts = docopt(__doc__)
    if opts["-o"]:
        # Redirect output to the requested file: it must be opened for
        # *writing* (the original opened it read-only), and os.dirname does
        # not exist -- use os.path.dirname.
        if os.path.exists(os.path.dirname(opts["-o"])):
            sys.stdout = open(opts["-o"], "w")
    gff_fn = opts["<infile>"]
    db_fn = gff_fn + ".db"
    if not os.path.exists(db_fn):
        create_db(gff_fn, db_fn)
    fdb = FeatureDB(db_fn)
    assert "gene" in list(fdb.featuretypes()), "Database doesn't contain genes"
    for chrom in fdb.chromosomes():
        biggest_stop = 0
        biggest_stop_id = chrom + "_START"
        genes = list(fdb.features_of_type("gene", chrom=chrom))
        genes.extend(list(fdb.features_of_type("transposable_element", chrom=chrom)))
        genes.extend(list(fdb.features_of_type("transposable_element_gene", chrom=chrom)))
        genes = sorted(genes, key=lambda g: g.start)
        print(fdb[chrom])
        for gene in genes:
            start = gene.start
            stop = gene.stop
            agi = gene.id
            if start > biggest_stop:
                ig_name = biggest_stop_id + "-" + agi
                ig_start = biggest_stop + 1  # starts at the next pos
                ig_stop = start - 1  # stops before the next gene
                _print_intergene(chrom, ig_start, ig_stop, ig_name)
            if stop > biggest_stop:
                biggest_stop = stop
                biggest_stop_id = agi
            print(gene)
            for transcript in fdb.children(gene):
                print(transcript)
                for exon in fdb.children(transcript):
                    print(exon)
    if opts["-o"]:
        sys.stdout.close()
def directory_changed(self, path):
    """Propagate a change event for the directory containing *path*."""
    # os.dirname does not exist; use os.path.dirname.
    return self.changed(os.path.dirname(path))
# NOTE(review): this is a jedi completion-test fixture.  The "#?" markers are
# assertions consumed by jedi's test runner, and the syntax error below is
# intentional test data ("modifications, that should fail") -- do not "fix"
# this file's code.
import sys
import os
from os import dirname
sys.path.insert(0, '../../jedi')
sys.path.append(dirname(os.path.abspath('thirdparty' + os.path.sep + 'asdf')))

# modifications, that should fail:
# syntax err
sys.path.append('a' +* '/thirdparty')

#? ['evaluate']
import evaluate
#? ['evaluator_function_cache']
evaluate.Evaluator_fu

# Those don't work because dirname and abspath are not properly understood.
##? ['jedi_']
import jedi_
##? ['el']
jedi_.el
# The os module does not provide abspath/dirname; they live in os.path, so
# importing them from os raises ImportError at load time.
from os.path import abspath, dirname

# Absolute directory containing this module.
_CWD = dirname(abspath(__file__))
def get_cwd():
    """Return the absolute directory containing the invoked script."""
    filename = sys.argv[0]
    # os.dirname/os.abspath/os.expanduser are typos for the os.path functions.
    return os.path.dirname(os.path.abspath(os.path.expanduser(filename)))
#-------------------------- # lexer.py # # Verilog-AMS Lexical Analyzer # # Copyright (C) 2015, Andrew Plumb # License: Apache 2.0 #-------------------------- from __future__ import absolute_import from __future__ import print_function import sys import os import re sys.path.insert(0, os.path.dirname(os.path.dirname(os.dirname(os.path.abspath(__file)))) ) from pyvams.vamsparser import ply from pyvams.vamsparser.ply.lex import * class VerilogAMSLexer(object): """ Verilog-AMS Lexical Analyzer """ def __init__(self, error_func): self.filename = '' self.error_func = error_func self.directives = [] self.default_nettype = 'wire' def build(self, **kwargs): self.lexer = ply.lex.lex(object=self, **kwargs) def input(self, data): self.lexer.input(data)
will generally fail, in which case the parent directory is added to sys.path and the import is tried again. Then "pyzo.start()" is called. """ import os import sys # Imports that are maybe not used in Pyzo, but are/can be in the tools. # Import them now, so they are available in the frozen app. import shutil if hasattr(sys, 'frozen') and sys.frozen: app_dir = os.dirname(os.path.abspath(sys.executable)) # Enable loading from source sys.path.insert(0, os.path.join(app_dir, 'source')) sys.path.insert(0, os.path.join(app_dir, 'source/more')) # Import import pyzo else: # Try importing try: import pyzo except ImportError: # Very probably run as a script, either the package or the __main__ # directly. Add parent directory to sys.path and try again. thisDir = os.path.abspath(os.path.dirname(__file__)) sys.path.insert(0, os.path.split(thisDir)[0])
# NOTE(review): reads DEBUG unconditionally and raises KeyError if unset --
# confirm that is intended.
DEBUG = os.environ['DEBUG']

# Pick a universe (default from above, or as specified in the environment)
if 'UNIVERSE' in os.environ:
    universe = int(os.environ['UNIVERSE'])

# Pick a tick interval (default from above, or as specified in the environment)
if 'INTERVAL' in os.environ:
    interval = int(os.environ['INTERVAL'])

# Pick a socket file ($TMPDIR/plexMonitor/DMX.socket, or as specified in the environment)
cmd_file = None
data_dir = None
if 'SOCKET' in os.environ:
    cmd_file = os.environ['SOCKET']
    # os.dirname does not exist; use os.path.dirname.
    data_dir = os.path.dirname(cmd_file)
else:
    proc = subprocess.Popen(['getconf', 'DARWIN_USER_TEMP_DIR'],
                            stdout=subprocess.PIPE, shell=False)
    # NOTE(review): on Python 3 communicate() yields bytes; the concatenation
    # below presumes str -- confirm the target Python version.
    (tmp_dir, err) = proc.communicate()
    tmp_dir = tmp_dir.strip()
    data_dir = tmp_dir + 'plexMonitor/'
    cmd_file = data_dir + 'DMX.socket'

# Sanity checks
if (not os.path.isdir(data_dir)):
    raise Exception('Bad config: ' + data_dir)

# Open the socket
if (os.path.exists(cmd_file)):
    os.unlink(cmd_file)
sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
# -*- coding: utf-8 -*- import os import sys import codecs from setuptools import setup try: # Python 3 from os import dirname except ImportError: # Python 2 from os.path import dirname here = os.path.abspath(dirname(__file__)) with codecs.open(os.path.join(here, 'README.rst'), encoding='utf-8') as f: long_description = '\n' + f.read() if sys.argv[-1] == "publish": os.system("python setup.py sdist bdist_wheel upload") sys.exit() required = [ 'humanize', 'pytz', 'dateparser', 'iso8601', 'python-dateutil',
if USE_WINDOWS:
    # Try to add some commonly needed paths to PATH
    paths = os.environ.get('PATH', '').split(os.path.pathsep)
    program_files = os.environ.get('PROGRAMFILES')
    if program_files:
        # Guess some paths
        paths += glob.glob(os.path.join(program_files, 'gs/gs*/bin'))
        paths += glob.glob(os.path.join(program_files, 'pstoedit*'))
        paths += glob.glob(os.path.join(program_files, 'miktex*/miktex/bin'))
    # FIXME: a better solution would be to look them up from the registry
    # The path where Inkscape is likely to be.  os.abspath/os.dirname do not
    # exist; use the os.path equivalents.
    paths += [os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), '..', '..')]
    # Set the paths
    os.environ['PATH'] = os.path.pathsep.join(paths)


class ConvertInfo(object):
    """Holds the parameters for one text-to-path conversion request."""

    def __init__(self):
        self.text = None
        self.preamble_file = None
        self.page_width = None
        self.scale_factor = None
        self.has_node = False
        self.text_to_path = False
        self.selected_converter = None
def load(self, flag='r'):
    """Open the shelve at self.filename into self.data, creating parent dirs.

    The *flag* is passed straight through to shelve.open.
    """
    # os.dirname does not exist; use os.path.dirname.  exist_ok avoids a
    # FileExistsError when the directory is already present (e.g. on a
    # second load).  Also avoid shadowing the `file` builtin.
    os.makedirs(os.path.dirname(self.filename), exist_ok=True)
    self.data = shelve.open(self.filename, flag=flag, writeback=True)
# Just sketch
import os, sys

# os.abspath/os.dirname do not exist; use the os.path equivalents.
CONFIG_FILE = os.path.join(os.path.abspath(os.path.dirname(__file__)), "../config.yaml")


def get_config_variable(name):
    """ Retrieve variable name from configuration """
    pass
# -*- coding: utf-8 -*-
# Django settings for pyglossary project.

import os, string
# dirname lives in os.path, not os; `from os import dirname` raises
# ImportError at module load.
from os.path import dirname, join

DJ_ROOT_DIR = dirname(__file__)

DEBUG = True
TEMPLATE_DEBUG = DEBUG

ADMINS = (
    # ('Your Name', '*****@*****.**'),
)

MANAGERS = ADMINS

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': join(DJ_ROOT_DIR, 'pyglossary.sqlite'),
        'USER': '',
        'PASSWORD': '',
        'HOST': '',
        'PORT': '',
    }
}

# Local time zone for this installation. Choices can be found here:
sys.stdout.write("finished line %i\n"%j) sys.stdout.flush() try: return array(out).transpose() except: return out if __name__ == '__main__': import sys,os fname=sys.argv[1] exargs={} for a in sys.argv[2:]: if a[:5]=='erng=': exargs['erng']=float(a[5:]) if fname[-4:]=='.npy': fdir=os.dirname(fname) fname=os.basename(fname) rep=meas_all(fname,fdir) else: init_single(fname) rep=anal(range(len(dt))) for a in rep: print a global counter counter=0 def bump(w2=310,w1=1500,vl=None,slim=[2.2,2.6],rep=1,min_dis=0.015,loud=0,fit_wid=2): global counter if vl==None: oxide=polyval(spectra.cau_sio2,px)