def edit_changelog(self):
    """Strip the redundant '* New upstream release' line from debian/changelog."""
    sed_command = "sed -i '/* New upstream release/d' debian/changelog"
    _call(sed_command, shell=True)
def get_pic(now_img_files_list):
    """Write and execute a one-shot shell script that applies
    *now_img_files_list* as the desktop wallpaper via the wallpaper
    manager selected in the global ``wall_manager``.

    The script is written to /tmp, made executable, run, then deleted.
    """
    print('get_pic')
    global wall_manager
    tmpfls = ''
    print('get pic: ', now_img_files_list)
    print('wall_manager = ', type(wall_manager))
    if wall_manager == 'gsettings':
        tmpfls = (
            '#!/bin/bash \n'
            'gsettings set org.gnome.desktop.background picture-uri file://"' +
            now_img_files_list + '"')
    elif wall_manager == 'pcmanfm':
        tmpfls = ('#!/bin/bash \n'
                  'pcmanfm --set-wallpaper "' + now_img_files_list + '"')
    elif wall_manager == 'feh':
        tmpfls = ('#!/bin/bash \n'
                  'feh --bg-scale "' + now_img_files_list + '"')
    else:
        print("i don'n know this wall manager")  # (sic) original message kept
    print('tmpfls = ', tmpfls)
    # Context manager guarantees the handle is closed before chmod/exec
    # (the original left the file object to be collected).
    with open('/tmp/wallnext_tmp.sh', 'w') as tmpfl:
        tmpfl.write(tmpfls)
    # BUG FIX: os.path.join(pwd, '/tmp/wallnext_tmp.sh') discarded `pwd`
    # anyway (the second component is absolute) and depended on a global
    # that may be undefined here — chmod the script path directly.
    os.chmod('/tmp/wallnext_tmp.sh', 0o777)
    _call(['/tmp/./wallnext_tmp.sh'])
    os.remove('/tmp/wallnext_tmp.sh')
def get_pic(now_img_files_list):
    """Build a throw-away shell script that sets *now_img_files_list* as the
    wallpaper with the configured manager, run it, then remove it."""
    print('get_pic')
    global wall_manager
    script_body = ''
    print('get pic: ', now_img_files_list)
    #pwd = os.getcwd() + '/'
    script_file = open('/tmp/wallnext_tmp.sh', 'w')
    print('wall_manager = ', type(wall_manager))
    header = '#!/bin/bash \n'
    if wall_manager == 'gsettings':
        script_body = (
            header +
            'gsettings set org.gnome.desktop.background picture-uri file://"' +
            now_img_files_list + '"')
    elif wall_manager == 'pcmanfm':
        script_body = header + 'pcmanfm --set-wallpaper "' + now_img_files_list + '"'
    elif wall_manager == 'feh':
        script_body = header + 'feh --bg-scale "' + now_img_files_list + '"'
    else:
        print("i don'n know this wall manager")
    print('tmpfls = ', script_body)
    script_file.write(script_body)
    script_file.close()
    os.chmod(os.path.join(pwd, '/tmp/wallnext_tmp.sh'), 0o777)
    _call(['/tmp/./wallnext_tmp.sh'])
    os.remove('/tmp/wallnext_tmp.sh')
def edit_changelog(self):
    """Remove the extra '* New upstream release' entry left in debian/changelog."""
    _call("sed -i '/* New upstream release/d' debian/changelog",
          shell=True)
def run(self, command=None):
    """Open an interactive bash using this environment's rcfile, or run
    *command* inside that environment when one is given."""
    if not self.exists():
        self.create()
    if command:
        bash = """bash -c "source %s ; %s" """ % (self.bashrc, command)
    else:
        bash = "bash --rcfile %s" % self.bashrc
    utils.debug(1, "running \"%s\"" % bash)
    _call(bash, shell=True)
def call(args, silent=False, **kwargs):
    """Run *args* via subprocess, optionally discarding all output.

    :param args: command argument list
    :param silent: when True, send stdout/stderr to the null device
                   instead of echoing the command line
    :param kwargs: passed through to the underlying subprocess call
    """
    if silent:
        # BUG FIX: the original opened os.devnull unconditionally and
        # never closed it (handle leak); open it only when needed and
        # close it via the context manager.
        with open(os.devnull, 'wb') as devnull:
            kwargs['stdout'] = devnull
            kwargs['stderr'] = devnull
            _call(args, **kwargs)
    else:
        # print() with a single argument behaves identically on py2/py3.
        print('Running cmd: %s' % ' '.join(args))
        _call(args, **kwargs)
def edit_file(time: tuple, directory=_todo_conf.get_directory()):
    """Open the todo file for *time* in the system editor, creating the
    file first when it does not already exist."""
    name = ''.join((_strftime('%Y_%m_%d', time), '.md'))
    filepath = _path.join(directory, name)
    try:
        create_file(time, directory)
    except FileExistsError:
        # Already present — just open it.
        pass
    _call(['editor', filepath])
def _convert(f): """return 1 on success, 0 on fail""" try: cmd = "jupyter nbconvert --to html \"{fn}\" --output-dir='./html'".format(fn=f) #print('\nCommand: {}\n'.format(cmd)) # for debugging # Wait for the command(s) to get executed ... _call(cmd, shell=True) except: return 0 else: return 1
def run_buildpackage(self):
    """Build the Debian source and binary packages in the current tree."""
    print("\nBuilding the binary package")
    for step in ('dpkg-source -b .',
                 'rm -rf .gitignore .git* .travis.yml',
                 'dpkg-buildpackage'):
        _call(step, shell=True)
    # removing auto generated temporary files
    _call('debian/rules clean', shell=True)
def call(cmds, stdout, stderr=None):
    """Run a console command and return its wall-clock duration.

    :param cmds: command argument list
    :param stdout: standard output stream
    :param stderr: standard error stream (optional)
    :return: elapsed execution time
    :rtype: float
    """
    started = time.time()
    kwargs = {'stdout': stdout}
    if stderr:
        kwargs['stderr'] = stderr
    if is_windows():
        # Suppress the console window that Windows would otherwise pop up.
        kwargs['creationflags'] = CREATE_NO_WINDOW
    _call(cmds, **kwargs)
    return time.time() - started
def _check_extentions(self):
    """Do some checks on the ext_modules, generating the SWIG wrapper
    when libiscsi support is enabled."""
    if self.without_sgio:
        self._build_without_sgio()
    if self.without_libiscsi:
        self._build_without_libiscsi()
    else:
        try:
            self.announce('create libiscsi_wrap.c ...', 2)
            _call(libiscsi_cmd, shell=True)
        except CalledProcessError as e:
            # BUG FIX: CalledProcessError has no `.message` attribute on
            # Python 3, which made this handler raise AttributeError and
            # mask the real error; str(e) is portable.
            self.announce('could not create libiscsi_wrap.c: %s' % str(e), 2)
def _check_extentions(self):
    """Do some checks on the ext_modules, generating the SWIG wrapper
    when libiscsi support is enabled."""
    if self.without_sgio:
        self._build_without_sgio()
    if self.without_libiscsi:
        self._build_without_libiscsi()
    else:
        try:
            self.announce('create libiscsi_wrap.c ...', 2)
            _call(libiscsi_cmd, shell=True)
        except CalledProcessError as e:
            # BUG FIX: `.message` does not exist on Python 3 exceptions;
            # use str(e) so the failure is reported instead of raising
            # AttributeError inside the handler.
            self.announce(
                'could not create libiscsi_wrap.c: %s' % str(e), 2)
def launch_new(self, outputFilename, templateFilename=DefaultTemplate):
    '''
    Save and then launch this notebook with a new jupyter server.  Note
    that this function waits to return until the notebook server exits,
    and so is difficult to work with.

    Parameters
    ----------
    outputFilename : str
        filename to save this notebook to

    templateFilename : str, optional
        filename to build this notebook from (see save_to)
    '''
    self.save_to(outputFilename, templateFilename)
    # Blocks until the spawned notebook server terminates.
    server_cmd = 'jupyter notebook {}'.format(outputFilename)
    _call(server_cmd, shell=True)
def initiate_build(self, saved_path):
    """ Try building deb package after creating required files using start().

    'uscan', 'uupdate' and 'dpkg-buildpackage' are run if debian/watch is OK.
    """
    uscan_info = self.test_uscan()
    if uscan_info[0] == 0:
        # debian/watch works: fetch upstream tarball and rebuild the tree.
        self.run_uscan()
        self.run_uupdate()
        new_dir = '%s-%s' % (self.debian_name, self.upstream_version)
        utils.change_dir('../%s' % new_dir)
        # copy over non-duplicated changelog
        _os.rename('../%s/debian/changelog' % self.debian_name,
                   'debian/changelog')
        _rmtree('../%s' % self.debian_name)
        self.run_buildpackage()
        self.edit_changelog()
        self.create_git_repository()
        debian_path = "%s/%s/debian" % (self.name, new_dir)
        print('\nRemember, your new source directory is %s/%s' % (self.name, new_dir))
    else:
        # uscan failed: point at the originally generated packaging dir.
        debian_path = "%s/%s/debian" % (self.name, self.debian_name)
    print(""" This is not a crystal ball, so please take a look at auto-generated files.\n You may want fix first these issues:\n""")
    utils.change_dir(saved_path)
    # Highlight every FIX_ME placeholder left in the generated files.
    _call('/bin/grep --color=auto FIX_ME -r %s/*' % debian_path, shell=True)
    _call('/bin/grep --color=auto FIX_ME -r -H %s/*_itp.mail' % self.name, shell=True)
    if uscan_info[0] != 0:
        print(
            "\nUse uscan to get orig source files. Fix debian/watch and then run\
\n$ uscan --download-current-version\n")
    if self.upstream_watch:
        print(""" *** Warning ***\nUsing npmregistry to download npm dist tarballs, because upstream git repo is missing tags. Its better to ask upstream to tag their releases instead of using npm dist tarballs as dist tarballs may contain pre built files and may not include tests.\n""")
def create_git_repository(self):
    """Import the freshly built .dsc into a git repository via gbp.

    Prints a hint and returns when git-buildpackage is not installed or
    no .dsc file can be found in the parent directory.
    """
    # `command -v` exits 0 only when gbp is on PATH.
    if _os.system('command -v gbp') == 0:
        dsc_name = None
        _os.chdir('..')
        for file in _os.listdir(_os.getcwd()):
            if file.endswith('.dsc'):
                dsc_name = file
                break
        if dsc_name is None:
            print('No dsc file found! Aborting creating git repository')
            return
        # Remove a stale checkout so gbp can recreate it cleanly.
        if self.debian_name in _os.listdir(_os.getcwd()):
            _rmtree(self.debian_name)
        repo_create_status = _call('gbp import-dsc --pristine-tar %s' % dsc_name, shell=True)
        if repo_create_status == 0:
            # Import succeeded: the unpacked source tree is now redundant.
            _rmtree('{name}-{version}'.format(
                name=self.debian_name, version=self.upstream_version))
        else:
            print('gbp import-dsc exited with status %s' % repo_create_status)
    else:
        print(
            'gbp not found, please: sudo apt-get install git-buildpackage')
def _external(cmd, logger, msg):
    """
    Run an external command with appropriate logging (e.g. if
    command isn't installed)
    """
    logger.debug('%s: %s' % (msg, cmd))
    # NOTE(review): the result is used as a Popen-like object
    # (.communicate() is called below), so `_call` must be an alias for
    # subprocess.Popen rather than subprocess.call — confirm at the import.
    p = _call(cmd, stdout=_STDOUT)
    return p.communicate()
def call(*args, **kwargs):
    """Proxy for the subprocess call that short-circuits under TESTING,
    echoing the would-be invocation and reporting success."""
    if not TESTING:
        return _call(*args, **kwargs)
    print(args, kwargs)
    return 0
def run_command(*args):
    '''Run a command in sub-process, reporting failures on stdout.
    '''
    if _Verbose > 2:
        print('Running: %s' % ' '.join(args))
    status = _call(args)
    if status:
        # Show only the first two tokens to keep the message short.
        print("Running '%s ...' failed with exit status %r" % (' '.join(args[:2]), status))
    return status
def initiate_build(self, saved_path):
    """ Try building deb package after creating required files using start().

    'uscan', 'uupdate' and 'dpkg-buildpackage' are run if debian/watch is OK.
    """
    uscan_info = self.test_uscan()
    if uscan_info[0] == 0:
        # debian/watch works: fetch the upstream tarball and rebuild.
        self.run_uscan()
        self.run_uupdate()
        new_dir = '%s-%s' % (self.debian_name, self.upstream_version)
        utils.change_dir('../%s' % new_dir)
        self.run_buildpackage()
        self.edit_changelog()
        debian_path = "%s/%s/debian" % (self.name, new_dir)
        print('\nRemember, your new source directory is %s/%s' % (self.name, new_dir))
    else:
        # uscan failed: point at the originally generated packaging dir.
        debian_path = "%s/%s/debian" % (self.name, self.debian_name)
    print(""" This is not a crystal ball, so please take a look at auto-generated files.\n You may want fix first these issues:\n""")
    utils.change_dir(saved_path)
    # Highlight every FIX_ME placeholder left in the generated files.
    _call(
        '/bin/grep --color=auto FIX_ME -r %s/*' % debian_path,
        shell=True)
    _call(
        '/bin/grep --color=auto FIX_ME -r -H %s/*_itp.mail' % self.name,
        shell=True)
    if uscan_info[0] != 0:
        print(
            "\nUse uscan to get orig source files. Fix debian/watch and then run\
\n$ uscan --download-current-version\n")
    if self.upstream_watch:
        print(""" *** Warning ***\nUsing npmregistry to download npm dist tarballs, because upstream git repo is missing tags. Its better to ask upstream to tag their releases instead of using npm dist tarballs as dist tarballs may contain pre built files and may not include tests.\n""")
def make_lookup_tables(dbsnp_file, outname='dbsnp_filtered', skip_filter=False):
    """Create a series of easy lookup tables from dbSNP files.

    Args:
        dbsnp_file (str):  dbSNP BED file (zipped ok)
        outname (str):     basename for the generated files
        skip_filter (bool): reuse an existing filtered bed instead of
                            rebuilding it
    """
    filtered_bed = '{}.snp_only.bed'.format(outname)
    if not skip_filter:
        print('Making filtered snp_only bed file')
        with open_zipped(dbsnp_file) as fin, open(filtered_bed, 'w') as fout:
            for line in fin:
                if line.startswith('track'):
                    continue
                chrom, start, end, name, _, strand = line.rstrip().split('\t')
                # Keep single-base entries only (true SNPs, not indels).
                if int(end) - int(start) > 1:
                    continue
                fout.write('\t'.join([chrom, start, end, name, strand]) + '\n')
    # Create a lookup file sorted by rsid
    print('Making rsid lookup table')
    script = r"""cat {name} | awk '{{print $4 "\t" $1 "\t" $2 "\t" $3}}' | sort -k1,1 > {name}.rslookup.rs_sort.txt"""
    _call(script.format(name=filtered_bed), shell=True)
    # Create a location lookup file sorted by the start location (base-1)
    print('Making start location lookup table')
    script = r"""cat {name} | awk '{{print $1"."$2 "\t" $4}}' | sort -k1,1 > {name}.rslookup.start_sort.txt"""
    _call(script.format(name=filtered_bed), shell=True)
    # Create a location lookup file sorted by the end location (base-1)
    print('Making end location lookup table')
    script = r"""cat {name} | awk '{{print $1"."$3 "\t" $4}}' | sort -k1,1 > {name}.rslookup.end_sort.txt"""
    _call(script.format(name=filtered_bed), shell=True)
    print('Done')
def save_conf(r_var0):
    """Persist the GUI settings to the config file, close the GUI, and
    restart the wallnext daemon (installed, local, then .py fallback)."""
    print('save_conf')
    global wall_manager
    #wall_manager = str(rbut_var0.get())
    wall_dir_path = ent_wall_dir_path.get()
    wall_sort = rbut_sort.get()
    wall_change_interval = ent_interval.get()
    # One setting per line, in the order the daemon reads them back:
    # manager, wallpaper dir, subdir flag, sort mode, interval.
    conf_str_list = (wall_manager + '\n' + wall_dir_path + '\n' +
                     read_subdir_var.get() + '\n' + wall_sort + '\n' +
                     wall_change_interval)
    conf_file_obj = open(conf_file_path, "w")
    conf_file_obj.write(conf_str_list)
    conf_file_obj.close()
    print(conf_str_list)
    #_call(['killall'])
    try:
        # Tear down the Tk window before relaunching the daemon.
        root.destroy()
        _call(['wallnext'])
    except:
        try:
            _call(['./wallnext'])
        except:
            _call(['./wallnext.py'])
def start_daemon(r_var0):
    """Write the current GUI selections to the config file and start the
    wallnext daemon (installed command first, .py fallback)."""
    print('start_daemon')
    global conf_dir_path
    global wall_manager
    global wall_dir_path
    wall_manager = str(rbut_var0.get())
    wall_dir_path = ent_wall_dir_path.get()
    wall_change_interval = ent_interval.get()
    # One setting per line, in the order the daemon reads them back.
    conf_str_list = (wall_manager + '\n' + wall_dir_path + '\n' +
                     wall_change_interval)
    conf_file_obj = open(conf_file_path, "w")
    conf_file_obj.write(conf_str_list)
    conf_file_obj.close()
    print(conf_str_list)
    #_call(['killall'])
    try:
        #root.destroy()
        _call(['wallnext'])
        #sys.exit()
    except:
        _call(['wallnext.py'])
def create(args):
    """Generate the initial Debian packaging tree for an npm module and
    point the maintainer at the spots that still need manual work."""
    npm2deb = get_npm2deb_instance(args)
    try:
        saved_path = _os.getcwd()
        _utils.create_dir(npm2deb.name)
        _utils.change_dir(npm2deb.name)
        npm2deb.start()
        _utils.change_dir(saved_path)
    except OSError as os_error:
        print(str(os_error))
        exit(1)
    debian_path = "%s/%s/debian" % (npm2deb.name, npm2deb.debian_name)
    print(""" This is not a crystal ball, so please take a look at auto-generated files.\n You may want fix first these issues:\n""")
    # Highlight every FIX_ME placeholder left in the generated files.
    _call('/bin/grep --color=auto FIX_ME -r %s/*' % debian_path, shell=True)
    print ("\nUse uscan to get orig source files. Fix debian/watch and then run\
\n$ uscan --download-current-version\n")
    _show_mapper_warnings()
def create(args):
    """Generate the initial Debian packaging tree for an npm module and
    point the maintainer at the spots that still need manual work."""
    npm2deb = get_npm2deb_instance(args)
    try:
        saved_path = _os.getcwd()
        _utils.create_dir(npm2deb.name)
        _utils.change_dir(npm2deb.name)
        npm2deb.start()
        _utils.change_dir(saved_path)
    except OSError as os_error:
        print(str(os_error))
        exit(1)
    debian_path = "%s/%s/debian" % (npm2deb.name, npm2deb.debian_name)
    print(""" This is not a crystal ball, so please take a look at auto-generated files.\n You may want fix first these issues:\n""")
    # Highlight every FIX_ME placeholder left in the generated files.
    _call('/bin/grep --color=auto FIX_ME -r %s/*' % debian_path, shell=True)
    print("\nUse uscan to get orig source files. Fix debian/watch and then run\
\n$ uscan --download-current-version\n")
    _show_mapper_warnings()
def run_uupdate(self):
    """Rebuild the debian source package against the new upstream version."""
    print('\nCreating debian source package...')
    uupdate_cmd = 'uupdate -b -f --upstream-version %s' % self.upstream_version
    _call(uupdate_cmd, shell=True)
def __init__():
    """Daemon entry point: ensure a config exists (spawning the GUI on
    first run), replace any running daemon instance via its PID file,
    then loop forever changing wallpapers per the configured sort mode."""
    global pwd
    global wall_manager
    global home_dir_path
    global conf_dir_path
    global read_subdir_var
    # check presence of a config file
    # Create the ~/.config directory for wallnext when it does not exist.
    if not os.path.exists(conf_dir_path):
        os.makedirs(conf_dir_path)
    # check presence of a confin dir in a .config dir
    # No config yet: copy the default one and hand control to the GUI,
    # which writes the real config and relaunches the daemon itself.
    if not os.path.exists(conf_file_path):
        print('no conf file')
        shutil.copy2(os.path.join(default_dir_path,conf_file_name),conf_file_path)
        # start gui
        print(pwd)
        os.chdir(pwd)
        try:
            _call(['wallnext-gui'])  # start a GUI
        except:
            try:
                _call(['./wallnext-gui'])
            except:
                _call(['./wallnext-gui.py'])
        sys.exit()
    conf_file_obj = open(conf_file_path, 'r')  # read .conf file
    # derive a line of a config file
    for i in fileinput.input(conf_file_path):
        conf_str_list.append(i)
    # Config layout: manager, dir, subdir flag, sort mode, interval —
    # [:-1] strips each line's trailing newline.
    wall_manager = conf_str_list[0][:-1]  # wallpappers manager
    wall_dir_path = conf_str_list[1][:-1]  # PATH to wallpappers
    read_subdir_var = conf_str_list[2][:-1]
    wall_sort = conf_str_list[3][:-1]
    wall_change_interval = int(conf_str_list[4])  # time wall_change_interval
    print(wall_change_interval)
    ## kill PID (stop daemon)
    print('pwd = ', pwd)
    try:
        pid_file_obj = open(os.path.join(conf_dir_path,'pid'), "r")
        #print('open')
        kill_pid = int(pid_file_obj.readline())
        print('kill_pid = ',kill_pid)
        os.kill(kill_pid, signal.SIGTERM)
        print('kill')
        pid_file_obj.close()
    except:
        # No previous daemon (or stale PID) — nothing to stop.
        print('except kill')
    ## create PID file
    pid_file_obj = open(os.path.join(conf_dir_path,'pid'), "w")
    pid = str(os.getpid())
    pid_file_obj.write(pid)
    pid_file_obj.close()
    img_files_list = read_wall_dir(wall_dir_path)
    if read_subdir_var == '1':
        img_files_list += read_sub_wall_dir(wall_dir_path)
    # Main loop: delegate to the sort-specific scheduler; `ch3` and the
    # _start_* helpers come from module globals — presumed counters/loops,
    # TODO confirm.
    while True:
        if wall_sort == 'random':
            print('Random sort \n', img_files_list)
            print('len = ',len(img_files_list))
            xxx = set(img_files_list)
            print('len2 = ', len(xxx))
            rand_files_list, chM = random_pic(img_files_list)
            #print(rand_files_list)
            _start_random(rand_files_list,
                          img_files_list, ch3, chM, wall_change_interval)
        elif wall_sort == 'name':
            print("Sort by name \n", img_files_list)
            chM = len(img_files_list)
            _start_name(img_files_list, ch3, chM, wall_change_interval)
        else:
            # Unknown sort mode — bail out.
            print('wall_sort = ',wall_sort, type(wall_sort))
            sys.exit()
def run_uscan(self):
    """Fetch the upstream tarball described by debian/watch."""
    print('\nDownloading source tarball file using debian/watch file...')
    uscan_cmd = 'uscan --download-version %s' % self.upstream_version
    _call(uscan_cmd, shell=True)
def check_call(popenargs, *args, **kwargs):
    """Run the command and raise CalledProcessError on a non-zero exit,
    mirroring subprocess.check_call."""
    status = _call(popenargs, *args, **kwargs)
    if status:
        raise CalledProcessError(status, popenargs)
    return status
def join_rsid(rsids, dbsnp_file, outfile, sort=True, as_df=False):
    """Use linux join to create a lookup table of rsids.

    Args:
        rsids (str/list): List of rsids as a file name (string), list of
                          rsid, or Series.
        dbsnp_file (str): The dbsnp lookup file from make_lookup_tables.
                          should be the .rslookup.rs_sort.txt file (zipped ok)
        outfile (str):    Name of outfile to write to
        sort (bool):      Pre sort the rsids
        as_df (bool):     Return a dataframe

    Writes:
        A tab separated table of rsid, chrom, start, end for all rsids.

    Returns:
        DataFrame: Dataframe of written table, only returned if as_df is true.
    """
    if isinstance(rsids, pd.core.series.Series):
        rsids = rsids.tolist()
    if isinstance(rsids, (list, tuple, set)):
        # `join` needs a sorted file on disk: dedupe and write a temp file.
        rsids = sorted(list(set(rsids)))
        tmpfile = outfile + '.rsids.tmp'
        with open(tmpfile, 'w') as fout:
            fout.write('\n'.join(rsids))
        rsids = tmpfile
    else:
        tmpfile = None
    rsids = _os.path.abspath(rsids)
    outfile = _os.path.abspath(outfile)
    if sort:
        print('Sorting')
        cat = 'zcat' if rsids.endswith('gz') else 'cat'
        # NOTE(review): reuses the name `tmpfile` as the sort scratch file,
        # clobbering the earlier temp-file reference — confirm intended.
        tmpfile = 'tmpsort_{}'.format(_rand(1000, 20000))
        script = r"""{cat} {rsids} | sort > {tmp}; mv {tmp} {rsids}"""
        _call(script.format(cat=cat, rsids=rsids, tmp=tmpfile), shell=True)
    print('Joining')
    script = r"""join {rsids} {dbsnp} > {outfile}"""
    try:
        _call(script.format(rsids=rsids, dbsnp=dbsnp_file, outfile=outfile),
              stderr=_STDOUT, shell=True, universal_newlines=True)
    except _call_err as exc:
        print("Status : FAIL", exc.returncode, exc.output)
        raise exc
    print('Done, file {} has the joined list'.format(outfile))
    if as_df:
        print('Getting DataFrame')
        try:
            df = pd.read_csv(outfile, sep=' ', header=None, index_col=0)
        except pd.io.common.EmptyDataError:
            print('Joined file empty, skipping')
            return None
        df.index.name = None
        df.columns = ['chrom', 'start', 'end']
        return df
def call(*args, **kwargs):
    """Run the given command line through a shell and return its exit code."""
    return _call(*args, shell=True, **kwargs)
def run_depict(sample_1, sample_2, prefix, cores=None, run_path=None,
               depict_path=DEPICT):
    """Run DEPICT twice, once on each sample, DEPICT will be run in parallel.

    Parallelization at this step will be local only.

    Args:
        sample_1 (str):    File name or path to file with rsids for sample 1
        sample_2 (str):    File name or path to file with rsids for sample 2
        prefix (str):      Name for the output directory, input file names
                           will be used to set output files in this directory.
        cores (int):       Number of cores to use *PER PROCESS* for DEPICT,
                           defaults to 1/2 of available cores on the machine,
                           meaning all cores will be used for run (1/2 each).
        run_path (str):    Root directory to run in, defaults to current dir
        depict_path (str): Path to the DEPICT package, default set in file.

    Outputs:
        <prefix>/<sample_name>.geneprioritization.txt
        <prefix>/<sample_name>.loci.txt
        <prefix>/<sample_name>.tissueenrichment.txt
        <prefix>/<sample_name>.genesetenrichment.txt
        <prefix>/<sample_name>.log

    Returns:
        dict: Dictionary of relevant files. Raises Exception on error.
    """
    # Stage flags (all enabled); kept as ints because they are spliced
    # into the java command lines below.
    FLAG_LOCI = 1      # Construct loci based on your associated SNPs
    FLAG_GENES = 1     # Prioritize Genes
    FLAG_GENESETS = 1  # Conduct reconstituted gene set enrichment analysis
    FLAG_TISSUES = 1   # Conduct tissue/cell type enrichment analysis
    PARAM_NCORES = 4   # Number of cores to use *PER PROCESS* for DEPICT
    if not cores:
        cores = PARAM_NCORES
    # Set dirs
    startdir = _os.path.abspath(_os.path.curdir)
    run_path = _os.path.abspath(run_path if run_path else startdir)
    print(run_path)
    if not _os.path.isdir(run_path):
        _os.mkdir(run_path)
    # Get DEPICT dir
    if not _os.path.isdir(depict_path):
        depict_path = DEPICT
    depict_path = _os.path.abspath(depict_path)
    print(depict_path)
    # check_depict(depict_path)
    # Check sample files: must exist and start with an rsid.
    infiles = {
        'sample_1': _os.path.abspath(sample_1),
        'sample_2': _os.path.abspath(sample_2),
    }
    for sample in infiles.values():
        if not _os.path.isfile(sample):
            raise FileNotFoundError('{} does not exist'.format(sample))
        with open(sample) as fin:
            assert fin.readline().strip().startswith('rs')
    try:
        # Change directory
        _os.chdir(run_path)
        # Create prefix, prefix_long is in run_path, prefix is in depict_path
        prefix = _os.path.basename(prefix)
        prefix_long = _os.path.abspath(prefix)
        if not _os.path.isdir(prefix_long):
            _os.makedirs(prefix_long)
        # Get names (input file basenames without their extension)
        names = {
            'sample_1': '.'.join(_os.path.basename(sample_1).split('.')[:-1]),
            'sample_2': '.'.join(_os.path.basename(sample_2).split('.')[:-1]),
        }
        prefixes = {
            'sample_1_long': _pth(prefix_long, names['sample_1']),
            'sample_2_long': _pth(prefix_long, names['sample_2']),
            'sample_1': _pth(prefix, names['sample_1']),
            'sample_2': _pth(prefix, names['sample_2']),
        }
        # Set cores (unreachable default — cores is already set above;
        # NOTE(review): confirm which default was intended)
        if not cores:
            cores = int(_mp.cpu_count()/2)
        # Change directory: DEPICT must run from its install dir.
        _os.chdir(depict_path)
        print(depict_path)
        if not _os.path.isdir(prefix):
            _os.makedirs(prefix)
        # Create script templates
        loci_script = (
            "java -Xms512m -Xmx4000m -jar "
            "{depict}/LocusGenerator/LocusGenerator.jar "
            "{depict}/LocusGenerator/config.xml {infile} "
            "{prefix} > {prefix}.log 2>&1"
        )
        gene_script = (
            "java -Xms512m -Xmx16000m -jar {depict}/Depict/Depict.jar "
            "{outname} {flag_genes} {flag_genesets} 0 {cores} {outdir} "
            ">> {prefix}.log 2>&1"
        )
        tissue_script = (
            "java -Xms512m -Xmx16000m -jar {depict}/Depict/Depict.jar "
            "{outname} 0 1 1 {cores} {outdir} >> {prefix}.log 2>&1"
        )
        # Run jobs: both samples in parallel via multiprocessing.
        if FLAG_LOCI:
            print('Running loci building job..')
            loci_jobs = {}
            # Create jobs
            for sample in ['sample_1', 'sample_2']:
                loci_jobs[sample] = _mp.Process(
                    target = _call,
                    args = (
                        loci_script.format(
                            depict=depict_path, infile=infiles[sample],
                            prefix=prefixes[sample]
                        ),
                    ),
                    kwargs = {'shell': True},
                    name = sample + '_locus'
                )
            # Run jobs
            for job in loci_jobs.values():
                job.start()
            # Wait for finish
            for job in loci_jobs.values():
                job.join()
            # Make sure job worked
            for job in loci_jobs.values():
                if job.exitcode != 0:
                    raise Exception('Job {} failed with exitcode {}'.format(
                        job.name, job.exitcode
                    ))
            for sample in ['sample_1', 'sample_2']:
                print('copying results')
                _call('cp -f {}* {}'.format(prefixes[sample], prefix_long),
                      shell=True)
        if FLAG_GENES or FLAG_GENESETS:
            print('Running gene job..')
            gene_jobs = {}
            # Create jobs
            for sample in ['sample_1', 'sample_2']:
                gene_jobs[sample] = _mp.Process(
                    target = _call,
                    args = (
                        gene_script.format(
                            depict=depict_path, cores=cores, outdir=prefix,
                            flag_genes=FLAG_GENES,
                            flag_genesets=FLAG_GENESETS,
                            prefix=prefixes[sample], outname=names[sample]
                        ),
                    ),
                    kwargs = {'shell': True},
                    name = sample + '_gene'
                )
            # Run jobs
            for job in gene_jobs.values():
                job.start()
            # Wait for finish
            for job in gene_jobs.values():
                job.join()
            # Make sure job worked
            for job in gene_jobs.values():
                if job.exitcode != 0:
                    raise Exception('Job {} failed with exitcode {}'.format(
                        job.name, job.exitcode
                    ))
            for sample in ['sample_1', 'sample_2']:
                _call('cp -f {}* {}'.format(prefixes[sample], prefix_long),
                      shell=True)
        if FLAG_TISSUES:
            print('Running tissue job..')
            tissue_jobs = {}
            # Create jobs
            for sample in ['sample_1', 'sample_2']:
                tissue_jobs[sample] = _mp.Process(
                    target = _call,
                    args = (
                        tissue_script.format(
                            depict=depict_path, cores=cores, outdir=prefix,
                            flag_genes=FLAG_GENES,
                            flag_genesets=FLAG_GENESETS,
                            prefix=prefixes[sample], outname=names[sample]
                        ),
                    ),
                    kwargs = {'shell': True},
                    name = sample + '_tissue'
                )
            # Run jobs
            for job in tissue_jobs.values():
                job.start()
            # Wait for finish
            for job in tissue_jobs.values():
                job.join()
            # Make sure job worked
            for job in tissue_jobs.values():
                if job.exitcode != 0:
                    raise Exception('Job {} failed with exitcode {}'.format(
                        job.name, job.exitcode
                    ))
            for sample in ['sample_1', 'sample_2']:
                _call('cp -f {}* {}'.format(prefixes[sample], prefix_long),
                      shell=True)
        # Remove temp dir as all our files are in our new dir
        # if _os.path.abspath(prefix) != prefix_long:
        #     _call('rm -rf {}'.format(prefix), shell=True)
        # Change directory
        _os.chdir(run_path)
        # Check output files
        expected_suffices = {
            'loci': '_loci.txt',
            'gene': '_genesetenrichment.txt',
            'tissue': '_tissueenrichment.txt',
        }
        expected_outputs = {}
        for sample in ['sample_1', 'sample_2']:
            expected_outputs[sample] = {
                'loci': '{}{}'.format(prefixes[sample + '_long'],
                                      expected_suffices['loci']),
                'gene': '{}{}'.format(prefixes[sample + '_long'],
                                      expected_suffices['gene']),
                'tissue': '{}{}'.format(prefixes[sample + '_long'],
                                        expected_suffices['tissue']),
            }
        for sample, files in expected_outputs.items():
            for fl in files.values():
                assert _os.path.isfile(fl)
        # Persist the output manifest for downstream steps.
        with open(_pth(run_path, prefix + '_files.txt'), 'wb') as fout:
            _pickle.dump(expected_outputs, fout)
    finally:
        # Change directory back no matter what failed above.
        _os.chdir(startdir)
    return expected_outputs
def call(command):
    """Execute *command* in a shell and return its exit status."""
    exit_status = _call(command, shell=True)
    return exit_status
def plot_graph(mc: MarkovChain, nodes_color: bool = True, nodes_type: bool = True, edges_color: bool = True, edges_value: bool = True, dpi: int = 100) -> _Optional[_Tuple[_mp.Figure, _mp.Axes]]:
    """
    The function plots the directed graph of the Markov chain.

    | **Notes:** Graphviz and Pydot are not required, but they provide access to extended graphs with supplementar drawing functionalities.

    :param mc: the target Markov chain.
    :param nodes_color: a boolean indicating whether to display colored nodes based on communicating classes (by default, True).
    :param nodes_type: a boolean indicating whether to use a different shape for every node type (by default, True).
    :param edges_color: a boolean indicating whether to display edges using a gradient based on transition probabilities, valid only for extended graphs (by default, True).
    :param edges_value: a boolean indicating whether to display the transition probability of every edge (by default, True).
    :param dpi: the resolution of the plot expressed in dots per inch (by default, 100).
    :return: None if Matplotlib is in interactive mode as the plot is immediately displayed, the handles of the plot otherwise.
    :raises ValidationError: if any input argument is not compliant.
    """

    def edge_colors(hex_from: str, hex_to: str, steps: int) -> _List[str]:
        # Linear interpolation between two '#rrggbb' endpoint colors.
        begin = [int(hex_from[i:i + 2], 16) for i in range(1, 6, 2)]
        end = [int(hex_to[i:i + 2], 16) for i in range(1, 6, 2)]
        clist = [hex_from]
        for s in range(1, steps):
            vector = [int(begin[j] + (float(s) / (steps - 1)) * (end[j] - begin[j])) for j in range(3)]
            rgb = [int(v) for v in vector]
            clist.append(f'#{"".join(["0{0:x}".format(v) if v < 16 else "{0:x}".format(v) for v in rgb])}')
        return clist

    def node_colors(count: int) -> _List[str]:
        # Cycle through the module palette when there are more
        # communicating classes than colors.
        colors_limit = len(_colors) - 1
        offset = 0
        clist = list()
        while count > 0:
            clist.append(_colors[offset])
            offset += 1
            if offset > colors_limit:
                offset = 0
            count -= 1
        return clist

    if not isinstance(mc, MarkovChain):
        raise ValidationError('A valid MarkovChain instance must be provided.')

    try:
        nodes_color = _validate_boolean(nodes_color)
        nodes_type = _validate_boolean(nodes_type)
        edges_color = _validate_boolean(edges_color)
        edges_value = _validate_boolean(edges_value)
        dpi = _validate_dpi(dpi)
    except Exception as e:
        # Recover the offending argument name from the call-site source.
        argument = ''.join(_trace()[0][4]).split('=', 1)[0].strip()
        raise ValidationError(str(e).replace('@arg@', argument)) from None

    # Extended (pydot/graphviz) rendering requires both the `dot` binary
    # and the pydot package; fall back to pure networkx otherwise.
    extended_graph = True

    # noinspection PyBroadException
    try:
        _call(['dot', '-V'], stdout=_PIPE, stderr=_PIPE)
    except Exception:
        extended_graph = False
        pass

    try:
        import pydot as pyd
    except ImportError:
        extended_graph = False
        pass

    g = mc.to_directed_graph()

    if extended_graph:
        g_pydot = _nx.nx_pydot.to_pydot(g)
        if nodes_color:
            c = node_colors(len(mc.communicating_classes))
            for node in g_pydot.get_nodes():
                state = node.get_name()
                for x, cc in enumerate(mc.communicating_classes):
                    if state in cc:
                        node.set_style('filled')
                        node.set_fillcolor(c[x])
                        break
        if nodes_type:
            # Transient states are boxes, the rest ellipses.
            for node in g_pydot.get_nodes():
                if node.get_name() in mc.transient_states:
                    node.set_shape('box')
                else:
                    node.set_shape('ellipse')
        if edges_color:
            c = edge_colors(_color_gray, _color_black, 20)
            for edge in g_pydot.get_edges():
                probability = mc.transition_probability(edge.get_destination(), edge.get_source())
                # Map probability onto the 20-step gradient.
                x = int(round(probability * 20.0)) - 1
                edge.set_style('filled')
                edge.set_color(c[x])
        if edges_value:
            for edge in g_pydot.get_edges():
                probability = mc.transition_probability(edge.get_destination(), edge.get_source())
                if probability.is_integer():
                    edge.set_label(f' {probability:g}.0 ')
                else:
                    edge.set_label(f' {round(probability,2):g} ')
        # Render through graphviz, then re-load the PNG into a figure.
        buffer = _BytesIO()
        buffer.write(g_pydot.create_png())
        buffer.seek(0)
        img = _mi.imread(buffer)
        img_x = img.shape[0] / dpi
        img_y = img.shape[1] / dpi
        figure = _mp.figure(figsize=(img_y, img_x), dpi=dpi)
        figure.figimage(img)
        ax = figure.gca()
    else:
        # Plain networkx rendering; disable interactive mode while drawing.
        mpi = _mp.isinteractive()
        _mp.interactive(False)
        figure, ax = _mp.subplots(dpi=dpi)
        pos = _nx.spring_layout(g)
        ncolors_all = node_colors(len(mc.communicating_classes))
        for node in g.nodes:
            ncolor = None
            if nodes_color:
                for x, cc in enumerate(mc.communicating_classes):
                    if node in cc:
                        ncolor = ncolors_all[x]
                        break
            if nodes_type:
                if node in mc.transient_states:
                    nshape = 's'
                else:
                    nshape = 'o'
            else:
                nshape = None
            # draw_networkx_nodes rejects None keyword values, hence the
            # explicit four-way dispatch.
            if ncolor is not None and nshape is not None:
                _nx.draw_networkx_nodes(g, pos, ax=ax, nodelist=[node], edgecolors='k', node_color=ncolor, node_shape=nshape)
            elif ncolor is not None and nshape is None:
                _nx.draw_networkx_nodes(g, pos, ax=ax, nodelist=[node], edgecolors='k', node_color=ncolor)
            elif ncolor is None and nshape is not None:
                _nx.draw_networkx_nodes(g, pos, ax=ax, nodelist=[node], edgecolors='k', node_shape=nshape)
            else:
                _nx.draw_networkx_nodes(g, pos, ax=ax, edgecolors='k')
        _nx.draw_networkx_labels(g, pos, ax=ax)
        _nx.draw_networkx_edges(g, pos, ax=ax, arrows=False)
        if edges_value:
            evalues = dict()
            for edge in g.edges:
                probability = mc.transition_probability(edge[1], edge[0])
                if probability.is_integer():
                    value = f' {probability:g}.0 '
                else:
                    value = f' {round(probability,2):g} '
                evalues[(edge[0], edge[1])] = value
            _nx.draw_networkx_edge_labels(g, pos, ax=ax, edge_labels=evalues, label_pos=0.7)
        _mp.interactive(mpi)

    if _mp.isinteractive():
        # Interactive mode: show immediately, return no handles.
        _mp.show(block=False)
        return None

    return figure, ax
def push(directory=_todo_conf.get_directory()):
    """Run `git push` inside *directory* (defaults to the configured todo dir)."""
    git_cmd = ['git', '-C', directory, 'push']
    _call(git_cmd)
def call(cmd, *args, **kwargs):
    """Log the command line about to run, then delegate to the subprocess call."""
    if isinstance(cmd, string_types):
        printable = cmd
    else:
        printable = ' '.join(cmd)
    print_log(printable)
    return _call(cmd, *args, **kwargs)
def commit_all(time=None, directory=_todo_conf.get_directory()):
    """Stage the todo file for *time* (when given) and commit every change
    in the repository at *directory*."""
    if time is not None:
        name = ''.join((_strftime('%Y_%m_%d', time), '.md'))
        _call(['git', '-C', directory, 'add', name])
    _call(['git', '-C', directory, 'commit', '-a'])
def join_location(locs, dbsnp_file, outfile, sort=True, as_df=False):
    """Use linux join to create a lookup table of rsids.

    Args:
        locs (str/list):  File name of a file of chr.position, of a list of
                          the same. Can also give a Series of the same, or a
                          DataFrame with the columns 'chrom' and 'position'.
        dbsnp_file (str): The dbsnp lookup file from make_lookup_tables.
                          should be the .rslookup.rs_sort.txt file (zipped ok)
        outfile (str):    Name of outfile to write to
        sort (bool):      Pre sort the rsids
        as_df (bool):     Return a dataframe

    Writes:
        A tab separated table of chrom.pos, rsid for all locations.

    Returns:
        DataFrame: Dataframe of written table, only returned if as_df is true.
    """
    if isinstance(locs, pd.core.series.Series):
        locs = sorted(list(set(locs.tolist())))
    if isinstance(locs, pd.core.frame.DataFrame):
        # Collapse the two columns into the "chrom.position" key format.
        locs = locs.chrom.astype(str) + '.' + locs.position.astype(str)
        locs = sorted(list(set(locs.tolist())))
    if isinstance(locs, (list, tuple, set)):
        # `join` needs a file on disk: write the keys to a temp file.
        tmpfile = outfile + 'locs.tmp'
        with open(tmpfile, 'w') as fout:
            fout.write('\n'.join(locs))
        locs = tmpfile
    else:
        tmpfile = None
    locs = _os.path.abspath(locs)
    outfile = _os.path.abspath(outfile)
    if sort:
        print('Sorting')
        cat = 'zcat' if locs.endswith('gz') else 'cat'
        # NOTE(review): fixed scratch name `tmp45876` — concurrent runs in
        # the same directory would collide; confirm single-run usage.
        script = r"""{cat} {locs} | sort -k1,1 > tmp45876; mv tmp45876 {locs}"""
        _call(script.format(cat=cat, locs=locs), shell=True)
    print('Joining')
    script = r"""join {locs} {dbsnp} > {outfile}"""
    _call(script.format(locs=locs, dbsnp=dbsnp_file, outfile=outfile),
          shell=True)
    if tmpfile:
        _os.remove(tmpfile)
    if as_df:
        print('Getting DataFrame')
        try:
            lookup = pd.read_csv(outfile, sep=' ', header=None, index_col=0)
        except pd.io.common.EmptyDataError:
            print('Joined file empty, skipping')
            return None
        lookup.index.name = None
        # Split the "chrom.position" index back into separate columns.
        lookup['chrom'], lookup['position'] = lookup.index.to_series(
            ).str.split('.', 1).str
        lookup.columns = ['rsid', 'chrom', 'position']
        lookup = lookup[['chrom', 'position', 'rsid']]
        return lookup
def __init__():
    """Daemon entry point: ensure a config exists (spawning the GUI on
    first run), replace any running daemon instance via its PID file,
    then loop forever changing wallpapers per the configured sort mode."""
    global pwd
    global wall_manager
    global home_dir_path
    global conf_dir_path
    global read_subdir_var
    # check presence of a config file
    # Create the ~/.config directory for wallnext when it does not exist.
    if not os.path.exists(conf_dir_path):
        os.makedirs(conf_dir_path)
    # check presence of a confin dir in a .config dir
    # No config yet: copy the default one and hand control to the GUI,
    # which writes the real config and relaunches the daemon itself.
    if not os.path.exists(conf_file_path):
        print('no conf file')
        shutil.copy2(os.path.join(default_dir_path, conf_file_name),
                     conf_file_path)
        # start gui
        print(pwd)
        os.chdir(pwd)
        try:
            _call(['wallnext-gui'])  # start a GUI
        except:
            try:
                _call(['./wallnext-gui'])
            except:
                _call(['./wallnext-gui.py'])
        sys.exit()
    conf_file_obj = open(conf_file_path, 'r')  # read .conf file
    # derive a line of a config file
    for i in fileinput.input(conf_file_path):
        conf_str_list.append(i)
    # Config layout: manager, dir, subdir flag, sort mode, interval —
    # [:-1] strips each line's trailing newline.
    wall_manager = conf_str_list[0][:-1]  # wallpappers manager
    wall_dir_path = conf_str_list[1][:-1]  # PATH to wallpappers
    read_subdir_var = conf_str_list[2][:-1]
    wall_sort = conf_str_list[3][:-1]
    wall_change_interval = int(conf_str_list[4])  # time wall_change_interval
    print(wall_change_interval)
    ## kill PID (stop daemon)
    print('pwd = ', pwd)
    try:
        pid_file_obj = open(os.path.join(conf_dir_path, 'pid'), "r")
        #print('open')
        kill_pid = int(pid_file_obj.readline())
        print('kill_pid = ', kill_pid)
        os.kill(kill_pid, signal.SIGTERM)
        print('kill')
        pid_file_obj.close()
    except:
        # No previous daemon (or stale PID) — nothing to stop.
        print('except kill')
    ## create PID file
    pid_file_obj = open(os.path.join(conf_dir_path, 'pid'), "w")
    pid = str(os.getpid())
    pid_file_obj.write(pid)
    pid_file_obj.close()
    img_files_list = read_wall_dir(wall_dir_path)
    if read_subdir_var == '1':
        img_files_list += read_sub_wall_dir(wall_dir_path)
    # Main loop: delegate to the sort-specific scheduler; `ch3` and the
    # _start_* helpers come from module globals — presumed counters/loops,
    # TODO confirm.
    while True:
        if wall_sort == 'random':
            print('Random sort \n', img_files_list)
            print('len = ', len(img_files_list))
            xxx = set(img_files_list)
            print('len2 = ', len(xxx))
            rand_files_list, chM = random_pic(img_files_list)
            #print(rand_files_list)
            _start_random(rand_files_list,
                          img_files_list, ch3, chM, wall_change_interval)
        elif wall_sort == 'name':
            print("Sort by name \n", img_files_list)
            chM = len(img_files_list)
            _start_name(img_files_list, ch3, chM, wall_change_interval)
        else:
            # Unknown sort mode — bail out.
            print('wall_sort = ', wall_sort, type(wall_sort))
            sys.exit()
def run_uupdate(self):
    """Invoke uupdate to merge the new upstream release into the packaging."""
    print('\nCreating debian source package...')
    _call('uupdate -b -f --upstream-version ' + str(self.upstream_version),
          shell=True)
def call(cmd, *args, **kwargs):
    """Echo the command line via print_log, then execute it."""
    if isinstance(cmd, basestring):
        printable = cmd
    else:
        printable = ' '.join(cmd)
    print_log(printable)
    return _call(cmd, *args, **kwargs)
def run_uscan(self):
    """Download the pinned upstream tarball via debian/watch."""
    print('\nDownloading source tarball file using debian/watch file...')
    _call('uscan --download-version ' + str(self.upstream_version),
          shell=True)
def call(cmd, **kwargs):
    """Run *cmd* through the shell and return its exit status."""
    exit_status = _call(cmd, shell=True, **kwargs)
    return exit_status
def call(*args, **kw):
    """Delegate to the subprocess call with the quiet-adjusted keyword set."""
    quiet_kw = _quiet_args(kw)
    return _call(*args, **quiet_kw)