def get_temp_location():
    """Return the per-user JModelica.org scratch directory path.

    On Windows and macOS this is <tempdir>/JModelica.org; on other
    platforms the user name is inserted to keep users from clashing.
    The directory is not created here.
    """
    # Public API instead of the private tempfile._get_default_tempdir();
    # the original win32 and darwin branches were byte-identical.
    base = tempfile.gettempdir()
    if sys.platform in ('win32', 'darwin'):
        return os.path.join(base, 'JModelica.org')
    # Don't crash with KeyError when $USER is unset (cron/daemon contexts);
    # an empty component collapses harmlessly in os.path.join.
    return os.path.join(base, os.environ.get('USER', ''), 'JModelica.org')
def _TempDirSetup(self, prefix='tmp', update_env=True, base_dir=None): """Generate a tempdir, modifying the object, and env to use it. Specifically, if update_env is True, then from this invocation forward, python and all subprocesses will use this location for their tempdir. The matching _TempDirTearDown restores the env to what it was. """ # Stash the old tempdir that was used so we can # switch it back on the way out. self.tempdir = tempfile.mkdtemp(prefix=prefix, dir=base_dir) os.chmod(self.tempdir, 0o700) if update_env: with tempfile._once_lock: self._tempdir_value = tempfile._get_default_tempdir() self._tempdir_env = tuple((x, os.environ.get(x)) for x in _TEMPDIR_ENV_VARS) # Now update TMPDIR/TEMP/TMP, and poke the python # internal to ensure all subprocess/raw tempfile # access goes into this location. os.environ.update((x, self.tempdir) for x in _TEMPDIR_ENV_VARS) # Finally, adjust python's cached value (we know it's cached by here # since we invoked _get_default_tempdir from above). Note this # is necessary since we want *all* output from that point # forward to go to this location. tempfile.tempdir = self.tempdir
def test(self):
    """Fuzz a URL while saving the session, then verify that replaying
    the saved file via the wfuzzp payload (both as FUZZ and FUZZ[url])
    yields the same (status code, path) pairs.
    """
    if not expected_list:
        return
    # mkstemp replaces the private _get_candidate_names/_get_default_tempdir
    # pair: race-free and using only public API.
    fd, filename = tempfile.mkstemp()
    os.close(fd)  # wfuzz rewrites the file itself; we only need the path
    try:
        # Wfuzz results
        with wfuzz.FuzzSession(url=url, **dict(list(params.items()) + list(dict(save=filename).items()))) as s:
            if payloads is None:
                fuzzed = s.fuzz()
            else:
                fuzzed = s.get_payloads(payloads).fuzz()
            ret_list = [(x.code, x.history.urlparse.path) for x in fuzzed]
        # repeat test performing the same saved request
        with wfuzz.FuzzSession(payloads=[("wfuzzp", dict(fn=filename))], url="FUZZ") as s:
            same_list = [(x.code, x.history.urlparse.path) for x in s.fuzz()]
        self.assertEqual(sorted(ret_list), sorted(same_list))
        # repeat test performing the FUZZ[url] saved request
        with wfuzz.FuzzSession(payloads=[("wfuzzp", dict(fn=filename))], url="FUZZ[url]") as s:
            same_list = [(x.code, x.history.urlparse.path) for x in s.fuzz()]
        self.assertEqual(sorted(ret_list), sorted(same_list))
    finally:
        os.remove(filename)  # the original leaked the session file
def test(self):
    """Fuzz a URL, export the session recipe to a temp file, and verify
    that re-running from the recipe alone reproduces the results.
    """
    # Public, race-free temp file instead of the private tempfile helpers.
    fd, filename = tempfile.mkstemp()
    os.close(fd)
    try:
        # Wfuzz results
        with wfuzz.FuzzSession(url=url, **params) as s:
            s.export_to_file(filename)
            if payloads is None:
                fuzzed = s.fuzz()
            else:
                fuzzed = s.get_payloads(payloads).fuzz()
            ret_list = [(x.code, x.history.urlparse.path) for x in fuzzed]
        # repeat test with recipe as only parameter
        with wfuzz.FuzzSession(recipe=[filename]) as s:
            if payloads is None:
                same_list = [(x.code, x.history.urlparse.path) for x in s.fuzz()]
            else:
                same_list = [(x.code, x.history.urlparse.path) for x in s.get_payloads(payloads).fuzz()]
        self.assertEqual(sorted(ret_list), sorted(same_list))
    finally:
        os.remove(filename)  # the original leaked the recipe file
def get_temp_aln(aln):
    """Round-trip *aln* through a temporary PIR file and return its sequences."""
    # mkstemp is race-free and public; the original's private
    # _get_candidate_names path could collide with another process.
    fd, tfname = tempfile.mkstemp()
    os.close(fd)  # aln.write reopens the path itself
    try:
        aln.write(tfname, alignment_format='PIR')
        seqs = get_seqs_from_pir(tfname)
    finally:
        os.unlink(tfname)  # clean up even when write/parse raises
    return seqs
def process(self, fuzzresult):
    """Take a screenshot of fuzzresult.url with cutycapt into a temp PNG."""
    # mkstemp avoids the name-guessing race of the private tempfile helpers.
    fd, filename = tempfile.mkstemp(suffix=".png")
    os.close(fd)  # cutycapt writes the file itself
    # Args are passed as a list (shell=False), so pipes.quote() would have
    # embedded literal quote characters into the URL -- pass it verbatim.
    subprocess.call(['cutycapt', '--url=%s' % fuzzresult.url, '--out=%s' % filename])
    self.add_result("Screenshot taken, output at %s" % filename)
def _get_temp_file_name(): """Generate a temporary file name with an absolute path. :rtype: str """ return (tempfile._get_default_tempdir() + os.sep + next(tempfile._get_candidate_names()))
def tmpName() -> str:
    '''Return the path to a temporary file (the file is not created).'''
    import uuid
    # os.path.join + public tempfile API instead of manual '/' formatting
    # over the private _get_* helpers.
    return os.path.join(tempfile.gettempdir(), uuid.uuid4().hex)
def genTempFilePath(subdir=''):
    """Return a fresh path inside the temp dir (or tempdir/subdir).

    The subdirectory is created if needed; the file itself is not.
    """
    import uuid
    tempFileName = uuid.uuid4().hex  # public replacement for _get_candidate_names
    tempDir = tempfile.gettempdir()
    if subdir:
        tempDir = os.path.join(tempDir, subdir)
        os.makedirs(tempDir, exist_ok=True)
    return os.path.join(tempDir, tempFileName)
def render_mm(self, code, options, format, prefix='mermaid'):
    """Render mermaid code into a PNG or PDF output file.

    Returns (relative_uri, output_path), or (None, None) when the mermaid
    command cannot be executed.  Raises MermaidError when mermaid fails
    or produces no output file.
    """
    if format == 'raw':
        format = 'png'
    mermaid_cmd = self.builder.config.mermaid_cmd
    # Output name is keyed on source + options so identical diagrams are
    # rendered only once per build.
    hashkey = (code + str(options) + str(self.builder.config.mermaid_sequence_config)).encode('utf-8')
    basename = '%s-%s' % (prefix, sha1(hashkey).hexdigest())
    fname = '%s.%s' % (basename, format)
    relfn = posixpath.join(self.builder.imgpath, fname)
    outdir = os.path.join(self.builder.outdir, self.builder.imagedir)
    outfn = os.path.join(outdir, fname)
    tmpfn = os.path.join(_get_default_tempdir(), basename)
    if os.path.isfile(outfn):
        return relfn, outfn
    ensuredir(os.path.dirname(outfn))
    # mermaid expects UTF-8 by default
    if isinstance(code, text_type):
        code = code.encode('utf-8')
    with open(tmpfn, 'wb') as t:
        t.write(code)
    mm_args = [mermaid_cmd, '-i', tmpfn, '-o', outfn]
    mm_args.extend(self.builder.config.mermaid_params)
    if self.builder.config.mermaid_sequence_config:
        # BUG FIX: list.extend() takes a single iterable; the original
        # two-argument call raised TypeError whenever a sequence config
        # was configured.
        mm_args.extend(['--configFile', self.builder.config.mermaid_sequence_config])
    try:
        p = Popen(mm_args, stdout=PIPE, stdin=PIPE, stderr=PIPE)
    except OSError as err:
        if err.errno != ENOENT:  # No such file or directory
            raise
        logger.warning('command %r cannot be run (needed for mermaid '
                       'output), check the mermaid_cmd setting' % mermaid_cmd)
        return None, None
    stdout, stderr = p.communicate(code)
    if self.builder.config.mermaid_verbose:
        logger.info(stdout)
    if p.returncode != 0:
        raise MermaidError('Mermaid exited with error:\n[stderr]\n%s\n'
                           '[stdout]\n%s' % (stderr, stdout))
    if not os.path.isfile(outfn):
        raise MermaidError('Mermaid did not produce an output file:\n[stderr]\n%s\n'
                           '[stdout]\n%s' % (stderr, stdout))
    return relfn, outfn
def make_temp_filename(tmpdir=None, extension=""):
    """Get a random filename in a tmpdir with an optional extension.

    The file is not created; only a unique path is returned.
    """
    import uuid
    if tmpdir is None:
        # Public API -- no pylint protected-access overrides needed.
        tmpdir = tempfile.gettempdir()
    return os.path.join(tmpdir, uuid.uuid4().hex) + extension
def load_forum_data(xml_dir, dcm_dir, out_dir):
    """Convert paired FORUM XML/DICOM exports into NIfTI volumes.

    Scans *xml_dir* and *dcm_dir* in sorted order, decompresses each DICOM
    with pixel data into a temp file, saves 3-D "Cube" series as .nii.gz
    under *out_dir*, and returns a DataFrame of per-volume metadata.
    """
    # NOTE(review): private API; tempfile.gettempdir() is the public equivalent.
    defult_tmp_dir = tempfile._get_default_tempdir()
    xml_paths = sorted(glob(join(xml_dir, '*.xml')))
    dcm_paths = sorted(glob(join(dcm_dir, '*.dcm')))
    data = []
    # NOTE(review): pairing by sorted zip assumes the two directories hold
    # matching counts and orderings -- verify against the export layout.
    for xml, dcm in zip(xml_paths, dcm_paths):
        xml_file = None  # NOTE(review): never reassigned; the XML side appears unused here.
        dcm_file = pydicom.dcmread(dcm)
        series_entries = dcm_file.dir('series')
        if 'SeriesDescription' not in series_entries:
            continue
        # Patient and series description
        pname = dcm_file.PatientName
        desc = dcm_file.SeriesDescription
        if 'PixelData' in dcm_file.dir('pixel'):
            # Decompressing DICOM into a scratch file (never deleted --
            # NOTE(review): consider removing tmp_file after reading).
            tmp_file = join(defult_tmp_dir, next(tempfile._get_candidate_names()))
            convert_dcm(dcm, tmp_file)
            # Converting to NIFTI
            raw_data = pydicom.dcmread(tmp_file).pixel_array
            # Keep only 3-D volumes from "Cube" series.
            if len(raw_data.shape) < 3 or 'Cube' not in desc:
                continue
            x, y, z = raw_data.shape
            # Get metadata and create file name
            lat = dcm_file.Laterality
            img_date = dcm_file.StudyDate
            fname = f'{pname}_{desc}_{img_date}_{lat}.nii.gz'.replace(' ', '')
            print(fname, raw_data.shape, f'{raw_data.min()} - {raw_data.max()}')
            row_dict = dict(patient_name=pname, laterality=lat, filename=fname, date=img_date, x=x, y=y, z=z)
            data.append(row_dict)
            # Save nifti file with an identity affine.
            nii_obj = nib.Nifti1Image(raw_data, np.eye(4))
            save_path = join(out_dir, fname)
            nib.save(nii_obj, save_path)
    df = pd.DataFrame(data=data)
    df['date'] = pd.to_datetime(df['date'])
    return df
def mktemppath():
    """Generator yielding a fresh temp-file path, deleting it on close.

    The path is not created here; whatever the caller wrote there is
    removed (best effort) when the generator is finalized.
    """
    import uuid
    # Assign before the try so a failure here can't reach the unlink below
    # with an unbound name (the original assigned inside the try).
    path = os.path.join(tempfile.gettempdir(), uuid.uuid4().hex)
    try:
        yield path
    finally:
        try:
            os.unlink(path)
        except OSError:
            # Caller never created the file -- nothing to clean up.
            current_app.logger.debug("No file {0}".format(path))
def boxes_to_mathml(self, leaves=None, **options):
    """Render this graphics box to MathML, via Asymptote when enabled.

    If Settings`UseAsyForGraphics2D is true and the ``asy`` binary works,
    the box is rendered to a temp PNG and embedded as a base64 <mglyph>;
    on any failure the flag is cleared and rendering falls back to the
    parent class implementation.
    """
    if leaves is None:
        leaves = self._leaves
    evaluation = options.get("evaluation", None)
    check_asy = False
    if evaluation:
        check_asy = evaluation.definitions.get_ownvalue("Settings`UseAsyForGraphics2D")
        if check_asy:
            check_asy = check_asy.replace.is_true()
    if check_asy:
        import os
        from subprocess import DEVNULL, STDOUT, check_call
        from pymathics.asy import asy_path
        import tempfile
        try:
            # Probe that the asy binary is actually runnable.
            check_call([asy_path, '--version'], stdout=DEVNULL, stderr=DEVNULL)
        except:
            # Asymptote unavailable: warn once and persist the setting off.
            check_asy = False
            evaluation.message("AsyGraphicsBox", "asynotav")
            Expression("Set", Symbol("Settings`UseAsyForGraphics2D"), SymbolFalse).evaluate(evaluation)
    if check_asy:
        asy, width, height = self.boxes_to_tex(leaves, forxml=True, **options)
        # NOTE(review): private tempfile helpers; tempfile.mkstemp would be
        # the race-free public alternative.
        fin = os.path.join(tempfile._get_default_tempdir(), next(tempfile._get_candidate_names()))
        fout = fin + ".png"
        try:
            with open(fin, 'w+') as borrador:
                borrador.write(asy)
        except:
            evaluation.message("AsyGraphicsBox", "noasyfile")
            check_asy = False
    if check_asy:
        try:
            # check_call(['asy', '-f', 'svg', '--svgemulation' ,'-o', fout, fin], stdout=DEVNULL, stderr=DEVNULL)
            check_call([asy_path, '-f', 'png', '-render', '16', '-o', fout, fin], stdout=DEVNULL, stderr=DEVNULL)
        except:
            evaluation.message("AsyGraphicsBox", "asyfail")
            check_asy = False
    if check_asy:
        with open(fout, 'rb') as ff:
            png = ff.read()
        # Inline the rendered PNG as a base64 data URI.
        return (
            ''' <mglyph width="%d" height="%d" src="data:image/png;base64,%s"/></mglyph>'''
            % (
                int(width),
                int(height),
                base64.b64encode(png).decode("utf8"),
                # base64.b64encode(svg.encode("utf8")).decode("utf8"),
            )
        )
    # Not using asymptote. Continue with the buggy backend...
    return super(AsyGraphicsBox, self).boxes_to_mathml(leaves=leaves, **options)
def get_bandwidth(sas_url, input_file_path=None, input_file_size_gb=default_file_size_gb, page_blob=False):
    """Upload a file to *sas_url* with azcopy and return the bandwidth in MB/s.

    When *input_file_path* is None, a sample file of *input_file_size_gb*
    GB is generated in the temp dir and deleted afterwards.
    """
    # If we generate a temporary file, we should delete it when we're done.
    input_file_is_temporary = False
    if input_file_path is None:
        import uuid
        input_file_is_temporary = True
        tmp_folder = os.path.join(tempfile.gettempdir(), 'azcopy_upload_test')
        os.makedirs(tmp_folder, exist_ok=True)
        # BUG FIX: the original joined default_tmp_dir with the already
        # absolute tmp_folder, which silently discarded the first component.
        input_file_path = os.path.join(tmp_folder, uuid.uuid4().hex)
        if not os.path.exists(input_file_path):
            create_sample_file(input_file_path, input_file_size_gb)
    else:
        assert os.path.isfile(
            input_file_path), '{} is not a valid file name'.format(
            input_file_path)
    str_command = 'azcopy copy {} {} --output-type text'.format(
        input_file_path, sas_url)
    if page_blob:
        str_command += ' --blob-type page_blob'
    print('Running command:\n{}'.format(str_command))
    command = str_command.split(' ')
    result = run(command, stdout=PIPE, stderr=PIPE, text=True)
    print('Finished upload')
    if input_file_is_temporary:
        print('Deleting temporary file')
        os.remove(input_file_path)
    # Parse azcopy's text output for timing and volume.
    elapsed_time_min = None
    bytes_transferred = None
    for line in result.stdout.splitlines():
        if 'Elapsed Time' in line:
            elapsed_time_min = line.split(':')[-1].strip()
        if 'TotalBytesTransferred' in line:
            bytes_transferred = line.split(':')[-1].strip()
    elapsed_time_in_seconds = float(elapsed_time_min) * 60
    megabytes_transferred = float(bytes_transferred) / (1024 * 1024)
    bandwidth_MBbps = megabytes_transferred / elapsed_time_in_seconds
    print('Speed in MB/s: {:.3f}'.format(bandwidth_MBbps))
    return bandwidth_MBbps
def get_filename(extension=None, directory=None):
    """Get an available filename (the file itself is not created)."""
    import uuid
    name = uuid.uuid4().hex  # public replacement for _get_candidate_names
    if not directory:
        directory = tempfile.gettempdir()
    if extension:
        name += "." + extension
    return os.path.join(directory, name)
def lookforperson():
    """Flask endpoint: save the uploaded video to a temp file and scan it."""
    app.logger.info("Posted file: %s", request.files["video"])
    file = request.files["video"]
    # mkstemp gives a race-free, uniquely-owned path; the original's
    # private name-candidate pair could collide with another process.
    fd, tmp_filename = tempfile.mkstemp()
    os.close(fd)  # FileStorage.save() reopens the path itself
    # Save to path
    file.save(tmp_filename)
    return check_video(tmp_filename)
def download_and_unzip(url, dst_folder):
    """Download a zip archive from *url* and extract it into *dst_folder*."""
    # Race-free public API instead of the private tempfile helpers.
    fd, temp_filename = tempfile.mkstemp()
    os.close(fd)
    try:
        print('using filename here ==>', temp_filename)
        download_from_url(url, temp_filename)
        print("unzipping...")
        with zipfile.ZipFile(temp_filename, 'r') as zip_ref:
            zip_ref.extractall(dst_folder)
    finally:
        # The original leaked the archive when download/extract failed.
        os.remove(temp_filename)
async def webcam(ctx, cam=0):
    """Grab a frame from webcam *cam* and post it to the Discord channel."""
    # BUG FIX: the original concatenated tempdir + name without a path
    # separator, producing paths like '/tmpXXXX.png' outside the temp dir.
    fd, filename = tempfile.mkstemp(suffix=".png")
    os.close(fd)  # cv2.imwrite reopens the path itself
    camera = cv2.VideoCapture(int(cam))
    try:
        await asyncio.sleep(3)  # give the camera time to warm up
        rv, img = camera.read()
        cv2.imwrite(filename, img)
    finally:
        # Release the device explicitly instead of relying on `del`.
        camera.release()
    await ctx.send(file=discord.File(filename))
    os.remove(filename)
def git_updates(self):
    """Show available git tags and update civiltools to the chosen one.

    Selecting a tag force-checkouts it; selecting 'Latest' runs `git pull`,
    and if that fails the repository is re-cloned into a temp dir and
    copied over the installation directory.
    """
    update_win = UpdateForm(self)
    from git import Repo, Git
    repo = Repo(civiltools_path)
    tags = repo.git.tag(l=True).split('\n')
    update_win.tag_list.addItems(tags)
    if update_win.exec_():
        tag = update_win.tag_list.currentItem().text()
    else:
        # Dialog cancelled -- nothing to do.
        return
    if tag != 'Latest':
        # Hard checkout of the selected tag.
        g = Git(civiltools_path)
        result = g.execute(['git', 'checkout', '-f', tag])
        msg = f'You have successfully move to {tag}'
        QtWidgets.QMessageBox.information(None, 'update', str(msg))
        return
    if (QMessageBox.question(self, "update", ("update to latest version?!"), QMessageBox.Yes | QMessageBox.No) == QMessageBox.No):
        return
    if not internet():
        msg = "You are not connected to the Internet, please check your internet connection."
        QtWidgets.QMessageBox.warning(None, 'update', str(msg))
        return
    import git
    g = git.cmd.Git(civiltools_path)
    msg = ''
    try:
        msg = g.pull(env={'GIT_SSL_NO_VERIFY': '1'})
    except:
        # Pull failed (e.g. dirty tree): fall back to a fresh clone into a
        # temp dir, then replace the install directory wholesale.
        QMessageBox.information(self, "update", "update takes some minutes, please be patient.")
        import shutil
        import tempfile
        pkgs_dir = os.path.abspath(os.path.join(civiltools_path, os.path.pardir))
        # NOTE(review): private tempfile API; tempfile.mkdtemp() would be
        # the public, race-free way to get this scratch directory.
        default_tmp_dir = tempfile._get_default_tempdir()
        name = next(tempfile._get_candidate_names())
        civiltools_temp_dir = os.path.join(default_tmp_dir, 'civiltools' + name)
        os.mkdir(civiltools_temp_dir)
        os.chdir(civiltools_temp_dir)
        git.Git('.').clone("https://github.com/ebrahimraeyat/civilTools.git", env={'GIT_SSL_NO_VERIFY': '1'})
        # Remove the old install, then copy the fresh clone into place.
        shutil.rmtree(civiltools_path, onerror=onerror)
        src_folder = os.path.join(civiltools_temp_dir, 'civilTools')
        shutil.copytree(src_folder, civiltools_path)
        os.chdir(civiltools_path)
        msg = 'update done successfully.'
        # os.chdir(civiltools_path + '/..')
        # pip_install = f'pip install --upgrade --install-option="--prefix={civiltools_path}/.." git+https://github.com/ebrahimraeyat/civilTools.git'
        # subprocess.Popen([python_exe, '-m', pip_install])
    else:
        if not msg:
            msg = 'error occured during update\nplease contact with @roknabadi'
    # msg += '\n please restart the programm.'
    QtWidgets.QMessageBox.information(None, 'update', msg)
def render_mm(self, code, options, format, prefix='mermaid'):
    """Render mermaid code into a PNG or PDF output file.

    Returns (relative_uri, output_path), or (None, None) when the mermaid
    command cannot be executed.  Raises MermaidError when mermaid fails
    or produces no output file.
    """
    if format == 'raw':
        format = 'png'
    mermaid_cmd = self.builder.config.mermaid_cmd
    # Output name is keyed on source + options so identical diagrams are
    # rendered only once per build.
    hashkey = (code + str(options) + str(self.builder.config.mermaid_sequence_config)).encode('utf-8')
    basename = '%s-%s' % (prefix, sha1(hashkey).hexdigest())
    fname = '%s.%s' % (basename, format)
    relfn = posixpath.join(self.builder.imgpath, fname)
    outdir = os.path.join(self.builder.outdir, self.builder.imagedir)
    outfn = os.path.join(outdir, fname)
    tmpfn = os.path.join(_get_default_tempdir(), basename)
    if os.path.isfile(outfn):
        return relfn, outfn
    ensuredir(os.path.dirname(outfn))
    # mermaid expects UTF-8 by default
    if isinstance(code, text_type):
        code = code.encode('utf-8')
    with open(tmpfn, 'wb') as t:
        t.write(code)
    mm_args = [mermaid_cmd, '-i', tmpfn, '-o', outfn]
    mm_args.extend(self.builder.config.mermaid_params)
    if self.builder.config.mermaid_sequence_config:
        # BUG FIX: list.extend() takes a single iterable; the original
        # two-argument call raised TypeError whenever a sequence config
        # was configured.
        mm_args.extend(['--configFile', self.builder.config.mermaid_sequence_config])
    if format != 'png':
        self.builder.warn('Mermaid SVG support is experimental')
    try:
        p = Popen(mm_args, stdout=PIPE, stdin=PIPE, stderr=PIPE)
    except OSError as err:
        if err.errno != ENOENT:  # No such file or directory
            raise
        self.builder.warn('command %r cannot be run (needed for mermaid '
                          'output), check the mermaid_cmd setting' % mermaid_cmd)
        return None, None
    stdout, stderr = p.communicate(code)
    if self.builder.config.mermaid_verbose:
        self.builder.info(stdout)
    if p.returncode != 0:
        raise MermaidError('Mermaid exited with error:\n[stderr]\n%s\n'
                          '[stdout]\n%s' % (stderr, stdout))
    if not os.path.isfile(outfn):
        raise MermaidError('Mermaid did not produce an output file:\n[stderr]\n%s\n'
                          '[stdout]\n%s' % (stderr, stdout))
    return relfn, outfn
def generate( self, out_path, aux, idx_in, idx_out ) :
    """Generate the model's response-function atoms with Camino's
    "datasynth" (cylinders, zeppelins, balls) and save each rotated
    kernel as A_XXX.npy under *out_path*.
    """
    if self.scheme.version != 1 :
        raise RuntimeError( 'This model requires a "VERSION: STEJSKALTANNER" scheme.' )

    # Write a high-resolution copy of the acquisition scheme for datasynth.
    scheme_high = amico.lut.create_high_resolution_scheme( self.scheme, b_scale=1E6 )
    filename_scheme = pjoin( out_path, 'scheme.txt' )
    np.savetxt( filename_scheme, scheme_high.raw, fmt='%15.8e', delimiter=' ', header='VERSION: STEJSKALTANNER', comments='' )

    # temporary file where to store "datasynth" output
    # NOTE(review): private tempfile API; tempfile.mkstemp would be race-free.
    filename_signal = pjoin( tempfile._get_default_tempdir(), next(tempfile._get_candidate_names())+'.Bfloat' )

    nATOMS = len(self.Rs) + len(self.ICVFs) + len(self.d_ISOs)
    progress = ProgressBar( n=nATOMS, prefix="    ", erase=True )

    # Cylinder(s)
    for R in self.Rs :
        # NOTE(review): shell=True with interpolated paths -- safe only
        # while out_path/tempdir contain no shell metacharacters.
        CMD = 'datasynth -synthmodel compartment 1 CYLINDERGPD %E 0 0 %E -schemefile %s -voxels 1 -outputfile %s 2> /dev/null' % ( self.d_par*1E-6, R, filename_scheme, filename_signal )
        subprocess.call( CMD, shell=True )
        if not exists( filename_signal ) :
            raise RuntimeError( 'Problems generating the signal with "datasynth"' )
        # datasynth emits big-endian float32 samples.
        signal = np.fromfile( filename_signal, dtype='>f4' )
        if exists( filename_signal ) :
            remove( filename_signal )
        lm = amico.lut.rotate_kernel( signal, aux, idx_in, idx_out, False )
        np.save( pjoin( out_path, 'A_%03d.npy'%progress.i ), lm )
        progress.update()

    # Zeppelin(s): perpendicular diffusivity derived from ICVF via tortuosity.
    for d in [ self.d_par*(1.0-ICVF) for ICVF in self.ICVFs] :
        CMD = 'datasynth -synthmodel compartment 1 ZEPPELIN %E 0 0 %E -schemefile %s -voxels 1 -outputfile %s 2> /dev/null' % ( self.d_par*1E-6, d*1e-6, filename_scheme, filename_signal )
        subprocess.call( CMD, shell=True )
        if not exists( filename_signal ) :
            raise RuntimeError( 'Problems generating the signal with "datasynth"' )
        signal = np.fromfile( filename_signal, dtype='>f4' )
        if exists( filename_signal ) :
            remove( filename_signal )
        lm = amico.lut.rotate_kernel( signal, aux, idx_in, idx_out, False )
        np.save( pjoin( out_path, 'A_%03d.npy'%progress.i ), lm )
        progress.update()

    # Ball(s): isotropic compartments (no rotation harmonics needed -> True).
    for d in self.d_ISOs :
        CMD = 'datasynth -synthmodel compartment 1 BALL %E -schemefile %s -voxels 1 -outputfile %s 2> /dev/null' % ( d*1e-6, filename_scheme, filename_signal )
        subprocess.call( CMD, shell=True )
        if not exists( filename_signal ) :
            raise RuntimeError( 'Problems generating the signal with "datasynth"' )
        signal = np.fromfile( filename_signal, dtype='>f4' )
        if exists( filename_signal ) :
            remove( filename_signal )
        lm = amico.lut.rotate_kernel( signal, aux, idx_in, idx_out, True )
        np.save( pjoin( out_path, 'A_%03d.npy'%progress.i ), lm )
        progress.update()
def generate( self, out_path, aux, idx_in, idx_out, ndirs ) :
    """Generate the model's response-function atoms with Camino's
    "datasynth" (cylinders, zeppelins, balls) over *ndirs* orientations
    and save each rotated kernel as A_XXX.npy under *out_path*.
    """
    if self.scheme.version != 1 :
        ERROR( 'This model requires a "VERSION: STEJSKALTANNER" scheme' )

    # Write a high-resolution copy of the acquisition scheme for datasynth.
    scheme_high = amico.lut.create_high_resolution_scheme( self.scheme, b_scale=1E6 )
    filename_scheme = pjoin( out_path, 'scheme.txt' )
    np.savetxt( filename_scheme, scheme_high.raw, fmt='%15.8e', delimiter=' ', header='VERSION: STEJSKALTANNER', comments='' )

    # temporary file where to store "datasynth" output
    # NOTE(review): private tempfile API; tempfile.mkstemp would be race-free.
    filename_signal = pjoin( tempfile._get_default_tempdir(), next(tempfile._get_candidate_names())+'.Bfloat' )

    nATOMS = len(self.Rs) + len(self.d_perps) + len(self.d_isos)
    progress = ProgressBar( n=nATOMS, prefix="    ", erase=False )

    # Cylinder(s)
    for R in self.Rs :
        # NOTE(review): shell=True with interpolated paths -- safe only
        # while out_path/tempdir contain no shell metacharacters.
        CMD = 'datasynth -synthmodel compartment 1 CYLINDERGPD %E 0 0 %E -schemefile %s -voxels 1 -outputfile %s 2> /dev/null' % ( self.d_par*1E-6, R, filename_scheme, filename_signal )
        subprocess.call( CMD, shell=True )
        if not exists( filename_signal ) :
            ERROR( 'Problems generating the signal with "datasynth"' )
        # datasynth emits big-endian float32 samples.
        signal = np.fromfile( filename_signal, dtype='>f4' )
        if exists( filename_signal ) :
            remove( filename_signal )
        lm = amico.lut.rotate_kernel( signal, aux, idx_in, idx_out, False, ndirs )
        np.save( pjoin( out_path, 'A_%03d.npy'%progress.i ), lm )
        progress.update()

    # Zeppelin(s)
    for d in self.d_perps :
        CMD = 'datasynth -synthmodel compartment 1 ZEPPELIN %E 0 0 %E -schemefile %s -voxels 1 -outputfile %s 2> /dev/null' % ( self.d_par*1E-6, d*1e-6, filename_scheme, filename_signal )
        subprocess.call( CMD, shell=True )
        if not exists( filename_signal ) :
            ERROR( 'Problems generating the signal with "datasynth"' )
        signal = np.fromfile( filename_signal, dtype='>f4' )
        if exists( filename_signal ) :
            remove( filename_signal )
        lm = amico.lut.rotate_kernel( signal, aux, idx_in, idx_out, False, ndirs )
        np.save( pjoin( out_path, 'A_%03d.npy'%progress.i ), lm )
        progress.update()

    # Ball(s): isotropic compartments (no rotation -> True).
    for d in self.d_isos :
        CMD = 'datasynth -synthmodel compartment 1 BALL %E -schemefile %s -voxels 1 -outputfile %s 2> /dev/null' % ( d*1e-6, filename_scheme, filename_signal )
        subprocess.call( CMD, shell=True )
        if not exists( filename_signal ) :
            ERROR( 'Problems generating the signal with "datasynth"' )
        signal = np.fromfile( filename_signal, dtype='>f4' )
        if exists( filename_signal ) :
            remove( filename_signal )
        lm = amico.lut.rotate_kernel( signal, aux, idx_in, idx_out, True, ndirs )
        np.save( pjoin( out_path, 'A_%03d.npy'%progress.i ), lm )
        progress.update()
def test_file_loading2(self):
    """Chains saved as .npy files on disk can be loaded by filename."""
    import uuid
    data = self.data[:1000]
    # os.path.join + public tempfile API instead of the private helpers
    # and manual separator concatenation.
    filename = os.path.join(tempfile.gettempdir(), uuid.uuid4().hex + ".npy")
    np.save(filename, data)
    try:
        consumer = ChainConsumer()
        consumer.add_chain(filename)
        summary = consumer.analysis.get_summary()
        actual = np.array(list(summary.values())[0])
        assert np.abs(actual[1] - 5.0) < 0.5
    finally:
        os.remove(filename)  # the original leaked the fixture file
def create_sumstats(studies_obj, multi_chrom):
    """Create temporary summary-statistics files for every study.

    Quantitative studies (more than two distinct y values) go through
    Plink linear regression; case-control studies go through the PCGC
    sumstats creator.  Returns the list of per-study file prefixes.
    """
    sumstats_prefixes = []
    ref_fname = studies_obj.ref_fname
    # NOTE(review): num_studies comes from module scope -- presumably the
    # number of studies; confirm it equals len(studies_obj.studies_arr).
    for study_i in range(num_studies):
        #extract the data of study i
        study_obj = studies_obj.studies_arr[study_i]
        plink_fname = study_obj.plink_fname
        prev = study_obj.prev
        #create a file name for the temporary summary statistics
        # NOTE(review): private tempfile API; tempfile.mkstemp would be race-free.
        ss_fname = os.path.join(tempfile._get_default_tempdir(), next(tempfile._get_candidate_names()))
        sumstats_prefixes.append(ss_fname)
        #if not a case-control study, run Plink
        if len(np.unique(study_obj.y)) > 2:
            n = len(study_obj.y)
            if multi_chrom:
                # One output per autosome (chromosomes 1..22).
                for chr_num in range(1, 23):
                    run_plink_linreg(PLINK_EXE, plink_fname, ss_fname + '.%d' % (chr_num), n, chr_num=chr_num)
            else:
                run_plink_linreg(PLINK_EXE, plink_fname, ss_fname, n, chr_num=None)
        #if it's a case-control study
        else:
            if multi_chrom:
                for chr_num in range(1, 23):
                    run_pcgc_sumstats_creator(prev, ref_fname, plink_fname, ss_fname + '.%d' % (chr_num), study_obj, multi_chrom, chr_num=chr_num)
            else:
                run_pcgc_sumstats_creator(prev, ref_fname, plink_fname, ss_fname, study_obj, multi_chrom, chr_num=None)
    return sumstats_prefixes
def main():
    """CLI entry point: dispatch the question in argv to its handler.

    argv: [1] question id, [2] code, [3] output file name (placed in the
    system temp directory).
    """
    question = sys.argv[1]
    code = sys.argv[2]
    print("Variables:")
    print(question)
    print(code)
    # os.path.join instead of manual '/' concatenation; the unused
    # private-API candidate-name lookup was dropped.
    filename = os.path.join(tempfile.gettempdir(), sys.argv[3])
    if question == "pregunta11":
        pregunta11(code, filename)
def shot_board(app) -> str:
    """Screenshot the board region of the screen; return the saved PNG path."""
    import uuid
    screen = QScreen.grabWindow(app.primaryScreen(), QApplication.desktop().winId())
    board_rect = QRect(QPoint(*config.BOARD_POINT), QSize(config.BOARD_SIZE, config.BOARD_SIZE))
    board = screen.copy(board_rect)
    # Public tempfile API instead of the private name-candidate generator.
    filepath = os.path.join(tempfile.gettempdir(), '{}.png'.format(uuid.uuid4().hex))
    board.save(filepath)
    return filepath
def main():
    """CLI entry point: dispatch the question in argv to its handler.

    argv: [1] question id, [2] code, [3] output file name (placed in the
    system temp directory).
    """
    question = sys.argv[1]
    code = sys.argv[2]
    print("Variables:")
    print(question)
    print(code)
    # os.path.join instead of manual '/' concatenation; the unused
    # private-API candidate-name lookup was dropped.
    filename = os.path.join(tempfile.gettempdir(), sys.argv[3])
    if question == "pregunta11":
        pregunta11(code, filename)
async def download(ctx, path):
    """Upload *path* to anonymousfiles.io and post the resulting URL."""
    async with aiohttp.ClientSession() as session:
        # Close the upload handle deterministically (the original leaked it).
        with open(path, 'rb') as payload:
            async with session.post("https://api.anonymousfiles.io", data={'file': payload}) as response:
                to_send = await response.json()
    # BUG FIX: tempdir and name were concatenated without a separator, so
    # the file landed outside the temp directory (e.g. '/tmpXXXX.txt').
    fd, filename = tempfile.mkstemp(suffix=".txt")
    with os.fdopen(fd, "w", newline="") as fh:
        fh.write(to_send["url"])
    await ctx.send(file=discord.File(filename))
    os.remove(filename)
def get_stru_energy(struct, sequence, react=None):
    """Calculate the free energy of *struct* on *sequence* via RNAeval.

    When SHAPE reactivities are given they are written to a temp file
    and passed to RNAeval with --shape.
    """
    if react is None:  # idiomatic None test (was: type(react) == type(None))
        cmd = "echo \"%s\n%s\" | RNAeval" % (sequence, struct)
        retcode, err, out = shexec(cmd)
    else:
        # Race-free public temp file instead of the private helpers.
        fd, fname = tempfile.mkstemp(suffix="rea.tmp")
        os.close(fd)
        try:
            rna_io.write_shape(fname, react)
            cmd = "echo \"%s\n%s\" | RNAeval --shape %s" % (sequence, struct, fname)
            retcode, err, out = shexec(cmd)
        finally:
            os.remove(fname)  # the original leaked the reactivity file
    return float(re.findall(r"[-+]?[0-9]*\.?[0-9]+", out)[0])
def generate_temp_filename(temp_dir=None, prefix="", suffix=""):
    '''Generate a path for a temporary file (does not create the file).

    The input arguments can be used to customize the temporary filename.
    If no temporary directory is specified then the default temporary
    directory will be used.'''
    import uuid
    # If temp_dir not set then use the public default (the original used
    # the private tempfile._get_default_tempdir()).
    if not temp_dir:
        temp_dir = tempfile.gettempdir()
    return join(temp_dir, prefix + uuid.uuid4().hex + suffix)
def get_ens_energy(seq, react=None):
    '''Calculate the ensemble energy of *seq* via RNAfold -p0.

    When SHAPE reactivities are given they are written to a temp file
    and passed to RNAfold with --shape.
    '''
    if react is None:  # idiomatic None test (was: type(react) == type(None))
        retcode, err, out = shexec("echo %s | RNAfold --noPS -p0" % seq)
    else:
        # Race-free public temp file instead of the private helpers.
        fd, fname = tempfile.mkstemp(suffix="rea.tmp")
        os.close(fd)
        try:
            rna_io.write_shape(fname, react)
            retcode, err, out = shexec("echo %s | RNAfold --noPS --shape %s -p0" % (seq, fname))
        finally:
            os.remove(fname)  # the original leaked the reactivity file
    # a float followed by kcal/mol
    return float(re.findall(r"([-+]?[0-9]*\.?[0-9]+) kcal/mol", out)[0])
def get_temp_file_name(tmp_dir=None, extension=''):
    """Return an available name for a temporary file (not created).

    Falls back from iCount.TMP_ROOT to the system temp directory.
    """
    import uuid
    if tmp_dir is None:
        tmp_dir = iCount.TMP_ROOT
    if not tmp_dir:
        tmp_dir = tempfile.gettempdir()  # public API instead of _get_default_tempdir
    tmp_name = uuid.uuid4().hex
    # BUG FIX: the default extension '' previously produced a trailing
    # dot ("name.") because the check was `is not None`.
    if extension:
        tmp_name = tmp_name + '.' + extension
    return os.path.join(tmp_dir, tmp_name)
def process(self, fuzzresult):
    """Take a screenshot of fuzzresult.url with cutycapt into a temp PNG."""
    # mkstemp avoids the name-guessing race of the private tempfile helpers.
    fd, filename = tempfile.mkstemp(suffix=".png")
    os.close(fd)  # cutycapt writes the file itself
    # Args are passed as a list (shell=False), so pipes.quote() would have
    # embedded literal quote characters into the URL -- pass it verbatim.
    subprocess.call([
        "cutycapt",
        "--url=%s" % fuzzresult.url,
        "--out=%s" % filename,
    ])
    self.add_result("Screenshot taken, output at %s" % filename)
def __init__(self, receptor='Protein', ligand='Ligand', prepare_each=False, *args, **kwargs):
    """Docking objective configuration.

    Prefers the RAM-backed /dev/shm as scratch space on POSIX systems,
    falling back to the system temp directory elsewhere.
    """
    ObjectiveProvider.__init__(self, **kwargs)
    self.receptor = receptor
    self.ligand = ligand
    self.prepare_each = prepare_each
    self._paths = []
    self._tmpfile = None
    if os.name == 'posix' and os.path.exists('/dev/shm'):
        self.tmpdir = '/dev/shm'
    else:
        # Public API instead of the private _get_default_tempdir().
        import tempfile
        self.tmpdir = tempfile.gettempdir()
def test_file_not_found(request):
    """A missing config path should exit once, then be auto-generated."""
    import uuid
    # os.path.join + public tempfile API instead of '/'-concatenating the
    # private helper outputs.
    temp_config = os.path.join(tempfile.gettempdir(), uuid.uuid4().hex)
    os.environ['FCREPLAY_CONFIG'] = temp_config
    with pytest.raises(SystemExit) as e:
        Config().config
    assert e.type == SystemExit, "Should exit when file doesn't exist"
    assert os.path.exists(temp_config), "Should create config"
    config = Config().config
    assert type(config) is dict, "Generated config should be dict"
    os.remove(temp_config)
def test_no_files_left_behind(self):
    """_get_default_tempdir() must not leave its write-probe files behind,
    even when io.open or the subsequent write() fails mid-probe."""
    # Use a private empty directory as the only tempdir candidate.
    with tempfile.TemporaryDirectory() as our_temp_directory:
        def our_candidate_list():
            return [our_temp_directory]
        with support.swap_attr(tempfile, '_candidate_tempdir_list', our_candidate_list):
            # Happy path: the probe file must be cleaned up.
            tempfile._get_default_tempdir()
            self.assertEqual(os.listdir(our_temp_directory), [])
            def raise_OSError(*args, **kwargs):
                raise OSError()
            with support.swap_attr(io, 'open', raise_OSError):
                # open() itself failing must not leave files behind.
                with self.assertRaises(FileNotFoundError):
                    tempfile._get_default_tempdir()
                self.assertEqual(os.listdir(our_temp_directory), [])
            def bad_writer(*args, **kwargs):
                # orig_open is bound by the swap_attr below before this runs.
                fp = orig_open(*args, **kwargs)
                fp.write = raise_OSError
                return fp
            with support.swap_attr(io, 'open', bad_writer) as orig_open:
                # write() failing after a successful open must not leave files.
                with self.assertRaises(FileNotFoundError):
                    tempfile._get_default_tempdir()
                self.assertEqual(os.listdir(our_temp_directory), [])
def test_no_files_left_behind(self):
    """_get_default_tempdir() must not leave its write-probe files behind,
    even when io.open or the subsequent write() fails mid-probe."""
    # use a private empty directory
    with tempfile.TemporaryDirectory() as our_temp_directory:
        # force _get_default_tempdir() to consider our empty directory
        def our_candidate_list():
            return [our_temp_directory]
        with support.swap_attr(tempfile, "_candidate_tempdir_list", our_candidate_list):
            # verify our directory is empty after _get_default_tempdir()
            tempfile._get_default_tempdir()
            self.assertEqual(os.listdir(our_temp_directory), [])
            def raise_OSError(*args, **kwargs):
                raise OSError()
            with support.swap_attr(io, "open", raise_OSError):
                # test again with failing io.open()
                with self.assertRaises(FileNotFoundError):
                    tempfile._get_default_tempdir()
                self.assertEqual(os.listdir(our_temp_directory), [])
            # Keep a reference to the real io.open so bad_writer can still
            # produce a genuine file object while io.open is patched.
            open = io.open
            def bad_writer(*args, **kwargs):
                fp = open(*args, **kwargs)
                fp.write = raise_OSError
                return fp
            with support.swap_attr(io, "open", bad_writer):
                # test again with failing write()
                with self.assertRaises(FileNotFoundError):
                    tempfile._get_default_tempdir()
                self.assertEqual(os.listdir(our_temp_directory), [])
def main():
    """CLI entry point: dispatch the question in argv to its handler.

    argv: [1] question id, [2] code, [3] output file name (placed in the
    system temp directory).
    """
    question = sys.argv[1]
    code = sys.argv[2]
    # os.path.join instead of manual '/' concatenation; the unused
    # private-API candidate-name lookup was dropped.
    filename = os.path.join(tempfile.gettempdir(), sys.argv[3])
    if question == "pregunta11":
        pregunta11(code, filename)
    elif question == "pregunta21":
        pregunta21(code, filename)
    elif question == "pregunta22":
        pregunta22(code, filename)
def main():
    """CLI entry point: dispatch the question in argv to its handler.

    argv: [1] question id, [2] code, [3] output file name (placed in the
    system temp directory).
    """
    question = sys.argv[1]
    code = sys.argv[2]
    # os.path.join instead of manual '/' concatenation; the unused
    # private-API candidate-name lookup was dropped.
    filename = os.path.join(tempfile.gettempdir(), sys.argv[3])
    if question == "pregunta11":
        pregunta11(code, filename)
    elif question == "pregunta21":
        pregunta21(code, filename)
    elif question == "pregunta22":
        pregunta22(code, filename)
def validationsCreateProject(self):
    """Validate network name and project directory before project creation.

    Returns True when both are usable; pushes a message-bar warning and
    returns False otherwise.  An empty/temporal directory is replaced by
    a fresh path under the system temp directory.
    """
    self.NetworkName = self.tbNetworkName.text()
    if len(self.NetworkName) == 0:
        self.iface.messageBar().pushMessage("Validations", "The network's name is not valid", level=1)
        return False
    self.ProjectDirectory = self.tbProjectDirectory.text()
    if len(self.ProjectDirectory) == 0 or self.ProjectDirectory == self.TemporalFolder:
        import uuid
        # BUG FIX: the original hard-coded a Windows '\\' separator and
        # used the private tempfile helpers; os.path.join is portable.
        self.ProjectDirectory = os.path.join(tempfile.gettempdir(), uuid.uuid4().hex)
    else:
        if not os.path.exists(self.ProjectDirectory):
            self.iface.messageBar().pushMessage("Validations", "The project directory does not exist", level=1)
            return False
    return True
async def parse_feed(username, url, data_directory):
    """Poll a webstagram RSS feed and store new images for *username*.

    Downloads each thumbnail to a temp file, deduplicates by feed entry id
    and by SHA-256 of the content, moves new images into *data_directory*
    and records a row per image in the database.  Stops at the first
    already-seen entry (feed is assumed newest-first).
    """
    session = config.Session()
    try:
        stop = False
        feed = feedparser.parse(url)
        for x in feed['entries']:
            if stop:
                break
            current_data = datetime.strptime(x['published'], '%a, %d %b %Y %H:%M:%S %z')
            for i, media in enumerate(x['media_thumbnail']):
                logging.info('processing {} for {}'.format(i, username))
                # Dedup by feed entry id: seen before means everything older
                # has been processed already.
                same_id = session.query(InstgaramImageRss).filter(InstgaramImageRss.rss_webstagram_id==x['id']).all()
                # print(same_id)
                if same_id is not None and len(same_id) > 0:
                    stop = True
                    break
                # NOTE(review): private tempfile API; tempfile.mkstemp would
                # be the race-free public alternative.
                current_tmp_filename = os.path.join(tempfile._get_default_tempdir(), next(tempfile._get_candidate_names()))
                await Utils.download(media['url'], path=current_tmp_filename)
                # Dedup by content hash (same image can appear under new ids).
                current_image_hash = hashlib.sha256(open(current_tmp_filename, 'rb').read()).hexdigest()
                same_hash = session.query(InstgaramImageRss).filter(InstgaramImageRss.image_hash==current_image_hash).all()
                # import json
                # print(json.dumps(x, indent=4))
                if same_hash:
                    os.remove(current_tmp_filename)
                    stop = True
                    break
                # NOTE(review): path[3:] presumably strips a '/p/'-style
                # prefix from the entry link -- confirm against the feed URLs.
                current_filename = urlparse(x["link"]).path[3:] + "_" + str(i) + '.jpg'
                new_path = os.path.join(data_directory, current_filename)
                shutil.move(current_tmp_filename, new_path)
                current_image_rss = InstgaramImageRss()
                current_image_rss.published = current_data
                current_image_rss.local_name = current_filename
                current_image_rss.local_path = new_path
                current_image_rss.rss_webstagram_id = x['id']
                current_image_rss.summary = x['summary_detail']['value']
                current_image_rss.media_url = media['url']
                current_image_rss.image_hash = current_image_hash
                current_image_rss.creation_time = datetime.now()
                current_image_rss.link = x['link']
                current_image_rss.sended = False
                current_image_rss.username = username
                # print(current_image_rss)
                session.add(current_image_rss)
                try:
                    session.commit()
                except Exception:
                    session.rollback()
    except Exception as e:
        logging.exception(str(e))
    finally:
        session.close()
def test(self):
    """Run a session saved to disk, replay it via the wfuzzp payload,
    and verify the replayed descriptions match the expected list."""
    save_path = os.path.join(tempfile._get_default_tempdir(),
                             next(tempfile._get_candidate_names()))

    def describe(result):
        # Prefer the evaluated raw description when one was set.
        if result._description:
            return result.eval(result._description)
        return result.description

    # First session: fuzz and persist the results to save_path.
    with wfuzz.get_session(prev_session_cli) as session:
        descriptions = [describe(r) for r in session.fuzz(save=save_path)]

    # Second session: feed the saved file back in as a wfuzzp payload.
    with wfuzz.get_session(next_session_cli.replace("$$PREVFILE$$", save_path)) as session:
        descriptions = [describe(r) for r in session.fuzz()]

    self.assertEqual(sorted(descriptions), sorted(expected_list))
def uploadFile(self):
    """Export the selected root object to IFC and check it in to the
    selected BIMserver project via the JSON API.

    Flow: query the server for a deserializer matching the current IFC
    schema, ask the user for a save path (falling back to a temp file),
    export with importIFC, then POST the base64-encoded file as a
    ServiceInterface 'checkin' call.  Status is mirrored in the dialog's
    labelStatus and cleared at the end.
    """
    self.form.labelStatus.setText("")
    # Proceed only with a valid project selection and a chosen root object.
    if (self.form.comboProjects.currentIndex() >= 0) and (len(self.Projects) > self.form.comboProjects.currentIndex()) and (self.form.comboRoot.currentIndex() >= 0):
        project = self.Projects[self.form.comboProjects.currentIndex()]
        import requests
        url,token = self.getPrefs()
        if url and token:
            url += "/json"
            deserializer = None
            FreeCAD.Console.PrintMessage("Saving file...\n")
            self.form.labelStatus.setText("Checking available deserializers...")
            import ifcopenshell
            schema = ifcopenshell.schema_identifier.lower()
            # Ask the server which deserializers are enabled and pick the
            # first whose name mentions our schema (e.g. 'ifc2x3').
            data = { "token": token, "request": { "interface": "PluginInterface", "method": "getAllDeserializers", "parameters": { "onlyEnabled": "true" } } }
            resp = requests.post(url,json = data)
            if resp.ok:
                # NOTE(review): bare except deliberately treats any response
                # shape mismatch as "no deserializer found".
                try:
                    for d in resp.json()["response"]["result"]:
                        if schema in d["name"].lower():
                            deserializer = d
                            break
                except:
                    pass
            if not deserializer:
                FreeCAD.Console.PrintError("Unable to get a valid deserializer for the "+schema+" schema\n")
                return
            # Let the user pick where to save the IFC; getSaveFileName
            # returns a (path, filter) tuple here.
            tf = QtGui.QFileDialog.getSaveFileName(QtGui.qApp.activeWindow(), "Save the IFC file before uploading?", None, "IFC files (*.ifc)")
            if tf:
                tf = tf[0]
            if not tf:
                # Dialog cancelled: export to a throwaway temp file instead.
                tf = os.path.join(tempfile._get_default_tempdir(),next(tempfile._get_candidate_names())+".ifc")
            import importIFC
            self.form.labelStatus.setText("Saving file...")
            importIFC.export([self.RootObjects[self.form.comboRoot.currentIndex()]],tf)
            f = open(tf,"rb")
            ifcdata = base64.b64encode(f.read())
            f.close()
            FreeCAD.Console.PrintMessage("Uploading file to Bimserver...\n")
            self.form.labelStatus.setText("Uploading file...")
            # Synchronous, non-merging checkin of the exported file.
            data = { "token": token, "request": { "interface": "ServiceInterface", "method": "checkin", "parameters": { "poid": project["oid"], "comment": self.form.editComment.text(), "deserializerOid": deserializer["oid"], "fileSize": os.path.getsize(tf), "fileName": os.path.basename(tf), "data": ifcdata, "merge": "false", "sync": "true" } } }
            resp = requests.post(url,json = data)
            if resp.ok:
                if resp.json()["response"]["result"]:
                    FreeCAD.Console.PrintMessage("File upload successful\n")
                    # Refresh the revision list for the current project.
                    self.getRevisions(self.form.comboProjects.currentIndex())
                else:
                    FreeCAD.Console.PrintError("File upload failed\n")
    self.form.labelStatus.setText("")
def test_hook_event_substitution(self):
    """Verify that '{event}' in a hook command is substituted with the
    name of the event that fired the hook."""
    hook_dir = tempfile._get_default_tempdir()
    names = ["test_event_event_{0}".format(n) for n in range(self.TEST_HOOK_COUNT)]

    # Register one touch-hook per event; the hook plugin expands {event}.
    command = 'touch "{0}/{{event}}"'.format(hook_dir)
    for name in names:
        self._add_hook(name, command)
    self.load_plugins("hook")

    # Fire every event.
    for name in names:
        plugins.send(name)

    # Each hook must have created a file named after its event; clean up.
    for name in names:
        created = os.path.join(hook_dir, name)
        self.assertTrue(os.path.isfile(created))
        os.remove(created)
def process(self):
    """Encrypt or decrypt self.filename with aescrypt using the password
    from the dialog's input field.

    Encrypt mode: write to a temp file, then move it next to the source
    as '<name>.aes' (asking before overwrite) and confirm with a dialog.
    Decrypt mode: write to a temp file and open it with xdg-open.
    On an aescrypt failure the error is shown inline and the window is
    shaken as visual feedback.
    """
    tmp_filename = next(tempfile._get_candidate_names())
    tmp_dir = tempfile._get_default_tempdir()
    result_file = os.path.join(tmp_dir, tmp_filename)
    # Only proceed if the candidate temp path is actually free.
    if not os.path.exists(result_file):
        password = self.field.text()
        try:
            # -e/-d selects encrypt vs decrypt; output goes to result_file.
            cmd = ['/usr/local/bin/aescrypt', '-e' if self.encrypt else '-d', '-p', password, '-o', result_file, self.filename]
            p = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
            self.close()
            if self.encrypt:
                # NOTE(review): joining dirname(self.filename) with the full
                # self.filename relies on os.path.join discarding the first
                # part when the second is absolute — TODO confirm callers
                # always pass absolute paths.
                dest_file = os.path.join(os.path.dirname(self.filename), self.filename + ".aes")
                if os.path.exists(dest_file):
                    msgBox = QMessageBox()
                    msgBox.setText("File " + dest_file + " already exists")
                    msgBox.setInformativeText("Overwrite?")
                    msgBox.setStandardButtons(QMessageBox.Yes | QMessageBox.No)
                    ret = msgBox.exec_();
                    if ret == QMessageBox.No:
                        return
                shutil.move(result_file, dest_file)
                msgBox = QMessageBox()
                msgBox.setText("Encryption successful")
                msgBox.setStandardButtons(QMessageBox.Ok)
                ret = msgBox.exec_();
            else:
                # Decrypted: hand the temp file to the desktop opener.
                cmd = ['xdg-open', result_file]
                p = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            # Show aescrypt's combined stdout/stderr inline in the dialog.
            self.error_label = QLabel("<font color='#FF0000'>" + e.output.decode("utf-8") + "</font>")
            self.grid.addWidget(self.error_label, 2, 2)
            #print(e.cmd)
            #print(e.returncode)
            #print(e.output)
            # Shake the window
            i = 6
            dir = 1
            while i > 0:
                self.move(self.x()+dir*10, self.y())
                self.repaint()
                dir = -dir
                i = i - 1
                time.sleep(0.04)
def text2aln(env, seqs): """Write text sequences to temporary FASTA and read them back with MODELLER""" tfname = os.path.join(tempfile._get_default_tempdir(), next(tempfile._get_candidate_names())) # tfname = 'tempfile.pir' outf = open(tfname, "w") for ns, seq in enumerate(seqs): sfix = seq.replace("\n", "") print "seq num", ns, "len", len(seq) # outf.write('>%i\n'%ns) # outf.write(sfix+'\n') outf.write(">P1;%s\nsequence:: : : : :::-1.00:-1.00\n" % ns) num_groups = len(seq) / 80 + 1 for ngroup in range(num_groups): if ngroup < num_groups - 1: outf.write(seq[ngroup * 80 : (ngroup + 1) * 80] + "\n") else: outf.write(seq[ngroup * 80 : (ngroup + 1) * 80] + "*\n\n") outf.close() aln = alignment(env, file=tfname, alignment_format="PIR") os.unlink(tfname) return aln
def test_no_files_left_behind(self):
    """_get_default_tempdir must not leave its probe files behind,
    neither on success nor when io.open / write() raise OSError."""
    # use a private empty directory
    our_temp_directory = tempfile.mkdtemp()
    try:
        # force _get_default_tempdir() to consider our empty directory
        def our_candidate_list():
            return [our_temp_directory]
        with support.swap_attr(tempfile, "_candidate_tempdir_list", our_candidate_list):
            # verify our directory is empty after _get_default_tempdir()
            tempfile._get_default_tempdir()
            self.assertEqual(os.listdir(our_temp_directory), [])
            def raise_OSError(*args, **kwargs):
                raise OSError(-1)
            with support.swap_attr(io, "open", raise_OSError):
                # test again with failing io.open()
                with self.assertRaises(IOError) as cm:
                    tempfile._get_default_tempdir()
                self.assertEqual(cm.exception.errno, errno.ENOENT)
                self.assertEqual(os.listdir(our_temp_directory), [])
            # Keep a reference to the real io.open so bad_writer can still
            # create the file before sabotaging its write method.
            open = io.open
            def bad_writer(*args, **kwargs):
                fp = open(*args, **kwargs)
                fp.write = raise_OSError
                return fp
            with support.swap_attr(io, "open", bad_writer):
                # test again with failing write()
                with self.assertRaises(IOError) as cm:
                    tempfile._get_default_tempdir()
                self.assertEqual(cm.exception.errno, errno.ENOENT)
                self.assertEqual(os.listdir(our_temp_directory), [])
    finally:
        shutil.rmtree(our_temp_directory)
def get_new_temp_file_path(extension):
    """Return a fresh (not yet created) path in the default temp
    directory whose name ends in '.' + extension."""
    directory = tempfile._get_default_tempdir()
    basename = next(tempfile._get_candidate_names()) + "." + extension
    return os.path.join(directory, basename)
def tearDown(self):
    """Assert that at most 3 leftover 'tmp*TempDirCont' entries remain
    in the default temp directory after the test."""
    temp_root = tempfile._get_default_tempdir()
    leftovers = []
    for entry in os.listdir(temp_root):
        if entry.startswith("tmp") and entry.endswith("TempDirCont"):
            leftovers.append(os.path.join(temp_root, entry))
    self.assertLessEqual(len(leftovers), 3)
# tidy hotspot list hotspots = None if args.hotspot_list is not None: hdf = pd.read_table(args.hotspot_list, sep='\t') hdf = tidy_split(hdf, 'Variants') hdf['Variant'] = hdf['Variants'].str.split(':').apply(lambda x: x[0]) hgvsp = [] for i, row in hdf.iterrows(): if re.match(r'^[A-Z]', row['Residue']): hgvsp.append('p.' + row['Residue'] + row['Variant']) else: hgvsp.append('p.' + row['Variant']) hdf['HGVSp'] = hgvsp hotspots = set(hdf['Gene'] + ":" + hdf['HGVSp']) maf_file = '{}/{}'.format(tempfile._get_default_tempdir(), next(tempfile._get_candidate_names())) # run vcf2maf FNULL = open(os.devnull, 'w') cmd = '{vcf2maf} {vcf2maf_opts}' \ ' --ref-fasta {ref_fasta}' \ ' --input-vcf {vcf}' \ ' --filter-vcf {filter_vcf}' \ ' --vep-forks {vep_forks}' \ ' --output-maf {maf}'.format(vcf2maf=args.vcf2maf, vcf2maf_opts=args.vcf2maf_opts, ref_fasta=args.ref_fasta, vcf=args.vcf_infile, vep_forks=args.vep_forks, filter_vcf=args.filter_vcf, maf=maf_file) retcode = subprocess.call(cmd, shell=True, stdout=FNULL, stderr=FNULL) if retcode != 0: sys.stderr.write(cmd + '\n')
def parseExcelFile(self, excelFile):
    """Convert *excelFile* to a temporary CSV, load the rows into
    self.people, and always remove the temporary file afterwards.
    """
    # os.path.join is the portable way to build the path (was a manual
    # os.path.sep concatenation).
    tmp = os.path.join(tempfile._get_default_tempdir(), 'beboere.csv')
    self.convertToCsv(excelFile, tmp)
    try:
        self.people = self.loadFromCsv(tmp)
    finally:
        # Clean up even if loading fails, so the temp CSV never lingers.
        os.remove(tmp)
def _getTempDirName(prefix=""): return os.path.join(tempfile._get_default_tempdir(), prefix + next(tempfile._get_candidate_names()))
def _get_temp_filename(cls): return os.path.join(tempfile._get_default_tempdir(), next(tempfile._get_candidate_names()))
def generate_tmp_filename(extension):
    """Return a unique temp-file path ending in '.' + extension.

    Uses os.path.join instead of a hard-coded '/' so the result is a
    valid native path on every platform (same output on POSIX).
    """
    return os.path.join(tempfile._get_default_tempdir(),
                        next(tempfile._get_candidate_names()) + "." + extension)
def get_temporary_path():
    """Return a unique candidate path inside the default temp directory."""
    return os.path.join(tempfile._get_default_tempdir(),
                        next(tempfile._get_candidate_names()))
import HTMLParser import tempfile from tweepy import OAuthHandler from tweepy import Stream from tweepy.streaming import StreamListener from PIL import ImageFont from vlc import * try: import config except ImportError: sys.stderr.write("You need to configure the config.py file. Copy config.py.sample to config.py, then edit.\n") sys.exit(1) threads = [] sockfile = tempfile._get_default_tempdir() + "/gogomovietwit" + next(tempfile._get_candidate_names()) FONTSIZE=18 # TODO: This class has external dependencies class GogoMovieTwitListener(StreamListener): def __init__(self, font, fontsize, videowidth): self.font = ImageFont.truetype(font, fontsize) self.vidw = videowidth def filter_content(self, tweet): """ Returns True when the content should be filtered """ with open(config.dynamicfilterfile, "r") as f: for line in f: if line[0] == ";":
def get_temp_location():
    """Return the JModelica.org scratch directory: per-user when $USER is
    set, otherwise directly under the default temp directory."""
    base = tempfile._get_default_tempdir()
    try:
        user = os.environ['USER']
    except KeyError:
        return os.path.join(base, 'JModelica.org')
    return os.path.join(base, user, 'JModelica.org')