Example no. 1
    def test_create_new_files(self):
        """Test that create_new_files copies the needed files into each subdirectory.
        """
        # Create placeholders for the data, config, and MCNP input files
        with contextlib.nested(
                NTF(prefix='data_', dir=os.curdir),
                NTF(prefix='cfg_', dir=os.curdir),
                NTF(prefix='inpp_', dir=os.curdir)) as \
                (dataNTF, cfgNTF, inppNTF):

            cfgNTF.write("photon_isotope = @rb!+r@Ry\t\n" \
                    "photon_cooling =  @rb!+r@Ry\t\n" \
                    "neutron_mcnp_input = @rb!+r@Ry\t\n" \
                    "alara_phtn_src = @rb!+r@Ry\t\n" \
                    "photon_mcnp_input = @rb!+r@Ry\t\n"
                    )
            cfgNTF.seek(0)

            inppNTF.write("Fake title card line\nAnother line.\n")
            inppNTF.seek(0) # Goes to beginning
        
            s2s.create_new_files(self.path_list, dataNTF.name, cfgNTF.name, 
                    "mcnp_n.inp", inppNTF.name, "phtn_src")

            # Check that files were copied
            for folder in os.listdir(os.curdir):
                if os.path.isdir(os.path.join(os.curdir,folder)):
                    self.assertTrue(os.path.exists( \
                            os.path.join(os.curdir, folder, dataNTF.name)))
                    self.assertTrue(os.path.exists( \
                            os.path.join(os.curdir, folder, inppNTF.name)))
                    self.assertTrue(os.path.exists( \
                            os.path.join(os.curdir, folder, cfgNTF.name)))
Example no. 2
    def test_load_config_files(self):
        """Simulate a .cfg file and check that file names are read correctly.
        """
        with contextlib.nested(
                NTF(prefix='data_',dir=os.curdir),
                NTF(prefix='cfg_', dir=os.curdir),
                NTF(prefix='phtn_', dir=os.curdir)) as \
                (dataNTF, cfgNTF, phtnNTF):
            # Create placeholder for r2s.cfg file
            cfgNTF.write("[r2s-files]\n" \
                    "neutron_mcnp_input = mcnp_n\n" \
                    "photon_mcnp_input = mcnp_p\n" \
                    "step1_datafile = {0}\n" \
                    "alara_phtn_src = {1}\n".format(dataNTF.name, phtnNTF.name)
                    )
            cfgNTF.seek(0)  # Goes to beginning

            config = ConfigParser.SafeConfigParser()
            config.read(cfgNTF.name)

            datafile, phtn_src, mcnp_n, mcnp_p, gammas = \
                    s2.load_config_files(config)

            # Check for correctness
            self.assertEqual(mcnp_n, 'mcnp_n')
            self.assertEqual(mcnp_p, 'mcnp_p')
            self.assertEqual(datafile, dataNTF.name)
            self.assertEqual(phtn_src, phtnNTF.name)
            self.assertEqual(gammas, 'gammas')
Example no. 3
def write(config="", spec="", configext='.py'):
    with NTF(suffix=configext) as f_config:
        with NTF(suffix=".py") as f_spec:
            f_config.write(config)
            f_config.flush()
            f_config.seek(0)

            f_spec.write(spec)
            f_spec.flush()
            f_spec.seek(0)

            yield (f_config, f_spec)
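
The generator above yields the two prepared temporary files, so it is presumably wrapped with contextlib.contextmanager in the source project (the decorator is not shown in this excerpt). A minimal usage sketch under that assumption; the config/spec payloads are illustrative and passed as bytes because NTF opens in binary mode by default:

import contextlib

write_files = contextlib.contextmanager(write)

with write_files(config=b"a = 1\n", spec=b"a = required(type=int)\n") as (f_config, f_spec):
    # Both temporary files are flushed and rewound inside the generator,
    # so they can be re-read by name while the block is active.
    print(f_config.name, f_spec.name)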
Example no. 4
    def test_load_config_params(self):
        """Simulate a .cfg file and check that parameters are read correctly.
        """
        with NTF() as myNTF:
            # Create placeholder for r2s.cfg file
            myNTF.write("[r2s-params]\n" \
                    "photon_isotope = u235\n" \
                    "photon_cooling = shutdown\n" \
                    "sampling = u\n" \
                    "custom_ergbins = True\n" \
                    "photon_bias = True\n" \
                    "cumulative = True\n" \
                    "add_fmesh_card = False\n"
                    "resample = True\n"
                    )
            myNTF.seek(0)  # Goes to beginning

            config = ConfigParser.SafeConfigParser()
            config.read(myNTF.name)

            (opt_isotope, opt_cooling, opt_sampling, opt_ergs, opt_bias,
                    opt_cumulative, opt_phtnfmesh, resample, uni_resamp_all) = \
                    s2.load_config_params(config)
            # Check for correctness
            self.assertEqual(opt_isotope, 'u235')
            self.assertEqual(opt_cooling, 'shutdown')
            self.assertEqual(opt_sampling, 'u')
            self.assertTrue(opt_ergs)
            self.assertTrue(opt_bias)
            self.assertTrue(opt_cumulative)
            self.assertFalse(opt_phtnfmesh)
            self.assertTrue(resample)
            self.assertFalse(uni_resamp_all)
Example no. 5
def blast_sequence(sequence,
                   database,
                   extra_options=[],
                   query_name='Test',
                   application='blastn',
                   evalue=10,
                   output_str="6 sseq"):
    results = None
    output_file = None
    sequence_file = None
    try:
        sequence_file = generate_query(query_name, sequence)
        output_file = NTF(dir='.', delete=False)
        param_list = [
            os.path.join(BLAST_PATH, application), '-db', database, '-query',
            sequence_file.name, '-evalue',
            str(evalue), '-out', output_file.name, '-outfmt', output_str
        ]
        param_list += extra_options
        logging.info("starting blast run: {}".format(param_list))
        with Popen(param_list, stdout=PIPE, stdin=PIPE) as proc:
            output, err = proc.communicate()
            if proc.wait() < 0:
                raise Exception(err)
            results = output_blast_analyze(output_file.name)
            logging.info("Finished blast run {} results".format(len(results)))
    except Exception as e:
        logging.error("Failed to run {} on DB {} seq {}. ERROR: {}".format(
            application, database, sequence, e))
    finally:
        if sequence_file is not None and os.path.exists(sequence_file.name):
            os.remove(sequence_file.name)
        if output_file is not None and os.path.exists(output_file.name):
            os.remove(output_file.name)
    return results
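
A usage sketch with placeholder arguments; the database name and query sequence are illustrative, and the shape of each result depends on output_blast_analyze:

hits = blast_sequence("ACGTACGTACGTACGT", "my_blast_db", evalue=1e-3)
if hits is not None:
    # blast_sequence returns None when the run failed and was logged.
    print("{} subject sequences aligned".format(len(hits)))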
Example no. 6
    def test_load_configs(self):
        """Simulate a .cfg file and check that everything is read correctly.
        """
        with NTF() as cfgNTF:
            # Create placeholder for r2s.cfg file
            cfgNTF.write("[r2s-files]\n" \
                         "neutron_mcnp_input = mcnp_n\n" \
                         "photon_mcnp_input = mcnp_p\n" \
                         "step1_datafile = mesh.h5m\n" \
                         "alara_phtn_src = phtn_src\n" \
                         "[r2s-params]\n" \
                         "photon_isotope = u235\n" \
                         "photon_cooling = shutdown\n" \
                         )
            cfgNTF.seek(0) # Goes to beginning

            config = ConfigParser.SafeConfigParser()
            config.read(cfgNTF.name)

        mcnp_n, mcnp_p, datafile, phtn_src, iso, cool = s2s.load_configs(config)

        # Check for correctness
        self.assertEqual(mcnp_n, 'mcnp_n')
        self.assertEqual(mcnp_p, 'mcnp_p')
        self.assertEqual(datafile, 'mesh.h5m')
        self.assertEqual(phtn_src, 'phtn_src')
        self.assertEqual(iso, 'u235')
        self.assertEqual(cool, 'shutdown')
Example no. 7
    def write(self, output_path: Path) -> None:
        """Writes the report to the given path."""
        toc = {"xsl-style-sheet": self.toc_fname}
        tmp_prefix = str(Path.cwd()) + "/"
        cover_ctx = {
            "sample_name": self.sample_name,
            "run_name": self.run_name,
            "pipeline_version": self.pipeline_version,
            "timestamp": self.timestamp,
            "css_fname": self.css_fname,
            "imgs_dir": self.imgs_dir,
        }
        contents_ctx = self.summary
        contents_ctx["css_fname"] = self.css_fname
        contents_ctx["imgs_dir"] = self.imgs_dir

        with NTF(prefix=tmp_prefix, suffix=".html") as cov_fh:
            cov_txt = self.cover_tpl.render(**cover_ctx)
            cov_fh.write(cov_txt.encode("utf-8"))
            cov_fh.seek(0)

            con_txt = self.contents_tpl.render(**contents_ctx)
            pdfkit.from_string(con_txt,
                               str(output_path),
                               options=self.pdfkit_opts,
                               css=self.css_fname,
                               toc=toc,
                               cover=cov_fh.name,
                               cover_first=True)
Example no. 8
    def write(self, A, image_vector_dict):

        from tempfile import NamedTemporaryFile as NTF
        from tempfile import mkdtemp
        import pickle
        import os.path

        prefix = 'LO_{0:.1f}_{1:.2f}_{2:.2f}_'.format(A.d, A.h_step, A.g_step)
        dir_ = mkdtemp(prefix=prefix, dir=self.dir_name)
        np.savez(
            os.path.join(dir_, 'arrays'),
            G2state=A.G2state,
            state_list=A.state_list,
            eigenvector=A.eigenvector,
        )

        image_dict = {}
        for key, value in image_vector_dict.items():
            vec_file = NTF(prefix='image_{0}'.format(key),
                           dir=dir_,
                           delete=False)
            value.tofile(vec_file)
            image_dict[key] = os.path.basename(vec_file.name)
        dict_ = {'image_dict': image_dict}

        # Use s for attributes of LO that are not np.arrays so that d,
        # h_step, g_step can be read quickly for dictionary key
        s = {}
        for key in '''d h_step g_step iterations
        n_states n_pairs shape n_g n_h origin_h origin_g'''.split():
            s[key] = getattr(A, key)
        dict_['self'] = s
        with open(os.path.join(dir_, 'dict.pickle'), 'wb') as file_:
            pickle.dump(dict_, file_, 2)
        return
Example no. 9
def fetch_seq_tlbout(tlbout_path: str, fasta_orig: str) -> List[Dict[str, str]]:
    results = []
    ssi_file = None
    temp_fasta_out = None
    try:
        ssi_file = "{}.ssi".format(fasta_orig)
        if os.path.exists(ssi_file):
            # index already existed, so don't create it here or remove it later
            ssi_file = None
        else:
            param_list = [ESL_FETCH, '--index', fasta_orig]
            logging.info("generating index {}".format(param_list))
            with Popen(param_list, stdout=PIPE, stdin=PIPE) as proc:
                proc.communicate()
        temp_fasta_out = NTF(dir='.', delete=False)
        temp_fasta_out.close()
        param_list = ['sh', SHELL_SEQ_SCRIPT, tlbout_path, fasta_orig, temp_fasta_out.name]
        logging.info("Retrieving sequence {}".format(param_list))
        with Popen(param_list, stdout=PIPE, stdin=PIPE) as proc:
            proc.communicate()
        if os.path.exists(temp_fasta_out.name):
            for seq_record in SeqIO.parse(temp_fasta_out.name, "fasta"):
                results.append({"target name": seq_record.id, "sequence": "{}".format(seq_record.seq)})
        else:
            raise Exception("fasta file not created")
    except Exception as exc:
        logging.error("Failed to retrieve sequences {}".format(exc))
    finally:
        if temp_fasta_out is not None and os.path.exists(temp_fasta_out.name):
            os.remove(temp_fasta_out.name)
        if ssi_file is not None and os.path.exists(ssi_file):
            os.remove(ssi_file)
    return results
Example no. 10
def download(username, password, my_name, remote_name, remote_ip, path, filename, service_name):
    conn = connect(username, password, my_name, remote_name, remote_ip)
    if conn:
        attr = conn.getAttributes(service_name, path+filename)

        filesize_kb = attr.file_size / 1024.0
        filesize_mb = attr.file_size / (1024.0 * 1024.0)

        print('File %s = %.2f Mbytes' % (filename, filesize_mb))
        print('\nstart download...')

        tmpf = NTF()
        try:
            t = timer()
            conn.retrieveFile(service_name, path+filename, tmpf)
            elapsed = timer() - t
        except Exception as e:
            print(e)
            # Bail out: 'elapsed' is undefined if the transfer failed
            tmpf.close()
            conn.close()
            return

        print('Speed = %.2f KBytes/s = %.2f MBytes/s' % (filesize_kb / elapsed, filesize_mb / elapsed))

        shutil.copy(tmpf.name, filename)

        tmpf.close()

        os.chmod(filename, 0o666)

        print('download finished')

        conn.close()
Example no. 11
def generate_temp_ct(structure, sequence, title=''):
    comp_map = {}
    closing_stack = []
    for i in range(0, len(sequence)):
        if structure[i] == '(':
            closing_stack.append(i)
        elif structure[i] == ')':
            start_index = closing_stack.pop()
            comp_map[start_index] = i
            comp_map[i] = start_index
    temp_file = NTF(dir='.', delete=False, suffix='.ct', mode='w')
    temp_file.write('{}\t{}\n'.format(len(structure), title))
    for i in range(0, len(sequence)):
        next_item = i + 2
        if next_item > len(sequence):
            next_item = 0
        comp = comp_map.get(i)
        if comp is None:
            comp = 0
        else:
            comp += 1
        line = "{}\t{}\t{}\t{}\t{}\t{}\n".format(i + 1, sequence[i], i, next_item, comp, i)
        temp_file.write(line)
    temp_file.close()
    return temp_file.name
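
A usage sketch; because the .ct file is created with delete=False, the caller is responsible for removing it (the structure and sequence here are illustrative):

import os

ct_path = generate_temp_ct('((...))', 'GCAAAGC', title='toy hairpin')
try:
    with open(ct_path) as handle:
        print(handle.read())
finally:
    os.remove(ct_path)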
Example no. 12
def run_incaRNAtion(structure,
                    amount_to_generate,
                    gc_content=0.5,
                    sequence_constraints=None):
    result = []
    attempt = 0
    if sequence_constraints is None:
        sequence_constraints = 'N' * len(structure)
    tmp_file = NTF(dir='.', delete=False)
    try:
        tmp_file.write('{}\n'.format(structure).encode())
        tmp_file.close()
        #param_list = [INCARNATION_PATH, '-a', '1', '-d', tmp_file.name,
        param_list = [
            PYTHON_PATH, INCARNATION_PATH, '-a', '1', '-d', tmp_file.name,
            '-c', sequence_constraints, '-no_profile', '-s_gc',
            str(gc_content)
        ]
        while len(result) < amount_to_generate and attempt < MAX_ATTEMPT:
            attempt += 1
            param_list.append(str(amount_to_generate - len(result)))
            result.extend(_single_run(param_list))
            result = list(set(result))  # remove duplicates
            param_list.pop()
    finally:
        os.remove(tmp_file.name)
    return result
Example no. 13
def search_cm(cm_file_path: str, seqdb_path: str, debug: bool=False,
              res_type: ResType=ResType.ERIC, inc_e: float=None, cpus: int=None) -> List[Dict[str, str]]:
    def merge_eric(table_results: List[Dict[str, str]], eric_results: List[Dict[str, str]]):
        for eric_res, table_res in zip(eric_results, table_results):
            eric_target = eric_res.get("target name")
            target, loc_str = eric_target.rsplit('/', 1)
            seq_from, seq_to = loc_str.split('-', 1)
            if table_res.get('target name') != target or table_res.get('seq from') != seq_from \
                    or table_res.get('seq to') != seq_to:
                logging.error("Rows do not match: table - {} eric - {}".format(table_res, eric_res))
            if table_res.get('target name') == target:
                table_res['sequence'] = eric_res.get('sequence')

    results = None
    temp_out = None
    try:
        temp_out = NTF(dir='.', delete=False)
        temp_out.close()
        param_list = [os.path.join(INFENRAL_PATH, CMSEARCH_EXE), '--tblout', temp_out.name]
        if inc_e is not None:
            param_list += ['--incE', str(inc_e)]
        if cpus is not None:
            param_list += ['--cpu', str(cpus)]
        param_list += [cm_file_path, seqdb_path]
        logging.info("Starting cm search {}".format(param_list))
        with Popen(param_list, stdout=PIPE, stdin=PIPE) as proc:
            output, err = proc.communicate()
            ret_code = proc.wait()
            if ret_code < 0:
                raise Exception(err)
            # keeping this to compare for errors!
            with open(temp_out.name, 'r') as output_file:
                results = ''
                for line in output_file:
                    results += line
            tlbout_results = output_search_analyze(results)
            if res_type == ResType.ERIC:
                # getting actual results (sequences)
                results = fetch_seq_tlbout(temp_out.name, seqdb_path)
                if len(tlbout_results) != len(results):
                    logging.warning("Something strange in infernal result analysis. tlbout lines = {},"
                                    " esl_sfetch lines = {}\n{}\n{}".format(len(tlbout_results), len(results),
                                                                            tlbout_results, results))
                else:
                    merge_eric(tlbout_results, results)
                    results = tlbout_results
            elif res_type == ResType.TBLOUT:
                results = tlbout_results
            logging.info("Finished cm search {} results".format(len(results)))
    except Exception as e:
        logging.error("Failed to search cm file {} on sequence db {}. ERROR: {}"
                      .format(cm_file_path, seqdb_path, e))
    finally:
        if temp_out is not None and os.path.exists(temp_out.name):
            if not debug:
                os.remove(temp_out.name)
            else:
                logging.info("Finished debug run on cm: {} output: {}".format(cm_file_path, temp_out.name))
    return results
Example no. 14
def generate_fasta(sequences: Dict[str, str]) -> NTF:
    tmp_file = NTF(mode='w+', dir='.', delete=False, encoding="utf-8")
    for topic, sequence in sequences.items():
        tmp_file.write('> {}\n'.format(topic))
        for fasta_line in [sequence[i:i+FASTA_LINE_LENGTH] for i in range(0, len(sequence), FASTA_LINE_LENGTH)]:
            tmp_file.write('{}\n'.format(fasta_line))
    tmp_file.close()
    return tmp_file
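
A usage sketch; generate_fasta returns an already-closed NamedTemporaryFile created with delete=False, so the caller removes it when done (the sequences here are illustrative):

import os

fasta = generate_fasta({"seq1": "ACGU" * 30, "seq2": "GGCC" * 10})
try:
    with open(fasta.name) as handle:
        print(handle.read())
finally:
    os.remove(fasta.name)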
Example no. 15
def generate_stockholm(sequence: str, structure: str=None) -> NTF:
    if structure is None:
        structure = vienna.fold(sequence)['MFE']
    tmp_file = NTF(mode='w+', dir='.', delete=False, encoding="utf-8")
    tmp_file.write('{}\n'.format(STOCKHOLM_FORMAT))
    tmp_file.write('seq1\t{}\n'.format(sequence))
    tmp_file.write('#=GC SS_cons\t{}\n//'.format(structure))
    tmp_file.close()
    return tmp_file
Example no. 16
def softioc():
    with NTF(mode='w+') as cf, NTF(mode='w+') as df:
        cf.write(cas_rules)
        cf.flush()
        df.write(cas_test_db)
        df.flush()

        proc = subprocess.Popen([
            'softIoc', '-D',
            '/home/travis/mc/envs/testenv/epics/dbd/softIoc.dbd', '-m',
            'P=test', '-a', cf.name, '-d', df.name
        ],
                                stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE)
        yield proc

        proc.kill()
        proc.wait()
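
The generator yields the running softIoc process and tears it down afterwards, so it is presumably registered as a pytest fixture in the source project. A minimal sketch under that assumption; the registration call and test body are illustrative:

import pytest

# Equivalent to decorating the generator above with @pytest.fixture.
softioc = pytest.fixture(softioc)

def test_ioc_is_running(softioc):
    # The fixture yields the running softIoc subprocess; poll() is None
    # while the process is still alive.
    assert softioc.poll() is None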
Example no. 17
    def test_import_config(self):
        with NTF(suffix='.py') as f:
            f.write(self.config_text)
            f.flush()
            import_config(f.name)

            self.assertEqual(a, 1)
            self.assertEqual(c.d, 'abc')
            self.assertEqual(c.e, [])
Example no. 18
    def test_import_spec(self):
        with NTF(suffix='.py') as f:
            f.write(self.spec_text)
            f.flush()
            import_spec(f.name)

            self.assertEqual(a, required(type=int))
            self.assertEqual(b, optional(type=float, default=1.0))
            self.assertEqual(c.d, required(type=str))
            self.assertEqual(c.e, [required(type=str)])
Example no. 19
def generate_query(query_name, sequence):
    query_file = NTF(dir='.', delete=False)
    query_file.write('> {}\n'.format(query_name).encode())
    lines = [
        sequence[i:i + FASTA_MAX_LENGTH]
        for i in range(0, len(sequence), FASTA_MAX_LENGTH)
    ]
    for line in lines:
        query_file.write('{}\n'.format(line).encode())
    query_file.close()
    return query_file
Example no. 20
    def test_gen_iso_cool_lists6(self):
        """Test that exception is raised if cooling steps not found in phtn_src
        """
        isolistraw = "a, b, c,d"
        coollistraw = "1_d, 99_y, never"

        # Create placeholder for phtn_src file
        with NTF() as myNTF:
            myNTF.write(self.NTFcontents)
            myNTF.seek(0) # Goes to beginning
            self.assertRaises(R2S_CFG_Error, s2s.gen_iso_cool_lists, \
                    isolistraw, coollistraw, myNTF.name)
Example no. 21
    def test_gen_iso_cool_lists5(self):
        """Tests that exception is raised when cooling indices go too high.
        """
        isolistraw = "a, b, c,d"
        coollistraw = "1, 2, 3, 14"

        # Create placeholder for phtn_src file
        with NTF() as myNTF:
            myNTF.write(self.NTFcontents)
            myNTF.seek(0) # Goes to beginning
            self.assertRaises(R2S_CFG_Error, s2s.gen_iso_cool_lists, \
                    isolistraw, coollistraw, myNTF.name)
Example no. 22
def radar(protein):
    """Run RADAR on a protein in single sequence mode

    -P  filename of multiple alignment [provide -S file here too]
    -Q  filename of lfasta output
    -R  filename of sequence
    -S  filename of sequence lfasta file [optional]
    -V  level of verbosity

    Args:
        protein (Protein): Protein instance being analysed
    Returns:
        stdout (str): raw RADAR standard output describing detected repeats
    """

    with NTF() as blosum, NTF() as pam, NTF() as sequence:
        sequence.write(protein.sequence.encode())
        sequence.flush()

        # Run lfasta with BLOSUM50 matrix, then ensure we've gone back to start
        blosum.write(lfasta(sequence.name, "BLOSUM50").encode())
        sequence.seek(0)

        # Run lfasta with PAM250 matrix
        pam.write(lfasta(sequence.name, "PAM250").encode())

        blosum.seek(0)
        pam.seek(0)

        params = {
            "path": get_path("radar"),
            "-P": sequence.name,
            "-R": sequence.name,
            "-Q": blosum.name,
            "-S": pam.name,
        }

        return subprocess.run(make_command(params),
                              stdout=subprocess.PIPE,
                              universal_newlines=True).stdout
Example no. 23
def run_design(run_code: int,
               seed: str,
               target_sequence: str,
               target_structure: str,
               pseudoknots: str = None):
    general_run_logger.info('Starting design {}'.format(run_code))
    result_object = None
    temp_file = None
    try:
        temp_file = NTF(prefix='DESIGN',
                        encoding='utf-8',
                        mode='w',
                        delete=False)
        temp_file.write('TARGET_STRUCTURE={}\n'.format(target_structure))
        temp_file.write('TARGET_SEQUENCE={}\n'.format(target_sequence))
        temp_file.write('STARTING_SEQUENCE={}\n'.format(seed))
        temp_file.write('SEED={}\n'.format(random_gen.getrandbits(64)))
        temp_file.write('ITERATION={}\n'.format(1000))
        temp_file.flush()
        temp_file.close()
        result_object = RNAfbinvCL.main('-f {} --length 5'.format(
            temp_file.name))
        if result_object is not None:
            if result_object.score < 300 and result_object.score % 100 < 30:
                general_run_logger.info(
                    'Finished designing {}'.format(run_code))
                design_logger.info('{}\t{}\t{}\t{}\t{}\t{}\t{}'.format(
                    run_code, seed, result_object.sequence,
                    result_object.score, result_object.structure,
                    result_object.bp_dist, result_object.tree_edit_distance))
            else:
                general_run_logger.warning(
                    'Finished designing {}, score {}'.format(
                        run_code, result_object.score))
                result_object = None
        else:
            general_run_logger.error(
                'Failed to design sequence. run code: {} seed: {}'.format(
                    run_code, seed))
    except Exception:
        if result_object is None:
            general_run_logger.fatal(
                'Crashed while designing sequences. run code: {} seed: {}'.
                format(run_code, seed))
    finally:
        if temp_file is not None:
            try:
                os.remove(temp_file.name)
            except:
                pass
    return run_code, result_object
Example no. 24
def align_sequences(sequences, name=None, tool="mafft", cpu=2, trim_msa=False):
    """Align Sequence objects."""

    with NTF("w") as fna:
        fasta = "\n".join(s.fasta() for s in sequences)
        fna.write(fasta)
        fna.seek(0)
        msa = align(fna.name, tool=tool, name=name, cpu=cpu)

    if trim_msa:
        msa = trim(msa)

    return msa
Example no. 25
def mds(distmat, k, outfile=None):
    """perform <k>-dimensional mds using distance matrix in <distmat>"""
    if not outfile: outfile = distmat + ".coord"
    if os.path.exists(outfile):
        debug('reusing MDS result: %s' % outfile)
        return outfile
    mds_p = mds_r_program % (distmat, k, outfile)
    n = NTF()
    n.write("%s" % mds_p)
    n.flush()
    cmd = "R CMD BATCH %s %s.Rout" % (n.name, n.name)
    os_run(cmd, msg='cannot run MDS')
    n.close()
    os.unlink(n.name + ".Rout")
    return outfile
Example no. 26
    def test_gen_iso_cool_lists2(self):
        """Test that string cooling steps are correctly accepted
        """
        isolistraw = "a, b, c,d"
        coollistraw = "1_d, 99 y"
        
        # Create placeholder for phtn_src file
        with NTF() as myNTF: 
            myNTF.write(self.NTFcontents)
            myNTF.seek(0) # Goes to beginning
            isolist, coollist = s2s.gen_iso_cool_lists(isolistraw, \
                coollistraw, myNTF.name)

        self.assertEqual(isolist, ["a", "b", "c", "d"])
        self.assertEqual(coollist, ["1_d", "99 y"])
Example no. 27
    def test_BaseModel_IO(self):

        from tempfile import NamedTemporaryFile as NTF
        import os

        c = random_corpus(1000, 50, 6, 100)
        with NTF(delete=False, suffix='.npz') as tmp:
            m0 = BaseModel(c.corpus, 'context')
            m0.save(tmp.name)
            m1 = BaseModel.load(tmp.name)

            self.assertEqual(m0.context_type, m1.context_type)
            self.assertTrue((m0.matrix == m1.matrix).all())

        os.remove(tmp.name)
Example no. 28
    def test_handle_phtn_data_string_cooling2(self):
        """Tests handling of phtn_src file with string cooling step
        """
        with NTF() as gammaNTF:
            s2.handle_phtn_data(self.meshfile_new,
                                self.phtnfile,
                                'TOTAL',
                                '1 d',
                                'v',
                                False,
                                False,
                                False,
                                False,
                                False,
                                gammas=gammaNTF.name)
Example no. 29
    def test_handle_phtn_data_integer_cooling(self):
        """Tests handling of phtn_src file with numeric cooling step
        """
        with NTF() as gammaNTF:
            s2.handle_phtn_data(self.meshfile_new,
                                self.phtnfile,
                                'TOTAL',
                                '1',
                                'v',
                                False,
                                False,
                                False,
                                False,
                                False,
                                gammas=gammaNTF.name)
Example no. 30
    def test_gen_iso_cool_lists4(self):
        """Test getting string cooling times from phtn_src file w/out of order cooling numbers.
        """
        isolistraw = "a, b, c, d"
        coollistraw = "3, 1, 0, 2"

        # Create placeholder for phtn_src file
        with NTF() as myNTF:
            myNTF.write(self.NTFcontents)
            myNTF.seek(0) # Goes to beginning
            isolist, coollist = s2s.gen_iso_cool_lists(isolistraw, \
                coollistraw, myNTF.name)

        self.assertEqual(isolist, ["a", "b", "c", "d"])
        self.assertEqual(coollist, ["shutdown", "1_d", "2 w", "99 y"])