def test_generate_target_from_supercells(self):
        """Test generation of a structure based on a target cluster vector and a list
        of supercells"""
        target_cv = [1., 0., 0., -1., 0., 1., 0., 0., 0., 0., 0., 0., 0.]
        target_conc = {'Au': 0.5, 'Pd': 0.5}
        kwargs = dict(cluster_space=self.cs,
                      target_concentrations=target_conc,
                      target_cluster_vector=target_cv,
                      n_steps=500,
                      random_seed=42,
                      optimality_weight=0.3)

        supercells = [self.prim.repeat((2, 2, 1)), self.prim.repeat((2, 1, 1))]
        # This should be simple enough to always work
        structure = generate_target_structure_from_supercells(
            supercells=supercells, **kwargs)
        self.assertTrue(
            np.allclose(self.cs.get_cluster_vector(structure), target_cv))

        # Remove existing log handlers before redirecting output to a log file
        for handler in logger.handlers:
            handler.close()
            logger.removeHandler(handler)

        # Use supercells that do not fit
        supercells = [self.prim.repeat((2, 2, 1)), self.prim.repeat((3, 1, 1))]
        logfile = NamedTemporaryFile(mode='w+', encoding='utf-8')
        set_log_config(filename=logfile.name)
        structure = generate_target_structure_from_supercells(
            supercells=supercells, **kwargs)
        logfile.seek(0)
        lines = logfile.readlines()
        logfile.close()
        self.assertIn('At least one supercell was not commensurate', lines[0])
        self.assertTrue(
            np.allclose(self.cs.get_cluster_vector(structure), target_cv))

        # Use two supercells that do not fit
        supercells = [self.prim.repeat((3, 3, 1)), self.prim.repeat((3, 1, 1))]
        logfile = NamedTemporaryFile(mode='w+', encoding='utf-8')
        set_log_config(filename=logfile.name)
        with self.assertRaises(ValueError) as cm:
            generate_target_structure_from_supercells(supercells=supercells,
                                                      **kwargs)
        logfile.seek(0)
        lines = logfile.readlines()
        logfile.close()
        self.assertEqual(len(lines), 1)  # Warning should be issued once
        self.assertIn('At least one supercell was not commensurate', lines[0])
        self.assertIn('No supercells that may host the specified',
                      str(cm.exception))
Example #2
    def solve(self, cnf):
        s = Solution()

        infile = NamedTemporaryFile(mode='w')
        outfile = NamedTemporaryFile(mode='r')

        io = DimacsCnf()
        infile.write(io.tostring(cnf))
        infile.flush()

        ret = call(self.command % (infile.name, outfile.name), shell=True)

        infile.close()

        if ret != 10:
            return s

        s.success = True

        lines = outfile.readlines()[1:]

        for line in lines:
            varz = line.split(" ")[:-1]
            for v in varz:
                v = v.strip()
                value = v[0] != '-'
                v = v.lstrip('-')
                vo = io.varobj(v)
                s.varmap[vo] = value

        # Close deletes the tmp files
        outfile.close()

        return s
Example #3
    def test_generate_sqs_from_supercells(self):
        """Test generation of SQS structure from list of supercells"""
        target_conc = {'Au': 0.5, 'Pd': 0.5}
        kwargs = dict(cluster_space=self.cs,
                      target_concentrations=target_conc,
                      n_steps=500,
                      random_seed=42,
                      optimality_weight=0.0)

        supercells = [self.prim.repeat((2, 2, 1)), self.prim.repeat((2, 1, 1))]
        structure = generate_sqs_from_supercells(supercells=supercells,
                                                 **kwargs)
        target_cv = [1., 0., 0., -1., 0., 1., 0., 0., 0., 0., 0., 0., 0.]
        self.assertTrue(
            np.allclose(self.cs.get_cluster_vector(structure), target_cv))

        # Remove existing log handlers before redirecting output to a log file
        for handler in logger.handlers:
            handler.close()
            logger.removeHandler(handler)

        # Test with supercell that does not match
        supercells = [self.prim]
        logfile = NamedTemporaryFile(mode='w+', encoding='utf-8')
        set_log_config(filename=logfile.name)
        with self.assertRaises(ValueError) as cm:
            generate_sqs_from_supercells(supercells=supercells, **kwargs)
        logfile.seek(0)
        lines = logfile.readlines()
        logfile.close()
        self.assertEqual(len(lines), 1)
        self.assertIn('At least one supercell was not commensurate', lines[0])
        self.assertIn('No supercells that may host the specified',
                      str(cm.exception))
Example #4
 def test_AllTypes_RandomDiam(self):
     for rule_type in ['all', 'retro', 'forward']:
         # for i in range(len(self.diameters)):
         i = 3
         diams = list(
             combinations(
                 self.diameters,
                 i+1
             )
         )
         seed(2)
         sub_diams = sample(diams, 1)
         for diam in sub_diams:
             with self.subTest(rule_type=rule_type, diam=diam):
                 outfile = NamedTemporaryFile(delete=False)
                 parse_rules(
                     rules_file = 'retrorules',
                     outfile    = outfile.name,
                     rule_type  = rule_type,
                     diameters  = ','.join(diam)
                 )
                 # Test if outfile has more than one single line (header)
                 self.assertGreater(
                     len(outfile.readlines()),
                     1
                 )
                 outfile.close()
                 unlink(outfile.name)
Example #5
    @contextmanager  # assumed decorator (from contextlib); the yield below implies a context manager
    def cargo_toml_context():
        tmp_file = NamedTemporaryFile(buffering=0)  # unbuffered; requires the default binary mode
        with open("Cargo.toml", "rb") as f:
            tmp_file.writelines(f.readlines())

        cargo_file = toml.load("Cargo.toml")

        cargo_file.setdefault("patch",
                              {}).setdefault("crates-io", {})["jsonschema"] = {
                                  "path":
                                  os.environ["UNRELEASED_JSONSCHEMA_PATH"],
                              }

        with open("Cargo.toml", "w") as f:
            toml.dump(cargo_file, f)

        try:
            print(
                "Modified Cargo.toml file by patching jsonschema dependency to {}"
                .format(os.environ["UNRELEASED_JSONSCHEMA_PATH"]),
                file=sys.stderr,
            )
            yield
        except:
            print("Cargo.toml used during the build", file=sys.stderr)
            with open("Cargo.toml", "r") as f:
                print(f.read(), file=sys.stderr)

            raise
        finally:
            with open("Cargo.toml", "wb") as f:
                tmp_file.seek(0)
                f.writelines(tmp_file.readlines())
Example #6
def main():
    parser = argparse.ArgumentParser(description="Check files.")
    parser.add_argument('infile',
                        type=str,
                        nargs=1,
                        help='Specify the path to the file of interest.')
    parser.add_argument('-out',
                        '--outfile',
                        type=str,
                        nargs=1,
                        default=[None],
                        help='Specify the path to the outfile.')
    args = parser.parse_args()
    infile = args.infile[0]
    new_file = NamedTemporaryFile(delete=False)
    with open(infile, 'rb') as f:
        for line in f:
            if not line.startswith(b'#'):  # bytes literal: the input is read in binary mode
                new_file.write(line)
    new_file.seek(0)  # rewind before reading back what was just written
    for line in new_file.readlines():
        if not is_number(line.rstrip().split(b'\t')[-1]):
            print('du', line)
    print('no errors')
    fs = FileSort(new_file, args.outfile[0])
    fs.sort()
Example #7
    def modify_guest(self):
        # We swap out the default kickstart for one that forces mptspi
        # This results in a bootable guest both during KVM customize and when
        # run on vsphere
        # By adding this at JEOS install time we leave open the possibility
        # of modifying the module/boot environment during customization,
        # for example, via the install of vmware tools.
        
        # Use this just to get a known unique temp file name
        new_kickstart = NamedTemporaryFile(delete = False)
        new_kickstart_name = new_kickstart.name
        new_kickstart.close()

        # This step does the rootpw substitution for us
        # modified file ends up in new_kickstart_name
        # This is slightly naughty since we use an internal Oz function
        self.guest._copy_kickstart(new_kickstart_name)

        # read the now root-pw-substituted kickstart
        new_kickstart = open(new_kickstart_name, "r")        
        ks_lines = new_kickstart.readlines()
        new_kickstart.close()

        # Add our device line
        new_kickstart = open(new_kickstart_name, "w")
        for ks_line in ks_lines:
            new_kickstart.write(ks_line)
            if re.match("install", ks_line):
                new_kickstart.write("device scsi mptspi\n")
        new_kickstart.close()

        # Tell Oz that we want it to use this ks.cfg rather than the built in one
        self.guest.auto = new_kickstart_name
Example #8
def _convertDiff(filename, do_diff=False):
    m = Mesh(3)
    sf = filename[filename.rindex('.'):]
    if sf == '.smesh':
        sf_o = '.ply'
    else:
        sf_o = '.smesh'
    try:
        m.parse(filename)
        t = NamedTemporaryFile(suffix=sf_o)
        m.write(t.name)
        m.parse(t.name)
        t2 = NamedTemporaryFile(suffix=sf)
        fn2 = m.write(t2.name)
    except Exception as e:
        t.seek(0)
        open('/tmp/error.log', 'w').writelines(t.readlines())
        print((traceback.format_exc()))
        raise e
    #diff = difflib.unified_diff( open( filename ).readlines(), open( fn2 ).readlines() )
    if do_diff:
        print('-' * 15, 'generating diff output, convertDiff')
        diff = difflib.HtmlDiff().make_file(
            open(filename).readlines(),
            open(fn2).readlines())
        open('%s.convert.diff.html' % filename, 'w').writelines(diff)
Example #9
def runDCBScodeInMatlab(fastaPath, cdsCount):
    fout = NamedTemporaryFile(mode='r', delete=(not debugMode))

    print("Calculating DCBS...")
    # /usr/local.cc/bin/matlab -nodisplay -nodesktop -nosplash -singleCompThread -nojvm -r "cds=fastaread('/var/tmp/pbs.9025895.power8.tau.ac.il/tmpIzkenm');csvwrite('/var/tmp/pbs.9025895.power8.tau.ac.il/tmpIzkenm.csv',Compute_DCBS({cds.Sequence}));quit()"
    cmdline = ((
        config.MatlabPath, "-nodisplay", "-nodesktop", "-nosplash",
        "-singleCompThread", "-nojvm", "-r",
        "cds=fastaread('{}');csvwrite('{}',Compute_DCBS({{cds.Sequence}}));quit()"
        .format(fastaPath, fout.name)))
    print(" ".join(cmdline))
    out = subprocess.call(cmdline, shell=False)

    lastVal = None
    lineNum = 0
    for line in fout.readlines():
        lastVal = float(line)
        lineNum += 1

    if lineNum != cdsCount + 1:
        raise Exception(
            "Expected to find DCBS values for {} proteins; found {} instead".
            format(cdsCount, lineNum))

    return lastVal
Example #11
    def _test(*, num_lines: int, max_length: int):
        in_file = NamedTemporaryFile(mode='r+b')
        out_file = NamedTemporaryFile(mode='w+b')

        write_lines_to_file(in_file, num_lines, max_length)
        merge_sort(in_file, out_file, chunk_size=1_000)

        out_file.seek(0)
        src = out_file.readlines()
        in_file.seek(0)
        dst = sorted(in_file.readlines())

        try:
            assert src == dst
        finally:
            in_file.close()
            out_file.close()
Example #12
def test_newline_at_EOF():
    """http://stackoverflow.com/a/729795"""
    f = NamedTemporaryFile(delete=False)
    prop = Properties(OrderedDict([("a", "b"), ("c", "d"), ("e", "f")]))
    prop.save(f.name)
    with open(f.name) as f:
        lastline = f.readlines()[-1]
    os.remove(f.name)
    assert lastline.endswith("\n")
Example #13
 def test_export_table(self):
     Account.objects.create(account='TEST',
                            type='Asset',
                            last_updated=datetime.datetime.now())
     f = NamedTemporaryFile()
     path = f.name
     f.close()
     export_table(Account, path)
     with open(path, 'r') as f:
         lines = f.readlines()
     os.remove(path)
     self.assertEqual(2, len(lines))
Example #15
def call_minisat(n, clauses):
    import os
    dir_path = os.path.dirname(os.path.realpath(__file__))
    infile = NamedTemporaryFile(mode='w')
    outfile = NamedTemporaryFile(mode='r')
    Dimacs.Dimacs.write(n, clauses, infile)
    call("%s/minisat %s %s" % (dir_path, infile.name, outfile.name), shell=True)
    infile.close()
    lines = outfile.readlines()
    if lines[0].strip().startswith('SAT'):
        return [int(x) for x in lines[1].strip().split()]
    else:
        return "UNSAT"
Example #16
def test_get_skll_logger_with_warning():
    reset()

    temp_file = NamedTemporaryFile("w", delete=False)
    temp_file.close()
    TEMP_FILES.append(temp_file)
    TEMP_FILE_PATHS.append(temp_file.name)
    logger = get_skll_logger("test_get_skll_logger_with_warning",
                             temp_file.name)
    LOGGERS.append(logger)

    # Send a regular log message
    msg1 = "message 1"
    logger.info(msg1)

    # Trigger an ``sklearn`` warning
    trigger_sklearn_warning()

    # Send a regular log message
    msg2 = "message 2"
    logger.info(msg2)

    with open(temp_file.name) as temp_file:
        log_lines = temp_file.readlines()
        assert log_lines[0].endswith("INFO - {}\n".format(msg1))
        sklearn_warning_re = \
            re.compile(r"WARNING - [^\n]+sklearn.metrics.ranking.py:\d+: "
                       r"UndefinedMetricWarning:No negative samples in "
                       r"y_true, false positive value should be "
                       r"meaningless")
        assert sklearn_warning_re.search(log_lines[1])
        assert log_lines[-1].endswith("INFO - {}\n".format(msg2))

    # Now make sure that warnings.showwarning works the way
    # it normally works (writes to STDERR) by issuing a warning,
    # capturing it, and, finally, making sure the expected
    # warning shows up correctly in the STDERR stream and,
    # additionally, not in the log file.
    old_stderr = sys.stderr
    try:
        msg3 = "message 3"
        sys.stderr = mystderr = StringIO()
        warnings.warn(msg3)
        err = mystderr.getvalue()
        assert "UserWarning: {}".format(msg3) in err
        with open(temp_file.name) as log_file:
            assert "UserWarning:{}".format(msg3) not in log_file.read()
    finally:
        sys.stderr = old_stderr

    close_and_remove_logger_handlers(logger)
Example #17
def patch(binary, addForSub, subForAdd):
	patchBytes = NamedTemporaryFile()

	r = r2pipe.open(binary)

	# Disassemble binary
	r.cmd('aa')

	if (addForSub):
		# Find all 4-byte 'add rsp, const8' instructions
		r.cmd('/c add rsp | grep " # 4:" > ' + patchBytes.name)
	if (subForAdd):
		# Find all 4-byte 'sub rsp, const8' instructions
		r.cmd('/c sub rsp | grep " # 4:" >> ' + patchBytes.name)

	count = 0
	with open(binary, 'r+b') as binfile:
		for line in patchBytes.readlines():
			# Address to patch
			patchAddr = int(line.split()[0], 16)

			# Seek to byte that must be inverted
			binfile.seek(patchAddr + 3)
			originalConst = ord(binfile.read(1))
			newConst = (~originalConst + 1) & 0xff

			# Seek back to byte to 'add'/'sub' opcode
			binfile.seek(patchAddr+2)

			opcode = ord(binfile.read(1))
			# 'add rsp' -> 'sub rsp'
			if (addForSub and opcode == 0xc4):
				binfile.seek(patchAddr+2)
				# 'sub rsp'
				binfile.write(b'\xec')  # bytes literal: the file is open in binary mode
				# Write inverted byte
				binfile.write(bytes([newConst]))
				count += 1

			# 'sub rsp' -> 'add rsp'
			if (subForAdd and opcode == 0xec):
				binfile.seek(patchAddr+2)
				# 'add rsp'
				binfile.write(b'\xc4')
				# Write inverted byte
				binfile.write(bytes([newConst]))
				count += 1

	filename = os.path.basename(binary)
	print("Patched %d instructions in: %s" %(count, filename))
Example #18
    def test_startup_script(self):
        self.command.contact_group_info = [{
            'account': '*****@*****.**',
            'key': 'conv1',
            'contacts_csv': 'contacts.csv',
        }]
        self.command.conversation_info = [{
            'account': '*****@*****.**',
            'key': 'conv1',
            'start': True,
        }, {
            'account': '*****@*****.**',
            'key': 'conv2',
            'start': False,
        }]
        self.command.router_info = [{
            'account': '*****@*****.**',
            'key': 'router1',
            'start': False,
        }, {
            'account': '*****@*****.**',
            'key': 'router2',
            'start': True,
        }]
        startup_tmpfile = NamedTemporaryFile()
        self.command.mk_filename = lambda fn, s: startup_tmpfile.name

        self.command.write_startup_script()

        startup_tmpfile.flush()
        lines = [
            l.strip('\n') for l in startup_tmpfile.readlines()[3:]
            if l.strip() != ''
        ]
        self.assertEqual(
            lines,
            [
                '#!/bin/bash',
                './go-admin.sh go_import_contacts '  # cont.
                '--email-address [email protected] \\',
                '    --contacts contacts.csv --group conv1',
                'echo "Starting conversation: conv1"',
                './go-admin.sh go_manage_conversation '  # cont.
                '--email-address [email protected] \\',
                '    --conversation-key conv1 --start',
                'echo "Starting router: router2"',
                './go-admin.sh go_manage_router '  # cont.
                '--email-address [email protected] \\',
                '    --router-key router2 --start',
            ])
Example #19
 def test_BadDiametersArgument(self):
     for diam in ['3']:
         with self.subTest(diam=diam):
             outfile = NamedTemporaryFile(delete=False)
             parse_rules(
                 rules_file=self.rules_file,
                 rule_type='retro',
                 diameters=diam,
                 outfile=outfile.name
             )
             # Test if outfile has one single line (header)
             self.assertEqual(len(outfile.readlines()), 1)
             outfile.close()
             unlink(outfile.name)
Example #22
    def solve(self,
              cnf,
              variable=None,
              value=True,
              translator=AIMA_to_Dimacs_Translator):

        # if there are no clauses, then can't infer anything, so by default query result is unknown
        # return Solution with success == None
        # Note that this could be treated the same as failure.
        # In PropKB_SAT.ask, this is OK as it will test if sT.success == sF.success
        #     and therefore will also return None
        if not cnf: return Solution(None)

        s = Solution()
        infile = NamedTemporaryFile(mode='w')
        outfile = NamedTemporaryFile(mode='r')
        io = translator()
        if variable:
            dimacs = io.to_dimacs_string_set_variable_value(
                cnf, variable, value)
            if dimacs:
                infile.write(dimacs)
            else:
                return s
        else:
            infile.write(io.to_dimacs_string(cnf))
        infile.flush()
        ret = call(self.command % (infile.name, outfile.name), shell=True)
        infile.close()
        if ret != 10:
            return s
        s.success = True
        lines = outfile.readlines()[1:]
        for line in lines:
            varz = line.split(' ')[:-1]
            for v in varz:
                v = v.strip()
                value = v[0] != '-'
                v = v.lstrip('-')
                vo = io.varobj(v)
                s.varmap[vo] = value

        outfile.close()
        return s
Example #23
    def test_convert_excel_to_datafile(self):

        read_strategy = ReadExcel()
        write_strategy = WriteDatafile()
        context = Context(read_strategy, write_strategy)

        tmpfile = NamedTemporaryFile()
        from_path = os.path.join("tests", "fixtures", "combined_inputs.xlsx")

        context.convert(from_path, tmpfile.name)

        tmpfile.seek(0)
        actual = tmpfile.readlines()
        tmpfile.close()

        assert actual[-1] == b"end;\n"
        assert actual[0] == b"# Model file written by *otoole*\n"
        assert actual[2] == b"09_ROK d_bld_2_coal_products 2017 20.8921\n"
        assert actual[8996] == b"param default 1 : DepreciationMethod :=\n"
Example #25
    def test_multiline_value_unicode_to_file(self):
        disk_file = NamedTemporaryFile(mode='ab+')
        handler = FileHandler(disk_file.name)
        handler.setFormatter(ShoehornFormatter())
        self.logger.addHandler(handler)
        try:
            1 / 0
        except:
            get_logger().exception('bad', short='x', diff=u'foo\n\U0001F4A9')

        disk_file.seek(0)
        compare(disk_file.readlines()[:5],
                expected=[
                    b"bad short='x'\n",
                    b'diff:\n',
                    b'foo\n',
                    b'\xf0\x9f\x92\xa9\n',
                    b'Traceback (most recent call last):\n',
                ])
Example #26
def process(infile):
    fin = open(infile, "r")
    cmd = []

    for line in fin:
        cmd.append(line.split('#')[0].strip())

    a = NamedTemporaryFile()
    foutName = a.name
    a.close()

    cmd.append("ELn %s.xml" % foutName)

    ai = athenaObj.Interpreter('cgi')
    for c in cmd:
        ai.cmd(c)

    a = open(foutName + ".sco", "r")

    for line in a.readlines():
        print(line)
Example #28
def load_target_types(ccdb_context, run_number):
    mapping = {}

    # make temp file to store CCDB info in
    f = NamedTemporaryFile(mode='r')  # text mode so the dumped lines read back as str
    ccdb_context.process_command_line("dump /TARGET/target_type_list:" + str(run_number) + " > " + f.name)

    # read in info
    f.flush()
    lines = f.readlines()

    if len(lines) < 2:
        print "Problem writing out CCDB table: /TARGET/target_type_list"
    else:
        # skip the first line, which is junk
        for x in range(1, len(lines)):
            vals = lines[x].split()
            # index = int(vals[0])
            mapping[int(vals[0])] = " ".join(vals[1:])

    return mapping
Example #31
class TmpFile:
    mode = dict(mode='rt', encoding='utf-8')

    def __init__(self):
        pass

    def __exit__(self, type, value, tb):
        self.f.close()
        os.remove(self.f.name)

    def __enter__(self):
        self.f = NamedTemporaryFile(delete=False, **TmpFile.mode)
        return self

    def borrow_name(self, proc):
        self.f.close()
        proc(self.f.name)
        self.f = open(self.f.name, **TmpFile.mode)
        return self

    def readlines(self):
        return self.f.readlines()
Example #32
 def test_mapping(structure,
                  reference=None,
                  expected_drmax=0.276249887,
                  expected_dravg=0.139034051,
                  expected_chemical_formula='H3Au6Pd2X5',
                  **kwargs):
     """
     Convenience wrapper for testing mapping.
     """
     if reference is None:
         reference = self.reference
     logfile = NamedTemporaryFile(mode='w+', encoding='utf-8')
     set_log_config(filename=logfile.name)
     mapped, info = map_structure_to_reference(structure, reference,
                                               **kwargs)
     self.assertEqual(len(info), 6)
     self.assertAlmostEqual(info['drmax'], expected_drmax)
     self.assertAlmostEqual(info['dravg'], expected_dravg)
     self.assertEqual(mapped.get_chemical_formula(),
                      expected_chemical_formula)
     logfile.seek(0)
     lines = logfile.readlines()
     logfile.close()
     return lines, info
Example #33
def createEncPrimeReport(fastaPath, geneticCode):
    fout = NamedTemporaryFile(mode='r', delete=(not debugMode))

    print("Creating codon counts...")
    #SeqCount -c ExSeqs.fasta 9
    print((ENCprimeMain,
           "{}.codcnt".format(fastaPath), "{}.acgtfreq".format(fastaPath),
           str(geneticCode), fout.name, "0", "-q"))
    out = subprocess.check_output((ENCprimeMain, "{}.codcnt".format(fastaPath),
                                   "{}.acgtfreq".format(fastaPath),
                                   str(geneticCode), fout.name, "0", "-q"),
                                  shell=False)

    print(out)

    ENc = ENc_prime = None  # guard in case no 'Totals:' line is found
    for line in fout.readlines():
        print("= {}".format(line))
        if line.find("Totals: ") == 0:
            fields = line.split(" ")
            ENc = float(fields[1])
            ENc_prime = float(fields[2])
            print("ENc: {}\tENc': {}".format(ENc, ENc_prime))

    return (ENc, ENc_prime)
Example #35
def test_get_skll_logger():
    reset()

    temp_file = NamedTemporaryFile("w", delete=False)
    temp_file.close()
    TEMP_FILES.append(temp_file)
    TEMP_FILE_PATHS.append(temp_file.name)
    logger = get_skll_logger("test_get_skll_logger", temp_file.name)
    LOGGERS.append(logger)

    # Send a regular log message
    msg1 = "message 1"
    logger.info(msg1)

    # Send a regular log message
    msg2 = "message 2"
    logger.info(msg2)

    with open(temp_file.name) as temp_file:
        log_lines = temp_file.readlines()
        assert log_lines[0].endswith("INFO - {}\n".format(msg1))
        assert log_lines[1].endswith("INFO - {}\n".format(msg2))

    close_and_remove_logger_handlers(logger)
Example #36
class _FileLogger:
    """File logging class wrapper.

    Class wrapping is needed manly for safety of log file removal
    after Blender is shut down.

    Registering fuction for atexit module makes sure than,
    file is deleted if Blender is closed normally.

    However file is not deleted if process is killed in Linux.
    On Windows, on the other hand, file gets deleted even if Blender
    is closed from Task Manager -> End Task/Process
    """
    __log_file = None

    def __init__(self):

        self.__log_file = NamedTemporaryFile(mode="w+", suffix=".log.txt", delete=True)

        # instead of destructor we are using delete method,
        # to close and consequentially delete log file
        atexit.register(self.delete)

    def delete(self):
        """Closes file and consiquentally deletes it as log file was created in that fashion.
        """

        # close file only if it's still exists in class variable
        if self.__log_file is not None:
            self.__log_file.close()
            self.__log_file = None

    def write(self, msg_object):
        """Writes message to the log file.

        :param msg_object: message to be written to file
        :type msg_object: object
        """

        self.__log_file.write(msg_object)

    def flush(self):
        """Flushes written content to file on disk."""

        self.__log_file.flush()

    def get_log(self):
        """Gets current content of temporary SCS BT log file,
        which was created at startup and is having log of BT session.

        :return: current content of log file as string
        :rtype: str
        """

        # firstly move to start of the file
        self.__log_file.seek(0)

        log = ""
        for line in self.__log_file.readlines():
            log += line.replace("\t   ", "\t\t   ")  # replace for Blender text editor to be aligned the same as in console

        return log
Example #37
def main(argv):
    # configuration vars
    RUN_PERIOD = "RunPeriod-2014-10"
    CONDITION_FILE_FORMAT = "/work/halld/online_monitoring/conditions/run_conditions%06d.dat"
    # assume that the files are loaded on the cache disk
    RAWDATA_DIR_FORMAT = "/cache/mss/halld/RunPeriod-2014-10/rawdata/Run%06d"

    # read in run number from command line
    try:
        run_number = int(argv[0])
    except (IndexError, ValueError):
        print("Need to pass the run number to process as a command line argument!")
        return

    run_properties = init_property_mapping()

    run_conditions = parse_condition_file(CONDITION_FILE_FORMAT % (run_number))
    if run_conditions is None:
        return

    # start extracting saved EPICS values
    # run_number = run_conditions['RUN']     ## check this?
    run_properties["beam_current"] = run_conditions["IBCAD00CRCUR6"]
    run_properties["start_time"] = run_conditions["TIME"]
    run_properties["solenoid_current"] = run_conditions["HallD-PXI:Data:I_Shunt"]

    # figure out which radiator was used
    # save luminosity factor = current * radiator thickness
    amorphous_radiator_position = float(run_conditions["hd:radiator:motor.RBV"])
    if fabs(amorphous_radiator_position - 135.948) < RADIATOR_TOLERANCE:
        run_properties["radiator_type"] = "2x10-5 RL"
        run_properties["luminosity"] = 1.7e-5 * float(run_properties["beam_current"])
    elif fabs(amorphous_radiator_position - 166.095) < RADIATOR_TOLERANCE:
        run_properties["radiator_type"] = "1x10-4 RL"
        run_properties["luminosity"] = 11.2e-5 * float(run_properties["beam_current"])
    elif fabs(amorphous_radiator_position - 196.262) < RADIATOR_TOLERANCE:
        run_properties["radiator_type"] = "3x10-4 RL"
        run_properties["luminosity"] = 22.7e-5 * float(run_properties["beam_current"])
    else:
        run_properties["radiator_type"] = "None"
        # run_properties['luminosity'] = run_properties['beam_current']
        run_properties["luminosity"] = 0.0

    # parse EVIO files to extract useful information
    # eventually the DAQ will report this properly?
    rawdata_evio_dir = RAWDATA_DIR_FORMAT % (run_number)
    if os.path.isdir(rawdata_evio_dir):
        filelist = [
            join(rawdata_evio_dir, f)
            for f in listdir(rawdata_evio_dir)
            if ((f[:10] == "hd_rawdata" or f[:6] == "hd_raw") and (f[-5:] == ".evio"))
        ]
        filelist.sort()
        file_properties = ParseEVIOFiles(filelist)
        if len(file_properties) > 0:
            run_properties["num_events"] = file_properties["num_events"]
            run_properties["num_files"] = file_properties["num_files"]
            run_properties["start_time"] = file_properties["start_time"]
            run_properties["end_time"] = file_properties["end_time"]

    # pull out target information from the CCDB
    # load CCDB connection
    ccdb_context = InitCCDB()

    # read target index -> name mapping definition in from the CCDB
    target_types = load_target_types(ccdb_context, run_number)

    # make temp file to store CCDB info in
    fconst = NamedTemporaryFile(mode='r')  # text mode so the dumped lines read back as str
    ccdb_context.process_command_line("dump /TARGET/target_parms:" + str(run_number) + " > " + fconst.name)

    # read in info
    fconst.flush()
    const_lines = fconst.readlines()

    if len(const_lines) < 2:
        print "Problem writing out CCDB constants to file!"
    else:
        # the first line of the output file from CCDB is junk, and our numbers are on the second line
        vals = const_lines[1].split()
        target_index = int(vals[0])
        if target_index in target_types:
            run_properties["target_type"] = target_types[target_index]
        else:
            print "Invalid target index from CCDB = " + str(target_index)
        fconst.close()

    if VERBOSE:
        print "RUN PROPERTIES FOR RUN " + str(run_number)
        print str(run_properties)

    # Add information to DB
    ## initialize DB
    db = datamon_db()
    ## add blank run to DB if it doesn't exist
    if db.GetRunID(run_number) < 0:
        db.CreateRun(run_number)
    db.UpdateRunInfo(run_number, run_properties)
Example #38
class BasicTranscludeTests(unittest.TestCase):

    def setUp(self):
        """Create tempfile as target, add cleanup methods to close and unlink tempfiles."""
        self.target = NamedTemporaryFile(delete=False)
        self.addCleanup(self.target.close)
        self.addCleanup(os.unlink, self.target.name)

    def compare_results(self, correct_path):
        """Compare the actual result with the correct result."""
        with open(correct_path, 'r+') as correct:
            c = correct.readlines()
            self.target.seek(0)
            t = self.target.readlines()
            self.assertEqual(c, t)

    def test_no_transclusion(self):
        """Transcluding a file without transclude directive returns the original file."""
        transclude_file(make_path("simple-test-result.md"), self.target, 'md')
        self.compare_results(make_path("simple-test-result.md"))

    def test_simple_transclude(self):
        """Transclude replaces directive {{some_other_file.txt}} with contents of some_other_file.txt."""
        """transclude looks for files in parent folder of source"""
        transclude_file(make_path("simple-transclusion.md"), self.target, 'md')
        self.compare_results(make_path("simple-test-result.md"))

    def test_recursive_transclude(self):
        """Transclude is recursive."""
        transclude_file(
            make_path("recursive-transclusion.md"), self.target, 'md')
        self.compare_results(make_path("simple-test-result.md"))

    def test_two_transclusions_in_one_line(self):
        """Two transclusion directives in one file are handled correctly."""
        transclude_file(make_path("double-transclusion.md"), self.target, 'md')
        self.compare_results(make_path("simple-test-result.md"))

    def test_wildcard_transclusion(self):
        """Wildcard transclusion {{foo.*}} wildcard is set according to type (tex, html, )"""
        transclude_file(
            make_path("wildcard-transclusion.md"), self.target, 'html')
        self.compare_results(make_path("simple-test-result.md"))

    def test_missing_file_raises_error(self):
        """transclude outputs an error when a file to transclude is not found."""
        self.assertRaises(MissingFileException,
                          transclude_file,
                          make_path("missing-transclusion.md"),
                          self.target,
                          'md')

    def test_transclude_base(self):
        """If metadata "Transclude Base" is set, transclude looks there for files."""
        """metadata "Transclude Base" is only evaluated in the first file."""
        transclude_file(
            make_path("new-transclude-base.md"), self.target, 'html')
        self.compare_results(make_path("new-transclude-base-result.md"))

    def test_recursion_nested_folders(self):
        """Transclude ignores metadata in transculded file."""
        """with recursion, transclude looks for files relative to the file which transludes them."""
        """after recursion, transclude looks for files again relative to source."""
        """metadata of recursed files is ignored in result."""
        """metadata of source file is included in result"""
        # TODO: split into smaller tests
        transclude_file(
            make_path("recursive-subfolder.md"), self.target, 'html')
        self.compare_results(make_path("recursive-subfolder-result.md"))
Example #39
class FileStore(object):

   def __init__(self, proxy, processId, mode='r'):
      '''
      Gives r+w access to a file on the static instance via a local tempfile.NamedTemporaryFile
      '''
      self.proxy = proxy
      self.processId = processId
      self.mode = mode

      self.tmpFile = NamedTemporaryFile(mode='r+', prefix=self.processId, delete=True)
      if 'r' in mode:
         self.__get__()

   def __get__(self):
      '''
      Get contents of static instance file and save to local temp file
      '''
      data= self.proxy.getFileContents(self.processId)
      self.tmpFile.write(data.tostring())
      self.tmpFile.seek(0)

   def __post__(self):
      '''
      Posts contents of local temp file to static instance file
      '''
      self.tmpFile.seek(0)
      data= self.tmpFile.read()
      self.proxy.setFileContents(self.processId, data)
      self.tmpFile.seek(0)

   def getName(self):
      return self.processId

   def getLocalName(self):
      return self.tmpFile.name

   def write(self, data):
      '''
      Writes data to local tempfile
      '''
      if 'w' not in self.mode:
         raise Exception('file open for read only')

      self.tmpFile.write(dumps(data))

   def read(self, size= -1):
      '''
      Reads data from local tempfile.  See file read() for more details.
      '''
      if 'r' not in self.mode:
         raise Exception('file open for write only')
  
      return loads(self.tmpFile.read(size))

   def readlines(self):
      '''
      Reads lines from the local tempfile.  See file readlines() for more details.
      '''
      if 'r' not in self.mode:
         raise Exception('file open for write only')

      # unpickle line by line; assumes each stored value fits on one line
      return [loads(line) for line in self.tmpFile.readlines()]

   def readline(self):
      '''
      Reads line from local tempfile. See file readline() for more details.
      '''
      if 'r' not in self.mode:
         raise Exception('file open for write only')

      return loads(self.tmpFile.readline())
 
   def close(self, delete=False):
      '''
      Saves the contents of the local tempfile and then closes/destroys it.
      See self.__post__() and python tempfile for more details.
      '''

      if 'w' in self.mode:
         self.__post__()

      elif 'r' in self.mode:

         # if delete requested -- remove file form static instance
         if delete:
            self.proxy.deleteFile(self.processId)

      self.tmpFile.close()
Example #40
class History(TestCase):

    def setUp(self):
        from kamaki.cli.history import History as HClass
        self.HCLASS = HClass
        self.file = NamedTemporaryFile(mode='w+')  # text mode: the tests write and compare str

    def tearDown(self):
        self.file.close()

    def test__match(self):
        self.assertRaises(AttributeError, self.HCLASS._match, 'ok', 42)
        self.assertRaises(TypeError, self.HCLASS._match, 2.71, 'ok')
        for args, expected in (
                (('XXX', None), True),
                ((None, None), True),
                (('this line has some terms', 'some terms'), True),
                (('this line has some terms', 'some bad terms'), False),
                (('small line', 'not so small line terms'), False),
                ((['line', 'with', 'some', 'terms'], 'some terms'), True),
                ((['line', 'with', 'some terms'], 'some terms'), False)):
            self.assertEqual(self.HCLASS._match(*args), expected)

    def test_add(self):
        history = self.HCLASS(self.file.name)
        some_strings = ('a brick', 'two bricks', 'another brick', 'A wall!')
        for i, line in enumerate(some_strings):
            history.add(line)
            self.file.seek(0)
            self.assertEqual(
                self.file.read(), '\n'.join(some_strings[:(i + 1)]) + '\n')

    def test_empty(self):
        content = 'a brick\ntwo bricks\nanother brick\nA wall!\n'
        self.file.write(content)
        self.file.flush()
        self.file.seek(0)
        self.assertEqual(self.file.read(), content)
        history = self.HCLASS(self.file.name)
        history.empty()
        self.file.seek(0)
        self.assertEqual(self.file.read(), '0\n')

    def test_retrieve(self):
        sample_history = (
            '0\n',
            'kamaki history show\n',
            'kamaki file list\n',
            'kamaki file create /pithos/f1\n',
            'kamaki file info /pithos/f1\n',
            'last command is always excluded')
        self.file.write(''.join(sample_history))
        self.file.flush()

        history = self.HCLASS(self.file.name)
        self.assertRaises(ValueError, history.retrieve, 'must be number')
        self.assertRaises(TypeError, history.retrieve, [1, 2, 3])

        for i in (0, len(sample_history) + 1, - len(sample_history) - 1):
            self.assertEqual(history.retrieve(i), None)
        for i in range(1, len(sample_history)):
            self.assertEqual(history.retrieve(i), sample_history[i])
            self.assertEqual(history.retrieve(- i), sample_history[- i])

    def test_limit(self):
        sample_history = (
            '0\n',
            'kamaki history show\n',
            'kamaki file list\n',
            'kamaki file create /pithos/f1\n',
            'kamaki file info /pithos/f1\n',
            'last command is always excluded')
        sample_len = len(sample_history)
        self.file.write(''.join(sample_history))
        self.file.flush()
        history = self.HCLASS(self.file.name)

        for value, exp_e in (
                    (-2, ValueError),
                    ('non int', ValueError),
                    (None, TypeError)):
            try:
                history.limit = value
            except Exception as e:
                self.assertTrue(isinstance(e, exp_e))

        history.limit = 10
        self.assertEqual(history.limit, 10)
        self.file.seek(0)
        self.assertEqual(len(self.file.readlines()), sample_len)

        history.limit = sample_len - 1
        self.assertEqual(history.limit, sample_len - 1)
        self.file.seek(0)
        self.assertEqual(len(self.file.readlines()), sample_len)
Example #42
def do_align(f_name, rev_f_name, seed=None):
    print('Reading source/target sentences from %s...' % f_name,
          file=sys.stderr, flush=True)
    with open(f_name, 'r', encoding='utf-8') as f:
        src_sents_text = []
        trg_sents_text = []
        for i, line in enumerate(f):
            fields = line.strip().split(' ||| ')
            if len(fields) != 2:
                print('ERROR: line %d of %s does not contain a single |||'
                      ' separator, or sentence(s) are empty!' % (i+1, f_name),
                      file=sys.stderr, flush=True)
                sys.exit(1)
            src_sents_text.append(fields[0])
            trg_sents_text.append(fields[1])
        src_text = '\n'.join(src_sents_text) + '\n'
        trg_text = '\n'.join(trg_sents_text) + '\n'
        src_sents_text = None
        trg_sents_text = None

        source_prefix_len = 0
        source_suffix_len = 0
        
        target_prefix_len = 0
        target_suffix_len = 0
        
        with io.StringIO(src_text) as f:
            src_sents, src_index = read_text(
                    f, True, source_prefix_len, source_suffix_len)
            n_src_sents = len(src_sents)
            src_voc_size = len(src_index)
            srcf = NamedTemporaryFile('wb')
            write_text(srcf, tuple(src_sents), src_voc_size)
            src_sents = None
            src_text = None

        with io.StringIO(trg_text) as f:
            trg_sents, trg_index = read_text(
                    f, True, target_prefix_len, target_suffix_len)
            trg_voc_size = len(trg_index)
            n_trg_sents = len(trg_sents)
            trgf = NamedTemporaryFile('wb')
            write_text(trgf, tuple(trg_sents), trg_voc_size)
            trg_sents = None
            trg_text = None
    """
    print("source")
    with open(f_name, 'r', encoding='utf-8') as f:
        src_sents, src_index = read_text(
                f, True, 0, 0)
        n_src_sents = len(src_sents)
        src_voc_size = len(src_index)
        srcf = NamedTemporaryFile('wb')
        write_text(srcf, tuple(src_sents), src_voc_size)
        src_sents = None
    
    print("target")
    with open(rev_f_name, 'r', encoding='utf-8') as f:
        trg_sents, trg_index = read_text(
                f, True, 0, 0)
        trg_voc_size = len(trg_index)
        n_trg_sents = len(trg_sents)
        trgf = NamedTemporaryFile('wb')
        write_text(trgf, tuple(trg_sents), trg_voc_size)
        trg_sents = None
    """
    
    fwd_links_file = NamedTemporaryFile('r+')
    rev_links_file = NamedTemporaryFile('r+')
    stat_file = NamedTemporaryFile('r+')
    print("start align")
    
    
    align(srcf.name, trgf.name, statistics_filename = stat_file.name, quiet=False, links_filename_fwd=fwd_links_file.name, links_filename_rev=rev_links_file.name)
    
    # Not using stat_file at the moment
    result = fwd_links_file.readlines()
    rev_result = rev_links_file.readlines()
                   
    fwd_links_file.close()
    rev_links_file.close()
    stat_file.close()
    srcf.close()
    trgf.close()
    
        
    """
    if discretize:
        ibm_print(aaa, reverse, output.fileno())
    else: # Not used at the moment, but keeping this for the future
        with open(output_prob, 'wb') as f:
            pickle.dump(aaa, f, -1)

    output.seek(0)
    result = []
    for line in output:
        result.append(line.decode('ascii').strip())
    """
    return result, rev_result
Example #43
    def test_memory(self):
        """
        Memory test
        """
        def cleanup(supress=False):
            """ cleanup """
            logging.debug("test_memory: Cleanup")
            err = ""
            if item.rm_cgroup(pwd):
                err += "\nCan't remove cgroup directory"

            utils.system("swapon -a")

            if err:
                if supress:
                    logging.warning("Some parts of cleanup failed%s", err)
                else:
                    raise error.TestFail("Some parts of cleanup failed%s" %
                                         err)

        # Preparation
        item = Cgroup('memory', self._client)
        item.initialize(self.modules)
        item.smoke_test()
        pwd = item.mk_cgroup()

        logging.debug("test_memory: Memory filling test")
        meminfo = open('/proc/meminfo', 'r')
        mem = meminfo.readline()
        while not mem.startswith("MemFree"):
            mem = meminfo.readline()
        # Use only 1G or max of the free memory
        mem = min(int(mem.split()[1]) / 1024, 1024)
        mem = max(mem, 100)  # at least 100M
        try:
            item.get_property("memory.memsw.limit_in_bytes")
        except error.TestError:
            # Doesn't support memsw limitation -> disabling
            logging.info("System does not support 'memsw'")
            utils.system("swapoff -a")
            memsw = False
        else:
            # Supports memsw
            memsw = True
            # Clear swap
            utils.system("swapoff -a")
            utils.system("swapon -a")
            meminfo.seek(0)
            swap = meminfo.readline()
            while not swap.startswith("SwapTotal"):
                swap = meminfo.readline()
            swap = int(swap.split()[1]) / 1024
            if swap < mem / 2:
                logging.error("Not enough swap memory to test 'memsw'")
                memsw = False
        meminfo.close()
        outf = NamedTemporaryFile('w+', prefix="cgroup_client-", dir="/tmp")
        logging.debug("test_memory: Initializition passed")

        ################################################
        # Fill the memory without cgroup limitation
        # Should pass
        ################################################
        logging.debug("test_memory: Memfill WO cgroup")
        ps = item.test("memfill %d %s" % (mem, outf.name))
        ps.stdin.write('\n')
        i = 0
        while ps.poll() is None:
            if i > 60:
                break
            i += 1
            time.sleep(1)
        if i > 60:
            ps.terminate()
            raise error.TestFail("Memory filling failed (WO cgroup)")
        outf.flush()
        outf.seek(0)
        out = outf.readlines()
        if (len(out) < 2) or (ps.poll() != 0):
            raise error.TestFail("Process failed (WO cgroup); output:\n%s"
                                 "\nReturn: %d" % (out, ps.poll()))
        if not out[-1].startswith("PASS"):
            raise error.TestFail("Unsuccessful memory filling (WO cgroup)")
        logging.debug("test_memory: Memfill WO cgroup passed")

        ################################################
        # Fill the memory with 1/2 memory limit
        # memsw: should swap out part of the process and pass
        # WO memsw: should fail (SIGKILL)
        ################################################
        logging.debug("test_memory: Memfill mem only limit")
        ps = item.test("memfill %d %s" % (mem, outf.name))
        item.set_cgroup(ps.pid, pwd)
        item.set_property_h("memory.limit_in_bytes", ("%dM" % (mem // 2)), pwd)
        ps.stdin.write('\n')
        i = 0
        while ps.poll() is None:
            if i > 120:
                break
            i += 1
            time.sleep(1)
        if i > 120:
            ps.terminate()
            raise error.TestFail("Memory filling failed (mem)")
        outf.flush()
        outf.seek(0)
        out = outf.readlines()
        if len(out) < 2:
            raise error.TestFail("Process failed (mem); output:\n%s"
                                 "\nReturn: %d" % (out, ps.poll()))
        if memsw:
            if not out[-1].startswith("PASS"):
                logging.error(
                    "test_memory: cgroup_client.py returned %d; "
                    "output:\n%s", ps.poll(), out)
                raise error.TestFail("Unsuccessful memory filling (mem)")
        else:
            if out[-1].startswith("PASS"):
                raise error.TestFail("Unexpected memory filling (mem)")
            else:
                filled = int(out[-2].split()[1][:-1])
                if mem // 2 > 1.5 * filled:
                    logging.error(
                        "test_memory: Limit = %dM, Filled = %dM (+ "
                        "python overhead up to 1/3 (mem))", mem // 2, filled)
                else:
                    logging.debug(
                        "test_memory: Limit = %dM, Filled = %dM (+ "
                        "python overhead up to 1/3 (mem))", mem // 2, filled)
        logging.debug("test_memory: Memfill mem only cgroup passed")

        ################################################
        # Fill the memory with 1/2 memory+swap limit
        # Should fail
        # (memory.limit_in_bytes have to be set prior to this test)
        ################################################
        if memsw:
            logging.debug("test_memory: Memfill mem + swap limit")
            ps = item.test("memfill %d %s" % (mem, outf.name))
            item.set_cgroup(ps.pid, pwd)
            item.set_property_h("memory.memsw.limit_in_bytes",
                                "%dM" % (mem // 2), pwd)
            ps.stdin.write('\n')
            i = 0
            while ps.poll() is None:
                if i > 120:
                    break
                i += 1
                time.sleep(1)
            if i > 120:
                ps.terminate()
                raise error.TestFail("Memory filling failed (memsw)")
            outf.flush()
            outf.seek(0)
            out = outf.readlines()
            if len(out) < 2:
                raise error.TestFail("Process failed (memsw); output:\n%s"
                                     "\nReturn: %d" % (out, ps.poll()))
            if out[-1].startswith("PASS"):
                raise error.TestFail("Unexpected memory filling (memsw)", mem)
            else:
                filled = int(out[-2].split()[1][:-1])
                if mem // 2 > 1.5 * filled:
                    logging.error(
                        "test_memory: Limit = %dM, Filled = %dM (+ "
                        "python overhead up to 1/3 (memsw))", mem // 2, filled)
                else:
                    logging.debug(
                        "test_memory: Limit = %dM, Filled = %dM (+ "
                        "python overhead up to 1/3 (memsw))", mem // 2, filled)
            logging.debug("test_memory: Memfill mem + swap cgroup passed")

        ################################################
        # CLEANUP
        ################################################
        cleanup()
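The same wait loop appears three times above; factored out, the idiom is simply polling with a one-second tick and a hard cap. A hedged sketch (wait_with_timeout is a hypothetical helper, not part of the original test):

import time

def wait_with_timeout(proc, timeout_s):
    # returns the process exit code, or None if it had to be terminated
    for _ in range(timeout_s):
        rc = proc.poll()
        if rc is not None:
            return rc
        time.sleep(1)
    proc.terminate()
    return None

# usage: rc = wait_with_timeout(ps, 120); None means the memfill timed out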
Example #44
0
def align_to_genome(batch_reads_data,
                    genome_fn,
                    mapper_exe,
                    mapper_type,
                    genome_index,
                    output_format='sam'):
    # prepare fasta text with batch reads
    batch_reads_fasta = ''
    for read_fn_sg, (_, _, basecalls, _, _, _) in \
            batch_reads_data.items():
        batch_reads_fasta += ">" + read_fn_sg + '\n' + \
                             ''.join(basecalls) + '\n'

    read_fp = NamedTemporaryFile('w+', suffix='.fasta')
    read_fp.write(batch_reads_fasta)
    read_fp.flush()
    out_fp = NamedTemporaryFile('w+')

    # optionally suppress output from mapper with devnull sink
    with open(os.devnull, 'w') as FNULL:
        if mapper_type == 'graphmap':
            mapper_options = prep_graphmap_options(genome_fn, read_fp.name,
                                                   out_fp.name, output_format)
            stdout_sink = FNULL
        elif mapper_type == 'bwa_mem':
            mapper_options = prep_bwa_mem_options(genome_fn, read_fp.name)
            stdout_sink = out_fp
        else:
            raise RuntimeError('Mapper not supported.')

        try:
            exitStatus = call([
                mapper_exe,
            ] + mapper_options,
                              stdout=stdout_sink,
                              stderr=FNULL)
            out_fp.seek(0)
            align_output = out_fp.readlines()
            # close (and thereby delete) the temporary files now that
            # the mapper output has been read
            read_fp.close()
            out_fp.close()
        except Exception:
            # whole mapping call failed so all reads failed
            return ([
                ('Problem running/parsing genome mapper. ' +
                 'Ensure you have a compatible version installed. ' +
                 'Potentially failed to locate BWA index files.', read_fn_sg)
                for read_fn_sg in batch_reads_data.keys()
            ], [])

    if output_format == 'sam':
        batch_parse_failed_reads, batch_align_data = parse_sam_output(
            align_output, batch_reads_data, genome_index)
    elif output_format == 'm5':
        batch_parse_failed_reads, batch_align_data = parse_m5_output(
            align_output, batch_reads_data)
    else:
        raise RuntimeError('Mapper output type not supported.')

    clip_fix_align_data = fix_all_clipped_bases(batch_align_data,
                                                batch_reads_data)

    return batch_parse_failed_reads, clip_fix_align_data
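Note the stdout-capture trick above: for bwa_mem the mapper's standard output is redirected straight into a NamedTemporaryFile, which is then rewound and read. A minimal sketch of that capture, with a generic 'echo' command standing in for the real mapper executable:

import os
from subprocess import call
from tempfile import NamedTemporaryFile

out_fp = NamedTemporaryFile('w+')
with open(os.devnull, 'w') as FNULL:
    # 'echo' stands in for the mapper invocation; ret is its exit status
    ret = call(['echo', 'read_1 mapped to chr1'], stdout=out_fp, stderr=FNULL)
out_fp.seek(0)    # rewind before reading what the child process wrote
align_output = out_fp.readlines()
out_fp.close()    # deletes the temporary file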
Example #46
0
class Playbook(object):
    ACTIVE = "ACTIVE"
    PLAYBOOK_FAILED = "PLAYBOOK_FAILED"

    def __init__(
        self,
        ip,
        port,
        playbooks_information,
        osi_private_key,
        public_key,
        pool,
        loaded_metadata_keys,
        cloud_site,
    ):
        self.loaded_metadata_keys = loaded_metadata_keys
        self.cloud_site = cloud_site
        self.redis = redis.Redis(connection_pool=pool)  # redis connection
        self.yaml_exec = ruamel.yaml.YAML()  # yaml writer/reader
        self.vars_files = []  # _vars_file.yml to read
        self.tasks = []  # task list
        self.always_tasks = []
        # subprocess handle, return code, and captured stdout/stderr
        self.process = None
        self.returncode = -1
        self.playbooks_information = playbooks_information
        self.stdout = ""
        self.stderr = ""
        # init temporary directories and mandatory generic files
        self.ancon_dir = "/code/VirtualMachineService/ancon"  # path to this directory
        self.playbooks_dir = self.ancon_dir + "/playbooks"  # path to source playbooks
        self.directory = TemporaryDirectory(dir=self.ancon_dir)
        self.private_key = NamedTemporaryFile(mode="w+",
                                              dir=self.directory.name,
                                              delete=False,
                                              prefix="key_")
        self.private_key.write(osi_private_key)
        self.private_key.close()

        self.log_file_stdout = NamedTemporaryFile(mode="w+",
                                                  dir=self.directory.name,
                                                  delete=False,
                                                  prefix="log_stdout")
        self.log_file_stderr = NamedTemporaryFile(mode="w+",
                                                  dir=self.directory.name,
                                                  delete=False,
                                                  prefix="log_err")

        # create the custom playbook and save its name
        self.playbook_exec_name = "generic_playbook.yml"
        self.copy_playbooks_and_init(playbooks_information, public_key)

        # create inventory
        self.inventory = NamedTemporaryFile(mode="w+",
                                            dir=self.directory.name,
                                            delete=False,
                                            prefix="inventory_")

        inventory_string = (
            f"[vm]\n"
            f"{ip} ansible_port={port} ansible_user=ubuntu ansible_ssh_private_key_file={self.private_key.name} ansible_python_interpreter=/usr/bin/python3"
        )

        self.inventory.write(inventory_string)
        self.inventory.close()

    def copy_playbooks_and_init(self, playbooks_information, public_key):
        # go through every wanted playbook
        for k, v in playbooks_information.items():
            self.copy_and_init(k, v)

        # init yml to change public keys as last task
        shutil.copy(self.playbooks_dir + "/change_key.yml",
                    self.directory.name)
        shutil.copy(self.playbooks_dir + "/change_key_vars_file.yml",
                    self.directory.name)
        with open(self.directory.name + "/change_key_vars_file.yml",
                  mode="r") as key_file:
            data_ck = self.yaml_exec.load(key_file)
            data_ck["change_key_vars"]["key"] = public_key.strip('"')
        with open(self.directory.name + "/change_key_vars_file.yml",
                  mode="w") as key_file:
            self.yaml_exec.dump(data_ck, key_file)
        self.add_to_playbook_always_lists("change_key")

        # write all vars_files and tasks in generic_playbook
        shutil.copy(self.playbooks_dir + "/" + self.playbook_exec_name,
                    self.directory.name)
        with open(self.directory.name + "/" + self.playbook_exec_name,
                  mode="r") as generic_playbook:
            data_gp = self.yaml_exec.load(generic_playbook)
            data_gp[0]["vars_files"] = self.vars_files
            data_gp[0]["tasks"][0]["block"] = self.tasks
            data_gp[0]["tasks"][0]["always"] = self.always_tasks
        with open(self.directory.name + "/" + self.playbook_exec_name,
                  mode="w") as generic_playbook:
            self.yaml_exec.dump(data_gp, generic_playbook)

    def copy_and_init(self, playbook_name, playbook_vars):
        def load_vars():
            LOG.info(f" Playbook vars: {playbook_vars}")
            if playbook_name == CONDA:
                for k, v in playbook_vars.items():
                    if k == "packages":
                        p_array = []
                        p_dict = {}
                        for p in (v.strip('"')).split():
                            p_array.append(p.split("="))
                        for p in p_array:
                            p_dict.update({p[0]: {"version": p[1]}})
                        data[playbook_name + "_vars"][k] = p_dict
            if playbook_name in self.loaded_metadata_keys:
                for k, v in playbook_vars.items():
                    LOG.info(playbook_vars)
                    if k == "template_version":
                        data[playbook_name + "_vars"][k] = v
                    if k == "create_only_backend":
                        if playbook_vars[k] in ["false", "False"]:
                            data[playbook_name + "_vars"][k] = False
                        elif playbook_vars[k] in ["true", "True"]:
                            data[playbook_name + "_vars"][k] = True

                    if k == "base_url":
                        data[playbook_name + "_vars"][k] = v

            if playbook_name == OPTIONAL:
                for k, v in playbook_vars.items():
                    if k == MOSH:
                        data[playbook_name + "_defined"][k] = v

            LOG.info(f"Playbook Data - {data}")

        # copy whole directory
        shutil.copytree(
            f"{self.playbooks_dir}/{playbook_name}",
            self.directory.name,
            dirs_exist_ok=True,
        )

        site_specific_yml = f"/{playbook_name}{'-' + self.cloud_site}.yml"
        playbook_name_local = playbook_name
        if os.path.isfile(self.directory.name + site_specific_yml):
            playbook_name_local = playbook_name + "-" + self.cloud_site

        playbook_var_yml = f"/{playbook_name}_vars_file.yml"

        try:
            with open(self.directory.name + playbook_var_yml,
                      mode="r") as variables:
                data = self.yaml_exec.load(variables)
                load_vars()
            with open(self.directory.name + playbook_var_yml,
                      mode="w") as variables:
                self.yaml_exec.dump(data, variables)
            self.add_to_playbook_lists(playbook_name_local, playbook_name)
        except shutil.Error as e:
            LOG.exception(e)
            self.add_tasks_only(playbook_name_local)
        except IOError as e:
            LOG.exception(e)
            self.add_tasks_only(playbook_name_local)

    def add_to_playbook_lists(self, playbook_name_local, playbook_name):
        self.vars_files.append(playbook_name + "_vars_file.yml")
        self.tasks.append(
            dict(
                name=f"Running {playbook_name_local} tasks",
                import_tasks=playbook_name_local + ".yml",
            ))
        LOG.info("Added playbook: " + playbook_name_local + ".yml" +
                 ", vars file: " + playbook_name + "_vars_file.yml")

    def add_tasks_only(self, playbook_name):
        self.tasks.append(
            dict(
                name=f"Running {playbook_name} tasks",
                import_tasks=playbook_name + ".yml",
            ))

    def add_to_playbook_always_lists(self, playbook_name):
        self.vars_files.append(playbook_name + "_vars_file.yml")
        self.always_tasks.append(
            dict(
                name=f"Running {playbook_name} tasks",
                import_tasks=playbook_name + ".yml",
            ))

    def add_always_tasks_only(self, playbook_name):
        self.always_tasks.append(
            dict(
                name=f"Running {playbook_name} tasks",
                import_tasks=playbook_name + ".yml",
            ))

    def run_it(self):
        command_string = "/usr/local/bin/ansible-playbook -v -i {0} {1}/{2}".format(
            self.inventory.name, self.directory.name, self.playbook_exec_name)
        LOG.info(f"Run Playbook with command {command_string}")
        command_string = shlex.split(command_string)
        self.process = subprocess.Popen(
            command_string,
            stdout=self.log_file_stdout,
            stderr=self.log_file_stderr,
            universal_newlines=True,
        )

    def check_status(self, openstack_id):
        done = self.process.poll()
        if done is None:
            LOG.info(
                f"Playbook for openstack_id {openstack_id} still in progress."
            )
        elif done != 0:
            LOG.info(f"Playbook for openstack_id {openstack_id} has failed.")
            self.redis.hset(openstack_id, "status", self.PLAYBOOK_FAILED)
            self.returncode = self.process.returncode
            self.process.wait()
        else:
            LOG.info(
                f"Playbook for openstack_id {openstack_id} was successful.")
            self.redis.hset(openstack_id, "status", self.ACTIVE)
            self.returncode = self.process.returncode
            self.process.wait()
        return done

    def get_logs(self):
        self.log_file_stdout.seek(0, 0)
        lines_stdout = self.log_file_stdout.readlines()
        for line in lines_stdout:
            self.stdout += line
        self.log_file_stderr.seek(0, 0)
        line_stderr = self.log_file_stderr.readlines()
        for line in line_stderr:
            self.stderr += line
        return self.returncode, self.stdout, self.stderr

    def cleanup(self, openstack_id):
        # self.directory.cleanup()
        self.redis.delete(openstack_id)

    def stop(self, openstack_id):
        self.process.terminate()
        rc, stdout, stderr = self.get_logs()
        logs_to_save = {"returncode": rc, "stdout": stdout, "stderr": stderr}
        # hmset is deprecated in redis-py; hset with mapping= is equivalent
        self.redis.hset(f"pb_logs_{openstack_id}", mapping=logs_to_save)
        self.cleanup(openstack_id)
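For orientation, a hypothetical end-to-end use of the class above; every argument value here is a placeholder, and the real ones come from the surrounding service:

import time
import redis

redis_pool = redis.ConnectionPool(host="localhost", port=6379)
playbook = Playbook(
    ip="192.0.2.10",                # placeholder VM address
    port=22,
    playbooks_information={},       # no extra playbooks; change_key still runs
    osi_private_key="-----BEGIN OPENSSH PRIVATE KEY-----\n...",
    public_key="ssh-rsa AAAA...",
    pool=redis_pool,
    loaded_metadata_keys=[],
    cloud_site="somesite",
)
playbook.run_it()
# check_status() returns None while the ansible-playbook process is running
while playbook.check_status(openstack_id="vm-123") is None:
    time.sleep(5)
rc, stdout, stderr = playbook.get_logs()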