Пример #1
0
def test_NA10860_debug(monkeypatch, solver):
    """End-to-end run of the genotyper on NA10860 with --debug enabled.

    Verifies three artifacts: the genotyping output file, the solver
    summary lines in the log, and the member listing of the produced
    .tar.gz debug archive.
    """
    file = script_path("aldy.tests.resources/NA10860.bam")
    with tmpfile(suffix=".tar.gz") as tmp:
        with tmpfile(mode="w") as out, tmpfile(mode="w") as out_log:
            # Close immediately: only the *names* are passed to the tool,
            # which opens and writes the files itself.
            out.close()
            out_log.close()
            assert_file(
                monkeypatch,
                file,
                solver,
                EXPECTED_NA10860 + "Preparing debug archive...",
                {
                    # --debug expects a prefix; strip the ".tar.gz"
                    # suffix (7 characters) from the temp name.
                    "--debug": tmp.name[:-7],
                    "--log": out_log.name,
                    "--output": out.name
                },
            )
            with open(script_path(
                    "aldy.tests.resources/NA10860.out.expected")) as f:
                expected = f.read()
            with open(out.name) as f:
                produced = f.read()
            assert produced == expected
            # Check logs
            with open(out_log.name) as f:
                log = f.read()
            # Expected solver summary, assembled in pieces for line length.
            s = "Major solver: MajorSol[1.33; sol=(1x*4, 1x*4.f, 1x*61); "
            s += "cn=CNSol[6.73; sol=(2x*1,1x*61); "
            s += "cn=3333333333333322222_2|333333333333334444444]"
            assert s in log

        # Inner `with` closed: the archive is complete; compare its sorted
        # member listing against the expected one.
        out = subprocess.check_output(f"tar tzf {tmp.name}",
                                      shell=True).decode("utf-8")
        out = "\n".join(sorted(out.strip().split("\n")))
        assert out == EXPECTED_NA10860_DEBUG_TAR.strip()
Пример #2
0
def test_NA10860_debug(monkeypatch, solver):
    """End-to-end run of the genotyper on NA10860 with --debug enabled.

    Variant that checks a major-solver status line in the log and the
    member listing of the produced .tar.gz debug archive.
    """
    file = script_path("aldy.tests.resources/NA10860.bam")
    with tmpfile(suffix=".tar.gz") as tmp:
        with tmpfile(mode="w") as out, tmpfile(mode="w") as out_log:
            # Close immediately: only the file *names* are passed on.
            out.close()
            out_log.close()
            assert_file(
                monkeypatch,
                file,
                solver,
                EXPECTED_NA10860 + "Preparing debug archive...",
                # tmp.name[:-7] strips the ".tar.gz" suffix (7 chars).
                {"--debug": tmp.name[:-7], "--log": out_log.name, "--output": out.name},
            )
            with open(script_path("aldy.tests.resources/NA10860.out.expected")) as f:
                expected = f.read()
            with open(out.name) as f:
                produced = f.read()
            assert produced == expected
            # Check logs
            with open(out_log.name) as f:
                log = f.read()
            s = "    rs1058172    42523528.C>T    3267G>A    "
            s += "(cov=  21, cn= 0.9; impact=R365H)\n"
            # NOTE(review): the rs1058172 string built just above is discarded
            # by the plain `s = ...` reassignment below, so it is never
            # asserted. Likely `s +=` or a separate `assert s in log` was
            # intended — confirm before changing, as it alters the test.
            s = "[major] status= optimal; opt= 1.48; solution= 1x*4.021, 1x*4J, 1x*61\n"
            assert s in log

        # Compare the archive's sorted member listing with the expectation.
        out = subprocess.check_output(f"tar tzf {tmp.name}", shell=True).decode("utf-8")
        out = "\n".join(sorted(out.strip().split("\n")))
        assert out == EXPECTED_NA10860_DEBUG_TAR.strip()
Пример #3
0
def borderDiff((filename, blockSize, offset, index)):
    print >> sys.stderr, 'Processing {0} ...'.format(index)
    inputFile = open(fileName)
    tmpInput = tmpfile('w+r', prefix='{1}-input-s{0}-'.format(index, MINEDEP), delete=False)
    tmpOutput = tmpfile('w+r', prefix='{1}-output-s{0}-'.format(index, MINEDEP), delete=False)
    #    tmpOutput = tmpfile('w+r', prefix=MINEDEP) #In case the output is not required (just computing times) uncomment this and comment the prev
    name = tmpOutput.name
    myfiles.append(inputFile)
    myfiles.append(tmpInput)
    myfiles.append(tmpOutput)
    try:
        inputFile.seek(offset, 0)
        copyBlock(inputFile, tmpInput, blockSize)
        r = mineHypergraph(tmpInput, tmpOutput)
    except:
        raise
    finally:
        tmpInput.close()
        inputFile.close()
        tmpOutput.close()

    myfiles.remove(inputFile)
    myfiles.remove(tmpInput)
    myfiles.remove(tmpOutput)

    print >> sys.stderr, 'Done with {0}!'.format(index)
    #    return r 	#In case the output is not required (just computing times) uncomment this and comment the next
    return name
Пример #4
0
def borderDiff((filename, blockSize, offset, index)):
    print >> sys.stderr, 'Processing {0} ...'.format(index)
    inputFile = open(fileName)
    tmpInput = tmpfile('w+r',
                       prefix='{1}-input-s{0}-'.format(index, MINEDEP),
                       delete=False)
    tmpOutput = tmpfile('w+r',
                        prefix='{1}-output-s{0}-'.format(index, MINEDEP),
                        delete=False)
    #    tmpOutput = tmpfile('w+r', prefix=MINEDEP) #In case the output is not required (just computing times) uncomment this and comment the prev
    name = tmpOutput.name
    myfiles.append(inputFile)
    myfiles.append(tmpInput)
    myfiles.append(tmpOutput)
    try:
        inputFile.seek(offset, 0)
        copyBlock(inputFile, tmpInput, blockSize)
        r = mineHypergraph(tmpInput, tmpOutput)
    except:
        raise
    finally:
        tmpInput.close()
        inputFile.close()
        tmpOutput.close()

    myfiles.remove(inputFile)
    myfiles.remove(tmpInput)
    myfiles.remove(tmpOutput)

    print >> sys.stderr, 'Done with {0}!'.format(index)
    #    return r 	#In case the output is not required (just computing times) uncomment this and comment the next
    return name
Пример #5
0
    def test_main(self, site_mock):
        """preview.main posts the wikitext to the site's parse API and writes
        HTML that contains the parse output and every style module.
        """
        # Arrange: fake the MediaWiki `parse` API response.
        post_mock = site_mock.return_value.post
        post_mock.return_value = {
            'parse': {
                'text': {
                    '*': 'parsing_output',
                },
                'modulestyles': [
                    'some_module',
                    'some_other_module',
                ],
            },
        }

        expected_site = 'some_site'
        expected_text = 'some_content'
        expected_skin = 'some_skin'
        expected_extra_modules = [
            'some_extra_module', 'some_other_extra_module'
        ]
        # The rendered output must reference both the modules reported by
        # the API response and the extra ones passed on the command line.
        expected_modules = post_mock.return_value['parse'][
            'modulestyles'] + expected_extra_modules

        with tmpfile('w') as wikitext_file, tmpfile('r+') as out_file:
            wikitext_file.write(expected_text)
            wikitext_file.flush()

            argv = [
                '--site',
                expected_site,
                '--wikitext-file',
                wikitext_file.name,
                '--skin',
                expected_skin,
                '--extra-style-modules',
                *expected_extra_modules,
                '--output-file',
                out_file.name,
            ]

            expected_args = {
                'text': expected_text,
                'prop': 'text|modules|jsconfigvars',
                'contentmodel': 'wikitext',
            }

            # Act
            scripts.preview.main(argv)

            # Assert
            site_mock.assert_called_once_with(expected_site)
            post_mock.assert_called_once_with('parse', **expected_args)

            output_html = out_file.read()
            self.assertRegex(output_html, 'parsing_output')
            for module in expected_modules:
                self.assertRegex(output_html, module)
Пример #6
0
 def test_load_no_type_meta_post(self):
     """A post without an explicit `type` falls back to the default type."""
     with tmpfile(suffix='.md', mode='w+t') as markdown_file:
         markdown_file.write(md.no_type_markdown_text)
         markdown_file.seek(0)
         self.ssg.ifpath = markdown_file.name
         loaded = self.ssg.load_posts()
         self.assertTrue(self.ssg.default_post_type in loaded[0][1]['type'])
Пример #7
0
def runconsole(vmname, istty, api):
	"""call: 'console VM'
	description: 'Log into the out-of-band management console on the given instance.
	             .This is roughly equivalent to directly connecting to the serial port on the machine.
	             .This command requires the programs "grep" and "ssh" be installed
	             .and accessible in a folder listed in the PATH environment variable.
	             .See NOTES section of the man page for extra notes.'
	args: 'VM: The instance to connect to.'
	errors: 'VM does not exist: Return exit code 3
	        .Fail to run ssh: Return exit code 4'
	"""

	console_ip = '49.156.16.12' # IP of OVM console server
	console_rsa = "%s ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEA1ZKuRj8UkoCUvGKGSGd9vQAlP3uCmq+8vBdF/SeZ2sr0Gf9+ZTn5Di8EGBBwOArU791VTtTWTg2kSC6xbearWH9xxD8omnjaYyBmqBLZ0yimVuIQWh3QS5YglxdQoGZUJ7a7ddQDLvO11f4eirP6HcNYSfGT5070jqoEiETmdcoQsdsxdJFs6GBssMoMij1i4HRDbCDPMdViEOQ19LQBCd3LsTFcmZJ/LIO9BCxsSeyV5IPkUVVzVc29JOmqDbCTcHuOidrupVheSSkZjhB0Cq6L8tOaFP/5gj7Ab6PiZPC3hOoLFgPJ3zk50RfAT2/enKqwHQFnN1QzfBBMg1kJiw==" % console_ip
	
	# Check vmname exists. This also checks user/pswd is valid.
	if not getvmby(vmname, api, what='hostname'):
		if istty:
			print 'Cannot find vm "%s"' % vmname
		return 3
	
	# Write a temporary known hosts file, which means we don't clutter up user's ~/.ssh/known_hosts:
	hostfile = tmpfile()
	hostfile.write(console_rsa)
	hostfile.flush()
	# The `hostfile` reference must stay alive while ssh runs: the temp file
	# is deleted once the object is closed/garbage-collected.

	# NOTE(review): api.pswd is placed on the ssh command line, so it is
	# visible in the local process list — confirm whether the stdin-based
	# hand-off used by the other runconsole variant should be used here too.
	try:
		proc = Popen(['ssh', '-qqt', '-oGlobalKnownHostsFile=%s' % hostfile.name, 'console@%s' % console_ip, "%s %s %s" % (api.user, vmname, api.pswd)])
	except OSError:
		if istty:
			print 'Failed to start remote shell. Is the program "ssh" installed and accessbile?'
		return 4
	# NOTE(review): this variant neither waits on `proc` nor returns a
	# success code after launching ssh — confirm whether a proc.wait() (as
	# in the longer variant of this function) was intended.
Пример #8
0
def runconsole(vmname, istty, api):
	"""Open an interactive console session to *vmname* via the OVM console
	server, sending the password over the ssh channel's stdin.

	Returns 3 if the ssh binary cannot be started; otherwise hands the
	local stdin over to the running ssh process.
	"""

	console_ip = '49.156.16.12'
	console_rsa = "%s ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEA1ZKuRj8UkoCUvGKGSGd9vQAlP3uCmq+8vBdF/SeZ2sr0Gf9+ZTn5Di8EGBBwOArU791VTtTWTg2kSC6xbearWH9xxD8omnjaYyBmqBLZ0yimVuIQWh3QS5YglxdQoGZUJ7a7ddQDLvO11f4eirP6HcNYSfGT5070jqoEiETmdcoQsdsxdJFs6GBssMoMij1i4HRDbCDPMdViEOQ19LQBCd3LsTFcmZJ/LIO9BCxsSeyV5IPkUVVzVc29JOmqDbCTcHuOidrupVheSSkZjhB0Cq6L8tOaFP/5gj7Ab6PiZPC3hOoLFgPJ3zk50RfAT2/enKqwHQFnN1QzfBBMg1kJiw==" % console_ip

	# Check vmname exists. This also checks user/pswd is valid.
	# NOTE(review): the comment above survives from the other variant, but
	# the getvmby() existence check itself was removed here — confirm
	# whether that was intentional.

	# Write a temporary known hosts file, which means we don't clutter up user's ~/.ssh/known_hosts:
	hostfile = tmpfile()
	hostfile.write(console_rsa)
	hostfile.flush()

	try:
		proc = Popen(['ssh', '-qqt', '-oGlobalKnownHostsFile=%s' % hostfile.name, 'console@%s' % console_ip, "%s %s  " % (api.user, vmname)], stdin=PIPE, stdout=PIPE) # Extra two spaces are a workaround for a console-side error.
	except OSError:
		if istty:
			print 'Failed to start remote shell. Is the program "ssh" installed and accessbile?'
		return 3

	# Send the password through the pipe instead of on the command line.
	proc.stdin.write(api.pswd + '\n')
	# Eat first two lines, which are a greeting and a password prompt.
	proc.stdout.readline()
	proc.stdout.readline()

	os.dup2(sys.stdin.fileno(), proc.stdin.fileno()) # Overwrite the proc PIPE fd with stdin
Пример #9
0
def upload():
    """Flask endpoint: store an uploaded file under a session-tracked
    temporary name inside configdb.upload_folder.

    Returns a JSON success payload when a file was provided.
    """
    if request.method == 'POST':
        file = request.files['file']
        parmname=request.form['filename']
        # NOTE(review): next(tmpfile()) implies tmpfile is a generator of
        # names here; if it were tempfile.NamedTemporaryFile this would
        # instead read a line from an empty file — confirm which tmpfile is
        # in scope for this module.
        session[parmname]=next(tmpfile())
        if file:
            filename_path = os.path.join(configdb.upload_folder, session[parmname])
            file.save(filename_path)
            return jsonify({"success":True})
        # NOTE(review): falls through with no return value when `file` is
        # falsy or the method is not POST — Flask turns that into an error;
        # an explicit error response is probably wanted.
Пример #10
0
 def __init__(self, data):
     """Parse a raw HTTP request string.

     Splits the request line into method/path/version, wraps the remaining
     header lines, and separates the query string from the path.
     """
     headers                              = data.split('\r\n')
     request                              = headers.pop(0)
     self.method, self.path, self.version = request.split(' ')
     self.headers                         = Headers(headers)
     # Temp file (append+read) used to spool the request body.
     self.fd                              = tmpfile('a+')
     self.data                            = None
     # Everything after '?' becomes the parsed query dict.
     self.path, sep, self.query           = self.path.partition('?')
     self.query                           = parse_qs(self.query)
Пример #11
0
 def __init__(self, data):
     """Parse a raw HTTP request string into method, path, version,
     headers, and the decoded query dict.
     """
     lines = data.split('\r\n')
     request_line = lines.pop(0)
     self.method, self.path, self.version = request_line.split(' ')
     self.headers = Headers(lines)
     self.fd = tmpfile('a+')
     self.data = None
     # Split "path?query" and decode the query-string portion.
     self.path, sep, self.query = self.path.partition('?')
     self.query = parse_qs(self.query)
Пример #12
0
def test_NA10860_vcf_out(monkeypatch, solver):
    """Compare the VCF output for NA10860 against the expected fixture."""
    bam = script_path("aldy.tests.resources/NA10860.bam")
    with tmpfile(suffix=".vcf", mode="w") as out:
        # Only the name is needed; the tool reopens the file itself.
        out.close()
        assert_file(monkeypatch, bam, solver, EXPECTED_NA10860, {"--output": out.name})
        expected_path = script_path("aldy.tests.resources/NA10860.vcf.expected")
        with open(expected_path) as handle:
            expected = handle.read()
        with open(out.name) as handle:
            produced = handle.read()
        # The fixture pins an old version string; normalize it first.
        assert produced == expected.replace("aldy-v3.1", f"aldy-v{__version__}")
Пример #13
0
def run_experiments(base, **kwargs):
    """Run all experiments found in base param.
    and check that flambe executes without errors.

    Before running the configs, it updates the save_path to
    be a tempdir and updates (potentially) the iteration's
    params (if found) to be 1.

    """
    for fname in os.listdir(base):
        full_f = os.path.join(base, fname)
        if os.path.isfile(full_f) and fname.endswith('yaml'):
            # d: scratch save_path; t: the templated config after **kwargs
            # substitution; f: the preprocessed config handed to flambe.
            with tmpdir() as d, tmpfile() as f, tmpfile('w') as t:
                content = open(full_f).read().format(**kwargs)
                t.write(content)
                t.flush()
                new_exp = _preprocess_experiment(t.name, d)
                if new_exp:
                    # NOTE(review): f was opened with tmpfile()'s default
                    # mode; if that is binary, yaml.dump_all writing str
                    # would fail on Python 3 — confirm tmpfile's defaults.
                    yaml.dump_all(new_exp, f)
                    # -i: run flambe in "install extensions" mode on the
                    # generated config; the run must exit cleanly.
                    ret = subprocess.run(['flambe', f.name, '-i'])
                    assert ret.returncode == 0
Пример #14
0
 def edittext(self, text):
     """Open *text* in the user's editor and return the edited contents.

     Writes the text to a temporary file, launches EDITOR on it, reads the
     result back, and removes the file. Raises if the saved file is empty.
     """
     # tmpfile() is mkstemp-style here (it returns a descriptor + path).
     # BUG FIX: the original never closed the descriptor, leaking one fd
     # per call; close it and work through regular file objects instead.
     fd, path = tmpfile()
     os.close(fd)
     try:
         with open(path, 'w') as f:
             f.write(text)
         subprocess.check_call([EDITOR, path])
         with open(path, 'r') as f:
             s = f.read()
     finally:
         # Remove the temp file even if the editor fails — the original
         # left it behind on error.
         os.unlink(path)
     if not s:
         raise Exception("Must enter a query into the editor and save the file.")
     return s.strip()
Пример #15
0
 def edittext(self, text):
     """Open *text* in the user's editor and return the edited contents.

     Writes the text to a temporary file, launches EDITOR on it, reads the
     result back, and removes the file. Raises if the saved file is empty.
     """
     # tmpfile() is mkstemp-style here (it returns a descriptor + path).
     # BUG FIX: the original never closed the descriptor, leaking one fd
     # per call; close it and work through regular file objects instead.
     fd, path = tmpfile()
     os.close(fd)
     try:
         with open(path, 'w') as f:
             f.write(text)
         subprocess.check_call([EDITOR, path])
         with open(path, 'r') as f:
             s = f.read()
     finally:
         # Remove the temp file even if the editor fails — the original
         # left it behind on error.
         os.unlink(path)
     if not s:
         raise Exception(
             "Must enter a query into the editor and save the file.")
     return s.strip()
Пример #16
0
    def convolve_row(row):
        """Convolve one detector row by invoking the external doas_cl tool.

        Builds the per-row slit-function filename, runs doas_cl writing to
        a temp file, and returns the second column of the parsed result.
        Relies on doas_cl, doas_version, config_filename, base_dir,
        slit_dir, xml_option and band from the enclosing scope.
        """
        with tmpfile() as out_file:
            slit_filename = path.join(
                base_dir, slit_dir,
                '{band}_row_{row:03d}.txt'.format(band=band, row=row))

            doas_args = [
                doas_cl, "--version=" + doas_version, "-c", config_filename,
                "-o", out_file.name, "-xml", xml_option + slit_filename
            ]

            subprocess.call(doas_args)

            # The tool emits comment lines starting with ';'.
            data = np.loadtxt(out_file, comments=';')
        return data[:, 1]
Пример #17
0
def execute(scriptname):
    """Run a configured script with parameters taken from the request form.

    FILE parameters resolve to previously uploaded files; FILEDOWN
    parameters receive a fresh session-tracked download name. Returns a
    JSON document with the command line, return code and ANSI-rendered
    output.
    """
    import os
    scripts = configScripts()
    parameters = ns(request.form.items())
    # NOTE(review): params_list is never used below — dead variable.
    params_list = []
    output_file = False
    if 'parameters' in scripts[scriptname]:
        for parm_name,parm_data in scripts[scriptname]['parameters'].items():
            if parm_data.get('type', None) ==  'FILE':
                # Input file previously stored by the upload endpoint;
                # the session maps the parameter name to the temp name.
                filename=os.path.join(configdb.upload_folder,session[parm_name])
                parameters[parm_name] = filename
            elif parm_data.get('type', None) == 'FILEDOWN':
                # NOTE(review): next(tmpfile()) implies tmpfile yields
                # names here (see the matching upload() endpoint) — confirm.
                session[parm_name]=next(tmpfile())
                filename=os.path.join(configdb.download_folder,session[parm_name])
                parameters[parm_name] = filename
                output_file = parm_name
            # Fall back to the script's declared default when unset.
            if not parameters.get(parm_name, None) and scripts[scriptname]['parameters'][parm_name].get('default',None):
                parameters[parm_name] = scripts[scriptname]['parameters'][parm_name]['default']
    script = scripts[scriptname].script
    if type(script) is not list:
        script = shlex.split(script)
    # Substitute request-supplied parameters into each argv piece.
    # NOTE(review): no shell is involved (list argv), but the formatted
    # values still originate from the request — review against the set of
    # configured scripts for path-style injection.
    commandline = [
        piece.format(**parameters)
        for piece in script
        ]
    commandline = [cmd.replace('SOME_SRC',configdb.prefix) for cmd in commandline]
    return_code=0

    try:
        output=subprocess.check_output(commandline,stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as e:
        output=e.output
        return_code=e.returncode
    # Script output may not be UTF-8; latin-1 never fails to decode.
    try:
        output_decoded=output.decode('utf-8')
    except UnicodeDecodeError:
        output_decoded=output.decode('latin-1')
    return json.dumps(dict(
        script_name=scriptname,
        output_file=output_file,
        return_code=return_code,
        response=deansi.deansi(output_decoded),
        commandline=commandline,
        ))
Пример #18
0
def runconsole(vmname, istty, api):
	"""call: 'console VM'
	description: 'Log into the out-of-band management console on the given instance.
	             .This is roughly equivalent to directly connecting to the serial port on the machine.
	             .This command requires the programs "grep" and "ssh" be installed
	             .and accessible in a folder listed in the PATH environment variable.
	             .See NOTES section of the man page for extra notes.'
	args: 'VM: The instance to connect to.'
	errors: 'VM does not exist: Return exit code 3
	        .Fail to run ssh: Return exit code 4
	        .ssh exits with non-zero code (generic error): Return exit code 5
	        .ssh exits with code 255 (connection or protocol error): Return exit code 6'
	"""

	console_ip = '49.156.16.12' # IP of OVM console server
	console_rsa = "%s ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCzufEnOKEipxlbNqqWGy/yYMJi9g9/Co/dL4D+3RzhfH4aBDnIlrM8vfOAtIZ3UoXy7Bo9ZQdu+5XZVyZ4+ejQFJdjnphHEf82dJpwKhWajnCkqsakUBt9RoAVRKOJYxI9lCdW9xUlmr9CwpmWEnpy+fQwV4mDAa2q/eaVPk6cHvwCwYNbdyHahBrmP2EMsLShs4YQfY5hsufAN8KJPClPTn4qHYaHLHO6WwcB11uZJXEGLV4tvyfIpXVt8h6RTY4A7/b67eFFwz93Bca2/VElXgSNjW8IjF8dJdWrGNTj+2ndNE0ZdJYzO//lRyy/n7vpVUH7UmWLz4S/XfSYwZ9D" % console_ip
	
	# Check vmname exists. This also checks user/pswd is valid.
	if not getvmby(vmname, api, what='hostname'):
		if istty:
			print 'Cannot find vm "%s"' % vmname
		return 3
	
	# Write a temporary known hosts file, which means we don't clutter up user's ~/.ssh/known_hosts:
	hostfile = tmpfile()
	hostfile.write(console_rsa)
	hostfile.flush()
	# The `hostfile` reference must stay alive while ssh runs, since the
	# temp file is deleted once the object is closed/garbage-collected.

	try:
		proc = Popen(['ssh', '-qqt', '-oGlobalKnownHostsFile=%s' % hostfile.name, 'console@%s' % console_ip, "%s %s %s" % (api.user, vmname, api.pswd)])
	except OSError:
		if istty:
			print 'Failed to start remote shell. Is the program "ssh" installed and accessbile?'
		return 4

	# Map ssh's exit status onto the documented error codes: 255 signals a
	# connection/protocol failure (6); any other non-zero code is generic (5).
	ret = proc.wait()
	if ret:
		if istty:
			print 'Remote shell failed with exit code %d' % ret
		if ret == 255:
			return 6
		else:
			return 5
Пример #19
0
    def wait_for_data(self):
        """Spool self.data into a fresh temp file, then either finish or
        re-arm collection of more data.

        Every failure is reported through debug() and aborts quietly.
        """
        try:
            # Append+read temp file used as the data buffer.
            self.fd = tmpfile('a+')
        except Exception:
            debug()
            return

        try:
            self.fd.write(self.data)
        except Exception:
            debug()
            return

        is_done = self.check_data_size()

        if is_done:
            return

        # Not done yet — presumably re-arms get_data on the LOAD event via
        # the event loop; confirm xmap/LOAD semantics in the framework.
        xmap(self.spin, LOAD, self.get_data)
Пример #20
0
def request_file(url):
    """Download *url* into a named temporary file and return its path.

    The file is created with delete=False, so it survives this call; the
    caller owns it and must remove it when finished.
    """
    import shutil
    from contextlib import closing
    from tempfile import NamedTemporaryFile as tmpfile
    from urllib.request import urlopen

    # BUG FIX: the original never closed the URL handle, and left the temp
    # file open on a mid-download error. closing()/with release both even
    # on failure; copyfileobj streams in chunks, so large downloads are
    # never held fully in memory.
    with closing(urlopen(url)) as response, tmpfile(delete=False) as f:
        shutil.copyfileobj(response, f)
        return f.name
Пример #21
0
    # Dedicated scratch directory so every tmpfile() below lands in one
    # place that can be removed wholesale afterwards.
    tempfile.tempdir = tempfile.mkdtemp(prefix=MINEDEP, dir="/tmp")

    functions = [paramExecSHD_RS, paramExecSHD_DFS
                 ]  #[paramExecSHD_RS,paramExecMTMiner,paramExecSHD_DFS]

    # CLI: <function index> <threads> <input file> <class file> <output>
    paramExec = functions[int(sys.argv[1])]
    nbThreads = int(sys.argv[2])

    inputFileName = sys.argv[3]
    classfile = sys.argv[4]
    outputName = sys.argv[5]

    #print >>sys.stderr, sys.argv
    nbPos, nbNeg = classDistribution(classfile)

    # Persistent (delete=False) temp file that receives the lowerBound
    # result; removed manually on error below.
    tmpOutput = tmpfile('w+', prefix=MINEDEP, delete=False)
    fileName = tmpOutput.name
    tmpOutput.close()

    try:
        lowerBound(inputFileName, classfile, fileName)
    except CalledProcessError as error:
        print >> sys.stderr, 'LowerBound Error: file ({0}), error ({1})'.format(
            inputFileName, error)
        os.remove(fileName)
        sys.exit(1)

    outputFile = open(outputName, 'w')
    offsets = computeOffset(fileName, nbPos, nbNeg)

    # Worker pool for per-block mining.
    proc = Pool(processes=nbThreads)
Пример #22
0
    # Dedicated scratch directory so every tmpfile() below lands in one
    # place that can be removed wholesale afterwards.
    # NOTE(review): dir="tmp" is relative to the CWD, while the twin variant
    # of this script uses "/tmp" — confirm which is intended.
    tempfile.tempdir = tempfile.mkdtemp(prefix=MINEDEP, dir="tmp")

    functions = [paramExecSHD_RS, paramExecSHD_DFS]  # [paramExecSHD_RS,paramExecMTMiner,paramExecSHD_DFS]

    # CLI: <function index> <threads> <input file> <class file> <output>
    paramExec = functions[int(sys.argv[1])]
    nbThreads = int(sys.argv[2])

    inputFileName = sys.argv[3]
    classfile = sys.argv[4]
    outputName = sys.argv[5]

    # print >>sys.stderr, sys.argv
    nbPos, nbNeg = classDistribution(classfile)

    # Persistent (delete=False) temp file that receives the lowerBound
    # result; removed manually on error below.
    tmpOutput = tmpfile('w+', prefix=MINEDEP, delete=False)
    fileName = tmpOutput.name
    tmpOutput.close()

    try:
        lowerBound(inputFileName, classfile, fileName)
    except CalledProcessError as error:
        print >> sys.stderr, 'LowerBound Error: file ({0}), error ({1})'.format(inputFileName, error)
        os.remove(fileName)
        sys.exit(1)

    outputFile = open(outputName, 'w')
    offsets = computeOffset(fileName, nbPos, nbNeg)

    # Worker pool for per-block mining; one input tuple per positive row.
    proc = Pool(processes=nbThreads)
    inputs = [(fileName, nbNeg, offsets[i], i) for i in range(0, nbPos)]
Пример #23
0
#    Restarting the kernel ensures that all modules and variables your code needs
#    are actually generated and loaded in your code
# 8. You can use modules other than the ones mentioned below but do NOT use
#    modules that are not part of the rsm-msba-spark docker container by default

import pandas as pd
import sqlite3
from datetime import date
import pyrsm as rsm
import urllib.request
from tempfile import NamedTemporaryFile as tmpfile
import os
import numpy as np

# load the original bbb.pkl data frame from a Dropbox link
# NOTE(review): the NamedTemporaryFile object is discarded immediately, so
# CPython removes the file as soon as it is garbage-collected and only the
# path is reused by urlretrieve — this works but is racy; delete=False (plus
# an explicit cleanup) would be safer.
bbb_file = tmpfile().name
urllib.request.urlretrieve(
    "https://www.dropbox.com/s/6bulog0ij4pr52o/bbb.pkl?dl=1", bbb_file)
bbb = pd.read_pickle(bbb_file)

# view the data description of the original data to determine
# what needs to be re-created
rsm.describe(bbb)

# set the working directory to the location of this script
# NOTE(review): os.getcwd() only reads the CWD; the chdir that would set it
# is commented out below.
os.getcwd()
# os.chdir(os.path.dirname(os.path.abspath(__file__)))

# load demographics data from bbb_demographics.tsv
bbb_demographics = pd.read_csv('data/bbb_demographics.tsv', sep='\t')
bbb_demographics['zip'] = bbb_demographics['zip'].apply(
Пример #24
0
        xs_out = args.o
    else:
        xs_out = "%s_%s_%s%s" % (name, args.binning, band, extension)

    # Choose the convolution type: apply the I0 correction when a
    # concentration was supplied, otherwise run the standard convolution.
    if args.I0 > 0.0:
        convtype = 'iocorr'
        conv_header = 'I0 correction, conc=%.4e' % args.I0
    else:
        convtype = 'std'
        conv_header = 'standard'

    time = datetime.now()

    # Fill the config template and hand the resulting file to the per-row
    # convolution jobs; the temp file lives only for the duration of the run.
    with open(path.join(base_dir, config_template), 'r') as f:
        configstring = f.read()
    with tmpfile() as config_file:
        config_file.write(
            configstring.format(convtype=convtype,
                                i0conc=args.I0,
                                input=xs_in,
                                calib=args.grid))
        config_file.flush()
        # One convolution per detector row, args.j ways in parallel.
        xs_data = map_parallel(
            args.j, convolution(config_file.name, args.binning, band),
            range(0, num_rows[args.binning][band]))

    # Read the input grid's first column, skipping ';' / '*' comment lines.
    with open(xs_in) as f:
        gen = (line for line in f if not line[0] in (';', '*'))
        input_grid = np.genfromtxt(gen)[:, 0]

    target_grid = np.loadtxt(args.grid)
Пример #25
0
def test_exporter_builder():
    """Integration test: run an !Experiment containing an !Exporter stage,
    then a !Builder that packages the exported model, and verify that the
    built engine loads back with its extensions metadata intact.
    """
    with tmpdir() as d, tmpdir() as d2, tmpfile(
            mode="w", suffix=".yaml") as f, tmpfile(mode="w",
                                                    suffix=".yaml") as f2:
        # First run an experiment
        exp = """
!Experiment

name: exporter
save_path: {}

pipeline:
  dataset: !SSTDataset
    transform:
      text: !TextField
      label: !LabelField
  model: !TextClassifier
    embedder: !Embedder
      embedding: !torch.Embedding
        num_embeddings: !@ dataset.text.vocab_size
        embedding_dim: 30
      encoder: !PooledRNNEncoder
        input_size: 30
        rnn_type: lstm
        n_layers: 1
        hidden_size: 16
    output_layer: !SoftmaxLayer
      input_size: !@ model[embedder].encoder.rnn.hidden_size
      output_size: !@ dataset.label.vocab_size

  exporter: !Exporter
    model: !@ model
    text: !@ dataset.text
"""

        # Substitute the scratch save_path and execute the experiment.
        exp = exp.format(d)
        f.write(exp)
        f.flush()
        ret = subprocess.run(['flambe', f.name, '-i'])
        assert ret.returncode == 0

        # Then run a builder

        builder = """
flambe_inference: tests/data/dummy_extensions/inference/
---

!Builder

destination: {0}

component: !flambe_inference.DummyInferenceEngine
  model: !TextClassifier.load_from_path
    path: {1}
"""
        # Locate the exported model checkpoint inside the experiment output.
        base = os.path.join(d, "output__exporter", "exporter")
        path_aux = [
            x for x in os.listdir(base) if os.path.isdir(os.path.join(base, x))
        ][0]  # Should be only 1 folder bc of no variants
        model_path = os.path.join(base, path_aux, "checkpoint",
                                  "checkpoint.flambe", "model")

        builder = builder.format(d2, model_path)
        f2.write(builder)
        f2.flush()

        ret = subprocess.run(['flambe', f2.name, '-i'])
        assert ret.returncode == 0

        # The extensions needs to be imported using extensions.py module
        extensions.import_modules(["flambe_inference"])

        # Import the module after import_modules (which registered tags already)
        from flambe_inference import DummyInferenceEngine

        # Load the built artifact both via the generic loader and via the
        # engine class itself; the two must agree.
        eng1 = flambe.load(d2)

        assert type(eng1) is DummyInferenceEngine
        assert type(eng1.model) is TextClassifier

        extension_path = os.path.join(
            os.path.dirname(os.path.dirname(os.path.dirname(__file__))),
            "tests/data/dummy_extensions/inference")
        assert eng1._extensions == {"flambe_inference": extension_path}

        eng2 = DummyInferenceEngine.load_from_path(d2)

        assert type(eng2) is DummyInferenceEngine
        assert type(eng2.model) is TextClassifier

        assert eng2._extensions == {"flambe_inference": extension_path}

        assert module_equals(eng1.model, eng2.model)
Пример #26
0
 def test_load_invalid_markdown_file(self):
     """A file with an unsupported extension must yield no posts."""
     with tmpfile(suffix='.mdx') as invalid_file:
         self.ssg.ifpath = invalid_file.name
         self.assertFalse(self.ssg.load_posts())
Пример #27
0
        continue
    seen_hosts.append(node_name.lower())

    if 'ec2' not in node.attributes:
        print "Did not get ec2 for node", node.attributes.keys()
        #print "Did not get ec2 for node", node.attributes['hostname']
        continue
    print "Adding node", node.name
    include_file.write("%s\tIN\tCNAME\t%s.\n" % (node.name, node.attributes['ec2']['public_hostname']))
include_file.close()

# Update Serial in parent zone file
zone_file = open(domain_zonefile_path, 'r')
print "Zone file path is %s" % domain_zonefile_path
print "Include file path is %s" % hosts_include_file
# Rewrite the zone into a persistent temp file (delete=False); presumably
# it is renamed over the original after this view — confirm downstream.
tmp_zone = tmpfile(delete=False)

while True:
    line = zone_file.readline()
    # Match lines like "   2021010101 ; serial" and bump the serial by one
    # so secondaries pick up the change; all other lines pass unmodified.
    serial_match = re.match(r'(\s*)(\d+)\s*;\s*serial', line)
    if serial_match:
        new_serial = int(serial_match.group(2)) + 1
        tmp_zone.write("%s%s ; serial\n" % (serial_match.group(1), new_serial))
    else:
        tmp_zone.write(line)

    # readline() returns '' at EOF; the empty write above is harmless.
    if not line:
        break

# Close and rename prior to BIND reload
zone_file.close()
Пример #28
0
from tempfile import NamedTemporaryFile as tmpfile
from csv2xls import xls




def decode(s, encodings=('gbk', 'utf8')):
	"""Decode byte string *s*, trying *encodings* in order.

	Falls back to ASCII with undecodable bytes silently dropped when
	none of the candidate encodings succeeds.
	"""
	for candidate in encodings:
		try:
			return s.decode(candidate)
		except UnicodeDecodeError:
			continue
	return s.decode('ascii', 'ignore')

# Persistent temp files (delete=False): tempcsv holds the sample CSV input,
# tempxls receives the conversion output.
tempcsv = tmpfile(delete = False)
tempxls = tmpfile(delete = False)

# NOTE(review): NamedTemporaryFile defaults to binary mode; writing a str
# here only works on Python 2 (which the bare `print` below confirms this
# script targets).
tempcsv.write('''
11111,21111
11111,21111
''')
tempcsv.close()
tempxls.close()

print tempcsv.name
# Drive the csv2xls converter with hard-coded option values.
this_instance = xls()
f = open('/tmp/f**k.csv')
this_instance.options.infile_names = ['A']
this_instance.options.outfile_name = 'B'
this_instance.options.set_default_options()
this_instance.options.check_options()