Example #1
File: cdb_tests.py Project: RLuisier/RSeQC
def test():

    d = {}
    for i in range( 10000 ):
        d[ 'foo' + str( i ) ] = 'bar' + str( i )
    
    # Open temporary file and get name    
    file = NamedTemporaryFile()
    file_name = file.name
        
    # Write cdb to file
    FileCDBDict.to_file( d, file )
    file.flush()
    
    # Open on disk
    file2 = open( file_name )
    cdb = FileCDBDict( file2 )
    
    for key, value in d.items():
        assert cdb[key] == value
    
    try:
        cdb['notin']
        assert False, "KeyError was not raised"
    except KeyError:
        pass
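
The write-flush-reopen round trip above is the core pattern. Below is a minimal Python 3 sketch of the same flow, with json standing in for the project-specific FileCDBDict (the json stand-in is an assumption for illustration):

import json
import os
from tempfile import NamedTemporaryFile

d = {'foo%d' % i: 'bar%d' % i for i in range(100)}

# Write to a named temp file, remember the name, then reopen from disk.
tmp = NamedTemporaryFile(mode='w', suffix='.json', delete=False)
json.dump(d, tmp)
tmp.close()

with open(tmp.name) as fh:
    assert json.load(fh) == d
os.remove(tmp.name)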
Example #2
 def _get_tmp_file(self):
     ''' Creates a tmp file with the file data '''
     data = self.request.files['xml_file'][0]['body']
     tmp_file = NamedTemporaryFile(delete=False)
     tmp_file.write(data)
     tmp_file.close()
     return tmp_file.name
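
Since the helper passes delete=False and returns only the path, the caller owns cleanup. A self-contained sketch of that contract (the helper body is restated standalone; the consumer is a stand-in):

import os
from tempfile import NamedTemporaryFile

def get_tmp_file(data):
    tmp = NamedTemporaryFile(delete=False)
    tmp.write(data)
    tmp.close()
    return tmp.name

path = get_tmp_file(b'<xml/>')
try:
    with open(path, 'rb') as fh:
        print(fh.read())          # stand-in for the real consumer
finally:
    os.remove(path)               # delete=False: nothing else removes it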
Example #3
def main():
    logging.basicConfig(format='%(message)s', level=logging.INFO)

    args = parse_args()

    from_date = args.from_date
    to_date = args.to_date

    s3_client = S3Client.connect_with_profile(args.aws_profile)
    s3_bucket = args.s3_bucket
    s3_key = "{}_{}.logs.json".format(from_date.isoformat(), to_date.isoformat())
    sumo_access_id = args.sumo_access_id or os.environ["SUMOLOGIC_ACCESS_ID"]
    sumo_access_key = args.sumo_access_key or os.environ["SUMOLOGIC_ACCESS_KEY"]
    sumo_logic_client = SumoLogicClient(sumo_access_id, sumo_access_key)

    logging.info(
        "Searching for request logs in {} in range {} to {}".format(
            args.environment, from_date.isoformat(), to_date.isoformat()))

    request_logs = sumo_logic_client.search(
        sumo_logic_query(args.environment), start_dt=from_date, end_dt=to_date, fields=REQUEST_FIELDS)

    tmp_file = NamedTemporaryFile(mode='w', encoding='utf-8', delete=False)
    logging.debug("Writing out logs to temporary file {}".format(tmp_file.name))

    try:
        for request in request_logs:
            tmp_file.write(json.dumps(request, default=json_serialize) + '\n')

        s3_client.upload_from_file(s3_bucket, s3_key, tmp_file.name)
    finally:
        logging.debug("Removing temporary file {}".format(tmp_file.name))
        os.remove(tmp_file.name)
Example #4
class AtomicFileWriter(object):
    def __init__(self, path, tmp_prefix=None, encoding='utf-8'):
        self.name = path
        output_dir, base = os.path.split(path)
        if tmp_prefix is None:
            tmp_prefix = base + '.'

        self.tmpf = NamedTemporaryFile(dir=output_dir, prefix=tmp_prefix,
                                       mode='w', encoding=encoding,
                                       delete=False)

    def __enter__(self):
        self.tmpf.__enter__()
        return self

    def __exit__(self, exc_type, exc_value, exc_traceback):
        tmp_name = self.tmpf.name
        result = self.tmpf.__exit__(exc_type, exc_value, exc_traceback)
        if result or exc_type is None:
            os.rename(tmp_name, self.name)
        else:
            os.unlink(tmp_name)
        return result

    def write(self, data):
        return self.tmpf.write(data)
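
A hedged usage sketch of the class above: on a clean exit the temp file is renamed over the target, so readers never see a half-written file; on an exception the temp file is unlinked and the target is left untouched. (Note that os.rename over an existing file fails on Windows; os.replace would be the portable spelling.)

# Assuming AtomicFileWriter as defined above:
with AtomicFileWriter('output.txt') as writer:
    writer.write('all-or-nothing contents\n')
# output.txt now exists in full; an exception inside the block would
# have discarded the temp file and preserved any previous output.txt.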
Example #5
	def run(self):
		active_view = self.window.active_view()
		text = "\n\n".join(getSelectedText(active_view)).strip()

		tf = NamedTemporaryFile(mode="w", delete=False)
		try:
			tf.write(text)
			tf.close()

			res = subprocess.check_output(["m4", tf.name],
			                              stderr=subprocess.STDOUT,
			                              cwd=os.path.dirname(os.path.abspath(active_view.file_name())))
			res = res.decode('utf-8').replace('\r', '').strip()

			panel_name = "m4expand.results"
			panel = self.window.create_output_panel(panel_name)
			self.window.run_command("show_panel", {"panel": "output." + panel_name})

			panel.set_read_only(False)
			panel.set_syntax_file(active_view.settings().get("syntax"))
			panel.run_command("append", {"characters": res})
			panel.set_read_only(True)
		except Exception as e:
			print("M4Expand - An error occurred: ", e)
		finally:
			os.unlink(tf.name)
Example #6
def ipconnections(target, **kwargs):
    """Returns a list of ip connections made by the target.

    A connection is a named tuple with the following properties:
    host (string), host_port (int), remote_port (string), protocol (string),
    timestamp (int).
    """
    if not target:
        raise Exception("Invalid target for ipconnections()")

    output_file = NamedTemporaryFile()
    cmd = ["sudo", "/usr/sbin/dtrace", "-C"]
    if "timeout" in kwargs:
        cmd += ["-DANALYSIS_TIMEOUT=%d" % kwargs["timeout"]]
    cmd += ["-s", path_for_script("ipconnections.d")]
    cmd += ["-o", output_file.name]
    if "args" in kwargs:
        line = "%s %s" % (sanitize_path(target), " ".join(kwargs["args"]))
        cmd += ["-c", line]
    else:
        cmd += ["-c", sanitize_path(target)]

    # The dtrace script will take care of timeout itself, so we just launch
    # it asynchronously
    with open(os.devnull, "w") as f:
        handler = Popen(cmd, stdout=f, stderr=f)

    for entry in filelines(output_file):
        if "## ipconnections.d done ##" in entry.strip():
            break
        yield _parse_single_entry(entry.strip())
    output_file.close()
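
Because ipconnections() is a generator, entries stream in while dtrace is still running. A hypothetical consumer, assuming the named-tuple fields listed in the docstring:

for conn in ipconnections('/bin/ls', timeout=30):
    print('%s:%s -> %s (%s) at %d' % (conn.host, conn.host_port,
                                      conn.remote_port, conn.protocol,
                                      conn.timestamp))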
Example #7
def apicalls(target, **kwargs):
    """
    """
    if not target:
        raise Exception("Invalid target for apicalls()")

    output_file = NamedTemporaryFile()
    kwargs.update({"output_file" : output_file})
    cmd = _dtrace_command_line(target, **kwargs)

    # Generate dtrace probes for analysis
    definitions = os.path.abspath(os.path.join(__file__, "../../core/data/signatures.yml"))
    probes_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), "probes.d")
    generate_probes(definitions, probes_file, overwrite=True)

    # The dtrace script will take care of timeout itself, so we just launch
    # it asynchronously
    with open(os.devnull, "w") as null:
        _ = Popen(cmd, stdout=null, stderr=null, cwd=current_directory())

    with open('/Users/cloudmark/yield.txt', 'w+') as f:
        for entry in filelines(output_file):
            value = entry.strip()
            if "## apicalls.d done ##" in value:
                break
            if len(value) == 0:
                continue
            f.write(str(_parse_entry(value)))
            f.flush()
            import time
            time.sleep(1)
            yield _parse_entry(value)
    output_file.close()
    os.remove(probes_file)
Example #8
    def test_safe_md5(self):
        """Make sure we have the expected md5 with varied input types

        This method is ported from PyCogent (http://www.pycogent.org). PyCogent
        is a GPL project, but we obtained permission from the authors of this
        method to port it to the BIOM Format project (and keep it under BIOM's
        BSD license).
        """
        exp = 'd3b07384d113edec49eaa6238ad5ff00'

        tmp_f = NamedTemporaryFile(
            mode='w',
            prefix='test_safe_md5',
            suffix='txt')
        tmp_f.write('foo\n')
        tmp_f.flush()

        obs = safe_md5(open(tmp_f.name, 'U'))
        self.assertEqual(obs, exp)

        obs = safe_md5(['foo\n'])
        self.assertEqual(obs, exp)

        # unsupported type raises TypeError
        self.assertRaises(TypeError, safe_md5, 42)
Example #9
 def testLogfile(self):
     """Test logging into a logfile"""
     f = NamedTemporaryFile(mode="w", delete=False)
     filename = f.name
     try:
         set_log_level("error")  # avoid using the console logger
         f.write(":-P\n")
         f.close()
         start_logfile(f.name, "devinfo")
         log = getLogger("prosoda.test.integration.test_logger")
         log.debug("Should not be in logfile! :-( ")
         log.info("Should be in logfile :-) ")
         log.devinfo("Should be in logfile :-) ")
         log.warning("Should really be in logfile :-D ")
         stop_logfile(f.name)
         contents = open(f.name).read()
         self.assertNotIn(":-(", contents)
         self.assertNotIn(":-P", contents)
         self.assertIn(":-)", contents)
         self.assertIn(":-D", contents)
         # Make sure no colour codes are leaked into the logfile
         self.assertNotIn("\033", contents)
     finally:
         set_log_level("debug")
         unlink(filename)
Example #10
        def odt_subreport(name=None, obj=None):
            if not aeroo_ooo:
                return _("Error! Subreports not available!")
            report_xml_ids = ir_obj.search(cr, uid, [('report_name', '=', name)], context=context)
            if report_xml_ids:
                service = netsvc.Service._services['report.%s' % name]
                report_xml = ir_obj.browse(cr, uid, report_xml_ids[0], context=context)
                data = {'model': obj._table_name, 'id': obj.id, 'report_type': 'aeroo', 'in_format': 'oo-odt'}
                ### Get new printing object ###
                sub_aeroo_print = AerooPrint()
                service.active_prints[sub_aeroo_print.id] = sub_aeroo_print
                context['print_id'] = sub_aeroo_print.id
                ###############################
                sub_aeroo_print.start_time = time.time()
                report, output = service.create_aeroo_report(cr, uid, \
                                                             [obj.id], data, report_xml, context=context,
                                                             output='odt')  # change for OpenERP 6.0 - Service class usage

                ### Delete printing object ###
                AerooPrint.print_ids.remove(sub_aeroo_print.id)
                del service.active_prints[sub_aeroo_print.id]
                ##############################
                temp_file = NamedTemporaryFile(suffix='.odt', prefix='aeroo-report-', delete=False)
                try:
                    temp_file.write(report)
                finally:
                    temp_file.close()
                # self.oo_subreports[print_id].append(temp_file.name)
                # aeroo_print.subreports.append(temp_file.name)
                self.active_prints[aeroo_print.id].subreports.append(temp_file.name)
                return "<insert_doc('%s')>" % temp_file.name
            return None
Example #11
File: hashfs.py Project: kanchanarp/hashfs
def tmpfile(stream, mode=None):
    """Context manager that writes a :class:`Stream` object to a named
    temporary file and yield it's filename. Cleanup deletes from the temporary
    file from disk.

    Args:
        stream (Stream): Stream object to write to disk as temporary file.
        mode (int, optional): File mode to set on temporary file.

    Returns:
        str: Temporary file name
    """
    tmp = NamedTemporaryFile(delete=False)

    if mode is not None:
        oldmask = os.umask(0)

        try:
            os.chmod(tmp.name, mode)
        finally:
            os.umask(oldmask)

    for data in stream:
        tmp.write(to_bytes(data))

    tmp.close()

    yield tmp.name

    os.remove(tmp.name)
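
The body is a generator (it yields tmp.name), so at its definition site it is presumably wrapped with contextlib.contextmanager; that decorator is an assumption here, as it is not shown in the snippet. A usage sketch:

from contextlib import contextmanager

tmpfile_cm = contextmanager(tmpfile)   # assumed decoration

stream = [b'hello ', b'world']         # any iterable of bytes chunks
with tmpfile_cm(stream, mode=0o644) as path:
    print(open(path, 'rb').read())     # file is removed after the block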
Example #12
def test_cmyk():
    ref = imread(os.path.join(data_dir, 'color.png'))

    img = Image.open(os.path.join(data_dir, 'color.png'))
    img = img.convert('CMYK')

    f = NamedTemporaryFile(suffix='.jpg')
    fname = f.name
    f.close()
    img.save(fname)
    try:
        img.close()
    except AttributeError:  # `close` not available on PIL
        pass

    new = imread(fname)

    ref_lab = rgb2lab(ref)
    new_lab = rgb2lab(new)

    for i in range(3):
        newi = np.ascontiguousarray(new_lab[:, :, i])
        refi = np.ascontiguousarray(ref_lab[:, :, i])
        sim = ssim(refi, newi, dynamic_range=refi.max() - refi.min())
        assert sim > 0.99
Example #13
    def run_solver(self, conflicts, election, deletion_handler, outfile=None):
        if not conflicts:
            return [], 0

        self.deletion_handler = deletion_handler

        instance = self.generate_instance(conflicts, election)

        f = NamedTemporaryFile(delete=False)
        f.write(instance.encode(code))
        f.close()

        process = Popen([self.cmd, f.name], stdout=PIPE)
        out, err = process.communicate()

        conflict_variables, optimum = self.parse_instance(out)

        if outfile:
            candidates = election[0]
            votes = election[1]
            votecounts = election[2]

            votemap = self.delete_votes(votes, votecounts, conflict_variables)
            votesum = sum(votemap.values())

            write_map(candidates, votesum, votemap, open(outfile, "w"))

        remove(f.name)
        return conflict_variables, optimum
Example #14
    def _write_local_schema_file(self, cursor):
        """
        Takes a cursor, and writes the BigQuery schema for the results to a
        local file system.

        :return: A dictionary where key is a filename to be used as an object
            name in GCS, and values are file handles to local files that
            contains the BigQuery schema fields in .json format.
        """
        schema = []
        for field in cursor.description:
            # See PEP 249 for details about the description tuple.
            field_name = field[0]
            field_type = self.type_map(field[1])
            field_mode = 'REPEATED' if field[1] in (1009, 1005, 1007,
                                                    1016) else 'NULLABLE'
            schema.append({
                'name': field_name,
                'type': field_type,
                'mode': field_mode,
            })

        self.log.info('Using schema for %s: %s', self.schema_filename, schema)
        tmp_schema_file_handle = NamedTemporaryFile(delete=True)
        s = json.dumps(schema, sort_keys=True)
        if PY3:
            s = s.encode('utf-8')
        tmp_schema_file_handle.write(s)
        return {self.schema_filename: tmp_schema_file_handle}
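
Note that the handle is created with delete=True, so the schema file exists only while the returned handle stays open; a caller has to flush and upload before closing. A minimal sketch of that lifetime:

from tempfile import NamedTemporaryFile

handle = NamedTemporaryFile(delete=True)
handle.write(b'[{"name": "id", "type": "INTEGER", "mode": "NULLABLE"}]')
handle.flush()                 # make the bytes visible on disk
# ... upload handle.name here, while the handle is still open ...
handle.close()                 # delete=True: the file vanishes now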
Example #15
    def __enter__(self):
        # Ensure that we have not re-entered
        if self.temp_path is not None or self.service is not None:
            raise Exception('Cannot use multiple nested with blocks on same Youtube object!')

        flow = flow_from_clientsecrets(
            self.client_secrets_path,
            scope=YOUTUBE_UPLOAD_SCOPE,
            message=MISSING_CLIENT_SECRETS_MESSAGE)

        temp_file = NamedTemporaryFile(delete=False)
        self.temp_path = temp_file.name
        temp_file.close()

        storage = Storage(self.temp_path)
        credentials = storage.get()

        if credentials is None or credentials.invalid:
            credentials = run_flow(
                flow, storage, argparser.parse_args(list())
            )

        self.service = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION,
            http=credentials.authorize(httplib2.Http()))

        return self
Example #16
    def _create_empty_image(self, image_width, image_height):

        # Check pycairo capabilities
        if not (cairo.HAS_IMAGE_SURFACE and cairo.HAS_PNG_FUNCTIONS):
            raise HTTPBadRequest("cairo was not compiled with ImageSurface and PNG support")

        # Create a new cairo surface
        surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, int(image_width), int(image_height))

        ctx = cairo.Context(surface)

        text = "No imagery available for requested coordinates."

        x_bearing, y_bearing, width, height, x_advance, y_advance = ctx.text_extents(text)

        ctx.move_to((image_width / 2) - (width / 2), (image_height / 2) + (height / 2))
        ctx.set_source_rgba(0, 0, 0, 0.85)
        ctx.show_text(text)

        temp_datadir = self.config.get("main", "temp.datadir")
        temp_url = self.config.get("main", "temp.url")
        file = NamedTemporaryFile(suffix=".png", dir=temp_datadir, delete=False)
        surface.write_to_png(file)
        file.close()

        return {"file": "%s/%s" % (temp_url, file.name.split("/")[-1])}
Example #17
File: tasks.py Project: fnp/audio
    def run(self, uid, aid, publish=True):
        aid = int(aid)
        audiobook = Audiobook.objects.get(id=aid)
        self.set_status(aid, status.ENCODING)

        user = User.objects.get(id=uid)

        try:
            os.makedirs(BUILD_PATH)
        except OSError as e:
            if e.errno == errno.EEXIST:
                pass
            else:
                raise

        out_file = NamedTemporaryFile(delete=False, prefix='%d-' % aid, suffix='.%s' % self.ext, dir=BUILD_PATH)
        out_file.close()
        self.encode(audiobook.source_file.path, out_file.name)
        self.set_status(aid, status.TAGGING)
        self.set_tags(audiobook, out_file.name)
        self.set_status(aid, status.SENDING)

        if publish:
            self.put(user, audiobook, out_file.name)
            self.published(aid)
        else:
            self.set_status(aid, None)

        self.save(audiobook, out_file.name)
Example #18
    def _generate_training_files(self):
        """Returns a tuple of file objects suitable for passing to the
        RdpTrainer application controller.
        """
        tmp_dir = get_qiime_temp_dir()
        training_set = RdpTrainingSet()
        reference_seqs_file = open(self.Params['reference_sequences_fp'], 'U')
        id_to_taxonomy_file = open(self.Params['id_to_taxonomy_fp'], 'U')

        for seq_id, seq in MinimalFastaParser(reference_seqs_file):
            training_set.add_sequence(seq_id, seq)

        for line in id_to_taxonomy_file:
            seq_id, lineage_str = map(strip, line.split('\t'))
            training_set.add_lineage(seq_id, lineage_str)

        training_set.dereplicate_taxa()

        rdp_taxonomy_file = NamedTemporaryFile(
            prefix='RdpTaxonAssigner_taxonomy_', suffix='.txt', dir=tmp_dir)
        rdp_taxonomy_file.write(training_set.get_rdp_taxonomy())
        rdp_taxonomy_file.seek(0)

        rdp_training_seqs_file = NamedTemporaryFile(
            prefix='RdpTaxonAssigner_training_seqs_', suffix='.fasta',
            dir=tmp_dir)
        for rdp_id, seq in training_set.get_training_seqs():
            rdp_training_seqs_file.write('>%s\n%s\n' % (rdp_id, seq))
        rdp_training_seqs_file.seek(0)

        self._training_set = training_set

        return rdp_taxonomy_file, rdp_training_seqs_file
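
Both files above use the default delete=True and are returned open after seek(0), so the RdpTrainer must consume them before they are closed. A tiny demonstration of the rewind-and-reread behavior the snippet relies on:

from tempfile import NamedTemporaryFile

f = NamedTemporaryFile(mode='w+', suffix='.txt')
f.write('>seq1\nACGT\n')
f.seek(0)                 # rewind so a consumer can read from the start
print(f.read())           # the data is still there
f.close()                 # delete=True: the file is gone now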
Example #19
File: context.py Project: Ludwsam/spark
    def parallelize(self, c, numSlices=None):
        """
        Distribute a local Python collection to form an RDD.

        >>> sc.parallelize(range(5), 5).glom().collect()
        [[0], [1], [2], [3], [4]]
        """
        numSlices = numSlices or self.defaultParallelism
        # Calling the Java parallelize() method with an ArrayList is too slow,
        # because it sends O(n) Py4J commands.  As an alternative, serialized
        # objects are written to a file and loaded through textFile().
        tempFile = NamedTemporaryFile(delete=False, dir=self._temp_dir)
        # Make sure we distribute data evenly if it's smaller than self.batchSize
        if "__len__" not in dir(c):
            c = list(c)    # Make it a list so we can compute its length
        batchSize = min(len(c) // numSlices, self._batchSize)
        if batchSize > 1:
            serializer = BatchedSerializer(self._unbatched_serializer,
                                           batchSize)
        else:
            serializer = self._unbatched_serializer
        serializer.dump_stream(c, tempFile)
        tempFile.close()
        readRDDFromFile = self._jvm.PythonRDD.readRDDFromFile
        jrdd = readRDDFromFile(self._jsc, tempFile.name, numSlices)
        return RDD(jrdd, self, serializer)
Example #20
    def test_seq_pipeline_parallel_run_with_fasta_qual(self):
        'The pipeline runs in parallel with fasta and qual'
        pipeline = 'sanger_with_qual'

        fhand_adaptors = NamedTemporaryFile()
        fhand_adaptors.write(ADAPTORS)
        fhand_adaptors.flush()
        arabidopsis_genes = 'arabidopsis_genes+'
        univec = os.path.join(TEST_DATA_DIR, 'blast', arabidopsis_genes)
        configuration = {'remove_vectors': {'vectors': univec},
                         'remove_adaptors': {'adaptors': fhand_adaptors.name}}

        seq1 = create_random_seqwithquality(500, qual_range=50)
        seq2 = create_random_seqwithquality(500, qual_range=51)
        seq3 = create_random_seqwithquality(500, qual_range=52)
        seqs = [seq1, seq2, seq3]
        inseq_fhand, inqual_fhand = create_temp_seq_file(seqs, format='qual')

        in_fhands = {}
        in_fhands['in_seq'] = open(inseq_fhand.name)
        in_fhands['in_qual'] = open(inqual_fhand.name)

        outseq_fhand = NamedTemporaryFile()
        outqual_fhand = NamedTemporaryFile()
        writer = SequenceWriter(outseq_fhand, qual_fhand=outqual_fhand,
                                file_format='fasta')
        writers = {'seq': writer}

        seq_pipeline_runner(pipeline, configuration, in_fhands,
                            processes=4, writers=writers)
        out_fhand = open(outseq_fhand.name, 'r')

        result_seq = out_fhand.read()
        assert result_seq.count('>') == 3
Example #21
    def test_pipeline_run():
        'It tests that the pipeline runs ok'
        pipeline = 'sanger_with_qual'

        fhand_adaptors = NamedTemporaryFile()
        fhand_adaptors.write(ADAPTORS)
        fhand_adaptors.flush()

        arabidopsis_genes = 'arabidopsis_genes+'

        univec = os.path.join(TEST_DATA_DIR, 'blast', arabidopsis_genes)
        configuration = {'remove_vectors_blastdb': {'vectors': univec},
                         'remove_adaptors': {'adaptors': fhand_adaptors.name}}

        seq_fhand = open(os.path.join(TEST_DATA_DIR, 'seq.fasta'), 'r')
        qual_fhand = open(os.path.join(TEST_DATA_DIR, 'qual.fasta'), 'r')

        seq_iter = seqs_in_file(seq_fhand, qual_fhand)

        filtered_seq_iter = _pipeline_builder(pipeline, seq_iter,
                                              configuration)

        seq_list = list(filtered_seq_iter)
        assert 'CGAtcgggggg' in str(seq_list[0].seq)
        assert len(seq_list) == 6
Example #22
def reg_code():
    img, code = generate_code_image((80, 30), 5)
    session["code"] = code
    tp = NamedTemporaryFile()
    img.save(tp.name,format="png")
    tp.seek(0)
    return send_file(tp.name,mimetype='image/png')
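One caveat in the snippet above: once reg_code() returns, the NamedTemporaryFile object can be garbage-collected and its file deleted before Flask finishes streaming the response. A common alternative keeps the image in memory instead; this rewrite is a sketch, not the project's code:

from io import BytesIO
from flask import send_file, session

def reg_code():
    img, code = generate_code_image((80, 30), 5)  # as in the snippet above
    session['code'] = code
    buf = BytesIO()
    img.save(buf, format='png')
    buf.seek(0)
    return send_file(buf, mimetype='image/png')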
Example #23
def test_toy_corpus():

    keats = ('She dwells with Beauty - Beauty that must die;\n\n'
             'And Joy, whose hand is ever at his lips\n\n' 
             'Bidding adieu; and aching Pleasure nigh,\n\n'
             'Turning to poison while the bee-mouth sips:\n\n'
             'Ay, in the very temple of Delight\n\n'
             'Veil\'d Melancholy has her sovran shrine,\n\n'
             'Though seen of none save him whose strenuous tongue\n\n'
             'Can burst Joy\'s grape against his palate fine;\n\n'
             'His soul shall taste the sadness of her might,\n\n'
             'And be among her cloudy trophies hung.')

    assert toy_corpus(keats)
    assert toy_corpus(keats, nltk_stop=True)
    assert toy_corpus(keats, stop_freq=1)
    assert toy_corpus(keats, add_stop=['and', 'with'])
    assert toy_corpus(keats, nltk_stop=True,
                      stop_freq=1, add_stop=['ay'])

    import os
    from tempfile import NamedTemporaryFile as NFT

    tmp = NFT(mode='w', delete=False)
    tmp.write(keats)
    tmp.close()

    c = toy_corpus(tmp.name, is_filename=True, 
                   nltk_stop=True, add_stop=['ay'])
    
    assert c
    os.remove(tmp.name)

    return c
Example #24
    def execute(self, context):
        hive = HiveServer2Hook(hiveserver2_conn_id=self.hiveserver2_conn_id)
        logging.info("Extracting data from Hive")
        logging.info(self.sql)

        if self.bulk_load:
            tmpfile = NamedTemporaryFile()
            hive.to_csv(self.sql, tmpfile.name, delimiter='\t',
                lineterminator='\n', output_header=False)
        else:
            results = hive.get_records(self.sql)

        mysql = MySqlHook(mysql_conn_id=self.mysql_conn_id)
        if self.mysql_preoperator:
            logging.info("Running MySQL preoperator")
            mysql.run(self.mysql_preoperator)

        logging.info("Inserting rows into MySQL")

        if self.bulk_load:
            mysql.bulk_load(table=self.mysql_table, tmp_file=tmpfile.name)
            tmpfile.close()
        else:
            mysql.insert_rows(table=self.mysql_table, rows=results)

        if self.mysql_postoperator:
            logging.info("Running MySQL postoperator")
            mysql.run(self.mysql_postoperator)

        logging.info("Done.")
Example #25
def write_temp_file(data):
    # create a temp file for use as a config file. This should get cleaned
    # up magically at the end of the run.
    fid = NamedTemporaryFile(mode='w+b', suffix='.tmp')
    fid.write(data)
    fid.seek(0)
    return fid
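
The "magic" cleanup works because delete=True is the default: the file is removed as soon as the handle is closed or garbage-collected, so the caller must keep the returned handle alive while the config file is in use. A usage sketch (run_with_config is hypothetical):

cfg = write_temp_file(b'key = value\n')   # keep `cfg` referenced
run_with_config(cfg.name)                 # hypothetical consumer
cfg.close()                               # the temp file is deleted here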
Example #26
 def roundtrip(self, dtype, x, suffix):
     f = NamedTemporaryFile(suffix='.' + suffix)
     fname = f.name
     f.close()
     sio.imsave(fname, x)
     y = sio.imread(fname)
     assert_array_equal(y, x)
Example #27
    def _build_and_catch_errors(self, build_func, options_bytes, source=None):
        try:
            return build_func()
        except _cl.RuntimeError as e:
            msg = e.what
            if options_bytes:
                msg = msg + "\n(options: %s)" % options_bytes.decode("utf-8")

            if source is not None:
                from tempfile import NamedTemporaryFile
                srcfile = NamedTemporaryFile(mode="wt", delete=False, suffix=".cl")
                try:
                    srcfile.write(source)
                finally:
                    srcfile.close()

                msg = msg + "\n(source saved as %s)" % srcfile.name

            code = e.code
            routine = e.routine

            err = _cl.RuntimeError(
                    _cl.Error._ErrorRecord(
                        msg=msg,
                        code=code,
                        routine=routine))

        # Python 3.2 outputs the whole list of currently active exceptions
        # This serves to remove one (redundant) level from that nesting.
        raise err
Example #28
File: cli.py Project: dmeulen/sedge
def command_update(args):
    def write_to(out):
        config.output(out)
    library = KeyLibrary(args.key_directory)
    with open(args.config_file) as fd:
        config = SedgeEngine(library, fd, not args.no_verify, url=args.config_file)
    if args.output_file == '-':
        write_to(ConfigOutput(sys.stdout))
        return
    if not check_or_confirm_overwrite(args.output_file):
        print("Aborting.", file=sys.stderr)
        sys.exit(1)

    tmpf = NamedTemporaryFile(mode='w', dir=os.path.dirname(args.output_file), delete=False)
    try:
        tmpf.file.write('''\
# :sedge:
#
# this configuration generated from `sedge' file:
# %s
#
# do not edit this file manually, edit the source file and re-run `sedge'
#

''' % (args.config_file))
        write_to(ConfigOutput(tmpf.file))
        tmpf.close()
        if args.verbose:
            diff_config_changes(args.output_file, tmpf.name)
        os.rename(tmpf.name, args.output_file)
    except:
        os.unlink(tmpf.name)
        raise
Example #29
    def _write_local_schema_file(self, cursor):
        """
        Takes a cursor, and writes the BigQuery schema for the results to a
        local file system.

        :return: A dictionary where key is a filename to be used as an object
            name in GCS, and values are file handles to local files that
            contains the BigQuery schema fields in .json format.
        """
        schema = []
        for field in cursor.description:
            # See PEP 249 for details about the description tuple.
            field_name = field[0]
            field_type = self.type_map(field[1])
            # Always allow TIMESTAMP to be nullable. MySQLdb returns None types
            # for required fields because some MySQL timestamps can't be
            # represented by Python's datetime (e.g. 0000-00-00 00:00:00).
            field_mode = 'NULLABLE' if field[6] or field_type == 'TIMESTAMP' else 'REQUIRED'
            schema.append({
                'name': field_name,
                'type': field_type,
                'mode': field_mode,
            })

        self.log.info('Using schema for %s: %s', self.schema_filename, schema)
        tmp_schema_file_handle = NamedTemporaryFile(delete=True)
        s = json.dumps(schema, sort_keys=True)
        if PY3:
            s = s.encode('utf-8')
        tmp_schema_file_handle.write(s)
        return {self.schema_filename: tmp_schema_file_handle}
Example #30
    def test_seq_pipeline_run():
        'It tests that the pipeline runs ok'
        pipeline = 'sanger_with_qual'

        fhand_adaptors = NamedTemporaryFile()
        fhand_adaptors.write(ADAPTORS)
        fhand_adaptors.flush()
        arabidopsis_genes = 'arabidopsis_genes+'
        univec = os.path.join(TEST_DATA_DIR, 'blast', arabidopsis_genes)
        configuration = {'remove_vectors_blastdb': {'vectors': univec},
                         'remove_adaptors': {'adaptors': fhand_adaptors.name}}

        in_fhands = {}
        in_fhands['in_seq'] = open(os.path.join(TEST_DATA_DIR, 'seq.fasta'),
                                   'r')
        in_fhands['in_qual'] = open(os.path.join(TEST_DATA_DIR, 'qual.fasta'),
                                    'r')

        out_seq_fhand = NamedTemporaryFile()
        out_qual_fhand = NamedTemporaryFile()
        writer = SequenceWriter(out_seq_fhand, qual_fhand=out_qual_fhand,
                                file_format='fasta')
        seq_pipeline_runner(pipeline, configuration, in_fhands,
                            writers={'seq': writer})
        result_seq = open(out_seq_fhand.name).read()
        assert result_seq.count('>') == 6

        #are we keeping the description?
        assert 'mdust' in result_seq
Example #31
def treetime(nwk, fasta, outdir, binpath='treetime', clock=None, verbosity=1):
    """
    :param nwk: str, Newick tree string from fasttree()
    :param fasta: dict, header-sequence pairs
    :param outdir:  path to write output files
    :param clock: float, clock rate to constrain analysis - defaults
                  to None (no constraint)
    :param verbosity:  verbose level, defaults to 1
    :return:  path to NEXUS output file
    """
    # extract dates from sequence headers
    datefile = NamedTemporaryFile('w', prefix="cvz_tt_", delete=False)
    datefile.write('name,date\n')
    alnfile = NamedTemporaryFile('w', prefix="cvz_tt_", delete=False)
    for h, s in fasta.items():
        # TreeTime seems to have trouble handling labels with spaces
        _, accn, coldate = h.split('|')
        datefile.write('{},{}\n'.format(accn, coldate))
        alnfile.write('>{}\n{}\n'.format(accn, s))
    datefile.close()
    alnfile.close()

    with NamedTemporaryFile('w', prefix="cvz_tt_", delete=False) as nwkfile:
        nwkfile.write(nwk.replace(' ', ''))

    call = [
        binpath, '--tree', nwkfile.name, '--aln', alnfile.name, '--dates',
        datefile.name, '--outdir', outdir, '--verbose',
        str(verbosity), '--plot-rtt', 'none'
    ]  # see issue #66
    if clock:
        call.extend(['--clock-rate', str(clock)])
    check_call(call)

    # clean up temporary files
    os.remove(datefile.name)
    os.remove(alnfile.name)
    os.remove(nwkfile.name)

    # return path to NEXUS file
    nexus_file = os.path.join(outdir, 'timetree.nexus')
    if not os.path.exists(nexus_file):
        print(
            "Error: missing expected NEXUS output file {}".format(nexus_file))
        return None
    return nexus_file
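
Note the mixed styles above: two files are closed explicitly while nwkfile uses a with block, and all three use delete=False so they survive for the external treetime process and are removed manually afterwards. A compact sketch of that handoff (cat stands in for the external tool):

import os
from subprocess import check_call
from tempfile import NamedTemporaryFile

with NamedTemporaryFile('w', prefix='cvz_tt_', delete=False) as fh:
    fh.write('(A:1,B:1);\n')          # file stays on disk after the block
check_call(['cat', fh.name])          # external tool reads it by name
os.remove(fh.name)                    # manual cleanup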
Example #32
def mms_load_data(trange=['2015-10-16', '2015-10-17'],
                  probe='1',
                  data_rate='srvy',
                  level='l2',
                  instrument='fgm',
                  datatype='',
                  varformat=None,
                  prefix='',
                  suffix='',
                  get_support_data=False,
                  time_clip=False,
                  no_update=False,
                  center_measurement=False,
                  available=False,
                  notplot=False,
                  latest_version=False,
                  major_version=False,
                  min_version=None,
                  cdf_version=None):
    """
    This function loads MMS data into pyTplot variables
    """

    if not isinstance(probe, list): probe = [probe]
    if not isinstance(data_rate, list): data_rate = [data_rate]
    if not isinstance(level, list): level = [level]
    if not isinstance(datatype, list): datatype = [datatype]

    probe = [str(p) for p in probe]

    # allows the user to pass in trange as list of datetime objects
    if type(trange[0]) == datetime and type(trange[1]) == datetime:
        trange = [
            time_string(trange[0].timestamp()),
            time_string(trange[1].timestamp())
        ]

    # allows the user to pass in trange as a list of floats (unix times)
    if isinstance(trange[0], float):
        trange[0] = time_string(trange[0])
    if isinstance(trange[1], float):
        trange[1] = time_string(trange[1])

    start_date = parse(trange[0]).strftime(
        '%Y-%m-%d')  # need to request full day, then parse out later
    end_date = parse(time_string(time_double(trange[1]) - 0.1)).strftime(
        '%Y-%m-%d-%H-%M-%S'
    )  # subtract 0.1 seconds to avoid getting data for the next day

    download_only = CONFIG['download_only']

    no_download = False
    if no_update or CONFIG['no_download']: no_download = True

    user = None
    if not no_download:
        sdc_session, user = mms_login_lasp()

    out_files = []
    available_files = []

    for prb in probe:
        for drate in data_rate:
            for lvl in level:
                for dtype in datatype:
                    if user is None:
                        url = 'https://lasp.colorado.edu/mms/sdc/public/files/api/v1/file_info/science?start_date=' + start_date + '&end_date=' + end_date + '&sc_id=mms' + prb + '&instrument_id=' + instrument + '&data_rate_mode=' + drate + '&data_level=' + lvl
                    else:
                        url = 'https://lasp.colorado.edu/mms/sdc/sitl/files/api/v1/file_info/science?start_date=' + start_date + '&end_date=' + end_date + '&sc_id=mms' + prb + '&instrument_id=' + instrument + '&data_rate_mode=' + drate + '&data_level=' + lvl

                    if dtype != '':
                        url = url + '&descriptor=' + dtype

                    if CONFIG['debug_mode']: logging.info('Fetching: ' + url)

                    if not no_download:
                        # query list of available files
                        try:
                            with warnings.catch_warnings():
                                warnings.simplefilter("ignore",
                                                      category=ResourceWarning)
                                http_json = sdc_session.get(
                                    url, verify=True).json()

                            if CONFIG['debug_mode']:
                                logging.info(
                                    'Filtering the results down to your trange'
                                )

                            files_in_interval = mms_files_in_interval(
                                http_json['files'], trange)

                            if available:
                                for file in files_in_interval:
                                    logging.info(
                                        file['file_name'] + ' (' + str(
                                            np.round(file['file_size'] /
                                                     (1024. * 1024),
                                                     decimals=1)) + ' MB)')
                                    available_files.append(file['file_name'])
                                continue

                            for file in files_in_interval:
                                file_date = parse(file['timetag'])
                                if dtype == '':
                                    out_dir = os.sep.join([
                                        CONFIG['local_data_dir'], 'mms' + prb,
                                        instrument, drate, lvl,
                                        file_date.strftime('%Y'),
                                        file_date.strftime('%m')
                                    ])
                                else:
                                    out_dir = os.sep.join([
                                        CONFIG['local_data_dir'], 'mms' + prb,
                                        instrument, drate, lvl, dtype,
                                        file_date.strftime('%Y'),
                                        file_date.strftime('%m')
                                    ])

                                if drate.lower() == 'brst':
                                    out_dir = os.sep.join(
                                        [out_dir,
                                         file_date.strftime('%d')])

                                out_file = os.sep.join(
                                    [out_dir, file['file_name']])

                                if CONFIG['debug_mode']:
                                    logging.info('File: ' + file['file_name'] +
                                                 ' / ' + file['timetag'])

                                if os.path.exists(out_file) and str(
                                        os.stat(out_file).st_size) == str(
                                            file['file_size']):
                                    if not download_only:
                                        logging.info('Loading ' + out_file)
                                    out_files.append(out_file)
                                    continue

                                if user is None:
                                    download_url = 'https://lasp.colorado.edu/mms/sdc/public/files/api/v1/download/science?file=' + file[
                                        'file_name']
                                else:
                                    download_url = 'https://lasp.colorado.edu/mms/sdc/sitl/files/api/v1/download/science?file=' + file[
                                        'file_name']

                                logging.info('Downloading ' +
                                             file['file_name'] + ' to ' +
                                             out_dir)

                                with warnings.catch_warnings():
                                    warnings.simplefilter(
                                        "ignore", category=ResourceWarning)
                                    fsrc = sdc_session.get(download_url,
                                                           stream=True,
                                                           verify=True)
                                ftmp = NamedTemporaryFile(delete=False)

                                with open(ftmp.name, 'wb') as f:
                                    copyfileobj(fsrc.raw, f)

                                if not os.path.exists(out_dir):
                                    os.makedirs(out_dir)

                                # if the download was successful, copy to data directory
                                copy(ftmp.name, out_file)
                                out_files.append(out_file)
                                fsrc.close()
                                ftmp.close()
                        except requests.exceptions.ConnectionError:
                            # No/bad internet connection; try loading the files locally
                            logging.error('No internet connection!')

                    if out_files == []:
                        if not download_only:
                            logging.info('Searching for local files...')
                        out_files = mms_get_local_files(
                            prb, instrument, drate, lvl, dtype, trange)

                        if out_files == [] and CONFIG[
                                'mirror_data_dir'] is not None:
                            # check for network mirror; note: network mirrors are assumed to be read-only
                            # and we always copy the files from the mirror to the local data directory
                            # before trying to load into tplot variables
                            logging.info(
                                'No local files found; checking network mirror...'
                            )
                            out_files = mms_get_local_files(prb,
                                                            instrument,
                                                            drate,
                                                            lvl,
                                                            dtype,
                                                            trange,
                                                            mirror=True)

    if not no_download:
        sdc_session.close()

    if available:
        return available_files

    if not download_only:
        out_files = sorted(out_files)

        filtered_out_files = mms_file_filter(out_files,
                                             latest_version=latest_version,
                                             major_version=major_version,
                                             min_version=min_version,
                                             version=cdf_version)
        if filtered_out_files == []:
            logging.info('No matching CDF versions found.')
            return

        new_variables = cdf_to_tplot(filtered_out_files,
                                     varformat=varformat,
                                     merge=True,
                                     get_support_data=get_support_data,
                                     prefix=prefix,
                                     suffix=suffix,
                                     center_measurement=center_measurement,
                                     notplot=notplot)

        if notplot:
            return new_variables

        if new_variables == []:
            logging.warning('No data loaded.')
            return

        if time_clip:
            for new_var in new_variables:
                tclip(new_var, trange[0], trange[1], suffix='')

        return new_variables
    else:
        return out_files
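
The download path above stages each CDF into a NamedTemporaryFile and only copies it into the data tree on success, so a broken transfer never leaves a truncated file at the final path. A minimal sketch of that stage-then-copy idiom (the URL is a placeholder):

from shutil import copy, copyfileobj
from tempfile import NamedTemporaryFile
import requests

fsrc = requests.get('https://example.com/data.cdf', stream=True)
ftmp = NamedTemporaryFile(delete=False)
with open(ftmp.name, 'wb') as f:
    copyfileobj(fsrc.raw, f)      # stream into the temp file first
copy(ftmp.name, 'data.cdf')       # then publish to the final path
fsrc.close()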
Example #33
def test_vs_scoring():
    protein = next(oddt.toolkit.readfile('pdb', xiap_protein))
    protein.protein = True

    data_dir = os.path.join(test_data_dir, 'data')
    home_dir = mkdtemp()
    pdbbind_versions = (2007, 2013, 2016)

    pdbbind_dir = os.path.join(data_dir, 'pdbbind')
    for pdbbind_v in pdbbind_versions:
        version_dir = os.path.join(data_dir, 'v%s' % pdbbind_v)
        if not os.path.isdir(version_dir):
            os.symlink(pdbbind_dir, version_dir)

    filenames = []
    # train mocked SFs
    for model in [nnscore(n_jobs=1)
                  ] + [rfscore(version=v, n_jobs=1) for v in [1, 2, 3]]:
        model.gen_training_data(data_dir,
                                pdbbind_versions=pdbbind_versions,
                                home_dir=home_dir)
        filenames.append(model.train(home_dir=home_dir))
    vs = virtualscreening(n_cpu=-1, chunksize=10)
    vs.load_ligands('sdf', xiap_actives_docked)
    # error if no protein is fed
    with pytest.raises(ValueError):
        vs.score('nnscore')
    # bad sf name
    with pytest.raises(ValueError):
        vs.score('bad_sf', protein=protein)
    vs.score('nnscore', protein=xiap_protein)
    vs.score('nnscore_pdbbind2016', protein=protein)
    vs.score('rfscore_v1', protein=protein)
    vs.score('rfscore_v1_pdbbind2016', protein=protein)
    vs.score('rfscore_v2', protein=protein)
    vs.score('rfscore_v3', protein=protein)
    vs.score('pleclinear', protein=protein)
    vs.score('pleclinear_p5_l1_s65536_pdbbind2016', protein=protein)
    # use pickle directly
    vs.score(filenames[0], protein=protein)
    # pass SF object directly
    vs.score(scorer.load(filenames[0]), protein=protein)
    # pass wrong object (sum is not an instance of scorer)
    with pytest.raises(ValueError):
        vs.score(sum, protein=protein)

    mols = list(vs.fetch())

    assert len(mols) == 100
    mol_data = mols[0].data
    assert 'nnscore' in mol_data
    assert 'rfscore_v1' in mol_data
    assert 'rfscore_v2' in mol_data
    assert 'rfscore_v3' in mol_data
    assert 'PLEClinear_p5_l1_s65536' in mol_data

    vs = virtualscreening(n_cpu=-1, chunksize=10)
    vs.load_ligands('sdf', xiap_actives_docked)
    vs.score('nnscore', protein=protein)
    vs.score('rfscore_v1', protein=protein)
    vs.score('rfscore_v2', protein=protein)
    vs.score('rfscore_v3', protein=protein)
    with NamedTemporaryFile('w', suffix='.sdf') as molfile:
        with NamedTemporaryFile('w', suffix='.csv') as csvfile:
            vs.write('sdf', molfile.name, csv_filename=csvfile.name)
            data = pd.read_csv(csvfile.name)
            assert 'nnscore' in data.columns
            assert 'rfscore_v1' in data.columns
            assert 'rfscore_v2' in data.columns
            assert 'rfscore_v3' in data.columns

            mols = list(oddt.toolkit.readfile('sdf', molfile.name))
            assert len(mols) == 100

            vs.write_csv(
                csvfile.name,
                fields=['nnscore', 'rfscore_v1', 'rfscore_v2', 'rfscore_v3'])
            data = pd.read_csv(csvfile.name)
            assert len(data.columns) == 4
            assert len(data) == len(mols)
            assert 'nnscore' in data.columns
            assert 'rfscore_v1' in data.columns
            assert 'rfscore_v2' in data.columns
            assert 'rfscore_v3' in data.columns

    # remove files
    for f in filenames:
        os.unlink(f)

    # remove symlinks
    for pdbbind_v in pdbbind_versions:
        version_dir = os.path.join(data_dir, 'v%s' % pdbbind_v)
        if os.path.islink(version_dir):
            os.unlink(version_dir)
Example #34
def handle_submission(request, su_id):
    """
    Saves the submission if it is allowed
    """
    uuid = request.cookies.get("uuid")
    logging.debug("Cookie: {}".format(uuid))
    request_form = request.form
    logging.debug(request_form)
    ass_code = request_form["ass_code"]
    module = request_form["module_code"]
    path = "modules/{}/{}/submissions".format(module, ass_code)
    expected_file = "submission_file_{}".format(ass_code)
    if expected_file in request.files:
        file = request.files[expected_file]
        if file and allowed_file(file.filename):
            if su_id not in os.listdir(path):
                path = "{}/{}/".format(path, su_id)
                os.mkdir(path)
            else:
                path = "{}/{}/".format(path, su_id)
            if allowed_submission(su_id, ass_code):
                # Remove previous submissions
                for sub in os.listdir(path):
                    os.unlink("{}/{}".format(path, sub))
                # Add resubmission
                filename = secure_filename(file.filename)
                file.save("{}/{}".format(path, filename))
                # Log submission and date
                tempfile = NamedTemporaryFile(delete=False)
                try:
                    locks[ass_code].acquire(timeout=10)
                    with open(
                            "modules/{}/{}/assignment_files/logs.csv".format(
                                module, ass_code),
                            "rb",
                    ) as logs, tempfile:
                        logs = csv.reader(logs, delimiter=",")
                        writer = csv.writer(tempfile, delimiter=",")
                        found = False
                        for row in logs:
                            if str(row[0]) == str(su_id):
                                found = True
                                subs = int(row[2])
                                row[2] = subs + 1
                                row[3] = datetime.datetime.now()
                                row[5] = request.remote_addr
                                row[6] = uuid
                            writer.writerow(row)
                        if not found:
                            # Still log submissions if the user is not a registered user somehow
                            writer.writerow([
                                su_id.strip(),
                                0,
                                1,
                                datetime.datetime.now(),
                                None,
                                request.remote_addr,
                                uuid,
                                None,
                                None,
                            ])
                        shutil.move(
                            tempfile.name,
                            "modules/{}/{}/assignment_files/logs.csv".format(
                                module, ass_code),
                        )
                        # Finally send email to inform student of their submission
                        send_email(su_id, ass_code)
                finally:
                    locks[ass_code].release()
            else:
                return False
        else:
            return False
    else:
        return False
    return True
Example #35
File: disk.py Project: pombredanne/kiwi-1
    def _build_and_map_disk_partitions(self):
        self.disk.wipe()
        if self.firmware.legacy_bios_mode():
            log.info('--> creating EFI CSM(legacy bios) partition')
            self.disk.create_efi_csm_partition(
                self.firmware.get_legacy_bios_partition_size()
            )

        if self.firmware.efi_mode():
            log.info('--> creating EFI partition')
            self.disk.create_efi_partition(
                self.firmware.get_efi_partition_size()
            )

        if self.firmware.ofw_mode():
            log.info('--> creating PReP partition')
            self.disk.create_prep_partition(
                self.firmware.get_prep_partition_size()
            )

        if self.disk_setup.need_boot_partition():
            log.info('--> creating boot partition')
            self.disk.create_boot_partition(
                self.disk_setup.boot_partition_size()
            )

        if self.spare_part_mbsize:
            log.info('--> creating spare partition')
            self.disk.create_spare_partition(
                self.spare_part_mbsize
            )

        if self.root_filesystem_is_overlay:
            log.info('--> creating readonly root partition')
            squashed_root_file = NamedTemporaryFile()
            squashed_root = FileSystemSquashFs(
                device_provider=None, root_dir=self.root_dir
            )
            squashed_root.create_on_file(
                filename=squashed_root_file.name,
                exclude=[Defaults.get_shared_cache_location()]
            )
            squashed_rootfs_mbsize = os.path.getsize(
                squashed_root_file.name
            ) / 1048576
            self.disk.create_root_readonly_partition(
                int(squashed_rootfs_mbsize + 50)
            )

        if self.volume_manager_name and self.volume_manager_name == 'lvm':
            log.info('--> creating LVM root partition')
            self.disk.create_root_lvm_partition('all_free')

        elif self.mdraid:
            log.info('--> creating mdraid root partition')
            self.disk.create_root_raid_partition('all_free')

        else:
            log.info('--> creating root partition')
            self.disk.create_root_partition('all_free')

        if self.firmware.bios_mode():
            log.info('--> setting active flag to primary boot partition')
            self.disk.activate_boot_partition()

        if self.firmware.ofw_mode():
            log.info('--> setting active flag to primary PReP partition')
            self.disk.activate_boot_partition()

        if self.firmware.efi_mode():
            if self.force_mbr:
                log.info('--> converting partition table to MBR')
                self.disk.create_mbr()
            elif self.hybrid_mbr:
                log.info('--> converting partition table to hybrid GPT/MBR')
                self.disk.create_hybrid_mbr()

        self.disk.map_partitions()

        return self.disk.get_device()
Example #36
File: disk.py Project: pombredanne/kiwi-1
    def create_disk(self):
        """
        Build a bootable raw disk image
        """
        if self.install_media and self.build_type_name != 'oem':
            raise KiwiInstallMediaError(
                'Install media requires oem type setup, got %s' %
                self.build_type_name
            )

        if self.root_filesystem_is_overlay and self.volume_manager_name:
            raise KiwiVolumeManagerSetupError(
                'Volume management together with root overlay is not supported'
            )

        # setup recovery archive, cleanup and create archive if requested
        self.system_setup.create_recovery_archive()

        # prepare boot(initrd) root system
        log.info('Preparing boot system')
        self.boot_image.prepare()

        # precalculate needed disk size
        disksize_mbytes = self.disk_setup.get_disksize_mbytes()

        # create the disk
        log.info('Creating raw disk image %s', self.diskname)
        loop_provider = LoopDevice(
            self.diskname, disksize_mbytes, self.blocksize
        )
        loop_provider.create()

        self.disk = Disk(
            self.firmware.get_partition_table_type(), loop_provider
        )

        # create the bootloader instance
        self.bootloader_config = BootLoaderConfig(
            self.bootloader, self.xml_state, self.root_dir, {
                'targetbase':
                    loop_provider.get_device(),
                'grub_directory_name':
                    Defaults.get_grub_boot_directory_name(self.root_dir)
            }
        )

        # create disk partitions and instance device map
        device_map = self._build_and_map_disk_partitions()

        # create raid on current root device if requested
        if self.mdraid:
            self.raid_root = RaidDevice(device_map['root'])
            self.raid_root.create_degraded_raid(raid_level=self.mdraid)
            device_map['root'] = self.raid_root.get_device()

        # create luks on current root device if requested
        if self.luks:
            self.luks_root = LuksDevice(device_map['root'])
            self.luks_root.create_crypto_luks(
                passphrase=self.luks, os=self.luks_os
            )
            device_map['root'] = self.luks_root.get_device()

        # create filesystems on boot partition(s) if any
        self._build_boot_filesystems(device_map)

        # create volumes and filesystems for root system
        if self.volume_manager_name:
            volume_manager_custom_parameters = {
                'fs_mount_options':
                    self.custom_root_mount_args,
                'root_label':
                    self.disk_setup.get_root_label(),
                'root_is_snapshot':
                    self.xml_state.build_type.get_btrfs_root_is_snapshot(),
                'root_is_readonly_snapshot':
                    self.xml_state.build_type.get_btrfs_root_is_readonly_snapshot(),
                'image_type':
                    self.xml_state.get_build_type_name()
            }
            volume_manager = VolumeManager(
                self.volume_manager_name, device_map['root'],
                self.root_dir + '/',
                self.volumes,
                volume_manager_custom_parameters
            )
            volume_manager.setup(
                self.volume_group_name
            )
            volume_manager.create_volumes(
                self.requested_filesystem
            )
            volume_manager.mount_volumes()
            self.generic_fstab_entries += volume_manager.get_fstab(
                self.persistency_type, self.requested_filesystem
            )
            self.system = volume_manager
            device_map['root'] = volume_manager.get_device()['root']
        else:
            log.info(
                'Creating root(%s) filesystem on %s',
                self.requested_filesystem, device_map['root'].get_device()
            )
            filesystem_custom_parameters = {
                'mount_options': self.custom_root_mount_args
            }
            filesystem = FileSystem(
                self.requested_filesystem, device_map['root'],
                self.root_dir + '/',
                filesystem_custom_parameters
            )
            filesystem.create_on_device(
                label=self.disk_setup.get_root_label()
            )
            self.system = filesystem

        # create a random image identifier
        self.mbrid = SystemIdentifier()
        self.mbrid.calculate_id()

        # create first stage metadata to boot image
        self._write_partition_id_config_to_boot_image()

        self._write_recovery_metadata_to_boot_image()

        self._write_raid_config_to_boot_image()

        self._write_generic_fstab_to_boot_image(device_map)

        self.system_setup.export_modprobe_setup(
            self.boot_image.boot_root_directory
        )

        # create first stage metadata to system image
        self._write_image_identifier_to_system_image()

        self._write_crypttab_to_system_image()

        self._write_generic_fstab_to_system_image(device_map)

        # create initrd cpio archive
        self.boot_image.create_initrd(self.mbrid)

        # create second stage metadata to system image
        self._copy_first_boot_files_to_system_image()

        self._write_bootloader_config_to_system_image(device_map)

        self.mbrid.write_to_disk(
            self.disk.storage_provider
        )

        # set SELinux file security contexts if context exists
        self._setup_selinux_file_contexts()

        # syncing system data to disk image
        log.info('Syncing system to image')
        if self.system_efi:
            log.info('--> Syncing EFI boot data to EFI partition')
            self.system_efi.sync_data()

        if self.system_boot:
            log.info('--> Syncing boot data at extra partition')
            self.system_boot.sync_data(
                self._get_exclude_list_for_boot_data_sync()
            )

        log.info('--> Syncing root filesystem data')
        if self.root_filesystem_is_overlay:
            squashed_root_file = NamedTemporaryFile()
            squashed_root = FileSystemSquashFs(
                device_provider=None, root_dir=self.root_dir
            )
            squashed_root.create_on_file(
                filename=squashed_root_file.name,
                exclude=self._get_exclude_list_for_root_data_sync(device_map)
            )
            Command.run(
                [
                    'dd',
                    'if=%s' % squashed_root_file.name,
                    'of=%s' % device_map['readonly'].get_device()
                ]
            )
        else:
            self.system.sync_data(
                self._get_exclude_list_for_root_data_sync(device_map)
            )

        # install boot loader
        self._install_bootloader(device_map)

        # set root filesystem properties
        self._setup_property_root_is_readonly_snapshot()

        # prepare for install media if requested
        if self.install_media:
            if self.initrd_system and self.initrd_system == 'dracut':
                # for the installation process we need a kiwi initrd.
                # Therefore an extra install boot root system needs to
                # be prepared if dracut was set as the initrd system
                # to boot the system image
                log.info('Preparing extra install boot system')

                self.xml_state.build_type.set_initrd_system('kiwi')
                self.initrd_system = \
                    self.xml_state.build_type.get_initrd_system()

                self.boot_image = BootImageKiwi(
                    self.xml_state, self.target_dir,
                    signing_keys=self.signing_keys
                )

                self.boot_image.prepare()

                # apply disk builder metadata also needed in the install initrd
                self._write_partition_id_config_to_boot_image()
                self._write_recovery_metadata_to_boot_image()
                self._write_raid_config_to_boot_image()
                self.system_setup.export_modprobe_setup(
                    self.boot_image.boot_root_directory
                )

            log.info('Saving boot image instance to file')
            self.boot_image.dump(
                self.target_dir + '/boot_image.pickledump'
            )

        # store image file name in result
        self.result.add(
            key='disk_image',
            filename=self.diskname,
            use_for_bundle=not self.image_format,
            compress=True,
            shasum=True
        )

        # create image root metadata
        self.result.add(
            key='image_packages',
            filename=self.system_setup.export_rpm_package_list(
                self.target_dir
            ),
            use_for_bundle=True,
            compress=False,
            shasum=False
        )
        self.result.add(
            key='image_verified',
            filename=self.system_setup.export_rpm_package_verification(
                self.target_dir
            ),
            use_for_bundle=True,
            compress=False,
            shasum=False
        )

        return self.result
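
The mbrid written above is, at heart, a random 32-bit disk signature. A minimal sketch of calculating such an identifier (illustrative only, not KIWI's actual SystemIdentifier code):

import os
import struct

def calculate_random_image_id():
    # four random bytes rendered as a 0x-prefixed hex word, in the
    # spirit of the mbrid calculated above (a sketch, not KIWI's code)
    return '0x%08x' % struct.unpack('<I', os.urandom(4))[0]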
Example #37
import json
from tempfile import NamedTemporaryFile

def write_mesos_cli_config(config):
    # text mode so json.dumps' str output can be written directly;
    # delete=False: the caller is responsible for removing the file
    mesos_cli_config_file = NamedTemporaryFile(mode='w', delete=False)
    mesos_cli_config_file.write(json.dumps(config))
    mesos_cli_config_file.close()
    return mesos_cli_config_file.name
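
A minimal usage sketch for the helper above; the round-trip check and cleanup are illustrative, and the config keys are made up:

import json
import os

config = {'master': 'zk://localhost:2181/mesos', 'log_level': 'warning'}
path = write_mesos_cli_config(config)
try:
    with open(path) as f:
        assert json.load(f) == config
finally:
    # delete=False above means the caller owns the file
    os.remove(path)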
Example #38
class TestPushToken(TestController):
    def setUp(self):

        self.delete_all_policies()
        self.delete_all_token()
        self.delete_all_realms()
        self.delete_all_resolvers()
        super(TestPushToken, self).setUp()
        self.create_common_resolvers()
        self.create_common_realms()
        self.create_dummy_cb_policies()

        # ------------------------------------------------------------------ --

        self.gda = 'DEADBEEF'
        self.tokens = defaultdict(dict)

        # ------------------------------------------------------------------ --

        pk, sk = gen_dsa_keypair()
        self.secret_key = sk
        self.public_key = pk

        # ----------------------------------------------------------------- --

        # we need a dummy file to sneak past the file existence check
        # in the initial provider configuration

        self.dummy_temp_cert = NamedTemporaryFile()

        # ------------------------------------------------------------------ --

        # make dummy provider config
        p_config = {
            "push_url": "https://pushproxy.keyidentity.com",
            "access_certificate": self.dummy_temp_cert.name,
            "server_certificate": ""
        }

        params = {
            'name': 'dummy_provider',
            'class': 'DefaultPushProvider',
            'config': json.dumps(p_config),
            'timeout': '120',
            'type': 'push'
        }

        self.make_system_request('setProvider', params=params)

        # ------------------------------------------------------------------ --

        params = {
            'name': 'dummy_push_policy',
            'scope': 'authentication',
            'action': 'push_provider=dummy_provider',
            'user': '******',
            'realm': '*',
            'client': '',
            'time': ''
        }

        self.create_policy(params=params)
        self.uri = self.app.config.get('MOBILE_APP_PROTOCOLL_ID', 'lseqr')

# -------------------------------------------------------------------------- --

    def tearDown(self):

        self.delete_all_policies()
        self.delete_all_realms()
        self.delete_all_resolvers()
        self.delete_all_token()
        super(TestPushToken, self).tearDown()

        # ------------------------------------------------------------------ --

        # delete dummy provider config (same name as registered in setUp)

        params = {'name': 'dummy_provider', 'type': 'push'}

        self.make_system_request('delProvider', params=params)

        # ------------------------------------------------------------------ --

        # delete temp file

        self.dummy_temp_cert.close()

# -------------------------------------------------------------------------- --

    def setPolicy(self, params):
        """ sets a system policy defined by param """

        response = self.make_system_request('setPolicy', params)
        response_dict = json.loads(response.body)

        assert 'result' in response_dict
        result = response_dict.get('result')

        assert 'status' in result
        status = result.get('status')

        assert status

        response = self.make_system_request('getPolicy', params)

# -------------------------------------------------------------------------- --

    def create_dummy_cb_policies(self):
        """ sets some dummy callback policies. callback policies get ignored
        by the tests, but are nonetheless necessary for the backend """

        # ------------------------------------------------------------------ --

        # set pairing callback policies

        params = {
            'name': 'dummy1',
            'scope': 'authentication',
            'realm': '*',
            'action': 'pushtoken_pairing_callback_url=foo',
            'user': '******'
        }

        self.setPolicy(params)

        # ------------------------------------------------------------------ --

        # set challenge callback policies

        params = {
            'name': 'dummy2',
            'scope': 'authentication',
            'realm': '*',
            'action': 'pushtoken_challenge_callback_url=foo',
            'user': '******'
        }

        self.setPolicy(params)

# -------------------------------------------------------------------------- --

    def enroll_pushtoken(self, user=None, pin='1234', serial=None):
        """
        enrolls a pushtoken

        :param user: the user to which the token should be assigned
        :param pin: the pin the token should have after enrollment
            (default is 1234)

        :returns pairing url
        """

        # initialize an unfinished token on the server

        params = {'type': 'push', 'pin': pin}

        if user:
            params['user'] = user

        if serial:
            params['serial'] = serial

        response = self.make_admin_request('init', params)

        # ------------------------------------------------------------------ --

        # response should contain pairing url, check if it was
        # sent and validate

        response_dict = json.loads(response.body)
        assert 'pairing_url' in response_dict.get('detail', {})

        pairing_url = response_dict.get('detail', {}).get('pairing_url')
        assert pairing_url is not None
        assert pairing_url.startswith(self.uri + '://pair/')

        return pairing_url

# -------------------------------------------------------------------------- --

    def activate_token(self, user_token_id, data='', retry_activation=1):
        """
            activate the token
            - create the activation challenge by calling /validate/check
            - calculate the resonse in the test user token
            - send the signature to /validate/accept_transaction

         :param user_token_id: the id to the to be used token
         :param data: the data which is used during activation
         :return: the activation challenge and signature
        """

        for i in range(0, retry_activation):

            # ------------------------------------------------------------- --

            # request activation challenge

            challenge_url = self.trigger_challenge(user_token_id, data=data)

            # ------------------------------------------------------------- --

            # parse, decrypt and verify the challenge url

            challenge, sig = self.decrypt_and_verify_challenge(challenge_url,
                                                               action='ACCEPT')

            # ------------------------------------------------------------- --

            # check if the content type is right (we are doing pairing
            # right now, so type must be CONTENT_TYPE_PAIRING)

            content_type = challenge['content_type']
            assert content_type == CONTENT_TYPE_PAIRING

        # ----------------------------------------------------------------- --

        # prepare params for validate

        params = {
            'transactionid': challenge['transaction_id'],
            'signature': sig
        }

        # again, we ignore the callback definitions

        response = self.make_validate_request('accept_transaction', params)
        response_dict = json.loads(response.body)

        status = response_dict.get('result', {}).get('status')
        assert status

        value = response_dict.get('result', {}).get('value')
        assert value, response

    def execute_correct_pairing(self,
                                user=None,
                                pin='1234',
                                serial=None,
                                retry_pairing=1,
                                retry_activation=1):
        """
        enroll token and pair it

        :param user: the user the token should be assigned to
            (default: None)
        :param pin: the pin the token should have (default '1234')
        :param retry_pairing: num of re-pairing
        :param retry_activation: num of re-activation

        :return user_token_id (index for self.tokens)
        """

        # ------------------------------------------------------------------ --

        # enroll token

        pairing_url = self.enroll_pushtoken(user=user, pin=pin, serial=serial)

        # ------------------------------------------------------------------ --

        # pair the token

        for i in range(0, retry_pairing):

            user_token_id = self.pair_token(pairing_url, pin)

        # ------------------------------------------------------------------ --

        # activate the token

        self.activate_token(user_token_id,
                            data='',
                            retry_activation=retry_activation)

        return user_token_id

# -------------------------------------------------------------------------- --

    def pair_token(self, pairing_url, pin='1234'):
        """
        Pair an enrolled token
        - use the qr url to instantiate the test user token and
        - call the /validate/pair to bind this to the LinOTP token

        :param pairing_url: the pairing url provided by the token

        :param pin: the pin of the token (default: '1234')

        :return: handle of the test user token
        """

        # save data extracted from pairing url to the 'user database'

        user_token_id = self.create_user_token_by_pairing_url(pairing_url, pin)

        # ------------------------------------------------------------------ --

        # create the pairing response

        pairing_response = self.create_pairing_response_by_serial(
            user_token_id)

        # ------------------------------------------------------------------ --

        # send pairing response

        response_dict = self.send_pairing_response(pairing_response)

        # ------------------------------------------------------------------ --

        # check if returned json is correct

        assert 'result' in response_dict
        result = response_dict.get('result')

        assert 'value' in result
        value = result.get('value')
        assert not value

        assert 'status' in result
        status = result.get('status')
        assert status

        return user_token_id

# -------------------------------------------------------------------------- --

    def create_user_token_by_pairing_url(self, pairing_url, pin='1234'):
        """
        parses the pairing url and saves the extracted data in
        the fake token database of this test class.

        :param pairing_url: the pairing url received from the server
        :param pin: the pin of the token (default: '1234')

        :returns: user_token_id of newly created token
        """

        # extract metadata and the public key

        data_encoded = pairing_url[len(self.uri + '://pair/'):]
        data = decode_base64_urlsafe(data_encoded)
        version, token_type, flags = struct.unpack('<bbI', data[0:6])
        partition = struct.unpack('<I', data[6:10])[0]

        server_public_key = data[10:10 + 32]

        # validate protocol versions and type id

        assert token_type == TYPE_PUSHTOKEN
        assert version == PAIRING_URL_VERSION

        # ------------------------------------------------------------------ --

        # extract custom data that may or may not be present
        # (depending on flags)

        custom_data = data[10 + 32:]

        token_serial = None
        if flags & FLAG_PAIR_SERIAL:
            token_serial, __, custom_data = custom_data.partition(b'\x00')

        callback_url = None
        if flags & FLAG_PAIR_CBURL:
            callback_url, __, custom_data = custom_data.partition(b'\x00')
        else:
            raise NotImplementedError(
                'Callback URL is mandatory for PushToken')

        # ------------------------------------------------------------------ --

        # save token data for later use

        user_token_id = len(self.tokens)
        self.tokens[user_token_id] = {
            'serial': token_serial.decode(),
            'server_public_key': server_public_key,
            'partition': partition,
            'callback_url': callback_url.decode(),
            'pin': pin
        }

        # ------------------------------------------------------------------ --

        return user_token_id

# -------------------------------------------------------------------------- --

    def decrypt_and_verify_challenge(self, challenge_url, action):
        """
        Decrypts the data packed in the challenge url, verifies
        its content, returns the parsed data as a dictionary,
        calculates and returns the signature.

        The calling method must then send the signature
        back to the server. (The reason for this control flow
        is that the challenge data must be checked in different
        scenarios, e.g. when we have a pairing the data must be
        checked by the method that simulates the pairing)

        :param challenge_url: the challenge url as sent by the server
        :param action: a string identifier for the verification action
            (at the moment 'ACCEPT' or 'DENY')

        :returns: (challenge, signature)

            challenge has the keys

                * content_type - one of the three values CONTENT_TYPE_SIGNREQ,
                    CONTENT_TYPE_PAIRING or CONTENT_TYPE_LOGIN)
                    (all defined in this module)
                * transaction_id - used to identify the challenge
                    on the server
                * callback_url (optional) - the url to which the challenge
                    response should be set
                * user_token_id - used to identify the token in the
                    user database for which this challenge was created

            depending on the content type additional keys are present

                * for CONTENT_TYPE_PAIRING: serial
                * for CONTENT_TYPE_SIGNREQ: message
                * for CONTENT_TYPE_LOGIN: login, host

            signature is the generated user signature used to
            respond to the challenge
        """

        challenge_data_encoded = challenge_url[len(self.uri + '://chal/'):]
        challenge_data = decode_base64_urlsafe(challenge_data_encoded)

        # ------------------------------------------------------------------ --

        # parse and verify header information in the
        # encrypted challenge data

        header = challenge_data[0:5]
        version, user_token_id = struct.unpack('<bI', header)
        assert version == CHALLENGE_URL_VERSION

        # ------------------------------------------------------------------ --

        # get token from client token database

        token = self.tokens[user_token_id]
        server_public_key = token['server_public_key']

        # ------------------------------------------------------------------ --

        # prepare decryption by separating R from
        # ciphertext and server signature

        R = challenge_data[5:5 + 32]
        ciphertext = challenge_data[5 + 32:-64]
        server_signature = challenge_data[-64:]

        # check signature

        data = challenge_data[0:-64]
        crypto_sign_verify_detached(server_signature, data, server_public_key)

        # ------------------------------------------------------------------ --

        # key derivation

        secret_key_dh = dsa_to_dh_secret(self.secret_key)
        ss = calc_dh(secret_key_dh, R)
        U = SHA256.new(ss).digest()

        sk = U[0:16]
        nonce = U[16:32]

        # ------------------------------------------------------------------ --

        # decrypt and verify challenge

        nonce_as_int = int_from_bytes(nonce, byteorder='big')
        ctr = Counter.new(128, initial_value=nonce_as_int)
        cipher = AES.new(sk, AES.MODE_CTR, counter=ctr)
        plaintext = cipher.decrypt(ciphertext)

        # ------------------------------------------------------------------ --

        # parse/check plaintext header

        # 1 - for content type
        # 8 - for transaction id
        # 8 - for time stamp
        offset = 1 + 8 + 8

        pt_header = plaintext[0:offset]
        (content_type, transaction_id,
         _time_stamp) = struct.unpack('<bQQ', pt_header)

        transaction_id = u64_to_transaction_id(transaction_id)

        # ------------------------------------------------------------------ --

        # prepare the parsed challenge data

        challenge = {}
        challenge['content_type'] = content_type

        # ------------------------------------------------------------------ --

        # retrieve plaintext data depending on content_type

        if content_type == CONTENT_TYPE_PAIRING:

            serial, callback_url, __ = plaintext[offset:].split(b'\x00')
            challenge['serial'] = serial.decode()

        elif content_type == CONTENT_TYPE_SIGNREQ:

            message, callback_url, __ = plaintext[offset:].split(b'\x00')
            challenge['message'] = message.decode()

        elif content_type == CONTENT_TYPE_LOGIN:

            login, host, callback_url, __ = plaintext[offset:].split(b'\x00')
            challenge['login'] = login.decode()
            challenge['host'] = host.decode()

        # ------------------------------------------------------------------ --

        # prepare the parsed challenge data

        challenge['callback_url'] = callback_url.decode()
        challenge['transaction_id'] = transaction_id
        challenge['user_token_id'] = user_token_id

        # calculate signature

        sig_base = (struct.pack('<b', CHALLENGE_URL_VERSION) +
                    b'%s\0' % action.encode('utf-8') + server_signature +
                    plaintext)

        sig = crypto_sign_detached(sig_base, self.secret_key)
        encoded_sig = encode_base64_urlsafe(sig)

        return challenge, encoded_sig

# -------------------------------------------------------------------------- --

    def test_correct_pairing(self):
        """ PushToken: Check if pairing works correctly """
        self.execute_correct_pairing()

    def test_multiple_pairing_activations(self):
        """ PushToken: Check if pairing works multiple times correctly """
        self.execute_correct_pairing(retry_pairing=3, retry_activation=3)

# -------------------------------------------------------------------------- --

    def create_pairing_response_by_serial(self, user_token_id):
        """
        Creates a base64-encoded pairing response that identifies
        the token by its serial

        :param user_token_id: the token id (primary key for the user token db)
        :returns base64 encoded pairing response
        """

        token_serial = self.tokens[user_token_id]['serial']
        server_public_key = self.tokens[user_token_id]['server_public_key']
        partition = self.tokens[user_token_id]['partition']

        # ------------------------------------------------------------------ --

        # assemble header and plaintext

        header = struct.pack('<bI', PAIR_RESPONSE_VERSION, partition)

        pairing_response = b''
        pairing_response += struct.pack('<bI', TYPE_PUSHTOKEN, user_token_id)

        pairing_response += self.public_key

        pairing_response += token_serial.encode('utf8') + b'\x00\x00'
        pairing_response += self.gda.encode('utf-8') + b'\x00'

        signature = crypto_sign_detached(pairing_response, self.secret_key)
        pairing_response += signature

        # ------------------------------------------------------------------ --

        # create public diffie hellman component
        # (used to decrypt and verify the response)

        r = os.urandom(32)
        R = calc_dh_base(r)

        # ------------------------------------------------------------------ --

        # derive encryption key and nonce

        server_public_key_dh = dsa_to_dh_public(server_public_key)
        ss = calc_dh(r, server_public_key_dh)
        U = SHA256.new(ss).digest()
        encryption_key = U[0:16]
        nonce = U[16:32]

        # ------------------------------------------------------------------ --

        # encrypt in EAX mode

        cipher = AES.new(encryption_key, AES.MODE_EAX, nonce)
        cipher.update(header)
        ciphertext, tag = cipher.encrypt_and_digest(pairing_response)

        return encode_base64_urlsafe(header + R + ciphertext + tag)

# -------------------------------------------------------------------------- --

    def send_pairing_response(self, pairing_response):
        """ sends a pairing response to /validate/pair """

        params = {'pairing_response': pairing_response}

        # we use the standard callback url here
        # in a real client we would use the callback
        # defined in the pairing url (and saved in
        # the 'token database' of the user)

        response = self.make_validate_request('pair', params)
        response_dict = json.loads(response.body)

        return response_dict

# -------------------------------------------------------------------------- --

    def trigger_challenge(self, user_token_id, content_type=None, data=None):

        serial = self.tokens[user_token_id]['serial']
        pin = self.tokens[user_token_id]['pin']

        params = {'serial': serial, 'pass': pin}

        if content_type is not None:
            params['content_type'] = content_type

        if data is not None:
            params['data'] = data

        # ------------------------------------------------------------------ --

        # we mock the interface of the push provider (namely the method
        # push_notification) to get the generated challenge_url passed
        # to it (which would normally be sent over the PNP)

        with mock.patch.object(default_provider.DefaultPushProvider,
                               'push_notification',
                               autospec=True) as mock_push_notification:

            mock_push_notification.return_value = (True, None)
            response = self.make_validate_request('check_s', params)
            challenge_url = mock_push_notification.call_args[0][1]

            response_dict = json.loads(response.body)
            assert 'result' in response_dict

            result = response_dict.get('result')
            assert 'status' in result
            assert 'value' in result

            status = result.get('status')
            value = result.get('value')

            assert status
            assert not value

        # ------------------------------------------------------------------ --

        return challenge_url

# -------------------------------------------------------------------------- --

    def test_signreq(self):
        """ PushToken: Check if signing transactions works correctly """

        user_token_id = self.execute_correct_pairing(user='******')
        challenge_url = self.trigger_challenge(
            user_token_id,
            data=(
                'Yes, I want to know why doctors hate this guy. Take these '
                '6000 $ with all my sincere benevolence and send me the black '
                'magic diet pill they don\'t want me to know about'),
            content_type=CONTENT_TYPE_SIGNREQ)

        challenge, sig = self.decrypt_and_verify_challenge(challenge_url,
                                                           action='ACCEPT')

        # ------------------------------------------------------------------ --

        # check if the content type is right

        content_type = challenge['content_type']
        assert content_type == CONTENT_TYPE_SIGNREQ

        # ------------------------------------------------------------------ --

        # prepare params for validate

        params = {
            'transactionid': challenge['transaction_id'],
            'signature': sig
        }

        # again, we ignore the callback definitions

        response = self.make_validate_request('accept_transaction', params)
        response_dict = json.loads(response.body)

        status = response_dict.get('result', {}).get('status')
        assert status

        value = response_dict.get('result', {}).get('value')
        assert value, response

        # ------------------------------------------------------------------ --

        # status check

        params = {
            'transactionid': challenge['transaction_id'],
            'user': '******',
            'pass': '******'
        }

        response = self.make_validate_request('check_status', params)
        response_dict = json.loads(response.body)

        transactions = response_dict.get('detail', {}).get('transactions', {})
        transaction = transactions[challenge['transaction_id']]

        assert transaction['status'] == 'closed', response
        assert transaction['accept'], response
        assert transaction['valid_tan'], response

        assert 'KIPT' in transaction['token']['serial'], response

        return

# -------------------------------------------------------------------------- --

    def test_multiple_signreq(self):
        """ PushToken: Check if signing multiple transactions works correctly """

        user_token_id = self.execute_correct_pairing(user='******',
                                                     serial='KIPuOne')

        # ------------------------------------------------------------------ --

        created_challenges = []
        for i in range(0, 10):

            challenge_url = self.trigger_challenge(
                user_token_id,
                data=
                ('Yes, I want to know why doctors hate this guy. Take these '
                 '%d000 $ with all my sincere benevolence and send me the black '
                 'magic diet pill they don\'t want me to know about' % i),
                content_type=CONTENT_TYPE_SIGNREQ)

            challenge, sig = self.decrypt_and_verify_challenge(challenge_url,
                                                               action='ACCEPT')

            # ------------------------------------------------------------------ --

            # check if the content type is right

            content_type = challenge['content_type']
            assert content_type == CONTENT_TYPE_SIGNREQ

            created_challenges.append((challenge_url, challenge, sig))

        # ------------------------------------------------------------------ --

        # verify that all challenges are kept

        params = {'serial': 'KIPuOne', 'open': True}

        response = self.make_admin_request('checkstatus', params)
        response_dict = json.loads(response.body)

        challenges = response_dict.get('result', {}).get('value', {}).get(
            'values', {}).get('KIPuOne', {}).get('challenges', [])

        # remark:
        # we have here one additional challenge, which was the initial
        # pairing challenge

        assert len(challenges) == (len(created_challenges) + 1)

        # ------------------------------------------------------------------ --

        # validate one of the older challenges:
        # from the 10 created challenges we take the 7th one

        (challenge_url, challenge, sig) = created_challenges[7]

        # prepare params for validate

        params = {
            'transactionid': challenge['transaction_id'],
            'signature': sig
        }

        # again, we ignore the callback definitions

        response = self.make_validate_request('accept_transaction', params)
        response_dict = json.loads(response.body)

        status = response_dict.get('result', {}).get('status')
        assert status

        value = response_dict.get('result', {}).get('value')
        assert value, response

        # ------------------------------------------------------------------ --

        # status check

        params = {
            'transactionid': challenge['transaction_id'],
            'user': '******',
            'pass': '******'
        }

        response = self.make_validate_request('check_status', params)
        response_dict = json.loads(response.body)

        transactions = response_dict.get('detail', {}).get('transactions', {})
        transaction = transactions[challenge['transaction_id']]

        assert transaction['status'] == 'closed', response
        assert transaction['accept'], response
        assert transaction['valid_tan'], response

        # verify that all challenges are kept

        params = {'serial': 'KIPuOne', 'open': True}

        response = self.make_admin_request('checkstatus', params)
        response_dict = json.loads(response.body)

        challenges = response_dict.get('result', {}).get('value', {}).get(
            'values', {}).get('KIPuOne', {}).get('challenges', [])

        open_challenges = 0
        accept_challenges = 0

        for challenge in list(challenges.values()):

            status = challenge['session']['status']
            accept = challenge['session'].get('accept')

            if status == 'open':
                open_challenges += 1

            if status == 'closed' and accept:
                accept_challenges += 1

        assert open_challenges == 9
        assert accept_challenges == 2

        return

# -------------------------------------------------------------------------- --

    def test_signreq_reject(self):
        """ PushToken: Check if reject signing transactions works correctly """

        user_token_id = self.execute_correct_pairing(user='******', pin='1234')
        challenge_url = self.trigger_challenge(
            user_token_id,
            data=(
                'Yes, I want to know why doctors hate this guy. Take these '
                '6000 $ with all my sincere benevolence and send me the black '
                'magic diet pill they don\'t want me to know about'),
            content_type=CONTENT_TYPE_SIGNREQ)

        challenge, sig = self.decrypt_and_verify_challenge(challenge_url,
                                                           action='DENY')

        # ------------------------------------------------------------------ --

        # check if the content type is right

        content_type = challenge['content_type']
        assert content_type == CONTENT_TYPE_SIGNREQ

        # ------------------------------------------------------------------ --

        # prepare params for validate

        params = {
            'transactionid': challenge['transaction_id'],
            'signature': sig
        }

        # again, we ignore the callback definitions

        response = self.make_validate_request('reject_transaction', params)
        response_dict = json.loads(response.body)

        status = response_dict.get('result', {}).get('status')
        assert status

        value = response_dict.get('result', {}).get('value')
        assert value, response

        # ------------------------------------------------------------------ --

        # status check

        params = {
            'transactionid': challenge['transaction_id'],
            'user': '******',
            'pass': '******'
        }

        response = self.make_validate_request('check_status', params)
        response_dict = json.loads(response.body)

        transactions = response_dict.get('detail', {}).get('transactions', {})
        transaction = transactions[challenge['transaction_id']]

        assert transaction['status'] == 'closed', response
        assert transaction['reject'], response
        assert not transaction['valid_tan'], response

        return

# -------------------------------------------------------------------------- --

    def test_failed_signreq(self):
        """ PushToken: Check if signing transactions fails correctly """

        user_token_id = self.execute_correct_pairing()
        challenge_url = self.trigger_challenge(
            user_token_id,
            data=(
                'Yes, I want to know why doctors hate this guy. Take these '
                '6000 $ with all my sincere benevolence and send me the black '
                'magic diet pill they don\'t want me to know about'),
            content_type=CONTENT_TYPE_SIGNREQ)

        challenge, __ = self.decrypt_and_verify_challenge(challenge_url,
                                                          action='ACCEPT')

        wrong_sig = 'DEADBEEF' * 32

        # ------------------------------------------------------------------ --

        # check if the content type is right

        content_type = challenge['content_type']
        assert content_type == CONTENT_TYPE_SIGNREQ

        # ------------------------------------------------------------------ --

        # prepare params for validate

        params = {
            'transactionid': challenge['transaction_id'],
            'signature': wrong_sig
        }

        # again, we ignore the callback definitions

        response = self.make_validate_request('accept_transaction', params)
        response_dict = json.loads(response.body)

        status = response_dict.get('result', {}).get('status')
        assert status

        value = response_dict.get('result', {}).get('value')
        assert not value, response

# -------------------------------------------------------------------------- --

    def test_repairing(self):
        """ PushToken: Check if repairing works correctly """

        user_token_id = self.execute_correct_pairing()

        # temporarily switch the gda

        tmp_gda = self.gda
        self.gda = '7777'

        # ------------------------------------------------------------------ --

        # send repairing pairing response

        pairing_response = self.create_pairing_response_by_serial(
            user_token_id)

        response_dict = self.send_pairing_response(pairing_response)

        # ------------------------------------------------------------------ --

        # check if returned json is correct

        assert 'result' in response_dict
        result = response_dict.get('result')

        assert 'value' in result
        value = result.get('value')
        assert not value

        assert 'status' in result
        status = result.get('status')
        assert status

        # ------------------------------------------------------------------ --

        # reset the gda

        self.gda = tmp_gda

# -------------------------------------------------------------------------- --

    def test_repairing_fail_sig(self):
        """ PushToken: Check if repairing fails correctly (wrong sig) """

        user_token_id = self.execute_correct_pairing()

        # temporarily switch the secret key (used for signature)

        tmp_secret_key = self.secret_key
        _public_key, self.secret_key = crypto_sign_keypair()

        # ------------------------------------------------------------------ --

        # send repairing pairing response

        pairing_response = self.create_pairing_response_by_serial(
            user_token_id)

        response_dict = self.send_pairing_response(pairing_response)

        # ------------------------------------------------------------------ --

        # check if returned json is correct

        assert 'result' in response_dict
        result = response_dict.get('result')

        assert 'value' in result
        value = result.get('value')
        assert not value

        assert 'status' in result
        status = result.get('status')
        assert not status

        # ------------------------------------------------------------------ --

        # reset the secret key

        self.secret_key = tmp_secret_key

# -------------------------------------------------------------------------- --

    def test_repairing_fail_pubkey(self):
        """ PushToken: Check if repairing fails correctly (wrong pubkey) """

        user_token_id = self.execute_correct_pairing()

        # temporarily switch the keypair (used for signature)

        tmp_secret_key = self.secret_key
        tmp_public_key = self.public_key

        pk, sk = gen_dsa_keypair()
        self.secret_key = sk
        self.public_key = pk

        # ------------------------------------------------------------------ --

        # send repairing pairing response

        pairing_response = self.create_pairing_response_by_serial(
            user_token_id)

        response_dict = self.send_pairing_response(pairing_response)

        # ------------------------------------------------------------------ --

        # check if returned json is correct

        assert 'result' in response_dict
        result = response_dict.get('result')

        assert 'value' in result
        value = result.get('value')
        assert not value

        assert 'status' in result
        status = result.get('status')
        assert not status

        # ------------------------------------------------------------------ --

        # reset the secret key

        self.secret_key = tmp_secret_key
        self.public_key = tmp_public_key

# -------------------------------------------------------------------------- --

    def test_login(self):
        """ PushToken: Check if signing logins works correctly """

        user_token_id = self.execute_correct_pairing()
        challenge_url = self.trigger_challenge(user_token_id,
                                               data='root@foo',
                                               content_type=CONTENT_TYPE_LOGIN)

        challenge, sig = self.decrypt_and_verify_challenge(challenge_url,
                                                           action='ACCEPT')

        # ------------------------------------------------------------------ --

        # check if the content type is right

        content_type = challenge['content_type']
        assert content_type == CONTENT_TYPE_LOGIN

        # ------------------------------------------------------------------ --

        # prepare params for validate

        params = {
            'transactionid': challenge['transaction_id'],
            'signature': sig
        }

        # again, we ignore the callback definitions

        response = self.make_validate_request('accept_transaction', params)
        response_dict = json.loads(response.body)

        status = response_dict.get('result', {}).get('status')
        assert status

        value = response_dict.get('result', {}).get('value')
        assert value, response

# -------------------------------------------------------------------------- --

    def test_unsupported_content_type(self):
        """ PushToken: Check for unsupported content types """

        user_token_id = self.execute_correct_pairing()

        serial = self.tokens[user_token_id]['serial']
        pin = self.tokens[user_token_id]['pin']

        params = {
            'serial': serial,
            'pass': pin,
            'data': 'wohoooo',
            'content_type': 99999999999
        }

        response = self.make_validate_request('check_s', params)
        response_dict = json.loads(response.body)
        assert 'result' in response_dict

        result = response_dict.get('result')
        assert 'status' in result
        assert 'value' in result

        status = result.get('status')
        value = result.get('value')

        assert not status
        assert not value
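
The pairing and challenge URLs above are unpacked with a decode_base64_urlsafe helper. A plausible sketch of such a helper, restoring the stripped '=' padding before decoding (an assumption about its behavior, not necessarily LinOTP's implementation):

import base64

def decode_base64_urlsafe(data):
    # urlsafe base64 is often transmitted without trailing '=' padding;
    # restore it so the stdlib decoder accepts the input
    return base64.urlsafe_b64decode(data + '=' * ((-len(data)) % 4))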
Example #39
    def __init__(self, filename=None, title=None, overwrite=False):
        """
        Used to initialize the makefile.
        Will generate a main function for the makefile.
        Optionally, will display a title string at beginning of execution.
        filename: str, path to the makefile (Default is None, i.e. generate a temporary file)
        title: string to display at the beginning of the execution
        overwrite: if set to True, will overwrite an existing makefile.
        """

        # Create and open temporary file
        if filename is None:
            tmpf = NamedTemporaryFile(mode='w+')
            filename = tmpf.name
            f = tmpf.file
            self.tmpf = tmpf   # Keep a reference, otherwise the file is deleted
            self.filename = tmpf.name

        # Open chosen file
        else:
            # Just open the file in append mode.
            # Won't work for temporary files though as self.tmpf is deleted.
            if os.path.exists(filename) and not overwrite:
                f = open(filename, 'a+')  # append and writing
                f.seek(0)
                self.filename = filename
                self.f = f
                return
            else:
                f = open(filename, 'w+')  # writing and reading
                self.filename = filename

        # Write header
        f.write(".PHONY: MAIN\n\n")

        # Write colors for commands highlighting
        f.write("CMDCOL := \n")
        f.write("DEFCOL := \n\n")

        # Write MAIN line, without title
        if title is None:
            f.write("MAIN: \n\n")

        # with title
        else:
            f.write("MAIN: pre-build \n\n")
            f.write("pre-build:\n\t@+printf '%s\\n'\n\n" % title)

        # Add a function to list missing outputs, call with 'make list'
        f.write("list:\n")
        f.write("\t@printf '** Missing outputs **\\n'\n")
        f.write("\t@$(MAKE) -n --debug -f $(lastword $(MAKEFILE_LIST)) | \
        sed -n -e 's/^.*Must remake target //p' | \
        sed -e '/MAIN/d' | sed -e '/pre-build/d'\n\n")

        # save
        f.flush()
        self.f = f

        # Check if make version is newer than 4.3
        make_version = get_make_version()
        if make_version > version.parse('4.3'):
            self.new_version = True
        else:
            self.new_version = False
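
get_make_version is referenced but not defined in this snippet. A hypothetical sketch that shells out to make --version and parses the result with packaging.version:

import re
import subprocess

from packaging import version

def get_make_version():
    # parse e.g. 'GNU Make 4.3' from the first line of `make --version`
    # (a sketch of the assumed helper, not the original implementation)
    out = subprocess.check_output(['make', '--version'], text=True)
    match = re.search(r'GNU Make (\d+(?:\.\d+)*)', out)
    return version.parse(match.group(1) if match else '0')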
Example #42
 def create_recovery_archive(self):
     """
     Create a compressed recovery archive from the root tree
     for use with kiwi's recvoery system. The method creates
     additional data into the image root filesystem which is
     deleted prior to the creation of a new recovery data set
     """
     # cleanup
     bash_comand = ['rm', '-f', self.root_dir + '/recovery.*']
     Command.run(['bash', '-c', ' '.join(bash_comand)])
     if not self.oemconfig['recovery']:
         return
     # recovery.tar
     log.info('Creating recovery tar archive')
     metadata = {
         'archive_name': self.root_dir + '/recovery.tar',
         'archive_filecount': self.root_dir + '/recovery.tar.files',
         'archive_size': self.root_dir + '/recovery.tar.size',
         'partition_size': self.root_dir + '/recovery.partition.size',
         'partition_filesystem': self.root_dir + '/recovery.tar.filesystem'
     }
     recovery_archive = NamedTemporaryFile(delete=False)
     archive = ArchiveTar(filename=recovery_archive.name,
                          create_from_file_list=False)
     archive.create(source_dir=self.root_dir,
                    exclude=['dev', 'proc', 'sys'],
                    options=[
                        '--numeric-owner', '--hard-dereference',
                        '--preserve-permissions'
                    ])
     Command.run(['mv', recovery_archive.name, metadata['archive_name']])
     # recovery.tar.filesystem
     recovery_filesystem = self.xml_state.build_type.get_filesystem()
     with open(metadata['partition_filesystem'], 'w') as partfs:
         partfs.write('%s' % recovery_filesystem)
     log.info('--> Recovery partition filesystem: %s', recovery_filesystem)
     # recovery.tar.files
     bash_command = ['tar', '-tf', metadata['archive_name'], '|', 'wc', '-l']
     tar_files_call = Command.run(['bash', '-c', ' '.join(bash_command)])
     tar_files_count = int(tar_files_call.output.rstrip('\n'))
     with open(metadata['archive_filecount'], 'w') as files:
         files.write('%d\n' % tar_files_count)
     log.info('--> Recovery file count: %d files', tar_files_count)
     # recovery.tar.size
     recovery_archive_size_bytes = os.path.getsize(metadata['archive_name'])
     with open(metadata['archive_size'], 'w') as size:
         size.write('%d' % recovery_archive_size_bytes)
     log.info('--> Recovery uncompressed size: %d mbytes',
              int(recovery_archive_size_bytes / 1048576))
     # recovery.tar.gz
     log.info('--> Compressing recovery archive')
     compress = Compress(self.root_dir + '/recovery.tar')
     compress.gzip()
     # recovery.partition.size
     recovery_archive_gz_size_mbytes = int(
         os.path.getsize(metadata['archive_name'] + '.gz') / 1048576)
     recovery_partition_mbytes = recovery_archive_gz_size_mbytes \
         + Defaults.get_recovery_spare_mbytes()
     with open(metadata['partition_size'], 'w') as gzsize:
         gzsize.write('%d' % recovery_partition_mbytes)
     log.info('--> Recovery partition size: %d mbytes',
              recovery_partition_mbytes)
     # delete recovery archive if inplace recovery is requested
     # In this mode the recovery archive is created at install time
     # and not at image creation time. However the recovery metadata
     # is preserved in order to be able to check if enough space
     # is available on the disk to create the recovery archive.
     if self.oemconfig['recovery_inplace']:
         log.info('--> Inplace recovery requested, deleting archive')
         Path.wipe(metadata['archive_name'] + '.gz')
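
Counting archive members by piping tar -tf through wc -l works, but the same count can be computed without a shell pipeline using the standard tarfile module; a sketch:

import tarfile

def count_tar_members(archive_path):
    # equivalent to `tar -tf archive | wc -l`
    with tarfile.open(archive_path) as tar:
        return sum(1 for _ in tar)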
Example #43
import yaml
from tempfile import NamedTemporaryFile

def create_yaml_config(**kw):
    # delete=False: the caller is responsible for removing the file
    tmp = NamedTemporaryFile(mode="wt", suffix=".yml", delete=False)
    yaml.dump(kw, tmp, default_flow_style=False)
    tmp.flush()
    return tmp.name
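
Round-tripping the generated file shows the expected behavior; this usage is illustrative, and removing the file afterwards is the caller's job since delete=False:

import os
import yaml

path = create_yaml_config(name='demo', workers=4)
with open(path) as f:
    assert yaml.safe_load(f) == {'name': 'demo', 'workers': 4}
os.remove(path)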
Example #45
def run_tests():
    import json

    def extract_json(resp):
        return json.loads(''.join(
            [l for l in resp[0] if not l.startswith('//')]))

    def test(assertion, test):
        print('Test %s: %s' % ('passed' if assertion else 'failed', test))
        if not assertion:
            raise AssertionError

    resp = extract_json(
        do_request([
            '# comment', '# :a=barf', 'GET http://httpbin.org/headers',
            'X-Hey: :a', '# comment'
        ], []))
    test(resp['headers']['X-Hey'] == 'barf',
         'Headers are passed with variable substitution.')

    resp = extract_json(
        do_request(['# :a = barf', 'GET http://httpbin.org/get?data=:a'], []))
    test(resp['args']['data'] == 'barf',
         'GET data is passed with variable substitution.')

    resp = extract_json(
        do_request(['POST http://httpbin.org/post', 'some data'], []))
    test(resp['data'] == 'some data',
         'POST data is passed with variable substitution.')

    resp = extract_json(
        do_request([
            'POST http://httpbin.org/post',
            'forma=a',
            'formb=b',
        ], []))
    test(resp['form']['forma'] == 'a', 'POST form data is passed.')

    resp = extract_json(
        do_request([
            'POST http://$global/post',
            'forma=a',
            'formb=b',
        ], ['# $global = httpbin.org']))
    test(resp['form']['forma'] == 'a', 'Global variables are substituted.')

    import os
    from tempfile import NamedTemporaryFile

    SAMPLE_FILE_CONTENT = 'sample file content'

    temp_file = NamedTemporaryFile(mode='w', delete=False)
    temp_file.write(SAMPLE_FILE_CONTENT)
    temp_file.close()
    resp = extract_json(
        do_request([
            'POST http://httpbin.org/post',
            'forma=a',
            'formb=b',
            "formc=!file(%s)" % temp_file.name,
        ], []))
    test(resp['files']['formc'] == SAMPLE_FILE_CONTENT,
         'Files given as path are sent properly.')
    test('formc' not in resp['form'], 'File not included in form data.')
    os.unlink(temp_file.name)

    resp = extract_json(
        do_request([
            'POST http://httpbin.org/post',
            'forma=a',
            'formb=b',
            "formc=!content(%s)" % SAMPLE_FILE_CONTENT,
        ], []))
    test(resp['files']['formc'] == SAMPLE_FILE_CONTENT,
         'Files given as content are sent properly.')

    resp = extract_json(
        do_request([
            'POST http://httpbin.org/post',
            "c=!content(foo \\(bar\\))",
        ], []))
    test(resp['files']['c'] == 'foo (bar)',
         'Escaped parenthesis should be unescaped during request')
Example #46
import os
import time
import unittest

import numpy as np
from numpy.testing import assert_allclose
from tempfile import NamedTemporaryFile

from asteval import Interpreter


class TestCase(unittest.TestCase):
    """testing of asteval"""
    def setUp(self):
        self.interp = Interpreter()
        self.symtable = self.interp.symtable
        self.set_stdout()
        self.set_stderr()

    def set_stdout(self):
        self.stdout = NamedTemporaryFile('w',
                                         delete=False,
                                         prefix='astevaltest')
        self.interp.writer = self.stdout

    def read_stdout(self):
        self.stdout.close()
        time.sleep(0.1)
        fname = self.stdout.name
        with open(self.stdout.name) as inp:
            out = inp.read()
        self.set_stdout()
        os.unlink(fname)
        return out

    def set_stderr(self):
        self.stderr = NamedTemporaryFile('w',
                                         delete=False,
                                         prefix='astevaltest_stderr')
        self.interp.err_writer = self.stderr

    def read_stderr(self):
        self.stderr.close()
        time.sleep(0.1)
        fname = self.stderr.name
        with open(self.stderr.name) as inp:
            out = inp.read()
        self.set_stderr()
        os.unlink(fname)
        return out

    def tearDown(self):
        if not self.stdout.closed:
            self.stdout.close()
        if not self.stderr.closed:
            self.stderr.close()

        # noinspection PyBroadException
        try:
            os.unlink(self.stdout.name)
        except:
            pass
        try:
            os.unlink(self.stderr.name)
        except:
            pass

    # noinspection PyUnresolvedReferences
    def isvalue(self, sym, val):
        """assert that a symboltable symbol has a particular value"""
        tval = self.interp.symtable[sym]
        if isinstance(val, np.ndarray):
            assert_allclose(tval, val, rtol=0.01)
        else:
            assert (tval == val)

    def isnear(self, expr, val):
        tval = self.interp(expr)
        assert_allclose(tval, val, rtol=1.e-4, atol=1.e-4)

    # noinspection PyUnresolvedReferences
    def istrue(self, expr):
        """assert that an expression evaluates to True"""
        val = self.interp(expr)
        if isinstance(val, np.ndarray):
            val = np.all(val)
        return self.assertTrue(val)

    # noinspection PyUnresolvedReferences
    def isfalse(self, expr):
        """assert that an expression evaluates to False"""
        val = self.interp(expr)
        if isinstance(val, np.ndarray):
            val = np.all(val)
        return self.assertFalse(val)

    def check_output(self, chk_str, exact=False):
        self.interp.writer.flush()
        out = self.read_stdout().split('\n')
        if out:
            if exact:
                return chk_str == out[0]
            return chk_str in out[0]
        return False

    def check_error(self, chk_type='', chk_msg=''):
        try:
            errtype, errmsg = self.interp.error[0].get_error()
            self.assertEqual(errtype, chk_type)
            if chk_msg:
                self.assertTrue(chk_msg in errmsg)
        except IndexError:
            if chk_type:
                self.assertTrue(False)
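A hypothetical companion test, sketched to show how the helpers above compose (the method itself is not part of the original suite; asteval Interpreter instances are callable with source strings):

    def test_sketch_assignment(self):
        # hypothetical sketch, not in the original suite
        self.interp('n = 7')                      # evaluate an assignment
        self.isvalue('n', 7)                      # symbol table holds the value
        self.interp('print(n * 6)')               # print goes to the captured stdout file
        self.assertTrue(self.check_output('42'))  # read it back and compare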
예제 #47
0
def get_shelf():
    with NamedTemporaryFile(suffix=".ldb", mode="r") as tmpfile:
        yield partial(Shelf, tmpfile.name)
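Read as a generator, this pairs naturally with contextlib.contextmanager. A usage sketch, assuming Shelf is the surrounding project's storage class opened by path (the .ldb suffix suggests a LevelDB backend) and partial comes from functools:

from contextlib import contextmanager

@contextmanager
def shelf_factory():
    # hypothetical wrapper around get_shelf's body
    with NamedTemporaryFile(suffix=".ldb", mode="r") as tmpfile:
        yield partial(Shelf, tmpfile.name)

# with shelf_factory() as make_shelf:
#     shelf = make_shelf()  # Shelf opens the temporary .ldb path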
예제 #48
0
async def kubectl_create(definition):
    with NamedTemporaryFile('w') as f:
        json.dump(definition, f)
        f.flush()
        await scp_to(f.name, vessel, '/tmp/test-registry/temp.yaml')
    await kubectl('create -f /tmp/test-registry/temp.yaml')
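The f.flush() call is the load-bearing line: the file is still open when scp_to copies it, so the buffered JSON must reach disk first. A self-contained sketch of the same write-flush-consume pattern (no kubectl involved):

import json
from tempfile import NamedTemporaryFile

with NamedTemporaryFile('w', suffix='.yaml') as f:
    json.dump({"kind": "Pod"}, f)  # may still sit in Python's write buffer
    f.flush()                      # force it to disk before another process
    print(open(f.name).read())     # reads the file by name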
예제 #49
0
def addBackgroundOnPdfFile(orginal_pdf, background_pdf):
    '''This method applies the background to all pages of the original pdf file.
     Pdftk uses only the first page of the background_pdf_file and
     applies it to every page of the orginal_pdf_file. That page is
     scaled and rotated as needed to fit each input page.'''
    tmp_pdf_file_name = NamedTemporaryFile().name

    # create two temporary files to give to pdftk command
    orginal_pdf_file = NamedTemporaryFile()
    background_pdf_file = NamedTemporaryFile()

    # saving content
    orginal_pdf_file.write(orginal_pdf)
    orginal_pdf_file.seek(0)

    # saving content
    background_pdf_file.write(_unpackData(background_pdf.data))
    background_pdf_file.seek(0)

    try:
        result = commands.getstatusoutput('pdftk %s background %s output %s' % \
              (orginal_pdf_file.name, background_pdf_file.name, tmp_pdf_file_name))

        # check that the command completed successfully
        if result[0] != 0:
            LOG('addBackgroundOnPdfFile :', ERROR, 'pdftk command '\
                'failed with the following error message : \n%s' % result[1])

            # delete created pdf before raise an error
            #os.remove(tmp_pdf_file_name)
            orginal_pdf_file.close()
            background_pdf_file.close()

            raise ValueError, 'Error: pdftk command failed with the following '\
                              'error message : \n%s' % result[1]

    finally:
        background_pdf_file.close()
        orginal_pdf_file.close()

    return tmp_pdf_file_name
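The commands module is Python 2 only; a minimal Python 3 sketch of the same pdftk invocation with subprocess (paths are illustrative, the command layout is taken from the code above):

import subprocess

def add_background(original_path, background_path, output_path):
    # pdftk stamps page 1 of the background under every page of the original
    subprocess.run(
        ['pdftk', original_path, 'background', background_path,
         'output', output_path],
        check=True,
    )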
예제 #50
0
class BaseTaskRunner(LoggingMixin):
    """
    Runs Airflow task instances by invoking the `airflow tasks run` command with raw
    mode enabled in a subprocess.

    :param local_task_job: The local task job associated with running the
        associated task instance.
    :type local_task_job: airflow.jobs.local_task_job.LocalTaskJob
    """

    def __init__(self, local_task_job):
        # Pass task instance context into log handlers to setup the logger.
        super().__init__(local_task_job.task_instance)
        self._task_instance = local_task_job.task_instance

        popen_prepend = []
        if self._task_instance.run_as_user:
            self.run_as_user = self._task_instance.run_as_user
        else:
            try:
                self.run_as_user = conf.get('core', 'default_impersonation')
            except AirflowConfigException:
                self.run_as_user = None

        # Add sudo commands to change user if we need to. Needed to handle SubDagOperator
        # case using a SequentialExecutor.
        self.log.debug("Planning to run as the %s user", self.run_as_user)
        if self.run_as_user and (self.run_as_user != getpass.getuser()):
            # We want to include any environment variables now, as we won't
            # want to have to specify them in the sudo call - they would show
            # up in `ps` that way! And run commands now, as the other user
            # might not be able to run the cmds to get credentials
            cfg_path = tmp_configuration_copy(chmod=0o600)

            # Give ownership of file to user; only they can read and write
            subprocess.call(['sudo', 'chown', self.run_as_user, cfg_path], close_fds=True)

            # propagate PYTHONPATH environment variable
            pythonpath_value = os.environ.get(PYTHONPATH_VAR, '')
            popen_prepend = ['sudo', '-E', '-H', '-u', self.run_as_user]

            if pythonpath_value:
                popen_prepend.append(f'{PYTHONPATH_VAR}={pythonpath_value}')

        else:
            # Always provide a copy of the configuration file settings. Since
            # we are running as the same user, and can pass through environment
            # variables then we don't need to include those in the config copy
            # - the runner can read/execute those values as it needs
            cfg_path = tmp_configuration_copy(chmod=0o600)

        self._error_file = NamedTemporaryFile(delete=True)
        self._cfg_path = cfg_path
        self._command = (
            popen_prepend
            + self._task_instance.command_as_list(
                raw=True,
                pickle_id=local_task_job.pickle_id,
                mark_success=local_task_job.mark_success,
                job_id=local_task_job.id,
                pool=local_task_job.pool,
                cfg_path=cfg_path,
            )
            + ["--error-file", self._error_file.name]
        )
        self.process = None

    def deserialize_run_error(self) -> Optional[Union[str, Exception]]:
        """Return task runtime error if its written to provided error file."""
        return load_error_file(self._error_file)

    def _read_task_logs(self, stream):
        while True:
            line = stream.readline()
            if isinstance(line, bytes):
                line = line.decode('utf-8')
            if not line:
                break
            self.log.info(
                'Job %s: Subtask %s %s',
                self._task_instance.job_id,
                self._task_instance.task_id,
                line.rstrip('\n'),
            )

    def run_command(self, run_with=None):
        """
        Run the task command.

        :param run_with: list of tokens to run the task command with e.g. ``['bash', '-c']``
        :type run_with: list
        :return: the process that was run
        :rtype: subprocess.Popen
        """
        run_with = run_with or []
        full_cmd = run_with + self._command

        self.log.info("Running on host: %s", get_hostname())
        self.log.info('Running: %s', full_cmd)
        # pylint: disable=subprocess-popen-preexec-fn
        proc = subprocess.Popen(
            full_cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            universal_newlines=True,
            close_fds=True,
            env=os.environ.copy(),
            preexec_fn=os.setsid,
        )

        # Start daemon thread to read subprocess logging output
        log_reader = threading.Thread(
            target=self._read_task_logs,
            args=(proc.stdout,),
        )
        log_reader.daemon = True
        log_reader.start()
        return proc

    def start(self):
        """Start running the task instance in a subprocess."""
        raise NotImplementedError()

    def return_code(self) -> Optional[int]:
        """
        :return: The return code associated with running the task instance or
            None if the task is not yet done.
        :rtype: int
        """
        raise NotImplementedError()

    def terminate(self) -> None:
        """Force kill the running task instance."""
        raise NotImplementedError()

    def on_finish(self) -> None:
        """A callback that should be called when this is done running."""
        if self._cfg_path and os.path.isfile(self._cfg_path):
            if self.run_as_user:
                subprocess.call(['sudo', 'rm', self._cfg_path], close_fds=True)
            else:
                os.remove(self._cfg_path)
        self._error_file.close()
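A minimal concrete runner, sketched against the abstract methods above (Airflow's real runners add more, e.g. process-tree reaping; the os and signal imports are assumed):

class SketchTaskRunner(BaseTaskRunner):
    def start(self):
        self.process = self.run_command()

    def return_code(self):
        return self.process.poll() if self.process else None

    def terminate(self):
        # run_command starts the child in its own session (os.setsid),
        # so signal the whole process group
        if self.process and self.process.poll() is None:
            os.killpg(os.getpgid(self.process.pid), signal.SIGKILL)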
예제 #51
0
    def test_two_files_are_different_as_string_is_true___result_is_concatenated_list_of_differences(
            self):
        first = NamedTemporaryFile(mode='w', delete=False)
        second = NamedTemporaryFile(mode='w', delete=False)
        try:
            first.writelines(
                ['HEADING\n', 'first\n', 'same\n', 'second\n', 'FOOTER\n'])
            first.close()

            second.writelines(
                ['HEADING\n', 'third\n', 'same\n', 'fourth\n', 'FOOTER\n'])
            second.close()

            diff = unified_diff(first.name, second.name, as_string=True)

            self.assertEqual(
                diff, ''.join([
                    '--- {}\n'.format(first.name),
                    '+++ {}\n'.format(second.name),
                    '@@ -1,5 +1,5 @@\n',
                    ' HEADING\n',
                    '-first\n',
                    '+third\n',
                    ' same\n',
                    '-second\n',
                    '+fourth\n',
                    ' FOOTER\n',
                ]))
        finally:
            os.remove(first.name)
            os.remove(second.name)
예제 #52
0
def mergePDF(pdf_document_list):
    '''Merge all pdf in the pdf_document_list in one using pdftk and return it'''

    from warnings import warn
    warn(
        "mergePDF is deprecated, use erp5_pdf_merge business template instead")
    tmp_pdf_list = []
    # create as many tmp files as there are pdf documents
    for pdf_document in pdf_document_list:
        tmp_pdf_file = NamedTemporaryFile()
        # saving content
        tmp_pdf_file.write(_unpackData(pdf_document.data))
        tmp_pdf_file.seek(0)
        tmp_pdf_list.append(tmp_pdf_file)

    # create a tmp file to put the resulting pdf file
    result_file = NamedTemporaryFile()
    result = None

    try:
        name_list = [x.name for x in tmp_pdf_list]
        cmd = 'pdftk %s cat output %s' % (' '.join(name_list),
                                          result_file.name)
        result = commands.getstatusoutput(cmd)

        # check that the command completed successfully
        if result[0] != 0:
            LOG('mergePDF :', ERROR, 'pdftk command '\
                'failed with the following error message : \n%s' % result[1])

            # delete created pdf before raise an error
            for tmp_file in tmp_pdf_list:
                tmp_file.close()

            raise ValueError, 'Error: pdftk command failed with the following '\
                              'error message : \n%s' % result[1]

        else:
            # rewind to the beginning of the result file
            result_file.seek(0)
            # put content in variable
            result = result_file.read()

    finally:
        for tmp_file in tmp_pdf_list:
            tmp_file.close()
        # close result file
        result_file.close()

    return result
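For reference, the underlying pdftk operation is just cat; a Python 3 sketch of the same call with subprocess (paths illustrative):

import subprocess

def merge_pdfs(input_paths, output_path):
    # pdftk in1.pdf in2.pdf ... cat output merged.pdf
    subprocess.run(['pdftk', *input_paths, 'cat', 'output', output_path],
                   check=True)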
예제 #53
0
def export_range_matrix_table_col_p100():
    with NamedTemporaryFile() as f:
        mt = hl.utils.range_matrix_table(n_rows=1_000_000, n_cols=10, n_partitions=100)
        mt.col.export(f.name)
예제 #54
0
def main(argv=["-h"]):
    global items
    args = arg_parser.parse_args(argv)

    items = args.items
    netblocks = []

    if args.update:
        from tempfile import NamedTemporaryFile
        tmpfile = NamedTemporaryFile()
        try:
            update(tmpfile)
        except Exception as e:
            print str(e)
        tmpfile.close()
    else:
        params = {}
        if args.ipaddr:
            params['ipaddr'] = args.ipaddr
        if args.inetnum:
            params['inetnum'] = args.inetnum
        if args.inetnums:
            params['inetnums'] = args.inetnums
        if args.netname:
            params['netname'] = args.netname
        if args.descr:
            params['descr'] = args.descr
        if args.country:
            params['country'] = args.country
        if args.notify:
            params['notify'] = args.notify
        if args.address:
            params['address'] = args.address
        if args.phone:
            params['phone'] = args.phone
        if params:
            if check_db():
                netblocks = ripe_search(params)
            else:
                print "update database first"
                return

    if netblocks:
        summary = get_stat(netblocks, items)
        try:
            margins = map(
                lambda i: max(
                    map(lambda n: len(str(n.get(i) or '').decode('utf-8')),
                        netblocks) + [len(i), len(summary[i])]), items)
        except:
            margins = map(
                lambda i: max(
                    map(lambda n: len(str(n.get(i) or '')), netblocks) +
                    [len(i), len(summary[i])]), items)
        if len(items) > 1:
            print_row(tuple(items), margins)
            print_row(tuple(map(lambda m: '-' * m, margins)), margins)
        for netblock in netblocks:
            print_row(map(lambda i: str(netblock.get(i) or ''), items),
                      margins)
        if len(items) > 1:
            print_row(tuple(map(lambda m: '-' * m, margins)), margins)
            print_row(tuple(map(lambda i: str(summary.get(i) or ''), items)),
                      margins)

    if db:
        db.close()
예제 #55
0
파일: driver.py 프로젝트: SpringRi/phd
def DriveKernel(env: _env.OpenCLEnvironment,
                src: str,
                inputs: np.array,
                gsize: typing.Union[typing.Tuple[int, int, int], NDRange],
                lsize: typing.Union[typing.Tuple[int, int, int], NDRange],
                timeout: int = -1,
                optimizations: bool = True,
                profiling: bool = False,
                debug: bool = False) -> np.array:
    """Drive an OpenCL kernel.

  Executes an OpenCL kernel on the given environment, over the given inputs.
  Execution is performed in a subprocess.

  Args:
    env: The OpenCL environment to run the kernel in.
    src: The OpenCL kernel source.
    inputs: The input data to the kernel.
    optimizations: Whether to enable or disable OpenCL compiler optimizations.
    profiling: If true, print OpenCLevent times for data transfers and kernel
      executions to stderr.
    timeout: Cancel execution if it has not completed after this many seconds.
      A value <= 0 means never time out.
    debug: If true, silence the OpenCL compiler.

  Returns:
    A numpy array of the same shape as the inputs, with the values after
    running the OpenCL kernel.

  Raises:
    ValueError: If input types are incorrect.
    TypeError: If an input is of an incorrect type.
    LogicError: If the input types do not match OpenCL kernel types.
    PorcelainError: If the OpenCL subprocess exits with non-zero return code.
    RuntimeError: If OpenCL program fails to build or run.

  Examples:
    A simple kernel which doubles its inputs:
    >>> src = "kernel void A(global int* a) { a[get_global_id(0)] *= 2; }"
    >>> inputs = [[1, 2, 3, 4, 5]]
    >>> DriveKernel(env, src, inputs, gsize=(5,1,1), lsize=(1,1,1)) # doctest: +SKIP
    array([[ 2,  4,  6,  8, 10]], dtype=int32)
  """
    def Log(*args, **kwargs):
        """Log a message to stderr."""
        if debug:
            print("[cldrive] ", end="", file=sys.stderr)
            print(*args, **kwargs, file=sys.stderr)

    # Assert input types.
    err.assert_or_raise(isinstance(env, _env.OpenCLEnvironment), ValueError,
                        "env argument is of incorrect type")
    err.assert_or_raise(isinstance(src, str), ValueError,
                        "source is not a string")

    # Validate global and local sizes.
    err.assert_or_raise(len(gsize) == 3, TypeError)
    err.assert_or_raise(len(lsize) == 3, TypeError)
    gsize, lsize = NDRange(*gsize), NDRange(*lsize)

    err.assert_or_raise(gsize.product >= 1, ValueError,
                        f"Scalar global size {gsize.product} must be >= 1")
    err.assert_or_raise(lsize.product >= 1, ValueError,
                        f"Scalar local size {lsize.product} must be >= 1")
    err.assert_or_raise(
        gsize >= lsize, ValueError,
        f"Global size {gsize} must be at least as large as local size {lsize}")

    # Parse args in this process since we want to preserve the useful exception
    # type.
    args = _args.GetKernelArguments(src)

    # Check that the number of inputs is correct.
    args_with_inputs = [
        i for i, arg in enumerate(args) if not arg.address_space == 'local'
    ]
    err.assert_or_raise(
        len(args_with_inputs) == len(inputs), ValueError,
        "Kernel expects {} inputs, but {} were provided".format(
            len(args_with_inputs), len(inputs)))

    # All inputs must have some length.
    for i, x in enumerate(inputs):
        err.assert_or_raise(len(x), ValueError, f"Input {i} has size zero")

    # Copy inputs into the expected data types.
    data = np.array(
        [np.array(d).astype(a.numpy_type) for d, a in zip(inputs, args)])

    job = {
        "env": env,
        "src": src,
        "args": args,
        "data": data,
        "gsize": gsize,
        "lsize": lsize,
        "optimizations": optimizations,
        "profiling": profiling
    }

    with NamedTemporaryFile('rb+', prefix='cldrive-',
                            suffix='.job') as tmp_file:
        porcelain_job_file = tmp_file.name

        # Write job file.
        pickle.dump(job, tmp_file)
        tmp_file.flush()

        # Enforce timeout using sigkill.
        if timeout > 0:
            cli = ["timeout", "--signal=9", str(int(timeout))]
        else:
            cli = []
        cli += [sys.executable, __file__, porcelain_job_file]

        cli_str = " ".join(cli)
        Log("Porcelain invocation:", cli_str)

        # Fork and run.
        process = Popen(cli, stdout=PIPE, stderr=PIPE)
        stdout, stderr = process.communicate()
        status = process.returncode

        if debug:
            print(stdout.decode('utf-8').strip(), file=sys.stderr)
            print(stderr.decode('utf-8').strip(), file=sys.stderr)
        elif profiling:
            # Print profiling output when not in debug mode.
            for line in stderr.decode('utf-8').split('\n'):
                if re.match(r'\[cldrive\] .+ time: [0-9]+\.[0-9]+ ms', line):
                    print(line, file=sys.stderr)
        Log(f"Porcelain return code: {status}")

        # Test for non-zero exit codes. The porcelain subprocess catches exceptions
        # and completes gracefully, so a non-zero return code is indicative of a
        # more serious problem.
        #
        # FIXME: I'm seeing a number of SIGABRT return codes which I can't explain.
        # However, ignoring them seems to not cause a problem ...
        if status != 0 and status != -Signals['SIGABRT'].value:
            # A negative return code means a signal. Try and convert the value into a
            # signal name.
            with suppress(ValueError):
                status = Signals(-status).name

            if status == "SIGKILL":
                raise TimeoutError(timeout)
            else:
                raise PorcelainError(status)

        # Read result.
        tmp_file.seek(0)
        return_value = pickle.load(tmp_file)
        outputs = return_value["outputs"]
        error = return_value["err"]
        if error:  # Porcelain raised an exception, re-raise it.
            raise error
        else:
            return outputs
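The porcelain hand-off above is a pickle round-trip through a named temporary file; stripped to its skeleton (hypothetical payload, no subprocess):

import pickle
from tempfile import NamedTemporaryFile

job = {'gsize': (5, 1, 1), 'optimizations': True}  # hypothetical payload

with NamedTemporaryFile('rb+', suffix='.job') as tmp:
    pickle.dump(job, tmp)   # parent writes the job...
    tmp.flush()
    # ...the child process would open tmp.name, run, and overwrite it...
    tmp.seek(0)             # parent rewinds and reads the result back
    result = pickle.load(tmp)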
예제 #56
0
def _dump(path: Path, thing: str) -> None:
    path.parent.mkdir(parents=True, exist_ok=True)
    with NamedTemporaryFile(dir=path.parent, mode="w", delete=False) as fd:
        fd.write(thing)
    Path(fd.name).replace(path)
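Because the temporary file is created in path.parent, the final Path.replace is a same-filesystem rename, which is atomic on POSIX: readers of path see either the old content or the new, never a torn write. Usage sketch (the path is illustrative):

from pathlib import Path

_dump(Path('/tmp/demo/config.json'), '{"key": "value"}')
print(Path('/tmp/demo/config.json').read_text())  # -> {"key": "value"}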
예제 #57
0
class AbstractWrapper(object):
    '''
        abstract solver wrapper
    '''
    
    def __init__(self):
        '''
            Constructor
        '''
        #program_name = os.path.basename(sys.argv[0])
        program_version = "v%s" % __version__
        program_build_date = str(__updated__)
        program_version_message = "%%(prog)s %s (%s)" % (program_version, program_build_date)
        program_shortdesc = __import__("__main__").__doc__.split("\n")[1]
        program_license = '''%s
    
          Created by %s on %s.
          Copyright 2014 - AClib. All rights reserved.
          
          Licensed under the GPLv2
          http://www.gnu.org/licenses/gpl-2.0.html
          
          Distributed on an "AS IS" basis without warranties
          or conditions of any kind, either express or implied.
        
          USAGE
        ''' % (program_shortdesc, str(__authors__), str(__date__))
        self.parser = ArgumentParser(description=program_license, formatter_class=RawDescriptionHelpFormatter, add_help=False)
        self.args = None

        self.RESULT_MAPPING = {'SUCCESS': "SAT"} 
        self._watcher_file = None
        self._solver_file = None

        self._instance = ""
        self._specifics = ""
        self._cutoff = 0.0
        self._runlength = 0
        self._seed = 0
        
        self._exit_code = None
        
        self._runsolver = None
        self._mem_limit = 2048
        self._tmp_dir = None

        self._crashed_if_non_zero_status = True
        
        self._subprocesses = []
        
        self._DEBUG = True
        self._DELAY2KILL = 2

        self._ta_status = "EXTERNALKILL"
        self._ta_runtime = 999999999.0
        self._ta_runlength = -1
        self._ta_quality = -1
        self._ta_exit_code = None
        self._ta_misc = ""
        

    def print_d(self, str_):
        if self._DEBUG:
            print(str_)
        
    def main(self, argv=None): 
        ''' parse command line'''
        if argv is None:
            argv = sys.argv
        else:
            sys.argv.extend(argv)
    
        try:
            signal.signal(signal.SIGTERM, signalHandler)
            signal.signal(signal.SIGQUIT, signalHandler)
            signal.signal(signal.SIGINT, signalHandler)

            # Setup argument parser
            
            run_group = self.parser.add_argument_group("Run")
            run_group.add_argument("--runsolver-path", dest="runsolver", default=os.path.join(os.path.join(os.path.dirname(__file__),"runsolver"), "runsolver"), help="path to runsolver binary (if None, the runsolver is deactivated)")
            run_group.add_argument("--temp-file-dir", dest="tmp_dir", default=".", help="directory for temporary files (relative to -exec-dir in SMAC scenario)")
            run_group.add_argument("--mem-limit", dest="mem_limit", default=self._mem_limit, type=int, help="memory limit in MB")
            run_group.add_argument("--internal", dest="internal", default=False, action="store_true", help="skip calling an external target algorithm")
            
            run_group = self.parser.add_argument_group("External Callstring Generation and Output Parsing")
            run_group.add_argument("--ext-callstring", dest="ext_callstring", default=None, help="Command to get call string via external program;" +
                                                                                             "your programm gets a file with"+
                                                                                             "first line: instance name,"+
                                                                                             "second line: seed"+
                                                                                             "further lines: paramter name, paramater value;"+ 
                                                                                             "output: one line with callstring for target algorithm")
            run_group.add_argument("--ext-parsing", dest="ext_parsing", default=None, help="Command to use an external program to parse the output of your target algorihm;" +
                                                                                           "only paramter: name of output file;"+
                                                                                           "output of your progam:"+
                                                                                           "status: SAT|UNSAT|TIMEOUT|CRASHED\n"+
                                                                                           "quality: <integer>\n"+
                                                                                           "misc: <string>")

            help_group = self.parser.add_argument_group("Help")
            help_group.add_argument("--help", dest="show_help", action="store_true", help="show this help message")
            
            # Process arguments
            self.args, target_args = self.parser.parse_known_args()
            args = self.args
           
            if args.show_help:
                self.parser.print_help()
                self._ta_status = "ABORT"
                self._ta_misc = "help was requested..."
                self._exit_code = 1
                sys.exit(1)

            if args.runsolver != "None" and not os.path.isfile(args.runsolver) and not args.internal:
                self._ta_status = "ABORT"
                self._ta_misc = "runsolver is missing - should have been at %s." % (args.runsolver)
                self._exit_code = 1
                sys.exit(1)
            else:
                self._runsolver = args.runsolver
                self._mem_limit = args.mem_limit
            
            if not os.path.isdir(args.tmp_dir):
                self._ta_status = "ABORT"
                self._ta_misc = "temp directory is missing - should have been at %s." % (args.tmp_dir)
                self._exit_code = 1
                sys.exit(1)
            else:
                self._tmp_dir = args.tmp_dir
            
            if len(target_args) < 5:
                self._ta_status = "ABORT"
                self._ta_misc = "some required TA parameters (instance, specifics, cutoff, runlength, seed) missing - was [%s]." % (" ".join(target_args))
                self._exit_code = 1
                sys.exit(1)
                
            config_dict = self.build_parameter_dict(target_args)
            runargs = {
                        "instance": self._instance,
                        "specifics" : self._specifics,
                        "cutoff" : self._cutoff,
                        "runlength" : self._runlength,
                        "seed" : self._seed
                      }
            if args.ext_callstring:
                target_cmd = self.get_command_line_args_ext(runargs=runargs, config=config_dict, ext_call=args.ext_callstring).split(" ")
            else:
                target_cmd = self.get_command_line_args(runargs=runargs, config=config_dict).split(" ")
            
            if not args.internal:
                self.call_target(target_cmd)
                self.read_runsolver_output()
                
            if args.ext_parsing:
                resultMap = self.process_results_ext(self._solver_file, {"exit_code" : self._ta_exit_code}, ext_call=args.ext_parsing)
            else:
                resultMap = self.process_results(self._solver_file, {"exit_code" : self._ta_exit_code})
            
            if ('status' in resultMap):
                self._ta_status = self.RESULT_MAPPING.get(resultMap['status'],resultMap['status'])
            if ('runtime' in resultMap):
                self._ta_runtime = resultMap['runtime']
            if ('quality' in resultMap):
                self._ta_quality = resultMap['quality']
            if ('misc' in resultMap):
                self._ta_misc = resultMap['misc']
                
            # if still no status was determined, something went wrong and output files should be kept
            if self._ta_status == "EXTERNALKILL":
                self._ta_status = "CRASHED"
            sys.exit()
        except (KeyboardInterrupt, SystemExit):
            self.cleanup()
            self.print_result_string()
            if self._ta_exit_code:
                sys.exit(self._ta_exit_code)
            elif self._exit_code:
                sys.exit(self._exit_code)
            else:
                sys.exit(0)
        
    def build_parameter_dict(self, arg_list):
        '''
            Reads all arguments which were not parsed by ArgumentParser,
            extracts all meta information
            and builds a mapping: parameter name -> parameter value
            Format Assumption: <instance> <specifics> <runtime cutoff> <runlength> <seed> <solver parameters>
            Args:
                list of all options not parsed by ArgumentParser
        '''
        self._instance = arg_list[1]
        self._specifics = arg_list[2]
        self._cutoff = int(float(arg_list[3]) + 1) # runsolver only rounds down to integer
        self._runlength = int(arg_list[4])
        self._seed = int(arg_list[5])
        
        params = arg_list[6:]
        if len(params) % 2 != 0:
            self._ta_status = "ABORT"
            self._ta_misc = "target algorithm parameter list MUST have even length - found %d arguments." % (len(params))
            self.print_d(" ".join(params))
            self._exit_code = 1
            sys.exit(1)
        
        return dict((name, value) for name, value in zip(params[::2], params[1::2]))
        
    def call_target(self, target_cmd):
        '''
            extends the target algorithm command line call with the runsolver
            and executes it
            Args:
                list of target cmd (from getCommandLineArgs)
        '''
        random_id = random.randint(0,1000000)
        self._watcher_file = NamedTemporaryFile(suffix=".log", prefix="watcher-%d-" %(random_id), dir=self._tmp_dir, delete=False)
        self._solver_file = NamedTemporaryFile(suffix=".log", prefix="solver-%d-" %(random_id), dir=self._tmp_dir, delete=False)
        
        runsolver_cmd = []
        if self._runsolver != "None":
            runsolver_cmd = [self._runsolver, "-M", self._mem_limit, "-C", self._cutoff,
                             "-w", self._watcher_file.name,
                             "-o", self._solver_file.name]
        
        runsolver_cmd.extend(target_cmd)
        #for debugging
        self.print_d("Calling runsolver. Command-line:")
        self.print_d(" ".join(map(str,runsolver_cmd)))

        # run
        try:
            if self._runsolver != "None":
                io = Popen(map(str, runsolver_cmd), shell=False, preexec_fn=os.setpgrp)
            else:
                io = Popen(map(str, runsolver_cmd), stdout=self._solver_file, shell=False, preexec_fn=os.setpgrp)
            self._subprocesses.append(io)
            io.wait()
            self._subprocesses.remove(io)
            if io.stdout:
                io.stdout.flush()
        except OSError:
            self._ta_status = "ABORT"
            self._ta_misc = "execution failed: %s"  % (" ".join(map(str,runsolver_cmd)))
            self._exit_code = 1 
            sys.exit(1)
            
        self._solver_file.seek(0)

    def float_regex(self):
        return r'[+-]?\d+(?:\.\d+)?(?:[eE][+-]?\d+)?'

    def read_runsolver_output(self):
        '''
            reads self._watcher_file, 
            extracts runtime
            and returns if memout or timeout found
        ''' 
        if self._runsolver == "None":
            self._ta_exit_code = 0
            return
        
        self.print_d("Reading runsolver output from %s" % (self._watcher_file.name))
        data = self._watcher_file.read()

        if (re.search('runsolver_max_cpu_time_exceeded', data) or re.search('Maximum CPU time exceeded', data)):
            self._ta_status = "TIMEOUT"

        if (re.search('runsolver_max_memory_limit_exceeded', data)):
            self._ta_status = "TIMEOUT"
            self._ta_misc = "memory limit was exceeded"
           
        cpu_pattern1 = re.compile('runsolver_cputime: (%s)' % (self.float_regex()))
        cpu_match1 = re.search(cpu_pattern1, data)
            
        cpu_pattern2 = re.compile('CPU time \\(s\\): (%s)' % (self.float_regex()))
        cpu_match2 = re.search(cpu_pattern2, data)

        if (cpu_match1):
            self._ta_runtime = float(cpu_match1.group(1))
        if (cpu_match2):
            self._ta_runtime = float(cpu_match2.group(1))

        exitcode_pattern = re.compile('Child status: ([0-9]+)')
        exitcode_match = re.search(exitcode_pattern, data)

        if (exitcode_match):
            self._ta_exit_code = int(exitcode_match.group(1))

    def print_result_string(self):
        sys.stdout.write("Result for ParamILS: %s, %s, %s, %s, %s" % (self._ta_status, str(self._ta_runtime), str(self._ta_runlength), str(self._ta_quality), str(self._seed)))
        if (len(self._ta_misc) > 0):
            sys.stdout.write(", %s" % (self._ta_misc))

        print('')
        
    def cleanup(self):
        '''
            cleanup if error occurred or external signal handled
        '''
        if (len(self._subprocesses) > 0):
            print("killing the target run!")
            try:
                for sub in self._subprocesses:
                    #sub.terminate()
                    Popen(["pkill","-TERM", "-P",str(sub.pid)])
                    self.print_d("Wait %d seconds ..." % (self._DELAY2KILL))
                    time.sleep(self._DELAY2KILL)
                    if sub.returncode is None: # still running
                        sub.kill()

                self.print_d("done... If anything in the subprocess tree fork'd a new process group, we may not have caught everything...")
                self._ta_misc = "forced to exit by signal or keyboard interrupt."
                self._ta_runtime = self._cutoff
            except (OSError, KeyboardInterrupt, SystemExit):
                self._ta_misc = "forced to exit by multiple signals/interrupts."
                self._ta_runtime = self._cutoff

        if (self._ta_status == "ABORT" or self._ta_status == "CRASHED"):
            if (len(self._ta_misc) == 0):
                self._ta_misc = 'Problem with run. Exit code was %d.' % (self._ta_exit_code)

            if (self._watcher_file and self._solver_file):
                self._ta_misc = self._ta_misc + '; Preserving runsolver output at %s - preserving target algorithm output at %s' % (self._watcher_file.name or "<none>", self._solver_file.name or "<none>")

        try:
            if (self._watcher_file):
                self._watcher_file.close()
            if (self._solver_file):
                self._solver_file.close()

            if (self._ta_status != "ABORT" and self._ta_status != "CRASHED"):
                os.remove(self._watcher_file.name)
                os.remove(self._solver_file.name)
        except (OSError, KeyboardInterrupt, SystemExit):
            self._ta_misc = "problems removing temporary files during cleanup."
        except AttributeError:
            pass #in internal mode, these files are not generated
    
        if self._ta_status == "EXTERNALKILL":
            self._ta_status = "CRASHED"
            self._exit_code = 3

    def get_command_line_args(self, runargs, config):
        '''
        Returns the command call list containing arguments to execute the implementing subclass' solver.
        The default implementation delegates to get_command_line_args_ext. If this is not implemented, a
        NotImplementedError will be raised.
    
        Args:
            runargs: a map of any non-configuration arguments required for the execution of the solver.
            config: a mapping from parameter name (with prefix) to parameter value.
        Returns:
            A command call list to execute a target algorithm.
        '''
        raise NotImplementedError()

    def get_command_line_args_ext(self, runargs, config, ext_call):
        '''
        When production of the target algorithm is done from a source other than python,
        override this method to return a command call list to execute whatever you need to produce the command line.

        Args:
            runargs: a map of any non-configuration arguments required for the execution of the solver.
            config: a mapping from parameter name (with prefix) to parameter value.
            ext_call: string to call external program to get callstring of target algorithm
        Returns:
            A command call list to execute the command producing a single line of output containing the solver command string
        '''
        callstring_in = NamedTemporaryFile(suffix=".csv", prefix="callstring", dir=self._tmp_dir, delete=False)
        callstring_in.write("%s\n" %(runargs["instance"]))
        callstring_in.write("%d\n" %(runargs["seed"]))
        for name,value in config.items():
            callstring_in.write("%s,%s\n" %(name,value))
        callstring_in.flush()
        
        cmd = ext_call.split(" ")
        cmd.append(callstring_in.name)
        self.print_d(" ".join(cmd))
        try:
            io = Popen(cmd, shell=False, preexec_fn=os.setpgrp, stdout=PIPE)
            self._subprocesses.append(io)
            out_, _ = io.communicate()
            self._subprocesses.remove(io)
        except OSError:
            self._ta_misc = "failed to run external program for output parsing : %s" %(" ".join(cmd))
            self._ta_runtime = self._cutoff
            self._exit_code = 2
            sys.exit(2)
        if not out_ :
            self._ta_misc = "external program for output parsing yielded empty output: %s" %(" ".join(cmd))
            self._ta_runtime = self._cutoff
            self._exit_code = 2
            sys.exit(2)
        callstring_in.close()
        os.remove(callstring_in.name)
        return out_.strip("\n")
    
    def process_results(self, filepointer, out_args):
        '''
        Parse a results file to extract the run's status (SUCCESS/CRASHED/etc) and other optional results.
    
        Args:
            filepointer: a pointer to the file containing the solver execution standard out.
            exit_code : exit code of target algorithm
        Returns:
            A map containing the standard AClib run results. The current standard result map as of AClib 2.06 is:
            {
                "status" : <"SAT"/"UNSAT"/"TIMEOUT"/"CRASHED"/"ABORT">,
                "runtime" : <runtime of target algrithm>,
                "quality" : <a domain specific measure of the quality of the solution [optional]>,
                "misc" : <a (comma-less) string that will be associated with the run [optional]>
            }
            ATTENTION: The return values will overwrite the measured results of the runsolver (if runsolver was used). 
        '''
        raise NotImplementedError()

    def process_results_ext(self, filepointer, out_args, ext_call):
        '''
        Args:
            filepointer: a pointer to the file containing the solver execution standard out.
            exit_code : exit code of target algorithm
        Returns:
            A map containing the standard AClib run results. The current standard result map as of AClib 2.06 is:
            {
                "status" : <"SAT"/"UNSAT"/"TIMEOUT"/"CRASHED"/"ABORT">,
                "quality" : <a domain specific measure of the quality of the solution [optional]>,
                "misc" : <a (comma-less) string that will be associated with the run [optional]>
            }
        '''
        
        cmd = ext_call.split(" ")
        cmd.append(filepointer.name)
        self.print_d(" ".join(cmd))
        try:
            io = Popen(cmd, shell=False, preexec_fn=os.setpgrp, stdout=PIPE)
            self._subprocesses.append(io)
            out_, _ = io.communicate()
            self._subprocesses.remove(io)
        except OSError:
            self._ta_misc = "failed to run external program for output parsing"
            self._ta_runtime = self._cutoff
            self._exit_code = 2
            sys.exit(2)
        
        result_map = {}
        for line in out_.split("\n"):
            if line.startswith("status:"):
                result_map["status"] = line.split(":")[1].strip(" ")
            elif line.startswith("quality:"):
                result_map["quality"] = line.split(":")[1].strip(" ")
            elif line.startswith("misc:"):
                result_map["misc"] = line.split(":")[1]
        
        return result_map
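A minimal concrete wrapper, sketched against the abstract hooks above (the solver name and output format are hypothetical; get_command_line_args must return a single string, since the caller splits it on spaces):

class SketchSolverWrapper(AbstractWrapper):
    def get_command_line_args(self, runargs, config):
        params = " ".join("%s %s" % (k, v) for k, v in config.items())
        return "mysolver --seed %d %s %s" % (
            runargs["seed"], params, runargs["instance"])

    def process_results(self, filepointer, out_args):
        data = filepointer.read()
        status = "SAT" if "s SATISFIABLE" in data else "TIMEOUT"
        return {"status": status}

if __name__ == "__main__":
    wrapper = SketchSolverWrapper()
    wrapper.main()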
예제 #58
0
def export_range_matrix_table_entry_field_p100():
    with NamedTemporaryFile() as f:
        mt = hl.utils.range_matrix_table(n_rows=1_000_000, n_cols=10, n_partitions=100)
        mt = mt.annotate_entries(x=mt.col_idx + mt.row_idx)
        mt.x.export(f.name)
예제 #59
0
def applyRules(outputLocation,output,Juniperip,Juniperusr,Juniperpwd):
	numnewrules = 0

	# Set up the SSH client to connect to the Juniper firewall
	con_pre=paramiko.SSHClient()
	con_pre.set_missing_host_key_policy(paramiko.AutoAddPolicy())
	con_pre.connect(Juniperip, username=Juniperusr, password=Juniperpwd,look_for_keys=False, allow_agent=False)
	
	# Connect to the Juniper firewall via SSH
	con = con_pre.invoke_shell()

	# Enter cli
	con.send('cli\n')
	time.sleep(.2)

	# Enter config
	con.send('configure\n')
	time.sleep(.2)

	# Open the rule file
	f = open(output, 'rb')
	reader = csv.reader(f, delimiter=',', quotechar='"')

	# Open the rule file and create a temporary file to store the applied firewall rules
	tf = NamedTemporaryFile(delete=False)
	writer = csv.writer(tf, delimiter=',', quotechar='"')

	# Loop through each row of the rule file to get IP addresses
	for row in reader:
		# Check if the rule is new
		if row[3] == 'new':
			time.sleep(.2)
		# If the rule is for an IP address, format the filter appropriately and apply it to the firewall's configuration
			if row[0] == "ip":
				# Apply filter
				con.send('set firewall family inet filter MSISAC_Anomali term ' + today2 + ' from destination-address ' + row[1] + '\n')
				time.sleep(.2)

				numnewrules += 1

				# Mark the rule as applied within the rule file to avoid duplicate entries
				row[3] = 'applied'

			if row[0] == "domain":
				# Apply filter
				con.send('set firewall family inet filter MSISAC_Anomali term ' + today2 + ' from destination-address ' + row[1] + '\n')
				time.sleep(.2)

				numnewrules += 1

				# Mark the rule as applied within the rule file to avoid duplicate entries
				row[3] = 'applied'

		# Write the (possibly updated) rule to the temporary file
		writer.writerow(row)

	# Apply the protocol matches once, after all address filters are in place
	con.send('set firewall family inet filter MSISAC_Anomali term ' + today2 + ' from protocol icmp' + '\n')
	time.sleep(.2)
	con.send('set firewall family inet filter MSISAC_Anomali term ' + today2 + ' from protocol udp' + '\n')
	time.sleep(.2)
	con.send('set firewall family inet filter MSISAC_Anomali term ' + today2 + ' from protocol tcp' + '\n')
	time.sleep(.2)

	# Discard matching traffic
	con.send('set firewall family inet filter MSISAC_Anomali term ' + today2 + ' then discard' + '\n')
	time.sleep(.2)

	# Count matching traffic
	con.send('set firewall family inet filter MSISAC_Anomali term ' + today2 + ' then count MSISAC_Anomali-Counter' + '\n')
	time.sleep(.2)

	# Commit the filter
	con.send('commit' + '\n')
	time.sleep(.2)

	# Attach the filter to the interface
	con.send('set interfaces ' + Juniperif + ' unit 0 family inet filter MSISAC_Anomali' + '\n')
	time.sleep(.2)

	# Commit the interface change
	con.send('commit' + '\n')
	time.sleep(.2)
	# Flush the updated rules to disk, then replace the old rule file
	tf.close()
	f.close()
	shutil.move(tf.name, output)

	return numnewrules
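The rewrite-through-a-temporary-file pattern used above, in isolation (a Python 3 sketch; the four-column row layout -- type, value, source, state -- is taken from the code above):

import csv
import shutil
from tempfile import NamedTemporaryFile

def mark_rules_applied(rule_file):
    # Copy every row, flipping 'new' to 'applied', then swap the files
    with open(rule_file, newline='') as src, \
         NamedTemporaryFile('w', delete=False, newline='') as tmp:
        writer = csv.writer(tmp)
        for row in csv.reader(src):
            if len(row) > 3 and row[3] == 'new':
                row[3] = 'applied'
            writer.writerow(row)
    shutil.move(tmp.name, rule_file)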