Code example #1
import os
from mako.template import Template

def buildGroundTruth(tiku_dir):
	# getGroundTruthRegisterInfo() is a project-level helper not shown in this snippet
	regInfo = getGroundTruthRegisterInfo(tiku_dir)
	tiku = []
	size = 1
	for fileName in os.listdir(tiku_dir):
		if not fileName.endswith('.tsv'):
			continue
		id = os.path.splitext(fileName)[0]  # strip('.tsv') would remove '.', 't', 's', 'v' characters, not the extension
		
		if regInfo[id]['status'] == 'Done':
			source = regInfo[id]['source']
			fin = open(os.path.join(tiku_dir, fileName), 'r')
			lines = fin.readlines()
			fin.close()
			
			for line in lines:
				if line.startswith('#'):
					continue
					
				fds = line.decode('utf8').strip().split('\t')
				timu = {}
				timu['source'] = source
				timu['original_text'] = fds[0]
				timu['ans'] = fds[1]
				timu['id'] = size
				tiku.append(timu)
				size += 1
				

	
	gt_file = os.path.join(tiku_dir, 'groundTruths.json')
	sContent = Template(filename="C:\\projects\\mathRobot\\knols\\Templates\\tiku.mako").render(tiku = tiku)
	fgt = open(gt_file, 'w')
	fgt.write(sContent.encode('utf8'))
	fgt.close()
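
For reference, a minimal self-contained sketch of the pattern this example (and most of the ones below) relies on: load a Mako template from a file, render it with keyword arguments, and write the UTF-8 encoded result. The file names and variables here are invented for illustration.

from mako.template import Template

# hypothetical template file containing e.g. "<p>Hello, ${name}!</p>"
template = Template(filename="hello.mako", input_encoding="utf-8")

# render() returns a string; encode it before writing bytes
rendered = template.render(name="world")
with open("hello.html", "wb") as out:
    out.write(rendered.encode("utf-8"))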
Code example #2
 def get_populated_label(self, delivery, sender, address, option, kwargs):
     zpl_file = self._service
     if self._specific_label:
         # in this case, you must use a really specific label
         # (some 'commerçant' use case)
         zpl_file += '_' + self._specific_label
     zpl_file += '.mako'
     zpl_file_path = os.path.join(os.path.dirname(__file__), 'report',
                                  zpl_file)
     with open(zpl_file_path, 'r') as opened_file:
         file_content = opened_file.read()
         self.validate_mako(file_content, delivery, sender, address, option,
                            kwargs)
         try:
             #print 'd', delivery, '\na', address, '\ns', sender, '\no', kwargs
             zpl = Template(file_content).render(d=delivery,
                                                 s=sender,
                                                 a=address,
                                                 o=option,
                                                 **kwargs)
             content = zpl.encode(encoding=CODING, errors=ERROR_BEHAVIOR)
             if self._test_name:
                 self._record_unit_test_datas(content, delivery, sender,
                                              address, option, kwargs)
         except:
             traceback = RichTraceback()
             self.extract_mako_error(traceback, zpl_file)
         return content
Code example #3
 def handle_index(self, url, query):
     """Render the index template"""
     try:
         # Any data for the template could be prepared here if needed
         template = Template(filename="index_template.html").render()
         self.wfile.write(template.encode('utf-8'))
     except:
         self.wfile.write(exceptions.html_error_template().render())
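
Several examples here (this one, and the _run() variants further down) report render failures through Mako's built-in error templates. A minimal sketch, using a deliberately broken template string:

from mako.template import Template
from mako import exceptions

try:
    # ${1 / 0} forces an error inside the template
    Template("value: ${1 / 0}").render()
except Exception:
    # plain-text report, e.g. for a console or log file
    print(exceptions.text_error_template().render())
    # HTML report, e.g. to write back to the browser as handle_index() does
    html_report = exceptions.html_error_template().render()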
Code example #4
File: ui.py Project: trehn/intermix
class UI(object):
    def __init__(self, layers=1, state=None, title="Intermix", host="127.0.0.1", port=8080):
        self.html = Template(HTML).render(layers=layers, port=port, title=title)
        self.layers = layers
        self.url = "http://{host}:{port}/".format(host=host, port=port)

        self.state = state
        self._running = True

        self.app = web.Application()
        self.app.router.add_route('GET', "/", self._html_client)
        self.app.router.add_route('GET', "/ws", self._websocket_handler)
        self.handler = self.app.make_handler()

        self.loop = get_event_loop()
        server_init = self.loop.create_server(self.handler, host, port)
        self.server = self.loop.run_until_complete(server_init)
        self.thread = Thread(target=self._thread_body, daemon=True)
        self.thread.start()

    @coroutine
    def _html_client(self, request):
        return web.Response(body=self.html.encode('utf-8'))

    def _thread_body(self):
        try:
            self.loop.run_forever()
        finally:
            self.loop.run_until_complete(self.handler.finish_connections(0.1))
            self.server.close()
            self.loop.run_until_complete(self.server.wait_closed())
            self.loop.run_until_complete(self.app.finish())

    @coroutine
    def _websocket_handler(self, request):
        ws = web.WebSocketResponse()
        ws.start(request)
        client = Client(ws, self.layers)
        while self._running:
            start = time()
            msg = yield from ws.receive()
            if msg.tp in (MsgType.close, MsgType.error):
                break
            elif msg.tp != MsgType.text:
                continue
            client.attrs = loads(msg.data)
            delay = self.draw(client)
            client.raw_js("send_info();")
            ws.send_str(client._dump_js())
            sleep(max(0, delay - (time() - start)))
        return ws

    def draw(self, output):
        pass

    def stop(self):
        self._running = False
        self.loop.stop()
Code example #5
def _run(args):
    if args.config_file is None:
        if not args.show_defaults:
            raise ValueError("Missing argument: config_file")
    else:
        if args.show_defaults:
            raise ValueError(
                "Can't specify both config_file and --show_defaults")

    # defaults that can be overwritten by arguments
    tmpl_stored_args = {
        'http_server': 'waitress',
        'lang': 'en',
        'database_engine': 'sqlite',
        'host': '127.0.0.1',
        'port': 5000,
        'error_aggregation_service': None,
    }
    for custom in args.custom:
        # parse arguments
        kwargs = {}
        for el in _escape_split(custom, ','):
            kv = _escape_split(el, '=')
            if len(kv) == 2:
                k, v = kv
                kwargs[k] = v
        # update our template stored args
        tmpl_stored_args.update(kwargs)

    if args.show_defaults:
        for k, v in tmpl_stored_args.iteritems():
            print '%s=%s' % (k, v)
        sys.exit(0)

    # use default that cannot be replaced
    tmpl_stored_args.update({
        'uuid': lambda: uuid.uuid4().hex,
    })
    try:
        # built in template
        tmpl_file = os.path.join(here, TMPL)

        with open(tmpl_file, 'rb') as f:
            tmpl_data = f.read().decode('utf-8')
            tmpl = Template(tmpl_data).render(**tmpl_stored_args)
        with open(args.config_file, 'wb') as f:
            f.write(tmpl.encode('utf-8'))
        print 'Wrote new config file in %s' % (os.path.abspath(
            args.config_file))

    except Exception:
        from mako import exceptions
        print exceptions.text_error_template().render()
Code example #6
def _run(argv):
    parser, args, other = argparser(argv)
    if not len(sys.argv) > 1:
        print parser.print_help()
        sys.exit(0)
    # defaults that can be overwritten by arguments
    tmpl_stored_args = {
        'http_server': 'waitress',
        'lang': 'en',
        'database_engine': 'sqlite',
        'host': '127.0.0.1',
        'port': 5000,
        'error_aggregation_service': None,
    }
    if other:
        # parse arguments, we assume only first is correct
        kwargs = {}
        for el in _escape_split(other[0], ','):
            kv = _escape_split(el, '=')
            if len(kv) == 2:
                k, v = kv
                kwargs[k] = v
        # update our template stored args
        tmpl_stored_args.update(kwargs)

    # use default that cannot be replaced
    tmpl_stored_args.update({
        'uuid': lambda: uuid.uuid4().hex,
        'here': os.path.abspath(os.curdir),
    })
    if args.show_defaults:
        for k,v in tmpl_stored_args.iteritems():
            print '%s=%s' % (k, v)
        sys.exit(0)
    try:
        # built in template
        tmpl_file = os.path.join(here, TMPL)
        if args.template:
            tmpl_file = args.template

        with open(tmpl_file, 'rb') as f:
            tmpl_data = f.read().decode('utf-8')
            if args.raw:
                tmpl = tmpl_data
            else:
                tmpl = Template(tmpl_data).render(**tmpl_stored_args)
        with open(args.filename, 'wb') as f:
            f.write(tmpl.encode('utf-8'))
        print 'Wrote new config file in %s' % (os.path.abspath(args.filename))

    except Exception:
        from mako import exceptions
        print exceptions.text_error_template().render()
Code example #7
def _run(args):
    if args.config_file is None:
        if not args.show_defaults:
            raise ValueError("Missing argument: config_file")
    else:
        if args.show_defaults:
            raise ValueError("Can't specify both config_file and --show_defaults")

    # defaults that can be overwritten by arguments
    tmpl_stored_args = {
        'http_server': 'waitress',
        'lang': 'en',
        'database_engine': 'sqlite',
        'host': '127.0.0.1',
        'port': 5000,
        'error_aggregation_service': None,
    }
    for custom in args.custom:
        # parse arguments
        kwargs = {}
        for el in _escape_split(custom, ','):
            kv = _escape_split(el, '=')
            if len(kv) == 2:
                k, v = kv
                kwargs[k] = v
        # update our template stored args
        tmpl_stored_args.update(kwargs)

    if args.show_defaults:
        for k,v in tmpl_stored_args.iteritems():
            print '%s=%s' % (k, v)
        sys.exit(0)

    # use default that cannot be replaced
    tmpl_stored_args.update({
        'uuid': lambda: uuid.uuid4().hex,
    })
    try:
        # built in template
        tmpl_file = os.path.join(here, TMPL)

        with open(tmpl_file, 'rb') as f:
            tmpl_data = f.read().decode('utf-8')
            tmpl = Template(tmpl_data).render(**tmpl_stored_args)
        with open(args.config_file, 'wb') as f:
            f.write(tmpl.encode('utf-8'))
        print 'Wrote new config file in %s' % (os.path.abspath(args.config_file))

    except Exception:
        from mako import exceptions
        print exceptions.text_error_template().render()
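
The three _run() variants above share one core step: read the template text and render it with a dict of substitutions expanded as keyword arguments. A minimal sketch with invented values:

from mako.template import Template

tmpl_data = "host = ${host}\nport = ${port}\n"
tmpl_stored_args = {'host': '127.0.0.1', 'port': 5000}

# every ${name} expression in the template is filled from the keyword arguments
config_text = Template(tmpl_data).render(**tmpl_stored_args)
print(config_text)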
Code example #8
    categorias = {}  # assumed: initialised here, since it is populated below but missing from the snippet
    parents = {}
    reglas = {}

    for row in reader:
        if not row['category_ids/code'] in categorias:
            categorias[row['category_ids/code']] = row

        if not row['category_ids/padre/code'] in parents:
            parents[row['category_ids/padre/code']] = row

        reglas[row['code']] = row

    catpXml = Template(filename='tpl/categoria_reglas_parent.tpl').render(
        categorias=parents.values())
    catXml = Template(filename='tpl/categoria_reglas.tpl').render(
        categorias=categorias.values())
    regXml = Template(filename='tpl/reglas_salariales.tpl').render(
        reglas=reglas.values())

    fileCategories = open('../categorias_reglas_salariales.xml', 'wb')
    fileCategories.write(catXml.encode('utf-8'))
    fileCategories.close()

    fileCategoriesp = open('../categorias_reglas_salariales_parent.xml', 'wb')
    fileCategoriesp.write(catpXml.encode('utf-8'))
    fileCategoriesp.close()

    fileRules = open('../reglas_salariales.xml', 'wb')
    fileRules.write(regXml.encode('utf-8'))
    fileRules.close()
Code example #9
File: exportgraf.py Project: FrankNagel/qlc
def main(argv):
    log = logging.getLogger()
    logging.basicConfig(level=logging.INFO)
    
    conf = appconfig('config:development.ini', relative_to='.')
    config = None
    if not pylons.test.pylonsapp:
        config = load_environment(conf.global_conf, conf.local_conf)
    
    # Create the tables if they don't already exist
    #metadata.create_all(bind=Session.bind)

    c = ContextObj() 
    py_obj = PylonsContext() 
    py_obj.tmpl_context = c
    pylons.tmpl_context._push_object(c)
    #corpushistory = model.meta.Session.query(model.Corpusversion).all()
    corpusversion = model.meta.Session.query(model.Corpusversion).order_by(model.Corpusversion.updated).first()
    c.iso_time = corpusversion.updated.strftime("%Y-%m-%d")
    c.version_number = "{0}.{1}".format(corpusversion.version, corpusversion.revision)

    
    # template_entries_seg
    mylookup = TemplateLookup(directories=config['pylons.paths']['templates'])
    template_header = open(os.path.join(config['pylons.paths']['templates'][0], 'base', 'graf-header.hdr')).read()    
    template_entries = open(os.path.join(config['pylons.paths']['templates'][0], 'base', 'graf-entries.txt')).read()    
    template_entries_seg = open(os.path.join(config['pylons.paths']['templates'][0], 'base', 'graf-entries.xml')).read()
    template_annotations = open(os.path.join(config['pylons.paths']['templates'][0], 'base', 'graf-annotations.xml')).read()
    #template_annotations_seg = open(os.path.join(config['pylons.paths']['templates'][0], 'base', 'graf-annotations-seg.xml')).read()        

    metadata_file = codecs.open(os.path.join(config['pylons.paths']['static_files'], 'downloads', "xml", "sources.csv"), "w", "utf-8")
    metadata_file.write("ID\tTYPE\tLANGUAGES\tIS_READY\tTITLE\tCOMPONENT\n")
         
    #http://www.cidles.eu/quanthistling/book/minor1987/hto/spa?format=xml
    for b in quanthistling.dictdata.books.list + quanthistling.dictdata.toolboxfiles.list:
        #if b['bibtex_key'] != "leach1969":
        #    continue

        c.book = model.meta.Session.query(model.Book).filter_by(bibtex_key=b['bibtex_key']).first()
        
        if c.book:

            # escape characters for XML
            c.bookinfo = escape(c.book.bookinfo())
            c.book_title = escape(c.book.title)
            c.book_author = escape(c.book.author)

            # collect book data
            languages = [ l.language_iso.langcode for dictdata in c.book.dictdata for l in dictdata.src_languages + dictdata.tgt_languages if l.language_iso]
            components = [ dictdata.component.name for dictdata in c.book.dictdata ]
            metadata_file.write(u"{0}\t{1}\t{2}\t{3}\t{4}\t{5}\n".format(c.book.bibtex_key, "dictionary", ",".join(languages), c.book.is_ready, c.book.bookinfo(), ",".join(components)))

            print "Exporting XML data for %s..." % b['bibtex_key']
            #temppath = tempfile.mkdtemp()
            temppath = os.path.join(config['pylons.paths']['static_files'], 'downloads', 'xml', b['bibtex_key'])
            if not os.path.exists(temppath):
                os.mkdir(temppath)
            else:
                files = glob.glob(os.path.join(temppath, "*"))
                for f in files:
                    os.remove(f)

            for c.dictdata in c.book.dictdata:
                
    
                c.url_for = url_for
                c.base_url = "http://www.quanthistling.info/data"
                #c.relative_url = url_for(controller='book', action='dictdata', bibtexkey=c.book.bibtex_key, startpage=c.dictdata.startpage, endpage=c.dictdata.endpage, format='html')

                #c.heading = c.book.bookinfo()
                c.basename = "dict-%s-%i-%i" % (b['bibtex_key'], c.dictdata.startpage, c.dictdata.endpage)

                print "  getting entries..."
                
                c.entries = model.meta.Session.query(model.Entry).filter(model.Entry.dictdata_id==c.dictdata.id).order_by("startpage", "pos_on_page").all()

                print "  getting annotations..."
                
                annotations = model.meta.Session.query(model.Annotation).join(model.Entry, model.Annotation.entry_id==model.Entry.id).filter(model.Entry.dictdata_id==c.dictdata.id).order_by("startpage", "pos_on_page").all()
                c.annotations = collections.defaultdict(dict)
                for a in annotations:
                    if not c.annotations[a.entry_id]:
                        c.annotations[a.entry_id] = collections.defaultdict(list)
                    c.annotations[a.entry_id][(a.start, a.end)].append(a)

                print "  getting counts..."
                
                c.count_heads = model.meta.Session.query(model.Annotation).join(model.Entry, model.Annotation.entry_id==model.Entry.id).filter(model.Entry.dictdata_id==c.dictdata.id).filter(model.Annotation.value==u"head").count()
                c.count_translations = model.meta.Session.query(model.Annotation).join(model.Entry, model.Annotation.entry_id==model.Entry.id).filter(model.Entry.dictdata_id==c.dictdata.id).filter(model.Annotation.value==u"translation").count()
                c.count_pos = model.meta.Session.query(model.Annotation).join(model.Entry, model.Annotation.entry_id==model.Entry.id).filter(model.Entry.dictdata_id==c.dictdata.id).filter(model.Annotation.value==u"pos").count()
                c.count_examples_src = model.meta.Session.query(model.Annotation).join(model.Entry, model.Annotation.entry_id==model.Entry.id).filter(model.Entry.dictdata_id==c.dictdata.id).filter(model.Annotation.value==u"example-src").count()
                c.count_examples_tgt = model.meta.Session.query(model.Annotation).join(model.Entry, model.Annotation.entry_id==model.Entry.id).filter(model.Entry.dictdata_id==c.dictdata.id).filter(model.Annotation.value==u"example-tgt").count()
                c.count_manually_corrected = model.meta.Session.query(model.Entry).filter(model.Entry.dictdata_id==c.dictdata.id).filter(model.Entry.has_manual_annotations==True).count()

                #xml =  render('/derived/book/dictdata.xml')
                #xml = literal(template.render_unicode(c))

                print "  header..."

                # write header
                xml = Template(template_header, lookup=mylookup).render_unicode(c=c)
                oFile = open(os.path.join(temppath, "%s.hdr" % c.basename),'wb')
                oFile.write(xml.encode("utf-8"))
                oFile.close()

                print "  base data..."

                # write base data file
                xml = Template(template_entries, lookup=mylookup).render_unicode(c=c)
                oFile = open(os.path.join(temppath, "%s.txt" % c.basename),'wb')
                oFile.write(xml.encode("utf-8"))
                oFile.close()
    
                print "  entries..."

                # write entry file
                xml = Template(template_entries_seg, lookup=mylookup).render_unicode(c=c)
                oFile = open(os.path.join(temppath, "%s-entries.xml" % c.basename),'wb')
                oFile.write(xml.encode("utf-8"))
                oFile.close()

                print "  formatting annotations..."

                c.annotationtypes = [ "pagelayout", "formatting" ]
                c.annotationname = "formatting"
            
                xml = Template(template_annotations, lookup=mylookup).render_unicode(c=c)
                oFile = open(os.path.join(temppath, "%s-formatting.xml" % c.basename),'wb')
                oFile.write(xml.encode("utf-8"))
                oFile.close()          

                print "  dictinterpretation annotations..."
                c.annotationtypes = [ "dictinterpretation", "orthographicinterpretation", "errata" ]

                c.annotationname = "dictinterpretation"

                xml = Template(template_annotations, lookup=mylookup).render_unicode(c=c)
                oFile = open(os.path.join(temppath, "%s-dictinterpretation.xml" % c.basename),'wb')
                oFile.write(xml.encode("utf-8"))
                oFile.close()

            # create archive
            myzip = zipfile.ZipFile(os.path.join(config['pylons.paths']['static_files'], 'downloads', 'xml', '%s.zip' % b['bibtex_key']), 'w', zipfile.ZIP_DEFLATED)
            for file in glob.glob(os.path.join(temppath, "*.*")):
                myzip.write(file, os.path.basename(file))
            myzip.close()
            #shutil.rmtree(temppath)

    metadata_file.close()
    myzip = zipfile.ZipFile(os.path.join(config['pylons.paths']['static_files'], 'downloads', 'xml', 'data.zip'), 'w', zipfile.ZIP_DEFLATED)
    graf_dirs = [d for d in glob.glob(os.path.join(config['pylons.paths']['static_files'], 'downloads', 'xml', "*")) if os.path.isdir(d)]
    for d in graf_dirs:
        bibtex_key = d[d.rfind(os.sep)+1:]
        for f in glob.glob(os.path.join(d, "*.*")):
            myzip.write(f, os.path.join(bibtex_key, os.path.basename(f)))
    f = os.path.join(config['pylons.paths']['static_files'], 'downloads', 'xml', 'sources.csv')
    myzip.write(f, os.path.basename(f))
    myzip.close()


    pylons.tmpl_context._pop_object() 
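
The export script above builds its Template objects from raw text but passes a TemplateLookup, presumably so that tags such as <%include> or <%inherit> in those templates can be resolved against the Pylons template directories. A minimal self-contained sketch of that setup, using a throwaway directory instead of the Pylons paths:

import os
import tempfile
from mako.template import Template
from mako.lookup import TemplateLookup

# create a temporary template directory so the <%include> below resolves
tmpdir = tempfile.mkdtemp()
with open(os.path.join(tmpdir, "header.mako"), "w") as f:
    f.write("<!-- generated file -->\n")

lookup = TemplateLookup(directories=[tmpdir], input_encoding="utf-8")
template_text = '<%include file="header.mako"/><doc>${title}</doc>'
xml = Template(template_text, lookup=lookup).render_unicode(title="example")
print(xml)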
Code example #10
File: setup.py Project: bjodah/symcxx
        for k in ('unary', 'binary', 'args_stack'):
            subsd[stub + k] = list(
                _read(path_stub + k + '.inc', inc_dir='./include/'))
            subsd['types'] += subsd[stub + k]

        subsd['_message_for_rendered'] = 'THIS IS A GENERATED FILE DO NOT EDIT'
        try:
            rendered_pyx = Template(
                io.open(template_path, 'rt',
                        encoding='utf-8').read()).render(**subsd)
        except:
            print(text_error_template().render_unicode())
            raise
        else:
            sha256hex = hashlib.sha256(
                rendered_pyx.encode('utf-8')).hexdigest()
            hash_path = os.path.join(
                'build',
                pyx_path.replace('/', '__') + '.sha256hex')
            if os.path.exists(hash_path) and open(hash_path, 'rt').read(
                    256 // 4) == sha256hex:
                pass
            else:
                open(pyx_path, 'wt').write(rendered_pyx)
                if not os.path.exists('build'):
                    os.makedirs('build')
                open(hash_path, 'wt').write(sha256hex)
        ext_modules = cythonize(ext_modules,
                                include_path=['./include'],
                                gdb_debug=True)
    else:
Code example #11
File: label.py Project: marionumza/UduuX
 def get_label(self, delivery, address, parcel):
     tracking_number = False
     self.check_model(parcel, PARCEL_MODEL, 'package')
     self.check_model(address, ADDRESS_MODEL, 'partner')
     self.product_code, self.uniship_product = self.get_product(
         address['country_code'])
     self.check_model(delivery, DELIVERY_MODEL, 'delivery')
     delivery['gls_origin_reference'] = self.set_origin_reference(
         parcel, address)
     # transfom human keys in GLS keys (with 'T' prefix)
     T_account = self.map_semantic_keys(ACCOUNT_MAPPING, self.sender)
     T_delivery = self.map_semantic_keys(DELIVERY_MAPPING, delivery)
     T_parcel = self.map_semantic_keys(PARCEL_MAPPING, parcel)
     T_address = self.map_semantic_keys(ADDRESS_MAPPING, address)
     # merge all datas
     all_dict = {}
     all_dict.update(T_account)
     all_dict.update(T_delivery)
     all_dict.update(T_parcel)
     all_dict.update(T_address)
     all_dict.update(self.add_specific_keys(address))
     if address['country_code'] != 'FR':
         label_content = self.select_label(parcel['parcel_number_label'],
                                           all_dict, address)
         if ('contact_id_inter' not in self.sender
                 or not self.sender['contact_id_inter']):
             raise InvalidAccountNumber(
                 u"There is no account number defined for international "
                 "transportation, please set it in your company settings "
                 "to send parcel outside France")
     else:
         failed_webservice = False
         # webservice
         response = self.get_webservice_response(all_dict)
         # refactor webservice response failed and webservice downed
         if isinstance(response, dict):
             if self.get_result_analysis(response['RESULT'], all_dict):
                 all_dict.update(response)
                 tracking_number = all_dict['T8913']
             else:
                 failed_webservice = True
                 label_content = self.select_label(
                     parcel['parcel_number_label'],
                     all_dict,
                     address,
                 )
         else:
             failed_webservice = True
         label_content = self.select_label(
             parcel['parcel_number_label'],
             all_dict,
             address,
             failed_webservice=failed_webservice)
     # some keys are not defined by GLS but are in mako template
     # this add empty values to these keys
     keys_without_value = self.validate_mako(label_content, all_dict.keys())
     if keys_without_value:
         empty_mapped = (zip(keys_without_value,
                             [''] * len(keys_without_value)))
         all_dict.update(dict(empty_mapped))
     try:
         tpl = Template(label_content).render(**all_dict)
         content2print = tpl.encode(encoding=REPORT_CODING,
                                    errors=ERROR_BEHAVIOR)
         return {
             "content": content2print,
             "tracking_number": tracking_number,
             'filename': self.filename
         }
     except:
         traceback = RichTraceback()
         for (filename, lineno, function, line) in traceback.traceback:
             logger.info("File %s, line %s, in %s" %
                         (filename, lineno, function))
         raise InvalidDataForMako(
             "%s: %s" %
             (str(traceback.error.__class__.__name__), traceback.error))
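
The two label examples (#2 above and this one) fall back on Mako's RichTraceback to turn a failed render into a readable error message. A minimal sketch of that pattern:

from mako.template import Template
from mako.exceptions import RichTraceback

try:
    # ${1 / 0} forces an error while the template is rendering
    Template("parcel: ${1 / 0}").render()
except Exception:
    tb = RichTraceback()
    for filename, lineno, function, line in tb.traceback:
        print("File %s, line %s, in %s" % (filename, lineno, function))
    print("%s: %s" % (tb.error.__class__.__name__, tb.error))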
Code example #12
hist = zip(range(1, len(hist)*trainer.validation_frequency+1, trainer.validation_frequency), hist)

description = {
    '#Training Patterns' : inp.shape[0],
    '#Test Patterns' : inpt.shape[0],
    '#Attributes' : inp.shape[1],
    '#Targets' : targ.shape[1],
    'Task' : 'Regression'
}

layers = [{
    'Neurons' : layer.output_shape,
    'Type' : type(layer).__name__
} for layer in net.layers]

summary = {
    '#Total Steps' : len(hist),
    'Best Test Error' : trainer.errors['best']['test'],
    'Best Step' : trainer.best_step,
    'Seconds taken' : time_taken,
    'Steps/Second' : trainer.steps_per_sec
}

trainer_classes = [cls.__name__ for cls in trainer.__class__.__bases__]
data = dict(history=hist, trainer_classes=trainer_classes, dataset_name="abalone", summary=summary, layers=layers, description=description, dataset_url="https://archive.ics.uci.edu/ml/datasets/Abalone", time=time.asctime(), parameters=trainer.parameters)
data_string = json.dumps(data)

html = Template(filename="neuronaut-report.html", input_encoding='utf-8').render(data=data_string)
with open(os.path.join(dir_name, "report.html"), "wb") as f:
    f.write(html.encode('utf-8'))
Code example #13
File: runner.py Project: mdmosarafmd/Splunk
 def _run(self):
     '''
     return: 3 element tuple
     (return_code, raw_stdout_out, raw_stderr_out)
     '''
     ckpt = self._ckpter.get(CKPT_NAME)
     if ckpt is None:
         ckpt = {}
     if CKPT_KEY not in ckpt:
         ckpt[CKPT_KEY] = {}
     input_scheme = Template(OPTION_FILE_CONTENT).render(
         server_uri=self._server_uri,
         session_key=self._session_key,
         checkpoint_dir=self._checkpoint_dir,
         options=self._options,
         interval=self._interval,
         input_name=self._input_name,
         sourcetype=self._sourcetype)
     # runner_logger.debug('input stream:' + input_scheme)
     # use python3 for test by default
     if os.path.isfile(make_splunk_path(('bin', "python3"))) \
             or os.path.isfile(make_splunk_path(('bin', "python3.exe"))):
         cmd2 = [self._get_splunk_bin(), 'cmd', 'python3', self._file_path]
     else:
         cmd2 = [self._get_splunk_bin(), 'cmd', 'python', self._file_path]
     # make it the same as core
     cwd = "C:\Windows\system32" if platform.system() == "Windows" else '/'
     # prepare the env
     child_env = os.environ.copy()
     child_env[AOB_TEST_FLAG] = 'true'
     if self._globalsettings:
         child_env[GLOBALSETTINGS] = json.dumps(self._globalsettings)
     child_env[DATA_INPUTS_OPTIONS] = json.dumps(self._data_inputs_options)
     runner_logger.debug("Start the test subprocess with env:%s",
                         logger.hide_sensitive_field({
                             GLOBALSETTINGS: self._globalsettings,
                             DATA_INPUTS_OPTIONS: self._data_inputs_options
                         }))
     try:
         child2 = subprocess.Popen(
             cmd2,
             stdin=subprocess.PIPE,
             stderr=subprocess.PIPE,
             stdout=subprocess.PIPE,
             cwd=cwd,
             env=child_env)
         ckpt[CKPT_KEY][self._test_id] = {
             'pid': child2.pid,
             'app': self._app,
             'input': self._input_name
         }
         self._ckpter.update(CKPT_NAME, ckpt)
         stdout_str, stderr_str = child2.communicate(input=input_scheme.encode())
         stdout_str = stdout_str.decode()
         stderr_str = stderr_str.decode()
         retcode = child2.returncode
         del ckpt[CKPT_KEY][self._test_id]
         if not has_kill_flag(CKPT_DIR, self._test_id):
             # normal exit, not killed
             self._ckpter.update(CKPT_NAME, ckpt)
         return retcode, stdout_str, stderr_str
     except subprocess.CalledProcessError as e:
         runner_logger.error('Fail to execute the test process:%s. %s',
                             e.cmd, traceback.format_exc())
         return e.returncode, '', e.output