Code Example #1
def main(args):

    # set filenames
    srtName = args[1]
    evtName = args[3]

    RFflag = False

    with open(srtName) as json_file:
        data = json.load(json_file)

    for material in data["Events"][0]["materials"]:
        if material["type"] in ("PM4Sand_Random", "PDMY03_Random",
                                "Elastic_Random"):
            RFflag = True
            break
    if RFflag:
        #create material file based on 1D Gaussian field
        soilData = data["Events"][0]
        createMaterial(soilData)

    #Run OpenSees
    subprocess.Popen("OpenSees model.tcl", shell=True).wait()

    #Run postprocessor to create EVENT.json
    postProcess(evtName)
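
Example #1 hinges on the scan for stochastic material types: if any material in the event is PM4Sand_Random, PDMY03_Random or Elastic_Random, a random-field material file is generated before OpenSees runs. A self-contained sketch of just that check, using a hypothetical miniature of the input JSON (only the keys the excerpt reads are assumed):

import json

# Hypothetical miniature of the SRT input; the real file layout belongs to the
# project and only the keys used in the excerpt are assumed here.
sample = json.loads("""
{"Events": [{"materials": [{"type": "Elastic"}, {"type": "PM4Sand_Random"}]}]}
""")

RANDOM_TYPES = ("PM4Sand_Random", "PDMY03_Random", "Elastic_Random")
RFflag = any(m["type"] in RANDOM_TYPES for m in sample["Events"][0]["materials"])
print(RFflag)  # True -> a stochastic material is present, so the field gets generated
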
Code Example #2
def main(args):

    # set filenames
    srtName = args[1]
    evtName = args[3]

    # If requesting random variables, run getUncertainty;
    # otherwise QSh3ark already ran, so do nothing.
    RFflag = False

    with open(srtName) as json_file:
        data = json.load(json_file)

    for material in data["Events"][0]["materials"]:
        if material["type"] in ("PM4Sand_Random", "PDMY03_Random",
                                "Elastic_Random"):
            RFflag = True
            break
    if RFflag:
        soilData = data["Events"][0]
        createMaterial(soilData)

        #Run OpenSees
        subprocess.Popen("OpenSees model.tcl", shell=True).wait()

        #Run postprocessor to create EVENT.json
        postProcess(evtName)
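
In Example #2 the OpenSees run and the post-processing are nested inside the RFflag branch, so they execute only when a stochastic material was found. The createMaterial step itself is not shown; per the comment in Example #1 it writes a material file from a 1D Gaussian random field. A generic, illustrative way to draw such a field (exponential covariance plus a Cholesky factor); none of these names or parameter values come from the project:

import numpy as np

def gaussian_field_1d(depths, mean, stdev, corr_length, seed=None):
    """Draw one realization of a 1D Gaussian random field at the given depths
    using an exponential covariance model. Purely illustrative."""
    rng = np.random.default_rng(seed)
    sep = np.abs(depths[:, None] - depths[None, :])                # pairwise separations
    cov = stdev ** 2 * np.exp(-sep / corr_length)                  # exponential covariance
    chol = np.linalg.cholesky(cov + 1e-10 * np.eye(len(depths)))   # jitter for stability
    return mean + chol @ rng.standard_normal(len(depths))

# e.g. a soil-property profile sampled every metre down to 20 m
profile = gaussian_field_1d(np.arange(0.0, 20.0, 1.0), mean=200.0, stdev=30.0, corr_length=5.0)
print(profile.round(1))
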
Code Example #3
File: whole_procedure.py    Project: yghlc/Landuse_DL
def post_processing_backup(para_file,
                           inf_post_note=None,
                           b_skip_getshp=False,
                           test_id=None):
    # the script will check whether each image has been post-processed
    # command_string = os.path.join(eo_dir, 'workflow', 'postProcess.py') + ' ' + para_file
    # if inf_post_note is not None:
    #     command_string += ' ' + str(inf_post_note)
    # basic.os_system_exit_code(command_string)
    import postProcess
    postProcess.postProcess(para_file,
                            inf_post_note,
                            b_skip_getshp=b_skip_getshp,
                            test_id=test_id)
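
Example #3 replaces a shell invocation (the commented-out block) with an in-process import-and-call of postProcess. For comparison, the subprocess route written with the standard library instead of the project's basic.os_system_exit_code helper; the script path and parameter file below are illustrative:

import subprocess
import sys

cmd = [sys.executable, "workflow/postProcess.py", "main_para.ini"]  # hypothetical paths
inf_post_note = None
if inf_post_note is not None:
    cmd.append(str(inf_post_note))   # appended as an extra argument, as in the commented-out block
completed = subprocess.run(cmd, check=False)
print("postProcess exit code:", completed.returncode)
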
Code Example #4
def executeScript(inputScript, scriptOptionalArgs, inputFile=None):
    global testcaseInfoList
    global testcaseInfo
    global yamlCont
    testcaseInfoList = []
    yamlCont = {}
    if inputFile != None:
        fil = open(inputFile, "r")
        yamlCont = yaml.load(fil)


#        inputFileType = re.search('(.*).yaml', inputFile).group(1)

    inputFileType = 'unknown'
    if inputFile != None:
        inputFileType = re.search('(.*).yaml', inputFile).group(1)
    cmd = "%s %s" % (inputScript, ' '.join(scriptOptionalArgs))
    #inputFileType = 'unknown'
    print "@@@@@@@@@@@@@@@"
    print cmd
    print "@@@@@@@@@@@@@@@"
    ps = winpexpect.winspawn(cmd)
    ps.logfile_read = sys.stdout

    ## Initialize testcaseInfo if the script is not a flist.
    if inputFileType in ['unknown']:
        scriptName = inputScript
        testcaseInfo = testInfo(scriptName)
        testcaseInfoList.append(testcaseInfo)

    timeout = -1
    ps.interact(output_filter=scriptExecutionMonitor)

    if inputFileType == 'unknown':
        ## creates a nested suite
        suiteList = []
        for testcaseInfo in testcaseInfoList:
            testcaseInfo.flush()
            suite = createRobotSuite(testcaseInfo)
            suiteList.append(suite)
        suite = TestSuite(inputScript)
        suite.suites = suiteList
        result = suite.run(output='output.xml', loglevel='debug')
        for testcaseInfo in testcaseInfoList:
            testcaseInfo.variableFile.close()
    else:
        ## creates a single suite
        for testcaseInfo in testcaseInfoList:
            testcaseInfo.flush()
            print testcaseInfo
            suite = createRobotSuite(testcaseInfo)
            result = suite.run(output='output.xml', loglevel='debug')
            testcaseInfo.variableFile.close()

    # Generating log files requires processing the earlier generated output XML.
    ResultWriter('output.xml').write_results()
    pp = postProcess.postProcess(suiteFile=testcaseInfo.scriptName)
    pp.close()
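
Examples #4, #5, #6 and #11 share one pattern: build Robot Framework suites programmatically, run them into output.xml, render the logs with ResultWriter, then hand the suite name to postProcess. A stripped-down sketch of only the Robot Framework part; the suite content is invented and the test-building calls assume Robot Framework 4 or newer:

from robot.api import TestSuite, ResultWriter

# One suite with a single trivial test; the excerpts instead rebuild tests from
# the monitored script output via createRobotSuite.
suite = TestSuite("Demo")
test = suite.tests.create(name="Smoke")
test.body.create_keyword(name="Log", args=["hello from a generated test"])

# Running produces output.xml; ResultWriter then renders log.html and report.html,
# which is the "processing the earlier generated output XML" step above.
result = suite.run(output="output.xml", loglevel="debug")
ResultWriter("output.xml").write_results()
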
Code Example #5
def executeScript(inputScript, scriptOptionalArgs, inputFile=None):
    global testcaseInfoList
    global testcaseInfo
    global yamlCont
    testcaseInfoList = []
    yamlCont = {}
    if inputFile != None:
        fil = open(inputFile, "r")
        yamlCont = yaml.load(fil)
#        inputFileType = re.search('(.*).yaml', inputFile).group(1)

    inputFileType = 'unknown'
    if inputFile != None:
        inputFileType = re.search('(.*).yaml', inputFile).group(1)
    cmd = "%s %s" %(inputScript, ' '.join(scriptOptionalArgs))
    #inputFileType = 'unknown'
    print "@@@@@@@@@@@@@@@"
    print cmd
    print "@@@@@@@@@@@@@@@"
    ps = winpexpect.winspawn(cmd)
    ps.logfile_read = sys.stdout

    ## Initialize testcaseInfo if the script is not a flist.
    if inputFileType in ['unknown']:
        scriptName = inputScript
        testcaseInfo = testInfo(scriptName)
        testcaseInfoList.append(testcaseInfo)

    timeout = -1
    ps.interact(output_filter=scriptExecutionMonitor)

    if inputFileType == 'unknown':
        ## creates a nested suite
        suiteList = []
        for testcaseInfo in testcaseInfoList:
            testcaseInfo.flush()
            suite = createRobotSuite(testcaseInfo)
            suiteList.append(suite)
        suite = TestSuite(inputScript)
        suite.suites = suiteList
        result = suite.run(output='output.xml', loglevel='debug')
        for testcaseInfo in testcaseInfoList:
            testcaseInfo.variableFile.close()
    else:
        ## creates a single suite
        for testcaseInfo in testcaseInfoList:
            testcaseInfo.flush()
            print testcaseInfo
            suite = createRobotSuite(testcaseInfo)
            result = suite.run(output='output.xml', loglevel='debug')
            testcaseInfo.variableFile.close()

    # Generating log files requires processing the earlier generated output XML.
    ResultWriter('output.xml').write_results()
    pp = postProcess.postProcess(suiteFile=testcaseInfo.scriptName)
    pp.close()
Code Example #6
File: Test.py    Project: raju-tm/reautomation_handoff
def executeScript(inputScript, scriptOptionalArgs, inputFile=None):
    global testcaseInfoList
    global testcaseInfo
    global yamlCont
    testcaseInfoList = []
    yamlCont = {}
    if inputFile != None:
        fil = open(inputFile, "r")
        yamlCont = yaml.load(fil)
    #        inputFileType = re.search('(.*).yaml', inputFile).group(1)

    if re.search("\.log\.", inputScript):
        cmd = (
            "python /work/swtest01_1/sgsubramaniam/sw-test/special_script/demoScript/slowcat.py -d 0.002 %s"
            % inputScript
        )
    else:
        cmd = "f10tool %s %s" % (inputScript, " ".join(scriptOptionalArgs))
    if re.search("\.flist$", inputScript):
        inputFileType = "flist"
        cmd = "f10tool %s %s" % (inputScript, " ".join(scriptOptionalArgs))
    elif re.search("\.f10$", inputScript):
        inputFileType = "f10"
        cmd = "f10tool %s %s" % (inputScript, " ".join(scriptOptionalArgs))
    else:
        inputFileType = "unknown"
        if inputFile != None:
            inputFileType = re.search("(.*).yaml", inputFile).group(1)
        cmd = "%s %s;echo '----------'" % (inputScript, " ".join(scriptOptionalArgs))

    ps = winpexpect.winspawn(cmd)
    ps.logfile_read = sys.stdout

    ## Initialize testcaseInfo if the script is not a flist.
    if inputFileType in ["f10", "unknown"]:
        scriptName = inputScript
        testcaseInfo = testInfo(scriptName)
        testcaseInfoList.append(testcaseInfo)

    timeout = -1
    ps.interact(output_filter=scriptExecutionMonitor)

    if inputFileType in ("flist", "rspec"):
        ## creates a nested suite
        suiteList = []
        for testcaseInfo in testcaseInfoList:
            testcaseInfo.flush()
            suite = createRobotSuite(testcaseInfo)
            suiteList.append(suite)
        suite = TestSuite(inputScript)
        suite.suites = suiteList
        result = suite.run(output="output.xml", loglevel="debug")
        for testcaseInfo in testcaseInfoList:
            testcaseInfo.variableFile.close()
    else:
        ## creates a single suite
        for testcaseInfo in testcaseInfoList:
            testcaseInfo.flush()
            print testcaseInfo
            suite = createRobotSuite(testcaseInfo)
            result = suite.run(output="output.xml", loglevel="debug")
            testcaseInfo.variableFile.close()

    # Generating log files requires processing the earlier generated output XML.
    ResultWriter("output.xml").write_results()
    pp = postProcess.postProcess(suiteFile=testcaseInfo.scriptName)
    pp.close()
Code Example #7
def ROIDataDriver(path, filename, dt):
    t_min = 1e9
    t_max = 0.
    save_files = True
    FULL_BG = {'Time': np.array([]), 'IMAGE': np.array([])}

    print("=========================1st sweep================================")
    for i in range(0, len(filename)):
        fid = open(path + filename[i], "rb")
        print("Reading file..." + filename[i])
        bytes = fid.read()
        print("done")

        lb = len(bytes)
        # make bytes even
        if np.mod(lb, 2) != 0:
            bytes = bytes[0:-1]
            lb = lb - 1

        order = np.mgrid[0:int(len(bytes) / 2)]
        # https://stackoverflow.com/questions/45187101/converting-bytearray-to-short-int-in-python
        ushort1 = struct.unpack('H' * int(lb / 2),
                                bytes)  # the H means ushort int
        order1 = order
        ushort2 = struct.unpack('H' * int((lb) / 2),
                                bytes[1:len(bytes)] + b'1')  # append a byte
        order2 = order[1:len(order) + 1]

        # append the two tuples
        ushort = ushort1 + ushort2
        # append the two numpy arrays
        order = np.append(order1, order2)

        bytes = struct.pack('H' * int(lb), *ushort)  # the H means ushort int

        ushort = np.asarray(ushort)

        #++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
        #find the positions of the start of block marks ++++++++++++++++++++++++++
        #484B
        print('Finding house keeping...')
        house, = np.where(ushort == int('0x484B', 0))
        print('done')

        # A3D5
        print('Finding image data...')
        images, = np.where(ushort == int('0xa3d5', 0))
        print('done')

        #B2E6
        print('Finding roi data...')
        rois, = np.where(ushort == int('0xb2e6', 0))
        print('done')

        fid.close()
        #--------------------------------------------------------------------------

        #++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
        #convert the bytes to Header, Images and ROI structs +++++++++++++++++++++
        print('Post-processing data, stage 1...')
        Header = convertDataToHeaderSA(ushort)
        (I, images) = convertDataToImageSA(bytes, ushort, order, images)
        (R, rois) = convertDataToROISA(bytes, ushort, order, rois)
        (H) = convertDataToHouseSA(bytes, ushort, order, house)
        print('done')
        #--------------------------------------------------------------------------

        #++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
        # extract the images, times, etc ++++++++++++++++++++++++++++++++++++++++++
        print('Post-processing data, stage 2...')
        (ROI_N, HOUSE, IMAGE1) = postProcess(bytes, rois, R, H, I, Header)
        print('done')
        #--------------------------------------------------------------------------

        #++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
        #Backgrounds +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
        print('Getting backgrounds...')
        FULL_BG1 = fullBackgrounds(ROI_N)  # append here
        if len(FULL_BG1['Time']):
            FULL_BG['IMAGE'] = np.append(FULL_BG['IMAGE'], FULL_BG1['IMAGE'])
            FULL_BG['Time'] = np.append(FULL_BG['Time'], FULL_BG1['Time'])
        print('done')
        #--------------------------------------------------------------------------

        #https://stackoverflow.com/questions/10012788/python-find-min-max-of-two-lists
        t_min = np.minimum(np.min(ROI_N['Time']), t_min)
        t_max = np.maximum(np.max(ROI_N['Time']), t_max)

        t_range = np.array([
            np.floor(t_min * 86400 / dt) * dt / 86400,
            np.ceil(t_max * 86400 / dt) * dt / 86400
        ])

        if save_files:
            #https://docs.scipy.org/doc/scipy/reference/tutorial/io.html
            print('Saving to file...')
            sio.savemat(path + filename[i].replace('.roi', '.mat'), {
                'ROI_N': ROI_N,
                'HOUSE': HOUSE,
                'IMAGE1': IMAGE1
            })
            sio.savemat(path + 'full_backgrounds.mat', {
                'FULL_BG': FULL_BG,
                't_range': t_range
            })
            print('done')

    print('=========================2nd sweep================================')
    for i in range(0, len(filename)):
        # load from file
        print('Loading from file...')
        #https://stackoverflow.com/questions/7008608/scipy-io-loadmat-nested-structures-i-e-dictionaries
        dataload = sio.loadmat(path + filename[i].replace('.roi', '.mat'),
                               variable_names=['ROI_N', 'HOUSE', 'IMAGE1'])
        #dataload['ROI_N']['StartX'][0,0][0,:]
        ROI_N = dataload['ROI_N']
        HOUSE = dataload['HOUSE']
        IMAGE1 = dataload['IMAGE1']
        print('done')

        #++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
        # Associate backgrounds ++++++++++++++++++++++++++++++++++++++++++++++++
        print('Associate backgrounds...')
        BG = associateBackgrounds(ROI_N, FULL_BG)
        print('done')
        #----------------------------------------------------------------------

        if save_files:
            #https://docs.scipy.org/doc/scipy/reference/tutorial/io.html
            print('Saving to file...')
            sio.savemat(path + filename[i].replace('.roi', '.mat'), {
                'ROI_N': ROI_N,
                'HOUSE': HOUSE,
                'IMAGE1': IMAGE1,
                'BG': BG
            })
            #with open(path + filename[i].replace('.roi','.mat'),'ab') as f:
            #   sio.savemat(f, {'BG':BG})
            print('done')

    return (bytes, house, images, rois, ushort, Header, I, R, H, t_range)
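
Example #7 (and Example #8 after it) locates record boundaries by decoding the raw byte stream as unsigned 16-bit words twice, once aligned to even byte offsets and once shifted by one byte, so a sync word is found regardless of its alignment; the marker values 0x484B (housekeeping), 0xA3D5 (image) and 0xB2E6 (ROI) are taken from the excerpts. A toy, self-contained version of that search, using np.frombuffer instead of struct.unpack and assuming little-endian words:

import struct
import numpy as np

# Toy stream: the marker 0xA3D5 appears once at an even and once at an odd byte offset.
data = b"\x00\x11" + struct.pack("<H", 0xA3D5) + b"\x22" + struct.pack("<H", 0xA3D5) + b"\x33"
if len(data) % 2:                     # make the length even, as the excerpt does
    data = data[:-1]

even = np.frombuffer(data, dtype="<u2")        # words starting at even byte offsets
odd = np.frombuffer(data[1:-1], dtype="<u2")   # words starting at odd byte offsets

marker = 0xA3D5
even_hits = (np.flatnonzero(even == marker) * 2).tolist()
odd_hits = (np.flatnonzero(odd == marker) * 2 + 1).tolist()
print(sorted(even_hits + odd_hits))            # -> [2, 5], every byte offset of the marker
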
Code Example #8
def mult_job(path1,filename1,dt,FULL_BG,t_min,t_max,save_files,\
             process_sweep1_if_exist,cpiv1):

    if (os.path.isfile("{0}{1}".format(path1, filename1.replace(
            '.roi', '.mat'))) and not (process_sweep1_if_exist)):

        dataload=sio.loadmat(path1+'full_backgrounds.mat', \
                             variable_names=['FULL_BG','t_range'])
        FULL_BG = dataload['FULL_BG']
        t_range1 = dataload['t_range']
        t_min = t_range1[0, 0]
        t_max = t_range1[0, 1]

        del dataload

        print("{0}{1}".format("Skipping file...", filename1))
        bytes1,house,images,rois,ushort,Header,I,R,H = \
            False, False, False, False, False, False, False, False, False
        return (FULL_BG, t_min, t_max)

    fid = open("{0}{1}".format(path1, filename1), "rb")
    print("{0}{1}".format("Reading file...", filename1))
    bytes1 = fid.read()
    print("done")

    lb = len(bytes1)
    # make bytes1 even
    if np.mod(lb, 2) != 0:
        bytes1 = bytes1[0:-1]
        lb = lb - 1

    order = np.mgrid[0:int(len(bytes1) / 2)]
    # https://stackoverflow.com/questions/45187101/converting-bytearray-to-short-int-in-python
    ushort1 = unpack('H' * int(lb / 2), bytes1)  # the H means ushort int
    order1 = order
    ushort2 = unpack('H' * int((lb) / 2),
                     bytes1[1:len(bytes1)] + b'1')  # append a byte
    order2 = order[1:len(order) + 1]

    # append the two tuples
    #http://datos.io/2016/10/04/python-memory-issues-tips-and-tricks/
    #ushort="{0}{1}".format(ushort1,ushort2)
    ushort = ushort1 + ushort2

    del ushort1, ushort2
    # append the two numpy arrays
    order = np.append(order1, order2)

    del order1, order2

    bytes1 = pack('H' * int(lb), *ushort)  # the H means ushort int

    ushort = np.asarray(ushort)

    #++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    #find the positions of the start of block marks ++++++++++++++++++++++++++
    #484B
    print('Finding house keeping...')
    if cpiv1:
        house, = np.where(ushort == int('0xa1d7', 0))
    else:
        house, = np.where(ushort == int('0x484B', 0))
    print('done')

    # A3D5
    print('Finding image data...')
    images, = np.where(ushort == int('0xa3d5', 0))
    print('done')

    #B2E6
    print('Finding roi data...')
    rois, = np.where(ushort == int('0xb2e6', 0))
    print('done')

    fid.close()
    #--------------------------------------------------------------------------

    # Garbage collection:
    # https://stackoverflow.com/questions/1316767/how-can-i-explicitly-free-memory-in-python
    gc.collect()
    del gc.garbage[:]

    #++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    #convert the bytes1 to Header, Images and ROI structs +++++++++++++++++++++
    print('Post-processing data, stage 1...')
    Header = convertDataToHeaderSA(ushort)
    (I, images) = convertDataToImageSA(bytes1, ushort, order, images)
    (R, rois) = convertDataToROISA(bytes1, ushort, order, rois)
    (H) = convertDataToHouseSA(bytes1, ushort, order, house, cpiv1)
    print('done')
    #--------------------------------------------------------------------------

    #++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    # extract the images, times, etc ++++++++++++++++++++++++++++++++++++++++++
    print('Post-processing data, stage 2...')
    (ROI_N, HOUSE, IMAGE1) = postProcess(bytes1, rois, R, H, I, Header, cpiv1)
    print('done')
    #--------------------------------------------------------------------------

    # Garbage collection:
    gc.collect()
    del gc.garbage[:]

    #++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    #Backgrounds +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    print('Getting backgrounds...')
    FULL_BG1 = fullBackgrounds(ROI_N, cpiv1)  # append here
    if len(FULL_BG1['Time']):
        temp_name = tempfile.mktemp()
        sio.savemat(temp_name, {'FULL_BG': FULL_BG1})
        dataload = sio.loadmat(temp_name, variable_names=['FULL_BG'])
        FULL_BG1 = dataload['FULL_BG']
        del dataload

        r = len(FULL_BG['IMAGE'][0, 0])
        if r > 0:
            FULL_BG['IMAGE'][0,0]=np.append(FULL_BG['IMAGE'][0,0], \
              FULL_BG1['IMAGE'][0,0],axis=1)
            FULL_BG['Time'][0,0]=np.append(FULL_BG['Time'][0,0], \
              FULL_BG1['Time'][0,0],axis=0)
        else:  # append
            FULL_BG = FULL_BG1
    print('done')
    #--------------------------------------------------------------------------

    #https://stackoverflow.com/questions/10012788/python-find-min-max-of-two-lists
    t_min = np.minimum(np.min(ROI_N['Time']), t_min)
    t_max = np.maximum(np.max(ROI_N['Time']), t_max)

    t_range = np.array([
        np.floor(t_min * 86400 / dt) * dt / 86400,
        np.ceil(t_max * 86400 / dt) * dt / 86400
    ])

    if save_files:
        #https://docs.scipy.org/doc/scipy/reference/tutorial/io.html
        print('Saving to file...')
        sio.savemat("{0}{1}".format(path1, filename1.replace('.roi', '.mat')),
                    {
                        'ROI_N': ROI_N,
                        'HOUSE': HOUSE,
                        'IMAGE1': IMAGE1
                    })
        sio.savemat("{0}{1}".format(path1, 'full_backgrounds.mat'), {
            'FULL_BG': FULL_BG,
            't_range': t_range
        })
        print('done')

    del ROI_N, HOUSE, IMAGE1
    # Garbage collection:
    gc.collect()
    del gc.garbage[:]

    return (FULL_BG, t_min, t_max)
Code Example #9
File: score.py    Project: MountainRange/mobius_score
    #results = tensorEval(eval_data, eval_labels, clf)

    predict_input_fn = tf.estimator.inputs.numpy_input_fn(
        x={"x": np.array(features).astype("float32")},
        num_epochs=1,
        shuffle=False)

    results = list(clf.predict(input_fn=predict_input_fn))

    result = [p["classes"] for p in results]

    for r in result:
        r[r < 0.3] = 0
        r[r >= 0.3] = 1
        u, c = np.unique(r, return_counts=True)

    out = [list(x) for x in result]
    return out


if __name__ == "__main__":
    if len(sys.argv) < 2:
        song, tempo = score()
    else:
        song, tempo = score(sys.argv[1])
    post = postProcess(song)
    # print('postProcess result:')
    # print(post)
    sheetMusic('test', post, int(tempo))
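
Example #9 binarizes each predicted frame at a 0.3 cutoff before counting active classes. The same operation on a bare NumPy array, outside the estimator pipeline (the scores below are made up):

import numpy as np

scores = np.array([0.05, 0.31, 0.80, 0.10, 0.29])   # hypothetical per-class scores for one frame
notes = (scores >= 0.3).astype(int)                  # same 0.3 cutoff as the excerpt
print(notes)                                         # [0 1 1 0 0]
print(np.unique(notes, return_counts=True))          # value/count pairs, as the excerpt gathers
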
Code Example #10
File: mainroutine.py    Project: ibrahim85/SEEDproject
# read PM excel to template excel
logger.info('\n## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ##')
logger.info('converting pm to template')
logger.info('## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ##')
pm_in_dir = os.getcwd() + "/PMfile/PMinput/"
pm_out_dir = os.getcwd() + "/PMfile/PMoutput/"
pm.readPM(pm_in_dir, pm_out_dir)
logger.info('\nend converting pm to template')

pm_out_dir = os.getcwd() + "/PMfile/PMoutput/"
logger.info('\n## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ##')
logger.info('process template')
logger.info('## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ##')
# process template excel
json_in_dir = pm_out_dir
#json_out_dir = os.getcwd() + "/Jsonfile/raw/"
json_out_dir = os.getcwd() + "/Jsonfile/local_raw/"
# query goes here
tm.pm2json(json_in_dir, json_out_dir)

logger.info('\n## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ##')
logger.info('post process json')
logger.info('## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ##')
# post-process json files
#post_out_dir = os.getcwd() + "/Jsonfile/post/" #original file, there's an offset
post_out_dir = os.getcwd() + "/Jsonfile/local_post/"
ps.postProcess(json_out_dir, post_out_dir)
logger.info('\n## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ##')
logger.info('finished')
logger.info('## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ##')
Code Example #11
File: Test.py    Project: cash2one/reautomation_handoff
def executeScript(inputScript, scriptOptionalArgs, inputFile=None):
    global testcaseInfoList
    global testcaseInfo
    global yamlCont
    testcaseInfoList = []
    yamlCont = {}
    if inputFile != None:
        fil = open(inputFile, "r")
        yamlCont = yaml.load(fil)
#        inputFileType = re.search('(.*).yaml', inputFile).group(1)

    if re.search("\.log\.", inputScript):
        cmd = "python /work/swtest01_1/sgsubramaniam/sw-test/special_script/demoScript/slowcat.py -d 0.002 %s" %inputScript
    else:
        cmd = "f10tool %s %s" %(inputScript, ' '.join(scriptOptionalArgs))
    if re.search("\.flist$", inputScript):
        inputFileType = 'flist'
        cmd = "f10tool %s %s" %(inputScript, ' '.join(scriptOptionalArgs))
    elif re.search("\.f10$", inputScript):
        inputFileType = 'f10'
        cmd = "f10tool %s %s" %(inputScript, ' '.join(scriptOptionalArgs))
    else:
        inputFileType = 'unknown'
        if inputFile != None:
            inputFileType = re.search('(.*).yaml', inputFile).group(1)
        cmd = "%s %s;echo '----------'" %(inputScript, ' '.join(scriptOptionalArgs))


    ps = winpexpect.winspawn(cmd)
    ps.logfile_read = sys.stdout

    ## Initialize testcaseInfo if the script is not a flist.
    if inputFileType in ['f10', 'unknown']:
        scriptName = inputScript
        testcaseInfo = testInfo(scriptName)
        testcaseInfoList.append(testcaseInfo)

    timeout = -1
    ps.interact(output_filter=scriptExecutionMonitor)

    if inputFileType in ('flist', 'rspec'):
        ## creates a nested suite
        suiteList = []
        for testcaseInfo in testcaseInfoList:
            testcaseInfo.flush()
            suite = createRobotSuite(testcaseInfo)
            suiteList.append(suite)
        suite = TestSuite(inputScript)
        suite.suites = suiteList
        result = suite.run(output='output.xml', loglevel='debug')
        for testcaseInfo in testcaseInfoList:
            testcaseInfo.variableFile.close()
    else:
        ## creates a single suite
        for testcaseInfo in testcaseInfoList:
            testcaseInfo.flush()
            print testcaseInfo
            suite = createRobotSuite(testcaseInfo)
            result = suite.run(output='output.xml', loglevel='debug')
            testcaseInfo.variableFile.close()

    # Generating log files requires processing the earlier generated output XML.
    ResultWriter('output.xml').write_results()
    pp = postProcess.postProcess(suiteFile=testcaseInfo.scriptName)
    pp.close()
Code Example #12
File: main.py    Project: FracturedRocketSpace/Argon
# Start timer
start = timeit.default_timer()

# Initialize position + velocity
particles = Particles(config.nParticles)
(temp, eK, eP, compr, cV, displacement, zeroPositions) = initSimulation(particles)

if config.animation:
    anim = animationPlot()

# Main loop
for i in range(config.iterations):
    # Update position
    virial = argonMove(particles, eP, temp, i)

    # Calculate physical quantities
    zeroPositions = calculateQuantities(particles, temp, eK, compr, virial, cV, displacement, zeroPositions, i)

    # Update animation
    if config.animation and i % config.animationIter == 0:
        anim.updateParticlePlot(particles)

    # Inform user of progress
    print("Iteration", i + 1, "completed; Time is:", round(i * config.dt, 3))

postProcess(particles, temp, eK, eP, compr, cV, displacement)

# Stop timer
stop = timeit.default_timer()
print("Program ended in", int(stop - start), "seconds")
print(round(((stop - start) / config.iterations) * 1000, 3), "ms per iteration for", config.nParticles, "particles")