Code Example #1
def makeTestSection(test, details, images):
    section = ""
    section += mdHeader(3, "Test: " + test)
    section += mdBlock(details)
    section += mdBlock(
        images.generateTable(columns=2, directory=str(project)))
    return section
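This excerpt is the nested helper from Code Example #2, so mdHeader, mdBlock, and project are supplied by the enclosing output_table scope there. As a rough, self-contained sketch of the same string-building pattern only (the helpers below are stand-ins, not scilab's actual implementations):

# Illustrative stand-ins for the markdown helpers; not scilab's real mdHeader/mdBlock.
def mdHeader(level, text):
    return "\n" + "#" * level + " " + text + "\n\n"

def mdBlock(text):
    return str(text) + "\n\n"

def makeSection(title, body):
    # assemble one markdown section: a level-3 header followed by a body block
    return mdHeader(3, "Test: " + title) + mdBlock(body)

print(makeSection("t1", "gauge: 12.3 mm"))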
Code Example #2
def output_table(args):

    # if True: return

    project = Path(RAWDATA) / args.project

    print('\n\n## Summary Table \n\n')
    summary_table = args.state.summaryTable.generateTable().format()
    print(summary_table)

    ## Make summary markdown
    summaryFile = "test_summary_table.md"
    summarySections = (
        mdHeader(1, "Meniscus Fatigue Expr 1 - Pretrial"),
        mdBlock('\n\n\n'),
        mdHeader(2, "Summary Table"),
        mdBlock(summary_table),
        mdBlock('\n\nDone...\n\n'),
    )

    # writeImageTable(project, summaryFile, summarySections, writemode='w')

    ## Make images markdown
    # imagesMarkdown = os.sep.join(RAWDATA,args.project,"test_all_images.md")
    # writeImageTable(args, imagesMarkdown)

    testDetails, testImages = args.state.testDetails, args.state.testImages

    def makeTestSection(test, details, images):
        section = ""
        section += mdHeader(3, "Test: " + test)
        section += mdBlock(details)
        section += mdBlock(
            images.generateTable(columns=2, directory=str(project)))
        return section

    testSections = (makeTestSection(test, details, images) for (
        test,
        details), (_, images) in zip(testDetails.items(), testImages.items()))

    import itertools

    writeImageTable(project,
                    summaryFile,
                    itertools.chain(summarySections, testSections),
                    writemode='w')

    print(subprocess.call(['open', '-a', 'Marked',
                           str(project / summaryFile)]))
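output_table builds the summary sections eagerly and the per-test sections lazily (a generator expression), then chains them with itertools.chain so writeImageTable can stream everything in one pass; the final subprocess.call just opens the resulting file in the Marked app on macOS. A minimal sketch of that chaining pattern, assuming a plain file writer in place of scilab's writeImageTable:

# Sketch only: combine eager summary sections with lazily generated per-test
# sections and write them in a single pass. File name and contents are made up.
import itertools

def write_sections(path, sections):
    with open(path, "w") as f:
        for section in sections:
            f.write(section)

summary_sections = ("# Summary\n\n", "| test | result |\n| --- | --- |\n\n")
test_sections = ("## Test {}\n\n".format(name) for name in ("t1", "t2"))

write_sections("test_summary_table.md",
               itertools.chain(summary_sections, test_sections))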
Code Example #3
def handler(testconf, excelfile, args):
    def updateMetaData(data):
        ## Handle Names
        data['name'] = testconf.info.name
        data['id'] = testconf.info.short
        data['info'] = testconf.info._asdict()

    assert excelfile.exists()

    ## Update with Excel Values
    data = parse_data_from_worksheet(
        parser_data_sheet_excel=args.parser_data_sheet_excel,
        testpath=excelfile,
        testconf=testconf,
        args=args,
    )

    print(mdBlock("Excel Sheet Data:"))
    rows = [(k, v) for k, v in flatten(data).items()]
    rows = sorted(rows)

    # print()
    # print(HTML(tabulate.tabulate( rows, [ "Key", "Value", ], tablefmt ='html' )))

    # print(mdBlock("<pre>\n"+json.dumps(data,indent=4)+"\n</pre>"))
    updateMetaData(data)

    # excel_data = DataTree(notes=data.pop("notes"), other=data.pop("other"))
    json_data = collections.OrderedDict(sorted(data.items()))
    testconf.folder.save_calculated_json_raw(test=testconf,
                                             name='excel',
                                             json_data=json_data,
                                             overwrite=True)
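The handler prints a flattened key/value view of the worksheet data, injects the test metadata, and stores the whole dictionary sorted by key. A small sketch of that flatten-and-serialize step, assuming flatten() behaves like a dotted-key flattener (the real one comes from scilab's tools):

# Sketch: flatten nested worksheet data into dotted keys for display, then
# serialize a key-sorted dict, as the handler above does before saving JSON.
import collections
import json

def flatten(d, prefix=""):
    # assumed behaviour of scilab's flatten(): nested dicts become dotted keys
    out = {}
    for k, v in d.items():
        key = prefix + "." + k if prefix else k
        if isinstance(v, dict):
            out.update(flatten(v, key))
        else:
            out[key] = v
    return out

data = {"name": "t1", "info": {"set": 1002, "sample": 701}}
rows = sorted(flatten(data).items())           # printed as the key/value table
json_data = collections.OrderedDict(sorted(data.items()))
print(rows)
print(json.dumps(json_data, indent=4))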
Code Example #4
def handler(testconf, excelfile, args):
        
    def updateMetaData(data):
        ## Handle Names    
        data['name'] = testconf.info.name
        data['id'] = testconf.info.short
        data['info'] = testconf.info._asdict()

    assert excelfile.exists()

    ## Update with Excel Values    
    data = parse_data_from_worksheet(
        parser_data_sheet_excel=args.parser_data_sheet_excel,
        testpath=excelfile,
        testconf=testconf,
        args=args,
        )

    print(mdBlock("Excel Sheet Data:"))
    rows = [ (k,v) for k,v in flatten(data).items() ]
    rows = sorted(rows)

    # print()
    # print(HTML(tabulate.tabulate( rows, [ "Key", "Value", ], tablefmt ='html' )))
    
    # print(mdBlock("<pre>\n"+json.dumps(data,indent=4)+"\n</pre>"))
    updateMetaData(data)
    
    # excel_data = DataTree(notes=data.pop("notes"), other=data.pop("other"))
    json_data = collections.OrderedDict(sorted(data.items()))
    testconf.folder.save_calculated_json_raw(test=testconf, name='excel', json_data=json_data, overwrite=True)
Code Example #5
File: guitestprocessor.py, Project: elcritch/scilab
def guitestprocess(testinfodict, fs, args, logFileNames):

    with setupStdFiles(*logFileNames) as logging:  # setup for logging output!
        try:
            from scilab.tools.project import DataTree, debug
            from scilab.tools import testingtools

            import scilab.datahandling.dataprocessor as dataprocessor
            from scilab.expers.configuration import FileStructure, generatetestinfoclass

            print("### Processing ")
            TestInfo = generatetestinfoclass(
                **fs.projdesc["experiment_config"]["testinfo"])
            testinfo = TestInfo(**testinfodict)
            debug(testinfo)

            test = DataTree()
            test.data = DataTree()
            test.info = testinfo
            test.folder = fs.testfolder(testinfo=test.info,
                                        ensure_folders_exists=False)

            debug(list(test.keys()), fs, args)

            expertype = fs.projdesc["experiment_config"]["type"]

            if expertype == "cycles":
                import scilab.expers.mechanical.fatigue.cycles as exper
            elif expertype == "uts":
                import scilab.expers.mechanical.fatigue.uts as exper
            else:
                raise ValueError("Do not know how to process test type.",
                                 expertype)

            args.parser_data_sheet_excel = exper.parser_data_sheet_excel
            args.parser_image_measurements = exper.parser_image_measurements
            args.codehandlers = exper.getcodehandlers()

            if not fs or not test or not args:
                debug(fs, test, args)
                logging.warning("Cannot execute test! Empty arguments ")
                return

            if 'data' not in test:
                test.data = DataTree()

            dataprocessor.execute(fs=fs,
                                  name=test.info.name,
                                  testconf=test,
                                  args=args)

            print(mdBlock("<p>\n# Done #</p>"))
        except Exception as err:
            print("\n<br>\n[Error]\n<br>\n", flush=True)
            print("Exception:\n", repr(err))
            print("Traceback:\n", traceback.format_exc())
            logging.exception(err)
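guitestprocess redirects its output through setupStdFiles, builds the test DataTree, and then picks the experiment module from the project's experiment_config type. The if/elif import could equally be expressed as a lookup table; a sketch of that alternative using importlib (the module paths are scilab's, the helper itself is an assumption and needs scilab importable to actually succeed):

# Sketch of a table-driven variant of the expertype dispatch above; importing
# the modules still requires scilab to be installed.
import importlib

EXPER_MODULES = {
    "cycles": "scilab.expers.mechanical.fatigue.cycles",
    "uts": "scilab.expers.mechanical.fatigue.uts",
}

def load_exper_module(expertype):
    try:
        modulepath = EXPER_MODULES[expertype]
    except KeyError:
        raise ValueError("Do not know how to process test type.", expertype)
    return importlib.import_module(modulepath)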
Code Example #6
def guitestprocess(testinfodict, fs, args, logFileNames):
    
    with setupStdFiles(*logFileNames) as logging: # setup for logging output!
        try:
            from scilab.tools.project import DataTree, debug
            from scilab.tools import testingtools

            import scilab.datahandling.dataprocessor as dataprocessor
            from scilab.expers.configuration import FileStructure, generatetestinfoclass
    
            print("### Processing ")
            TestInfo = generatetestinfoclass(**fs.projdesc["experiment_config"]["testinfo"])
            testinfo = TestInfo(**testinfodict)
            debug(testinfo)
    
            test = DataTree()
            test.data = DataTree()
            test.info = testinfo
            test.folder = fs.testfolder(testinfo=test.info, ensure_folders_exists=False)
    
            debug(list(test.keys()), fs, args)

            expertype = fs.projdesc["experiment_config"]["type"]

            if expertype == "cycles":
                import scilab.expers.mechanical.fatigue.cycles as exper
            elif expertype == "uts":
                import scilab.expers.mechanical.fatigue.uts as exper
            else:
                raise ValueError("Do not know how to process test type.", expertype)
    
            args.parser_data_sheet_excel = exper.parser_data_sheet_excel
            args.parser_image_measurements = exper.parser_image_measurements
            args.codehandlers = exper.getcodehandlers()


            if not fs or not test or not args:
                debug(fs, test, args)
                logging.warning("Cannot execute test! Empty arguments ")
                return

            if 'data' not in test:
                test.data = DataTree()

            dataprocessor.execute(fs=fs, name=test.info.name, testconf=test, args=args)

            print(mdBlock("<p>\n# Done #</p>"))
        except Exception as err:
            print("\n<br>\n[Error]\n<br>\n", flush=True)
            print("Exception:\n", repr(err))
            print("Traceback:\n", traceback.format_exc())
            logging.exception(err)
Code Example #7
File: make_data_json.py, Project: elcritch/scilab
def handler(testconf, excelfile, args):

    # print(mdHeader(3, ": {}",testconf.info.name))

    def updateMetaData(data):
        ## Handle Names
        data['name'] = testconf.info.name
        data['id'] = testconf.info.short
        data['info'] = testconf.info.as_dict()

    debug(testconf.folder.jsoncalc.as_posix())

    # print(str(testconf.info), file=args.report)

    ## Update with Excel Values
    data = parse_data_from_worksheet(
        testpath=excelfile,
        testconf=testconf,
        args=args,
    )

    print(mdBlock("Excel Sheet Data:"))
    rows = [(k, v) for k, v in flatten(data).items()]
    rows = sorted(rows)

    print()
    print(HTML(tabulate.tabulate(rows, [
        "Key",
        "Value",
    ], tablefmt='html')))

    # print(mdBlock("<pre>\n"+json.dumps(data,indent=4)+"\n</pre>"))
    updateMetaData(data)

    # excel_data = DataTree(notes=data.pop("notes"), other=data.pop("other"))
    json_data = collections.OrderedDict(sorted(data.items()))
    testconf.folder.save_calculated_json_raw(test=testconf,
                                             name='excel',
                                             json_data=json_data,
                                             overwrite=True)

    ## Update with Image Measurements

    data = parse_from_image_measurements(testconf=testconf, args=args)

    updateMetaData(data)

    testconf.folder.save_calculated_json_raw(test=testconf,
                                             name='measurements',
                                             json_data=data,
                                             overwrite=True)

    return
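This variant of the handler also renders the key/value rows as an HTML table via tabulate and IPython's HTML display object before saving the two JSON artifacts ('excel' and 'measurements'). A small sketch of the table-rendering call with made-up rows:

# Sketch of the HTML table rendering used above; tabulate and IPython.display
# are the real third-party packages, the rows are invented sample data.
import tabulate
from IPython.display import HTML

rows = sorted([("name", "t1"), ("info.set", 1002), ("info.sample", 701)])
table = HTML(tabulate.tabulate(rows, ["Key", "Value"], tablefmt="html"))
print(table.data)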
Code Example #8
File: make_data_json.py, Project: manasdas17/scilab-2
def handler(testconf, excelfile, args):
    
    
    # print(mdHeader(3, ": {}",testconf.info.name))

    def updateMetaData(data):
        ## Handle Names    
        data['name'] = testconf.info.name
        data['id'] = testconf.info.short
        data['info'] = testconf.info.as_dict()
    
    
    debug(testconf.folder.jsoncalc.as_posix())
    
    # print(str(testconf.info), file=args.report)
    
    ## Update with Excel Values    
    data = parse_data_from_worksheet(
        testpath=excelfile,
        testconf=testconf,
        args=args,
        )

    print(mdBlock("Excel Sheet Data:"))
    rows = [ (k,v) for k,v in flatten(data).items() ]
    rows = sorted(rows)

    print()
    print(HTML(tabulate.tabulate(rows, ["Key", "Value"], tablefmt='html')))
    
    # print(mdBlock("<pre>\n"+json.dumps(data,indent=4)+"\n</pre>"))
    updateMetaData(data)
    
    # excel_data = DataTree(notes=data.pop("notes"), other=data.pop("other"))
    json_data = collections.OrderedDict(sorted(data.items()))
    testconf.folder.save_calculated_json_raw(test=testconf, name='excel', json_data=json_data, overwrite=True)
    
    ## Update with Image Measurements
    
    data = parse_from_image_measurements(
        testconf=testconf,
        args=args)
    
    updateMetaData(data)

    testconf.folder.save_calculated_json_raw(test=testconf, name='measurements', json_data=data, overwrite=True)
    
    return
Code Example #9
File: summary_images.py, Project: manasdas17/scilab-2
def print_csv_data(testinfo, testfolder, details, report):
    
    fatigue_data = """
        Short Name,set,location,sample,Test Name,Level,Area,DM3,Pred UTS,Orient,Cycles (1st Quartile),Cycles (4th Quartile),UTS Load,Max Strain
        1002-1LG-1001,1002,1LG,1001,,90%,0.788,120.33,12.635,,29,39,,
        1002-1LG-701,1002,1LG,701,,60%,1.13,160.73,17.66,LG,972,975,,
        1002-1LG-702,1002,1LG,702,,70%,1.466,141.22,15.716,,,,25,
        1002-1LG-902,1002,1LG,902,,80%,0.931,136.98,15.293,LG,104,105,,
        1002-1TR-601,1002,1TR,601,,60%,0.16894,5.563,0.5,,,,2.1,
        1002-1TR-602,1002,1TR,602,,70%,1.1296,12.035,5.4,,11,12,,
        1002-1TR-603,1002,1TR,603,,70%,1.02,5.433,1.056,,,,1.7,
        1002-1TR-801,1002,1TR,801,,80%,0.7912,5.319,1.016,,3790,3791,,
        1006-1TR-501,1006,1TR,501,,90%,0.713,4.942,0.884,,,,,
        1006-1TR-502,1006,1TR,502,,80%,0.555,4.72,0.644,,1,21,,
        1006-1TR-701,1006,1TR,701,,70%,0.786,8.083,1.39,,1597,1598,,
        1006-1TR-702,1006,1TR,702,,60%,0.701,10.939,2.988,,6620,6630,,
        1009-1LG-1002,1009,1LG,1002,jan10(gf10.9-llm)-wa-lg-l10-x2,60%,1.02,133.97,14.994,LG,,,14,
        1009-1LG-1003,1009,1LG,1003,jan10(gf10.9-llm)-wa-lg-l10-x3,70%,0.72,156.2,17.208,LG,98,99,,
        1009-1LG-1201,1009,1LG,1201,jan10(gf10.9-llm)-wa-lg-l12-x1,80%,1.2,192.6,20.83,LG,,,38,
        1009-1LG-1202,1009,1LG,1202,jan10(gf10.9-llm)-wa-lg-l12-x2,90%,1.36,118.05,13.407,LG,184,185,,
        1009-1LG-1203,1009,1LG,1203,jan10(gf10.9-llm)-wa-lg-l12-x3,80%,1.1,97.07,11.317,LG,559,560,,
        1009-1TR-1101,1009,1TR,1101,,70%,0.79,9.742,2.568,,,,5,
        1009-1TR-1102,1009,1TR,1102,,60%,1.02,9.632,2.529,,,,6.7,
        1009-1TR-902,1009,1TR,902,,90%,0.87,7.029,1.615,,,,3.4,
        1009-1TR-903,1009,1TR,903,,90%,0.88,14.487,4.233,,1,1,,
        1104-1LG-501,1104,1LG,501,jan11(gf11.4-llm)-wa-lg-l5-x1,90%,1.1,113.96,12.99,LG,1,17,,
        1104-1LG-502,1104,1LG,502,jan11(gf11.4-llm)-wa-lg-l5-x2,80%,1.56,64.166,8.03,LG,102,126,,
        1104-1LG-701,1104,1LG,701,jan11(gf11.4-llm)-wa-lg-l7-x1,70%,1.26,207.92,22.361,LG,,,12,
        1104-1LG-702,1104,1LG,702,jan11(gf11.4-llm)-wa-lg-l7-x2,60%,1.26,113.76,12.98,LG,,,12,
        1105-1LG-601,1105,1LG,601,jan11(gf11.5-llm)-wa-lg-l6-x1,60%,0.88,250.89,26.642,LG,,,24,
        1105-1LG-602,1105,1LG,602,jan11(gf11.5-llm)-wa-lg-l6-x2,70%,0.93,171.9,18.772,LG,198,199,,
        1105-1LG-801,1105,1LG,801,jan11(gf11.5-llm)-wa-lg-l8-x1,80%,1.23,19.789,19.789,LG,751,752,,
        1105-1LG-802,1105,1LG,802,jan11(gf11.5-llm)-wa-lg-l8-x2,90%,1.81,143.35,15.928,LG,25,26,,
        1105-1TR-501,1105,1TR,501,,90%,0.52,8.92,2.279,,104,105,,
        1105-1TR-502,1105,1TR,502,,80%,0.38,7.899,1.921,,7,8,,
        1105-1TR-701,1105,1TR,701,,70%,0.67,11.912,3.329,,16,18,,
        1105-1TR-702,1105,1TR,702,,60%,0.629,8.977,2.3,,143,145,,
        """
    
    def values(v):
        # coerce CSV fields to int or float where possible, otherwise keep the string
        try:
            return int(v)
        except (TypeError, ValueError):
            try:
                return float(v)
            except (TypeError, ValueError):
                return v

    fatigue_header, *fatigue_rows = [ l.strip().split(',') for l in fatigue_data.strip().split('\n') ]
    # debug(fatigue_header)
    
    fatigue_dicts = [ { k:values(i) for k,i in zip(fatigue_header, r) } for r in fatigue_rows ]
    fatigue_dicts = DataTree(**{ r['Short Name']:r for r in fatigue_dicts })
    
    # debug(fatigue_dicts)
    
    
    
    print(mdHeader(3, "CSV Data" ), file=report)
    
    # KeyError: "Key 'elapsedStep' not found in: step, loadLinearLoad1Maximum, displacementLinearDigitalPositionMinimum, rotationRotaryRotationMinimum, torqueRotaryTorqueMinimum, rotationRotaryRotationMaximum, torqueRotaryTorqueMaximum, indices, positionLinearPositionMaximum, rotaryDisplacementRotaryDigitalRotationMaximum, rotaryDisplacementRotaryDigitalRotationMinimum, loadLinearLoadMaximum, loadLinearLoad1Minimum, elapsedCycles, pc|frequency, _matrix, _InstronMatrixData__slices, displacementLinearDigitalPositionMaximum, totalCycles, loadLinearLoadMinimum, positionLinearPositionMinimum, pc|cycleStartTime"

    if testinfo.orientation == 'lg':
        trendsfolder = testfolder.raws.cycles_lg_csv
        loads = ['loadLinearLoadMaximum', 'loadLinearLoadMinimum']
        loadsTracking = 'loadLinearLoad'
    elif testinfo.orientation == 'tr':
        trendsfolder = testfolder.raws.cycles_tr_csv
        loads = ['loadLinearLoad1Maximum', 'loadLinearLoad1Minimum']
        loadsTracking = 'loadLinearLoad1'
    else:
        raise ValueError("Do not know how to process test orientation.", testinfo.orientation)
    
    debug(testinfo.short, trendsfolder)
    
    if not trendsfolder.trends or not trendsfolder.trends.exists():
        print("Error: couldn't find trend files! (%s)"%(str(testinfo),), file=report)
        return 
        
    trends = csvread(str(trendsfolder.trends))
    
    indicies = trends._getslices('step')
    debug(indicies)
    
    sl_fatigue = indicies[5]
    # sl_uts = indicies[7]
    
    def fmt(name, N=-25, O=-1):
        if hasattr(name, '__call__'):
            return name(trends, N, O)[1]
        return trends[name].array[N:O]
        
    columns = [
        'elapsedCycles', 
        'step', 
        'displacementLinearDigitalPositionMinimum', 
        'displacementLinearDigitalPositionMaximum',
        lambda x,N,O: ('Min Strain', x['displacementLinearDigitalPositionMinimum'].array[N:O]/details.gauge.value),
        lambda x,N,O: ('Max Strain', x['displacementLinearDigitalPositionMaximum'].array[N:O]/details.gauge.value),
        ]+loads
        
    def short(name, N=-1, O=-1):
        if hasattr(name, '__call__'):
            return name(trends, N, O)[0]
        return name.replace('Linear', '').replace('displacement', 'disp').replace('DigitalPosition','')
    
    
    fatigue_dict = fatigue_dicts.get(testinfo.short,None) 
    quartile1 = 'Cycles (1st Quartile)'
    quartile4 = 'Cycles (4th Quartile)'
    print(mdBlock("Fatigue Data:"+str(fatigue_dict)), file=report)

    
    if fatigue_dict and fatigue_dict[quartile4]:
        
        startEndCycleIdx = find_index(fatigue_dict[quartile1], trends['elapsedCycles'].array[sl_fatigue])
        lastCycleIdx = find_index(fatigue_dict[quartile4], trends['elapsedCycles'].array[sl_fatigue])
        N, O = startEndCycleIdx-1, lastCycleIdx
        
        if not lastCycleIdx:
            debug(trends['elapsedCycles'].array[sl_fatigue])
            
        debug(startEndCycleIdx, lastCycleIdx, N, O)
        
        lastCycleMaxPosition = (trends['displacementLinearDigitalPositionMaximum'].array[sl_fatigue])[lastCycleIdx]
        
        debug(lastCycleMaxPosition, fatigue_dict)
        
        print(mdBlock("**Fatigue Cycle**: {'id': '%s', 'loc': '%s', 'max disp': %.3f, 'max strain':%.3f, 'lastCycle': %d, 'timeLast': %.3f} "%(
                    fatigue_dict['Short Name'],
                    fatigue_dict['location'],
                    lastCycleMaxPosition,
                    lastCycleMaxPosition/details.gauge.value,
                    fatigue_dict[quartile4],
                    trends['pc|cycleStartTime'].array[sl_fatigue][lastCycleIdx]
                    ),
                ), file=report)
    else:
        N, O = -25, -1
    
    if fatigue_dict and fatigue_dict['UTS Load']:
        
        tracking = csvread(str(trendsfolder.tracking))
        
        track_indicies = trends._getslices('step')
        sl_uts = track_indicies[7]
        debug(track_indicies, sl_uts)
            
        maxload = data_find_max(tracking[loadsTracking].array[sl_uts])
        debug(maxload)
        
        maxes = DataTree()
        maxes.load = maxload.value
        maxes.stress = maxes.load / details.measurements.area.value

        maxes.displacement = tracking.displacement.array[sl_uts][maxload.idx]
        maxes.strain = maxes.displacement / details.gauge.value

        debug(maxes)
                
        print(mdBlock("**UTS Load**: %s: %s, %s, %.3f, %.3f, %.3f, %.3f, , %.3f, %.3f, %d, %.3f "%(
                    'Name, Orient, UTS Load (manual), UTS Stress (manual), disp, strain, load, stress, idx, time',
                    fatigue_dict['Short Name'],
                    fatigue_dict['location'],
                    fatigue_dict['UTS Load'],
                    fatigue_dict['UTS Load']/details.gauge.value,
                    maxes.displacement,
                    maxes.strain,
                    maxes.load,
                    maxes.stress,
                    maxload.idx,
                    tracking.totalTime.array[sl_uts][maxload.idx],
                    )), file=report)    
    
    tabledata = OrderedDict([(short(col), fmt(col, N, O)) for col in columns ])    
    print(tabulate(tabledata, headers="keys", numalign="right", floatfmt=".2f"), file=report)
    
    
    print(mdBlock("Steps: "), set(trends['step'].array.tolist()), file=report)
Code Example #10
def handler(test: Path, args: dict):

    data_json = Json.load_data_path(test)

    ## Details

    ### lookup details
    detailValues = [
        part.strip() if type(part) is str else part
        for column in args.state.jsonColumns.values()
        for part in [attributesAccessor(data_json, column)]
    ]

    testName = "Test {}".format(test.stems())

    if 'n/a' in detailValues:
        debug(detailValues)
        logging.error("Skipping:" + testName + " due to missing details: " +
                      repr(detailValues))
        return

    ### details table
    detailsTable = MarkdownTable(headers=['Detail', 'Value'])
    detailsTable.add_rows(zip(args.state.jsonColumns.keys(), detailValues))

    details = detailsTable.generateTable(headers=['Name', 'Value']).format()
    args.state.testDetails[testName] = details

    ## Overview Images

    ### test overview images
    testOverviewImages = ImageTable()\
        .addImageGlob(str(test.parent), 'img', '*.png')\
        .addImageGlob(str(test.parent), "img","overview","*Stress*last*.png")\
        .addImageGlob(str(test.parent), "img","overview","*Stress*all*.png")\
        .addImageGlob(str(test.parent), "img","trends","*Stress*all*.png")\
        .addImageGlob(str(test.parent), "img","trends","*Stress*last*.png")

    ### add details and images to test section
    args.state.testImages[testName] = testOverviewImages

    # args.state.tests[testFolder] = [details, images]

    ### summary details
    # debug(detailValues)
    attribs, testId = parse_test_name(test.parent.stems())
    args.state.summaryTable.add_row(
        [testId, test.parent.stems(), attribs.orientation] + detailValues)

    ## all test images
    allImagesTable = ImageTable().addImageGlob(str(test.parent), 'img',
                                               '**/*.png')
    allImages = allImagesTable.generateTable(columns=2,
                                             directory=str(
                                                 test.parent)).format()

    testDetails = [
        mdHeader(2, 'Test: ' + testName),
        mdBlock(details),
        mdHeader(2, 'All Images'), allImages
    ]

    writeImageTable(test.parent, 'Test Images ({}).md'.format(testName),
                    testDetails)

    # print
    print("## CSV Data")
    print('>', ','.join(map(str, detailValues)))

    return
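The handler gathers overview images by globbing several img/ subdirectories through scilab's ImageTable. A stdlib-only sketch of that collection step, with an assumed directory layout and pathlib globbing standing in for addImageGlob:

# Sketch only: gather the overview PNGs with pathlib globs; the test directory
# and patterns mirror the addImageGlob calls above but are assumptions here.
from pathlib import Path

def collect_images(testdir, patterns):
    found = []
    for pattern in patterns:
        found.extend(sorted(Path(testdir).glob(pattern)))
    return found

overview = collect_images("tests/t1", [
    "img/*.png",
    "img/overview/*Stress*last*.png",
    "img/overview/*Stress*all*.png",
    "img/trends/*Stress*all*.png",
    "img/trends/*Stress*last*.png",
])
print(len(overview), "overview images found")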
Code Example #11
File: summary_images.py, Project: elcritch/scilab
def print_csv_data(testinfo, testfolder, details, report):

    fatigue_data = """
        Short Name,set,location,sample,Test Name,Level,Area,DM3,Pred UTS,Orient,Cycles (1st Quartile),Cycles (4th Quartile),UTS Load,Max Strain
        1002-1LG-1001,1002,1LG,1001,,90%,0.788,120.33,12.635,,29,39,,
        1002-1LG-701,1002,1LG,701,,60%,1.13,160.73,17.66,LG,972,975,,
        1002-1LG-702,1002,1LG,702,,70%,1.466,141.22,15.716,,,,25,
        1002-1LG-902,1002,1LG,902,,80%,0.931,136.98,15.293,LG,104,105,,
        1002-1TR-601,1002,1TR,601,,60%,0.16894,5.563,0.5,,,,2.1,
        1002-1TR-602,1002,1TR,602,,70%,1.1296,12.035,5.4,,11,12,,
        1002-1TR-603,1002,1TR,603,,70%,1.02,5.433,1.056,,,,1.7,
        1002-1TR-801,1002,1TR,801,,80%,0.7912,5.319,1.016,,3790,3791,,
        1006-1TR-501,1006,1TR,501,,90%,0.713,4.942,0.884,,,,,
        1006-1TR-502,1006,1TR,502,,80%,0.555,4.72,0.644,,1,21,,
        1006-1TR-701,1006,1TR,701,,70%,0.786,8.083,1.39,,1597,1598,,
        1006-1TR-702,1006,1TR,702,,60%,0.701,10.939,2.988,,6620,6630,,
        1009-1LG-1002,1009,1LG,1002,jan10(gf10.9-llm)-wa-lg-l10-x2,60%,1.02,133.97,14.994,LG,,,14,
        1009-1LG-1003,1009,1LG,1003,jan10(gf10.9-llm)-wa-lg-l10-x3,70%,0.72,156.2,17.208,LG,98,99,,
        1009-1LG-1201,1009,1LG,1201,jan10(gf10.9-llm)-wa-lg-l12-x1,80%,1.2,192.6,20.83,LG,,,38,
        1009-1LG-1202,1009,1LG,1202,jan10(gf10.9-llm)-wa-lg-l12-x2,90%,1.36,118.05,13.407,LG,184,185,,
        1009-1LG-1203,1009,1LG,1203,jan10(gf10.9-llm)-wa-lg-l12-x3,80%,1.1,97.07,11.317,LG,559,560,,
        1009-1TR-1101,1009,1TR,1101,,70%,0.79,9.742,2.568,,,,5,
        1009-1TR-1102,1009,1TR,1102,,60%,1.02,9.632,2.529,,,,6.7,
        1009-1TR-902,1009,1TR,902,,90%,0.87,7.029,1.615,,,,3.4,
        1009-1TR-903,1009,1TR,903,,90%,0.88,14.487,4.233,,1,1,,
        1104-1LG-501,1104,1LG,501,jan11(gf11.4-llm)-wa-lg-l5-x1,90%,1.1,113.96,12.99,LG,1,17,,
        1104-1LG-502,1104,1LG,502,jan11(gf11.4-llm)-wa-lg-l5-x2,80%,1.56,64.166,8.03,LG,102,126,,
        1104-1LG-701,1104,1LG,701,jan11(gf11.4-llm)-wa-lg-l7-x1,70%,1.26,207.92,22.361,LG,,,12,
        1104-1LG-702,1104,1LG,702,jan11(gf11.4-llm)-wa-lg-l7-x2,60%,1.26,113.76,12.98,LG,,,12,
        1105-1LG-601,1105,1LG,601,jan11(gf11.5-llm)-wa-lg-l6-x1,60%,0.88,250.89,26.642,LG,,,24,
        1105-1LG-602,1105,1LG,602,jan11(gf11.5-llm)-wa-lg-l6-x2,70%,0.93,171.9,18.772,LG,198,199,,
        1105-1LG-801,1105,1LG,801,jan11(gf11.5-llm)-wa-lg-l8-x1,80%,1.23,19.789,19.789,LG,751,752,,
        1105-1LG-802,1105,1LG,802,jan11(gf11.5-llm)-wa-lg-l8-x2,90%,1.81,143.35,15.928,LG,25,26,,
        1105-1TR-501,1105,1TR,501,,90%,0.52,8.92,2.279,,104,105,,
        1105-1TR-502,1105,1TR,502,,80%,0.38,7.899,1.921,,7,8,,
        1105-1TR-701,1105,1TR,701,,70%,0.67,11.912,3.329,,16,18,,
        1105-1TR-702,1105,1TR,702,,60%,0.629,8.977,2.3,,143,145,,
        """

    def values(v):
        # coerce CSV fields to int or float where possible, otherwise keep the string
        try:
            return int(v)
        except (TypeError, ValueError):
            try:
                return float(v)
            except (TypeError, ValueError):
                return v

    fatigue_header, *fatigue_rows = [
        l.strip().split(',') for l in fatigue_data.strip().split('\n')
    ]
    # debug(fatigue_header)

    fatigue_dicts = [{k: values(i)
                      for k, i in zip(fatigue_header, r)}
                     for r in fatigue_rows]
    fatigue_dicts = DataTree(**{r['Short Name']: r for r in fatigue_dicts})

    # debug(fatigue_dicts)

    print(mdHeader(3, "CSV Data"), file=report)

    # KeyError: "Key 'elapsedStep' not found in: step, loadLinearLoad1Maximum, displacementLinearDigitalPositionMinimum, rotationRotaryRotationMinimum, torqueRotaryTorqueMinimum, rotationRotaryRotationMaximum, torqueRotaryTorqueMaximum, indices, positionLinearPositionMaximum, rotaryDisplacementRotaryDigitalRotationMaximum, rotaryDisplacementRotaryDigitalRotationMinimum, loadLinearLoadMaximum, loadLinearLoad1Minimum, elapsedCycles, pc|frequency, _matrix, _InstronMatrixData__slices, displacementLinearDigitalPositionMaximum, totalCycles, loadLinearLoadMinimum, positionLinearPositionMinimum, pc|cycleStartTime"

    if testinfo.orientation == 'lg':
        trendsfolder = testfolder.raws.cycles_lg_csv
        loads = ['loadLinearLoadMaximum', 'loadLinearLoadMinimum']
        loadsTracking = 'loadLinearLoad'
    elif testinfo.orientation == 'tr':
        trendsfolder = testfolder.raws.cycles_tr_csv
        loads = ['loadLinearLoad1Maximum', 'loadLinearLoad1Minimum']
        loadsTracking = 'loadLinearLoad1'
    else:
        raise ValueError("Do not know how to process test orientation.", testinfo.orientation)

    debug(testinfo.short, trendsfolder)

    if not trendsfolder.trends or not trendsfolder.trends.exists():
        print("Error: couldn't find trend files! (%s)" % (str(testinfo), ),
              file=report)
        return

    trends = csvread(str(trendsfolder.trends))

    indicies = trends._getslices('step')
    debug(indicies)

    sl_fatigue = indicies[5]

    # sl_uts = indicies[7]

    def fmt(name, N=-25, O=-1):
        if hasattr(name, '__call__'):
            return name(trends, N, O)[1]
        return trends[name].array[N:O]

    columns = [
        'elapsedCycles',
        'step',
        'displacementLinearDigitalPositionMinimum',
        'displacementLinearDigitalPositionMaximum',
        lambda x, N, O:
        ('Min Strain', x['displacementLinearDigitalPositionMinimum'].array[N:O]
         / details.gauge.value),
        lambda x, N, O:
        ('Max Strain', x['displacementLinearDigitalPositionMaximum'].array[N:O]
         / details.gauge.value),
    ] + loads

    def short(name, N=-1, O=-1):
        if hasattr(name, '__call__'):
            return name(trends, N, O)[0]
        return name.replace('Linear',
                            '').replace('displacement',
                                        'disp').replace('DigitalPosition', '')

    fatigue_dict = fatigue_dicts.get(testinfo.short, None)
    quartile1 = 'Cycles (1st Quartile)'
    quartile4 = 'Cycles (4th Quartile)'
    print(mdBlock("Fatigue Data:" + str(fatigue_dict)), file=report)

    if fatigue_dict and fatigue_dict[quartile4]:

        startEndCycleIdx = find_index(
            fatigue_dict[quartile1], trends['elapsedCycles'].array[sl_fatigue])
        lastCycleIdx = find_index(fatigue_dict[quartile4],
                                  trends['elapsedCycles'].array[sl_fatigue])
        N, O = startEndCycleIdx - 1, lastCycleIdx

        if not lastCycleIdx:
            debug(trends['elapsedCycles'].array[sl_fatigue])

        debug(startEndCycleIdx, lastCycleIdx, N, O)

        lastCycleMaxPosition = (
            trends['displacementLinearDigitalPositionMaximum'].
            array[sl_fatigue])[lastCycleIdx]

        debug(lastCycleMaxPosition, fatigue_dict)

        print(mdBlock(
            "**Fatigue Cycle**: {'id': '%s', 'loc': '%s', 'max disp': %.3f, 'max strain':%.3f, 'lastCycle': %d, 'timeLast': %.3f} "
            % (fatigue_dict['Short Name'], fatigue_dict['location'],
               lastCycleMaxPosition, lastCycleMaxPosition /
               details.gauge.value, fatigue_dict[quartile4],
               trends['pc|cycleStartTime'].array[sl_fatigue][lastCycleIdx]), ),
              file=report)
    else:
        N, O = -25, -1

    if fatigue_dict and fatigue_dict['UTS Load']:

        tracking = csvread(str(trendsfolder.tracking))

        track_indicies = trends._getslices('step')
        sl_uts = track_indicies[7]
        debug(track_indicies, sl_uts)

        maxload = data_find_max(tracking[loadsTracking].array[sl_uts])
        debug(maxload)

        maxes = DataTree()
        maxes.load = maxload.value
        maxes.stress = maxes.load / details.measurements.area.value

        maxes.displacement = tracking.displacement.array[sl_uts][maxload.idx]
        maxes.strain = maxes.displacement / details.gauge.value

        debug(maxes)

        print(mdBlock(
            "**UTS Load**: %s: %s, %s, %.3f, %.3f, %.3f, %.3f, , %.3f, %.3f, %d, %.3f "
            % (
                'Name, Orient, UTS Load (manual), UTS Stress (manual), disp, strain, load, stress, idx, time',
                fatigue_dict['Short Name'],
                fatigue_dict['location'],
                fatigue_dict['UTS Load'],
                fatigue_dict['UTS Load'] / details.gauge.value,
                maxes.displacement,
                maxes.strain,
                maxes.load,
                maxes.stress,
                maxload.idx,
                tracking.totalTime.array[sl_uts][maxload.idx],
            )),
              file=report)

    tabledata = OrderedDict([(short(col), fmt(col, N, O)) for col in columns])
    print(tabulate(tabledata, headers="keys", numalign="right",
                   floatfmt=".2f"),
          file=report)

    print(mdBlock("Steps: "), set(trends['step'].array.tolist()), file=report)