Example #1
def checkConstraintErrors(myModel, args):
    """
        Submit a series of data check jobs in Abaqus to analyze input deck representing model.
        If constraint issues are present, attempt to resolve issues and submit again.
        If parts are overlapping, or subsequent data check fails, exit program.
    """
    logger = logging.getLogger()
    try:
        fControl = dataCheck_one(myModel, args)
    except:
        fControl = False
        logger.info("1st data check failed" + '\n')
        logger.info("Constraint issues might be present" + '\n')

    problemConstraints = []
    if not fControl:
        jobName = 'DataCheck'
        odb = odbAccess.openOdb(path=jobName + '.odb')
        try:
            errorNodes = odb.rootAssembly.nodeSets['ErrNodeOverconTieSlave']
            tryModifyingConstraints(myModel, problemConstraints, errorNodes,
                                    True)
        except:
            logger.info("WARNING: Cannot modify constraints" + '\n')
            logger.info(
                "WARNING: Turning ON the option for adjusting initial surface "
                "positions and specifying initial clearances" + '\n')
            for key in myModel.constraints.keys():
                if key[:5] == 'Rigid':
                    continue
                myModel.constraints[key].setValues(adjust=ON)
                if key[0:3] == 'CC-':
                    myModel.constraints[key].setValues(tieRotations=OFF)

        try:
            dataCheck_two(myModel, args)
        except:
            logger.info("2nd data check failed" + '\n')
            jobName2 = 'DataCheck2'
            odb2 = odbAccess.openOdb(path=jobName2 + '.odb')

            try:
                errorNodes = odb2.rootAssembly.nodeSets[
                    'ErrNodeOverconTieSlave']
                tryModifyingConstraints(myModel, problemConstraints,
                                        errorNodes, True)
            except:
                pass
            checkOverlap(odb2, jobName2)
    try:
        dataCheck_final(myModel, args)
    except:
        logger.info("Final data check failed" + '\n')
        jobName3 = 'DataCheckFinal'
        odb3 = odbAccess.openOdb(path=jobName3 + '.odb')
        checkOverlap(odb3, jobName3)

    logger.info(
        "**********************************************************************************"
        + '\n')
Example #2
def checkConstraintErrors(myModel, args):
    """
        Submit a series of data check jobs in Abaqus to analyze input deck representing model.
        If constraint issues are present, attempt to resolve issues and submit again.
        If parts are overlapping, or subsequent data check fails, exit program.
    """
    logger = logging.getLogger()
    try:
        fControl = dataCheck_one(myModel, args)
    except:
        fControl = False
        logger.info("1st data check failed" + '\n')
        logger.info("Constraint issues might be present" + '\n')
    
    problemConstraints = []
    if not fControl:
        jobName = 'DataCheck'
        odb = odbAccess.openOdb(path=jobName + '.odb')
        try:
            errorNodes = odb.rootAssembly.nodeSets['ErrNodeOverconTieSlave']
            tryModifyingConstraints(myModel, problemConstraints, errorNodes, True)
        except:
            logger.info("WARNING: Cannot modify constraints" + '\n')
            logger.info("WARNING: Turning ON the option for adjusting initial surface "
                        "positions and specifying initial clearances" + '\n')
            for key in myModel.constraints.keys():
                if key[:5] == 'Rigid':
                    continue
                myModel.constraints[key].setValues(adjust=ON)
                if key[0:3] == 'CC-':
                    myModel.constraints[key].setValues(tieRotations=OFF)
    
        try:
            dataCheck_two(myModel, args)
        except:
            logger.info("2nd data check failed" + '\n')
            jobName2 = 'DataCheck2'
            odb2 = odbAccess.openOdb(path=jobName2 + '.odb')
            
            try:
                errorNodes = odb2.rootAssembly.nodeSets['ErrNodeOverconTieSlave']
                tryModifyingConstraints(myModel, problemConstraints, errorNodes, True)
            except:
                pass
            checkOverlap(odb2, jobName2) 
    try:
        dataCheck_final(myModel, args)
    except:
        logger.info("Final data check failed" + '\n')
        jobName3 = 'DataCheckFinal'
        odb3 = odbAccess.openOdb(path=jobName3 + '.odb')
        checkOverlap(odb3, jobName3)
        
    logger.info("**********************************************************************************" + '\n')
Example #3
def checkConstraintErrors(myModel, args):
    logger = logging.getLogger()
    try:
        fControl = dataCheck_one(myModel, args)
    except:
        fControl = False
        logger.info("1st data check failed" + '\n')
        logger.info("Constraint issues might be present" + '\n')
    
    problemConstraints = []
    if not fControl:
        jobName = 'DataCheck'
        odb = odbAccess.openOdb(path=jobName + '.odb')
        try:
            errorNodes = odb.rootAssembly.nodeSets['ErrNodeOverconTieSlave']
            tryModifyingConstraints(myModel, problemConstraints, errorNodes, True)
        except:
            logger.info("WARNING: Cannot modify constraints" + '\n')
            logger.info("WARNING: Turning ON the option for adjusting initial surface positions and specifying initial clearances" + '\n')
            for key in myModel.constraints.keys():
                if key[:5] == 'Rigid':
                    continue
                myModel.constraints[key].setValues(adjust=ON)
                if key[0:3] == 'CC-':
                    myModel.constraints[key].setValues(tieRotations=OFF)
    
        try:
            dataCheck_two(myModel, args)
        except:
            logger.info("2nd data check failed" + '\n')
            jobName2 = 'DataCheck2'
            odb2 = odbAccess.openOdb(path=jobName2 + '.odb')
            overlapCheck = False
            
            try:
                errorNodes = odb2.rootAssembly.nodeSets['ErrNodeOverconTieSlave']
                tryModifyingConstraints(myModel, problemConstraints, errorNodes, True)
            except:
                pass
            checkOverlap(odb2, jobName2) 
    try:
        dataCheck_final(myModel, args)
    except:
        logger.info(STR.join(traceback.format_exception(*sys.exc_info())))
        logger.info("Final data check failed" + '\n')
        jobName3 = 'DataCheckFinal'
        odb3 = odbAccess.openOdb(path=jobName3 + '.odb')
        
        checkOverlap(odb3, jobName3)
        
    logger.info("**********************************************************************************" + '\n')
Example #4
def read_lastframe(file_name, step, part):
    odb = openOdb(file_name)
    frame_num = -1
    odb_reader = ReadOdb(file_name)
    args = [odb, step, part, frame_num]
    disps = odb_reader.read_frame(args) 
    return disps
Example #5
def fetch_hist(odb, step_name, node_name, hist_out_name):
    """
    Return a history output from an odb

    Parameters
    ----------
    odb :  str
        odb filename (without the extension)
    step_name :  str
        Name of the step containing the history output
    node_name : str
        Name of the regarded node
    hist_out_name : str
        Name of the history output

    Return
    ------
    tuple

    """
    my_odb = odbAccess.openOdb(path=odb + '.odb')
    step = my_odb.steps[step_name]
    node = step.historyRegions[node_name]
    hist_out = node.historyOutputs[hist_out_name]
    data = hist_out.data
    odbAccess.closeOdb(my_odb)
    return data
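
A short usage sketch (all names here are hypothetical placeholders; in particular, history-region keys typically look like 'Node PART-1-1.1' but depend on your model):

data = fetch_hist('Job-1', 'Step-1', 'Node PART-1-1.1', 'U2')
times = [pair[0] for pair in data]    # data is a tuple of (time, value) pairs
values = [pair[1] for pair in data]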
Example #6
def open_odb(odb_path):
    """
    A more sophisticated open odb function.

    Parameters
    ----------
    odb_path : string
        Path and filename of the database (without the '.odb' extension)

    Attributes
    ----------

    Notes
    -----

    References
    ----------

    """

    base, ext = os.path.splitext(odb_path)
    odb_path = base + '.odb'
    if odbAccess.isUpgradeRequiredForOdb(upgradeRequiredOdbPath=odb_path):
        print('odb %s needs upgrading' % (odb_path,))
        path, file_name = os.path.split(odb_path)
        file_name = os.path.basename(base) + "_upgraded.odb"
        new_odb_path = os.path.join(path, file_name)
        odbAccess.upgradeOdb(existingOdbPath=odb_path, upgradedOdbPath=new_odb_path)
        odb_path = new_odb_path
    odb = odbAccess.openOdb(path=odb_path, readOnly=True)
    return odb
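
Usage sketch (hypothetical path; the function appends '.odb' itself and upgrades the database first if required):

odb = open_odb('results/Job-1')
print(odb.steps.keys())
odb.close()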
Example #7
def getMaxMises(odbName, elsetName):
    """ Print max von Mises location and value given odbName
        and elset (optional)
    """
    elset = elemset = None
    region = "over the entire model"
    """ Open the output database """
    odb = openOdb(odbName)
    assembly = odb.rootAssembly

    """ Check to see if the element set exists
        in the assembly
    """
    if elsetName:
        try:
            elemset = assembly.elementSets[elsetName]
            region = " in the element set : " + elsetName
        except KeyError:
            print 'An assembly level elset named %s does ' \
                  'not exist in the output database %s' \
                  % (elsetName, odbName)
            odb.close()
            return None  # stop here: the odb has been closed
            
    """ Initialize maximum values """
    maxMises = -0.1
    maxElem = 0
    maxStep = "Step-1"
    maxFrame = -1
    Stress = 'S'
    isStressPresent = 0
    for step in odb.steps.values():
        print 'Processing Step:', step.name
        for frame in step.frames:
            allFields = frame.fieldOutputs
            if (allFields.has_key(Stress)):
                isStressPresent = 1
                stressSet = allFields[Stress]
                if elemset:
                    stressSet = stressSet.getSubset(
                        region=elemset)      
                for stressValue in stressSet.values:                
                    if (stressValue.mises > maxMises):
                        maxMises = stressValue.mises
                        maxElem = stressValue.elementLabel
                        maxStep = step.name
                        maxFrame = frame.incrementNumber
    if isStressPresent:
        print 'Maximum von Mises stress %s is %f in element %d' % (
            region, maxMises, maxElem)
        print 'Location: frame # %d  step:  %s ' % (maxFrame, maxStep)
    else:
        print 'Stress output is not available in ' \
              'the output database : %s\n' % (odb.name)

    """ Close the output database before exiting the program """
    odb.close()
    return (maxMises, maxElem)
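
Usage sketch with a hypothetical odb and assembly-level element set; pass an empty elset name to scan the whole model:

maxMises, maxElem = getMaxMises('Job-1.odb', 'CRITICAL_ELEMENTS')
print('Max Mises: %f in element %d' % (maxMises, maxElem))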
Example #8
def CreateOverlapPNG(jobName, overlapCheck, root):
    """ Generate PNG files displaying locations of overlapping nodes relative to entire design. """
    try:
        myOdb = odbAccess.openOdb(path=jobName + '.odb')

        resultsDir = os.path.join(root, jobName)
        if not os.path.exists(resultsDir):
            os.mkdir(resultsDir)

        mainDir = os.path.join(root, jobName, "Contour_and_BC_plots")
        if not os.path.exists(mainDir):
            os.mkdir(mainDir)
        os.chdir(mainDir)

        myViewport = session.viewports['Viewport: 1']
        myViewport.setValues(displayedObject=myOdb)

        for k in range(len(overlapCheck.elements)):
            overlappingElems = overlapCheck.elements[k]
            for i in range(len(overlappingElems)):
                overlappingElem = overlappingElems[i]
                highlight(overlappingElem)

        myViewport.view.setValues(session.views['Iso'])
        myViewport.view.zoom(0.8)
        session.printToFile("Overlapping_Elements_1", PNG, (myViewport, ))

        myViewport.view.setValues(cameraPosition=(987.505, -35.8282, 7.68834),
                                  cameraUpVector=(0, 1, 0))
        myViewport.view.fitView()
        session.printToFile("Overlapping_Elements_2", PNG, (myViewport, ))

        session.viewports['Viewport: 1'].view.setValues(
            nearPlane=757.99,
            farPlane=1200.36,
            width=542.18,
            height=365.72,
            cameraPosition=(0.996667, 36.7201, -977.094),
            cameraUpVector=(0.0, 1.0, 0.0),
            cameraTarget=(-0.87486, -35.267, 7.11584))
        myViewport.view.fitView()
        session.printToFile("Overlapping_Elements_3", PNG, (myViewport, ))

        session.viewports['Viewport: 1'].view.setValues(
            nearPlane=759.096,
            farPlane=1215.06,
            width=542.971,
            height=366.255,
            cameraPosition=(-91.9079, -1009.75, -32.4658),
            cameraUpVector=(-1.0, 0.0, 0.0),
            cameraTarget=(1.67948, -27.8817, -0.616374))
        myViewport.view.fitView()
        session.printToFile("Overlapping_Elements_4", PNG, (myViewport, ))

    except:
        cad_library.exitwitherror('Error in creating overlap PNG files.', -1,
                                  'AbaqusDataCheck.py')
    os.chdir(root)
Example #9
def main(argv):
    odb_file, step_key, out_file = process_command_line_input(argv)

    abaqus_odb = openOdb(odb_file)
    abaqus_frequency_step = abaqus_odb.steps[step_key]

    frequencies = get_frequencies(abaqus_frequency_step)

    write_csv(out_file, frequencies)
Example #10
def main(argv):
    odb_file, step_key, hist_reg, var_name, out_file = \
        process_command_line_input(argv)

    abaqus_odb = openOdb(odb_file)
    abaqus_ss_dynamics_step = abaqus_odb.steps[step_key]
    abaqus_history_region = abaqus_ss_dynamics_step.historyRegions[hist_reg]
    history_data = abaqus_history_region.historyOutputs[var_name].data

    write_csv(out_file, history_data)
Example #11
def safeOpenOdb(odb_path):
    # upgrade odb if required (original simulation executed in an older Abaqus version)
    if isUpgradeRequiredForOdb(odb_path):
        upgradeOdb(odb_path, odb_path + '_')
        shutil.move(odb_path + '_.odb', odb_path)
    try:
        odb = openOdb(odb_path)
    except OdbError, e:
        print str(e)
        exit(1)
    return odb  # the opened handle was previously discarded; return it to the caller
Example #12
def add_node_set(odb_file_name, node_set_name, labels, instance_name=None):
    odb = odbAccess.openOdb(odb_file_name, readOnly=False)
    if instance_name:
        base = odb.rootAssembly.instances[instance_name]
    else:
        base = odb.rootAssembly
    print base.nodeSets
    if node_set_name not in base.nodeSets:
        base.NodeSetFromNodeLabels(name=node_set_name, nodeLabels=labels)
    odb.save()
    odb.close()
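
Usage sketch (file, set name, labels and instance are hypothetical):

add_node_set('Job-1.odb', 'MONITOR_NODES', [1, 5, 12], instance_name='PART-1-1')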
Example #13
 def __init__(self, filename, step=[], frame_num=[], instance_name=[]):
     self.file_name = filename
     self.odb = odbAccess.openOdb(filename)
     self.assembly = self.odb.rootAssembly
     self.step_name = []
     self.frame = []
     self.instance_name = []
     if step != []:
         self.defineStep(step, frame_num)
     if (step == [] and frame_num != []):
         warnings.warn('Frame cannot be defined if no step is selected')
     if instance_name != []:
         self.defineInstance(instance_name)
Example #14
def get_nodal_coordinates_from_node_set(odb_file_name,
                                        node_set_name,
                                        instance_name=None):
    odb = odbAccess.openOdb(odb_file_name, readOnly=True)
    if instance_name:
        # index into the instance-level node sets; the original grabbed the whole instance
        node_set = odb.rootAssembly.instances[instance_name].nodeSets[node_set_name]
    else:
        node_set = odb.rootAssembly.nodeSets[node_set_name]
    node_dict = {}
    for node in node_set.nodes:
        node_dict[node.label] = node.coordinates
    odb.close()
    return node_dict
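
Usage sketch (hypothetical names); the result maps node labels to coordinate triples:

coords = get_nodal_coordinates_from_node_set('Job-1.odb', 'MONITOR_NODES')
for label, xyz in coords.items():
    print('%d: %s' % (label, xyz))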
Example #15
def write_case_hardening_data_along_path(data_odb_name,
                                         path,
                                         pickle_name,
                                         session,
                                         fields,
                                         step_name=None,
                                         frame_number=None,
                                         output_position=ELEMENT_NODAL):
    odb = odbAccess.openOdb(data_odb_name)

    session.Viewport(name='Viewport: 1',
                     origin=(0.0, 0.0),
                     width=309.913116455078,
                     height=230.809509277344)
    session.viewports['Viewport: 1'].makeCurrent()
    session.viewports['Viewport: 1'].maximize()
    o7 = session.odbs[session.odbs.keys()[0]]
    session.viewports['Viewport: 1'].setValues(displayedObject=o7)

    if step_name is None:
        step_name = odb.steps.keys()[-1]

    step_index = odb.steps.keys().index(step_name)
    if frame_number is None:
        frame_number = len(odb.steps[step_name].frames)
    session.viewports['Viewport: 1'].odbDisplay.setFrame(step=step_index,
                                                         frame=frame_number)
    path.data[:, 0:2] -= 1e-4
    root_path = create_path(path.data, 'longitudinal_path', session)
    data_dict = {}

    for field in fields:
        if field == 'S':
            data = get_stress_tensors_from_path(root_path, session,
                                                output_position)
            nx, ny, nz = path.normal
            data_dict['normal_stress'] = (
                (data[:, 1] * nx + data[:, 4] * ny + data[:, 5] * nz) * nx +
                (data[:, 4] * nx + data[:, 2] * ny + data[:, 6] * nz) * ny +
                (data[:, 5] * nx + data[:, 6] * ny + data[:, 3] * nz) * nz)

        else:
            data = get_scalar_field_from_path(root_path, session, field,
                                              output_position)
        data_dict[field] = data[:, 1:]
        if 'r' not in data_dict:
            data_dict['r'] = data[:, 0]

    with open(pickle_name, 'wb') as result_pickle_handle:
        pickle.dump(data_dict, result_pickle_handle)
    odb.close()
Example #16
 def write_all_frame(self, step_name, part_name, folder):
     file_names = []
     for file in self.odb_files:
         odb = openOdb(file)
         total_frames = len(odb.steps[step_name].frames)
         for frame_num in range(total_frames):
             args = [odb, step_name, part_name, frame_num]
             positions = self.read_frame(args)
             file_name = folder + '/' + file.split('.')[0] + '_' + \
                         str(frame_num) + '.txt'
             write_to_file(file_name, positions, 'write')
             file_names.append(file_name)
         odb.close()
     return file_names
Example #17
def CreateOverlapPNG(jobName, overlapCheck, root):
    logger = logging.getLogger()
    try:
        myOdb = odbAccess.openOdb(path=jobName + '.odb')
        save2fileName=jobName.replace(' ','') + "_"

        resultsDir = os.path.join(root,"Analysis","Abaqus", jobName)
        if not os.path.exists(resultsDir):
            os.mkdir(resultsDir)
        
        mainDir = os.path.join(root,"Analysis","Abaqus", jobName, "Contour_and_BC_plots")
        if not os.path.exists(mainDir):
            os.mkdir(mainDir)

        os.chdir(mainDir)

        myViewport = session.viewports['Viewport: 1']
        myViewport.setValues(displayedObject=myOdb)

        for k in range(len(overlapCheck.elements)):
            overlappingElems = overlapCheck.elements[k]
            for i in range(len(overlappingElems)):
                overlappingElem = overlappingElems[i]
                highlight(overlappingElem)

        myViewport.view.setValues(session.views['Iso'])
        myViewport.view.zoom(0.8)
        session.printToFile("Overlapping_Elements_1", PNG, (myViewport,))

        myViewport.view.setValues(cameraPosition=(987.505, -35.8282, 7.68834), cameraUpVector=(0, 1, 0))
        myViewport.view.fitView()

        session.printToFile("Overlapping_Elements_2", PNG, (myViewport,))

        session.viewports['Viewport: 1'].view.setValues(nearPlane=757.99, farPlane=1200.36, width=542.18, height=365.72, cameraPosition=(0.996667, 
            36.7201, -977.094), cameraUpVector=(0.0, 1.0, 0.0), cameraTarget=(-0.87486, -35.267, 7.11584))
        myViewport.view.fitView()

        session.printToFile("Overlapping_Elements_3", PNG, (myViewport,))

        session.viewports['Viewport: 1'].view.setValues(nearPlane=759.096, farPlane=1215.06, width=542.971, height=366.255, cameraPosition=(-91.9079, 
            -1009.75, -32.4658), cameraUpVector=(-1.0, 0.0, 0.0), cameraTarget=(1.67948, -27.8817, -0.616374))
        myViewport.view.fitView()

        session.printToFile("Overlapping_Elements_4", PNG, (myViewport,))
        
    except:
        logger.error('Error in creating overlap PNG files. \n')
        pass
    os.chdir(root)  
Example #18
def open_odb(odbPath):
    base, ext = os.path.splitext(odbPath)
    odbPath = base + '.odb'
    new_odbPath = None
    if odbAccess.isUpgradeRequiredForOdb(upgradeRequiredOdbPath=odbPath):
        print('odb %s needs upgrading' % (odbPath, ))
        path, file_name = os.path.split(odbPath)
        file_name = os.path.basename(base) + "_upgraded.odb"
        new_odbPath = os.path.join(path, file_name)
        odbAccess.upgradeOdb(existingOdbPath=odbPath,
                             upgradedOdbPath=new_odbPath)
        odbPath = new_odbPath
    odb = odbAccess.openOdb(path=odbPath, readOnly=True)
    return odb
Example #19
def xyDataFFT(xyData):
    """perform fft on xyData and produce frequency spectrum output data """
    signalData = numpy.array(
        [dataPair[1] for dataPair in xyData.data if dataPair[0] > 0.1],
        dtype=float)
    timeData = [dataPair[0] for dataPair in xyData.data if dataPair[0] > 0.1]
    timeIncData = []
    for i in range(len(timeData) - 1):
        timeIncData.append(timeData[i + 1] - timeData[i])
    fourier = numpy.abs(numpy.fft.fft(signalData))
    n = signalData.size
    timeStep = sum(timeIncData) / len(timeIncData)
    freq = numpy.fft.fftfreq(n, d=timeStep)
    description = 'FourierSpectrum %s' % xyData.positionDescription
    elementLabel = xyData.positionDescription.split(' ')[-1]
    newData = []
    newData.append((freq[0], fourier[0] / n))
    for i in range(1, n / 2 - 1):
        newData.append((freq[i], 2 * fourier[i] / n))
    newData.append((freq[n / 2 - 1], fourier[n / 2 - 1] / n))
    session.XYData(data=newData, name='Spectrum_%s' % elementLabel)
    maxValue = sorted(newData[1:], key=lambda dat: dat[1])[-1]
    return maxValue

#
# S T A R T
#
if __name__ == '__main__':
    odbName = 'Round.odb'
    odb = odbAccess.openOdb(path=odbName, readOnly=True)
    instance = odb.rootAssembly.instances['VOLUME-1']
    outputSet = instance.elementSets['HISTORYOUTPUT']
    step = odb.steps['Flow']
    isData = False
    for element in outputSet.elements:
        elementLabel = element.label
        historyPoint = odbAccess.HistoryPoint(element=element)
        if step.getHistoryRegion(
                point=historyPoint).historyOutputs.has_key('PRESSURE'):
            isData = True
            history = step.getHistoryRegion(
                point=historyPoint).historyOutputs['PRESSURE']
            historyData = history.data
            historyName = history.name
            historyDescription = history.description
            session.XYData(data=historyData,
                           name='%s-%s' % (historyName, elementLabel),
                           positionDescription='%s at element %s' %
                           (historyDescription, elementLabel))
Example #20
 def read_start_end_pos(self, step_name, node_label, part_name):
     start_positions = []
     end_positions = []
     for file in self.odb_files:
         odb = openOdb(file)
         total_frames = len(odb.steps[step_name].frames)
         for frame_num in range(total_frames):
             args = [odb, step_name, frame_num, node_label, part_name]
             
             pos, disp = self.readpoint_specific_frame(args)
             
             start_positions.append(pos) 
             end_positions.append(pos + disp)
            
     return start_positions, end_positions 
Example #21
def CreateOverlapPNG(jobName, overlapCheck, root):
    """ Generate PNG files displaying locations of overlapping nodes relative to entire design. """
    try:
        myOdb = odbAccess.openOdb(path=jobName + '.odb')

        resultsDir = os.path.join(root, jobName)
        if not os.path.exists(resultsDir):
            os.mkdir(resultsDir)
        
        mainDir = os.path.join(root, jobName, "Contour_and_BC_plots")
        if not os.path.exists(mainDir):
            os.mkdir(mainDir)
        os.chdir(mainDir)

        myViewport = session.viewports['Viewport: 1']
        myViewport.setValues(displayedObject=myOdb)

        for k in range(len(overlapCheck.elements)):
            overlappingElems = overlapCheck.elements[k]
            for i in range(len(overlappingElems)):
                overlappingElem = overlappingElems[i]
                highlight(overlappingElem)

        myViewport.view.setValues(session.views['Iso'])
        myViewport.view.zoom(0.8)
        session.printToFile("Overlapping_Elements_1", PNG, (myViewport,))

        myViewport.view.setValues(cameraPosition=(987.505, -35.8282, 7.68834), cameraUpVector=(0, 1, 0))
        myViewport.view.fitView()
        session.printToFile("Overlapping_Elements_2", PNG, (myViewport,))

        session.viewports['Viewport: 1'].view.setValues(nearPlane=757.99, farPlane=1200.36, width=542.18,
                                                        height=365.72, cameraPosition=(0.996667, 36.7201, -977.094),
                                                        cameraUpVector=(0.0, 1.0, 0.0),
                                                        cameraTarget=(-0.87486, -35.267, 7.11584))
        myViewport.view.fitView()
        session.printToFile("Overlapping_Elements_3", PNG, (myViewport,))

        session.viewports['Viewport: 1'].view.setValues(nearPlane=759.096, farPlane=1215.06, width=542.971,
                                                        height=366.255, cameraPosition=(-91.9079, -1009.75, -32.4658),
                                                        cameraUpVector=(-1.0, 0.0, 0.0),
                                                        cameraTarget=(1.67948, -27.8817, -0.616374))
        myViewport.view.fitView()
        session.printToFile("Overlapping_Elements_4", PNG, (myViewport,))
        
    except:
        cad_library.exitwitherror('Error in creating overlap PNG files.', -1, 'AbaqusDataCheck.py')
    os.chdir(root)  
Example #22
def main():

    if not len(sys.argv) == 2:
        print('usage:\n    {0} <job name>'.format(sys.argv[0]),
              file=sys.stderr)
        sys.exit(2)
    odbpath = sys.argv[1] + '.odb'
    jacpath = sys.argv[1] + '.jac'

    print('%s -> %s' % (
        odbpath,
        jacpath,
    ))

    odb = oa.openOdb(odbpath, readOnly=True)
    instances = odb.rootAssembly.instances

    b = dict()
    b['CPx4R'] = 0.25 * np.array((
        (-1., +1., +1., -1.),
        (-1., -1., +1., +1.),
    ), )

    with open(jacpath, 'w') as txt:
        txt.write('** inverse jacobian matrix\n')
        for element in instances['PART-1-1'].elementSets[HYDRA].elements:
            if element.type in ['CPS4R', 'CPE4R', 'CPE4RT']:
                jac = np.zeros((2, 2))
                xy = np.zeros((4, 2))
                inames = element.instanceNames
                conn = element.connectivity
                for n in range(4):
                    xy[n, :] = instances[inames[n]].getNodeFromLabel(
                        conn[n]).coordinates[0:2]
                jac[:, :] -= xy[0, :]
                jac[0, :] += xy[1, :]
                jac[1, :] -= xy[1, :]
                jac[:, :] += xy[2, :]
                jac[0, :] -= xy[3, :]
                jac[1, :] += xy[3, :]
                jac *= 0.25
                grad = np.linalg.solve(jac, b['CPx4R'])
                txt.write("** {0}\n".format(element.type))
                txt.write("%d\n" % element.label)
                np.savetxt(txt, conn, fmt='%d')
                np.savetxt(txt, grad)
            else:
                raise NotImplementedError, element.type
Example #23
def abaqusopen(filename):
    """
    Open an odb file and retrieve handles to its main result containers
    :param filename: odb file name without the '.odb' extension
    :return: odb, root assembly, step keys, last step, last frame, field outputs
    """
    from odbAccess import openOdb
    odbname = filename + '.odb'
    odb = openOdb(odbname)
    assem = odb.rootAssembly
    stepKey = odb.steps.keys()
    steps = odb.steps[stepKey[-1]]
    frames = steps.frames[-1]
    FieldOut = frames.fieldOutputs

    return odb, assem, stepKey, steps, frames, FieldOut
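
Usage sketch with a hypothetical job name; the returned handles point at the last step and its last frame:

odb, assem, stepKey, steps, frames, FieldOut = abaqusopen('Job-1')
if 'S' in FieldOut.keys():
    stress = FieldOut['S']
odb.close()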
Example #24
def create_empty_odb(new_odb_file_name, old_odb_file_name):
    """
    :param new_odb_file_name:   Filename including path for the new odb
    :param old_odb_file_name:   Filename including path for the odb file containing the geometry
    :return:                    Nothing
    """

    new_odb = odbAccess.Odb(name=os.path.basename(new_odb_file_name),
                            path=new_odb_file_name)
    old_odb = odbAccess.openOdb(old_odb_file_name, readOnly=True)
    # Copying the part and copying the nodes in that part
    for part_name in old_odb.parts.keys():
        old_part = old_odb.parts[part_name]
        new_part = new_odb.Part(name=part_name,
                                embeddedSpace=THREE_D,
                                type=old_part.type)
        _copy_node_and_elements(new_part, old_part)
        _copy_sets(new_part, old_part)
        new_odb.update()
        new_odb.save()

    # Copying the instances and copying the nodes
    for instance_name in old_odb.rootAssembly.instances.keys():
        old_instance = old_odb.rootAssembly.instances[instance_name]
        try:
            new_part = new_odb.parts[instance_name]
        except KeyError:
            try:
                new_part = new_odb.Part(name=instance_name,
                                        embeddedSpace=THREE_D,
                                        type=old_odb.parts[instance_name].type)
            except KeyError:
                new_part = new_odb.Part(name=instance_name,
                                        embeddedSpace=THREE_D,
                                        type=DEFORMABLE_BODY)

        # Copying the instance nodes to the part with the same name
        _copy_node_and_elements(new_part, old_instance)

        new_instance = new_odb.rootAssembly.Instance(
            name=instance_name, object=new_odb.parts[instance_name])
        _copy_sets(new_instance, old_instance)
        new_odb.update()
        new_odb.save()
    new_odb.close()
    old_odb.close()
Example #25
def add_element_set(odb_file_name,
                    element_set_name,
                    labels,
                    instance_name=None):
    odb = odbAccess.openOdb(odb_file_name, readOnly=False)
    if instance_name:
        base = odb.rootAssembly.instances[instance_name]
    else:
        if len(odb.rootAssembly.instances) == 1:
            base = odb.rootAssembly.instances[
                odb.rootAssembly.instances.keys()[0]]
        else:
            raise ValueError(
                'odb has multiple instances, please specify an instance')
    if element_set_name not in base.elementSets:
        base.ElementSetFromElementLabels(name=element_set_name,
                                         elementLabels=labels)
    odb.save()
    odb.close()
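
Usage sketch (hypothetical file, set name and element labels); instance_name may be omitted only for single-instance models:

add_element_set('Job-1.odb', 'BALLAST_ELEMENTS', [10, 11, 12])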
Example #26
def fetch_eigenv(odb_name, step_name, n_eigen):
    """
    Get eigenvalues.

    Return the eigenvalues of a perturbation buckling analysis from an abaqus database.

    Parameters
    ----------
    odb_name : str
        Name of the odb file (without the '.odb' extension) containing the eigenvalues
    step_name : string
        Name of the step
    n_eigen : int
        Number of eigenvalues to return

    Attributes
    ----------

    Notes
    -----

    References
    ----------

    """

    bckl_odb = odbAccess.openOdb(path=odb_name + '.odb')
    bckl_step = bckl_odb.steps[step_name]

    # Gather the eigenvalues
    eigenvalues = ()
    eigen_string = ""
    for J_eigenvalues in range(1, n_eigen + 1):
        current_eigen = float(bckl_step.frames[J_eigenvalues].description.split()[-1])
        eigenvalues = eigenvalues + (current_eigen,)
        eigen_string = eigen_string + "%.3E " % current_eigen

    # Close the odb
    odbAccess.closeOdb(bckl_odb)

    # Return variables
    return eigenvalues, eigen_string
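
Usage sketch, assuming a hypothetical buckling odb 'buckle.odb' with a perturbation step 'Buckle-Step':

eigenvalues, eigen_string = fetch_eigenv('buckle', 'Buckle-Step', 5)
print(eigen_string)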
Example #27
def write_stress_pickles(stress_odb_filename, static_pickle_filename,
                         cyclic_pickle_filename):
    stress_odb = odbAccess.openOdb(stress_odb_filename)

    instance_name = stress_odb.rootAssembly.instances.keys()[0]
    element_set_names = stress_odb.rootAssembly.instances[
        instance_name].elementSets.keys()
    element_set_name = None
    for element_set_name in element_set_names:
        if 'ballast_elements' in element_set_name.lower():
            break
    stress_odb.close()
    static_stresses = read_field_from_odb('S',
                                          stress_odb_filename,
                                          step_name='gravity',
                                          set_name=element_set_name,
                                          instance_name=instance_name)
    loading_stresses = read_field_from_odb('S',
                                           stress_odb_filename,
                                           step_name='loading',
                                           set_name=element_set_name,
                                           instance_name=instance_name)

    cyclic_stresses = loading_stresses - static_stresses

    with open(static_pickle_filename, 'wb') as static_pickle:
        pickle.dump(
            {
                'data': static_stresses,
                'instance': instance_name,
                'element_set': element_set_name
            }, static_pickle)

    with open(cyclic_pickle_filename, 'wb') as cyclic_pickle:
        pickle.dump(
            {
                'data': cyclic_stresses,
                'instance': instance_name,
                'element_set': element_set_name
            }, cyclic_pickle)
Example #28
def main():
    pickle_file_name = sys.argv[-1]
    with open(pickle_file_name, 'r') as parameter_pickle:
        parameters = pickle.load(parameter_pickle)

    odb_filename = str(parameters['odb_filename'])
    path_points_filename = str(parameters['path_points_filename'])
    variable = str(parameters['variable'])
    output_position = output_positions[str(parameters['output_position'])]
    data_filename = str(parameters['data_filename'])
    component = None
    if 'component' in parameters:
        component = str(parameters['component'])

    odb = odbAccess.openOdb(odb_filename)
    session.Viewport(name='Viewport: 1', origin=(0.0, 0.0), width=309.913116455078,
                     height=230.809509277344)
    session.viewports['Viewport: 1'].makeCurrent()
    session.viewports['Viewport: 1'].maximize()
    o7 = session.odbs[session.odbs.keys()[0]]
    session.viewports['Viewport: 1'].setValues(displayedObject=o7)

    if 'step_name' not in parameters:
        step_name = odb.steps.keys()[-1]
    else:
        step_name = str(parameters['step_name'])

    step_index = odb.steps.keys().index(step_name)
    if 'frame_number' not in parameters:
        frame_number = len(odb.steps[step_name].frames)
    else:
        frame_number = parameters['frame_number']
    session.viewports['Viewport: 1'].odbDisplay.setFrame(step=step_index, frame=frame_number)

    path_points = np.load(path_points_filename)
    path = create_path(path_points, 'path', session)
    data = get_data_from_path(path, session, variable, component, output_position=output_position)
    np.save(data_filename, data)
    odb.close()
Example #29
def pcklcreate(workdir, name):
    # ABAQUS/PYTHON POST PROCESSING SCRIPT
    # Run using abaqus python / abaqus viewer -noGUI / abaqus cae -noGUI
    print("Initiation of pckl creation: " + name + ".pckl")
    print

    # Opening the Odb File
    odb = openOdb(workdir + '/' + name + '.odb')
    print("odb = openOdb(workdir + '/' + name + '.odb')")

    # Finding back the position of the reference node of the indenter. Its label is stored inside a node set named RP_INDENTER.

    ref_node_label = odb.rootAssembly.instances['I_INDENTER'].nodeSets[
        'RP_INDENTER'].nodes[0].label
    print(
        "ref_node_label = odb.rootAssembly.instances['I_INDENTER'].nodeSets['RP_INDENTER'].nodes[0].label"
    )

    # Getting back the reaction forces along Y (RF2) and displacements along Y (U2) where they are recorded.
    RF2 = gho(odb, 'RF2')
    U2 = gho(odb, 'U2')
    print("RF2 = gho(odb, 'RF2')")
    print("U2  = gho(odb, 'U2')")

    # Packing data
    data = {'ref_node_label': ref_node_label, 'RF2': RF2, 'U2': U2}
    print("data = {'ref_node_label': ref_node_label, 'RF2':RF2, 'U2':U2}")

    # Dumping data
    dump(data, workdir + '/' + name + '.pckl')
    print("dump(data, workdir + '/' + name + '.pckl')")

    # Closing Odb
    odb.close()
    print("odb.close()")

    print
    print("ERROR REPORT:")
Example #30
def main():

    path = sys.argv[1]

    dbpath = path + '.odb'
    savepath = path + '.npz'

    odb = oa.openOdb(dbpath, readOnly=True)

    print('-'*60)
    print('name:', odb.name)
    print('analysisTitle:', odb.analysisTitle)
    print('creationTime:', odb.jobData.creationTime)

    step = odb.steps['Step-1']

    print('step name:', step.name)
    print('step procedure:', step.procedure)
    print('step description:', step.description)
    print('step domain:', step.domain)
    print('step frames: %d' % (len(step.frames), ))

    t = np.fromiter((i.frameValue for i in step.frames), dtype=np.float)

    res = {'t': t}
    for hr in step.historyRegions.keys():
        label = genlab(hr)
        if not label:
            continue
        houts = step.historyRegions[hr].historyOutputs
        for key in houts.keys():
            t1, v = np.asarray(houts[key].data).T
            np.testing.assert_array_equal(t, t1)
            res[label+key] = v

    print('saving %s to %s' % (sorted(res.keys()), savepath))
    np.savez(savepath, **res)
    print('-'*60)
Example #31
def write_dante_pickle(odb_file_name, step_name, pickle_file_name, fatigue_set_name=None, instance_name=None,
                       coordinate_system=None):
    field_vars = ['HV']
    dante_dict = {}

    if instance_name is None:
        odb = odbAccess.openOdb(odb_file_name, readOnly=True)
        instance_names = odb.rootAssembly.instances.keys()
        if len(instance_names) == 1:
            instance_name = instance_names[0]
        else:
            raise ValueError('odb has multiple instances, please specify an instance')
    for var in field_vars:
        dante_dict[var] = read_field_from_odb(var, odb_file_name, step_name, frame_number=0,
                                              element_set_name=fatigue_set_name, instance_name=instance_name)
    residual_stress, n, e = read_field_from_odb('S', odb_file_name, step_name, frame_number=0,
                                                element_set_name=fatigue_set_name,  instance_name=instance_name,
                                                coordinate_system=coordinate_system, get_position_numbers=True)

    dante_dict['S'] = residual_stress

    with open(pickle_file_name, 'w') as pickle_handle:
        pickle.dump(dante_dict, pickle_handle)
Example #32
def add_node_set_to_odb(odb_file_name,
                        node_set_name,
                        x_min=-1e99,
                        x_max=1e99,
                        y_min=-1e99,
                        y_max=1e99,
                        z_min=-1e99,
                        z_max=1e99,
                        instance_name=None):
    odb = odbAccess.openOdb(odb_file_name, readOnly=False)
    if instance_name is None:
        instance_name = odb.rootAssembly.instances.keys()[0]
    nodes = odb.rootAssembly.instances[instance_name].nodes
    set_node_labels = []
    for node in nodes:
        x, y, z = node.coordinates
        if x_min < x < x_max and y_min < y < y_max and z_min < z < z_max:
            set_node_labels.append(node.label)

    odb.rootAssembly.instances[instance_name].NodeSetFromNodeLabels(
        name=node_set_name, nodeLabels=set_node_labels)
    odb.save()
    odb.close()
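
Usage sketch with hypothetical names: collect every node with y > 0 into a new set on the first instance:

add_node_set_to_odb('Job-1.odb', 'TOP_HALF_NODES', y_min=0.0)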
Example #33
def extract_odb(path_to_odb):
    stepName = 'Loading Step'
    historyRegionName = 'Node Rigid Loading Part-1.1'
    historyOutputName = 'RF2'

    # open odb file
    ODBFile = odbAccess.openOdb(path=path_to_odb)

    #
    # assign step object
    # print ODBFile.steps.keys()
    step = ODBFile.steps[stepName]
    #
    # assign historyRegion object
    # print step.historyRegions.keys()
    historyRegion = step.historyRegions[historyRegionName]
    #
    # assign historyOutput object
    # print historyRegion.historyOutputs.keys()
    data = np.array(historyRegion.historyOutputs[historyOutputName].data)

    data[:, 1] = data[:, 1] * -1

    return data
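
Usage sketch; the step and history-region names are hard-coded above, so the target odb must contain them (the file name here is hypothetical):

rf2 = extract_odb('LoadTest.odb')
np.savetxt('rf2_history.csv', rf2, delimiter=',')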
Example #34
	fh.close()
	print x
	von = open('C:\Users\celestink\Documents\TestingStation\EllptContact3\StiffInclusions\MiseMax_'+str(p)+'.txt')
	for line in von.readlines():
		yv = [value for value in line.split()]
		xv.append(int(yv[1]))
	von.close()
	print xv
	for k in range(m[p][0],m[p][0]+m[p][1]):
	# Create a file that contains the maximum stresses for every standard deviation given to random stiffness in the ties
	
		l=k-m[p][0]
		newSetElements2 = (x[l],)
		newSetElementsv2 = (xv[l],)
		fileName = 'SimTrack1_k_'+str(k)+'.odb' # name of odb file
		odb1= odbAccess.openOdb(path = fileName, readOnly = False) #open odb 
		
		# ElementSetFromElementLabels constructor to build new set (NewElSets) for max principal stress element 
		#that was found with soft inclusion into instance (RAILINST)
		
		odb1.rootAssembly.ElementSetFromElementLabels(name = 'NewElSetst2', elementLabels =
		(('RAILINST',newSetElements2),))
		
		# ElementSetFromElementLabels constructor to build new set (NewElSetsvon) for max von Mises stress element 
		#that was found with soft inclusion into instance (RAILINST)		
		odb1.rootAssembly.ElementSetFromElementLabels(name = 'NewElSetsvoM2', elementLabels =
		(('RAILINST',newSetElementsv2),))
		#
		#Maximum stress:
		InclElem=odb1.rootAssembly.elementSets['NewElSetst2']
		stresses=odb1.steps['Step-1'].frames[-1].fieldOutputs['S']
Example #35
def main():
    logger.info("**********************************************************************************" + '\n')
    logger.info("**********************************************************************************" + '\n')
    logger.info("*************************   STARTING FEA MODEL-BASED   ***************************" + '\n')
    logger.info("**********************************************************************************" + '\n')
    logger.info("**********************************************************************************" + '\n')
    logger.info("Initializing Abaqus/CAE" + '\n')
    logger.info("**********************************************************************************" + '\n')
    logger.info("**********************************************************************************" + '\n')

    # take command line arguments
    usage = "usage: abaqus cae script=AbaqusMain.py -- [options]"
    parser = OptionParser(usage=usage)
    parser.add_option("-o", "--meshOnly", default=False, action="store_true",
                      help="""disable assembly of faces from datum points and creation
                      of loads and BCs (necessary for META 13.13)""")
    parser.add_option("-b", "--meshAndBC", default=False, action="store_true",
                      help="Create mesh and generate loads/boundary conditions. Do not run analysis.")
    parser.add_option("-s", "--standard", default=False, action="store_true",
                      help="run a standard FEA analysis")
    parser.add_option("-i", "--dynamicImplicit", default=False, action="store_true",
                      help="run a dynamic implicit FEA analysis")
    parser.add_option("-e", "--dynamicExplicit", default=False, action="store_true",
                      help="run a dynamic explicit FEA analysis")
    parser.add_option("-m", "--modal", default=False, action="store_true",
                      help="run a modal analysis")
    parser.add_option("-p", "--parallelCores", default=1, type=int,
                      help="number of CPU cores to use in solver")
    parser.add_option("-r", "--ramAllocated", default=90, type=int,
                      help="integer amount of memory allocated to solver, defined in units memoryUnits")
    parser.add_option("-u", "--memoryUnits", default="PERCENTAGE",
                      choices=["PERCENTAGE", "MEGA_BYTES", "GIGA_BYTES"],
                      help="units of memory allocated to solver")
    parser.add_option("-t", "--separationTolerance", default=1E-3, type=float,
                      help="tolerance for separation between faces when creating tie constraints")
    parser.add_option("-q", "--errorIndicator", default="ENDENERI",
                      choices=["MISESERI", "ENDENERI", "CPRESSERI", "CSHEARERI", "PEEQUERI",
                               "PEERI", "CEERI", "HFLERI", "EFLERI", "EPGERI"],
                      help="""error indicator variable for adaptive meshing;
                      not all variables are supported yet
                      (see Section 4.1.4 of the Abaqus Analysis Manual)""")
    parser.add_option("-v", "--rigidParts", default=True, action="store_true",
                      help="use Rigid parts as specified in CADAssembly.xml")

    if sys.argv[2] == '-noGUI':
        (args, testBenchArg) = parser.parse_args(sys.argv[8:])
    else:
        (args, testBenchArg) = parser.parse_args(sys.argv[6:])
    
    # Initialize constants to be used throughout the analysis.
    (cadAssemblyXML, cadMetricsXML, requestedMetricsXML, computedValuesXML, kinComputedValuesXML,
     analysisMetaDataXML, testbench_manifest, postprocessingScript, resultsDir, solverDir,
     stepDir, myModel, myAsm, uniqueSuffix, asmIdentifier, structuralOutputs, thermalOutputs) \
        = initializeConstants()

    # Determine if an ADAMS analysis preceded this FEA run.
    # Looks for dependency notifications in the testbench_manifest.json file.
    runAdams = check_for_adams(False, testbench_manifest, kinComputedValuesXML,
                               computedValuesXML, cadMetricsXML, resultsDir, solverDir)

    # Parse necessary information from the XML files
    (feaXML, cadAssemblyXMLTree, maxNumberIter, analysisConstraintSetXML, thermalSetXML) \
        = parseCADAssemblyXML(cadAssemblyXML, resultsDir, args)

    # TODO: remove runAdams constraint
    if maxNumberIter > 1 and any([args.dynamicImplicit, args.dynamicExplicit, runAdams]):
        cad_library.exitwitherror("Adaptive remeshing requested with invalid analysis type. Only static, "
                                  "thermal, and coupled thermal/displacement analyses are supported. FEA "
                                  "analyses launched from a ADAMS kinematic test bench are also currently "
                                  "invalid. Please change your GME test bench settings.", 1, 'AbaqusMain.py')

    elif runAdams and args.meshAndBC or runAdams and args.modal:
        cad_library.exitwitherror("ADAMS-to-Abaqus runs can not have the Abaqus analysis set to "
                                  "MeshAndBoundaryConditions or Modal.", 1, 'AbaqusMain.py')

    if thermalSetXML and any([args.modal, runAdams]):
        cad_library.exitwitherror("Coupled Thermal analysis not supported for modal or Adams-to-Abaqus"
                                  "analyses.", 1, 'AbaqusMain.py')

    # Parse STEP file for raw assembly data
    (stepPath, testBenchName, step) = parseStep(cadAssemblyXMLTree, stepDir)

    # Populate assembly information from CADAssembly_metrics.xml
    asminfo = cad_library.AssemblyInfo()
    asminfo.read_metrics_file(cadMetricsXML)

    # Determine unit length of CAD files    
    unitLength = unit_utils.getUnitLength(asminfo)

    if runAdams:
        # Parse kinematic computed values (ADAMS data)
        (anchorID, anchorPointID) = parseKinComputedValuesXML(kinComputedValuesXML)

    # Calculate scale factor (with respect to meters)
    (unitScale, unitShort) = unit_utils.calcGeoScaleFac(unitLength)
    # Create dictionary of all possibly used units throughout the analysis along with their conversion factors.
    conv = unit_utils.generateConvDict(unitScale)

    # Generate necessary general dictionaries
    # These contain information parsed from the various input XML files.
    # InstRef creates a dictionary entry for each component.
    (instRef, instIndex, instAssemblyRef, instAssemblyIndex,
     rigidPartMasses, rigidPartVolumes, rigidPartDensities,
     rigidParts, rigidPartPresent, Jan24_deactivate_rigidity) = \
        generateInstRef(asminfo, cadAssemblyXML, uniqueSuffix, asmIdentifier, runAdams, args)
    
    # Generate a dictionary containing material properties for each component in the assembly.
    mtrlRef = generateMtrlRef(asminfo, thermalSetXML, rigidParts, rigidPartDensities, conv)

    if not runAdams:
        # If no Adams dependency is found, generate dictionary of loads and boundary conditions placed on each part.
        (loadBCLib, accel) = generateLoadBCLib(analysisConstraintSetXML, feaXML, thermalSetXML, conv, asminfo)
    else:
        # If Adams dependency exists, loads are defined in the LOD files output from Adams. Boundary conditions
        # are applied at a later point.
        (loadBCLib, accel) = ([], [])

    # Process the data obtained from the step file
    (pointsBySR, inst2SR, localTMs, localTVs, topLevelAsm, subAsms,
     asmParts) = processStep(stepPath, testBenchName, uniqueSuffix, asmIdentifier)

    # Generate necessary geometric dictionaries
    (localCoords, CGs) = generateLocalCoords(inst2SR, pointsBySR, asminfo, uniqueSuffix)
    logger.info("Creating a new dictionary with points translated into the global coordinate system" + '\n')
    datumPointDict = coordTransform(localTMs, localTVs, topLevelAsm, subAsms, asmParts, localCoords)
    logger.info("**********************************************************************************" + '\n')
    
    # Directory where analysis output files and post-processing files will be written to.
    if not os.path.exists(solverDir):
        os.mkdir(solverDir)
    os.chdir(solverDir)

    # Start building Abaqus model
    createParts(myModel, step, CGs)
    deleteInvalidParts(myModel)

    # Define step - standard 'static' default
    if thermalSetXML:
        if args.dynamicExplicit:
            (myStep, amp, analysisStepName) = \
                defineDynamicExplicitCoupledThermalDisplacementStep(myModel, structuralOutputs+thermalOutputs)
        else:
            (myStep, amp, analysisStepName) = defineCoupledThermalDisplacementStep(myModel, args.dynamicImplicit,
                                                                                   structuralOutputs+thermalOutputs)
    else:
        if args.dynamicImplicit or args.dynamicExplicit:
            (myStep, amp, analysisStepName) = defineDynamicStep(myModel, args.dynamicExplicit, structuralOutputs)
        elif args.modal:
            (myStep, amp, analysisStepName) = defineModalStep(myModel)
        else:
            (myStep, amp, analysisStepName) = defineStaticStep(myModel, structuralOutputs)
    
    # Assign materials to each component.
    defineMaterials(myModel, mtrlRef)

    if not runAdams:
        assignSections(instRef, myModel, myAsm, subAsms, asmParts)
        mergeAssemblyComponents(myModel, myAsm, instAssemblyRef, asmParts)
        defineContactInteractions(myModel, args.dynamicExplicit)
        defineRigidBodyConstraints(instRef, Jan24_deactivate_rigidity,
                                   instIndex, myAsm, myModel)
        EliminateOverlaps(instRef, rigidParts, myAsm, myModel)

        apply_loads_and_bcs(myModel, myAsm, myStep, instRef, loadBCLib, instIndex, instAssemblyIndex,
                            datumPointDict, accel, amp, args, Jan24_deactivate_rigidity, thermalSetXML)

        meshInstances(asminfo, 40.0, unitShort, instRef,
                      instAssemblyRef, myAsm, feaXML,  args)

    if runAdams:
        EliminateOverlaps(instRef, rigidParts, myAsm, myModel)

        (includeAnchoredPart, anchoredPart) = decideAnchoredPart(anchorID, anchorPointID, instRef, myModel)

        AbaqusCAE_ADAMS(asminfo, instRef, includeAnchoredPart, anchoredPart, myModel,
                        myAsm, 40.0, unitShort, feaXML, args.parallelCores,
                        args.ramAllocated, args, instAssemblyRef, asmParts)

    else:
        includeAnchoredPart, anchoredPart = None, None

    logger.info("**********************************************************************************" + '\n')

    if not runAdams:
        if not args.meshOnly:
            connectParts(myModel, myAsm, args)
            try:
                CreateLOADSBCPNG(myAsm, analysisStepName, solverDir)
            except:
                logger.info('ERROR: Error during creating BC PNG files - Program will keep executing \n')
                pass
            createCAE(solverDir, testBenchName, args)

            checkConstraintErrors(myModel, args)

            exportNastranDeck()

    modifyMetaDataFile(os.path.join(solverDir, '..', analysisMetaDataXML), asminfo, uniqueSuffix, asmIdentifier)

    if not args.meshOnly and not args.meshAndBC:
        jobName = runAbaqusDispatch(myModel, myAsm, instRef, analysisStepName, rigidPartPresent,
                                    rigidParts, args, includeAnchoredPart, anchoredPart, maxNumberIter,
                                    postprocessingScript, analysisMetaDataXML, requestedMetricsXML, testbench_manifest)

    if not runAdams:
        if args.dynamicImplicit or args.dynamicExplicit or args.standard:
            check = 0
            # Adaptive runs can complete before the MaxNumberIterations,
            # so multiple checks need to be done. Start with assuming it went to MaxNumberIter
            if jobName.startswith("Adaptivity-1-iter"):
                lastIterStr = jobName.replace("Adaptivity-1-iter", "")
                lastIter = int(lastIterStr)

                while lastIter > 0 and check == 0:
                    try:
                        AdaptiveJobName = "Adaptivity-1-iter" + str(lastIter)
                        odbAccess.openOdb(path=AdaptiveJobName + '.odb')
                        check = 1
                        jobName = AdaptiveJobName
                    except:
                        # Error thrown because file does not exist. If lastIter > 1, this means the analysis
                        # completed before the max number allowed. Decrease lastIter and try opening again.
                        logger.info('ERROR: Error in reading results of %s.\n' % AdaptiveJobName)
                        lastIter -= 1

            os.chdir(solverDir)
            # Invoke post processing
            # Spawn new Python process by calling the post-processing script along with necessary arguments.
            os.system("abaqus cae noGUI=" + os.path.join(cad_library.META_PATH, "bin", "CAD", postprocessingScript) +
                      " -- " + "-o " + jobName + ".odb " + "-p " + "..\\" + analysisMetaDataXML + " -m " +
                      "..\\..\\" + requestedMetricsXML + " -j " + "..\\..\\" + testbench_manifest)
        elif args.modal:
        # Modal analyses use a different, older post-processing method, as the output ODB is analyzed differently.
            ABQ_CompletePostProcess.afterJobModal(jobName, analysisStepName)
Example #36
from odbAccess import openOdb
from abapy.postproc import GetVectorFieldOutput_byRpt
odb_name = 'indentation.odb'
odb = openOdb(odb_name)
U = GetVectorFieldOutput_byRpt(
  odb = odb, 
  instance = 'I_SAMPLE', 
  step = 0,
  frame = -1,
  original_position = 'NODAL', 
  new_position = 'NODAL', 
  position = 'node',
  field = 'U', 
  sub_set_type = 'element', 
  sub_set = 'CORE',
  delete_report = True)


Example #37
def afterJobModal(jobName, analysisStepName):
    """ This is only called in Model-Based. Not mentioned in this PostProcessing program. """
    import odbAccess
    logger = logging.getLogger()
    logger.info("**********************************************************************************" + '\n')
    logger.info('Job complete\n')
    root = os.getcwd()
    try:
        odb = odbAccess.openOdb(path=jobName + '.odb')
        headers = ('Mode Number', 'Frequency(Hz)')
    except:
        cad_library.exitwitherror('Error in opening %s.\n' % jobName, -1, 'afterJobModal()')
    try:
        for histReg in odb.steps[analysisStepName].historyRegions.keys():
            eigenFrequencies = odb.steps[analysisStepName].historyRegions[histReg].historyOutputs['EIGFREQ'].data
    except:
        cad_library.exitwitherror('Error in reading eigenfrequencies', -1, 'afterJobModal()')
        
    forCSV = (headers,) + eigenFrequencies
    logger.info("Creating the CSV file" + '\n')
    
    report_file2 = 'modalOutput.csv'
    try:
        with open(report_file2, 'wb') as f2:
            writer = csv.writer(f2)
            for (number, frequency) in forCSV:
                val = (number, frequency)
                writer.writerow(val)                    
    except:
        cad_library.exitwitherror('Error in exporting data to %s.\n' % report_file2, -1, 'afterJobModal()')

    reportFile = 'testbench_manifest.json'                              # name of metric JSON
    reportPath = os.path.join(os.getcwd(), '..', '..', reportFile)
    logger.info("Updating the testbench_manifest.json file" + '\n')

    try:
        with open(reportPath, 'r') as json_data:
            data = json.load(json_data)
        ourResults = data["Metrics"]

        minMode = -1
        for (eigenkey, entry) in eigenFrequencies:
            if entry < minMode or minMode == -1:
                minMode = entry
        if minMode == 0:
            logger.info('WARNING: Rigid body modes are present, model has not been constrained properly' + '\n')
        for cs in ourResults:
            if cs["Name"] == 'Minimum_Mode':
                cs["Value"] = minMode
                cs["Unit"] = 'Hz'
    
        with open(reportPath, 'wb') as file_out:                        # open JSON file in write mode...
            json.dump(data, file_out, indent=4)                         # and overwrite with new dictionary

    except:
        cad_library.exitwitherror('Error in exporting data to %s.\n' % reportFile, -1, 'afterJobModal()')

    odb.close()
    os.chdir(root)
    try:
        utility_functions.CopyOrDeleteResults(root)
    except:
        logger.info("Error in copying or deleting result files to the user's machine\n")
        pass

    logger.info("**********************************************************************************" + '\n')
    logger.info('Success\n')
    logger.info("**********************************************************************************" + '\n')    
Example #38
# Warning: executable only under Abaqus (abaqus python / abaqus viewer -noGUI), not regular Python.
import sys
from abapy.postproc import GetFieldOutput_byRpt as gfo
from abapy.postproc import GetVectorFieldOutput_byRpt as gvfo
from abapy.postproc import GetTensorFieldOutput_byRpt as gtfo
from abapy.postproc import GetHistoryOutputByKey as gho
from abapy.indentation import Get_ContactData
from abapy.misc import dump
from odbAccess import openOdb
from abaqusConstants import JOB_STATUS_COMPLETED_SUCCESSFULLY



# Odb opening  
file_name = 'indentation'
odb = openOdb(file_name + '.odb')
data = {}

# Check job status:
job_status = odb.diagnosticData.jobStatus

if job_status == JOB_STATUS_COMPLETED_SUCCESSFULLY:
  data['completed'] = True 
  # Field Outputs
  data['field'] = {}
  fo = data['field']
  fo['Uind'] = [
    gvfo(odb = odb,
      instance = 'I_INDENTER',
      step = 1,
      frame = -1,
      original_position = 'NODAL',
      new_position = 'NODAL',
      position = 'node',
      field = 'U',
      delete_report = True)]
Example #39
from abapy.postproc import GetMesh  
from odbAccess import openOdb
odb = openOdb('myOdb.odb')      
mesh = GetMesh(odb,'MYINSTANCE')
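
A possible follow-up, assuming the installed abapy version provides Mesh.dump2vtk, exporting the retrieved mesh for inspection in a VTK viewer (file name illustrative):

vtk_text = mesh.dump2vtk()          # serialize the mesh in legacy VTK format
with open('myMesh.vtk', 'w') as f:
  f.write(vtk_text)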
Example #40
from abapy.postproc import GetFieldOutput
from odbAccess import openOdb
odb = openOdb('indentation.odb')
U2 = GetFieldOutput(odb, step = 'LOADING0', frame = -1, instance = 'I_SAMPLE', position = 'node', field = 'U', subField = 'U2') # Gets U2 at all nodes of instance 'I_SAMPLE'
U1 = GetFieldOutput(odb, step = 'LOADING0', frame = -1, instance = 'I_SAMPLE', position = 'node', field = 'U', subField = 'U1', labels = [5,6]) # Here labels refer to nodes 5 and 6
S11 = GetFieldOutput(odb, step = 'LOADING0', frame = -1, instance = 'I_SAMPLE', position = 'node', field = 'S', subField = 'S11', labels = 'CORE') # Here labels refer to nodes belonging to the node set 'CORE'
S12 = GetFieldOutput(odb, step = 'LOADING0', frame = -1, instance = 'I_SAMPLE', position = 'element', field = 'S', subField = 'S12', labels = 'CORE') # Here labels refer to elements belonging to the element set 'CORE'
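
The returned objects follow abapy's FieldOutput convention of parallel labels and data lists, so individual values can be inspected directly; a minimal sketch:

for label, value in zip(S11.labels, S11.data):  # node label and its S11 value
  print('node %s: S11 = %s' % (label, value))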
Example #41
from abapy.indentation import Get_ContactData
from odbAccess import openOdb
from abapy.misc import dump

# Odb opening  
Berk_name = 'workdir/indentation_berko'
Axi_name = 'workdir/indentation_axi'
Berk_odb = openOdb(Berk_name + '.odb')
Axi_odb = openOdb(Axi_name + '.odb')
# Getting data
Berk_out = Get_ContactData(odb = Berk_odb, instance = 'I_SAMPLE', node_set = 'TOP_NODES')
Axi_out = Get_ContactData(odb = Axi_odb, instance = 'I_SAMPLE', node_set = 'TOP_NODES')
# Dumping data
dump(Axi_out, 'ContactData_axi.pckl')
dump(Berk_out, 'ContactData_berk.pckl')
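
Outside Abaqus, the dumped pickles can be read back with abapy's load, the counterpart of the dump call used above:

from abapy.misc import load
Axi_out = load('ContactData_axi.pckl')    # ContactData recovered in regular Python
Berk_out = load('ContactData_berk.pckl')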


Example #42
def afterJob(jobName, superRef, analysisStepName, runAdams, thermalSetXML=None):
    logger = logging.getLogger()
    root = os.getcwd()
    analysisDir = os.path.join(root,"Analysis", "Abaqus", jobName)
    if not os.path.exists(analysisDir):
        os.makedirs(analysisDir)
    logger.info("**********************************************************************************" + '\n')
    logger.info('Job complete\n')

    check = 0
    if jobName.startswith("Adaptivity-1-iter"):
        lastIterStr = jobName.replace("Adaptivity-1-iter","")
        lastIter = int(lastIterStr)

        while lastIter > 0 and check == 0:
            try:
                AdaptiveJobName = "Adaptivity-1-iter" + str(lastIter)
                checkOdb = odbAccess.openOdb(path=AdaptiveJobName + '.odb')
                frameCheck = checkOdb.steps[analysisStepName].frames[1]
                check = 1
                jobName = AdaptiveJobName
            except:
                logger.info('ERROR: Error in reading results of %s.\n' % AdaptiveJobName)
                lastIter = lastIter - 1

    try:
        odb = odbAccess.openOdb(path=jobName + '.odb')
        logger.info('Reporting results of %s.\n' % jobName)
    except Exception as e:
        logger.error(STR.join(traceback.format_exception(*sys.exc_info())))
        logger.error('Error in reporting results of %s.\n' % jobName)
        raise

    instRef = superRef['instRef']                         # identify refs in superRef
    instAssemblyRef = superRef['instAssemblyRef']
    mtrlRef = superRef['mtrlRef']                         # ''
    metricRef = superRef['metricRef']                     # ''
    metricRef2 = superRef['metricRef2']
    metricRef3 = superRef['metricRef3']
    lengthUnit = superRef['lengthUnit']                   # ''
    allAssembly = superRef['allAssembly']

    maxStress = {}
    maxDisp = {}
    fosFatigue = {}
    fosYield = {}
    fosUltimate = {}
    fos = {}
    maxTemp = None     # set by thermalPostProcessing when thermal data is present
    minTemp = None     # initialized so the later 'if maxTemp and minTemp' checks are safe without thermal data
    adamsInst = None
    mySteps = odb.steps
    numSteps = len(mySteps)
    maxStressStep = None
    
    if thermalSetXML is not None:
        (maxTemp, minTemp) = thermalPostProcessing(odb, mySteps, numSteps, jobName, superRef, analysisStepName)
    
    if 'Mises' in metricRef:        # Stress analysis
        try:
            if jobName in instRef.keys() or \
                    jobName in instAssemblyRef.keys():       # hit when coming from Adams2Abaqus; otherwise jobName matches no key
                maxStress.update([[jobName,0]])                     # create a maxStress entry
                maxDisp.update([[jobName,0]])                       # create a maxDisp entry
                fosFatigue.update([[jobName,0]])                    # create a fos entry for fatigue strength
                fosUltimate.update([[jobName,0]])                   # create a fos entry for ultimate strength
                fos.update([[jobName,0]])
            else:
                for key in instRef.keys():                      # for each part/subasm instance:
                    if instRef[key]['isRigid']:
                        continue
                    maxStress.update([[key,0]])                     # create a maxStress entry
                    maxDisp.update([[key,0]])                       # create a maxDisp entry
                    fosFatigue.update([[key,0]])                    # create a fos entry for fatigue strength
                    fosUltimate.update([[key,0]])                   # create a fos entry for ultimate strength
                    fos.update([[key,0]])
        except Exception as e:
            logger.error(STR.join(traceback.format_exception(*sys.exc_info())))
            logger.error('Error in creating entries for post-processing\n')
            raise
            
        for j in range(numSteps):
            stepKey = mySteps.keys()[j]
            step = mySteps[stepKey]
            sout = step.frames[-1].fieldOutputs['S'].values
            for i in range(len(sout)):                      # for each value in the stress field output:
                try:
                    inst = sout[i].instance.name                    # get name of part instance
                    if instRef[inst]['isRigid']:
                        continue
                    if sout[i].mises > maxStress[inst]:             # if the Mises stress is greater than maxStress for that part:
                        maxStress[inst] = sout[i].mises             # set the part's maxStress equal to that Mises stress
                        maxStressStep = j
                except AttributeError:
                    pass

        if not allAssembly:
            if 'Mises' in metricRef:                        # if Mises stress is a metric in the XML:
                for inst in maxStress.keys():                   # iterating through key+value pairs in maxStress
                    if inst in metricRef['Mises']:
                        metricRef['Mises'][inst] = maxStress[inst]
            try:
                for inst in maxStress.keys():                   # for each entry in maxStress:
                    mtrlID = instRef[inst]['MaterialID']            # get the Material ID
                    failFatigueStress = mtrlRef[mtrlID]\
                        ['mechanical__strength_fatigue']                        # get the Fatigue Strength material property
                    failYieldStress = mtrlRef[mtrlID]\
                        ['mechanical__strength_tensile_yield']                  # get the Yield Tensile Strength material property
                    failUltimateStress = mtrlRef[mtrlID]\
                        ['mechanical__strength_tensile_ultimate']               # get the Ultimate Tensile Strength material property
                    fosFatigue[inst] = failFatigueStress/maxStress[inst]        # store the factor of safety for fatigue strength
                    fosYield[inst] = failYieldStress/maxStress[inst]            # store the factor of safety for yield strength
                    fosUltimate[inst] = failUltimateStress/maxStress[inst]      # store the factor of safety for ultimate strength
                    fos[inst] = failFatigueStress/maxStress[inst]
                for inst in fos.keys():                                         # for each entry in fos:
                    if inst in metricRef['FactorOfSafety']:
                        metricRef['FactorOfSafety'][inst] = fos[inst]
            except:
                logger.error("Insufficient data for FactorOfSafety\n")
                raise
                
            try:
                for j in range(numSteps):
                    stepKey = mySteps.keys()[j]
                    step = mySteps[stepKey]
                    uout = step.frames[-1].fieldOutputs['U'].values           # displacement field output
                    for i in range(len(uout)):                      # for each value in the disp. field output:
                        try:
                            inst = uout[i].instance.name
                            if uout[i].magnitude > maxDisp[inst]:           # if disp. magnitude is greater than maxDisp:
                                maxDisp[inst] = uout[i].magnitude           # set maxDisp equal to that disp. magnitude
                        except AttributeError:
                            continue
            except:
                logger.error('Error in reading the displacement data\n')
                raise
            if 'Displacement' in metricRef:                 # if displacement is a metric in the XML:
                for inst in maxDisp.keys():                     # for each entry in maxDisp:
                    if inst in metricRef['Displacement']:
                        metricRef['Displacement'][inst] = maxDisp[inst]

            idArray = []
            reportFile = 'testbench_manifest.json'         # name of metric JSON
            reportPath = os.path.join(root, reportFile)

            logger.info("Updating the testbench_manifest.json file" + '\n')

            try:
                with open(reportPath, 'r') as json_data:
                    data = json.load(json_data)
                ourResults = data["Metrics"]
                for rs in ourResults:
                    idArray.append(rs["GMEID"])
                for component in idArray:
                    for component2 in metricRef2:
                        if component == component2:
                            for component3 in maxStress:
                                if metricRef3[component] == instRef[component3]['ComponentID']:
                                    if metricRef2[component] == 'Mises':
                                        for cs in ourResults:
                                            if cs["GMEID"] == component:
                                                cs["Value"] = maxStress[component3]
                                                cs["Unit"] = 'MPa'
                                    elif metricRef2[component] == 'FactorOfSafety':
                                        for cs in ourResults:
                                            if cs["GMEID"] == component:
                                                cs["Value"] = fos[component3]
                                                cs["Unit"] = 'None'
                minFosFatigue = -1
                for inst in fosFatigue.keys():
                    if fosFatigue[inst] < minFosFatigue or minFosFatigue == -1:
                        minFosFatigue = fosFatigue[inst]

                minFosYield = -1
                for inst in fosYield.keys():
                    if fosYield[inst] < minFosYield or minFosYield == -1:
                        minFosYield = fosYield[inst]

                minFosUltimate = -1
                for inst in fosUltimate.keys():
                    if fosUltimate[inst] < minFosUltimate or minFosUltimate == -1:
                        minFosUltimate = fosUltimate[inst]

                for cs in ourResults:
                    if cs["Name"] == 'Minimum_Fatigue_Strength_Factor_of_Safety':
                        cs["Value"] = minFosFatigue
                        cs["Unit"] = 'None'

                for cs in ourResults:
                    if cs["Name"] == 'Minimum_Yield_Strength_Factor_of_Safety':
                        cs["Value"] = minFosYield
                        cs["Unit"] = 'None'

                for cs in ourResults:
                    if cs["Name"] == 'Minimum_Ultimate_Tensile_Strength_Factor_of_Safety':
                        cs["Value"] = minFosUltimate
                        cs["Unit"] = 'None'

                with open(reportPath, 'wb') as file_out:                        # open JSON file in write mode...
                    json.dump(data, file_out, indent=4)                         # and overwrite with new dictionary       

            except:
                logger.error('Error in exporting data to %s.\n' % reportFile)
                raise

        else:
            try:
                for inst in maxStress.keys():                   # for each entry in maxStress:
                    mtrlID = instRef[inst]['MaterialID']            # get the Material ID
                    failFatigueStress = mtrlRef[mtrlID]\
                        ['mechanical__strength_fatigue']                        # get the Fatigue Strength material property
                    failYieldStress = mtrlRef[mtrlID]\
                        ['mechanical__strength_tensile_yield']                  # get the Yield Tensile Strength material property
                    failUltimateStress = mtrlRef[mtrlID]\
                        ['mechanical__strength_tensile_ultimate']               # get the Ultimate Tensile Strength material property
                    fosFatigue[inst] = failFatigueStress/maxStress[inst]        # store the factor of safety for fatigue strength
                    fosYield[inst] = failYieldStress/maxStress[inst]            # store the factor of safety for yield strength
                    fosUltimate[inst] = failUltimateStress/maxStress[inst]      # store the factor of safety for ultimate strength
                    fos[inst] = failFatigueStress/maxStress[inst]  
            except:
                logger.error("Insufficient data for FactorOfSafety\n")
                raise

            idArray = []
            reportFile = 'testbench_manifest.json'         # name of metric JSON
            reportPath = os.path.join(root, reportFile)

            logger.info("Updating the testbench_manifest.json file" + '\n')

            try:
                with open(reportPath, 'r') as json_data:
                    data = json.load(json_data)
                ourResults = data["Metrics"]
                minFosFatigue = -1
                for inst in fosFatigue.keys():
                    if fosFatigue[inst] < minFosFatigue or minFosFatigue == -1:
                        minFosFatigue = fosFatigue[inst]

                minFosYield = -1
                for inst in fosYield.keys():
                    if fosYield[inst] < minFosYield or minFosYield == -1:
                        minFosYield = fosYield[inst]

                minFosUltimate = -1
                for inst in fosUltimate.keys():
                    if fosUltimate[inst] < minFosUltimate or minFosUltimate == -1:
                        minFosUltimate = fosUltimate[inst]

                for cs in ourResults:
                    if cs["Name"] == 'Minimum_Fatigue_Strength_Factor_of_Safety':
                        cs["Value"] = minFosFatigue
                        cs["Unit"] = 'None'

                for cs in ourResults:
                    if cs["Name"] == 'Minimum_Yield_Strength_Factor_of_Safety':
                        cs["Value"] = minFosYield
                        cs["Unit"] = 'None'

                for cs in ourResults:
                    if cs["Name"] == 'Minimum_Ultimate_Tensile_Strength_Factor_of_Safety':
                        cs["Value"] = minFosUltimate
                        cs["Unit"] = 'None'               
        
                with open(reportPath, 'wb') as file_out:                        # open JSON file in write mode...
                    json.dump(data, file_out, indent=4)                         # and overwrite with new dictionary
            except:
                logger.error('Error in exporting data to %s.\n' % reportFile)
                raise
    
    # END if stress analysis
            
    try:    
        logger.info("Creating the CSV file" + '\n')

        tempInstRef = {}
        headers = {}
        val = {}
        forCSVlist = []   
    
        headers['Part Name'] = {'fatigueStr': 'Fatigue Strength (MPa)', 'yieldStr': 'Yield Strength (MPa)', 'ultimateStr': 'Ultimate Tensile Strength (MPa)', 'fosFatigue': 'Factor of Safety for Fatigue Strength', 
            'fosYield': 'Factor of Safety for Yield Strength', 'fosUltimate': 'Factor of Safety for Ultimate Tensile Strength', 'maxStress': 'Maximum Stress (MPa)', 'MaterialID': 'Material ID', 'order': 1,
            'isPart': 'NA', 'ComponentID': 'Unique ID', 'maxTemp': 'Maximum Temperature (K)', 'minTemp': 'Minimum Temperature (K)'}
        tempInstRef = instRef

        orderNum = 2
        for key in tempInstRef:
            matID = tempInstRef[key]['MaterialID']
            if key in maxStress:
                try:
                    tempInstRef[key]['maxStress'] = maxStress[key]
                    tempInstRef[key]['fosFatigue'] = fosFatigue[key]
                    tempInstRef[key]['fosYield'] = fosYield[key]
                    tempInstRef[key]['fosUltimate'] = fosUltimate[key]
                    tempInstRef[key]['fatigueStr'] = mtrlRef[matID]['mechanical__strength_fatigue']
                    tempInstRef[key]['yieldStr'] = mtrlRef[matID]['mechanical__strength_tensile_yield']
                    tempInstRef[key]['ultimateStr'] = mtrlRef[matID]['mechanical__strength_tensile_ultimate']
                except:
                    tempInstRef[key]['maxStress'] = 'Err'
                    tempInstRef[key]['fosFatigue'] = 'Err'
                    tempInstRef[key]['fosYield'] = 'Err'
                    tempInstRef[key]['fosUltimate'] = 'Err'
            else:
                if jobName in instRef.keys() and key != jobName:    # ADAMS run, but not on analyzed component
                    continue
                else:
                    tempInstRef[key]['maxStress'] = 'N/A'
                    tempInstRef[key]['fosFatigue'] = 'N/A'
                    tempInstRef[key]['fosYield'] = 'N/A'
                    tempInstRef[key]['fosUltimate'] = 'N/A'
        
            if maxTemp and minTemp:
                tempInstRef[key]['maxTemp'] = maxTemp[key]
                tempInstRef[key]['minTemp'] = minTemp[key]
        
            tempInstRef[key]['order'] = orderNum
            orderNum = orderNum + 1        

        forCSV = dict(headers, **tempInstRef)
        
        if maxTemp and minTemp:
            for d in forCSV:
                try:
                    if len(maxStress) == 0:     # Thermal analysis only
                        temp = (d, forCSV[d]['ComponentID'], forCSV[d]['maxTemp'], forCSV[d]['minTemp'], forCSV[d]['order'])
                    else:       # Coupled analysis
                        temp = (d, forCSV[d]['ComponentID'], forCSV[d]['fatigueStr'], forCSV[d]['yieldStr'], forCSV[d]['ultimateStr'], forCSV[d]['maxStress'],
                                forCSV[d]['fosFatigue'],forCSV[d]['fosYield'],forCSV[d]['fosUltimate'], forCSV[d]['maxTemp'],
                                forCSV[d]['minTemp'], forCSV[d]['order'])  
                except KeyError:
                    continue    # ADAMS run - no data for this key available yet.
                except IndexError:
                    logger.info("Error adding stress data to CSV file! Most likely caused by running test bench " + \
                                "without FEAComputation blocks. Continuing test bench.")
                    continue
                forCSVlist.append(temp)
        else:   # Structural analysis only
            for d in forCSV:
                try:
                    temp = (d, forCSV[d]['ComponentID'], forCSV[d]['fatigueStr'], forCSV[d]['yieldStr'], forCSV[d]['ultimateStr'], forCSV[d]['maxStress'],
                        forCSV[d]['fosFatigue'],forCSV[d]['fosYield'],forCSV[d]['fosUltimate'], forCSV[d]['order'])
                except KeyError:
                    continue
                except IndexError:
                    logger.info("Error adding stress data to CSV file! Most likely caused by running test bench " + \
                                "without FEAComputation blocks. Continuing test bench.")
                    continue
                forCSVlist.append(temp)
        forCSVlist.sort(key=lambda x: float(x[-1]))  # Sort by orderNum - independent of structural or thermal analysis
      
        report_file2 = 'stressOutput.csv'
        try:
            with open(report_file2,'wb') as f:
                writer = csv.writer(f)
                for d in range(0, orderNum-1):
                    try:
                        if maxTemp and minTemp:
                            if len(maxStress) == 0:
                                val = (forCSVlist[d][0], forCSVlist[d][1], forCSVlist[d][2], forCSVlist[d][3])
                            else:
                                val = (forCSVlist[d][0], forCSVlist[d][1], forCSVlist[d][2], forCSVlist[d][3], forCSVlist[d][4],
                                       forCSVlist[d][5], forCSVlist[d][6], forCSVlist[d][7], forCSVlist[d][8], forCSVlist[d][9], forCSVlist[d][10])
                        else:
                            val = (forCSVlist[d][0], forCSVlist[d][1], forCSVlist[d][2], forCSVlist[d][3], forCSVlist[d][4],
                                   forCSVlist[d][5], forCSVlist[d][6], forCSVlist[d][7], forCSVlist[d][8])
                    except IndexError:
                        logger.info("Error adding stress data to CSV file! Most likely caused by running test bench " + \
                                    "without FEAComputation blocks. Continuing test bench.")
                        continue
                    writer.writerow(val)
        except:
            logger.error('Error in exporting data to %s.\n' % report_file2)
            raise
    except:
        logger.error('Error in creating the CSV file\n')
        raise

    if analysisStepName in ['staticLoading', 'coupledTempDispl']:
        if maxTemp: 
            thermal = True
        else:
            thermal = False
        if len(maxStress) > 0: 
            structural = True
        else:
            structural = False
        try:
            SetupViewportPNG(odb, jobName, thermal, structural, maxStressStep)
        except Exception as e:
            logger.error(STR.join(traceback.format_exception(*sys.exc_info())))
            logger.error('Error\n')
            raise
        

    odb.close()
    os.chdir(root)
    try:
        CopyOrDeleteResults(root, jobName, runAdams)
    except:
        logger.error("Error in copying or deleting result files to the user's machine\n")
        pass


    logger.info("**********************************************************************************" + '\n')
    logger.info('Success\n')
    logger.info("**********************************************************************************" + '\n')
Example #43
def openOdb(odbName):
    if not odbName.endswith('.odb'):
        odbName += '.odb'
    import odbAccess
    return odbAccess.openOdb(path=odbName)
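
A short usage sketch for this helper (the ODB name is illustrative):

odb = openOdb('indentation')  # the '.odb' suffix may be omitted; the helper appends it
print(odb.steps.keys())       # e.g. list the available analysis steps
odb.close()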
Example #44
def afterJob(jobName, superRef):

    analysisDir = os.path.join(root, "Analysis", "Abaqus")
    if not os.path.exists(analysisDir):
        os.makedirs(analysisDir)
    f = open(logDir, "a")  # open the log file

    f.write("\n")
    f.write("**********************************************************************************" + "\n")
    f.write("\n")
    f.write("Job complete.\n")

    instRef = superRef["instRef"]
    mtrlRef = superRef["mtrlRef"]
    metricRef = superRef["metricRef"]

    try:
        odb = odbAccess.openOdb(path=jobName + ".odb")
        f.write("Reporting results of %s.\n" % jobName)
    except Exception as e:
        f.write("ERROR: Error in reporting results of %s.\n" % jobName)
        f.write(STR.join(traceback.format_exception(*sys.exc_info())))
        raise

    mySteps = odb.steps
    numSteps = len(mySteps)

    maxStress = -1  # track the overall maximum Mises stress across all steps
    maxStressStep = 0
    for i in range(numSteps):
        stepKey = mySteps.keys()[i]
        step = mySteps[stepKey]

        try:
            sout = step.frames[-1].fieldOutputs["S"].values  # stress field output
            for j in range(len(sout)):  # for each value in the stress field output:
                try:
                    if sout[j].mises > maxStress:  # if the Mises stress exceeds the running maximum:
                        maxStress = sout[j].mises  # record the new maximum Mises stress
                        maxStressStep = i  # store the step number of the maximum stress
                except AttributeError:
                    pass
        except:
            pass

    try:
        mtrlID = instRef[jobName]["MaterialID"]  # get the Material ID
        failFatigueStress = mtrlRef[mtrlID][
            "mechanical__strength_fatigue"
        ]  # get the Fatigue Strength material property
        failYieldStress = mtrlRef[mtrlID][
            "mechanical__strength_tensile_yield"
        ]  # get the Yield Tensile Strength material property
        failUltimateStress = mtrlRef[mtrlID][
            "mechanical__strength_tensile_ultimate"
        ]  # get the Ultimate Tensile Strength material property
        fosFatigue = failFatigueStress / maxStress  # store the factor of safety for fatigue strength
        fosYield = failYieldStress / maxStress  # store the factor of safety for yield strength
        fosUltimate = failUltimateStress / maxStress  # store the factor of safety for ultimate strength
        fos = failFatigueStress / maxStress
    except:
        maxStress = "Err"
        failFatigueStress = "Err"
        failYieldStress = "Err"
        failUltimateStress = "Err"
        fosFatigue = "Err"
        fosYield = "Err"
        fosUltimate = "Err"
        fos = "Err"
        f.write("Insufficient data for FactorOfSafety!\n")
        pass

    headers = [
        "Part Name",
        "Unique ID",
        "Fatigue Strength (MPa)",
        "Yield Strength (MPa)",
        "Ultimate Tensile Strength (MPa)",
        "Maximum Stress (MPa)",
        "Factor of Safety for Fatigue Strength",
        "Factor of Safety for Yield Strength",
        "Factor of Safety for Ultimate Tensile Strength",
    ]

    output = [
        jobName,
        instRef[jobName]["ComponentID"],
        failFatigueStress,
        failYieldStress,
        failUltimateStress,
        maxStress,
        fosFatigue,
        fosYield,
        fosUltimate,
    ]

    report_file = jobName + ".csv"
    with open(report_file, "wb") as fp:
        a = csv.writer(fp, delimiter=",")
        data = [headers, output]
        a.writerows(data)

    f.close()

    try:
        CreateViewportPNG(odb, jobName, maxStressStep)
    except:
        pass

    odb.close()

    os.chdir(root)

    CopyOrDeleteResults(jobName)
Example #45
# ABAQUS/PYTHON POST PROCESSING SCRIPT
# Run using abaqus python / abaqus viewer -noGUI / abaqus cae -noGUI

# Packages (Abaqus, Abapy and built-in only here)
from odbAccess import openOdb
from abapy.misc import dump
from abapy.postproc import GetHistoryOutputByKey as gho

# Setting up some paths
workdir = 'workdir'
name = 'indentation_axi'

# Opening the Odb File
odb = openOdb(workdir + '/' + name + '.odb')

# Retrieving the reference node of the indenter. Its label is stored inside a node set named REF_NODE.
ref_node_label = odb.rootAssembly.instances['I_INDENTER'].nodeSets[
    'REF_NODE'].nodes[0].label

# Retrieving the reaction forces along Y (RF2) and the displacements along Y (U2) where they are recorded.
RF2 = gho(odb, 'RF2')
U2 = gho(odb, 'U2')

# Packing data
data = {'ref_node_label': ref_node_label, 'RF2': RF2, 'U2': U2}

# Dumping data
dump(data, workdir + '/' + name + '.pckl')

# Closing Odb
odb.close()
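
The dumped dictionary can later be reloaded in a regular Python session with abapy's load (the counterpart of dump):

from abapy.misc import load
data = load('workdir/indentation_axi.pckl')   # same path as dumped above
ref_node_label = data['ref_node_label']       # keys mirror what was packed above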