Example #1
import numpy as np
import matplotlib.pyplot as plt

# get_parameters, calc_deviator and multi_processer are project-level
# helpers assumed to be importable alongside this script.


def main():
    frequencies = [5, 10, 20, 40]

    lines = ['-', '--']
    colors = ['r', 'b', 'g', 'k']
    cycles = 100000
    pressures = np.linspace(0, 30, 100)
    strain = 0.05
    # 50 logarithmically spaced cycle counts between 1 and `cycles`
    n = np.exp(np.linspace(0, np.log(cycles)))
    for j, f in enumerate(frequencies):
        parameters = [
            get_parameters(f, common=False),
            get_parameters(f, common=True)
        ]
        for i, par in enumerate(parameters):
            job_list = []
            for p in pressures:
                job_list.append((calc_deviator, [strain, n, p, par], {}))
            q = np.array(
                multi_processer(job_list, delay=0., timeout=3600, cpus=12))
            plt.plot(pressures, q, lines[i] + colors[j], lw=2)
    plt.xlabel('$I_{1, static}$ [kPa]')
    plt.ylabel(r'$\sigma_{vM, cyclic}$ [kPa]')
    plt.tight_layout()
    # plt.savefig('strain_life.png')
    plt.show()
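Every example on this page funnels its work through multi_processer, whose definition is not shown here. From the call sites, it takes a list of (function, args, kwargs) job tuples plus delay, timeout and cpus keyword arguments (the keyword name of the first parameter appears to be jobs, see Example #8) and returns one result per job, in submission order. A minimal sketch of such a helper, assuming plain multiprocessing.Pool semantics; the default values are guesses:

import multiprocessing
import time


def multi_processer(jobs, delay=0., timeout=3600, cpus=8):
    # Hypothetical stand-in: each job is a (function, args, kwargs)
    # tuple; results come back in the order the jobs were submitted.
    pool = multiprocessing.Pool(processes=cpus)
    async_results = []
    for func, args, kwargs in jobs:
        async_results.append(pool.apply_async(func, args, kwargs))
        time.sleep(delay)  # optional stagger between submissions
    pool.close()
    results = [res.get(timeout=timeout) for res in async_results]
    pool.join()
    return results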
def residual(par, *data):
    # Assuming the optimizer passes the simulation list as the single
    # extra argument, as in residual_fit below
    simulation_list, = data
    job_list = [(calc_pf_for_simulation, (sim.cd, sim.load, par), {})
                for sim in simulation_list]
    pf_wl = multi_processer(job_list, timeout=100, delay=0, cpus=1)
    pf_target = [sim.pf_experimental for sim in simulation_list]
    res = np.sum((np.array(pf_wl) - np.array(pf_target))**2)
    print(res, par, pf_wl)
    return res
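The (par, *data) signature matches the args convention of the scipy.optimize routines, so a plausible (purely hypothetical) driver would look like:

from scipy.optimize import fmin

# Hypothetical driver; `simulations` and `initial_parameters` are
# assumed to be defined elsewhere in the project.
best_par = fmin(residual, initial_parameters, args=(simulations,))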
Example #3
def residual(fitting_parameters, par, parameters_to_fit, experiments,
             residual_func):
    par[parameters_to_fit] = fitting_parameters
    job_list = [(residual_func, [par, experiment], {})
                for experiment in experiments]
    simulations = multi_processer(job_list, delay=0.)
    res = 0
    for sim in simulations:
        r, e_sim, e_exp, p, q, f = sim
        res += r
        print("\tf = " + str(f) + ",\t p = " + str(p) + " , q = " + str(q) +
              ": e_exp = " + str(e_exp) + "\t e_sim = " + str(e_sim) +
              "\t r = " + str(r))
    print(fitting_parameters)
    print(res)
    return res
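The assignment par[parameters_to_fit] = fitting_parameters is a subset-fitting pattern: the optimizer only varies the free entries, which are written back into the full parameter array before each evaluation. A self-contained illustration (all values hypothetical):

import numpy as np

par = np.array([1.0, 2.0, 3.0, 4.0])    # full parameter set
parameters_to_fit = np.array([1, 3])    # indices the optimizer may vary
fitting_parameters = [2.5, 4.5]         # optimizer's current guess
par[parameters_to_fit] = fitting_parameters
print(par)                              # [1.  2.5 3.  4.5]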
def residual_fit(parameters, *data):
    simulation_list, lower_bound, upper_bound = data
    parameters = _check_parameter_bounds(parameters, lower_bound, upper_bound)

    experimental_pf = np.array([
        float(sim.failures) / (sim.failures + sim.run_outs)
        for sim in simulation_list
    ])
    job_list = [(calc_pf_for_simulation, (simulation, parameters), {})
                for simulation in simulation_list]
    pf_wl = np.array(multi_processer(job_list, timeout=100, delay=0))
    r = (pf_wl - experimental_pf)**2
    # Dead band: probability errors smaller than 0.1 are ignored
    r[abs(pf_wl - experimental_pf) < 0.1] = 0
    print('============================================================================================================')
    print('parameters:\t\t', parameters)
    print('pf_simulation:\t', ' '.join(
        np.array_repr(pf_wl).replace('\n', '').replace('\r', '').split()))
    print('r_vec:\t\t\t', ' '.join(
        np.array_repr(r).replace('\n', '').replace('\r', '').split()))
    print('R:\t\t\t', np.sqrt(np.sum(r)) / 10)
    return np.sum(r)
def likelihood_function_fit(parameters, *data):
    simulation_list, lower_bound, upper_bound = data
    parameters = _check_parameter_bounds(parameters, lower_bound, upper_bound)

    # Clip simulated probabilities away from 0 and 1 so the logarithms
    # below stay finite
    tol = 1e-6
    job_list = [(calc_pf_for_simulation, (simulation, parameters), {})
                for simulation in simulation_list]
    pf_sim = np.array(multi_processer(job_list, timeout=100, delay=0))
    pf_sim[pf_sim < tol] = tol
    pf_sim[pf_sim > 1 - tol] = 1 - tol

    # Accumulate the negative binomial log-likelihood of the outcomes
    likelihood = 0
    for i, simulation in enumerate(simulation_list):
        likelihood -= np.log(pf_sim[i]) * simulation.failures
        likelihood -= np.log(1 - pf_sim[i]) * simulation.run_outs

    print('============================================================================================================')
    print('parameters:\t\t', parameters)
    print('pf_simulation:\t', ' '.join(
        np.array_repr(pf_sim).replace('\n', '').replace('\r', '').split()))
    print('L:\t\t\t', likelihood)
    return likelihood
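What the loop above accumulates is the negative binomial log-likelihood of the observed outcomes, with failures_i failures and run_outs_i run-outs at the (clipped) simulated failure probability pf_i:

    -ln(L) = -sum_i [ failures_i * ln(pf_i) + run_outs_i * ln(1 - pf_i) ]

Minimizing the returned value therefore maximizes the likelihood of the experimental failure data.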
def calculate_permanent_strains(stress_odb_file_name, strain_odb_file_name,
                                cycles, material_parameters):
    # Accept both a scalar cycle count and a sequence of cycle counts
    cycles = np.atleast_1d(np.asarray(cycles))

    work_directory = create_temp_dir_name(strain_odb_file_name)
    os.makedirs(work_directory)

    if not os.path.isfile(strain_odb_file_name):
        os.chdir('abaqus_functions')
        job = subprocess.Popen(abq + ' python create_empty_odb.py ' +
                               strain_odb_file_name + ' ' +
                               stress_odb_file_name,
                               shell=True)
        job.wait()
        os.chdir('..')

    os.chdir('abaqus_functions')
    static_pickle_file = work_directory + '/static_stresses.pkl'
    cyclic_pickle_file = work_directory + '/cyclic_stresses.pkl'
    job = subprocess.Popen(abq + ' python write_stress_state_pickles.py ' +
                           stress_odb_file_name + ' ' + static_pickle_file +
                           ' ' + cyclic_pickle_file,
                           shell=True)
    job.wait()
    os.chdir('..')

    with open(static_pickle_file, 'rb') as static_pickle:
        static_data = pickle.load(static_pickle, encoding='latin1')
        static_stresses = static_data['data'] / 1e3
        instance_name = str(static_data['instance'])
        element_set_name = str(static_data['element_set'])
    os.remove(static_pickle_file)

    with open(cyclic_pickle_file, 'rb') as cyclic_pickle:
        cyclic_stresses = pickle.load(cyclic_pickle,
                                      encoding='latin1')['data'] / 1e3
    os.remove(cyclic_pickle_file)

    n = static_stresses.shape[0]
    permanent_strains = np.zeros((len(cycles), n, static_stresses.shape[1]))
    num_cpus = 12
    # Split the gauss points into one contiguous chunk per cpu; the
    # last chunk absorbs any remainder
    chunksize = n // num_cpus
    indices = [i * chunksize for i in range(num_cpus)]
    indices.append(n)
    job_list = []
    for i in range(num_cpus):
        args_list = [
            material_parameters, cycles,
            static_stresses[indices[i]:indices[i + 1]],
            cyclic_stresses[indices[i]:indices[i + 1]]
        ]
        job_list.append((evaluate_permanent_strain_for_gp, args_list, {}))
    result = multi_processer(job_list, timeout=7200, cpus=num_cpus)
    for i in range(num_cpus):
        permanent_strains[:, indices[i]:indices[i + 1], :] = result[i]

    for i, cycle_count in enumerate(cycles):
        write_data_to_odb(field_data=permanent_strains[i, :, :],
                          field_id='EP',
                          odb_file_name=strain_odb_file_name,
                          step_name='cycles_' + str(cycle_count),
                          instance_name=instance_name,
                          set_name=element_set_name)

    boundary_conditions = [
        BoundaryCondition('X1_NODES', 'node_set', 1),
        BoundaryCondition('ballast_bottom_nodes', 'node_set', 2),
        BoundaryCondition('X_SYM_NODES', 'node_set', 1),
        BoundaryCondition('Z_SYM_NODES', 'node_set', 3),
        BoundaryCondition('Z1_NODES', 'node_set', 3)
    ]

    calculator = DeformationCalculator(strain_odb_file_name,
                                       boundary_conditions,
                                       step_name='cycles_' + str(cycles[0]),
                                       instance_name=instance_name,
                                       set_name=element_set_name,
                                       strain_field_id='EP')
    for cycle_count in cycles:
        up, err = calculator.calculate_deformations(step_name='cycles_' +
                                                    str(cycle_count))

        write_data_to_odb(up,
                          'UP',
                          strain_odb_file_name,
                          step_name='cycles_' + str(cycle_count),
                          position='NODAL',
                          frame_number=0,
                          set_name='EMBANKMENT_INSTANCE_BALLAST_NODES')
        write_data_to_odb(err,
                          'ERR',
                          strain_odb_file_name,
                          step_name='cycles_' + str(cycle_count),
                          frame_number=0,
                          set_name=element_set_name)
    os.removedirs(work_directory)
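For orientation, a hypothetical call of the function above; the ODB file names and the parameter vector are placeholders, not values from the project:

import numpy as np

# Hypothetical usage; all names below are placeholders
material_parameters = np.array([])  # project-specific material constants
calculate_permanent_strains(stress_odb_file_name='embankment_stresses.odb',
                            strain_odb_file_name='embankment_strains.odb',
                            cycles=[100, 1000, 10000, 100000],
                            material_parameters=material_parameters)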
class DeformationCalculator:
    def __init__(self, strain_odb_file_name, boundary_conditions, step_name, instance_name='', set_name='',
                 strain_field_id='E'):
        print("Init calculator")
        self.odb_file_name = strain_odb_file_name
        self.work_directory = create_temp_dir_name(strain_odb_file_name)
        os.makedirs(self.work_directory)
        self.strain_field_id = strain_field_id
        self.instance_name = instance_name
        self.set_name = set_name
        parameter_pickle_file = self.work_directory + '/parameter_strain_pickle.pkl'
        data_pickle_file = self.work_directory + '/strain_pickle.pkl'
        parameter_dict = {'instance_name': self.instance_name, 'strain_odb_file_name': self.odb_file_name,
                          'set_name': self.set_name, 'strain_field_id': self.strain_field_id, 'step_name': step_name}
        with open(parameter_pickle_file, 'wb') as pickle_file:
            pickle.dump({'parameter_dict': parameter_dict, 'boundary_conditions': boundary_conditions}, pickle_file,
                        protocol=2)
        os.chdir('abaqus_functions')
        job = subprocess.Popen(abq + ' python write_data_for_def_calculation.py ' + parameter_pickle_file + ' '
                               + data_pickle_file, shell=True)
        job.wait()
        os.chdir('..')
        with open(data_pickle_file, 'rb') as pickle_file:
            data = pickle.load(pickle_file, encoding='latin1')
        os.remove(data_pickle_file)
        os.remove(parameter_pickle_file)

        print("Handling boundary conditions")
        bc_dofs = data['bc_dofs']
        bc_vals_dict = data['bc_vals_dict']
        self.nodal_displacements = data['nodal_displacements']
        elements = data['elements']
        b_components = data['b_components']
        strain_components = data['strain_components']
        displacement_components = data['displacement_components']
        self.bc_vals = 0.*bc_dofs
        for i, dof in enumerate(bc_dofs):
            self.bc_vals[i] = bc_vals_dict.get(dof, 0.)

        self.nodal_displacements[bc_dofs] = self.bc_vals
        # coo_matrix index arrays must be integer typed
        row = np.zeros(b_components, dtype=int)
        col = np.zeros(b_components, dtype=int)
        values = np.zeros(b_components)
        self.gauss_point_volumes = np.zeros(strain_components)
        print("Assembling B-matrix")
        job_list = []
        for i, element in enumerate(elements):
            job_list.append((calculate_element_data, [element, i], {}))
        print("Starting multiprocessing of elements")
        b_data = multi_processer(job_list, cpus=12, delay=0., timeout=1e5)
        print("Assembling results")
        for i, data in enumerate(b_data):
            n = data[0].shape[0]
            col[i*n:(i + 1)*n] = data[0]
            row[i*n:(i + 1)*n] = data[1]
            values[i*n:(i + 1)*n] = data[2]
            gps = data[3].shape[0]//6
            self.gauss_point_volumes[i*gps*6:(i+1)*gps*6] = data[3]
        self.B_matrix = coo_matrix((values, (row, col)),
                                   shape=(strain_components, displacement_components)).tocsc()
        print("Shape of B-matrix:", self.B_matrix.shape)
        all_cols = np.arange(self.nodal_displacements.shape[0])
        self.bc_cols = np.where(np.in1d(all_cols, bc_dofs))[0]
        self.cols_to_keep = np.where(np.logical_not(np.in1d(all_cols, bc_dofs)))[0]
        print("Computing scale factors")
        scale_array = sp.diags([self.gauss_point_volumes], offsets=[0])
        self.B_matrix = scale_array*self.B_matrix
        self.B_red = self.B_matrix[:, self.cols_to_keep]

        print("Scaling B-matrix")
        self.scale_factors = norm(self.B_red, axis=0)
        scale_array = sp.diags([1./self.scale_factors], offsets=[0])
        self.B_red *= scale_array
        print("Init done")
        os.removedirs(self.work_directory)
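calculate_deformations itself is not shown, but given the scaled reduced B-matrix assembled above, recovering nodal displacements from a strain field amounts to a sparse least-squares solve. A minimal sketch of what that step might look like, assuming scipy.sparse.linalg.lsqr and the attribute names set in __init__:

import numpy as np
from scipy.sparse.linalg import lsqr

# Hypothetical sketch of the missing least-squares step: solve
# B_red @ u_free ~= strain_vector, undo the column scaling, and
# re-insert the prescribed boundary values stored during __init__.
def solve_deformations(calc, strain_vector):
    result = lsqr(calc.B_red, strain_vector)
    u = np.zeros_like(calc.nodal_displacements)
    u[calc.cols_to_keep] = result[0] / calc.scale_factors
    u[calc.bc_cols] = calc.nodal_displacements[calc.bc_cols]
    return u, result[3]  # displacements and the residual norm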
Example #8
    experimental_pf = np.array([float(sim.failures)/(sim.failures + sim.run_outs) for sim in simulations])
    n_su = 20
    n_b = 20

    su = np.linspace(500, 1500, n_su)
    b = np.linspace(5, 20, n_b)

    SU, B = np.meshgrid(su, b)

    for fig, findley_parameter in enumerate([1.3]):
        plt.figure(fig)
        job_list = []
        for b_val in b:
            for su_val in su:
                job_list.append([likelihood_function_plot, ((findley_parameter, su_val, b_val), simulations), {}])
        r_list = multi_processer(jobs=job_list, timeout=1000, delay=0., cpus=16)
        # b was the outer loop when queueing, so the flat result list
        # reshapes to (n_b, n_su), matching the meshgrid layout
        r = np.array(r_list).reshape((n_b, n_su))

        ind_min = np.unravel_index(np.argmin(r, axis=None), r.shape)

        plt.contourf(SU, B, r)
        plt.text(SU[ind_min], B[ind_min], r'\textbf{x}', horizontalalignment='left')
        plt.text(1.1*su[0], 0.9*b[-1], r'$\sigma_u=' + str(SU[ind_min]) + '$ MPa',
                 horizontalalignment='left')
        plt.text(1.1*su[0], 0.8*b[-1], r'$b=' + str(B[ind_min]) + '$',
                 horizontalalignment='left')
        plt.text(1.1*su[0], 0.7*b[-1], r'$r=' + str(r[ind_min]) + '$',
                 horizontalalignment='left')
        plt.xlabel(r'$\sigma_u$ [MPa]')
        plt.ylabel(r'$b$ [-]')
        plt.colorbar()
    plt.semilogx(root_failures[:, -1],
                 root_failures[:, 1],
                 'kx',
                 ms=12,
                 mew=2,
                 mfc='w')

    torques = np.arange(900., 1600., 100.)
    case_depths = np.array([1.1])

    for sectors in [2, 2]:
        job_list = []
        for torque in torques:
            job_list.append([calculate_life, [torque, 1.1, sectors], {}])

        wl_data = multi_processer(job_list, timeout=600, delay=0., cpus=8)

        simulated_pf = np.zeros_like(torques)
        N = np.zeros((torques.shape[0], len(pf_levels)))

        for force_level in range(torques.shape[0]):
            simulated_pf[force_level] = wl_data[force_level][0]
            N[force_level, :] = wl_data[force_level][1]

        for pf_level, (pf_val, color) in enumerate(zip(pf_levels, 'brg')):
            data_to_plot = np.vstack((N[:, pf_level].T, torques.T))
            plt.plot(data_to_plot[0, :],
                     data_to_plot[1, :],
                     '--' + color,
                     lw=2)