# Example #1
# 0
def calculate_fragility(capacity_curves, gmrs, damage_model, damping_model,
                        damping):
    """Build a damage probability matrix (PDM) and the spectral displacement
    of each (ground-motion record, capacity curve) pair by optimising the
    performance point over the damping value.

    NOTE(review): this variant is broken as written -- the optimize.fmin
    result is bound to `damping`, but `Sdi` is used below without ever being
    assigned, so the first iteration raises a NameError. Presumably the
    performance-point displacement should be derived from the optimised
    damping before utils.allocate_damage is called; confirm against the
    working variants of this function elsewhere in this file.
    """

    no_damage_states = len(damage_model['damage_states'])
    no_gmrs = len(gmrs['time'])
    no_capacity_curves = len(capacity_curves['Sd'])
    # Rows: ground-motion records; columns: damage states (incl. "none").
    PDM = numpy.zeros((no_gmrs, no_damage_states + 1))
    Sds = numpy.zeros((no_gmrs, no_capacity_curves))
    for icc in range(no_capacity_curves):
        # Progress indicator (Python 2 print statement, integer percentage).
        print str((icc + 1) * 100 / no_capacity_curves) + '%'
        limit_states = utils.define_limit_states(capacity_curves, icc,
                                                 damage_model)
        for igmr in range(no_gmrs):
            # Minimise calculate_performance_point over the damping value,
            # starting from 5% damping.
            damping = optimize.fmin(
                calculate_performance_point,
                0.05,
                args=(gmrs, igmr, capacity_curves, icc, damping_model,
                      damping),
                xtol=0.001,
                ftol=0.001,
                disp=False,
            )
            # NOTE(review): `Sdi` is undefined here (see docstring).
            [PDM, ds] = utils.allocate_damage(igmr, PDM, Sdi, limit_states)
            Sds[igmr][icc] = Sdi
    return PDM, Sds
# Example #2
# 0
def calculate_fragility(capacity_curves, gmrs, damage_model, damping, hysteresis_model, damping_model):

    no_damage_states = len(damage_model["damage_states"])
    no_gmrs = len(gmrs["time"])
    no_capacity_curves = len(capacity_curves["Sd"])
    PDM = numpy.zeros((no_gmrs, no_damage_states + 1))
    Sds = numpy.zeros((no_gmrs, no_capacity_curves))
    c1, c2, cR, cT = get_parameters(hysteresis_model, damping_model)
    if c1 > 0:
        for icc in range(no_capacity_curves):
            print str((icc + 1) * 100 / no_capacity_curves) + "%"
            Te = capacity_curves["periods"][icc]
            Sdy = capacity_curves["Sdy"][icc]
            for igmr in range(no_gmrs):
                limit_states = utils.define_limit_states(capacity_curves, icc, damage_model)
                time = gmrs["time"][igmr]
                acc = gmrs["acc"][igmr]
                spec_Te = utils.NigamJennings(time, acc, [Te], damping)
                spec_10sec = utils.NigamJennings(time, acc, [1.0], damping)
                spec_03sec = utils.NigamJennings(time, acc, [0.3], damping)
                Tc = spec_10sec["Sa"] / spec_03sec["Sa"]
                Sdi = optimize.fmin(
                    calculate_Sd,
                    Sdy,
                    args=(spec_Te["Sd"], Tc, capacity_curves, icc, c1, c2, cR, cT),
                    xtol=0.001,
                    ftol=0.001,
                    disp=False,
                )
                [PDM, ds] = utils.allocate_damage(igmr, PDM, Sdi, limit_states)
                Sds[igmr][icc] = Sdi

    return PDM, Sds
# Example #3
# 0
def calculate_fragility(capacity_curves, gmrs, damage_model, damping,
                        hysteresis_model, damping_model):

    no_damage_states = len(damage_model['damage_states'])
    no_gmrs = len(gmrs['time'])
    no_capacity_curves = len(capacity_curves['Sd'])
    PDM = numpy.zeros((no_gmrs, no_damage_states + 1))
    Sds = numpy.zeros((no_gmrs, no_capacity_curves))
    c1, c2, cR, cT = get_parameters(hysteresis_model, damping_model)
    if c1 > 0:
        for icc in range(no_capacity_curves):
            print str((icc + 1) * 100 / no_capacity_curves) + '%'
            Te = capacity_curves['periods'][icc]
            Sdy = capacity_curves['Sdy'][icc]
            for igmr in range(no_gmrs):
                limit_states = utils.define_limit_states(
                    capacity_curves, icc, damage_model)
                time = gmrs['time'][igmr]
                acc = gmrs['acc'][igmr]
                spec_Te = utils.NigamJennings(time, acc, [Te], damping)
                spec_10sec = utils.NigamJennings(time, acc, [1.0], damping)
                spec_03sec = utils.NigamJennings(time, acc, [0.3], damping)
                Tc = spec_10sec['Sa'] / spec_03sec['Sa']
                Sdi = optimize.fmin(calculate_Sd,
                                    Sdy,
                                    args=(spec_Te['Sd'], Tc, capacity_curves,
                                          icc, c1, c2, cR, cT),
                                    xtol=0.001,
                                    ftol=0.001,
                                    disp=False)
                [PDM, ds] = utils.allocate_damage(igmr, PDM, Sdi, limit_states)
                Sds[igmr][icc] = Sdi

    return PDM, Sds
# Example #4
# 0
def calculate_fragility(capacity_curves, hysteresis, msa, gmrs, damage_model,
                        damping, degradation):

    no_damage_states = len(damage_model['damage_states'])
    no_bins = msa['n. bins']
    no_rec_bin = msa['records per bin']
    no_gmrs = no_bins * no_rec_bin
    no_capacity_curves = len(capacity_curves['Sd'])
    PDM = numpy.zeros((no_bins, no_damage_states + 1))
    Sds = numpy.zeros((no_gmrs, no_capacity_curves))
    gmr_info = numpy.zeros((no_gmrs, 2))

    for icc in range(no_capacity_curves):
        print str((icc + 1) * 100 / no_capacity_curves) + '%'
        folder_scaled_recs = msa['input folder']
        target_files = os.listdir(folder_scaled_recs)
        for ele in target_files:
            if ele[-3:] != 'csv':
                target_files.remove(ele)
        counter = 0
        for index, ibin in enumerate(target_files):
            if index < no_bins:
                print ibin
            else:
                break

            with open(folder_scaled_recs + '/' + ibin, 'rb') as f:
                reader = csv.reader(f)
                newlist = [row for row in reader]
            record_name = [ele[0] for ele in newlist]
            record_scaling = [float(ele[1]) for ele in newlist]

            for igmr in range(no_rec_bin):
                limit_states = utils.define_limit_states(
                    capacity_curves, icc, damage_model)
                target_name = record_name[igmr]
                scaling_factor = record_scaling[igmr]
                time, disps = run_time_history_analysis(
                    capacity_curves, hysteresis, icc, gmrs, target_name,
                    scaling_factor, damping, degradation)
                Sdi = max(numpy.abs(numpy.array(disps)))
                #with open('dynamicResult.csv', 'rb') as f:
                #reader = csv.reader(f)
                #newlist = [row[0] for row in reader]
                #if newlist == ['KO']:
                #   ds = no_ls
                [PDM, ds] = utils.allocate_damage(index, PDM, Sdi,
                                                  limit_states)
                gmr_info[counter, :] = [
                    gmrs['name'].index(target_name), scaling_factor
                ]
                Sds[counter][icc] = Sdi
                counter = counter + 1
                print "gmr n.", counter, "max disp. =", Sdi, "DS =", ds

    return PDM, Sds, gmr_info
# Example #5
# 0
def calculate_fragility(capacity_curves, hysteresis, msa, gmrs,damage_model,damping,degradation):
    
    no_damage_states = len(damage_model['damage_states'])
    no_bins = msa['n. bins']
    no_rec_bin = msa['records per bin']
    no_gmrs = no_bins*no_rec_bin
    no_capacity_curves = len(capacity_curves['Sd'])
    PDM = numpy.zeros((no_bins,no_damage_states+1))
    Sds = numpy.zeros((no_gmrs,no_capacity_curves))
    gmr_info = numpy.zeros((no_gmrs,2))
    
    for icc in range(no_capacity_curves):
        print str((icc+1)*100/no_capacity_curves) + '%'
        folder_scaled_recs = msa['input folder']
        target_files = os.listdir(folder_scaled_recs)
        for ele in target_files:
            if ele[-3:] != 'csv':
                target_files.remove(ele)
        counter = 0
        for index,ibin in enumerate(target_files):
            if index < no_bins:
                print ibin
            else:
                break
            
            with open(folder_scaled_recs+'/'+ibin, 'rb') as f:
                reader = csv.reader(f)
                newlist = [row for row in reader]
            record_name = [ele[0] for ele in newlist]
            record_scaling = [float(ele[1]) for ele in newlist]
            
            for igmr in range(no_rec_bin):
                limit_states = utils.define_limit_states(capacity_curves, icc,damage_model)
                target_name = record_name[igmr]
                scaling_factor = record_scaling[igmr]
                time, disps = run_time_history_analysis(capacity_curves,hysteresis,icc,gmrs,target_name,scaling_factor,damping,degradation)
                Sdi = max(numpy.abs(numpy.array(disps)))
                    #with open('dynamicResult.csv', 'rb') as f:
                    #reader = csv.reader(f)
                    #newlist = [row[0] for row in reader]
                    #if newlist == ['KO']:
                    #   ds = no_ls
                [PDM, ds] = utils.allocate_damage(index,PDM,Sdi,limit_states)
                gmr_info[counter,:] = [gmrs['name'].index(target_name), scaling_factor]
                Sds[counter][icc] = Sdi
                counter = counter+1
                print "gmr n.", counter, "max disp. =", Sdi, "DS =", ds
            
    return PDM, Sds, gmr_info
# Example #6
# 0
def calculate_fragility(capacity_curves,gmrs,damage_model,damping_model,damping):
    """Build a damage probability matrix (PDM) and per-record spectral
    displacements by optimising the performance point over damping.

    NOTE(review): broken as written -- the optimize.fmin result is assigned
    to `damping`, but `Sdi` is then used without ever being defined, so the
    first iteration raises a NameError. The performance-point displacement
    presumably needs to be computed from the optimised damping first; confirm
    against the working variants of this function elsewhere in this file.
    """

    no_damage_states = len(damage_model['damage_states'])
    no_gmrs = len(gmrs['time'])
    no_capacity_curves = len(capacity_curves['Sd'])
    # Rows: ground-motion records; columns: damage states (incl. "none").
    PDM = numpy.zeros((no_gmrs,no_damage_states+1))
    Sds = numpy.zeros((no_gmrs,no_capacity_curves))
    for icc in range(no_capacity_curves):
        # Progress indicator (Python 2 print statement).
        print str((icc+1)*100/no_capacity_curves) + '%'
        for igmr in range(no_gmrs):
            limit_states = utils.define_limit_states(capacity_curves,icc,damage_model)
            # Minimise calculate_performance_point over damping, from 5%.
            damping = optimize.fmin(calculate_performance_point, 0.05, args=(gmrs,igmr,capacity_curves,icc,damping_model,damping), xtol=0.001, ftol=0.001, disp = False, )
            # NOTE(review): `Sdi` is undefined here (see docstring).
            [PDM, ds] = utils.allocate_damage(igmr,PDM,Sdi,limit_states)
            Sds[igmr][icc] = Sdi
    return PDM, Sds
# Example #7
# 0
def calculate_fragility(capacity_curves, hysteresis, gmrs,damage_model,damping,degradation):
    
    no_damage_states = len(damage_model['damage_states'])
    no_gmrs = len(gmrs['time'])
    no_capacity_curves = len(capacity_curves['Sd'])
    PDM = numpy.zeros((no_gmrs,no_damage_states+1))
    Sds = numpy.zeros((no_gmrs,no_capacity_curves))
    for icc in range(no_capacity_curves):
        print str((icc+1)*100/no_capacity_curves) + '%'
        for igmr in range(no_gmrs):
            limit_states = utils.define_limit_states(capacity_curves, icc,damage_model)
            time, disps = run_time_history_analysis(capacity_curves,hysteresis,icc,gmrs,igmr,damping,degradation)
            Sdi = max(numpy.abs(numpy.array(disps)))
            [PDM, ds] = utils.allocate_damage(igmr,PDM,Sdi,limit_states)
            Sds[igmr][icc] = Sdi
    return PDM, Sds
# Example #8
# 0
def calculate_fragility(capacity_curves,gmrs,damage_model,damping):
#This function returns a damage probability matrix (PDM) and the corresponding spectral displacements
    
    no_damage_states = len(damage_model['damage_states'])
    no_gmrs = len(gmrs['time'])
    no_capacity_curves = len(capacity_curves['Sd'])
    PDM = numpy.zeros((no_gmrs,no_damage_states+1))
    Sds = numpy.zeros((no_gmrs,no_capacity_curves))
    for icc in range(no_capacity_curves):
        print str((icc+1)*100/no_capacity_curves) + '%'
        for igmr in range(no_gmrs):
            limit_states = utils.define_limit_states(capacity_curves,icc,damage_model)
            Sdi = calculate_target_Sd(gmrs,igmr,capacity_curves,icc,damping)
            [PDM, ds] = utils.allocate_damage(igmr,PDM,Sdi,limit_states)
            Sds[igmr][icc] = Sdi
    return PDM, Sds
# Example #9
# 0
def calculate_fragility(capacity_curves, gmrs, damage_model):
    #This function returns a damage probability matrix (PDM) and the corresponding spectral displacements

    no_damage_states = len(damage_model['damage_states'])
    no_gmrs = len(gmrs['time'])
    no_capacity_curves = len(capacity_curves['Sd'])
    PDM = numpy.zeros((no_gmrs, no_damage_states + 1))
    Sds = numpy.zeros((no_gmrs, no_capacity_curves))
    for icc in range(no_capacity_curves):
        print str((icc + 1) * 100 / no_capacity_curves) + '%'
        for igmr in range(no_gmrs):
            limit_states = utils.define_limit_states(capacity_curves, icc,
                                                     damage_model)
            Sdi = calculate_Sd(gmrs, igmr, capacity_curves, icc)
            [PDM, ds] = utils.allocate_damage(igmr, PDM, Sdi, limit_states)
            Sds[igmr][icc] = Sdi
    return PDM, Sds
# Example #10
# 0
def calculate_fragility(capacity_curves, hysteresis, gmrs, damage_model,
                        damping, degradation):
    no_damage_states = len(damage_model['damage_states'])
    no_gmrs = len(gmrs['time'])
    no_capacity_curves = len(capacity_curves['Sd'])
    PDM = numpy.zeros((no_gmrs, no_damage_states+1))
    Sds = numpy.zeros((no_gmrs, no_capacity_curves))
    for icc in range(no_capacity_curves):
        print str((icc+1)*100/no_capacity_curves) + '%'
        for igmr in range(no_gmrs):
            limit_states = utils.define_limit_states(capacity_curves, icc,
                                                     damage_model)
            time, disps = run_time_history_analysis(capacity_curves,
                                                    hysteresis, icc, gmrs,
                                                    igmr, damping, degradation)
            Sdi = max(numpy.abs(numpy.array(disps)))
            [PDM, ds] = utils.allocate_damage(igmr, PDM, Sdi, limit_states)
            Sds[igmr][icc] = Sdi
    return PDM, Sds
# Example #11
# 0
def calculate_fragility(capacity_curves,gmrs,damage_model,damping):
#This function returns a damage probability matrix (PDM) and the corresponding spectral displacements
#after an iterative process to find the minimum Sd value for each case 

    no_damage_states = len(damage_model['damage_states'])
    no_gmrs = len(gmrs['time'])
    no_capacity_curves = len(capacity_curves['Sd'])
    PDM = numpy.zeros((no_gmrs,no_damage_states+1))
    Sds = numpy.zeros((no_gmrs,no_capacity_curves))
    for icc in range(no_capacity_curves):
        print str((icc+1)*100/no_capacity_curves) + '%'
        Te = capacity_curves['periods'][icc]
        Sdy = capacity_curves['Sdy'][icc]
        for igmr in range(no_gmrs):
            limit_states = utils.define_limit_states(capacity_curves,icc,damage_model)
            time = gmrs['time'][igmr]
            acc = gmrs['acc'][igmr]
            spec_Te = utils.NigamJennings(time,acc,[Te],damping) 
            Sdi = optimize.fmin(calculate_Sd, Sdy, args=(spec_Te['Sd'],capacity_curves,icc), xtol=0.001, ftol=0.001, disp = False, )
            [PDM, ds] = utils.allocate_damage(igmr,PDM,Sdi,limit_states)
            Sds[igmr][icc] = Sdi
    return PDM, Sds
# Example #12
# 0
def run_NLTHA(icc, capacity_curves, hysteresis, gmrs, gmr_characteristics,
              damage_model, damping, degradation, number_models_in_DS, msa):
    """Run a set of NLTHA for capacity curve `icc` and write, for each damage
    state, a 'initial_DS<n>.csv' file (via save_DS_list) listing the records
    that brought the structure to that DS. Analyses for a DS stop once
    number_models_in_DS models have reached it; if unscaled records are not
    enough, records from the previous DS are re-run with increasing scaling
    factors (1.2 to 3.0).

    NOTE(review): if the inner record loop never executes (e.g. no_gmrs == 0,
    or counter already equals no_gmrs), `limit_states` is unbound when
    save_DS_list is called at the bottom -- confirm intended behaviour.
    """

    # This function runs a set of NLTHA and writes for each
    # Damage States a file with the list of records causing the structure to
    # fall in the given DS. The analyses stop when the number_models_in_DS is
    # reached for each DS.

    no_damage_states = len(damage_model['damage_states'])
    no_gmrs = len(gmrs['time'])
    no_rec_bin = msa['records per bin']

    # remove files containing results of previous analyses if they exist
    for iDS in range(0, no_damage_states + 1):
        try:
            os.remove('initial_DS%d.csv' % iDS)
        except OSError:
            pass

    counter = 0
    # we are running all the gmrs and counter is the variable that records
    # the count of gmrs already run
    PDM1 = numpy.zeros((no_gmrs, no_damage_states + 1))
    # store_DS rows: [record name, damage state reached, scaling factor].
    store_DS = []
    for iDS in range(0, no_damage_states):
        # in this loop are excluded DS=0 and DS=collapse because they are
        # not of interest for fragility of damaged buildings
        # NOTE(review): the comment above says DS=0 is excluded, but the
        # range starts at 0 -- verify which is intended.
        for igmr in range(counter, no_gmrs):
            limit_states = utils.define_limit_states(capacity_curves, icc,
                                                     damage_model)
            counter = igmr + 1
            target_name = gmrs['name'][igmr]
            available_aftershocks = filter_M_T(target_name,
                                               gmr_characteristics)
            # Only run records that have enough compatible aftershocks.
            if available_aftershocks >= no_rec_bin:
                time, disps = run_time_history_analysis(
                    capacity_curves, hysteresis, icc, gmrs, target_name, 1,
                    damping, degradation)
                # Peak absolute displacement of the response history.
                Sdi = max(numpy.abs(numpy.array(disps)))
                PDM1, ds = utils.allocate_damage(igmr, PDM1, Sdi, limit_states)
                store_DS.append([gmrs['name'][igmr], ds, 1])
            # if maximum number of models in DS is achieved stop
            if sum(PDM1[:, iDS]) >= number_models_in_DS:
                break

        # This section works in case not enough records per DS are found
        if sum(PDM1[:, iDS]) < number_models_in_DS and iDS != 0:
            print "WARNING: there are not enough strong ground motions in DS ", str(
                iDS), ", scaling is required"
            for previous_DS in numpy.arange(iDS, 0, -1):
                # finding gmrs leading to iDS-1 and applying scaling factor to them
                # store_DS entries are compared as strings because
                # numpy.array(store_DS) yields a string-typed array.
                temp = numpy.array(store_DS)
                list_of_models = temp[scipy.logical_and(
                    temp[:, 1] == str(iDS - 1), temp[:, 2] == str(
                        1))]  # names of the ground motion for the previous iDS
                for scaling_factor in numpy.linspace(1.2, 3, 7):
                    for ele in list_of_models:
                        limit_states = utils.define_limit_states(
                            capacity_curves, icc, damage_model)
                        gmr_name = ele[0]
                        igmr = gmrs['name'].index(gmr_name)
                        # mainshock Mw and Tg are compared to all aftershock Mw and Tg
                        available_aftershocks = filter_M_T(
                            gmr_name, gmr_characteristics)
                        if available_aftershocks < no_rec_bin:
                            # Drop records without enough aftershocks.
                            index = numpy.where(list_of_models[:, 0] == ele[0])
                            list_of_models = numpy.delete(list_of_models,
                                                          (index),
                                                          axis=0)
                        else:
                            time, disps = run_time_history_analysis(
                                capacity_curves, hysteresis, icc, gmrs,
                                gmr_name, scaling_factor, damping, degradation)
                            Sdi = max(numpy.abs(numpy.array(disps)))
                            PDM1, ds = utils.allocate_damage(
                                igmr, PDM1, Sdi, limit_states)
                            store_DS.append([gmr_name, ds, scaling_factor])
                            if ds >= iDS:
                                # Record has reached the target DS: remove it
                                # from the list of candidates to re-scale.
                                index = numpy.where(
                                    list_of_models[:, 0] == ele[0])
                                list_of_models = numpy.delete(list_of_models,
                                                              (index),
                                                              axis=0)

                        if sum(PDM1[:, iDS]) >= number_models_in_DS: break
                    if sum(PDM1[:, iDS]) >= number_models_in_DS: break
                if sum(PDM1[:, iDS]) >= number_models_in_DS: break
            if sum(PDM1[:, iDS]) < number_models_in_DS:
                print "WARNING: not enough models in DS%d" % iDS

        # Store names of records and corresponding DS achieved so far
        save_DS_list(iDS, store_DS, limit_states)
# Example #13
# 0
def run_2MSA(icc, capacity_curves, hysteresis, msa, damage_model, gmrs,
             damping, degradation, PDMs, Sds, gmr_info, gmr_characteristics):
    """Back-to-back (mainshock + aftershock) multiple-stripe analysis for
    capacity curve `icc`. For every initial damage state iDS, the records
    listed in 'initial_DS<iDS>.csv' (first ground motion, bringing the system
    to iDS) are paired with the scaled records of each IM bin (second ground
    motion) and run through run_2time_history_analyses; results accumulate
    into the caller-supplied PDMs / Sds / gmr_info dictionaries, which are
    mutated in place and also returned.

    NOTE(review): the non-csv filter below removes items from target_files
    while iterating over it, which skips the entry following each removal --
    some non-csv files may survive the filter.
    """

    no_damage_states = len(damage_model['damage_states'])
    no_bins = msa['n. bins']
    no_rec_bin = msa['records per bin']
    no_gmrs = no_bins * no_rec_bin
    # 'filter' flag: when 'FALSE', aftershock magnitude/period screening is
    # bypassed (see condition inside the innermost loop).
    filter_aftershock = msa['filter']
    Mw_multiplier = gmr_characteristics[-1]

    counter = 0
    for iDS in range(1, no_damage_states):

        PDM = PDMs['iDS' + str(iDS)]
        # Open files with records name bringing the system to DS = iDS
        with open('initial_DS%d.csv' % iDS, 'rU') as f:
            gmr_list = [row for row in csv.reader(f)]

        for index_gmr1, gmr_name in enumerate(gmr_list):
            print "Initial DS=", iDS
            igmr1 = gmrs['name'].index(
                gmr_name[0])  # index for 1st gmr within gmrs list
            scl1 = float(gmr_name[1])  #scaling factor for first ground motion
            [M_m, Tg_m, Mw, Tg] = find_M_T(gmr_name[0], gmr_characteristics)

            # Read files with records name and corresponding scaling factor for each target IM
            folder_scaled_recs = msa['input folder']
            target_files = os.listdir(folder_scaled_recs)
            for ele in target_files:
                # Remove files that are not csv
                # NOTE(review): mutates the list being iterated (see docstring).
                if ele[-3:] != 'csv':
                    target_files.remove(ele)

            for index, ibin in enumerate(target_files):
                if index >= no_bins: break
                print ibin

                with open(folder_scaled_recs + '/' + ibin, 'rb') as f:
                    reader = csv.reader(f)
                    newlist = [row for row in reader]
                record_name = [ele[0] for ele in newlist]
                record_scaling = [float(ele[1]) for ele in newlist]

                # Run NLTHA with 1st gmr to bring the system to iDS and 2nd gmr corresponding to target IM
                for igmr in range(len(record_name)):
                    limit_states = utils.define_limit_states(
                        capacity_curves, icc, damage_model)
                    target_name = record_name[igmr]
                    scl2 = record_scaling[igmr]
                    [M_a, Tg_a, Mw, Tg] = find_M_T(target_name,
                                                   gmr_characteristics)
                    # Accept the aftershock only if its magnitude and period
                    # are below the mainshock thresholds, unless filtering is
                    # disabled.
                    if scipy.logical_and(
                            M_a < M_m * Mw_multiplier,
                            Tg_a < Tg_m) or filter_aftershock == 'FALSE':
                        igmr2 = gmrs['name'].index(
                            target_name
                        )  # index of 2nd gmr within the gmrs list
                        time, disps = run_2time_history_analyses(
                            capacity_curves, hysteresis, icc, gmrs, igmr1,
                            igmr2, scl1, scl2, damping, degradation)
                        # Peak absolute displacement of the combined history.
                        Sdi = max(numpy.abs(numpy.array(disps)))
                        # In PDM the number of models in each final damage state corresponding
                        # to the same IM bin (defined by the variable index) are summed up
                        PDM, ds = utils.allocate_damage(
                            index, PDM, Sdi, limit_states)
                        gmr_info['iDS' + str(iDS)].append([igmr2, scl2])
                        Sds['iDS' + str(iDS)].append(Sdi)
                        counter = counter + 1
                        print "gmr n.", counter, "max disp. =", Sdi, "DS =", ds
                    if sum(PDM[index, :]) >= no_rec_bin * (index_gmr1 +
                                                           1) * (icc + 1):
                        break

        PDMs['iDS' + str(iDS)] = PDM

    return PDMs, Sds, gmr_info
# Example #14
# 0
def run_NLTHA(icc,capacity_curves,hysteresis,gmrs,gmr_characteristics,damage_model,damping,degradation,number_models_in_DS,msa):
    """Run a set of NLTHA for capacity curve `icc` and write, per damage
    state, a 'initial_DS<n>.csv' file (via save_DS_list) with the records
    bringing the structure to that DS. Analyses stop per DS once
    number_models_in_DS is reached; otherwise records from the previous DS
    are re-run with scaling factors from 1.2 to 3.0.

    NOTE(review): if the record loop never runs (no_gmrs == 0, or counter
    already at no_gmrs), `limit_states` is unbound when save_DS_list is
    called at the bottom -- confirm intended behaviour.
    """
    
    # This function runs a set of NLTHA and writes for each 
    # Damage States a file with the list of records causing the structure to 
    # fall in the given DS. The analyses stop when the number_models_in_DS is 
    # reached for each DS.
    
    no_damage_states = len(damage_model['damage_states'])
    no_gmrs = len(gmrs['time'])
    no_rec_bin = msa['records per bin']
    
    # remove files containing results of previous analyses if they exist
    for iDS in range(0,no_damage_states+1):
        try:
            os.remove('initial_DS%d.csv' %iDS)
        except OSError:
            pass
        
    counter = 0
    # we are running all the gmrs and counter is the variable that records 
    # the count of gmrs already run
    PDM1 = numpy.zeros((no_gmrs,no_damage_states+1))
    # store_DS rows: [record name, damage state reached, scaling factor].
    store_DS = []
    for iDS in range(0,no_damage_states):
    # in this loop are excluded DS=0 and DS=collapse because they are 
    # not of interest for fragility of damaged buildings
    # NOTE(review): the comment says DS=0 is excluded but range starts at 0.
        for igmr in range(counter,no_gmrs):
            limit_states = utils.define_limit_states(capacity_curves,icc,damage_model)
            counter = igmr+1
            target_name = gmrs['name'][igmr]
            available_aftershocks = filter_M_T(target_name,gmr_characteristics)
            # Only run records that have enough compatible aftershocks.
            if available_aftershocks >= no_rec_bin:
                time, disps = run_time_history_analysis(capacity_curves, hysteresis, icc,gmrs,target_name,1,damping,degradation)
                # Peak absolute displacement of the response history.
                Sdi = max(numpy.abs(numpy.array(disps)))
                PDM1, ds = utils.allocate_damage(igmr,PDM1,Sdi,limit_states)
                store_DS.append([gmrs['name'][igmr], ds, 1])
            # if maximum number of models in DS is achieved stop
            if sum(PDM1[:,iDS]) >= number_models_in_DS:
                break
            
        # This section works in case not enough records per DS are found
        if sum(PDM1[:,iDS]) < number_models_in_DS and iDS != 0:
            print "WARNING: there are not enough strong ground motions in DS ", str(iDS),", scaling is required"
            for previous_DS in numpy.arange(iDS,0,-1):
                # finding gmrs leading to iDS-1 and applying scaling factor to them
                # Comparisons below are string-based: numpy.array(store_DS)
                # produces a string-typed array.
                temp = numpy.array(store_DS)
                list_of_models = temp[scipy.logical_and(temp[:,1] == str(iDS-1),temp[:,2] == str(1))] # names of the ground motion for the previous iDS
                for scaling_factor in numpy.linspace(1.2,3,7):       
                    for ele in list_of_models:
                        limit_states = utils.define_limit_states(capacity_curves,icc,damage_model)
                        gmr_name = ele[0]                        
                        igmr = gmrs['name'].index(gmr_name)
                        # mainshock Mw and Tg are compared to all aftershock Mw and Tg
                        available_aftershocks = filter_M_T(gmr_name,gmr_characteristics)
                        if available_aftershocks < no_rec_bin:
                            # Drop records without enough aftershocks.
                            index = numpy.where(list_of_models[:,0] == ele[0])
                            list_of_models = numpy.delete(list_of_models, (index), axis=0)
                        else:
                            time, disps = run_time_history_analysis(capacity_curves, hysteresis, icc,gmrs,gmr_name,scaling_factor,damping,degradation)              
                            Sdi = max(numpy.abs(numpy.array(disps)))
                            PDM1, ds = utils.allocate_damage(igmr,PDM1,Sdi,limit_states)
                            store_DS.append([gmr_name, ds, scaling_factor])
                            if ds >= iDS:
                                # Target DS reached: stop re-scaling this record.
                                index = numpy.where(list_of_models[:,0] == ele[0])
                                list_of_models = numpy.delete(list_of_models, (index), axis=0)
                                
                        if sum(PDM1[:,iDS]) >= number_models_in_DS: break
                    if sum(PDM1[:,iDS]) >= number_models_in_DS: break
                if sum(PDM1[:,iDS]) >= number_models_in_DS: break
            if sum(PDM1[:,iDS]) < number_models_in_DS:
                print "WARNING: not enough models in DS%d" %iDS 

        # Store names of records and corresponding DS achieved so far            
        save_DS_list(iDS,store_DS, limit_states)
# Example #15
# 0
def run_2MSA(icc,capacity_curves,hysteresis,msa,damage_model,gmrs,damping,degradation,PDMs, Sds, gmr_info, gmr_characteristics):
    """Back-to-back (mainshock + aftershock) multiple-stripe analysis for
    capacity curve `icc`: records from 'initial_DS<iDS>.csv' (bringing the
    system to iDS) are paired with the scaled records of each IM bin and run
    through run_2time_history_analyses; results accumulate into the
    caller-supplied PDMs / Sds / gmr_info dictionaries (mutated in place and
    returned).

    NOTE(review): the non-csv filter below removes items from target_files
    while iterating it, which skips the entry after each removal -- some
    non-csv files can survive the filter.
    """
    
    no_damage_states = len(damage_model['damage_states'])
    no_bins = msa['n. bins']
    no_rec_bin = msa['records per bin']
    no_gmrs = no_bins*no_rec_bin
    # When 'FALSE', the aftershock magnitude/period screening is bypassed.
    filter_aftershock = msa['filter']
    Mw_multiplier = gmr_characteristics[-1]
    
    counter = 0
    for iDS in range(1,no_damage_states):
        
        PDM = PDMs['iDS'+str(iDS)]
        # Open files with records name bringing the system to DS = iDS         
        with open('initial_DS%d.csv' %iDS, 'rU') as f:
            gmr_list = [row for row in csv.reader(f)]
            
        for index_gmr1, gmr_name in enumerate(gmr_list):
            print "Initial DS=", iDS
            igmr1 = gmrs['name'].index(gmr_name[0]) # index for 1st gmr within gmrs list
            scl1 = float(gmr_name[1]) #scaling factor for first ground motion
            [M_m, Tg_m, Mw, Tg] = find_M_T(gmr_name[0], gmr_characteristics)
            
            # Read files with records name and corresponding scaling factor for each target IM
            folder_scaled_recs = msa['input folder']
            target_files = os.listdir(folder_scaled_recs)
            for ele in target_files:
            # Remove files that are not csv
            # NOTE(review): mutates the list being iterated (see docstring).
                if ele[-3:] != 'csv':
                    target_files.remove(ele)
                    
            for index,ibin in enumerate(target_files):
                if index>=no_bins: break
                print ibin
                
                with open(folder_scaled_recs+'/'+ibin, 'rb') as f:
                    reader = csv.reader(f)
                    newlist = [row for row in reader]
                record_name = [ele[0] for ele in newlist]
                record_scaling = [float(ele[1]) for ele in newlist]
                
                # Run NLTHA with 1st gmr to bring the system to iDS and 2nd gmr corresponding to target IM
                for igmr in range(len(record_name)):
                    limit_states = utils.define_limit_states(capacity_curves,icc,damage_model)
                    target_name = record_name[igmr]
                    scl2 = record_scaling[igmr]
                    [M_a, Tg_a, Mw, Tg] = find_M_T(target_name, gmr_characteristics)
                    # Accept the aftershock only if its magnitude and period
                    # are below the mainshock thresholds, unless filtering is
                    # disabled.
                    if scipy.logical_and(M_a<M_m*Mw_multiplier, Tg_a<Tg_m) or filter_aftershock=='FALSE':
                        igmr2 = gmrs['name'].index(target_name) # index of 2nd gmr within the gmrs list
                        time, disps = run_2time_history_analyses(capacity_curves,hysteresis,icc,gmrs,igmr1,igmr2,scl1,scl2,damping,degradation)
                        # Peak absolute displacement of the combined history.
                        Sdi = max(numpy.abs(numpy.array(disps)))
                        # In PDM the number of models in each final damage state corresponding 
                        # to the same IM bin (defined by the variable index) are summed up                     
                        PDM, ds = utils.allocate_damage(index,PDM,Sdi,limit_states)
                        gmr_info['iDS' + str(iDS)].append([igmr2, scl2])
                        Sds['iDS' + str(iDS)].append(Sdi)
                        counter = counter+1
                        print "gmr n.", counter, "max disp. =", Sdi, "DS =", ds
                    if sum(PDM[index,:]) >= no_rec_bin*(index_gmr1+1)*(icc+1): break
                        
        PDMs['iDS' + str(iDS)] = PDM
            
    return PDMs, Sds, gmr_info