Example #1

import numpy as np
import pytest
from matplotlib.path import Path


def test_contains_points_dtype():
    # NOTE: header and imports reconstructed; the scraped snippet starts mid-function
    path = Path.unit_circle()
    points = [(0.0, 0.0), (1.25, 0.0)]
    ret = path.contains_points(points)
    assert ret.dtype == 'bool'
    np.testing.assert_equal(ret, [True, False])


def test_contains_points_negative_radius():
    path = Path.unit_circle()

    points = [(0.0, 0.0), (1.25, 0.0), (0.9, 0.9)]
    result = path.contains_points(points, radius=-0.5)
    np.testing.assert_equal(result, [True, False, False])
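
A quick illustrative sketch of the radius argument exercised above (a hypothetical demo, not part of the test file): per the matplotlib docs, the path is extended tangentially by radius/2, so a positive radius dilates the hit area and a negative one shrinks it.

from matplotlib.path import Path

circle = Path.unit_circle()
print(circle.contains_point((1.25, 0.0)))              # False: outside the unit circle
print(circle.contains_point((1.25, 0.0), radius=1.0))  # True: boundary pushed out by ~0.5
print(circle.contains_point((0.9, 0.0), radius=-1.0))  # False: boundary pulled in by ~0.5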


_test_paths = [
    # interior extrema determine extents and degenerate derivative
    Path([[0, 0], [1, 0], [1, 1], [0, 1]],
         [Path.MOVETO, Path.CURVE4, Path.CURVE4, Path.CURVE4]),
    # a quadratic curve
    Path([[0, 0], [0, 1], [1, 0]], [Path.MOVETO, Path.CURVE3, Path.CURVE3]),
    # a linear curve, degenerate vertically
    Path([[0, 1], [1, 1]], [Path.MOVETO, Path.LINETO]),
    # a point
    Path([[1, 2]], [Path.MOVETO]),
]

_test_path_extents = [(0., 0., 0.75, 1.), (0., 0., 1., 0.5), (0., 1., 1., 1.),
                      (1., 2., 1., 2.)]


@pytest.mark.parametrize('path, extents', zip(_test_paths, _test_path_extents))
def test_exact_extents(path, extents):
    # notice that if we just looked at the control points to get the bounding
    # box, we would overestimate the extents of the curved paths above, whose
    # extrema lie in the interior of the curve
    # (assertion reconstructed; the scraped snippet is truncated here)
    np.testing.assert_equal(path.get_extents().extents, extents)
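
A standalone sketch contrasting the exact extents asserted above with the naive control-point bounding box, for the first cubic in _test_paths (assumes matplotlib >= 3.3, where Path.get_extents solves curves for their interior extrema; the 0.75 comes from _test_path_extents):

import numpy as np
from matplotlib.path import Path

cubic = Path([[0, 0], [1, 0], [1, 1], [0, 1]],
             [Path.MOVETO, Path.CURVE4, Path.CURVE4, Path.CURVE4])
print(cubic.get_extents().extents)   # exact extents: [0.   0.   0.75 1.  ]
print(cubic.vertices.min(axis=0),    # the control-point box would give
      cubic.vertices.max(axis=0))    # (0, 0)..(1, 1): too wide in x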
Example #2
def get_dates(inps):
    # Given the SLC directory, this function extracts the acquisition dates
    # and prepares a dictionary of Sentinel SLC files such that keys are
    # acquisition dates and values are instances of the sentinelSLC class
    # defined in Stack.py

    if inps.bbox is not None:
        bbox = [float(val) for val in inps.bbox.split()]

    if inps.exclude_dates is not None:
        excludeList = inps.exclude_dates.split(',')
    else:
        excludeList = []

    if inps.include_dates is not None:
        includeList = inps.include_dates.split(',')
    else:
        includeList = []

    if os.path.isfile(inps.slc_dirname):
        print('reading SAFE files from: ' + inps.slc_dirname)
        SAFE_files = []
        with open(inps.slc_dirname) as fid:
            for line in fid:
                SAFE_files.append(line.strip())

    else:
        SAFE_files = glob.glob(os.path.join(inps.slc_dirname,'S1*_IW_SLC*zip')) # changed to zip file by Minyan Zhong

    if len(SAFE_files) == 0:
        raise Exception('No SAFE file found')

    elif len(SAFE_files) == 1:
        raise Exception('At least two SAFE files are required; only one SAFE file was found.')

    else:
        print ("Number of SAFE files found: "+str(len(SAFE_files)))

    if inps.startDate is not None:
        stackStartDate = datetime.datetime(*time.strptime(inps.startDate, "%Y-%m-%d")[0:6])
    else:
        # if startDate is None, fix it to the launch date of JPL's first satellite (Explorer 1) :)
        stackStartDate = datetime.datetime(*time.strptime("1958-01-31", "%Y-%m-%d")[0:6])

    if inps.stopDate is not None:
        stackStopDate = datetime.datetime(*time.strptime(inps.stopDate, "%Y-%m-%d")[0:6])
    else:
        stackStopDate = datetime.datetime(*time.strptime("2158-01-31", "%Y-%m-%d")[0:6])


    ################################
    # write down the list of SAFE files in a txt file which will be used:
    f = open('SAFE_files.txt','w')
    safe_count=0
    safe_dict={}
    bbox_poly = None
    if inps.bbox is not None:  # bbox is only defined when a bbox was given
        bbox_poly = [[bbox[0],bbox[2]],[bbox[0],bbox[3]],[bbox[1],bbox[3]],[bbox[1],bbox[2]]]
    for safe in SAFE_files:
        safeObj=sentinelSLC(safe)
        safeObj.get_dates()
        if safeObj.start_date_time < stackStartDate or safeObj.start_date_time > stackStopDate:
            excludeList.append(safeObj.date)
            continue

        safeObj.get_orbit(inps.orbit_dirname, inps.work_dir)

        # check whether this SAFE file is needed to cover the BBOX
        reject_SAFE=False
        if safeObj.date  not in excludeList and inps.bbox is not None:

            reject_SAFE=True
            pnts = safeObj.getkmlQUAD(safe)

            # loop over the corners; keep the SAFE file if one of the corners falls within the BBOX
            lats = []
            lons = []
            for pnt in pnts:
                lon = float(pnt.split(',')[0])
                lat = float(pnt.split(',')[1])

                # keep track of all the corners to see if the product is larger than the bbox
                lats.append(lat)
                lons.append(lon)

                # bbox = SNWE
                # polygon: bbox[0] bbox[2]       SW
                #          bbox[0] bbox[3]       SE
                #          bbox[1] bbox[3]       NE
                #          bbox[1] bbox[2]       NW
                poly = Path(bbox_poly)
                point = (lat,lon)
                in_bbox = poly.contains_point(point)


                # product corner falls within BBOX (SNWE)
                if in_bbox:
                    reject_SAFE=False


            # If the product is still being rejected, check whether the BBOX corners fall within the frame
            if reject_SAFE:
                for point in bbox_poly:
                    frame = [[a,b] for a,b in zip(lats,lons)]
                    poly = Path(frame)
                    in_frame = poly.contains_point(point)
                    if in_frame:
                        reject_SAFE=False


        if not reject_SAFE:
            if safeObj.date  not in safe_dict.keys() and safeObj.date  not in excludeList:
                safe_dict[safeObj.date]=safeObj
            elif safeObj.date  not in excludeList:
                safe_dict[safeObj.date].safe_file = safe_dict[safeObj.date].safe_file + ' ' + safe

            # write the SAFE file as it will be used
            f.write(safe + '\n')
            safe_count += 1
    # closing the SAFE file overview
    f.close()
    print ("Number of SAFE files to be used (cover BBOX): "+str(safe_count))

    ################################
    dateList = sorted(safe_dict.keys())
    print ("*****************************************")
    print ("Number of dates : " +str(len(dateList)))
    print ("List of dates : ")
    print (dateList)

    ################################
    #get the overlap lat and lon bounding box
    S=[]
    N=[]
    W=[]
    E=[]
    safe_dict_bbox={}
    safe_dict_bbox_finclude={}
    safe_dict_finclude={}
    safe_dict_frameGAP={}
    print ('date      south      north')
    for date in dateList:
        #safe_dict[date].get_lat_lon()
        safe_dict[date].get_lat_lon_v2()

        #safe_dict[date].get_lat_lon_v3(inps)
        S.append(safe_dict[date].SNWE[0])
        N.append(safe_dict[date].SNWE[1])
        W.append(safe_dict[date].SNWE[2])
        E.append(safe_dict[date].SNWE[3])
        print (date , safe_dict[date].SNWE[0],safe_dict[date].SNWE[1])
        if inps.bbox is not None:
            if safe_dict[date].SNWE[0] <= bbox[0] and safe_dict[date].SNWE[1] >= bbox[1]:
                safe_dict_bbox[date] = safe_dict[date]
                safe_dict_bbox_finclude[date] = safe_dict[date]
            elif date in includeList:
                safe_dict_finclude[date] = safe_dict[date]
                safe_dict_bbox_finclude[date] = safe_dict[date]

        # tracking dates for which there seems to be a gap in coverage
        if not safe_dict[date].frame_nogap:
            safe_dict_frameGAP[date] = safe_dict[date]

    print ("*****************************************")
    print ("The overlap region among all dates (based on the preview kml files):")
    print (" South   North   East  West ")
    print (max(S),min(N),max(W),min(E))
    print ("*****************************************")
    if max(S) > min(N):
        print ("""WARNING:
           There might not be overlap between some dates""")
        print ("*****************************************")
    ################################
    print ('All dates (' + str(len(dateList)) + ')')
    print (dateList)
    print("")
    if inps.bbox is not None:
        safe_dict = safe_dict_bbox
        dateList = sorted(safe_dict.keys())
        print ('dates covering the bbox (' + str(len(dateList)) + ')' )
        print (dateList)
        print("")

        if len(safe_dict_finclude)>0:
            # updating the dateList that will be used for those dates that are force-included
            # but do not cover the BBOX completely
            safe_dict = safe_dict_bbox_finclude
            dateList = sorted(safe_dict.keys())

            # sorting the dates of the forced include
            dateListFinclude = sorted(safe_dict_finclude.keys())
            print('dates forced included (do not cover the bbox completely, ' + str(len(dateListFinclude)) + ')')
            print(dateListFinclude)
            print("")

    # report any potential gaps in frame coverage
    if len(safe_dict_frameGAP)>0:
        dateListframeGAP = sorted(safe_dict_frameGAP.keys())
        print('dates for which it looks like there are missing frames')
        print(dateListframeGAP)
        print("")

    if inps.master_date is None:
        if len(dateList)<1:
            print('*************************************')
            print('Error:')
            print('No acquisition fulfills the temporal range and bbox requirements.')
            sys.exit(1)
        inps.master_date = dateList[0]
        print ("The master date was not chosen. The first date is considered as master date.")

    print ("")
    print ("All SLCs will be coregistered to : " + inps.master_date)

    slaveList = sorted(safe_dict.keys())
    slaveList.remove(inps.master_date)
    print ("slave dates :")
    print (slaveList)
    print ("")

    return dateList, inps.master_date, slaveList, safe_dict
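
The geometric core of get_dates is the two-way corner test: a SAFE frame is kept if any frame corner lies inside the SNWE bbox, or any bbox corner lies inside the frame polygon. A condensed, self-contained sketch of that test (frames_overlap is a hypothetical helper written for illustration, not part of stackSentinel):

from matplotlib.path import Path

def frames_overlap(bbox_snwe, frame_corners):
    S, N, W, E = bbox_snwe
    bbox_poly = [[S, W], [S, E], [N, E], [N, W]]   # (lat, lon) corners, as above
    bbox_path, frame_path = Path(bbox_poly), Path(frame_corners)
    return (any(bbox_path.contains_point(p) for p in frame_corners) or
            any(frame_path.contains_point(p) for p in bbox_poly))

print(frames_overlap((30, 32, -120, -118),
                     [[31, -119], [31, -117], [33, -117], [33, -119]]))  # True

Like the original loop, this corner-only test can miss the rare configuration where two polygons overlap through crossing edges without any corner of one falling inside the other.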
Example #3
File: test.py Project: dmklee/RRT
import numpy as np
import math
import time

# imports required by the snippet below (missing from the scraped excerpt):
import matplotlib.pyplot as plt
import matplotlib.patches as patches
from matplotlib.path import Path

verts = np.array([[0., 0.], [0., 1.], [1., 1.], [1., 0.], [0., 0.]])

codes = [
    Path.MOVETO,
    Path.LINETO,
    Path.LINETO,
    Path.LINETO,
    Path.CLOSEPOLY,
]

path = Path(verts, codes)

fig = plt.figure()
ax = fig.add_subplot(111)
ax.set_xlim(-2, 2)
ax.set_ylim(-2, 2)
plt.show(block=False)
i = 0.0
while i < 100:
    #for i in range(100):
    ax.clear()
    x = math.sin(i)
    y = math.cos(i)
    # (reconstructed continuation; the scraped snippet is truncated here)
    # redraw the unit square and the point moving around the unit circle
    ax.add_patch(patches.PathPatch(path, facecolor='none'))
    ax.plot(x, y, 'ro')
    ax.set_xlim(-2, 2)
    ax.set_ylim(-2, 2)
    plt.pause(0.05)
    i += 0.1
Example #4
def plt_mfd(Run_name,mega_MFD, scenarios_names_list, ScL_complet_list, ScL_list, Model_list,BG_hyp_list,
             dimension_used_list,faults_name_list,sample_list,b_value_list,MFD_type_list,m_Mmax,
             mega_bining_in_mag,a_s_model,b_sample,sm_sample,Mt_sample,plot_mfd,plot_as_rep,plot_Mmax,xmin,xmax,ymin,ymax,
             catalog_cum_rate,plot_mfd_detailled,bining_in_mag):
    
    
    file_scenarios_MFD_name = str(Run_name) + '/analysis/txt_files/scenarios_MFD.txt'
    file_scenarios_MFD = open(file_scenarios_MFD_name,'w')
    
    if plot_mfd == True :
        for scenario in scenarios_names_list :
            
            mfds_scenario = []
            for mfd_i in mega_MFD:
                if mfd_i[8] == scenario:
                   mfds_scenario.append(mfd_i)
                   
            mfd_scenario_cumulative = []
            mfd_source_cummulative = []
            for mfd in mfds_scenario:
                mfd_i = mfd[11::].astype(float)  # np.float is removed in modern numpy
                
                mfd_source_cummulative_i = []
                for i in range(len(mfd_i)): #calculate the cumulative for each source
                    mfd_source_cummulative_i.append(np.sum(np.array(mfd_i)[-(len(mfd_i)-i):])) 
                mfd_source_cummulative.append(mfd_source_cummulative_i)
                
            for sample in sample_list:
                rows, cols = np.where(np.array(mfds_scenario) == sample) 
                
                mfds_scenario_sample = np.take(mfd_source_cummulative,rows,axis= 0)
                
                mfd_scenario_cumulative_sample = np.sum(mfds_scenario_sample,axis=0)
                mfd_scenario_cumulative.append(mfd_scenario_cumulative_sample)
                
                    
                file_scenarios_MFD.write(scenario + '\t' + str(mfd_scenario_cumulative_sample)+'\n')
        
    file_scenarios_MFD.close()
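
    # Sidebar (editorial sketch, not executed here): the per-source cumulative
    # loop above computes, for each bin i, the total rate of events in bin i
    # and above. A vectorized numpy equivalent is a reversed cumulative sum:
    #     rates = np.asarray(mfd_vals, dtype=float)  # incremental rate per bin
    #     cum = np.cumsum(rates[::-1])[::-1]         # e.g. [.1,.05,.02] -> [.17,.07,.02]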
    
    
    #"#### plot for the whole tree
    file_branch_cumMFD_name = str(Run_name) + '/analysis/txt_files/branch_cumMFD.txt'        
    file_branch_cumMFD = open(file_branch_cumMFD_name,'w')
    
    mega_mfd_cummulative = [] # will contain the cumulative MFD for each model of the logic tree
    total_list_BG_hyp = []  # will contain the list of the M_trunc for each model of the logic tree
    total_list_complet_ScL = []
    total_list_ScL = [] # will contain the list of the ScL for each model of the logic tree
    total_list_dimension_used = [] # will contain the list of the dimension used for each model of the logic tree
    total_list_b_value = []
    total_list_MFD_type = []
    total_list_scenario_name = []
    total_list_model = []
    total_list_sample = []
    
    geologic_moment_rate = [] # list of the moment rate of each model
    geologic_moment_rate_no_as = []  # list of the moment rate of each model if no aseismic slip is considered
    
    selected_ScL = 'Init0'
    Dimention_used = 'Init0' 
    str_all_data = 'Init0' 
    Model = 'Init0' 
    BG_hyp = 'Init0'
    b_min = 'Init0'
    b_max = 'Init0' 
    MFD_type = 'Init0'
    scenario_name = 'Init0'
    sample = 'Init0'
    
    mfd_i = np.zeros(len(mega_MFD[0][11::]))
    
    index = 0
    for mega_mfd_i in mega_MFD :
        if (mega_mfd_i[0] == selected_ScL) and (mega_mfd_i[1] == Dimention_used) and (mega_mfd_i[2] == str_all_data) and (mega_mfd_i[3] == Model
        ) and (mega_mfd_i[4] == BG_hyp) and (mega_mfd_i[5] == b_min) and (mega_mfd_i[6] == b_max) and (mega_mfd_i[7] == MFD_type
        ) and (mega_mfd_i[8] == scenario_name)  and (mega_mfd_i[9] == sample): #same model, we add sources
            #print 'ok'
            mfd_i += mega_mfd_i[11::].astype(float)
            
        else : # it means it's a new model
            if sum(mfd_i) != 0. : #we calculate the cumulative MFD
                mfd_cummulative_i = []
                geologic_moment_rate_i = 0.
                for i in range(len(mfd_i)): #calculate the cumulative for each source
                    mfd_cummulative_i.append(np.sum(np.array(mfd_i)[-(len(mfd_i)-i):]))
                    M0 = 10. ** (1.5 * mega_bining_in_mag[i] + 9.1)
                    rate_M0 = M0 * mfd_i[i]
                    geologic_moment_rate_i += rate_M0
                geologic_moment_rate.append(geologic_moment_rate_i)
                geologic_moment_rate_no_as.append(geologic_moment_rate_i * 100. / (100. - float(a_s_model[index])))
                
                mega_mfd_cummulative.append(mfd_cummulative_i)
                total_list_BG_hyp.append(BG_hyp)
                total_list_complet_ScL.append((str(selected_ScL) + '_' + str(Dimention_used) + '_' + str(str_all_data)))
                total_list_ScL.append(selected_ScL)
                total_list_dimension_used.append(Dimention_used)
                total_list_model.append(Model)
                total_list_b_value.append('bmin_'+str(b_min)+'_bmax_'+str(b_max))
                total_list_MFD_type.append(MFD_type)
                total_list_scenario_name.append(scenario_name)
                total_list_sample.append(sample)
                file_branch_cumMFD.write(str(Model) + '\t' + str(MFD_type) + '\t' + str(BG_hyp) + '\t' + str(scenario_name) + '\t' + str((str(selected_ScL) + '_' + str(Dimention_used) + '_' + str(str_all_data))) + '\t' + 'bmin_'+str(b_min)+'_bmax_'+str(b_max) + '\t' + str(sample) + '\t' + '\t'.join(map(str,mfd_cummulative_i)) + '\n')
                
                index += 1
                
            mfd_i = np.zeros(len(mega_mfd_i[11::]))
            selected_ScL = mega_mfd_i[0]
            Dimention_used = mega_mfd_i[1] 
            str_all_data = mega_mfd_i[2] 
            Model = mega_mfd_i[3] 
            BG_hyp = mega_mfd_i[4] 
            b_min = mega_mfd_i[5] 
            b_max = mega_mfd_i[6] 
            MFD_type = mega_mfd_i[7]
            #a_s = mega_mfd_i[8]
            scenario_name = mega_mfd_i[8] 
            sample = mega_mfd_i[9] 
            mfd_i += mega_mfd_i[11::].astype(float)

    #we write for the last model 
    mfd_cummulative_i = []
    geologic_moment_rate_i = 0.
    for i in range(len(mfd_i)): #calculate the cumulative for each source
        mfd_cummulative_i.append(np.sum(np.array(mfd_i)[-(len(mfd_i)-i):]))
        M0 = 10. ** (1.5 * mega_bining_in_mag[i] + 9.1)
        rate_M0 = M0 * mfd_i[i]
        geologic_moment_rate_i += rate_M0
    geologic_moment_rate.append(geologic_moment_rate_i)
    geologic_moment_rate_no_as.append(geologic_moment_rate_i * 100. / (100. - float(a_s_model[index])))
    
    mega_mfd_cummulative.append(mfd_cummulative_i)
    total_list_BG_hyp.append(BG_hyp)
    total_list_complet_ScL.append((str(selected_ScL) + '_' + str(Dimention_used) + '_' + str(str_all_data)))
    total_list_ScL.append(selected_ScL)
    total_list_dimension_used.append(Dimention_used)
    total_list_model.append(Model)
    total_list_b_value.append('bmin_'+str(b_min)+'_bmax_'+str(b_max))
    total_list_MFD_type.append(MFD_type)
    total_list_scenario_name.append(scenario_name)
    total_list_sample.append(sample)
    file_branch_cumMFD.write(str(Model) + '\t' + str(MFD_type) + '\t' + str(BG_hyp) + '\t' + str(scenario_name) + '\t' + str((str(selected_ScL) + '_' + str(Dimention_used) + '_' + str(str_all_data))) + '\t' + 'bmin_'+str(b_min)+'_bmax_'+str(b_max) + '\t' + str(sample) + '\t' + '\t'.join(map(str,mfd_cummulative_i)) + '\n')
    
    file_branch_cumMFD.close()
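
    # Sidebar (editorial sketch): the moment-rate accumulation above uses the
    # standard moment-magnitude relation M0 = 10 ** (1.5 * Mw + 9.1) (N.m),
    # weighting each bin's moment by its annual rate; vectorized:
    #     M0 = 10. ** (1.5 * np.asarray(mega_bining_in_mag) + 9.1)
    #     moment_rate = np.sum(M0 * np.asarray(mfd_i, dtype=float))  # N.m / yr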
    
    
    if len(mega_mfd_cummulative) < 4 :
        plot_mfd = False       
    mfd_X = mega_mfd_cummulative
    for i in range(len(mfd_X)):
        plt.scatter(mega_bining_in_mag,mfd_X[i], c='darkcyan', s=50, edgecolor='none',marker = '_',alpha = 0.5)
    axes = plt.gca()
    axes.set_xlim([xmin,xmax])
    axes.set_ylim([ymin,ymax])
    for index_mag in range(len(mega_bining_in_mag)): 
        rate_plus = np.percentile(mfd_X,84,axis=0)[index_mag]
        rate_minus = np.percentile(mfd_X,16,axis=0)[index_mag]
        mag = mega_bining_in_mag[index_mag]
        mag_plus = mag+0.05
        mag_minus = mag-0.05
        verts = [(mag_minus, rate_minus ),
                 (mag_minus, rate_plus),
                 (mag_plus, rate_plus),
                 (mag_plus, rate_minus),
                 (mag_minus, rate_minus)]
        codes = [Path.MOVETO,
                 Path.LINETO,
                 Path.LINETO,
                 Path.LINETO,
                 Path.CLOSEPOLY]
                 
        path_poly = Path(verts, codes)
        
        patch = patches.PathPatch(path_poly,facecolor = 'darkgreen', lw = 0., alpha = 0.15)
        axes.add_patch(patch)
                    
    plt.scatter(mega_bining_in_mag,np.percentile(mfd_X,50,axis=0),
                c='darkgreen', s=25, edgecolor='none',marker = 'o',alpha = 0.8)
    plt.scatter(mega_bining_in_mag,np.percentile(mfd_X,16,axis=0),
            c='darkgreen', s=60, edgecolor='none',marker = '_',alpha = 0.8)
    plt.scatter(mega_bining_in_mag,np.percentile(mfd_X,84,axis=0),
        c='darkgreen', s=60, edgecolor='none',marker = '_',alpha = 0.8)
    plt.plot(mega_bining_in_mag,np.array(mfd_X).mean(axis=0),
                color='darkgreen', linewidth = 2)
    plt.grid()
    
    #plot the MFDs of the wholle tree with mean and percentiles    
#    for i in range(len(mega_mfd_cummulative)):
#        plt.scatter(mega_bining_in_mag,mega_mfd_cummulative[i], c='darkcyan', s=50, edgecolor='',marker = '_',alpha = 0.25)
#        
#    plt.scatter(mega_bining_in_mag,np.percentile(mega_mfd_cummulative,50,axis=0),
#                c='darkgreen', s=30, edgecolor='',marker = 'o',alpha = 0.8)
#    plt.scatter(mega_bining_in_mag,np.percentile(mega_mfd_cummulative,16,axis=0),
#            c='darkgreen', s=20, edgecolor='',marker = '+',alpha = 0.8)
#    plt.scatter(mega_bining_in_mag,np.percentile(mega_mfd_cummulative,84,axis=0),
#        c='darkgreen', s=20, edgecolor='',marker = '+',alpha = 0.8)
#    plt.scatter(mega_bining_in_mag,np.array(mega_mfd_cummulative).mean(axis=0),
#                c='darkslateblue', s=50, edgecolor='',marker = 's',alpha = 0.95) 
#    
#    
#    axes = plt.gca()
#    axes.set_xlim([xmin,xmax])
#    axes.set_ylim([ymin,ymax])
    plt.grid()
    plt.yscale('log')
    plt.title('MFD of the whole tree')
    plt.savefig(str(Run_name) + '/analysis/figures/mfd/mfd_whole_tree.png' , dpi = 180, transparent=True)
    #plt.show()
    plt.close()
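
    # Sidebar (editorial sketch): the per-bin 16th-84th percentile rectangles
    # drawn above with one PathPatch per magnitude bin could equivalently be
    # drawn as a single shaded envelope:
    #     lo = np.percentile(mfd_X, 16, axis=0)
    #     hi = np.percentile(mfd_X, 84, axis=0)
    #     plt.fill_between(mega_bining_in_mag, lo, hi,
    #                      color='darkgreen', alpha=0.15)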
    
        
    rate_in_catalog = catalog_cum_rate

    #bining_in_mag = np.linspace(5.,7.5,26)
    '''##########################################
    #plot mfd for each scenario of the logic tree
    ############################################'''
    if len(scenarios_names_list)>1:
        index_model = 0
        for model in Model_list : 
            rate_in_catalog = catalog_cum_rate[index_model]
            for scenario in scenarios_names_list :
                
                if not os.path.exists(str(Run_name) + '/analysis/figures/analyze_branches/scenario_set/' + scenario):
                    os.makedirs(str(Run_name) + '/analysis/figures/analyze_branches/scenario_set/' + scenario)
                rows = np.where(np.array(total_list_scenario_name) == scenario)[0]  
                mfd_X = []
                for index in rows :  
                    mfd = mega_mfd_cummulative[index]
                    mfd_X.append(mfd)
                    
                #density plot
                if plot_mfd == True :   
                    
                    hyp_name = scenario
                    path = str(Run_name) + '/analysis/figures/analyze_branches/scenario_set/' + scenario
                    do_the_plots(hyp_name,mfd_X,mega_bining_in_mag,xmin,xmax,ymin,ymax,Run_name,rate_in_catalog,plot_as_rep,a_s_model,rows,path,bining_in_mag)

            index_model += 1
                
    '''##########################################
    #plot mfd for each model of the logic tree
    ############################################'''
    index_model = 0
    for model in Model_list : 
#        print catalog_cum_rate
#        print 
        rate_in_catalog = catalog_cum_rate[index_model]
        if not os.path.exists(str(Run_name) + '/analysis/figures/analyze_branches/Model/' + model):
            os.makedirs(str(Run_name) + '/analysis/figures/analyze_branches/Model/' + model)
        rows = np.where(np.array(total_list_model) == model)[0]  
        mfd_X = []
        for index in rows :  
            mfd = mega_mfd_cummulative[index]
            mfd_X.append(mfd)
            
        #density plot
        if plot_mfd == True :   
            hyp_name = model
            path = str(Run_name) + '/analysis/figures/analyze_branches/Model/' + model
            do_the_plots(hyp_name,mfd_X,mega_bining_in_mag,xmin,xmax,ymin,ymax,Run_name,rate_in_catalog,plot_as_rep,a_s_model,rows,path,bining_in_mag)
        index_model +=1
    '''##########################################
    #plot mfd for each Background hypothesis  of the logic tree
    ############################################'''
    if len(BG_hyp_list) > 1:
        for BG_hyp in BG_hyp_list :        
            if not os.path.exists(str(Run_name) + '/analysis/figures/analyze_branches/BG/' + BG_hyp):
                os.makedirs(str(Run_name) + '/analysis/figures/analyze_branches/BG/' + BG_hyp)
            rows = np.where(np.array(total_list_BG_hyp) == BG_hyp)[0]  
            mfd_X = []
            index_check = 0
            for index in rows :  
                mfd = mega_mfd_cummulative[index]
                mfd_X.append(mfd)
                index_check += 1
                
            #density plot
            if plot_mfd == True :   
                hyp_name = BG_hyp
                path = str(Run_name) + '/analysis/figures/analyze_branches/BG/' + BG_hyp
                do_the_plots(hyp_name,mfd_X,mega_bining_in_mag,xmin,xmax,ymin,ymax,Run_name,rate_in_catalog,plot_as_rep,a_s_model,rows,path,bining_in_mag)

                
                                
    '''##########################################
    #plot mfd for each MFD  of the logic tree
    ############################################'''
    if len(MFD_type_list) > 1:
        for MFD_type in MFD_type_list :        
            if not os.path.exists(str(Run_name) + '/analysis/figures/analyze_branches/MFD_type/' + MFD_type):
                os.makedirs(str(Run_name) + '/analysis/figures/analyze_branches/MFD_type/' + MFD_type)
            rows = np.where(np.array(total_list_MFD_type) == MFD_type)[0]  
            mfd_X = []
            for index in rows :  
                mfd = mega_mfd_cummulative[index]
                mfd_X.append(mfd)
                
           #density plot
            if plot_mfd == True :  
                hyp_name = MFD_type
                path = str(Run_name) + '/analysis/figures/analyze_branches/MFD_type/' + MFD_type
                do_the_plots(hyp_name,mfd_X,mega_bining_in_mag,xmin,xmax,ymin,ymax,Run_name,rate_in_catalog,plot_as_rep,a_s_model,rows,path,bining_in_mag)

                
    '''##########################################
    #plot mfd for each bvalue  of the logic tree
    ############################################'''
    if len(b_value_list) > 1:
        for b in b_value_list :        
            if not os.path.exists(str(Run_name) + '/analysis/figures/analyze_branches/b_value/' + b):
                os.makedirs(str(Run_name) + '/analysis/figures/analyze_branches/b_value/' + b)
            rows = np.where(np.array(total_list_b_value) == b)[0]  
            mfd_X = []
            for index in rows :  
                mfd = mega_mfd_cummulative[index]
                mfd_X.append(mfd)
                
            #density plot
            if plot_mfd == True :   
                hyp_name = b
                path = str(Run_name) + '/analysis/figures/analyze_branches/b_value/' + b
                do_the_plots(hyp_name,mfd_X,mega_bining_in_mag,xmin,xmax,ymin,ymax,Run_name,rate_in_catalog,plot_as_rep,a_s_model,rows,path,bining_in_mag)

                
    '''##########################################
    #plot mfd for each scaling law of the logic tree
    ############################################'''
    if len(ScL_complet_list) > 1:
        for ScL in ScL_complet_list :        
            if not os.path.exists(str(Run_name) + '/analysis/figures/analyze_branches/ScL/' + ScL):
                os.makedirs(str(Run_name) + '/analysis/figures/analyze_branches/ScL/' + ScL)
            rows = np.where(np.array(total_list_complet_ScL) == ScL)[0]  
            mfd_X = []
            for index in rows :  
                mfd = mega_mfd_cummulative[index]
                mfd_X.append(mfd)
                
            #density plot
            if plot_mfd == True :   
                hyp_name = ScL
                path = str(Run_name) + '/analysis/figures/analyze_branches/ScL/' + ScL
                do_the_plots(hyp_name,mfd_X,mega_bining_in_mag,xmin,xmax,ymin,ymax,Run_name,rate_in_catalog,plot_as_rep,a_s_model,rows,path,bining_in_mag)

    
#            
    '''######################################
    #plot Mmax for each ScL of the logic tree
    ######################################'''
        
    for ScL in ScL_complet_list :
        rows = np.where(np.array(total_list_complet_ScL) == ScL)[0]
        #mfd_ScL_cumulative = []
        Mmax_m_ScL = []
        for index in rows :
            mfd = mega_mfd_cummulative[index]
            #mfd_ScL_cumulative.append(mfd)
            Mmax_m_ScL.append(m_Mmax[index])
            
        
        if not os.path.exists(str(Run_name) + '/analysis/figures/Mmax/for_each_ScL'):
            os.makedirs(str(Run_name) + '/analysis/figures/Mmax/for_each_ScL')
        
        if plot_Mmax == True :
            plt.hist(Mmax_m_ScL,int(round(max(m_Mmax) - min(m_Mmax),1) * 10. + 1.))
            plt.title(ScL)
            plt.savefig(str(Run_name) + '/analysis/figures/Mmax/for_each_ScL/Hist_Mmax_' + ScL +'.png',dpi = 100)
            #plt.show()
            plt.close()

#            
    '''######################################
    #plot Mmax for each scenario set of the logic tree
    ######################################'''
        
    for Sc_set in scenarios_names_list :
        rows = np.where(np.array(total_list_scenario_name) == Sc_set)[0]
        #mfd_Sc_set_cumulative = []
        Mmax_m_Sc_set = []
        for index in rows :
            mfd = mega_mfd_cummulative[index]
            #mfd_Sc_set_cumulative.append(mfd)
            Mmax_m_Sc_set.append(m_Mmax[index])
            
        
        if not os.path.exists(str(Run_name) + '/analysis/figures/Mmax/for_each_scenario_set'):
            os.makedirs(str(Run_name) + '/analysis/figures/Mmax/for_each_scenario_set')
        
        if plot_Mmax == True :
            plt.hist(Mmax_m_Sc_set,int(round(max(m_Mmax) - min(m_Mmax),1) * 10. + 1.))
            plt.title(Sc_set)
            plt.savefig(str(Run_name) + '/analysis/figures/Mmax/for_each_scenario_set/Hist_Mmax_' + Sc_set +'.png',dpi = 100)
            #plt.show()
            plt.close()
#
##            
#    '''######################################
#    # the magnitudes of the ruptures in which each fault is involved, for each set of scenarios
#    #        (work in progress)
#    ######################################'''
#
#    for fault in faults_name_list:
#        for Sc_set in scenarios_names_list :
#            rows = np.where(np.array(mega_MFD) == Sc_set)[0]
#            #mfd_Sc_set_cumulative = []
#            Mmax_m_Sc_set = []
#            for index in rows :
#                mfd = mega_mfd_cummulative[index]
#                Mmax_m_Sc_set.append(m_Mmax[index])
#                
#            
#            if not os.path.exists(str(Run_name) + '/analysis/figures/Mmax/for_each_scenario_set'):
#                os.makedirs(str(Run_name) + '/analysis/figures/Mmax/for_each_scenario_set')
#            
#            if plot_Mmax == True :
#                plt.hist(Mmax_m_Sc_set,int(round(max(m_Mmax) - min(m_Mmax),1) * 10. + 1.))
#                plt.title(Sc_set)
#                plt.savefig(str(Run_name) + '/analysis/figures/Mmax/for_each_scenario_set/Hist_Mmax_' + Sc_set +'.png',dpi = 100)
#                #plt.show()
#                plt.close()
#    
    '''######################################
    #########################################
    #    detailed plots for combinations of
    #       hypotheses
    #########################################
    ######################################'''    

    '''##########################################
    # calculate the difference between the mean rate of the model and the mean rate of the catalog
    ############################################'''        
          
    if plot_mfd == True and plot_mfd_detailled == True:
        file_branch_to_catalog_name = str(Run_name) + '/analysis/txt_files/branch_vs_catalog.txt'
        file_branch_to_catalog = open(file_branch_to_catalog_name,'w')
        
        index_model = 0
        for model in Model_list : 
            rate_in_catalog = catalog_cum_rate[index_model]
            mean_rate_catalog = np.array(rate_in_catalog)#.mean(axis=0)
            for MFD_type in MFD_type_list :  
                for scenario in scenarios_names_list :
                    for b_value in b_value_list :  
                        for BG_hyp in BG_hyp_list :  
                            for ScL in ScL_complet_list :  
                                rows_model = np.where(np.array(total_list_model) == model)[0]  
                                rows_mfd = np.where(np.array(total_list_MFD_type) == MFD_type)[0] 
                                rows_sc = np.where(np.array(total_list_scenario_name) == scenario)[0]  
                                rows_ScL = np.where(np.array(total_list_complet_ScL) == ScL)[0]  
                                rows_b = np.where(np.array(total_list_b_value) == b_value)[0]  
                                rows_bg = np.where(np.array(total_list_BG_hyp) == BG_hyp)[0]  
                                rows = list(set(rows_model).intersection(rows_mfd)) 
                                rows = list(set(rows).intersection(rows_sc)) 
                                rows = list(set(rows).intersection(rows_ScL)) 
                                rows = list(set(rows).intersection(rows_b)) 
                                rows = list(set(rows).intersection(rows_bg)) 
                                if len(rows) > 0:
                                    file_branch_to_catalog.write(str(model)+'\t')
                                    file_branch_to_catalog.write(str(MFD_type)+'\t')
                                    file_branch_to_catalog.write(str(scenario)+'\t')
                                    file_branch_to_catalog.write(str(b_value)+'\t')
                                    file_branch_to_catalog.write(str(BG_hyp)+'\t')
                                    file_branch_to_catalog.write(str(ScL)+'\t')
                                    mfd_X = []
                                    for index in rows :  
                                        mfd = mega_mfd_cummulative[index]
                                        mfd_X.append(mfd)
                                    mean_rate_model = np.array(mfd_X).mean(axis=0)
                                    for i in range(len(mean_rate_catalog)):
                                        file_branch_to_catalog.write(str(mean_rate_model[i]/mean_rate_catalog[i]-1.)+'\t')
                                    file_branch_to_catalog.write('\n')
            index_model +=1
        file_branch_to_catalog.close()
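
    # Sidebar (editorial sketch): the repeated np.where + set-intersection
    # chains above select the branches matching every hypothesis at once; the
    # same selection as a single numpy boolean mask would read:
    #     mask = ((np.array(total_list_model) == model) &
    #             (np.array(total_list_MFD_type) == MFD_type) &
    #             (np.array(total_list_scenario_name) == scenario) &
    #             (np.array(total_list_b_value) == b_value) &
    #             (np.array(total_list_BG_hyp) == BG_hyp) &
    #             (np.array(total_list_complet_ScL) == ScL))
    #     rows = np.flatnonzero(mask)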

    '''##########################################
    #plot mfd for each MFD shape hypothesis and scenario set
    ############################################'''        
          
    if plot_mfd == True and plot_mfd_detailled == True:
        if len(MFD_type_list) > 1 and len(scenarios_names_list)>1:
            index_model = 0
            for model in Model_list : 
                rate_in_catalog = catalog_cum_rate[index_model]
                for MFD_type in MFD_type_list :  
                    for scenario in scenarios_names_list :
                        if not os.path.exists(str(Run_name) + '/analysis/figures/analyze_branches/Model/' + model):
                            os.makedirs(str(Run_name) + '/analysis/figures/analyze_branches/Model/' + model)
                        if not os.path.exists(str(Run_name) + '/analysis/figures/analyze_branches/Model/' + model+ '/' + MFD_type):
                            os.makedirs(str(Run_name) + '/analysis/figures/analyze_branches/Model/' + model+ '/' + MFD_type)
                        if not os.path.exists(str(Run_name) + '/analysis/figures/analyze_branches/Model/' + model+ '/' + MFD_type+ '/' +scenario):
                            os.makedirs(str(Run_name) + '/analysis/figures/analyze_branches/Model/' + model+ '/' + MFD_type+ '/' +scenario)
                        rows_mfd = np.where(np.array(total_list_MFD_type) == MFD_type)[0] 
                        rows_sc = np.where(np.array(total_list_scenario_name) == scenario)[0]  
                        rows_i = list(set(rows_mfd).intersection(rows_sc)) 
                        rows_model = np.where(np.array(total_list_model) == model)[0]  
                        rows = list(set(rows_i).intersection(rows_model)) 
                        mfd_X = []
                        for index in rows :  
                            mfd = mega_mfd_cummulative[index]
                            mfd_X.append(mfd)
                            
                       #density plot
                        if plot_mfd == True :   
                            hyp_name =  model + ' ' + MFD_type + ' ' + scenario
                            path = str(Run_name) +'/analysis/figures/analyze_branches/Model/' + model+ '/' + MFD_type+ '/' +scenario
                            do_the_plots(hyp_name,mfd_X,mega_bining_in_mag,xmin,xmax,ymin,ymax,Run_name,rate_in_catalog,plot_as_rep,a_s_model,rows,path,bining_in_mag)
                index_model +=1
        
        
        
    '''##########################################
    #plot mfd for each background hypothesis and scenario set
    ############################################'''        
          
    if plot_mfd == True and plot_mfd_detailled == True:
        if len(BG_hyp_list) > 1 and len(scenarios_names_list)>1:
            index_model = 0
            for model in Model_list : 
                rate_in_catalog = catalog_cum_rate[index_model]
                for BG_hyp in BG_hyp_list :  
                    for scenario in scenarios_names_list :
                        if not os.path.exists(str(Run_name) + '/analysis/figures/analyze_branches/Model/' + model+ '/'  + BG_hyp+ '/' +scenario):
                            os.makedirs(str(Run_name) + '/analysis/figures/analyze_branches/Model/' + model+ '/'  + BG_hyp+ '/' +scenario)
                        rows_mfd = np.where(np.array(total_list_BG_hyp) == BG_hyp)[0] 
                        rows_sc = np.where(np.array(total_list_scenario_name) == scenario)[0]  
                        rows = list(set(rows_mfd).intersection(rows_sc)) 
                        rows_model = np.where(np.array(total_list_model) == model)[0]  
                        rows = list(set(rows).intersection(rows_model)) 
                        mfd_X = []
                        for index in rows :  
                            mfd = mega_mfd_cummulative[index]
                            mfd_X.append(mfd)
                            
                       #density plot
                        if plot_mfd == True :
                            hyp_name = BG_hyp + ' ' + scenario
                            path = str(Run_name) + '/analysis/figures/analyze_branches/Model/' + model+ '/'  + BG_hyp+ '/' +scenario
                            #total_list_hyp = total_list_MFD_type
                            do_the_plots(hyp_name,mfd_X,mega_bining_in_mag,xmin,xmax,ymin,ymax,Run_name,rate_in_catalog,plot_as_rep,a_s_model,rows,path,bining_in_mag)
                index_model +=1
        
        
        
    '''##########################################
    #plot mfd for each model hypothesis and MFD
    ############################################'''        
          
          
    if plot_mfd == True and plot_mfd_detailled == True:
        if len(Model_list) > 1 and len(MFD_type_list)>1:
            index_model = 0
            for model in Model_list :  
                rate_in_catalog = catalog_cum_rate[index_model]
                for MFD_type in MFD_type_list :
                    if not os.path.exists(str(Run_name) + '/analysis/figures/analyze_branches/Model/' + model+ '/' +MFD_type):
                        os.makedirs(str(Run_name) + '/analysis/figures/analyze_branches/Model/' + model+ '/' +MFD_type)
                    rows_i = np.where(np.array(total_list_model) == model)[0] 
                    rows_j = np.where(np.array(total_list_MFD_type) == MFD_type)[0]  
                    rows = list(set(rows_i).intersection(rows_j)) 
                    mfd_X = []
                    for index in rows :  
                        mfd = mega_mfd_cummulative[index]
                        mfd_X.append(mfd)
                        
                   #density plot
                    if plot_mfd == True :
                        hyp_name = model + ' ' + MFD_type
                        path = str(Run_name) + '/analysis/figures/analyze_branches/Model/' + model+ '/' +MFD_type
                        #total_list_hyp = total_list_MFD_type
                        do_the_plots(hyp_name,mfd_X,mega_bining_in_mag,xmin,xmax,ymin,ymax,Run_name,rate_in_catalog,plot_as_rep,a_s_model,rows,path,bining_in_mag)
                index_model +=1        
            
                
                
    '''##########################################
    #plot mfd for each background hypothesis and mfd
    ############################################'''        
          
    if plot_mfd == True and plot_mfd_detailled == True:
        if len(BG_hyp_list) > 1 and len(MFD_type_list)>1:
            index_model = 0
            for model in Model_list : 
                rate_in_catalog = catalog_cum_rate[index_model]
                for BG_hyp in BG_hyp_list :  
                    for MFD_type in MFD_type_list :
                        if not os.path.exists(str(Run_name) + '/analysis/figures/analyze_branches/Model/' + model+ '/'  + BG_hyp+ '/' +MFD_type):
                            os.makedirs(str(Run_name) + '/analysis/figures/analyze_branches/Model/' + model+ '/'  + BG_hyp+ '/' +MFD_type)
                        rows_i = np.where(np.array(total_list_BG_hyp) == BG_hyp)[0] 
                        rows_j = np.where(np.array(total_list_MFD_type) == MFD_type)[0]  
                        rows = list(set(rows_i).intersection(rows_j)) 
                        rows_model = np.where(np.array(total_list_model) == model)[0]  
                        rows = list(set(rows).intersection(rows_model)) 
                        mfd_X = []
                        for index in rows :  
                            mfd = mega_mfd_cummulative[index]
                            mfd_X.append(mfd)
                            
                       #density plot
                        if plot_mfd == True :
                            hyp_name = BG_hyp + ' ' + MFD_type
                            path = str(Run_name) + '/analysis/figures/analyze_branches/Model/' + model+ '/'+ BG_hyp+ '/' +MFD_type
                            #total_list_hyp = total_list_MFD_type
                            do_the_plots(hyp_name,mfd_X,mega_bining_in_mag,xmin,xmax,ymin,ymax,Run_name,rate_in_catalog,plot_as_rep,a_s_model,rows,path,bining_in_mag)
                index_model +=1        
            
            
    return (total_list_ScL,total_list_dimension_used,geologic_moment_rate,
            geologic_moment_rate_no_as,total_list_scenario_name,total_list_MFD_type,
            mega_mfd_cummulative,total_list_model,total_list_sample,total_list_BG_hyp)
Example #5
    def _draw_text_as_path(self, gc, x, y, s, prop, angle, ismath, mtext=None):
        """
        draw the text by converting them to paths using textpath module.

        Parameters
        ----------
        prop : `matplotlib.font_manager.FontProperties`
          font property

        s : str
          text to be converted

        usetex : bool
          If True, use matplotlib usetex mode.

        ismath : bool
          If True, use mathtext parser. If "TeX", use *usetex* mode.

        """
        writer = self.writer

        writer.comment(s)

        glyph_map = self._glyph_map

        text2path = self._text2path
        color = rgb2hex(gc.get_rgb())
        fontsize = prop.get_size_in_points()

        style = {}
        if color != '#000000':
            style['fill'] = color
        if gc.get_alpha() != 1.0:
            style['opacity'] = short_float_fmt(gc.get_alpha())

        if not ismath:
            font = text2path._get_font(prop)
            _glyphs = text2path.get_glyphs_with_font(
                font, s, glyph_map=glyph_map, return_new_glyphs_only=True)
            glyph_info, glyph_map_new, rects = _glyphs

            if glyph_map_new:
                writer.start('defs')
                for char_id, glyph_path in six.iteritems(glyph_map_new):
                    path = Path(*glyph_path)
                    path_data = self._convert_path(path, simplify=False)
                    writer.element('path', id=char_id, d=path_data)
                writer.end('defs')

                glyph_map.update(glyph_map_new)

            attrib = {}
            attrib['style'] = generate_css(style)
            font_scale = fontsize / text2path.FONT_SCALE
            attrib['transform'] = generate_transform([
                ('translate', (x, y)), ('rotate', (-angle, )),
                ('scale', (font_scale, -font_scale))
            ])

            writer.start('g', attrib=attrib)
            for glyph_id, xposition, yposition, scale in glyph_info:
                attrib = {'xlink:href': '#%s' % glyph_id}
                if xposition != 0.0:
                    attrib['x'] = short_float_fmt(xposition)
                if yposition != 0.0:
                    attrib['y'] = short_float_fmt(yposition)
                writer.element('use', attrib=attrib)

            writer.end('g')
        else:
            if ismath == "TeX":
                _glyphs = text2path.get_glyphs_tex(prop,
                                                   s,
                                                   glyph_map=glyph_map,
                                                   return_new_glyphs_only=True)
            else:
                _glyphs = text2path.get_glyphs_mathtext(
                    prop, s, glyph_map=glyph_map, return_new_glyphs_only=True)

            glyph_info, glyph_map_new, rects = _glyphs

            # we store the character glyphs w/o flipping; instead, the
            # coordinates will be flipped when these characters are used
            if glyph_map_new:
                writer.start('defs')
                for char_id, glyph_path in six.iteritems(glyph_map_new):
                    char_id = self._adjust_char_id(char_id)
                    # Some characters are blank
                    if not len(glyph_path[0]):
                        path_data = ""
                    else:
                        path = Path(*glyph_path)
                        path_data = self._convert_path(path, simplify=False)
                    writer.element('path', id=char_id, d=path_data)
                writer.end('defs')

                glyph_map.update(glyph_map_new)

            attrib = {}
            font_scale = fontsize / text2path.FONT_SCALE
            attrib['style'] = generate_css(style)
            attrib['transform'] = generate_transform([
                ('translate', (x, y)), ('rotate', (-angle, )),
                ('scale', (font_scale, -font_scale))
            ])

            writer.start('g', attrib=attrib)
            for char_id, xposition, yposition, scale in glyph_info:
                char_id = self._adjust_char_id(char_id)

                writer.element('use',
                               transform=generate_transform([
                                   ('translate', (xposition, yposition)),
                                   ('scale', (scale, )),
                               ]),
                               attrib={'xlink:href': '#%s' % char_id})

            for verts, codes in rects:
                path = Path(verts, codes)
                path_data = self._convert_path(path, simplify=False)
                writer.element('path', d=path_data)

            writer.end('g')
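
Example #5 builds SVG <path> elements from glyphs via matplotlib's internal text2path machinery. The public entry point for the same text-to-path conversion is matplotlib.textpath.TextPath; a minimal sketch (the font family is an arbitrary choice):

from matplotlib.textpath import TextPath
from matplotlib.font_manager import FontProperties

tp = TextPath((0, 0), "Hi", size=12,
              prop=FontProperties(family="DejaVu Sans"))
print(tp.vertices.shape, tp.codes.shape)  # one Path covering the whole string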
Example #6

    def setFacetsLocs(self):
        NFacets = self.NFacets
        Npix = self.GD["Image"]["NPix"]
        Padding = self.GD["Facets"]["Padding"]
        self.Padding = Padding
        Npix, _ = EstimateNpix(float(Npix), Padding=1)
        self.Npix = Npix
        self.OutImShape = (self.nch, self.npol, self.Npix, self.Npix)

        RadiusTot = self.CellSizeRad * self.Npix / 2
        self.RadiusTot = RadiusTot

        lMainCenter, mMainCenter = 0., 0.
        self.lmMainCenter = lMainCenter, mMainCenter
        self.CornersImageTot = np.array(
            [[lMainCenter - RadiusTot, mMainCenter - RadiusTot],
             [lMainCenter + RadiusTot, mMainCenter - RadiusTot],
             [lMainCenter + RadiusTot, mMainCenter + RadiusTot],
             [lMainCenter - RadiusTot, mMainCenter + RadiusTot]])

        # MSName = self.GD["Data"]["MS"]
        # if ".txt" in MSName:
        #     f = open(MSName)
        #     Ls = f.readlines()
        #     f.close()
        #     MSName = []
        #     for l in Ls:
        #         ll = l.replace("\n", "")
        #         MSName.append(ll)
        #     MSName = MSName[0]

        MSName = self.VS.ListMS[0].MSName

        SolsFile = self.GD["DDESolutions"]["DDSols"]
        if isinstance(SolsFile, list):
            SolsFile = self.GD["DDESolutions"]["DDSols"][0]

        if SolsFile and ".npz" not in SolsFile and ".h5" not in SolsFile:
            Method = SolsFile
            # ThisMSName = reformat.reformat(
            #     os.path.abspath(MSName), LastSlash=False)
            # SolsFile = "%s/killMS.%s.sols.npz" % (ThisMSName, Method)
            SolsDir = self.GD["DDESolutions"]["SolsDir"]
            if SolsDir is None or SolsDir == "":
                ThisMSName = reformat.reformat(os.path.abspath(MSName),
                                               LastSlash=False)
                SolsFile = "%s/killMS.%s.sols.npz" % (ThisMSName, Method)
            else:
                _MSName = reformat.reformat(
                    os.path.abspath(MSName).split("/")[-1])
                DirName = os.path.abspath(
                    "%s%s" % (reformat.reformat(SolsDir), _MSName))
                if not os.path.isdir(DirName):
                    os.makedirs(DirName)
                SolsFile = "%s/killMS.%s.sols.npz" % (DirName, SolsFile)

#        if "CatNodes" in self.GD.keys():
        regular_grid = False
        if self.GD["Facets"]["CatNodes"] is not None:
            print("Taking facet directions from Nodes catalog: %s" %
                  self.GD["Facets"]["CatNodes"],
                  file=log)
            ClusterNodes = np.load(self.GD["Facets"]["CatNodes"])
            ClusterNodes = ClusterNodes.view(np.recarray)
            raNode = ClusterNodes.ra
            decNode = ClusterNodes.dec
            lFacet, mFacet = self.CoordMachine.radec2lm(raNode, decNode)
        elif SolsFile is not None and ".npz" in SolsFile:
            print("Taking facet directions from solutions file: %s" % SolsFile,
                  file=log)
            ClusterNodes = np.load(SolsFile)["ClusterCat"]
            ClusterNodes = ClusterNodes.view(np.recarray)
            raNode = ClusterNodes.ra
            decNode = ClusterNodes.dec
            lFacet, mFacet = self.CoordMachine.radec2lm(raNode, decNode)
        elif SolsFile is not None and ".h5" in SolsFile:
            h5file, apply_solsets, apply_map = _parse_solsfile(SolsFile)
            log.print(
                "Taking facet directions from H5parm: {}, solsets: {}".format(
                    h5file, apply_solsets))
            with tables.open_file(h5file) as H:  # PyTables API: open_file
                lm, radec = [], []
                for solset in apply_solsets:
                    _solset = getattr(H.root, solset)
                    raNode, decNode = _solset.source[:]["dir"].T
                    lFacet, mFacet = self.CoordMachine.radec2lm(
                        raNode, decNode)
                    radec.append(np.stack([raNode, decNode], axis=1))
                    lm.append(np.stack([lFacet, mFacet], axis=1))
            # Nd+Nd+...,2
            lm = np.concatenate(lm, axis=0)
            radec = np.concatenate(radec, axis=0)
            lFacet, mFacet = lm[:, 0], lm[:, 1]
            raNode, decNode = radec[:, 0], radec[:, 1]
        else:
            print("Taking facet directions from regular grid", file=log)
            regular_grid = True
            CellSizeRad = (self.GD["Image"]["Cell"] / 3600.) * np.pi / 180
            lrad = Npix * CellSizeRad * 0.5

            NpixFacet = Npix // NFacets
            lfacet = NpixFacet * CellSizeRad * 0.5
            lcenter_max = lrad - lfacet

            lFacet, mFacet = np.mgrid[-lcenter_max:lcenter_max:(NFacets) * 1j,
                                      -lcenter_max:lcenter_max:(NFacets) * 1j]
            lFacet = lFacet.flatten()
            mFacet = mFacet.flatten()
        print("  There are %i Jones-directions" % lFacet.size, file=log)

        self.lmSols = lFacet.copy(), mFacet.copy()

        raSols, decSols = self.CoordMachine.lm2radec(lFacet.copy(),
                                                     mFacet.copy())
        self.radecSols = raSols, decSols

        NodesCat = np.zeros((raSols.size, ),
                            dtype=[('ra', float), ('dec', float),
                                   ('l', float), ('m', float)])
        NodesCat = NodesCat.view(np.recarray)
        NodesCat.ra = raSols
        NodesCat.dec = decSols
        # print>>log,"Facet RA %s"%raSols
        # print>>log,"Facet Dec %s"%decSols
        NodesCat.l = lFacet
        NodesCat.m = mFacet

        ## saving below
        # NodeFile = "%s.NodesCat.%snpy" % (self.GD["Output"]["Name"], "psf." if self.DoPSF else "")
        # print>> log, "Saving Nodes catalog in %s" % NodeFile
        # np.save(NodeFile, NodesCat)

        self.DicoImager = {}

        xy = np.zeros((lFacet.size, 2), np.float32)
        xy[:, 0] = lFacet
        xy[:, 1] = mFacet

        regFile = "%s.tessel0.reg" % self.ImageName
        NFacets = self.NFacets = lFacet.size
        rac, decc = self.MainRaDec
        VM = ModVoronoiToReg.VoronoiToReg(rac, decc)

        if NFacets > 2:

            vor = Voronoi(xy, furthest_site=False)
            regions, vertices = ModVoronoi.voronoi_finite_polygons_2d(
                vor, radius=1.)

            PP = Polygon.Polygon(self.CornersImageTot)

            LPolygon = []
            ListNode = []
            for region, iNode in zip(regions, range(NodesCat.shape[0])):
                PP1 = Polygon.Polygon(np.array(vertices[region]))
                ThisP = np.array(PP & PP1)
                # x,y=np.array(PP1).T
                # xp,yp=np.array(PP).T
                # stop
                # import pylab
                # pylab.clf()
                # #pylab.plot(x,y)
                # pylab.plot(xp,yp)
                # pylab.draw()
                # pylab.show()
                # #pylab.pause(0.1)

                if ThisP.size > 0:
                    LPolygon.append(ThisP[0])
                    ListNode.append(iNode)
            NodesCat = NodesCat[np.array(ListNode)].copy()

        elif NFacets == 1:
            l0, m0 = lFacet[0], mFacet[0]
            LPolygon = [self.CornersImageTot]
        # VM.ToReg(regFile,lFacet,mFacet,radius=.1)

        NodeFile = "%s.NodesCat.npy" % self.GD["Output"]["Name"]
        print("Saving Nodes catalog in %s" % NodeFile, file=log)
        np.save(NodeFile, NodesCat)

        for iFacet, polygon0 in zip(range(len(LPolygon)), LPolygon):
            # polygon0 = vertices[region]
            P = polygon0.tolist()

        # VM.PolygonToReg(regFile,LPolygon,radius=0.1,Col="red")

        # stop

        ###########################################
        # SubDivide
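        # GiveDiam: bounding-box diameter of a polygon, with the box clamped
        # to the main field (lMainCenter, mMainCenter and RadiusTot come from
        # the enclosing scope).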
        def GiveDiam(polygon):
            lPoly, mPoly = polygon.T
            l0 = np.max([lMainCenter - RadiusTot, lPoly.min()])
            l1 = np.min([lMainCenter + RadiusTot, lPoly.max()])
            m0 = np.max([mMainCenter - RadiusTot, mPoly.min()])
            m1 = np.min([mMainCenter + RadiusTot, mPoly.max()])
            dl = l1 - l0
            dm = m1 - m0
            diam = np.max([dl, dm])
            return diam, (l0, l1, m0, m1)

        DiamMax = self.GD["Facets"]["DiamMax"] * np.pi / 180
        # DiamMax=4.5*np.pi/180
        DiamMin = self.GD["Facets"]["DiamMin"] * np.pi / 180

        def ClosePolygon(polygon):
            P = polygon.tolist()
            polygon = np.array(P + [P[0]])
            return polygon

        def GiveSubDivideRegions(polygonFacet, DMax):

            polygonFOV = self.CornersImageTot
            # polygonFOV=ClosePolygon(polygonFOV)
            PFOV = Polygon.Polygon(polygonFOV)

            # polygonFacet=ClosePolygon(polygonFacet)
            P0 = Polygon.Polygon(polygonFacet)
            P0Cut = Polygon.Polygon(P0 & PFOV)

            if P0Cut.nPoints() == 0:
                return []

            polygonFacetCut = np.array(P0Cut[0])
            # polygonFacetCut=ClosePolygon(polygonFacetCut)

            diam, (l0, l1, m0, m1) = GiveDiam(polygonFacetCut)
            if diam < DMax:
                return [polygonFacetCut]

            Nl = int((l1 - l0) / DMax) + 1
            Nm = int((m1 - m0) / DMax) + 1
            dl = (l1 - l0) / Nl
            dm = (m1 - m0) / Nm
            lEdge = np.linspace(l0, l1, Nl + 1)
            mEdge = np.linspace(m0, m1, Nm + 1)
            lc = (lEdge[0:-1] + lEdge[1::]) / 2
            mc = (mEdge[0:-1] + mEdge[1::]) / 2
            LPoly = []
            Lc, Mc = np.meshgrid(lc, mc)
            Lc = Lc.ravel().tolist()
            Mc = Mc.ravel().tolist()

            DpolySquare = np.array([[-dl, -dm], [dl, -dm], [dl, dm], [-dl, dm]
                                    ]) * 0.5
            for lc, mc in zip(Lc, Mc):
                polySquare = DpolySquare.copy(
                )  # ClosePolygon(DpolySquare.copy())
                polySquare[:, 0] += lc
                polySquare[:, 1] += mc
                # polySquare=ClosePolygon(polySquare)
                P1 = Polygon.Polygon(polySquare)

                POut = (P0Cut & P1)
                if POut.nPoints() == 0:
                    continue

                polyOut = np.array(POut[0])
                # polyOut=ClosePolygon(polyOut)
                LPoly.append(polyOut)

                # pylab.clf()
                # x,y=polygonFacetCut.T
                # pylab.plot(x,y,color="blue")
                # x,y=polygonFacet.T
                # pylab.plot(x,y,color="blue",ls=":",lw=3)
                # x,y=np.array(PFOV[0]).T
                # pylab.plot(x,y,color="black")
                # x,y=polySquare.T
                # pylab.plot(x,y,color="green",ls=":",lw=3)
                # x,y=polyOut.T
                # pylab.plot(x,y,color="red",ls="--",lw=3)
                # pylab.xlim(-0.03,0.03)
                # pylab.ylim(-0.03,0.03)
                # pylab.draw()
                # pylab.show(False)
                # pylab.pause(0.5)

            return LPoly

        def PlotPolygon(P, *args, **kwargs):
            for poly in P:
                x, y = ClosePolygon(np.array(poly)).T
                pylab.plot(x, y, *args, **kwargs)

        LPolygonNew = []

        for iFacet in range(len(LPolygon)):
            polygon = LPolygon[iFacet]
            ThisDiamMax = DiamMax
            SubReg = GiveSubDivideRegions(polygon, ThisDiamMax)

            LPolygonNew += SubReg

        regFile = "%s.FacetMachine.tessel.ReCut.reg" % self.ImageName
        # VM.PolygonToReg(regFile,LPolygonNew,radius=0.1,Col="green",labels=[str(i) for i in range(len(LPolygonNew))])

        DicoPolygon = {}
        for iFacet in range(len(LPolygonNew)):
            DicoPolygon[iFacet] = {}
            poly = LPolygonNew[iFacet]
            DicoPolygon[iFacet]["poly"] = poly
            diam, (l0, l1, m0, m1) = GiveDiam(poly)
            DicoPolygon[iFacet]["diam"] = diam
            DicoPolygon[iFacet]["diamMin"] = np.min([(l1 - l0), (m1 - m0)])
            xc, yc = np.mean(poly[:, 0]), np.mean(poly[:, 1])
            DicoPolygon[iFacet]["xyc"] = xc, yc
            dSol = np.sqrt((xc - lFacet)**2 + (yc - mFacet)**2)
            DicoPolygon[iFacet]["iSol"] = np.where(dSol == np.min(dSol))[0]

        for iFacet in sorted(DicoPolygon.keys()):
            diam = DicoPolygon[iFacet]["diamMin"]
            # print iFacet,diam,DiamMin
            if diam < DiamMin:
                dmin = 1e6
                xc0, yc0 = DicoPolygon[iFacet]["xyc"]
                HasClosest = False
                for iFacetOther in sorted(DicoPolygon.keys()):
                    if iFacetOther == iFacet:
                        continue
                    iSolOther = DicoPolygon[iFacetOther]["iSol"]
                    # print "  ",iSolOther,DicoPolygon[iFacet]["iSol"]
                    if iSolOther != DicoPolygon[iFacet]["iSol"]:
                        continue
                    xc, yc = DicoPolygon[iFacetOther]["xyc"]
                    d = np.sqrt((xc - xc0)**2 + (yc - yc0)**2)
                    if d < dmin:
                        dmin = d
                        iFacetClosest = iFacetOther
                        HasClosest = True
                if HasClosest:
                    log.print("Merging facet #%i to #%i" %
                              (iFacet, iFacetClosest))
                    P0 = Polygon.Polygon(DicoPolygon[iFacet]["poly"])
                    P1 = Polygon.Polygon(DicoPolygon[iFacetClosest]["poly"])
                    P2 = (P0 | P1)
                    POut = []
                    for iP in range(len(P2)):
                        POut += P2[iP]

                    poly = np.array(POut)
                    hull = ConvexHull(poly)
                    Contour = np.array([
                        hull.points[hull.vertices, 0],
                        hull.points[hull.vertices, 1]
                    ])
                    poly2 = Contour.T

                    del DicoPolygon[iFacet]
                    DicoPolygon[iFacetClosest]["poly"] = poly2
                    DicoPolygon[iFacetClosest]["diam"] = GiveDiam(poly2)[0]
                    DicoPolygon[iFacetClosest]["xyc"] = np.mean(
                        poly2[:, 0]), np.mean(poly2[:, 1])

        # stop
        LPolygonNew = []
        for iFacet in sorted(DicoPolygon.keys()):
            # if DicoPolygon[iFacet]["diam"]<DiamMin:
            #     print>>log, ModColor.Str("  Facet #%i associated to direction #%i is too small, removing it"%(iFacet,DicoPolygon[iFacet]["iSol"]))
            #     continue
            LPolygonNew.append(DicoPolygon[iFacet]["poly"])

        # for iFacet in range(len(regions)):
        #     polygon=LPolygon[iFacet]
        #     ThisDiamMax=DiamMax
        #     while True:
        #         SubReg=GiveSubDivideRegions(polygon,ThisDiamMax)
        #         if SubReg==[]:
        #             break
        #         Diams=[GiveDiam(poly)[0] for poly in SubReg]

        #         if np.min(Diams)>DiamMin: break
        #         ThisDiamMax*=1.1
        #     LPolygonNew+=SubReg
        #     print

        regFile = "%s.tessel.%sreg" % (self.GD["Output"]["Name"],
                                       "psf." if self.DoPSF else "")
        # labels=["[F%i.C%i]"%(i,DicoPolygon[i]["iSol"]) for i in range(len(LPolygonNew))]
        # VM.PolygonToReg(regFile,LPolygonNew,radius=0.1,Col="green",labels=labels)

        # VM.PolygonToReg(regFile,LPolygonNew,radius=0.1,Col="green")

        # pylab.clf()
        # x,y=LPolygonNew[11].T
        # pylab.plot(x,y)
        # pylab.draw()
        # pylab.show()
        # stop
        ###########################################

        NFacets = len(LPolygonNew)

        NJonesDir = NodesCat.shape[0]
        self.JonesDirCat = np.zeros(
            (NodesCat.shape[0], ),
            dtype=[('Name', '|S200'), ('ra', np.float64), ('dec', np.float64),
                   ('SumI', np.float64), ("Cluster", int), ("l", np.float64),
                   ("m", np.float64), ("I", np.float64)])
        self.JonesDirCat = self.JonesDirCat.view(np.recarray)
        self.JonesDirCat.I = 1
        self.JonesDirCat.SumI = 1

        self.JonesDirCat.ra = NodesCat.ra
        self.JonesDirCat.dec = NodesCat.dec
        self.JonesDirCat.l = NodesCat.l
        self.JonesDirCat.m = NodesCat.m
        self.JonesDirCat.Cluster = range(NJonesDir)

        print("Sizes (%i facets):" % (self.JonesDirCat.shape[0]), file=log)
        print("   - Main field :   [%i x %i] pix" % (self.Npix, self.Npix),
              file=log)

        l_m_Diam = np.zeros((NFacets, 4), np.float32)
        l_m_Diam[:, 3] = np.arange(NFacets)

        Np = 10000
        D = {}
        for iFacet in range(NFacets):
            D[iFacet] = {}
            polygon = LPolygonNew[iFacet]
            D[iFacet]["Polygon"] = polygon
            lPoly, mPoly = polygon.T

            ThisDiam, (l0, l1, m0, m1) = GiveDiam(polygon)

            # ###############################
            # # Find barycenter of polygon
            # X=(np.random.rand(Np))*ThisDiam+l0
            # Y=(np.random.rand(Np))*ThisDiam+m0
            # XY = np.dstack((X, Y))
            # XY_flat = XY.reshape((-1, 2))
            # mpath = Path( polygon )
            # XY = np.dstack((X, Y))
            # XY_flat = XY.reshape((-1, 2))
            # mask_flat = mpath.contains_points(XY_flat)
            # mask=mask_flat.reshape(X.shape)
            # ###############################
            ThisPolygon = Polygon.Polygon(polygon)
            lc, mc = ThisPolygon.center()
            dl = np.max(np.abs([l0 - lc, l1 - lc]))
            dm = np.max(np.abs([m0 - mc, m1 - mc]))
            ###############################
            # lc=np.sum(X*mask)/np.sum(mask)
            # mc=np.sum(Y*mask)/np.sum(mask)
            # dl=np.max(np.abs(X[mask==1]-lc))
            # dm=np.max(np.abs(Y[mask==1]-mc))
            diam = 2 * np.max([dl, dm])

            ######################
            # lc=(l0+l1)/2.
            # mc=(m0+m1)/2.
            # dl=l1-l0
            # dm=m1-m0
            # diam=np.max([dl,dm])

            l_m_Diam[iFacet, 0] = lc
            l_m_Diam[iFacet, 1] = mc
            l_m_Diam[iFacet, 2] = diam

        self.SpacialWeigth = {}
        self.DicoImager = {}

        # sort facets by size, unless we're in regular grid mode
        if not regular_grid:
            indDiam = np.argsort(l_m_Diam[:, 2])[::-1]
            l_m_Diam = l_m_Diam[indDiam]

        for iFacet in range(l_m_Diam.shape[0]):
            self.DicoImager[iFacet] = {}
            self.DicoImager[iFacet]["Polygon"] = D[l_m_Diam[iFacet,
                                                            3]]["Polygon"]
            x0 = round(l_m_Diam[iFacet, 0] / self.CellSizeRad)
            y0 = round(l_m_Diam[iFacet, 1] / self.CellSizeRad)
            # if x0 % 2 == 0:
            #     x0 += 1
            # if y0 % 2 == 0:
            #     y0 += 1
            l0 = x0 * self.CellSizeRad
            m0 = y0 * self.CellSizeRad
            diam = round(
                l_m_Diam[iFacet, 2] / self.CellSizeRad) * self.CellSizeRad
            # self.AppendFacet(iFacet,l0,m0,diam)
            self.AppendFacet(iFacet, l0, m0, diam)

        # self.MakeMasksTessel()

        NpixMax = np.max([
            self.DicoImager[iFacet]["NpixFacet"]
            for iFacet in sorted(self.DicoImager.keys())
        ])
        NpixMaxPadded = np.max([
            self.DicoImager[iFacet]["NpixFacetPadded"]
            for iFacet in sorted(self.DicoImager.keys())
        ])
        self.PaddedGridShape = (1, 1, NpixMaxPadded, NpixMaxPadded)
        self.FacetShape = (1, 1, NpixMax, NpixMax)

        dmin = 1
        for iFacet in range(len(self.DicoImager)):
            l, m = self.DicoImager[iFacet]["l0m0"]
            d = np.sqrt(l**2 + m**2)
            if d < dmin:
                dmin = d
                iCentralFacet = iFacet
        self.iCentralFacet = iCentralFacet
        self.NFacets = len(self.DicoImager)
        # regFile="%s.tessel.reg"%self.GD["Output"]["Name"]
        labels = [(self.DicoImager[i]["lmShift"][0],
                   self.DicoImager[i]["lmShift"][1],
                   "[F%i_S%i]" % (i, self.DicoImager[i]["iSol"]))
                  for i in range(len(LPolygonNew))]
        VM.PolygonToReg(regFile,
                        LPolygonNew,
                        radius=0.1,
                        Col="green",
                        labels=labels)

        self.WriteCoordFacetFile()

        self.FacetDirections = set([
            self.DicoImager[iFacet]["RaDec"]
            for iFacet in range(len(self.DicoImager))
        ])
        #DicoName = "%s.DicoFacet" % self.GD["Images"]["ImageName"]
        DicoName = "%s.%sDicoFacet" % (self.GD["Output"]["Name"],
                                       "psf." if self.DoPSF else "")

        # Find the minimum l,m in the facet (for decorrelation calculation)
        for iFacet in self.DicoImager.keys():
            # Create smoothed facet tessel mask:
            Npix = self.DicoImager[iFacet]["NpixFacetPadded"]
            l0, l1, m0, m1 = self.DicoImager[iFacet]["lmExtentPadded"]
            X, Y = np.mgrid[l0:l1:Npix // 10 * 1j, m0:m1:Npix // 10 * 1j]
            XY = np.dstack((X, Y))
            XY_flat = XY.reshape((-1, 2))
            vertices = self.DicoImager[iFacet]["Polygon"]
            mpath = Path(vertices)  # the vertices of the polygon
            mask_flat = mpath.contains_points(XY_flat)
            mask = mask_flat.reshape(X.shape)
            mpath = Path(self.CornersImageTot)
            mask_flat2 = mpath.contains_points(XY_flat)
            mask2 = mask_flat2.reshape(X.shape)
            mask[mask2 == 0] = 0
            R = np.sqrt(X**2 + Y**2)
            R[mask == 0] = 1e6
            indx, indy = np.where(R == np.min(R))
            lmin, mmin = X[indx[0], indy[0]], Y[indx[0], indy[0]]
            self.DicoImager[iFacet]["lm_min"] = lmin, mmin

        self.FacetDirCat = np.zeros(
            (len(self.DicoImager), ),
            dtype=[('Name', '|S200'), ('ra', np.float64), ('dec', np.float64),
                   ('SumI', np.float64), ("Cluster", int), ("l", np.float64),
                   ("m", np.float64), ("I", np.float64)])
        self.FacetDirCat = self.FacetDirCat.view(np.recarray)
        self.FacetDirCat.I = 1
        self.FacetDirCat.SumI = 1
        for iFacet in self.DicoImager.keys():
            l, m = self.DicoImager[iFacet]["lmShift"]
            ra, dec = self.DicoImager[iFacet]["RaDec"]
            self.FacetDirCat.ra[iFacet] = ra
            self.FacetDirCat.dec[iFacet] = dec
            self.FacetDirCat.l[iFacet] = l
            self.FacetDirCat.m[iFacet] = m
            self.FacetDirCat.Cluster[iFacet] = iFacet

        print("Saving DicoImager in %s" % DicoName, file=log)
        MyPickle.Save(self.DicoImager, DicoName)
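
# A minimal, self-contained sketch of the Voronoi clipping done above
# (scipy Voronoi cells cut down to the image square). It swaps the Polygon3
# package and the ModVoronoi helper for shapely, so it illustrates the idea
# rather than reproducing the original implementation.
import numpy as np
from scipy.spatial import Voronoi
from shapely.geometry import Polygon as ShPolygon

rng = np.random.default_rng(0)
xy = rng.uniform(-1, 1, size=(10, 2))                    # facet centres (l, m)
vor = Voronoi(xy)

field = ShPolygon([(-1, -1), (1, -1), (1, 1), (-1, 1)])  # image corners
clipped = []
for region in vor.regions:
    if not region or -1 in region:    # skip the empty and unbounded regions
        continue
    cell = ShPolygon(vor.vertices[region])
    cut = cell.intersection(field)    # keep only the part inside the field
    if not cut.is_empty:
        clipped.append(np.array(cut.exterior.coords))
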
fig = plt.figure(figsize = (10,10))

y = HeightMap(['nn1000060000','nn3000080000'])
h = y.height_map
x = y.gradient_map
plt.imshow(h, extent = [210000,240000,760000,790000])

catch = ShapeObject('C:\\Users\\Murray\\Documents\\python\\River Code\\nrfa_data\\catchment_data\\SHP_files\\90003', region = [210000,230000,760000,780000])
catch.plot(fig, color='k')
info,x,y = catch.shapes[0]
plt.show()

vertices = np.zeros((len(x),2))
codes = [Path.MOVETO] + [Path.LINETO]*(len(x)-2) + [Path.CLOSEPOLY]
vertices[:,0], vertices[:,1] = x, y
path = Path(vertices, codes)

indices = np.zeros((600,600,2))
indice_list = []

for i in range(600):
    for j in range(600):
        indices[j,i,:] = [i,j]
        indice_list.append((i,j))
        
grid = GridSort(list(zip(x,y)), x_range=(210000,230000), y_range=(760000, 780000), grid_size=(600,600))
ci = list(zip(grid.contained_indexs.astype(int)[:,0],grid.contained_indexs.astype(int)[:,1]))

heights = []

for i in range(600):
Example #8
0
 def onselect(self, verts):
     self.poly_path = Path(verts)
     self.mask = self.poly_path.contains_points(self.coords).reshape(self.length,
                                                                     self.width)
     self.canvas.draw_idle()
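
# The same mask-from-polygon pattern as a standalone sketch; the grid shape
# and the lasso vertices below are made up for illustration.
import numpy as np
from matplotlib.path import Path

length, width = 4, 5
yy, xx = np.mgrid[0:length, 0:width]
coords = np.column_stack([xx.ravel(), yy.ravel()])   # (N, 2) point coordinates
verts = [(0.5, 0.5), (3.5, 0.5), (3.5, 2.5), (0.5, 2.5)]
mask = Path(verts).contains_points(coords).reshape(length, width)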
Example #9
0
 def transform_path_non_affine(self, path):
     vertices = path.vertices
     if len(vertices) == 2 and vertices[0, 0] == vertices[1, 0]:
         return Path(self.transform(vertices), path.codes)
     ipath = path.interpolated(path._interpolation_steps)
     return Path(self.transform(ipath.vertices), ipath.codes)
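
# Why interpolate first: a sketch showing a straight (theta, r) segment bent
# into an arc by a non-affine transform; PolarTransform is just a convenient
# example of such a transform, not part of the method above.
import numpy as np
from matplotlib.path import Path
from matplotlib.projections.polar import PolarAxes

line = Path([(0.0, 1.0), (np.pi, 1.0)])   # straight segment in (theta, r)
dense = line.interpolated(50)             # 50 interpolation steps
arc = PolarAxes.PolarTransform().transform_path(dense)  # smooth semicircle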
Example #10
0
def get_raster_on_poly(rasterfile, poly, dtype='uint16', verbose=True):
    """Parses through an array of raster values with corresponding latitudes and
    longitudes and returns a list of the values on the shape boundary. """

    # get the extent and the raster values in the bounding box

    xmin = min([x for x, y in poly])
    xmax = max([x for x, y in poly])
    ymin = min([y for x, y in poly])
    ymax = max([y for x, y in poly])

    extent = xmin, ymin, xmax, ymax
    xs, ys, zs = get_raster_table(rasterfile, extent, dtype, locations=True)

    # create a matplotlib path for the polygon for point testing

    path = Path(poly)

    # set up a list for the points on the boundary

    points = []

    # find the bottom row

    n = len(xs[0]) - 1  # index of last column (used a lot)

    bottom = False
    row = 0
    while not bottom:
        row = row - 1
        x_row, y_row = xs[row], ys[row]
        bottom = any(
            [path.contains_point([x, y]) for x, y in zip(x_row, y_row)])

    # start at the left and go right until a point is inside

    j = 0
    while j < n and not path.contains_point([x_row[j], y_row[j]]):
        j += 1

    # start at the right and go left until a point is inside

    k = n
    while k > 0 and not path.contains_point([x_row[k], y_row[k]]):
        k = k - 1

    for p in zip(xs[row, j:k + 1], ys[row, j:k + 1], zs[row, j:k + 1]):
        points.append(p)

    # keep track of the bottom row

    bottom, bleft, bright = row + len(xs), j, k

    # find the top row

    top = False
    row = -1
    while not top:
        row += 1
        x_row, y_row = xs[row], ys[row]
        top = any([path.contains_point([x, y]) for x, y in zip(x_row, y_row)])

    # start at the left and go right until a point is inside

    j = 0
    while j < n and not path.contains_point([x_row[j], y_row[j]]):
        j += 1

    # start at the right and go left until a point is inside

    k = n
    while k > 0 and not path.contains_point([x_row[k], y_row[k]]):
        k = k - 1

    for p in zip(xs[row, j:k + 1], ys[row, j:k + 1], zs[row, j:k + 1]):
        points.append(p)

    # keep track of the left and right sides of the row above

    top, left, right = row + 1, j, k

    # parse through the rows and look for the first values inside; keep track
    # of the edges from the previous row (left and right)

    for x_row, y_row, z_row in zip(xs[top:bottom - 1], ys[top:bottom - 1],
                                   zs[top:bottom - 1]):

        # start at the left and go right until a point is inside

        j = 0
        while j < n and not path.contains_point([x_row[j], y_row[j]]):
            j += 1

        # start at the right and go left until a point is inside

        k = n
        while k > 0 and not path.contains_point([x_row[k], y_row[k]]):
            k = k - 1

        # add the points from left to last left and right to the last right

        if j == right: l = list(range(0))
        elif j < left: l = list(range(j, left))
        else: l = list(range(left, j + 1))

        if k == left: r = list(range(0))
        elif k > right: r = list(range(right + 1, k + 1))
        else: r = list(range(k, right + 1))

        for i in chain(l, r):
            points.append((x_row[i], y_row[i], z_row[i]))

        if j != right: left = j
        if k != left: right = k

    # connect to the last row

    x_row, y_row, z_row = xs[bottom - 1], ys[bottom - 1], zs[bottom - 1]

    l, r = list(range(left, bleft + 1)), list(range(bright, right + 1))

    for i in chain(l, r):
        points.append((x_row[i], y_row[i], z_row[i]))

    return points
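
# Hypothetical call of the function above; 'dem.tif' and the triangle are
# made-up inputs, and get_raster_table must be importable for this to run.
poly = [(-80.0, 40.0), (-79.5, 40.0), (-79.75, 40.4)]
boundary = get_raster_on_poly('dem.tif', poly, dtype='uint16', verbose=False)
xs, ys, zs = zip(*boundary)   # lon, lat and raster value along the boundary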
Example #11
0
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.transforms import Bbox
from matplotlib.path import Path

left, bottom, width, height = (-1, -1, 2, 2)
rect = plt.Rectangle((left, bottom), width, height, facecolor="#aaaaaa")

fig, ax = plt.subplots()
ax.add_patch(rect)

bbox = Bbox.from_bounds(left, bottom, width, height)

for i in range(12):
    vertices = (np.random.random((2, 2)) - 0.5) * 6.0
    path = Path(vertices)
    if path.intersects_bbox(bbox):
        color = 'r'
    else:
        color = 'b'
    ax.plot(vertices[:, 0], vertices[:, 1], color=color)

plt.show()
Example #12
0
    def add(self,
            patchlabel='',
            flows=None,
            orientations=None,
            labels='',
            trunklength=1.0,
            pathlengths=0.25,
            prior=None,
            connect=(0, 0),
            rotation=0,
            **kwargs):
        """
        Add a simple Sankey diagram with flows at the same hierarchical level.

        Return value is the instance of :class:`Sankey`.

        Optional keyword arguments:

          ===============   ===================================================
          Keyword           Description
          ===============   ===================================================
          *patchlabel*      label to be placed at the center of the diagram
                            Note: *label* (not *patchlabel*) will be passed to
                            the patch through ``**kwargs`` and can be used to
                            create an entry in the legend.
          *flows*           array of flow values
                            By convention, inputs are positive and outputs are
                            negative.
          *orientations*    list of orientations of the paths
                            Valid values are 1 (from/to the top), 0 (from/to
                            the left or right), or -1 (from/to the bottom).  If
                            *orientations* == 0, inputs will break in from the
                            left and outputs will break away to the right.
          *labels*          list of specifications of the labels for the flows
                            Each value may be *None* (no labels), '' (just
                            label the quantities), or a labeling string.  If a
                            single value is provided, it will be applied to all
                            flows.  If an entry is a non-empty string, then the
                            quantity for the corresponding flow will be shown
                            below the string.  However, if the *unit* of the
                            main diagram is None, then quantities are never
                            shown, regardless of the value of this argument.
          *trunklength*     length between the bases of the input and output
                            groups
          *pathlengths*     list of lengths of the arrows before break-in or
                            after break-away
                            If a single value is given, then it will be applied
                            to the first (inside) paths on the top and bottom,
                            and the length of all other arrows will be
                            justified accordingly.  The *pathlengths* are not
                            applied to the horizontal inputs and outputs.
          *prior*           index of the prior diagram to which this diagram
                            should be connected
          *connect*         a (prior, this) tuple indexing the flow of the
                            prior diagram and the flow of this diagram which
                            should be connected
                            If this is the first diagram or *prior* is *None*,
                            *connect* will be ignored.
          *rotation*        angle of rotation of the diagram [deg]
                            *rotation* is ignored if this diagram is connected
                            to an existing one (using *prior* and *connect*).
                            The interpretation of the *orientations* argument
                            will be rotated accordingly (e.g., if *rotation*
                            == 90, an *orientations* entry of 1 means to/from
                            the left).
          ===============   ===================================================

        Valid kwargs are :meth:`matplotlib.patches.PathPatch` arguments:

        %(Patch)s

        As examples, ``fill=False`` and ``label='A legend entry'``.
        By default, ``facecolor='#bfd1d4'`` (light blue) and
        ``linewidth=0.5``.

        The indexing parameters (*prior* and *connect*) are zero-based.

        The flows are placed along the top of the diagram from the inside out
        in order of their index within the *flows* list or array.  They are
        placed along the sides of the diagram from the top down and along the
        bottom from the outside in.

        If the sum of the inputs and outputs is nonzero, the discrepancy
        will appear as a cubic Bezier curve along the top and bottom edges of
        the trunk.

        .. seealso::

            :meth:`finish`
        """
        # Check and preprocess the arguments.
        if flows is None:
            flows = np.array([1.0, -1.0])
        else:
            flows = np.array(flows)
        n = flows.shape[0]  # Number of flows
        if rotation is None:
            rotation = 0
        else:
            # In the code below, angles are expressed in deg/90.
            rotation /= 90.0
        if orientations is None:
            orientations = [0, 0]
        if len(orientations) != n:
            raise ValueError(
                "orientations and flows must have the same length.\n"
                "orientations has length %d, but flows has length %d." %
                (len(orientations), n))
        if labels != '' and getattr(labels, '__iter__', False):
            # iterable() isn't used because it would give True if labels is a
            # string
            if len(labels) != n:
                raise ValueError(
                    "If labels is a list, then labels and flows must have the "
                    "same length.\nlabels has length %d, but flows has length %d."
                    % (len(labels), n))
        else:
            labels = [labels] * n
        if trunklength < 0:
            raise ValueError(
                "trunklength is negative.\nThis isn't allowed, because it would "
                "cause poor layout.")
        if np.abs(np.sum(flows)) > self.tolerance:
            _log.info(
                "The sum of the flows is nonzero (%f).\nIs the "
                "system not at steady state?", np.sum(flows))
        scaled_flows = self.scale * flows
        gain = sum(max(flow, 0) for flow in scaled_flows)
        loss = sum(min(flow, 0) for flow in scaled_flows)
        if not (0.5 <= gain <= 2.0):
            _log.info(
                "The scaled sum of the inputs is %f.\nThis may "
                "cause poor layout.\nConsider changing the scale so"
                " that the scaled sum is approximately 1.0.", gain)
        if not (-2.0 <= loss <= -0.5):
            _log.info(
                "The scaled sum of the outputs is %f.\nThis may "
                "cause poor layout.\nConsider changing the scale so"
                " that the scaled sum is approximately 1.0.", gain)
        if prior is not None:
            if prior < 0:
                raise ValueError("The index of the prior diagram is negative.")
            if min(connect) < 0:
                raise ValueError(
                    "At least one of the connection indices is negative.")
            if prior >= len(self.diagrams):
                raise ValueError(
                    "The index of the prior diagram is %d, but there are "
                    "only %d other diagrams.\nThe index is zero-based." %
                    (prior, len(self.diagrams)))
            if connect[0] >= len(self.diagrams[prior].flows):
                raise ValueError(
                    "The connection index to the source diagram is %d, but "
                    "that diagram has only %d flows.\nThe index is zero-based."
                    % (connect[0], len(self.diagrams[prior].flows)))
            if connect[1] >= n:
                raise ValueError(
                    "The connection index to this diagram is %d, but this "
                    "diagram has only %d flows.\nThe index is zero-based." %
                    (connect[1], n))
            if self.diagrams[prior].angles[connect[0]] is None:
                raise ValueError(
                    "The connection cannot be made.  Check that the magnitude "
                    "of flow %d of diagram %d is greater than or equal to the "
                    "specified tolerance." % (connect[0], prior))
            flow_error = (self.diagrams[prior].flows[connect[0]] +
                          flows[connect[1]])
            if abs(flow_error) >= self.tolerance:
                raise ValueError(
                    "The scaled sum of the connected flows is %f, which is not "
                    "within the tolerance (%f)." %
                    (flow_error, self.tolerance))

        # Determine if the flows are inputs.
        are_inputs = [None] * n
        for i, flow in enumerate(flows):
            if flow >= self.tolerance:
                are_inputs[i] = True
            elif flow <= -self.tolerance:
                are_inputs[i] = False
            else:
                _log.info("The magnitude of flow %d (%f) is below the "
                          "tolerance (%f).\nIt will not be shown, and it "
                          "cannot be used in a connection." %
                          (i, flow, self.tolerance))

        # Determine the angles of the arrows (before rotation).
        angles = [None] * n
        for i, (orient, is_input) in enumerate(zip(orientations, are_inputs)):
            if orient == 1:
                if is_input:
                    angles[i] = DOWN
                elif not is_input:
                    # Be specific since is_input can be None.
                    angles[i] = UP
            elif orient == 0:
                if is_input is not None:
                    angles[i] = RIGHT
            else:
                if orient != -1:
                    raise ValueError("The value of orientations[%d] is %d, "
                                     "but it must be [ -1 | 0 | 1 ]." %
                                     (i, orient))
                if is_input:
                    angles[i] = UP
                elif not is_input:
                    angles[i] = DOWN

        # Justify the lengths of the paths.
        if iterable(pathlengths):
            if len(pathlengths) != n:
                raise ValueError(
                    "If pathlengths is a list, then pathlengths and flows must "
                    "have the same length.\npathlengths has length %d, but flows "
                    "has length %d." % (len(pathlengths), n))
        else:  # Make pathlengths into a list.
            urlength = pathlengths
            ullength = pathlengths
            lrlength = pathlengths
            lllength = pathlengths
            d = dict(RIGHT=pathlengths)
            pathlengths = [d.get(angle, 0) for angle in angles]
            # Determine the lengths of the top-side arrows
            # from the middle outwards.
            for i, (angle, is_input,
                    flow) in enumerate(zip(angles, are_inputs, scaled_flows)):
                if angle == DOWN and is_input:
                    pathlengths[i] = ullength
                    ullength += flow
                elif angle == UP and not is_input:
                    pathlengths[i] = urlength
                    urlength -= flow  # Flow is negative for outputs.
            # Determine the lengths of the bottom-side arrows
            # from the middle outwards.
            for i, (angle, is_input, flow) in enumerate(
                    reversed(list(zip(angles, are_inputs, scaled_flows)))):
                if angle == UP and is_input:
                    pathlengths[n - i - 1] = lllength
                    lllength += flow
                elif angle == DOWN and not is_input:
                    pathlengths[n - i - 1] = lrlength
                    lrlength -= flow
            # Determine the lengths of the left-side arrows
            # from the bottom upwards.
            has_left_input = False
            for i, (angle, is_input, spec) in enumerate(
                    reversed(
                        list(
                            zip(angles, are_inputs,
                                zip(scaled_flows, pathlengths))))):
                if angle == RIGHT:
                    if is_input:
                        if has_left_input:
                            pathlengths[n - i - 1] = 0
                        else:
                            has_left_input = True
            # Determine the lengths of the right-side arrows
            # from the top downwards.
            has_right_output = False
            for i, (angle, is_input, spec) in enumerate(
                    zip(angles, are_inputs, list(zip(scaled_flows,
                                                     pathlengths)))):
                if angle == RIGHT:
                    if not is_input:
                        if has_right_output:
                            pathlengths[i] = 0
                        else:
                            has_right_output = True

        # Begin the subpaths, and smooth the transition if the sum of the flows
        # is nonzero.
        urpath = [
            (
                Path.MOVETO,
                [
                    (self.gap - trunklength / 2.0),  # Upper right
                    gain / 2.0
                ]),
            (Path.LINETO, [(self.gap - trunklength / 2.0) / 2.0, gain / 2.0]),
            (Path.CURVE4, [(self.gap - trunklength / 2.0) / 8.0, gain / 2.0]),
            (Path.CURVE4, [(trunklength / 2.0 - self.gap) / 8.0, -loss / 2.0]),
            (Path.LINETO, [(trunklength / 2.0 - self.gap) / 2.0, -loss / 2.0]),
            (Path.LINETO, [(trunklength / 2.0 - self.gap), -loss / 2.0])
        ]
        llpath = [
            (
                Path.LINETO,
                [
                    (trunklength / 2.0 - self.gap),  # Lower left
                    loss / 2.0
                ]),
            (Path.LINETO, [(trunklength / 2.0 - self.gap) / 2.0, loss / 2.0]),
            (Path.CURVE4, [(trunklength / 2.0 - self.gap) / 8.0, loss / 2.0]),
            (Path.CURVE4, [(self.gap - trunklength / 2.0) / 8.0, -gain / 2.0]),
            (Path.LINETO, [(self.gap - trunklength / 2.0) / 2.0, -gain / 2.0]),
            (Path.LINETO, [(self.gap - trunklength / 2.0), -gain / 2.0])
        ]
        lrpath = [(
            Path.LINETO,
            [
                (trunklength / 2.0 - self.gap),  # Lower right
                loss / 2.0
            ])]
        ulpath = [(
            Path.LINETO,
            [
                self.gap - trunklength / 2.0,  # Upper left
                gain / 2.0
            ])]

        # Add the subpaths and assign the locations of the tips and labels.
        tips = np.zeros((n, 2))
        label_locations = np.zeros((n, 2))
        # Add the top-side inputs and outputs from the middle outwards.
        for i, (angle, is_input, spec) in enumerate(
                zip(angles, are_inputs, list(zip(scaled_flows, pathlengths)))):
            if angle == DOWN and is_input:
                tips[i, :], label_locations[i, :] = self._add_input(
                    ulpath, angle, *spec)
            elif angle == UP and not is_input:
                tips[i, :], label_locations[i, :] = self._add_output(
                    urpath, angle, *spec)
        # Add the bottom-side inputs and outputs from the middle outwards.
        for i, (angle, is_input, spec) in enumerate(
                reversed(
                    list(
                        zip(angles, are_inputs,
                            list(zip(scaled_flows, pathlengths)))))):
            if angle == UP and is_input:
                tip, label_location = self._add_input(llpath, angle, *spec)
                tips[n - i - 1, :] = tip
                label_locations[n - i - 1, :] = label_location
            elif angle == DOWN and not is_input:
                tip, label_location = self._add_output(lrpath, angle, *spec)
                tips[n - i - 1, :] = tip
                label_locations[n - i - 1, :] = label_location
        # Add the left-side inputs from the bottom upwards.
        has_left_input = False
        for i, (angle, is_input, spec) in enumerate(
                reversed(
                    list(
                        zip(angles, are_inputs,
                            list(zip(scaled_flows, pathlengths)))))):
            if angle == RIGHT and is_input:
                if not has_left_input:
                    # Make sure the lower path extends
                    # at least as far as the upper one.
                    if llpath[-1][1][0] > ulpath[-1][1][0]:
                        llpath.append(
                            (Path.LINETO, [ulpath[-1][1][0],
                                           llpath[-1][1][1]]))
                    has_left_input = True
                tip, label_location = self._add_input(llpath, angle, *spec)
                tips[n - i - 1, :] = tip
                label_locations[n - i - 1, :] = label_location
        # Add the right-side outputs from the top downwards.
        has_right_output = False
        for i, (angle, is_input, spec) in enumerate(
                zip(angles, are_inputs, list(zip(scaled_flows, pathlengths)))):
            if angle == RIGHT and not is_input:
                if not has_right_output:
                    # Make sure the upper path extends
                    # at least as far as the lower one.
                    if urpath[-1][1][0] < lrpath[-1][1][0]:
                        urpath.append(
                            (Path.LINETO, [lrpath[-1][1][0],
                                           urpath[-1][1][1]]))
                    has_right_output = True
                tips[i, :], label_locations[i, :] = self._add_output(
                    urpath, angle, *spec)
        # Trim any hanging vertices.
        if not has_left_input:
            ulpath.pop()
            llpath.pop()
        if not has_right_output:
            lrpath.pop()
            urpath.pop()

        # Concatenate the subpaths in the correct order (clockwise from top).
        path = (urpath + self._revert(lrpath) + llpath + self._revert(ulpath) +
                [(Path.CLOSEPOLY, urpath[0][1])])

        # Create a patch with the Sankey outline.
        codes, vertices = zip(*path)
        vertices = np.array(vertices)

        def _get_angle(a, r):
            if a is None:
                return None
            else:
                return a + r

        if prior is None:
            if rotation != 0:  # By default, none of this is needed.
                angles = [_get_angle(angle, rotation) for angle in angles]
                rotate = Affine2D().rotate_deg(rotation * 90).transform_affine
                tips = rotate(tips)
                label_locations = rotate(label_locations)
                vertices = rotate(vertices)
            text = self.ax.text(0, 0, s=patchlabel, ha='center', va='center')
        else:
            rotation = (self.diagrams[prior].angles[connect[0]] -
                        angles[connect[1]])
            angles = [_get_angle(angle, rotation) for angle in angles]
            rotate = Affine2D().rotate_deg(rotation * 90).transform_affine
            tips = rotate(tips)
            offset = self.diagrams[prior].tips[connect[0]] - tips[connect[1]]
            translate = Affine2D().translate(*offset).transform_affine
            tips = translate(tips)
            label_locations = translate(rotate(label_locations))
            vertices = translate(rotate(vertices))
            kwds = dict(s=patchlabel, ha='center', va='center')
            text = self.ax.text(*offset, **kwds)
        if rcParams['_internal.classic_mode']:
            fc = kwargs.pop('fc', kwargs.pop('facecolor', '#bfd1d4'))
            lw = kwargs.pop('lw', kwargs.pop('linewidth', 0.5))
        else:
            fc = kwargs.pop('fc', kwargs.pop('facecolor', None))
            lw = kwargs.pop('lw', kwargs.pop('linewidth', None))
        if fc is None:
            fc = next(self.ax._get_patches_for_fill.prop_cycler)['color']
        patch = PathPatch(Path(vertices, codes), fc=fc, lw=lw, **kwargs)
        self.ax.add_patch(patch)

        # Add the path labels.
        texts = []
        for number, angle, label, location in zip(flows, angles, labels,
                                                  label_locations):
            if label is None or angle is None:
                label = ''
            elif self.unit is not None:
                quantity = self.format % abs(number) + self.unit
                if label != '':
                    label += "\n"
                label += quantity
            texts.append(
                self.ax.text(x=location[0],
                             y=location[1],
                             s=label,
                             ha='center',
                             va='center'))
        # Text objects are placed even if they are empty (as long as the magnitude
        # of the corresponding flow is larger than the tolerance) in case the
        # user wants to provide labels later.

        # Expand the size of the diagram if necessary.
        self.extent = (min(np.min(vertices[:, 0]),
                           np.min(label_locations[:, 0]), self.extent[0]),
                       max(np.max(vertices[:, 0]),
                           np.max(label_locations[:, 0]), self.extent[1]),
                       min(np.min(vertices[:, 1]),
                           np.min(label_locations[:, 1]), self.extent[2]),
                       max(np.max(vertices[:, 1]),
                           np.max(label_locations[:, 1]), self.extent[3]))
        # Include both vertices _and_ label locations in the extents; there are
        # cases where either could determine the margins (e.g., arrow shoulders).

        # Add this diagram as a subdiagram.
        self.diagrams.append(
            SimpleNamespace(patch=patch,
                            flows=flows,
                            angles=angles,
                            tips=tips,
                            text=text,
                            texts=texts))

        # Allow a daisy-chained call structure (see docstring for the class).
        return self
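
# A minimal usage sketch for the add()/finish() API documented above,
# following the standard matplotlib Sankey workflow.
import matplotlib.pyplot as plt
from matplotlib.sankey import Sankey

sankey = Sankey(unit=None)
sankey.add(flows=[1.0, -0.4, -0.6],       # one input, two outputs, sum == 0
           orientations=[0, -1, 0],       # in from the left, down, out right
           labels=['in', 'down', 'out'])
sankey.finish()
plt.show()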
Example #13
0
def test_path_intersect_path(phi):
    # test for the range of intersection angles
    eps_array = [1e-5, 1e-8, 1e-10, 1e-12]

    transform = transforms.Affine2D().rotate(np.deg2rad(phi))

    # a and b intersect at angle phi
    a = Path([(-2, 0), (2, 0)])
    b = transform.transform_path(a)
    assert a.intersects_path(b) and b.intersects_path(a)

    # a and b touch at angle phi at (0, 0)
    a = Path([(0, 0), (2, 0)])
    b = transform.transform_path(a)
    assert a.intersects_path(b) and b.intersects_path(a)

    # a and b are orthogonal and intersect at (0, 3)
    a = transform.transform_path(Path([(0, 1), (0, 3)]))
    b = transform.transform_path(Path([(1, 3), (0, 3)]))
    assert a.intersects_path(b) and b.intersects_path(a)

    # a and b are collinear and intersect at (0, 3)
    a = transform.transform_path(Path([(0, 1), (0, 3)]))
    b = transform.transform_path(Path([(0, 5), (0, 3)]))
    assert a.intersects_path(b) and b.intersects_path(a)

    # self-intersect
    assert a.intersects_path(a)

    # a contains b
    a = transform.transform_path(Path([(0, 0), (5, 5)]))
    b = transform.transform_path(Path([(1, 1), (3, 3)]))
    assert a.intersects_path(b) and b.intersects_path(a)

    # a and b are collinear but do not intersect
    a = transform.transform_path(Path([(0, 1), (0, 5)]))
    b = transform.transform_path(Path([(3, 0), (3, 3)]))
    assert not a.intersects_path(b) and not b.intersects_path(a)

    # a and b are on the same line but do not intersect
    a = transform.transform_path(Path([(0, 1), (0, 5)]))
    b = transform.transform_path(Path([(0, 6), (0, 7)]))
    assert not a.intersects_path(b) and not b.intersects_path(a)

    # Note: 1e-13 is the absolute tolerance error used for
    # `isclose` function from src/_path.h

    # a and b are parallel but do not touch
    for eps in eps_array:
        a = transform.transform_path(Path([(0, 1), (0, 5)]))
        b = transform.transform_path(Path([(0 + eps, 1), (0 + eps, 5)]))
        assert not a.intersects_path(b) and not b.intersects_path(a)

    # a and b are on the same line but do not intersect (really close)
    for eps in eps_array:
        a = transform.transform_path(Path([(0, 1), (0, 5)]))
        b = transform.transform_path(Path([(0, 5 + eps), (0, 7)]))
        assert not a.intersects_path(b) and not b.intersects_path(a)

    # a and b are on the same line and intersect (really close)
    for eps in eps_array:
        a = transform.transform_path(Path([(0, 1), (0, 5)]))
        b = transform.transform_path(Path([(0, 5 - eps), (0, 7)]))
        assert a.intersects_path(b) and b.intersects_path(a)

    # b is the same as a but with an extra point
    a = transform.transform_path(Path([(0, 1), (0, 5)]))
    b = transform.transform_path(Path([(0, 1), (0, 2), (0, 5)]))
    assert a.intersects_path(b) and b.intersects_path(a)
Example #14
0
def test_empty_closed_path():
    path = Path(np.zeros((0, 2)), closed=True)
    assert path.vertices.shape == (0, 2)
    assert path.codes is None
    assert_array_equal(path.get_extents().extents,
                       transforms.Bbox.null().extents)
Example #15
0
	b_0,b_1,b_2,b_3,b_4,b_5,b_6
	]
verts_2 = [
	d_0,d_1,d_2,d_3,d_4,d_5,d_6
	]

codes = [Path.MOVETO,
         Path.CURVE4,
         Path.CURVE4,
         Path.CURVE4,
         Path.CURVE4,
         Path.CURVE4,
         Path.CURVE4,
         ]

path1 = Path(verts1, codes)
path2 = Path(verts2, codes)

path_1 = Path(verts_1, codes)
path_2 = Path(verts_2, codes)

fig = plt.figure()
ax = fig.add_subplot(111)

patch1 = patches.PathPatch(path1, facecolor='none', lw=2)
patch2 = patches.PathPatch(path2, facecolor='none', lw=2)
ax.add_patch(patch1)
ax.add_patch(patch2)

patch_1 = patches.PathPatch(path_1, facecolor='none', edgecolor='red', lw=2)
patch_2 = patches.PathPatch(path_2, facecolor='none', edgecolor='red', lw=2)
    
# Make the plot
fig=plt.figure()
ax = fig.add_subplot(111)
m.ax=ax

xnode,ynode=m(xnode,ynode)
xnode[xnode==1e+30]=np.nan
ynode[ynode==1e+30]=np.nan

for j in range(0,nelements):
    verts = [(xnode[i1[j]],ynode[i1[j]]),\
         (xnode[i2[j]],ynode[i2[j]]),\
         (xnode[i3[j]],ynode[i3[j]]),\
         (xnode[i1[j]],ynode[i1[j]]) ]
    path = Path(verts)
    patch = patches.PathPatch(path, facecolor='none',edgecolor='grey',lw=0.5)
    m.ax.add_patch(patch)

m.ax.set_xlim(lonmin,lonmax)
m.ax.set_ylim(latmin,latmax)

llon, llat  = np.meshgrid(lon,lat)
x,y = m(llon, llat)
contour=m.contourf(x,y,field,levels2plot,cmap=cmap,norm=norm,extend='both')


#m.drawcoastlines(ax=ax,color='black')
##m.drawparallels(np.arange(latmin,latmax,dlat), linewidth=0,
##                    labels=[1, 0, 0, 0], fontname='Times New Roman',fontsize=16)
##m.drawmeridians(np.arange(lonmin,lonmax,dlon), linewidth=0,
Example #17
0
def make_voronoi_reg(directions,
                     fitsfile,
                     outdir_reg='regions',
                     out_mask='facet.fits',
                     beam_reg=None,
                     png=None):
    """
    Take a list of coordinates and an image and voronoi tesselate the sky.
    It saves ds9 regions + fits mask of the facets

    directions : dict with {'Dir_0':[ra,dec], 'Dir_1':[ra,dec]...} - note that the "Dir_##" naming is important
    firsfile : mask fits file to tassellate (used for coordinates and to avoid splitting islands)
    outdir* : dir where to save regions/masks
    beam_reg : a ds9 region showing the the primary beam, exclude directions outside it
    """
    def closest_node(node, nodes):
        """
        Return closest values to node from nodes
        """
        nodes = np.asarray(nodes)
        dist_2 = np.sum((nodes - node)**2, axis=1)
        return np.argmin(dist_2)

    import lib_img
    logger.debug("Image used for tasselation reference: " + fitsfile)
    fits = pyfits.open(fitsfile)
    hdr, data = lib_img.flatten(fits)
    w = pywcs.WCS(hdr)
    pixsize = np.abs(hdr['CDELT1'])

    # Get facets central pixels
    ras = np.array([directions[d][0].degree for d in directions])
    decs = np.array([directions[d][1].degree for d in directions])
    x_fs, y_fs = w.all_world2pix(ras, decs, 0, ra_dec_order=True)
    # keep track of numbers in the direction names to name the patches correctly in the fits files
    # in this way Dir_12 will have "12" in the fits for that patch.
    nums = [int(d.split('_')[1]) for d in directions.keys()]

    x_c = data.shape[0] / 2.
    y_c = data.shape[1] / 2.

    if beam_reg is None:
        # no beam, use all directions for facets
        idx_for_facet = range(len(directions))
    else:
        r = pyregion.open(beam_reg)
        beam_mask = r.get_mask(header=hdr, shape=data.shape)
        beamradius_pix = r[0].coord_list[2] / pixsize
        idx_for_facet = []
        # keep directions whose central pixel lies inside the beam mask
        # (the original snippet referenced an undefined table 't' here;
        # x_fs/y_fs are assumed to follow the mask's index order)
        for i in range(len(directions)):
            if beam_mask[int(x_fs[i]), int(y_fs[i])]:
                idx_for_facet.append(i)

    # convert to pixel space (Voronoi must be done in Euclidean space)
    x1 = 0
    y1 = 0
    x2 = data.shape[0]
    y2 = data.shape[1]

    # do tessellation
    vor = Voronoi(
        np.array((x_fs[idx_for_facet], y_fs[idx_for_facet])).transpose())
    box = np.array([[x1, y1], [x2, y2]])
    impoly = voronoi_finite_polygons_2d_box(vor, box)

    # create fits mask (each region one number)
    x, y = np.meshgrid(np.arange(x2),
                       np.arange(y2))  # make a canvas with coordinates
    x, y = x.flatten(), y.flatten()
    pixels = np.vstack((x, y)).T
    data_facet = np.zeros(shape=data.shape)
    for num, poly in zip(nums, impoly):
        p = Path(poly)
        pixels_region = p.contains_points(pixels)
        # iterate through direction centres and find which one belongs to this region, then use the dir name to set the number
        # this is important as the Voronoi tessellation has to keep the same names as the original tessellation
        #for x,y,d in zip(x_fs, y_fs, directions.keys()):
        #    if pixels_region.reshape(x2,y2)[int(np.rint(x)),int(np.rint(y))] == True:
        #        num = int(d.split('_')[1])
        #        print num,x,y,d
        data_facet[pixels_region.reshape(x2, y2)] = num

    # put all values in each island equal to the closest region
    struct = generate_binary_structure(2, 2)
    data = binary_dilation(data, structure=struct,
                           iterations=3).astype(data.dtype)  # expand masks
    blobs, number_of_blobs = label(data.astype(int).squeeze(),
                                   structure=[[1, 1, 1], [1, 1, 1], [1, 1, 1]])
    center_of_masses = center_of_mass(data, blobs, range(number_of_blobs + 1))
    for blob in range(1, number_of_blobs + 1):
        # get closest facet
        facet_num = closest_node(center_of_masses[blob],
                                 np.array([y_fs, x_fs]).T)
        # put all pixel of that mask to that facet value
        data_facet[blobs == blob] = nums[facet_num]

    # save fits mask
    pyfits.writeto(out_mask, data_facet, hdr, overwrite=True)

    # save regions
    all_s = []
    for i, poly in enumerate(impoly):
        ra, dec = w.all_pix2world(poly[:, 0], poly[:, 1], 0, ra_dec_order=True)
        coords = np.array([ra, dec]).T.flatten()

        s = Shape('Polygon', None)
        s.coord_format = 'fk5'
        s.coord_list = coords  # ra, dec, radius
        s.coord_format = 'fk5'
        s.attr = ([], {
            'width': '2',
            'point': 'cross',
            'font': '"helvetica 16 normal roman"'
        })
        s.comment = 'color=red'
        all_s.append(s)

        regions = pyregion.ShapeList([s])
        regionfile = outdir_reg + '/' + list(directions)[
            idx_for_facet[i]] + '.reg'
        regions.write(regionfile)

    # add names for all.reg
    for d_name, d_coord in directions.items():
        s = Shape('circle', None)
        s.coord_format = 'fk5'
        s.coord_list = [d_coord[0].degree, d_coord[1].degree,
                        0.01]  # ra, dec, radius
        s.coord_format = 'fk5'
        s.attr = ([], {
            'width': '1',
            'point': 'cross',
            'font': '"helvetica 16 normal roman"'
        })
        s.comment = 'color=white text="%s"' % d_name
        all_s.append(s)

    regions = pyregion.ShapeList(all_s)
    regionfile = outdir_reg + '/all.reg'
    regions.write(regionfile)
    logger.debug(
        'There are %i regions within the PB and %i outside (no facet).' %
        (len(idx_for_facet), len(directions) - len(idx_for_facet)))

    # plot tesselization
    if png is not None:
        import matplotlib.pyplot as pl
        pl.figure(figsize=(8, 8))
        ax1 = pl.gca()
        voronoi_plot_2d(vor,
                        ax1,
                        show_vertices=True,
                        line_colors='black',
                        line_width=2,
                        point_size=4)
        for i, d in enumerate(directions):
            ax1.text(x_fs[i], y_fs[i], d, fontsize=15)
        if beam_reg is not None:
            c1 = pl.Circle((x_c, y_c), beamradius_pix, color='g', fill=False)
            ax1.add_artist(c1)
        ax1.plot([x1, x1, x2, x2, x1], [y1, y2, y2, y1, y1])
        ax1.set_xlabel('RA (pixel)')
        ax1.set_ylabel('Dec (pixel)')
        ax1.set_xlim(x1, x2)
        ax1.set_ylim(y1, y2)
        logger.debug('Save plot: %s' % png)
        pl.savefig(png)
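
# Hypothetical call of make_voronoi_reg; the file names are placeholders and
# each direction maps to [ra, dec] objects exposing a .degree attribute
# (e.g. astropy Angle), as the function expects.
from astropy.coordinates import Angle

directions = {'Dir_0': [Angle('10d'), Angle('40d')],
              'Dir_1': [Angle('11d'), Angle('41d')]}
make_voronoi_reg(directions, 'mask.fits', outdir_reg='regions',
                 out_mask='facet.fits', png='tessel.png')
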
def gamma_GP_from_SP_pt(SP, pt, p, lon, lat):
    """
    Global Polynomial of Neutral Density with respect to Practical Salinity
    and potential temperature.

    Calculates the Global Polynomial of Neutral Density gammma_GP using an
    approximate form gamma_poly of Neutral Density on each oceanic basin:
    North Atlantic, South Atlantic, Pacific, Indian and Southern Ocean.  Each
    function is a polynomial of 28 terms, function of Practical Salinity,
    potential temperature.  The function on the Southern Ocean contains another
    function which is a polynomial of 15 terms of Practical Salinity and
    potential temperature times by a pressure and a potential temperature terms
    which is effective close to Antarctica in shallow waters.  The polynomials
    on each ocean basins are combined to form the Global Polynomial using tests
    in latitude and longitude to recognize the basin and weighting functions to
    make the functions zip together where the oceanic basins communicate.

    Parameters
    ----------
    SP : array_like
         Practical salinity [psu]
    pt : array_like
         Potential temperature [ITS-90 deg C]
    p : array_like
        Sea pressure [dbar] (i.e. absolute pressure - 10.1325 dbar)
    lon : number
          Longitude [0-360]
    lat : number
          Latitude [-90 to 90]

    Returns
    -------
    gamma_GP : array
               Global Polynomial of Neutral Density [kg/m^3] with respect to
               Practical Salinity and potential temperature.

    Examples
    --------
    >>> from oceans.sw_extras import gamma_GP_from_SP_pt
    >>> SP = [35.066, 35.086, 35.089, 35.078, 35.025, 34.851, 34.696, 34.572,
    ...      34.531, 34.509, 34.496, 34.452, 34.458, 34.456, 34.488, 34.536,
    ...      34.579, 34.612, 34.642, 34.657, 34.685, 34.707, 34.72, 34.729]
    >>> pt = [12.25, 12.21, 12.09, 11.99, 11.69, 10.54, 9.35, 8.36, 7.86, 7.43,
    ...       6.87, 6.04, 5.5, 4.9, 4.04, 3.29, 2.78, 2.45, 2.211, 2.011,
    ...       1.894, 1.788, 1.554, 1.38]
    >>> p = [1.0, 48.0, 97.0, 145.0, 194.0, 291.0, 388.0, 485.0, 581.0, 678.0,
    ...      775.0, 872.0, 969.0, 1066.0, 1260.0, 1454.0, 1647.0, 1841.0,
    ...      2020.0, 2216.0, 2413.0, 2611.0, 2878.0, 3000.0]
    >>> lon, lat, n = [187.317, -41.6667, 24]
    >>> gamma_GP_from_SP_pt(SP, pt, p, lon, lat)
    array([26.66339976, 26.68613362, 26.71169809, 26.72286813, 26.74102625,
           26.82472769, 26.91707848, 26.9874849 , 27.03564777, 27.08512861,
           27.15880197, 27.24506111, 27.32438575, 27.40418818, 27.54227885,
           27.67691837, 27.77693976, 27.84683646, 27.90297626, 27.9428694 ,
           27.98107846, 28.01323277, 28.05769996, 28.09071215])

    Author
    ------
    Guillaume Serazin, Paul Barker & Trevor McDougall   [[email protected]]

    VERSION NUMBER: 1.0 (27th October, 2011)

    """
    from matplotlib.path import Path

    SP, pt, p, lon, lat = list(map(np.asanyarray, (SP, pt, p, lon, lat)))
    SP, pt, p, lon, lat = np.broadcast_arrays(SP, pt, p, lon, lat)

    # Normalization of the variables.
    SP = SP / 42.
    pt = pt / 40.

    # Computation of the polynomials on each oceanic basin.
    gamma_NAtl = gamma_G_north_atlantic(SP, pt)
    gamma_SAtl = gamma_G_south_atlantic(SP, pt)
    gamma_Pac = gamma_G_pacific(SP, pt)
    gamma_Ind = gamma_G_indian(SP, pt)
    gamma_SOce = gamma_G_southern_ocean(SP, pt, p)
    # gamma_Arc = np.zeros_like(SP) * np.nan

    # Definition of the Indian part.
    io_lon = np.array([
        100, 100, 55, 22, 22, 146, 146, 133.9, 126.94, 123.62, 120.92, 117.42,
        114.11, 107.79, 102.57, 102.57, 98.79, 100
    ])

    io_lat = np.array([
        20, 40, 40, 20, -90, -90, -41, -12.48, -8.58, -8.39, -8.7, -8.82,
        -8.02, -7.04, -3.784, 2.9, 10, 20
    ])

    # Definition of the Pacific part.
    po_lon = np.array([
        100, 140, 240, 260, 272.59, 276.5, 278.65, 280.73, 295.217, 290, 300,
        294, 290, 146, 146, 133.9, 126.94, 123.62, 120.92, 117.42, 114.11,
        107.79, 102.57, 102.57, 98.79, 100.
    ])

    po_lat = np.array([
        20, 66, 66, 19.55, 13.97, 9.6, 8.1, 9.33, 0, -52., -64.5, -67.5, -90,
        -90, -41, -12.48, -8.58, -8.39, -8.7, -8.82, -8.02, -7.04, -3.784, 2.9,
        10, 20
    ])

    # Definition of the polygon filters.
    io_polygon = Path(list(zip(io_lon, io_lat)))
    po_polygon = Path(list(zip(po_lon, po_lat)))
    i_inter_indian_pacific = (in_polygon(lon, lat, io_polygon) *
                              in_polygon(lon, lat, po_polygon))

    i_indian = np.logical_xor(in_polygon(lon, lat, io_polygon),
                              i_inter_indian_pacific)
    i_pacific = in_polygon(lon, lat, po_polygon)
    i_atlantic = (1 - i_pacific) * (1 - i_indian)

    # Definition of the Atlantic weighting function.
    charac1_sa = lat < -10.
    charac2_sa = np.logical_and(lat <= 10., lat >= -10.)
    w_sa = charac1_sa + charac2_sa * (0.5 + 0.5 * np.cos(np.pi *
                                                         (lat + 10.) / 20.))

    # Definition of the Southern Ocean weighting function.
    charac1_so = lat < -40.
    charac2_so = np.logical_and(lat <= -20., lat >= -40.)
    w_so = charac1_so + charac2_so * (0.5 + 0.5 * np.cos(np.pi *
                                                         (lat + 40.) / 20.))

    # Combination of the North and South Atlantic.
    gamma_Atl = (1. - w_sa) * gamma_NAtl + w_sa * gamma_SAtl

    # Combination of the middle parts.
    gamma_middle = (i_pacific * gamma_Pac + i_atlantic * gamma_Atl +
                    i_indian * gamma_Ind)

    # Combination of the Northern and Southern parts.
    gamma_GP = w_so * gamma_SOce + (1. - w_so) * gamma_middle

    # Set NaN in the arctic region.
    gamma_GP[lat > 66.] = np.nan

    # De-normalization.
    gamma_GP = 20. * gamma_GP - 20

    return gamma_GP
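
Note that in_polygon is not defined in this excerpt. A minimal sketch of what it presumably does, assuming it wraps matplotlib's Path.contains_points and returns a 0/1 mask with the shape of the inputs:

import numpy as np


def in_polygon(lon, lat, polygon):
    # Hypothetical reconstruction, not the original helper: test each
    # (lon, lat) pair against the matplotlib Path, returning 1.0 inside
    # the polygon and 0.0 outside, with the same shape as the inputs.
    points = np.column_stack((np.ravel(lon), np.ravel(lat)))
    inside = polygon.contains_points(points)
    return inside.reshape(np.shape(lon)).astype(float)
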
Example #19
0
    def _draw_text_as_path(self, gc, x, y, s, prop, angle, ismath, mtext=None):
        """
        Draw the text *s* by converting it to paths using the textpath module.

        Parameters
        ----------
        s : str
          text to be converted
        prop : `matplotlib.font_manager.FontProperties`
          font property
        ismath : bool
          If True, use mathtext parser. If "TeX", use *usetex* mode.
        """
        writer = self.writer

        writer.comment(s)

        glyph_map = self._glyph_map

        text2path = self._text2path
        color = rgb2hex(gc.get_rgb())
        fontsize = prop.get_size_in_points()

        style = {}
        if color != '#000000':
            style['fill'] = color
        alpha = gc.get_alpha() if gc.get_forced_alpha() else gc.get_rgb()[3]
        if alpha != 1:
            style['opacity'] = short_float_fmt(alpha)
        font_scale = fontsize / text2path.FONT_SCALE
        attrib = {
            'style':
            generate_css(style),
            'transform':
            generate_transform([('translate', (x, y)), ('rotate', (-angle, )),
                                ('scale', (font_scale, -font_scale))]),
        }
        writer.start('g', attrib=attrib)

        if not ismath:
            font = text2path._get_font(prop)
            _glyphs = text2path.get_glyphs_with_font(
                font, s, glyph_map=glyph_map, return_new_glyphs_only=True)
            glyph_info, glyph_map_new, rects = _glyphs
            self._update_glyph_map_defs(glyph_map_new)

            for glyph_id, xposition, yposition, scale in glyph_info:
                attrib = {'xlink:href': '#%s' % glyph_id}
                if xposition != 0.0:
                    attrib['x'] = short_float_fmt(xposition)
                if yposition != 0.0:
                    attrib['y'] = short_float_fmt(yposition)
                writer.element('use', attrib=attrib)

        else:
            if ismath == "TeX":
                _glyphs = text2path.get_glyphs_tex(prop,
                                                   s,
                                                   glyph_map=glyph_map,
                                                   return_new_glyphs_only=True)
            else:
                _glyphs = text2path.get_glyphs_mathtext(
                    prop, s, glyph_map=glyph_map, return_new_glyphs_only=True)
            glyph_info, glyph_map_new, rects = _glyphs
            self._update_glyph_map_defs(glyph_map_new)

            for char_id, xposition, yposition, scale in glyph_info:
                char_id = self._adjust_char_id(char_id)
                writer.element('use',
                               transform=generate_transform([
                                   ('translate', (xposition, yposition)),
                                   ('scale', (scale, )),
                               ]),
                               attrib={'xlink:href': '#%s' % char_id})

            for verts, codes in rects:
                path = Path(verts, codes)
                path_data = self._convert_path(path, simplify=False)
                writer.element('path', d=path_data)

        writer.end('g')
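
The same text-to-path conversion used by this backend method is available at user level via matplotlib.textpath.TextPath; a small standalone sketch:

import matplotlib.pyplot as plt
from matplotlib.patches import PathPatch
from matplotlib.textpath import TextPath

fig, ax = plt.subplots()
# Render the string as a Path in data coordinates (size in data units).
text_path = TextPath((0.1, 0.4), "Path", size=0.3)
ax.add_patch(PathPatch(text_path, facecolor='0.3', edgecolor='none'))
ax.set_xlim(0, 1.2)
ax.set_ylim(0, 1)
fig.savefig('text_as_path.png')
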
import matplotlib.pyplot as plt
from matplotlib.path import Path
import matplotlib.patches as patches

fig, ax = plt.subplots()

verts = [
    (0., 0.),
    (0., 1.),
    (0.5, 1.5),
    (1., 1.),
    (1., 0.),
    (0., 0.),
]

codes = [
    Path.MOVETO,
    Path.LINETO,
    Path.LINETO,
    Path.LINETO,
    Path.LINETO,
    Path.CLOSEPOLY,
]

path = Path(verts, codes)

patch = patches.PathPatch(path, facecolor='coral')
ax.add_patch(patch)
ax.set_xlim(-0.5, 2)
ax.set_ylim(-0.5, 2)

fig.canvas.print_figure('demo.jpg')
Example #21
0
def mask_img(img,
             geo,
             edge=30,
             lower_thresh=0.0,
             upper_thresh=None,
             bs_width=13,
             tri_offset=13,
             v_asym=0,
             alpha=2.5,
             auto_type='median',
             tmsk=None):
    """
    Mask an image based on various methods

    Parameters
    ----------
    img: np.ndarray
        The image to be masked
    geo: pyFAI.geometry.Geometry
        The pyFAI description of the detector orientation or any
        subclass of pyFAI.geometry.Geometry class
    edge: int, optional
        The number of edge pixels to mask. Defaults to 30. If None, no edge
        mask is applied
    lower_thresh: float, optional
        Pixels with values less than or equal to this threshold will be masked.
        Defaults to 0.0. If None, no lower threshold mask is applied
    upper_thresh: float, optional
        Pixels with values greater than or equal to this threshold will be
        masked.
        Defaults to None. If None, no upper threshold mask is applied.
    bs_width: int, optional
        The width of the beamstop in pixels. Defaults to 13.
        If None, no beamstop polygon mask is applied.
    tri_offset: int, optional
        The triangular pixel offset to create a pointed beamstop polygon mask.
        Defaults to 13. If None, no beamstop polygon mask is applied.
    v_asym: int, optional
        The vertical asymmetry of the polygon beamstop mask. Defaults to 0.
        If None, no beamstop polygon mask is applied.
    alpha: float or tuple or 1darray, optional
        The number of acceptable standard deviations; if a tuple then a
        linear distribution of alphas from alpha[0] to alpha[1] is used, if
        an array then it is used directly as the distribution of alphas.
        Defaults to 2.5. If None, no outlier masking is applied.
    auto_type: str, optional
        The statistic used by the automated outlier masking. Defaults to
        'median'. Passed to the masking routine as *mask_method*.
    tmsk: np.ndarray, optional
        The starting mask to be compounded on. Defaults to None. If None mask
        generated from scratch.

    Returns
    -------
    tmsk: np.ndarray
        The mask as a boolean array. True pixels are good pixels, False pixels
        are masked out.

    """

    if tmsk is None:
        working_mask = np.ones(img.shape).astype(bool)
    else:
        working_mask = tmsk.copy()
    if edge:
        working_mask *= margin(img.shape, edge)
    if lower_thresh:
        working_mask *= (img >= lower_thresh).astype(bool)
    if upper_thresh:
        working_mask *= (img <= upper_thresh).astype(bool)
    if all([a is not None for a in [bs_width, tri_offset, v_asym]]):
        center_x, center_y = [
            geo.getFit2D()[k] for k in ['centerX', 'centerY']
        ]
        nx, ny = img.shape
        mask_verts = [(center_x - bs_width, center_y),
                      (center_x, center_y - tri_offset),
                      (center_x + bs_width, center_y),
                      (center_x + bs_width + v_asym, ny),
                      (center_x - bs_width - v_asym, ny)]

        x, y = np.meshgrid(np.arange(nx), np.arange(ny))
        x, y = x.flatten(), y.flatten()

        points = np.vstack((x, y)).T

        path = Path(mask_verts)
        grid = path.contains_points(points)
        # Plug msk_grid into the next (edge-mask) step in automask
        working_mask *= ~grid.reshape((ny, nx))

    if alpha:
        working_mask *= new_masking_method(img,
                                           geo,
                                           alpha=alpha,
                                           tmsk=working_mask,
                                           mask_method=auto_type)
    working_mask = working_mask.astype(bool)
    return working_mask
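
A self-contained sketch of the beamstop polygon step above (synthetic 100x100 image, arbitrary center), together with a hypothetical margin() helper, which is referenced above but not included in this excerpt:

import numpy as np
from matplotlib.path import Path


def margin(shape, edge):
    # Hypothetical reconstruction of the edge-mask helper: True everywhere
    # except for an `edge`-pixel wide border.
    mask = np.zeros(shape, dtype=bool)
    mask[edge:-edge, edge:-edge] = True
    return mask


nx, ny = 100, 100
center_x, center_y, bs_width, tri_offset = 50.0, 50.0, 13, 13
mask_verts = [(center_x - bs_width, center_y),
              (center_x, center_y - tri_offset),
              (center_x + bs_width, center_y),
              (center_x + bs_width, ny),
              (center_x - bs_width, ny)]
x, y = np.meshgrid(np.arange(nx), np.arange(ny))
points = np.vstack((x.flatten(), y.flatten())).T
inside = Path(mask_verts).contains_points(points).reshape(ny, nx)
good_pixels = margin((nx, ny), 30) & ~inside  # True = unmasked pixel
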
Example #22
0
    def check(self):

        frameNumber = self.acq.GetPointFrameNumber()

        LASI_values = self.acq.GetPoint("LASI").GetValues()
        RASI_values = self.acq.GetPoint("RASI").GetValues()
        LPSI_values = self.acq.GetPoint("LPSI").GetValues()
        RPSI_values = self.acq.GetPoint("RPSI").GetValues()
        sacrum_values = (self.acq.GetPoint("LPSI").GetValues() +
                         self.acq.GetPoint("RPSI").GetValues()) / 2.0
        midAsis_values = (self.acq.GetPoint("LASI").GetValues() +
                          self.acq.GetPoint("RASI").GetValues()) / 2.0

        projectedLASI = np.array(
            [LASI_values[:, 0], LASI_values[:, 1],
             np.zeros((frameNumber))]).T
        projectedRASI = np.array(
            [RASI_values[:, 0], RASI_values[:, 1],
             np.zeros((frameNumber))]).T
        projectedLPSI = np.array(
            [LPSI_values[:, 0], LPSI_values[:, 1],
             np.zeros((frameNumber))]).T
        projectedRPSI = np.array(
            [RPSI_values[:, 0], RPSI_values[:, 1],
             np.zeros((frameNumber))]).T

        for i in range(0, frameNumber):
            verts = [
                projectedLASI[i, 0:2],  # LASI
                projectedRASI[i, 0:2],  # RASI
                projectedRPSI[i, 0:2],  # RPSI
                projectedLPSI[i, 0:2],  # LPSI
                projectedLASI[i, 0:2],  # back to LASI to close the polygon
            ]

            codes = [
                Path.MOVETO,
                Path.LINETO,
                Path.LINETO,
                Path.LINETO,
                Path.CLOSEPOLY,
            ]

            path = Path(verts, codes)

            intersection = geometry.LineLineIntersect(projectedLASI[i, :],
                                                      projectedLPSI[i, :],
                                                      projectedRASI[i, :],
                                                      projectedRPSI[i, :])

            if path.contains_point(intersection[0]):
                logging.error(
                    "[pyCGM2-Checking] wrong Labelling of pelvic markers at frame [%i]"
                    % (i))
                if self.exceptionMode:
                    raise Exception(
                        "[pyCGM2-Checking] wrong Labelling of pelvic markers at frame [%i]"
                        % (i))

                self.state = False
            else:
                # check marker side
                pt1 = RASI_values[i, :]
                pt2 = LASI_values[i, :]
                pt3 = sacrum_values[i, :]
                ptOrigin = midAsis_values[i, :]

                a1 = (pt2 - pt1)
                a1 = np.divide(a1, np.linalg.norm(a1))
                v = (pt3 - pt1)
                v = np.divide(v, np.linalg.norm(v))
                a2 = np.cross(a1, v)
                a2 = np.divide(a2, np.linalg.norm(a2))

                x, y, z, R = frame.setFrameData(a1, a2, "YZX")

                csFrame_L = frame.Frame()
                csFrame_L.setRotation(R)
                csFrame_L.setTranslation(RASI_values[i, :])

                csFrame_R = frame.Frame()
                csFrame_R.setRotation(R)
                csFrame_R.setTranslation(LASI_values[i, :])

                for marker in self.markers:
                    residual = self.acq.GetPoint(marker).GetResidual(i)

                    if marker[0] == "L":
                        local = np.dot(
                            csFrame_L.getRotation().T,
                            self.acq.GetPoint(marker).GetValues()[i, :] -
                            csFrame_L.getTranslation())
                    if marker[0] == "R":
                        local = np.dot(
                            csFrame_R.getRotation().T,
                            self.acq.GetPoint(marker).GetValues()[i, :] -
                            csFrame_R.getTranslation())
                    if residual > 0.0:
                        if marker[0] == "L" and local[1] < 0:
                            logging.error(
                                "[pyCGM2-Checking] check location of the marker [%s] at frame [%i]"
                                % (marker, i))
                            self.state = False
                            if self.exceptionMode:
                                raise Exception(
                                    "[pyCGM2-Checking] check location of the marker [%s] at frame [%i]"
                                    % (marker, i))

                        if marker[0] == "R" and local[1] > 0:
                            logging.error(
                                "[pyCGM2-Checking] check location of the marker [%s] at frame [%i]"
                                % (marker, i))
                            self.state = False
                            if self.exceptionMode:
                                raise Exception(
                                    "[pyCGM2-Checking] check location of the marker [%s] at frame [%i]"
                                    % (marker, i))
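
The labelling test above exploits a geometric fact: with consistent labelling, the line through the two left-side markers and the line through the two right-side markers do not cross inside the LASI-RASI-RPSI-LPSI quadrilateral, so an intersection point inside the polygon flags swapped markers. A tiny sketch of the Path.contains_point test, with illustrative values only:

from matplotlib.path import Path

# A unit quadrilateral standing in for the projected pelvis markers.
quad = Path([(0, 1), (2, 1), (2, 0), (0, 0), (0, 1)],
            [Path.MOVETO, Path.LINETO, Path.LINETO, Path.LINETO,
             Path.CLOSEPOLY])
print(quad.contains_point((1.0, 0.5)))   # True: point lies inside
print(quad.contains_point((3.0, 0.5)))   # False: point lies outside
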
Example #23
0
def draw_networkx_edges(G,
                        pos,
                        edgelist=None,
                        width=1.0,
                        edge_color='k',
                        style='solid',
                        alpha=1.0,
                        edge_cmap=None,
                        edge_vmin=None,
                        edge_vmax=None,
                        ax=None,
                        arrows=True,
                        arrowstyle='thick',
                        label=None,
                        **kwds):
    try:
        import matplotlib
        import matplotlib.pyplot as plt
        import matplotlib.cbook as cb
        import matplotlib.patches as patches
        from matplotlib.colors import colorConverter, Colormap
        from matplotlib.collections import LineCollection
        from matplotlib.path import Path
        import numpy
    except ImportError:
        raise ImportError("Matplotlib required for draw()")
    except RuntimeError:
        print("Matplotlib unable to open display")
        raise
    # print "drawing_edges"

    if ax is None:
        ax = plt.gca()

    if edgelist is None:
        edgelist = G.edges()

    if not edgelist:  # no edges!
        return None

    # set edge positions
    edge_pos = numpy.asarray([(pos[e[0]], pos[e[1]]) for e in edgelist])
    # for e in edge_pos:
    #   print e
    if not cb.iterable(width):
        lw = (width, )
    else:
        lw = width

    if not cb.is_string_like(edge_color) \
           and cb.iterable(edge_color) \
           and len(edge_color) == len(edge_pos):
        if numpy.alltrue([cb.is_string_like(c) for c in edge_color]):
            # (should check ALL elements)
            # list of color letters such as ['k','r','k',...]
            edge_colors = tuple(
                [colorConverter.to_rgba(c, alpha) for c in edge_color])
        elif numpy.alltrue([not cb.is_string_like(c) for c in edge_color]):
            # If color specs are given as (rgb) or (rgba) tuples, we're OK
            if numpy.alltrue(
                [cb.iterable(c) and len(c) in (3, 4) for c in edge_color]):
                edge_colors = tuple(edge_color)
            else:
                # numbers (which are going to be mapped with a colormap)
                edge_colors = None
        else:
            raise ValueError(
                'edge_color must consist of either color names or numbers')
    else:
        if cb.is_string_like(edge_color) or len(edge_color) == 1:
            edge_colors = (colorConverter.to_rgba(edge_color, alpha), )
        else:
            raise ValueError(
                'edge_color must be a single color or list of exactly m colors where m is the number of edges'
            )

    edge_collection = LineCollection(
        edge_pos,
        colors=edge_colors,
        linewidths=lw,
        antialiaseds=(1, ),
        linestyle=style,
        transOffset=ax.transData,
    )

    # print type(edge_collection)

    edge_collection.set_zorder(1)  # edges go behind nodes
    edge_collection.set_label(label)
    ax.add_collection(edge_collection)

    # Note: there was a bug in mpl regarding the handling of alpha values for
    # each line in a LineCollection.  It was fixed in matplotlib in r7184 and
    # r7189 (June 6 2009).  We should then not set the alpha value globally,
    # since the user can instead provide per-edge alphas now.  Only set it
    # globally if provided as a scalar.
    if cb.is_numlike(alpha):
        edge_collection.set_alpha(alpha)

    if edge_colors is None:
        if edge_cmap is not None:
            assert (isinstance(edge_cmap, Colormap))
        edge_collection.set_array(numpy.asarray(edge_color))
        edge_collection.set_cmap(edge_cmap)
        if edge_vmin is not None or edge_vmax is not None:
            edge_collection.set_clim(edge_vmin, edge_vmax)
        else:
            edge_collection.autoscale()

    arrow_collection = None

    if G.is_directed() and arrows:

        # a directed graph hack: draw a two-segment arrow head at the end
        # of each edge, pending a proper arrow implementation
        arrow_colors = edge_colors
        a_pos = []
        p = .1  # make arrows 10% of total length
        angle = 2.7  #angle for arrows
        for src, dst in edge_pos:
            x1, y1 = src
            x2, y2 = dst
            dx = x2 - x1  # x offset
            dy = y2 - y1  # y offset
            d = numpy.sqrt(float(dx**2 + dy**2))  # length of edge
            theta = numpy.arctan2(dy, dx)
            if d == 0:  # source and target at same position
                continue
            # arctan2 already handles vertical and horizontal edges, so no
            # special-casing is needed; shorten the edge slightly so the
            # arrow head does not overlap the target node.
            x2 -= .04 * numpy.cos(theta)
            y2 -= .04 * numpy.sin(theta)
            lx1 = p * d * numpy.cos(theta + angle) + (x2)
            lx2 = p * d * numpy.cos(theta - angle) + (x2)
            ly1 = p * d * numpy.sin(theta + angle) + (y2)
            ly2 = p * d * numpy.sin(theta - angle) + (y2)

            a_pos.append(((lx1, ly1), (x2, y2)))
            a_pos.append(((lx2, ly2), (x2, y2)))

        arrow_collection = LineCollection(
            a_pos,
            colors=arrow_colors,
            linewidths=[1 * ww for ww in lw],
            antialiaseds=(1, ),
            transOffset=ax.transData,
        )

        arrow_collection.set_zorder(1)  # edges go behind nodes
        arrow_collection.set_label(label)
        # print type(ax)
        ax.add_collection(arrow_collection)

    #drawing self loops

    d = 1
    c = 0.0707
    selfedges = []
    verts = [
        (0.1 * d - 0.1 * d, 0.0),    # P0: rightmost point of the loop
        (c * d - 0.1 * d, c * d),    # P1
        (0.0 - 0.1 * d, 0.1 * d),    # P2: top
        (-c * d - 0.1 * d, c * d),   # P3
        (-0.1 * d - 0.1 * d, 0.0),   # P4: leftmost
        (-c * d - 0.1 * d, -c * d),  # P5
        (0.0 - 0.1 * d, -0.1 * d),   # P6: bottom
        (c * d - 0.1 * d, -c * d),   # P7
        (0.1 * d - 0.1 * d, 0.0)     # back to P0 to close the loop
    ]
    # print verts

    codes = [
        Path.MOVETO,
        Path.LINETO,
        Path.LINETO,
        Path.LINETO,
        Path.LINETO,
        Path.LINETO,
        Path.LINETO,
        Path.LINETO,
        Path.LINETO,
    ]

    for e in edge_pos:
        if (numpy.array_equal(e[0], e[1])):
            nodes = verts[:]
            for i in range(len(nodes)):
                nodes[i] += e[0]
            # print nodes
            path = Path(nodes, codes)
            patch = patches.PathPatch(path,
                                      color=None,
                                      facecolor=None,
                                      edgecolor=edge_colors[0],
                                      fill=False,
                                      lw=4)
            ax.add_patch(patch)

    # update view
    minx = numpy.amin(numpy.ravel(edge_pos[:, :, 0]))
    maxx = numpy.amax(numpy.ravel(edge_pos[:, :, 0]))
    miny = numpy.amin(numpy.ravel(edge_pos[:, :, 1]))
    maxy = numpy.amax(numpy.ravel(edge_pos[:, :, 1]))

    w = maxx - minx
    h = maxy - miny
    padx, pady = 0.05 * w, 0.05 * h
    corners = (minx - padx, miny - pady), (maxx + padx, maxy + pady)
    # print ax
    ax.update_datalim(corners)
    ax.autoscale_view()

    #    if arrow_collection:

    return edge_collection
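
A minimal usage sketch for a function with this interface; note that the function above relies on matplotlib.cbook helpers (is_string_like, iterable, is_numlike) that only exist in older matplotlib releases, so on current versions one would call networkx's own nx.draw_networkx_edges instead:

import networkx as nx
import matplotlib.pyplot as plt

G = nx.DiGraph([(1, 2), (2, 3), (3, 3)])   # includes a self-loop on node 3
pos = nx.spring_layout(G, seed=0)
fig, ax = plt.subplots()
nx.draw_networkx_nodes(G, pos, ax=ax, node_color='lightgray')
nx.draw_networkx_edges(G, pos, ax=ax, edge_color='k')
fig.savefig('edges.png')
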
Example #24
0
def plot_slab(slab,
              ax,
              scale=0.8,
              repeat=5,
              window=1.5,
              draw_unit_cell=True,
              decay=0.2,
              adsorption_sites=True):
    """
    Function that helps visualize the slab in a 2-D plot, for
    convenient viewing of output of AdsorbateSiteFinder.

    Args:
        slab (slab): Slab object to be visualized
        ax (axes): matplotlib axes with which to visualize
        scale (float): radius scaling for sites
        repeat (int): number of repeating unit cells to visualize
        window (float): window for setting the axes limits, is essentially
            a fraction of the unit cell limits
        draw_unit_cell (bool): flag indicating whether or not to draw cell
        decay (float): how the alpha-value decays along the z-axis
        adsorption_sites (bool): whether to overlay the adsorption sites
            found by AdsorbateSiteFinder as 'x' markers
    """
    orig_slab = slab.copy()
    slab = reorient_z(slab)
    orig_cell = slab.lattice.matrix.copy()
    if repeat:
        slab.make_supercell([repeat, repeat, 1])
    coords = np.array(sorted(slab.cart_coords, key=lambda x: x[2]))
    sites = sorted(slab.sites, key=lambda x: x.coords[2])
    alphas = 1 - decay * (np.max(coords[:, 2]) - coords[:, 2])
    alphas = alphas.clip(min=0)
    corner = [0, 0, slab.lattice.get_fractional_coords(coords[-1])[-1]]
    corner = slab.lattice.get_cartesian_coords(corner)[:2]
    verts = orig_cell[:2, :2]
    lattsum = verts[0] + verts[1]
    # Draw circles at sites and stack them accordingly
    for n, coord in enumerate(coords):
        r = sites[n].specie.atomic_radius * scale
        ax.add_patch(
            patches.Circle(coord[:2] - lattsum * (repeat // 2),
                           r,
                           color='w',
                           zorder=2 * n))
        color = color_dict[sites[n].species_string]
        ax.add_patch(
            patches.Circle(coord[:2] - lattsum * (repeat // 2),
                           r,
                           facecolor=color,
                           alpha=alphas[n],
                           edgecolor='k',
                           lw=0.3,
                           zorder=2 * n + 1))
    # Adsorption sites
    if adsorption_sites:
        asf = AdsorbateSiteFinder(orig_slab)
        ads_sites = asf.find_adsorption_sites()['all']
        sop = get_rot(orig_slab)
        ads_sites = [
            sop.operate(ads_site)[:2].tolist() for ads_site in ads_sites
        ]
        ax.plot(*zip(*ads_sites),
                color='k',
                marker='x',
                markersize=10,
                mew=1,
                linestyle='',
                zorder=10000)
    # Draw unit cell
    if draw_unit_cell:
        verts = np.insert(verts, 1, lattsum, axis=0).tolist()
        verts += [[0., 0.]]
        verts = [[0., 0.]] + verts
        codes = [
            Path.MOVETO, Path.LINETO, Path.LINETO, Path.LINETO, Path.CLOSEPOLY
        ]
        verts = [(np.array(vert) + corner).tolist() for vert in verts]
        path = Path(verts, codes)
        patch = patches.PathPatch(path,
                                  facecolor='none',
                                  lw=2,
                                  alpha=0.5,
                                  zorder=2 * n + 2)
        ax.add_patch(patch)
    ax.set_aspect("equal")
    center = corner + lattsum / 2.
    extent = np.max(lattsum)
    lim_array = [center - extent * window, center + extent * window]
    x_lim = [ele[0] for ele in lim_array]
    y_lim = [ele[1] for ele in lim_array]
    ax.set_xlim(x_lim)
    ax.set_ylim(y_lim)
    return ax
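
A hypothetical usage sketch, assuming pymatgen is installed (there plot_slab and its helpers such as AdsorbateSiteFinder live in pymatgen.analysis.adsorption):

import matplotlib.pyplot as plt
from pymatgen.core import Lattice, Structure
from pymatgen.core.surface import SlabGenerator
from pymatgen.analysis.adsorption import plot_slab

# Build a simple Cu(111) slab and draw its top view.
cu = Structure.from_spacegroup("Fm-3m", Lattice.cubic(3.6), ["Cu"],
                               [[0, 0, 0]])
slab = SlabGenerator(cu, (1, 1, 1), 5, 10).get_slab()
fig, ax = plt.subplots()
plot_slab(slab, ax, repeat=3, adsorption_sites=False)
fig.savefig('cu_111_top_view.png')
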
Example #25
0
    def plot_jet_axis(self, ax, mask=None, plot_core=True):

        U, V, windspeed = self.get_wind_components()

        lat_min, lon_min, lat_max, lon_max = self.chart.domain
        box = sgeom.box(lon_min, lat_min, lon_max, lat_max)

        if mask is not None:
            # Mask wind components using NaNs because streamplot
            # doesn't seem to respect masked arrays
            U[mask] = np.nan
            V[mask] = np.nan

            # Mask windspeed
            ws = np.ma.masked_where(mask, windspeed)
        else:
            ws = windspeed

        # Select seed point(s) corresponding to maximum windspeed
        seed_index = np.unravel_index(np.argmax(ws, axis=None), ws.shape)
        seed_index = np.array(seed_index, ndmin=2)
        seed_points = np.array([[self.lon[x[1]], self.lat[x[0]]]
                                for x in seed_index])

        # Get streamline(s) through specified seed point(s)
        strm = ax.streamplot(self.lon,
                             self.lat,
                             U,
                             V,
                             start_points=seed_points,
                             arrowstyle='-',
                             linewidth=0)

        segments = strm.lines.get_segments()
        num_seg = len(segments)
        if num_seg == 0:
            return

        # Get line segments, place arrows and get vertices for tramlines
        # TODO add jet entrance/exit markers
        verts = []
        current_point = None
        angle = 0
        for j, seg in enumerate(segments):
            if np.allclose(seg[0], seed_points[0]):
                # Found matching seed point
                if not all(seg[0] == seg[-1]):
                    # Get angle of segment
                    tdx, tdy = seg[-1] - seg[0]
                    angle = math.atan(tdy / tdx) * 180 / math.pi
            for i, s in enumerate(seg[:-1]):
                if not all(s == current_point):
                    # Reset distance sum
                    dist_sum = 0
                # Check length of segment and add patches at
                # suitable intervals
                if np.allclose(s, seg[i + 1]):
                    continue
                dx, dy = seg[i + 1] - s
                seg_len = np.hypot(dx, dy)
                dist_sum = dist_sum + seg_len
                while dist_sum > self.arrow_interval:
                    dist_sum -= self.arrow_interval
                    # Find start and end points for arrow patch
                    v = np.array([dx, dy])
                    loc = (seg_len - dist_sum) / seg_len
                    start = s + loc * v
                    end = start + 0.1 * v / seg_len
                    # Draw patch
                    p = mpatches.FancyArrowPatch(start,
                                                 end,
                                                 transform=ax.transData,
                                                 **self.arrow_options)
                    ax.add_patch(p)

                n = np.array([-dy, dx])
                n = 0.25 * n / np.linalg.norm(n)
                sp = s + n
                sm = s - n
                if not box.contains(sgeom.Point((sp))):
                    line = sgeom.LineString([sp, sp + np.array([dx, dy])])
                    sp = np.array(box.exterior.intersection(line))
                if not box.contains(sgeom.Point((sm))):
                    line = sgeom.LineString([sm, sm + np.array([dx, dy])])
                    sm = np.array(box.exterior.intersection(line))
                if len(sp) and len(sm):
                    verts.append([sp, sm])
                current_point = seg[i + 1]
            if current_point is not None:
                sp = current_point + n
                sm = current_point - n
                if (box.contains(sgeom.Point((sp)))
                        and box.contains(sgeom.Point((sm)))):
                    verts.append([sp, sm])

        # Rearrange array to get vertices for parallel tramlines
        verts = np.stack(verts, axis=1)
        for v in verts:
            path = Path(v)
            patch = mpatches.PathPatch(path, **self.path_options)
            ax.add_patch(patch)

        if plot_core:

            # Mask windspeed to relevant region
            ws_masked = np.ma.masked_where(self.coord_mask, windspeed)

            # Use maximum of specified core threshold and specified percentile
            ws_thres = np.percentile(ws_masked[~ws_masked.mask],
                                     self.thres_core_percentile)
            ws_thres = max(self.thres_core, ws_thres)

            # Get contours for core threshold windspeed
            ctr_list = skimage.measure.find_contours(ws_masked, level=ws_thres)
            for c in ctr_list:
                shp = sgeom.Polygon(c)

                # Does this contain a point of maximum windspeed?
                contains_seed = False
                for x in seed_index:
                    if shp.intersects(sgeom.Point((x))):
                        contains_seed = True
                if not contains_seed:
                    continue

                # Translate centroid coordinates to lon/lat
                centroid = shp.centroid.coords[0]
                xy = [
                    self.lon[np.rint(centroid[1]).astype(int)],
                    self.lat[np.rint(centroid[0]).astype(int)]
                ]

                # Use contour bounds to get dimensions for core
                it = iter(shp.bounds)
                bounds = np.array([[
                    self.lon[np.rint(next(it)).astype(int)],
                    self.lat[np.rint(x).astype(int)]
                ] for x in it])
                dx, dy = np.abs(bounds[1] - bounds[0])

                # Draw core as ellipse around centroid
                p = mpatches.Ellipse(xy=xy,
                                     width=dx,
                                     height=dy,
                                     angle=angle,
                                     **self.core_options)
                ax.add_patch(p)

                # Add label to indicate core pressure level
                loc = xy + np.array([0, dy / 2])
                ax.annotate(f'{self.level} {self.level_units}',
                            xy=loc,
                            **self.core_label_options)
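
The arrow-placement loop above walks each streamline segment and drops a FancyArrowPatch every self.arrow_interval units of arc length. An isolated, standalone sketch of that spacing logic, with a synthetic sine-curve polyline and a hypothetical interval of 1.0:

import numpy as np
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches

t = np.linspace(0, 2 * np.pi, 50)
seg = np.column_stack((t, np.sin(t)))          # the polyline to decorate
interval, dist_sum = 1.0, 0.0
fig, ax = plt.subplots()
ax.plot(seg[:, 0], seg[:, 1], lw=0.5)
for s, nxt in zip(seg[:-1], seg[1:]):
    v = nxt - s
    seg_len = np.hypot(*v)
    dist_sum += seg_len
    while dist_sum > interval:                 # same logic as above
        dist_sum -= interval
        start = s + (seg_len - dist_sum) / seg_len * v
        end = start + 0.1 * v / seg_len
        ax.add_patch(mpatches.FancyArrowPatch(start, end,
                                              mutation_scale=15))
fig.savefig('arrows_along_line.png')
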
Example #26
0
    def add(self,
            patchlabel='',
            flows=None,
            orientations=None,
            labels='',
            trunklength=1.0,
            pathlengths=0.25,
            prior=None,
            connect=(0, 0),
            rotation=0,
            **kwargs):
        """
        Add a simple Sankey diagram with flows at the same hierarchical level.

        Parameters
        ----------
        patchlabel : str
            Label to be placed at the center of the diagram.
            Note that *label* (not *patchlabel*) can be passed as keyword
            argument to create an entry in the legend.

        flows : list of float
            Array of flow values.  By convention, inputs are positive and
            outputs are negative.

            Flows are placed along the top of the diagram from the inside out
            in order of their index within *flows*.  They are placed along the
            sides of the diagram from the top down and along the bottom from
            the outside in.

            If the sum of the inputs and outputs is
            nonzero, the discrepancy will appear as a cubic Bezier curve along
            the top and bottom edges of the trunk.

        orientations : list of {-1, 0, 1}
            List of orientations of the flows (or a single orientation to be
            used for all flows).  Valid values are 0 (inputs from
            the left, outputs to the right), 1 (from and to the top) or -1
            (from and to the bottom).

        labels : list of (str or None)
            List of labels for the flows (or a single label to be used for all
            flows).  Each label may be *None* (no label), or a labeling string.
            If an entry is a (possibly empty) string, then the quantity for the
            corresponding flow will be shown below the string.  However, if
            the *unit* of the main diagram is None, then quantities are never
            shown, regardless of the value of this argument.

        trunklength : float
            Length between the bases of the input and output groups (in
            data-space units).

        pathlengths : list of float
            List of lengths of the vertical arrows before break-in or after
            break-away.  If a single value is given, then it will be applied to
            the first (inside) paths on the top and bottom, and the length of
            all other arrows will be justified accordingly.  The *pathlengths*
            are not applied to the horizontal inputs and outputs.

        prior : int
            Index of the prior diagram to which this diagram should be
            connected.

        connect : (int, int)
            A (prior, this) tuple indexing the flow of the prior diagram and
            the flow of this diagram which should be connected.  If this is the
            first diagram or *prior* is *None*, *connect* will be ignored.

        rotation : float
            Angle of rotation of the diagram in degrees.  The interpretation of
            the *orientations* argument will be rotated accordingly (e.g., if
            *rotation* == 90, an *orientations* entry of 1 means to/from the
            left).  *rotation* is ignored if this diagram is connected to an
            existing one (using *prior* and *connect*).

        Returns
        -------
        Sankey
            The current `.Sankey` instance.

        Other Parameters
        ----------------
        **kwargs
           Additional keyword arguments set `matplotlib.patches.PathPatch`
           properties, listed below.  For example, one may want to use
           ``fill=False`` or ``label="A legend entry"``.

        %(Patch_kwdoc)s

        See Also
        --------
        Sankey.finish
        """
        # Check and preprocess the arguments.
        if flows is None:
            flows = np.array([1.0, -1.0])
        else:
            flows = np.array(flows)
        n = flows.shape[0]  # Number of flows
        if rotation is None:
            rotation = 0
        else:
            # In the code below, angles are expressed in deg/90.
            rotation /= 90.0
        if orientations is None:
            orientations = 0
        try:
            orientations = np.broadcast_to(orientations, n)
        except ValueError:
            raise ValueError(
                f"The shapes of 'flows' {np.shape(flows)} and 'orientations' "
                f"{np.shape(orientations)} are incompatible") from None
        try:
            labels = np.broadcast_to(labels, n)
        except ValueError:
            raise ValueError(
                f"The shapes of 'flows' {np.shape(flows)} and 'labels' "
                f"{np.shape(labels)} are incompatible") from None
        if trunklength < 0:
            raise ValueError(
                "'trunklength' is negative, which is not allowed because it "
                "would cause poor layout")
        if abs(np.sum(flows)) > self.tolerance:
            _log.info(
                "The sum of the flows is nonzero (%f; patchlabel=%r); "
                "is the system not at steady state?", np.sum(flows),
                patchlabel)
        scaled_flows = self.scale * flows
        gain = sum(max(flow, 0) for flow in scaled_flows)
        loss = sum(min(flow, 0) for flow in scaled_flows)
        if prior is not None:
            if prior < 0:
                raise ValueError("The index of the prior diagram is negative")
            if min(connect) < 0:
                raise ValueError(
                    "At least one of the connection indices is negative")
            if prior >= len(self.diagrams):
                raise ValueError(
                    f"The index of the prior diagram is {prior}, but there "
                    f"are only {len(self.diagrams)} other diagrams")
            if connect[0] >= len(self.diagrams[prior].flows):
                raise ValueError(
                    "The connection index to the source diagram is {}, but "
                    "that diagram has only {} flows".format(
                        connect[0], len(self.diagrams[prior].flows)))
            if connect[1] >= n:
                raise ValueError(
                    f"The connection index to this diagram is {connect[1]}, "
                    f"but this diagram has only {n} flows")
            if self.diagrams[prior].angles[connect[0]] is None:
                raise ValueError(
                    f"The connection cannot be made, which may occur if the "
                    f"magnitude of flow {connect[0]} of diagram {prior} is "
                    f"less than the specified tolerance")
            flow_error = (self.diagrams[prior].flows[connect[0]] +
                          flows[connect[1]])
            if abs(flow_error) >= self.tolerance:
                raise ValueError(
                    f"The scaled sum of the connected flows is {flow_error}, "
                    f"which is not within the tolerance ({self.tolerance})")

        # Determine if the flows are inputs.
        are_inputs = [None] * n
        for i, flow in enumerate(flows):
            if flow >= self.tolerance:
                are_inputs[i] = True
            elif flow <= -self.tolerance:
                are_inputs[i] = False
            else:
                _log.info(
                    "The magnitude of flow %d (%f) is below the tolerance "
                    "(%f).\nIt will not be shown, and it cannot be used in a "
                    "connection.", i, flow, self.tolerance)

        # Determine the angles of the arrows (before rotation).
        angles = [None] * n
        for i, (orient, is_input) in enumerate(zip(orientations, are_inputs)):
            if orient == 1:
                if is_input:
                    angles[i] = DOWN
                elif not is_input:
                    # Be specific since is_input can be None.
                    angles[i] = UP
            elif orient == 0:
                if is_input is not None:
                    angles[i] = RIGHT
            else:
                if orient != -1:
                    raise ValueError(
                        f"The value of orientations[{i}] is {orient}, "
                        f"but it must be -1, 0, or 1")
                if is_input:
                    angles[i] = UP
                elif not is_input:
                    angles[i] = DOWN

        # Justify the lengths of the paths.
        if np.iterable(pathlengths):
            if len(pathlengths) != n:
                raise ValueError(
                    f"The lengths of 'flows' ({n}) and 'pathlengths' "
                    f"({len(pathlengths)}) are incompatible")
        else:  # Make pathlengths into a list.
            urlength = pathlengths
            ullength = pathlengths
            lrlength = pathlengths
            lllength = pathlengths
            d = dict(RIGHT=pathlengths)
            pathlengths = [d.get(angle, 0) for angle in angles]
            # Determine the lengths of the top-side arrows
            # from the middle outwards.
            for i, (angle, is_input,
                    flow) in enumerate(zip(angles, are_inputs, scaled_flows)):
                if angle == DOWN and is_input:
                    pathlengths[i] = ullength
                    ullength += flow
                elif angle == UP and not is_input:
                    pathlengths[i] = urlength
                    urlength -= flow  # Flow is negative for outputs.
            # Determine the lengths of the bottom-side arrows
            # from the middle outwards.
            for i, (angle, is_input, flow) in enumerate(
                    reversed(list(zip(angles, are_inputs, scaled_flows)))):
                if angle == UP and is_input:
                    pathlengths[n - i - 1] = lllength
                    lllength += flow
                elif angle == DOWN and not is_input:
                    pathlengths[n - i - 1] = lrlength
                    lrlength -= flow
            # Determine the lengths of the left-side arrows
            # from the bottom upwards.
            has_left_input = False
            for i, (angle, is_input, spec) in enumerate(
                    reversed(
                        list(
                            zip(angles, are_inputs,
                                zip(scaled_flows, pathlengths))))):
                if angle == RIGHT:
                    if is_input:
                        if has_left_input:
                            pathlengths[n - i - 1] = 0
                        else:
                            has_left_input = True
            # Determine the lengths of the right-side arrows
            # from the top downwards.
            has_right_output = False
            for i, (angle, is_input, spec) in enumerate(
                    zip(angles, are_inputs, list(zip(scaled_flows,
                                                     pathlengths)))):
                if angle == RIGHT:
                    if not is_input:
                        if has_right_output:
                            pathlengths[i] = 0
                        else:
                            has_right_output = True

        # Begin the subpaths, and smooth the transition if the sum of the flows
        # is nonzero.
        urpath = [
            (
                Path.MOVETO,
                [
                    (self.gap - trunklength / 2.0),  # Upper right
                    gain / 2.0
                ]),
            (Path.LINETO, [(self.gap - trunklength / 2.0) / 2.0, gain / 2.0]),
            (Path.CURVE4, [(self.gap - trunklength / 2.0) / 8.0, gain / 2.0]),
            (Path.CURVE4, [(trunklength / 2.0 - self.gap) / 8.0, -loss / 2.0]),
            (Path.LINETO, [(trunklength / 2.0 - self.gap) / 2.0, -loss / 2.0]),
            (Path.LINETO, [(trunklength / 2.0 - self.gap), -loss / 2.0])
        ]
        llpath = [
            (
                Path.LINETO,
                [
                    (trunklength / 2.0 - self.gap),  # Lower left
                    loss / 2.0
                ]),
            (Path.LINETO, [(trunklength / 2.0 - self.gap) / 2.0, loss / 2.0]),
            (Path.CURVE4, [(trunklength / 2.0 - self.gap) / 8.0, loss / 2.0]),
            (Path.CURVE4, [(self.gap - trunklength / 2.0) / 8.0, -gain / 2.0]),
            (Path.LINETO, [(self.gap - trunklength / 2.0) / 2.0, -gain / 2.0]),
            (Path.LINETO, [(self.gap - trunklength / 2.0), -gain / 2.0])
        ]
        lrpath = [(
            Path.LINETO,
            [
                (trunklength / 2.0 - self.gap),  # Lower right
                loss / 2.0
            ])]
        ulpath = [(
            Path.LINETO,
            [
                self.gap - trunklength / 2.0,  # Upper left
                gain / 2.0
            ])]

        # Add the subpaths and assign the locations of the tips and labels.
        tips = np.zeros((n, 2))
        label_locations = np.zeros((n, 2))
        # Add the top-side inputs and outputs from the middle outwards.
        for i, (angle, is_input, spec) in enumerate(
                zip(angles, are_inputs, list(zip(scaled_flows, pathlengths)))):
            if angle == DOWN and is_input:
                tips[i, :], label_locations[i, :] = self._add_input(
                    ulpath, angle, *spec)
            elif angle == UP and not is_input:
                tips[i, :], label_locations[i, :] = self._add_output(
                    urpath, angle, *spec)
        # Add the bottom-side inputs and outputs from the middle outwards.
        for i, (angle, is_input, spec) in enumerate(
                reversed(
                    list(
                        zip(angles, are_inputs,
                            list(zip(scaled_flows, pathlengths)))))):
            if angle == UP and is_input:
                tip, label_location = self._add_input(llpath, angle, *spec)
                tips[n - i - 1, :] = tip
                label_locations[n - i - 1, :] = label_location
            elif angle == DOWN and not is_input:
                tip, label_location = self._add_output(lrpath, angle, *spec)
                tips[n - i - 1, :] = tip
                label_locations[n - i - 1, :] = label_location
        # Add the left-side inputs from the bottom upwards.
        has_left_input = False
        for i, (angle, is_input, spec) in enumerate(
                reversed(
                    list(
                        zip(angles, are_inputs,
                            list(zip(scaled_flows, pathlengths)))))):
            if angle == RIGHT and is_input:
                if not has_left_input:
                    # Make sure the lower path extends
                    # at least as far as the upper one.
                    if llpath[-1][1][0] > ulpath[-1][1][0]:
                        llpath.append(
                            (Path.LINETO, [ulpath[-1][1][0],
                                           llpath[-1][1][1]]))
                    has_left_input = True
                tip, label_location = self._add_input(llpath, angle, *spec)
                tips[n - i - 1, :] = tip
                label_locations[n - i - 1, :] = label_location
        # Add the right-side outputs from the top downwards.
        has_right_output = False
        for i, (angle, is_input, spec) in enumerate(
                zip(angles, are_inputs, list(zip(scaled_flows, pathlengths)))):
            if angle == RIGHT and not is_input:
                if not has_right_output:
                    # Make sure the upper path extends
                    # at least as far as the lower one.
                    if urpath[-1][1][0] < lrpath[-1][1][0]:
                        urpath.append(
                            (Path.LINETO, [lrpath[-1][1][0],
                                           urpath[-1][1][1]]))
                    has_right_output = True
                tips[i, :], label_locations[i, :] = self._add_output(
                    urpath, angle, *spec)
        # Trim any hanging vertices.
        if not has_left_input:
            ulpath.pop()
            llpath.pop()
        if not has_right_output:
            lrpath.pop()
            urpath.pop()

        # Concatenate the subpaths in the correct order (clockwise from top).
        path = (urpath + self._revert(lrpath) + llpath + self._revert(ulpath) +
                [(Path.CLOSEPOLY, urpath[0][1])])

        # Create a patch with the Sankey outline.
        codes, vertices = zip(*path)
        vertices = np.array(vertices)

        def _get_angle(a, r):
            if a is None:
                return None
            else:
                return a + r

        if prior is None:
            if rotation != 0:  # By default, none of this is needed.
                angles = [_get_angle(angle, rotation) for angle in angles]
                rotate = Affine2D().rotate_deg(rotation * 90).transform_affine
                tips = rotate(tips)
                label_locations = rotate(label_locations)
                vertices = rotate(vertices)
            text = self.ax.text(0, 0, s=patchlabel, ha='center', va='center')
        else:
            rotation = (self.diagrams[prior].angles[connect[0]] -
                        angles[connect[1]])
            angles = [_get_angle(angle, rotation) for angle in angles]
            rotate = Affine2D().rotate_deg(rotation * 90).transform_affine
            tips = rotate(tips)
            offset = self.diagrams[prior].tips[connect[0]] - tips[connect[1]]
            translate = Affine2D().translate(*offset).transform_affine
            tips = translate(tips)
            label_locations = translate(rotate(label_locations))
            vertices = translate(rotate(vertices))
            kwds = dict(s=patchlabel, ha='center', va='center')
            text = self.ax.text(*offset, **kwds)
        if mpl.rcParams['_internal.classic_mode']:
            fc = kwargs.pop('fc', kwargs.pop('facecolor', '#bfd1d4'))
            lw = kwargs.pop('lw', kwargs.pop('linewidth', 0.5))
        else:
            fc = kwargs.pop('fc', kwargs.pop('facecolor', None))
            lw = kwargs.pop('lw', kwargs.pop('linewidth', None))
        if fc is None:
            fc = next(self.ax._get_patches_for_fill.prop_cycler)['color']
        patch = PathPatch(Path(vertices, codes), fc=fc, lw=lw, **kwargs)
        self.ax.add_patch(patch)

        # Add the path labels.
        texts = []
        for number, angle, label, location in zip(flows, angles, labels,
                                                  label_locations):
            if label is None or angle is None:
                label = ''
            elif self.unit is not None:
                if isinstance(self.format, str):
                    quantity = self.format % abs(number) + self.unit
                elif callable(self.format):
                    quantity = self.format(number)
                else:
                    raise TypeError(
                        'format must be callable or a format string')
                if label != '':
                    label += "\n"
                label += quantity
            texts.append(
                self.ax.text(x=location[0],
                             y=location[1],
                             s=label,
                             ha='center',
                             va='center'))
        # Text objects are placed even if they are empty (as long as the
        # magnitude of the corresponding flow is larger than the tolerance)
        # in case the user wants to provide labels later.

        # Expand the size of the diagram if necessary.
        self.extent = (min(np.min(vertices[:, 0]),
                           np.min(label_locations[:, 0]), self.extent[0]),
                       max(np.max(vertices[:, 0]),
                           np.max(label_locations[:, 0]), self.extent[1]),
                       min(np.min(vertices[:, 1]),
                           np.min(label_locations[:, 1]), self.extent[2]),
                       max(np.max(vertices[:, 1]),
                           np.max(label_locations[:, 1]), self.extent[3]))
        # Include both vertices _and_ label locations in the extents; there
        # are cases where either could determine the margins (e.g., arrow
        # shoulders).

        # Add this diagram as a subdiagram.
        self.diagrams.append(
            SimpleNamespace(patch=patch,
                            flows=flows,
                            angles=angles,
                            tips=tips,
                            text=text,
                            texts=texts))

        # Allow a daisy-chained call structure (see docstring for the class).
        return self
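
A minimal usage sketch of add, following the standard matplotlib Sankey demo (the Sankey constructor forwards these keyword arguments to add):

import matplotlib.pyplot as plt
from matplotlib.sankey import Sankey

Sankey(flows=[0.25, 0.15, 0.60, -0.20, -0.15, -0.05, -0.50, -0.10],
       labels=['', '', '', 'First', 'Second', 'Third', 'Fourth', 'Fifth'],
       orientations=[-1, 1, 0, 1, 1, 1, 0, -1]).finish()
plt.savefig('sankey.png')
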
Example #27
0
 def transform_path(self, path):
     ipath = path.interpolated(self._resolution)
     return Path(self.transform(ipath.vertices), ipath.codes)
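
The transform above leans on Path.interpolated to densify straight segments before applying a nonlinear transform; a quick illustration:

from matplotlib.path import Path

p = Path([(0, 0), (1, 0)])     # a single straight segment
dense = p.interpolated(4)      # resample with 4 steps per segment
print(dense.vertices)          # five points from (0, 0) to (1, 0)
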
Example #28
0
def _font_to_ps_type3(font_path, chars):
    """
    Subset *chars* from the font at *font_path* into a Type 3 font.

    Parameters
    ----------
    font_path : path-like
        Path to the font to be subsetted.
    chars : str
        The characters to include in the subsetted font.

    Returns
    -------
    str
        The string representation of a Type 3 font, which can be included
        verbatim into a PostScript file.
    """
    font = get_font(font_path, hinting_factor=1)
    glyph_ids = [font.get_char_index(c) for c in chars]

    preamble = """\
%!PS-Adobe-3.0 Resource-Font
%%Creator: Converted from TrueType to Type 3 by Matplotlib.
10 dict begin
/FontName /{font_name} def
/PaintType 0 def
/FontMatrix [{inv_units_per_em} 0 0 {inv_units_per_em} 0 0] def
/FontBBox [{bbox}] def
/FontType 3 def
/Encoding [{encoding}] def
/CharStrings {num_glyphs} dict dup begin
/.notdef 0 def
""".format(font_name=font.postscript_name,
           inv_units_per_em=1 / font.units_per_EM,
           bbox=" ".join(map(str, font.bbox)),
           encoding=" ".join("/{}".format(font.get_glyph_name(glyph_id))
                             for glyph_id in glyph_ids),
           num_glyphs=len(glyph_ids) + 1)
    postamble = """
end readonly def

/BuildGlyph {
 exch begin
 CharStrings exch
 2 copy known not {pop /.notdef} if
 true 3 1 roll get exec
 end
} _d

/BuildChar {
 1 index /Encoding get exch get
 1 index /BuildGlyph get exec
} _d

FontName currentdict end definefont pop
"""

    entries = []
    for glyph_id in glyph_ids:
        g = font.load_glyph(glyph_id, LOAD_NO_SCALE)
        v, c = font.get_path()
        entries.append(
            "/%(name)s{%(bbox)s sc\n" % {
                "name": font.get_glyph_name(glyph_id),
                "bbox": " ".join(map(str, [g.horiAdvance, 0, *g.bbox])),
            } + _path.convert_to_string(
                # Convert back to TrueType's internal units (1/64's).
                # (Other dimensions are already in these units.)
                Path(v * 64, c),
                None,
                None,
                False,
                None,
                0,
                # No code for quad Beziers triggers auto-conversion to cubics.
                # Drop intermediate closepolys (relying on the outline
                # decomposer always explicitly moving to the closing point
                # first).
                [b"m", b"l", b"", b"c", b""],
                True).decode("ascii") + "ce} _d")

    return preamble + "\n".join(entries) + postamble
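
Usage note: a hedged sketch of calling this private helper (it lives in
matplotlib's PostScript backend, so this assumes it is importable or already
in scope; the font choice is illustrative):

from matplotlib import font_manager

font_path = font_manager.findfont("DejaVu Sans")
ps_type3 = _font_to_ps_type3(font_path, "Ab")  # subset just 'A' and 'b'
assert ps_type3.startswith("%!PS-Adobe-3.0 Resource-Font")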
Example #29
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import patches
from matplotlib.path import Path


def do_the_plots(hyp_name, mfd_X, mega_bining_in_mag, xmin, xmax, ymin, ymax,
                 Run_name, rate_in_catalog, plot_as_rep, a_s_model, rows, path,
                 bining_in_mag):
    # Scatter each individual model's rates as translucent horizontal ticks.
    for i in range(len(mfd_X)):
        plt.scatter(mega_bining_in_mag,
                    mfd_X[i],
                    c='darkcyan',
                    s=50,
                    edgecolor='none',
                    marker='_',
                    alpha=0.5)
    axes = plt.gca()
    axes.set_xlim([xmin, xmax])
    axes.set_ylim([ymin, ymax])
    # Draw a translucent 16th-84th percentile box around each magnitude bin.
    rates_84 = np.percentile(mfd_X, 84, axis=0)
    rates_16 = np.percentile(mfd_X, 16, axis=0)
    for index_mag in range(len(mega_bining_in_mag)):
        rate_plus = rates_84[index_mag]
        rate_minus = rates_16[index_mag]
        mag = mega_bining_in_mag[index_mag]
        mag_plus = mag + 0.05
        mag_minus = mag - 0.05
        verts = [(mag_minus, rate_minus), (mag_minus, rate_plus),
                 (mag_plus, rate_plus), (mag_plus, rate_minus),
                 (mag_minus, rate_minus)]
        codes = [
            Path.MOVETO, Path.LINETO, Path.LINETO, Path.LINETO, Path.CLOSEPOLY
        ]

        path_poly = Path(verts, codes)

        patch = patches.PathPatch(path_poly,
                                  facecolor='#598556',
                                  lw=0.,
                                  alpha=0.15)
        axes.add_patch(patch)

    plt.scatter(mega_bining_in_mag,
                np.percentile(mfd_X, 50, axis=0),
                c='darkgreen',
                s=25,
                edgecolor='none',
                marker='o',
                alpha=0.8)
    plt.scatter(mega_bining_in_mag,
                np.percentile(mfd_X, 16, axis=0),
                c='darkgreen',
                s=60,
                edgecolor='none',
                marker='_',
                alpha=0.8)
    plt.scatter(mega_bining_in_mag,
                np.percentile(mfd_X, 84, axis=0),
                c='darkgreen',
                s=60,
                edgecolor='none',
                marker='_',
                alpha=0.8)
    plt.plot(mega_bining_in_mag,
             np.array(mfd_X).mean(axis=0),
             color='darkgreen',
             linewidth=2)
    plt.grid()

    plt.yscale('log')
    plt.title('MFD ' + hyp_name)
    plt.savefig(path + '/mfd_' + hyp_name + '_density.png',
                dpi=180,
                transparent=True)
    #plt.show()
    plt.close()
    '''
    # plot linearly
    
    i_mag = 0

    for mag in [4.0,4.5,5.0,5.5,6.0,6.5,7.0,7.5,8.0,8.5]:
        # plotting the catalog
        for index_cat in range(len(rate_in_catalog)):
            mag_bin = [x + 0.01 for x in bining_in_mag]
#            print len(mag_bin)
#            print len(rate_in_catalog[index_cat])
            plt.scatter(mag_bin,rate_in_catalog[index_cat], c='k', s=50, edgecolor='',marker = '_', alpha = 0.25) 
           
        plt.scatter(mag_bin,np.percentile(rate_in_catalog,50,axis=0),
                    c='k', s=30, edgecolor='',marker = 'o',alpha = 0.8)
        plt.scatter(mag_bin,np.percentile(rate_in_catalog,16,axis=0),
                c='k', s=20, edgecolor='',marker = '+',alpha = 0.8)
        plt.scatter(mag_bin,np.percentile(rate_in_catalog,84,axis=0),
            c='k', s=20, edgecolor='',marker = '+',alpha = 0.8)
        plt.scatter(mag_bin,np.array(rate_in_catalog).mean(axis=0),
                    c='k', s=50, edgecolor='',marker = 's',alpha = 0.95)
        
        #plotting the modelled rates
        for i in range(len(mfd_X)):
            plt.scatter(mega_bining_in_mag,mfd_X[i], c='darkcyan', s=50, edgecolor='',marker = '_',alpha = 0.2)
        
        axes = plt.gca()
        axes.set_xlim([mag-0.05,mag+0.55])
        index_x0 = 0
        while bining_in_mag[index_x0] <= mag-0.05 and bining_in_mag[index_x0] != xmax :
            index_x0 +=1
            #print index_x0
        ymax = np.array(mfd_X).max(axis=0)[index_x0] + 0.2*np.array(mfd_X).max(axis=0)[index_x0]
        if ymax==0:
            ymax=0.00001
        axes.set_ylim([0.,ymax])
        mag_bin = [x - 0.01 for x in mega_bining_in_mag] 
        for index_mag in range(len(mag_bin)): 
            rate_plus = np.percentile(mfd_X,84,axis=0)[index_mag]
            rate_minus = np.percentile(mfd_X,16,axis=0)[index_mag]
            mag_i = mega_bining_in_mag[index_mag]
            mag_plus = mag_i+0.05
            mag_minus = mag_i-0.05
            verts = [(mag_minus, rate_minus ),
                     (mag_minus, rate_plus),
                     (mag_plus, rate_plus),
                     (mag_plus, rate_minus),
                     (mag_minus, rate_minus)]
            codes = [Path.MOVETO,
                     Path.LINETO,
                     Path.LINETO,
                     Path.LINETO,
                     Path.CLOSEPOLY]
                     
            path_poly = Path(verts, codes)
            
            patch = patches.PathPatch(path_poly,facecolor = '#598556', lw = 0., alpha = 0.15) 
            
               
            plt.scatter(mag_bin,np.percentile(mfd_X,50,axis=0),
                        c='darkgreen', s=25, edgecolor='',marker = 'o',alpha = 0.8)
            plt.scatter(mag_bin,np.percentile(mfd_X,16,axis=0),
                    c='darkgreen', s=60, edgecolor='',marker = '_',alpha = 0.8)
            plt.scatter(mag_bin,np.percentile(mfd_X,84,axis=0),
                c='darkgreen', s=60, edgecolor='',marker = '_',alpha = 0.8)
            plt.plot(mag_bin,np.array(mfd_X).mean(axis=0),
                        color='darkgreen', linewidth = 2)
                
            
    #        plt.scatter(mag_bin,np.percentile(mfd_X,50,axis=0),
    #                    c='darkgreen', s=30, edgecolor='',marker = 'o',alpha = 0.8)
    #        plt.scatter(mag_bin,np.percentile(mfd_X,16,axis=0),
    #                c='darkgreen', s=20, edgecolor='',marker = '+',alpha = 0.8)
    #        plt.scatter(mag_bin,np.percentile(mfd_X,84,axis=0),
    #            c='darkgreen', s=20, edgecolor='',marker = '+',alpha = 0.8)
    #        plt.scatter(mag_bin,np.array(mfd_X).mean(axis=0),
    #                    c='darkslateblue', s=50, edgecolor='',marker = 's',alpha = 0.95)
    #        axes = plt.gca()
        ymax = np.array(mfd_X).max(axis=0)[0] + 0.2 * np.array(mfd_X).max(axis=0)[0]
        axes.set_ylim([0.,ymax/float(12**i_mag)])  # depends on the b-value
        
        plt.grid() 
    
        #plt.yscale('log')
        plt.title('MFD ' + hyp_name)
        plt.savefig(path + '/mfd_' + hyp_name + '_' + str(mag) + '_' + str(mag+0.5) + '.png',
                    dpi = 100, transparent=True)
        #plt.show()
        plt.close()
        i_mag += 0.5
        '''
    if plot_as_rep:
        # Plot the histogram of the aseismicity values in this branch.
        a_s_hyp = []
        for index in rows:
            a_s_hyp.append(a_s_model[index])

        # Draw the histogram once and size the y-axis from its bin counts.
        counts, _, _ = plt.hist(a_s_hyp, 20)
        plt.axis([0, 100, 0, max(counts) + 10])
        plt.title('aseismic slip for b-value ' + hyp_name)
        plt.savefig(path + '/aseismic_slip.png', dpi=100, transparent=True)
        #plt.show()
        plt.close()
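
Usage note: a hedged driver for do_the_plots() with synthetic inputs; every
value below is illustrative (toy Gutenberg-Richter curves), not from the
source:

import numpy as np

mags = np.arange(4.0, 7.6, 0.1)  # magnitude bin centres
# Three toy models with slightly different b-values.
mfd = [10 ** (4.0 - b * mags) for b in (0.9, 1.0, 1.1)]
do_the_plots('b_value_test', mfd, mags,
             xmin=4.0, xmax=7.5, ymin=1e-6, ymax=1.0,
             Run_name='test_run', rate_in_catalog=mfd, plot_as_rep=False,
             a_s_model=[], rows=[], path='.', bining_in_mag=mags)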
Example #30
def __init__(self, f):
    self.f = f
    from settings import G  # boundary vertices supplied by the project's settings module
    self.boundery = Path(G)
    self.config()
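
Context note: the settings module is not shown in the source; a hypothetical
stub consistent with Path(G) would expose an (N, 2) vertex sequence:

# settings.py -- hypothetical stub; the real module is not part of the source.
# Path(G) above then builds the boundary polygon from these vertices.
G = [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)]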