def generate_Total_berg_lon_lat(Total_berg_lon, Total_berg_lat, input_file,
                                constraints, m):
    all_bergs_lat = get_valid_data_from_traj_file(input_file, 'lat')
    all_bergs_lon = get_valid_data_from_traj_file(input_file, 'lon')

    #Adding constraints to the data:
    print 'Lendth of original field: ', len(all_bergs_lat)
    all_bergs_lat = add_constraint_to_data(input_file, all_bergs_lat,
                                           constraints)
    all_bergs_lon = add_constraint_to_data(input_file, all_bergs_lon,
                                           constraints)

    print 'Lendth of field after constraints: ', len(all_bergs_lat)
    x, y = m(all_bergs_lon, all_bergs_lat)
    #m.scatter(x,y,3,marker='o',color=color_vec[k])

    Total_berg_lon = np.concatenate((Total_berg_lon, x), axis=0)
    Total_berg_lat = np.concatenate((Total_berg_lat, y), axis=0)

    return (Total_berg_lon, Total_berg_lat)
def get_Total_berg_field(input_file, Total_berg_field, field, constraints):
    print 'Loading ', field
    get_original = False
    if field[-1] == '0':
        get_original = True
        field = field[:-1]
    all_bergs_field = get_valid_data_from_traj_file(
        input_file,
        field,
        subtract_orig=False,
        get_original_values=get_original)  # Getting iceberg mass too.
    all_bergs_field = add_constraint_to_data(input_file, all_bergs_field,
                                             constraints)
    Total_berg_field = np.concatenate((Total_berg_field, all_bergs_field),
                                      axis=0)

    return Total_berg_field
def main():
	"""Map mass-weighted mean iceberg velocities (uvel, vvel).

	For each selected run index k, accumulates gridded, mass-weighted
	uvel/vvel from yearly iceberg trajectory files, normalizes by the
	gridded mass, and plots the four fields on south-polar maps.
	"""
	#Clear screen
	#os.system('clear')


	#Defining possible paths
	all_paths=define_paths_array()

	#Flags
	plot_average_dist=1
	plot_full_time_series=0

	#Parameters and variables
	#start_year=1980
	end_year0=1985
	number_of_bins=100
	Number_of_years=25

	#Include contraints to use to filter the data
	# The constraint has the form [field_name, lower_bonud, upper_bound]
	constraint1=np.array(['lat',-85.,-10.])  # Latituds north of -60 in the southern hemisphere
	#constraint2=np.array(['lon',-60.,0.])  #Longitude of Weddel Sea    , lon appears to go from -270 to 90. 
	#constraint3=np.array(['lon',-120.,-60.])  #Longitude of Weddel Sea    , lon appears to go from -270 to 90. 
	constraints=[]
	constraints.append(constraint1)
	#constraints.append(constraint2)
	#constraints.append(constraint3)
	#contraints


	mass_scaling=np.array([2000, 200, 50, 20, 10, 5, 2, 1, 1, 1])



	#field_name='area';  x_min=1;x_max=1.e9  # Good for area including all Deltas
	field_name='area';  x_min=1;x_max=1.e7  # Good for area 
	#field_name='mass' ; #x_min=100;x_max=1.e12  # Good for mass
	#Defining a list of colors
	color_vec=np.array(['blue', 'red','purple','green', 'coral', 'cyan', 'magenta','orange', 'black', 'grey', 'yellow', 'orchid', 'blue', 'red','purple','green', 'coral', 'cyan' ])

	fig = plt.figure(1)



	# Grid geometry (cell areas and T-cell lon/lat) read once from a
	# monthly iceberg diagnostics file.
	input_file='/ptmp/aas/model_output/ulm_mom6_2015.07.20_again_myIcebergTag/AM2_LM3_SIS2_MOM6i_1deg_bergs_Delta1/19000101.icebergs_month.nc'
	area=nc.Dataset(input_file).variables['area'][:, :]
	lon=nc.Dataset(input_file).variables['xT'][:]
	lat=nc.Dataset(input_file).variables['yT'][:]


	count1=0
	#for k in range(13):#for k in np.array([3]):
	#for k in np.array([0 , 8]):
	for k in np.array([8 ]):
	#for k in np.array([0,1,2,5,8,9]):
		count1=count1+1
	#for k in range(0,8):
		input_folder=all_paths[k]


		#Make sure that data exists of the years allocated, othersize change it.
		(min_year, max_year)=find_max_min_years(input_folder,'.iceberg_trajectories.nc')
		end_year=min(max_year,end_year0)
		start_year=max(end_year-Number_of_years,min_year)
		#if max_year>=start_year:
		for year in range(start_year,end_year):
			#year=1945
			filename ='/' + str(year) + '0101.iceberg_trajectories.nc'
 	                input_file=input_folder + filename
			print input_file
			field_name1='uvel'
			#field_name1='lon'
			field_name2='vvel'
			iceberg_lats = get_valid_data_from_traj_file(input_file,'lat',subtract_orig=False)
			iceberg_lons = get_valid_data_from_traj_file(input_file,'lon',subtract_orig=False)
			iceberg_mass = get_valid_data_from_traj_file(input_file,'mass',subtract_orig=False)
			iceberg_field1 = get_valid_data_from_traj_file(input_file,field_name1,subtract_orig=False)
			iceberg_field2 = get_valid_data_from_traj_file(input_file,field_name2,subtract_orig=False)
			print 'Lendth of original field: ' , len(iceberg_field1)
			#Getting the distributions from the data

			#Handeling constraints
	                iceberg_lats=add_constraint_to_data(input_file,iceberg_lats,constraints)
	                iceberg_lons=add_constraint_to_data(input_file,iceberg_lons,constraints)
	                iceberg_mass=add_constraint_to_data(input_file,iceberg_mass,constraints)
	                iceberg_field1=add_constraint_to_data(input_file,iceberg_field1,constraints)
	                iceberg_field2=add_constraint_to_data(input_file,iceberg_field2,constraints)
	                print 'Lendth of field after constraints: ' , len(iceberg_field1)


			# Bin the mass-weighted point data onto the model grid.
			(field_gridded1,mass_gridded)=interpolat_bergs_onto_map(lat,lon,area,iceberg_lats,iceberg_lons,iceberg_field1,iceberg_mass,mass_weighted=True)
			(field_gridded2,mass_gridded)=interpolat_bergs_onto_map(lat,lon,area,iceberg_lats,iceberg_lons,iceberg_field2,iceberg_mass,mass_weighted=True)


			# Accumulate yearly gridded fields into running totals.
			if year==start_year:
				Total_gridded1=field_gridded1
				Total_gridded2=field_gridded2
				Total_mass_gridded= mass_gridded
			else:
				Total_gridded1=Total_gridded1+field_gridded1
				Total_gridded2=Total_gridded2+field_gridded2
				Total_mass_gridded=Total_mass_gridded+ mass_gridded

		# Mass-weighted mean velocity per cell.
		# NOTE(review): this normalizes the LAST year's fields
		# (field_gridded*/mass_gridded), not the Total_gridded* sums,
		# which are accumulated above but never used - confirm intent.
		M=field_gridded1.shape
		norm_field1=np.zeros((M[0],M[1]))
		norm_field2=np.zeros((M[0],M[1]))
		for i in range(M[0]):
			for j in range(M[1]):
				if mass_gridded[i,j]>0:
					norm_field1[i,j]=field_gridded1[i,j]/mass_gridded[i,j]
					norm_field2[i,j]=field_gridded2[i,j]/mass_gridded[i,j]




		subplot(2,2,1)
		plot_polar_field(lat,lon,field_gridded1,pole='south',difference_on=1.,title='uvel',p_values=None,cscale=None,field=None)
		
		subplot(2,2,2)
		plot_polar_field(lat,lon,field_gridded2,pole='south',difference_on=1.,title='vvel',p_values=None,cscale=None,field=None)
		
		subplot(2,2,3)
		#plot_polar_field(lat,lon,field_gridded1/mass_gridded,pole='south',difference_on=1.,title='uvel',p_values=None,cscale=None,field=None)
		plot_polar_field(lat,lon,norm_field1,pole='south',difference_on=1.,title='uvel',p_values=None,cscale=None,field=None)
		
		subplot(2,2,4)
		#plot_polar_field(lat,lon,field_gridded2/mass_gridded,pole='south',difference_on=1.,title='vvel',p_values=None,cscale=None,field=None)
		plot_polar_field(lat,lon,norm_field2,pole='south',difference_on=1.,title='vvel',p_values=None,cscale=None,field=None)
			
			
			
			#Plotting the distributions
			#ax = fig.add_subplot(1,2,count1)
			#ax = fig.add_subplot(1,1,1)
			#plt.plot(field1,-field2,'o',color=color_vec[k])
			#plt.xlabel(field_name1)
			#plt.ylabel(field_name2)
			#plt.ylim([-75., -40]) #For mass
			#plt.xlim([10.e3, 7.4e11]) #For mass
			#ax.set_yscale('log')


	#plt.legend(loc='upper right', frameon=True)
	#fig = matplotlib.pyplot.gcf()
	#fig.set_size_inches(9,4.5)
	plt.show()


	print 'Script complete'
# Example #4
# 0
def main():
    """Compare iceberg mass-vs-age statistics between model runs.

    Either computes (load_the_data == True branch) per-mass-class
    expected-value statistics of field_name2 binned against field_name1
    from yearly iceberg trajectory files - optionally saving them to
    .mat files - or loads previously saved statistics, then plots the
    time-mean curves and (optionally) occurrence matrices for each run
    in Delta_list.

    NOTE(review): the flag name reads inverted - when load_the_data is
    True the data is COMPUTED from trajectory files, and the .mat files
    are only read in the else branch; confirm intent.
    """
    #Clear screen
    #os.system('clear')

    #Defining possible paths
    all_paths = define_paths_array()

    #Flags
    plot_full_time_series = 0
    plot_scatter_plot = False
    plot_mean_fields = True
    plot_occurance_matrix = False
    find_expected_values = True
    fixed_upper_bound = True
    use_log_axis = True
    save_the_data = True
    load_the_data = True

    #Parameters and variables
    #start_year=1980
    #end_year0=2010
    end_year0 = 2057
    #end_year0=2030
    Number_of_years = 12
    #Number_of_years=0
    #2199

    #Which fields do you want to compare?
    #field_name1='area'
    field_name1 = 'age'
    #field_name1='mass'
    #field_name1='width'
    #field_name1='length'
    #field_name1='width'
    #field_name1='thickness'
    #field_name1='LW_max'
    #field_name1='distance_from_calving'

    field_name2 = 'mass'
    #field_name2='area'
    #field_name2='age'
    #field_name2='thickness'
    #field_name2='length'
    #field_name2='width'

    # Initial masses of the 10 iceberg size classes (kg).
    initial_mass_vec = np.array([
        8.8e7, 4.1e8, 3.3e9, 1.8e10, 3.8e10, 7.5e10, 1.2e11, 2.2e11, 3.9e11,
        7.4e11
    ])
    #Include contraints to use to filter the data

    # Library of optional data filters; only those appended to
    # `constraints` below are active.
    constraint2 = {
        'Constraint_field_name': 'lon',
        'lower_bound': -150,
        'upper_bound': -65,
        'original_values': True
    }  #Longitude of AB
    constraint_SH = {
        'Constraint_field_name': 'lat',
        'lower_bound': -150,
        'upper_bound': -5,
        'original_values': True
    }  #Longitude of AB
    constraint_NH = {
        'Constraint_field_name': 'lat',
        'lower_bound': 5,
        'upper_bound': 150,
        'original_values': True
    }  #Longitude of AB
    constraint_depth = {
        'Constraint_field_name': 'depth',
        'lower_bound': 500,
        'upper_bound': 8000,
        'original_values': False
    }
    constraint_dist_from_calving = {
        'Constraint_field_name': 'distance_from_calving',
        'lower_bound': 1000,
        'upper_bound': 10000000000000,
        'original_values': False
    }
    #constraint_mass = {'Constraint_field_name': 'mass', 'lower_bound': -10**16 , 'upper_bound': -0.5*(10**8), 'original_values': False, 'subtract_original': True}
    constraint_mass = {
        'Constraint_field_name': 'mass',
        'lower_bound': 3.8 * (10**11),
        'upper_bound': 4.0 * (10**11),
        'original_values': True,
        'subtract_original': False
    }
    #constraint_mass0 = {'Constraint_field_name': 'mass', 'lower_bound': 7.3*(10**11) , 'upper_bound': 7.6*(10**11), 'original_values': True, 'subtract_original': False}
    constraint_mass0 = {
        'Constraint_field_name': 'mass',
        'lower_bound': 3.2 * (10**9),
        'upper_bound': 3.4 * (10**9),
        'original_values': True,
        'subtract_original': False
    }
    constraint_day = {
        'Constraint_field_name': 'day',
        'lower_bound': 92,
        'upper_bound': 94.,
        'original_values': True,
        'subtract_original': False
    }
    constraint_vvel = {
        'Constraint_field_name': 'vvel',
        'lower_bound': 0.0001,
        'upper_bound': 100,
        'original_values': False,
        'subtract_original': False
    }
    constraint_uvel = {
        'Constraint_field_name': 'uvel',
        'lower_bound': -100,
        'upper_bound': -0.00100,
        'original_values': False,
        'subtract_original': False
    }
    constraint_sst = {
        'Constraint_field_name': 'sst',
        'lower_bound': -10,
        'upper_bound': -1.5,
        'original_values': False,
        'subtract_original': False
    }
    constraint_age = {
        'Constraint_field_name': 'age',
        'lower_bound': 0,
        'upper_bound': 5,
        'original_values': False
    }
    constraint_lon_Sermalik = {
        'Constraint_field_name': 'lon',
        'lower_bound': -38,
        'upper_bound': -36,
        'original_values': True
    }  #Longitude of Rink
    constraint_lat_Sermalik = {
        'Constraint_field_name': 'lat',
        'lower_bound': 65.0,
        'upper_bound': 66.0,
        'original_values': True
    }  #Longitude of Rink
    constraint_lon_WestGreenland = {
        'Constraint_field_name': 'lon',
        'lower_bound': -45,
        'upper_bound': -20,
        'original_values': True
    }
    constraint_lat_SouthGreenland = {
        'Constraint_field_name': 'lat',
        'lower_bound': 50.0,
        'upper_bound': 66.0,
        'original_values': True
    }
    #constraints.append(constraint2)

    #if field_name1=='age' or field_name2=='age':
    #	print 'applying age constraint'
    #	day_constraint = {'Constraint_field_name': 'day', 'lower_bound': 0 , 'upper_bound': 258, 'original_values': True} #Longitude of AB
    # Active constraints (currently none - all appends commented out).
    constraints = []
    #constraints.append(constraint_dist_from_calving)
    #constraints.append(constraint_depth)
    #constraints.append(constraint_mass0)
    #constraints.append(constraint_day)
    #constraints.append(constraint_age)
    #constraints.append(constraint_SH)
    #constraints.append(constraint_sst)
    #constraints.append(constraint_NH)
    #constraints.append(constraint_vvel)
    #constraints.append(constraint_uvel)
    #constraints.append(constraint_lon_Sermalik)
    #constraints.append(constraint_lat_Sermalik)
    #constraints.append(day_constraint)
    #constraints.append(constraint_lon_WestGreenland)
    #constraints.append(constraint_lat_SouthGreenland)

    # Densities (kg/m^3) of ice and seawater.
    rho_ice = 850.0
    rho_sw = 1025.0
    number_of_bins = 100
    Number_of_classes = 10

    mass_scaling = np.array([2000, 200, 50, 20, 10, 5, 2, 1, 1, 1])
    initial_mass = np.array([
        8.8e7, 4.1e8, 3.3e9, 1.8e10, 3.8e10, 7.5e10, 1.2e11, 2.2e11, 3.9e11,
        7.4e11
    ])
    initial_thickness = np.array(
        [40, 67, 133, 175, 250, 250, 250, 250, 250, 250])
    # Per-class initial geometry derived from mass and thickness,
    # assuming a fixed length:width aspect ratio of 1.5.
    initial_area = initial_mass / (initial_thickness * rho_ice)
    initial_W = sqrt(initial_area / 1.5)
    initial_L = 1.5 * initial_W
    # Rolling-criterion width factor and post-rolling dimensions/mass.
    B = np.sqrt(6. * (rho_ice / rho_sw) * (1 - (rho_ice / rho_sw)))
    print B
    W_roll = B * initial_W
    L_roll = 1.5 * W_roll
    #L_roll = initial_L-(initial_W-W_roll)
    Mass_roll = initial_thickness * W_roll * L_roll * rho_ice

    mass_ind_list = np.array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
    mass_num = 1
    #mass_ind_list=np.array([mass_num])

    #Initializing occurance matrix
    number_of_age_bins = 200

    #field_name='area';  x_min=1;x_max=1.e9  # Good for area including all Deltas
    field_name = 'mass'
    x_min = 1
    x_max = 1.e7  # Good for area
    #field_name='mass' ; #x_min=100;x_max=1.e12  # Good for mass
    #Defining a list of colors
    color_vec = np.array([
        'blue', 'red', 'purple', 'green', 'coral', 'cyan', 'magenta', 'orange',
        'black', 'grey', 'yellow', 'orchid', 'blue', 'red', 'purple', 'green',
        'coral', 'cyan'
    ])
    root_path = '/ptmp/aas/model_output/ulm_mom6_2015.07.20_again_myIcebergTag/AM2_LM3_SIS2_MOM6i_1deg_bergs_'

    #Delta_list=np.array(['Delta9', 'rolling_tournadre_WM', 'rolling_Delta9_Burton_new'])
    #Delta_list=np.array(['rolling_Delta1_Burton_new','rolling_Delta5_Burton_new','rolling_Delta9_Burton_new'])
    #Delta_list=np.array(['rolling_tournadre_Burton_breaking_aspect_fixed','rolling_tournadre_Burton_breaking_D_fixed','rolling_tournadre_Burton_new'])
    # Run names; later assignments override earlier ones, so only the
    # last Delta_list (the *_passive pair) is used.
    Delta_list = np.array(
        ['tournadre', 'rolling_tournadre_WM', 'rolling_tournadre_Burton_new'])
    Delta_list = np.array(
        ['rolling_tournadre_Burton_new', 'rolling_tournadre_Rolling_off'])
    Delta_list = np.array([
        'rolling_tournadre_Burton_new_passive',
        'rolling_tournadre_Rolling_off_passive'
    ])
    #Delta_list=np.array(['rolling_tournadre_Burton_new/trajectories/','rolling_tournadre_Rolling_off/trajectories/'])
    #Delta_list=np.array(['Delta1','Delta9'])
    #Delta_list=np.array(['Delta2','Delta3', 'Delta4','Delta5','Delta6'])

    #Name_list=np.array(['Martin and Adcroft','Weeks and Mellor','Burton'])
    Name_list = np.array(['Rolling', 'No Rolling'])

    #load_filename_list=np.array(['processed_data/rolling_tournadre_Rolling_off_age_vs_mass_1950_to_2000_expected_values.mat',\
    #	'processed_data/rolling_tournadre_Burton_new_age_vs_mass_1950_to_2000_expected_values.mat'])
    #load_filename_list=np.array(['processed_data/rolling_tournadre_Burton_new_age_vs_mass_1950_to_2000_expected_values_logaxis_fixed_upperbound.mat',\
    #	'processed_data/rolling_tournadre_Rolling_off_age_vs_mass_1950_to_2000_expected_values_logaxis_fixed_upperbound.mat'])
    #load_filename_list=np.array(['processed_data/rolling_tournadre_Rolling_off_age_vs_mass_1950_to_2000_expected_values_logaxis.mat',\
    #	'processed_data/rolling_tournadre_Burton_new_age_vs_mass_1950_to_2000_expected_values_logaxis.mat'])
    #load_filename_list=np.array(['processed_data/rolling_tournadre_Burton_new_age_vs_mass_1948_to_2198_expected_values_logaxis_Numbins50.mat',\
    #	'processed_data/rolling_tournadre_Rolling_off_age_vs_mass_1943_to_2043_expected_values_logaxis_Numbins100.mat'])
    #load_filename_list=np.array(['processed_data/rolling_tournadre_Burton_new_age_vs_mass_1946_to_2046_expected_values_logaxis__fixed_upperboundNumbins100.mat',\
    #		'processed_data/rolling_tournadre_Rolling_off_age_vs_mass_1943_to_2043_expected_values_logaxis__fixed_upperboundNumbins100.mat'])
    #load_filename_list=np.array(['processed_data/rolling_tournadre_Burton_new_age_vs_mass_1950_to_2000_expected_values_logaxis__fixed_upperbound_moreageNumbins100.mat',\
    #'processed_data/rolling_tournadre_Rolling_off_age_vs_mass_1950_to_2000_expected_values_logaxis__fixed_upperbound_moreageNumbins100.mat'])
    #load_filename_list=np.array(['processed_data/rolling_tournadre_Burton_new_age_vs_mass_1950_to_2000_expected_values_logaxis__fixed_upperboundNumbins100.mat',\
    #'processed_data/rolling_tournadre_Rolling_off_age_vs_mass_1950_to_2000_expected_values_logaxis__fixed_upperboundNumbins100.mat'])

    #Southern_Hemisphere
    No_Rolling_file = 'processed_data/rolling_tournadre_Rolling_off_age_vs_mass_1980_to_2030_expected_values_constraints_lat_50.0_to_75.0_logaxis__fixed_upperboundNumbins100.mat'
    Rolling_file = 'processed_data/rolling_tournadre_Burton_new_age_vs_mass_1980_to_2030_expected_values_constraints_lat_50.0_to_75.0_logaxis__fixed_upperboundNumbins100.mat'

    #No_Rolling_file='processed_data/rolling_tournadre_Rolling_off_age_vs_mass_1980_to_2030_expected_values_constraints_lat_150_to_5_logaxis__fixed_upperboundNumbins100.mat'
    #Rolling_file='processed_data/rolling_tournadre_Burton_new_age_vs_mass_1980_to_2030_expected_values_constraints_lat_150_to_5_logaxis__fixed_upperboundNumbins100.mat'
    #Norther_Hemisphere
    #No_Rolling_file='processed_data/rolling_tournadre_Rolling_off_age_vs_mass_1980_to_2030_expected_values_constraints_lat_5_to_150_logaxis__fixed_upperboundNumbins100.mat'
    #Rolling_file='processed_data/rolling_tournadre_Burton_new_age_vs_mass_1980_to_2030_expected_values_constraints_lat_5_to_150_logaxis__fixed_upperboundNumbins100.mat'

    load_filename_list = np.array([Rolling_file, No_Rolling_file])

    fig = plt.figure(1)
    ax = fig.add_subplot(1, 1, 1)
    # Compute statistics directly from the trajectory files.
    if load_the_data == True:
        count1 = 0
        for k in np.array([0, 1]):
            #for k in np.array([1]):
            count1 = count1 + 1
            #input_folder=all_paths[k]
            #Delta_name='Delta'+str(k)
            Delta_name = Delta_list[k]
            input_folder = root_path + Delta_name + '/trajectories/'

            #Make sure that data exists of the years allocated, othersize change it.
            (min_year,
             max_year) = find_max_min_years(input_folder,
                                            '.iceberg_trajectories.nc')
            end_year = min(max_year, end_year0)
            start_year = max(end_year - Number_of_years, min_year)

            #Full mean matricies
            # One (year x bin) slab per mass class for each statistic.
            mean_matrix = np.zeros(
                (Number_of_classes, end_year - start_year + 1,
                 number_of_bins - 1))
            std_matrix = np.zeros(
                (Number_of_classes, end_year - start_year + 1,
                 number_of_bins - 1))
            x_matrix = np.zeros((Number_of_classes, end_year - start_year + 1,
                                 number_of_bins - 1))

            sigma_matrix = np.zeros(
                (Number_of_classes, end_year - start_year + 1,
                 number_of_bins - 1))
            scale_matrix = np.zeros(
                (Number_of_classes, end_year - start_year + 1,
                 number_of_bins - 1))
            loc_matrix = np.zeros(
                (Number_of_classes, end_year - start_year + 1,
                 number_of_bins - 1))
            mu_matrix = np.zeros((Number_of_classes, end_year - start_year + 1,
                                  number_of_bins - 1))
            median_matrix = np.zeros(
                (Number_of_classes, end_year - start_year + 1,
                 number_of_bins - 1))
            mode_matrix = np.zeros(
                (Number_of_classes, end_year - start_year + 1,
                 number_of_bins - 1))

            #Time mean matricies
            Total_mean = np.zeros((Number_of_classes, number_of_bins - 1))
            Total_std = np.zeros((Number_of_classes, number_of_bins - 1))
            Total_x = np.zeros((Number_of_classes, number_of_bins - 1))

            Total_sigma = np.zeros((Number_of_classes, number_of_bins - 1))
            Total_scale = np.zeros((Number_of_classes, number_of_bins - 1))
            Total_loc = np.zeros((Number_of_classes, number_of_bins - 1))
            Total_mu = np.zeros((Number_of_classes, number_of_bins - 1))
            Total_median = np.zeros((Number_of_classes, number_of_bins - 1))
            Total_mode = np.zeros((Number_of_classes, number_of_bins - 1))

            # Occurrence-matrix bin edges: log-spaced mass, linear age.
            occurance_y = np.logspace(np.log10(10**5),
                                      np.log10(10**11),
                                      num=number_of_bins,
                                      endpoint=True)
            occurance_x = np.linspace(0,
                                      40,
                                      num=number_of_age_bins,
                                      endpoint=True)
            occurance_matrix = np.zeros(
                (Number_of_classes, len(occurance_x), len(occurance_y)))

            # Loop over one trajectory file per model year.
            year_count = -1
            for year in range(start_year, end_year + 1):
                year_count = year_count + 1
                #year=1945
                filename = '/' + str(year) + '0101.iceberg_trajectories.nc'
                input_file = input_folder + filename
                print input_file

                field1_full = get_valid_data_from_traj_file(
                    input_file, field_name1, subtract_orig=False)
                field2_full = get_valid_data_from_traj_file(
                    input_file, field_name2, subtract_orig=False)

                print 'Lendth of field: ', len(field1_full)
                #Getting the distributions from the data

                #Handling all the different mass classes
                for mass_ind in mass_ind_list:
                    mass = initial_mass[mass_ind]
                    # Select bergs whose original mass matches this class
                    # (within +-100 kg of the class's seed mass).
                    constraint_m = {
                        'Constraint_field_name': 'mass',
                        'lower_bound': mass - 100,
                        'upper_bound': mass + 100,
                        'original_values': True
                    }
                    # NOTE(review): the [] is immediately discarded -
                    # mass_constraints ALIASES `constraints`; the
                    # append/pop pair below restores it each iteration.
                    mass_constraints = []
                    mass_constraints = constraints
                    mass_constraints.append(constraint_m)
                    #Handeling constraints
                    field1 = 0.
                    field2 = 0.
                    field1 = add_constraint_to_data(input_file, field1_full,
                                                    mass_constraints)
                    field2 = add_constraint_to_data(input_file, field2_full,
                                                    mass_constraints)
                    print 'Lendth of field after constraints using mass0= ', mass, ' is ', len(
                        field1)
                    mass_constraints.pop()

                    if find_expected_values == True:
                        #Finding statistics
                        (x_bins,y_ave,y_std,sigma,scale,loc,mu,y_median,y_mode)=expected_values(field2,field1,number_of_bins,min_bin_value=True,\
                          use_log_axis=use_log_axis, fixed_upper_bound=fixed_upper_bound)
                        mean_matrix[mass_ind, year_count, :] = y_ave
                        std_matrix[mass_ind, year_count, :] = y_std
                        x_matrix[mass_ind, year_count, :] = x_bins
                        sigma_matrix[mass_ind, year_count, :] = sigma
                        scale_matrix[mass_ind, year_count, :] = scale
                        loc_matrix[mass_ind, year_count, :] = loc
                        mu_matrix[mass_ind, year_count, :] = mu
                        median_matrix[mass_ind, year_count, :] = y_median
                        mode_matrix[mass_ind, year_count, :] = y_mode

                        #Dealing with occurance  (not quite read - needs to take all masses)
                        occurance_matrix[
                            mass_ind, :, :] = construct_occurance_matrix(
                                field1, field2,
                                occurance_matrix[mass_ind, :, :], occurance_x,
                                occurance_y)

                    if plot_scatter_plot == True:
                        label = Name_list[k]
                        plt.plot(field1,
                                 field2,
                                 'o',
                                 color=color_vec[k],
                                 label=label)
                        plt.xlabel(field_name1)
                        plt.ylabel(field_name2)
                        plt.title(Name_list[k])

            #Taking the time mean
            if find_expected_values == True:
                Total_median = find_average_over_time_without_nan(
                    median_matrix, 'median')
                Total_mean = find_average_over_time_without_nan(
                    mean_matrix, 'mean')
                Total_std = find_average_over_time_without_nan(
                    np.sqrt(std_matrix**2), 'mean')
                Total_x = find_average_over_time_without_nan(x_matrix, 'mean')
                Total_sigma = find_average_over_time_without_nan(
                    sigma_matrix, 'mean')
                Total_scale = find_average_over_time_without_nan(
                    scale_matrix, 'mean')
                Total_loc = find_average_over_time_without_nan(
                    loc_matrix, 'mean')
                Total_mu = find_average_over_time_without_nan(
                    mu_matrix, 'mean')

            #Normalizing occurance
            # Each mass class is normalized to unit total probability.
            for mass_ind in range(len(mass_ind_list)):
                occurance_matrix[
                    mass_ind, :, :] = occurance_matrix[mass_ind, :, :] / (
                        np.sum(occurance_matrix[mass_ind, :, :]))
                print 'Sum of occurance_matrix is: ', np.sum(
                    occurance_matrix[mass_ind, :, :])

            if save_the_data == True:
                save_mat_file(mean_matrix, median_matrix, std_matrix, x_matrix, Total_mean, Total_std, Total_x, start_year,end_year,Delta_name,field_name1,field_name2,\
                  sigma_matrix, scale_matrix, loc_matrix, mu_matrix, Total_sigma, Total_loc, Total_scale, Total_mu,Total_median,\
                  constraints,use_log_axis,fixed_upper_bound,str(number_of_bins),occurance_x,occurance_y,occurance_matrix)

            if plot_mean_fields == True:
                # Only the first class carries a legend label per run.
                first_time = True
                for mass_ind in mass_ind_list:
                    if first_time is True:
                        label = Name_list[k]
                        plt.plot(np.squeeze(Total_median[mass_ind, :]),
                                 Total_x[mass_ind, :],
                                 color=color_vec[k],
                                 label=label,
                                 linewidth=4)
                        plt.plot(np.squeeze(Total_median[mass_ind, :]),
                                 Total_x[mass_ind, :],
                                 'o',
                                 color=color_vec[k],
                                 linewidth=4)
                        first_time = False
                    else:
                        plt.plot(np.squeeze(Total_median[mass_ind, :]),
                                 Total_x[mass_ind, :],
                                 color=color_vec[k],
                                 linewidth=4)
                        plt.plot(np.squeeze(Total_median[mass_ind, :]),
                                 Total_x[mass_ind, :],
                                 'o',
                                 color=color_vec[k],
                                 linewidth=4,
                                 linestyle='*')
                plt.xlabel(field_name1)
                plt.ylabel(field_name2)
                ax.set_yscale('log')
                plt.ylim([10**5, 10**12])
                #ax.set_xscale('log')
                #Normalizing

            if plot_occurance_matrix is True:
                #ax = fig.add_subplot(2,1,k+1)
                vmax = np.max(occurance_matrix[mass_num, :, :])
                #cNorm = MidpointNormalize(vmin=0, vmax=5,midpoint=0)
                # The LogNorm assignment overrides the linear Normalize.
                cNorm = mpl.colors.Normalize(vmin=0, vmax=vmax)
                cNorm = mpl.colors.LogNorm(vmin=10**-5, vmax=0.1)
                print len(occurance_x), len(occurance_y)
                plt.pcolor(occurance_x,
                           occurance_y, (np.squeeze(
                               occurance_matrix[mass_num, :, :])).transpose(),
                           cmap='jet',
                           norm=cNorm)
                plt.colorbar()

    else:  # Loading data from mat file
        if plot_mean_fields == True:
            count1 = 0
            #for k in np.array([1]):
            for k in np.array([0, 1]):
                count1 = count1 + 1
                filename = load_filename_list[k]
                print filename
                (Total_median, median_matrix, Total_mean, mean_matrix, Total_x,
                 x_matrix) = load_mass_vs_age_from_mat_file(filename,
                                                            load_type='median')

                first_time = True
                #for mass_ind in mass_ind_list:
                #for mass_ind in np.array([1,2,3,4,5,6,7,8,9]):
                # Class 0 (smallest bergs) is skipped in this plot.
                for mass_ind in np.array([1, 2, 3, 4, 5, 6, 7, 8, 9]):
                    #for mass_ind in np.array([9]):
                    #These should be uncommented
                    #plt.plot(Total_mean,Total_x,color='black',label=label,linewidth=4)
                    #plt.plot(Total_median,Total_x,color='green',label=label,linewidth=4)
                    #plt.plot(Total_mean+Total_std,Total_x,color='magenta',linewidth=4)
                    #plt.plot(Total_mean-Total_std,Total_x,color='magenta',linewidth=4)

                    #(fit_age,fit_mass)=fit_curve(Total_median[mass_ind,:], Total_x[mass_ind,:])
                    #if np.max(np.isnan(Total_median[mass_ind,:]))==1:
                    #		print 'STOP!'
                    #		print Total_median[mass_ind,:]
                    #(filtered_age,filtered_mass)=filter_curve(Total_median[mass_ind,:], Total_x[mass_ind,:],k,mass_ind)
                    if first_time is True:
                        label = Name_list[k]
                        plt.plot(np.squeeze(Total_median[mass_ind, :]),
                                 Total_x[mass_ind, :],
                                 'o',
                                 color=color_vec[k],
                                 linewidth=1)
                        #plt.plot(filtered_age, filtered_mass,'o' ,color=color_vec[k], linewidth=2,label=label)
                        plt.plot(np.squeeze(Total_median[mass_ind, :]),
                                 Total_x[mass_ind, :],
                                 color=color_vec[k],
                                 label=label,
                                 linewidth=4)
                        first_time = False
                    else:
                        plt.plot(np.squeeze(Total_median[mass_ind, :]),
                                 Total_x[mass_ind, :],
                                 color=color_vec[k],
                                 linewidth=4)
                        plt.plot(np.squeeze(Total_median[mass_ind, :]),
                                 Total_x[mass_ind, :],
                                 'o',
                                 color=color_vec[k],
                                 linewidth=1)
                        #plt.plot(filtered_age, filtered_mass,'o' ,color=color_vec[k], linewidth=2)
                    #plt.plot(np.squeeze(Total_median[mass_ind,:]),Total_x[mass_ind,:],color=color_vec[k], linewidth=1)
                    #plt.plot(np.squeeze(Total_median[mass_ind,:]),Total_x[mass_ind,:],color=color_vec[k], linewidth=4,linestyle=':')
                    #plt.plot(filtered_age, filtered_mass ,color=color_vec[k], linewidth=2)
                    #plt.plot(fit_age, fit_mass ,color=color_vec[k], linewidth=3)
                    #print Total_x[mass_ind,:]

                    #plt.plot(5,Mass_roll[mass_ind],'o',color='k')

                plt.xlabel(field_name1)
                plt.ylabel(field_name2)
                ax.set_yscale('log')
                plt.ylim([10**5, 10**12])
                plt.xlim([0, 25.])
                #ax.set_xscale('log')

    # Occurrence matrices loaded from the saved .mat files.
    if plot_occurance_matrix is True:
        for k in np.array([0, 1]):
            filename = load_filename_list[k]
            (occurance_x, occurance_y,
             occurance_matrix) = load_mass_vs_age_from_mat_file(
                 filename, load_type='occurance')
            dp = occurance_x[1] - occurance_x[0]
            #for mass_num in np.array([7]):
            #for mass_num in np.array([1,2,3,4,5,6,7,8,9]):
            # Probability-weighted mean age per mass bin.
            max_occurance = np.zeros((len(occurance_y)))
            mode_occurance = np.zeros((len(occurance_y)))
            for j in range(len(occurance_y)):
                #max_occurance[j]=np.sum(  occurance_matrix[mass_num,:,j]*occurance_x*dp /np.sum(occurance_matrix[mass_num,:,j])  )
                max_occurance[j] = np.sum(
                    occurance_matrix[mass_num, :, j] * occurance_x /
                    np.sum(occurance_matrix[mass_num, :, j]))
                #values=occurance_matrix[mass_num,:,j]
                #mode_occurance[j]=occurance_x[np.argmax(values)]
            #plt.subplot(2,1,k+1)
            vmax = np.max(occurance_matrix[mass_num, :, :])
            #cNorm = MidpointNormalize(vmin=0, vmax=5,midpoint=0)
            #cNorm = mpl.colors.Normalize(vmin=-vmax, vmax=vmax)
            cNorm = mpl.colors.LogNorm(vmin=10**-5, vmax=0.1)
            #if k==0:
            #	max_occurance0=max_occurance
            # Only the second run (k==1) gets the background pcolor map.
            if k == 1:
                plt.pcolor(occurance_x,
                           occurance_y, (np.squeeze(
                               occurance_matrix[mass_num, :, :])).transpose(),
                           cmap='jet',
                           norm=cNorm)
                plt.colorbar()
                plt.title(Name_list[k])
            plt.plot(max_occurance,
                     occurance_y,
                     color=color_vec[k],
                     linewidth=2)
            #plt.plot(mode_occurance,occurance_y,color=color_vec[k],linewidth=2)
            #title=Name_list[k]
            plt.plot(max_occurance,
                     occurance_y,
                     color=color_vec[k],
                     linewidth=1,
                     label=Name_list[k])
            #plt.plot(mode_occurance,occurance_y,color=color_vec[k],linewidth=1, label=Name_list[k])
            ax.set_yscale('log')

    #plt.legend(loc='upper right', frameon=True,prop={'size':12})
    plt.legend(loc='upper right', frameon=True)
    #fig = matplotlib.pyplot.gcf()
    fig.set_size_inches(9, 4.5)
    plt.show()

    print 'Script complete'
def main():
	#Clear screen
	#os.system('clear')


	start_year=1915
	end_year0=2019
	Number_of_years=0
	input_folder='/ptmp/Alon.Stern/model_output/ulm_mom6_2015.07.20_again_myIcebergTag/AM2_LM3_SIS2_MOM6i_1deg_bergs_Delta9/'
	second_folder=None
	field='CN'
	months_str='all'
	constraint_name=None
	months=range(0,11)
	file_type='ice_month'
	pole='north'
	save_traj_data=False

	color_vec=np.array(['blue', 'red', 'green', 'grey','purple', 'cyan', 'magenta', 'black', 'orange', 'coral', 'yellow', 'orchid',  'black', 'orange', 'coral', 'yellow', 'orchid' ])



 	# The constraint has the form [field_name, lower_bonud, upper_bound]
	#constraint1=np.array(['lat',-65.,-10.])  # Latituds north of -60 in the southern hemisphere

	constraint_depth = {'Constraint_field_name': 'depth', 'lower_bound': 3000 , 'upper_bound': 8000, 'original_values': True}
 
	constraint_dist_from_calving = {'Constraint_field_name': 'distance_from_calving', 'lower_bound': 1000000 , 'upper_bound': 10000000000000, 'original_values': False}

	constraint2 = {'Constraint_field_name': 'lon', 'lower_bound': -150 , 'upper_bound': -65, 'original_values': True} #Longitude of AB
 	constraint3 = {'Constraint_field_name': 'lon', 'lower_bound': -210 , 'upper_bound': -65, 'original_values': True} #Longitude of AB_plus_Ross

 	constraint_SH = {'Constraint_field_name': 'lat', 'lower_bound': -90 , 'upper_bound': 0, 'original_values': True} #age
 	constraint_age = {'Constraint_field_name': 'age', 'lower_bound': 30 , 'upper_bound': 300, 'original_values': True} #age

	constraints=[]

	#mass=3.9e11 ;constraint_name='massD9'
	#mass=8.8e7 ;constraint_name='massD1'
	#constraint_m = {'Constraint_field_name': 'mass', 'lower_bound': mass-100 , 'upper_bound': mass+100, 'original_values': True}
	#constraints.append(constraint2)  ; constraint_name='AB'
	#constraints.append(constraint3)  ; constraint_name='AB_plus_Ross'
	#constraints.append(constraint4)
	#constraints.append(constraint_m)
	#constraints.append(constraint_depth)
	#constraints.append(constraint_dist_from_calving)
	#constraints.append(constraint_age)
	#constraints.append(constraint_SH)

	#plot_multiple_panels==1:
	constraint2 = {'Constraint_field_name': 'lon', 'lower_bound': -150 , 'upper_bound': -65, 'original_values': True} #Longitude of AB

	root_path='/ptmp/aas/model_output/ulm_mom6_2015.07.20_again_myIcebergTag/AM2_LM3_SIS2_MOM6i_1deg'

	(All_data, lat, lon,z) =generate_data_file(start_year, start_year, input_folder, field, months_str, file_type)
	data_mean=np.squeeze(np.mean(All_data,axis=0))
	data_mean=None

	#run_names=np.array(['freq','all_big', 'mass', 'all_small']) ; naming_flag='groups'
	#run_names=np.array(['Delta1', 'Delta2', 'Delta3', 'Delta6', 'Delta9','Delta10']) ; naming_flag='Delta'
	#run_names=np.array(['Delta1' ,'Delta10','Delta1_thick', 'Delta10_thick', 'Delta1b_thick', 'Delta10b_thick']) ; naming_flag='Thick'
	#run_names=np.array(['Delta6', 'Delta6','Delta9']) ; naming_flag='Delta'
	run_names=np.array(['tournadre']) ; naming_flag='Delta'
	#run_names=np.array(['Delta1','Delta3','Delta4','Delta5', 'Delta6','Delta9']) ; naming_flag='Delta'

	for k in range(1):
		input_folder=root_path + '_bergs_' + run_names[k]
		subplot(1,1,k)
		print input_folder	
		#Making sure the years work
		(min_year, max_year)=find_max_min_years(input_folder,'.iceberg_trajectories.nc')
		end_year=min(max_year,end_year0)
		start_year=max(end_year-Number_of_years,min_year)
		title1= run_names[k] + ' (' + str(start_year) + ' to ' + str(end_year) + ')'

		#Plot blank map.
		if pole=='south':
			projection = 'splaea'
			boundinglat=-45
		if pole=='north':
			projection = 'nplaea'
			boundinglat=45
		projection = 'nplaea'
		plot_polar_field(lat,lon,data_mean,pole,difference_on=0.,title=title1\
				,p_values=None,cscale=None,field=None,colorbar_on=True,return_data_map=False,plot_lat_lon_lines=True,boundinglat=boundinglat)
		m = Basemap(projection=projection, boundinglat=boundinglat, lon_0=180)
		print 'You are here'
		print projection

		
		count=0
		Total_berg_lat=np.array([])
		Total_berg_lon=np.array([])
		Total_berg_mass0=np.array([])
		for year in range(start_year, end_year+1):
			count=count+1
			filename ='/' + str(year) + '0101.iceberg_trajectories.nc'
			input_file=input_folder + filename
			print input_file
			all_bergs_lat = get_valid_data_from_traj_file(input_file,'lat')
			all_bergs_lon = get_valid_data_from_traj_file(input_file,'lon')
			all_bergs_mass0 =  get_valid_data_from_traj_file(input_file,'mass',subtract_orig=False,get_original_values=True) # Getting iceberg mass too.

			#Adding constraints to the data:
			print 'Lendth of original field: ' , len(all_bergs_lat)
			all_bergs_lat=add_constraint_to_data(input_file,all_bergs_lat,constraints)
			all_bergs_lon=add_constraint_to_data(input_file,all_bergs_lon,constraints)
			all_bergs_mass0=add_constraint_to_data(input_file,all_bergs_mass0,constraints)

			print 'Lendth of field after constraints: ' , len(all_bergs_lat)


			x, y = m(all_bergs_lon, all_bergs_lat)
			plot_polar_field(lat,lon,data_mean,pole,difference_on=0.,title=title1\
					,p_values=None,cscale=None,field=None,colorbar_on=True,return_data_map=False,plot_lat_lon_lines=True,boundinglat=boundinglat)
			m.scatter(x,y,3,marker='o',color=color_vec[k])

			#plt.plot(all_bergs_lon,all_bergs_lat,'o')
			
			Total_berg_lon=np.concatenate((Total_berg_lon,x),axis=0)
			Total_berg_lat=np.concatenate((Total_berg_lat,y),axis=0)
			Total_berg_mass0=np.concatenate((Total_berg_mass0,all_bergs_mass0),axis=0)
			
		
		if save_traj_data==True:
			save_traj_mat_file(lat,lon, Total_berg_lon, Total_berg_lat, Total_berg_mass0, pole,input_folder,start_year,end_year,months_str,constraint_name)
		#plot_polar_field(lat,lon,data_mean,pole,difference_on=0.,title=title1)
		#m.scatter(Total_berg_lon,Total_berg_lat,1,marker='o',color=color_vec[k])
			


	output_file= 'figures/traj_plot_' + str(start_year)+ '_to_' +  str(end_year) + '_with_' + run_names[k] + '.png'
	#plt.savefig(output_file, dpi=150, bbox_inches='tight', pad_inches=0.4)

	plt.show()



	print 'Script complete'
def main():
	#Clear screen
	#os.system('clear')


	start_year=1915
	end_year0=1992
	Number_of_years=0
	input_folder='/ptmp/Alon.Stern/model_output/ulm_mom6_2015.07.20_again_myIcebergTag/AM2_LM3_SIS2_MOM6i_1deg_bergs_Delta9/'
	second_folder=None
	field='CN'
	months_str='all'
	constraint_name=None
	months=range(0,11)
	file_type='ice_month'
	pole='north'
	pole=None
	save_traj_data=False

	Title_list={'Delta1':'$L_{0}=60m$','Delta2':'$L_{0}=100m$','Delta3':'$L_{0}=200m$','Delta4':'$L_{0}=350m$','Delta5':'$L_{0}=500m$',\
		                        'Delta6':'$L_{0}=700m$','Delta7':'$L_{0}=900m$','Delta8':'$L_{0}=1200m$','Delta9':'$L_{0}=1600m$','Delta10':'$L_{0}=2200m$'}
	color_vec=np.array(['blue', 'red', 'green', 'grey','purple', 'cyan', 'magenta', 'black', 'orange', 'coral', 'yellow', 'orchid',  'black', 'orange', 'coral', 'yellow', 'orchid' ])
	cNorm = mpl.colors.LogNorm(vmin=8.8e7, vmax=7.4e11)
	
	plot_each_time=False


 	# The constraint has the form [field_name, lower_bonud, upper_bound]
	#constraint1=np.array(['lat',-65.,-10.])  # Latituds north of -60 in the southern hemisphere

	constraint_depth = {'Constraint_field_name': 'depth', 'lower_bound': 3000 , 'upper_bound': 8000, 'original_values': True}
 
	constraint_dist_from_calving = {'Constraint_field_name': 'distance_from_calving', 'lower_bound': 1000000 , 'upper_bound': 10000000000000, 'original_values': False}

	constraint_lon_Rink = {'Constraint_field_name': 'lon', 'lower_bound': -55 , 'upper_bound': -45, 'original_values': True} #Longitude of Rink
        constraint_lat_Rink = {'Constraint_field_name': 'lat', 'lower_bound': 70.75 , 'upper_bound': 72.75, 'original_values': True} #Longitude of Rink

	constraints=[]

	#mass=3.9e11 ;constraint_name='massD9'
	#mass=8.8e7 ;constraint_name='massD1'
	#constraint_m = {'Constraint_field_name': 'mass', 'lower_bound': mass-100 , 'upper_bound': mass+100, 'original_values': True}
	#constraints.append(constraint2)  ; constraint_name='AB'
	#constraints.append(constraint3)  ; constraint_name='AB_plus_Ross'
	#constraints.append(constraint4)
	#constraints.append(constraint_m)
	#constraints.append(constraint_depth)
	#constraints.append(constraint_dist_from_calving)
	constraints.append(constraint_lon_Rink)
	constraints.append(constraint_lat_Rink)
	#constraints.append(constraint_SH)

	#plot_multiple_panels==1:
	constraint2 = {'Constraint_field_name': 'lon', 'lower_bound': -150 , 'upper_bound': -65, 'original_values': True} #Longitude of AB

	root_path='/ptmp/aas/model_output/ulm_mom6_2015.07.20_again_myIcebergTag/AM2_LM3_SIS2_MOM6i_1deg'

	(All_data, lat, lon,z) =generate_data_file(start_year, start_year, input_folder, field, months_str, file_type)
	data_mean=np.squeeze(np.mean(All_data,axis=0))
	data_mean=None

	#Deciding on the projection.
	if pole=='south':
		projection = 'splaea'
		boundinglat=-45
	if pole=='north':
		projection = 'nplaea'
		boundinglat=45
	if pole==None:
		projection = 'lcc'
		lat_1=80
		lat_2=60
		lat_0=63.5
		lon_0=-59.


	run_names=np.array(['tournadre']) ; naming_flag='Delta'
	#run_names=np.array(['Delta1', 'Delta3', 'Delta9']) ; naming_flag='Delta'
	#run_names=np.array(['Delta1', 'Delta2','Delta3','Delta6', 'Delta9','Delta10']) ; naming_flag='Delta'
	run_names=np.array(['Delta1' ,'Delta3', 'Delta9']) ; naming_flag='Delta'

	nrows=1 ;ncols=3
	fig, axes = plt.subplots(nrows=nrows, ncols=ncols)
	for k in range(ncols*nrows):
		input_folder=root_path + '_bergs_' + run_names[k]
		subplot(nrows,ncols,k+1)
		print input_folder	
		#Making sure the years work
		(min_year, max_year)=find_max_min_years(input_folder,'.iceberg_trajectories.nc')
		end_year=min(max_year,end_year0)
		start_year=max(end_year-Number_of_years,min_year)
		title1= run_names[k] + ' (' + str(start_year) + ' to ' + str(end_year) + ')'

		#plot_polar_field(lat,lon,data_mean,pole,difference_on=0.,title=title1\
		#		,p_values=None,cscale=None,field=None,colorbar_on=True,return_data_map=False,plot_lat_lon_lines=True,boundinglat=boundinglat)
		if pole==None:
			m = Basemap(width=1750000,height=4300000, rsphere=(6378137.00,6356752.3142),resolution='l',area_thresh=1000.,\
					projection=projection,lat_1=lat_1,lat_2=lat_2,lat_0=lat_0,lon_0=lon_0)
		else:
			m = Basemap(projection=projection, boundinglat=boundinglat, lon_0=180)
		m.drawcoastlines()
		m.fillcontinents(color='grey',lake_color='white')

		
		count=0
		Total_berg_lat=np.array([])
		Total_berg_lon=np.array([])
		Total_berg_mass0=np.array([])
		for year in range(start_year, end_year+1):
			count=count+1
			filename ='/' + str(year) + '0101.iceberg_trajectories.nc'
			input_file=input_folder + filename
			print input_file
			all_bergs_lat = get_valid_data_from_traj_file(input_file,'lat')
			all_bergs_lon = get_valid_data_from_traj_file(input_file,'lon')
			all_bergs_mass0 =  get_valid_data_from_traj_file(input_file,'mass',subtract_orig=False,get_original_values=True) # Getting iceberg mass too.

			#Adding constraints to the data:
			print 'Lendth of original field: ' , len(all_bergs_lat)
			all_bergs_lat=add_constraint_to_data(input_file,all_bergs_lat,constraints)
			all_bergs_lon=add_constraint_to_data(input_file,all_bergs_lon,constraints)
			all_bergs_mass0=add_constraint_to_data(input_file,all_bergs_mass0,constraints)

			print 'Lendth of field after constraints: ' , len(all_bergs_lat)


			x, y = m(all_bergs_lon, all_bergs_lat)
			if plot_each_time==True:
				plot_polar_field(lat,lon,data_mean,pole,difference_on=0.,title=title1\
						,p_values=None,cscale=None,field=None,colorbar_on=True,return_data_map=False,plot_lat_lon_lines=True,boundinglat=boundinglat)
				m.scatter(x,y,3,marker='o',color=color_vec[k])

			#plt.plot(all_bergs_lon,all_bergs_lat,'o')
			
			Total_berg_lon=np.concatenate((Total_berg_lon,x),axis=0)
			Total_berg_lat=np.concatenate((Total_berg_lat,y),axis=0)
			Total_berg_mass0=np.concatenate((Total_berg_mass0,all_bergs_mass0),axis=0)
		

		datamap=m.scatter(Total_berg_lon,Total_berg_lat, c=Total_berg_mass0, marker='o',cmap='jet',norm=cNorm)
		if run_names[k]=='tournadre':
			cbar=fig.colorbar(datamap)
        	        cbar.set_label('Calving Mass (kg)')
		else:
			plt.title(Title_list[run_names[k]])
		
		if save_traj_data==True:
			save_traj_mat_file(lat,lon, Total_berg_lon, Total_berg_lat, Total_berg_mass0, pole,input_folder,start_year,end_year,months_str,constraint_name)
		#plot_polar_field(lat,lon,data_mean,pole,difference_on=0.,title=title1)
		#m.scatter(Total_berg_lon,Total_berg_lat,1,marker='o',color=color_vec[k])
			

	output_file= 'figures/Leigh_Rink_figure2.png'
	#output_file= 'figures/Northern_Trajectories_tournadre.png'
	#output_file= 'figures/Northern_Trajectories_Deltas.png'
	fig.set_size_inches(21.0, 8.5,forward=True)
	plt.savefig(output_file, dpi=150, bbox_inches='tight', pad_inches=0.4)

	plt.show()



	print 'Script complete'
# Example #7
# 0
def main():
    """Compare two iceberg-trajectory fields across runs.

    Reads yearly trajectory files for each run in Delta_list, applies the
    active constraints, and either (load_the_data=True) recomputes per-year
    binned statistics of field_name1 vs field_name2 plus an occurrence
    matrix, or (load_the_data=False) loads previously saved results from
    .mat files; the selected flags control which plots are produced.
    """
    #Clear screen
    #os.system('clear')

    #Defining possible paths
    all_paths = define_paths_array()

    #Flags controlling which computations/plots run below.
    plot_full_time_series = 0
    plot_scatter_plot = False
    plot_mean_fields_again = True
    plot_occurance_matrix = False
    plot_derivatives_and_fits = False
    plot_rolling_equation = False
    find_expected_values = True
    save_the_data = False
    load_the_data = True

    #Parameters and variables
    #start_year=1980
    end_year0 = 2010
    #end_year0=1999
    Number_of_years = 3
    #Number_of_years=0

    #Which fields do you want to compare?
    #field_name1='area'
    #field_name1='age'
    #field_name1='side_decay'
    #field_name1='Me'
    #field_name1='sst'
    #field_name1='Ss'
    field_name1 = 'cn'
    #field_name1='speed'
    #field_name1='Mb'
    #field_name1='mass'
    #field_name1='length'
    #field_name1='width'
    #field_name1='thickness'
    #field_name1='LW_max'
    #field_name1='distance_from_calving'

    field_name2 = 'mass'
    #field_name2='area'
    #field_name2='age'
    #field_name2='thickness'
    #field_name2='length'
    #field_name2='width'

    initial_mass_vec = np.array([
        8.8e7, 4.1e8, 3.3e9, 1.8e10, 3.8e10, 7.5e10, 1.2e11, 2.2e11, 3.9e11,
        7.4e11
    ])
    #Include contraints to use to filter the data.
    #Each constraint is a dict: field name, lower/upper bound, and whether the
    #bound applies to original (calving-time) or current values.

    constraint2 = {
        'Constraint_field_name': 'lon',
        'lower_bound': -150,
        'upper_bound': -65,
        'original_values': True
    }  #Longitude of AB
    constraint_SH = {
        'Constraint_field_name': 'lat',
        'lower_bound': -150,
        'upper_bound': -5,
        'original_values': True
    }  #Southern hemisphere latitudes
    constraint_NH = {
        'Constraint_field_name': 'lat',
        'lower_bound': 5,
        'upper_bound': 100,
        'original_values': True
    }  #Northern hemisphere latitudes
    constraint_birth_year = {
        'Constraint_field_name': 'year',
        'lower_bound': 1899.9,
        'upper_bound': 1900.1,
        'original_values': True
    }
    constraint_depth = {
        'Constraint_field_name': 'depth',
        'lower_bound': 500,
        'upper_bound': 8000,
        'original_values': False
    }
    constraint_dist_from_calving = {
        'Constraint_field_name': 'distance_from_calving',
        'lower_bound': 1000,
        'upper_bound': 10000000000000,
        'original_values': False
    }
    #constraint_mass = {'Constraint_field_name': 'mass', 'lower_bound': -10**16 , 'upper_bound': -0.5*(10**8), 'original_values': False, 'subtract_original': True}
    constraint_mass = {
        'Constraint_field_name': 'mass',
        'lower_bound': 1.0 * (10**10),
        'upper_bound': 4.0 * (10**15),
        'original_values': False,
        'subtract_original': False
    }
    constraint_mass0 = {
        'Constraint_field_name': 'mass',
        'lower_bound': 7.3 * (10**11),
        'upper_bound': 7.6 * (10**11),
        'original_values': True,
        'subtract_original': False
    }
    #constraint_mass0 = {'Constraint_field_name': 'mass', 'lower_bound': 3.2*(10**9) , 'upper_bound': 3.4*(10**9), 'original_values': True, 'subtract_original': False}
    constraint_day = {
        'Constraint_field_name': 'day',
        'lower_bound': 92,
        'upper_bound': 94.,
        'original_values': True,
        'subtract_original': False
    }
    constraint_vvel = {
        'Constraint_field_name': 'vvel',
        'lower_bound': 0.0001,
        'upper_bound': 100,
        'original_values': False,
        'subtract_original': False
    }
    constraint_uvel = {
        'Constraint_field_name': 'uvel',
        'lower_bound': -100,
        'upper_bound': -0.00100,
        'original_values': False,
        'subtract_original': False
    }
    constraint_age = {
        'Constraint_field_name': 'age',
        'lower_bound': 0,
        'upper_bound': 5,
        'original_values': False
    }
    constraint_lon_WestGreenland = {
        'Constraint_field_name': 'lon',
        'lower_bound': -45,
        'upper_bound': -20,
        'original_values': True
    }
    constraint_lat_SouthGreenland = {
        'Constraint_field_name': 'lat',
        'lower_bound': 50.0,
        'upper_bound': 66.0,
        'original_values': True
    }

    #constraints.append(constraint2)

    #if field_name1=='age' or field_name2=='age':
    #	print 'applying age constraint'
    #	day_constraint = {'Constraint_field_name': 'day', 'lower_bound': 0 , 'upper_bound': 258, 'original_values': True} #Longitude of AB
    #Active constraints: original mass in the ~7.4e11 kg class, and
    #south-Greenland latitudes.
    constraints = []
    #constraints.append(constraint_dist_from_calving)
    #constraints.append(constraint_depth)
    constraints.append(constraint_mass0)
    #constraints.append(constraint_day)
    #constraints.append(constraint_age)
    #constraints.append(constraint_SH)
    #constraints.append(constraint_birth_year)
    #constraints.append(constraint_NH)
    #constraints.append(constraint_vvel)
    #constraints.append(constraint_uvel)
    #	constraints.append(day_constraint)
    #constraints.append(constraint_lon_WestGreenland)
    constraints.append(constraint_lat_SouthGreenland)

    mass_scaling = np.array([2000, 200, 50, 20, 10, 5, 2, 1, 1, 1])

    #field_name='area';  x_min=1;x_max=1.e9  # Good for area including all Deltas
    field_name = 'mass'
    x_min = 1
    x_max = 1.e7  # Good for area
    #field_name='mass' ; #x_min=100;x_max=1.e12  # Good for mass
    #Defining a list of colors
    color_vec = np.array([
        'blue', 'red', 'purple', 'green', 'coral', 'cyan', 'magenta', 'orange',
        'black', 'grey', 'yellow', 'orchid', 'blue', 'red', 'purple', 'green',
        'coral', 'cyan'
    ])
    root_path = '/ptmp/aas/model_output/ulm_mom6_2015.07.20_again_myIcebergTag/AM2_LM3_SIS2_MOM6i_1deg_bergs_'

    #Delta_list=np.array(['Delta9', 'rolling_tournadre_WM', 'rolling_Delta9_Burton_new'])
    #Delta_list=np.array(['rolling_Delta1_Burton_new','rolling_Delta5_Burton_new','rolling_Delta9_Burton_new'])
    #Delta_list=np.array(['rolling_tournadre_Burton_breaking_aspect_fixed','rolling_tournadre_Burton_breaking_D_fixed','rolling_tournadre_Burton_new'])
    Delta_list = np.array(
        ['tournadre', 'rolling_tournadre_WM', 'rolling_tournadre_Burton_new'])
    #Delta_list=np.array(['rolling_tournadre_Burton_new/trajectories/','rolling_tournadre_Rolling_off/trajectories/'])
    #The assignment below overrides the Delta_list above.
    Delta_list = np.array(
        ['rolling_tournadre_Burton_new', 'rolling_tournadre_Rolling_off'])
    #Delta_list=np.array(['Delta1','Delta9'])
    #Delta_list=np.array(['Delta2','Delta3', 'Delta4','Delta5','Delta6'])

    #Rolling_file='processed_data/rolling_tournadre_Burton_new_Me_vs_mass_1960_to_2010_expected_values_constraints_mass_7.3e+11_to_7.6e+11_lat_50.0_to_75.0_scatter.mat'
    #No_Rolling_file='processed_data/rolling_tournadre_Rolling_off_Me_vs_mass_1960_to_2010_expected_values_constraints_mass_7.3e+11_to_7.6e+11_lat_50.0_to_75.0_scatter.mat'

    #Pre-computed .mat files used when load_the_data is False.
    No_Rolling_file = 'processed_data/rolling_tournadre_Rolling_off_Me_vs_mass_1960_to_2010_expected_values_constraints_mass_7.3e+11_to_7.6e+11_lon_45_to_20_lat_50.0_to_75.0_scatter.mat'
    Rolling_file = 'processed_data/rolling_tournadre_Burton_new_Me_vs_mass_1960_to_2010_expected_values_constraints_mass_7.3e+11_to_7.6e+11_lon_45_to_20_lat_50.0_to_75.0_scatter.mat'
    load_filename_list = np.array([Rolling_file, No_Rolling_file])

    #Name_list=np.array(['Martin and Adcroft','Weeks and Mellor','Burton'])
    Name_list = np.array(['Rolling', 'No Rolling'])

    fig = plt.figure(1)
    #for k in range(13):#for k in np.array([3]):
    count1 = 0
    print 'field1 = ', field_name1, ', field2 = ', field_name2
    if load_the_data is True:
        #Recompute the statistics from the raw trajectory files for each run.
        for k in np.array([0, 1]):
            #for k in np.array([0,1,2,3,4]):
            #for k in np.array([8,5,3,1]):
            #for k in np.array([9,5,1]):
            #for k in np.array([4,5,6,9]):
            #for k in np.array([7,8,10]):
            #for k in np.array([10,9,8,7,6,5,4,3,2,1]):
            #for k in range(1,9):
            #ax = fig.add_subplot(2,1,k+1)
            ax = fig.add_subplot(1, 1, 1)
            count1 = count1 + 1
            #input_folder=all_paths[k]
            #Delta_name='Delta'+str(k)
            Delta_name = Delta_list[k]
            input_folder = root_path + Delta_name
            input_folder = input_folder + '/trajectories/'
            print input_folder

            #Make sure that data exists of the years allocated, othersize change it.
            (min_year,
             max_year) = find_max_min_years(input_folder,
                                            '.iceberg_trajectories.nc')
            end_year = min(max_year, end_year0)
            start_year = max(end_year - Number_of_years, min_year)

            #Per-year binned statistics: one row per year, one column per bin.
            number_of_bins = 100
            mean_matrix = np.zeros(
                (end_year - start_year + 1, number_of_bins - 1))
            std_matrix = np.zeros(
                (end_year - start_year + 1, number_of_bins - 1))
            x_matrix = np.zeros(
                (end_year - start_year + 1, number_of_bins - 1))

            sigma_matrix = np.zeros(
                (end_year - start_year + 1, number_of_bins - 1))
            scale_matrix = np.zeros(
                (end_year - start_year + 1, number_of_bins - 1))
            loc_matrix = np.zeros(
                (end_year - start_year + 1, number_of_bins - 1))
            mu_matrix = np.zeros(
                (end_year - start_year + 1, number_of_bins - 1))
            median_matrix = np.zeros(
                (end_year - start_year + 1, number_of_bins - 1))
            mode_matrix = np.zeros(
                (end_year - start_year + 1, number_of_bins - 1))

            #Occurrence-matrix bins: log-spaced in field2 (y), linear in field1 (x).
            number_of_age_bins = 40
            occurance_y = np.logspace(np.log10(10**5),
                                      np.log10(10**11),
                                      num=number_of_bins,
                                      endpoint=True)
            occurance_x = np.linspace(0,
                                      3,
                                      num=number_of_age_bins,
                                      endpoint=True)
            #occurance_x = np.linspace(0, 40, num=number_of_age_bins, endpoint=True)
            occurance_matrix = np.zeros((len(occurance_x), len(occurance_y)))

            year_count = -1
            for year in range(start_year, end_year + 1):
                year_count = year_count + 1
                #year=1945
                filename = '/' + str(year) + '0101.iceberg_trajectories.nc'
                input_file = input_folder + filename
                print input_file

                field1 = get_valid_data_from_traj_file(input_file,
                                                       field_name1,
                                                       subtract_orig=False)
                field2 = get_valid_data_from_traj_file(input_file,
                                                       field_name2,
                                                       subtract_orig=False)

                print 'Lendth of field: ', len(field1)
                #Getting the distributions from the data

                #Handeling constraints
                field1 = add_constraint_to_data(input_file, field1,
                                                constraints)
                field2 = add_constraint_to_data(input_file, field2,
                                                constraints)
                print 'Lendth of field after constraints: ', len(field1)

                #field2=field2*0.81

                #if create_2D_histogram==True:

                if find_expected_values == True:
                    #Bin field1 by field2 and record per-bin statistics/fit
                    #parameters for this year.
                    (x_bins, y_ave, y_std, sigma, scale, loc, mu, y_median,
                     y_mode) = expected_values(field2,
                                               field1,
                                               number_of_bins,
                                               min_bin_value=True)
                    mean_matrix[year_count, :] = y_ave
                    std_matrix[year_count, :] = y_std
                    x_matrix[year_count, :] = x_bins

                    sigma_matrix[year_count, :] = sigma
                    scale_matrix[year_count, :] = scale
                    loc_matrix[year_count, :] = loc
                    mu_matrix[year_count, :] = mu

                    median_matrix[year_count, :] = y_median
                    mode_matrix[year_count, :] = y_mode

                    #Dealing with occurance
                    #NOTE(review): the return value is bound to 'Occurance_matrix'
                    #(capital O) and never read; presumably occurance_matrix is
                    #updated in place by construct_occurance_matrix — confirm.
                    Occurance_matrix = construct_occurance_matrix(
                        field1, field2, occurance_matrix, occurance_x,
                        occurance_y)

                if plot_scatter_plot == True:
                    #Plotting the distributions
                    #ax = fig.add_subplot(3,1,k+1)
                    #ax = fig.add_subplot(2,1,count1)
                    label = Name_list[k]
                    #label='Delta' + str(k)
                    plt.plot(field1,
                             field2,
                             '.',
                             color=color_vec[k],
                             label=label)
                    plt.xlabel(field_name1)
                    plt.ylabel(field_name2)
                    #plt.ylim([-75., -40]) #For mass
                    #plt.xlim([0, 30]) #For mass
                    #ax.set_yscale('log')
                    #ax.set_xscale('log')
                    plt.title(Name_list[k])

                if plot_derivatives_and_fits == True:
                    #dy_dx=derivative(x_bins,y_ave)
                    #Note:  x_bins is mass, y_ave is age
                    (dy_dx, x_new, y_new,
                     x_fit) = fit_derivative(x_bins, y_ave)

                    ax = fig.add_subplot(1, 1, 1)
                    label = 'Delta' + str(k + 1)

                    plt.plot(y_ave,
                             x_bins,
                             color=color_vec[k],
                             label=label,
                             linewidth=2.)
                    plt.plot(y_ave, x_bins, '*', color=color_vec[k])
                    plt.plot(y_new,
                             x_new,
                             color='red',
                             label=label,
                             linewidth=4.)
                    plt.plot(y_new,
                             x_fit,
                             color='magenta',
                             label=label,
                             linewidth=2.)

                    #One-standard-deviation envelope around the mean.
                    plt.plot(y_ave + y_std, x_bins, color='green')
                    plt.plot(y_ave - y_std, x_bins, color='green')
                    plt.xlabel(field_name1)
                    plt.ylabel(field_name2)
                    #ax.set_yscale('log')
                    #ax.set_xscale('log')

                    ax = fig.add_subplot(1, 2, 2)
                    plt.plot(x_bins,
                             dy_dx,
                             color=color_vec[k],
                             label=label,
                             linewidth=4.)
                    plt.plot(x_bins, dy_dx, '*')
                    plt.plot(x_new, dy_dx)
                    plt.ylabel('dy/dx')
                    plt.xlabel('Mass')
                    #ax.set_xscale('log')
                    #ax.set_yscale('log')

            if plot_rolling_equation == True:
                #Overlay the rolling-criterion curve L(D).
                D = np.linspace(0.1, 300, 100)
                L = sqrt(0.92 * (D**2) + (58.32 * D))
                plt.plot(L, D, linestyle='--')
                #plt.plot(L,L,linestyle=':')
                plt.plot(L, 0.81 * L, linestyle=':')
                #plt.plot(L,L/1.2,linestyle=':')
                #plt.plot(L,L-(,linestyle=':')

            #Normalizing so the occurrence matrix sums to 1.
            print np.sum(occurance_matrix)
            occurance_matrix = occurance_matrix / (np.sum(occurance_matrix))

            if plot_occurance_matrix is True:
                vmax = np.max(occurance_matrix)
                #cNorm = MidpointNormalize(vmin=0, vmax=5,midpoint=0)
                cNorm = mpl.colors.Normalize(vmin=0, vmax=vmax)
                #The log norm below overrides the linear norm above.
                cNorm = mpl.colors.LogNorm(vmin=10**-5, vmax=0.1)
                print len(occurance_x), len(occurance_y)
                plt.pcolor(occurance_x,
                           occurance_y,
                           occurance_matrix.transpose(),
                           cmap='jet',
                           norm=cNorm)
                plt.colorbar()
                plt.xlabel(field_name1)
                plt.ylabel(field_name2)
                ax.set_yscale('log')
                ax.set_xscale('log')

            if plot_mean_fields_again == True:
                #Collapse the per-year matrices to a single time-averaged profile.
                Total_median = find_average_over_time_without_nan(
                    median_matrix, 'median')
                Total_mean = find_average_over_time_without_nan(
                    mean_matrix, 'mean')
                Total_std = find_average_over_time_without_nan(
                    np.sqrt(std_matrix**2), 'mean')
                Total_x = find_average_over_time_without_nan(x_matrix, 'mean')
                Total_sigma = find_average_over_time_without_nan(
                    sigma_matrix, 'mean')
                Total_scale = find_average_over_time_without_nan(
                    scale_matrix, 'mean')
                Total_loc = find_average_over_time_without_nan(
                    loc_matrix, 'mean')
                Total_mu = find_average_over_time_without_nan(
                    mu_matrix, 'mean')
                if save_the_data == True:
                    save_mat_file(mean_matrix, std_matrix, x_matrix, Total_mean, Total_std, Total_x, start_year,end_year,Delta_name,field_name1,field_name2,\
                      sigma_matrix, scale_matrix, loc_matrix, mu_matrix, Total_sigma, Total_loc, Total_scale, Total_mu,Total_median,constraints,\
                      occurance_x,occurance_y,occurance_matrix)

                #ax = fig.add_subplot(1,1,1)
                #label='Delta' + str(k+1)
                label = Delta_list[k]

                #These should be uncommented
                #plt.plot(Total_mean,Total_x,color='black',label=label,linewidth=4)
                #plt.plot(Total_median,Total_x,color='green',label=label,linewidth=4)
                #plt.plot(Total_mean+Total_std,Total_x,color='magenta',linewidth=4)
                #plt.plot(Total_mean-Total_std,Total_x,color='magenta',linewidth=4)

                #plt.plot(Total_median,Total_x,color=color_vec[k],label=label,linewidth=4)
                print Total_median
                plt.title(Name_list[k])
                M = median_matrix.shape
                #for loop1 in range(M[0]):
                #	plt.plot(np.squeeze(median_matrix[loop1,:]),Total_x,color=color_vec[loop1+3],label=label,linewidth=1)
                plt.plot(Total_median,
                         Total_x,
                         color=color_vec[k],
                         label=label,
                         linewidth=4)
                plt.xlabel(field_name1)
                plt.ylabel(field_name2)
                ax.set_yscale('log')
                #ax.set_xscale('log')
    else:
        #Load previously saved statistics from the .mat files instead.
        if plot_mean_fields_again == True:
            ax = fig.add_subplot(1, 1, 1)
            for k in range(2):
                print k
                filename = load_filename_list[k]
                (Total_median, Total_mean,
                 Total_x) = load_mass_vs_age_from_mat_file(filename,
                                                           load_type='median')
                plt.title(Name_list[k])
                print Total_median.shape, Total_x.shape
                plt.plot(Total_median,
                         Total_x,
                         color=color_vec[k],
                         linewidth=4)
                plt.xlabel(field_name1)
                plt.ylabel(field_name2)
                ax.set_yscale('log')

        if plot_occurance_matrix is True:
            for k in range(2):
                ax = fig.add_subplot(1, 1, 1)
                filename = load_filename_list[k]
                (occurance_x, occurance_y,
                 occurance_matrix) = load_mass_vs_age_from_mat_file(
                     filename, load_type='occurance')
                #NOTE(review): dp is unused here; it belonged to the
                #commented-out weighted-mean formula.
                dp = occurance_x[1] - occurance_x[0]
                max_occurance = np.zeros((len(occurance_y)))
                mode_occurance = np.zeros((len(occurance_y)))
                for j in range(len(occurance_y)):
                    #Expected value of occurance_x in each occurance_y bin.
                    max_occurance[j] = np.sum(occurance_matrix[:, j] *
                                              occurance_x /
                                              np.sum(occurance_matrix[:, j]))
                #plt.subplot(2,1,k+1)
                vmax = np.max(occurance_matrix[:, :])
                #cNorm = MidpointNormalize(vmin=0, vmax=5,midpoint=0)
                #cNorm = mpl.colors.Normalize(vmin=-vmax, vmax=vmax)
                cNorm = mpl.colors.LogNorm(vmin=10**-5, vmax=0.1)
                #if k==0:
                #       max_occurance0=max_occurance
                if k == 1:
                    plt.pcolor(occurance_x,
                               occurance_y, (np.squeeze(
                                   occurance_matrix[:, :])).transpose(),
                               cmap='jet',
                               norm=cNorm)
                    plt.colorbar()
                    plt.title(Name_list[k])
                plt.plot(max_occurance,
                         occurance_y,
                         color=color_vec[k],
                         linewidth=2)
                #plt.plot(mode_occurance,occurance_y,color=color_vec[k],linewidth=2)
                #title=Name_list[k]
                plt.plot(max_occurance,
                         occurance_y,
                         color=color_vec[k],
                         linewidth=1,
                         label=Name_list[k])
                #plt.plot(mode_occurance,occurance_y,color=color_vec[k],linewidth=1, label=Name_list[k])
                ax.set_yscale('log')

    plt.legend(loc='upper right', frameon=True)

    #plt.legend(loc='lower right', frameon=True,prop={'size':12})
    #plt.legend(loc='lower right', frameon=True,prop={'size':12})
    #plt.legend(loc='upper right', frameon=True)
    #fig = matplotlib.pyplot.gcf()
    fig.set_size_inches(9, 4.5)
    print 'field1 = ', field_name1, ', field2 = ', field_name2
    plt.show()

    print 'Script complete'
def main():
    #Clear screen
    #os.system('clear')

    #Defining possible paths
    all_paths = define_paths_array()

    #Flags
    save_the_data = True
    load_the_data = True

    #Parameters and variables
    #start_year=1980
    #end_year0=2010
    end_year0 = 2000
    Number_of_years = 50
    #Number_of_years=0

    #Which fields do you want to compare?
    #field_name1='sst'
    #field_name1='ua'
    #field_name1='va'
    #field_name1='uo'
    #field_name1='vo'
    #field_name1='ui'
    #field_name1='vi'
    #field_name1='cn'
    #field_name1='speed_o'
    #field_name1='speed_i'
    #field_name1='speed_a'
    #field_name1='Mb'
    #field_name1='Me'
    field_name1 = 'Mv'

    initial_mass = np.array([
        8.8e7, 4.1e8, 3.3e9, 1.8e10, 3.8e10, 7.5e10, 1.2e11, 2.2e11, 3.9e11,
        7.4e11
    ])
    #Include contraints to use to filter the data
    constraint_SH = {
        'Constraint_field_name': 'lat',
        'lower_bound': -150,
        'upper_bound': -5,
        'original_values': True
    }
    constraint_NH = {
        'Constraint_field_name': 'lat',
        'lower_bound': 5,
        'upper_bound': 150,
        'original_values': True
    }

    #if field_name1=='age' or field_name2=='age':
    #	print 'applying age constraint'
    #	day_constraint = {'Constraint_field_name': 'day', 'lower_bound': 0 , 'upper_bound': 258, 'original_values': True} #Longitude of AB
    constraints = []
    #constraints.append(constraint_NH)
    constraints.append(constraint_SH)

    rho_ice = 850.0
    rho_sw = 1025.0

    mass_ind_list = np.array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
    #mass_ind_list=np.array([9])

    #field_name='area';  x_min=1;x_max=1.e9  # Good for area including all Deltas
    field_name = 'mass'
    x_min = 1
    x_max = 1.e7  # Good for area
    #field_name='mass' ; #x_min=100;x_max=1.e12  # Good for mass
    #Defining a list of colors
    color_vec = np.array([
        'blue', 'red', 'purple', 'green', 'coral', 'cyan', 'magenta', 'orange',
        'black', 'grey', 'yellow', 'orchid', 'blue', 'red', 'purple', 'green',
        'coral', 'cyan'
    ])
    root_path = '/ptmp/aas/model_output/ulm_mom6_2015.07.20_again_myIcebergTag/AM2_LM3_SIS2_MOM6i_1deg_bergs_'

    #Delta_list=np.array(['Delta9', 'rolling_tournadre_WM', 'rolling_Delta9_Burton_new'])
    #Delta_list=np.array(['rolling_Delta1_Burton_new','rolling_Delta5_Burton_new','rolling_Delta9_Burton_new'])
    #Delta_list=np.array(['rolling_tournadre_Burton_breaking_aspect_fixed','rolling_tournadre_Burton_breaking_D_fixed','rolling_tournadre_Burton_new'])
    Delta_list = np.array(
        ['tournadre', 'rolling_tournadre_WM', 'rolling_tournadre_Burton_new'])
    Delta_list = np.array(
        ['rolling_tournadre_Burton_new', 'rolling_tournadre_Rolling_off'])
    #Delta_list=np.array(['rolling_tournadre_Burton_new/trajectories/','rolling_tournadre_Rolling_off/trajectories/'])
    #Delta_list=np.array(['Delta1','Delta9'])
    #Delta_list=np.array(['Delta2','Delta3', 'Delta4','Delta5','Delta6'])

    #Name_list=np.array(['Martin and Adcroft','Weeks and Mellor','Burton'])
    Name_list = np.array(['Rolling', 'No Rolling'])

    filename = 'processed_data/rolling_tournadre_Burton_new_Mean_forcing_' + field_name1 + '_1950_to_2000.mat'

    #load_filename_list=np.array(['processed_data/rolling_tournadre_Rolling_off_age_vs_mass_1950_to_2000_expected_values.mat',\
    #	'processed_data/rolling_tournadre_Burton_new_age_vs_mass_1950_to_2000_expected_values.mat'])
    load_filename_list=np.array(['processed_data/rolling_tournadre_Rolling_off_age_vs_mass_1950_to_2000_expected_values_logaxis.mat',\
     'processed_data/rolling_tournadre_Burton_new_age_vs_mass_1950_to_2000_expected_values_logaxis.mat'])

    fig = plt.figure(1)
    ax = fig.add_subplot(1, 1, 1)
    if load_the_data == True:
        count1 = 0
        for k in np.array([1]):
            #for k in np.array([1]):
            count1 = count1 + 1
            #input_folder=all_paths[k]
            #Delta_name='Delta'+str(k)
            Delta_name = Delta_list[k]
            input_folder = root_path + Delta_name + '/trajectories/'

            #Make sure that data exists of the years allocated, othersize change it.
            (min_year,
             max_year) = find_max_min_years(input_folder,
                                            '.iceberg_trajectories.nc')
            end_year = min(max_year, end_year0)
            start_year = max(end_year - Number_of_years, min_year)

            number_of_bins = 500
            Number_of_classes = 10

            #Initializing matricies
            field1_matrix = np.zeros(
                (Number_of_classes, end_year - start_year + 1))

            year_count = -1
            for year in range(start_year, end_year + 1):
                year_count = year_count + 1
                #year=1945
                filename = '/' + str(year) + '0101.iceberg_trajectories.nc'
                input_file = input_folder + filename
                print input_file

                field1_full = get_valid_data_from_traj_file(
                    input_file, field_name1, subtract_orig=False)

                print 'Lendth of field: ', len(field1_full)
                #Getting the distributions from the data

                #Handling all the different mass classes
                for mass_ind in mass_ind_list:
                    mass = initial_mass[mass_ind]
                    constraint_m = {
                        'Constraint_field_name': 'mass',
                        'lower_bound': mass - 100,
                        'upper_bound': mass + 100,
                        'original_values': True
                    }
                    mass_constraints = []
                    mass_constraints = constraints
                    mass_constraints.append(constraint_m)
                    #Handeling constraints
                    field1 = 0.
                    field2 = 0.
                    field1 = add_constraint_to_data(input_file, field1_full,
                                                    mass_constraints)
                    print 'Lendth of field after constraints using mass0= ', mass, ' is ', len(
                        field1)
                    mass_constraints.pop()

                    field1_matrix[mass_ind, year_count] = np.mean(field1)
        mean_field1 = np.mean(field1_matrix, axis=1)
        Total_mean_field1 = np.mean(mean_field1)
        if save_the_data is True:
            save_mat_file(Delta_name, field1_matrix, field_name1, constraints,
                          mean_field1, Total_mean_field1, start_year, end_year)

    else:
        (field1_matrix, mean_field1,
         Total_mean_field1) = load_data_from_mat_file(filename, field_name1)

    #print field1_matrix[:,:]
    print mean_field1
    print Total_mean_field1

    #plt.legend(loc='upper right', frameon=True,prop={'size':12})
    #plt.legend(loc='upper right', frameon=True)
    #fig = matplotlib.pyplot.gcf()
    #fig.set_size_inches(9,4.5)
    #plt.show()

    print 'Script complete'