def n_run(self):
    """Run the analysis pipeline: per-element local matrices, global
    assembly, nodal loads, then per-element result extraction.
    """
    # Build the local matrix of every element first.
    for elem in self.elements:
        local_matrix(elem)
    # Assemble the global system and apply the nodal loads.
    self.asm_v()
    self.full_structure_matrix()
    self.set_nodes_loads()
    # Extract the results for each element.
    for elem in self.elements:
        get_data(self, elem)
def run(self):
    """Run the same pipeline as n_run(), then post-process every element
    with node/pressure deactivation.
    """
    # The first half of this method duplicated n_run() verbatim
    # (local matrices, assembly, full matrix, loads, get_data);
    # delegate to it so the two methods cannot drift apart.
    self.n_run()
    # Post-process: evaluate and apply node and pressure deactivation.
    for _element in self.elements:
        eval_deact_nodes(_element)
        deact_nodes(_element)
        deact_press_y(_element)
def main():
    """Run the full group-detection pipeline on the input photometric file.

    Reads parameters, filters stars by magnitude range, builds a Voronoi
    diagram on the filtered coordinates, filters candidate groups by
    area/neighbors, density and intensity-per-area, then saves results
    and generates plots. Progress is appended to a '<f_name>' log file.
    """
    start = time.time()

    # Read parameters from input file.
    in_file, in_file_cols, coords_flag, vor_flag, cr_file_cols, mag_range,\
        area_frac_range, m_m_n, intens_frac, dens_frac = get_params_in()

    # 'in_file' is the input file name; strip the 4-char extension to
    # name all output/log files.
    f_name = in_file[:-4]

    # Create log file ('w' truncates, subsequent calls append).
    save_to_log(f_name, 'Processing file: {}'.format(f_name), 'w')
    save_to_log(f_name, 'Mag range: {}'.format(mag_range), 'a')
    if vor_flag == 'voronoi':
        save_to_log(f_name, 'Area range: {}'.format(area_frac_range), 'a')
        save_to_log(f_name, 'Min/max neighbors: {}'.format(m_m_n), 'a')
    # NOTE(review): the two values below look swapped relative to their
    # labels (intens_frac logged as "density fraction", dens_frac as
    # "I/A fraction") while filt_density() consumes dens_frac and
    # filt_integ_mag() consumes intens_frac -- confirm against
    # get_params_in() before changing the log text.
    save_to_log(f_name, "Frame's density fraction: {}".format(
        intens_frac), 'a')
    save_to_log(f_name, "Frame's I/A fraction: {}\n".format(dens_frac), 'a')

    # Get points coordinates and magnitudes ('_mr' arrays are the
    # magnitude-range-filtered subset).
    x_mr, y_mr, mag_mr, x, y, mag, ra_cent, dec_cent = get_data(
        in_file, in_file_cols, mag_range, coords_flag, vor_flag)
    save_to_log(f_name, 'Photometric data obtained', 'a')
    save_to_log(f_name, 'Total number of stars: {}'.format(len(x)), 'a')
    if coords_flag == 'deg':
        save_to_log(f_name, 'Center of frame: RA={}, DEC={}'.format(
            ra_cent, dec_cent), 'a')
    else:
        save_to_log(f_name, 'Center of frame: x={}, y={}'.format(
            ra_cent, dec_cent), 'a')
    save_to_log(f_name, '\nStars filtered by mag range: {}'.format(len(x_mr)),
                'a')

    # Obtain Voronoi diagram using the *magnitude filtered coordinates*.
    # BUGFIX: materialize the pairs. On Python 3, zip() is a one-shot
    # iterator: it would be exhausted by Voronoi() and arrive empty at
    # get_vor_data() below. list() is a no-op semantically on Python 2.
    points = list(zip(x_mr, y_mr))
    vor = Voronoi(points)
    save_to_log(f_name, 'Voronoi diagram obtained', 'a')

    # Get data on Voronoi diagram.
    save_to_log(f_name, 'Processing Voronoi diagram', 'a')
    acp_pts, acp_mags, rej_pts, pts_area, pts_vert = get_vor_data(
        points, mag_mr, vor)

    # Obtain average cell area *using magnitude filtered stars*:
    # bounding-box area of the filtered stars over their count.
    fr_area = ((max(x_mr) - min(x_mr)) * (max(y_mr) - min(y_mr)))
    avr_area = fr_area / len(x_mr)

    # Filter out large area values for border polygons.
    pts_area_filt, mean_filt_area = large_area_filt(pts_area, avr_area)
    save_to_log(f_name, ("Mean Voronoi cells area (stars in mag range): "
                         "{:g} {}^2".format(mean_filt_area, coords_flag)),
                'a')

    # Obtain center coordinates and radii either automatically by grouping
    # neighbor stars, or by reading those values from a file.
    # This function applies the area and neighbors filters.
    cent_rad, pts_area_thres, mag_area_thres, pts_neighbors,\
        mags_neighbors = get_cent_rad(
            f_name, coords_flag, cr_file_cols, m_m_n, vor_flag, ra_cent,
            dec_cent, acp_pts, acp_mags, pts_area, pts_vert, mean_filt_area,
            area_frac_range)

    # Filter/organize groups found by their density, using stars filtered
    # by magnitude. Note: area_frac_range is rebound here.
    dens_accp_groups, dens_rej_groups, area_frac_range = filt_density(
        f_name, fr_area, x_mr, y_mr, mag_mr, cent_rad, vor_flag,
        area_frac_range, dens_frac, mean_filt_area)
    save_to_log(
        f_name, "\nGroups filtered by density (stars/area): {}".format(
            len(dens_accp_groups)), 'a')

    # Filter/organize groups found by their I/A, using stars filtered
    # by magnitude.
    intens_acc_dens_acc, intens_acc_dens_rej, intens_rej_dens_acc,\
        intens_rej_dens_rej = filt_integ_mag(
            x_mr, y_mr, mag_mr, dens_accp_groups, dens_rej_groups,
            intens_frac)
    save_to_log(
        f_name, "\nGroups filtered by intensity/area: {}".format(
            len(intens_acc_dens_acc[0])), 'a')

    # Write data to file.
    save_cent_rad(f_name, cent_rad, intens_acc_dens_acc, intens_acc_dens_rej,
                  intens_rej_dens_acc, intens_rej_dens_rej)
    save_to_log(f_name, "\nData saved to {}.out file".format(f_name), 'a')

    # Make plots.
    save_to_log(f_name, '\nPlotting', 'a')
    all_plots(f_name, mag_range, area_frac_range, x, y, mag, coords_flag,
              vor_flag, x_mr, y_mr, mag_mr, pts_area_filt, pts_area_thres,
              mag_area_thres, pts_neighbors, mags_neighbors, intens_frac,
              dens_frac, cent_rad, intens_acc_dens_acc, intens_acc_dens_rej,
              intens_rej_dens_acc, intens_rej_dens_rej)

    # Done: log total wall-clock time.
    elapsed = time.time() - start
    m, s = divmod(elapsed, 60)
    save_to_log(f_name, 'Full run completed in {:.0f}m {:.0f}s.'.format(
        m, s), 'a')
# Loop through all clusters processed. for indx, sub_dir in enumerate(sub_dirs): cluster = cl_names[indx] # Check if cluster is in list. if cluster in clust_list: print sub_dir, cluster # Location of the photometric data file for each cluster. data_phot = '/media/rest/Dropbox/GABRIEL/CARRERA/3-POS-DOC/trabajo/\ data_all/cumulos-datos-fotometricos/' # Get photometric data for cluster. filename = glob.glob(join(data_phot, sub_dir, cluster + '.*'))[0] id_star, x_data, y_data, mag_data, e_mag, col1_data, e_col1 = \ gd.get_data(data_phot, sub_dir, filename) # Accept and reject stars based on their errors. bright_end, popt_mag, popt_umag, pol_mag, popt_col1, popt_ucol1, \ pol_col1, mag_val_left, mag_val_right, col1_val_left, col1_val_right, \ acpt_stars, rjct_stars = ear.err_accpt_rejct(id_star, x_data, y_data, mag_data, e_mag, col1_data, e_col1) clust_rad = [radius[indx], 0.] center_cl = [centers[0][indx], centers[1][indx]] # Get stars in and out of cluster's radius. stars_in, stars_out, stars_in_rjct, stars_out_rjct = \ gio.get_in_out(center_cl, clust_rad[0], acpt_stars, rjct_stars) # Path where the code is running
def run_page2():
    """Render the "Correlation overview" page.

    Shows a table of Spearman correlations between one selected ticker
    and every other ticker over several look-back windows, plus a
    rolling-correlation chart over a user-chosen span.
    """
    load_global_vars()
    st.sidebar.title("User settings")
    st.title("Correlation overview")
    # Timestamp shown to the user (dd/mm/YY H:M:S).
    dt_string = TODAY.strftime("%d/%m/%Y %H:%M:%S")
    st.write("Last updated at", dt_string)

    # Symbols to download; the user may edit the default list.
    ex_tickers = "CL=F, DX=F, GC=F, ES=F, NQ=F, DBC"
    tickers = st.text_input("Provide ticker symbols, split by comma",
                            ex_tickers)
    df = get_data(tickers=tickers, period="20y")
    # Daily percentage returns of the close prices.
    df_perc = df["Close"].pct_change(periods=1).dropna()
    ticker = st.sidebar.selectbox("Select ticker", list(df_perc.columns))

    # Look-back windows (days), parsed from comma-separated user input.
    ex_periods = "10, 20, 30, 60, 90, 120, 150, 180, 210"
    periods = st.text_input(
        "Choose correlation periods, split by comma (expressed in days)",
        ex_periods)
    periods = [int(p) for p in periods.split(",")]

    # Correlation table: one row per other ticker, one column per window.
    table_corr = {}
    for col in df_perc.columns:
        by_window = {}
        if col != ticker:
            for window in periods:
                recent = df_perc.tail(window)
                by_window[window] = pd.DataFrame(recent[ticker]).corrwith(
                    recent[col], axis=0, drop=False,
                    method='spearman').values.tolist()[0]
            table_corr[col] = by_window

    precision = st.sidebar.number_input("Number of digits for precision",
                                        min_value=1, max_value=10, value=3)
    st.dataframe(
        pd.DataFrame(table_corr).T.style.set_precision(precision))

    # Rolling-correlation figure settings.
    corr_period = st.sidebar.slider(
        "Choose correlation period figure (expressed in days)",
        min_value=5, max_value=200, value=30)
    period_figure = st.sidebar.slider(
        "Choose maximum period figure (expressed in days)",
        min_value=100, max_value=2000, value=1000)

    rolling_corr = {}
    for col in df_perc.columns:
        if col != ticker:
            recent = df_perc.tail(period_figure)
            rolling_corr[col] = rolling_spearman(
                recent[ticker].values, recent[col].values, corr_period)

    # NOTE: 'recent' still holds the tail used in the loop above and
    # supplies the date index for the long-format frame below.
    out = pd.DataFrame(rolling_corr,
                       index=recent.index).dropna().reset_index()
    out_long = pd.melt(out, id_vars='Date', value_vars=out.columns[1:])
    out_long = out_long.rename(columns={"variable": "Ticker",
                                        "value": "Correlation"})
    # Plot the rolling correlations, one line per ticker.
    fig = px.line(out_long, x="Date", y="Correlation",
                  title=f'Rolling correlations with {ticker}',
                  color="Ticker")
    st.plotly_chart(fig, use_container_width=True)
# Also store the parameters associated with this cluster. final_zams_params = [] for indx, sub_dir in enumerate(sub_dirs): cluster = cl_names[indx] # if cluster in gabriel_accept: if cluster in ruben_accept: # if cluster == 'L115': # use_all_clusters = True # if use_all_clusters: print sub_dir, cluster filename = glob.glob(join(data_isos_file, sub_dir, cluster + '.*'))[0] id_star, x_data, y_data, mag_data, e_mag, col1_data, e_col1 = \ gd.get_data(data_isos_file, sub_dir, filename) # Accept and reject stars based on their errors. bright_end, popt_mag, popt_umag, pol_mag, popt_col1, popt_ucol1, \ pol_col1, mag_val_left, mag_val_right, col1_val_left, col1_val_right, \ acpt_stars, rjct_stars = ear.err_accpt_rejct(id_star, x_data, y_data, mag_data, e_mag, col1_data, e_col1) clust_rad = [radius[indx], 0.] center_cl = [centers[0][indx], centers[1][indx]] # Get stars in and out of cluster's radius. stars_in, stars_out, stars_in_rjct, stars_out_rjct = \ gio.get_in_out(center_cl, clust_rad[0], acpt_stars, rjct_stars) # Path where the code is running
def run_page1():
    """Render the "Market dashboard" page.

    Loads the ticker table, lets the user pick computation column,
    benchmark, look-back periods and figure options from the sidebar,
    computes percentage returns per period and draws the resulting
    figure via make_figure().

    NOTE(review): uses st.sidebar.beta_expander / st.beta_expander,
    which belong to an old Streamlit API (renamed to st.expander in
    later versions); confirm the pinned Streamlit version.
    """
    # load global variables
    load_global_vars()
    st.title('Market dashboard')
    # Timestamp shown to the user (dd/mm/YY H:M:S).
    dt_string = TODAY.strftime("%d/%m/%Y %H:%M:%S")
    st.write("Last updated at", dt_string)
    # import tickers
    # with st.spinner("Updating tickers..."):
    tickers = get_tickers(dir=DIR_TICKERS, time=dt_string)
    st.sidebar.title("User settings")
    # On demand, re-read the ticker table straight from the Excel file.
    if st.sidebar.button("Update tickers"):
        tickers = pd.read_excel('data/tickers.xlsx')
    comput = st.sidebar.selectbox("Choose computation",
                                  ["Close", "High", "Low", "Open",
                                   "Volume"])
    # convert to string (space-separated symbols for the downloader)
    tickers_input = ' '.join(map(str, tickers['Ticker'].values.tolist()))
    tickers_list = tickers['Ticker'].values.tolist()
    tickers_list.sort()
    # choose benchmark; [1] is prepended as the no-benchmark option --
    # presumably; confirm against compute_returns().
    benchmark = st.sidebar.selectbox("Select benchmark", [1] + tickers_list)
    # periods
    expander_periods = st.sidebar.beta_expander("Define periods")
    with expander_periods:
        # periods input (look-back lengths in days)
        period1 = expander_periods.number_input("Period 1", min_value=1,
                                                max_value=MAX_PERIOD,
                                                value=1, step=1)
        period2 = expander_periods.number_input("Period 2", min_value=1,
                                                max_value=MAX_PERIOD,
                                                value=5, step=1)
        period3 = expander_periods.number_input("Period 3", min_value=1,
                                                max_value=MAX_PERIOD,
                                                value=30, step=1)
        period4 = expander_periods.number_input("Period 4", min_value=1,
                                                max_value=MAX_PERIOD,
                                                value=DAYS_YTD, step=1)
        period5 = expander_periods.number_input("Period 5", min_value=1,
                                                max_value=MAX_PERIOD,
                                                value=365, step=1)
        # add periods in list
        periods = [period1, period2, period3, period4, period5]
        sorts = expander_periods.selectbox("Sort by period", periods)
        within_group = expander_periods.checkbox("Sort within group",
                                                 value=False)
    expander_figureOpt = st.sidebar.beta_expander("Figure options")
    # figure options
    with expander_figureOpt:
        # NOTE(review): the label says "Descending" but the value feeds
        # 'ascending' below -- checking the box yields "total ascending";
        # confirm the intended sort direction.
        ascending = expander_figureOpt.checkbox("Descending", value=True)
        fig_width = expander_figureOpt.slider("Figure width",
                                              min_value=100,
                                              max_value=2000,
                                              value=1150, step=1)
        fig_height = expander_figureOpt.slider("Figure height",
                                               min_value=100,
                                               max_value=2000,
                                               value=950, step=1)
        font_size = expander_figureOpt.number_input("Font size",
                                                    min_value=4,
                                                    max_value=30,
                                                    value=9, step=1)
        # One color per ticker group, user-adjustable.
        color_cyc = expander_figureOpt.color_picker('Color for cyclical',
                                                    value='#CC241D')
        color_def = expander_figureOpt.color_picker('Color for defensive',
                                                    value='#458588')
        color_gro = expander_figureOpt.color_picker('Color for growth',
                                                    value='#689D6A')
        color_oth = expander_figureOpt.color_picker('Color for other',
                                                    value='#928374')
        color_discrete_map = {
            "Cyclical": color_cyc,
            "Defensive": color_def,
            "Growth": color_gro,
            "Other": color_oth
        }
    if ascending:
        asc_desc = "total ascending"
    else:
        asc_desc = "total descending"
    # compute returns
    # with st.spinner("Loading data..."):
    data = get_data(tickers=tickers_input, period="20y", time=dt_string)
    rets = [compute_returns(data=data, column=comput, period=p,
                            benchmark=benchmark) for p in periods]
    # Percentage returns rounded to 2 decimals, one column per period.
    rets_conc = round(pd.concat(rets, axis=1) * 100, ndigits=2)
    rets_conc.columns = periods
    rets_conc['Ticker'] = rets_conc.index
    # expander tickers
    expander_ticker = st.beta_expander("Show tickers")
    with expander_ticker:
        t = expander_ticker.multiselect("Select tickers",
                                        options=tickers_list,
                                        default=tickers_list)
        # Keep only the selected tickers, join their metadata from the
        # tickers table, sort, and drop incomplete rows.
        select_tick = rets_conc[rets_conc['Ticker'].isin(t)]
        select_tick = pd.concat([select_tick.set_index('Ticker'),
                                 tickers.set_index('Ticker')],
                                axis=1, join='inner').reset_index()
        select_tick = select_tick.sort_values(['Group', sorts],
                                              ascending=ascending)
        select_tick.dropna(inplace=True)
    # Long format for plotting: one row per (ticker, period).
    df_plot_long = pd.melt(select_tick, id_vars=['Ticker', 'Name', 'Group'],
                           var_name='Period', value_name='Return (%)')
    # with st.spinner("Updating figure..."):
    fig = make_figure(
        data=df_plot_long,
        fig_width=fig_width,
        fig_height=fig_height,
        font_size=font_size,
        sorting=sorts,
        periods=periods,
        asc_desc=asc_desc,
        within_group=within_group,
        color_discrete_map=color_discrete_map
    )
    # show figure
    st.plotly_chart(fig, use_container_width=False)
else: if cluster in zams_manual_accept or cluster in iso_manual_accept: run_cluster = True else: run_cluster = False if run_cluster: print sub_dir, cluster # Location of the photometric data file for each cluster. data_phot = '/media/rest/Dropbox/GABRIEL/CARRERA/3-POS-DOC/trabajo/\ data_all/cumulos-datos-fotometricos/' # Get photometric data for cluster. filename = glob.glob(join(data_phot, sub_dir, cluster + '.*'))[0] id_star, x_data, y_data, mag_data, e_mag, col1_data, e_col1 = \ gd.get_data(data_phot, sub_dir, filename) # Accept and reject stars based on their errors. bright_end, popt_mag, popt_umag, pol_mag, popt_col1, popt_ucol1, \ pol_col1, mag_val_left, mag_val_right, col1_val_left, col1_val_right, \ acpt_stars, rjct_stars = ear.err_accpt_rejct(id_star, x_data, y_data, mag_data, e_mag, col1_data, e_col1) clust_rad = [radius[indx], 0.] center_cl = [centers[0][indx], centers[1][indx]] # Get stars in and out of cluster's radius. stars_in, stars_out, stars_in_rjct, stars_out_rjct = \ gio.get_in_out(center_cl, clust_rad[0], acpt_stars, rjct_stars) # Path where the code is running