class TreeElement(_traits.HasTraits):
    '''Represent a calculation (analysis) in the Individual Differences methods ensemble.'''
    name = _traits.Str()
    calcc = _traits.WeakRef()
    plots_act = _traits.List(DiffWindowLauncher)

    def _plots_act_default(self):
        acts = [
            ("X Scores", 'plsr_x_scores_plot'),
            ("X & Y correlation loadings", 'plsr_corr_loadings_plot'),
            ("X Loadings", 'plsr_x_loadings_plot'),
            ("Y loadings", 'plsr_y_loadings_plot'),
            ("Explained variance in X", 'plsr_x_expl_var_plot'),
            ("Explained variance in Y", 'plsr_y_expl_var_plot'),
        ]
        return [
            DiffWindowLauncher(
                node_name=nn,
                plot_func_name=fn,
                owner_ref=self,
                loop_name='plots_act',
            )
            for nn, fn in acts
        ]
class WindowLauncher(_traits.HasTraits):
    node_name = _traits.Str()
    view_creator = _traits.Callable()
    owner_ref = _traits.WeakRef()
    loop_name = _traits.Str()
    # FIXME: Rename to creator_parms
    func_parms = _traits.Tuple()
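# --- Usage sketch (not part of the original source) ------------------------
# Shows how a WindowLauncher entry is typically consumed: the stored callable
# is invoked with a result object plus the optional func_parms. The function
# 'make_scores_plot' and the dict 'res' are hypothetical placeholders.
def make_scores_plot(res, *parms):
    # would normally build and return a viewable (e.g. a chaco DataView)
    return 'plot of {} with parms {}'.format(res, parms)

example_launcher = WindowLauncher(
    node_name='X Scores',
    view_creator=make_scores_plot,
    loop_name='plots_act',
    func_parms=(1, 2),              # forwarded as positional arguments
)
example_res = {'scores': [0.1, 0.2]}    # dummy result object
viewable = example_launcher.view_creator(example_res,
                                         *example_launcher.func_parms)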
class Conjoint(Model):
    # The input data for the calculation
    owner_ref = _traits.WeakRef()
    # design = DataSet()
    design = _traits.DelegatesTo('owner_ref')
    design_vars = _traits.List(_traits.Str())
    liking = DataSet()
    # consumers = DataSet()
    consumers = _traits.DelegatesTo('owner_ref')
    consumers_vars = _traits.List(_traits.Str())

    # Conjoint settings
    model_struct = _traits.Enum('Struct 1', 'Struct 2', 'Struct 3')

    # Conjoint calculation state
    ccs = _traits.Instance(ConjointCalcState, ())
    cm = _traits.Instance(ConjointMachine)

    # depends_on
    res = _traits.Property(
        depends_on='design_vars, consumers_vars, model_struct')

    def _cm_default(self):
        try:
            return ConjointMachine()
        except RNotFoundException:
            self.ccs.messages = ("Was not able to find and start R.\n"
                                 "You have to check the installation of R")
            self.ccs.edit_traits(kind='livemodal')

    @_traits.on_trait_change('owner_ref.model_struct')
    def _struc_altered(self, new):
        self.model_struct = new

    @_traits.on_trait_change('owner_ref.sel_design_var')
    def _des_var_altered(self, new):
        self.design_vars = new

    @_traits.on_trait_change('owner_ref.sel_cons_char')
    def _cons_char_altered(self, new):
        self.consumers_vars = new

    @_traits.cached_property
    def _get_res(self):
        if not self.cm.run_state:
            self.cm.run_state = self.ccs

        model = {
            'Struct 1': 1,
            'Struct 2': 2,
            'Struct 3': 3,
        }[self.model_struct]

        self.cm.schedule_calculation(
            self.design, sorted(self.design_vars),
            self.liking, model,
            self.consumers, sorted(self.consumers_vars))
        self.ccs.edit_traits(kind='livemodal')
        return self.cm.get_result()
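# --- Sketch of the caching pattern used by Conjoint.res (not in the original
# source). A cached Property is recomputed only when one of the traits named
# in 'depends_on' changes. The class and trait names below are hypothetical.
import traits.api as _traits_api

class CachedCalc(_traits_api.HasTraits):
    design_vars = _traits_api.List(_traits_api.Str())
    model_struct = _traits_api.Enum('Struct 1', 'Struct 2', 'Struct 3')
    res = _traits_api.Property(depends_on='design_vars, model_struct')

    @_traits_api.cached_property
    def _get_res(self):
        # stands in for cm.schedule_calculation() + cm.get_result()
        return (tuple(sorted(self.design_vars)), self.model_struct)

calc = CachedCalc(design_vars=['b', 'a'])
print(calc.res)                  # computed: (('a', 'b'), 'Struct 1')
calc.model_struct = 'Struct 2'   # invalidates the cached value
print(calc.res)                  # recomputed: (('a', 'b'), 'Struct 2')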
class TLoop(TLineMixIn):
    '''Base class for application time loops.

    A time loop can be interrupted, paused, resumed or restarted.
    The implementation of the loop must contain the break criterion::

        while True:
            if self.restart or self.paused:
                break
            # calculation
    '''
    tstep = tr.WeakRef(ITStep)

    sim = tr.Property

    def _get_sim(self):
        return self.tstep.sim

    hist = tr.Property

    def _get_hist(self):
        return self.tstep.hist

    interrupt = tr.Bool(False)

    def reset(self):
        self.restart = True

    restart = tr.Bool(True)

    user_wants_abort = tr.Property

    def _get_user_wants_abort(self):
        return self.interrupt

    def init(self):
        if self.interrupt:
            self.interrupt = False
        if self.restart:
            self.tline.val = self.tline.min
            self.tstep.init_state()
            self.hist.init_state()
            self.restart = False

    def eval(self):
        '''This method is called by the tloop_thread.
        '''
        raise NotImplementedError

    def __call__(self):
        self.init()
        return self.eval()
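# --- Sketch (not in the original source) of a concrete time loop implementing
# the break criterion described in the docstring above. The 'paused' trait,
# the tline attributes 'max' and 'step' and the 'make_incr' step method are
# assumptions for illustration only.
class SimpleTLoop(TLoop):

    paused = tr.Bool(False)

    def eval(self):
        t, t_max = self.tline.val, self.tline.max
        dt = self.tline.step
        while t < t_max:
            if self.restart or self.paused or self.user_wants_abort:
                break                     # documented break criterion
            self.tstep.make_incr()        # hypothetical time step increment
            t += dt
            self.tline.val = t
        return t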
class Simulator(BMCSTreeNode, TLineMixIn):
    r'''Base class for simulators included in the BMCS Tool Suite.
    It implements the state dependencies within the simulation tree.
    It also handles the communication between the simulation and the
    user interface in several modes of interaction.
    '''
    name = 'simulator'

    @tr.observe('state_changed')
    def _model_structure_changed(self, event=None):
        self.tloop.restart = True

    #=========================================================================
    # TIME LOOP
    #=========================================================================
    tloop = Property(Instance(ITLoop), depends_on='state_changed')
    r'''Time loop constructed based on the current model.
    '''
    @cached_property
    def _get_tloop(self):
        return self.tstep.tloop_type(tstep=self.tstep,
                                     tline=self.tline)

    def __init__(self, tstep, *args, **kw):
        self.tstep = tstep
        super(Simulator, self).__init__(*args, **kw)

    tstep = tr.WeakRef(ITStep)

    hist = tr.Property

    def _get_hist(self):
        return self.tstep.hist

    def run(self):
        r'''Run the time loop of the simulation.
        '''
        self.tloop()
        return

    interrupt = tr.DelegatesTo('tloop')

    def reset(self):
        self.tloop.reset()

    ipw_view = bu.View(
        run_method='tloop',
        reset_method='reset',
        interrupt_var='interrupt',
        time_var='t',
        time_max='t_max',
    )
class ViewNavigator(_traits.HasTraits):
    view_loop = _traits.List(WindowLauncher)
    current_idx = _traits.Int(0)
    res = _traits.WeakRef()

    def show_next(self):
        if self.current_idx < len(self.view_loop) - 1:
            self.current_idx += 1
        else:
            self.current_idx = 0
        vc = self.view_loop[self.current_idx]
        # return vc.view_creator(self.res, vc.func_parms)
        if len(vc.func_parms) < 1:
            return self._make_plot_controller(vc.view_creator(self.res))
        else:
            return self._make_plot_controller(
                vc.view_creator(self.res, *vc.func_parms))

    def show_previous(self):
        if self.current_idx > 0:
            self.current_idx -= 1
        else:
            self.current_idx = len(self.view_loop) - 1
        vc = self.view_loop[self.current_idx]
        if len(vc.func_parms) < 1:
            return self._make_plot_controller(vc.view_creator(self.res))
        else:
            return self._make_plot_controller(
                vc.view_creator(self.res, *vc.func_parms))

    def _make_plot_controller(self, viewable):
        if isinstance(viewable, StackedHistPlot):
            plot_control = StackedPlotControl(viewable)
        elif isinstance(viewable, InteractionPlot):
            plot_control = InteractionPlotControl(viewable)
        elif isinstance(viewable, (CLSectorPlot, IndDiffCLSectorPlot)):
            plot_control = CLSectorPlotControl(viewable)
        elif isinstance(viewable, CLPlot):
            plot_control = CLPlotControl(viewable)
        elif isinstance(viewable, ScatterSectorPlot):
            plot_control = PCSectorPlotControl(viewable)
        elif isinstance(viewable, PCScatterPlot):
            plot_control = PCPlotControl(viewable)
        elif isinstance(viewable, _chaco.DataView):
            plot_control = NoPlotControl(viewable)
        return plot_control
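# --- Sketch (not in the original source) of the wrap-around index cycling
# used by show_next()/show_previous(): the current index always stays inside
# the view loop.
def cycle(idx, n, step=+1):
    # equivalent to the if/else wrap-around above, written with modulo
    return (idx + step) % n

assert cycle(2, 3, +1) == 0   # next after the last entry wraps to the first
assert cycle(0, 3, -1) == 2   # previous before the first wraps to the last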
class EnergyDissipation(bu.InteractiveModel):
    name = 'Energy'

    colors = dict(  # color associations
        stored_energy='darkgreen',        # recoverable
        free_energy_kin='darkcyan',       # freedom - sky
        free_energy_iso='darkslateblue',  # freedom - sky
        plastic_diss_s='darkorange',      # fire - heat
        plastic_diss_w='red',             # fire - heat
        damage_diss_s='darkgray',         # ruined
        damage_diss_w='black'             # ruined
    )

    slider_exp = tr.WeakRef(bu.InteractiveModel)

    t_arr = tr.DelegatesTo('slider_exp')
    Sig_arr = tr.DelegatesTo('slider_exp')
    Eps_arr = tr.DelegatesTo('slider_exp')
    s_x_t = tr.DelegatesTo('slider_exp')
    s_y_t = tr.DelegatesTo('slider_exp')
    w_t = tr.DelegatesTo('slider_exp')
    iter_t = tr.DelegatesTo('slider_exp')

    show_iter = bu.Bool(False)
    E_plastic_work = bu.Bool(False)
    E_iso_free_energy = bu.Bool(True)
    E_kin_free_energy = bu.Bool(True)
    E_plastic_diss = bu.Bool(True)
    E_damage_diss = bu.Bool(True)

    ipw_view = bu.View(
        bu.Item('show_iter'),
        bu.Item('E_damage_diss'),
        bu.Item('E_plastic_work'),
        bu.Item('E_iso_free_energy'),
        bu.Item('E_kin_free_energy'),
        bu.Item('E_plastic_diss'),
    )

    WUG_t = tr.Property

    def _get_WUG_t(self):
        # NOTE: getter renamed from _get_W_t to match the WUG_t property name
        W_arr = (
            cumtrapz(self.Sig_arr[:, 0], self.s_x_t, initial=0) +
            cumtrapz(self.Sig_arr[:, 1], self.s_y_t, initial=0) +
            cumtrapz(self.Sig_arr[:, 2], self.w_t, initial=0)
        )
        s_x_el_t = (self.s_x_t - self.Eps_arr[:, 0])
        s_y_el_t = (self.s_y_t - self.Eps_arr[:, 1])
        w_el_t = (self.w_t - self.Eps_arr[:, 2])
        U_arr = (
            self.Sig_arr[:, 0] * s_x_el_t / 2.0 +
            self.Sig_arr[:, 1] * s_y_el_t / 2.0 +
            self.Sig_arr[:, 2] * w_el_t / 2.0
        )
        G_arr = W_arr - U_arr
        return W_arr, U_arr, G_arr

    Eps = tr.Property
    """Energy dissipated in association with the individual internal variables
    """

    def _get_Eps(self):
        Eps_names = self.slider_exp.slide_model.Eps_names
        E_i = cumtrapz(self.Sig_arr, self.Eps_arr, initial=0, axis=0)
        return SimpleNamespace(
            **{Eps_name: E for Eps_name, E in zip(Eps_names, E_i.T)})

    mechanisms = tr.Property
    """Energy associated with the mechanisms (damage and plastic dissipation)
    or with the free energy
    """

    def _get_mechanisms(self):
        E_i = cumtrapz(self.Sig_arr, self.Eps_arr, initial=0, axis=0)
        E_T_x_pi_, E_T_y_pi_, E_N_pi_, E_z_, E_alpha_x_, E_alpha_y_, \
            E_omega_T_, E_omega_N_ = E_i.T
        E_plastic_work_T = E_T_x_pi_ + E_T_y_pi_
        E_plastic_work_N = E_N_pi_
        E_plastic_work = E_plastic_work_T + E_plastic_work_N
        E_iso_free_energy = E_z_
        E_kin_free_energy = E_alpha_x_ + E_alpha_y_
        E_plastic_diss_T = (E_plastic_work_T
                            - E_iso_free_energy - E_kin_free_energy)
        E_plastic_diss_N = E_plastic_work_N
        E_plastic_diss = E_plastic_diss_T + E_plastic_diss_N
        E_damage_diss = E_omega_T_ + E_omega_N_
        return SimpleNamespace(
            **{'plastic_work_N': E_plastic_work_N,
               'plastic_work_T': E_plastic_work_T,
               'plastic_work': E_plastic_work,
               'iso_free_energy': E_iso_free_energy,
               'kin_free_energy': E_kin_free_energy,
               'plastic_diss_N': E_plastic_diss_N,
               'plastic_diss_T': E_plastic_diss_T,
               'plastic_diss': E_plastic_diss,
               'damage_diss_N': E_omega_N_,
               'damage_diss_T': E_omega_T_,
               'damage_diss': E_damage_diss})

    def plot_energy(self, ax, ax_i):
        W_arr = (
            cumtrapz(self.Sig_arr[:, 0], self.s_x_t, initial=0) +
            cumtrapz(self.Sig_arr[:, 1], self.s_y_t, initial=0) +
            cumtrapz(self.Sig_arr[:, 2], self.w_t, initial=0)
        )
        s_x_el_t = (self.s_x_t - self.Eps_arr[:, 0])
        s_y_el_t = (self.s_y_t - self.Eps_arr[:, 1])
        w_el_t = (self.w_t - self.Eps_arr[:, 2])
        U_arr = (
            self.Sig_arr[:, 0] * s_x_el_t / 2.0 +
            self.Sig_arr[:, 1] * s_y_el_t / 2.0 +
            self.Sig_arr[:, 2] * w_el_t / 2.0
        )
        G_arr = W_arr - U_arr

        ax.plot(self.t_arr, W_arr, lw=0.5, color='black',
                label=r'$W$ - Input work')
        ax.plot(self.t_arr, G_arr, '--', color='black', lw=0.5,
                label=r'$W^\mathrm{inel}$ - Inelastic work')
        ax.fill_between(self.t_arr, W_arr, G_arr,
                        color=self.colors['stored_energy'], alpha=0.2)
        ax.set_xlabel('$t$ [-]')
        ax.set_ylabel(r'$E$ [Nmm]')
        ax.legend()

        E_i = cumtrapz(self.Sig_arr, self.Eps_arr, initial=0, axis=0)
        E_T_x_pi_, E_T_y_pi_, E_N_pi_, E_z_, E_alpha_x_, E_alpha_y_, \
            E_omega_T_, E_omega_N_ = E_i.T
        E_plastic_work_T = E_T_x_pi_ + E_T_y_pi_
        E_plastic_work_N = E_N_pi_
        E_plastic_work = E_plastic_work_T + E_plastic_work_N
        E_iso_free_energy = E_z_
        E_kin_free_energy = E_alpha_x_ + E_alpha_y_
        E_plastic_diss_T = (E_plastic_work_T
                            - E_iso_free_energy - E_kin_free_energy)
        E_plastic_diss_N = E_plastic_work_N
        E_plastic_diss = E_plastic_diss_T + E_plastic_diss_N
        E_damage_diss = E_omega_T_ + E_omega_N_

        E_level = 0
        if self.E_damage_diss:
            ax.plot(self.t_arr, E_damage_diss + E_level, color='black', lw=1)
            ax_i.plot(self.t_arr, E_damage_diss, color='gray', lw=2,
                      label=r'damage diss.: $Y\dot{\omega}$')
            ax.fill_between(self.t_arr, E_omega_N_ + E_level, E_level,
                            color='black', hatch='|')
            E_d_level = E_level + E_omega_N_
            ax.fill_between(self.t_arr, E_omega_T_ + E_d_level, E_d_level,
                            color='gray', alpha=0.3)
        E_level = E_damage_diss
        if self.E_plastic_work:
            ax.plot(self.t_arr, E_plastic_work + E_level, lw=0.5, color='black')
            # ax.fill_between(self.t_arr, E_plastic_work + E_level, E_level,
            #                 color='red', alpha=0.3)
            label = r'plastic work: $\sigma \dot{\varepsilon}^\pi$'
            ax_i.plot(self.t_arr, E_plastic_work, color='red', lw=2, label=label)
            ax.fill_between(self.t_arr, E_plastic_work_N + E_level, E_level,
                            color='orange', alpha=0.3)
            E_p_level = E_level + E_plastic_work_N
            ax.fill_between(self.t_arr, E_plastic_work_T + E_p_level, E_p_level,
                            color='red', alpha=0.3)
        if self.E_plastic_diss:
            ax.plot(self.t_arr, E_plastic_diss + E_level, lw=.4, color='black')
            label = r'apparent pl. diss.: $\sigma \dot{\varepsilon}^\pi - X\dot{\alpha} - Z\dot{z}$'
            ax_i.plot(self.t_arr, E_plastic_diss, color='red', lw=2, label=label)
            ax.fill_between(self.t_arr, E_plastic_diss_N + E_level, E_level,
                            color='red', hatch='-')
            E_d_level = E_level + E_plastic_diss_N
            ax.fill_between(self.t_arr, E_plastic_diss_T + E_d_level, E_d_level,
                            color='red', alpha=0.3)
        E_level += E_plastic_diss
        if self.E_iso_free_energy:
            ax.plot(self.t_arr, E_iso_free_energy + E_level, '-.',
                    lw=0.5, color='black')
            ax.fill_between(self.t_arr, E_iso_free_energy + E_level, E_level,
                            color='royalblue', hatch='|')
            ax_i.plot(self.t_arr, -E_iso_free_energy, '-.',
                      color='royalblue', lw=2, label=r'iso. diss.: $Z\dot{z}$')
        E_level += E_iso_free_energy
        if self.E_kin_free_energy:
            ax.plot(self.t_arr, E_kin_free_energy + E_level, '-.',
                    color='black', lw=0.5)
            ax.fill_between(self.t_arr, E_kin_free_energy + E_level, E_level,
                            color='royalblue', alpha=0.2)
            ax_i.plot(self.t_arr, -E_kin_free_energy, '-.',
                      color='blue', lw=2, label=r'free energy: $X\dot{\alpha}$')

        ax_i.legend()
        ax_i.set_xlabel('$t$ [-]')
        ax_i.set_ylabel(r'$E$ [Nmm]')

    @staticmethod
    def subplots(fig):
        ax_work, ax_energies = fig.subplots(1, 2)
        ax_iter = ax_work.twinx()
        return ax_work, ax_energies, ax_iter

    def update_plot(self, axes):
        ax_work, ax_energies, ax_iter = axes
        self.plot_energy(ax_work, ax_energies)
        if self.show_iter:
            ax_iter.plot(self.t_arr, self.iter_t)
            ax_iter.set_ylabel(r'$n_\mathrm{iter}$')

    def xsubplots(self, fig):
        ((ax1, ax2), (ax3, ax4)) = fig.subplots(2, 2, figsize=(10, 5),
                                                tight_layout=True)
        ax11 = ax1.twinx()
        ax22 = ax2.twinx()
        ax33 = ax3.twinx()
        ax44 = ax4.twinx()
        return ax1, ax11, ax2, ax22, ax3, ax33, ax4, ax44

    def xupdate_plot(self, axes):
        ax1, ax11, ax2, ax22, ax3, ax33, ax4, ax44 = axes
        self.get_response([6, 0, 0])
        # plot_Sig_Eps(s_x_t, Sig_arr, Eps_arr, iter_t, *axes)
        s_x_pi_, s_y_pi_, w_pi_, z_, alpha_x_, alpha_y_, \
            omega_s_, omega_w_ = self.Eps_arr.T
        tau_x_pi_, tau_y_pi_, sig_pi_, Z_, X_x_, X_y_, \
            Y_s_, Y_w_ = self.Sig_arr.T
        ax1.plot(self.w_t, sig_pi_, color='green')
        ax11.plot(self.s_x_t, tau_x_pi_, color='red')
        ax2.plot(self.w_t, omega_w_, color='green')
        ax22.plot(self.w_t, omega_s_, color='red')
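# --- Self-contained sketch (not in the original source) of the energy split
# computed in _get_WUG_t above: W is the cumulative external work, U the
# currently stored elastic energy, and G = W - U the inelastic part. The
# slip/stress histories below are synthetic.
import numpy as np
from scipy.integrate import cumtrapz

s = np.linspace(0, 1, 100)              # monotonic slip history
s_pi = np.clip(s - 0.3, 0, None)        # assumed plastic part of the slip
tau = 10.0 * (s - s_pi)                 # stress from the elastic part

W = cumtrapz(tau, s, initial=0)         # external work
U = tau * (s - s_pi) / 2.0              # recoverable elastic energy
G = W - U                               # inelastic work

print(W[-1], U[-1], G[-1])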
class Pca(Model):
    """Represent the PCA model of a data set."""
    ds = DataSet()
    settings = _traits.WeakRef()
    # List of variable names with zero variance in the data vector
    zero_variance = _traits.List()
    # Checkbox bool for standardised results
    standardise = _traits.Bool(False)
    calc_n_pc = _traits.Int()
    min_pc = 2
    # max_pc = _traits.Property()
    max_pc = 10
    min_std = _traits.Float(0.001)

    def _get_res(self):
        '''Run the PCA calculation and assemble the results.

        This returns a results object that holds copies of the various
        result data. Each result set is a DataSet containing all metadata
        necessary for presenting the result.
        '''
        if self.settings.standardise:
            std_ds = True
        else:
            std_ds = False
        if std_ds and self._have_zero_std_var():
            raise InComputeable('Matrix has variables with zero variance',
                                self.zero_variance)
        pca = PCA(self.ds.values, numComp=self.settings.calc_n_pc,
                  Xstand=std_ds, cvType=["loo"])
        return self._pack_res(pca)

    def _have_zero_std_var(self):
        sv = self.ds.values.std(axis=0)
        dm = sv < self.min_std
        if _np.any(dm):
            vv = _np.array(self.ds.var_n)
            self.zero_variance = list(vv[_np.nonzero(dm)])
            return True
        else:
            self.zero_variance = []
            return False

    def _get_max_pc(self):
        return max((min(self.ds.n_objs, self.ds.n_vars, 12) - 2), self.min_pc)

    def _calc_n_pc_default(self):
        return self.max_pc

    def _pack_res(self, pca_obj):
        res = Result('PCA {0}'.format(self.ds.display_name))

        # Scores
        mT = pca_obj.X_scores()
        res.scores = DataSet(
            mat=_pd.DataFrame(
                data=mT,
                index=self.ds.obj_n,
                columns=["PC-{0}".format(i+1) for i in range(mT.shape[1])],
            ),
            subs=self.ds.subs,
            display_name='Scores')

        # Loadings
        mP = pca_obj.X_loadings()
        res.loadings = DataSet(
            mat=_pd.DataFrame(
                data=mP,
                index=self.ds.var_n,
                columns=["PC-{0}".format(i+1) for i in range(mP.shape[1])],
            ),
            subs=self.ds.rsubs,
            display_name='Loadings')

        # Correlation loadings
        mCL = pca_obj.X_corrLoadings()
        res.corr_loadings = DataSet(
            mat=_pd.DataFrame(
                data=mCL,
                index=self.ds.var_n,
                columns=["PC-{0}".format(i+1) for i in range(mCL.shape[1])],
            ),
            display_name='Correlation loadings')

        # Explained variance
        cal = pca_obj.X_calExplVar()
        cum_cal = pca_obj.X_cumCalExplVar()[1:]
        val = pca_obj.X_valExplVar()
        cum_val = pca_obj.X_cumValExplVar()[1:]
        res.expl_var = DataSet(
            mat=_pd.DataFrame(
                data=[cal, cum_cal, val, cum_val],
                index=['calibrated', 'cumulative calibrated',
                       'validated', 'cumulative validated'],
                columns=["PC-{0}".format(i+1) for i in range(len(cal))],
            ),
            display_name='Explained variance')

        # Residuals E after each computed PC.
        # Returned as a dictionary with arrays
        # (could be put into a pandas Panel / 3D structure).
        resids = pca_obj.X_residuals()

        # Predicted matrices Xhat from calibration after each computed PC.
        # FIXME: Is this X_predCal()
        # cal_pred_x = pca_obj.calPredX()

        # Validated matrices Xhat from calibration after each computed PC.
        # val_pred_x = pca_obj.valPredX()

        # MSEE from cross validation after each computed PC.
        msee = pca_obj.X_MSEE()

        # MSEE from cross validation after each computed PC for each variable.
        ind_var_msee = pca_obj.X_MSEE_indVar()

        # MSECV from cross validation after each computed PC.
        msecv = pca_obj.X_MSECV()

        # MSECV from cross validation after each computed PC for each variable.
        ind_var_msecv = pca_obj.X_MSECV_indVar()

        return res
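# --- Sketch (not in the original source) of the result-packing convention
# used in _pack_res(): every result matrix becomes a pandas DataFrame whose
# columns are labelled PC-1, PC-2, ... The score matrix here is dummy data.
import numpy as _np
import pandas as _pd

mT = _np.random.rand(5, 3)                        # dummy score matrix
obj_names = ['obj{}'.format(i) for i in range(5)]
scores = _pd.DataFrame(
    data=mT,
    index=obj_names,
    columns=["PC-{0}".format(i + 1) for i in range(mT.shape[1])],
)
print(scores.columns.tolist())                    # ['PC-1', 'PC-2', 'PC-3']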
class BasicStat(Model):
    '''Basic statistics model object.

    This model calculates:
     * mean
     * std
     * min
     * max

    In addition it makes data for plotting histograms.

    The *summary_axis* attribute decides whether the calculation is done
    for the row or the column axis.
    '''
    ds = DataSet()
    settings = _traits.WeakRef()
    summary_axis = _traits.Enum(('Row-wise', 'Column-wise'))

    def _get_res(self):
        res = Result('Basic stats for {}'.format(self.ds.display_name))
        res.summary = self._calc_summary()
        res.hist = self._calc_histogram()
        return res

    def _calc_summary(self):
        mat = self.ds.values
        if self.settings.summary_axis == 'Row-wise':
            ax = 1
            idx = self.ds.obj_n
        else:
            ax = 0
            idx = self.ds.var_n

        sy = _pd.DataFrame(index=idx)
        sy['min'] = _np.percentile(mat, 0, axis=ax)
        sy['perc25'] = _np.percentile(mat, 25, axis=ax)
        sy['median'] = _np.percentile(mat, 50, axis=ax)
        sy['perc75'] = _np.percentile(mat, 75, axis=ax)
        sy['max'] = _np.percentile(mat, 100, axis=ax)

        return DataSet(mat=sy, display_name="Box plot: {}".format(
            self.ds.display_name))

    def _calc_histogram(self):
        # NOTE: astype(np.int16) due to some bincount() bug in v1.6.2 on Windows
        # https://github.com/numpy/numpy/issues/823
        mat = self.ds.values.astype(_np.int16)
        end = mat.max() + 2
        begin = mat.min()
        split = range(begin, end)

        def hist(v):
            r = _np.histogram(v, bins=split)
            return r[0]

        if self.ds.missing_data:
            hl = []
            if self.settings.summary_axis == 'Row-wise':
                idx = self.ds.obj_n
                dr = 1
                for i in range(mat.shape[0]):
                    row = mat[i, ~mat[i].mask]
                    hr = list(_np.bincount(row, minlength=end))
                    hl.append(hr)
            else:
                idx = self.ds.var_n
                dr = 0
                for i in range(mat.shape[1]):
                    row = mat[~mat[:, i].mask, i]
                    hr = list(_np.bincount(row, minlength=end))
                    hl.append(hr)
        else:
            if self.settings.summary_axis == 'Row-wise':
                idx = self.ds.obj_n
                hl = _np.apply_along_axis(hist, 1, mat)
            else:
                idx = self.ds.var_n
                hl = _np.apply_along_axis(hist, 0, mat).T

        ht = _pd.DataFrame(hl, index=idx, columns=split[:-1])
        if self.ds.missing_data:
            ht['missing'] = _np.ma.count_masked(mat, axis=dr)

        return DataSet(mat=ht, display_name="Stacked histogram: {}".format(
            self.ds.display_name))
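# --- Sketch (not in the original source) of the row-wise five-number summary
# computed in _calc_summary(), shown on a small synthetic matrix.
import numpy as _np
import pandas as _pd

mat = _np.array([[1, 2, 3, 4],
                 [2, 4, 6, 8]])
sy = _pd.DataFrame(index=['row1', 'row2'])
for name, q in [('min', 0), ('perc25', 25), ('median', 50),
                ('perc75', 75), ('max', 100)]:
    sy[name] = _np.percentile(mat, q, axis=1)   # axis=1 -> row-wise
print(sy)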
class PlsrPcr(Model):
    """Represent the PlsrPcr model between one X and one Y data set."""
    # Consumer liking
    ds_C = DataSet()
    # Descriptive analysis / sensory profiling
    ds_S = DataSet()
    ds_X = _traits.Property()
    ds_Y = _traits.Property()
    settings = _traits.WeakRef()
    # Checkbox bool for standardised results
    standardise_x = _traits.Bool(False)
    standardise_y = _traits.Bool(False)
    int_ext_mapping = _traits.Enum('Internal', 'External')
    plscr_method = _traits.Enum('PLSR', 'PCR')
    calc_n_pc = _traits.Int()
    min_pc = 2
    # max_pc = _traits.Property()
    max_pc = 10
    min_std = _traits.Float(0.001)
    C_zero_std = _traits.List()
    S_zero_std = _traits.List()

    def _get_res(self):
        if self._have_zero_std():
            raise InComputeable('Matrix has variables with zero variance',
                                self.C_zero_std, self.S_zero_std)
        n_pc = min(self.settings.calc_n_pc, self._get_max_pc())
        if self.settings.plscr_method == 'PLSR':
            pls = PLSR(self.ds_X.values, self.ds_Y.values,
                       numComp=n_pc, cvType=["loo"],
                       Xstand=self.settings.standardise_x,
                       Ystand=self.settings.standardise_y)
            return self._pack_res(pls)
        elif self.settings.plscr_method == 'PCR':
            pcr = PCR(self.ds_X.values, self.ds_Y.values,
                      numComp=n_pc, cvType=["loo"],
                      Xstand=self.settings.standardise_x,
                      Ystand=self.settings.standardise_y)
            return self._pack_res(pcr)

    def _have_zero_std(self):
        self.C_zero_std = []
        self.S_zero_std = []

        if self._std_C() and self._std_S():
            rC = self._C_have_zero_std_var()
            rS = self._S_have_zero_std_var()
            return rC or rS
        elif self._std_C():
            return self._C_have_zero_std_var()
        elif self._std_S():
            return self._S_have_zero_std_var()

    def _std_C(self):
        if self.settings.int_ext_mapping == 'Internal':
            return self.settings.standardise_x
        else:
            return self.settings.standardise_y

    def _std_S(self):
        if self.settings.int_ext_mapping == 'Internal':
            return self.settings.standardise_y
        else:
            return self.settings.standardise_x

    def _C_have_zero_std_var(self):
        self.C_zero_std = self._check_zero_std(self.ds_C)
        return bool(self.C_zero_std)

    def _S_have_zero_std_var(self):
        self.S_zero_std = self._check_zero_std(self.ds_S)
        return bool(self.S_zero_std)

    def _check_zero_std(self, ds):
        zero_std_var = []
        sv = ds.values.std(axis=0)
        dm = sv < self.min_std
        if _np.any(dm):
            vv = _np.array(ds.var_n)
            zero_std_var = list(vv[_np.nonzero(dm)])
        return zero_std_var

    def _get_ds_X(self):
        if self.settings.int_ext_mapping == 'Internal':
            return self.ds_C
        else:
            return self.ds_S

    def _get_ds_Y(self):
        if self.settings.int_ext_mapping == 'Internal':
            return self.ds_S
        else:
            return self.ds_C

    def _get_max_pc(self):
        if self.settings.int_ext_mapping == 'Internal':
            return max((min(self.ds_C.n_objs, self.ds_C.n_vars, 11) - 1),
                       self.min_pc)
        else:
            return max((min(self.ds_S.n_objs, self.ds_S.n_vars, 11) - 1),
                       self.min_pc)

    def _calc_n_pc_default(self):
        return self.max_pc

    def _mk_pred_ds(self, pred_mat, npc):
        pred_ds = DataSet(
            mat=_pd.DataFrame(
                data=pred_mat,
                index=self.ds_Y.obj_n,
                columns=self.ds_Y.var_n,
            ),
            display_name='Predicted after PC{}'.format(npc))
        return pred_ds

    def _pack_res(self, pls_obj):
        res = Result('PLSR/PCR {0}(X) & {1}(Y)'.format(
            self.ds_X.display_name, self.ds_Y.display_name))

        if self.settings.int_ext_mapping == 'External':
            res.external_mapping = True
        else:
            res.external_mapping = False

        res.plscr_method = self.settings.plscr_method

        # Scores X
        mT = pls_obj.X_scores()
        res.scores_x = DataSet(
            mat=_pd.DataFrame(
                data=mT,
                index=self.ds_X.obj_n,
                columns=["PC-{0}".format(i+1) for i in range(mT.shape[1])],
            ),
            display_name='X scores')

        # loadings_x
        mP = pls_obj.X_loadings()
        res.loadings_x = DataSet(
            mat=_pd.DataFrame(
                data=mP,
                index=self.ds_X.var_n,
                columns=["PC-{0}".format(i+1) for i in range(mP.shape[1])],
            ),
            display_name='X loadings')

        # loadings_y
        # Same as loadings_x in external mapping?
        mQ = pls_obj.Y_loadings()
        res.loadings_y = DataSet(
            mat=_pd.DataFrame(
                data=mQ,
                index=self.ds_Y.var_n,
                columns=["PC-{0}".format(i+1) for i in range(mQ.shape[1])],
            ),
            display_name='Y loadings')

        # expl_var_x
        cal = pls_obj.X_calExplVar()
        cum_cal = pls_obj.X_cumCalExplVar()[1:]
        val = pls_obj.X_valExplVar()
        cum_val = pls_obj.X_cumValExplVar()[1:]
        res.expl_var_x = DataSet(
            mat=_pd.DataFrame(
                data=[cal, cum_cal, val, cum_val],
                index=['calibrated', 'cumulative calibrated',
                       'validated', 'cumulative validated'],
                columns=["PC-{0}".format(i+1) for i in range(len(cal))],
            ),
            display_name='Explained variance in X')

        # expl_var_y
        cal = pls_obj.Y_calExplVar()
        cum_cal = pls_obj.Y_cumCalExplVar()[1:]
        val = pls_obj.Y_valExplVar()
        cum_val = pls_obj.Y_cumValExplVar()[1:]
        res.expl_var_y = DataSet(
            mat=_pd.DataFrame(
                data=[cal, cum_cal, val, cum_val],
                index=['calibrated', 'cumulative calibrated',
                       'validated', 'cumulative validated'],
                columns=["PC-{0}".format(i+1) for i in range(len(cal))],
            ),
            display_name='Explained variance in Y')

        # X_corrLoadings()
        # corr_loadings_x
        mXcl = pls_obj.X_corrLoadings()
        res.corr_loadings_x = DataSet(
            mat=_pd.DataFrame(
                data=mXcl,
                index=self.ds_X.var_n,
                columns=["PC-{0}".format(i+1) for i in range(mXcl.shape[1])],
            ),
            display_name='X & Y correlation loadings')

        # Y_corrLoadings()
        # corr_loadings_y
        mYcl = pls_obj.Y_corrLoadings()
        res.corr_loadings_y = DataSet(
            mat=_pd.DataFrame(
                data=mYcl,
                index=self.ds_Y.var_n,
                columns=["PC-{0}".format(i+1) for i in range(mYcl.shape[1])],
            ),
            display_name=self.ds_Y.display_name)

        # Y_predCal()
        # Returns a dict with Y pred for each PC
        pYc = pls_obj.Y_predCal()
        ks = pYc.keys()
        pYcs = [self._mk_pred_ds(pYc[k], k) for k in ks]
        res.pred_cal_y = pYcs

        # Y_predVal()
        # Returns a dict with Y pred for each PC
        pYv = pls_obj.Y_predVal()
        ks = pYv.keys()
        pYvs = [self._mk_pred_ds(pYv[k], k) for k in ks]
        res.pred_val_y = pYvs

        return res
class Simulator(BMCSTreeNode, TLineMixIn):
    r'''Base class for simulators included in the BMCS Tool Suite.
    It implements the state dependencies within the simulation tree.
    It also handles the communication between the simulation and the
    user interface in several modes of interaction.
    '''
    tree_node_list = List([])

    def _tree_node_list_default(self):
        return [
            self.tline,
        ]

    def _update_node_list(self):
        self.tree_node_list = [
            self.tline,
        ]

    title = Str

    desc = Str

    @on_trait_change(itags_str)
    def _model_structure_changed(self):
        self.tloop.restart = True

    #=========================================================================
    # TIME LOOP
    #=========================================================================
    tloop = Property(Instance(ITLoop), depends_on=itags_str)
    r'''Time loop constructed based on the current model.
    '''
    @cached_property
    def _get_tloop(self):
        return self.tstep.tloop_type(tstep=self.tstep,
                                     tline=self.tline)

    tstep = tr.WeakRef(ITStep)

    hist = tr.Property

    def _get_hist(self):
        return self.tstep.hist

    def pause(self):
        self.tloop.paused = True
        self.join_thread()

    def stop(self):
        self.tloop.restart = True
        self.join_thread()

    #=========================================================================
    # COMPUTATION THREAD
    #=========================================================================
    _run_thread = Instance(RunTimeLoopThread)
    _running = Bool(False)

    def run(self):
        r'''Run the calculation in the current thread.
        '''
        self._running = True
        if self.ui:
            # inform the ui that the simulation is running in a thread
            self.ui.start_event = True
            self.ui.running = True
        try:
            # start the calculation
            self.tloop()
        except Exception as e:
            self._running = False
            if self.ui:
                self.ui.running = False
            raise e  # re-raise exception
        self._running = False
        if self.ui:
            # clean up the ui and send the finish event
            self.ui.running = False
            self.ui.finish_event = True

    def run_thread(self):
        r'''Run a thread if it does not exist - do nothing otherwise
        '''
        if self._running:
            return
        self._run_thread = RunTimeLoopThread(self)
        self._run_thread.start()

    def join_thread(self):
        r'''Wait until the thread finishes
        '''
        if self._run_thread is None:
            self._running = False
            return
        self._run_thread.join()

    @on_trait_change(itags_str)
    def signal_reset(self):
        '''Upon a change of the model parameters, signal to the user
        interface that further calculation does not make sense.
        '''
        if self.ui:
            self.ui.stop()
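# --- Usage sketch (not part of the original source): how the threaded variant
# of Simulator is typically driven. 'my_tstep' is a hypothetical placeholder
# for an ITStep-conforming time step object supplied by a concrete model.
# sim = Simulator(tstep=my_tstep)
# sim.run_thread()      # start the time loop in a background thread
# sim.pause()           # sets tloop.paused and joins the thread
# sim.run_thread()      # resume - a new thread is created
# sim.stop()            # request a restart and wait for the thread to finish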
class IndDiff(pb.Model):
    """Represent the IndDiff model between an X and a Y data set.

    A PLS model tries to find the multidimensional direction in the X space
    that explains the maximum multidimensional variance direction in the
    Y space.

    The consumer attributes are the independent predictors - the observable
    variables X. The liking data is the response - the predicted variables Y;
    this can be dummified.

    X is an n x m matrix of predictors.
    Y is an n x p matrix of responses.
    """
    # Consumer Liking
    ds_L = ds.DataSet()
    # Consumer Attributes
    ds_A = ds.DataSet()
    # Predictors
    ds_X = _traits.Property()
    # Responses
    ds_Y = _traits.Property()
    # Standardise
    standardise_x = _traits.Bool()
    # Calculated PCA for the response variable
    pca_L = _traits.Property()
    settings = _traits.WeakRef()
    # Checkbox bool for standardised results
    calc_n_pc = _traits.Int()
    min_pc = 2
    max_pc = 10
    # Selection of variables to dummify
    dummify_variables = _traits.ListUnicode()
    consumer_variables = _traits.ListUnicode()
    # Liking PC to use in PLS
    selected_liking_pc = _traits.List(_traits.Int)
    n_Y_pc = _traits.List([(0, 'PC-1'), (1, 'PC-2'), (2, 'PC-3')])
    selected_segments = _traits.Instance(ds.Factor)
    num_segments = _traits.Int(0)
    # Export buttons
    ev_export_dummified = _traits.Button(
        "Export dummified variables to 'Data sets' tab")
    ev_export_segments = _traits.Button(
        "Export consumer segments to 'Data sets' tab")
    ev_remove_segments = _traits.Button("Remove segments")
    min_std = _traits.Float(0.001)
    L_zero_std = _traits.List()

    @_traits.on_trait_change('ev_remove_segments')
    def _zero(self, obj, name, old, new):
        calc = self.owner.calculations[0].model
        calc.selected_segments.levels = {}

    @_traits.on_trait_change('ev_export_segments')
    def _one(self, obj, name, old, new):
        calc = self.owner.calculations[0].model
        dsy_sd = calc.make_liking_dummy_segmented(calc.selected_segments)
        dsy_sd.display_name += '_segments'
        self.owner.dsc.add(dsy_sd)

    @_traits.on_trait_change('ev_export_dummified')
    def _two(self, obj, name, old, new):
        calc = self.owner.calculations[0].model
        dummy = calc.ds_X
        dummy.display_name += '_dummified'
        self.owner.dsc.add(dummy)

    @_traits.on_trait_change('selected_segments:levels')
    def track_segments(self, obj, name, old, new):
        self.settings.num_segments = len(obj.levels)

    def _get_pca_L(self):
        if self._L_have_zero_std_var():
            raise InComputeable(
                'Matrix has variables with zero variance', self.L_zero_std)
        cpca = PCA(self.ds_L.values, numComp=3, Xstand=False, cvType=["loo"])
        return ra.adapt_oto_pca(cpca, self.ds_L, self.ds_L.display_name)

    def _L_have_zero_std_var(self):
        self.L_zero_std = self._check_zero_std(self.ds_L)
        return bool(self.L_zero_std)

    def _check_zero_std(self, ds):
        zero_std_var = []
        sv = ds.values.std(axis=0)
        dm = sv < self.min_std
        if _np.any(dm):
            vv = _np.array(ds.var_n)
            zero_std_var = list(vv[_np.nonzero(dm)])
        return zero_std_var

    def calc_pls_raw_liking(self):
        n_pc = 2
        dsx = self.ds_X
        dsy = self.ds_Y
        plsr = PLSR(dsx.values, dsy.values,
                    numComp=n_pc, cvType=["loo"],
                    Xstand=self.standardise_x, Ystand=False)
        title = 'PLSR({0} ~ {1})'.format(dsx.display_name, dsy.display_name)
        return ra.adapt_oto_plsr(plsr, dsx, dsy, title)

    def calc_pls_pc_likings(self, pc_sel):
        n_pc = 2
        dsx = self.ds_X
        dsy = self.pca_L.loadings
        dsy.mat = dsy.mat.iloc[:, pc_sel]
        plsr = PLSR(dsx.values, dsy.values,
                    numComp=n_pc, cvType=["loo"],
                    Xstand=self.standardise_x, Ystand=False)
        title = 'PLSR({0} ~ {1})'.format(dsx.display_name, dsy.display_name)
        return ra.adapt_oto_plsr(plsr, dsx, dsy, title)

    def calc_plsr_da(self, segments):
        '''Process:

        Add a row for each segment. Loop through each segment row and set 1
        where we have an index and 0 for the rest.

        Do this via a property? No - then the segments would have to be part
        of the model. Hmmm.

        Add dummy segments to the attribute array as well.
        '''
        if len(segments) < 1:
            # FIXME: Show warning, no segments defined
            return
        dsx = segments.get_combined_levels_subset(self.ds_X, axis=0)
        dsy = self.make_liking_dummy_segmented(segments)
        dsy = dsy.copy(transpose=True)
        n_pc = 2
        plsr = PLSR(dsx.values, dsy.values,
                    numComp=n_pc, cvType=["loo"],
                    Xstand=self.standardise_x, Ystand=False)
        title = 'PLSR-DA({0} ~ {1})'.format(dsx.display_name, dsy.display_name)
        return ra.adapt_oto_plsr(plsr, dsx, dsy, title)

    def make_liking_dummy_segmented(self, segments):
        dsy_sd = segments.get_combined_levels_subset(self.ds_Y, axis=0)
        index = segments.levels.keys()
        columns = segments.get_combined_levels_labels(self.ds_Y, axis=0)
        segs = _pd.DataFrame(0, index=index, columns=columns)
        for lvn, lv in segments.levels.items():
            cols = lv.get_labels(self.ds_Y, 0)
            segs.loc[lvn, cols] = 1
        dsy_sd.mat = segs
        return dsy_sd

    def _get_res(self):
        return None

    def _get_ds_X(self):
        """Get the independent variable X, i.e. the consumer attributes."""
        varn = [str(v) for v in self.settings.dummify_variables]
        dsa = self.ds_A.copy(transpose=False)
        dsx = df.dummify(dsa, varn)
        return dsx

    def _get_ds_Y(self):
        """Get the response variable, i.e. the consumer liking."""
        return self.ds_L.copy(transpose=True)

    def _calc_n_pc_default(self):
        return self.max_pc
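# --- Sketch (not in the original source) of what dummification of categorical
# consumer attributes amounts to, using plain pandas. The project's own
# df.dummify() helper works on DataSet objects; pd.get_dummies() shown here is
# only the conceptual equivalent on a raw DataFrame.
import pandas as _pd

attrs = _pd.DataFrame({'Sex': ['F', 'M', 'F'],
                       'Age group': ['<30', '<30', '30+']})
dummified = _pd.get_dummies(attrs, columns=['Sex', 'Age group'])
print(dummified.columns.tolist())   # one indicator column per category level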
class InelStateEvolution(bu.InteractiveModel):
    name = 'State evolution'

    slider_exp = tr.WeakRef(bu.InteractiveModel)

    t_slider = bu.Float(0)
    t_max = bu.Float(1.001)

    t_arr = tr.DelegatesTo('slider_exp')
    Sig_arr = tr.DelegatesTo('slider_exp')
    Eps_arr = tr.DelegatesTo('slider_exp')
    s_x_t = tr.DelegatesTo('slider_exp')
    s_y_t = tr.DelegatesTo('slider_exp')
    w_t = tr.DelegatesTo('slider_exp')
    iter_t = tr.DelegatesTo('slider_exp')

    ipw_view = bu.View(
        bu.Item('t_max', latex=r't_{\max}', readonly=True),
        time_editor=bu.HistoryEditor(var='t_slider', low=0,
                                     max_var='t_max', n_steps=50)
    )

    def plot_omega_NT(self, ax, **kw):
        s_x_pi_, s_y_pi_, w_pi_, z_, alpha_x_, alpha_y_, \
            omega_T_, omega_N_ = self.Eps_arr.T
        ax.plot(omega_N_, omega_T_, **kw)
        ax.set_xlabel(r'$\omega_\mathrm{N}$')
        ax.set_ylabel(r'$\omega_\mathrm{T}$')

    def plot_Sig_Eps(self, axes):
        ax1, ax11, ax2, ax22, ax3, ax33, ax4 = axes
        colors = ['blue', 'red', 'green', 'black', 'magenta']
        t = self.t_arr
        s_x_pi_, s_y_pi_, w_pi_, z_, alpha_x_, alpha_y_, \
            omega_T_, omega_N_ = self.Eps_arr.T
        tau_x_pi_, tau_y_pi_, sig_pi_, Z_, X_x_, X_y_, \
            Y_T_, Y_N_ = self.Sig_arr.T
        n_step = len(s_x_pi_)
        idx = np.argmax(self.t_slider < self.t_arr)

        # slip path in 2d
        def get_cum_s(s_x, s_y):
            d_s_x, d_s_y = s_x[1:] - s_x[:-1], s_y[1:] - s_y[:-1]
            d_s = np.hstack([0, np.sqrt(d_s_x**2 + d_s_y**2)])
            return cumtrapz(d_s, initial=0)

        s_t = get_cum_s(self.s_x_t, self.s_y_t)
        s_pi_t = get_cum_s(s_x_pi_, s_y_pi_)
        w_t = self.w_t
        w_pi_t = w_pi_
        tau_pi = np.sqrt(tau_x_pi_**2 + tau_y_pi_**2)

        ax1.set_title('stress - displacement')
        ax1.plot(t, tau_pi, '--', color='darkgreen', label=r'$||\tau||$')
        ax1.fill_between(t, tau_pi, 0, color='limegreen', alpha=0.1)
        ax1.plot(t, sig_pi_, '--', color='olivedrab', label=r'$\sigma$')
        ax1.fill_between(t, sig_pi_, 0, color='olivedrab', alpha=0.1)
        ax1.set_ylabel(r'$|| \tau ||, \sigma$')
        ax1.set_xlabel('$t$')
        ax1.plot(t[idx], 0, marker='H', color='red')
        ax1.legend()
        ax11.plot(t, s_t, color='darkgreen', label=r'$||s||$')
        ax11.plot(t, s_pi_t, '--', color='orange', label=r'$||s^\pi||$')
        ax11.plot(t, w_t, color='olivedrab', label=r'$w$')
        ax11.plot(t, w_pi_t, '--', color='chocolate', label=r'$w^\pi$')
        ax11.set_ylabel(r'$|| s ||, w$')
        ax11.legend()
        mpl_align_yaxis(ax1, 0, ax11, 0)

        ax2.set_title('energy release rate - damage')
        ax2.plot(t, Y_N_, '--', color='darkgray', label=r'$Y_N$')
        ax2.fill_between(t, Y_N_, 0, color='darkgray', alpha=0.15)
        ax2.plot(t, Y_T_, '--', color='darkslategray', label=r'$Y_T$')
        ax2.fill_between(t, Y_T_, 0, color='darkslategray', alpha=0.05)
        ax2.set_xlabel('$t$')
        ax2.set_ylabel('$Y$')
        ax2.plot(t[idx], 0, marker='H', color='red')
        ax2.legend()
        ax22.plot(t, omega_N_, color='darkgray', label=r'$\omega_N$')
        ax22.plot(t, omega_T_, color='darkslategray', label=r'$\omega_T$')
        ax22.set_ylim(ymax=1)
        ax22.set_ylabel(r'$\omega$')
        ax22.legend()

        ax3.set_title('hardening force - displacement')
        alpha_t = np.sqrt(alpha_x_**2 + alpha_y_**2)
        X_t = np.sqrt(X_x_**2 + X_y_**2)
        ax3.plot(t, Z_, '--', color='darkcyan', label=r'$Z$')
        ax3.fill_between(t, Z_, 0, color='darkcyan', alpha=0.05)
        ax3.plot(t, X_t, '--', color='darkslateblue', label=r'$X$')
        ax3.fill_between(t, X_t, 0, color='darkslateblue', alpha=0.05)
        ax3.set_ylabel(r'$Z, X$')
        ax3.set_xlabel('$t$')
        ax3.plot(t[idx], 0, marker='H', color='red')
        ax3.legend()
        ax33.plot(t, z_, color='darkcyan', label=r'$z$')
        ax33.plot(t, alpha_t, color='darkslateblue', label=r'$\alpha$')
        ax33.set_ylabel(r'$z, \alpha$')
        ax33.legend(loc='lower left')

        slide_model = self.slider_exp.slide_model
        slide_model.plot_f_state(ax4, self.Eps_arr[idx, :],
                                 self.Sig_arr[idx, :])

    @staticmethod
    def subplots(fig):
        ((ax1, ax2), (ax3, ax4)) = fig.subplots(2, 2)
        ax11 = ax1.twinx()
        ax22 = ax2.twinx()
        ax33 = ax3.twinx()
        return ax1, ax11, ax2, ax22, ax3, ax33, ax4

    def update_plot(self, axes):
        self.plot_Sig_Eps(axes)
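# --- Sketch (not in the original source) checking the cumulative slip-path
# length helper used in plot_Sig_Eps(): for a straight diagonal path the
# accumulated length approaches sqrt(2).
import numpy as np
from scipy.integrate import cumtrapz

def get_cum_s(s_x, s_y):
    d_s_x, d_s_y = s_x[1:] - s_x[:-1], s_y[1:] - s_y[:-1]
    d_s = np.hstack([0, np.sqrt(d_s_x**2 + d_s_y**2)])
    return cumtrapz(d_s, initial=0)

s_x = np.linspace(0, 1, 101)
s_y = np.linspace(0, 1, 101)
print(get_cum_s(s_x, s_y)[-1])   # close to sqrt(2) ~ 1.414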