def analysis_estimation(self, worker: Worker, options: gui.estimation.Options) -> EstimationResult:
    """Run model estimation over all subjects in fixed-size batches.

    Subjects are sent to the core in chunks so that progress can be
    reported incrementally; the worker's interrupt is wired to shut the
    core down, which aborts the analysis.
    """
    CHUNK_SIZE = 64
    packed: List[estimation_result.PackedResponse] = []
    with Core() as core:
        # Allow the GUI to cancel the run by shutting the core down.
        worker.interrupt = lambda: core.shutdown()
        worker.set_work_size(len(self.subjects))
        for start in range(0, len(self.subjects), CHUNK_SIZE):
            batch = estimation_result.Request(
                subjects=self.subjects[start:start + CHUNK_SIZE],
                models=options.models,
                disable_parallelism=options.disable_parallelism,
            )
            packed.extend(
                core.call(
                    'estimation',
                    estimation_result.RequestC,
                    estimation_result.PackedResponsesC,
                    batch,
                ))
            worker.set_progress(len(packed))
    result = EstimationResult(
        self.name + ' (model est.)',
        self.alternatives,
    )
    result.subjects = packed
    return result
def analysis_integrity_check(self, worker: Worker, _config: None) -> dataset.AnalysisResult:
    """Check every subject for integrity issues.

    Returns an IntegrityCheck dataset containing only the subjects that
    have issues, or an informational message box when none were found.
    """
    worker.set_work_size(len(self.subjects))
    flagged: List[dataset.integrity_check.Subject] = []
    with Core() as core:
        # Cancellation hook: shutting the core down aborts the run.
        worker.interrupt = lambda: core.shutdown()
        for done, subject in enumerate(self.subjects, start=1):
            checked = core.call(
                'integrity-check',
                PackedSubjectC,
                dataset.integrity_check.SubjectC,
                subject,
            )
            # Only subjects with at least one issue are kept.
            if checked.issues:
                flagged.append(checked)
            worker.set_progress(done)
    if not flagged:
        return dataset.ShowMessageBox(
            type=dataset.MessageBoxType.INFORMATION,
            title='Integrity check',
            message='No integrity issues found.',
        )
    result = dataset.integrity_check.IntegrityCheck(
        self.name + ' (integrity check)',
        self.alternatives,
    )
    result.subjects = flagged
    return result
def analysis_summary_stats(self, worker: Worker, _config: None) -> ExperimentStats:
    """Compute summary statistics for every subject via the core."""
    worker.set_work_size(len(self.subjects))
    stats = []
    with Core() as core:
        # Cancellation hook: shutting the core down aborts the run.
        worker.interrupt = lambda: core.shutdown()
        for done, subject in enumerate(self.subjects, start=1):
            stats.append(
                core.call(
                    "summary",
                    PackedSubjectC,
                    dataset.experiment_stats.SubjectC,
                    subject,
                ))
            worker.set_progress(done)
    result = ExperimentStats(
        name=self.name + ' (info)',
        alternatives=self.alternatives,
    )
    result.subjects = stats
    return result
def analysis_tuple_intrans_alts(self, worker: Worker, _config: None) -> TupleIntransAlts:
    """Find tuples of alternatives chosen inconsistently, per subject."""
    worker.set_work_size(len(self.subjects))
    collected = []
    with Core() as core:
        # Cancellation hook: shutting the core down aborts the run.
        worker.interrupt = lambda: core.shutdown()
        for done, subject in enumerate(self.subjects, start=1):
            collected.append(
                core.call(
                    'tuple-intrans-alts',
                    PackedSubjectC,
                    dataset.tuple_intrans_alts.SubjectC,
                    subject,
                ))
            worker.set_progress(done)
    result = TupleIntransAlts(
        self.name + ' (inconsistent alternative tuples)',
        self.alternatives,
    )
    result.subjects = collected
    return result
def analysis_consistency(self, worker: Worker, _config: None) -> ConsistencyResult:
    """Run the consistency analysis for every subject."""
    raw_rows = []
    with Core() as core:
        # Cancellation hook: shutting the core down aborts the run.
        worker.interrupt = lambda: core.shutdown()
        worker.set_work_size(len(self.subjects))
        for done, subject in enumerate(self.subjects, start=1):
            raw_rows.append(
                core.call(
                    'consistency',
                    PackedSubjectC,
                    dataset.consistency_result.SubjectRawC,
                    subject,
                ))
            worker.set_progress(done)
    result = ConsistencyResult(
        self.name + ' (consistency)',
        self.alternatives,
    )
    # The result dataset knows how to unpack the raw core rows itself.
    result.load_from_core(raw_rows)
    return result
def analysis_simulation(
    self,
    worker: Worker,
    options: 'gui.copycat_simulation.Options',
) -> 'ExperimentalData':
    """Generate a simulated dataset of copycat subjects.

    For each real subject, `options.multiplicity` synthetic subjects are
    produced by the core; menus mimic the original subject (Copycat
    generator) while choices follow `options.gen_choices`.

    Progress updates are throttled to one per 1024 simulated subjects to
    keep GUI traffic low on large runs.
    """
    subjects: List[PackedSubject] = []
    with Core() as core:
        # Cancellation hook: shutting the core down aborts the run.
        worker.interrupt = lambda: core.shutdown()
        worker.set_work_size(len(self.subjects) * options.multiplicity)
        position = 0
        for subject_packed in self.subjects:
            for j in range(options.multiplicity):
                response = simulation.run(
                    core,
                    simulation.Request(
                        name='random%d' % (j + 1),
                        alternatives=self.alternatives,  # we don't use subject.alternatives here
                        gen_menus=simulation.GenMenus(
                            generator=simulation.Copycat(subject_packed),
                            defaults=False,  # this will be ignored, anyway
                        ),
                        gen_choices=options.gen_choices,
                        preserve_deferrals=options.preserve_deferrals,
                    ))
                subjects.append(response.subject_packed)
                position += 1
                # Throttled progress: only every 1024th subject.
                if position % 1024 == 0:
                    worker.set_progress(position)
        # BUGFIX: report the final position unconditionally, otherwise the
        # progress bar never reaches the declared work size unless the
        # total happens to be a multiple of 1024 (and never moves at all
        # for runs smaller than 1024 subjects).
        worker.set_progress(position)
    ds = ExperimentalData(name=options.name, alternatives=self.alternatives)
    ds.subjects = subjects
    ds.observ_count = options.multiplicity * self.observ_count
    return ds
def analysis_consistency(self, worker: Worker, _config: None) -> BudgetaryConsistency:
    """Run the budgetary consistency analysis for every subject."""
    responses = []
    with Core() as core:
        # Cancellation hook: shutting the core down aborts the run.
        worker.interrupt = lambda: core.shutdown()
        worker.set_work_size(len(self.subjects))
        for done, subject in enumerate(self.subjects, start=1):
            responses.append(
                core.call(
                    'budgetary-consistency',
                    SubjectC,
                    dataset.budgetary_consistency.SubjectC,
                    subject,
                ))
            worker.set_progress(done)
    return BudgetaryConsistency(
        self.name + ' (consistency)',
        self.alternatives,
        responses,
    )