def _save_issue_instance_precondition_assoc(self, bulk_saver: BulkSaver) -> None:
    """Register every (issue instance, precondition) pairing with the saver."""
    assoc = self._precondition_issue_instance_assoc
    for precondition_id, instance_ids in assoc.items():
        # Resolve the precondition once per group rather than per instance.
        precondition = self._preconditions[precondition_id]
        for issue_instance_id in instance_ids:
            bulk_saver.add_issue_instance_precondition_assoc(
                self._issue_instances[issue_instance_id], precondition
            )
def _save_trace_frame_leaf_assoc(self, bulk_saver: BulkSaver) -> None:
    """Register each trace frame's leaves (with their depths) with the saver."""
    for frame_id, leaves in self._trace_frame_leaf_assoc.items():
        frame = self._trace_frames[frame_id]
        for leaf_id, depth in leaves:
            bulk_saver.add_trace_frame_leaf_assoc(
                self._shared_texts[leaf_id],
                frame,
                depth,
            )
def _save_issue_instance_shared_text_assoc(self, bulk_saver: BulkSaver) -> None:
    """Register every (issue instance, shared text) pairing with the saver."""
    assoc = self._shared_text_issue_instance_assoc
    for text_id, instance_ids in assoc.items():
        shared_text = self._shared_texts[text_id]
        for instance_id in instance_ids:
            bulk_saver.add_issue_instance_shared_text_assoc(
                self._issue_instances[instance_id],
                shared_text,
            )
def _save_issue_instance_trace_frame_assoc(self, bulk_saver: BulkSaver) -> None:
    """Register every (issue instance, trace frame) pairing with the saver."""
    assoc = self._trace_frame_issue_instance_assoc
    for frame_id, instance_ids in assoc.items():
        frame = self._trace_frames[frame_id]
        for instance_id in instance_ids:
            bulk_saver.add_issue_instance_trace_frame_assoc(
                self._issue_instances[instance_id],
                frame,
            )
def __init__(
    self,
    database: DB,
    use_lock: bool = False,
    primary_key_generator: Optional[PrimaryKeyGenerator] = None,
):
    """Capture the target database and set up primary-key generation and
    the bulk saver used to write records.
    """
    self.database = database
    self.dbname = database.dbname
    self.use_lock = use_lock
    # Fall back to a fresh generator when the caller supplies none.
    self.primary_key_generator = primary_key_generator or PrimaryKeyGenerator()
    self.bulk_saver = BulkSaver(self.primary_key_generator)
def _save_postcondition_source_assoc(self, bulk_saver: BulkSaver) -> None:
    """Register each postcondition's sources (with depths) with the saver."""
    for postcondition_id, entries in self._postcondition_source_assoc.items():
        postcondition = self._postconditions[postcondition_id]
        for source_id, depth in entries:
            bulk_saver.add_postcondition_source_assoc(
                self._sources[source_id], postcondition, depth
            )
def _save_precondition_sink_assoc(self, bulk_saver: BulkSaver) -> None:
    """Register each precondition's sinks (with depths) with the saver."""
    for precondition_id, entries in self._precondition_sink_assoc.items():
        precondition = self._preconditions[precondition_id]
        for sink_id, depth in entries:
            bulk_saver.add_precondition_sink_assoc(
                self._sinks[sink_id], precondition, depth
            )
def update_bulk_saver(self, bulk_saver: BulkSaver) -> None:
    """Load every record and association held by this graph into the saver.

    Record collections are added first, then the cross-record associations,
    in the same order as before.
    """
    record_maps = (
        self._issues,
        self._issue_instances,
        self._preconditions,
        self._postconditions,
        self._trace_frames,
        self._sources,
        self._sinks,
        self._issue_instance_fix_info,
        self._trace_annotations,
        self._shared_texts,
    )
    for record_map in record_maps:
        bulk_saver.add_all(list(record_map.values()))

    self._save_issue_instance_postcondition_assoc(bulk_saver)
    self._save_issue_instance_precondition_assoc(bulk_saver)
    self._save_issue_instance_trace_frame_assoc(bulk_saver)
    self._save_precondition_sink_assoc(bulk_saver)
    self._save_postcondition_source_assoc(bulk_saver)
    self._save_trace_frame_leaf_assoc(bulk_saver)
    self._save_issue_instance_shared_text_assoc(bulk_saver)
class DatabaseSaver(PipelineStep[TraceGraph, RunSummary]):
    """Pipeline step that persists a TraceGraph to the database in bulk and
    produces a RunSummary for the saved run."""

    # ORM model representing an analysis run; subclasses may override it
    # (see the filter_by query in _save).
    RUN_MODEL = Run

    def __init__(
        self,
        database: DB,
        use_lock: bool = False,
        primary_key_generator: Optional[PrimaryKeyGenerator] = None,
    ):
        self.use_lock = use_lock
        self.dbname = database.dbname
        self.database = database
        # Fall back to a fresh generator when the caller supplies none.
        self.primary_key_generator = primary_key_generator or PrimaryKeyGenerator()
        self.bulk_saver = BulkSaver(self.primary_key_generator)
        # Annotation-only declaration; assigned in run().
        self.summary: Summary

    @disable_gc
    @log_time
    def run(self, input: TraceGraph, summary: Summary) -> Tuple[RunSummary, Summary]:
        """Save `input` to the database; returns the run summary and the
        (mutated) pipeline summary."""
        self.graph = input
        self.summary = summary
        self._prep_save()
        return self._save(), self.summary

    def _prep_save(self):
        """ Prepares the bulk saver to load the trace graph info into the
        database.
        """
        log.info("Preparing bulk save.")
        self.graph.update_bulk_saver(self.bulk_saver)
        log.info(
            "Dropped %d unused preconditions, %d are missing",
            sum(len(v) for v in self.summary["precondition_entries"].values()),
            len(self.summary["missing_preconditions"]),
        )
        log.info(
            "Dropped %d unused postconditions, %d are missing",
            sum(len(v) for v in self.summary["postcondition_entries"].values()),
            len(self.summary["missing_postconditions"]),
        )
        # NOTE(review): only "postcondition_entries" is released here while
        # "precondition_entries" stays in the summary — confirm whether this
        # asymmetry is intentional.
        del self.summary["postcondition_entries"]

    def _save(self) -> RunSummary:
        """ Saves bulk saver's info into the databases in bulk.
        """
        assert self.summary["run"] is not None, "Must have called process before"
        log.info(
            "Saving %d issues, %d preconditions, %d postconditions, "
            "%d trace frames, %d trace annotations",
            len(self.bulk_saver.get_items_to_add(Issue)),
            len(self.bulk_saver.get_items_to_add(Precondition)),
            len(self.bulk_saver.get_items_to_add(Postcondition)),
            len(self.bulk_saver.get_items_to_add(TraceFrame)),
            len(self.bulk_saver.get_items_to_add(TraceFrameAnnotation)),
        )
        # Reserve a primary key for the run and commit the run row first, so
        # the bulk-saved records can reference a persisted run id.
        with self.database.make_session() as session:
            pk_gen = self.primary_key_generator.reserve(session, [Run], use_lock=self.use_lock)
            self.summary["run"].id.resolve(id=pk_gen.get(Run), is_new=True)
            session.add(self.summary["run"])
            session.commit()

            run_id = self.summary["run"].id.resolved()
            self.summary["run"] = None  # Invalidate it

        self.bulk_saver.save_all(self.database, self.use_lock)

        # Now that the run is finished, fetch it from the DB again and set its
        # status to FINISHED.
        with self.database.make_session() as session:
            run = session.query(self.RUN_MODEL).filter_by(id=run_id).one()
            run.status = RunStatus.FINISHED
            session.add(run)
            session.commit()

            # Build the summary while the session is still open so the run
            # row's attributes are accessible.
            run_summary = run.get_summary()

        run_summary.num_invisible_issues = 0
        run_summary.num_missing_preconditions = len(self.summary["missing_preconditions"])
        run_summary.num_missing_postconditions = len(self.summary["missing_postconditions"])
        return run_summary