def insert_summary_into_db(self, summary_data: dict):
    """Persist this task's component summary (upsert, best-effort).

    Resolves the job-specific dynamic summary table (creating it if
    absent), then updates the existing row keyed by
    (job, component, role, party, task, task_version) or inserts a new
    one. The summary payload is stored base64-serialized. Any failure is
    logged via the job's schedule logger and swallowed, so callers are
    never interrupted by summary persistence problems.

    :param summary_data: summary payload to serialize and store.
    """
    try:
        summary_model = self.get_dynamic_db_model(ComponentSummary, self.job_id)
        DB.create_tables([summary_model])
        summary_obj = summary_model.get_or_none(
            summary_model.f_job_id == self.job_id,
            summary_model.f_component_name == self.component_name,
            summary_model.f_role == self.role,
            summary_model.f_party_id == self.party_id,
            summary_model.f_task_id == self.task_id,
            summary_model.f_task_version == self.task_version)
        if summary_obj:
            summary_obj.f_summary = serialize_b64(summary_data, to_str=True)
            summary_obj.f_update_time = current_timestamp()
            summary_obj.save()
        else:
            # Reuse the dynamic model resolved above instead of rebuilding
            # it with a second get_dynamic_db_model() call.
            summary_model.create(f_job_id=self.job_id,
                                 f_component_name=self.component_name,
                                 f_role=self.role,
                                 f_party_id=self.party_id,
                                 f_task_id=self.task_id,
                                 f_task_version=self.task_version,
                                 f_summary=serialize_b64(summary_data, to_str=True),
                                 f_create_time=current_timestamp())
    except Exception as e:
        schedule_logger(self.job_id).exception(
            "An exception where querying summary job id: {} "
            "component name: {} to database:\n{}".format(
                self.job_id, self.component_name, e))
def bulk_insert_model_data(self, model, data_source):
    """Bulk-insert *data_source* rows into *model*'s table in batches.

    Opens a database connection context, ensures the table exists, then
    inserts the rows in atomic batches. Batch size is reduced for a
    local (SQLite) database, which cannot absorb large multi-row
    inserts.

    :param model: peewee model class to insert into.
    :param data_source: sequence of row dicts/tuples to insert.
    :return: number of rows inserted, or 0 if the insert failed.
    """
    with DB.connection_context():
        try:
            try:
                # Consistent with bulk_insert_into_db: a table-creation
                # failure (e.g. table already present) is logged but must
                # not abort the insert itself.
                DB.create_tables([model])
            except Exception as e:
                schedule_logger(self.job_id).exception(e)
            # Local SQLite needs much smaller batches than a server DB.
            batch_size = 50 if RuntimeConfig.USE_LOCAL_DATABASE else 1000
            for i in range(0, len(data_source), batch_size):
                # Each batch commits atomically; a failure rolls back only
                # that batch and aborts the whole operation below.
                with DB.atomic():
                    model.insert_many(data_source[i:i+batch_size]).execute()
            return len(data_source)
        except Exception as e:
            schedule_logger(self.job_id).exception(e)
            return 0
def bulk_insert_into_db(model, data_source, logger):
    """Insert *data_source* rows into *model*'s table in atomic batches.

    Table creation is attempted first; if it fails (for instance because
    the table already exists) the error is logged and the insert still
    proceeds. A local (SQLite) database gets a much smaller batch size
    than a server database.

    :param model: peewee model class to insert into.
    :param data_source: sequence of row dicts/tuples to insert.
    :param logger: logger used to record any exception.
    :return: number of rows inserted, or 0 if any batch failed.
    """
    try:
        try:
            DB.create_tables([model])
        except Exception as e:
            logger.exception(e)
        chunk = 50 if RuntimeConfig.USE_LOCAL_DATABASE else 1000
        total = len(data_source)
        for start in range(0, total, chunk):
            batch = data_source[start:start + chunk]
            with DB.atomic():
                model.insert_many(batch).execute()
        return total
    except Exception as e:
        logger.exception(e)
        return 0