def load_from_db(cls, build_id):
    """
    Given a build_id, fetch all the stored information from the database to reconstruct
    a Build object to represent that build.

    :param build_id: The id of the build to recreate.
    :return: The reconstructed Build object, or None if no build with that id exists.
    """
    with Connection.get() as session:
        build_schema = session.query(BuildSchema).filter(BuildSchema.build_id == build_id).first()

        # If the build isn't in the database at all, bail out before running the remaining
        # (now pointless) queries.
        if not build_schema:
            return None

        failed_artifact_directories_schema = session.query(FailedArtifactDirectoriesSchema) \
            .filter(FailedArtifactDirectoriesSchema.build_id == build_id) \
            .all()
        failed_subjob_atom_pairs_schema = session.query(FailedSubjobAtomPairsSchema) \
            .filter(FailedSubjobAtomPairsSchema.build_id == build_id) \
            .all()
        atoms_schema = session.query(AtomsSchema).filter(AtomsSchema.build_id == build_id).all()
        subjobs_schema = session.query(SubjobsSchema).filter(SubjobsSchema.build_id == build_id).all()

        build_parameters = json.loads(build_schema.build_parameters)

        # Generate a BuildRequest object with our query response
        build_request = BuildRequest(build_parameters)

        # Create initial Build object; we will be altering the state of this as we get more data
        build = Build(build_request)
        build._build_id = build_id

        # Manually generate ProjectType object for build and create a `job_config` since this
        # is usually done in `prepare()`
        build.generate_project_type()
        job_config = build.project_type.job_config()

        # Manually update build data
        build._artifacts_tar_file = build_schema.artifacts_tar_file
        build._artifacts_zip_file = build_schema.artifacts_zip_file
        build._error_message = build_schema.error_message
        build._postbuild_tasks_are_finished = bool(int(build_schema.postbuild_tasks_are_finished))
        build.setup_failures = build_schema.setup_failures
        build._timing_file_path = build_schema.timing_file_path

        # Manually set the state machine timestamps
        build._state_machine._transition_timestamps = {
            BuildState.QUEUED: build_schema.queued_ts,
            BuildState.FINISHED: build_schema.finished_ts,
            BuildState.PREPARED: build_schema.prepared_ts,
            BuildState.PREPARING: build_schema.preparing_ts,
            BuildState.ERROR: build_schema.error_ts,
            BuildState.CANCELED: build_schema.canceled_ts,
            BuildState.BUILDING: build_schema.building_ts
        }
        build._state_machine._fsm.current = BuildState[build_schema.state]

        # Rebuild the BuildArtifact, including its record of failures
        build_artifact = BuildArtifact(build_schema.build_artifact_dir)
        build_artifact._failed_artifact_directories = [
            directory.failed_artifact_directory
            for directory in failed_artifact_directories_schema
        ]
        build_artifact._q_failed_subjob_atom_pairs = [
            (pair.subjob_id, pair.atom_id)
            for pair in failed_subjob_atom_pairs_schema
        ]
        build._build_artifact = build_artifact

        # Group the persisted atoms by the subjob they belong to
        atoms_by_subjob_id = {}
        for atom in atoms_schema:
            atoms_by_subjob_id.setdefault(atom.subjob_id, []).append(Atom(
                atom.command_string,
                atom.expected_time,
                atom.actual_time,
                atom.exit_code,
                atom.state,
                atom.atom_id,
                atom.subjob_id
            ))

        subjobs = OrderedDict()
        for subjob in subjobs_schema:
            # A subjob may have no atom rows persisted; default to an empty list rather
            # than raising KeyError.
            atoms = atoms_by_subjob_id.get(subjob.subjob_id, [])
            # Add atoms after subjob is created so we don't alter their state on initialization
            subjob_to_add = Subjob(build_id, subjob.subjob_id, build.project_type, job_config, [])
            subjob_to_add._atoms = atoms
            subjob_to_add.completed = subjob.completed
            subjobs[subjob.subjob_id] = subjob_to_add
        build._all_subjobs_by_id = subjobs

        # Place subjobs into correct queues within the build
        build._unstarted_subjobs = Queue(maxsize=len(subjobs))
        build._finished_subjobs = Queue(maxsize=len(subjobs))
        for subjob in subjobs.values():
            if subjob.completed:
                build._finished_subjobs.put(subjob)
            else:
                build._unstarted_subjobs.put(subjob)

        return build