def _make_entire_workflow(workflow_id):
    """Build a skeleton entire-workflow message containing just the
    workflow id and a 'root' family proxy."""
    workflow = PbWorkflow()
    workflow.id = workflow_id
    entire_workflow = PbEntireWorkflow()
    entire_workflow.workflow.CopyFrom(workflow)
    root_family = PbFamilyProxy()
    root_family.name = 'root'
    entire_workflow.family_proxies.extend([root_family])
    return entire_workflow

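# Illustrative sketch (not part of the data store manager): shows the shape
# of the message built by _make_entire_workflow() above. The owner/suite
# names are made up for the example; ID_DELIM comes from module scope.
def _example_entire_workflow():
    workflow_id = f'user{ID_DELIM}suite'
    entire = _make_entire_workflow(workflow_id)
    # The wrapped workflow carries the id, and a single 'root' family proxy
    # is pre-populated as the top of the family hierarchy.
    assert entire.workflow.id == workflow_id
    assert entire.family_proxies[0].name == 'root'
    return entire
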
def __init__(self, schd):
    self.schd = schd
    self.workflow_id = f'{self.schd.owner}{ID_DELIM}{self.schd.suite}'
    self.ancestors = {}
    self.descendants = {}
    self.parents = {}
    self.pool_points = set()
    self.max_point = None
    self.min_point = None
    self.edge_points = {}
    self.cycle_states = {}
    # Managed data types
    self.data = {
        self.workflow_id: {
            EDGES: {},
            FAMILIES: {},
            FAMILY_PROXIES: {},
            JOBS: {},
            TASKS: {},
            TASK_PROXIES: {},
            WORKFLOW: PbWorkflow(),
        }
    }
    self.deltas = {
        EDGES: EDeltas(),
        FAMILIES: FDeltas(),
        FAMILY_PROXIES: FPDeltas(),
        JOBS: JDeltas(),
        TASKS: TDeltas(),
        TASK_PROXIES: TPDeltas(),
        WORKFLOW: PbWorkflow(),
    }
    self.updates = {
        EDGES: {},
        FAMILIES: {},
        FAMILY_PROXIES: {},
        JOBS: {},
        TASKS: {},
        TASK_PROXIES: {},
    }
    self.updates_pending = False

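# Illustrative sketch: the managed store is keyed by workflow id first, then
# by element type, so a consumer reaches the workflow message and a task
# proxy as below. 'mgr' is an instance built by the __init__ above and
# 'proxy_id' is a hypothetical task-proxy id.
def _example_store_lookup(mgr, proxy_id):
    workflow_msg = mgr.data[mgr.workflow_id][WORKFLOW]
    task_proxy = mgr.data[mgr.workflow_id][TASK_PROXIES].get(proxy_id)
    return workflow_msg, task_proxy
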
def apply_delta(key, delta, data):
    """Apply delta to specific data-store workflow and type."""
    # Merge in updated fields
    if key == WORKFLOW:
        new_data = PbWorkflow()
        new_data.CopyFrom(data[key])
        new_data.ClearField('state_totals')
        new_data.ClearField('states')
        new_data.MergeFrom(delta)
        # Fields set to their empty/default value aren't carried by the
        # delta, so reset explicitly.
        if not delta.is_held_total:
            new_data.is_held_total = 0
        # Replace the whole message for a thread-safe update.
        data[key] = new_data
        return
    for element in delta.deltas:
        if key in (TASK_PROXIES, FAMILY_PROXIES) and element.id in data[key]:
            # Repeated fields cannot be directly assigned, so clear first.
            if hasattr(element, 'prerequisites') and element.prerequisites:
                del data[key][element.id].prerequisites[:]
            # Fields set to their empty/default value aren't carried by the
            # delta, so reset explicitly.
            if not element.is_held:
                data[key][element.id].is_held = False
        data[key].setdefault(
            element.id, MESSAGE_MAP[key]()).MergeFrom(element)
    # Prune data elements by id
    for del_id in delta.pruned:
        if del_id not in data[key]:
            continue
        if key == TASK_PROXIES:
            data[TASKS][data[key][del_id].task].proxies.remove(del_id)
            getattr(data[WORKFLOW], key).remove(del_id)
        elif key == FAMILY_PROXIES:
            data[FAMILIES][data[key][del_id].family].proxies.remove(del_id)
            getattr(data[WORKFLOW], key).remove(del_id)
        elif key == EDGES:
            getattr(data[WORKFLOW], key).edges.remove(del_id)
        del data[key][del_id]

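# Illustrative sketch of the WORKFLOW branch above: repeated and map fields
# ('states', 'state_totals') are merged rather than replaced by MergeFrom,
# so the copy clears them first and the delta supplies them wholesale, and
# scalar fields dropped by protobuf when empty (is_held_total) are reset
# explicitly. 'data' is a per-workflow store dict shaped like DATA_TEMPLATE;
# the field value here is made up.
def _example_apply_workflow_delta(data):
    w_delta = PbWorkflow()
    w_delta.is_held_total = 2
    apply_delta(WORKFLOW, w_delta, data)
    # The store now holds a fresh PbWorkflow with the delta merged in.
    return data[WORKFLOW].is_held_total
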
MESSAGE_MAP = {
    EDGES: PbEdge,
    FAMILIES: PbFamily,
    FAMILY_PROXIES: PbFamilyProxy,
    JOBS: PbJob,
    TASKS: PbTask,
    TASK_PROXIES: PbTaskProxy,
    WORKFLOW: PbWorkflow,
}

DATA_TEMPLATE = {
    EDGES: {},
    FAMILIES: {},
    FAMILY_PROXIES: {},
    JOBS: {},
    TASKS: {},
    TASK_PROXIES: {},
    WORKFLOW: PbWorkflow(),
}

DELTAS_MAP = {
    EDGES: EDeltas,
    FAMILIES: FDeltas,
    FAMILY_PROXIES: FPDeltas,
    JOBS: JDeltas,
    TASKS: TDeltas,
    TASK_PROXIES: TPDeltas,
    WORKFLOW: WDeltas,
    ALL_DELTAS: AllDeltas,
}

DELTA_FIELDS = {DELTA_ADDED, DELTA_UPDATED, DELTA_PRUNED}

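# Illustrative sketch tying the maps above to apply_delta(): a fresh
# per-workflow store is a deep copy of DATA_TEMPLATE, deltas are built from
# DELTAS_MAP, and new elements are merged in as MESSAGE_MAP instances. The
# task-proxy id components below are made up for the example.
def _example_apply_task_proxy_delta():
    from copy import deepcopy
    data = deepcopy(DATA_TEMPLATE)
    tp_delta = DELTAS_MAP[TASK_PROXIES]()
    tp_id = f'user{ID_DELIM}suite{ID_DELIM}20200101T00{ID_DELIM}foo'
    tp_delta.deltas.add(id=tp_id)
    apply_delta(TASK_PROXIES, tp_delta, data)
    assert tp_id in data[TASK_PROXIES]
    return data
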
def generate_graph_elements(self, start_point=None, stop_point=None):
    """Generate edges and [ghost] nodes (family and task proxy elements).

    Args:
        start_point (cylc.flow.cycling.PointBase):
            Edge generation start point.
        stop_point (cylc.flow.cycling.PointBase):
            Edge generation stop point.

    """
    if not self.pool_points:
        return
    config = self.schd.config
    # Timestamp used to stamp newly created elements (stdlib time.time()).
    update_time = time()
    tasks = self.data[self.workflow_id][TASKS]
    if not tasks:
        tasks = self.updates[TASKS]
    task_proxies = self.data[self.workflow_id][TASK_PROXIES]
    if start_point is None:
        start_point = min(self.pool_points)
    if stop_point is None:
        stop_point = max(self.pool_points)
    # Used for generating family [ghost] nodes
    new_points = set()
    # Generate ungrouped edges
    for edge in config.get_graph_edges(start_point, stop_point):
        # Reference or create edge source & target nodes/proxies
        s_node = edge[0]
        t_node = edge[1]
        if s_node is None:
            continue
        # Is the source cycle point in the task pool?
        s_name, s_point = TaskID.split(s_node)
        s_point_cls = get_point(s_point)
        s_pool_point = False
        s_valid = TaskID.is_valid_id(s_node)
        if s_valid:
            s_pool_point = s_point_cls in self.pool_points
        # Is the target cycle point in the task pool?
        t_pool_point = False
        t_valid = t_node and TaskID.is_valid_id(t_node)
        if t_valid:
            t_name, t_point = TaskID.split(t_node)
            t_point_cls = get_point(t_point)
            t_pool_point = t_point_cls in self.pool_points
        # Proceed only if either the source or target cycle point
        # is in the task pool.
        if not s_pool_point and not t_pool_point:
            continue
        # If source/target is valid add/create the corresponding items.
        # TODO: if xtrigger is suite_state create remote ID
        source_id = (
            f'{self.workflow_id}{ID_DELIM}{s_point}{ID_DELIM}{s_name}')
        if s_valid:
            s_task_id = f'{self.workflow_id}{ID_DELIM}{s_name}'
            new_points.add(s_point)
            # Add source points for pruning.
            self.edge_points.setdefault(s_point_cls, set())
            if (source_id not in task_proxies
                    and source_id not in self.updates[TASK_PROXIES]):
                self.updates[TASK_PROXIES][source_id] = (
                    self.generate_ghost_task(s_node))
                getattr(self.deltas[WORKFLOW], TASK_PROXIES).append(
                    source_id)
            if (source_id not in tasks[s_task_id].proxies
                    and source_id not in self.updates[TASKS].get(
                        s_task_id, PbTask()).proxies):
                self.updates[TASKS].setdefault(
                    s_task_id,
                    PbTask(
                        stamp=f'{s_task_id}@{update_time}',
                        id=s_task_id,
                    )).proxies.append(source_id)
        # Add valid source before checking for no target,
        # as source may be an isolate (hence no edges).
        # At present targets can't be xtriggers.
        if t_valid:
            target_id = (
                f'{self.workflow_id}{ID_DELIM}{t_point}{ID_DELIM}{t_name}')
            t_task_id = f'{self.workflow_id}{ID_DELIM}{t_name}'
            new_points.add(t_point)
            # Add target points to associated source points for pruning.
            self.edge_points.setdefault(s_point_cls, set())
            self.edge_points[s_point_cls].add(t_point_cls)
            if (target_id not in task_proxies
                    and target_id not in self.updates[TASK_PROXIES]):
                self.updates[TASK_PROXIES][target_id] = (
                    self.generate_ghost_task(t_node))
                getattr(self.deltas[WORKFLOW], TASK_PROXIES).append(
                    target_id)
            if (target_id not in tasks[t_task_id].proxies
                    and target_id not in self.updates[TASKS].get(
                        t_task_id, PbTask()).proxies):
                self.updates[TASKS].setdefault(
                    t_task_id,
                    PbTask(
                        stamp=f'{t_task_id}@{update_time}',
                        id=t_task_id,
                    )).proxies.append(target_id)
            # Initiate edge element.
            e_id = (
                f'{self.workflow_id}{ID_DELIM}{s_node}{ID_DELIM}{t_node}')
            self.updates[EDGES][e_id] = PbEdge(
                id=e_id,
                suicide=edge[3],
                cond=edge[4],
                source=source_id,
                target=target_id,
            )
            # Add edge id to node field for resolver reference
            self.updates[TASK_PROXIES].setdefault(
                target_id,
                PbTaskProxy(id=target_id)).edges.append(e_id)
            if s_valid:
                self.updates[TASK_PROXIES].setdefault(
                    source_id,
                    PbTaskProxy(id=source_id)).edges.append(e_id)

    getattr(
        self.deltas.setdefault(WORKFLOW, PbWorkflow()),
        EDGES).edges.extend(self.updates[EDGES].keys())

    if new_points:
        self.generate_ghost_families(new_points)

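# Illustrative sketch of the edge bookkeeping above: an edge id is the
# workflow id joined to the raw source and target graph-node ids, and the
# same edge id is appended to the 'edges' field of both task proxies so the
# resolver can walk from a node to its edges. The names, cycle point and the
# 'name.point' node form below are assumptions made for the example only.
def _example_edge_wiring(workflow_id):
    s_name, t_name, point = 'foo', 'bar', '20200101T00'
    s_node, t_node = f'{s_name}.{point}', f'{t_name}.{point}'
    e_id = f'{workflow_id}{ID_DELIM}{s_node}{ID_DELIM}{t_node}'
    edge = PbEdge(
        id=e_id,
        source=f'{workflow_id}{ID_DELIM}{point}{ID_DELIM}{s_name}',
        target=f'{workflow_id}{ID_DELIM}{point}{ID_DELIM}{t_name}',
    )
    # Cross-reference the edge from both of its end-point proxies.
    source_proxy = PbTaskProxy(id=edge.source)
    target_proxy = PbTaskProxy(id=edge.target)
    source_proxy.edges.append(e_id)
    target_proxy.edges.append(e_id)
    return edge, source_proxy, target_proxy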