class ParetoFilter(ParetoFilterBase):
    """Filters a set of cases down to the subset that is pareto optimal.
    Assumes that smaller values for model responses are better, so all
    problems must be posed as minimization problems.
    """

    # pylint: disable-msg=E1101
    criteria = List(Str, iotype="in",
                    desc="List of outputs from the case to consider for "
                         "filtering. Note that only case outputs are allowed "
                         "as criteria.")

    #case_set = Slot(ICaseIterator,
    #                desc="CaseIterator with the cases to be filtered to "
    #                     "find the pareto optimal subset.")

    case_sets = List(Slot(ICaseIterator), value=[], iotype="in",
                     desc="CaseSets with the cases to be filtered to "
                          "find the pareto optimal subset.")

    pareto_set = Slot(CaseSet,
                      desc="Resulting collection of pareto optimal cases.",
                      copy="shallow")

    dominated_set = Slot(CaseSet,
                         desc="Resulting collection of dominated cases.",
                         copy="shallow")
class DummyComp(Component):

    r = Float(iotype='in')
    r2 = Float(iotype='in')
    r3 = Float(iotype='in', desc="some random variable",
               low=-1.0, high=1.0, other_meta_data="test")
    s = Str(iotype='in')

    rout = Float(iotype='out', units='ft')
    r2out = Float(iotype='out')
    sout = Str(iotype='out')
    slistout = List(Str, iotype='out')

    dummy_in = Slot(Component, iotype='in')
    dummy_out = Slot(Component, iotype='out')
    dummy_out_no_copy = Slot(Component, iotype='out', copy=None)

    def __init__(self):
        super(DummyComp, self).__init__()
        self.r = 1.0
        self.r2 = -1.0
        self.rout = 0.0
        self.r2out = 0.0
        self.s = 'a string'
        self.sout = ''

        # make a nested container with input and output ContainerVars
        self.add('dummy', Multiplier())
        self.dummy_in = self.dummy
        self.dummy_out = self.dummy

    def execute(self):
        self.rout = self.r * 1.5
        self.r2out = self.r2 + 10.0
        self.sout = self.s[::-1]
        # pylint: disable-msg=E1101
        self.dummy.execute()
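# Hedged usage sketch for the component above (not part of the original test
# module). It assumes Multiplier is importable from the same test code, so the
# lines are left commented rather than presented as runnable:
#
#   comp = DummyComp()
#   comp.r = 2.0
#   comp.s = 'slots'
#   comp.run()
#   print(comp.rout)                      # 3.0  (r * 1.5)
#   print(comp.sout)                      # 'stols'  (reversed input string)
#   print(comp.dummy_out is comp.dummy)   # True: the Slot holds the nested container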
def common_io(assembly, varspeed, varpitch):

    regulated = varspeed or varpitch

    # add inputs
    assembly.add('npts_coarse_power_curve', Int(20, iotype='in',
        desc='number of points to evaluate aero analysis at'))
    assembly.add('npts_spline_power_curve', Int(200, iotype='in',
        desc='number of points to use in fitting spline to power curve'))
    assembly.add('AEP_loss_factor', Float(1.0, iotype='in',
        desc='availability and other losses (soiling, array, etc.)'))

    if varspeed:
        assembly.add('control', VarTree(VarSpeedMachine(), iotype='in',
            desc='control parameters'))
    else:
        assembly.add('control', VarTree(FixedSpeedMachine(), iotype='in',
            desc='control parameters'))

    # add slots (must replace)
    assembly.add('geom', Slot(GeomtrySetupBase))
    assembly.add('analysis', Slot(AeroBase))
    assembly.add('dt', Slot(DrivetrainLossesBase))
    assembly.add('cdf', Slot(CDFBase))

    # add outputs
    assembly.add('AEP', Float(iotype='out', units='kW*h',
        desc='annual energy production'))
    assembly.add('V', Array(iotype='out', units='m/s',
        desc='wind speeds (power curve)'))
    assembly.add('P', Array(iotype='out', units='W',
        desc='power (power curve)'))
    assembly.add('diameter', Float(iotype='out', units='m',
        desc='rotor diameter'))
    if regulated:
        assembly.add('ratedConditions', VarTree(RatedConditions(), iotype='out'))
def test_list_and_dict_slot_attributes(self):

    top = Assembly()
    top.add('sock', Slot(MyClass, desc='Stuff0'))
    top.add('list_sock', List(Slot(MyClass), iotype='in', desc='Stuff'))
    top.add('dict_sock', Dict(key_trait=Str, value_trait=Slot(MyClass),
                              iotype='in', desc='Stuff2'))
    attrs = top.get_attributes(io_only=False)
    slot_attrs = attrs['Slots']
    self.assertTrue({'name': 'list_sock',
                     'containertype': 'list',
                     'filled': [],
                     'klass': 'MyClass',
                     'desc': 'Stuff'} in slot_attrs)
    self.assertTrue({'name': 'dict_sock',
                     'containertype': 'dict',
                     'filled': {},
                     'klass': 'MyClass',
                     'desc': 'Stuff2'} in slot_attrs)
    self.assertTrue({'name': 'sock',
                     'containertype': 'singleton',
                     'filled': None,
                     'klass': 'MyClass',
                     'desc': 'Stuff0'} in slot_attrs)

    # Now fill some slots.
    top.list_sock.append(MyClass())
    top.list_sock.append(MyClass())
    top.dict_sock['Testing'] = MyClass()
    top.sock = MyClass()

    # Note, only tested with one item in the dict because it is not ordered,
    # and hash order will vary on different platforms.
    attrs = top.get_attributes(io_only=False)
    slot_attrs = attrs['Slots']

    self.assertTrue({'name': 'list_sock',
                     'containertype': 'list',
                     'filled': ['MyClass', 'MyClass'],
                     'klass': 'MyClass',
                     'desc': 'Stuff'} in slot_attrs)

    # Need some special checking for the dict slot
    # since we get back a MyClass instance
    dict_slots = filter(lambda x: x["name"] == "dict_sock", slot_attrs)
    self.assertEqual(len(dict_slots), 1)
    dict_slot = dict_slots[0]
    self.assertEqual(dict_slot["containertype"], "dict")
    self.assertEqual(dict_slot["klass"], "MyClass")
    self.assertEqual(dict_slot["desc"], "Stuff2")
    self.assertEqual(dict_slot["filled"], {'Testing': 'MyClass'})

    self.assertTrue({'name': 'sock',
                     'containertype': 'singleton',
                     'filled': 'MyClass',
                     'klass': 'MyClass',
                     'desc': 'Stuff0'} in slot_attrs)
class ExpectedImprovement(ExpectedImprovementBase):

    best_case = Slot(CaseSet,
                     desc="CaseSet which contains a single case "
                          "representing the criteria value.")

    predicted_value = Slot(NormalDistribution,
                           desc="The Normal Distribution of the predicted value "
                                "for some function at some point where you wish to"
                                " calculate the EI.")
class DOEdriver(DOEdriverBase):
    """ Driver for Design of Experiments. """

    implements(IHasParameters)

    # pylint: disable-msg=E1101
    DOEgenerator = Slot(IDOEgenerator, required=True,
                        desc='Iterator supplying normalized DOE values.')

    case_filter = Slot(ICaseFilter, desc='Selects cases to be run.')
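# Sketch of filling the DOEgenerator slot. FullFactorial is the generator
# shipped with OpenMDAO 0.x (openmdao.lib.doegenerators); the import path and
# keyword below are stated as assumptions, not verified against this codebase.
#
#   from openmdao.lib.doegenerators.api import FullFactorial
#   driver = DOEdriver()
#   driver.DOEgenerator = FullFactorial(num_levels=3)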
class TstComponent(Component):

    dummy_data = Slot(TstContainer, iotype='in')
    dummy_data_out = Slot(TstContainer, iotype='out')
    dummyin = Float(iotype='in')

    def __init__(self):
        super(TstComponent, self).__init__()
        self.add('dummy_data', TstContainer())
        self.add('dummy_data_out', TstContainer())

    def execute(self):
        self.dummy_data_out = self.dummy_data.copy()
class ExpectedImprovement(Component):

    best_case = Slot(CaseSet, iotype="in",
                     desc="CaseSet which contains a single case "
                          "representing the criteria value.",
                     required=True)

    criteria = Str(iotype="in",
                   desc="Name of the variable to maximize the expected "
                        "improvement around. Must be a NormalDistribution type.")

    predicted_value = Slot(NormalDistribution, iotype="in",
                           desc="The Normal Distribution of the predicted value "
                                "for some function at some point where you wish to"
                                " calculate the EI.")

    EI = Float(0.0, iotype="out",
               desc="The expected improvement of the predicted_value.")

    PI = Float(0.0, iotype="out",
               desc="The probability of improvement of the predicted_value.")

    def execute(self):
        """ Calculates the expected improvement of the model at a given point.
        """
        mu = self.predicted_value.mu
        sigma = self.predicted_value.sigma
        best_case = self.best_case[0]

        try:
            target = best_case[self.criteria]
        except KeyError:
            self.raise_exception("best_case did not have an output which "
                                 "matched the criteria, '%s'" % self.criteria,
                                 ValueError)

        try:
            self.PI = 0.5 + 0.5 * erf((1 / 2**.5) * ((target - mu) / sigma))

            T1 = (target - mu) * .5 * (1. + erf((target - mu) / (sigma * 2.**.5)))
            T2 = sigma * ((1. / ((2. * pi)**.5)) * exp(-0.5 * ((target - mu) / sigma)**2.))
            self.EI = abs(T1 + T2)
        except (ValueError, ZeroDivisionError):
            self.EI = 0
            self.PI = 0
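# Stand-alone numeric check of the same PI/EI expressions used in execute()
# above, handy for verifying a single point by hand. The mu/sigma/target
# values are made up for illustration.
if __name__ == '__main__':
    from math import erf, exp, pi

    mu, sigma, target = 1.0, 0.5, 0.8
    PI = 0.5 + 0.5 * erf((1 / 2**.5) * ((target - mu) / sigma))
    T1 = (target - mu) * .5 * (1. + erf((target - mu) / (sigma * 2.**.5)))
    T2 = sigma * ((1. / ((2. * pi)**.5)) * exp(-0.5 * ((target - mu) / sigma)**2.))
    print('PI = %f, EI = %f' % (PI, abs(T1 + T2)))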
class Comp(Component):

    x = Float(iotype='in')
    y = Float(iotype='in')
    indct = Dict(iotype='in')
    outdct = Dict(iotype='out')
    cont = Slot(A)
    contlist = List(Slot(A), iotype='in')

    def get_cont(self, i):
        return self.contlist[i]

    def get_attrib(self, name):
        return getattr(self, name)
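# Illustrative sketch (commented because class A is defined elsewhere in the
# same test module): a List(Slot(A)) input simply holds instances, and
# get_cont() indexes into it.
#
#   comp = Comp()
#   comp.contlist = [A(), A(), A()]
#   print(comp.get_cont(1))
#   print(comp.get_attrib('x'))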
class DistributionCaseDriver(CaseIterDriverBase):
    """ Driver for evaluating models at point distributions. """

    implements(IHasParameters)

    distribution_generator = Slot(IDistributionGenerator, required=True,
                                  desc='Iterator supplying values of point distributions.')

    case_outputs = List(Str, iotype='in',
                        desc='A list of outputs to be saved with each case.')

    def get_case_iterator(self):
        """Returns a new iterator over the Case set."""
        return self._get_cases()

    def _get_cases(self):
        """Iterator over the cases."""
        for row in self.distribution_generator:
            case = self.set_parameters(row, Case(parent_uuid=self._case_id))
            case.add_outputs(self.case_outputs)
            yield case
class VarComponent(Component):
    """Contains some vars"""

    boolvar = Bool(False, iotype='in')
    intvar = Int(333, iotype='in')
    floatvar = Float(-16.54, iotype='in')
    expvar1 = Float(1.2, iotype='in')
    expvar2 = Float(1.2, iotype='in')
    textvar = Str("This", iotype='in')
    arrayvar = Array(iotype='in')
    arrayvarsplit = Array(iotype='in')
    arrayvarsplit2 = Array(iotype='in')
    arrayvarzerod = Array(zeros(shape=(0, 0)), iotype='in')
    arrayvartwod = Array(zeros(shape=(1, 3)), iotype='in')
    arraysmall = Array(iotype='in')
    arrayshorthand = Array(iotype='in')
    single = Array(iotype='in')
    singleint = Array(iotype='in', dtype=numpy_int32)
    singlebool = Array(iotype='in', dtype=bool)
    stringarray = List([], iotype='in')
    listenumvar = List(Enum(1, (1, 2, 3)), iotype='in')
    listenumvar2 = List(Enum(1.5, (1.5, 2.4, 3.3)), iotype='in')
    listenumvar3 = List(Enum('a', ('a', 'b', 'c')), iotype='in')
    listenumvar4 = List(Enum(True, (True, False)), iotype='in')

    varcontainer = Slot(VarContainer, iotype='in')

    def __init__(self):
        super(VarComponent, self).__init__()

        # Variable Containers
        self.add('varcontainer', VarContainer())
class TstAssembly1(Assembly):

    dummy_data = Slot(TstContainer, iotype='in')

    def configure(self):
        super(TstAssembly1, self).configure()
        self.add('dummy_data', TstContainer())
        self.add('dummy_data_out', TstContainer())
def test_deprecated_metadata(self):
    with warnings.catch_warnings(record=True) as w:
        Slot(Assembly, iotype="in")

        assert len(w) == 1
        assert issubclass(w[-1].category, FutureWarning)
        assert "Slot" in str(w[-1].message)
        assert "iotype" in str(w[-1].message)
class SimpleAssembly(Assembly):

    d = Slot(Dummy)

    def configure(self):
        self.driver.workflow.add('d')
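# Hedged note on the class above: configure() puts 'd' in the workflow but
# never fills the slot, so the slot must be populated before the assembly can
# run (Dummy is defined elsewhere in this test module).
#
#   top = SimpleAssembly()
#   top.add('d', Dummy())
#   top.run()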
def __init__(self, params=None, responses=None, nfi=1):
    super(MultiFiMetaModel, self).__init__(params, responses)

    self.nfi = nfi
    if self.nfi > 1:
        self._param_data = [[] for i in np.arange(self.nfi)]
        for name in responses:
            self._response_data[name] = [[] for i in np.arange(self.nfi)]

    self.add('default_surrogate',
             Slot(IMultiFiSurrogate, allow_none=True,
                  desc="This surrogate will be used for all outputs that don't "
                       "have a specific surrogate assigned to them in their "
                       "sur_<name> slot."))

    # Add params.<invar>_fi<n>
    for name in params:
        for n in np.arange(nfi):
            if n > 0:
                input_tree = self.get('params')
                name_with_fi = "%s_fi%d" % (name, n + 1)
                self.add(name_with_fi,
                         Float(0.0, iotype='in', desc='metamodel param'))
                input_tree.add(name_with_fi, List([], desc='training param'))

    # Add responses.<outvar>_fi<n>
    for name in responses:
        for n in np.arange(nfi):
            if n > 0:
                output_tree = self.get('responses')
                name_with_fi = "%s_fi%d" % (name, n + 1)
                self.add(name_with_fi,
                         Float(0.0, iotype='out', desc='metamodel response'))
                output_tree.add(name_with_fi, List([], desc='training response'))
                self.surrogates[name] = None
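# Illustrative note on the naming convention created above (values are made
# up; the constructor signature is taken from this snippet only): with
# params=('x',), responses=('y',) and nfi=2 the model exposes x, x_fi2, y and
# y_fi2, plus params.x_fi2 / responses.y_fi2 training lists.
#
#   mm = MultiFiMetaModel(params=('x',), responses=('y',), nfi=2)
#   mm.x_fi2 = 0.5
#   mm.params.x_fi2 = [0.0, 0.25, 0.5]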
class MyContainer(Container):

    uncertain = Slot(NormalDistribution, iotype="out")

    def __init__(self):
        super(MyContainer, self).__init__()
        self.uncertain = NormalDistribution()
        self.add('dyntrait', Float(9., desc='some desc'))
class SlotComp5(Assembly):

    iterator = Slot(CIterator(), allow_none=False, desc='cases to evaluate')
    num_cases = Int(0, iotype='out')

    def execute(self):
        self.num_cases = 0
        for case in self.iterator:
            self.num_cases += 1
class Example(Assembly):

    base = Slot(OneBase)

    def configure(self):
        self.add('base', OneBase())
        self.add('c', C())
        self.driver.workflow.add(['base', 'c'])
        self.connect('base.x', 'c.x')
        self.create_passthrough('c.y')
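# Hedged sketch: because the slot is typed as OneBase, the placeholder added
# in configure() can be swapped for any OneBase subclass. 'OneDerived' is a
# hypothetical name used purely for illustration.
#
#   top = Example()
#   top.replace('base', OneDerived())
#   top.run()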
class TestComponent(Component):

    dummy_data = Slot(TestContainer(), iotype='in')
    x = Float(iotype='out')

    def __init__(self):
        super(TestComponent, self).__init__()
        self.add('dummy_data', TestContainer())

    def execute(self):
        self.x = (self.dummy_data.dummy1 - 3)**2 - self.dummy_data.dummy2
def add(self, name, obj):
    if isinstance(obj, VariableTree):
        if self.trait(name) is None:
            self.add_trait(name, Slot(VariableTree, iotype=obj._iotype))
            self.on_trait_change(self._trait_modified, name)
    elif not IVariable.providedBy(obj):
        msg = "a VariableTree may only contain Variables or other " + \
              "VariableTrees"
        self.raise_exception(msg, TypeError)
    return super(VariableTree, self).add(name, obj)
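# Behaviour sketch for the add() override above (names are illustrative):
# adding a VariableTree child auto-creates a matching Slot trait, while adding
# a non-Variable object raises the TypeError built above.
#
#   tree = VariableTree()
#   tree.add('sub', VariableTree())   # Slot trait 'sub' created automatically
#   tree.add('bad', object())         # raises TypeError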
class SlotComp(Assembly):

    iterator = Slot(ICaseIterator, allow_none=False, desc='cases to evaluate')
    num_cases = Int(0, iotype='out')

    def __init__(self):
        super(SlotComp, self).__init__()

    def execute(self):
        self.num_cases = 0
        for case in self.iterator:
            self.num_cases += 1
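# Minimal sketch of feeding the iterator slot. The ListCaseIterator/Case
# import paths are assumptions based on OpenMDAO 0.x and are not verified
# here, so the lines stay commented.
#
#   from openmdao.main.case import Case
#   from openmdao.lib.casehandlers.api import ListCaseIterator
#   sc = SlotComp()
#   sc.iterator = ListCaseIterator([Case(), Case(), Case()])
#   sc.run()
#   print(sc.num_cases)   # 3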
class DistributionCaseDriver(CaseIteratorDriver):
    """ Driver for evaluating models at point distributions. """

    distribution_generator = Slot(IDistributionGenerator, required=True,
                                  desc='Iterator supplying values of point distributions.')

    def execute(self):
        """Generate and evaluate cases."""
        self.set_inputs(self.distribution_generator)
        super(DistributionCaseDriver, self).execute()
class AutoAssemb(Assembly):

    d2 = Slot(Dummy)

    def configure(self):
        self.add('d1', Dummy())
        self.add('d2', Dummy())
        self.add('d3', Dummy())

        self.driver.workflow.add(['d1', 'd2', 'd3'])
        self.connect('d1.fout', 'd2.fin')
        self.connect('d2.fout', 'd3.fin')
        self.create_passthrough('d1.fin')
        self.create_passthrough('d3.fout')
class SimpleCaseIterDriver(Driver):
    """
    A Driver that sequentially runs a set of cases provided by an
    :class:`ICaseIterator` and optionally records the results in a
    :class:`CaseRecorder`. This is intended for test cases or very simple
    models only. For a more full-featured Driver with similar functionality,
    see :class:`CaseIteratorDriver`.

    - The `iterator` socket provides the cases to be evaluated.
    - The `recorders` socket is used to record results. This is inherited
      from the :class:`Driver` class.

    For each case coming from the `iterator`, the workflow will be executed
    once.
    """

    # pylint: disable-msg=E1101
    iterator = Slot(ICaseIterator, desc='Source of Cases.', required=True)

    def __init__(self):
        super(SimpleCaseIterDriver, self).__init__()
        self._iter = None  # Set to None when iterator is empty.
        self.on_trait_change(self._iterator_modified, 'iterator')

    def _iterator_modified(self, obj, name, value):
        self._call_execute = True

    def _pre_execute(self, force=False):
        super(SimpleCaseIterDriver, self)._pre_execute(force)

    def execute(self):
        """ Run each case in `iterator` and record results in `recorders`. """
        for case in self.iterator:
            self._run_case(case)
            for recorder in self.recorders:
                recorder.record(case)

    def _run_case(self, case):
        msg = None
        case.parent_uuid = self._case_id
        case.apply_inputs(self.parent)
        try:
            self.workflow.run(case_id=case.uuid)
        except Exception as err:
            msg = str(err)
        try:
            case.update_outputs(self.parent, msg)
        except Exception as err:
            if msg is None:
                case.msg = str(err)
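# Hedged usage sketch: the driver slots into an Assembly like any other
# driver and its iterator slot takes any ICaseIterator. The component and
# iterator names below are illustrative assumptions.
#
#   top = Assembly()
#   top.add('driver', SimpleCaseIterDriver())
#   top.add('comp', DummyComp())
#   top.driver.workflow.add('comp')
#   top.driver.iterator = ListCaseIterator(list_of_cases)
#   top.run()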
class NeighborhoodDOEdriver(CaseIteratorDriver):
    """Driver for Design of Experiments within a specified neighborhood
    around a point."""

    # pylint: disable-msg=E1101
    DOEgenerator = Slot(IDOEgenerator, required=True,
                        desc='Iterator supplying normalized DOE values.')

    alpha = Float(.3, low=.01, high=1.0, iotype='in',
                  desc='Multiplicative factor for neighborhood DOE Driver.')

    beta = Float(.01, low=.001, high=1.0, iotype='in',
                 desc='Another factor for neighborhood DOE Driver.')

    def execute(self):
        """Generate and evaluate cases."""
        self.set_inputs(self._get_cases())
        super(NeighborhoodDOEdriver, self).execute()

    def _get_cases(self):
        """Generate each case."""
        self.DOEgenerator.num_parameters = self.total_parameters()

        upper = self.get_upper_bounds()
        lower = self.get_lower_bounds()
        P = self.eval_parameters()
        M = (P - lower) / (upper - lower)

        for row in list(self.DOEgenerator) + [tuple(M)]:
            delta_low = P - lower
            k_low = 1.0 / (1.0 + (1 - self.beta) * delta_low)
            new_low = P - self.alpha * k_low * delta_low  # /(self.exec_count+1)

            delta_high = upper - P
            k_high = 1.0 / (1.0 + (1 - self.beta) * delta_high)
            new_high = P + self.alpha * k_high * delta_high  # /(self.exec_count+1)

            vals = new_low + (new_high - new_low) * row
            yield vals
class TowerSE(Assembly): # geometry parameters z_param = Array(iotype='in', units='m', desc='parameterized locations along tower, linear lofting between') d_param = Array(iotype='in', units='m', desc='tower diameter at corresponding locations') t_param = Array(iotype='in', units='m', desc='shell thickness at corresponding locations') # geometry z_full = Array(iotype='in', units='m', desc='locations along tower') L_reinforced = Array(iotype='in', units='m') theta_stress = Array(iotype='in', units='deg', desc='location along azimuth where stress should be evaluated. 0 corresponds to +x axis. follows unit circle direction and c.s.') # wind/wave wind_rho = Float(1.225, iotype='in', units='kg/m**3', desc='air density') wind_mu = Float(1.7934e-5, iotype='in', units='kg/(m*s)', desc='dynamic viscosity of air') wind_Uref1 = Float(iotype='in', units='m/s', desc='reference wind speed (usually at hub height)') wind_Uref2 = Float(iotype='in', units='m/s', desc='reference wind speed (usually at hub height)') wind_zref = Float(iotype='in', units='m', desc='corresponding reference height') wind_z0 = Float(0.0, iotype='in', units='m', desc='bottom of wind profile (height of ground/sea)') wind_cd = Float(iotype='in', desc='Cd coefficient (it will be applied at all stations), if left blank it will be calculated based on cylinder Re') wave_rho = Float(1027.0, iotype='in', units='kg/m**3', desc='water density') wave_mu = Float(1.3351e-3, iotype='in', units='kg/(m*s)', desc='dynamic viscosity of water') wave_cm = Float(2.0, iotype='in', desc='mass coefficient') wave_cd = Float(iotype='in', desc='Cd coefficient (it will be applied at all stations), if left blank it will be calculated based on cylinder Re') yaw = Float(0.0, iotype='in', units='deg', desc='yaw angle') # material props E = Array(iotype='in', units='N/m**2', desc='modulus of elasticity') G = Array(iotype='in', units='N/m**2', desc='shear modulus') rho = Array(iotype='in', units='kg/m**3', desc='material density') sigma_y = Array(iotype='in', units='N/m**2', desc='yield stress') # spring reaction data. Use float('inf') for rigid constraints. 
kidx = Array(iotype='in', desc='indices of z where external stiffness reactions should be applied.') kx = Array(iotype='in', units='m', desc='spring stiffness in x-direction') ky = Array(iotype='in', units='m', desc='spring stiffness in y-direction') kz = Array(iotype='in', units='m', desc='spring stiffness in z-direction') ktx = Array(iotype='in', units='m', desc='spring stiffness in theta_x-rotation') kty = Array(iotype='in', units='m', desc='spring stiffness in theta_y-rotation') ktz = Array(iotype='in', units='m', desc='spring stiffness in theta_z-rotation') # extra mass midx = Array(iotype='in', desc='indices where added mass should be applied.') m = Array(iotype='in', units='kg', desc='added mass') mIxx = Array(iotype='in', units='kg*m**2', desc='x mass moment of inertia about some point p') mIyy = Array(iotype='in', units='kg*m**2', desc='y mass moment of inertia about some point p') mIzz = Array(iotype='in', units='kg*m**2', desc='z mass moment of inertia about some point p') mIxy = Array(iotype='in', units='kg*m**2', desc='xy mass moment of inertia about some point p') mIxz = Array(iotype='in', units='kg*m**2', desc='xz mass moment of inertia about some point p') mIyz = Array(iotype='in', units='kg*m**2', desc='yz mass moment of inertia about some point p') mrhox = Array(iotype='in', units='m', desc='x-location of p relative to node') mrhoy = Array(iotype='in', units='m', desc='y-location of p relative to node') mrhoz = Array(iotype='in', units='m', desc='z-location of p relative to node') addGravityLoadForExtraMass = Bool(True, iotype='in', desc='add gravitational load') # gravitational load g = Float(9.81, iotype='in', units='m/s**2', desc='acceleration of gravity (magnitude)') # safety factors gamma_f = Float(1.35, iotype='in', desc='safety factor on loads') gamma_m = Float(1.1, iotype='in', desc='safety factor on materials') gamma_n = Float(1.0, iotype='in', desc='safety factor on consequence of failure') gamma_b = Float(1.1, iotype='in', desc='buckling safety factor') gamma_fatigue = Float(1.755, iotype='in', desc='total safety factor for fatigue') # replace wind1 = Slot(WindBase) wind2 = Slot(WindBase) wave1 = Slot(WaveBase) wave2 = Slot(WaveBase) # point loads (if addGravityLoadForExtraMass=True be sure not to double count by adding those force here also) plidx1 = Array(iotype='in', desc='indices where point loads should be applied.') Fx1 = Array(iotype='in', units='N', desc='point force in x-direction') Fy1 = Array(iotype='in', units='N', desc='point force in y-direction') Fz1 = Array(iotype='in', units='N', desc='point force in z-direction') Mxx1 = Array(iotype='in', units='N*m', desc='point moment about x-axis') Myy1 = Array(iotype='in', units='N*m', desc='point moment about y-axis') Mzz1 = Array(iotype='in', units='N*m', desc='point moment about z-axis') plidx2 = Array(iotype='in', desc='indices where point loads should be applied.') Fx2 = Array(iotype='in', units='N', desc='point force in x-direction') Fy2 = Array(iotype='in', units='N', desc='point force in y-direction') Fz2 = Array(iotype='in', units='N', desc='point force in z-direction') Mxx2 = Array(iotype='in', units='N*m', desc='point moment about x-axis') Myy2 = Array(iotype='in', units='N*m', desc='point moment about y-axis') Mzz2 = Array(iotype='in', units='N*m', desc='point moment about z-axis') # constraint parameters min_d_to_t = Float(120.0, iotype='in') min_taper = Float(0.4, iotype='in') # fatigue parameters life = Float(20.0, iotype='in', desc='fatigue life of tower') m_SN = Int(4, iotype='in', 
desc='slope of S/N curve') DC = Float(80.0, iotype='in', desc='standard value of stress') z_DEL = Array(iotype='in') M_DEL = Array(iotype='in') # frame3ddd options shear = Bool(True, iotype='in', desc='include shear deformation') geom = Bool(False, iotype='in', desc='include geometric stiffness') dx = Float(5.0, iotype='in', desc='z-axis increment for internal forces') nM = Int(2, iotype='in', desc='number of desired dynamic modes of vibration (below only necessary if nM > 0)') Mmethod = Int(1, iotype='in', desc='1: subspace Jacobi, 2: Stodola') lump = Int(0, iotype='in', desc='0: consistent mass, 1: lumped mass matrix') tol = Float(1e-9, iotype='in', desc='mode shape tolerance') shift = Float(0.0, iotype='in', desc='shift value ... for unrestrained structures') # outputs mass = Float(iotype='out', units='kg') f1 = Float(iotype='out', units='Hz', desc='First natural frequency') f2 = Float(iotype='out', units='Hz', desc='Second natural frequency') top_deflection1 = Float(iotype='out', units='m', desc='Deflection of tower top in yaw-aligned +x direction') top_deflection2 = Float(iotype='out', units='m', desc='Deflection of tower top in yaw-aligned +x direction') stress1 = Array(iotype='out', units='N/m**2', desc='Von Mises stress along tower on downwind side (yaw-aligned +x). Normalized by yield stress. Includes safety factors.') stress2 = Array(iotype='out', units='N/m**2', desc='Von Mises stress along tower on downwind side (yaw-aligned +x). Normalized by yield stress. Includes safety factors.') shell_buckling1 = Array(iotype='out', desc='Shell buckling constraint load case #1. Should be < 1 for feasibility. Includes safety factors') shell_buckling2 = Array(iotype='out', desc='Shell buckling constraint load case #2. Should be < 1 for feasibility. Includes safety factors') global_buckling1 = Array(iotype='out', desc='Global buckling constraint load case #1. Should be < 1 for feasibility. Includes safety factors') global_buckling2 = Array(iotype='out', desc='Global buckling constraint load case #2. Should be < 1 for feasibility. Includes safety factors') damage = Array(iotype='out', desc='Fatigue damage at each tower section') weldability = Array(iotype='out') manufacturability = Array(iotype='out') def configure(self): self.add('geometry', TowerDiscretization()) # two load cases. 
TODO: use a case iterator self.add('wind1', WindBase()) self.add('wind2', WindBase()) self.add('wave1', WaveBase()) self.add('wave2', WaveBase()) self.add('windLoads1', TowerWindDrag()) self.add('windLoads2', TowerWindDrag()) self.add('waveLoads1', TowerWaveDrag()) self.add('waveLoads2', TowerWaveDrag()) self.add('distLoads1', AeroHydroLoads()) self.add('distLoads2', AeroHydroLoads()) self.add('props', CylindricalShellProperties()) self.add('tower1', TowerFrame3DD()) self.add('tower2', TowerFrame3DD()) self.add('gc', GeometricConstraints()) self.driver.workflow.add(['geometry', 'wind1', 'wind2', 'wave1', 'wave2', 'windLoads1', 'windLoads2', 'waveLoads1', 'waveLoads2', 'distLoads1', 'distLoads2', 'geometry', 'props', 'tower1', 'tower2', 'gc']) # connections to geometry self.connect('z_param', 'geometry.z_param') self.connect('d_param', 'geometry.d_param') self.connect('t_param', 'geometry.t_param') self.connect('z_full', 'geometry.z_full') # connections to wind1 self.connect('geometry.z_full', 'wind1.z') self.connect('wind_Uref1', 'wind1.Uref') self.connect('wind_zref', 'wind1.zref') self.connect('wind_z0', 'wind1.z0') # connections to wind2 self.connect('geometry.z_full', 'wind2.z') self.connect('wind_Uref2', 'wind2.Uref') self.connect('wind_zref', 'wind2.zref') self.connect('wind_z0', 'wind2.z0') # connections to wave1 and wave2 self.connect('geometry.z_full', 'wave1.z') self.connect('geometry.z_full', 'wave2.z') # connections to windLoads1 self.connect('wind1.U', 'windLoads1.U') self.connect('geometry.z_full', 'windLoads1.z') self.connect('geometry.d_full', 'windLoads1.d') self.connect('wind1.beta', 'windLoads1.beta') self.connect('wind_rho', 'windLoads1.rho') self.connect('wind_mu', 'windLoads1.mu') self.connect('wind_cd', 'windLoads1.cd_usr') # connections to windLoads2 self.connect('wind2.U', 'windLoads2.U') self.connect('geometry.z_full', 'windLoads2.z') self.connect('geometry.d_full', 'windLoads2.d') self.connect('wind2.beta', 'windLoads2.beta') self.connect('wind_rho', 'windLoads2.rho') self.connect('wind_mu', 'windLoads2.mu') self.connect('wind_cd', 'windLoads2.cd_usr') # connections to waveLoads1 self.connect('wave1.U', 'waveLoads1.U') self.connect('wave1.A', 'waveLoads1.A') self.connect('geometry.z_full', 'waveLoads1.z') self.connect('geometry.d_full', 'waveLoads1.d') self.connect('wave1.beta', 'waveLoads1.beta') self.connect('wave_rho', 'waveLoads1.rho') self.connect('wave_mu', 'waveLoads1.mu') self.connect('wave_cm', 'waveLoads1.cm') self.connect('wave_cd', 'waveLoads1.cd_usr') # connections to waveLoads2 self.connect('wave2.U', 'waveLoads2.U') self.connect('wave2.A', 'waveLoads2.A') self.connect('geometry.z_full', 'waveLoads2.z') self.connect('geometry.d_full', 'waveLoads2.d') self.connect('wave2.beta', 'waveLoads2.beta') self.connect('wave_rho', 'waveLoads2.rho') self.connect('wave_mu', 'waveLoads2.mu') self.connect('wave_cm', 'waveLoads2.cm') self.connect('wave_cd', 'waveLoads2.cd_usr') # connections to distLoads1 self.connect('windLoads1.windLoads', 'distLoads1.windLoads') self.connect('waveLoads1.waveLoads', 'distLoads1.waveLoads') self.connect('geometry.z_full', 'distLoads1.z') self.connect('yaw', 'distLoads1.yaw') # connections to distLoads2 self.connect('windLoads2.windLoads', 'distLoads2.windLoads') self.connect('waveLoads2.waveLoads', 'distLoads2.waveLoads') self.connect('geometry.z_full', 'distLoads2.z') self.connect('yaw', 'distLoads2.yaw') # connections to props self.connect('geometry.d_full', 'props.d') self.connect('geometry.t_full', 'props.t') # connect to 
tower1 self.connect('z_full', 'tower1.z') self.connect('props.Az', 'tower1.Az') self.connect('props.Asx', 'tower1.Asx') self.connect('props.Asy', 'tower1.Asy') self.connect('props.Jz', 'tower1.Jz') self.connect('props.Ixx', 'tower1.Ixx') self.connect('props.Iyy', 'tower1.Iyy') self.connect('E', 'tower1.E') self.connect('G', 'tower1.G') self.connect('rho', 'tower1.rho') self.connect('sigma_y', 'tower1.sigma_y') self.connect('geometry.d_full', 'tower1.d') self.connect('geometry.t_full', 'tower1.t') self.connect('L_reinforced', 'tower1.L_reinforced') self.connect('theta_stress', 'tower1.theta_stress') self.connect('kidx', 'tower1.kidx') self.connect('kx', 'tower1.kx') self.connect('ky', 'tower1.ky') self.connect('kz', 'tower1.kz') self.connect('ktx', 'tower1.ktx') self.connect('kty', 'tower1.kty') self.connect('ktz', 'tower1.ktz') self.connect('midx', 'tower1.midx') self.connect('m', 'tower1.m') self.connect('mIxx', 'tower1.mIxx') self.connect('mIyy', 'tower1.mIyy') self.connect('mIzz', 'tower1.mIzz') self.connect('mIxy', 'tower1.mIxy') self.connect('mIxz', 'tower1.mIxz') self.connect('mIyz', 'tower1.mIyz') self.connect('mrhox', 'tower1.mrhox') self.connect('mrhoy', 'tower1.mrhoy') self.connect('mrhoz', 'tower1.mrhoz') self.connect('addGravityLoadForExtraMass', 'tower1.addGravityLoadForExtraMass') self.connect('g', 'tower1.g') self.connect('plidx1', 'tower1.plidx') self.connect('Fx1', 'tower1.Fx') self.connect('Fy1', 'tower1.Fy') self.connect('Fz1', 'tower1.Fz') self.connect('Mxx1', 'tower1.Mxx') self.connect('Myy1', 'tower1.Myy') self.connect('Mzz1', 'tower1.Mzz') self.connect('distLoads1.Px', 'tower1.Px') self.connect('distLoads1.Py', 'tower1.Py') self.connect('distLoads1.Pz', 'tower1.Pz') self.connect('distLoads1.qdyn', 'tower1.qdyn') #self.connect('distLoads1.outloads', 'tower1.WWloads') self.connect('gamma_f', 'tower1.gamma_f') self.connect('gamma_m', 'tower1.gamma_m') self.connect('gamma_n', 'tower1.gamma_n') self.connect('gamma_b', 'tower1.gamma_b') self.connect('life', 'tower1.life') self.connect('m_SN', 'tower1.m_SN') self.connect('DC', 'tower1.DC') self.connect('z_DEL', 'tower1.z_DEL') self.connect('M_DEL', 'tower1.M_DEL') self.connect('gamma_fatigue', 'tower1.gamma_fatigue') self.connect('shear', 'tower1.shear') self.connect('geom', 'tower1.geom') self.connect('dx', 'tower1.dx') self.connect('nM', 'tower1.nM') self.connect('Mmethod', 'tower1.Mmethod') self.connect('lump', 'tower1.lump') self.connect('tol', 'tower1.tol') self.connect('shift', 'tower1.shift') # connect to tower2 self.connect('z_full', 'tower2.z') self.connect('props.Az', 'tower2.Az') self.connect('props.Asx', 'tower2.Asx') self.connect('props.Asy', 'tower2.Asy') self.connect('props.Jz', 'tower2.Jz') self.connect('props.Ixx', 'tower2.Ixx') self.connect('props.Iyy', 'tower2.Iyy') self.connect('E', 'tower2.E') self.connect('G', 'tower2.G') self.connect('rho', 'tower2.rho') self.connect('sigma_y', 'tower2.sigma_y') self.connect('geometry.d_full', 'tower2.d') self.connect('geometry.t_full', 'tower2.t') self.connect('L_reinforced', 'tower2.L_reinforced') self.connect('theta_stress', 'tower2.theta_stress') self.connect('kidx', 'tower2.kidx') self.connect('kx', 'tower2.kx') self.connect('ky', 'tower2.ky') self.connect('kz', 'tower2.kz') self.connect('ktx', 'tower2.ktx') self.connect('kty', 'tower2.kty') self.connect('ktz', 'tower2.ktz') self.connect('midx', 'tower2.midx') self.connect('m', 'tower2.m') self.connect('mIxx', 'tower2.mIxx') self.connect('mIyy', 'tower2.mIyy') self.connect('mIzz', 'tower2.mIzz') 
self.connect('mIxy', 'tower2.mIxy') self.connect('mIxz', 'tower2.mIxz') self.connect('mIyz', 'tower2.mIyz') self.connect('mrhox', 'tower2.mrhox') self.connect('mrhoy', 'tower2.mrhoy') self.connect('mrhoz', 'tower2.mrhoz') self.connect('addGravityLoadForExtraMass', 'tower2.addGravityLoadForExtraMass') self.connect('g', 'tower2.g') self.connect('plidx2', 'tower2.plidx') self.connect('Fx2', 'tower2.Fx') self.connect('Fy2', 'tower2.Fy') self.connect('Fz2', 'tower2.Fz') self.connect('Mxx2', 'tower2.Mxx') self.connect('Myy2', 'tower2.Myy') self.connect('Mzz2', 'tower2.Mzz') self.connect('distLoads2.Px', 'tower2.Px') self.connect('distLoads2.Py', 'tower2.Py') self.connect('distLoads2.Pz', 'tower2.Pz') self.connect('distLoads2.qdyn', 'tower2.qdyn') #elf.connect('distLoads2.outloads', 'tower2.WWloads') self.connect('gamma_f', 'tower2.gamma_f') self.connect('gamma_m', 'tower2.gamma_m') self.connect('gamma_n', 'tower2.gamma_n') self.connect('gamma_b', 'tower2.gamma_b') self.connect('life', 'tower2.life') self.connect('m_SN', 'tower2.m_SN') self.connect('DC', 'tower2.DC') self.connect('z_DEL', 'tower2.z_DEL') self.connect('M_DEL', 'tower2.M_DEL') self.connect('gamma_fatigue', 'tower2.gamma_fatigue') self.connect('shear', 'tower2.shear') self.connect('geom', 'tower2.geom') self.connect('dx', 'tower2.dx') self.connect('nM', 'tower2.nM') self.connect('Mmethod', 'tower2.Mmethod') self.connect('lump', 'tower2.lump') self.connect('tol', 'tower2.tol') self.connect('shift', 'tower2.shift') # connections to gc self.connect('d_param', 'gc.d') self.connect('t_param', 'gc.t') self.connect('min_d_to_t', 'gc.min_d_to_t') self.connect('min_taper', 'gc.min_taper') # outputs self.connect('tower1.mass', 'mass') self.connect('tower1.f1', 'f1') self.connect('tower1.f2', 'f2') self.connect('tower1.top_deflection', 'top_deflection1') self.connect('tower2.top_deflection', 'top_deflection2') self.connect('tower1.stress', 'stress1') self.connect('tower2.stress', 'stress2') self.connect('tower1.global_buckling', 'global_buckling1') self.connect('tower2.global_buckling', 'global_buckling2') self.connect('tower1.shell_buckling', 'shell_buckling1') self.connect('tower2.shell_buckling', 'shell_buckling2') self.connect('tower1.damage', 'damage') self.connect('gc.weldability', 'weldability') self.connect('gc.manufacturability', 'manufacturability')
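# Hedged usage sketch for the wind/wave slots declared above: the WindBase and
# WaveBase placeholders added in configure() are meant to be swapped for
# concrete models via replace(). PowerWind/LinearWaves are names from the
# CommonSE environment module and are stated here as assumptions.
#
#   tower = TowerSE()
#   tower.replace('wind1', PowerWind())
#   tower.replace('wind2', PowerWind())
#   tower.replace('wave1', LinearWaves())
#   tower.replace('wave2', LinearWaves())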
class ArchitectureAssembly(Assembly): implements(IHasConstraints, IHasParameters, IHasCouplingVars, IHasObjectives) architecture = Slot(IArchitecture, desc="Slot for automatic architecture configurations.") def get_expr_scope(self): """Return the scope to be used to evaluate ExprEvaluators.""" return self def _architecture_changed(self, old, new): if old is None or not old.configured: if new is not None: self.architecture.parent = self else: self._trait_change_notify(False) try: self.architecture = old # put the old value back finally: self._trait_change_notify(True) self.raise_exception( "This Assembly was already configured with an " "architecture. To change architectures you must " "create a new ArchitectureAssembly.", RuntimeError) def initialize(self): """Sets all des_vars and coupling_vars to the start values, if specified.""" self.init_parameters() self.init_coupling_vars() def check_config(self): """Checks the configuration of the assembly to make sure it's compatible with the architecture. Then initializes all the values in the parameters and coupling vars and configures the architecture if it hasn't been done already. """ super(ArchitectureAssembly, self).check_config() if self.architecture is not None: self.architecture.check_config() if not self.architecture.configured: self.architecture.configure() self.architecture.configured = True def _invalidate(self): """ Method for delegates to declare that this is in an invalid state so that isvalid() returns false. Presently, this is called when a constraint/objective/parameter is set, removed, or cleared. """ self._invalidated = True self._set_exec_state('INVALID') def get_des_vars_by_comp(self): """Return a dictionary of component names/list of parameters for all parameters.""" result = {} for k, v in self.get_parameters().items(): data = v.get_referenced_vars_by_compname() for name, vars in data.iteritems(): try: result[name].extend(vars) except KeyError: result[name] = list(vars) return result def get_local_des_vars_by_comp(self): """Return a dictionary of component names/list of parameters for all single-target parameters.""" comps = {} for k, v in self.get_parameters().items(): if isinstance(v, Parameter): comp_names = v.get_referenced_compnames() if len(comp_names) > 1: continue comp = comp_names.pop() try: comps[comp].append(v) except KeyError: comps[comp] = [v] return comps def get_local_des_vars(self): """Return a list of single-target Parameters.""" return [(k, v) for k, v in self.get_parameters().items() if isinstance(v, Parameter)] def get_global_des_vars_by_comp(self): """Return a dictionary of component names/list of parameters for all multi-target parameters.""" result = {} for k, v in self.get_parameters().items(): if isinstance(v, ParameterGroup): data = v.get_referenced_vars_by_compname() for name, vars in data.iteritems(): try: result[name].extend(vars) except KeyError: result[name] = list(vars) return result def get_global_des_vars(self): """Return a list of multi-target Parameters.""" return [(k, v) for k, v in self.get_parameters().items() if isinstance(v, ParameterGroup)] def get_des_vars_by_comp(self): """Return a dictionary of component names/list of parameters (global and local).""" result = self.get_local_des_vars_by_comp() for k, v in self.get_global_des_vars_by_comp().iteritems(): try: result[k].extend(v) except KeyError: result[k] = v return result def get_coupling_indeps_by_comp(self): """Returns a dictionary of coupling var independent parameter objects, keyed to the component they are part of.""" 
result = {} for indep_dep, couple in self.list_coupling_vars().iteritems(): comp = couple.indep.get_referenced_compnames().pop() try: result[comp].append(couple) except KeyError: result[comp] = [couple] return result def get_coupling_deps_by_comp(self): """Returns a dictionary of coupling var dependents keyed to the component they are part of.""" result = {} for indep_dep, couple in self.list_coupling_vars().iteritems(): comp = couple.dep.get_referenced_compnames().pop() try: result[comp].append(couple) except KeyError: result[comp] = [couple] return result def get_constraints_by_comp(self): result = {} for text, const in self.get_constraints().iteritems(): comps = const.get_referenced_compnames() for comp in comps: try: result[comp].append(const) except: result[comp] = [ const, ] return result
class ArchitectureAssembly(Assembly): implements(IHasConstraints, IHasParameters, IHasCouplingVars, IHasObjectives) architecture = Slot(IArchitecture, desc="Slot for automatic architecture configurations.") def get_expr_scope(self): """Return the scope to be used to evaluate ExprEvaluators.""" return self def _architecture_changed(self, old, new): if old is None or not old.configured: if new is not None: self.architecture.parent = self else: self._trait_change_notify(False) try: self.architecture = old # put the old value back finally: self._trait_change_notify(True) self.raise_exception("This Assembly was already configured with an " "architecture. To change architectures you " "must create a new ArchitectureAssembly.", RuntimeError) def initialize(self): """Sets all des_vars and coupling_vars to the start values, if specified.""" self.init_parameters() self.init_coupling_vars() def configure_recording(self, recording_options=None): self.check_config() super(ArchitectureAssembly, self).configure_recording(recording_options) def setup_init(self): if self.architecture is not None: self.architecture.check_config(strict=False) if not self.architecture.configured: self.architecture.configure() self.architecture.configured = True super(ArchitectureAssembly, self).setup_init() # def check_config(self, strict=False): # """Checks the configuration of the assembly to make sure it's compatible # with the architecture. Then initializes all the values in the # parameters and coupling vars and configures the architecture if it # hasn't been done already. # """ # super(ArchitectureAssembly, self).check_config(strict=strict) # if self.architecture is not None: # self.architecture.check_config(strict=strict) # if not self.architecture.configured: # self.architecture.configure() # self.architecture.configured = True # def get_local_des_vars_by_comp(self): """Return a dictionary of component names/list of parameters for all single-target parameters.""" comps = {} for v in self.get_parameters().values(): if not isinstance(v, ParameterGroup): comp_names = v.get_referenced_compnames() if len(comp_names) > 1: continue comp = comp_names.pop() try: comps[comp].append(v) except KeyError: comps[comp] = [v] return comps def get_local_des_vars(self): """Return a list of single-target Parameters.""" return [(k, v) for k, v in self.get_parameters().items() if not isinstance(v, ParameterGroup)] def get_global_des_vars_by_comp(self): """Return a dictionary of component names/list of parameters for all multi-target parameters.""" result = {} for v in self.get_parameters().values(): if isinstance(v, ParameterGroup): data = v.get_referenced_vars_by_compname() for name, vars in data.iteritems(): try: result[name].extend(vars) except KeyError: result[name] = list(vars) return result def get_global_des_vars(self): """Return a list of multi-target Parameters.""" return [(k, v) for k, v in self.get_parameters().items() if isinstance(v, ParameterGroup)] def get_des_vars_by_comp(self): """Return a dictionary of component names/list of parameters (global and local).""" result = self.get_local_des_vars_by_comp() for k, v in self.get_global_des_vars_by_comp().iteritems(): try: result[k].extend(v) except KeyError: result[k] = v return result def get_coupling_indeps_by_comp(self): """Returns a dictionary of coupling var independent parameter objects, keyed to the component they are part of.""" result = {} for couple in self.list_coupling_vars().itervalues(): comp = couple.indep.get_referenced_compnames().pop() try: 
result[comp].append(couple) except KeyError: result[comp] = [couple] return result def get_coupling_deps_by_comp(self): """Returns a dictionary of coupling var dependents keyed to the component they are part of.""" result = {} for couple in self.list_coupling_vars().itervalues(): comp = couple.dep.get_referenced_compnames().pop() try: result[comp].append(couple) except KeyError: result[comp] = [couple] return result def get_constraints_by_comp(self): result = {} for const in self.get_constraints().itervalues(): comps = const.get_referenced_compnames() for comp in comps: try: result[comp].append(const) except: result[comp] = [const, ] return result def list_pseudocomps(self): """Return a list of names of pseudocomps resulting from our objectives, and constraints. """ pcomps = [] if hasattr(self, '_delegates_'): for name in self._delegates_: delegate = getattr(self, name) if hasattr(delegate, 'list_pseudocomps'): pcomps.extend(delegate.list_pseudocomps()) return pcomps
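# Hedged sketch of how the architecture slot above is typically used: assign
# an IArchitecture implementation and let setup/check_config configure it.
# MDF is one of the architectures shipped in openmdao.lib.architectures; the
# import path is an assumption.
#
#   from openmdao.lib.architectures.api import MDF
#   asm = ArchitectureAssembly()
#   asm.architecture = MDF()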
class Driver(Component): """ A Driver iterates over a workflow of Components until some condition is met. """ implements(IDriver, IHasEvents) # set factory here so we see a default value in the docs, even # though we replace it with a new Dataflow in __init__ workflow = Slot(Workflow, allow_none=True, required=True, factory=Dataflow, hidden=True) gradient_options = VarTree(GradientOptions(), iotype='in', framework_var=True) # flag to determine partitioning of our workflow's System system_type = Enum('auto', ['auto', 'serial', 'parallel'], desc="Determines the partitioning of this driver's " "workflow components into Systems. Default is " "'auto', where a hierarchy of serial and parallel " "systems is automatically determined. 'serial' " "and 'parallel' may be specified to force the" "workflow components into a single serial or " "parallel System. Note that when not running " "under MPI, this option is ignored and the " "resulting System will always be serial.", framework_var=True) def __init__(self): self._iter = None super(Driver, self).__init__() self.workflow = Dataflow(self) self._required_compnames = None self._reduced_graph = None # clean up unwanted trait from Component self.remove_trait('missing_deriv_policy') def __deepcopy__(self, memo): """For some reason `missing_deriv_policy` gets resurrected.""" result = super(Driver, self).__deepcopy__(memo) result.remove_trait('missing_deriv_policy') return result def _workflow_changed(self, oldwf, newwf): """callback when new workflow is slotted""" if newwf is not None: newwf.parent = self def requires_derivs(self): return False def get_expr_scope(self): """Return the scope to be used to evaluate ExprEvaluators.""" return self.parent def _collapse_subdrivers(self, g): """collapse subdriver iteration sets into single nodes.""" # collapse all subdrivers in our graph itercomps = {} itercomps['#parent'] = self.workflow.get_names(full=True) for child_drv in self.subdrivers(recurse=False): itercomps[child_drv.name] = [ c.name for c in child_drv.iteration_set() ] for child_drv in self.subdrivers(recurse=False): excludes = set() for name, comps in itercomps.items(): if name != child_drv.name: for cname in comps: if cname not in itercomps[child_drv.name]: excludes.add(cname) collapse_driver(g, child_drv, excludes) # now remove any comps that are shared by subdrivers but are not found # in our workflow to_remove = set() for name, comps in itercomps.items(): if name != '#parent': for comp in comps: if comp not in itercomps['#parent']: to_remove.add(comp) g.remove_nodes_from(to_remove) def get_depgraph(self): return self.parent._depgraph # May change this to use a smaller graph later def get_reduced_graph(self): if self._reduced_graph is None: parent_graph = self.parent.get_reduced_graph() # copy parent graph g = parent_graph.subgraph(parent_graph.nodes_iter()) nodes = set([c.name for c in self.workflow]) g.collapse_subdrivers(nodes, self.workflow.subdrivers()) nodes.add(self.name) g = g.full_subgraph(nodes) nodes.remove(self.name) # create fake edges to/from the driver and each of its # components so we can get everything that's relevant # by getting all nodes that are strongly connected to the # driver in the graph. 
to_add = [] for name in nodes: if not g.has_edge(self.name, name): to_add.append((self.name, name)) if not g.has_edge(name, self.name): to_add.append((name, self.name)) g.add_edges_from(to_add) comps = [] for comps in strongly_connected_components(g): if self.name in comps: break g.remove_edges_from(to_add) self._reduced_graph = g.subgraph(comps) return self._reduced_graph def check_config(self, strict=False): """Verify that our workflow is able to resolve all of its components.""" # workflow will raise an exception if it can't resolve a Component super(Driver, self).check_config(strict=strict) self.workflow.check_config(strict=strict) @rbac(('owner', 'user')) def get_itername(self): """Return current 'iteration coordinates'.""" if self.parent._top_driver is self: return self.parent.get_itername() return self.itername def iteration_set(self, solver_only=False): """Return a set of all Components in our workflow and recursively in any workflow in any Driver in our workflow. solver_only: Bool Only recurse into solver drivers. These are the only kinds of drivers whose derivatives get absorbed into the parent driver's graph. """ allcomps = set() for child in self.workflow: allcomps.add(child) if has_interface(child, IDriver): if solver_only and not has_interface(child, ISolver): continue allcomps.update(child.iteration_set()) return allcomps @rbac(('owner', 'user')) def get_expr_depends(self): """Returns a list of tuples of the form (src_comp_name, dest_comp_name) for each dependency introduced by any ExprEvaluators in this Driver, ignoring any dependencies on components that are inside of this Driver's iteration set. """ iternames = set([c.name for c in self.iteration_set()]) deps = [] for src, dest in super(Driver, self).get_expr_depends(): if src not in iternames and dest not in iternames: deps.add((src, dest)) return list(deps) @rbac(('owner', 'user')) def get_expr_var_depends(self, recurse=True): """Returns a tuple of sets of the form (src_set, dest_set) containing all dependencies introduced by any parameters, objectives, or constraints in this Driver. If recurse is True, include any refs from subdrivers. """ srcset = set() destset = set() if hasattr(self, '_delegates_'): for dname in self._delegates_: delegate = getattr(self, dname) if isinstance(delegate, HasParameters): destset.update(delegate.get_referenced_varpaths(refs=True)) elif isinstance( delegate, (HasConstraints, HasEqConstraints, HasIneqConstraints)): srcset.update(delegate.list_constraint_targets()) elif isinstance(delegate, (HasObjective, HasObjectives)): srcset.update(delegate.list_objective_targets()) if recurse: for sub in self.subdrivers(recurse=True): srcs, dests = sub.get_expr_var_depends(recurse=True) srcset.update(srcs) destset.update(dests) return srcset, destset @rbac(('owner', 'user')) def subdrivers(self, recurse=False): """Returns a generator of all subdrivers contained in this driver's workflow. If recurse is True, include all subdrivers in our entire iteration set. """ if recurse: itercomps = self.iteration_set() else: itercomps = list(self.workflow) for comp in itercomps: if has_interface(comp, IDriver): yield comp def _get_required_compnames(self): """Returns a set of names of components that are required by this Driver in order to evaluate parameters, objectives and constraints. This list will include any intermediate components in the data flow between components referenced by parameters and those referenced by objectives and/or constraints. 
""" if self._required_compnames is None: # call base class version of get_expr_depends so we don't filter out # comps in our iterset. We want required names to be everything between # and including comps that we reference in any parameter, objective, or # constraint. conns = super(Driver, self).get_expr_depends() getcomps = set([u for u, v in conns if u != self.name]) setcomps = set([v for u, v in conns if v != self.name]) full = set(setcomps) full.update(getcomps) full.update(self.list_pseudocomps()) compgraph = self.get_depgraph().component_graph() for end in getcomps: for start in setcomps: full.update(find_all_connecting(compgraph, start, end)) self._required_compnames = full return self._required_compnames @rbac(('owner', 'user')) def list_pseudocomps(self): """Return a list of names of pseudocomps resulting from our objectives, and constraints. """ pcomps = [] if hasattr(self, '_delegates_'): for name in self._delegates_: delegate = getattr(self, name) if hasattr(delegate, 'list_pseudocomps'): pcomps.extend(delegate.list_pseudocomps()) return pcomps def get_references(self, name): """Return a dict of parameter, constraint, and objective references to component `name` in preparation for subsequent :meth:`restore_references` call. name: string Name of component being referenced. """ refs = {} if hasattr(self, '_delegates_'): for dname in self._delegates_: inst = getattr(self, dname) if isinstance(inst, (HasParameters, HasConstraints, HasEqConstraints, HasIneqConstraints, HasObjective, HasObjectives, HasResponses)): refs[inst] = inst.get_references(name) return refs def remove_references(self, name): """Remove parameter, constraint, objective and workflow references to component `name`. name: string Name of component being removed. """ if hasattr(self, '_delegates_'): for dname in self._delegates_: inst = getattr(self, dname) if isinstance(inst, (HasParameters, HasConstraints, HasEqConstraints, HasIneqConstraints, HasObjective, HasObjectives, HasResponses)): inst.remove_references(name) self.workflow.remove(name) def restore_references(self, refs): """Restore parameter, constraint, and objective references to component `name` from `refs`. refs: object Value returned by :meth:`get_references`. """ for inst, inst_refs in refs.items(): inst.restore_references(inst_refs) @rbac('*', 'owner') def run(self, force=False, case_uuid=''): """Run this object. This should include fetching input variables if necessary, executing, and updating output variables. Do not override this function. force: bool If True, force component to execute even if inputs have not changed. (Default is False) case_uuid: str Identifier for the Case that is associated with this run. """ # (Re)configure parameters. if hasattr(self, 'config_parameters'): self.config_parameters() # force param pseudocomps to get updated values to start self.update_parameters() # Reset the workflow. self.workflow.reset() super(Driver, self).run(case_uuid) @rbac(('owner', 'user')) def configure_recording(self, recording_options=None): """Called at start of top-level run to configure case recording. 
Returns set of paths for changing inputs.""" return self.workflow.configure_recording(recording_options) def update_parameters(self): if hasattr(self, 'get_parameters'): params = self.get_parameters() for param in params.values(): param.initialize(self.get_expr_scope(), self) if 'u' in self.workflow._system.vec: self.workflow._system.vec['u'].set_to_scope( self.parent, params.keys()) def execute(self): """ Iterate over a workflow of Components until some condition is met. If you don't want to structure your driver to use *pre_iteration*, *post_iteration*, etc., just override this function. As a result, none of the ``<start/pre/post/continue>_iteration()`` functions will be called. """ self._iter = None self.start_iteration() while self.continue_iteration(): self.pre_iteration() self.run_iteration() self.post_iteration() self.end_iteration() def stop(self): """Stop the workflow.""" self._stop = True self.workflow.stop() def start_iteration(self): """Called just prior to the beginning of an iteration loop. This can be overridden by inherited classes. It can be used to perform any necessary pre-iteration initialization. """ self._continue = True def end_iteration(self): """Called at the end of the iteraton loop. Override this in inherited classes to perform some action after iteration is complete. """ pass def continue_iteration(self): """Return False to stop iterating.""" return self._continue def pre_iteration(self): """Called prior to each iteration. This is where iteration events are set.""" self.set_events() def run_iteration(self): """Runs workflow.""" wf = self.workflow if len(wf) == 0: self._logger.warning("'%s': workflow is empty!" % self.get_pathname()) wf.run() def calc_derivatives(self, first=False, second=False): """ Calculate derivatives and save baseline states for all components in this workflow.""" self.workflow.calc_derivatives(first, second) def post_iteration(self): """Called after each iteration.""" self._continue = False # by default, stop after one iteration def config_changed(self, update_parent=True): """Call this whenever the configuration of this Component changes, for example, children are added or removed or dependencies may have changed. """ super(Driver, self).config_changed(update_parent) self._required_compnames = None self._depgraph = None if self.workflow is not None: self.workflow.config_changed() def _get_param_constraint_pairs(self): """Returns a list of tuples of the form (param, constraint).""" pairs = [] if hasattr(self, 'list_param_group_targets'): pgroups = self.list_param_group_targets() for key, cnst in self.get_eq_constraints().iteritems(): for params in pgroups: if params[0] == cnst.rhs.text: pairs.append((params[0], cnst.pcomp_name + '.out0')) elif params[0] == cnst.lhs.text: pairs.append((params[0], cnst.pcomp_name + '.out0')) return pairs @rbac(('owner', 'user')) def setup_systems(self): """Set up system trees from here down to all of our child Components. """ if self.name in self.parent._reduced_graph: self._system = self.parent._reduced_graph.node[self.name]['system'] self.workflow.setup_systems(self.system_type) #### MPI related methods #### @rbac(('owner', 'user')) def get_req_cpus(self): """Return requested_cpus.""" return self.workflow.get_req_cpus() def setup_communicators(self, comm): """Allocate communicators from here down to all of our child Components. 
""" self.workflow.setup_communicators(comm) def setup_scatters(self): self.workflow.setup_scatters() @rbac(('owner', 'user')) def get_full_nodeset(self): """Return the full set of nodes in the depgraph belonging to this driver (includes full iteration set). """ names = super(Driver, self).get_full_nodeset() names.update(self.workflow.get_full_nodeset()) return names def calc_gradient(self, inputs=None, outputs=None, mode='auto', return_format='array', force_regen=True): """Returns the Jacobian of derivatives between inputs and outputs. inputs: list of strings List of OpenMDAO inputs to take derivatives with respect to. outputs: list of strings Lis of OpenMDAO outputs to take derivatives of. mode: string in ['forward', 'adjoint', 'auto', 'fd'] Mode for gradient calculation. Set to 'auto' to let OpenMDAO choose forward or adjoint based on problem dimensions. Set to 'fd' to finite difference the entire workflow. return_format: string in ['array', 'dict'] Format for return value. Default is array, but some optimizers may want a dictionary instead. force_regen: boolean Set to True to force a regeneration of the system hierarchy. This is set to True because this function is meant for manual testing. """ return self.workflow.calc_gradient(inputs=inputs, outputs=outputs, mode=mode, return_format=return_format, force_regen=force_regen) @rbac(('owner', 'user')) def setup_depgraph(self, dgraph): self._reduced_graph = None # # add connections for params, constraints, etc. # pass if self.workflow._calc_gradient_inputs is not None: for param in self.workflow._calc_gradient_inputs: dgraph.add_param(self.name, param) else: # add connections for our params/constraints/objectives # if hasattr(self, 'list_param_group_targets'): # params = self.list_param_group_targets() # for now do nothing here because params are already # in the depgraph pass # add connections for calc gradient outputs if self.workflow._calc_gradient_outputs is not None: for vname in self.workflow._calc_gradient_outputs: dgraph.add_driver_input(self.name, vname) else: pass # for now, do nothing @rbac(('owner', 'user')) def pre_setup(self): self._reduced_graph = None self.workflow.pre_setup()
class Driver(Component):
    """ A Driver iterates over a workflow of Components until some condition
    is met. """

    implements(IDriver, IHasEvents)

    # set factory here so we see a default value in the docs, even
    # though we replace it with a new Dataflow in __init__
    workflow = Slot(Workflow, allow_none=True, required=True,
                    factory=Dataflow, hidden=True)

    gradient_options = VarTree(GradientOptions(), iotype='in',
                               framework_var=True)

    def __init__(self):
        self._iter = None
        super(Driver, self).__init__()

        self.workflow = Dataflow(self)
        self._required_compnames = None

        # clean up unwanted trait from Component
        self.remove_trait('missing_deriv_policy')

    def __deepcopy__(self, memo):
        """For some reason `missing_deriv_policy` gets resurrected."""
        result = super(Driver, self).__deepcopy__(memo)
        result.remove_trait('missing_deriv_policy')
        return result

    def _workflow_changed(self, oldwf, newwf):
        """Callback when a new workflow is slotted."""
        if newwf is not None:
            newwf.parent = self

    def get_expr_scope(self):
        """Return the scope to be used to evaluate ExprEvaluators."""
        return self.parent

    def check_config(self, strict=False):
        """Verify that our workflow is able to resolve all of its
        components."""

        # workflow will raise an exception if it can't resolve a Component
        super(Driver, self).check_config(strict=strict)
        self.workflow.check_config(strict=strict)

    def iteration_set(self, solver_only=False):
        """Return a set of all Components in our workflow and recursively in
        any workflow in any Driver in our workflow.

        solver_only: Bool
            Only recurse into solver drivers. These are the only kinds of
            drivers whose derivatives get absorbed into the parent driver's
            graph.
        """
        allcomps = set()
        for child in self.workflow.get_components(full=True):
            allcomps.add(child)
            if has_interface(child, IDriver):
                if solver_only and not has_interface(child, ISolver):
                    continue
                allcomps.update(child.iteration_set())
        return allcomps

    @rbac(('owner', 'user'))
    def get_expr_depends(self):
        """Returns a list of tuples of the form (src_comp_name,
        dest_comp_name) for each dependency introduced by any ExprEvaluators
        in this Driver, ignoring any dependencies on components that are
        inside of this Driver's iteration set.
        """
        iternames = set([c.name for c in self.iteration_set()])
        conn_list = super(Driver, self).get_expr_depends()
        new_list = []
        for src, dest in conn_list:
            if src not in iternames and dest not in iternames:
                new_list.append((src, dest))
        return new_list

    @rbac(('owner', 'user'))
    def get_expr_var_depends(self, recurse=True):
        """Returns a tuple of sets of the form (src_set, dest_set)
        containing all dependencies introduced by any parameters,
        objectives, or constraints in this Driver. If recurse is True,
        include any refs from subdrivers.
        """
        srcset = set()
        destset = set()
        if hasattr(self, '_delegates_'):
            for dname in self._delegates_:
                delegate = getattr(self, dname)
                if isinstance(delegate, HasParameters):
                    destset.update(delegate.get_referenced_varpaths())
                elif isinstance(delegate, (HasConstraints, HasEqConstraints,
                                           HasIneqConstraints, HasObjective,
                                           HasObjectives)):
                    srcset.update(delegate.get_referenced_varpaths())

        if recurse:
            for sub in self.subdrivers():
                srcs, dests = sub.get_expr_var_depends(recurse)
                srcset.update(srcs)
                destset.update(dests)

        return srcset, destset

    @rbac(('owner', 'user'))
    def subdrivers(self):
        """Returns a generator of the subdrivers of this driver."""
        for d in self.iteration_set():
            if has_interface(d, IDriver):
                yield d

    def _get_required_compnames(self):
        """Returns a set of names of components that are required by this
        Driver in order to evaluate parameters, objectives, and constraints.
        This list will include any intermediate components in the data flow
        between components referenced by parameters and those referenced by
        objectives and/or constraints.
        """
        if self._required_compnames is None:
            conns = super(Driver, self).get_expr_depends()
            getcomps = set([u for u, v in conns if u != self.name])
            setcomps = set([v for u, v in conns if v != self.name])

            full = set(setcomps)
            full.update(getcomps)
            full.update(self.list_pseudocomps())

            compgraph = self.parent._depgraph.component_graph()

            for end in getcomps:
                for start in setcomps:
                    full.update(find_all_connecting(compgraph, start, end))

            self._required_compnames = full

        return self._required_compnames

    @rbac(('owner', 'user'))
    def list_pseudocomps(self):
        """Return a list of names of pseudocomps resulting from our
        objectives and constraints.
        """
        pcomps = []
        if hasattr(self, '_delegates_'):
            for name in self._delegates_:
                delegate = getattr(self, name)
                if hasattr(delegate, 'list_pseudocomps'):
                    pcomps.extend(delegate.list_pseudocomps())
        return pcomps

    def get_references(self, name):
        """Return a dict of parameter, constraint, and objective references
        to component `name` in preparation for a subsequent
        :meth:`restore_references` call.

        name: string
            Name of the component being referenced.
        """
        refs = {}
        if hasattr(self, '_delegates_'):
            for dname in self._delegates_:
                inst = getattr(self, dname)
                if isinstance(inst, (HasParameters, HasConstraints,
                                     HasEqConstraints, HasIneqConstraints,
                                     HasObjective, HasObjectives)):
                    refs[inst] = inst.get_references(name)
        return refs

    def remove_references(self, name):
        """Remove parameter, constraint, objective, and workflow references
        to component `name`.

        name: string
            Name of the component being removed.
        """
        if hasattr(self, '_delegates_'):
            for dname in self._delegates_:
                inst = getattr(self, dname)
                if isinstance(inst, (HasParameters, HasConstraints,
                                     HasEqConstraints, HasIneqConstraints,
                                     HasObjective, HasObjectives)):
                    inst.remove_references(name)
        self.workflow.remove(name)

    def restore_references(self, refs):
        """Restore parameter, constraint, and objective references to
        component `name` from `refs`.

        refs: object
            Value returned by :meth:`get_references`.
        """
        for inst, inst_refs in refs.items():
            inst.restore_references(inst_refs)

    @rbac('*', 'owner')
    def run(self, force=False, ffd_order=0, case_uuid=''):
        """Run this object. This should include fetching input variables if
        necessary, executing, and updating output variables. Do not override
        this function.

        force: bool
            If True, force the component to execute even if inputs have not
            changed. (Default is False)

        ffd_order: int
            Order of the derivatives to be used when finite differencing
            (1 for first derivatives, 2 for second derivatives). During
            regular execution, ffd_order should be 0. (Default is 0)

        case_uuid: str
            Identifier for the Case that is associated with this run.
        """

        # (Re)configure parameters.
        if hasattr(self, 'config_parameters'):
            self.config_parameters()

        # force param pseudocomps to get updated values to start
        # KTM1 - probably don't need this anymore
        self.update_parameters()

        # Reset the workflow.
        self.workflow.reset()
        super(Driver, self).run(ffd_order, case_uuid)

    @rbac(('owner', 'user'))
    def configure_recording(self, includes, excludes):
        """Called at the start of a top-level run to configure case
        recording. Returns the set of paths for changing inputs."""
        return self.workflow.configure_recording(includes, excludes)

    def update_parameters(self):
        if hasattr(self, 'get_parameters'):
            for param in self.get_parameters().values():
                param.initialize(self.get_expr_scope())

    def execute(self):
        """ Iterate over a workflow of Components until some condition
        is met. If you don't want to structure your driver to use
        *pre_iteration*, *post_iteration*, etc., just override this
        function. As a result, none of the
        ``<start/pre/post/continue>_iteration()`` functions will be called.
        """
        self._iter = None
        self.start_iteration()
        while self.continue_iteration():
            self.pre_iteration()
            self.run_iteration()
            self.post_iteration()

    def stop(self):
        """Stop the workflow."""
        self._stop = True
        self.workflow.stop()

    def start_iteration(self):
        """Called just prior to the beginning of an iteration loop. This can
        be overridden by inherited classes. It can be used to perform any
        necessary pre-iteration initialization.
        """
        self._continue = True

    def continue_iteration(self):
        """Return False to stop iterating."""
        return self._continue

    def pre_iteration(self):
        """Called prior to each iteration.
        This is where iteration events are set."""
        self.set_events()

    def run_iteration(self):
        """Runs the workflow."""
        wf = self.workflow
        if len(wf) == 0:
            self._logger.warning("'%s': workflow is empty!"
                                 % self.get_pathname())
        wf.run(ffd_order=self.ffd_order)

    def calc_derivatives(self, first=False, second=False, savebase=False,
                         required_inputs=None, required_outputs=None):
        """Calculate derivatives and save baseline states for all components
        in this workflow."""
        self.workflow.calc_derivatives(first, second, savebase,
                                       required_inputs, required_outputs)

    def calc_gradient(self, inputs=None, outputs=None):
        """Returns the gradient of the passed outputs with respect to all
        passed inputs. The basic driver behavior is to call calc_gradient on
        its workflow. However, some drivers (optimizers in particular) may
        want to define their own behavior.
        """
        return self.workflow.calc_gradient(inputs, outputs, upscope=True)

    def post_iteration(self):
        """Called after each iteration."""
        self._continue = False  # by default, stop after one iteration

    def config_changed(self, update_parent=True):
        """Call this whenever the configuration of this Component changes,
        for example, when children are added or removed or dependencies may
        have changed.
        """
        super(Driver, self).config_changed(update_parent)
        self._required_compnames = None
        if self.workflow is not None:
            self.workflow.config_changed()

    def get_workflow(self):
        """ Get the driver info and the list of components that make up the
        driver's workflow; recurse on nested drivers.
        """
        from openmdao.main.assembly import Assembly
        ret = {}
        ret['pathname'] = self.get_pathname()
        ret['type'] = type(self).__module__ + '.' + type(self).__name__
        ret['workflow'] = []
        comps = [comp for comp in self.workflow]
        for comp in comps:

            # Skip pseudo-comps
            if hasattr(comp, '_pseudo_type'):
                continue

            pathname = comp.get_pathname()
            if is_instance(comp, Assembly) and comp.driver:
                inames = [cls.__name__
                          for cls in list(implementedBy(comp.__class__))]
                ret['workflow'].append({
                    'pathname': pathname,
                    'type': type(comp).__module__ + '.' + type(comp).__name__,
                    'interfaces': inames,
                    'driver': comp.driver.get_workflow(),
                })
            elif is_instance(comp, Driver):
                ret['workflow'].append(comp.get_workflow())
            else:
                inames = [cls.__name__
                          for cls in list(implementedBy(comp.__class__))]
                ret['workflow'].append({
                    'pathname': pathname,
                    'type': type(comp).__module__ + '.' + type(comp).__name__,
                    'interfaces': inames,
                })

        return ret
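
# ---------------------------------------------------------------------------
# Hedged sketch (not part of the library source above): a minimal Driver
# subclass illustrating how the <start/pre/post/continue>_iteration() hooks
# used by Driver.execute() fit together. The class name 'FixedCountDriver'
# and the 'max_iterations' trait are hypothetical; only the hook methods and
# their calling order come from the Driver code above.
# ---------------------------------------------------------------------------
from openmdao.main.api import Driver
from openmdao.main.datatypes.api import Int


class FixedCountDriver(Driver):
    """Runs its workflow a fixed number of times instead of just once."""

    max_iterations = Int(3, iotype='in', desc='number of workflow passes')

    def start_iteration(self):
        """Reset the pass counter before the loop in execute() begins."""
        super(FixedCountDriver, self).start_iteration()
        self._count = 0

    def continue_iteration(self):
        """Keep iterating until max_iterations passes have completed."""
        return self._count < self.max_iterations

    def post_iteration(self):
        """Count a completed pass; the base class would stop after one."""
        self._count += 1

# Typical wiring (hypothetical): top.replace('driver', FixedCountDriver()),
# then top.driver.workflow.add('comp') as with the default Dataflow driver.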