def get_declared_identifiers(composition_list):
    """Collect the valid-identifier form of every name declared in a list
    of composition dictionaries.

    Includes each composition's own name, the name of every node it
    contains, and (recursively) everything declared inside nested
    compositions. Returns a set of identifier strings.
    """
    declared = set()

    for comp_spec in composition_list:
        declared.add(parse_valid_identifier(comp_spec['name']))

        for node_name, node_spec in comp_spec[MODEL_SPEC_ID_NODES].items():
            # descend into nested compositions before recording the
            # node's own name
            if MODEL_SPEC_ID_COMPOSITION in node_spec:
                declared |= get_declared_identifiers(
                    node_spec[MODEL_SPEC_ID_COMPOSITION]
                )
            declared.add(parse_valid_identifier(node_name))

    return declared
def _parse_condition_arg_value(value):
    """Render a single Condition argument as python source text.

    Preference order: a known component identifier, then a psyneulink
    keyword string, then the plain str() of the value.

    NOTE(review): reads ``component_identifiers`` from enclosing scope —
    this looks like a closure extracted from a surrounding function;
    confirm it is defined where this is used.
    """
    keyword_form = parse_string_to_psyneulink_object_string(value)

    try:
        identifier_form = parse_valid_identifier(value)
    except TypeError:
        # value is not a string-like name
        identifier_form = None

    # a reference to an already-declared component wins
    if identifier_form in component_identifiers:
        return identifier_form

    if keyword_form is not None:
        return f'psyneulink.{keyword_form}'

    return str(value)
def _generate_scheduler_string( scheduler_id, scheduler_dict, component_identifiers, blacklist=[] ): output = [] for node, condition in scheduler_dict['node_specific'].items(): if node not in blacklist: output.append( '{0}.add_condition({1}, {2})'.format( scheduler_id, parse_valid_identifier(node), _generate_condition_string( condition, component_identifiers ) ) ) output.append('') termination_str = [] for scale, cond in scheduler_dict['termination'].items(): termination_str.insert( 1, 'psyneulink.{0}: {1}'.format( f'TimeScale.{str.upper(scale)}', _generate_condition_string(cond, component_identifiers) ) ) output.append( '{0}.termination_conds = {{{1}}}'.format( scheduler_id, ', '.join(termination_str) ) ) return '\n'.join(output)
def _generate_composition_string(composition_list, component_identifiers):
    """Generate the python source that reconstructs each Composition in
    *composition_list*: the composition assignment itself, its mechanisms,
    nested compositions, ``add_node``/``add_projection``/``add_controller``
    calls, and scheduler condition assignments.

    Mutates *composition_dict* entries in place (moves pnl-specific
    nodes/projections into the main dicts and deletes the originals) and
    records created identifiers in *component_identifiers*.

    Returns the generated lines joined with newlines.
    """
    # used if no generic types are specified
    default_composition_type = psyneulink.Composition
    default_node_type = psyneulink.ProcessingMechanism
    default_edge_type = psyneulink.MappingProjection

    control_mechanism_types = (psyneulink.ControlMechanism, )
    # these are not actively added to a Composition
    implicit_types = (
        psyneulink.ObjectiveMechanism,
        psyneulink.ControlProjection,
        psyneulink.AutoAssociativeProjection)

    output = []

    # may be given multiple compositions
    for composition_dict in composition_list:
        try:
            comp_type = _parse_component_type(composition_dict)
        except KeyError:
            comp_type = default_composition_type

        comp_name = composition_dict['name']
        comp_identifer = parse_valid_identifier(comp_name)

        # get order in which nodes were added
        # may be node names or dictionaries
        try:
            node_order = composition_dict[comp_type._model_spec_id_parameters][
                MODEL_SPEC_ID_PSYNEULINK]['node_ordering']
            # map valid-identifier node name -> its position in the
            # declared ordering
            node_order = {
                parse_valid_identifier(node['name'])
                if isinstance(node, dict)
                else parse_valid_identifier(node): node_order.index(node)
                for node in node_order
            }
            # the ordering must cover every node or it is unusable
            assert all([
                (parse_valid_identifier(node) in node_order)
                for node in composition_dict[MODEL_SPEC_ID_NODES]
            ])
        except (KeyError, TypeError, AssertionError):
            # if no node_ordering attribute exists, fall back to
            # alphabetical order
            alphabetical = enumerate(
                sorted(composition_dict[MODEL_SPEC_ID_NODES]))
            node_order = {
                parse_valid_identifier(item[1]): item[0]
                for item in alphabetical
            }

        # clean up pnl-specific and other software-specific items
        pnl_specific_items = {}
        keys_to_delete = []

        for name, node in composition_dict[MODEL_SPEC_ID_NODES].items():
            try:
                _parse_component_type(node)
            except KeyError:
                # will use a default type
                pass
            except PNLJSONError:
                # node isn't a node dictionary, but a dict of dicts,
                # indicating a software-specific set of nodes or
                # a composition
                if name == MODEL_SPEC_ID_PSYNEULINK:
                    pnl_specific_items = node

                if MODEL_SPEC_ID_COMPOSITION not in node:
                    keys_to_delete.append(name)

        # NOTE(review): iterating a dict yields its keys, so
        # ``nodes_dict`` here would be a string unless
        # ``pnl_specific_items`` is actually a list of dicts — confirm
        # the expected shape of the pnl-specific nodes entry
        for nodes_dict in pnl_specific_items:
            for name, node in nodes_dict.items():
                composition_dict[MODEL_SPEC_ID_NODES][name] = node

        for name_to_delete in keys_to_delete:
            del composition_dict[MODEL_SPEC_ID_NODES][name_to_delete]

        # same cleanup for projections
        pnl_specific_items = {}
        keys_to_delete = []
        for name, edge in composition_dict[MODEL_SPEC_ID_PROJECTIONS].items():
            try:
                _parse_component_type(edge)
            except KeyError:
                # will use a default type
                pass
            except PNLJSONError:
                if name == MODEL_SPEC_ID_PSYNEULINK:
                    pnl_specific_items = edge

                keys_to_delete.append(name)

        for name, edge in pnl_specific_items.items():
            # exclude CIM projections because they are automatically
            # generated
            if (
                edge[MODEL_SPEC_ID_SENDER_MECH] != comp_name
                and edge[MODEL_SPEC_ID_RECEIVER_MECH] != comp_name
            ):
                composition_dict[MODEL_SPEC_ID_PROJECTIONS][name] = edge

        for name_to_delete in keys_to_delete:
            del composition_dict[MODEL_SPEC_ID_PROJECTIONS][name_to_delete]

        # generate string for Composition itself
        output.append(
            "{0} = {1}\n".format(
                comp_identifer,
                _generate_component_string(
                    composition_dict,
                    component_identifiers,
                    default_type=default_composition_type)))
        component_identifiers[comp_identifer] = True

        mechanisms = []
        compositions = []
        control_mechanisms = []
        implicit_mechanisms = []

        # add nested compositions and mechanisms in order they were added
        # to this composition
        for name, node in sorted(
            composition_dict[MODEL_SPEC_ID_NODES].items(),
            key=lambda item: node_order[parse_valid_identifier(item[0])]
        ):
            if MODEL_SPEC_ID_COMPOSITION in node:
                compositions.append(node[MODEL_SPEC_ID_COMPOSITION])
            else:
                try:
                    component_type = _parse_component_type(node)
                except KeyError:
                    component_type = default_node_type
                identifier = parse_valid_identifier(name)

                # bucket nodes: control mechanisms and implicit types are
                # emitted/handled separately from ordinary mechanisms
                if issubclass(component_type, control_mechanism_types):
                    control_mechanisms.append(node)
                    component_identifiers[identifier] = True
                elif issubclass(component_type, implicit_types):
                    implicit_mechanisms.append(node)
                else:
                    mechanisms.append(node)
                    component_identifiers[identifier] = True

        # names that must not be added/connected explicitly below
        implicit_names = [
            x['name']
            for x in implicit_mechanisms + control_mechanisms
        ]

        for mech in mechanisms:
            output.append(
                _generate_component_string(
                    mech,
                    component_identifiers,
                    assignment=True,
                    default_type=default_node_type))
        if len(mechanisms) > 0:
            output.append('')

        for mech in control_mechanisms:
            output.append(
                _generate_component_string(
                    mech,
                    component_identifiers,
                    assignment=True,
                    default_type=default_node_type))
        if len(control_mechanisms) > 0:
            output.append('')

        # recursively generate string for inner Compositions
        for comp in compositions:
            output.append(
                _generate_composition_string(comp, component_identifiers))
        if len(compositions) > 0:
            output.append('')

        # generate string to add the nodes to this Composition
        try:
            node_roles = {
                parse_valid_identifier(node): role
                for (node, role) in composition_dict[
                    comp_type._model_spec_id_parameters
                ][MODEL_SPEC_ID_PSYNEULINK]['required_node_roles']
            }
        except KeyError:
            # NOTE(review): empty list, not dict — only used with ``in``
            # below, so it behaves the same, but the type differs
            node_roles = []

        # do not add the controller as a normal node
        try:
            controller_name = composition_dict['controller']['name']
        except TypeError:
            # controller spec is a plain name, not a dict
            controller_name = composition_dict['controller']
        except KeyError:
            controller_name = None

        for name in sorted(
            composition_dict[MODEL_SPEC_ID_NODES],
            key=lambda item: node_order[parse_valid_identifier(item)]
        ):
            if (
                name not in implicit_names
                and name != controller_name
            ):
                name = parse_valid_identifier(name)
                output.append('{0}.add_node({1}{2})'.format(
                    comp_identifer,
                    name,
                    ', {0}'.format(
                        _parse_parameter_value(
                            node_roles[name],
                            component_identifiers))
                    if name in node_roles else ''))
        if len(composition_dict[MODEL_SPEC_ID_NODES]) > 0:
            output.append('')

        # generate string to add the projections
        for name, projection_dict in composition_dict[
            MODEL_SPEC_ID_PROJECTIONS
        ].items():
            try:
                projection_type = _parse_component_type(projection_dict)
            except KeyError:
                projection_type = default_edge_type

            # skip implicit projection types and any projection touching
            # an implicit node
            if (
                not issubclass(projection_type, implicit_types)
                and projection_dict[MODEL_SPEC_ID_SENDER_MECH]
                not in implicit_names
                and projection_dict[MODEL_SPEC_ID_RECEIVER_MECH]
                not in implicit_names
            ):
                output.append(
                    '{0}.add_projection(projection={1}, sender={2}, receiver={3})'
                    .format(
                        comp_identifer,
                        _generate_component_string(
                            projection_dict,
                            component_identifiers,
                            default_type=default_edge_type),
                        parse_valid_identifier(
                            projection_dict[MODEL_SPEC_ID_SENDER_MECH]),
                        parse_valid_identifier(
                            projection_dict[MODEL_SPEC_ID_RECEIVER_MECH]),
                    ))

        # add controller if it exists (must happen after projections)
        if controller_name is not None:
            output.append('{0}.add_controller({1})'.format(
                comp_identifer,
                parse_valid_identifier(controller_name)))

        # add schedulers
        try:
            schedulers = composition_dict[comp_type._model_spec_id_parameters][
                MODEL_SPEC_ID_PSYNEULINK]['schedulers']

            ContextFlags = psyneulink.core.globals.context.ContextFlags
            scheduler_attr_mappings = {
                str(ContextFlags.PROCESSING): 'scheduler',
                str(ContextFlags.LEARNING): 'scheduler_learning',
            }

            for phase, sched_dict in schedulers.items():
                try:
                    sched_attr = scheduler_attr_mappings[phase]
                except KeyError as e:
                    raise PNLJSONError(
                        f'Invalid scheduler phase in JSON: {phase}') from e

                # blacklist automatically generated nodes because they will
                # not exist in the script namespace
                output.append('')
                output.append(
                    _generate_scheduler_string(
                        f'{comp_identifer}.{sched_attr}',
                        sched_dict,
                        component_identifiers,
                        blacklist=implicit_names))
        except KeyError:
            # no schedulers specified for this composition
            pass

    return '\n'.join(output)
def _parse_parameter_value(value, component_identifiers=None): if component_identifiers is None: component_identifiers = {} exec('import numpy') if isinstance(value, list): value = [ _parse_parameter_value(x, component_identifiers) for x in value ] value = f"[{', '.join([str(x) for x in value])}]" elif isinstance(value, dict): if (MODEL_SPEC_ID_PARAMETER_SOURCE in value and MODEL_SPEC_ID_PARAMETER_VALUE in value): # handle ParameterPort spec try: value_type = eval(value[MODEL_SPEC_ID_TYPE]) except Exception as e: raise PNLJSONError( 'Invalid python type specified in JSON object: {0}'.format( value[MODEL_SPEC_ID_TYPE])) from e value = _parse_parameter_value( value[MODEL_SPEC_ID_PARAMETER_VALUE], component_identifiers) # handle tuples and numpy arrays, which both are dumped # as lists in JSON form if value_type is tuple: # convert list brackets to tuple brackets assert value[0] == '[' and value[-1] == ']' value = f'({value[1:-1]})' elif value_type is numpy.ndarray: value = f'{value[MODEL_SPEC_ID_TYPE]}({value})' else: # it is either a Component spec or just a plain dict try: # try handling as a Component spec identifier = parse_valid_identifier(value['name']) if (identifier in component_identifiers and component_identifiers[identifier]): # if this spec is already created as a node elsewhere, # then just use a reference value = identifier else: value = _generate_component_string(value, component_identifiers) except (PNLJSONError, KeyError): # standard dict handling value = '{{{0}}}'.format(', '.join([ '{0}: {1}'.format( str(_parse_parameter_value(k, component_identifiers)), str(_parse_parameter_value(v, component_identifiers))) for k, v in value.items() ])) elif isinstance(value, str): obj_string = parse_string_to_psyneulink_object_string(value) if obj_string is not None: return f'psyneulink.{obj_string}' # handle dill string try: dill_str = base64.decodebytes(bytes(value, 'utf-8')) dill.loads(dill_str) return f'dill.loads({dill_str})' except (binascii.Error, 
pickle.UnpicklingError, EOFError): pass # handle IO port specification match = re.match(r'(.+)\.(.+)_ports\.(.+)', value) if match is not None: comp_name, port_type, name = match.groups() comp_identifer = parse_valid_identifier(comp_name) if comp_identifer in component_identifiers: name_as_kw = parse_string_to_psyneulink_object_string(name) if name_as_kw is not None: name = f'psyneulink.{name_as_kw}' else: name = f"'{name}'" return f'{comp_identifer}.{port_type}_ports[{name}]' # if value is just a non-fixed component name, use the fixed name identifier = parse_valid_identifier(value) if identifier in component_identifiers: value = identifier evaluates = False try: eval(value) evaluates = True except (TypeError, NameError, SyntaxError): pass # handle generic string if (value not in component_identifiers # assume a string that contains a dot is a command, not a raw # string, this is definitely imperfect and can't handle the # legitimate case, but don't know how to distinguish.. and '.' not in value and not evaluates): value = f"'{value}'" return value