def _pytorch_fast_path_exec(self, *inputs, **kwargs): """ Builds a fast-path execution method for pytorch / eager. """ inputs = inputs[0] forward_inputs = [] for v in inputs: if v is not None: if isinstance(v, tuple): # Unitary tuples forward_inputs.append(v[0]) else: forward_inputs.append(v) result = self.network_obj.forward(*forward_inputs) # Problem: Not everything in the neural network stack is a true layer. for c in self.non_layer_components: result = getattr(c, "call")(*force_list(result)) return result
def build(self, root_component, input_spaces=None):
    """
    Builds the meta-graph by constructing op-record columns going into and coming out of all API-methods
    and graph_fns.

    Args:
        root_component (Component): Root component of the meta graph to build.
        input_spaces (Optional[Space]): Input spaces for all (exposed) API methods of the root-component.
            Keys are API-method input-parameter names; a missing dict is treated as empty.

    Returns:
        MetaGraph: The assembled meta-graph (root component, API mapping, op-record count).

    Raises:
        RLGraphError: If `input_spaces` contains a parameter name not defined by any
            of the root-component's API-methods.
    """
    # Time the meta-graph build:
    DataOpRecord.reset()
    time_start = time.perf_counter()
    api = {}
    # Sanity check input_spaces dict: every key must be a known API-method input parameter.
    if input_spaces is not None:
        for input_param_name in input_spaces.keys():
            if input_param_name not in root_component.api_method_inputs:
                raise RLGraphError(
                    "ERROR: `input_spaces` contains an input-parameter name ('{}') that's not defined in any of "
                    "the root-component's ('{}') API-methods, whose args are '{}'!"
                    .format(input_param_name, root_component.name, root_component.api_method_inputs))
    else:
        input_spaces = {}

    # Call all API methods of the core once and thereby, create empty in-op columns that serve as placeholders
    # and bi-directional links between ops (for the build time).
    for api_method_name, api_method_rec in root_component.api_methods.items():
        self.logger.debug("Building meta-graph of API-method '{}'.".format(api_method_name))
        # Create the loose list of in-op-records depending on signature and input-spaces given.
        # If an arg has a default value, its input-space does not have to be provided.
        in_ops_records = []
        # `use_named` flips to True once a positional arg is skipped (not in `input_spaces`)
        # or a **kwargs-style param was expanded; all later records must then be keyword-based.
        use_named = False
        for i, param_name in enumerate(api_method_rec.input_names):
            # Arg has a default of None (flex). If in input_spaces, arg will be provided.
            if root_component.api_method_inputs[param_name] == "flex":
                if param_name in input_spaces:
                    in_ops_records.append(DataOpRecord(
                        position=i, kwarg=param_name if use_named else None, placeholder=param_name))
                else:
                    use_named = True
            # Already defined (per default arg value (e.g. bool)): the registered input is a
            # concrete Space object rather than a "flex"/"*flex"/"**flex" marker string.
            elif isinstance(root_component.api_method_inputs[param_name], Space):
                if param_name in input_spaces:
                    in_ops_records.append(DataOpRecord(
                        position=i, kwarg=param_name if use_named else None, placeholder=param_name))
                else:
                    use_named = True
            # No default values -> Must be provided in `input_spaces`.
            else:
                # A var-positional param (*args): expand into one positional record per given space.
                if root_component.api_method_inputs[param_name] == "*flex":
                    # A *args param cannot follow a skipped positional arg.
                    assert use_named is False
                    if param_name in input_spaces:
                        for j in range(len(force_list(input_spaces[param_name]))):
                            in_ops_records.append(DataOpRecord(
                                position=i + j, placeholder=param_name + "[{}]".format(j)))
                # A keyword param (**kwargs): expand into one keyword record per (sorted) key.
                elif root_component.api_method_inputs[param_name] == "**flex":
                    if param_name in input_spaces:
                        assert use_named is False
                        # Sort keys for a deterministic record order.
                        for key in sorted(input_spaces[param_name].keys()):
                            in_ops_records.append(DataOpRecord(
                                kwarg=key, placeholder=param_name + "[{}]".format(key)))
                        use_named = True
                else:
                    # TODO: If space not provided in input_spaces -> Try to call this API method later (maybe another API-method).
                    assert param_name in input_spaces, \
                        "ERROR: arg-name '{}' not defined in input_spaces for root component '{}'!".format(
                            param_name, root_component.global_scope
                        )
                    in_ops_records.append(DataOpRecord(
                        position=i, kwarg=param_name if use_named else None, placeholder=param_name))

        # Do the actual core API-method call (thereby assembling the meta-graph).
        # Split the records into positional (kwarg is None) vs keyword arguments.
        args = [op_rec for op_rec in in_ops_records if op_rec.kwarg is None]
        kwargs = {op_rec.kwarg: op_rec for op_rec in in_ops_records if op_rec.kwarg is not None}
        getattr(api_method_rec.component, api_method_name)(*args, **kwargs)

        # Register core's interface: (in-op-records, out-op-records of the last out-column).
        api[api_method_name] = (in_ops_records, api_method_rec.out_op_columns[-1].op_records)

        # Tag very last out-op-records with is_terminal_op=True, so we know in the build process that we are done.
        for op_rec in api_method_rec.out_op_columns[-1].op_records:
            op_rec.is_terminal_op = True

    time_build = time.perf_counter() - time_start
    self.logger.info("Meta-graph build completed in {} s.".format(time_build))

    # Get some stats on the graph and report.
    # NOTE(review): assumes DataOpRecord._ID is a 0-based, monotonically increasing
    # class counter (reset above via DataOpRecord.reset()) — confirm in DataOpRecord.
    num_meta_ops = DataOpRecord._ID + 1
    self.logger.info("Meta-graph op-records generated: {}".format(num_meta_ops))

    return MetaGraph(root_component=root_component, api=api, num_ops=num_meta_ops, build_status=True)
def __init__(self, *layers, **kwargs):
    """
    A NeuralNetwork component: a Stack of Layer (and possibly non-Layer) Components
    exposing exactly one API-method, `call`.

    Args:
        *layers (Component): Same as `sub_components` argument of Stack. Can be used to add Layer Components
            (or any other Components) to this Network.

    Keyword Args:
        layers (Optional[list]): An optional list of Layer objects or spec-dicts to overwrite(!) *layers.
        inputs (Optional[List[Space]]): A list of Spaces or a single Space object defining the input spaces for
            the `call` method of this network. Must be provided, if more than one input arg are needed by
            `call` to determine the order in which these inputs will come in.
        outputs (Optional[List[NNCallOutput]]): A list or single output NNCallOutput object, indicating that
            we have to infer the `call` method from the graph given by these outputs. This is used iff a NN
            is constructed by the Keras-style functional API.
        num_inputs (Optional[int]): An optional number of inputs the `call` method will take as `*inputs`.
            If not given, NN will try to infer this value automatically.
        fold_time_rank (bool): Whether to overwrite the `fold_time_rank` option for the apply method.
            Only for auto-generated `call` method. Default: None.
        unfold_time_rank (bool): Whether to overwrite the `unfold_time_rank` option for the `call` method.
            Only for auto-generated `call` method. Default: None.
    """
    # In case layers come in via a spec dict -> push it into *layers.
    layers_args = kwargs.pop("layers", layers)
    # Add a default scope (if not given) and pass on via kwargs.
    kwargs["scope"] = kwargs.get("scope", "neural-network")

    # Keras-style functional-API wiring (both normalized to lists via force_list).
    self.keras_style_api_outputs = force_list(kwargs.pop("outputs", None))
    self.keras_style_api_inputs = force_list(kwargs.pop("inputs", []))

    # If Keras-style inputs are given, just count those, otherwise allow for `num_inputs` hint (default: 1).
    self.num_inputs = len(self.keras_style_api_inputs)
    if self.num_inputs == 0:
        self.num_inputs = kwargs.pop("num_inputs", 1)
    # NOTE(review): `min(..., 1)` caps num_outputs at 1 (0 when no Keras-style outputs
    # given) — looks intentional but confirm against callers expecting multi-output nets.
    self.num_outputs = min(len(self.keras_style_api_outputs), 1)

    # Force the only API-method to be `call`. No matter whether custom-API or auto-generated (via Stack).
    self.custom_call_given = True
    if not hasattr(self, "call"):
        # Automatically create the `call` stack.
        if "api_methods" not in kwargs:
            kwargs["api_methods"] = [dict(api="call_shadowed_", component_api="call")]
            self.custom_call_given = False
        # Sanity check `api_method` to contain only specifications on `call`.
        else:
            assert len(kwargs["api_methods"]) == 1, \
                "ERROR: Only 0 or 1 given API-methods are allowed in NeuralNetwork ctor! You provided " \
                "'{}'.".format(kwargs["api_methods"])
            # Make sure the only allowed api_method is `call`.
            # NOTE(review): indexing the first spec with [0] presumes a tuple-style spec
            # (name first); dict-style specs are indexed by key below — verify both forms.
            assert next(iter(kwargs["api_methods"]))[0] == "call", \
                "ERROR: NeuralNetwork's custom API-method must be called `call`! You named it '{}'.". \
                format(next(iter(kwargs["api_methods"]))[0])
        # Follow given options: only set the keys when explicitly provided so the
        # spec's own defaults survive.
        fold_time_rank = kwargs.pop("fold_time_rank", None)
        if fold_time_rank is not None:
            kwargs["api_methods"][0]["fold_time_rank"] = fold_time_rank
        unfold_time_rank = kwargs.pop("unfold_time_rank", None)
        if unfold_time_rank is not None:
            kwargs["api_methods"][0]["unfold_time_rank"] = unfold_time_rank

    # Functional-API construction and a custom `call` are mutually exclusive.
    assert len(self.keras_style_api_outputs) == 0 or self.custom_call_given is False, \
        "ERROR: If functional API is used to construct network, a custom `call` method must not be provided!"

    # Pytorch specific objects (populated later, e.g. by the pytorch build).
    self.network_obj = None
    self.non_layer_components = None

    super(NeuralNetwork, self).__init__(*layers_args, **kwargs)

    # In case we have more than one input (and not using Keras-style assembly),
    # add another input splitter here.
    self.inputs_splitter = None
    if self.num_inputs > 1:
        self.inputs_splitter = ContainerSplitter(
            tuple_length=self.num_inputs, scope=".helper-inputs-splitter")
        self.add_components(self.inputs_splitter)