def test_make_grouping_by_attr_tuple(tuple_grouping_size):
    int_grouping, size = tuple_grouping_size
    letter_grouping, source = make_grouping_attr_source(int_grouping)
    result = make_grouping_by_attr(letter_grouping, source)
    flat_props = flatten_grouping(letter_grouping)
    expected = tuple(getattr(source, p) for p in flat_props)
    assert expected == result
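# Hedged illustration of the behavior exercised above (assuming the usual
# shape-preserving grouping semantics; "source" stands for any object that
# carries the named attributes):
#
#   make_grouping_by_attr(("x", "y"), source)  ->  (source.x, source.y)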
def _get_arg_input_state_dependencies(all_inputs):
    input_groupings = OrderedDict()
    input_deps = []
    state_deps = []

    # Collect input groupings
    for name, val in all_inputs.items():
        if isinstance(val, Input):
            grouping = val.dependencies()
            flat_dependencies = val.flat_dependencies()
        else:
            flat_dependencies = [
                d.dependencies() for d in flatten_grouping(val)
            ]
            if flat_dependencies and isinstance(flat_dependencies[0], Input_dash):
                grouping = val
            else:
                continue

        slc = slice(len(input_deps), len(input_deps) + len(flat_dependencies))
        input_groupings[name] = (grouping, slc)
        input_deps.extend(flat_dependencies)

    # Process state
    num_inputs = len(input_deps)
    for name, val in all_inputs.items():
        if isinstance(val, State):
            grouping = val.dependencies()
            flat_dependencies = val.flat_dependencies()
        else:
            flat_dependencies = [
                d.dependencies() for d in flatten_grouping(val)
            ]
            if not flat_dependencies or isinstance(flat_dependencies[0], State_dash):
                grouping = val
            else:
                continue

        slc = slice(
            num_inputs + len(state_deps),
            num_inputs + len(state_deps) + len(flat_dependencies),
        )
        input_groupings[name] = (grouping, slc)
        state_deps.extend(flat_dependencies)

    return input_groupings, input_deps, state_deps
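# Hedged usage sketch (component ids are hypothetical). Each argument name is
# mapped to (grouping, slice), where the slice selects that argument's values
# out of the flat list of callback arguments, and state slices start after all
# of the inputs:
#
#   all_inputs = OrderedDict(
#       a=Input("x", "value"),                         # -> slice(0, 1)
#       b=(Input("y", "value"), Input("z", "value")),  # -> slice(1, 3)
#       c=State("s", "value"),                         # -> slice(3, 4)
#   )
#   groupings, input_deps, state_deps = _get_arg_input_state_dependencies(all_inputs)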
def test_flatten_odd_value():
    # Anything other than tuple and dict should be treated as a
    # scalar and passed through
    expected = [0, sum, Input("foo", "bar")]
    vals_collection = (0, (sum, Input("foo", "bar")))
    result = flatten_grouping(vals_collection)
    assert expected == result
    assert len(result) == grouping_len(vals_collection)
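# A minimal, illustrative sketch of the flattening rule that the test above
# relies on (not the library implementation): tuples and dicts are traversed
# recursively, in order, and every other value is treated as a scalar leaf.
def _flatten_sketch(grouping):
    if isinstance(grouping, tuple):
        return [leaf for item in grouping for leaf in _flatten_sketch(item)]
    if isinstance(grouping, dict):
        return [leaf for item in grouping.values() for leaf in _flatten_sketch(item)]
    return [grouping]


# e.g. _flatten_sketch((0, {"a": 1, "b": (2, 3)}))  ->  [0, 1, 2, 3]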
def test_map_grouping_mixed(mixed_grouping_size):
    grouping, size = mixed_grouping_size

    def fn(x):
        return x * 2 + 5

    result = map_grouping(fn, grouping)
    expected = make_grouping_by_index(
        grouping, list(map(fn, flatten_grouping(grouping)))
    )
    assert expected == result
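# Hedged illustration (assuming map_grouping applies fn to every scalar leaf
# and rebuilds the same tuple/dict structure around the results):
#
#   map_grouping(lambda x: x * 2 + 5, (1, {"a": 2}))  ->  (7, {"a": 9})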
def args_components(self):
    """
    :return: list of the components corresponding to the plugin's args dependencies
    """
    return [
        self.template.build_argument_components(
            c.component_id, label=c.label, label_id=c.label_id
        ).container_component
        for c in flatten_grouping(self.args)
        if c.has_component
    ]
def extract_and_validate_output_values(res_grouping, dep_grouping):
    # Extracting property values from dependency components
    if isinstance(res_grouping, DashLabsDependency):
        res_grouping = res_grouping.property_value()

    # Check value against schema
    validate_grouping(res_grouping, dep_grouping)

    flat_results = flatten_grouping(res_grouping, dep_grouping)
    return flat_results
def _add_arg_components_to_template(vals, template):
    for name, val in vals.items():
        deps = flatten_grouping(val)
        for dep in deps:
            if dep.has_component:
                opts = {}
                if isinstance(name, str):
                    opts["name"] = name

                template.add_component(
                    component=dep.component_id,
                    component_property=dep.component_property,
                    role=dep.role,
                    label=dep.label,
                    label_id=dep.label_id,
                    **opts,
                )
def _normalize_output(output, template):
    # output_form stores whether the wrapped function is expected to return
    # values as a scalar, list, or dict.
    output_form = None

    if output is None or (isinstance(output, list) and len(output) == 0):
        if template is None:
            raise ValueError(
                "No output dependency objects specified, and no template provided to\n"
                "construct a default output component."
            )
        else:
            output = template.default_output()

    if not isinstance(output, (list, dict)):
        output_form = "scalar"
        output = [output]

    if isinstance(output, list):
        # Convert output from list to dict keyed by positional index
        output = {i: val for i, val in enumerate(output)}
        output_form = output_form or "list"
    else:
        output_form = output_form or "dict"

    all_output = OrderedDict()
    for name, pattern in output.items():
        if isinstance(pattern, DashLabsDependency) and not pattern.has_component:
            pattern = pattern.dependencies(labs=True)

        flat_deps = flatten_grouping(pattern)
        for dep in flat_deps:
            if not isinstance(dep, DashLabsDependency):
                raise ValueError("Invalid dependency: {}".format(dep))

            if dep.has_component:
                # Fill in default label and role if undefined
                if dep.label is Component.UNDEFINED:
                    dep.label = None

                if dep.role is Component.UNDEFINED:
                    dep.role = "output"

        all_output[name] = pattern

    return all_output, output_form
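# Hedged examples of the (all_output, output_form) result. "dep", "dep1", and
# "dep2" stand for already-normalized dependency objects with components
# attached; only the wrapping/keying behavior is illustrated:
#
#   _normalize_output(dep, template)           -> ({0: dep}, "scalar")
#   _normalize_output([dep1, dep2], template)  -> ({0: dep1, 1: dep2}, "list")
#   _normalize_output({"fig": dep}, template)  -> ({"fig": dep}, "dict")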
def _get_arg_output_dependencies(all_outputs):
    output_groupings = OrderedDict()
    output_deps = []

    # Collect output groupings
    for name, val in all_outputs.items():
        if isinstance(val, Output):
            grouping = val.dependencies()
            flat_dependencies = val.flat_dependencies()
        else:
            grouping = val
            flat_dependencies = [
                d.dependencies() for d in flatten_grouping(val)
            ]

        output_groupings[name] = grouping
        output_deps.extend(flat_dependencies)

    return output_groupings, output_deps
def test_flatten_mixed(mixed_grouping_size):
    grouping, size = mixed_grouping_size
    expected = list(range(size))
    result = flatten_grouping(grouping)
    assert expected == result
    assert len(result) == grouping_len(grouping)
def test_make_grouping_by_attr_scalar(scalar_grouping_size):
    int_grouping, size = scalar_grouping_size
    letter_grouping, source = make_grouping_attr_source(int_grouping)
    result = make_grouping_by_attr(letter_grouping, source)
    expected = getattr(source, flatten_grouping(letter_grouping)[0])
    assert expected == result
def test_flatten_scalar(scalar_grouping_size):
    grouping, size = scalar_grouping_size
    expected = list(range(size))
    result = flatten_grouping(grouping)
    assert expected == result
    assert len(result) == grouping_len(grouping)
def _validate_prop_grouping(component, props):
    for prop in flatten_grouping(props):
        _validate_prop_name(component, prop)
def flat_props(self):
    """
    :return: Flat list of properties in component_property grouping
    """
    return flatten_grouping(self.component_property)
def _normalize_inputs(inputs, state):
    # Handle positional inputs/state as int dict
    if inputs == [] and isinstance(state, dict):
        inputs = {}

    if not isinstance(inputs, dict):
        input_form = "list"
        if state is not None and isinstance(state, dict):
            raise ValueError("inputs and state must both be lists or dicts")

        if isinstance(inputs, tuple):
            inputs = list(inputs)
        elif not isinstance(inputs, list):
            inputs = [inputs]

        if state:
            if isinstance(state, tuple):
                state = list(state)
            elif not isinstance(state, list):
                state = [state]

        # Create dict from positional argument indices to dependency objects
        # Note that state values always come after inputs in Dash 1
        inputs = {i: val for i, val in enumerate(inputs)}
        num_inputs = len(inputs)
        if state is not None:
            state = {i + num_inputs: val for i, val in enumerate(state)}
    else:  # isinstance(inputs, dict):
        input_form = "dict"

    if state is None:
        state = {}

    # Check for duplicate keys
    dups = [k for k in inputs if k in state]
    if dups:
        raise ValueError(
            "argument names must be unique across input and state\n"
            " The following were found in both: {dups}".format(dups=dups)
        )

    # Preprocess non-dependency inputs and state into arg instances
    all_inputs = OrderedDict()
    combined_inputs_state = inputs.copy()
    combined_inputs_state.update(state)
    for name, pattern in combined_inputs_state.items():
        if isinstance(pattern, DashLabsDependency) and not pattern.has_component:
            pattern = pattern.dependencies(labs=True)

        flat_deps = flatten_grouping(pattern)
        for dep in flat_deps:
            if not isinstance(dep, DashLabsDependency):
                raise ValueError("Invalid dependency: {}".format(dep))

            if dep.has_component:
                # Apply default label if undefined
                # (if user explicitly set label to None, leave it alone)
                if dep.label is Component.UNDEFINED:
                    dep.label = name

                # Update default role if undefined
                if dep.role is Component.UNDEFINED:
                    dep.role = "input"

        all_inputs[name] = pattern

    return all_inputs, input_form
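# Hedged examples of the (all_inputs, input_form) result. "dep0", "dep1", and
# "dep2" stand for already-normalized dependency objects with components
# attached. Positional inputs/state get integer names, with state numbered
# after the inputs; dict inputs and state keep their keys:
#
#   _normalize_inputs([dep0, dep1], [dep2])
#       -> ({0: dep0, 1: dep1, 2: dep2}, "list")
#   _normalize_inputs({"n": dep0}, {"s": dep1})
#       -> ({"n": dep0, "s": dep1}, "dict")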