def _get_params(self,
                floating: Optional[bool] = True,
                is_yield: Optional[bool] = None,
                extract_independent: Optional[bool] = True) -> Set["ZfitParameter"]:
    params = OrderedSet()
    params = params.union(*(model.get_params(floating=floating, is_yield=is_yield,
                                             extract_independent=extract_independent)
                            for model in self.model))
    params = params.union(*(constraint.get_params(floating=floating, is_yield=False,
                                                  extract_independent=extract_independent)
                            for constraint in self.constraints))
    return params
def _order(self, cleaned):
    # TODO: Word == (adjective or noun) && (plural) && (before a noun) -> make singular, e.g.:
    # - Five easy bananas minutes. (Weird-as-a-Service)
    # - Five easy banana minutes. (Totally sensible Driven Development)
    mapped = {}
    for words in cleaned:
        for key, value in words.items():
            for kind in self._ORDERING:
                if kind in value.get('categories'):
                    mapped.setdefault(kind, []).append(key)
    remove_empty = list(
        filter(None, [
            mapped.get('number'),
            mapped.get('adverb'),
            mapped.get('adjective'),
            mapped.get('noun'),
        ]))

    # Order the words in the sentence using basic English syntax.
    cartesian_product = list(product(*remove_empty))
    sentences = [list(self._flat_tuple(item)) for item in cartesian_product]
    duplicates_removed = OrderedSet(
        [tuple(OrderedSet(sentence)) for sentence in sentences])

    # Dropping the number at the start of an otherwise valid sentence is legitimate English too.
    no_need_for_numbers = OrderedSet([
        tuple(self._get_rest(sentence)) for sentence in duplicates_removed
        if isinstance(self._get_first(sentence), int) and len(sentence) > 1
    ])
    return duplicates_removed.union(no_need_for_numbers)
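# A toy illustration of the ordering idea above, with hypothetical inputs: words
# are bucketed by part of speech, the buckets are combined with itertools.product,
# and OrderedSet drops duplicate words and duplicate sentences while preserving order.
from itertools import product
from ordered_set import OrderedSet

buckets = [['five'], ['easy'], ['banana', 'minutes']]  # number, adjective, nouns
sentences = OrderedSet(tuple(OrderedSet(s)) for s in product(*buckets))
print(list(sentences))  # [('five', 'easy', 'banana'), ('five', 'easy', 'minutes')]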
def get_clique_category(
    clique_graph: nx.MultiDiGraph, clique: List
) -> Tuple[str, List]:
    """
    Given a clique, identify the category of the clique.

    Parameters
    ----------
    clique_graph: nx.MultiDiGraph
        Clique graph
    clique: List
        A list of nodes in the clique

    Returns
    -------
    Tuple[str, list]
        A tuple of the clique category and its ancestors

    """
    l = [clique_graph.nodes()[x]["category"] for x in clique]
    u = OrderedSet.union(*l)
    uo = sort_categories(u)
    log.debug(f"outcome of union (sorted): {uo}")
    clique_category = uo[0]
    clique_category_ancestors = get_biolink_ancestors(uo[0])
    return clique_category, clique_category_ancestors
def header_elements(self):
    """
    :returns: The set of `css` dependencies for all elements on the page
    """
    elements = OrderedSet()
    elements = elements.union(*(content.header_elements for content in self.contents))
    return elements
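# A minimal sketch of the accumulation pattern above, with hypothetical contents:
# `union(*iterables)` unpacks one per-item collection per argument, and the result
# keeps first-appearance order.
from ordered_set import OrderedSet

contents = [["bootstrap.css"], ["site.css", "bootstrap.css"]]
elements = OrderedSet().union(*(css_files for css_files in contents))
print(list(elements))  # ['bootstrap.css', 'site.css']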
def universal_set(documents, k):
    universal = OrderedSet()
    universal1 = OrderedSet()
    for doc in documents:
        shingles, shings = k_shingles(doc, k)
        universal = universal.union(shingles)
        universal1 = universal1.union(shings)
    # Sort both universes, then restore them as OrderedSets.
    universal = OrderedSet(sorted(universal))
    universal1 = OrderedSet(sorted(universal1))
    return universal, universal1
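# `k_shingles` is not shown above; a hypothetical stand-in for illustration could
# return the character k-grams and the word k-grams of a document:
from ordered_set import OrderedSet

def k_shingles(doc, k):
    chars = OrderedSet(doc[i:i + k] for i in range(len(doc) - k + 1))
    words = doc.split()
    word_grams = OrderedSet(tuple(words[i:i + k]) for i in range(len(words) - k + 1))
    return chars, word_grams

universal, universal1 = universal_set(["abcd", "bcde"], 2)
print(list(universal))  # ['ab', 'bc', 'cd', 'de']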
def _get_params(
    self,
    floating: bool | None = True,
    is_yield: bool | None = None,
    extract_independent: bool | None = True,
) -> set[ZfitParameter]:
    params = OrderedSet()
    params = params.union(*(model.get_params(
        floating=floating,
        is_yield=is_yield,
        extract_independent=extract_independent,
    ) for model in self.model))
    params = params.union(*(constraint.get_params(
        floating=floating,
        is_yield=False,
        extract_independent=extract_independent,
    ) for constraint in self.constraints))
    return params
def extract_complete_file_info(graph: RevisionGraph, fn_get_output):
    '''
    Given a revision graph, collect all file info using tokei for those revisions

    :param: graph - Revision Graph
    :return: None. As a side effect, complete file info by language type will be
             added to all revisions in master_rev
    '''
    # We use a custom .mailmap file to resolve authors. That was great while we
    # collected revision info (extract_revision_graph), but it would block our
    # ability to check out individual revisions, therefore clear the .mailmap
    # checkout (if it exists).
    fn_get_output(['git checkout -- .mailmap >/dev/null 2>&1'])
    collect_file_info(graph.revisions.values(), fn_get_output)
    fn_get_output(['git checkout master >/dev/null 2>&1'])
    collect_deltas(graph, OrderedSet.union(graph.master_revs, graph.not_a_merge))
def finalise(self):
    return OrderedSet.union(*self.vocabs)
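# A minimal sketch of the unbound-call form used above, assuming the third-party
# `ordered_set` package: `OrderedSet.union(first, *rest)` also accepts plain
# iterables and returns an OrderedSet in first-appearance order.
from ordered_set import OrderedSet

vocabs = [OrderedSet(["a", "b"]), OrderedSet(["b", "c"]), OrderedSet(["d"])]
print(OrderedSet.union(*vocabs))  # OrderedSet(['a', 'b', 'c', 'd'])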
def _check_convert_input(self, loss: ZfitLoss, params, init=None, floating=True
                         ) -> Tuple[ZfitLoss, Iterable[ZfitParameter], Union[None, FitResult]]:
    """Sanitize the input values and return all of them.

    Args:
        loss: If the loss is a callable, it will be converted to a SimpleLoss.
        params: If the parameters is an array, it will be converted to free parameters.
        init:
        floating:

    Returns:
        loss, params, init:
    """
    if isinstance(loss, ZfitResult):
        init = loss  # make the names correct
        loss = init.loss
        if params is None:
            params = list(init.params)

    to_set_param_values = {}
    if isinstance(params, collections.abc.Mapping) and all(isinstance(p, ZfitParameter) for p in params):
        to_set_param_values = {p: val for p, val in params.items() if val is not None}
        params = list(params.keys())

    # convert the function to a SimpleLoss
    if not isinstance(loss, ZfitLoss):
        if not callable(loss):
            raise TypeError("Given Loss has to be a ZfitLoss or a callable.")
        elif params is None:
            raise ValueError("If the loss is a callable, the params cannot be None.")
        from zfit.core.loss import SimpleLoss
        params = convert_to_parameters(params, prefer_constant=False)
        loss = SimpleLoss(func=loss, params=params)

    if params is None:
        params = loss.get_params(floating=floating)
    else:
        if to_set_param_values:
            try:
                assign_values(list(to_set_param_values), list(to_set_param_values.values()))
            except ParameterNotIndependentError as error:
                not_indep_and_set = {p for p, val in to_set_param_values.items()
                                     if val is not None and not p.independent}
                raise ParameterNotIndependentError(
                    f"Cannot set parameter {not_indep_and_set} to a value as they"
                    f" are not independent. The following `param` argument was"
                    f" given: {params}.\n\n"
                    f"Original error\n"
                    f"--------------\n"
                    f"{error}") from error
        else:
            params = convert_to_container(params, container=OrderedSet)
            # now extract all the independent parameters
            params = list(OrderedSet.union(*(p.get_params(only_floating=floating) for p in params)))

    # set the parameter values from the init
    if init is not None:
        # don't overwrite the parameters the user already set explicitly
        params_to_set = OrderedSet(params).intersection(OrderedSet(init.params)) - OrderedSet(to_set_param_values)
        assign_values(params_to_set, init)

    if floating:
        params = self._filter_floating_params(params)
    if not params:
        raise RuntimeError("No parameter for minimization given/found. Cannot minimize.")
    params = list(params)
    return loss, params, init
class GSet(StateCRDT, OrderedSet):
    def __init__(self, iterable=None, options=None):
        self._payload = OrderedSet() if iterable is None else OrderedSet(iterable)
        self._options = {} if options is None else options

    def merge(self, other):
        assert isinstance(other, GSet)
        # The merged payload is the sorted union of both payloads.
        merged = sorted(self._payload.union(other._payload))
        return GSet(merged)

    def compare(self, other):
        return self.issubset(other)

    def values(self):
        return iter(self._payload)

    def get_payload(self):
        return list(self._payload)

    def set_payload(self, payload):
        self._payload = OrderedSet(payload)

    def generate_log(self, log):
        assert isinstance(log, GSet)
        return GSet(log.get_payload())

    payload = property(get_payload, set_payload)

    #
    # Set API
    #
    def add(self, element):
        # Keep the payload sorted after every insertion.
        self._payload.add(element)
        self._payload = OrderedSet(sorted(self._payload))

    def discard(self, element):
        raise NotImplementedError("This is a grow-only set")

    def __contains__(self, element):
        return element in self._payload

    def __iter__(self):
        return iter(self._payload)

    def __len__(self):
        return len(self._payload)
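# Hypothetical session with the grow-only set above (assuming the StateCRDT base
# class it inherits from is importable and instantiable): replicas only ever add
# elements, and merge is the sorted union of both payloads.
a = GSet([3, 1])
b = GSet([2, 1])
a.add(4)
print(a.merge(b).payload)  # [1, 2, 3, 4]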
def call(self, X, adjM, parts):
    # Extract the number of neurons from the number of rows of adjM (adjM is a 2D
    # square matrix). This leaves open the option to decrease the number of neurons
    # in the following layers by shrinking adjM, e.g. neurons over leaf nodes in
    # the graph.
    num_neurons = len(adjM)

    # For every neuron, the list of neurons to gather signal from.
    receptive_fields = [
        tf.where(adjM[i] == 1)[:, 0] for i in range(num_neurons)
    ]

    # if self.q_dim_security == 'adjM':
    #     newAdjM = np.array(adjM)
    #     indices = list(zip(*np.where(adjM == 1)))
    #     for row, col in indices:
    #         newAdjM[row] += adjM[col]
    #     oldAdjM = adjM
    #     adjM = np.clip(newAdjM, a_min=0, a_max=1)
    #
    #     new_parts_delta = [
    #         OrderedSet(np.where(oldAdjM[i] == 1)[0].tolist())
    #         for i in range(num_neurons)]
    #     new_parts = [OrderedSet.union(parts[i], new_parts_delta[i]) for i in range(num_neurons)]
    # else:

    # New, cumulative receptive fields (parts) based on adjM, for every neuron in
    # the current layer. For every neuron i, the parts of every neuron in the
    # receptive field of i are reduced with union to get the cumulative receptive
    # field. We iterate through parts so that the initial order of parts is
    # preserved (because it is the left operand of the union), and only for members
    # of the receptive field so that the original logic is kept. We also union with
    # the receptive field itself to make sure the first step has the receptive
    # field inside parts (further unions won't change a thing).
    new_parts = [
        reduce(OrderedSet.union, [
            parts[neighbour_idx]
            for neighbour_idx in OrderedSet.union(
                parts[node_idx],
                receptive_fields[node_idx].numpy().tolist())
            if neighbour_idx in receptive_fields[node_idx]
        ]) for node_idx in range(num_neurons)
    ]

    # For every neuron i, create a promotion chi matrix for every neuron/node in
    # i's receptive field.
    chis = [{
        neighbour_idx.numpy(): tf.convert_to_tensor(
            self.permutationFunction(parts[neighbour_idx], new_parts[i]),
            dtype=tf.float32)
        for neighbour_idx in receptive_fields[i]
    } for i in range(num_neurons)]

    # For every neuron i, promote every activation of the nodes in i's receptive
    # field.
    # IMPORTANT: (probably) this is where tf functions should start to be used,
    # because new structures are formed based on previous ones, and these new
    # structures will ultimately 'transform' and mix with W to create activations.
    # We iterate over new_parts for neighbour_idx (even though there might be no
    # promotion) so that we can gather promotions and fill zeros for the next step
    # in the appropriate order.
    promotions = [[
        tf.einsum(
            self.einsum_expr,
            *([chis[i][neighbour_idx]] * self.k + [X[neighbour_idx]]))
        if neighbour_idx in receptive_fields[i]
        else tf.zeros([self.channels_in] + [len(new_parts[i])] * self.k +
                      self.feature_vector_shape)
        for neighbour_idx in new_parts[i]
    ] for i in range(num_neurons)]

    stacked = [tf.stack(promotions[i], axis=1) for i in range(num_neurons)]

    if self.mix_promotions_with_adjM:
        adjMs_forNodes = [
            tf.cast(
                tf.constant(adjM[tuple([new_parts[i]])][:, new_parts[i]]),
                'float32') for i in range(num_neurons)
        ]
        stacked = [
            tf.transpose(
                tf.tensordot(stacked[i], adjMs_forNodes[i], axes=0),
                self.tensordot_swap_channel_list) for i in range(num_neurons)
        ]

    qs = [
        tf.stack([
            tf.einsum(expression,
                      *([stacked[i]] + operator(len(new_parts[i]))))
            for expression, operator in zip(self.contractions_expressions,
                                            self.contractions_add_operators)
        ], axis=1) for i in range(num_neurons)
    ]

    activations = [
        tf.transpose(
            self.nonlinearity(
                tf.add(
                    tf.einsum(self.einsum_activation, self.W, qs[neuron_ind]),
                    self.bias)),
            self.activation_swap_channels_list)
        for neuron_ind in range(num_neurons)
    ]

    return activations, adjM, new_parts
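# A toy run of the `reduce(OrderedSet.union, ...)` step above, with hypothetical
# parts: the cumulative part of a node is the union of the parts of every
# neighbour in its receptive field, in receptive-field order.
from functools import reduce
from ordered_set import OrderedSet

parts = [OrderedSet([0]), OrderedSet([1, 2]), OrderedSet([2])]
receptive_field_of_0 = [0, 1]  # node 0 gathers from itself and node 1
print(reduce(OrderedSet.union, [parts[j] for j in receptive_field_of_0]))
# OrderedSet([0, 1, 2])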
def keys(self):
    return OrderedSet.union(self._default.keys(), self.__dict__.keys())
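# Sketch of the method above on a hypothetical `Config` wrapper, assuming
# `ordered_set.OrderedSet`: default keys come first, then instance attributes,
# without duplicates.
from ordered_set import OrderedSet

class Config:
    def __init__(self, default, **overrides):
        self._default = default
        self.__dict__.update(overrides)

    def keys(self):
        return OrderedSet.union(self._default.keys(), self.__dict__.keys())

cfg = Config({"host": "localhost", "port": 80}, port=8080)
print(list(cfg.keys()))  # ['host', 'port', '_default']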