def __new__(metacls, name: str,
                     bases: tx.Iterable[type],
                     attributes: tx.MutableMapping[str, tx.Any],
                   **kwargs) -> type:
    """ When used as a metaclass, OCDType will insert a specialization
        of the class for which it has been chosen as a metaclass as its
        immediate base ancestor, and then create a new class based on
        that specialization for forwarding to Python’s class-creation
        apparatus:
    """
    if name in metacls.subtypes:
        return metacls.subtypes[name].Type
    
    subbase: type = object
    for basecls in bases:
        if issubclass(basecls, (tx.Iterable, tx.Iterator)):
            subbase = basecls
            break
    
    # DON’T KNOW ABOUT YOU BUT I AM UN:
    debaser: tx.Tuple[type, ...] = tuplize(subbase,
                                           collections.abc.Iterable,   # type: ignore
                                           collections.abc.Iterator)   # type: ignore
    
    subname: MaybeString = kwargs.pop('subname', None)
    factory: MaybeFactory = kwargs.pop('factory', None)
    key: MaybePredicate = kwargs.pop('key', None)
    rev: bool = kwargs.pop('reverse', False)
    
    baseset: tx.List[type] = [chien for chien in bases if chien not in debaser]
    
    # Create the base ancestor with a direct call to “__class_getitem__(…)”
    # -- which, note, will fail if no bases were specified; if `subbase`
    # defaults to “object”, this call will raise a TypeError, as it requires
    # an iterable operand:
    base = metacls.__class_getitem__(subbase, subname, factory,
                                     key=key, reverse=rev,
                                     baseset=baseset,
                                   **kwargs)
    
    # The return value of type.__new__(…), called with the amended
    # inheritance-chain values, is what we pass off to Python:
    cls = super().__new__(metacls, name,            # type: ignore
                                   tuplize(base),
                                   dict(attributes),
                                 **kwargs)
    
    metacls.subtypes[name] = TypeAndBases.for_type(cls)
    return cls
def main():
    if version_info < REQ_VERSION:
        print("Python version too low! Please use", REQ_VERSION, "or later.")
        exit(1)
    test_cases = read_test_cases()
    for test_case in test_cases:
        start = time.time()
        answer = "This puzzle is not solvable."
        visited = set()
        queue = Queue()
        if len(argv) < 3 or argv[2].strip().lower() != "no_check=true":
            if not has_answer(test_case):
                print(time.time() - start, answer)
                continue
        queue.put((0, test_case, ""))
        while not queue.empty():
            level, matrix, current_answer = queue.get()
            # A tuple is necessary for storing in a set since it is immutable
            matrix_tuple = tuplize(matrix)
            if matrix_tuple not in visited:
                visited.add(matrix_tuple)
            else:
                continue
            if level > 50:
                break
            if check_answer(matrix):
                answer = current_answer
                break
            permutations = calculate_permutations(matrix)
            for permutation, letter in permutations:
                permutation_tuple = tuplize(permutation)
                if permutation_tuple not in visited:
                    queue.put((level + 1, permutation, current_answer + letter))
        print(time.time() - start, answer)
def main():
    if version_info < REQ_VERSION:
        print("Python version too low! Please use", REQ_VERSION, "or later.")
    test_cases = read_test_cases()
    for test_case in test_cases:
        start = time.time()
        answer = "This puzzle is not solvable."
        queue = PriorityQueue()
        visited = set()
        if len(argv) < 3 or argv[2].strip().lower() != "no_check=true":
            if not has_answer(test_case):
                print(time.time() - start, answer)
                continue
        """ The queue follows the order total cost, level, matrix, answer
            for all elements """
        queue.put((0, 0, test_case, ""))
        while not queue.empty():
            _, level, matrix, current_answer = queue.get()
            if level > 50:
                break
            if check_answer(matrix):
                answer = current_answer
                break
            permutations = calculate_permutations(matrix)
            for permutation, letter in permutations:
                # A tuple is necessary for storing in a set since it is immutable
                permutation_tuple = tuplize(permutation)
                if permutation_tuple not in visited:
                    heuristic_cost = calculate_heuristic(permutation)
                    visited.add(permutation_tuple)
                    queue.put((heuristic_cost + level + 1,
                               level + 1,
                               permutation,
                               current_answer + letter))
        print(time.time() - start, answer)
def test(): """ Run the inline tests for the halogen.generate module """ import os if __package__ is None or __package__ == '': import api # type: ignore from filesystem import TemporaryDirectory from utils import tuplize else: from . import api # type: ignore from .filesystem import TemporaryDirectory from .utils import tuplize assert str(api.Target()) != 'host' registered_generators = api.registered_generators() if len(registered_generators) > 0: print(registered_generators) print() with TemporaryDirectory(prefix='yo-dogg-') as td: generate(*tuplize('my_first_generator'), target='host', output_directory=os.fspath(td)) generate(*tuplize('my_second_generator'), target='host', output_directory=os.fspath(td)) generate(*tuplize('my_brightest_generator'), target='host', output_directory=os.fspath(td)) else: print("No registered generators found, skipping inline tests")
def get_phenotype_size(self):
    """ Return the dimensions required to produce a phenotype.
    
        Cross-layer connections are NxM matrices; inter-layer connections
        are NxN matrices. Also returns the number of regular neurons that
        need to be assigned t and gain values.
    """
    return {
        'cross': [(sum(self.neuron_count(i)), self.neuron_count(i + 1)[0])
                  for i in xrange(len(self.layers) - 1)],
        'inter': [tuplize(self.neuron_count(i)[0])
                  for i in xrange(1, len(self.layers))],
        'neurons': sum(self.neuron_count(i)[0]
                       for i in xrange(len(self.layers)))
    }
def solve(grid, output, heuristic):
    if version_info < REQ_VERSION:
        print("Python version too low! Please use", REQ_VERSION, "or later.")
    test_case = grid
    start = time.time()
    answer = "This puzzle is not solvable."
    queue = PriorityQueue()
    visited = set()
    if not has_answer(test_case):
        print("TIME: " + str(time.time() - start), " --- ANSWER: " + str(answer))
        return
    """ The queue follows the order total cost, level, matrix, answer
        for all elements """
    queue.put((0, 0, test_case, ""))
    while not queue.empty():
        _, level, matrix, current_answer = queue.get()
        if level > 50:
            break
        if check_answer(matrix, output):
            answer = current_answer
            break
        permutations = calculate_permutations(matrix)
        for permutation, letter in permutations:
            # A tuple is necessary for storing in a set since it is immutable
            permutation_tuple = tuplize(permutation)
            if permutation_tuple not in visited:
                heuristic_cost = calculate_heuristic(permutation, output, heuristic)
                visited.add(permutation_tuple)
                queue.put((heuristic_cost + level + 1,
                           level + 1,
                           permutation,
                           current_answer + letter))
    print("TIME: " + str(time.time() - start), " --- ANSWER: " + str(answer))
    print("Emulating answer ...")
    return answer
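# Note on the search above (added for clarity, not from the original source):
# the priority queue orders states by f(n) = g(n) + h(n), where g(n) is
# “level + 1” and h(n) is whatever calculate_heuristic() returns. That helper
# is not reproduced in this collection; the sketch below shows one plausible,
# admissible choice for a sliding-tile puzzle -- summed Manhattan distance to
# the goal grid -- assuming states are lists of rows with 0 as the blank tile.
# The function name and signature here are hypothetical.

def manhattan_distance(state, goal):
    """ Hypothetical heuristic sketch: sum, over every non-blank tile, of the
        horizontal plus vertical distance between its current position and
        its position in the goal grid. """
    goal_positions = {value: (r, c)
                      for r, row in enumerate(goal)
                      for c, value in enumerate(row)}
    total = 0
    for r, row in enumerate(state):
        for c, value in enumerate(row):
            if value != 0:
                gr, gc = goal_positions[value]
                total += abs(r - gr) + abs(c - gc)
    return total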
class Descriptor(object):
    
    __slots__ = tuplize('name')
    
    def __init__(self, *args, **kwargs):
        pass
    
    def __get__(self, instance, cls=None):
        if cls is None:
            cls = type(instance)
    
    def __set__(self, instance, value):
        pass
    
    def __delete__(self, instance):
        pass
    
    def __set_name__(self, cls, name):
        self.name = name
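# Usage note (added, not part of the original snippet): the stub above mainly
# demonstrates the __set_name__ protocol -- when a Descriptor instance is
# assigned to a class attribute, Python 3.6+ calls __set_name__ with the owning
# class and the attribute name, which lands in the single “name” slot declared
# via tuplize('name'), i.e. ('name',). The “Host” class below is hypothetical.

class Host(object):
    thing = Descriptor()    # triggers Descriptor.__set_name__(Host, 'thing')

assert Host.__dict__['thing'].name == 'thing'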
def setContributors( self, contributors ):
    """ Set Dublin Core Contributor elements - resource collaborators. """
    # XXX: fixme
    self.contributors = tuplize('contributors', contributors, semi_split)
def setSubject( self, subject ):
    """ Set Dublin Core Subject element - resource keywords. """
    self.subject = tuplize( 'subject', subject )
def setCreators(self, creators):
    """ Set Dublin Core Creator elements - resource authors. """
    self.creators = tuplize('creators', creators)
def setContributors( self, contributors ):
    "Dublin Core element - additional contributors to resource"
    # XXX: fixme
    self.contributors = tuplize( 'contributors', contributors )
def setSubject( self, subject ):
    "Dublin Core element - resource keywords"
    self.subject = tuplize( 'subject', subject )
def setContributors(self, contributors):
    "Dublin Core element - additional contributors to resource"
    # XXX: fixme
    self.contributors = tuplize('contributors', contributors, semi_split)
def setSubject(self, subject):
    "Dublin Core element - resource keywords"
    self.subject = tuplize('subject', subject)
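# The Dublin Core setters above come from Zope/CMF-style metadata code, where
# tuplize(name, value[, splitter]) normalizes a caller-supplied value into a
# tuple of strings: tuples pass through, lists are converted, and bare strings
# are split (on whitespace by default, or on semicolons via “semi_split” for
# the contributor fields). That helper is not included in this collection; the
# following is only a rough sketch of the behaviour those calls appear to rely
# on, not the actual CMF implementation.

def semi_split(value):
    """ Hypothetical splitter: break a string on semicolons and strip whitespace. """
    return [chunk.strip() for chunk in value.split(';')]

def tuplize(name, value, splitter=str.split):
    """ Sketch: coerce “value” into a tuple, splitting bare strings with “splitter”. """
    if isinstance(value, tuple):
        return value
    if isinstance(value, list):
        return tuple(value)
    if isinstance(value, str):
        return tuple(splitter(value))
    raise ValueError(f"Invalid value for {name!r}: {value!r}")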
def test(): """ Inline tests for OCDType and friends """ from pprint import pprint if __package__ is None or __package__ == '': from utils import print_cache else: from .utils import print_cache """ 0. Set up some specializations and subtypes for testing: """ import array OCDArray = OCDType[array.array] import numpy # type: ignore OCDNumpyArray = OCDType[numpy.ndarray, 'OCDNumpyArray', numpy.array] class SortedMatrix(numpy.matrix, metaclass=OCDType, subname='OCDMatrix', factory=numpy.asmatrix, key=lambda x: abs(x), reverse=True): pass OCDMatrix = OCDType[numpy.matrix] ocd_settttts = OCDType[set] """ 1. Assert-check properties of specializations and subtypes: """ assert ocd_settttts == OCDSet assert ocd_settttts.__name__ == 'OCDSet' assert ocd_settttts.__base__ == set assert not hasattr(ocd_settttts, '__factory__') assert ocd_settttts.__generic__ == tx.Set assert OCDSet[T] assert OCDSet[str] assert SortedList[ T] # this is generic because find_generic_for_type() works for `list` assert SortedNamespace[ T] # this is generic because it inherits from all my crazy `utils` shit assert OCDArray[T] assert OCDNumpyArray[T] assert OCDMatrix[T] assert SortedMatrix[ T] # this is generic because I fixed `utils.Originator.__getitem__(…)` assert OCDMatrix.__generic__ == tx.Generic pprint(SortedMatrix.__mro__) # (__main__.test.<locals>.SortedMatrix, # <class 'ocd.OCDMatrix'>, # <class 'numpy.matrixlib.defmatrix.matrix'>, # <class 'numpy.ndarray'>, # <class 'collections.abc.Iterable'>, # <class 'object'>) pprint(OCDMatrix.__mro__) # (<class 'ocd.OCDMatrix'>, # <class 'numpy.matrixlib.defmatrix.matrix'>, # <class 'numpy.ndarray'>, # <class 'collections.abc.Iterable'>, # <class 'object'>) pprint(OCDNumpyArray.__mro__) # (<class 'ocd.OCDNumpyArray'>, # <class 'numpy.ndarray'>, # <class 'collections.abc.Iterable'>, # <class 'object'>) print() # pprint(SortedMatrix.__parameters__) # pprint(OCDMatrix.__parameters__) # pprint(OCDNumpyArray.__parameters__) # pprint(SortedMatrix.__args__) # pprint(OCDMatrix.__args__) # pprint(OCDNumpyArray.__args__) try: # Generics take only one type parameter: print(type(OCDSet[T, S])) except TypeError as exc: assert 'Too many parameters' in str(exc) else: assert False, "`OCDSet[T, S]` didn’t raise!" pprint(OCDSet[T]) # typing.Set[+T] pprint(OCDFrozenSet[T]) # typing.FrozenSet[+T] pprint(OCDArray[T]) # typing.Generic[+T] pprint(OCDNumpyArray[T]) # typing.Generic[+T] pprint(OCDMatrix[T]) # typing.Generic[+T] pprint(OCDTuple[T, ...]) # typing.Tuple[+T, ...] pprint(OCDTuple[T]) # typing.Tuple[+T] pprint(OCDTuple[T, S]) # typing.Tuple[+T, +S] pprint(OCDList[T]) # typing.List[+T] pprint(SortedList[T]) # ocd.OCDList[~T] (PHEW.) 
pprint(SortedNamespace[T]) # __main__.SortedNamespace[+T] pprint(SortedMatrix[T]) # __main__.test.<locals>.SortedMatrix[+T] print() pprint(OCDSet[T]) pprint(OCDSet.__origin__) pprint(OCDSet.__generic__) pprint(OCDSet[T].__origin__) print() pprint(SortedList[T]) pprint(SortedList.__origin__) pprint(SortedList.__generic__) pprint(SortedList[T].__origin__) print() pprint(OCDList[T]) pprint(OCDList.__origin__) pprint(OCDList.__generic__) pprint(OCDList[T].__origin__) print() pprint(OCDNamespace[S, T]) pprint(OCDNamespace.__origin__) pprint(OCDNamespace.__generic__) pprint(OCDNamespace[S, T].__origin__) print() pprint(SortedNamespace[T]) pprint(SortedNamespace.__origin__) pprint(SortedNamespace.__generic__) pprint(SortedNamespace[T].__origin__) assert OCDNumpyArray.__name__ == 'OCDNumpyArray' assert OCDNumpyArray.__base__ == numpy.ndarray assert OCDNumpyArray.__bases__ == tuplize(numpy.ndarray, collections.abc.Iterable) assert OCDNumpyArray.__factory__ == numpy.array assert OCDNumpyArray.__generic__ == tx.Generic assert SortedMatrix.__base__ == OCDType[numpy.matrix] assert SortedMatrix.__base__.__name__ == 'OCDMatrix' assert SortedMatrix.__base__.__base__ == numpy.matrixlib.defmatrix.matrix assert SortedMatrix.__base__.__factory__ == numpy.asmatrix assert SortedMatrix.__base__.__generic__ == tx.Generic assert OCDArray('i', range(10)).__len__() == 10 assert numpy.array([[0, 1, 2], [0, 1, 2], [0, 1, 2]]).__len__() == 3 assert OCDNumpyArray([[0, 1, 2], [0, 1, 2], [0, 1, 2]]).__len__() == 3 assert SortedMatrix([[0, 1, 2], [0, 1, 2], [0, 1, 2]]).__len__() == 3 assert SortedMatrix(OCDNumpyArray([[0, 1, 2], [0, 1, 2], [0, 1, 2]])).__len__() == 3 try: # can’t specialize a specialization! OCDType[OCDSet] except TypeError as exc: assert "specialization" in str(exc) else: assert False, "`OCDType[OCDSet]` didn’t raise!" """ 2. Test various SimpleNamespace subclasses: """ test_namespace_types() """ 3. Reveal the cached OCDType specializations: """ assert len(OCDType.types) == 8 print_cache(OCDType, 'types') """ 4. Reveal the cached OCDType subtypes: """ assert len(OCDType.subtypes) == 3 print_cache(OCDType, 'subtypes') # class Base(object): # def __init__(self): # self.base = "in your base" # def yodogg(self): # return "i heard you liked attrs" # # class Derived(Base): # # def doggyo(self): # # return getattr(super(), 'base') # # return super().base # f = getattr(super(), 'yodogg') # return f() # # d = Derived() # print(d.doggyo()) class Descriptor(object): __slots__ = tuplize('name') def __init__(self, *args, **kwargs): pass def __get__(self, instance, cls=None): if cls is None: cls = type(instance) def __set__(self, instance, value): pass def __delete__(self, instance): pass def __set_name__(self, cls, name): self.name = name class NewType: """NewType creates simple unique types with almost zero runtime overhead. `NewType(name, tp)` is considered a subtype of `tp` by static type checkers. At runtime, NewType(name, tp) creates a callable instance that simply returns its argument when called. Usage:: UserId = NewType('UserId', int) def name_by_id(user_id: UserId) -> str: ... 
UserId('user') # Fails type check name_by_id(42) # Fails type check name_by_id(UserId(42)) # OK num = UserId(5) + 1 # type: int """ __slots__ = ('__name__', '__qualname__', '__supertype__') def __init__(self, name, tp): self.__name__ = self.__qualname__ = name self.__supertype__ = tp @staticmethod def __call__(arg): return arg def __repr__(self): return f"{type(self).__name__}<" \ f"{self.__qualname__}:" \ f"{self.__supertype__.__name__}>" def __hash__(self): return hash((self.__name__, self.__supertype__)) YoDogg = NewType('YoDogg', str) YouLikeInts = NewType('YouLikeInts', int) def DoggPrinter(arg: YoDogg) -> YoDogg: print(tx.cast(str, arg)) return arg def DoggEvaluator(arg: YouLikeInts) -> int: intarg = tx.cast(int, arg) print(f"Integer argument: {intarg}") return intarg dogg: YoDogg = YoDogg('Dogg, Yo!') DoggPrinter(dogg) inyour: YouLikeInts = YouLikeInts(666) DoggEvaluator(inyour) print(repr(YoDogg)) print(repr(YouLikeInts))
def run(self, target=None, emit=None, substitutions=None):
    """ Use the halogen.compile.Generators.run(…) method to run generators.
        
        All generator code that this instance knows about must have been
        previously compiled, dynamically linked, and preloaded. Assuming
        that all of these generators were properly programmed, they will
        then be available to halogen via the Halide Generator API --
        specifically the Generator Registry (q.v. `loaded_generators()`
        method docstring, supra).
    """
    # Check self-status:
    if not self.precompiled:
        raise GenerationError("Can’t run() before first precompiling, compiling, dynamic-linking, and preloading")
    if not self.compiled:
        raise GenerationError("Can’t run() before first compiling, dynamic-linking, and preloading")
    if not self.linked:
        raise GenerationError("Can’t run() before first dynamic-linking and preloading")
    if not self.preloaded:
        raise GenerationError("Can’t run() before first preloading")
    if self.loaded_count < 1:
        raise GenerationError("Can’t run() without one or more loaded generators")
    
    # Check args:
    if not target:
        target = 'host'
    if not substitutions:
        substitutions = {}
    
    emits = type(self).emits
    if not emit:
        emit = tuplize(*emits['default'])
    elif is_string(emit):
        emit = u8str(emit)
        if emit in emits:
            emit = tuplize(*emits.get(emit))
        else:
            possibles = ", ".join(OCDList(emits.keys()))
            raise GenerationError("String value for “emit” when calling Generators::run(…) "
                                 f"must be one of: {possibles}")
    else:
        emit = tuplize(*emit)
        if len(emit) < 1:
            possibles = ", ".join(emits['all'])
            raise GenerationError("Iterable value for “emit” when calling Generators::run(…) must contain "
                                 f"one or more valid emit options (one of: {possibles})")
    
    # Run generators, storing output files in $TMP/yodogg:
    artifacts = generate(*self.loaded_generators(),
                          verbose=self.VERBOSE,
                          target=target,
                          emit=emit,
                          output_directory=self.destination,
                          substitutions=substitutions)
    
    # Re-dictify:
    generated = { artifact[2].name : dict(base_path=artifact[0],
                                          outputs=artifact[1],
                                          module=artifact[2]) for artifact in artifacts }
    
    # TELL ME ABOUT IT.
    if self.VERBOSE:
        module_names = ", ".join(u8str(key) for key in OCDList(generated.keys()))
        print(f"run(): Accreted {len(generated)} total generation artifacts")
        print(f"run(): Module names: {module_names}")
    
    # Return redictified artifacts:
    return generated
class CDBBase(CDBSubBase, collections.abc.Sequence,
                          collections.abc.Sized):
    
    fields = tuplize('length')
    
    def __init__(self):
        self.clear()
    
    def push(self, source, command, directory=None, destination=None):
        if not source:
            raise CDBError("a file source is required per entry")
        entry = {
            'directory': os.fspath(directory or os.getcwd()),
            'command':   u8str(command),
            'file':      source
        }
        if destination:
            entry.update({ 'output': destination })
        self.entries[source] = entry
    
    def rollout(self):
        out = []
        for k, v in self.entries.items():
            out.append(v)
        return out
    
    @property
    def length(self):
        return len(self.entries)
    
    def clear(self):
        self.entries = {}
        return self
    
    def __len__(self):
        return self.length
    
    def __getitem__(self, key):
        try:
            return self.entries[int(key)]
        except (ValueError, KeyError):
            skey = str(key)
            if os.extsep in skey:
                for entry in self.entries.values():
                    if entry['file'] == skey:
                        return entry
            raise KeyError(f"not found: {key}")
    
    def to_string(self):
        return stringify(self, type(self).fields)
    
    def __repr__(self):
        return stringify(self, type(self).fields)
    
    def __str__(self):
        return u8str(json.dumps(self.rollout()))
    
    def __bytes__(self):
        return u8bytes(json.dumps(self.rollout()))
    
    def __bool__(self):
        return True
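# Usage sketch (added for illustration, not from the original module): CDBBase
# accumulates clang-style compilation-database entries keyed by source file and
# serializes them to JSON via __str__()/__bytes__(). This assumes CDBSubBase,
# CDBError, u8str, u8bytes and stringify are importable as in the original
# project; the file names and compiler flags below are made up.

cdb = CDBBase()
cdb.push('hello.c',  'clang -c hello.c -o hello.o',   directory='/tmp/project')
cdb.push('yodogg.c', 'clang -c yodogg.c -o yodogg.o', directory='/tmp/project')

assert len(cdb) == 2
print(str(cdb))     # a JSON array with one object per push() call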
def __class_getitem__(metacls,
                      typename: tx.Union[type, tuple],
                      clsname: tx.Optional[str] = None,
                      factory: tx.Optional[TypeFactory] = None,
                    **kwargs) -> type:
    """ Specialize the template type OCDType on a given iterable type.
        Returns the newly specialized type, as per metaclass type creation.
    """
    from string import capwords
    
    if __package__ is None or __package__ == '':
        from utils import find_generic_for_type
    else:
        from .utils import find_generic_for_type
    
    # Validate covariant typevar argument:
    if not typename:
        raise KeyError("OCDType is a templated type, "
                       "it requires a Python type on which to specialize")
    
    if type(typename) == tuple:
        tup: tuple = tx.cast(tuple, typename)
        if len(tup) == 2:
            typename: type = tx.cast(type, tup[0])
            clsname: str = tx.cast(str, tup[1])
        elif len(tup) == 3:
            typename: type = tx.cast(type, tup[0])
            clsname: str = tx.cast(str, tup[1])
            factory: TypeFactory = tx.cast(TypeFactory, tup[2])
        elif len(tup) > 3:
            raise KeyError("Too many arguments passed to OCDType template "
                          f"specialization: {tup}")
    
    typename = tx.cast(type, typename)
    
    if not hasattr(typename, '__name__'):
        raise TypeError("OCDType is a templated type, "
                        "it must be specialized using a Python type "
                       f"(not a {type(typename)})")
    
    if typename.__name__ in metacls.types or \
       typename.__name__ in metacls.subtypes:
        raise TypeError("OCDType cannot be specialized on an "
                        "existent product of an OCDType specialization")
    
    if not hasattr(typename, '__iter__'):
        raise TypeError("OCDType is a templated type, "
                        "it must be specialized on an iterable Python type "
                       f"(not a {type(typename)})")
    
    # Save any passed clsname:
    clsnamearg: tx.Optional[str] = clsname and str(clsname) or None
    
    # Compute the name for the new class:
    if not clsname:
        name: str = capwords(typename.__name__)
        clsname = f"{metacls.prefix}{name}"
    elif not clsname.startswith(metacls.prefix):
        name: str = capwords(clsname)
        clsname = f"{metacls.prefix}{name}"
    
    if not clsname.isidentifier():
        raise KeyError("specialization class name must be a valid identifier "
                      f"(not “{clsname}”)")
    
    # If the class name already exists in the metaclass type dictionary,
    # return it without creating a new class:
    if clsname in metacls.types:
        return metacls.types[clsname].Type
    
    # Stow the covariant typevar and the computed name in the new class,
    # and install an `__iter__()` method that delegates to the covariant
    # implementation and wraps the results in a `sorted()` iterator before
    # returning them:
    
    # modulename: str = getattr(metacls, '__module__', 'ocd')
    modulename: str = metacls.prefix.lower()
    generic: type = find_generic_for_type(typename, missing=tx.Generic)
    
    unwrapped: ClassGetType = tx.Generic.__class_getitem__.__wrapped__
    get: ClassGetType = getattr(generic, '__class_getitem__',
                        getattr(generic, '__getitem__',
                                classmethod(
                                    lambda cls, *args: unwrapped(cls, *args))))    # type: ignore
    
    params: tx.Tuple[tx.TypeVar, ...] = getattr(typename, '__parameters__',
                                        getattr(generic,  '__parameters__',
                                                           tuple()))
    
    key: MaybePredicate = kwargs.pop('key', None)
    rev: bool = kwargs.pop('reverse', False)
    
    attributes: tx.Dict[str, tx.Any] = {
        '__class_getitem__': get,
        '__covariant__':     typename,
        '__generic__':       generic,
        '__module__':        modulename,
        '__name__':          clsname,
        '__iter__':          lambda self: iter(sorted(typename.__iter__(self),
                                                      key=key,
                                                      reverse=rev)),
        # q.v. inline notes to the Python 3 `typing` module
        # supra: https://git.io/fAsNO
        '__args__':          tuplize(typename),
        '__parameters__':    params,
        '__getitem_args__':  tuplize(typename, clsnamearg, factory),
        '__origin__':        generic
    }
    
    # Using a factory -- a callable that returns an instance of the type,
    # à la “__new__” -- allows the wrapping of types like numpy.ndarray,
    # like so:
    #
    #     OCDNumpyArray = OCDType[numpy.ndarray, 'OCDNumpyArray',
    #                                             numpy.array]
    #
    # … where “numpy.array(…)” is the factory function returning instances
    # of “numpy.ndarray”:
    if callable(factory):
        attributes.update({
            '__new__':       lambda cls, *args, **kw: factory(*args, **kw),    # type: ignore
            '__factory__':   staticmethod(factory)
        })
    
    # Create the new class, as one does in the override of a metaclass’s
    # __new__(…) method, and stash it in a metaclass-local dict keyed with
    # the generated classname:
    baseset: tx.List[type] = kwargs.pop('baseset', [])
    
    cls = type(clsname,
               tuplize(typename, *baseset,
                       collections.abc.Iterable),   # type: ignore
               dict(attributes),
             **kwargs)
    
    metacls.types[clsname] = TypeAndBases.for_type(cls)
    return cls
def test_tuplize(self):
    matrix = [[1,  2,  3,  4],
              [5,  6,  7,  8],
              [9,  10, 0,  12],
              [13, 14, 15, 11]]
    temp = utils.tuplize(matrix)
    assert temp == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 0, 12, 13, 14, 15, 11)
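# The test above pins down what this project’s utils.tuplize() is expected to
# do: flatten a nested grid into one flat, hashable tuple (which is what lets
# the puzzle solvers earlier in this collection store visited states in a set).
# The helper itself is not reproduced here; the following is only a minimal
# sketch consistent with that test, not the project’s actual implementation.

def tuplize(matrix):
    """ Sketch: flatten a list of rows into a single tuple of cell values. """
    return tuple(cell for row in matrix for cell in row)

assert tuplize([[1, 2], [3, 4]]) == (1, 2, 3, 4)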