Example #1
 def inner():
     img = ax.imshow([[0, 1], [2, 3]])
     cursor = mplcursors.cursor(img)
     f_img = weakref.finalize(img, lambda: None)
     f_cursor = weakref.finalize(cursor, lambda: None)
     img.remove()
     return f_img, f_cursor
Example #2
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._silent = False

        # The console and qtconsole use `kernel-$pid`; the notebook uses `kernel-$uuid`.
        self._has_console_frontend = bool(re.match(
            r"\Akernel-\d+\Z",
            Path(self.config["IPKernelApp"]["connection_file"]).stem))

        if os.name == "posix":
            with ExitStack() as stack:
                for name in ["stdout", "stderr"]:
                    stream = getattr(sys, "__{}__".format(name))
                    def callback(data, *, _name=name, _stream=stream):
                        if not self._silent:
                            self._send_stream(
                                _name, data.decode(_stream.encoding))
                    stack.enter_context(
                        _redirection.redirect(stream.fileno(), callback))
                weakref.finalize(self, stack.pop_all().close)

        self._dead_engines = []
        engine_name = os.environ.get("IMATLAB_CONNECT")
        if engine_name:
            if re.match(r"(?a)\A[a-zA-Z]\w*\Z", engine_name):  # inline flags must lead the pattern on Python 3.11+
                self._engine = matlab.engine.connect_matlab(engine_name)
            else:
                self._engine = matlab.engine.connect_matlab()
        else:
            self._engine = matlab.engine.start_matlab()
        self._history = MatlabHistory(Path(self._call("prefdir")))
        self._engine.addpath(
            str(Path(sys.modules[__name__.split(".")[0]].__file__).
                with_name("data")),
            "-end")
Example #3
    def __init__(self):
        conn_parent, conn_child = mp.Pipe()

        # Use a local variable for child so that we can talk to the child in
        # on_finalize without needing a reference to self
        child = mp.Process(target=_converter_process_func, args=(conn_parent, conn_child))
        child.daemon = True
        child.start()
        self.child = child

        conn_child.close()
        self.conn = conn_parent

        self.busy = False
        self.conversionNotifier = QSocketNotifier(self.conn.fileno(),
                                                  QSocketNotifier.Read)

        # assign the activated signal of the notifier to a conversionDone
        # member to get a more meaningful signal name for others to connect to
        self.conversionDone = self.conversionNotifier.activated

        def on_finalize(conn):
            conn_parent.send({'command':'quit'})
            conn_parent.close()
            child.join()

        weakref.finalize(self, on_finalize, conn_parent)
Example #4
    def connect(self, receiver, sender=None, weak=True, dispatch_uid=None):
        """
        Connect receiver to sender for signal.

        Arguments:

            receiver
                A function or an instance method which is to receive signals.
                Receivers must be hashable objects.

                If weak is True, then receiver must be weak referenceable.

                Receivers must be able to accept keyword arguments.

                If a receiver is connected with a dispatch_uid argument, it
                will not be added if another receiver was already connected
                with that dispatch_uid.

            sender
                The sender to which the receiver should respond. Must either be
                of type Signal, or None to receive events from any sender.

            weak
                Whether to use weak references to the receiver. By default, the
                module will attempt to use weak references to the receiver
                objects. If this parameter is false, then strong references will
                be used.

            dispatch_uid
                An identifier used to uniquely identify a particular instance of
                a receiver. This will usually be a string, though it may be
                anything hashable.
        """

        if dispatch_uid:
            lookup_key = (dispatch_uid, _make_id(sender))
        else:
            lookup_key = (_make_id(receiver), _make_id(sender))

        if weak:
            ref = weakref.ref
            receiver_object = receiver
            # Check for bound methods
            if hasattr(receiver, '__self__') and hasattr(receiver, '__func__'):
                ref = WeakMethod
                receiver_object = receiver.__self__
            if six.PY3:
                receiver = ref(receiver)
                weakref.finalize(receiver_object, self._remove_receiver)
            else:
                receiver = ref(receiver, self._remove_receiver)

        with self.lock:
            self._clear_dead_receivers()
            for r_key, _ in self.receivers:
                if r_key == lookup_key:
                    break
            else:
                self.receivers.append((lookup_key, receiver))
            self.sender_receivers_cache.clear()
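A hedged usage sketch for the connect() API documented in Example #4, assuming the Django-style `django.dispatch.Signal` that this method mirrors; the signal and receiver names below are illustrative, not from the source.

import gc

from django.dispatch import Signal

order_placed = Signal()

class AuditLog:
    def on_order(self, sender, **kwargs):
        print("order from", sender)

log = AuditLog()
# Bound method: connect() wraps it in a WeakMethod, so dropping the last
# strong reference to `log` disconnects it automatically.
order_placed.connect(log.on_order, dispatch_uid="audit-log")
order_placed.send(sender="shop")   # receiver fires

del log
gc.collect()
order_placed.send(sender="shop")   # dead reference is skipped and cleaned up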
Example #5
    def initializeGL(self):
        print('initialize GL')

        if self._get_proc_address == 'ctypes':
            import ctypes,ctypes.util
            lgl = ctypes.cdll.LoadLibrary(ctypes.util.find_library('GL'))
            get_proc_address = lgl.glXGetProcAddress
            get_proc_address.restype = ctypes.c_void_p
            get_proc_address.argtypes = [ ctypes.c_char_p ]
            _get_proc_address = lambda name: get_proc_address(name if isinstance(name,bytes) else name.encode('latin1'))
        else:
            qgl_get_proc_address = Q.QGLContext.currentContext().getProcAddress
            _get_proc_address = lambda name: int(qgl_get_proc_address(name.decode('latin1') if isinstance(name,bytes) else name))

        if self._get_proc_address_debug:
            def getprocaddr(name):
                res = _get_proc_address(name)
                print('{} -> address {}'.format(name,res))
                return res
        else:
            getprocaddr = _get_proc_address

        self.ogl = self.m.opengl_cb_context
        self.ogl.init_gl(getprocaddr,None)

        weakref.finalize(self, lambda:self.ogl.set_update_callback(None))
        self.wakeup.connect(self.onWakeup,Q.Qt.QueuedConnection|Q.Qt.UniqueConnection)
        self.frameSwapped.connect(self.onFrameSwapped)
        self.ogl.set_update_callback(self.wakeup.emit)
        self.openglInitialized.emit(Q.QOpenGLContext.currentContext())
Example #6
 def __init__(self,name=None,dir='/tmp',text=False,encoding=None, expires=None):
     if name is None:
         fd,path = tempfile.mkstemp(suffix='.tmp',dir=dir,text=False)
     else:
         path = os.path.join(dir,name)
         if os.path.exists(path) and ((expires is None) or (time.time() + expires > os.stat(path).st_mtime)):
             self.new = False
         else:
             path = path + '.tmp'
         encoding = encoding if encoding else 'utf-8' if text else None
     self.name = path
     if name is not None:
         mode = 'w+' if self.new else 'r+'
         mode += 't' if text else 'b'
         self.file = open(path, mode, encoding=encoding)
         if encoding or text:
             self.raw = self.file.raw
         else:
             self.raw = self.file
     else:
         self.raw = os.fdopen(fd,'w+b')
         if encoding:
             if encoding is True:
                 encoding = 'utf-8'
             self.file = io.TextIOWrapper(self.raw,encoding=encoding)
         else:
             self.file = self.raw
     weakref.finalize(self, self.ifnew, os.unlink, path)
Example #7
    def __init__(self):
        super(QObject, self).__init__()

        conn_parent, conn_child = socketpair()

        # TODO: figure out which of the two sockets should be set to 
        #       inheritable and which should be passed to the child
        if hasattr(conn_child, 'set_inheritable'):
            conn_child.set_inheritable(True)

        # Use a local variable for child so that we can talk to the child in
        # on_finalize without needing a reference to self
        child = mp.Process(target=_converter_process_func, args=(conn_parent, conn_child))
        child.daemon = True
        child.start()
        self.child = child

        conn_child.close()
        self.conn = conn_parent

        self.busy = False
        self.notificationPending = False
        self.conversionNotifier = QSocketNotifier(self.conn.fileno(),
                                                  QSocketNotifier.Read)
        self.conversionNotifier.activated.connect(self._conversionNotifierActivated)

        def on_finalize(conn):
            sendObject(conn_parent, {'command':'quit'})
            conn_parent.close()
            child.join()

        weakref.finalize(self, on_finalize, conn_parent)
Example #8
    def _connect_signal(self, receiver, sender, weak, dispatch_uid):
        assert callable(receiver), 'Signal receivers must be callable'
        if not fun_accepts_kwargs(receiver):
            raise ValueError(
                'Signal receiver must accept keyword arguments.')

        if isinstance(sender, PromiseProxy):
            sender.__then__(
                self._connect_proxy, receiver, sender, weak, dispatch_uid,
            )
            return receiver

        lookup_key = _make_lookup_key(receiver, sender, dispatch_uid)

        if weak:
            ref, receiver_object = _boundmethod_safe_weakref(receiver)
            if PY3:
                receiver = ref(receiver)
                weakref.finalize(receiver_object, self._remove_receiver)
            else:
                receiver = ref(receiver, self._remove_receiver)

        with self.lock:
            self._clear_dead_receivers()
            for r_key, _ in self.receivers:
                if r_key == lookup_key:
                    break
            else:
                self.receivers.append((lookup_key, receiver))
            self.sender_receivers_cache.clear()

        return receiver
Example #9
    def __init__(self, username=None, password=None):
        self._username = self._remove_nonascii(username)
        self._password = self._remove_nonascii(password)
        self.logger.info('Initialized with user: %s', self._username)

        self.session = Session()
        # urllib3 will sleep for {backoff factor} * (2 ^ ({number of total retries} - 1)) seconds between attempts.
        self.session.mount('http://', HTTPAdapter(
            max_retries=Retry(total=2, backoff_factor=0.5, status_forcelist=[500, 502, 503, 504])
        ))
        self.session.mount('https://', HTTPAdapter(
            max_retries=Retry(total=2, backoff_factor=0.5, status_forcelist=[500, 502, 503, 504])
        ))
        self.set_useragent()
        # Avoid ResourceWarning: unclosed <ssl.SSLSocket ...> with python 3
        finalize(self, self.session.close)

        cookie_file = os.path.join(self.appdata_path, self._make_fs_safe(username)+'.lwp')
        self.session.cookies = LWPCookieJar(cookie_file)
        if not os.path.exists(cookie_file):
            # initialize new cookie file
            self.logger.info('Creating new cookie file: "%s"', cookie_file)
            self.set_mobile_cookies()
            self._save_cookies()
            os.chmod(cookie_file,  stat.S_IRUSR | stat.S_IWUSR)
        else:
            # load cookies
            self.logger.info('Loading cookies from file: "%s"', cookie_file)
            self.session.cookies.load(ignore_discard=True)
            if not self._has_cookie('forceMobile') or not self._has_cookie('mobileClient'):
                self.clear_mobile_cookies()
                self.set_mobile_cookies()
                self._save_cookies()
Example #10
    def __init__(self, handler):
        _path = msg.join_path(self._path, '__init__')

        self.handler = handler
        self.pub_subs = {
            'w': self.handler.ws_pub_sub,
            'd': mb,
            'l': self.handler.local_pub_sub,
        }

        for attr_name in dir(self):
            attribute = getattr(self, attr_name)
            if hasattr(attribute, 'msg_types'):
                for _type, channels in attribute.msg_types:
                    msg.code_debug(
                        _path,
                        'Adding action: %r ...' % attribute
                    )
                    self.register_action_in(
                        msg_type=_type, action=attribute,
                        channels=channels)

        finalize(
            self, msg.code_debug, self._path,
            'Deleting WSClass {0} from {0.handler} '
            '...'.format(self)
        )
Example #11
    def _delete_widget(self, widget):
        """
        Delete the OWBaseWidget instance.
        """
        widget.close()
        # Save settings to user global settings.
        widget.saveSettings()
        # Notify the widget it will be deleted.
        widget.onDeleteWidget()

        state = self.__widget_processing_state[widget]
        if state & WidgetManager._DelayDeleteMask:
            # If the widget is in an update loop and/or blocking we
            # delay the scheduled deletion until the widget is done.
            log.debug("Widget %s removed but still in state :%s. "
                      "Deferring deletion.", widget, state)
            self.__delay_delete.add(widget)
        else:
            widget.deleteLater()
            name = "{} '{}'".format(type(widget).__name__, widget.captionTitle)
            if log.isEnabledFor(logging.DEBUG):
                finalize(
                    widget, log.debug, "Destroyed namespace: %s", name
                )
            del self.__widget_processing_state[widget]
Example #12
    def __init__(
            self,
            scidb_url=None,
            scidb_auth=None,
            http_auth=None,
            verify=None,
            admin=False,
            namespace=None,
            use_arrow=False,
            result_size_limit=256,
            no_ops=False):
        if scidb_url is None:
            scidb_url = os.getenv('SCIDB_URL', 'http://localhost:8080')

        self.scidb_url = scidb_url
        self.verify = verify
        self.admin = admin
        self.namespace = namespace
        self.use_arrow = use_arrow
        self.result_size_limit = result_size_limit
        self.no_ops = no_ops

        if http_auth:
            self._http_auth = requests.auth.HTTPDigestAuth(*http_auth)
            self.http_auth = (http_auth[0], Password_Placeholder())
        else:
            self._http_auth = self.http_auth = None

        admin_shim = 1 if self.admin else 0
        if scidb_auth:
            self._id = self._shim(Shim.new_session,
                                  user=scidb_auth[0],
                                  password=scidb_auth[1],
                                  admin=admin_shim).text
            self.scidb_auth = (scidb_auth[0], Password_Placeholder())
        else:
            self._id = self._shim(Shim.new_session,
                                  admin=admin_shim).text
            self.scidb_auth = None

        finalize(self,
                 _shim_release_session,
                 self.scidb_url,
                 self._http_auth,
                 self.verify,
                 self._id)

        self.arrays = Arrays(self)

        self._uid = uuid.uuid1().hex
        self._lock = threading.Lock()
        self._array_cnt = 0
        self._formatter = string.Formatter()

        if self.no_ops:
            self.operators = None
            self._dir = None
        else:
            self.load_ops()
Example #13
 def __init__(self, title, width, height):
   self.win = SDL_CreateWindow(title.encode('utf-8'), SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED,
                        width, height, SDL_WINDOW_SHOWN | SDL_WINDOW_RESIZABLE)
   # Track the wrapper, not the window handle: passing self.win as both the
   # tracked object and a callback argument would keep it alive, so the
   # finalizer could only run at interpreter exit.
   weakref.finalize(self, SDL_DestroyWindow, self.win)
   self.dirtyRects = []
   self.width = width
   self.height = height
   self.scale = 1
Example #14
 def watch(self,action,done):
     self.action = action
     self.done = done
     if self.cancelled:
         done.add_done_callback(self.nocancel)
         weakref.finalize(self,self.cancel)
         self.cancelled = False
     return done
Example #15
    def __init__(self, db, name, gc=False):
        self.db = db
        self.name = name

        if gc:
            finalize(self,
                     self.db.iquery,
                     'remove({})'.format(self.name))
Example #16
 def __new__(cls, filename):
   if filename in AdvConfig._configs:
     n=AdvConfig._configs[filename]
     n.sync()
   else:
     n=super(AdvConfig,cls).__new__(cls)
     AdvConfig._configs[filename]=n
     finalize(n, n.sync)
   return n
Example #17
    def _get_properties(self, request):
        req_id = id(request)

        if req_id in self._request_properties:
            return self._request_properties[req_id]
        else:
            finalize(request, self._free_properties, req_id)
            properties = {}
            self._request_properties[req_id] = properties
            return properties
Example #18
 def __init__(self, service):
     super().__init__(service)
     logging.getLogger("chardet").setLevel(logging.WARNING)
     self.common_loop = service.loop
     self.common_connector = TCPConnector(limit=GET_URL_PARALLEL_LIMIT, loop=self.common_loop)
     self.common_cookie_jar = self.new_cookie_jar()
     self.common_client_timeout = aiohttp.ClientTimeout(sock_connect=GET_URL_CONNECT_TIMEOUT,
                                                        sock_read=GET_URL_RECV_TIMEOUT)
     logging.debug("init %s" % self.common_connector)
     weakref.finalize(self, _close_connector, self.common_loop, self.common_connector)
Example #19
  def __init__(self, path):
    self.surfacePtr = IMG_Load(path.encode("utf-8"))

    if not self.surfacePtr:
      raise FileNotFoundError(path)

    # Track the wrapping object rather than surfacePtr itself; a finalizer
    # whose arguments reference the tracked object keeps it alive forever.
    weakref.finalize(self, SDL_FreeSurface, self.surfacePtr)

    self.xScale = 1
    self.yScale = 1
Example #20
	def __init__(self, filename):
		''' Read contents of dictionary from file, set up deletion hook '''
		self.filename = filename
		dict.__init__(self)
		try:
			with open(self.filename, 'rb') as cache_file:
				self.update(load(cache_file))
		except FileNotFoundError:
			pass # on read, don't create the file if it does not exist
		# when object is deleted, update file
		finalize(self, lambda: self.dump_to_file())
Example #21
    def register_injector(cls, instance, injector):
        """Registers the injector for a specific instance, using weak references.

        Arguments
        ---------
        instance: any
            an arbitrary object
        injector: Injector
            the injector to be used
        """
        key = id(instance)
        cls.INSTANCE_REGISTRY[key] = injector
        # Pass the precomputed key instead of a lambda that closes over
        # `instance`: such a closure would keep the instance alive, so the
        # finalizer could never run.
        weakref.finalize(instance, cls.INSTANCE_REGISTRY.pop, key)
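The same pattern in a self-contained form (the names below are illustrative, not from the source): a registry keyed by id() whose entries are dropped by a finalizer once the instance is collected.

import gc
import weakref

REGISTRY = {}

def register(instance, payload):
    key = id(instance)
    REGISTRY[key] = payload
    # The finalizer receives only the integer key, so it holds no reference
    # to `instance` and cannot keep it alive.
    weakref.finalize(instance, REGISTRY.pop, key, None)

class Thing:
    pass

t = Thing()
register(t, "injector")
del t
gc.collect()
print(REGISTRY)   # {} -- the stale id() key was removed automatically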
Example #22
    def __init__(self, parent):
        self.parent = parent

        self.__methods = []

        if self.__options__:
            self.__config = Config(
                self.__class__.__gsettings__.get("schema"),
                self.__class__.__gsettings__.get("path")
            )

        weakref.finalize(self, self._on_plugin_delete)
Example #23
    def __init__(self):
        super().__init__()
        self.__transfers = {}
        self.__signals = []

        self._object_manager = Gio.DBusObjectManagerClient.new_for_bus_sync(
            Gio.BusType.SESSION, Gio.DBusObjectManagerClientFlags.NONE,
            self.__bus_name, '/', None, None, None)

        self.__signals.append(self._object_manager.connect('object-added', self._on_object_added))
        self.__signals.append(self._object_manager.connect('object-removed', self._on_object_removed))

        weakref.finalize(self, self._on_delete)
Example #24
    def __init__(self, defer_atexit=False):
        """Constructor.

        :param bool defer_atexit: cleanup() to atexit instead of after garbage collection.
        """
        self.name = tempfile.mkdtemp('sphinxcontrib_versioning')
        if defer_atexit:
            atexit.register(shutil.rmtree, self.name, True)
            return
        try:
            weakref.finalize(self, shutil.rmtree, self.name, True)
        except AttributeError:
            weakref.proxy(self, functools.partial(shutil.rmtree, self.name, True))
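A small stdlib-only sketch (the ScratchDir name is illustrative, not from the source) of the two cleanup strategies the constructor above chooses between: removing the directory when the object is garbage collected, or deferring removal to interpreter exit via atexit.

import atexit
import shutil
import tempfile
import weakref

class ScratchDir:
    def __init__(self, defer_atexit=False):
        self.name = tempfile.mkdtemp()
        if defer_atexit:
            # Keep the directory for the whole run; remove it at exit.
            atexit.register(shutil.rmtree, self.name, True)
        else:
            # Remove the directory as soon as this object is collected.
            weakref.finalize(self, shutil.rmtree, self.name, True)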
Example #25
    def __lookup_single(cls, o):
        if o is None:
            return None

        cache = cls.__minidb_cache__
        if o.id not in cache:
            if DEBUG_OBJECT_CACHE:
                logger.debug('Storing id={} in cache {}'.format(o.id, o))
                weakref.finalize(o, cls._finalize, o.id)
            cache[o.id] = o
        else:
            if DEBUG_OBJECT_CACHE:
                logger.debug('Getting id={} from cache'.format(o.id))
        return cache[o.id]
Example #26
    def save(self, db=None):
        if getattr(self, Store.MINIDB_ATTR, None) is None:
            if db is None:
                raise ValueError('Needs a db object')
            setattr(self, Store.MINIDB_ATTR, db)

        getattr(self, Store.MINIDB_ATTR).save_or_update(self)

        if DEBUG_OBJECT_CACHE:
            logger.debug('Storing id={} in cache {}'.format(self.id, self))
            weakref.finalize(self, self.__class__._finalize, self.id)
        self.__class__.__minidb_cache__[self.id] = self

        return self
Example #27
    def bore(self):
        """Create SSH tunnel from given context."""
        cmd = ['ssh', '-fNT']

        if logging.getLogger().getEffectiveLevel() == logging.DEBUG:
            cmd.append('-v')
        else:
            cmd.append('-q')

        if self.context.port:
            cmd.extend(('-p', str(self.context.port)))

        cmd.extend(('-L', '{}:{}'.format(self.context.local_socket,
                                         self.context.remote_socket)))
        if self.context.identity_file:
            cmd.extend(('-i', self.context.identity_file))

        cmd.append('{}@{}'.format(self.context.username,
                                  self.context.hostname))

        logging.debug('Opening tunnel "%s", cmd "%s"', self.context.uri,
                      ' '.join(cmd))

        tunnel = subprocess.Popen(cmd, close_fds=True)
        # The return value of Popen() has no long term value as that process
        # has already exited by the time control is returned here. This is a
        # side effect of the -f option. wait() will be called to clean up
        # resources.
        for _ in range(300):
            # TODO: Make timeout configurable
            if os.path.exists(self.context.local_socket) \
                    or tunnel.returncode is not None:
                break
            with suppress(subprocess.TimeoutExpired):
                # waiting for either socket to be created
                # or first child to exit
                tunnel.wait(0.5)
        else:
            raise TimeoutError(
                'Failed to create tunnel "{}", using: "{}"'.format(
                    self.context.uri, ' '.join(cmd)))
        if tunnel.returncode is not None and tunnel.returncode != 0:
            raise subprocess.CalledProcessError(tunnel.returncode,
                                                ' '.join(cmd))
        tunnel.wait()

        self._closed = False
        weakref.finalize(self, self.close)
        return self
Example #28
    def connect(self, receiver):

        # Check for bound methods
        if hasattr(receiver, "__self__") and hasattr(receiver, "__func__"):
            ref = weakref.WeakMethod
            receiver_object = receiver.__self__
        else:
            ref = weakref.ref
            receiver_object = receiver
        receiver_ref = ref(receiver)
        weakref.finalize(receiver_object, self._remove_receiver)

        with self._lock:
            self._clear_dead_receivers()
            if receiver not in self._receivers:
                self._receivers.append(receiver_ref)
Example #29
        def __new__(cls, *args, **kwargs):
            """Creates a new object instance and adds the private finalizer
            attributes to it.

            Returns: new object instance

            Arguments:
            * *args, **kwargs -- passed to the parent instance creator
                                 (which ignores them)
            """
            # Note:   Do not pass a (hard) reference to instance to the
            #         finalizer as func/args/kwargs, it'd keep the object
            #         alive until the program terminates.
            #         A weak reference is fine.
            #
            # Note 2: When using weakrefs and not calling finalize() in
            #         __del__, the object may already have disappeared
            #         when weakref.finalize() kicks in.
            #         Make sure that _finalizer() gets called,
            #         i.e. keep __del__() from the base class.
            #
            # Note 3: the _finalize_called attribute is (probably) useless
            #         for this class
            instance = super(AutoFinalizedObject, cls).__new__(
                cls, *args, **kwargs
            )

            instance._finalizer = weakref.finalize(
                instance, _do_finalize_object_ref, weakref.ref(instance)
            )

            return instance
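A short stdlib-only sketch of the pitfall described in the notes above (the `Resource` name is illustrative): passing a hard reference to the tracked object as a finalizer argument keeps it alive, so the callback can only run at interpreter shutdown.

import gc
import weakref

class Resource:
    pass

r = Resource()

# finalize() stores strong references to its arguments, and one of those
# arguments is `r` itself, so `r` can never be collected.
f = weakref.finalize(r, print, "finalizing", r)

del r
gc.collect()
print(f.alive)   # True: the object is kept alive by its own finalizer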
Example #30
    def __init__(self, port, simPort=None):
        """Constructor

        :param port: the physical SPI port
        :type port: :class:`.SPI.Port`
        :param simPort: This must be an object that implements all of
                        the spi* functions from hal_impl that you use.
                        See ``test_spi.py`` for an example.
        """
        
        if hal.HALIsSimulation():
            if simPort is None:
                raise ValueError("You will need to use a mock for this SPI port, or provide a simPort implementation")
            
            # Just check for basic functionality
            assert hasattr(simPort, 'spiInitialize')
            assert hasattr(simPort, 'spiClose')
            
            self._port = (simPort, port)
        else:
            self._port = port
        
        self.bitOrder = 0
        self.clockPolarity = 0
        self.dataOnTrailing = 0

        hal.spiInitialize(self._port)
        self.__finalizer = weakref.finalize(self, _freeSPI, self._port)

        SPI.devices += 1
        hal.HALReport(hal.HALUsageReporting.kResourceType_SPI, SPI.devices)
Example #31
 def numba_device_array(n):
     a = numba.cuda.device_array((n,), dtype="u1")
     weakref.finalize(a, numba.cuda.current_context)
     return a
Example #32
 def __init__(self):
     self._finalizer = weakref.finalize(self, self.close_connection)
Example #33
    def __init__(self, *args, **kwargs):
        """Counter constructor.

        The counter will start counting immediately.

        Positional arguments may be either channel numbers, :class:`.DigitalSource`
        sources, or :class:`.AnalogTrigger` sources in the following order:

        A "source" is any valid single-argument input to :meth:`setUpSource` and :meth:`setDownSource`
        
        - (none)
        - upSource
        - upSource, downSource
        And, to keep consistency with the Java WPILib:
        - encodingType, upSource, downSource, inverted

        If the passed object has a
        `getPortHandleForRouting` function, it is assumed to be a DigitalSource.
        If the passed object has a `createOutput` function, it is assumed to
        be an AnalogTrigger.

        In addition, extra keyword parameters may be provided for mode, inverted,
        and encodingType.

        :param upSource: The source (channel num, DigitalInput, or AnalogTrigger)
            that should be used for up counting.
        :param downSource: The source (channel num, DigitalInput, or AnalogTrigger)
            that should be used for down counting or direction control.
        :param mode:
            How and what the counter counts (see :class:`.Mode`).  Defaults to
            `Mode.kTwoPulse` for zero or one source, and
            `Mode.kExternalDirection` for two sources.
        :param inverted:
            Flips the direction of counting.  Defaults to False if unspecified.
            Only used when two sources are specified.
        :param encodingType:
            Either k1X or k2X to indicate 1X or 2X decoding. 4X decoding
            is not supported by Counter; use `Encoder` instead.  Defaults
            to k1X if unspecified.  Only used when two sources are specified.
        :type encodingType: :class:`.Counter.EncodingType`
        """

        source_identifier = [int, HasAttribute("getPortHandleForRouting"), HasAttribute("createOutput")]

        argument_templates = [[],
                              [("upSource", source_identifier), ],
                              [("upSource", source_identifier), ("downSource", source_identifier)],
                              [("encodingType", None), ("upSource", source_identifier),
                               ("downSource", source_identifier), ("inverted", bool)], ]


        _, results = match_arglist('Counter.__init__',
                                   args, kwargs, argument_templates, allow_extra_kwargs=True)

        # extract arguments
        upSource = results.pop("upSource", None)
        downSource = results.pop("downSource", None)

        encodingType = results.pop("encodingType", None)
        inverted = results.pop("inverted", False)
        mode = results.pop("mode", None)

        if mode is None:
            #Get the mode
            if upSource is not None and downSource is not None:
                mode = self.Mode.kExternalDirection
            else:
                mode = self.Mode.kTwoPulse

        # save some variables
        self.distancePerPulse = 1.0 # distance of travel for each tick
        self.pidSource = self.PIDSourceType.kDisplacement

        # create counter
        self._counter, self.index = hal.initializeCounter(mode)
        self.__finalizer = \
            weakref.finalize(self, _freeCounter, self)

        self.setMaxPeriod(.5)

        hal.report(hal.UsageReporting.kResourceType_Counter, self.index, mode)

        # Set sources
        if upSource is not None:
            self.setUpSource(upSource)
        else:
            self.upSource = None

        if downSource is not None:
            self.setDownSource(downSource)
        else:
            self.downSource = None

        # when given two sources, set edges
        if upSource is not None and downSource is not None:
            if encodingType == self.EncodingType.k1X:
                self.setUpSourceEdge(True, False)
                hal.setCounterAverageSize(self._counter, 1)
            else:
                self.setUpSourceEdge(True, True)
                hal.setCounterAverageSize(self._counter, 2)
            self.setDownSourceEdge(inverted, True)
Example #34
    def connect(self, receiver, sender=None, weak=True, dispatch_uid=None):
        """
        Connect receiver to sender for signal.

        Arguments:

            receiver
                A function or an instance method which is to receive signals.
                Receivers must be hashable objects.

                If weak is True, then receiver must be weak referenceable.

                Receivers must be able to accept keyword arguments.

                If a receiver is connected with a dispatch_uid argument, it
                will not be added if another receiver was already connected
                with that dispatch_uid.

            sender
                The sender to which the receiver should respond. Must either be
                of type Signal, or None to receive events from any sender.

            weak
                Whether to use weak references to the receiver. By default, the
                module will attempt to use weak references to the receiver
                objects. If this parameter is false, then strong references will
                be used.

            dispatch_uid
                An identifier used to uniquely identify a particular instance of
                a receiver. This will usually be a string, though it may be
                anything hashable.
        """
        #from django.conf import settings

        # If DEBUG is on, check that we got a good receiver
        if pysignals_debug:
            import inspect
            assert callable(receiver), "Signal receivers must be callable."

            # Check for **kwargs
            if not func_accepts_kwargs(receiver):
                raise ValueError(
                    "Signal receivers must accept keyword arguments (**kwargs)."
                )

        if dispatch_uid:
            lookup_key = (dispatch_uid, _make_id(sender))
        else:
            lookup_key = (_make_id(receiver), _make_id(sender))

        if weak:
            ref = weakref.ref
            receiver_object = receiver
            # Check for bound methods
            if hasattr(receiver, '__self__') and hasattr(receiver, '__func__'):
                ref = WeakMethod
                receiver_object = receiver.__self__
            if six.PY3:
                receiver = ref(receiver)
                weakref.finalize(receiver_object, self._remove_receiver)
            else:
                receiver = ref(receiver, self._remove_receiver)

        with self.lock:
            self._clear_dead_receivers()
            for r_key, _ in self.receivers:
                if r_key == lookup_key:
                    break
            else:
                self.receivers.append((lookup_key, receiver))
            self.sender_receivers_cache.clear()
Example #35
 def insert(self, obj: Any, node: ANFNode) -> None:
     """Add a mapping to the environment, rejecting duplicates."""
     assert id(obj) not in self._object_map
     self._object_map[id(obj)] = node
     finalize(obj, self._remove, id(obj))
Example #36
 def __new__(cls, *args):
     self = super().__new__(cls, *args)
     self._destructor = weakref.finalize(self, self._destruct, str(self))
     return self
Example #37
 def numba_cuda_array(n):
     a = numba.cuda.device_array((n, ), dtype=np.uint8)
     weakref.finalize(a, numba.cuda.current_context)
     return a
Example #38
    def __init__(self,
                 pod_template=None,
                 name=None,
                 namespace=None,
                 n_workers=None,
                 host=None,
                 port=None,
                 env=None,
                 **kwargs):
        name = name or dask.config.get('kubernetes.name')
        namespace = namespace or dask.config.get('kubernetes.namespace')
        n_workers = n_workers if n_workers is not None else dask.config.get(
            'kubernetes.count.start')
        host = host or dask.config.get('kubernetes.host')
        port = port if port is not None else dask.config.get('kubernetes.port')
        env = env if env is not None else dask.config.get('kubernetes.env')

        if not pod_template and dask.config.get('kubernetes.worker-template',
                                                None):
            d = dask.config.get('kubernetes.worker-template')
            d = dask.config.expand_environment_variables(d)
            pod_template = make_pod_from_dict(d)

        if not pod_template and dask.config.get(
                'kubernetes.worker-template-path', None):
            import yaml
            fn = dask.config.get('kubernetes.worker-template-path')
            fn = fn.format(**os.environ)
            with open(fn) as f:
                d = yaml.safe_load(f)
            d = dask.config.expand_environment_variables(d)
            pod_template = make_pod_from_dict(d)

        if not pod_template:
            msg = ("Worker pod specification not provided. See KubeCluster "
                   "docstring for ways to specify workers")
            raise ValueError(msg)

        self.cluster = LocalCluster(ip=host or socket.gethostname(),
                                    scheduler_port=port,
                                    n_workers=0,
                                    **kwargs)
        try:
            kubernetes.config.load_incluster_config()
        except kubernetes.config.ConfigException:
            kubernetes.config.load_kube_config()

        self.core_api = kubernetes.client.CoreV1Api()

        if namespace is None:
            namespace = _namespace_default()

        name = name.format(user=getpass.getuser(),
                           uuid=str(uuid.uuid4())[:10],
                           **os.environ)
        name = escape(name)

        self.pod_template = clean_pod_template(pod_template)
        # Default labels that can't be overwritten
        self.pod_template.metadata.labels['dask.org/cluster-name'] = name
        self.pod_template.metadata.labels['user'] = escape(getpass.getuser())
        self.pod_template.metadata.labels['app'] = 'dask'
        self.pod_template.metadata.labels['component'] = 'dask-worker'
        self.pod_template.metadata.namespace = namespace

        self.pod_template.spec.containers[0].env.append(
            kubernetes.client.V1EnvVar(name='DASK_SCHEDULER_ADDRESS',
                                       value=self.scheduler_address))
        if env:
            self.pod_template.spec.containers[0].env.extend([
                kubernetes.client.V1EnvVar(name=k, value=str(v))
                for k, v in env.items()
            ])
        self.pod_template.metadata.generate_name = name

        finalize(self, _cleanup_pods, self.namespace,
                 self.pod_template.metadata.labels)

        if n_workers:
            self.scale(n_workers)
Example #39
    For use in finalizers, __del__ methods, and similar; it is advised
    to early bind this function rather than look it up when calling it,
    since at shutdown module globals may be cleared.
    """
    # At shutdown, the attribute may have been cleared or set to None.
    v = globals().get('_shutting_down')
    return v is True or v is None


# weakref.finalize registers an exit function that runs all finalizers for
# which atexit is True. Some of these finalizers may call shutting_down() to
# check whether the interpreter is shutting down. For this to behave correctly,
# we need to make sure that _at_shutdown is called before the finalizer exit
# function. Since atexit operates as a LIFO stack, we first construct a dummy
# finalizer then register atexit to ensure this ordering.
weakref.finalize(lambda: None, lambda: None)
atexit.register(_at_shutdown)


class ConfigStack:
    """A stack for tracking target configurations in the compiler.

    It stores the stack in a thread-local class attribute. All instances in the
    same thread will see the same stack.
    """
    tls = threading.local()

    def __init__(self):
        tls = self.tls
        try:
            stk = tls.stack
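A minimal sketch (not from the source) of the ordering fact the comment in Example #39 relies on: weakref.finalize registers its internal atexit hook when the first finalizer is created, and atexit callbacks run in LIFO order, so anything registered after that first finalizer runs before the atexit=True finalizers fire at shutdown.

import atexit
import weakref

# The first finalize() call registers weakref's internal exit function.
weakref.finalize(lambda: None, lambda: None)

# Registered afterwards, so (LIFO) this runs *before* that exit function,
# i.e. before any atexit=True finalizers are executed at shutdown.
atexit.register(print, "runs before the registered finalizers")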
Example #40
 def __init__(self, permutation: List[int], sector: int):
     """Create a symmetry given a `permutation` of sites and `sector` specifying the eigenvalue."""
     self._payload = _create_symmetry(permutation, sector)
     self._finalizer = weakref.finalize(self,
                                        _destroy(_lib.ls_destroy_symmetry),
                                        self._payload)
Example #41
    async def _start(self):
        self._generate_name = self._generate_name or dask.config.get(
            "kubernetes.name")
        self._namespace = self._namespace or dask.config.get(
            "kubernetes.namespace")
        self._idle_timeout = self._idle_timeout or dask.config.get(
            "kubernetes.idle-timeout")
        self._scheduler_service_wait_timeout = (
            self._scheduler_service_wait_timeout
            or dask.config.get("kubernetes.scheduler-service-wait-timeout"))
        self._deploy_mode = self._deploy_mode or dask.config.get(
            "kubernetes.deploy-mode")

        self._n_workers = (self._n_workers if self._n_workers is not None else
                           dask.config.get("kubernetes.count.start"))
        self.host = self.host or dask.config.get("kubernetes.host")
        self.port = (self.port if self.port is not None else
                     dask.config.get("kubernetes.port"))
        self._protocol = self._protocol or dask.config.get(
            "kubernetes.protocol")
        self._interface = self._interface or dask.config.get(
            "kubernetes.interface")
        self._dashboard_address = self._dashboard_address or dask.config.get(
            "kubernetes.dashboard_address")
        self.env = (self.env if self.env is not None else
                    dask.config.get("kubernetes.env"))

        if not self.pod_template and dask.config.get(
                "kubernetes.worker-template", None):
            d = dask.config.get("kubernetes.worker-template")
            d = dask.config.expand_environment_variables(d)
            self.pod_template = make_pod_from_dict(d)

        if not self.pod_template and dask.config.get(
                "kubernetes.worker-template-path", None):
            import yaml

            fn = dask.config.get("kubernetes.worker-template-path")
            fn = fn.format(**os.environ)
            with open(fn) as f:
                d = yaml.safe_load(f)
            d = dask.config.expand_environment_variables(d)
            self.pod_template = make_pod_from_dict(d)

        if not self.pod_template:
            msg = ("Worker pod specification not provided. See KubeCluster "
                   "docstring for ways to specify workers")
            raise ValueError(msg)

        self.pod_template = clean_pod_template(self.pod_template)
        await ClusterAuth.load_first(self.auth)

        self.core_api = kubernetes.client.CoreV1Api()

        if self._namespace is None:
            self._namespace = _namespace_default()

        self._generate_name = self._generate_name.format(
            user=getpass.getuser(), uuid=str(uuid.uuid4())[:10], **os.environ)
        self._generate_name = escape(self._generate_name)

        # Default labels that can't be overwritten
        self.pod_template.metadata.labels[
            "dask.org/cluster-name"] = self._generate_name
        self.pod_template.metadata.labels["user"] = escape(getpass.getuser())
        self.pod_template.metadata.labels["app"] = "dask"
        self.pod_template.metadata.namespace = self._namespace

        if self.env:
            self.pod_template.spec.containers[0].env.extend([
                kubernetes.client.V1EnvVar(name=k, value=str(v))
                for k, v in self.env.items()
            ])
        self.pod_template.metadata.generate_name = self._generate_name

        finalize(self, _cleanup_resources, self._namespace,
                 self.pod_template.metadata.labels)

        common_options = {
            "core_api": self.core_api,
            "pod_template": self.pod_template,
            "namespace": self._namespace,
            "loop": self.loop,
        }

        if self._deploy_mode == "local":
            self.scheduler_spec = {
                "cls": dask.distributed.Scheduler,
                "options": {
                    "protocol": self._protocol,
                    "interface": self._interface,
                    "host": self.host,
                    "port": self.port,
                    "dashboard_address": self._dashboard_address,
                    "security": self.security,
                },
            }
        elif self._deploy_mode == "remote":
            self.scheduler_spec = {
                "cls": Scheduler,
                "options": {
                    "idle_timeout": self._idle_timeout,
                    "service_wait_timeout_s":
                    self._scheduler_service_wait_timeout,
                    **common_options,
                },
            }
        else:
            raise RuntimeError("Unknown deploy mode %s" % self._deploy_mode)

        self.new_spec = {"cls": Worker, "options": {**common_options}}
        self.worker_spec = {i: self.new_spec for i in range(self._n_workers)}

        await super()._start()
Example #42
 def load_lib(self):
     self._lib = npct.load_library(self.lib_file, '.')
     self._function = self._lib.particle_loop
     self._cleanup_lib = finalize(self, cleanup_unload_lib, self._lib)
Example #43
 def __init__(self, generators: List[Symmetry]):
     """Construct a symmetry group from a list of generators."""
     self._payload = _create_group(generators)
     self._finalizer = weakref.finalize(self,
                                        _destroy(_lib.ls_destroy_group),
                                        self._payload)
Example #44
    def __init__(self, *args, **kwargs):
        """Constructor.

        Arguments can be supplied as positional or keyword.  Acceptable
        positional argument combinations are:
        
        - forwardChannel, reverseChannel
        - moduleNumber, forwardChannel, reverseChannel

        Alternatively, the above names can be used as keyword arguments.

        :param moduleNumber: The module number of the solenoid module to use.
        :param forwardChannel: The forward channel number on the module to control (0..7)
        :param reverseChannel: The reverse channel number on the module to control  (0..7)
        """
        # keyword arguments
        forwardChannel = kwargs.pop("forwardChannel", None)
        reverseChannel = kwargs.pop("reverseChannel", None)
        moduleNumber = kwargs.pop("moduleNumber", None)

        if kwargs:
            warnings.warn("unknown keyword arguments: %s" % kwargs.keys(),
                          RuntimeWarning)

        # positional arguments
        if len(args) == 2:
            forwardChannel, reverseChannel = args
        elif len(args) == 3:
            moduleNumber, forwardChannel, reverseChannel = args
        elif len(args) != 0:
            raise ValueError(
                "don't know how to handle %d positional arguments" % len(args))

        if moduleNumber is None:
            moduleNumber = SensorUtil.getDefaultSolenoidModule()
        if forwardChannel is None:
            raise ValueError("must specify forward channel")
        if reverseChannel is None:
            raise ValueError("must specify reverse channel")

        super().__init__(moduleNumber)

        self.valueEntry = None
        SensorUtil.checkSolenoidModule(moduleNumber)
        SensorUtil.checkSolenoidChannel(forwardChannel)
        SensorUtil.checkSolenoidChannel(reverseChannel)

        portHandle = hal.getPortWithModule(moduleNumber, forwardChannel)
        self.forwardHandle = hal.initializeSolenoidPort(portHandle)

        try:
            portHandle = hal.getPortWithModule(moduleNumber, reverseChannel)
            self.reverseHandle = hal.initializeSolenoidPort(portHandle)
        except Exception:
            # free the forward handle on exception, then rethrow
            hal.freeSolenoidPort(self.forwardHandle)
            self.forwardHandle = None
            self.reverseHandle = None
            raise

        self.forwardMask = 1 << forwardChannel
        self.reverseMask = 1 << reverseChannel

        # Need this to free on unit test wpilib reset
        Resource._add_global_resource(self)

        hal.report(hal.UsageReporting.kResourceType_Solenoid, forwardChannel,
                   moduleNumber)
        hal.report(hal.UsageReporting.kResourceType_Solenoid, reverseChannel,
                   moduleNumber)

        self.setName("DoubleSolenoid", moduleNumber, forwardChannel)

        self.__finalizer = weakref.finalize(self, _freeSolenoid,
                                            self.forwardHandle,
                                            self.reverseHandle)
Example #45
 def rmm_cuda_array(n):
     a = rmm.device_array(n, dtype=np.uint8)
     weakref.finalize(a, numba.cuda.current_context)
     return a
Example #46
 async def start(self):
     assert self.connection
     weakref.finalize(
         self, self.proc.kill
     )  # https://github.com/ronf/asyncssh/issues/112
     await super().start()
Example #47
 def _setup_finalizer(self):
     # pylint: disable=E1101
     self.__class__._finalizers.append(
         weakref.finalize(self, _finalize_tmpdir, weakref.ref(self)))
Example #48
#*************************************************************************#
# File Name: testweakref.py
# Author: yoghourt->ilvcr
# Mail: [email protected]  @@  [email protected]
# Created Time: Thu Mar 28 20:02:41 2019
# Description:
#************************************************************************#

# error

import weakref

s1 = {1, 2, 3}

s2 = s1


def bye():
    print('Gone with the wind...')


ender = weakref.finalize(s1, bye)

print('----------------------------------------------\n\n')
print(ender.alive)
print('\n\n----------------------------------------------')

del s1

# Still True: s2 keeps the set alive, so the finalizer has not run yet.
print(ender.alive)
Example #49
    def __init__(
        self,
        savedir="",
        archivefile="nethack.%(pid)i.%(time)s.zip",
        character="mon-hum-neu-mal",
        max_episode_steps=5000,
        observation_keys=(
            "glyphs",
            "chars",
            "colors",
            "specials",
            "blstats",
            "message",
            "inv_glyphs",
            "inv_strs",
            "inv_letters",
            "inv_oclasses",
            "screen_descriptions",
        ),
        actions=None,
        options=None,
        wizard=False,
        allow_all_yn_questions=False,
    ):
        """Constructs a new NLE environment.

        Args:
            savedir (str or None): path to save ttyrecs (game recordings) into.
                Defaults to "" (empty string), which makes NLE choose the
                directory name. If None, don't save any data. Otherwise,
                interpreted as a path to a new or existing directory.
            character (str): name of character. Defaults to "mon-hum-neu-mal".
            max_episode_steps (int): maximum amount of steps allowed before the
                game is forcefully quit. In such cases, ``info["end_status"]``
                will be equal to ``StepStatus.ABORTED``. Defaults to 5000.
            observation_keys (list): keys to use when creating the observation.
                Defaults to all.
            actions (list): list of actions. If None, the full action space will
                be used, i.e. ``nle.nethack.ACTIONS``. Defaults to None.
            options (list): list of game options to initialize Nethack. If None,
                Nethack will be initialized with the options found in
                ``nle.nethack.NETHACKOPTIONS``. Defaults to None.
            wizard (bool): activate wizard mode. Defaults to False.
            allow_all_yn_questions (bool):
                If set to True, no y/n questions in step() are declined.
                If set to False, only elements of SKIP_EXCEPTIONS are not declined.
                Defaults to False.
        """

        self.character = character
        self._max_episode_steps = max_episode_steps
        self._allow_all_yn_questions = allow_all_yn_questions

        if actions is None:
            actions = FULL_ACTIONS
        self._actions = actions

        self.last_observation = None

        try:
            if savedir is None:
                self.savedir = None
                self._stats_file = None
                self._stats_logger = None
            elif savedir:
                self.savedir = os.path.abspath(savedir)
                os.makedirs(self.savedir)
            else:  # Empty savedir: We create our unique savedir inside nle_data/.
                parent_dir = os.path.join(os.getcwd(), "nle_data")
                os.makedirs(parent_dir, exist_ok=True)
                self.savedir = tempfile.mkdtemp(
                    prefix=time.strftime("%Y%m%d-%H%M%S_"), dir=parent_dir)
        except FileExistsError:
            logger.info("Using existing savedir: %s", self.savedir)
        else:
            if self.savedir:
                logger.info("Created savedir: %s", self.savedir)
            else:
                logger.info("Not saving any NLE data.")

        # TODO: Fix stats_file logic.
        # self._setup_statsfile = self.savedir is not None
        self._setup_statsfile = False
        self._stats_file = None
        self._stats_logger = None

        self._observation_keys = list(observation_keys)

        # Observations we always need.
        for key in (
                "glyphs",
                "blstats",
                "message",
                "program_state",
                "internal",
        ):
            if key not in self._observation_keys:
                self._observation_keys.append(key)

        self._glyph_index = self._observation_keys.index("glyphs")
        self._blstats_index = self._observation_keys.index("blstats")
        self._message_index = self._observation_keys.index("message")
        self._program_state_index = self._observation_keys.index(
            "program_state")
        self._internal_index = self._observation_keys.index("internal")

        self._original_observation_keys = observation_keys
        self._original_indices = tuple(
            self._observation_keys.index(key) for key in observation_keys)

        if self.savedir:
            self._ttyrec_pattern = os.path.join(
                self.savedir, "nle.%i.%%i.ttyrec.bz2" % os.getpid())
            ttyrec = self._ttyrec_pattern % 0
        else:
            ttyrec = "/dev/null"

        self.env = nethack.Nethack(
            observation_keys=self._observation_keys,
            options=options,
            playername="Agent-" + self.character,
            ttyrec=ttyrec,
            wizard=wizard,
        )
        self._close_env = weakref.finalize(self, lambda e: e.close(), self.env)

        self._random = random.SystemRandom()

        # -1 so that it's 0-based on first reset
        self._episode = -1

        space_dict = {
            "glyphs":
            gym.spaces.Box(low=0,
                           high=nethack.MAX_GLYPH,
                           **nethack.OBSERVATION_DESC["glyphs"]),
            "chars":
            gym.spaces.Box(low=0,
                           high=255,
                           **nethack.OBSERVATION_DESC["chars"]),
            "colors":
            gym.spaces.Box(low=0,
                           high=15,
                           **nethack.OBSERVATION_DESC["colors"]),
            "specials":
            gym.spaces.Box(low=0,
                           high=255,
                           **nethack.OBSERVATION_DESC["specials"]),
            "blstats":
            gym.spaces.Box(
                low=np.iinfo(np.int32).min,
                high=np.iinfo(np.int32).max,
                **nethack.OBSERVATION_DESC["blstats"],
            ),
            "message":
            gym.spaces.Box(
                low=np.iinfo(np.uint8).min,
                high=np.iinfo(np.uint8).max,
                **nethack.OBSERVATION_DESC["message"],
            ),
            "program_state":
            gym.spaces.Box(
                low=np.iinfo(np.int32).min,
                high=np.iinfo(np.int32).max,
                **nethack.OBSERVATION_DESC["program_state"],
            ),
            "internal":
            gym.spaces.Box(
                low=np.iinfo(np.int32).min,
                high=np.iinfo(np.int32).max,
                **nethack.OBSERVATION_DESC["internal"],
            ),
            "inv_glyphs":
            gym.spaces.Box(
                low=0,
                high=nethack.MAX_GLYPH,
                **nethack.OBSERVATION_DESC["inv_glyphs"],
            ),
            "inv_strs":
            gym.spaces.Box(low=0,
                           high=127,
                           **nethack.OBSERVATION_DESC["inv_strs"]),
            "inv_letters":
            gym.spaces.Box(low=0,
                           high=127,
                           **nethack.OBSERVATION_DESC["inv_letters"]),
            "inv_oclasses":
            gym.spaces.Box(
                low=0,
                high=nethack.MAXOCLASSES,
                **nethack.OBSERVATION_DESC["inv_oclasses"],
            ),
            "screen_descriptions":
            gym.spaces.Box(low=0,
                           high=127,
                           **nethack.OBSERVATION_DESC["screen_descriptions"]),
        }

        self.observation_space = gym.spaces.Dict(
            {key: space_dict[key]
             for key in observation_keys})

        self.action_space = gym.spaces.Discrete(len(self._actions))
Example #50
 def __init__(self, suffix=None, prefix=None, dir=None):
     self.name = mkdtemp(suffix, prefix, dir)
     self._finalizer = _weakref.finalize(
         self, self._cleanup, self.name,
         warn_message="Implicitly cleaning up {!r}".format(self))
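
# A minimal usage sketch (not part of the original snippet) of what the
# finalizer above provides: if the TemporaryDirectory object is dropped
# without an explicit cleanup() call, _cleanup removes the directory and
# emits the "Implicitly cleaning up ..." ResourceWarning. On CPython the
# finalizer fires as soon as the last reference disappears.
import os
import tempfile

tmp = tempfile.TemporaryDirectory()
path = tmp.name
assert os.path.isdir(path)
del tmp                          # last reference gone: the finalizer runs
assert not os.path.exists(path)  # the directory has been removed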
Example #51
0
        [["--" + k.replace("_", "-"), convert_value(v)] for k, v in d.items()], []
    )


import importlib
import weakref
import xml.etree.ElementTree
from concurrent.futures import ThreadPoolExecutor


def is_valid_xml(text):
    # Raises xml.etree.ElementTree.ParseError on malformed input instead of
    # returning None, so a True result means the text parsed successfully.
    return xml.etree.ElementTree.fromstring(text) is not None


try:
    _offload_executor = ThreadPoolExecutor(
        max_workers=1, thread_name_prefix="Dask-Offload"
    )
except TypeError:
    _offload_executor = ThreadPoolExecutor(max_workers=1)

# The callback is a bound method, so it keeps the executor alive; the
# finalizer then acts as an atexit hook, since weakref.finalize runs any
# still-alive finalizers at interpreter shutdown.
weakref.finalize(_offload_executor, _offload_executor.shutdown)


def import_term(name: str):
    """ Return the fully qualified term

    Examples
    --------
    >>> import_term("math.sin")
    <function math.sin(x, /)>
    """
    try:
        module_name, attr_name = name.rsplit(".", 1)
    except ValueError:
        return importlib.import_module(name)

    module = importlib.import_module(module_name)
    return getattr(module, attr_name)
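
# Usage sketch for the module-level executor above. The synchronous `offload`
# wrapper here is illustrative only (the project's real helper is asynchronous);
# it just runs a callable on the single "Dask-Offload" thread and waits.
def offload(fn, *args, **kwargs):
    return _offload_executor.submit(fn, *args, **kwargs).result()


print(offload(sum, [1, 2, 3]))  # 6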
Example #52
0
 def __new__(cls):
     cls.n_instances += 1
     obj = object.__new__(cls)
     weakref.finalize(obj, cls._finalize)
     return obj
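
# A self-contained sketch of the instance-counting pattern the fragment above
# belongs to. Only n_instances and _finalize come from the fragment; the class
# name Counted and the body of _finalize are assumptions.
import weakref


class Counted:
    n_instances = 0

    def __new__(cls):
        cls.n_instances += 1
        obj = object.__new__(cls)
        # Decrement the counter once this instance is garbage collected.
        weakref.finalize(obj, cls._finalize)
        return obj

    @classmethod
    def _finalize(cls):
        cls.n_instances -= 1


a, b = Counted(), Counted()
print(Counted.n_instances)  # 2
del a                       # on CPython the finalizer runs immediately
print(Counted.n_instances)  # 1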
Example #53
0
import json
import logging
import shutil
import sys
import tempfile
import weakref
from dataclasses import dataclass
from typing import Callable, Dict, List, Any, Optional

import backoff
import requests

import config
from training_loop import TrainingLoop
from util import download

logger = logging.getLogger("bgsplit")
logger.setLevel(logging.DEBUG)
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(logging.DEBUG)
logger.addHandler(handler)


# For caching dataset images. The extra finalizer removes the cache directory
# as soon as the module-level TemporaryDirectory object is collected (or at
# interpreter exit); note that it only references the directory *name*, not
# the object itself.
data_cache_dir = tempfile.TemporaryDirectory()
data_finalizer = weakref.finalize(
    data_cache_dir, shutil.rmtree, data_cache_dir.name)


# Step 3: Call webhook to indicate completion
@backoff.on_exception(backoff.expo, requests.exceptions.RequestException)
def notify(url: str, payload: Dict[str, Any]):
    r = requests.put(url, data=json.dumps(payload))
    r.raise_for_status()


@dataclass
class TrainingJob:
    train_positive_paths: List[str]
    train_negative_paths: List[str]
    train_unlabeled_paths: List[str]
    val_positive_paths: List[str]
Example #54
0
 def __init__(self, basis, terms):
     self._payload = _create_operator(basis, terms)
     self._finalizer = weakref.finalize(self,
                                        _destroy(_lib.ls_destroy_operator),
                                        self._payload)
     self.basis = basis
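
# A generic, self-contained sketch of the pattern in the fragment above:
# wrap a low-level payload and destroy it through a finalizer. Handle and
# release_handle are illustrative names, not part of the original library.
import weakref


def release_handle(raw):
    # Stand-in for the C destructor wrapped by _destroy(...) above.
    print("releasing", raw)


class Handle:
    def __init__(self, raw):
        self._payload = raw
        # Pass the payload explicitly so the callback holds no reference
        # to `self`; otherwise the Handle could never be collected.
        self._finalizer = weakref.finalize(self, release_handle, self._payload)

    def close(self):
        self._finalizer()  # idempotent: the cleanup runs at most once


h = Handle("payload-0")
h.close()  # prints: releasing payload-0
del h      # the finalizer is already dead, so nothing happens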
Example #55
0
    def __init__(
        self,
        save_ttyrec_every=0,
        savedir=None,
        character="mon-hum-neu-mal",
        max_episode_steps=5000,
        observation_keys=(
            "glyphs",
            "chars",
            "colors",
            "specials",
            "blstats",
            "message",
            "inv_glyphs",
            "inv_strs",
            "inv_letters",
            "inv_oclasses",
            "screen_descriptions",
            "tty_chars",
            "tty_colors",
            "tty_cursor",
        ),
        actions=None,
        options=None,
        wizard=False,
        allow_all_yn_questions=False,
        allow_all_modes=False,
        spawn_monsters=True,
    ):
        """Constructs a new NLE environment.

        Args:
            save_ttyrec_every: Integer, if 0, no ttyrecs (game recordings) will
                be saved. Otherwise, save a ttyrec every Nth episode.
            savedir (str or None): Path to save ttyrecs (game recordings) into,
                if save_ttyrec_every is nonzero. If nonempty string, interpreted
                as a path to a new or existing directory.
                If "" (empty string) or None, NLE choses a unique directory name.
            character (str): name of character. Defaults to "mon-hum-neu-mal".
            max_episode_steps (int): maximum number of steps allowed before the
                game is forcefully quit. In such cases, ``info["end_status"]``
                will be equal to ``StepStatus.ABORTED``. Defaults to 5000.
            observation_keys (list): keys to use when creating the observation.
                Defaults to all.
            actions (list): list of actions. If None, the full action space will
                be used, i.e. ``nle.nethack.ACTIONS``. Defaults to None.
            options (list): list of game options to initialize Nethack. If None,
                Nethack will be initialized with the options found in
                ``nle.nethack.NETHACKOPTIONS``. Defaults to None.
            wizard (bool): activate wizard mode. Defaults to False.
            allow_all_yn_questions (bool):
                If set to True, no y/n questions in step() are declined.
                If set to False, only elements of SKIP_EXCEPTIONS are not declined.
                Defaults to False.
            allow_all_modes (bool):
                If set to True, do not decline menus, text input or auto 'MORE'.
                If set to False, only skip click through 'MORE' on death.
            spawn_monsters (bool): If False, disables NetHack's normal random
                monster generation. Defaults to True.
        """
        self.character = character
        self._max_episode_steps = max_episode_steps
        self._allow_all_yn_questions = allow_all_yn_questions
        self._allow_all_modes = allow_all_modes
        self._save_ttyrec_every = save_ttyrec_every

        if actions is None:
            actions = FULL_ACTIONS
        self.actions = actions

        self.last_observation = ()

        try:
            if not save_ttyrec_every:
                self.savedir = None
            elif savedir:
                self.savedir = os.path.abspath(savedir)
                os.makedirs(self.savedir)
            else:  # Empty savedir: We create our unique savedir inside nle_data/.
                parent_dir = os.path.join(os.getcwd(), "nle_data")
                os.makedirs(parent_dir, exist_ok=True)
                self.savedir = tempfile.mkdtemp(
                    prefix=time.strftime("%Y%m%d-%H%M%S_"), dir=parent_dir)
        except FileExistsError:
            logger.info("Using existing savedir: %s", self.savedir)
        else:
            if self.savedir:
                logger.info("Created savedir: %s", self.savedir)
            else:
                logger.info("Not saving any NLE data.")

        self._observation_keys = list(observation_keys)

        if "internal" in self._observation_keys:
            logger.warning(
                "The 'internal' NLE observation was requested. "
                "This might contain data that shouldn't be available to agents."
            )

        # Observations we always need.
        for key in (
                "glyphs",
                "blstats",
                "message",
                "program_state",
                "internal",
        ):
            if key not in self._observation_keys:
                self._observation_keys.append(key)

        self._glyph_index = self._observation_keys.index("glyphs")
        self._blstats_index = self._observation_keys.index("blstats")
        self._message_index = self._observation_keys.index("message")
        self._program_state_index = self._observation_keys.index(
            "program_state")
        self._internal_index = self._observation_keys.index("internal")

        self._original_observation_keys = observation_keys
        self._original_indices = tuple(
            self._observation_keys.index(key) for key in observation_keys)

        if self.savedir:
            self._ttyrec_pattern = os.path.join(
                self.savedir, "nle.%i.%%i.ttyrec.bz2" % os.getpid())
            ttyrec = self._ttyrec_pattern % 0
        else:
            ttyrec = None

        self.nethack = nethack.Nethack(
            observation_keys=self._observation_keys,
            options=options,
            playername="Agent-" + self.character,
            ttyrec=ttyrec,
            wizard=wizard,
            spawn_monsters=spawn_monsters,
        )
        # The callback is a bound method of self.nethack, not of self, so the
        # env wrapper stays collectable while the underlying game object is
        # kept alive until close() has run.
        self._close_nethack = weakref.finalize(self, self.nethack.close)

        self._random = random.SystemRandom()

        # -1 so that it's 0-based on first reset
        self._episode = -1

        space_dict = dict(NLE_SPACE_ITEMS)
        self.observation_space = gym.spaces.Dict(
            {key: space_dict[key]
             for key in observation_keys})

        self.action_space = gym.spaces.Discrete(len(self.actions))
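
# A minimal sketch of the finalizer wiring in the constructor above, reduced
# to its essentials; Env and Backend are stand-ins, not NLE classes.
import weakref


class Backend:
    def close(self):
        print("backend closed")


class Env:
    def __init__(self):
        self.backend = Backend()
        # Bound method of self.backend, not of self: the Env stays
        # collectable, and the Backend is kept alive until close() has run.
        self._close = weakref.finalize(self, self.backend.close)


e = Env()
del e  # prints: backend closed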
Example #56
0
people = [1, 2, 3]
bus1 = Bus(people)
bus2 = Bus(people)
bus2.pick(4)
print(bus1.passengers, bus2.passengers)

import weakref


def bye():
    print('bye~')


s1 = {1, 2, 3}
s2 = s1
f = weakref.finalize(s1, bye)
del s1          # s2 still refers to the set, so nothing is collected
print(f.alive)  # True
# del s2
s2 = {1}        # rebinding s2 drops the last reference: bye() runs
print(f.alive)  # False

import weakref

s = {1, 2, 3}
wref = weakref.ref(s)
print(wref())  # {1, 2, 3}
s = {1, 2}     # the original set loses its last reference
print(wref())  # None when run as a script (in a console, `_` may keep it alive)
# input()
print(wref())  # None
Example #57
0
    def __init__(self,
                 ctx: 'Context',
                 size: Tuple[int, int],
                 *,
                 components: int = 4,
                 dtype: str = 'f1',
                 data: Any = None,
                 filter: Tuple[gl.GLuint, gl.GLuint] = None,
                 wrap_x: gl.GLuint = None,
                 wrap_y: gl.GLuint = None):
        """
        A texture can be created with or without initial data.
        NOTE: Currently does not support multisample textures even
        thought ``samples`` is exposed.

        :param Context ctx: The context the object belongs to
        :param Tuple[int, int] size: The size of the texture
        :param int components: The number of components (1: R, 2: RG, 3: RGB, 4: RGBA)
        :param str dtype: The data type of each component: f1, f2, f4 / i1, i2, i4 / u1, u2, u4
        :param Any data: The byte data of the texture. bytes or anything supporting the buffer protocol.
        :param Tuple[gl.GLuint, gl.GLuint] filter: The minification/magnification filter of the texture
        :param gl.GLuint wrap_s
        :param data: The texture data (optional)
        """
        self._ctx = ctx
        self._width, self._height = size
        self._dtype = dtype
        self._components = components
        self._target = gl.GL_TEXTURE_2D
        self._samples = 0
        # Default filters for float and integer textures
        if 'f' in self.dtype:
            self._filter = gl.GL_LINEAR, gl.GL_LINEAR
        else:
            self._filter = gl.GL_NEAREST, gl.GL_NEAREST
        self._wrap_x = gl.GL_REPEAT
        self._wrap_y = gl.GL_REPEAT

        if components not in [1, 2, 3, 4]:
            raise ValueError("Components must be 1, 2, 3 or 4")

        try:
            format_info = self._formats[self._dtype]
        except KeyError:
            raise ValueError(
                f"dtype '{dtype}' is not supported. "
                f"Supported types are: {tuple(self._formats.keys())}"
            )

        gl.glActiveTexture(
            gl.GL_TEXTURE0)  # Create textures in the default channel (0)

        self._glo = glo = gl.GLuint()
        gl.glGenTextures(1, byref(self._glo))

        if self._glo.value == 0:
            raise RuntimeError(
                "Cannot create Texture. OpenGL failed to generate a texture id"
            )

        gl.glBindTexture(self._target, self._glo)
        gl.glPixelStorei(gl.GL_PACK_ALIGNMENT, 1)
        gl.glPixelStorei(gl.GL_UNPACK_ALIGNMENT, 1)

        if data is not None:
            byte_length, data = data_to_ctypes(data)

        try:
            _format, _internal_format, self._type, self._component_size = format_info
            self._format = _format[components]
            self._internal_format = _internal_format[components]
            gl.glTexImage2D(
                self._target,  # target
                0,  # level
                self._internal_format,  # internal_format
                self._width,  # width
                self._height,  # height
                0,  # border
                self._format,  # format
                self._type,  # type
                data  # data
            )
        except gl.GLException as ex:
            raise gl.GLException((
                f"Unable to create texture: {ex} : dtype={dtype} size={size} components={components} "
                "MAX_TEXTURE_SIZE = {self.ctx.limits.MAX_TEXTURE_SIZE}"))

        self.filter = filter or self._filter
        self.wrap_x = wrap_x or self._wrap_x
        self.wrap_y = wrap_y or self._wrap_y

        self.ctx.stats.incr('texture')
        weakref.finalize(self, Texture.release, self._ctx, glo)
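
# Generic sketch of the release pattern used above, with the OpenGL specifics
# stripped out. Resource and release() are illustrative stand-ins, not part of
# arcade's API: the point is registering a *static* callable with plain
# arguments so the finalizer never keeps `self` alive.
import weakref


class Resource:
    def __init__(self, ctx, handle):
        self._ctx = ctx
        self._handle = handle
        weakref.finalize(self, Resource.release, ctx, handle)

    @staticmethod
    def release(ctx, handle):
        print(f"releasing handle {handle} from {ctx}")


r = Resource("ctx-0", 42)
del r  # prints: releasing handle 42 from ctx-0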
Example #58
0
 def on_destroy(instance, function, *args, **kwargs):
     weakref.finalize(instance, function, *args, **kwargs)
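
# A short usage sketch for the helper above; Widget and cleanup are made-up
# names used purely for illustration.
import weakref


def on_destroy(instance, function, *args, **kwargs):
    weakref.finalize(instance, function, *args, **kwargs)


class Widget:
    pass


def cleanup(name):
    print("cleaning up", name)


w = Widget()
on_destroy(w, cleanup, "widget-1")
del w  # prints: cleaning up widget-1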
Example #59
0
    async def _start(self):
        self._generate_name = self._generate_name or dask.config.get(
            "kubernetes.name")
        self._namespace = self._namespace or dask.config.get(
            "kubernetes.namespace")
        self._idle_timeout = self._idle_timeout or dask.config.get(
            "kubernetes.idle-timeout")
        self._scheduler_service_wait_timeout = (
            self._scheduler_service_wait_timeout
            or dask.config.get("kubernetes.scheduler-service-wait-timeout"))
        self._deploy_mode = self._deploy_mode or dask.config.get(
            "kubernetes.deploy-mode")

        self._n_workers = (self._n_workers if self._n_workers is not None else
                           dask.config.get("kubernetes.count.start"))
        self.host = self.host or dask.config.get("kubernetes.host")
        self.port = (self.port if self.port is not None else
                     dask.config.get("kubernetes.port"))
        self._protocol = self._protocol or dask.config.get(
            "kubernetes.protocol")
        self._interface = self._interface or dask.config.get(
            "kubernetes.interface")
        self._dashboard_address = self._dashboard_address or dask.config.get(
            "kubernetes.dashboard_address")
        self.env = (self.env if self.env is not None else
                    dask.config.get("kubernetes.env"))

        self.pod_template = self._get_pod_template(self.pod_template,
                                                   pod_type="worker")
        self.scheduler_pod_template = self._get_pod_template(
            self.scheduler_pod_template, pod_type="scheduler")
        if not self.pod_template:
            msg = ("Worker pod specification not provided. See KubeCluster "
                   "docstring for ways to specify workers")
            raise ValueError(msg)

        base_pod_template = self.pod_template
        self.pod_template = clean_pod_template(self.pod_template,
                                               pod_type="worker")

        if not self.scheduler_pod_template:
            self.scheduler_pod_template = base_pod_template
            self.scheduler_pod_template.spec.containers[0].args = [
                "dask-scheduler"
            ]

        self.scheduler_pod_template = clean_pod_template(
            self.scheduler_pod_template, pod_type="scheduler")

        await ClusterAuth.load_first(self.auth)

        self.core_api = kubernetes.client.CoreV1Api()

        if self._namespace is None:
            self._namespace = _namespace_default()

        self._generate_name = self._generate_name.format(
            user=getpass.getuser(), uuid=str(uuid.uuid4())[:10], **os.environ)
        self._generate_name = escape(self._generate_name)

        self.pod_template = self._fill_pod_templates(self.pod_template,
                                                     pod_type="worker")
        self.scheduler_pod_template = self._fill_pod_templates(
            self.scheduler_pod_template, pod_type="scheduler")

        # Make sure the labelled Kubernetes resources in this namespace get
        # cleaned up if the cluster object is garbage collected or the
        # interpreter exits without an explicit close.
        finalize(self, _cleanup_resources, self._namespace,
                 self.pod_template.metadata.labels)

        common_options = {
            "core_api": self.core_api,
            "namespace": self._namespace,
            "loop": self.loop,
        }

        if self._deploy_mode == "local":
            self.scheduler_spec = {
                "cls": dask.distributed.Scheduler,
                "options": {
                    "protocol": self._protocol,
                    "interface": self._interface,
                    "host": self.host,
                    "port": self.port,
                    "dashboard_address": self._dashboard_address,
                    "security": self.security,
                },
            }
        elif self._deploy_mode == "remote":
            self.scheduler_spec = {
                "cls": Scheduler,
                "options": {
                    "idle_timeout": self._idle_timeout,
                    "service_wait_timeout_s":
                    self._scheduler_service_wait_timeout,
                    "pod_template": self.scheduler_pod_template,
                    **common_options,
                },
            }
        else:
            raise RuntimeError("Unknown deploy mode %s" % self._deploy_mode)

        self.new_spec = {
            "cls": Worker,
            "options": {
                "pod_template": self.pod_template,
                **common_options
            },
        }
        self.worker_spec = {i: self.new_spec for i in range(self._n_workers)}

        await super()._start()
import weakref


class ExpensiveObj:
    def __del__(self):
        print('Deleting {}'.format(self))


def on_finalize(*args):
    print('on_finalize({!r})'.format(args))


obj = ExpensiveObj()
weakref.finalize(obj, on_finalize, 'extra argument')

del obj
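
# Follow-on sketch (not in the original source): a finalize object can also be
# called explicitly, after which it is dead and will not fire again when the
# object is eventually collected.
obj2 = ExpensiveObj()
f = weakref.finalize(obj2, on_finalize, 'explicit call')
print(f.alive)  # True
f()             # runs on_finalize(('explicit call',)) right now
print(f.alive)  # False
del obj2        # only __del__'s "Deleting ..." prints; the finalizer is spent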