def __init__(self, service, channel, config=None, _lazy=False):
    """Set up a connection over `channel`, exposing `service`.

    * service: the service to expose (called with a weak proxy of self)
    * channel: the channel over which messages are passed
    * config: optional dict of overrides for DEFAULT_CONFIG
      (None — rather than a shared mutable {} default — means "no overrides")
    * _lazy: if True, skip _init_service(); caller must invoke it later
    """
    # Mark the connection closed while we set it up
    self._closed = True
    # Setup the config: deep-copy the defaults, then overlay caller overrides
    self._config = deepcopy(DEFAULT_CONFIG)
    self._config.update(config or {})
    if self._config["connid"] is None:
        # next() builtin instead of .__next__() — same semantics, idiomatic
        self._config["connid"] = "conn{conn_id}".format(
            conn_id=next(self._connection_id_generator))
    self._channel = channel
    # weakref.proxy prevents a reference cycle between the service and us
    self._local_root = service(weakref.proxy(self))
    self._remote_root = None  # fetched lazily via the `root` property
    self._local_objects = Locking_dict()  # {oid: native_obj} — objects boxed by reference
    self._proxy_cache = WeakValueDictionary()  # {oid: proxy_obj} — proxies not owned by this connection
    self._netref_classes_cache = {}  # {(clsname, modname): cls} — previously built proxy classes
    self._netref_proxy_builtin_cls = netref.PROXY_BUILTIN_TYPE_DICT  # pre-built in netref
    self._seqcounter = itertools.count()  # generates the message sequence numbers
    self._recvlock = Lock()
    self._sendlock = Lock()
    self._sync_replies = {}  # {seq: (isexc, obj)} — replies awaited by sync_request
    self._async_callbacks = {}  # {seq: callback} — outstanding async requests
    self._last_traceback = None
    if not _lazy:
        # Notify the service immediately unless the caller wants lazy startup
        self._init_service()
    # Open this connection as we have finished setting it up
    self._closed = False
class Connection(object):
    """The RPyC connection (also known as the RPyC protocol).

    __init__(self, service, channel, config=None, _lazy=False)

    * service: the service to expose
    * channel: the channel over which messages are passed
    * config: this connection's config dict (overriding parameters from the
      default = DEFAULT_CONFIG)
    * _lazy: whether or not to defer initializing the service. Default is
      False (initialize during construction); if set to True you will need
      to call _init_service manually later.

    Important holders

    * _local_objects: native objects added when boxing something mutable for
      the first time. {oid: obj} dictionary, object ids to the full native
      object.
    * _proxy_cache: populated when making new proxies after unboxing data.
      {oid: weakref(obj, callback)}
    * _netref_classes_cache: previously created proxy classes,
      {(clsname, modname): cls}
    * netref.PROXY_BUILTIN_TYPE_DICT: pre-created proxy classes for builtins
    """

    _connection_id_generator = itertools.count(1)  # unique suffixes for "connid"
    _HANDLERS = {}  # handler-code -> handler method, filled by @register_handler

    def __init__(self, service, channel, config=None, _lazy=False):
        # Mark the connection closed while we set it up
        self._closed = True
        # Setup the config: deep-copy the defaults, then overlay overrides.
        # config=None (not a mutable {} default) means "no overrides".
        self._config = deepcopy(DEFAULT_CONFIG)
        self._config.update(config or {})
        if self._config["connid"] is None:
            self._config["connid"] = "conn{conn_id}".format(
                conn_id=next(self._connection_id_generator))
        self._channel = channel
        # weakref.proxy prevents a reference cycle between the service and us
        self._local_root = service(weakref.proxy(self))
        self._remote_root = None  # fetched lazily via the `root` property
        self._local_objects = Locking_dict()  # {oid: native_obj} — objects boxed by reference
        self._proxy_cache = WeakValueDictionary()  # {oid: proxy_obj} — proxies not owned by this connection
        self._netref_classes_cache = {}  # {(clsname, modname): cls} — previously built proxy classes
        self._netref_proxy_builtin_cls = netref.PROXY_BUILTIN_TYPE_DICT  # pre-built in netref
        self._seqcounter = itertools.count()  # generates the message sequence numbers
        self._recvlock = Lock()
        self._sendlock = Lock()
        self._sync_replies = {}  # {seq: (isexc, obj)} — replies awaited by sync_request
        self._async_callbacks = {}  # {seq: callback} — outstanding async requests
        self._last_traceback = None
        if not _lazy:
            # Notify the service immediately unless the caller wants lazy startup
            self._init_service()
        # Open this connection as we have finished setting it up
        self._closed = False

    #------------------------------------------------------
    # Properties
    #------------------------------------------------------
    @property
    def root(self):
        """fetch the root object (service) of the other party

        A connection is really two endpoints, each exposing a service;
        `conn.root` gives access to the service on the far side. The remote
        root is requested once (HANDLE_GETROOT) and cached.
        """
        if self._remote_root is None:
            self._remote_root = self.sync_request(global_consts.HANDLE_GETROOT)
        return self._remote_root

    #------------------------------------------------------
    # Function to make netrefs
    #------------------------------------------------------
    def _make_netref_instance(self, oid, clsname, modname):
        """Create a proxy (netref) instance for the remote object `oid`.

        The proxy class is looked up in (1) this connection's cache of
        previously built classes, then (2) the pre-created builtin proxy
        classes; otherwise the remote side is asked for the object's methods
        (HANDLE_INSPECT) and a new proxy class is built and cached.
        """
        typeinfo = (clsname, modname)
        if typeinfo in self._netref_classes_cache:
            # previously created netref class
            cls = self._netref_classes_cache[typeinfo]
        elif typeinfo in self._netref_proxy_builtin_cls:
            # pre-created builtin netref class
            cls = self._netref_proxy_builtin_cls[typeinfo]
        else:
            # build a new netref class from the remote object's methods
            methods_tup = self.sync_request(global_consts.HANDLE_INSPECT, oid)
            cls = netref.make_proxy_class(clsname, modname, methods_tup)
            self._netref_classes_cache[typeinfo] = cls  # remember for next time
        # the constructor records ____conn__ (weakly) and ____oid__
        netref_instance = cls(conn=weakref.ref(self), oid=oid)
        return netref_instance

    #------------------------------------------------------
    # Boxing: wrap up the objects and label them.
    # In Python everything is an object and thus supports id() and type().
    #------------------------------------------------------
    def _box(self, obj):
        """label and store a local object such that it can be sent and
        unboxed by the remote party, either by-value or by-reference.

        returns package = (label, contents); for some labels, contents is a
        tuple.
        """
        if brine.dumpable(obj):
            # immutable object natively supported by brine: pass by value
            label = global_consts.LABEL_IMMUTABLE
            contents = obj
            package = (label, contents)
        elif type(obj) is tuple:
            # a tuple possibly containing mutables: box each item recursively
            label = global_consts.LABEL_MUT_TUPLE
            contents = tuple(self._box(item) for item in obj)
            package = (label, contents)
        elif netref.is_netref(obj) and obj.____conn__() is self:
            # a proxy whose real object lives on OUR side: send just its oid
            label = global_consts.LABEL_LOCAL_OBJECT_REF
            contents = obj.____oid__
            package = (label, contents)
            print("using the third way in _box, detected local proxy object, wow, how did this come to be")
        else:
            # pure mutable data: keep the object alive locally, pass by reference.
            # NOTE(review): id() values can be reused after garbage collection,
            # so a stale oid could theoretically alias a new object — confirm
            # lifetime management with HANDLE_DEL.
            oid = id(obj)
            cls = getattr(obj, "__class__", type(obj))
            self._local_objects[oid] = obj
            label = global_consts.LABEL_NETREFABLE
            contents = (oid, cls.__name__, cls.__module__)
            package = (label, contents)
        return package

    def _unbox(self, package):
        """recreate a local object representation of the remote object:
        if the object is passed by value, just return it;
        if the object is passed by reference, create a netref to it.

        Raises Protocol_Unbox_Exception for unknown labels or for a
        local-object reference we have no record of.
        """
        label, contents = package
        if label == global_consts.LABEL_IMMUTABLE:
            # passed by value
            return contents
        elif label == global_consts.LABEL_MUT_TUPLE:
            # unbox every element recursively
            return tuple(self._unbox(sub_package) for sub_package in contents)
        elif label == global_consts.LABEL_LOCAL_OBJECT_REF:
            # the remote party is handing back a reference to OUR object
            oid = contents
            try:
                native_object = self._local_objects[oid]
            except KeyError:
                raise Protocol_Unbox_Exception("labeled as local but I know nothing of this proxy")
            return native_object
        elif label == global_consts.LABEL_NETREFABLE:
            # passed by reference: reuse a cached proxy or build a new one
            oid, clsname, modname = contents
            if oid in self._proxy_cache:
                proxy = self._proxy_cache[oid]
            else:
                proxy = self._make_netref_instance(oid, clsname, modname)
                self._proxy_cache[oid] = proxy  # store proxy as known
            return proxy
        else:
            raise Protocol_Unbox_Exception("Couldn't unbox, invalid label={label}".format(label=label))

    #------------------------------------------------------
    # Startup
    #------------------------------------------------------
    def _init_service(self):
        """Notify the local service that the connection is up
        (calls on_connect of the service instance created in __init__)."""
        self._local_root.on_connect()

    #------------------------------------------------------
    # Shutdown, cleanup and __del__
    #------------------------------------------------------
    def close(self, _catchall=True):
        """Close the connection: tell the peer (best effort), then clean up.

        With _catchall (the default) any error during the close handshake,
        other than EOF, is swallowed; cleanup always runs.
        """
        if self._closed:
            return
        self._closed = True
        try:
            # best-effort notification; the peer may already be gone
            self._async_request(global_consts.HANDLE_CLOSE)
        except EOFError:
            pass
        except Exception:
            if not _catchall:
                raise
        finally:
            self._cleanup(_anyway=True)

    def _cleanup(self, _anyway=True):
        """Release the channel and drop all per-connection state.

        With _anyway=False this is a no-op unless the connection is still
        marked open.
        """
        if self._closed and not _anyway:
            return
        self._closed = True
        self._channel.close()
        self._local_root.on_disconnect()
        self._sync_replies.clear()
        self._async_callbacks.clear()
        self._local_objects.clear()
        self._proxy_cache.clear()
        self._netref_classes_cache.clear()
        self._last_traceback = None
        self._remote_root = None
        self._local_root = None
        #self._seqcounter = None
        #self._config.clear()

    def __del__(self):
        # ensure cleanup when the connection object is garbage collected
        self.close()

    #------------------------------------------------------
    # fileno: to make it compatible with select
    #------------------------------------------------------
    def fileno(self):
        """Expose the channel's file descriptor so this object is selectable."""
        return self._channel.fileno()

    #------------------------------------------------------
    # Context manager: to make it compatible with `with`
    #------------------------------------------------------
    def __enter__(self):
        """Context manager entry::

            with Connection(...) as the_connection:
                do stuff...
        """
        return self

    def __exit__(self, exc_class, exc_instance, traceback):
        """Context manager exit: always close the connection.

        Returns None (falsy), so any exception raised inside the `with`
        block propagates to the caller.
        """
        self.close()

    #------------------------------------------------------
    # Representation
    #------------------------------------------------------
    def __repr__(self):
        # splice the connid into the default object repr
        a, b = object.__repr__(self).split(" object ")
        return "%s %r object %s" % (a, self._config["connid"], b)

    #------------------------------------------------------
    # IO
    #------------------------------------------------------
    def ping(self, data="the world is a vampire!" * 20, timeout=3):
        """assert that the other party is functioning properly: send `data`
        and verify the echoed reply matches within `timeout` seconds."""
        res = self.async_request(global_consts.HANDLE_PING, data, timeout=timeout)
        if res.value != data:
            raise Protocol_Ping_Exception("echo mismatches sent data")

    def _send(self, msg_type, seq_num, msg):
        """brine-dump (msg_type, seq_num, msg) and send it over the channel.
        The send lock serializes concurrent writers."""
        data = brine.dump((msg_type, seq_num, msg))
        with self._sendlock:
            self._channel.send(data)

    def _send_request(self, handler, args):
        """send a MSG_REQUEST carrying (handler, boxed args); returns the
        sequence number identifying the request."""
        # BUGFIX: was self._seqcounter.next() — Python 2 only; the builtin
        # next() works on Python 3 generators/iterators.
        seq_num = next(self._seqcounter)
        self._send(global_consts.MSG_REQUEST, seq_num, (handler, self._box(args)))
        return seq_num

    def _send_reply(self, seq_num, obj):
        """send a MSG_REPLY carrying the boxed result for request seq_num"""
        self._send(global_consts.MSG_REPLY, seq_num, self._box(obj))

    def _send_exception(self, seq_num, exctype, excval, exctb):
        """send a MSG_EXCEPTION carrying a vinegar-dumped exception; the
        local traceback is included only if the config allows it."""
        option_include_traceback = self._config["include_local_traceback"]
        exception_dump = vinegar.dump(exctype, excval, exctb, option_include_traceback)
        self._send(global_consts.MSG_EXCEPTION, seq_num, exception_dump)

    #------------------------------------------------------
    # Requests
    #------------------------------------------------------
    def sync_request(self, handler, *args):
        """send a request and wait for the reply to arrive; raise the remote
        exception if the reply carries one."""
        seq = self._send_request(handler, args)
        # serve (possibly nested) traffic until our reply shows up
        while seq not in self._sync_replies:
            self.serve(0.1)
        isexc, obj = self._sync_replies.pop(seq)
        if isexc:
            raise obj
        return obj

    def _async_request(self, handler, args=(), callback=(lambda a, b: None)):
        """send a request and register `callback(isexc, obj)` to be invoked
        when the reply or exception is dispatched."""
        seq = self._send_request(handler, args)
        self._async_callbacks[seq] = callback

    def async_request(self, handler, *args, **kwargs):
        """send a request and return an AsyncResult object, which will
        eventually hold the reply.

        Accepts a single keyword argument, timeout (seconds), applied as the
        result's expiry."""
        timeout = kwargs.pop("timeout", None)
        if kwargs:
            # BUGFIX: was kwargs.keys()[0] — dict views are not subscriptable
            # on Python 3; next(iter(...)) fetches an arbitrary leftover key.
            raise TypeError("got unexpected keyword argument %r" % (next(iter(kwargs)),))
        res = AsyncResult(weakref.proxy(self))
        self._async_request(handler, args, res)
        if timeout is not None:
            res.set_expiry(timeout)
        return res

    #------------------------------------------------------
    # Dispatching
    #------------------------------------------------------
    def _dispatch_request(self, seq, raw_args):
        """unbox and execute an incoming request; send back a reply or an
        exception dump. KeyboardInterrupt always propagates; SystemExit
        propagates locally if configured to."""
        try:
            handler, args = raw_args
            args = self._unbox(args)
            res = self._HANDLERS[handler](self, *args)
        except KeyboardInterrupt:
            raise
        except BaseException:
            # deliberately broad (equivalent to a bare except): SystemExit
            # must be caught here so the config flag below can decide
            t, v, tb = sys.exc_info()
            self._last_traceback = tb
            if t is SystemExit and self._config["propagate_SystemExit_locally"]:
                raise
            self._send_exception(seq, t, v, tb)
        else:
            self._send_reply(seq, res)

    def _dispatch_reply(self, seq, raw):
        """route an incoming reply to its async callback, or park it for a
        waiting sync_request"""
        obj = self._unbox(raw)
        if seq in self._async_callbacks:
            self._async_callbacks.pop(seq)(False, obj)
        else:
            self._sync_replies[seq] = (False, obj)

    def _dispatch_exception(self, seq, raw):
        """route an incoming remote exception to its async callback, or park
        it for a waiting sync_request"""
        obj = vinegar.load(raw,
            import_custom_exceptions=self._config["import_custom_exceptions"],
            instantiate_custom_exceptions=self._config["instantiate_custom_exceptions"],
            instantiate_oldstyle_exceptions=self._config["instantiate_oldstyle_exceptions"])
        if seq in self._async_callbacks:
            self._async_callbacks.pop(seq)(True, obj)
        else:
            self._sync_replies[seq] = (True, obj)

    #------------------------------------------------------
    # Serving
    #------------------------------------------------------
    def _recv(self, timeout, wait_for_lock):
        """receive one raw message from the channel.

        Returns None when the lock could not be acquired (wait_for_lock
        False) or nothing arrived within `timeout`; closes the connection on
        EOF."""
        if not self._recvlock.acquire(wait_for_lock):
            return None
        try:
            if self._channel.poll(timeout):
                data = self._channel.recv()
            else:
                data = None
        except EOFError:
            self.close()
            raise
        finally:
            self._recvlock.release()
        return data

    def _dispatch(self, data):
        """brine-load a raw message and route it by message type"""
        msg, seq, args = brine.load(data)
        if msg == global_consts.MSG_REQUEST:
            self._dispatch_request(seq, args)
        elif msg == global_consts.MSG_REPLY:
            self._dispatch_reply(seq, args)
        elif msg == global_consts.MSG_EXCEPTION:
            self._dispatch_exception(seq, args)
        else:
            raise ValueError("invalid message type: %r" % (msg,))

    def poll(self, timeout=0):
        """serve a single transaction, should one arrive in the given
        interval. Note that handling a request/reply may trigger nested
        requests, which are all part of the transaction.

        returns True if one was served, False otherwise"""
        data = self._recv(timeout, wait_for_lock=False)
        if not data:
            return False
        self._dispatch(data)
        return True

    def serve(self, timeout=1):
        """serve a single request or reply that arrives within the given
        time frame (default is 1 sec). Note that the dispatching of a request
        might trigger multiple (nested) requests, thus this function may be
        reentrant.

        returns True if a request or reply were received, False otherwise."""
        data = self._recv(timeout, wait_for_lock=True)
        if not data:
            return False
        self._dispatch(data)
        return True

    def serve_all(self):
        """serve all requests and replies while the connection is alive;
        always closes the connection on the way out."""
        try:
            while True:
                self.serve(0.1)
        except _select_error:
            # a select error on a closed connection is expected during shutdown
            if not self._closed:
                raise
        except EOFError:
            pass
        finally:
            self.close()

    def poll_all(self, timeout=0):
        """serve all requests and replies that arrive within the given
        interval.

        returns True if at least one was served, False otherwise"""
        at_least_once = False
        try:
            while self.poll(timeout):
                at_least_once = True
        except EOFError:
            pass
        return at_least_once

    #------------------------------------------------------
    # Attribute access
    #------------------------------------------------------
    def _check_attr(self, obj, name):
        """apply the config's attribute-access policy to `name` on `obj`.

        returns the (possibly prefix-mangled) attribute name to use, or
        False when access is denied."""
        if self._config["allow_exposed_attrs"]:
            if name.startswith(self._config["exposed_prefix"]):
                name2 = name
            else:
                name2 = self._config["exposed_prefix"] + name
            if hasattr(obj, name2):
                return name2
        if self._config["allow_all_attrs"]:
            return name
        if self._config["allow_safe_attrs"] and name in self._config["safe_attrs"]:
            return name
        if self._config["allow_public_attrs"] and not name.startswith("_"):
            return name
        return False

    def _access_attr(self, oid, name, args, overrider, param, default):
        """common implementation for get/set/delattr handlers.

        overrider: name of an _rpyc_*attr method the object's type may
        define to take over access control; param: config key gating the
        operation; default: the builtin (getattr/setattr/delattr) used when
        no overrider exists and the policy allows access."""
        if type(name) is not str:
            raise TypeError("attr name must be a string")
        obj = self._local_objects[oid]
        accessor = getattr(type(obj), overrider, None)
        if accessor is None:
            name2 = self._check_attr(obj, name)
            if not self._config[param] or not name2:
                raise AttributeError("cannot access %r" % (name,))
            accessor = default
            name = name2
        return accessor(obj, name, *args)

    #------------------------------------------------------
    # Handlers
    #------------------------------------------------------
    @register_handler(global_consts.HANDLE_PING, _HANDLERS)
    def _handle_ping(self, data):
        """echo: return whatever was sent"""
        return data

    @register_handler(global_consts.HANDLE_CLOSE, _HANDLERS)
    def _handle_close(self):
        """peer asked us to close: release everything"""
        self._cleanup()

    @register_handler(global_consts.HANDLE_GETROOT, _HANDLERS)
    def _handle_getroot(self):
        """return our local service root (boxed on the way out)"""
        return self._local_root

    @register_handler(global_consts.HANDLE_DEL, _HANDLERS)
    def _handle_del(self, oid):
        """This used to decref the object, but that path was unused; now it
        delegates to a do-nothing placeholder on the local-objects dict."""
        self._local_objects.do_nothing_place_holder(oid)

    @register_handler(global_consts.HANDLE_REPR, _HANDLERS)
    def _handle_repr(self, oid):
        """return repr() of the local object with the given oid"""
        return repr(self._local_objects[oid])

    @register_handler(global_consts.HANDLE_STR, _HANDLERS)
    def _handle_str(self, oid):
        """return str() of the local object with the given oid"""
        return str(self._local_objects[oid])

    @register_handler(global_consts.HANDLE_CMP, _HANDLERS)
    def _handle_cmp(self, oid, other):
        """three-way compare a local object with `other`.

        Calling type(obj).__cmp__ directly (instead of cmp()) avoids
        recursive resonance through the proxy."""
        obj = self._local_objects[oid]
        try:
            return type(obj).__cmp__(obj, other)
        except (AttributeError, TypeError):
            # BUGFIX: on Python 3 types have no __cmp__, which raises
            # AttributeError — treat that like TypeError and report
            # NotImplemented rather than leaking the error to the peer.
            return NotImplemented

    @register_handler(global_consts.HANDLE_HASH, _HANDLERS)
    def _handle_hash(self, oid):
        """return hash() of the local object with the given oid"""
        return hash(self._local_objects[oid])

    @register_handler(global_consts.HANDLE_CALL, _HANDLERS)
    def _handle_call(self, oid, args, kwargs):
        """call the local object with the given args/kwargs (kwargs arrive
        as a sequence of pairs and must be rebuilt into a dict)"""
        return self._local_objects[oid](*args, **dict(kwargs))

    @register_handler(global_consts.HANDLE_DIR, _HANDLERS)
    def _handle_dir(self, oid):
        """return dir() of the local object as a tuple (immutable, so it can
        be brined)"""
        return tuple(dir(self._local_objects[oid]))

    @register_handler(global_consts.HANDLE_INSPECT, _HANDLERS)
    def _handle_inspect(self, oid):
        """return the method descriptions needed by the peer to build a new
        proxy class for this object (see netref.inspect_methods)"""
        return netref.inspect_methods(self._local_objects[oid])

    @register_handler(global_consts.HANDLE_GETATTR, _HANDLERS)
    def _handle_getattr(self, oid, name):
        """policy-checked getattr on the local object"""
        return self._access_attr(oid, name, (), "_rpyc_getattr", "allow_getattr", getattr)

    @register_handler(global_consts.HANDLE_DELATTR, _HANDLERS)
    def _handle_delattr(self, oid, name):
        """policy-checked delattr on the local object"""
        return self._access_attr(oid, name, (), "_rpyc_delattr", "allow_delattr", delattr)

    @register_handler(global_consts.HANDLE_SETATTR, _HANDLERS)
    def _handle_setattr(self, oid, name, value):
        """policy-checked setattr on the local object"""
        return self._access_attr(oid, name, (value,), "_rpyc_setattr", "allow_setattr", setattr)

    @register_handler(global_consts.HANDLE_CALLATTR, _HANDLERS)
    def _handle_callattr(self, oid, name, args, kwargs):
        """call a method of the local object and return the result: resolve
        the attribute through the policy-checked getattr, then call it"""
        return self._handle_getattr(oid, name)(*args, **dict(kwargs))

    @register_handler(global_consts.HANDLE_PICKLE, _HANDLERS)
    def _handle_pickle(self, oid, proto):
        """pickle the local object with the given protocol; disabled unless
        the config explicitly allows it (pickling is a security risk)"""
        if not self._config["allow_pickle"]:
            raise ValueError("pickling is disabled")
        return brine._pickle(self._local_objects[oid], proto)

    @register_handler(global_consts.HANDLE_BUFFITER, _HANDLERS)
    def _handle_buffiter(self, oid, count):
        """pull up to `count` items from a local iterator and return them as
        a tuple (buffered iteration to cut round trips)"""
        items = []
        obj = self._local_objects[oid]
        # BUGFIX: was `xrange(count)` / `obj.next()` — both Python 2 only
        for _ in range(count):
            try:
                items.append(next(obj))
            except StopIteration:
                break
        return tuple(items)