Example #1
class WalkoffSignal(object):
    _signals = {}

    def __init__(self, name, event_type, loggable=True, message=''):
        self.name = name
        self.signal = Signal(name)
        self.event_type = event_type
        if loggable:
            signal_callback = partial(add_entry_to_case,
                                      data='',
                                      event_type=event_type.name,
                                      entry_message=message,
                                      message_name=name)
            self.connect(signal_callback, weak=False)

    def send(self, sender, **kwargs):
        self.signal.send(sender, **kwargs)

    def connect(self, func, weak=True):
        self.signal.connect(func)
        if not weak:
            WalkoffSignal._store_callback(func)
        return func

    @classmethod
    def _store_callback(cls, func):
        """
        Stores callbacks so they aren't garbage collected, which would cause the signal's weak references to them to disappear
        """
        cls._signals[id(func)] = func
Example #2
class EventService(Service):
    def __init__(self, env):
        """event service"""
        super(EventService, self).__init__(env)
        self._channel = Signal('event_channel')

    def subscribe(self, func, event_type=None):
        '''
        :param func: callback with signature ``def func(event_type, **kwargs): ...``
        :param event_type: optional; if given, only events of this type are delivered
        :return: None
        '''
        # sender = event_type or ANY
        # weak = True
        # if isinstance(event_type, basestring):
        #     weak = False
        # self._channel.connect(func, sender, weak)
        sender = event_type or ANY
        self._channel.connect(func, sender)

    def unsubscribe(self, func):
        self._channel.disconnect(func)

    def publish(self, event_type, **kwarg):
        self._channel.send(event_type, **kwarg)
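The subscribe/publish pattern above is just blinker's sender filtering: the event type is passed as the sender, and receivers connected with that sender only see matching events. A runnable sketch of the same idea with a bare Signal (the handler and event names are illustrative):

from blinker import Signal

channel = Signal('event_channel')

def on_user_created(event_type, **kwargs):
    print("got", event_type, kwargs)

# Like EventService.subscribe: connect only for the 'user.created' event type (sender).
channel.connect(on_user_created, sender='user.created')

# Like EventService.publish: the event type is sent as the sender.
channel.send('user.created', user_id=42)   # delivered to on_user_created
channel.send('user.deleted', user_id=42)   # no matching receiver, nothing delivered
channel.disconnect(on_user_created)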
Example #3
class CSession:
    def __init__(self, name, start, end):
        self.name = name
        self.uid = uuid.uuid1()
        self.start = datetime.strptime(start, DT_FORMAT)
        self.end = datetime.strptime(end, DT_FORMAT)
        self.on_live = Signal('on-live')
        self.on_end = Signal('on-end')
        self.liveURL = None
        self._teacher = None
        self.popups = []

    @property
    def teacher(self):
        return self._teacher

    @teacher.setter
    def teacher(self, t):
        self.on_live.connect(t.live_recv)
        self.on_end.connect(t.session_end_recv)
        self._teacher = t

    def live(self):
        self.on_live.send(self, msg="hello")

    def liveEnd(self):
        self.on_end.send(self, msg="bye")

    def openLive(self):
        webbrowser.open_new(self.liveURL)
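A rough usage sketch for CSession (the Teacher class and the DT_FORMAT value are assumptions made for illustration; blinker passes the session as the sender plus the msg keyword to each receiver):

class Teacher:
    def live_recv(self, session, msg):
        print("live:", session.name, msg)

    def session_end_recv(self, session, msg):
        print("ended:", session.name, msg)

# Assumes DT_FORMAT == '%Y-%m-%d %H:%M' for this sketch.
session = CSession('algebra', '2024-01-01 09:00', '2024-01-01 10:00')
session.teacher = Teacher()   # setter connects live_recv and session_end_recv
session.live()                # on_live -> Teacher.live_recv(session, msg='hello')
session.liveEnd()             # on_end  -> Teacher.session_end_recv(session, msg='bye')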
Example #4
class WalkoffSignal(object):
    """A signal to send Walkoff data

    The class is a wrapper around a blinker.Signal

    Attributes:
        name (str): The name of the signal
        signal (Signal): The signal object which sends the event and data
        event_type (EventType): The event type of this signal
        is_sent_to_interfaces (bool, optional): Should this event get sent to the interface dispatcher? Defaults to True
        message (str): Human readable message for this event

    Args:
        name (str): The name of the signal
        event_type (EventType): The event type of this signal
        send_to_interfaces (bool, optional): Should this event get sent to the interface dispatcher? Defaults to True
        message (str, optional): Human readable message for this event. Defaults to empty string
    """
    _signals = {}

    def __init__(self, name, event_type, send_to_interfaces=True, message=''):
        self.name = name
        self.signal = Signal(name)
        self.event_type = event_type
        self.is_sent_to_interfaces = send_to_interfaces
        self.message = message

    def send(self, sender, **kwargs):
        """Sends the signal with data

        Args:
            sender: The thing that is sending the signal

        Kwargs:
            data: Additional data to send with the signal
        """
        self.signal.send(sender, **kwargs)

    def connect(self, func, weak=True):
        """A decorator which registers a function as a callback for this signal

        Args:
            func (func): The function to register
            weak (bool, optional): Should a weak reference be used for this connection? Defaults to True

        Returns:
            func: The function connected
        """
        self.signal.connect(func)
        if not weak:
            WalkoffSignal._store_callback(func)
        return func

    @classmethod
    def _store_callback(cls, func):
        """
        Stores callbacks so they aren't garbage collected, which would cause the signal's weak references to them to disappear
        """
        cls._signals[id(func)] = func
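A hypothetical usage sketch for WalkoffSignal (the signal name, event type value, and receiver are stand-ins; connect() also works as a decorator, and connecting with weak=False pins the callback in _signals so blinker's weak reference to it never goes stale):

workflow_started = WalkoffSignal('WorkflowStarted', event_type=None)  # a real EventType would go here

@workflow_started.connect
def log_start(sender, **kwargs):
    print('workflow started by', sender, kwargs)

workflow_started.send('scheduler', data={'workflow_id': 1})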
Example #6
class Event:
    def __init__(self, callback, name=''):
        self.signal = Signal()
        self.name = name
        self.callback = callback(
            name
        )  # keep a strong reference: blinker holds receivers weakly, so an unreferenced callback would be dropped
        self.signal.connect(self.callback)

    def send(self, sender):
        self.signal.send(sender)
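An illustrative sketch of the Event wrapper above (make_logger is a hypothetical callback factory; Event calls it with the name and keeps the returned receiver referenced so blinker's weak reference stays alive):

def make_logger(name):
    def receiver(sender):
        print('event %r fired by %r' % (name, sender))
    return receiver

saved = Event(make_logger, name='user-saved')
saved.send('db-layer')   # prints: event 'user-saved' fired by 'db-layer'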
Example #7
def test_temp_connection_for_sender():
    sig = Signal()

    canary = []
    receiver = lambda sender: canary.append(sender)

    with sig.connected_to(receiver, sender=2):
        sig.send(1)
        sig.send(2)

    assert canary == [2]
    assert not sig.receivers
Example #8
def anonymous_signal():
    """
    Example of an anonymous signal

    :return: no return
    """
    # Define a signal
    animal_signal = Signal()
    # Register a receiver with the signal
    animal_signal.connect(animal)
    # Send the signal
    animal_signal.send("anonymous")
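The animal receiver used above is not shown; a plausible stand-in (an assumption, not the original) would be:

def animal(sender):
    # Receives the anonymous signal; sender is whatever was passed to send().
    print("animal received a signal from %r" % sender)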
Example #9
class Settings():
    """Make multiple *Value instances accessible as a dict with their name as key"""
    def __init__(self, *values):
        # _values and _values_dict hold the same instances; the list is to
        # preserve order and the dict is for fast access via __getitem__
        self._values = []
        self._values_dict = {}
        self._on_change = Signal()
        self.load(*values)

    def add(self, value):
        """Add `value` to collection"""
        self._values.append(value)
        self._values_dict[value.name] = value

    def load(self, *values):
        """Add multiple `values` to collection"""
        for v in values:
            self.add(v)

    @property
    def values(self):
        """Iterate over collected values"""
        yield from self._values

    @property
    def names(self):
        """Iterate over values' `name` properties"""
        yield from self._values_dict.keys()

    def on_change(self, callback, autoremove=True):
        """
        Run `callback` every time a value changes

        `callback` gets the value instance as the only argument.

        If `autoremove` is True, stop calling `callback` once it is garbage
        collected.
        """
        self._on_change.connect(callback, weak=autoremove)

    def __getitem__(self, name):
        return self._values_dict[name]

    def __setitem__(self, name, value):
        self._values_dict[name].value = value
        self._on_change.send(self._values_dict[name])

    def __contains__(self, name):
        return name in self._values_dict
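A rough usage sketch for Settings (this Value class is a hypothetical stand-in exposing the name/value interface the collection expects):

class Value:
    def __init__(self, name, value=None):
        self.name = name
        self.value = value

settings = Settings(Value('host', 'localhost'), Value('port', 8080))

def report_change(value):
    print('%s changed to %r' % (value.name, value.value))

settings.on_change(report_change)
settings['port'] = 9090        # fires _on_change -> report_change(<port Value>)
print('port' in settings)      # True
print(list(settings.names))    # ['host', 'port']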
Example #10
def go(self, signal: Signal, *args, **kwargs):
    """
    Fire the signal at the corresponding point in time.
    If the signal has no subscribers it will not actually be sent.

    :param signal: the signal to fire
    :param args: positional arguments
    :param kwargs: keyword arguments
    :return: None
    """
    if signal is None:
        raise ValueError("signal cannot be None")
    has_receivers = bool(signal.receivers)
    if has_receivers:
        try:
            signal.send(*args, **kwargs)
        except Exception:
            # Swallow receiver errors so dispatching never breaks the caller.
            pass
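A hedged sketch of driving the go() helper above (the signal and receiver are illustrative; go() is a method, so a throwaway object stands in for self here):

from blinker import Signal

on_tick = Signal('on-tick')

def handle_tick(sender, **kwargs):
    print('tick from', sender, kwargs)

on_tick.connect(handle_tick)

go(object(), on_tick, 'clock', step=1)    # delivered: the signal has a receiver
go(object(), Signal('unused'), 'clock')   # skipped: no receivers, nothing is sent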
Example #11
class IngestionFeeder(object):
    def __init__(self, version):
        """
        :param version: the version the records produced by this feeder will be
        """
        self.version = version
        self.read_signal = Signal(
            doc=u'''Signal fired for each record read from the feeder. The
                                          kwargs passed when the signal is triggered are "number"
                                          and "record", the number is the number of the record from
                                          the feeder so far (so essentially a count) and the record
                                          is the actual record object.''')
        self.finish_signal = Signal(
            doc=u'''Signal fired when the feeder has been exhausted and all
                                            records read. One kwarg is passed when the signal is
                                            triggered: "number", the total number of records read.
                                            ''')

    @property
    @abc.abstractmethod
    def source(self):
        return None

    @abc.abstractmethod
    def records(self):
        """
        Abstract function which when iterated over produces records. This could therefore either
        return an iterable type (like a list, or set) or yield results as a generator (the latter is
        recommended). An example implementation of this function: a csv parser that yields each row
        until the CSV is exhausted.

        :return: an iterable or yields each record
        """
        return []

    def read(self):
        """
        Generator function which yields each record from the source.
        """
        number = 0
        for number, record in enumerate(self.records(), start=1):
            self.read_signal.send(self, number=number, record=record)
            yield record
        self.finish_signal.send(self, number=number)
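A hypothetical concrete feeder, just to show how records(), read(), and the two signals fit together (ListFeeder and the receiver are illustrative, not part of the original source):

class ListFeeder(IngestionFeeder):
    def __init__(self, version, items):
        super(ListFeeder, self).__init__(version)
        self._items = items

    @property
    def source(self):
        return 'in-memory list'

    def records(self):
        return iter(self._items)

def report_total(sender, number):
    print('read %d records' % number)

feeder = ListFeeder(u'1.0', [{'id': 1}, {'id': 2}])
feeder.finish_signal.connect(report_total)
records = list(feeder.read())   # read_signal fires per record, finish_signal fires once at the end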
Example #12
class Settings():
    """Dict of ordered Value objects"""
    def __init__(self, *values):
        # _values and _values_dict hold the same instances; the list is to
        # preserve order and the dict is for access via __getitem__, etc.
        self._values = []
        self._values_dict = {}
        self._on_change = Signal()
        self.load(*values)

    def add(self, value):
        self._values.append(value)
        self._values_dict[value.name] = value

    def load(self, *values):
        for v in values:
            self.add(v)

    @property
    def names(self):
        yield from self._values_dict.keys()

    @property
    def values(self):
        yield from self._values

    def on_change(self, callback, autoremove=True):
        """Run `callback` every time a value changes with the value

        If `autoremove` is True, stop calling callback once it is garbage
        collected.
        """
        self._on_change.connect(callback, weak=autoremove)

    def __getitem__(self, name):
        return self._values_dict[name]

    def __setitem__(self, name, value):
        self._values_dict[name].set(value)
        self._on_change.send(self._values_dict[name])

    def __contains__(self, name):
        return name in self._values_dict
Example #13
class imageDisplay(NodeBase):
    def __init__(self, name):
        super(imageDisplay, self).__init__(name)
        self.loadImage = Signal(str)
        self.inExec = self.createInputPin(DEFAULT_IN_EXEC_NAME, 'ExecPin',
                                          None, self.compute)
        self.entity = self.createInputPin('path', 'StringPin')
        self.outExec = self.createOutputPin(DEFAULT_OUT_EXEC_NAME, 'ExecPin',
                                            None)
        self.failed = self.createOutputPin("failed", 'ExecPin', None)

    @staticmethod
    def pinTypeHints():
        helper = NodePinsSuggestionsHelper()
        helper.addInputDataType('ExecPin')
        helper.addInputDataType('StringPin')
        helper.addInputStruct(PinStructure.Single)
        return helper

    @staticmethod
    def category():
        return 'UI'

    @staticmethod
    def keywords():
        return ['image']

    @staticmethod
    def description():
        return "Loads image to node body. This is UI only node"

    def compute(self, *args, **kwargs):
        path = self.entity.getData()
        if os.path.exists(path):
            self.loadImage.send(path)
            self.outExec.call()
        else:
            self.failed.call()
Example #14
class MailboxesAccountItem(NSObject):
    def init(self):
        self = super(MailboxesAccountItem, self).init()
        self.folder_items = []
        self.updated = Signal()
        return self

    @classmethod
    def newBlank(cls):
        return cls.alloc().init()

    @classmethod
    def newWithAccount_(cls, account):
        self = cls.alloc().init()
        self.account = account
        account_updated.connect(objc_callback(self.folders_updated), account)
        self.folders_updated(account)
        return self

    def folders_updated(self, account):
        self.folder_items[:] = [MailboxesFolderItem.newWithFolder_(f)
                                for n, f in sorted(account._folders.items())]

        self.updated.send(self)
Example #15
class cons_d2c(object):
    '''d2c class with constraints'''
    def __init__(self, d, constrains, init=True):
        '''
        Parameters:
            d: a dict
            constrains: constraint entries; each element is a 3-tuple (deps, func, target), e.g.
                (
                    (('k1.k11','k1.k12'),   (lambda v1,v2:v1+v2),   'k2'       ),
                    (('k3','k4'),           (lambda v1,v2:(v1,v2)), ('k5','k6')),
                )
            init: bool, whether to initialize the passed-in dict using the constraints
        '''
        self.__d2c_instance = d2c(d)
        self.sig = Signal()
        self.constrains = constrains
        self.subscribers = self.__init_cons(constrains, init=init)

    def __init_cons(self, constrains, init=True):
        subscribers = []
        for deps, func, target in constrains:
            subs = _subscrib(deps, func, target, self)
            subscribers.append(subs)
            for _k in deps:
                self.sig.connect(subs, sender=_k)
        if init:
            deps_set = set()
            for deps, func, target in constrains:
                deps_set.update(deps)
            for dep in deps_set:
                self.sig.send(dep)
        return subscribers

    def set(self, k, v, check=True):
        _changed = self.__d2c_instance.set(k, v, check=check)
        if _changed:
            self.sig.send(k)
        return _changed

    def get(self, k):
        return self.__d2c_instance.get(k)

    def update(self, d, check=True):
        k_changed = []
        for k, v in d.items():
            _changed = self.__d2c_instance.set(k, v, check=check)
            if _changed:
                k_changed.append(k)
        for _k in k_changed:
            self.sig.send(_k)

    def __getattr__(self, item):
        return getattr(self.__d2c_instance, item)
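A usage sketch for cons_d2c (d2c and _subscrib come from the same project and are not shown here; the dict and the single constraint below are illustrative):

# Keep 'k2' equal to the sum of 'k1.k11' and 'k1.k12'.
constrains = (
    (('k1.k11', 'k1.k12'), (lambda v1, v2: v1 + v2), 'k2'),
)
cd = cons_d2c({'k1': {'k11': 1, 'k12': 2}, 'k2': 0}, constrains)  # init=True recomputes 'k2'
cd.set('k1.k11', 10)   # the change fires sig for 'k1.k11', re-running the constraint
print(cd.get('k2'))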
Example #16
def test_temp_connection_alias():
    sig = Signal()

    canary = []
    receiver = lambda sender: canary.append(sender)

    sig.send(1)
    with sig.temporarily_connected_to(receiver):
        sig.send(2)
    sig.send(3)

    assert canary == [2]
    assert not sig.receivers
Example #17
def test_temp_connection_failure():
    sig = Signal()

    canary = []
    receiver = lambda sender: canary.append(sender)

    class Failure(Exception):
        pass

    try:
        sig.send(1)
        with sig.connected_to(receiver):
            sig.send(2)
            raise Failure
        sig.send(3)
    except Failure:
        pass
    else:
        raise AssertionError("Context manager did not propagate.")

    assert canary == [2]
    assert not sig.receivers
Example #18
class Session(object):

    def __init__(self,
                 username,
                 password,
                 account_id,
                 auth_token=None,
                 user_directory='mpx',
                 region='US1',
                 service_registry=None,
                 token_duration=43200000,       # 12 hours
                 token_idle_timeout=14400000,   # 4 hours
                 use_ssl=True,
                 ):

        self.username = username
        self.password = password
        self.account = account_id
        self.auth_token = auth_token
        self.user_directory = user_directory
        self.region = region
        self.token_duration = token_duration
        self.token_idle_timeout = token_idle_timeout
        self.use_ssl = use_ssl
        self.registry_url = REGISTRY_URL.format(tld=self.regional_tld)
        self.signin_url = SIGN_IN_URL.format(tld=self.regional_tld)
        self.post_sign_in = Signal()
        self._registry = service_registry
        self.session = requests.Session()
        self.session.mount('https://', TLS1Adapter())
        self.session.headers.update({
            'Content-Type': 'application/json',
            'Accept': 'application/json',
            'User-Agent': 'Python Mediaamp %s' % __version__,
        })

    @property
    def registry(self):
        if self._registry is None:
            self._registry = self.resolve_domain()
        return self._registry

    @property
    def regional_tld(self):
        return 'eu' if 'eu' in self.region.lower() else 'com'

    @property
    def signin_username(self):
        return self.user_directory + '/' + self.username

    def resolve_domain(self):
        resp = self.get(self.registry_url, params={
            'schema': '1.1',
            '_accountId': self.account,
        })
        try:
            return resp['resolveDomainResponse']
        except KeyError:
            raise MediaAmpError('Unexpected response loading registry.')

    def sign_in(self):
        self.auth_token = None
        self.session.auth = HTTPBasicAuth(self.signin_username, self.password)
        result = self.get(self.signin_url, is_signin_request=True, params={
            'schema': '1.0',
            '_duration': self.token_duration,
            '_idleTimeout': self.token_idle_timeout,
        })
        try:
            self.auth_token = result['signInResponse']['token']
        except KeyError:
            raise AuthenticationError('Could not retrieve token.')
        self.post_sign_in.send(self)

    def request_json(self, method, url, retry_sign_in=True, is_signin_request=False, **kwargs):
        """ Requests JSON content from the supplied URL.

        This is the primary function to be used to make requests to the MPX API.
        Not only does it ensure that the body of the response can be encoded as
        Python via JSON, it also wraps exceptions and will auto-login using
        the supplied credentials when necessary (e.g. when a token expires).

        This API is known to return 200 statuses for requests that fail. It's
        their convention to include the HTTP response code in the body of
        the JSON returned. This checks for that case and turns them into actual
        exceptions.

        """
        if self.auth_token is not None:
            self.session.auth = HTTPBasicAuth('', self.auth_token)
        elif not is_signin_request:
            self.sign_in()

        response = getattr(self.session, method)(url, **kwargs)

        try:
            response.raise_for_status()
        except requests.HTTPError as e:
            wrap_http_error(e)

        try:
            data = response.json() if response.text else {}
        except ValueError:
            raise MediaAmpError('Response body cannot be read as JSON.')

        try:
            raise_for_json_exception(data)
        except InvalidTokenError:
            if retry_sign_in:
                self.sign_in()
                return self.request_json(method, url, retry_sign_in=False, **kwargs)
            else:
                raise

        return data

    def get(self, url, **kwargs):
        return self.request_json('get', url, **kwargs)

    def put(self, url, **kwargs):
        return self.request_json('put', url, **kwargs)

    def post(self, url, **kwargs):
        return self.request_json('post', url, **kwargs)

    def delete(self, url, **kwargs):
        return self.request_json('delete', url, **kwargs)

    def __getitem__(self, key):
        url = self.registry.get(key)
        if url is None:
            url = self.registry.get(key + ' read-only')
        if url is None:
            raise KeyError(key + ' not available.')
        if self.use_ssl:
            url = url.replace('http://', 'https://')
        return services[key](self, url)
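A hedged usage sketch for Session (credentials, account id, and the service key are placeholders; post_sign_in receives the session once sign-in succeeds):

def on_signed_in(session, **kwargs):
    print('signed in, token:', session.auth_token)

session = Session('user@example.com', 'secret', account_id='12345')
session.post_sign_in.connect(on_signed_in)

# Indexing looks up a service endpoint in the registry; the first request
# has no auth token yet, so it triggers sign_in(), which fires post_sign_in.
endpoint = session['Media Data Service']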
Example #19
class ScriptRunner(object):
    def __init__(
        self,
        session_id,
        report,
        enqueue_forward_msg,
        client_state,
        request_queue,
        uploaded_file_mgr=None,
    ):
        """Initialize the ScriptRunner.

        (The ScriptRunner won't start executing until start() is called.)

        Parameters
        ----------
        session_id : str
            The ReportSession's id.

        report : Report
            The ReportSession's report.

        client_state : streamlit.proto.ClientState_pb2.ClientState
            The current state from the client (widgets and query params).

        request_queue : ScriptRequestQueue
            The queue that the ReportSession is publishing ScriptRequests to.
            ScriptRunner will continue running until the queue is empty,
            and then shut down.

        uploaded_file_mgr : UploadedFileManager
            The File manager to store the data uploaded by the file_uploader widget.

        """
        self._session_id = session_id
        self._report = report
        self._enqueue_forward_msg = enqueue_forward_msg
        self._request_queue = request_queue
        self._uploaded_file_mgr = uploaded_file_mgr

        self._client_state = client_state
        self._widgets = Widgets()
        self._widgets.set_state(client_state.widget_states)

        self.on_event = Signal(doc="""Emitted when a ScriptRunnerEvent occurs.

            This signal is *not* emitted on the same thread that the
            ScriptRunner was created on.

            Parameters
            ----------
            event : ScriptRunnerEvent

            exception : BaseException | None
                Our compile error. Set only for the
                SCRIPT_STOPPED_WITH_COMPILE_ERROR event.

            widget_states : streamlit.proto.WidgetStates_pb2.WidgetStates | None
                The ScriptRunner's final WidgetStates. Set only for the
                SHUTDOWN event.
            """)

        # Set to true when we process a SHUTDOWN request
        self._shutdown_requested = False

        # Set to true while we're executing. Used by
        # maybe_handle_execution_control_request.
        self._execing = False

        # This is initialized in start()
        self._script_thread = None

    def __repr__(self) -> str:
        return util.repr_(self)

    def start(self):
        """Start a new thread to process the ScriptEventQueue.

        This must be called only once.

        """
        if self._script_thread is not None:
            raise Exception("ScriptRunner was already started")

        self._script_thread = ReportThread(
            session_id=self._session_id,
            enqueue=self._enqueue_forward_msg,
            query_string=self._client_state.query_string,
            widgets=self._widgets,
            uploaded_file_mgr=self._uploaded_file_mgr,
            target=self._process_request_queue,
            name="ScriptRunner.scriptThread",
        )
        self._script_thread.start()

    def _process_request_queue(self):
        """Process the ScriptRequestQueue and then exits.

        This is run in a separate thread.

        """
        LOGGER.debug("Beginning script thread")

        while not self._shutdown_requested and self._request_queue.has_request:
            request, data = self._request_queue.dequeue()
            if request == ScriptRequest.STOP:
                LOGGER.debug("Ignoring STOP request while not running")
            elif request == ScriptRequest.SHUTDOWN:
                LOGGER.debug("Shutting down")
                self._shutdown_requested = True
            elif request == ScriptRequest.RERUN:
                self._run_script(data)
            else:
                raise RuntimeError("Unrecognized ScriptRequest: %s" % request)

        # Send a SHUTDOWN event before exiting. This includes the widget values
        # as they existed after our last successful script run, which the
        # ReportSession will pass on to the next ScriptRunner that gets
        # created.
        client_state = ClientState()
        client_state.query_string = self._client_state.query_string
        self._widgets.marshall(client_state)
        self.on_event.send(ScriptRunnerEvent.SHUTDOWN,
                           client_state=client_state)

    def _is_in_script_thread(self):
        """True if the calling function is running in the script thread"""
        return self._script_thread == threading.current_thread()

    def maybe_handle_execution_control_request(self):
        if not self._is_in_script_thread():
            # We can only handle execution_control_request if we're on the
            # script execution thread. However, it's possible for deltas to
            # be enqueued (and, therefore, for this function to be called)
            # in separate threads, so we check for that here.
            return

        if not self._execing:
            # If the _execing flag is not set, we're not actually inside
            # an exec() call. This happens when our script exec() completes,
            # we change our state to STOPPED, and a statechange-listener
            # enqueues a new ForwardEvent
            return

        # Pop the next request from our queue.
        request, data = self._request_queue.dequeue()
        if request is None:
            return

        LOGGER.debug("Received ScriptRequest: %s", request)
        if request == ScriptRequest.STOP:
            raise StopException()
        elif request == ScriptRequest.SHUTDOWN:
            self._shutdown_requested = True
            raise StopException()
        elif request == ScriptRequest.RERUN:
            raise RerunException(data)
        else:
            raise RuntimeError("Unrecognized ScriptRequest: %s" % request)

    def _install_tracer(self):
        """Install function that runs before each line of the script."""
        def trace_calls(frame, event, arg):
            self.maybe_handle_execution_control_request()
            return trace_calls

        # Python interpreters are not required to implement sys.settrace.
        if hasattr(sys, "settrace"):
            sys.settrace(trace_calls)

    @contextmanager
    def _set_execing_flag(self):
        """A context for setting the ScriptRunner._execing flag.

        Used by maybe_handle_execution_control_request to ensure that
        we only handle requests while we're inside an exec() call
        """
        if self._execing:
            raise RuntimeError("Nested set_execing_flag call")
        self._execing = True
        try:
            yield
        finally:
            self._execing = False

    def _run_script(self, rerun_data):
        """Run our script.

        Parameters
        ----------
        rerun_data: RerunData
            The RerunData to use.

        """
        assert self._is_in_script_thread()

        LOGGER.debug("Running script %s", rerun_data)

        # Reset DeltaGenerators, widgets, media files.
        media_file_manager.clear_session_files()

        ctx = get_report_ctx()
        if ctx is None:
            # This should never be possible on the script_runner thread.
            raise RuntimeError(
                "ScriptRunner thread has a null ReportContext. Something has gone very wrong!"
            )

        ctx.reset(query_string=rerun_data.query_string)

        self.on_event.send(ScriptRunnerEvent.SCRIPT_STARTED)

        # Compile the script. Any errors thrown here will be surfaced
        # to the user via a modal dialog in the frontend, and won't result
        # in their previous report disappearing.

        try:
            with source_util.open_python_file(self._report.script_path) as f:
                filebody = f.read()

            if config.get_option("runner.magicEnabled"):
                filebody = magic.add_magic(filebody, self._report.script_path)

            code = compile(
                filebody,
                # Pass in the file path so it can show up in exceptions.
                self._report.script_path,
                # We're compiling entire blocks of Python, so we need "exec"
                # mode (as opposed to "eval" or "single").
                mode="exec",
                # Don't inherit any flags or "future" statements.
                flags=0,
                dont_inherit=1,
                # Use the default optimization options.
                optimize=-1,
            )

        except BaseException as e:
            # We got a compile error. Send an error event and bail immediately.
            LOGGER.debug("Fatal script error: %s" % e)
            self.on_event.send(
                ScriptRunnerEvent.SCRIPT_STOPPED_WITH_COMPILE_ERROR,
                exception=e)
            return

        # If we get here, we've successfully compiled our script. The next step
        # is to run it. Errors thrown during execution will be shown to the
        # user as ExceptionElements.

        # Update the Widget object with the new widget_states.
        # (The ReportContext has a reference to this object, so we just update it in-place)
        if rerun_data.widget_states is not None:
            self._widgets.set_state(rerun_data.widget_states)

        if config.get_option("runner.installTracer"):
            self._install_tracer()

        # This will be set to a RerunData instance if our execution
        # is interrupted by a RerunException.
        rerun_with_data = None

        try:
            # Create fake module. This gives us a new global namespace to
            # execute the code in.
            module = _new_module("__main__")

            # Install the fake module as the __main__ module. This allows
            # the pickle module to work inside the user's code, since it now
            # can know the module where the pickled objects stem from.
            # IMPORTANT: This means we can't use "if __name__ == '__main__'" in
            # our code, as it will point to the wrong module!!!
            sys.modules["__main__"] = module

            # Add special variables to the module's globals dict.
            # Note: The following is a requirement for the CodeHasher to
            # work correctly. The CodeHasher is scoped to
            # files contained in the directory of __main__.__file__, which we
            # assume is the main script directory.
            module.__dict__["__file__"] = self._report.script_path

            with modified_sys_path(self._report), self._set_execing_flag():
                exec(code, module.__dict__)

        except RerunException as e:
            rerun_with_data = e.rerun_data

        except StopException:
            pass

        except BaseException as e:
            handle_uncaught_app_exception(e)

        finally:
            self._widgets.reset_triggers()
            self._widgets.cull_nonexistent(ctx.widget_ids_this_run.items())
            self.on_event.send(ScriptRunnerEvent.SCRIPT_STOPPED_WITH_SUCCESS)
            # delete expired files now that the script has run and files in use
            # are marked as active
            media_file_manager.del_expired_files()

        # Use _log_if_error() to make sure we never ever ever stop running the
        # script without meaning to.
        _log_if_error(_clean_problem_modules)

        if rerun_with_data is not None:
            self._run_script(rerun_with_data)
Example #20
class PinBase(IPin):
    """
    **Base class for pins**

    This is the base class that stores the data in the graph.
    This class is intended to be subclassed for each new registered data type you want to create.

    :param _packageName: This holds the package where the subclassed pin is registered.
                         It is not intended to be set by the developer; PyFlow automatically fills this property
                         at registration time
    :type _packageName: str

    Signals:
        * **serializationHook** : Fired when pin serialization is called, so UI wrappers can append data to the serialization object
        * **onPinConnected** : Fired when a new connection is made to this Pin, sends other Pin
        * **onPinDisconnected** : Fired when a connection to this Pin is removed, sends other Pin
        * **nameChanged** : Fired when pin.setName() called, sends New Name
        * **killed** : Fired when Pin gets deleted
        * **onExecute** : Fired when Pin execution gets called
        * **containerTypeChanged** : Fired when Pin Structure Changes
        * **dataBeenSet** : Fired when data changes, sends New Data
        * **dictChanged** : Fired when current structure changes to :py:const:`PyFlow.Core.Common.StructureType.Dict`, sends Dict key DataType
        * **errorOccurred** : Fired when an error occurs, e.g. an incorrect dataType being set, sends the occurred Error
        * **errorCleared** : Fired when error cleared

    :ivar owningNode: Weak reference to owning node
    :ivar reconnectionPolicy: What to do when connecting to a busy pin. Used when the :attr:`~PyFlow.Core.Common.PinOptions.AllowMultipleConnections` flag is disabled
    :ivar dirty: Flag used for lazy evaluation
    :ivar affects: Set of pins this pin affects
    :ivar affected_by: Set of pins that affect this pin
    :ivar name: Pin name
    :ivar direction: Pin direction
    :ivar inputWidgetVariant: Input widget variant tag
    :ivar constraint: **description here**
    :ivar structConstraint: **description here**
    :ivar super: **description here**
    :ivar activeDataType: Current data type of this pin. Used by AnyPin
    :ivar pinIndex: Position of this pin on node
    :ivar description: Text description of this pin
    """
    _packageName = ""

    def __init__(self, name, owningNode, direction):
        super(PinBase, self).__init__()
        # signals
        self.serializationHook = Signal()
        self.onPinConnected = Signal(object)
        self.onPinDisconnected = Signal(object)
        self.nameChanged = Signal(str)
        self.killed = Signal()
        self.onExecute = Signal(object)
        self.containerTypeChanged = Signal()
        self.dataBeenSet = Signal(object)
        self.dictChanged = Signal(str)
        self.markedAsDirty = Signal()

        self.errorOccured = Signal(object)
        self.errorCleared = Signal()
        self._lastError = None

        ## Access to the node
        self.owningNode = weakref.ref(owningNode)

        self._uid = uuid.uuid4()
        self._data = None
        self._defaultValue = None
        self.reconnectionPolicy = PinReconnectionPolicy.DisconnectIfHasConnections
        self.dirty = True
        self.affects = set()
        self.affected_by = set()

        self.name = name
        self._group = ""
        self.direction = direction

        # gui class weak ref
        self._wrapper = None
        self.__wrapperJsonData = None
        self.annotationDescriptionDict = None
        self._inputWidgetVariant = "DefaultWidget"

        # Constraint ports
        self.constraint = None
        self.structConstraint = None

        # Flags
        self._flags = PinOptions.Storable
        self._origFlags = self._flags
        self._structure = StructureType.Single
        self._currStructure = self._structure
        self._isAny = False
        self._isArray = False
        self._isDict = False
        self._alwaysList = False
        self._alwaysDict = False
        self._alwaysSingle = False
        self._defaultSupportedDataTypes = self._supportedDataTypes = self.supportedDataTypes()
        self.canChange = False
        self._isDictElement = False
        self.hidden = False

        # DataTypes
        self.super = self.__class__
        self.activeDataType = self.__class__.__name__
        self._keyType = None

        # registration
        self.owningNode().pins.add(self)
        self.owningNode().pinsCreationOrder[self.uid] = self

        # This allows pins to be connected by their position on the node
        self.pinIndex = 0
        if direction == PinDirection.Input:
            self.pinIndex = len(self.owningNode().orderedInputs)
        if direction == PinDirection.Output:
            self.pinIndex = len(self.owningNode().orderedOutputs)

        self.description = "{} instance".format(self.dataType)

    @property
    def wrapperJsonData(self):
        try:
            dt = self.__wrapperJsonData.copy()
            return dt
        except Exception as e:
            return None

    def getInputWidgetVariant(self):
        return self._inputWidgetVariant

    def setInputWidgetVariant(self, value):
        self._inputWidgetVariant = value

    def path(self):
        owningNodePath = self.owningNode().path()
        return "{}.{}".format(owningNodePath, self.getName())

    @property
    def group(self):
        """Pin group

        This is just a tag which can be used at the UI level

        :rtype: str
        """
        return self._group

    @group.setter
    def group(self, value):
        self._group = str(value)

    def enableOptions(self, *options):
        """Enables flags on pin instance

        Example:

        >>> self.pinInstance.enableOptions(PinOptions.RenamingEnabled)

        You can also pass array/set of flags

        >>> self.pinInstance.enableOptions({PinOptions.RenamingEnabled, PinOptions.Dynamic})

        This is equivalent of

        >>> self.pinInstance.enableOptions(PinOptions.RenamingEnabled | PinOptions.Dynamic)
        """
        for option in options:
            self._flags = self._flags | option
        self._origFlags = self._flags

    def disableOptions(self, *options):
        """Same as :meth:`~PyFlow.Core.PinBase.PinBase.enableOptions` but inverse
        """
        for option in options:
            self._flags = self._flags & ~option
        self._origFlags = self._flags

    def optionEnabled(self, option):
        """Is option enabled or not

        :param option: Option to check
        :type option: :class:`~PyFlow.Core.Common.PinOptions`
        :rtype: bool
        """
        return bool(self._flags & option)

    def isAny(self):
        """Wheter this pin of type Any or not

        :rtype: bool
        """
        return self._isAny

    @property
    def packageName(self):
        """Returns name of package this pin belongs to

        :rtype: bool
        """
        return self._packageName

    @property
    def linkedTo(self):
        """store connection from pins

        from left hand side to right hand side

        .. code-block:: python

            {
                "lhsNodeName": "", "outPinId": 0,
                "rhsNodeName": "", "inPinId": 0
            }

        where pin id is order in which pin was added to node

        :returns: Serialized connections
        :rtype: list(dict)
        """
        result = list()
        if self.direction == PinDirection.Output:
            for i in getConnectedPins(self):
                connection = {"lhsNodeName": "", "outPinId": 0, "rhsNodeName": "", "inPinId": 0}
                connection["lhsNodeName"] = self.owningNode().getName()
                connection["lhsNodeUid"] = str(self.owningNode().uid)
                connection["outPinId"] = self.pinIndex
                connection["rhsNodeName"] = i.owningNode().getName()
                connection["rhsNodeUid"] = str(i.owningNode().uid)
                connection["inPinId"] = i.pinIndex
                result.append(connection)

        if self.direction == PinDirection.Input:
            for i in getConnectedPins(self):
                connection = {"lhsNodeName": "", "outPinId": 0, "rhsNodeName": "", "inPinId": 0}
                connection["lhsNodeName"] = i.owningNode().getName()
                connection["lhsNodeUid"] = str(i.owningNode().uid)
                connection["outPinId"] = i.pinIndex
                connection["rhsNodeName"] = self.owningNode().getName()
                connection["rhsNodeUid"] = str(self.owningNode().uid)
                connection["inPinId"] = self.pinIndex
                result.append(connection)
        return result

    def __repr__(self):
        return "[{0}:{1}:{2}:{3}]".format(self.dataType, self.getFullName(), self.dirty, self.currentData())

    def isExec(self):
        """Returns whether this is exec pin or not

        :rtype: bool
        """
        return False

    def initAsArray(self, bIsArray):
        """Sets this pins to be a list always

        :param bIsArray: Define as array
        :type bIsArray: bool
        """
        self._alwaysList = bool(bIsArray)
        if bool(bIsArray):
            self._alwaysDict = False
        self.setAsArray(bool(bIsArray))

    def initAsDict(self, bIsDict):
        """Sets this pins to be a dict always

        :param bIsArray: Define as dict
        :type bIsArray: bool
        """
        self._alwaysDict = bool(bIsDict)
        if bool(bIsDict):
            self._alwaysList = False
        self.setAsDict(bool(bIsDict))

    def setAsArray(self, bIsArray):
        """Sets this pins to be a list

        :param bIsArray: Define as Array
        :type bIsArray: bool
        """
        bIsArray = bool(bIsArray)
        if self._isArray == bIsArray:
            return

        self._isArray = bIsArray
        if bIsArray:
            if self.isDict():
                self.setAsDict(False)
            # list pins supports only lists by default
            self.enableOptions(PinOptions.SupportsOnlyArrays)
            self._currStructure = StructureType.Array
            self._isDict = False
        else:
            self._currStructure = self._structure
        self._data = self.defaultValue()
        self.containerTypeChanged.send()

    def setAsDict(self, bIsDict):
        """Sets this pins to be a dict

        :param bIsArray: Define as Array
        :type bIsArray: bool
        """
        bIsDict = bool(bIsDict)
        if self._isDict == bIsDict:
            return

        self._isDict = bIsDict
        if bIsDict:
            if self.isArray():
                self.setAsArray(False)
            # list pins supports only lists by default
            self.enableOptions(PinOptions.SupportsOnlyArrays)
            self._currStructure = StructureType.Dict
            self._isArray = False
        else:
            self._currStructure = self._structure
            self._keyType = None
        self._data = self.defaultValue()
        self.containerTypeChanged.send()

    def isArray(self):
        """Returns whether this pin is array or not

        :rtype: bool
        """
        return self._isArray

    def isDict(self):
        """Returns whether this pin is dict or not

        :rtype: bool
        """
        return self._isDict

    def setWrapper(self, wrapper):
        """Sets ui wrapper instance

        :param wrapper: Whatever ui class that represents this pin
        """
        if self._wrapper is None:
            self._wrapper = weakref.ref(wrapper)

    def getWrapper(self):
        """Returns ui wrapper instance
        """
        return self._wrapper

    def deserialize(self, jsonData):
        """Restores itself from supplied serialized data

        :param jsonData: Json representation of pin
        :type jsonData: dict
        """
        self.setName(jsonData["name"])
        self.uid = uuid.UUID(jsonData['uuid'])

        for opt in PinOptions:
            if opt.value in jsonData["options"]:
                self.enableOptions(opt)
            else:
                self.disableOptions(opt)

        self.changeStructure(jsonData["structure"])
        self._alwaysList = jsonData['alwaysList']
        self._alwaysSingle = jsonData['alwaysSingle']
        self._alwaysDict = jsonData['alwaysDict']

        try:
            self.setData(json.loads(jsonData['value'], cls=self.jsonDecoderClass()))
        except Exception as e:
            self.setData(self.defaultValue())

        if "wrapper" in jsonData:
            self.__wrapperJsonData = jsonData["wrapper"]

    def serialize(self):
        """Serializes itself to json

        :rtype: dict
        """
        storable = self.optionEnabled(PinOptions.Storable)

        serializedData = None
        if not self.dataType == "AnyPin":
            if storable:
                serializedData = json.dumps(self.currentData(), cls=self.jsonEncoderClass())
            #else:
            #    serializedData = json.dumps(self.defaultValue(), cls=self.jsonEncoderClass())

        data = {
            'name': self.name,
            'package': self.packageName,
            'fullName': self.getFullName(),
            'dataType': self.__class__.__name__,
            'direction': int(self.direction),
            'value': serializedData,
            'uuid': str(self.uid),
            'linkedTo': list(self.linkedTo),
            'pinIndex': self.pinIndex,
            'options': [i.value for i in PinOptions if self.optionEnabled(i)],
            'structure': int(self._currStructure),
            'alwaysList': self._alwaysList,
            'alwaysSingle': self._alwaysSingle,
            'alwaysDict': self._alwaysDict
        }

        # Wrapper class can subscribe to this signal and return
        # UI specific data which will be considered on serialization
        # Blinker returns a tuple (receiver, return val)
        wrapperData = self.serializationHook.send(self)
        if wrapperData is not None:
            if len(wrapperData) > 0:
                # We take return value from one wrapper
                data['wrapper'] = wrapperData[0][1]
        return data

    @property
    def uid(self):
        return self._uid

    @uid.setter
    def uid(self, value):
        if not value == self._uid:
            self._uid = value

    def setName(self, name, force=False):
        """Sets pin name and fires events

        :param name: New pin name
        :type name: str
        :param force: If True - name will be changed even if option :attr:`~PyFlow.Core.Common.PinOptions.RenamingEnabled` is turned off
        :type force: bool
        :returns: Whether renaming was performed or not
        :rtype: bool
        """
        if not force:
            if not self.optionEnabled(PinOptions.RenamingEnabled):
                return False
        if name == self.name:
            return False
        self.name = self.owningNode().getUniqPinName(name)
        self.nameChanged.send(self.name)
        return True

    def getName(self):
        return self.name

    def getFullName(self):
        """Returns full pin name, including node name

        :rtype: str
        """
        return self.owningNode().name + '_' + self.name

    def allowedDataTypes(self, checked=[], dataTypes=[], selfCheck=True, defaults=False):
        return list(self.supportedDataTypes())

    def checkFree(self, checked=[], selfCheck=True):
        return False

    def defaultValue(self):
        """Returns default value of this pin
        """
        if self.isArray():
            return []
        elif self.isDict():
            return PFDict("StringPin", "AnyPin")
        else:
            return self._defaultValue

    def getData(self):
        """Returns pin value

        If something is connected to this pin, graph will be evaluated

        .. seealso:: :class:`~PyFlow.Core.EvaluationEngine.DefaultEvaluationEngine_Impl`
        """
        return EvaluationEngine().getPinData(self)

    def clearError(self):
        """Clears any last error on this pin and fires event
        """
        if self._lastError is not None:
            self._lastError = None
            self.errorCleared.send()

    def setError(self, err):
        """Marks this pin as invalid by setting error message to it. Also fires event

        :param err: Error message
        :type err: str
        """
        self._lastError = str(err)
        self.errorOccured.send(self._lastError)

    def validateArray(self, array, func):
        valid = True
        if isinstance(array, list):
            for i in array:
                self.validateArray(i, func)
        else:
            func(array)
        return valid

    def setData(self, data):
        """Sets value to pin

        :param data: Data to be set
        :type data: object
        """
        if self.super is None:
            return
        try:
            self.setDirty()
            if isinstance(data, DictElement) and not self.optionEnabled(PinOptions.DictElementSupported):
                data = data[1]
            if not self.isArray() and not self.isDict():
                if isinstance(data, DictElement):
                    self._data = DictElement(data[0], self.super.processData(data[1]))
                else:
                    if isinstance(data, list):
                        self._data = data
                    else:
                        self._data = self.super.processData(data)
            elif self.isArray():
                if isinstance(data, list):
                    if self.validateArray(data, self.super.processData):
                        self._data = data
                    else:
                        raise Exception("Some Array Input is not valid Data")
                else:
                    self._data = [self.super.processData(data)]
            elif self.isDict():
                if isinstance(data, PFDict):
                    self._data = PFDict(data.keyType, data.valueType)
                    for key, value in data.items():
                        self._data[key] = self.super.processData(value)
                elif isinstance(data, DictElement) and len(data) == 2:
                    self._data.clear()
                    self._data[data[0]] = self.super.processData(data[1])

            if self.direction == PinDirection.Output:
                for i in self.affects:
                    i.setData(self.currentData())

            elif self.direction == PinDirection.Input and self.owningNode().__class__.__name__ == "compound":
                for i in self.affects:
                    i.setData(self.currentData())
            
            if self.direction == PinDirection.Input or self.optionEnabled(PinOptions.AlwaysPushDirty):
                push(self)
            self.clearError()
            self.dataBeenSet.send(self)
        except Exception as exc:
            self.setError(exc)
            self.setDirty()
        if self._lastError is not None:
            self.owningNode().setError(self._lastError)
        wrapper = self.owningNode().getWrapper()
        if wrapper:
            wrapper.update()

    def call(self, *args, **kwargs):
        if self.owningNode().isValid():
            self.onExecute.send(*args, **kwargs)

    def disconnectAll(self):
        if self.direction == PinDirection.Input:
            for o in list(self.affected_by):
                disconnectPins(self, o)
            self.affected_by.clear()

        if self.direction == PinDirection.Output:
            for i in list(self.affects):
                disconnectPins(self, i)
            self.affects.clear()

    @property
    def dataType(self):
        """Returns data type of this pin

        :rtype: str
        """
        return self.__class__.__name__

    @property
    def structureType(self):
        """Returns current structure of this pin

        :rtype: :class:`~PyFlow.Core.Common.StructureType`
        """
        return self._structure

    @structureType.setter
    def structureType(self, structure):
        self._structure = structure
        self._currStructure = structure

    # PinBase methods

    def kill(self, *args, **kwargs):
        """Deletes this pin
        """
        self.disconnectAll()
        if self in self.owningNode().pins:
            self.owningNode().pins.remove(self)
        if self.uid in self.owningNode().pinsCreationOrder:
            self.owningNode().pinsCreationOrder.pop(self.uid)

        # Fix pin indexes on owning node
        if self.optionEnabled(PinOptions.Dynamic):
            # sort owning node pins indexes
            index = 1
            if self.direction == PinDirection.Input:
                for inputPin in self.owningNode().orderedInputs.values():
                    if inputPin == self:
                        continue
                    inputPin.pinIndex = index
                    index += 1
            index = 1
            if self.direction == PinDirection.Output:
                for outputPin in self.owningNode().orderedOutputs.values():
                    if outputPin == self:
                        continue
                    outputPin.pinIndex = index
                    index += 1
        self.killed.send(self)
        clearSignal(self.killed)

    def currentData(self):
        """Returns current value of this pin, without any graph evaluation

        :rtype: object
        """
        if self._data is None:
            return self._defaultValue
        return self._data

    def aboutToConnect(self, other):
        """This method called right before two pins connected

        :param other: Pin which this pin is going to be connected with
        :type other: :class:`~PyFlow.Core.PinBase.PinBase`
        """
        if other.structureType != self.structureType:
            if self.optionEnabled(PinOptions.ChangeTypeOnConnection) or self.structureType == StructureType.Multi:
                self.changeStructure(other._currStructure)
                self.onPinConnected.send(other)

    def getCurrentStructure(self):
        """Returns this pin structure type

        :rtype: :class:`~PyFlow.Core.Common.StructureType`
        """
        if self.structureType == StructureType.Multi:
            if self._alwaysSingle:
                return StructureType.Single
            elif self._alwaysList:
                return StructureType.Array
            elif self._alwaysDict:
                return StructureType.Dict
            else:
                return self.structureType
        else:
            return self.structureType

    def changeStructure(self, newStruct, init=False):
        """Changes this pin structure type

        :param newStruct: Target structure
        :type newStruct: :class:`~PyFlow.Core.Common.StructureType`
        :param init: **docs goes here**
        :type init: bool
        """
        free = self.canChangeStructure(newStruct, [], init=init)
        if free:
            self.updateConstrainedPins(set(), newStruct, init, connecting=True)

    def canChangeStructure(self, newStruct, checked=[], selfCheck=True, init=False):
        """Recursive function to determine if pin can change its structure

        :param newStruct: New structure we want to apply
        :type newStruct: :class:`~PyFlow.Core.Common.StructureType`
        :param checked: Already visited pins, defaults to []
        :type checked: list, optional
        :param selfCheck: Whether to also inspect this pin's own connections, defaults to True
        :type selfCheck: bool, optional
        :param init: Initialization flag; if True, Multi pins may adopt another structure and will not change again on later calls with init=True, defaults to False
        :type init: bool, optional
        :returns: True if pin can change structure to newStruct
        :rtype: bool
        """
        if not init and (self._alwaysList or self._alwaysSingle or self._alwaysDict):
            return False
        if self.structConstraint is None or self.structureType == StructureType.Multi:
            return True
        elif self.structureType != StructureType.Multi:
            return False
        else:
            con = []
            if selfCheck:
                free = not self.hasConnections()
                if not free:
                    for c in getConnectedPins(self):
                        if c not in checked:
                            con.append(c)
            else:
                free = True
                checked.append(self)
            free = True
            if selfCheck:
                def testfree():
                    free = False
                    for pin in getConnectedPins(self):
                        if pin._structure == StructureType.Multi:
                            free = True
                        else:
                            free = False
                            break
                    return free
                if any([self._currStructure == StructureType.Single and newStruct == StructureType.Array and not self.optionEnabled(PinOptions.ArraySupported) and self.hasConnections(),
                        self._currStructure == StructureType.Single and newStruct == StructureType.Dict and not self.optionEnabled(PinOptions.DictSupported) and self.hasConnections(),
                        self._currStructure == StructureType.Array and newStruct == StructureType.Single and self.optionEnabled(PinOptions.SupportsOnlyArrays) and self.hasConnections(),
                        self._currStructure == StructureType.Dict and newStruct == StructureType.Single and self.optionEnabled(PinOptions.SupportsOnlyArrays) and self.hasConnections(),
                        self._currStructure == StructureType.Array and newStruct == StructureType.Dict and self.hasConnections(),
                        self._currStructure == StructureType.Dict and newStruct == StructureType.Array and self.hasConnections()]):
                    free = testfree()
            if free:
                for port in self.owningNode().structConstraints[self.structConstraint] + con:
                    if port not in checked:
                        checked.append(port)
                        free = port.canChangeStructure(newStruct, checked, True, init=init)
                        if not free:
                            break
            return free

    def updateConstrainedPins(self, traversed, newStruct, init=False, connecting=False):
        nodePins = set()
        if self.structConstraint is not None:
            nodePins = set(self.owningNode().structConstraints[self.structConstraint])
        else:
            nodePins = set([self])
        for connectedPin in getConnectedPins(self):
            if connectedPin.structureType == StructureType.Multi:
                if connectedPin.canChangeStructure(self._currStructure, init=init):
                    nodePins.add(connectedPin)
        for neighbor in nodePins:
            if neighbor not in traversed and neighbor.structureType == StructureType.Multi:
                neighbor.setAsArray(newStruct == StructureType.Array)
                neighbor.setAsDict(newStruct == StructureType.Dict)
                if connecting:
                    if init:
                        neighbor._alwaysList = newStruct == StructureType.Array
                        neighbor._alwaysSingle = newStruct == StructureType.Single
                        neighbor._alwaysDict = newStruct == StructureType.Dict
                    neighbor._currStructure = newStruct
                    neighbor.disableOptions(PinOptions.ArraySupported)
                    neighbor.disableOptions(PinOptions.DictSupported)
                    if newStruct == StructureType.Array:
                        neighbor.enableOptions(PinOptions.ArraySupported)
                    elif newStruct == StructureType.Dict:
                        neighbor.enableOptions(PinOptions.DictSupported)   
                    elif newStruct == StructureType.Multi:
                        neighbor.enableOptions(PinOptions.ArraySupported)
                        neighbor.enableOptions(PinOptions.DictSupported)
                    elif newStruct == StructureType.Single:
                        neighbor.disableOptions(PinOptions.SupportsOnlyArrays)
                else:
                    neighbor._currStructure = neighbor._structure
                    neighbor._data = neighbor.defaultValue()
                traversed.add(neighbor)
                neighbor.setData(neighbor.defaultValue())
                neighbor.updateConstrainedPins(traversed, newStruct, init, connecting=connecting)

    def pinConnected(self, other):
        push(self)
        if self.isDict():
            self.updateConnectedDicts([], self._data.keyType)

    def pinDisconnected(self, other):
        self.onPinDisconnected.send(other)
        push(other)

    def canChangeTypeOnConnection(self, checked=[], can=True, extraPins=[], selfCheck=True):
        """Recursive function to determine if pin can change its dataType

        :param checked: Already visited pins, defaults to []
        :type checked: list, optional
        :param can: Variable Updated during iteration, defaults to True
        :type can: bool, optional
        :param extraPins: Extra pins to check that are neither constrained with nor connected to this pin, defaults to []
        :type extraPins: list, optional
        :param selfCheck: Define if check pin itself for connected pins, defaults to True
        :type selfCheck: bool, optional
        :returns: True if the pin can become another dataType
        :rtype: bool
        """
        if not self.optionEnabled(PinOptions.ChangeTypeOnConnection):
            return False
        con = []
        neis = []
        if selfCheck:
            if self.hasConnections():
                for c in getConnectedPins(self):
                    if c not in checked:
                        con.append(c)
        else:
            checked.append(self)
        if self.constraint:
            neis = self.owningNode().constraints[self.constraint]
        for port in neis + con + extraPins:
            if port not in checked and can:
                checked.append(port)
                can = port.canChangeTypeOnConnection(checked, can, selfCheck=True)
        return can

    def getDictElementNode(self, checked=[], node=None):
        """Get the connected :py:class:`PyFlow.Packages.PyFlowBase.Nodes.makeDictElement.makeDictElement` to this pin recursively

        :param checked: Currently visited pins, defaults to []
        :type checked: list, optional
        :param node: Found node, defaults to None
        :rtype: :class:`~PyFlow.Core.NodeBase.NodeBase` or None
        """
        if self.owningNode().__class__.__name__ == "makeDictElement":
            return self.owningNode()
        con = []
        neis = []
        if self.hasConnections() and self.direction == PinDirection.Input:
            for c in getConnectedPins(self):
                if c not in checked:
                    con.append(c)
        if self.constraint:
            neis = self.owningNode().constraints[self.constraint]
        for port in con + neis:
            if port not in checked and node == None:
                checked.append(port)
                node = port.getDictElementNode(checked, node)
        return node

    def getDictNode(self, checked=[], node=None):
        """Get the connected :py:class:`PyFlow.Packages.PyFlowBase.Nodes.makeDict.makeDict` or
        :py:class:`PyFlow.Packages.PyFlowBase.Nodes.makeAnyDict.makeAnyDict` to this pin recursively

        :param checked: Currently visited pins, defaults to []
        :type checked: list, optional
        :param node: Found node, defaults to None
        :returns: Found node or None if not found
        """
        if self.owningNode().__class__.__name__ in ["makeDict", "makeAnyDict"]:
            return self.owningNode()
        con = []
        neis = []
        if self.hasConnections():
            for c in getConnectedPins(self):
                if c not in checked:
                    con.append(c)
        if self.constraint:
            neis = self.owningNode().constraints[self.constraint]
        for port in con + neis:
            if port not in checked and node == None:
                checked.append(port)
                node = port.getDictNode(checked, node)
        return node

    def supportDictElement(self, checked=[], can=True, selfCheck=True):
        """Iterative functions that search in all connected pins to see if they support DictElement nodes.

        :param checked: Already visited pins, defaults to []
        :type checked: list, optional
        :param can: this is the variable that will be actualized during the recursive function, defaults to False
        :type can: bool, optional
        :param selfCheck: Define if look itself or no, defaults to True
        :type selfCheck: bool, optional
        :returns: True if can connect DictElement nodes to this pin
        :rtype: bool
        """
        if not self.optionEnabled(PinOptions.DictElementSupported):
            return False
        con = []
        neis = []
        if selfCheck:
            if self.hasConnections() and self.direction == PinDirection.Input:
                for c in getConnectedPins(self):
                    if c not in checked:
                        con.append(c)
        else:
            checked.append(self)
        if self.constraint and self.owningNode().__class__.__name__ != "makeDictElement":
            neis = self.owningNode().constraints[self.constraint]
        for port in neis + con:
            if port not in checked and can:
                checked.append(port)
                can = port.supportDictElement(checked, can, selfCheck=True)
        return can

    def supportOnlyDictElement(self, checked=[], can=False, selfCheck=True):
        """Iterative Functions that search in all connected pins to see if they support only DictElement nodes, this
        is done for nodes like makeDict and simmilars.

        :param checked: Already Visited Pins, defaults to []
        :type checked: list, optional
        :param can: this is the variable that will be actualized during the recursive function, defaults to False
        :type can: bool, optional
        :param selfCheck: Defines if look itself or no, defaults to True
        :type selfCheck: bool, optional
        :returns: True if can connect only DictElement and Dicts nodes to this Pin
        :rtype: bool
        """
        if self.isDict():
            return True
        con = []
        neis = []
        if selfCheck:
            if self.hasConnections() and self.direction == PinDirection.Output:
                for c in getConnectedPins(self):
                    if c not in checked:
                        con.append(c)
        else:
            checked.append(self)
        if self.constraint and self.owningNode().__class__.__name__ != "makeDictElement":
            neis = self.owningNode().constraints[self.constraint]
        for port in neis + con:
            if port not in checked and not can:
                checked.append(port)
                can = port.supportOnlyDictElement(checked, can, selfCheck=True)
        return can

    def updateConnectedDicts(self, checked=[], keyType=None):
        """Iterate over connected dicts pins and DictElements pins updating key data type

        :param checked: Already visited pins, defaults to []
        :type checked: list, optional
        :param keyType: KeyDataType to set, defaults to None
        :type keyType: string, optional
        """
        if not self.isDict():
            return
        con = []
        neis = []
        if self.hasConnections():
            for c in getConnectedPins(self):
                if c not in checked:
                    con.append(c)
        if self.constraint:
            neis = self.owningNode().constraints[self.constraint]
        for port in con + neis:
            if port not in checked and port.isDict():
                checked.append(port)
                port._keyType = keyType
                if port._data.keyType != keyType:
                    port._data = PFDict(keyType, port.dataType)
                port.dictChanged.send(keyType)
                if port.getWrapper():
                    port.getWrapper()().update()
                port.updateConnectedDicts(checked, keyType)

    def setClean(self):
        """Sets dirty flag to True
        """
        self.dirty = False
        #if self.direction == PinDirection.Output:
        #    for i in self.affects:
        #        i.dirty = False

    def setDirty(self):
        """Sets dirty flag to True
        """
        if self.isExec():
            return
        self.dirty = True
        for i in self.affects:
            i.dirty = True
        self.markedAsDirty.send()

    def hasConnections(self):
        """Return the number of connections this pin has

        :rtype: int
        """
        numConnections = 0
        if self.direction == PinDirection.Input:
            numConnections += len(self.affected_by)
        elif self.direction == PinDirection.Output:
            numConnections += len(self.affects)
        return numConnections > 0

    def setDefaultValue(self, val):
        """In python, all user-defined classes are mutable
        So make sure to store separate copy of value
        For example if this is a Matrix, default value will be changed each time data has been set in original Matrix

        :param val: defaultValue
        :type val: object
        """
        self._defaultValue = copy(val)
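        # Illustrative only (Matrix is a hypothetical mutable type, pin an existing pin instance):
        #   m = Matrix()
        #   pin.setDefaultValue(m)
        #   m.setToIdentity()       # mutating the original object afterwards...
        #   pin._defaultValue       # ...is not reflected in the stored default, because copy(val) was kept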

    def updateConstraint(self, constraint):
        self.constraint = constraint
        if constraint in self.owningNode().constraints:
            self.owningNode().constraints[constraint].append(self)
        else:
            self.owningNode().constraints[constraint] = [self]

    def updateStructConstraint(self, constraint):
        self.structConstraint = constraint
        if constraint in self.owningNode().structConstraints:
            self.owningNode().structConstraints[constraint].append(self)
        else:
            self.owningNode().structConstraints[constraint] = [self]

    @staticmethod
    def IsValuePin():
        """Returns whether this pin is value pin or not

        :rtype: bool
        """
        return True

    @staticmethod
    def pinDataTypeHint():
        """Hint of what data type is this pin, as well as default value for this data type.

        Used to easily find pin classes by type id.

        :rtype: tuple(str, object)
        :raises NotImplementedError: If not implemented
        """
        raise NotImplementedError('pinDataTypeHint method of PinBase is not implemented')

    @staticmethod
    def supportedDataTypes():
        return ()

    @staticmethod
    def jsonEncoderClass():
        """Returns json encoder class for this pin
        """
        return json.JSONEncoder

    @staticmethod
    def jsonDecoderClass():
        """Returns json decoder class for this pin
        """
        return json.JSONDecoder
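
The pin above publishes its lifecycle through blinker-style signals: kill() fires killed and then drops all of its receivers via clearSignal, while connections fire onPinConnected / onPinDisconnected. Below is a minimal, self-contained sketch of that notification pattern using blinker directly; the fake_pin object and the receiver are illustrative stand-ins, not part of PyFlow.

from blinker import Signal

# Hypothetical stand-in for the killed signal a pin exposes
killed = Signal()

def on_pin_killed(sender):
    # blinker calls every connected receiver with the sender passed to send()
    print("pin removed:", sender)

killed.connect(on_pin_killed)

fake_pin = object()   # stand-in for a PinBase instance
killed.send(fake_pin)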
class MotionStage(object):
	def __init__(self, axes, constraints = None):
		from Action import EmergencyStop
		from blinker import Signal

		self.axes_idx = dict()
		for i, axis in enumerate(axes):
			self.axes_idx[axis] = i
		self.axes = [axis for axis in axes]
		self.constraints = constraints
		self.abort_action = EmergencyStop(self.axes)

		self.onCycleStarted = Signal()
		self.onCycleFinished = Signal()
		self.onCycleAborted = Signal()

		self.onDestinationChanged = Signal()

		self.onRunning = Signal()
		self.onInitializing = Signal()
		self.onInitialized = Signal()
		self.onInitiatorMinus = Signal()
		self.onInitiatorPlus = Signal()
		self.onPositionChanged = Signal()
		for axis in self.axes:
			axis.onInitializing.connect(self.onInitializing_repeat)
			axis.onInitialized.connect(self.onInitialized_repeat)
			axis.onInitiatorMinus.connect(self.onInitiatorMinus_repeat)
			axis.onInitiatorPlus.connect(self.onInitiatorPlus_repeat)
			axis.onRunning.connect(self.onRunning_repeat)
			axis.onPosition.connect(self.onPosition_repeat)

		self.worker_thread = None
		self.active = False
		self.destination = None
		self.cycle_clear()

		self.update()

	def __del__(self):
		self.abort()

	def cycle_clear(self):
		import Queue
		self.action_queue = Queue.Queue()
	
	def cycle_add_action(self, action):
		if not self.action_queue:
			self.cycle_clear()
		self.action_queue.put(action)

	@property
	def running(self):
		return tuple([axis.running for axis in self.axes])
	@property
	def initializing(self):
		return tuple([axis.initializing for axis in self.axes])
	@property
	def initialized(self):
		return tuple([axis.initialized for axis in self.axes])
	@property
	def initiator_minus(self):
		return tuple([axis.initiator_minus for axis in self.axes])
	@property
	def initiator_plus(self):
		return tuple([axis.initiator_plus for axis in self.axes])
	@property
	def position(self):
		return tuple([axis.position for axis in self.axes])

	def onRunning_repeat(self, sender, running):
		self.onRunning.send(self, axis=self.axes_idx[sender], running=running)
	def onPosition_repeat(self, sender, position):
		self.onPositionChanged.send(self, axis=self.axes_idx[sender], position=position)
	def onInitializing_repeat(self, sender, initializing):
		self.onInitializing.send(self, axis=self.axes_idx[sender], initializing=initializing)
	def onInitialized_repeat(self, sender, initialized):
		self.onInitialized.send(self, axis=self.axes_idx[sender], initialized=initialized)
	def onInitiatorMinus_repeat(self, sender, active):
		self.onInitiatorMinus.send(self, axis=self.axes_idx[sender], active=active)
	def onInitiatorPlus_repeat(self, sender, active):
		self.onInitiatorPlus.send(self, axis=self.axes_idx[sender], active=active)

	def update(self):
		old_position = self.position
		for axis in self.axes:
			axis.update()
			
	def set_destination(self, destination):
		current_position = self.position
		if not self.destination:
			self.update()
			self.destination = current_position
		if isinstance(destination, list) or isinstance(destination, tuple):
			if len(destination) != len(self.axes):
				raise ValueError
			self.destination = tuple(destination)
		if isinstance(destination, dict):
			new_destination = list(self.destination)
			for k in destination:
				new_destination[k] = destination[k]
			self.destination = tuple(new_destination)

		for i,dest in enumerate(self.destination):
			self.onDestinationChanged.send(self, axis = i, destination = dest)

		speed = None
		if not None in current_position:
			delta = [abs(a-b) for a,b in zip(self.destination, current_position)]
			max_delta = max(delta)
			if max_delta > 0:
				speed = [float(d)/float(max_delta) for d in delta]

		from Action import GotoAbsolute

		self.cycle_clear()
		self.cycle_add_action(GotoAbsolute(self.axes, self.destination, speed))

	def can_cycle_start(self):
		return True # FIXME: Add constraint tests here

	def cycle_start(self):
		import threading, weakref

		if self.active:
			return False

#		if not self.can_cycle_start():
#			return False

		self.current_action = None
		self.active = True
		self.worker_thread = threading.Thread(target = MotionStage.cycleWorker, name = "MotionControl.cycleWorker", args=(weakref.proxy(self),))
		self.worker_thread.daemon = True
		self.worker_thread.start()
		self.onCycleStarted.send(self)

	def abort(self):
		import threading
		self.active = False
		if isinstance(self.worker_thread, threading.Thread):
			self.worker_thread.join()

	def cycleWorker(ref):
		abort_action = ref.abort_action
		try:
			import time
			while True:
				if not ref.active:
					raise CycleAbort()
				ref.update()
				if not ref.current_action or ref.current_action.ended():
					if ref.action_queue.empty():
						break
					ref.current_action = ref.action_queue.get_nowait()

				ref.current_action.execute()

				while True:
					if not ref.active:
						raise CycleAbort()
					ref.update()
					if ref.current_action.ended():
						break

				ref.action_queue.task_done()

			ref.onCycleFinished.send(ref)
		except CycleAbort:
			ref.abort_action.execute()
			ref.onCycleAborted.send(ref)

		finally:
			try:
				while not ref.action_queue.empty():
					ref.action_queue.get_nowait()
					ref.action_queue.task_done()
			except:
				pass
			ref.active = False
	
	def reference(self):
		from Action import Initiate, GotoAbsolute
		self.destination = None
		self.cycle_clear()
		self.cycle_add_action(Initiate([self.axes[0]]))
		self.cycle_add_action(GotoAbsolute([self.axes[0]], [0]))
		self.cycle_start()
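
MotionStage is essentially a signal repeater: each per-axis signal is re-emitted with the axis index added as a keyword argument (see the *_repeat methods above). A minimal sketch of how a consumer could listen to one of those signals; stage is assumed to be an already constructed MotionStage and the listener is hypothetical:

def log_position(sender, axis=None, position=None):
    # sender is the MotionStage; axis and position come from onPositionChanged.send(...)
    print("axis %d is now at %s" % (axis, position))

stage.onPositionChanged.connect(log_position)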
Exemple #22
class GraphBase(ISerializable):
    """Data structure representing a nodes graph

    :var graphManager: reference to graph manager
    :vartype graphManager: :class:`~PyFlow.Core.GraphManager.GraphManager`

    :var nameChanged: signal emitted after graph name was changed
    :vartype nameChanged: :class:`~blinker.base.Signal`

    :var categoryChanged: signal emitted after graph category was changed
    :vartype categoryChanged: :class:`~blinker.base.Signal`

    :var childGraphs: a set of child graphs
    :vartype childGraphs: :class:`set`

    :var nodes: nodes storage. Dictionary with :class:`uuid.UUID` as key and :class:`~PyFlow.Core.NodeBase.NodeBase` as value
    :vartype nodes: :class:`dict`

    :var uid: Unique identifier
    :vartype uid: :class:`uuid.UUID`

    .. py:method:: parentGraph
        :property:

        :getter: Returns a reference to parent graph or None if this graph is root

        :setter: Sets new graph as new parent for this graph

    .. py:method:: name
        :property:

        :getter: Returns graph name

        :setter: Sets new graph name and fires signal

    .. py:method:: category
        :property:

        :getter: Returns graph category

        :setter: Sets new graph category and fires signal

    .. py:method:: pins
        :property:

        :getter: Returns dictionary with :class:`uuid.UUID` as key and :class:`~PyFlow.Core.PinBase.PinBase` as value
        :rtype: dict

    """
    def __init__(self,
                 name,
                 manager,
                 parentGraph=None,
                 category='',
                 uid=None,
                 *args,
                 **kwargs):
        super(GraphBase, self).__init__(*args, **kwargs)
        self.graphManager = manager
        self._isRoot = False

        self.nameChanged = Signal(str)
        self.categoryChanged = Signal(str)

        self.__name = name
        self.__category = category

        self._parentGraph = None
        self.childGraphs = set()

        self.parentGraph = parentGraph

        self._nodes = {}
        self._vars = {}
        self.uid = uuid.uuid4() if uid is None else uid

        manager.add(self)

    def setIsRoot(self, bIsRoot):
        """Sets this graph as root

        .. warning:: Used internally

        :param bIsRoot: Root or not
        :type bIsRoot: :class:`bool`
        """
        self._isRoot = bIsRoot

    def isRoot(self):
        """Whether this graph is root or not

        :rtype: :class:`bool`
        """
        return self._isRoot

    def getVars(self):
        """Returns this graph's variables storage

        :returns: :class:`uuid.UUID` - :class:`~PyFlow.Core.Variable.Variable` dict
        :rtype: :class:`dict`
        """
        return self._vars

    @property
    def parentGraph(self):
        return self._parentGraph

    @parentGraph.setter
    def parentGraph(self, newParentGraph):
        if self.isRoot():
            self._parentGraph = None
            return

        if newParentGraph is not None:
            if self._parentGraph is not None:
                # remove self from old parent's children set
                if self in self._parentGraph.childGraphs:
                    self._parentGraph.childGraphs.remove(self)
            # add self to new parent's children set
            newParentGraph.childGraphs.add(self)
            # update parent
            self._parentGraph = newParentGraph

    def depth(self):
        """Returns depth level of this graph

        :rtype: int
        """
        result = 1
        parent = self._parentGraph
        while parent is not None:
            result += 1
            parent = parent.parentGraph
        return result

    def getVarList(self):
        """return list of variables from active graph

        :rtype: list(:class:`~PyFlow.Core.Variable.Variable`)
        """
        result = list(self._vars.values())
        parent = self._parentGraph
        while parent is not None:
            result += list(parent._vars.values())
            parent = parent.parentGraph
        return result

    def serialize(self, *args, **kwargs):
        """Returns serialized representation of this graph

        :rtype: dict
        """
        result = {
            'name': self.name,
            'category': self.category,
            'vars': [v.serialize() for v in self._vars.values()],
            'nodes': [n.serialize() for n in self._nodes.values()],
            'depth': self.depth(),
            'isRoot': self.isRoot(),
            'parentGraphName': str(self._parentGraph.name) if self._parentGraph is not None else str(None)
        }
        return result

    def populateFromJson(self, jsonData):
        """Populates itself from serialized data

        :param jsonData: serialized graph
        :type jsonData: dict
        """
        self.clear()
        self.name = self.graphManager.getUniqGraphName(jsonData['name'])
        self.category = jsonData['category']
        self.setIsRoot(jsonData['isRoot'])
        if self.isRoot():
            self.name = "root"
        # restore vars
        for varJson in jsonData['vars']:
            var = Variable.deserialize(self, varJson)
            self._vars[var.uid] = var
        # restore nodes
        for nodeJson in jsonData['nodes']:
            # check if variable getter or setter and pass variable
            nodeArgs = ()
            nodeKwargs = {}
            if nodeJson['type'] in ('getVar', 'setVar'):
                nodeKwargs['var'] = self._vars[uuid.UUID(nodeJson['varUid'])]
            nodeJson['owningGraphName'] = self.name
            node = getRawNodeInstance(nodeJson['type'],
                                      packageName=nodeJson['package'],
                                      libName=nodeJson['lib'],
                                      *nodeArgs,
                                      **nodeKwargs)
            self.addNode(node, nodeJson)

        # restore connection
        for nodeJson in jsonData['nodes']:
            for nodeOutputJson in nodeJson['outputs']:
                for linkData in nodeOutputJson['linkedTo']:
                    try:
                        lhsNode = self._nodes[uuid.UUID(
                            linkData["lhsNodeUid"])]
                    except Exception as e:
                        lhsNode = self.findNode(linkData["lhsNodeName"])

                    try:
                        lhsPin = lhsNode.orderedOutputs[linkData["outPinId"]]
                    except Exception as e:
                        print("lhsPin not found {0}".format(str(linkData)))
                        continue

                    try:
                        rhsNode = self._nodes[uuid.UUID(
                            linkData["rhsNodeUid"])]
                    except Exception as e:
                        rhsNode = self.findNode(linkData["rhsNodeName"])

                    try:
                        rhsPin = rhsNode.orderedInputs[linkData["inPinId"]]
                    except Exception as e:
                        continue

                    if not arePinsConnected(lhsPin, rhsPin):
                        connected = connectPins(lhsPin, rhsPin)
                        # assert(connected is True), "Failed to restore connection"
                        if not connected:
                            print("Failed to restore connection", lhsPin,
                                  rhsPin)
                            connectPins(lhsPin, rhsPin)

    def remove(self):
        """Removes this graph as well as child graphs. Deepest graphs will be removed first
        """
        # graphs should be removed from leafs to root
        for childGraph in set(self.childGraphs):
            childGraph.remove()
        # remove itself
        self.graphManager.removeGraph(self)

    def clear(self):
        """Clears content of this graph as well as child graphs. Deepest graphs will be cleared first
        """
        # graphs should be cleared from leafs to root
        for childGraph in set(self.childGraphs):
            childGraph.clear()

        # clear itself
        for node in list(self._nodes.values()):
            node.kill()
        self._nodes.clear()

        for var in list(self._vars.values()):
            self.killVariable(var)
        self._vars.clear()

    @property
    def name(self):
        return self.__name

    @name.setter
    def name(self, value):
        value = str(value)
        if self.__name != value:
            self.__name = value
            self.nameChanged.send(self.__name)

    @property
    def category(self):
        return self.__category

    @category.setter
    def category(self, value):
        self.__category = str(value)
        self.categoryChanged.send(self.__category)

    def Tick(self, deltaTime):
        """Executed periodically

        :param deltaTime: Elapsed time since last tick
        :type deltaTime: float
        """
        for node in self._nodes.values():
            node.Tick(deltaTime)

    @property
    def pins(self):
        result = {}
        for n in self.getNodesList():
            for pin in tuple(n.inputs.values()) + tuple(n.outputs.values()):
                result[pin.uid] = pin
        return result

    def createVariable(self,
                       dataType=str('AnyPin'),
                       accessLevel=AccessLevel.public,
                       uid=None,
                       name=str("var")):
        """Creates variable inside this graph scope

        :param dataType: Variable data type
        :type dataType: str
        :param accessLevel: Variable access level
        :type accessLevel: :class:`~PyFlow.Core.Common.AccessLevel`
        :param uid: Variable unique identifier
        :type uid: :class:`uuid.UUID`
        :param name: Variable name
        :type name: str
        """
        name = self.graphManager.getUniqVariableName(name)
        var = Variable(self,
                       getPinDefaultValueByType(dataType),
                       name,
                       dataType,
                       accessLevel=accessLevel,
                       uid=uid)
        self._vars[var.uid] = var
        return var

    # TODO: add arguments to deal with references of this var
    # disconnect pins or mark nodes invalid
    def killVariable(self, var):
        """Removes variable from this graph

        :param var: Variable to remove
        :type var: :class:`~PyFlow.Core.Variable.Variable`
        """
        assert (isinstance(var, Variable))
        if var.uid in self._vars:
            popped = self._vars.pop(var.uid)
            popped.killed.send()

    def getNodes(self):
        """Returns this graph's nodes storage

        :rtype: dict(:class:`~PyFlow.Core.NodeBase.NodeBase`)
        """
        return self._nodes

    def getNodesList(self, classNameFilters=[]):
        """Returns this graph's nodes list
        :rtype: list(:class:`~PyFlow.Core.NodeBase.NodeBase`)
        """
        if len(classNameFilters) > 0:
            return [
                n for n in self._nodes.values()
                if n.__class__.__name__ in classNameFilters
            ]
        else:
            return [n for n in self._nodes.values()]

    def findNode(self, name):
        """Tries to find node by name

        :param name: Node name
        :type name: str or None
        """
        for i in self._nodes.values():
            if i.name == name:
                return i
        return None

    def getNodesByClassName(self, className):
        """Returns a list of nodes filtered by class name
        :param className: Class name of target nodes
        :type className: str
        :rtype: list(:class:`~PyFlow.Core.NodeBase.NodeBase`)
        """
        nodes = []
        for i in self.getNodesList():
            if i.__class__.__name__ == className:
                nodes.append(i)
        return nodes

    def findPinByUid(self, uid):
        """Tries to find pin by uuid

        :param uid: Unique identifier
        :type uid: :class:`~uuid.UUID`
        :rtype: :class:`~PyFlow.Core.PinBase.PinBase` or None
        """
        pin = None
        if uid in self.pins:
            pin = self.pins[uid]
        return pin

    def findPin(self, pinName):
        """Tries to find pin by name

        :param pinName: String to search by
        :type pinName: str
        :rtype: :class:`~PyFlow.Core.PinBase.PinBase` or None
        """
        result = None
        for pin in self.pins.values():
            if pinName == pin.getFullName():
                result = pin
                break
        return result

    def getInputNode(self):
        """Creates and adds to graph :class:`~PyFlow.Packages.Base.Nodes.graphNodes.graphInputs` node

        Pins on this node will be exposed on the compound node as input pins.

        :rtype: :class:`~PyFlow.Core.NodeBase.NodeBase`
        """
        node = getRawNodeInstance("graphInputs", "Base")
        self.addNode(node)
        return node

    def getOutputNode(self):
        """Creates and adds to graph :class:`~PyFlow.Packages.Base.Nodes.graphNodes.graphOutputs` node.

        Pins on this node will be exposed on the compound node as output pins.

        :rtype: :class:`~PyFlow.Core.NodeBase.NodeBase`
        """
        node = getRawNodeInstance("graphOutputs", "Base")
        self.addNode(node)
        return node

    def addNode(self, node, jsonTemplate=None):
        """Adds node to storage

        :param node: Node to add
        :type node: NodeBase
        :param jsonTemplate: Serialized representation of the node. This is used during graph deserialization to do custom work after the node has been added.
        :type jsonTemplate: dict
        :rtype: bool
        """
        from PyFlow.Core.PathsRegistry import PathsRegistry

        assert (node is not None), "failed to add node, None is passed"
        if node.uid in self._nodes:
            return False

        # Check if this node is variable get/set. Variables created in child graphs are not visible to parent ones
        # Do not disrupt variable scope
        if node.__class__.__name__ in ['getVar', 'setVar']:
            var = self.graphManager.findVariableByUid(node.variableUid())
            variableLocation = var.location()
            if len(variableLocation) > len(self.location()):
                return False
            if len(variableLocation) == len(self.location()):
                if Counter(variableLocation) != Counter(self.location()):
                    return False

        node.graph = weakref.ref(self)
        if jsonTemplate is not None:
            jsonTemplate['name'] = self.graphManager.getUniqNodeName(
                jsonTemplate['name'])
        else:
            node.setName(self.graphManager.getUniqNodeName(node.name))

        self._nodes[node.uid] = node
        node.postCreate(jsonTemplate)
        PathsRegistry().rebuild()
        return True

    def location(self):
        """Returns path to current location in graph tree

        Example:

        >>> ["root", "compound1", "compound2"]

        means:

        >>> # root
        >>> # |- compound1
        >>> #    |- compound2

        :rtype: list(str)
        """
        result = [self.name]
        parent = self._parentGraph
        while parent is not None:
            result.insert(0, parent.name)
            parent = parent.parentGraph
        return result

    def count(self):
        """Returns number of nodes

        :rtype: int
        """
        return len(self._nodes)

    def plot(self):
        """Prints graph to console. May be useful for debugging
        """
        depth = self.depth()
        prefix = "".join(['-'] * depth) if depth > 1 else ''
        parentGraphString = str(
            None) if self.parentGraph is None else self.parentGraph.name
        print(prefix + "GRAPH:" + self.name +
              ", parent:{0}".format(parentGraphString))

        assert (self not in self.childGraphs)

        for child in self.childGraphs:
            child.plot()
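
GraphBase announces renames through its nameChanged signal; note that the name setter passes the new name itself as the sender argument of send(). A minimal usage sketch, assuming an existing GraphManager instance called manager (hypothetical here):

graph = GraphBase("demo", manager)

def on_renamed(new_name):
    # name.setter calls nameChanged.send(self.__name), so the receiver gets the new name
    print("graph renamed to", new_name)

graph.nameChanged.connect(on_renamed, weak=False)  # keep a strong reference to the receiver
graph.name = "demoRenamed"                         # fires nameChanged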
Exemple #23
class ScriptRunner:
    def __init__(
        self,
        session_id: str,
        session_data: SessionData,
        enqueue_forward_msg: Callable[[ForwardMsg], None],
        client_state: ClientState,
        request_queue: ScriptRequestQueue,
        session_state: SessionState,
        uploaded_file_mgr: UploadedFileManager,
    ):
        """Initialize the ScriptRunner.

        (The ScriptRunner won't start executing until start() is called.)

        Parameters
        ----------
        session_id : str
            The AppSession's id.

        session_data : SessionData
            The AppSession's session data.

        enqueue_forward_msg : Callable
            Function to call to send a ForwardMsg to the frontend.
            (When not running a unit test, this will be the enqueue function
            of the AppSession instance that created this ScriptRunner.)

        client_state : ClientState
            The current state from the client (widgets and query params).

        request_queue : ScriptRequestQueue
            The queue that the AppSession is publishing ScriptRequests to.
            ScriptRunner will continue running until the queue is empty,
            and then shut down.

        session_state : SessionState
            The SessionState instance for the session. The ScriptRunner reads
            and writes widget state through it.

        uploaded_file_mgr : UploadedFileManager
            The File manager to store the data uploaded by the file_uploader widget.

        """
        self._session_id = session_id
        self._session_data = session_data
        self._enqueue_forward_msg = enqueue_forward_msg
        self._request_queue = request_queue
        self._uploaded_file_mgr = uploaded_file_mgr

        self._client_state = client_state
        self._session_state: SessionState = session_state
        self._session_state.set_widgets_from_proto(client_state.widget_states)

        self.on_event = Signal(doc="""Emitted when a ScriptRunnerEvent occurs.

            This signal is *not* emitted on the same thread that the
            ScriptRunner was created on.

            Parameters
            ----------
            sender: ScriptRunner
                The sender of the event (this ScriptRunner).

            event : ScriptRunnerEvent

            exception : BaseException | None
                Our compile error. Set only for the
                SCRIPT_STOPPED_WITH_COMPILE_ERROR event.

            client_state : streamlit.proto.ClientState_pb2.ClientState | None
                The ScriptRunner's final ClientState. Set only for the
                SHUTDOWN event.
            """)

        # Set to true when we process a SHUTDOWN request
        self._shutdown_requested = False

        # Set to true while we're executing. Used by
        # _maybe_handle_execution_control_request.
        self._execing = False

        # This is initialized in start()
        self._script_thread: Optional[threading.Thread] = None

    def __repr__(self) -> str:
        return util.repr_(self)

    def start(self) -> None:
        """Start a new thread to process the ScriptEventQueue.

        This must be called only once.

        """
        if self._script_thread is not None:
            raise Exception("ScriptRunner was already started")

        self._script_thread = threading.Thread(
            target=self._run_script_thread,
            name="ScriptRunner.scriptThread",
        )
        self._script_thread.start()

    def _get_script_run_ctx(self) -> ScriptRunContext:
        """Get the ScriptRunContext for the current thread.

        Returns
        -------
        ScriptRunContext
            The ScriptRunContext for the current thread.

        Raises
        ------
        AssertionError
            If called outside of a ScriptRunner thread.
        RuntimeError
            If there is no ScriptRunContext for the current thread.

        """
        assert self._is_in_script_thread()

        ctx = get_script_run_ctx()
        if ctx is None:
            # This should never be possible on the script_runner thread.
            raise RuntimeError(
                "ScriptRunner thread has a null ScriptRunContext. Something has gone very wrong!"
            )
        return ctx

    def _run_script_thread(self) -> None:
        """The entry point for the script thread.

        Processes the ScriptRequestQueue, which will at least contain the RERUN
        request that will trigger the first script-run.

        When the ScriptRequestQueue is empty, or when a SHUTDOWN request is
        dequeued, this function will exit and its thread will terminate.
        """
        assert self._is_in_script_thread()

        LOGGER.debug("Beginning script thread")

        # Create and attach the thread's ScriptRunContext
        ctx = ScriptRunContext(
            session_id=self._session_id,
            enqueue=self._enqueue,
            query_string=self._client_state.query_string,
            session_state=self._session_state,
            uploaded_file_mgr=self._uploaded_file_mgr,
        )
        add_script_run_ctx(threading.current_thread(), ctx)

        while not self._shutdown_requested and self._request_queue.has_request:
            request, data = self._request_queue.dequeue()
            if request == ScriptRequest.STOP:
                LOGGER.debug("Ignoring STOP request while not running")
            elif request == ScriptRequest.SHUTDOWN:
                LOGGER.debug("Shutting down")
                self._shutdown_requested = True
            elif request == ScriptRequest.RERUN:
                self._run_script(data)
            else:
                raise RuntimeError("Unrecognized ScriptRequest: %s" % request)

        # Send a SHUTDOWN event before exiting. This includes the widget values
        # as they existed after our last successful script run, which the
        # AppSession will pass on to the next ScriptRunner that gets
        # created.
        client_state = ClientState()
        client_state.query_string = ctx.query_string
        widget_states = self._session_state.as_widget_states()
        client_state.widget_states.widgets.extend(widget_states)
        self.on_event.send(self,
                           event=ScriptRunnerEvent.SHUTDOWN,
                           client_state=client_state)

    def _is_in_script_thread(self) -> bool:
        """True if the calling function is running in the script thread"""
        return self._script_thread == threading.current_thread()

    def _enqueue(self, msg: ForwardMsg) -> None:
        """Enqueue a ForwardMsg to our browser queue.
        This private function is called by ScriptRunContext only.

        It may be called from the script thread OR the main thread.
        """
        # Whenever we enqueue a ForwardMsg, we also handle any pending
        # execution control request. This means that a script can be
        # cleanly interrupted and stopped inside most `st.foo` calls.
        #
        # (If "runner.installTracer" is true, then we'll actually be
        # handling these requests in a callback called after every Python
        # instruction instead.)
        if not config.get_option("runner.installTracer"):
            self._maybe_handle_execution_control_request()

        # Pass the message up to our associated AppSession.
        self._enqueue_forward_msg(msg)

    def _maybe_handle_execution_control_request(self) -> None:
        if not self._is_in_script_thread():
            # We can only handle execution_control_request if we're on the
            # script execution thread. However, it's possible for deltas to
            # be enqueued (and, therefore, for this function to be called)
            # in separate threads, so we check for that here.
            return

        if not self._execing:
            # If the _execing flag is not set, we're not actually inside
            # an exec() call. This happens when our script exec() completes,
            # we change our state to STOPPED, and a statechange-listener
            # enqueues a new ForwardEvent
            return

        # Pop the next request from our queue.
        request, data = self._request_queue.dequeue()
        if request is None:
            return

        LOGGER.debug("Received ScriptRequest: %s", request)
        if request == ScriptRequest.STOP:
            raise StopException()
        elif request == ScriptRequest.SHUTDOWN:
            self._shutdown_requested = True
            raise StopException()
        elif request == ScriptRequest.RERUN:
            raise RerunException(data)
        else:
            raise RuntimeError("Unrecognized ScriptRequest: %s" % request)

    def _install_tracer(self) -> None:
        """Install function that runs before each line of the script."""
        def trace_calls(frame, event, arg):
            self._maybe_handle_execution_control_request()
            return trace_calls

        # Python interpreters are not required to implement sys.settrace.
        if hasattr(sys, "settrace"):
            sys.settrace(trace_calls)

    @contextmanager
    def _set_execing_flag(self):
        """A context for setting the ScriptRunner._execing flag.

        Used by _maybe_handle_execution_control_request to ensure that
        we only handle requests while we're inside an exec() call
        """
        if self._execing:
            raise RuntimeError("Nested set_execing_flag call")
        self._execing = True
        try:
            yield
        finally:
            self._execing = False

    def _run_script(self, rerun_data: RerunData) -> None:
        """Run our script.

        Parameters
        ----------
        rerun_data: RerunData
            The RerunData to use.

        """
        assert self._is_in_script_thread()

        LOGGER.debug("Running script %s", rerun_data)

        # Reset DeltaGenerators, widgets, media files.
        in_memory_file_manager.clear_session_files()

        ctx = self._get_script_run_ctx()
        ctx.reset(query_string=rerun_data.query_string)

        self.on_event.send(self, event=ScriptRunnerEvent.SCRIPT_STARTED)

        # Compile the script. Any errors thrown here will be surfaced
        # to the user via a modal dialog in the frontend, and won't result
        # in their previous script elements disappearing.

        try:
            with source_util.open_python_file(
                    self._session_data.main_script_path) as f:
                filebody = f.read()

            if config.get_option("runner.magicEnabled"):
                filebody = magic.add_magic(filebody,
                                           self._session_data.main_script_path)

            code = compile(
                filebody,
                # Pass in the file path so it can show up in exceptions.
                self._session_data.main_script_path,
                # We're compiling entire blocks of Python, so we need "exec"
                # mode (as opposed to "eval" or "single").
                mode="exec",
                # Don't inherit any flags or "future" statements.
                flags=0,
                dont_inherit=1,
                # Use the default optimization options.
                optimize=-1,
            )

        except BaseException as e:
            # We got a compile error. Send an error event and bail immediately.
            LOGGER.debug("Fatal script error: %s" % e)
            self._session_state[SCRIPT_RUN_WITHOUT_ERRORS_KEY] = False
            self.on_event.send(
                self,
                event=ScriptRunnerEvent.SCRIPT_STOPPED_WITH_COMPILE_ERROR,
                exception=e,
            )
            return

        # If we get here, we've successfully compiled our script. The next step
        # is to run it. Errors thrown during execution will be shown to the
        # user as ExceptionElements.

        if config.get_option("runner.installTracer"):
            self._install_tracer()

        # This will be set to a RerunData instance if our execution
        # is interrupted by a RerunException.
        rerun_with_data = None

        try:
            # Create a fake module. This gives us a fresh global namespace to
            # execute the code in.
            module = _new_module("__main__")

            # Install the fake module as the __main__ module. This allows
            # the pickle module to work inside the user's code, since it now
            # can know the module where the pickled objects stem from.
            # IMPORTANT: This means we can't use "if __name__ == '__main__'" in
            # our code, as it will point to the wrong module!!!
            sys.modules["__main__"] = module

            # Add special variables to the module's globals dict.
            # Note: The following is a requirement for the CodeHasher to
            # work correctly. The CodeHasher is scoped to
            # files contained in the directory of __main__.__file__, which we
            # assume is the main script directory.
            module.__dict__["__file__"] = self._session_data.main_script_path

            with modified_sys_path(
                    self._session_data), self._set_execing_flag():
                # Run callbacks for widgets whose values have changed.
                if rerun_data.widget_states is not None:
                    # Update the WidgetManager with the new widget_states.
                    # The old states, used to skip callbacks if values
                    # haven't changed, are also preserved in the
                    # WidgetManager.
                    self._session_state.compact_state()
                    self._session_state.set_widgets_from_proto(
                        rerun_data.widget_states)

                    self._session_state.call_callbacks()

                ctx.on_script_start()
                exec(code, module.__dict__)
                self._session_state[SCRIPT_RUN_WITHOUT_ERRORS_KEY] = True
        except RerunException as e:
            rerun_with_data = e.rerun_data

        except StopException:
            pass

        except BaseException as e:
            self._session_state[SCRIPT_RUN_WITHOUT_ERRORS_KEY] = False
            handle_uncaught_app_exception(e)

        finally:
            self._on_script_finished(ctx)

        # Use _log_if_error() to make sure we never ever ever stop running the
        # script without meaning to.
        _log_if_error(_clean_problem_modules)

        if rerun_with_data is not None:
            self._run_script(rerun_with_data)

    def _on_script_finished(self, ctx: ScriptRunContext) -> None:
        """Called when our script finishes executing, even if it finished
        early with an exception. We perform post-run cleanup here.
        """
        self._session_state.reset_triggers()
        self._session_state.cull_nonexistent(ctx.widget_ids_this_run)
        # Signal that the script has finished. (We use SCRIPT_STOPPED_WITH_SUCCESS
        # even if we were stopped with an exception.)
        self.on_event.send(self,
                           event=ScriptRunnerEvent.SCRIPT_STOPPED_WITH_SUCCESS)
        # Delete expired files now that the script has run and files in use
        # are marked as active.
        in_memory_file_manager.del_expired_files()

        # Force garbage collection to run, to help avoid memory use building up
        # This is usually not an issue, but sometimes GC takes time to kick in and
        # causes apps to go over resource limits, and forcing it to run between
        # script runs is low cost, since we aren't doing much work anyway.
        if config.get_option("runner.postScriptGC"):
            gc.collect(2)
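
Every state change of the ScriptRunner above is published on its on_event blinker signal and delivered on the script thread. A minimal listener sketch; runner is assumed to be a ScriptRunner constructed with the arguments shown in __init__ (construction is omitted here):

def handle_event(sender, event, **kwargs):
    # event is a ScriptRunnerEvent, e.g. SCRIPT_STARTED, SCRIPT_STOPPED_WITH_SUCCESS or SHUTDOWN
    print("ScriptRunner event:", event)

runner.on_event.connect(handle_event, weak=False)
runner.start()   # events now arrive on the ScriptRunner's script thread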
class MotionControl(object):
	def __init__(self, axes, constraints = None):
		import Queue

		self.axes = axes
		self.constraints = constraints

		self.action_queue = Queue.Queue()
		self.abort_action = NullAction()

		self.onCycleStarted = Signal()
		self.onCycleFinished = Signal()
		self.onCycleAborted = Signal()

		self.active = False
		self.target = None

	def __del__(self):
		self.abort()

	def __getattr__(self, name):
		if name == 'position':
			return [axis.position for axis in self.axes]

	def update(self):
		for axis in self.axes:
			axis.update()

	def set_target(self, target):
		if isinstance(target, list):
			if len(target) != len(self.axes):
				raise ValueError
			self.target = target
		if isinstance(target, dict):
			for k, v in target.items():
				self.target[k] = v
		if isinstance(target, tuple):
			self.target[target[0]] = target[1]

		speed = None
		current_position = self.position
		if not None in current_position:
			delta = [abs(a-b) for a,b in zip(target, current_position)]
			max_delta = max(delta)
			speed = [float(d)/float(max_delta) for d in delta]
		self.action_queue = Queue.Queue()
		self.action_queue.put(GotoAbsolute(self.axes, self.target, speed))

	def can_cycle_start(self):
		if self.active:
			return False
		return True # FIXME: Add constraint tests here

	def start_cycle(self):
		import threading, weakref

		if not self.can_cycle_start():
			return False

		self.current_action = None
		self.active = True
		self.worker_thread = threading.Thread(target = MotionControl.cycle_worker, name = "MotionControl.worker", args=(weakref.proxy(self),))
		self.worker_thread.daemon =True
		self.worker_thread.start()
		self.onCycleStarted.send()

	def abort(self):
		self.active = False
		self.worker_thread.join()

	def cycle_worker(ref):
		abort_action = ref.abort_action
		try:
			import time
			while True:
				if not ref.active:
					raise CycleAbort()
				ref.update()
				if not ref.current_action or ref.current_action.ended():
					if ref.action_queue.empty():
						break
					ref.current_action = ref.action_queue.get_nowait()

				ref.current_action.execute()

				while True:
					if not ref.active:
						raise CycleAbort()
					ref.update()
					if ref.current_action.ended():
						break

				ref.action_queue.task_done()

			ref.onCycleFinished.send()
		except CycleAbort:
			ref.abort_action.execute()
			ref.onCycleAborted.send()

		finally:
			try:
				while not ref.action_queue.empty():
					ref.action_queue.get_nowait()
					ref.action_queue.task_done()
			except:
				pass
			ref.active = False
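
Both motion classes hand their worker thread a weakref.proxy of the owning object, so the background cycle never keeps the controller alive by itself. A stand-alone sketch of that pattern under Python 3 (the Owner class below is purely illustrative):

import threading
import time
import weakref

class Owner(object):
    def __init__(self):
        self.active = True
        # The thread only holds a weak proxy, so it cannot keep Owner alive on its own.
        self.worker = threading.Thread(target=Owner.work, args=(weakref.proxy(self),))
        self.worker.daemon = True
        self.worker.start()

    def work(ref):
        # Touching a dead proxy raises ReferenceError, which cleanly ends the loop.
        try:
            while ref.active:
                time.sleep(0.1)
        except ReferenceError:
            pass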
Exemple #25
class WokEngine(Synchronizable):
	"""
	The Wok engine manages the execution of workflow cases.
	Each case represents a workflow loaded with a certain configuration.
	"""

	def __init__(self, conf, conf_base_path=None):
		Synchronizable.__init__(self)

		self._global_conf = conf

		self._expanded_global_conf = conf.clone().expand_vars()

		self._conf = self._expanded_global_conf.get("wok", default=Data.element)

		self._conf_base_path = conf_base_path

		self._log = logger.get_logger("wok.engine")

		self._work_path = self._conf.get("work_path", os.path.join(os.getcwd(), "wok-files"))
		if not os.path.exists(self._work_path):
			os.makedirs(self._work_path)

		self._cases = []
		self._cases_by_name = {}

		self._stopping_cases = {}

		#self._lock = Lock()
		self._cvar = threading.Condition(self._lock)

		self._run_thread = None
		self._running = False

		self._finished_event = threading.Event()

		self._job_task_map = {}

		self._logs_threads = []
		self._logs_queue = Queue()

		self._join_thread = None
		self._join_queue = Queue()

		self._num_log_threads = self._conf.get("num_log_threads", cpu_count())
		self._max_alive_threads = 2 + self._num_log_threads
		self._num_alive_threads = AtomicCounter()

		self._started = False

		self._notified = False

		recover = self._conf.get("recover", False)

		db_path = os.path.join(self._work_path, "engine.db")
		if not recover and os.path.exists(db_path):
			os.remove(db_path)
		self._db = db.create_engine("sqlite:///{}".format(db_path), drop_tables=not recover)

		# platforms

		self._platforms = self._create_platforms()
		self._platforms_by_name = {}
		for platform in self._platforms:
			self._platforms_by_name[platform.name] = platform
		default_platform_name = self._conf.get("default_platform", self._platforms[0].name)
		if default_platform_name not in self._platforms_by_name:
			self._log.warn("Platform '{}' not found, using '{}' as the default platform".format(
				default_platform_name, self._platforms[0].name))
			default_platform_name = self._platforms[0].name
		self._default_platform = self._platforms_by_name[default_platform_name]

		# projects

		if conf_base_path is None:
			conf_base_path = os.getcwd()
		projects_conf = self._global_conf.get("wok.projects")
		self._projects = ProjectManager(projects_conf, base_path=conf_base_path)
		self._projects.initialize()

		# signals

		self.case_created = Signal()
		self.case_state_changed = Signal()
		self.case_started = Signal()
		self.case_finished = Signal()
		self.case_removed = Signal()

		# recovering
		if recover:
			self.__recover_from_db()

	def _create_platforms(self):
		"""
		Creates the platforms according to the configuration.
		:return: list of Platform
		"""

		platform_confs = self._conf.get("platforms")
		if platform_confs is None:
			platform_confs = Data.list()
		elif not Data.is_list(platform_confs):
			self._log.error("Wrong configuration type for 'platforms': {}".format(platform_confs))
			platform_confs = Data.list()

		if len(platform_confs) == 0:
			platform_confs += [Data.element(dict(type="local"))]

		platforms = []

		names = {}
		for pidx, platform_conf in enumerate(platform_confs):
			if isinstance(platform_conf, basestring):
				if not os.path.isabs(platform_conf) and self._conf_base_path is not None:
					platform_conf = os.path.join(self._conf_base_path, platform_conf)
				platform_conf = ConfigLoader(platform_conf).load()

			if not Data.is_element(platform_conf):
				raise errors.ConfigTypeError("wok.platforms[{}]: {}".format(pidx, platform_conf))

			ptype = platform_conf.get("type", "local")

			name = platform_conf.get("name", ptype)
			if name in names:
				count = names[name]
				names[name] += 1
				name = "{}-{}".format(name, count)
			else:
				names[name] = 2
			platform_conf["name"] = name

			if "work_path" not in platform_conf:
				platform_conf["work_path"] = os.path.join(self._work_path, "platform_{}".format(name))

			self._log.info("Creating '{}' platform ...".format(name))
			self._log.debug("Platform configuration: {}".format(repr(platform_conf)))

			platforms += [create_platform(ptype, platform_conf)]

		return platforms

	def _on_job_update(self, event, **kwargs):
		self.notify()

	def __recover_from_db(self):
		raise NotImplementedError()
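
	# Polls `queue` with a timeout that grows from start_timeout up to
	# max_timeout while the engine is running; returns None when the engine
	# stops or the read fails.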

	def __queue_adaptative_get(self, queue, start_timeout=1.0, max_timeout=6.0):
		timeout = start_timeout
		msg = None
		while self._running and msg is None:
			try:
				msg = queue.get(timeout=timeout)
			except Empty:
				if timeout < max_timeout:
					timeout += 0.5
			except:
				break
		return msg

	# Not used anywhere
	def __queue_batch_get(self, queue, start_timeout=1, max_timeout=5):
		timeout = start_timeout
		msg_batch = []
		while self._running and len(msg_batch) == 0:
			try:
				msg_batch += [queue.get(timeout=timeout)]
				while not queue.empty():
					msg_batch += [queue.get(timeout=timeout)]
			except Empty:
				if timeout < max_timeout:
					timeout += 1
		return msg_batch
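
	# Yields a JobSubmission for every READY work-item assigned to `platform`,
	# ordered by priority, with the command script and environment prepared by
	# the execution mode's command builder.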

	def __job_submissions(self, session, platform):
		#FIXME Be fair with priorities between different cases ?
		query = session.query(db.WorkItem)\
			.filter(db.WorkItem.state == runstates.READY)\
			.filter(db.WorkItem.platform == platform.name)\
			.order_by(db.WorkItem.priority)

		for workitem in query:
			case = self._cases_by_name[workitem.case.name]
			task = case.component(workitem.task.cname)

			js = JobSubmission(
					case=case,
					task=task,
					workitem_id=workitem.id,
					job_name=workitem.cname,
					task_conf=task.conf,
					priority=workitem.priority)

			execution = task.execution
			cmd_builder = create_command_builder(execution.mode)
			js.script, js.env = cmd_builder.prepare(case, task, workitem.index)

			yield js

	def __remove_case(self, session, case):
		"""
		Definitively remove a case. The engine should be locked and no case jobs running.
		"""

		self._log.info("Dropping case {} ...".format(case.name))

		del self._cases_by_name[case.name]
		self._cases.remove(case)

		# remove engine db objects and finalize case
		self._log.debug("  * database ...")
		case.remove(session)

		self._lock.release()
		try:
			#TODO clean the job manager output files

			try:
				self._log.debug("  * logs ...")
				logs_path = os.path.join(self._work_path, "logs", case.name)
				shutil.rmtree(logs_path)
			except:
				self._log.exception("Error removing logs at {}".format(logs_path))

			# remove data
			self._log.debug("  * data ...")
			for platform in case.platforms:
				platform.data.remove_case(case.name)

			# remove storage
			self._log.debug("  * storage ...")
			for platform in case.platforms:
				platform.storage.delete_container(case.name)

			# emit signal
			self.case_removed.send(case)

		finally:
			self._lock.acquire()

	# threads ----------------------

	@synchronized
	def _run(self):

		set_thread_title()

		num_exc = 0

		self._running = True

		self._num_alive_threads += 1

		# Start the logs threads

		for i in range(self._num_log_threads):
			t = threading.Thread(target=self._logs, args=(i, ), name="wok-engine-logs-%d" % i)
			self._logs_threads += [t]
			t.start()

		# Start the join thread

		self._join_thread = threading.Thread(target=self._join, name="wok-engine-join")
		self._join_thread.start()

		_log = logger.get_logger("wok.engine.run")

		_log.debug("Engine run thread ready")

		while self._running:

			session = db.Session()

			try:
				#_log.debug("Scheduling new tasks ...")
				set_thread_title("scheduling")

				updated_tasks = set()

				# schedule tasks ready to be executed and save new workitems into the db
				for case in self._cases:
					tasks = case.schedule(session)
					updated_tasks.update(tasks)
					session.commit()

				# submit workitems ready to be executed
				for platform in self._platforms:
					job_submissions = self.__job_submissions(session, platform)
					for js, job_id, job_state in platform.submit(job_submissions):
						workitem = session.query(db.WorkItem).filter(db.WorkItem.id == js.workitem_id).one()
						workitem.job_id = job_id
						workitem.state = job_state
						js.task.dirty = True
						session.commit()
						updated_tasks.add(js.task)

				session.close()
				session = None

				#_log.debug("Waiting for events ...")

				set_thread_title("waiting")

				while len(updated_tasks) == 0 and not self._notified and self._running:
					self._cvar.wait(1)
				self._notified = False

				if not self._running:
					break

				session = db.Session() # there is a session.close() in the finally block

				#_log.debug("Stopping jobs for aborting instances ...")

				set_thread_title("working")

				# check stopping instances
				for case in self._cases:
					if (case.state == runstates.ABORTING or case.removed) and case not in self._stopping_cases:
						num_job_ids = session.query(db.WorkItem.job_id).filter(db.WorkItem.case_id == case.id)\
											.filter(~db.WorkItem.state.in_(runstates.TERMINAL_STATES)).count()
						if num_job_ids == 0:
							if case.state == runstates.ABORTING:
								_log.debug("Aborted case {} with no running jobs".format(case.name))
								dbcase = session.query(db.Case).filter(db.Case.id == case.id).first()
								dbcase.state = case.state = runstates.ABORTED
								session.commit()
							else:
								_log.debug("Stopped case {} with no running jobs".format(case.name))

							if case.removed:
								_log.debug("Removing case {} with no running jobs".format(case.name))
								self.__remove_case(session, case)
								session.commit()
						else:
							_log.info("Stopping {} jobs for case {} ...".format(num_job_ids, case.name))

							self._stopping_cases[case] = set()
							for platform in self._platforms:
								job_ids = [int(r[0]) for r in session.query(db.WorkItem.job_id)
															.filter(db.WorkItem.case_id == case.id)\
															.filter(db.WorkItem.platform == platform.name)\
															.filter(~db.WorkItem.state.in_(runstates.TERMINAL_STATES))]

								self._stopping_cases[case].update(job_ids)

								platform.jobs.abort(job_ids)

				#_log.debug("Checking job state changes ...")

				# detect workitems which state has changed
				for platform in self._platforms:
					for job_id, state in platform.jobs.state():
						try:
							workitem = session.query(db.WorkItem).filter(db.WorkItem.job_id == job_id).one()
						except NoResultFound:
							_log.warn("No work-item available for the job {0} while retrieving state".format(job_id))
							platform.jobs.abort([job_id])
							platform.jobs.join(job_id)
							continue

						if workitem.state != state:
							case = self._cases_by_name[workitem.case.name]
							task = case.component(workitem.task.cname)
							task.dirty = True

							workitem.state = state
							workitem.substate = runstates.LOGS_RETRIEVAL
							session.commit()
							updated_tasks.add(task)

							# if workitem has finished, queue it for logs retrieval
							if state in runstates.TERMINAL_STATES:
								self._logs_queue.put((workitem.id, job_id))

							_log.debug("[{}] Work-Item {} changed state to {}".format(case.name, workitem.cname, state))

				#_log.debug("Updating components state ...")

				# update affected components state
				updated_cases = set([task.case for task in updated_tasks])
				for case in updated_cases:
					case.update_states(session)
					case.update_count_by_state(session)
					case.clean_components(session)
					session.commit()

					if case.state == runstates.RUNNING:
						self._lock.release()
						try:
							self.case_started.send(case)
						finally:
							self._lock.acquire()

				for task in updated_tasks:
					case = task.case
					#_log.debug("[{}] Component {} updated state to {} ...".format(
					#				component.case.name, component.cname, component.state))

					count = task.workitem_count_by_state
					sb = ["[{}] {} ({})".format(case.name, task.cname, task.state.title)]
					sep = " "
					for state in runstates.STATES:
						if state in count:
							sb += [sep, "{}={}".format(state.symbol, count[state])]
							if sep == " ":
								sep = ", "

					if task.state == runstates.FINISHED and task.state in count:
						elapsed = str(task.elapsed)
						elapsed = elapsed.split(".")[0]
						sb += [" ", "<{}>".format(elapsed)]

					self._log.info("".join(sb))

			except BaseException as ex:
				num_exc += 1
				_log.warn("Exception in run thread ({}): {}".format(num_exc, str(ex)))
				#if num_exc > 3:
				#	raise
				#else:
				from traceback import format_exc
				_log.debug(format_exc())

				try:
					if session is not None:
						session.rollback()
				except Exception as ex:
					_log.warn("Session rollback failed")
					_log.exception(ex)

			finally:
				try:
					if session is not None:
						session.close()
				except Exception as ex:
					_log.warn("Session close failed")
					_log.exception(ex)

				session = None

		set_thread_title("finishing")

		try:
			# print cases state before leaving the thread
			#for case in self._cases:
			#	_log.debug("Case state:\n" + repr(case))

			for t in self._logs_threads:
				t.join()

			self._lock.release()
			self._join_thread.join()
			self._lock.acquire()

			_log.debug("Engine run thread finished")
		except Exception as ex:
			_log.exception(ex)
		
		self._running = False
		self._num_alive_threads -= 1

	def _logs(self, index):
		"Log retrieval thread"

		set_thread_title()

		self._num_alive_threads += 1

		_log = logger.get_logger("wok.engine.logs-{}".format(index))
		
		_log.debug("Engine logs thread ready")

		num_exc = 0

		while self._running:
			set_thread_title("waiting")

			# get the next task to retrieve the logs
			job_info = self.__queue_adaptative_get(self._logs_queue)
			if job_info is None:
				continue

			workitem_id, job_id = job_info

			session = db.Session()

			task = None
			try:
				workitem = session.query(db.WorkItem).filter(db.WorkItem.id == workitem_id).one()

				case = self._cases_by_name[workitem.case.name]
				task = case.component(workitem.task.cname)

				set_thread_title(workitem.cname)

				_log.debug("[{}] Reading logs for work-item {} ...".format(case.name, workitem.cname))

				output = task.platform.jobs.output(job_id)
				if output is None:
					output = StringIO.StringIO()

				path = os.path.join(self._work_path, "logs", case.name, task.cname)
				if not os.path.isdir(path):
					try:
						os.makedirs(path)
					except:
						if not os.path.isdir(path):
							raise

				path = os.path.join(path, "{:08}.db".format(workitem.index))
				if os.path.isfile(path):
					os.remove(path)

				logs_db = LogsDb(path)
				logs_db.open()
				logs_db.add(case.name, task.cname, workitem.index, output)
				logs_db.close()

				_log.debug("[{}] Done with logs of work-item {}".format(case.name, workitem.cname))

			except BaseException as ex:
				num_exc += 1
				session.rollback()
				_log.info("Exception in logs thread ({}): {}".format(num_exc, str(ex)))
				from traceback import format_exc
				_log.debug(format_exc())

			finally:
				workitem.substate = runstates.JOINING
				self._join_queue.put(job_info)
				session.commit()
				session.close()

		self._num_alive_threads -= 1

		_log.debug("Engine logs thread finished")

	def _join(self):
		"Joiner thread"

		set_thread_title()

		self._num_alive_threads += 1

		_log = logger.get_logger("wok.engine.join")

		_log.debug("Engine join thread ready")

		session = None

		num_exc = 0

		while self._running:
			try:
				set_thread_title("waiting")

				job_info = self.__queue_adaptative_get(self._join_queue)
				if job_info is None:
					continue

				workitem_id, job_id = job_info

				with self._lock:
					session = db.Session()

					workitem = session.query(db.WorkItem).filter(db.WorkItem.id == workitem_id).one()

					case = self._cases_by_name[workitem.case.name]
					task = case.component(workitem.task.cname)

					set_thread_title(task.cname)

					#_log.debug("Joining work-item %s ..." % task.cname)

					jr = task.platform.jobs.join(job_id)

					wr = Data.element(dict(
							hostname=jr.hostname,
							created=jr.created.strftime(_DT_FORMAT) if jr.created is not None else None,
							started=jr.started.strftime(_DT_FORMAT) if jr.started is not None else None,
							finished=jr.finished.strftime(_DT_FORMAT) if jr.finished is not None else None,
							exitcode=jr.exitcode.code if jr.exitcode is not None else None))

					r = task.platform.data.load_workitem_result(case.name, task.cname, workitem.index)

					if r is not None:
						if r.exception is not None:
							wr["exception"] = r.exception
						if r.trace is not None:
							wr["trace"] = r.trace

					workitem.substate = None
					workitem.result = wr

					case.num_active_workitems -= 1

					session.commit()

					# check if there are still more work-items
					num_workitems = session.query(func.count(db.WorkItem.id)).filter(
						~db.WorkItem.state.in_(runstates.TERMINAL_STATES)).scalar()

					if self._single_run and num_workitems == 0:
						stop_engine = True
						for case in self._cases:
							stop_engine = stop_engine and (case.state in runstates.TERMINAL_STATES)
						#self._running = not stop_engine
						if stop_engine:
							self._finished_event.set()

					_log.debug("[{}] Joined work-item {}".format(case.name, workitem.cname))

					# check stopping instances
					if case in self._stopping_cases:
						job_ids = self._stopping_cases[case]
						if job_id in job_ids:
							job_ids.remove(job_id)

						if len(job_ids) == 0:
							del self._stopping_cases[case]
							if case.state == runstates.ABORTING:
								workitem.case.state = case.state = runstates.ABORTED

							session.commit()

							if case.removed:
								self.__remove_case(session, case)
								session.commit()
						else:
							_log.debug("Still waiting for {} jobs to stop".format(len(job_ids)))

					if case.state in runstates.TERMINAL_STATES and case.num_active_workitems == 0:
						_log.info("[{}] Case {}. Total time: {}".format(case.name, case.state.title, str(case.elapsed)))

						self._lock.release()
						try:
							self.case_finished.send(case)
						finally:
							self._lock.acquire()

			except BaseException as ex:
				num_exc += 1
				_log.warn("Exception in join thread ({}): {}".format(num_exc, str(ex)))
				from traceback import format_exc
				_log.debug(format_exc())

				try:
					if session is not None:
						session.rollback()
				except Exception as ex:
					_log.warn("Session rollback failed")
					_log.exception(ex)

			finally:
				try:
					if session is not None:
						session.close()
				except Exception as ex:
					_log.warn("Session close failed")
					_log.exception(ex)

		self._num_alive_threads -= 1

		_log.debug("Engine join thread finished")

	# API -----------------------------------

	@property
	def conf(self):
		return self._conf

	@property
	def work_path(self):
		return self._work_path

	@property
	def projects(self):
		return self._projects

	def platform(self, name):
		return self._platforms_by_name.get(name)

	@property
	def default_platform(self):
		return self._default_platform

	@synchronized
	def start(self, wait=True, single_run=False):
		self._log.info("Starting engine ...")

		started_platforms = []
		try:
			for platform in self._platforms:
				started_platforms += [platform]
				platform.start()
				platform.callbacks.add(events.JOB_UPDATE, self._on_job_update)
		except BaseException as ex:
			self._log.error(str(ex))
			for platform in started_platforms:
				platform.close()
			raise

		#for project in self._projects:
		#	self._default_platform.sync_project(project)

		self._single_run = single_run
		
		self._run_thread = threading.Thread(target=self._run, name="wok-engine-run")
		self._run_thread.start()

		self._lock.release()
		try:
			try:
				self._num_alive_threads.wait_condition(lambda value: value < self._max_alive_threads)

				self._started = True

				self._log.info("Engine started")
			except KeyboardInterrupt:
				wait = False
				self._log.warn("Ctrl-C pressed ...")
			except Exception as e:
				wait = False
				self._log.error("Exception while waiting for the engine to start")
				self._log.exception(e)

			if wait:
				self.wait()
		finally:
			self._lock.acquire()

	def wait(self):
		self._log.info("Waiting for the engine to finish ...")

		try:
			finished = self._finished_event.wait(1)
			while not finished:
				finished = self._finished_event.wait(1)
		except KeyboardInterrupt:
			self._log.warn("Ctrl-C pressed ...")
		except Exception as e:
			self._log.error("Exception while waiting for the engine to finish, stopping the engine ...")
			self._log.exception(e)

		self._log.info("Finished waiting for the engine ...")

	def _stop_threads(self):
		self._log.info("Stopping threads ...")

		if self._run_thread is not None:

			with self._lock:
				self._running = False
				self._cvar.notify()

			while self._run_thread.isAlive():
				try:
					self._run_thread.join(1)
				except KeyboardInterrupt:
					self._log.warn("Ctrl-C pressed, killing the process ...")
					import signal
					os.kill(os.getpid(), signal.SIGTERM)
				except Exception as e:
					self._log.error("Exception while waiting for threads to finish ...")
					self._log.exception(e)
					self._log.warn("killing the process ...")
					import signal
					os.kill(os.getpid(), signal.SIGTERM)

			self._run_thread = None

		self._log.info("All threads finished ...")

	@synchronized
	def stop(self):
		self._log.info("Stopping the engine ...")

		self._finished_event.set()

		self._lock.release()
		try:
			if self._run_thread is not None:
				self._stop_threads()

			for platform in self._platforms:
				platform.close()
		finally:
			self._lock.acquire()

		self._started = False

		self._log.info("Engine stopped")

	def running(self):
		return self._started

	def notify(self, lock=True):
		if lock:
			self._lock.acquire()
		self._notified = True
		self._cvar.notify()
		if lock:
			self._lock.release()

	@synchronized
	def cases(self):
		instances = []
		for inst in self._cases:
			instances += [SynchronizedCase(self, inst)]
		return instances
	
	@synchronized
	def case(self, name):
		inst = self._cases_by_name.get(name)
		if inst is None:
			return None
		return SynchronizedCase(self, inst)

	@synchronized
	def exists_case(self, name):
		return name in self._cases_by_name

	@synchronized
	def create_case(self, case_name, conf_builder, project_name, flow_name, container_name):
		"Creates a new workflow case"

		session = db.Session()
		if session.query(db.Case).filter(db.Case.name==case_name).count() > 0:
			raise Exception("A case with this name already exists: {}".format(case_name))

		flow_uri = "{}:{}".format(project_name, flow_name)

		self._log.info("Creating case {} from {} ...".format(case_name, flow_uri))

		try:
			try:
				flow = self.projects.load_flow(flow_uri)
				project = flow.project
			except:
				self._log.error("Error while loading the workflow from {}".format(flow_uri))
				raise

			for platform in self._platforms:
				try:
					platform.data.remove_case(case_name)
					platform.data.create_case(case_name)
				except:
					self._log.error("Error while initializing data for case {}".format(case_name))
					raise

			try:
				case = Case(case_name, conf_builder, project, flow, container_name, engine=self)

				self._cases += [case]
				self._cases_by_name[case_name] = case

				case.persist(session)

				session.flush()
				self.notify(lock=False)
			except:
				self._log.error("Error while creating case {} for the workflow {} with configuration {}".format(
					case_name, flow_uri, conf_builder.get_conf()))
				raise
		except:
			session.rollback()
			#self._log.error("Error while creating case {} for the workflow {} with configuration {}".format(
			#	case_name, flow_uri, conf_builder.get_conf()))
			raise

		session.close()

		self._log.debug("\n" + repr(case))

		self._lock.release()
		try:
			self.case_created.send(case)
		finally:
			self._lock.acquire()

		return SynchronizedCase(self, case)

	@synchronized
	def remove_case(self, name):
		if name in self._cases_by_name:
			session = db.Session()
			case = self._cases_by_name[name]
			dbcase = session.query(db.Case).filter(db.Case.id == case.id).first()
			dbcase.removed = case.removed = True
			if case.state not in runstates.TERMINAL_STATES + [runstates.READY]:
				dbcase.state = case.state = runstates.ABORTING

			num_retries = 3
			while num_retries > 0:
				try:
					session.commit()
					self.notify(lock=False)
					self._log.debug("Case {} marked for removal".format(case.name))
					break
				except BaseException as ex:
					num_retries -= 1
					self._log.info("Exception in remove_case: {}".format(str(ex)))
					if num_retries > 0:
						self._log.info("Remaining retries = {}".format(num_retries))
						import time
						time.sleep(1)
					else:
						from traceback import format_exc
						self._log.debug(format_exc())
						session.rollback()

			session.close()

		else:
			self._log.error("Trying to remove a non existing case: {}".format(name))

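
# --- usage sketch (not part of the original example) --------------------------
# A minimal sketch of the WokEngine lifecycle. `conf` is assumed to be a wok
# configuration object (as passed to WokEngine above) and `conf_builder` a case
# configuration builder accepted by create_case(); the project, flow and
# container names are purely illustrative.
def wok_engine_sketch(conf, conf_builder):
	engine = WokEngine(conf)
	engine.start(wait=False, single_run=True)  # single_run: finish once every case reaches a terminal state
	engine.create_case("case-1", conf_builder, "example-project", "example-flow", "default")
	engine.wait()   # block until the engine signals it has finished
	engine.stop()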
class NodeBase(INode):
    _packageName = ""

    def __init__(self, name, uid=None):
        super(NodeBase, self).__init__()
        self.bCacheEnabled = False
        self.cacheMaxSize = 1000
        self.cache = {}

        self.killed = Signal()
        self.tick = Signal(float)
        self.errorOccured = Signal(object)
        self.errorCleared = Signal()

        self._uid = uuid.uuid4() if uid is None else uid
        self.graph = None
        self.name = name
        self.pinsCreationOrder = OrderedDict()
        self._pins = set()
        self.x = 0.0
        self.y = 0.0
        self.bCallable = False
        self._wrapper = None
        self._constraints = {}
        self._structConstraints = {}
        self.lib = None
        self.isCompoundNode = False
        self._lastError = None
        self.__wrapperJsonData = None
        self._nodeMetaData = None
        self.headerColor = None
        self._deprecated = False
        self._deprecationMessage = "This node is deprecated"
        self._experimental = False

    def setDeprecated(self, message):
        self._deprecated = True
        self._deprecationMessage = "This node will be removed in later releases! {}".format(
            message)

    def isDeprecated(self):
        return self._deprecated

    def isExperimental(self):
        return self._experimental

    def setExperimental(self):
        self._experimental = True

    def deprecationMessage(self):
        return self._deprecationMessage

    def getMetaData(self):
        """Returns node metadata dict.

        This will return a dict only for function-based nodes. For class-based nodes it will return None.

        .. seealso:: :mod:`~PyFlow.Core.FunctionLibrary`

        :rtype: dict or None
        """
        return self._nodeMetaData

    @property
    def wrapperJsonData(self):
        try:
            dt = self.__wrapperJsonData.copy()
            self.__wrapperJsonData.clear()
            self.__wrapperJsonData = None
            return dt
        except:
            return None

    def isValid(self):
        return self._lastError is None

    def clearError(self):
        self._lastError = None
        self.errorCleared.send()

    def setError(self, err):
        self._lastError = str(err)
        self.errorOccured.send(self._lastError)

    def checkForErrors(self):
        failedPins = {}
        for pin in self._pins:
            if pin._lastError is not None:
                failedPins[pin.name] = pin._lastError
        if len(failedPins):
            self._lastError = "Error on Pins:%s" % str(failedPins)
        else:
            self.clearError()
        wrapper = self.getWrapper()
        if wrapper:
            wrapper.update()

    @property
    def packageName(self):
        return self._packageName

    @property
    def constraints(self):
        return self._constraints

    @property
    def structConstraints(self):
        return self._structConstraints

    def getOrderedPins(self):
        return self.pinsCreationOrder.values()

    def getter(self, pinName):
        pin = self.getPinByName(pinName)
        if not pin:
            raise Exception()
        else:
            return pin

    def __getitem__(self, pinName):
        try:
            return self.getter(pinName)
        except Exception as x:
            if "<str>" in str(x):
                try:
                    return self.getter(str(pinName))
                except:
                    raise Exception(
                        "Could not find pin with name:{0}".format(pinName))
            else:
                raise Exception(
                    "Could not find signature for __getitem__:{0}".format(
                        type(pinName)))

    @property
    def pins(self):
        return self._pins

    @property
    def inputs(self):
        """Returns all input pins. Dictionary generated every time property called, so cache it when possible.
        """
        result = OrderedDict()
        for pin in self.pins:
            if pin.direction == PinDirection.Input:
                result[pin.uid] = pin
        return result

    @property
    def orderedInputs(self):
        result = {}
        sortedInputs = sorted(self.inputs.values(), key=lambda x: x.pinIndex)
        for inp in sortedInputs:
            result[inp.pinIndex] = inp
        return result

    @property
    def namePinInputsMap(self):
        """Returns all input pins. Dictionary generated every time property called, so cache it when possible.
        """
        result = OrderedDict()
        for pin in self.pins:
            if pin.direction == PinDirection.Input:
                result[pin.name] = pin
        return result

    @property
    def outputs(self):
        """Returns all output pins. Dictionary generated every time property called, so cache it when possible.
        """
        result = OrderedDict()
        for pin in self.pins:
            if pin.direction == PinDirection.Output:
                result[pin.uid] = pin
        return result

    @property
    def orderedOutputs(self):
        result = {}
        sortedOutputs = sorted(self.outputs.values(), key=lambda x: x.pinIndex)
        for out in sortedOutputs:
            result[out.pinIndex] = out
        return result

    @property
    def namePinOutputsMap(self):
        """Returns all output pins. Dictionary generated every time property called, so cache it when possible.
        """
        result = OrderedDict()
        for pin in self.pins:
            if pin.direction == PinDirection.Output:
                result[pin.name] = pin
        return result

    # IItemBase interface

    def setWrapper(self, wrapper):
        if self._wrapper is None:
            self._wrapper = wrapper

    def getWrapper(self):
        return self._wrapper

    def location(self):
        return self.graph().location()

    def path(self):
        location = "/".join(self.location())
        return "{}/{}".format(location, self.getName())

    @property
    def uid(self):
        return self._uid

    @uid.setter
    def uid(self, value):
        if self.graph is not None:
            self.graph().getNodes()[value] = self.graph().getNodes().pop(
                self._uid)
        self._uid = value

    @staticmethod
    def jsonTemplate():
        template = {
            'package': None,
            'lib': None,
            'type': None,
            'owningGraphName': None,
            'name': None,
            'uuid': None,
            'inputs': [],
            'outputs': [],
            'meta': {
                'var': {}
            },
            'wrapper': {}
        }
        return template

    def serialize(self):
        template = NodeBase.jsonTemplate()

        uidString = str(self.uid)
        nodeName = self.name

        template['package'] = self.packageName
        template['lib'] = self.lib
        template['type'] = self.__class__.__name__
        template['name'] = nodeName
        template['owningGraphName'] = self.graph().name
        template['uuid'] = uidString
        template['inputs'] = [i.serialize() for i in self.inputs.values()]
        template['outputs'] = [o.serialize() for o in self.outputs.values()]
        template['meta']['label'] = self.name
        template['x'] = self.x
        template['y'] = self.y

        # if running with ui get ui wrapper data to save
        wrapper = self.getWrapper()
        if wrapper:
            template['wrapper'] = wrapper.serializationHook()
        return template

    def isUnderActiveGraph(self):
        return self.graph() == self.graph().graphManager.activeGraph()

    def kill(self, *args, **kwargs):
        from PyFlow.Core.PathsRegistry import PathsRegistry

        if self.uid not in self.graph().getNodes():
            return

        self.killed.send()

        for pin in self.inputs.values():
            pin.kill()
        for pin in self.outputs.values():
            pin.kill()
        self.graph().getNodes().pop(self.uid)

        PathsRegistry().rebuild()

    def Tick(self, delta):
        self.tick.send(delta)

    @staticmethod
    def category():
        return "Default"

    @staticmethod
    def keywords():
        return []

    @staticmethod
    def pinTypeHints():
        return NodePinsSuggestionsHelper()

    @staticmethod
    def description():
        return "Default node description"

    def getName(self):
        return self.name

    def setName(self, name):
        self.name = str(name)

    def useCache(self):
        # if cached results exist, return them without calling compute
        args = tuple([
            pin.currentData() for pin in self.inputs.values()
            if pin.IsValuePin()
        ])

        # not hashable types will not be cached
        for arg in args:
            if not isinstance(arg, collections.Hashable):
                return False

        if args in self.cache:
            for outPin, data in self.cache[args].items():
                outPin.setData(data)
            return True
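
    # Stores the current output pin values keyed by the tuple of (hashable)
    # input values, so useCache() can replay them on the next identical call.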

    def afterCompute(self):
        if len(self.cache) >= self.cacheMaxSize:
            return

        # cache results
        args = tuple([
            pin.currentData() for pin in self.inputs.values()
            if pin.IsValuePin()
        ])
        for arg in args:
            if not isinstance(arg, collections.Hashable):
                return

        cache = {}
        for pin in self.outputs.values():
            cache[pin] = pin.currentData()
        self.cache[args] = cache

    def processNode(self, *args, **kwargs):
        if not self.isValid():
            return
        if self.bCacheEnabled:
            if not self.useCache():
                try:
                    self.compute()
                    self.clearError()
                    self.checkForErrors()
                except Exception as e:
                    self.setError(e)
            self.afterCompute()
        else:
            try:
                self.compute()
                self.clearError()
                self.checkForErrors()
            except Exception as e:
                self.setError(e)

    # INode interface

    def compute(self, *args, **kwargs):
        """This is node's brains. Main logic goes here

        Here are basic steps:

        1. Get data from input pins
        2. Do stuff
        3. Set data to output pins
        4. Call execs if needed

        Here is compute method of charge node

        .. code-block:: python
            :linenos:

            def compute(self, *args, **kwargs):
                step = abs(self.step.getData())
                if (self._currentAmount + step) < abs(self.amount.getData()):
                    self._currentAmount += step
                    return
                self.completed.call(*args, **kwargs)
                self._currentAmount = 0.0

        .. note:: See :mod:`PyFlow.Packages.PyFlowBase.Nodes` source code module for more examples

        """
        pass

    # INode interface end

    def isCallable(self):
        """Whether this node is callable or not
        """
        for p in list(self.inputs.values()) + list(self.outputs.values()):
            if p.isExec():
                return True
        return False

    def setPosition(self, x, y):
        """Sets node coordinate on canvas

        Used to correctly restore gui wrapper class

        :param x: X coordinate
        :type x: float
        :param y: Y coordinate
        :type y: float
        """
        self.x = x
        self.y = y

    def autoAffectPins(self):
        """All value inputs affects on all value outputs. All exec inputs affects on all exec outputs
        """
        for i in self.inputs.values():
            for o in self.outputs.values():
                assert (i is not o)
                if not i.IsValuePin() and o.IsValuePin():
                    continue
                if i.IsValuePin() and not o.IsValuePin():
                    continue
                pinAffects(i, o)

    def createInputPin(self,
                       pinName,
                       dataType,
                       defaultValue=None,
                       foo=None,
                       structure=PinStructure.Single,
                       constraint=None,
                       structConstraint=None,
                       supportedPinDataTypes=[],
                       group=""):
        """Creates input pin

        :param pinName: Pin name
        :type pinName: str
        :param dataType: Pin data type
        :type dataType: str
        :param defaultValue: Pin default value
        :type defaultValue: object
        :param foo: Pin callback. Used for exec pins
        :type foo: function
        :param structure: Pin structure
        :type structure: :class:`~PyFlow.Core.Common.PinStructure.Single`
        :param constraint: Pin constraint. Should be any hashable type. We use str
        :type constraint: object
        :param structConstraint: Pin struct constraint. Also should be hashable type
        :type structConstraint: object
        :param supportedPinDataTypes: List of allowed pin data types to be connected. Used by AnyPin
        :type supportedPinDataTypes: list(str)
        :param group: Pin group. Used only by ui wrapper
        :type group: str
        """
        pinName = self.getUniqPinName(pinName)
        p = CreateRawPin(pinName, self, dataType, PinDirection.Input)
        p.structureType = structure
        p.group = group

        if structure == PinStructure.Array:
            p.initAsArray(True)
        elif structure == PinStructure.Dict:
            p.initAsDict(True)
        elif structure == PinStructure.Multi:
            p.enableOptions(PinOptions.ArraySupported)

        if foo:
            p.onExecute.connect(foo, weak=False)

        if defaultValue is not None or dataType == "AnyPin":
            p.setDefaultValue(defaultValue)
            p.setData(defaultValue)
            if dataType == "AnyPin":
                p.setTypeFromData(defaultValue)
        else:
            p.setDefaultValue(getPinDefaultValueByType(dataType))

        if dataType == "AnyPin" and supportedPinDataTypes:

            def supportedDataTypes():
                return supportedPinDataTypes

            p._supportedDataTypes = p._defaultSupportedDataTypes = tuple(
                supportedPinDataTypes)
            p.supportedDataTypes = supportedDataTypes
        if constraint is not None:
            p.updateConstraint(constraint)
        if structConstraint is not None:
            p.updateStructConstraint(structConstraint)
        return p

    def createOutputPin(self,
                        pinName,
                        dataType,
                        defaultValue=None,
                        structure=PinStructure.Single,
                        constraint=None,
                        structConstraint=None,
                        supportedPinDataTypes=[],
                        group=""):
        """Creates output pin

        :param pinName: Pin name
        :type pinName: str
        :param dataType: Pin data type
        :type dataType: str
        :param defaultValue: Pin default value
        :type defaultValue: object
        :param structure: Pin structure
        :type structure: :class:`~PyFlow.Core.Common.PinStructure.Single`
        :param constraint: Pin constraint. Should be any hashable type. We use str
        :type constraint: object
        :param structConstraint: Pin struct constraint. Also should be hashable type
        :type structConstraint: object
        :param supportedPinDataTypes: List of allowed pin data types to be connected. Used by AnyPin
        :type supportedPinDataTypes: list(str)
        :param group: Pin group. Used only by ui wrapper
        :type group: str
        """
        pinName = self.getUniqPinName(pinName)
        p = CreateRawPin(pinName, self, dataType, PinDirection.Output)
        p.structureType = structure
        p.group = group

        if structure == PinStructure.Array:
            p.initAsArray(True)
        elif structure == PinStructure.Dict:
            p.initAsDict(True)
        elif structure == PinStructure.Multi:
            p.enableOptions(PinOptions.ArraySupported)

        if defaultValue is not None or dataType == "AnyPin":
            p.setDefaultValue(defaultValue)
            p.setData(defaultValue)
            if dataType == "AnyPin":
                p.setTypeFromData(defaultValue)
        else:
            p.setDefaultValue(getPinDefaultValueByType(dataType))

        if dataType == "AnyPin" and supportedPinDataTypes:

            def supportedDataTypes():
                return supportedPinDataTypes

            p.supportedDataTypes = supportedDataTypes
        if constraint is not None:
            p.updateConstraint(constraint)
        if structConstraint is not None:
            p.updateStructConstraint(structConstraint)
        return p

    def setData(self,
                pinName,
                data,
                pinSelectionGroup=PinSelectionGroup.BothSides):
        """Sets data to pin by pin name

        :param pinName: Target pin name
        :type pinName: str
        :param data: Pin data to be set
        :type data: object
        :param pinSelectionGroup: Which side to search
        :type pinSelectionGroup: :class:`~PyFlow.Core.Common.PinSelectionGroup`
        """
        p = self.getPinSG(str(pinName), pinSelectionGroup)
        assert (p
                is not None), "Failed to find pin by name: {}".format(pinName)
        p.setData(data)

    def getData(self, pinName, pinSelectionGroup=PinSelectionGroup.BothSides):
        """Get data from pin by name

        :param pinName: Target pin name
        :type pinName: str
        :param pinSelectionGroup: Which side to search
        :type pinSelectionGroup: :class:`~PyFlow.Core.Common.PinSelectionGroup`
        :rtype: object
        """
        p = self.getPinSG(str(pinName), pinSelectionGroup)
        assert (p
                is not None), "Failed to find pin by name: {}".format(pinName)
        return p.getData()

    def getUniqPinName(self, name):
        """Returns unique name for pin

        :param name: Target pin name
        :type name: str
        :rtype: str
        """
        pinNames = [
            i.name for i in list(list(self.inputs.values())) +
            list(list(self.outputs.values()))
        ]
        return getUniqNameFromList(pinNames, name)

    def __repr__(self):
        graphName = self.graph().name if self.graph is not None else str(None)
        return "<class[{0}]; name[{1}]; graph[{2}]>".format(
            self.__class__.__name__, self.getName(), graphName)

    def call(self, name, *args, **kwargs):
        """Call exec pin by name

        :param name: Target pin name
        :type name: str
        """
        namePinOutputsMap = self.namePinOutputsMap
        namePinInputsMap = self.namePinInputsMap
        if name in namePinOutputsMap:
            p = namePinOutputsMap[name]
            if p.isExec():
                p.call(*args, **kwargs)
        if name in namePinInputsMap:
            p = namePinInputsMap[name]
            if p.isExec():
                p.call(*args, **kwargs)

    def getPinSG(self, name, pinsSelectionGroup=PinSelectionGroup.BothSides):
        """Tries to find pin by name and selection group

        :param name: Pin name to search
        :type name: str
        :param pinsSelectionGroup: Side to search
        :type pinsSelectionGroup: :class:`~PyFlow.Core.Common.PinSelectionGroup`
        :rtype: :class:`~PyFlow.Core.PinBase.PinBase` or None
        """
        inputs = self.inputs
        outputs = self.outputs
        if pinsSelectionGroup == PinSelectionGroup.BothSides:
            for p in list(inputs.values()) + list(outputs.values()):
                if p.name == name:
                    return p
        elif pinsSelectionGroup == PinSelectionGroup.Inputs:
            for p in list(inputs.values()):
                if p.name == name:
                    return p
        else:
            for p in list(outputs.values()):
                if p.name == name:
                    return p

    def getPinByName(self, name):
        """Tries to find pin by name

        :param name: pin name
        :type name: str
        :rtype: :class:`~PyFlow.Core.PinBase.PinBase` or None
        """
        inputs = self.inputs
        outputs = self.outputs
        for p in list(inputs.values()) + list(outputs.values()):
            if p.name == name:
                return p

        return None

    def postCreate(self, jsonTemplate=None):
        """Called after node was added to graph

        :param jsonTemplate: Serialized data of spawned node
        :type jsonTemplate: dict or None
        """
        if jsonTemplate is not None:
            self.uid = uuid.UUID(jsonTemplate['uuid'])
            self.setName(jsonTemplate['name'])
            self.x = jsonTemplate['x']
            self.y = jsonTemplate['y']

            # set pins data
            sortedInputs = sorted(jsonTemplate['inputs'],
                                  key=lambda pinDict: pinDict["pinIndex"])
            for inpJson in sortedInputs:
                dynamicEnabled = PinOptions.Dynamic.value in inpJson["options"]
                if dynamicEnabled or inpJson[
                        'name'] not in self.namePinInputsMap:
                    # create custom dynamically created pins in derived classes
                    continue

                pin = self.getPinSG(str(inpJson['name']),
                                    PinSelectionGroup.Inputs)
                pin.deserialize(inpJson)

            sortedOutputs = sorted(jsonTemplate['outputs'],
                                   key=lambda pinDict: pinDict["pinIndex"])
            for outJson in sortedOutputs:
                dynamicEnabled = PinOptions.Dynamic.value in outJson["options"]
                if dynamicEnabled or outJson[
                        'name'] not in self.namePinOutputsMap:
                    # create custom dynamically created pins in derived classes
                    continue

                pin = self.getPinSG(str(outJson['name']),
                                    PinSelectionGroup.Outputs)
                pin.deserialize(outJson)

            # store data for wrapper
            if "wrapper" in jsonTemplate:
                self.__wrapperJsonData = jsonTemplate["wrapper"]

        if self.isCallable():
            self.bCallable = True

        # it makes no sense to cache nodes without inputs
        if len(self.inputs) == 0:
            self.bCacheEnabled = False

        self.autoAffectPins()
        self.checkForErrors()

    @staticmethod
    def initializeFromFunction(foo):
        """Constructs node from annotated function

        .. seealso :: :mod:`PyFlow.Core.FunctionLibrary`

        :param foo: Annotated function
        :type foo: function
        :rtype: :class:`~PyFlow.Core.NodeBase.NodeBase`
        """
        retAnyOpts = None
        retConstraint = None
        meta = foo.__annotations__['meta']
        returnType = returnDefaultValue = None
        returnPinOptionsToEnable = None
        returnPinOptionsToDisable = None
        returnWidgetVariant = "DefaultWidget"
        retStructConstraint = None
        returnAnnotationDict = None
        if foo.__annotations__['return'] is not None:
            returnType = foo.__annotations__['return'][0]
            returnDefaultValue = foo.__annotations__['return'][1]
            if len(foo.__annotations__['return']) == 3:
                returnAnnotationDict = foo.__annotations__['return'][2]

                if "supportedDataTypes" in returnAnnotationDict:
                    retAnyOpts = returnAnnotationDict["supportedDataTypes"]
                if "constraint" in returnAnnotationDict:
                    retConstraint = returnAnnotationDict["constraint"]
                if "structConstraint" in returnAnnotationDict:
                    retStructConstraint = returnAnnotationDict[
                        "structConstraint"]
                if "enabledOptions" in returnAnnotationDict:
                    returnPinOptionsToEnable = returnAnnotationDict[
                        "enabledOptions"]
                if "disabledOptions" in returnAnnotationDict:
                    returnPinOptionsToDisable = returnAnnotationDict[
                        "disabledOptions"]
                if "inputWidgetVariant" in returnAnnotationDict:
                    returnWidgetVariant = returnAnnotationDict[
                        "inputWidgetVariant"]

        nodeType = foo.__annotations__['nodeType']
        _packageName = foo.__annotations__['packageName']
        libName = foo.__annotations__['lib']
        fooArgNames = getargspec(foo).args

        @staticmethod
        def description():
            return foo.__doc__

        @staticmethod
        def category():
            return meta['Category']

        @staticmethod
        def keywords():
            return meta['Keywords']

        def constructor(self, name, **kwargs):
            NodeBase.__init__(self, name, **kwargs)

        nodeClass = type(
            foo.__name__, (NodeBase, ), {
                '__init__': constructor,
                'category': category,
                'keywords': keywords,
                'description': description
            })

        nodeClass._packageName = _packageName

        raw_inst = nodeClass(foo.__name__)
        raw_inst.lib = libName

        # this is list of 'references' outputs will be created for
        refs = []
        outExec = None

        # generate compute method from function
        def compute(self, *args, **kwargs):
            # arguments will be taken from inputs
            if not self.isValid():
                return
            kwds = {}
            for i in list(self.inputs.values()):
                if not i.isExec():
                    kwds[i.name] = i.getData()
            for ref in refs:
                if not ref.isExec():
                    kwds[ref.name] = ref.setData
            result = foo(**kwds)
            if returnType is not None:
                self.setData(str('out'), result)
            if nodeType == NodeTypes.Callable:
                outExec.call(*args, **kwargs)

        raw_inst.compute = MethodType(compute, raw_inst)

        raw_inst._nodeMetaData = meta
        if 'CacheEnabled' in meta:
            raw_inst.bCacheEnabled = meta['CacheEnabled']

        # create execs if callable
        if nodeType == NodeTypes.Callable:
            inputExec = raw_inst.createInputPin(DEFAULT_IN_EXEC_NAME,
                                                'ExecPin', None,
                                                raw_inst.compute)
            outExec = raw_inst.createOutputPin(DEFAULT_OUT_EXEC_NAME,
                                               'ExecPin')
            raw_inst.bCallable = True
            raw_inst.bCacheEnabled = False

        if returnType is not None:
            p = raw_inst.createOutputPin('out',
                                         returnType,
                                         returnDefaultValue,
                                         supportedPinDataTypes=retAnyOpts,
                                         constraint=retConstraint,
                                         structConstraint=retStructConstraint)
            p.setData(returnDefaultValue)
            p.setDefaultValue(returnDefaultValue)
            p.initAsArray(isinstance(returnDefaultValue, list))
            p.setInputWidgetVariant(returnWidgetVariant)
            p.annotationDescriptionDict = copy(
                returnAnnotationDict
            ) if returnAnnotationDict is not None else None
            if p.annotationDescriptionDict is not None and "Description" in p.annotationDescriptionDict:
                p.description = p.annotationDescriptionDict["Description"]
            if returnPinOptionsToEnable is not None:
                p.enableOptions(returnPinOptionsToEnable)
            if returnPinOptionsToDisable is not None:
                p.disableOptions(returnPinOptionsToDisable)
            if not p.isArray() and p.optionEnabled(PinOptions.ArraySupported):
                p.structureType = PinStructure.Multi
            elif p.isArray():
                p.structureType = PinStructure.Array

        # iterate over function arguments and create pins according to data types
        for index in range(len(fooArgNames)):
            argName = fooArgNames[index]
            pinDescriptionTuple = foo.__annotations__[argName]
            anyOpts = None
            constraint = None
            structConstraint = None
            pinOptionsToEnable = None
            pinOptionsToDisable = None
            inputWidgetVariant = "DefaultWidget"
            # tuple means this is reference pin with default value eg - (dataType, defaultValue)
            if str("Reference") == pinDescriptionTuple[0]:
                pinDataType = pinDescriptionTuple[1][0]
                pinDefaultValue = pinDescriptionTuple[1][1]
                pinDict = None
                if len(pinDescriptionTuple[1]) == 3:
                    pinDict = pinDescriptionTuple[1][2]

                if pinDict is not None:
                    if "supportedDataTypes" in pinDict:
                        anyOpts = pinDict["supportedDataTypes"]
                    if "constraint" in pinDict:
                        constraint = pinDict["constraint"]
                    if "structConstraint" in pinDict:
                        structConstraint = pinDict["structConstraint"]
                    if "enabledOptions" in pinDict:
                        pinOptionsToEnable = pinDict["enabledOptions"]
                    if "disabledOptions" in pinDict:
                        pinOptionsToDisable = pinDict["disabledOptions"]
                    if "inputWidgetVariant" in pinDict:
                        inputWidgetVariant = pinDict["inputWidgetVariant"]

                outRef = raw_inst.createOutputPin(
                    argName,
                    pinDataType,
                    supportedPinDataTypes=anyOpts,
                    constraint=constraint,
                    structConstraint=structConstraint)
                outRef.annotationDescriptionDict = copy(
                    pinDict) if pinDict is not None else None
                if outRef.annotationDescriptionDict is not None and "Description" in outRef.annotationDescriptionDict:
                    outRef.description = outRef.annotationDescriptionDict[
                        "Description"]
                outRef.initAsArray(isinstance(pinDefaultValue, list))
                outRef.setDefaultValue(pinDefaultValue)
                outRef.setData(pinDefaultValue)
                outRef.setInputWidgetVariant(inputWidgetVariant)
                if pinOptionsToEnable is not None:
                    outRef.enableOptions(pinOptionsToEnable)
                if pinOptionsToDisable is not None:
                    outRef.disableOptions(pinOptionsToDisable)
                if not outRef.isArray() and outRef.optionEnabled(
                        PinOptions.ArraySupported):
                    outRef.structureType = PinStructure.Multi
                elif outRef.isArray():
                    outRef.structureType = PinStructure.Array
                refs.append(outRef)
            else:
                pinDataType = pinDescriptionTuple[0]
                pinDefaultValue = pinDescriptionTuple[1]
                pinDict = None
                if len(pinDescriptionTuple) == 3:
                    pinDict = pinDescriptionTuple[2]

                if pinDict is not None:
                    if "supportedDataTypes" in pinDict:
                        anyOpts = pinDict["supportedDataTypes"]
                    if "constraint" in pinDict:
                        constraint = pinDict["constraint"]
                    if "structConstraint" in pinDict:
                        structConstraint = pinDict["structConstraint"]
                    if "enabledOptions" in pinDict:
                        pinOptionsToEnable = pinDict["enabledOptions"]
                    if "disabledOptions" in pinDict:
                        pinOptionsToDisable = pinDict["disabledOptions"]
                    if "inputWidgetVariant" in pinDict:
                        inputWidgetVariant = pinDict["inputWidgetVariant"]

                inp = raw_inst.createInputPin(
                    argName,
                    pinDataType,
                    supportedPinDataTypes=anyOpts,
                    constraint=constraint,
                    structConstraint=structConstraint)
                inp.annotationDescriptionDict = copy(
                    pinDict) if pinDict is not None else None
                if inp.annotationDescriptionDict is not None and "Description" in inp.annotationDescriptionDict:
                    inp.description = inp.annotationDescriptionDict[
                        "Description"]
                inp.initAsArray(isinstance(pinDefaultValue, list))
                inp.setData(pinDefaultValue)
                inp.setDefaultValue(pinDefaultValue)
                inp.setInputWidgetVariant(inputWidgetVariant)
                if pinOptionsToEnable is not None:
                    inp.enableOptions(pinOptionsToEnable)
                if pinOptionsToDisable is not None:
                    inp.disableOptions(pinOptionsToDisable)
                if not inp.isArray() and inp.optionEnabled(
                        PinOptions.ArraySupported):
                    inp.structureType = PinStructure.Multi
                elif inp.isArray():
                    inp.structureType = PinStructure.Array
        raw_inst.autoAffectPins()
        return raw_inst
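The factory above builds pins by inspecting a function's __annotations__ and the optional per-argument options dict. Purely as an illustration (not taken from the sources), a minimal sketch of the annotation layout it consumes, filled in by hand; the 'FloatPin' type name, the widget variant string and the package/lib names are assumptions, and in a real PyFlow setup a registration decorator would populate these fields before a factory like initializeFromFunction (shown in full in Exemple #30) is called.

def scale(value, factor):
    """Multiply value by factor."""
    return value * factor

scale.__annotations__ = {
    'meta': {'Category': 'Math', 'Keywords': ['multiply'], 'CacheEnabled': True},
    'nodeType': NodeTypes.Callable,      # adds inExec/outExec pins
    'packageName': 'DemoPackage',        # assumed name
    'lib': 'DemoLib',                    # assumed name
    # per-argument descriptions: (dataType, defaultValue[, options dict])
    'value': ('FloatPin', 0.0, {'Description': 'Value to scale'}),
    'factor': ('FloatPin', 1.0, {'inputWidgetVariant': 'DefaultWidget'}),
    # the return pin uses the same (dataType, defaultValue[, dict]) layout
    'return': ('FloatPin', 0.0, {'Description': 'Scaled value'}),
}

node = NodeBase.initializeFromFunction(scale)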
class UploadedFileManager(CacheStatsProvider):
    """Holds files uploaded by users of the running Streamlit app,
    and emits an event signal when a file is added.
    """
    def __init__(self):
        # List of files for a given widget in a given session.
        self._files_by_id: Dict[Tuple[str, str], List[UploadedFileRec]] = {}

        # A counter that generates unique file IDs. Each file ID is greater
        # than the previous ID, which means we can use IDs to compare files
        # by age.
        self._file_id_counter = 1
        self._file_id_lock = threading.Lock()

        # Prevents concurrent access to the _files_by_id dict.
        # In remove_session_files(), we iterate over the dict's keys. It's
        # an error to mutate a dict while iterating; this lock prevents that.
        self._files_lock = threading.Lock()
        self.on_files_updated = Signal(
            doc="""Emitted when a file list is added to the manager or updated.

            Parameters
            ----------
            session_id : str
                The session_id for the session whose files were updated.
            """)

    def __repr__(self) -> str:
        return util.repr_(self)

    def add_file(
        self,
        session_id: str,
        widget_id: str,
        file: UploadedFileRec,
    ) -> UploadedFileRec:
        """Add a file to the FileManager, and return a new UploadedFileRec
        with its ID assigned.

        The "on_files_updated" Signal will be emitted.

        Parameters
        ----------
        session_id
            The session ID of the report that owns the files.
        widget_id
            The widget ID of the FileUploader that created the files.
        file
            The file to add.

        Returns
        -------
        UploadedFileRec
            The added file, which has its unique ID assigned.
        """
        files_by_widget = session_id, widget_id

        # Assign the file a unique ID
        file_id = self._get_next_file_id()
        file = UploadedFileRec(id=file_id,
                               name=file.name,
                               type=file.type,
                               data=file.data)

        with self._files_lock:
            file_list = self._files_by_id.get(files_by_widget, None)
            if file_list is not None:
                file_list.append(file)
            else:
                self._files_by_id[files_by_widget] = [file]

        self.on_files_updated.send(session_id)
        return file

    def get_all_files(self, session_id: str,
                      widget_id: str) -> List[UploadedFileRec]:
        """Return all the files stored for the given widget.

        Parameters
        ----------
        session_id
            The session ID of the report that owns the file.
        widget_id
            The widget ID of the FileUploader that created the file.
        """
        file_list_id = (session_id, widget_id)
        with self._files_lock:
            return self._files_by_id.get(file_list_id, []).copy()

    def get_files(self, session_id: str, widget_id: str,
                  file_ids: List[int]) -> List[UploadedFileRec]:
        """Return the files with the given widget_id and file_ids.

        Parameters
        ----------
        session_id
            The session ID of the report that owns the file.
        widget_id
            The widget ID of the FileUploader that created the file.
        file_ids
            List of file IDs. Only files whose IDs are in this list will be
            returned.
        """
        return [
            f for f in self.get_all_files(session_id, widget_id)
            if f.id in file_ids
        ]

    def remove_orphaned_files(
        self,
        session_id: str,
        widget_id: str,
        newest_file_id: int,
        active_file_ids: List[int],
    ) -> None:
        """Remove 'orphaned' files: files that have been uploaded and
        subsequently deleted, but haven't yet been removed from memory.

        Because FileUploader can live inside forms, file deletion is made a
        bit tricky: a file deletion should only happen after the form is
        submitted.

        FileUploader's widget value is an array of numbers that has two parts:
        - The first number is always 'this.state.newestServerFileId'.
        - The remaining 0 or more numbers are the file IDs of all the
          uploader's uploaded files.

        When the server receives the widget value, it deletes "orphaned"
        uploaded files. An orphaned file is any file associated with a given
        FileUploader whose file ID is not in the active_file_ids, and whose
        ID is <= `newestServerFileId`.

        This logic ensures that a FileUploader within a form doesn't have any
        of its "unsubmitted" uploads prematurely deleted when the script is
        re-run.
        """
        file_list_id = (session_id, widget_id)
        with self._files_lock:
            file_list = self._files_by_id.get(file_list_id)
            if file_list is None:
                return

            # Remove orphaned files from the list:
            # - `f.id in active_file_ids`:
            #   File is currently tracked by the widget. DON'T remove.
            # - `f.id > newest_file_id`:
            #   File was uploaded *after* the widget was most recently
            #   updated. (It's probably in a form.) DON'T remove.
            # - `f.id < newest_file_id and f.id not in active_file_ids`:
            #   File is not currently tracked by the widget, and was uploaded
            #   *before* this most recent update. This means it's been deleted
            #   by the user on the frontend, and is now "orphaned". Remove!
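            #
            # Worked example (hypothetical IDs): with newest_file_id=5 and
            # active_file_ids=[3, 5], a stored list [2, 3, 5, 7] keeps 3 and 5
            # (active) and 7 (uploaded after the update), and removes 2
            # (inactive and <= 5, so it was deleted on the frontend).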
            new_list = [
                f for f in file_list
                if f.id > newest_file_id or f.id in active_file_ids
            ]
            self._files_by_id[file_list_id] = new_list
            num_removed = len(file_list) - len(new_list)

        if num_removed > 0:
            LOGGER.debug("Removed %s orphaned files" % num_removed)

    def remove_file(self, session_id: str, widget_id: str,
                    file_id: int) -> bool:
        """Remove the file list with the given ID, if it exists.

        The "on_files_updated" Signal will be emitted.

        Returns
        -------
        bool
            True if the file was removed, or False if no such file exists.
        """
        file_list_id = (session_id, widget_id)
        with self._files_lock:
            file_list = self._files_by_id.get(file_list_id, None)
            if file_list is None:
                return False

            # Remove the file from its list.
            new_file_list = [file for file in file_list if file.id != file_id]
            self._files_by_id[file_list_id] = new_file_list

        self.on_files_updated.send(session_id)
        return True

    def _remove_files(self, session_id: str, widget_id: str) -> None:
        """Remove the file list for the provided widget in the
        provided session, if it exists.

        Does not emit any signals.
        """
        files_by_widget = session_id, widget_id
        with self._files_lock:
            self._files_by_id.pop(files_by_widget, None)

    def remove_files(self, session_id: str, widget_id: str) -> None:
        """Remove the file list for the provided widget in the
        provided session, if it exists.

        The "on_files_updated" Signal will be emitted.

        Parameters
        ----------
        session_id : str
            The session ID of the report that owns the file.
        widget_id : str
            The widget ID of the FileUploader that created the file.
        """
        self._remove_files(session_id, widget_id)
        self.on_files_updated.send(session_id)

    def remove_session_files(self, session_id: str) -> None:
        """Remove all files that belong to the given report.

        Parameters
        ----------
        session_id : str
            The session ID of the report whose files we're removing.

        """
        # Copy the keys into a list, because we'll be mutating the dictionary.
        with self._files_lock:
            all_ids = list(self._files_by_id.keys())

        for files_id in all_ids:
            if files_id[0] == session_id:
                self.remove_files(*files_id)

    def _get_next_file_id(self) -> int:
        """Return the next file ID and increment our ID counter."""
        with self._file_id_lock:
            file_id = self._file_id_counter
            self._file_id_counter += 1
            return file_id

    def get_stats(self) -> List[CacheStat]:
        with self._files_lock:
            # Flatten all files into a single list
            all_files: List[UploadedFileRec] = []
            for file_list in self._files_by_id.values():
                all_files.extend(file_list)

        return [
            CacheStat(
                category_name="UploadedFileManager",
                cache_name="",
                byte_length=len(file.data),
            ) for file in all_files
        ]
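A short usage sketch based on the class above (the file contents and IDs are made up; UploadedFileRec is constructed the same way add_file does it internally). The listener receives the session_id positionally because the manager calls on_files_updated.send(session_id).

mgr = UploadedFileManager()

def files_updated(session_id):
    print("files changed for session", session_id)

mgr.on_files_updated.connect(files_updated)

rec = UploadedFileRec(id=0, name="data.csv", type="text/csv", data=b"a,b\n1,2\n")
stored = mgr.add_file("session-1", "uploader-1", rec)      # real ID assigned here
print(len(mgr.get_all_files("session-1", "uploader-1")))   # -> 1
print(mgr.get_files("session-1", "uploader-1", [stored.id]))

mgr.remove_session_files("session-1")                      # emits the signal again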
Exemple #28
0
def _send_signal(self, sig: Signal, **data: Any) -> None:
    sig.send(self.kuyruk, task=self, **data)
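A hedged sketch (the signal name and payload are assumed, not taken from Kuyruk) of how a helper with this shape is typically consumed: receivers subscribe to a blinker Signal and are called with the application object as sender, plus the task and any extra data as keyword arguments.

from blinker import Signal

task_finished = Signal('task_finished')     # illustrative signal

def on_task_finished(sender, task, result=None):
    print("task %r finished on %r with result %r" % (task, sender, result))

task_finished.connect(on_task_finished)

# inside a task method:
#     self._send_signal(task_finished, result=42)
# which expands to:
#     task_finished.send(self.kuyruk, task=self, result=42)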
Exemple #29
0
class BaseProcess(object):
    def __init__(self, new_func=None, total_count=None, *args, **kwargs):
        self.func = new_func
        self.total_count = total_count
        self.count = 0
        self.task_queue = []
        self.process_list = []
        self.process_map = {}
        self.task_map = {}
        self.args = args
        self.kwargs = kwargs
        signal.signal(signal.SIGWINCH, self.action)
        # self.window = curses.initscr()
        self.add_task_signal = Signal(1000)
        self.count_signal = Signal(1001)
        self.reg_task_signal()
        self.reg_count_signal()

    def reg_count_signal(self):
        if not self.total_count:
            return
        else:
            self.count_signal.connect(self.process_count)

    def reg_task_signal(self):
        self.add_task_signal.connect(self.process_func)

    def create_new_task(self, work_num=1):
        for i in range(work_num):
            self.task_queue.append((self.func, self.args, self.kwargs))
            self.add_task_signal.send()
        if self.total_count:
            for i in range(self.total_count):
                self.count_signal.send()
        self.process_map = {x: 0 for x in range(work_num)}

    def make_func(self):
        # Wrap self.func so that each call also increments self.count.
        @functools.wraps(self.func)
        def _make_func(*args, **kwargs):
            result = self.func(*args, **kwargs)
            self.count += 1
            return result
        return _make_func

    def process_func(self, sender):
        while self.task_queue:
            self.task_queue.pop()
            process = multiprocessing.Process(target=self.func, args=self.args, kwargs=self.kwargs)
            process.start()

    def process_count(self, sender):
        self.count += 1
        bar_length = 20
        percent = float(self.count * 1.0 / self.total_count)
        hashes = '#' * int(percent * bar_length)
        spaces = ' ' * (bar_length - len(hashes))
        sys.stdout.write("\rPercent: [%s] %d%%" % (hashes + spaces, percent * 100))
        sys.stdout.flush()

    def action(self, *args):
        # Called both from start() and as a SIGWINCH handler (signum, frame).
        # self.window only exists when curses has been initialized.
        if getattr(self, 'window', None) is None:
            return
        key = self.window.getch()
        if key == ord('p'):
            pass
        if key == ord('r'):
            pass

    def start(self):
        while True:
            self.action()
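A small usage sketch, assuming `work` is a picklable top-level function (a multiprocessing requirement) and a POSIX platform (the constructor installs a SIGWINCH handler). The integer-named signals Signal(1000)/Signal(1001) behave like any other blinker signal here.

def work(path):
    print("processing", path)

if __name__ == '__main__':
    # positional args: new_func, total_count, then *args forwarded to work
    proc = BaseProcess(work, 2, "some/path")
    proc.create_new_task(work_num=2)   # queues 2 tasks; the signals spawn the processes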
Exemple #30
0
class NodeBase(INode):
    _packageName = ""

    def __init__(self, name, uid=None):
        super(NodeBase, self).__init__()
        # memo
        self.bCacheEnabled = True
        self.cacheMaxSize = 1000
        self.cache = {}

        self.killed = Signal()
        self.tick = Signal(float)
        self.errorOccured = Signal(object)
        self.errorCleared = Signal()

        self._uid = uuid.uuid4() if uid is None else uid
        self.graph = None
        self.name = name
        self.pinsCreationOrder = OrderedDict()
        self._pins = set()
        self.x = 0.0
        self.y = 0.0
        self.bCallable = False
        self._wrapper = None
        self._constraints = {}
        self._structConstraints = {}
        self.lib = None
        self.isCompoundNode = False
        self._lastError = None
        self.__wrapperJsonData = None

    @property
    def wrapperJsonData(self):
        try:
            dt = self.__wrapperJsonData.copy()
            self.__wrapperJsonData.clear()
            self.__wrapperJsonData = None
            return dt
        except AttributeError:
            return None

    def isValid(self):
        return self._lastError is None

    def clearError(self):
        self._lastError = None
        self.errorCleared.send()

    def setError(self, err):
        self._lastError = str(err)
        self.errorOccured.send(self._lastError)

    @property
    def packageName(self):
        return self._packageName

    @property
    def constraints(self):
        return self._constraints

    @property
    def structConstraints(self):
        return self._structConstraints

    def getOrderedPins(self):
        return self.pinsCreationOrder.values()

    @dispatch(str)
    def __getitem__(self, pinName):
        return self.getPin(pinName)

    @property
    def pins(self):
        return self._pins

    @property
    def inputs(self):
        """Returns all input pins. Dictionary generated every time property called, so cache it when possible
        Returns:
            dict(uuid: PinBase)
        """
        result = OrderedDict()
        for pin in self.pins:
            if pin.direction == PinDirection.Input:
                result[pin.uid] = pin
        return result

    @property
    def namePinInputsMap(self):
        """Returns all input pins. Dictionary generated every time property called, so cache it when possible
        Returns:
            dict(str: PinBase)
        """
        result = OrderedDict()
        for pin in self.pins:
            if pin.direction == PinDirection.Input:
                result[pin.name] = pin
        return result

    @property
    def outputs(self):
        """Returns all output pins. Dictionary generated every time property called, so cache it when possible
        Returns:
            dict(uuid: PinBase)
        """
        result = OrderedDict()
        for pin in self.pins:
            if pin.direction == PinDirection.Output:
                result[pin.uid] = pin
        return result

    @property
    def namePinOutputsMap(self):
        """Returns all output pins. Dictionary generated every time property called, so cache it when possible
        Returns:
            dict(str: PinBase)
        """
        result = OrderedDict()
        for pin in self.pins:
            if pin.direction == PinDirection.Output:
                result[pin.name] = pin
        return result

    # IItemBase interface

    def setWrapper(self, wrapper):
        if self._wrapper is None:
            self._wrapper = wrapper

    def getWrapper(self):
        return self._wrapper

    @property
    def uid(self):
        return self._uid

    @uid.setter
    def uid(self, value):
        if self.graph is not None:
            self.graph().nodes[value] = self.graph().nodes.pop(self._uid)
        self._uid = value

    @staticmethod
    def jsonTemplate():
        template = {
            'package': None,
            'lib': None,
            'type': None,
            'owningGraphName': None,
            'name': None,
            'uuid': None,
            'inputs': [],
            'outputs': [],
            'meta': {
                'var': {}
            },
            'wrapper': {}
        }
        return template

    def serialize(self):
        template = NodeBase.jsonTemplate()

        uidString = str(self.uid)
        nodeName = self.name

        template['package'] = self.packageName
        template['lib'] = self.lib
        template['type'] = self.__class__.__name__
        template['name'] = nodeName
        template['owningGraphName'] = self.graph().name
        template['uuid'] = uidString
        template['inputs'] = [i.serialize() for i in self.inputs.values()]
        template['outputs'] = [o.serialize() for o in self.outputs.values()]
        template['meta']['label'] = self.name
        template['x'] = self.x
        template['y'] = self.y

        # if running with ui get ui wrapper data to save
        wrapper = self.getWrapper()
        if wrapper:
            template['wrapper'] = wrapper.serializationHook()
        return template

    def isUnderActiveGraph(self):
        return self.graph() == self.graph().graphManager.activeGraph()

    def kill(self, *args, **kwargs):
        if self.uid not in self.graph().nodes:
            # already killed
            # this block executes for variable getter/setter
            return

        self.killed.send()

        for pin in self.inputs.values():
            pin.kill()
        for pin in self.outputs.values():
            pin.kill()
        self.graph().nodes.pop(self.uid)

    def Tick(self, delta):
        self.tick.send(delta)

    @staticmethod
    def category():
        return "Default"

    @staticmethod
    def keywords():
        return []

    @staticmethod
    def pinTypeHints():
        return {'inputs': [], 'outputs': []}

    @staticmethod
    def description():
        return "Default node description"

    def getName(self):
        return self.name

    def setName(self, name):
        # self.graph is sometimes still None
        try:
            assert self.graph is not None
            self.name = self.graph().graphManager.getUniqNodeName(str(name))
        except Exception:
            self.name = name

    def useCache(self):
        # if cached results exist - return them without calling compute
        args = tuple([pin.currentData() for pin in self.inputs.values()])
        try:
            # mutable/unhashable types will not be cached
            if args in self.cache:
                for outPin, data in self.cache[args].items():
                    outPin.setData(data)
                return True
        except TypeError:
            pass
        return False

    def afterCompute(self):
        if len(self.cache) >= self.cacheMaxSize:
            return

        # cache results
        args = tuple([pin.currentData() for pin in self.inputs.values()])
        try:
            # mutable/unhashable types will not be cached
            if args in self.cache:
                return
        except TypeError:
            return

        cache = {}
        for pin in self.outputs.values():
            cache[pin] = pin.currentData()
        self.cache[args] = cache

    def processNode(self, *args, **kwargs):
        if not self.isValid():
            return

        if self.bCacheEnabled:
            if not self.useCache():
                try:
                    self.compute()
                    self.clearError()
                except Exception as e:
                    self.setError(e)
            self.afterCompute()
        else:
            self.compute()

    # INode interface

    def compute(self, *args, **kwargs):
        '''
        node calculations here
        '''
        # getting data from inputs

        # do stuff

        # write data to outputs
        return

    # INode interface end

    def isCallable(self):
        for p in list(self.inputs.values()) + list(self.outputs.values()):
            if p.isExec():
                return True
        return False

    def setPosition(self, x, y):
        self.x = x
        self.y = y

    def autoAffectPins(self):
        """All value inputs affects on all value outputs. All exec inputs affects on all exec outputs
        """
        for i in self.inputs.values():
            for o in self.outputs.values():
                assert (i is not o)
                if not i.IsValuePin() and o.IsValuePin():
                    continue
                if i.IsValuePin() and not o.IsValuePin():
                    continue
                pinAffects(i, o)

    def createInputPin(self,
                       pinName,
                       dataType,
                       defaultValue=None,
                       foo=None,
                       structure=PinStructure.Single,
                       constraint=None,
                       structConstraint=None,
                       allowedPins=[],
                       group=""):
        # check unique name
        pinName = self.getUniqPinName(pinName)
        p = CreateRawPin(pinName, self, dataType, PinDirection.Input)
        p.structureType = structure
        p.group = group

        if structure == PinStructure.Array:
            p.initAsArray(True)
        elif structure == PinStructure.Multi:
            p.enableOptions(PinOptions.ArraySupported)

        if foo:
            p.onExecute.connect(foo, weak=False)

        if defaultValue is not None:
            p.setDefaultValue(defaultValue)
            p.setData(defaultValue)
        else:
            p.setDefaultValue(getPinDefaultValueByType(dataType))

        if dataType == "AnyPin" and allowedPins:

            def supportedDataTypes():
                return allowedPins

            p._supportedDataTypes = p._defaultSupportedDataTypes = tuple(
                allowedPins)
            p.supportedDataTypes = supportedDataTypes
        if constraint is not None:
            p.updateConstraint(constraint)
        if structConstraint is not None:
            p.updatestructConstraint(structConstraint)
        return p

    def createOutputPin(self,
                        pinName,
                        dataType,
                        defaultValue=None,
                        structure=PinStructure.Single,
                        constraint=None,
                        structConstraint=None,
                        allowedPins=[],
                        group=""):
        pinName = self.getUniqPinName(pinName)
        p = CreateRawPin(pinName, self, dataType, PinDirection.Output)
        p.structureType = structure
        p.group = group

        if structure == PinStructure.Array:
            p.initAsArray(True)
        elif structure == PinStructure.Multi:
            p.enableOptions(PinOptions.ArraySupported)

        if defaultValue is not None:
            p.setDefaultValue(defaultValue)
            p.setData(defaultValue)
        else:
            p.setDefaultValue(getPinDefaultValueByType(dataType))

        if dataType == "AnyPin" and allowedPins:

            def supportedDataTypes():
                return allowedPins

            p.supportedDataTypes = supportedDataTypes
        if constraint is not None:
            p.updateConstraint(constraint)
        if structConstraint is not None:
            p.updatestructConstraint(structConstraint)
        return p

    def setData(self,
                pinName,
                data,
                pinSelectionGroup=PinSelectionGroup.BothSides):
        p = self.getPin(str(pinName), pinSelectionGroup)
        assert (p
                is not None), "Failed to find pin by name: {}".format(pinName)
        p.setData(data)

    def getData(self, pinName, pinSelectionGroup=PinSelectionGroup.BothSides):
        p = self.getPin(str(pinName), pinSelectionGroup)
        assert (p
                is not None), "Failed to find pin by name: {}".format(pinName)
        return p.currentData()

    def getUniqPinName(self, name):
        pinNames = [
            i.name for i in list(list(self.inputs.values())) +
            list(list(self.outputs.values()))
        ]
        return getUniqNameFromList(pinNames, name)

    def __repr__(self):
        graphName = self.graph().name if self.graph is not None else str(None)
        return "<class[{0}]; name[{1}]; graph[{2}]>".format(
            self.__class__.__name__, self.getName(), graphName)

    def call(self, name, *args, **kwargs):
        namePinOutputsMap = self.namePinOutputsMap
        namePinInputsMap = self.namePinInputsMap
        if name in namePinOutputsMap:
            p = namePinOutputsMap[name]
            if p.isExec():
                p.call(*args, **kwargs)
        if name in namePinInputsMap:
            p = namePinInputsMap[name]
            if p.isExec():
                p.call(*args, **kwargs)

    @dispatch(str, PinSelectionGroup)
    def getPin(self, name, pinsSelectionGroup=PinSelectionGroup.BothSides):
        inputs = self.inputs
        outputs = self.outputs
        if pinsSelectionGroup == PinSelectionGroup.BothSides:
            for p in list(inputs.values()) + list(outputs.values()):
                if p.name == name:
                    return p
        elif pinsSelectionGroup == PinSelectionGroup.Inputs:
            for p in list(inputs.values()):
                if p.name == name:
                    return p
        else:
            for p in list(outputs.values()):
                if p.name == name:
                    return p

    @dispatch(str)
    def getPin(self, name):
        inputs = self.inputs
        outputs = self.outputs
        for p in list(inputs.values()) + list(outputs.values()):
            if p.name == name:
                return p

    @dispatch(uuid.UUID)
    def getPin(self, uid):
        inputs = self.inputs
        outputs = self.outputs

        if uid in inputs:
            return inputs[uid]
        if uid in outputs:
            return outputs[uid]
        return None

    def postCreate(self, jsonTemplate=None):
        if jsonTemplate is not None:
            self.uid = uuid.UUID(jsonTemplate['uuid'])
            self.setName(jsonTemplate['name'])
            self.x = jsonTemplate['x']
            self.y = jsonTemplate['y']

            # set pins data
            for inpJson in jsonTemplate['inputs']:
                dynamicEnabled = PinOptions.Dynamic.value in inpJson["options"]
                if dynamicEnabled or inpJson[
                        'name'] not in self.namePinInputsMap:
                    # create custom dynamically created pins in derived classes
                    continue

                pin = self.getPin(str(inpJson['name']),
                                  PinSelectionGroup.Inputs)
                pin.deserialize(inpJson)

            for outJson in jsonTemplate['outputs']:
                dynamicEnabled = PinOptions.Dynamic.value in outJson["options"]
                if dynamicEnabled or outJson[
                        'name'] not in self.namePinOutputsMap:
                    # create custom dynamically created pins in derived classes
                    continue

                pin = self.getPin(str(outJson['name']),
                                  PinSelectionGroup.Outputs)
                pin.deserialize(outJson)

            # store data for wrapper
            if "wrapper" in jsonTemplate:
                self.__wrapperJsonData = jsonTemplate["wrapper"]

        if self.isCallable():
            self.bCallable = True

        self.autoAffectPins()

    @staticmethod
    def initializeFromFunction(foo):
        """Constructs a node from the given annotated function."""
        retAnyOpts = None
        retConstraint = None
        meta = foo.__annotations__['meta']
        returnType = returnDefaultValue = None
        returnPinOptionsToEnable = None
        returnPinOptionsToDisable = None
        retStructConstraint = None
        if foo.__annotations__['return'] is not None:
            returnType = foo.__annotations__['return'][0]
            returnDefaultValue = foo.__annotations__['return'][1]
            if len(foo.__annotations__['return']) == 3:
                if "supportedDataTypes" in foo.__annotations__['return'][2]:
                    retAnyOpts = foo.__annotations__['return'][2][
                        "supportedDataTypes"]
                if "constraint" in foo.__annotations__['return'][2]:
                    retConstraint = foo.__annotations__['return'][2][
                        "constraint"]
                if "structConstraint" in foo.__annotations__['return'][2]:
                    retStructConstraint = foo.__annotations__['return'][2][
                        "structConstraint"]
                if "enabledOptions" in foo.__annotations__['return'][2]:
                    returnPinOptionsToEnable = foo.__annotations__['return'][
                        2]["enabledOptions"]
                if "disabledOptions" in foo.__annotations__['return'][2]:
                    returnPinOptionsToDisable = foo.__annotations__['return'][
                        2]["disabledOptions"]

        nodeType = foo.__annotations__['nodeType']
        _packageName = foo.__annotations__['packageName']
        libName = foo.__annotations__['lib']
        fooArgNames = getargspec(foo).args

        @staticmethod
        def description():
            return foo.__doc__

        @staticmethod
        def category():
            return meta['Category']

        @staticmethod
        def keywords():
            return meta['Keywords']

        def constructor(self, name, **kwargs):
            NodeBase.__init__(self, name, **kwargs)

        nodeClass = type(
            foo.__name__, (NodeBase, ), {
                '__init__': constructor,
                'category': category,
                'keywords': keywords,
                'description': description
            })

        nodeClass._packageName = _packageName

        raw_inst = nodeClass(foo.__name__)
        raw_inst.lib = libName

        # this is list of 'references' outputs will be created for
        refs = []
        outExec = None

        # generate compute method from function
        def compute(self, *args, **kwargs):
            # arguments will be taken from inputs
            kwds = {}
            for i in list(self.inputs.values()):
                if not i.isExec():
                    kwds[i.name] = i.getData()
            for ref in refs:
                if not ref.isExec():
                    # reference (output) pins receive their setData method so
                    # the wrapped function can write results through them
                    kwds[ref.name] = ref.setData
            result = foo(**kwds)
            if returnType is not None:
                self.setData(str('out'), result)
            if nodeType == NodeTypes.Callable:
                outExec.call(*args, **kwargs)

        raw_inst.compute = MethodType(compute, raw_inst)

        if 'CacheEnabled' in meta:
            raw_inst.bCacheEnabled = meta['CacheEnabled']

        # create execs if callable
        if nodeType == NodeTypes.Callable:
            inputExec = raw_inst.createInputPin(DEFAULT_IN_EXEC_NAME,
                                                'ExecPin', None,
                                                raw_inst.compute)
            outExec = raw_inst.createOutputPin(DEFAULT_OUT_EXEC_NAME,
                                               'ExecPin')
            raw_inst.bCallable = True
            raw_inst.bCacheEnabled = False

        if returnType is not None:
            p = raw_inst.createOutputPin('out',
                                         returnType,
                                         returnDefaultValue,
                                         allowedPins=retAnyOpts,
                                         constraint=retConstraint,
                                         structConstraint=retStructConstraint)
            p.setData(returnDefaultValue)
            p.setDefaultValue(returnDefaultValue)
            p.initAsArray(isinstance(returnDefaultValue, list))
            if returnPinOptionsToEnable is not None:
                p.enableOptions(returnPinOptionsToEnable)
            if returnPinOptionsToDisable is not None:
                p.disableOptions(returnPinOptionsToDisable)
            if not p.isArray() and p.optionEnabled(PinOptions.ArraySupported):
                p.structureType = PinStructure.Multi

        # iterate over function arguments and create pins according to data types
        for index in range(len(fooArgNames)):
            argName = fooArgNames[index]
            pinDescriptionTuple = foo.__annotations__[argName]
            anyOpts = None
            constraint = None
            structConstraint = None
            pinOptionsToEnable = None
            pinOptionsToDisable = None
            # a ("Reference", (dataType, defaultValue[, dict])) annotation marks a
            # reference pin, exposed as an output the wrapped function writes through
            if pinDescriptionTuple[0] == "Reference":
                pinDataType = pinDescriptionTuple[1][0]
                pinDefaultValue = pinDescriptionTuple[1][1]
                pinDict = None
                if len(pinDescriptionTuple[1]) == 3:
                    pinDict = pinDescriptionTuple[1][2]

                if pinDict is not None:
                    if "supportedDataTypes" in pinDict:
                        anyOpts = pinDict["supportedDataTypes"]
                    if "constraint" in pinDict:
                        constraint = pinDict["constraint"]
                    if "structConstraint" in pinDict:
                        structConstraint = pinDict["structConstraint"]
                    if "enabledOptions" in pinDict:
                        pinOptionsToEnable = pinDict["enabledOptions"]
                    if "disabledOptions" in pinDict:
                        pinOptionsToDisable = pinDict["disabledOptions"]

                outRef = raw_inst.createOutputPin(
                    argName,
                    pinDataType,
                    allowedPins=anyOpts,
                    constraint=constraint,
                    structConstraint=structConstraint)
                outRef.initAsArray(isinstance(pinDefaultValue, list))
                outRef.setDefaultValue(pinDefaultValue)
                outRef.setData(pinDefaultValue)
                if pinOptionsToEnable is not None:
                    outRef.enableOptions(pinOptionsToEnable)
                if pinOptionsToDisable is not None:
                    outRef.disableOptions(pinOptionsToDisable)
                if not outRef.isArray() and outRef.optionEnabled(
                        PinOptions.ArraySupported):
                    outRef.structureType = PinStructure.Multi
                refs.append(outRef)
            else:
                pinDataType = pinDescriptionTuple[0]
                pinDefaultValue = pinDescriptionTuple[1]
                pinDict = None
                if len(pinDescriptionTuple) == 3:
                    pinDict = pinDescriptionTuple[2]

                if pinDict is not None:
                    if "supportedDataTypes" in pinDict:
                        anyOpts = pinDict["supportedDataTypes"]
                    if "constraint" in pinDict:
                        constraint = pinDict["constraint"]
                    if "structConstraint" in pinDict:
                        structConstraint = pinDict["structConstraint"]
                    if "enabledOptions" in pinDict:
                        pinOptionsToEnable = pinDict["enabledOptions"]
                    if "disabledOptions" in pinDict:
                        pinOptionsToDisable = pinDict["disabledOptions"]

                inp = raw_inst.createInputPin(
                    argName,
                    pinDataType,
                    allowedPins=anyOpts,
                    constraint=constraint,
                    structConstraint=structConstraint)
                inp.initAsArray(isinstance(pinDefaultValue, list))
                inp.setData(pinDefaultValue)
                inp.setDefaultValue(pinDefaultValue)
                if pinOptionsToEnable is not None:
                    inp.enableOptions(pinOptionsToEnable)
                if pinOptionsToDisable is not None:
                    inp.disableOptions(pinOptionsToDisable)
                if not inp.isArray() and inp.optionEnabled(
                        PinOptions.ArraySupported):
                    inp.structureType = PinStructure.Multi

        raw_inst.autoAffectPins()
        return raw_inst
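A hedged sketch of a concrete node built on the class above; the 'FloatPin' data type and the pin/package registration it relies on come from a PyFlow-like environment and are assumed here.

class AddNode(NodeBase):
    def __init__(self, name, uid=None):
        super(AddNode, self).__init__(name, uid)
        self.a = self.createInputPin('a', 'FloatPin', 0.0)
        self.b = self.createInputPin('b', 'FloatPin', 0.0)
        self.out = self.createOutputPin('out', 'FloatPin', 0.0)

    def compute(self, *args, **kwargs):
        # read inputs, do the work, write outputs
        self.out.setData(self.a.getData() + self.b.getData())

# node = AddNode('add1')
# node.errorOccured.connect(print, weak=False)  # report compute failures
# node.setData('a', 2.0)
# node.setData('b', 3.0)
# node.processNode()                            # caches results while bCacheEnabled
# print(node.getData('out'))                    # -> 5.0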
Exemple #31
0
class ScriptRunner(object):
    def __init__(self, report, main_dg, sidebar_dg, widget_states,
                 request_queue):
        """Initialize the ScriptRunner.

        (The ScriptRunner won't start executing until start() is called.)

        Parameters
        ----------
        report : Report
            The ReportSession's report.

        main_dg : DeltaGenerator
            The ReportSession's main DeltaGenerator.

        sidebar_dg : DeltaGenerator
            The ReportSession's sidebar DeltaGenerator.

        widget_states : streamlit.proto.Widget_pb2.WidgetStates
            The ReportSession's current widget states

        request_queue : ScriptRequestQueue
            The queue that the ReportSession is publishing ScriptRequests to.
            ScriptRunner will continue running until the queue is empty,
            and then shut down.

        """
        self._report = report
        self._main_dg = main_dg
        self._sidebar_dg = sidebar_dg
        self._request_queue = request_queue

        self._widgets = Widgets()
        self._widgets.set_state(widget_states)

        self.on_event = Signal(doc="""Emitted when a ScriptRunnerEvent occurs.

            This signal is *not* emitted on the same thread that the
            ScriptRunner was created on.

            Parameters
            ----------
            event : ScriptRunnerEvent

            exception : BaseException | None
                Our compile error. Set only for the
                SCRIPT_STOPPED_WITH_COMPILE_ERROR event.

            widget_states : streamlit.proto.Widget_pb2.WidgetStates | None
                The ScriptRunner's final WidgetStates. Set only for the
                SHUTDOWN event.
            """)

        # Set to true when we process a SHUTDOWN request
        self._shutdown_requested = False

        # Set to true while we're executing. Used by
        # maybe_handle_execution_control_request.
        self._execing = False

        # This is initialized in start()
        self._script_thread = None

    def start(self):
        """Start a new thread to process the ScriptEventQueue.

        This must be called only once.

        """
        if self._script_thread is not None:
            raise Exception("ScriptRunner was already started")

        self._script_thread = ReportThread(
            main_dg=self._main_dg,
            sidebar_dg=self._sidebar_dg,
            widgets=self._widgets,
            target=self._process_request_queue,
            name="ScriptRunner.scriptThread",
        )
        self._script_thread.start()

    def _process_request_queue(self):
        """Process the ScriptRequestQueue and then exits.

        This is run in a separate thread.

        """
        LOGGER.debug("Beginning script thread")

        while not self._shutdown_requested and self._request_queue.has_request:
            request, data = self._request_queue.dequeue()
            if request == ScriptRequest.STOP:
                LOGGER.debug("Ignoring STOP request while not running")
            elif request == ScriptRequest.SHUTDOWN:
                LOGGER.debug("Shutting down")
                self._shutdown_requested = True
            elif request == ScriptRequest.RERUN:
                self._run_script(data)
            else:
                raise RuntimeError("Unrecognized ScriptRequest: %s" % request)

        # Send a SHUTDOWN event before exiting. This includes the widget values
        # as they existed after our last successful script run, which the
        # ReportSession will pass on to the next ScriptRunner that gets
        # created.
        self.on_event.send(ScriptRunnerEvent.SHUTDOWN,
                           widget_states=self._widgets.get_state())

    def _is_in_script_thread(self):
        """True if the calling function is running in the script thread"""
        return self._script_thread == threading.current_thread()

    def maybe_handle_execution_control_request(self):
        if not self._is_in_script_thread():
            # We can only handle execution_control_request if we're on the
            # script execution thread. However, it's possible for deltas to
            # be enqueued (and, therefore, for this function to be called)
            # in separate threads, so we check for that here.
            return

        if not self._execing:
            # If the _execing flag is not set, we're not actually inside
            # an exec() call. This happens when our script exec() completes,
            # we change our state to STOPPED, and a statechange-listener
            # enqueues a new ForwardEvent
            return

        # Pop the next request from our queue.
        request, data = self._request_queue.dequeue()
        if request is None:
            return

        LOGGER.debug("Received ScriptRequest: %s", request)
        if request == ScriptRequest.STOP:
            raise StopException()
        elif request == ScriptRequest.SHUTDOWN:
            self._shutdown_requested = True
            raise StopException()
        elif request == ScriptRequest.RERUN:
            raise RerunException(data)
        else:
            raise RuntimeError("Unrecognized ScriptRequest: %s" % request)

    def _install_tracer(self):
        """Install function that runs before each line of the script."""
        def trace_calls(frame, event, arg):
            self.maybe_handle_execution_control_request()
            return trace_calls

        # Python interpreters are not required to implement sys.settrace.
        if hasattr(sys, "settrace"):
            sys.settrace(trace_calls)

    @contextmanager
    def _set_execing_flag(self):
        """A context for setting the ScriptRunner._execing flag.

        Used by maybe_handle_execution_control_request to ensure that
        we only handle requests while we're inside an exec() call
        """
        if self._execing:
            raise RuntimeError("Nested set_execing_flag call")
        self._execing = True
        try:
            yield
        finally:
            self._execing = False

    def _run_script(self, rerun_data):
        """Run our script.

        Parameters
        ----------
        rerun_data: RerunData
            The RerunData to use.

        """
        assert self._is_in_script_thread()

        LOGGER.debug("Running script %s", rerun_data)

        # Reset delta generator so it starts from index 0.
        import streamlit as st

        st._reset(self._main_dg, self._sidebar_dg)

        self.on_event.send(ScriptRunnerEvent.SCRIPT_STARTED)

        # Compile the script. Any errors thrown here will be surfaced
        # to the user via a modal dialog in the frontend, and won't result
        # in their previous report disappearing.
        try:
            # Python 3 got rid of the native execfile() command, so we read
            # the file, compile it, and exec() it. This implementation is
            # compatible with both 2 and 3.
            with open(self._report.script_path) as f:
                filebody = f.read()

            if config.get_option("runner.magicEnabled"):
                filebody = magic.add_magic(filebody, self._report.script_path)

            code = compile(
                filebody,
                # Pass in the file path so it can show up in exceptions.
                self._report.script_path,
                # We're compiling entire blocks of Python, so we need "exec"
                # mode (as opposed to "eval" or "single").
                mode="exec",
                # Don't inherit any flags or "future" statements.
                flags=0,
                dont_inherit=1,
                # Parameter not supported in Python2:
                # optimize=-1,
            )

        except BaseException as e:
            # We got a compile error. Send an error event and bail immediately.
            LOGGER.debug("Fatal script error: %s" % e)
            self.on_event.send(
                ScriptRunnerEvent.SCRIPT_STOPPED_WITH_COMPILE_ERROR,
                exception=e)
            return

        # If we get here, we've successfully compiled our script. The next step
        # is to run it. Errors thrown during execution will be shown to the
        # user as ExceptionElements.

        # Update Report.argv
        if rerun_data.argv is not None:
            argv = rerun_data.argv
            self._report.argv = rerun_data.argv
        else:
            argv = self._report.argv

        # Update the Widget singleton with the new widget_state
        if rerun_data.widget_state is not None:
            self._widgets.set_state(rerun_data.widget_state)

        if config.get_option("runner.installTracer"):
            self._install_tracer()

        # This will be set to a RerunData instance if our execution
        # is interrupted by a RerunException.
        rerun_with_data = None

        try:
            # Create fake module. This gives us a new global namespace to
            # execute the code in.
            module = _new_module("__main__")

            # Install the fake module as the __main__ module. This allows
            # the pickle module to work inside the user's code, since it now
            # can know the module where the pickled objects stem from.
            # IMPORTANT: This means we can't use "if __name__ == '__main__'" in
            # our code, as it will point to the wrong module!!!
            sys.modules["__main__"] = module

            # Make it look like command-line args were set to whatever the user
            # asked them to be via the GUI.
            # IMPORTANT: This means we can't count on sys.argv in our code ---
            # but we already knew it from the argv surgery in cli.py.
            # TODO: Remove this feature when we implement interactivity!
            # This is not robust in a multi-user environment.
            sys.argv = argv

            # Add special variables to the module's globals dict.
            module.__dict__["__file__"] = self._report.script_path

            with modified_sys_path(self._report), self._set_execing_flag():
                exec(code, module.__dict__)

        except RerunException as e:
            rerun_with_data = e.rerun_data

        except StopException:
            pass

        except BaseException as e:
            # Show exceptions in the Streamlit report.
            LOGGER.debug(e)
            import streamlit as st

            st.exception(e)  # This is OK because we're in the script thread.
            # TODO: Clean up the stack trace, so it doesn't include
            # ScriptRunner.

        finally:
            self._widgets.reset_triggers()
            self.on_event.send(ScriptRunnerEvent.SCRIPT_STOPPED_WITH_SUCCESS)

        # Use _log_if_error() to make sure we never ever ever stop running the
        # script without meaning to.
        _log_if_error(_clean_problem_modules)

        if rerun_with_data is not None:
            self._run_script(rerun_with_data)
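A sketch of how an owning session could observe the runner above; the constructor arguments are assumed to come from an existing ReportSession. The blinker signal delivers the ScriptRunnerEvent as the sender, with the extras as keyword arguments, and it fires on the script thread rather than the thread that created the runner.

def on_scriptrunner_event(event, exception=None, widget_states=None):
    if event == ScriptRunnerEvent.SCRIPT_STOPPED_WITH_COMPILE_ERROR:
        print("compile error:", exception)
    elif event == ScriptRunnerEvent.SHUTDOWN:
        print("runner shut down; final widget states received")

runner = ScriptRunner(report, main_dg, sidebar_dg, widget_states, request_queue)
runner.on_event.connect(on_scriptrunner_event, weak=False)
runner.start()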
Exemple #32
0
class Settings(abc.Mapping):
    """Specialized mapping for *Value instances"""
    def __init__(self):
        self._defaults = {}
        self._values = {}
        self._constructors = {}
        self._descriptions = {}
        self._signals = {}
        self._global_signal = Signal()

    def add(self, name, constructor, default, description=None):
        """
        Add new setting

        name:        Identifier for this setting
        constructor: Callable that takes one argument and returns a new value
                     for this setting
        default:     Initial and default value
        description: What the setting does
        """
        self._constructors[name] = constructor
        self._signals[name] = Signal()
        self._descriptions[name] = description
        self[name] = default
        self._defaults[name] = self[name]
        self._global_signal.send(self, name=name, value=self[name])

    def reset(self, name):
        """Reset setting `name` to default/initial value"""
        self[name] = self._defaults[name]

    def default(self, name):
        """Return settings default/initial value"""
        return self._defaults[name]

    def description(self, name):
        """Return setting's description"""
        return self._descriptions[name]

    def syntax(self, name):
        """Return setting's description"""
        return self._constructors[name].syntax

    def validate(self, name, value):
        """Pass `value` to `name`'s constructor and return the result"""
        if not isinstance(value, str) and isinstance(value, abc.Iterable):
            return self._constructors[name](*value)
        else:
            return self._constructors[name](value)

    def on_change(self, callback, name=None, autoremove=True):
        """
        Run `callback` every time a value changes

        If `name` is None, run `callback` when any setting changes.

        The signature of `callback` must be: (settings, name, value)

        If `autoremove` is True, stop calling `callback` once it is garbage
        collected.
        """
        if name is None:
            self._global_signal.connect(callback, weak=autoremove)
        else:
            self._signals[name].connect(callback, weak=autoremove)

    def __getitem__(self, name):
        return self._values[name]

    def __setitem__(self, name, value):
        value_ = self.validate(name, value)
        self._values[name] = value_
        self._global_signal.send(self, name=name, value=value_)
        self._signals[name].send(self, name=name, value=value_)

    def __contains__(self, name):
        return name in self._constructors

    def __iter__(self):
        return iter(self._constructors)

    def __len__(self):
        return len(self._constructors)
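A short usage sketch based only on the class above: settings are registered with a constructor and a default, changes are validated through that constructor, and callbacks receive (settings, name, value).

settings = Settings()
settings.add('max-peers', int, 50, description='Maximum number of peers')

def report_change(settings, name, value):
    print("%s is now %r" % (name, value))

settings.on_change(report_change)     # no name given: fires for every setting
settings['max-peers'] = '75'          # validated through int() -> 75
settings.reset('max-peers')           # back to the default, 50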
Exemple #33
0
class Variable(IItemBase):
    def __init__(self,
                 graph,
                 value,
                 name,
                 dataType,
                 accessLevel=AccessLevel.public,
                 structure=PinStructure.Single,
                 uid=None):
        super(Variable, self).__init__()
        # signals
        self.nameChanged = Signal(str)
        self.valueChanged = Signal(str)
        self.dataTypeChanged = Signal(str)
        self.structureChanged = Signal(str)
        self.accessLevelChanged = Signal(str)
        self.packageNameChanged = Signal(str)
        self.uuidChanged = Signal(object)
        self.killed = Signal()

        self.graph = graph

        self._name = name
        self._value = value
        self._dataType = dataType
        self._structure = structure
        self._accessLevel = accessLevel
        self._packageName = None
        self._uid = uuid.uuid4() if uid is None else uid
        assert (isinstance(self._uid, uuid.UUID))
        self.updatePackageName()
        self._uiWrapper = None

    def getWrapper(self):
        if self._uiWrapper is not None:
            return self._uiWrapper()
        return None

    def setWrapper(self, wrapper):
        if self._uiWrapper is None:
            self._uiWrapper = weakref.ref(wrapper)

    def location(self):
        return self.graph.location()

    def findRefs(self):
        """returns all getVar and setVar instances for this node
        """
        return self.graph.graphManager.findVariableRefs(self)

    def updatePackageName(self):
        self._packageName = findPinClassByType(self._dataType)._packageName

    @property
    def packageName(self):
        return self._packageName

    @packageName.setter
    def packageName(self, value):
        assert (isinstance(value, str))
        self._packageName = value
        self.packageNameChanged.send(value)

    @property
    def accessLevel(self):
        return self._accessLevel

    @accessLevel.setter
    def accessLevel(self, value):
        assert (isinstance(value, AccessLevel))
        self._accessLevel = value
        self.accessLevelChanged.send(value)

    @property
    def name(self):
        return self._name

    @name.setter
    def name(self, value):
        assert (isinstance(value, str))
        self._name = value
        self.nameChanged.send(value)

    @property
    def value(self):
        return self._value

    @value.setter
    def value(self, value):
        # type check, unless this variable is of type AnyPin
        if not self.dataType == 'AnyPin':
            supportedDataTypes = findPinClassByType(
                self.dataType).supportedDataTypes()
            if self.dataType not in supportedDataTypes:
                return

        try:
            if self._value != value:
                self._value = value
                self.valueChanged.send(value)
        except Exception:
            self._value = value
            self.valueChanged.send(value)

    @property
    def dataType(self):
        return self._dataType

    @dataType.setter
    def dataType(self, value):
        assert (isinstance(value, str))
        if value != self._dataType:
            self._dataType = value
            self.updatePackageName()
            self.value = getPinDefaultValueByType(self._dataType)
            self.dataTypeChanged.send(value)

    @property
    def structure(self):
        return self._structure

    @structure.setter
    def structure(self, value):
        assert (isinstance(value, PinStructure))
        if value != self._structure:
            self._structure = value
            if self._structure == PinStructure.Array:
                self.value = list()
            self.structureChanged.send(self._structure)

    @property
    def uid(self):
        return self._uid

    @uid.setter
    def uid(self, value):
        assert (isinstance(value, uuid.UUID))
        self.graph.vars[value] = self.graph.vars.pop(self._uid)
        self._uid = value

    def serialize(self):
        pinClass = findPinClassByType(self.dataType)

        template = Variable.jsonTemplate()

        uidString = str(self.uid)

        template['name'] = self.name
        if self.dataType == 'AnyPin':
            template['value'] = None
        else:
            template['value'] = json.dumps(self.value,
                                           cls=pinClass.jsonEncoderClass())
        template['dataType'] = self.dataType
        template['structure'] = self.structure.name
        template['accessLevel'] = self.accessLevel.name
        template['package'] = self._packageName
        template['uuid'] = uidString

        return template

    @staticmethod
    def deserialize(graph, jsonData, *args, **kwargs):
        name = jsonData['name']
        dataType = jsonData['dataType']

        if dataType != "AnyPin":
            pinClass = findPinClassByType(dataType)
            value = json.loads(jsonData['value'],
                               cls=pinClass.jsonDecoderClass())
        else:
            value = getPinDefaultValueByType("AnyPin")

        accessLevel = AccessLevel[jsonData['accessLevel']]
        structure = PinStructure[jsonData['structure']]
        uid = uuid.UUID(jsonData['uuid'])
        return Variable(graph, value, name, dataType, accessLevel, structure,
                        uid)

    @staticmethod
    def jsonTemplate():
        template = {
            'name': None,
            'value': None,
            'dataType': None,
            'accessLevel': None,
            'package': None,
            'uuid': None
        }
        return template
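A hedged sketch of observing a variable's signals; the `graph` object, the 'IntPin' data type and the pin registry that updatePackageName relies on all come from a PyFlow-like environment and are assumed here.

v = Variable(graph, 0, 'counter', 'IntPin')

def on_value_changed(value):
    print("counter ->", value)

v.valueChanged.connect(on_value_changed, weak=False)
v.value = 5          # emits valueChanged(5)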
class Axis(object):
	def __init__(self, inverted=False, scale=None):
		self.inverted = inverted
		# avoid a shared mutable default argument
		self.scale = scale if scale is not None else {}
		self.position = None
		self.running = None
		self.initializing = None
		self.initialized = None
		self.initiator_minus = None
		self.initiator_plus = None
		self.initiator_error = None
		self.temperature_warning = None
		self.onPosition = Signal()
		self.onStarted = Signal()
		self.onStopped = Signal()
		self.onInitializing = Signal()
		self.onInitialized = Signal()
		self.onInitiatorMinus = Signal()
		self.onInitiatorPlus = Signal()
		self.onInitiatorError = Signal()
		self.onTemperatureWarning = Signal()
	
	def update(self):
		last_position = self.position
		last_running = self.running
		last_initializing = self.initializing
		last_initialized = self.initialized
		last_initiator_minus = self.initiator_minus
		last_initiator_plus = self.initiator_plus
		last_initiator_error = self.initiator_error
		last_temperature_warning = self.temperature_warning

		self.do_update()

		if last_position != self.position:
			self.onPosition.send(position = self.position)

		if last_running != self.running:
			if self.running:
				self.onStarted.send()
			else:
				self.onStopped.send()

		if last_initializing != self.initializing:
			self.onInitializing.send(self, initializing = self.initializing)

		if last_initialized != self.initialized:
			self.onInitialized.send(self, initialized = self.initialized)

		if last_initiator_minus != self.initiator_minus:
			self.onInitiatorMinus.send(self, active = self.initiator_minus)

		if last_initiator_plus != self.initiator_plus:
			self.onInitiatorPlus.send(self, active = self.initiator_plus)

		if last_initiator_error != self.initiator_error:
			self.onInitiatorError.send(self, error = self.initiator_error)

		if last_temperature_warning != self.temperature_warning:
			self.onTemperatureWarning.send(self, warning = self.temperature_warning)

	def wait_for_stop(self):
		self.update()
		while self.running:
			self.update()

	def initiate(self):
		raise NotImplementedError()

	def goto_absolute(self, target, speed = None):
		raise NotImplementedError()

	def goto_relative(self, offset, speed = None):
		raise NotImplementedError()
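Axis.update() above follows a poll-and-compare approach: remember the previous state, call do_update() to refresh it, and emit a signal only for the fields that changed. Here is a minimal, self-contained sketch of the same idea with blinker; MockAxis and its fixed do_update() value are illustrative stand-ins, not part of the project above.

from blinker import Signal


class MockAxis(object):
    """Illustrative stand-in: do_update() would normally poll the motion controller."""

    def __init__(self):
        self.position = None
        self.onPosition = Signal('on-position')

    def do_update(self):
        # a real implementation would read the hardware here
        self.position = 123.4

    def update(self):
        last_position = self.position
        self.do_update()
        if last_position != self.position:
            self.onPosition.send(self, position=self.position)


def print_position(sender, position):
    print('axis moved to', position)


axis = MockAxis()
axis.onPosition.connect(print_position)
axis.update()   # prints "axis moved to 123.4"
axis.update()   # position unchanged, no signal sent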
class compound(NodeBase):
    """This node encapsulates a graph, like compound in xsi

    pins can be edited only from inside the compound
    """
    def __init__(self, name):
        super(compound, self).__init__(name)
        self.isCompoundNode = True
        self.pinExposed = Signal(object)
        self._rawGraph = None
        self._rawGraphJson = None
        self.__inputsMap = {}
        self.__outputsMap = {}
        self.bCacheEnabled = True

    @property
    def inputsMap(self):
        return self.__inputsMap

    @property
    def outputsMap(self):
        return self.__outputsMap

    @property
    def rawGraph(self):
        return self._rawGraph

    @rawGraph.setter
    def rawGraph(self, newGraph):
        assert (newGraph is not None)
        self._rawGraph = newGraph

    def syncPins(self):
        # look for pins that were added on the inner graphInputs/graphOutputs nodes
        nodeInputPins = self.namePinInputsMap
        nodeOutputPins = self.namePinOutputsMap

        graphInputsNodes = self.rawGraph.getNodesList(
            classNameFilters=['graphInputs'])
        graphInputPins = {}
        for graphInputNode in graphInputsNodes:
            for outPin in graphInputNode.orderedOutputs.values():
                graphInputPins[outPin.name] = outPin
                # create companion pin if needed
                if outPin.name not in nodeInputPins:
                    self.onGraphInputPinCreated(outPin)

        graphOutputNodes = self.rawGraph.getNodesList(
            classNameFilters=['graphOutputs'])
        graphOutputPins = {}
        for graphOutputNode in graphOutputNodes:
            for inPin in graphOutputNode.orderedInputs.values():
                graphOutputPins[inPin.name] = inPin
                # create companion pin if needed
                if inPin.name not in nodeOutputPins:
                    self.onGraphOutputPinCreated(inPin)

        for nodeInputPinName, nodeInputPin in nodeInputPins.items():
            if nodeInputPinName not in graphInputPins:
                if nodeInputPin in self.__inputsMap:
                    nodeInputPin.kill()
                    clearSignal(nodeInputPin.killed)
                    self.__inputsMap.pop(nodeInputPin)

        for nodeOutputPinName, nodeOutputPin in nodeOutputPins.items():
            if nodeOutputPinName not in graphOutputPins:
                if nodeOutputPin in self.__outputsMap:
                    nodeOutputPin.kill()
                    clearSignal(nodeOutputPin.killed)
                    self.__outputsMap.pop(nodeOutputPin)

    def Tick(self, delta):
        self.syncPins()
        self.rawGraph.Tick(delta)
        super(compound, self).Tick(delta)

    def setName(self, name):
        super(compound, self).setName(name)
        if self.rawGraph is not None:
            self.rawGraph.name = self.getName()

    @staticmethod
    def category():
        return 'SubGraphs'

    @staticmethod
    def keywords():
        return []

    @staticmethod
    def description():
        return 'Encapsulate a graph inside a node'

    def serialize(self):
        default = NodeBase.serialize(self)
        default['graphData'] = self.rawGraph.serialize()
        return default

    def onGraphInputPinCreated(self, outPin):
        """Reaction when pin added to graphInputs node

        :param outPin: output pin on graphInputs node
        :type outPin: :class:`~PyFlow.Core.PinBase.PinBase`
        """

        # add companion pin for graphInputs node's output pin
        subgraphInputPin = self.createInputPin(outPin.name,
                                               outPin.__class__.__name__,
                                               outPin.defaultValue(),
                                               outPin.call,
                                               outPin.structureType,
                                               outPin.constraint,
                                               outPin.structConstraint,
                                               group=outPin.owningNode().name)
        if subgraphInputPin.isAny():
            subgraphInputPin.supportedDataTypes = outPin.supportedDataTypes
            subgraphInputPin.enableOptions(PinOptions.AllowAny
                                           | PinOptions.DictElementSupported)

        outPin.owningNode().constraints[outPin.constraint].append(
            subgraphInputPin)
        self.constraints[outPin.constraint].append(outPin)

        outPin.owningNode().structConstraints[outPin.structConstraint].append(
            subgraphInputPin)
        self.structConstraints[outPin.structConstraint].append(outPin)

        self.__inputsMap[subgraphInputPin] = outPin
        pinAffects(subgraphInputPin, outPin)

        # keep the companion pin's name in sync; connect non-weak so the closure is not garbage collected

        def forceRename(name):
            subgraphInputPin.setName(name, force=True)

        outPin.nameChanged.connect(forceRename, weak=False)

        # broadcast for UI wrapper class
        self.pinExposed.send(subgraphInputPin)

    def onGraphOutputPinCreated(self, inPin):
        """Reaction when pin added to graphOutputs node

        :param inPin: input pin on graphOutputs node
        :type inPin: :class:`~PyFlow.Core.PinBase.PinBase`
        """

        # add companion pin for graphOutputs node's input pin
        subgraphOutputPin = self.createOutputPin(inPin.name,
                                                 inPin.__class__.__name__,
                                                 inPin.defaultValue(),
                                                 inPin.structureType,
                                                 inPin.constraint,
                                                 inPin.structConstraint,
                                                 group=inPin.owningNode().name)
        if subgraphOutputPin.isAny():
            subgraphOutputPin.supportedDataTypes = inPin.supportedDataTypes
            subgraphOutputPin.enableOptions(PinOptions.AllowAny
                                            | PinOptions.DictElementSupported)

        if subgraphOutputPin.isExec():
            inPin.onExecute.connect(subgraphOutputPin.call)

        inPin.owningNode().constraints[inPin.constraint].append(
            subgraphOutputPin)
        self.constraints[inPin.constraint].append(inPin)

        inPin.owningNode().structConstraints[inPin.structConstraint].append(
            subgraphOutputPin)
        self.structConstraints[inPin.structConstraint].append(inPin)

        self.__outputsMap[subgraphOutputPin] = inPin
        pinAffects(inPin, subgraphOutputPin)

        # keep the companion pin's name in sync; connect non-weak so the closure is not garbage collected
        def forceRename(name):
            subgraphOutputPin.setName(name, force=True)

        inPin.nameChanged.connect(forceRename, weak=False)

        # broadcast for UI wrapper class
        self.pinExposed.send(subgraphOutputPin)

    def kill(self, *args, **kwargs):
        self.rawGraph.remove()
        super(compound, self).kill(*args, **kwargs)

    def postCreate(self, jsonTemplate=None):
        super(compound, self).postCreate(jsonTemplate=jsonTemplate)

        if jsonTemplate is not None and 'graphData' in jsonTemplate:
            parentGraph = self.graph().graphManager.findGraph(
                jsonTemplate['owningGraphName'])
            self.rawGraph = GraphBase(self.name,
                                      self.graph().graphManager, parentGraph)
            # recreate graph contents
            jsonTemplate['graphData']['name'] = self.getName()
            self.rawGraph.populateFromJson(jsonTemplate['graphData'])

            self.syncPins()

            inputsMap = self.namePinInputsMap
            for inpJson in jsonTemplate['inputs']:
                inputsMap[inpJson['name']].uid = uuid.UUID(inpJson['uuid'])

            outputsMap = self.namePinOutputsMap
            for outJson in jsonTemplate['outputs']:
                outputsMap[outJson['name']].uid = uuid.UUID(outJson['uuid'])
        else:
            self.rawGraph = GraphBase(self.name,
                                      self.graph().graphManager,
                                      self.graph().graphManager.activeGraph())

    def addNode(self, node):
        self.rawGraph.addNode(node)

    def autoAffectPins(self):
        pass

    def compute(self, *args, **kwargs):
        # put data from inner graph pins to outer compound node output companions
        for outputPin, innerPin in self.__outputsMap.items():
            outputPin.setData(innerPin.getData())
            outputPin.setClean()
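The compound node above connects its forceRename closures with weak=False: those callbacks are local functions with no other references, so a default (weak) connection would let them be garbage collected and silently dropped. The following is a small, self-contained sketch of that non-weak connection pattern with blinker; MiniPin and make_companion are illustrative names only (the project above uses its own Signal class exposing a similar connect(func, weak=...) interface).

from blinker import Signal


class MiniPin(object):
    """Tiny illustrative pin with a rename signal."""

    def __init__(self, name):
        self.name = name
        self.nameChanged = Signal('name-changed')

    def setName(self, name):
        self.name = name
        self.nameChanged.send(self, name=name)


def make_companion(inner):
    companion = MiniPin(inner.name)

    def forceRename(sender, name):
        # keep the companion pin's name in sync with the inner pin
        companion.name = name

    # weak=False keeps the local closure alive; a weak connection would let it be
    # garbage collected as soon as make_companion() returns
    inner.nameChanged.connect(forceRename, weak=False)
    return companion


inner = MiniPin('value')
companion = make_companion(inner)
inner.setName('result')
print(companion.name)   # -> "result"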
Exemple #36
0
class AnyPin(PinBase):
    """**Abstract Pin -- "AnyPin"**
    
    This Pin Type is an abstraction of Pins, it is a Pin that will act as any other defined Pin.
    This type of Pin allow to create abstract Nodes that can operate in more than one dataType.

    By default AnyPin non Initialized will be marked as error, as Pyflow can't know what is inside.
    This Error can be avoided by enabling :py:attr:`PyFlow.Core.Common.PinOptions.AllowAny`. Thas how NonTyped Lists are made.

    By default :py:attr:`PyFlow.Core.Common.PinOptions.ChangeTypeOnConnection` is enabled, and that means that it will change
    its internal dataType to the new dataType provided by connection or user Initialization. If disabled, pin will not allow changes.
    
    Is important to define a bunch of allowedDataTypes on pin creation, this will restrict what pins can be connected and what no,
    so even being a AnyPin, it can be defined to allow for example only ["FloatPin","IntPin"] so only those could be connected.

    :param self.singleInit: can be set to True, so once initialized, it will never be able to change dataType
    :param self.checkForErrors: can be set To False so it will never try to find errors
    
    Signals:
        * **typeChanged** : Fired when dataType has change
    
    """

    def __init__(self, name, owningNode, direction, **kwargs):
        """        
        :param name: Pin name
        :type name: string
        :param owningNode: Owning Node
        :type owningNode: :py:class:`PyFlow.Core.NodeBase.NodeBase`
        :param direction: PinDirection , can be input or output
        :type direction: :py:class:`PyFlow.Core.Common.PinDirection`
        """
        super(AnyPin, self).__init__(name, owningNode, direction, **kwargs)
        self.typeChanged = Signal(str)
        self.dataTypeBeenSet = Signal()
        self.setDefaultValue(None)
        self._isAny = True
        # if True, setType and setDefault will work only once
        self.singleInit = False
        self.checkForErrors = True
        self.enableOptions(PinOptions.ChangeTypeOnConnection)
        self._defaultSupportedDataTypes = self._supportedDataTypes = tuple([pin.__name__ for pin in getAllPinClasses() if pin.IsValuePin()])
        self.canChange = True
        self.super = None
        self.prevDataType = None
        self._lastError2 = None
        
    @PinBase.dataType.getter
    def dataType(self):
        return self.activeDataType

    @staticmethod
    def supportedDataTypes():
        """Tuple with all the Defined value Pin Classes
        """
        return tuple([pin.__name__ for pin in getAllPinClasses() if pin.IsValuePin()])

    @staticmethod
    def IsValuePin():
        return True

    @staticmethod
    def defColor():
        return (200, 200, 200, 255)

    @staticmethod
    def color():
        return (200, 200, 200, 255)

    @staticmethod
    def pinDataTypeHint():
        return 'AnyPin', None

    @staticmethod
    def internalDataStructure():
        return type(None)

    @staticmethod
    def processData(data):
        return data

    def enableOptions(self, *options):
        super(AnyPin, self).enableOptions(*options)
        if not self.optionEnabled(PinOptions.ChangeTypeOnConnection):
            self.super = AnyPin
        self.updateError([])

    def disableOptions(self, *options):
        super(AnyPin, self).disableOptions(*options)
        if not self.optionEnabled(PinOptions.ChangeTypeOnConnection):
            self.super = AnyPin        
        self.updateError([])

    def setTypeFromData(self, data):
        """Initialize DataType from actual data
        
        Iterates over all defined pins and compares type(data) with Pin.internalDataStructure() to find a valid dataType

        :param data: Actual data to search Pin/dataType from
        """
        for pin in [pin for pin in getAllPinClasses() if pin.IsValuePin()]:
            pType = pin.internalDataStructure()
            if type(data) == pType:
                if pin.__name__ != self.activeDataType:
                    if self.optionEnabled(PinOptions.ChangeTypeOnConnection):
                        traverseConstrainedPins(self, lambda x: self.updateOnConnectionCallback(x, pin.__name__, True, None))
                        self.owningNode().checkForErrors()
                break

    def updateError(self, traversed=[], updateNeis=False):
        """Check is Pin dataType is "AnyPin" and if it is, checks if it can change Type on conection, and if it can, marked as error.
        Is a iterative Function that traverses conected and constrained Pins
        
        :param traversed: Current Iterated neighbours, defaults to []
        :type traversed: list, optional
        :param updateNeis: Try to update Constrained Pins parents error display, it can be slow so use carefully, defaults to False
        :type updateNeis: bool, optional
        """
        if not self.checkForErrors:
            return
        nodePins = set([self])
        if self.constraint:
            nodePins = set(self.owningNode().constraints[self.constraint])
        for connectedPin in getConnectedPins(self):
            if connectedPin.isAny():
                nodePins.add(connectedPin)
        for neighbor in nodePins:
            if neighbor not in traversed:
                if all([neighbor.activeDataType == "AnyPin",
                        neighbor.canChangeTypeOnConection([], neighbor.optionEnabled(PinOptions.ChangeTypeOnConnection), []) or not neighbor.optionEnabled(PinOptions.AllowAny)]) :
                    neighbor.setError("AnyPin Not Initialized")
                    neighbor.super = None
                else:
                    neighbor.clearError()
                    if neighbor.activeDataType == "AnyPin":
                        neighbor.super = AnyPin
                traversed.append(neighbor)
                if neighbor.isAny():
                    neighbor.updateError(traversed, updateNeis)
                if updateNeis:
                    neighbor.owningNode().checkForErrors()

    def serialize(self):
        """Stores The data to Json
        
        Appends current value and currentDataType to default :py:func:`PyFlow.Core.PinBase.PinBase.serialize` method
        :returns: json data
        :rtype: {dict}
        """
        dt = super(AnyPin, self).serialize()
        constrainedType = self.activeDataType
        if constrainedType != self.__class__.__name__:
            pinClass = findPinClassByType(constrainedType)
            # serialize with active type's encoder
            dt['value'] = json.dumps(self.currentData(), cls=pinClass.jsonEncoderClass())
            dt['currDataType'] = constrainedType
        return dt

    def deserialize(self, jsonData):
        """Reconstruct Pin from saved jsonData

        :param jsonData: Input Json Saved data
        :type jsonData: dict
        """
        super(AnyPin, self).deserialize(jsonData)
        if "currDataType" in jsonData:
            self.setType(jsonData["currDataType"])

        pinClass = findPinClassByType(self.activeDataType)
        try:
            self.setData(json.loads(jsonData['value'], cls=pinClass.jsonDecoderClass()))
        except Exception:
            # fall back to the default value if the stored data can't be decoded
            self.setData(self.defaultValue())

        self.updateError([])

    def pinConnected(self, other):
        """Pin Connection been Made

        We update Error here to search for nonInitialized Pins in current Node, and in connected Nodes if initializing

        :param other: Pin that has been conected to this Pin.
        :type other: :py:class:`PyFlow.Core.PinBase.PinBase`
        """
        super(AnyPin, self).pinConnected(other)
        self._lastError2 = self._lastError
        self.updateError([],self.activeDataType == "AnyPin" or self.prevDataType == "AnyPin")
        self.owningNode().checkForErrors()

    def aboutToConnect(self, other):
        """Function called before real connection but after :py:func:`PyFlow.Core.Common.canConnectPins` returns True
        
        We traverse conected and constrained Pins here to search if we can change Pin dataType, and if we can we traverse again
        changing all the necesary datatypes in conected Graph Pins.

        :param other: Pin that will be conected to this Pin.
        :type other: :py:class:`PyFlow.Core.PinBase.PinBase`
        """
        if self.canChangeTypeOnConection([], self.optionEnabled(PinOptions.ChangeTypeOnConnection), []):
            dataType = other.dataType
            traverseConstrainedPins(self, lambda pin: self.updateOnConnectionCallback(pin, dataType, False, other))
        super(AnyPin, self).aboutToConnect(other)

    def pinDisconnected(self, other):
        """Pin has been disconnected
        
        We update the error state here and check for errors in the owning node

        :param other: Pin that has been disconnected from this pin.
        :type other: :py:class:`PyFlow.Core.PinBase.PinBase`
        """
        super(AnyPin, self).pinDisconnected(other)
        self.updateError([],self.activeDataType == "AnyPin" or self.prevDataType == "AnyPin")
        self._lastError2 = self._lastError
        if self.activeDataType == "AnyPin" and self._lastError2 == None:
            self.prevDataType = "AnyPin"
        else:
            self.prevDataType = None        
        self.owningNode().checkForErrors()

    def updateOnConnectionCallback(self, pin, dataType, init=False, other=None):
        """Method Called in traverse function :py:func:`PyFlow.Core.Common.traverseConstrainedPins`
        
        This Function is called for all the conected Pins to the initial Pin calling it. 
        Here we traverse all pins and call :py:func:`AnyPin.setType` for all of them.
        We also intersect all the conected pins allowedDataTypes.
        :param pin: Pin to perform operations on
        :type pin: :py:class:`AnyPin`
        :param dataType: New DataType to apply
        :type dataType: string
        :param init: If initializing AnyPin can have same strenght as other types, if not, "AnyPin" Pin will always be weaker than other dataType, if, defaults to False
        :type init: bool, optional
        :param other: other Pin to heredate stuff from him, defaults to None
        :type other: :py:class:`PyFlow.Core.PinBase.PinBase`, optional
        """
        free = pin.checkFree([])
        if free:
            if (dataType == "AnyPin" and not init):
                if not other:
                    return
                else:
                    if pin.dataType != "AnyPin" and pin.dataType in other.allowedDataTypes([], other._supportedDataTypes) and other.canChangeTypeOnConection([], other.optionEnabled(PinOptions.ChangeTypeOnConnection), []):
                        dataType = pin.dataType

            if any([dataType in pin.allowedDataTypes([], pin._supportedDataTypes),
                    dataType == "AnyPin",
                    (pin.checkFree([], False) and dataType in pin.allowedDataTypes([], pin._defaultSupportedDataTypes, defaults=True))]):
                a = pin.setType(dataType)               
                if a:
                    if other:
                        if pin.optionEnabled(PinOptions.ChangeTypeOnConnection):
                            pin._supportedDataTypes = other.allowedDataTypes([], other._supportedDataTypes)
                    if dataType == "AnyPin":
                        if pin.optionEnabled(PinOptions.ChangeTypeOnConnection):
                            pin._supportedDataTypes = pin._defaultSupportedDataTypes
                            pin.supportedDataTypes = lambda: pin._supportedDataTypes                          

    def checkFree(self, checked=[], selfChek=True):
        """Recursive Function to find if all connected Pins are of type :py:class:`AnyPin` and canChange On conection,
        so basically it checks if a Pin is free to change its dataType to another one

        :param checked: Already visited Pins, defaults to []
        :type checked: list, optional
        :param selfChek: Define if check Pin itself or no, this is useful when trying to override a conection that is in fact
                        the only conection that make hole graphed nodes not be able to change Type, defaults to True
        :type selfChek: bool, optional
        :returns: True if Pin can change current dataType
        :rtype: {bool}
        """
        if self.constraint is None or self.dataType == self.__class__.__name__:
            return True
        else:
            con = []
            if selfChek:
                free = not self.hasConnections()
                if not free:
                    for c in getConnectedPins(self):
                        if c not in checked:
                            con.append(c)
            else:
                free = True
                checked.append(self)
            canChange = self.canChangeTypeOnConection([], self.optionEnabled(PinOptions.ChangeTypeOnConnection), [])                
            free = canChange
            for port in self.owningNode().constraints[self.constraint] + con:
                if port not in checked:
                    checked.append(port)
                    if not isinstance(port, AnyPin):
                        free = False
                    elif free:
                        free = port.checkFree(checked)
            return free

    def allowedDataTypes(self, checked=[], dataTypes=[], selfChek=True, defaults=False):
        """Recursive Function to intersect allowedDatatypes of all conected pins.

        :param checked: Already visited Pins, defaults to []
        :type checked: list, optional
        :param dataTypes: Intersected dataTypes, defaults to []
        :type dataTypes: list, optional
        :param selfChek: Define if check Pin itself or no, this is useful when trying to override a conection that is in fact
                        the only conection that make hole graphed nodes not be able to change Type, defaults to True
        :type selfChek: bool, optional
        :param defaults: Define if we are intersecting current allowedDataTypes, or default (as in definition of node) allowedDataTypes, defaults to False
        :type defaults: bool, optional
        :returns: List contatining all the intersected dataTypes
        :rtype: {list}
        """
        if not self.optionEnabled(PinOptions.ChangeTypeOnConnection) and self.activeDataType == "AnyPin":
            return self._defaultSupportedDataTypes
        con = []
        neis = []
        if selfChek:
            if self.hasConnections():
                for c in getConnectedPins(self):
                    if c not in checked:
                        con.append(c)
        else:
            checked.append(self)
        if self.constraint:
            neis = self.owningNode().constraints[self.constraint]
        for port in neis + con:
            if port not in checked:
                checked.append(port)
                if not defaults:
                    dataTypes = list(set(dataTypes) & set(port._supportedDataTypes))
                else:
                    dataTypes = list(set(dataTypes) & set(port._defaultSupportedDataTypes))
                dataTypes = port.allowedDataTypes(checked, dataTypes, selfChek=True, defaults=defaults)
        return dataTypes

    def initType(self, dataType, initializing=False):
        """Same as :py:func:`AnyPin.aboutToConnect` but instead of using another Pin using a dataType name
        
        :param dataType: New DataType we want the pin to became
        :type dataType: string
        :param initializing:  If initializing AnyPin can have same strenght as other types, if not, "AnyPin" Pin will always be weaker than other dataType, if, defaults to False
        :type initializing: bool, optional
        :returns: True if it can change to the asked dataType
        :rtype: {bool}
        """
        if self.canChangeTypeOnConection([], self.optionEnabled(PinOptions.ChangeTypeOnConnection), []):
            traverseConstrainedPins(self, lambda pin: self.updateOnConnectionCallback(pin, dataType, initializing))
            self._lastError2 = self._lastError
            self.updateError([],self.activeDataType == "AnyPin" or self.prevDataType == "AnyPin")
            self.owningNode().checkForErrors()
            return True
        return False

    def setType(self, dataType):
        """Here is where :py:class:`AnyPin` heredates all the properties from other defined dataTypes and act like those
        
        :param dataType: New DataType
        :type dataType: string
        :returns: True if succes setting dataType
        :rtype: {bool}
        """
        if self.activeDataType == dataType:
            return True

        if not self.optionEnabled(PinOptions.ChangeTypeOnConnection):
            return False

        if self.activeDataType != self.__class__.__name__ and self.singleInit:
            # Marked as single init and the type has already been set. Skip
            return False

        otherClass = findPinClassByType(dataType)
        if dataType != "AnyPin":
            self.super = otherClass
        else:
            self.super = None

        if self.activeDataType == "AnyPin" and self._lastError2 == None:
            self.prevDataType = "AnyPin"
        else:
            self.prevDataType = None

        self.activeDataType = dataType
        if not self.isArray():
            self.setData(getPinDefaultValueByType(self.activeDataType))
        else:
            self.setData([])
        self.setDefaultValue(self._data)

        self.color = otherClass.color
        self.dirty = True
        self.jsonEncoderClass = otherClass.jsonEncoderClass
        self.jsonDecoderClass = otherClass.jsonDecoderClass
        self.supportedDataTypes = otherClass.supportedDataTypes
        self._supportedDataTypes = otherClass.supportedDataTypes()
        self.typeChanged.send(self.activeDataType)
        self.dataBeenSet.send(self)

        return True
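A core idea in AnyPin.allowedDataTypes() above is that the set of types an AnyPin may still become is the intersection of what all connected and constrained pins support. Below is a minimal illustrative sketch of that intersection step on plain tuples of type names; the function name and sample data are hypothetical, not PyFlow API.

def intersect_supported_types(supported_lists):
    """Return the dataTypes supported by every pin in the given collections."""
    if not supported_lists:
        return set()
    allowed = set(supported_lists[0])
    for supported in supported_lists[1:]:
        allowed &= set(supported)
    return allowed


print(intersect_supported_types([
    ('FloatPin', 'IntPin', 'StringPin'),   # e.g. this pin's own supported types
    ('FloatPin', 'IntPin'),                # a connected pin
    ('IntPin',),                           # a constrained neighbour
]))   # -> {'IntPin'}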
Exemple #37
0
class EditorHistory(object):

    """docstring for EditorHistory."""
    def __init__(self, app):

        self.statePushed = Signal(object)
        self.stateRemoved = Signal(object)
        self.stateSelected = Signal(object)

        self.app = app
        self.stack = list()
        try:
            self._capacity = int(ConfigManager().getPrefsValue("PREFS", "General/HistoryDepth"))
        except Exception:
            # fall back to a default depth if the preference is missing or invalid
            self._capacity = 10

        self.activeState = None

    def shutdown(self):
        clearSignal(self.statePushed)
        clearSignal(self.stateRemoved)
        clearSignal(self.stateSelected)
        clearList(self.stack)

    def getStack(self):
        return self.stack

    def count(self):
        return len(self.stack)

    @property
    def capacity(self):
        return self._capacity

    @capacity.setter
    def capacity(self, value):
        self._capacity = value
        if value < len(self.stack):
            for i in range(len(self.stack) - value):
                state = self.stack.pop()
                self.stateRemoved.send(state)

    def clear(self):
        clearList(self.stack)

    def stateIndex(self, state):
        if state in self.stack:
            return self.stack.index(state)
        return -1

    @property
    def currentIndex(self):
        if self.activeState is not None:
            return self.stateIndex(self.activeState)
        return -1

    def push(self, edState):

        if self.currentIndex < self.count() - 1:
            nextState = None
            while True:
                index = self.count() - 1
                nextState = self.stack[index]
                if nextState == self.activeState:
                    break
                state = self.stack.pop()
                self.stateRemoved.send(state)

        self.stack.append(edState)

        if len(self.stack) >= self.capacity:
            poppedState = self.stack.pop(0)
            self.stateRemoved.send(poppedState)

        self.statePushed.send(edState)
        self.activeState = edState
        self.stateSelected.send(edState)

    def selectState(self, state):
        for st in self.stack:
            if state == st:
                self.app.loadFromData(st.editorState)
                self.activeState = st
                self.stateSelected.send(st)
                break

    def select(self, index):
        index = clamp(index, 0, self.count() - 1)

        if index == self.currentIndex:
            return

        if len(self.stack) == 0:
            return

        stateData = self.stack[index].editorState

        self.app.loadFromData(stateData)

        state = self.stack[index]
        self.activeState = state
        self.stateSelected.send(state)

    def saveState(self, text):
        self.push(_EditorState(text))

    def undo(self):
        if self.currentIndex > 0:
            self.select(self.currentIndex - 1)

    def redo(self):
        self.select(self.currentIndex + 1)
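EditorHistory above keeps a capacity-bounded stack of editor states and announces pushes and evictions through signals. Here is a minimal, self-contained sketch of that bounded-stack-with-signals idea using blinker; MiniHistory and its handlers are illustrative names, not the project's API.

from blinker import Signal


class MiniHistory(object):
    """Illustrative bounded history that emits signals on push and eviction."""

    def __init__(self, capacity=3):
        self.capacity = capacity
        self.stack = []
        self.statePushed = Signal('state-pushed')
        self.stateRemoved = Signal('state-removed')

    def push(self, state):
        self.stack.append(state)
        if len(self.stack) > self.capacity:
            # evict the oldest state once the capacity is exceeded
            removed = self.stack.pop(0)
            self.stateRemoved.send(self, state=removed)
        self.statePushed.send(self, state=state)


def log_removed(sender, state):
    print('evicted:', state)


history = MiniHistory(capacity=2)
history.stateRemoved.connect(log_removed)
for s in ('a', 'b', 'c'):
    history.push(s)   # pushing 'c' evicts 'a'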
Exemple #38
0
class TransmissionRPC():
    """
    Low-level AsyncIO Transmission RPC communication

    This class handles connecting to a Transmission daemon via the RPC
    interface.  It does not implement the RPC protocol, only basic things like
    authentication, sending requests and receiving responses.  High-level RPCs
    are done in the *API classes.
    """
    def __init__(self,
                 host='localhost',
                 port=9091,
                 *,
                 tls=False,
                 user='',
                 password='',
                 proxy='',
                 path='/transmission/rpc',
                 enabled=True):
        self.host = host
        self.port = port
        self.path = path
        self.tls = tls
        self.user = user
        self.password = password
        self.proxy = proxy
        self._headers = {'content-type': 'application/json'}
        self._session = None
        self._enabled_event = asyncio.Event()
        self.enabled = enabled
        self._request_lock = asyncio.Lock()
        self._connecting_lock = asyncio.Lock()
        self._connection_tested = False
        self._connection_exception = None
        self._timeout = TIMEOUT
        self._version = None
        self._rpcversion = None
        self._rpcversionmin = None
        self._on_connecting = Signal()
        self._on_connected = Signal()
        self._on_disconnected = Signal()
        self._on_error = Signal()

    def on(self, signal, callback, autoremove=True):
        """
        Register `callback` for `signal`

        signal: 'connecting', 'connected', 'disconnected' or 'error'
        callback: a callable that receives this instance as a positional
                  argument and, in case of the 'error' signal, the exception as
                  a keyword argument with the name 'error'

        Callbacks are automatically unsubscribed when they are
        garbage-collected.
        """
        try:
            sig = getattr(self, '_on_' + signal)
        except AttributeError:
            raise ValueError('Unknown signal: {!r}'.format(signal))
        else:
            if not isinstance(sig, Signal):
                raise ValueError('Unknown signal: {!r}'.format(signal))
            else:
                log.debug('Registering %r for %r event', callback, signal)
                sig.connect(callback, weak=autoremove)

    @property
    def version(self):
        """Version of the Transmission daemon or None if not connected"""
        return self._version

    @property
    def rpcversion(self):
        """RPC version of the Transmission daemon or None if not connected"""
        return self._rpcversion

    @property
    def rpcversionmin(self):
        """Oldest RPC version supported by Transmission daemon or None if not connected"""
        return self._rpcversionmin

    @property
    def host(self):
        """
        Hostname or IP of the Transmission RPC interface

        Setting this property calls disconnect().
        """
        return self._host

    @host.setter
    def host(self, host):
        self._host = str(host) if host is not None else 'localhost'
        asyncio.ensure_future(self.disconnect('Changing host: %r' %
                                              self._host))

    @property
    def path(self):
        """
        Path of the Transmission RPC interface

        Setting this property calls disconnect().
        """
        return self._path

    @path.setter
    def path(self, path):
        if path is None:
            path = '/transmission/rpc'
        elif not path or path[0] != '/':
            path = '/' + path
        self._path = path
        asyncio.ensure_future(self.disconnect('Changing path: %r' %
                                              self._path))

    @property
    def port(self):
        """
        Port of the Transmission RPC interface

        Setting this property calls disconnect().
        """
        return self._port

    @port.setter
    def port(self, port):
        self._port = int(port) if port is not None else 9091
        asyncio.ensure_future(self.disconnect('Changing port: %r' %
                                              self._port))

    @property
    def user(self):
        """
        Username for authenticating to the Transmission RPC interface or empty string

        Setting this property calls disconnect().
        """
        return self._user

    @user.setter
    def user(self, user):
        self._user = str(user) if user is not None else ''
        asyncio.ensure_future(self.disconnect('Changing user: %r' %
                                              self._user))

    @property
    def password(self):
        """
        Password for authenticating to the Transmission RPC interface or empty string

        Setting this property calls disconnect().
        """
        return self._password

    @password.setter
    def password(self, password):
        self._password = str(password) if password is not None else ''
        asyncio.ensure_future(
            self.disconnect('Changing password: %r' % self._password))

    @property
    def tls(self):
        """
        Whether to use HTTPS for connecting to the Transmission RPC interface

        Setting this property calls disconnect().
        """
        return self._tls

    @tls.setter
    def tls(self, tls):
        self._tls = bool(tls) if tls is not None else False
        asyncio.ensure_future(self.disconnect('Changing tls: %r' % self._tls))

    @property
    def url(self):
        """
        URL of the Transmission RPC interface

        Setting or getting this property sets or gets the following properties: tls, user,
        password, host, port, path

        Missing parts are filled in with defaults, e.g. "example.org:1234" results in
        "http://example.org:1234/transmission/rpc".

        While user and password are supported when setting ("user:password@localhost"),
        they are not included in the return value for security reasons.  See the
        url_unsafe property.
        """
        return '%s://%s:%d%s' % ('https' if self.tls else 'http', self.host,
                                 self.port, self.path)

    @url.setter
    def url(self, url):
        if url is None:
            url = URL('http://localhost:9091/transmission/rpc')
        else:
            url = URL(url)
        self._user = url.user or ''
        self._password = url.password or ''
        if url.scheme == 'https':
            self._tls = True
        elif url.scheme == 'http':
            self._tls = False
        else:
            raise ValueError('Invalid scheme: %r' % (url.scheme, ))
        self._host = url.host
        self._port = int(url.port) if url.port is not None else 9091
        self._path = url.path if url.path is not None else '/transmission/rpc'
        asyncio.ensure_future(self.disconnect('Changing url: %r' % self.url))

    @property
    def url_unsafe(self):
        """URL of the Transmission RPC interface with user and password if given"""
        if self.user or self.password:
            return '%s://%s:%s@%s:%d%s' % ('https' if self.tls else 'http',
                                           self.user, self.password, self.host,
                                           self.port, self.path)
        else:
            return self.url

    @property
    def proxy(self):
        """URL of a SOCKS5, SOCKS4 or HTTP proxy"""
        return self._proxy

    @proxy.setter
    def proxy(self, proxy):
        if proxy:
            try:
                import aiohttp_socks
            except ImportError:
                raise ValueError('Missing extra: proxy (aiohttp_socks)')
            self._connector = aiohttp_socks.ProxyConnector.from_url(proxy)
            self._proxy = URL(proxy)
        else:
            self._connector = None
            self._proxy = URL('')
        asyncio.ensure_future(
            self.disconnect('Changing proxy: %r' % self._proxy))

    @property
    def timeout(self):
        """Number of seconds to try to connect before giving up"""
        return self._timeout

    @timeout.setter
    def timeout(self, timeout):
        self._timeout = float(timeout)

    @property
    def enabled(self):
        """
        Whether requests should connect

        If this is set to False, requests will wait for it to be set to True.
        This allows you to block any connection attempts until the connection
        parameters (host, user, password, etc) are specified to prevent any
        unwarranted error messages.
        """
        return self._enabled_event.is_set()

    @enabled.setter
    def enabled(self, enabled):
        if enabled and not self.enabled:
            log.debug('Enabling %r', self)
            self._enabled_event.set()
        elif not enabled and self.enabled:
            log.debug('Disabling %r', self)
            self._enabled_event.clear()
            if self.connected:
                asyncio.ensure_future(self.disconnect())

    @property
    def connected(self):
        """Return True if connected, False otherwise"""
        return (self._session is not None and not self._session.closed
                and self._connection_tested)

    async def connect(self):
        """
        Connect to running daemon

        If the `enabled` property is set to False, this method blocks until
        `enabled` is set to True.

        Raises RPCError, ConnectionError or AuthError.
        """
        log.debug('Connecting to %s (timeout=%ss)', self.url, self.timeout)
        self._on_connecting.send(self)

        if self._connecting_lock.locked():
            if self._connection_exception is not None:
                # The other connect() call failed
                log.debug('Found connection error: %r',
                          self._connection_exception)
                raise self._connection_exception

            log.debug('Connection is already being established - Waiting ...')
            try:
                async with async_timeout.timeout(self.timeout):
                    await self._enabled_event.wait()
            except asyncio.TimeoutError:
                raise TimeoutError(self.timeout, self.url)
            else:
                if self.connected:
                    log.debug('Connection is up: %r', self.url)
                    return

        async with self._connecting_lock:
            log.debug('Acquired connect() lock')

            if self.connected:
                await self.disconnect('Reconnecting')

            # Block until we're enabled
            await self._enabled_event.wait()

            import aiohttp
            session_args = {}
            if self.user or self.password:
                session_args['auth'] = aiohttp.BasicAuth(self.user,
                                                         self.password,
                                                         encoding='utf-8')
            if self._connector is not None:
                session_args['connector'] = self._connector
                session_args['connector_owner'] = False
            self._session = aiohttp.ClientSession(**session_args)

            # Check if connection works
            log.debug('Testing connection to %s', self.url)
            try:
                test_request = json.dumps({'method': 'session-get'})
                info = await self._send_request(test_request)
            except ClientError as e:
                self._connection_exception = e
                log.debug('Caught during connection test: %r', e)
                await self._reset()
                self._on_error.send(self, error=e)
                raise
            else:
                self._version = info['version']
                self._rpcversion = info['rpc-version']
                self._rpcversionmin = info['rpc-version-minimum']
                self._connection_tested = True
                self._connection_exception = None
                log.debug('Connection established: %s', self.url)
                self._on_connected.send(self)

            log.debug('Releasing connect() lock')

    async def disconnect(self, reason=None):
        """
        Disconnect if connected

        reason: Why are we disconnecting? Only used in a debugging message.
        """
        if self.connected:
            await self._reset()
            log.debug('Disconnecting from %s (%s)', self.url,
                      reason if reason is not None else 'for no reason')
            self._on_disconnected.send(self)

    async def _reset(self):
        if self._session is not None:
            await self._session.close()
        self._session = None
        self._version = None
        self._rpcversion = None
        self._rpcversionmin = None
        self._connection_tested = False

    async def _post(self, data):
        async with async_timeout.timeout(self.timeout):
            response = await self._session.post(self.url,
                                                data=data,
                                                headers=self._headers)

            if response.status == CSRF_ERROR_CODE:
                # Send request again with CSRF header
                self._headers[CSRF_HEADER] = response.headers[CSRF_HEADER]
                log.debug('Setting CSRF header: %s = %s', CSRF_HEADER,
                          response.headers[CSRF_HEADER])
                await response.release()
                return await self._post(data)

            elif response.status == AUTH_ERROR_CODE:
                await response.release()
                log.debug('Authentication failed: %s: user=%r, password=%r',
                          self.url, self.user, self.password)
                raise AuthError(self.url)

            else:
                try:
                    answer = await response.json()
                except json.JSONDecodeError as e:
                    raise RPCError('Server sent malformed JSON: %s: %s' %
                                   (e, await response.text()))
                else:
                    return answer

    async def _send_request(self, post_data):
        """
        Send RPC POST request to daemon

        post_data: Any valid RPC request as JSON string

        If applicable, returns response['arguments']['torrents'] or
        response['arguments'], otherwise response.

        Raises ClientError.
        """
        # NOTE #163: Letting asyncio.CancelledError bubble up seems to fix the issue that
        #            causes empty torrent lists in new tabs until the next poll iteration.
        #            But I've seen this error pop up in the TUI log: "Unclosed client
        #            session client_session: <aiohttp.client.ClientSession object at
        #            0x7f35d98d1be0>" This may or may not be related.
        import aiohttp
        try:
            from aiohttp_socks import ProxyConnectionError, ProxyError, ProxyTimeoutError
        except ImportError:

            class ProxyError(Exception):
                pass

            class ProxyConnectionError(Exception):
                pass

            class ProxyTimeoutError(Exception):
                pass

        try:
            answer = await self._post(post_data)
        except aiohttp.ClientError as e:
            log.debug('Caught during POST request: %r', e)
            raise ConnectionError(self.url)

        except (ProxyError, ProxyConnectionError) as e:
            log.debug('Caught during POST request: %r', e)
            raise ConnectionError(self.proxy)

        except asyncio.TimeoutError as e:
            log.debug('Caught during POST request: %r', e)
            raise TimeoutError(self.timeout, self.url)

        except ProxyTimeoutError as e:
            log.debug('Caught during POST request: %r', e)
            raise TimeoutError(self.timeout, self.proxy)

        else:
            if answer['result'] != 'success':
                raise RPCError(answer['result'].capitalize())
            else:
                if 'arguments' in answer:
                    if 'torrents' in answer['arguments']:
                        return answer['arguments']['torrents']
                    else:
                        return answer['arguments']
                return answer

    def __getattr__(self, method):
        """
        Return asyncio coroutine that sends RPC request and returns response

        method: Any method from the RPC specs with every '-' replaced with '_'.
                For arguments see the RPC specs.

        Example:
        >>> stats = await client.session_stats()
        >>> torrents = await client.torrent_get(ids=(1,2,3), fields=('status','name'))

        Raises RPCError, ConnectionError, AuthError
        """
        async def request(arguments=None, **kwargs):
            arguments = arguments or {}

            async with self._request_lock:
                if not self.connected:
                    log.debug('Autoconnecting for %r', method)
                    await self.connect()

                arguments.update(**kwargs)
                data = {
                    'method': method.replace('_', '-'),
                    'arguments': arguments
                }
                try:
                    rpc_request = json.dumps(data)
                except Exception as e:
                    raise RuntimeError('Invalid JSON data: %s: %r' %
                                       (e, data)) from None

                try:
                    return await self._send_request(rpc_request)
                except ClientError as e:
                    log.debug('Caught ClientError in %r request: %r', method,
                              e)

                    # RPCError does not mean host is unreachable, there was just a
                    # misunderstanding, so we're still connected.
                    if not isinstance(e, RPCError) and self.connected:
                        await self.disconnect(str(e))

                    self._on_error.send(self, error=e)
                    raise

        request.__name__ = method
        request.__qualname__ = method
        return request
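A hypothetical usage sketch for the TransmissionRPC class above: registering signal callbacks with on() and calling a dynamically generated RPC method. It assumes the class and its module-level helpers (Signal, TIMEOUT, the error classes) are importable and that a Transmission daemon is reachable at the default address; the callback names are illustrative.

import asyncio


def on_connected(client):
    # 'connected' callbacks receive the TransmissionRPC instance as positional argument
    print('connected to Transmission', client.version)


def on_error(client, error=None):
    # 'error' callbacks additionally receive the exception as the 'error' keyword argument
    print('RPC error:', error)


async def main():
    client = TransmissionRPC(host='localhost', port=9091)
    client.on('connected', on_connected)
    client.on('error', on_error)

    # any method from the RPC specs is available with '-' replaced by '_'
    stats = await client.session_stats()
    print(stats)

    await client.disconnect('done')


asyncio.run(main())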
Exemple #39
0
class GraphManager(object):
    """docstring for GraphManager."""
    def __init__(self):
        super(GraphManager, self).__init__()
        self.graphChanged = Signal(object)
        self._graphs = {}
        self._activeGraph = None
        self._activeGraph = GraphBase(ROOT_GRAPH_NAME, self)
        self._activeGraph.setIsRoot(True)

    def findRootGraph(self):
        roots = []
        for graph in self.getAllGraphs():
            if graph.isRoot():
                roots.append(graph)
        assert (len(roots) == 1), "Fatal! Multiple roots!"
        return roots[0]

    def selectRootGraph(self):
        self.selectGraph(self.findRootGraph())

    def serialize(self):
        rootGraph = self.findRootGraph()
        saved = rootGraph.serialize()
        saved["fileVersion"] = str(version.currentVersion())
        saved["activeGraph"] = self.activeGraph().name
        return saved

    @dispatch(str)
    def removeGraph(self, name):
        graph = self.findGraph(name)
        if graph is not None:
            graph.clear()
            self._graphs.pop(graph.uid)
            if graph.parentGraph is not None:
                if graph in graph.parentGraph.childGraphs:
                    graph.parentGraph.childGraphs.remove(graph)
            del graph

    @dispatch(object)
    def removeGraph(self, graph):
        if graph.uid in self._graphs:
            graph.clear()
            self._graphs.pop(graph.uid)
            if graph.parentGraph is not None:
                if graph in graph.parentGraph.childGraphs:
                    graph.parentGraph.childGraphs.remove(graph)
            del graph

    def deserialize(self, data):
        if "fileVersion" in data:
            fileVersion = version.Version.fromString(data["fileVersion"])
        else:
            # handle older version
            pass
        self.clear(keepRoot=False)
        self._activeGraph = GraphBase(str('root'), self)
        self._activeGraph.populateFromJson(data)
        self._activeGraph.setIsRoot(True)
        self.selectGraph(self._activeGraph)

    def clear(self, keepRoot=True, *args, **kwargs):
        self.selectGraph(ROOT_GRAPH_NAME)
        self.removeGraph(ROOT_GRAPH_NAME)
        self._graphs.clear()
        self._graphs = {}
        del self._activeGraph
        self._activeGraph = None
        if keepRoot:
            self._activeGraph = GraphBase(ROOT_GRAPH_NAME, self)
            self.selectGraph(self._activeGraph)
            self._activeGraph.setIsRoot(True)

    def Tick(self, deltaTime):
        for graph in self._graphs.values():
            graph.Tick(deltaTime)

    def findVariableRefs(self, variable):
        result = []
        for node in self.getAllNodes(classNameFilters=['getVar', 'setVar']):
            if node.variableUid() == variable.uid:
                result.append(node)
        return result

    @dispatch(str)
    def findGraph(self, name):
        graphs = self.graphsDict
        if name in graphs:
            return graphs[name]
        return None

    def findPinByName(self, pinFullName):
        result = None
        for graph in self.getAllGraphs():
            result = graph.findPin(pinFullName)
            if result is not None:
                break
        return result

    @dispatch(str)
    def findNode(self, name):
        """Finds a node across all graphs
        """
        result = None
        for graph in self.getAllGraphs():
            result = graph.findNode(name)
            if result is not None:
                break
        return result

    @dispatch(uuid.UUID)
    def findNode(self, uid):
        """Finds a node across all graphs
        """
        for graph in self.getAllGraphs():
            if uid in graph.nodes:
                return graph.nodes[uid]
        return None

    @dispatch(uuid.UUID)
    def findVariable(self, uuid):
        """Finds a variable across all graphs
        """
        result = None
        for graph in self._graphs.values():
            if uuid in graph.vars:
                result = graph.vars[uuid]
                break
        return result

    @dispatch(str)
    def findVariable(self, name):
        """Finds a variable across all graphs
        """
        for graph in self._graphs.values():
            for var in graph.vars.values():
                if var.name == name:
                    return var
        return None

    def location(self):
        location = [self.activeGraph().name]
        parent = self.activeGraph().parentGraph
        while parent is not None:
            location.insert(0, parent.name)
            parent = parent.parentGraph
        return location

    @property
    def graphsDict(self):
        result = {}
        for graph in self.getAllGraphs():
            result[graph.name] = graph
        return result

    def add(self, graph):
        graph.name = self.getUniqGraphName(graph.name)
        self._graphs[graph.uid] = graph

    def activeGraph(self):
        return self._activeGraph

    @dispatch(str)
    def selectGraph(self, name):
        graphs = self.graphsDict
        if name in graphs:
            if name != self.activeGraph().name:
                oldGraph = self.activeGraph()
                newGraph = graphs[name]
                self._activeGraph = newGraph
                self.graphChanged.send(self.activeGraph())

    @dispatch(object)
    def selectGraph(self, graph):
        for newGraph in self.getAllGraphs():
            if newGraph.name == graph.name:
                if newGraph.name != self.activeGraph().name:
                    oldGraph = self.activeGraph()
                    self._activeGraph = newGraph
                    self.graphChanged.send(self.activeGraph())
                    break

    def getAllGraphs(self):
        return [g for g in self._graphs.values()]

    def getAllNodes(self, classNameFilters=[]):
        allNodes = []
        for graph in self.getAllGraphs():
            if len(classNameFilters) == 0:
                allNodes.extend(list(graph.nodes.values()))
            else:
                allNodes.extend([
                    node for node in graph.nodes.values()
                    if node.__class__.__name__ in classNameFilters
                ])
        return allNodes

    def getAllVariables(self):
        result = []
        for graph in self.getAllGraphs():
            result.extend(list(graph.vars.values()))
        return result

    def getUniqGraphPinName(self, graph, name):
        existingNames = []
        for node in graph.getNodes(
                classNameFilters=['graphInputs', 'graphOutputs']):
            existingNames.extend([pin.name for pin in node.pins])
        return getUniqNameFromList(existingNames, name)

    def getUniqPinName(self, name):
        existingNames = []
        for node in self.getAllNodes():
            existingNames.extend([pin.name for pin in node.pins])
        return getUniqNameFromList(existingNames, name)

    def getAllNames(self):
        existingNames = [g.name for g in self.getAllGraphs()]
        existingNames.extend([n.name for n in self.getAllNodes()])
        existingNames.extend([var.name for var in self.getAllVariables()])
        for node in self.getAllNodes():
            existingNames.extend([pin.name for pin in node.pins])
        return existingNames

    def getUniqName(self, name):
        existingNames = self.getAllNames()
        return getUniqNameFromList(existingNames, name)

    def getUniqGraphName(self, name):
        existingNames = [g.name for g in self.getAllGraphs()]
        return getUniqNameFromList(existingNames, name)

    def getUniqNodeName(self, name):
        existingNames = [n.name for n in self.getAllNodes()]
        if name in existingNames:
            existingNames.remove(name)
        return getUniqNameFromList(existingNames, name)

    def getUniqVariableName(self, name):
        existingNames = [var.name for var in self.getAllVariables()]
        return getUniqNameFromList(existingNames, name)

    def plot(self):
        root = self.findRootGraph()
        print("Active graph: {0}".format(str(self.activeGraph().name)),
              "All graphs:", [g.name for g in self._graphs.values()])
        root.plot()
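A hypothetical usage sketch for the GraphManager above, showing the graphChanged signal firing when the active graph switches. It assumes GraphManager and GraphBase are importable from the surrounding PyFlow modules; the GraphBase construction details and the 'subGraph' name are assumptions made for illustration only.

def on_graph_changed(graph):
    print('active graph is now', graph.name)


man = GraphManager()
man.graphChanged.connect(on_graph_changed)

# create and register a second graph; the two-argument GraphBase call mirrors the one used
# in GraphManager.deserialize above, and the explicit add() guards against double registration
sub = GraphBase('subGraph', man)
if sub.uid not in man._graphs:
    man.add(sub)

man.selectGraph('subGraph')   # switches the active graph and fires graphChanged
print(man.location())         # e.g. ['root', 'subGraph'] if 'subGraph' is parented under root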