from blinker import Signal


class WalkoffSignal(object):
    """A signal to send Walkoff data

    The class is a wrapper around a blinker.Signal

    Attributes:
        name (str): The name of the signal
        signal (Signal): The signal object which sends the event and data
        event_type (EventType): The event type of this signal
        is_sent_to_interfaces (bool, optional): Should this event get sent to the
            interface dispatcher? Defaults to True
        message (str): Human-readable message for this event

    Args:
        name (str): The name of the signal
        event_type (EventType): The event type of this signal
        send_to_interfaces (bool, optional): Should this event get sent to the
            interface dispatcher? Defaults to True
        message (str, optional): Human-readable message for this event.
            Defaults to empty string
    """
    _signals = {}

    def __init__(self, name, event_type, send_to_interfaces=True, message=''):
        self.name = name
        self.signal = Signal(name)
        self.event_type = event_type
        self.is_sent_to_interfaces = send_to_interfaces
        self.message = message

    def send(self, sender, **kwargs):
        """Sends the signal with data

        Args:
            sender: The thing that is sending the signal

        Kwargs:
            data: Additional data to send with the signal
        """
        self.signal.send(sender, **kwargs)

    def connect(self, func, weak=True):
        """A decorator which registers a function as a callback for this signal

        Args:
            func (func): The function to register
            weak (bool, optional): Should a weak reference be used for this
                connection? Defaults to True

        Returns:
            func: The function connected
        """
        self.signal.connect(func)
        if not weak:
            WalkoffSignal._store_callback(func)
        return func

    @classmethod
    def _store_callback(cls, func):
        """Stores callbacks so they aren't garbage collected and the weak
        references of the signals disappear
        """
        cls._signals[id(func)] = func
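# A minimal usage sketch (not from Walkoff itself): the EventType stand-in
# below is hypothetical; Walkoff defines its own EventType enum elsewhere.
from enum import Enum

class EventType(Enum):  # stand-in for Walkoff's real EventType
    action = 1

action_started = WalkoffSignal('Action Started', EventType.action,
                               message='Action started')

@action_started.connect
def on_action_started(sender, **kwargs):
    print('action started:', sender, kwargs)

action_started.send('action-42', data={'status': 'running'})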
from blinker import Signal


def test_temp_connection_for_sender():
    sig = Signal()
    canary = []
    receiver = lambda sender: canary.append(sender)

    with sig.connected_to(receiver, sender=2):
        sig.send(1)
        sig.send(2)

    assert canary == [2]
    assert not sig.receivers
from blinker import Signal


class Foo:
    def __init__(self):
        self.on_hit = Signal()
        self.on_hit.connect(self.handle_hit)
        self.on_hit.connect(self.handle_again)

    def handle_hit(self, *args):
        print('hit!', self, args)

    def handle_again(self, *args):
        print('hit again!', self, args)
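# Sending the signal invokes both bound-method receivers; blinker holds weak
# references to them, so the connections die with the instance.
foo = Foo()
foo.on_hit.send(foo)   # both handlers fire, receiving foo as the sender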
def __init__(self, new_func=None, total_count=None, *args, **kwargs):
    self.func = new_func
    self.total_count = total_count
    self.count = 0
    self.task_queue = []
    self.process_list = []
    self.process_map = {}
    self.task_map = {}
    self.args = args
    self.kwargs = kwargs
    signal.signal(signal.SIGWINCH, self.action)
    # self.window = curses.initscr()
    self.add_task_signal = Signal(1000)
    self.count_signal = Signal(1001)
    self.reg_task_signal()
    self.reg_count_sinal()
from blinker import Signal


def test_temp_connection_alias():
    sig = Signal()
    canary = []
    receiver = lambda sender: canary.append(sender)

    sig.send(1)
    with sig.temporarily_connected_to(receiver):
        sig.send(2)
    sig.send(3)

    assert canary == [2]
    assert not sig.receivers
class EventService(Service):
    def __init__(self, env):
        """event service"""
        super(EventService, self).__init__(env)
        self._channel = Signal('event_channel')

    def subscribe(self, func, event_type=None):
        '''
        :param func: def func(event_type, **kwarg): pass
        :param event_type: optional event filter; subscribe to all events if None
        :return:
        '''
        # sender = event_type or ANY
        # weak = True
        # if isinstance(event_type, basestring):
        #     weak = False
        # self._channel.connect(func, sender, weak)
        sender = event_type or ANY
        self._channel.connect(func, sender)

    def unsubscribe(self, func):
        self._channel.disconnect(func)

    def publish(self, event_type, **kwarg):
        self._channel.send(event_type, **kwarg)
def __init__(self, inverted=False, scale=None):
    self.inverted = inverted
    # Default to None instead of a mutable {} so instances don't share one dict.
    self.scale = scale if scale is not None else {}
    self.position = None
    self.running = None
    self.initializing = None
    self.initialized = None
    self.initiator_minus = None
    self.initiator_plus = None
    self.initiator_error = None
    self.temperature_warning = None
    self.onPosition = Signal()
    self.onStarted = Signal()
    self.onStopped = Signal()
    self.onInitializing = Signal()
    self.onInitialized = Signal()
    self.onInitiatorMinus = Signal()
    self.onInitiatorPlus = Signal()
    self.onInitiatorError = Signal()
    self.onTemperatureWarning = Signal()
class MailboxesAccountItem(NSObject):
    def init(self):
        self = super(MailboxesAccountItem, self).init()
        self.folder_items = []
        self.updated = Signal()
        return self

    @classmethod
    def newBlank(cls):
        return cls.alloc().init()

    @classmethod
    def newWithAccount_(cls, account):
        self = cls.alloc().init()
        self.account = account
        account_updated.connect(objc_callback(self.folders_updated), account)
        self.folders_updated(account)
        return self

    def folders_updated(self, account):
        self.folder_items[:] = [MailboxesFolderItem.newWithFolder_(f)
                                for n, f in sorted(account._folders.items())]
        self.updated.send(self)
from blinker import Signal


def test_temp_connection_failure():
    sig = Signal()
    canary = []
    receiver = lambda sender: canary.append(sender)

    class Failure(Exception):
        pass

    try:
        sig.send(1)
        with sig.connected_to(receiver):
            sig.send(2)
            raise Failure
            sig.send(3)  # never reached; the raise exits the block
    except Failure:
        pass
    else:
        raise AssertionError("Context manager did not propagate.")

    assert canary == [2]
    assert not sig.receivers
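# The pattern the tests above exercise, in application form: connected_to()
# subscribes a receiver only for the duration of the with-block. Names here
# are illustrative.
from blinker import Signal

sig = Signal()
events = []
receiver = lambda sender: events.append(sender)

with sig.connected_to(receiver):
    sig.send('during')   # recorded
sig.send('after')        # not recorded

assert events == ['during']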
def __init__(self, axes, constraints=None):
    from Action import EmergencyStop
    from blinker import Signal

    self.axes_idx = dict()
    for i, axis in enumerate(axes):
        self.axes_idx[axis] = i
    self.axes = [axis for axis in axes]
    self.constraints = constraints
    self.abort_action = EmergencyStop(self.axes)

    self.onCycleStarted = Signal()
    self.onCycleFinished = Signal()
    self.onCycleAborted = Signal()
    self.onDestinationChanged = Signal()
    self.onRunning = Signal()
    self.onInitializing = Signal()
    self.onInitialized = Signal()
    self.onInitiatorMinus = Signal()
    self.onInitiatorPlus = Signal()
    self.onPositionChanged = Signal()

    for axis in self.axes:
        axis.onInitializing.connect(self.onInitializing_repeat)
        axis.onInitialized.connect(self.onInitialized_repeat)
        axis.onInitiatorMinus.connect(self.onInitiatorMinus_repeat)
        axis.onInitiatorPlus.connect(self.onInitiatorPlus_repeat)
        axis.onRunning.connect(self.onRunning_repeat)
        axis.onPosition.connect(self.onPosition_repeat)

    self.worker_thread = None
    self.active = False
    self.destination = None
    self.cycle_clear()
    self.update()
# -*- coding: utf-8 -*-
import logging
import time

from loki.node.nodes import TreeNode
import requests
import ujson as json
import gevent
from blinker import Signal

from loki.app import mail, settings
from loki.mail import Message
from loki.job.statuses import Status
from loki.utils import catch_exception

# for deployment
on_status_changed = Signal()

logger = logging.getLogger("signals.deploy")

SLACK_WEB_HOOK = "http://slack-proxy.hy01.internal.nosa.me/post"


@on_status_changed.connect
@catch_exception(logger)
def send_mail(sender, operator=None, **kwargs):
    """
    :param sender: job.models.Deployment
    """
    logger.debug("catch status changed signal for send_mail")
    status = Status(sender.status).name \
        if sender.status != Status.unknown \
def __init__(
    self,
    session_id: str,
    session_data: SessionData,
    enqueue_forward_msg: Callable[[ForwardMsg], None],
    client_state: ClientState,
    request_queue: ScriptRequestQueue,
    session_state: SessionState,
    uploaded_file_mgr: UploadedFileManager,
):
    """Initialize the ScriptRunner.

    (The ScriptRunner won't start executing until start() is called.)

    Parameters
    ----------
    session_id : str
        The AppSession's id.

    session_data : SessionData
        The AppSession's session data.

    enqueue_forward_msg : Callable
        Function to call to send a ForwardMsg to the frontend.
        (When not running a unit test, this will be the enqueue function
        of the AppSession instance that created this ScriptRunner.)

    client_state : ClientState
        The current state from the client (widgets and query params).

    request_queue : ScriptRequestQueue
        The queue that the AppSession is publishing ScriptRequests to.
        ScriptRunner will continue running until the queue is empty,
        and then shut down.

    uploaded_file_mgr : UploadedFileManager
        The File manager to store the data uploaded by the file_uploader widget.

    """
    self._session_id = session_id
    self._session_data = session_data
    self._enqueue_forward_msg = enqueue_forward_msg
    self._request_queue = request_queue
    self._uploaded_file_mgr = uploaded_file_mgr
    self._client_state = client_state

    self._session_state: SessionState = session_state
    self._session_state.set_widgets_from_proto(client_state.widget_states)

    self.on_event = Signal(
        doc="""Emitted when a ScriptRunnerEvent occurs.

        This signal is *not* emitted on the same thread that the
        ScriptRunner was created on.

        Parameters
        ----------
        event : ScriptRunnerEvent

        exception : BaseException | None
            Our compile error. Set only for the
            SCRIPT_STOPPED_WITH_COMPILE_ERROR event.

        widget_states : streamlit.proto.WidgetStates_pb2.WidgetStates | None
            The ScriptRunner's final WidgetStates. Set only for the
            SHUTDOWN event.
        """
    )

    # Set to true when we process a SHUTDOWN request
    self._shutdown_requested = False

    # Set to true while we're executing. Used by
    # _maybe_handle_execution_control_request.
    self._execing = False

    # This is initialized in start()
    self._script_thread: Optional[threading.Thread] = None
class MotionControl(object):
    def __init__(self, axes, constraints=None):
        import Queue
        self.axes = axes
        self.constraints = constraints
        self.action_queue = Queue.Queue()
        self.abort_action = NullAction()
        self.onCycleStarted = Signal()
        self.onCycleFinished = Signal()
        self.onCycleAborted = Signal()
        self.active = False
        self.target = None

    def __del__(self):
        self.abort()

    def __getattr__(self, name):
        if name == 'position':
            return [axis.position for axis in self.axes]

    def update(self):
        for axis in self.axes:
            axis.update()

    def set_target(self, target):
        if isinstance(target, list):
            if len(target) != len(self.axes):
                raise ValueError
            self.target = target
        if isinstance(target, dict):
            for k, v in target.items():
                self.target[k] = v
        if isinstance(target, tuple):
            self.target[target[0]] = target[1]

        speed = None
        current_position = self.position
        if None not in current_position:
            delta = [abs(a - b) for a, b in zip(target, current_position)]
            max_delta = max(delta)
            speed = [float(d) / float(max_delta) for d in delta]

        self.action_queue = Queue.Queue()
        self.action_queue.put(GotoAbsolute(self.axes, self.target, speed))

    def can_cycle_start(self):
        if self.active:
            return False
        return True  # FIXME: Add constraint tests here

    def start_cycle(self):
        import threading, weakref

        if not self.can_cycle_start():
            return False

        self.current_action = None
        self.active = True
        self.worker_thread = threading.Thread(
            target=MotionControl.cycle_worker,
            name="MotionControl.worker",
            args=(weakref.proxy(self),))
        self.worker_thread.daemon = True
        self.worker_thread.start()
        self.onCycleStarted.send()

    def abort(self):
        self.active = False
        self.worker_thread.join()

    def cycle_worker(ref):
        abort_action = ref.abort_action
        try:
            import time
            while True:
                if not ref.active:
                    raise CycleAbort()
                ref.update()
                if not ref.current_action or ref.current_action.ended():
                    if ref.action_queue.empty():
                        break
                    ref.current_action = ref.action_queue.get_nowait()
                    ref.current_action.execute()
                    while True:
                        if not ref.active:
                            raise CycleAbort()
                        ref.update()
                        if ref.current_action.ended():
                            break
                    ref.action_queue.task_done()
            ref.onCycleFinished.send()
        except CycleAbort:
            ref.abort_action.execute()
            ref.onCycleAborted.send()
        finally:
            try:
                while not ref.action_queue.empty():
                    ref.action_queue.get_nowait()
                    ref.action_queue.task_done()
            except:
                pass
            ref.active = False
    try:
        return int(v)
    except Exception:
        pass

    try:
        return float(v)
    except Exception:
        pass

    return v


# Allow outside modules to wait for the config file to be parsed before doing
# something.
_on_config_parsed = Signal(doc="Emitted when the config file is parsed.")

CONFIG_FILENAMES = [
    file_util.get_streamlit_file_path("config.toml"),
    file_util.get_project_streamlit_file_path("config.toml"),
]


def get_config_options(
    force_reparse=False, options_from_flags: Optional[Dict[str, Any]] = None
) -> Dict[str, ConfigOption]:
    """Create and return a dict mapping config option names to their values,
    returning a cached dict if possible.

    Config option values are sourced from the following locations. Values
class compound(NodeBase):
    """this node encapsulates a graph, like compound in xsi

    pins can be edited only from inside the compound
    """

    def __init__(self, name):
        super(compound, self).__init__(name)
        self.isCompoundNode = True
        self.pinExposed = Signal(object)
        self._rawGraph = None
        self.__inputsMap = {}
        self.__outputsMap = {}
        self.bCacheEnabled = False

    @property
    def inputsMap(self):
        return self.__inputsMap

    @property
    def outputsMap(self):
        return self.__outputsMap

    @property
    def rawGraph(self):
        return self._rawGraph

    @rawGraph.setter
    def rawGraph(self, newGraph):
        assert (newGraph is not None)
        self._rawGraph = newGraph

    def syncPins(self):
        # look for graph nodes pins was added
        nodeInputPins = self.namePinInputsMap
        nodeOutputPins = self.namePinOutputsMap

        graphInputsNodes = self.rawGraph.getNodes(classNameFilters=['graphInputs'])
        graphInputPins = {}
        for graphInputNode in graphInputsNodes:
            for outPin in graphInputNode.outputs.values():
                graphInputPins[outPin.name] = outPin
                # create companion pin if needed
                if outPin.name not in nodeInputPins:
                    self.onGraphInputPinCreated(outPin)

        graphOutputNodes = self.rawGraph.getNodes(classNameFilters=['graphOutputs'])
        graphOutputPins = {}
        for graphOutputNode in graphOutputNodes:
            for inPin in graphOutputNode.inputs.values():
                graphOutputPins[inPin.name] = inPin
                # create companion pin if needed
                if inPin.name not in nodeOutputPins:
                    self.onGraphOutputPinCreated(inPin)

        for nodeInputPinName, nodeInputPin in nodeInputPins.items():
            if nodeInputPinName not in graphInputPins:
                if nodeInputPin in self.__inputsMap:
                    nodeInputPin.kill()
                    clearSignal(nodeInputPin.killed)
                    self.__inputsMap.pop(nodeInputPin)

        for nodeOutputPinName, nodeOutputPin in nodeOutputPins.items():
            if nodeOutputPinName not in graphOutputPins:
                if nodeOutputPin in self.__outputsMap:
                    nodeOutputPin.kill()
                    clearSignal(nodeOutputPin.killed)
                    self.__outputsMap.pop(nodeOutputPin)

    def Tick(self, delta):
        self.syncPins()
        self.rawGraph.Tick(delta)
        super(compound, self).Tick(delta)

    def setName(self, name):
        super(compound, self).setName(name)
        if self.rawGraph is not None:
            self.rawGraph.name = self.getName()

    @staticmethod
    def pinTypeHints():
        return {'inputs': [], 'outputs': []}

    @staticmethod
    def category():
        return 'Common'

    @staticmethod
    def keywords():
        return []

    @staticmethod
    def description():
        return 'Encapsulate a graph inside a node'

    def serialize(self):
        default = NodeBase.serialize(self)
        default['graphData'] = self.rawGraph.serialize()
        return default

    def onGraphInputPinCreated(self, outPin):
        """Reaction when pin added to graphInputs node

        Arguments:
            outPin {PinBase} -- output pin on graphInputs node
        """
        # add companion pin for graphInputs node's output pin
        subgraphInputPin = self.createInputPin(outPin.name,
                                               outPin.__class__.__name__,
                                               outPin.defaultValue(),
                                               outPin.call,
                                               outPin.structureType,
                                               outPin.constraint,
                                               outPin.structConstraint,
                                               group=outPin.owningNode().name)
        if subgraphInputPin.isAny():
            subgraphInputPin.supportedDataTypes = outPin.supportedDataTypes
            # subgraphInputPin.singleInit = True
            subgraphInputPin.setType(outPin.dataType)

        outPin.owningNode().constraints[outPin.constraint].append(subgraphInputPin)
        self.constraints[outPin.constraint].append(outPin)

        outPin.owningNode().structConstraints[outPin.structConstraint].append(subgraphInputPin)
        self.structConstraints[outPin.structConstraint].append(outPin)

        self.__inputsMap[subgraphInputPin] = outPin
        pinAffects(subgraphInputPin, outPin)

        # connect
        def forceRename(name):
            subgraphInputPin.setName(name, force=True)
        outPin.nameChanged.connect(forceRename, weak=False)

        # broadcast for UI wrapper class
        self.pinExposed.send(subgraphInputPin)

    def onGraphOutputPinCreated(self, inPin):
        """Reaction when pin added to graphOutputs node

        Arguments:
            inPin {PinBase} -- input pin on graphOutputs node
        """
        # add companion pin for graphOutputs node's input pin
        subgraphOutputPin = self.createOutputPin(inPin.name,
                                                 inPin.__class__.__name__,
                                                 inPin.defaultValue(),
                                                 inPin.structureType,
                                                 inPin.constraint,
                                                 inPin.structConstraint,
                                                 group=inPin.owningNode().name)
        if subgraphOutputPin.isAny():
            subgraphOutputPin.supportedDataTypes = inPin.supportedDataTypes
            subgraphOutputPin.setType(inPin.dataType)

        if subgraphOutputPin.isExec():
            inPin.onExecute.connect(subgraphOutputPin.call)

        inPin.owningNode().constraints[inPin.constraint].append(subgraphOutputPin)
        self.constraints[inPin.constraint].append(inPin)

        inPin.owningNode().structConstraints[inPin.structConstraint].append(subgraphOutputPin)
        self.structConstraints[inPin.structConstraint].append(inPin)

        self.__outputsMap[subgraphOutputPin] = inPin
        pinAffects(inPin, subgraphOutputPin)

        # connect
        def forceRename(name):
            subgraphOutputPin.setName(name, force=True)
        inPin.nameChanged.connect(forceRename, weak=False)

        # broadcast for UI wrapper class
        self.pinExposed.send(subgraphOutputPin)

    def kill(self, *args, **kwargs):
        self.rawGraph.remove()
        super(compound, self).kill(*args, **kwargs)

    def postCreate(self, jsonTemplate=None):
        super(compound, self).postCreate(jsonTemplate=jsonTemplate)

        if jsonTemplate is not None and 'graphData' in jsonTemplate:
            parentGraph = self.graph().graphManager.findGraph(
                jsonTemplate['owningGraphName'])
            self.rawGraph = GraphBase(self.name, self.graph().graphManager,
                                      parentGraph)
            # recreate graph contents
            jsonTemplate['graphData']['name'] = self.getName()
            self.rawGraph.populateFromJson(jsonTemplate['graphData'])

            self.syncPins()

            inputsMap = self.namePinInputsMap
            for inpJson in jsonTemplate['inputs']:
                inputsMap[inpJson['name']].uid = uuid.UUID(inpJson['uuid'])

            outputsMap = self.namePinOutputsMap
            for outJson in jsonTemplate['outputs']:
                outputsMap[outJson['name']].uid = uuid.UUID(outJson['uuid'])
        else:
            self.rawGraph = GraphBase(self.name, self.graph().graphManager,
                                      self.graph().graphManager.activeGraph())

    def addNode(self, node):
        self.rawGraph.addNode(node)

    def autoAffectPins(self):
        pass

    def compute(self, *args, **kwargs):
        # put data from inner graph pins to outer compound node output companions
        for outputPin, innerPin in self.__outputsMap.items():
            outputPin.setData(innerPin.getData())
""" weitersager.signals ~~~~~~~~~~~~~~~~~~~ Signals :Copyright: 2007-2022 Jochen Kupperschmidt :License: MIT, see LICENSE for details. """ from blinker import Signal irc_channel_joined = Signal() message_received = Signal()
def __init__(self, md5, modification_time):
    self.md5 = md5
    self.modification_time = modification_time
    self.on_file_changed = Signal()
class Reuse(db.Datetimed, WithMetrics, BadgeMixin, db.Owned, db.Document):
    title = db.StringField(required=True)
    slug = db.SlugField(max_length=255, required=True, populate_from='title',
                        update=True)
    description = db.StringField(required=True)
    type = db.StringField(required=True, choices=REUSE_TYPES.keys())
    url = db.StringField(required=True)
    urlhash = db.StringField(required=True, unique=True)
    image_url = db.StringField()
    image = db.ImageField(fs=images, basename=default_image_basename,
                          max_size=IMAGE_MAX_SIZE, thumbnails=IMAGE_SIZES)
    datasets = db.ListField(
        db.ReferenceField('Dataset', reverse_delete_rule=db.PULL))
    tags = db.TagListField()
    # badges = db.ListField(db.EmbeddedDocumentField(ReuseBadge))
    private = db.BooleanField()
    ext = db.MapField(db.GenericEmbeddedDocumentField())
    extras = db.ExtrasField()

    featured = db.BooleanField()
    deleted = db.DateTimeField()

    def __unicode__(self):
        return self.title or ''

    __badges__ = {}

    meta = {
        'indexes': ['-created_at', 'urlhash'] + db.Owned.meta['indexes'],
        'ordering': ['-created_at'],
        'queryset_class': ReuseQuerySet,
    }

    before_save = Signal()
    after_save = Signal()
    on_create = Signal()
    on_update = Signal()
    before_delete = Signal()
    after_delete = Signal()
    on_delete = Signal()

    verbose_name = _('reuse')

    @classmethod
    def pre_save(cls, sender, document, **kwargs):
        # Emit before_save
        cls.before_save.send(document)

    @classmethod
    def post_save(cls, sender, document, **kwargs):
        cls.after_save.send(document)
        if kwargs.get('created'):
            cls.on_create.send(document)
        else:
            cls.on_update.send(document)
        if document.deleted:
            cls.on_delete.send(document)

    def url_for(self, *args, **kwargs):
        return url_for('reuses.show', reuse=self, *args, **kwargs)

    display_url = property(url_for)

    @property
    def external_url(self):
        return self.url_for(_external=True)

    @property
    def type_label(self):
        return REUSE_TYPES[self.type]

    def clean(self):
        '''Auto populate urlhash from url'''
        if not self.urlhash or 'url' in self._get_changed_fields():
            self.urlhash = hash_url(self.url)
        super(Reuse, self).clean()

    @classmethod
    def get(cls, id_or_slug):
        obj = cls.objects(slug=id_or_slug).first()
        return obj or cls.objects.get_or_404(id=id_or_slug)

    @classmethod
    def url_exists(cls, url):
        urlhash = hash_url(url)
        return cls.objects(urlhash=urlhash).count() > 0

    @cached_property
    def json_ld(self):
        result = {
            '@context': 'http://schema.org',
            '@type': 'CreativeWork',
            'alternateName': self.slug,
            'dateCreated': self.created_at.isoformat(),
            'dateModified': self.last_modified.isoformat(),
            'url': url_for('reuses.show', reuse=self, _external=True),
            'name': self.title,
            'isBasedOnUrl': self.url,
        }

        if self.description:
            result['description'] = mdstrip(self.description)

        if self.organization:
            author = self.organization.json_ld
        elif self.owner:
            author = self.owner.json_ld
        else:
            author = None
        if author:
            result['author'] = author

        return result
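# A sketch of observing the lifecycle signals declared above (handler name is
# illustrative); post_save() sends the document itself as the sender.
@Reuse.on_create.connect
def notify_reuse_created(reuse):
    print('new reuse:', reuse.title)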
def __init__(
    self,
    report,
    main_dg,
    sidebar_dg,
    widget_states,
    request_queue,
    uploaded_file_mgr=None,
):
    """Initialize the ScriptRunner.

    (The ScriptRunner won't start executing until start() is called.)

    Parameters
    ----------
    report : Report
        The ReportSession's report.

    main_dg : DeltaGenerator
        The ReportSession's main DeltaGenerator.

    sidebar_dg : DeltaGenerator
        The ReportSession's sidebar DeltaGenerator.

    widget_states : streamlit.proto.Widget_pb2.WidgetStates
        The ReportSession's current widget states

    request_queue : ScriptRequestQueue
        The queue that the ReportSession is publishing ScriptRequests to.
        ScriptRunner will continue running until the queue is empty,
        and then shut down.

    uploaded_file_mgr : UploadedFileManager
        The File manager to store the data uploaded by the file_uploader widget.

    """
    self._report = report
    self._main_dg = main_dg
    self._sidebar_dg = sidebar_dg
    self._request_queue = request_queue
    self._uploaded_file_mgr = uploaded_file_mgr

    self._widgets = Widgets()
    self._widgets.set_state(widget_states)

    self.on_event = Signal(
        doc="""Emitted when a ScriptRunnerEvent occurs.

        This signal is *not* emitted on the same thread that the
        ScriptRunner was created on.

        Parameters
        ----------
        event : ScriptRunnerEvent

        exception : BaseException | None
            Our compile error. Set only for the
            SCRIPT_STOPPED_WITH_COMPILE_ERROR event.

        widget_states : streamlit.proto.Widget_pb2.WidgetStates | None
            The ScriptRunner's final WidgetStates. Set only for the
            SHUTDOWN event.
        """
    )

    # Set to true when we process a SHUTDOWN request
    self._shutdown_requested = False

    # Set to true while we're executing. Used by
    # maybe_handle_execution_control_request.
    self._execing = False

    # This is initialized in start()
    self._script_thread = None
class EditorHistory(object):
    """docstring for EditorHistory."""

    def __init__(self, app):
        self.statePushed = Signal(object)
        self.stateRemoved = Signal(object)
        self.stateSelected = Signal(object)

        self.app = app
        self.stack = list()
        try:
            self._capacity = int(ConfigManager().getPrefsValue(
                "PREFS", "General/HistoryDepth"))
        except:
            self._capacity = 10
        self.activeState = None

    def getStack(self):
        return self.stack

    def count(self):
        return len(self.stack)

    @property
    def capacity(self):
        return self._capacity

    @capacity.setter
    def capacity(self, value):
        self._capacity = value
        if value < len(self.stack):
            for i in range(len(self.stack) - value):
                state = self.stack.pop()
                self.stateRemoved.send(state)

    def clear(self):
        self.stack.clear()

    def stateIndex(self, state):
        if state in self.stack:
            return self.stack.index(state)
        return -1

    @property
    def currentIndex(self):
        if self.activeState is not None:
            return self.stateIndex(self.activeState)
        return -1

    def push(self, edState):
        if self.currentIndex < self.count() - 1:
            nextState = None
            while True:
                index = self.count() - 1
                nextState = self.stack[index]
                if nextState == self.activeState:
                    break
                state = self.stack.pop()
                self.stateRemoved.send(state)

        self.stack.append(edState)

        if len(self.stack) >= self.capacity:
            poppedState = self.stack.pop(0)
            self.stateRemoved.send(poppedState)

        self.statePushed.send(edState)
        self.activeState = edState
        self.stateSelected.send(edState)

    def selectState(self, state):
        for st in self.stack:
            if state == st:
                self.app.loadFromData(st.editorState)
                self.activeState = st
                self.stateSelected.send(st)
                break

    def select(self, index):
        index = clamp(index, 0, self.count() - 1)
        if index == self.currentIndex:
            return

        if len(self.stack) == 0:
            return

        stateData = self.stack[index].editorState
        self.app.loadFromData(stateData)
        state = self.stack[index]
        self.activeState = state
        self.stateSelected.send(state)

    def saveState(self, text):
        self.push(_EditorState(text))

    def undo(self):
        if self.currentIndex > 0:
            self.select(self.currentIndex - 1)

    def redo(self):
        self.select(self.currentIndex + 1)
class Session(object):
    def __init__(self, username, password, account_id, auth_token=None,
                 user_directory='mpx', region='US1', service_registry=None,
                 token_duration=43200000,       # 12 hours
                 token_idle_timeout=14400000,   # 4 hours
                 use_ssl=True,
                 ):
        self.username = username
        self.password = password
        self.account = account_id
        self.auth_token = auth_token
        self.user_directory = user_directory
        self.region = region
        self.token_duration = token_duration
        self.token_idle_timeout = token_idle_timeout
        self.use_ssl = use_ssl

        self.registry_url = REGISTRY_URL.format(tld=self.regional_tld)
        self.signin_url = SIGN_IN_URL.format(tld=self.regional_tld)

        self.post_sign_in = Signal()
        self._registry = service_registry

        self.session = requests.Session()
        self.session.mount('https://', TLS1Adapter())
        self.session.headers.update({
            'Content-Type': 'application/json',
            'Accept': 'application/json',
            'User-Agent': 'Python Mediaamp %s' % __version__,
        })

    @property
    def registry(self):
        if self._registry is None:
            self._registry = self.resolve_domain()
        return self._registry

    @property
    def regional_tld(self):
        return 'eu' if 'eu' in self.region.lower() else 'com'

    @property
    def signin_username(self):
        return self.user_directory + '/' + self.username

    def resolve_domain(self):
        resp = self.get(self.registry_url, params={
            'schema': '1.1',
            '_accountId': self.account,
        })
        try:
            return resp['resolveDomainResponse']
        except KeyError:
            raise MediaAmpError('Unexpected response loading registry.')

    def sign_in(self):
        self.auth_token = None
        self.session.auth = HTTPBasicAuth(self.signin_username, self.password)
        result = self.get(self.signin_url, is_signin_request=True, params={
            'schema': '1.0',
            '_duration': self.token_duration,
            '_idleTimeout': self.token_idle_timeout,
        })
        try:
            self.auth_token = result['signInResponse']['token']
        except KeyError:
            raise AuthenticationError('Could not retrieve token.')
        self.post_sign_in.send(self)

    def request_json(self, method, url, retry_sign_in=True,
                     is_signin_request=False, **kwargs):
        """Requests JSON content from the supplied URL.

        This is the primary function to be used to make requests to the MPX
        API. Not only does it ensure that the body of the response can be
        encoded as Python via JSON, it also wraps exceptions and will
        auto-login using the supplied credentials when necessary (e.g. when a
        token expires).

        This API is known to return 200 statuses for requests that fail. It's
        their convention to include the HTTP response code in the body of the
        JSON returned. This checks for that case and turns them into actual
        exceptions.
        """
        if self.auth_token is not None:
            self.session.auth = HTTPBasicAuth('', self.auth_token)
        elif not is_signin_request:
            self.sign_in()

        response = getattr(self.session, method)(url, **kwargs)

        try:
            response.raise_for_status()
        except requests.HTTPError as e:
            wrap_http_error(e)

        try:
            data = response.json() if response.text else {}
        except ValueError:
            raise MediaAmpError('Response body cannot be read as JSON.')

        try:
            raise_for_json_exception(data)
        except InvalidTokenError:
            if retry_sign_in:
                self.sign_in()
                return self.request_json(method, url, retry_sign_in=False, **kwargs)
            else:
                raise

        return data

    def get(self, url, **kwargs):
        return self.request_json('get', url, **kwargs)

    def put(self, url, **kwargs):
        return self.request_json('put', url, **kwargs)

    def post(self, url, **kwargs):
        return self.request_json('post', url, **kwargs)

    def delete(self, url, **kwargs):
        return self.request_json('delete', url, **kwargs)

    def __getitem__(self, key):
        url = self.registry.get(key)
        if url is None:
            url = self.registry.get(key + ' read-only')
            if url is None:
                raise KeyError(key + ' not available.')
        if self.use_ssl:
            url = url.replace('http://', 'https://')
        return services[key](self, url)
class Organization(WithMetrics, BadgeMixin, db.Datetimed, db.Document):
    name = db.StringField(required=True)
    acronym = db.StringField(max_length=128)
    slug = db.SlugField(max_length=255, required=True, populate_from='name',
                        update=True)
    description = db.StringField(required=True)
    url = db.StringField()
    image_url = db.StringField()
    logo = db.ImageField(fs=avatars, basename=default_image_basename,
                         max_size=LOGO_MAX_SIZE, thumbnails=LOGO_SIZES)

    members = db.ListField(db.EmbeddedDocumentField(Member))
    teams = db.ListField(db.EmbeddedDocumentField(Team))
    requests = db.ListField(db.EmbeddedDocumentField(MembershipRequest))

    ext = db.MapField(db.GenericEmbeddedDocumentField())
    zone = db.StringField()
    extras = db.ExtrasField()

    deleted = db.DateTimeField()

    meta = {
        'indexes': ['-created_at', 'slug'],
        'ordering': ['-created_at'],
        'queryset_class': OrganizationQuerySet,
    }

    def __unicode__(self):
        return self.name or ''

    __badges__ = {
        PUBLIC_SERVICE: _('Public Service'),
        CERTIFIED: _('Certified'),
    }

    before_save = Signal()
    after_save = Signal()
    on_create = Signal()
    on_update = Signal()
    before_delete = Signal()
    after_delete = Signal()

    @classmethod
    def pre_save(cls, sender, document, **kwargs):
        cls.before_save.send(document)

    @classmethod
    def post_save(cls, sender, document, **kwargs):
        cls.after_save.send(document)
        if kwargs.get('created'):
            cls.on_create.send(document)
        else:
            cls.on_update.send(document)

    def url_for(self, *args, **kwargs):
        return url_for('organizations.show', org=self, *args, **kwargs)

    display_url = property(url_for)

    @property
    def external_url(self):
        return self.url_for(_external=True)

    @property
    def pending_requests(self):
        return [r for r in self.requests if r.status == 'pending']

    @property
    def refused_requests(self):
        return [r for r in self.requests if r.status == 'refused']

    @property
    def accepted_requests(self):
        return [r for r in self.requests if r.status == 'accepted']

    @property
    def certified(self):
        return any(b.kind == CERTIFIED for b in self.badges)

    @property
    def public_service(self):
        is_public_service = any(b.kind == PUBLIC_SERVICE for b in self.badges)
        return self.certified and is_public_service

    def member(self, user):
        for member in self.members:
            if member.user == user:
                return member
        return None

    def is_member(self, user):
        return self.member(user) is not None

    def is_admin(self, user):
        member = self.member(user)
        return member is not None and member.role == 'admin'

    def pending_request(self, user):
        for request in self.requests:
            if request.user == user and request.status == 'pending':
                return request
        return None

    @classmethod
    def get(cls, id_or_slug):
        obj = cls.objects(slug=id_or_slug).first()
        return obj or cls.objects.get_or_404(id=id_or_slug)

    def by_role(self, role):
        return filter(lambda m: m.role == role, self.members)

    def check_availability(self):
        from udata.models import Dataset  # Circular imports.
        # Performances: only check the first 20 datasets for now.
        return chain(*[
            dataset.check_availability()
            for dataset in Dataset.objects(organization=self).visible()[:20]
        ])

    @cached_property
    def json_ld(self):
        type_ = 'GovernmentOrganization' if self.public_service \
            else 'Organization'

        result = {
            '@context': 'http://schema.org',
            '@type': type_,
            'alternateName': self.slug,
            'url': url_for('organizations.show', org=self, _external=True),
            'name': self.name,
        }

        if self.description:
            result['description'] = mdstrip(self.description)

        logo = self.logo(external=True)
        if logo:
            result['logo'] = logo

        return result
class ValueBase():
    """Name:value pair with validation, default value and description"""

    def __init__(self, name, default, description='No description available'):
        """Create new value"""
        self.__name = name
        self.__value = self.__default = self.convert(default)
        self.__description = description
        self.__on_change = Signal()

    # Must be set by derived classes
    type = NotImplemented
    typename = '<NOT IMPLEMENTED>'

    @property
    def value(self):
        return self.__value

    @property
    def name(self):
        return self.__name

    @property
    def default(self):
        return self.__default

    @property
    def description(self):
        return self.__description

    def on_change(self, callback, autoremove=True):
        """Pass this object to `callback` every time its value changes

        `callback` may raise ValueError to revert the change (see `set`).

        If `autoremove` is True, stop calling callback once it is garbage
        collected.
        """
        self.__on_change.connect(callback, weak=autoremove)

    def set(self, value):
        """Change value if valid, reset to default if None

        Callbacks connected to `on_change` are passed this object every time a
        value is changed. If a callback raises ValueError, the change is
        reverted and a ValueError is raised.
        """
        if value is None:
            value = self.__default
        try:
            new_value = self.convert(value)
            self.validate(new_value)
        except ValueError as e:
            raise ValueError('{} = {}: {}'.format(self.name, self.str(value), e))
        else:
            prev_value = self.__value
            self.__value = new_value
            # Callbacks can revert the change by raising ValueError
            try:
                self.__on_change.send(self)
            except ValueError as e:
                self.__value = prev_value
                raise ValueError('{} = {}: {}'.format(self.name, self.str(value), e))

    def get(self):
        """Return current value"""
        return self.value

    def validate(self, value):
        """Raise ValueError if value is not valid"""
        if not isinstance(value, self.type):
            raise ValueError('Not a {}'.format(self.typename))

    def convert(self, value):
        """Try to convert value to correct type before validation (e.g. str->int)

        Raise ValueError if impossible"""
        if isinstance(value, self.type):
            return value
        try:
            if isinstance(value, abc.Iterable):
                return self.type(''.join(value))
            else:
                return self.type(value)
        except Exception:
            raise ValueError('Not a {}'.format(self.typename))

    def __eq__(self, other):
        return self.value == other.value

    def __ne__(self, other):
        return self.value != other.value

    def str(self, value=None, default=False):
        """Return prettily stringified value

        value: None to return current value, or specific value
        default: Whether to return current or default value
        """
        if default:
            return str(self.default)
        elif value is not None:
            return str(value)
        else:
            return str(self.value)

    def __str__(self):
        return self.str()

    def __repr__(self):
        return '{}={!r}'.format(self.name, self.value)
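# A minimal concrete subclass and the revert behaviour described in set():
# a callback that raises ValueError rolls the value back. IntValue is a
# hypothetical example, not part of the original module.
class IntValue(ValueBase):
    type = int
    typename = 'integer'

limit = IntValue('limit', 10)

def reject_negative(value_obj):
    if value_obj.value < 0:
        raise ValueError('must not be negative')

limit.on_change(reject_negative)

limit.set(25)            # accepted; limit.value == 25
try:
    limit.set(-1)        # callback raises, so the change is reverted
except ValueError:
    pass
assert limit.value == 25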
class ScriptRunner(object):
    def __init__(
        self,
        session_id,
        report,
        enqueue_forward_msg,
        client_state,
        request_queue,
        session_state,
        uploaded_file_mgr=None,
    ):
        """Initialize the ScriptRunner.

        (The ScriptRunner won't start executing until start() is called.)

        Parameters
        ----------
        session_id : str
            The ReportSession's id.

        report : Report
            The ReportSession's report.

        client_state : streamlit.proto.ClientState_pb2.ClientState
            The current state from the client (widgets and query params).

        request_queue : ScriptRequestQueue
            The queue that the ReportSession is publishing ScriptRequests to.
            ScriptRunner will continue running until the queue is empty,
            and then shut down.

        widget_mgr : WidgetManager
            The ReportSession's WidgetManager.

        uploaded_file_mgr : UploadedFileManager
            The File manager to store the data uploaded by the file_uploader widget.

        """
        self._session_id = session_id
        self._report = report
        self._enqueue_forward_msg = enqueue_forward_msg
        self._request_queue = request_queue
        self._uploaded_file_mgr = uploaded_file_mgr
        self._client_state = client_state

        self._session_state: SessionState = session_state
        self._session_state.set_widgets_from_proto(client_state.widget_states)

        self.on_event = Signal(
            doc="""Emitted when a ScriptRunnerEvent occurs.

            This signal is *not* emitted on the same thread that the
            ScriptRunner was created on.

            Parameters
            ----------
            event : ScriptRunnerEvent

            exception : BaseException | None
                Our compile error. Set only for the
                SCRIPT_STOPPED_WITH_COMPILE_ERROR event.

            widget_states : streamlit.proto.WidgetStates_pb2.WidgetStates | None
                The ScriptRunner's final WidgetStates. Set only for the
                SHUTDOWN event.
            """
        )

        # Set to true when we process a SHUTDOWN request
        self._shutdown_requested = False

        # Set to true while we're executing. Used by
        # maybe_handle_execution_control_request.
        self._execing = False

        # This is initialized in start()
        self._script_thread = None

    def __repr__(self) -> str:
        return util.repr_(self)

    def start(self):
        """Start a new thread to process the ScriptEventQueue.

        This must be called only once.
        """
        if self._script_thread is not None:
            raise Exception("ScriptRunner was already started")

        self._script_thread = ReportThread(
            session_id=self._session_id,
            enqueue=self._enqueue_forward_msg,
            query_string=self._client_state.query_string,
            session_state=self._session_state,
            uploaded_file_mgr=self._uploaded_file_mgr,
            target=self._process_request_queue,
            name="ScriptRunner.scriptThread",
        )
        self._script_thread.start()

    def _process_request_queue(self):
        """Process the ScriptRequestQueue and then exits.

        This is run in a separate thread.
        """
        LOGGER.debug("Beginning script thread")

        while not self._shutdown_requested and self._request_queue.has_request:
            request, data = self._request_queue.dequeue()
            if request == ScriptRequest.STOP:
                LOGGER.debug("Ignoring STOP request while not running")
            elif request == ScriptRequest.SHUTDOWN:
                LOGGER.debug("Shutting down")
                self._shutdown_requested = True
            elif request == ScriptRequest.RERUN:
                self._run_script(data)
            else:
                raise RuntimeError("Unrecognized ScriptRequest: %s" % request)

        # Send a SHUTDOWN event before exiting. This includes the widget values
        # as they existed after our last successful script run, which the
        # ReportSession will pass on to the next ScriptRunner that gets
        # created.
        client_state = ClientState()
        client_state.query_string = self._client_state.query_string
        widget_states = self._session_state.as_widget_states()
        client_state.widget_states.widgets.extend(widget_states)
        self.on_event.send(ScriptRunnerEvent.SHUTDOWN, client_state=client_state)

    def _is_in_script_thread(self):
        """True if the calling function is running in the script thread"""
        return self._script_thread == threading.current_thread()

    def maybe_handle_execution_control_request(self):
        if not self._is_in_script_thread():
            # We can only handle execution_control_request if we're on the
            # script execution thread. However, it's possible for deltas to
            # be enqueued (and, therefore, for this function to be called)
            # in separate threads, so we check for that here.
            return

        if not self._execing:
            # If the _execing flag is not set, we're not actually inside
            # an exec() call. This happens when our script exec() completes,
            # we change our state to STOPPED, and a statechange-listener
            # enqueues a new ForwardEvent
            return

        # Pop the next request from our queue.
        request, data = self._request_queue.dequeue()
        if request is None:
            return

        LOGGER.debug("Received ScriptRequest: %s", request)
        if request == ScriptRequest.STOP:
            raise StopException()
        elif request == ScriptRequest.SHUTDOWN:
            self._shutdown_requested = True
            raise StopException()
        elif request == ScriptRequest.RERUN:
            raise RerunException(data)
        else:
            raise RuntimeError("Unrecognized ScriptRequest: %s" % request)

    def _install_tracer(self):
        """Install function that runs before each line of the script."""

        def trace_calls(frame, event, arg):
            self.maybe_handle_execution_control_request()
            return trace_calls

        # Python interpreters are not required to implement sys.settrace.
        if hasattr(sys, "settrace"):
            sys.settrace(trace_calls)

    @contextmanager
    def _set_execing_flag(self):
        """A context for setting the ScriptRunner._execing flag.

        Used by maybe_handle_execution_control_request to ensure that we only
        handle requests while we're inside an exec() call
        """
        if self._execing:
            raise RuntimeError("Nested set_execing_flag call")
        self._execing = True
        try:
            yield
        finally:
            self._execing = False

    def _run_script(self, rerun_data):
        """Run our script.

        Parameters
        ----------
        rerun_data: RerunData
            The RerunData to use.

        """
        assert self._is_in_script_thread()

        LOGGER.debug("Running script %s", rerun_data)

        # Reset DeltaGenerators, widgets, media files.
        in_memory_file_manager.clear_session_files()

        ctx = get_report_ctx()
        if ctx is None:
            # This should never be possible on the script_runner thread.
            raise RuntimeError(
                "ScriptRunner thread has a null ReportContext. Something has gone very wrong!"
            )

        ctx.reset(query_string=rerun_data.query_string)

        self.on_event.send(ScriptRunnerEvent.SCRIPT_STARTED)

        # Compile the script. Any errors thrown here will be surfaced
        # to the user via a modal dialog in the frontend, and won't result
        # in their previous report disappearing.
        try:
            with source_util.open_python_file(self._report.script_path) as f:
                filebody = f.read()

            if config.get_option("runner.magicEnabled"):
                filebody = magic.add_magic(filebody, self._report.script_path)

            code = compile(
                filebody,
                # Pass in the file path so it can show up in exceptions.
                self._report.script_path,
                # We're compiling entire blocks of Python, so we need "exec"
                # mode (as opposed to "eval" or "single").
                mode="exec",
                # Don't inherit any flags or "future" statements.
                flags=0,
                dont_inherit=1,
                # Use the default optimization options.
                optimize=-1,
            )

        except BaseException as e:
            # We got a compile error. Send an error event and bail immediately.
            LOGGER.debug("Fatal script error: %s" % e)
            self._session_state[SCRIPT_RUN_WITHOUT_ERRORS_KEY] = False
            self.on_event.send(
                ScriptRunnerEvent.SCRIPT_STOPPED_WITH_COMPILE_ERROR, exception=e
            )
            return

        # If we get here, we've successfully compiled our script. The next step
        # is to run it. Errors thrown during execution will be shown to the
        # user as ExceptionElements.

        if config.get_option("runner.installTracer"):
            self._install_tracer()

        # This will be set to a RerunData instance if our execution
        # is interrupted by a RerunException.
        rerun_with_data = None

        try:
            # Create fake module. This gives us a name global namespace to
            # execute the code in.
            module = _new_module("__main__")

            # Install the fake module as the __main__ module. This allows
            # the pickle module to work inside the user's code, since it now
            # can know the module where the pickled objects stem from.
            # IMPORTANT: This means we can't use "if __name__ == '__main__'" in
            # our code, as it will point to the wrong module!!!
            sys.modules["__main__"] = module

            # Add special variables to the module's globals dict.
            # Note: The following is a requirement for the CodeHasher to
            # work correctly. The CodeHasher is scoped to
            # files contained in the directory of __main__.__file__, which we
            # assume is the main script directory.
            module.__dict__["__file__"] = self._report.script_path

            with modified_sys_path(self._report), self._set_execing_flag():
                # Run callbacks for widgets whose values have changed.
                if rerun_data.widget_states is not None:
                    # Update the WidgetManager with the new widget_states.
                    # The old states, used to skip callbacks if values
                    # haven't changed, are also preserved in the
                    # WidgetManager.
                    self._session_state.compact_state()
                    self._session_state.set_widgets_from_proto(rerun_data.widget_states)

                    self._session_state.call_callbacks()

                ctx.on_script_start()
                exec(code, module.__dict__)
                self._session_state[SCRIPT_RUN_WITHOUT_ERRORS_KEY] = True
        except RerunException as e:
            rerun_with_data = e.rerun_data

        except StopException:
            pass

        except BaseException as e:
            self._session_state[SCRIPT_RUN_WITHOUT_ERRORS_KEY] = False
            handle_uncaught_app_exception(e)

        finally:
            self._on_script_finished(ctx)

        # Use _log_if_error() to make sure we never ever ever stop running the
        # script without meaning to.
        _log_if_error(_clean_problem_modules)

        if rerun_with_data is not None:
            self._run_script(rerun_with_data)

    def _on_script_finished(self, ctx: ReportContext) -> None:
        """Called when our script finishes executing, even if it finished
        early with an exception. We perform post-run cleanup here.
        """
        self._session_state.reset_triggers()
        self._session_state.cull_nonexistent(ctx.widget_ids_this_run.items())

        # Signal that the script has finished. (We use SCRIPT_STOPPED_WITH_SUCCESS
        # even if we were stopped with an exception.)
        self.on_event.send(ScriptRunnerEvent.SCRIPT_STOPPED_WITH_SUCCESS)

        # Delete expired files now that the script has run and files in use
        # are marked as active.
        in_memory_file_manager.del_expired_files()

        # Force garbage collection to run, to help avoid memory use building up
        # This is usually not an issue, but sometimes GC takes time to kick in and
        # causes apps to go over resource limits, and forcing it to run between
        # script runs is low cost, since we aren't doing much work anyway.
        if config.get_option("runner.postScriptGC"):
            gc.collect(2)
class DictConfig(SubConfig, DotDict, SignalEmitter):
    """`SubConfig` for dictionaries, extension of ``qcodes.config``.

    This is a SubConfig child class for dictionaries.

    The DictConfig is a ``DotDict``, meaning that its elements can be accessed
    as attributes. For example, the following lines are identical:

    >>> dict_config['item1']['item2']
    >>> dict_config.item1.item2

    Args:
        name: Config name. SilQ config root is ``config``.
        folder: Absolute config folder path. Automatically set for child
            SubConfigs in the root SubConfig.
        parent: Parent SubConfig (None for root SubConfig).
        config: Pre-existing config to load into new DictConfig.
        save_as_dir: Save SubConfig as dir.
            If False, SubConfig and all elements in it are saved as a JSON
            file. If True, SubConfig is saved as a folder, each dict key
            being a separate JSON file.
    """
    exclude_from_dict = [
        'name', 'folder', 'parent', 'initializing', 'signal', '_signal_chain',
        '_signal_modifiers', '_mirrored_config_attrs', '_inherited_configs',
        'save_as_dir', 'config_path', 'sender', 'multiple_senders'
    ]
    signal = Signal()

    def __init__(self,
                 name: str,
                 folder: str = None,
                 parent: SubConfig = None,
                 config: dict = None,
                 save_as_dir: bool = None):
        self.initializing = True
        self._mirrored_config_attrs = {}
        self._inherited_configs = []

        SubConfig.__init__(self, name=name, folder=folder, parent=parent,
                           save_as_dir=save_as_dir)
        DotDict.__init__(self)
        SignalEmitter.__init__(self, initialize_signal=False)

        if config is not None:
            update_dict(self, config)
        elif folder is not None:
            self.load()

        if self.parent is None:
            self._attach_mirrored_items()

    def __contains__(self, key):
        if DotDict.__contains__(self, key):
            return True
        elif DotDict.__contains__(self, 'inherit'):
            try:
                if self['inherit'].startswith('config:') or \
                        self['inherit'].startswith('environment:'):
                    return key in self[self['inherit']]
                else:
                    return key in self.parent[self['inherit']]
            except KeyError:
                return False
        else:
            return False

    def __getitem__(self, key):
        if key.startswith('config:'):
            if self.parent is not None:
                # Let parent config deal with this
                return self.parent[key]
            elif key == 'config:':
                return self
            else:
                return self[key.replace('config:', '')]
        elif key.startswith('environment:'):
            if self.parent is None:
                if silq.environment is None:
                    environment_config = self
                else:
                    environment_config = self[silq.environment]

                if key == 'environment:':
                    return environment_config
                else:
                    return environment_config[key.replace('environment:', '')]
            else:
                # Pass environment:path along to parent
                return self.parent[key]
        elif DotDict.__contains__(self, key):
            val = DotDict.__getitem__(self, key)
            if key == 'inherit':
                return val
            elif isinstance(val, str) and \
                    (val.startswith('config:') or val.startswith('environment:')):
                try:
                    return self[val]
                except KeyError:
                    raise KeyError(
                        f"Couldn't retrieve mirrored key {key} -> {val}")
            else:
                return val
        elif 'inherit' in self:
            if self['inherit'].startswith('config:') or \
                    self['inherit'].startswith('environment:'):
                return self[self['inherit']][key]
            else:
                return self.parent[self['inherit']][key]
        else:
            raise KeyError(f"Couldn't retrieve key {key}")

    def __setitem__(self, key, val):
        if not isinstance(key, str):
            raise TypeError(
                f'Config key {key} must have type str, not {type(key)}')

        # Update item in dict (modified version of DotDict)
        if '.' in key:
            myKey, restOfKey = key.split('.', 1)
            self.setdefault(myKey, DictConfig(name=myKey,
                                              config={restOfKey: val},
                                              parent=self))
        else:
            if isinstance(val, SubConfig):
                val.parent = self
                dict.__setitem__(self, key, val)
            elif isinstance(val, dict):
                # First set item, then update the dict. This avoids circular
                # referencing from mirrored attributes
                sub_dict = DictConfig(name=key, parent=self)
                dict.__setitem__(self, key, sub_dict)
                update_dict(self[key], val)
                # If self.initializing, sub_dict._attach_mirrored_items will be
                # called at the end of initialization, otherwise call now
                if not self.initializing:
                    sub_dict._attach_mirrored_items()
            elif isinstance(val, list):
                dict.__setitem__(self, key, ListConfig(name=key, parent=self))
                self[key] += val
            else:
                dict.__setitem__(self, key, val)

        if (self.initializing
                and (key == 'inherit'
                     or (isinstance(val, str)
                         and (val.startswith('config:')
                              or val.startswith('environment:'))))):
            return

        if key == 'inherit':
            if (val.startswith('config:') or val.startswith('environment:')):
                config_path = val
            else:
                # inherit a neighbouring dict element
                config_path = join_config_path(self.parent.config_path, val)
            # Register inheritance for signal sending
            self[config_path]._inherited_configs.append(self.config_path)

        if isinstance(val, str) and (val.startswith('config:')
                                     or val.startswith('environment:')):
            # item should mirror another config item.
            target_config_path, target_attr = split_config_path(val)
            target_config = self[target_config_path]
            if target_attr not in target_config:
                raise KeyError(f'{target_config} does not have {target_attr}')

            if target_attr not in target_config._mirrored_config_attrs:
                target_config._mirrored_config_attrs[target_attr] = []
            target_config._mirrored_config_attrs[target_attr].append(
                (self.config_path, key))

        # Retrieve value from self, which also handles mirroring/inheriting
        value = self[key]

        # Add key to config path before sending
        attr_config_path = join_config_path(self.config_path, key)
        # We make sure to get the value, in case the original value is mirrored
        self.signal.send(attr_config_path, value=value)
        if silq.environment is None:
            attr_environment_config_path = attr_config_path.replace(
                'config:', 'environment:')
            self.signal.send(attr_environment_config_path, value=value)

        # If any other config attributes mirror the attribute being set,
        # also send signals with sender being the mirrored attributes
        if self._inherited_configs:
            self._inherited_configs = self._send_ancillary_signals(
                value=value,
                target_paths=self._inherited_configs,
                attr=key,
                attr_path=attr_config_path)

        # If any other config dicts inherit from this DictConfig via 'inherit',
        # Also emit signals with sender being the inherited dicts
        if self._mirrored_config_attrs.get(key, []):
            updated_mirrored_config = self._send_ancillary_signals(
                value=value,
                target_paths=self._mirrored_config_attrs[key],
                attr=None,
                attr_path=attr_config_path)
            if updated_mirrored_config:
                self._mirrored_config_attrs[key] = updated_mirrored_config
            else:
                self._mirrored_config_attrs.pop(key, None)

    def _send_ancillary_signals(self,
                                value: Any,
                                target_paths: List[Union[str, Tuple[str]]],
                                attr: str = None,
                                attr_path: str = None):
        # mirrored_config_attrs = self._mirrored_config_attrs.get(key, [])
        updated_target_paths = []
        for target_full_path in target_paths:
            try:
                if attr is None:
                    # Attr is the second argument of the full path
                    target_path, target_attr = target_full_path
                else:
                    # Use default attr
                    target_path, target_attr = target_full_path, attr

                # Check if mirrored attr value still referencing current attr.
                # Getting the unreferenced value is a bit cumbersome
                target_config = self[target_path]

                # Target either inherits all attrs of current dict, or one of
                # its attributes mirrors this attribute. Here we check if this
                # hasn't changed
                inheritance = dict.get(target_config, 'inherit', None)
                if inheritance == self.config_path \
                        or dict.get(target_config, target_attr) == attr_path \
                        or (inheritance == self.name
                            and target_config.parent == self.parent):
                    target_attr_path = join_config_path(target_path, target_attr)
                    self.signal.send(target_attr_path, value=value)
                    if silq.environment is None:
                        target_attr_environment_path = target_attr_path.replace(
                            'config:', 'environment:')
                        self.signal.send(target_attr_environment_path,
                                         value=value)
                    updated_target_paths.append(target_full_path)
            except KeyError:
                pass

        return updated_target_paths

    def _attach_mirrored_items(self):
        """Attach mirrored items, to be done at the end of initialization.

        Mirrored items are those that inherit, or whose values start with
        ``config:`` or ``environment:``

        Note:
            Attribute ``initializing`` will be set to False
        """
        self.initializing = False
        for key, val in self.items(dependent_value=False):
            if isinstance(val, DictConfig):
                val._attach_mirrored_items()
            elif (key == 'inherit'
                  or (isinstance(val, str)
                      and (val.startswith('config:')
                           or val.startswith('environment:')))):
                self[key] = val

    def values(self):
        return [self[key] for key in self.keys()]

    def items(self, dependent_value=True):
        if dependent_value:
            return {key: self[key] for key in self.keys()}.items()
        else:
            return {key: dict.__getitem__(self, key)
                    for key in self.keys()}.items()

    def get(self, key: str, default: Any = None):
        """Override dictionary get, because it does not call __getitem__.

        Args:
            key: key to get
            default: default value if key not found. None by default

        Returns:
            value of key if in dictionary, else default value.
        """
        try:
            return self[key]
        except KeyError:
            return default

    def load(self, folder: str = None, update: bool = True):
        """Load SubConfig from folder.

        Args:
            folder: Folder from which to load SubConfig.
        """
        if update:
            self.clear()
        config = super().load(folder=folder)
        if update:
            update_dict(self, config)
        return config

    def to_dict(self, dependent_value: bool = True):
        """Convert DictConfig including all its children to a dictionary."""
        d = {}
        for key, val in self.items(dependent_value=dependent_value):
            if isinstance(val, DictConfig):
                d[key] = val.to_dict(dependent_value=dependent_value)
            elif isinstance(val, ListConfig):
                d[key] = val.to_list(dependent_value=dependent_value)
            else:
                d[key] = val
        return d

    serialize = to_dict

    def __deepcopy__(self, memo):
        return copy.deepcopy(self.to_dict())
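# The class-level signal above broadcasts every assignment, with the full
# config path as sender and the new value as a keyword argument; a sketch of
# a listener (handler name is illustrative):
def on_config_change(sender, value=None):
    print(f'{sender} changed to {value!r}')

DictConfig.signal.connect(on_config_change)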
def __init__(
    self,
    session_id,
    report,
    enqueue_forward_msg,
    client_state,
    request_queue,
    session_state,
    uploaded_file_mgr=None,
):
    """Initialize the ScriptRunner.

    (The ScriptRunner won't start executing until start() is called.)

    Parameters
    ----------
    session_id : str
        The ReportSession's id.

    report : Report
        The ReportSession's report.

    enqueue_forward_msg : callable
        Enqueues ForwardMsg protos for delivery to the client.

    client_state : streamlit.proto.ClientState_pb2.ClientState
        The current state from the client (widgets and query params).

    request_queue : ScriptRequestQueue
        The queue that the ReportSession is publishing ScriptRequests to.
        ScriptRunner will continue running until the queue is empty,
        and then shut down.

    session_state : SessionState
        The SessionState instance that holds this session's widget state.

    uploaded_file_mgr : UploadedFileManager
        The File manager to store the data uploaded by the file_uploader
        widget.

    """
    self._session_id = session_id
    self._report = report
    self._enqueue_forward_msg = enqueue_forward_msg
    self._request_queue = request_queue
    self._uploaded_file_mgr = uploaded_file_mgr
    self._client_state = client_state
    self._session_state: SessionState = session_state
    self._session_state.set_widgets_from_proto(client_state.widget_states)

    self.on_event = Signal(
        doc="""Emitted when a ScriptRunnerEvent occurs.

        This signal is *not* emitted on the same thread that the
        ScriptRunner was created on.

        Parameters
        ----------
        event : ScriptRunnerEvent

        exception : BaseException | None
            Our compile error. Set only for the
            SCRIPT_STOPPED_WITH_COMPILE_ERROR event.

        widget_states : streamlit.proto.WidgetStates_pb2.WidgetStates | None
            The ScriptRunner's final WidgetStates. Set only for the
            SHUTDOWN event.
        """
    )

    # Set to true when we process a SHUTDOWN request
    self._shutdown_requested = False

    # Set to true while we're executing. Used by
    # maybe_handle_execution_control_request.
    self._execing = False

    # This is initialized in start()
    self._script_thread = None
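A hedged usage sketch for an on_event-style signal as constructed above: the runner sends the event itself as the blinker sender, with optional keyword payloads. The ScriptRunnerEvent members and handler below are stand-ins, not the Streamlit API.

from enum import Enum
from blinker import Signal

class ScriptRunnerEvent(Enum):
    SCRIPT_STARTED = 1
    SHUTDOWN = 2

on_event = Signal(doc="Emitted when a ScriptRunnerEvent occurs.")

def handle_event(sender, exception=None, widget_states=None):
    # sender is the event itself: on_event.send(ScriptRunnerEvent.X, ...)
    print('got event:', sender, exception, widget_states)

# weak=False keeps the handler alive even if no other reference exists
on_event.connect(handle_event, weak=False)
on_event.send(ScriptRunnerEvent.SCRIPT_STARTED)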
class WokEngine(Synchronizable): """ The Wok engine manages the execution of workflow cases. Each case represents a workflow loaded with a certain configuration. """ def __init__(self, conf, conf_base_path=None): Synchronizable.__init__(self) self._global_conf = conf self._expanded_global_conf = conf.clone().expand_vars() self._conf = self._expanded_global_conf.get("wok", default=Data.element) self._conf_base_path = conf_base_path self._log = logger.get_logger("wok.engine") self._work_path = self._conf.get("work_path", os.path.join(os.getcwd(), "wok-files")) if not os.path.exists(self._work_path): os.makedirs(self._work_path) self._cases = [] self._cases_by_name = {} self._stopping_cases = {} #self._lock = Lock() self._cvar = threading.Condition(self._lock) self._run_thread = None self._running = False self._finished_event = threading.Event() self._job_task_map = {} self._logs_threads = [] self._logs_queue = Queue() self._join_thread = None self._join_queue = Queue() self._num_log_threads = self._conf.get("num_log_threads", cpu_count()) self._max_alive_threads = 2 + self._num_log_threads self._num_alive_threads = AtomicCounter() self._started = False self._notified = False recover = self._conf.get("recover", False) db_path = os.path.join(self._work_path, "engine.db") if not recover and os.path.exists(db_path): os.remove(db_path) self._db = db.create_engine("sqlite:///{}".format(db_path), drop_tables=not recover) # platforms self._platforms = self._create_platforms() self._platforms_by_name = {} for platform in self._platforms: self._platforms_by_name[platform.name] = platform default_platform_name = self._conf.get("default_platform", self._platforms[0].name) if default_platform_name not in self._platforms_by_name: self._log.warn("Platform '{}' not found, using '{}' as the default platform".format( default_platform_name, self._platforms[0].name)) default_platform_name = self._platforms[0].name self._default_platform = self._platforms_by_name[default_platform_name] # projects if conf_base_path is None: conf_base_path = os.getcwd() projects_conf = self._global_conf.get("wok.projects") self._projects = ProjectManager(projects_conf, base_path=conf_base_path) self._projects.initialize() # signals self.case_created = Signal() self.case_state_changed = Signal() self.case_started = Signal() self.case_finished = Signal() self.case_removed = Signal() # recovering if recover: self.__recover_from_db() def _create_platforms(self): """ Creates the platform according to the configuration :return: Platform """ platform_confs = self._conf.get("platforms") if platform_confs is None: platform_confs = Data.list() elif not Data.is_list(platform_confs): self._log.error("Wrong configuration type for 'platforms': {}".format(platform_confs)) platform_confs = Data.list() if len(platform_confs) == 0: platform_confs += [Data.element(dict(type="local"))] platforms = [] names = {} for pidx, platform_conf in enumerate(platform_confs): if isinstance(platform_conf, basestring): if not os.path.isabs(platform_conf) and self._conf_base_path is not None: platform_conf = os.path.join(self._conf_base_path, platform_conf) platform_conf = ConfigLoader(platform_conf).load() if not Data.is_element(platform_conf): raise errors.ConfigTypeError("wok.platforms[{}]".format(pidx, platform_conf)) ptype = platform_conf.get("type", "local") name = platform_conf.get("name", ptype) if name in names: name = "{}-{}".format(name, names[name]) names[name] += 1 else: names[name] = 2 platform_conf["name"] = name if "work_path" not in platform_conf: 
platform_conf["work_path"] = os.path.join(self._work_path, "platform_{}".format(name)) self._log.info("Creating '{}' platform ...".format(name)) self._log.debug("Platform configuration: {}".format(repr(platform_conf))) platforms += [create_platform(ptype, platform_conf)] return platforms def _on_job_update(self, event, **kwargs): self.notify() def __recover_from_db(self): raise NotImplementedError() def __queue_adaptative_get(self, queue, start_timeout=1.0, max_timeout=6.0): timeout = start_timeout msg = None while self._running and msg is None: try: msg = queue.get(timeout=timeout) except Empty: if timeout < max_timeout: timeout += 0.5 except: break return msg # Not used anywhere def __queue_batch_get(self, queue, start_timeout=1, max_timeout=5): timeout = start_timeout msg_batch = [] while self._running and len(msg_batch) == 0: try: msg_batch += [queue.get(timeout=timeout)] while not queue.empty(): msg_batch += [queue.get(timeout=timeout)] except Empty: if timeout < max_timeout: timeout += 1 return msg_batch def __job_submissions(self, session, platform): #FIXME Be fair with priorities between different cases ? query = session.query(db.WorkItem)\ .filter(db.WorkItem.state == runstates.READY)\ .filter(db.WorkItem.platform == platform.name)\ .order_by(db.WorkItem.priority) for workitem in query: case = self._cases_by_name[workitem.case.name] task = case.component(workitem.task.cname) js = JobSubmission( case=case, task=task, workitem_id=workitem.id, job_name=workitem.cname, task_conf=task.conf, priority=workitem.priority) execution = task.execution cmd_builder = create_command_builder(execution.mode) js.script, js.env = cmd_builder.prepare(case, task, workitem.index) yield js def __remove_case(self, session, case): """ Definitively remove a case. The engine should be locked and no case jobs running. 
""" self._log.info("Dropping case {} ...".format(case.name)) del self._cases_by_name[case.name] self._cases.remove(case) # remove engine db objects and finalize case self._log.debug(" * database ...") case.remove(session) self._lock.release() try: #TODO clean the job manager output files try: self._log.debug(" * logs ...") logs_path = os.path.join(self._work_path, "logs", case.name) shutil.rmtree(logs_path) except: self._log.exception("Error removing logs at {}".format(logs_path)) # remove data self._log.debug(" * data ...") for platform in case.platforms: platform.data.remove_case(case.name) # remove storage self._log.debug(" * storage ...") for platform in case.platforms: platform.storage.delete_container(case.name) # emit signal self.case_removed.send(case) finally: self._lock.acquire() # threads ---------------------- @synchronized def _run(self): set_thread_title() num_exc = 0 self._running = True self._num_alive_threads += 1 # Start the logs threads for i in range(self._num_log_threads): t = threading.Thread(target=self._logs, args=(i, ), name="wok-engine-logs-%d" % i) self._logs_threads += [t] t.start() # Start the join thread self._join_thread = threading.Thread(target=self._join, name="wok-engine-join") self._join_thread.start() _log = logger.get_logger("wok.engine.run") _log.debug("Engine run thread ready") while self._running: session = db.Session() try: #_log.debug("Scheduling new tasks ...") set_thread_title("scheduling") updated_tasks = set() # schedule tasks ready to be executed and save new workitems into the db for case in self._cases: tasks = case.schedule(session) updated_tasks.update(tasks) session.commit() # submit workitems ready to be executed for platform in self._platforms: job_submissions = self.__job_submissions(session, platform) for js, job_id, job_state in platform.submit(job_submissions): workitem = session.query(db.WorkItem).filter(db.WorkItem.id == js.workitem_id).one() workitem.job_id = job_id workitem.state = job_state js.task.dirty = True session.commit() updated_tasks.add(js.task) session.close() session = None #_log.debug("Waiting for events ...") set_thread_title("waiting") while len(updated_tasks) == 0 and not self._notified and self._running: self._cvar.wait(1) self._notified = False if not self._running: break session = db.Session() # there is a session.close() in the finished block #_log.debug("Stopping jobs for aborting instances ...") set_thread_title("working") # check stopping instances for case in self._cases: if (case.state == runstates.ABORTING or case.removed) and case not in self._stopping_cases: num_job_ids = session.query(db.WorkItem.job_id).filter(db.WorkItem.case_id == case.id)\ .filter(~db.WorkItem.state.in_(runstates.TERMINAL_STATES)).count() if num_job_ids == 0: if case.state == runstates.ABORTING: _log.debug("Aborted case {} with no running jobs".format(case.name)) dbcase = session.query(db.Case).filter(db.Case.id == case.id) dbcase.state = case.state = runstates.ABORTED session.commit() else: _log.debug("Stopped case {} with no running jobs".format(case.name)) if case.removed: _log.debug("Removing case {} with no running jobs".format(case.name)) self.__remove_case(session, case) session.commit() else: _log.info("Stopping {} jobs for case {} ...".format(num_job_ids, case.name)) self._stopping_cases[case] = set() for platform in self._platforms: job_ids = [int(r[0]) for r in session.query(db.WorkItem.job_id) .filter(db.WorkItem.case_id == case.id)\ .filter(db.WorkItem.platform == platform.name)\ 
.filter(~db.WorkItem.state.in_(runstates.TERMINAL_STATES))] self._stopping_cases[case].update(job_ids) platform.jobs.abort(job_ids) #_log.debug("Checking job state changes ...") # detect workitems which state has changed for platform in self._platforms: for job_id, state in platform.jobs.state(): try: workitem = session.query(db.WorkItem).filter(db.WorkItem.job_id == job_id).one() except NoResultFound: _log.warn("No work-item available for the job {0} while retrieving state".format(job_id)) platform.jobs.abort([job_id]) platform.jobs.join(job_id) continue if workitem.state != state: case = self._cases_by_name[workitem.case.name] task = case.component(workitem.task.cname) task.dirty = True workitem.state = state workitem.substate = runstates.LOGS_RETRIEVAL session.commit() updated_tasks.add(task) # if workitem has finished, queue it for logs retrieval if state in runstates.TERMINAL_STATES: self._logs_queue.put((workitem.id, job_id)) _log.debug("[{}] Work-Item {} changed state to {}".format(case.name, workitem.cname, state)) #_log.debug("Updating components state ...") # update affected components state updated_cases = set([task.case for task in updated_tasks]) for case in updated_cases: case.update_states(session) case.update_count_by_state(session) case.clean_components(session) session.commit() if case.state == runstates.RUNNING: self._lock.release() try: self.case_started.send(case) finally: self._lock.acquire() for task in updated_tasks: case = task.case #_log.debug("[{}] Component {} updated state to {} ...".format( # component.case.name, component.cname, component.state)) count = task.workitem_count_by_state sb = ["[{}] {} ({})".format(case.name, task.cname, task.state.title)] sep = " " for state in runstates.STATES: if state in count: sb += [sep, "{}={}".format(state.symbol, count[state])] if sep == " ": sep = ", " if task.state == runstates.FINISHED and task.state in count: elapsed = str(task.elapsed) elapsed = elapsed.split(".")[0] sb += [" ", "<{}>".format(elapsed)] self._log.info("".join(sb)) except BaseException as ex: num_exc += 1 _log.warn("Exception in run thread ({}): {}".format(num_exc, str(ex))) #if num_exc > 3: # raise #else: from traceback import format_exc _log.debug(format_exc()) try: if session is not None: session.rollback() except Exception as ex: _log.warn("Session rollback failed") _log.exception(ex) finally: try: if session is not None: session.close() except Exception as ex: _log.warn("Session close failed") _log.exception(ex) session = None set_thread_title("finishing") try: # print cases state before leaving the thread #for case in self._cases: # _log.debug("Case state:\n" + repr(case)) for t in self._logs_threads: t.join() self._lock.release() self._join_thread.join() self._lock.acquire() _log.debug("Engine run thread finished") except Exception as ex: _log.exception(ex) self._running = False self._num_alive_threads -= 1 def _logs(self, index): "Log retrieval thread" set_thread_title() self._num_alive_threads += 1 _log = logger.get_logger("wok.engine.logs-{}".format(index)) _log.debug("Engine logs thread ready") num_exc = 0 while self._running: set_thread_title("waiting") # get the next task to retrieve the logs job_info = self.__queue_adaptative_get(self._logs_queue) if job_info is None: continue workitem_id, job_id = job_info session = db.Session() task = None try: workitem = session.query(db.WorkItem).filter(db.WorkItem.id == workitem_id).one() case = self._cases_by_name[workitem.case.name] task = case.component(workitem.task.cname) 
set_thread_title(workitem.cname) _log.debug("[{}] Reading logs for work-item {} ...".format(case.name, workitem.cname)) output = task.platform.jobs.output(job_id) if output is None: output = StringIO.StringIO() path = os.path.join(self._work_path, "logs", case.name, task.cname) if not os.path.isdir(path): try: os.makedirs(path) except: if not os.path.isdir(path): raise path = os.path.join(path, "{:08}.db".format(workitem.index)) if os.path.isfile(path): os.remove(path) logs_db = LogsDb(path) logs_db.open() logs_db.add(case.name, task.cname, workitem.index, output) logs_db.close() _log.debug("[{}] Done with logs of work-item {}".format(case.name, workitem.cname)) except BaseException as ex: num_exc += 1 session.rollback() _log.info("Exception in logs thread ({}): {}".format(num_exc, str(ex))) from traceback import format_exc _log.debug(format_exc()) finally: workitem.substate = runstates.JOINING self._join_queue.put(job_info) session.commit() session.close() self._num_alive_threads -= 1 _log.debug("Engine logs thread finished") def _join(self): "Joiner thread" set_thread_title() self._num_alive_threads += 1 _log = logger.get_logger("wok.engine.join") _log.debug("Engine join thread ready") session = None num_exc = 0 while self._running: try: set_thread_title("waiting") job_info = self.__queue_adaptative_get(self._join_queue) if job_info is None: continue workitem_id, job_id = job_info with self._lock: session = db.Session() workitem = session.query(db.WorkItem).filter(db.WorkItem.id == workitem_id).one() case = self._cases_by_name[workitem.case.name] task = case.component(workitem.task.cname) set_thread_title(task.cname) #_log.debug("Joining work-item %s ..." % task.cname) jr = task.platform.jobs.join(job_id) wr = Data.element(dict( hostname=jr.hostname, created=jr.created.strftime(_DT_FORMAT) if jr.created is not None else None, started=jr.started.strftime(_DT_FORMAT) if jr.started is not None else None, finished=jr.finished.strftime(_DT_FORMAT) if jr.finished is not None else None, exitcode=jr.exitcode.code if jr.exitcode is not None else None)) r = task.platform.data.load_workitem_result(case.name, task.cname, workitem.index) if r is not None: if r.exception is not None: wr["exception"] = r.exception if r.trace is not None: wr["trace"] = r.trace workitem.substate = None workitem.result = wr case.num_active_workitems -= 1 session.commit() # check if there are still more work-items num_workitems = session.query(func.count(db.WorkItem.id)).filter( ~db.WorkItem.state.in_(runstates.TERMINAL_STATES)).scalar() if self._single_run and num_workitems == 0: stop_engine = True for case in self._cases: stop_engine = stop_engine and (case.state in runstates.TERMINAL_STATES) #self._running = not stop_engine if stop_engine: self._finished_event.set() _log.debug("[{}] Joined work-item {}".format(case.name, workitem.cname)) # check stopping instances if case in self._stopping_cases: job_ids = self._stopping_cases[case] if job_id in job_ids: job_ids.remove(job_id) if len(job_ids) == 0: del self._stopping_cases[case] if case.state == runstates.ABORTING: workitem.case.state = case.state = runstates.ABORTED session.commit() if case.removed: self.__remove_case(session, case) session.commit() else: _log.debug("Still waiting for {} jobs to stop".format(len(job_ids))) if case.state in runstates.TERMINAL_STATES and case.num_active_workitems == 0: _log.info("[{}] Case {}. 
Total time: {}".format(case.name, case.state.title, str(case.elapsed))) self._lock.release() try: self.case_finished.send(case) finally: self._lock.acquire() except BaseException as ex: num_exc += 1 _log.warn("Exception in join thread ({}): {}".format(num_exc, str(ex))) from traceback import format_exc _log.debug(format_exc()) try: if session is not None: session.rollback() except Exception as ex: _log.warn("Session rollback failed") _log.exception(ex) finally: try: if session is not None: session.close() except Exception as ex: _log.warn("Session close failed") _log.exception(ex) self._num_alive_threads -= 1 _log.debug("Engine join thread finished") # API ----------------------------------- @property def conf(self): return self._conf @property def work_path(self): return self._work_path @property def projects(self): return self._projects def platform(self, name): return self._platforms_by_name.get(name) @property def default_platform(self): return self._default_platform @synchronized def start(self, wait=True, single_run=False): self._log.info("Starting engine ...") started_platforms = [] try: for platform in self._platforms: started_platforms += [platform] platform.start() platform.callbacks.add(events.JOB_UPDATE, self._on_job_update) except BaseException as ex: self._log.error(str(ex)) for platform in started_platforms: platform.close() raise #for project in self._projects: # self._default_platform.sync_project(project) self._single_run = single_run self._run_thread = threading.Thread(target=self._run, name="wok-engine-run") self._run_thread.start() self._lock.release() try: try: self._num_alive_threads.wait_condition(lambda value: value < self._max_alive_threads) self._started = True self._log.info("Engine started") except KeyboardInterrupt: wait = False self._log.warn("Ctrl-C pressed ...") except Exception as e: wait = False self._log.error("Exception while waiting for the engine to start") self._log.exception(e) if wait: self.wait() finally: self._lock.acquire() def wait(self): self._log.info("Waiting for the engine to finish ...") try: finished = self._finished_event.wait(1) while not finished: finished = self._finished_event.wait(1) except KeyboardInterrupt: self._log.warn("Ctrl-C pressed ...") except Exception as e: self._log.error("Exception while waiting for the engine to finish, stopping the engine ...") self._log.exception(e) self._log.info("Finished waiting for the engine ...") def _stop_threads(self): self._log.info("Stopping threads ...") if self._run_thread is not None: with self._lock: self._running = False self._cvar.notify() while self._run_thread.isAlive(): try: self._run_thread.join(1) except KeyboardInterrupt: self._log.warn("Ctrl-C pressed, killing the process ...") import signal os.kill(os.getpid(), signal.SIGTERM) except Exception as e: self._log.error("Exception while waiting for threads to finish ...") self._log.exception(e) self._log.warn("killing the process ...") exit(-1) import signal os.kill(os.getpid(), signal.SIGTERM) self._run_thread = None self._log.info("All threads finished ...") @synchronized def stop(self): self._log.info("Stopping the engine ...") self._finished_event.set() self._lock.release() try: if self._run_thread is not None: self._stop_threads() for platform in self._platforms: platform.close() finally: self._lock.acquire() self._started = False self._log.info("Engine stopped") def running(self): return self._started def notify(self, lock=True): if lock: self._lock.acquire() self._notified = True self._cvar.notify() if lock: 
self._lock.release() @synchronized def cases(self): instances = [] for inst in self._cases: instances += [SynchronizedCase(self, inst)] return instances @synchronized def case(self, name): inst = self._cases_by_name.get(name) if inst is None: return None return SynchronizedCase(self, inst) @synchronized def exists_case(self, name): return name in self._cases_by_name @synchronized def create_case(self, case_name, conf_builder, project_name, flow_name, container_name): "Creates a new workflow case" session = db.Session() if session.query(db.Case).filter(db.Case.name==case_name).count() > 0: raise Exception("A case with this name already exists: {}".format(case_name)) flow_uri = "{}:{}".format(project_name, flow_name) self._log.info("Creating case {} from {} ...".format(case_name, flow_uri)) try: try: flow = self.projects.load_flow(flow_uri) project = flow.project except: self._log.error("Error while loading the workflow from {}".format(flow_uri)) raise for platform in self._platforms: try: platform.data.remove_case(case_name) platform.data.create_case(case_name) except: self._log.error("Error while initializing data for case {}".format(case_name)) raise try: case = Case(case_name, conf_builder, project, flow, container_name, engine=self) self._cases += [case] self._cases_by_name[case_name] = case case.persist(session) session.flush() self.notify(lock=False) except: self._log.error("Error while creating case {} for the workflow {} with configuration {}".format( case_name, flow_uri, conf_builder.get_conf())) raise except: session.rollback() #self._log.error("Error while creating case {} for the workflow {} with configuration {}".format( # case_name, flow_uri, conf_builder.get_conf())) raise session.close() self._log.debug("\n" + repr(case)) self._lock.release() try: self.case_created.send(case) finally: self._lock.acquire() return SynchronizedCase(self, case) @synchronized def remove_case(self, name): if name in self._cases_by_name: session = db.Session() case = self._cases_by_name[name] dbcase = session.query(db.Case).filter(db.Case.id == case.id).first() dbcase.removed = case.removed = True if case.state not in runstates.TERMINAL_STATES + [runstates.READY]: dbcase.state = case.state = runstates.ABORTING num_retries = 3 while num_retries > 0: try: session.commit() self.notify(lock=False) self._log.debug("Case {} marked for removal".format(case.name)) except BaseException as ex: num_retries -= 1 _log.info("Exception in remove_case: {}".format(str(ex))) if num_retries > 0: _log.info("Remaining retries = {}".format(num_retries)) import time time.sleep(1) else: from traceback import format_exc _log.debug(format_exc()) session.rollback() finally: session.close() else: self._log.error("Trying to remove a non existing case: {}".format(name)) '''
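A minimal sketch of the emit-outside-the-lock pattern the engine uses when sending case_created/case_started/case_finished above: the lock is released before the signal fires so a handler that calls back into synchronized engine methods cannot deadlock, then re-acquired. The names below are illustrative, not the Wok API.

import threading

from blinker import Signal

case_finished = Signal()
_lock = threading.Lock()

def _notify_finished(case):
    # Release before emitting, re-acquire afterwards (as __remove_case does)
    _lock.release()
    try:
        case_finished.send(case)
    finally:
        _lock.acquire()

def handler(case):
    with _lock:  # safe only because the emitter released the lock first
        print('finished:', case)

case_finished.connect(handler, weak=False)

_lock.acquire()
try:
    _notify_finished('demo-case')
finally:
    _lock.release()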
def __repr__(self):
    base = Signal.__repr__(self)
    return "%s; %r>" % (base[:-1], self.name)
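This matches how blinker's NamedSignal builds its repr: the base Signal repr minus the trailing '>', plus the name. A quick check (the object address in the output is illustrative):

from blinker import NamedSignal

sig = NamedSignal('ready')
print(repr(sig))  # e.g. <blinker.base.NamedSignal object at 0x7f...; 'ready'>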
class ScriptRunner(object): def __init__( self, report, main_dg, sidebar_dg, widget_states, request_queue, uploaded_file_mgr=None, ): """Initialize the ScriptRunner. (The ScriptRunner won't start executing until start() is called.) Parameters ---------- report : Report The ReportSession's report. main_dg : DeltaGenerator The ReportSession's main DeltaGenerator. sidebar_dg : DeltaGenerator The ReportSession's sidebar DeltaGenerator. widget_states : streamlit.proto.Widget_pb2.WidgetStates The ReportSession's current widget states request_queue : ScriptRequestQueue The queue that the ReportSession is publishing ScriptRequests to. ScriptRunner will continue running until the queue is empty, and then shut down. uploaded_file_mgr : UploadedFileManager The File manager to store the data uploaded by the file_uplpader widget. """ self._report = report self._main_dg = main_dg self._sidebar_dg = sidebar_dg self._request_queue = request_queue self._uploaded_file_mgr = uploaded_file_mgr self._widgets = Widgets() self._widgets.set_state(widget_states) self.on_event = Signal(doc="""Emitted when a ScriptRunnerEvent occurs. This signal is *not* emitted on the same thread that the ScriptRunner was created on. Parameters ---------- event : ScriptRunnerEvent exception : BaseException | None Our compile error. Set only for the SCRIPT_STOPPED_WITH_COMPILE_ERROR event. widget_states : streamlit.proto.Widget_pb2.WidgetStates | None The ScriptRunner's final WidgetStates. Set only for the SHUTDOWN event. """) # Set to true when we process a SHUTDOWN request self._shutdown_requested = False # Set to true while we're executing. Used by # maybe_handle_execution_control_request. self._execing = False # This is initialized in start() self._script_thread = None def start(self): """Start a new thread to process the ScriptEventQueue. This must be called only once. """ if self._script_thread is not None: raise Exception("ScriptRunner was already started") self._script_thread = ReportThread( main_dg=self._main_dg, sidebar_dg=self._sidebar_dg, widgets=self._widgets, target=self._process_request_queue, name="ScriptRunner.scriptThread", uploaded_file_mgr=self._uploaded_file_mgr, ) self._script_thread.start() def _process_request_queue(self): """Process the ScriptRequestQueue and then exits. This is run in a separate thread. """ LOGGER.debug("Beginning script thread") while not self._shutdown_requested and self._request_queue.has_request: request, data = self._request_queue.dequeue() if request == ScriptRequest.STOP: LOGGER.debug("Ignoring STOP request while not running") elif request == ScriptRequest.SHUTDOWN: LOGGER.debug("Shutting down") self._shutdown_requested = True elif request == ScriptRequest.RERUN: self._run_script(data) else: raise RuntimeError("Unrecognized ScriptRequest: %s" % request) # Send a SHUTDOWN event before exiting. This includes the widget values # as they existed after our last successful script run, which the # ReportSession will pass on to the next ScriptRunner that gets # created. self.on_event.send(ScriptRunnerEvent.SHUTDOWN, widget_states=self._widgets.get_state()) def _is_in_script_thread(self): """True if the calling function is running in the script thread""" return self._script_thread == threading.current_thread() def maybe_handle_execution_control_request(self): if not self._is_in_script_thread(): # We can only handle execution_control_request if we're on the # script execution thread. 
However, it's possible for deltas to # be enqueued (and, therefore, for this function to be called) # in separate threads, so we check for that here. return if not self._execing: # If the _execing flag is not set, we're not actually inside # an exec() call. This happens when our script exec() completes, # we change our state to STOPPED, and a statechange-listener # enqueues a new ForwardEvent return # Pop the next request from our queue. request, data = self._request_queue.dequeue() if request is None: return LOGGER.debug("Received ScriptRequest: %s", request) if request == ScriptRequest.STOP: raise StopException() elif request == ScriptRequest.SHUTDOWN: self._shutdown_requested = True raise StopException() elif request == ScriptRequest.RERUN: raise RerunException(data) else: raise RuntimeError("Unrecognized ScriptRequest: %s" % request) def _install_tracer(self): """Install function that runs before each line of the script.""" def trace_calls(frame, event, arg): self.maybe_handle_execution_control_request() return trace_calls # Python interpreters are not required to implement sys.settrace. if hasattr(sys, "settrace"): sys.settrace(trace_calls) @contextmanager def _set_execing_flag(self): """A context for setting the ScriptRunner._execing flag. Used by maybe_handle_execution_control_request to ensure that we only handle requests while we're inside an exec() call """ if self._execing: raise RuntimeError("Nested set_execing_flag call") self._execing = True try: yield finally: self._execing = False def _run_script(self, rerun_data): """Run our script. Parameters ---------- rerun_data: RerunData The RerunData to use. """ assert self._is_in_script_thread() LOGGER.debug("Running script %s", rerun_data) # Reset delta generator so it starts from index 0. import streamlit as st st._reset(self._main_dg, self._sidebar_dg) self.on_event.send(ScriptRunnerEvent.SCRIPT_STARTED) # Compile the script. Any errors thrown here will be surfaced # to the user via a modal dialog in the frontend, and won't result # in their previous report disappearing. try: # Python 3 got rid of the native execfile() command, so we read # the file, compile it, and exec() it. This implementation is # compatible with both 2 and 3. with source_util.open_python_file(self._report.script_path) as f: filebody = f.read() if config.get_option("runner.magicEnabled"): filebody = magic.add_magic(filebody, self._report.script_path) code = compile( filebody, # Pass in the file path so it can show up in exceptions. self._report.script_path, # We're compiling entire blocks of Python, so we need "exec" # mode (as opposed to "eval" or "single"). mode="exec", # Don't inherit any flags or "future" statements. flags=0, dont_inherit=1, # Parameter not supported in Python2: # optimize=-1, ) except BaseException as e: # We got a compile error. Send an error event and bail immediately. LOGGER.debug("Fatal script error: %s" % e) self.on_event.send( ScriptRunnerEvent.SCRIPT_STOPPED_WITH_COMPILE_ERROR, exception=e) return # If we get here, we've successfully compiled our script. The next step # is to run it. Errors thrown during execution will be shown to the # user as ExceptionElements. # Update the Widget singleton with the new widget_state if rerun_data.widget_state is not None: self._widgets.set_state(rerun_data.widget_state) if config.get_option("runner.installTracer"): self._install_tracer() # This will be set to a RerunData instance if our execution # is interrupted by a RerunException. rerun_with_data = None try: # Create fake module. 
This gives us a name global namespace to # execute the code in. module = _new_module("__main__") # Install the fake module as the __main__ module. This allows # the pickle module to work inside the user's code, since it now # can know the module where the pickled objects stem from. # IMPORTANT: This means we can't use "if __name__ == '__main__'" in # our code, as it will point to the wrong module!!! sys.modules["__main__"] = module # Add special variables to the module's globals dict. # Note: The following is a requirement for the CodeHasher to # work correctly. The CodeHasher is scoped to # files contained in the directory of __main__.__file__, which we # assume is the main script directory. module.__dict__["__file__"] = self._report.script_path with modified_sys_path(self._report), self._set_execing_flag(): exec(code, module.__dict__) except RerunException as e: rerun_with_data = e.rerun_data except StopException: pass except BaseException as e: # Show exceptions in the Streamlit report. LOGGER.debug(e) import streamlit as st st.exception(e) # This is OK because we're in the script thread. # TODO: Clean up the stack trace, so it doesn't include # ScriptRunner. finally: self._widgets.reset_triggers() self.on_event.send(ScriptRunnerEvent.SCRIPT_STOPPED_WITH_SUCCESS) # Use _log_if_error() to make sure we never ever ever stop running the # script without meaning to. _log_if_error(_clean_problem_modules) if rerun_with_data is not None: self._run_script(rerun_with_data)
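A self-contained sketch of the tracer-based interruption used by _install_tracer above: sys.settrace runs a callback on every Python-level call event, giving a long-running user script a hook where a pending STOP/RERUN request can raise. The stop_requested flag here is a stand-in for the runner's request queue.

import sys

class StopException(Exception):
    pass

stop_requested = False  # a real runner would flip this from its request queue

def trace_calls(frame, event, arg):
    # Raising here aborts the traced script cooperatively, which is how
    # STOP/RERUN can interrupt an exec() in progress.
    if stop_requested:
        raise StopException()
    return trace_calls  # keep tracing nested calls

def user_script():
    for i in range(3):
        print('step', i)

if hasattr(sys, 'settrace'):  # settrace is optional per the language spec
    sys.settrace(trace_calls)
try:
    user_script()
finally:
    if hasattr(sys, 'settrace'):
        sys.settrace(None)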
def __init__(self, df: DataFrame, categorical_columns: typing.Union[typing.List[str], None] = None, sample: typing.Union[float, int, None] = None, seed: typing.Union[int, None] = None, *args, **kwargs): """ :param df: A pandas.DataFrame object. :param categorical_columns: If given, specifies which columns are to be interpreted as categorical. Those columns have to include all columns of the DataFrame which have type `object`, `str`, `bool` or `category`. This means it can only add columns which do not have the aforementioned types. :param seed: Random seed used for sampling the data. Values can be any integer between 0 and 2**32 - 1 inclusive or None. :param args: args for HasTraits superclass :param kwargs: kwargs for HasTraits superclass """ super().__init__(*args, **kwargs) validate.validate_data_frame(df) validate.validate_sample(sample) validate.validate_seed(seed) self.selection_type = SelectionType.STANDARD if sample is None: self._df = df else: if isinstance(sample, float): if sample < 0.0 or sample > 1.0: raise ValueError( "Sample has to be between 0.0 and 1.0. Invalid value : %d" % sample) self._df = df.sample(frac=sample, random_state=seed) else: if sample < 0 or sample > len(df): raise ValueError( "Sample has to be between 0 and the length of the DataFrame (%d). Invalid value: " "%d" % (len(df), sample)) self._df = df.sample(n=sample, random_state=seed) self.columns = list(self._df.columns.values) self.column_store = ColumnStore(self._df, self.columns, categorical_columns) self.numerical_columns = self.column_store.numerical_columns self.time_columns = self.column_store.time_columns self.categorical_columns = self.column_store.categorical_columns if self.categorical_columns is not None: self._df[self.categorical_columns].astype(dtype="category") self._length = len(self._df) self._indices = set(range(self._length)) self._brushed_indices: typing.Set[int] = self._indices self.brushed_data_invalidated = True self._brushed_data = None self.on_indices_changed = Signal() if len(self.columns) < 2: raise ValueError( "The passed DataFrame only has %d column, which is insufficient for analysis." % len(self.columns)) self.few_num_cols = len(self.numerical_columns) < 2 self.few_cat_cols = len(self.categorical_columns) < 2
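A hedged sketch of the sampling rule implemented above, extracted into a standalone helper: a float is treated as a fraction, an int as an absolute row count, and both delegate to DataFrame.sample with a fixed random_state for reproducibility. The helper name is an assumption.

import pandas as pd

def sample_df(df: pd.DataFrame, sample, seed=None) -> pd.DataFrame:
    if sample is None:
        return df
    if isinstance(sample, float):
        if not 0.0 <= sample <= 1.0:
            raise ValueError("Sample has to be between 0.0 and 1.0")
        return df.sample(frac=sample, random_state=seed)
    if not 0 <= sample <= len(df):
        raise ValueError("Sample has to be between 0 and the length of the DataFrame")
    return df.sample(n=sample, random_state=seed)

df = pd.DataFrame({'a': range(10)})
print(len(sample_df(df, 0.5, seed=0)))  # 5
print(len(sample_df(df, 3, seed=0)))    # 3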
import sys

from blinker import Signal

signal = Signal('disconnect_signal')


@signal.connect_via(Signal.ANY)
def receive_data(sender, **kw):
    print("Caught strong signal from : {}, data: {}".format(sender, kw))
    return "test"


@signal.connect_via(Signal.ANY)
def receive_data2(sender, **kw):
    print("Caught signal2 from : {}, data: {}".format(sender, kw))
    return "received2"


if __name__ == "__main__":
    if len(sys.argv) > 1 and sys.argv[1] == 'disconnect':
        signal.disconnect(receive_data, Signal.ANY)
    result = signal.send('anonymous', abc=123)
    print(result)
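For reference, a run of this script might look like the following. This is a sketch: the file name disconnect_demo.py is assumed, the function reprs are illustrative, and blinker does not document a guaranteed receiver ordering (connection order in practice). Note that send() returns a list of (receiver, return value) pairs.

    $ python disconnect_demo.py
    Caught strong signal from : anonymous, data: {'abc': 123}
    Caught signal2 from : anonymous, data: {'abc': 123}
    [(<function receive_data at 0x...>, 'test'), (<function receive_data2 at 0x...>, 'received2')]

    $ python disconnect_demo.py disconnect
    Caught signal2 from : anonymous, data: {'abc': 123}
    [(<function receive_data2 at 0x...>, 'received2')]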
class GraphBase(ISerializable): """Data structure representing a nodes graph :var graphManager: reference to graph manager :vartype graphManager: :class:`~PyFlow.Core.GraphManager.GraphManager` :var nameChanged: signal emitted after graph name was changed :vartype nameChanged: :class:`~blinker.base.Signal` :var categoryChanged: signal emitted after graph category was changed :vartype categoryChanged: :class:`~blinker.base.Signal` :var childGraphs: a set of child graphs :vartype childGraphs: :class:`set` :var nodes: nodes storage. Dictionary with :class:`uuid.UUID` as key and :class:`~PyFlow.Core.NodeBase.NodeBase` as value :vartype nodes: :class:`dict` :var uid: Unique identifier :vartype uid: :class:`uuid.UUID` .. py:method:: parentGraph :property: :getter: Returns a reference to parent graph or None if this graph is root :setter: Sets new graph as new parent for this graph .. py:method:: name :property: :getter: Returns graph name :setter: Sets new graph name and fires signal .. py:method:: category :property: :getter: Returns graph category :setter: Sets new graph category and fires signal .. py:method:: pins :property: :getter: Returns dictionary with :class:`uuid.UUID` as key and :class:`~PyFlow.Core.PinBase.PinBase` as value :rtype: dict """ def __init__(self, name, manager, parentGraph=None, category='', uid=None, *args, **kwargs): super(GraphBase, self).__init__(*args, **kwargs) self.graphManager = manager self._isRoot = False self.nameChanged = Signal(str) self.categoryChanged = Signal(str) self.__name = name self.__category = category self._parentGraph = None self.childGraphs = set() self.parentGraph = parentGraph self._nodes = {} self._vars = {} self.uid = uuid.uuid4() if uid is None else uid manager.add(self) def setIsRoot(self, bIsRoot): """Sets this graph as root .. 
warning:: Used internally :param bIsRoot: -- Root or not :type bIsRoot: :class:`bool` """ self._isRoot = bIsRoot def isRoot(self): """Whether this graph is root or not :rtype: :class:`bool` """ return self._isRoot def getVars(self): """Returns this graph's variables storage :returns: :class:`uuid.UUID` - :class:`~PyFlow.Core.NodeBase.NodeBase` dict :rtype: :class:`dict` """ return self._vars @property def parentGraph(self): return self._parentGraph @parentGraph.setter def parentGraph(self, newParentGraph): if self.isRoot(): self._parentGraph = None return if newParentGraph is not None: if self._parentGraph is not None: # remove self from old parent's children set if self in self._parentGraph.childGraphs: self._parentGraph.childGraphs.remove(self) # add self to new parent's children set newParentGraph.childGraphs.add(self) # update parent self._parentGraph = newParentGraph def depth(self): """Returns depth level of this graph :rtype: int """ result = 1 parent = self._parentGraph while parent is not None: result += 1 parent = parent.parentGraph return result def getVarList(self): """return list of variables from active graph :rtype: list(:class:`~PyFlow.Core.Variable.Variable`) """ result = list(self._vars.values()) parent = self._parentGraph while parent is not None: result += list(parent._vars.values()) parent = parent.parentGraph return result def serialize(self, *args, **kwargs): """Returns serialized representation of this graph :rtype: dict """ result = { 'name': self.name, 'category': self.category, 'vars': [v.serialize() for v in self._vars.values()], 'nodes': [n.serialize() for n in self._nodes.values()], 'depth': self.depth(), 'isRoot': self.isRoot(), 'parentGraphName': str(self._parentGraph.name) if self._parentGraph is not None else str(None) } return result def populateFromJson(self, jsonData): """Populates itself from serialized data :param jsonData: serialized graph :type jsonData: dict """ self.clear() parentGraphName = jsonData['parentGraphName'] parentGraph = self.graphManager.findGraph(parentGraphName) self.parentGraph = parentGraph self.name = self.graphManager.getUniqGraphName(jsonData['name']) self.category = jsonData['category'] self.setIsRoot(jsonData['isRoot']) if self.isRoot(): self.name = "root" # restore vars for varJson in jsonData['vars']: var = Variable.deserialize(self, varJson) self._vars[var.uid] = var # restore nodes for nodeJson in jsonData['nodes']: # check if variable getter or setter and pass variable nodeArgs = () nodeKwargs = {} if nodeJson['type'] in ('getVar', 'setVar'): nodeKwargs['var'] = self._vars[uuid.UUID(nodeJson['varUid'])] nodeJson['owningGraphName'] = self.name node = getRawNodeInstance(nodeJson['type'], packageName=nodeJson['package'], libName=nodeJson['lib'], *nodeArgs, **nodeKwargs) self.addNode(node, nodeJson) # restore connection for nodeJson in jsonData['nodes']: for nodeOutputJson in nodeJson['outputs']: for linkData in nodeOutputJson['linkedTo']: try: lhsNode = self._nodes[uuid.UUID(linkData["lhsNodeUid"])] except Exception as e: lhsNode = self.findNode(linkData["lhsNodeName"]) try: lhsPin = lhsNode.orderedOutputs[linkData["outPinId"]] except Exception as e: continue try: rhsNode = self._nodes[uuid.UUID(linkData["rhsNodeUid"])] except Exception as e: rhsNode = self.findNode(linkData["rhsNodeName"]) try: rhsPin = rhsNode.orderedInputs[linkData["inPinId"]] except Exception as e: continue if not arePinsConnected(lhsPin, rhsPin): connected = connectPins(lhsPin, rhsPin) # assert(connected is True), "Failed to restore connection" if not 
connected: print("Failed to restore connection", lhsPin, rhsPin) connectPins(lhsPin, rhsPin) def remove(self): """Removes this graph as well as child graphs. Deepest graphs will be removed first """ # graphs should be removed from leafs to root for childGraph in set(self.childGraphs): childGraph.remove() # remove itself self.graphManager.removeGraph(self) def clear(self): """Clears content of this graph as well as child graphs. Deepest graphs will be cleared first """ # graphs should be cleared from leafs to root for childGraph in set(self.childGraphs): childGraph.clear() # clear itself for node in list(self._nodes.values()): node.kill() self._nodes.clear() for var in list(self._vars.values()): self.killVariable(var) self._vars.clear() @property def name(self): return self.__name @name.setter def name(self, value): value = str(value) if self.__name != value: self.__name = value self.nameChanged.send(self.__name) @property def category(self): return self.__category @category.setter def category(self, value): self.__category = str(value) self.categoryChanged.send(self.__category) def Tick(self, deltaTime): """Executed periodically :param deltaTime: Elapsed time since last tick :type deltaTime: float """ for node in self._nodes.values(): node.Tick(deltaTime) @property def pins(self): result = {} for n in self.getNodesList(): for pin in tuple(n.inputs.values()) + tuple(n.outputs.values()): result[pin.uid] = pin return result def createVariable(self, dataType=str('AnyPin'), accessLevel=AccessLevel.public, uid=None, name=str("var")): """Creates variable inside this graph scope :param dataType: Variable data type :type dataType: str :param accessLevel: Variable access level :type accessLevel: :class:`~PyFlow.Core.Common.AccessLevel` :param uid: Variable unique identifier :type uid: :class:`uuid.UUID` :param name: Variable name :type name: str """ name = self.graphManager.getUniqVariableName(name) var = Variable(self, getPinDefaultValueByType(dataType), name, dataType, accessLevel=accessLevel, uid=uid) self._vars[var.uid] = var return var # TODO: add arguments to deal with references of this var # disconnect pins or mark nodes invalid def killVariable(self, var): """Removes variable from this graph :param var: Variable to remove :type var: :class:`~PyFlow.Core.Variable.Variable` """ assert(isinstance(var, Variable)) if var.uid in self._vars: popped = self._vars.pop(var.uid) popped.killed.send() def getNodes(self): """Returns this graph's nodes storage :rtype: dict(:class:`~PyFlow.Core.NodeBase.NodeBase`) """ return self._nodes def getNodesList(self, classNameFilters=[]): """Returns this graph's nodes list :rtype: list(:class:`~PyFlow.Core.NodeBase.NodeBase`) """ if len(classNameFilters) > 0: return [n for n in self._nodes.values() if n.__class__.__name__ in classNameFilters] else: return [n for n in self._nodes.values()] def findNode(self, name): """Tries to find node by name :param name: Node name :type name: str or None """ for i in self._nodes.values(): if i.name == name: return i return None def getNodesByClassName(self, className): """Returns a list of nodes filtered by class name :param className: Class name of target nodes :type className: str :rtype: list(:class:`~PyFlow.Core.NodeBase.NodeBase`) """ nodes = [] for i in self.getNodesList(): if i.__class__.__name__ == className: nodes.append(i) return nodes def findPinByUid(self, uid): """Tries to find pin by uuid :param uid: Unique identifier :type uid: :class:`~uuid.UUID` :rtype: :class:`~PyFlow.Core.PinBase.PinBase` or None """ pin = None 
if uid in self.pins: pin = self.pins[uid] return pin def findPin(self, pinName): """Tries to find pin by name :param pinName: String to search by :type pinName: str :rtype: :class:`~PyFlow.Core.PinBase.PinBase` or None """ result = None for pin in self.pins.values(): if pinName == pin.getFullName(): result = pin break return result def getInputNode(self): """Creates and adds to graph :class:`~PyFlow.Packages.PyFlowBase.Nodes.graphNodes.graphInputs` node pins on this node will be exposed on compound node as input pins :rtype: :class:`~PyFlow.Core.NodeBase.NodeBase` """ node = getRawNodeInstance("graphInputs", "PyFlowBase") self.addNode(node) return node def getOutputNode(self): """Creates and adds to graph :class:`~PyFlow.Packages.PyFlowBase.Nodes.graphNodes.graphOutputs` node. pins on this node will be exposed on compound node as output pins :rtype: :class:`~PyFlow.Core.NodeBase.NodeBase` """ node = getRawNodeInstance("graphOutputs", "PyFlowBase") self.addNode(node) return node def addNode(self, node, jsonTemplate=None): """Adds node to storage :param node: Node to add :type node: NodeBase :param jsonTemplate: serialized representation of node. This used when graph deserialized to do custom stuff after node will be added. :type jsonTemplate: dict :rtype: bool """ from PyFlow.Core.PathsRegistry import PathsRegistry assert(node is not None), "failed to add node, None is passed" if node.uid in self._nodes: return False # Check if this node is variable get/set. Variables created in child graphs are not visible to parent ones # Do not disrupt variable scope if node.__class__.__name__ in ['getVar', 'setVar']: var = self.graphManager.findVariableByUid(node.variableUid()) variableLocation = var.location() if len(variableLocation) > len(self.location()): return False if len(variableLocation) == len(self.location()): if Counter(variableLocation) != Counter(self.location()): return False node.graph = weakref.ref(self) if jsonTemplate is not None: jsonTemplate['name'] = self.graphManager.getUniqNodeName(jsonTemplate['name']) else: node.setName(self.graphManager.getUniqNodeName(node.name)) self._nodes[node.uid] = node node.postCreate(jsonTemplate) PathsRegistry().rebuild() return True def location(self): """Returns path to current location in graph tree Example: >>> ["root", "compound1", "compound2"] means: >>> # root >>> # |- compound >>> # |- compound2 :rtype: list(str) """ result = [self.name] parent = self._parentGraph while parent is not None: result.insert(0, parent.name) parent = parent.parentGraph return result def count(self): """Returns number of nodes :rtype: int """ return self._nodes.__len__() def plot(self): """Prints graph to console. May be useful for debugging """ depth = self.depth() prefix = "".join(['-'] * depth) if depth > 1 else '' parentGraphString = str(None) if self.parentGraph is None else self.parentGraph.name print(prefix + "GRAPH:" + self.name + ", parent:{0}".format(parentGraphString)) assert(self not in self.childGraphs) for child in self.childGraphs: child.plot()
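A hedged usage sketch for the GraphBase signals above: nameChanged and categoryChanged send the new value as the blinker sender, so receivers read it from the positional argument. The graph name below is a stand-in.

from blinker import Signal

nameChanged = Signal(str)  # declared the same way as in GraphBase.__init__

def on_name_changed(new_name):
    print('graph renamed to', new_name)

nameChanged.connect(on_name_changed, weak=False)
nameChanged.send('mainGraph')  # GraphBase does nameChanged.send(self.__name)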
import functools
import multiprocessing
import signal
import sys

from blinker import Signal


class BaseProcess(object):
    def __init__(self, new_func=None, total_count=None, *args, **kwargs):
        self.func = new_func
        self.total_count = total_count
        self.count = 0
        self.task_queue = []
        self.process_list = []
        self.process_map = {}
        self.task_map = {}
        self.args = args
        self.kwargs = kwargs
        signal.signal(signal.SIGWINCH, self.action)
        # self.window = curses.initscr()
        self.add_task_signal = Signal(1000)
        self.count_signal = Signal(1001)
        self.reg_task_signal()
        self.reg_count_signal()

    def reg_count_signal(self):
        if not self.total_count:
            return
        self.count_signal.connect(self.process_count)

    def reg_task_signal(self):
        self.add_task_signal.connect(self.process_func)

    def create_new_task(self, work_num=1):
        for i in range(work_num):
            self.task_queue.append((self.func, self.args, self.kwargs))
            self.add_task_signal.send()
        if self.total_count:
            for i in range(self.total_count):
                self.count_signal.send()
        self.process_map = {x: 0 for x in range(work_num)}

    def make_func(self):
        def add_count_deco(self, func):
            def wrapper(func):
                @functools.wraps(func)
                def _wrapper(*args, **kwargs):
                    func(*args, **kwargs)
                    self.count += 1
                return _wrapper
            return wrapper

        @add_count_deco(self, self.func)
        def _make_func(*args, **kwargs):
            pass

        return _make_func

    def process_func(self, sender):
        # Consume queued tasks and run each one in its own process
        while self.task_queue:
            func, args, kwargs = self.task_queue.pop()
            process = multiprocessing.Process(target=func, args=args, kwargs=kwargs)
            process.start()

    def process_count(self, sender):
        self.count += 1
        bar_length = 20
        percent = float(self.count * 1.0 / self.total_count)
        hashes = '#' * int(percent * bar_length)
        spaces = ' ' * (bar_length - len(hashes))
        sys.stdout.write("\rPercent: [%s] %d%%" % (hashes + spaces, percent * 100))

    def action(self, signum=None, frame=None):
        # Signal handlers receive (signum, frame); the defaults keep the
        # plain self.action() call in start() working as well.
        key = self.window.getch()
        if key == ord('p'):
            pass
        if key == ord('r'):
            pass

    def start(self):
        while True:
            self.action()
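A hedged, single-process sketch of the counting idea behind count_signal above: each completed unit of work fires a signal, and one receiver renders the progress bar. The names and the total of 20 are illustrative.

import sys

from blinker import Signal

count_signal = Signal('progress')
total = 20
done = 0

def render(sender):
    global done
    done += 1
    bar_length = 20
    percent = done / total
    hashes = '#' * int(percent * bar_length)
    spaces = ' ' * (bar_length - len(hashes))
    sys.stdout.write("\rPercent: [%s] %d%%" % (hashes + spaces, percent * 100))
    sys.stdout.flush()

count_signal.connect(render, weak=False)
for _ in range(total):
    count_signal.send(None)
print()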
class GraphManager(object): """Data structure that holds graph tree This class switches active graph. Can insert or remove graphs to tree, can search nodes and variables across all graphs. Also this class responsible for giving unique names. """ def __init__(self): super(GraphManager, self).__init__() self.terminationRequested = False #: used by cli only self.graphChanged = Signal(object) self._graphs = {} self._activeGraph = None self._activeGraph = GraphBase(ROOT_GRAPH_NAME, self) self._activeGraph.setIsRoot(True) def findRootGraph(self): """Returns top level root graph :rtype: :class:`~PyFlow.Core.GraphBase.GraphBase` """ roots = [] for graph in self.getAllGraphs(): if graph.isRoot(): roots.append(graph) assert (len(roots) == 1), "Fatal! Multiple roots!" return roots[0] def selectRootGraph(self): """Selects root graph """ self.selectGraph(self.findRootGraph()) def serialize(self): """Serializes itself to json. All child graphs will be serialized. :rtype: dict """ rootGraph = self.findRootGraph() saved = rootGraph.serialize() saved["fileVersion"] = str(version.currentVersion()) saved["activeGraph"] = self.activeGraph().name return saved def removeGraphByName(self, name): """Removes graph by :attr:`~PyFlow.Core.GraphBase.GraphBase.name` :param name: name of graph to be removed :type name: str """ graph = self.findGraph(name) if graph is not None: graph.clear() self._graphs.pop(graph.uid) if graph.parentGraph is not None: if graph in graph.parentGraph.childGraphs: graph.parentGraph.childGraphs.remove(graph) del graph def removeGraph(self, graph): """Removes supplied graph :param graph: Graph to be removed :type graph: :class:`~PyFlow.Core.GraphBase.GraphBase` """ if graph.uid in self._graphs: graph.clear() self._graphs.pop(graph.uid) if graph.parentGraph is not None: if graph in graph.parentGraph.childGraphs: graph.parentGraph.childGraphs.remove(graph) del graph def deserialize(self, data): """Populates itself from serialized data :param data: Serialized data :type data: dict """ if "fileVersion" in data: fileVersion = version.Version.fromString(data["fileVersion"]) else: # handle older version pass self.clear(keepRoot=False) self._activeGraph = GraphBase(str('root'), self) self._activeGraph.populateFromJson(data) self._activeGraph.setIsRoot(True) self.selectGraph(self._activeGraph) def clear(self, keepRoot=True, *args, **kwargs): """Wipes everything. 
:param keepRoot: Whether to remove root graph or not :type keepRoot: bool """ self.selectGraphByName(ROOT_GRAPH_NAME) self.removeGraphByName(ROOT_GRAPH_NAME) self._graphs.clear() self._graphs = {} del self._activeGraph self._activeGraph = None if keepRoot: self._activeGraph = GraphBase(ROOT_GRAPH_NAME, self) self.selectGraph(self._activeGraph) self._activeGraph.setIsRoot(True) def Tick(self, deltaTime): """Periodically calls :meth:`~PyFlow.Core.GraphBase.GraphBase.Tick` on all graphs :param deltaTime: Elapsed time from last call :type deltaTime: float """ for graph in self._graphs.values(): graph.Tick(deltaTime) def findVariableRefs(self, variable): """Returns a list of variable accessors spawned across all graphs :param variable: Variable to search accessors for :type variable: :class:`~PyFlow.Core.Variable.Variable` :rtype: list(:class:`~PyFlow.Core.NodeBase.NodeBase`) """ result = [] for node in self.getAllNodes(classNameFilters=['getVar', 'setVar']): if node.variableUid() == variable.uid: result.append(node) return result def findGraph(self, name): """Tries to find graph by :attr:`~PyFlow.Core.GraphBase.GraphBase.name` :param name: Name of target graph :type name: str :rtype: :class:`~PyFlow.Core.GraphBase.GraphBase` or None """ graphs = self.getGraphsDict() if name in graphs: return graphs[name] return None def findPinByName(self, pinFullName): """Tries to find pin by name across all graphs :param pinFullName: Full name of pin including node namespace :type pinFullName: str :rtype: :class:`~PyFlow.Core.PinBase.PinBase` or None """ result = None for graph in self.getAllGraphs(): result = graph.findPin(pinFullName) if result is not None: break return result def findNode(self, name): """Finds a node across all graphs :param name: Node name to search by :type name: str :rtype: :class:`~PyFlow.Core.NodeBase.NodeBase` """ result = None for graph in self.getAllGraphs(): result = graph.findNode(name) if result is not None: break return result def findVariableByUid(self, uuid): """Finds a variable across all graphs :param uuid: Variable unique identifier :type uuid: :class:`~uuid.UUID` :rtype: :class:`~PyFlow.Core.Variable.Variable` or None """ result = None for graph in self._graphs.values(): if uuid in graph.getVars(): result = graph.getVars()[uuid] break return result def findVariableByName(self, name): """Finds a variable across all graphs :param name: Variable name :type name: str :rtype: :class:`~PyFlow.Core.Variable.Variable` or None """ for graph in self._graphs.values(): for var in graph.getVars().values(): if var.name == name: return var return None def location(self): """Returns location of active graph .. 
seealso :: :meth:`PyFlow.Core.GraphBase.GraphBase.location` """ return self.activeGraph().location() def getGraphsDict(self): """Creates and returns dictionary where graph name associated with graph :rtype: dict(str, :class:`~PyFlow.Core.GraphBase.GraphBase`) """ result = {} for graph in self.getAllGraphs(): result[graph.name] = graph return result def add(self, graph): """Adds graph to storage and ensures that graph name is unique :param graph: Graph to add :type graph: :class:`~PyFlow.Core.GraphBase.GraphBase` """ graph.name = self.getUniqGraphName(graph.name) self._graphs[graph.uid] = graph def activeGraph(self): """Returns active graph :rtype: :class:`~PyFlow.Core.GraphBase.GraphBase` """ return self._activeGraph def selectGraphByName(self, name): """Sets active graph by graph name and fires event :param name: Name of target graph :type name: str """ graphs = self.getGraphsDict() if name in graphs: if name != self.activeGraph().name: oldGraph = self.activeGraph() newGraph = graphs[name] self._activeGraph = newGraph self.graphChanged.send(self.activeGraph()) def selectGraph(self, graph): """Sets supplied graph as active and fires event :param graph: Target graph :type graph: :class:`~PyFlow.Core.GraphBase.GraphBase` """ for newGraph in self.getAllGraphs(): if newGraph.name == graph.name: if newGraph.name != self.activeGraph().name: oldGraph = self.activeGraph() self._activeGraph = newGraph self.graphChanged.send(self.activeGraph()) break def getAllGraphs(self): """Returns all graphs :rtype: list(:class:`~PyFlow.Core.GraphBase.GraphBase`) """ return [g for g in self._graphs.values()] def getAllNodes(self, classNameFilters=[]): """Returns all nodes across all graphs :param classNameFilters: If class name filters specified, only those node classes will be considered :type classNameFilters: list(str) :rtype: list(:class:`~PyFlow.Core.NodeBase.NodeBase`) """ allNodes = [] for graph in self.getAllGraphs(): if len(classNameFilters) == 0: allNodes.extend(list(graph.getNodes().values())) else: allNodes.extend([ node for node in graph.getNodes().values() if node.__class__.__name__ in classNameFilters ]) return allNodes def getAllVariables(self): """Returns a list of all variables :rtype: list(:class:`~PyFlow.Core.Variable.Variable`) """ result = [] for graph in self.getAllGraphs(): result.extend(list(graph.getVars().values())) return result def getUniqGraphPinName(self, graph, name): """Returns unique pin name for graph Used by compound node and graphInputs graphOutputs nodes. To make all exposed to compound pins names unique. 
:param graph: Target graph :type graph: :class:`~PyFlow.Core.GraphBase.GraphBase` :param name: Target pin name :type name: str :rtype: str """ existingNames = [] for node in graph.getNodesList( classNameFilters=['graphInputs', 'graphOutputs']): existingNames.extend([pin.name for pin in node.pins]) return getUniqNameFromList(existingNames, name) def getAllNames(self): """Returns list of all registered names Includes graphs, nodes, pins, variables names :rtype: list(str) """ existingNames = [g.name for g in self.getAllGraphs()] existingNames.extend([n.name for n in self.getAllNodes()]) existingNames.extend([var.name for var in self.getAllVariables()]) for node in self.getAllNodes(): existingNames.extend([pin.name for pin in node.pins]) return existingNames def getUniqName(self, name): """Returns unique name :param name: Source name :type name: str :rtype: str """ existingNames = self.getAllNames() return getUniqNameFromList(existingNames, name) def getUniqGraphName(self, name): """Returns unique graph name :param name: Source name :type name: str :rtype: str """ existingNames = [g.name for g in self.getAllGraphs()] return getUniqNameFromList(existingNames, name) def getUniqNodeName(self, name): """Returns unique node name :param name: Source name :type name: str :rtype: str """ existingNames = [n.name for n in self.getAllNodes()] return getUniqNameFromList(existingNames, name) def getUniqVariableName(self, name): """Returns unique variable name :param name: Source name :type name: str :rtype: str """ existingNames = [var.name for var in self.getAllVariables()] return getUniqNameFromList(existingNames, name) def plot(self): """Prints all data to console. May be useful for debugging """ root = self.findRootGraph() print("Active graph: {0}".format(str(self.activeGraph().name)), "All graphs:", [g.name for g in self._graphs.values()]) root.plot()
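A short usage sketch for the graphChanged signal declared in GraphManager.__init__ above: the manager sends the newly active graph, so a UI layer can rebind to it without polling. The graph value below is a stand-in.

from blinker import Signal

graphChanged = Signal(object)  # declared the same way as in GraphManager.__init__

def on_graph_changed(active_graph):
    print('active graph is now:', active_graph)

graphChanged.connect(on_graph_changed, weak=False)
graphChanged.send('subGraph1')  # the manager sends self.activeGraph()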
class MotionStage(object): def __init__(self, axes, constraints = None): from Action import EmergencyStop from blinker import Signal self.axes_idx = dict() for i, axis in enumerate(axes): self.axes_idx[axis] = i self.axes = [axis for axis in axes] self.constraints = constraints self.abort_action = EmergencyStop(self.axes) self.onCycleStarted = Signal() self.onCycleFinished = Signal() self.onCycleAborted = Signal() self.onDestinationChanged = Signal() self.onRunning = Signal() self.onInitializing = Signal() self.onInitialized = Signal() self.onInitiatorMinus = Signal() self.onInitiatorPlus = Signal() self.onPositionChanged = Signal() for axis in self.axes: axis.onInitializing.connect(self.onInitializing_repeat) axis.onInitialized.connect(self.onInitialized_repeat) axis.onInitiatorMinus.connect(self.onInitiatorMinus_repeat) axis.onInitiatorPlus.connect(self.onInitiatorPlus_repeat) axis.onRunning.connect(self.onRunning_repeat) axis.onPosition.connect(self.onPosition_repeat) self.worker_thread = None self.active = False self.destination = None self.cycle_clear() self.update() def __del__(self): self.abort() def cycle_clear(self): import Queue self.action_queue = Queue.Queue() def cycle_add_action(self, action): if not self.action_queue: self.cycle_clear() self.action_queue.put(action) @property def running(self): return tuple([axis.running for axis in self.axes]) @property def initializing(self): return tuple([axis.initializing for axis in self.axes]) @property def initialized(self): return tuple([axis.initialized for axis in self.axes]) @property def initiator_minus(self): return tuple([axis.initiator_minus for axis in self.axes]) @property def initiator_plus(self): return tuple([axis.initiator_plus for axis in self.axes]) @property def position(self): return tuple([axis.position for axis in self.axes]) def onRunning_repeat(self, sender, running): self.onRunning.send(self, axis=self.axes_idx[sender], running=running) def onPosition_repeat(self, sender, position): self.onPositionChanged.send(self, axis=self.axes_idx[sender], position=position) def onInitializing_repeat(self, sender, initializing): self.onInitializing.send(self, axis=self.axes_idx[sender], initializing=initializing) def onInitialized_repeat(self, sender, initialized): self.onInitialized.send(self, axis=self.axes_idx[sender], initialized=initialized) def onInitiatorMinus_repeat(self, sender, active): self.onInitiatorMinus.send(self, axis=self.axes_idx[sender], active=active) def onInitiatorPlus_repeat(self, sender, active): self.onInitiatorPlus.send(self, axis=self.axes_idx[sender], active=active) def update(self): old_position = self.position for axis in self.axes: axis.update() def set_destination(self, destination): current_position = self.position if not self.destination: self.update() self.destination = current_position if isinstance(destination, list) or isinstance(destination, tuple): if len(destination) != len(self.axes): raise ValueError self.destination = tuple(destination) if isinstance(destination, dict): new_destination = list(self.destination) for k in destination: new_destination[k] = destination[k] self.destination = tuple(new_destination) for i,dest in enumerate(self.destination): self.onDestinationChanged.send(self, axis = i, destination = dest) speed = None if not None in current_position: delta = [abs(a-b) for a,b in zip(self.destination, current_position)] max_delta = max(delta) if max_delta > 0: speed = [float(d)/float(max_delta) for d in delta] from Action import GotoAbsolute self.cycle_clear() 
self.cycle_add_action(GotoAbsolute(self.axes, self.destination, speed)) def can_cycle_start(self): return True # FIXME: Add constraint tests here def cycle_start(self): import threading, weakref if self.active: return False # if not self.can_cycle_start(): # return False self.current_action = None self.active = True self.worker_thread = threading.Thread(target = MotionStage.cycleWorker, name = "MotionControl.cycleWorker", args=(weakref.proxy(self),)) self.worker_thread.daemon = True self.worker_thread.start() self.onCycleStarted.send(self) def abort(self): import threading self.active = False if isinstance(self.worker_thread, threading.Thread): self.worker_thread.join() def cycleWorker(ref): abort_action = ref.abort_action try: import time while True: if not ref.active: raise CycleAbort() ref.update() if not ref.current_action or ref.current_action.ended(): if ref.action_queue.empty(): break ref.current_action = ref.action_queue.get_nowait() ref.current_action.execute() while True: if not ref.active: raise CycleAbort() ref.update() if ref.current_action.ended(): break ref.action_queue.task_done() ref.onCycleFinished.send(ref) except CycleAbort: ref.abort_action.execute() ref.onCycleAborted.send(ref) finally: try: while not ref.action_queue.empty(): ref.action_queue.get_nowait() ref.action_queue.task_done() except: pass ref.active = False def reference(self): from Action import Initiate, GotoAbsolute self.destination = None self.cycle_clear() self.cycle_add_action(Initiate([self.axes[0]])) self.cycle_add_action(GotoAbsolute([self.axes[0]], [0])) self.cycle_start()
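# Self-contained sketch of the "repeater" pattern MotionStage uses above:
# per-axis events are re-emitted on a stage-level signal together with the
# axis index. Axis objects are stood in for by plain strings here.
from blinker import Signal

axisRunning = Signal()
stageRunning = Signal()
axes_idx = {"x": 0, "y": 1}

def repeat(sender, running):
    stageRunning.send("stage", axis=axes_idx[sender], running=running)

def show(sender, axis, running):
    print("axis", axis, "running:", running)

axisRunning.connect(repeat)
stageRunning.connect(show)
axisRunning.send("x", running=True)  # prints: axis 0 running: True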
class Axis(object):
    def __init__(self, inverted=False, scale=None):
        from blinker import Signal
        self.inverted = inverted
        # Avoid a shared mutable default argument
        self.scale = scale if scale is not None else {}
        self.position = None
        self.running = None
        self.initializing = None
        self.initialized = None
        self.initiator_minus = None
        self.initiator_plus = None
        self.initiator_error = None
        self.temperature_warning = None
        self.onPosition = Signal()
        self.onStarted = Signal()
        self.onStopped = Signal()
        self.onInitializing = Signal()
        self.onInitialized = Signal()
        self.onInitiatorMinus = Signal()
        self.onInitiatorPlus = Signal()
        self.onInitiatorError = Signal()
        self.onTemperatureWarning = Signal()

    def update(self):
        last_position = self.position
        last_running = self.running
        last_initializing = self.initializing
        last_initialized = self.initialized
        last_initiator_minus = self.initiator_minus
        last_initiator_plus = self.initiator_plus
        last_initiator_error = self.initiator_error
        last_temperature_warning = self.temperature_warning
        self.do_update()
        # Pass self as sender so receivers, such as the MotionStage repeaters
        # above, can identify which axis emitted the event.
        if last_position != self.position:
            self.onPosition.send(self, position=self.position)
        if last_running != self.running:
            if self.running:
                self.onStarted.send(self)
            else:
                self.onStopped.send(self)
        if last_initializing != self.initializing:
            self.onInitializing.send(self, initializing=self.initializing)
        if last_initialized != self.initialized:
            self.onInitialized.send(self, initialized=self.initialized)
        if last_initiator_minus != self.initiator_minus:
            self.onInitiatorMinus.send(self, active=self.initiator_minus)
        if last_initiator_plus != self.initiator_plus:
            self.onInitiatorPlus.send(self, active=self.initiator_plus)
        if last_initiator_error != self.initiator_error:
            self.onInitiatorError.send(self, error=self.initiator_error)
        if last_temperature_warning != self.temperature_warning:
            self.onTemperatureWarning.send(self, warning=self.temperature_warning)

    def wait_for_stop(self):
        self.update()
        while self.running:
            self.update()

    def initiate(self):
        raise NotImplementedError()

    def goto_absolute(self, target, speed=None):
        raise NotImplementedError()

    def goto_relative(self, offset, speed=None):
        raise NotImplementedError()
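# Self-contained sketch of the edge-triggered update() pattern above: a signal
# fires only when the polled value actually changes between updates.
from blinker import Signal

onPosition = Signal()

def moved(sender, position):
    print("moved to", position)

onPosition.connect(moved)

last = None
for sample in (0, 0, 1, 1, 2):
    if sample != last:
        onPosition.send(None, position=sample)  # fires for 0, 1 and 2 only
    last = sample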
class DataSource:
    """
    The DataSource object provides the data itself to the plots and also manages
    the brushing between the plots. If the plots observe the brushed_indices
    property of this class, they can react to any change in the data. It is also
    possible to set the brushed_indices property to trigger the change in any
    instances that observe this property. In addition to the brushed indices,
    this class also provides the brushed data directly, which is cached to speed
    up subsequent access to the data.
    """

    def __init__(self, df: DataFrame,
                 categorical_columns: typing.Union[typing.List[str], None] = None,
                 sample: typing.Union[float, int, None] = None,
                 seed: typing.Union[int, None] = None,
                 *args, **kwargs):
        """
        :param df: A pandas.DataFrame object.
        :param categorical_columns: If given, specifies which columns are to be
            interpreted as categorical. The list has to include all columns of
            the DataFrame of type `object`, `str`, `bool` or `category`; beyond
            those it can only add columns of other types that should be treated
            as categorical.
        :param sample: If given, sample the DataFrame: a float in [0.0, 1.0]
            samples that fraction of rows, an int samples that many rows.
        :param seed: Random seed used for sampling the data. Values can be any
            integer between 0 and 2**32 - 1 inclusive or None.
        :param args: args for HasTraits superclass
        :param kwargs: kwargs for HasTraits superclass
        """
        super().__init__(*args, **kwargs)
        validate.validate_data_frame(df)
        validate.validate_sample(sample)
        validate.validate_seed(seed)
        self.selection_type = SelectionType.STANDARD

        if sample is None:
            self._df = df
        else:
            if isinstance(sample, float):
                if sample < 0.0 or sample > 1.0:
                    raise ValueError(
                        "Sample has to be between 0.0 and 1.0. Invalid value: %s" % sample)
                self._df = df.sample(frac=sample, random_state=seed)
            else:
                if sample < 0 or sample > len(df):
                    raise ValueError(
                        "Sample has to be between 0 and the length of the DataFrame (%d). "
                        "Invalid value: %d" % (len(df), sample))
                self._df = df.sample(n=sample, random_state=seed)

        self.columns = list(self._df.columns.values)
        self.column_store = ColumnStore(self._df, self.columns, categorical_columns)
        self.numerical_columns = self.column_store.numerical_columns
        self.time_columns = self.column_store.time_columns
        self.categorical_columns = self.column_store.categorical_columns
        if self.categorical_columns is not None:
            # astype returns a new object, so assign the result back
            self._df[self.categorical_columns] = \
                self._df[self.categorical_columns].astype(dtype="category")

        self._length = len(self._df)
        self._indices = set(range(self._length))
        self._brushed_indices: typing.Set[int] = self._indices

        self.brushed_data_invalidated = True
        self._brushed_data = None

        self.on_indices_changed = Signal()

        if len(self.columns) < 2:
            raise ValueError(
                "The passed DataFrame only has %d column, which is insufficient for analysis."
                % len(self.columns))

        self.few_num_cols = len(self.numerical_columns) < 2
        self.few_cat_cols = len(self.categorical_columns) < 2

    def notify_indices_changed(self):
        # Invalidate the cache so brushed_data is re-indexed the next time it is needed.
        self.brushed_data_invalidated = True
        self.on_indices_changed.send(self)

    def reset_selection(self):
        """
        Reset all the indices to the original state, that is all indices are selected.

        :return: None
        """
        self._brushed_indices = self._indices
        self.notify_indices_changed()

    @property
    def len(self) -> int:
        """
        :return: The length of the DataFrame.
        """
        return self._length

    def __len__(self):
        """
        :return: The length of the DataFrame.
        """
        return self._length

    @property
    def brushed_indices(self) -> typing.Set[int]:
        """
        :return: The currently selected indices.
""" return self._brushed_indices @brushed_indices.setter def brushed_indices(self, indices: typing.List[int]): """ Sets the specified indices as selection in the data according to the current selection type. :param indices: indices of data points that should be brushed. """ if self.selection_type == SelectionType.STANDARD: self._brushed_indices = set(indices) elif self.selection_type == SelectionType.ADDITIVE: self._brushed_indices = self._brushed_indices.union(indices) elif self.selection_type == SelectionType.SUBTRACTIVE: self._brushed_indices = self._brushed_indices.difference(indices) self.notify_indices_changed() @property def brushed_data(self) -> DataFrame: """ Only determines brushed data if it was invalidated by new selected indices. This gives more efficiency if only the brushed indices are needed and not the brushed data. :return: The selected data corresponding to the indices. """ if self.brushed_data_invalidated: self._brushed_data = self._df.iloc[list(self._brushed_indices), :] self.brushed_data_invalidated = False return self._brushed_data @property def indices(self) -> typing.Set[int]: """ :return: All indices of the data frame. This is a list from 0 to len-1. """ return self._indices @property def data(self) -> DataFrame: """ :return: The DataFrame for this :class:`pandas_visual_analysis.data_source.DataSource` object. """ return self._df @staticmethod def read_csv(path: str, header: typing.Union[int, None] = 0): """ Read a comma-separated values (csv) file into DataSource. :param path: Any valid string path is acceptable. The string could be a URL. Valid URL schemes include http, ftp, s3, and file. :param header: Row (0-indexed) to use for the column labels of the parsed DataFrame. Use None if there is no header. :return: The DataSource containing the data from the specified file. """ df = pd.read_csv(path, header=header) return DataSource(df) @staticmethod def read_tsv(path: str, header: typing.Union[int, None] = 0): """ Read a tab-separated values (tsv) file into DataSource. :param path: Any valid string path is acceptable. The string could be a URL. Valid URL schemes include http, ftp, s3, and file. :param header: Row (0-indexed) to use for the column labels of the parsed DataFrame. Use None if there is no header. :return: The DataSource containing the data from the specified file. """ df = pd.read_table(path, header=header) return DataSource(df) @staticmethod def read_json(path: str, orient: str): """ Read a json file into a DataSource. :param path: Any valid string path is acceptable. The string could be a URL. Valid URL schemes include http, ftp, s3, and file. :param orient: Indication of expected JSON string format produced by DataFrame.to_json() with a corresponding orient value. :return: The DataSource containing the data from the specified file. """ df = pd.read_json(path, orient=orient) return DataSource(df) @staticmethod def read(path: str, *args, **kwargs): """ Reads the data specified by the path into a DataSource. Infers file type by extension. Supported extensions are: .csv, .tsv and .json. :param path: Any valid string path is acceptable. The string could be a URL. Valid URL schemes include http, ftp, s3, and file. :param args: Arguments passed to inferred methods. :param kwargs: Keyword arguments passed to inferred methods. :return: The DataSource containing the data from the specified file. 
""" filename, extension = os.path.splitext(path) supported_extensions = {".csv", ".tsv", ".json"} if extension not in supported_extensions: raise ValueError("The file extension %s is not supported. " "Supported extensions are: .csv, .tsv, .json. ") if extension == ".csv": return DataSource.read_csv(path, *args, **kwargs) elif extension == ".tsv": return DataSource.read_tsv(path, *args, **kwargs) elif extension == ".json": return DataSource.read_json(path, *args, **kwargs) # context manager def __enter__(self): """ Enters the context. :return: Returns self to use as a resource. """ return self def __exit__(self, exc_type, exc_value, traceback): """ Exits the context. No resources have to be freed and all Exceptions are delegated. :param exc_type: Type of any raised Exception. :param exc_value: Value of any raised Exception. :param traceback: Traceback if an error occurred. :return: None """ pass # delegate Exceptions
class UploadedFileManager(object): """Holds files uploaded by users of the running Streamlit app, and emits an event signal when a file is added. """ def __init__(self): self._files_by_id = {} # type: Dict[Tuple[str, str], UploadedFileList] # Prevents concurrent access to the _files_by_id dict. # In remove_session_files(), we iterate over the dict's keys. It's # an error to mutate a dict while iterating; this lock prevents that. self._files_lock = threading.Lock() self.on_files_added = Signal( doc="""Emitted when a file list is added to the manager. Parameters ---------- files : UploadedFileList The file list that was added. """) def add_files(self, session_id, widget_id, files): """Add a list of files to the FileManager. If another list with the same (session_id, widget_id) key exists, it will be replaced with this one. The "on_file_added" Signal will be emitted after the list is added. Parameters ---------- session_id : str The session ID of the report that owns the files. widget_id : str The widget ID of the FileUploader that created the files. files : List[UploadedFile] The files to add. """ file_list = UploadedFileList(session_id=session_id, widget_id=widget_id, files=files) with self._files_lock: self._files_by_id[file_list.id] = file_list self.on_files_added.send(file_list) def get_files(self, session_id, widget_id): """Return the file list with the given ID, or None if the ID doesn't exist. Parameters ---------- session_id : str The session ID of the report that owns the file. widget_id : str The widget ID of the FileUploader that created the file. Returns ------- list of UploadedFile or None """ files_id = session_id, widget_id with self._files_lock: file_list = self._files_by_id.get(files_id, None) return file_list.files if file_list is not None else None def remove_files(self, session_id, widget_id): """Remove the file list with the given ID, if it exists. Parameters ---------- session_id : str The session ID of the report that owns the file. widget_id : str The widget ID of the FileUploader that created the file. """ files_id = session_id, widget_id with self._files_lock: self._files_by_id.pop(files_id, None) def remove_session_files(self, session_id): """Remove all files that belong to the given report. Parameters ---------- session_id : str The session ID of the report whose files we're removing. """ # Copy the keys into a list, because we'll be mutating the dictionary. with self._files_lock: all_ids = list(self._files_by_id.keys()) for files_id in all_ids: if files_id[0] == session_id: self.remove_files(*files_id)
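# Self-contained sketch of the add-then-notify pattern used by add_files()
# above: mutate the shared dict under a lock, then emit the signal so receivers
# never run while the lock is held. Names are illustrative.
import threading
from blinker import Signal

files_by_id = {}
files_lock = threading.Lock()
on_files_added = Signal(doc="Emitted when a file list is added.")

def announce(sender):
    print("files added for:", sender)

on_files_added.connect(announce)

def add_files(key, files):
    with files_lock:
        files_by_id[key] = files
    on_files_added.send(key)

add_files(("session1", "widget1"), ["data.csv"])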
def _send_signal(self, sig: Signal, **data: Any) -> None: sig.send(self.kuyruk, task=self, **data)
class Organization(WithMetrics, db.Datetimed, db.Document): name = db.StringField(max_length=255, required=True) acronym = db.StringField(max_length=128) slug = db.SlugField(max_length=255, required=True, populate_from='name', update=True) description = db.StringField(required=True) url = db.StringField() image_url = db.StringField() logo = db.ImageField(fs=avatars, basename=default_image_basename, thumbnails=LOGO_SIZES) members = db.ListField(db.EmbeddedDocumentField(Member)) teams = db.ListField(db.EmbeddedDocumentField(Team)) requests = db.ListField(db.EmbeddedDocumentField(MembershipRequest)) ext = db.MapField(db.GenericEmbeddedDocumentField()) extras = db.ExtrasField() deleted = db.DateTimeField() # TODO: Extract into extension public_service = db.BooleanField() meta = { 'allow_inheritance': True, 'indexes': ['-created_at', 'slug'], 'ordering': ['-created_at'], 'queryset_class': OrganizationQuerySet, } def __str__(self): return self.name or '' __unicode__ = __str__ before_save = Signal() after_save = Signal() on_create = Signal() on_update = Signal() before_delete = Signal() after_delete = Signal() @classmethod def pre_save(cls, sender, document, **kwargs): cls.before_save.send(document) @classmethod def post_save(cls, sender, document, **kwargs): cls.after_save.send(document) if kwargs.get('created'): cls.on_create.send(document) else: cls.on_update.send(document) @property def display_url(self): return url_for('organizations.show', org=self) @property def external_url(self): return url_for('organizations.show', org=self, _external=True) @property def pending_requests(self): return [r for r in self.requests if r.status == 'pending'] @property def refused_requests(self): return [r for r in self.requests if r.status == 'refused'] @property def accepted_requests(self): return [r for r in self.requests if r.status == 'accepted'] def member(self, user): for member in self.members: if member.user == user: return member return None def is_member(self, user): return self.member(user) is not None def is_admin(self, user): member = self.member(user) return member is not None and member.role == 'admin' def pending_request(self, user): for request in self.requests: if request.user == user and request.status == 'pending': return request return None @classmethod def get(cls, id_or_slug): obj = cls.objects(slug=id_or_slug).first() return obj or cls.objects.get_or_404(id=id_or_slug) def by_role(self, role): return filter(lambda m: m.role == role, self.members)
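# Self-contained sketch of the document-lifecycle pattern above: signals are
# class attributes, shared by every instance, and pre/post-save hooks
# re-dispatch to them. The Document/save names are stand-ins for the
# mongoengine machinery.
from blinker import Signal

class Document(object):
    before_save = Signal()
    after_save = Signal()

    def save(self):
        self.before_save.send(self)
        # ... persist the document here ...
        self.after_save.send(self)

def audit(sender):
    print("saved:", sender)

Document.after_save.connect(audit)  # one subscription observes all instances
Document().save()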
def __init__(self, name, default, description='No description available'): """Create new value""" self.__name = name self.__value = self.__default = self.convert(default) self.__description = description self.__on_change = Signal()
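# Hedged sketch of the observable-setting pattern above: a private Signal fires
# whenever the stored value changes. The property names here are illustrative,
# not the original API.
from blinker import Signal

class Value(object):
    def __init__(self, name, default):
        self._name = name
        self._value = default
        self._on_change = Signal()

    @property
    def value(self):
        return self._value

    @value.setter
    def value(self, new):
        old, self._value = self._value, new
        if old != new:
            self._on_change.send(self, old=old, new=new)

def report(sender, old, new):
    print("changed from", old, "to", new)

v = Value("timeout", 30)
v._on_change.connect(report)
v.value = 60  # prints: changed from 30 to 60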
class PinBase(IPin):
    """
    **Base class for pins**

    This is the base class that stores the data in the graph.
    This class is intended to be subclassed for each new registered data type
    you want to create.

    :param _packageName: This holds the package where the subclassed pin is registered.
        It is not intended to be set by the developer; PyFlow automatically fills
        this property at registration point
    :type _packageName: str

    Signals:

    * **serializationHook** : Fired when serializing the pin, so UI wrappers can append data to the serialization object
    * **onPinConnected** : Fired when a new connection is made to this pin, sends the other pin
    * **onPinDisconnected** : Fired when a connection to this pin is removed, sends the other pin
    * **nameChanged** : Fired when pin.setName() is called, sends the new name
    * **killed** : Fired when the pin gets deleted
    * **onExecute** : Fired when pin execution gets called
    * **containerTypeChanged** : Fired when the pin structure changes
    * **dataBeenSet** : Fired when data changes, sends the new data
    * **dictChanged** : Fired when the current structure changes to :py:const:`PyFlow.Core.Common.StructureType.Dict`, sends the dict key data type
    * **errorOccured** : Fired when an error occurs, e.g. an incorrect dataType is set, sends the error
    * **errorCleared** : Fired when the error is cleared

    :ivar owningNode: Weak reference to the owning node
    :ivar reconnectionPolicy: What to do when connecting to a busy pin. Used when the
        :attr:`~PyFlow.Core.Common.PinOptions.AllowMultipleConnections` flag is disabled
    :ivar dirty: Dirty flag used for lazy evaluation
    :ivar affects: List of pins this pin affects
    :ivar affected_by: List of pins that affect this pin
    :ivar name: Pin name
    :ivar direction: Pin direction
    :ivar inputWidgetVariant: Input widget variant tag
    :ivar constraint: **description here**
    :ivar structConstraint: **description here**
    :ivar super: **description here**
    :ivar activeDataType: Current data type of this pin.
Used by AnyPin :ivar pinIndex: Position of this pin on node :ivar description: Text description of this pin """ _packageName = "" def __init__(self, name, owningNode, direction): super(PinBase, self).__init__() # signals self.serializationHook = Signal() self.onPinConnected = Signal(object) self.onPinDisconnected = Signal(object) self.nameChanged = Signal(str) self.killed = Signal() self.onExecute = Signal(object) self.containerTypeChanged = Signal() self.dataBeenSet = Signal(object) self.dictChanged = Signal(str) self.markedAsDirty = Signal() self.errorOccured = Signal(object) self.errorCleared = Signal() self._lastError = None ## Access to the node self.owningNode = weakref.ref(owningNode) self._uid = uuid.uuid4() self._data = None self._defaultValue = None self.reconnectionPolicy = PinReconnectionPolicy.DisconnectIfHasConnections self.dirty = True self.affects = set() self.affected_by = set() self.name = name self._group = "" self.direction = direction # gui class weak ref self._wrapper = None self.__wrapperJsonData = None self.annotationDescriptionDict = None self._inputWidgetVariant = "DefaultWidget" # Constraint ports self.constraint = None self.structConstraint = None # Flags self._flags = PinOptions.Storable self._origFlags = self._flags self._structure = StructureType.Single self._currStructure = self._structure self._isAny = False self._isArray = False self._isDict = False self._alwaysList = False self._alwaysDict = False self._alwaysSingle = False self._defaultSupportedDataTypes = self._supportedDataTypes = self.supportedDataTypes( ) self.canChange = False self._isDictElement = False self.hidden = False # DataTypes self.super = self.__class__ self.activeDataType = self.__class__.__name__ self._keyType = None # registration self.owningNode().pins.add(self) self.owningNode().pinsCreationOrder[self.uid] = self # This is for to be able to connect pins by location on node self.pinIndex = 0 if direction == PinDirection.Input: self.pinIndex = len(self.owningNode().orderedInputs) if direction == PinDirection.Output: self.pinIndex = len(self.owningNode().orderedOutputs) self.description = "{} instance".format(self.dataType) @property def wrapperJsonData(self): try: dt = self.__wrapperJsonData.copy() return dt except Exception as e: return None def getInputWidgetVariant(self): return self._inputWidgetVariant def setInputWidgetVariant(self, value): self._inputWidgetVariant = value def path(self): owningNodePath = self.owningNode().path() return "{}.{}".format(owningNodePath, self.getName()) @property def group(self): """Pin group This is just a tag which can be used in ui level :rtype: str """ return self._group @group.setter def group(self, value): self._group = str(value) def enableOptions(self, *options): """Enables flags on pin instance Example: >>> self.pinInstance.enableOptions(PinOptions.RenamingEnabled) You can also pass array/set of flags >>> self.pinInstance.enableOptions({PinOptions.RenamingEnabled, PinOptions.Dynamic}) This is equivalent of >>> self.pinInstance.enableOptions(PinOptions.RenamingEnabled | PinOptions.Dynamic) """ for option in options: self._flags = self._flags | option self._origFlags = self._flags def disableOptions(self, *options): """Same as :meth:`~PyFlow.Core.PinBase.PinBase.enableOptions` but inverse """ for option in options: self._flags = self._flags & ~option self._origFlags = self._flags def optionEnabled(self, option): """Is option enabled or not :param option: Option to check :type option: :class:`~PyFlow.Core.Common.PinOptions` :rtype: bool """ 
return bool(self._flags & option) def isAny(self): """Wheter this pin of type Any or not :rtype: bool """ return self._isAny @property def packageName(self): """Returns name of package this pin belongs to :rtype: bool """ return self._packageName @property def linkedTo(self): """store connection from pins from left hand side to right hand side .. code-block:: python { "lhsNodeName": "", "outPinId": 0, "rhsNodeName": "", "inPinId": 0 } where pin id is order in which pin was added to node :returns: Serialized connections :rtype: list(dict) """ result = list() if self.direction == PinDirection.Output: for i in getConnectedPins(self): connection = { "lhsNodeName": "", "outPinId": 0, "rhsNodeName": "", "inPinId": 0 } connection["lhsNodeName"] = self.owningNode().getName() connection["lhsNodeUid"] = str(self.owningNode().uid) connection["outPinId"] = self.pinIndex connection["rhsNodeName"] = i.owningNode().getName() connection["rhsNodeUid"] = str(i.owningNode().uid) connection["inPinId"] = i.pinIndex result.append(connection) if self.direction == PinDirection.Input: for i in getConnectedPins(self): connection = { "lhsNodeName": "", "outPinId": 0, "rhsNodeName": "", "inPinId": 0 } connection["lhsNodeName"] = i.owningNode().getName() connection["lhsNodeUid"] = str(i.owningNode().uid) connection["outPinId"] = i.pinIndex connection["rhsNodeName"] = self.owningNode().getName() connection["rhsNodeUid"] = str(self.owningNode().uid) connection["inPinId"] = self.pinIndex result.append(connection) return result def __repr__(self): return "[{0}:{1}:{2}:{3}]".format(self.dataType, self.getFullName(), self.dirty, self.currentData()) def isExec(self): """Returns whether this is exec pin or not :rtype: bool """ return False def initAsArray(self, bIsArray): """Sets this pins to be a list always :param bIsArray: Define as array :type bIsArray: bool """ self._alwaysList = bool(bIsArray) if bool(bIsArray): self._alwaysDict = False self.setAsArray(bool(bIsArray)) def initAsDict(self, bIsDict): """Sets this pins to be a dict always :param bIsArray: Define as dict :type bIsArray: bool """ self._alwaysDict = bool(bIsDict) if bool(bIsDict): self._alwaysList = False self.setAsDict(bool(bIsDict)) def setAsArray(self, bIsArray): """Sets this pins to be a list :param bIsArray: Define as Array :type bIsArray: bool """ bIsArray = bool(bIsArray) if self._isArray == bIsArray: return self._isArray = bIsArray if bIsArray: if self.isDict(): self.setAsDict(False) # list pins supports only lists by default self.enableOptions(PinOptions.SupportsOnlyArrays) self._currStructure = StructureType.Array self._isDict = False else: self._currStructure = self._structure self._data = self.defaultValue() self.containerTypeChanged.send() def setAsDict(self, bIsDict): """Sets this pins to be a dict :param bIsArray: Define as Array :type bIsArray: bool """ bIsDict = bool(bIsDict) if self._isDict == bIsDict: return self._isDict = bIsDict if bIsDict: if self.isArray(): self.setAsArray(False) # list pins supports only lists by default self.enableOptions(PinOptions.SupportsOnlyArrays) self._currStructure = StructureType.Dict self._isArray = False else: self._currStructure = self._structure self._keyType = None self._data = self.defaultValue() self.containerTypeChanged.send() def isArray(self): """Returns whether this pin is array or not :rtype: bool """ return self._isArray def isDict(self): """Returns whether this pin is dict or not :rtype: bool """ return self._isDict def setWrapper(self, wrapper): """Sets ui wrapper instance :param wrapper: 
Whatever ui class that represents this pin """ if self._wrapper is None: self._wrapper = weakref.ref(wrapper) def getWrapper(self): """Returns ui wrapper instance """ return self._wrapper def deserialize(self, jsonData): """Restores itself from supplied serialized data :param jsonData: Json representation of pin :type jsonData: dict """ self.setName(jsonData["name"]) self.uid = uuid.UUID(jsonData['uuid']) for opt in PinOptions: if opt.value in jsonData["options"]: self.enableOptions(opt) else: self.disableOptions(opt) self.changeStructure(jsonData["structure"]) self._alwaysList = jsonData['alwaysList'] self._alwaysSingle = jsonData['alwaysSingle'] self._alwaysDict = jsonData['alwaysDict'] try: self.setData( json.loads(jsonData['value'], cls=self.jsonDecoderClass())) except Exception as e: self.setData(self.defaultValue()) if "wrapper" in jsonData: self.__wrapperJsonData = jsonData["wrapper"] def serialize(self): """Serializes itself to json :rtype: dict """ storable = self.optionEnabled(PinOptions.Storable) serializedData = None if not self.dataType == "AnyPin": if storable: serializedData = json.dumps(self.currentData(), cls=self.jsonEncoderClass()) #else: # serializedData = json.dumps(self.defaultValue(), cls=self.jsonEncoderClass()) data = { 'name': self.name, 'package': self.packageName, 'fullName': self.getFullName(), 'dataType': self.__class__.__name__, 'direction': int(self.direction), 'value': serializedData, 'uuid': str(self.uid), 'linkedTo': list(self.linkedTo), 'pinIndex': self.pinIndex, 'options': [i.value for i in PinOptions if self.optionEnabled(i)], 'structure': int(self._currStructure), 'alwaysList': self._alwaysList, 'alwaysSingle': self._alwaysSingle, 'alwaysDict': self._alwaysDict } # Wrapper class can subscribe to this signal and return # UI specific data which will be considered on serialization # Blinker returns a tuple (receiver, return val) wrapperData = self.serializationHook.send(self) if wrapperData is not None: if len(wrapperData) > 0: # We take return value from one wrapper data['wrapper'] = wrapperData[0][1] return data @property def uid(self): return self._uid @uid.setter def uid(self, value): if not value == self._uid: self._uid = value def setName(self, name, force=False): """Sets pin name and fires events :param name: New pin name :type name: str :param force: If True - name will be changed even if option :attr:`~PyFlow.Core.Common.PinOptions.RenamingEnabled` is turned off :type force: bool :returns: Whether renaming performed or not :rtype: bool """ if not force: if not self.optionEnabled(PinOptions.RenamingEnabled): return False if name == self.name: return False self.name = self.owningNode().getUniqPinName(name) self.nameChanged.send(self.name) return True def getName(self): return self.name def getFullName(self): """Returns full pin name, including node name :rtype: str """ return self.owningNode().name + '_' + self.name def allowedDataTypes(self, checked=[], dataTypes=[], selfCheck=True, defaults=False): return list(self.supportedDataTypes()) def checkFree(self, checked=[], selfCheck=True): return False def defaultValue(self): """Returns default value of this pin """ if self.isArray(): return [] elif self.isDict(): return PFDict("StringPin", "AnyPin") else: return self._defaultValue def getData(self): """Returns pin value If something is connected to this pin, graph will be evaluated .. 
seealso:: :class:`~PyFlow.Core.EvaluationEngine.DefaultEvaluationEngine_Impl` """ return EvaluationEngine().getPinData(self) def clearError(self): """Clears any last error on this pin and fires event """ if self._lastError is not None: self._lastError = None self.errorCleared.send() def setError(self, err): """Marks this pin as invalid by setting error message to it. Also fires event :param err: Error message :type err: str """ self._lastError = str(err) self.errorOccured.send(self._lastError) def validateArray(self, array, func): valid = True if isinstance(array, list): for i in array: self.validateArray(i, func) else: func(array) return valid def setData(self, data): """Sets value to pin :param data: Data to be set :type data: object """ if self.super is None: return try: self.setDirty() if isinstance(data, DictElement) and not self.optionEnabled( PinOptions.DictElementSupported): data = data[1] if not self.isArray() and not self.isDict(): if isinstance(data, DictElement): self._data = DictElement(data[0], self.super.processData(data[1])) else: if isinstance(data, list): self._data = data else: self._data = self.super.processData(data) elif self.isArray(): if isinstance(data, list): if self.validateArray(data, self.super.processData): self._data = data else: raise Exception("Some Array Input is not valid Data") else: self._data = [self.super.processData(data)] elif self.isDict(): if isinstance(data, PFDict): self._data = PFDict(data.keyType, data.valueType) for key, value in data.items(): self._data[key] = self.super.processData(value) elif isinstance(data, DictElement) and len(data) == 2: self._data.clear() self._data[data[0]] = self.super.processData(data[1]) if self.direction == PinDirection.Output: for i in self.affects: i.setData(self.currentData()) elif self.direction == PinDirection.Input and self.owningNode( ).__class__.__name__ == "compound": for i in self.affects: i.setData(self.currentData()) if self.direction == PinDirection.Input or self.optionEnabled( PinOptions.AlwaysPushDirty): push(self) self.clearError() self.dataBeenSet.send(self) except Exception as exc: self.setError(exc) self.setDirty() if self._lastError is not None: self.owningNode().setError(self._lastError) wrapper = self.owningNode().getWrapper() if wrapper: wrapper.update() def call(self, *args, **kwargs): if self.owningNode().isValid(): self.onExecute.send(*args, **kwargs) def disconnectAll(self): if self.direction == PinDirection.Input: for o in list(self.affected_by): disconnectPins(self, o) self.affected_by.clear() if self.direction == PinDirection.Output: for i in list(self.affects): disconnectPins(self, i) self.affects.clear() @property def dataType(self): """Returns data type of this pin :rtype: str """ return self.__class__.__name__ @property def structureType(self): """Returns current structure of this pin :rtype: :class:`~PyFlow.Core.Common.StructureType` """ return self._structure @structureType.setter def structureType(self, structure): self._structure = structure self._currStructure = structure # PinBase methods def kill(self, *args, **kwargs): """Deletes this pin """ self.disconnectAll() if self in self.owningNode().pins: self.owningNode().pins.remove(self) if self.uid in self.owningNode().pinsCreationOrder: self.owningNode().pinsCreationOrder.pop(self.uid) # Fix pin indexes on owning node if self.optionEnabled(PinOptions.Dynamic): # sort owning node pins indexes index = 1 if self.direction == PinDirection.Input: for inputPin in self.owningNode().orderedInputs.values(): if inputPin == self: 
continue inputPin.pinIndex = index index += 1 index = 1 if self.direction == PinDirection.Output: for outputPin in self.owningNode().orderedOutputs.values(): if outputPin == self: continue outputPin.pinIndex = index index += 1 self.killed.send(self) clearSignal(self.killed) def currentData(self): """Returns current value of this pin, without any graph evaluation :rtype: object """ if self._data is None: return self._defaultValue return self._data def aboutToConnect(self, other): """This method called right before two pins connected :param other: Pin which this pin is going to be connected with :type other: :class:`~PyFlow.Core.PinBase.PinBase` """ if other.structureType != self.structureType: if self.optionEnabled( PinOptions.ChangeTypeOnConnection ) or self.structureType == StructureType.Multi: self.changeStructure(other._currStructure) self.onPinConnected.send(other) def getCurrentStructure(self): """Returns this pin structure type :rtype: :class:`~PyFlow.Core.Common.StructureType` """ if self.structureType == StructureType.Multi: if self._alwaysSingle: return StructureType.Single elif self._alwaysList: return StructureType.Array elif self._alwaysDict: return StructureType.Dict else: return self.structureType else: return self.structureType def changeStructure(self, newStruct, init=False): """Changes this pin structure type :param newStruct: Target structure :type newStruct: :class:`~PyFlow.Core.Common.StructureType` :param init: **docs goes here** :type init: bool """ free = self.canChangeStructure(newStruct, [], init=init) if free: self.updateConstrainedPins(set(), newStruct, init, connecting=True) def canChangeStructure(self, newStruct, checked=[], selfCheck=True, init=False): """Recursive function to determine if pin can change its structure :param newStruct: New structure we want to apply :type newStruct: string :param checked: Already visited pins, defaults to [] :type checked: list, optional :param selfCheck: Define if check pin itself for connected pins, defaults to True :type selfCheck: bool, optional :param init: Initialization flag, if set multi pins can became other structure and don't be able to change after new call with init=True, defaults to False :type init: bool, optional :returns: True if pin can change structure to newStruct :rtype: bool """ if not init and (self._alwaysList or self._alwaysSingle or self._alwaysDict): return False if self.structConstraint is None or self.structureType == StructureType.Multi: return True elif self.structureType != StructureType.Multi: return False else: con = [] if selfCheck: free = not self.hasConnections() if not free: for c in getConnectedPins(self): if c not in checked: con.append(c) else: free = True checked.append(self) free = True if selfCheck: def testfree(): free = False for pin in getConnectedPins(self): if pin._structure == StructureType.Multi: free = True else: free = False break return free if any([ self._currStructure == StructureType.Single and newStruct == StructureType.Array and not self.optionEnabled(PinOptions.ArraySupported) and self.hasConnections(), self._currStructure == StructureType.Single and newStruct == StructureType.Dict and not self.optionEnabled(PinOptions.DictSupported) and self.hasConnections(), self._currStructure == StructureType.Array and newStruct == StructureType.Single and self.optionEnabled(PinOptions.SupportsOnlyArrays) and self.hasConnections(), self._currStructure == StructureType.Dict and newStruct == StructureType.Single and self.optionEnabled(PinOptions.SupportsOnlyArrays) and 
self.hasConnections(), self._currStructure == StructureType.Array and newStruct == StructureType.Dict and self.hasConnections(), self._currStructure == StructureType.Dict and newStruct == StructureType.Array and self.hasConnections() ]): free = testfree() if free: for port in self.owningNode().structConstraints[ self.structConstraint] + con: if port not in checked: checked.append(port) free = port.canChangeStructure(newStruct, checked, True, init=init) if not free: break return free def updateConstrainedPins(self, traversed, newStruct, init=False, connecting=False): nodePins = set() if self.structConstraint is not None: nodePins = set( self.owningNode().structConstraints[self.structConstraint]) else: nodePins = set([self]) for connectedPin in getConnectedPins(self): if connectedPin.structureType == StructureType.Multi: if connectedPin.canChangeStructure(self._currStructure, init=init): nodePins.add(connectedPin) for neighbor in nodePins: if neighbor not in traversed: neighbor.setAsArray(newStruct == StructureType.Array) neighbor.setAsDict(newStruct == StructureType.Dict) if connecting: if init: neighbor._alwaysList = newStruct == StructureType.Array neighbor._alwaysSingle = newStruct == StructureType.Single neighbor._alwaysDict = newStruct == StructureType.Dict neighbor._currStructure = newStruct neighbor.disableOptions(PinOptions.ArraySupported) neighbor.disableOptions(PinOptions.DictSupported) if newStruct == StructureType.Array: neighbor.enableOptions(PinOptions.ArraySupported) elif newStruct == StructureType.Dict: neighbor.enableOptions(PinOptions.DictSupported) elif newStruct == StructureType.Multi: neighbor.enableOptions(PinOptions.ArraySupported) neighbor.enableOptions(PinOptions.DictSupported) elif newStruct == StructureType.Single: neighbor.disableOptions(PinOptions.SupportsOnlyArrays) else: neighbor._currStructure = neighbor._structure neighbor._data = neighbor.defaultValue() traversed.add(neighbor) neighbor.setData(neighbor.defaultValue()) neighbor.updateConstrainedPins(traversed, newStruct, init, connecting=connecting) def pinConnected(self, other): push(self) if self.isDict(): self.updateConnectedDicts([], self._data.keyType) def pinDisconnected(self, other): self.onPinDisconnected.send(other) push(other) def canChangeTypeOnConnection(self, checked=[], can=True, extraPins=[], selfCheck=True): """Recursive function to determine if pin can change its dataType :param checked: Already visited pins, defaults to [] :type checked: list, optional :param can: Variable Updated during iteration, defaults to True :type can: bool, optional :param extraPins: extra pins, non constrained or connected to this pin but that want to check also, defaults to [] :type extraPins: list, optional :param selfCheck: Define if check pin itself for connected pins, defaults to True :type selfCheck: bool, optional :returns: True if pin can becabe other dataType :rtype: bool """ if not self.optionEnabled(PinOptions.ChangeTypeOnConnection): return False con = [] neis = [] if selfCheck: if self.hasConnections(): for c in getConnectedPins(self): if c not in checked: con.append(c) else: checked.append(self) if self.constraint: neis = self.owningNode().constraints[self.constraint] for port in neis + con + extraPins: if port not in checked and can: checked.append(port) can = port.canChangeTypeOnConnection(checked, can, selfCheck=True) return can def getDictElementNode(self, checked=[], node=None): """Get the connected :py:class:`PyFlow.Packages.PyFlowBase.Nodes.makeDictElement.makeDictElement` to this pin 
recursively :param checked: Currently visited pins, defaults to [] :type checked: list, optional :param node: founded node, defaults to None :rtype: :class:`~PyFlow.Core.NodeBase.NodeBase` or None """ if self.owningNode().__class__.__name__ == "makeDictElement": return self.owningNode() con = [] neis = [] if self.hasConnections() and self.direction == PinDirection.Input: for c in getConnectedPins(self): if c not in checked: con.append(c) if self.constraint: neis = self.owningNode().constraints[self.constraint] for port in con + neis: if port not in checked and node == None: checked.append(port) node = port.getDictElementNode(checked, node) return node def getDictNode(self, checked=[], node=None): """Get the connected :py:class:`PyFlow.Packages.PyFlowBase.Nodes.makeDict.makeDict` or :py:class:`PyFlow.Packages.PyFlowBase.Nodes.makeAnyDict.makeAnyDict` to this pin recursively :param checked: Currently visited pins, defaults to [] :type checked: list, optional :param node: founded node, defaults to None :returns: founded node or None if not found """ if self.owningNode().__class__.__name__ in ["makeDict", "makeAnyDict"]: return self.owningNode() con = [] neis = [] if self.hasConnections(): for c in getConnectedPins(self): if c not in checked: con.append(c) if self.constraint: neis = self.owningNode().constraints[self.constraint] for port in con + neis: if port not in checked and node == None: checked.append(port) node = port.getDictNode(checked, node) return node def supportDictElement(self, checked=[], can=True, selfCheck=True): """Iterative functions that search in all connected pins to see if they support DictElement nodes. :param checked: Already visited pins, defaults to [] :type checked: list, optional :param can: this is the variable that will be actualized during the recursive function, defaults to False :type can: bool, optional :param selfCheck: Define if look itself or no, defaults to True :type selfCheck: bool, optional :returns: True if can connect DictElement nodes to this pin :rtype: bool """ if not self.optionEnabled(PinOptions.DictElementSupported): return False con = [] neis = [] if selfCheck: if self.hasConnections() and self.direction == PinDirection.Input: for c in getConnectedPins(self): if c not in checked: con.append(c) else: checked.append(self) if self.constraint and self.owningNode( ).__class__.__name__ != "makeDictElement": neis = self.owningNode().constraints[self.constraint] for port in neis + con: if port not in checked and can: checked.append(port) can = port.supportDictElement(checked, can, selfCheck=True) return can def supportOnlyDictElement(self, checked=[], can=False, selfCheck=True): """Iterative Functions that search in all connected pins to see if they support only DictElement nodes, this is done for nodes like makeDict and simmilars. 
:param checked: Already visited pins, defaults to []
        :type checked: list, optional
        :param can: Variable updated during the recursive calls, defaults to False
        :type can: bool, optional
        :param selfCheck: Defines whether to check the pin itself, defaults to True
        :type selfCheck: bool, optional
        :returns: True if only DictElement and dict nodes can connect to this pin
        :rtype: bool
        """
        if self.isDict():
            return True
        con = []
        neis = []
        if selfCheck:
            if self.hasConnections() and self.direction == PinDirection.Output:
                for c in getConnectedPins(self):
                    if c not in checked:
                        con.append(c)
        else:
            checked.append(self)
        if self.constraint and self.owningNode().__class__.__name__ != "makeDictElement":
            neis = self.owningNode().constraints[self.constraint]
        for port in neis + con:
            if port not in checked and not can:
                checked.append(port)
                can = port.supportOnlyDictElement(checked, can, selfCheck=True)
        return can

    def updateConnectedDicts(self, checked=[], keyType=None):
        """Iterates over connected dict pins and DictElement pins, updating the key data type

        :param checked: Already visited pins, defaults to []
        :type checked: list, optional
        :param keyType: Key data type to set, defaults to None
        :type keyType: string, optional
        """
        if not self.isDict():
            return
        con = []
        neis = []
        if self.hasConnections():
            for c in getConnectedPins(self):
                if c not in checked:
                    con.append(c)
        if self.constraint:
            neis = self.owningNode().constraints[self.constraint]
        for port in con + neis:
            if port not in checked and port.isDict():
                checked.append(port)
                port._keyType = keyType
                if port._data.keyType != keyType:
                    port._data = PFDict(keyType, port.dataType)
                port.dictChanged.send(keyType)
                if port.getWrapper():
                    port.getWrapper()().update()
                port.updateConnectedDicts(checked, keyType)

    def setClean(self):
        """Sets the dirty flag to False on this pin and, for outputs, on affected pins
        """
        self.dirty = False
        if self.direction == PinDirection.Output:
            for i in self.affects:
                i.dirty = False

    def setDirty(self):
        """Sets the dirty flag to True on this pin and all affected pins
        """
        if self.isExec():
            return
        self.dirty = True
        for i in self.affects:
            i.dirty = True
        self.markedAsDirty.send()

    def hasConnections(self):
        """Returns whether this pin has any connections

        :rtype: bool
        """
        numConnections = 0
        if self.direction == PinDirection.Input:
            numConnections += len(self.affected_by)
        elif self.direction == PinDirection.Output:
            numConnections += len(self.affects)
        return numConnections > 0

    def setDefaultValue(self, val):
        """Stores a separate copy of the supplied value as default

        In Python all user-defined classes are mutable, so make sure to store a
        separate copy of the value. For example, if this is a Matrix, the default
        value would otherwise change each time data is set on the original Matrix.

        :param val: defaultValue
        :type val: object
        """
        self._defaultValue = copy(val)

    def updateConstraint(self, constraint):
        self.constraint = constraint
        if constraint in self.owningNode().constraints:
            self.owningNode().constraints[constraint].append(self)
        else:
            self.owningNode().constraints[constraint] = [self]

    def updateStructConstraint(self, constraint):
        self.structConstraint = constraint
        if constraint in self.owningNode().structConstraints:
            self.owningNode().structConstraints[constraint].append(self)
        else:
            self.owningNode().structConstraints[constraint] = [self]

    @staticmethod
    def IsValuePin():
        """Returns whether this pin is a value pin or not

        :rtype: bool
        """
        return True

    @staticmethod
    def pinDataTypeHint():
        """Hint of what data type this pin holds, as well as the default value for that data type.

        Used to easily find pin classes by type id.
:rtype: tuple(str, object) :raises NotImplementedError: If not implemented """ raise NotImplementedError( 'pinDataTypeHint method of PinBase is not implemented') @staticmethod def supportedDataTypes(): return () @staticmethod def jsonEncoderClass(): """Returns json encoder class for this pin """ return json.JSONEncoder @staticmethod def jsonDecoderClass(): """Returns json decoder class for this pin """ return json.JSONDecoder
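# Self-contained sketch of the serializationHook return-value pattern used in
# serialize() above: blinker's send() returns a list of (receiver, return value)
# pairs, so a UI wrapper can contribute extra data to the serialized pin.
from blinker import Signal

serializationHook = Signal()

def wrapper_state(sender):
    return {"x": 10, "y": 20}  # hypothetical UI-side data

serializationHook.connect(wrapper_state)

data = {"name": "inPin"}
results = serializationHook.send(None)
if results:
    data["wrapper"] = results[0][1]  # take the return value of the first receiver
print(data)  # {'name': 'inPin', 'wrapper': {'x': 10, 'y': 20}}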
class Axis(object): def __init__(self, limits=None, scale={}): from blinker import Signal self.limits = limits self.scale = scale self.position = None self.running = None self.initializing = None self.initialized = None self.initiator_minus = None self.initiator_plus = None self.initiator_error = None self.temperature_warning = None self.onPosition = Signal() self.onRunning = Signal() self.onInitializing = Signal() self.onInitialized = Signal() self.onInitiatorMinus = Signal() self.onInitiatorPlus = Signal() self.onInitiatorError = Signal() self.onTemperatureWarning = Signal() def update(self): last_position = self.position last_running = self.running last_initializing = self.initializing last_initialized = self.initialized last_initiator_minus = self.initiator_minus last_initiator_plus = self.initiator_plus last_initiator_error = self.initiator_error last_temperature_warning = self.temperature_warning self.do_update() if last_position != self.position: self.onPosition.send(self, position=self.position) if last_running != self.running: self.onRunning.send(self, running=self.running) if last_initializing != self.initializing: self.onInitializing.send(self, initializing=self.initializing) if last_initialized != self.initialized: self.onInitialized.send(self, initialized=self.initialized) if last_initiator_minus != self.initiator_minus: self.onInitiatorMinus.send(self, active=self.initiator_minus) if last_initiator_plus != self.initiator_plus: self.onInitiatorPlus.send(self, active=self.initiator_plus) if last_initiator_error != self.initiator_error: self.onInitiatorError.send(self, error=self.initiator_error) if last_temperature_warning != self.temperature_warning: self.onTemperatureWarning.send(self, warning=self.temperature_warning) def wait_for_stop(self): self.update() while self.running: self.update() def initiate(self): raise NotImplementedError() def goto_absolute(self, target, speed=None): if self.limits and (target < min(self.limits) or target > max(self.limits)): raise ValueError() self.do_goto_absolute(target, speed) def do_goto_absolute(self, target, speed): raise NotImplementedError() def goto_relative(self, offset, speed=None): raise NotImplementedError()
import copy
import sys
import time

from blinker import Signal

# boto3_cached_conn, list_roles, list_users and the rate_limited decorator
# are assumed to be provided by the surrounding project; they are not
# defined in this snippet.


class AccountToUpdate(object):
    # Class-level signals: shared by all instances, so a receiver subscribed
    # once observes every account update.
    on_ready = Signal()
    on_complete = Signal()
    on_error = Signal()
    on_failure = Signal()

    def __init__(self, current_app, account_number, role_name, arns_list):
        self.current_app = current_app
        self.account_number = account_number
        self.role_name = role_name
        self.arn_list = arns_list
        self.conn_details = {
            'account_number': account_number,
            'assume_role': role_name,
            'session_name': 'aardvark',
            'region': self.current_app.config.get('REGION') or 'us-east-1',
            'arn_partition': self.current_app.config.get('ARN_PARTITION') or 'aws'
        }
        self.max_access_advisor_job_wait = 5 * 60  # Wait 5 minutes before giving up on jobs

    def update_account(self):
        """
        Updates Access Advisor data for a given AWS account.
        1) Gets list of IAM Role ARNs in target account.
        2) Gets IAM credentials in target account.
        3) Calls GenerateServiceLastAccessedDetails for each role
        4) Calls GetServiceLastAccessedDetails for each role to retrieve data

        :return: Return code and JSON Access Advisor data for given account
        """
        self.on_ready.send(self)
        arns = self._get_arns()

        if not arns:
            self.current_app.logger.warning("Zero ARNs collected. Exiting")
            sys.exit(-1)

        client = self._get_client()
        try:
            details = self._call_access_advisor(client, list(arns))
        except Exception as e:
            self.on_failure.send(self, error=e)
            self.current_app.logger.exception('Failed to call access advisor', exc_info=True)
            return 255, None
        else:
            self.on_complete.send(self)
            return 0, details

    def _get_arns(self):
        """
        Gets a list of all Role ARNs in a given account, optionally limited by
        class property ARN filter

        :return: list of role ARNs
        """
        client = boto3_cached_conn(
            'iam', service_type='client', **self.conn_details)

        account_arns = set()

        for role in list_roles(**self.conn_details):
            account_arns.add(role['Arn'])

        for user in list_users(**self.conn_details):
            account_arns.add(user['Arn'])

        for page in client.get_paginator('list_policies').paginate(Scope='Local'):
            for policy in page['Policies']:
                account_arns.add(policy['Arn'])

        for page in client.get_paginator('list_groups').paginate():
            for group in page['Groups']:
                account_arns.add(group['Arn'])

        result_arns = set()
        for arn in self.arn_list:
            if arn.lower() == 'all':
                return account_arns

            if arn not in account_arns:
                self.current_app.logger.warning("Provided ARN {arn} not found in account.".format(arn=arn))
                continue

            result_arns.add(arn)

        return list(result_arns)

    def _get_client(self):
        """
        Assumes into the target account and obtains IAM client

        :return: boto3 IAM client in target account & role
        """
        client = boto3_cached_conn('iam', **self.conn_details)
        return client

    def _call_access_advisor(self, iam, arns):
        jobs = self._generate_job_ids(iam, arns)
        details = self._get_job_results(iam, jobs)

        if arns and not details:
            self.current_app.logger.error("Didn't get any results from Access Advisor")

        return details

    @rate_limited()
    def _generate_service_last_accessed_details(self, iam, arn):
        """ Wrapping the actual AWS API calls for rate limiting protection. """
        return iam.generate_service_last_accessed_details(Arn=arn)['JobId']

    @rate_limited()
    def _get_service_last_accessed_details(self, iam, job_id):
        """ Wrapping the actual AWS API calls for rate limiting protection. """
        return iam.get_service_last_accessed_details(JobId=job_id)

    def _generate_job_ids(self, iam, arns):
        jobs = {}
        for role_arn in arns:
            try:
                job_id = self._generate_service_last_accessed_details(iam, role_arn)
                jobs[job_id] = role_arn
            except iam.exceptions.NoSuchEntityException:
                # The ARN disappeared since the call to self._get_arns().
                # Log the missing ARN and move along.
                self.current_app.logger.info('ARN {arn} found gone when fetching details'.format(arn=role_arn))
            except Exception as e:
                self.on_error.send(self, error=e)
                self.current_app.logger.error('Could not gather data from {0}.'.format(role_arn), exc_info=True)
        return jobs

    def _get_job_results(self, iam, jobs):
        access_details = {}
        job_queue = list(jobs.keys())
        last_job_completion_time = time.time()

        while job_queue:
            # Check for timeout
            now = time.time()
            if now - last_job_completion_time > self.max_access_advisor_job_wait:
                # We ran out of time, some jobs are unfinished
                self._log_unfinished_jobs(job_queue, jobs)
                break

            # Pull next job ID
            job_id = job_queue.pop()
            role_arn = jobs[job_id]
            try:
                details = self._get_service_last_accessed_details(iam, job_id)
            except Exception as e:
                self.on_error.send(self, error=e)
                self.current_app.logger.error('Could not gather data from {0}.'.format(role_arn), exc_info=True)
                continue

            # Check job status
            if details['JobStatus'] == 'IN_PROGRESS':
                job_queue.append(job_id)
                continue

            # Check for job failure
            if details['JobStatus'] != 'COMPLETED':
                log_str = "Job {job_id} finished with unexpected status {status} for ARN {arn}.".format(
                    job_id=job_id,
                    status=details['JobStatus'],
                    arn=role_arn)

                failing_arns = self.current_app.config.get('FAILING_ARNS', {})
                if role_arn in failing_arns:
                    self.current_app.logger.info(log_str)
                else:
                    self.current_app.logger.error(log_str)
                continue

            # Job status must be COMPLETED. Save result.
            last_job_completion_time = time.time()
            updated_list = []

            for detail in details.get('ServicesLastAccessed'):
                # create a copy, we're going to modify the time to epoch
                updated_item = copy.copy(detail)

                # AWS gives a datetime, convert to epoch
                last_auth = detail.get('LastAuthenticated')
                if last_auth:
                    last_auth = int(time.mktime(last_auth.timetuple()) * 1000)
                else:
                    last_auth = 0

                updated_item['LastAuthenticated'] = last_auth
                updated_list.append(updated_item)

            access_details[role_arn] = updated_list

        return access_details

    def _log_unfinished_jobs(self, job_queue, job_details):
        for job_id in job_queue:
            role_arn = job_details[job_id]
            self.current_app.logger.error("Job {job_id} for ARN {arn} didn't finish".format(
                job_id=job_id,
                arn=role_arn,
            ))
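# Usage sketch for the class-level signals above. The receiver names are
# illustrative: because on_ready/on_error/on_failure are class attributes,
# subscribing once covers every instance, and the sender tells accounts apart.
def log_start(sender):
    print('starting Access Advisor update for', sender.account_number)


def log_problem(sender, error=None):
    print('problem updating', sender.account_number, '->', error)


AccountToUpdate.on_ready.connect(log_start)
AccountToUpdate.on_error.connect(log_problem)
AccountToUpdate.on_failure.connect(log_problem)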
import uuid
import weakref

from blinker import Signal
from multipledispatch import dispatch

# ISerializable, Variable, AccessLevel, PinDirection, getRawNodeInstance,
# getPinDefaultValueByType and connectPins are assumed to come from the
# surrounding package; they are not defined in this snippet.


class GraphBase(ISerializable):
    def __init__(self, name, manager, parentGraph=None, category='', uid=None, *args, **kwargs):
        super(GraphBase, self).__init__(*args, **kwargs)
        self.graphManager = manager
        self._isRoot = False

        # signals
        self.nameChanged = Signal(str)
        self.categoryChanged = Signal(str)

        self.__name = name
        self.__category = category

        self._parentGraph = None
        self.childGraphs = set()
        self.parentGraph = parentGraph

        self.nodes = {}
        self.vars = {}
        self.uid = uuid.uuid4() if uid is None else uid

        manager.add(self)

    def setIsRoot(self, bIsRoot):
        self._isRoot = bIsRoot

    def isRoot(self):
        return self._isRoot

    @property
    def parentGraph(self):
        return self._parentGraph

    @parentGraph.setter
    def parentGraph(self, newParentGraph):
        if self.isRoot():
            self._parentGraph = None
            return

        if newParentGraph is not None:
            if self._parentGraph is not None:
                # remove self from old parent's children set
                if self in self._parentGraph.childGraphs:
                    self._parentGraph.childGraphs.remove(self)
            # add self to new parent's children set
            newParentGraph.childGraphs.add(self)
            # update parent
            self._parentGraph = newParentGraph

    def depth(self):
        result = 1
        parent = self._parentGraph
        while parent is not None:
            result += 1
            parent = parent.parentGraph
        return result

    def getVarList(self):
        """return list of variables from active graph
        """
        result = list(self.vars.values())
        parent = self._parentGraph
        while parent is not None:
            result += list(parent.vars.values())
            parent = parent.parentGraph
        return result

    def serialize(self, *args, **kwargs):
        result = {
            'name': self.name,
            'category': self.category,
            'vars': [v.serialize() for v in self.vars.values()],
            'nodes': [n.serialize() for n in self.nodes.values()],
            'depth': self.depth(),
            'isRoot': self.isRoot(),
            'parentGraphName': str(self._parentGraph.name) if self._parentGraph is not None else str(None)
        }
        return result

    def populateFromJson(self, jsonData):
        self.clear()
        parentGraphName = jsonData['parentGraphName']
        parentGraph = self.graphManager.findGraph(parentGraphName)
        self.parentGraph = parentGraph
        self.name = jsonData['name']
        self.category = jsonData['category']
        self.setIsRoot(jsonData['isRoot'])

        # restore vars
        for varJson in jsonData['vars']:
            var = Variable.deserialize(self, varJson)
            self.vars[var.uid] = var

        # restore nodes
        for nodeJson in jsonData['nodes']:
            # check if variable getter or setter and pass variable
            nodeArgs = ()
            nodeKwargs = {}
            if nodeJson['type'] in ('getVar', 'setVar'):
                nodeKwargs['var'] = self.vars[uuid.UUID(nodeJson['varUid'])]
            nodeJson['owningGraphName'] = self.name
            node = getRawNodeInstance(nodeJson['type'], packageName=nodeJson['package'],
                                      libName=nodeJson['lib'], *nodeArgs, **nodeKwargs)
            self.addNode(node, nodeJson)

        # restore connections
        for nodeJson in jsonData['nodes']:
            for nodeOutputJson in nodeJson['outputs']:
                lhsPin = self.findPin(str(nodeOutputJson['fullName']))
                for rhsPinFullName in nodeOutputJson['linkedTo']:
                    rhsPin = self.findPin(rhsPinFullName)
                    connected = connectPins(lhsPin, rhsPin)
                    assert (connected is True), "Failed to restore connection"

    def remove(self):
        """Removes this graph as well as child graphs. Deepest graphs will be removed first
        """
        # graphs should be removed from leafs to root
        for childGraph in set(self.childGraphs):
            childGraph.remove()
        # remove itself
        self.graphManager.removeGraph(self)

    def clear(self):
        """Clears content of this graph as well as child graphs. Deepest graphs will be cleared first
        """
        # graphs should be cleared from leafs to root
        for childGraph in self.childGraphs:
            childGraph.clear()

        # clear itself
        for node in list(self.nodes.values()):
            node.kill()
        self.nodes.clear()

        for var in list(self.vars.values()):
            self.killVariable(var)
        self.vars.clear()

    @property
    def name(self):
        return self.__name

    @name.setter
    def name(self, value):
        value = str(value)
        if self.__name != value:
            self.__name = value
            self.nameChanged.send(self.__name)

    @property
    def category(self):
        return self.__category

    @category.setter
    def category(self, value):
        self.__category = str(value)
        self.categoryChanged.send(self.__category)

    def Tick(self, deltaTime):
        for node in self.nodes.values():
            node.Tick(deltaTime)

    @property
    def pins(self):
        result = {}
        for n in self.getNodes():
            for pin in tuple(n.inputs.values()) + tuple(n.outputs.values()):
                result[pin.uid] = pin
        return result

    def createVariable(self, dataType=str('AnyPin'), accessLevel=AccessLevel.public, uid=None, name=str("var")):
        name = self.graphManager.getUniqVariableName(name)
        var = Variable(self, getPinDefaultValueByType(dataType), name, dataType,
                       accessLevel=accessLevel, uid=uid)
        self.vars[var.uid] = var
        return var

    def killVariable(self, var):
        assert (isinstance(var, Variable))
        if var.uid in self.vars:
            popped = self.vars.pop(var.uid)
            popped.killed.send()

    def getEvaluationOrder(self, node):
        order = {0: []}

        # include first node only if it is callable
        if not node.bCallable:
            order[0].append(node)

        def foo(n, process=True):
            if not process:
                return
            next_layer_nodes = self.getNextLayerNodes(n, PinDirection.Input)
            layer_idx = max(order.keys()) + 1
            for n in next_layer_nodes:
                if layer_idx not in order:
                    order[layer_idx] = []
                order[layer_idx].append(n)
            for i in next_layer_nodes:
                foo(i)

        foo(node)

        # make sure no copies of nodes in higher layers (non directional cycles)
        for i in reversed(sorted([i for i in order.keys()])):
            for iD in range(i - 1, -1, -1):
                for check_node in order[i]:
                    if check_node in order[iD]:
                        order[iD].remove(check_node)
        return order

    @staticmethod
    def getNextLayerNodes(node, direction=PinDirection.Input):
        nodes = []
        # callable nodes skipped because execution flow is defined by execution wires
        if direction == PinDirection.Input:
            nodeInputs = node.inputs
            if not len(nodeInputs) == 0:
                for i in nodeInputs.values():
                    if not len(i.affected_by) == 0:
                        for a in i.affected_by:
                            if not a.owningNode().bCallable:
                                nodes.append(a.owningNode())
            return nodes
        if direction == PinDirection.Output:
            nodeOutputs = node.outputs
            if not len(nodeOutputs) == 0:
                for i in nodeOutputs.values():
                    if not len(i.affects) == 0:
                        for p in i.affects:
                            if not p.owningNode().bCallable:
                                nodes.append(p.owningNode())
            return nodes

    def getNodes(self, classNameFilters=[]):
        """return all nodes without compound's nodes
        """
        if len(classNameFilters) > 0:
            return [n for n in self.nodes.values() if n.__class__.__name__ in classNameFilters]
        else:
            return [n for n in self.nodes.values()]

    @dispatch(str)
    def findNode(self, name):
        for i in self.nodes.values():
            if i.name == name:
                return i
        return None

    @dispatch(uuid.UUID)
    def findNode(self, uuid):
        return None

    def getNodesByClassName(self, className):
        nodes = []
        for i in self.getNodes():
            if i.__class__.__name__ == className:
                nodes.append(i)
        return nodes

    @dispatch(uuid.UUID)
    def findPin(self, uid):
        pin = None
        if uid in self.pins:
            pin = self.pins[uid]
        return pin

    @dispatch(str)
    def findPin(self, pinName):
        result = None
        for pin in self.pins.values():
            if pinName == pin.getName():
                result = pin
                break
        return result

    def getInputNode(self):
        """Creates and adds to graph 'graphInputs' node

        pins on this node will be exposed on compound node as input pins
        """
        node = getRawNodeInstance("graphInputs", "PyflowBase")
        self.addNode(node)
        return node

    def getOutputNode(self):
        """Creates and adds to graph 'graphOutputs' node.

        pins on this node will be exposed on compound node as output pins
        """
        node = getRawNodeInstance("graphOutputs", "PyflowBase")
        self.addNode(node)
        return node

    def addNode(self, node, jsonTemplate=None):
        assert (node is not None), "failed to add node, None is passed"
        if node.uid in self.nodes:
            return False

        # Check if this node is variable get/set. Variables created in child graphs
        # are not visible to parent ones. Do not disrupt variable scope.
        if node.__class__.__name__ in ['getVar', 'setVar']:
            var = self.graphManager.findVariable(node.variableUid())
            variableLocation = var.location()
            if len(variableLocation) > len(self.location()):
                return False

        if jsonTemplate is not None:
            jsonTemplate['name'] = self.graphManager.getUniqName(jsonTemplate['name'])
        else:
            node.setName(self.graphManager.getUniqName(node.name))

        self.nodes[node.uid] = node
        node.graph = weakref.ref(self)
        node.postCreate(jsonTemplate)
        return True

    def location(self):
        result = [self.name]
        parent = self._parentGraph
        while parent is not None:
            result.insert(0, parent.name)
            parent = parent.parentGraph
        return result

    def count(self):
        return self.nodes.__len__()

    def plot(self):
        depth = self.depth()
        prefix = "".join(['-'] * depth) if depth > 1 else ''
        parentGraphString = str(None) if self.parentGraph is None else self.parentGraph.name
        print(prefix + "GRAPH:" + self.name + ", parent:{0}".format(parentGraphString))
        # for n in self.getNodes():
        #     print(prefix + "-Node:{}".format(n.name))
        assert (self not in self.childGraphs)

        for child in self.childGraphs:
            child.plot()
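# Hedged sketch of the rename notification above: nameChanged is sent with the
# new string as the sender argument, so a receiver sees the value directly.
# `manager` stands in for a real GraphManager from the host application, which
# is why the wiring lines are left commented.
def on_graph_renamed(new_name):
    print('graph renamed to', new_name)

# graph = GraphBase('root', manager)
# graph.nameChanged.connect(on_graph_renamed)
# graph.name = 'main'   # receiver prints: graph renamed to main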
def init(self):
    # Objective-C style initializer (PyObjC convention): super's init()
    # returns the instance to use, which may differ from the original self.
    self = super(MailboxesAccountItem, self).init()
    self.folder_items = []
    self.updated = Signal()
    return self
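# Hedged sketch: code that mutates folder_items can notify observers through
# the per-instance `updated` signal created above. The receiver, `item` and
# `folder` names are illustrative.
def on_account_updated(sender):
    print('account item changed:', sender)

# item.updated.connect(on_account_updated)
# item.folder_items.append(folder)
# item.updated.send(item)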
from blinker import Signal

on_init = Signal()
on_session = Signal()
on_parse = Signal()
on_meta = Signal()
on_wait = Signal()
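# Hedged usage sketch for module-level hooks like these: blinker's connect()
# returns the receiver, so it doubles as a decorator. The handler name and
# sender value are illustrative.
@on_init.connect
def handle_init(sender, **kwargs):
    print('on_init fired by', sender)


on_init.send('loader')  # prints: on_init fired by loader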