Esempio n. 1
0
    def on_message(self, message):
        '''
        Handles one raw LiveReload websocket message.

        Understands the 'hello' handshake and the 'info'/'url' update
        pushes; other commands are ignored.

        @param {String} message Raw payload received from the client.
        '''
        logging.debug(HELLO + "Got message '%s'" % message)

        message = LiveReloadMessage.parse(message)
        ### HELLO
        if message['command'] == 'hello':
            # protocols supported by both ends of the connection
            supported_common = list(
                set(LiveRealoadProtocols.LIVE_REALOD_SUPPORTED_PROTOCOLS).
                intersection(message['protocols']))
            # truthiness is the idiomatic form of `len(...) != 0`
            if supported_common:
                self.instance_settings['protocols'] = supported_common
                hello = {
                    'command': 'hello',
                    'protocols': supported_common,
                    'serverName': APP_NAME
                }
                self.send_message(hello)
                logging.debug("Processed 'hello'")
        ### INFO and URL (update push from client)
        elif message['command'] in ('info', 'url'):

            new_url = message['url']

            # server does NOT listen to this
            # External consumer code will listen on this channel
            # if they need to do anything with new connections.
            events.publish(
                'LiveRealoadServer.NewConnection', self, {
                    'domain': new_url,
                    'from': self.request.remote_ip,
                    'through': self.request.host,
                    'protocol': 'livereload'
                })

            logging.info('New LiveReload connection for URL "%s"' % new_url)
Esempio n. 2
0
	def __init__(self):
		'''
		Builds the task-type registry, restores persisted task hives from
		storage and wires up the pubsub subscriptions and storage watcher.
		'''
		# Register every available task type under its ID.
		self.tasktypes = TaskCollection()
		for tt in self.tasktypes_iter():
			self.tasktypes[tt.ID] = tt

		events.publish('TaskHiveCoordinator.TaskTypes.Changed', self.tasktypes)

		# Rehydrate hives persisted under keys matching "TaskHive.<something>".
		self.taskhives = {}
		self.taskhives_lock = threading.Lock()
		for hive_data in storage.query(r'^TaskHive\..+'):
			self.taskhives[hive_data['id']] = TaskHive(self.tasktypes, hive_data)

		# making listing of tasktypes available through pubsub system.
		events.subscribe('TaskHiveCoordinator.GetTaskTypes', self.get_tasktypes_metadata)

		# Be notified whenever a hive record is (re)stored.
		events.publish('Storage.On', 'set', r'^TaskHive\.', self._hive_set_handler)
Esempio n. 3
0
    def stop(self, path, filter_regex, callback, *a, **kw):
        '''
        Asks the PathWatchCoordinator, over pubsub, to stop watching a folder.

        @param {String} path Filesystem path to watched folder.
        @param {RegExObj[]} filter_regex Regex filter associated with the watch.
        @param {Callback} callback to be called by runner when done (or passed to async delegate)
        '''
        absolute = os.path.abspath(path)
        events.publish('PathWatchCoordinator.Stop', absolute, filter_regex, callback)
Esempio n. 4
0
    def run(self, path, filter_regex, callback, *a, **kw):
        '''
        Starts watching a folder by publishing to the PathWatchCoordinator.

        @param {String} path Filesystem path to watched folder.
        @param {RegExObj[]} filter_regex An array of pairs of (Python-style regular expression, True/False for "must match" or "must NOT match") applied as a filter against the changed-file paths (relative to the watched folder). Pairs are evaluated in order; first combined true value wins. Include `(".*",true)` as the last entry to pick up all changes.
        @param {Callback} callback to be called by runner when done (or passed to async delegate)
        '''
        logging.debug(HELLO + "Running path watcher for '%s'" % (path))
        watched = os.path.abspath(path)
        events.publish('PathWatchCoordinator.Watch', watched, filter_regex, callback)
Esempio n. 5
0
 def _add_to_watched_tasks(self, taskid):
     '''
     Registers a change watcher on Storage for this task, at most once per id.

     @param {String} taskid Storage key of the task to watch.
     '''
     callback = None
     with self._taskswatchers_lock:
         # `not in` is the idiomatic membership test (was `not taskid in ...`)
         if taskid not in self._taskswatchers:
             # NOTE(review): Storage appears to hold callbacks weakly, so the
             # dict keeps a strong ref alive — confirm against Storage docs.
             callback = self._taskswatchers[taskid] = bind(
                 self._task_change_handler, taskid)
     # publish outside the lock; callback is non-None only on first add
     if callback:
         events.publish('Storage.On', 'change',
                        r'^' + taskid.replace('.', r'\.') + '$', callback)
Esempio n. 6
0
	def stop(self, path, filter_regex, callback, *a, **kw):
		'''
		Requests, via pubsub, that watching of the given folder be stopped.

		@param {String} path Filesystem path to watched folder.
		@param {Callback} callback to be called by runner when done (or passed to async delegate)
		'''
		target = os.path.abspath(path)
		events.publish('PathWatchCoordinator.Stop', target, filter_regex, callback)
Esempio n. 7
0
	def _add_to_watched_tasks(self, taskid):
		'''
		Registers a change watcher on Storage for this task, at most once per id.

		@param {String} taskid Storage key of the task to watch.
		'''
		callback = None
		with self._taskswatchers_lock:
			# `not in` is the idiomatic membership test (was `not taskid in ...`)
			if taskid not in self._taskswatchers:
				# NOTE(review): Storage appears to hold callbacks weakly; this
				# dict keeps a strong ref alive — confirm against Storage docs.
				callback = self._taskswatchers[taskid] = bind(self._task_change_handler, taskid)
		# publish outside the lock; callback is non-None only on first add
		if callback:
			events.publish(
				'Storage.On'
				, 'change'
				, r'^' + taskid.replace('.',r'\.') + '$'
				, callback
			)
Esempio n. 8
0
	def run(self, path, filter_regex, callback, *a, **kw):
		'''
		Starts watching a folder by publishing to the PathWatchCoordinator.

		@param {String} path Filesystem path to watched folder.
		@param {RegExObj[]} filter_regex An array of pairs of (Python-style regular expression, True/False for "must match" or "must NOT match") applied as a filter against the changed-file paths (relative to the watched folder). Pairs are evaluated in order; first combined true value wins. Include `(".*",true)` as the last entry to pick up all changes.
		@param {Callback} callback to be called by runner when done (or passed to async delegate)
		'''
		logging.debug(HELLO + "Running path watcher for '%s'" % (path))
		watched = os.path.abspath(path)
		events.publish('PathWatchCoordinator.Watch', watched, filter_regex, callback)
Esempio n. 9
0
    def _new_client_connected(self, connection, metadata):
        '''
        Records a newly connected client and announces it over pubsub.

        metadata = {
            'domain': site's url
            , 'from': self.request.remote_ip
            , 'through': self.request.host
        }
        '''
        channel = 'TaskHiveCoordinator.AssociateConsumerWithHive'

        with self.clients_lock:
            self.clients[connection] = metadata

        events.publish(channel, metadata)
Esempio n. 10
0
	def _new_client_connected(self, connection, metadata):
		'''
		Stores the new client connection and publishes it for hive association.

		metadata = {
			'domain': site's url
			, 'from': self.request.remote_ip
			, 'through': self.request.host
		}
		'''

		with self.clients_lock:
			self.clients[connection] = metadata

		# consumers on this channel decide what hive the client belongs to
		events.publish(
			'TaskHiveCoordinator.AssociateConsumerWithHive'
			, metadata
		)
Esempio n. 11
0
	def __init__(self, tasktypes, taskhive_metadata = None):
		'''
		Restores (or newly creates) a task hive from persisted metadata.

		@param {TaskCollection} tasktypes Registry of available task types; held as a weak ref.
		@param {dict} taskhive_metadata Persisted hive record; when falsy, a default record is created.
		'''
		# weakref so the hive does not keep the shared registry alive
		self.get_tasktypes = weakref.ref( tasktypes )

		# tracks whether the (possibly patched) metadata must be written back
		changed = False

		if not taskhive_metadata:
			taskhive_metadata = TASKHIVE_DEFAULT_DATA.copy()
			changed = True
		if 'tasks' not in taskhive_metadata:
			taskhive_metadata['tasks'] = []
			changed = True

		if 'id' in taskhive_metadata:
			self.id = taskhive_metadata['id']
		else:
			# new hives get an id of the form "<type>.<uuid1>"
			self.id = taskhive_metadata['id'] = taskhive_metadata['type'] + '.' + str(uuid.uuid1())
			changed = True

		self._running_tasks = weakref.WeakKeyDictionary()
		self._running_tasks_map = weakref.WeakValueDictionary()
		# used by self._hive_tasks_change_handler
		self._tasks = set(taskhive_metadata.get('tasks', []))
		# we are watching on Storage.
		# there, it stores callbacks as weak refs.
		# this means we have to keep a ref to callback locally as long
		# as we care about this task
		self._taskswatchers = {}
		self._taskswatchers_lock = threading.Lock()

		if changed:
			# persist the defaults / newly-minted id
			storage.set(self.id, taskhive_metadata)

		if not bool(taskhive_metadata.get('paused')):
			self.start()

		self.hive_change_handlers = {
			'tasks': self._hive_tasks_change_handler
		}

		# listen for changes to self
		events.publish(
			'Storage.On'
			, 'change'
			, r'^' + self.id.replace('.',r'\.') + r'$'
			, self._hive_change_handler
		)
Esempio n. 12
0
    def __init__(self, tasktypes, taskhive_metadata=None):
        '''
        Restores (or newly creates) a task hive from persisted metadata.

        @param {TaskCollection} tasktypes Registry of available task types; held as a weak ref.
        @param {dict} taskhive_metadata Persisted hive record; when falsy, a default record is created.
        '''
        # weakref so the hive does not keep the shared registry alive
        self.get_tasktypes = weakref.ref(tasktypes)

        # tracks whether the (possibly patched) metadata must be written back
        changed = False

        if not taskhive_metadata:
            taskhive_metadata = TASKHIVE_DEFAULT_DATA.copy()
            changed = True
        if 'tasks' not in taskhive_metadata:
            taskhive_metadata['tasks'] = []
            changed = True

        if 'id' in taskhive_metadata:
            self.id = taskhive_metadata['id']
        else:
            # new hives get an id of the form "<type>.<uuid1>"
            self.id = taskhive_metadata[
                'id'] = taskhive_metadata['type'] + '.' + str(uuid.uuid1())
            changed = True

        self._running_tasks = weakref.WeakKeyDictionary()
        self._running_tasks_map = weakref.WeakValueDictionary()
        # used by self._hive_tasks_change_handler
        self._tasks = set(taskhive_metadata.get('tasks', []))
        # we are watching on Storage.
        # there, it stores callbacks as weak refs.
        # this means we have to keep a ref to callback locally as long
        # as we care about this task
        self._taskswatchers = {}
        self._taskswatchers_lock = threading.Lock()

        if changed:
            # persist the defaults / newly-minted id
            storage.set(self.id, taskhive_metadata)

        if not bool(taskhive_metadata.get('paused')):
            self.start()

        self.hive_change_handlers = {'tasks': self._hive_tasks_change_handler}

        # listen for changes to self
        events.publish('Storage.On', 'change',
                       r'^' + self.id.replace('.', r'\.') + r'$',
                       self._hive_change_handler)
Esempio n. 13
0
    def watch(self, path, filter_regex, callback):
        '''
        Tell what path to watch.

        Do it over events.publish('PathWatchCoordinator.Watch', path, filter_string )

        @param {String} path Path of the folder to watch_path
        @param {RegExOb[]} filter_regex An array of pairs of (Python-style regular expression, True/False for "must match" or "must NOT match") applied as a filter against the changed-file paths (relative to the watched folder). Pairs are evaluated in order; first combined true value wins. Include `(".*",true)` as the last entry to pick up all changes.
        @param {Callback} callback Function to be called with Change[] (Array of Change objects)
        '''
        # register the callback under this path (creating the set on demand)
        with self.paths_lock:
            self.paths.setdefault(path, set()).add(callback)

        # remember which filter belongs to this callback
        with self.filters_lock:
            self.filters[callback] = filter_regex

        logging.debug('PathWatchCoordinator: watching %s' % path)

        events.publish('PathWatchCoordinator.StartPathWatchRunner', path)
Esempio n. 14
0
    def __init__(self):
        '''
        Builds the task-type registry, restores persisted task hives from
        storage and wires up the pubsub subscriptions and storage watcher.
        '''
        self.tasktypes = TaskCollection()
        for tasktype in self.tasktypes_iter():
            self.tasktypes[tasktype.ID] = tasktype

        # announce the freshly-built set of task types
        events.publish('TaskHiveCoordinator.TaskTypes.Changed', self.tasktypes)

        self.taskhives = {}
        self.taskhives_lock = threading.Lock()
        # rehydrate every hive persisted under a "TaskHive.*" key
        for taskhive_data in storage.query(r'^TaskHive\..+'):
            taskhive_id = taskhive_data['id']
            self.taskhives[taskhive_id] = TaskHive(self.tasktypes,
                                                   taskhive_data)

        # making listing of tasktypes available through pubsub system.
        events.subscribe('TaskHiveCoordinator.GetTaskTypes',
                         self.get_tasktypes_metadata)

        # be notified whenever a hive record is (re)stored
        events.publish('Storage.On', 'set', r'^TaskHive\.',
                       self._hive_set_handler)
Esempio n. 15
0
    def on_message(self, message):
        '''
        Handles one raw LiveReload websocket message.

        Understands the 'hello' handshake and the 'info'/'url' update
        pushes; other commands are ignored.

        @param {String} message Raw payload received from the client.
        '''
        logging.debug(HELLO + "Got message '%s'" % message)

        message = LiveReloadMessage.parse(message)
        ### HELLO
        if message['command'] == 'hello':
            # protocols supported by both ends of the connection
            supported_common = list(
                set(LiveRealoadProtocols.LIVE_REALOD_SUPPORTED_PROTOCOLS).intersection(
                    message['protocols']
                )
            )
            # truthiness is the idiomatic form of `len(...) != 0`
            if supported_common:
                self.instance_settings['protocols'] = supported_common
                hello = {
                    'command': 'hello'
                    , 'protocols': supported_common
                    , 'serverName': APP_NAME
                }
                self.send_message(hello)
                logging.debug("Processed 'hello'")
        ### INFO and URL (update push from client)
        elif message['command'] in ('info', 'url'):

            new_url = message['url']

            # server does NOT listen to this
            # External consumer code will listen on this channel
            # if they need to do anything with new connections.
            events.publish(
                'LiveRealoadServer.NewConnection'
                , self
                , {
                    'domain': new_url
                    , 'from': self.request.remote_ip
                    , 'through': self.request.host
                    , 'protocol': 'livereload'
                }
            )

            logging.info('New LiveReload connection for URL "%s"' % new_url)
Esempio n. 16
0
	def watch(self, path, filter_regex, callback):
		'''
		Tell what path to watch. 

		Do it over events.publish('PathWatchCoordinator.Watch', path, filter_string )

		@param {String} path Path of the folder to watch_path
		@param {RegExOb[]} filter_regex An array of pairs of (Python-style regular expression, True/False for "must match" or "must NOT match") to be applied as filter againts the paths of the changed files. Applied only to the portion of the path AFTER the base, watched folder name. Each pair is evaluated in order, looking for first combined true value. Include `(".*",true)` as the last entry on the list to pick up all changes.
		@param {Callback} callback Function to be called with Change[] (Array of Change objects)
		'''
		# register the callback under this path (creating the set on demand)
		with self.paths_lock:
			callbacks = self.paths[path] = self.paths.get(path, set())
			callbacks.add(callback)

		# remember which filter belongs to this callback
		with self.filters_lock:
			self.filters[callback] = filter_regex

		logging.debug('PathWatchCoordinator: watching %s' % path)

		events.publish(
			'PathWatchCoordinator.StartPathWatchRunner'
			, path
		)
Esempio n. 17
0
    def on_message(self, data):
        '''
        Dispatches one incoming websocket message.

        We speak JSON-RPC (http://json-rpc.org/wiki/specification) over
        WebSocket; understands the 'rpc' wrapper method and the 'hello'
        handshake. Unknown methods are ignored.

        @param {String} data Raw message payload.
        '''
        logging.debug(HELLO + "Got message")  # '%s'" % data)

        try:
            method, params, message_id = Messager.parse(data)
        except Exception as ex:
            logging.debug(
                HELLO +
                "Received the following error from message parser: %s" % ex)
            return

        ### RPC method
        if method == 'rpc':
            '''
            'rpc' method is a wrapper for an actual JSON-RPC packet. The reason we
            do it is to allow passing through authentication tocken for the connection,
            without polluting the arguments of the actual JSON-RPC call.

            We look like this:
            {
                method: rpc
                , id: None or number or string
                , params: {
                    method: actual_called_method's_name
                    , id: exact same as above, but irrelevant since we use the id from above
                    , params: params to be passed to the called method
                    --------------------
                    , authentication_token: something we got from server at hello stage, at inception of this websocket connection.
                    --------------------
                }
            }

            The wrapper call (when auth is good) calls one and only interface - PubSub.
                actual_called_method's_name is the channel name
                params are flattened into Python's *args, **kw boxes. 
                id is special. Presence of it means we need a callback added to *kw
                    This ID, WSConnection are packaged into the callback.
                    The called method calls callback when done.
            '''

            # isinstance is the idiomatic type check (was `type(...) != dict`)
            if not params or not isinstance(params, dict) or not params.get('method'):
                logging.debug(
                    HELLO + " RPC call seems to have sub-call parts missing.")
                return

            actual_method = params['method']
            args = []
            kw = {}

            if 'params' in params:
                actual_params = params['params']
                if isinstance(actual_params, list):
                    args.extend(actual_params)
                elif isinstance(actual_params, dict):
                    kw.update(actual_params)
                # all other types of objects are not allowed as values for 'params' in JSON-RPC

            # presence of message ID means, it's not "Notification"
            # (where caller does not expect a return value)
            # but is a "Call" where there is expectation of a return value.
            # since we are async, we can't return, we can only callback.
            if message_id:
                kw['callback'] = bind(process_rpc_response, weakref.ref(self),
                                      message_id)

            events.publish(actual_method, *args, **kw)

        ### HELLO
        elif method == 'hello':

            supported_common = list(
                set(SUPPORTED_PROTOCOLS).intersection(params['protocols']))

            # truthiness is the idiomatic form of `len(...) != 0`
            if supported_common:
                hello = {
                    'method': 'hello',
                    'params': {
                        'protocols': supported_common,
                        'serverName': APP_NAME
                    },
                    'id': message_id
                }
                self.send_message(hello)
Esempio n. 18
0
def configure(*args, **kw):
	'''Publishes a pubsub request to show the application configuration UI.'''
	events.publish('Application.Configuration.Show')
Esempio n. 19
0
def onexit(*args, **kw):
	'''Publishes a pubsub request asking the application to exit.'''
	events.publish('Application.Exit')
Esempio n. 20
0
    def on_message(self, data):
        '''
        Dispatches one incoming websocket message.

        Understands the 'rpc' wrapper method and the 'hello' handshake;
        unknown methods are ignored.

        @param {String} data Raw message payload.
        '''
        logging.debug(HELLO + "Got message") # '%s'" % data)

        try:
            method, params, message_id = Messager.parse(data)
        except Exception as ex:
            logging.debug(HELLO + "Received the following error from message parser: %s" % ex)
            return

        ## we speak JSON-RPC (http://json-rpc.org/wiki/specification) over WebSocket

        ### RPC method
        if method == 'rpc':
            '''
            'rpc' method is a wrapper for an actual JSON-RPC packet. The reason we
            do it is to allow passing through authentication tocken for the connection,
            without polluting the arguments of the actual JSON-RPC call.

            We look like this:
            {
                method: rpc
                , id: None or number or string
                , params: {
                    method: actual_called_method's_name
                    , id: exact same as above, but irrelevant since we use the id from above
                    , params: params to be passed to the called method
                    --------------------
                    , authentication_token: something we got from server at hello stage, at inception of this websocket connection.
                    --------------------
                }
            }

            The wrapper call (when auth is good) calls one and only interface - PubSub.
                actual_called_method's_name is the channel name
                params are flattened into Python's *args, **kw boxes. 
                id is special. Presence of it means we need a callback added to *kw
                    This ID, WSConnection are packaged into the callback.
                    The called method calls callback when done.
            '''

            # the wrapped call must itself carry a 'method' to dispatch on
            if not params or type(params) != dict or not params.get('method'):
                logging.debug(HELLO + " RPC call seems to have sub-call parts missing.")
                return

            actual_method = params['method']
            args = []
            kw = {}

            # JSON-RPC 'params' may be positional (list) or named (dict)
            if 'params' in params:
                actual_params = params['params']
                if type(actual_params) == list:
                    args.extend(actual_params)
                elif type(actual_params) == dict:
                    kw.update(actual_params)
                # all other types of objects are not allowed as values for 'params' in JSON-RPC

            # presence of message ID means, it's not "Notification" 
            # (where caller does not expect a return value)
            # but is a "Call" where there is expectation of a return value.
            # since we are async, we can't return, we can only callback.
            if message_id:
                kw['callback'] = bind(
                    process_rpc_response
                    , weakref.ref(self)
                    , message_id
                )

            events.publish(
                actual_method
                , *args
                , **kw
            )

        ### HELLO
        elif method == 'hello':

            # protocols supported by both ends of the connection
            supported_common = list(
                set(SUPPORTED_PROTOCOLS).intersection(
                    params['protocols']
                )
            )

            if len(supported_common):
                hello = {
                    'method': 'hello'
                    , 'params': {
                        'protocols': supported_common
                        , 'serverName': APP_NAME
                    }
                    , 'id': message_id
                }
                self.send_message(hello)
Esempio n. 21
0
    def _watch_coordinator(self, watched_path, changes, *args, **kw):
        '''
		Filters through multitude of fired 'file changed' events
		and allows some of them to bubble up to consumer callbacks.

		Also restarts the path watch threads.
		'''
        # 1. Take a snapshot of callback-filter listenning on the watched_path
        # 2. If there are listeners, spin up new watch thread.
        # 2. Go over each (callback, filter_regex) pair and
        # 3. loop over changed entries, testing against regex and
        # 4. create new Change[] for each callback.
        # 5. when done looping, fire all callbacks.

        # copying the list of callbacks
        with self.paths_lock:
            callbacks = self.paths.get(watched_path, set()).copy()

        if callbacks:
            events.publish('PathWatchCoordinator.StartPathWatchRunner',
                           watched_path)
        else:
            # nobody listens on this path anymore; let the watcher die
            return

        with self.filters_lock:
            # (filter, callback, accumulated-changes) triples
            filters = [(self.filters[c], c, []) for c in callbacks
                       if c in self.filters]

        # loop over changes and check each change against the regex filter.
        # if matches, add to that filter's change collection.
        for p, c, a in changes:
            # changes is an array of tuples
            # [(watched_path, changed_path, change_action),...]
            # change is (watched_path, changed_path, change_action)

            # if the actual watched folder is deleted
            if c == '' and a == 'Deleted':
                # for now let's stop trying to watch it.
                # in the future think about switching to long-poll
                # and reconnecting when it reappears.
                with self.paths_lock:
                    del self.paths[watched_path]

            # de-dupe: skip an identical path+action seen within the last 2s
            if self.recently_fired.get(os.path.join(p, c) + ":" + a,
                                       0) > time.time() - 2:
                continue
            else:
                self.recently_fired[os.path.join(p, c) + ":" + a] = time.time()

            # now the main event:
            for regexcollection, callback, array in filters:
                append = False
                for regex, tone in regexcollection:
                    # tone is either True or False, which stands for
                    # "Regex must match" or "Regext must NOT match" respectively.
                    # example: ('\.git/', False)
                    # if regex mathches, it's True, and compared to False it fails the test.
                    # Only when a given regex + tone yield a match, we signal up.
                    # if not tone:
                    # logging.debug(HELLO + "testing regex %s on %s" % ( regex, c) )
                    if bool(re.search(regex, c)):
                        append = tone
                        # logging.debug(HELLO + "matched regex %s on %s" % ( regex, c) )
                        break

                if append:
                    array.append({'domain': p, 'path': c, 'change': a})

        # fire each callback on its own daemon thread with its Change[] batch
        # (note: `changes` here shadows the parameter, which is done with)
        for regex, callback, changes in filters:
            if changes:  # is not empty
                th = threading.Thread(target=callback, args=[changes])
                th.daemon = True
                th.start()
Esempio n. 22
0
def onexit(*args, **kw):
    '''Publishes a pubsub request asking the application to exit.'''
    events.publish('Application.Exit')
Esempio n. 23
0
	def _watch_coordinator(self, watched_path, changes, *args, **kw):
		'''
		Filters through multitude of fired 'file changed' events
		and allows some of them to bubble up to consumer callbacks.

		Also restarts the path watch threads.

		(FIX: the original body mixed tabs and spaces in the indentation of
		the "watched folder deleted" branch, which raises TabError under
		Python 3; indentation is normalized to tabs, logic unchanged.)
		'''
		# 1. Take a snapshot of callback-filter listenning on the watched_path
		# 2. If there are listeners, spin up new watch thread.
		# 3. Go over each (callback, filter_regex) pair and 
		# 4. loop over changed entries, testing against regex and
		# 5. create new Change[] for each callback.
		# 6. when done looping, fire all callbacks.

		# copying the list of callbacks
		with self.paths_lock:
			callbacks = self.paths.get(watched_path, set()).copy()

		if callbacks:
			events.publish(
				'PathWatchCoordinator.StartPathWatchRunner'
				, watched_path
			)
		else:
			# nobody listens on this path anymore; let the watcher die
			return

		with self.filters_lock:
			# (filter, callback, accumulated-changes) triples
			filters = [(self.filters[c], c, []) for c in callbacks if c in self.filters]

		# loop over changes and check each change against the regex filter.
		# if matches, add to that filter's change collection.
		for p, c, a in changes:
			# changes is an array of tuples
			# [(watched_path, changed_path, change_action),...]
			# change is (watched_path, changed_path, change_action)

			# if the actual watched folder is deleted
			if c == '' and a == 'Deleted':
				# for now let's stop trying to watch it.
				# in the future think about switching to long-poll
				# and reconnecting when it reappears.
				with self.paths_lock:
					del self.paths[watched_path]

			# de-dupe: skip an identical path+action seen within the last 2s
			if self.recently_fired.get(os.path.join(p,c) + ":" + a, 0) > time.time() - 2:
				continue
			else:
				self.recently_fired[os.path.join(p,c) + ":" + a] =  time.time()

			# now the main event:
			for regexcollection, callback, array in filters:
				append = False
				for regex, tone in regexcollection:
					# tone is either True or False, which stands for 
					# "Regex must match" or "Regext must NOT match" respectively.
					# example: ('\.git/', False)
					# if regex mathches, it's True, and compared to False it fails the test.
					# Only when a given regex + tone yield a match, we signal up.
					# if not tone:
					# logging.debug(HELLO + "testing regex %s on %s" % ( regex, c) )
					if bool(re.search(regex, c)):
						append = tone
						# logging.debug(HELLO + "matched regex %s on %s" % ( regex, c) )
						break

				if append:
					array.append({
						'domain': p
						, 'path': c
						, 'change': a
					})

		# fire each callback on its own daemon thread with its Change[] batch
		for regex, callback, changes in filters:
			if changes: # is not empty
				th = threading.Thread(
					target = callback
					, args = [changes]
				)
				th.daemon = True
				th.start()
Esempio n. 24
0
def configure(*args, **kw):
    '''Publishes a pubsub request to show the application configuration UI.'''
    events.publish('Application.Configuration.Show')