Example #1
 def __init__(self, db, librarypath):
     self.url = librarypath
     self.db = db
     self.scanrecursif()
     ob = Observer()
     ob.schedule(Filehandler(self), self.url, recursive=True)
     ob.start()
Example #2
def _watch(root=u'.', dest=u'_site', pattern=u'**/*.html', exclude=u'_*/**'):

    try:
        from watchdog.observers import Observer
        from watchdog.events import FileSystemEventHandler
    except ImportError:
        msg = "The build --watch feature requires watchdog. \n"\
            + "Please install it with 'easy_install watchdog'."
        print(msg)
        return None

    class handler(FileSystemEventHandler):
        def on_any_event(self, event):
            exclude_path = os.path.join(os.getcwd(), exclude)
            if not utils.matches_pattern(exclude_path, event.src_path):
                build_files(root=root,
                            dest=dest,
                            pattern=pattern,
                            exclude=exclude)

    observer = Observer()
    observer.schedule(handler(), root, recursive=True)
    observer.start()

    print("Watching '{0}' ...".format(root))

    return observer
Example #3
class AlertEventHandler(FileSystemEventHandler):
    def __init__(self, sensor_uuid, alert_file, hpc):
        """
        Initializes a filesystem watcher that will watch
        the specified file for changes.
        `alert_file` is the absolute path of the snort alert file.
        `hpc` is the hpfeeds client
        """
        self.sensor_uuid = sensor_uuid
        self.alert_file = alert_file
        self.hpc = hpc
        logger.info('connected to hpfeeds broker {}'.format(hpc.brokername))

        self.observer = Observer()
        self.observer.schedule(self, os.path.dirname(alert_file), False)
        
    @property
    def latest_alert_date(self):
        return safe_unpickle('alert_date.pkl')

    @latest_alert_date.setter
    def latest_alert_date(self, date):
        safe_pickle('alert_date.pkl', date)

    def on_any_event(self, event):
        if (not event.event_type == 'deleted') and (event.src_path == self.alert_file):
            alerts = Alert.from_log(self.sensor_uuid, self.alert_file, self.latest_alert_date)
            if alerts:
                logger.info("submitting {} alerts to {}".format(len(alerts), self.hpc.brokername))
                alerts.sort(key=lambda e: e.date)
                self.latest_alert_date = alerts[-1].date            
                
                for alert in alerts:
                    self.hpc.publish("snort.alerts", alert.to_json())
Example #4
def serve(host='localhost', port=8000):
    """Runs the development server at given `host` and `port`,
    watches the changes and regenerates the site.
    """
    http_server = BaseHTTPServer.HTTPServer(
        (host, port), SimpleHTTPServer.SimpleHTTPRequestHandler)

    # Event to be set when the project has changes and needs to be rebuilt
    new_changes_event = threading.Event()

    # Both `shutdowner` and `observer` are daemon threads
    shutdowner = Shutdowner(http_server, new_changes_event)
    shutdowner.start()

    observer = Observer()
    observer.start()

    project_dir = os.getcwd()
    www_dir = os.path.join(project_dir, 'www')
    event_handler = EventHandler(project_dir, new_changes_event)
    observer.schedule(event_handler, path=project_dir, recursive=True)

    from carcade.cli import build  # To resolve a circular import
    while True:
        os.chdir(project_dir)
        build(to=www_dir, atomically=True)
        if not os.path.exists(www_dir):
            return 1
        os.chdir(www_dir)

        http_server.serve_forever()
Example #5
    def __init__(self, zkconn, root_node_path, conf):
        super(ZkFarmJoiner, self).__init__()
        self.update_remote_timer = None
        self.update_local_timer = None

        self.zkconn = zkconn
        self.conf = conf
        self.node_path = "%s/%s" % (root_node_path, self.myip())

        # force the hostname info key
        info = conf.read()
        info["hostname"] = gethostname()
        conf.write(info)

        zkconn.create(self.node_path, serialize(conf.read()), zc.zk.OPEN_ACL_UNSAFE, EPHEMERAL)

        observer = Observer()
        observer.schedule(self, path=conf.file_path, recursive=True)
        observer.start()

        zkconn.get(self.node_path, self.node_watcher)

        while True:
            with self.cv:
                self.wait()
Example #6
	def start(filename, format, callback=None, verbose=False):
		observer = Observer()
		fm = FileMonitor(observer,filename,format,callback,verbose)
		fm._handle()
		observer.schedule(fm, path=os.path.dirname(filename), recursive=False)
		observer.start()
		return fm
Example #7
class RoleBasedAuthorizationProvider(AbstractAuthorizationProvider,
                                     FileSystemEventHandler):

    def __init__(self, role_loader, roles_config_file_path):
        self.lgr = logging.getLogger(FLASK_SECUREST_LOGGER_NAME)
        self.role_loader = role_loader
        self.permissions_by_roles = None
        self.roles_config_file_path = os.path.abspath(roles_config_file_path)
        self.observer = Observer()
        self.observer.schedule(self,
                               path=os.path.dirname(
                                   self.roles_config_file_path),
                               recursive=False)
        self.load_roles_config()
        self.observer.start()

    def load_roles_config(self):
        try:
            with open(self.roles_config_file_path, 'r') as config_file:
                self.permissions_by_roles = yaml.safe_load(config_file.read())
                self.lgr.info('Loading of roles configuration ended '
                              'successfully')
        except (yaml.parser.ParserError, IOError) as e:
            err = 'Failed parsing {role_config_file} file. Error: {error}.' \
                .format(role_config_file=self.roles_config_file_path, error=e)
            self.lgr.warning(err)
            raise ValueError(err)

    def on_modified(self, event):
        if os.path.abspath(event.src_path) == self.roles_config_file_path:
            self.load_roles_config()

    def authorize(self):
        target_endpoint = rest_security.get_endpoint()
        target_method = rest_security.get_http_method()
        roles = self.role_loader.get_roles()
        return self._is_allowed(target_endpoint, target_method, roles) and \
            not self._is_denied(target_endpoint, target_method, roles)

    def _is_allowed(self, target_endpoint, target_method, user_roles):
        return self._evaluate_permission_by_type(target_endpoint,
                                                 target_method, user_roles,
                                                 'allow')

    def _is_denied(self, target_endpoint, target_method, user_roles):
        return self._evaluate_permission_by_type(target_endpoint,
                                                 target_method, user_roles,
                                                 'deny')

    def _evaluate_permission_by_type(self, target_endpoint, target_method,
                                     user_roles, permission_type):
        for role in user_roles:
            role_permissions = self.permissions_by_roles.get(role,
                                                             {'allow': {},
                                                              'deny': {}})
            relevant_permissions = role_permissions.get(permission_type, {})
            if _is_permission_matching(target_endpoint, target_method,
                                       relevant_permissions):
                return True
        return False
Example #8
 def add_path(self, name, path):
     if name=='default': self._base = path
     self.paths[name] = path
     if self.instant_reload:
         observer = Observer()
         observer.schedule(self.reload_handler, path=path, recursive=True)
         observer.start()
Example #9
    def _setup_watchdog(self):
        # Monkey-patch Watchdog to
        # - Set the Windows hack delay to 0 in WindowsApiEmitter, otherwise we might miss some events
        # - Increase the ReadDirectoryChangesW buffer size for Windows
        if self._windows:
            try:
                import watchdog.observers
                watchdog.observers.read_directory_changes.WATCHDOG_TRAVERSE_MOVED_DIR_DELAY = 0
                watchdog.observers.winapi.BUFFER_SIZE = self._windows_watchdog_event_buffer
            except:
                log.trace('read_directory_changes import error', exc_info=True)
                log.warn('Cannot import read_directory_changes, probably under Windows XP'
                         ', watchdog will fall back on polling')
        from watchdog.observers import Observer
        log.debug("Watching FS modification on : %s", self.client.base_folder)
        self._event_handler = DriveFSEventHandler(self)
        self._root_event_handler = DriveFSRootEventHandler(self, os.path.basename(self.client.base_folder))
        self._observer = Observer()
        self._observer.schedule(self._event_handler, self.client.base_folder, recursive=True)
        self._observer.start()
        self._check_watchdog()

        self._root_observer = Observer()
        self._root_observer.schedule(self._root_event_handler, os.path.dirname(self.client.base_folder), recursive=False)
        self._root_observer.start()
Example #10
class ToolWatcher(object):

    def __init__(self, toolbox):
        if not can_watch:
            raise Exception("Watchdog library unavailable, cannot watch tools.")
        self.toolbox = toolbox
        self.tool_file_ids = {}
        self.tool_dir_callbacks = {}
        self.monitored_dirs = {}
        self.observer = Observer()
        self.event_handler = ToolFileEventHandler(self)
        self.start()

    def start(self):
        self.observer.start()

    def monitor(self, dir):
        self.observer.schedule(self.event_handler, dir, recursive=False)

    def watch_file(self, tool_file, tool_id):
        tool_file = os.path.abspath( tool_file )
        self.tool_file_ids[tool_file] = tool_id
        tool_dir = os.path.dirname( tool_file )
        if tool_dir not in self.monitored_dirs:
            self.monitored_dirs[ tool_dir ] = tool_dir
            self.monitor( tool_dir )

    def watch_directory(self, tool_dir, callback):
        tool_dir = os.path.abspath( tool_dir )
        self.tool_dir_callbacks[tool_dir] = callback
        if tool_dir not in self.monitored_dirs:
            self.monitored_dirs[ tool_dir ] = tool_dir
            self.monitor( tool_dir )
Example #11
    def __init__(self, dirpath='.', output='output'):
        '''
        Load job config from a local path.
        '''
        logger.info("Initializing BOSS")
        if os.path.isdir(dirpath):
            self.dirpath = dirpath
            logger.info("loading job config folder: " + dirpath)
        else:
            # Fall back to the default path so self.dirpath is always set
            self.dirpath = '.'
            logger.info(dirpath + " is invalid, use default path instead")
        self.output = output
        logger.info("Setup output folder: " + output)
        if not os.path.isdir(output):
            logger.info("target directory "
                        + output
                        + " doesn't exist, creating..")
            os.makedirs(output)

        self.scheduler = BackgroundScheduler()
        self.scheduler.start()

        self.load_dir(self.dirpath)

        event_handler = JsonHandler(patterns=["*.json"],
                                    ignore_directories=True)
        event_handler.set_handler(oncreated=self.load,
                                  onmodified=self.load,
                                  ondeleted=self.remove)
        observer = Observer()
        observer.schedule(event_handler, self.dirpath, recursive=True)
        observer.start()
Example #12
 def _setup_watchdog(self):
     from watchdog.observers import Observer
     log.debug("Watching FS modification on : %s", self.client.base_folder)
     self._event_handler = DriveFSEventHandler(self)
     self._root_event_handler = DriveFSRootEventHandler(self, os.path.basename(self.client.base_folder))
     self._observer = Observer()
     self._observer.schedule(self._event_handler, self.client.base_folder, recursive=True)
     self._observer.start()
     # Be sure to have at least one watchdog event
     timeout = 30
     lock = self.client.unlock_ref('/', False)
     try:
         fname = self.client._abspath('/.watchdog_setup')
         while (self._watchdog_queue.empty()):
             with open(fname, 'a'):
                 os.utime(fname, None)
             sleep(1)
             timeout = timeout - 1
             if timeout < 0:
                 log.debug("Can't have watchdog setup. Fallback to full scan mode ?")
                 os.remove(fname)
                 raise Exception
             os.remove(fname)
         if os.path.exists(fname):
             os.remove(fname)
     finally:
         self.client.lock_ref('/', lock)
     self._root_observer = Observer()
     self._root_observer.schedule(self._root_event_handler, os.path.dirname(self.client.base_folder), recursive=False)
     self._root_observer.start()
Example #13
class PlexTVDaemon(Daemon):
	def __init__(self, pidfile, tv, patterns):
		super(PlexTVDaemon, self).__init__(pidfile)
		self._tv = tv
		self._handler = PlexTVEventHandler(tv, patterns)
		self._observer = Observer()
		self._observer.schedule(self.handler, self.tv.source, recursive=True)

	@property
	def handler(self):
		return self._handler

	@property
	def observer(self):
		return self._observer

	@property
	def tv(self):
		return self._tv

	def run(self):
		self.tv.clean_broken_links() and self.tv.create_all_links()
		self.observer.start()

		while True:
			time.sleep(1)

	def stop(self):
		self.observer.stop()
		self.observer.join()
		super(PlexTVDaemon, self).stop()
Example #14
class Monitor(object):
    def __init__(self, transport):
        """ Watches file change events (creation, modification) in the
        watched project.
        """

        self.config = Config()
        self.transport = transport

        # Initialize the event handler class to use depending on the SCM to use
        handler = None
        scm_classes = EventHandler.__subclasses__()

        for cls in scm_classes:
            tmp_inst = cls(self.transport)
            if tmp_inst.scm_name == self.config.scm:
                self.logger.debug("Uses the %s class for the monitoring of FS " "changes" % tmp_inst.scm_name)
                handler = tmp_inst
                break
        else:
            # Raise a BaboonException if no plugin has been found
            # matching the scm entry in the config file
            raise BaboonException(
                "Cannot get a valid FS event handler" " class for your SCM written in your" " baboonrc file"
            )

        self.monitor = Observer()
        try:
            self.monitor.schedule(handler, self.config.path, recursive=True)
        except OSError as err:
            self.logger.error(err)
            raise BaboonException(err)
Example #15
def server (options, info):
	""" Run server and monitor for changes """
	
	class MyEventHandler(FileSystemEventHandler):
		def on_any_event(self, event):
			print "Changes detected"
			print "Building html"
			paver.doctools.html()

	settings = {
	    "debug":True
	}

	event_handler = MyEventHandler()

	application = tornado.web.Application( 	
		[(r"/", tornado.web.RedirectHandler,dict(url="/index.html")),
		(r"/(.*)",tornado.web.StaticFileHandler, {"path": "build/html"})],
		**settings)

	print "Running server on port 8888"

	observer= Observer()

	observer.schedule(event_handler, "source", recursive=True)
	observer.start()

	application.listen(8888)
	
	while True: 
		print "Starting the server"
		tornado.ioloop.IOLoop.instance().start()
Example #16
class Watcher(object):
    def __init__(self, ctx):
        self.ctx = ctx
        self.observer = Observer()

    def watch_file(self, name, action):
        self.observer.schedule(
            FileModified(self.ctx, name, action),
            self.ctx.pkg.root,
            recursive=True
        )

    def watch_directory(self, path, ext, action):
        self.observer.schedule(
            DirectoryModified(self.ctx, path, ext, action),
            self.ctx.pkg.root,
            recursive=True
        )

    def start(self):
        print 'watching for changes.. (Ctrl-C to exit)'
        self.observer.start()
        try:
            while 1:
                time.sleep(1)
        except KeyboardInterrupt:
            self.observer.stop()
        self.observer.join()
Example #17
    def _watch(self, app, arguments, builder):
        # By default we're watching for events in content directory.
        watch_paths = [
            app.conf['paths.content'],
        ]

        # But it'd be nice to watch the theme directories as well.
        for theme in app._themes:
            if os.path.exists(theme):
                watch_paths.append(theme)

        observer = Observer()
        for path in watch_paths:
            observer.schedule(
                _ChangeWatcher(builder, ignore=[
                    os.path.abspath(arguments.conf),
                ]),
                path, recursive=True)

        # We also should watch the user's settings explicitly, because
        # they may be located outside the content directory.
        if os.path.exists(arguments.conf):
            observer.schedule(
                _ChangeWatcher(builder, recreate_app=True, watch_for=[
                    os.path.abspath(arguments.conf),
                ]),
                os.path.abspath(os.path.dirname(arguments.conf)))

        return observer
Example #18
def watch(script, callback):
    script_dir = os.path.dirname(os.path.abspath(script.args[0]))
    event_handler = _ScriptModificationHandler(callback)
    observer = Observer()
    observer.schedule(event_handler, script_dir)
    observer.start()
    _observers[script] = observer
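
The _ScriptModificationHandler class and the _observers registry used above are defined elsewhere in that project and are not shown. A minimal sketch of what such a handler might look like, assuming it simply fires the callback on file modifications (the class body below is an assumption, not the original implementation):

from watchdog.events import FileSystemEventHandler

_observers = {}  # hypothetical registry: script -> Observer


class _ScriptModificationHandler(FileSystemEventHandler):
    """Hypothetical sketch: invoke `callback` whenever a file is modified."""

    def __init__(self, callback):
        super(_ScriptModificationHandler, self).__init__()
        self._callback = callback

    def on_modified(self, event):
        # Only react to file modifications, not directory events.
        if not event.is_directory:
            self._callback()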
Example #19
 def start(self):
     event_handler = LessWatcher.EventHandler(self)
     observer = Observer()
     observer.schedule(event_handler, self.path, recursive=True)
     observer.start()
     self._observer = observer
     return self
Example #20
    def run(self):
        '''Main running function for a process watching a particular condor
        job. Creates its own logfile, watches for changes and then exits'''
        observer = Observer()
        observer.schedule(self, self.__watchdir, recursive=True)
        files = 0
        file_space = 0
        job_ad = classad.parseOne(open(self.__watchdir+"/.job.ad", "r"))
        jobdate = datetime.datetime.fromtimestamp(
            int(job_ad['JobStartDate'])).strftime('%Y-%m-%d %H:%M:%S')
        try:
            logname = ''.join([LOG_DIR, job_ad['Owner'], ".",
                               job_ad['UidDomain'], ".", str(job_ad['QDate']),
                               ".", str(job_ad['ClusterId']),
                               ".", str(job_ad['ProcId']), ".log"])
            logfile = open(logname, "wb")
        except IOError:
            sys.stderr.write("Problem creating logfile {0}".format(logname))
            return

        logwriter = csv.writer(logfile)
        logwriter.writerow([job_ad['User'], jobdate])
        observer.start()
        while not self.__exit.is_set():
            time.sleep(1)
            for item in self.stat_monitors.copy():
                try:
                    file_space += os.path.getsize(item)
                except OSError:
                    pass      # File has been deleted during our loop
                files += 1
            logwriter.writerow([int(time.time()), files, file_space])
            files = 0
            file_space = 0
        logfile.close()
Example #21
class Watcher:
    def __init__(self, win, watchpath, patterns=None, ignore_patterns=None, 
                 ignore_directories=False, case_sensitive=False,
                 create=True, modify=True, delete=False, rename=False, 
                 subDir=True):
        self.win = win
        self.path = watchpath
        # self.ignore_patterns = ignore_patterns
        # self.patterns = patterns
        self.event_handler = wxLogEventHandler(self.win, patterns, 
                                ignore_patterns, ignore_directories, 
                                case_sensitive, create, modify, delete, rename)
        self.observer = Observer()
        self.observer.schedule(self.event_handler, self.path, recursive=subDir)
        # print "Watcher created ", self.win, self.path


    def Start(self):
        self.running = True
        thread.start_new_thread(self.observer.start, ())
        # self.observer.start()
        print "Thread %s Started..." % self.path
        # print "Ignore Pattern", self.ignore_patterns
        # print "Watch Pattern", self.patterns


    def Stop(self):
        self.running = False
        self.observer.stop()
        print "Thread %s Stopped..." % self.path


    def IsRunning(self):
        return self.running
Example #22
    def watch_and_spawn(self, conf):
        from watchdog.observers import Observer
        from watchdog.events import FileSystemEventHandler, FileSystemMovedEvent, FileModifiedEvent, DirModifiedEvent

        print "Monitoring for changes..."
        self.create_subprocess()

        parent = self

        class AggressiveEventHandler(FileSystemEventHandler):
            def should_reload(self, event):
                for t in (FileSystemMovedEvent, FileModifiedEvent, DirModifiedEvent):
                    if isinstance(event, t):
                        return True
                return False

            def on_modified(self, event):
                if self.should_reload(event):
                    parent.server_process.kill()
                    parent.create_subprocess()

        # Determine a list of file paths to monitor
        paths = self.paths_to_monitor(conf)

        event_handler = AggressiveEventHandler()
        for path, recurse in paths:
            observer = Observer()
            observer.schedule(event_handler, path=path, recursive=recurse)
            observer.start()

        try:
            while True:
                time.sleep(1)
        except KeyboardInterrupt:
            pass
Example #23
class FSWatcherBehavior(object):
    """File System Watcher behavior.

    :Events:
        `on_operations`
            Fired when there is any event on watched dir/file.
    """

    operations = ListProperty(None)
    '''Contains the list of operations/events on the watched folder.
    '''

    path = StringProperty()
    "Watched Path"

    def __init__(self, **kwargs):
        super(FSWatcherBehavior, self).__init__(**kwargs)
        # Now, bind yourself to the list of templates
        self._observer = Observer()
        self._event_handler = MyFileEventHandler(self)

    def on_path(self, *args):
        self._observer.schedule(self._event_handler, self.path)
        self._observer.start()
        print 'Start Watching'
Example #24
    def watch(self):
        """publish any top-level files added to the drive"""
        if self.watch_timer:
            raise Exception("only one watch can be active on a ramdisk")

        class DelayedEventHandler(FileSystemEventHandler):
            """
                Event handler that sends file change messages only if no other
                event occurs for that file within 0.5 seconds.
            """

            previous_events = {}

            def dispatch(self, event):
                self.previous_events[event.src_path] = event
                threading.Timer(0.5, self.check_time, args=[event]).start()

            def check_time(self, event):
                if self.previous_events[event.src_path] == event:
                    wx.CallAfter(
                        pub.sendMessage,
                        "ramdisk.files_added",
                        event_type=event.event_type,
                        path=event.src_path,
                        is_directory=event.is_directory,
                    )

        observer = Observer()
        observer.schedule(DelayedEventHandler(), self.path, recursive=True)
        observer.start()

        self.watch_timer = observer
Example #25
    def _register_observers(self):
        """Setup a watcher to rebuild the nav whenever a file has changed."""
        _this = self

        class ContentHandler(FileSystemEventHandler):

            """Custom event handler for changed files."""

            def on_modified(self, event):
                logging.debug('%s "%s" was "%s"',
                              'Directory' if event.is_directory else "File",
                              event.src_path,
                              event.event_type)

                _this.start()

        event_handler = ContentHandler()

        # Listen for content changes
        self.content_observer = Observer()
        self.content_observer.schedule(event_handler,
                                       self.config['CONTENT_PATH'],
                                       recursive=True)
        self.content_observer.start()

        # If we're debugging, listen for theme changes
        if self.debug:
            self.theme_observer = Observer()
            self.theme_observer.schedule(event_handler,
                                         self.config['THEME_FOLDER'],
                                         recursive=True)
            self.theme_observer.start()
Example #26
def tricks_from(args):
    from watchdog.observers import Observer
    add_to_sys_path(path_split(args.python_path))
    observers = []
    for tricks_file in args.files:
        observer = Observer(timeout=args.timeout)
        if not os.path.exists(tricks_file):
            raise IOError('cannot find tricks file: %s' % tricks_file)
        config = load_config(tricks_file)
        try:
            tricks = config[CONFIG_KEY_TRICKS]
        except KeyError:
            raise KeyError("No `%s' key specified in %s." % (CONFIG_KEY_TRICKS, tricks_file))

        if CONFIG_KEY_PYTHON_PATH in config:
            add_to_sys_path(config[CONFIG_KEY_PYTHON_PATH])
        dir_path = os.path.dirname(tricks_file)
        if not dir_path:
            dir_path = os.path.relpath(os.getcwd())
        schedule_tricks(observer, tricks, dir_path, args.recursive)
        observer.start()
        observers.append(observer)

    try:
        while True:
            time.sleep(1)

    except KeyboardInterrupt:
        for o in observers:
            o.unschedule_all()
            o.stop()

    for o in observers:
        o.join()
Example #27
    def __init__(self):
        """ Watches file change events (creation, modification) in the
        watched project.
        """

        from baboon.baboon.plugins.git.monitor_git import EventHandlerGit

        self.dancer = Dancer(sleeptime=1)

        # All monitors will be stored in this dict. The key is the project name,
        # the value is the monitor instance.
        self.monitors = {}

        try:
            # Avoid using iteritems (Python 2.x) or items (Python 3.x) in
            # order to support both versions.
            for project in sorted(config['projects']):
                project_attrs = config['projects'][project]
                project_path = os.path.expanduser(project_attrs['path'])
                self.handler = EventHandlerGit(project_path)

                monitor = Observer()
                monitor.schedule(self.handler, project_path, recursive=True)

                self.monitors[project_path] = monitor
        except OSError as err:
            self.logger.error(err)
            raise BaboonException(err)
Example #28
class WatchedFolderTree(TreeView):
    path = StringProperty()
    updated = BooleanProperty(False)

    def __init__(self, *args, **kwargs):
        TreeView.__init__(self, *args, **kwargs)
        # Now, bind yourself to the list of templates
        self.observer = Observer()
        self.event_handler = MyFileEventHandler(self)

    def on_path(self, *args):
        self.rebuilt()
        self.observer.schedule(self.event_handler, self.path)
        self.observer.start()
        print 'starting', self.updated

    def rebuilt(self):
        print 'rebuilt called', self.updated
        self.clear_widgets()
        self.root.nodes = []
        print "Nodes:", self.root.nodes
        from os import listdir
        for f in listdir(self.path):
            self.add_node(TreeViewLabel(text=f, color_selected=(.6,.6,.6,.8)))

    def on_updated(self, *args):
        print 'on updated', args
        if self.updated:
            self.updated = False
            self.rebuilt()
Example #29
    def _watchdog(self):
        """Runs Watchdog to listen to filesystem events.

        When first run, the `Cakefile` is touched to trigger the
        initial build.

        """
        if not hasattr(self.app, "static_url_path"):
            from warnings import warn

            warn(DeprecationWarning("static_path is called static_url_path since Flask 0.7"), stacklevel=2)

            static_url_path = self.app.static_path
        else:
            static_url_path = self.app.static_url_path

        static_dir = self.app.root_path + static_url_path

        cakedir = os.path.join(static_dir, self.cakeparent)

        # Setup Watchdog
        handler = Events(cakedir=cakedir, tasks=self.tasks)
        observer = Observer(timeout=5000)
        observer.schedule(handler, path=cakedir, recursive=True)
        observer.start()

        # "Touch" the Cakefile to signal the initial build
        cakefile = os.path.join(cakedir, "Cakefile")
        with open(cakefile, "a"):
            os.utime(cakefile, None)
Example #30
 def __init__(self, watchpath=CONDOR_WATCHDIR):
     observer = Observer()
     observer.schedule(self, watchpath, recursive=False)
     observer.start()
     self.job_watchers = {}
     while True:
         time.sleep(1)
Example #31
    # configure logging
    if args.log_file is None:
        # use stdout by default
        logging.basicConfig(stream=sys.stdout,
                            level=logging.INFO,
                            format='%(asctime)s - %(message)s',
                            datefmt='%Y-%m-%d %H:%M:%S')
    else:
        logging.basicConfig(filename=args.log_file,
                            level=logging.INFO,
                            format='%(asctime)s - %(message)s',
                            datefmt='%Y-%m-%d %H:%M:%S')

    # setup event handler
    event_handler = file_handler.FileHandler(args.fhicl_configuration,
                                             args.input_dir)

    # setup observer for file system changes
    observer = Observer()
    observer.schedule(event_handler, args.input_dir)
    observer.start()

    # Sleep Forever
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        observer.stop()
    observer.join()
Example #32
import ctypes

libc = ctypes.cdll.LoadLibrary('libc.so.6')

logger = logging.getLogger("grabber")
"""
cmd = "/sbin/udevadm", "info", "-n", self.path, "-q", "path"
sysfs_path = subprocess.check_output(cmd)[1:]
self.sysfs_path = os.path.realpath(os.path.join("/sys", sysfs_path, "..", ".."))
print("Unpowering:", self.sysfs_path)
with open(os.path.join(self.sysfs_path, "authorized"), "w") as fh:
    fh.write("0")
"""
BLANK = np.zeros((480, 640, 3), dtype=np.uint8)

observer = Observer()
observer.start()


class CaptureHotplugHandler(FileSystemEventHandler):
    def __init__(self, grabber):
        self.grabber = grabber
        FileSystemEventHandler.__init__(self)

    def on_created(self, event):
        if event.src_path != self.grabber.path:
            return
        logger.info("Attached: %s", event.src_path)
        self.grabber.wake.set()
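
The module-level observer above is started, but nothing in this fragment schedules CaptureHotplugHandler on it. A hedged sketch of how a grabber object might register itself, assuming grabber.path is the device node checked in on_created (the helper name is hypothetical):

import os


def watch_for_hotplug(grabber):
    # Hypothetical helper: watch the directory containing the device node so
    # that on_created() fires when the capture device (re)appears.
    handler = CaptureHotplugHandler(grabber)
    observer.schedule(handler, os.path.dirname(grabber.path), recursive=False)
    return handler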

Example #33
class ViewFileSystemEventHandler(FileSystemEventHandler):
    def __init__(self, viewer):
        self._viewer = viewer

    def on_created(self, event):
        self.event(event)

    def on_modified(self, event):
        self.event(event)

    def event(self, event):
        if event.is_directory:
            return
        self._viewer.update(event.src_path)


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO,
                        format='%(asctime)s - %(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S')
    path = sys.argv[1] if len(sys.argv) > 1 else '.'
    viewer = Viewer()
    event_handler = ViewFileSystemEventHandler(viewer)

    observer = Observer()
    observer.schedule(event_handler, path, recursive=True)
    observer.start()
    viewer.run()
    observer.stop()
    observer.join()
Example #34
            print('%s has been created.' % event.src_path)

    #def on_modified(self, event):
    #    if event.is_directory:
    #        return
    #    if getext(event.src_path) in ('.crdownload'):
    #        print('%s has been modified.' % event.src_path)

    def on_deleted(self, event):
        if event.is_directory:
            return
        if getext(event.src_path) in ('.crdownload',):
            print('%s has been deleted.' % event.src_path)


if __name__ == '__main__':
    while 1:
        event_handler = ChangeHandler('test.txt')
        observer = Observer()
        observer.schedule(event_handler, BASEDIR, recursive=True)
        observer.start()
        try:
            while True:
                time.sleep(1)
        except KeyboardInterrupt:
            observer.stop()
        except:
            print('File created')
            observer.stop()
        observer.stop()
Example #35
 def __init__(self):
     self.observer = Observer()
Example #36
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--settingsFile",
        "--idChannelMappingFromFile",
        type=str,
        help=
        "JSON formatted file with the ADC name <-> Amplifier mapping and optional stimset scale factors.",
        required=True)
    feature_parser = parser.add_mutually_exclusive_group(required=True)
    feature_parser.add_argument("--filename",
                                type=str,
                                help="Name of the generated JSON file",
                                default="mcc-output.json")
    feature_parser.add_argument(
        "--watchFolder",
        type=str,
        help=
        "Gather settings each time a new ABF file is created in this folder.")

    args = parser.parse_args()

    if not os.path.isfile(args.settingsFile):
        raise ValueError(
            "The parameter settingsFile requires an existing file in JSON format."
        )

    if not args.watchFolder:
        writeSettingsToFile(args.settingsFile, args.filename)
        return None

    if not os.path.isdir(args.watchFolder):
        raise ValueError(
            "The parameter watchFolder requires an existing folder.")

    eventHandler = SettingsFetcher(args.settingsFile, )
    eventHandler2 = LiveQC(args.settingsFile, )
    observer = Observer()
    observer.schedule(eventHandler2, args.watchFolder, recursive=False)
    observer.schedule(eventHandler, args.watchFolder, recursive=False)

    observer.start()

    print(
        f"Starting to watch {args.watchFolder} for ABF files to appear. Press Ctrl + Break to stop."
    )

    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        observer.stop()

    observer.join()
Example #37
import os
import shutil
import time

from watchdog.observers import Observer
from watchdog.events import LoggingEventHandler
from watchdog.events import FileSystemEventHandler

while (1):

    class Handler(FileSystemEventHandler):
        def on_modified(self, event):
            for filename in os.listdir("D:/testing"):
                if ".png" in filename:
                    old_path_1 = "D:/testing" + "/" + filename
                    print(old_path_1)
                    shutil.move(old_path_1, "D:/testing_2")
                    print("done!")
                else:
                    print("not a png file ")

    # Initialize Observer
    observer = Observer()
    event_handler = Handler()
    observer.schedule(event_handler, "D:/testing", recursive=True)

    # Start the observer
    observer.start()
    print("yo")
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        observer.stop()
    observer.join()
Example #38
            elif (filename.endswith('.zip')):
                folder_destination = "/mnt/d/Josh/Documents/zipFiles/"
            elif (filename.endswith('.pdf')):
                folder_destination = "/mnt/d/Josh/Documents/pdfFiles/"
            elif (filename.endswith('.exe')):
                folder_destination = "/mnt/d/Josh/Documents/applicationFiles/"
            elif (filename.endswith('.docx')):
                folder_destination = "/mnt/d/Josh/Documents/docFiles/"
            elif (filename.endswith('.ppt') or filename.endswith('.pptx')):
                folder_destination = "/mnt/d/Josh/Documents/pptFiles/"
            elif (filename.endswith('.mp3') or filename.endswith('.mp4')):
                folder_destination = "/mnt/d/Josh/Documents/mediaFiles/"
            else:
                folder_destination = "/mnt/d/Josh/Documents/miscFiles/"
            new_destination = folder_destination + "/" + filename
            os.rename(src, new_destination)


folder_to_track = "/mnt/d/Josh/Documents/organize/"
event_handler = MyHandler()
observer = Observer()
observer.schedule(event_handler, folder_to_track, recursive=True)
observer.start()

try:
    while True:
        time.sleep(10)
except KeyboardInterrupt:
    observer.stop()
observer.join()
Example #39
def on_deleted(event):
    print(f"what the f**k! Someone deleted {event.src_path}!")


def on_modified(event):
    print(f"hey buddy, {event.src_path} has been modified")


def on_moved(event):
    print(f"ok ok ok, someone moved {event.src_path} to {event.dest_path}")


my_event_handler.on_created = on_created
my_event_handler.on_deleted = on_deleted
my_event_handler.on_modified = on_modified
my_event_handler.on_moved = on_moved

path = "."
go_recursively = False
my_observer = Observer()
my_observer.schedule(my_event_handler, path, recursive=go_recursively)

my_observer.start()
try:
    while True:
        time.sleep(1)
except KeyboardInterrupt:
    my_observer.stop()
    my_observer.join()
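
my_event_handler and the on_created function assigned at the top of this example are created earlier in the original script and are not shown. A common way to build such a handler is with PatternMatchingEventHandler; a minimal sketch, with the patterns and flags chosen purely for illustration:

from watchdog.events import PatternMatchingEventHandler

# Illustrative construction of the handler used above; the patterns and flags
# are assumptions, not taken from the original script.
patterns = ["*"]
ignore_patterns = None
ignore_directories = False
case_sensitive = True
my_event_handler = PatternMatchingEventHandler(
    patterns, ignore_patterns, ignore_directories, case_sensitive)


def on_created(event):
    print(f"hey, {event.src_path} has been created!")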
Example #40
class CouchappWatcher(object):

    SIG_QUEUE = []
    SIGNALS = map(lambda x: getattr(signal, "SIG%s" % x),
                  "QUIT INT TERM".split())

    SIG_NAMES = dict((getattr(signal, name), name[3:].lower())
                     for name in dir(signal)
                     if name[:3] == "SIG" and name[3] != "_")

    def __init__(self, doc, dbs, update_delay=DEFAULT_UPDATE_DELAY,
                 noatomic=False):
        self.doc_path = absolute_path(doc.docdir)
        self.event_handler = CouchappEventHandler(doc, dbs,
                                                  update_delay=update_delay,
                                                  noatomic=noatomic)
        self.observer = Observer()
        self.observer.schedule(self.event_handler,
                               self.doc_path, recursive=True)

    def init_signals(self):
        """\
        Initialize master signal handling. Most of the signals
        are queued. Child signals only wake up the master.
        """
        map(lambda s: signal.signal(s, self.signal), self.SIGNALS)
        signal.signal(signal.SIGCHLD, self.handle_chld)

    def signal(self, sig, frame):
        if len(self.SIG_QUEUE) < 5:
            self.SIG_QUEUE.append(sig)
        else:
            log.warn("Dropping signal: %s" % sig)

    def handle_chld(self, sig, frame):
        return

    def handle_quit(self):
        raise StopIteration

    def handle_int(self):
        raise StopIteration

    def handle_term(self):
        raise StopIteration

    def run(self):
        log.info("Starting to listen changes in '%s'", self.doc_path)
        self.init_signals()
        self.observer.start()
        while True:
            try:
                sig = self.SIG_QUEUE.pop(0) if len(self.SIG_QUEUE) else None
                if sig is None:
                    self.event_handler.maybe_update()
                elif sig in self.SIG_NAMES:
                    signame = self.SIG_NAMES.get(sig)
                    handler = getattr(self, "handle_%s" % signame, None)
                    if not handler:
                        log.error("Unhandled signal: %s" % signame)
                        continue
                    log.info("handling signal: %s" % signame)
                    handler()
                else:
                    log.info("Ignoring unknown signal: %s" % sig)
                time.sleep(1)
            except (StopIteration, KeyboardInterrupt):
                self.observer.stop()
                return 0
            except Exception:
                log.info("unhandled exception in main loop:\n%s" %
                         traceback.format_exc())
                return -1
        self.observer.join()
Example #41
        if event.src_path.lower().endswith('.py'):
            print("(compile.py) recompiling...")
            call(["python", "compile.py"])


class EvHandler(LoggingEventHandler):
    def dispatch(self, event):
        print("recompiling...")
        call(["python", "compile.py"])


logging.basicConfig(level=logging.INFO,
                    format='%(asctime)s - %(message)s',
                    datefmt='%Y-%m-%d %H:%M:%S')

path = "./templates"

event_handler = EvHandler()

observer = Observer()
observer.schedule(event_handler, "./templates", recursive=True)
observer.schedule(PyHandler(), '.', recursive=False)
observer.schedule(event_handler, '../tokens', recursive=True)

observer.start()
try:
    while True:
        time.sleep(1)
except KeyboardInterrupt:
    observer.stop()
observer.join()
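
The first three lines of this example are the body of a handler method whose class and def lines were cut off; the PyHandler() scheduled above is presumably that class. A hedged reconstruction, assuming it reacts to modification events:

from subprocess import call
from watchdog.events import FileSystemEventHandler


class PyHandler(FileSystemEventHandler):
    # Hypothetical reconstruction of the truncated class above: recompile
    # whenever a Python source file changes.
    def on_modified(self, event):
        if event.src_path.lower().endswith('.py'):
            print("(compile.py) recompiling...")
            call(["python", "compile.py"])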
Example #42
  for d in dirs:
    if not os.path.exists(d):
      os.makedirs(d)

  return args

if __name__ == '__main__':
  print("PonyRelay version "+ __version__ +" started.")
  args = config()
  logging.basicConfig(format='%(asctime)s - %(levelname)s - %(message)s', datefmt='%Y-%m-%d %H:%M:%S', filename=args['logfile'], level=args["loglevel"])
  logging.info("  PonyRelay version "+ __version__ +" started.")
  logging.debug(" CONFIG: data dir:    "+ args['datadir'])
  logging.debug(" CONFIG: watch dir:   "+ args['watchdir'])
  logging.debug(" CONFIG: archive dir: "+ args['archivedir'])
  logging.debug(" CONFIG: failed dir:  "+ args['faileddir'])
  logging.debug(" CONFIG: file pattern: "+ args['pattern'])
  logging.debug(" CONFIG: log level: "+ args['loglevel'] +" into "+ args['logfile'])
  print("> watching for incoming files in", args['watchdir'])

  # start monitoring directory for incoming sms files
  observer = Observer()
  observer.schedule(MyEventHandler(), args['watchdir'])
  observer.start()

  try:
    while True:
      time.sleep(1)
  except KeyboardInterrupt:
    observer.stop()

  observer.join()
Example #43
            payload = {"cmd": cmd, "action": action}
            payload = json.dumps(payload)
            headers = {
                "content-type": "application/json",
                "Authorization": "bearer " + token
            }
            conn = http.client.HTTPConnection(host="rcontrol", port=5000)
            conn.request("POST", "/cmd", payload, headers)
            res = conn.getresponse()
            data = res.read()
            conn.close()
        except Exception as e:
            print(e)


print('1', config.DevConfig.MAILBOX_PATH)
event_handler = MyFileSystemEventHandler()
observer = Observer()
observer.schedule(event_handler,
                  config.DevConfig.MAILBOX_PATH,
                  recursive=False)
observer.start()

catchterm = CatchTerm()
try:
    while not catchterm.kill_now:
        time.sleep(1)
except KeyboardInterrupt:
    pass
observer.stop()
observer.join()
Example #44
def create_viewer_watchdog(client, ipc_dir, local_project_root):
    """Sets up the Watchdog to monitor the files used to remotely call the viewers.

    Args:
        client (DockerClient): Docker client configured to the host environment.
        ipc_dir (str): Directory where the files used to signal to the Watchdog reside.
        local_project_root (str): Path of the project on the host.
    """
    for ipc in config.DOCKER_IPCS:
        ipc_callsite = os.path.join(local_project_root, config.IPC_ROOT_NAME, ipc)
        with open(ipc_callsite, "w"):
            os.utime(ipc_callsite, None)

    event_handler = ViewerHandler(local_project_root)
    observer = Observer()
    observer.schedule(event_handler, path=ipc_dir, recursive=False)
    observer.start()

    global container_name
    try:
        while True:
            time.sleep(1)
            container = client.containers.get(container_name)
            if container.status == "exited":
                observer.stop()
                break
            for line in container.logs(stream=True):
                print(line.decode("utf8").strip())
    except KeyboardInterrupt:
        container = client.containers.get(container_name)
        container.stop()
        observer.stop()
    observer.join()
Example #45
            if file_extension == ".pdf":  # Locates destination folder
                folder_destination = "C:\\DATA\\PDF_downloads\\"
            elif file_extension == ".txt":
                folder_destination = "C:\\DATA\\TXT_downloads\\"
            elif file_extension == ".jpg" or file_extension == ".png":
                folder_destination = "C:\\DATA\\IMAGE_downloads\\"
            elif file_extension == ".docx" or file_extension == ".doc":
                folder_destination = "C:\\DATA\\WORD_downloads\\"
            else:
                folder_destination = "C:\\DATA\\MISC_downloads\\"
            new_destination = folder_destination + filename  # Path to be moved too
            os.rename(src, new_destination)
            doc.write(" SUCCESS moved " + filename + " to " +
                      folder_destination + "\n")
            doc.close()


folder_to_track = "C:\\users\\DomRi\\Downloads"
folder_destination = "placeHolder_value"

event_handler = myHandler()  # initialises myHandler
my_observer = Observer()  # initialises observer (imported from watchdog)
my_observer.schedule(event_handler, folder_to_track, recursive=True)
my_observer.start()
try:
    while True:
        time.sleep(10)
except KeyboardInterrupt:
    my_observer.stop()
my_observer.join()
Example #46
        	choose_answer(index_min)

# Automatically select the answer
def choose_answer(num):
	# On the iPhone 6s the four option coordinates are: (370, 735) (370, 865) (370, 1000) (370, 1144)
	# Wait two seconds for the screen to refresh
	time.sleep(2)
	if num == 0:
		s.tap(370, 735)
	if num == 1:
		s.tap(370, 865)
	if num == 2:
		s.tap(370, 1000)
	if num == 3:
		s.tap(370, 1144)

if __name__ == "__main__":

    # Start watching for file changes
    observer = Observer()
    event_handler = FileEventHandler()
    observer.schedule(event_handler,"./question.hortor.net/question/fight/",True)
    observer.start()
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        observer.stop()
    observer.join()

Example #47
import sys
import time
import logging
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler



class MyHandler(PatternMatchingEventHandler):
    def on_modified(self, event):
        print('event type: ' + event.event_type + '  path : ' +  event.src_path)


if __name__ == "__main__":
    file_name = sys.argv[1] if len(sys.argv) > 1 else '.'
    file_name = '*' + file_name
    patterns = [file_name]
    print('file_name : ' + file_name)
    event_handler = MyHandler(patterns = patterns)
    observer = Observer()
    observer.schedule(event_handler, '.', recursive=False)
    observer.start()

    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        observer.stop()
    observer.join()
Example #48
    parser = argparse.ArgumentParser(description="Stream book options.")
    parser.add_argument("file", help="file to run", type=os.path.abspath)
    parser.add_argument("--nowatch",
                        help="only process the file",
                        action="store_true")
    parser.add_argument(
        "--nostreamlit",
        help="does not launch streamlit (no effect if already --nowatch)",
        action="store_true")

    args = parser.parse_args()
    abs_path = args.file
    directory = os.path.dirname(abs_path)
    event_handler = MyHandler()
    observer = Observer()

    stream_file = abs_path[:-3] + ".streambook.py"
    notebook_file = abs_path[:-3] + ".notebook.py"

    open(stream_file, "w").close()
    print("Streambook Daemon\n")

    print("Watching directory for changes:")
    print(f"\n {directory}")
    print()
    print("View Command")
    print(f"streamlit run  --server.runOnSave true {stream_file}")
    print()
    print("Notebook Execution Command")
    print(f"jupytext --to notebook --execute {notebook_file}")
Example #49
# CODE

from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
import time, subprocess as s
p = s.Popen(command.split())


class Handler(FileSystemEventHandler):
    def on_modified(self, event):
        global p
        print('MODIFIED', event.src_path)
        spl = event.src_path.split(delimiter)
        if spl[len(spl) - 1] in whitelist:
            print('MODIFIED', event.src_path, 'reloading!')
            p.kill()
            p = s.Popen(command.split())
            print('STARTED!')


observer = Observer()
observer.schedule(Handler(), path=path, recursive=True)
observer.start()

try:
    while True:
        time.sleep(0.1)
except KeyboardInterrupt:
    observer.stop()
observer.join()
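
The command, path, delimiter, and whitelist values referenced above are defined before the '# CODE' marker and are not part of the snippet. Purely illustrative placeholders, only to make the sketch self-contained:

import os

# Illustrative values only; the original configuration is not shown.
command = "python app.py"              # process to run and restart on changes
path = "."                             # directory tree to watch
delimiter = os.sep                     # separator used to split event paths
whitelist = ["app.py", "settings.py"]  # filenames that trigger a reload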
Example #50
    params_file = path_to_watch + r'\parameters.cfg'
    n_pixels, parameters = pixel_utils.load.configuration(params_file)
    lines = parameters['lines_per_image']

    print('Pixels = ', n_pixels)
    print('Lines = ', lines)

    # tfp = np.random.randn(lines, n_pixels)
    # shift = np.random.randn(lines, n_pixels)

    tfp = np.zeros([lines, n_pixels])
    shift = np.zeros([lines, n_pixels])

    # initialize event handler
    my_observer = Observer()
    my_event_handler = MyHandler(parameters)
    my_observer.schedule(my_event_handler, path_to_watch, recursive=False)
    my_observer.start()

    # initialize plotting
    plt.ion()

    fig, ax = plt.subplots(nrows=1,
                           ncols=2,
                           figsize=(12, 6),
                           tight_layout=True,
                           facecolor='white')

    tfp_ax = ax[0]
    shift_ax = ax[1]
Example #51
 def __init__(self):
     self.observer = Observer()
     for path in odoo.modules.module.ad_paths:
         _logger.info('Watching addons folder %s', path)
         self.observer.schedule(self, path, recursive=True)
Example #52
 def __init__(self):
     multiprocessing.Process.__init__(self)
     self.observer = Observer()
Example #53
class Site(FileSystemEventHandler):
    backup_timer = None

    # Execute a backup of this site.
    def backup(self):
        cnt = len(self.modified_files)
        sample = self.modified_files[:3]
        self.logger.info(
            f'[backup] triggered for {cnt} files, including: {sample}')
        self.backup_timer = None
        self.modified_files = []

        for mode in self.backup_modes:
            self.logger.info(f'[backup] mode {mode}')
            if mode == 'zip':
                fp = '/usr/local/%s' % self.cfg['fn_zip']
                cmd = ['cd %s &&' % self.cfg['site_dir'], 'zip', '-r']
                cmd += ['-x "%s"' % x for x in self.backup_exclude]
                if self.quiet: cmd.append('-q')
                cmd.append(fp)
                cmd.append('.')
                sh(' '.join(cmd))
                self.s3_copy(True)
                sh('rm -rf "%s"' % fp)
            elif mode == 'sync':
                self.s3_sync(True)
            else:
                self.logger.error(f'unknown backup mode: {mode}')
        self.logger.info('[backup] finished')

    # Sync the zip file up/down
    def s3_copy(self, up):
        if len(self.cfg['s3_bucket']) <= 0:
            raise Exception('s3_bucket required')
        fp = '/usr/local/%s' % self.cfg['fn_zip']
        s3_fp = "s3://%s/%s" % (self.cfg['s3_bucket'], self.cfg['fn_zip'])
        cmd = ['aws', 's3', 'cp']
        if self.quiet: cmd.append('--quiet')
        if up: cmd += [fp, s3_fp]
        else: cmd += [s3_fp, fp]
        return sh(' '.join(cmd))

    # Sync the local path up/down
    def s3_sync(self, up):
        if len(self.cfg['s3_bucket']) <= 0:
            raise Exception('s3_bucket required')
        quiet = not self.logger.isEnabledFor(logging.DEBUG)
        s3_path = "s3://%s/%s" % (self.cfg['s3_bucket'], self.cfg['site_name'])
        cmd = ['aws', 's3', 'sync']
        if up: cmd += [self.cfg['site_dir'], s3_path]
        else: cmd += [s3_path, self.cfg['site_dir']]
        cmd += ['--exclude "%s"' % x for x in self.backup_exclude]
        cmd.append('--delete')
        if self.quiet: cmd.append('--quiet')
        return sh(' '.join(cmd))

    # When watchdog receives a modification event, schedule a backup of the site.
    def on_any_event(self, event):
        if os.path.isdir(event.src_path): return
        if self.backup_ignore and self.backup_ignore.match(event.src_path):
            return
        if event.src_path in self.modified_files: return
        self.logger.debug('received file modification event for ' +
                          event.src_path)
        self.modified_files.append(event.src_path)
        if self.backup_timer: self.backup_timer.cancel()
        self.backup_timer = threading.Timer(float(self.cfg['backup_delay']),
                                            self.backup)
        self.backup_timer.start()

    # Use a templated config file and replace the variables, then append it to the destination file.
    def append_template_config(self, name, dst_fn):
        src_fn = "/usr/local/etc/templates/%s.conf.template" % name
        with open(src_fn, 'r') as src, open(dst_fn, 'a') as dst:
            cfg = src.read()
            for var_name in self.cfg:
                if self.cfg[var_name] and type(self.cfg[var_name]) is str:
                    cfg = cfg.replace("{{%s}}" % var_name.upper(),
                                      self.cfg[var_name])
                else:
                    cfg = cfg.replace("{{%s}}" % var_name.upper(), "")
            self.logger.debug(f'writing to {dst_fn}: {cfg}')
            dst.write(cfg)

    # Set up nginx & FPM for this site.
    def configure(self):
        self.logger.debug('configuring nginx, fpm, and docker')
        listen = self.cfg['server_port']
        if len(self.cfg['server_listen']) > 0:
            listen += ' ' + self.cfg['server_listen']
        nginx_cfg = OrderedDict({
            'listen': listen,
            'server_name': self.cfg['server_name'],
            'root': self.cfg['site_dir'],
            'index': 'index.php',
            'access_log': self.cfg['nginx_access_log'],
            'error_log': self.cfg['nginx_error_log'],
            'rewrite': self.cfg['rewrite'],
        })
        nginx_cfg = [
            f"    {k} {nginx_cfg[k]};" for k in nginx_cfg
            if len(nginx_cfg[k]) > 0
        ]
        self.cfg['nginx_server_config'] = "\n".join(nginx_cfg)
        self.append_template_config(
            "nginx", "/etc/nginx/conf.d/%s.conf" % self.cfg['site_name'])
        self.append_template_config(
            "fpm",
            "/usr/local/etc/php-fpm.d/zz-%s.conf" % self.cfg['site_name'])
        # self.append_template_config("docker", "/usr/local/etc/php-fpm.d/docker.conf")
        # self.append_template_config("zz-docker", "/usr/local/etc/php-fpm.d/zz-docker.conf")

    # Download any backups for this site
    def restore(self):
        mode = self.cfg['restore_mode'].lower()
        policy = self.cfg['restore_policy'].lower()
        if policy == 'missing':
            if os.path.isdir(self.cfg['site_dir']):
                self.logger.info(
                    'skipping restoration because the directory was already present'
                )
                return
        elif policy == 'never':
            self.logger.info('skipping restoration because policy=never')
            return
        elif policy != 'always':
            raise Exception(f'invalid restore policy {policy}')

        self.logger.info(f'restoring via {policy} {mode}')
        if mode == 'zip':
            fp = '/usr/local/%s' % self.cfg['fn_zip']
            ret = self.s3_copy(False)
            self.logger.info('copied zip? %s' % ret)
            if ret == 0:
                cmd = ['unzip']
                if self.quiet: cmd.append('-q')
                cmd.append('-o "%s"' % fp)
                cmd.append('-d "%s"' % self.cfg['site_dir'])
                sh(' '.join(cmd))
        elif mode == 'sync':
            self.s3_sync(False)
        elif len(mode) > 0:
            raise Exception(f'unknown restore mode {mode}')
        sh('mkdir -p %s' % self.cfg['site_dir'])
        sh('chown -Rf www-data.www-data %s' % self.cfg['site_dir'])

    # Install Wordpress (or tweak its settings, if necessary)
    def install(self):
        wp_cfg_fp = "%s/wp-config.php" % self.cfg['site_dir']
        if os.path.isfile(wp_cfg_fp):
            self.logger.debug('configuring wordpress')
            with open(wp_cfg_fp, 'r') as file:
                wp_cfg = file.read()
            with open(wp_cfg_fp, 'w') as file:
                for var_name in self.cfg:
                    if not var_name.startswith(
                            'wordpress_') or not self.cfg[var_name]:
                        continue
                    wp_name = var_name[len('wordpress_'):].upper()
                    self.logger.debug(
                        f'configure wordpress variable {wp_name}')
                    val = self.cfg[var_name]
                    wp_cfg = re.sub(rf"'{wp_name}',.*'.*'",
                                    f"'{wp_name}', '{val}'", wp_cfg)
                file.write(wp_cfg)
        else:
            sh('cd %s && /usr/local/bin/wp-entrypoint.sh php-fpm' %
               self.cfg['site_dir'])

    def watch_for_backup(self):
        self.logger.info('watching for file changes to trigger backups...')
        self.observer = Observer()
        self.observer.schedule(self, path=self.cfg['site_dir'], recursive=True)
        self.observer.daemon = True
        self.observer.start()

    def __init__(self, site_name, fpm_port):
        # Set up the default config:
        self.cfg = {
            'fpm_port': str(fpm_port),
            'server_port': "80",
            'server_listen': "",
            'server_name': '',
            'fastcgi_params': '',
            'rewrite': '',
            'site_dir': "/var/www/html/%s" % site_name,
            'fn_zip': '%s.zip' % site_name,
            'wordpress_db_host': None,
            'wordpress_db_user': None,
            'wordpress_db_password': None,
            'wordpress_db_name': None,
            'wordpress_table_prefix': None,
        }
        self.cfg.update(default_cfg)
        self.cfg = load_config_vars(
            self.cfg, '/etc/multipress/sites/%s.yaml' % site_name, site_name)
        self.cfg['site_name'] = site_name
        self.logger = logging.getLogger(site_name)
        self.logger.setLevel(self.cfg['log_level'])

        clean = [
            f"/etc/nginx/conf.d/{site_name}.conf",
            f"/usr/local/etc/php-fpm.d/{site_name}.conf"
        ]
        for fp in clean:
            if os.path.isfile(fp):
                os.remove(fp)

        if len(self.cfg['server_name']) <= 0:
            server_name = f'{site_name}.com www.{site_name}.com'
            self.logger.info(
                f'no server_name for {site_name}; assuming it is "{server_name}"'
            )
            self.cfg['server_name'] = server_name

        self.backup_modes = str2list(self.cfg['backup_mode'])
        self.backup_ignore = re.compile(self.cfg['backup_ignore']) if len(
            self.cfg['backup_ignore']) > 0 else None
        self.backup_exclude = str2list(self.cfg['backup_exclude'])
        self.quiet = not self.logger.isEnabledFor(logging.DEBUG)
        self.modified_files = []
        self.configure()
        self.restore()
        self.install()

        if len(self.backup_modes) > 0: self.watch_for_backup()
        self.logger.info(
            f'successfully configured {site_name} on port {fpm_port}')
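
A minimal sketch of the debounce used by on_any_event() above, shown in isolation: every file event cancels and restarts a timer, so the backup callback only fires after a quiet period. DebouncedHandler, the delay value, and the callback are illustrative names, not part of the original class.

import threading

from watchdog.events import FileSystemEventHandler
from watchdog.observers import Observer


class DebouncedHandler(FileSystemEventHandler):
    def __init__(self, delay, callback):
        self.delay = delay
        self.callback = callback
        self._timer = None

    def on_any_event(self, event):
        if event.is_directory:
            return
        # Restart the countdown on every new event; only the last timer fires.
        if self._timer:
            self._timer.cancel()
        self._timer = threading.Timer(self.delay, self.callback)
        self._timer.start()


observer = Observer()
observer.schedule(DebouncedHandler(5.0, lambda: print('running backup')),
                  '.', recursive=True)
observer.start()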
Ejemplo n.º 54
0
 def __init__(self, src_path):
     self.__src_path = src_path
     self.__event_handler = ImagesEventHandler()
     self.__event_observer = Observer()
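
This fragment only shows the wrapper's constructor; below is a hedged sketch of the observer lifecycle such a class typically manages. LoggingEventHandler stands in for the ImagesEventHandler that is not shown here, and './images' is a placeholder path.

import time

from watchdog.events import LoggingEventHandler
from watchdog.observers import Observer

observer = Observer()
observer.schedule(LoggingEventHandler(), './images', recursive=False)
observer.start()
try:
    while True:
        time.sleep(1)
except KeyboardInterrupt:
    observer.stop()
observer.join()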
Ejemplo n.º 55
0
class ContainerNotifier(object):
    """
    Notifies the container about file changes in the bound host directory.
    """
    def __init__(self, container, host_dir, container_dir):
        """
        Initialize a new instance of ContainerNotifier

        Args:
            container: Container
            host_dir (str): Host directory
            container_dir (str): Container directory
        """
        self.container = container
        self.host_dir = host_dir
        self.container_dir = container_dir

        event_handler = PatternMatchingEventHandler(ignore_directories=True)
        handler = self.__change_handler
        event_handler.on_created = handler
        event_handler.on_moved = handler
        event_handler.on_modified = handler

        self.ignore_file_pattern = r'(\.idea|\.git|node_modules|___jb_old___|___jb_tmp___)'
        if self.ignore_file_pattern:
            self.ignore_file_pattern_compiled = re.compile(
                self.ignore_file_pattern)
        else:
            self.ignore_file_pattern_compiled = None
        self.observer = Observer()
        self.observer.schedule(event_handler, host_dir, recursive=True)
        self.observer.start()

    def __str__(self):
        return '%s -> %s:%s' % (self.host_dir, self.container.name,
                                self.container_dir)

    def __change_handler(self, event):
        host_path = event.dest_path if hasattr(event,
                                               'dest_path') else event.src_path
        relative_host_path = relpath(host_path,
                                     self.host_dir).replace('\\', '/')
        absolute_path = posixpath.join(self.container_dir, relative_host_path)

        # Notify unless an ignore pattern is configured and it matches the path.
        if not (self.ignore_file_pattern_compiled
                and self.is_ignored(relative_host_path)):
            self.notify(absolute_path)

    def is_ignored(self, path):
        match = bool(re.search(self.ignore_file_pattern_compiled, path))
        return match

    # @throttle(mindelta=1, groupByArgIndex=1)
    @debounce(wait=1)
    def notify(self, absolute_path):
        """
        Notify container about change in file.

        Args:
            absolute_path (str): Absolute path of changed file.
        """

        logging.info('Notifying container %s about change in %s',
                     self.container.name, absolute_path)
        try:
            permissions = self.container.exec_run(
                ['stat', '-c', '%a', absolute_path], privileged=True)
            if permissions.exit_code != 0:
                raise NonZeroExitError(permissions.exit_code)
            permissions = permissions.output.decode('utf-8').strip()
            response = self.container.exec_run(
                ['chmod', permissions, absolute_path], privileged=True)
            if response.exit_code != 0:
                raise NonZeroExitError(response.exit_code)
            if response:
                logging.info(response.output.decode('utf-8').strip())
        except docker.errors.APIError:
            logging.error('Failed to notify container %s about change in %s',
                          self.container.name,
                          absolute_path,
                          exc_info=True)
        except NonZeroExitError as exception:
            logging.error('Exec run returned non-zero exit code: %s',
                          exception.exit_code)

    def stop(self):
        """
        Stop observing host directory.
        """

        self.observer.stop()
        self.observer.join()
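
A hedged usage sketch for ContainerNotifier with the docker SDK; the container name 'web' and the directory pair are placeholders.

import docker

client = docker.from_env()
container = client.containers.get('web')
notifier = ContainerNotifier(container, './src', '/app/src')
print(notifier)  # e.g. "./src -> web:/app/src"
# ... later, on shutdown:
notifier.stop()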
Ejemplo n.º 57
0
import os
import sys

sys.path.append(os.path.join(os.path.dirname(__file__), "../"))

from pymongo import MongoClient
from watchdog.observers import Observer
from file_monitor import FileMonitor
from web_server import WebServer

if __name__ == "__main__":
    client = MongoClient()
    database = client.get_database("test")
    # Keep the client open: the database handle is used by the web server and
    # the file monitor below, and operations would fail after close().
    server = WebServer(database).web_server
    observer = Observer()
    file_monitor = FileMonitor(database, "./pages", True)
    observer.schedule(file_monitor, path="./pages", recursive=True)
    observer.start()
    server.run("localhost", 4444, debug=True)
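
A small hedged variant of the last line above: server.run() blocks until the web server exits, so wrapping it in try/finally lets the observer created above shut down cleanly.

    try:
        server.run("localhost", 4444, debug=True)
    finally:
        observer.stop()
        observer.join()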
Ejemplo n.º 58
0
class CharacterDetector(FileSystemEventHandler):
    def __init__(self, mainWindow, characterMenu):
        self.mainWindow = mainWindow
        self.characterMenu = characterMenu
        self.observer = Observer()
        
        if (platform.system() == "Windows"):
            import win32com.client
            oShell = win32com.client.Dispatch("Wscript.Shell")
            self.path = oShell.SpecialFolders("MyDocuments") + "\\EVE\\logs\\Gamelogs\\"
        else:
            self.path = os.environ['HOME'] + "/Documents/EVE/logs/Gamelogs/"
        
        self.menuEntries = []
        self.logReaders = _logReaders
        self.selectedIndex = IntVar()
        self.playbackLogReader = None
        
        try:
            oneDayAgo = datetime.datetime.now() - datetime.timedelta(hours=24)
            fileList = sorted(os.listdir(self.path), key=lambda file: os.stat(os.path.join(self.path, file)).st_mtime)
            for filename in fileList:
                timeString = re.sub(r'_[0-9]*\.txt$', '', filename)
                try:
                    fileTime = datetime.datetime.strptime(timeString, "%Y%m%d_%H%M%S")
                except ValueError:
                    continue
                if (fileTime >= oneDayAgo):
                    self.addLog(self.path + filename)
        
            self.selectedIndex.set(0)
            
            if len(self.menuEntries) == 0:
                self.characterMenu.menu.add_command(label='No character logs detected for past 24 hours', state=tk.DISABLED)
            
            self.observer.schedule(self, self.path, recursive=False)
            self.observer.start()
        except FileNotFoundError:
            logging.error('EVE logs directory not found, path checked: ' + self.path)
            messagebox.showerror("Error", "Can't find the EVE logs directory.  Do you have EVE installed?  \n\n" +
                                 "Path checked: " + self.path + "\n\n" +
                                 "PELD will continue to run, but will not track EVE data.")
            self.characterMenu.menu.add_command(label='No EVE installation detected', state=tk.DISABLED)

        self.characterMenu.menu.add_separator()
        from settings.overviewSettings import OverviewSettingsWindow
        self.characterMenu.menu.add_command(label='Open overview settings', command=OverviewSettingsWindow)
        
    def on_created(self, event):
        self.addLog(event.src_path)
        
    def addLog(self, logPath):
        logging.info('Processing log file: ' + logPath)
        # Use a context manager so the log file is closed even when the
        # BadLogException path returns early.
        with open(logPath, 'r', encoding="utf8") as log:
            log.readline()
            log.readline()
            characterLine = log.readline()
        try:
            character, language = ProcessCharacterLine(characterLine)
        except BadLogException:
            logging.info("Log " + logPath + " is not a character log.")
            return
        
        if len(self.menuEntries) == 0:
            self.characterMenu.menu.delete(0)
        
        for i in range(len(self.menuEntries)):
            if (character == self.menuEntries[i]):
                try:
                    newLogReader = LogReader(logPath, self.mainWindow)
                except BadLogException:
                    return
                self.logReaders[i] = newLogReader
                return
        
        try:
            newLogReader = LogReader(logPath, self.mainWindow)
        except BadLogException:
            return
        self.logReaders.append(newLogReader)
        self.characterMenu.menu.insert_radiobutton(0, label=character, variable=self.selectedIndex, 
                                                value=len(self.menuEntries), command=self.catchupLog)
        self.menuEntries.append(character)
        
    def stop(self):
        self.observer.stop()
        
    def playbackLog(self, logPath):
        try:
            self.mainWindow.animator.dataQueue = None
            self.playbackLogReader = PlaybackLogReader(logPath, self.mainWindow)
            self.mainWindow.addPlaybackFrame(self.playbackLogReader.startTimeLog, self.playbackLogReader.endTimeLog)
        except BadLogException:
            self.playbackLogReader = None
            
    def stopPlayback(self):
        self.playbackLogReader = None
        self.mainWindow.removePlaybackFrame()
        
    def readLog(self):
        if (self.playbackLogReader):
            return self.playbackLogReader.readLog()
        elif (len(self.menuEntries) > 0):
            return self.logReaders[self.selectedIndex.get()].readLog()
        else:
            return _emptyResult
    
    def catchupLog(self):
        self.mainWindow.animator.catchup()
        try:
            self.logReaders[self.selectedIndex.get()].catchup()
        except IndexError:
            pass
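
A small sketch of the file-name filter used in __init__ above: strip the trailing "_<number>.txt", parse the remaining timestamp, and keep only logs newer than 24 hours. The sample file name is made up.

import datetime
import re

filename = '20240101_153000_12345.txt'  # hypothetical EVE gamelog file name
timeString = re.sub(r'_[0-9]*\.txt$', '', filename)
fileTime = datetime.datetime.strptime(timeString, '%Y%m%d_%H%M%S')
recent = fileTime >= datetime.datetime.now() - datetime.timedelta(hours=24)
print(timeString, fileTime, recent)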
Ejemplo n.º 59
0
class AstroLauncher():
    """ Starts a new instance of the Server Launcher"""
    @dataclasses.dataclass
    class LauncherConfig():
        DisableAutoUpdate: bool = False
        ServerStatusFrequency: float = 2
        PlayfabAPIFrequency: float = 2
        DisableBackupRetention: bool = False
        BackupRetentionPeriodHours: float = 76
        BackupRetentionFolderLocation: str = r"Astro\Saved\Backup\LauncherBackups"
        EnableAutoRestart: bool = False
        AutoRestartEveryHours: float = 24
        AutoRestartSyncTimestamp: str = "00:00"
        DisableNetworkCheck: bool = False
        DisableWebServer: bool = False
        WebServerPort: int = 5000

        def __post_init__(self):
            # pylint: disable=no-member
            hasError = False
            for field, data in self.__dataclass_fields__.items():
                try:
                    self.__dict__[field] = data.type(self.__dict__[field])
                except ValueError:
                    hasError = True
                    AstroLogging.logPrint(
                        f"INI error: {field} must be of type {data.type.__name__}",
                        "critical")
            if hasError:
                AstroLogging.logPrint("Fix your launcher config file!",
                                      "critical")
                sys.exit()

    class SaveHandler(FileSystemEventHandler):
        def __init__(self, launcher):
            self.launcher = launcher
            self.astroPath = self.launcher.astroPath
            self.moveToPath = self.launcher.launcherConfig.BackupRetentionFolderLocation
            super().__init__()

        def on_modified(self, event):
            time.sleep(1)
            dirName = os.path.dirname(event.src_path)
            fileName = [
                f for f in os.listdir(dirName)
                if os.path.isfile(os.path.join(dirName, f))
            ][0]
            AstroLogging.logPrint(f"Server saved. {fileName}")
            self.launcher.saveObserver.stop()

    class BackupHandler(FileSystemEventHandler):
        def __init__(self, launcher):
            self.launcher = launcher
            self.astroPath = self.launcher.astroPath
            self.moveToPath = self.launcher.launcherConfig.BackupRetentionFolderLocation
            self.retentionPeriodHours = self.launcher.launcherConfig.BackupRetentionPeriodHours
            super().__init__()

        def on_modified(self, event):
            #AstroLogging.logPrint("File in save directory changed")
            path = os.path.join(self.astroPath, self.moveToPath)
            try:
                if not os.path.exists(path):
                    os.makedirs(path)
            except Exception as e:
                AstroLogging.logPrint(e, "error")
            now = time.time()
            try:
                for f in os.listdir(path):
                    fpath = os.path.join(path, f)
                    if os.stat(fpath).st_mtime < (
                            now - (self.retentionPeriodHours * 60 * 60)):
                        os.remove(fpath)
            except Exception as e:
                AstroLogging.logPrint(e, "error")
            AstroLogging.logPrint("Copying backup to retention folder.")
            time.sleep(1)
            try:
                dirName = os.path.dirname(event.src_path)
                newFile = os.path.join(dirName, [
                    f for f in os.listdir(dirName)
                    if os.path.isfile(os.path.join(dirName, f))
                ][0])
                #AstroLogging.logPrint(newFile, "debug")
                shutil.copy2(newFile, path)
                #AstroLogging.logPrint(copiedFile, "debug")
            except FileNotFoundError as e:
                AstroLogging.logPrint(e, "error")
            except Exception as e:
                AstroLogging.logPrint(e, "error")
            self.launcher.backupObserver.stop()

    def __init__(self,
                 astroPath,
                 launcherINI="Launcher.ini",
                 disable_auto_update=None):
        # check if path specified
        if astroPath is not None:
            if os.path.exists(os.path.join(astroPath, "AstroServer.exe")):
                self.astroPath = astroPath
            else:
                print("Specified path does not contain the server executable")
                time.sleep(5)

        # check if executable in current directory
        elif os.path.exists(os.path.join(os.getcwd(), "AstroServer.exe")):
            self.astroPath = os.getcwd()

        # fallback to automatic detection (experimental, do NOT rely on it)
        else:
            try:
                autoPath = AstroAPI.getInstallPath()
                if os.path.exists(os.path.join(autoPath, "AstroServer.exe")):
                    self.astroPath = autoPath
            except:
                AstroLogging.logPrint("Unable to find AstroServer.exe!",
                                      "critical")
                return

        AstroLogging.setup_logging(self.astroPath)
        self.launcherINI = launcherINI
        self.launcherConfig = self.LauncherConfig()
        self.refresh_launcher_config()
        if disable_auto_update is not None:
            self.launcherConfig.DisableAutoUpdate = disable_auto_update
        self.version = "v1.4.6"
        self.latestURL = "https://github.com/ricky-davis/AstroLauncher/releases/latest"
        self.isExecutable = os.path.samefile(sys.executable, sys.argv[0])
        self.headers = AstroAPI.base_headers
        self.DaemonProcess = None
        self.saveObserver = None
        self.backupObserver = None
        self.DedicatedServer = AstroDedicatedServer(self.astroPath, self)

        AstroLogging.logPrint(
            f"AstroLauncher - Unofficial Dedicated Server Launcher {self.version}"
        )
        AstroLogging.logPrint(
            "If you encounter any bugs please open a new issue at:")
        AstroLogging.logPrint(
            "https://github.com/ricky-davis/AstroLauncher/issues")
        AstroLogging.logPrint(
            "To safely stop the launcher and server press CTRL+C")
        self.check_for_update()

        AstroLogging.logPrint("Starting a new session")

        if not self.launcherConfig.DisableNetworkCheck:
            AstroLogging.logPrint("Checking the network configuration..")
            self.check_network_config()

        self.headers['X-Authorization'] = AstroAPI.generate_XAUTH(
            self.DedicatedServer.settings.ServerGuid)

        self.save_reporting()

        if not self.launcherConfig.DisableBackupRetention:
            self.backup_retention()
            AstroLogging.logPrint("Backup retention started")
        # setup queue for data exchange
        if not self.launcherConfig.DisableWebServer:
            self.webServerQueue = queue.SimpleQueue()
            self.webServerQueue.put(self.DedicatedServer)

            # start http server
            AstroWebServer.startWebServer(self.webServerQueue, self)
            AstroLogging.logPrint(
                f"HTTP Server started at 127.0.0.1:{self.launcherConfig.WebServerPort}"
            )

        atexit.register(self.DedicatedServer.kill_server,
                        reason="Launcher shutting down",
                        save=True)
        self.start_server()

    def save_reporting(self):
        if self.saveObserver:
            if not self.saveObserver.is_alive():
                self.saveObserver = None
                self.save_reporting()
        else:
            self.saveObserver = Observer()
            saveGamePath = r"Astro\Saved\SaveGames"
            watchPath = os.path.join(self.astroPath, saveGamePath)
            try:
                if not os.path.exists(watchPath):
                    os.makedirs(watchPath)
            except Exception as e:
                AstroLogging.logPrint(e)
            self.saveObserver.schedule(self.SaveHandler(self), watchPath)
            self.saveObserver.start()

    def backup_retention(self):
        if self.backupObserver:
            if not self.backupObserver.is_alive():
                self.backupObserver = None
                self.backup_retention()
        else:
            self.backupObserver = Observer()
            backupSaveGamePath = r"Astro\Saved\Backup\SaveGames"
            watchPath = os.path.join(self.astroPath, backupSaveGamePath)
            try:
                if not os.path.exists(watchPath):
                    os.makedirs(watchPath)
            except Exception as e:
                AstroLogging.logPrint(e)
            self.backupObserver.daemon = True

            self.backupObserver.schedule(self.BackupHandler(self), watchPath)
            self.backupObserver.start()

    def refresh_launcher_config(self):
        field_names = set(f.name
                          for f in dataclasses.fields(self.LauncherConfig))
        cleaned_config = {
            k: v
            for k, v in self.get_launcher_config().items() if k in field_names
        }
        self.launcherConfig = dataclasses.replace(self.launcherConfig,
                                                  **cleaned_config)

        config = MultiConfig()
        config.read_dict({"AstroLauncher": cleaned_config})
        with open(self.launcherINI, 'w') as configfile:
            config.write(configfile)

    def get_launcher_config(self):
        baseConfig = {
            "AstroLauncher": dataclasses.asdict(self.LauncherConfig())
        }
        config = MultiConfig().baseline(self.launcherINI, baseConfig)
        # print(settings)
        settings = config.getdict()['AstroLauncher']
        return settings

    def check_for_update(self):
        try:
            url = "https://api.github.com/repos/ricky-davis/AstroLauncher/releases/latest"
            data = ((requests.get(url)).json())
            latestVersion = data['tag_name']
            if latestVersion != self.version:
                AstroLogging.logPrint(
                    f"UPDATE: There is a newer version of the launcher out! {latestVersion}"
                )
                AstroLogging.logPrint(f"Download it at {self.latestURL}")
                if self.isExecutable and not self.launcherConfig.DisableAutoUpdate:
                    self.autoupdate(data)
        except:
            pass

    def autoupdate(self, data):
        x = data
        downloadFolder = os.path.dirname(sys.executable)
        for fileObj in x['assets']:
            downloadURL = fileObj['browser_download_url']
            fileName = (os.path.splitext(fileObj['name'])[0])
            downloadPath = os.path.join(downloadFolder, fileName)

            downloadCMD = [
                "powershell", '-executionpolicy', 'bypass', '-command',
                'Write-Host "Starting download of latest AstroLauncher.exe..";',
                'wait-process',
                str(os.getpid()), ';',
                '[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;',
                'Invoke-WebRequest', f"'{downloadURL}'", "-OutFile",
                f"'{downloadPath + '_new.exe'}'", ';', "Move-Item", "-path",
                f"'{downloadPath + '_new.exe'}'", "-destination",
                f"'{downloadPath + '.exe'}'", "-Force;", 'Start-Process',
                f"'{downloadPath + '.exe'}' --noupdate"
            ]
            # print(' '.join(downloadCMD))
            subprocess.Popen(downloadCMD,
                             shell=True,
                             creationflags=subprocess.DETACHED_PROCESS)
        time.sleep(2)
        self.DedicatedServer.kill_server("Auto-Update")

    def start_server(self):
        """
            Starts the Dedicated Server process and waits for it to be registered
        """
        self.DedicatedServer.status = "starting"
        self.DedicatedServer.busy = False
        oldLobbyIDs = self.DedicatedServer.deregister_all_server()
        AstroLogging.logPrint("Starting Server process...")
        if self.launcherConfig.EnableAutoRestart:
            AstroLogging.logPrint(
                f"Next restart is at {self.DedicatedServer.nextRestartTime}")
        time.sleep(5)
        startTime = time.time()
        self.DedicatedServer.start()
        self.DaemonProcess = AstroDaemon.launch(
            executable=self.isExecutable,
            consolePID=self.DedicatedServer.process.pid)

        # Wait for server to finish registering...
        while not self.DedicatedServer.registered:
            try:
                serverData = (AstroAPI.get_server(
                    self.DedicatedServer.ipPortCombo, self.headers))
                serverData = serverData['data']['Games']
                lobbyIDs = [x['LobbyID'] for x in serverData]
                if len(set(lobbyIDs) - set(oldLobbyIDs)) == 0:
                    time.sleep(self.launcherConfig.PlayfabAPIFrequency)
                else:
                    now = time.time()
                    if now - startTime > 15:
                        self.DedicatedServer.registered = True
                        del oldLobbyIDs
                        self.DedicatedServer.LobbyID = serverData[0]['LobbyID']

                if self.DedicatedServer.process.poll() is not None:
                    AstroLogging.logPrint(
                        "Server was forcefully closed before registration. Exiting...."
                    )
                    return False
            except KeyboardInterrupt:
                self.DedicatedServer.kill_server("Launcher shutting down")
            except:
                AstroLogging.logPrint(
                    "Failed to check server. Probably hit rate limit. Backing off and trying again..."
                )
                self.launcherConfig.PlayfabAPIFrequency += 1
                time.sleep(self.launcherConfig.PlayfabAPIFrequency)

        doneTime = time.time()
        elapsed = doneTime - startTime
        AstroLogging.logPrint(
            f"Server ready with ID {self.DedicatedServer.LobbyID}. Took {round(elapsed,2)} seconds to register."
        )
        self.DedicatedServer.status = "ready"
        self.DedicatedServer.server_loop()

    def check_network_config(self):
        networkCorrect = ValidateSettings.test_network(
            self.DedicatedServer.settings.PublicIP,
            int(self.DedicatedServer.settings.Port), False)
        if networkCorrect:
            AstroLogging.logPrint("Server network configuration good!")
        else:
            AstroLogging.logPrint(
                "I can't seem to validate your network settings..", "warning")
            AstroLogging.logPrint(
                f"Make sure to Port Forward ({self.DedicatedServer.settings.Port} UDP) and enable NAT Loopback",
                "warning")
            AstroLogging.logPrint("If nobody can connect, Port Forward.",
                                  "warning")
            AstroLogging.logPrint(
                "If others are able to connect, but you aren't, enable NAT Loopback.",
                "warning")

        rconNetworkCorrect = not (ValidateSettings.test_network(
            self.DedicatedServer.settings.PublicIP,
            int(self.DedicatedServer.settings.ConsolePort), True))
        if rconNetworkCorrect:
            AstroLogging.logPrint("Remote Console network configuration good!")
        else:
            AstroLogging.logPrint(
                f"SECURITY ALERT: Your console port ({self.DedicatedServer.settings.ConsolePort}) is Port Forwarded!",
                "warning")
            AstroLogging.logPrint(
                "SECURITY ALERT: This allows anybody to control your server.",
                "warning")
            AstroLogging.logPrint(
                "SECURITY ALERT: Disable this ASAP to prevent issues.",
                "warning")
            time.sleep(5)
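
A hedged sketch of the "one-shot watcher" pattern used by save_reporting() and backup_retention() above: the handler stops its own observer after handling an event, and the caller re-arms it only when the previous observer is no longer alive. OneShotHandler and ensure_watcher are illustrative names.

from watchdog.events import FileSystemEventHandler
from watchdog.observers import Observer


class OneShotHandler(FileSystemEventHandler):
    def __init__(self, observer, callback):
        self.observer = observer
        self.callback = callback

    def on_modified(self, event):
        self.callback(event.src_path)
        self.observer.stop()  # the watcher retires itself after one change


def ensure_watcher(current, path, callback):
    # Mirror the is_alive() check above: re-create the observer only if needed.
    if current is not None and current.is_alive():
        return current
    observer = Observer()
    observer.schedule(OneShotHandler(observer, callback), path)
    observer.start()
    return observer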
Ejemplo n.º 60
0
            o = self.template.render(**note)
            f.write(o.encode('utf8'))

    def throttle_updates(self, timestamp):
        # Multiple FileModified events can fire, so only update
        # once per second.
        delta = timestamp - self.updatetime
        if delta.seconds > 0:
            self.updatepreview()
            print("{0}: Updated preview.html\n".format(
                timestamp.strftime("%H:%M:%S")))
            self.updatetime = datetime.datetime.now()

    def on_modified(self, event):
        self.throttle_updates(datetime.datetime.now())


handler = Handler(args.notesfile)
observer = Observer()
for notedir in notes_dirs:
    observer.schedule(handler, path=notedir, recursive=False)
observer.start()
print(
    "Updating preview.html every time data is modified, press Ctrl-C to end.")
try:
    while True:
        time.sleep(1)
except KeyboardInterrupt:
    observer.stop()
observer.join()
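
A minimal sketch of the throttle used by throttle_updates() above: bursts of FileModified events collapse into at most one rebuild per second. The rebuild callback and the Throttler name are placeholders.

import datetime


class Throttler:
    def __init__(self, rebuild, min_interval=1.0):
        self.rebuild = rebuild
        self.min_interval = min_interval
        self.last_run = datetime.datetime.min

    def on_modified(self, event):
        now = datetime.datetime.now()
        if (now - self.last_run).total_seconds() >= self.min_interval:
            self.rebuild()
            self.last_run = now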