Example #1
0
 def __init__(self, browser: str, sid: str, token: str, to: str, from_: str,
              output_dir: str):
     """Set up the downloader: load browser cookies, build the notifier.

     browser -- cookie source: 'chrome', 'safari' or 'firefox'
     sid, token, to, from_ -- credentials/addresses forwarded to Notifier
     output_dir -- directory downloads are written to
     """
     self.__browser = browser
     # Populates self.__cj from the selected browser's cookie store.
     self.__load_cookie_jar()
     self.__output_dir = output_dir
     self.__notifier = Notifier(sid, token, to, from_)
     # Serializes download() calls so only one check runs at a time.
     self.__lock = Lock()
Example #2
0
 def __init__(self, dimension):
     """initialize a new problem

     dimension -- dimensionality of the problem (stored as-is)
     """
     # Explicit superclass initialisation (multiple inheritance, no super()).
     Notifier.__init__(self)
     Listener.__init__(self)
     self.dimension = dimension
     # prototype values -- presumably keyed per variable; confirm with callers
     self.prototype = {}
     self.cg = ConstraintGraph()
Example #3
0
 def __init__(self, methodclasses):
     """Create a new solver, using the given subclasses of ClusterMethod."""
     # init superclasses
     Notifier.__init__(self)
     # store arguments
     self._methodclasses = methodclasses
     # Materialize with list(): on Python 3 filter() returns a one-shot
     # iterator, so storing it directly would leave these collections
     # silently empty after their first traversal.  list() is a no-op
     # change on Python 2, where filter() already returns a list.
     self._pattern_methods = list(filter(lambda m: hasattr(m,"patterngraph"),self._methodclasses))
     self._handcoded_methods = list(filter(lambda m: hasattr(m,"handcoded_match"),self._methodclasses))
     self._incremental_methods = list(filter(lambda m: hasattr(m,"incremental_matcher"),self._methodclasses))
     # init instance vars
     self._graph = Graph()
     #self._graph.add_vertex("_root")
     # self._graph.add_vertex("_toplevel")
     self._graph.add_vertex("_variables")
     self._graph.add_vertex("_clusters")
     self._graph.add_vertex("_methods")
     # queue of new objects to process
     self._new = []
     self._mg = MethodGraph()
     # add prototype_selection boolean var to method graph
     self._prototype_selection_var = "_prototype_selection_enabled"
     self._mg.add_variable(self._prototype_selection_var)
     self._mg.set(self._prototype_selection_var, True)
     # store map of selection_constraints to SelectionMethod (or None)
     self._selection_method = {}
     # store root cluster (will be assigned when first cluster added)
     self._rootcluster = None
     # an incrementally updated toplevel set
     self._toplevel = MutableSet()
     # incrementally updated set of applicable methods; materialized too,
     # since Union(*...) below consumes the map() while we keep a reference
     self._incremental_matchers = list(map(lambda method: method.incremental_matcher(self), self._incremental_methods))
     #print "incremental matchers:",self._incremental_matchers
     self._applicable_methods = Union(*self._incremental_matchers)
Example #4
0
    def __init__(self, client, email_info, owner):
        """Initialise the e-mail notifier from an ``email_info`` mapping."""
        Notifier.__init__(self, client, email_info, owner)
        # Pull the optional SMTP settings out of the mapping; each missing
        # entry defaults to None.  "from" is stored as ``frm`` because
        # ``from`` is a Python keyword.
        for key, attr in (("from", "frm"), ("name", "name"),
                          ("smtp", "smtp"), ("username", "username"),
                          ("password", "password")):
            setattr(self, attr, email_info.get(key, None))
Example #5
0
 def __init__(self):
     """Create a new, empty ConstraintGraph"""
     Notifier.__init__(self)
     self._variables = {}
     """A set of variables"""
     self._constraints = {}
     """A set of constraints"""
     self._graph = Graph()
     """A graph for fast navigation. The graph contains an
Example #6
0
 def __init__(self):
     """Create a new, empty ConstraintGraph"""
     Notifier.__init__(self)
     self._variables = {}
     """A set of variables"""
     self._constraints = {}
     """A set of constraints"""
     self._graph = Graph()
     """A graph for fast navigation. The graph contains an
Example #7
0
    def set_notifier(self, pb_token, name=None):
        """
        Add a pushbullet notifier to the job.
        There can only be one notifier.

        """
        # Drop any existing notifier before attaching a fresh one.
        self.remove_notifier()
        label = name if name is not None else "%s - %s" % (self.jobname, self.jobid)
        self.notifier = Notifier(label, pb_token)
Example #8
0
    def __init__(self, id):
        """Initialise OpenALPR, the notifier and the data source for camera *id*."""
        # Region "eu"; config and runtime-data paths are hard-coded.
        alpr = Alpr("eu", "/etc/openalpr/openalpr.conf", "../runtime_data")
        if not alpr.is_loaded():
            print("Error loading OpenALPR")
            # Abort the process: nothing works without the ALPR engine.
            exit()
        # Keep the 20 best plate candidates per recognition.
        alpr.set_top_n(20)

        self.alpr = alpr
        self.notifier = Notifier()
        self.id = id
        self.source = DataSource(self)
Example #9
0
 def __init__(self, graph=None):
     """Create a graph, optionally copying vertices and edges from *graph*."""
     Notifier.__init__(self)
     self._dict = {}
     """the edges are stored in a dictionary of dictionaries"""
     self._reverse = {}
     """the reverse graph is stored here"""
     # copy input graph
     if graph:
         for v in graph.vertices():
             self.add_vertex(v)
         for e in graph.edges():
             (v,w) = e
             # copy the edge value along with the edge itself
             self.set(v,w,graph.get(v,w))
Example #10
0
 def __init__(self, graph=None):
     """Create a new graph; if *graph* is given, copy its vertices and edges."""
     Notifier.__init__(self)
     # forward edges: vertex -> {vertex: value}
     self._dict = {}
     """the edges are stored in a dictionary of dictionaries"""
     # reversed adjacency, kept in sync for inbound lookups
     self._reverse = {}
     """the reverse graph is stored here"""
     # replicate the contents of the source graph, if any
     if graph:
         for vertex in graph.vertices():
             self.add_vertex(vertex)
         for (tail, head) in graph.edges():
             self.set(tail, head, graph.get(tail, head))
Example #11
0
 def __init__(self):
     """Load persisted settings from GConf (or built-in defaults) and wire
     change listeners onto the notifier keys."""
     # Notifier #
     self.notifier = Notifier(self.NOTIFIER_KEYS)
     # Re-export the notifier's interface on this object.
     self.notify = self.notifier.notify
     self.connect = self.notifier.connect
     self.disconnect = self.notifier.disconnect
     #
     if HAS_GCONF:
         self.gconf_client = gconf.client_get_default()
         self.quiz_file_path = \
                 self.gconf_client.get_string(self.DEFAULT_QUIZ_KEY)
         self.use_timer = self.gconf_client.get_bool(self.USE_TIMER_KEY)
         self.exam_length = self.gconf_client.get_int(self.EXAM_LENGTH_KEY)
         self.break_length = \
                 self.gconf_client.get_int(self.BREAK_LENGTH_KEY)
         self.snooze_length = \
                 self.gconf_client.get_int(self.SNOOZE_LENGTH_KEY)
         self.gconf_client.add_dir(self.DIR_KEY, gconf.CLIENT_PRELOAD_NONE)
         # Pair each GConf key with its notifier key; value_is_in_range
         # presumably validates incoming values -- confirm in _add_listener.
         for gconf_key, notifier_key in zip(self.GCONF_KEYS,
                                            self.NOTIFIER_KEYS):
             self._add_listener(gconf_key, notifier_key,
                                self.value_is_in_range)
         for notifier_key in self.NOTIFIER_KEYS:
             self.connect(notifier_key, self.on_key_changed)
     else:
         self.edit_menuitem.set_visibility(False)
         # Default values #
         self.use_timer = True
         self.exam_length = 20
         self.break_length = 15
         self.snooze_length = 5
         self.quiz_file_path = resource_filename(
             __name__, '../quizzes/deu-fra.drill')
     # Fall back to "no quiz file" when the configured path does not exist.
     if not os.path.exists(self.quiz_file_path):
         self.quiz_file_path = None
Example #12
0
 def __init__(self):
     """Load configuration and set up the notifier and torrent filters."""
     self.config = config.Config()
     # NOTE(review): if config.get("notify", None) returns None, the **
     # expansion raises TypeError -- presumably a "notify" section is
     # always present; confirm against the config format.
     self.notifier = Notifier(**self.config.get("notify", None))
     self.watch = self.config.get("torrents", "watch_dir")
     # torrents seen so far
     self.torrents = []
     self.shows = self.config.get("torrents", "shows").split(",")
     self.quality = self.config.get("torrents", "quality")
Example #13
0
 def __init__(self, dimension):
     """Create a new empty solver

     dimension -- dimensionality of the problem (stored as-is)
     """
     Notifier.__init__(self)
     self.dimension = dimension
     # dependency graph with fixed category vertices added up front
     self._graph = Graph()
     self._graph.add_vertex("_root")
     self._graph.add_vertex("_toplevel")
     self._graph.add_vertex("_variables")
     self._graph.add_vertex("_distances")
     self._graph.add_vertex("_angles")
     self._graph.add_vertex("_rigids")
     self._graph.add_vertex("_hedgehogs")
     self._graph.add_vertex("_balloons")
     self._graph.add_vertex("_methods")
     # queue of new objects to process
     self._new = []
     # methodgraph
     self._mg = MethodGraph()
 def setUp(self):
     """Wire a two-key Notifier to mock listeners for the tests."""
     self.notifier = Notifier(['simple_key', 'key_with_message'])
     self.mock_listener = Mock()
     # Separate mock callbacks so each connection can be asserted on its own.
     self.first_key_heared = self.mock_listener.first_key_heared
     self.first_key_heared_again = self.mock_listener.first_key_heared_again
     self.second_key_heared = self.mock_listener.second_key_heared
     # 'simple_key' fans out to two listeners; 'key_with_message' to one.
     self.notifier.connect('simple_key', self.first_key_heared)
     self.notifier.connect('simple_key', self.first_key_heared_again)
     self.notifier.connect('key_with_message', self.second_key_heared)
Example #15
0
def main():
    """Run a full backup pass over every configured backup path."""
    config, backup_paths, key_paths = parse_config()

    # Bail out early (exit code 1) when the backup preconditions fail.
    try:
        check_should_backup(config)
    except BackupPrerequisiteFailed as e:
        print('Not backing up, for reason:')
        print(e.args[0])
        sys.exit(1)

    notifier = Notifier()
    notifier.notify('Starting backup')

    for bp in backup_paths.values():
        if bp.automount:
            mount_path_if_necessary(bp.mount_path)

        try:
            for name, snapshot in search_snapshots(bp.path).items():
                # Nothing to do when the newest snapshot is already the base
                # (i.e. it is already present on the remote system).
                if snapshot.newest == snapshot.base:
                    message = "Most recent snapshot for '{}' ({}) already on remote system".format(
                        name,
                        snapshot.newest,
                    )
                    print(message)
                else:
                    message = (
                        "Need to backup subvolume {} (base snapshot: {}, most recent: {})"
                    ).format(
                        name,
                        snapshot.base,
                        snapshot.newest,
                    )
                    print(message)
                    backup_snapshot(snapshot, config['server']['host'], key_paths)

        finally:
            # Always unmount what we mounted, even when the backup failed.
            if bp.automount:
                umount_path(bp.mount_path)

    notifier.notify('Backup complete')
Example #16
0
 def __init__(self):
     """Create an empty quiz manager and its notification hub."""
     self.quiz_filer_list = []
     # NOTE: 'emtpied'/'emtpy' are misspelled but are the established event
     # names; listeners presumably connect with these exact strings, so
     # they must not be "fixed" here.
     self.notifier = Notifier([
         "break_time", "question_changed", "direction_changed",
         "quiz_changed", "quiz_added", "quiz_removed", "quiz_list_emtpied",
         "quiz_list_non_emtpy"
     ])
     # Re-export the notifier's interface on this object.
     self.notify = self.notifier.notify
     self.connect = self.notifier.connect
     self.disconnect = self.notifier.disconnect
     self.current_quiz = None
     self.current_filer = None
Example #17
0
    def __init__(self, graph=None):
        """Create a fan-tracking graph, optionally copying *graph*."""
        Notifier.__init__(self)
        self._dict = {}
        """the edges are stored in a dictionary of dictionaries"""
        self._reverse = {}
        """the reverse graph is stored here"""

        # fan-in/fan-out bookkeeping, maintained alongside the edge dicts
        self._fanin = {}
        """map from vertices to fan-in number"""
        self._fanout = {}
        """map from vertices to fan-out number"""
        self._infan = {}
        """map from fan-in numbers to vertices with that fan-in"""
        self._outfan = {}
        """map from fan-out numbers to vertices with that fan-out"""
        # copy input graph
        if graph:
            for v in graph.vertices():
                self.add_vertex(v)
            for e in graph.edges():
                (v,w) = e
                self.set(v,w,graph.get(v,w))
Example #18
0
class App:
    """Recognise licence plates with OpenALPR and forward them upstream."""

    def __init__(self, id):
        alpr = Alpr("eu", "/etc/openalpr/openalpr.conf", "../runtime_data")
        if not alpr.is_loaded():
            print("Error loading OpenALPR")
            exit()
        alpr.set_top_n(20)

        self.alpr = alpr
        self.notifier = Notifier()
        self.id = id
        self.source = DataSource(self)

    def on_image_input(self, image_path):
        """Recognise the plate in *image_path* and send the result upstream."""
        self.send_data(self.alpr.recognize_file(image_path))

    def print_file_result(self, file_name, data):
        """Print the best plate candidate found in *data* (debug helper)."""
        print("=====\nFile {}:\n---".format(file_name))
        candidates = data["results"]
        if candidates:
            print(candidates[0]["plate"])
        else:
            print("No results found")

    def send_data(self, data):
        """POST the best plate to the notifier; no-op when nothing was found."""
        candidates = data["results"]
        if not candidates:
            return
        self.notifier.post({
            "id": self.id,
            "plate": candidates[0]["plate"],
            "isEntering": True,
        })

    def exit(self):
        """Release the OpenALPR engine."""
        self.alpr.unload()
Example #19
0
    def __init__(self, graph=None):
        """Build an empty fan-tracking graph, copying *graph* when given."""
        Notifier.__init__(self)
        # forward edges: {tail: {head: value}}
        self._dict = {}
        """the edges are stored in a dictionary of dictionaries"""
        # reversed adjacency for inbound traversal
        self._reverse = {}
        """the reverse graph is stored here"""

        # per-vertex fan counts plus the inverse (count -> vertices) indexes
        self._fanin = {}
        """map from vertices to fan-in number"""
        self._fanout = {}
        """map from vertices to fan-out number"""
        self._infan = {}
        """map from fan-in numbers to vertices with that fan-in"""
        self._outfan = {}
        """map from fan-out numbers to vertices with that fan-out"""
        # replicate the source graph's vertices and edge values
        if graph:
            for vertex in graph.vertices():
                self.add_vertex(vertex)
            for (tail, head) in graph.edges():
                self.set(tail, head, graph.get(tail, head))
Example #20
0
 def __init__(self, methodclasses):
     """Create a new solver, using the given subclasses of ClusterMethod."""
     # init superclasses
     Notifier.__init__(self)
     # store arguments
     self._methodclasses = methodclasses
     # Materialize with list(): on Python 3 filter() returns a one-shot
     # iterator, so storing it directly would leave these collections
     # silently empty after their first traversal.  list() is a no-op
     # change on Python 2, where filter() already returns a list.
     self._pattern_methods = list(filter(lambda m: hasattr(m, "patterngraph"),
                                         self._methodclasses))
     self._handcoded_methods = list(filter(
         lambda m: hasattr(m, "handcoded_match"), self._methodclasses))
     self._incremental_methods = list(filter(
         lambda m: hasattr(m, "incremental_matcher"), self._methodclasses))
     # init instance vars
     self._graph = Graph()
     #self._graph.add_vertex("_root")
     # self._graph.add_vertex("_toplevel")
     self._graph.add_vertex("_variables")
     self._graph.add_vertex("_clusters")
     self._graph.add_vertex("_methods")
     # queue of new objects to process
     self._new = []
     self._mg = MethodGraph()
     # add prototype_selection boolean var to method graph
     self._prototype_selection_var = "_prototype_selection_enabled"
     self._mg.add_variable(self._prototype_selection_var)
     self._mg.set(self._prototype_selection_var, True)
     # store map of selection_constraints to SelectionMethod (or None)
     self._selection_method = {}
     # store root cluster (will be assigned when first cluster added)
     self._rootcluster = None
     # an incrementally updated toplevel set
     self._toplevel = MutableSet()
     # incrementally updated set of applicable methods; materialized too,
     # since Union(*...) below consumes the map() while we keep a reference
     self._incremental_matchers = list(map(
         lambda method: method.incremental_matcher(self),
         self._incremental_methods))
     #print "incremental matchers:",self._incremental_matchers
     self._applicable_methods = Union(*self._incremental_matchers)
Example #21
0
class Archeloos:
    """Polls a torrent tracker's RSS feed and downloads matching shows."""

    def __init__(self):
        self.config = config.Config()
        # NOTE(review): Notifier(**None) would raise TypeError -- presumably
        # a "notify" config section always exists; confirm.
        self.notifier = Notifier(**self.config.get("notify", None))
        self.watch = self.config.get("torrents", "watch_dir")
        # all torrents seen so far, used to filter out already-seen items
        self.torrents = []
        self.shows = self.config.get("torrents", "shows").split(",")
        self.quality = self.config.get("torrents", "quality")

    def check_torrents(self, torrents):
        """Download torrents matching a watched show and the configured
        quality; return the list of torrents actually downloaded."""
        l = []
        for t in torrents:
            st = "%s" % t
            for s in self.shows:
                if s.lower() in st.lower():
                    # Only record torrents whose download succeeded.
                    if self.quality in t.quality and t.download(self.watch):
                        print(t)
                        l.append(t)
                    #print(t.link)
        #print(len(torrents))
        return l

    def run(self):
        """Poll the tracker every 30 minutes, forever, notifying downloads."""
        l = []
        tracker = self.config.get("tracker", None)
        while True:
            new_torrents = []
            feed = list_torrents(tracker["rss_url"], tracker["user_id"], tracker["user_pass"])
            lt = parse_results(feed)
            # keep only torrents we have not seen before
            new_torrents = [t for t in lt if t not in self.torrents]
            self.torrents = list(set(lt) | set(self.torrents))
            l = self.check_torrents(new_torrents)
            # NOTE(review): new_torrents were already merged via the set
            # union above, so this append may duplicate entries -- verify.
            self.torrents = self.torrents + new_torrents
            self.notifier.notify(l)

            time.sleep(1800)
Example #22
0
 def __init__(self, quiz_pool, ask_from=0, exam_length=15):
     """Create an exam session over *quiz_pool*.

     quiz_pool -- quizzes to draw questions from (loaded via add_quizzes)
     ask_from -- side the question is asked from (assumes 0 or 1)
     exam_length -- number of questions per exam
     """
     self.notifier = Notifier(
         ["break_time", "question_changed", "direction_changed"])
     # Re-export the notifier's interface on this object.
     self.notify = self.notifier.notify
     self.connect = self.notifier.connect
     self.disconnect = self.notifier.disconnect
     #
     self.previous_question = None
     self.quiz_pool = []
     self.answered = 0
     self.correct_answered = 0
     self.exam_length = exam_length
     self.session_length = exam_length
     # the answer side is the opposite of the asking side
     self.ask_from = ask_from
     self.answer_to = 1 - ask_from
     # fills self.quiz_pool from the constructor argument
     self.add_quizzes(quiz_pool)
Example #23
0
    def __init__(self):
        """Load configuration, set up notifier and downloader, echo settings."""
        self.config = config.Config()
        # NOTE(review): Notifier(**None) would raise TypeError if the
        # "notify" section is missing -- presumably it is always present.
        self.notifier = Notifier(**self.config.get("notify", None))
        self.watch = self.config.get("torrents", "watch_dir")
        self.torrents = []
        self.shows = self.config.get("torrents", "shows").split(",")
        self.fast_list = self.config.get("torrents", "fast_list").split(",")
        self.resolution = self.config.get("torrents", "resolution")
        self.quality = self.config.get("torrents", "quality")
        self.extra = self.config.get("torrents", "extra")

        tracker = self.config.get("tracker", None)
        self.downloader = Downloader(tracker["rss_url"], tracker["user_id"], tracker["user_pass"])

        # Echo the effective configuration for debugging.
        print("watch: '%s'" % self.watch)
        print("shows: '%s'" % self.shows)
        print("fast_list: '%s'" % self.fast_list)
        print("resolution: '%s'" % self.resolution)
        print("quality: '%s'" % self.quality)
        print("extra: '%s'" % self.extra)
Example #24
0
def main():
    """Back up every configured path, notifying on start and completion."""
    config, backup_paths, key_paths = parse_config()

    try:
        check_should_backup(config)
    except BackupPrerequisiteFailed as e:
        print('Not backing up, for reason:')
        print(e.args[0])
        sys.exit(1)

    notifier = Notifier()
    notifier.notify('Starting backup')

    for backup_path in backup_paths.values():
        if backup_path.automount:
            mount_path_if_necessary(backup_path.mount_path)

        try:
            snapshots = search_snapshots(backup_path.path)
            for name, snapshot in snapshots.items():
                up_to_date = snapshot.newest == snapshot.base
                if up_to_date:
                    # Already on the remote system; nothing to transfer.
                    print(
                        "Most recent snapshot for '{}' ({}) already on remote system".format(
                            name, snapshot.newest))
                else:
                    print(
                        "Need to backup subvolume {} (base snapshot: {}, most recent: {})".format(
                            name, snapshot.base, snapshot.newest))
                    backup_snapshot(snapshot, config['server']['host'],
                                    key_paths)
        finally:
            # Undo any mount we performed, even on failure.
            if backup_path.automount:
                umount_path(backup_path.mount_path)

    notifier.notify('Backup complete')
Example #25
0
def restore_db():
    """Restore the montagu database from backup and post the outcome.

    Requires montagu to be running with its DB volume present; otherwise an
    exception is raised, printed, and a failure message is posted.
    NOTE(review): after a failure is successfully posted, the original
    exception is swallowed -- confirm that is intended.
    """
    settings = get_settings()
    service = MontaguService(settings)
    notifier = Notifier(settings['notify_channel'])
    try:
        ok = service.status == 'running' and service.db_volume_present
        if not ok:
            raise Exception('montagu not in a state we can restore')
        bb8_backup.restore(service)
        database.setup(service)
        if settings["add_test_user"] is True:
            add_test_users()
        notifier.post("*Restored* data from backup on `{}` :recycle:".format(
            settings['instance_name']))
    except Exception as e:
        print(e)
        try:
            # BUG FIX: the instance name was previously passed as a stray
            # second positional argument instead of being formatted into
            # the message (compare the success message above).
            notifier.post("*Failed* to restore data on `{}` :bomb:".format(
                settings['instance_name']))
        except:
            raise
Example #26
0
def _deploy():
    """Deploy (or redeploy) montagu: pull images, back up, restore, start,
    configure, and post progress to the notify channel."""
    print_ascii_art()
    print("Beginning Montagu deploy")

    settings = get_settings()
    service = MontaguService(settings)
    status = service.status
    volume_present = service.db_volume_present
    # First-time deploy: no running service and no data volume yet.
    is_first_time = (status is None) and (not volume_present)
    if is_first_time:
        print("Montagu not detected: Beginning new deployment")
    else:
        print("Montagu status: {}. "
              "Data volume present: {}".format(status, volume_present))

    notifier = Notifier(settings['notify_channel'])

    # Check that the deployment environment is clean enough
    version = git_check(settings)

    deploy_str = "montagu {} (`{}`) on `{}`".format(
        version['tag'] or "(untagged)", version['sha'][:7],
        settings['instance_name'])

    notifier.post("*Starting* deploy of " + deploy_str)

    # Pull images
    service.pull()

    # If Montagu is running, back it up before tampering with it
    if status == "running":
        if settings["bb8_backup"]:
            bb8_backup.backup()

    # Stop Montagu if it is running
    # (and delete data volume if persist_data is False)
    if not is_first_time:
        notifier.post("*Stopping* previous montagu "
                      "on `{}` :hand:".format(settings['instance_name']))
        service.stop()

    # Schedule backups
    if settings["bb8_backup"]:
        bb8_backup.schedule()

    # BB8 restore
    data_exists = (not is_first_time) and service.settings["persist_data"]
    if settings["initial_data_source"] == "bb8_restore":
        data_update = service.settings["update_on_deploy"] and \
                      not service.settings["bb8_backup"]
        if data_exists and not data_update:
            print("Skipping bb8 restore: 'persist_data' is set, "
                  "and this is not a first-time deployment")
        else:
            print("Running bb8 restore (while service is stopped)")
            bb8_backup.restore()

    # Start Montagu again
    service.start()
    try:
        print("Configuring Montagu")
        configure_montagu(service, data_exists)

        print("Starting Montagu metrics")
        service.start_metrics()

        print("Montagu metrics started")
    except Exception as e:
        print("An error occurred before deployment could be completed:")
        print(e)
        print("\nYou may need to call ./stop.py before redeploying.")
        # Best-effort failure notification: never mask the original error.
        try:
            notifier.post("*Failed* deploy of " + deploy_str + " :bomb:")
        except:
            pass
        raise

    if settings["add_test_user"] is True:
        print("Adding tests users")
        add_test_users()

    last_deploy_update(version)
    notifier.post("*Completed* deploy of " + deploy_str + " :shipit:")

    print("Finished deploying Montagu")
    if settings["open_browser"]:
        sleep(1)
        webbrowser.open("https://localhost:{}/".format(settings["port"]))
Example #27
0
class Watcher(object):
    """Monitor a PBS/Torque job: poll qstat, guard node memory, and relay
    Pushbullet notifications and push commands.

    Fixes applied relative to the original:
    - the constructor no longer clobbers the notifier with set_notifier()'s
      None return value (which disabled all notifications);
    - Python 2-only tuple-parameter lambdas and lazy map()/zip() misuse
      were replaced with constructs that behave identically on Python 2
      and also work on Python 3.
    """
    def __init__(self, jobid, qstat, qdel, showstart, events, lowmem=0., pb_token=None):
        self.jobid = jobid
        # The q* arguments are argv prefixes; append the job id to each.
        self.qstat_cmd = qstat + [jobid]
        self.qdel_cmd = qdel + [jobid]
        self.showstart_cmd = showstart + [jobid]
        self.lowmem = lowmem
        self.events = events

        self.started = False
        self.finished = False
        self.notifier = None
        self.update()
        self.started = False  # Need to reset after qstat to get job details
        self.jobname = self.jobdetails['Job_Name']
        if pb_token:
            # BUG FIX: set_notifier() assigns self.notifier itself and
            # returns None; "self.notifier = self.set_notifier(...)"
            # clobbered the fresh notifier with None.
            self.set_notifier(pb_token)

    def set_notifier(self, pb_token, name=None):
        """
        Add a pushbullet notifier to the job.
        There can only be one notifier.

        """
        self.remove_notifier()
        if name is None:
            name = "%s - %s" % (self.jobname, self.jobid)
        self.notifier = Notifier(name, pb_token)

    def remove_notifier(self):
        """
        Remove pushbullet notifier if it exists.
        """
        if self.notifier:
            self.notifier.delete_listener()
        self.notifier = None

    def update(self):
        """
        Poll this job.
        Checks run status, updates details, sends any relevant notifications,
        checks for and acts on pushes, and kills itself if required.
        """
        self.jobdetails = self.qstat()
        # Check and update run/finish status
        if self.jobdetails['job_state'] == 'R':
            if self.started:
                # Check memory use
                self.memory_safety()
            else:
                self.started = True
                self.nodes = self.get_nodes()
                if self.notifier and "start" in self.events:
                    self.start_notify()
        elif self.jobdetails['job_state'] != 'R' and self.started:
            # Job finished. Notify if appropriate
            self.finished = True
            if self.notifier and "finish" in self.events:
                self.finish_notify()
        # Check for and act on pushes
        if self.notifier:
            # BUG FIX: was map(self.parse_push, ...), which is lazy on
            # Python 3 and would never execute the side effects.
            for push in self.notifier.check_pushes():
                self.parse_push(push)

    def memory_safety(self):
        """
        Run a check on available node memory, kill the
        job if it falls below threshold on any one node.
        """
        try:
            logger.debug("Checking memory on %s" % ", ".join(self.nodes))
            self.freemem = self.check_free()
            logger.debug(self.make_free_str())
            # BUG FIX: replaced a Python 2-only tuple-parameter lambda in
            # filter() with any(); same truthiness on Python 2.
            if any(float(mem) < self.lowmem for node, mem in self.freemem):
                logger.debug("Free memory below threshold. Killing the job.")
                try:
                    self.kill_job()
                except Exception as e:
                    logger.error("qdel command failed.")
                    logger.error('Error was:')
                    logger.error(e)
                if self.notifier and "kill" in self.events:
                    self.kill_notify()
        except Exception as e:
            logger.error("Freemem check failed.")
            logger.error(e)


    @staticmethod
    def parse_job(jobdetails):
        """
        Turn the output of qstat -f into a dictionary.
        """
        lines = jobdetails.replace("\n\t", "").splitlines()[1:-1]
        return dict(map(lambda line: tuple(line.strip().split(" = ")), lines))

    def qstat(self):
        """
        Output of qstat command munged into a dictionary.
        """
        try:
            logger.debug("Checking status for job %s" % self.jobid)
            jobdetails = self.parse_job(check_output(self.qstat_cmd, timeout=60))
        except Exception as e:
            logger.error('qstat command failed. Bailing out.')
            logger.error('Error was:')
            logger.error(e)
            if self.notifier and "error" in self.events:
                self.error_notify(e)
            raise
        return jobdetails


    def get_nodes(self):
        """
        Return a list of the nodes in use from a job dictionary.
        """

        nodes = self.jobdetails['exec_host']
        return list(set(map(lambda x: x.split('/')[0], nodes.split('+'))))

    def kill_job(self):
        """
        Attempt to kill a job using qdel.
        """

        return call(self.qdel_cmd)


    def check_free(self):
        """
        Return a list of tuples where each one is a node, and the % free memory.
        """
        # BUG FIX: wrapped in list() -- the result is iterated more than
        # once (make_free_str and the threshold check), and zip() is a
        # one-shot iterator on Python 3.
        return list(zip(self.nodes, map(self._check_free, self.nodes)))

    @staticmethod
    def _check_free(node):
        """
        Use rsh and free to get the percentage of free memory on a node.
        """

        return check_output(["rsh", node, "free", "|",  "awk",  "'FNR == 3 {print $4/($3+$4)*100}'"], timeout=60)

    def make_free_str(self):
        """Format self.freemem as 'Free memory - node: pct%, ...'."""
        # BUG FIX: replaced a Python 2-only tuple-parameter lambda with a
        # generator expression; identical output.
        return "Free memory - %s" % ", ".join(
            "%s: %s%%" % (node, free.strip()) for node, free in self.freemem)

    def start_notify(self):
        """
        Send a notification that the job has started.
        """
        title = "%s, id: %s, started." % (self.jobname, str(self.jobid))
        body = "Running on nodes %s, and started %s." % (", ".join(self.nodes), self.jobdetails['start_time'])
        self.notifier.send_notification(title, body)

    def error_notify(self, error):
        """
        Send a notification that the watcher has hit an error.
        """
        title = "%s, id: %s, watcher error." % (self.jobname, str(self.jobid))
        body = str(error)
        self.notifier.send_notification(title, body)

    def finish_notify(self):
        """
        Send a notification that the job has completed.
        """
        title = "%s, id: %s, finished." % (self.jobname, str(self.jobid))
        body = ""
        self.notifier.send_notification(title, body)

    def kill_notify(self):
        """
        Send a notification that the job is being killed.
        """
        title = "Attempting to kill job %s, id: %s." % (self.jobname, str(self.jobid))
        body = self.make_free_str()
        self.notifier.send_notification(title, body)

    @staticmethod
    def mkwalltime(seconds):
        """
        Make an hours:minutes:seconds string from seconds.
        """
        seconds = int(seconds)
        m, s = divmod(seconds, 60)
        h, m = divmod(m, 60)
        return "%d:%02d:%02d" % (h, m, s)

    def parse_push(self, push):
        """
        Take a push, and execute some commands.
        This is very primitive - we look for preset strings
        in the body of the push.
        """
        logger.debug("Parsing push msg.")

        try:
            cmd = push['body'].lower()
            logger.debug(cmd)
            logger.debug(push)
            try:
                target = push['source_device_iden']
            except KeyError:
                logger.debug("No specific device to send to.")
                target = None
            commands = []
            if 'showstart' in cmd:
                # Return the starttime for this job.
                try:
                    body = check_output(self.showstart_cmd, timeout=60)
                    title = "Job %s (%s) Start Time" % (self.jobname, self.jobid)
                except Exception as e:
                    body = str(e)
                    title = "Showstart failed."
                self.notifier.send_notification(title, body, target=target)
                commands.append('showstart')
            if 'walltime' in cmd:
                # Return the starttime for this job.
                try:
                    remaining = self.mkwalltime(self.jobdetails['Walltime.Remaining'])
                    requested = self.jobdetails['Resource_List.walltime']
                    body = "%s walltime left of %s" % (remaining, requested)
                    title = "Job %s (%s) Remaining walltime" % (self.jobname, self.jobid)
                except Exception as e:
                    body = str(e)
                    title = "Walltime failed."
                self.notifier.send_notification(title, body, target=target)
                commands.append('walltime')
            if 'cancel' in cmd:
                # Cancel the job
                try:
                    self.kill_job()
                    self.kill_notify()
                except Exception as e:
                    body = str(e)
                    title = "qdel failed."
                    self.notifier.send_notification(title, body, target=target)
                commands.append('cancel')
            if 'freemem' in cmd:
                # Get the free memory for nodes
                try:
                    body = self.make_free_str()
                    title = "Job %s (%s) Free Memory" % (self.jobname, self.jobid)
                except Exception as e:
                    body = str(e)
                    title = "Freemem check failed."
                self.notifier.send_notification(title, body, target=target)
                commands.append('freemem')
            if 'killwatcher' in cmd:
                #Kill this watcher.
                raise Exception('Killed by Push command.')
            assert commands
        except KeyError as e:
            logger.debug(e)
            logger.debug("No body in this push.")
        except AssertionError:
            logger.debug("No commands in this push.")
Example #28
0
class Downloader:
    """A downloader which checks for new videos on the Digital Foundry homepage, and downloads them."""
    __scheme = 'https://'
    __domain = 'www.digitalfoundry.net'
    __url = __scheme + __domain
    # File holding the hrefs of already-downloaded videos, one per line.
    __cache_file = 'cache'

    # Anchor/button text used to locate the download links on a video page.
    __download_strings = {
        'hevc': ' Download HEVC',
        'now': 'Download now',
    }

    def __init__(self, browser: str, sid: str, token: str, to: str, from_: str,
                 output_dir: str):
        """Create a downloader.

        :param browser: which browser's cookie jar to read ('chrome', 'safari' or 'firefox')
        :param sid: Twilio account SID used by the notifier
        :param token: Twilio auth token used by the notifier
        :param to: notification recipient number
        :param from_: notification sender number
        :param output_dir: directory downloaded videos and art are written to
        :raises ValueError: if *browser* is not a supported browser
        """
        self.__browser = browser
        self.__load_cookie_jar()
        self.__output_dir = output_dir
        self.__notifier = Notifier(sid, token, to, from_)
        # Serializes download() so overlapping invocations cannot interleave.
        self.__lock = Lock()

    def __load_cookie_jar(self):
        """Load the cookie jar from the configured browser.

        :raises ValueError: if the configured browser is not supported
        """
        if self.__browser == 'chrome':
            self.__cj = chrome()
        elif self.__browser == 'safari':
            self.__cj = safari()
        elif self.__browser == 'firefox':
            self.__cj = firefox()
        else:
            # BUG FIX: raise with a message so the failure is diagnosable
            # (previously a bare ValueError with no context).
            raise ValueError(f'Unsupported browser: {self.__browser!r}')

    def load_cookie_jar(self):
        """Public wrapper: (re)load cookies from the configured browser."""
        self.__load_cookie_jar()

    def download(self) -> None:
        """Checks the Digital Foundry homepage for new videos, and downloads them."""
        if not self.__has_valid_cookie():
            return

        # BUG FIX: hold the lock via a context manager so it is released even
        # if a request or download raises.  The previous manual
        # acquire()/release() pair leaked the lock on any exception between
        # them, deadlocking every later call to download().
        with self.__lock:
            logging.info('Checking Digital Foundry Homepage...')
            r = get(self.__url, cookies=self.__cj)
            if not r.ok:
                msg = 'Can\'t reach Digital Foundry Homepage.'
                logging.warning(msg)
                self.__notifier.notify(msg)
                return

            hrefs = self.__get_links(r)
            total_downloads = len(hrefs)

            if total_downloads > 0:
                logging.info(
                    f"Found {total_downloads} new video{'s' if total_downloads > 1 else ''}!"
                )
            for i, href in enumerate(hrefs, start=1):
                self.__process_downloads(href, i, total_downloads)
            logging.info("All videos downloaded.")

    def __has_valid_cookie(self) -> bool:
        """Checks if there is a valid digital foundry cookie in the cookie jar"""
        df_cookie = None

        for cookie in self.__cj:
            if cookie.domain == self.__domain:
                df_cookie = cookie
                break

        if df_cookie is None:
            msg = 'No Digital Foundry cookie found. Please log in to Digital Foundry in your browser.'
            logging.warning(msg)
            self.__notifier.notify(msg)
            return False
        elif df_cookie.is_expired(time()):
            msg = 'Digital Foundry cookie expired. Please log in to Digital Foundry in your browser.'
            logging.warning(msg)
            self.__notifier.notify(msg)
            return False

        return True

    def __get_links(self, r: Response) -> List[Dict[str, str]]:
        """Gets all the download links from a given response. If link is in cache, it won't be added to list."""
        soup = bs4.BeautifulSoup(r.content, 'html.parser')

        if not _logged_in(soup):
            msg = 'Subscribe button found. Make sure you are logged in to Digital Foundry in your browser.'
            logging.warning(msg)
            self.__notifier.notify(msg)
            return []

        all_videos = soup.find_all('div', {'class', 'video'})

        # Best-effort cache read: if the cache file cannot be opened, every
        # video is treated as new (same behavior as before, but the file is
        # now closed deterministically by the context manager).
        whole_file = ""
        have_cache = False
        try:
            with open(self.__cache_file, "r") as cache:
                whole_file = cache.read()
            have_cache = True
        except Exception as ex:
            logging.error(
                f"Problem opening cache file from {self.__cache_file}: {ex}")

        hrefs = []
        for video in all_videos:
            art_tag = video.find('a', {'class', 'cover'})
            art = _get_art_link(art_tag)
            # Skip links already recorded in the cache file.
            if not have_cache or art_tag['href'] not in whole_file:
                hrefs.append({'art': art, 'href': art_tag['href']})

        return hrefs

    def __process_downloads(self, href: Dict[str, str], current: int,
                            total: int) -> None:
        """Follows HEVC link on a page with two file types"""
        r = get(self.__url + href['href'], cookies=self.__cj)
        soup = bs4.BeautifulSoup(r.content, 'html.parser')
        dl_buttons = soup.find_all('a', class_='button wide download', limit=2)
        hevc_button = None
        for button in dl_buttons:
            if button.get_text() == self.__download_strings['hevc']:
                hevc_button = button
                break
        if hevc_button is None:
            return
        self.__process_hevc_download(hevc_button['href'], href, current, total)

    def __process_hevc_download(self, href: str, original_link: Dict[str, str],
                                current: int, total: int) -> None:
        """Follows Download Now link on HEVC download page"""
        r = get(self.__url + href, cookies=self.__cj)
        soup = bs4.BeautifulSoup(r.content, 'html.parser')
        download_button = soup.find('a', text=self.__download_strings['now'])
        # ROBUSTNESS FIX: the page may not contain a "Download now" link;
        # previously subscripting None raised a TypeError.
        if download_button is None:
            logging.error(f"No download link found on {self.__url}{href}")
            return
        self.__download_video(soup.title.get_text(), download_button['href'],
                              original_link, current, total)

    def __download_video(self, title: str, href: str, original_link: Dict[str,
                                                                          str],
                         current: int, total: int) -> None:
        """Downloads a file at the given href"""
        # Get actual video (streamed so large files are not held in memory).
        r = get(self.__url + href, cookies=self.__cj, stream=True)
        total_length = r.headers.get('content-length')
        title = _convert_title(title)
        if r.status_code == 404:
            logging.error(f"{self.__url}{href} returned 404")
            self.__notifier.notify(f"{title} returned 404")
            return

        logging.info('Downloading...')
        print(f'{current}/{total} {title}')
        try:
            with open(self.__output_dir + '/' + title + '.mp4', 'wb') as f:
                if original_link['art'] != "":
                    self.__download_art(original_link['art'], title)
                if total_length is None:  # no content length header
                    f.write(r.content)
                    self.__notifier.notify(f'New video downloaded: {title}')
                else:
                    _download_with_progress(r, f, int(total_length))
                    self.__notifier.notify(f'New video downloaded: {title}')
        except Exception as ex:
            logging.error(f"Failed to download {title}: {ex}")
        else:
            # Only record the href in the cache once the download succeeded.
            try:
                with open(self.__cache_file, 'a') as f:
                    f.write(original_link['href'] + '\n')
            except Exception as ex:
                logging.error(
                    f"Could not open cache file at {self.__cache_file}: {ex}")
        print()

    def __download_art(self, href: str, title: str):
        """Downloads a jpg at the given href"""
        art = get(href, cookies=self.__cj)
        with open(self.__output_dir + '/' + title + '.jpg', 'wb') as f:
            f.write(art.content)
Example #29
0
class Archeloos:
    """Polls a tracker RSS feed, picks torrents matching the configured shows,
    hands them to the downloader, and notifies about new downloads."""

    def __init__(self):
        self.config = config.Config()
        self.notifier = Notifier(**self.config.get("notify", None))
        self.watch = self.config.get("torrents", "watch_dir")
        # All torrents ever seen, used to detect new feed entries.
        self.torrents = []
        self.shows = self.config.get("torrents", "shows").split(",")
        self.fast_list = self.config.get("torrents", "fast_list").split(",")
        self.resolution = self.config.get("torrents", "resolution")
        self.quality = self.config.get("torrents", "quality")
        self.extra = self.config.get("torrents", "extra")

        tracker = self.config.get("tracker", None)
        self.downloader = Downloader(tracker["rss_url"], tracker["user_id"], tracker["user_pass"])

        print("watch: '%s'" % self.watch)
        print("shows: '%s'" % self.shows)
        print("fast_list: '%s'" % self.fast_list)
        print("resolution: '%s'" % self.resolution)
        print("quality: '%s'" % self.quality)
        print("extra: '%s'" % self.extra)

    def pick_show(self, torrent):
        """Return True if *torrent* matches the configured show/resolution/quality
        filters (all comparisons are case-insensitive substring checks, except
        the fast list which is an exact name match)."""
        s = ("%s" % torrent.name).lower()
        shows = [name.lower() for name in self.shows]
        fast_list = [name.lower() for name in self.fast_list]
        print("new pick ", torrent)
        if not any(ss in s for ss in shows):
            return False
        if self.resolution.lower() not in torrent.resolution.lower():
            print("  - %s WRONG QUALITY    '%s' not in '%s'" % (torrent, self.resolution, torrent.resolution))
            return False
        if s in fast_list:
            # Fast-listed shows skip the quality check entirely.
            print("  - %s FAST LIST" % torrent)
            return True
        if self.quality.lower() in torrent.quality.lower():
            print("  - %s OK" % torrent)
            return True
        print("  - %s NOT SELECTED  ('%s' and '%s') != ('%s' and '%s')" % (torrent, self.resolution, self.quality, torrent.resolution, torrent.quality))
        return False

    def check_torrents(self, torrents):
        """Return the subset of *torrents* that matched the filters AND were
        successfully handed to the downloader."""
        picked = []
        for t in torrents:
            if self.pick_show(t) and self.downloader.download(t, self.watch):
                picked.append(t)
        return picked

    def run(self):
        """Poll the feed forever, downloading and notifying about new torrents."""
        while True:
            feed = self.downloader.list_torrents()
            listed = parse_results(feed)
            new_torrents = [t for t in listed if t not in self.torrents]
            # BUG FIX: the set-union below already folds the new torrents into
            # self.torrents; the old code additionally concatenated
            # new_torrents afterwards, so the list accumulated duplicates
            # without bound on every poll cycle.
            self.torrents = list(set(listed) | set(self.torrents))
            downloaded = self.check_torrents(new_torrents)
            self.notifier.notify(downloaded)

            time.sleep(900)
Example #30
0
 def __init__(self):
     """Construct a ParametricConstraint with no parameter value set."""
     # Start with an unset value; it is assigned later via the setter.
     self._value = None
     # Wire up the Notifier base class so observers can subscribe.
     Notifier.__init__(self)
Example #31
0
from sms import SmsEngine
from grades import GradeSummary
from assignments import Assignment

logging.basicConfig(level=logging.INFO)

# Database connection parameters, all overridable via environment variables.
connInfo = {
    "dbname": os.getenv('DBNAME', "echoalert"),
    "user": os.getenv('DBUSER', "postgres"),
    "host": os.getenv('DBHOST', "localhost"),
    "port": os.getenv('DBPORT', "5432"),
    "password": os.getenv('DBPASS', "defaultpassword"),
}

PgDb.setup(**connInfo)
notifications = Notifier.get_new_notices()

# Twilio credentials from the environment.
sid = os.getenv("TWILIO_SID", "")
token = os.getenv("TWILIO_TOKEN", "")
sms_from = os.getenv("TWILIO_FROM", "")

logging.info("Checking for any pending notifications")
# SECURITY FIX: never write the auth token to the log in plain text;
# record only whether it is configured.
logging.info("twilio sid={}, token={}".format(sid, "***" if token else "<unset>"))

sms = SmsEngine(sid, token, sms_from)

# Notification types to ignore in the dispatch loop below.
skip = []
for ndata in notifications:

    notify_type = ndata['notification_type']
    if notify_type not in skip:
Example #32
0
 def notify(self, notifyType):
     """Persist a notification of *notifyType* for this record at its notify date."""
     record_id = self.data['id']
     Notifier.insert(record_id, notifyType, self.notify_date)
class Test_Notifier(unittest.TestCase):
    """
    Unittest for the class Notifier.
    """

    def setUp(self):
        # A notifier with two registered keys; 'simple_key' gets two
        # listeners, 'key_with_message' gets one.
        self.notifier = Notifier(['simple_key', 'key_with_message'])
        self.mock_listener = Mock()
        # Bind mock callables so expectations can be set per listener.
        self.first_key_heared = self.mock_listener.first_key_heared
        self.first_key_heared_again = self.mock_listener.first_key_heared_again
        self.second_key_heared = self.mock_listener.second_key_heared
        self.notifier.connect('simple_key', self.first_key_heared)
        self.notifier.connect('simple_key', self.first_key_heared_again)
        self.notifier.connect('key_with_message', self.second_key_heared)

    def test_notifier(self):
        """Notifying a key fires every listener connected to that key, and
        no listener connected to a different key."""
        self.mock_listener.expects(once()).first_key_heared()
        self.mock_listener.expects(once()).first_key_heared_again()
        self.mock_listener.expects(never()).second_key_heared()
        self.notifier.notify('simple_key')
        self.mock_listener.verify()

    def test_notifier_with_message(self):
        """A message passed to notify() is forwarded to the listener."""
        self.mock_listener.expects(never()).first_key_heared()
        self.mock_listener.expects(never()).first_key_heared_again()
        self.mock_listener.expects(once()).method('second_key_heared').\
                with_at_least(same('little message'))
        self.notifier.notify('key_with_message', 'little message')
        self.mock_listener.verify()

    def test_disconnect(self):
        """A disconnected listener no longer receives notifications;
        remaining listeners on the same key still do."""
        self.mock_listener.expects(once()).first_key_heared()
        self.mock_listener.expects(never()).first_key_heared_again()
        self.notifier.disconnect('simple_key', self.first_key_heared_again)
        self.notifier.notify('simple_key')
        self.mock_listener.verify()

    def test_reconnect(self):
        """Reconnecting a previously disconnected listener restores delivery."""
        self.mock_listener.expects(once()).first_key_heared()
        self.mock_listener.expects(once()).first_key_heared_again()
        self.notifier.disconnect('simple_key', self.first_key_heared_again)
        self.notifier.connect('simple_key', self.first_key_heared_again)
        self.notifier.notify('simple_key')
        self.mock_listener.verify()