def threaded_check_for_job_state_changes():
    """
    Polls the SQS queue named by the
    :py:data:`SQS_JOB_STATE_CHANGE_QUEUE_NAME <media_nommer.conf.settings.SQS_JOB_STATE_CHANGE_QUEUE_NAME>`
    setting, where the EC2_ instances running :doc:`../ec2nommerd` announce
    job state changes. Seeing an announcement tells :doc:`../feederd` that
    fresh job details should be pulled from the SimpleDB_ domain configured
    via the
    :py:data:`SIMPLEDB_JOB_STATE_DOMAIN <media_nommer.conf.settings.SIMPLEDB_JOB_STATE_DOMAIN>`
    setting.
    """
    # Pull any announced state changes into the local job cache.
    JobCache.refresh_jobs_with_state_changes()
    # Completed jobs no longer need tracking; drop them from the cache.
    JobCache.uncache_finished_jobs()
def threaded_prune_jobs():
    """
    A Nommer can fail without ever reporting the failure: instead of moving
    to ``ERROR``, the job sits in some unfinished state in the SimpleDB_
    domain named by the
    :py:data:`SIMPLEDB_JOB_STATE_DOMAIN <media_nommer.conf.settings.SIMPLEDB_JOB_STATE_DOMAIN>`
    setting.

    This routine sweeps for jobs that have gone without an update for a
    very long time (roughly a day) and are presumed dead, marking them
    ``ABANDONED`` so the breakage is visible.
    """
    # Flag long-untouched jobs as ABANDONED.
    JobCache.abandon_stale_jobs()
    # Newly abandoned jobs count as finished; purge them from the cache.
    JobCache.uncache_finished_jobs()
Example #3
0
def threaded_prune_jobs():
    """
    When failure isn't handled gracefully by a Nommer, a job can get
    wedged in an unfinished state (rather than ``ERROR``) inside the
    SimpleDB_ domain configured by the
    :py:data:`SIMPLEDB_JOB_STATE_DOMAIN <media_nommer.conf.settings.SIMPLEDB_JOB_STATE_DOMAIN>`
    setting.

    To surface such breakage, this task locates jobs untouched for a very
    long time (about a day) — almost certainly dead — and stamps them with
    an ``ABANDONED`` state.
    """
    # Mark presumed-dead jobs as ABANDONED.
    JobCache.abandon_stale_jobs()
    # Abandoned == finished for caching purposes; evict them now.
    JobCache.uncache_finished_jobs()
Example #4
0
def threaded_check_for_job_state_changes():
    """
    Watches the SQS queue given by the
    :py:data:`SQS_JOB_STATE_CHANGE_QUEUE_NAME <media_nommer.conf.settings.SQS_JOB_STATE_CHANGE_QUEUE_NAME>`
    setting for state-change announcements from the EC2_ instances running
    :doc:`../ec2nommerd`, signalling :doc:`../feederd` to fetch updated job
    details from the SimpleDB_ domain set by the
    :py:data:`SIMPLEDB_JOB_STATE_DOMAIN <media_nommer.conf.settings.SIMPLEDB_JOB_STATE_DOMAIN>`
    setting. Each job whose state changed triggers a notification.
    """
    # refresh_jobs_with_state_changes() hands back the jobs that changed.
    jobs_with_changes = JobCache.refresh_jobs_with_state_changes()
    # Fire a notification for every state change we just saw.
    for changed_job in jobs_with_changes:
        job_state_notifier.send_notification(changed_job)
    # Completed jobs are done with — drop them from the job cache.
    JobCache.uncache_finished_jobs()
Example #5
0
    def view(self):
        """
        Handles a job submission request: reads encoding parameters from
        the request's query args, resolves the requested preset from
        settings, creates an EncodingJob, saves it, and exposes the new
        job's id via ``self.context`` for serialization back to the caller.
        """
        # Debug dump of the incoming request data.
        print "REQ", self.request.args
        print "KW", self.kwargs
        print "CONT", self.context

        # Each request arg arrives as a list; take the first value and
        # HTML-escape it with cgi.escape.
        source_path = cgi.escape(self.request.args["source_path"][0])
        dest_path = cgi.escape(self.request.args["dest_path"][0])
        notify_url = cgi.escape(self.request.args["notify_url"][0])
        preset = cgi.escape(self.request.args["preset"][0])
        # NOTE(review): cgi.escape() HTML-escapes &, < and > BEFORE the JSON
        # is parsed below — a job_options payload containing any of those
        # characters would be corrupted. Confirm this is intended.
        user_job_options = cgi.escape(self.request.args["job_options"][0])
        user_job_options = simplejson.loads(user_job_options)

        # More debug output of the parsed values.
        print "SOURCE", source_path
        print "DEST", dest_path
        print "NOTIFY", notify_url
        print "OPTIONS", user_job_options

        # Retrieve the given preset from nomconf.
        try:
            preset_dict = settings.PRESETS[preset]
        except KeyError:
            # Unknown preset: record the error and abort the request.
            self.set_error("No such preset.")
            return

        # Determine the nommer based on the preset.
        nommer = preset_dict["nommer"]
        # Get the preset's job options dict.
        job_options = preset_dict["options"]
        # Override preset's options with user-specified values.
        # TODO: Fix this for multi-pass!
        # job_options.update(user_job_options)
        print "NEW OPTS", job_options

        # Create a new job and save it to the DB/queue.
        job = EncodingJob(source_path, dest_path, nommer, job_options, notify_url=notify_url)
        unique_job_id = job.save()
        # Add the job to the local job cache.
        JobCache.update_job(job)

        # This is serialized and returned to the user.
        self.context.update({"job_id": unique_job_id})
Example #6
0
    def set_context(self, request):
        """
        Validates the deserialized request payload, creates an EncodingJob
        from it, saves the job to the DB/queue, and stores the resulting
        job id on ``self.context`` for serialization back to the caller.

        :param request: The incoming request object. The payload itself has
            already been deserialized onto ``self.user_input``.
        """
        payload = self.user_input

        # Reject the request on the first top-level required key that is
        # absent or falsy; set_error() records the message for the caller.
        for key in self.required_keys:
            if not payload.get(key):
                msg = "Missing/invalid required key+val: ['%s']" % key
                self.set_error(msg)
                return

        # Same validation for the keys nested under 'job_options'.
        for key in self.required_job_options_keys:
            if not payload['job_options'].get(key):
                msg = "Missing/invalid required key+val: ['job_options'][%s]" % key
                self.set_error(msg)
                return

        source_path = payload['source_path']
        dest_path = payload['dest_path']
        # notify_url is optional; None means no notification callback.
        notify_url = payload.get('notify_url')
        job_options = payload['job_options']['options']
        nommer = payload['job_options']['nommer']

        # Create a new job and save it to the DB/queue.
        job = EncodingJob(source_path,
                          dest_path,
                          nommer,
                          job_options,
                          notify_url=notify_url)
        unique_job_id = job.save()
        # Add the job to the local job cache.
        JobCache.update_job(job)

        # This is serialized and returned to the user.
        self.context.update({'job_id': unique_job_id})
Example #7
0
    def set_context(self, request):
        """
        Pulls encoding parameters out of the already-deserialized user
        payload, validates them, enqueues a new EncodingJob, and hands the
        resulting job id back through ``self.context``.
        """
        payload = self.user_input
        print(payload)

        # Bail out with an error on the first missing/falsy top-level key.
        for required in self.required_keys:
            if not payload.get(required):
                self.set_error(
                    "Missing/invalid required key+val: ['%s']" % required)
                return

        # Likewise for the keys nested inside 'job_options'.
        for required in self.required_job_options_keys:
            if not payload['job_options'].get(required):
                self.set_error(
                    "Missing/invalid required key+val: ['job_options'][%s]" % required)
                return

        job_options_block = payload['job_options']
        source_path = payload['source_path']
        dest_path = payload['dest_path']
        notify_url = payload.get('notify_url')
        nommer = job_options_block['nommer']
        job_options = job_options_block['options']

        # Persist the new job to the DB/queue...
        job = EncodingJob(source_path, dest_path, nommer, job_options,
                          notify_url=notify_url)
        unique_job_id = job.save()
        # ...and mirror it into the local job cache.
        JobCache.update_job(job)

        # Serialized and returned to the caller.
        self.context.update({'job_id': unique_job_id})
Example #8
0
 def load_job_cache(self):
     """
     Primes the job cache with a batch of recently modified jobs so they
     can be accessed quickly without a trip to the backing store.
     """
     # Delegates entirely to the JobCache startup loader.
     JobCache.load_recent_jobs_at_startup()
Example #9
0
 def load_job_cache(self):
     """
     Warms the job cache at startup by pulling in a slice of the most
     recently modified jobs for fast local access.
     """
     JobCache.load_recent_jobs_at_startup()