Code example #1
0
def cancel(task_id):
    """
    Cancel the task identified by ``task_id``.

    Only the task itself is canceled, never the tasks it spawned. Its state is
    moved to 'canceled' as part of the operation.

    Args:
        task_id (str): The ID of the task you wish to cancel

    Raises:
        MissingResource: if a task with given task_id does not exist
    """
    try:
        task = Task.objects.get(pk=task_id)
    except Task.DoesNotExist:
        raise MissingResource(task=task_id)

    if task.state in TASK_FINAL_STATES:
        # A task in a terminal state needs no cancellation; just report it.
        msg = _("Task [{task_id}] already in a final state: {state}")
        _logger.debug(msg.format(task_id=task_id, state=task.state))
        return task

    _logger.info(_("Canceling task: {id}").format(id=task_id))

    redis = connection.get_redis_connection()
    main_job = Job(id=str(task.pk), connection=redis)
    res_job = Job(id=str(task._resource_job_id), connection=redis)

    # Mark the task canceled before asking the workers to stop.
    task.state = TASK_STATES.CANCELED
    task.save()

    try:
        send_stop_job_command(redis, main_job.get_id())
        send_stop_job_command(redis, res_job.get_id())
    except (InvalidJobOperation, NoSuchJobError):
        # We don't care if the job isn't currently running when we try to cancel
        pass

    # A hack to ensure that we aren't deleting resources still being used by the workhorse
    time.sleep(0.5)

    res_job.delete()
    main_job.delete()

    with transaction.atomic():
        for report in task.progress_reports.all():
            if report.state not in TASK_FINAL_STATES:
                report.state = TASK_STATES.CANCELED
                report.save()
        _delete_incomplete_resources(task)
        task.release_resources()

    return task
Code example #2
0
File: util.py  Project: wibbit/pulpcore
def cancel(task_id):
    """
    Cancel the task identified by ``task_id``.

    Only this task is canceled; spawned tasks are left untouched. The task's
    state is also updated to 'canceled'.

    Args:
        task_id (str): The ID of the task you wish to cancel

    Raises:
        MissingResource: if a task with given task_id does not exist
    """
    try:
        task = Task.objects.get(pk=task_id)
    except Task.DoesNotExist:
        raise MissingResource(task=task_id)

    if task.state in TASK_FINAL_STATES:
        # Nothing to do for a task that already finished.
        msg = _("Task [{task_id}] already in a completed state: {state}")
        _logger.info(msg.format(task_id=task_id, state=task.state))
        return task

    redis_conn = connection.get_redis_connection()

    main_job = Job(id=str(task.pk), connection=redis_conn)
    resource_job = Job(id=str(task._resource_job_id), connection=redis_conn)

    # Flag every already-executing job so the worker kills it.
    for rq_job in (main_job, resource_job):
        if rq_job.is_started:
            redis_conn.sadd(TASKING_CONSTANTS.KILL_KEY, rq_job.get_id())

    resource_job.delete()
    main_job.delete()

    # A hack to ensure that we aren't deleting resources still being used by the workhorse
    time.sleep(1.5)

    with transaction.atomic():
        task.state = TASK_STATES.CANCELED
        for report in task.progress_reports.all():
            if report.state not in TASK_FINAL_STATES:
                report.state = TASK_STATES.CANCELED
                report.save()
        task.save()
        _delete_incomplete_resources(task)
        task.release_resources()

    _logger.info(_("Task canceled: {id}.").format(id=task_id))
    return task
Code example #3
0
File: scraperclock.py  Project: plooploops/rosterrun
def retrieve_market_scrape():
  row_count = MappedMarketResult.query.count()
  #check row count to clean up market if it's over 9000 (dbz?)
  if row_count >= 9000:
    clean_up_market()
  
  #retrieve results from redis queue
  if sched.scrapejobid is None:
    print 'No scrape job found'
    return
  
  job_id = sched.scrapejobid
  currentjob = Job(connection=conn)
  
  try:
    currentjob = currentjob.fetch(job_id, connection=conn)
    print 'scrape job found'
  except:
    print 'job not available'
    sched.scrapejobid = None
    return
  
  print 'found job %s ' % currentjob
  print 'for job id %s ' % job_id
    
  if currentjob is not None:
    if currentjob.result is not None:
      marketresults = currentjob.result
      print 'found market results %s ' % marketresults
  
      #delete existing market results
         
      #cur = MappedMarketResult.query.filter_by(g_spreadsheet_id=str(session['g_spreadsheet_id']), g_worksheet_id=str(session['g_worksheet_id'])) 
    
      #[db.session.delete(c) for c in cur]  
      #db.session.commit()
      #mapped market result havs [itemid, name, cards, price, amount, title, vendor, coords, date]
      
      print 'adding to db'
      vals = marketresults.values()
      #flattenedvals = [item for sublist in vals for item in sublist]
      daterun = datetime.now()
      for k in marketresults.keys():
        [db.session.add(MappedMarketResult(str(mr.itemid), str(mr.name), str(mr.cards), str(mr.price), str(mr.amount), str(mr.title), str(mr.vendor), str(mr.coords), str(daterun))) for mr in marketresults[k]]
     
      db.session.commit()
      print 'added to db'
      print 'removing job results'
      currentjob.delete()
      print 'finished deleting job results'
      
      update_guild_treasure_with_market()
  else: 
    print 'current job is not ready %s' % job_id
Code example #4
0
File: util.py  Project: bmbouter/pulp
def cancel(task_id):
    """
    Cancel the task that is represented by the given task_id.

    Only the task itself is canceled -- spawned tasks are untouched. The
    task's state is also updated to 'canceled'.

    :param task_id: The ID of the task you wish to cancel
    :type  task_id: basestring

    :raises MissingResource: if a task with given task_id does not exist
    """
    try:
        task = Task.objects.get(pk=task_id)
    except Task.DoesNotExist:
        raise MissingResource(task_id)

    if task.state in TASK_FINAL_STATES:
        # Nothing left to cancel once the task has completed.
        msg = _('Task [{task_id}] already in a completed state: {state}')
        _logger.info(msg.format(task_id=task_id, state=task.state))
        return

    redis_conn = connection.get_redis_connection()
    rq_job = Job(id=str(task_id), connection=redis_conn)

    # Ask the worker to kill the job if it is already executing.
    if rq_job.is_started:
        redis_conn.sadd(TASKING_CONSTANTS.KILL_KEY, rq_job.get_id())
    rq_job.delete()

    # A hack to ensure that we aren't deleting resources still being used by the workhorse
    time.sleep(1.5)

    with transaction.atomic():
        task.state = TASK_STATES.CANCELED
        task.save()
        _delete_incomplete_resources(task)

    _logger.info(_('Task canceled: {id}.').format(id=task_id))
Code example #5
0
File: scraperclock.py  Project: hc9854/rosterrun
def retrieve_market_scrape():
    # retrieve results from redis queue
    if sched.scrapejobid is None:
        print "No scrape job found"
        return

    job_id = sched.scrapejobid
    currentjob = Job(connection=conn)

    try:
        currentjob = currentjob.fetch(job_id, connection=conn)
        print "scrape job found"
    except:
        print "job not available"
        sched.scrapejobid = None
        return

    print "found job %s " % currentjob
    print "for job id %s " % job_id

    if currentjob is not None:
        if currentjob.result is not None:
            marketresults = currentjob.result
            print "found market results %s " % marketresults

            # delete existing market results

            # cur = MappedMarketResult.query.filter_by(g_spreadsheet_id=str(session['g_spreadsheet_id']), g_worksheet_id=str(session['g_worksheet_id']))

            # [db.session.delete(c) for c in cur]
            # db.session.commit()
            # mapped market result havs [itemid, name, cards, price, amount, title, vendor, coords, date]

            print "adding to db"
            vals = marketresults.values()
            # flattenedvals = [item for sublist in vals for item in sublist]
            daterun = datetime.now()
            for k in marketresults.keys():
                [
                    db.session.add(
                        MappedMarketResult(
                            str(mr.itemid),
                            str(mr.name),
                            str(mr.cards),
                            str(mr.price),
                            str(mr.amount),
                            str(mr.title),
                            str(mr.vendor),
                            str(mr.coords),
                            str(daterun),
                        )
                    )
                    for mr in marketresults[k]
                ]

            db.session.commit()
            print "added to db"
            print "removing job results"
            currentjob.delete()
            print "finished deleting job results"
    else:
        print "current job is not ready %s" % job_id