def _import_job(self, info, cluster, id, updated, callback):
    """
    Import an individual Job; this is the actual work function.  Jobs that
    have a complete state (success or error) are updated.  All other jobs
    are ignored since job state is never cached.

    @param info - info from ganeti
    @param cluster - cluster this job is on
    @param id - job_id for job
    @param updated - counter object
    @param callback - callback fired when method is complete.
    """
    print 'importing >>> : ', info, cluster, id
    info = json.loads(info)
    if any((op['OP_ID'] in IMPORTABLE_JOBS for op in info['ops'])):
        # get related model and object
        op = info['ops'][0]
        model, hostname_key = IMPORTABLE_JOBS[op['OP_ID']]
        hostname = op[hostname_key]
        base = model.objects.filter(hostname=hostname)
        # Cluster has no cluster relation of its own; only filter other models
        if model is not Cluster:
            base = base.filter(cluster=cluster)
        (obj_id,) = base.values_list('pk', flat=True)

        # create job
        job = Job(cluster=cluster, job_id=id, obj_type=model, obj_id=obj_id)
        job.cleared = info['status'] in COMPLETE_STATUS
        job.info = info
        job.save()
        updated += 1

    callback(id)
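# NOTE (assumption, not defined in this section): _import_job above and
# _update_job below both rely on two module-level constants defined elsewhere.
# IMPORTABLE_JOBS is presumably a mapping from ganeti op codes to
# (model, hostname_key) pairs, e.g.
#     {'OP_INSTANCE_STARTUP': (VirtualMachine, 'instance_name'), ...}
# and COMPLETE_STATUS is presumably the collection of terminal job states,
# e.g. ('success', 'error').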
def _update_job(self, info, cluster, id, updated, callback):
    """
    Update an individual Job; this is the actual work function.  Jobs that
    have a complete state (success or error) are updated.  All other jobs
    are ignored since job state is never cached.

    @param info - info from ganeti
    @param cluster - cluster this job is on
    @param id - job_id for job
    @param updated - counter object
    @param callback - callback fired when method is complete.
    """
    if info['status'] in COMPLETE_STATUS:
        parsed = Job.parse_persistent_info(info)
        Job.objects.filter(job_id=id).update(
            serialized_info=cPickle.dumps(info), **parsed)

        # get related model and query the object.  Loading it will trigger
        # any logic in check_job_status
        op = info['ops'][0]
        model, hostname_key = IMPORTABLE_JOBS[op['OP_ID']]
        hostname = op[hostname_key]
        # Cluster has no cluster relation of its own; only filter other models
        if model is not Cluster:
            base = model.objects.filter(cluster=cluster)
        else:
            base = model.objects.all()
        obj = base.get(hostname=hostname)
        updated += 1

    callback(id)
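# A minimal sketch (hypothetical, not part of the original code) of how the
# two workers above might be driven for a cluster.  It assumes a ganeti RAPI
# client exposing GetJobs() and GetJobStatus(job_id), and that `updated` is a
# mutable counter and `callback` a per-job completion hook, as in the worker
# signatures above.
def _refresh_jobs(self, rapi, cluster, updated, callback):
    # job ids already cached locally only need their status refreshed;
    # anything else is a candidate for import
    cached = set(Job.objects.filter(cluster=cluster)
                            .values_list('job_id', flat=True))
    for id in rapi.GetJobs():
        info = rapi.GetJobStatus(id)
        if id in cached:
            # _update_job expects already-parsed job info
            self._update_job(info, cluster, id, updated, callback)
        else:
            # _import_job parses the raw JSON string itself
            self._import_job(json.dumps(info), cluster, id, updated, callback)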