Example #1
from flask import redirect, request, session

# project-level handles (db, models, jobs, celeryTaskFactoryImportMODS)
# are assumed to be imported at module scope
def import_fire():
	# get new job num
	job_num = jobs.jobStart()

	# get username
	username = session['username']	

	# prepare job_package for boutique celery wrapper
	job_package = {
		'job_num':job_num,
		'task_name':"importMODS_worker",
		'form_data':request.form
	}	

	# pass along binary uploaded data if included in job task
	if 'upload' in request.files and request.files['upload'].filename != '':
		job_package['upload_data'] = request.files['upload'].read()

	# fire the Celery task and capture its id
	celery_task_id = celeryTaskFactoryImportMODS.delay(job_num, job_package)

	# send job to user_jobs SQL table
	db.session.add(models.user_jobs(job_num, username, celery_task_id, "init", "importMODS"))	
	db.session.commit()		

	print "Started job #",job_num,"Celery task #",celery_task_id
	return redirect("/userJobs")
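
All three handlers mint a job number with jobs.jobStart() before queuing any work. That helper isn't shown on this page; the comment in Example #2 says it pulls from an incrementing Redis counter, so a minimal sketch might look like the following (the connection setup and the 'job_num_counter' key name are assumptions, not the project's actual code):

import redis

# assumed Redis connection, standing in for redisHandles.r_job_handle
r_job_handle = redis.StrictRedis(host='localhost', port=6379, db=0)

def jobStart():
	# INCR is atomic, so concurrent requests each receive a unique job number;
	# the counter key name here is hypothetical
	return r_job_handle.incr('job_num_counter')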
Example #2
import pickle
import time

from flask import redirect, session

# project-level handles (actions, db, jobs, models, redisHandles, utilities)
# are assumed to be imported at module scope
def fireTaskWorker(task_name, task_inputs_key):

	print("Starting task request...")

	# get job_package from Redis and burn it (read-once semantics)
	job_package = pickle.loads(redisHandles.r_job_handle.get(task_inputs_key))
	redisHandles.r_job_handle.delete(task_inputs_key)

	# check that the task exists among available actions, else abort
	try:
		task_handle = getattr(actions, task_name)
	except AttributeError:
		return utilities.applicationError("Task not found, or user not authorized to perform it. Return to <a href='/userPage'>user page</a>.")
	
	# get username from session (will pull from user auth session later)
	username = session['username']	

	# get user-selected objects
	stime = time.time()
	userSelectedPIDs = models.user_pids.query.filter_by(username=username, status=True)
	PIDlist = [PID.PID for PID in userSelectedPIDs]
	etime = time.time()
	ttime = (etime - stime) * 1000
	print("Built PID list from SQL query in", ttime, "ms")

	# instantiate job number (pulled from an incrementing Redis counter; MySQL under consideration)
	job_num = jobs.jobStart()
	
	# begin job and set estimated task count (call .count() once; it issues a query each time)
	est_count = userSelectedPIDs.count()
	print("Anticipating", est_count, "tasks....")
	redisHandles.r_job_handle.set("job_{job_num}_est_count".format(job_num=job_num), est_count)

	# augment job_package
	job_package['job_num'] = job_num

	# send to celeryTaskFactory in actions.py:
	# iterates through PIDs and creates a secondary async task for each,
	# passing username, task_name, and the job_package containing all the update handles;
	# 'celery_task_id' below is the Celery task key under which all eventual child tasks live
	celery_task_id = actions.celeryTaskFactory.delay(job_num=job_num, task_name=task_name, job_package=job_package, PIDlist=PIDlist)

	# send job to user_jobs SQL table
	db.session.add(models.user_jobs(job_num, username, celery_task_id, "init", task_name))	
	db.session.commit() 

	print "Started job #",job_num,"Celery task #",celery_task_id	
	return redirect("/userJobs")
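
The celeryTaskFactory task itself isn't shown in these examples, but the comment block above describes its shape: a parent task that iterates the PID list and fires a secondary async task per PID. A minimal sketch under those assumptions follows; the celery_app instance, broker URL, and the runTaskWorker child task are hypothetical names, not the project's real API:

from celery import Celery

# hypothetical Celery app; the real project wires up its own broker/backend
celery_app = Celery('tasks', broker='redis://localhost:6379/0')

@celery_app.task
def celeryTaskFactory(job_num, task_name, job_package, PIDlist):
	# fan out: one child task per user-selected PID
	for PID in PIDlist:
		# .delay() serializes arguments at publish time, so each child
		# gets its own snapshot of job_package
		job_package['PID'] = PID
		runTaskWorker.delay(task_name, job_package)

@celery_app.task
def runTaskWorker(task_name, job_package):
	# child task: resolve task_name against available actions and
	# process the single PID in job_package (left abstract here)
	pass

Calling celeryTaskFactory.delay(...) returns an AsyncResult, which is what the handler above stores as celery_task_id.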
Example #3
from flask import redirect, request, session

# project-level handles (db, models, jobs, celeryTaskFactoryBagIngest)
# are assumed to be imported at module scope
def bagIngest_router():
	# get new job num
	job_num = jobs.jobStart()

	# get username
	username = session['username']			

	# prepare job_package for boutique celery wrapper
	job_package = {
		'job_num':job_num,
		'task_name':"bagIngest_worker",
		'form_data':request.form
	}	

	# fire the Celery task and capture its id
	celery_task_id = celeryTaskFactoryBagIngest.delay(job_num, job_package)

	# send job to user_jobs SQL table
	db.session.add(models.user_jobs(job_num, username, celery_task_id, "init", "singleBagItIngest"))	
	db.session.commit()		

	print "Started job #",job_num,"Celery task #",celery_task_id
	return redirect("/userJobs")
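
Each example finishes by persisting a models.user_jobs row with five positional arguments: job_num, username, celery_task_id, status, and task_name. The model itself isn't shown on this page; a minimal Flask-SQLAlchemy sketch consistent with that call signature might be (column types and lengths are guesses):

from flask_sqlalchemy import SQLAlchemy

db = SQLAlchemy()

class user_jobs(db.Model):
	id = db.Column(db.Integer, primary_key=True)
	job_num = db.Column(db.Integer)
	username = db.Column(db.String(120))
	celery_task_id = db.Column(db.String(255))
	status = db.Column(db.String(32))
	task_name = db.Column(db.String(120))

	def __init__(self, job_num, username, celery_task_id, status, task_name):
		self.job_num = job_num
		self.username = username
		# the handlers above pass the AsyncResult from .delay();
		# str() on an AsyncResult yields the task id
		self.celery_task_id = str(celery_task_id)
		self.status = status
		self.task_name = task_name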