def recipe_run(request, uid):
    """
    View used to execute recipes and start a 'Queued' job.

    On a valid POST the job is created from the form's cleaned data,
    spooled, and the user is redirected to the recipe's results tab.
    Invalid forms flash the errors; any non-success path (including a
    plain GET) redirects back to the recipe's run tab.
    """
    recipe = Analysis.objects.filter(uid=uid).first()

    # Form submission.
    if request.method == "POST":
        form = forms.RecipeInterface(request=request, analysis=recipe,
                                     json_data=recipe.json_data,
                                     data=request.POST, files=request.FILES)
        # The form validation will authorize the job.
        if form.is_valid():
            # Create the job from the recipe and incoming json data.
            job = auth.create_job(analysis=recipe, user=request.user,
                                  fill_with=form.cleaned_data)
            # Spool via UWSGI or start it synchronously.
            tasks.execute_job.spool(job_id=job.id)
            url = reverse("recipe_view", kwargs=dict(uid=recipe.uid)) + "#results"
            return redirect(url)
        else:
            messages.error(request, form.errors)

    # Fallback for invalid forms AND non-POST access; previously a non-POST
    # request fell through and the view returned None (a server error).
    url = reverse("recipe_view", kwargs=dict(uid=recipe.uid)) + "#run"
    return redirect(url)
def job_rerun(request, uid):
    """
    Re-run an existing job: create a fresh job from the same recipe and
    the same JSON parameters, spool it, then redirect to the job list.
    """
    # Get the job being re-run.
    job = Job.objects.filter(uid=uid).first()

    # Optional redirect target on failure (renamed: `next` shadows the builtin).
    next_url = request.GET.get('next')

    # Get the recipe
    recipe = job.analysis

    # Get the job JSON
    json_data = job.json_data

    # Validate users can run the recipe.
    valid, msg = auth.validate_recipe_run(user=request.user, recipe=recipe)
    if not valid:
        messages.error(request, msg)
        redir = next_url or reverse('job_view', kwargs=dict(uid=job.uid))
        return redirect(redir)

    # Create a new job
    job = auth.create_job(analysis=recipe, user=request.user, json_data=json_data)

    # Spool via UWSGI or run it synchronously.
    tasks.execute_job.spool(job_id=job.id)

    return redirect(reverse('job_list', kwargs=dict(uid=job.project.uid)))
def recipe_run(request, uid):
    """
    View used to execute recipes and start a 'Queued' job.
    """
    analysis = Analysis.objects.filter(uid=uid).first()
    project = analysis.project

    if request.method == "POST":
        # Bind the interface form to the submitted data; validation
        # also authorizes the job.
        form = forms.RecipeInterface(request=request, analysis=analysis,
                                     json_data=analysis.json_data,
                                     data=request.POST, files=request.FILES)
        if form.is_valid():
            # Build the job from the recipe plus the submitted parameters.
            job = auth.create_job(analysis=analysis, user=request.user,
                                  fill_with=form.cleaned_data)
            # Spool the job right away if UWSGI exists.
            if tasks.HAS_UWSGI:
                tasks.execute_job.spool(job_id=job.id)
            return redirect(reverse("job_list", kwargs=dict(uid=project.uid)))
    else:
        # Unbound form for GET requests, pre-filled with a default result name.
        form = forms.RecipeInterface(request=request, analysis=analysis,
                                     json_data=analysis.json_data,
                                     initial=dict(name=f"Results for: {analysis.name}"))

    # Invalid POSTs fall through here with the bound form so errors render.
    runnable = auth.authorize_run(user=request.user, recipe=analysis)
    context = dict(project=project, analysis=analysis, form=form,
                   is_runnable=runnable, activate='Run Recipe')
    context.update(get_counts(project))
    return render(request, 'recipe_run.html', context)
def setUp(self):
    """Create an owner, a public project, and data/recipe/job fixtures."""
    logger.setLevel(logging.WARNING)

    # Staff owner with a randomized username.
    self.username = f"tested{get_uuid(10)}"
    self.owner = models.User.objects.create(username=self.username, is_staff=True,
                                            email="*****@*****.**")
    self.owner.set_password("tested")
    self.owner.save()

    # Public project owned by the user above.
    self.project = auth.create_project(user=self.owner, name="Test project",
                                       privacy=models.Project.PUBLIC, uid="tested")

    # Attach this source file as data, plus a minimal recipe and its job.
    data_obj = auth.create_data(project=self.project, path=__file__)
    recipe = auth.create_analysis(project=self.project, json_text='',
                                  template="# Add code here.")
    self.job = auth.create_job(analysis=recipe)

    # URL keyword-argument bundles reused across the tests.
    self.proj_params = dict(uid=self.project.uid)
    self.analysis_params = dict(uid=recipe.uid)
    self.recipes_id_param = dict(id=recipe.id)
    self.data_params = dict(uid=data_obj.uid)
    self.job_params = dict(uid=self.job.uid)
    self.data_file_params = dict(uid=data_obj.uid, path="foo.txt")
    self.job_file_params = dict(uid=self.job.uid, path="foo.txt")
def setUp(self):
    """Create an owner, a project, an authorized recipe, snippets, and a job."""
    logger.setLevel(logging.WARNING)

    # Set up generic owner
    self.owner = models.User.objects.create_user(
        username=f"tested{get_uuid(10)}", email="*****@*****.**")
    self.owner.set_password("tested")
    # Persist the hashed password; set_password only changes it in memory
    # (the sibling setUp methods in this suite save after set_password).
    self.owner.save()

    self.project = auth.create_project(user=self.owner, name="tested",
                                       text="Text", summary="summary",
                                       uid="tested")

    self.recipe = auth.create_analysis(project=self.project, json_text="{}",
                                       template="",
                                       security=models.Analysis.AUTHORIZED)

    # Snippet fixtures used by the command/snippet tests.
    self.snippet_type = models.SnippetType.objects.create(
        name='Snippet type', owner=self.owner)

    self.snippet = models.Snippet.objects.create(
        command='ls -l', type=self.snippet_type,
        help_text='List files in directory', owner=self.owner)

    self.job = auth.create_job(analysis=self.recipe, user=self.owner)
    self.job.save()
def test_scheduler(self):
    """
    Test task scheduler used to run queued jobs.
    """
    # NOTE(review): `scheduler` is imported but never invoked, and this test
    # makes no assertions — it only queues a job. Presumably the scheduler
    # should be called on the queued job and its resulting state checked;
    # confirm the intent and complete the test.
    from biostar.recipes.tasks import scheduler

    # Create a fresh job and mark it as queued for the scheduler to pick up.
    self.job = auth.create_job(analysis=self.recipe, user=self.owner)
    self.job.state = models.Job.QUEUED
    self.job.save()
def create_job(recipe):
    """
    Create a queued job for a recipe.

    Fills every PROJECT-sourced parameter in the recipe's JSON with the
    matching Data object from the recipe's project. If any named data is
    missing, an error is logged and no job is created.
    """
    json_data = recipe.json_data

    # Resolve each PROJECT-sourced parameter against the project's data.
    for key, obj in json_data.items():
        if obj.get("source") != "PROJECT":
            continue
        name = obj.get('value', '')
        data = Data.objects.filter(project=recipe.project, name=name).first()
        if not data:
            # Abort: the job would reference data that does not exist.
            logger.error(
                f"Job not created! Missing data:{name} in analysis:{recipe.name}"
            )
            return
        data.fill_dict(obj)

    # All parameters resolved; create the job (redundant trailing return removed).
    auth.create_job(analysis=recipe, json_data=json_data)
def setUp(self):
    """Create an owner, a project, an authorized recipe, and a job fixture."""
    logger.setLevel(logging.WARNING)

    # Set up generic owner
    self.owner = models.User.objects.create_user(
        username=f"tested{util.get_uuid(10)}", email="*****@*****.**")
    self.owner.set_password("tested")
    # Persist the hashed password; set_password only changes it in memory
    # (the sibling setUp methods in this suite save after set_password).
    self.owner.save()

    self.project = auth.create_project(user=self.owner, name="tested",
                                       text="Text", summary="summary",
                                       uid="tested")

    self.recipe = auth.create_analysis(project=self.project, json_text="{}",
                                       template="",
                                       security=models.Analysis.AUTHORIZED)

    self.job = auth.create_job(analysis=self.recipe, user=self.owner)
    self.job.save()
def handle(self, *args, **options):
    """
    Management-command entry point: create or update a recipe (Analysis)
    in a project from a --json spec plus a --template script, optionally
    attaching an image and creating a job for it.
    """
    # `json_fname` renamed from `json` to avoid shadowing the module name.
    json_fname = options['json']
    pid = options['id']
    uid = options["uid"]
    template_fname = options['template']
    jobs = options['jobs']
    update = options["update"]
    verbosity = int(options['verbosity'])

    if verbosity > 1:
        logger.setLevel(logging.DEBUG)
        logger.info(f"level={verbosity}")

    # Require JSON and templates to exist.
    if not (json_fname and template_fname):
        logger.error("This command requires --json and a --template to be set")
        return

    # Get the target project by numeric id, falling back to uid.
    if pid:
        project = Project.objects.filter(id=pid).first()
    else:
        project = Project.objects.filter(uid=uid).first()

    # Invalid project specified.
    if not project:
        logger.error(f'No project with id={pid} , uid={uid}')
        return

    # JSON file does not exist.
    if not os.path.isfile(json_fname):
        logger.error(f'No file found for --json={json_fname}')
        return

    # Template file does not exist.
    if not os.path.isfile(template_fname):
        logger.error(f'No file found for --template={template_fname}')
        return

    try:
        # Parse the json_text into json_data. Context manager ensures the
        # file handle is closed even if parsing raises.
        with open(json_fname) as fp:
            json_text = fp.read()
        json_path = os.path.dirname(json_fname)
        json_data = hjson.loads(json_text)
    except Exception as exc:
        logger.exception(f"JSON exception in file: {json_fname}\n{exc}")
        return

    try:
        # Read the recipe script template.
        with open(template_fname) as fp:
            template = fp.read()
    except Exception as exc:
        logger.exception(f"Template exception: {exc}")
        return

    try:
        # All recipe metadata lives under "settings"; hoist it once instead
        # of repeating the lookup for every field.
        settings = json_data.get("settings", {})
        name = settings.get("name", "")
        text = textwrap.dedent(settings.get("help", ""))
        # Note: `uid` is deliberately overwritten with the recipe uid here.
        uid = settings.get("uid", "")
        image = settings.get("image", "")
        summary = settings.get("summary", "")

        analysis = auth.create_analysis(project=project, uid=uid,
                                        json_text=json_text, summary=summary,
                                        template=template, name=name, text=text,
                                        security=Analysis.AUTHORIZED,
                                        update=update)

        # Load the image if specified.
        if image:
            image_path = os.path.join(json_path, image)
            if os.path.isfile(image_path):
                # Close the stream once the image has been saved.
                with open(image_path, 'rb') as stream:
                    analysis.image.save(image, stream, save=True)
                logger.info(f"Image path: {image_path}")
            else:
                logger.error(f"Skipping invalid image path: {image_path}")

        if jobs:
            # When creating a job automatically for data in projects
            # it will try to match the value of the parameter to the data name.
            missing_name = ''

            for key, obj in json_data.items():
                if obj.get("source") != "PROJECT":
                    continue
                name = obj.get('value', '')
                data = Data.objects.filter(project=project, name=name).first()
                if not data:
                    missing_name = name
                    break
                data.fill_dict(obj)

            if missing_name:
                logger.error(
                    f"Job not created! Missing data:{missing_name} in analysis:{analysis.name}"
                )
            else:
                auth.create_job(analysis=analysis, json_data=json_data)

    except Exception as exc:
        logger.exception(f"Error: {exc}")
        return