def extract(request):
    if request.method == 'POST':  # If the form has been submitted...
        form = ExtractForm(request.POST)  # A form bound to the POST data
        if form.is_valid():  # All validation rules pass
            tasks.run("s3://%(bucket)s%(path)s" % form.cleaned_data,
                      form.cleaned_data['frames'],
                      form.cleaned_data['threshold'])
            return HttpResponseRedirect('/extract')  # Redirect after POST
    else:
        form = ExtractForm()  # An unbound form

    return render(request, 'extract.html', {'form': form})
def do_step(requested_time):
    # TODO Draw "loading" bar?
    next_time = tasks.next_time()
    while next_time < requested_time:
        requested_time -= next_time
        delay(next_time)
        tasks.wait_time(next_time)
        tasks.run()
        redraw()
        next_time = tasks.next_time()
    delay(requested_time)
    tasks.wait_time(requested_time)
    tasks.run(tasks.THINK_PATIENCE)
    redraw()
def init(path):
    """
    Initialize Terraform in a given path.
    """
    if not is_initialized(path):
        return tasks.run(f"cd {path} && terraform init -no-color")
    return tasks.success()
def provision_task(host, phases):
    """
    Executes the provisioning in a given host.
    """
    logging.info(f"[{host}] <- Provision launching...")

    #
    # Execute provisioning
    #
    for dir, option, phase in phases:
        res = ssh.run(
            "root", "linux", host,
            f"sh /{dir}/salt/provision.sh -{option} -l /var/log/provision.log")
        if tasks.has_failed(res):
            logging.info(f"[{host}] <- phase {phase} failed")
            break
        else:
            logging.info(f"[{host}] <- phase {phase} executed")

    #
    # Independently of the success of the provisioning process, cat the logs
    #
    destiny = f"./{host}.tmp"

    rcopy = ssh.copy_from_host("root", "linux", host,
                               "/var/log/provision.log", destiny)
    if tasks.has_succeeded(rcopy):
        with open(destiny, "r") as f:
            logging.debug(f"[{host}] provision logs =\n{f.read()}")

    rcopy = ssh.copy_from_host("root", "linux", host,
                               "/var/log/salt/minion", destiny)
    if tasks.has_succeeded(rcopy):
        with open(destiny, "r") as f:
            logging.debug(f"[{host}] provision minion logs =\n{f.read()}")

    tasks.run(f"rm -f {destiny}")

    #
    # Log global result
    #
    if tasks.has_succeeded(res):
        logging.info(f"[{host}] <- provisioning success")
    else:
        logging.error(
            f"[{host}] <- provisioning FAILED, continue provisioning with => deploy.py provision {host} --from={phase}"
        )

    return res
def task_worker(params, send_end):
    task_name = params.get('task_name')
    task_params = params.get('params')
    email = params.get('email')
    task_object = None

    try:
        task_object = create_task(params)
    except Exception as error:
        result = {
            "status": "ERROR",
            "error_code": 102,
            "error_msg": "Error writing the task to the database: " + str(error)
        }
        send_end.send(result)
        return
    finally:
        # Only log the id if the task entity was actually created
        if task_object is not None:
            print("Created resultTask entity with id %s" % task_object.id)

    try:
        parameters = json.loads(task_params)
        result_value = tasks.run(task_name, parameters)
        result = {"result": result_value}
        if task_object is not None and task_object.id is not None:
            update_task_results(task_object.id, result_value)
    except Exception as error:
        result = {
            "status": "ERROR",
            "error_code": 100,
            "error_msg": "Task execution error: " + str(error)
        }
    finally:
        if email is not None and len(email) > 7:
            send_result_to_mail(email, result)
        send_end.send(result)
def post(self, request, *args, **kwargs):
    url = request.data.get('url')
    if not url:
        raise RESTValidationError({'url': 'Specify a value.'})

    validate_url = URLValidator()
    try:
        validate_url(url)
    except DjangoValidationError:
        raise RESTValidationError({'url': 'Specify a valid value.'})

    job = run(IMPORT_TASK_NAME, url=url, api_access_key=str(request.auth))

    return Response(
        {
            'url': url,
            'job': {
                'id': job.id,
                'status': job.status
            }
        },
        status=201)
steps = 200
full_graph = FullGraph(5)

todo = [{
    "msg": "Running IL",
    "name": "IL",
    "partners": 0,
    "n_samples": args.n_samples,
    "fun": lambda: LJALPart1(graph=Graph(5)).n_steps(steps)
}, {
    "msg": "Running LJAL-2",
    "name": "LJAL-2",
    "partners": 2,
    "n_samples": args.n_samples,
    "fun": lambda: LJALPart1(graph=RandomGraph(5, 2)).n_steps(steps)
}, {
    "msg": "Running LJAL-3",
    "name": "LJAL-3",
    "partners": 3,
    "n_samples": args.n_samples,
    "fun": lambda: LJALPart1(graph=RandomGraph(5, 3)).n_steps(steps)
}, {
    "msg": "Running JAL",
    "name": "JAL",
    "partners": 4,
    "n_samples": args.n_samples,
    "fun": lambda: LJALPart1(graph=full_graph).n_steps(steps)
}]

tasks.run(todo, args.save_name)
def infrastructure(name):
    """
    Create infrastructure for a deployment.
    """
    #
    # Check that the deployment exists
    #
    res, path, env = utils.deployment_verify(name)
    if tasks.has_failed(res):
        logging.critical(tasks.get_stderr(res))
        return res

    #
    # Create infrastructure
    #
    logging.info("[X] Creating infrastructure...")

    # init
    logging.info("Initializing Terraform")
    res = terraform.init(path)
    if tasks.has_failed(res):
        logging.critical(tasks.get_stderr(res))
        return res
    else:
        logging.debug(tasks.get_stdout(res))

    # switch to workspace
    logging.info(f"Switching to workspace {env['name']}")
    res = terraform.workspace(path, env["name"])
    if tasks.has_failed(res):
        logging.error(tasks.get_stderr(res))
        return res
    else:
        logging.debug(tasks.get_stdout(res))

    # apply
    logging.info("Executing plan")
    res = terraform.apply(path)
    if tasks.has_failed(res):
        logging.critical(tasks.get_stderr(res))
        return res
    else:
        logging.debug(tasks.get_stdout(res))

    logging.info("OK\n")

    #
    # Get terraform outputs
    #
    logging.info("[X] Adding terraform outputs to environment...")

    # capture output
    logging.info("Capturing output")
    res = terraform.output(path)
    if tasks.has_failed(res):
        logging.critical(tasks.get_stderr(res))
        return res
    else:
        logging.debug(tasks.get_stdout(res))

    # load as json
    terraform_json = json.loads(tasks.get_stdout(res))

    # translate "a_b = v" outputs to env[terraform][a][b] = v
    logging.info("Translating output")
    for k, v in terraform_json.items():
        if v["value"]:
            key, _, subkey = k.partition("_")
            env["terraform"][key][subkey] = v["value"]

    # save enriched environment data
    utils.environment_save(name, **env)
    logging.info("Updated environment")
    logging.debug(f"{json.dumps(env, indent = 4)}\n")

    logging.info("OK\n")

    #
    # Copy provision files
    #
    logging.info("[X] Copying provision files...")

    # render grains files for nodes using the environment
    for index in range(0, int(env["terraform"]["node"]["count"])):
        utils.template_render(utils.path_templates(env["provider"]),
                              "node.grains.j2",
                              path,
                              index=index,
                              **env)
        res = tasks.run(
            f"cd {path} && mv node.grains node-0{index + 1}.grains")
        if tasks.has_failed(res):
            logging.critical(tasks.get_stderr(res))
            return res
        logging.info(f"Rendered node-0{index + 1}.grains")
        with open(f"{path}/node-0{index + 1}.grains", "r") as f:
            logging.debug(f"{f.read()}")

    # if there is an iscsi device, render the grains file for iscsi using the environment
    if "public_ip" in env["terraform"]["iscsi"]:
        utils.template_render(utils.path_templates(env["provider"]),
                              "iscsi.grains.j2",
                              path,
                              **env)
        logging.info("Rendered iscsi.grains")
        with open(f"{path}/iscsi.grains", "r") as f:
            logging.debug(f"{f.read()}")

    # if there is a monitor device, render the grains file for monitor using the environment
    if "public_ip" in env["terraform"]["monitor"]:
        utils.template_render(utils.path_templates(env["provider"]),
                              "monitor.grains.j2",
                              path,
                              **env)
        logging.info("Rendered monitor.grains")
        with open(f"{path}/monitor.grains", "r") as f:
            logging.debug(f"{f.read()}")

    logging.info("OK\n")

    return tasks.success()
def output(path):
    """
    Get Terraform output as json.
    """
    return tasks.run(f"cd {path} && terraform output -json")
def destroy(path):
    """
    Launch Terraform and eliminate all infrastructure.
    """
    return tasks.run(f"cd {path} && terraform destroy -auto-approve -no-color")
def apply(path):
    """
    Launch Terraform and apply the changes.
    """
    return tasks.run(f"cd {path} && terraform apply -auto-approve -no-color")
def workspace(path, workspace):
    """
    Switch to a new Terraform workspace.
    """
    return tasks.run(
        f"cd {path} && terraform workspace new {workspace} -no-color")
def run(user, password, host, command):
    """
    Execute a command on a remote host.
    """
    remote_command = f"sshpass -p {password} ssh -o StrictHostKeyChecking=no {user}@{host} {command}"
    return tasks.run(remote_command)
def copy_from_host(user, password, host, origin, destination):
    """
    Copy a file or directory from a remote host to a local destination.
    """
    command = f"sshpass -p {password} scp -o StrictHostKeyChecking=no -r {user}@{host}:{origin} {destination}"
    return tasks.run(command)
def query(self, start_at, end_at, quality=None):
    if self.status == 'not downloaded':
        tasks.run(self.number, start_at, end_at, quality)