def object_factory(**kwargs):
    """Create or update a Node or Link from a property dictionary.

    If an object with the given name already exists, its matching
    attributes are updated in place; otherwise a new object is created
    (a link via link_class, anything else via object_class) and added
    to the session. The session is committed in all cases.
    """
    obj_type = kwargs['type']
    # node_class / link_class map type strings to model classes
    cls = Node if obj_type in node_class else Link
    obj = get_obj(cls, name=kwargs['name'])
    if obj:
        # update path: only set attributes the object already has
        for property, value in kwargs.items():
            if property in obj.__dict__:
                setattr(obj, property, value)
    elif obj_type in link_class:
        # links need their endpoints resolved before creation
        if 'import' in kwargs:
            # imported topologies reference endpoints by name
            source = get_obj(Node, name=kwargs.pop('source'))
            destination = get_obj(Node, name=kwargs.pop('destination'))
        else:
            # otherwise endpoints are referenced by database id
            source = get_obj(Node, id=kwargs.pop('source'))
            destination = get_obj(Node, id=kwargs.pop('destination'))
        obj = link_class[obj_type](
            source_id=source.id,
            destination_id=destination.id,
            source=source,
            destination=destination,
            **kwargs
        )
        db.session.add(obj)
    else:
        obj = object_class[obj_type](**kwargs)
        db.session.add(obj)
    db.session.commit()
    return obj
def add_to_workflow(workflow_id):
    """Attach the task posted in the form to the given workflow.

    Reads the task id from ``request.form['task']``, appends the
    workflow to the task's workflows, commits, and returns the
    serialized task as JSON.
    """
    # removed leftover debug print(request.form)
    workflow = get_obj(Workflow, id=workflow_id)
    task = get_obj(Task, id=request.form['task'])
    task.workflows.append(workflow)
    db.session.commit()
    return jsonify(task.serialized)
def add_edge(workflow_id, type, source, dest):
    """Create an edge of the given type between two tasks of a workflow
    and return the serialized edge as JSON."""
    src_task = get_obj(Task, id=source)
    dst_task = get_obj(Task, id=dest)
    workflow = get_obj(Workflow, id=workflow_id)
    edge = WorkflowEdge(type, src_task, dst_task)
    db.session.add(edge)
    workflow.edges.append(edge)
    db.session.commit()
    return jsonify(edge.serialized)
def scheduler(workflow_id=None):
    """Create a task from the posted scheduling form.

    Resolves the job, node, pool and workflow ids from the form into
    model objects, attaches the current user, and returns the new
    task serialized as JSON.
    """
    # removed leftover debug print(request.form); renamed loop variables
    # that shadowed the builtin `id`
    data = request.form.to_dict()
    data['job'] = get_obj(Job, id=data['job'])
    data['nodes'] = [
        get_obj(Node, id=node_id) for node_id in request.form.getlist('nodes')
    ]
    data['pools'] = [
        get_obj(Pool, id=pool_id) for pool_id in request.form.getlist('pools')
    ]
    data['workflow'] = get_obj(Workflow, id=workflow_id)
    data['user'] = current_user
    task = task_factory(**data)
    return jsonify(task.serialized)
def scheduler(task_type, workflow_id=None):
    """Create a task of the given type from the posted scheduling form.

    Script tasks get their scripts and nodes resolved (pool members are
    expanded into the node list); workflow tasks get the workflow
    resolved. The current user is attached and the serialized task is
    returned as JSON.
    """
    # removed leftover debug print(request.form); renamed loop variables
    # that shadowed the builtin `id`
    data = request.form.to_dict()
    if task_type in ('script_task', 'inner_task'):
        scripts = request.form.getlist('scripts')
        nodes = request.form.getlist('nodes')
        data['scripts'] = [get_obj(Script, id=script_id) for script_id in scripts]
        data['nodes'] = [get_obj(Node, id=node_id) for node_id in nodes]
        # pools contribute all of their member nodes to the task
        for pool_id in request.form.getlist('pools'):
            data['nodes'].extend(get_obj(Pool, id=pool_id).nodes)
    if task_type in ('workflow_task', 'inner_task'):
        data['workflow'] = get_obj(Workflow, id=workflow_id)
    data['user'] = current_user
    task = task_factory(task_type, **data)
    return jsonify(task.serialized)
def get_logs(node_id):
    """Return, as one newline-joined JSON string, the content of every
    log whose source matches the node's IP address."""
    node = get_obj(Node, id=node_id)
    matching = (
        log.content
        for log in Log.query.all()
        if log.source == node.ip_address
    )
    return jsonify('\n'.join(matching))
def job(self, runtime):
    """Run the workflow as a breadth-first traversal from its start task.

    Each layer of tasks is executed; successors are chosen by following
    'success' or 'failure' edges depending on each task's outcome.
    Returns (overall result, per-task logs) and stores the logs under
    the given runtime key.
    """
    # removed leftover debug prints (task, neighbor, logs, self.logs)
    start_task = get_obj(Task, id=self.workflow.start_task)
    if not start_task:
        return False, {runtime: 'No start task in the workflow.'}
    layer, visited = {start_task}, set()
    result, logs = True, {}
    while layer:
        new_layer = set()
        for task in layer:
            visited.add(task)
            success, task_logs = task.job(str(datetime.now()))
            # one failed task makes the whole workflow a failure,
            # but the traversal continues along failure edges
            if not success:
                result = False
            edge_type = 'success' if success else 'failure'
            for neighbor in task.task_neighbors(self.workflow, edge_type):
                if neighbor not in visited:
                    new_layer.add(neighbor)
            logs[task.name] = task_logs
            sleep(task.waiting_time)
        layer = new_layer
    self.logs[runtime] = logs
    db.session.commit()
    return result, logs
def compare_logs(task_id):
    """Return the node names and available log versions of a task."""
    task = get_obj(Task, id=task_id)
    return jsonify({
        'nodes': [node.name for node in task.nodes],
        'versions': list(task.logs),
    })
def create_script(script_type): script = get_obj(Script, name=request.form['name']) # convert ImmutableMultiDict to an actual dictionnary form = dict(request.form) if not script: if script_type in ('netmiko_config', 'napalm_config'): if form['content_type'][0] != 'simple': file = request.files['file'] filename = secure_filename(file.filename) if allowed_file(filename, {'yaml', 'yml'}): parameters = yaml_load(file.read()) template = Template(form['content'][0]) form['content'] = [''.join(template.render(**parameters))] elif script_type == 'file_transfer': source_file_name = form['source_file'][0] source_file_path = join( current_app.path, 'file_transfer', source_file_name ) form['source_file'] = [source_file_path] script = script_factory(script_type, **form) db.session.add(script) db.session.commit() return jsonify(script.serialized)
def create_script(script_type):
    """Create or update a script of the given type from the posted form.

    An existing script (matched by name) is updated through
    script_factory; otherwise a new script instance of the appropriate
    class is built, with optional Jinja2 rendering for config scripts.
    """
    script = get_obj(Script, name=request.form['name'])
    if script:
        # existing script: delegate the update to the factory
        script_factory(script_type, **request.form)
        db.session.commit()
    elif script_type in ('netmiko_config', 'napalm_config'):
        # retrieve the raw script: we will use it as-is or update it
        # depending on the type of script (jinja2-enabled template or not)
        real_content = request.form['content']
        if request.form['content_type'] != 'simple':
            file = request.files['file']
            filename = secure_filename(file.filename)
            if allowed_file(filename, {'yaml', 'yml'}):
                # NOTE(review): `load` on an uploaded YAML file — verify
                # this is yaml.safe_load; plain yaml.load is unsafe on
                # untrusted input
                parameters = load(file.read())
                template = Template(real_content)
                real_content = template.render(**parameters)
        # dispatch on script type to the matching model class
        script = {
            'netmiko_config': NetmikoConfigScript,
            'napalm_config': NapalmConfigScript
        }[script_type](real_content, **request.form)
    elif script_type == 'file_transfer':
        source_file_name = request.form['source_file']
        source_file_path = join(
            current_app.path,
            'file_transfer',
            source_file_name
        )
        script = FileTransferScript(source_file_path, **request.form)
    else:
        script = {
            'ansible_playbook': AnsibleScript,
            'napalm_getters': NapalmGettersScript,
            'netmiko_validation': NetmikoValidationScript
        }[script_type](**request.form)
    db.session.add(script)
    db.session.commit()
    return jsonify({})
def get_script(script_type, script_id):
    """Return the type-specific properties of a script as JSON."""
    script = get_obj(Script, id=script_id)
    return jsonify({
        prop: getattr(script, prop)
        for prop in type_to_properties[script_type]
    })
def save_positions():
    """Persist the (x, y) position of each task posted as JSON.

    The request body maps task ids to ``{'x': ..., 'y': ...}`` dicts.
    """
    # removed leftover debug prints of the payload
    for task_id, position in request.json.items():
        task = get_obj(Task, id=task_id)
        task.x, task.y = position['x'], position['y']
    db.session.commit()
    return jsonify({})
def get_object(obj_type, obj_id):
    """Return the public properties of a node or link, stringified,
    as a JSON object."""
    if obj_type == 'node':
        cls, properties = Node, node_public_properties
    else:
        cls, properties = Link, link_public_properties
    obj = get_obj(cls, id=obj_id)
    return jsonify({prop: str(getattr(obj, prop)) for prop in properties})
def get_diff(task_id, v1, v2, n1, n2, s1, s2):
    """Diff two stored log entries of a task.

    The entries are selected by version (v1/v2), script (s1/s2) and
    node (n1/n2); the response carries both line lists plus the
    SequenceMatcher opcodes describing how to turn one into the other.
    """
    task = get_obj(Task, id=task_id)
    first = str_dict(task.logs[v1][s1][n1]).splitlines()
    second = str_dict(task.logs[v2][s2][n2]).splitlines()
    matcher = SequenceMatcher(None, first, second)
    return jsonify({
        'first': first,
        'second': second,
        'opcodes': matcher.get_opcodes(),
    })
def get_object(obj_type, obj_id):
    """Return the public properties of a node or link, stringified,
    as a JSON object."""
    is_node = obj_type == 'node'
    cls = Node if is_node else Link
    properties = node_public_properties if is_node else link_public_properties
    obj = get_obj(cls, id=obj_id)
    obj_properties = {}
    for prop in properties:
        obj_properties[prop] = str(getattr(obj, prop))
    return jsonify(obj_properties)
def log_rule_factory(**kwargs):
    """Create a LogRule, or update the existing one with the same name."""
    log_rule = get_obj(LogRule, name=kwargs['name'])
    if not log_rule:
        log_rule = LogRule(**kwargs)
        db.session.add(log_rule)
    else:
        # only overwrite attributes the rule already has
        for prop, value in kwargs.items():
            if prop in log_rule.__dict__:
                setattr(log_rule, prop, value)
    db.session.commit()
    return log_rule
def workflow_factory(**kwargs):
    """Create a Workflow, or update the existing one with the same name."""
    workflow = get_obj(Workflow, name=kwargs['name'])
    if not workflow:
        workflow = Workflow(**kwargs)
        db.session.add(workflow)
    else:
        # only overwrite attributes the workflow already has
        for prop, value in kwargs.items():
            if prop in workflow.__dict__:
                setattr(workflow, prop, value)
    db.session.commit()
    return workflow
def pool_factory(**kwargs):
    """Create a Pool, or update the existing one with the same name,
    then recompute its member objects."""
    pool = get_obj(Pool, name=kwargs['name'])
    if not pool:
        pool = Pool(**kwargs)
        db.session.add(pool)
    else:
        # only overwrite attributes the pool already has
        for prop, value in kwargs.items():
            if prop in pool.__dict__:
                setattr(pool, prop, value)
    # membership depends on the pool's properties, so recompute it
    pool.compute_pool()
    db.session.commit()
    return pool
def task_factory(task_type, **kwargs):
    """Create a task of the given type, or update the existing task
    with the same name (converting date strings to datetimes)."""
    cls = task_types[task_type]
    task = get_obj(cls, name=kwargs['name'])
    if not task:
        task = cls(**kwargs)
        db.session.add(task)
    else:
        for prop, value in kwargs.items():
            # date fields arrive as strings and need conversion
            if prop in ('start_date', 'end_date') and value:
                value = task.datetime_conversion(value)
            setattr(task, prop, value)
    db.session.commit()
    return task
def user_factory(**kwargs):
    """Create a User, or update the existing one with the same name;
    passwords are stored hashed with cisco type 7."""
    user = get_obj(User, name=kwargs['name'])
    if not user:
        user = User(**kwargs)
        db.session.add(user)
    else:
        for prop, value in kwargs.items():
            # only overwrite attributes the user already has
            if prop in user.__dict__:
                if prop == 'password':
                    value = cisco_type7.hash(value)
                setattr(user, prop, value)
    db.session.commit()
    return user
def task_factory(**kwargs):
    """Create a task wrapping the given job (workflow or script), or
    update the existing task with the same name."""
    # the task class depends on what kind of job it schedules
    cls = WorkflowTask if kwargs['job'].type == 'workflow' else ScriptTask
    task = get_obj(cls, name=kwargs['name'])
    if not task:
        task = cls(**kwargs)
        db.session.add(task)
    else:
        for prop, value in kwargs.items():
            # date fields arrive as strings and need conversion
            if prop in ('start_date', 'end_date') and value:
                value = task.datetime_conversion(value)
            setattr(task, prop, value)
    db.session.commit()
    return task
def putty_connection(name):
    """Open an interactive SSH session to the named node.

    On Windows a bundled putty.exe is launched; elsewhere a
    gnome-terminal running sshpass is spawned. The current user's
    decoded password is used for authentication.
    """
    current_os, node = platform_system(), get_obj(Node, name=name)
    # NOTE(review): the password is decoded to plaintext and passed on a
    # command line below, where it is visible in the process list —
    # consider a safer credential-passing mechanism
    password = cisco_type7.decode(current_user.password)
    if current_os == 'Windows':
        path_putty = join(current_app.path, 'applications', 'putty.exe')
        ssh_connection = '{} -ssh {}@{} -pw {}'.format(
            path_putty, current_user.name, node.ip_address, password)
        # .split() breaks if any field contains spaces — TODO confirm
        # usernames/passwords are space-free
        Popen(ssh_connection.split())
    else:
        # NOTE(review): string interpolated into a shell command —
        # special characters in the password would break or inject here
        arg = "gnome-terminal -e 'bash -c \"sshpass -p {} ssh {}@{}\"'".format(
            password, current_user.name, node.ip_address)
        os_system(arg)
    return jsonify({'success': True})
def workflow_editor(workflow_id=None):
    """Render the workflow editor page.

    Populates the editor and scheduling forms with the available
    workflows, scripts, nodes and pools; if a workflow id is given,
    its serialized form is passed to the template.
    """
    # removed leftover debug print(workflow)
    workflow_editor_form = WorkflowEditorForm(request.form)
    workflow_editor_form.workflow.choices = Workflow.choices()
    workflow = get_obj(Workflow, id=workflow_id).serialized if workflow_id else None
    scheduling_form = SchedulingForm(request.form)
    scheduling_form.scripts.choices = Script.choices()
    scheduling_form.nodes.choices = Node.choices()
    scheduling_form.pools.choices = Pool.choices()
    return render_template(
        'workflow_editor.html',
        workflow_editor_form=workflow_editor_form,
        scheduling_form=scheduling_form,
        compare_form=CompareForm(request.form),
        names=pretty_names,
        workflow=workflow
    )
def workflow_editor(workflow_id=None):
    """Render the workflow editor page, with forms for adding existing
    tasks, editing the workflow and scheduling jobs."""
    workflow = get_obj(Workflow, id=workflow_id)
    add_existing_task_form = AddExistingTaskForm(request.form)
    add_existing_task_form.task.choices = Task.choices()
    workflow_editor_form = WorkflowEditorForm(request.form)
    workflow_editor_form.workflow.choices = Workflow.choices()
    scheduling_form = SchedulingForm(request.form)
    scheduling_form.job.choices = Job.choices()
    scheduling_form.nodes.choices = Node.choices()
    scheduling_form.pools.choices = Pool.choices()
    serialized = workflow.serialized if workflow_id else None
    return render_template(
        'workflow_editor.html',
        add_existing_task_form=add_existing_task_form,
        workflow_editor_form=workflow_editor_form,
        scheduling_form=scheduling_form,
        compare_form=CompareForm(request.form),
        names=pretty_names,
        workflow=serialized
    )
def script_job(task_name, runtime=None):
    """Run every script of a task against its nodes and store the logs.

    Each script is executed in parallel across the task's nodes via a
    thread pool; a task without nodes runs the script once directly.
    Results are recorded in the task's logs under the runtime key
    (or the current time if none is given).
    """
    with scheduler.app.app_context():
        job_time = runtime if runtime else str(datetime.now())
        task = get_obj(Task, name=task_name)
        # deep copy so the JSON-stored logs column is reassigned, which
        # is what makes SQLAlchemy detect the change — TODO confirm
        logs = deepcopy(task.logs)
        logs[job_time] = {}
        for script in task.scripts:
            results = {}
            if task.nodes:
                # one worker thread per node; script.job is expected to
                # write its outcome into the shared `results` dict
                pool = ThreadPool(processes=len(task.nodes))
                args = [(task, node, results) for node in task.nodes]
                pool.map(script.job, args)
                pool.close()
                pool.join()
            else:
                # node-less task: run the script once, synchronously
                results = script.job(task, results)
            logs[job_time][script.name] = results
        task.logs = logs
        db.session.commit()
def script_factory(type, **kwargs):
    """Update an existing script's properties from posted form data.

    kwargs is the (dict-converted) form MultiDict: every value is a
    list, scalar properties being singletons. Only the properties
    declared for this script type are considered.
    """
    cls = type_to_class[type]
    # form values are singleton lists, hence the [0]
    script = get_obj(cls, name=kwargs['name'][0])
    for property in type_to_properties[type]:
        # type is not in kwargs, we leave it unchanged
        if property not in kwargs:
            continue
        # unchecked tickboxes do not yield any value when posting a form,
        # and they yield "y" if checked
        if property in boolean_properties:
            value = property in kwargs
        # if the property is not a list, we unpack it as it is returned
        # as a singleton in the ImmutableMultiDict
        elif property not in list_properties:
            value, = kwargs[property]
        else:
            value = kwargs[property]
        setattr(script, property, value)
    db.session.commit()
def run(self, job_time):
    """Run the workflow as a breadth-first traversal from its start task.

    Successors are chosen by following 'success' or 'failure' edges
    depending on each task's outcome; each visited task's logs are
    collected under the job_time key. Returns (overall result, logs).
    """
    layer, visited = set(), set()
    start_task = get_obj(Task, id=self.start_task)
    result, workflow_logs = True, {job_time: {}}
    if start_task:
        layer.add(start_task)
    while layer:
        new_layer = set()
        for task in layer:
            visited.add(task)
            success = task.run()
            # one failed task makes the whole workflow a failure, but
            # the traversal continues along failure edges
            if not success:
                result = False
            edge_type = 'success' if success else 'failure'
            for neighbor in task.task_neighbors(edge_type):
                # visited-set guard prevents re-running tasks on cycles
                if neighbor not in visited:
                    new_layer.add(neighbor)
            workflow_logs[job_time][task.name] = task.logs
            sleep(task.waiting_time)
        layer = new_layer
    return result, workflow_logs
def script_job(task_name):
    """Run every script of a task on its nodes, record the logs and the
    overall result on the task.

    Scripts run in parallel across the nodes via a thread pool; a
    node-less task uses a single 'dummy' placeholder so the pool still
    has one worker. The task succeeds only if every (script, node) log
    entry is truthy.
    """
    with scheduler.app.app_context():
        job_time = str(datetime.now())
        task = get_obj(Task, name=task_name)
        # deep copy so the JSON-stored logs column is reassigned, which
        # is what makes SQLAlchemy detect the change — TODO confirm
        logs = deepcopy(task.logs)
        logs[job_time] = {}
        # 'dummy' placeholder keeps the pool non-empty for node-less tasks
        nodes = task.nodes if task.nodes else ['dummy']
        for script in task.scripts:
            results = {}
            # script.job is expected to write its outcome per node into
            # the shared `results` dict
            pool = ThreadPool(processes=len(nodes))
            args = [(task, node, results) for node in nodes]
            pool.map(script.job, args)
            pool.close()
            pool.join()
            logs[job_time][script.name] = results
        # overall result: every script must have succeeded on every
        # real node (the 'dummy' placeholder is skipped here)
        result = True
        for script in task.scripts:
            for node in task.nodes:
                if not logs[job_time][script.name][node.name]:
                    result = False
        task.result = result
        task.logs = logs
        db.session.commit()
def run(self):
    """Run the workflow as a breadth-first traversal from its start task.

    Successors are chosen by following 'success' or 'failure' edges
    depending on each task's outcome. Returns True only if every
    executed task succeeded.
    """
    # removed leftover debug prints (layer, task, success, neighbor)
    layer, visited = set(), set()
    start_task = get_obj(Task, id=self.start_task)
    result = True
    if start_task:
        layer.add(start_task)
    while layer:
        new_layer = set()
        for task in layer:
            visited.add(task)
            success = task.run()
            # one failed task makes the whole workflow a failure, but
            # the traversal continues along failure edges
            if not success:
                result = False
            edge_type = 'success' if success else 'failure'
            for neighbor in task.task_neighbors(edge_type):
                # visited-set guard prevents re-running tasks on cycles
                if neighbor not in visited:
                    new_layer.add(neighbor)
            sleep(task.waiting_time)
        layer = new_layer
    return result
def delete_pool(pool_id):
    """Delete the pool with the given id and return its name as JSON."""
    pool = get_obj(Pool, id=pool_id)
    # capture the name BEFORE delete/commit: SQLAlchemy expires and
    # expunges deleted instances on commit, so accessing pool.name
    # afterwards can raise ObjectDeletedError
    name = pool.name
    db.session.delete(pool)
    db.session.commit()
    return jsonify(name)