def cmd(self):
        """批量任务"""
        # print("run multi task ...")
        task_obj = models.Task.objects.create(
            # task_type = self.task_data.get('task_type'),
            task_type=0,
            account=self.request.user.account,
            content=self.task_data.get('cmd'),
            # host_user_binds =
        )
        # task_obj.host_user_binds.add(*self.task_data.get('selected_host_ids'))
        tasklog_objs = []
        host_ids = set(self.task_data.get('selected_host_ids'))
        for host_id in host_ids:
            tasklog_objs.append(
                models.TaskLog(task_id=task_obj.id,
                               host_user_bind_id=host_id,
                               status=3))
        models.TaskLog.objects.bulk_create(tasklog_objs, 100)

        # Run the task. Spawning a Thread per host here (e.g.
        # Thread(target=self.run_cmd, args=(host_id, cmd))) could leave the view
        # waiting on unfinished work, so the task runs in a fully independent
        # process started with subprocess instead.
        cmd_str = "python %s %s" % (settings.MULTI_TASK_SCRIPT, task_obj.id)
        multitask_obj = subprocess.Popen(cmd_str,
                                         shell=True,
                                         stdout=subprocess.PIPE,
                                         stderr=subprocess.PIPE)
        return task_obj.id
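The view only records the Task and its TaskLog rows and then hands the actual work to settings.MULTI_TASK_SCRIPT (multitask.py), passing the task id as the single command-line argument. A minimal sketch of what that worker script might look like, assuming a standalone Django setup; the settings module, app label, and the run_on_host helper are assumptions, not part of the original code:

# multitask.py -- hypothetical worker started by the view above
import os
import sys

import django

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "devops.settings")  # assumed settings module
django.setup()

from web import models  # assumed app label


def run_on_host(task_log, command):
    # Hypothetical placeholder: real code would connect to the bound host,
    # run `command`, and write the output/result back onto the TaskLog row.
    print("would run %r for TaskLog %s" % (command, task_log.id))


def main():
    task_id = sys.argv[1]  # task id passed by the view
    task_obj = models.Task.objects.get(id=task_id)
    # the view pre-created one TaskLog per host with status=3 (its initial status)
    for task_log in models.TaskLog.objects.filter(task_id=task_obj.id, status=3):
        run_on_host(task_log, task_obj.content)


if __name__ == "__main__":
    main()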
Example #2
    def file_transfer(self):
        """批量文件"""
        # task_data: {"task_type":"file_transfer","selected_host_ids":["5","6"],"file_transfer_type":"send","random_str":"6b2qj3r9","remote_path":"/etc/"}

        # 创建 任务记录
        task_obj = models.Task.objects.create(
            # task_type=self.task_data.get('task_type'),  0对应数据库里 task_choices 的 cmd
            task_type=1,  # 1: file_transfer
            account=self.request.user.account,
            content=json.dumps(
                self.task_data
            ),  # 这里直接把前端发来的 data 序列化并写入数据库:{"task_type": "file_transfer", "selected_host_ids": ["1", "2", "3", "4", "5"], "file_transfer_type": "send", "random_str": "4uqv379n"}
        )

        # 主机会重复,所以要去重 (Python set() 函数Python 内置函数描述set() 函数创建一个无序不重复元素集,可进行关系测试,删除重复数据,还可以计算交集、差集、并集等。)
        host_ids = set(self.task_data.get("selected_host_ids"))

        tasklog_objs = []

        # Create one TaskLog row per selected host
        for host_id in host_ids:
            tasklog_objs.append(
                models.TaskLog(task_id=task_obj.id,
                               host_user_bind_id=host_id,
                               status=3))
        """
        由于TaskLog.objects.create()每保存一条就执行一次SQL,而bulk_create()是执行一条SQL存入多条数据,做会快很多!
        当然用列表解析代替 for 循环会更快!!
        """
        models.TaskLog.objects.bulk_create(tasklog_objs, 100)

        # Create a download directory named after the Task id up front. Later runs
        # (even thousands of them) can then use the directory directly instead of
        # checking whether it exists on every download.
        download_dir = "{download_base_dir}/{task_id}".format(
            download_base_dir=settings.FILE_DOWNLOADS, task_id=task_obj.id)
        os.makedirs(download_dir, exist_ok=True)  # exist_ok=True behaves like `mkdir -p`

        # Run the task in a fully independent process (script)
        """
        subprocess.Popen handles process creation and management and replaces os.popen.
        shell=True (default False) runs args through /bin/sh on Unix.
        PIPE creates a pipe; stdin/stdout/stderr are the standard streams.
        args may be a string or a list.
        """

        multitask_obj = subprocess.Popen(
            'python3 %s %s' % (settings.MULTI_TASK_SCRIPT, task_obj.id),
            shell=True)

        # Return the task id to the frontend
        return task_obj.id
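As the docstring above notes, the accumulation loop can also be written as a list comprehension fed straight into bulk_create(). A sketch of that variant, assuming the same models module and task_data shape as the examples; create_task_logs is a hypothetical helper name:

def create_task_logs(task_obj, selected_host_ids):
    """Initialize one TaskLog per unique host with a single batched INSERT."""
    host_ids = set(selected_host_ids)  # de-duplicate repeated host ids
    # TaskLog.objects.create(...) inside a loop would issue one query per row;
    # bulk_create() sends the whole batch (here at most 100 rows per statement).
    models.TaskLog.objects.bulk_create(
        [models.TaskLog(task_id=task_obj.id, host_user_bind_id=host_id, status=3)
         for host_id in host_ids],
        batch_size=100,
    )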
Example #3
    def file_transfer(self):
        """批量文件"""

        task_obj = models.Task.objects.create(
            task_type=1,
            account=self.request.user.account,
            content=json.dumps(self.task_data),
            #host_user_binds =
        )
        tasklog_objs = []
        host_ids = set(self.task_data.get("selected_host_ids"))
        for host_id in host_ids:
            tasklog_objs.append(
                models.TaskLog(task_id=task_obj.id,
                               host_user_bind_id=host_id,
                               status=3))
        models.TaskLog.objects.bulk_create(tasklog_objs, 100)

        cmd_str = "python %s %s" % (settings.MULTI_TASK_SCRIPT, task_obj.id)
        multitask_obj = subprocess.Popen(cmd_str,
                                         stdout=subprocess.PIPE,
                                         stderr=subprocess.PIPE)
        # print("task result :",multitask_obj.stdout.read(),multitask_obj.stderr.read().decode('gbk'))
        # print(cmd_str)
        return task_obj
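One detail worth calling out: passing a single command string to subprocess.Popen() without shell=True fails on POSIX, because the whole string is treated as the executable name; that is why shell=True is set above. A short sketch of the two working forms, reusing the same settings.MULTI_TASK_SCRIPT and task_obj as the method above:

import subprocess
import sys

# Either run the command string through the shell ...
subprocess.Popen("python %s %s" % (settings.MULTI_TASK_SCRIPT, task_obj.id),
                 shell=True)

# ... or pass an argument list, which avoids the shell and its quoting issues:
subprocess.Popen([sys.executable, settings.MULTI_TASK_SCRIPT, str(task_obj.id)],
                 stdout=subprocess.PIPE,
                 stderr=subprocess.PIPE)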
Example #4
    def cmd(self):
        """批量任务"""
        #print("run multi task.....")
        task_obj = models.Task.objects.create(
            task_type=0,
            account=self.request.user.account,
            content=self.task_data.get('cmd'),
            #host_user_binds =
        )
        tasklog_objs = []
        host_ids = set(self.task_data.get("selected_host_ids"))
        for host_id in host_ids:
            tasklog_objs.append(
                models.TaskLog(task_id=task_obj.id,
                               host_user_bind_id=host_id,
                               status=3))
        models.TaskLog.objects.bulk_create(tasklog_objs, 100)
        # Alternative: task_obj.host_user_binds.add(*self.task_data.get('selected_host_ids'))

        # Run the task in an independent process rather than per-host threads
        # (see the reasoning in the first example above).
        cmd_str = "python %s %s" % (settings.MULTI_TASK_SCRIPT, task_obj.id)
        multitask_obj = subprocess.Popen(cmd_str,
                                         stdout=subprocess.PIPE,
                                         stderr=subprocess.PIPE)
        # print("task result :",multitask_obj.stdout.read(),multitask_obj.stderr.read().decode('gbk'))
        # print(cmd_str)
        return task_obj
Example #5
    def cmd(self):
        task_obj = models.Task.objects.create(
            task_type=0,
            account=self.request.user.account,
            content=self.task_data.get('cmd'),
        )  # 1. Create the batch task record and get back its pk

        tasklog_objs = []  # 2. Initialize one sub-task (TaskLog) row per host
        host_ids = set(self.task_data.get("selected_host_ids"))  # selected host ids, deduplicated with set()
        for host_id in host_ids:
            tasklog_objs.append(models.TaskLog(task_id=task_obj.id,
                                               host_user_bind_id=host_id,
                                               status=3))
        models.TaskLog.objects.bulk_create(tasklog_objs, 100)  # insert in batches of 100 rows per statement

        task_id = task_obj.pk
        cmd_str = "python %s %s" % (settings.MULTI_TASK_SCRIPT, task_id)  # 执行multitask.py脚本路径
        print('------------------>', cmd_str)
        multitask_obj = subprocess.Popen(cmd_str,shell=True, stdout=subprocess.PIPE,
                                         stderr=subprocess.PIPE)  # 新打开1个新进程

        # Optionally remember the worker's process group so it can be killed later:
        # settings.CURRENT_PGID = os.getpgid(multitask_obj.pid)
        # os.killpg(pgid, signal.SIGKILL)
        # (see the sketch after this example)

        return task_obj
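The commented-out os.getpgid / os.killpg lines above hint at cancelling a running batch task. For that to work reliably, the worker should be started in its own session so the whole process group can be signalled at once; a minimal sketch, assuming Linux and that the caller keeps the returned pgid somewhere it can find it again:

import os
import signal
import subprocess

def start_worker(cmd_str):
    """Start the worker detached in its own session and return its process group id."""
    proc = subprocess.Popen(cmd_str, shell=True, start_new_session=True)
    return os.getpgid(proc.pid)

def kill_worker(pgid):
    """Kill the worker and any child processes it spawned."""
    os.killpg(pgid, signal.SIGKILL)

For example, the view could keep settings.CURRENT_PGID = start_worker(cmd_str) and a cancel handler could call kill_worker(settings.CURRENT_PGID).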
Example #6
    def file_transfer(self):
        # task_data example: {'task_type': 'file_transfer', 'selected_host_ids': ['3'], 'file_transfer_type': 'send', 'random_str': 'iuon9bhm', 'remote_path': '/'}
        print(self.task_data)
        task_obj = models.Task.objects.create(
            task_type=1,
            account=self.request.user.account,
            content=json.dumps(self.task_data),
        )  # 1. Create the batch task record and get back its pk

        tasklog_objs = []  # 2. Initialize one sub-task (TaskLog) row per host
        host_ids = set(self.task_data.get("selected_host_ids"))  # selected host ids, deduplicated with set()
        for host_id in host_ids:
            tasklog_objs.append(models.TaskLog(task_id=task_obj.id,
                                               host_user_bind_id=host_id,
                                               status=3))
        models.TaskLog.objects.bulk_create(tasklog_objs, 100)  # insert in batches of 100 rows per statement

        task_id = task_obj.pk
        cmd_str = "python %s %s" % (settings.MULTI_TASK_SCRIPT, task_id)  # 执行multitask.py脚本路径
        print('------------------>', cmd_str)
        multitask_obj = subprocess.Popen(cmd_str, shell=True, stdout=subprocess.PIPE,
                                         stderr=subprocess.PIPE)  # 新打开1个新进程
        # settings.CURRENT_PGID = os.getpgid(multitask_obj.pid)  # os.getpgid(multitask_obj.pid)

        return task_obj
Example #7
    def cmd(self):
        """批量任务"""

        task_obj = models.Task.objects.create(
            # task_type=self.task_data.get('task_type'),  0对应数据库里 task_choices 的 cmd
            task_type=0,
            account=self.request.user.account,
            content=self.task_data.get('cmd'),
        )

        # 主机会重复,所以要去重 (Python set() 函数Python 内置函数描述set() 函数创建一个无序不重复元素集,可进行关系测试,删除重复数据,还可以计算交集、差集、并集等。)
        host_ids = set(self.task_data.get("selected_host_ids"))

        tasklog_objs = []

        for host_id in host_ids:
            tasklog_objs.append(
                models.TaskLog(task_id=task_obj.id,
                               host_user_bind_id=host_id,
                               status=3))
        """
        由于TaskLog.objects.create()每保存一条就执行一次SQL,而bulk_create()是执行一条SQL存入多条数据,做会快很多!
        当然用列表解析代替 for 循环会更快!!
        """
        models.TaskLog.objects.bulk_create(tasklog_objs, 100)

        # Run the task in a fully independent process (script)
        """
        subprocess.Popen handles process creation and management and replaces os.popen.
        shell=True (default False) runs args through /bin/sh on Unix.
        PIPE creates a pipe; stdin/stdout/stderr are the standard streams.
        args may be a string or a list.
        """

        multitask_obj = subprocess.Popen(
            'python3 %s %s' % (settings.MULTI_TASK_SCRIPT, task_obj.id),
            shell=True)

        # Return the task id to the frontend
        return task_obj.id
    def cmd(self):
        """批量任务"""
        task_obj = models.Task.objects.create(
            task_type=0,
            account=self.request.user.account,
            content=self.task_data.get('cmd'),
        )
        tasklog_objs = []
        host_ids = set(self.task_data.get('selected_host_ids'))
        for host_id in host_ids:
            tasklog_objs.append(
                models.TaskLog(task_id=task_obj.id,
                               host_user_bind_id=host_id,
                               status=3))
        models.TaskLog.objects.bulk_create(tasklog_objs, 100)

        # Run the task
        cmd_str = "python %s %s" % (settings.MULTI_TASK_SCRIPT, task_obj.id)
        multitask_obj = subprocess.Popen(cmd_str,
                                         shell=True,
                                         stdout=subprocess.PIPE,
                                         stderr=subprocess.PIPE)

        return task_obj
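For context, the id returned by these handlers is typically handed straight back to the browser so it can poll for TaskLog results. A hypothetical Django view doing that; the MultiTaskHandler class name and URL wiring are assumptions, and only the cmd()/file_transfer() return values come from the examples above:

from django.http import JsonResponse

def multitask_view(request):
    """Hypothetical view: dispatch the batch task and hand its id to the frontend."""
    handler = MultiTaskHandler(request)   # assumed wrapper class holding request + task_data
    result = handler.cmd()                # or handler.file_transfer()
    task_id = getattr(result, "id", result)  # some variants return the Task object, others the id
    return JsonResponse({"task_id": task_id})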