def post_import(request):
    password = request.POST.get('password')
    file = request.FILES['file']
    ws = load_workbook(file, read_only=True)['Sheet1']
    summary = {'invalid': [], 'skip': [], 'fail': [], 'success': []}
    for i, row in enumerate(ws.rows):
        if i == 0:  # the first row is the header, skip it
            continue
        if not all([row[x].value for x in range(5)]):
            summary['invalid'].append(i)
            continue
        data = AttrDict(
            zone=row[0].value,
            name=row[1].value,
            hostname=row[2].value,
            port=row[3].value,
            username=row[4].value,
            password=row[5].value,
            desc=row[6].value
        )
        if Host.objects.filter(hostname=data.hostname, port=data.port, username=data.username,
                               deleted_by_id__isnull=True).exists():
            summary['skip'].append(i)
            continue
        if valid_ssh(data.hostname, data.port, data.username, data.pop('password') or password) is False:
            summary['fail'].append(i)
            continue
        host = Host.objects.create(created_by=request.user, **data)
        if request.user.role:
            request.user.role.add_host_perm(host.id)
        summary['success'].append(i)
    return json_response(summary)
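# The views above and below lean on an AttrDict helper that is not shown in
# this section. A minimal sketch of what it is assumed to provide: a dict
# subclass with attribute-style reads and writes, which is why both
# `data.hostname` and `data.pop('password')` work on the same object. This is
# a hypothetical reconstruction for illustration, not necessarily the
# project's exact implementation.
class AttrDict(dict):
    def __getattr__(self, name):
        # Missing keys resolve to None instead of raising AttributeError,
        # matching how optional cells such as row[6].value may be empty.
        return self.get(name)

    def __setattr__(self, name, value):
        self[name] = value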
def get_detail(request, r_id):
    repository = Repository.objects.filter(pk=r_id).first()
    if not repository:
        return json_response(error='Specified build record not found')
    rds, counter = get_redis_connection(), 0
    if repository.remarks == 'SPUG AUTO MAKE':
        req = repository.deployrequest_set.last()
        key = f'{settings.REQUEST_KEY}:{req.id}'
    else:
        key = f'{settings.BUILD_KEY}:{repository.spug_version}'
    data = rds.lrange(key, counter, counter + 9)
    response = AttrDict(data='', step=0, s_status='process', status=repository.status)
    while data:
        for item in data:
            counter += 1
            item = json.loads(item.decode())
            if item['key'] == 'local':
                if 'data' in item:
                    response.data += item['data']
                if 'step' in item:
                    response.step = item['step']
                if 'status' in item:
                    response.status = item['status']
        data = rds.lrange(key, counter, counter + 9)
    response.index = counter
    if repository.status in ('0', '1'):
        response.data = f'{human_time()} Establishing connection... ' + response.data
    elif not response.data:
        response.data = f'{human_time()} Reading data... \r\n\r\nNo data found; Spug only keeps build logs for the last two weeks.'
    else:
        response.data = f'{human_time()} Reading data... ' + response.data
    return json_response(response)
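# get_detail above drains a Redis list of JSON log entries in batches of ten.
# Inferred from the consumer code (not confirmed elsewhere in this section),
# each entry is an object with a 'key' field plus optional 'data', 'step' and
# 'status' fields; only entries whose key is 'local' are folded into the
# response. A hedged sketch of what a producer could push; the function name
# and the 'local' key usage here are illustrative assumptions.
import json

def push_build_log(rds, key, text, step=None, status=None):
    entry = {'key': 'local', 'data': text}
    if step is not None:
        entry['step'] = step
    if status is not None:
        entry['status'] = status
    # rpush appends at the tail, so lrange(key, 0, ...) reads entries in
    # the order they were emitted.
    rds.rpush(key, json.dumps(entry))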
def post_import(request):
    group_id = request.POST.get('group_id')
    file = request.FILES['file']
    ws = load_workbook(file, read_only=True)['Sheet1']
    summary = {'invalid': [], 'skip': [], 'repeat': [], 'success': []}
    for i, row in enumerate(ws.rows):
        if i == 0:  # the first row is the header, skip it
            continue
        if not all([row[x].value for x in range(4)]):
            summary['invalid'].append(i)
            continue
        data = AttrDict(
            name=row[0].value,
            hostname=row[1].value,
            port=row[2].value,
            username=row[3].value,
            desc=row[4].value
        )
        if Host.objects.filter(hostname=data.hostname, port=data.port, username=data.username).exists():
            summary['skip'].append(i)
            continue
        if Host.objects.filter(name=data.name).exists():
            summary['repeat'].append(i)
            continue
        host = Host.objects.create(created_by=request.user, **data)
        host.groups.add(group_id)
        summary['success'].append(i)
    return json_response(summary)
def run(self):
    rds_cli = get_redis_connection()
    self._init()
    rds_cli.delete(settings.SCHEDULE_KEY)
    logger.info('Running scheduler')
    while True:
        _, data = rds_cli.brpop(settings.SCHEDULE_KEY)
        task = AttrDict(json.loads(data))
        if task.action in ('add', 'modify'):
            trigger = self.parse_trigger(task.trigger, task.trigger_args)
            self.scheduler.add_job(
                dispatch,
                trigger,
                id=str(task.id),
                args=(task.command, task.targets),
                replace_existing=True
            )
        elif task.action == 'remove':
            job = self.scheduler.get_job(str(task.id))
            if job:
                job.remove()
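# Both this scheduler loop and the monitor loop below block on a Redis list
# with brpop and expect a JSON message carrying an 'action' plus job fields.
# A sketch of what a producer might enqueue, inferred from the consumer side;
# the function name and the example field values are assumptions. The key
# argument would be settings.SCHEDULE_KEY (or settings.MONITOR_KEY for the
# monitor, whose messages carry rate/type/addr/extra fields instead).
import json

def enqueue_schedule_task(rds, key, task_id, trigger, trigger_args, command, targets):
    message = {
        'action': 'add',            # or 'modify' / 'remove'
        'id': task_id,
        'trigger': trigger,         # e.g. a spec understood by parse_trigger
        'trigger_args': trigger_args,
        'command': command,
        'targets': targets,
    }
    # brpop pops from the tail of the list, so lpush gives FIFO delivery.
    rds.lpush(key, json.dumps(message))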
def run(self):
    rds_cli = get_redis_connection()
    self._init()
    rds_cli.delete(settings.MONITOR_KEY)
    logger.info('Running monitor')
    while True:
        _, data = rds_cli.brpop(settings.MONITOR_KEY)
        task = AttrDict(json.loads(data))
        if task.action in ('add', 'modify'):
            trigger = IntervalTrigger(minutes=int(task.rate), timezone=self.timezone)
            self.scheduler.add_job(
                dispatch,
                trigger,
                id=str(task.id),
                args=(task.type, task.addr, task.extra),
                replace_existing=True
            )
        elif task.action == 'remove':
            job = self.scheduler.get_job(str(task.id))
            if job:
                job.remove()