def create_group(self, data):
    """Create a VOI group.

    :param data: dict with ``name``, ``desc``, ``start_ip``, ``end_ip``
        and optional ``group_type`` (defaults to 1)
    :return: error-result dict; ``Success`` carries the created group record
    """
    if not self._check_params(data):
        return get_error_result("ParamError")
    # reject duplicate group names
    group = voi_api.get_item_with_first(models.YzyVoiGroup, {'name': data['name']})
    if group:
        return get_error_result("GroupAlreadyExists", name=data['name'])
    group_uuid = create_uuid()
    group_value = {
        "uuid": group_uuid,
        "group_type": data.get('group_type', 1),
        "name": data['name'],
        "desc": data['desc'],
        "start_ip": data['start_ip'],
        "end_ip": data['end_ip'],
    }
    # templates flagged "all_group" must be bound to every group,
    # including this new one
    templates = voi_api.get_item_with_all(models.YzyVoiTemplate, {"all_group": True})
    binds = [{
        "uuid": create_uuid(),
        "template_uuid": template.uuid,
        "group_uuid": group_uuid
    } for template in templates]
    try:
        voi_api.create_voi_group(group_value)
        if binds:
            db_api.insert_with_many(models.YzyVoiTemplateGroups, binds)
        logger.info("create voi group %s success", data['name'])
    except Exception as e:
        # fix: was logging.info on the root logger — use the module logger
        # at error level like the rest of the file
        logger.error("insert voi group info to db failed:%s", e)
        return get_error_result("GroupCreateError", name=data['name'])
    return get_error_result("Success", group_value)
def create_remote_storage(self, data):
    """Register a remote storage record (currently NFS only).

    :param data: {
            "name": "remote_storage_name",
            "type": "nfs",
            "server_ip": "172.16.1.23",
            "mount_point": "/mnt/nfs/"
        }
    :return: build_result dict; ``Success`` carries the stored record
    """
    required = (data.get('name'), data.get('type'),
                data.get('server_ip'), data.get('mount_point'))
    if not all(required):
        return build_result("ParamError")
    # the server column stores "ip:mount_point"
    server = '%s:%s' % (data['server_ip'], data['mount_point'])
    if db_api.get_remote_storage_by_key('name', data['name']):
        logger.error("remote storage is already exist")
        return build_result("RemoteStorageNameExistError")
    if db_api.get_remote_storage_by_key('server', server):
        logger.error("server is already added")
        return build_result("ServerExistError")
    remote_storage_value = {
        "uuid": create_uuid(),
        "name": data['name'],
        "type": data['type'],
        "server": server,
    }
    try:
        db_api.add_remote_storage(remote_storage_value)
    except Exception as e:
        logger.error("add remote storage failed:%s", e, exc_info=True)
        return build_result("Others")
    logger.info("add remote storage:%s success", data['name'])
    return build_result("Success", remote_storage_value)
def create_user(self, data):
    """Create a single user under an existing group.

    :param data: requires ``group_uuid``, ``user_name``, ``passwd``;
        optional ``name``/``phone``/``email``/``enabled``
    :return: error-result dict; ``Success`` carries the user record
    """
    if not self._check_params(data):
        return get_error_result("ParamError")
    group = db_api.get_group_with_first({'uuid': data['group_uuid']})
    if not group:
        logger.error("group: %s not exist", data['group_uuid'])
        return get_error_result("GroupNotExists", name='')
    # add user to group; password is stored as an md5 digest
    user_value = {
        "uuid": create_uuid(),
        "group_uuid": data['group_uuid'],
        "user_name": data['user_name'],
        "passwd": create_md5(data['passwd']),
        "name": data.get('name', ''),
        "phone": data.get('phone', ''),
        "email": data.get('email', ''),
        "enabled": data.get('enabled', True)
    }
    try:
        db_api.create_group_user(user_value)
        logger.info("create group user %s success", data['user_name'])
    except Exception as e:
        # fix: was logging.info on the root logger — use the module logger
        # at error level
        logger.error("insert group user info to db failed:%s", e)
        return get_error_result("GroupUserCreateError", user_name=data['user_name'])
    return get_error_result("Success", user_value)
def _generate_subnet_info(self, data):
    """Build a subnet record dict from the request payload.

    :param data: {
            "network_uuid": "570a316e-27b5-11ea-9eac-562668d3ccea",
            "name": "subnet1",
            "start_ip": "172.16.1.10",
            "end_ip": "172.16.1.20",
            "netmask": "255.255.0.0",
            "gateway": "172.16.1.254",
            "dns1": "8.8.8.8",
            "dns2": ""
        }
    :return: dict ready for db_api.add_subnet
    """
    # only the bit count of the netmask is needed for the CIDR string
    _, netmask_bits = is_netmask(data['netmask'])
    return {
        "uuid": create_uuid(),
        "name": data['name'],
        "network_uuid": data['network_uuid'],
        "netmask": data['netmask'],
        "gateway": data['gateway'],
        "cidr": "%s/%s" % (data['start_ip'], netmask_bits),
        "start_ip": data['start_ip'],
        "end_ip": data['end_ip'],
        "enable_dhcp": 0,
        # empty-string DNS entries are normalized to NULL
        "dns1": data.get('dns1') or None,
        "dns2": data.get('dns2') or None,
    }
def create_resource_pool(self, data):
    """Create a resource pool record.

    :param data: {
            "name": "default",
            "desc": "this is default pool",
            "default": 1
        }
    :return: build_result dict
    """
    # both a payload and a non-empty name are mandatory
    if not data or not data.get('name', ''):
        return build_result("ParamError")
    if db_api.get_resource_pool_by_key("name", data['name']):
        return build_result("ResourcePoolNameExistErr", name=data['name'])
    data['uuid'] = create_uuid()
    try:
        db_api.add_resource_pool(data)
    except Exception:
        current_app.logger.error(traceback.format_exc())
        return build_result("ResourcePoolAddError", name=data['name'])
    return build_result("Success")
def create_virtual_switch(self, data):
    """Create a virtual switch and its uplink records.

    :param data: {
            "name": "switch1",
            "type": "vlan",
            "desc": "this is switch1",
            "uplinks": [
                {"node_uuid": "", "nic_uuid": ""},
                ...
            ]
        }
    :return: build_result dict; ``Success`` carries the switch record
    """
    if not data:
        return build_result("ParamError")
    vs_uuid = create_uuid()
    switch_value = {
        "uuid": vs_uuid,
        "name": data['name'],
        "type": data['type'],
        "desc": data.get("desc", ''),
    }
    # one uplink row per (node, nic) pair supplied by the caller
    uplinks = [
        {
            "uuid": create_uuid(),
            "vs_uuid": vs_uuid,
            "node_uuid": item['node_uuid'],
            "nic_uuid": item['nic_uuid'],
        }
        for item in data.get('uplinks', [])
    ]
    try:
        db_api.add_virtual_swtich(switch_value)
        if uplinks:
            db_api.insert_with_many(models.YzyVswitchUplink, uplinks)
        logger.info("add virtual switch:%s success", data['name'])
    except Exception as e:
        logger.error("add virtual switch failed:%s", e, exc_info=True)
        return build_result("VSwitchCreateError", name=data['name'])
    return build_result("Success", switch_value)
def _update(self, course_schedule_obj, course_ret_list, desktops):
    # Compare the md5 of the submitted course list with the stored one;
    # when the content is unchanged there is nothing to do.
    course_md5 = create_md5(json.dumps(course_ret_list))
    if course_md5 != course_schedule_obj.course_md5:
        course_template = {
            "uuid": create_uuid(),
            "desktops": json.dumps(desktops)
        }
        # one course row per (desktop, weekday, course_num) entry
        course_values_list = [{
            "uuid": create_uuid(),
            "course_template_uuid": course_template["uuid"],
            "desktop_uuid": _d["desktop_uuid"],
            "weekday": _d["weekday"],
            "course_num": _d["course_num"]
        } for _d in course_ret_list]
        # create the new template and its course rows
        db_api.create_course_template(course_template)
        logger.info("insert in yzy_course_template success: %s" % course_template)
        db_api.create_course_many(course_values_list)
        logger.info(
            "insert many[%d] in yzy_course success: course_template_uuid[%s]"
            % (len(course_values_list), course_template["uuid"]))
        # remember the old template uuid before re-pointing the schedule
        old_tmplate_uuid = course_schedule_obj.course_template_uuid
        # point the schedule at the new template and refresh its md5
        update_dict = {
            "course_template_uuid": course_template["uuid"],
            "course_md5": course_md5
        }
        course_schedule_obj.update(update_dict)
        logger.info("update uuid[%s] in yzy_course_schedule success: %s"
                    % (course_schedule_obj.uuid, update_dict))
        # drop old templates (and their courses) that are no longer
        # referenced by any schedule
        self._delete_course_template([old_tmplate_uuid])
def _create(self, term_uuid, group_uuid, week_num, course_ret_list, desktops):
    # Course schedules can only be attached to education groups.
    group_obj = db_api.get_group_with_first({"uuid": group_uuid})
    if not group_obj:
        return get_error_result("EduGroupNotExist")
    if group_obj.group_type != constants.EDUCATION_GROUP:
        return get_error_result("CourseNotEduGroup")
    course_template = {
        "uuid": create_uuid(),
        "desktops": json.dumps(desktops)
    }
    # one course row per (desktop, weekday, course_num) entry
    course_values_list = [{
        "uuid": create_uuid(),
        "course_template_uuid": course_template["uuid"],
        "desktop_uuid": _d["desktop_uuid"],
        "weekday": _d["weekday"],
        "course_num": _d["course_num"]
    } for _d in course_ret_list]
    # a schedule is enabled by default right after creation; the md5 of the
    # serialized course list is stored so _update can detect changes later
    course_schedule = {
        "uuid": create_uuid(),
        "term_uuid": term_uuid,
        "group_uuid": group_uuid,
        "course_template_uuid": course_template["uuid"],
        "week_num": week_num,
        "course_md5": create_md5(json.dumps(course_ret_list)),
        "status": constants.COURSE_SCHEDULE_ENABLED
    }
    db_api.create_course_template(course_template)
    logger.info("insert in yzy_course_template success: %s" % course_template)
    db_api.create_course_many(course_values_list)
    logger.info(
        "insert many[%d] in yzy_course success: course_template_uuid[%s]"
        % (len(course_values_list), course_template["uuid"]))
    db_api.create_course_schedule_many([course_schedule])
    logger.info("insert in yzy_course_schedule success: %s" % course_schedule)
class NoModel(models.Model):
    """Abstract base model that adds a unique 32-char "no" identifier.

    ``default`` must be the callable itself, not a call result:
    ``default=utils.create_uuid()`` is evaluated once at class-definition
    time, so every new row would get the *same* default value and the
    second insert would violate the unique constraint. Passing the
    callable makes Django generate a fresh uuid per instance.
    """
    # unique per-record number, generated at instance creation
    no = models.CharField(default=utils.create_uuid, max_length=32,
                          unique=True, verbose_name=u'编号')

    class Meta:
        abstract = True
def multi_create_user(self, data):
    """Batch-create users named ``prefix + zero-padded-number``.

    A name collision counts as one failure and the number still advances.

    :param data: requires ``group_uuid``, ``prefix``, ``postfix`` (pad
        width), ``postfix_start``, ``user_num``, ``passwd``
    :return: Success result with ``failed_num``/``success_num`` counts
    """
    group = db_api.get_group_with_first({'uuid': data['group_uuid']})
    if not group:
        logger.error("group: %s not exist", data['group_uuid'])
        return get_error_result("GroupNotExists", name='')
    success_num = 0
    failed_num = 0
    postfix = data['postfix']
    postfix_start = data['postfix_start']
    for i in range(data['user_num']):
        # user name is the prefix plus a number zero-padded to `postfix`
        # digits (numbers longer than the pad width are used as-is) —
        # exactly str.zfill semantics
        post = str(postfix_start).zfill(postfix)
        user_name = data['prefix'] + post
        if db_api.get_group_user_with_first({'user_name': user_name}):
            logger.info(
                "multi create user failed, the user %s already exists",
                user_name)
            failed_num += 1
            postfix_start += 1
            continue
        user_value = {
            "uuid": create_uuid(),
            "group_uuid": data['group_uuid'],
            "user_name": user_name,
            "passwd": create_md5(data['passwd']),
            "name": data.get('name', ''),
            "phone": data.get('phone', ''),
            "email": data.get('email', ''),
            "enabled": data.get('enabled', True)
        }
        postfix_start += 1
        try:
            db_api.create_group_user(user_value)
            logger.info("create user:%s success", user_name)
        except Exception as e:
            # fix: was logging.info on the root logger, and postfix_start
            # was incremented a second time here (already advanced above),
            # silently skipping a number after every DB failure
            logger.error("insert group user info to db failed:%s", e)
            failed_num += 1
            continue
        success_num += 1
    logger.info("multi create group user success")
    return get_error_result("Success", {
        "failed_num": failed_num,
        "success_num": success_num
    })
def update_service_status(node, services, node_services):
    """Sync the reported service states of a node into the database.

    :param node: node object of db
    :param services: the query result of node services (name -> status)
    :param node_services: the service names this node must have
    :return: None; all failures are logged and swallowed
    """
    try:
        recorded = db_api.get_service_by_node_uuid(node.uuid)
        recorded_names = [svc.name for svc in recorded]
        new_rows = []
        # reported services with no DB row yet: insert them, but only if
        # they belong to the set this node is supposed to monitor
        for name in services.keys():
            if name in recorded_names or name not in node_services:
                continue
            new_rows.append({
                'uuid': create_uuid(),
                'node_uuid': node.uuid,
                'name': name,
                'status': services[name]
            })
            logger.info("add service %s in node:%s", name, node.name)
        if new_rows:
            db_api.insert_with_many(models.YzyNodeServices, new_rows)
            logger.info("add info to db success")
        # already-recorded services: refresh status when it changed
        for name in recorded_names:
            if name not in node_services:
                continue
            for svc in recorded:
                if svc.name == name:
                    if svc.status != services[name]:
                        logger.info(
                            "node %s service %s status change from %s to %s",
                            node.ip, svc.name, svc.status, services[name])
                        svc.status = services[name]
                        svc.soft_update()
                        logger.info("update server %s status to %s",
                                    svc.name, services[name])
                    break
    except Exception as e:
        logger.error("update error:%s", e)
def upload(self, file_obj):
    """Persist an uploaded upgrade package, verify it, and return its id.

    Streams ``file_obj`` to ``<UPGRADE_FILE_PATH>/<uuid>.tar.gz`` while
    accumulating an md5 digest, then decompresses and validates the
    package contents.

    :param file_obj: file-like upload object consumed via chunks()
    :return: error-result dict; Success carries package_id/path/md5
    """
    size = 0
    logger.info("go to upload func")
    package_id = create_uuid()
    base_path = constants.UPGRADE_FILE_PATH
    if not os.path.exists(base_path):
        os.makedirs(base_path)
    package_path = os.path.join(base_path, "".join([package_id, ".tar.gz"]))
    logger.info("begin save upgrade compress file to %s", package_path)
    try:
        md5_sum = hashlib.md5()
        # stream to disk chunk by chunk, hashing as we go
        # fix: dropped the redundant f.close() — the with block closes it
        with open(package_path, "wb") as f:
            for chunk in chunks(file_obj):
                size += len(chunk)
                md5_sum.update(chunk)
                f.write(chunk)
        md5_sum = md5_sum.hexdigest()
        # decompress the package
        if not decompress_package(package_path):
            return get_error_result("UpgradePackageFormatError",
                                    data={"package_path": package_path})
        # verify the package matches this system
        if not self._check_package():
            return get_error_result("PackageNotMatchSystem",
                                    data={"package_path": package_path})
    except Exception:
        # fix: logger.exception already records the traceback; the extra
        # exc_info=True kwarg was redundant
        logger.exception("save upgrade package error")
        return get_error_result("OtherError",
                                data={"package_path": package_path})
    return get_error_result("Success", data={
        "package_id": package_id,
        "package_path": package_path,
        "md5_value": md5_sum
    })
def create_group(self, data):
    """Create a group — either an education group or a user group.

    Education groups must reference an existing subnet; after creation,
    unassigned terminals are re-homed into the group via
    ``change_group_uuid`` (presumably matching the start/end IP range —
    confirm against that helper).

    :param data: requires ``name``, ``desc``, ``group_type``; education
        groups also use ``network_uuid``/``subnet_uuid``/``start_ip``/``end_ip``
    :return: error-result dict; Success carries the group record
    """
    if not self._check_params(data):
        return get_error_result("ParamError")
    if constants.EDUCATION_DESKTOP == data.get(
            'group_type', constants.EDUCATION_DESKTOP):
        subnet = db_api.get_subnet_by_uuid(data['subnet_uuid'])
        if not subnet:
            logger.error("subnet: %s not exist", data['subnet_uuid'])
            return get_error_result("SubnetNotExist")
    # names only need to be unique within the same group type
    groups = db_api.get_group_with_all({
        'name': data['name'],
        'group_type': data['group_type']
    })
    if groups:
        return get_error_result("GroupAlreadyExists", name=data['name'])
    group_uuid = create_uuid()
    group_value = {
        "uuid": group_uuid,
        "group_type": data['group_type'],
        "name": data['name'],
        "desc": data['desc'],
        "network_uuid": data.get('network_uuid', ''),
        "subnet_uuid": data.get('subnet_uuid', ''),
        "start_ip": data.get('start_ip', ''),
        "end_ip": data.get('end_ip', '')
    }
    try:
        db_api.create_group(group_value)
        logger.info("create group %s success", data['name'])
    except Exception as e:
        # fix: was logging.info on the root logger — use the module logger
        # at error level
        logger.error("insert group info to db failed:%s", e)
        return get_error_result("GroupCreateError", name=data['name'])
    if data['group_type'] == constants.EDUCATION_DESKTOP:
        terminals = db_api.get_terminal_with_all({'group_uuid': None})
        self.change_group_uuid(terminals, data.get('start_ip', ''),
                               data.get('end_ip', ''), group_uuid)
    return get_error_result("Success", group_value)
def terminal_desktop_bind(self, data):
    """Bind a terminal to a VOI desktop group.

    :param data: dict with ``terminal_uuid`` and ``desktop_uuid``
    :return: build_result dict
    """
    logger.info("terminal desktop bind data: {}".format(data))
    try:
        t_uuid = data.get("terminal_uuid", "")
        d_uuid = data.get("desktop_uuid", "")
        # the desktop group must exist before a bind row is written
        desktop = db_api.get_item_with_first(models.YzyVoiDesktop,
                                             {"uuid": d_uuid})
        if desktop is None:
            logger.error("terminal desktop bind desktop not exist: %s",
                         d_uuid)
            return build_result("VOIDesktopGroupNotExist")
        bind_info = dict(uuid=create_uuid(),
                         terminal_uuid=t_uuid,
                         desktop_uuid=d_uuid)
        db_api.create_voi_terminal_desktop_bind(bind_info)
        logger.info(
            "terminal desktop bind data: {} success".format(bind_info))
        return build_result("Success")
    except Exception as e:
        logger.error("", exc_info=True)
        return build_result("OtherError")
def allocate_remote_storage(self, remote_storage_uuid, resource_pool_uuid):
    """Allocate a remote (NFS) storage to every node of a resource pool.

    Asks each node of the pool to mount the NFS export, records one
    YzyNodeStorages row per node, then marks the remote storage as
    allocated using the capacity reported by the last node.

    :param remote_storage_uuid: uuid of the remote storage to allocate
    :param resource_pool_uuid: uuid of the target resource pool
    :return: build_result dict
    """
    if not (remote_storage_uuid and resource_pool_uuid):
        return build_result("ParamError")
    remote_storage = db_api.get_remote_storage_by_key(
        'uuid', remote_storage_uuid)
    if not remote_storage:
        logger.error(
            "remote storage: {} not exist".format(remote_storage_uuid))
        return build_result("RemoteStorageNotExistError")
    if remote_storage.allocated:
        logger.error(
            "the remote storage is already allocated, can not allocated")
        return build_result("RemoteStorageAlreadyAllocatedError")
    nodes = db_api.get_node_by_pool_uuid(resource_pool_uuid)
    # fix: with an empty pool the loop body never ran, so rep_json below
    # was referenced while undefined and raised NameError
    if not nodes:
        logger.error("no node in resource pool: %s", resource_pool_uuid)
        return build_result("ParamError")
    remote_storage_list = list()
    for node in nodes:
        # mount the NFS export on the node
        _data = {
            "command": "mount_nfs",
            "handler": "NfsHandler",
            "data": {
                "nfs_server": remote_storage.server,
                "name": remote_storage.name,
            }
        }
        rep_json = compute_post(node['ip'], _data)
        ret_code = rep_json.get("code", -1)
        if ret_code != 0:
            logger.error("mount nfs failed:%s", rep_json['msg'])
            return build_result("MountNfsError", host=node['ip'])
        if 'data' not in rep_json:
            logger.error("mount nfs failed: unexpected error")
            return build_result("MountNfsError", host=node['ip'])
        # rep_json['data'] indices: [0]=used, [1]=free, [2]=total —
        # TODO confirm against the NfsHandler response format
        info = {
            'uuid': create_uuid(),
            'node_uuid': node.uuid,
            'path': constants.NFS_MOUNT_POINT_PREFIX + remote_storage.name,
            'role': '',
            'type': 2,  # 1: local storage, 2: remote storage
            'total': rep_json['data'][2],
            'free': rep_json['data'][1],
            'used': rep_json['data'][0]
        }
        remote_storage_list.append(info)
    logger.info("allocate remote storage success")
    db_api.insert_with_many(models.YzyNodeStorages, remote_storage_list)
    # capacity figures come from the last node's mount response
    remote_storage_info = {
        'allocated_to': resource_pool_uuid,
        'allocated': 1,
        'total': rep_json['data'][2],
        'free': rep_json['data'][1],
        'used': rep_json['data'][0]
    }
    db_api.update_remote_storage(remote_storage, remote_storage_info)
    return build_result("Success")
def create_terminal_desktop_bind(self, data):
    """Create terminal->desktop-group bind rows for every VOI desktop
    group in the terminal's group, allocating a static IP per bind when
    the desktop group requires one.

    Expected payload keys: 'group_uuid', 'terminal_uuid', 'terminal_id',
    'mac', 'ip', 'mask', 'gateway', 'dns1', 'dns2', 'is_dhcp', e.g.
    {'group_uuid': 'a0cd32c9-9502-4ddc-b7a2-115203e4df0c',
     'terminal_uuid': '891fa480-8f9b-4d25-8df3-a59798e302c4',
     'terminal_id': 210, 'mac': 'E4:3A:6E:35:5C:A4',
     'ip': '10.100.20.150', 'mask': '255.255.255.0',
     'gateway': '10.100.20.254', 'dns1': '10.10.0.2', 'dns2': '',
     'is_dhcp': 1}
    """
    logger.info("create terminal desktop bind data: {}".format(data))
    try:
        terminal_uuid = data.get("terminal_uuid", "")
        terminal_id = data.get("terminal_id", "")  # NOTE(review): unused here
        group_uuid = data.get("group_uuid", "")
        terminal_mac = data.get("mac", "")
        bind_info = {}
        # Serialize concurrent binds under a redis lock so two terminals
        # cannot be handed the same static IP; rows for other groups
        # (i.e. the terminal changed group) are purged first.
        with redis_lock.Lock(self._redis.rds,
                             'create_terminal_desktop_bind_lock', 2):
            db_api.delete_voi_terminal_desktops(group_uuid, terminal_uuid)
            # all desktop groups of this terminal group
            qry_desktop_groups = db_api.get_item_with_all(
                models.YzyVoiDesktop, {"group_uuid": group_uuid})
            # all existing terminal<->desktop-group relations of this terminal
            terminal_desktop_relations = db_api.get_item_with_all(
                models.YzyVoiTerminalToDesktops,
                {"terminal_uuid": terminal_uuid})
            for qry in qry_desktop_groups:
                desktop_group_uuid = qry.uuid
                # only one row per (desktop_group_uuid, terminal): skip
                # desktop groups this terminal is already bound to
                relation_exists = False
                for relation in terminal_desktop_relations:
                    if relation.desktop_group_uuid == desktop_group_uuid:
                        relation_exists = True
                        break
                if relation_exists:
                    logger.info('continue: {}'.format(terminal_mac))
                    continue
                logger.info(
                    'desktop_group_uuid: {}, qry.ip_detail: {}'.format(
                        desktop_group_uuid, qry.ip_detail))
                if bool(qry.use_bottom_ip):
                    # desktop group reuses the terminal's own network config
                    logger.info(
                        'desktop_group_uuid: {}, qry.use_bottom_ip: {}'.
                        format(desktop_group_uuid, qry.use_bottom_ip))
                    desktop_ip_info = {
                        "desktop_is_dhcp": data.get("is_dhcp", ""),
                        "desktop_ip": data.get("ip", ""),
                        "desktop_mask": data.get("mask", ""),
                        "desktop_gateway": data.get("gateway", ""),
                        "desktop_dns1": data.get("dns1", ""),
                        "desktop_dns2": data.get("dns2", ""),
                    }
                else:
                    desktop_is_dhcp = 0
                    if qry.ip_detail:
                        # ip_detail JSON keys: auto / start_ip / netmask /
                        # gateway / dns_master / dns_slave
                        ip_detail = json.loads(qry.ip_detail)
                        desktop_is_dhcp = int(ip_detail.get("auto", True))
                        desktop_start_ip = ip_detail.get("start_ip", "")
                        desktop_mask = ip_detail.get("netmask", "")
                        desktop_gateway = ip_detail.get("gateway", "")
                        desktop_dns1 = ip_detail.get("dns_master", "")
                        desktop_dns2 = ip_detail.get("dns_slave", "")
                    if desktop_is_dhcp:
                        logger.info(
                            'desktop_group_uuid: {}, desktop_is_dhcp: {}'.
                            format(desktop_group_uuid, desktop_is_dhcp))
                        desktop_ip_info = {
                            "desktop_is_dhcp": 1,
                            "desktop_ip": "",
                            "desktop_mask": "",
                            "desktop_gateway": "",
                            "desktop_dns1": "",
                            "desktop_dns2": "",
                        }
                    else:
                        logger.info(
                            'desktop_group_uuid: {}, static ip'.format(
                                desktop_group_uuid))
                        # Build the candidate pool from start_ip/netmask,
                        # drop IPs already used by this desktop group, and
                        # take the lowest remaining address.
                        # NOTE(review): if ip_detail is empty, this branch
                        # reads desktop_mask before assignment — confirm
                        # ip_detail is always set when use_bottom_ip is false.
                        netmask_bits = netaddr.IPAddress(
                            desktop_mask).netmask_bits()
                        network = ipaddress.ip_interface(
                            desktop_start_ip + '/' + str(netmask_bits)).network
                        original_ip_pool = [
                            x for x in network.hosts()
                            if x >= ipaddress.IPv4Address(desktop_start_ip)
                        ]
                        # get used ip in this desktop_group_uuid
                        qry_ips = db_api.get_item_with_all(
                            models.YzyVoiTerminalToDesktops,
                            {"desktop_group_uuid": desktop_group_uuid})
                        used_ip_pool = [
                            ipaddress.IPv4Address(qry.desktop_ip)
                            for qry in qry_ips
                        ]
                        available_ip_pool = [
                            ip for ip in original_ip_pool
                            if ip not in used_ip_pool
                        ]
                        if available_ip_pool:
                            desktop_ip = min(available_ip_pool).compressed
                            desktop_ip_info = {
                                "desktop_is_dhcp": 0,
                                "desktop_ip": desktop_ip,
                                "desktop_mask": desktop_mask,
                                "desktop_gateway": desktop_gateway,
                                "desktop_dns1": desktop_dns1,
                                "desktop_dns2": desktop_dns2,
                            }
                        else:
                            # pool exhausted: fall back to the terminal's
                            # own network configuration (use_bottom_ip)
                            logger.info(
                                'desktop_group_uuid: {}, no availabel_ip'.
                                format(desktop_group_uuid))
                            desktop_ip_info = {
                                "desktop_is_dhcp": data.get("is_dhcp", ""),
                                "desktop_ip": data.get("ip", ""),
                                "desktop_mask": data.get("mask", ""),
                                "desktop_gateway": data.get("gateway", ""),
                                "desktop_dns1": data.get("dns1", ""),
                                "desktop_dns2": data.get("dns2", ""),
                            }
                bind_info = {
                    "uuid": create_uuid(),
                    "group_uuid": group_uuid,
                    "terminal_uuid": terminal_uuid,
                    "desktop_group_uuid": desktop_group_uuid,
                    "terminal_mac": terminal_mac,
                }
                bind_info.update(desktop_ip_info)
                db_api.create_voi_terminal_desktop_bind(bind_info)
        logger.info(
            "terminal desktop bind data: {} success".format(bind_info))
        return build_result("Success")
    except Exception as e:
        logger.error("", exc_info=True)
        return build_result("OtherError")
def init_network(self, data):
    """Initialize the default virtual switch, network and subnet.

    :param data: {
            "ip": "172.16.1.49",          # node the bridge is created on
            "network_name": "default",
            "switch_name": "default",
            "switch_type": "vlan",
            "vlan_id": 10,
            "subnet_info": {
                "name": "default",
                "start_ip": "172.16.1.10",
                "end_ip": "172.16.1.20",
                "netmask": "255.255.0.0",
                "gateway": "172.16.1.254",
                "dns1": "8.8.8.8",
                "dns2": ""
            },
            "uplink": {
                "node_uuid": "",
                "nic_uuid": "",
                "interface": "ens224"
            }
        }
    :return: build_result dict
    """
    logger.info("check params")
    if not self._check_params(data):
        return build_result("ParamError")
    # switch / network / subnet names must all be unused
    vs = db_api.get_virtual_switch_by_name(data['switch_name'])
    if vs:
        return build_result("VSwitchExistError", name=data['switch_name'])
    network = db_api.get_network_by_name(data['network_name'])
    if network:
        return build_result("NetworkNameRepeatError",
                            name=data['network_name'])
    if constants.VLAN_NETWORK_TYPE == data['switch_type']:
        vlan_id = data.get('vlan_id', '')
        if not check_vlan_id(str(vlan_id)):
            return build_result("VlanIDError", vid=vlan_id)
    else:
        # non-vlan switch types carry no vlan id
        vlan_id = None
    subnet = db_api.get_subnet_by_name(data['subnet_info']['name'])
    if subnet:
        return build_result("SubnetNameRepeatError",
                            name=data['subnet_info']['name'])
    try:
        self.check_subnet_params(data['subnet_info'])
    except Exception as e:
        # NOTE(review): the payload documented above has no top-level
        # 'name' key, so data['name'] here looks like it would raise
        # KeyError — confirm the intended key (subnet name?)
        return build_result("SubnetInfoError", e.__str__(),
                            name=data['name'])
    # add default switch
    vs_uuid = create_uuid()
    switch_value = {
        "uuid": vs_uuid,
        "name": data['switch_name'],
        "type": data['switch_type'],
        "default": 1
    }
    uplink_value = {
        "vs_uuid": vs_uuid,
        "node_uuid": data['uplink']['node_uuid'],
        "nic_uuid": data['uplink']['nic_uuid']
    }
    # add default network
    network_uuid = create_uuid()
    network_value = {
        "uuid": network_uuid,
        "name": data['network_name'],
        "switch_name": data['switch_name'],
        "switch_uuid": vs_uuid,
        "switch_type": data['switch_type'],
        "vlan_id": vlan_id,
        "default": 1
    }
    # create the bridge on the compute node before touching the DB
    _data = {
        "command": "create",
        "handler": "NetworkHandler",
        "data": {
            "network_id": network_uuid,
            "network_type": data['switch_type'],
            "physical_interface": data['uplink']['interface'],
            "vlan_id": vlan_id
        }
    }
    rep_json = compute_post(data['ip'], _data)
    ret_code = rep_json.get("code", -1)
    if ret_code != 0:
        logger.error("create network failed:%s", rep_json['data'])
        return build_result("NetworkCreateFail")
    logger.info("create network success")
    # add subnet
    data['subnet_info']['network_uuid'] = network_uuid
    subnet_value = self._generate_subnet_info(data['subnet_info'])
    try:
        # persist switch, uplink, network and subnet only after the
        # node-side creation succeeded
        db_api.add_virtual_swtich(switch_value)
        db_api.add_virtual_switch_uplink(uplink_value)
        db_api.add_network(network_value)
        db_api.add_subnet(subnet_value)
        logger.info("init network success")
    except Exception as e:
        logger.error("init network failed:%s", e, exc_info=True)
        return build_result("NetworkInitFail")
    return build_result("Success")
def create_terminal_desktop_bind_bak(self, data):
    """Old (pre-redis-lock) variant of create_terminal_desktop_bind.

    Expected payload keys: 'group_uuid', 'terminal_uuid', 'terminal_id',
    'mac', 'ip', 'mask', 'gateway', 'dns1', 'dns2', 'is_dhcp'.
    """
    logger.info("create terminal desktop bind data: {}".format(data))
    try:
        terminal_uuid = data.get("terminal_uuid", "")
        terminal_id = data.get("terminal_id", "")  # NOTE(review): unused here
        group_uuid = data.get("group_uuid", "")
        terminal_mac = data.get("mac", "")
        bind_info = {}
        # purge rows left over from a previous group membership
        db_api.delete_voi_terminal_desktops(group_uuid, terminal_uuid)
        # all desktop groups of this terminal group
        qry_desktop_groups = db_api.get_item_with_all(
            models.YzyVoiDesktop, {"group_uuid": group_uuid})
        for qry in qry_desktop_groups:
            desktop_group_uuid = qry.uuid
            # only one row per (desktop_group_uuid, terminal)
            qry_exists = db_api.get_item_with_first(
                models.YzyVoiTerminalToDesktops, {
                    "desktop_group_uuid": desktop_group_uuid,
                    "terminal_uuid": terminal_uuid
                })
            if qry_exists:
                logger.info('continue: {}'.format(terminal_mac))
                continue
            logger.info('desktop_group_uuid: {}, qry.ip_detail: {}'.format(
                desktop_group_uuid, qry.ip_detail))
            desktop_is_dhcp = 0
            if qry.ip_detail:
                # ip_detail JSON keys: auto / start_ip / netmask / gateway /
                # dns_master / dns_slave
                ip_detail = json.loads(qry.ip_detail)
                desktop_is_dhcp = int(ip_detail.get("auto", True))
                desktop_start_ip = ip_detail.get("start_ip", "")
                desktop_mask = ip_detail.get("netmask", "")
                desktop_gateway = ip_detail.get("gateway", "")
                desktop_dns1 = ip_detail.get("dns_master", "")
                desktop_dns2 = ip_detail.get("dns_slave", "")
            desktop_ip_info = {}
            if bool(qry.use_bottom_ip):
                # desktop group reuses the terminal's own network config
                logger.info(
                    'desktop_group_uuid: {}, qry.use_bottom_ip: {}'.format(
                        desktop_group_uuid, qry.use_bottom_ip))
                desktop_ip_info = {
                    "desktop_is_dhcp": data.get("is_dhcp", ""),
                    "desktop_ip": data.get("ip", ""),
                    "desktop_mask": data.get("mask", ""),
                    "desktop_gateway": data.get("gateway", ""),
                    "desktop_dns1": data.get("dns1", ""),
                    "desktop_dns2": data.get("dns2", ""),
                }
            elif desktop_is_dhcp:
                logger.info(
                    'desktop_group_uuid: {}, desktop_is_dhcp: {}'.format(
                        desktop_group_uuid,
                        desktop_is_dhcp))
                desktop_ip_info = {
                    "desktop_is_dhcp": 1,
                    "desktop_ip": "",
                    "desktop_mask": "",
                    "desktop_gateway": "",
                    "desktop_dns1": "",
                    "desktop_dns2": "",
                }
            else:
                logger.info('desktop_group_uuid: {}, static ip'.format(
                    desktop_group_uuid))
                # Build the candidate pool from start_ip/netmask, drop IPs
                # already used by this desktop group, take the lowest left.
                # NOTE(review): if ip_detail is empty this branch reads
                # desktop_mask before assignment — confirm ip_detail is
                # always set when use_bottom_ip is false.
                netmask_bits = netaddr.IPAddress(
                    desktop_mask).netmask_bits()
                network = ipaddress.ip_interface(desktop_start_ip + '/' +
                                                 str(netmask_bits)).network
                original_ip_pool = [
                    x for x in network.hosts()
                    if x >= ipaddress.IPv4Address(desktop_start_ip)
                ]
                # get used ip in this desktop_group_uuid
                qry_ips = db_api.get_item_with_all(
                    models.YzyVoiTerminalToDesktops,
                    {"desktop_group_uuid": desktop_group_uuid})
                used_ip_pool = [
                    ipaddress.IPv4Address(qry.desktop_ip) for qry in qry_ips
                ]
                available_ip_pool = [
                    ip for ip in original_ip_pool if ip not in used_ip_pool
                ]
                if available_ip_pool:
                    desktop_ip = min(available_ip_pool).compressed
                    desktop_ip_info = {
                        "desktop_is_dhcp": 0,
                        "desktop_ip": desktop_ip,
                        "desktop_mask": desktop_mask,
                        "desktop_gateway": desktop_gateway,
                        "desktop_dns1": desktop_dns1,
                        "desktop_dns2": desktop_dns2,
                    }
                else:
                    # pool exhausted: fall back to the terminal's own
                    # network configuration (use_bottom_ip behaviour)
                    logger.info(
                        'desktop_group_uuid: {}, no availabel_ip'.format(
                            desktop_group_uuid))
                    desktop_ip_info = {
                        "desktop_is_dhcp": data.get("is_dhcp", ""),
                        "desktop_ip": data.get("ip", ""),
                        "desktop_mask": data.get("mask", ""),
                        "desktop_gateway": data.get("gateway", ""),
                        "desktop_dns1": data.get("dns1", ""),
                        "desktop_dns2": data.get("dns2", ""),
                    }
            bind_info = {
                "uuid": create_uuid(),
                "group_uuid": group_uuid,
                "terminal_uuid": terminal_uuid,
                "desktop_group_uuid": desktop_group_uuid,
                "terminal_mac": terminal_mac,
            }
            bind_info.update(desktop_ip_info)
            db_api.create_voi_terminal_desktop_bind(bind_info)
        logger.info(
            "terminal desktop bind data: {} success".format(bind_info))
        return build_result("Success")
    except Exception as e:
        logger.error("", exc_info=True)
        return build_result("OtherError")
def create_network(self, data):
    """Create a data network on an existing virtual switch.

    :param data: {
            "name": "network1",
            "switch_uuid": "570ddad8-27b5-11ea-a53d-562668d3ccea",
            "vlan_id": 10,
            "subnet_info": {
                "subnet_name": "default",
                "start_ip": "172.16.1.10",
                "end_ip": "172.16.1.20",
                "netmask": "255.255.0.0",
                "gateway": "172.16.1.254",
                "dns1": "8.8.8.8",
                "dns2": ""
            }
        }
    :return: build_result dict
    """
    if not data:
        return build_result("ParamError")
    network_info = db_api.get_network_by_name(data['name'])
    if network_info:
        logger.error("network name : %s repeat", data['name'])
        return build_result("NetworkNameRepeatError", name=data['name'])
    virtual_switch = db_api.get_virtual_switch(data['switch_uuid'])
    if not virtual_switch:
        logger.error("not virtual switch : %s", data['switch_uuid'])
        return build_result("VSwitchNotExist")
    network_type = virtual_switch.type
    if constants.VLAN_NETWORK_TYPE == network_type:
        vlan_id = data.get('vlan_id', 1)
        if not check_vlan_id(str(vlan_id)):
            return build_result("VlanIDError", vid=vlan_id)
    else:
        vlan_id = None
        # a flat (non-vlan) switch can back only one network
        net = db_api.get_network_all({'switch_uuid': virtual_switch.uuid})
        if net:
            return build_result("VSwitchFlatInUse")
    network_uuid = create_uuid()
    # create the bridge on every node carrying a live uplink of the switch
    for uplink in virtual_switch.uplinks:
        if not uplink.deleted:
            node = db_api.get_node_by_uuid(uplink.node_uuid)
            nic = db_api.get_nics_first({"uuid": uplink.nic_uuid})
            try:
                self._create_network(node.ip, network_uuid,
                                     virtual_switch.type, nic.nic, vlan_id)
            except Exception as e:
                logger.error("NetworkCreateFail: %s" % e, exc_info=True)
                # instances re-check and create the bridge on startup, so a
                # node-side failure here is tolerated instead of aborting
    network_value = {
        "uuid": network_uuid,
        "name": data['name'],
        "switch_name": virtual_switch.name,
        "switch_uuid": virtual_switch.uuid,
        "switch_type": virtual_switch.type,
        "vlan_id": vlan_id
    }
    logger.info("add network info in db")
    db_api.add_network(network_value)
    # the subnet is optional; a network can exist without one
    if data.get('subnet_info'):
        data['subnet_info']['network_uuid'] = network_uuid
        subnet_info = self._generate_subnet_info(data['subnet_info'])
        db_api.add_subnet(subnet_info)
    return build_result("Success")
def import_user(self, data):
    """Import users from an xlsx file (row 1 is a header and is skipped).

    Columns: 1 user_name, 2 password, 3 group name, 4 real name,
    5 email, 6 phone. Any row failing validation increments
    ``failed_num`` and is skipped.

    :param data: {"filepath": ..., "enabled": ...}
    :return: Success result with ``failed_num``/``success_num`` counts
    """
    file = data['filepath']
    enabled = data['enabled']
    logger.info("begin to import user, file:%s", file)
    success_num = 0
    failed_num = 0
    book = openpyxl.load_workbook(file)
    ws = book.active
    for row in ws.iter_rows(min_row=2, max_col=6):
        flag = True
        values = dict()
        for cell in row:
            if 1 == cell.column:
                user_name = cell.value
                if not user_name:
                    failed_num += 1
                    flag = False
                    # fix: the message had no %s placeholder for the extra
                    # argument (and user_name is blank here anyway), which
                    # made the logging call raise internally
                    logger.error("the user_name can not be blank, skip")
                    break
                user = db_api.get_group_user_with_first(
                    {"user_name": user_name})
                if user:
                    failed_num += 1
                    flag = False
                    logger.error("the user %s already exist in db, skip",
                                 user_name)
                    break
                values['user_name'] = user_name
            elif 2 == cell.column:
                passwd = cell.value
                if not passwd:
                    failed_num += 1
                    flag = False
                    logger.error("the password can not be blank")
                    break
                if len(str(passwd)) < 6 or len(str(passwd)) > 16:
                    failed_num += 1
                    flag = False
                    logger.error("the password len must between 6~16")
                    break
                values['passwd'] = create_md5(cell.value)
            elif 3 == cell.column:
                group_name = cell.value
                group = db_api.get_group_with_first({"name": group_name})
                if not group:
                    failed_num += 1
                    flag = False
                    logger.error("the group %s not exist, skip", group_name)
                    break
                else:
                    values['group_uuid'] = group.uuid
            elif 4 == cell.column:
                name = cell.value
                if name:
                    values['name'] = str(name)
            elif 5 == cell.column:
                email = cell.value
                if email:
                    values['email'] = str(email)
            elif 6 == cell.column:
                phone = cell.value
                if phone:
                    values['phone'] = str(phone)
            else:
                break
        if flag:
            values['enabled'] = int(enabled)
            values['uuid'] = create_uuid()
            db_api.create_group_user(values)
            success_num += 1
    return get_error_result("Success", {
        "failed_num": failed_num,
        "success_num": success_num
    })
def export_user(self, data):
    """Export the given users to an xlsx file and return a download URL.

    :param data: {
            "filename": "file1",
            "users": [{"uuid": "", "user_name": ""}, ...]
        }
    :return: error-result dict; Success carries {"url": ...}
    """
    logger.info("begin to export user:%s", data)
    # record a task row so the UI can track the export
    uuid = create_uuid()
    task_data = {
        "uuid": uuid,
        "task_uuid": uuid,
        "name": constants.NAME_TYPE_MAP[6],
        "status": constants.TASK_RUNNING,
        "type": 6
    }
    db_api.create_task_info(task_data)
    task_obj = db_api.get_task_info_first({"uuid": uuid})
    users = data.get('users', [])
    filename = '%s.xlsx' % data['filename']
    filepath = '/root/%s' % filename
    book = openpyxl.Workbook()
    ws = book.active
    ws.title = 'users'
    lines = list()
    info = ["用户名", "状态", "所属分组", "姓名", "邮箱", "电话"]
    lines.append(info)
    for user in users:
        user = db_api.get_group_user_with_first({'uuid': user['uuid']})
        if not user:
            # unknown uuids are silently skipped rather than aborting
            continue
        if user.enabled:
            status = "启用"
        else:
            status = "禁用"
        # fix: the header has six columns but the row previously omitted
        # the real name (姓名), shifting email and phone one column left
        user_info = [
            user.user_name, status, user.group.name, user.name,
            user.email, user.phone
        ]
        lines.append(user_info)
    try:
        for line in lines:
            ws.append(line)
        book.save(filepath)
    except Exception as e:
        logger.error("write xlsx file failed:%s", e)
        task_obj.update({"status": constants.TASK_ERROR})
        task_obj.soft_update()
        return get_error_result("GroupUserExportError", file=filename)
    node = db_api.get_controller_node()
    # derive the server port from the configured bind address, falling
    # back to the default port
    bind = SERVER_CONF.addresses.get_by_default('server_bind', '')
    if bind:
        port = bind.split(':')[-1]
    else:
        port = constants.SERVER_DEFAULT_PORT
    endpoint = 'http://%s:%s' % (node.ip, port)
    url = '%s/api/v1/group/user/download?path=%s' % (endpoint, filepath)
    logger.info("export user to file %s success", filename)
    task_obj.update({"status": constants.TASK_COMPLETE})
    task_obj.soft_update()
    return get_error_result("Success", {"url": url})
def create_share_disk(self, data):
    """Create a shared data disk for a terminal group.

    :param data:
        {
            "group_uuid": "xxxxxx",
            "disk_size": 5,   # shared disk size in GB
            "enable" : 0,     # whether the disk is enabled
            "restore": 0      # restore on reboot or not
        }
    The compute node receives:
        {
            "command": "create_share",
            "handler": "VoiHandler",
            "data": {
                "disk_info": {
                    'uuid': '...',
                    'base_path': '/opt/slow/instances',
                    'size': ...
                },
                "version": 0
            }
        }
    :return: Success result with the created disk record, or an error result.
    """
    logger.info("terminal share disk create data: {}".format(data))
    try:
        version = 0
        node = db_api.get_controller_node()
        sys_base, data_base = self._get_template_storage()
        disk_info = dict()
        disk_info["uuid"] = create_uuid()
        disk_info["base_path"] = sys_base['path']
        disk_info["size"] = data["disk_size"]
        command_data = {
            "command": "create_share",
            "handler": "VoiHandler",
            "data": {
                "disk_info": disk_info,
                "version": version
            }
        }
        logger.info("create share disk %s", disk_info)
        rep_json = compute_post(node.ip, command_data, timeout=600)
        if rep_json.get("code", -1) != 0:
            logger.error("create voi share disk:%s failed, error:%s",
                         disk_info, rep_json.get('data'))
            # message = rep_json['data'] if rep_json.get('data', None) else rep_json['msg']
            return jsonify(rep_json)
        # Persist the disk record only after the compute node succeeded
        share_disk = {
            "group_uuid": data["group_uuid"],
            "uuid": disk_info["uuid"],
            "disk_size": data["disk_size"],
            "enable": data["enable"],
            "restore": data["restore"]
        }
        db_api.create_voi_terminal_share(share_disk)
        # Generate the BT torrent file asynchronously so the request returns fast
        task = Thread(target=self.create_share_disk_torrent,
                      args=(disk_info, version))
        task.start()
        logger.info(
            "create terminal voi share disk data: {} success".format(
                share_disk))
        return build_result("Success", {"disk": share_disk})
    except Exception as e:
        # bugfix: the original logged an empty message; include the error
        logger.error("create share disk failed: %s", e, exc_info=True)
        return build_result("OtherError")
def update_share_disk(self, data):
    """Update a shared data disk: resize it (delete + recreate on the
    compute node) and maintain its desktop-group bindings.

    :param data:
        {
            "uuid": "xxxxxxxxxx",
            "enable": 0,
            "disk_size": 8,
            "restore": 1,
            "share_desktop": [
                {"uuid": "xxxxxxx", "name": "xxxxx", "choice": 0},
                ...
            ]
        }
        `choice` = 1 means the desktop should be bound to the disk,
        0 means it should be unbound.
    :return: Success result, or ShareDiskUpdateFail / OtherError.
    """
    logger.info("terminal share disk update data: {}".format(data))
    try:
        version = 0
        disk_uuid = data["uuid"]
        sys_base, data_base = self._get_template_storage()
        share_disk = db_api.get_item_with_first(
            models.YzyVoiTerminalShareDisk, {"uuid": disk_uuid})
        disk_info = dict()
        disk_info["uuid"] = share_disk.uuid
        disk_info["base_path"] = sys_base['path']
        disk_info["size"] = data["disk_size"]
        # A size change requires deleting and recreating the disk on the node
        if data["disk_size"] != share_disk.disk_size:
            node = db_api.get_controller_node()
            delete_command = {
                "command": "delete_share",
                "handler": "VoiHandler",
                "data": {
                    "disk_info": {
                        "uuid": share_disk.uuid,
                        "base_path": sys_base['path'],
                    },
                    "version": version
                }
            }
            ret = compute_post(node.ip, delete_command)
            if ret.get("code", -1) != 0:
                logger.error(
                    "terminal share disk update fail, delete old fail")
                return build_result("ShareDiskUpdateFail")
            # Create the new disk with the requested capacity
            command_data = {
                "command": "create_share",
                "handler": "VoiHandler",
                "data": {
                    "disk_info": disk_info,
                    "version": version
                }
            }
            ret_json = compute_post(node.ip, command_data)
            if ret_json.get("code", -1) != 0:
                logger.error(
                    "terminal share disk update fail, create new fail")
                return build_result("ShareDiskUpdateFail")
            share_disk.disk_size = data["disk_size"]
            share_disk.version += 1
        # Maintain the desktop-group binding relations
        desktops = db_api.get_item_with_all(
            models.YzyVoiDesktop, {"group_uuid": share_disk.group_uuid})
        desktop_binds = db_api.get_item_with_all(
            models.YzyVoiShareToDesktops, {"disk_uuid": disk_uuid})
        share_desktops = data["share_desktop"]
        # Desktops remaining in the copy after the loop are new bindings
        copy_share_desktops = share_desktops[:]
        for desktop in share_desktops:
            for bind in desktop_binds:
                if desktop["uuid"] == bind.desktop_uuid:
                    # Already bound: drop the bind if it was deselected,
                    # and never re-insert it below
                    if not int(desktop.get("choice", 0)):
                        bind.soft_delete()
                    copy_share_desktops.remove(desktop)
        insert_binds = list()
        if copy_share_desktops:
            for desktop in copy_share_desktops:
                if desktop["choice"]:
                    for _d in desktops:
                        if desktop["uuid"] == _d.uuid:
                            insert_binds.append({
                                "uuid": create_uuid(),
                                "group_uuid": share_disk.group_uuid,
                                "disk_uuid": disk_uuid,
                                "desktop_uuid": desktop["uuid"],
                                "desktop_name": desktop["name"]
                            })
        if insert_binds:
            db_api.insert_with_many(models.YzyVoiShareToDesktops,
                                    insert_binds)
        # Update the disk record itself
        share_disk.restore = data["restore"]
        share_disk.enable = data["enable"]
        share_disk.soft_update()
        # Regenerate the torrent file asynchronously
        task = Thread(target=self.create_share_disk_torrent,
                      args=(disk_info, version))
        task.start()
        logger.info(
            "update terminal voi share disk data: {} success".format(
                share_disk))
        return build_result("Success")
    except Exception as e:
        # bugfix: the original logged an empty message; include the error
        logger.error("update share disk failed: %s", e, exc_info=True)
        return build_result("OtherError")
def update_ip_info():
    """Update the static IP configuration of a physical NIC.

    Expects a JSON body:
        {
            "name": "eth0",
            "ip": "172.16.1.31",
            "netmask": "255.255.255.0",
            "gateway": "172.16.1.254",
            "dns1": "8.8.8.8",
            "dns2": "114.114.114.114"
        }
    Rewrites /etc/sysconfig/network-scripts/ifcfg-<nic>, bounces the
    interface and restarts the network service.

    :return: error-result dict; on success data contains the NIC name.
    """
    try:
        data = request.get_json()
        nic_name = data.get("name")
        ip = data.get("ip")
        netmask = data.get("netmask")
        gateway = data.get("gateway")
        dns1 = data.get("dns1")
        dns2 = data.get("dns2")
        if not (is_ip_addr(ip) and is_netmask(netmask)[0]
                and is_ip_addr(gateway)):
            current_app.logger.error("update nic %s ip, param error" %
                                     (nic_name))
            return errcode.get_error_result("IpInfoParamError")
        # bugfix: dns1 is written unconditionally into the ifcfg file below
        # but was never validated (dns2 was); validate it the same way
        if dns1 and not is_ip_addr(dns1):
            current_app.logger.error("update nic %s ip, dns1 error" %
                                     (nic_name))
            return errcode.get_error_result("IpInfoParamError")
        if dns2 and not is_ip_addr(dns2):
            current_app.logger.error("update nic %s ip, dns2 error" %
                                     (nic_name))
            return errcode.get_error_result("IpInfoParamError")
        resp = errcode.get_error_result()
        virtual_net_device = os.listdir('/sys/devices/virtual/net/')
        resp['data'] = {}
        utc = int((dt.datetime.utcnow() -
                   dt.datetime.utcfromtimestamp(0)).total_seconds())
        resp['data']['utc'] = utc
        nic_addrs = psutil.net_if_addrs()
        nic_stats = psutil.net_if_stats()
        # Physical NICs are those not listed under /sys/devices/virtual/net/
        physical_net_device = [
            dev for dev in nic_addrs.keys() if dev not in virtual_net_device
        ]
        if nic_name not in physical_net_device:
            current_app.logger.error("add nic %s ip, not physical nic" %
                                     nic_name)
            return errcode.get_error_result("NotPhysicalNICError")
        nic_ifcfg = "/etc/sysconfig/network-scripts/ifcfg-%s" % nic_name
        if not os.path.exists(nic_ifcfg):
            resp = errcode.get_error_result(error="IpConfFileNoFound")
            return resp
        ifcfg_str = "TYPE=Ethernet\nPROXY_METHOD=none\nBROWSER_ONLY=no\nBOOTPROTO=static\nDEFROUTE=yes"\
            "\nIPV4_FAILURE_FATAL=no\nIPV6INIT=yes\nIPV6_AUTOCONF=yes\nIPV6_DEFROUTE=yes\nIPV6_FAILURE_FATAL=no"\
            "\nIPV6_ADDR_GEN_MODE=stable-privacy\nNAME={name}\nUUID={uuid}\nDEVICE={nic}\nONBOOT=yes\nIPADDR={ip}"\
            "\nNETMASK={netmask}\nGATEWAY={gateway}\nDNS1={dns1}"
        uuid = create_uuid()
        ifcfg_str = ifcfg_str.format(
            **{
                "uuid": uuid,
                "nic": nic_name,
                "ip": ip,
                "netmask": netmask,
                "gateway": gateway,
                "dns1": dns1,
                "name": nic_name
            })
        # NOTE(review): os.system with interpolated values is shell-injection
        # prone; nic_name/gateway come from the request body — consider
        # subprocess.run([...], shell=False) with argument lists
        os.system("ifdown %s" % nic_name)
        if dns2:
            ifcfg_str += "\nDNS2=%s" % dns2
        with open(nic_ifcfg, "w") as f:
            f.write(ifcfg_str)
        # Maintain the routing table: check whether the new gateway responds
        ret = icmp_ping(gateway)
        if ret:
            os.system("ifup %s" % nic_name)
            current_app.logger.info(
                "interface %s update ip %s, gateway %s is link", nic_name,
                ip, gateway)
        else:
            # Gateway unreachable: bring the NIC up but drop the default route
            os.system("ifup %s" % nic_name)
            os.system("route del default gw %s" % gateway)
            current_app.logger.info(
                "interface %s update ip %s, gateway %s is not link",
                nic_name, ip, gateway)
        # Restart networking so the new config takes effect
        os.system("systemctl restart network")
        current_app.logger.info("update nic %s ip success" % nic_name)
        resp["data"] = {"name": nic_name}
        return resp
    except Exception as err:
        current_app.logger.error(err)
        current_app.logger.error(''.join(traceback.format_exc()))
        resp = errcode.get_error_result(error="UpdateIpConfFileFailure")
        return resp
def create(self, data):
    """Create a new school term.

    :param data:
        {
            "name": "2020年上学期",
            "start": "2020/09/01",
            "end": "2021/02/01",
            "duration": 45,
            "break_time": 10,
            "morning": "08:00",
            "afternoon": "14:00",
            "evening": "19:00",
            "morning_count": 4,
            "afternoon_count": 4,
            "evening_count": 2,
            "course_num_map": {
                "1": "08:00-08:45",
                "2": "09:00-09:45",
                ...
            }
        }
    :return: Success result, or a specific error result
        (ParamError-style checks, TermNameExist, OtherError).
    """
    try:
        logger.info("data: %s" % data)
        check_ret = self._check_course_num_map(data["course_num_map"])
        if check_ret:
            return check_ret
        check_ret = self._check_term_time_detail(data)
        if check_ret:
            return check_ret
        if db_api.get_term_with_first({"name": data["name"]}):
            return get_error_result("TermNameExist")
        # Make sure the new term's period does not overlap an existing term
        check_ret = self._check_term_duplicate(data["start"], data["end"])
        if check_ret:
            return check_ret
        data["uuid"] = create_uuid()
        data["weeks_num_map"] = self._generate_weeks_num_map(
            data["start"], data["end"])
        # Register the crontab task that drives this term's schedule
        task_uuid = self._add_crontab_task(
            term_uuid=data["uuid"],
            name=data["name"],
            start_date=data["start"],
            end_date=data["end"],
            course_num_map=data["course_num_map"],
            weeks_num_map=data["weeks_num_map"])
        if task_uuid:
            data["crontab_task_uuid"] = task_uuid
            data["course_num_map"] = json.dumps(data["course_num_map"])
            data["weeks_num_map"] = json.dumps(data["weeks_num_map"])
            # For a new term every education group defaults to enabled
            group_status_map = dict()
            group_obj_list = db_api.get_group_with_all(
                {"group_type": constants.EDUCATION_GROUP})
            for group_obj in group_obj_list:
                group_status_map[
                    group_obj.uuid] = constants.COURSE_SCHEDULE_ENABLED
            data["group_status_map"] = json.dumps(group_status_map)
            db_api.create_term(data)
            logger.info("insert in yzy_term success: %s" % data)
            return get_error_result()
        else:
            logger.info("add course crontab task failed")
            return get_error_result("OtherError")
    except Exception as e:
        # logger.exception already records the traceback; the original's
        # extra exc_info=True was redundant
        logger.exception("create term failed: %s" % str(e))
        return get_error_result("OtherError")
def person_instance(self, data):
    """Resolve and start the personal-desktop instance for a terminal user.

    Validates the license and the user session, reuses an already-linked
    instance when possible, otherwise allocates (random desktop) or looks
    up (static desktop) an instance, starts it, and returns its spice
    connection info. Any running education desktop on the same terminal
    is stopped before returning.

    :param data: {"session_id": ..., "desktop_uuid": ..., "desktop_name": ...}
    :return: build_result with spice connection info on success, or a
        specific error result.
    """
    session_id = data.get("session_id", "")
    desktop_uuid = data.get("desktop_uuid", "")
    desktop_name = data.get("desktop_name", "")
    auth_info = LicenseManager().get_auth_info()
    # auth_type: 0-expired 1-grace period 2-trial 3-licensed
    # if 0 == auth_info.get('auth_type') or (1 == auth_info.get('auth_type') and auth_info.get('delay_days', 0) == 0)\
    #         or (2 == auth_info.get('auth_type') and auth_info.get('expire_time', 0) == 0):
    if auth_info.get("auth_type", 0) == 0:
        return build_result("AuthExpired")
    if self.get_links_num() >= auth_info.get('vdi_size', 0):
        return build_result("AuthSizeExpired")
    # Check the user's login state
    session = db_api.get_group_user_session_first(
        {"session_id": session_id})
    if not session:
        logger.error(
            "terminal user query desktop group error: %s not exist" %
            session_id)
        return build_result("TerminalUserLogout")
    user = db_api.get_group_user_with_first({"uuid": session.user_uuid})
    if not user:
        logger.error(
            "terminal user query desktop group error: %s not exist" %
            session.user_uuid)
        return build_result("TerminalUserNotExistError")
    if not user.enabled:
        logger.error(
            "terminal user query desktop group error: %s is unenabled" %
            user.user_name)
        return build_result("TerminalUserUnenabledError")
    desktop_group = db_api.get_personal_desktop_with_first(
        {"uuid": desktop_uuid})
    if not desktop_group:
        logger.error("person desktop not exist: %s" % desktop_uuid)
        return build_result("DesktopNotExist", name=desktop_name)
    # Collect the random and static instances assigned to this user
    instances = list()
    # Static desktops
    static_instances = db_api.get_instance_with_all(
        {"user_uuid": user.uuid})
    for obj in static_instances:
        instances.append(obj)
    random_instances = db_api.get_user_random_instance_with_all(
        {"user_uuid": user.uuid})
    for obj in random_instances:
        instances.append(obj.instance)
    # Probe the spice link state of all the user's instances, grouped by host
    host_spice_ports = dict()
    current_instance = None
    for instance in instances:
        host_ip = instance.host.ip
        if host_ip not in host_spice_ports:
            host_spice_ports[host_ip] = []
        if instance.spice_port:
            host_spice_ports[host_ip].append(str(instance.spice_port))
        if instance.desktop_uuid == desktop_uuid:
            current_instance = instance
    # Get the connection state of every personal-desktop spice port
    ret = self.get_spice_link(host_spice_ports)
    if current_instance:
        host_ip = current_instance.host.ip
        spice_port = current_instance.spice_port
        if spice_port and ret.get(host_ip, {}).get(spice_port):
            # Already linked to this desktop's instance: return it as-is
            logger.info(
                "terminal user request the same instance: user %s, desktop %s",
                user.uuid, desktop_uuid)
            instance_info = current_instance.instance_base_info()
            instance_info.update({"os_type": desktop_group.os_type})
            return build_result("Success", instance_info)
        else:
            # Not linked: a random-desktop instance has to be released
            if desktop_group.desktop_type == constants.RANDOM_DESKTOP:
                for _obj in random_instances:
                    if _obj.instance_uuid == current_instance.uuid:
                        _obj.soft_delete()
                        break
                current_instance.allocated = 0
                current_instance.soft_update()
    # Reject when the user already has two or more live spice links
    count = 0
    for k, v in ret.items():
        for i, j in v.items():
            if j:
                count += 1
    if count >= 2:
        logger.error("user %s person desktop instance much 2", user.uuid)
        return build_result("TerminalPersonalInstanceNumError")
    # Desktop group under maintenance cannot be entered
    if desktop_group.maintenance:
        logger.error("person desktop is maintenance: %s", desktop_uuid)
        return build_result("TerminalPersonMaintenance")
    subnet = db_api.get_subnet_by_uuid(desktop_group.subnet_uuid)
    # if not subnet:
    #     logger.error("person instance start error: not subnet %s" % desktop_group.subnet_uuid)
    #     return build_result("TerminalPersonStartError")
    # Start the desktop instance
    controller = BaseController()
    template = db_api.get_instance_template(desktop_group.template_uuid)
    sys_base, data_base = controller._get_storage_path_with_uuid(
        template.sys_storage, template.data_storage)
    if desktop_group.desktop_type == constants.RANDOM_DESKTOP:
        # Random desktop: allocate the first free instance
        instance = db_api.get_instance_by_desktop_first_alloc(desktop_uuid)
        if not instance:
            logger.error("person desktop not instance to alloc")
            return build_result("TerminalPersonInstanceNotAlloc")
        ret = controller.create_instance(desktop_group, subnet, instance,
                                         sys_base, data_base)
        if ret.get('code') != 0:
            logger.error("person instance start error: %s", instance.uuid)
            return build_result("TerminalPersonStartError")
        # Bindings already pointing at this instance
        instance_binds = db_api.get_user_random_instance_with_all(
            {"instance_uuid": instance.uuid})
        # Clear the user's other bindings for this desktop
        # NOTE(review): this loop rebinds the local name `instance` to
        # random_ins.instance, so the bookkeeping below may act on a
        # released instance rather than the freshly allocated one —
        # confirm whether this clobbering is intentional
        for random_ins in random_instances:
            if random_ins.desktop_uuid == desktop_uuid:
                instance = random_ins.instance
                instance.allocated = 0
                instance.terminal_mac = None
                instance.link_time = None
                instance.soft_update()
                random_ins.soft_delete()
        for random_ins in instance_binds:
            random_ins.soft_delete()
        values = {
            "uuid": create_uuid(),
            "desktop_uuid": desktop_uuid,
            "user_uuid": user.uuid,
            "instance_uuid": instance.uuid
        }
        db_api.create_user_random_instance(values)
        instance.allocated = 1
        instance.link_time = datetime.now()
        instance.spice_link = 1
        instance.terminal_mac = user.mac
        instance.soft_update()
        logger.info("random person instance start succes: %s",
                    instance.uuid)
    else:
        # Static desktop: use the instance bound to this user
        static_instance_bind = db_api.get_instance_with_first({
            "desktop_uuid": desktop_uuid,
            "user_uuid": user.uuid
        })
        if not static_instance_bind:
            logger.error(
                "static person desktop not bind: desktop group %s, user %s",
                desktop_uuid, user.uuid)
            return build_result("TerminalPersonInstanceNotAlloc")
        instance = static_instance_bind
        ret = controller.create_instance(desktop_group, subnet, instance,
                                         sys_base, data_base)
        if ret.get('code') != 0:
            logger.error("person instance start error: %s", instance.uuid)
            return build_result("TerminalPersonStartError")
        logger.info("static person instance start succes: %s",
                    instance.uuid)
        instance.link_time = datetime.now()
        instance.terminal_mac = user.mac
        instance.spice_link = 1
        instance.soft_update()
    data = {
        "spice_host": instance.host.ip,
        "spice_token": instance.spice_token,
        "spice_port": instance.spice_port,
        "name": instance.name,
        "uuid": instance.uuid,
        "os_type": desktop_group.os_type
    }
    # Stop any education desktop still running on this terminal
    edu_instance = db_api.get_instance_first({
        "terminal_mac": user.mac,
        "classify": constants.EDUCATION_DESKTOP,
        "status": constants.STATUS_ACTIVE
    })
    if edu_instance:
        desktop = db_api.get_desktop_by_uuid(edu_instance.desktop_uuid)
        controller.stop_instance(edu_instance, desktop)
    return build_result("Success", data)
def sync_base(self, ipaddr, server_ip, image_id, image_path, host_uuid=None,
              md5_sum=None, version=0):
    """Push a base image to a compute node and track it as a task.

    :param ipaddr: IP of the compute node that should pull the image
    :param server_ip: IP of this server, used to build the download endpoint
    :param image_id: identifier of the image being synced
    :param image_path: path of the image file (also used as the destination)
    :param host_uuid: optional host uuid passed to the Task tracker
    :param md5_sum: optional checksum the node can verify against
    :param version: image version passed to the Task tracker
    :return: the raw response dict from the compute node
    """
    task = Task(image_id=image_id, host_uuid=host_uuid, version=version)
    record_uuid = create_uuid()
    task_id = create_uuid()
    # Persist a task record so the sync shows up in the task list
    db_api.create_task_info({
        "uuid": record_uuid,
        "task_uuid": task_id,
        "name": constants.NAME_TYPE_MAP[2],
        "status": constants.TASK_RUNNING,
        "type": 2
    })
    task.begin(task_id, "start sync the image to host:%s" % ipaddr)
    # Build the endpoint the node downloads from (configured bind port,
    # falling back to the default)
    bind = SERVER_CONF.addresses.get_by_default('server_bind', '')
    port = bind.split(':')[-1] if bind else constants.SERVER_DEFAULT_PORT
    command_data = {
        "command": "sync",
        "handler": "TemplateHandler",
        "data": {
            "image_version": 0,
            "task_id": task_id,
            "endpoint": "http://%s:%s" % (server_ip, port),
            "url": constants.IMAGE_SYNC_URL,
            "image": {
                "image_id": image_id,
                "disk_file": image_path,
                "backing_file": image_path,
                "dest_path": image_path,
                "md5_sum": md5_sum
            }
        }
    }
    rep_json = compute_post(ipaddr, command_data, timeout=600)
    task_obj = db_api.get_task_info_first({"uuid": record_uuid})
    if rep_json.get('code') != 0:
        logger.info("sync the image to host:%s failed:%s", ipaddr,
                    rep_json['data'])
        task.error(
            task_id, "sync the image to host:%s failed:%s" %
            (ipaddr, rep_json['data']))
        task_obj.update({"status": constants.TASK_ERROR})
    else:
        logger.info("sync the base to host:%s success", ipaddr)
        task.end(task_id, "sync the image to host:%s success" % ipaddr)
        task_obj.update({"status": constants.TASK_COMPLETE})
    task_obj.soft_update()
    return rep_json
def create_voi_desktop(self, data):
    """Create a VOI teaching desktop group.

    :param data:
        {
            "name": "desktop2",
            "owner_id": "",
            "group_uuid": "d02cd368-5396-11ea-ad80-000c295dd728",
            "template_uuid": "6f1006c0-56d1-11ea-aec0-000c295dd728",
            "sys_restore": 1,
            "data_restore": 1,
            "prefix": "pc",
            "postfix": 3,
            "postfix_start": 5,
            "show_info": true,
            "auto_update": true
        }
    :return: Success result, or ParamError / GroupNotExists /
        TemplateNotExist / DesktopCreateFail.
    """
    if not self._check_params(data):
        return get_error_result("ParamError")
    group = db_api.get_item_with_first(models.YzyVoiGroup,
                                       {"uuid": data['group_uuid']})
    if not group:
        logger.error("voi group: %s not exist", data['group_uuid'])
        return get_error_result("GroupNotExists", name="")
    template = db_api.get_item_with_first(models.YzyVoiTemplate,
                                          {"uuid": data['template_uuid']})
    if not template:
        logger.error("voi template: %s not exist", data['template_uuid'])
        return get_error_result("TemplateNotExist")
    # Used only to decide whether this is the first (default) desktop group
    has_group = db_api.get_item_with_first(models.YzyVoiDesktop, {})
    # if constants.PERSONAL_DEKSTOP == template.classify:
    #     return get_error_result("TemplatePersonalError", name=template.name)
    # add desktop
    desktop_uuid = create_uuid()
    desktop_value = {
        "uuid": desktop_uuid,
        "owner_id": data['owner_id'],
        "name": data['name'],
        "group_uuid": data['group_uuid'],
        "template_uuid": data['template_uuid'],
        "os_type": template.os_type,
        "sys_restore": data['sys_restore'],
        "data_restore": data['data_restore'],
        "sys_reserve_size": data['sys_reserve_size'],
        "data_reserve_size": data['data_reserve_size'],
        "prefix": data['prefix'],
        "diff_mode": data.get("diff_mode", 0),
        "use_bottom_ip": data.get('use_bottom_ip', True),
        "ip_detail":
        json.dumps(data['ip_detail']) if data.get('ip_detail') else '',
        # "postfix": data.get('postfix', 1),
        # "postfix_start": data.get('postfix_start', 1),
        "active": True,  # active by default
        "default": False if has_group else True,
        "show_info": data.get('show_info', False),
        "auto_update": data.get('auto_update', False)
    }
    try:
        db_api.create_voi_desktop(desktop_value)
        logger.info("create voi desktop %s success", data['name'])
    except Exception as e:
        # bugfix: the original used logging.info (wrong module and level)
        # for a failure path; log via the module logger at error level
        logger.error("insert voi desktop info to db failed:%s", e)
        return get_error_result("DesktopCreateFail", name=data['name'])
    return get_error_result("Success")
def apply(self, data):
    """Apply one course schedule to several weeks of the same term/group.

    :param data:
        {
            "uuid": "886cc37d-121c-4f81-a933-f002b5d86094",
            "week_nums": [1, 3, 5, 7]
        }
    Weeks that already have a schedule get their template overwritten;
    weeks without one get a new schedule row created.
    :return: Success result, or ParamError / CourseScheduleNotExist /
        OtherError.
    """
    try:
        logger.info("data: %s" % data)
        uuid = data.get("uuid", None)
        week_nums = data.get("week_nums", None)
        if not uuid or not week_nums or not isinstance(
                week_nums, list) or not all(
                    [isinstance(i, int) for i in week_nums]):
            return get_error_result("ParamError",
                                    data={"keys": ["uuid", "week_nums"]})
        target_cs_obj = db_api.get_course_schedule_with_first(
            {"uuid": uuid})
        if not target_cs_obj:
            return get_error_result("CourseScheduleNotExist")
        # Validate that every requested week exists in the term
        term_obj = db_api.get_term_with_first(
            {"uuid": target_cs_obj.term_uuid})
        weeks_num_map = json.loads(term_obj.weeks_num_map)
        # Drop the source week itself and deduplicate
        week_nums = set(
            [num for num in week_nums if num != target_cs_obj.week_num])
        for week_num in week_nums:
            if str(week_num) not in weeks_num_map.keys():
                # logger.error("ParamError: week_nums %d" % week_num)
                return get_error_result("ParamError",
                                        data={"week_nums": week_num})
        # Find every week of this term/group that already has a schedule
        cs_list = db_api.get_course_schedule_with_all({
            "term_uuid": target_cs_obj.term_uuid,
            "group_uuid": target_cs_obj.group_uuid
        })
        occupied_weeks = dict()
        for cs_obj in cs_list:
            occupied_weeks[cs_obj.week_num] = cs_obj
        cs_values_list = list()
        for week_num in week_nums:
            # Week already has a schedule: overwrite its template reference
            if week_num in occupied_weeks.keys():
                occupied_weeks[week_num].update({
                    "course_template_uuid":
                    target_cs_obj.course_template_uuid
                })
                # bugfix: the original passed (week_num, uuid, ...) so the
                # uuid[%s] and week_num[%s] placeholders were swapped
                logger.info(
                    "update uuid[%s] week_num[%s] in yzy_course_schedule success: {'course_template_uuid': %s}"
                    % (occupied_weeks[week_num].uuid, str(week_num),
                       occupied_weeks[week_num].course_template_uuid))
            # Week has no schedule yet: create one
            else:
                cs_values_list.append({
                    "uuid": create_uuid(),
                    "term_uuid": target_cs_obj.term_uuid,
                    "group_uuid": target_cs_obj.group_uuid,
                    "course_template_uuid":
                    target_cs_obj.course_template_uuid,
                    "week_num": week_num,
                    "course_md5": target_cs_obj.course_md5,
                    "status": 1
                })
        # Only touch the DB when there is something to insert
        if cs_values_list:
            db_api.create_course_schedule_many(cs_values_list)
            logger.info(
                "insert many[%d] in yzy_course_schedule success: course_template_uuid[%s]"
                % (len(cs_values_list), target_cs_obj.course_template_uuid))
        return get_error_result()
    except Exception as e:
        # logger.exception already records the traceback; exc_info was
        # redundant in the original
        logger.exception("apply course_schedule failed: %s" % str(e))
        return get_error_result("OtherError")