def _GET(self, *param, **params):
    """Expose a guest's console info (graphics port, domain name, host IP)."""
    _prep_console()

    (host_id, guest_id) = self.chk_guestby1(param)
    if guest_id is None:
        return web.notfound()

    model = findbyguest1(self.orm, guest_id)

    kvc = KaresansuiVirtConnection()
    try:
        domname = kvc.uuid_to_domname(model.uniq_key)
        if not domname:
            return web.notfound()
        dom = kvc.search_guests(domname)[0]
        document = XMLParse(dom.XMLDesc(1))
        self.view.graphics_port = XMLXpath(document,
                                           '/domain/devices/graphics/@port')
        self.view.xenname = XMLXpath(document, '/domain/name/text()')
    finally:
        kvc.close()

    h_model = findbyhost1(self.orm, host_id)

    # Best-effort host IP resolution, in order of preference:
    # bridge device address -> stored hostname -> local hostname lookup.
    try:
        from karesansui.lib.utils import get_ifconfig_info
        device = KVM_BRIDGE_PREFIX + "0"
        self.view.host_ipaddr = get_ifconfig_info(device)[device]["ipaddr"]
    except:
        try:
            self.view.host_ipaddr = h_model.hostname.split(":")[0].strip()
        except:
            self.view.host_ipaddr = socket.gethostbyname(socket.gethostname())

    return True
def _DELETE(self, *param, **params):
    """Register a job that removes the bonding configuration for a device."""
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()
    device = param[1]
    if device is None:
        return web.notfound()

    cmdname = u"Delete Bonding Setting"
    cmd = BONDING_COMMAND_DELETE
    options = {'dev': device, "succession": None}

    _cmd = dict2command(
        "%s/%s" % (karesansui.config['application.bin.dir'], cmd), options)

    _jobgroup = JobGroup(cmdname, karesansui.sheconf['env.uniqkey'])
    _jobgroup.jobs.append(Job('%s command' % cmdname, 0, _cmd))

    host = findbyhost1(self.orm, host_id)
    _machine2jobgroup = m2j_new(machine=host,
                                jobgroup_id=-1,
                                uniq_key=karesansui.sheconf['env.uniqkey'],
                                created_user=self.me,
                                modified_user=self.me,
                                )
    save_job_collaboration(self.orm,
                           self.pysilhouette.orm,
                           _machine2jobgroup,
                           _jobgroup,
                           )
    return web.accepted()
def _GET(self, *param, **params):
    """Return a guest's status on a remote (attribute == 2) host.

    virDomainState values:
      0 NOSTATE, 1 RUNNING, 2 BLOCKED, 3 PAUSED,
      4 SHUTDOWN, 5 SHUTOFF, 6 CRASHED
    """
    # BUGFIX: removed a dead "host_id = param[0]" that was immediately
    # overwritten and raised IndexError on an empty param tuple.
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()
    uri_id = param[1]
    if uri_id is None:
        return web.notfound()

    model = findbyhost1(self.orm, host_id)

    if self.is_mode_input() is False:
        if model.attribute == 2:
            info = {}
            segs = uri_split(model.hostname)
            uri = uri_join(segs, without_auth=True)
            creds = ""
            if segs["user"] is not None:
                creds += segs["user"]
                if segs["passwd"] is not None:
                    creds += ":" + segs["passwd"]

            # BUGFIX: initialize status so a UUID with no matching guest
            # reports None instead of raising NameError.
            status = None
            self.kvc = KaresansuiVirtConnectionAuth(uri, creds)
            try:
                host = MergeHost(self.kvc, model)
                for guest in host.guests:
                    _virt = self.kvc.search_kvg_guests(
                        guest.info["model"].name)
                    if 0 < len(_virt):
                        for _v in _virt:
                            info = _v.get_info()
                            # "test:" URIs match unconditionally.
                            if info["uuid"] == uri_id or (uri[0:5] == "test:"):
                                __guest = MergeGuest(guest.info["model"], _v)
                                status = _v.status()
                                break
                if self.is_json() is True:
                    self.view.status = json_dumps(status)
                else:
                    self.view.status = status
            finally:
                self.kvc.close()

    return True
def _GET(self, *param, **params):
    """Return a guest's status on a remote (attribute == 2) host.

    virDomainState values:
      0 NOSTATE, 1 RUNNING, 2 BLOCKED, 3 PAUSED,
      4 SHUTDOWN, 5 SHUTOFF, 6 CRASHED
    """
    # BUGFIX: removed a dead "host_id = param[0]" that was immediately
    # overwritten and raised IndexError on an empty param tuple.
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()
    uri_id = param[1]
    if uri_id is None:
        return web.notfound()

    model = findbyhost1(self.orm, host_id)

    if self.is_mode_input() is False:
        if model.attribute == 2:
            info = {}
            segs = uri_split(model.hostname)
            uri = uri_join(segs, without_auth=True)
            creds = ''
            if segs["user"] is not None:
                creds += segs["user"]
                if segs["passwd"] is not None:
                    creds += ':' + segs["passwd"]

            # BUGFIX: initialize status so a UUID with no matching guest
            # reports None instead of raising NameError.
            status = None
            self.kvc = KaresansuiVirtConnectionAuth(uri, creds)
            try:
                host = MergeHost(self.kvc, model)
                for guest in host.guests:
                    _virt = self.kvc.search_kvg_guests(
                        guest.info["model"].name)
                    if 0 < len(_virt):
                        for _v in _virt:
                            info = _v.get_info()
                            # "test:" URIs match unconditionally.
                            if info["uuid"] == uri_id or (uri[0:5] == "test:"):
                                __guest = MergeGuest(guest.info["model"], _v)
                                status = _v.status()
                                break
                if self.is_json() is True:
                    self.view.status = json_dumps(status)
                else:
                    self.view.status = status
            finally:
                self.kvc.close()

    return True
def _PUT(self, *param, **params):
    """Update a NIC's ifcfg configuration and queue a write-conf task."""
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()
    uni_device = param[1]
    if uni_device is None:
        return web.notfound()
    device = uni_device.encode("utf-8")

    if not validates_nic(self):
        self.logger.debug("Change nic failed. Did not validate.")
        return web.badrequest(self.view.alert)

    host = findbyhost1(self.orm, host_id)

    modules = ["ifcfg"]
    dop = read_conf(modules, self, host)
    if dop is False:
        self.logger.error("Change nic failed. Failed read conf.")
        return web.internalerror('Internal Server Error. (Read conf)')

    # BUGFIX: is_param() was called with the empty local variable (ipaddr /
    # netmask) instead of the parameter name string, so the submitted
    # values were never read.
    ipaddr = ""
    if is_param(self.input, 'ipaddr'):
        if self.input.ipaddr:
            ipaddr = self.input.ipaddr
    netmask = ""
    if is_param(self.input, 'netmask'):
        if self.input.netmask:
            netmask = self.input.netmask
    bootproto = self.input.bootproto
    onboot = "no"
    if is_param(self.input, 'onboot'):
        onboot = "yes"

    net = NetworkAddress("%s/%s" % (ipaddr, netmask))
    network = net.network
    broadcast = net.broadcast

    if not dop.get("ifcfg", device):
        self.logger.error("Change nic failed. Target config not found.")
        return web.internalerror('Internal Server Error. (Get conf)')

    dop.set("ifcfg", [device, "ONBOOT"], onboot)
    dop.set("ifcfg", [device, "BOOTPROTO"], bootproto)
    dop.set("ifcfg", [device, "IPADDR"], ipaddr)
    dop.set("ifcfg", [device, "NETMASK"], netmask)
    if network is not None:
        dop.set("ifcfg", [device, "NETWORK"], network)
    if broadcast is not None:
        dop.set("ifcfg", [device, "BROADCAST"], broadcast)

    retval = write_conf(dop, self, host)
    if retval is False:
        self.logger.error("Change nic failed. Failed write conf.")
        return web.internalerror('Internal Server Error. (Adding Task)')

    return web.accepted(url=web.ctx.path)
def _POST(self, *param, **params):
    """Add a static route and queue the config write plus a `route add`."""
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()
    host = findbyhost1(self.orm, host_id)

    if not validates_staticroute(self):
        return web.badrequest(self.view.alert)

    modules = ["staticroute"]
    dop = read_conf(modules, self, host)
    if dop is False:
        return web.internalerror('Internal Server Error. (Timeout)')

    target = self.input.target
    net = NetworkAddress(target)
    ipaddr = net.ipaddr
    netmask = net.netmask
    netlen = net.netlen
    network = net.network
    target = "%s/%s" % (ipaddr, netlen,)
    gateway = self.input.gateway
    device = self.input.device

    dop.set("staticroute", [device, target], gateway)

    from karesansui.lib.parser.staticroute import PARSER_COMMAND_ROUTE
    if net.netlen == 32:
        # NOTE: the original built a "gw" variant first and immediately
        # overwrote it (dead store, now removed); host routes are added
        # without a gateway.
        command = "%s add -host %s dev %s" % (
            PARSER_COMMAND_ROUTE, ipaddr, device,)
    else:
        command = "%s add -net %s netmask %s gw %s dev %s" % (
            PARSER_COMMAND_ROUTE, network, netmask, gateway, device,)

    extra_args = {"post-command": command}
    retval = write_conf(dop, self, host, extra_args=extra_args)
    if retval is False:
        return web.internalerror('Internal Server Error. (Adding Task)')
    return web.accepted(url=web.ctx.path)
def _PUT(self, *param, **params):
    """Update a NIC's ifcfg configuration and queue a write-conf task."""
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()
    uni_device = param[1]
    if uni_device is None:
        return web.notfound()
    device = uni_device.encode("utf-8")

    if not validates_nic(self):
        self.logger.debug("Change nic failed. Did not validate.")
        return web.badrequest(self.view.alert)

    host = findbyhost1(self.orm, host_id)

    modules = ["ifcfg"]
    dop = read_conf(modules, self, host)
    if dop is False:
        self.logger.error("Change nic failed. Failed read conf.")
        return web.internalerror('Internal Server Error. (Read conf)')

    # BUGFIX: is_param() was called with the empty local variable (ipaddr /
    # netmask) instead of the parameter name string, so the submitted
    # values were never read.
    ipaddr = ""
    if is_param(self.input, 'ipaddr'):
        if self.input.ipaddr:
            ipaddr = self.input.ipaddr
    netmask = ""
    if is_param(self.input, 'netmask'):
        if self.input.netmask:
            netmask = self.input.netmask
    bootproto = self.input.bootproto
    onboot = "no"
    if is_param(self.input, 'onboot'):
        onboot = "yes"

    net = NetworkAddress("%s/%s" % (ipaddr, netmask))
    network = net.network
    broadcast = net.broadcast

    if not dop.get("ifcfg", device):
        self.logger.error("Change nic failed. Target config not found.")
        return web.internalerror('Internal Server Error. (Get conf)')

    dop.set("ifcfg", [device, "ONBOOT"], onboot)
    dop.set("ifcfg", [device, "BOOTPROTO"], bootproto)
    dop.set("ifcfg", [device, "IPADDR"], ipaddr)
    dop.set("ifcfg", [device, "NETMASK"], netmask)
    if network is not None:
        dop.set("ifcfg", [device, "NETWORK"], network)
    if broadcast is not None:
        dop.set("ifcfg", [device, "BROADCAST"], broadcast)

    retval = write_conf(dop, self, host)
    if retval is False:
        self.logger.error("Change nic failed. Failed write conf.")
        return web.internalerror('Internal Server Error. (Adding Task)')

    return web.accepted(url=web.ctx.path)
def network_start_stop_job(obj, host_id, network_name, action):
    """Register a start/stop network job into pysilhouette.

    @param obj: Rest object
    @param host_id: ID of the host owning the network
    @param network_name: Name of network to start or stop
    @type network_name: string
    @param action: 'start' or 'stop'
    @type action: string
    @raise KaresansuiException: missing network name, bad action, or
        missing/blank env.uniqkey
    """
    if not network_name:
        raise KaresansuiException
    # BUGFIX: strip('') strips no characters at all; strip() is required
    # to reject a whitespace-only uniq key.
    if ('env.uniqkey' not in karesansui.sheconf) \
           or (karesansui.sheconf['env.uniqkey'].strip() == ''):
        raise KaresansuiException
    if not (action == 'start' or action == 'stop'):
        raise KaresansuiException

    host = findbyhost1(obj.orm, host_id)

    if action == 'start':
        cmdname = ["Start Network", "start network"]
        _cmd = dict2command(
            "%s/%s" % (karesansui.config['application.bin.dir'],
                       'restart_network.py'),
            dict(name=network_name, force=None))
    else:
        cmdname = ["Stop Network", "stop network"]
        _cmd = dict2command(
            "%s/%s" % (karesansui.config['application.bin.dir'],
                       'stop_network.py'),
            dict(name=network_name))

    # Job Register
    _jobgroup = JobGroup(cmdname[0], karesansui.sheconf['env.uniqkey'])
    _jobgroup.jobs.append(Job('%s command' % cmdname[1], 0, _cmd))
    _machine2jobgroup = m2j_new(machine=host,
                                jobgroup_id=-1,
                                uniq_key=karesansui.sheconf['env.uniqkey'],
                                created_user=obj.me,
                                modified_user=obj.me,
                                )
    # INSERT
    save_job_collaboration(obj.orm,
                           obj.pysilhouette.orm,
                           _machine2jobgroup,
                           _jobgroup,
                           )
def _DELETE(self, *param, **params):
    """Logically delete the host and redirect to the top page."""
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()
    target = findbyhost1(self.orm, host_id)
    logical_delete(self.orm, target)
    return web.seeother(url="/")
def _DELETE(self, *param, **params):
    """Logically delete the host, then send the client back to "/"."""
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()
    machine = findbyhost1(self.orm, host_id)
    logical_delete(self.orm, machine)
    return web.seeother(url="/")
def _PUT(self, *param, **params):
    """Start or stop the storage pool identified by UUID."""
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()
    self.view.host_id = host_id
    uuid = param[1]

    if not validates_storage_pool(self, uuid):
        self.logger.debug(
            "Set storage pool status failed. Did not validate.")
        return web.badrequest(self.view.alert)

    model = findbyhost1(self.orm, host_id)

    # Pool
    # BUGFIX: construct the connection *before* the try block; otherwise a
    # failing constructor makes the finally clause raise NameError on kvc.
    kvc = KaresansuiVirtConnection()
    try:
        inactive_pool = kvc.list_inactive_storage_pool()
        active_pool = kvc.list_active_storage_pool()
        pools = inactive_pool + active_pool
        pools.sort()
        self.view.pools = pools

        pools_obj = kvc.get_storage_pool_UUIDString2kvn_storage_pool(uuid)
        if len(pools_obj) <= 0:
            self.logger.debug(
                "Set storage pool status failed. Target storage pool not found."
            )
            return web.notfound()

        status = int(self.input.status)
        if status == STORAGE_POOL_START:
            storagepool_start_stop_job(self, model, pools_obj[0], 'start')
        elif status == STORAGE_POOL_STOP:
            # A pool still referenced by a guest must not be stopped.
            if kvc.is_used_storage_pool(
                    name=pools_obj[0].get_storage_name(),
                    active_only=True) is True:
                self.logger.debug(
                    "Stop storage pool failed. Target storage pool is used by guest."
                )
                return web.badrequest(
                    "Target storage pool is used by guest.")
            else:
                storagepool_start_stop_job(self, model, pools_obj[0], 'stop')
        else:
            self.logger.debug(
                "Set storage pool status failed. Unknown status type.")
            return web.badrequest()
        return web.accepted()
    finally:
        kvc.close()
def _PUT(self, *param, **params):
    """Queue a 'restart network' job for the host."""
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()
    self.view.host_id = host_id
    host = findbyhost1(self.orm, host_id)

    status = int(self.input.status)
    if status != NETWORK_RESTART:
        return web.badrequest()

    cmdname = u"Restart Network"
    cmd = NETWORK_COMMAND_RESTART
    options = {}

    _cmd = dict2command(
        "%s/%s" % (karesansui.config['application.bin.dir'], cmd), options)

    _jobgroup = JobGroup(cmdname, karesansui.sheconf['env.uniqkey'])
    _jobgroup.jobs.append(Job('%s command' % cmdname, 0, _cmd))

    # NOTE: removed a redundant second findbyhost1() lookup; the host
    # fetched above is reused.
    _machine2jobgroup = m2j_new(machine=host,
                                jobgroup_id=-1,
                                uniq_key=karesansui.sheconf['env.uniqkey'],
                                created_user=self.me,
                                modified_user=self.me,
                                )
    save_job_collaboration(self.orm,
                           self.pysilhouette.orm,
                           _machine2jobgroup,
                           _jobgroup,
                           )
    return web.accepted()
def _PUT(self, *param, **params):
    """Update host metadata: name, hostname, note, icon and tags."""
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()

    if not validates_host_edit(self):
        self.logger.debug("Update Host OS is failed, Invalid input value.")
        return web.badrequest(self.view.alert)

    host = findbyhost1(self.orm, host_id)

    # Reject a rename that collides with another machine's name.
    cmp_host = findby1name(self.orm, self.input.m_name)
    if cmp_host is not None and int(host_id) != cmp_host.id:
        self.logger.debug("Update Host OS is failed, "
                          "Already exists name"
                          "- %s, %s" % (host, cmp_host))
        return web.conflict(web.ctx.path)

    # Reject a hostname that collides with another machine's hostname.
    hostname_check = findby1hostname(self.orm, self.input.m_hostname)
    if hostname_check is not None and int(host_id) != hostname_check.id:
        return web.conflict(web.ctx.path)

    if is_param(self.input, "m_hostname"):
        host.hostname = self.input.m_hostname
    if is_param(self.input, "note_title"):
        host.notebook.title = self.input.note_title
    if is_param(self.input, "note_value"):
        host.notebook.value = self.input.note_value
    if is_param(self.input, "m_name"):
        host.name = self.input.m_name

    # Icon (removed a dead "icon_filename = None" local)
    if is_param(self.input, "icon_filename", empty=True):
        host.icon = self.input.icon_filename

    # tag UPDATE
    if is_param(self.input, "tags"):
        _tags = []
        tag_array = comma_split(self.input.tags)
        tag_array = uniq_sort(tag_array)
        for x in tag_array:
            if t_count(self.orm, x) == 0:
                _tags.append(t_new(x))
            else:
                _tags.append(t_name(self.orm, x))
        host.tags = _tags

    host.modified_user = self.me
    m_update(self.orm, host)
    return web.seeother(web.ctx.path)
def _POST(self, *param, **params):
    """Queue an 'add bonding' job built from the selected target devices."""
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()

    target_regex = re.compile(
        r"^bonding_target_dev_select_(?P<dev>eth[0-9]+)")
    if not validates_bonding(self, target_regex):
        self.logger.debug("Add bonding failed. Did not validate.")
        return web.badrequest(self.view.alert)

    # Collect the ethN devices ticked in the form.  The loop variable is
    # named "key" to avoid shadowing the builtin "input".
    target_dev = []
    for key in self.input:
        m = target_regex.match(key)
        if m:
            target_dev.append(m.group('dev'))

    primary = self.input.bonding_target_dev_primary
    mode = self.input.bonding_mode

    cmdname = u"Add Bonding Setting"
    cmd = BONDING_COMMAND_ADD
    options = {}
    options['dev'] = ','.join(target_dev)
    options["primary"] = primary
    options["mode"] = mode

    _cmd = dict2command(
        "%s/%s" % (karesansui.config['application.bin.dir'], cmd), options)

    _jobgroup = JobGroup(cmdname, karesansui.sheconf['env.uniqkey'])
    _jobgroup.jobs.append(Job('%s command' % cmdname, 0, _cmd))

    host = findbyhost1(self.orm, host_id)
    _machine2jobgroup = m2j_new(machine=host,
                                jobgroup_id=-1,
                                uniq_key=karesansui.sheconf['env.uniqkey'],
                                created_user=self.me,
                                modified_user=self.me,
                                )
    save_job_collaboration(self.orm,
                           self.pysilhouette.orm,
                           _machine2jobgroup,
                           _jobgroup,
                           )
    return web.accepted()
def network_start_stop_job(obj, host_id, network_name, action):
    """Register a start/stop network job into pysilhouette.

    @param obj: Rest object
    @param host_id: ID of the host owning the network
    @param network_name: Name of network to start or stop
    @type network_name: string
    @param action: 'start' or 'stop'
    @type action: string
    @raise KaresansuiException: missing network name, bad action, or
        missing/blank env.uniqkey
    """
    if not network_name:
        raise KaresansuiException
    # BUGFIX: strip('') strips no characters at all; strip() is required
    # to reject a whitespace-only uniq key.
    if ('env.uniqkey' not in karesansui.sheconf) \
           or (karesansui.sheconf['env.uniqkey'].strip() == ''):
        raise KaresansuiException
    if not (action == 'start' or action == 'stop'):
        raise KaresansuiException

    host = findbyhost1(obj.orm, host_id)

    if action == 'start':
        cmdname = ["Start Network", "start network"]
        _cmd = dict2command(
            "%s/%s" % (karesansui.config['application.bin.dir'],
                       'restart_network.py'),
            dict(name=network_name, force=None))
    else:
        cmdname = ["Stop Network", "stop network"]
        _cmd = dict2command(
            "%s/%s" % (karesansui.config['application.bin.dir'],
                       'stop_network.py'),
            dict(name=network_name))

    # Job Register
    _jobgroup = JobGroup(cmdname[0], karesansui.sheconf['env.uniqkey'])
    _jobgroup.jobs.append(Job('%s command' % cmdname[1], 0, _cmd))
    _machine2jobgroup = m2j_new(machine=host,
                                jobgroup_id=-1,
                                uniq_key=karesansui.sheconf['env.uniqkey'],
                                created_user=obj.me,
                                modified_user=obj.me,
                                )
    # INSERT
    save_job_collaboration(obj.orm,
                           obj.pysilhouette.orm,
                           _machine2jobgroup,
                           _jobgroup,
                           )
def _PUT(self, *param, **params):
    """Queue a 'restart network' job for the host."""
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()
    self.view.host_id = host_id
    host = findbyhost1(self.orm, host_id)

    status = int(self.input.status)
    if status != NETWORK_RESTART:
        return web.badrequest()

    cmdname = u"Restart Network"
    cmd = NETWORK_COMMAND_RESTART
    options = {}

    _cmd = dict2command(
        "%s/%s" % (karesansui.config['application.bin.dir'], cmd), options)

    _jobgroup = JobGroup(cmdname, karesansui.sheconf['env.uniqkey'])
    _jobgroup.jobs.append(Job('%s command' % cmdname, 0, _cmd))

    # NOTE: removed a redundant second findbyhost1() lookup; the host
    # fetched above is reused.
    _machine2jobgroup = m2j_new(machine=host,
                                jobgroup_id=-1,
                                uniq_key=karesansui.sheconf['env.uniqkey'],
                                created_user=self.me,
                                modified_user=self.me,
                                )
    save_job_collaboration(self.orm,
                           self.pysilhouette.orm,
                           _machine2jobgroup,
                           _jobgroup,
                           )
    return web.accepted()
def _GET(self, *param, **params):
    """List the host's virtual networks (active and inactive)."""
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()

    model = findbyhost1(self.orm, host_id)

    uris = available_virt_uris()
    if model.attribute == 0 and model.hypervisor == 1:
        uri = uris["XEN"]
    elif model.attribute == 0 and model.hypervisor == 2:
        uri = uris["KVM"]
    else:
        uri = None

    # if input mode then return empty form
    if self.is_mode_input():
        self.view.host_id = host_id
        self.view.network = dict(name='', cidr='', dhcp_start='',
                                 dhcp_end='', forward_dev='',
                                 forward_mode='', bridge='')
        return True

    kvc = KaresansuiVirtConnection(uri)
    networks = []
    try:
        labelfunc = (('active', kvc.list_active_network),
                     ('inactive', kvc.list_inactive_network),
                     )
        for label, func in labelfunc:
            for name in func():
                try:
                    # search_kvn_networks raises KaresansuiVirtException
                    # when the network is not found.
                    network = kvc.search_kvn_networks(name)[0]
                    info = network.get_info()
                    if info['is_active']:
                        info['activity'] = 'Active'
                    else:
                        info['activity'] = 'Inactive'
                    networks.append(info)
                except KaresansuiVirtException:
                    # network not found
                    pass
    finally:
        kvc.close()

    self.view.networks = networks
    return True
def _POST(self, *param, **params):
    """Register an 'add bonding' job for the chosen target devices."""
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()

    target_regex = re.compile(
        r"^bonding_target_dev_select_(?P<dev>eth[0-9]+)")
    if not validates_bonding(self, target_regex):
        self.logger.debug("Add bonding failed. Did not validate.")
        return web.badrequest(self.view.alert)

    # Gather every ethN device selected in the submitted form; the loop
    # variable avoids shadowing the builtin "input".
    target_dev = []
    for field in self.input:
        matched = target_regex.match(field)
        if matched:
            target_dev.append(matched.group('dev'))

    primary = self.input.bonding_target_dev_primary
    mode = self.input.bonding_mode

    cmdname = u"Add Bonding Setting"
    cmd = BONDING_COMMAND_ADD
    options = {'dev': ','.join(target_dev),
               "primary": primary,
               "mode": mode}

    _cmd = dict2command(
        "%s/%s" % (karesansui.config['application.bin.dir'], cmd), options)

    _jobgroup = JobGroup(cmdname, karesansui.sheconf['env.uniqkey'])
    _jobgroup.jobs.append(Job('%s command' % cmdname, 0, _cmd))

    host = findbyhost1(self.orm, host_id)
    _machine2jobgroup = m2j_new(machine=host,
                                jobgroup_id=-1,
                                uniq_key=karesansui.sheconf['env.uniqkey'],
                                created_user=self.me,
                                modified_user=self.me,
                                )
    save_job_collaboration(self.orm,
                           self.pysilhouette.orm,
                           _machine2jobgroup,
                           _jobgroup,
                           )
    return web.accepted()
def _DELETE(self, *param, **params):
    """Queue a job that deletes a virtual network (except "default")."""
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()

    network_name = param[1]
    if not network_name:
        self.logger.debug("Network delete failed. Network not found.")
        return web.notfound("Network not found.")
    # The built-in "default" network must never be removed.
    if network_name == 'default':
        self.logger.debug(
            'Network delete failed. Target network is "default".')
        return web.badrequest('Target network "default" can not deleted.')

    host = findbyhost1(self.orm, host_id)

    options = {'name': network_name}
    _cmd = dict2command(
        "%s/%s" % (karesansui.config['application.bin.dir'],
                   VIRT_COMMAND_DELETE_NETWORK),
        options)

    # Job Registration
    _jobgroup = JobGroup('Delete network: %s' % network_name,
                         karesansui.sheconf['env.uniqkey'])
    _jobgroup.jobs.append(Job('Delete network', 0, _cmd))

    _machine2jobgroup = m2j_new(machine=host,
                                jobgroup_id=-1,
                                uniq_key=karesansui.sheconf['env.uniqkey'],
                                created_user=self.me,
                                modified_user=self.me,
                                )
    save_job_collaboration(self.orm,
                           self.pysilhouette.orm,
                           _machine2jobgroup,
                           _jobgroup,
                           )

    self.logger.debug('(Delete network) Job group id==%s', _jobgroup.id)
    url = '%s/job/%s.part' % (web.ctx.home, _jobgroup.id)
    self.logger.debug('Returning Location: %s' % url)
    return web.accepted()
def _PUT(self, *param, **params):
    """Queue a task that rewrites the host's iptables configuration."""
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()
    host = findbyhost1(self.orm, host_id)

    if not validates_iptables_save(self, host):
        return web.badrequest(self.view.alert)

    from karesansui.lib.dict_op import DictOp
    dop = DictOp()
    dop.addconf("iptables", {})
    # The textarea arrives CRLF-delimited; store it as a list of lines.
    config_lines = self.input.iptables_save.split("\r\n")
    dop.set("iptables", ["config"], config_lines)

    if write_conf(dop, self, host) is False:
        return web.internalerror('Internal Server Error. (Adding Task)')
    return web.accepted(url=web.ctx.path)
def _PUT(self, *param, **params):
    """Start or stop the storage pool identified by UUID."""
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()
    self.view.host_id = host_id
    uuid = param[1]

    if not validates_storage_pool(self, uuid):
        self.logger.debug("Set storage pool status failed. Did not validate.")
        return web.badrequest(self.view.alert)

    model = findbyhost1(self.orm, host_id)

    # Pool
    # BUGFIX: construct the connection *before* the try block; otherwise a
    # failing constructor makes the finally clause raise NameError on kvc.
    kvc = KaresansuiVirtConnection()
    try:
        inactive_pool = kvc.list_inactive_storage_pool()
        active_pool = kvc.list_active_storage_pool()
        pools = inactive_pool + active_pool
        pools.sort()
        self.view.pools = pools

        pools_obj = kvc.get_storage_pool_UUIDString2kvn_storage_pool(uuid)
        if len(pools_obj) <= 0:
            self.logger.debug("Set storage pool status failed. Target storage pool not found.")
            return web.notfound()

        status = int(self.input.status)
        if status == STORAGE_POOL_START:
            storagepool_start_stop_job(self, model, pools_obj[0], 'start')
        elif status == STORAGE_POOL_STOP:
            # A pool still referenced by a guest must not be stopped.
            if kvc.is_used_storage_pool(name=pools_obj[0].get_storage_name(),
                                        active_only=True) is True:
                self.logger.debug("Stop storage pool failed. Target storage pool is used by guest.")
                return web.badrequest("Target storage pool is used by guest.")
            else:
                storagepool_start_stop_job(self, model, pools_obj[0], 'stop')
        else:
            self.logger.debug("Set storage pool status failed. Unknown status type.")
            return web.badrequest()
        return web.accepted()
    finally:
        kvc.close()
def _DELETE(self, *param, **params):
    """Delete the storage pool identified by UUID (refused while in use)."""
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()
    self.view.host_id = host_id
    uuid = param[1]

    if not validates_storage_pool(self, uuid):
        self.logger.debug("Delete storage pool failed. Did not validate.")
        return web.badrequest(self.view.alert)

    # Pool
    # BUGFIX: construct the connection *before* the try block; otherwise a
    # failing constructor makes the finally clause raise NameError on kvc.
    kvc = KaresansuiVirtConnection()
    try:
        inactive_pool = kvc.list_inactive_storage_pool()
        active_pool = kvc.list_active_storage_pool()
        pools = inactive_pool + active_pool
        pools.sort()
        self.view.pools = pools

        pools_obj = kvc.get_storage_pool_UUIDString2kvn_storage_pool(uuid)
        if len(pools_obj) <= 0:
            return web.notfound()
        # A pool still referenced by a guest must not be deleted.
        if kvc.is_used_storage_pool(
                pools_obj[0].get_storage_name()) is True:
            self.logger.debug(
                "Delete storage pool failed. Target storage pool is used by guest."
            )
            return web.badrequest("Target storage pool is used by guest.")
    finally:
        kvc.close()

    model = findbyhost1(self.orm, host_id)

    if delete_storage_pool_job(self, model,
                               pools_obj[0].get_storage_name()) is True:
        self.logger.debug("Delete storage pool success. name=%s"
                          % (pools_obj[0].get_storage_name()))
        return web.accepted()
    else:
        self.logger.debug("Failed delete storage pool. name=%s"
                          % (pools_obj[0].get_storage_name()))
        return False
def _PUT(self, *param, **params):
    """Update the firewall status.

    Status values (bit flags):
      read = 0, start = 1, stop = 2, restart = 3
    """
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()

    if not validates_fw_status(self):
        return web.badrequest(self.view.alert)

    status = int(self.input.status)
    kit = KaresansuiIpTables()
    model = findbyhost1(self.orm, host_id)

    ret = False
    if status == FIREWALL_ACTION_INIT:
        ret = firewall_save(self, model)
    elif status & FIREWALL_ACTION_STOP and status & FIREWALL_ACTION_START:
        # Both bits set means restart.
        kit.firewall_xml = kit.read_firewall_xml()
        ret = firewall_restore(self, model, 'restart')
    elif status & FIREWALL_ACTION_STOP:
        kit.firewall_xml = kit.read_firewall_xml()
        ret = firewall_restore(self, model, 'stop')
    elif status & FIREWALL_ACTION_START:
        kit.firewall_xml = kit.read_firewall_xml()
        ret = firewall_restore(self, model, 'start')

    if ret is True:
        return web.accepted(url=web.ctx.path)
    return False
def _DELETE(self, *param, **params):
    """Register a deletion job for a virtual network; "default" is protected."""
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()

    network_name = param[1]
    if not network_name:
        self.logger.debug("Network delete failed. Network not found.")
        return web.notfound("Network not found.")
    if network_name == 'default':
        self.logger.debug('Network delete failed. Target network is "default".')
        return web.badrequest('Target network "default" can not deleted.')

    host = findbyhost1(self.orm, host_id)

    options = {}
    options['name'] = network_name
    _cmd = dict2command(
        "%s/%s" % (karesansui.config['application.bin.dir'],
                   VIRT_COMMAND_DELETE_NETWORK),
        options)

    # Job Registration
    _jobgroup = JobGroup('Delete network: %s' % network_name,
                         karesansui.sheconf['env.uniqkey'])
    _jobgroup.jobs.append(Job('Delete network', 0, _cmd))

    _machine2jobgroup = m2j_new(machine=host,
                                jobgroup_id=-1,
                                uniq_key=karesansui.sheconf['env.uniqkey'],
                                created_user=self.me,
                                modified_user=self.me,
                                )
    save_job_collaboration(self.orm,
                           self.pysilhouette.orm,
                           _machine2jobgroup,
                           _jobgroup,
                           )

    self.logger.debug('(Delete network) Job group id==%s', _jobgroup.id)
    url = '%s/job/%s.part' % (web.ctx.home, _jobgroup.id)
    self.logger.debug('Returning Location: %s' % url)
    return web.accepted()
def get_iscsi_cmd(obj, host_id):
    """Run the 'Get iSCSI List' job and parse its stdout.

    Returns the parsed network-storage list, or False when the job
    collaboration could not be registered.
    """
    cmd_name = u'Get iSCSI List'
    jobgroup = JobGroup(cmd_name, karesansui.sheconf['env.uniqkey'])
    command_path = "%s/%s" % (karesansui.config['application.bin.dir'],
                              ISCSI_COMMAND_GET)
    jobgroup.jobs.append(Job('%s command' % cmd_name, 0, command_path))
    jobgroup.type = JOBGROUP_TYPE['PARALLEL']

    host = findbyhost1(obj.orm, host_id)
    _machine2jobgroup = m2j_new(machine=host,
                                jobgroup_id=-1,
                                uniq_key=karesansui.sheconf['env.uniqkey'],
                                created_user=obj.me,
                                modified_user=obj.me,
                                )
    if corp(obj.orm, obj.pysilhouette.orm,
            _machine2jobgroup, jobgroup) is False:
        return False

    ret = jobgroup.jobs[0].action_stdout
    return get_network_storages(ret)
def _GET(self, *param, **params):
    """Prepare the console view and resolve the host's IP address."""
    _prep_console()

    # BUGFIX: removed a dead "host_id = param[0]" that was immediately
    # overwritten and raised IndexError on an empty param tuple.
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()
    uri_id = param[1]
    if uri_id is None:
        return web.notfound()

    model = findbyhost1(self.orm, host_id)

    # Best effort: resolve the host part of the stored URI, falling back
    # to the local machine's own address.
    try:
        segs = uri_split(model.hostname)
        self.view.host_ipaddr = socket.gethostbyname(segs['host'])
    except:
        self.view.host_ipaddr = socket.gethostbyname(socket.gethostname())
    return True
def _POST(self, *param, **params):
    """Add a static route and queue the config write plus a `route add`."""
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()
    host = findbyhost1(self.orm, host_id)

    if not validates_staticroute(self):
        return web.badrequest(self.view.alert)

    modules = ["staticroute"]
    dop = read_conf(modules, self, host)
    if dop is False:
        return web.internalerror('Internal Server Error. (Timeout)')

    target = self.input.target
    net = NetworkAddress(target)
    ipaddr = net.ipaddr
    netmask = net.netmask
    netlen = net.netlen
    network = net.network
    target = "%s/%s" % (ipaddr, netlen,)
    gateway = self.input.gateway
    device = self.input.device

    dop.set("staticroute", [device, target], gateway)

    from karesansui.lib.parser.staticroute import PARSER_COMMAND_ROUTE
    if net.netlen == 32:
        # NOTE: the original built a "gw" variant first and immediately
        # overwrote it (dead store, now removed); host routes are added
        # without a gateway.
        command = "%s add -host %s dev %s" % (
            PARSER_COMMAND_ROUTE, ipaddr, device,)
    else:
        command = "%s add -net %s netmask %s gw %s dev %s" % (
            PARSER_COMMAND_ROUTE, network, netmask, gateway, device,)

    extra_args = {"post-command": command}
    retval = write_conf(dop, self, host, extra_args=extra_args)
    if retval is False:
        return web.internalerror('Internal Server Error. (Adding Task)')
    return web.accepted(url=web.ctx.path)
def _GET(self, *param, **params):
    """Expose VNC applet paths and the guest's graphics port to the view."""
    java_dir = karesansui.dirname + "/static/java"
    self.view.applet_dst_path = java_dir + "/VncViewer.jar"
    self.view.applet_src_path = _get_applet_source_path()
    self.view.found_applet_located = os.path.exists(
        self.view.applet_dst_path)

    (host_id, guest_id) = self.chk_guestby1(param)
    if guest_id is None:
        return web.notfound()

    model = findbyguest1(self.orm, guest_id)
    kvc = KaresansuiVirtConnection()
    try:
        domname = kvc.uuid_to_domname(model.uniq_key)
        if not domname:
            return web.notfound()
        dom = kvc.search_guests(domname)[0]
        document = XMLParse(dom.XMLDesc(1))
        self.view.graphics_port = XMLXpath(
            document, "/domain/devices/graphics/@port")
        self.view.xenname = XMLXpath(document, "/domain/name/text()")
    finally:
        kvc.close()

    h_model = findbyhost1(self.orm, host_id)

    # Best-effort host IP resolution: bridge device, then stored hostname,
    # then the local machine's own address.
    try:
        from karesansui.lib.utils import get_ifconfig_info
        device = KVM_BRIDGE_PREFIX + "0"
        self.view.host_ipaddr = get_ifconfig_info(device)[device]["ipaddr"]
    except:
        try:
            self.view.host_ipaddr = h_model.hostname.split(":")[0].strip()
        except:
            self.view.host_ipaddr = socket.gethostbyname(socket.gethostname())
    return True
def _GET(self, *param, **params):
    """Locate the VNC applet and publish the guest's graphics details."""
    java_dir = karesansui.dirname + '/static/java'
    self.view.applet_dst_path = java_dir + '/VncViewer.jar'
    self.view.applet_src_path = _get_applet_source_path()
    self.view.found_applet_located = os.path.exists(self.view.applet_dst_path)

    (host_id, guest_id) = self.chk_guestby1(param)
    if guest_id is None:
        return web.notfound()

    model = findbyguest1(self.orm, guest_id)
    kvc = KaresansuiVirtConnection()
    try:
        domname = kvc.uuid_to_domname(model.uniq_key)
        if not domname:
            return web.notfound()
        dom = kvc.search_guests(domname)[0]
        document = XMLParse(dom.XMLDesc(1))
        self.view.graphics_port = XMLXpath(document,
                                           '/domain/devices/graphics/@port')
        self.view.xenname = XMLXpath(document, '/domain/name/text()')
    finally:
        kvc.close()

    h_model = findbyhost1(self.orm, host_id)

    # Fall through a chain of best-effort lookups for the host IP:
    # bridge device address -> stored hostname -> local hostname.
    try:
        from karesansui.lib.utils import get_ifconfig_info
        device = KVM_BRIDGE_PREFIX + "0"
        self.view.host_ipaddr = get_ifconfig_info(device)[device]["ipaddr"]
    except:
        try:
            self.view.host_ipaddr = h_model.hostname.split(":")[0].strip()
        except:
            self.view.host_ipaddr = socket.gethostbyname(
                socket.gethostname())
    return True
def _DELETE(self, *param, **params):
    """Remove a static route and queue the config write plus a `route del`."""
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()

    b64name = param[1]
    if not (b64name and host_id):
        return web.badrequest()
    host = findbyhost1(self.orm, host_id)

    # The route identifier is encoded as base64("target@device").
    name = base64_decode(str(b64name))
    (target, device) = name.split("@")

    net = NetworkAddress(target)
    ipaddr = net.ipaddr
    netmask = net.netmask
    netlen = net.netlen
    target = "%s/%s" % (ipaddr, netlen,)

    modules = ["staticroute"]
    dop = read_conf(modules, self, host)
    if dop is False:
        return web.internalerror('Internal Server Error. (Timeout)')

    dop.delete("staticroute", [device, target])

    from karesansui.lib.parser.staticroute import PARSER_COMMAND_ROUTE
    if net.netlen == 32:
        command = "%s del -host %s dev %s" % (PARSER_COMMAND_ROUTE,
                                              ipaddr, device,)
    else:
        # NOTE(review): '-net' is passed the plain ipaddr here while the
        # add path uses net.network — confirm whether this should be the
        # network address.
        command = "%s del -net %s netmask %s dev %s" % (PARSER_COMMAND_ROUTE,
                                                        ipaddr, netmask,
                                                        device,)

    extra_args = {"post-command": command}
    retval = write_conf(dop, self, host, extra_args=extra_args)
    if retval is False:
        return web.internalerror('Internal Server Error. (Adding Task)')
    return web.accepted()
def _DELETE(self, *param, **params):
    """Delete the storage pool identified by UUID via a background job.

    Refuses to delete a pool that is still used by a guest.
    """
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()
    self.view.host_id = host_id
    uuid = param[1]
    if not validates_storage_pool(self, uuid):
        self.logger.debug("Delete storage pool failed. Did not validate.")
        return web.badrequest(self.view.alert)
    # Pool
    try:
        kvc = KaresansuiVirtConnection()
        inactive_pool = kvc.list_inactive_storage_pool()
        active_pool = kvc.list_active_storage_pool()
        pools = inactive_pool + active_pool
        pools.sort()
        self.view.pools = pools
        pools_obj = kvc.get_storage_pool_UUIDString2kvn_storage_pool(uuid)
        if len(pools_obj) <= 0:
            return web.notfound()
        # Never delete a pool that still backs a guest.
        if kvc.is_used_storage_pool(pools_obj[0].get_storage_name()) is True:
            self.logger.debug("Delete storage pool failed. Target storage pool is used by guest.")
            return web.badrequest("Target storage pool is used by guest.")
    finally:
        kvc.close()
    model = findbyhost1(self.orm, host_id)
    if delete_storage_pool_job(self,model,pools_obj[0].get_storage_name()) is True:
        self.logger.debug("Delete storage pool success. name=%s" % (pools_obj[0].get_storage_name()))
        return web.accepted()
    else:
        self.logger.debug("Failed delete storage pool. name=%s" % (pools_obj[0].get_storage_name()))
        return False
def _GET(self, *param, **params):
    """Show general network settings: gateway, DNS, domain and hostname.

    Each value is read from the parsed config (``dop``); ``dop.get``
    returns False when a key is absent, so every read has a fallback.
    """
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()
    self.view.host_id = host_id
    self.view.current = get_ifconfig_info()
    modules = ["network", "resolv", "hosts"]
    host = findbyhost1(self.orm, host_id)
    dop = read_conf(modules, self, host)
    if dop is False:
        return web.internalerror('Internal Server Error. (Timeout)')
    self.view.gateway = dop.get("network", ["GATEWAY"])
    self.view.search = dop.get("resolv", ["search"])
    # nameserver may be absent (False), a single string, or a list;
    # normalize to a newline-joined string for the template.
    self.view.nameserver = dop.get("resolv", ["nameserver"])
    if self.view.nameserver is False:
        self.view.nameserver = []
    if type(self.view.nameserver) == str:
        self.view.nameserver = [self.view.nameserver]
    self.view.nameserver = "\n".join(self.view.nameserver)
    # Domain name: resolv.conf, then network config, then derived
    # from uname's FQDN by stripping the leading label.
    self.view.domainname = dop.get("resolv", ["domain"])
    if self.view.domainname is False:
        self.view.domainname = dop.get("network", ["DOMAINNAME"])
    if self.view.domainname is False:
        self.view.domainname = re.sub("^[^\.]+\.", "", os.uname()[1])
    self.view.hostname = dop.get("network", ["HOSTNAME"])
    if self.view.hostname is False:
        self.view.hostname = os.uname()[1]
    self.view.hostname_short = re.sub("\.%s$" % (self.view.domainname), "", self.view.hostname)
    # --
    return True
def get_iscsi_cmd(obj, host_id):
    """Run the iSCSI list command through the job system and parse its output.

    Returns the parsed network-storage list, or False when the job
    collaboration fails (presumably a timeout — confirm in corp()).
    """
    cmd_name = u'Get iSCSI List'
    jobgroup = JobGroup(cmd_name, karesansui.sheconf['env.uniqkey'])
    jobgroup.jobs.append(Job('%s command' % cmd_name, 0, "%s/%s" \
                             % (karesansui.config['application.bin.dir'], ISCSI_COMMAND_GET)))
    jobgroup.type = JOBGROUP_TYPE['PARALLEL']
    host = findbyhost1(obj.orm, host_id)
    _machine2jobgroup = m2j_new(
        machine=host,
        jobgroup_id=-1,
        uniq_key=karesansui.sheconf['env.uniqkey'],
        created_user=obj.me,
        modified_user=obj.me,
    )
    # corp() registers the job group and waits for it; False means failure.
    if corp(obj.orm, obj.pysilhouette.orm, _machine2jobgroup, jobgroup) is False:
        return False
    ret = jobgroup.jobs[0].action_stdout
    network_storages = get_network_storages(ret)
    return network_storages
def _PUT(self, *param, **params):
    """Change a host service's state (start/stop/restart/enable/disable).

    param[1] names the service; the request's ``status`` field selects
    the action.  Returns 202 on success, 404 for an unknown service,
    400 for a missing or invalid status.
    """
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()
    self.view.host_id = host_id
    host = findbyhost1(self.orm, host_id)
    name = param[1]
    config = ServiceConfigParam(SERVICE_XML_FILE)
    config.load_xml_config()
    if not config.findby1service(name):
        self.logger.debug("Set service status failed. Service not found.")
        return web.notfound("Service not found")
    if not is_param(self.input, 'status'):
        self.logger.error("Set service status failed. Missing request param.")
        return web.badrequest("Missing request param.")
    status = int(self.input.status)
    # Dispatch table replaces the if/elif chain; same actions, same order
    # of checks is irrelevant since the keys are distinct constants.
    action_by_status = {
        SERVICE_START: "start",
        SERVICE_STOP: "stop",
        SERVICE_RESTART: "restart",
        SERVICE_ENABLE: "enable",
        SERVICE_DISABLE: "disable",
    }
    try:
        action = action_by_status[status]
    except KeyError:
        self.logger.error("Set service status failed. Invalid request param.")
        return web.badrequest("Invalid request param")
    service_job(self, host, name, action)
    return web.accepted()
def _GET(self, *param, **params):
    """Show general network settings (gateway, DNS, domain, hostname).

    ``dop.get`` yields False for missing keys, hence the fallback chain
    on every read.
    """
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()
    self.view.host_id = host_id
    self.view.current = get_ifconfig_info()
    modules = ["network","resolv","hosts"]
    host = findbyhost1(self.orm, host_id)
    dop = read_conf(modules, self, host)
    if dop is False:
        return web.internalerror('Internal Server Error. (Timeout)')
    self.view.gateway = dop.get("network",["GATEWAY"])
    self.view.search = dop.get("resolv" ,["search"])
    # Normalize nameserver (absent/str/list) to a newline-joined string.
    self.view.nameserver = dop.get("resolv" ,["nameserver"])
    if self.view.nameserver is False:
        self.view.nameserver = []
    if type(self.view.nameserver) == str:
        self.view.nameserver = [self.view.nameserver]
    self.view.nameserver = "\n".join(self.view.nameserver)
    # Domain: resolv.conf -> network config -> derived from uname FQDN.
    self.view.domainname = dop.get("resolv" ,["domain"])
    if self.view.domainname is False:
        self.view.domainname = dop.get("network" ,["DOMAINNAME"])
    if self.view.domainname is False:
        self.view.domainname = re.sub("^[^\.]+\.","",os.uname()[1])
    self.view.hostname = dop.get("network" ,["HOSTNAME"])
    if self.view.hostname is False:
        self.view.hostname = os.uname()[1]
    self.view.hostname_short = re.sub("\.%s$" % (self.view.domainname), "", self.view.hostname)
    # --
    return True
def _GET(self, *param, **params):
    """Show iptables config, runtime status and per-table chain policies."""
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()
    self.view.host_id = host_id
    self.view.current = get_ifconfig_info()
    modules = ["iptables"]
    host = findbyhost1(self.orm, host_id)
    dop = read_conf(modules, self, host)
    if dop is False:
        return web.internalerror('Internal Server Error. (Timeout)')
    config = dop.get("iptables",["config"])
    status = dop.get("iptables",["status"])
    lint = dop.get("iptables",["lint"])
    # Parse iptables-save style lines: "*table" opens a table section,
    # ":CHAIN POLICY ..." declares a chain policy inside it.
    policies = {}
    for _aline in status:
        m = re.match("\*(?P<table>[a-z]+)",_aline.rstrip())
        if m:
            table = m.group("table")
            policies[table] = {}
        else:
            m = re.match(":(?P<chain>[A-Z]+) +(?P<policy>[A-Z]+)",_aline.rstrip())
            if m:
                chain = m.group("chain")
                policy = m.group("policy")
                # NOTE(review): assumes a "*table" line precedes any chain
                # line; otherwise `table` would be unbound here.
                policies[table][chain] = policy
    self.view.config = "\n".join(config)
    self.view.status = "\n".join(status)
    self.view.lint = lint
    self.view.policies = policies
    self.view.result_js = ""
    return True
def _PUT(self, *param, **params):
    """<comment-ja>
    ステータス更新
    - param
      - read = 0
      - start = 1
      - stop = 2
      - restart = 3
    </comment-ja>
    <comment-en>
    Update the iptables service state.  ``status`` is treated as a
    bitmask: STOP and START together mean restart.
    </comment-en>
    """
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()
    if not validates_iptables_status(self):
        return web.badrequest(self.view.alert)
    status = int(self.input.status)
    model = findbyhost1(self.orm, host_id)
    ret = False
    # Both STOP and START bits set -> restart; otherwise a single action.
    if status & IPTABLES_ACTION_STOP and status & IPTABLES_ACTION_START:
        ret = iptables_control(self, model, 'restart')
    elif status & IPTABLES_ACTION_STOP:
        ret = iptables_control(self, model, 'stop')
    elif status & IPTABLES_ACTION_START:
        ret = iptables_control(self, model, 'start')
    if ret is True:
        return web.accepted(url=web.ctx.path)
    else:
        return False
def _DELETE(self, *param, **params):
    """Queue a background job that removes a bonding device.

    param[1] names the bonding device to delete.
    """
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()
    device = param[1]
    if device is None:
        return web.notfound()
    cmdname = u"Delete Bonding Setting"
    cmd = BONDING_COMMAND_DELETE
    options = {}
    options['dev'] = device
    # Flag option: presence only, no value.
    options["succession"] = None
    _cmd = dict2command(
        "%s/%s" % (karesansui.config['application.bin.dir'], cmd), options)
    _jobgroup = JobGroup(cmdname, karesansui.sheconf['env.uniqkey'])
    _job = Job('%s command' % cmdname, 0, _cmd)
    _jobgroup.jobs.append(_job)
    host = findbyhost1(self.orm, host_id)
    _machine2jobgroup = m2j_new(
        machine=host,
        jobgroup_id=-1,
        uniq_key=karesansui.sheconf['env.uniqkey'],
        created_user=self.me,
        modified_user=self.me,
    )
    save_job_collaboration(
        self.orm,
        self.pysilhouette.orm,
        _machine2jobgroup,
        _jobgroup,
    )
    return web.accepted()
def _PUT(self, *param, **params):
    """Set the state of a named host service.

    The service name comes from param[1]; the requested action from the
    ``status`` request field (start/stop/restart/enable/disable constants).
    Responds 202 when the job is queued, 404 for unknown services, and
    400 for missing or invalid status values.
    """
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()
    self.view.host_id = host_id
    host = findbyhost1(self.orm, host_id)
    name = param[1]
    config = ServiceConfigParam(SERVICE_XML_FILE)
    config.load_xml_config()
    if not config.findby1service(name):
        self.logger.debug("Set service status failed. Service not found.")
        return web.notfound("Service not found")
    if not is_param(self.input, 'status'):
        self.logger.error("Set service status failed. Missing request param.")
        return web.badrequest("Missing request param.")
    status = int(self.input.status)
    # Map status constants to service actions instead of an if/elif chain.
    actions = {
        SERVICE_START: "start",
        SERVICE_STOP: "stop",
        SERVICE_RESTART: "restart",
        SERVICE_ENABLE: "enable",
        SERVICE_DISABLE: "disable",
    }
    if status not in actions:
        self.logger.error("Set service status failed. Invalid request param.")
        return web.badrequest("Invalid request param")
    service_job(self, host, name, actions[status])
    return web.accepted()
def _POST(self, *param, **params):
    """Create a storage pool (directory or iSCSI type) via a background job."""
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()
    self.view.host_id = host_id
    model = findbyhost1(self.orm, host_id)
    # virt
    kvc = KaresansuiVirtConnection()
    try:
        inactive_pool = kvc.list_inactive_storage_pool()
        active_pool = kvc.list_active_storage_pool()
    finally:
        kvc.close()
    now_pools = inactive_pool + active_pool
    if self.input.pool_type == STORAGE_POOL_TYPE["TYPE_DIR"]:
        if not validates_pool_dir(self, now_pools):
            return web.badrequest(self.view.alert)
        extra_opts = {}
        if create_pool_dir_job(self,
                               model,
                               self.input.pool_name,
                               self.input.pool_type,
                               self.input.pool_target_path,
                               extra_opts) is True:
            self.logger.debug("Create dir storage pool success.")
            return web.accepted()
        else:
            self.logger.debug("Failed create DIR storage pool job.")
            return False
    elif self.input.pool_type == STORAGE_POOL_TYPE["TYPE_ISCSI"]:
        if not validates_pool_iscsi(self, now_pools):
            return web.badrequest(self.view.alert)
        extra_opts = {}
        # Discover available iSCSI targets synchronously via the job system.
        network_storages = get_iscsi_cmd(self, host_id)
        if network_storages is False:
            self.logger.debug("Get iSCSI command failed. Return to timeout")
            return web.internalerror("Internal Server Error. (Timeout)")
        # Match the requested IQN against the discovered targets.
        pool_host_name = None
        pool_device_path = None
        for iscsi in network_storages:
            if self.input.pool_target_iscsi == iscsi["iqn"]:
                pool_host_name = iscsi["hostname"]
                pool_device_path = iscsi["iqn"]
                disk_list = iscsi["disk_list"]
                break
        if pool_host_name is None or pool_device_path is None:
            self.logger.debug("Failed create iSCSI storage pool. Target iSCSI device not found.")
            return web.badrequest()
        # Collect disks the user chose to mount; unpartitionable disks may
        # additionally be flagged for formatting.
        automount_list = []
        for disk in disk_list:
            if is_param(self.input, "iscsi-disk-use-type-%s" % (disk["symlink_name"])):
                if self.input["iscsi-disk-use-type-%s" % (disk["symlink_name"])] == "mount" \
                        and disk["is_partitionable"] is False:
                    if is_param(self.input, "iscsi-disk-format-%s" % (disk["symlink_name"])):
                        if self.input["iscsi-disk-format-%s" % (disk["symlink_name"])] == "true":
                            disk["is_format"] = True
                    automount_list.append(disk)
        if create_pool_iscsi_job(self,
                                 model,
                                 self.input.pool_name,
                                 self.input.pool_type,
                                 pool_host_name,
                                 pool_device_path,
                                 automount_list,
                                 extra_opts) is True:
            self.logger.debug("Create iSCSI storage pool success. name=%s" % (self.input.pool_name))
            return web.accepted()
        else:
            self.logger.debug("Failed create iSCSI storage pool job. name=%s" % (self.input.pool_name))
            return False
    else:
        self.logger.debug("Non-existent type. type=%s" % self.input.pool_type)
        return web.badrequest("Non-existent type. type=%s" % self.input.pool_type)
def _PUT(self, *param, **params):
    """Start or stop an iSCSI network storage plus its dependent pools.

    Finds every storage pool backed by the target (directly as an
    "iscsi" pool, or as an "fs" pool whose source device lives under the
    target's device directory), then queues one job group that starts or
    stops the pools and the iSCSI session in the correct order.
    """
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()
    if not validates_network_storage(self):
        self.logger.debug("Network storage change status failed. Did not validate.")
        return web.badrequest(self.view.alert)
    host = findbyhost1(self.orm, host_id)
    if is_param(self.input, "iqn"):
        iqn = self.input.iqn
    else:
        self.logger.debug("Network storage change status failed. Target IQN not found.")
        return web.badrequest()
    options = {'iqn': iqn}
    job_order = 0
    if is_param(self.input, "status"):
        status = self.input.status
    else:
        self.logger.debug("Network storage change status failed. Status type not found.")
        return web.badrequest()
    if is_param(self.input, "host") and is_param(self.input, "port"):
        # NOTE(review): this rebinds `host` (the Machine row fetched above)
        # to the request's host string; findbyhost1 is re-run further down
        # before the job is saved, so behavior is correct but fragile.
        host = self.input.host
        port = self.input.port
    else:
        self.logger.debug("Network storage change status failed. Target host and port not found.")
        return web.badrequest()
    active_used_pool = []
    inactive_used_pool = []
    kvc = KaresansuiVirtConnection()
    try:
        dev_symlink_list = get_filelist(ISCSI_DEVICE_DIR)
        dev_symlink_list.sort()
        symlink_regexp = re.compile("^%s/%s" % (re.escape(ISCSI_DEVICE_DIR), re.escape(ISCSI_DEVICE_NAME_TPL % (host, port, iqn))))
        active_pools = kvc.list_active_storage_pool()
        inactive_pools = kvc.list_inactive_storage_pool()
        now_pools = active_pools + inactive_pools
        for pool in now_pools:
            pool_type = kvc.get_storage_pool_type(pool)
            if pool_type == "iscsi":
                if iqn == kvc.get_storage_pool_sourcedevicepath(pool):
                    if pool in active_pools:
                        active_used_pool.append(pool)
                    if pool in inactive_pools:
                        inactive_used_pool.append(pool)
            elif pool_type == "fs":
                if symlink_regexp.match(kvc.get_storage_pool_sourcedevicepath(pool)):
                    if pool in active_pools:
                        active_used_pool.append(pool)
                    if pool in inactive_pools:
                        inactive_used_pool.append(pool)
        # Refuse to stop the session while any dependent pool backs a guest.
        if status == NETWORK_STORAGE_STOP:
            for pool in active_used_pool:
                if kvc.is_used_storage_pool(name=pool, active_only=True) is True:
                    self.logger.debug("Stop iSCSI failed. Target iSCSI is used by guest.")
                    return web.badrequest("Target iSCSI is used by guest.")
    finally:
        kvc.close()
    if status == NETWORK_STORAGE_START:
        network_storage_cmd = ISCSI_COMMAND_START
        cmd_name = u'Start iSCSI'
        jobgroup = JobGroup(cmd_name, karesansui.sheconf['env.uniqkey'])
        # Login first (order 0, appended below), then start pools (order 1).
        for pool in inactive_used_pool:
            pool_cmd = dict2command(
                "%s/%s" % (karesansui.config['application.bin.dir'], VIRT_COMMAND_START_STORAGE_POOL),
                {"name": pool})
            pool_cmdname = "Start Storage Pool"
            jobgroup.jobs.append(Job('%s command' % pool_cmdname, 1, pool_cmd))
        job_order = 0
    elif status == NETWORK_STORAGE_STOP:
        network_storage_cmd = ISCSI_COMMAND_STOP
        cmd_name = u'Stop iSCSI'
        jobgroup = JobGroup(cmd_name, karesansui.sheconf['env.uniqkey'])
        # Destroy pools first (order 0), then log the session out (order 1).
        for pool in active_used_pool:
            pool_cmd = dict2command(
                "%s/%s" % (karesansui.config['application.bin.dir'], VIRT_COMMAND_DESTROY_STORAGE_POOL),
                {"name": pool})
            pool_cmdname = "Stop Storage Pool"
            jobgroup.jobs.append(Job('%s command' % pool_cmdname, 0, pool_cmd))
        job_order = 1
    else:
        return web.internalerror('Internal Server Error. (Param)')
    _cmd = dict2command(
        "%s/%s" % (karesansui.config['application.bin.dir'], network_storage_cmd), options)
    jobgroup.jobs.append(Job('%s command' % cmd_name, job_order, _cmd))
    host = findbyhost1(self.orm, host_id)
    _machine2jobgroup = m2j_new(
        machine=host,
        jobgroup_id=-1,
        uniq_key=karesansui.sheconf['env.uniqkey'],
        created_user=self.me,
        modified_user=self.me,
    )
    save_job_collaboration(
        self.orm,
        self.pysilhouette.orm,
        _machine2jobgroup,
        jobgroup,
    )
    return web.accepted()
def _PUT(self, *param, **params):
    """<comment-ja>
    ステータス更新
    - param
      - create = 0
      - shutdown = 1
      - destroy = 2
      - suspend = 3
      - resume = 4
      - reboot = 5
    </comment-ja>
    <comment-en>
    Update the run state of a guest living on a remote, URI-connected
    host, by queuing the matching virt command as a background job.
    </comment-en>
    """
    host_id = param[0]
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()
    uri_id = param[1]
    if uri_id is None:
        return web.notfound()
    if not validates_uriguest_status(self):
        return web.badrequest(self.view.alert)
    status = int(self.input.status)
    model = findbyhost1(self.orm, host_id)
    # attribute == 2: remote URI-connected host — presumably; TODO confirm
    # against the Machine model's attribute constants.
    if model.attribute == 2:
        info = {}
        segs = uri_split(model.hostname)
        uri = uri_join(segs, without_auth=True)
        creds = ''
        if segs["user"] is not None:
            creds += segs["user"]
        if segs["passwd"] is not None:
            creds += ':' + segs["passwd"]
        self.kvc = KaresansuiVirtConnectionAuth(uri,creds)
        try:
            host = MergeHost(self.kvc, model)
            for guest in host.guests:
                _virt = self.kvc.search_kvg_guests(guest.info["model"].name)
                if 0 < len(_virt):
                    for _v in _virt:
                        info = _v.get_info()
                        #uri = _v._conn.getURI()
                        # test: URIs match any uuid (libvirt test driver).
                        if info["uuid"] == uri_id or (uri[0:5] == "test:"):
                            esc_name = "'%s'" % guest.info["model"].name
                            opts = {"name":esc_name,"connection":uri}
                            if creds != '':
                                # Persist credentials for the job runner,
                                # readable only after chmod to 0600.
                                passwd_file = KARESANSUI_TMP_DIR + "/" + segs['host'] + ".auth"
                                open(passwd_file, "w").write(creds)
                                os.chmod(passwd_file, 0600)
                                opts["passwd-file"] = passwd_file
                            # NOTE(review): when the requested transition is
                            # not runnable, only an error is logged and
                            # `_cmd`/`cmdname` stay unbound — the job
                            # registration below would raise NameError.
                            if status == GUEST_ACTION_CREATE:
                                # -- Create
                                cmdname = ["Start Guest", "start guest"]
                                if _v.is_creatable() is True:
                                    _cmd = dict2command(
                                        "%s/%s" % (karesansui.config['application.bin.dir'],VIRT_COMMAND_START_GUEST), opts)
                                    self.view.status = VIRT_COMMAND_START_GUEST
                                else:
                                    self.logger.error("Create Action:The state can not run. - %d" % _v.status())
                            elif status == GUEST_ACTION_SHUTDOWN:
                                cmdname = ["Shutdown Guest", "shutdown guest"]
                                if _v.is_shutdownable() is True:
                                    # -- Shutdown
                                    _cmd = dict2command(
                                        "%s/%s" % (karesansui.config['application.bin.dir'],VIRT_COMMAND_SHUTDOWN_GUEST), opts)
                                    self.view.status = VIRT_COMMAND_SHUTDOWN_GUEST
                                else:
                                    self.logger.error("Shutdown Action:The state can not run. - %d" % _v.status())
                            elif status == GUEST_ACTION_DESTROY:
                                cmdname = ["Destroy Guest", "Destroy guest"]
                                if _v.is_destroyable() is True:
                                    # -- destroy
                                    _cmd = dict2command(
                                        "%s/%s" % (karesansui.config['application.bin.dir'],VIRT_COMMAND_DESTROY_GUEST), opts)
                                    self.view.status = VIRT_COMMAND_DESTROY_GUEST
                                else:
                                    self.logger.error("Destroy Action:The state can not run. - %d" % _v.status())
                            elif status == GUEST_ACTION_SUSPEND:
                                cmdname = ["Suspend Guest", "suspend guest"]
                                if _v.is_suspendable() is True:
                                    # -- Suspend
                                    _cmd = dict2command(
                                        "%s/%s" % (karesansui.config['application.bin.dir'],VIRT_COMMAND_SUSPEND_GUEST), opts)
                                    self.view.status = VIRT_COMMAND_SUSPEND_GUEST
                                else:
                                    self.logger.error("Destroy Action:The state can not run. - %d" % _v.status())
                            elif status == GUEST_ACTION_RESUME:
                                cmdname = ["Resume Guest", "resume guest"]
                                if _v.is_resumable() is True:
                                    # -- Resume
                                    _cmd = dict2command(
                                        "%s/%s" % (karesansui.config['application.bin.dir'],VIRT_COMMAND_RESUME_GUEST), opts)
                                    self.view.status = VIRT_COMMAND_RESUME_GUEST
                                else:
                                    self.logger.error("Resume Action:The state can not run. - %d" % _v.status())
                            elif status == GUEST_ACTION_REBOOT:
                                cmdname = ["Reboot Guest", "reboot guest"]
                                if _v.is_shutdownable() is True:
                                    # -- Reboot
                                    _cmd = dict2command(
                                        "%s/%s" % (karesansui.config['application.bin.dir'],VIRT_COMMAND_REBOOT_GUEST), opts)
                                    self.view.status = VIRT_COMMAND_REBOOT_GUEST
                                else:
                                    self.logger.error("Reboot Action:The state can not run. - %d" % _v.status())
                            elif status == GUEST_ACTION_ENABLE_AUTOSTART:
                                opts["enable"] = None
                                cmdname = ["Enable Autostart Guest", "enable autostart guest"]
                                # -- Enable autostart guest
                                _cmd = dict2command(
                                    "%s/%s" % (karesansui.config['application.bin.dir'],VIRT_COMMAND_AUTOSTART_GUEST), opts)
                                self.view.status = VIRT_COMMAND_AUTOSTART_GUEST
                            elif status == GUEST_ACTION_DISABLE_AUTOSTART:
                                opts["disable"] = None
                                cmdname = ["Disable Autostart Guest", "disable autostart guest"]
                                # -- Disable autostart guest
                                _cmd = dict2command(
                                    "%s/%s" % (karesansui.config['application.bin.dir'],VIRT_COMMAND_AUTOSTART_GUEST), opts)
                                self.view.status = VIRT_COMMAND_AUTOSTART_GUEST
                            else:
                                self.logger.error("Action:Bad Request. - request status=%d" % status)
                                return web.badrequest()
                            break
        finally:
            self.kvc.close()
        # Job Register
        _jobgroup = JobGroup(cmdname[0], karesansui.sheconf['env.uniqkey'])
        _jobgroup.jobs.append(Job('%s command' % cmdname[1], 0, _cmd))
        _machine2jobgroup = m2j_new(machine=model,
                                    jobgroup_id=-1,
                                    uniq_key=karesansui.sheconf['env.uniqkey'],
                                    created_user=self.me,
                                    modified_user=self.me,
                                    )
        # INSERT
        save_job_collaboration(self.orm,
                               self.pysilhouette.orm,
                               _machine2jobgroup,
                               _jobgroup,
                               )
        return web.accepted(url="/host/%d/uriguest/%s.part" % (host_id, uri_id))
def _PUT(self, *param, **params):
    """Apply general network settings: gateway, nameservers and FQDN.

    Also rewrites hosts-file entries that referenced the old hostname
    (long or short form) so they point at the new name.
    """
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()
    if not validates_general(self):
        self.logger.debug("Change network general failed. Did not validate.")
        return web.badrequest(self.view.alert)
    modules = ["network", "resolv", "hosts"]
    host = findbyhost1(self.orm, host_id)
    dop = read_conf(modules, self, host)
    if dop is False:
        self.logger.error("Change network general failed. Failed read conf.")
        return web.internalerror('Internal Server Error. (Read Conf)')
    gateway = self.input.gateway
    hostname = self.input.fqdn
    nameservers = self.input.nameserver.strip().split()
    # Split the FQDN into domain and short-host parts.
    domainname = re.sub("^[^\.]+\.", "", hostname)
    hostname_short = re.sub("\.%s$" % (domainname), "", hostname)
    # Recover the previous names with the same fallback chain the GET
    # view uses (resolv -> network config -> uname).
    old_domainname = dop.get("resolv", ["domain"])
    if old_domainname is False:
        old_domainname = dop.get("network", ["DOMAINNAME"])
    if old_domainname is False:
        old_domainname = re.sub("^[^\.]+\.", "", os.uname()[1])
    old_hostname = dop.get("network", ["HOSTNAME"])
    if old_hostname is False:
        old_hostname = os.uname()[1]
    old_hostname_short = re.sub("\.%s$" % (old_domainname), "", old_hostname)
    # hosts: replace old name tokens in every entry, keep comments.
    hosts_arr = dop.getconf("hosts")
    for _k, _v in hosts_arr.iteritems():
        _host = dop.get("hosts", [_k])
        _value = _host[0]
        _comment = _host[1]
        _values = _value.strip().split()
        new_values = []
        for _entry in _values:
            if _entry == old_hostname:
                _entry = hostname
            elif _entry == old_hostname_short:
                _entry = hostname_short
            new_values.append(_entry)
        new_value = " ".join(new_values)
        dop.set("hosts", [_k], [new_value, _comment])
    dop.set("network", ["GATEWAY"], gateway)
    dop.set("resolv", ["nameserver"], nameservers)
    dop.set("resolv", ["domain"], domainname)
    dop.set("network", ["DOMAINNAME"], domainname)
    dop.set("network", ["HOSTNAME"], hostname)
    retval = write_conf(dop, self, host)
    if retval is False:
        self.logger.error("Change network general failed. Failed write conf.")
        return web.internalerror('Internal Server Error. (Write Conf)')
    return web.accepted(url=web.ctx.path)
def _PUT(self, *param, **params):
    """Queue a background job that updates a libvirt network definition.

    param[1] is the network name; CIDR, DHCP range, bridge, forward mode
    and autostart come from the request body.  Responds 202 with the
    job-status URL in Location.
    """
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()
    network_name = param[1]
    if not network_name:
        self.logger.debug("Network update failed. Network not found.")
        return web.notfound("Network not found.")
    if not validates_network(self, network_name=network_name):
        self.logger.debug("Network update failed. Did not validate.")
        return web.badrequest(self.view.alert)
    cidr = self.input.cidr
    dhcp_start = self.input.dhcp_start
    dhcp_end = self.input.dhcp_end
    bridge = self.input.bridge
    forward_mode = getattr(self.input, 'forward_mode', '')
    # Checkbox field: absent from the request unless checked.  Use
    # getattr with a default (consistent with forward_mode above) instead
    # of the original bare `except:`, which also swallowed
    # SystemExit/KeyboardInterrupt.
    autostart = getattr(self.input, 'autostart', "no")
    #
    # spin off update job
    #
    options = {'name'         : network_name,
               'cidr'         : cidr,
               'dhcp-start'   : dhcp_start,
               'dhcp-end'     : dhcp_end,
               'bridge-name'  : bridge,
               'forward-mode' : forward_mode,
               'autostart'    : autostart,
               }
    self.logger.debug('spinning off network_update_job options=%s' % (options))
    host = findbyhost1(self.orm, host_id)
    _cmd = dict2command(
        "%s/%s" % (karesansui.config['application.bin.dir'], VIRT_COMMAND_UPDATE_NETWORK),
        options)
    # Job Registration
    _jobgroup = JobGroup('Update network: %s' % network_name, karesansui.sheconf['env.uniqkey'])
    _jobgroup.jobs.append(Job('Update network', 0, _cmd))
    _machine2jobgroup = m2j_new(machine=host,
                                jobgroup_id=-1,
                                uniq_key=karesansui.sheconf['env.uniqkey'],
                                created_user=self.me,
                                modified_user=self.me,
                                )
    save_job_collaboration(self.orm,
                           self.pysilhouette.orm,
                           _machine2jobgroup,
                           _jobgroup,
                           )
    self.logger.debug('(Update network) Job group id==%s', _jobgroup.id)
    url = '%s/job/%s.part' % (web.ctx.home, _jobgroup.id)
    self.logger.debug('Returning Location: %s' % url)
    return web.accepted(url=url)
def _DELETE(self, *param, **params):
    """Queue a background job that deletes one guest export data set.

    param[1] is the export UUID.  Scans every active storage pool's
    target directory for `*/info.dat` files, validates the matching
    export's metadata against its domain XML, and requires exactly one
    match before queuing the delete job.
    """
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()
    # valid
    self.view.uuid = param[1]
    kvc = KaresansuiVirtConnection()
    try:
        # Storage Pool
        # Only active pools are scanned for export data.
        #inactive_pool = kvc.list_inactive_storage_pool()
        inactive_pool = []
        active_pool = kvc.list_active_storage_pool()
        pools = inactive_pool + active_pool
        pools.sort()
        export = []
        for pool_name in pools:
            pool = kvc.search_kvn_storage_pools(pool_name)
            path = pool[0].get_info()["target"]["path"]
            if os.path.exists(path):
                for _afile in glob.glob("%s/*/info.dat" % (path,)):
                    e_param = ExportConfigParam()
                    e_param.load_xml_config(_afile)
                    if e_param.get_uuid() != self.view.uuid:
                        continue
                    e_name = e_param.get_domain()
                    _dir = os.path.dirname(_afile)
                    param = ConfigParam(e_name)
                    path = "%s/%s.xml" % (_dir, e_name)
                    if os.path.isfile(path) is False:
                        self.logger.error('Export corrupt data.(file not found) - path=%s' % path)
                        return web.internalerror()
                    param.load_xml_config(path)
                    if e_name != param.get_domain_name():
                        self.logger.error('Export corrupt data.(The name does not match) - info=%s, xml=%s' \
                                          % (e_name, param.get_name()))
                        return web.internalerror()
                    _dir = os.path.dirname(_afile)
                    export.append({"dir": _dir,
                                   "pool": pool_name,
                                   "uuid": e_param.get_uuid(),
                                   "name": e_name,
                                   })
        if len(export) != 1:
            self.logger.info("Export does not exist. - uuid=%s" % self.view.uuid)
            return web.badrequest()
    finally:
        kvc.close()
    export = export[0]
    if os.path.exists(export['dir']) is False or os.path.isdir(export['dir']) is False:
        # BUG FIX: the original logged `export_dir`, a name that does not
        # exist, raising NameError on this error path; log the actual dir.
        self.logger.error('Export data is not valid. [%s]' % export['dir'])
        return web.badrequest('Export data is not valid.')
    host = findbyhost1(self.orm, host_id)
    options = {}
    options['uuid'] = export["uuid"]
    _cmd = dict2command("%s/%s" % (karesansui.config['application.bin.dir'], \
                                   VIRT_COMMAND_DELETE_EXPORT_DATA), options)
    # Job Registration
    _jobgroup = JobGroup('Delete Export Data', karesansui.sheconf['env.uniqkey'])
    _jobgroup.jobs.append(Job('Delete Export Data', 0, _cmd))
    _machine2jobgroup = m2j_new(
        machine=host,
        jobgroup_id=-1,
        uniq_key=karesansui.sheconf['env.uniqkey'],
        created_user=self.me,
        modified_user=self.me,
    )
    save_job_collaboration(
        self.orm,
        self.pysilhouette.orm,
        _machine2jobgroup,
        _jobgroup,
    )
    self.logger.debug('(Delete export data) Job group id==%s', _jobgroup.id)
    url = '%s/job/%s.part' % (web.ctx.home, _jobgroup.id)
    self.logger.debug('Returning Location: %s' % url)
    return web.accepted()
def _DELETE(self, *param, **params):
    """Queue a background job that deletes one guest export data set.

    param[1] is the export UUID.  Searches active storage pools for the
    matching `info.dat`, cross-checks it against the stored domain XML,
    and requires exactly one match before queuing the delete job.
    """
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()
    # valid
    self.view.uuid = param[1]
    kvc = KaresansuiVirtConnection()
    try:
        # Storage Pool
        # Only active pools are scanned for export data.
        #inactive_pool = kvc.list_inactive_storage_pool()
        inactive_pool = []
        active_pool = kvc.list_active_storage_pool()
        pools = inactive_pool + active_pool
        pools.sort()
        export = []
        for pool_name in pools:
            pool = kvc.search_kvn_storage_pools(pool_name)
            path = pool[0].get_info()["target"]["path"]
            if os.path.exists(path):
                for _afile in glob.glob("%s/*/info.dat" % (path,)):
                    e_param = ExportConfigParam()
                    e_param.load_xml_config(_afile)
                    if e_param.get_uuid() != self.view.uuid:
                        continue
                    e_name = e_param.get_domain()
                    _dir = os.path.dirname(_afile)
                    param = ConfigParam(e_name)
                    path = "%s/%s.xml" % (_dir, e_name)
                    if os.path.isfile(path) is False:
                        self.logger.error('Export corrupt data.(file not found) - path=%s' % path)
                        return web.internalerror()
                    param.load_xml_config(path)
                    if e_name != param.get_domain_name():
                        self.logger.error('Export corrupt data.(The name does not match) - info=%s, xml=%s' \
                                          % (e_name, param.get_name()))
                        return web.internalerror()
                    _dir = os.path.dirname(_afile)
                    export.append({"dir" : _dir,
                                   "pool" : pool_name,
                                   "uuid" : e_param.get_uuid(),
                                   "name" : e_name,
                                   })
        if len(export) != 1:
            self.logger.info("Export does not exist. - uuid=%s" % self.view.uuid)
            return web.badrequest()
    finally:
        kvc.close()
    export = export[0]
    if os.path.exists(export['dir']) is False or os.path.isdir(export['dir']) is False:
        # BUG FIX: the original referenced `export_dir`, an undefined name,
        # so this error path raised NameError; log the actual directory.
        self.logger.error('Export data is not valid. [%s]' % export['dir'])
        return web.badrequest('Export data is not valid.')
    host = findbyhost1(self.orm, host_id)
    options = {}
    options['uuid'] = export["uuid"]
    _cmd = dict2command("%s/%s" % (karesansui.config['application.bin.dir'], \
                                   VIRT_COMMAND_DELETE_EXPORT_DATA), options)
    # Job Registration
    _jobgroup = JobGroup('Delete Export Data', karesansui.sheconf['env.uniqkey'])
    _jobgroup.jobs.append(Job('Delete Export Data', 0, _cmd))
    _machine2jobgroup = m2j_new(machine=host,
                                jobgroup_id=-1,
                                uniq_key=karesansui.sheconf['env.uniqkey'],
                                created_user=self.me,
                                modified_user=self.me,
                                )
    save_job_collaboration(self.orm,
                           self.pysilhouette.orm,
                           _machine2jobgroup,
                           _jobgroup,
                           )
    self.logger.debug('(Delete export data) Job group id==%s', _jobgroup.id)
    url = '%s/job/%s.part' % (web.ctx.home, _jobgroup.id)
    self.logger.debug('Returning Location: %s' % url)
    return web.accepted()
def _GET(self, *param, **params):
    """List static routes per device and mark which entries are removable.

    Routes on virtual/loopback devices or with reserved addresses
    (0.0.0.0, 169.254.0.0) — plus the networks of the host's own physical
    interfaces — are flagged as not removable.
    """
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()
    host = findbyhost1(self.orm, host_id)
    self.view.host_id = host_id
    # unremovable entries
    excludes = {
        "device": ["^peth","^virbr","^sit","^xenbr","^lo","^br"],
        "ipaddr": ["^0\.0\.0\.0$", "^169\.254\.0\.0$"],
    }
    devices = []
    phydev_regex = re.compile(r"^eth[0-9]+")
    for dev,dev_info in get_ifconfig_info().iteritems():
        if phydev_regex.match(dev):
            try:
                if dev_info['ipaddr'] is not None:
                    devices.append(dev)
                    # Protect each physical interface's own network.
                    net = NetworkAddress("%s/%s" % (dev_info['ipaddr'],dev_info['mask'],))
                    excludes['ipaddr'].append(net.network)
            except:
                # Best-effort: skip interfaces with unparsable addresses.
                pass
    self.view.devices = devices
    parser = Parser()
    status = parser.do_status()
    routes = {}
    # status maps device -> {network -> route attributes}.
    for _k,_v in status.iteritems():
        for _k2,_v2 in _v.iteritems():
            # Key is base64("<network>@<device>"), matching the DELETE API.
            name = base64_encode("%s@%s" % (_k2,_k,))
            routes[name] = {}
            routes[name]['name'] = name
            routes[name]['device'] = _k
            routes[name]['gateway'] = _v2['gateway']
            routes[name]['flags'] = _v2['flags']
            routes[name]['ref'] = _v2['ref']
            routes[name]['use'] = _v2['use']
            net = NetworkAddress(_k2)
            routes[name]['ipaddr'] = net.ipaddr
            routes[name]['netlen'] = net.netlen
            routes[name]['netmask'] = net.netmask
            removable = True
            for _ex_key,_ex_val in excludes.iteritems():
                ex_regex = "|".join(_ex_val)
                mm = re.search(ex_regex,routes[name][_ex_key])
                if mm:
                    removable = False
            routes[name]['removable'] = removable
    self.view.routes = routes
    if self.is_mode_input():
        pass
    return True
def _PUT(self, *param, **params):
    """<comment-ja>
    ステータス更新
    - param
      - create = 0
      - shutdown = 1
      - destroy = 2
      - suspend = 3
      - resume = 4
      - reboot = 5
    </comment-ja>
    <comment-en>
    Update the run state of a guest on a remote, URI-connected host by
    queuing the matching virt command as a background job.
    </comment-en>
    """
    host_id = param[0]
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()
    uri_id = param[1]
    if uri_id is None:
        return web.notfound()
    if not validates_uriguest_status(self):
        return web.badrequest(self.view.alert)
    status = int(self.input.status)
    model = findbyhost1(self.orm, host_id)
    # attribute == 2: remote URI-connected host — presumably; TODO confirm
    # against the Machine model's attribute constants.
    if model.attribute == 2:
        info = {}
        segs = uri_split(model.hostname)
        uri = uri_join(segs, without_auth=True)
        creds = ""
        if segs["user"] is not None:
            creds += segs["user"]
        if segs["passwd"] is not None:
            creds += ":" + segs["passwd"]
        self.kvc = KaresansuiVirtConnectionAuth(uri, creds)
        try:
            host = MergeHost(self.kvc, model)
            for guest in host.guests:
                _virt = self.kvc.search_kvg_guests(guest.info["model"].name)
                if 0 < len(_virt):
                    for _v in _virt:
                        info = _v.get_info()
                        # uri = _v._conn.getURI()
                        # test: URIs match any uuid (libvirt test driver).
                        if info["uuid"] == uri_id or (uri[0:5] == "test:"):
                            esc_name = "'%s'" % guest.info["model"].name
                            opts = {"name": esc_name, "connection": uri}
                            if creds != "":
                                # Persist credentials for the job runner.
                                passwd_file = KARESANSUI_TMP_DIR + "/" + segs["host"] + ".auth"
                                open(passwd_file, "w").write(creds)
                                os.chmod(passwd_file, 0600)
                                opts["passwd-file"] = passwd_file
                            # NOTE(review): when the requested transition is
                            # not runnable, only an error is logged and
                            # `_cmd`/`cmdname` stay unbound — the job
                            # registration below would raise NameError.
                            if status == GUEST_ACTION_CREATE:
                                # -- Create
                                cmdname = ["Start Guest", "start guest"]
                                if _v.is_creatable() is True:
                                    _cmd = dict2command(
                                        "%s/%s" % (karesansui.config["application.bin.dir"], VIRT_COMMAND_START_GUEST),
                                        opts,
                                    )
                                    self.view.status = VIRT_COMMAND_START_GUEST
                                else:
                                    self.logger.error("Create Action:The state can not run. - %d" % _v.status())
                            elif status == GUEST_ACTION_SHUTDOWN:
                                cmdname = ["Shutdown Guest", "shutdown guest"]
                                if _v.is_shutdownable() is True:
                                    # -- Shutdown
                                    _cmd = dict2command(
                                        "%s/%s" % (karesansui.config["application.bin.dir"], VIRT_COMMAND_SHUTDOWN_GUEST),
                                        opts,
                                    )
                                    self.view.status = VIRT_COMMAND_SHUTDOWN_GUEST
                                else:
                                    self.logger.error("Shutdown Action:The state can not run. - %d" % _v.status())
                            elif status == GUEST_ACTION_DESTROY:
                                cmdname = ["Destroy Guest", "Destroy guest"]
                                if _v.is_destroyable() is True:
                                    # -- destroy
                                    _cmd = dict2command(
                                        "%s/%s" % (karesansui.config["application.bin.dir"], VIRT_COMMAND_DESTROY_GUEST),
                                        opts,
                                    )
                                    self.view.status = VIRT_COMMAND_DESTROY_GUEST
                                else:
                                    self.logger.error("Destroy Action:The state can not run. - %d" % _v.status())
                            elif status == GUEST_ACTION_SUSPEND:
                                cmdname = ["Suspend Guest", "suspend guest"]
                                if _v.is_suspendable() is True:
                                    # -- Suspend
                                    _cmd = dict2command(
                                        "%s/%s" % (karesansui.config["application.bin.dir"], VIRT_COMMAND_SUSPEND_GUEST),
                                        opts,
                                    )
                                    self.view.status = VIRT_COMMAND_SUSPEND_GUEST
                                else:
                                    self.logger.error("Destroy Action:The state can not run. - %d" % _v.status())
                            elif status == GUEST_ACTION_RESUME:
                                cmdname = ["Resume Guest", "resume guest"]
                                if _v.is_resumable() is True:
                                    # -- Resume
                                    _cmd = dict2command(
                                        "%s/%s" % (karesansui.config["application.bin.dir"], VIRT_COMMAND_RESUME_GUEST),
                                        opts,
                                    )
                                    self.view.status = VIRT_COMMAND_RESUME_GUEST
                                else:
                                    self.logger.error("Resume Action:The state can not run. - %d" % _v.status())
                            elif status == GUEST_ACTION_REBOOT:
                                cmdname = ["Reboot Guest", "reboot guest"]
                                if _v.is_shutdownable() is True:
                                    # -- Reboot
                                    _cmd = dict2command(
                                        "%s/%s" % (karesansui.config["application.bin.dir"], VIRT_COMMAND_REBOOT_GUEST),
                                        opts,
                                    )
                                    self.view.status = VIRT_COMMAND_REBOOT_GUEST
                                else:
                                    self.logger.error("Reboot Action:The state can not run. - %d" % _v.status())
                            elif status == GUEST_ACTION_ENABLE_AUTOSTART:
                                opts["enable"] = None
                                cmdname = ["Enable Autostart Guest", "enable autostart guest"]
                                # -- Enable autostart guest
                                _cmd = dict2command(
                                    "%s/%s" % (karesansui.config["application.bin.dir"], VIRT_COMMAND_AUTOSTART_GUEST),
                                    opts,
                                )
                                self.view.status = VIRT_COMMAND_AUTOSTART_GUEST
                            elif status == GUEST_ACTION_DISABLE_AUTOSTART:
                                opts["disable"] = None
                                cmdname = ["Disable Autostart Guest", "disable autostart guest"]
                                # -- Disable autostart guest
                                _cmd = dict2command(
                                    "%s/%s" % (karesansui.config["application.bin.dir"], VIRT_COMMAND_AUTOSTART_GUEST),
                                    opts,
                                )
                                self.view.status = VIRT_COMMAND_AUTOSTART_GUEST
                            else:
                                self.logger.error("Action:Bad Request. - request status=%d" % status)
                                return web.badrequest()
                            break
        finally:
            self.kvc.close()
        # Job Register
        _jobgroup = JobGroup(cmdname[0], karesansui.sheconf["env.uniqkey"])
        _jobgroup.jobs.append(Job("%s command" % cmdname[1], 0, _cmd))
        _machine2jobgroup = m2j_new(
            machine=model,
            jobgroup_id=-1,
            uniq_key=karesansui.sheconf["env.uniqkey"],
            created_user=self.me,
            modified_user=self.me,
        )
        # INSERT
        save_job_collaboration(self.orm, self.pysilhouette.orm, _machine2jobgroup, _jobgroup)
        return web.accepted(url="/host/%d/uriguest/%s.part" % (host_id, uri_id))
def _GET(self, *param, **params):
    """Show configured and current (runtime) settings of one network interface.

    URL params: param[0] = host id, param[1] = device name (e.g. "eth0").
    Populates self.view with the ifcfg-configured values (BOOTPROTO, ONBOOT,
    IPADDR, ...) and the live values reported by get_ifconfig_info(),
    plus bonding master information.  Returns True on success.
    """
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()
    self.view.host_id = host_id
    self.view.current = get_ifconfig_info()
    self.view.device = param[1]

    # Fix: the device name comes from the URL; an unknown device used to
    # raise KeyError (HTTP 500) below.  Answer 404 instead.
    if self.view.device not in self.view.current:
        return web.notfound()

    modules = ["ifcfg"]

    host = findbyhost1(self.orm, host_id)
    dop = read_conf(modules, self, host)
    if dop is False:
        self.logger.error("Get nic info failed. Failed read conf.")
        return web.internalerror('Internal Server Error. (Read conf)')

    # --- configured (ifcfg) values, with sensible defaults when unset ---
    self.view.bootproto = dop.get("ifcfg", [self.view.device, "BOOTPROTO"])
    if self.view.bootproto is False:
        self.view.bootproto = "static"

    self.view.onboot = dop.get("ifcfg", [self.view.device, "ONBOOT"])
    if self.view.onboot is False:
        self.view.onboot = "yes"

    self.view.ipaddr = dop.get("ifcfg", [self.view.device, "IPADDR"])
    if self.view.ipaddr is False:
        self.view.ipaddr = ""

    self.view.netmask = dop.get("ifcfg", [self.view.device, "NETMASK"])
    if self.view.netmask is False:
        self.view.netmask = ""

    # NETWORK / BROADCAST: if not configured, derive them from ip/mask.
    self.view.network = dop.get("ifcfg", [self.view.device, "NETWORK"])
    if self.view.network is False:
        net = NetworkAddress("%s/%s" % (self.view.ipaddr, self.view.netmask))
        if net.valid_addr(self.view.ipaddr) is True and net.valid_addr(self.view.netmask) is True:
            self.view.network = net.network
        else:
            self.view.network = ""

    self.view.broadcast = dop.get("ifcfg", [self.view.device, "BROADCAST"])
    if self.view.broadcast is False:
        net = NetworkAddress("%s/%s" % (self.view.ipaddr, self.view.netmask))
        if net.valid_addr(self.view.ipaddr) is True and net.valid_addr(self.view.netmask) is True:
            self.view.broadcast = net.broadcast
        else:
            self.view.broadcast = ""

    self.view.master = dop.get("ifcfg", [self.view.device, "MASTER"])
    if self.view.master is False:
        self.view.master = ""

    # --- current (runtime) values; None means "not reported" ---
    self.view.c_ipaddr = self.view.current[self.view.device]["ipaddr"]
    if self.view.c_ipaddr is None:
        self.view.c_ipaddr = ""

    self.view.c_netmask = self.view.current[self.view.device]["mask"]
    if self.view.c_netmask is None:
        self.view.c_netmask = ""

    if self.view.current[self.view.device]["cidr"] is None:
        self.view.c_network = ""
    else:
        # "a.b.c.d/nn" -> "a.b.c.d"  (raw string: "\/" was an invalid escape)
        self.view.c_network = re.sub(r"/.*", "", self.view.current[self.view.device]["cidr"])

    self.view.c_broadcast = self.view.current[self.view.device]["bcast"]
    if self.view.c_broadcast is None:
        net = NetworkAddress("%s/%s" % (self.view.c_ipaddr, self.view.c_netmask))
        if net.valid_addr(self.view.c_ipaddr) is True and net.valid_addr(self.view.c_netmask) is True:
            self.view.c_broadcast = net.broadcast
        else:
            self.view.c_broadcast = ""

    self.view.c_hwaddr = self.view.current[self.view.device]["hwaddr"]
    if self.view.c_hwaddr is None:
        self.view.c_hwaddr = ""

    # Bonding: find which bond (if any) this device is enslaved to.
    self.view.bond_info = get_bonding_info()
    self.view.c_master = ""
    for bond in self.view.bond_info:
        for slave in self.view.bond_info[bond]['slave']:
            if self.view.device == slave:
                self.view.c_master = bond

    return True
def _PUT(self, *param, **params):
    """Update one host entry (name, hostname/URI, notebook, icon, tags).

    param[0] = host id.  Validates the input, rejects duplicate names /
    hostnames with 409 Conflict, then updates the Machine row and redirects
    to the same path (303 See Other).
    """
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()

    if not validates_host_edit(self):
        self.logger.debug("Update Host OS is failed, Invalid input value.")
        return web.badrequest(self.view.alert)

    host = findbyhost1(self.orm, host_id)

    # Another machine already using the requested name? -> conflict.
    cmp_host = findby1name(self.orm, self.input.m_name)
    if cmp_host is not None and int(host_id) != cmp_host.id:
        self.logger.debug("Update Host OS is failed, "
                          "Already exists name"
                          "- %s, %s" % (host, cmp_host))
        return web.conflict(web.ctx.path)

    # For karesansui-managed hosts the hostname must be unique too.
    if self.input.m_connect_type == "karesansui":
        hostname_check = findby1hostname(self.orm, self.input.m_hostname)
        if hostname_check is not None and int(host_id) != hostname_check.id:
            return web.conflict(web.ctx.path)

    if self.input.m_connect_type == "karesansui":
        host.attribute = MACHINE_ATTRIBUTE['HOST']
        if is_param(self.input, "m_hostname"):
            host.hostname = self.input.m_hostname

    if self.input.m_connect_type == "libvirt":
        # Store credentials embedded in the libvirt URI.
        host.attribute = MACHINE_ATTRIBUTE['URI']
        if is_param(self.input, "m_uri"):
            segs = uri_split(self.input.m_uri)
            if is_param(self.input, "m_auth_user") and self.input.m_auth_user:
                segs["user"] = self.input.m_auth_user
            if is_param(self.input, "m_auth_passwd") and self.input.m_auth_passwd:
                segs["passwd"] = self.input.m_auth_passwd
            host.hostname = uri_join(segs)

    if is_param(self.input, "note_title"):
        host.notebook.title = self.input.note_title
    if is_param(self.input, "note_value"):
        host.notebook.value = self.input.note_value
    if is_param(self.input, "m_name"):
        host.name = self.input.m_name

    # Icon (fix: dropped dead local `icon_filename = None`, never read)
    if is_param(self.input, "icon_filename", empty=True):
        host.icon = self.input.icon_filename

    # tag UPDATE: reuse existing tags, create missing ones.
    if is_param(self.input, "tags"):
        _tags = []
        tag_array = comma_split(self.input.tags)
        tag_array = uniq_sort(tag_array)
        for x in tag_array:
            if t_count(self.orm, x) == 0:
                _tags.append(t_new(x))
            else:
                _tags.append(t_name(self.orm, x))
        host.tags = _tags

    host.modified_user = self.me

    m_update(self.orm, host)

    return web.seeother(web.ctx.path)
def _GET(self, *param, **params):
    """Show one host: local host (proc info + libvirt), remote URI host
    (authenticated libvirt connection), or plain other host (link only).

    param[0] = host id.  Output goes to self.view (or JSON via self.view.data
    when the client asked for JSON).  Returns True on success.
    """
    # Optional ?job_id= passthrough for the job-progress widget.
    if self.input.has_key('job_id') is True:
        self.view.job_id = self.input.job_id
    else:
        self.view.job_id = None

    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()

    model = findbyhost1(self.orm, host_id)

    # Pick the local libvirt URI by hypervisor type.
    # NOTE(review): attribute==0 presumably means "local host",
    # hypervisor 1=Xen / 2=KVM — matches the uris keys; confirm upstream.
    uris = available_virt_uris()
    if model.attribute == 0 and model.hypervisor == 1:
        uri = uris["XEN"]
    elif model.attribute == 0 and model.hypervisor == 2:
        uri = uris["KVM"]
    else:
        uri = None

    # other_url: link shown for hosts managed by a different karesansui.
    other_url = "%s://%s%s/" % (
        self.view.ctx.protocol, model.hostname,
        karesansui.config['application.url.prefix'])

    if self.is_mode_input() is False:
        if karesansui.config["application.uniqkey"] == model.uniq_key:
            # My host: gather local CPU / memory / disk statistics.
            host_cpuinfo = get_proc_cpuinfo()
            cpuinfo = {}
            cpuinfo["number"] = len(host_cpuinfo)
            cpuinfo["vendor"] = host_cpuinfo[0]["vendor_id"]
            cpuinfo["model"] = host_cpuinfo[0]["model name"]
            cpuinfo["frequency"] = host_cpuinfo[0]["cpu MHz"]

            host_meminfo = get_proc_meminfo()
            meminfo = {}
            meminfo["total"] = host_meminfo["MemTotal"][0]
            meminfo["free"] = host_meminfo["MemFree"][0]
            meminfo["buffers"] = host_meminfo["Buffers"][0]
            meminfo["cached"] = host_meminfo["Cached"][0]

            host_diskinfo = get_partition_info(VENDOR_DATA_DIR)
            diskinfo = {}
            diskinfo["total"] = host_diskinfo[1]
            diskinfo["free"] = host_diskinfo[3]

            self.kvc = KaresansuiVirtConnection(uri)
            try:
                host = MergeHost(self.kvc, model)
                if self.is_json() is True:
                    json_host = host.get_json(self.me.languages)
                    self.view.data = json_dumps({
                        "model": json_host["model"],
                        "cpuinfo": cpuinfo,
                        "meminfo": meminfo,
                        "diskinfo": diskinfo,
                    })
                else:
                    self.view.model = host.info["model"]
                    self.view.virt = host.info["virt"]
            finally:
                # Always release the libvirt connection.
                self.kvc.close()
        else:
            # other uri: a remote libvirt endpoint (attribute == 2).
            if model.attribute == 2:
                # Credentials are embedded in the stored hostname URI.
                segs = uri_split(model.hostname)
                uri = uri_join(segs, without_auth=True)
                creds = ''
                if segs["user"] is not None:
                    creds += segs["user"]
                if segs["passwd"] is not None:
                    creds += ':' + segs["passwd"]
                try:
                    self.kvc = KaresansuiVirtConnectionAuth(uri, creds)
                    host = MergeHost(self.kvc, model)
                    if self.is_json() is True:
                        json_host = host.get_json(self.me.languages)
                        self.view.data = json_dumps({
                            "model": json_host["model"],
                            "uri": uri,
                            "num_of_guests": len(host.guests),
                        })
                    else:
                        self.view.model = host.info["model"]
                        self.view.virt = host.info["virt"]
                        self.view.uri = uri
                        # Bare excepts below deliberately fall back to ""
                        # when no credential segment is present.
                        try:
                            self.view.auth_user = segs["user"]
                        except:
                            self.view.auth_user = ""
                        try:
                            self.view.auth_passwd = segs["passwd"]
                        except:
                            self.view.auth_passwd = ""
                except:
                    # Best effort: an unreachable remote host must not
                    # break the page; the view simply stays empty.
                    pass
                finally:
                    #if 'kvc' in dir(locals()["self"])
                    # Close only if the connection attempt got far enough
                    # to set self.kvc.
                    if 'kvc' in dir(self):
                        self.kvc.close()
            # other host
            else:
                if self.is_json() is True:
                    self.view.data = json_dumps({
                        "model": model.get_json(self.me.languages),
                        "other_url": other_url,
                    })
                else:
                    self.view.model = model
                    self.view.virt = None
                    self.view.other_url = other_url
            return True
    else:
        # mode=input: edit form; expose URI and credentials for editing.
        if model.attribute == 2:
            segs = uri_split(model.hostname)
            uri = uri_join(segs, without_auth=True)
            creds = ''
            if segs["user"] is not None:
                creds += segs["user"]
            if segs["passwd"] is not None:
                creds += ':' + segs["passwd"]
            self.view.model = model
            self.view.uri = uri
            try:
                self.view.auth_user = segs["user"]
            except:
                self.view.auth_user = ""
            try:
                self.view.auth_passwd = segs["passwd"]
            except:
                self.view.auth_passwd = ""
        else:
            self.kvc = KaresansuiVirtConnection(uri)
            try:
                host = MergeHost(self.kvc, model)
                self.view.model = host.info["model"]
            finally:
                self.kvc.close()

    self.view.application_uniqkey = karesansui.config['application.uniqkey']
    return True
def _POST(self, *param, **params):
    """Create a storage pool (DIR or iSCSI) on the host via a background job.

    param[0] = host id.  Validates against the pools that already exist,
    then registers the appropriate create-pool job.  Returns 202 Accepted
    on success, 400/500 on validation or lookup failures.
    """
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()
    self.view.host_id = host_id
    model = findbyhost1(self.orm, host_id)

    # virt: collect existing pool names so validation can reject duplicates.
    kvc = KaresansuiVirtConnection()
    try:
        inactive_pool = kvc.list_inactive_storage_pool()
        active_pool = kvc.list_active_storage_pool()
    finally:
        kvc.close()
    now_pools = inactive_pool + active_pool

    if self.input.pool_type == STORAGE_POOL_TYPE["TYPE_DIR"]:
        # Directory-backed pool.
        if not validates_pool_dir(self, now_pools):
            return web.badrequest(self.view.alert)

        extra_opts = {}

        if create_pool_dir_job(self,
                               model,
                               self.input.pool_name,
                               self.input.pool_type,
                               self.input.pool_target_path,
                               extra_opts) is True:
            self.logger.debug("Create dir storage pool success.")
            return web.accepted()
        else:
            # NOTE(review): returns False (not an HTTP error) on job
            # registration failure — presumably handled by the framework.
            self.logger.debug("Failed create DIR storage pool job.")
            return False

    elif self.input.pool_type == STORAGE_POOL_TYPE["TYPE_ISCSI"]:
        # iSCSI-backed pool.
        if not validates_pool_iscsi(self, now_pools):
            return web.badrequest(self.view.alert)

        extra_opts = {}

        network_storages = get_iscsi_cmd(self, host_id)
        if network_storages is False:
            self.logger.debug("Get iSCSI command failed. Return to timeout")
            return web.internalerror('Internal Server Error. (Timeout)')

        # Find the target whose IQN matches the request; disk_list is only
        # bound when a match is found (guarded by the None check below).
        pool_host_name = None
        pool_device_path = None
        for iscsi in network_storages:
            if self.input.pool_target_iscsi == iscsi["iqn"]:
                pool_host_name = iscsi["hostname"]
                pool_device_path = iscsi["iqn"]
                disk_list = iscsi["disk_list"]
                break

        if pool_host_name is None or pool_device_path is None:
            self.logger.debug("Failed create iSCSI storage pool. Target iSCSI device not found.")
            return web.badrequest()

        # Collect disks the user wants auto-mounted; mark those that
        # should also be formatted first.
        automount_list = []
        for disk in disk_list:
            if is_param(self.input, "iscsi-disk-use-type-%s" % (disk['symlink_name'])):
                if self.input["iscsi-disk-use-type-%s" % (disk['symlink_name'])] == "mount" and disk['is_partitionable'] is False:
                    if is_param(self.input, "iscsi-disk-format-%s" % (disk['symlink_name'])):
                        if self.input["iscsi-disk-format-%s" % (disk['symlink_name'])] == "true":
                            disk["is_format"] = True
                    automount_list.append(disk)

        if create_pool_iscsi_job(self,
                                 model,
                                 self.input.pool_name,
                                 self.input.pool_type,
                                 pool_host_name,
                                 pool_device_path,
                                 automount_list,
                                 extra_opts) is True:
            self.logger.debug("Create iSCSI storage pool success. name=%s" % (self.input.pool_name))
            return web.accepted()
        else:
            self.logger.debug("Failed create iSCSI storage pool job. name=%s" % (self.input.pool_name))
            return False
    else:
        self.logger.debug("Non-existent type. type=%s" % self.input.pool_type)
        return web.badrequest("Non-existent type. type=%s" % self.input.pool_type)
def _PUT(self, *param, **params): host_id = self.chk_hostby1(param) if host_id is None: return web.notfound() network_name = param[1] if not network_name: self.logger.debug("Network update failed. Network not found.") return web.notfound("Network not found.") if not validates_network(self, network_name=network_name): self.logger.debug("Network update failed. Did not validate.") return web.badrequest(self.view.alert) cidr = self.input.cidr dhcp_start = self.input.dhcp_start dhcp_end = self.input.dhcp_end bridge = self.input.bridge forward_mode = getattr(self.input, 'forward_mode', '') try: autostart = self.input.autostart except: autostart = "no" # # spin off update job # options = { 'name': network_name, 'cidr': cidr, 'dhcp-start': dhcp_start, 'dhcp-end': dhcp_end, 'bridge-name': bridge, 'forward-mode': forward_mode, 'autostart': autostart, } self.logger.debug('spinning off network_update_job options=%s' % (options)) host = findbyhost1(self.orm, host_id) _cmd = dict2command( "%s/%s" % (karesansui.config['application.bin.dir'], VIRT_COMMAND_UPDATE_NETWORK), options) # Job Registration _jobgroup = JobGroup('Update network: %s' % network_name, karesansui.sheconf['env.uniqkey']) _jobgroup.jobs.append(Job('Update network', 0, _cmd)) _machine2jobgroup = m2j_new( machine=host, jobgroup_id=-1, uniq_key=karesansui.sheconf['env.uniqkey'], created_user=self.me, modified_user=self.me, ) save_job_collaboration( self.orm, self.pysilhouette.orm, _machine2jobgroup, _jobgroup, ) self.logger.debug('(Update network) Job group id==%s', _jobgroup.id) url = '%s/job/%s.part' % (web.ctx.home, _jobgroup.id) self.logger.debug('Returning Location: %s' % url) return web.accepted(url=url)
def _GET(self, *param, **params):
    """Show one guest on a remote (URI-connected, attribute==2) host.

    param[0] = host id, param[1] = guest UUID.  Looks the guest up over an
    authenticated libvirt connection and exposes its status flags either as
    JSON or on self.view.  Returns True.
    """
    if self.input.has_key('job_id') is True:
        self.view.job_id = self.input.job_id
    else:
        self.view.job_id = None

    # Fix: removed dead `host_id = param[0]` — it was immediately
    # overwritten by chk_hostby1(param).
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()

    uri_id = param[1]
    if uri_id is None:
        return web.notfound()

    model = findbyhost1(self.orm, host_id)

    if self.is_mode_input() is False:
        if model.attribute == 2:
            info = {}
            # Credentials are embedded in the stored hostname URI.
            segs = uri_split(model.hostname)
            uri = uri_join(segs, without_auth=True)
            creds = ''
            if segs["user"] is not None:
                creds += segs["user"]
            if segs["passwd"] is not None:
                creds += ':' + segs["passwd"]

            self.kvc = KaresansuiVirtConnectionAuth(uri, creds)
            try:
                host = MergeHost(self.kvc, model)
                for guest in host.guests:
                    # NOTE(review): inside a class body `__guest` is
                    # name-mangled, so this locals() probe likely never
                    # matches — harmless (the search just repeats); kept
                    # as-is to preserve behavior.
                    if not '__guest' in locals():
                        _virt = self.kvc.search_kvg_guests(guest.info["model"].name)
                        if 0 < len(_virt):
                            for _v in _virt:
                                info = _v.get_info()
                                # "test:" URIs match any guest (test driver).
                                if info["uuid"] == uri_id or (uri[0:5] == "test:"):
                                    __guest = MergeGuest(guest.info["model"], _v)
                                    autostart = _v.autostart()
                                    status = _v.status()
                                    is_creatable = _v.is_creatable()
                                    is_shutdownable = _v.is_shutdownable()
                                    is_suspendable = _v.is_suspendable()
                                    is_resumable = _v.is_resumable()
                                    is_destroyable = _v.is_destroyable()
                                    is_active = _v.is_active()
                                    break

                if self.is_json() is True:
                    json_host = host.get_json(self.me.languages)
                    json_guest = __guest.get_json(self.me.languages)
                    self.view.data = json_dumps(
                        {
                            "parent_model": json_host["model"],
                            "parent_virt": json_host["virt"],
                            "model": json_guest["model"],
                            "virt": json_guest["virt"],
                            "info": info,
                            "autostart": autostart,
                            "status": status,
                            "is_creatable": is_creatable,
                            "is_shutdownable": is_shutdownable,
                            "is_suspendable": is_suspendable,
                            "is_resumable": is_resumable,
                            "is_destroyable": is_destroyable,
                            "is_active": is_active,
                        }
                    )
                else:
                    self.view.parent_model = host.info["model"]
                    self.view.parent_virt = host.info["virt"]
                    self.view.model = __guest.info["model"]
                    self.view.virt = __guest.info["virt"]
                    self.view.info = info
                    self.view.autostart = autostart
                    self.view.status = status
                    self.view.is_creatable = is_creatable
                    self.view.is_shutdownable = is_shutdownable
                    self.view.is_suspendable = is_suspendable
                    self.view.is_resumable = is_resumable
                    self.view.is_destroyable = is_destroyable
                    self.view.is_active = is_active
            finally:
                self.kvc.close()

    return True
class HostBy1NetworkStorage(Rest):
    """REST resource for a host's iSCSI network storage targets."""

    @auth
    def _GET(self, *param, **params):
        """List iSCSI targets, or return the empty form skeleton for
        mode=input.  param[0] = host id.
        """
        host_id = self.chk_hostby1(param)
        if host_id is None:
            return web.notfound()

        if self.is_mode_input() is True:
            # Input form: provide an empty template record.
            self.view.host_id = host_id
            self.view.info = {
                'type': "iSCSI",
                'hostname': "",
                'port': "3260",
                'tpgt': "",
                'iqn': "",
                'activity': "",
                'autostart': "",
                'auth': "",
                'user': "",
            }
            return True

        network_storages = get_iscsi_cmd(self, host_id)
        if network_storages is False:
            # Best effort: on timeout we log and render an empty list
            # instead of failing the whole page.
            self.logger.debug(
                "Get iSCSI List command failed. Return to timeout")
            #return web.internalerror('Internal Server Error. (Timeout)')
        self.view.network_storages = network_storages
        return True

    @auth
    def _POST(self, *param, **params):
        """Register a new iSCSI target via a background job.

        param[0] = host id.  CHAP passwords are passed through a temporary
        file when possible; falls back to a plain --password option if the
        file cannot be created.  Returns 202 Accepted.
        """
        host_id = self.chk_hostby1(param)
        if host_id is None:
            return web.notfound()

        if not validates_network_storage(self):
            self.logger.debug("Network storage add failed. Did not validate.")
            return web.badrequest(self.view.alert)

        hostname = self.input.network_storage_host_name
        port = self.input.network_storage_port_number
        auth = self.input.network_storage_authentication
        user = self.input.network_storage_user
        password = self.input.network_storage_password

        auto_start = False
        if is_param(self.input, 'network_storage_auto_start'):
            auto_start = True

        options = {'auth': auth}
        if port:
            options['target'] = "%s:%s" % (hostname, port)
        else:
            options['target'] = hostname

        if auth == ISCSI_CONFIG_VALUE_AUTH_METHOD_CHAP:
            options['user'] = user
            # Fix: define before the try so the except handler can log it
            # even when generate_phrase() itself raises.
            password_file_name = None
            try:
                password_file_name = '/tmp/' + generate_phrase(
                    12, 'abcdefghijklmnopqrstuvwxyz')
                create_file(password_file_name, password)
                options['password-file'] = password_file_name
            except Exception:
                self.logger.error(
                    'Failed to create tmp password file. - file=%s'
                    % (password_file_name))
                # Fall back to passing the password on the command line.
                options['password'] = password

        _cmd = dict2command(
            "%s/%s" % (karesansui.config['application.bin.dir'],
                       ISCSI_COMMAND_ADD), options)
        if auto_start:
            _cmd = _cmd + " --autostart"

        cmd_name = u'Add iSCSI'
        jobgroup = JobGroup(cmd_name, karesansui.sheconf['env.uniqkey'])
        jobgroup.jobs.append(Job('%s command' % cmd_name, 0, _cmd))

        host = findbyhost1(self.orm, host_id)
        _machine2jobgroup = m2j_new(
            machine=host,
            jobgroup_id=-1,
            uniq_key=karesansui.sheconf['env.uniqkey'],
            created_user=self.me,
            modified_user=self.me,
        )
        save_job_collaboration(
            self.orm,
            self.pysilhouette.orm,
            _machine2jobgroup,
            jobgroup,
        )
        return web.accepted()