def setRoot(self, root):
    """
    Store the package root path, then build and insert the deployment
    command in the MSC database.

    @param root: filesystem path of the package on the package server;
                 falsy when the path could not be resolved
    """
    logging.getLogger().debug(root)
    # A known package id with no resolvable path is a hard error.
    if self.pid != None and self.pid != '' and not root:
        return self.onError("Can't get path for package %s" % self.pid)
    self.root = root
    # If is an empty Package, avoid file uploading
    if self.pinfos['size'] == 0:
        self.pinfos['files'] = None
    # Prepare command parameters for database insertion
    cmd = prepareCommand(self.pinfos, self.params)
    # cmd['maxbw'] is in kbits, set in bits
    cmd['maxbw'] = int(cmd['maxbw']) * 1024
    # Expand pattern placeholders in the start file and extract the
    # post-execution actions (reboot/halt/WOL/inventory).
    cmd['start_file'], patternActions = MscDatabase().applyCmdPatterns(
        cmd['start_file'],
        {
            'do_reboot': cmd['do_reboot'],
            'do_halt': cmd['issue_halt_to'],
            'do_wol': cmd['do_wol'],
            'do_inventory': cmd['do_inventory'],
        }
    )
    addCmd = MscDatabase().addCommand(  # TODO: refactor to get less args
        self.ctx,
        self.pid,
        cmd['start_file'],
        cmd['parameters'],
        cmd['files'],
        self.targets,  # TODO : need to convert array into something that we can get back ...
        self.mode,
        self.gid,
        cmd['start_script'],
        cmd['clean_on_success'],
        cmd['start_date'],
        cmd['end_date'],
        "root",  # TODO: may use another login name
        cmd['title'],
        patternActions['do_halt'],
        patternActions['do_reboot'],
        patternActions['do_wol'],
        cmd['next_connection_delay'],
        cmd['max_connection_attempt'],
        patternActions['do_inventory'],
        cmd['maxbw'],
        self.root,
        cmd['deployment_intervals'],
        self.bundle_id,
        self.order_in_bundle,
        cmd['proxy_mode'],
        self.proxies,
        cmd['state']
    )
    # addCommand returns an int error code on failure, a Deferred otherwise.
    if type(addCmd) != int:
        addCmd.addCallbacks(self.sendResult, self.onError)
    else:
        self.onError('Error while creating the command')
def add_command_quick(self, cmd, target, desc, gid=None):
    """
    Deprecated
    """
    context = self.currentContext
    deferred = MscDatabase().addCommandQuick(context, cmd, target, desc, gid)
    # Clean the result for XML-RPC transport; pass failures through untouched.
    deferred.addCallbacks(xmlrpcCleanup, lambda err: err)
    return deferred
def add_command_quick(self, cmd, target, desc, gid = None):
    """
    Deprecated
    """
    # NOTE(review): another add_command_quick definition with the same body
    # exists in this module; the later one wins at class-creation time.
    ctx = self.currentContext
    d = MscDatabase().addCommandQuick(ctx, cmd, target, desc, gid)
    # Clean the result for XML-RPC transport; errors are passed through.
    d.addCallbacks(xmlrpcCleanup, lambda err: err)
    return d
def setRoot(self, root):
    """
    Store the package root path, then build and insert the deployment
    command in the MSC database.

    @param root: filesystem path of the package on the package server;
                 falsy when the path could not be resolved
    """
    logging.getLogger().debug(root)
    # A known package id with no resolvable path is a hard error.
    if self.pid != None and self.pid != '' and not root:
        return self.onError("Can't get path for package %s" % self.pid)
    self.root = root
    # If is an empty Package, avoid file uploading
    if self.pinfos['size'] == 0:
        self.pinfos['files'] = None
    # Prepare command parameters for database insertion
    cmd = prepareCommand(self.pinfos, self.params)
    # cmd['maxbw'] is in kbits, set in bits
    cmd['maxbw'] = int(cmd['maxbw']) * 1024
    # Expand pattern placeholders in the start file and extract the
    # post-execution actions (reboot/halt/WOL/inventory).
    cmd['start_file'], patternActions = MscDatabase().applyCmdPatterns(
        cmd['start_file'], {
            'do_reboot': cmd['do_reboot'],
            'do_halt': cmd['issue_halt_to'],
            'do_wol': cmd['do_wol'],
            'do_inventory': cmd['do_inventory'],
        })
    addCmd = MscDatabase().addCommand(  # TODO: refactor to get less args
        self.ctx,
        self.pid,
        cmd['start_file'],
        cmd['parameters'],
        cmd['files'],
        self.targets,  # TODO : need to convert array into something that we can get back ...
        self.mode,
        self.gid,
        cmd['start_script'],
        cmd['clean_on_success'],
        cmd['start_date'],
        cmd['end_date'],
        "root",  # TODO: may use another login name
        cmd['title'],
        patternActions['do_halt'],
        patternActions['do_reboot'],
        patternActions['do_wol'],
        cmd['next_connection_delay'],
        cmd['max_connection_attempt'],
        patternActions['do_inventory'],
        cmd['maxbw'],
        self.root,
        cmd['deployment_intervals'],
        self.bundle_id,
        self.order_in_bundle,
        cmd['proxy_mode'],
        self.proxies,
        cmd['state'])
    # addCommand returns an int error code on failure, a Deferred otherwise.
    if type(addCmd) != int:
        addCmd.addCallbacks(self.sendResult, self.onError)
    else:
        self.onError('Error while creating the command')
def action_on_bundle(id, f_name, f_database, f_scheduler):
    """
    Apply an action to a whole bundle: record it in the database, then
    propagate it to every scheduler handling part of the bundle.
    """
    # Update command in database
    getattr(MscDatabase(), f_database)(id)
    # Stop related commands_on_host on related schedulers
    scheduler_map = MscDatabase().getCommandsonhostsAndSchedulersOnBundle(id)
    log = logging.getLogger()
    for scheduler_name in scheduler_map:
        deferred = getattr(mmc.plugins.msc.client.scheduler, f_scheduler)(
            scheduler_name, scheduler_map[scheduler_name])
        deferred.addErrback(lambda err: log.error("%s: " % (f_name) + str(err)))
def checkPool():
    """
    Check the health of the SQL connection pool.

    When the pool has consumed all its overflow connections (usually a
    symptom of lost connectivity to the database), it is disposed and
    recreated.

    @return: True when the pool looks sane, False otherwise
    """
    ret = True
    try:
        pool = MscDatabase().db.pool
        # _overflow reaching _max_overflow means every connection, including
        # overflow ones, is checked out: likely a connectivity problem.
        if pool._max_overflow > -1 and pool._overflow >= pool._max_overflow:
            logging.getLogger().error('msc plugin: CHECK: NOK: timeout then overflow (%d vs. %d) detected in SQL pool : check your network connectivity !' % (pool._overflow, pool._max_overflow))
            pool.dispose()
            pool = pool.recreate()
            ret = False
    except Exception as e:
        logging.getLogger().warn('msc plugin: CHECK: NOK: got the following error : %s' % (e))
        ret = False
    # BUG FIX: the original never returned, so callers always got None.
    return ret
def scheduler_select(result):
    # NOTE(review): this function reads `cmd_id`, `start_date` and `end_date`
    # from an enclosing scope — it is meant to run as a Deferred callback
    # nested inside extend_command (see the full extend_command definition in
    # this module); confirm before moving it to module level.
    dl = []
    # Map of scheduler -> commands_on_host for the command being extended.
    schedulers = MscDatabase().getCommandsonhostsAndSchedulers(cmd_id)
    method = mmc.plugins.msc.client.scheduler.extend_command
    for scheduler in schedulers.keys():
        d_scheduler = method(scheduler, cmd_id, start_date, end_date)
        dl.append(d_scheduler)
    # Fire when every scheduler has acknowledged the new dates.
    return defer.DeferredList(dl)
def createBundle(self):
    """
    Create a bundle of commands from the prepared package orders:
    insert the bundle row, build one command per package in self.porders,
    then insert them all with a single addCommands() call.
    """
    # treat bundle title
    try:
        title = self.params['bundle_title']
    except:
        title = ''  # ie. "no title"
    self.params['bundle_title'] = None
    if title == None or title == '':
        title = get_default_bundle_name(len(self.porders))
    # Insert bundle object
    self.session = create_session()
    bundle = MscDatabase().createBundle(title, self.session)
    commands = []
    for p in self.porders:
        p_api, pid, order = self.porders[p]
        pinfos = self.packages[pid]
        ppath = self.ppaths[pid]
        params = self.params.copy()
        # Wake-on-LAN only makes sense for the first package of the bundle.
        if int(order) == int(self.first_order):
            params['do_wol'] = self.do_wol
        else:
            params['do_wol'] = 'off'
        # Inventory / halt only after the last package of the bundle.
        if int(order) == int(self.last_order):
            params['do_inventory'] = self.do_inventory
            params['issue_halt_to'] = self.issue_halt_to
        else:
            params['do_inventory'] = 'off'
            params['issue_halt_to'] = ''
        # override possible choice of do_reboot from the gui by the one declared in the package
        # (in bundle mode, the gui does not offer enough choice to say when to reboot)
        params['do_reboot'] = pinfos['do_reboot']
        cmd = prepareCommand(pinfos, params)
        command = cmd.copy()
        command['package_id'] = pid
        command['connect_as'] = 'root'
        command['mode'] = self.mode
        command['root'] = ppath
        command['order_in_bundle'] = order
        command['proxies'] = self.proxies
        command['fk_bundle'] = bundle.id
        commands.append(command)
    add = MscDatabase().addCommands(self.ctx, self.session, self.targets, commands, self.gid)
    # addCommands returns an int error code on failure, a Deferred otherwise.
    if type(add) != int:
        add.addCallbacks(self.sendResult, self.onError)
    else:
        self.onError("Error while creating the bundle")
def createBundle(self):
    """
    Create a bundle of commands from the prepared package orders:
    insert the bundle row, build one command per package in self.porders,
    then insert them all with a single addCommands() call.
    """
    # treat bundle title
    try:
        title = self.params['bundle_title']
    except:
        title = ''  # ie. "no title"
    self.params['bundle_title'] = None
    if title == None or title == '':
        title = get_default_bundle_name(len(self.porders))
    # Insert bundle object
    self.session = create_session()
    bundle = MscDatabase().createBundle(title, self.session)
    # NOTE: a dead `bundle_id = bundle.id` local was removed here; the id is
    # read directly from `bundle.id` where needed.
    commands = []
    for p in self.porders:
        p_api, pid, order = self.porders[p]
        pinfos = self.packages[pid]
        ppath = self.ppaths[pid]
        params = self.params.copy()
        # Wake-on-LAN only makes sense for the first package of the bundle.
        if int(order) == int(self.first_order):
            params['do_wol'] = self.do_wol
        else:
            params['do_wol'] = 'off'
        # Inventory / halt only after the last package of the bundle.
        if int(order) == int(self.last_order):
            params['do_inventory'] = self.do_inventory
            params['issue_halt_to'] = self.issue_halt_to
        else:
            params['do_inventory'] = 'off'
            params['issue_halt_to'] = ''
        # override possible choice of do_reboot from the gui by the one declared in the package
        # (in bundle mode, the gui does not offer enough choice to say when to reboot)
        params['do_reboot'] = pinfos['do_reboot']
        cmd = prepareCommand(pinfos, params)
        command = cmd.copy()
        command['package_id'] = pid
        command['connect_as'] = 'root'
        command['mode'] = self.mode
        command['root'] = ppath
        command['order_in_bundle'] = order
        command['proxies'] = self.proxies
        command['fk_bundle'] = bundle.id
        commands.append(command)
    add = MscDatabase().addCommands(self.ctx, self.session, self.targets, commands, self.gid)
    # addCommands returns an int error code on failure, a Deferred otherwise.
    if type(add) != int:
        add.addCallbacks(self.sendResult, self.onError)
    else:
        self.onError("Error while creating the bundle")
def checkPool():
    """
    Check the health of the SQL connection pool; dispose and recreate it
    when overflow exhaustion (a connectivity symptom) is detected.

    @return: True when the pool looks sane, False otherwise
    """
    ret = True
    try:
        pool = MscDatabase().db.pool
        # All overflow connections checked out => likely connectivity issue.
        if pool._max_overflow > -1 and pool._overflow >= pool._max_overflow:
            logging.getLogger().error(
                'msc plugin: CHECK: NOK: timeout then overflow (%d vs. %d) detected in SQL pool : check your network connectivity !'
                % (pool._overflow, pool._max_overflow))
            pool.dispose()
            pool = pool.recreate()
            ret = False
    except Exception as e:
        logging.getLogger().warn(
            'msc plugin: CHECK: NOK: got the following error : %s' % (e))
        ret = False
    # BUG FIX: the original never returned, so callers always got None.
    return ret
def add_command_quick_with_id(self, idcmd, target, lang, gid=None):
    """
    @param idcmd: id of the quick action
    @type idcmd: str

    @param target: targets, list of computers UUIDs
    @type target: list

    @param lang: language to use for the command title (two characters)
    @type lang: str

    @param gid: if not None, apply command to a group of machine
    @type gid: str

    @return: a Deferred firing with the new command id, or -1 when the
             quick action is unknown
    """
    ctx = self.currentContext
    # Load the quick actions available on disk.
    result, qas = qa_list_files()
    if result and idcmd in qas:
        # Prefer the localized title, fall back to the default one.
        try:
            desc = qas[idcmd]["title" + lang]
        except KeyError:
            desc = qas[idcmd]["title"]
        if gid:
            # Get all targets corresponding to the computer given group ID
            target = ComputerGroupManager().get_group_results(
                ctx, gid, 0, -1, '', True)
        # Use maybeDeferred because addCommandQuick will return an error
        # code in case of failure
        d = defer.maybeDeferred(MscDatabase().addCommandQuick, ctx,
                                qas[idcmd]["command"], target, desc, gid)
        d.addCallback(xmlrpcCleanup)
        ret = d
    else:
        ret = -1
    return ret
def activate():
    """
    Run some tests to ensure the module is ready to operate.

    @return: True when the plugin can be activated, False otherwise
    """
    config = MscConfig()
    config.init("msc")
    logger = logging.getLogger()
    # Guard clauses: each failed precondition deactivates the plugin.
    if config.disable:
        logger.warning("Plugin msc: disabled by configuration.")
        return False
    if not os.path.isdir(config.qactionspath):
        logger.error(
            "Quick Actions config is invalid: %s is not a directory. Please check msc.ini."
            % config.qactionspath)
        return False
    if not MscDatabase().activate(config):
        return False
    # Optional periodic database health check.
    if config.check_db_enable:
        scheduleCheckStatus(config.check_db_interval)
    # Add convergence reschedule task in the task manager
    TaskManager().addTask("msc.convergence_reschedule",
                          (convergence_reschedule, ),
                          cron_expression=config.convergence_reschedule)
    return True
def get_pull_targets(self):
    """
    Fetch the UUIDs of all pull-mode targets.

    @return: list
    """
    database = MscDatabase()
    return xmlrpcCleanup(database.getPullTargets())
def delete_bundle(self, bundle_id):
    """
    Remove a bundle and every element attached to it.

    @param bundle_id: Bundle id
    @type bundle_id: int
    """
    database = MscDatabase()
    return database.deleteBundle(bundle_id)
def delete_command(self, cmd_id):
    """
    Remove a command and every element attached to it.

    @param cmd_id: Commands id
    @type cmd_id: int
    """
    database = MscDatabase()
    return database.deleteCommand(cmd_id)
def get_all_commandsonhost_by_currentstate(self, current_state, min=0, max=10, filt=''):
    """
    Return the commands-on-host entries in *current_state*, paginated.

    NOTE: `min`/`max` shadow builtins but are part of the public signature.
    """
    context = self.currentContext
    rows = MscDatabase().getAllCommandsonhostByCurrentstate(
        context, current_state, min, max, filt)
    return xmlrpcCleanup(rows)
def delete_command_on_host(self, coh_id):
    """
    Remove a command-on-host and every element attached to it.

    @param coh_id: CommandsOnHost id
    @type coh_id: int
    """
    database = MscDatabase()
    return database.deleteCommandOnHost(coh_id)
def createBundle(self):
    """
    Create a bundle of commands from the prepared package orders:
    insert the bundle row, build one command per package in self.porders,
    then insert them all with a single addCommands() call.
    """
    # treat bundle title
    try:
        title = self.params["bundle_title"]
    except:
        title = ""  # ie. "no title"
    self.params["bundle_title"] = None
    if title == None or title == "":
        title = get_default_bundle_name(len(self.porders))
    # Insert bundle object
    self.session = create_session()
    bundle = MscDatabase().createBundle(title, self.session)
    commands = []
    for p in self.porders:
        p_api, pid, order = self.porders[p]
        pinfos = self.packages[pid]
        ppath = self.ppaths[pid]
        params = self.params.copy()
        # Wake-on-LAN only makes sense for the first package of the bundle.
        if int(order) == int(self.first_order):
            params["do_wol"] = self.do_wol
        else:
            params["do_wol"] = "off"
        # override possible choice of do_reboot from the gui by the one declared in the package
        # (in bundle mode, the gui does not offer enough choice to say when to reboot)
        params["do_reboot"] = pinfos["do_reboot"]
        cmd = prepareCommand(pinfos, params)
        command = cmd.copy()
        command["package_id"] = pid
        command["connect_as"] = "root"
        command["mode"] = self.mode
        command["root"] = ppath
        command["order_in_bundle"] = order
        command["proxies"] = self.proxies
        command["fk_bundle"] = bundle.id
        # Windows Update handling is never part of a bundle command.
        command["do_windows_update"] = "disable"
        commands.append(command)
    add = MscDatabase().addCommands(self.ctx, self.session, self.targets, commands, self.gid)
    # addCommands returns an int error code on failure, a Deferred otherwise.
    if type(add) != int:
        add.addCallbacks(self.sendResult, self.onError)
    else:
        self.onError("Error while creating the bundle")
def _add_machines_to_convergence_command(ctx, cmd_id, new_machine_ids,
                                         convergence_group_id, phases=None):
    """
    Add machines to an existing convergence command.

    @param ctx: security context
    @param cmd_id: id of the convergence command
    @param new_machine_ids: UUIDs of the machines to add
    @param convergence_group_id: id of the convergence group
    @param phases: optional per-phase settings forwarded to the database layer
    """
    # BUG FIX: the original used a mutable default argument (phases={});
    # use a None sentinel and build a fresh dict per call instead.
    if phases is None:
        phases = {}
    return MscDatabase().addMachinesToCommand(ctx, cmd_id, new_machine_ids,
                                              convergence_group_id,
                                              phases=phases)
def is_pull_target(self, uuid):
    """
    Tell whether the machine is a known pull-mode client.

    @param uuid: computer UUID
    @type uuid: str

    @return: bool
    """
    database = MscDatabase()
    return xmlrpcCleanup(database.isPullTarget(uuid))
def checkLightPullCommands(self, uuid):
    """
    List the commands-on-host ids to re-execute for a pull client.

    @param uuid: uuid of checked computer
    @type uuid: str

    @return: coh ids to start
    @rtype: list
    """
    database = MscDatabase()
    return xmlrpcCleanup(database.checkLightPullCommands(uuid))
def remove_pull_targets(self, uuids):
    """
    Remove pull-mode targets.

    @param uuids: a list of uuids to remove (a single uuid string is
                  accepted as well)
    @type uuids: list or str

    @return: True or False :-)
    """
    # Normalize a lone UUID string into a one-element list.
    if isinstance(uuids, basestring):
        uuids = [uuids]
    result = MscDatabase().removePullTargets(uuids)
    return xmlrpcCleanup(result)
def update_target_ip(self, uuid, ip):
    """
    Refresh the IP address stored for every record matching a UUID.

    @param uuid: UUID of machine
    @type uuid: str

    @param ip: actualized IP address of machine
    @type ip: str
    """
    database = MscDatabase()
    return xmlrpcCleanup(database.updateTargetIP(uuid, ip))
def create_update_command(ctx, target, update_list, gid=None):
    """
    Create the Windows Update command.

    @param target: list of target UUIDs
    @type target: list

    @param update_list: list of KB numbers to install
    @type update_list: list

    @param gid: group id - if not None, apply command to a group of machine
    @type gid: str

    @return: command id
    @rtype: Deferred
    """
    # Build the client-side command line: install the requested KBs (if any),
    # then always refresh the local update inventory (`-l --json`).
    if update_list:
        cmd = "%s -i %s" % (MscConfig().wu_command, " ".join(update_list))
    else:
        cmd = ''
    cmd = cmd + ("\n%s -l --json" % MscConfig().wu_command)
    desc = "Install Windows Updates"
    if gid:
        # Expand the group into its member UUIDs.
        target = ComputerGroupManager().get_group_results(
            ctx, gid, 0, -1, '', True)
    do_wol = "disable"
    if MscConfig().web_def_awake == 1:
        do_wol = "enable"
    # set end_date to now() + 24H Refs #2313
    fmt = "%Y-%m-%d %H:%M:%S"
    end_date = (datetime.datetime.now() +
                datetime.timedelta(days=1)).strftime(fmt)
    # maybeDeferred: addCommand may return an int error code on failure.
    d = defer.maybeDeferred(MscDatabase().addCommand, ctx, None, cmd, "", [],
                            target, 'push', gid, end_date=end_date,
                            title=desc, do_wol=do_wol,
                            do_windows_update="enable", cmd_type=4)
    d.addCallback(xmlrpcCleanup)
    return d
def expire_all_package_commands(self, pid):
    """
    Expire every command created from a given package; used when the
    package itself is dropped.

    @param pid: uuid of dropped package
    @type pid: uuid
    """
    log = logging.getLogger()
    # Map of cmd_id -> start_date for all commands built from this package.
    cmds = MscDatabase().get_package_cmds(pid)
    if cmds:
        log.info('%d command will be expired' % len(cmds))
        for cmd_id, start_date in cmds.items():
            log.info('Expires command %d' % cmd_id)
            # End date is "now", computed per command exactly as before.
            end_date = time.strftime("%Y-%m-%d %H:%M:%S")
            self.extend_command(cmd_id, start_date, end_date)
    # Drop any convergence groups attached to the package.
    DyngroupDatabase().delete_package_convergence(pid)
    return True
def get_all_commands_for_consult(self, min=0, max=10, filt='', expired=True):
    """
    Return a page of commands for the consult view, resolving each
    command's target (group name or machine) with per-call caching.

    @return: (total size, list of command dicts) cleaned for XML-RPC
    """
    ctx = self.currentContext
    size, ret1 = MscDatabase().getAllCommandsConsult(
        ctx, min, max, filt, expired)
    ret = []
    logger = logging.getLogger()
    cache = {}
    # Hoisted out of the loop: the pattern is invariant. Strips the date
    # suffix, e.g. "Bundle (1) - 2009/12/14 10:22:24" => "Bundle (1)".
    date_re = re.compile(r" - \d\d\d\d/\d\d/\d\d \d\d:\d\d:\d\d")
    for c in ret1:
        if c['gid']:
            # Modernized from deprecated dict.has_key() to `in`.
            if "G%s" % (c['gid']) in cache:
                c['target'] = cache["G%s" % (c['gid'])]
            else:
                group = DyngroupDatabase().get_group(ctx, c['gid'], True)
                if type(group) == bool:
                    # we dont have the permission to view the group
                    c['target'] = 'UNVISIBLEGROUP'  # TODO!
                elif group == None:
                    c['target'] = 'this group has been deleted'
                elif hasattr(group, 'ro') and group.ro:
                    logger.debug("user %s access to group %s in RO mode"
                                 % (ctx.userid, group.name))
                    c['target'] = group.name
                else:
                    c['target'] = group.name
                cache["G%s" % (c['gid'])] = c['target']
        else:
            if "M%s" % (c['uuid']) in cache:
                c['target'] = cache["M%s" % (c['uuid'])]
            else:
                if not ComputerLocationManager(
                ).doesUserHaveAccessToMachine(ctx, c['uuid']):
                    c['target'] = "UNVISIBLEMACHINE"
                elif not ComputerManager().getComputer(
                        ctx, {'uuid': c['uuid']}):
                    c['target'] = "UNVISIBLEMACHINE"
                # NOTE(review): when the machine is visible and found,
                # c['target'] is not set here — presumably the database layer
                # already filled it; confirm against getAllCommandsConsult.
                cache["M%s" % (c['uuid'])] = c['target']
        c['title'] = date_re.sub('', c['title'])
        ret.append(c)
    return xmlrpcCleanup((size, ret))
def start_these_commands(scheduler, commands):
    """
    Ask each scheduler owning part of *commands* to start them, contacting
    every distinct scheduler only once.

    @param scheduler: unused; kept for backward compatibility with callers
    @param commands: list of command ids to start
    """
    database = MscDatabase()
    session = create_session()
    try:
        coh_query = session.query(CommandsOnHost).filter(
            database.commands_on_host.c.fk_commands.in_(commands))
        done = []
        for coh in coh_query.all():
            # BUG FIX: the loop used to clobber the `scheduler` parameter;
            # renamed the loop variable for clarity (behavior unchanged,
            # the parameter was never read before being overwritten).
            coh_scheduler = coh.scheduler
            if not coh_scheduler in done:
                logging.getLogger().debug(
                    'Starting command on scheduler %s' % coh_scheduler)
                done.append(coh_scheduler)
                getProxy(__select_scheduler(coh_scheduler)).callRemote(
                    'start_these_commands', commands)
    finally:
        # Always release the SQLAlchemy session, even if a remote call fails.
        session.close()
def extend_command(self, cmd_id, start_date, end_date):
    """
    Custom command re-scheduling.

    @param cmd_id: Commands id
    @type cmd_id: int

    @param start_date: new start date of command
    @type start_date: str

    @param end_date: new end date of command
    @type end_date: str
    """
    # maybeDeferred: the database call may return an error code directly.
    return defer.maybeDeferred(MscDatabase().extend_command, cmd_id,
                               start_date, end_date)
def _get_convergence_soon_ended_commands(all=False):
    """
    @param all: If True, get all convergence active commands
    @type all: Bool

    @return: list of soon ended convergence commands
    @rtype: list
    """
    # NOTE(review): `all` shadows the builtin, but renaming it would break
    # callers passing it by keyword.
    ret = []
    active_convergence_cmd_ids = DyngroupDatabase()._get_convergence_active_commands_ids()
    if all:
        # Return all active_convergence_cmd_ids
        # NOTE(review): this early return skips xmlrpcCleanup, unlike the
        # normal path below — confirm callers expect the raw value.
        return active_convergence_cmd_ids
    elif active_convergence_cmd_ids:
        # Get active_convergence_cmd_ids who are soon expired
        ret = MscDatabase()._get_convergence_soon_ended_commands(cmd_ids=active_convergence_cmd_ids)
    return xmlrpcCleanup(ret)
def extend_command(self, cmd_id, start_date, end_date):
    """
    Custom command re-scheduling: update the dates in the database, then
    notify every scheduler involved in the command.

    @param cmd_id: Commands id
    @type cmd_id: int

    @param start_date: new start date of command
    @type start_date: str

    @param end_date: new end date of command
    @type end_date: str
    """
    d = defer.maybeDeferred(MscDatabase().extend_command, cmd_id,
                            start_date, end_date)

    # Decorator form of d.addCallback(scheduler_select); note addCallback
    # returns the deferred itself, so the local name is rebound to `d`.
    @d.addCallback
    def scheduler_select(result):
        dl = []
        schedulers = MscDatabase().getCommandsonhostsAndSchedulers(cmd_id)
        method = mmc.plugins.msc.client.scheduler.extend_command
        for scheduler in schedulers.keys():
            d_scheduler = method(scheduler, cmd_id, start_date, end_date)
            dl.append(d_scheduler)
        # Fires once every scheduler has been notified.
        return defer.DeferredList(dl)

    @d.addErrback
    def scheduler_call(failure):
        # Best-effort: log and swallow scheduler notification failures.
        logging.getLogger().warn("Command extend signal sending failed: %s"
                                 % str(failure))

    return d
def setRoot(self, root):
    """
    Store the package root path, then build and insert the deployment
    command in the MSC database.

    @param root: filesystem path of the package on the package server;
                 falsy when the path could not be resolved
    """
    logging.getLogger().debug(root)
    # A known package id with no resolvable path is a hard error.
    if self.pid != None and self.pid != "" and not root:
        return self.onError("Can't get path for package %s" % self.pid)
    self.root = root
    # If is an empty Package, avoid file uploading
    if "size" in self.pinfos:
        if self.pinfos["size"] == 0:
            self.pinfos["files"] = None
    # Prepare command parameters for database insertion
    cmd = prepareCommand(self.pinfos, self.params)
    # cmd['maxbw'] is in kbits, set in bits
    cmd["maxbw"] = int(cmd["maxbw"]) * 1024
    # These two actions are never requested through this path.
    cmd["do_wol_with_imaging"] = "disable"
    cmd["do_windows_update"] = "disable"
    # Expand pattern placeholders in the start file and extract the
    # post-execution actions.
    _patterns = {
        "do_reboot": cmd["do_reboot"],
        "do_halt": cmd["issue_halt_to"],
        "do_wol": cmd["do_wol"],
        "do_wol_with_imaging": cmd["do_wol_with_imaging"],
        "do_windows_update": cmd["do_windows_update"],
        "do_inventory": cmd["do_inventory"],
    }
    cmd["start_file"], patternActions = MscDatabase().applyCmdPatterns(
        cmd["start_file"], _patterns)
    addCmd = MscDatabase().addCommand(  # TODO: refactor to get less args
        self.ctx,
        self.pid,
        cmd["start_file"],
        cmd["parameters"],
        cmd["files"],
        self.targets,  # TODO : need to convert array into something that we can get back ...
        self.mode,
        self.gid,
        cmd["start_script"],
        cmd["clean_on_success"],
        cmd["start_date"],
        cmd["end_date"],
        "root",  # TODO: may use another login name
        cmd["title"],
        patternActions["do_halt"],
        patternActions["do_reboot"],
        patternActions["do_wol"],
        patternActions["do_wol_with_imaging"],
        patternActions["do_windows_update"],
        cmd["next_connection_delay"],
        cmd["max_connection_attempt"],
        patternActions["do_inventory"],
        cmd["maxbw"],
        self.root,
        cmd["deployment_intervals"],
        self.bundle_id,
        self.order_in_bundle,
        cmd["proxy_mode"],
        self.proxies,
        cmd["state"],
        cmd_type=self.cmd_type,
    )
    # addCommand returns an int error code on failure, a Deferred otherwise.
    if type(addCmd) != int:
        addCmd.addCallbacks(self.sendResult, self.onError)
    else:
        self.onError("Error while creating the command")
def action_on_command(id, f_name, f_database, f_scheduler):
    """
    Record an action on a command by calling the *f_database* method of
    the MSC database with the command id.

    NOTE: f_name and f_scheduler are unused in this visible body.
    """
    # Update command in database
    database_action = getattr(MscDatabase(), f_database)
    database_action(id)
def getMachineNamesOnBundleStatus(self, bundle_id, state):
    """
    List machine names of a bundle filtered by status, capped by the
    dyngroup static-list limit.
    """
    context = self.currentContext
    cap = DGConfig().maxElementsForStaticList
    names = MscDatabase().getMachineNamesOnBundleStatus(
        context, bundle_id, state, cap)
    return xmlrpcCleanup(names)
def get_array_last_commands_on_cmd_id_start_end(self, array_cmd_id):
    """
    Fetch last commands (with start/end) for an array of command ids.
    """
    context = self.currentContext
    rows = MscDatabase().getarrayLastCommandsOncmd_id_start_end(
        context, array_cmd_id)
    return xmlrpcCleanup2(rows)
def get_last_commands_on_cmd_id(self, cmd_id):
    """
    Fetch the last commands for a single command id.
    """
    context = self.currentContext
    rows = MscDatabase().getLastCommandsOncmd_id(context, cmd_id)
    return xmlrpcCleanup2(rows)
def getarraystatbycmd(self, arraycmd_id):
    """
    Fetch statistics for an array of command ids.
    """
    context = self.currentContext
    stats = MscDatabase().getarraystatbycmd(context, arraycmd_id)
    return xmlrpcCleanup2(stats)