def executeBashAsync(self, cmds, path=None, gid=None, nid=None, roles=None, allnodes=True, timeout=5, tags=None):
    """
    Same as executeAsync but `cmds` can be a full bash script instead of a single command.

    :param cmds: Bash script content to execute (fed to the 'bash' command)
    :param path: Working directory of the script
    :param gid: GID of node
    :param nid: NID of node
    :param roles: List of agent roles to match (default: no extra role filtering)
    :param allnodes: Execute on ALL agents that match the roles; if False only one agent executes
    :param timeout: Max run time in seconds before the process is killed
    :param tags: Tags string attached to the command and returned with the results as is
    :return: list of job objects for the scheduled executions
    """
    # Default changed from the shared mutable `roles=[]` to None to avoid the
    # classic mutable-default pitfall; behavior for all callers is unchanged.
    roles = [] if roles is None else roles
    nid, roles, fanout = self._get_route(nid, roles, allnodes)
    runargs = acclient.RunArgs(max_time=timeout, working_dir=path)
    command = self._client.cmd(gid, nid, 'bash', args=runargs, data=cmds, roles=roles, fanout=fanout, tags=tags)
    return list(command.get_jobs().values())
def create_share(self, gid, nid, name, path, readonly=True, ignore=None):
    """
    Creates a share

    :param gid: Grid id
    :param nid: Node id
    :param name: Share name
    :param path: Share full path on node
    :param readonly: If true this share is master and other peers can't upload to it.
    :param ignore: A list of patterns (files names) to ignore (ex ['**.pyc'])
    :return: a share object
    """
    # Default changed from the mutable `ignore=[]` to None; behavior unchanged.
    ignore = [] if ignore is None else ignore
    data = {
        'path': path,
        'readonly': readonly,
        'ignore': ignore,
        'name': name
    }
    runargs = acclient.RunArgs(name='create_share', max_time=SyncClient.API_TIMEOUT)
    command = self._client.cmd(gid, nid, 'sync', args=runargs, data=j.data.serializer.json.dumps(data))
    job = command.get_next_result(SyncClient.API_TIMEOUT)
    folder = self._client._load_json_or_die(job)
    return Share(gid, nid, folder, self)
def _get_devices(self, gid, nid):
    """
    Fetch the sync devices known to a node.

    :param gid: Grid id
    :param nid: Node id
    :return: dict mapping deviceID -> device info dict as returned by the node
    """
    runargs = acclient.RunArgs(name='list_devices', max_time=SyncClient.API_TIMEOUT)
    command = self._client.cmd(gid, nid, 'sync', args=runargs)
    job = command.get_next_result(SyncClient.API_TIMEOUT)
    devices = self._client._load_json_or_die(job)
    # Dict comprehension instead of dict([(k, v) for ...]) — same result,
    # clearer and avoids building an intermediate list (flake8-comprehensions C404).
    return {d['deviceID']: d for d in devices}
def execute(self, id, cron, cmd, path=None, gid=None, nid=None, roles=None, allnodes=True, timeout=5, data=None, tags=None):
    """
    Schedule execution of command according to cron

    Cron expression represents a set of times, using 6 space-separated fields.

    +--------------+------------+-----------------+----------------------------+
    | Field name   | Mandatory? | Allowed values  | Allowed special characters |
    +==============+============+=================+============================+
    | Seconds      | Yes        | 0-59            | * / , -                    |
    +--------------+------------+-----------------+----------------------------+
    | Minutes      | Yes        | 0-59            | * / , -                    |
    +--------------+------------+-----------------+----------------------------+
    | Hours        | Yes        | 0-23            | * / , -                    |
    +--------------+------------+-----------------+----------------------------+
    | Day of month | Yes        | 1-31            | * / , - ?                  |
    +--------------+------------+-----------------+----------------------------+
    | Month        | Yes        | 1-12 or JAN-DEC | * / , -                    |
    +--------------+------------+-----------------+----------------------------+
    | Day of week  | Yes        | 0-6 or SUN-SAT  | * / , - ?                  |
    +--------------+------------+-----------------+----------------------------+

    Note: Month and Day-of-week field values are case insensitive.
    "SUN", "Sun", and "sun" are equally accepted.

    :param id: Cron job id, must be unique globally and can be used later to stop the schedule
    :param cron: Cron string to run the task
    :param cmd: Command line to execute, split with shell rules (ex 'ls -l /opt')
    :param path: Working directory of the command
    :param gid: GID of node
    :param nid: NID of node
    :param roles: List of agent roles to match (default: no extra role filtering)
    :param allnodes: Schedule on ALL agents matching the roles; if False only one agent
    :param timeout: Max run time in seconds before the process is killed
    :param data: raw data that will be fed to command stdin
    :param tags: Tags string attached to the command and returned with the results as is
    """
    # Default changed from the shared mutable `roles=[]` to None; behavior unchanged.
    roles = [] if roles is None else roles
    parts = shlex.split(cmd)
    assert len(parts) > 0, "Empty command string"
    cmd = parts[0]
    args = parts[1:]
    nid, roles, fanout = self._simple._get_route(nid, roles, allnodes)
    runargs = acclient.RunArgs(max_time=timeout, working_dir=path, name=cmd, args=args)
    return self._client.schedule_add(id=id, cron=cron, gid=gid, nid=nid, cmd=acclient.CMD_EXECUTE, args=runargs, roles=roles, fanout=fanout, data=data, tags=tags)
def _get_need(self, gid, nid, folder_id):
    """
    Fetch the 'need' information (what still has to sync) for a share.

    :param gid: Grid id
    :param nid: Node id
    :param folder_id: Share (folder) name/id to query
    :return: decoded JSON payload returned by the node
    """
    runargs = acclient.RunArgs(name='get_share_need', max_time=SyncClient.API_TIMEOUT)
    # Use j.data.serializer.json like the sibling sync helpers (create_share,
    # _add_device_to_share, ...) instead of the bare `json` module, for consistency.
    command = self._client.cmd(gid, nid, 'sync', args=runargs, data=j.data.serializer.json.dumps({'name': folder_id}))
    job = command.get_next_result(SyncClient.API_TIMEOUT)
    return self._client._load_json_or_die(job)
def list_shares(self, gid, nid):
    """
    List all shares on the node

    :param gid: Grid id
    :param nid: Node id
    :return: list of Share objects (a concrete list, re-iterable)
    """
    runargs = acclient.RunArgs(name='list_shares', max_time=SyncClient.API_TIMEOUT)
    command = self._client.cmd(gid, nid, 'sync', args=runargs)
    job = command.get_next_result(SyncClient.API_TIMEOUT)
    # On Python 3 `map(...)` returns a one-shot lazy iterator; build a real
    # list so the result matches the other API methods and can be reused.
    return [Share(gid, nid, folder, self) for folder in self._client._load_json_or_die(job)]
def _get_ingore(self, gid, nid, path):
    """
    Return the ignore patterns configured for the share at *path*.

    :param gid: Grid id
    :param nid: Node id
    :param path: Share full path on the node
    :return: list of ignore patterns, [] when none are set
    """
    run_args = acclient.RunArgs(name='get_share_ignore', max_time=SyncClient.API_TIMEOUT)
    payload = json.dumps({'path': path})
    cmd = self._client.cmd(gid, nid, 'sync', args=run_args, data=payload)
    result = cmd.get_next_result(SyncClient.API_TIMEOUT)
    patterns = self._client._load_json_or_die(result).get('ignore', [])
    # Normalize any falsy value (None, empty) to an empty list.
    return patterns if patterns else []
def _add_device_to_share(self, gid, nid, device_id, folder_id):
    """
    Register *device_id* as a peer of share *folder_id* on node (gid, nid).

    :return: decoded JSON payload returned by the node
    """
    payload = j.data.serializer.json.dumps({
        'device_id': device_id,
        'folder_id': folder_id,
    })
    run_args = acclient.RunArgs(name='add_device_to_share', max_time=SyncClient.API_TIMEOUT)
    cmd = self._client.cmd(gid, nid, 'sync', args=run_args, data=payload)
    result = cmd.get_next_result(SyncClient.API_TIMEOUT)
    return self._client._load_json_or_die(result)
def _get_id(self, gid, nid):
    """
    Return the sync device id of node (gid, nid), caching the result per node.

    :param gid: Grid id
    :param nid: Node id
    :return: the device id reported by the node's 'get_id' sync command
    """
    cache_key = (gid, nid)
    if cache_key in self._ids_cache:
        return self._ids_cache[cache_key]
    runargs = acclient.RunArgs(name='get_id', max_time=SyncClient.API_TIMEOUT)
    command = self._client.cmd(gid, nid, 'sync', args=runargs)
    job = command.get_next_result(SyncClient.API_TIMEOUT)
    # Renamed from `id`, which shadowed the builtin of the same name.
    device_id = self._client._load_json_or_die(job)
    self._ids_cache[cache_key] = device_id
    return device_id
def _add_device(self, gid, nid, name, id):
    """
    Add a sync device with the given name and id on node (gid, nid).

    NOTE(review): the node's reply is wrapped in a Share object even though
    this call adds a device — looks copied from the share helpers; confirm
    the intended return type against callers.
    """
    payload = j.data.serializer.json.dumps({'name': name, 'id': id})
    run_args = acclient.RunArgs(name='add_device', max_time=SyncClient.API_TIMEOUT)
    cmd = self._client.cmd(gid, nid, 'sync', args=run_args, data=payload)
    result = cmd.get_next_result(SyncClient.API_TIMEOUT)
    return Share(gid, nid, self._client._load_json_or_die(result), self)
def executeBash(self, id, cron, cmds, path=None, gid=None, nid=None, roles=None, allnodes=True, timeout=5, tags=None):
    """
    Schedule bash script execution according to given cron.

    Cron expression represents a set of times, using 6 space-separated fields.

    +--------------+------------+-----------------+----------------------------+
    | Field name   | Mandatory? | Allowed values  | Allowed special characters |
    +==============+============+=================+============================+
    | Seconds      | Yes        | 0-59            | * / , -                    |
    +--------------+------------+-----------------+----------------------------+
    | Minutes      | Yes        | 0-59            | * / , -                    |
    +--------------+------------+-----------------+----------------------------+
    | Hours        | Yes        | 0-23            | * / , -                    |
    +--------------+------------+-----------------+----------------------------+
    | Day of month | Yes        | 1-31            | * / , - ?                  |
    +--------------+------------+-----------------+----------------------------+
    | Month        | Yes        | 1-12 or JAN-DEC | * / , -                    |
    +--------------+------------+-----------------+----------------------------+
    | Day of week  | Yes        | 0-6 or SUN-SAT  | * / , - ?                  |
    +--------------+------------+-----------------+----------------------------+

    Note: Month and Day-of-week field values are case insensitive.
    "SUN", "Sun", and "sun" are equally accepted.

    :param id: Cron job id, must be unique globally and can be used later to stop the schedule
    :param cron: Cron string to run the task
    :param cmds: Bash script content to execute
    :param path: Working directory of the script
    :param gid: GID of node
    :param nid: NID of node
    :param roles: List of agent roles to match (default: no extra role filtering)
    :param allnodes: Schedule on ALL agents matching the roles; if False only one agent
    :param timeout: Max run time in seconds before the process is killed
    :param tags: Tags string attached to the command and returned with the results as is
    """
    # Default changed from the shared mutable `roles=[]` to None; behavior unchanged.
    roles = [] if roles is None else roles
    nid, roles, fanout = self._simple._get_route(nid, roles, allnodes)
    runargs = acclient.RunArgs(max_time=timeout, working_dir=path)
    return self._client.schedule_add(id, cron, gid=gid, nid=nid, cmd='bash', args=runargs, roles=roles, fanout=fanout, data=cmds, tags=tags)
def reboot(self, gid=None, nid=None, roles=None, allnodes=True):
    """
    Reboot the matching node(s).

    (Docstring fixed: the original "Same as execute but cmds can be a bash
    script" was copy-pasted from the bash helpers.)

    :param gid: GID of node
    :param nid: NID of node
    :param roles: List of agent roles to match (default: no extra role filtering)
    :param allnodes: Reboot ALL agents matching the roles; if False only one agent
    :return: list of job ids for the issued reboot commands
    """
    # Default changed from the shared mutable `roles=[]` to None; behavior unchanged.
    roles = [] if roles is None else roles
    nid, roles, fanout = self._get_route(nid, roles, allnodes)
    runargs = acclient.RunArgs()
    command = self._client.cmd(gid, nid, 'reboot', args=runargs, roles=roles, fanout=fanout)
    return list(command.get_jobs().keys())
def executeAsync(self, cmd, path=None, gid=None, nid=None, roles=None, allnodes=True, timeout=5, data=None, tags=None):
    """
    Executes a command on node(s)

    :param cmd: Command to execute ex('ls -l /opt')
    :param path: CWD of commnad (where to execute)
    :param gid: GID of node
    :param nid: NID of node
    :param roles: List of agent roles to match (only agent that satisfies all the given roles will execue this)
    :param allnodes: Execute on ALL agents that matches the roles (default True), if False only one agent will execute
    :param timeout: Process timeout, if took more than `timeout` the process will get killed and error is returned.
    :param data: raw data that will be feed to command stdin.
    :param tags: Tags string that will be attached to the command and return with the results as is. Usefule to attach
        meta data to the command for analysis
    """
    # Default changed from the shared mutable `roles=[]` to None; behavior unchanged.
    roles = [] if roles is None else roles
    parts = shlex.split(cmd)
    assert len(parts) > 0, "Empty command string"
    cmd = parts[0]
    args = parts[1:]
    nid, roles, fanout = self._get_route(nid, roles, allnodes)
    runargs = acclient.RunArgs(max_time=timeout, working_dir=path)
    command = self._client.execute(gid, nid, cmd, cmdargs=args, args=runargs, data=data, roles=roles, fanout=fanout, tags=tags)
    return list(command.get_jobs().values())
def getAgents(self):
    """
    Return a list of Agent objects for all active agents.

    Queries the controller's 'list_agents' command; the reply maps
    "gid:nid" keys to role lists.
    """
    run_args = acclient.RunArgs(name='list_agents')
    cmd = self._client.cmd(None, None, 'controller', run_args, roles=['*'])
    reply = self._client._load_json_or_die(cmd.get_next_result())
    active = []
    for node_key, node_roles in reply.items():
        gid_part, _, nid_part = node_key.partition(':')
        active.append(Agent(self._client, int(gid_part), int(nid_part), node_roles))
    return active
def getRunArgs(self, domain=None, name=None, max_time=0, max_restart=0, recurring_period=0, stats_interval=0, args=None, loglevels='*', loglevels_db=None, loglevels_ac=None, queue=None):
    """
    Creates a reusable run arguments object

    :domain: Domain name
    :name: script or executable name
    :max_time: Max run time, 0 (forever), -1 forever but remember during reboots (long running), other values is timeout
    :max_restart: Max number of restarts if process died in under 5 min.
    :recurring_period: Scheduling time
    :stats_interval: How frequent the stats aggregation is done/flushed to AC
    :args: Command line arguments (in case of execute)
    :loglevels: Which log levels to capture and pass to logger
    :loglevels_db: Which log levels to store in DB (overrides logger defaults)
    :loglevels_ac: Which log levels to send to AC (overrides logger defaults)
    """
    # Collect all options first, then forward them in one shot.
    options = dict(
        domain=domain,
        name=name,
        max_time=max_time,
        max_restart=max_restart,
        recurring_period=recurring_period,
        stats_interval=stats_interval,
        args=args,
        loglevels=loglevels,
        loglevels_db=loglevels_db,
        loglevels_ac=loglevels_ac,
        queue=queue,
    )
    return acclient.RunArgs(**options)
def scan(self, gid, nid, name, sub=None):
    """
    Rescan folder

    :param gid: Grid id
    :param nid: Node id
    :param name: Share name
    :param sub: Subfolder to scan in
    :return: None (an error in the node's reply raises via _load_json_or_die)
    """
    payload = j.data.serializer.json.dumps({'name': name, 'sub': sub})
    run_args = acclient.RunArgs(name='scan_share', max_time=SyncClient.API_TIMEOUT)
    cmd = self._client.cmd(gid, nid, 'sync', args=run_args, data=payload)
    result = cmd.get_next_result(SyncClient.API_TIMEOUT)
    # Decode only to surface errors; the payload itself is discarded.
    self._client._load_json_or_die(result)
def executeJumpscriptAsync(self, domain=None, name=None, content=None, path=None, method=None, gid=None, nid=None, roles=None, allnodes=True, timeout=5, args=None, tags=None):
    """
    Executes jumpscript asynchronusly and immediately return jobs

    :param domain: jumpscript domain name
    :param name: jumpscript name. Note when using domain/name to execute jumpscript the script is loaded
        from the node jumpscritps folder.
    :param content: Optional jumpscript content (as code) to execute.
    :param path: Optional full path to jumpscript on node to execute
    :param method: A python function to execute.
    :param gid: GID of node
    :param nid: NID of node
    :param roles: List of agent roles to match (only agent that satisfies all the given roles will execue this)
    :param allnodes: Execute on ALL agents that matches the roles (default True), if False only one agent will execute
    :param timeout: Process timeout, if took more than `timeout` the process will get killed and error is returned.
    :param args: Arguments to jumpscript
    :param tags: Tags string that will be attached to the command and return with the results as is. Usefule to attach
        meta data to the command for analysis
    """
    # Defaults changed from the shared mutables `roles=[]` / `args={}` to None;
    # behavior for all callers is unchanged.
    roles = [] if roles is None else roles
    args = {} if args is None else args
    nid, roles, fanout = self._get_route(nid, roles, allnodes)
    if domain is not None:
        assert name is not None, "name is required in case 'domain' is given"
    else:
        if not content and not path and not method:
            raise ValueError(
                'domain/name, content, or path must be supplied')
    runargs = acclient.RunArgs(max_time=timeout)
    if domain is not None:
        command = self._client.execute_jumpscript(gid=gid, nid=nid, domain=domain, name=name, args=args, runargs=runargs, roles=roles, fanout=fanout, tags=tags)
    else:
        # call the unexposed jumpscript_content extension manually
        if method:
            content = self._getFuncCode(method)
        elif path:
            content = j.sal.fs.fileGetContents(path)
        command = self._client.execute_jumpscript_content(gid, nid, content, args=args, runargs=runargs, roles=roles, fanout=fanout, tags=tags)
    return list(command.get_jobs().values())
def executeJumpscript(self, id, cron, domain=None, name=None, content=None, path=None, method=None, gid=None, nid=None, roles=None, allnodes=True, timeout=5, args=None, tags=None):
    """
    Schedule a jumpscript for execution according to the given cron string.

    The script is identified either by domain/name (loaded from the node's
    jumpscripts folder) or by inline content / a node-side path / a python
    method whose source is extracted and shipped.

    :param id: Cron job id, must be unique globally; can be used later to stop the schedule
    :param cron: Cron string (6 space-separated fields, seconds first)
    :param domain: jumpscript domain name (requires `name`)
    :param name: jumpscript name
    :param content: Optional jumpscript content (as code) to execute
    :param path: Optional full path to jumpscript on node to execute
    :param method: A python function whose code will be executed
    :param gid: GID of node
    :param nid: NID of node
    :param roles: List of agent roles to match (default: no extra role filtering)
    :param allnodes: Schedule on ALL agents matching the roles; if False only one agent
    :param timeout: Max run time in seconds before the process is killed
    :param args: Arguments passed to the jumpscript
    :param tags: Tags string attached to the command and returned with the results as is
    """
    # Defaults changed from the shared mutables `roles=[]` / `args={}` to None;
    # behavior for all callers is unchanged.
    roles = [] if roles is None else roles
    args = {} if args is None else args
    nid, roles, fanout = self._simple._get_route(nid, roles, allnodes)
    if domain is not None:
        assert name is not None, "name is required in case 'domain' is given"
    else:
        if not content and not path and not method:
            raise ValueError(
                'domain/name, content, or path must be supplied')
    runargs = acclient.RunArgs(max_time=timeout)
    if domain is not None:
        # NOTE(review): assumes acclient.RunArgs.update returns a new RunArgs
        # (unlike dict.update, which returns None) — confirm against acclient.
        runargs = runargs.update({'domain': domain, 'name': name})
        return self._client.schedule_add(
            id, cron, gid=gid, nid=nid, cmd=acclient.CMD_EXECUTE_JUMPSCRIPT, args=runargs,
            data=j.data.serializer.json.dumps(args), roles=roles, fanout=fanout, tags=tags)
    else:
        # call the unexposed jumpscript_content extension manually
        if method:
            content = self._simple._getFuncCode(method)
        elif path:
            content = j.sal.fs.fileGetContents(path)
        data = {
            'content': content,
            'args': args,
        }
        return self._client.schedule_add(
            id, cron, gid=gid, nid=nid, cmd=acclient.CMD_EXECUTE_JUMPSCRIPT_CONTENT, args=runargs,
            data=j.data.serializer.json.dumps(data), roles=roles, fanout=fanout, tags=tags)