def index(self, locked=None, machine_type='', os_type=None, os_version=None,
          locked_by=None, up=None, count=None):
    """
    List nodes as JSON dicts, optionally filtered.

    :param locked: filter by locked state when not None
    :param machine_type: exact type, or several joined with '|'
    :param os_type: filter by OS type when truthy
    :param os_version: filter by OS version when truthy
    :param locked_by: filter by the locking owner when truthy
    :param up: filter by up/down state when not None
    :param count: optional result limit; must be an int or digit-string
    """
    query = Node.query
    if locked is not None:
        query = query.filter(Node.locked == locked)
    if machine_type:
        if '|' in machine_type:
            machine_types = machine_type.split('|')
            query = query.filter(Node.machine_type.in_(machine_types))
        else:
            query = query.filter(Node.machine_type == machine_type)
    if os_type:
        query = query.filter(Node.os_type == os_type)
    if os_version:
        query = query.filter(Node.os_version == os_version)
    if locked_by:
        query = query.filter(Node.locked_by == locked_by)
    if up is not None:
        query = query.filter(Node.up == up)
    if count is not None:
        # Bug fix: the old check `not count.isdigit() or isinstance(count, int)`
        # raised AttributeError for int values of count and rejected ints even
        # when valid. Accept ints and digit-strings alike.
        if not (isinstance(count, int) or str(count).isdigit()):
            error('/errors/invalid/', 'count must be an integer')
        query = query.limit(count)
    return [node.__json__() for node in query.all()]
def index_delete(self):
    """Delete this controller's run, erroring if it does not exist."""
    run = self.run
    if not run:
        error('/errors/not_found/',
              'attempted to delete a non-existent run')
    log.info("Deleting run: %r", run)
    run.delete()
    return dict()
def job_stats(self, machine_type="", since_days=14):
    """
    Count recent jobs per node, grouped by job status.

    :param machine_type: optionally restrict to one machine type
    :param since_days: look-back window in days; must be >= 1
    :returns: OrderedDict mapping node name -> {status: count},
              ordered by each node's total job count (ascending)
    """
    since_days = int(since_days)
    if since_days < 1:
        error("/errors/invalid/", "since_days must be a positive integer")
    now = datetime.utcnow()
    window_start = now - timedelta(days=since_days)
    recent = Job.query.filter(Job.posted.between(window_start, now)).subquery()
    RecentJob = aliased(Job, recent)
    query = Session.query(Node.name, RecentJob.status, func.count("*"))
    if machine_type:
        # Note: filtering by Job.machine_type (as below) greatly improves
        # performance but could lead slightly incorrect values if many jobs
        # are being scheduled using mixed machine types. We work around
        # this by including the 'multi' machine type (which is the name of
        # the queue Inktank uses for such jobs).
        query = query.filter(RecentJob.machine_type.in_((machine_type, "multi")))
        query = query.filter(Node.machine_type == machine_type)
    query = (query.join(RecentJob.target_nodes)
                  .group_by(Node)
                  .group_by(RecentJob.status))
    all_stats = {}
    for name, status, count in query.all():
        all_stats.setdefault(name, {})[status] = count

    def total_jobs(item):
        # Sort key: sum of all status counts for one node.
        return sum(item[1].values())

    return OrderedDict(sorted(all_stats.items(), key=total_jobs))
def index(self):
    """Return the node as a JSON dict, or 404 if it does not exist."""
    if not self.node:
        error('/errors/not_found/', 'node not found')
    return self.node.__json__()
def lock_many_post(self):
    """
    Lock a batch of nodes of one machine_type for a requester.

    Expects a JSON body containing at least: count, locked_by,
    machine_type, description. Retries once on a detected race
    condition before giving up.

    :returns: the list of locked Node objects (as Node.lock_many returns)
    """
    req = request.json
    fields = set(("count", "locked_by", "machine_type", "description"))
    if not fields.issubset(set(req.keys())):
        error("/errors/invalid/",
              "must pass these fields: %s" % ", ".join(fields))
    req["locked"] = True
    count = req.pop("count", 0)
    if count < 1:
        error("/errors/invalid/", "cannot lock less than 1 node")
    machine_type = req.pop("machine_type", None)
    if not machine_type:
        error("/errors/invalid/", "must specify machine_type")
    locked_by = req.get("locked_by")
    description = req.get("description")
    os_type = req.get("os_type")
    os_version = req.get("os_version")
    arch = req.get("arch")
    if os_version is not None:
        # Normalize numeric versions (e.g. 7.0 from JSON) to strings.
        os_version = str(os_version)
    attempts = 2
    log.debug("Locking {count} {mtype} nodes for {locked_by}".format(
        count=count, mtype=machine_type, locked_by=locked_by))
    while attempts > 0:
        try:
            result = Node.lock_many(count=count, locked_by=locked_by,
                                    machine_type=machine_type,
                                    description=description,
                                    os_type=os_type,
                                    os_version=os_version,
                                    arch=arch)
            if description:
                desc_str = " with description %s" % description
            else:
                desc_str = ""
            log.info("Locked {names} for {locked_by}{desc_str}".format(
                names=" ".join([str(node) for node in result]),
                locked_by=locked_by, desc_str=desc_str))
            return result
        except RaceConditionError as exc:
            # Fix: Logger.warn() is a deprecated alias; use warning().
            log.warning("lock_many() detected race condition")
            attempts -= 1
            if attempts > 0:
                log.info("retrying after race avoidance (%s tries left)",
                         attempts)
            else:
                error(exc.url, str(exc))
        except PaddlesError as exc:
            error(exc.url, str(exc))
def index_delete(self):
    """Delete this job and refresh its run's aggregate status."""
    job = self.job
    if not job:
        error('/errors/not_found/',
              'attempted to delete a non-existent job')
    log.info("Deleting job %r", job)
    parent_run = job.run
    job.delete()
    # Recompute the run's status now that one of its jobs is gone.
    parent_run.set_status()
    return dict()
def lock_many_post(self):
    """
    Lock a batch of nodes of one machine_type for a requester.

    Expects a JSON body containing at least: count, locked_by,
    machine_type, description. Retries once on a detected race
    condition before giving up.

    :returns: the list of locked Node objects (as Node.lock_many returns)
    """
    req = request.json
    fields = set(('count', 'locked_by', 'machine_type', 'description'))
    if not fields.issubset(set(req.keys())):
        error('/errors/invalid/',
              "must pass these fields: %s" % ', '.join(fields))
    req['locked'] = True
    count = req.pop('count', 0)
    if count < 1:
        error('/errors/invalid/', "cannot lock less than 1 node")
    machine_type = req.pop('machine_type', None)
    if not machine_type:
        error('/errors/invalid/', "must specify machine_type")
    locked_by = req.get('locked_by')
    description = req.get('description')
    os_type = req.get('os_type')
    os_version = req.get('os_version')
    arch = req.get('arch')
    if os_version is not None:
        # Normalize numeric versions (e.g. 7.0 from JSON) to strings.
        os_version = str(os_version)
    attempts = 2
    log.debug("Locking {count} {mtype} nodes for {locked_by}".format(
        count=count, mtype=machine_type, locked_by=locked_by))
    while attempts > 0:
        try:
            result = Node.lock_many(count=count, locked_by=locked_by,
                                    machine_type=machine_type,
                                    description=description,
                                    os_type=os_type,
                                    os_version=os_version,
                                    arch=arch)
            if description:
                desc_str = " with description %s" % description
            else:
                desc_str = ""
            log.info("Locked {names} for {locked_by}{desc_str}".format(
                names=" ".join([str(node) for node in result]),
                locked_by=locked_by,
                desc_str=desc_str,
            ))
            return result
        except RaceConditionError as exc:
            # Fix: Logger.warn() is a deprecated alias; use warning().
            log.warning("lock_many() detected race condition")
            attempts -= 1
            if attempts > 0:
                log.info("retrying after race avoidance (%s tries left)",
                         attempts)
            else:
                error(exc.url, str(exc))
        except PaddlesError as exc:
            error(exc.url, str(exc))
def latest_runs(fields=None, count=conf.default_latest_runs_count, page=1):
    """
    Return the most recently posted runs, newest first.

    :param fields: optional field names; when given, each run is sliced
                   to just those fields
    :param count: page size
    :param page: 1-based page number
    """
    query = offset_query(
        Run.query.order_by(Run.posted.desc()),
        page_size=count,
        page=page,
    )
    runs = query.all()
    if not fields:
        return list(runs)
    try:
        return [run.slice(fields) for run in runs]
    except AttributeError:
        # An unknown field name was requested.
        rollback()
        error('/errors/invalid/', 'an invalid field was specified')
def _create_job(self, job_id, data):
    """
    Create and commit a Job for this run; error if job_id already exists.

    :returns: the newly created Job
    """
    existing = (Job.query.options(load_only('id', 'job_id'))
                .filter_by(job_id=job_id, run=self.run)
                .first())
    if existing:
        error('/errors/invalid/',
              "job with job_id %s already exists" % job_id)
    else:
        log.info("Creating job: %s/%s",
                 data.get('name', '<no name!>'), job_id)
        self.job = Job(data, self.run)
        Session.commit()
        return self.job
def index_put(self):
    """Apply the JSON request body as an update to this node."""
    if not self.node:
        error("/errors/not_found/",
              "attempted to update a non-existent node")
    payload = request.json
    log.info("Updating {node}: {data}".format(node=self.node, data=payload))
    try:
        self.node.update(payload)
    except PaddlesError as exc:
        error(exc.url, str(exc))
    return dict()
def lock(self):
    """Lock, unlock, or check this node, driven by the JSON body's 'locked' key."""
    if not self.node:
        error("/errors/not_found/", "node not found")
    if request.method not in ("PUT", "POST"):
        message = ("this URI only supports PUT and POST requests" +
                   " but %s was attempted" % request.method)
        error("/errors/invalid/", message)
    payload = request.json
    # locked=True -> lock, locked=False -> unlock, absent/None -> check.
    verbs = {False: "unlock", True: "lock", None: "check"}
    return self._lock(self.node, payload, verbs[payload.get("locked")])
def lock(self):
    """Lock, unlock, or check this node, driven by the JSON body's 'locked' key."""
    if not self.node:
        error('/errors/not_found/', 'node not found')
    if request.method not in ('PUT', 'POST'):
        message = ('this URI only supports PUT and POST requests' +
                   ' but %s was attempted' % request.method)
        error('/errors/invalid/', message)
    payload = request.json
    # locked=True -> lock, locked=False -> unlock, absent/None -> check.
    verbs = {False: 'unlock', True: 'lock', None: 'check'}
    return self._lock(self.node, payload, verbs[payload.get('locked')])
def index(self, status='', fields=''):
    """
    List this run's jobs, newest first.

    :param status: optionally restrict to jobs with this status
    :param fields: optional field names; when given, each job is sliced
                   to just those fields
    """
    query = Job.filter_by(run=self.run)
    if status:
        query = query.filter_by(status=status)
    jobs = query.order_by(Job.posted.desc()).all()
    if not fields:
        return jobs
    try:
        return [job.slice(fields) for job in jobs]
    except AttributeError:
        # An unknown field name was requested.
        rollback()
        error('/errors/invalid/', 'an invalid field was specified')
def index_post(self):
    """
    Create a new node from the JSON request body; 'name' is required
    and must be unique.
    """
    try:
        data = request.json
        name = data.get('name')
    except ValueError:
        rollback()
        error('/errors/invalid/', 'could not decode JSON body')
    # we allow empty data to be pushed
    if not name:
        error('/errors/invalid/', "could not find required key: 'name'")
    if Node.filter_by(name=name).first():
        error('/errors/invalid/', "Node with name %s already exists" % name)
    else:
        self.node = Node(name=name)
        try:
            self.node.update(data)
        except PaddlesError as exc:
            error(exc.url, str(exc))
        log.info("Created {node}: {data}".format(node=self.node, data=data))
    return dict()
def index_post(self):
    """
    We update a job here, it should obviously exist already but most
    likely the data is empty.
    """
    if not self.job:
        error('/errors/not_found/', 'attempted to update a non-existent job')
    previous_status = self.job.status
    self.job.update(request.json)
    new_status = self.job.status
    if new_status != previous_status:
        log.info("Job %s/%s status changed from %s to %s",
                 self.job.name, self.job.job_id,
                 previous_status, new_status)
    return dict()
def run(self):
    """
    Look up (or lazily create) the Run named in the request context.

    NOTE(review): the '/errors/notfound' URI here differs from the
    '/errors/not_found/' used elsewhere — left unchanged since it is
    part of the API surface; confirm intent.
    """
    run_name = request.context.get('run_name')
    if not run_name:
        error('/errors/notfound', 'associated run was not found and no name was provided to create one')  # noqa
    matches = models.Run.query.filter(models.Run.name == run_name)
    num_matches = matches.count()
    if num_matches == 1:
        return matches.one()
    elif num_matches > 1:
        error('/errors/invalid/',
              'there are %s runs with that name!' % num_matches)
    elif num_matches == 0:
        log.info("Creating run: %s", run_name)
        return models.Run(run_name)
def lock(self):
    """Lock, unlock, or check this node, driven by the JSON body's 'locked' key."""
    if not self.node:
        error('/errors/not_found/', 'node not found')
    if request.method not in ('PUT', 'POST'):
        message = ('this URI only supports PUT and POST requests' +
                   ' but %s was attempted' % request.method)
        error('/errors/invalid/', message)
    payload = request.json
    # locked=True -> lock, locked=False -> unlock, absent/None -> check.
    verbs = {False: 'unlock', True: 'lock', None: 'check'}
    return self._lock(self.node, payload, verbs[payload.get('locked')])
def index_post(self):
    """
    We update a job here, it should obviously exist already but most
    likely the data is empty.
    """
    if not self.job:
        error('/errors/not_found/', 'attempted to update a non-existent job')
    previous_status = self.job.status
    self.job.update(request.json)
    new_status = self.job.status
    if new_status != previous_status:
        log.info("Job %s/%s status changed from %s to %s",
                 self.job.name, self.job.job_id,
                 previous_status, new_status)
    return dict()
def index_put(self):
    """Apply the JSON request body as an update to this node."""
    if not self.node:
        error('/errors/not_found/',
              'attempted to update a non-existent node')
    payload = request.json
    log.info("Updating {node}: {data}".format(node=self.node, data=payload))
    try:
        self.node.update(payload)
    except PaddlesError as exc:
        error(exc.url, str(exc))
    return dict()
def _lock(node_obj, node_dict, verb):
    """
    Apply node_dict to node_obj, logging the action as a lock, unlock,
    or check. Returns the node as a JSON dict.
    """
    locked_by = node_dict.get('locked_by')
    description = node_dict.get('description')
    # Human-readable verb for log messages.
    _verb = dict(lock='Lock', unlock='Unlock').get(verb, 'Check')
    if description:
        desc_str = " with description %s" % description
    else:
        desc_str = ''
    log.debug("{verb}ing {node} for {locked_by}{desc_str}".format(
        verb=_verb, node=node_obj, locked_by=locked_by, desc_str=desc_str))
    try:
        node_obj.update(node_dict)
        log.info("{verb}ed {node} for {locked_by}{desc_str}".format(
            verb=_verb, node=node_obj, locked_by=locked_by,
            desc_str=desc_str))
    except PaddlesError as exc:
        error(exc.url, str(exc))
    return node_obj.__json__()
def run(self):
    """
    Look up (or lazily create) the Run named in the request context.

    NOTE(review): the '/errors/notfound' URI here differs from the
    '/errors/not_found/' used elsewhere — left unchanged since it is
    part of the API surface; confirm intent.
    """
    run_name = request.context.get('run_name')
    if not run_name:
        error('/errors/notfound', 'associated run was not found and no name was provided to create one')  # noqa
    matches = models.Run.query.filter(models.Run.name == run_name)
    num_matches = matches.count()
    if num_matches == 1:
        return matches.one()
    elif num_matches > 1:
        error('/errors/invalid/',
              'there are %s runs with that name!' % num_matches)
    elif num_matches == 0:
        log.info("Creating run: %s", run_name)
        return models.Run(run_name)
def __init__(self, job_id):
    """
    Resolve the run named in the request context (or None) and look up
    the job with the given job_id within it.
    """
    self.job_id = str(job_id)
    run_name = request.context.get('run_name')
    self.run = None
    if run_name:
        matches = models.Run.query.filter(models.Run.name == run_name)
        num_matches = matches.count()
        if num_matches == 1:
            self.run = matches.one()
        elif num_matches > 1:
            error('/errors/invalid/',
                  'there are %s runs with that name!' % num_matches)
    # Load only the columns needed to identify the job.
    job_q = Job.query.options(load_only('id', 'job_id', 'name', 'status'))
    self.job = job_q.filter_by(job_id=job_id, run=self.run).first()
def __init__(self, job_id):
    """
    Resolve the run named in the request context (or None) and look up
    the job with the given job_id within it.
    """
    self.job_id = str(job_id)
    run_name = request.context.get('run_name')
    self.run = None
    if run_name:
        matches = models.Run.query.filter(models.Run.name == run_name)
        num_matches = matches.count()
        if num_matches == 1:
            self.run = matches.one()
        elif num_matches > 1:
            error('/errors/invalid/',
                  'there are %s runs with that name!' % num_matches)
    # Load only the columns needed to identify the job.
    job_q = Job.query.options(load_only('id', 'job_id', 'name', 'status'))
    self.job = job_q.filter_by(job_id=job_id, run=self.run).first()
def index_post(self):
    """
    We create new jobs associated to this run here
    """
    try:
        data = request.json
        job_id = data.get('job_id')
    except ValueError:
        rollback()
        error('/errors/invalid/', 'could not decode JSON body')
    # we allow empty data to be pushed
    if not job_id:
        error('/errors/invalid/', "could not find required key: 'job_id'")
    self.run = self._find_run()
    if not self.run:
        self._create_run()
    # Normalize job_id to a string both locally and in the payload.
    job_id = data['job_id'] = str(job_id)
    self._create_job(job_id, data)
    return dict()
def date_from_string(date_str, out_fmt=datetime_format, hours='00:00:00'):
    """
    Parse a date string — 'today', 'yesterday', or a literal date in
    date_format — into a datetime/date plus its string form.

    :param date_str: 'today', 'yesterday', or a date matching date_format
    :param out_fmt: output format; when it is datetime_format the time
                    portion is appended and a datetime is returned
    :param hours: time-of-day string appended for datetime output
    :returns: tuple of (date-or-datetime, formatted string)
    """
    try:
        if date_str == 'today':
            date = datetime.date.today()
            date_str = date.strftime(date_format)
        elif date_str == 'yesterday':
            # Bug fix: the old `date.replace(day=date.day - 1)` raised
            # ValueError on the first day of any month (day=0);
            # timedelta handles month/year rollover correctly.
            date = datetime.date.today() - datetime.timedelta(days=1)
            date_str = date.strftime(date_format)
        else:
            date = datetime.datetime.strptime(date_str, date_format)
        if out_fmt == datetime_format:
            date_str = '{date}_{time}'.format(date=date_str, time=hours)
            date = datetime.datetime.strptime(date_str, out_fmt)
        return (date, date_str)
    except ValueError:
        rollback()
        error('/errors/invalid/', 'date format must match %s' % date_format)
def index_post(self):
    """
    We create new jobs associated to this run here
    """
    try:
        data = request.json
        job_id = data.get('job_id')
    except ValueError:
        rollback()
        error('/errors/invalid/', 'could not decode JSON body')
    # we allow empty data to be pushed
    if not job_id:
        error('/errors/invalid/', "could not find required key: 'job_id'")
    # Normalize job_id to a string both locally and in the payload.
    job_id = data['job_id'] = str(job_id)
    existing = (Job.query.options(load_only('id', 'job_id'))
                .filter_by(job_id=job_id, run=self.run)
                .first())
    if existing:
        error('/errors/invalid/',
              "job with job_id %s already exists" % job_id)
    else:
        log.info("Creating job: %s/%s",
                 data.get('name', '<no name!>'), job_id)
        self.job = Job(data, self.run)
    return dict()
def unlock_many_post(self):
    """
    Unlock a batch of nodes by name. The JSON body must contain exactly
    'names' (a list) and 'locked_by'. All named nodes must exist.
    """
    req = request.json
    fields = ['names', 'locked_by']
    if sorted(req.keys()) != sorted(fields):
        error('/errors/invalid/',
              "must pass these fields: %s" % ', '.join(fields))
    locked_by = req.get('locked_by')
    names = req.get('names')
    if not isinstance(names, list):
        error('/errors/invalid/',
              "'names' must be a list; got: %s" % str(type(names)))
    query = Node.query.filter(Node.name.in_(names))
    # Refuse to proceed unless every requested node was found.
    if query.count() != len(names):
        error('/errors/invalid/', "Could not find all nodes!")
    log.info("Unlocking {count} nodes for {locked_by}".format(
        count=len(names), locked_by=locked_by))
    unlock_payload = dict(locked=False, locked_by=locked_by)
    return [NodeController._lock(node, unlock_payload, 'unlock')
            for node in query.all()]
def unlock_many_post(self):
    """
    Unlock a batch of nodes by name. The JSON body must contain exactly
    'names' (a list) and 'locked_by'. All named nodes must exist.
    """
    req = request.json
    fields = ['names', 'locked_by']
    if sorted(req.keys()) != sorted(fields):
        error('/errors/invalid/',
              "must pass these fields: %s" % ', '.join(fields))
    locked_by = req.get('locked_by')
    names = req.get('names')
    if not isinstance(names, list):
        error('/errors/invalid/',
              "'names' must be a list; got: %s" % str(type(names)))
    query = Node.query.filter(Node.name.in_(names))
    # Refuse to proceed unless every requested node was found.
    if query.count() != len(names):
        error('/errors/invalid/', "Could not find all nodes!")
    log.info("Unlocking {count} nodes for {locked_by}".format(
        count=len(names), locked_by=locked_by))
    unlock_payload = dict(locked=False, locked_by=locked_by)
    return [NodeController._lock(node, unlock_payload, 'unlock')
            for node in query.all()]
def index(self, locked=None, machine_type="", os_type=None, os_version=None,
          locked_by=None, up=None, count=None):
    """
    List nodes as JSON dicts, optionally filtered.

    :param locked: filter by locked state when not None
    :param machine_type: exact type, or several joined with '|'
    :param os_type: filter by OS type when truthy
    :param os_version: filter by OS version when truthy
    :param locked_by: filter by the locking owner when truthy
    :param up: filter by up/down state when not None
    :param count: optional result limit; must be an int or digit-string
    """
    query = Node.query
    if locked is not None:
        query = query.filter(Node.locked == locked)
    if machine_type:
        if "|" in machine_type:
            machine_types = machine_type.split("|")
            query = query.filter(Node.machine_type.in_(machine_types))
        else:
            query = query.filter(Node.machine_type == machine_type)
    if os_type:
        query = query.filter(Node.os_type == os_type)
    if os_version:
        query = query.filter(Node.os_version == os_version)
    if locked_by:
        query = query.filter(Node.locked_by == locked_by)
    if up is not None:
        query = query.filter(Node.up == up)
    if count is not None:
        # Bug fix: the old check `not count.isdigit() or isinstance(count, int)`
        # raised AttributeError for int values of count and rejected ints even
        # when valid. Accept ints and digit-strings alike.
        if not (isinstance(count, int) or str(count).isdigit()):
            error("/errors/invalid/", "count must be an integer")
        query = query.limit(count)
    return [node.__json__() for node in query.all()]
def job_stats(self, machine_type='', since_days=14):
    """
    Count recent jobs per node, grouped by job status.

    :param machine_type: optionally restrict to one machine type
    :param since_days: look-back window in days; must be >= 1
    :returns: OrderedDict mapping node name -> {status: count},
              ordered by each node's total job count (ascending)
    """
    since_days = int(since_days)
    if since_days < 1:
        error('/errors/invalid/', "since_days must be a positive integer")
    now = datetime.utcnow()
    window_start = now - timedelta(days=since_days)
    recent = Job.query.filter(Job.posted.between(window_start, now)).subquery()
    RecentJob = aliased(Job, recent)
    query = Session.query(Node.name, RecentJob.status, func.count('*'))
    if machine_type:
        # Note: filtering by Job.machine_type (as below) greatly improves
        # performance but could lead slightly incorrect values if many jobs
        # are being scheduled using mixed machine types. We work around
        # this by including the 'multi' machine type (which is the name of
        # the queue Inktank uses for such jobs).
        query = query.filter(
            RecentJob.machine_type.in_((machine_type, 'multi')))
        query = query.filter(Node.machine_type == machine_type)
    query = (query.join(RecentJob.target_nodes)
                  .group_by(Node)
                  .group_by(RecentJob.status))
    all_stats = {}
    for name, status, count in query.all():
        all_stats.setdefault(name, {})[status] = count

    def total_jobs(item):
        # Sort key: sum of all status counts for one node.
        return sum(item[1].values())

    return OrderedDict(sorted(all_stats.items(), key=total_jobs))
def index_post(self):
    # save to DB here
    try:
        name = request.json.get('name')
    except ValueError:
        rollback()
        error('/errors/invalid/', 'could not decode JSON body')
    if not name:
        error('/errors/invalid/', "could not find required key: 'name'")
    # Run names must be unique.
    if Run.query.filter_by(name=name).first():
        error('/errors/invalid/', "run with name %s already exists" % name)
    else:
        self._create_run(name)
        return dict()
def index_post(self):
    # save to DB here
    try:
        name = request.json.get('name')
    except ValueError:
        rollback()
        error('/errors/invalid/', 'could not decode JSON body')
    if not name:
        error('/errors/invalid/', "could not find required key: 'name'")
    # Run names must be unique.
    if Run.query.filter_by(name=name).first():
        error('/errors/invalid/', "run with name %s already exists" % name)
    else:
        log.info("Creating run: %s", name)
        Run(name)
        return dict()
def lock_many(self):
    """GET handler: locking many nodes is only supported via POST."""
    error('/errors/invalid/', "this URI only supports POST requests")
def unlock_many(self):
    """GET handler: unlocking many nodes is only supported via POST."""
    error("/errors/invalid/", "this URI only supports POST requests")
def index(self):
    """Return the node as a JSON dict, or 404 if it does not exist."""
    if not self.node:
        error('/errors/not_found/', 'node not found')
    return self.node.__json__()
def index(self):
    """Return the node as a JSON dict, or 404 if it does not exist."""
    if not self.node:
        error("/errors/not_found/", "node not found")
    return self.node.__json__()