def run(self, args): super(ImportNodesCommand, self).run(args) response = requests.get(self.lockserver) nodes_json = response.json() print "Found {count} nodes to import".format(count=len(nodes_json)) out("LOADING ENVIRONMENT") self.load_app() try: out("STARTING A TRANSACTION...") start() self.vm_hosts = {} count = len(nodes_json) for i in range(count): node_json = nodes_json[i] verb = self.update_node(node_json) print "{verb} {n}/{count}\r".format(verb=verb, n=i+1, count=count), print self.set_vm_hosts() except: rollback() out("ROLLING BACK... ") raise else: out("COMMITING... ") commit()
def run(self, args): super(SetTargetsCommand, self).run(args) out("LOADING ENVIRONMENT") self.load_app() out("STARTING A TRANSACTION...") start() count = Job.query.filter(~Job.target_nodes.any()).count() jobs_q = Job.query.filter(~Job.target_nodes.any()) n = 0 for job in jobs_q.yield_per(10): try: n += 1 print "Processing Job {n}/{t}\r".format(n=n, t=count), self._populate(job) except: print rollback() out("ROLLING BACK... ") raise else: flush() print nodes = Node.query.filter(Node.machine_type.is_(None)).all() for node in nodes: node.machine_type = self.parse_machine_type(node.name) commit()
def run(self, args):
    """Import node records from the lock server into the local DB."""
    super(ImportNodesCommand, self).run(args)
    response = requests.get(self.lockserver)
    nodes_json = response.json()
    count = len(nodes_json)
    print("Found {count} nodes to import".format(count=count))
    out("LOADING ENVIRONMENT")
    self.load_app()
    try:
        out("STARTING A TRANSACTION...")
        start()
        self.vm_hosts = {}
        for i, node_json in enumerate(nodes_json, start=1):
            verb = self.update_node(node_json)
            # BUG FIX: the leftover Python-2 trailing comma after print(...)
            # no longer suppresses the newline in Python 3 (it just built a
            # throwaway tuple), so the \r never overwrote the progress line.
            # Use end='' to keep the in-place progress display.
            print("{verb} {n}/{count}\r".format(verb=verb, n=i, count=count),
                  end='')
        print()
        self.set_vm_hosts()
    except:
        # Bare except on purpose: roll back on *any* failure, then re-raise.
        rollback()
        out("ROLLING BACK... ")
        raise
    else:
        # (typo fix: was "COMMITING"; matches SetStatusCommand's spelling)
        out("COMMITTING... ")
        commit()
def commit(self):
    """Commit the current models transaction; on failure, roll back and re-raise."""
    try:
        models.commit()
    except:
        # Bare except: any failure triggers a rollback, and the original
        # exception is always re-raised to the caller.
        print("Rolling back")
        models.rollback()
        raise
def index_post(self):
    """
    Create a new node
    """
    # A malformed request body raises ValueError while decoding.
    try:
        payload = request.json
        name = payload.get('name')
    except ValueError:
        rollback()
        error('/errors/invalid/', 'could not decode JSON body')
    # we allow empty data to be pushed
    if not name:
        error('/errors/invalid/', "could not find required key: 'name'")
    existing = Node.filter_by(name=name).first()
    if existing:
        error('/errors/invalid/', "Node with name %s already exists" % name)
    else:
        self.node = Node(name=name)
        try:
            self.node.update(payload)
        except PaddlesError as exc:
            error(exc.url, str(exc))
        log.info("Created {node}: {data}".format(
            node=self.node,
            data=payload,
        ))
    return dict()
def index_post(self):
    """
    We create new jobs associated to this run here
    """
    try:
        payload = request.json
        job_id = payload.get('job_id')
    except ValueError:
        rollback()
        error('/errors/invalid/', 'could not decode JSON body')
    # we allow empty data to be pushed
    if not job_id:
        error('/errors/invalid/', "could not find required key: 'job_id'")
    # Normalize the id to a string, both locally and in the stored payload.
    job_id = payload['job_id'] = str(job_id)
    # Only load the columns needed for the duplicate check.
    dupe_q = Job.query.options(load_only('id', 'job_id')).filter_by(
        job_id=job_id, run=self.run)
    if dupe_q.first():
        error('/errors/invalid/',
              "job with job_id %s already exists" % job_id)
    else:
        log.info("Creating job: %s/%s",
                 payload.get('name', '<no name!>'), job_id)
        self.job = Job(payload, self.run)
    return dict()
def commit(self):
    """Commit the current models transaction; on failure, roll back and re-raise."""
    try:
        models.commit()
    except:
        # Bare except: any failure triggers a rollback, and the original
        # exception is always re-raised to the caller.
        print "Rolling back"
        models.rollback()
        raise
def lock_many(cls, count, locked_by, machine_type, description=None,
              os_type=None, os_version=None, arch=None):
    """
    Lock `count` available nodes of the given machine type(s) in one shot.

    Raises ResourceUnavailableError if fewer than `count` match, and
    RaceConditionError if the commit collides with a concurrent locker.
    """
    update_dict = dict(
        locked=True,
        locked_by=locked_by,
        description=description,
    )
    query = cls.query
    # "a|b" means the caller accepts any of several machine types.
    if "|" in machine_type:
        query = query.filter(Node.machine_type.in_(machine_type.split("|")))
    else:
        query = query.filter(Node.machine_type == machine_type)
    # Optional attribute filters, applied only when a value was given.
    for column, wanted in ((Node.os_type, os_type),
                           (Node.os_version, os_version),
                           (Node.arch, arch)):
        if wanted:
            query = query.filter(column == wanted)
    query = query.filter(Node.up.is_(True))
    # Find unlocked nodes
    query = query.filter(Node.locked.is_(False))
    nodes = query.limit(count).all()
    nodes_avail = len(nodes)
    if nodes_avail < count:
        raise ResourceUnavailableError(
            "only {count} nodes available".format(count=nodes_avail))
    for node in nodes:
        node.update(update_dict)
    try:
        commit()
    except (sqlalchemy.exc.DBAPIError, sqlalchemy.exc.InvalidRequestError):
        # Someone else locked one of these nodes between query and commit.
        rollback()
        raise RaceConditionError("error locking nodes. please retry request.")
    return nodes
def latest_runs(fields=None, count=conf.default_latest_runs_count, page=1):
    """Return one page of the most recently posted runs, optionally sliced to `fields`."""
    query = Run.query.order_by(Run.posted.desc())
    query = offset_query(query, page_size=count, page=page)
    runs = query.all()
    if fields:
        try:
            return [run.slice(fields) for run in runs]
        except AttributeError:
            # An unknown field name surfaces as AttributeError from slice().
            rollback()
            error('/errors/invalid/', 'an invalid field was specified')
    return list(runs)
def index(self, status='', fields=''):
    """List this run's jobs, newest first, optionally filtered by status and sliced to `fields`."""
    job_query = Job.filter_by(run=self.run)
    if status:
        job_query = job_query.filter_by(status=status)
    jobs = job_query.order_by(Job.posted.desc()).all()
    if not fields:
        return jobs
    try:
        return [job.slice(fields) for job in jobs]
    except AttributeError:
        # An unknown field name surfaces as AttributeError from slice().
        rollback()
        error('/errors/invalid/', 'an invalid field was specified')
def index_post(self):
    """Create a run from the POSTed name unless one already exists."""
    # save to DB here
    try:
        name = request.json.get('name')
    except ValueError:
        rollback()
        error('/errors/invalid/', 'could not decode JSON body')
    if not name:
        error('/errors/invalid/', "could not find required key: 'name'")
    existing = Run.query.filter_by(name=name).first()
    if existing:
        error('/errors/invalid/', "run with name %s already exists" % name)
    else:
        self._create_run(name)
        return dict()
def index_post(self):
    """Create a new Run from the POSTed name unless one already exists."""
    # save to DB here
    try:
        name = request.json.get('name')
    except ValueError:
        rollback()
        error('/errors/invalid/', 'could not decode JSON body')
    if not name:
        error('/errors/invalid/', "could not find required key: 'name'")
    existing = Run.query.filter_by(name=name).first()
    if existing:
        error('/errors/invalid/', "run with name %s already exists" % name)
    else:
        log.info("Creating run: %s", name)
        Run(name)
        return dict()
def run(self, args):
    """Create the full DB schema (all model tables) inside one transaction."""
    super(PopulateCommand, self).run(args)
    out("LOADING ENVIRONMENT")
    self.load_app()
    out("BUILDING SCHEMA")
    try:
        out("STARTING A TRANSACTION...")
        models.start()
        # Emit CREATE TABLE for every model bound to the shared metadata.
        models.Base.metadata.create_all(conf.sqlalchemy.engine)
    except:
        # Bare except on purpose: roll back on *any* failure, then re-raise.
        models.rollback()
        out("ROLLING BACK... ")
        raise
    else:
        out("COMMITING... ")
        models.commit()
def run(self, args):
    """Re-parse every run in the database inside a single transaction."""
    super(ReparseCommand, self).run(args)
    out("LOADING ENVIRONMENT")
    self.load_app()
    try:
        out("STARTING A TRANSACTION...")
        models.start()
        for run in models.Run.query.all():
            self._reparse(run)
    except:
        # Bare except on purpose: roll back on *any* failure, then re-raise.
        models.rollback()
        out("ROLLING BACK... ")
        raise
    else:
        out("COMMITING... ")
        models.commit()
def lock_many(cls, count, locked_by, machine_type, description=None,
              os_type=None, os_version=None, arch=None):
    """
    Lock `count` available nodes of the given machine type(s) in one shot.

    Raises ResourceUnavailableError if fewer than `count` match, and
    RaceConditionError if the commit collides with a concurrent locker.
    """
    fields_to_set = dict(
        locked=True,
        locked_by=locked_by,
        description=description,
    )
    q = cls.query
    # "a|b" means the caller accepts any of several machine types.
    if '|' in machine_type:
        q = q.filter(Node.machine_type.in_(machine_type.split('|')))
    else:
        q = q.filter(Node.machine_type == machine_type)
    if os_type:
        q = q.filter(Node.os_type == os_type)
    if os_version:
        q = q.filter(Node.os_version == os_version)
    if arch:
        q = q.filter(Node.arch == arch)
    # Find unlocked nodes
    q = q.filter(Node.up.is_(True)).filter(Node.locked.is_(False)).limit(count)
    nodes = q.all()
    nodes_avail = len(nodes)
    if nodes_avail < count:
        raise ResourceUnavailableError(
            "only {count} nodes available".format(count=nodes_avail))
    for node in nodes:
        node.update(fields_to_set)
    try:
        commit()
    except (sqlalchemy.exc.DBAPIError, sqlalchemy.exc.InvalidRequestError):
        # Someone else locked one of these nodes between query and commit.
        rollback()
        raise RaceConditionError(
            "error locking nodes. please retry request.")
    return nodes
def run(self, args):
    """Delete the run named on the command line, inside one transaction."""
    super(DeleteCommand, self).run(args)
    out("LOADING ENVIRONMENT")
    self.load_app()
    try:
        out("STARTING A TRANSACTION...")
        models.start()
        # .one() raises if the run is missing or the name matches several rows.
        target = Run.query.filter(Run.name == args.name).one()
        out("Deleting run named %s" % target.name)
        target.delete()
    except:
        # Bare except on purpose: roll back on *any* failure, then re-raise.
        models.rollback()
        out("ROLLING BACK... ")
        raise
    else:
        out("COMMITING... ")
        models.commit()
def date_from_string(date_str, out_fmt=datetime_format, hours='00:00:00'):
    """
    Parse a date string — or the keywords 'today' / 'yesterday' — and
    return a ``(datetime, date_str)`` tuple.

    When out_fmt is the module's datetime_format, the time-of-day given
    by `hours` is appended to the date portion before the final parse.
    On a bad date string, rolls back and reports a format error.
    """
    try:
        if date_str == 'today':
            date = datetime.date.today()
            date_str = date.strftime(date_format)
        elif date_str == 'yesterday':
            # BUG FIX: date.replace(day=date.day - 1) raised ValueError on
            # the first day of any month (day 0 is invalid), which the
            # except clause below then misreported as a format error.
            # timedelta handles month/year rollover correctly.
            date = datetime.date.today() - datetime.timedelta(days=1)
            date_str = date.strftime(date_format)
        else:
            date = datetime.datetime.strptime(date_str, date_format)
        if out_fmt == datetime_format:
            date_str = '{date}_{time}'.format(date=date_str, time=hours)
        date = datetime.datetime.strptime(date_str, out_fmt)
        return (date, date_str)
    except ValueError:
        rollback()
        error('/errors/invalid/', 'date format must match %s' % date_format)
def run(self, args):
    """De-duplicate runs and jobs whose names match the given pattern."""
    super(DedupeCommand, self).run(args)
    out("LOADING ENVIRONMENT")
    self.load_app()
    try:
        out("STARTING A TRANSACTION...")
        models.start()
        # Collect just the names of the matching runs.
        name_rows = Run.query.filter(Run.name.like(args.pattern)).values(Run.name)
        names = [row[0] for row in name_rows]
        out("Found {count} runs to process".format(count=len(names)))
        for name in names:
            self._fix_dupe_runs(name)
            self._fix_dupe_jobs(name)
    except:
        # Bare except on purpose: roll back on *any* failure, then re-raise.
        models.rollback()
        out("ROLLING BACK... ")
        raise
    else:
        out("COMMITING... ")
        models.commit()
def index_post(self):
    """
    We create new jobs associated to this run here
    """
    try:
        payload = request.json
        job_id = payload.get('job_id')
    except ValueError:
        rollback()
        error('/errors/invalid/', 'could not decode JSON body')
    # we allow empty data to be pushed
    if not job_id:
        error('/errors/invalid/', "could not find required key: 'job_id'")
    # Lazily create the parent run when it does not exist yet.
    self.run = self._find_run()
    if not self.run:
        self._create_run()
    # Normalize the id to a string, both locally and in the stored payload.
    job_id = payload['job_id'] = str(job_id)
    self._create_job(job_id, payload)
    return dict()
def run(self, args): super(SetStatusCommand, self).run(args) out("LOADING ENVIRONMENT") self.load_app() models.start() try: out("SETTING RUN STATUSES...") running = Run.query.filter(Run.status == 'running') to_fix = [] for run in running: if run.jobs.filter(Job.status == 'running').count() == 0: to_fix.append(run) self._set_run_status(run) print "" out("Updated {count} runs...".format(count=len(to_fix))) except: models.rollback() out("ROLLING BACK...") raise else: out("COMMITTING...") models.commit()
def teardown(self):
    """Roll back any uncommitted DB state and dispose the model binding."""
    # Tear down and dispose the DB binding
    # rollback() discards uncommitted changes; clear() drops the binding so
    # the next setup starts clean.
    pmodels.rollback()
    pmodels.clear()
def teardown_class(cls):
    """Roll back any uncommitted DB state and dispose the model binding for the class."""
    # NOTE(review): takes `cls`, so presumably decorated @classmethod at the
    # definition site (not visible in this chunk) -- confirm.
    pmodels.rollback()
    pmodels.clear()