def run(self, args):
    """Expire stale running and queued jobs inside a single transaction."""
    super(ExpireJobsCommand, self).run(args)
    self.load_app()
    # Open the DB session before any expiry work.
    models.start()
    self.expire_running()
    self.expire_queued()
    self.commit()
def run(self, args):
    # Backfill Job.target_nodes for jobs that have none, then infer
    # machine_type for nodes missing it — all in one transaction.
    super(SetTargetsCommand, self).run(args)
    out("LOADING ENVIRONMENT")
    self.load_app()
    out("STARTING A TRANSACTION...")
    start()
    # Jobs with no associated target nodes yet.
    count = Job.query.filter(~Job.target_nodes.any()).count()
    jobs_q = Job.query.filter(~Job.target_nodes.any())
    n = 0
    for job in jobs_q.yield_per(10):  # stream rows in batches of 10
        try:
            n += 1
            # Python 2 print statement; the trailing comma suppresses the
            # newline so '\r' rewrites the same progress line in place.
            print "Processing Job {n}/{t}\r".format(n=n, t=count),
            self._populate(job)
        except:
            print  # terminate the progress line before reporting
            rollback()
            out("ROLLING BACK... ")
            raise
        else:
            flush()
    print
    # Nodes lacking a machine_type: derive it from the node name.
    nodes = Node.query.filter(Node.machine_type.is_(None)).all()
    for node in nodes:
        node.machine_type = self.parse_machine_type(node.name)
    commit()
def run(self, args):
    # Print per-day scheduled/completed job counts for the last N days.
    super(QueueStatsCommand, self).run(args)
    # +1 day: each reported day is compared against the previous one.
    days = int(args.days) + 1
    self.load_app()
    models.start()
    today = date.today()
    day_objs = []
    # Build the day list oldest-first, ending with today.
    for day_num in range(days)[::-1]:
        day = date.fromordinal(today.toordinal() - day_num)
        day_objs.append(day)
    job_counts = {}
    for day in day_objs:
        if day_objs[0] == day:
            # The oldest day has no predecessor to compare against.
            continue
        prev_day = day_objs[day_objs.index(day) - 1]
        jobs_sched = self.jobs_scheduled_between(prev_day, day).count()
        jobs_done = self.jobs_completed_between(prev_day, day).count()
        if jobs_sched == 0:
            percent = 0
        else:
            percent = float(jobs_done) / float(jobs_sched) * 100
        job_counts[day] = OrderedDict(
            scheduled=jobs_sched,
            completed=jobs_done,
            percent=percent,
        )
        # Python 2 print statement.
        print "{day}: {sched: >4} scheduled, {done: >4} completed ({percent:.0f}%)".format(
            day=day,
            sched=jobs_sched,
            done=jobs_done,
            percent=percent,
        )
def run(self, args):
    # Import node records from the lock server into the local DB,
    # creating/updating each node inside a single transaction.
    super(ImportNodesCommand, self).run(args)
    response = requests.get(self.lockserver)
    nodes_json = response.json()
    print "Found {count} nodes to import".format(count=len(nodes_json))
    out("LOADING ENVIRONMENT")
    self.load_app()
    try:
        out("STARTING A TRANSACTION...")
        start()
        self.vm_hosts = {}
        count = len(nodes_json)
        for i in range(count):
            node_json = nodes_json[i]
            verb = self.update_node(node_json)
            # Python 2 print statement; the trailing comma suppresses the
            # newline so '\r' rewrites the progress line in place.
            print "{verb} {n}/{count}\r".format(verb=verb, n=i+1, count=count),
        print
        self.set_vm_hosts()
    except:
        rollback()
        out("ROLLING BACK... ")
        raise
    else:
        out("COMMITING... ")
        commit()
def run(self, args):
    """Import node records from the lock server into the local DB.

    Fetches the node list over HTTP, then creates or updates each node
    inside a single transaction, rolling back on any failure.
    """
    super(ImportNodesCommand, self).run(args)
    response = requests.get(self.lockserver)
    nodes_json = response.json()
    print("Found {count} nodes to import".format(count=len(nodes_json)))
    out("LOADING ENVIRONMENT")
    self.load_app()
    try:
        out("STARTING A TRANSACTION...")
        start()
        self.vm_hosts = {}
        count = len(nodes_json)
        for i, node_json in enumerate(nodes_json, start=1):
            verb = self.update_node(node_json)
            # BUG FIX: the original had a Python-2 trailing comma after
            # print(...), which in Python 3 does NOT suppress the newline
            # (it just builds a throwaway tuple), breaking the '\r'
            # progress-line rewrite. end="" restores that behavior and
            # flush=True makes the unterminated line actually appear.
            print("{verb} {n}/{count}\r".format(verb=verb, n=i, count=count),
                  end="", flush=True)
        print()
        self.set_vm_hosts()
    except:
        # Bare except is intentional here: any failure (including
        # KeyboardInterrupt) must roll back before re-raising.
        rollback()
        out("ROLLING BACK... ")
        raise
    else:
        out("COMMITING... ")
        commit()
def run(self, args):
    """Expire stale jobs: parse cutoffs from args, then sweep both states."""
    super(ExpireJobsCommand, self).run(args)
    running_minutes = int(args.running)
    queued_days = int(args.queued)
    # Cutoff for jobs stuck in the 'running' state.
    self.running_minutes = running_minutes
    self.running_delta = timedelta(minutes=running_minutes)
    # Cutoff for jobs stuck in the 'queued' state.
    self.queued_days = queued_days
    self.queued_delta = timedelta(days=queued_days)
    self.load_app()
    models.start()
    self.expire_running()
    self.expire_queued()
    self.commit()
def setup(self):
    """Stand up a fake app wired to the test database engine."""
    cfg = deepcopy(self.config)
    # Point SQLAlchemy at the test engine; NullPool prevents connection
    # reuse between tests.
    cfg['sqlalchemy'] = {
        'url': self.engine_url,
        'encoding': 'utf-8',
        'poolclass': NullPool,
    }
    self.app = self.load_test_app(cfg)
    pmodels.start()
def run(self, args):
    """Print run/job paths for the most recent jobs targeting a node."""
    super(NodeJobsCommand, self).run(args)
    self.load_app()
    models.start()
    name_prefix = args.node
    limit = int(args.job_count)
    # Exactly one node may match the given name prefix; .one() raises
    # otherwise.
    node = Node.query.filter(Node.name.startswith(name_prefix)).one()
    # Most recently updated jobs on that node, newest first.
    query = Job.query.filter(Job.target_nodes.contains(node))
    query = query.filter(~Job.updated.is_(None))
    query = query.order_by(Job.updated.desc()).limit(limit)
    if not query.count():
        print("No jobs found for %s" % name_prefix)
        return
    for job in query:
        print('%s/%s/' % (job.run.name, job.job_id))
def run(self, args):
    """Create every model table in the configured database."""
    super(PopulateCommand, self).run(args)
    out("LOADING ENVIRONMENT")
    self.load_app()
    out("BUILDING SCHEMA")
    try:
        out("STARTING A TRANSACTION...")
        models.start()
        # Emit CREATE TABLE for all declared models.
        models.Base.metadata.create_all(conf.sqlalchemy.engine)
    except:
        # Roll back on any failure before re-raising.
        models.rollback()
        out("ROLLING BACK... ")
        raise
    else:
        out("COMMITING... ")
        models.commit()
def run(self, args):
    # Print run/job paths for the most recent jobs targeting a node.
    super(NodeJobsCommand, self).run(args)
    self.load_app()
    models.start()
    node_name = args.node
    job_count = int(args.job_count)
    # Exactly one node may match the name prefix; .one() raises otherwise.
    node_obj = Node.query.filter(Node.name.startswith(node_name)).one()
    # Most recently updated jobs on that node, newest first.
    jobs = Job.query.filter(
        Job.target_nodes.contains(node_obj)).filter(
        ~Job.updated.is_(None)).order_by(
        Job.updated.desc()).limit(job_count)
    if not jobs.count():
        # Python 2 print statement.
        print "No jobs found for %s" % node_name
        return
    for job in jobs:
        print '%s/%s/' % (job.run.name, job.job_id)
def run(self, args):
    """Re-run the parser over every Run row, in one transaction."""
    super(ReparseCommand, self).run(args)
    out("LOADING ENVIRONMENT")
    self.load_app()
    try:
        out("STARTING A TRANSACTION...")
        models.start()
        for run in models.Run.query.all():
            self._reparse(run)
    except:
        # Roll back on any failure before re-raising.
        models.rollback()
        out("ROLLING BACK... ")
        raise
    else:
        out("COMMITING... ")
        models.commit()
def run(self, args):
    """Delete the run whose name exactly matches args.name."""
    super(DeleteCommand, self).run(args)
    out("LOADING ENVIRONMENT")
    self.load_app()
    try:
        out("STARTING A TRANSACTION...")
        models.start()
        # .one() raises if the name is missing or ambiguous; the except
        # clause below turns that into a rollback.
        run = Run.query.filter(Run.name == args.name).one()
        out("Deleting run named %s" % run.name)
        run.delete()
    except:
        models.rollback()
        out("ROLLING BACK... ")
        raise
    else:
        out("COMMITING... ")
        models.commit()
def run(self, args):
    """De-duplicate runs and jobs whose names match args.pattern."""
    super(DedupeCommand, self).run(args)
    out("LOADING ENVIRONMENT")
    self.load_app()
    try:
        out("STARTING A TRANSACTION...")
        models.start()
        matching = Run.query.filter(Run.name.like(args.pattern))
        # Pull just the name column, not whole Run rows.
        names = [row[0] for row in matching.values(Run.name)]
        out("Found {count} runs to process".format(count=len(names)))
        for run_name in names:
            self._fix_dupe_runs(run_name)
            self._fix_dupe_jobs(run_name)
    except:
        # Roll back on any failure before re-raising.
        models.rollback()
        out("ROLLING BACK... ")
        raise
    else:
        out("COMMITING... ")
        models.commit()
def run(self, args):
    # Fix Run rows stuck in 'running' whose jobs have all stopped.
    super(SetStatusCommand, self).run(args)
    out("LOADING ENVIRONMENT")
    self.load_app()
    models.start()
    try:
        out("SETTING RUN STATUSES...")
        running = Run.query.filter(Run.status == 'running')
        to_fix = []
        for run in running:
            # A run with zero jobs still 'running' has actually finished.
            if run.jobs.filter(Job.status == 'running').count() == 0:
                to_fix.append(run)
                self._set_run_status(run)
        # Python 2 print statement: emit a blank line.
        print ""
        out("Updated {count} runs...".format(count=len(to_fix)))
    except:
        models.rollback()
        out("ROLLING BACK...")
        raise
    else:
        out("COMMITTING...")
        models.commit()
def run(self, args):
    # Dump JSON stats for jobs completed per day over the last N days,
    # optionally restricted to one machine type.
    super(NodeStatsCommand, self).run(args)
    # +1 day: each reported day needs the previous day as window start.
    days = int(args.days) + 1
    self.machine_type = args.machine_type
    self.load_app()
    models.start()
    today = date.today()
    day_objs = []
    # Build the day list oldest-first, ending with today.
    for day_num in range(days)[::-1]:
        day = date.fromordinal(today.toordinal() - day_num)
        day_objs.append(day)
    all_jobs = list()
    for day in day_objs:
        if day_objs[0] == day:
            # The oldest day has no predecessor to compare against.
            continue
        prev_day = day_objs[day_objs.index(day) - 1]
        jobs_done = self.jobs_completed_between(prev_day, day)
        for job in jobs_done:
            if not job.started:
                # Skip jobs that never started.
                continue
            job_info = self.get_job_info(job)
            all_jobs.append(job_info)
    # Python 2 print statement.
    print json.dumps(all_jobs, indent=2)
def run(self, args):
    """Print JSON stats for jobs completed per day over the last args.days days.

    Builds consecutive (prev_day, day) windows ending today, collects info
    for every job completed in each window that actually started, and
    dumps the whole list as indented JSON.
    """
    super(NodeStatsCommand, self).run(args)
    # One extra day: each reported day needs the previous day as the
    # start of its window.
    days = int(args.days) + 1
    self.machine_type = args.machine_type
    self.load_app()
    models.start()
    today = date.today()
    # Oldest first, ending with today.
    day_objs = [date.fromordinal(today.toordinal() - day_num)
                for day_num in reversed(range(days))]
    all_jobs = []
    # Iterate consecutive-day pairs directly; replaces the original
    # O(n^2) day_objs.index() lookup and the skip-the-first-day sentinel
    # (dates here are unique, so the pairing is identical).
    for prev_day, day in zip(day_objs, day_objs[1:]):
        for job in self.jobs_completed_between(prev_day, day):
            if not job.started:
                # Jobs that never started carry no usable timing info.
                continue
            all_jobs.append(self.get_job_info(job))
    print(json.dumps(all_jobs, indent=2))
def teardown_method(self, meth):
    """Delete every Node row created during the preceding test."""
    start()
    Node.query.delete()
    commit()
def teardown_method(self, meth):
    """Wipe Job rows, then Run rows, created during the preceding test."""
    start()
    # Jobs go first — they reference their Run (see job.run elsewhere
    # in this file).
    Job.query.delete()
    Run.query.delete()
    commit()
def teardown_method(self, meth):
    """Delete every Node row created during the preceding test."""
    start()
    Node.query.delete()
    commit()