def handle_noargs(self, **options):
    """Handle working on a single project or looping over several.

    Modes, driven by ``options``:
      * ``view_batches`` -- list each cron batch in use and the projects
        that belong to it, then return (mutually exclusive with the
        other two options).
      * ``project`` -- process that single project.
      * ``cron_batches`` -- process every project in the given batches.

    Raises CommandError when the option combination is invalid.
    """
    # pop() reads and removes the key in one step (the original used
    # get() followed by del; behavior is identical, including KeyError
    # if the key is absent).
    project = options.pop("project")
    cron_batches = options.get("cron_batches")

    if options.get("view_batches"):
        if project or cron_batches:
            raise CommandError(
                "view_batches can not be used with project or cron_batch")
        # print out each batch that is in use, and the projects
        # that belong to it
        batches = PerformanceTestModel.get_projects_by_cron_batch()
        for key in sorted(batches.keys()):
            self.stdout.write("{0}: {1}\n".format(
                key, ", ".join(batches[key])))
        return

    if not (project or cron_batches):
        raise CommandError(
            "You must provide either a project or cron_batch value.")

    if project and cron_batches:
        raise CommandError(
            "You must provide either project or cron_batch, but not both.")

    if cron_batches:
        projects = PerformanceTestModel.get_cron_batch_projects(cron_batches)
    else:
        projects = [project]

    # NOTE(review): in cron_batches mode ``project`` is None here, so every
    # batch invocation shares the lock file suffixed "_None" -- confirm
    # that serializing all batch runs against each other is intended.
    lock = FileLock(self.LOCK_FILE + '_' + str(project))
    timeout_sec = 10
    try:
        lock.acquire(timeout=timeout_sec)
        try:
            self.stdout.write(
                "Starting for projects: {0}\n".format(", ".join(projects)))
            for p in projects:
                self.handle_project(p, **options)
            self.stdout.write(
                "Completed for {0} project(s).\n".format(len(projects)))
        finally:
            # Always release, even if a project raises mid-loop.
            lock.release()
    except AlreadyLocked:
        self.stdout.write("This command is already being run elsewhere. "
                          "Please try again later.\n")
    except LockTimeout:
        self.stdout.write("Lock timeout of {0} seconds exceeded. "
                          "Please try again later.\n".format(str(timeout_sec)))
def handle_noargs(self, **options):
    """Handle working on a single project or looping over several."""
    project = options.get("project")
    del options["project"]
    cron_batches = options.get("cron_batches")

    if options.get("view_batches"):
        if project or cron_batches:
            raise CommandError(
                "view_batches can not be used with project or cron_batch")
        # Show every cron batch currently in use together with the
        # projects assigned to it, then stop.
        batch_map = PerformanceTestModel.get_projects_by_cron_batch()
        for batch_name in sorted(batch_map):
            members = ", ".join(batch_map[batch_name])
            self.stdout.write("{0}: {1}\n".format(batch_name, members))
        return

    if not (project or cron_batches):
        raise CommandError(
            "You must provide either a project or cron_batch value.")
    if project and cron_batches:
        raise CommandError(
            "You must provide either project or cron_batch, but not both.")

    if cron_batches:
        projects = PerformanceTestModel.get_cron_batch_projects(cron_batches)
    else:
        projects = [project]

    # Guard the whole run with a per-project file lock.
    lock = FileLock(self.LOCK_FILE + '_' + str(project))
    timeout_sec = 10
    try:
        lock.acquire(timeout=timeout_sec)
        try:
            self.stdout.write("Starting for projects: {0}\n".format(
                ", ".join(projects)))
            for proj in projects:
                self.handle_project(proj, **options)
            self.stdout.write("Completed for {0} project(s).\n".format(
                len(projects)))
        finally:
            lock.release()
    except AlreadyLocked:
        self.stdout.write("This command is already being run elsewhere. "
                          "Please try again later.\n")
    except LockTimeout:
        self.stdout.write("Lock timeout of {0} seconds exceeded. "
                          "Please try again later.\n".format(str(timeout_sec)))
def get_json_blob_by_revisions(project, branch, gaia_revision,
                               gecko_revision, testId, test_type):
    """Return the JSON object blobs for the test runs that match the
    given branch / gaia-revision / gecko-revision / test combination.

    Looks up the matching test-run ids via PerformanceTestModel, then
    fetches their blobs via PerformanceTestRefDataModel; each model's
    connection is disconnected after use.
    """
    ptm = PerformanceTestModel(project)
    test_run_ids = ptm.get_test_run_ids_by_revisions(
        branch, gaia_revision, gecko_revision, testId, test_type)
    ptm.disconnect()

    ptrm = PerformanceTestRefDataModel(project)
    blobs = ptrm.get_object_json_blob_for_test_run(test_run_ids)
    # Bug fix: the original called ptm.disconnect() a second time here,
    # leaving the ref-data model's connection open.
    ptrm.disconnect()

    return blobs
def get_json_blob_by_revisions(
        project, branch, gaia_revision, gecko_revision, testId, test_type):
    """Return the JSON object blobs for the test runs that match the
    given branch / gaia-revision / gecko-revision / test combination.

    Looks up the matching test-run ids via PerformanceTestModel, then
    fetches their blobs via PerformanceTestRefDataModel; each model's
    connection is disconnected after use.
    """
    ptm = PerformanceTestModel(project)
    test_run_ids = ptm.get_test_run_ids_by_revisions(
        branch, gaia_revision, gecko_revision, testId, test_type
    )
    ptm.disconnect()

    ptrm = PerformanceTestRefDataModel(project)
    blobs = ptrm.get_object_json_blob_for_test_run(test_run_ids)
    # Bug fix: the original disconnected ptm a second time instead of
    # ptrm, so the ref-data connection was never closed.
    ptrm.disconnect()

    return blobs