def call_log(args):
    """
    Get workflow logs via the Cromwell API and print, for each task call, the
    shell command Cromwell executed (recovered from the 'script' file that
    lives next to the task's stderr file).

    :param args: log subparser arguments (uses .server and .workflow_id).
    :return: None
    """
    cromwell = Cromwell(host=args.server)
    res = cromwell.get('logs', args.workflow_id)
    # Fix: original used a Python 2-only print statement (`print res["calls"]`),
    # inconsistent with the print() calls used everywhere else in this file.
    print(res["calls"])

    sections = []
    # For each task, extract the command used.
    for task_name in res["calls"]:
        stderr = res["calls"][task_name][0]["stderr"]
        # The generated 'script' file sits in the same directory as stderr.
        script = "/".join(stderr.split("/")[:-1]) + "/script"
        with open(script, 'r') as f:
            command_log = f.read()
        sections.append("{}:\n\n{}\n\n".format(task_name, command_log))
    # Join once instead of repeated string concatenation in the loop.
    print("".join(sections))  # print to stdout
    return None
def call_abort(args):
    """
    Abort a workflow with a given workflow id.

    :param args: abort subparser args.
    :return: JSON containing abort response.
    """
    crom = Cromwell(host=args.server)
    logger.info("Abort requested")
    # Delegate directly to the client; the server's JSON response is returned as-is.
    return crom.stop_workflow(workflow_id=args.workflow_id)
class CromwellCloudUnitTests(unittest.TestCase):
    """Tests against a cloud-hosted Cromwell server (submits a real workflow per test)."""

    # NOTE(review): @classmethod on setUp/tearDown is unusual — unittest still
    # runs them around *every* test, but attributes land on the class object.
    # setUpClass/tearDownClass were probably intended; left unchanged here
    # because switching would change how often a workflow is submitted.
    @classmethod
    def setUp(self):
        resources = c.resource_dir
        self.logger = logging.getLogger('test_cromwell')
        hdlr = logging.FileHandler(os.path.join(c.log_dir, 'test_cromwell.log'))
        formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        hdlr.setFormatter(formatter)
        self.logger.addHandler(hdlr)
        self.logger.setLevel(logging.INFO)
        self.cromwell = Cromwell(c.cloud_server, 8000)
        self.json = os.path.join(resources, 'hello.json')
        self.wdl = os.path.join(resources, 'hello_world.wdl')
        self.logger.info('Resources: {}, {}'.format(self.wdl, self.json))
        self.wf = self.cromwell.jstart_workflow(self.wdl, self.json)
        self.logger.info('Workflow: {}'.format(self.wf))
        self.workflow_id = self.wf['id']
        self.labels = {'username': '******', 'foo': 'bar'}
        # Sleeping here to make sure workflow is started and has workflow ID
        # otherwise tests can fail.
        time.sleep(5)

    def test_get_version(self):
        """
        Simple test to make sure the server is responding.
        :return:
        """
        # NOTE(review): hard-coded IP — should presumably come from c.cloud_server;
        # confirm before changing.
        result = requests.get("http://35.193.85.62:8000/engine/v1/version")
        self.assertEqual(result.status_code, 200)

    def test_begin_workflow(self):
        self.logger.info('Testing start_workflow...')
        self.assertTrue('id' in self.wf and 'status' in self.wf)
        self.assertEqual(self.wf['status'], 'Submitted')

    def test_query_status(self):
        self.logger.info('Testing query_status...')
        result = self.cromwell.query_status(self.workflow_id)
        self.logger.info('Result: {}'.format(result))
        self.assertTrue('id' in result and 'status' in result)

    def test_query_metadata(self):
        self.logger.info('Testing query_metadata...')
        result = self.cromwell.query_metadata(self.workflow_id)
        self.logger.info('Result: {}'.format(result))
        self.assertTrue('id' in result and 'submission' in result)

    def test_label_workflow(self):
        r = self.cromwell.label_workflow(self.workflow_id, self.labels)
        # Fix: assertEquals is a deprecated alias of assertEqual.
        self.assertEqual(r.status_code, 200)

    @classmethod
    def tearDown(self):
        self.logger.info("Test done!")
def call_label(args):
    """
    Apply labels to a workflow that currently exists in the database.

    :param args: label subparser arguments
    :return:
    """
    crom = Cromwell(host=args.server)
    labels = kv_list_to_dict(args.label)
    resp = crom.label_workflow(workflow_id=args.workflow_id, labels=labels)
    # Anything other than a 200 means the server rejected the labels.
    if resp.status_code != 200:
        logger.critical("Unable to apply specified labels:\n{}".format(resp.content))
    else:
        print("Labels successfully applied:\n{}".format(resp.content))
def __init__(self, user, host, no_notify, verbose, interval, workflow_id=None):
    """Set up monitoring of *user*'s workflows on the given Cromwell *host*."""
    self.user = user
    self.host = host
    self.no_notify = no_notify
    self.verbose = verbose
    self.interval = interval
    self.workflow_id = workflow_id
    self.cromwell = Cromwell(host=host)
    self.messenger = Messenger(self.user)
    if user == "*":
        # Wildcard user: watch everyone's workflows and fan change events out
        # to the registered subscribers, tracking state in a local sqlite DB.
        self.event_subscribers = [
            EmailNotification(self.cromwell),
            SystemTestDownload(),
            Download(self.cromwell.host),
            GATKDownload(),
        ]
        engine = create_engine("sqlite:///" + config.workflow_db)
        Base.metadata.bind = engine
        session_factory = sessionmaker()
        session_factory.bind = engine
        self.session = session_factory()
def call_restart(args):
    """
    Call cromwell restart to restart a failed workflow.

    :param args: restart subparser arguments.
    :return:
    """
    logger.info("Restart requested")
    crom = Cromwell(host=args.server)
    result = crom.restart_workflow(workflow_id=args.workflow_id,
                                   disable_caching=args.disable_caching)
    # A successful restart returns a dict carrying the new workflow id.
    restarted = result is not None and "id" in result
    if restarted:
        msg = "Workflow restarted successfully; new workflow-id: " + str(result['id'])
        print(msg)
        logger.info(msg)
    else:
        msg = "Workflow was not restarted successfully; server response: " + str(result)
        print(msg)
        logger.critical(msg)
def call_run(args):
    """
    Optionally validates inputs and starts a workflow on the Cromwell execution engine if
    validation passes. Validator returns an empty list if valid, otherwise, a list of errors
    discovered.

    :param args: run subparser arguments.
    :return: JSON response with Cromwell workflow ID.
    """
    if args.validate:
        call_validate(args)

    # Prep labels and add user. kv_list_to_dict returns None when no labels were
    # supplied (fix: it was originally called twice and compared with `!= None`).
    labels_dict = kv_list_to_dict(args.label) or {}
    labels_dict['username'] = args.username
    cromwell = Cromwell(host=args.server)
    result = cromwell.jstart_workflow(wdl_file=args.wdl, json_file=args.json,
                                      dependencies=args.dependencies,
                                      disable_caching=args.disable_caching,
                                      extra_options=kv_list_to_dict(args.extra_options),
                                      bucket=args.bucket,
                                      custom_labels=labels_dict)

    print("-------------Cromwell Links-------------")
    links = get_cromwell_links(args.server, result['id'], cromwell.port)
    print(links['metadata'])
    print(links['timing'])
    logger.info("Metadata:{}".format(links['metadata']))
    logger.info("Timing Graph:{}".format(links['timing']))
    args.workflow_id = result['id']

    if args.monitor:
        # This sleep is to allow job to get started in Cromwell before labeling
        # or monitoring. Probably better ways to do this but for now this works.
        time.sleep(5)
        print("These will also be e-mailed to you when the workflow completes.")
        # call_monitor can race the server's bookkeeping just after submission;
        # retry up to 4 times on KeyError, stopping on the first success.
        for _ in range(4):
            try:
                call_monitor(args)
                break
            except KeyError as e:
                logger.debug(e)
    return result
def call_explain(args):
    """
    Print an explanation of a workflow: its status, any additional parameters,
    and stdout/stderr logs of failed jobs, plus metadata/timing links.

    :param args: explain subparser arguments (uses .server, .workflow_id, .input).
    :return: None
    """
    logger.info("Explain requested")
    cromwell = Cromwell(host=args.server)
    (result, additional_res, stdout_res) = cromwell.explain_workflow(workflow_id=args.workflow_id,
                                                                     include_inputs=args.input)

    def my_safe_repr(object, context, maxlevels, level):
        # Coerce unicode values to str so pprint output has no u'' prefixes
        # (Python 2; relies on pprint's private _type/_safe_repr helpers).
        typ = pprint._type(object)
        if typ is unicode:
            object = str(object)
        return pprint._safe_repr(object, context, maxlevels, level)

    printer = pprint.PrettyPrinter()
    # Override the printer's formatter with the unicode-stripping variant above.
    printer.format = my_safe_repr
    if result is not None:
        print("-------------Workflow Status-------------")
        printer.pprint(result)
        if len(additional_res) > 0:
            print("-------------Additional Parameters-------------")
            printer.pprint(additional_res)
        if len(stdout_res) > 0:
            # One stdout/stderr section per failed job.
            for log in stdout_res["failed_jobs"]:
                print("-------------Failed Stdout-------------")
                print ("Shard: "+ log["stdout"]["label"])
                print (log["stdout"]["name"] + ":")
                print (log["stdout"]["log"])
                print ("-------------Failed Stderr-------------")
                print ("Shard: " + log["stderr"]["label"])
                print (log["stderr"]["name"] + ":")
                print (log["stderr"]["log"])
        print("-------------Cromwell Links-------------")
        links = get_cromwell_links(args.server, result['id'], cromwell.port)
        print (links['metadata'])
        print (links['timing'])
    else:
        print("Workflow not found.")
    # Downstream callers re-use args; flag monitoring as requested.
    args.monitor = True
    return None
def setUp(self):
    """Per-test setup: configure file logging and submit a hello-world workflow."""
    resources = c.resource_dir
    self.logger = logging.getLogger('test_cromwell')
    handler = logging.FileHandler(os.path.join(c.log_dir, 'test_cromwell.log'))
    handler.setFormatter(
        logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
    self.logger.addHandler(handler)
    self.logger.setLevel(logging.INFO)
    self.cromwell = Cromwell(c.cloud_server, 8000)
    self.json = os.path.join(resources, 'hello.json')
    self.wdl = os.path.join(resources, 'hello_world.wdl')
    self.logger.info('Resources: {}, {}'.format(self.wdl, self.json))
    self.wf = self.cromwell.jstart_workflow(self.wdl, self.json)
    self.logger.info('Workflow: {}'.format(self.wf))
    self.workflow_id = self.wf['id']
    self.labels = {'username': '******', 'foo': 'bar'}
    # Give Cromwell a moment to register the workflow ID; without this,
    # tests that query the workflow can fail.
    time.sleep(5)
def call_query(args):
    """
    Get various types of data on a particular workflow ID.

    :param args: query subparser arguments.
    :return: A list of json responses based on queries selected by the user.
    """
    cromwell = Cromwell(host=args.server)
    responses = []
    # Fix: the original condition was `a == None or b == "None" and not c`,
    # which (since `and` binds tighter than `or`) fell through to call_list
    # even when the user supplied --label without a workflow id.
    no_workflow_id = args.workflow_id is None or args.workflow_id == "None"
    if no_workflow_id and not args.label:
        return call_list(args)
    if args.label:
        logger.info("Label query requested.")
        labeled = cromwell.query_labels(labels=kv_list_to_dict(args.label))
        return labeled
    if args.status:
        logger.info("Status requested.")
        responses.append(cromwell.query_status(args.workflow_id))
    if args.metadata:
        logger.info("Metadata requested.")
        responses.append(cromwell.query_metadata(args.workflow_id))
    if args.logs:
        logger.info("Logs requested.")
        responses.append(cromwell.query_logs(args.workflow_id))
    logger.debug("Query Results:\n" + str(responses))
    return responses
def is_user_workflow(host, user, workflow_id):
    """
    A top-level function that returns a workflow if it matches the user workflow. This can't be
    an instance method of Monitor because we run into serializing issues otherwise. See:
    https://stackoverflow.com/questions/26249442/can-i-use-multiprocessing-pool-in-a-method-of-a-class

    :param host: cromwell server
    :param user: user name to monitor
    :param workflow_id: workflow
    :return: The workflow_id if the user owns the workflow, otherwise None.
    """
    metadata = Cromwell(host=host).query_metadata(workflow_id)
    try:
        # The submitted inputs JSON carries the owning user's name.
        owner = json.loads(metadata['submittedFiles']['inputs'])['user']
    except KeyError:
        return None
    if owner == user:
        return workflow_id
class MyTestCase(unittest.TestCase):
    """End-to-end check that Monitor follows a workflow to a terminal state."""

    @classmethod
    def setUp(self):
        """Point a Cromwell client at the on-prem server and locate test resources."""
        res_dir = c.resource_dir
        self.cromwell = Cromwell(host='btl-cromwell')
        self.json = os.path.join(res_dir, 'hello_world_on_prem.json')
        self.wdl = os.path.join(res_dir, 'hello_world_on_prem.wdl')

    def test_monitor_workflow(self):
        """Submit hello-world and expect monitor_workflow to return 0 at completion."""
        monitor = Monitor(user='******', host='btl-cromwell', no_notify=False,
                          verbose=True, interval=5)
        wf = self.cromwell.jstart_workflow(self.wdl, self.json)
        time.sleep(2)
        workflow_id = wf['id']
        self.assertEqual(0, monitor.monitor_workflow(workflow_id))

    @classmethod
    def tearDown(self):
        print("Done!")
def setUp(self):
    """Point a Cromwell client at the on-prem server and locate test resources."""
    res_dir = c.resource_dir
    self.cromwell = Cromwell(host='btl-cromwell')
    self.json = os.path.join(res_dir, 'hello_world_on_prem.json')
    self.wdl = os.path.join(res_dir, 'hello_world_on_prem.wdl')
class Monitor:
    """
    A class for monitoring a user's workflows, providing status reports at regular
    intervals as well as e-mail notification.
    """
    def __init__(self, user, host, no_notify, verbose, interval, workflow_id=None):
        """Set up monitoring of *user*'s workflows on the given Cromwell *host*."""
        self.host = host
        self.user = user
        self.interval = interval
        self.cromwell = Cromwell(host=host)
        self.messenger = Messenger(self.user)
        self.no_notify = no_notify
        self.verbose = verbose
        self.workflow_id = workflow_id
        if user == "*":
            # Wildcard user: watch everyone's workflows, fan change events out to
            # subscribers, and track state in a local sqlite DB.
            self.event_subscribers = [EmailNotification(self.cromwell),
                                      SystemTestDownload(),
                                      Download(self.cromwell.host),
                                      GATKDownload()]
            engine = create_engine("sqlite:///" + config.workflow_db)
            Base.metadata.bind = engine
            DBSession = sessionmaker()
            DBSession.bind = engine
            self.session = DBSession()

    # Fix: this method was originally defined TWICE in the class (first with
    # running_jobs=True for the wildcard user, then again with running_jobs=False).
    # The second definition silently shadowed the first, so only this variant
    # ever ran; the dead duplicate has been removed.
    def get_user_workflows(self, raw=False, start_time=None, silent=False):
        """
        A function for creating a list of workflows owned by a particular user.

        :param raw: if True, return the raw query response rather than a list of IDs.
        :param start_time: optional ISO date string limiting the query window.
        :param silent: suppress the progress print.
        :return: A list of workflow IDs owned by the user (or the raw results).
        """
        if not silent:
            print('Determining {}\'s workflows...'.format(self.user))
        if self.user == "*":
            results = self.cromwell.query_labels({}, start_time=start_time, running_jobs=False)
        else:
            results = self.cromwell.query_labels({'username': self.user}, start_time=start_time)
        if raw:
            return results
        user_workflows = []
        try:
            for result in results['results']:
                if result['status'] in c.run_states:
                    user_workflows.append(result['id'])
        except Exception as e:
            logging.error(str(e))
            print('No user workflows found with username {}.'.format(self.user))
        return user_workflows

    def process_events(self, workflow):
        """Notify every event subscriber of *workflow*'s status change."""
        for event_subscriber in self.event_subscribers:
            # Fetch final metadata fresh for each subscriber.
            metadata = self.cromwell.query_metadata(workflow.id)
            try:
                event_subscriber.on_changed_workflow_status(workflow, metadata, self.host)
            except Exception:
                # A failing subscriber must not prevent the others from running.
                traceback.print_exc()

    def run(self):
        """Poll Cromwell forever, syncing workflow state into the DB and firing events."""
        while True:
            try:
                one_day_ago = datetime.datetime.now() - datetime.timedelta(days=1)
                db_workflows = dict(
                    (d.id, d) for d in
                    self.session.query(Workflow).filter(Workflow.start > one_day_ago))
                cromwell_workflows = dict(
                    (cw["id"], cw) for cw in
                    self.get_user_workflows(raw=True,
                                            start_time=get_iso_datestr(one_day_ago),
                                            silent=True)['results'])
                # Workflows Cromwell knows about but our DB doesn't yet.
                new_workflows = [Workflow(self.cromwell, cw["id"])
                                 for cw in cromwell_workflows.values()
                                 if cw["id"] not in db_workflows]
                for wf in new_workflows:
                    self.session.add(wf)
                # DB workflows whose status changed since we last looked.
                changed_workflows = [d for d in db_workflows.values()
                                     if d.id in cromwell_workflows
                                     and d.status != cromwell_workflows[d.id]["status"]]
                for wf in changed_workflows:
                    wf.update_status(cromwell_workflows[wf.id]["status"])
                for wf in new_workflows + changed_workflows:
                    self.process_events(wf)
                self.session.flush()
                self.session.commit()
            except Exception:
                traceback.print_exc()
            time.sleep(self.interval)

    def monitor_user_workflows(self):
        """
        A function for monitoring several workflows, one thread per workflow.

        :return:
        """
        print('Monitoring {}\'s workflows.'.format(self.user))
        workflows = self.get_user_workflows()
        if len(workflows) == 0:
            print("User {} has no running workflows.".format(self.user))
        else:
            for workflow in workflows:
                t = threading.Thread(target=self.monitor_workflow, args=[workflow])
                t.start()

    def monitor_workflow(self, workflow_id):
        """
        Monitor the status of a single workflow.

        :param workflow_id: Workflow ID of workflow to monitor.
        :return: returns 0 when workflow reaches terminal state.
        """
        while True:
            query_status = self.cromwell.query_status(workflow_id)
            if self.verbose:
                print('Workflow {} | {}'.format(query_status['id'], query_status['status']))
            if query_status['status'] not in c.run_states:
                if not self.no_notify:
                    self._notify_completion(query_status, workflow_id)
                return 0
            time.sleep(self.interval)

    def _notify_completion(self, query_status, workflow_id):
        """Dump final metadata to a temp file and e-mail a summary, attaching
        failed-task stdout/stderr when the workflow failed."""
        filename = '{}.metadata.json'.format(query_status['id'])
        filepath = os.path.join(c.log_dir, '{}.metadata.json'.format(query_status['id']))
        with open(filepath, 'w+') as metadata:
            json.dump(self.cromwell.query_metadata(workflow_id), indent=4, fp=metadata)
        email_content = self.generate_content(query_status=query_status,
                                              workflow_id=workflow_id)
        msg = self.messenger.compose_email(email_content)
        file_dict = {filename: filepath}
        if 'Failed' in query_status['status']:
            jdata = self.cromwell.query_metadata(workflow_id)
            for task, call in jdata['calls'].items():
                for shard in call:
                    if 'Failed' in shard['executionStatus']:
                        attach_prefix = "{}.{}".format(task, shard['shardIndex'])
                        stdout = "{}.stdout".format(attach_prefix)
                        stderr = "{}.stderr".format(attach_prefix)
                        try:
                            file_dict[stdout] = shard['stdout']
                        except Exception as e:
                            logging.warn(str(e))
                        try:
                            file_dict[stderr] = shard['stderr']
                        except Exception as e:
                            logging.warn(str(e))
                        # Only the first failed shard of each task is attached.
                        break
        attachments = self.generate_attachments(file_dict)
        for attachment in attachments:
            if attachment:
                msg.attach(attachment)
        self.messenger.send_email(msg)
        os.unlink(filepath)

    @staticmethod
    def generate_attachment(filename, filepath):
        """
        Create attachment from a file.

        :param filename: The name to assign to the attachment.
        :param filepath: The absolute path of the file including the file itself.
        :return: An attachment object, or None if the file could not be read.
        """
        try:
            with open(filepath, 'r') as read_data:
                attachment = MIMEText(read_data.read())
            attachment.add_header('Content-Disposition', 'attachment',
                                  filename=filename)
            return attachment
        except Exception as e:
            logging.warn('Unable to generate attachment for {}:\n{}'.format(filename, e))

    def generate_attachments(self, file_dict):
        """
        Generates a list of attachments to be added to an e-mail.

        :param file_dict: A dictionary of filename:filepath pairs. Note the name is what the
        file will be called, and does not refer to the name of the file as it exists prior to
        attaching. That should be part of the filepath.
        :return: A list of attachments
        """
        # (A zipped-bundle variant for many files was previously sketched here as
        #  commented-out code; individual attachments are used for now.)
        return [self.generate_attachment(name, path) for name, path in file_dict.items()]

    def generate_content(self, query_status, workflow_id, metadata=None, user=None):
        """
        A method for generating the email content to be sent to user.

        :param query_status: status of workflow (helps determine what content to include in email).
        :param workflow_id: Workflow ID of the workflow to create e-mail for.
        :param metadata: The metadata of the workflow (optional).
        :param user: override the addressed user (defaults to self.user).
        :return: a dictionary containing the email contents for the template.
        """
        jdata = self.cromwell.query_metadata(workflow_id) if metadata is None else metadata
        summary = ""
        if 'start' in jdata:
            summary += "<br><b>Started:</b> {}".format(jdata['start'])
        if 'end' in jdata:
            summary += "<br><b>Ended:</b> {}".format(jdata['end'])
        if 'start' in jdata and 'end' in jdata:
            start = parse(jdata['start'])
            end = parse(jdata['end'])
            duration = (end - start)
            hours, remainder = divmod(duration.seconds, 3600)
            minutes, seconds = divmod(remainder, 60)
            summary += '<br><b>Duration:</b> {} hours, {} minutes, {} seconds'.format(
                hours, minutes, seconds)
        if 'Failed' in jdata['status']:
            fail_summary = "<br><b>Failures:</b> {}".format(json.dumps(jdata['failures']))
            fail_summary = fail_summary.replace(',', '<br>')
            summary += fail_summary.replace('\n', '<br>')
        if 'workflowName' in jdata:
            summary = "<b>Workflow Name:</b> {}{}".format(jdata['workflowName'], summary)
        if 'workflowRoot' in jdata:
            summary += "<br><b>workflowRoot:</b> {}".format(jdata['workflowRoot'])
        summary += "<br><b>Timing graph:</b> http://{}:9000/api/workflows/v2/{}/timing".format(
            self.host, jdata['id'])
        user = self.user if user is None else user
        email_content = {
            'user': user,
            'workflow_id': jdata['id'],
            'status': jdata['status'],
            'summary': summary
        }
        return email_content
class CromwellUnitTests(unittest.TestCase):
    """Unit tests for the Cromwell client against an on-prem server."""

    @classmethod
    def setUpClass(self):
        resources = c.resource_dir
        self.cromwell = Cromwell(host='btl-cromwell')
        self.json = os.path.join(resources, 'hello.json')
        self.wdl = os.path.join(resources, 'hello_world.wdl')
        self.labels = {'username': '******', 'foo': 'bar'}

    def _initiate_workflow(self):
        """Submit hello-world and give Cromwell time to register it."""
        wf = self.cromwell.jstart_workflow(self.wdl, self.json)
        time.sleep(5)
        return wf

    def test_start_workflow(self):
        wf = self._initiate_workflow()
        wfid = wf['id']
        self.assertTrue('id' in wf and 'status' in wf)
        self.assertEqual(wf['status'], 'Submitted')
        # Workflow IDs are UUIDs (36 characters).
        self.assertEqual(len(wfid), 36)
        self.cromwell.stop_workflow(wfid)

    def test_build_long_url(self):
        wf = self._initiate_workflow()
        wfid = wf['id']
        url_dict = {
            'name': 'test_build_long_url',
            'id': wfid,
            'start': datetime.datetime.now() - datetime.timedelta(days=1),
            'end': datetime.datetime.now()
        }
        query_url = self.cromwell.build_query_url(
            'http://btl-cromwell:9000/api/workflows/v1/query?', url_dict)
        r = requests.get(query_url)
        # Fix: assertEquals is a deprecated alias of assertEqual.
        self.assertEqual(r.status_code, 200)
        self.cromwell.stop_workflow(wfid)

    def test_label_workflow(self):
        wf = self._initiate_workflow()
        wfid = wf['id']
        r = self.cromwell.label_workflow(wfid, self.labels)
        self.assertEqual(r.status_code, 200)
        self.cromwell.stop_workflow(wfid)

    def test_explain(self):
        wf = self._initiate_workflow()
        wfid = wf['id']
        time.sleep(10)
        result = self.cromwell.explain_workflow(wfid)
        self.assertIsInstance(result, tuple)
        self.cromwell.stop_workflow(wfid)

    def test_stop_workflow(self):
        wf = self._initiate_workflow()
        wfid = wf['id']
        result = self.cromwell.stop_workflow(wfid)
        print(result)
        # NOTE(review): stops the same workflow a second time — verify intended.
        self.cromwell.stop_workflow(wfid)

    @classmethod
    def tearDownClass(self):
        print("Done!")
def setUpClass(self):
    """One-time setup: Cromwell client, test resources, and a label fixture."""
    res_dir = c.resource_dir
    self.cromwell = Cromwell(host='btl-cromwell')
    self.json = os.path.join(res_dir, 'hello.json')
    self.wdl = os.path.join(res_dir, 'hello_world.wdl')
    self.labels = {'username': '******', 'foo': 'bar'}
class QueryUnitTests(unittest.TestCase):
    """Exercises the Cromwell query endpoints against an on-prem server."""

    @classmethod
    def setUpClass(self):
        resources = c.resource_dir
        self.cromwell = Cromwell(host='btl-cromwell')
        self.json = os.path.join(resources, 'hw.json')
        self.wdl = os.path.join(resources, 'hw.wdl')
        self.labels = {'username': '******', 'foo': 'bar'}

    def _initiate_workflow(self):
        """Submit the test workflow and give Cromwell time to register it."""
        wf = self.cromwell.jstart_workflow(self.wdl, self.json)
        time.sleep(5)
        return wf

    def test_query_status(self):
        wf = self._initiate_workflow()
        wfid = wf['id']
        result = self.cromwell.query_status(wfid)
        self.assertTrue('id' in result and 'status' in result)
        self.cromwell.stop_workflow(wfid)

    def test_query_metadata(self):
        wf = self._initiate_workflow()
        wfid = wf['id']
        result = self.cromwell.query_metadata(wfid)
        self.assertTrue('id' in result and 'submission' in result)
        self.cromwell.stop_workflow(wfid)

    def test_query_logs(self):
        wf = self._initiate_workflow()
        wfid = wf['id']
        result = self.cromwell.query_logs(wfid)
        self.assertTrue('id' in result)
        self.cromwell.stop_workflow(wfid)

    def test_build_long_url(self):
        wf = self._initiate_workflow()
        wfid = wf['id']
        url_dict = {
            'name': 'test_build_long_url',
            'id': wfid,
            'start': datetime.datetime.now() - datetime.timedelta(days=1),
            'end': datetime.datetime.now()
        }
        query_url = self.cromwell.build_query_url(
            'http://btl-cromwell:9000/api/workflows/v1/query?', url_dict)
        r = requests.get(query_url)
        # Fix: assertEquals is a deprecated alias of assertEqual.
        self.assertEqual(r.status_code, 200)
        self.cromwell.stop_workflow(wfid)

    def test_query(self):
        wf = self._initiate_workflow()
        wfid = wf['id']
        url_dict = {
            'name': 'gatk',
            'id': [wfid],
            'start': datetime.datetime.now() - datetime.timedelta(days=1),
            'end': datetime.datetime.now()
        }
        result = self.cromwell.query(url_dict)
        # Fix: original was assertTrue(isinstance(...), True) — the second
        # argument was silently treated as the assertion *message*.
        self.assertIsInstance(result['results'], list)
        self.cromwell.stop_workflow(wfid)

    def test_label_workflow(self):
        wf = self._initiate_workflow()
        wfid = wf['id']
        r = self.cromwell.label_workflow(wfid, self.labels)
        self.assertEqual(r.status_code, 200)
        self.cromwell.stop_workflow(wfid)

    def test_query_labels(self):
        wf = self._initiate_workflow()
        wfid = wf['id']
        labels = {'username': '******', 'foo': 'bar'}
        self.cromwell.label_workflow(wfid, self.labels)
        # This sleep is needed to make sure the label workflow completes before
        # we query for it.
        time.sleep(5)
        r = self.cromwell.query_labels(labels)
        # Here, the most recent workflow that matches the query will be the last
        # item so we can use that to check this assertion.
        self.assertTrue(wfid in r['results'][-1]['id'])
        self.cromwell.stop_workflow(wfid)

    def test_query_filter_by_statuses(self):
        from argparse import Namespace
        from widdler import call_list
        wf = self._initiate_workflow()
        wfid = wf['id']
        result = call_list(
            Namespace(server="btl-cromwell", all=False, no_notify=True, verbose=True,
                      interval=None, username="******", days=1,
                      filter=['Succeeded', 'Failed']))
        statuses = set(d['status'] for d in result)
        self.assertEqual(len(statuses), 2)
        self.assertIn('Succeeded', statuses)
        self.assertIn('Failed', statuses)
        self.cromwell.stop_workflow(wfid)

    def test_query_filter_by_name(self):
        from argparse import Namespace
        from widdler import call_list
        # NOTE(review): both Namespaces below appear identical (usernames are
        # masked in this source) yet assertGreater expects different result
        # sets — confirm the intended username values.
        user_result = call_list(
            Namespace(server="btl-cromwell", all=False, no_notify=True, verbose=True,
                      interval=None, username="******", days=1, filter=None))
        user_wfids = set(d['id'] for d in user_result)
        all_result = call_list(
            Namespace(server="btl-cromwell", all=False, no_notify=True, verbose=True,
                      interval=None, username="******", days=1, filter=None))
        all_wfids = set(d['id'] for d in all_result)
        self.assertGreater(len(all_wfids), len(user_wfids))

    def test_query_filter_by_days(self):
        from argparse import Namespace
        from widdler import call_list
        result = call_list(
            Namespace(server="btl-cromwell", all=False, no_notify=True, verbose=True,
                      interval=None, username="******", days=1, filter=None))
        all_dates = set(d['start'].split('T')[0] for d in result)
        self.assertEqual(len(all_dates), 1)

    def test_query_backend(self):
        self.assertTrue('defaultBackend' in self.cromwell.query_backend())

    @classmethod
    def tearDownClass(self):
        print("Done!")