def setUp(self):
    """Build the fixture chain used by each test: Project -> TestPlan -> Launch."""
    project = Project(name='Test Project 1')
    project.save()
    plan = TestPlan(name='Test Plan 1', project=project)
    plan.save()
    run = Launch(test_plan=plan)
    run.save()
    self.project = project
    self.tp = plan
    self.launch = run
def test_creation(self):
    """A saved Launch is retrievable through the plan's launch_set and
    keeps its ``started_by`` URL.

    Bug fix: the original ended with ``l1.started_by = url`` — a dead
    assignment where an assertion was clearly intended, so the
    ``started_by`` round-trip was never actually verified.
    """
    url = "http://2gis.local"
    launch = Launch(test_plan=self.tp, started_by=url)
    launch.save()
    stored = self.tp.launch_set.first()
    self.assertEqual(launch, stored)
    self.assertEqual(stored.started_by, url)
class TestResultTest(TestCase):
    """Exercises creation of TestResult rows attached to a Launch."""

    project = None
    tp = None
    launch = None

    def setUp(self):
        # Fixture chain: a project owning a test plan owning a launch.
        project = Project(name='Test Project 1')
        project.save()
        plan = TestPlan(name='Test Plan 1', project=project)
        plan.save()
        run = Launch(test_plan=plan)
        run.save()
        self.project = project
        self.tp = plan
        self.launch = run

    def tearDown(self):
        # Wipe every fixture table so tests stay independent.
        for model in (Project, TestPlan, Launch):
            model.objects.all().delete()

    def test_creation(self):
        # Two results that differ only in suite/state may coexist on
        # the same launch.
        failed_result = TestResult(
            launch=self.launch,
            name='TestCase1',
            suite='TestSute1',
            state=FAILED,
            failure_reason='Very clear message about failure',
            duration=1)
        passed_result = TestResult(
            launch=self.launch,
            name='TestCase1',
            suite='TestSute2',
            state=PASSED,
            failure_reason='Very clear message about failure',
            duration=1)
        failed_result.save()
        passed_result.save()
        self.assertEqual(len(self.launch.testresult_set.all()), 2)
def test_creation(self):
    """A saved Launch is retrievable through the plan's launch_set and
    keeps its ``started_by`` URL.

    Bug fix: the original ended with ``l1.started_by = url`` — a dead
    assignment where an assertion was clearly intended, so the
    ``started_by`` round-trip was never actually verified.
    """
    url = 'http://2gis.local'
    launch = Launch(test_plan=self.tp, started_by=url)
    launch.save()
    stored = self.tp.launch_set.first()
    self.assertEqual(launch, stored)
    self.assertEqual(stored.started_by, url)
def handle(self, *args, **options):
    """Import test-result XML files (positional args) into a Launch.

    Creates the Project and TestPlan on demand, then either creates a
    new Launch or reuses an existing one (--launch-id). Each file path
    in ``args`` is parsed via ``self.load_file``; with --save the
    buffered TestResult objects are persisted in one bulk insert.
    """
    # Required options: fail fast with a usage error when missing.
    if options['project_name'] is None:
        raise CommandError('--project-name is not specified')
    if options['test_plan_name'] is None:
        raise CommandError('--test-plan-name is not specified')
    if options['started_by'] is None:
        raise CommandError('--started-by is not specified')
    # The "created" boolean from get_or_create is ignored.
    (project, new) = Project.objects.get_or_create(name=options['project_name'])
    (test_plan, new) = TestPlan.objects.get_or_create(name=options['test_plan_name'], project=project)
    if options['launch_id'] is None:
        # No explicit launch id: create a fresh launch for this import.
        self.launch = Launch(test_plan=test_plan, started_by=options['started_by'])
        if options['save']:
            # Save first so the launch has an id for the report URL.
            self.launch.save()
            log.info(
                'REPORT_URL=http://autotests.cd.test/launch/{0}/'.format(
                    self.launch.id))
    else:
        # Reuse an existing launch by id.
        log.info('Try to get launch with id = %s', options['launch_id'])
        self.launch = Launch.objects.get(id=options['launch_id'])
    log.info('Using next launch: %s', self.launch)
    # Parse every given XML file; results accumulate in self.buffer.
    for file_path in args:
        self.load_file(file_path, self.launch)
    if options['save']:
        TestResult.objects.bulk_create(self.buffer)
        # NOTE(review): nesting reconstructed from collapsed source — the
        # failed-count check appears to run only when saving; confirm.
        if self.launch.counts['failed'] > 0:
            log.info('BUILD_IS_UNSTABLE')
def setUp(self):
    """Prepare the project, test plan and launch used by every test."""
    project = Project(name="Test Project 1")
    project.save()
    plan = TestPlan(name="Test Plan 1", project=project)
    plan.save()
    run = Launch(test_plan=plan)
    run.save()
    self.project = project
    self.tp = plan
    self.launch = run
def handle(self, *args, **options):
    """Import test-result XML files (positional args) into a Launch.

    Creates the Project and TestPlan on demand, then either creates a
    new Launch or reuses an existing one (--launch-id). Each file path
    in ``args`` is parsed via ``self.load_file``; with --save the
    buffered TestResult objects are persisted in one bulk insert.
    """
    # Required options: fail fast with a usage error when missing.
    if options['project_name'] is None:
        raise CommandError('--project-name is not specified')
    if options['test_plan_name'] is None:
        raise CommandError('--test-plan-name is not specified')
    if options['started_by'] is None:
        raise CommandError('--started-by is not specified')
    # The "created" boolean from get_or_create is ignored.
    (project, new) = Project.objects.get_or_create(
        name=options['project_name'])
    (test_plan, new) = TestPlan.objects.get_or_create(
        name=options['test_plan_name'], project=project)
    if options['launch_id'] is None:
        # No explicit launch id: create a fresh launch for this import.
        self.launch = Launch(test_plan=test_plan,
                             started_by=options['started_by'])
        if options['save']:
            # Save first so the launch has an id for the report URL.
            self.launch.save()
            log.info('REPORT_URL=http://autotests.cd.test/launch/{0}/'.
                     format(self.launch.id))
    else:
        # Reuse an existing launch by id.
        log.info('Try to get launch with id = %s', options['launch_id'])
        self.launch = Launch.objects.get(id=options['launch_id'])
    log.info('Using next launch: %s', self.launch)
    # Parse every given XML file; results accumulate in self.buffer.
    for file_path in args:
        self.load_file(file_path, self.launch)
    if options['save']:
        TestResult.objects.bulk_create(self.buffer)
        # NOTE(review): nesting reconstructed from collapsed source — the
        # failed-count check appears to run only when saving; confirm.
        if self.launch.counts['failed'] > 0:
            log.info('BUILD_IS_UNSTABLE')
class TestResultTest(TestCase):
    """Exercises creation of TestResult rows attached to a Launch."""

    project = None
    tp = None
    launch = None

    def setUp(self):
        # Fixture chain: a project owning a test plan owning a launch.
        project = Project(name="Test Project 1")
        project.save()
        plan = TestPlan(name="Test Plan 1", project=project)
        plan.save()
        run = Launch(test_plan=plan)
        run.save()
        self.project = project
        self.tp = plan
        self.launch = run

    def tearDown(self):
        # Wipe every fixture table so tests stay independent.
        for model in (Project, TestPlan, Launch):
            model.objects.all().delete()

    def test_creation(self):
        # Two results that differ only in suite/state may coexist on
        # the same launch.
        failed_result = TestResult(
            launch=self.launch,
            name="TestCase1",
            suite="TestSute1",
            state=FAILED,
            failure_reason="Very clear message about failure",
            duration=1,
        )
        passed_result = TestResult(
            launch=self.launch,
            name="TestCase1",
            suite="TestSute2",
            state=PASSED,
            failure_reason="Very clear message about failure",
            duration=1,
        )
        failed_result.save()
        passed_result.save()
        self.assertEqual(len(self.launch.testresult_set.all()), 2)
def execute(self, request, pk=None):
    """Create a Launch for test plan ``pk`` and start its celery chain.

    Builds the process environment, turns the selected launch items
    into celery subtasks grouped by type (init / async / conclusive),
    and fires the chain: create_environment -> init -> group(async)
    -> conclusive... -> finalize_launch. Returns the launch id on
    success, 400 on bad input, 500 if the chain fails to start.
    """
    # Per-launch workspace directory keyed by a timestamp.
    workspace_path = os.path.join(
        settings.CDWS_WORKING_DIR,
        timezone.now().strftime('%Y-%m-%d-%H-%M-%f'))
    post_data = request.data
    # NOTE(review): options is read unconditionally — a request without
    # an 'options' key raises KeyError here; confirm callers always send it.
    options = request.data['options']
    json_file = None
    if 'json_file' in post_data:
        json_file = post_data['json_file']
    test_plan = TestPlan.objects.get(pk=pk)
    # launch create
    launch = Launch(test_plan=test_plan,
                    started_by=options['started_by'],
                    state=INITIALIZED)
    launch.save()
    build = Build(launch=launch,
                  version=options.get('version'),
                  branch=options.get('branch'),
                  hash=options.get('hash'))
    build.save()
    # env create: base environment, then caller-supplied overrides.
    env = {
        'WORKSPACE': os.path.join(settings.CDWS_DEPLOY_DIR, workspace_path),
        'HOME': os.path.join(settings.CDWS_DEPLOY_DIR, workspace_path)
    }
    if 'env' in post_data:
        for key, value in iter(post_data['env'].items()):
            env[key] = value
    env['REPORT_API_URL'] = 'http://{0}/{1}'.format(
        settings.CDWS_API_HOSTNAME, settings.CDWS_API_PATH)
    # environment values should be string for exec
    env['TESTPLAN_ID'] = str(test_plan.id)
    env['LAUNCH_ID'] = str(launch.id)
    env['WORKSPACE_URL'] = 'http://{}/{}/'.format(settings.CELERY_HOST,
                                                  workspace_path)
    # queryset create: either the explicitly selected items or all of them.
    if 'launch_items' in post_data:
        try:
            launch_items = test_plan.launchitem_set.filter(
                id__in=post_data['launch_items']).order_by('id')
        except (KeyError, ValueError) as e:
            return Response(status=status.HTTP_400_BAD_REQUEST,
                            data={'message': '{}'.format(e)})
    else:
        launch_items = test_plan.launchitem_set.all().order_by('id')
    # mapping: celery task uuid -> launch item id, stored on the launch.
    mapping = {}
    init_task = None
    async_tasks = []
    conclusive_tasks = []
    create_env_task = create_environment.subtask([env, json_file],
                                                 immutable=True,
                                                 soft_time_limit=1200)
    final_task = finalize_launch.subtask([launch.id], {},
                                         soft_time_limit=3600,
                                         immutable=True)
    is_init_task_present = False
    for launch_item in launch_items:
        item_uuid = uuid()
        # Write LAUNCH_ITEM_ID to environment of each process
        item_env = copy.copy(env)
        item_env['LAUNCH_ITEM_ID'] = str(launch_item.id)
        subtask = launch_process.subtask(
            [launch_item.command, launch_item.type],
            {'env': item_env},
            immutable=True,
            soft_time_limit=launch_item.timeout,
            options={'task_id': item_uuid})
        if launch_item.type == INIT_SCRIPT:
            # Only the first init script is used; extras are ignored.
            if not is_init_task_present:
                is_init_task_present = True
                init_task = subtask
                mapping[item_uuid] = launch_item.id
        elif launch_item.type == ASYNC_CALL:
            async_tasks.append(subtask)
            mapping[item_uuid] = launch_item.id
        elif launch_item.type == CONCLUSIVE:
            conclusive_tasks.append(subtask)
            mapping[item_uuid] = launch_item.id
        else:
            msg = ('There is launch item with type {0} which not '
                   'supported, please fix this.').format(launch_item.type)
            return Response(status=status.HTTP_400_BAD_REQUEST,
                            data={'message': msg})
    # update launch: remember task mapping and the request parameters.
    launch.set_tasks(mapping)
    launch.set_parameters({
        'options': options,
        'env': {} if 'env' not in post_data else post_data['env'],
        'json_file': json_file
    })
    launch.save()
    # error handling: a chain without an init script cannot run.
    if init_task is None:
        msg = ('Initial script for test plan "{0}" with id "{1}" '
               'does not exist or not selected. '
               'Currently selected items: {2}').format(
                   test_plan.name, test_plan.id, launch_items)
        launch.delete()
        return Response(status=status.HTTP_400_BAD_REQUEST,
                        data={'message': msg})
    # pass sequence: env setup, init, async group, conclusive, finalize.
    sequence = [create_env_task, init_task, celery.group(async_tasks)]
    sequence += conclusive_tasks
    sequence.append(final_task)
    try:
        log.info("Chain={}".format(celery.chain(sequence)()))
    except Exception as e:
        return Response(status=status.HTTP_500_INTERNAL_SERVER_ERROR,
                        data={'message': '{}'.format(e)})
    return Response(data={'launch_id': launch.id},
                    status=status.HTTP_200_OK)
class Command(BaseCommand):
    """Management command: import JUnit-style XML reports as TestResults.

    Walks each given XML file, converts <testcase> nodes into TestResult
    objects buffered in ``self.buffer``, and bulk-inserts them with --save.
    """

    option_list = BaseCommand.option_list + (
        make_option('--project-name', help='Name of the project for import'),
        make_option('--test-plan-name', help='Name of the testplan for import'),
        make_option('--launch-id', default=None, help='Launch id'),
        make_option('--started-by', default=None,
                    help='Url to web service which start launch'),
        # NOTE(review): help text 'Launch id' looks copy-pasted — --save is
        # a boolean flag; confirm intended help string.
        make_option('--save', action='store_true', default=False,
                    help='Launch id')
    )
    # Launch the results are attached to; set in handle().
    launch = None
    # NOTE(review): mutable class attribute — shared across instances if a
    # command object were ever reused; confirm that is acceptable here.
    buffer = []

    def handle(self, *args, **options):
        """Validate options, resolve/create the launch, parse each file."""
        # Required options: fail fast with a usage error when missing.
        if options['project_name'] is None:
            raise CommandError('--project-name is not specified')
        if options['test_plan_name'] is None:
            raise CommandError('--test-plan-name is not specified')
        if options['started_by'] is None:
            raise CommandError('--started-by is not specified')
        # The "created" boolean from get_or_create is ignored.
        (project, new) = Project.objects.get_or_create(
            name=options['project_name'])
        (test_plan, new) = TestPlan.objects.get_or_create(
            name=options['test_plan_name'], project=project)
        if options['launch_id'] is None:
            # No explicit launch id: create a fresh launch for this import.
            self.launch = Launch(test_plan=test_plan,
                                 started_by=options['started_by'])
            if options['save']:
                # Save first so the launch has an id for the report URL.
                self.launch.save()
                log.info('REPORT_URL=http://autotests.cd.test/launch/{0}/'.
                         format(self.launch.id))
        else:
            # Reuse an existing launch by id.
            log.info('Try to get launch with id = %s', options['launch_id'])
            self.launch = Launch.objects.get(id=options['launch_id'])
        log.info('Using next launch: %s', self.launch)
        # Parse every given XML file; results accumulate in self.buffer.
        for file_path in args:
            self.load_file(file_path, self.launch)
        if options['save']:
            TestResult.objects.bulk_create(self.buffer)
            # NOTE(review): nesting reconstructed from collapsed source —
            # the failed-count check appears to run only when saving; confirm.
            if self.launch.counts['failed'] > 0:
                log.info('BUILD_IS_UNSTABLE')

    def load_file(self, file_path, launch):
        """Parse one XML file into the buffer; skips empty files.

        The ``launch`` parameter is unused — results are attached via
        ``self.launch`` instead.
        """
        # os.stat(...)[6] is st_size: skip zero-byte files.
        if os.stat(file_path)[6] == 0:
            return
        log.info('Loading "%s"', file_path)
        dom = xml.dom.minidom.parse(file_path)
        self.parse(dom)

    def parse(self, element, path=''):
        """Recursively walk the DOM, accumulating the suite path.

        Each <testsuite> name is appended to ``path``; each <testcase>
        becomes a buffered TestResult.
        """
        if element.nodeName == 'testcase':
            self.create_test_result(element, path)
        if element.nodeName == 'testsuite':
            path += element.getAttribute('name') + '/'
        if element.hasChildNodes():
            for node in element.childNodes:
                if node.nodeType == node.ELEMENT_NODE:
                    self.parse(node, path)

    def create_test_result(self, element, path):
        """Convert one <testcase> element into a buffered TestResult."""
        data = {
            'launch': self.launch,
            # Suite path is truncated to fit the model field.
            'suite': (path[:125] + '...') if len(path) > 125 else path,
            'name': element.getAttribute('name'),
            'state': BLOCKED,
            'duration': element.getAttribute('time'),
            'failure_reason': ''
        }
        # <error>/<failure> children mark failures; <skipped> marks skips;
        # otherwise the case passed.
        error = self.get_node(element, ['error', 'failure'])
        skipped = self.get_node(element, ['skipped'])
        if skipped is not None:
            data['state'] = SKIPPED
            data['failure_reason'] = 'Type: {0} : {1}'.format(
                skipped.getAttribute('type'),
                self.get_text(skipped.childNodes))
        else:
            data['state'] = PASSED
            if error is not None:
                data['state'] = FAILED
                data['failure_reason'] = 'Type: {0} : {1}'.format(
                    error.getAttribute('type'),
                    self.get_text(error.childNodes))
        self.buffer.append(TestResult(**data))

    def get_node(self, element, names):
        """Return the first direct child whose nodeName is in ``names``."""
        for node in element.childNodes:
            if node.nodeName in names:
                return node
        return None

    def get_text(self, nodelist):
        """Concatenate the text of all direct TEXT_NODE children.

        NOTE(review): encode() yields bytes on Python 3, which would make
        ''.join(rc) raise TypeError — this presumably targets Python 2
        (optparse-style option_list above); confirm before porting.
        """
        rc = []
        for node in nodelist:
            if node.nodeType == node.TEXT_NODE:
                rc.append(node.data.encode('utf-8', errors='replace'))
        return ''.join(rc)
class Command(BaseCommand):
    """Management command: import JUnit-style XML reports as TestResults.

    Walks each given XML file, converts <testcase> nodes into TestResult
    objects buffered in ``self.buffer``, and bulk-inserts them with --save.
    """

    option_list = BaseCommand.option_list + (
        make_option('--project-name', help='Name of the project for import'),
        make_option('--test-plan-name', help='Name of the testplan for import'),
        make_option('--launch-id', default=None, help='Launch id'),
        make_option('--started-by', default=None,
                    help='Url to web service which start launch'),
        # NOTE(review): help text 'Launch id' looks copy-pasted — --save is
        # a boolean flag; confirm intended help string.
        make_option(
            '--save', action='store_true', default=False, help='Launch id'))
    # Launch the results are attached to; set in handle().
    launch = None
    # NOTE(review): mutable class attribute — shared across instances if a
    # command object were ever reused; confirm that is acceptable here.
    buffer = []

    def handle(self, *args, **options):
        """Validate options, resolve/create the launch, parse each file."""
        # Required options: fail fast with a usage error when missing.
        if options['project_name'] is None:
            raise CommandError('--project-name is not specified')
        if options['test_plan_name'] is None:
            raise CommandError('--test-plan-name is not specified')
        if options['started_by'] is None:
            raise CommandError('--started-by is not specified')
        # The "created" boolean from get_or_create is ignored.
        (project, new) = Project.objects.get_or_create(
            name=options['project_name'])
        (test_plan, new) = TestPlan.objects.get_or_create(
            name=options['test_plan_name'], project=project)
        if options['launch_id'] is None:
            # No explicit launch id: create a fresh launch for this import.
            self.launch = Launch(test_plan=test_plan,
                                 started_by=options['started_by'])
            if options['save']:
                # Save first so the launch has an id for the report URL.
                self.launch.save()
                log.info(
                    'REPORT_URL=http://autotests.cd.test/launch/{0}/'.format(
                        self.launch.id))
        else:
            # Reuse an existing launch by id.
            log.info('Try to get launch with id = %s', options['launch_id'])
            self.launch = Launch.objects.get(id=options['launch_id'])
        log.info('Using next launch: %s', self.launch)
        # Parse every given XML file; results accumulate in self.buffer.
        for file_path in args:
            self.load_file(file_path, self.launch)
        if options['save']:
            TestResult.objects.bulk_create(self.buffer)
            # NOTE(review): nesting reconstructed from collapsed source —
            # the failed-count check appears to run only when saving; confirm.
            if self.launch.counts['failed'] > 0:
                log.info('BUILD_IS_UNSTABLE')

    def load_file(self, file_path, launch):
        """Parse one XML file into the buffer; skips empty files.

        The ``launch`` parameter is unused — results are attached via
        ``self.launch`` instead.
        """
        # os.stat(...)[6] is st_size: skip zero-byte files.
        if os.stat(file_path)[6] == 0:
            return
        log.info('Loading "%s"', file_path)
        dom = xml.dom.minidom.parse(file_path)
        self.parse(dom)

    def parse(self, element, path=''):
        """Recursively walk the DOM, accumulating the suite path.

        Each <testsuite> name is appended to ``path``; each <testcase>
        becomes a buffered TestResult.
        """
        if element.nodeName == 'testcase':
            self.create_test_result(element, path)
        if element.nodeName == 'testsuite':
            path += element.getAttribute('name') + '/'
        if element.hasChildNodes():
            for node in element.childNodes:
                if node.nodeType == node.ELEMENT_NODE:
                    self.parse(node, path)

    def create_test_result(self, element, path):
        """Convert one <testcase> element into a buffered TestResult."""
        data = {
            'launch': self.launch,
            'name': element.getAttribute('name'),
            # Suite path is truncated to fit the model field.
            'suite': (path[:125] + '...') if len(path) > 125 else path,
            'state': BLOCKED,
            'duration': element.getAttribute('time'),
            'failure_reason': ''
        }
        # <error>/<failure> children mark failures; <skipped> marks skips;
        # otherwise the case passed.
        error = self.get_node(element, ['error', 'failure'])
        skipped = self.get_node(element, ['skipped'])
        if skipped is not None:
            data['state'] = SKIPPED
            data['failure_reason'] = 'Type: {0} : {1}'.format(
                skipped.getAttribute('type'),
                self.get_text(skipped.childNodes))
        else:
            data['state'] = PASSED
            if error is not None:
                data['state'] = FAILED
                data['failure_reason'] = 'Type: {0} : {1}'.format(
                    error.getAttribute('type'),
                    self.get_text(error.childNodes))
        self.buffer.append(TestResult(**data))

    def get_node(self, element, names):
        """Return the first direct child whose nodeName is in ``names``."""
        for node in element.childNodes:
            if node.nodeName in names:
                return node
        return None

    def get_text(self, nodelist):
        """Concatenate the text of all direct TEXT_NODE children.

        NOTE(review): encode() yields bytes on Python 3, which would make
        ''.join(rc) raise TypeError — this presumably targets Python 2
        (optparse-style option_list above); confirm before porting.
        """
        rc = []
        for node in nodelist:
            if node.nodeType == node.TEXT_NODE:
                rc.append(node.data.encode('utf-8', errors='replace'))
        return ''.join(rc)