def __init__(self, request):
    """Initialize per-test data-import state.

    :param request: test request object forwarded to the base class
    """
    super().__init__(request)
    # Name of the generated import/export file (set later by the workflow).
    self.filename = None
    # Target entity name, lower-cased by the builder methods.
    self.entity = None
    self.asserter = Asserter()
    # One expected-dict per generated row; consumed by verify().
    self.lst_expected_dct = []
    # Caller-specified column values that override generated data.
    self.set_values_dct = OrderedDict()
    # Column names to exclude from the generated file.
    self.ignore_fields = []
    # Default category/status constants dicts (ID/DISPLAYTEXT), or None.
    self.category = None
    self.status = None
    # 1-based field-mapping mode index, or None for the default.
    self.field_mapping = None
    # Field-selection flags for template generation.
    self.mandatory_fields = False
    self.standard_fields_flag = True
    self.udf_fields_flag = True
    # False = create flow, True = update flow.
    self.is_update = False
    # Entity ids collected for the update flow.
    self.update_entities_lst = []
    self.data_gen = DataGen()
def test_positive(request):
    """Parse the positive-case HTML fixture and verify it against EXPECTED_DICT.

    Reads ``positive.html`` from PATH, converts it to a dict via the helper,
    and asserts the Asserter comparison succeeds.
    """
    filename = 'positive.html'
    file_path = PATH + filename
    # Explicit encoding so the test does not depend on the platform default.
    with open(file_path, 'r', encoding='utf-8') as html_file:
        html = html_file.read()
    actual_dict = helper.parse_to_dict(html)
    status, remarks = Asserter().verify(actual_dict, expected_dct=EXPECTED_DICT)
    # Surface the asserter's remarks on failure instead of a bare AssertionError.
    assert status, remarks
def verify_rollups(self):
    """Verify scope rollups against the expected dictionary.

    Runs two verifications in sequence — task-details rollup, then
    task-hierarchy rollup — logging each attempt into the steps table and
    asserting after each one, so a failure in the first stops the second.
    """
    self.status = True
    self.step_desc = 'Scope Rollup verification'
    self.remarks = '\n Inside class: %s method: %s \n' % utils.get_method_class_names(
    )
    expected_dct = self.get_expected()
    self.step_input = '\n Expected Dictionary\n{}'.format(
        json.dumps(expected_dct))
    # --- Verification 1: task details rollup ---
    actual_task_det_dct = self._get_actual_task_det()
    self.step_input += '\n Actual Task details rollup Dictionary\n{}'.format(
        json.dumps(actual_task_det_dct))
    try:
        self.status, remark = Asserter().verify(actual_task_det_dct,
                                                expected_dct=expected_dct)
        self.remarks += remark
    except KeyError:
        # Asserter raises KeyError when an expected key is missing.
        self.status = False
        self.remarks += 'KeyError Exception occurred, please see stack trace below: \n %s' % traceback.format_exc(
        )
    finally:
        # Always record the step, pass or fail.
        db_wrapper.log_into_steps(self.request, self)
    assert self.status
    # --- Verification 2: task hierarchy rollup (same expected dict) ---
    actual_task_heir_dct = self._get_actual_task_heir()
    self.step_input += '\n Actual Task heirarchy details rollup Dictionary\n{}'.format(
        json.dumps(actual_task_heir_dct))
    try:
        self.status, remark = Asserter().verify(actual_task_heir_dct,
                                                expected_dct=expected_dct)
        self.remarks += remark
    except KeyError:
        self.status = False
        self.remarks += 'KeyError Exception occurred, please see stack trace below: \n %s' % traceback.format_exc(
        )
    finally:
        db_wrapper.log_into_steps(self.request, self)
    assert self.status
def __init__(self, request):
    """Initialize base state and attach an Asserter for verifications.

    :param request: test request object forwarded to the base class
    """
    super().__init__(request)
    self.asserter = Asserter()
class Task(Base):
    """Atom-style wrapper for task CRUD operations under a project.

    Each mutator returns ``self`` so calls can be chained fluently.
    """

    def __init__(self, request):
        super().__init__(request)
        self.asserter = Asserter()

    def create(self, title, type):
        """
        Atom to create the task under a project
        :param title: title of the task
        :param type: type of the tests (cap_bill, noncap_bill,
            noncap_nonbill, cap_nonbill)
        """
        self.create_under_project(title, type)
        return self

    def create_under_project(self, title, type, **data):
        """
        Atom to create under the project/summary task, type will decide which
        kind of the test it should be
        :param title: title of the task
        :param type: type of the tests (cap_bill, noncap_bill,
            noncap_nonbill, cap_nonbill)
        :param data: other optional params
        :return: self object
        """
        # Fall back to this object's project_id when the caller gives none.
        data.update(project_id=data.get('project_id', self.project_id))
        super().create(**data)
        # The type string doubles as the payload name for the update call.
        self._update(title=title, payload=type)
        return self

    def _multipletasks(self, task_lst):
        """
        Atom for creating multiple tasks and store store at a stretch
        :param task_lst: list of dict with names and type
            [{'title':<<title1>, 'type':cap_bill},
             {'title':<<title2>>, 'type':noncap_bill}....]
        :return: self object
        """
        for task in task_lst:
            self.create_under_project(task.get('title'),
                                      task.get('type', 'noncap_nonbill'),
                                      project_id=self.project_id)
        return self

    def childtask(self, title=None, parent=None, **data):
        """
        Atom to create a child task under a summary task
        :param title: title, if title is not given then it will auto generate
            the title
        :param parent: name of the summary task under which child task has to
            be created
        :param data: other params if any
        :return: self object
        """
        if parent:
            # Resolve the parent's ids from the data store by its title.
            task_dict = self._get_task(parent)
            self.task_id = task_dict['id']
            self.project_id = task_dict['projectId']['value']
        data.update(parent_type='task', payload='default_subtask',
                    task_id=self.task_id, project_id=self.project_id)
        super().create(**data)
        # self._set_task_details()
        if title:
            self.set_title(title)
        return self

    def _get_task(self, title):
        """Return the stored task dict whose 'title' matches *title*."""
        # task_dict = self.db.search.get_details_based_on_attributes(self.data_store.get(), ENTITY, title=title)
        task_dict = self.db_store.search_by_key(ENTITY, 'title', title)[0]
        return task_dict

    def _update(self, **data):
        """Forward an update to the base class with this task's ids attached."""
        data.update(task_id=self.task_id, project_id=self.project_id)
        super().update(**data)
        return self

    def set_predecessor(self, predecessor_task_titles):
        """
        Atom to set predecessor for the given tasks
        :param predecessor_task_titles: list of titles which needs to be set
            as predecessor ['title1', 'title2']
        :return: self object
        """
        lst_task_numbers = []
        from payloads.task.update import predecessor_dct
        tsk_dict = {}
        for predecessor_task_title in predecessor_task_titles:
            # NOTE(review): _get_task returns a dict, but attribute access
            # (.task_id / .task_number) is used below — verify this works
            # against the actual db_store return type.
            predecessor_obj = self._get_task(predecessor_task_title)
            tsk_dict[predecessor_obj.task_id] = predecessor_obj.task_number
            lst_task_numbers.append(predecessor_obj.task_number)
        data = {
            'payload': 'predecessor',
            'task_id': self.task_id,
            'project_id': self.project_id,
            'predecessor_dct': predecessor_dct,
            'tsk_dict': tsk_dict
        }
        self._update(**data)
        return self

    def set_successor(self, successor_task_titles):
        """
        Atom to set successor for the given tasks
        :param successor_task_titles: list of titles which needs to be set as
            successor ['title1', 'title2']
        :return: self object
        """
        lst_task_numbers = []
        from payloads.task.update import successor_dct
        tsk_dict = {}
        for successor_task_title in successor_task_titles:
            # NOTE(review): same dict-vs-attribute concern as set_predecessor.
            successor_obj = self._get_task(successor_task_title)
            tsk_dict[successor_obj.task_id] = successor_obj.task_number
            lst_task_numbers.append(successor_obj.task_number)
        data = {
            'payload': 'successor',
            'task_id': self.task_id,
            'project_id': self.project_id,
            'successor_dct': successor_dct,
            'tsk_dict': tsk_dict
        }
        self._update(**data)
        return self

    def set_title(self, title):
        """
        Atom to update the title of the task
        :param title: title of the task
        :return: self object
        """
        self._update(payload='title', title=title)
        return self

    def set_description(self, description):
        """
        Atom to update the description of the task
        :param description: description
        :return: self object
        """
        self._update(payload='description', description=description)
        return self

    def set_status(self, status):
        """
        Atom to update the status of the task
        :param status: status key looked up in the TASK/STATUS constants,
            providing both the ID and DISPLAYTEXT values for the payload
        :return: self object
        """
        self._update(payload='status',
                     status_id=self.constants['TASK']['STATUS'][status]['ID'],
                     status_value=self.constants['TASK']['STATUS'][status]
                     ['DISPLAYTEXT'])
        return self

    def set_duration(self, days_in_sec):
        """
        Atom to update the duration of the task
        :param days_in_sec: days in seconds like 28800 (60 * 60 * 8) for one
            working day of 8 workings hrs
        :return: self object
        """
        self._update(payload='duration', duration=days_in_sec)
        return self

    def set_start_date(self, start_date_time):
        """
        Atom to update the start date of the task
        :param start_date_time: date should be of datetime format of
            '%Y-%m-%dT%H:%M:%S.000'
        :return: self object
        """
        self._update(payload='start_date', start_date_time=start_date_time)
        return self

    def set_complete_date(self, complete_date_time):
        """
        Atom to update the complete date of the task
        :param complete_date_time: date should be of datetime format of
            '%Y-%m-%dT%H:%M:%S.000'
        :return: self object
        """
        self._update(payload='completed_date',
                     complete_date_time=complete_date_time)
        return self

    def delete(self, entity_details=None):
        """Delete this task, or the one described by *entity_details*."""
        if not entity_details:
            entity_id = self.task_id
        else:
            entity_id = entity_details['id']
        super().delete(entity_id=entity_id)
        return self

    def schedule(self):
        """
        This method is used to create the object of schedule class, which
        defines a relationship between schedule and timesheet
        :return: schedule object
        """
        obj = Schedule(self.request)
        obj.task_id = self.task_id
        obj.project_id = self.project_id
        return obj

    def timesheet(self):
        """
        This method is used to create the object of timesheet class, which
        defines a relationship between schedule and timesheet
        :return: timesheet object
        """
        obj = Timesheet(self.request)
        obj.project_id = self.project_id
        obj.task_id = self.task_id
        return obj

    def verify(self):
        """
        This method will generate expected dict from the data store and actual
        dict is generated from read operation of the task entity
        :return: self (asserts on self.status before returning)
        """
        self.status = True
        self.step_desc = 'Task update verification'
        self.remarks = '\n Inside class: %s method: %s \n' % utils.get_method_class_names(
        )
        expected_dct = self.db_store.search_by_key(ENTITY, 'id',
                                                   self.task_id)[0]
        self.step_input = '\n Expected Dictionary\n{}'.format(
            json.dumps(expected_dct))
        # The server HTML-encodes title/description, so encode the expected
        # values the same way before comparing.
        title = expected_dct.get('title')
        encoded_title = utils.html_encode(title)
        expected_dct.update({'title': encoded_title})
        if expected_dct.get('description'):
            description = expected_dct.get('description')
            encoded_desc = utils.html_encode(description)
            expected_dct.update({'description': encoded_desc})
        # NOTE(review): this assignment overwrites the 'Expected Dictionary'
        # header written above — possibly unintended; confirm.
        self.step_input = json.dumps(expected_dct)
        response = super().read(entity_id=self.task_id)
        actual_dct = response.json()['data'][0]
        self.step_input += '\n Actual Dictionary\n{}'.format(
            json.dumps(actual_dct))
        try:
            self.status, remark = self.asserter.verify(
                actual_dct, expected_dct=expected_dct)
            self.remarks += remark
        except KeyError:
            self.status = False
            self.remarks += 'KeyError Exception occurred, please see stack trace below: \n %s' \
                % traceback.format_exc()
        finally:
            # Always record the step outcome, pass or fail.
            db_wrapper.log_into_steps(self.request, self)
        assert self.status
        return self
class Project(Base):
    """Atom-style wrapper for project CRUD operations.

    Each mutator returns ``self`` so calls can be chained fluently.
    """

    def __init__(self, request):
        super().__init__(request)
        self.asserter = Asserter()

    def create(self, title=None, template=None):
        """
        project can be created with or without template
        :param title: project title (ignored when *template* is given —
            the template payload replaces the whole data dict)
        :param template: template_id
        :return: self object
        """
        data = {'title': title}
        if template:
            data = {'payload': 'template', 'template': template}
        super().create(**data)
        return self

    def _update(self, **data):
        """
        Atom to update the attributes for the given project_id
        :param data: data wil have attributes which needs to be updated from
            the atoms
        :return: self object
        """
        data.update(entity_id=self.project_id)
        super().update(**data)
        return self

    def delete(self, entity_details=None):
        """
        Atom to delete the project which calls base delete
        :param entity_details: optional dict with an 'id' key; defaults to
            this object's project_id
        :return: self object
        """
        if not entity_details:
            entity_id = self.project_id
        else:
            entity_id = entity_details['id']
        super().delete(entity_id=entity_id)
        return self

    def set_title(self, title):
        """
        Atom to update the title of the project
        :param title: title which needs to be updated for the given project
        :return: self object
        """
        self._update(payload='title', title=title)
        return self

    def set_owner(self, owner):
        """
        Atom to set the owner for the given project id
        :param owner: key into the USERS constants, which supplies both the
            resource id and display text
        :return: self object
        """
        self._update(payload='owner',
                     owner_id=self.constants['USERS'][owner]['RESOURCEID'],
                     owner_value=self.constants['USERS'][owner]['DISPLAYTEXT'])
        return self

    def set_description(self, description):
        """
        Atom to update the description
        :param description: description
        :return: self object
        """
        self._update(payload='description', description=description)
        return self

    def set_confidential(self, is_confidential_value):
        """
        Atom to update the confidential flag for the given project
        :param is_confidential_value: 'yes' or 'no' (case-insensitive); mapped
            to 1/0 for the payload id
        :return: self object
        """
        confidential_dct = {'yes': 1, 'no': 0}
        # .get() yields None for unknown inputs rather than raising.
        is_confidential_id = confidential_dct.get(
            is_confidential_value.lower())
        self._update(payload='confidential',
                     is_confidential_id=is_confidential_id,
                     is_confidential_value=is_confidential_value)
        return self

    def set_phase(self, phase):
        """Set the project phase using the PROJECT/PHASES constants."""
        self._update(payload='phase',
                     phase_id=self.constants['PROJECT']['PHASES'][phase]['ID'],
                     phase_value=self.constants['PROJECT']['PHASES'][phase]
                     ['DISPLAYTEXT'])
        return self

    def set_complete_date(self, complete_date_time):
        """
        set complete date for the project
        :param complete_date_time: complete date in yyyy-mm-ddT00:00:00.000
            format
        """
        self._update(payload='completed_date',
                     complete_date_time=complete_date_time)
        return self

    def set_status(self, status):
        """
        set status for the project, as per the input given it will get the id
        for the status and appends it to the payload
        :param status: status like 'proposed' or 'Hold'
        """
        self._update(
            payload='status',
            status_id=self.constants['PROJECT']['STATUS'][status]['ID'],
            status_value=self.constants['PROJECT']['STATUS'][status]
            ['DISPLAYTEXT'])
        return self

    def set_department(self, department):
        """
        set department for the project, as per the input given it will get the
        id for the department and appends it to the payload
        :param department: department like 'customer' or 'Finance'
        """
        self._update(
            payload='department',
            department_id=self.constants['PROGRAMS'][department]['ID'],
            department_value=self.constants['PROGRAMS'][department]
            ['DISPLAYTEXT'])
        return self

    def task(self, title, type='noncap_nonbill'):
        """
        Task is created, which defines a relationship between project and task
        :param title: task title
        :param type: task type payload name
        :return: the new Task object
        """
        obj = Task(self.request).create_under_project(
            title, type, project_id=self.project_id)
        return obj

    def multipletasks(self, title_lst):
        """
        Purpose of this function is to create a list of tasks
        :return: the Task object used for creation
        """
        obj = Task(self.request)._multipletasks(title_lst)
        return obj

    def issue(self, title):
        """Create an Issue with the given title and return it."""
        obj = Issue(self.request).create(title)
        return obj

    def verify(self):
        """
        This method will generate expected dict from the data store and actual
        dict is generated from read operation of the project entity
        :return: self (asserts on self.status before returning)
        """
        self.status = True
        self.step_desc = 'Project update verification'
        self.remarks = '\n Inside class: %s method: %s \n' % utils.get_method_class_names(
        )
        expected_dct = self.db_store.search_by_key(ENTITY, 'id',
                                                   self.project_id)[0]
        # The server HTML-encodes title/description, so encode the expected
        # values the same way before comparing.
        title = expected_dct.get('title')
        encoded_title = utils.html_encode(title)
        expected_dct.update({'title': encoded_title})
        if expected_dct.get('description'):
            description = expected_dct.get('description')
            encoded_desc = utils.html_encode(description)
            expected_dct.update({'description': encoded_desc})
        self.step_input = json.dumps(expected_dct)
        response = super().read(entity_id=self.project_id)
        # NOTE(review): unlike Task.verify, the actual dict is never appended
        # to step_input here — confirm whether that logging is wanted.
        actual_dct = response.json()['data'][0]
        try:
            self.status, remark = self.asserter.verify(
                actual_dct, expected_dct=expected_dct)
            self.remarks += remark
        except KeyError:
            self.status = False
            self.remarks += 'KeyError Exception occurred, please see stack trace below: \n %s' \
                % traceback.format_exc()
        finally:
            # Always record the step outcome, pass or fail.
            db_wrapper.log_into_steps(self.request, self)
        assert self.status
        return self
def __init__(self, request):
    """Initialize with no task/project bound yet; ids are assigned by callers.

    :param request: test request object forwarded to the base class
    """
    super().__init__(request)
    # Set externally (e.g. by Task.schedule()) before use.
    self.task_id = None
    self.project_id = None
    self.asserter = Asserter()
class Schedule(Base):
    """Atom-style wrapper for schedule operations on a task.

    task_id/project_id are injected by the creating object (e.g.
    ``Task.schedule()``); mutators return ``self`` for chaining.
    """

    def __init__(self, request):
        super().__init__(request)
        # Set externally (e.g. by Task.schedule()) before use.
        self.task_id = None
        self.project_id = None
        self.asserter = Asserter()

    def add_resource(self, resource_name, role_name):
        """
        Atom is to add resource to the schedule
        :param resource_name: name of the resource
            example : 'ba_user' or 'project_owner1'
        :param role_name: role names which is to betaken from the constants
            example : 'ba' or 'architect'
        """
        data = {
            'role_id': self.constants['ROLES'][role_name]['ID'],
            'resource_id':
            self.constants['USERS'][resource_name]['RESOURCEID'],
            'task_id': self.task_id
        }
        # Base create returns the (task_role_id, resource_task_schedule_id)
        # pair needed by the hour-setting atoms below.
        self.task_role_id, self.resource_task_schedule_id = super().create(
            **data)
        return self

    def set_role_hrs(self, estimated_hrs):
        """
        This is to estimate the hours for a role which is assigned to the task
        :param estimated_hrs: estimated hrs in integer
        """
        data = {
            'task_role_id': self.task_role_id,
            'task_id': self.task_id,
            'estimated_hrs': estimated_hrs
        }
        super().update(**data)
        return self

    def set_resource_hrs(self, resource_hrs, resource_htc=0):
        """
        This is set resource hours and hours to complete for a particular role
        and its task id
        :param resource_hrs: resource hrs in integer
        :param resource_htc: resource hours-to-complete in integer
        """
        data = {
            'payload': 'resource_hrs',
            'resource_task_schedule_id': self.resource_task_schedule_id,
            'task_role_id': self.task_role_id,
            'task_id': self.task_id,
            'resource_hrs': resource_hrs,
            'resource_htc': resource_htc
        }
        super().update(**data)
        return self

    def delete(self, entity_details):
        # Intentionally a no-op: schedules are not deleted directly here.
        pass

    def timesheet(self):
        """
        The time sheet object is created, which defines a relationship between
        schedule and timesheet
        :return: timesheet object
        """
        obj = Timesheet(self.request)
        obj.project_id = self.project_id
        obj.task_id = self.task_id
        return obj

    def verify(self):
        """
        This method will generate expected dict from the data store and actual
        dict is generated from read operation of the schedule entity
        :return: self (asserts on self.status before returning)
        """
        self.status = True
        self.step_desc = 'Task update verification'
        from core import utils
        self.remarks = '\n Inside class: %s method: %s \n' % utils.get_method_class_names(
        )
        expected_dct = self.db_store.search_by_key(ENTITY, 'taskRoleId',
                                                   str(self.task_role_id))[0]
        self.step_input = '\n Expected Dictionary\n{}'.format(
            json.dumps(expected_dct))
        response = super().read(entity_id=self.task_id)
        # Narrow the response down to just the keys present in expected_dct.
        actual_dct = utils.get_sub_dct(response, expected_dct)
        self.step_input += '\n Actual Dictionary\n{}'.format(
            json.dumps(actual_dct))
        try:
            self.status, remark = self.asserter.verify(
                actual_dct, expected_dct=expected_dct)
            self.remarks += remark
        except KeyError:
            self.status = False
            self.remarks += 'KeyError Exception occurred, please see stack trace below: \n %s' \
                % traceback.format_exc()
        finally:
            # Always record the step outcome, pass or fail.
            db_wrapper.log_into_steps(self.request, self)
        assert self.status
        return self
def __init__(self, request):
    """Initialize and wire the request through to the payload generator.

    :param request: test request object forwarded to the base class
    """
    super().__init__(request)
    # NOTE(review): 'ttl' holds request.scope — name suggests time-to-live
    # but the value is the scope; confirm intent.
    self.ttl = request.scope
    self.request = request
    # The payload generator needs the request for context.
    self.payload_generator.request = request
    self.asserter = Asserter()
class Dataimport(Base):
    """Fluent builder for data-import test flows (create/update via Excel).

    Typical usage: choose an entity (create_for_entity / update_for_entity),
    configure field selection and defaults, populate the file, import it, and
    call verify() to compare each imported row against the application.
    All builder methods return ``self`` for chaining.
    """

    # Columns/keys the update flow cannot round-trip; removed from both the
    # generated row (excel column name) and the expected dict (lowercase key).
    _UNSUPPORTED_UPDATE_COLUMNS = (
        ('PROJECT % COMPLETE METHOD', '% complete method'),
        ('PROJECT PROJECTPLACE SYNC NOW', 'projectplace sync now'),
        ('PROJECT DEFAULT TASK CATEGORY TITLE', 'default task category'),
    )

    def __init__(self, request):
        """Initialize per-test data-import state.

        :param request: test request object forwarded to the base class
        """
        super().__init__(request)
        self.filename = None                 # generated import file name
        self.entity = None                   # target entity (lower-cased)
        self.asserter = Asserter()
        self.lst_expected_dct = []           # one expected dict per row
        self.set_values_dct = OrderedDict()  # caller-forced column values
        self.ignore_fields = []              # columns to drop from the file
        self.category = None                 # default category constants dict
        self.status = None                   # default status constants dict
        self.field_mapping = None            # 1-based mapping mode index
        self.mandatory_fields = False
        self.standard_fields_flag = True
        self.udf_fields_flag = True
        self.is_update = False               # create vs update flow
        self.update_entities_lst = []        # ids targeted by the update flow
        self.data_gen = DataGen()

    def generate_import_file(self, entity):
        """Export a template file for *entity* and cache its columns/fields.

        Stores file name, template columns (DataFrame) and available fields
        in the db_store for later lookups.
        """
        self.filename = super().export_file(entity, self.is_update)
        self.template_columns_dataframe, self.fields_lst = \
            self._get_available_fields(entity)
        data = {'file_name': self.filename,
                'excel_columns': self.template_columns_dataframe,
                'fields_lst': self.fields_lst}
        self.db_store.insert(self.scope, self.test_id, ENTITY, data)
        return self

    def create_for_entity(self, entity):
        """Switch to the create flow for *entity*."""
        self.is_update = False
        self.entity = entity.lower()
        return self

    def update_for_entity(self, entity):
        """Switch to the update flow for *entity*."""
        self.is_update = True
        self.entity = entity.lower()
        return self

    def consider_entities(self, entities_lst):
        """Resolve the given titles to entity ids for the update flow.

        :param entities_lst: titles of already-created entities
        :raises NameError: if no entity was selected beforehand
        """
        if not self.entity:
            raise NameError('Entity value is required')
        entity = self.entity
        for entity_title in entities_lst:
            entity_dict = self.db_store.search_by_key(entity, 'title',
                                                      entity_title)[0]
            self.update_entities_lst.append(entity_dict['id'])
        return self

    def include_only_mandatory_fields(self):
        """Restrict generation to mandatory fields only."""
        self.standard_fields_flag, self.udf_fields_flag, \
            self.mandatory_fields = False, False, True
        return self

    def include_only_standard_fields(self):
        """Restrict generation to standard (non-UDF) fields."""
        self.standard_fields_flag, self.udf_fields_flag = True, False
        return self

    def include_only_udf_fields(self):
        """Restrict generation to user-defined fields."""
        self.standard_fields_flag, self.udf_fields_flag = False, True
        return self

    def include_both_standard_and_udf_fields(self):
        """Include both standard and user-defined fields (the default)."""
        self.standard_fields_flag, self.udf_fields_flag = True, True
        return self

    def with_default_category(self, category):
        """Select the default category constants for the current entity."""
        entity = self.entity.upper()
        self.category = self.constants[entity]['CATEGORIES'][category]
        return self

    def with_default_status(self, status):
        """Select the default status constants for the current entity."""
        entity = self.entity.upper()
        self.status = self.constants[entity]['STATUS'][status]
        return self

    def where_field_mapping_is(self, mapping):
        """Set the field-mapping mode: 'same' -> 1, 'similar' -> 2, 'manual' -> 3."""
        field_mapping = ['same', 'similar', 'manual']
        self.field_mapping = field_mapping.index(mapping.lower()) + 1
        return self

    def set_fields(self, specified_values_dct):
        """Force specific column values instead of generated ones."""
        self.set_values_dct.update(specified_values_dct)
        return self

    def turn_on_special_characters(self):
        """Have the data generator include special characters in values."""
        self.data_gen.turn_on_special_character = True
        return self

    def import_file(self):
        """Submit the populated file to the application's import endpoint."""
        entity = self.entity.upper()
        standard_fields = {
            'category': self.category['ID'] if self.category else None,
            'status': self.status['ID'] if self.status else None,
            'ptid': self.constants[ENTITY.upper()][entity.upper()],
            # NOTE(review): falls back to the string '1' while explicit
            # mappings are ints — confirm the endpoint accepts both.
            'fieldMapping': self.field_mapping if self.field_mapping else '1'
        }
        super().import_file_in_system(self.entity, standard_fields,
                                      self.is_update)
        return self

    def delete(self, entity_details):
        """Delete the generated import file recorded in *entity_details*."""
        super().delete(entity_id=entity_details['file_name'])

    def _get_available_fields(self, entity):
        """Read the template's columns plus all importable fields for *entity*."""
        template_columns_dataframe = pandas.read_excel(self.filename)
        fields_lst = super().read_all_available_fields(entity)
        return template_columns_dataframe, fields_lst

    def _get_entity_fields_and_columns(self):
        """Return (fields_lst, template column names), generating the file on demand.

        :raises Exception: when no entity has been selected, so the file name
            cannot be derived
        """
        self.filename = '{}.xlsx'.format(self.entity)
        entity_data_dct = self.db_store.search_by_key(ENTITY, 'file_name',
                                                      self.filename)
        if not entity_data_dct:
            if self.entity:
                # Lazily create and re-read the template on first use.
                self.generate_import_file(self.entity)
                entity_data_dct = self.db_store.search_by_key(
                    ENTITY, 'file_name', self.filename)
            else:
                raise Exception(
                    'Entity parameter has to be provided to identify the file name')
        fields_lst = entity_data_dct[0]['fields_lst']
        template_columns_dataframe = entity_data_dct[0]['excel_columns']
        lst_template_columns = list(template_columns_dataframe)
        return fields_lst, lst_template_columns

    def populate_dynamic_data_to_excel_file(self, no_of_rows=1):
        """Generate row data (create or update flow) and write it to the file.

        :param no_of_rows: rows to generate in the create flow; the update
            flow generates one row per id in update_entities_lst
        """
        fields_lst, lst_template_columns = \
            self._get_entity_fields_and_columns()
        fields_with_types = super().filter_available_fields(
            self.entity, fields_lst, lst_template_columns,
            self.standard_fields_flag, self.udf_fields_flag,
            self.mandatory_fields, self.ignore_fields)
        default_values_dct = OrderedDict()
        default_values_dct.update({
            'CATEGORY': self.category['DISPLAYTEXT'] if self.category else None,
            'STATUS': self.status['DISPLAYTEXT'] if self.status else None})
        if self.is_update:
            self.generate_data_for_update_entity(fields_with_types,
                                                 default_values_dct)
        else:
            self.generate_data_for_create_entity(fields_with_types,
                                                 default_values_dct,
                                                 no_of_rows)
        return self

    def generate_data_for_update_entity(self, fields_with_types,
                                        default_values_dct):
        """Generate one update row per targeted entity id and write the file."""
        lst_row_dct = []
        for entity_id in self.update_entities_lst:
            row_dct, expected_dct = self.data_gen.generate(
                self.entity, fields_with_types, self.set_values_dct,
                default_values_dct)
            # Strip columns the update import cannot round-trip.
            for column, expected_key in self._UNSUPPORTED_UPDATE_COLUMNS:
                row_dct.pop(column, None)
                expected_dct.pop(expected_key, None)
            row_dct.update({'ID': entity_id})
            lst_row_dct.append(row_dct)
            self.lst_expected_dct.append(expected_dct)
        super().update(lst_row_dct)
        return self

    def generate_data_for_create_entity(self, fields_with_types,
                                        default_values_dct, no_of_rows=1):
        """Generate *no_of_rows* create rows and write them to the file."""
        lst_row_dct = []
        for row_no in range(0, no_of_rows):
            row_dct, expected_dct = self.data_gen.generate(
                self.entity, fields_with_types, self.set_values_dct,
                default_values_dct)
            # Re-assert the defaults in case generation mutated them.
            default_values_dct.update({
                'CATEGORY':
                self.category['DISPLAYTEXT'] if self.category else None,
                'STATUS':
                self.status['DISPLAYTEXT'] if self.status else None})
            lst_row_dct.append(row_dct)
            self.lst_expected_dct.append(expected_dct)
        super().update(lst_row_dct)
        return self

    def remove_column(self, column_name):
        """Exclude *column_name* (upper-cased) from the generated file."""
        self.ignore_fields.append(column_name.upper())
        return self

    def _verify_project(self, expected_dct):
        """ Verification of single project creation via data-import using
        new-project-template.
        1) Read details, additional-details and settings of projects and
           combine them to form actual_dct
        2) Verify the expected_dct with actual_dct
        """
        self.status = True
        self.step_desc = 'Data Import of Project verification'
        self.remarks = '\n Inside class: %s method: %s \n' % \
            utils.get_method_class_names()
        self.step_input = '\n Expected Dictionary\n{}'.format(
            json.dumps(expected_dct))
        project = Project(self.request)
        # Read details of project based on title. In UI, these details can be
        # seen under 'Details' of project
        read_response = project.read_title(expected_dct['title'])
        act_project_details = helper.get_formatted_dct(read_response.json())
        project.project_id = read_response.json()['id']
        # Read additional details of project. In UI, these additional details
        # can be seen under 'Executive Summary' of project
        read_response = project.read_more(project.project_id)
        act_project_details_more = helper.get_formatted_dct(
            read_response.json())
        # Read settings of project. In UI, these additional details can be
        # seen under 'Settings' of project
        read_response = project.read_settings(project.project_id)
        act_project_details_settings = helper.get_formatted_dct(
            read_response.json())
        # Combine all the details to form the actual_dct.
        # BUGFIX: the original passed a *set* of dicts to dict.update(),
        # which raises TypeError (dicts are unhashable); merge them instead.
        actual_dct = OrderedDict()
        actual_dct.update(act_project_details)
        actual_dct.update(act_project_details_more)
        actual_dct.update(act_project_details_settings)
        self.step_input += '\n Actual Dictionary\n{}'.format(
            json.dumps(actual_dct))
        expected_dct = helper.lower_keys(expected_dct)
        actual_dct = helper.lower_keys(actual_dct)
        try:
            self.status, remark = self.asserter.verify(actual_dct,
                                                       expected_dct)
            self.remarks += remark
        except KeyError:
            self.status = False
            self.remarks += 'KeyError Exception occurred, please see stack trace below: \n %s' \
                % traceback.format_exc()
        finally:
            # Always record the step outcome, pass or fail.
            db_wrapper.log_into_steps(self.request, self)
        return

    def _verify_portfolio(self, expected_dct):
        """ Verification of portfolio creation via data-import using
        new-portfolio-template.
        1) Read details, finance of portfolio and combine them to form
           actual_dct
        2) Verify the expected_dct with actual_dct
        """
        self.status = True
        self.step_desc = 'Data Import of Portfolio verification'
        self.remarks = '\n Inside class: %s method: %s \n' % \
            utils.get_method_class_names()
        self.step_input = '\n Expected Dictionary\n{}'.format(
            json.dumps(expected_dct))
        portfolio = Portfolio(self.request)
        read_category_resp = portfolio.read_category()
        # The category title carries a 'Portfolio - '/'Port - ' prefix that
        # the category endpoint does not use.
        category = expected_dct['category'].replace('Portfolio - ', '')
        category = category.replace('Port - ', '')
        class_id = utils.filter_dct_for_key('title', category,
                                            'classId.value',
                                            read_category_resp.json())[0]
        read_title_resp = portfolio.read_title(expected_dct['title'],
                                               class_id)
        act_portfolio_details = helper.get_formatted_dct(
            read_title_resp.json())
        portfolio.portfolio_id = read_title_resp.json()['id']
        read_finance_resp = portfolio.read_finance(
            {'entity_id': portfolio.portfolio_id})
        act_portfolio_details_finance = helper.get_formatted_dct(
            read_finance_resp.json())
        # Combine all the details to form the actual_dct.
        # BUGFIX: same set-of-dicts TypeError as _verify_project.
        actual_dct = OrderedDict()
        actual_dct.update(act_portfolio_details)
        actual_dct.update(act_portfolio_details_finance)
        self.step_input += '\n Actual Dictionary\n{}'.format(
            json.dumps(actual_dct))
        expected_dct = helper.lower_keys(expected_dct)
        actual_dct = helper.lower_keys(actual_dct)
        ignore_keys = ['parent portfolio']
        try:
            self.status, remark = self.asserter.verify(actual_dct,
                                                       expected_dct,
                                                       ignore_keys)
            self.remarks += remark
        except KeyError:
            self.status = False
            self.remarks += 'KeyError Exception occurred, please see stack trace below: \n %s' \
                % traceback.format_exc()
        finally:
            db_wrapper.log_into_steps(self.request, self)
        return

    def _verify_issue(self, expected_dct):
        """ Verification of single issue creation via data-import using
        new-issue-template.
        1) Read details of the issue to form actual_dct
        2) Verify the expected_dct with actual_dct
        """
        self.status = True
        self.step_desc = 'Data Import of Issue verification'
        self.remarks = '\n Inside class: %s method: %s \n' % \
            utils.get_method_class_names()
        self.step_input = '\n Expected Dictionary\n{}'.format(
            json.dumps(expected_dct))
        issue = Issue(self.request)
        read_response = issue.read_title(expected_dct['title'])
        actual_dct = helper.get_formatted_dct(read_response.json())
        self.step_input += '\n Actual Dictionary\n{}'.format(
            json.dumps(actual_dct))
        issue.issue_id = read_response.json()['id']
        expected_dct = helper.lower_keys(expected_dct)
        actual_dct = helper.lower_keys(actual_dct)
        # TODO: Removal of this ignore_keys
        ignore_keys = ['udf issu multi-select list']
        try:
            self.status, remark = self.asserter.verify(actual_dct,
                                                       expected_dct,
                                                       ignore_keys)
            self.remarks += remark
        except KeyError:
            self.status = False
            self.remarks += 'KeyError Exception occurred, please see stack trace below: \n %s' \
                % traceback.format_exc()
        finally:
            db_wrapper.log_into_steps(self.request, self)
        return

    def _verify_asset(self, expected_dct):
        """ Verification of single asset creation via data-import using
        new-asset-template.
        1) Read details of the asset to form actual_dct
        2) Verify the expected_dct with actual_dct
        """
        self.status = True
        self.step_desc = 'Data Import of Asset verification'
        self.remarks = '\n Inside class: %s method: %s \n' % \
            utils.get_method_class_names()
        self.step_input = '\n Expected Dictionary\n{}'.format(
            json.dumps(expected_dct))
        asset = Asset(self.request)
        read_category_resp = asset.read_category()
        class_id = utils.filter_dct_for_key('title',
                                            expected_dct['asset category'],
                                            'classId.value',
                                            read_category_resp.json())[0]
        read_response = asset.read_title(expected_dct['title'], class_id)
        actual_dct = helper.get_formatted_dct(read_response.json())
        self.step_input += '\n Actual Dictionary\n{}'.format(
            json.dumps(actual_dct))
        asset.asset_id = read_response.json()['id']
        expected_dct = helper.lower_keys(expected_dct)
        actual_dct = helper.lower_keys(actual_dct)
        try:
            self.status, remark = self.asserter.verify(actual_dct,
                                                       expected_dct)
            self.remarks += remark
        except KeyError:
            self.status = False
            self.remarks += 'KeyError Exception occurred, please see stack trace below: \n %s' \
                % traceback.format_exc()
        finally:
            db_wrapper.log_into_steps(self.request, self)
        return

    def verify(self):
        """Dispatch per-entity verification for every generated row.

        Calls _verify_<entity> for each expected dict and asserts that all
        of them passed, reporting the collected remarks on failure.
        """
        status_lst = []
        remarks_lst = []
        expected_dct_lst = self.lst_expected_dct
        # Fail loudly (with a reason) if nothing was generated to verify.
        assert expected_dct_lst, 'No expected data generated - nothing to verify'
        for expected_dct in expected_dct_lst:
            method = '_verify_{}'.format(self.entity)
            func = getattr(self, method)
            func(expected_dct)
            status_lst.append(self.status)
            remarks_lst.append(self.remarks)
        assert all(status_lst), remarks_lst
        return self