def get_platform(config, logg):
    environment = config['Platform_Content']
    jira_client = Jira(environment['jira_server'], JIRA_CONFIG_PATH, log=logg.logger)

    gid = jira_client.get_field_name('Global ID')
    assignee = jira_client.get_field_name('Assignee')
    validation = jira_client.get_field_name('Verification')
    description = jira_client.get_field_name('Description')

    def get_jira_item(item):
        return {
            'KEY': str(item.key),
            # Remove version and platform inside square brackets...
            'SUMMARY': re.sub(r'^\[.*]\[.*]\s*(\[AaaG])*', "", getattr(item.fields, 'summary', '')),
            'DESCRIPTION': getattr(item.fields, description),
            'GID': str(getattr(item.fields, gid)) if getattr(item.fields, gid, None) is not None else None,
            'PARENT': str(item.fields.parent.key) if getattr(item.fields, 'parent', None) is not None else None,
            'ASSIGNEE': str(getattr(item.fields, assignee)),
            'VALIDATION': str(getattr(item.fields, validation)),
        }

    logg.logger.info(f"Reading AREQ with '{environment['areq']}'")
    areq = read_jira_partial(jira_client, environment['areq'])
    # -- todo: Remember lead and validation!
    areq_features = [get_jira_item(item) for item in areq]
    logg.logger.info(f"read {len(areq)} AREQ items")

    logg.logger.info(f"Reading PREQ with '{environment['preq']}'")
    preq = read_jira_partial(jira_client, environment['preq'])
    # -- todo: Remember lead and validation!
    logg.logger.info(f"read {len(preq)} PREQ items")
    preq_gids = [get_jira_item(item) for item in preq]

    with open(environment['pickle_file'], 'wb') as f:
        logg.logger.info(f"Writing AREQ to picklefile '{environment['pickle_file']}'")
        pickle.dump(areq_features, f)
        logg.logger.info(f"Writing PREQ to picklefile '{environment['pickle_file']}'")
        pickle.dump(preq_gids, f)

    return
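
# -- Illustrative sketch (not part of the original module): get_platform() above writes
#    two objects back to back into the same pickle file, so a consumer has to call
#    pickle.load() twice on the same handle, in the same order. Assuming the same
#    'Platform_Content' configuration block, a reader might look like this; the name
#    load_platform_pickle is hypothetical.
def load_platform_pickle(config):
    environment = config['Platform_Content']
    with open(environment['pickle_file'], 'rb') as f:
        areq_features = pickle.load(f)   # first dump: AREQ item dictionaries
        preq_gids = pickle.load(f)       # second dump: PREQ item dictionaries
    return areq_features, preq_gids
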
def copy_platform_to_platform(parser, scenario, config, queries, search, log=None):
    """Copy platform to platform, based on the UCIS and E-Feature entries of the source platform"""
    #
    # -- Some sleight of hand here... Original code iterates the results of a JQL query.
    #    If xls_source, this will read in an XLS file, look for a column named "Key" and
    #    read those key values from Jira as source items.
    #
    if 'xls_source' in scenario:
        import pandas as pd
        source_file = realpath(dirname(realpath(sys.argv[0])) + '/../' + scenario['xls_source'])
        # note: 'sheetname' is the older pandas keyword; newer pandas versions call it 'sheet_name'
        xls_data_frame = pd.read_excel(source_file, sheetname=0)

        def preq_item_list(jira):
            for i, item in xls_data_frame.iterrows():
                key = item['Key']
                if key.upper().startswith('PREQ'):
                    yield jira.issue(key), item

        def areq_e_feature_list(jira):
            for i, item in xls_data_frame.iterrows():
                key = item['Key']
                if key.upper().startswith('AREQ'):
                    yield jira.issue(key), item
    else:
        preq_source_query = get_query('preq_source_query', queries, copy_platform_to_platform.__name__,
                                      params=scenario, log=log)
        if preq_source_query is not None:
            def preq_item_list(jira):
                for preq_item in jira.do_query(preq_source_query):
                    yield preq_item, None

        areq_source_e_feature_query = get_query('areq_source_e_feature', queries,
                                                copy_platform_to_platform.__name__, params=scenario, log=log)
        if areq_source_e_feature_query is not None:
            def areq_e_feature_list(jira):
                for e_feature in jira.do_query(areq_source_e_feature_query):
                    yield e_feature, None

    preq_target_query = get_query('preq_target_query', queries, copy_platform_to_platform.__name__,
                                  params=scenario, log=log)
    areq_target_e_feature_query = get_query('areq_target_e_feature', queries, copy_platform_to_platform.__name__,
                                            params=scenario, log=log)
    target_feature_query = get_query('target_feature_query', queries, copy_platform_to_platform.__name__,
                                     params=scenario, log=log)
    target_summary_format = get_query('target_summary_format', queries, copy_platform_to_platform.__name__,
                                      params=scenario, log=log)

    log.logger.info("Examining source platform {splatform}, source android version {sversion}, "
                    "target android version {tversion}".format_map(scenario))

    verify = scenario['verify']
    update = scenario['update']
    verify_copy = scenario['verify_copy'] if 'verify_copy' in scenario else True

    log.logger.info("Verify is %s and Update is %s", verify, update)
    log.logger.info("=================================================================")

    # -- Get and format it:
    jira = Jira(scenario['name'], search, log=log.logger)

    global_id = jira.get_field_name("Global ID")
    feature_id = jira.get_field_name("Feature ID")

    source_preq_scanned = 0
    source_areq_scanned = 0
    ucis_created = 0
    e_features_created = 0
    warnings_issued = 0
    verify_failures = 0
    update_failures = 0
    processing_errors = 0
    update_count = 0
    preq_count = 0
    areq_count = 0

    def compare_items(item_kind, source_name, source_query, target_name, target_query, log=None):
        def read_items(query, log=None):
            """Read items into summary based dictionary, warning on duplicates"""
            dictionary = {}
            for item in jira.do_query(query):
                item_key = Jira.remove_version_and_platform(Jira.strip_non_ascii(item.fields.summary))
                if item_key not in dictionary:
                    dictionary[item_key] = [item]
                else:
                    # So, what we have now is a POTENTIAL duplicate. figure out if it really is.
                    if item.key != dictionary[item_key][0].key:
                        # Yep, it's not the same item key...
                        dictionary[item_key].append(item)
                        log.logger.debug("Item key '%s' : '%s' creates a duplicate entry with key '%s': '%s'",
                                         item.key, item.fields.summary,
                                         dictionary[item_key][0].key, dictionary[item_key][0].fields.summary)
                    pass
            return dictionary

        def scan_dups(source_dict, printit):
            for k, v in source_dict.items():
                if len(v) > 1:
                    keys = []
                    for item in v:
                        keys.append(item.key)
                    printit(keys, k)
            return

        source = read_items(source_query, log)
        scan_dups(source, lambda x, y: log.logger.error("Duplicate %s summaries: %s '%s'", source_name, x, y))
        log.logger.info("Source has %d items in dictionary", len(source))

        target = read_items(target_query, log)
        scan_dups(target, lambda x, y: log.logger.error("Duplicate %s summaries: %s '%s'", target_name, x, y))
        log.logger.info("Target has %d items in dictionary", len(target))

        # -- Everything in source should be copied to target:
        not_in_target = [{'source': value[0].key, 'summary': key}
                         for key, value in source.items()
                         if Jira.remove_version_and_platform(Jira.strip_non_ascii(key)) not in target]
        if len(not_in_target) > 0:
            log.logger.error("")
            log.logger.error("Could not find %s %s (source) %s summary items in target: ",
                             len(not_in_target), source_name, item_kind)
            log.logger.error("")
            for item in not_in_target:
                log.logger.error("Source '%s', summary text: '%s'", item['source'], item['summary'])
            log.logger.error("--")

        #
        # -- Target should not have stuff in it that's not from the source!:
        not_in_source = [{'target': value[0].key,
                          'summary': Jira.remove_version_and_platform(Jira.strip_non_ascii(key))}
                         for key, value in target.items()
                         if Jira.remove_version_and_platform(Jira.strip_non_ascii(key)) not in source]
        if len(not_in_source) > 0:
            log.logger.error("")
            log.logger.error("Could not find %s %s (target) %s summary items in source: ",
                             len(not_in_source), target_name, item_kind)
            log.logger.error("")
            for item in not_in_source:
                log.logger.error("%s Target '%s', summary text: '%s'", item_kind, item['target'], item['summary'])
            log.logger.error("--")
        return

    # -- Copy source preqs to target:
    #    (Get the list of already existing PREQs for this platform and version!)
    if 'copy_preq' not in scenario or scenario['copy_preq']:
        # e.g., copy_preq is undefined or copy_preq = True
        for source_preq, data_frame in preq_item_list(jira):
            preq_count += 1
            updated = False
            #
            # -- Remove old version and platform, prepend new version and platform
            source_preq_scanned += 1
            log.logger.debug("Search for: '%s'", source_preq.fields.summary)
            target_summary = Jira.remove_version_and_platform(source_preq.fields.summary)
            target_summary = target_summary_format % target_summary
            existing_preq = jira.get_item(preq_summary=target_summary, log=log)
            if existing_preq is not None:
                # -- This is good, PREQ is already there so nothing to do.
                log.logger.info("%s Found existing UCIS: %s '%s'",
                                preq_count, existing_preq.key, existing_preq.fields.summary)
                # -- Note: Patch the GID entry of this item...
                if 'FIX_GID' in scenario and scenario['FIX_GID']:
                    update_fields = {}
                    if getattr(existing_preq.fields, global_id) is None or not getattr(existing_preq.fields, global_id):
                        # -- Patch the GID entry of this item...
                        log.logger.info("GID of %s is empty, should be %s from %s",
                                        existing_preq.key, getattr(source_preq.fields, global_id), source_preq.key)
                        update_fields[global_id] = getattr(source_preq.fields, global_id)
                    if getattr(existing_preq.fields, feature_id) is None or not getattr(existing_preq.fields, feature_id):
                        # -- Patch the Feature ID entry of this item...
log.logger.info("Feature ID of %s is empty, should be %s from %s", existing_preq.key, getattr(source_preq.fields, feature_id), source_preq.key) update_fields[feature_id] = getattr(source_preq.fields, feature_id) if update and update_fields: existing_preq.update(notify=False, fields=update_fields) updated = True # # Note that because of where it is, it only affects PREQs, and we want both... # if 'UPDATE_FIELDS' in scenario and scenario['UPDATE_FIELDS']: count = update_fields_and_link(jira, source_preq, existing_preq, update, 0, scenario, log=log, data_frame=locals()) if count != 0: updated = True if update and 'UPDATE_STATUS' in scenario and scenario['UPDATE_STATUS']: if set_e_feature_status(jira, source_preq, existing_preq, log, scenario): updated = True # =================================================================================================== pass else: # -- This Target PREQ is missing, so use Source preq as template to create a new UCIS for the platform: log.logger.debug("Need to create new UCIS for: '%s'", target_summary) if update and ('CREATE_MISSING_UCIS' not in scenario or scenario['CREATE_MISSING_UCIS']): # -- Create a new UCIS(!) PREQ result = jira.create_ucis(target_summary, source_preq, scenario, log=log, data_frame=locals()) log.logger.info("%s Created a new UCIS %s for %s", areq_count, result.key, target_summary) updated = True ucis_created += 1 if update and 'UPDATE_STATUS' in scenario and scenario['UPDATE_STATUS']: if set_ucis_status(jira, source_preq, result, log, scenario): updated = True if 'UPDATE_FIELDS' in scenario and scenario['UPDATE_FIELDS']: count = update_fields_and_link(jira, source_preq, result, update, 0, scenario, log=log, data_frame=locals()) if count != 0: updated = True else: log.logger.warning("Target UCIS is missing, sourced from %s: '%s'", source_preq.key, target_summary) warnings_issued += 1 if updated: update_count += 1 if scenario['createmax'] and update_count>=scenario['createmax']: break pass update_count = 0 # -- copy source e-features to output # This keeps having an exception because the total number of items seems to be changing... if 'copy_areq' not in scenario or scenario['copy_areq']: # e.g., copy_areq is undefined or copy_areq = True # features = [feature for feature in areq_e_feature_list(jira)] # for source_e_feature in features: for source_e_feature, data_frame in areq_e_feature_list(jira): areq_count += 1 updated = False # -- The parent for this one should already be in source_features source_areq_scanned += 1 lookup = source_e_feature.fields.parent.key try: # todo: This could actually just be: parent_feature = jira.issue(lookup) # parent_feature = jira.get_item(key=lookup, log=log) except Exception as e: parent_feature = None # This should never happen! log.logger.fatal("%s: Could not locate parent %s of E-Feature %s, looked for '%s'. continuing", e, source_e_feature.fields.parent.key, source_e_feature.key, lookup) # -- Well, if we couldn't find the parent, we can't continue warnings_issued += 1 continue # -- OK, at this point we can create the E-Feature record, if it's not going to be a duplicate... target_summary = Jira.remove_version_and_platform(source_e_feature.fields.summary).strip() target_summary = target_summary_format % target_summary existing_feature = jira.get_item(areq_summary=target_summary, log=log) if existing_feature is not None: # -- This E-Feature already exists, don't touch it! log.logger.info("%s The targeted E-Feature '%s' already exists! 
%s, %s: %s", areq_count, target_summary, source_e_feature.key, existing_feature.key, existing_feature.fields.summary) if 'UPDATE_FIELDS' in scenario and scenario['UPDATE_FIELDS']: count = update_fields_and_link(jira, source_e_feature, existing_feature, update, 0, scenario, log=log, data_frame=locals()) if count != 0: updated = True if update and 'UPDATE_STATUS' in scenario and scenario['UPDATE_STATUS']: if set_e_feature_status(jira, source_e_feature, existing_feature, log, scenario): updated = True else: if update: log.logger.info("%s Creating a new E-Feature for Feature %s: %s", areq_count, parent_feature.key, target_summary) if 'clone_from_sibling' in scenario and scenario['clone_from_sibling']: created_e_feature = jira.clone_e_feature_from_e_feature(target_summary, parent_feature, source_e_feature, scenario, log=log, data_frame=locals()) else: created_e_feature = jira.clone_e_feature_from_parent(target_summary, parent_feature, scenario, sibling=source_e_feature, log=log, data_frame=locals()) updated = True e_features_created += 1 if 'UPDATE_FIELDS' in scenario and scenario['UPDATE_FIELDS']: count = update_fields_and_link(jira, source_e_feature, created_e_feature, update, 0, scenario, log=log, data_frame=locals()) if count != 0: updated = True if update and 'UPDATE_STATUS' in scenario and scenario['UPDATE_STATUS']: if set_e_feature_status(jira, source_e_feature, created_e_feature, log, scenario): updated = True else: log.logger.info("%s Target E-Feature is missing for Source E-Feature %s, Feature %s: '%s'", areq_count, source_e_feature.key, parent_feature.key, target_summary) # -- Create a new E-Feature(!) PREQ if updated: update_count += 1 if scenario['createmax'] and update_count>=scenario['createmax']: break # -- TODO: Need to account for source and target version and platform if verify_copy: compare_items("UCIS", scenario['splatform'], preq_source_query, scenario['tplatform'], preq_target_query, log=log) compare_items("E-Feature", scenario['splatform'], areq_source_e_feature_query, scenario['tplatform'], areq_target_e_feature_query, log=log) else: log.logger.warning("Not checking that copy was complete or that duplicates were created.") log.logger.info("-----------------------------------------------------------------") log.logger.info("%s UCIS source entries were considered. ", source_preq_scanned) log.logger.info("%s target UCIS entries were created. ", ucis_created) log.logger.info("%s E-Feature source entries were considered. ", source_areq_scanned) log.logger.info("%s target E-Features entries were created. ", e_features_created) log.logger.info("%s warnings were issued. ", warnings_issued) log.logger.info("") # if verify: # log.logger.info("%s E-Feature comparison failure(s). ", verify_failures) # # if update: # log.logger.info("%s new E-Feature(s) were created, %s update failures. ", update_count, update_failures) log.logger.info("%s processing error(s). ", processing_errors)
def dng_jira_crosscheck(config, log):
    if len(sys.argv) < 2:
        log.logger.error(f"{sys.argv[0]} <tag>, available tags are {[t for t in config]}")
        exit(-1)

    if sys.argv[1] not in config:
        log.logger.error(f"configuration '{sys.argv[1]}' was not found")
        exit(-1)

    environment = config[sys.argv[1]]

    urllib3.disable_warnings()
    jazz = Jazz(server_alias=environment['jazz_server'], config_path=JAZZ_PATH, use_cache=True, op_name=None)
    jira = Jira(environment['jira_server'], JIRA_PATH, log=log.logger)
    epic_name = jira.get_field_name('Epic Name')
    status = jira.get_field_name('Status')
    external_link = jira.get_field_name('External Link')

    requirements_by_id = read_jazz(jazz_client=jazz)
    items_by_id = read_jira(jira_client=jira)

    log.logger.info("Analyzing...")

    missing_requirements = [requirement for key, requirement in requirements_by_id.items()
                            if requirement.get_identifier() not in items_by_id]
    missing_requirements = sorted(missing_requirements, key=lambda y: y.get_identifier())

    missing_items = [item for key, item in items_by_id.items()
                     if getattr(item.fields, epic_name) not in requirements_by_id]
    missing_items = sorted(missing_items, key=lambda item: getattr(item.fields, epic_name))

    matched = [(requirement, items_by_id[key]) for key, requirement in requirements_by_id.items()
               if requirement.get_identifier() in items_by_id]
    matched = sorted(matched, key=sort_matched)

    wb = Workbook()
    ws_summary = wb.active
    ws_summary.title = "Summary"
    ws_summary.column_dimensions['B'].width = 12
    ws_requirements = wb.create_sheet(title="Requirements not in Jira")
    ws_items = wb.create_sheet(title="Jira items not in Collection")
    ws_matching = wb.create_sheet(title="Matched DNG and Jira Items")

    log.logger.info("--------------------------------------------------------------------------------")
    log.logger.info("Summary:")
    ws_summary['A1'] = "Summary:"

    log.logger.info(f"{len(requirements_by_id)} DNG Requirements were analyzed")
    ws_summary['B3'] = "DNG Path:"
    ws_summary['C3'] = environment['path']
    ws_summary['B4'] = "DNG Name:"
    ws_summary['C4'] = environment['name']
    ws_summary['B5'] = "Jira Query:"
    ws_summary['C5'] = environment['jira_query']
    ws_summary['B6'] = len(requirements_by_id)
    ws_summary['C6'] = "DNG Requirements were analyzed"

    log.logger.info(f"{len(items_by_id)} Jira items were analyzed")
    ws_summary['B7'] = len(items_by_id)
    ws_summary['C7'] = "Jira items were analyzed"

    log.logger.info(f"{len(missing_requirements)} Requirements in collection, not found in Jira")
    ws_summary['B8'] = len(missing_requirements)
    ws_summary['C8'] = "Requirements in collection, not found in Jira"

    log.logger.info(f"{len(missing_items)} Jira items not in specified Requirement Collection")
    ws_summary['B9'] = len(missing_items)
    ws_summary['C9'] = "Jira items not in specified Requirement Collection"

    log.logger.info(f"{len(matched)} DNG Requirements and Jira Items are matched")
    ws_summary['B10'] = len(matched)
    ws_summary['C10'] = "DNG Requirements and Jira Items are matched"

    log.logger.info("--------------------------------------------------------------------------------")

    ws_requirements['A1'] = f"Requirements from Collection '{environment['name']}' not found in Jira:"
    set_width(ws_requirements, {'A': 12, 'B': 40})
    ws_requirements.append(['-'])
    ws_requirements.append(['DNG ID', 'DNG Name'])
    for requirement in missing_requirements:
        ws_requirements.append([requirement.get_identifier(), requirement.get_name()])

    ws_items['A1'] = f"Jira items without matching DNG Requirement in Collection '{environment['name']}':"
    set_width(ws_items, {'A': 12, 'B': 8, 'C': 9, 'D': 80, 'E': 80})
    ws_items.append(['-'])
    ws_items.append(['Jira Key', 'Epic', 'Status', 'Summary', 'Link to DNG'])
    for item in missing_items:
        ws_items.append([
            item.key,
            '=HYPERLINK("' + getattr(item.fields, external_link) + '","' + str(getattr(item.fields, epic_name)) + '")',
            str(getattr(item.fields, status)),
            item.fields.summary,
            '=HYPERLINK("' + getattr(item.fields, external_link) + '")'
        ])

    ws_matching['A1'] = f"DNG Requirements and Jira Items that match from Collection '{environment['name']}':"
    set_width(ws_matching, {'A': 10, 'B': 10, 'C': 8, 'D': 9, 'E': 80, 'F': 80})
    ws_matching.append(['-'])
    ws_matching.append(['DNG ID', 'Jira Key', 'Epic', 'Status', 'Summary', 'Link to DNG'])
    for requirement, item in matched:
        ws_matching.append([
            '=' + requirement.get_identifier(),
            item.key,
            '=HYPERLINK("' + getattr(item.fields, external_link) + '","' + getattr(item.fields, epic_name) + '")',
            str(getattr(item.fields, status)),
            item.fields.summary,
            '=HYPERLINK("' + getattr(item.fields, external_link) + '")'
        ])

    wb.save(environment['xls'])
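
# -- Illustrative only (not part of the original module): the per-tag configuration block
#    that dng_jira_crosscheck() looks up via sys.argv[1]. The keys match what the function
#    reads; the values are hypothetical placeholders.
EXAMPLE_CROSSCHECK_ENVIRONMENT = {
    'jazz_server': 'dng-server-alias',      # passed to Jazz(server_alias=...)
    'jira_server': 'jira-server-alias',     # passed to Jira(...)
    'path': '/Project/Module/Collection',   # reported on the Summary sheet
    'name': 'Requirement Collection',       # used in sheet headings
    'jira_query': 'project = X AND ...',    # reported on the Summary sheet
    'xls': 'dng_jira_crosscheck.xlsx',      # output workbook written by wb.save()
}
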
class LabelFromLabeledItems:
    def __init__(self, parser, scenario, config, queries, search, log=None, jira=None):
        self.parser = parser
        self.jira = jira
        self.scenario = scenario
        self.config = config
        self.queries = queries
        self.search = search
        self.logger = log.logger
        self.update_count = 0
        self.added_count = 0
        self.createmax = self.scenario['createmax'] if 'createmax' in self.scenario else 0
        self.verify = self.scenario['verify'] if 'verify' in self.scenario else False
        self.update = self.scenario['update'] if 'update' in self.scenario else False

        # -- Authenticate to Jira server
        self.jira = Jira(self.scenario['name'], self.search, self.logger)
        pass

    def update_labels_field(self, target_preq, target_field="Label", delete_labels=[], add_labels=[]):
        updated = False

        delete_list = [x for x in self.scenario['delete_labels']] \
            if 'delete_labels' in self.scenario else delete_labels.copy()
        add_list = [x for x in self.scenario['add_labels']] \
            if 'add_labels' in self.scenario else add_labels.copy()

        label_field_name = self.jira.get_field_name(target_field)
        label_field = getattr(target_preq.fields, label_field_name)
        result_labels = {x: x for x in label_field} if isinstance(label_field, list) else {}
        original_labels = result_labels.copy()

        # -- Remove labels in delete list from source_labels:
        for regex in delete_list:
            for key, item in result_labels.copy().items():
                if search(regex, item):
                    self.logger.info("Removing label %s (found by regex '%s') from item %s",
                                     key, regex, target_preq.key)
                    del result_labels[key]

        # -- and insert those in the add list:
        for label in add_list:
            self.logger.info("Adding label %s to item %s", label, target_preq.key)
            result_labels[label] = label

        if result_labels != original_labels:
            update_fields = {'labels': [key for key, item in result_labels.items()]}
            comment_text = ("The Label on %s was updated from %s to %s by {command}.\n"
                            + "\n"
                            + "%s").format_map(self.scenario) \
                % (target_preq.key,
                   [key for key, value in original_labels.items()],
                   [key for key, value in result_labels.items()],
                   self.scenario['comment'] if self.scenario['comment'] is not None else "")
            if self.update:
                # -- only update if we're going to change something...
                self.logger.info("Updating %s with %s, was %s",
                                 target_preq.key,
                                 [key for key, value in result_labels.items()],
                                 [key for key, value in original_labels.items()])
                target_preq.update(notify=False, fields=update_fields)
                self.jira.jira_client.add_comment(target_preq, comment_text)
                updated = True
            else:
                self.logger.info("%s: NO LABEL UPDATE; labels change from %s, to %s; Comment would be\n%s",
                                 target_preq.key,
                                 [key for key, value in original_labels.items()],
                                 [key for key, value in result_labels.items()],
                                 comment_text)

        if updated:
            self.update_count += 1

        return self.update_count

    def label_from_labeled_items(self):
        """Find the labeled AREQ items and add the label to corresponding target items"""
        self.logger.info("Labeling target platform {tplatform}, target android version {tversion} with {tlabel}"
                         .format_map(self.scenario))
        self.logger.info("Update is %s", self.update)
        self.logger.info("=================================================================")

        # -- Query to get the list of source items with the target label
        items_with_label_query = get_query('items_with_label_query', self.queries,
                                           LabelFromLabeledItems.label_from_labeled_items.__name__,
                                           params=self.scenario, log=self.logger)
        items_with_label_query = items_with_label_query.format_map(self.scenario)
        # items_with_label_list = [item for item in self.jira.do_query(items_with_label_query)]

        # -- Note: parent Feature could occur 1 or more times.
        #    (Also zero, but that doesn't matter in this case)
        by_parent_key = {item.fields.parent.key: item
                         for item in self.jira.do_query(items_with_label_query)}

        # -- Query to get the (total) list of potential targets
        targets_for_label_query = get_query('targets_for_label_query', self.queries,
                                            LabelFromLabeledItems.label_from_labeled_items.__name__,
                                            params=self.scenario, log=self.logger)
        targets_for_label_query = targets_for_label_query.format_map(self.scenario)
        target_item_list = [item for item in self.jira.do_query(targets_for_label_query)]

        # -- Build a list of E-Features that share a parent Feature that's labeled in the source query
        target_items_with_same_parent = [item for item in target_item_list
                                         if item.fields.parent.key in by_parent_key]

        for target in target_items_with_same_parent:
            self.update_labels_field(target, target_field="Labels", add_labels=[self.scenario['tlabel']])
            if self.createmax and self.update_count >= self.createmax:
                break

        self.logger.info("-----------------------------------------------------------------")
        self.logger.info("%s E-Feature labels were updated. ", self.update_count)
        self.logger.info("")
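
# -- Illustrative only (not part of the original module): the scenario keys consumed by
#    LabelFromLabeledItems. Values are hypothetical placeholders; 'command' and 'comment'
#    feed the audit comment added to each updated item.
EXAMPLE_LABEL_SCENARIO = {
    'name': 'jira-server-alias',
    'tplatform': 'TargetPlatform',
    'tversion': 'P',
    'tlabel': 'Some_Label',           # label propagated to matching E-Features
    'update': False,                  # dry run unless True
    'createmax': 0,                   # optional cap on updates
    'delete_labels': [],              # optional regexes of labels to strip
    'add_labels': [],                 # optional extra labels to add
    'command': 'label_from_labeled_items',
    'comment': None,                  # extra text appended to the audit comment
}
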
class ProjectToCSV:
    def __init__(self, parser, scenario, config, queries, search, log=None, jira=None):
        self.parser = parser
        self.jira = jira
        self.scenario = scenario
        self.config = config
        self.queries = queries
        self.search = search
        self.logger = log.logger
        self.update_count = 0
        self.added_count = 0
        self.createmax = self.scenario['createmax'] if 'createmax' in self.scenario else 0
        self.verify = self.scenario['verify'] if 'verify' in self.scenario else False
        self.update = self.scenario['update'] if 'update' in self.scenario else False

        # -- Authenticate to Jira server
        self.jira = Jira(self.scenario['name'], self.search, self.logger)
        pass

    def run(self):
        """Iterate platforms, then items found by platform query"""
        for key, platform in self.scenario['platforms'].items():
            self.logger.info(f"Processing {platform['splatform']}")
            query_values = {
                'sproject': '"' + platform['sproject'] + '"',
                'splatform': '"' + platform['splatform'] + '"',
                'sexists_on': '"' + platform['sexists_on'] + '"',
                'sversion': platform['sversion'],
            }
            field_list = platform['fields']
            items_query = get_query('items_query', self.queries, __name__,
                                    params=query_values, log=self.logger)
            # items_query = items_query.format_map(query_values)

            with open(platform['splatform'] + '.csv', "w") as f:
                f.write(f"{platform['splatform']}\n")
                f.write(f"\n")
                item_count = 0
                for item in self.jira.do_query(items_query):
                    value_list = []
                    for field_name in field_list:
                        # -- Look the field up on the issue, then under fields, then by its Jira display name
                        if hasattr(item, field_name):
                            local_value = getattr(item, field_name)
                        elif hasattr(item.fields, field_name):
                            local_value = getattr(item.fields, field_name)
                        else:
                            local_name = self.jira.get_field_name(field_name)
                            if hasattr(item, local_name):
                                local_value = getattr(item, local_name)
                            elif hasattr(item.fields, local_name):
                                local_value = getattr(item.fields, local_name)
                            else:
                                local_value = None
                        value_list.append(local_value)
                    # -- Coerce values to strings so join() doesn't fail on None or non-string fields
                    result = ",".join(str(value) if value is not None else "" for value in value_list)
                    f.write(f"{result}\n")
                    item_count += 1

            self.logger.info(f"{item_count} entries written to {platform['splatform']}")
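
# -- Illustrative only (not part of the original module): the structure ProjectToCSV.run()
#    iterates over. Values are hypothetical placeholders; entries in 'fields' may name issue
#    attributes, issue.fields attributes, or display names resolved via get_field_name().
EXAMPLE_PROJECT_TO_CSV_SCENARIO = {
    'name': 'jira-server-alias',
    'platforms': {
        'platform_1': {
            'sproject': 'PROJECT',
            'splatform': 'SourcePlatform',   # also names the output <splatform>.csv file
            'sexists_on': 'SourcePlatform',
            'sversion': 'O',
            'fields': ['key', 'summary', 'Global ID'],
        },
    },
}

# -- Usage sketch: the class is constructed with the same arguments as the other entry
#    points and then driven by run(), e.g.
#    ProjectToCSV(parser, scenario, config, queries, search, log=log).run()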