def __init__(self, parser, scenario, config, queries, search, log=None, jira=None):
    """Capture run configuration and open an authenticated Jira connection.

    :param parser:   command-line parser object (stored, not used here)
    :param scenario: dict of scenario settings; optional keys 'createmax',
                     'verify' and 'update' default to 0/False/False
    :param config:   configuration mapping (stored for later use)
    :param queries:  query definitions (stored for later use)
    :param search:   search/config path handed to the Jira client
    :param log:      logging wrapper exposing a ``.logger`` attribute
    :param jira:     ignored; kept for backward compatibility.  The attribute
                     is unconditionally replaced by a fresh ``Jira`` client below.
    """
    self.parser = parser
    self.jira = jira  # NOTE: dead value — overwritten by the Jira() call below
    self.scenario = scenario
    self.config = config
    self.queries = queries
    self.search = search
    # Guard the documented log=None default instead of crashing on .logger
    self.logger = log.logger if log is not None else None
    self.update_count = 0
    self.added_count = 0
    # -- Optional scenario switches; use .get() instead of the verbose
    #    "x if key in d else default" form.
    self.createmax = scenario.get('createmax', 0)
    self.verify = scenario.get('verify', False)
    self.update = scenario.get('update', False)
    # -- Authenticate to Jira server
    self.jira = Jira(self.scenario['name'], self.search, self.logger)
def setUpClass(cls):
    """One-time test fixture: route log output to Test_Update.log and log in to Jira.

    Stores the authenticated client on ``cls.jira`` for use by the test methods.
    """
    log.setup_logging(LOG_CONFIG_FILE,
                      override={'handlers': {'info_file_handler': {'filename': 'Test_Update.log'}}})
    cls.jira = Jira(TEST_HOST, CONFIG_FILE, log.logger)
    log.logger.info("UCIS Test starting...")
def setUpClass(cls):
    """One-time test fixture: configure logging, log in to Jira, and create a
    shared parent Feature (AREQ) that the transition tests hang E-Features off.

    The created issue is stored on ``cls.feature``.
    """
    log.setup_logging(LOG_CONFIG_FILE,
                      override={'handlers': {'info_file_handler': {'filename': 'TestTransition_to_state.log'}}})
    cls.jira = Jira(TEST_HOST, CONFIG_FILE, log.logger)
    log.logger.info("E-Feature Test starting...")
    # -- Create a common Parent Feature...
    #    Custom fields are resolved by display name so the dict works across
    #    Jira instances with different customfield_NNNNN ids.
    new_feature_dict = {
        'project': {'key': 'AREQ'},
        'summary': 'This is a Nosetest Feature, may be deleted',
        'description': 'This Feature was created for integration testing',  # fixed typo: "intgration"
        'issuetype': {'name': 'Feature'},
        'assignee': {'name': 'pfhanchx'},
        'components': [{'name': 'Unknown'}],
        cls.jira.get_field_name('Android Version(s)'): [{'value': 'O-MR2'}],
        cls.jira.get_field_name('Platform/Program'): [{'value': 'Icelake-U SDC'}],
        cls.jira.get_field_name('Classification'): [{'value': 'Functional'}],
        cls.jira.get_field_name('Profile/s'): [{'value': 'Other'}],
    }
    cls.feature = cls.jira.jira_client.create_issue(fields=new_feature_dict)
def get_platform(config, logg):
    """Read AREQ and PREQ items for a platform from Jira and pickle them.

    Reads the queries under ``config['Platform_Content']``, flattens each issue
    into a plain dict, and writes two consecutive pickle records (AREQ list
    first, then PREQ list) into ``environment['pickle_file']``.

    :param config: configuration mapping containing a 'Platform_Content' section
                   with 'jira_server', 'areq', 'preq' and 'pickle_file' keys
    :param logg:   logging wrapper exposing a ``.logger`` attribute
    """
    environment = config['Platform_Content']
    jira_client = Jira(environment['jira_server'], JIRA_CONFIG_PATH, log=logg.logger)
    # Resolve custom-field display names to their internal field ids once.
    gid = jira_client.get_field_name('Global ID')
    assignee = jira_client.get_field_name('Assignee')
    validation = jira_client.get_field_name('Verification')
    description = jira_client.get_field_name('Description')

    def get_jira_item(item):
        # Flatten one Jira issue into a plain, picklable dict.
        return {
            'KEY': str(item.key),
            # Remove version and platform inside square brackets...
            'SUMMARY': re.sub(r'^\[.*]\[.*]\s*(\[AaaG])*', "", getattr(item.fields, 'summary', '')),
            'DESCRIPTION': getattr(item.fields, description),
            'GID': str(getattr(item.fields, gid)) if getattr(item.fields, gid, None) is not None else None,
            'PARENT': str(item.fields.parent.key) if getattr(item.fields, 'parent', None) is not None else None,
            'ASSIGNEE': str(getattr(item.fields, assignee)),
            'VALIDATION': str(getattr(item.fields, validation)),
        }

    # NOTE: the original logged through the module-global ``log`` while taking a
    # ``logg`` parameter; use the parameter consistently so callers control output.
    logg.logger.info(f"Reading AREQ with '{environment['areq']}'")
    areq = read_jira_partial(jira_client, environment['areq'])
    # -- todo: Remember lead and validation!
    areq_features = [get_jira_item(item) for item in areq]
    logg.logger.info(f"read {len(areq)} AREQ items")

    logg.logger.info(f"Reading PREQ with '{environment['preq']}'")
    preq = read_jira_partial(jira_client, environment['preq'])
    # -- todo: Remember lead and validation!
    logg.logger.info(f"read {len(preq)} PREQ items")
    preq_gids = [get_jira_item(item) for item in preq]

    with open(environment['pickle_file'], 'wb') as f:
        logg.logger.info(f"Writing AREQ to picklefile '{environment['pickle_file']}'")
        pickle.dump(areq_features, f)
        logg.logger.info(f"Writing PREQ to picklefile '{environment['pickle_file']}'")
        pickle.dump(preq_gids, f)
def copy_platform_to_platform(parser, scenario, config, queries, search, log=None):
    """Copy platform to platform, based on the UCIS and E-Feature entries of the source platform.

    Source items come either from an XLS file ('xls_source' in scenario) or from
    JQL queries.  For each source PREQ a matching target UCIS is located or
    created; for each source AREQ E-Feature a matching target E-Feature is
    located or cloned.  Finally (unless 'verify_copy' is False) source and
    target populations are cross-checked for missing/duplicate summaries.

    Scenario switches (all optional): copy_preq, copy_areq, FIX_GID,
    UPDATE_FIELDS, UPDATE_STATUS, CREATE_MISSING_UCIS, clone_from_sibling,
    createmax, verify, update, verify_copy.
    """
    #
    # -- Some sleight of hand here... Original code iterates the results of a JQL query.
    #    If xls_source, this will read in an XLS file, look for a column named "Key" and
    #    read those key values from Jira as source items.
    #
    if 'xls_source' in scenario:
        import pandas as pd
        source_file = realpath(dirname(realpath(sys.argv[0])) + '/../' + scenario['xls_source'])
        # NOTE(review): 'sheetname' was removed in modern pandas (use 'sheet_name');
        # left as-is because this code appears pinned to an older pandas.
        xls_data_frame = pd.read_excel(source_file, sheetname=0)

        def preq_item_list(jira):
            # Yield (issue, spreadsheet row) for every PREQ key in the sheet.
            for i, item in xls_data_frame.iterrows():
                key = item['Key']
                if key.upper().startswith('PREQ'):
                    yield jira.issue(key), item

        def areq_e_feature_list(jira):
            # Yield (issue, spreadsheet row) for every AREQ key in the sheet.
            for i, item in xls_data_frame.iterrows():
                key = item['Key']
                if key.upper().startswith('AREQ'):
                    yield jira.issue(key), item
    else:
        preq_source_query = get_query('preq_source_query', queries,
                                      copy_platform_to_platform.__name__, params=scenario, log=log)
        if preq_source_query is not None:
            def preq_item_list(jira):
                # Query-driven source: no spreadsheet row, so yield None alongside.
                for preq_item in jira.do_query(preq_source_query):
                    yield preq_item, None

        areq_source_e_feature_query = get_query('areq_source_e_feature', queries,
                                                copy_platform_to_platform.__name__, params=scenario, log=log)
        if areq_source_e_feature_query is not None:
            def areq_e_feature_list(jira):
                for e_feature in jira.do_query(areq_source_e_feature_query):
                    yield e_feature, None

    preq_target_query = get_query('preq_target_query', queries,
                                  copy_platform_to_platform.__name__, params=scenario, log=log)
    areq_target_e_feature_query = get_query('areq_target_e_feature', queries,
                                            copy_platform_to_platform.__name__, params=scenario, log=log)
    target_feature_query = get_query('target_feature_query', queries,
                                     copy_platform_to_platform.__name__, params=scenario, log=log)
    target_summary_format = get_query('target_summary_format', queries,
                                      copy_platform_to_platform.__name__, params=scenario, log=log)

    log.logger.info("Examining source platform {splatform}, source android version {sversion}, "
                    "target android version {tversion}".format_map(scenario))

    verify = scenario['verify']
    update = scenario['update']
    verify_copy = scenario['verify_copy'] if 'verify_copy' in scenario else True
    log.logger.info("Verify is %s and Update is %s", verify, update)
    log.logger.info("=================================================================")

    # -- Get and format it:
    jira = Jira(scenario['name'], search, log=log.logger)
    global_id = jira.get_field_name("Global ID")
    feature_id = jira.get_field_name("Feature ID")

    source_preq_scanned = 0
    source_areq_scanned = 0
    ucis_created = 0
    e_features_created = 0
    warnings_issued = 0
    verify_failures = 0
    update_failures = 0
    processing_errors = 0
    update_count = 0
    preq_count = 0
    areq_count = 0

    def compare_items(item_kind, source_name, source_query, target_name, target_query, log=None):
        """Cross-check source vs target populations by scrubbed summary text."""

        def read_items(query, log=None):
            """Read items into summary based dictionary, warning on duplicates"""
            dictionary = {}
            for item in jira.do_query(query):
                item_key = Jira.remove_version_and_platform(Jira.strip_non_ascii(item.fields.summary))
                if item_key not in dictionary:
                    dictionary[item_key] = [item]
                else:
                    # So, what we have now is a POTENTIAL duplicate. figure out if it really is.
                    if item.key != dictionary[item_key][0].key:
                        # Yep, it's not the same item key...
                        dictionary[item_key].append(item)
                        log.logger.debug("Item key '%s' : '%s' creates a duplicate entry with key '%s': '%s'",
                                         item.key, item.fields.summary,
                                         dictionary[item_key][0].key, dictionary[item_key][0].fields.summary)
            return dictionary

        def scan_dups(source_dict, printit):
            # Report every summary that maps to more than one issue key.
            for k, v in source_dict.items():
                if len(v) > 1:
                    keys = []
                    for item in v:
                        keys.append(item.key)
                    printit(keys, k)
            return

        source = read_items(source_query, log)
        scan_dups(source, lambda x, y: log.logger.error("Duplicate %s summaries: %s '%s'", source_name, x, y))
        log.logger.info("Source has %d items in dictionary", len(source))
        target = read_items(target_query, log)
        scan_dups(target, lambda x, y: log.logger.error("Duplicate %s summaries: %s '%s'", target_name, x, y))
        log.logger.info("Target has %d items in dictionary", len(target))

        # -- Everything in source should be copied to target:
        not_in_target = [{'source': value[0].key, 'summary': key}
                         for key, value in source.items()
                         if Jira.remove_version_and_platform(Jira.strip_non_ascii(key)) not in target]
        if len(not_in_target) > 0:
            log.logger.error("")
            log.logger.error("Could not find %s %s (source) %s summary items in target: ",
                             len(not_in_target), source_name, item_kind)
            log.logger.error("")
            for item in not_in_target:
                log.logger.error("Source '%s', summary text: '%s'", item['source'], item['summary'])
            log.logger.error("--")

        #
        # -- Target should not have stuff in it that's not from the source!:
        not_in_source = [{'target': value[0].key,
                          'summary': Jira.remove_version_and_platform(Jira.strip_non_ascii(key))}
                         for key, value in target.items()
                         if Jira.remove_version_and_platform(Jira.strip_non_ascii(key)) not in source]
        if len(not_in_source) > 0:
            log.logger.error("")
            log.logger.error("Could not find %s %s (target) %s summary items in source: ",
                             len(not_in_source), target_name, item_kind)
            log.logger.error("")
            for item in not_in_source:
                log.logger.error("%s Target '%s', summary text: '%s'", item_kind, item['target'], item['summary'])
            log.logger.error("--")
        return

    # -- Copy source preqs to target:
    #    (Get the list of already existing PREQs for this platform and version!)
    if 'copy_preq' not in scenario or scenario['copy_preq']:  # e.g., copy_preq is undefined or copy_preq = True
        for source_preq, data_frame in preq_item_list(jira):
            preq_count += 1
            updated = False
            #
            # -- Remove old version and platform, prepend new version and platform
            source_preq_scanned += 1
            log.logger.debug("Search for: '%s'", source_preq.fields.summary)
            target_summary = Jira.remove_version_and_platform(source_preq.fields.summary)
            target_summary = target_summary_format % target_summary
            existing_preq = jira.get_item(preq_summary=target_summary, log=log)
            if existing_preq is not None:
                # -- This is good, PREQ is already there so nothing to do.
                log.logger.info("%s Found existing UCIS: %s '%s'",
                                preq_count, existing_preq.key, existing_preq.fields.summary)
                # -- Note: Patch the GID entry of this item...
                if 'FIX_GID' in scenario and scenario['FIX_GID']:
                    update_fields = {}
                    if getattr(existing_preq.fields, global_id) is None or not getattr(existing_preq.fields, global_id):
                        # -- Patch the GID entry of this item...
                        log.logger.info("GID of %s is empty, should be %s from %s",
                                        existing_preq.key, getattr(source_preq.fields, global_id), source_preq.key)
                        update_fields[global_id] = getattr(source_preq.fields, global_id)
                    if getattr(existing_preq.fields, feature_id) is None or not getattr(existing_preq.fields, feature_id):
                        # -- Patch the Feature ID entry of this item...
                        log.logger.info("Feature ID of %s is empty, should be %s from %s",
                                        existing_preq.key, getattr(source_preq.fields, feature_id), source_preq.key)
                        update_fields[feature_id] = getattr(source_preq.fields, feature_id)
                    if update and update_fields:
                        existing_preq.update(notify=False, fields=update_fields)
                        updated = True
                #
                # Note that because of where it is, it only affects PREQs, and we want both...
                #
                if 'UPDATE_FIELDS' in scenario and scenario['UPDATE_FIELDS']:
                    count = update_fields_and_link(jira, source_preq, existing_preq, update, 0,
                                                   scenario, log=log, data_frame=locals())
                    if count != 0:
                        updated = True
                if update and 'UPDATE_STATUS' in scenario and scenario['UPDATE_STATUS']:
                    # NOTE(review): this calls set_e_feature_status on a PREQ/UCIS item,
                    # while the creation branch below uses set_ucis_status — confirm intended.
                    if set_e_feature_status(jira, source_preq, existing_preq, log, scenario):
                        updated = True
                # ===================================================================================================
            else:
                # -- This Target PREQ is missing, so use Source preq as template to create a new UCIS for the platform:
                log.logger.debug("Need to create new UCIS for: '%s'", target_summary)
                if update and ('CREATE_MISSING_UCIS' not in scenario or scenario['CREATE_MISSING_UCIS']):
                    # -- Create a new UCIS(!) PREQ
                    result = jira.create_ucis(target_summary, source_preq, scenario, log=log, data_frame=locals())
                    # (fixed: this is the PREQ loop, so log preq_count, not areq_count)
                    log.logger.info("%s Created a new UCIS %s for %s", preq_count, result.key, target_summary)
                    updated = True
                    ucis_created += 1
                    if update and 'UPDATE_STATUS' in scenario and scenario['UPDATE_STATUS']:
                        if set_ucis_status(jira, source_preq, result, log, scenario):
                            updated = True
                    if 'UPDATE_FIELDS' in scenario and scenario['UPDATE_FIELDS']:
                        count = update_fields_and_link(jira, source_preq, result, update, 0,
                                                       scenario, log=log, data_frame=locals())
                        if count != 0:
                            updated = True
                else:
                    log.logger.warning("Target UCIS is missing, sourced from %s: '%s'",
                                       source_preq.key, target_summary)
                    warnings_issued += 1

            if updated:
                update_count += 1
                # Stop early once the createmax budget is exhausted.
                if scenario['createmax'] and update_count >= scenario['createmax']:
                    break

    update_count = 0

    # -- copy source e-features to output
    #    This keeps having an exception because the total number of items seems to be changing...
    if 'copy_areq' not in scenario or scenario['copy_areq']:  # e.g., copy_areq is undefined or copy_areq = True
        # features = [feature for feature in areq_e_feature_list(jira)]
        # for source_e_feature in features:
        for source_e_feature, data_frame in areq_e_feature_list(jira):
            areq_count += 1
            updated = False
            # -- The parent for this one should already be in source_features
            source_areq_scanned += 1
            lookup = source_e_feature.fields.parent.key
            try:
                # todo: This could actually just be:
                parent_feature = jira.issue(lookup)
                # parent_feature = jira.get_item(key=lookup, log=log)
            except Exception as e:
                parent_feature = None  # This should never happen!
                log.logger.fatal("%s: Could not locate parent %s of E-Feature %s, looked for '%s'. continuing",
                                 e, source_e_feature.fields.parent.key, source_e_feature.key, lookup)
                # -- Well, if we couldn't find the parent, we can't continue
                warnings_issued += 1
                continue

            # -- OK, at this point we can create the E-Feature record, if it's not going to be a duplicate...
            target_summary = Jira.remove_version_and_platform(source_e_feature.fields.summary).strip()
            target_summary = target_summary_format % target_summary
            existing_feature = jira.get_item(areq_summary=target_summary, log=log)
            if existing_feature is not None:
                # -- This E-Feature already exists, don't touch it!
                log.logger.info("%s The targeted E-Feature '%s' already exists! %s, %s: %s",
                                areq_count, target_summary, source_e_feature.key,
                                existing_feature.key, existing_feature.fields.summary)
                if 'UPDATE_FIELDS' in scenario and scenario['UPDATE_FIELDS']:
                    count = update_fields_and_link(jira, source_e_feature, existing_feature, update, 0,
                                                   scenario, log=log, data_frame=locals())
                    if count != 0:
                        updated = True
                if update and 'UPDATE_STATUS' in scenario and scenario['UPDATE_STATUS']:
                    if set_e_feature_status(jira, source_e_feature, existing_feature, log, scenario):
                        updated = True
            else:
                if update:
                    log.logger.info("%s Creating a new E-Feature for Feature %s: %s",
                                    areq_count, parent_feature.key, target_summary)
                    if 'clone_from_sibling' in scenario and scenario['clone_from_sibling']:
                        created_e_feature = jira.clone_e_feature_from_e_feature(
                            target_summary, parent_feature, source_e_feature, scenario,
                            log=log, data_frame=locals())
                    else:
                        created_e_feature = jira.clone_e_feature_from_parent(
                            target_summary, parent_feature, scenario, sibling=source_e_feature,
                            log=log, data_frame=locals())
                    updated = True
                    e_features_created += 1
                    if 'UPDATE_FIELDS' in scenario and scenario['UPDATE_FIELDS']:
                        count = update_fields_and_link(jira, source_e_feature, created_e_feature, update, 0,
                                                       scenario, log=log, data_frame=locals())
                        if count != 0:
                            updated = True
                    if update and 'UPDATE_STATUS' in scenario and scenario['UPDATE_STATUS']:
                        if set_e_feature_status(jira, source_e_feature, created_e_feature, log, scenario):
                            updated = True
                else:
                    log.logger.info("%s Target E-Feature is missing for Source E-Feature %s, Feature %s: '%s'",
                                    areq_count, source_e_feature.key, parent_feature.key, target_summary)
                    # -- Create a new E-Feature(!) PREQ

            if updated:
                update_count += 1
                if scenario['createmax'] and update_count >= scenario['createmax']:
                    break

    # -- TODO: Need to account for source and target version and platform
    if verify_copy:
        compare_items("UCIS", scenario['splatform'], preq_source_query,
                      scenario['tplatform'], preq_target_query, log=log)
        compare_items("E-Feature", scenario['splatform'], areq_source_e_feature_query,
                      scenario['tplatform'], areq_target_e_feature_query, log=log)
    else:
        log.logger.warning("Not checking that copy was complete or that duplicates were created.")

    log.logger.info("-----------------------------------------------------------------")
    log.logger.info("%s UCIS source entries were considered. ", source_preq_scanned)
    log.logger.info("%s target UCIS entries were created. ", ucis_created)
    log.logger.info("%s E-Feature source entries were considered. ", source_areq_scanned)
    log.logger.info("%s target E-Features entries were created. ", e_features_created)
    log.logger.info("%s warnings were issued. ", warnings_issued)
    log.logger.info("")
    # if verify:
    #     log.logger.info("%s E-Feature comparison failure(s). ", verify_failures)
    #
    # if update:
    #     log.logger.info("%s new E-Feature(s) were created, %s update failures. ", update_count, update_failures)
    log.logger.info("%s processing error(s). ", processing_errors)
def e_feature_by_parent(parser, scenario, config, queries, search, log=None):
    """Report Features and their E-Feature children for the scenario.

    Builds a dict of Features keyed by scrubbed key, collects the E-Features
    whose parent is in that dict, and logs counts plus the key lists.  The
    actual update pass is currently disabled (see the triple-quoted block
    below), so ``updates`` always reports 0.
    """
    update = scenario['update']
    log.logger.info("Update is %s", update)
    log.logger.info("Marking AREQ items as superseded by PREQ item.")
    log.logger.info("=================================================================")
    updates = 0
    # -- Get and format it:
    jira = Jira(scenario['name'], search, log=log.logger)
    work = E_Feature_by_Parent(jira, parser, scenario, config, queries, search, log=log)
    # Features keyed by scrubbed issue key; E-Features filtered to known parents.
    parents = {work.scrub(parent.key): parent for parent in work.get_features()}
    all_children = [child for child in work.get_e_features()]
    children = [child for child in all_children if work.scrub(child.fields.parent.key) in parents]
    # Children of known parents that are NOT tagged with the "Kernel" component.
    disagreement = [child for child in children if work.scrub(child.fields.parent.key) in parents \
                    and "Kernel" not in [item.name for item in child.fields.components]]
    log.logger.info("Found %d Features to process", len(parents))
    log.logger.info("Found %d E-Features", len(all_children))
    log.logger.info("Found %d E-Features to process", len(children))
    log.logger.info("Found %d E-Features not set to Kernel", len(disagreement))
    log.logger.info("")
    log.logger.info("Feature Keys:")
    log.logger.info("")
    # (parents is a dict, so parent is just the key...)
    log.logger.info(", ".join([parent for parent in parents]))
    log.logger.info("")
    log.logger.info("E-Features to process:")
    log.logger.info("")
    log.logger.info(", ".join([child.key for child in children]))
    log.logger.info("")
    # --> Process the E-Features, setting classification to "Functional"
    # NOTE(review): the block below is dead code disabled via a string literal;
    # it references names (target_preq, classification, update_fields, ...) that
    # are not defined in this function, so it cannot simply be re-enabled.
    """
    for e_feature in children:
        if target_preq.fields.issuetype.name not in ['E-Feature']:
            # -- (Can't add classification to E-Feature)
            classification_value = getattr(target_preq.fields, classification)
            classification_value = [v.value for v in classification_value]
            if classification_value is None or \
               'Unassigned' in classification_value or \
               'None' in classification_value:
                # -- Unconditional set:
                update_fields[classification] = [{'value': 'Functional Use Case'}]
            else:
                # -- Seems wrong to not catch this condition...
                # FIXME: This is likely the wrong way to check this...
                if ['Functional Use Case'] != classification_value:
                    log.logger.warning("Item %s Classification was alreaady set to %s",
                                       target_preq.key, getattr(target_preq.fields, classification))
                    log.logger.warning("And is being overwritten")
                    update_fields[classification] = [{'value': 'Functional Use Case'}]

        if len(update_fields) > 0:
            if update:
                # -- only update if we're going to change something...
                log.logger.info("Updating %s with %s", target_preq.key,
                                {**update_fields, **assignee_fields, **lead_fields})
                target_preq.update(notify=False, fields=update_fields)
                try:
                    target_preq.update(notify=False, fields=assignee_fields)
                except JIRAError as e:
                    log.logger.error("Jira error %s", e)
                try:
                    target_preq.update(notify=False, fields=lead_fields)
                except JIRAError as e:
                    log.logger.error("Jira error %s", e)
                updated = True
            else:
                log.logger.info("NO UPDATE; SHOULD update %s with %s", target_preq.key, update_fields)
    """
    # --> Process Features, setting classification to "Functional"
    # -- Process the E-Features first (once we change the parent, we won't be able to find the child!)
    log.logger.info("-----------------------------------------------------------------")
    log.logger.info("%s items were updated ", updates)
    log.logger.info("")
    return
def dng_jira_crosscheck(config, log):
    """Cross-check a DNG requirement collection against Jira and write an XLSX report.

    Expects ``sys.argv[1]`` to name an entry in *config*.  Reads requirements
    from DNG (Jazz) and items from Jira, matches them by identifier, and writes
    four worksheets (Summary, missing requirements, missing Jira items, and
    matched pairs) to the workbook path in ``environment['xls']``.

    :param config: mapping of tag -> environment settings
    :param log:    logging wrapper exposing a ``.logger`` attribute
    """
    if len(sys.argv) < 2:
        log.logger.error(f"{sys.argv[0]} <tag>, available tags are {[t for t in config]}")
        sys.exit(-1)  # sys.exit instead of the interactive-only exit()
    if sys.argv[1] not in config:
        log.logger.error(f"configuration '{sys.argv[1]}' was not found")
        sys.exit(-1)

    environment = config[sys.argv[1]]
    urllib3.disable_warnings()
    jazz = Jazz(server_alias=environment['jazz_server'], config_path=JAZZ_PATH, use_cache=True, op_name=None)
    jira = Jira(environment['jira_server'], JIRA_PATH, log=log.logger)
    # Resolve custom-field display names to internal ids once.
    epic_name = jira.get_field_name('Epic Name')
    status = jira.get_field_name('Status')
    external_link = jira.get_field_name('External Link')

    requirements_by_id = read_jazz(jazz_client=jazz)
    items_by_id = read_jira(jira_client=jira)

    log.logger.info("Analyzing...")
    # Requirements with no Jira item of the same identifier.
    missing_requirements = [requirement for key, requirement in requirements_by_id.items()
                            if requirement.get_identifier() not in items_by_id]
    missing_requirements = sorted(missing_requirements, key=lambda y: y.get_identifier())
    # Jira items whose Epic Name is not a known requirement id.
    missing_items = [item for key, item in items_by_id.items()
                     if getattr(item.fields, epic_name) not in requirements_by_id]
    missing_items = sorted(missing_items, key=lambda item: getattr(item.fields, epic_name))
    # Matched pairs; assumes each dict key equals the requirement identifier.
    matched = [(requirement, items_by_id[key]) for key, requirement in requirements_by_id.items()
               if requirement.get_identifier() in items_by_id]
    matched = sorted(matched, key=sort_matched)

    wb = Workbook()
    ws_summary = wb.active
    ws_summary.title = "Summary"
    ws_summary.column_dimensions['B'].width = 12
    ws_requirements = wb.create_sheet(title="Requirements not in Jira")
    ws_items = wb.create_sheet(title="Jira items not in Collection")
    ws_matching = wb.create_sheet(title="Matched DNG and Jira Items")

    log.logger.info("--------------------------------------------------------------------------------")
    log.logger.info(f"Summary:")
    ws_summary['A1'] = "Summary:"
    log.logger.info(f"{len(requirements_by_id)} DNG Requirements were analyzed")
    ws_summary['B3'] = "DNG Path:"
    ws_summary['C3'] = environment['path']
    ws_summary['B4'] = "DNG Name:"
    ws_summary['C4'] = environment['name']
    ws_summary['B5'] = "Jira Query:"
    ws_summary['C5'] = environment['jira_query']
    ws_summary['B6'] = len(requirements_by_id)
    ws_summary['C6'] = "DNG Requirements were analyzed"
    log.logger.info(f"{len(items_by_id)} Jira items were analyzed")
    ws_summary['B7'] = len(items_by_id)
    ws_summary['C7'] = "Jira items were analyzed"
    log.logger.info(f"{len(missing_requirements)} Requirements in collection, not found in Jira")
    ws_summary['B8'] = len(missing_requirements)
    ws_summary['C8'] = "Requirements in collection, not found in Jira"
    log.logger.info(f"{len(missing_items)} Jira items not in specified Requirement Collection")
    ws_summary['B9'] = len(missing_items)
    ws_summary['C9'] = "Jira items not in specified Requirement Collection"
    log.logger.info(f"{len(matched)} DNG Requirements and Jira Items are matched")
    ws_summary['B10'] = len(matched)
    ws_summary['C10'] = "DNG Requirements and Jira Items are matched"
    log.logger.info("--------------------------------------------------------------------------------")

    # (fixed: missing space before "not found" in the report title)
    ws_requirements['A1'] = f"Requirements from Collection '{environment['name']}' not found in Jira:"
    set_width(ws_requirements, {'A': 12, 'B': 40})
    ws_requirements.append(['-'])
    ws_requirements.append(['DNG ID', 'DNG Name'])
    for requirement in missing_requirements:
        ws_requirements.append([requirement.get_identifier(), requirement.get_name()])

    ws_items['A1'] = f"Jira items without matching DNG Requirement in Collection '{environment['name']}':"
    set_width(ws_items, {'A': 12, 'B': 8, 'C': 9, 'D': 80, 'E': 80})
    ws_items.append(['-'])
    ws_items.append(['Jira Key', 'Epic', 'Status', 'Summary', 'Link to DNG'])
    for item in missing_items:
        ws_items.append([
            item.key,
            '=HYPERLINK("' + getattr(item.fields, external_link) + '","' + str(getattr(item.fields, epic_name)) + '")',
            str(getattr(item.fields, status)),
            item.fields.summary,
            '=HYPERLINK("' + getattr(item.fields, external_link) + '")'
        ])

    ws_matching['A1'] = f"DNG Requirements and Jira Items that match from Collection '{environment['name']}':"
    set_width(ws_matching, {'A': 10, 'B': 10, 'C': 8, 'D': 9, 'E': 80, 'F': 80})
    ws_matching.append(['-'])
    ws_matching.append(['DNG ID', 'Jira Key', 'Epic', 'Status', 'Summary', 'Link to DNG'])
    for x in matched:
        requirement, item = x
        ws_matching.append([
            '=' + requirement.get_identifier(),
            item.key,
            '=HYPERLINK("' + getattr(item.fields, external_link) + '","' + getattr(item.fields, epic_name) + '")',
            str(getattr(item.fields, status)),
            item.fields.summary,
            '=HYPERLINK("' + getattr(item.fields, external_link) + '")'
        ])

    wb.save(environment['xls'])
def add_label_to_platform_version(parser, scenario, config, queries, search, log=None):
    """Update the Label field, based on the UCIS and E-Feature entries of the source platform"""
    preq_source_query = get_query('preq_source_query', queries,
                                  add_label_to_platform_version.__name__, params=scenario, log=log)
    areq_source_e_feature_query = get_query(
        'areq_source_e_feature', queries,
        add_label_to_platform_version.__name__, params=scenario, log=log)
    log.logger.info(
        "Labeling source platform {splatform}, source android version {sversion} with {label}"
        .format_map(scenario))
    verify = scenario['verify']
    update = scenario['update']
    log.logger.info("Verify is %s and Update is %s", verify, update)
    log.logger.info("=================================================================")
    # -- Get and format it:
    jira = Jira(scenario['name'], search, log=log.logger)
    source_preq_scanned = 0   # PREQ (UCIS) items examined
    source_areq_scanned = 0   # AREQ (E-Feature) items examined
    update_count = 0          # per-phase count, reset between PREQ and AREQ passes
    added_count = 0           # running total across both passes

    # -- Label preqs:
    if 'label_preq' not in scenario or scenario[
            'label_preq']:  # e.g., copy_preq is undefined or copy_preq = True
        for source_preq in jira.do_query(preq_source_query):
            updated = False
            source_preq_scanned += 1
            log.logger.info("Source: %s '%s'", source_preq.key, source_preq.fields.summary)
            if 'UPDATE_FIELDS' in scenario and scenario['UPDATE_FIELDS']:
                updated = update_labels(jira, scenario, source_preq, update, 0, log)
            if updated:
                update_count += 1
                # Honor the createmax budget for this pass.
                if scenario['createmax'] and update_count >= scenario['createmax']:
                    break
    # Fold the PREQ pass into the total and restart the counter for the AREQ pass.
    added_count += update_count
    update_count = 0

    # -- copy source e-features to output
    #    This keeps having an exception because the total number of items seems to be changing...
    if 'label_areq' not in scenario or scenario[
            'label_areq']:  # e.g., copy_areq is undefined or copy_areq = True
        # Materialize the query result first so the iteration set cannot shift mid-loop.
        e_features = [
            e_feature for e_feature in jira.do_query(areq_source_e_feature_query)
        ]
        for source_e_feature in e_features:
            updated = False
            source_areq_scanned += 1
            log.logger.info("Source: %s '%s'", source_e_feature.key, source_e_feature.fields.summary)
            if 'UPDATE_FIELDS' in scenario and scenario['UPDATE_FIELDS']:
                updated = update_labels(jira, scenario, source_e_feature, update, 0, log)
            if updated:
                update_count += 1
                if scenario['createmax'] and update_count >= scenario['createmax']:
                    break
    added_count += update_count

    log.logger.info("-----------------------------------------------------------------")
    log.logger.info("%s UCIS source entries were considered. ", source_preq_scanned)
    log.logger.info("%s E-Feature source entries were considered. ", source_areq_scanned)
    log.logger.info("%s labels added. ", added_count)
    log.logger.info("")
import sys from utility_funcs.search import get_server_info, search_for_profile import utility_funcs.logger_yaml as log LOG_CONFIG_FILE = 'logging.yaml'+pathsep+dirname(realpath(sys.argv[0]))+'/logging.yaml' CONFIG_FILE = dirname(realpath(sys.argv[0]))+'/config.yaml'+pathsep+'~/.jira/config.yaml' QUERIES_FILE = dirname(realpath(sys.argv[0]))+'/queries.yaml'+pathsep+'~/.jira/queries.yaml' SCENARIO_FILE = 'scenarios.yaml'+pathsep+dirname(realpath(sys.argv[0]))+'/scenarios.yaml' log_file = log.logging.getLogger("file") log.setup_logging(LOG_CONFIG_FILE, override={'handlers': {'info_file_handler': {'filename': 'jupyter.log'}}}) from jira_class import Jira, get_query from navigate import * from jira.exceptions import JIRAError jira = Jira('jira-t3', CONFIG_FILE, log.logger) import pandas as pd def readout(jira, item, limit_to={}): result = {} for field in vars(item): if field.startswith("__") \ or field.startswith("TimeTracking") \ or getattr(item, field) is None: continue value = getattr(item, field) name = field try: name = jira.jira_field_lookup[field]