def init(token, config_file=None):
    # Load configuration from the default config file unless one is given.
    if config_file is None:
        conf = Config(filename=__CONFIG_FILE__).config
    else:
        conf = Config(filename=config_file).config

    # Prefer the PAT passed as an argument; fall back to the one in the config.
    if token and token.strip():
        pat = token
        logger.info("Using PAT from argument")
    else:
        pat = conf['pat']

    context = SimpleNamespace()
    context.runner_cache = SimpleNamespace()

    # Setup the connection
    context.connection = Connection(base_url=conf['url'],
                                    creds=BasicAuthentication('PAT', pat),
                                    user_agent=__TASK__ + '/' + __VERSION__)

    # Copy the remaining settings onto the context for later use.
    context.project_name = conf['project_name']
    context.project_start_date = conf['project_start_date']
    context.project_end_date = conf['project_end_date']
    context.url = conf['url']
    context.test_run = conf['test_run']
    context.test_work_item_id = conf['test_work_item_id']
    context.future_actuals_are_None = conf['future_actuals_are_None']
    context.fields_array = conf['fields_array']
    return context
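# Hedged usage sketch for init() above: the PAT source and config file name are
# placeholders, and Config/__CONFIG_FILE__/__TASK__/__VERSION__ are assumed to
# be defined at module level as in the original project.
#
#     ctx = init(token=os.environ.get("AZURE_DEVOPS_PAT", ""),
#                config_file="config.yml")
#     wit_client = ctx.connection.clients.get_work_item_tracking_client()
#     print(ctx.project_name, ctx.project_start_date)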
def main(url, area, resource, auth_token, output_path=None):
    context = SimpleNamespace()
    context.runner_cache = SimpleNamespace()

    # setup the connection
    context.connection = Connection(
        base_url=url,
        creds=BasicAuthentication('PAT', auth_token),
        user_agent='azure-devops-python-samples/' + __VERSION__)

    # if the user asked for logging:
    # - add a hook for logging the http request
    # - create the root directory
    if output_path:
        # monkey-patch the get_client method to attach our hook
        _get_client = context.connection.get_client

        def get_client_with_hook(*args, **kwargs):
            logger.debug("get_client_with_hook")
            client = _get_client(*args, **kwargs)
            hacks.add_request_hook(client)
            return client

        context.connection.get_client = get_client_with_hook

        root_log_dir = pathlib.Path(output_path)
        if not root_log_dir.exists():
            root_log_dir.mkdir(parents=True, exist_ok=True)

        http_logging.push_state(True)
    else:
        root_log_dir = None

    # runner_lib.discovered_samples will contain a key for each area loaded,
    # and each key will have the resources and sample functions discovered
    if area == 'all':
        areas = runner_lib.discovered_samples.keys()
    else:
        if area not in runner_lib.discovered_samples.keys():
            raise ValueError("area '%s' doesn't exist" % (area, ))
        areas = [area]

    for area in areas:
        area_logging_path = runner_lib.enter_area(area, root_log_dir)

        for area_resource, functions in runner_lib.discovered_samples[area].items():
            if area_resource != resource and resource != 'all':
                logger.debug("skipping resource %s", area_resource)
                continue

            resource_logging_path = runner_lib.enter_resource(
                area_resource, area_logging_path)

            for run_sample in functions:
                runner_lib.before_run_sample(run_sample.__name__,
                                             resource_logging_path)
                run_sample(context)
                runner_lib.after_run_sample(resource_logging_path)
def main() -> None:
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter
    )
    parser.add_argument(
        "--url",
        default="https://dev.azure.com/your-instance-name",
        help="ADO Instance URL",
    )
    parser.add_argument("--areapath", default="OneFuzz-Test-Project", help="areapath")
    parser.add_argument("--title", help="work item title")
    parser.add_argument(
        "--expected", type=int, help="expected number of work items", default=1
    )
    group = parser.add_mutually_exclusive_group()
    group.add_argument("--pat", default=os.environ.get("ADO_PAT"), help="ADO PAT")
    group.add_argument(
        "--token",
        default=os.environ.get("SYSTEM_ACCESSTOKEN"),
        help="ADO system access token",
    )
    args = parser.parse_args()

    if args.pat:
        creds = BasicAuthentication("PAT", args.pat)
    elif args.token:
        creds = BasicTokenAuthentication(token={"access_token": args.token})
    else:
        print("either --pat or --token is required")
        sys.exit(1)

    connection = Connection(base_url=args.url, creds=creds)
    client = connection.clients_v6_0.get_work_item_tracking_client()

    query_items = ["[System.AreaPath] = '%s'" % args.areapath]
    if args.title:
        query_items.append("[System.Title] = '%s'" % args.title)

    # Build an SQL-like query (WIQL - Work Item Query Language) using user
    # provided args to a user provided ADO instance. In CICD, this ends up
    # unconditionally trusting system generated reports.
    query = "select [System.Id] from WorkItems where " + " AND ".join(  # nosec
        query_items
    )

    work_items = []
    for _ in range(60):
        work_items = client.query_by_wiql(Wiql(query=query)).work_items
        if len(work_items) >= args.expected:
            break
        time.sleep(2)
        print("trying again", flush=True)

    assert (
        len(work_items) >= args.expected
    ), "unexpected work items (got %d, expected at least %d)" % (
        len(work_items),
        args.expected,
    )
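# Hedged usage sketch for the CLI above: the script name and argument values
# are placeholders, and either --pat or --token (SYSTEM_ACCESSTOKEN) must be
# supplied, as enforced by the argument parser above.
#
#     python check_work_items.py \
#         --url https://dev.azure.com/your-org \
#         --areapath MyProject \
#         --title "my work item title" \
#         --expected 1 \
#         --pat "$ADO_PAT"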
def download_build_artifact(project_name, branch_name, source_version,
                            personal_access_token, artifact_name,
                            output_filename):
    credentials = BasicAuthentication('', personal_access_token)
    connection = Connection(base_url=Organization_url, creds=credentials)

    build_client = connection.clients.get_build_client()
    builds = build_client.get_builds(project=project_name,
                                     branch_name=branch_name,
                                     repository_id=Repository_id,
                                     repository_type=Repository_type)
    builds = [
        build for build in builds if build.source_version == source_version
    ]
    if len(builds) == 0:
        raise RuntimeError(
            'There is no build corresponding to source_version: {0}'.format(
                source_version))
    if len(builds) != 1:
        raise RuntimeError(
            'There is more than one build corresponding to source_version: {0}'
            .format(source_version))
    build_id = builds[0].id

    artifact = build_client.get_artifact(project='public',
                                         build_id=build_id,
                                         artifact_name=artifact_name)
    artifact_download_url = artifact.resource.download_url

    base64_personal_access_token = b64encode(
        str.encode('{0}:{1}'.format('', personal_access_token))).decode()
    opener = urllib.request.build_opener()
    opener.addheaders = [('Authorization',
                          'Basic {0}'.format(base64_personal_access_token))]
    urllib.request.install_opener(opener)
    urllib.request.urlretrieve(artifact_download_url, output_filename)
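# Hedged usage sketch for download_build_artifact() above: all argument values
# are placeholders, and Organization_url / Repository_id / Repository_type are
# assumed to be module-level constants as in the original script.
#
#     download_build_artifact(project_name='public',
#                             branch_name='refs/heads/main',
#                             source_version='<commit-sha>',
#                             personal_access_token=os.environ['ADO_PAT'],
#                             artifact_name='drop',
#                             output_filename='drop.zip')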
def get_ado_client(base_url: str, token: str) -> WorkItemTrackingClient:
    connection = Connection(base_url=base_url,
                            creds=BasicAuthentication("PAT", token))
    client = connection.clients_v6_0.get_work_item_tracking_client()
    return client
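# Hedged usage sketch for get_ado_client() above: the organization URL, PAT
# environment variable, and work item id are placeholders.
#
#     client = get_ado_client("https://dev.azure.com/your-org",
#                             os.environ["ADO_PAT"])
#     item = client.get_work_item(12345)
#     print(item.fields["System.Title"])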
def main():
    # extract arguments
    parser = argparse.ArgumentParser(
        description='push completed status from parent work items to children')
    parser.add_argument("-o", "--org", required=True, dest="url",
                        help="Azure DevOps Organization URL")
    parser.add_argument("-p", "--project", required=True, dest="project",
                        help="Azure DevOps Project")
    parser.add_argument("-t", "--pat", required=True, dest="pat",
                        help="Azure DevOps Personal Access Token")
    parser.add_argument(
        "--parent-type", required=True, dest="parent_type",
        help="work item parent type to filter for (Bug,Feature,...)")
    parser.add_argument(
        "--child-type", required=True, dest="child_type",
        help="work item child type to filter for (Task,Product Backlog Item,...)")
    parser.add_argument(
        "--age", required=False, dest="age", default=120, type=int,
        help="age in days when last change of work item happened")
    parser.add_argument("--update", required=False, action='store_true',
                        dest="update", help="commit update to Azure DevOps")
    args = parser.parse_args()

    # create a connection to the org
    credentials = BasicAuthentication('', args.pat)
    connection = Connection(base_url=args.url, creds=credentials)

    # get a client
    wit_client = connection.clients.get_work_item_tracking_client()

    # determine potential Completed/Removed states for the parent/child work item types
    wi_types = wit_client.get_work_item_types(args.project)
    parent_states = [t for t in wi_types if t.name == args.parent_type][0].states
    child_states = [t for t in wi_types if t.name == args.child_type][0].states
    parent_completed_states = [s.name for s in parent_states if s.category == 'Completed']
    parent_removed_states = [s.name for s in parent_states if s.category == 'Removed']
    child_completed_states = [s.name for s in child_states if s.category == 'Completed']
    child_removed_states = [s.name for s in child_states if s.category == 'Removed']

    # query relations
    wiql = Wiql(query=f"""SELECT * FROM workitemLinks
WHERE [Source].[System.TeamProject] = '{args.project}'
AND [Source].[System.WorkItemType] = '{args.parent_type}'
AND [Target].[System.WorkItemType] = '{args.child_type}'
AND [Source].[System.ChangedDate] >= @today - {args.age}
AND [System.Links.LinkType] = 'Child'
MODE (MustContain)
""")
    wi_relations = wit_client.query_by_wiql(wiql, top=1000).work_item_relations
    print(f'Results: {len(wi_relations)}')

    # process relations
    if wi_relations:
        for wir in wi_relations:
            if wir.source and wir.target:
                # for each source (parent) / target (child) pair check the completed state
                wis = wit_client.get_work_item(wir.source.id)
                wit = wit_client.get_work_item(wir.target.id)
                parent_state = wis.fields['System.State']
                child_state = wit.fields['System.State']
                if parent_state in parent_completed_states or parent_state in parent_removed_states:
                    print(
                        f"{wis.fields['System.WorkItemType']} {wir.source.id} ({parent_state})"
                        f" -> {wit.fields['System.WorkItemType']} {wir.target.id} ({child_state})"
                    )
                    operations = []
                    if (parent_state in parent_completed_states
                            and child_state not in child_completed_states
                            and child_state not in child_removed_states):
                        print(f" =>{child_completed_states[0]}")
                        operations.append(
                            JsonPatchOperation(op='replace',
                                               path='/fields/System.State',
                                               value=child_completed_states[0]))
                    if (parent_state in parent_removed_states
                            and child_state not in child_completed_states
                            and child_state not in child_removed_states):
                        print(f" =>{child_removed_states[0]}")
                        operations.append(
                            JsonPatchOperation(op='replace',
                                               path='/fields/System.State',
                                               value=child_removed_states[0]))
                    if len(operations) > 0 and args.update:
                        resp = wit_client.update_work_item(document=operations,
                                                           id=wir.target.id)
                        print(resp)
def main():
    # extract arguments
    parser = argparse.ArgumentParser(
        description='push characteristics from parent work items to children')
    parser.add_argument("-o", "--org", required=True, dest="url",
                        help="Azure DevOps Organization URL")
    parser.add_argument("-p", "--project", required=True, dest="project",
                        help="Azure DevOps Project")
    parser.add_argument("-t", "--pat", required=True, dest="pat",
                        help="Azure DevOps Personal Access Token")
    parser.add_argument(
        "--parent-type", required=True, dest="parent_type",
        help="work item parent type to filter for (Bug,Feature,...)")
    parser.add_argument(
        "--child-type", required=True, dest="child_type",
        help="work item child type to filter for (Task,Product Backlog Item,...)")
    parser.add_argument(
        "--field-list", required=True, dest="field_list",
        help="comma separated list of field names to compare, "
             "e.g. System.AreaPath,System.IterationPath")
    parser.add_argument(
        "--age", required=False, dest="age", default=120, type=int,
        help="age in days when last change of work item happened")
    parser.add_argument("--update", required=False, action='store_true',
                        dest="update", help="commit update to Azure DevOps")
    args = parser.parse_args()

    # create a connection to the org
    credentials = BasicAuthentication('', args.pat)
    connection = Connection(base_url=args.url, creds=credentials)

    # get a client
    wit_client = connection.clients.get_work_item_tracking_client()

    # query relations
    wiql = Wiql(query=f"""SELECT * FROM workitemLinks
WHERE [Source].[System.TeamProject] = '{args.project}'
AND [Source].[System.WorkItemType] = '{args.parent_type}'
AND [Target].[System.WorkItemType] = '{args.child_type}'
AND [Source].[System.ChangedDate] >= @today - {args.age}
AND [System.Links.LinkType] = 'Child'
MODE (MustContain)
""")
    wi_relations = wit_client.query_by_wiql(wiql, top=1000).work_item_relations
    print(f'Results: {len(wi_relations)}')

    # process relations
    if wi_relations:
        fields_to_check = args.field_list.split(',')
        for wir in wi_relations:
            if wir.source and wir.target:
                # for each source (parent) / target (child) pair check the field list
                wis = wit_client.get_work_item(wir.source.id)
                wit = wit_client.get_work_item(wir.target.id)
                print(
                    f"{wis.fields['System.WorkItemType']} {wir.source.id}"
                    f" -> {wit.fields['System.WorkItemType']} {wir.target.id}"
                )
                operations = []
                for field in fields_to_check:
                    if field not in wis.fields or field not in wit.fields:
                        raise ValueError(f"field {field} unknown")
                    elif wis.fields[field] != wit.fields[field]:
                        print(f' =>{field}')
                        operations.append(
                            JsonPatchOperation(op='replace',
                                               path=f'/fields/{field}',
                                               value=wis.fields[field]))
                if len(operations) > 0 and args.update:
                    resp = wit_client.update_work_item(document=operations,
                                                       id=wir.target.id)
                    print(resp)
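# Hedged usage sketch for the CLI above: the script name and argument values
# are placeholders; --update is omitted so the run only reports the fields it
# would copy without committing changes.
#
#     python push_parent_fields.py \
#         -o https://dev.azure.com/your-org -p MyProject -t "$ADO_PAT" \
#         --parent-type Feature --child-type "Product Backlog Item" \
#         --field-list System.AreaPath,System.IterationPath --age 30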
from azure.devops.connection import Connection
from azure.devops.v5_1.work_item_tracking.models import CommentCreate
from msrest.authentication import BasicAuthentication
import pprint
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--buildid', required=True)
parser.add_argument('--token', required=True)
parser.add_argument('--comment', required=True)
parser.add_argument('--org', required=True)
parser.add_argument('--proj', required=True)
args = parser.parse_args()

# credentials = BasicAuthentication('', args.token)
credentials = BasicAuthentication('PAT', args.token)
connection = Connection(base_url=args.org, creds=credentials)

build_client = connection.clients_v5_1.get_build_client()
wis = build_client.get_build_work_items_refs(project=args.proj,
                                             build_id=args.buildid)
# wis = build_client.get_build_work_items_refs(project="Biology Editor", build_id=4388)

wit_5_1_client = connection.clients_v5_1.get_work_item_tracking_client()
for wi in wis:
    pprint.pprint(wi.id)
    wit_5_1_client.add_comment(project=args.proj,
                               work_item_id=str(wi.id),
                               request=CommentCreate(text=args.comment))
def __init__(self, config):
    # Build an Azure DevOps connection from a config dict holding the personal
    # access token and the organization URL.
    self._credentials = BasicAuthentication(
        '', config['azure_personal_access_token'])
    self._connection = Connection(
        base_url=config['azure_organization_url'], creds=self._credentials)
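# Hedged usage sketch for the constructor above: the wrapper class name
# AdoService is hypothetical (only __init__ is shown), and the config keys
# match the ones read in __init__.
#
#     service = AdoService({
#         'azure_personal_access_token': os.environ['AZURE_DEVOPS_PAT'],
#         'azure_organization_url': 'https://dev.azure.com/your-org',
#     })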