def register_repo(github_org, repo_name, config: AzureConfig = default_config):
    """Register an Azure Pipelines build definition for a GitHub repository.

    Creates a new build definition for ``github_org/repo_name``, or updates
    the existing one if a definition with the same name is already present.
    """
    from vsts.build.v4_1.models import (
        BuildDefinition,
        BuildDefinitionReference,
        BuildRepository,
    )
    from vsts.task_agent.v4_0.task_agent_client import TaskAgentClient
    import inspect
    import typing

    bclient = build_client()
    aclient = TaskAgentClient(config.instance_base_url, config.credentials)

    source_repo = get_repo_reference(config, github_org, repo_name)

    # Copy over every attribute of the source repo that BuildRepository
    # accepts, except the URL, which we set explicitly from cloneUrl.
    new_repo = BuildRepository(
        type="GitHub",
        url=source_repo.properties["cloneUrl"],
        **{
            k: v
            for k, v in source_repo.as_dict().items()
            if k in set(inspect.getfullargspec(BuildRepository).args) - {"url"}
        },
    )
    new_repo.name = source_repo.properties["fullName"]
    new_repo.properties["cleanOptions"] = "0"
    new_repo.properties["skipSyncSource"] = "false"
    new_repo.properties["gitLfsSupport"] = "false"
    new_repo.properties["checkoutNestedSubmodules"] = "false"
    new_repo.properties["labelSources"] = "0"
    new_repo.properties["fetchDepth"] = "0"
    new_repo.properties["labelSourcesFormat"] = "$(build.buildNumber)"
    new_repo.properties["reportBuildStatus"] = config.azure_report_build_status
    new_repo.clean = False

    queues = get_queues(config)
    default_queue = get_default_queue(config)
    service_endpoint = get_service_endpoint(config)

    build_definition = BuildDefinition(
        process={
            "type": 2,
            # These resource mappings might be optional.
            "resources": {
                "queues": [{"id": q.id, "alias": q.name} for q in queues],
                "endpoints": [
                    {"id": service_endpoint.id, "alias": service_endpoint.name}
                ],
            },
            "yamlFilename": "/azure-pipelines.yml",
        },
        queue=default_queue,
        repository=new_repo,
        name=repo_name,
        # Configure triggers for our builds: one for pull requests
        # (including forks, without secrets) and one for pushes (CI).
        triggers=[
            {
                "branchFilters": ["+*"],
                "forks": {"enabled": True, "allowSecrets": False},
                "pathFilters": [],
                "isCommentRequiredForPullRequest": False,
                "triggerType": "pullRequest",
            },
            {
                "branchFilters": ["+*"],
                "pathFilters": [],
                "batchChanges": False,
                "maxConcurrentBuildsPerBranch": 1,
                "pollingInterval": 0,
                "triggerType": "continuousIntegration",
            },
        ],
        variable_groups=aclient.get_variable_groups(
            project=config.project_name, group_name="anaconda-org"
        ),
        type="build",
    )

    # Clean up existing builds for the same feedstock if present.
    existing_definitions: typing.List[
        BuildDefinitionReference
    ] = bclient.get_definitions(project=config.project_name, name=repo_name)
    if existing_definitions:
        assert len(existing_definitions) == 1
        ed = existing_definitions[0]
        bclient.update_definition(
            definition=build_definition,
            definition_id=ed.id,
            project=ed.project.name,
        )
    else:
        bclient.create_definition(
            definition=build_definition, project=config.project_name
        )
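

if __name__ == "__main__":
    # Minimal usage sketch (an assumption, not part of the original module):
    # register a single feedstock against the default AzureConfig defined
    # elsewhere in this module. The org and repo names below are hypothetical
    # placeholders; pass an explicit AzureConfig to target a different
    # Azure DevOps organization or project.
    register_repo("conda-forge", "libfoo-feedstock")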