Example #1
def get_queues(project_id=AZURE_PROJECT_ID):
    from vsts.task_agent.v4_0.task_agent_client import TaskAgentClient
    from vsts.task_agent.v4_0.models import TaskAgentQueue

    aclient = TaskAgentClient(AZURE_TEAM_INSTANCE, credentials)
    queues: typing.List[TaskAgentQueue] = aclient.get_agent_queues(project_id)
    return queues

def get_client_mock_helper(_self_dummy, client_type):
    from vsts.git.v4_0.git_client import GitClient
    from vsts.policy.v4_0.policy_client import PolicyClient
    from vsts.core.v4_0.core_client import CoreClient
    from vsts.release.v4_0.release_client import ReleaseClient
    from vsts.customer_intelligence.v4_0.customer_intelligence_client import CustomerIntelligenceClient
    from vsts.service_endpoint.v4_1.service_endpoint_client import ServiceEndpointClient
    from vsts.operations.v4_0.operations_client import OperationsClient
    from vsts.task_agent.v4_0.task_agent_client import TaskAgentClient
    from vsts.work_item_tracking.v4_0.work_item_tracking_client import WorkItemTrackingClient
    from vsts.settings.v4_0.settings_client import SettingsClient
    from vsts.identity.v4_0.identity_client import IdentityClient
    from vsts.member_entitlement_management.v4_1.member_entitlement_management_client import (
        MemberEntitlementManagementClient)
    from vsts.location.v4_0.location_client import LocationClient
    from vsts.build.v4_0.build_client import BuildClient

    switcher = {
        'vsts.git.v4_0.git_client.GitClient': GitClient(base_url=TEST_DEVOPS_ORG_URL),
        'vsts.policy.v4_0.policy_client.PolicyClient': PolicyClient(base_url=TEST_DEVOPS_ORG_URL),
        'vsts.core.v4_0.core_client.CoreClient': CoreClient(base_url=TEST_DEVOPS_ORG_URL),
        'vsts.release.v4_0.release_client.ReleaseClient': ReleaseClient(base_url=TEST_DEVOPS_ORG_URL),
        'vsts.customer_intelligence.v4_0.customer_intelligence_client.CustomerIntelligenceClient':
            CustomerIntelligenceClient(base_url=TEST_DEVOPS_ORG_URL),
        'vsts.service_endpoint.v4_1.service_endpoint_client.ServiceEndpointClient': ServiceEndpointClient(
            base_url=TEST_DEVOPS_ORG_URL),
        'vsts.operations.v4_0.operations_client.OperationsClient': OperationsClient(
            base_url=TEST_DEVOPS_ORG_URL),
        'vsts.task_agent.v4_0.task_agent_client.TaskAgentClient': TaskAgentClient(
            base_url=TEST_DEVOPS_ORG_URL),
        'vsts.work_item_tracking.v4_0.work_item_tracking_client.WorkItemTrackingClient': WorkItemTrackingClient(
            base_url=TEST_DEVOPS_ORG_URL),
        'vsts.settings.v4_0.settings_client.SettingsClient': SettingsClient(
            base_url=TEST_DEVOPS_ORG_URL),
        'vsts.identity.v4_0.identity_client.IdentityClient': IdentityClient(
            base_url=TEST_DEVOPS_ORG_URL),
        'vsts.member_entitlement_management.v4_1.member_entitlement_management_client.'
        'MemberEntitlementManagementClient': MemberEntitlementManagementClient(
            base_url=TEST_DEVOPS_ORG_URL),
        'vsts.location.v4_0.location_client.LocationClient': LocationClient(
            base_url=TEST_DEVOPS_ORG_URL),
        'vsts.build.v4_0.build_client.BuildClient': BuildClient(base_url=TEST_DEVOPS_ORG_URL)
    }

    return switcher.get(client_type, None)
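
The switcher above keys on the same fully-qualified type strings that VssConnection.get_client accepts, so the helper can stand in for that method during offline tests. Below is a minimal sketch of how it might be patched in; the patch target path is an assumption based on the vsts 4.x package layout, not something shown in the original.

from unittest import mock

# Assumed patch target: VssConnection.get_client normally builds real REST
# clients from strings such as 'vsts.git.v4_0.git_client.GitClient'.
with mock.patch(
    'vsts.vss_connection.VssConnection.get_client',
    new=get_client_mock_helper,
):
    ...  # code under test now receives the pre-built offline clients above

Because get_client_mock_helper takes (_self_dummy, client_type), it slots in as an unbound replacement: the patched method still receives the connection instance as its first argument and the type string as its second.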
Example #3
def get_queues(
        config: AzureConfig = default_config) -> typing.List[TaskAgentQueue]:
    aclient = TaskAgentClient(config.instance_base_url, config.credentials)
    return aclient.get_agent_queues(config.project_name)
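
This variant of get_queues relies on module-level imports (typing, TaskAgentClient, TaskAgentQueue) and an AzureConfig object that is not shown here. The following is only a sketch of what such a config might look like, built from the attributes these examples actually read; the project name and environment variable are placeholders, not values from the original source.

import os
from msrest.authentication import BasicAuthentication


class AzureConfig:
    # Placeholder values; the real settings object is defined elsewhere.
    project_name = 'feedstock-builds'
    instance_base_url = 'https://dev.azure.com/your-org'
    azure_team_instance = instance_base_url  # name used by the older examples
    azure_report_build_status = 'true'

    @property
    def credentials(self):
        # The vsts SDK authenticates with a personal access token via
        # BasicAuthentication('', token).
        return BasicAuthentication('', os.environ['AZURE_TOKEN'])


default_config = AzureConfig()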
Example #4
def register_repo(github_org, repo_name, config: AzureConfig = default_config):
    from vsts.build.v4_1.models import (
        BuildDefinition,
        BuildDefinitionReference,
        BuildRepository,
    )
    from vsts.task_agent.v4_0.task_agent_client import TaskAgentClient
    import inspect

    bclient = build_client()
    aclient = TaskAgentClient(config.instance_base_url, config.credentials)

    source_repo = get_repo_reference(config, github_org, repo_name)

    new_repo = BuildRepository(
        type="GitHub",
        url=source_repo.properties["cloneUrl"],
        **{
            k: v
            for k, v in source_repo.as_dict().items()
            if k in set(inspect.getfullargspec(BuildRepository).args) -
            {"url"}
        },
    )
    new_repo.name = source_repo.properties["fullName"]
    new_repo.properties["cleanOptions"] = "0"
    new_repo.properties["skipSyncSource"] = "false"
    new_repo.properties["gitLfsSupport"] = "false"
    new_repo.properties["checkoutNestedSubmodules"] = "false"
    new_repo.properties["labelSources"] = "0"
    new_repo.properties["fetchDepth"] = "0"
    new_repo.properties["labelSourcesFormat"] = "$(build.buildNumber)"
    new_repo.properties["reportBuildStatus"] = config.azure_report_build_status
    new_repo.clean = False

    queues = get_queues(config)
    default_queue = get_default_queue(config)
    service_endpoint = get_service_endpoint(config)

    build_definition = BuildDefinition(
        process={
            "type": 2,
            # These might be optional;
            "resources": {
                "queues": [{
                    "id": q.id,
                    "alias": q.name
                } for q in queues],
                "endpoints": [{
                    "id": service_endpoint.id,
                    "alias": service_endpoint.name
                }],
            },
            "yamlFilename": "/azure-pipelines.yml",
        },
        # queue works
        queue=default_queue,
        # now onto this
        repository=new_repo,
        name=repo_name,
        # configure trigger for our builds.
        triggers=[
            {
                "branchFilters": ["+*"],
                "forks": {
                    "enabled": True,
                    "allowSecrets": False
                },
                "pathFilters": [],
                "isCommentRequiredForPullRequest": False,
                "triggerType": "pullRequest",
            },
            {
                "branchFilters": ["+*"],
                "pathFilters": [],
                "batchChanges": False,
                "maxConcurrentBuildsPerBranch": 1,
                "pollingInterval": 0,
                "triggerType": "continuousIntegration",
            },
        ],
        variable_groups=aclient.get_variable_groups(
            project=config.project_name, group_name="anaconda-org"),
        type="build",
    )

    # clean up existing builds for the same feedstock if present
    existing_definitions: typing.List[
        BuildDefinitionReference] = bclient.get_definitions(
            project=config.project_name, name=repo_name)
    if existing_definitions:
        assert len(existing_definitions) == 1
        ed = existing_definitions[0]
        bclient.update_definition(definition=build_definition,
                                  definition_id=ed.id,
                                  project=ed.project.name)
    else:
        bclient.create_definition(definition=build_definition,
                                  project=config.project_name)
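
register_repo calls a build_client() helper that is defined elsewhere in the source module. A plausible reconstruction is sketched below, assuming it simply wraps the SDK's BuildClient with the same configuration; the import path mirrors the v4_1 models imported above and the constructor arguments follow the same (base_url, creds) pattern as TaskAgentClient.

from vsts.build.v4_1.build_client import BuildClient


def build_client(config: AzureConfig = default_config) -> BuildClient:
    # Hypothetical helper; the real implementation may differ.
    return BuildClient(config.instance_base_url, config.credentials)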
Example #5
def get_queues(config: AzureConfig = default_config) -> typing.List[TaskAgentQueue]:
    aclient = TaskAgentClient(config.azure_team_instance, config.credentials)
    return aclient.get_agent_queues(config.project_name)
Example #6
def register_repo(github_org, repo_name, config: AzureConfig = default_config):
    from vsts.build.v4_1.models import (
        BuildDefinition,
        BuildDefinitionReference,
        BuildRepository,
    )
    from vsts.task_agent.v4_0.task_agent_client import TaskAgentClient
    import inspect

    bclient = build_client()
    aclient = TaskAgentClient(config.azure_team_instance, config.credentials)

    source_repo = get_repo_reference(config, github_org, repo_name)

    new_repo = BuildRepository(
        type="GitHub",
        url=source_repo.properties["cloneUrl"],
        **{
            k: v
            for k, v in source_repo.as_dict().items()
            if k in set(inspect.getfullargspec(BuildRepository).args) - {"url"}
        },
    )
    new_repo.name = source_repo.properties["fullName"]
    new_repo.properties["cleanOptions"] = "0"
    new_repo.properties["skipSyncSource"] = "false"
    new_repo.properties["gitLfsSupport"] = "false"
    new_repo.properties["checkoutNestedSubmodules"] = "false"
    new_repo.properties["labelSources"] = "0"
    new_repo.properties["fetchDepth"] = "0"
    new_repo.properties["labelSourcesFormat"] = "$(build.buildNumber)"
    new_repo.properties["reportBuildStatus"] = config.azure_report_build_status
    new_repo.clean = False

    queues = get_queues(config)
    default_queue = get_default_queue(config)
    service_endpoint = get_service_endpoint(config)

    build_definition = BuildDefinition(
        process={
            "type": 2,
            # These might be optional;
            "resources": {
                "queues": [{"id": q.id, "alias": q.name} for q in queues],
                "endpoints": [
                    {"id": service_endpoint.id, "alias": service_endpoint.name}
                ],
            },
            "yamlFilename": "/azure-pipelines.yml",
        },
        # queue works
        queue=default_queue,
        # now onto this
        repository=new_repo,
        name=repo_name,
        # configure trigger for our builds.
        triggers=[
            {
                "branchFilters": ["+*"],
                "forks": {"enabled": True, "allowSecrets": False},
                "pathFilters": [],
                "isCommentRequiredForPullRequest": False,
                "triggerType": "pullRequest",
            },
            {
                "branchFilters": ["+*"],
                "pathFilters": [],
                "batchChanges": False,
                "maxConcurrentBuildsPerBranch": 1,
                "pollingInterval": 0,
                "triggerType": "continuousIntegration",
            },
        ],
        variable_groups=aclient.get_variable_groups(
            project=config.project_name, group_name="anaconda-org"
        ),
        type="build",
    )

    # clean up existing builds for the same feedstock if present
    existing_definitions: typing.List[
        BuildDefinitionReference
    ] = bclient.get_definitions(project=config.project_name, name=repo_name)
    if existing_definitions:
        assert len(existing_definitions) == 1
        ed = existing_definitions[0]
        bclient.update_definition(
            definition=build_definition, definition_id=ed.id, project=ed.project.name
        )
    else:
        bclient.create_definition(
            definition=build_definition, project=config.project_name
        )