Example 1
def list_dags():
    """Lists the dags stored in the Cloud Composer GCS bucket"""
    logger.log(
        logging.INFO,
        f"Entered list_dags -- {API_BASE_PATH_V1}/dag/list api GET method")
    req_data = request.get_json()
    if req_data:
        logger.log(logging.DEBUG,
                   f"Request contains a json payload, validating: {req_data}")
        api_validator.validate_project_json(req_data)
        project_id, location, composer_environment = api_service.get_gcp_composer_details(
            req_data)
        bucket_name = api_service.get_dag_bucket(project_id, location,
                                                 composer_environment)
    else:
        logger.log(
            logging.DEBUG,
            f"Request does not contain a json payload, validating implicitly")
        project_id, location, composer_environment = api_service.get_gcp_composer_details(
            None)
        bucket_name = api_service.get_dag_bucket(project_id, location,
                                                 composer_environment)

    dag_list = api_service.list_dags(project_id, bucket_name)

    next_actions = {
        'validate': f'{API_BASE_PATH_V1}/dag/validate',
        'deploy': f'{API_BASE_PATH_V1}/dag/deploy'
    }

    return jsonify(dag_list=dag_list, next_actions=next_actions)
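A minimal client-side sketch for exercising this endpoint follows; the host, port, and the concrete /api/v1 prefix standing in for API_BASE_PATH_V1 are assumptions, and the payload fields mirror those validated above.

# Hypothetical client call; base URL and the /api/v1 prefix are assumptions.
import requests

BASE = 'http://localhost:8080/api/v1'

# Without a body the handler resolves the Composer details implicitly.
resp = requests.get(f'{BASE}/dag/list')
print(resp.json()['dag_list'])

# With an explicit payload (field names taken from the validation above).
payload = {
    'project_id': 'my-project',
    'location': 'us-central1',
    'composer_environment': 'my-composer-env'
}
resp = requests.get(f'{BASE}/dag/list', json=payload)
print(resp.json()['next_actions'])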
def test_get_gcp_composer_details_inline_invalid():
    payload = {
        'project_id': 'mock_project_id',
        'composer_environment': 'mock_composer_environment'
    }
    with pytest.raises(Exception):
        api_service.get_gcp_composer_details(payload)
Example 3
def trigger_dag(dag_name):
    """Triggers a specific, existing dag within a Cloud Composer environment"""
    logger.log(
        logging.INFO,
        f"Entered trigger_dag -- {API_BASE_PATH_V1}/dag/trigger/{dag_name} api PUT method"
    )
    req_data = request.get_json()
    if req_data:
        if 'conf' not in req_data or len(req_data['conf']) == 0:
            return {
                'error':
                "JSON payload provided but conf element is missing or empty"
            }, 500
        if 'project_id' in req_data:
            api_validator.validate_project_json(req_data)
            project_id, location, composer_environment = api_service.get_gcp_composer_details(
                req_data)
        else:
            project_id, location, composer_environment = api_service.get_gcp_composer_details(
                None)
    else:
        project_id, location, composer_environment = api_service.get_gcp_composer_details(
            None)
    try:
        res = api_service.trigger_dag(project_id, location,
                                      composer_environment, dag_name)
    except Exception:
        return {'error': traceback.format_exc()}, 500

    authenticated_session = auth_service.get_authenticated_session()
    airflow_uri, client_id = airflow_service.AirflowService(
        authenticated_session, project_id, location,
        composer_environment).get_airflow_experimental_api()

    return jsonify(api_response=res,
                   next_actions=api_service.get_next_actions_experimental_api(
                       airflow_uri, client_id))
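For completeness, a hedged sketch of a client PUT to the trigger route; the base URL is an assumption and the conf keys are illustrative, but the handler above requires conf to be present and non-empty whenever a body is sent.

# Hypothetical trigger call; base URL is an assumption, conf keys are illustrative.
import requests

payload = {
    'project_id': 'my-project',
    'location': 'us-central1',
    'composer_environment': 'my-composer-env',
    'conf': {'run_date': '2024-01-01'}
}
resp = requests.put('http://localhost:8080/api/v1/dag/trigger/my_dag_name',
                    json=payload)
print(resp.json()['api_response'])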
def test_get_gcp_composer_details_by_invalid_json():
    json = {
        'project_id': os.environ.get('PROJECT_ID'),
        'location': os.environ.get('GCP_LOCATION')
    }
    with pytest.raises(Exception):
        api_service.get_gcp_composer_details(json)
Example 5
def get_composer_config():
    """Gets the configuration information of the Cloud Composer environment"""
    logger.log(
        logging.INFO,
        f"Entered get_composer_config -- {API_BASE_PATH_V1}/composer/config api GET method"
    )
    req_data = request.get_json()
    if req_data:
        logger.log(logging.DEBUG,
                   f"Request contains a json payload, validating: {req_data}")
        api_validator.validate_project_json(req_data)
        project_id, location, composer_environment = api_service.get_gcp_composer_details(
            req_data)
    else:
        logger.log(
            logging.DEBUG,
            f"Request does not contain a json payload, validating implicitly")
        project_id, location, composer_environment = api_service.get_gcp_composer_details(
            None)

    authenticated_session = auth_service.get_authenticated_session()
    airflow_config = airflow_service.AirflowService(
        authenticated_session, project_id, location,
        composer_environment).get_airflow_config()

    next_actions = {
        'list': f'{API_BASE_PATH_V1}/dag/list',
        'validate': f'{API_BASE_PATH_V1}/dag/validate',
        'deploy': f'{API_BASE_PATH_V1}/dag/deploy'
    }

    return jsonify(airflow_config=airflow_config, next_actions=next_actions)
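A short usage sketch, assuming the service is reachable locally; with no request body the handler resolves the Composer environment from environment variables, as above.

# Hypothetical config lookup; base URL is an assumption.
import requests

resp = requests.get('http://localhost:8080/api/v1/composer/config')
body = resp.json()
print(body['airflow_config'])
print(body['next_actions'])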
Example 6
def get_composer_experimental_api():
    """Gets the configuration information of the Cloud Composer experimental API"""
    logger.log(
        logging.INFO,
        f"Entered get_composer_experimental_apì -- {API_BASE_PATH_V1}/composer/api api GET method"
    )
    req_data = request.get_json()
    if req_data:
        logger.log(logging.DEBUG,
                   f"Request contains a json payload, validating: {req_data}")
        api_validator.validate_project_json(req_data)
        project_id, location, composer_environment = api_service.get_gcp_composer_details(
            req_data)
    else:
        logger.log(
            logging.DEBUG,
            f"Request does not contain a json payload, validating implicitly")
        project_id, location, composer_environment = api_service.get_gcp_composer_details(
            None)

    authenticated_session = auth_service.get_authenticated_session()
    airflow_uri, client_id = airflow_service.AirflowService(
        authenticated_session, project_id, location,
        composer_environment).get_airflow_experimental_api()

    return jsonify(airflow_uri=airflow_uri,
                   client_id=client_id,
                   next_actions=api_service.get_next_actions_experimental_api(
                       airflow_uri, client_id))
Example 7
def deploy_dag():
    """Deploys a dag to a Cloud Composer environment"""
    logger.log(
        logging.INFO,
        f"Entered deploy_dag -- {API_BASE_PATH_V1}/dag/deploy api POST method")
    req_data = request.get_json()
    if not req_data:
        return {'error': "Empty JSON payload"}, 500
    try:
        api_validator.validate_payload(req_data)
        if 'project_id' in req_data:
            project_id, location, composer_environment = api_service.get_gcp_composer_details(
                req_data)
        else:
            project_id, location, composer_environment = api_service.get_gcp_composer_details(
                None)

        airflow_dag_bucket_name = api_service.get_dag_bucket(
            project_id, location, composer_environment)
        dag_name = req_data['dag_name']

        next_actions = {
            'trigger': f'{API_BASE_PATH_V1}/dag/trigger/{dag_name}'
        }

        if req_data['mode'] == 'GCS':
            deploy_file = api_service.gcs_download_file(
                project_id, req_data['bucket_name'], req_data['file_path'])
            gcs_dag_path = api_service.deploy_dag(project_id,
                                                  'GCS',
                                                  airflow_dag_bucket_name,
                                                  dag_file=deploy_file)
            return jsonify(dag_name=dag_name,
                           dag_gcs_path=gcs_dag_path,
                           next_actions=next_actions)

        if req_data['mode'] == 'GIT':
            deploy_file = api_service.git_download_file(
                req_data['git_url'], req_data['repo_name'],
                req_data['file_path'])
            git_dag_path = api_service.deploy_dag(project_id,
                                                  'GIT',
                                                  airflow_dag_bucket_name,
                                                  dag_file=deploy_file)
            return jsonify(dag_name=dag_name,
                           dag_gcs_path=git_dag_path,
                           next_actions=next_actions)

        if req_data['mode'] == 'INLINE':
            gcs_dag_path = api_service.deploy_dag(project_id,
                                                  'INLINE',
                                                  airflow_dag_bucket_name,
                                                  dag_data=req_data)
            return jsonify(dag_name=dag_name,
                           dag_gcs_path=gcs_dag_path,
                           next_actions=next_actions)
    except Exception:
        return {'error': traceback.format_exc()}, 500
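The three deployment modes expect different payload fields. Below is a hedged sketch of the request bodies, with field names taken from the handler above and the INLINE shape mirrored from test_trigger_dag further down; the concrete values are placeholders.

# Illustrative deploy payloads; all values are placeholders.
import requests

gcs_payload = {
    'dag_name': 'my_dag',
    'mode': 'GCS',
    'bucket_name': 'my-source-bucket',
    'file_path': 'dags/my_dag.py'
}

git_payload = {
    'dag_name': 'my_dag',
    'mode': 'GIT',
    'git_url': 'https://github.com/my-org/my-repo.git',
    'repo_name': 'my-repo',
    'file_path': 'dags/my_dag.py'
}

inline_payload = {
    'dag_name': 'my_dag',
    'mode': 'INLINE',
    'kubernetes_pod_operators': [
        {
            'task_id': 'k8s_pod_operator_example_task_01',
            'name': 'k8s_pod_example_01',
            'image': 'bash'
        }
    ]
}

# Hypothetical deploy call; base URL is an assumption.
resp = requests.post('http://localhost:8080/api/v1/dag/deploy', json=gcs_payload)
print(resp.json()['dag_gcs_path'])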
def test_gcs_upload_file_temp_bucket():
    project_id, location, composer_environment = api_service.get_gcp_composer_details(None)
    bucket_name = os.environ.get('TEST_BUCKET')
    prefix = "dags/"
    upload_file = "static/dag_workflow_simple.py"
    file_path = os.path.join(os.path.dirname(Path(__file__)), upload_file)
    api_service.gcs_upload_file(project_id, bucket_name, prefix, file_path)
def test_get_gcp_composer_details_by_env_vars():
    project_id, location, composer_environment = api_service.get_gcp_composer_details()
    assert project_id is not None
    assert project_id == os.environ.get('PROJECT_ID')
    assert location is not None
    assert location == os.environ.get('GCP_LOCATION')
    assert composer_environment is not None
    assert composer_environment == os.environ.get('COMPOSER_ENVIRONMENT')
def test_get_gcp_composer_details_inline_valid():
    payload = {
        'project_id': 'mock_project_id',
        'location': 'mock_gcp_location',
        'composer_environment': 'mock_composer_environment'
    }
    is_valid = api_service.get_gcp_composer_details(payload)
    assert is_valid
def test_get_gcp_composer_environment_by_valid_json():
    json = {
        'project_id': os.environ.get('PROJECT_ID'),
        'location': os.environ.get('GCP_LOCATION'),
        'composer_environment': os.environ.get('COMPOSER_ENVIRONMENT')
    }
    project_id, location, composer_environment = api_service.get_gcp_composer_details(json)
    req_data = api_service.__get_composer_environment(project_id, location, composer_environment)
    assert req_data is not None
Example 12
def validate_dag():
    """Validates a dag to ensure that it is error free and compatible with Cloud Composer"""
    logger.log(
        logging.INFO,
        f"Entered validate_dag -- {API_BASE_PATH_V1}/dag/validate api POST method"
    )
    req_data = request.get_json()
    if not req_data:
        return {'error': "Empty JSON payload"}, 500
    try:
        api_validator.validate_payload(req_data)
        if 'project_id' in req_data:
            project_id, location, composer_environment = api_service.get_gcp_composer_details(
                req_data)
        else:
            project_id, location, composer_environment = api_service.get_gcp_composer_details(
                None)

        next_actions = {'deploy': f'{API_BASE_PATH_V1}/dag/deploy'}

        if req_data['mode'] == 'GCS':
            deploy_file = api_service.gcs_download_file(
                project_id, req_data['bucket_name'], req_data['file_path'])
            validation_json = api_service.validate_dag('GCS', deploy_file)
            validation_json['next_actions'] = next_actions
            return jsonify(validation_json)

        if req_data['mode'] == 'GIT':
            deploy_file = api_service.git_download_file(
                req_data['git_url'], req_data['repo_name'],
                req_data['file_path'])
            validation_json = api_service.validate_dag('GIT', deploy_file)
            validation_json['next_actions'] = next_actions
            return jsonify(validation_json)

        if req_data['mode'] == 'INLINE':
            validation_json = api_service.validate_dag('INLINE', req_data)
            validation_json['next_actions'] = next_actions
            return jsonify(validation_json)

    except Exception:
        return {'error': traceback.format_exc()}, 500
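An INLINE validation sketch; the payload shape mirrors the INLINE deploy payload and test_trigger_dag, the base URL is an assumption, and the values are placeholders.

# Hypothetical INLINE validation call; values are placeholders.
import requests

payload = {
    'dag_name': 'my_dag',
    'mode': 'INLINE',
    'kubernetes_pod_operators': [
        {
            'task_id': 'validate_example_task',
            'name': 'validate_example_pod',
            'image': 'bash'
        }
    ]
}
resp = requests.post('http://localhost:8080/api/v1/dag/validate', json=payload)
validation = resp.json()
print(validation['next_actions'])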
def test_gcs_download_file_temp_bucket():
    project_id, location, composer_environment = api_service.get_gcp_composer_details(None)
    test_gcs_upload_file_temp_bucket()
    file_path = api_service.gcs_download_file(
        project_id,
        os.environ.get('TEST_BUCKET'),
        'dags/dag_workflow_simple.py'
    )
    assert os.path.exists(file_path)
    assert os.path.isfile(file_path)
    assert not os.path.isdir(file_path)
def test_get_gcp_composer_details_by_valid_json():
    json = {
        'project_id': os.environ.get('PROJECT_ID'),
        'location': os.environ.get('GCP_LOCATION'),
        'composer_environment': os.environ.get('COMPOSER_ENVIRONMENT')
    }
    project_id, location, composer_environment = api_service.get_gcp_composer_details(json)
    assert project_id is not None
    assert project_id == os.environ.get('PROJECT_ID')
    assert location is not None
    assert location == os.environ.get('GCP_LOCATION')
    assert composer_environment is not None
    assert composer_environment == os.environ.get('COMPOSER_ENVIRONMENT')
def test_trigger_dag():
    project_id, location, composer_environment = api_service.get_gcp_composer_details(None)
    airflow_dag_bucket_name = api_service.get_dag_bucket(project_id, location, composer_environment)
    json = {
        'project_id': os.environ.get('PROJECT_ID'),
        'location': os.environ.get('GCP_LOCATION'),
        'composer_environment': os.environ.get('COMPOSER_ENVIRONMENT'),
        'dag_name': 'test_trigger_dag',
        'mode': 'INLINE',
        'kubernetes_pod_operators': [
            {
                'task_id': 'k8s_pod_operator_example_task_01',
                'name': 'k8s_pod_example_01',
                'image': 'bash'
            }
        ]
    }
    gcs_dag_path = api_service.deploy_dag(project_id, 'INLINE', airflow_dag_bucket_name, dag_data=json)

    # sleep in order to give the dag time to deploy
    time.sleep(30)

    response_text = api_service.trigger_dag(project_id, location, composer_environment, 'test_trigger_dag')
    assert response_text is not None
def test_get_gcp_composer_environment_by_envvars():
    project_id, location, composer_environment = api_service.get_gcp_composer_details()
    req_data = api_service.__get_composer_environment(project_id, location, composer_environment)
    assert req_data is not None
def test_get_gcp_composer_details_env_var_valid():
    os.environ['PROJECT_ID'] = 'mock_project_id'
    os.environ['GCP_LOCATION'] = 'mock_location'
    os.environ['COMPOSER_ENVIRONMENT'] = 'mock_composer_env'
    is_valid = api_service.get_gcp_composer_details()
    assert is_valid