def MakeCommonDag(dag_args_func, name, schedule_interval='15 9 * * *', extra_param_lst=None):
    """Creates the shared part of the daily/release dags.

    Args:
      dag_args_func: callable given to the ``generate_workflow_args``
        PythonOperator; it receives the Airflow context and produces the
        per-run configuration the bash tasks read via XCom.
      name: DAG id.
      schedule_interval: schedule in cron format, e.g. '15 9 * * *'.
      extra_param_lst: optional list forwarded to getBashSettingsTemplate.
        Defaults to an empty list.

    Returns:
      A (dag, tasks, addAirflowBashOperator) tuple: the DAG, a dict mapping
      task_id -> operator, and the helper so callers can append further
      templated bash tasks to the same DAG.
    """
    # Fix: the previous signature used a mutable default ([]), which is shared
    # across calls; use None as the sentinel instead.
    if extra_param_lst is None:
        extra_param_lst = []

    common_dag = DAG(
        name,
        catchup=False,
        default_args=default_args,
        schedule_interval=schedule_interval,
    )
    tasks = dict()
    init_gcb_env_cmd, copy_env_from_gcb_prefix = getBashSettingsTemplate(
        extra_param_lst)

    def addAirflowInitBashOperator(task_id):
        # Registers the bash task that initializes the GCB environment.
        task = BashOperator(
            task_id=task_id, bash_command=init_gcb_env_cmd, dag=common_dag)
        tasks[task_id] = task

    def addAirflowBashOperator(cmd_name, task_id, **kwargs):
        # Registers a bash task that restores the env captured from GCB and
        # then runs cmd_name; `type` first echoes the command's definition so
        # the task log shows what is about to run.
        cmd = copy_env_from_gcb_prefix + "\ntype %s\n %s" % (cmd_name, cmd_name)
        task = BashOperator(
            task_id=task_id, bash_command=cmd, dag=common_dag, **kwargs)
        tasks[task_id] = task

    generate_flow_args = PythonOperator(
        task_id='generate_workflow_args',
        python_callable=dag_args_func,
        provide_context=True,
        dag=common_dag,
    )
    tasks['generate_workflow_args'] = generate_flow_args

    addAirflowInitBashOperator('init_gcb_env')
    addAirflowBashOperator('get_git_commit_cmd', 'get_git_commit')
    addAirflowBashOperator('build_template', 'run_cloud_builder')
    # Qualification tests are expensive; do not retry on failure.
    addAirflowBashOperator(
        'test_command', 'run_release_qualification_tests', retries=0)
    addAirflowBashOperator('modify_values_command', 'modify_values_helm')

    copy_files = GoogleCloudStorageCopyOperator(
        task_id='copy_files_for_release',
        source_bucket=GetSettingTemplate('CB_GCS_BUILD_BUCKET'),
        source_object=GetSettingTemplate('CB_GCS_STAGING_PATH'),
        destination_bucket=GetSettingTemplate('CB_GCS_STAGING_BUCKET'),
        dag=common_dag,
    )
    tasks['copy_files_for_release'] = copy_files
    return common_dag, tasks, addAirflowBashOperator
def MakeCommonDag(name='istio_daily_flow_test', schedule_interval='15 9 * * *', monthly=False):
    """Creates the shared part of the daily/monthly dags.

    Args:
      name: DAG id.
      schedule_interval: schedule in cron format, e.g. '15 9 * * *'.
      monthly: when True, build a monthly release (version auto-computed from
        the date and Airflow variables, monthly GCS staging path); when False,
        a daily build using the environment defaults.

    Returns:
      A (dag, copy_files) tuple: the DAG plus its terminal task so callers
      can chain release-specific tasks downstream of it.
    """
    common_dag = DAG(
        name,
        catchup=False,
        default_args=default_args,
        schedule_interval=schedule_interval,
    )

    def AirflowGetVariableOrBaseCase(var, base):
        # Returns the Airflow Variable `var`, or `base` when it is not set.
        try:
            return Variable.get(var)
        except KeyError:
            return base

    def GenerateTestArgs(**kwargs):
        """Loads the configuration that will be used for this Iteration."""
        conf = kwargs['dag_run'].conf
        if conf is None:
            conf = dict()

        # Airflow gives the execution date when the job is supposed to be run,
        # however we dont backfill and only need to run one build therefore
        # use the current date instead of the date that is passed in.
        # date = kwargs['execution_date']
        date = datetime.datetime.now()
        timestamp = time.mktime(date.timetuple())

        # Monthly releases started in Nov 2017 with 0.3.0, so minor is # of
        # months from Aug 2017.
        minor_version = (date.year - 2017) * 12 + (date.month - 1) - 7
        major_version = AirflowGetVariableOrBaseCase('major_version', 0)
        # This code gets information about the latest released version so we
        # know what version number to use for this round.
        r_minor = int(AirflowGetVariableOrBaseCase('released_version_minor', 0))
        r_patch = int(AirflowGetVariableOrBaseCase('released_version_patch', 0))
        # If we have already released a monthly for this month then bump
        # the patch number for the remainder of the month.
        if r_minor == minor_version:
            patch = r_patch + 1
        else:
            patch = 0
        # If version is overridden then we should use it, otherwise we use its
        # default or monthly value.
        version = conf.get('VERSION')
        if monthly and not version:
            version = '{}.{}.{}'.format(major_version, minor_version, patch)

        default_conf = environment_config.get_airflow_config(
            version,
            timestamp,
            major=major_version,
            minor=minor_version,
            patch=patch,
            date=date.strftime('%Y%m%d'),
            rc=date.strftime('%H-%M'))
        config_settings = dict(VERSION=default_conf['VERSION'])
        config_settings_name = [
            'PROJECT_ID',
            'MFEST_URL',
            'MFEST_FILE',
            'GCS_STAGING_BUCKET',
            'SVC_ACCT',
            'GITHUB_ORG',
            'GITHUB_REPO',
            'GCS_GITHUB_PATH',
            'TOKEN_FILE',
            'GCR_STAGING_DEST',
            'GCR_RELEASE_DEST',
            'GCS_MONTHLY_RELEASE_PATH',
            'DOCKER_HUB',
            'GCS_BUILD_BUCKET',
            'RELEASE_PROJECT_ID',
        ]
        # Per-run overrides from dag_run.conf win over environment defaults.
        # Fix: loop variable renamed from `name`, which shadowed the enclosing
        # MakeCommonDag `name` parameter.
        for setting_name in config_settings_name:
            config_settings[setting_name] = conf.get(
                setting_name) or default_conf[setting_name]

        if monthly:
            # Monthly builds pin the manifest to the last known-green SHA.
            config_settings['MFEST_COMMIT'] = conf.get(
                'MFEST_COMMIT') or Variable.get('latest_sha')
            gcs_path = conf.get('GCS_MONTHLY_STAGE_PATH')
            if not gcs_path:
                gcs_path = default_conf['GCS_MONTHLY_STAGE_PATH']
        else:
            config_settings['MFEST_COMMIT'] = conf.get(
                'MFEST_COMMIT') or default_conf['MFEST_COMMIT']
            gcs_path = conf.get('GCS_DAILY_PATH') or default_conf['GCS_DAILY_PATH']

        # Derived paths used by the bash templates below.
        config_settings['GCS_STAGING_PATH'] = gcs_path
        config_settings['GCS_BUILD_PATH'] = '{}/{}'.format(
            config_settings['GCS_BUILD_BUCKET'], gcs_path)
        config_settings['GCS_RELEASE_TOOLS_PATH'] = '{}/release-tools/{}'.format(
            config_settings['GCS_BUILD_BUCKET'], gcs_path)
        config_settings['GCS_FULL_STAGING_PATH'] = '{}/{}'.format(
            config_settings['GCS_STAGING_BUCKET'], gcs_path)
        config_settings['ISTIO_REPO'] = 'https://github.com/{}/{}.git'.format(
            config_settings['GITHUB_ORG'], config_settings['GITHUB_REPO'])

        return config_settings

    generate_flow_args = PythonOperator(
        task_id='generate_workflow_args',
        python_callable=GenerateTestArgs,
        provide_context=True,
        dag=common_dag,
    )

    # Resolves the istio SHA out of the green-build manifest, stages the
    # release scripts to GCS, and prints the green-builds HEAD SHA last so it
    # becomes this task's XCom return value.
    get_git_commit_cmd = """
    {% set settings = task_instance.xcom_pull(task_ids='generate_workflow_args') %}
    git config --global user.name "TestRunnerBot"
    git config --global user.email "*****@*****.**"
    git clone {{ settings.MFEST_URL }} green-builds || exit 2
    pushd green-builds
    git checkout {{ settings.MFEST_COMMIT }} || exit 5
    SHA=`grep {{ settings.GITHUB_ORG }}/{{ settings.GITHUB_REPO }} {{ settings.MFEST_FILE }} | cut -f 6 -d \\"` || exit 3
    if [ -z ${SHA} ]; then
      echo "SHA not found"
      exit 6
    fi
    popd
    git clone {{ settings.ISTIO_REPO }} istio-code
    pushd istio-code/release
    git checkout ${SHA} || exit 4
    gsutil cp *.sh gs://{{ settings.GCS_RELEASE_TOOLS_PATH }}/data/release/
    gsutil cp *.json gs://{{ settings.GCS_RELEASE_TOOLS_PATH }}/data/release/
    popd
    pushd green-builds
    git rev-parse HEAD
    """

    get_git_commit = BashOperator(
        task_id='get_git_commit',
        bash_command=get_git_commit_cmd,
        xcom_push=True,
        dag=common_dag)

    build_template = """
    {% set settings = task_instance.xcom_pull(task_ids='generate_workflow_args') %}
    {% set m_commit = task_instance.xcom_pull(task_ids='get_git_commit') %}
    gsutil cp gs://{{ settings.GCS_RELEASE_TOOLS_PATH }}/data/release/*.json .
    gsutil cp gs://{{ settings.GCS_RELEASE_TOOLS_PATH }}/data/release/*.sh .
    chmod u+x *
    ./start_gcb_build.sh -w -p {{ settings.PROJECT_ID \
    }} -r {{ settings.GCR_STAGING_DEST }} -s {{ settings.GCS_BUILD_PATH }} \
    -v "{{ settings.VERSION }}" \
    -u "{{ settings.MFEST_URL }}" \
    -t "{{ m_commit }}" -m "{{ settings.MFEST_FILE }}" \
    -a {{ settings.SVC_ACCT }}
    """
    # NOTE: if you add commands to build_template after start_gcb_build.sh then
    # take care to preserve its return value
    build = BashOperator(
        task_id='run_cloud_builder', bash_command=build_template, dag=common_dag)

    test_command = """
    cp /home/airflow/gcs/data/githubctl ./githubctl
    chmod u+x ./githubctl
    {% set settings = task_instance.xcom_pull(task_ids='generate_workflow_args') %}
    git config --global user.name "TestRunnerBot"
    git config --global user.email "*****@*****.**"
    ls -l ./githubctl
    ./githubctl \
    --token_file="{{ settings.TOKEN_FILE }}" \
    --op=dailyRelQual \
    --hub=gcr.io/{{ settings.GCR_STAGING_DEST }} \
    --gcs_path="{{ settings.GCS_BUILD_PATH }}" \
    --tag="{{ settings.VERSION }}"
    """

    # Qualification tests are expensive; do not retry on failure.
    run_release_qualification_tests = BashOperator(
        task_id='run_release_qualification_tests',
        bash_command=test_command,
        retries=0,
        dag=common_dag)

    copy_files = GoogleCloudStorageCopyOperator(
        task_id='copy_files_for_release',
        source_bucket=GetSettingTemplate('GCS_BUILD_BUCKET'),
        source_object=GetSettingTemplate('GCS_STAGING_PATH'),
        destination_bucket=GetSettingTemplate('GCS_STAGING_BUCKET'),
        dag=common_dag,
    )

    # Wiring: args -> commit -> build -> qualification tests -> copy.
    generate_flow_args >> get_git_commit >> build
    run_release_qualification_tests.set_upstream(build)
    run_release_qualification_tests >> copy_files

    return common_dag, copy_files
def MakeCommonDag(dag_args_func, name='istio_daily_flow_test', schedule_interval='15 9 * * *'):
    """Creates the shared part of the daily/release dags."""
    # dag_args_func: callable run by the generate_workflow_args task to
    #   produce the per-run settings that the bash templates pull via XCom.
    # Returns (dag, tasksOD): the DAG plus an ordered task_id -> operator map
    #   seeded from the module-level CommonTasks.
    common_dag = DAG(
        name,
        catchup=False,
        default_args=default_args,
        schedule_interval=schedule_interval,
    )
    # Seed with the shared tasks; insertion order is preserved so callers can
    # wire dependencies in order.
    tasksOD = collections.OrderedDict(CommonTasks)
    generate_flow_args = PythonOperator(
        task_id='generate_workflow_args',
        python_callable=dag_args_func,
        provide_context=True,
        dag=common_dag,
    )
    tasksOD['generate_workflow_args'] = generate_flow_args
    # Resolves ISTIO/API/PROXY SHAs from the green-build manifest, optionally
    # rejects stale or inconsistent SHAs (distinct exit codes 2-12 identify
    # each failure), stages the release scripts to GCS, and finally prints the
    # green-builds HEAD SHA so it becomes this task's XCom value.
    get_git_commit_cmd = """
    {% set settings = task_instance.xcom_pull(task_ids='generate_workflow_args') %}
    git config --global user.name "TestRunnerBot"
    git config --global user.email "*****@*****.**"
    git clone {{ settings.MFEST_URL }} green-builds || exit 2
    pushd green-builds
    git checkout {{ settings.BRANCH }}
    git checkout {{ settings.MFEST_COMMIT }} || exit 3
    ISTIO_SHA=`grep {{ settings.GITHUB_ORG }}/{{ settings.GITHUB_REPO }} {{ settings.MFEST_FILE }} | cut -f 6 -d \\"` || exit 4
    API_SHA=` grep {{ settings.GITHUB_ORG }}/api {{ settings.MFEST_FILE }} | cut -f 6 -d \\"` || exit 5
    PROXY_SHA=`grep {{ settings.GITHUB_ORG }}/proxy {{ settings.MFEST_FILE }} | cut -f 6 -d \\"` || exit 6
    if [ -z ${ISTIO_SHA} ] || [ -z ${API_SHA} ] || [ -z ${PROXY_SHA} ]; then
      echo "ISTIO_SHA:$ISTIO_SHA API_SHA:$API_SHA PROXY_SHA:$PROXY_SHA some shas not found"
      exit 7
    fi
    popd #green-builds
    git clone {{ settings.ISTIO_REPO }} istio-code -b {{ settings.BRANCH }}
    pushd istio-code/release
    ISTIO_HEAD_SHA=`git rev-parse HEAD`
    git checkout ${ISTIO_SHA} || exit 8
    TS_SHA=` git show -s --format=%ct ${ISTIO_SHA}`
    TS_HEAD=`git show -s --format=%ct ${ISTIO_HEAD_SHA}`
    DIFF_SEC=$((TS_HEAD - TS_SHA))
    DIFF_DAYS=$(($DIFF_SEC/86400))
    if [ "{{ settings.CHECK_GREEN_SHA_AGE }}" = "true" ] && [ "$DIFF_DAYS" -gt "2" ]; then
      echo ERROR: ${ISTIO_SHA} is $DIFF_DAYS days older than head of branch {{ settings.BRANCH }}
      exit 9
    fi
    popd #istio-code/release
    if [ "{{ settings.VERIFY_CONSISTENCY }}" = "true" ]; then
      PROXY_REPO=`dirname {{ settings.ISTIO_REPO }}`/proxy
      echo $PROXY_REPO
      git clone $PROXY_REPO proxy-code -b {{ settings.BRANCH }}
      pushd proxy-code
      PROXY_HEAD_SHA=`git rev-parse HEAD`
      PROXY_HEAD_API_SHA=`grep ISTIO_API istio.deps -A 4 | grep lastStableSHA | cut -f 4 -d '"'`
      popd
      if [ "$PROXY_HEAD_SHA" != "$PROXY_SHA" ]; then
        echo "inconsistent shas PROXY_HEAD_SHA $PROXY_HEAD_SHA != $PROXY_SHA PROXY_SHA" 1>&2
        exit 10
      fi
      if [ "$PROXY_HEAD_API_SHA" != "$API_SHA" ]; then
        echo "inconsistent shas PROXY_HEAD_API_SHA $PROXY_HEAD_API_SHA != $API_SHA API_SHA" 1>&2
        exit 11
      fi
      if [ "$ISTIO_HEAD_SHA" != "$ISTIO_SHA" ]; then
        echo "inconsistent shas ISTIO_HEAD_SHA $ISTIO_HEAD_SHA != $ISTIO_SHA ISTIO_SHA" 1>&2
        exit 12
      fi
    fi
    pushd istio-code/release
    gsutil cp *.sh gs://{{ settings.GCS_RELEASE_TOOLS_PATH }}/data/release/
    gsutil cp *.json gs://{{ settings.GCS_RELEASE_TOOLS_PATH }}/data/release/
    popd #istio-code/release
    pushd green-builds
    git rev-parse HEAD
    """
    # xcom_push=True: the last line of stdout (the HEAD SHA) is published as
    # this task's XCom, consumed by build_template below as m_commit.
    get_git_commit = BashOperator(
        task_id='get_git_commit',
        bash_command=get_git_commit_cmd,
        xcom_push=True,
        dag=common_dag)
    tasksOD['get_git_commit'] = get_git_commit
    # Pulls the staged release scripts back from GCS and kicks off the Google
    # Cloud Build for the resolved manifest commit.
    build_template = """
    {% set settings = task_instance.xcom_pull(task_ids='generate_workflow_args') %}
    {% set m_commit = task_instance.xcom_pull(task_ids='get_git_commit') %}
    gsutil cp gs://{{ settings.GCS_RELEASE_TOOLS_PATH }}/data/release/*.json .
    gsutil cp gs://{{ settings.GCS_RELEASE_TOOLS_PATH }}/data/release/*.sh .
    chmod u+x *
    ./start_gcb_build.sh -w -p {{ settings.PROJECT_ID \
    }} -r {{ settings.GCR_STAGING_DEST }} -s {{ settings.GCS_BUILD_PATH }} \
    -v "{{ settings.VERSION }}" \
    -u "{{ settings.MFEST_URL }}" \
    -t "{{ m_commit }}" -m "{{ settings.MFEST_FILE }}" \
    -a {{ settings.SVC_ACCT }}
    """
    # NOTE: if you add commands to build_template after start_gcb_build.sh then
    # take care to preserve its return value
    build = BashOperator(
        task_id='run_cloud_builder', bash_command=build_template, dag=common_dag)
    tasksOD['run_cloud_builder'] = build
    # Runs the daily release-qualification suite via the githubctl binary
    # shipped in the Composer GCS data directory.
    test_command = """
    cp /home/airflow/gcs/data/githubctl ./githubctl
    chmod u+x ./githubctl
    {% set settings = task_instance.xcom_pull(task_ids='generate_workflow_args') %}
    git config --global user.name "TestRunnerBot"
    git config --global user.email "*****@*****.**"
    ls -l ./githubctl
    ./githubctl \
    --token_file="{{ settings.TOKEN_FILE }}" \
    --op=dailyRelQual \
    --hub=gcr.io/{{ settings.GCR_STAGING_DEST }} \
    --gcs_path="{{ settings.GCS_BUILD_PATH }}" \
    --tag="{{ settings.VERSION }}" \
    --base_branch="{{ settings.BRANCH }}"
    """
    # retries=0: qualification runs are expensive, so never auto-retry.
    run_release_qualification_tests = BashOperator(
        task_id='run_release_qualification_tests',
        bash_command=test_command,
        retries=0,
        dag=common_dag)
    tasksOD[
        'run_release_qualification_tests'] = run_release_qualification_tests
    copy_files = GoogleCloudStorageCopyOperator(
        task_id='copy_files_for_release',
        source_bucket=GetSettingTemplate('GCS_BUILD_BUCKET'),
        source_object=GetSettingTemplate('GCS_STAGING_PATH'),
        destination_bucket=GetSettingTemplate('GCS_STAGING_BUCKET'),
        dag=common_dag,
    )
    tasksOD['copy_files_for_release'] = copy_files
    # Dependency wiring between tasks is left to the caller.
    return common_dag, tasksOD
def MakeCommonDag(name='istio_daily_flow_test', schedule_interval='15 3 * * *', monthly=False):
    """Creates the shared part of the daily/monthly dags."""
    # monthly: when True, the version is auto-computed from the date and the
    #   manifest commit comes from the 'latest_sha' Airflow Variable.
    # Returns (dag, copy_files): the DAG plus its terminal task.
    # NOTE(review): no catchup=False here, so Airflow may backfill missed
    # schedule intervals — confirm this is intended before reusing.
    common_dag = DAG(
        name,
        default_args=default_args,
        schedule_interval=schedule_interval,
    )

    def AirflowGetVariableOrBaseCase(var, base):
        # Returns the Airflow Variable `var`, or `base` when it is not set.
        try:
            return Variable.get(var)
        except KeyError:
            return base

    def GenerateTestArgs(**kwargs):
        """Loads the configuration that will be used for this Iteration."""
        # Per-run overrides arrive via dag_run.conf (may be absent).
        conf = kwargs['dag_run'].conf
        if conf is None:
            conf = dict()
        # Uses the scheduled execution date (not wall-clock time).
        date = kwargs['execution_date']
        timestamp = time.mktime(date.timetuple())
        # Monthly releases started in Nov 2017 with 0.3.0, so minor is # of months
        # from Aug 2017.
        minor_version = (date.year - 2017) * 12 + (date.month - 1) - 7
        major_version = AirflowGetVariableOrBaseCase('major_version', 0)
        # This code gets information about the latest released version so we know
        # What version number to use for this round.
        r_minor = int(AirflowGetVariableOrBaseCase('released_version_minor', 0))
        r_patch = int(AirflowGetVariableOrBaseCase('released_version_patch', 0))
        # If we have already released a monthy for this mounth then bump
        # The patch number for the remander of the month.
        if r_minor == minor_version:
            patch = r_patch + 1
        else:
            patch = 0
        # Daily builds take VERSION from conf (may be None); monthly builds
        # compute it from major/minor/patch.
        if not monthly:
            version = conf.get('VERSION')
        else:
            version = '{}.{}.{}'.format(major_version, minor_version, patch)
        default_conf = environment_config.get_airflow_config(
            version,
            timestamp,
            major=major_version,
            minor=minor_version,
            patch=patch,
            date=date.strftime('%Y%m%d'),
            rc=date.strftime('%H-%M-%S'))
        config_settings = dict(VERSION=default_conf['VERSION'])
        config_settings_name = [
            'PROJECT_ID',
            'MFEST_URL',
            'MFEST_FILE',
            'GCS_STAGING_BUCKET',
            'SVC_ACCT',
            'GITHUB_ORG',
            'GITHUB_REPO',
            'GCS_GITHUB_PATH',
            'TOKEN_FILE',
            'GCR_STAGING_DEST',
            'GCR_RELEASE_DEST',
            'GCS_MONTHLY_RELEASE_PATH',
            'DOCKER_HUB',
            'GCS_BUILD_BUCKET',
            'RELEASE_PROJECT_ID',
        ]
        # Per-run conf values win over environment defaults.
        # NOTE(review): the loop variable shadows the enclosing `name`
        # parameter; harmless here since `name` is not read afterwards.
        for name in config_settings_name:
            config_settings[name] = conf.get(name) or default_conf[name]
        if monthly:
            # Monthly builds pin the manifest to the last known-green SHA.
            config_settings['MFEST_COMMIT'] = conf.get(
                'MFEST_COMMIT') or Variable.get('latest_sha')
            gcs_path = conf.get('GCS_MONTHLY_STAGE_PATH')
            if not gcs_path:
                gcs_path = default_conf['GCS_MONTHLY_STAGE_PATH']
        else:
            config_settings['MFEST_COMMIT'] = conf.get(
                'MFEST_COMMIT') or default_conf['MFEST_COMMIT']
            gcs_path = conf.get('GCS_DAILY_PATH') or default_conf['GCS_DAILY_PATH']
        # Derived GCS paths used by the bash templates below.
        config_settings['GCS_STAGING_PATH'] = gcs_path
        config_settings['GCS_BUILD_PATH'] = '{}/{}'.format(
            config_settings['GCS_BUILD_BUCKET'], gcs_path)
        config_settings['GCS_FULL_STAGING_PATH'] = '{}/{}'.format(
            config_settings['GCS_STAGING_BUCKET'], gcs_path)
        return config_settings

    generate_flow_args = PythonOperator(
        task_id='generate_workflow_args',
        python_callable=GenerateTestArgs,
        provide_context=True,
        dag=common_dag,
    )
    # Checks out the manifest commit and prints its SHA last, so it becomes
    # this task's XCom value (see xcom_push=True below).
    get_git_commit_cmd = """
    {% set settings = task_instance.xcom_pull(task_ids='generate_workflow_args') %}
    git config --global user.name "TestRunnerBot"
    git config --global user.email "*****@*****.**"
    git clone {{ settings.MFEST_URL }} green-builds || exit 2
    cd green-builds
    git checkout {{ settings.MFEST_COMMIT }} || exit 3
    git rev-parse HEAD
    """
    get_git_commit = BashOperator(
        task_id='get_git_commit',
        bash_command=get_git_commit_cmd,
        xcom_push=True,
        dag=common_dag)
    # Kicks off the Google Cloud Build using scripts shipped in the Composer
    # GCS data directory (mounted at /home/airflow/gcs/data).
    build_template = """
    chmod +x /home/airflow/gcs/data/release/*
    {% set settings = task_instance.xcom_pull(task_ids='generate_workflow_args') %}
    {% set m_commit = task_instance.xcom_pull(task_ids='get_git_commit') %}
    /home/airflow/gcs/data/release/start_gcb_build.sh -w -p {{ settings.PROJECT_ID \
    }} -r {{ settings.GCR_STAGING_DEST }} -s {{ settings.GCS_BUILD_PATH }} \
    -v "{{ settings.VERSION }}" \
    -u "{{ settings.MFEST_URL }}" \
    -t "{{ m_commit }}" -m "{{ settings.MFEST_FILE }}" \
    -a {{ settings.SVC_ACCT }}
    """
    build = BashOperator(
        task_id='run_cloud_builder', bash_command=build_template, dag=common_dag)
    # Runs the daily release-qualification suite via githubctl.
    test_command = """
    chmod +x /home/airflow/gcs/data/githubctl
    {% set settings = task_instance.xcom_pull(task_ids='generate_workflow_args') %}
    git config --global user.name "TestRunnerBot"
    git config --global user.email "*****@*****.**"
    /home/airflow/gcs/data/githubctl \
    --token_file="{{ settings.TOKEN_FILE }}" \
    --op=dailyRelQual \
    --hub=gcr.io/{{ settings.GCR_STAGING_DEST }} \
    --gcs_path="{{ settings.GCS_BUILD_PATH }}" \
    --tag="{{ settings.VERSION }}"
    """
    # NOTE(review): "quilification" is a typo, but the task_id is externally
    # visible state in Airflow — renaming would orphan history; left as-is.
    run_release_quilification_tests = BashOperator(
        task_id='run_release_quilification_tests',
        bash_command=test_command,
        retries=2,
        dag=common_dag)
    copy_files = GoogleCloudStorageCopyOperator(
        task_id='copy_files_for_release',
        source_bucket=GetSettingTemplate('GCS_BUILD_BUCKET'),
        source_object=GetSettingTemplate('GCS_STAGING_PATH'),
        destination_bucket=GetSettingTemplate('GCS_STAGING_BUCKET'),
        destination_directory=GetSettingTemplate('GCS_STAGING_PATH'),
        dag=common_dag,
    )
    # Wiring: args -> commit -> build -> qualification tests -> copy.
    generate_flow_args >> get_git_commit >> build
    run_release_quilification_tests.set_upstream(build)
    run_release_quilification_tests >> copy_files
    return common_dag, copy_files