Example #1
def write_metrics_to_s3(bucket, key, region, metrics):
    '''Helper function that uploads the given metrics to S3
       bucket - String with S3 bucket where metrics should be written
       key - String with S3 object key where metrics should be written
       region - String with the AWS region
       metrics - Dictionary with metrics to write to S3
    '''
    try:
        s3_extra_args = get_s3_kms_extra_args()
        session = boto3.session.Session()
        s3_client = session.client('s3',
                                   region_name=region,
                                   config=get_boto_config())
        s3_client.put_object(Bucket=bucket,
                             Key=key,
                             Body=bytes(json.dumps(metrics), encoding='utf-8'),
                             **s3_extra_args)
    except botocore.exceptions.ClientError as err:
        log_and_exit(
            "Unable to write metrics to s3: bucket: {}, error: {}".format(
                bucket, err.response['Error']['Code']),
            SIMAPP_SIMULATION_WORKER_EXCEPTION, SIMAPP_EVENT_ERROR_CODE_400)
    except Exception as ex:
        log_and_exit("Unable to write metrics to s3, exception: {}".format(ex),
                     SIMAPP_SIMULATION_WORKER_EXCEPTION,
                     SIMAPP_EVENT_ERROR_CODE_500)
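A minimal invocation of the helper above might look like the following sketch; the bucket, key, region, and metric values are placeholders, not names from the original source:

# Hypothetical usage sketch for write_metrics_to_s3; all values are placeholders.
metrics = {"episode": 12, "completion_percentage": 87.5}
write_metrics_to_s3(bucket="my-metrics-bucket",
                    key="metrics/agent_metrics.json",
                    region="us-east-1",
                    metrics=metrics)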
Example #2
def upload_mpu_part_to_s3(self, episodes):
    if self.data_state != SIMTRACE_DATA_UPLOAD_UNKNOWN_STATE:
        logger.debug(
            "simtrace_data: Uploading mpu_part_to_s3::: mpu_id-{} mpu_part_number-{} episode-{}"
            .format(self.mpu_id, self.mpu_part_number, episodes))
        self.mpu_episodes = episodes
        s3_client = boto3.session.Session().client(
            's3',
            region_name=self.aws_region,
            config=utils.get_boto_config())
        metrics_body = self.simtrace_csv_data.getvalue()
        part = s3_client.upload_part(Body=bytes(metrics_body,
                                                encoding='utf-8'),
                                     Bucket=self.s3_bucket,
                                     Key=self.s3_object_key,
                                     UploadId=self.mpu_id,
                                     PartNumber=self.mpu_part_number)
        self.mpu_parts.append({
            "PartNumber": self.mpu_part_number,
            "ETag": part["ETag"]
        })
        logger.info(
            "simtrace_data: Uploaded mpu_part_to_s3::: done! episode-{} mpu_id-{} mpu_part_number-{} mpu_parts-{}"
            .format(episodes, self.mpu_id, self.mpu_part_number,
                    self.mpu_parts))
        # Increment only after logging so the message reports the part
        # number that was actually uploaded.
        self.mpu_part_number += 1
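Note: S3 requires every part of a multipart upload except the last to be at least 5 MiB, allows part numbers 1 through 10,000, and the {"PartNumber", "ETag"} records collected above are exactly the structure that complete_multipart_upload later expects.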
Example #3
def setup_mutipart_upload(self):
    logger.info("simtrace_data: setup_mutipart_upload to %s",
                self.s3_bucket)

    # Set up SIM_TRACE data incremental uploads to S3
    self.simtrace_csv_data = StringIO()
    self.csvwriter = csv.writer(self.simtrace_csv_data)
    self.csvwriter.writerow(SIMTRACE_CSV_DATA_HEADER)

    self.aws_region = rospy.get_param('AWS_REGION')
    logger.info(
        "simtrace_data: setup_mutipart_upload on s3_bucket {} s3_object_key {} region {}"
        .format(self.s3_bucket, self.s3_object_key, self.aws_region))

    # Initiate the multipart upload
    s3_client = boto3.session.Session().client(
        's3',
        region_name=self.aws_region,
        config=utils.get_boto_config())
    self.mpu = s3_client.create_multipart_upload(
        Bucket=self.s3_bucket, Key=self.s3_object_key)
    self.mpu_id = self.mpu["UploadId"]
    self.mpu_part_number = 1
    self.mpu_parts = []
    self.mpu_episodes = 0
    self.total_upload_size = 0
    self.data_state = SIMTRACE_DATA_UPLOAD_INIT_DONE
    logger.info(
        "simtrace_data: setup_mutipart_upload done! mpu_id=%s mpu_part_number=%s",
        self.mpu_id, self.mpu_part_number)
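For reference, a condensed sketch of the multipart lifecycle that Examples #2 and #3 implement incrementally (create, upload parts, complete); upload_in_parts and chunks are illustrative names, not part of the original code:

import boto3

def upload_in_parts(bucket, key, chunks, region):
    # Sketch of the S3 multipart lifecycle; chunks is an iterable of bytes.
    s3 = boto3.session.Session().client('s3', region_name=region)
    mpu = s3.create_multipart_upload(Bucket=bucket, Key=key)
    parts = []
    for number, body in enumerate(chunks, start=1):  # PartNumber starts at 1
        part = s3.upload_part(Body=body, Bucket=bucket, Key=key,
                              UploadId=mpu["UploadId"], PartNumber=number)
        parts.append({"PartNumber": number, "ETag": part["ETag"]})
    s3.complete_multipart_upload(Bucket=bucket, Key=key,
                                 UploadId=mpu["UploadId"],
                                 MultipartUpload={"Parts": parts})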
Example #4
def get_client(self):
    session = boto3.session.Session()
    return session.client('s3',
                          region_name=self.aws_region,
                          endpoint_url=self.s3_endpoint_url,
                          config=get_boto_config())
def test_get_boto_config():
    """Smoke test for the get_boto_config function in markov/utils.py."""
    # Passes as long as building the config does not raise.
    utils.get_boto_config()
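These examples never show get_boto_config itself; a minimal sketch of what such a helper typically wraps, assuming it only tunes botocore retry behavior (the retry values here are illustrative):

from botocore.config import Config

def get_boto_config():
    # Hypothetical sketch: bound retries so a flaky S3 call fails fast
    # instead of hanging the simulation worker.
    return Config(retries={"max_attempts": 5, "mode": "standard"})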
def main():
    """ Main function for tournament"""
    try:
        # parse argument
        s3_region = sys.argv[1]
        s3_bucket = sys.argv[2]
        s3_prefix = sys.argv[3]
        s3_yaml_name = sys.argv[4]

        # create boto3 session/client and download yaml/json file
        session = boto3.session.Session()
        s3_endpoint_url = os.environ.get("S3_ENDPOINT_URL", None)
        s3_client = session.client('s3',
                                   region_name=s3_region,
                                   endpoint_url=s3_endpoint_url,
                                   config=get_boto_config())

        yaml_key = os.path.normpath(os.path.join(s3_prefix, s3_yaml_name))
        local_yaml_path = os.path.abspath(
            os.path.join(os.getcwd(), s3_yaml_name))
        s3_client.download_file(Bucket=s3_bucket,
                                Key=yaml_key,
                                Filename=local_yaml_path)

        # Intermediate tournament files
        queue_pickle_name = 'tournament_candidate_queue.pkl'
        queue_pickle_s3_key = os.path.normpath(
            os.path.join(s3_prefix, queue_pickle_name))
        local_queue_pickle_path = os.path.abspath(
            os.path.join(os.getcwd(), queue_pickle_name))

        report_pickle_name = 'tournament_report.pkl'
        report_pickle_s3_key = os.path.normpath(
            os.path.join(s3_prefix, report_pickle_name))
        local_report_pickle_path = os.path.abspath(
            os.path.join(os.getcwd(), report_pickle_name))

        final_report_name = 'tournament_report.json'
        final_report_s3_key = os.path.normpath(
            os.path.join(s3_prefix, final_report_name))

        try:
            s3_client.download_file(Bucket=s3_bucket,
                                    Key=queue_pickle_s3_key,
                                    Filename=local_queue_pickle_path)
            s3_client.download_file(Bucket=s3_bucket,
                                    Key=report_pickle_s3_key,
                                    Filename=local_report_pickle_path)
        except Exception:
            # Intermediate files may not exist yet (e.g. on the first run).
            pass

        # Get values passed in yaml files. Default values are for backward compatibility and for single racecar racing
        yaml_dict = get_yaml_dict(local_yaml_path)

        # Forcing the yaml parameter to list
        force_list_params = [
            MODEL_S3_BUCKET_YAML_KEY, MODEL_S3_PREFIX_YAML_KEY,
            MODEL_METADATA_FILE_S3_YAML_KEY, METRICS_S3_BUCKET_YAML_KEY,
            METRICS_S3_PREFIX_YAML_KEY, SIMTRACE_S3_BUCKET_YAML_KEY,
            SIMTRACE_S3_PREFIX_YAML_KEY, MP4_S3_BUCKET_YAML_KEY,
            MP4_S3_PREFIX_YAML_KEY, DISPLAY_NAME_YAML_KEY
        ]
        for params in force_list_params:
            yaml_dict[params] = force_list(yaml_dict.get(params, None))

        # Populate the model_metadata_s3_key values to handle both training and evaluation for all race_formats
        if None in yaml_dict[MODEL_METADATA_FILE_S3_YAML_KEY]:
            # MODEL_METADATA_FILE_S3_KEY not passed as part of yaml file ==> This happens during evaluation
            # Assume model_metadata.json is present in the s3_prefix/model/ folder
            yaml_dict[MODEL_METADATA_FILE_S3_YAML_KEY] = list()
            for s3_prefix in yaml_dict[MODEL_S3_PREFIX_YAML_KEY]:
                yaml_dict[MODEL_METADATA_FILE_S3_YAML_KEY].append(
                    os.path.join(s3_prefix, 'model/model_metadata.json'))

        # Validate the yaml values
        validate_yaml_values(yaml_dict)
        if os.path.exists(local_queue_pickle_path):
            with open(local_queue_pickle_path, 'rb') as f:
                tournament_candidate_queue = pickle.load(f)
            with open(local_report_pickle_path, 'rb') as f:
                tournament_report = pickle.load(f)
            logger.info('tournament_candidate_queue loaded from existing file')
        else:
            logger.info('tournament_candidate_queue initialized')
            tournament_candidate_queue = deque()
            for agent_idx, _ in enumerate(yaml_dict[MODEL_S3_BUCKET_YAML_KEY]):
                tournament_candidate_queue.append(
                    (yaml_dict[MODEL_S3_BUCKET_YAML_KEY][agent_idx],
                     yaml_dict[MODEL_S3_PREFIX_YAML_KEY][agent_idx],
                     yaml_dict[MODEL_METADATA_FILE_S3_YAML_KEY][agent_idx],
                     yaml_dict[METRICS_S3_BUCKET_YAML_KEY][agent_idx],
                     yaml_dict[METRICS_S3_PREFIX_YAML_KEY][agent_idx],
                     yaml_dict[SIMTRACE_S3_BUCKET_YAML_KEY][agent_idx],
                     yaml_dict[SIMTRACE_S3_PREFIX_YAML_KEY][agent_idx],
                     yaml_dict[MP4_S3_BUCKET_YAML_KEY][agent_idx],
                     yaml_dict[MP4_S3_PREFIX_YAML_KEY][agent_idx],
                     yaml_dict[DISPLAY_NAME_YAML_KEY][agent_idx]))
            tournament_report = []

        race_idx = len(tournament_report)
        while len(tournament_candidate_queue) > 1:
            car1 = tournament_candidate_queue.popleft()
            car2 = tournament_candidate_queue.popleft()
            (car1_model_s3_bucket, car1_s3_prefix, car1_model_metadata,
             car1_metrics_bucket, car1_metrics_s3_key, car1_simtrace_bucket,
             car1_simtrace_prefix, car1_mp4_bucket, car1_mp4_prefix,
             car1_display_name) = car1
            (car2_model_s3_bucket, car2_s3_prefix, car2_model_metadata,
             car2_metrics_bucket, car2_metrics_s3_key, car2_simtrace_bucket,
             car2_simtrace_prefix, car2_mp4_bucket, car2_mp4_prefix,
             car2_display_name) = car2

            race_yaml_dict = generate_race_yaml(yaml_dict=yaml_dict,
                                                car1=car1,
                                                car2=car2,
                                                race_idx=race_idx)

            race_car_colors = ["Orange", "Purple"]
            race_model_s3_buckets = [
                car1_model_s3_bucket, car2_model_s3_bucket
            ]
            race_model_metadatas = [car1_model_metadata, car2_model_metadata]

            # List of directories created
            dirs_to_delete = list()
            yaml_dir = os.path.abspath(os.path.join(os.getcwd(),
                                                    str(race_idx)))
            os.makedirs(yaml_dir)

            dirs_to_delete.append(yaml_dir)
            race_yaml_path = os.path.abspath(
                os.path.join(yaml_dir, 'evaluation_params.yaml'))
            with open(race_yaml_path, 'w') as race_yaml_file:
                yaml.dump(race_yaml_dict, race_yaml_file)

            # List of racecar names that should include second camera while launching
            racecars_with_stereo_cameras = list()
            # List of racecar names that should include lidar while launching
            racecars_with_lidars = list()
            # List of SimApp versions
            simapp_versions = list()
            for agent_index, model_s3_bucket in enumerate(
                    race_model_s3_buckets):
                racecar_name = 'racecar_' + str(agent_index)
                # Make a local folder with the racecar name to download the model_metadata.json
                os.makedirs(os.path.join(os.getcwd(), racecar_name))
                dirs_to_delete.append(os.path.join(os.getcwd(), racecar_name))
                local_model_metadata_path = os.path.abspath(
                    os.path.join(os.path.join(os.getcwd(), racecar_name),
                                 'model_metadata.json'))
                json_key = race_model_metadatas[agent_index]
                json_key = json_key.replace('s3://{}/'.format(model_s3_bucket),
                                            '')
                s3_client.download_file(Bucket=model_s3_bucket,
                                        Key=json_key,
                                        Filename=local_model_metadata_path)
                sensors, _, simapp_version = utils_parse_model_metadata.parse_model_metadata(
                    local_model_metadata_path)
                simapp_versions.append(simapp_version)
                if Input.STEREO.value in sensors:
                    racecars_with_stereo_cameras.append(racecar_name)
                if Input.LIDAR.value in sensors or Input.SECTOR_LIDAR.value in sensors:
                    racecars_with_lidars.append(racecar_name)

            cmd = [
                os.path.join(os.path.dirname(os.path.abspath(__file__)),
                             "tournament_race_node.py"),
                str(race_idx), race_yaml_path,
                ','.join(racecars_with_stereo_cameras),
                ','.join(racecars_with_lidars), ','.join(race_car_colors),
                ','.join(simapp_versions)
            ]
            try:
                return_code, _, stderr = run_cmd(cmd_args=cmd,
                                                 shell=False,
                                                 stdout=None,
                                                 stderr=None)
            except KeyboardInterrupt:
                logger.info(
                    "KeyboardInterrupt raised; SimApp must have faulted. Exiting..."
                )
                return

            # Retrieve winner and append tournament report
            with open('race_report.pkl', 'rb') as f:
                race_report = pickle.load(f)
            race_report['race_idx'] = race_idx
            winner = car1 if race_report[
                'winner'] == car1_display_name else car2
            logger.info("race {}'s winner: {}".format(race_idx,
                                                      race_report['winner']))

            tournament_candidate_queue.append(winner)
            tournament_report.append(race_report)

            # Clean up directories created
            for dir_to_delete in dirs_to_delete:
                shutil.rmtree(dir_to_delete, ignore_errors=True)
            race_idx += 1

            # Persist latest queue and report to use after job restarts.
            with open(local_queue_pickle_path, 'wb') as f:
                pickle.dump(tournament_candidate_queue, f, protocol=2)
            s3_client.upload_file(Filename=local_queue_pickle_path,
                                  Bucket=s3_bucket,
                                  Key=queue_pickle_s3_key)

            with open(local_report_pickle_path, 'wb') as f:
                pickle.dump(tournament_report, f, protocol=2)
            s3_client.upload_file(Filename=local_report_pickle_path,
                                  Bucket=s3_bucket,
                                  Key=report_pickle_s3_key)

            # If more than one candidate remains, restart the simulation job;
            # otherwise the tournament is finished: persist the final report
            # and end the job.
            if len(tournament_candidate_queue) > 1:
                restart_simulation_job(
                    os.environ.get('AWS_ROBOMAKER_SIMULATION_JOB_ARN'),
                    s3_region)
                break
            else:
                # Persist final tournament report in json format
                # and terminate the job by canceling it
                s3_client.put_object(Bucket=s3_bucket,
                                     Key=final_report_s3_key,
                                     Body=json.dumps(tournament_report))

                cancel_simulation_job(
                    os.environ.get('AWS_ROBOMAKER_SIMULATION_JOB_ARN'),
                    s3_region)
    except Exception as e:
        log_and_exit(
            "Tournament node failed: s3_bucket: {}, yaml_key: {}, {}".format(
                s3_bucket, yaml_key, e), SIMAPP_SIMULATION_WORKER_EXCEPTION,
            SIMAPP_EVENT_ERROR_CODE_500)
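The tournament loop above is a single-elimination bracket driven by a deque; stripped of the S3 and launch plumbing, its control flow reduces to this sketch (run_bracket and race are illustrative names, not functions from the original code):

from collections import deque

def run_bracket(candidates, race):
    # Winners re-enter the queue until a single candidate remains.
    queue = deque(candidates)
    while len(queue) > 1:
        car1, car2 = queue.popleft(), queue.popleft()
        queue.append(race(car1, car2))  # race returns the winner
    return queue[0]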
Example #7
def complete_upload_to_s3(self):
    logger.info(
        "simtrace_data: complete_upload_to_s3::: data_state-{}".format(
            self.data_state))

    try:
        if self.data_state == SIMTRACE_DATA_MPU_UPLOAD_IN_PROGRESS:
            # Multi-part upload to s3
            self.data_state = SIMTRACE_DATA_UPLOAD_DONE
            logger.info(
                "simtrace_data: complete_upload_to_s3::Multi-part upload to S3 in progress, upload the last part number-{}, then complete mpu"
                .format(self.mpu_part_number))
            self.upload_mpu_part_to_s3(self.mpu_episodes)

            # Now complete the multi-part upload
            session = boto3.session.Session()
            s3_client = session.client('s3',
                                       region_name=self.aws_region,
                                       config=utils.get_boto_config())
            result = s3_client.complete_multipart_upload(
                Bucket=self.s3_bucket,
                Key=self.s3_object_key,
                UploadId=self.mpu_id,
                MultipartUpload={"Parts": self.mpu_parts})
            self.data_state = SIMTRACE_DATA_UPLOAD_DONE
            logger.info(
                "simtrace_data: complete_upload_to_s3 ::: multi-part-upload done, total raw size={} bytes result={}"
                .format(self.total_upload_size, result))
        else:
            # One-time upload to s3
            if self.data_state == SIMTRACE_DATA_UPLOAD_INIT_DONE:
                self.data_state = SIMTRACE_DATA_UPLOAD_DONE
                logger.info(
                    "simtrace_data: complete_upload_to_s3 ::: write simtrace data to s3"
                )
                session = boto3.session.Session()
                s3_client = session.client(
                    's3',
                    region_name=self.aws_region,
                    config=utils.get_boto_config())

                # Cancel the multipart upload process
                logger.info(
                    "simtrace_data: multi-part upload not required, cancel it before uploading the complete S3 object"
                )
                s3_client.abort_multipart_upload(
                    Bucket=self.s3_bucket,
                    Key=self.s3_object_key,
                    UploadId=self.mpu_id)
                metrics_body = self.simtrace_csv_data.getvalue()
                logger.info(
                    "simtrace_data: complete_upload_to_s3:: write to s3 csv-formatted-data size={} bytes"
                    .format(sys.getsizeof(metrics_body)))
                result = s3_client.put_object(Bucket=self.s3_bucket,
                                              Key=self.s3_object_key,
                                              Body=bytes(
                                                  metrics_body,
                                                  encoding='utf-8'))
                logger.info(
                    "simtrace_data: complete_upload_to_s3:: done writing simtrace total-unformatted-data size={} bytes to s3. result={}"
                    .format(self.total_upload_size, result))
        self.reset_mpu_part(self.mpu_episodes)
    except Exception as e:
        logger.error(
            "simtrace_data: complete_upload_to_s3:: exception-{}".format(e))
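The abort_multipart_upload call above matters because S3 keeps charging for the parts of an unfinished multipart upload until it is aborted. A sketch of a cleanup helper for any uploads left behind (abort_stale_uploads is an illustrative name; the boto3 calls are real API):

import boto3

def abort_stale_uploads(bucket, prefix, region):
    # Illustrative cleanup: abort every in-progress multipart upload
    # under the given prefix so orphaned parts stop accruing storage.
    s3 = boto3.session.Session().client('s3', region_name=region)
    resp = s3.list_multipart_uploads(Bucket=bucket, Prefix=prefix)
    for upload in resp.get("Uploads", []):
        s3.abort_multipart_upload(Bucket=bucket,
                                  Key=upload["Key"],
                                  UploadId=upload["UploadId"])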
Example #8
def main():
    """ Main function for downloading yaml params """
    try:
        # parse argument
        s3_region = sys.argv[1]
        s3_bucket = sys.argv[2]
        s3_prefix = sys.argv[3]
        s3_yaml_name = sys.argv[4]
        launch_name = sys.argv[5]

        # create boto3 session/client and download yaml/json file
        session = boto3.session.Session()

        s3_endpoint_url = os.environ.get("S3_ENDPOINT_URL", None)

        if s3_endpoint_url is not None:
            LOG.info('Endpoint URL {}'.format(s3_endpoint_url))
            rospy.set_param('S3_ENDPOINT_URL', s3_endpoint_url)
        else:
            # create boto3 session/client and download yaml/json file
            ec2_client = session.client('ec2', s3_region)
            LOG.info('Checking internet connection...')
            response = ec2_client.describe_vpcs()
            if not response['Vpcs']:
                log_and_exit("No VPC attached to instance",
                             SIMAPP_SIMULATION_WORKER_EXCEPTION,
                             SIMAPP_EVENT_ERROR_CODE_500)
            LOG.info('Verified internet connection')

        s3_client = session.client('s3',
                                   region_name=s3_region,
                                   endpoint_url=s3_endpoint_url,
                                   config=get_boto_config())

        yaml_key = os.path.normpath(os.path.join(s3_prefix, s3_yaml_name))
        local_yaml_path = os.path.abspath(
            os.path.join(os.getcwd(), s3_yaml_name))
        s3_client.download_file(Bucket=s3_bucket,
                                Key=yaml_key,
                                Filename=local_yaml_path)
        # Get values passed in yaml files. Default values are for backward compatibility and for single racecar racing
        default_yaml_values = {
            RACE_TYPE_YAML_KEY: TIME_TRIAL_RACE_TYPE,
            MODEL_S3_BUCKET_YAML_KEY: s3_bucket,
            MODEL_S3_PREFIX_YAML_KEY: s3_prefix,
            CAR_COLOR_YAML_KEY: DEFAULT_COLOR,
            MODEL_METADATA_FILE_S3_YAML_KEY: None
        }
        yaml_dict = get_yaml_dict(local_yaml_path)
        yaml_values = get_yaml_values(yaml_dict, default_yaml_values)

        # Forcing the yaml parameter to list
        force_list_params = [
            MODEL_METADATA_FILE_S3_YAML_KEY, MODEL_S3_BUCKET_YAML_KEY,
            MODEL_S3_PREFIX_YAML_KEY, CAR_COLOR_YAML_KEY
        ]

        for params in force_list_params:
            yaml_values[params] = force_list(yaml_values[params])

        # Populate the model_metadata_s3_key values to handle both training and evaluation for all race_formats
        if None in yaml_values[MODEL_METADATA_FILE_S3_YAML_KEY]:
            # MODEL_METADATA_FILE_S3_KEY not passed as part of yaml file ==> This happens during evaluation
            # Assume model_metadata.json is present in the s3_prefix/model/ folder
            yaml_values[MODEL_METADATA_FILE_S3_YAML_KEY] = list()
            for s3_prefix in yaml_values[MODEL_S3_PREFIX_YAML_KEY]:
                yaml_values[MODEL_METADATA_FILE_S3_YAML_KEY].append(
                    os.path.join(s3_prefix, 'model/model_metadata.json'))

        # Set the multicar flag if it's a head-to-model race type
        multicar = yaml_values[RACE_TYPE_YAML_KEY] == HEAD_TO_MODEL_RACE_TYPE
        # Validate the yaml values
        validate_yaml_values(yaml_values, multicar)
        # List of racecar names that should include second camera while launching
        racecars_with_stereo_cameras = list()

        # List of racecar names that should include lidar while launching
        racecars_with_lidars = list()

        # List of SimApp versions
        simapp_versions = list()

        for agent_index, model_s3_bucket in enumerate(
                yaml_values[MODEL_S3_BUCKET_YAML_KEY]):

            racecar_name = 'racecar_' + str(agent_index) if len(
                yaml_values[MODEL_S3_BUCKET_YAML_KEY]) > 1 else 'racecar'
            # Make a local folder with the racecar name to download the model_metadata.json
            if not os.path.exists(os.path.join(os.getcwd(), racecar_name)):
                os.makedirs(os.path.join(os.getcwd(), racecar_name))
            local_model_metadata_path = os.path.abspath(
                os.path.join(os.path.join(os.getcwd(), racecar_name),
                             'model_metadata.json'))
            json_key = yaml_values[MODEL_METADATA_FILE_S3_YAML_KEY][
                agent_index]
            json_key = json_key.replace('s3://{}/'.format(model_s3_bucket), '')
            s3_client.download_file(Bucket=model_s3_bucket,
                                    Key=json_key,
                                    Filename=local_model_metadata_path)
            sensors, _, simapp_version = utils_parse_model_metadata.parse_model_metadata(
                local_model_metadata_path)
            simapp_versions.append(simapp_version)
            if Input.STEREO.value in sensors:
                racecars_with_stereo_cameras.append(racecar_name)
            if Input.LIDAR.value in sensors or Input.SECTOR_LIDAR.value in sensors:
                racecars_with_lidars.append(racecar_name)

        cmd = [
            ''.join(("roslaunch deepracer_simulation_environment {} ".format(
                launch_name), "local_yaml_path:={} ".format(local_yaml_path),
                     "racecars_with_stereo_cameras:={} ".format(
                         ','.join(racecars_with_stereo_cameras)),
                     "racecars_with_lidars:={} multicar:={} ".format(
                         ','.join(racecars_with_lidars), multicar),
                     "car_colors:={} simapp_versions:={}".format(
                         ','.join(yaml_values[CAR_COLOR_YAML_KEY]),
                         ','.join(simapp_versions))))
        ]
        Popen(cmd, shell=True, executable="/bin/bash")

    except botocore.exceptions.ClientError as ex:
        log_and_exit(
            "Download params and launch of agent node failed: s3_bucket: {}, yaml_key: {}, {}"
            .format(s3_bucket, yaml_key, ex),
            SIMAPP_SIMULATION_WORKER_EXCEPTION, SIMAPP_EVENT_ERROR_CODE_400)
    except botocore.exceptions.EndpointConnectionError:
        log_and_exit("No Internet connection or s3 service unavailable",
                     SIMAPP_SIMULATION_WORKER_EXCEPTION,
                     SIMAPP_EVENT_ERROR_CODE_500)
    except Exception as ex:
        log_and_exit(
            "Download params and launch of agent node failed: s3_bucket: {}, yaml_key: {}, {}"
            .format(s3_bucket, yaml_key, ex),
            SIMAPP_SIMULATION_WORKER_EXCEPTION, SIMAPP_EVENT_ERROR_CODE_500)
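force_list is used throughout these examples to normalize scalar yaml values into per-agent lists; its definition is not shown here, but a minimal sketch consistent with that usage would be:

def force_list(val):
    # Hypothetical sketch: wrap scalars (including None) in a list so
    # downstream code can always index per-agent values uniformly.
    return val if isinstance(val, list) else [val]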
Example #9
def _get_client(self):
    session = boto3.session.Session()
    return session.client('s3',
                          region_name=self.params.aws_region,
                          config=get_boto_config())
def main():
    """ Main function for tournament"""
    try:
        # parse argument
        s3_region = sys.argv[1]
        s3_bucket = sys.argv[2]
        s3_prefix = sys.argv[3]
        s3_yaml_name = sys.argv[4]

        # create boto3 session/client and download yaml/json file
        session = boto3.session.Session()
        s3_endpoint_url = os.environ.get("S3_ENDPOINT_URL", None)
        s3_client = session.client('s3',
                                   region_name=s3_region,
                                   endpoint_url=s3_endpoint_url,
                                   config=get_boto_config())

        # Intermediate tournament files
        queue_pickle_name = 'tournament_candidate_queue.pkl'
        queue_pickle_s3_key = os.path.normpath(
            os.path.join(s3_prefix, queue_pickle_name))
        local_queue_pickle_path = os.path.abspath(
            os.path.join(os.getcwd(), queue_pickle_name))

        report_pickle_name = 'tournament_report.pkl'
        report_pickle_s3_key = os.path.normpath(
            os.path.join(s3_prefix, report_pickle_name))
        local_report_pickle_path = os.path.abspath(
            os.path.join(os.getcwd(), report_pickle_name))

        final_report_name = 'tournament_report.json'
        final_report_s3_key = os.path.normpath(
            os.path.join(s3_prefix, final_report_name))

        try:
            s3_client.download_file(Bucket=s3_bucket,
                                    Key=queue_pickle_s3_key,
                                    Filename=local_queue_pickle_path)
            s3_client.download_file(Bucket=s3_bucket,
                                    Key=report_pickle_s3_key,
                                    Filename=local_report_pickle_path)
        except Exception:
            # Intermediate files may not exist yet (e.g. on the first run).
            pass

        # download yaml file
        yaml_file = YamlFile(
            agent_type=AgentType.TOURNAMENT.value,
            bucket=s3_bucket,
            s3_key=get_s3_key(s3_prefix, s3_yaml_name),
            region_name=s3_region,
            s3_endpoint_url=s3_endpoint_url,
            local_path=YAML_LOCAL_PATH_FORMAT.format(s3_yaml_name))

        yaml_dict = yaml_file.get_yaml_values()

        if os.path.exists(local_queue_pickle_path):
            with open(local_queue_pickle_path, 'rb') as f:
                tournament_candidate_queue = pickle.load(f)
            with open(local_report_pickle_path, 'rb') as f:
                tournament_report = pickle.load(f)
            logger.info('tournament_candidate_queue loaded from existing file')
        else:
            logger.info('tournament_candidate_queue initialized')
            tournament_candidate_queue = deque()
            for agent_idx, _ in enumerate(
                    yaml_dict[YamlKey.MODEL_S3_BUCKET_YAML_KEY.value]):
                tournament_candidate_queue.append((
                    yaml_dict[YamlKey.MODEL_S3_BUCKET_YAML_KEY.value][agent_idx],
                    yaml_dict[YamlKey.MODEL_S3_PREFIX_YAML_KEY.value][agent_idx],
                    yaml_dict[YamlKey.MODEL_METADATA_FILE_S3_YAML_KEY.value][agent_idx],
                    yaml_dict[YamlKey.METRICS_S3_BUCKET_YAML_KEY.value][agent_idx],
                    yaml_dict[YamlKey.METRICS_S3_PREFIX_YAML_KEY.value][agent_idx],
                    yaml_dict[YamlKey.SIMTRACE_S3_BUCKET_YAML_KEY.value][agent_idx],
                    yaml_dict[YamlKey.SIMTRACE_S3_PREFIX_YAML_KEY.value][agent_idx],
                    yaml_dict[YamlKey.MP4_S3_BUCKET_YAML_KEY.value][agent_idx],
                    yaml_dict[YamlKey.MP4_S3_PREFIX_YAML_KEY.value][agent_idx],
                    yaml_dict[YamlKey.DISPLAY_NAME_YAML_KEY.value][agent_idx],
                    # TODO: Deprecate the DISPLAY_NAME and use only the RACER_NAME without if else check
                    "" if None in yaml_dict.get(YamlKey.RACER_NAME_YAML_KEY.value, [None]) \
                        else yaml_dict[YamlKey.RACER_NAME_YAML_KEY.value][agent_idx],
                    yaml_dict[YamlKey.BODY_SHELL_TYPE_YAML_KEY.value][agent_idx]
                ))
            tournament_report = {"race_results": []}

        race_idx = len(tournament_report["race_results"])
        while len(tournament_candidate_queue) > 1:
            car1 = tournament_candidate_queue.popleft()
            car2 = tournament_candidate_queue.popleft()
            (car1_model_s3_bucket, car1_s3_prefix, car1_model_metadata,
             car1_metrics_bucket, car1_metrics_s3_key, car1_simtrace_bucket,
             car1_simtrace_prefix, car1_mp4_bucket, car1_mp4_prefix,
             car1_display_name, car1_racer_name, car1_body_shell_type) = car1
            (car2_model_s3_bucket, car2_s3_prefix, car2_model_metadata,
             car2_metrics_bucket, car2_metrics_s3_key, car2_simtrace_bucket,
             car2_simtrace_prefix, car2_mp4_bucket, car2_mp4_prefix,
             car2_display_name, car2_racer_name, car2_body_shell_type) = car2

            race_yaml_dict = generate_race_yaml(yaml_dict=yaml_dict,
                                                car1=car1,
                                                car2=car2,
                                                race_idx=race_idx)

            race_model_s3_buckets = [
                car1_model_s3_bucket, car2_model_s3_bucket
            ]
            race_model_metadatas = [car1_model_metadata, car2_model_metadata]
            body_shell_types = [car1_body_shell_type, car2_body_shell_type]

            # List of directories created
            dirs_to_delete = list()
            yaml_dir = os.path.abspath(os.path.join(os.getcwd(),
                                                    str(race_idx)))
            os.makedirs(yaml_dir)

            dirs_to_delete.append(yaml_dir)
            race_yaml_path = os.path.abspath(
                os.path.join(yaml_dir, 'evaluation_params.yaml'))
            with open(race_yaml_path, 'w') as race_yaml_file:
                yaml.dump(race_yaml_dict, race_yaml_file)

            # List of racecar names that should include second camera while launching
            racecars_with_stereo_cameras = list()
            # List of racecar names that should include lidar while launching
            racecars_with_lidars = list()
            # List of SimApp versions
            simapp_versions = list()
            for agent_index, model_s3_bucket in enumerate(
                    race_model_s3_buckets):
                racecar_name = 'racecar_' + str(agent_index)
                json_key = race_model_metadatas[agent_index]
                # download model metadata
                try:
                    model_metadata = ModelMetadata(
                        bucket=model_s3_bucket,
                        s3_key=json_key,
                        region_name=s3_region,
                        s3_endpoint_url=s3_endpoint_url,
                        local_path=MODEL_METADATA_LOCAL_PATH_FORMAT.format(
                            racecar_name))
                    dirs_to_delete.append(model_metadata.local_dir)
                except Exception as e:
                    log_and_exit(
                        "Failed to download model_metadata file: s3_bucket: {}, s3_key: {}, {}"
                        .format(model_s3_bucket, json_key,
                                e), SIMAPP_SIMULATION_WORKER_EXCEPTION,
                        SIMAPP_EVENT_ERROR_CODE_500)
                sensors, _, simapp_version = model_metadata.get_model_metadata_info(
                )
                simapp_versions.append(str(simapp_version))
                if Input.STEREO.value in sensors:
                    racecars_with_stereo_cameras.append(racecar_name)
                if Input.LIDAR.value in sensors or Input.SECTOR_LIDAR.value in sensors:
                    racecars_with_lidars.append(racecar_name)

            cmd = [
                os.path.join(os.path.dirname(os.path.abspath(__file__)),
                             "tournament_race_node.py"),
                str(race_idx), race_yaml_path,
                ','.join(racecars_with_stereo_cameras),
                ','.join(racecars_with_lidars), ','.join(simapp_versions),
                ','.join(body_shell_types)
            ]
            try:
                return_code, _, stderr = run_cmd(cmd_args=cmd,
                                                 shell=False,
                                                 stdout=None,
                                                 stderr=None)
            except KeyboardInterrupt:
                logger.info(
                    "KeyboardInterrupt raised; SimApp must have faulted. Exiting..."
                )
                return

            # Retrieve winner and append tournament report
            with open('race_report.pkl', 'rb') as f:
                race_report = pickle.load(f)
            race_report['race_idx'] = race_idx
            winner = car1 if race_report[
                'winner'] == car1_display_name else car2
            logger.info("race {}'s winner: {}".format(race_idx,
                                                      race_report['winner']))

            tournament_candidate_queue.append(winner)
            tournament_report["race_results"].append(race_report)

            # Clean up directories created
            for dir_to_delete in dirs_to_delete:
                shutil.rmtree(dir_to_delete, ignore_errors=True)
            race_idx += 1

            s3_extra_args = get_s3_kms_extra_args()
            # Persist latest queue and report to use after job restarts.
            with open(local_queue_pickle_path, 'wb') as f:
                pickle.dump(tournament_candidate_queue, f, protocol=2)
            s3_client.upload_file(Filename=local_queue_pickle_path,
                                  Bucket=s3_bucket,
                                  Key=queue_pickle_s3_key,
                                  ExtraArgs=s3_extra_args)

            with open(local_report_pickle_path, 'wb') as f:
                pickle.dump(tournament_report, f, protocol=2)
            s3_client.upload_file(Filename=local_report_pickle_path,
                                  Bucket=s3_bucket,
                                  Key=report_pickle_s3_key,
                                  ExtraArgs=s3_extra_args)

            # If more than one candidate remains, restart the simulation job;
            # otherwise the tournament is finished: persist the final report
            # and end the job.
            if len(tournament_candidate_queue) > 1:
                restart_simulation_job(
                    os.environ.get('AWS_ROBOMAKER_SIMULATION_JOB_ARN'),
                    s3_region)
                break
            else:
                # Persist final tournament report in json format
                # and terminate the job by canceling it
                s3_client.put_object(Bucket=s3_bucket,
                                     Key=final_report_s3_key,
                                     Body=json.dumps(tournament_report),
                                     **s3_extra_args)

                cancel_simulation_job(
                    os.environ.get('AWS_ROBOMAKER_SIMULATION_JOB_ARN'),
                    s3_region)
    except ValueError as ex:
        log_and_exit("User modified model_metadata.json: {}".format(ex),
                     SIMAPP_SIMULATION_WORKER_EXCEPTION,
                     SIMAPP_EVENT_ERROR_CODE_400)
    except Exception as e:
        log_and_exit("Tournament node failed: {}".format(e),
                     SIMAPP_SIMULATION_WORKER_EXCEPTION,
                     SIMAPP_EVENT_ERROR_CODE_500)
def main():
    """ Main function for downloading yaml params """

    # parse argument
    s3_region = sys.argv[1]
    s3_bucket = sys.argv[2]
    s3_prefix = sys.argv[3]
    s3_yaml_name = sys.argv[4]
    launch_name = sys.argv[5]
    yaml_key = os.path.normpath(os.path.join(s3_prefix, s3_yaml_name))

    try:
        # create boto3 session/client and download yaml/json file
        session = boto3.session.Session()

        s3_endpoint_url = os.environ.get("S3_ENDPOINT_URL", None)

        if s3_endpoint_url is not None:
            LOG.info('Endpoint URL {}'.format(s3_endpoint_url))
            rospy.set_param('S3_ENDPOINT_URL', s3_endpoint_url)

        s3_client = session.client('s3',
                                   region_name=s3_region,
                                   endpoint_url=s3_endpoint_url,
                                   config=get_boto_config())

        local_yaml_path = os.path.abspath(
            os.path.join(os.getcwd(), s3_yaml_name))
        s3_client.download_file(Bucket=s3_bucket,
                                Key=yaml_key,
                                Filename=local_yaml_path)
        # Get values passed in yaml files. Default values are for backward compatibility and for single racecar racing
        default_yaml_values = {
            RACE_TYPE_YAML_KEY: TIME_TRIAL_RACE_TYPE,
            MODEL_S3_BUCKET_YAML_KEY: s3_bucket,
            MODEL_S3_PREFIX_YAML_KEY: s3_prefix,
            CAR_COLOR_YAML_KEY: DEFAULT_COLOR,
            BODY_SHELL_TYPE_YAML_KEY: None,
            MODEL_METADATA_FILE_S3_YAML_KEY: None,
            RACER_NAME_YAML_KEY: None
        }
        yaml_dict = get_yaml_dict(local_yaml_path)
        yaml_values = get_yaml_values(yaml_dict, default_yaml_values)

        # Forcing the yaml parameter to list
        force_list_params = [
            MODEL_METADATA_FILE_S3_YAML_KEY, MODEL_S3_BUCKET_YAML_KEY,
            MODEL_S3_PREFIX_YAML_KEY, CAR_COLOR_YAML_KEY,
            BODY_SHELL_TYPE_YAML_KEY, RACER_NAME_YAML_KEY
        ]

        for params in force_list_params:
            yaml_values[params] = force_list(yaml_values[params])

        # Populate the model_metadata_s3_key values to handle both training and evaluation for all race_formats
        if None in yaml_values[MODEL_METADATA_FILE_S3_YAML_KEY]:
            # MODEL_METADATA_FILE_S3_KEY not passed as part of yaml file ==> This happens during evaluation
            # Assume model_metadata.json is present in the s3_prefix/model/ folder
            yaml_values[MODEL_METADATA_FILE_S3_YAML_KEY] = list()
            for s3_prefix in yaml_values[MODEL_S3_PREFIX_YAML_KEY]:
                yaml_values[MODEL_METADATA_FILE_S3_YAML_KEY].append(
                    os.path.join(s3_prefix, 'model/model_metadata.json'))

        # Set multicar value if there is more than one value in MODEL_S3_BUCKET_YAML_KEY.
        multicar = len(yaml_values[MODEL_S3_BUCKET_YAML_KEY]) > 1

        # Set f1 as true if RACE_TYPE is F1
        is_f1 = yaml_values[RACE_TYPE_YAML_KEY] == F1_RACE_TYPE

        # Validate the yaml values
        validate_yaml_values(yaml_values, multicar)
        # List of racecar names that should include second camera while launching
        racecars_with_stereo_cameras = list()

        # List of racecar names that should include lidar while launching
        racecars_with_lidars = list()

        # List of SimApp versions
        simapp_versions = list()
        # List of body shell types
        body_shell_types = yaml_values[BODY_SHELL_TYPE_YAML_KEY]
        racer_names = yaml_values[RACER_NAME_YAML_KEY]
        if None in body_shell_types:
            # use default shells
            if None in racer_names:
                body_shell_types = [BodyShellType.DEFAULT.value] * len(
                    yaml_values[MODEL_S3_BUCKET_YAML_KEY])
            # Use default shells for regular users and the F1 shell for
            # racers in F1_SHELL_USERS_LIST
            else:
                body_shell_types = [
                    BodyShellType.F1_2021.value if racer_alias
                    in F1_SHELL_USERS_LIST else BodyShellType.DEFAULT.value
                    for racer_alias in yaml_values[RACER_NAME_YAML_KEY]
                ]
                yaml_dict[BODY_SHELL_TYPE_YAML_KEY] = body_shell_types
                # override local yaml file with updated BODY_SHELL_TYPE
                with open(local_yaml_path, 'w') as yaml_file:
                    yaml.dump(yaml_dict, yaml_file)
        for agent_index, model_s3_bucket in enumerate(
                yaml_values[MODEL_S3_BUCKET_YAML_KEY]):

            racecar_name = 'racecar_' + str(agent_index) if len(
                yaml_values[MODEL_S3_BUCKET_YAML_KEY]) > 1 else 'racecar'
            # Make a local folder with the racecar name to download the model_metadata.json
            if not os.path.exists(os.path.join(os.getcwd(), racecar_name)):
                os.makedirs(os.path.join(os.getcwd(), racecar_name))
            local_model_metadata_path = os.path.abspath(
                os.path.join(os.path.join(os.getcwd(), racecar_name),
                             'model_metadata.json'))
            json_key = yaml_values[MODEL_METADATA_FILE_S3_YAML_KEY][
                agent_index]
            json_key = json_key.replace('s3://{}/'.format(model_s3_bucket), '')
            s3_client.download_file(Bucket=model_s3_bucket,
                                    Key=json_key,
                                    Filename=local_model_metadata_path)
            sensors, _, simapp_version = utils_parse_model_metadata.parse_model_metadata(
                local_model_metadata_path)
            simapp_versions.append(simapp_version)
            if Input.STEREO.value in sensors:
                racecars_with_stereo_cameras.append(racecar_name)
            if Input.LIDAR.value in sensors or Input.SECTOR_LIDAR.value in sensors:
                racecars_with_lidars.append(racecar_name)

        cmd = [
            ''.join(("roslaunch deepracer_simulation_environment {} ".format(
                launch_name), "local_yaml_path:={} ".format(local_yaml_path),
                     "racecars_with_stereo_cameras:={} ".format(
                         ','.join(racecars_with_stereo_cameras)),
                     "racecars_with_lidars:={} ".format(
                         ','.join(racecars_with_lidars)),
                     "multicar:={} ".format(multicar),
                     "body_shell_types:={} ".format(
                         ','.join(body_shell_types)),
                     "simapp_versions:={} ".format(','.join(simapp_versions)),
                     "f1:={}".format(is_f1)))
        ]
        Popen(cmd, shell=True, executable="/bin/bash")

    except botocore.exceptions.ClientError as ex:
        log_and_exit(
            "Download params and launch of agent node failed: s3_bucket: {}, yaml_key: {}, {}"
            .format(s3_bucket, yaml_key, ex),
            SIMAPP_SIMULATION_WORKER_EXCEPTION, SIMAPP_EVENT_ERROR_CODE_400)
    except botocore.exceptions.EndpointConnectionError:
        log_and_exit("No Internet connection or s3 service unavailable",
                     SIMAPP_SIMULATION_WORKER_EXCEPTION,
                     SIMAPP_EVENT_ERROR_CODE_500)
    except Exception as ex:
        log_and_exit(
            "Download params and launch of agent node failed: s3_bucket: {}, yaml_key: {}, {}"
            .format(s3_bucket, yaml_key, ex),
            SIMAPP_SIMULATION_WORKER_EXCEPTION, SIMAPP_EVENT_ERROR_CODE_500)
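restart_simulation_job and cancel_simulation_job are not defined in these examples; assuming they are thin wrappers over the RoboMaker API, they would look roughly like this sketch (the wrapper signatures are assumptions; only the boto3 calls are real API):

import boto3

def restart_simulation_job(job_arn, region):
    # Assumed wrapper: restarts the running simulation job in place.
    robomaker = boto3.session.Session().client('robomaker', region_name=region)
    robomaker.restart_simulation_job(job=job_arn)

def cancel_simulation_job(job_arn, region):
    # Assumed wrapper: canceling the job terminates the SimApp container.
    robomaker = boto3.session.Session().client('robomaker', region_name=region)
    robomaker.cancel_simulation_job(job=job_arn)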