def get_build_results_if_exists(s3_helper: S3Helper, s3_prefix: str) -> Optional[List[str]]:
    """Return the list of S3 objects under *s3_prefix*, or None on failure.

    Best-effort lookup: any listing error is logged at INFO level and
    swallowed, so callers can treat a failed listing the same way as an
    empty one.
    """
    try:
        return s3_helper.list_prefix(s3_prefix)
    except Exception as ex:
        # Listing problems are non-fatal here; the caller decides what
        # a missing result means.
        logging.info("Got exception %s listing %s", ex, s3_prefix)
        return None
def check_for_success_run(
    s3_helper: S3Helper,
    s3_prefix: str,
    build_name: str,
    build_config: BuildConfig,
) -> None:
    """Look for artifacts of an earlier successful run under *s3_prefix*.

    If a build log (and, normally, build artifacts) are already uploaded,
    re-create the JSON artifact from them and terminate the process:
    exit code 0 when the previous build succeeded, 1 when it is recorded
    as failed. Returns normally (without exiting) when nothing usable is
    found and the build must be rerun.
    """
    logged_prefix = os.path.join(S3_BUILDS_BUCKET, s3_prefix)
    logging.info("Checking for artifacts in %s", logged_prefix)
    try:
        # TODO: theoretically, it would miss performance artifact for pr==0,
        # but luckily we rerun only really failed tasks now, so we're safe
        build_results = s3_helper.list_prefix(s3_prefix)
    except Exception as ex:
        # Listing failure is treated the same as "nothing found": rerun.
        logging.info("Got exception while listing %s: %s\nRerun", logged_prefix, ex)
        return
    # Covers both None and an empty listing in one idiomatic check.
    if not build_results:
        logging.info("Nothing found in %s, rerun", logged_prefix)
        return

    logging.info("Some build results found:\n%s", build_results)
    build_urls = []
    log_url = ""
    for url in build_results:
        # S3 keys may contain characters that must be percent-encoded
        # before being embedded in a download URL.
        url_escaped = url.replace("+", "%2B").replace(" ", "%20")
        full_url = f"{S3_DOWNLOAD}/{S3_BUILDS_BUCKET}/{url_escaped}"
        if BUILD_LOG_NAME in url:
            log_url = full_url
        else:
            build_urls.append(full_url)
    if not log_url:
        # log is uploaded the last, so if there's no log we need to rerun the build
        return

    success = len(build_urls) > 0
    create_json_artifact(
        TEMP_PATH,
        build_name,
        log_url,
        build_urls,
        build_config,
        0,
        success,
    )
    # Terminate the job: fail it if the recorded run was not successful.
    # sys.exit never returns, so no `else` branch is needed.
    if not success:
        sys.exit(1)
    sys.exit(0)