import base64
import hashlib  # used by the FAST_CODE_SYNC variants further below
import subprocess

# S3_CODE_PATH, timestamp and flatten are module-level names defined elsewhere
# in this module; a hedged sketch of them follows the function below.


def s3_sync_code(config, dry=False):
    # Python 2 variant (note the print statements); the variants below are
    # Python 3 and add FAST_CODE_SYNC support.
    global S3_CODE_PATH
    if S3_CODE_PATH is not None:
        return S3_CODE_PATH
    base = config.AWS_CODE_SYNC_S3_PATH
    has_git = True
    try:
        current_commit = subprocess.check_output(
            ["git", "rev-parse", "HEAD"]).strip()
        clean_state = len(
            subprocess.check_output(["git", "status", "--porcelain"])) == 0
    except subprocess.CalledProcessError as _:
        print "Warning: failed to execute git commands"
        has_git = False
    dir_hash = base64.b64encode(subprocess.check_output(["pwd"]))
    code_path = "%s_%s" % (
        dir_hash,
        (current_commit if clean_state else
         "%s_dirty_%s" % (current_commit, timestamp))
        if has_git else timestamp)
    full_path = "%s/%s" % (base, code_path)
    cache_path = "%s/%s" % (base, dir_hash)
    # Warm the target prefix from the per-directory cache, sync the working
    # tree, then refresh the cache from the freshly synced code.
    cache_cmds = ["aws", "s3", "sync"] + \
        [cache_path, full_path]
    cmds = ["aws", "s3", "sync"] + \
        flatten(["--exclude", "%s" % pattern]
                for pattern in config.CODE_SYNC_IGNORES) + \
        [".", full_path]
    caching_cmds = ["aws", "s3", "sync"] + \
        [full_path, cache_path]
    print cache_cmds, cmds, caching_cmds
    if not dry:
        subprocess.check_call(cache_cmds)
        subprocess.check_call(cmds)
        subprocess.check_call(caching_cmds)
    S3_CODE_PATH = full_path
    return full_path
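# --- Module-level names assumed by the functions in this file ---------------
# The variants above and below reference S3_CODE_PATH, timestamp and flatten
# without defining them. The definitions here are a minimal sketch of
# plausible implementations, inferred only from how the names are used; the
# exact originals elsewhere in the module may differ.
import datetime
import itertools

S3_CODE_PATH = None  # memoizes the uploaded code location across calls
timestamp = datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S")  # assumed format


def flatten(iterable_of_lists):
    # Flattens one level of nesting, e.g. the per-pattern
    # ["--exclude", pattern] pairs into a single flat argument list.
    return list(itertools.chain.from_iterable(iterable_of_lists))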
def s3_sync_code(config, dry=False):
    global S3_CODE_PATH
    if S3_CODE_PATH is not None:
        return S3_CODE_PATH
    base = config.AWS_CODE_SYNC_S3_PATH
    has_git = True
    if config.FAST_CODE_SYNC:
        # Fast path: pack the project into a single tarball and upload it once.
        try:
            current_commit = subprocess.check_output(
                ["git", "rev-parse", "HEAD"]).strip().decode("utf-8")
        except subprocess.CalledProcessError as _:
            print("Warning: failed to execute git commands")
            current_commit = None
        file_name = str(timestamp) + "_" + hashlib.sha224(
            subprocess.check_output(["pwd"]) + str(current_commit).encode()
            + str(timestamp).encode()
        ).hexdigest() + ".tar.gz"
        file_path = "/tmp/" + file_name
        tar_cmd = ["tar", "-zcvf", file_path, "-C", config.PROJECT_PATH]
        for pattern in config.FAST_CODE_SYNC_IGNORES:
            tar_cmd += ["--exclude", pattern]
        tar_cmd += ["-h", "."]
        remote_path = "%s/%s" % (base, file_name)
        upload_cmd = ["aws", "s3", "cp", file_path, remote_path]
        mujoco_key_cmd = [
            "aws", "s3", "sync", config.MUJOCO_KEY_PATH,
            "{}/.mujoco/".format(base)]
        print(" ".join(tar_cmd))
        print(" ".join(upload_cmd))
        print(" ".join(mujoco_key_cmd))
        if not dry:
            subprocess.check_call(tar_cmd)
            subprocess.check_call(upload_cmd)
            subprocess.check_call(mujoco_key_cmd)
        S3_CODE_PATH = remote_path
        return remote_path
    else:
        # Slow path: copy the working tree file by file, warming a
        # per-directory cache prefix first and refreshing it afterwards.
        try:
            current_commit = subprocess.check_output(
                ["git", "rev-parse", "HEAD"]).strip().decode("utf-8")
            clean_state = len(
                subprocess.check_output(["git", "status", "--porcelain"])) == 0
        except subprocess.CalledProcessError as _:
            print("Warning: failed to execute git commands")
            has_git = False
        dir_hash = base64.b64encode(
            subprocess.check_output(["pwd"])).decode("utf-8")
        code_path = "%s_%s" % (
            dir_hash,
            (current_commit if clean_state else
             "%s_dirty_%s" % (current_commit, timestamp))
            if has_git else timestamp)
        full_path = "%s/%s" % (base, code_path)
        cache_path = "%s/%s" % (base, dir_hash)
        cache_cmds = ["aws", "s3", "cp", "--recursive"] + \
            flatten(["--exclude", "%s" % pattern]
                    for pattern in config.CODE_SYNC_IGNORES) + \
            [cache_path, full_path]
        cmds = ["aws", "s3", "cp", "--recursive"] + \
            flatten(["--exclude", "%s" % pattern]
                    for pattern in config.CODE_SYNC_IGNORES) + \
            [".", full_path]
        caching_cmds = ["aws", "s3", "cp", "--recursive"] + \
            flatten(["--exclude", "%s" % pattern]
                    for pattern in config.CODE_SYNC_IGNORES) + \
            [full_path, cache_path]
        mujoco_key_cmd = [
            "aws", "s3", "sync", config.MUJOCO_KEY_PATH,
            "{}/.mujoco/".format(base)]
        print(cache_cmds, cmds, caching_cmds, mujoco_key_cmd)
        if not dry:
            subprocess.check_call(cache_cmds)
            subprocess.check_call(cmds)
            subprocess.check_call(caching_cmds)
            try:
                subprocess.check_call(mujoco_key_cmd)
            except Exception:
                print('Unable to sync mujoco keys!')
        S3_CODE_PATH = full_path
        return full_path
def s3_sync_code(config, dry=False):
    # Variant of the function above; its FAST_CODE_SYNC path uploads the
    # tarball only and skips the MuJoCo key sync that the previous variant
    # performs.
    global S3_CODE_PATH
    if S3_CODE_PATH is not None:
        return S3_CODE_PATH
    base = config.AWS_CODE_SYNC_S3_PATH
    has_git = True
    if config.FAST_CODE_SYNC:
        try:
            current_commit = subprocess.check_output(
                ["git", "rev-parse", "HEAD"]).strip().decode("utf-8")
        except subprocess.CalledProcessError as _:
            print("Warning: failed to execute git commands")
            current_commit = None
        file_name = str(timestamp) + "_" + hashlib.sha224(
            subprocess.check_output(["pwd"]) + str(current_commit).encode()
            + str(timestamp).encode()).hexdigest() + ".tar.gz"
        file_path = "/tmp/" + file_name
        tar_cmd = ["tar", "-zcvf", file_path, "-C", config.PROJECT_PATH]
        for pattern in config.FAST_CODE_SYNC_IGNORES:
            tar_cmd += ["--exclude", pattern]
        tar_cmd += ["-h", "."]
        remote_path = "%s/%s" % (base, file_name)
        upload_cmd = ["aws", "s3", "cp", file_path, remote_path]
        print(" ".join(tar_cmd))
        print(" ".join(upload_cmd))
        if not dry:
            subprocess.check_call(tar_cmd)
            subprocess.check_call(upload_cmd)
        S3_CODE_PATH = remote_path
        return remote_path
    else:
        try:
            current_commit = subprocess.check_output(
                ["git", "rev-parse", "HEAD"]).strip().decode("utf-8")
            clean_state = len(
                subprocess.check_output(["git", "status", "--porcelain"])) == 0
        except subprocess.CalledProcessError as _:
            print("Warning: failed to execute git commands")
            has_git = False
        dir_hash = base64.b64encode(
            subprocess.check_output(["pwd"])).decode("utf-8")
        code_path = "%s_%s" % (
            dir_hash,
            (current_commit if clean_state else
             "%s_dirty_%s" % (current_commit, timestamp))
            if has_git else timestamp)
        full_path = "%s/%s" % (base, code_path)
        cache_path = "%s/%s" % (base, dir_hash)
        cache_cmds = ["aws", "s3", "cp", "--recursive"] + \
            flatten(["--exclude", "%s" % pattern]
                    for pattern in config.CODE_SYNC_IGNORES) + \
            [cache_path, full_path]
        cmds = ["aws", "s3", "cp", "--recursive"] + \
            flatten(["--exclude", "%s" % pattern]
                    for pattern in config.CODE_SYNC_IGNORES) + \
            [".", full_path]
        caching_cmds = ["aws", "s3", "cp", "--recursive"] + \
            flatten(["--exclude", "%s" % pattern]
                    for pattern in config.CODE_SYNC_IGNORES) + \
            [full_path, cache_path]
        mujoco_key_cmd = [
            "aws", "s3", "sync", config.MUJOCO_KEY_PATH,
            "{}/.mujoco/".format(base)]
        print(cache_cmds, cmds, caching_cmds, mujoco_key_cmd)
        if not dry:
            subprocess.check_call(cache_cmds)
            subprocess.check_call(cmds)
            subprocess.check_call(caching_cmds)
            try:
                subprocess.check_call(mujoco_key_cmd)
            except Exception:
                print('Unable to sync mujoco keys!')
        S3_CODE_PATH = full_path
        return full_path
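# --- Usage sketch (hypothetical) ---------------------------------------------
# A dry run only prints the tar/aws commands that would be executed; nothing is
# uploaded. The config object and every attribute value below are made up for
# illustration; a real setup would normally supply them via a config module.
# Assumes a Unix-like environment where git and pwd are available.
if __name__ == "__main__":
    from types import SimpleNamespace

    demo_config = SimpleNamespace(
        AWS_CODE_SYNC_S3_PATH="s3://my-bucket/code",   # hypothetical bucket/prefix
        FAST_CODE_SYNC=True,
        FAST_CODE_SYNC_IGNORES=["*.log", "data/*"],
        CODE_SYNC_IGNORES=["*.log", "data/*"],
        PROJECT_PATH=".",
        MUJOCO_KEY_PATH="~/.mujoco",                   # only used by some variants
    )
    # dry=True prints the commands without invoking `aws` or `tar`.
    print(s3_sync_code(demo_config, dry=True))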