def download_directory(context, digest_string, directory_path, verify):
    if os.path.exists(directory_path):
        if not os.path.isdir(directory_path) or os.listdir(directory_path):
            click.echo("Error: Invalid value, " +
                       "path=[{}] already exists.".format(directory_path), err=True)
            return

    digest = parse_digest(digest_string)

    try:
        with download(context.channel, instance=context.instance_name) as downloader:
            downloader.download_directory(digest, directory_path)
    except ConnectionError as e:
        click.echo('Error: Downloading directory: {}'.format(e), err=True)
        sys.exit(-1)

    if verify:
        # Re-walk the downloaded tree and compare its root digest with the
        # one we were asked to pull:
        last_directory_node = None
        for node, _, _ in merkle_tree_maker(directory_path):
            if node.DESCRIPTOR is remote_execution_pb2.DirectoryNode.DESCRIPTOR:
                last_directory_node = node
        if last_directory_node.digest != digest:
            click.echo("Error: Failed to verify path=[{}]".format(directory_path),
                       err=True)
            return

    if os.path.isdir(directory_path):
        click.echo("Success: Pulled path=[{}] from digest=[{}/{}]"
                   .format(directory_path, digest.hash, digest.size_bytes))
    else:
        click.echo("Error: Failed pulling path=[{}]".format(directory_path), err=True)
def get_blob(self, digest):
    self.__logger.debug("Getting blob: [{}]".format(digest))

    with download(self.channel, instance=self.instance_name) as downloader:
        blob = downloader.get_blob(digest)

    if blob is not None:
        return io.BytesIO(blob)
    else:
        return None
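# Hedged usage sketch (not from the original module): assuming `storage` is an
# instance of the class defining get_blob() above and `digest` is a
# remote_execution_pb2.Digest, a caller could read the blob bytes like this:
#
#     blob_file = storage.get_blob(digest)
#     if blob_file is not None:
#         data = blob_file.read()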
def __test_download_directory(queue, remote, instance, digests, paths):
    # Open a channel to the remote CAS server:
    channel = grpc.insecure_channel(remote)

    with download(channel, instance) as downloader:
        if len(digests) > 1:
            for digest, path in zip(digests, paths):
                downloader.download_directory(digest, path)
        else:
            downloader.download_directory(digests[0], paths[0])

    queue.put(None)
def __test_download_blob(queue, remote, instance, digests):
    # Open a channel to the remote CAS server:
    channel = grpc.insecure_channel(remote)

    blobs = []
    with download(channel, instance) as downloader:
        if len(digests) > 1:
            blobs.extend(downloader.get_blobs(digests))
        else:
            blobs.append(downloader.get_blob(digests[0]))

    queue.put(blobs)
def __test_download_message(queue, remote, instance, digests, empty_messages):
    # Open a channel to the remote CAS server:
    channel = grpc.insecure_channel(remote)

    messages = []
    with download(channel, instance) as downloader:
        if len(digests) > 1:
            messages = downloader.get_messages(digests, empty_messages)
            messages = [m.SerializeToString() for m in messages]
        else:
            message = downloader.get_message(digests[0], empty_messages[0])
            messages.append(message.SerializeToString())

    queue.put(messages)
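# Hedged sketch (not part of the original module): the `queue` parameter and
# the final `queue.put(...)` in the helpers above suggest they are meant to be
# run in a child process, handing results back through a multiprocessing
# queue. A minimal driver, assuming a reachable CAS server at `remote` and the
# default 'fork' start method on Linux, might look like this:
import multiprocessing


def _run_download_blob_in_subprocess(remote, instance, digests):
    # Spawn the helper in its own process and collect the downloaded blobs
    # from the shared queue once it has finished.
    queue = multiprocessing.Queue()
    process = multiprocessing.Process(target=__test_download_blob,
                                      args=(queue, remote, instance, digests))
    process.start()
    blobs = queue.get()
    process.join()
    return blobs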
def update(context, action_digest_string, action_result_digest_string):
    """Entry-point of the ``bgd action-cache update`` CLI command.

    Note:
        Digest strings are expected to be like: ``{hash}/{size_bytes}``.
    """
    action_digest = parse_digest(action_digest_string)
    if action_digest is None:
        click.echo("Error: Invalid digest string '{}'.".format(action_digest_string),
                   err=True)
        sys.exit(-1)

    action_result_digest = parse_digest(action_result_digest_string)
    if action_result_digest is None:
        click.echo("Error: Invalid digest string '{}'.".format(action_result_digest_string),
                   err=True)
        sys.exit(-1)

    # We have to download the ActionResult message from CAS first...
    with download(context.channel, instance=context.instance_name) as downloader:
        try:
            action_result = downloader.get_message(
                action_result_digest, remote_execution_pb2.ActionResult())
        except ConnectionError as e:
            click.echo('Error: Fetching ActionResult from CAS: {}'.format(e), err=True)
            sys.exit(-1)

    # ... and only then can we update the action cache for the given digest:
    with query(context.channel, instance=context.instance_name) as action_cache:
        try:
            action_result = action_cache.update(action_digest, action_result)
        except ConnectionError as e:
            click.echo('Error: Uploading to ActionCache: {}'.format(e), err=True)
            sys.exit(-1)

    if action_result is None:
        click.echo("Error: Failed updating cache result for action=[{}/{}]."
                   .format(action_digest.hash, action_digest.size_bytes), err=True)
        sys.exit(-1)
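# For reference, the "{hash}/{size_bytes}" digest strings documented above can
# be parsed along these lines. This is a hypothetical sketch; the actual
# parse_digest() used by these commands lives elsewhere in the codebase and
# may differ in details.
def _parse_digest_sketch(digest_string):
    """Hypothetical helper: return a Digest message, or None if malformed."""
    if digest_string.count('/') != 1:
        return None
    digest_hash, size_bytes = digest_string.split('/')
    if not digest_hash or not size_bytes.isdigit():
        return None
    return remote_execution_pb2.Digest(hash=digest_hash,
                                       size_bytes=int(size_bytes))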
def download_file(context, digest_path_list, verify):
    # Downloading files:
    downloaded_files = {}
    try:
        with download(context.channel, instance=context.instance_name) as downloader:
            for (digest_string, file_path) in zip(digest_path_list[0::2],
                                                  digest_path_list[1::2]):
                if os.path.exists(file_path):
                    click.echo("Error: Invalid value, " +
                               "path=[{}] already exists.".format(file_path), err=True)
                    continue

                digest = parse_digest(digest_string)

                downloader.download_file(digest, file_path)
                downloaded_files[file_path] = digest

    # FileNotFoundError must be handled before the generic Exception handler,
    # otherwise its branch would never be reached:
    except FileNotFoundError:
        click.echo('Error: Blob not found in CAS', err=True)
        sys.exit(-1)

    except Exception as e:
        click.echo('Error: Downloading file: {}'.format(e), err=True)
        sys.exit(-1)

    # Verifying:
    for (file_path, digest) in downloaded_files.items():
        if verify:
            file_digest = create_digest(read_file(file_path))
            if file_digest != digest:
                click.echo("Error: Failed to verify path=[{}]".format(file_path),
                           err=True)
                continue

        if os.path.isfile(file_path):
            click.echo("Success: Pulled path=[{}] from digest=[{}/{}]"
                       .format(file_path, digest.hash, digest.size_bytes))
        else:
            click.echo('Error: Failed pulling "{}"'.format(file_path), err=True)
def work_host_tools(lease, context, event):
    """Executes a lease for a build action, using host tools.
    """
    instance_name = context.parent
    logger = logging.getLogger(__name__)

    action_digest = remote_execution_pb2.Digest()
    action_result = remote_execution_pb2.ActionResult()

    lease.payload.Unpack(action_digest)
    lease.result.Clear()

    action_result.execution_metadata.worker = get_hostname()

    with tempfile.TemporaryDirectory() as temp_directory:
        with download(context.cas_channel, instance=instance_name) as downloader:
            action = downloader.get_message(action_digest,
                                            remote_execution_pb2.Action())

            assert action.command_digest.hash

            command = downloader.get_message(action.command_digest,
                                             remote_execution_pb2.Command())

            action_result.execution_metadata.input_fetch_start_timestamp.GetCurrentTime()

            downloader.download_directory(action.input_root_digest, temp_directory)

        logger.debug("Command digest: [{}/{}]"
                     .format(action.command_digest.hash, action.command_digest.size_bytes))
        logger.debug("Input root digest: [{}/{}]"
                     .format(action.input_root_digest.hash, action.input_root_digest.size_bytes))

        action_result.execution_metadata.input_fetch_completed_timestamp.GetCurrentTime()

        environment = os.environ.copy()
        for variable in command.environment_variables:
            if variable.name not in ['PWD']:
                environment[variable.name] = variable.value

        command_line = []
        for argument in command.arguments:
            command_line.append(argument.strip())

        working_directory = None
        if command.working_directory:
            working_directory = os.path.join(temp_directory,
                                             command.working_directory)
            os.makedirs(working_directory, exist_ok=True)
        else:
            working_directory = temp_directory

        # Ensure that the directory structure for output files and
        # directories exists:
        for output_path in itertools.chain(command.output_files,
                                           command.output_directories):
            parent_path = os.path.join(working_directory,
                                       os.path.dirname(output_path))
            os.makedirs(parent_path, exist_ok=True)

        logger.info("Starting execution: [{}...]".format(command.arguments[0]))

        action_result.execution_metadata.execution_start_timestamp.GetCurrentTime()

        process = subprocess.Popen(command_line,
                                   cwd=working_directory,
                                   env=environment,
                                   stdin=subprocess.PIPE,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE)
        stdout, stderr = process.communicate()
        returncode = process.returncode

        action_result.execution_metadata.execution_completed_timestamp.GetCurrentTime()

        action_result.exit_code = returncode

        logger.info("Execution finished with code: [{}]".format(returncode))

        action_result.execution_metadata.output_upload_start_timestamp.GetCurrentTime()

        with upload(context.cas_channel, instance=instance_name) as uploader:
            for output_path in itertools.chain(command.output_files,
                                               command.output_directories):
                file_path = os.path.join(working_directory, output_path)
                # Missing outputs should simply be omitted in ActionResult:
                if not os.path.exists(file_path):
                    continue

                if os.path.isdir(file_path):
                    tree_digest = uploader.upload_tree(file_path, queue=True)
                    output_directory = output_directory_maker(file_path, working_directory,
                                                              tree_digest)
                    action_result.output_directories.append(output_directory)

                    logger.debug("Output tree digest: [{}/{}]"
                                 .format(tree_digest.hash, tree_digest.size_bytes))
                else:
                    file_digest = uploader.upload_file(file_path, queue=True)
                    output_file = output_file_maker(file_path, working_directory,
                                                    file_digest)
                    action_result.output_files.append(output_file)

                    logger.debug("Output file digest: [{}/{}]"
                                 .format(file_digest.hash, file_digest.size_bytes))

            # Inline stdout/stderr only if the resulting ActionResult stays
            # below the maximum request size; otherwise upload them as blobs
            # and reference them by digest:
            if action_result.ByteSize() + len(stdout) > MAX_REQUEST_SIZE:
                stdout_digest = uploader.put_blob(stdout)
                action_result.stdout_digest.CopyFrom(stdout_digest)
            else:
                action_result.stdout_raw = stdout

            if action_result.ByteSize() + len(stderr) > MAX_REQUEST_SIZE:
                stderr_digest = uploader.put_blob(stderr)
                action_result.stderr_digest.CopyFrom(stderr_digest)
            else:
                action_result.stderr_raw = stderr

        action_result.execution_metadata.output_upload_completed_timestamp.GetCurrentTime()

    lease.result.Pack(action_result)

    return lease
def work_buildbox(lease, context, event):
    """Executes a lease for a build action, using buildbox.
    """
    local_cas_directory = context.local_cas
    # instance_name = context.parent

    logger = logging.getLogger(__name__)

    action_digest = remote_execution_pb2.Digest()

    lease.payload.Unpack(action_digest)
    lease.result.Clear()

    with download(context.cas_channel) as downloader:
        action = downloader.get_message(action_digest,
                                        remote_execution_pb2.Action())

        assert action.command_digest.hash

        command = downloader.get_message(action.command_digest,
                                         remote_execution_pb2.Command())

    if command.working_directory:
        working_directory = command.working_directory
    else:
        working_directory = '/'

    logger.debug("Command digest: [{}/{}]"
                 .format(action.command_digest.hash, action.command_digest.size_bytes))
    logger.debug("Input root digest: [{}/{}]"
                 .format(action.input_root_digest.hash, action.input_root_digest.size_bytes))

    os.makedirs(os.path.join(local_cas_directory, 'tmp'), exist_ok=True)
    os.makedirs(context.fuse_dir, exist_ok=True)

    tempdir = os.path.join(local_cas_directory, 'tmp')

    with tempfile.NamedTemporaryFile(dir=tempdir) as input_digest_file:
        # The input root digest must be written to disk for BuildBox:
        write_file(input_digest_file.name, action.input_root_digest.SerializeToString())

        with tempfile.NamedTemporaryFile(dir=tempdir) as output_digest_file:
            with tempfile.NamedTemporaryFile(dir=tempdir) as timestamps_file:
                command_line = ['buildbox',
                                '--remote={}'.format(context.remote_cas_url),
                                '--input-digest={}'.format(input_digest_file.name),
                                '--output-digest={}'.format(output_digest_file.name),
                                '--chdir={}'.format(working_directory),
                                '--local={}'.format(local_cas_directory),
                                '--output-times={}'.format(timestamps_file.name)]

                if context.cas_client_key:
                    command_line.append('--client-key={}'.format(context.cas_client_key))
                if context.cas_client_cert:
                    command_line.append('--client-cert={}'.format(context.cas_client_cert))
                if context.cas_server_cert:
                    command_line.append('--server-cert={}'.format(context.cas_server_cert))

                command_line.append('--clearenv')
                for variable in command.environment_variables:
                    command_line.append('--setenv')
                    command_line.append(variable.name)
                    command_line.append(variable.value)

                command_line.append(context.fuse_dir)
                command_line.extend(command.arguments)

                logger.info("Starting execution: [{}...]".format(command.arguments[0]))

                process = subprocess.Popen(command_line,
                                           stdin=subprocess.PIPE,
                                           stdout=subprocess.PIPE,
                                           stderr=subprocess.PIPE)
                stdout, stderr = process.communicate()
                returncode = process.returncode

                action_result = remote_execution_pb2.ActionResult()
                action_result.exit_code = returncode

                logger.info("Execution finished with code: [{}]".format(returncode))

                output_digest = remote_execution_pb2.Digest()
                output_digest.ParseFromString(read_file(output_digest_file.name))

                logger.debug("Output root digest: [{}/{}]"
                             .format(output_digest.hash, output_digest.size_bytes))

                metadata = read_file(timestamps_file.name)
                logger.debug("metadata: {}".format(metadata))
                action_result.execution_metadata.ParseFromString(metadata)

                if len(output_digest.hash) != HASH_LENGTH:
                    raise BotError(stdout,
                                   detail=stderr,
                                   reason="Output root digest too small.")

                # TODO: Have BuildBox help us create the Tree instance here.
                # See https://gitlab.com/BuildStream/buildbox/issues/7 for details.
                with download(context.cas_channel) as downloader:
                    output_tree = _cas_tree_maker(downloader, output_digest)

                with upload(context.cas_channel) as uploader:
                    output_tree_digest = uploader.put_message(output_tree)

                    output_directory = remote_execution_pb2.OutputDirectory()
                    output_directory.tree_digest.CopyFrom(output_tree_digest)
                    output_directory.path = os.path.relpath(working_directory, start='/')

                    action_result.output_directories.extend([output_directory])

                    # Inline stdout/stderr only if the resulting ActionResult
                    # stays below the maximum request size; otherwise upload
                    # them as blobs and reference them by digest:
                    if action_result.ByteSize() + len(stdout) > MAX_REQUEST_SIZE:
                        stdout_digest = uploader.put_blob(stdout)
                        action_result.stdout_digest.CopyFrom(stdout_digest)
                    else:
                        action_result.stdout_raw = stdout

                    if action_result.ByteSize() + len(stderr) > MAX_REQUEST_SIZE:
                        stderr_digest = uploader.put_blob(stderr)
                        action_result.stderr_digest.CopyFrom(stderr_digest)
                    else:
                        action_result.stderr_raw = stderr

                lease.result.Pack(action_result)

    return lease