def upload_dummy(context):
    command = remote_execution_pb2.Command()

    try:
        with upload(context.channel, instance=context.instance_name) as uploader:
            command_digest = uploader.put_message(command)
    except ConnectionError as e:
        click.echo('Error: Uploading dummy: {}'.format(e), err=True)
        sys.exit(-1)

    if command_digest.ByteSize():
        click.echo('Success: Pushed Command, digest=["{}/{}"]'
                   .format(command_digest.hash, command_digest.size_bytes))
    else:
        click.echo("Error: Failed pushing empty Command.", err=True)

    action = remote_execution_pb2.Action(command_digest=command_digest,
                                         do_not_cache=True)

    with upload(context.channel, instance=context.instance_name) as uploader:
        action_digest = uploader.put_message(action)

    if action_digest.ByteSize():
        click.echo('Success: Pushed Action, digest=["{}/{}"]'
                   .format(action_digest.hash, action_digest.size_bytes))
    else:
        click.echo("Error: Failed pushing empty Action.", err=True)

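# Even an empty Command serializes to zero bytes, so it still has a
# well-defined digest (the hash of the empty string); the ByteSize() checks
# above only guard against the uploader returning an unset Digest. A small
# illustration, assuming create_digest behaves like
# buildgrid.utils.create_digest:

from buildgrid.utils import create_digest

empty_command = remote_execution_pb2.Command()
digest = create_digest(empty_command.SerializeToString())
# With SHA-256 this prints
# 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855/0':
print('{}/{}'.format(digest.hash, digest.size_bytes))
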
def upload_directory(context, directory_path, verify):
    sent_digests = []
    try:
        with upload(context.channel, instance=context.instance_name) as uploader:
            for node, blob, path in merkle_tree_maker(directory_path):
                if not os.path.isabs(directory_path):
                    path = os.path.relpath(path)
                click.echo("Queueing path=[{}]".format(path))

                node_digest = uploader.put_blob(blob, digest=node.digest, queue=True)
                sent_digests.append((node_digest, path))
    except ConnectionError as e:
        click.echo('Error: Uploading directory: {}'.format(e), err=True)
        sys.exit(-1)

    for node_digest, node_path in sent_digests:
        if verify and (os.path.isfile(node_path) and
                       node_digest.size_bytes != os.stat(node_path).st_size):
            click.echo("Error: Failed to verify path=[{}]".format(node_path), err=True)
        elif node_digest.ByteSize():
            click.echo("Success: Pushed path=[{}] with digest=[{}/{}]"
                       .format(node_path, node_digest.hash, node_digest.size_bytes))
        else:
            click.echo("Error: Failed pushing path=[{}]".format(node_path), err=True)

def upload_file(context, file_path, verify):
    sent_digests = []
    try:
        with upload(context.channel, instance=context.instance_name) as uploader:
            for path in file_path:
                if not os.path.isabs(path):
                    path = os.path.relpath(path)
                click.echo("Queueing path=[{}]".format(path))

                file_digest = uploader.upload_file(path, queue=True)
                sent_digests.append((file_digest, path))
    except ConnectionError as e:
        click.echo('Error: Uploading file: {}'.format(e), err=True)
        sys.exit(-1)

    for file_digest, file_path in sent_digests:
        if verify and file_digest.size_bytes != os.stat(file_path).st_size:
            click.echo("Error: Failed to verify '{}'".format(file_path), err=True)
        elif file_digest.ByteSize():
            click.echo("Success: Pushed path=[{}] with digest=[{}/{}]"
                       .format(file_path, file_digest.hash, file_digest.size_bytes))
        else:
            click.echo("Error: Failed pushing path=[{}]".format(file_path), err=True)

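# Both commands above lean on the uploader's deferred-batching behavior:
# put_blob()/upload_file() with queue=True return the expected Digest
# immediately while the actual transfers are batched and flushed when the
# upload() context exits, which is why verification only happens after the
# 'with' block. A minimal sketch of that pattern; the import path follows
# BuildGrid's layout and the endpoint/instance are illustrative assumptions:

import grpc

from buildgrid.client.cas import upload

channel = grpc.insecure_channel('localhost:50051')  # hypothetical endpoint

with upload(channel, instance='main') as uploader:
    # Digest is computed and returned right away; bytes go out in batches.
    digest = uploader.put_blob(b'hello, cas', queue=True)
# Leaving the 'with' block flushes any queued blobs to the remote.
print('{}/{}'.format(digest.hash, digest.size_bytes))
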
def __test_upload_message(queue, remote, instance, messages):
    # Open a channel to the remote CAS server:
    channel = grpc.insecure_channel(remote)

    digests = []
    with upload(channel, instance) as uploader:
        if len(messages) > 1:
            for message in messages:
                digest = uploader.put_message(message, queue=True)
                digests.append(digest.SerializeToString())
        else:
            digest = uploader.put_message(messages[0], queue=False)
            digests.append(digest.SerializeToString())

    queue.put(digests)

def __test_upload_blob(queue, remote, instance, blobs):
    # Open a channel to the remote CAS server:
    channel = grpc.insecure_channel(remote)

    digests = []
    with upload(channel, instance) as uploader:
        if len(blobs) > 1:
            for blob in blobs:
                digest = uploader.put_blob(blob, queue=True)
                digests.append(digest.SerializeToString())
        else:
            digest = uploader.put_blob(blobs[0], queue=False)
            digests.append(digest.SerializeToString())

    queue.put(digests)

def __test_upload_tree(queue, remote, instance, directory_paths):
    # Open a channel to the remote CAS server:
    channel = grpc.insecure_channel(remote)

    digests = []
    with upload(channel, instance) as uploader:
        if len(directory_paths) > 1:
            for directory_path in directory_paths:
                digest = uploader.upload_tree(directory_path, queue=True)
                digests.append(digest.SerializeToString())
        else:
            digest = uploader.upload_tree(directory_paths[0], queue=False)
            digests.append(digest.SerializeToString())

    queue.put(digests)

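# The three __test_upload_* helpers share one pattern: they run in a child
# process and hand serialized Digest messages back through a queue so the
# parent can verify the upload independently. A hedged sketch of how a test
# might drive one of them; the remote address is an illustrative assumption,
# and the proto import path follows BuildGrid's layout:

import multiprocessing

from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2

queue = multiprocessing.Queue()
process = multiprocessing.Process(target=__test_upload_blob,
                                  args=(queue, 'localhost:50051', 'main', [b'data']))
process.start()
process.join()

for serialized in queue.get():
    digest = remote_execution_pb2.Digest()
    digest.ParseFromString(serialized)  # recover the Digest sent by the child
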
def bulk_update_blobs(self, blobs):
    sent_digests = []
    with upload(self.channel, instance=self.instance_name) as uploader:
        for digest, blob in blobs:
            # Reject blobs whose size or hash does not match the claimed
            # digest; an empty Digest marks the entry as failed below.
            if len(blob) != digest.size_bytes or HASH(blob).hexdigest() != digest.hash:
                sent_digests.append(remote_execution_pb2.Digest())
            else:
                sent_digests.append(uploader.put_blob(blob, digest=digest, queue=True))

    assert len(sent_digests) == len(blobs)

    return [status_pb2.Status(code=code_pb2.OK) if d.ByteSize() > 0
            else status_pb2.Status(code=code_pb2.UNKNOWN)
            for d in sent_digests]

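# A short usage sketch for bulk_update_blobs(): it takes (digest, blob)
# pairs and returns one Status per pair, in order. create_digest stands in
# for however the caller computes digests (in BuildGrid, see
# buildgrid.utils.create_digest); 'storage' is a hypothetical instance of
# the surrounding class:

from buildgrid.utils import create_digest

blobs = [b'first blob', b'second blob']
pairs = [(create_digest(blob), blob) for blob in blobs]

statuses = storage.bulk_update_blobs(pairs)  # 'storage' is assumed
for (digest, _), status in zip(pairs, statuses):
    print('{}/{}: code={}'.format(digest.hash, digest.size_bytes, status.code))
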
def work_host_tools(lease, context, event):
    """Executes a lease for a build action, using host tools.
    """
    instance_name = context.parent
    logger = logging.getLogger(__name__)

    action_digest = remote_execution_pb2.Digest()
    action_result = remote_execution_pb2.ActionResult()

    lease.payload.Unpack(action_digest)
    lease.result.Clear()

    action_result.execution_metadata.worker = get_hostname()

    with tempfile.TemporaryDirectory() as temp_directory:
        with download(context.cas_channel, instance=instance_name) as downloader:
            action = downloader.get_message(action_digest,
                                            remote_execution_pb2.Action())

            assert action.command_digest.hash

            command = downloader.get_message(action.command_digest,
                                             remote_execution_pb2.Command())

            action_result.execution_metadata.input_fetch_start_timestamp.GetCurrentTime()

            downloader.download_directory(action.input_root_digest, temp_directory)

        logger.debug("Command digest: [{}/{}]"
                     .format(action.command_digest.hash, action.command_digest.size_bytes))
        logger.debug("Input root digest: [{}/{}]"
                     .format(action.input_root_digest.hash, action.input_root_digest.size_bytes))

        action_result.execution_metadata.input_fetch_completed_timestamp.GetCurrentTime()

        environment = os.environ.copy()
        for variable in command.environment_variables:
            if variable.name not in ['PWD']:
                environment[variable.name] = variable.value

        command_line = []
        for argument in command.arguments:
            command_line.append(argument.strip())

        working_directory = None
        if command.working_directory:
            working_directory = os.path.join(temp_directory,
                                             command.working_directory)
            os.makedirs(working_directory, exist_ok=True)
        else:
            working_directory = temp_directory

        # Ensure that output files and directories structure exists:
        for output_path in itertools.chain(command.output_files,
                                           command.output_directories):
            parent_path = os.path.join(working_directory,
                                       os.path.dirname(output_path))
            os.makedirs(parent_path, exist_ok=True)

        logger.info("Starting execution: [{}...]".format(command.arguments[0]))

        action_result.execution_metadata.execution_start_timestamp.GetCurrentTime()

        process = subprocess.Popen(command_line,
                                   cwd=working_directory,
                                   env=environment,
                                   stdin=subprocess.PIPE,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE)
        stdout, stderr = process.communicate()
        returncode = process.returncode

        action_result.execution_metadata.execution_completed_timestamp.GetCurrentTime()

        action_result.exit_code = returncode

        logger.info("Execution finished with code: [{}]".format(returncode))

        action_result.execution_metadata.output_upload_start_timestamp.GetCurrentTime()

        with upload(context.cas_channel, instance=instance_name) as uploader:
            for output_path in itertools.chain(command.output_files,
                                               command.output_directories):
                file_path = os.path.join(working_directory, output_path)
                # Missing outputs should simply be omitted in ActionResult:
                if not os.path.exists(file_path):
                    continue

                if os.path.isdir(file_path):
                    tree_digest = uploader.upload_tree(file_path, queue=True)
                    output_directory = output_directory_maker(file_path,
                                                              working_directory,
                                                              tree_digest)
                    action_result.output_directories.append(output_directory)
                    logger.debug("Output tree digest: [{}/{}]"
                                 .format(tree_digest.hash, tree_digest.size_bytes))
                else:
                    file_digest = uploader.upload_file(file_path, queue=True)
                    output_file = output_file_maker(file_path,
                                                    working_directory,
                                                    file_digest)
                    action_result.output_files.append(output_file)
                    logger.debug("Output file digest: [{}/{}]"
                                 .format(file_digest.hash, file_digest.size_bytes))

            # Inline stdout/stderr only while the result stays under the
            # request size limit; otherwise refer to them by digest:
            if action_result.ByteSize() + len(stdout) > MAX_REQUEST_SIZE:
                stdout_digest = uploader.put_blob(stdout)
                action_result.stdout_digest.CopyFrom(stdout_digest)
            else:
                action_result.stdout_raw = stdout

            if action_result.ByteSize() + len(stderr) > MAX_REQUEST_SIZE:
                stderr_digest = uploader.put_blob(stderr)
                action_result.stderr_digest.CopyFrom(stderr_digest)
            else:
                action_result.stderr_raw = stderr

        action_result.execution_metadata.output_upload_completed_timestamp.GetCurrentTime()

    lease.result.Pack(action_result)

    return lease

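# After work_host_tools() returns, the lease's 'result' field carries the
# packed ActionResult. A minimal sketch of how a caller might unpack it;
# the 'lease' value is assumed to come from the function above:

result = remote_execution_pb2.ActionResult()
lease.result.Unpack(result)

print('exit code: {}'.format(result.exit_code))
if result.stdout_raw:
    print(result.stdout_raw.decode())  # small outputs are inlined
else:
    # Large outputs are referenced by digest and must be fetched from CAS.
    print('stdout digest: {}/{}'.format(result.stdout_digest.hash,
                                        result.stdout_digest.size_bytes))
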
def work_buildbox(lease, context, event):
    """Executes a lease for a build action, using buildbox.
    """
    local_cas_directory = context.local_cas
    # instance_name = context.parent
    logger = logging.getLogger(__name__)

    action_digest = remote_execution_pb2.Digest()

    lease.payload.Unpack(action_digest)
    lease.result.Clear()

    with download(context.cas_channel) as downloader:
        action = downloader.get_message(action_digest,
                                        remote_execution_pb2.Action())

        assert action.command_digest.hash

        command = downloader.get_message(action.command_digest,
                                         remote_execution_pb2.Command())

    if command.working_directory:
        working_directory = command.working_directory
    else:
        working_directory = '/'

    logger.debug("Command digest: [{}/{}]"
                 .format(action.command_digest.hash, action.command_digest.size_bytes))
    logger.debug("Input root digest: [{}/{}]"
                 .format(action.input_root_digest.hash, action.input_root_digest.size_bytes))

    os.makedirs(os.path.join(local_cas_directory, 'tmp'), exist_ok=True)
    os.makedirs(context.fuse_dir, exist_ok=True)

    tempdir = os.path.join(local_cas_directory, 'tmp')

    with tempfile.NamedTemporaryFile(dir=tempdir) as input_digest_file:
        # Input hash must be written to disk for BuildBox
        write_file(input_digest_file.name, action.input_root_digest.SerializeToString())

        with tempfile.NamedTemporaryFile(dir=tempdir) as output_digest_file:
            with tempfile.NamedTemporaryFile(dir=tempdir) as timestamps_file:
                command_line = ['buildbox',
                                '--remote={}'.format(context.remote_cas_url),
                                '--input-digest={}'.format(input_digest_file.name),
                                '--output-digest={}'.format(output_digest_file.name),
                                '--chdir={}'.format(working_directory),
                                '--local={}'.format(local_cas_directory),
                                '--output-times={}'.format(timestamps_file.name)]

                if context.cas_client_key:
                    command_line.append('--client-key={}'.format(context.cas_client_key))
                if context.cas_client_cert:
                    command_line.append('--client-cert={}'.format(context.cas_client_cert))
                if context.cas_server_cert:
                    command_line.append('--server-cert={}'.format(context.cas_server_cert))

                command_line.append('--clearenv')
                for variable in command.environment_variables:
                    command_line.append('--setenv')
                    command_line.append(variable.name)
                    command_line.append(variable.value)

                command_line.append(context.fuse_dir)
                command_line.extend(command.arguments)

                logger.info("Starting execution: [{}...]".format(command.arguments[0]))

                process = subprocess.Popen(command_line,
                                           stdin=subprocess.PIPE,
                                           stdout=subprocess.PIPE,
                                           stderr=subprocess.PIPE)
                stdout, stderr = process.communicate()
                returncode = process.returncode

                action_result = remote_execution_pb2.ActionResult()
                action_result.exit_code = returncode

                logger.info("Execution finished with code: [{}]".format(returncode))

                output_digest = remote_execution_pb2.Digest()
                output_digest.ParseFromString(read_file(output_digest_file.name))

                logger.debug("Output root digest: [{}/{}]"
                             .format(output_digest.hash, output_digest.size_bytes))

                metadata = read_file(timestamps_file.name)
                logger.debug("metadata: {}".format(metadata))
                action_result.execution_metadata.ParseFromString(metadata)

                if len(output_digest.hash) != HASH_LENGTH:
                    raise BotError(stdout,
                                   detail=stderr,
                                   reason="Output root digest too small.")

                # TODO: Have BuildBox help us create the Tree instance here
                # See https://gitlab.com/BuildStream/buildbox/issues/7 for details
                with download(context.cas_channel) as downloader:
                    output_tree = _cas_tree_maker(downloader, output_digest)

                with upload(context.cas_channel) as uploader:
                    output_tree_digest = uploader.put_message(output_tree)

                    output_directory = remote_execution_pb2.OutputDirectory()
                    output_directory.tree_digest.CopyFrom(output_tree_digest)
                    output_directory.path = os.path.relpath(working_directory, start='/')

                    action_result.output_directories.extend([output_directory])

                    if action_result.ByteSize() + len(stdout) > MAX_REQUEST_SIZE:
                        stdout_digest = uploader.put_blob(stdout)
                        action_result.stdout_digest.CopyFrom(stdout_digest)
                    else:
                        action_result.stdout_raw = stdout

                    if action_result.ByteSize() + len(stderr) > MAX_REQUEST_SIZE:
                        stderr_digest = uploader.put_blob(stderr)
                        action_result.stderr_digest.CopyFrom(stderr_digest)
                    else:
                        action_result.stderr_raw = stderr

    lease.result.Pack(action_result)

    return lease

def commit_write(self, digest, write_session):
    self.__logger.debug("Writing blob: [{}]".format(digest))

    with upload(self.channel, instance=self.instance_name) as uploader:
        uploader.put_blob(write_session.getvalue())
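
# commit_write() is the tail end of a two-step storage interface: callers
# first obtain a write session, fill it with the blob's bytes, then commit
# it against the expected digest. A hedged sketch of that flow, assuming a
# begin_write() counterpart returning an io.BytesIO-like session (as in
# BuildGrid's storage ABC) and a hypothetical 'storage' instance:

from buildgrid.utils import create_digest

data = b'example blob'
digest = create_digest(data)

session = storage.begin_write(digest)  # assumed counterpart to commit_write()
session.write(data)
storage.commit_write(digest, session)  # streams the buffered bytes to CAS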