def main(unused_argv):
    """Build a container image from flag-supplied inputs.

    Reads the image identifier, named layers, config, tag and base from
    FLAGS and delegates assembly to create_image, writing to FLAGS.output.
    """
    identifier = utils.ExtractValue(FLAGS.id)

    # Each --layer flag is "name=layer"; split on the first '=' only so the
    # layer value itself may contain '=' characters.
    layers = [
        {'name': utils.ExtractValue(name), 'layer': layer}
        for (name, layer) in (kv.split('=', 1) for kv in FLAGS.layer)
    ]

    create_image(FLAGS.output, identifier, layers, FLAGS.config, FLAGS.tag,
                 FLAGS.base)
Example #2
0
def create_tag_to_file_content_map(stamp_info, tag_file_pairs):
  """Build a map from Docker image tag to the content of its paired file.

  Args:
    stamp_info: Tag substitutions to make in the input tags,
        e.g. {BUILD_USER}.
    tag_file_pairs: List of "tag=filename" strings
        (e.g. ...:image=@bazel-out/...image.0.config), or None.

  Returns:
    Dict mapping docker_name.Tag objects to the extracted file contents.

  Raises:
    Exception: if an entry does not split into exactly two fields.
  """
  mapping = {}
  for pair in (tag_file_pairs or []):
    pieces = pair.split('=')
    if len(pieces) != 2:
      raise Exception('Expected associative list key=value, got: %s' % pair)
    raw_tag, path = pieces

    # Apply workspace-status substitutions before parsing the tag.
    stamped_tag = raw_tag.format(**stamp_info)
    parsed_tag = docker_name.Tag(stamped_tag, strict=False)

    # Add the mapping in one direction.
    mapping[parsed_tag] = utils.ExtractValue(path)

  return mapping
def main():
  """Generate a v2.2 image config JSON from command-line arguments.

  Reads an optional base config (--base), appends layer digests, applies
  stamped entrypoint/command/user/workdir, labels, env, ports and volumes,
  and writes the merged config to --output as sorted JSON.
  """
  args = parser.parse_args()

  def Stamp(inp):
    """Perform substitutions in the provided value."""
    # No-op when stamping is disabled or the value is empty/None.
    if not args.stamp_info_file or not inp:
      return inp
    format_args = {}
    for infofile in args.stamp_info_file:
      with open(infofile) as info:
        for line in info:
          line = line.strip('\n')
          # Workspace-status lines are "KEY value"; split once so the value
          # may contain spaces.
          key, value = line.split(' ', 1)
          if key in format_args:
            print ('WARNING: Duplicate value for key "%s": '
                   'using "%s"' % (key, value))
          format_args[key] = value

    return inp.format(**format_args)

  # Start from an empty JSON object unless a base config was supplied.
  base_json = '{}'
  if args.base:
    with open(args.base, 'r') as r:
      base_json = r.read()
  data = json.loads(base_json)

  layers = []
  for layer in args.layer:
    layers.append(utils.ExtractValue(layer))

  labels = KeyValueToDict(args.labels)
  for label, value in six.iteritems(labels):
    if value.startswith('@'):
      # '@path' means: read the label value from the named file.
      with open(value[1:], 'r') as f:
        labels[label] = f.read()
    elif '{' in value:
      # A brace suggests a stamp placeholder such as {BUILD_USER}.
      labels[label] = Stamp(value)

  output = v2_2_metadata.Override(data, v2_2_metadata.Overrides(
      author='Bazel', created_by='bazel build ...',
      layers=layers, entrypoint=list(map(Stamp, fix_dashdash(args.entrypoint))),
      cmd=list(map(Stamp, fix_dashdash(args.command))), user=Stamp(args.user),
      labels=labels, env={
        k: Stamp(v)
        for (k, v) in six.iteritems(KeyValueToDict(args.env))
      },
      ports=args.ports, volumes=args.volumes, workdir=Stamp(args.workdir)),
                                  architecture=_PROCESSOR_ARCHITECTURE,
                                  operating_system=_OPERATING_SYSTEM)

  # sort_keys makes the output deterministic for build caching.
  with open(args.output, 'w') as fp:
    json.dump(output, fp, sort_keys=True)
    fp.write('\n')
Example #4
0
def main():
  """Assemble a Docker image bundle from configs, manifests and layers.

  Builds the lookup tables create_bundle needs:
    * tag_to_config / tag_to_manifest: stamped docker_name.Tag -> file content.
    * diffid_to_blobsum: uncompressed layer digest -> compressed blob digest.
    * blobsum_to_unzipped / blobsum_to_zipped: blob digest -> layer file path.
    * blobsum_to_legacy: blob digest -> v2_2_image from --legacy tarballs.
  """
  args = parser.parse_args()

  tag_to_config = {}
  tag_to_manifest = {}
  stamp_info = {}
  diffid_to_blobsum = {}
  blobsum_to_unzipped = {}
  blobsum_to_zipped = {}

  # Load workspace-status key/value pairs used for tag stamping.
  if args.stamp_info_file:
    for infofile in args.stamp_info_file:
      with open(infofile) as info:
        for line in info:
          line = line.strip("\n")
          # "KEY value" — split once so the value may contain spaces.
          key, value = line.split(" ", 1)
          if key in stamp_info:
            print ("WARNING: Duplicate value for workspace status key '%s': "
                   "using '%s'" % (key, value))
          stamp_info[key] = value

  tag_to_config = create_tag_to_file_content_map(stamp_info, args.tags)
  tag_to_manifest = create_tag_to_file_content_map(stamp_info, args.manifests)

  # Do this first so that if there is overlap with the loop below it wins.
  blobsum_to_legacy = {}
  for tar in args.legacy or []:
    with v2_2_image.FromTarball(tar) as legacy_image:
      config_file = legacy_image.config_file()
      cfg = json.loads(config_file)
      # Reverse so fs_layers lines up index-wise with rootfs.diff_ids.
      fs_layers = list(reversed(legacy_image.fs_layers()))
      for i, diff_id in enumerate(cfg['rootfs']['diff_ids']):
        blob_sum = fs_layers[i]
        diffid_to_blobsum[diff_id] = blob_sum
        blobsum_to_legacy[blob_sum] = legacy_image

  # Each --layer entry is "diffid=blobsum=unzipped=zipped" (four filenames).
  if args.layer:
    for entry in args.layer:
      elts = entry.split('=')
      if len(elts) != 4:
        raise Exception('Expected associative list key=value, got: %s' % entry)
      (diffid_filename, blobsum_filename,
      unzipped_filename, zipped_filename) = elts

      diff_id = 'sha256:' + utils.ExtractValue(diffid_filename)
      blob_sum = 'sha256:' + utils.ExtractValue(blobsum_filename)

      diffid_to_blobsum[diff_id] = blob_sum
      blobsum_to_unzipped[blob_sum] = unzipped_filename
      blobsum_to_zipped[blob_sum] = zipped_filename

  # add foreign layers
  #
  # Windows base images distributed by Microsoft are using foreign layers.
  # Foreign layers are not stored in the Docker repository like normal layers.
  # Instead they include a list of URLs where the layer can be downloaded.
  # This is done because Windows base images are large (2+GB).  When someone
  # pulls a Windows image, it downloads the foreign layers from those URLs
  # instead of requesting the blob from the registry.
  # When adding foreign layers through bazel, the actual layer blob is not
  # present on the system.  Instead the base image manifest is used to
  # describe the parent image layers.
  for tag, manifest_file in tag_to_manifest.items():
    manifest = json.loads(manifest_file)
    if 'layers' in manifest:
      config = json.loads(tag_to_config[tag])
      for i, layer in enumerate(manifest['layers']):
        diff_id = config['rootfs']['diff_ids'][i]
        if layer['mediaType'] == docker_http.FOREIGN_LAYER_MIME:
          blob_sum = layer['digest']
          diffid_to_blobsum[diff_id] = blob_sum

  create_bundle(
      args.output, tag_to_config, tag_to_manifest, diffid_to_blobsum,
      blobsum_to_unzipped, blobsum_to_zipped, blobsum_to_legacy)
Example #5
0
def main():
    """Assemble a Docker image bundle from config, layer and legacy inputs.

    Builds the lookup tables create_bundle needs:
      * tag_to_config: stamped docker_name.Tag -> image config file content.
      * diffid_to_blobsum: uncompressed layer digest -> compressed digest.
      * blobsum_to_unzipped / blobsum_to_zipped: digest -> layer file path.
      * blobsum_to_legacy: digest -> v2_2_image from --legacy tarballs.
    """
    args = parser.parse_args()

    tag_to_config = {}
    stamp_info = {}
    diffid_to_blobsum = {}
    blobsum_to_unzipped = {}
    blobsum_to_zipped = {}

    # Load workspace-status key/value pairs used for tag stamping.
    if args.stamp_info_file:
        for infofile in args.stamp_info_file:
            with open(infofile) as info:
                for line in info:
                    line = line.strip("\n")
                    # "KEY value" — split once so the value may have spaces.
                    key, value = line.split(" ", 1)
                    if key in stamp_info:
                        print(
                            "WARNING: Duplicate value for workspace status key '%s': "
                            "using '%s'" % (key, value))
                    stamp_info[key] = value

    for entry in args.tags:
        elts = entry.split('=')
        if len(elts) != 2:
            raise Exception('Expected associative list key=value, got: %s' %
                            entry)
        (fq_tag, config_filename) = elts

        # Apply stamp substitutions (e.g. {BUILD_USER}) before parsing.
        formatted_tag = fq_tag.format(**stamp_info)
        tag = docker_name.Tag(formatted_tag, strict=False)
        config_file = utils.ExtractValue(config_filename)

        # Add the mapping in one direction.
        tag_to_config[tag] = config_file

    # Do this first so that if there is overlap with the loop below it wins.
    blobsum_to_legacy = {}
    for tar in args.legacy or []:
        with v2_2_image.FromTarball(tar) as legacy_image:
            config_file = legacy_image.config_file()
            cfg = json.loads(config_file)
            # Reverse so fs_layers lines up index-wise with rootfs.diff_ids.
            fs_layers = list(reversed(legacy_image.fs_layers()))
            for i, diff_id in enumerate(cfg['rootfs']['diff_ids']):
                blob_sum = fs_layers[i]
                diffid_to_blobsum[diff_id] = blob_sum
                blobsum_to_legacy[blob_sum] = legacy_image

    # Each --layer entry is "diffid=blobsum=unzipped=zipped" (four filenames).
    for entry in args.layer:
        elts = entry.split('=')
        if len(elts) != 4:
            raise Exception('Expected associative list key=value, got: %s' %
                            entry)
        (diffid_filename, blobsum_filename, unzipped_filename,
         zipped_filename) = elts

        diff_id = 'sha256:' + utils.ExtractValue(diffid_filename)
        # BUG FIX: the compressed digest must come from blobsum_filename.
        # Previously diffid_filename was read twice, so every layer's blob
        # sum was wrongly set to its uncompressed digest (compare the
        # sibling main() above, which reads blobsum_filename here).
        blob_sum = 'sha256:' + utils.ExtractValue(blobsum_filename)

        diffid_to_blobsum[diff_id] = blob_sum
        blobsum_to_unzipped[blob_sum] = unzipped_filename
        blobsum_to_zipped[blob_sum] = zipped_filename

    create_bundle(args.output, tag_to_config, diffid_to_blobsum,
                  blobsum_to_unzipped, blobsum_to_zipped, blobsum_to_legacy)
def main():
    """Generate a v2.2 image config JSON, including a stamped creation time.

    Reads an optional base config (--base), applies layers, labels, env and
    other overrides (with workspace-status stamping), normalizes
    --creation_time from unix epoch seconds/milliseconds to RFC 3339, and
    writes the merged config to --output as sorted JSON.
    """
    args = parser.parse_args()

    def Stamp(inp):
        """Perform substitutions in the provided value."""
        # No-op when stamping is disabled or the value is empty/None.
        if not args.stamp_info_file or not inp:
            return inp
        format_args = {}
        for infofile in args.stamp_info_file:
            with open(infofile) as info:
                for line in info:
                    line = line.strip('\n')
                    # "KEY value" — split once so the value may have spaces.
                    key, value = line.split(' ', 1)
                    if key in format_args:
                        print('WARNING: Duplicate value for key "%s": '
                              'using "%s"' % (key, value))
                    format_args[key] = value

        return inp.format(**format_args)

    # Start from an empty JSON object unless a base config was supplied.
    base_json = '{}'
    if args.base:
        with open(args.base, 'r') as r:
            base_json = r.read()
    data = json.loads(base_json)

    layers = []
    for layer in args.layer:
        layers.append(utils.ExtractValue(layer))

    labels = KeyValueToDict(args.labels)
    for label, value in six.iteritems(labels):
        if value.startswith('@'):
            # '@path' means: read the label value from the named file.
            with open(value[1:], 'r') as f:
                labels[label] = f.read()
        elif '{' in value:
            # A brace suggests a stamp placeholder such as {BUILD_USER}.
            labels[label] = Stamp(value)

    creation_time = None
    if args.creation_time:
        creation_time = Stamp(args.creation_time)
        try:
            # If creation_time is parsable as a floating point type, assume unix epoch
            # timestamp.
            parsed_unix_timestamp = float(creation_time)
            if parsed_unix_timestamp > 1.0e+11:
                # Bazel < 0.12 was bugged and used milliseconds since unix epoch as
                # the default. Values > 1e11 are assumed to be unix epoch
                # milliseconds.
                parsed_unix_timestamp = parsed_unix_timestamp / 1000.0

            # Construct a RFC 3339 date/time from the Unix epoch.
            creation_time = (datetime.datetime.utcfromtimestamp(
                parsed_unix_timestamp).strftime("%Y-%m-%dT%H:%M:%S.%fZ"))
        except ValueError:
            # Otherwise, assume RFC 3339 date/time format.
            pass

    output = v2_2_metadata.Override(
        data,
        v2_2_metadata.Overrides(
            author='Bazel',
            created_by='bazel build ...',
            layers=layers,
            entrypoint=list(map(Stamp, fix_dashdash(args.entrypoint))),
            cmd=list(map(Stamp, fix_dashdash(args.command))),
            creation_time=creation_time,
            user=Stamp(args.user),
            labels=labels,
            env={
                k: Stamp(v)
                for (k, v) in six.iteritems(KeyValueToDict(args.env))
            },
            ports=args.ports,
            volumes=args.volumes,
            workdir=Stamp(args.workdir)),
        architecture=_PROCESSOR_ARCHITECTURE,
        operating_system=_OPERATING_SYSTEM)

    # sort_keys makes the output deterministic for build caching.
    with open(args.output, 'w') as fp:
        json.dump(output, fp, sort_keys=True)
        fp.write('\n')