def main():
  """Assemble a v2.2 image config from a base tarball plus CLI overrides."""
  args = parser.parse_args()

  # Start from the base image's config file when a base tarball is given,
  # otherwise from an empty JSON object.
  base_json = '{}'
  if args.base:
    with v2_2_image.FromTarball(args.base) as v2_2_img:
      base_json = v2_2_img.config_file()
  data = json.loads(base_json)

  # Each --layer argument carries a layer value to append.
  layers = []
  for layer in args.layer:
    layers.append(utils.ExtractValue(layer))

  labels = KeyValueToDict(args.labels)
  # Fix: dict.iteritems() is Python 2 only; .items() works on both 2 and 3.
  # Wrap in list() so reassigning values while iterating stays safe.
  for label, value in list(labels.items()):
    if value.startswith('@'):
      # A value of '@<path>' means the label text lives in that file.
      with open(value[1:], 'r') as f:
        labels[label] = f.read()

  output = v2_2_metadata.Override(data, v2_2_metadata.Overrides(
      author='Bazel', created_by='bazel build ...',
      layers=layers, entrypoint=args.entrypoint, cmd=args.command,
      user=args.user, labels=labels, env=KeyValueToDict(args.env),
      ports=args.ports, volumes=args.volumes, workdir=args.workdir),
                                  architecture=_PROCESSOR_ARCHITECTURE,
                                  operating_system=_OPERATING_SYSTEM)

  with open(args.output, 'w') as fp:
    json.dump(output, fp, sort_keys=True)
    fp.write('\n')
# Exemplo n.º 2
# 0
def CfgDctToOverrides(config_dct):
    """Translate a parsed image config dict into a metadata.Overrides object.

    Only a whitelisted set of keys is carried over, each renamed to the
    spelling the Overrides constructor expects.

    Args:
      config_dct: dict parsed from an image config file; must contain a
          'config' sub-dict.

    Returns:
      A metadata.Overrides built from the whitelisted values.
    """
    overrides_dct = {}
    # Fix: dict.iteritems() is Python 2 only; .items() works on both 2 and 3.
    for k, v in config_dct.items():
        if k == 'created':
            # this key change is made as the key is
            # 'creation_time' in an Overrides object
            # but 'created' in the config_file
            overrides_dct['creation_time'] = v
    for k, v in config_dct['config'].items():
        if k == 'Entrypoint':
            # this key change is made as the key is
            # 'entrypoint' in an Overrides object
            # but 'Entrypoint' in the config_file
            overrides_dct['entrypoint'] = v
        elif k == 'Env':
            # this key change is made as the key is
            # 'env' in an Overrides object
            # but 'Env' in the config_file
            overrides_dct['env'] = v
        elif k == 'ExposedPorts':
            # this key change is made as the key is
            # 'ports' in an Overrides object
            # but 'ExposedPorts' in the config_file
            overrides_dct['ports'] = v
    return metadata.Overrides(**overrides_dct)
# Exemplo n.º 3
# 0
    def config_file(self):
        """The raw blob string of the config file."""
        # Lazily build the config JSON on first access and memoize it.
        if self._config_file is not None:
            return self._config_file

        arch = 'amd64'
        os_name = 'linux'

        overrides = v2_2_metadata.Overrides(
            author='Bazel',
            created_by='bazel build ...',
            layers=list(self._diff_id_to_u_layer),
            entrypoint=self._overrides.pop('Entrypoint', []),
            env=self._overrides.pop('Env', {}),
            ports=self._overrides.pop('ExposedPorts', {}))
        config = v2_2_metadata.Override(
            {}, overrides, architecture=arch, operating_system=os_name)

        # Record the uncompressed layer digests as the image root filesystem.
        config['rootfs'] = {
            'diff_ids': list(self._diff_id_to_u_layer)
        }
        # Any overrides that were not consumed above pass through verbatim.
        if self._overrides:
            config.update(self._overrides)
        self._config_file = json.dumps(config, sort_keys=True)
        return self._config_file
# Exemplo n.º 4
# 0
 def _build(self, transport, src):
     """Package the build context and append it as a layer onto src."""
     # Package the build context; remember the tarball path and its digest.
     context_file, context_hash = self.preprocessor.context_tar_gz()
     self.context_file = context_file
     self.context_hash = context_hash
     self.image_tag = self.full_image_name(context_hash)
     creds = docker_creds.DefaultKeychain.Resolve(src)
     with v2_2_image.FromRegistry(src, creds, transport) as src_image:
         with open(context_file, 'rb') as tarball:
             overrides = metadata.Overrides(
                 cmd=self.preprocessor.get_command(),
                 user='******', env={"FAIRING_RUNTIME": "1"})
             new_img = append.Layer(src_image, tarball.read(),
                                    overrides=overrides)
     return new_img
def main():
  """Compose a v2.2 image config from a base config plus stamped overrides."""
  args = parser.parse_args()

  def Stamp(inp):
    """Perform substitutions in the provided value."""
    if not args.stamp_info_file or not inp:
      return inp
    # Build the substitution map from every stamp-info file; later files
    # win on duplicate keys, with a warning.
    format_args = {}
    for infofile in args.stamp_info_file:
      with open(infofile) as info:
        for line in info:
          key, value = line.strip('\n').split(' ', 1)
          if key in format_args:
            print('WARNING: Duplicate value for key "%s": '
                  'using "%s"' % (key, value))
          format_args[key] = value
    return inp.format(**format_args)

  # Load the base image config, defaulting to an empty object.
  if args.base:
    with open(args.base, 'r') as r:
      data = json.loads(r.read())
  else:
    data = json.loads('{}')

  layers = [utils.ExtractValue(layer) for layer in args.layer]

  labels = KeyValueToDict(args.labels)
  for label in list(labels):
    value = labels[label]
    if value.startswith('@'):
      # '@<path>': the label value is the contents of that file.
      with open(value[1:], 'r') as f:
        labels[label] = f.read()
    elif '{' in value:
      # Values containing '{' are treated as stamp format strings.
      labels[label] = Stamp(value)

  stamped_env = {}
  for k, v in KeyValueToDict(args.env).items():
    stamped_env[k] = Stamp(v)

  output = v2_2_metadata.Override(
      data,
      v2_2_metadata.Overrides(
          author='Bazel',
          created_by='bazel build ...',
          layers=layers,
          entrypoint=[Stamp(arg) for arg in fix_dashdash(args.entrypoint)],
          cmd=[Stamp(arg) for arg in fix_dashdash(args.command)],
          user=Stamp(args.user),
          labels=labels,
          env=stamped_env,
          ports=args.ports,
          volumes=args.volumes,
          workdir=Stamp(args.workdir)),
      architecture=_PROCESSOR_ARCHITECTURE,
      operating_system=_OPERATING_SYSTEM)

  with open(args.output, 'w') as fp:
    json.dump(output, fp, sort_keys=True)
    fp.write('\n')
# Exemplo n.º 6
# 0
  def __init__(self,
               base,
               tar_gz,
               diff_id = None,
               overrides = None):
    """Creates a new layer on top of a base with optional tar.gz.

    Args:
      base: a base DockerImage for a new layer.
      tar_gz: an optional gzipped tarball passed as a bytes with filesystem
          changeset.
      diff_id: an optional string containing the digest of the
          uncompressed tar_gz.
      overrides: an optional metadata.Overrides object of properties to override
          on the base image.
    """
    self._base = base
    # Start from the base image's manifest and config; both are mutated
    # below and re-serialized.
    manifest = json.loads(self._base.manifest())
    config_file = json.loads(self._base.config_file())

    # Always stamp who appended the layer, on top of any caller overrides.
    overrides = overrides or metadata.Overrides()
    overrides = overrides.Override(created_by=docker_name.USER_AGENT)

    if tar_gz:
      self._blob = tar_gz
      self._blob_sum = docker_digest.SHA256(self._blob)
      # Register the compressed blob in the manifest's layer list.
      manifest['layers'].append({
          'digest': self._blob_sum,
          'mediaType': docker_http.LAYER_MIME,
          'size': len(self._blob),
      })
      if not diff_id:
        # Derive the uncompressed digest when the caller did not supply one.
        diff_id = docker_digest.SHA256(self.uncompressed_blob(self._blob_sum))

      # Takes naked hex.
      overrides = overrides.Override(layers=[diff_id[len('sha256:'):]])
    else:
      # The empty layer.
      overrides = overrides.Override(layers=[docker_digest.SHA256(b'', '')])

    # Fold the overrides into the base config.
    config_file = metadata.Override(config_file, overrides)

    self._config_file = json.dumps(config_file, sort_keys=True)
    # The manifest must reference the new config blob by digest and size,
    # computed over the UTF-8 encoding of the serialized config.
    utf8_encoded_config = self._config_file.encode('utf8')
    manifest['config']['digest'] = docker_digest.SHA256(utf8_encoded_config)
    manifest['config']['size'] = len(utf8_encoded_config)
    self._manifest = json.dumps(manifest, sort_keys=True)
# Exemplo n.º 7
# 0
    def CreatePackageBase(self, base_image, cache, use_cache=True):
        """Override."""
        # Determine the entrypoint override up front so it can be folded
        # into whichever layer we end up returning (avoids an extra layer).
        pj_contents = {}
        if self._ctx.Contains(_PACKAGE_JSON):
            pj_contents = json.loads(self._ctx.GetFile(_PACKAGE_JSON))
        overrides = metadata.Overrides(entrypoint=parse_entrypoint(pj_contents))

        # Prefer the lock file; fall back to package.json.
        descriptor = None
        descriptor_contents = None
        for candidate in (_PACKAGE_LOCK, _PACKAGE_JSON):
            if self._ctx.Contains(candidate):
                descriptor = candidate
                descriptor_contents = self._ctx.GetFile(candidate)
                break

        if descriptor is None:
            logging.info('No package descriptor found. No packages installed.')
            # Nothing to install: return a bare layer carrying the overrides.
            return append.Layer(base_image, tar_gz=None, overrides=overrides)

        checksum = hashlib.sha256(descriptor_contents).hexdigest()
        if not use_cache:
            logging.info('Skipping checking cache for dependency layer %s'
                         % checksum)
        else:
            hit = cache.Get(base_image, _NODE_NAMESPACE, checksum)
            if hit:
                logging.info('Found cached dependency layer for %s' % checksum)
                return hit
            logging.info('No cached dependency layer for %s' % checksum)

        layer, sha = self._gen_package_tar(descriptor, descriptor_contents)

        with append.Layer(
          base_image, layer, diff_id=sha, overrides=overrides) as dep_image:
            if use_cache:
                logging.info('Storing layer %s in cache.', sha)
                cache.Store(base_image, _NODE_NAMESPACE, checksum, dep_image)
            else:
                logging.info('Skipping storing layer %s in cache.', sha)
            return dep_image
def main():
    """Assemble an image config from a base config plus stamped CLI overrides."""
    args = parser.parse_args()

    def Stamp(inp):
        """Perform substitutions in the provided value."""
        # No stamping requested, or nothing to stamp: return unchanged.
        if not args.stamp_info_file or not inp:
            return inp
        format_args = {}
        for infofile in args.stamp_info_file:
            with open(infofile) as info:
                for line in info:
                    line = line.strip('\n')
                    # Each stamp line is '<key> <value>'; the value may
                    # itself contain spaces, so split only once.
                    key, value = line.split(' ', 1)
                    if key in format_args:
                        print('WARNING: Duplicate value for key "%s": '
                              'using "%s"' % (key, value))
                    format_args[key] = value

        return inp.format(**format_args)

    # Load the base image config, defaulting to an empty object.
    base_json = '{}'
    if args.base:
        with open(args.base, 'r') as r:
            base_json = r.read()
    data = json.loads(base_json)

    layers = []
    for layer in args.layer:
        layers.append(utils.ExtractValue(layer))

    labels = KeyValueToDict(args.labels)
    for label, value in six.iteritems(labels):
        if value.startswith('@'):
            # '@<path>': the label value is the contents of that file.
            with open(value[1:], 'r') as f:
                labels[label] = f.read()
        elif '{' in value:
            # Values containing '{' are treated as stamp format strings.
            labels[label] = Stamp(value)

    creation_time = None
    if args.creation_time:
        creation_time = Stamp(args.creation_time)
        try:
            # If creation_time is parsable as a floating point type, assume unix epoch
            # timestamp.
            parsed_unix_timestamp = float(creation_time)
            if parsed_unix_timestamp > 1.0e+11:
                # Bazel < 0.12 was bugged and used milliseconds since unix epoch as
                # the default. Values > 1e11 are assumed to be unix epoch
                # milliseconds.
                parsed_unix_timestamp = parsed_unix_timestamp / 1000.0

            # Construct a RFC 3339 date/time from the Unix epoch.
            creation_time = (datetime.datetime.utcfromtimestamp(
                parsed_unix_timestamp).strftime("%Y-%m-%dT%H:%M:%S.%fZ"))
        except ValueError:
            # Otherwise, assume RFC 3339 date/time format.
            pass

    output = v2_2_metadata.Override(
        data,
        v2_2_metadata.Overrides(
            author='Bazel',
            created_by='bazel build ...',
            layers=layers,
            entrypoint=list(map(Stamp, fix_dashdash(args.entrypoint))),
            cmd=list(map(Stamp, fix_dashdash(args.command))),
            creation_time=creation_time,
            user=Stamp(args.user),
            labels=labels,
            env={
                k: Stamp(v)
                for (k, v) in six.iteritems(KeyValueToDict(args.env))
            },
            ports=args.ports,
            volumes=args.volumes,
            workdir=Stamp(args.workdir)),
        architecture=_PROCESSOR_ARCHITECTURE,
        operating_system=_OPERATING_SYSTEM)

    with open(args.output, 'w') as fp:
        json.dump(output, fp, sort_keys=True)
        fp.write('\n')
# Exemplo n.º 9
# 0
    def CreatePackageBase(self, base_image, cache):
        """Override."""
        # Figure out if we need to override entrypoint.
        # Save the overrides for later to avoid adding an extra layer.
        pj_contents = {}
        if self._ctx.Contains(_PACKAGE_JSON):
            pj_contents = json.loads(self._ctx.GetFile(_PACKAGE_JSON))
        entrypoint = parse_entrypoint(pj_contents)
        overrides = metadata.Overrides(entrypoint=entrypoint)

        descriptor = None
        for f in [_PACKAGE_LOCK, _PACKAGE_JSON]:
            if self._ctx.Contains(f):
                descriptor = f
                descriptor_contents = self._ctx.GetFile(f)
                break

        if not descriptor:
            logging.info('No package descriptor found. No packages installed.')

            # Add the overrides now.
            return append.Layer(base_image, tar_gz=None, overrides=overrides)

        checksum = hashlib.sha256(descriptor_contents).hexdigest()
        hit = cache.Get(base_image, _NODE_NAMESPACE, checksum)
        if hit:
            logging.info('Found cached dependency layer for %s' % checksum)
            return hit
        else:
            logging.info('No cached dependency layer for %s' % checksum)

        # We want the node_modules directory rooted at /app/node_modules in
        # the final image.
        # So we build a hierarchy like:
        # /$tmp/app/node_modules
        # And use the -C flag to tar to root the tarball at /$tmp.
        tmp = tempfile.mkdtemp()
        app_dir = os.path.join(tmp, 'app')
        os.mkdir(app_dir)

        # Copy out the relevant package descriptors to a tempdir.
        with open(os.path.join(app_dir, descriptor), 'w') as f:
            f.write(descriptor_contents)

        tar_path = tempfile.mktemp()
        check_gcp_build(json.loads(self._ctx.GetFile(_PACKAGE_JSON)), app_dir)
        subprocess.check_call(
            ['rm', '-rf', os.path.join(app_dir, 'node_modules')])
        subprocess.check_call(['npm', 'install', '--production', '--no-cache'],
                              cwd=app_dir)
        subprocess.check_call(['tar', '-C', tmp, '-cf', tar_path, '.'])

        # We need the sha of the unzipped and zipped tarball.
        # So for performance, tar, sha, zip, sha.
        # We use gzip for performance instead of python's zip.
        # Fix: hash the tarball from a binary-mode read (text mode breaks
        # hashing on Python 3 and mangles bytes on Windows), and close the
        # handle deterministically instead of leaking it.
        with open(tar_path, 'rb') as tar_file:
            sha = 'sha256:' + hashlib.sha256(tar_file.read()).hexdigest()
        subprocess.check_call(['gzip', tar_path])
        # tar_path is absolute, so os.path.join(tmp, ...) yields
        # tar_path + '.gz' unchanged; expression kept for parity.
        with open(os.path.join(tmp, tar_path + '.gz'), 'rb') as gz_file:
            layer = gz_file.read()

        with append.Layer(base_image, layer, diff_id=sha,
                          overrides=overrides) as dep_image:
            logging.info('Storing layer %s in cache.', sha)
            cache.Store(base_image, _NODE_NAMESPACE, checksum, dep_image)
            return dep_image