    @classmethod
    def iter(cls, owner=None, remote=False):
        """
        Yield the name of each blueprint.  Blueprints are stored as
        branches in the owner's Git repository; with `remote=True`, list
        the branches on the `origin` remote instead of the local refs.
        """
        if owner is None:
            owner = auth.login()
        gitname = cls.gitname(owner)
        with context_managers.breakout():
            if not os.path.isdir(gitname):
                return
            if remote:
                with context_managers.cd(gitname):
                    p = subprocess.Popen(
                        ['git', 'ls-remote', '--heads', 'origin'],
                        close_fds=True, stdout=subprocess.PIPE)
                    stdout, stderr = p.communicate()
                    if 0 != p.returncode:
                        return
                    for line in stdout.splitlines():
                        sha, refname = line.split()
                        if 'refs/heads' == os.path.dirname(refname):
                            yield os.path.basename(refname)
            else:
                repo = dulwich.repo.Repo(gitname)
                for refname in repo.refs.keys():
                    if 'refs/heads' == os.path.dirname(refname):
                        yield os.path.basename(refname)
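
    # A minimal usage sketch (assumes the authenticated owner already has
    # a blueprint repository on disk):
    #
    #   for name in Blueprint.iter():
    #       print name                        # local blueprint branches
    #   for name in Blueprint.iter(remote=True):
    #       print name                        # branches on the origin remote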
    def sh(self):
        """
        Generate shell code.
        """
        s = sh.Script(self.name, comment=self.DISCLAIMER)

        # Extract source tarballs.
        with context_managers.breakout():
            for dirname, filename in sorted(self.sources.iteritems()):
                commit = self.repo[self._sha]
                blob = self.repo[self.repo[commit.tree][filename][1]]
                s.add('tar xf {0} -C {1}',
                      filename,
                      dirname,
                      sources={filename: blob.data})

        # Place files.
        for pathname, f in sorted(self.files.iteritems()):
            s.add('mkdir -p {0}', os.path.dirname(pathname))
            if '120000' == f['mode'] or '120777' == f['mode']:
                s.add('ln -s {0} {1}', f['content'], pathname)
                continue
            command = 'cat'
            if 'base64' == f['encoding']:
                command = 'base64 --decode'

            # Grow the here-document delimiter until it no longer appears
            # in the file content, so the heredoc can't terminate early.
            eof = 'EOF'
            while eof in f['content']:
                eof += 'EOF'
            s.add('{0} >{1} <<{2}', command, pathname, eof)
            s.add(raw=f['content'])

            # A heredoc delimiter must begin on its own line, so force a
            # trailing newline if the content lacks one.
            if 0 < len(f['content']) and '\n' != f['content'][-1]:
                eof = '\n{0}'.format(eof)
            s.add(eof)
            if 'root' != f['owner']:
                s.add('chown {0} {1}', f['owner'], pathname)
            if 'root' != f['group']:
                s.add('chgrp {0} {1}', f['group'], pathname)
            if '000644' != f['mode']:
                s.add('chmod {0} {1}', f['mode'][-4:], pathname)

        # Install packages.
        def before(manager):
            if 'apt' == manager.name:
                s.add('apt-get -q update')
        def package(manager, package, version):

            # Skip the package that provides the manager itself.
            if manager.name == package:
                return
            s.add(manager(package, version))
            if 'apt' != manager.name:
                return

            # If APT just installed RubyGems, optionally update RubyGems
            # itself via rubygems-update.
            match = re.match(r'^rubygems(\d+\.\d+(?:\.\d+)?)$', package)
            if match is not None and rubygems_update():
                s.add('/usr/bin/gem{0} install --no-rdoc --no-ri '
                  'rubygems-update', match.group(1))
                s.add('/usr/bin/ruby{0} $(PATH=$PATH:/var/lib/gems/{0}/bin '
                  'which update_rubygems)', match.group(1))
        self.walk(before=before, package=package)

        return s
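
    # Usage sketch: render this blueprint as a POSIX shell script.  The
    # sh.Script output API isn't shown in this file, so the final step is
    # illustrative only:
    #
    #   s = Blueprint(name='example').sh()
    #   # s now carries tarball-extraction, file-placement and
    #   # package-installation commands plus their embedded sources.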
    @classmethod
    def push(cls, name, owner=None):
        """
        Push the named blueprint to the central Git repository.
        """
        if owner is None:
            owner = auth.login()
        gitname = cls.gitname(owner)
        with context_managers.breakout():
            with context_managers.cd(gitname):
                status = os.system('git push origin {0}'.format(name))
                if 0 != status:
                    raise KeyError(name)
    def commit(self, message=''):
        """
        Create a new revision of this blueprint in the local Git repository.
        Include the blueprint JSON and any source archives referenced by
        the JSON.
        """
        with context_managers.breakout() as context:
            tree = dulwich.objects.Tree()

            # Create and store a blob from the `blueprint`(5) JSON.
            blob = dulwich.objects.Blob.from_string(self.dumps())
            self.repo.object_store.add_object(blob)
            tree.add(0o100644, "blueprint.json", blob.id)

            # Create and store blobs for each source tarball.
            # Inside a sandbox the working copy lives under
            # /var/sandboxes/<name>; otherwise use the real working
            # directory recorded by the context manager.
            if context.name:
                dirname = os.path.join('/var/sandboxes',
                                       context.name,
                                       context.cwd[1:])
            else:
                dirname = context.cwd
            for filename in self.sources.itervalues():
                with open(os.path.join(dirname, filename)) as f:
                    blob = dulwich.objects.Blob.from_string(f.read())
                self.repo.object_store.add_object(blob)
                tree.add(0o100644, str(filename), blob.id)

            # Store the tree and create a commit that references it.
            self.repo.object_store.add_object(tree)
            commit = dulwich.objects.Commit()
            commit.tree = tree.id
            refname = os.path.join('refs/heads', self.name)
            try:
                commit.parents = [self.repo.refs[refname]]
            except KeyError:
                pass
            commit.author = commit.committer = self.author
            commit.commit_time = commit.author_time = int(time.time())
            # Handle both dulwich APIs: older versions of parse_timezone
            # return the offset alone, newer versions return a tuple whose
            # first element is the offset.
            timezone = dulwich.objects.parse_timezone('+0000')
            if isinstance(timezone, tuple):
                timezone = timezone[0]
            commit.commit_timezone = commit.author_timezone = timezone
            commit.encoding = 'UTF-8'
            commit.message = message
            self.repo.object_store.add_object(commit)
            self.repo.refs[refname] = self._sha = commit.id
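
    # The object graph commit() builds, for reference:
    #
    #   refs/heads/<name> -> commit -> tree -> blueprint.json (blob)
    #                                       -> <source tarball blobs>
    #
    # __init__ below walks the same graph when given a blueprint name.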
    @classmethod
    def pull(cls, name, owner=None):
        """
        Pull the named blueprint from the central Git repository.
        """
        if owner is None:
            owner = auth.login()
        gitname = cls.gitname(owner)
        with context_managers.breakout():
            with context_managers.cd(gitname):
                status = os.system('git remote update origin')
                if 0 != status:
                    raise KeyError(name)

                # If a local branch already exists, there's nothing to do.
                if os.path.exists('refs/heads/{0}'.format(name)):
                    return
                status = os.system(
                    'git branch {0} remotes/origin/{0}'.format(name))
                if 0 != status:
                    raise KeyError(name)
    @classmethod
    def destroy(cls, name, owner=None, remote=False):
        """
        Destroy the named blueprint.
        """
        if owner is None:
            owner = auth.login()
        gitname = cls.gitname(owner)
        with context_managers.breakout():
            if remote:
                with context_managers.cd(gitname):
                    os.system('git push origin :{0}'.format(name))
            else:
                if not os.path.isdir(gitname):
                    raise KeyError(name)
                repo = dulwich.repo.Repo(gitname)
                refname = os.path.join('refs/heads', name)
                repo.refs[refname]  # Induce KeyError if the ref is missing.
                del repo.refs[refname]
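
    # For reference, the Git plumbing equivalent of each branch above:
    #
    #   remote: git push origin :<name>              (delete the remote branch)
    #   local:  git update-ref -d refs/heads/<name>  (what dulwich does here)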
    def chef(self):
        """
        Generate Chef code.
        """
        c = chef.Cookbook(self.name, comment=self.DISCLAIMER)

        # Extract source tarballs.
        with context_managers.breakout():
            for dirname, filename in sorted(self.sources.iteritems()):
                commit = self.repo[self._sha]
                blob = self.repo[self.repo[commit.tree][filename][1]]
                pathname = os.path.join('/tmp', filename)
                c.file(pathname,
                       blob.data,
                       owner='root',
                       group='root',
                       mode='0644',
                       backup=False,
                       source=pathname[1:])
                c.execute('tar xf {0}'.format(pathname), cwd=dirname)

        # Place files.
        for pathname, f in sorted(self.files.iteritems()):
            c.directory(os.path.dirname(pathname),
                        group='root',
                        mode='755',
                        owner='root',
                        recursive=True)
            if '120000' == f['mode'] or '120777' == f['mode']:
                c.link(pathname,
                       owner=f['owner'],
                       group=f['group'],
                       to=f['content'])
                continue
            content = f['content']
            if 'base64' == f['encoding']:
                content = base64.b64decode(content)
            c.file(pathname, content,
                   owner=f['owner'],
                   group=f['group'],
                   mode=f['mode'][-4:],
                   backup=False,
                   source=pathname[1:])

        # Install packages.
        def before(manager):
            if 'apt' == manager.name:
                c.execute('apt-get -q update')
        def package(manager, package, version):
            if manager.name == package:
                return

            if 'apt' == manager.name:
                c.apt_package(package, version=version)
                match = re.match(r'^rubygems(\d+\.\d+(?:\.\d+)?)$', package)
                if match is not None and rubygems_update():
                    c.execute('/usr/bin/gem{0} install --no-rdoc --no-ri '
                              'rubygems-update'.format(match.group(1)))
                    c.execute('/usr/bin/ruby{0} '
                              '$(PATH=$PATH:/var/lib/gems/{0}/bin '
                              'which update_rubygems)'.format(match.group(1)))

            # Gems for any other Ruby version become gem_package resources
            # pinned to the right gem binary.
            elif re.search(r'ruby', manager.name) is not None:
                match = re.match(r'^ruby(?:gems)?(\d+\.\d+(?:\.\d+)?)',
                                 manager.name)
                c.gem_package(package,
                    gem_binary='/usr/bin/gem{0}'.format(match.group(1)),
                    version=version)

            # Everything else is an execute resource.
            else:
                c.execute(manager(package, version))

        self.walk(before=before, package=package)

        return c
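
    # Usage sketch, mirroring sh() above; the chef.Cookbook output API
    # isn't shown in this file, so rendering is left to the caller:
    #
    #   c = Blueprint(name='example').chef()
    #   # c holds directory, file, link, execute and package resources in
    #   # dependency order, ready to be written out as a cookbook.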
    def puppet(self):
        """
        Generate Puppet code.
        """
        m = puppet.Manifest(self.name, comment=self.DISCLAIMER)

        # Set the default PATH for exec resources.
        m.add(puppet.Exec.defaults(path=os.environ['PATH']))

        # Extract source tarballs.
        with context_managers.breakout():
            for dirname, filename in sorted(self.sources.iteritems()):
                commit = self.repo[self._sha]
                blob = self.repo[self.repo[commit.tree][filename][1]]
                pathname = os.path.join('/tmp', filename)
                m['sources'].add(puppet.File(
                    pathname,
                    self.name,
                    blob.data,
                    owner='root',
                    group='root',
                    mode='0644',
                    source='puppet:///{0}/{1}'.format(self.name,
                                                      pathname[1:])))
                m['sources'].add(puppet.Exec(
                    'tar xf {0}'.format(pathname),
                    cwd=dirname,
                    require=puppet.File.ref(pathname)))

        # Place files.
        if 0 < len(self.files):
            for pathname, f in sorted(self.files.iteritems()):

                # Create resources for parent directories and let the
                # autorequire mechanism work out dependencies.
                dirnames = os.path.dirname(pathname).split('/')[1:]
                for i in xrange(len(dirnames)):
                    m['files'].add(puppet.File(
                        os.path.join('/', *dirnames[0:i+1]),
                        ensure='directory'))

                # Create the actual file resource.
                if '120000' == f['mode'] or '120777' == f['mode']:
                    m['files'].add(puppet.File(pathname,
                                               None,
                                               None,
                                               owner=f['owner'],
                                               group=f['group'],
                                               ensure=f['content']))
                    continue
                content = f['content']
                if 'base64' == f['encoding']:
                    content = base64.b64decode(content)
                m['files'].add(puppet.File(pathname,
                                           self.name,
                                           content,
                                           owner=f['owner'],
                                           group=f['group'],
                                           mode=f['mode'][-4:],
                                           ensure='file'))

        # Install packages.
        deps = []
        def before(manager):
            deps.append(manager)
            if 'apt' != manager.name:
                return

            # Skip `apt-get update` when APT has nothing to install or
            # would only install itself.
            if 0 == len(manager):
                return
            if 1 == len(manager) and manager.name in manager:
                return
            m['packages'].add(puppet.Exec('apt-get -q update',
                                          before=puppet.Class.ref('apt')))
        def package(manager, package, version):

            # APT is easy since it's the default.
            if 'apt' == manager.name:
                m['packages'][manager].add(puppet.Package(package,
                                                          ensure=version))

                # If APT is installing RubyGems, get complicated.
                match = re.match(r'^rubygems(\d+\.\d+(?:\.\d+)?)$', package)
                if match is not None and rubygems_update():
                    m['packages'][manager].add(puppet.Exec('/bin/sh -c "'
                        '/usr/bin/gem{0} install --no-rdoc --no-ri '
                        'rubygems-update; '
                        '/usr/bin/ruby{0} $(PATH=$PATH:/var/lib/gems/{0}/bin '
                        'which update_rubygems)"'.format(match.group(1)),
                        require=puppet.Package.ref(package)))

            # RubyGems for Ruby 1.8 is easy, too, because Puppet has a
            # built-in provider.
            elif 'rubygems1.8' == manager.name:
                m['packages'][manager].add(puppet.Package(package,
                    ensure=version,
                    provider='gem'))

            # Other versions of RubyGems are slightly more complicated.
            elif re.search(r'ruby', manager.name) is not None:
                match = re.match(r'^ruby(?:gems)?(\d+\.\d+(?:\.\d+)?)',
                                 manager.name)
                m['packages'][manager].add(puppet.Exec(
                    manager(package, version),
                    creates='{0}/{1}/gems/{2}-{3}'.format(rubygems_path(),
                                                          match.group(1),
                                                          package,
                                                          version)))

            # Python and everything else fall through to a plain Exec
            # resource: their install layouts are less predictable than
            # RubyGems', so there's no `creates` path to guard on.
            else:
                m['packages'][manager].add(puppet.Exec(
                    manager(package, version)))

        self.walk(before=before, package=package)
        m['packages'].dep(*[puppet.Class.ref(dep) for dep in deps])

        # Strict ordering of classes.
        deps = []
        if 0 < len(self.sources):
            deps.append('sources')
        if 0 < len(self.files):
            deps.append('files')
        if 0 < len(self.packages):
            deps.append('packages')
        m.dep(*[puppet.Class.ref(dep) for dep in deps])

        return m
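
    # Usage sketch, mirroring sh() and chef() above; the puppet.Manifest
    # output API isn't shown in this file:
    #
    #   m = Blueprint(name='example').puppet()
    #   # m groups resources into `sources`, `files` and `packages`
    #   # classes with strict ordering between them.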
    def __init__(self,
                 name=None,
                 owner=None,
                 author=None,
                 json=None,
                 oldskool=None,
                 sha=None):
        """
        Construct a blueprint in the new format in a backwards-compatible
        manner.
        """
        if name is not None:
            self._name = name
        if owner is not None:
            self._owner = owner
        if author is not None:
            self._author = author

        # Create a blueprint from a JSON blob.
        if json is not None:
            super(Blueprint, self).__init__(**loads(json))

        # Convert the old format to the new format.
        elif oldskool is not None:
            oldskool = loads(oldskool)
            b = oldskool.get('blueprint', oldskool)

            # Bring _files to the top level as files.  Make each file
            # be explicit about its attributes.
            def f(x):
                if not isinstance(x, dict):
                    x = dict(content=x, encoding='plain')
                x['owner'] = x.pop('_owner', 'root')
                x['group'] = x.pop('_group', 'root')
                x['mode'] = x.pop('_mode', '000644').zfill(6)
                x['encoding'] = 'plain'
                if '_base64' in x:
                    x['encoding'] = 'base64'
                    x['content'] = x.pop('_base64')
                if '_content' in x:
                    x['content'] = x.pop('_content')
                if '_target' in x:
                    x['mode'] = '120000'
                    x['content'] = x.pop('_target')
                return x
            files = b.get('_files', {})
            files = dict([(k, f(v)) for k, v in files.iteritems()])

            # Bring _packages to the top level as packages.  Make all
            # version numbers appear in an array to simplify processing.
            def f(x):
                x = x['_packages']
                for k, v in x.iteritems():
                    if isinstance(v, basestring):
                        x[k] = [v]
                return x
            packages = b.get('_packages', {})
            packages = dict([(k, Manager(k, f(v)))
                for k, v in packages.iteritems()])

            # Bring _sources to the top level as sources.
            sources = b.get('_sources', {})

            # Bring _arch to the top level as arch.
            arch = b.get('_arch', None)

            if not hasattr(self, '_name'):
                self._name = oldskool.get('name', None)
            if not hasattr(self, '_owner'):
                self._owner = oldskool.get('owner', None)
            if not hasattr(self, '_author'):
                self._author = oldskool.get('author', None)
            super(Blueprint, self).__init__(files=files,
                                            packages=packages,
                                            sources=sources,
                                            arch=arch)

        # Create a blueprint from a Git repository.
        elif name is not None:
            with context_managers.breakout():
                if sha is None:
                    sha = self.repo.refs[os.path.join('refs/heads', self.name)]
                self._sha = sha
                commit = self.repo[sha]
                self._author = commit.author
                blob = self.repo[self.repo[commit.tree]["blueprint.json"][1]]
                super(Blueprint, self).__init__(**loads(blob.data))
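
    # Construction modes, for reference (keyword arguments as defined above):
    #
    #   Blueprint(name='web')                  # latest commit of refs/heads/web
    #   Blueprint(name='web', sha=sha)         # a specific commit
    #   Blueprint(json=blob)                   # a blueprint(5)-format JSON blob
    #   Blueprint(oldskool=blob)               # convert the old JSON format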