Ejemplo n.º 1
0
def builddoc(builders='', rootdir=None, sphinxopts='', apidoc='false'):
    """
    Build documentation. Requires Sphinx.

    :param builders: List of document builders separated with comma.
        Defaults to 'html' when empty.
    :param rootdir: Directory where to generate documents, per builder (optional)
    :param sphinxopts: Additional parameters to pass to Sphinx. Defaults to '-E'
        (don't reuse a saved environment)
    :param apidoc: If true, runs also apidoc. Defaults to false.
    """
    # NOTE: Fabric task parameters always arrive as strings
    builders = builders.split(',') if builders else ['html']
    rootdir = rootdir if rootdir else build_join('docs')
    sphinxopts = sphinxopts or '-E'
    sourcedir = rel_join('docs')
    pysourcedir = rel_join('plugins/multiproject/multiproject')
    # Test code is excluded from the generated API reference
    exclude_paths = ['tests', 'core/test', 'core/tests']

    for builder in builders:
        # Each builder writes into its own subdirectory under rootdir
        docbuild = os.path.join(rootdir, builder)
        if not os.path.exists(docbuild):
            os.makedirs(docbuild)

        if get_bool_str(apidoc):
            # Regenerate the API reference from scratch
            local('rm -rf %s' % join(sourcedir, 'reference'))
            local('sphinx-apidoc -o %s/reference %s %s' %
                  (sourcedir, pysourcedir, ' '.join(exclude_paths)))
        local('sphinx-build %s -b %s %s %s' %
              (sphinxopts, builder, sourcedir, docbuild))

        logger.info('Built %s documentation to: %s' % (builder, docbuild))
Ejemplo n.º 2
0
def builddoc(builders='', rootdir=None, sphinxopts='', apidoc='false'):
    """
    Build documentation. Requires Sphinx.

    :param builders: List of document builders separated with comma
    :param rootdir: Directory where to generate documents, per builder (optional)
    :param sphinxopts: Additional parameters to pass to Sphinx
    :param apidoc: If true, runs also apidoc. Defaults to false.
    """
    # Normalize the string-typed Fabric parameters
    selected_builders = builders.split(',') if builders else ['html']
    target_root = rootdir if rootdir else build_join('docs')
    opts = sphinxopts or '-E'
    sourcedir = rel_join('docs')
    pysourcedir = rel_join('plugins/multiproject/multiproject')
    exclude_paths = ['tests', 'core/test', 'core/tests']

    for builder in selected_builders:
        # One output directory per builder
        docbuild = os.path.join(target_root, builder)
        if not os.path.exists(docbuild):
            os.makedirs(docbuild)

        if get_bool_str(apidoc):
            # Wipe and regenerate the API reference sources
            local('rm -rf %s' % join(sourcedir, 'reference'))
            local('sphinx-apidoc -o %s/reference %s %s' %
                  (sourcedir, pysourcedir, ' '.join(exclude_paths)))

        # Run the actual documentation build
        local('sphinx-build %s -b %s %s %s' %
              (opts, builder, sourcedir, docbuild))
        logger.info('Built %s documentation to: %s' % (builder, docbuild))
Ejemplo n.º 3
0
def clean(ext='false'):
    """
    Clean up the generated files and temp dirs.
    :param ext:
        Cleanup even the external, downloaded packages. Default is 'false'.
        This should be only needed when the resource has been changed,
        thus a complete rebuild is needed.
    """
    # Remove the dist and build trees (missing dirs are tolerated)
    for tree in (DIST_DIR, BUILD_DIR):
        shutil.rmtree(tree, ignore_errors=True)

    # Drop the per-plugin dist directories produced by setup.py
    for plugin_dir in PLUGIN_DIRS:
        plugin_dist = os.path.join(PROJECT_DIR, plugin_dir, 'dist')
        shutil.rmtree(plugin_dist, ignore_errors=True)

    # Optionally drop the downloaded external resources as well
    if get_bool_str(ext):
        for res in ext_resources:
            shutil.rmtree(get_ext_path(res.name), ignore_errors=True)
Ejemplo n.º 4
0
def clean(ext='false'):
    """
    Clean up the generated files and temp dirs.
    :param ext:
        Cleanup even the external, downloaded packages. Default is 'false'.
        This should be only needed when the resource has been changed,
        thus a complete rebuild is needed.
    """
    # Clean dist dir (ignore_errors: the directory may not exist)
    shutil.rmtree(DIST_DIR, ignore_errors=True)

    # Clean build dir
    shutil.rmtree(BUILD_DIR, ignore_errors=True)
    # Remove each plugin's setup.py output directory
    for plugin_dir in PLUGIN_DIRS:
        shutil.rmtree(os.path.join(PROJECT_DIR, plugin_dir, 'dist'),
                      ignore_errors=True)

    # Optionally remove the downloaded external resources too
    if get_bool_str(ext):
        for res in ext_resources:
            res_path = get_ext_path(res.name)
            shutil.rmtree(res_path, ignore_errors=True)
Ejemplo n.º 5
0
def build(release='false', compress='false', docs='', pkgs='tar', version='', ext='true',
          extbranch='master'):
    """
    Create distributable packages. Builds eggs and tar.gz compressed packages, based on
    parameters. Also capable of downloading and patching external dependencies.

    :param release:
        Make release build or not. Release sets/increments the version number. Default 'false'
    :param compress:
        Compress js/css files or not. Default 'false'
    :param docs:
        Names of the documentation targets to build. Default '' means no doc building
    :param pkgs:
        Package formats to build sources into, separated with space. Valid values: tar deb rpm
    :param version:
        Version number to set for whole package. Default '' -> take the version from VERSION.txt
        (or default to 1.0.0)
    :param ext:
        Build and include external modules into big package. Default is 'true'.
        If ext is 'all', builds also other than own forks (GitResources).
    :param extbranch:
        Defines from which branch the fork packages are to be built from.

    Examples::

        fab dist.build
        fab dist.build:release=true,docs=html
        fab dist.build:compress=true,version=1.2.3,pkgs="deb tar rpm"

    .. NOTE:: Python modules get their version number from setup.py

    """
    # NOTE: Fabric parameters are always in string format

    # Get the list of package formats (space delimiter)
    pkg_formats = pkgs.split(' ')

    # Determine the version: parameter vs. VERSION.txt vs. default
    if not version:
        version_path = os.path.join(PROJECT_DIR, 'VERSION.txt')
        if os.path.exists(version_path):
            # With an empty version, set_version_in_file returns the value
            # already stored in the file
            version = set_version_in_file(version_path, version)
        else:
            version = '1.0.0'

    # Create package name from pkg name and version
    package_name = '%s-%s' % (PKG_NAME, version)
    pkg_join = lambda *path: join(BUILD_DIR, package_name, *path)

    logger.info('Preparing build env...')

    # Copy relevant files to build dir (so that they can be edited directly).
    # BUGFIX: iterate a filtered copy instead of del-mutating the module-level
    # SRC_DIRS list; the in-place del broke a second build() call in the same
    # process (ValueError from .index on the already-removed entries).
    shutil.rmtree(BUILD_DIR, ignore_errors=True)
    for src_dir in [d for d in SRC_DIRS if d not in ('libs', 'etc')]:
        shutil.copytree(src_dir, pkg_join(src_dir))

    if not os.path.exists(DIST_DIR):
        os.makedirs(DIST_DIR)

    # Copy additional files
    shutil.copy('README.rst', pkg_join('README'))
    os.makedirs(pkg_join('scripts'))
    shutil.copy(rel_join('scripts/deploy.sh'), pkg_join('scripts/deploy.sh'))
    shutil.copy(rel_join('scripts/update.py'), pkg_join('scripts/update.py'))
    shutil.copytree(rel_join('scripts/hooks'), pkg_join('scripts/hooks'))
    shutil.copytree(rel_join('scripts/cron'), pkg_join('scripts/cron'))

    # Build documentation
    if docs:
        # List the target formats/builders
        builddoc(docs, pkg_join('docs'))
    else:
        # Ensure there is at least empty directory (for archive)
        os.makedirs(pkg_join('docs'))

    # Build configuration
    buildetc(outdir=pkg_join('etc'), section='DEFAULT')

    # Increment version of each plugin if making a release
    if get_bool_str(release):
        logger.info('Setting/incrementing version numbers...')
        for setuppy_path in get_files(pkg_join('plugins'), 'setup.py', recursive=True):
            # Check if plugin folder contains VERSION.txt (non-versioned file)
            version_path = os.path.join(os.path.dirname(setuppy_path), 'VERSION.txt')
            if not os.path.exists(version_path):
                logger.warning('VERSION.txt missing, using version found in setup.py')
                version_path = setuppy_path

            # Set version information in file.
            # NOTE: If version is empty, it is determined from version file (either VERSION.txt or setup.py)
            set_version_in_file(version_path, version)

    # Optional compress (edits copied files under build)
    if get_bool_str(compress):
        logger.info('Compressing files...')

        # warn_only: a failing yui-compressor run must not abort the build
        with settings(warn_only=True):
            # Compress theme and plugin resources in place (same order as
            # before: themes css, themes js, plugins css, plugins js)
            for resdir in ('themes', 'plugins'):
                for pattern in ('*.css', '*.js'):
                    for respath in get_files(pkg_join(resdir), pattern, recursive=True):
                        local('yui-compressor --charset utf-8 -o %s %s' % (respath, respath))

        # Aggregate js+css resources into bundle
        for template_path in get_files(pkg_join('themes'), 'resources.html', recursive=True):
            logger.info('Template path: %s' % template_path)
            bundle(template_path, pkg_join('themes/default/htdocs'))

        logger.info('Compression completed.')

    # Build eggs and source packages (in build dir)
    logger.info('Laying eggs and source packages...')
    with lcd(pkg_join()):
        for plugin_dir in PLUGIN_DIRS:
            with lcd(plugin_dir):
                local('python setup.py bdist_egg')
                local('python setup.py sdist')

    # Build external plugins as well, optionally even non-fork plugins
    # Retrieve and build external plugins and copy the artifacts into plugins folder.
    # NOTE: Next egg copying will put them into correct place, no need to rerun the file copy
    allext = 'true' if ext.lower() == 'all' else 'false'
    # BUGFIX: 'allext' is always a non-empty string ('true'/'false'), so using
    # it directly as a truth value made this condition unconditionally True;
    # compare against 'true' so ext='false' really skips the external build.
    if get_bool_str(ext) or allext == 'true':
        buildext(allext=allext, branch=extbranch)
        for egg in get_files(build_join('ext'), '*.egg', recursive=True):
            shutil.copy(egg, pkg_join('plugins', os.path.basename(egg)))

    # Copy eggs and sdisted files from plugins directory to dist and plugin directories
    for egg in get_files(pkg_join('plugins/multiproject'), '*.egg', recursive=True):
        shutil.copy(egg, dist_join(os.path.basename(egg)))
        shutil.copy(egg, pkg_join('plugins'))

    for targz in get_files(pkg_join('plugins'), '*.tar.gz', recursive=True):
        shutil.copy(targz, dist_join(os.path.basename(targz)))

    # Create dist if not available
    if not os.path.exists(DIST_DIR):
        os.makedirs(DIST_DIR)

    # Create one big package to contain 'em all
    if 'tar.gz' in pkg_formats or 'tar' in pkg_formats:
        logger.info('Creating complete .tar.gz package...')
        # TODO: Archive could be implemented in pure python
        # TODO: These patterns seem to assume build dir == project dir
        exclude_patterns = [
            '.*', 'tests', 'documents', '*.egg-info', 'ext/libs', 'ext/plugins',
            'sample', 'build', 'plugins/multiproject'
        ]
        exclude_param = ' '.join(['--exclude=%s' % pt for pt in exclude_patterns])
        with lcd(BUILD_DIR):
            #local('tar -czf %s.tar.gz --exclude-vcs %s %s' %
            #    (dist_join(package_name), exclude_param, package_name))
            local('tar -czf %s.tar.gz %s' %
                  (dist_join(package_name), package_name))

    # Debian package
    if 'deb' in pkg_formats:
        logger.info('Creating .deb package...')
        try:
            from stdeb import command
        except ImportError:
            command = None
            # BUGFIX: typo 'stddep' -> 'stdeb' in the error message
            abort('Module stdeb (http://pypi.python.org/pypi/stdeb) was not found, cannot build .deb package')

        # Run setup.py bdist_deb inside each plugin. It generates deb_dist/<pkgname>/ directory
        for setuppy_path in get_files(os.path.abspath(pkg_join('plugins')), 'setup.py', recursive=True):
            plugin_dir = os.path.dirname(setuppy_path)

            with settings(hide('stdout', 'stderr')):
                with lcd(plugin_dir):
                    local('python setup.py --command-packages=stdeb.command bdist_deb')

                # Package command needs to be run inside the generated folder. Find it and run the command
                for debdist_path in get_files(os.path.join(plugin_dir, 'deb_dist'), 'setup.py', recursive=True):
                    with lcd(os.path.dirname(debdist_path)):
                        local('dpkg-buildpackage -rfakeroot -uc -us')

        # Copy .deb packages to dist
        for deb_path in get_files(pkg_join('plugins'), '*.deb', recursive=True):
            shutil.copy(deb_path, dist_join(os.path.basename(deb_path)))

    # Redhat package
    if 'rpm' in pkg_formats:
        logger.info('Creating .rpm package...')

        with settings(hide('stdout', 'running')):
            # Run setup.py bdist_rpm inside each plugin
            for setuppy_path in get_files(os.path.abspath(pkg_join('plugins')), 'setup.py', recursive=True):
                plugin_dir = os.path.dirname(setuppy_path)
                with lcd(plugin_dir):
                    local('python setup.py bdist_rpm')

            # Copy .rpm packages to dist
            for rpm_path in get_files(pkg_join('plugins'), '*.rpm', recursive=True):
                shutil.copy(rpm_path, dist_join(os.path.basename(rpm_path)))

    logger.info('Building completed.')
Ejemplo n.º 6
0
def buildext(allext='false', patch='true', branch='master'):
    """
    Build and optionally patch the 3rd party modules and libraries.
    The outcome (tar.gz/egg) files are placed in dist directory

    :param allext:
        Download also non-GitResources. Default is 'false'.
    :param patch:
        Patch those plugins having patches, currently, trac and gitosis.
    :param branch:
        For GitResources, selects the branch to be used. Default is 'master'.

    .. NOTE::

        If you want to include the external release into multiproject-all package, run the build
        task with parameters::

            fab dist.build:ext=true

    """
    allext = get_bool_str(allext)
    # Construct and create building directory for external resources
    extbuild = build_join('ext')
    shutil.rmtree(extbuild, ignore_errors=True)
    os.makedirs(extbuild)

    for res in ext_resources:
        res_path = get_ext_path(res.name)
        is_git = isinstance(res, GitResource)
        # Non-git resources are only handled when allext is requested
        if not is_git and not allext:
            continue
        logger.info('Starting to download / fetch resource %s' % res.name)

        must_retrieve = False
        # Marker file records the resource description used for the last fetch
        resource_id_file = join(res_path, '.fabric_resource_id.txt')
        res_lines = [line.strip() for line in str(res).split(',')]
        res_lines.append('# This is a file used by fabric dist.buildext command.')
        if not os.path.exists(res_path):
            must_retrieve = True
        else:
            # Check folder contents.
            # If the fetch identifier is missing, it is assumed to be the correct one.
            if os.path.exists(resource_id_file):
                # BUGFIX: close the marker file after reading (handle was leaked)
                with open(resource_id_file, 'r') as idfile:
                    prev_lines = [line.strip() for line in idfile]
                if res_lines != prev_lines:
                    logger.warning('Resource %s has been changed, retrieving it.' % res.name)
                    logger.info('Previous resource: %s' % prev_lines)
                    logger.info('Current resource:  %s' % res_lines)
                    must_retrieve = True
            if not get_files(os.path.abspath(res_path), 'setup.py', recursive=True):
                must_retrieve = True
        if must_retrieve:
            shutil.rmtree(res_path, ignore_errors=True)
            os.makedirs(res_path)
            res.retrieve(res_path)
            # Record what was fetched so later runs can detect changes
            with open(resource_id_file, 'w') as outfile:
                outfile.writelines([line + '\n' for line in res_lines])
        else:
            logger.warning('Resource %s was already retrieved.' % res.name)

        if is_git:
            # The GitResources are always updated
            if not os.path.exists(join(res_path, '.git')):
                raise Exception('GitResource in %s is invalid. Run `fab dist.clean:ext=true`' % res_path)
            with lcd(res_path):
                local('git fetch')

        # Else, we assume that the resource has been already retrieved
        # Copy the files into ext build dir
        ext_build_dir = join(extbuild, res.name)
        shutil.copytree(res_path, ext_build_dir)
        if is_git:
            with lcd(ext_build_dir):
                logger.info('For %s, git checkout %s' % (res.name, branch))
                local('git checkout %s' % branch)
                local('git merge origin/%s' % branch)

    # Now the plugin files are inside 'build/ext/', and we can continue

    # Work in build directory
    with lcd(extbuild):
        # Apply patches
        if get_bool_str(patch) and allext:

            # Patch Trac
            logger.info('Patching Trac...')
            with lcd(join(extbuild, 'trac')):
                # BUGFIX: loop variable renamed from 'patch' to 'patch_file' so
                # it no longer shadows (and clobbers) the 'patch' parameter
                for patch_file in get_files(join(PROJECT_DIR, 'ext/patches/trac'), '*.patch', recursive=True):
                    local('patch --ignore-whitespace -p0 -i %s' % patch_file)

            # Patch Gitosis
            logger.info('Patching Gitosis...')
            with lcd(join(extbuild, 'gitosis')):
                for patch_file in get_files(join(PROJECT_DIR, 'ext/patches/gitosis'), '*.patch', recursive=True):
                    local('patch --ignore-whitespace -p0 -i %s' % patch_file)

    # Build eggs (in build dir)
    logger.info('Laying eggs and source dists...')
    # Iterate folders where setup.py can be found
    for setuppy_path in get_files(os.path.abspath(extbuild), 'setup.py', recursive=True):
        plugin_dir = os.path.dirname(setuppy_path)
        logger.info('Building package for %s' % os.path.basename(plugin_dir))
        with lcd(plugin_dir):
            local('python setup.py bdist_egg')
            local('python setup.py sdist')

    # Copy distributable files to dist
    if not os.path.exists(DIST_DIR):
        os.makedirs(DIST_DIR)

    for egg in get_files(os.path.abspath(extbuild), '*.egg', recursive=True):
        shutil.copy(egg, join(DIST_DIR, os.path.basename(egg)))

    for targz in get_files(os.path.abspath(extbuild), '*.tar.gz', recursive=True):
        shutil.copy(targz, join(DIST_DIR, os.path.basename(targz)))
Ejemplo n.º 7
0
def build(release='false', compress='false', docs='', pkgs='tar', version='', ext='true',
          extbranch='master'):
    """
    Create distributable packages. Builds eggs and tar.gz compressed packages, based on
    parameters. Also capable of downloading and patching external dependencies.

    :param release:
        Make release build or not. Release sets/increments the version number. Default 'false'
    :param compress:
        Compress js/css files or not. Default 'false'
    :param docs:
        Names of the documentation targets to build. Default '' means no doc building
    :param pkgs:
        Package formats to build sources into, separated with space. Valid values: tar deb rpm
    :param version:
        Version number to set for whole package. Default '' -> take the version from VERSION.txt
        (or default to 1.0.0)
    :param ext:
        Build and include external modules into big package. Default is 'true'.
        If ext is 'all', builds also other than own forks (GitResources).
    :param extbranch:
        Defines from which branch the fork packages are to be built from.

    Examples::

        fab dist.build
        fab dist.build:release=true,docs=html
        fab dist.build:compress=true,version=1.2.3,pkgs="deb tar rpm"

    .. NOTE:: Python modules get their version number from setup.py

    """
    # NOTE: Fabric parameters are always in string format

    # Get the list of package formats (space delimiter)
    pkg_formats = pkgs.split(' ')

    # Determine the version: parameter vs. VERSION.txt vs. default
    if not version:
        version_path = os.path.join(PROJECT_DIR, 'VERSION.txt')
        if os.path.exists(version_path):
            # With an empty version, set_version_in_file returns the value
            # already stored in the file
            version = set_version_in_file(version_path, version)
        else:
            version = '1.0.0'

    # Create package name from pkg name and version
    package_name = '%s-%s' % (PKG_NAME, version)
    pkg_join = lambda *path: join(BUILD_DIR, package_name, *path)

    logger.info('Preparing build env...')

    # Copy relevant files to build dir (so that they can be edited directly).
    # BUGFIX: iterate a filtered copy instead of del-mutating the module-level
    # SRC_DIRS list; the in-place del broke a second build() call in the same
    # process (ValueError from .index on the already-removed entries).
    shutil.rmtree(BUILD_DIR, ignore_errors=True)
    for src_dir in [d for d in SRC_DIRS if d not in ('libs', 'etc')]:
        shutil.copytree(src_dir, pkg_join(src_dir))

    if not os.path.exists(DIST_DIR):
        os.makedirs(DIST_DIR)

    # Copy additional files
    shutil.copy('README.rst', pkg_join('README'))
    os.makedirs(pkg_join('scripts'))
    shutil.copy(rel_join('scripts/deploy.sh'), pkg_join('scripts/deploy.sh'))
    shutil.copy(rel_join('scripts/update.py'), pkg_join('scripts/update.py'))
    shutil.copytree(rel_join('scripts/hooks'), pkg_join('scripts/hooks'))
    shutil.copytree(rel_join('scripts/cron'), pkg_join('scripts/cron'))

    # Build documentation
    if docs:
        # List the target formats/builders
        builddoc(docs, pkg_join('docs'))
    else:
        # Ensure there is at least empty directory (for archive)
        os.makedirs(pkg_join('docs'))

    # Build configuration
    buildetc(outdir=pkg_join('etc'), section='DEFAULT')

    # Increment version of each plugin if making a release
    if get_bool_str(release):
        logger.info('Setting/incrementing version numbers...')
        for setuppy_path in get_files(pkg_join('plugins'), 'setup.py', recursive=True):
            # Check if plugin folder contains VERSION.txt (non-versioned file)
            version_path = os.path.join(os.path.dirname(setuppy_path), 'VERSION.txt')
            if not os.path.exists(version_path):
                logger.warning('VERSION.txt missing, using version found in setup.py')
                version_path = setuppy_path

            # Set version information in file.
            # NOTE: If version is empty, it is determined from version file (either VERSION.txt or setup.py)
            set_version_in_file(version_path, version)

    # Optional compress (edits copied files under build)
    if get_bool_str(compress):
        logger.info('Compressing files...')

        # warn_only: a failing yui-compressor run must not abort the build
        with settings(warn_only=True):
            # Compress theme and plugin resources in place (same order as
            # before: themes css, themes js, plugins css, plugins js)
            for resdir in ('themes', 'plugins'):
                for pattern in ('*.css', '*.js'):
                    for respath in get_files(pkg_join(resdir), pattern, recursive=True):
                        local('yui-compressor --charset utf-8 -o %s %s' % (respath, respath))

        # Aggregate js+css resources into bundle
        for template_path in get_files(pkg_join('themes'), 'resources.html', recursive=True):
            logger.info('Template path: %s' % template_path)
            bundle(template_path, pkg_join('themes/default/htdocs'))

        logger.info('Compression completed.')

    # Build eggs and source packages (in build dir)
    logger.info('Laying eggs and source packages...')
    with lcd(pkg_join()):
        for plugin_dir in PLUGIN_DIRS:
            with lcd(plugin_dir):
                local('python setup.py bdist_egg')
                local('python setup.py sdist')

    # Build external plugins as well, optionally even non-fork plugins
    # Retrieve and build external plugins and copy the artifacts into plugins folder.
    # NOTE: Next egg copying will put them into correct place, no need to rerun the file copy
    allext = 'true' if ext.lower() == 'all' else 'false'
    # BUGFIX: 'allext' is always a non-empty string ('true'/'false'), so using
    # it directly as a truth value made this condition unconditionally True;
    # compare against 'true' so ext='false' really skips the external build.
    if get_bool_str(ext) or allext == 'true':
        buildext(allext=allext, branch=extbranch)
        for egg in get_files(build_join('ext'), '*.egg', recursive=True):
            shutil.copy(egg, pkg_join('plugins', os.path.basename(egg)))

    # Copy eggs and sdisted files from plugins directory to dist and plugin directories
    for egg in get_files(pkg_join('plugins/multiproject'), '*.egg', recursive=True):
        shutil.copy(egg, dist_join(os.path.basename(egg)))
        shutil.copy(egg, pkg_join('plugins'))

    for targz in get_files(pkg_join('plugins'), '*.tar.gz', recursive=True):
        shutil.copy(targz, dist_join(os.path.basename(targz)))

    # Create dist if not available
    if not os.path.exists(DIST_DIR):
        os.makedirs(DIST_DIR)

    # Create one big package to contain 'em all
    if 'tar.gz' in pkg_formats or 'tar' in pkg_formats:
        logger.info('Creating complete .tar.gz package...')
        # TODO: Archive could be implemented in pure python
        # TODO: These patterns seem to assume build dir == project dir
        exclude_patterns = [
            '.*', 'tests', 'documents', '*.egg-info', 'ext/libs', 'ext/plugins',
            'sample', 'build', 'plugins/multiproject'
        ]
        exclude_param = ' '.join(['--exclude=%s' % pt for pt in exclude_patterns])
        with lcd(BUILD_DIR):
            #local('tar -czf %s.tar.gz --exclude-vcs %s %s' %
            #    (dist_join(package_name), exclude_param, package_name))
            local('tar -czf %s.tar.gz %s' %
                  (dist_join(package_name), package_name))

    # Debian package
    if 'deb' in pkg_formats:
        logger.info('Creating .deb package...')
        try:
            from stdeb import command
        except ImportError:
            command = None
            # BUGFIX: typo 'stddep' -> 'stdeb' in the error message
            abort('Module stdeb (http://pypi.python.org/pypi/stdeb) was not found, cannot build .deb package')

        # Run setup.py bdist_deb inside each plugin. It generates deb_dist/<pkgname>/ directory
        for setuppy_path in get_files(os.path.abspath(pkg_join('plugins')), 'setup.py', recursive=True):
            plugin_dir = os.path.dirname(setuppy_path)

            with settings(hide('stdout', 'stderr')):
                with lcd(plugin_dir):
                    local('python setup.py --command-packages=stdeb.command bdist_deb')

                # Package command needs to be run inside the generated folder. Find it and run the command
                for debdist_path in get_files(os.path.join(plugin_dir, 'deb_dist'), 'setup.py', recursive=True):
                    with lcd(os.path.dirname(debdist_path)):
                        local('dpkg-buildpackage -rfakeroot -uc -us')

        # Copy .deb packages to dist
        for deb_path in get_files(pkg_join('plugins'), '*.deb', recursive=True):
            shutil.copy(deb_path, dist_join(os.path.basename(deb_path)))

    # Redhat package
    if 'rpm' in pkg_formats:
        logger.info('Creating .rpm package...')

        with settings(hide('stdout', 'running')):
            # Run setup.py bdist_rpm inside each plugin
            for setuppy_path in get_files(os.path.abspath(pkg_join('plugins')), 'setup.py', recursive=True):
                plugin_dir = os.path.dirname(setuppy_path)
                with lcd(plugin_dir):
                    local('python setup.py bdist_rpm')

            # Copy .rpm packages to dist
            for rpm_path in get_files(pkg_join('plugins'), '*.rpm', recursive=True):
                shutil.copy(rpm_path, dist_join(os.path.basename(rpm_path)))

    logger.info('Building completed.')
Ejemplo n.º 8
0
def buildext(allext='false', patch='true', branch='master'):
    """
    Build and optionally patch the 3rd party modules and libraries.
    The outcome (tar.gz/egg) files are placed in dist directory

    :param allext:
        Download also non-GitResources. Default is 'false'.
    :param patch:
        Patch those plugins having patches, currently, trac and gitosis.
    :param branch:
        For GitResources, selects the branch to be used. Default is 'master'.

    .. NOTE::

        If you want to include the external release into multiproject-all package, run the build
        task with parameters::

            fab dist.build:ext=true

    """
    allext = get_bool_str(allext)
    # Construct and create building directory for external resources
    extbuild = build_join('ext')
    shutil.rmtree(extbuild, ignore_errors=True)
    os.makedirs(extbuild)

    for res in ext_resources:
        res_path = get_ext_path(res.name)
        is_git = isinstance(res, GitResource)
        # Non-git resources are only handled when allext is requested
        if not is_git and not allext:
            continue
        logger.info('Starting to download / fetch resource %s' % res.name)

        must_retrieve = False
        # Marker file records the resource description used for the last fetch
        resource_id_file = join(res_path, '.fabric_resource_id.txt')
        res_lines = [line.strip() for line in str(res).split(',')]
        res_lines.append('# This is a file used by fabric dist.buildext command.')
        if not os.path.exists(res_path):
            must_retrieve = True
        else:
            # Check folder contents.
            # If the fetch identifier is missing, it is assumed to be the correct one.
            if os.path.exists(resource_id_file):
                # BUGFIX: close the marker file after reading (handle was leaked)
                with open(resource_id_file, 'r') as idfile:
                    prev_lines = [line.strip() for line in idfile]
                if res_lines != prev_lines:
                    logger.warning('Resource %s has been changed, retrieving it.' % res.name)
                    logger.info('Previous resource: %s' % prev_lines)
                    logger.info('Current resource:  %s' % res_lines)
                    must_retrieve = True
            if not get_files(os.path.abspath(res_path), 'setup.py', recursive=True):
                must_retrieve = True
        if must_retrieve:
            shutil.rmtree(res_path, ignore_errors=True)
            os.makedirs(res_path)
            res.retrieve(res_path)
            # Record what was fetched so later runs can detect changes
            with open(resource_id_file, 'w') as outfile:
                outfile.writelines([line + '\n' for line in res_lines])
        else:
            logger.warning('Resource %s was already retrieved.' % res.name)

        if is_git:
            # The GitResources are always updated
            if not os.path.exists(join(res_path, '.git')):
                raise Exception('GitResource in %s is invalid. Run `fab dist.clean:ext=true`' % res_path)
            with lcd(res_path):
                local('git fetch')

        # Else, we assume that the resource has been already retrieved
        # Copy the files into ext build dir
        ext_build_dir = join(extbuild, res.name)
        shutil.copytree(res_path, ext_build_dir)
        if is_git:
            with lcd(ext_build_dir):
                logger.info('For %s, git checkout %s' % (res.name, branch))
                local('git checkout %s' % branch)
                local('git merge origin/%s' % branch)

    # Now the plugin files are inside 'build/ext/', and we can continue

    # Work in build directory
    with lcd(extbuild):
        # Apply patches
        if get_bool_str(patch) and allext:

            # Patch Trac
            logger.info('Patching Trac...')
            with lcd(join(extbuild, 'trac')):
                # BUGFIX: loop variable renamed from 'patch' to 'patch_file' so
                # it no longer shadows (and clobbers) the 'patch' parameter
                for patch_file in get_files(join(PROJECT_DIR, 'ext/patches/trac'), '*.patch', recursive=True):
                    local('patch --ignore-whitespace -p0 -i %s' % patch_file)

            # Patch Gitosis
            logger.info('Patching Gitosis...')
            with lcd(join(extbuild, 'gitosis')):
                for patch_file in get_files(join(PROJECT_DIR, 'ext/patches/gitosis'), '*.patch', recursive=True):
                    local('patch --ignore-whitespace -p0 -i %s' % patch_file)

    # Build eggs (in build dir)
    logger.info('Laying eggs and source dists...')
    # Iterate folders where setup.py can be found
    for setuppy_path in get_files(os.path.abspath(extbuild), 'setup.py', recursive=True):
        plugin_dir = os.path.dirname(setuppy_path)
        logger.info('Building package for %s' % os.path.basename(plugin_dir))
        with lcd(plugin_dir):
            local('python setup.py bdist_egg')
            local('python setup.py sdist')

    # Copy distributable files to dist
    if not os.path.exists(DIST_DIR):
        os.makedirs(DIST_DIR)

    for egg in get_files(os.path.abspath(extbuild), '*.egg', recursive=True):
        shutil.copy(egg, join(DIST_DIR, os.path.basename(egg)))

    for targz in get_files(os.path.abspath(extbuild), '*.tar.gz', recursive=True):
        shutil.copy(targz, join(DIST_DIR, os.path.basename(targz)))