Example #1
def autobuild(taskname='builddoc', watchdir='docs', taskopts=''):
    """
    Run the given task whenever files are changed.
    Requires pyinotify.

    :param taskname: Name of the fabric task to run on events (inside the dist namespace).
    :param watchdir: Path to the directory to watch for events, relative to the current directory.
    :param taskopts: Options to pass to the task, separated by commas.
    """
    import pyinotify

    watchdir = os.path.join(os.path.curdir, watchdir)
    wm = pyinotify.WatchManager()
    event = 'IN_CLOSE_WRITE'
    mask = getattr(pyinotify, event)

    class EventHandler(pyinotify.ProcessEvent):
        handling = False

        def process_IN_CLOSE_WRITE(self, event):
            if not self.handling:
                self.handling = True
                logger.info('Autorunning task: %s' % taskname)
                if taskopts:
                    globals()[taskname](*[opt for opt in taskopts.split(',')])
                else:
                    globals()[taskname]()
                self.handling = False

    handler = EventHandler()
    notifier = pyinotify.Notifier(wm, handler)
    wm.add_watch(watchdir, mask, rec=True)

    logger.info('Listening %s for %s events and running task: %s' % (watchdir, event, taskname))
    notifier.loop()
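
For reference, and assuming the task is exposed under the dist namespace as the docstring says, invocations would look roughly like this (illustrative, not taken from the project)::

    fab dist.autobuild
    fab dist.autobuild:taskname=builddoc,watchdir=docs
    fab dist.autobuild:builddoc,docs,html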
Example #2
def builddoc(builders='', rootdir=None, sphinxopts='', apidoc='false'):
    """
    Build documentation. Requires Sphinx.

    :param builders: List of document builders, separated by commas
    :param rootdir: Directory where the documents are generated, per builder (optional)
    :param sphinxopts: Additional parameters to pass to Sphinx
    :param apidoc: If true, also runs sphinx-apidoc. Defaults to false.
    """
    builders = builders.split(',') if builders else ['html']
    rootdir = rootdir if rootdir else build_join('docs')
    sphinxopts = sphinxopts or '-E'
    sourcedir = rel_join('docs')
    pysourcedir = rel_join('plugins/multiproject/multiproject')
    exclude_paths = ['tests', 'core/test', 'core/tests']

    for builder in builders:
        docbuild = os.path.join(rootdir, builder)
        if not os.path.exists(docbuild):
            os.makedirs(docbuild)

        if get_bool_str(apidoc):
            local('rm -rf %s' % join(sourcedir, 'reference'))
            local('sphinx-apidoc -o %s/reference %s %s' % (sourcedir, pysourcedir, ' '.join(exclude_paths)))
        local('sphinx-build %s -b %s %s %s' % (sphinxopts, builder, sourcedir, docbuild))

        logger.info('Built %s documentation to: %s' % (builder, docbuild))
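
get_bool_str is a helper from the surrounding fabfile (not shown here); since Fabric passes every task parameter as a string, it presumably just maps truthy strings to a boolean. A plausible sketch (an assumption, not the project's actual code)::

    def get_bool_str(value):
        # Fabric parameters are always strings; treat the common spellings of "true" as True
        return str(value).strip().lower() in ('true', 'yes', 'on', '1')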
Example #3
def test_perf(params=''):
    """
    Runs the performance tests against the configured service and produces the report
    in dist/

    :param str params: Parameters to pass to Funkload bench

    Examples::

        fab dist.test_perf
        fab dist.test_perf:"-c 1:15 -D 1"

    """
    try:
        import funkload
    except ImportError:
        abort('Funkload module missing, please install it first')

    # Create report folder if needed
    report_dir = dist_join('report/html')
    if not os.path.exists(report_dir):
        os.makedirs(report_dir)

    # Run the Funkload tests in perf test folder
    with lcd(rel_join('tests/perf')):
        with settings(show('running','stdout')):
            local('fl-run-bench %s tests.py MultiprojectTestCase.test_smoke' % params)
            local('fl-build-report -o %s --html smoke-bench.xml' % report_dir)

    logger.info('Testing completed. Test report can be found in: %s' % report_dir)
Example #4
def test_perf(params=''):
    """
    Runs the performance tests against the configured service and produces the report
    in dist/

    :param str params: Parameters to pass to Funkload bench

    Examples::

        fab dist.test_perf
        fab dist.test_perf:"-c 1:15 -D 1"

    """
    try:
        import funkload
    except ImportError:
        abort('Funkload module missing, please install it first')

    # Create report folder if needed
    report_dir = dist_join('report/html')
    if not os.path.exists(report_dir):
        os.makedirs(report_dir)

    # Run the Funkload tests in perf test folder
    with lcd(rel_join('tests/perf')):
        with settings(show('running', 'stdout')):
            local('fl-run-bench %s tests.py MultiprojectTestCase.test_smoke' %
                  params)
            local('fl-build-report -o %s --html smoke-bench.xml' % report_dir)

    logger.info('Testing completed. Test report can be found in: %s' %
                report_dir)
Example #5
def builddoc(builders='', rootdir=None, sphinxopts='', apidoc='false'):
    """
    Build documentation. Requires Sphinx.

    :param builders: List of document builders, separated by commas
    :param rootdir: Directory where the documents are generated, per builder (optional)
    :param sphinxopts: Additional parameters to pass to Sphinx
    :param apidoc: If true, also runs sphinx-apidoc. Defaults to false.
    """
    builders = builders.split(',') if builders else ['html']
    rootdir = rootdir if rootdir else build_join('docs')
    sphinxopts = sphinxopts or '-E'
    sourcedir = rel_join('docs')
    pysourcedir = rel_join('plugins/multiproject/multiproject')
    exclude_paths = ['tests', 'core/test', 'core/tests']

    for builder in builders:
        docbuild = os.path.join(rootdir, builder)
        if not os.path.exists(docbuild):
            os.makedirs(docbuild)

        if get_bool_str(apidoc):
            local('rm -rf %s' % join(sourcedir, 'reference'))
            local('sphinx-apidoc -o %s/reference %s %s' %
                  (sourcedir, pysourcedir, ' '.join(exclude_paths)))
        local('sphinx-build %s -b %s %s %s' %
              (sphinxopts, builder, sourcedir, docbuild))

        logger.info('Built %s documentation to: %s' % (builder, docbuild))
Example #6
def whoami():
    """
    Show whoami output both with and without sudo.
    For testing purposes.
    """
    logger.info('Running whoami with sudo: %s' % sudo('whoami'))
    logger.info('Running whoami with user: %s' % run('whoami'))
Example #7
def whoami():
    """
    Show whoami output both with and without sudo.
    For testing purposes.
    """
    logger.info('Running whoami with sudo: %s' % sudo('whoami'))
    logger.info('Running whoami with user: %s' % run('whoami'))
Example #8
def show_active_users_count(month='', year=''):
    """
    Show the number of users logged in within the selected month
    """
    # NOTE: Fabric parameters are always strings
    # Get the month's start and end date
    now = datetime.utcnow()
    month = int(month) if month else now.month
    year = int(year) if year else now.year

    weekday, endday = calendar.monthrange(year, month)
    start_date = datetime(year=year, month=month, day=1, hour=0, minute=0, second=0)
    end_date = datetime(year=year, month=month, day=endday, hour=23, minute=59, second=59)

    startstr = start_date.strftime('%Y-%m-%d %H:%M:%S')
    endstr = end_date.strftime('%Y-%m-%d %H:%M:%S')

    dbm = DatabaseManager('trac_admin')
    sql = '''
    SELECT COUNT(*)
    FROM user
    WHERE
        last_login >= TIMESTAMP('%s')
    AND
        last_login <= TIMESTAMP('%s')\G
    ''' % (startstr, endstr)

    output = dbm.run(sql, re.compile(r': (\d+)'))
    logger.info('Date range: %s - %s ' % (startstr, endstr))
    logger.info('Active users: %s' % output)
Example #9
def test(case='', config='tests.ini'):
    """
    Runs the functional tests against the setup specified in the configuration

    :param str case: Name or path to case file
    :param str config: Path to config file, relative to current directory

    Examples::

        fab dist.test:smoke
        fab dist.test:path/to/case.py
        fab dist.test:smoke,~/firefox.ini
        fab dist.test:smoke,config=path/to/config.ini

    """
    try:
        from nose.core import TestProgram
        from nose.plugins import Plugin
    except ImportError:
        TestProgram = None
        Plugin = object
        return abort('For running tests, Nose testing framework is required. Please install it first: "pip install nose"')

    if not case:
        return abort('Please provide either name or path to test case')

    # Determine the case file: name or path accepted
    webtests_dir = rel_join('tests/webtests')
    casepath = os.path.abspath(case) if case.endswith('.py') else join(webtests_dir, 'cases/%s.py' % case)
    configpath = os.path.join(os.curdir, os.path.expanduser(config))

    logger.info('Running functional tests from: %s' % casepath)
    logger.info('Reading tests configuration from: %s' % configpath)

    class TestConfigPlugin(Plugin):
        """
        Simple Nose plugin to set the test configuration path on the test case::

            class MyTestcase(unittest.TestCase):
                def setUp(self):
                    self.config_path

        """
        name = 'testconfig'
        can_configure = True
        enabled = True

        def options(self, parser, env):
            pass

        def configure(self, options, conf):
            pass

        def startTest(self, test):
            test_case = test.test.__class__
            test_case.config_path = configpath

    TestProgram(argv=['fab', casepath], addplugins=[TestConfigPlugin()])
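
For context, a test case consuming the injected path could look like the following minimal sketch (a hypothetical test, assuming only the stdlib unittest module)::

    import unittest

    class MyTestcase(unittest.TestCase):
        # config_path is set on the class by TestConfigPlugin.startTest before the test runs
        config_path = None

        def setUp(self):
            # Fail loudly if the plugin did not inject the configuration path
            assert self.config_path, 'config_path was not injected by TestConfigPlugin'

        def test_smoke(self):
            self.assertTrue(self.config_path.endswith('.ini'))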
Example #10
def show_users_count():
    """
    Show the number of registered users
    """
    dbm = DatabaseManager('trac_admin')
    sql = 'SELECT COUNT(*) FROM user\G'
    output = dbm.run(sql, re.compile(r': (\d+)'))

    logger.info('Users: %s' % output)
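
DatabaseManager is a project helper, but the second argument is a plain stdlib regex: with the \G terminator mysql prints the result vertically (``COUNT(*): 1234``), and the capture group picks out the number. For illustration only::

    import re

    sample_output = 'COUNT(*): 1234'        # what the \G (vertical) output of the query looks like
    match = re.search(r': (\d+)', sample_output)
    print match.group(1)                     # '1234'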
Example #11
 def process_IN_CLOSE_WRITE(self, event):
     if not self.handling:
         self.handling = True
         logger.info('Autorunning task: %s' % taskname)
         if taskopts:
             globals()[taskname](*[opt for opt in taskopts.split(',')])
         else:
             globals()[taskname]()
         self.handling = False
Example #12
 def process_IN_CLOSE_WRITE(self, event):
     if not self.handling:
         self.handling = True
         logger.info('Autorunning task: %s' % taskname)
         if taskopts:
             globals()[taskname](*[opt for opt in taskopts.split(',')])
         else:
             globals()[taskname]()
         self.handling = False
Example #13
def show_projects_top_weekly_downloads(week='', year='', limit='50'):
    """
    Show the number of downloads done within the selected week

    Example query::

        fab -H hostname service.show_projects_top_weekly_downloads
        fab -H hostname service.show_projects_top_weekly_downloads:week=10
        fab -H hostname service.show_projects_top_weekly_downloads:week=9,year=2011,limit=10

    """
    # NOTE: Fabric parameters are always strings
    # Get the week's start and end date
    now = datetime.utcnow()
    # isocalendar returns tuple year, week, weekday
    week = int(week) if week else now.isocalendar()[1]
    year = int(year) if year else now.year

    first_day = datetime(year=year, month=1, day=1)

    # If first day is > Thu, next week is the first week
    if first_day.isoweekday() > 4:
        first_day = first_day + timedelta(days=7-first_day.weekday())
    else:
        first_day = first_day - timedelta(days=first_day.weekday())

    start_date = first_day + timedelta(weeks=week-1)
    end_date = start_date + timedelta(weeks=1)

    startstr = start_date.strftime('%Y-%m-%d %H:%M:%S')
    endstr = end_date.strftime('%Y-%m-%d %H:%M:%S')

    dbm = DatabaseManager('trac_analytical')
    sql = '''
    SELECT project_dim.project_name AS Project, COUNT(*) AS Downloads
    FROM event_fact, project_dim
    WHERE event_sk = (
        SELECT event_sk
        FROM event_dim
        WHERE action_name = 'release_downloaded'
    )
    AND (
        timestamp >= TIMESTAMP('%(startdate)s')
    AND
        timestamp < TIMESTAMPADD(WEEK, 1, '%(startdate)s')
    )
    AND project_dim.project_sk = event_fact.project_sk
    GROUP BY event_fact.project_sk
    ORDER BY Downloads DESC
    LIMIT %(limit)s;
    ''' % dict(startdate=startstr, limit=limit)

    output = dbm.run(sql)

    logger.info('Date range: week %s, %s - %s ' % (str(week), startstr, endstr))
    logger.info('Top %s downloads:\n %s' % (limit, output))
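
The week arithmetic above can be sanity-checked against ``isocalendar()``: 2011-01-01 is a Saturday (isoweekday 6), so ISO week 1 starts on Monday 2011-01-03 and week 9 starts on 2011-02-28. A quick check, for illustration only::

    from datetime import datetime, timedelta

    first_day = datetime(2011, 1, 1)
    if first_day.isoweekday() > 4:
        first_day = first_day + timedelta(days=7 - first_day.weekday())
    start_date = first_day + timedelta(weeks=9 - 1)
    print start_date                            # 2011-02-28 00:00:00
    print datetime(2011, 2, 28).isocalendar()   # (2011, 9, 1): year, ISO week, ISO weekday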
Example #14
def buildetc(outdir=None, section=None):
    """
    Builds the configuration files based on the given configuration values in ``fabfile.ini``.

    Parameters:
        - outdir: Optional path to the directory where the built configuration files are placed. A subdirectory is created
          for each section.
        - section: Optional section name, if you want to build only one section. In this case, no subdirectory is created.

    """
    # Copy configuration files under build dir where they can be processed; config.config is a ConfigParser
    # object. config is a global object imported via fablib.api
    sections = [section] if section else config.get_setups_sections()

    # Iterate all sections unless the specific name is given
    for csection in sections:
        dest = csection.replace('setup:', '')

        # Set the target directory based on given parameters
        if section:
            build_etc = outdir
        elif outdir:
            build_etc = build_join(outdir, dest)
        else:
            build_etc = build_join('etc', dest)

        logger.info('Target directory for configuration files: %s' % build_etc)

        # Create target dir if needed
        if not os.path.exists(build_etc):
            os.makedirs(build_etc)

        # Copy the config files from etc/templates, place them in build/etc/<section> and replace the placeholders
        etc_src = rel_join('etc/templates')
        for configfile in get_files(
                etc_src,
                pattern='*.ini, *.conf, *.sql, *.sh, multiproject',
                recursive=True):
            # Copy the configuration file into build_etc directory, using the same directory structure
            configcopy = build_join(build_etc,
                                    configfile.replace(etc_src, './'))
            configcopy_dir = os.path.dirname(configcopy)
            if not os.path.exists(configcopy_dir):
                os.makedirs(configcopy_dir)
            shutil.copy(configfile, configcopy)

            # Replace placeholder in configfile
            logger.info('Building configuration file: %s' % configcopy)
            template = TemplateResource(configcopy)
            template.build(**dict(config.config.items(csection)))
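
TemplateResource is a project-specific helper; judging from the call above it opens the copied file and substitutes placeholders with the keyword arguments passed to ``build``. A rough sketch of that behaviour with stdlib ``string.Template`` (an assumption for illustration, not the actual class)::

    import io
    from string import Template

    class TemplateResource(object):
        def __init__(self, path):
            self.path = path

        def build(self, **values):
            # Read the copied configuration file, substitute ${placeholder} entries, write it back in place
            with io.open(self.path, 'r') as fd:
                content = fd.read()
            with io.open(self.path, 'w') as fd:
                fd.write(Template(content).safe_substitute(**values))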
Example #15
def create_repository(pname=None, rname=None, rtype=None):
    """
    Creates version control repository

    :param str pname: Name of project where to create repository
    :param str rname: Name of repository you want to create
    :param str rtype: Type of the repository you want to create. Valid types are: git, ...

    Example usage::

        fab system.create_repository:myproject,reponame,git

    """
    assert rtype, 'Please provide repository type. Example: git'
    assert pname, 'Please provide project name. Example: myproject'
    assert rname, 'Please provide repository name. Example: reponame'

    root_dir = config['trac_repositories_path']
    repo_dir = os.path.join(root_dir, '%s.%s' % (pname, rname))
    webserver_user = config['webserver_user']
    webserver_group = config['webserver_group']

    # Dictionary for easy-replace
    variables = {
        'rname':rname,
        'pname':pname,
        'rdir':repo_dir,
        'rtype':rtype,
    }

    if rtype == 'git':
        q = 'Creating a %(rtype)s repo "%(rname)s" in %(rdir)s. Is this right (y/n)?' % variables
        if not str(prompt(q, default='n')).lower() == 'y':
            logger.info('Aborting.')
            return

        logger.info('Creating git repository')
        sudo('git --bare --git-dir=%(rdir)s init --shared=true' % variables)
        sudo('git --git-dir=%(rdir)s update-server-info' % variables)
        sudo('chown -R %s:%s %s' % (webserver_user, webserver_group, repo_dir))

        logger.warn('Manual step required. Add the following to the project trac.ini')
        print '[repositories]'
        print '%(rname)s.dir = %(rdir)s' % variables
        print '%(rname)s.type = %(rtype)s' % variables
        print ''

    else:
        raise NotImplementedError('Repository type not implemented')
Example #16
def create_repository(pname=None, rname=None, rtype=None):
    """
    Creates version control repository

    :param str pname: Name of project where to create repository
    :param str rname: Name of repository you want to create
    :param str rtype: Type of the repository you want to create. Valid types are: git, ...

    Example usage::

        fab system.create_repository:myproject,reponame,git

    """
    assert rtype, 'Please provide repository type. Example: git'
    assert pname, 'Please provide project name. Example: myproject'
    assert rname, 'Please provide repository name. Example: reponame'

    root_dir = config['trac_repositories_path']
    repo_dir = os.path.join(root_dir, '%s.%s' % (pname, rname))
    webserver_user = config['webserver_user']
    webserver_group = config['webserver_group']

    # Dictionary for easy-replace
    variables = {
        'rname': rname,
        'pname': pname,
        'rdir': repo_dir,
        'rtype': rtype,
    }

    if rtype == 'git':
        q = 'Creating a %(rtype)s repo "%(rname)s" in %(rdir)s. Is this right (y/n)?' % variables
        if not str(prompt(q, default='n')).lower() == 'y':
            logger.info('Aborting.')
            return

        logger.info('Creating git repository')
        sudo('git --bare --git-dir=%(rdir)s init --shared=true' % variables)
        sudo('git --git-dir=%(rdir)s update-server-info' % variables)
        sudo('chown -R %s:%s %s' % (webserver_user, webserver_group, repo_dir))

        logger.warn('Manual step required. Add the following to the project trac.ini')
        print '[repositories]'
        print '%(rname)s.dir = %(rdir)s' % variables
        print '%(rname)s.type = %(rtype)s' % variables
        print ''

    else:
        raise NotImplementedError('Repository type not implemented')
Example #17
def show_projects_top_downloads(month='', year='', limit='50'):
    """
    Show the number of downloads done within the selected month

    Example query::

        fab -H hostname service.show_projects_top_downloads
        fab -H hostname service.show_projects_top_downloads:month=10
        fab -H hostname service.show_projects_top_downloads:month=9,year=2011,limit=10

    """
    # NOTE: Fabric parameters are always strings
    # Get the month's start and end date
    now = datetime.utcnow()
    month = int(month) if month else now.month
    year = int(year) if year else now.year

    weekday, endday = calendar.monthrange(year, month)
    start_date = datetime(year=year, month=month, day=1, hour=0, minute=0, second=0)
    end_date = datetime(year=year, month=month, day=endday, hour=23, minute=59, second=59)

    startstr = start_date.strftime('%Y-%m-%d %H:%M:%S')
    endstr = end_date.strftime('%Y-%m-%d %H:%M:%S')

    dbm = DatabaseManager('trac_analytical')
    sql = '''
    SELECT project_dim.project_name AS Project, COUNT(*) AS Downloads
    FROM event_fact, project_dim
    WHERE event_sk = (
        SELECT event_sk
        FROM event_dim
        WHERE action_name = 'release_downloaded'
    )
    AND (
        timestamp >= TIMESTAMP('%(startdate)s')
    AND
        timestamp < TIMESTAMPADD(MONTH, 1, '%(startdate)s')
    )
    AND project_dim.project_sk = event_fact.project_sk
    GROUP BY event_fact.project_sk
    ORDER BY Downloads DESC
    LIMIT %(limit)s;
    ''' % dict(startdate=startstr, limit=limit)

    output = dbm.run(sql)

    logger.info('Date range: %s - %s ' % (startstr, endstr))
    logger.info('Top %s downloads:\n %s' % (limit, output))
Example #18
def buildetc(outdir=None, section=None):
    """
    Builds the configuration files based on the given configuration values in ``fabfile.ini``.

    Parameters:
        - outdir: Optional path to the directory where the built configuration files are placed. A subdirectory is created
          for each section.
        - section: Optional section name, if you want to build only one section. In this case, no subdirectory is created.

    """
    # Copy configuration files under build dir where they can be processed; config.config is a ConfigParser
    # object. config is a global object imported via fablib.api
    sections = [section] if section else config.get_setups_sections()

    # Iterate all sections unless the specific name is given
    for csection in sections:
        dest = csection.replace('setup:', '')

        # Set the target directory based on given parameters
        if section:
            build_etc = outdir
        elif outdir:
            build_etc = build_join(outdir, dest)
        else:
            build_etc = build_join('etc', dest)

        logger.info('Target directory for configuration files: %s' % build_etc)

        # Create target dir if needed
        if not os.path.exists(build_etc):
            os.makedirs(build_etc)

        # Copy the config files from etc/templates, place them in build/etc/<section> and replace the placeholders
        etc_src = rel_join('etc/templates')
        for configfile in get_files(etc_src, pattern='*.ini, *.conf, *.sql, *.sh, multiproject', recursive=True):
            # Copy the configuration file into build_etc directory, using the same directory structure
            configcopy = build_join(build_etc, configfile.replace(etc_src, './'))
            configcopy_dir = os.path.dirname(configcopy)
            if not os.path.exists(configcopy_dir):
                os.makedirs(configcopy_dir)
            shutil.copy(configfile, configcopy)

            # Replace placeholder in configfile
            logger.info('Building configuration file: %s' % configcopy)
            template = TemplateResource(configcopy)
            template.build(**dict(config.config.items(csection)))
Example #19
def deploy_targz(packagename, opts):
    """
    Run the deploy activities for a source/custom tar.gz package.
    The script can deploy the following kinds of tar.gz packages:

    - Python source package: if setup.py is found in the root folder, it is run with python setup.py install
    - Custom source package (created with the build task): runs scripts/deploy.sh found in the package

    :param str packagename: Name of the package to deploy. Example 'mypackage-1.1.0.tar.gz'
    :param str opts: Optional parameters to pass to deploying app (easy_install, rpm, dpkg, deploy.sh)

    """
    root_dir = config['trac_root']
    webserver_user = config['webserver_user']
    webserver_group = config['webserver_group']
    releasename, releaseversion, releaseextension = split_package_name(
        packagename)

    # Get the subdirectory (where all the files are placed) of the archive, if any
    out = run('tar ztf %s' % packagename)
    subdir = os.path.commonprefix(out.stdout.splitlines())

    run('tar zxf %s' % packagename)

    with cd('~/%s' % subdir):
        # Run setup.py if it is found
        if exists('setup.py'):
            logger.info(
                'Installing python module from source, using: %s/setup.py' %
                releasename)
            sudo('python setup.py install %s' % opts)

        # Custom package, expect to find scripts/deploy.sh
        else:
            logger.info(
                'Running deploy script ./scripts/deploy.sh at directory: %s' %
                subdir)
            with settings(show('stdout')):
                sudo('./scripts/deploy.sh %s' % opts)

            # Fix file permissions
            logger.info('Setting the permissions to deployment folder')
            sudo('chown -L -R %s:%s %s' % (webserver_user, webserver_group,
                                           join(root_dir, 'dist', 'current')))

    # Cleanup - needs to be done with sudo because sudo is used when running deploy.sh (at the moment)
    with cd('~'):
        logger.info('Cleaning up...')
        if subdir:
            sudo('rm -rf ./%s' % subdir)
        sudo('rm -rf ./%s' % releasename)
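
split_package_name is defined elsewhere in the fabfile; for the example name from the docstring it is expected to return roughly ('mypackage', '1.1.0', 'tar.gz'). A hypothetical sketch of such a helper (not the project's implementation)::

    def split_package_name(packagename):
        # 'mypackage-1.1.0.tar.gz' -> ('mypackage', '1.1.0', 'tar.gz')
        name, _, rest = packagename.partition('-')
        if rest.endswith('.tar.gz'):
            return name, rest[:-len('.tar.gz')], 'tar.gz'
        version, _, extension = rest.rpartition('.')
        return name, version, extension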
Example #20
def upload(package, rdir=''):
    """
    Uploads the given package to remote host

    :param str package: Path to package, absolute or relative
    :param str rdir: Remote directory to upload the package to. Defaults to the user's home directory

    Examples::

        fab dist.upload:package.tar.gz,/tmp
        fab dist.upload:package=../../package.tar.gz
        fab dist.upload:package=../../packa*.tar.gz
        fab dist.upload:package=../../package.tar.gz,rdir=/tmp

    .. NOTE::

        Special paths, containing environment variables or tilde characters are not supported.

    """
    # Use glob to find package from local filesystem (glob supports wildcards)
    pmatches = glob(os.path.expandvars(os.path.expanduser(package)))
    if not pmatches:
        return abort('No package can be found with name: %s' % package)

    # Upload package(s) to remote host and determine the name of release folder
    for pmatch in pmatches:
        package = os.path.normpath(pmatch)

        # Get the release name from package: drop the extension and version
        packagename = os.path.basename(package)
        target_path = join(rdir, packagename) if rdir else packagename
        target_dir = os.path.dirname(target_path)

        # Upload package to specified directory, with the same name as the orig
        logger.info('Uploading the package: %s -> %s' % (package, target_path))

        if not exists(target_dir):
            run('mkdir -p %s' % target_dir)

        put(package, target_path)
Example #21
def upload(package, rdir=''):
    """
    Uploads the given package to remote host

    :param str package: Path to package, absolute or relative
    :param str rdir: Remote directory to upload the package to. Defaults to the user's home directory

    Examples::

        fab dist.upload:package.tar.gz,/tmp
        fab dist.upload:package=../../package.tar.gz
        fab dist.upload:package=../../packa*.tar.gz
        fab dist.upload:package=../../package.tar.gz,rdir=/tmp

    .. NOTE::

        Special paths, containing environment variables or tilde characters are not supported.

    """
    # Use glob to find package from local filesystem (glob supports wildcards)
    pmatches = glob(os.path.expandvars(os.path.expanduser(package)))
    if not pmatches:
        return abort('No package can be found with name: %s' % package)

    # Upload package(s) to remote host and determine the name of release folder
    for pmatch in pmatches:
        package = os.path.normpath(pmatch)

        # Get the release name from package: drop the extension and version
        packagename = os.path.basename(package)
        target_path = join(rdir, packagename) if rdir else packagename
        target_dir = os.path.dirname(target_path)

        # Upload package to specified directory, with the same name as the orig
        logger.info('Uploading the package: %s -> %s' % (package, target_path))

        if not exists(target_dir):
            run('mkdir -p %s' % target_dir)

        put(package, target_path)
Example #22
def deploy(package, opts=''):
    """
    Uploads the given package to remote host and deploys it there.

    :param str package:
        Path to the tar.gz package in the local file system. In case of a wildcard, all the matched packages are deployed.
        The package can be in the formats: tar.gz (custom package structure), egg, deb, rpm
    :param str opts:
        Optional parameters to pass to deploying app (easy_install, rpm, dpkg, deploy.sh)

    Examples::

        fab dist.deploy:package=../../package.tar.gz
        fab dist.deploy:package=../../package.tar.gz,opts="--theme --activate"
        fab dist.deploy:package=../../*.deb
        fab dist.deploy:package=../../*.deb,opts='--force'

    """
    # Use glob to find package from local filesystem (glob supports wildcards)
    pmatches = glob(os.path.expandvars(os.path.expanduser(package)))
    if not pmatches:
        return abort('No package can be found with name: %s' % package)

    # Iterate matched packages
    # Upload package(s) to remote host and determine the name of release folder
    for pmatch in pmatches:
        package = os.path.normpath(pmatch)

        # Get the release name from package: drop the extension and version
        packagename = os.path.basename(package)
        releasename, releaseversion, releaseextension = split_package_name(
            packagename)

        # Upload package to home directory, with the same name as the original
        logger.info('Uploading the package: %s -> %s' % (package, packagename))
        put(package, packagename)

        logger.info('Release name: %s' % releasename)

        # Run the package specific deployment actions
        if releaseextension == 'tar.gz':
            deploy_targz(packagename, opts)
        elif releaseextension == 'egg':
            opts = opts or '-Z'
            sudo('easy_install %s %s' % (opts, packagename))
        elif releaseextension == 'deb':
            opts = opts or '--install'
            sudo('dpkg %s %s' % (opts, packagename))
        elif releaseextension == 'rpm':
            opts = opts or '-Uvh'
            sudo('rpm %s %s' % (opts, packagename))

        # Remove the package
        with cd('~'):
            sudo('rm -f ./%s' % packagename)

    # Restart apache
    logger.info('Restarting apache')
    apache = Apache()
    apache.restart()
Example #23
def deploy(package, opts=''):
    """
    Uploads the given package to remote host and deploys it there.

    :param str package:
        Path to the tar.gz package in the local file system. In case of a wildcard, all the matched packages are deployed.
        The package can be in the formats: tar.gz (custom package structure), egg, deb, rpm
    :param str opts:
        Optional parameters to pass to deploying app (easy_install, rpm, dpkg, deploy.sh)

    Examples::

        fab dist.deploy:package=../../package.tar.gz
        fab dist.deploy:package=../../package.tar.gz,opts="--theme --activate"
        fab dist.deploy:package=../../*.deb
        fab dist.deploy:package=../../*.deb,opts='--force'

    """
    # Use glob to find package from local filesystem (glob supports wildcards)
    pmatches = glob(os.path.expandvars(os.path.expanduser(package)))
    if not pmatches:
        return abort('No package can be found with name: %s' % package)

    # Iterate matched packages
    # Upload package(s) to remote host and determine the name of release folder
    for pmatch in pmatches:
        package = os.path.normpath(pmatch)

        # Get the release name from package: drop the extension and version
        packagename = os.path.basename(package)
        releasename, releaseversion, releaseextension = split_package_name(packagename)

        # Upload package to home directory, with the same name as the original
        logger.info('Uploading the package: %s -> %s' % (package, packagename))
        put(package, packagename)

        logger.info('Release name: %s' % releasename)

        # Run the package specific deployment actions
        if releaseextension == 'tar.gz':
            deploy_targz(packagename, opts)
        elif releaseextension == 'egg':
            opts = opts or '-Z'
            sudo('easy_install %s %s' % (opts, packagename))
        elif releaseextension == 'deb':
            opts = opts or '--install'
            sudo('dpkg %s %s' % (opts, packagename))
        elif releaseextension == 'rpm':
            opts = opts or '-Uvh'
            sudo('rpm %s %s' % (opts, packagename))

        # Remove the package
        with cd('~'):
            sudo('rm -f ./%s' % packagename)

    # Restart apache
    logger.info('Restarting apache')
    apache = Apache()
    apache.restart()
Example #24
def show_projects_downloads(month='', year=''):
    """
    Show the number of downloads done within the selected month
    """
    # NOTE: Fabric parameters are always strings
    # Get the month's start and end date
    now = datetime.utcnow()
    month = int(month) if month else now.month
    year = int(year) if year else now.year

    weekday, endday = calendar.monthrange(year, month)
    start_date = datetime(year=year, month=month, day=1, hour=0, minute=0, second=0)
    end_date = datetime(year=year, month=month, day=endday, hour=23, minute=59, second=59)

    startstr = start_date.strftime('%Y-%m-%d %H:%M:%S')
    endstr = end_date.strftime('%Y-%m-%d %H:%M:%S')

    dbm = DatabaseManager('trac_analytical')
    sql = '''
    SELECT COUNT(*)
    FROM event_fact AS ef
    WHERE
        event_sk = (
            SELECT event_sk
            FROM event_dim
            WHERE action_name = 'release_downloaded'
        )
    AND
        ef.timestamp >= TIMESTAMP('%s')
    AND
        ef.timestamp <= TIMESTAMP('%s')\G
    ''' % (startstr, endstr)

    output = dbm.run(sql, re.compile(r': (\d+)'))

    logger.info('Date range: %s - %s ' % (startstr, endstr))
    logger.info('Project downloads: %s' % output)
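
calendar.monthrange returns the weekday of the month's first day and the number of days in the month, which is why only the second element is used for end_date; for example::

    import calendar
    print calendar.monthrange(2011, 9)   # (3, 30): September 2011 starts on a Thursday and has 30 days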
Example #25
def autobuild(taskname='builddoc', watchdir='docs', taskopts=''):
    """
    Run the given task whenever files are changed.
    Requires pyinotify.

    :param taskname: Name of the fabric task to run on events (inside the dist namespace).
    :param watchdir: Path to the directory to watch for events, relative to the current directory.
    :param taskopts: Options to pass to the task, separated by commas.
    """
    import pyinotify

    watchdir = os.path.join(os.path.curdir, watchdir)
    wm = pyinotify.WatchManager()
    event = 'IN_CLOSE_WRITE'
    mask = getattr(pyinotify, event)

    class EventHandler(pyinotify.ProcessEvent):
        handling = False

        def process_IN_CLOSE_WRITE(self, event):
            if not self.handling:
                self.handling = True
                logger.info('Autorunning task: %s' % taskname)
                if taskopts:
                    globals()[taskname](*[opt for opt in taskopts.split(',')])
                else:
                    globals()[taskname]()
                self.handling = False

    handler = EventHandler()
    notifier = pyinotify.Notifier(wm, handler)
    wm.add_watch(watchdir, mask, rec=True)

    logger.info('Listening %s for %s events and running task: %s' %
                (watchdir, event, taskname))
    notifier.loop()
Example #26
def deploy_targz(packagename, opts):
    """
    Run the deploy activities for a source/custom tar.gz package.
    The script can deploy the following kinds of tar.gz packages:

    - Python source package: if setup.py is found in the root folder, it is run with python setup.py install
    - Custom source package (created with the build task): runs scripts/deploy.sh found in the package

    :param str packagename: Name of the package to deploy. Example 'mypackage-1.1.0.tar.gz'
    :param str opts: Optional parameters to pass to deploying app (easy_install, rpm, dpkg, deploy.sh)

    """
    root_dir = config['trac_root']
    webserver_user = config['webserver_user']
    webserver_group = config['webserver_group']
    releasename, releaseversion, releaseextension = split_package_name(packagename)

    # Get the subdirectory (where all the files are placed) of the archive, if any
    out = run('tar ztf %s' % packagename)
    subdir = os.path.commonprefix(out.stdout.splitlines())

    run('tar zxf %s' % packagename)

    with cd('~/%s' % subdir):
        # Run setup.py if it is found
        if exists('setup.py'):
            logger.info('Installing python module from source, using: %s/setup.py' % releasename)
            sudo('python setup.py install %s' % opts)

        # Custom package, expect to find scripts/deploy.sh
        else:
            logger.info('Running deploy script ./scripts/deploy.sh at directory: %s' % subdir)
            with settings(show('stdout')):
                sudo('./scripts/deploy.sh %s' % opts)

            # Fix file permissions
            logger.info('Setting the permissions to deployment folder')
            sudo('chown -L -R %s:%s %s' % (webserver_user, webserver_group, join(root_dir, 'dist', 'current')))

    # Cleanup - needs to be done with sudo because sudo is used when running deploy.sh (at the moment)
    with cd('~'):
        logger.info('Cleaning up...')
        if subdir:
            sudo('rm -rf ./%s' % subdir)
        sudo('rm -rf ./%s' % releasename)
Example #27
def remove_unused_images(dirpath, extensions='*.png', cmd=''):
    """
    List/remove unused images

    :param dirpath: Directory to search for unused images
    :param extensions: Extensions to include in the search. Example: "*.png", "*.png,*.gif"
    :param cmd: Command to execute on unused images. For example: "git rm", "rm"
                The file path is appended to the end of the command. By default, just prints the image path

    Example execution::

        fab devel.remove_unused_images:"themes/default/",cmd="git rm"

    """
    dirpath = os.path.abspath(dirpath or os.path.curdir)
    logger.info('Looking for unused images: %s' % dirpath)

    # Iterate images
    for imagepath in get_files(dirpath, extensions, recursive=True):
        logger.debug('Processing image: %s' % imagepath)
        image = os.path.basename(imagepath)

        with settings(hide('running', 'stderr'), warn_only=True):
            # Run inside the dirpath
            with lcd(dirpath):
                # Check if image can be found from source files
                findcmd = 'grin "%s"' % image
                search = local(findcmd, capture=True)

                # If output does not contain image, then it is unused
                if not search.stdout:
                    if cmd:
                        rmcmd = '%s %s' % (cmd, imagepath)
                        logger.info('Executing command: %s' % rmcmd)
                        local(rmcmd)
                    else:
                        logger.info('Unused image: %s' % imagepath)
Example #28
def bundle(template_path='', resource_dir=''):
    """
    Finds the resource files (css, js) from the given template file (for example, ``resources.html``), bundles them into
    one package each (stylepack_yymmdd.css, scriptpack_yymmdd.js) and generates bundle.html (in the same directory where
    template_path is), containing references to these files. This bundle.html file can be used in Genshi
    templates as follows::

         <xi:include href="bundle.html">
            <xi:fallback>
                <!-- Link resources normally -->
                <xi:include href="resources.html" />
            </xi:fallback>
        </xi:include>

    Where ``resources.html`` contains::

        <html xmlns="http://www.w3.org/1999/xhtml"
              xmlns:py="http://genshi.edgewall.org/"
              py:strip="">
            <link rel="stylesheet" type="text/css" href="ui.css" />
            <script type="text/javascript" src="jquery-1.4.1.min.js" />
        </html>

    If bundle.html is found, only packaged resources are served to end users. Otherwise, the resources are served as-is.

    :param str template_path: Absolute path to HTML template
    :param str resource_dir: The parent directory where the resources are located
    """
    assert template_path, 'Please provide template_path'
    assert resource_dir, 'Please provide resource_dir'

    now = datetime.utcnow()
    variable_regx = re.compile(r'\${(\w|-|_|\.)+}\/')
    hrp = HTMLResourceParser()

    logger.info('Parsing template file: %s' % template_path)

    # Operate with layout file
    with io.open(template_path, 'r') as layout_fd:
        # Read file into parser
        content = layout_fd.read()
        hrp.feed(content)

        # Bundle stylesheets
        css_bundle_path = os.path.join(resource_dir, 'css',
                                       now.strftime('stylepack_%y%m%d.css'))
        with io.open(css_bundle_path, 'w') as bundle_fd:
            # Iterate styles
            for style in hrp.styles:
                # Replace variable prefixed stylesheet with actual one
                style_path = os.path.join(resource_dir,
                                          variable_regx.sub('', style['href']))
                logger.debug('Bundle script: %s' % style_path)
                with io.open(style_path, 'r') as style_fd:
                    bundle_fd.write(style_fd.read())

        logger.info('Bundled %d stylesheets into %s' %
                    (len(hrp.styles), css_bundle_path))

        # Bundle scripts
        js_bundle_path = os.path.join(resource_dir, 'js',
                                      now.strftime('scriptpack_%y%m%d.js'))
        with io.open(js_bundle_path, 'w') as bundle_fd:
            # Iterate scripts
            for script in hrp.scripts:
                # Replace variable prefixed script with actual one
                script_path = os.path.join(
                    resource_dir, variable_regx.sub('', script['src']))
                logger.debug('Bundle script: %s' % script_path)
                with io.open(script_path, 'r') as script_fd:
                    bundle_fd.write(script_fd.read())

        logger.info('Bundled %d scripts into %s' %
                    (len(hrp.scripts), js_bundle_path))

        # Generate the bundle.html file, containing the resource links
        bundle_html_path = os.path.join(os.path.dirname(template_path),
                                        'bundle.html')
        with open(bundle_html_path, 'w') as bundle_html_fd:
            bundle_html_fd.write('''
            <html xmlns="http://www.w3.org/1999/xhtml"
                  xmlns:py="http://genshi.edgewall.org/"
                  py:strip="">
                <link rel="stylesheet" type="text/css" href="${conf.theme_htdocs_location}/css/%s" />
                <script type="text/javascript" src="${conf.theme_htdocs_location}/js/%s" />
            </html>
            ''' % (os.path.basename(css_bundle_path),
                   os.path.basename(js_bundle_path)))

        logger.info('Generated bundle HTML file: %s' % bundle_html_path)
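
HTMLResourceParser comes from the project's own helpers; presumably it collects the attributes of ``link`` and ``script`` tags into ``styles`` and ``scripts``. A minimal sketch built on the Python 2 stdlib HTMLParser (an assumption for illustration, not the original class)::

    from HTMLParser import HTMLParser

    class HTMLResourceParser(HTMLParser):
        def __init__(self):
            HTMLParser.__init__(self)
            self.styles = []    # e.g. [{'href': 'ui.css', 'rel': 'stylesheet', ...}]
            self.scripts = []   # e.g. [{'src': 'jquery-1.4.1.min.js', ...}]

        def handle_starttag(self, tag, attrs):
            attrs = dict(attrs)
            if tag == 'link' and attrs.get('rel') == 'stylesheet':
                self.styles.append(attrs)
            elif tag == 'script' and 'src' in attrs:
                self.scripts.append(attrs)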
Example #29
def buildext(allext='false', patch='true', branch='master'):
    """
    Build and optionally patch the 3rd party modules and libraries.
    The outcome (tar.gz/egg) files are placed in the dist directory

    :param allext:
        Download also non-GitResources. Default is 'false'.
    :param patch:
        Patch those plugins having patches, currently, trac and gitosis.
    :param branch:
        For GitResources, selects the branch to be used. Default is 'master'.

    .. NOTE::

        If you want to include the external release into the multiproject-all package, run the build
        task with parameters::

            fab dist.build:ext=true

    """
    allext = get_bool_str(allext)
    # Construct and create building directory for external resources
    extbuild = build_join('ext')
    shutil.rmtree(extbuild, ignore_errors=True)
    os.makedirs(extbuild)

    for res in ext_resources:
        res_path = get_ext_path(res.name)
        is_git = isinstance(res, GitResource)
        if not is_git and not allext:
            continue
        logger.info('Starting to download / fetch resource %s' % res.name)

        must_retrieve = False
        resource_id_file = join(res_path, '.fabric_resource_id.txt')
        res_lines = [line.strip() for line in str(res).split(',')]
        res_lines.append(
            '# This is a file used by fabric dist.buildext command.')
        if not os.path.exists(res_path):
            must_retrieve = True
        else:
            # Check folder contents.
            # If the fetch identifier is missing, it is assumed to be the correct one.
            if os.path.exists(resource_id_file):
                prev_lines = [
                    line.strip() for line in open(resource_id_file, 'r')
                ]
                if res_lines != prev_lines:
                    logger.warning(
                        'Resource %s has been changed, retrieving it.' %
                        res.name)
                    logger.info('Previous resource: %s' % prev_lines)
                    logger.info('Current resource:  %s' % res_lines)
                    must_retrieve = True
            if not get_files(
                    os.path.abspath(res_path), 'setup.py', recursive=True):
                must_retrieve = True
        if must_retrieve:
            shutil.rmtree(res_path, ignore_errors=True)
            os.makedirs(res_path)
            res.retrieve(res_path)
            outfile = open(resource_id_file, 'w')
            outfile.writelines([line + '\n' for line in res_lines])
            outfile.close()
        else:
            logger.warning('Resource %s was already retrieved.' % res.name)

        if is_git:
            # The GitResources are always updated
            if not os.path.exists(join(res_path, '.git')):
                raise Exception(
                    'GitResource in %s is invalid. Run `fab dist.clean:ext=true`'
                    % res_path)
            with lcd(res_path):
                local('git fetch')

        # Else, we assume that the resource has been already retrieved
        # Copy the files into ext build dir
        ext_build_dir = join(extbuild, res.name)
        shutil.copytree(res_path, ext_build_dir)
        if isinstance(res, GitResource):
            with lcd(ext_build_dir):
                logger.info('For %s, git checkout %s' % (res.name, branch))
                local('git checkout %s' % branch)
                local('git merge origin/%s' % branch)

    # Now the plugin files are inside 'build/ext/', and we can continue

    # Work in build directory
    with lcd(extbuild):
        # Retrieve resources and place them in the build directory

        # Apply patches
        if get_bool_str(patch) and allext:

            # Patch Trac
            logger.info('Patching Trac...')
            with lcd(join(extbuild, 'trac')):
                for patch in get_files(join(PROJECT_DIR, 'ext/patches/trac'),
                                       '*.patch',
                                       recursive=True):
                    local('patch --ignore-whitespace -p0 -i %s' % patch)

            # Patch Gitosis
            logger.info('Patching Gitosis...')
            with lcd(join(extbuild, 'gitosis')):
                for patch in get_files(join(PROJECT_DIR,
                                            'ext/patches/gitosis'),
                                       '*.patch',
                                       recursive=True):
                    local('patch --ignore-whitespace -p0 -i %s' % patch)

    # Build eggs (in build dir)
    logger.info('Laying eggs and source dists...')
    # Iterate folders where setup.py can be found
    for setuppy_path in get_files(os.path.abspath(extbuild),
                                  'setup.py',
                                  recursive=True):
        plugin_dir = os.path.dirname(setuppy_path)
        logger.info('Building package for %s' % os.path.basename(plugin_dir))
        with lcd(plugin_dir):
            local('python setup.py bdist_egg')
            local('python setup.py sdist')

    # Copy distributable files to dist
    if not os.path.exists(DIST_DIR):
        os.makedirs(DIST_DIR)

    for egg in get_files(os.path.abspath(extbuild), '*.egg', recursive=True):
        shutil.copy(egg, join(DIST_DIR, os.path.basename(egg)))

    for targz in get_files(os.path.abspath(extbuild),
                           '*.tar.gz',
                           recursive=True):
        shutil.copy(targz, join(DIST_DIR, os.path.basename(targz)))
Example #30
def bundle(template_path='', resource_dir=''):
    """
    Finds the resource files (css, js) from the given template file (for example, ``resources.html``), bundles them into
    one package each (stylepack_yymmdd.css, scriptpack_yymmdd.js) and generates bundle.html (in the same directory where
    template_path is), containing references to these files. This bundle.html file can be used in Genshi
    templates as follows::

         <xi:include href="bundle.html">
            <xi:fallback>
                <!-- Link resources normally -->
                <xi:include href="resources.html" />
            </xi:fallback>
        </xi:include>

    Where ``resources.html`` contains::

        <html xmlns="http://www.w3.org/1999/xhtml"
              xmlns:py="http://genshi.edgewall.org/"
              py:strip="">
            <link rel="stylesheet" type="text/css" href="ui.css" />
            <script type="text/javascript" src="jquery-1.4.1.min.js" />
        </html>

    If bundle.html is found, only packaged resources are served to end users. Otherwise, the resources are served as-is.

    :param str template_path: Absolute path to HTML template
    :param str resource_dir: The parent directory where the resources are located
    """
    assert template_path, 'Please provide template_path'
    assert resource_dir, 'Please provide resource_dir'

    now = datetime.utcnow()
    variable_regx = re.compile(r'\${(\w|-|_|\.)+}\/')
    hrp = HTMLResourceParser()

    logger.info('Parsing template file: %s' % template_path)

    # Operate with layout file
    with io.open(template_path, 'r') as layout_fd:
        # Read file into parser
        content = layout_fd.read()
        hrp.feed(content)

        # Bundle stylesheets
        css_bundle_path = os.path.join(resource_dir, 'css', now.strftime('stylepack_%y%m%d.css'))
        with io.open(css_bundle_path, 'w') as bundle_fd:
            # Iterate styles
            for style in hrp.styles:
                # Replace variable prefixed stylesheet with actual one
                style_path = os.path.join(resource_dir, variable_regx.sub('', style['href']))
                logger.debug('Bundle script: %s' % style_path)
                with io.open(style_path, 'r') as style_fd:
                    bundle_fd.write(style_fd.read())

        logger.info('Bundled %d stylesheets into %s' % (len(hrp.styles), css_bundle_path))

        # Bundle scripts
        js_bundle_path = os.path.join(resource_dir, 'js', now.strftime('scriptpack_%y%m%d.js'))
        with io.open(js_bundle_path, 'w') as bundle_fd:
            # Iterate scripts
            for script in hrp.scripts:
                # Replace variable prefixed script with actual one
                script_path = os.path.join(resource_dir, variable_regx.sub('', script['src']))
                logger.debug('Bundle script: %s' % script_path)
                with io.open(script_path, 'r') as script_fd:
                    bundle_fd.write(script_fd.read())

        logger.info('Bundled %d scripts into %s' % (len(hrp.scripts), js_bundle_path))

        # Generate the bundle.html file, containing the resource links
        bundle_html_path = os.path.join(os.path.dirname(template_path), 'bundle.html')
        with open(bundle_html_path, 'w') as bundle_html_fd:
            bundle_html_fd.write('''
            <html xmlns="http://www.w3.org/1999/xhtml"
                  xmlns:py="http://genshi.edgewall.org/"
                  py:strip="">
                <link rel="stylesheet" type="text/css" href="${conf.theme_htdocs_location}/css/%s" />
                <script type="text/javascript" src="${conf.theme_htdocs_location}/js/%s" />
            </html>
            ''' % (os.path.basename(css_bundle_path), os.path.basename(js_bundle_path)))

        logger.info('Generated bundle HTML file: %s' % bundle_html_path)
Example #31
def buildext(allext='false', patch='true', branch='master'):
    """
    Build and optionally patch the 3rd party modules and libraries.
    The outcome (tar.gz/egg) files are placed in the dist directory

    :param allext:
        Download also non-GitResources. Default is 'false'.
    :param patch:
        Patch those plugins having patches, currently, trac and gitosis.
    :param branch:
        For GitResources, selects the branch to be used. Default is 'master'.

    .. NOTE::

        If you want to include the external release into the multiproject-all package, run the build
        task with parameters::

            fab dist.build:ext=true

    """
    allext = get_bool_str(allext)
    # Construct and create building directory for external resources
    extbuild = build_join('ext')
    shutil.rmtree(extbuild, ignore_errors=True)
    os.makedirs(extbuild)

    for res in ext_resources:
        res_path = get_ext_path(res.name)
        is_git = isinstance(res, GitResource)
        if not is_git and not allext:
            continue
        logger.info('Starting to download / fetch resource %s' % res.name)

        must_retrieve = False
        resource_id_file = join(res_path, '.fabric_resource_id.txt')
        res_lines = [line.strip() for line in str(res).split(',')]
        res_lines.append('# This is a file used by fabric dist.buildext command.')
        if not os.path.exists(res_path):
            must_retrieve = True
        else:
            # Check folder contents.
            # If the fetch identifier is missing, it is assumed to be the correct one.
            if os.path.exists(resource_id_file):
                prev_lines = [line.strip() for line in open(resource_id_file, 'r')]
                if res_lines != prev_lines:
                    logger.warning('Resource %s has been changed, retrieving it.' % res.name)
                    logger.info('Previous resource: %s' % prev_lines)
                    logger.info('Current resource:  %s' % res_lines)
                    must_retrieve = True
            if not get_files(os.path.abspath(res_path), 'setup.py', recursive=True):
                must_retrieve = True
        if must_retrieve:
            shutil.rmtree(res_path, ignore_errors=True)
            os.makedirs(res_path)
            res.retrieve(res_path)
            outfile = open(resource_id_file, 'w')
            outfile.writelines([line + '\n' for line in res_lines])
            outfile.close()
        else:
            logger.warning('Resource %s was already retrieved.' % res.name)

        if is_git:
            # The GitResources are always updated
            if not os.path.exists(join(res_path, '.git')):
                raise Exception('GitResource in %s is invalid. Run `fab dist.clean:ext=true`' % res_path)
            with lcd(res_path):
                local('git fetch')

        # Else, we assume that the resource has been already retrieved
        # Copy the files into ext build dir
        ext_build_dir = join(extbuild, res.name)
        shutil.copytree(res_path, ext_build_dir)
        if isinstance(res, GitResource):
            with lcd(ext_build_dir):
                logger.info('For %s, git checkout %s' % (res.name, branch))
                local('git checkout %s' % branch)
                local('git merge origin/%s' % branch)

    # Now the plugin files are inside 'build/ext/', and we can continue

    # Work in build directory
    with lcd(extbuild):
        # Retrieve resources and place them in the build directory

        # Apply patches
        if get_bool_str(patch) and allext:

            # Patch Trac
            logger.info('Patching Trac...')
            with lcd(join(extbuild, 'trac')):
                for patch in get_files(join(PROJECT_DIR, 'ext/patches/trac'), '*.patch', recursive=True):
                    local('patch --ignore-whitespace -p0 -i %s' % patch)

            # Patch Gitosis
            logger.info('Patching Gitosis...')
            with lcd(join(extbuild, 'gitosis')):
                for patch in get_files(join(PROJECT_DIR, 'ext/patches/gitosis'), '*.patch', recursive=True):
                    local('patch --ignore-whitespace -p0 -i %s' % patch)

    # Build eggs (in build dir)
    logger.info('Laying eggs and source dists...')
    # Iterate folders where setup.py can be found
    for setuppy_path in get_files(os.path.abspath(extbuild), 'setup.py', recursive=True):
        plugin_dir = os.path.dirname(setuppy_path)
        logger.info('Building package for %s' % os.path.basename(plugin_dir))
        with lcd(plugin_dir):
            local('python setup.py bdist_egg')
            local('python setup.py sdist')

    # Copy distributable files to dist
    if not os.path.exists(DIST_DIR):
        os.makedirs(DIST_DIR)

    for egg in get_files(os.path.abspath(extbuild), '*.egg', recursive=True):
        shutil.copy(egg, join(DIST_DIR, os.path.basename(egg)))

    for targz in get_files(os.path.abspath(extbuild), '*.tar.gz', recursive=True):
        shutil.copy(targz, join(DIST_DIR, os.path.basename(targz)))
Example #32
def build(release='false', compress='false', docs='', pkgs='tar', version='', ext='true',
          extbranch='master'):
    """
    Create distributable packages. Builds eggs and tar.gz compressed packages, based on
    parameters. Also capable of downloading and patching external dependencies.

    :param release:
        Make a release build or not. Release sets/increments the version number. Default 'false'
    :param compress:
        Compress js/css files or not. Default 'false'
    :param docs:
        Names of the documentation targets to build. Default '' means no doc building
    :param pkgs:
        Package formats to build sources into, separated by spaces. Valid values: tar deb rpm
    :param version:
        Version number to set for whole package. Default '' -> take the version from VERSION.txt
        (or default to 1.0.0)
    :param ext:
        Build and include external modules in the big package. Default is 'true'.
        If ext is 'all', also builds modules other than our own forks (GitResources).
    :param extbranch:
        Defines the branch from which the fork packages are built.

    Examples::

        fab dist.build
        fab dist.build:release=true,docs=html
        fab dist.build:compress=true,version=1.2.3,pkgs="deb tar rpm"

    .. NOTE:: Python modules get their version number from setup.py

    """
    # NOTE: Fabric parameters are always in string format

    # Get the list of package formats (space-delimited)
    pkg_formats = pkgs.split(' ')

    # Determine the version: parameter vs. VERSION.txt vs. default
    if not version:
        version_path = os.path.join(PROJECT_DIR, 'VERSION.txt')
        if os.path.exists(version_path):
            version = set_version_in_file(version_path, version)
        else:
            version = '1.0.0'

    # Create package name from pkg name and version
    package_name = '%s-%s' % (PKG_NAME, version)
    pkg_join = lambda *path: join(BUILD_DIR, package_name, *path)

    logger.info('Preparing build env...')

    # Copy relevant files to build dir (so that they can be edited directly)
    shutil.rmtree(BUILD_DIR, ignore_errors=True)
    del SRC_DIRS[SRC_DIRS.index('libs')]
    del SRC_DIRS[SRC_DIRS.index('etc')]
    for src_dir in SRC_DIRS:
        shutil.copytree(src_dir, pkg_join(src_dir))

    if not os.path.exists(DIST_DIR):
        os.makedirs(DIST_DIR)

    # Copy additional files
    shutil.copy('README.rst', pkg_join('README'))
    os.makedirs(pkg_join('scripts'))
    shutil.copy(rel_join('scripts/deploy.sh'), pkg_join('scripts/deploy.sh'))
    shutil.copy(rel_join('scripts/update.py'), pkg_join('scripts/update.py'))
    shutil.copytree(rel_join('scripts/hooks'), pkg_join('scripts/hooks'))
    shutil.copytree(rel_join('scripts/cron'), pkg_join('scripts/cron'))

    # Build documentation
    if docs:
        # List the target formats/builders
        builddoc(docs, pkg_join('docs'))
    else:
        # Ensure there is at least empty directory (for archive)
        os.makedirs(pkg_join('docs'))

    # Build configuration
    buildetc(outdir=pkg_join('etc'), section='DEFAULT')

    # Increment version of each plugin if making a release
    if get_bool_str(release):
        logger.info('Setting/incrementing version numbers...')
        for setuppy_path in get_files(pkg_join('plugins'), 'setup.py', recursive=True):
            # Check if plugin folder contains VERSION.txt (non-versioned file)
            version_path = os.path.join(os.path.dirname(setuppy_path), 'VERSION.txt')
            if not os.path.exists(version_path):
                logger.warning('VERSION.txt missing, using version found in setup.py')
                version_path = setuppy_path

            # Set version information in file.
            # NOTE: If version is empty, it is determined from version file (either VERSION.txt or setup.py)
            set_version_in_file(version_path, version)

    # Optional compress (edits copied files under build)
    if get_bool_str(compress):
        logger.info('Compressing files...')

        with settings(warn_only=True):
            # Compress theme resources
            for respath in get_files(pkg_join('themes'), '*.css', recursive=True):
                local('yui-compressor --charset utf-8 -o %s %s' % (respath, respath))
            for respath in get_files(pkg_join('themes'), '*.js', recursive=True):
                local('yui-compressor --charset utf-8 -o %s %s' % (respath, respath))

            # Compress plugin resources
            for respath in get_files(pkg_join('plugins'), '*.css', recursive=True):
                local('yui-compressor --charset utf-8 -o %s %s' % (respath, respath))
            for respath in get_files(pkg_join('plugins'), '*.js', recursive=True):
                local('yui-compressor --charset utf-8 -o %s %s' % (respath, respath))

        # Aggregate js+css resources into bundle
        for template_path in get_files(pkg_join('themes'), 'resources.html', recursive=True):
            logger.info('Template path: %s' % template_path)
            bundle(template_path, pkg_join('themes/default/htdocs'))

        logger.info('Compression completed.')

    # Build eggs and source packages (in build dir)
    logger.info('Laying eggs and source packages...')
    with lcd(pkg_join()):
        for plugin_dir in PLUGIN_DIRS:
            with lcd(plugin_dir):
                local('python setup.py bdist_egg')
                local('python setup.py sdist')

    # Build external plugins as well, optionally even non-fork plugins
    # Retrieve and build external plugins and copy the artifacts into plugins folder.
    # NOTE: Next egg copying will put them into correct place, no need to rerun the file copy
    allext = 'true' if ext.lower() == 'all' else 'false'
    if get_bool_str(ext) or get_bool_str(allext):
        buildext(allext=allext, branch=extbranch)
        for egg in get_files(build_join('ext'), '*.egg', recursive=True):
            shutil.copy(egg, pkg_join('plugins', os.path.basename(egg)))

    # Copy eggs and sdisted files from plugins directory to dist and plugin directories
    for egg in get_files(pkg_join('plugins/multiproject'), '*.egg', recursive=True):
        shutil.copy(egg, dist_join(os.path.basename(egg)))
        shutil.copy(egg, pkg_join('plugins'))

    for targz in get_files(pkg_join('plugins'), '*.tar.gz', recursive=True):
        shutil.copy(targz, dist_join(os.path.basename(targz)))

    # Create dist if not available
    if not os.path.exists(DIST_DIR):
        os.makedirs(DIST_DIR)

    # Create one big package to contain 'em all
    if 'tar.gz' in pkg_formats or 'tar' in pkg_formats:
        logger.info('Creating complete .tar.gz package...')
        # TODO: Archive could be implemented in pure python
        # TODO: These patterns seem to assume build dir == project dir
        exclude_patterns = [
            '.*', 'tests', 'documents', '*.egg-info', 'ext/libs', 'ext/plugins',
            'sample', 'build', 'plugins/multiproject'
        ]
        exclude_param = ' '.join(['--exclude=%s' % pt for pt in exclude_patterns])
        with lcd(BUILD_DIR):
            #local('tar -czf %s.tar.gz --exclude-vcs %s %s' %
            #    (dist_join(package_name), exclude_param, package_name))
            local('tar -czf %s.tar.gz %s' %
                (dist_join(package_name), package_name))

    # Debian package
    if 'deb' in pkg_formats:
        logger.info('Creating .deb package...')
        try:
            from stdeb import command
        except ImportError:
            command = None
            abort('Module stdeb (http://pypi.python.org/pypi/stdeb) was not found, cannot build .deb package')

        # Run setup.py bdist_deb inside each plugin. It generates deb_dist/<pkgname>/ directory
        for setuppy_path in get_files(os.path.abspath(pkg_join('plugins')), 'setup.py', recursive=True):
            plugin_dir = os.path.dirname(setuppy_path)

            with settings(hide('stdout', 'stderr')):
                with lcd(plugin_dir):
                    local('python setup.py --command-packages=stdeb.command bdist_deb')

                # Package command needs to be run inside the generated folder. Find it and run the command
                for debdist_path in get_files(os.path.join(plugin_dir, 'deb_dist'), 'setup.py', recursive=True):
                    with lcd(os.path.dirname(debdist_path)):
                        local('dpkg-buildpackage -rfakeroot -uc -us')

        # Copy .deb packages to dist
        for deb_path in get_files(pkg_join('plugins'), '*.deb', recursive=True):
            shutil.copy(deb_path, dist_join(os.path.basename(deb_path)))

    # Redhat package
    if 'rpm' in pkg_formats:
        logger.info('Creating .rpm package...')

        with settings(hide('stdout', 'running')):
            # Run setup.py bdist_rpm inside each plugin. It generates the .rpm package under dist/
            for setuppy_path in get_files(os.path.abspath(pkg_join('plugins')), 'setup.py', recursive=True):
                plugin_dir = os.path.dirname(setuppy_path)
                with lcd(plugin_dir):
                    local('python setup.py bdist_rpm')

            # Copy .rpm packages to dist
            for rpm_path in get_files(pkg_join('plugins'), '*.rpm', recursive=True):
                shutil.copy(rpm_path, dist_join(os.path.basename(rpm_path)))

    logger.info('Building completed.')
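As the note in the docstring points out, Fabric passes every task argument as a
string, so switches such as release, compress and ext are interpreted with the
get_bool_str helper. That helper is defined elsewhere in the fabfile; a minimal
sketch, assuming it only needs to recognise the usual truthy spellings, might be:

def get_bool_str(value):
    """
    Hypothetical sketch: interpret a Fabric string argument as a boolean,
    so that dist.build:release=true and dist.build:release=True behave the
    same. The real helper may accept a different set of values.
    """
    return str(value).strip().lower() in ('true', 'yes', 'on', '1')

With this reading get_bool_str('false') is False, which is what makes the
release, compress and patch switches behave as documented above.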
Beispiel #34
0
        TestProgram = None
        Plugin = object
        return abort(
            'Running the tests requires the Nose testing framework. Please install it first: "pip install nose"'
        )

    if not case:
        return abort('Please provide either name or path to test case')

    # Determine the case file: name or path accepted
    webtests_dir = rel_join('tests/webtests')
    casepath = os.path.abspath(case) if case.endswith('.py') else join(
        webtests_dir, 'cases/%s.py' % case)
    configpath = os.path.join(os.curdir, os.path.expanduser(config))

    logger.info('Running functional tests from: %s' % casepath)
    logger.info('Reading tests configuration from: %s' % configpath)

    class TestConfigPlugin(Plugin):
        """
        Simple Nose plugin to set test configuration path to testcase::

            class MyTestcase(unittest.TestCase):
                def setUp(self):
                    self.config_path

        """
        name = 'testconfig'
        can_configure = True
        enabled = True
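        # Hypothetical continuation -- the remaining plugin methods are cut
        # off by this excerpt. A nose plugin along these lines would typically
        # just remember the resolved configuration path and expose it on each
        # running test case; configure() and startTest() are standard nose
        # plugin hooks, and the attribute name config_path matches the
        # docstring above.
        def configure(self, options, conf):
            # Remember the configuration path resolved above (configpath)
            self.config_path = configpath

        def startTest(self, test):
            # Expose the path on the underlying test case as self.config_path
            setattr(test.test, 'config_path', self.config_path)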
Beispiel #35
0
def show_summary():
    """
    Shows a summary of the setup
    """
    logger.info(run('echo ROOT=$ROOT'))