Example No. 1
def pychecker(args):
    """Run pychecker on sources."""
    if not args:
        args = options.pychecker.default_args.split()

    # -- COLLECT: command options, files
    problematic = []
    cmdopts = []
    files   = []
    for arg in args:
        path_ = path(arg)
        if arg.startswith("-"):
            cmdopts.append(arg)
        elif path_.isdir():
            files.extend(path_.walkfiles("*.py"))
        elif arg.endswith(".py") and path_.exists():
            files.append(arg)
        else:
            error("UNKNOWN FILE: {0}".format(arg))
            problematic.append(arg)

    # -- EXECUTE:
    cmdopts = " ".join(cmdopts)
    for file_ in files:
        try:
            sh("pychecker {opts} {file}".format(opts=cmdopts, file=file_))
        except Exception, e:
            error("FAILURE: {0}".format(e))
            problematic.append(file_)
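
Every task in these examples shells out through Paver's sh helper (from paver.easy). As a rough mental model only, and not Paver's actual implementation, an sh-style wrapper over the standard library might look like the sketch below; the capture and ignore_error parameters mirror options used in later examples.

import subprocess

def sh_sketch(command, capture=False, ignore_error=False):
    # Run a shell command; optionally capture stdout and/or tolerate failure.
    if capture:
        proc = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
        output, _ = proc.communicate()
        returncode = proc.returncode
    else:
        output = None
        returncode = subprocess.call(command, shell=True)
    if returncode != 0 and not ignore_error:
        raise RuntimeError("Subprocess return code: %d" % returncode)
    return output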
Example No. 2
def sync(options):
    """
    Run the syncdb and migrate management commands to create and migrate a DB
    """
    sh("python manage.py syncdb --all --noinput")
    #sh("python manage.py migrate --noinput")
    sh("python manage.py loaddata sample_admin.json")
Example No. 3
def devstack(args):
    """
    Start the devstack lms or studio server
    """
    parser = argparse.ArgumentParser(prog='paver devstack')
    parser.add_argument('system', type=str, nargs=1, help="lms or studio")
    parser.add_argument('--fast', action='store_true', default=False, help="Skip updating assets")
    parser.add_argument('--optimized', action='store_true', default=False, help="Run with optimized assets")
    parser.add_argument('--settings', type=str, default=DEFAULT_SETTINGS, help="Settings file")
    parser.add_argument('--asset-settings', type=str, default=None, help=ASSET_SETTINGS_HELP)
    parser.add_argument(
        '--no-contracts',
        action='store_true',
        default=False,
        help="Disable contracts. By default, they're enabled in devstack."
    )
    args = parser.parse_args(args)
    settings = args.settings
    asset_settings = args.asset_settings if args.asset_settings else settings
    if args.optimized:
        settings = OPTIMIZED_SETTINGS
        asset_settings = OPTIMIZED_ASSETS_SETTINGS
    sh(django_cmd('cms', settings, 'reindex_course', '--setup'))
    run_server(
        args.system[0],
        fast=args.fast,
        settings=settings,
        asset_settings=asset_settings,
        contracts=not args.no_contracts,
    )
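
django_cmd and run_server are helpers from the same pavelib codebase and are not shown here. A hedged sketch of what a django_cmd-style helper could look like (illustrative only, not edx-platform's actual implementation):

def django_cmd(system, settings, *args):
    # Build a manage.py invocation for the given system (lms or cms)
    # using the requested settings module.
    return 'python manage.py {system} --settings={settings} {args}'.format(
        system=system, settings=settings, args=' '.join(args))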
Example No. 4
def clean_dir(directory):
    """
    Clean coverage files, to ensure that we don't use stale data to generate reports.
    """
    # We delete the files but preserve the directory structure
    # so that coverage.py has a place to put the reports.
    sh('find {dir} -type f -delete'.format(dir=directory))
Example No. 5
def setup(options):
    """Get dependencies and prepare a GeoNode development environment."""
    sh('pip install -e .')

    info(('GeoNode development environment successfully set up. '
          'If you have not set up an administrative account,'
          ' please do so now. Use "paver start" to start up the server.'))
Example No. 6
def stop_geoserver():
    """
    Stop GeoServer
    """
    # we use docker-compose for integration tests
    if integration_tests:
        return

    # only start if using Geoserver backend
    if 'geonode.geoserver' not in INSTALLED_APPS or OGC_SERVER['default']['BACKEND'] == 'geonode.qgis_server':
        return
    kill('java', 'geoserver')

    # Kill process.
    try:
        # proc = subprocess.Popen("ps -ef | grep -i -e '[j]ava\|geoserver' |
        # awk '{print $2}'",
        proc = subprocess.Popen(
            "ps -ef | grep -i -e 'geoserver' | awk '{print $2}'",
                                shell=True,
                                stdout=subprocess.PIPE)
        for pid in proc.stdout:
            info('Stopping geoserver (process number %s)' % int(pid))
            os.kill(int(pid), signal.SIGKILL)
            os.kill(int(pid), 9)
            sh('sleep 30')
            # Check if the process that we killed is alive.
            try:
                os.kill(int(pid), 0)
                # raise Exception("""wasn't able to kill the process\nHINT:use
                # signal.SIGKILL or signal.SIGABORT""")
            except OSError as ex:
                continue
    except Exception as e:
        info(e)
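
The ps/grep/awk pipeline above is easy to get subtly wrong. A hedged alternative sketch using the third-party psutil package (an assumption; it is not implied by the snippet above) could locate and kill the GeoServer Java process directly:

import signal

import psutil

def kill_geoserver_processes():
    # Find processes whose command line mentions geoserver and kill them.
    for proc in psutil.process_iter(['pid', 'name', 'cmdline']):
        try:
            cmdline = ' '.join(proc.info['cmdline'] or [])
            if 'geoserver' in cmdline.lower():
                proc.send_signal(signal.SIGKILL)
        except (psutil.NoSuchProcess, psutil.AccessDenied):
            continue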
Example No. 7
def run_tests(options):
    """
    Executes the entire test suite.
    """
    if options.get('coverage'):
        prefix = 'coverage run --branch --source=geonode --omit="*/management/*,geonode/contrib/*,*/test*,*/wsgi*,*/middleware*"'
    else:
        prefix = 'python'
    local = options.get('local', 'false')  # travis uses default to false

    if not integration_tests:
        sh('%s manage.py test geonode.tests.smoke %s %s' % (prefix, _keepdb, _parallel))
        call_task('test', options={'prefix': prefix})
    else:
        call_task('test_integration')
        call_task('test_integration', options={'name': 'geonode.tests.csw'})

        # only start if using Geoserver backend
        if 'geonode.geoserver' in INSTALLED_APPS and OGC_SERVER['default']['BACKEND'] == 'geonode.geoserver':
            call_task('test_integration',
                      options={'name': 'geonode.upload.tests.integration',
                               'settings': 'geonode.upload.tests.test_settings'})

        call_task('test_bdd', options={'local': local})

    sh('flake8 geonode')
Example No. 8
def start_django():
    """
    Start the GeoNode Django application
    """
    bind = options.get('bind', '')
    foreground = '' if options.get('foreground', False) else '&'
    sh('python manage.py runserver %s %s' % (bind, foreground))
Example No. 9
def package(options):
    """
    Creates a tarball to use for building the system elsewhere
    """
    import tarfile
    import geonode

    version = geonode.get_version()
    # Use GeoNode's version for the package name.
    pkgname = 'GeoNode-%s-all' % version

    # Create the output directory.
    out_pkg = path(pkgname)
    out_pkg_tar = path("%s.tar.gz" % pkgname)

    # Create a distribution in zip format for the geonode python package.
    dist_dir = path('dist')
    dist_dir.rmtree()
    sh('python setup.py sdist --formats=zip')

    with pushd('package'):

        # Delete old tar files in that directory
        for f in glob.glob('GeoNode*.tar.gz'):
            old_package = path(f)
            if old_package != out_pkg_tar:
                old_package.remove()

        if out_pkg_tar.exists():
            info('There is already a package for version %s' % version)
            return

        # Clean anything that is in the output package tree.
        out_pkg.rmtree()
        out_pkg.makedirs()

        support_folder = path('support')
        install_file = path('install.sh')

        # And copy the default files from the package folder.
        justcopy(support_folder, out_pkg / 'support')
        justcopy(install_file, out_pkg)

        geonode_dist = path('..') / 'dist' / 'GeoNode-%s.zip' % version
        justcopy(geonode_dist, out_pkg)

        # Create a tar file with all files in the output package folder.
        tar = tarfile.open(out_pkg_tar, "w:gz")
        for file in out_pkg.walkfiles():
            tar.add(file)

        # Add the README with the license and important links to documentation.
        tar.add('README', arcname=('%s/README.rst' % out_pkg))
        tar.close()

        # Remove all the files in the temporary output package directory.
        out_pkg.rmtree()

    # Report the info about the new package.
    info("%s created" % out_pkg_tar.abspath())
Example No. 10
    def prepare_bokchoy_run(self):
        """
        Sets up and starts servers for a Bok Choy run. If --fasttest is not
        specified then static assets are collected
        """
        sh("{}/scripts/reset-test-db.sh".format(Env.REPO_ROOT))

        if not self.fasttest:
            self.generate_optimized_static_assets()

        # Clear any test data already in Mongo or MySQL and invalidate
        # the cache
        bokchoy_utils.clear_mongo()
        self.cache.flush_all()

        # load data in db_fixtures
        self.load_data()

        # load courses if self.imports_dir is set
        self.load_courses()

        # Ensure the test servers are available
        msg = colorize('green', "Confirming servers are running...")
        print msg
        bokchoy_utils.start_servers(self.default_store, self.coveragerc)
Example No. 11
    def get_test_course(self):
        """
        Fetches the test course.
        """
        self.imports_dir.makedirs_p()
        zipped_course = self.imports_dir + 'demo_course.tar.gz'

        msg = colorize('green', "Fetching the test course from github...")
        print msg

        sh(
            'wget {tar_gz_file} -O {zipped_course}'.format(
                tar_gz_file=self.tar_gz_file,
                zipped_course=zipped_course,
            )
        )

        msg = colorize('green', "Uncompressing the test course...")
        print msg

        sh(
            'tar zxf {zipped_course} -C {courses_dir}'.format(
                zipped_course=zipped_course,
                courses_dir=self.imports_dir,
            )
        )
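
The same fetch-and-extract can be done without shelling out to wget and tar. A hedged sketch using only the standard library; course_url and imports_dir are illustrative parameters, not the class attributes used above:

import os
import tarfile
try:
    from urllib.request import urlretrieve  # Python 3
except ImportError:
    from urllib import urlretrieve  # Python 2

def fetch_test_course(course_url, imports_dir):
    # Download the course archive and unpack it into the imports directory.
    zipped_course = os.path.join(imports_dir, 'demo_course.tar.gz')
    urlretrieve(course_url, zipped_course)
    with tarfile.open(zipped_course, 'r:gz') as archive:
        archive.extractall(imports_dir)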
Example No. 12
def find_fixme(options):
    """
    Run pylint on system code, only looking for fixme items.
    """
    num_fixme = 0
    systems = getattr(options, 'system', ALL_SYSTEMS).split(',')

    for system in systems:
        # Directory to put the pylint report in.
        # This makes the folder if it doesn't already exist.
        report_dir = (Env.REPORT_DIR / system).makedirs_p()

        apps_list = ' '.join(top_python_dirs(system))

        cmd = (
            "pylint --disable all --enable=fixme "
            "--output-format=parseable {apps} "
            "> {report_dir}/pylint_fixme.report".format(
                apps=apps_list,
                report_dir=report_dir
            )
        )

        sh(cmd, ignore_error=True)

        num_fixme += _count_pylint_violations(
            "{report_dir}/pylint_fixme.report".format(report_dir=report_dir))

    print "Number of pylint fixmes: " + str(num_fixme)
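
_count_pylint_violations is defined elsewhere in the same pavelib module. A hypothetical sketch of such a counter, which tallies lines shaped like pylint's parseable output (the regex is illustrative, not the project's actual pattern):

import re

def _count_pylint_violations(report_file):
    # Count report lines that look like "path/to/file.py:42: [W0511(fixme), ...] ..."
    violation_pattern = re.compile(r'^.+\.py:\d+: \[')
    with open(report_file) as report:
        return sum(1 for line in report if violation_pattern.match(line))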
Example No. 13
def run_diff_quality(
        violations_type=None, reports=None, percentage_string=None, branch_string=None, dquality_dir=None
):
    """
    This executes the diff-quality commandline tool for the given violation type (e.g., pylint, eslint).
    If diff-quality fails due to quality issues, this method returns False.

    """
    try:
        sh(
            "diff-quality --violations={type} "
            "{reports} {percentage_string} {compare_branch_string} "
            "--html-report {dquality_dir}/diff_quality_{type}.html ".format(
                type=violations_type,
                reports=reports,
                percentage_string=percentage_string,
                compare_branch_string=branch_string,
                dquality_dir=dquality_dir,
            )
        )
        return True
    except BuildFailure, error_message:
        if is_percentage_failure(error_message):
            return False
        else:
            raise BuildFailure('FAILURE: {}'.format(error_message))
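
is_percentage_failure is also defined elsewhere; the idea is to distinguish diff-quality exiting non-zero because the computed quality percentage fell below the threshold from a genuine tool error. A hypothetical sketch, assuming Paver's BuildFailure message includes the subprocess return code:

def is_percentage_failure(error_message):
    # diff-quality exits with return code 1 when the quality percentage is
    # below the requested threshold; treat only that case as a "soft" failure.
    return "Subprocess return code: 1" in str(error_message)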
Example No. 14
def run_complexity():
    """
    Uses radon to examine cyclomatic complexity.
    For additional details on radon, see http://radon.readthedocs.org/
    """
    system_string = '/ '.join(ALL_SYSTEMS.split(',')) + '/'
    complexity_report_dir = (Env.REPORT_DIR / "complexity")
    complexity_report = complexity_report_dir / "python_complexity.log"

    # Ensure directory structure is in place: metrics dir, and an empty complexity report dir.
    Env.METRICS_DIR.makedirs_p()
    _prepare_report_dir(complexity_report_dir)

    print "--> Calculating cyclomatic complexity of python files..."
    try:
        sh(
            "radon cc {system_string} --total-average > {complexity_report}".format(
                system_string=system_string,
                complexity_report=complexity_report
            )
        )
        complexity_metric = _get_count_from_last_line(complexity_report, "python_complexity")
        _write_metric(
            complexity_metric,
            (Env.METRICS_DIR / "python_complexity")
        )
        print "--> Python cyclomatic complexity report complete."
        print "radon cyclomatic complexity score: {metric}".format(metric=str(complexity_metric))

    except BuildFailure:
        print "FAILURE: Unable to calculate python-only code-complexity."
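
_get_count_from_last_line (used here and in the stylelint example that follows) is another helper from the same module. A hypothetical sketch that pulls the first number out of the report's final non-empty line (illustrative only):

import re

def _get_count_from_last_line(report_file, file_type):
    # file_type is accepted for parity with the call sites but unused here.
    # Return the first number found on the last non-empty line of the report,
    # or None if the report is empty or contains no number.
    with open(report_file) as report:
        lines = [line.strip() for line in report if line.strip()]
    if not lines:
        return None
    match = re.search(r'\d+(\.\d+)?', lines[-1])
    return float(match.group()) if match else None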
Example No. 15
def _get_stylelint_violations():
    """
    Returns the number of Stylelint violations.
    """
    stylelint_report_dir = (Env.REPORT_DIR / "stylelint")
    stylelint_report = stylelint_report_dir / "stylelint.report"
    _prepare_report_dir(stylelint_report_dir)
    formatter = 'node_modules/stylelint-formatter-pretty'

    sh(
        "stylelint **/*.scss --custom-formatter={formatter} | tee {stylelint_report}".format(
            formatter=formatter,
            stylelint_report=stylelint_report,
        ),
        ignore_error=True
    )

    try:
        return int(_get_count_from_last_line(stylelint_report, "stylelint"))
    except TypeError:
        raise BuildFailure(
            "FAILURE: Number of stylelint violations could not be found in {stylelint_report}".format(
                stylelint_report=stylelint_report
            )
        )
Example No. 16
def verifyconfigref(options):
    sh(
        "PYTHONPATH=. {0} extra/release/verify_config_reference.py \
            docs/configuration.rst".format(
            sys.executable
        )
    )
Example No. 17
    def generate_optimized_static_assets(self):
        """
        Collect static assets using test_static_optimized.py which generates
        optimized files to a dedicated test static root.
        """
        print colorize("green", "Generating optimized static assets...")
        sh("paver update_assets --settings=test_static_optimized")
Example No. 18
def test(options):
    cmd = "CELERY_LOADER=default nosetests"
    if getattr(options, "coverage", False):
        cmd += " --with-coverage"
    if getattr(options, "verbose", False):
        cmd += " --verbosity=2"
    sh(cmd)
Example No. 19
def removepyc(options):
    sh(
        "find . -type f -a \\( {0} \\) | xargs rm".format(
            " -o ".join("-name '{0}'".format(pat) for pat in PYCOMPILE_CACHES)
        )
    )
    sh('find . -type d -name "__pycache__" | xargs rm -r')
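
A pure-Python equivalent that avoids spawning find and xargs; a hedged sketch assuming PYCOMPILE_CACHES holds glob patterns such as '*.pyc' and '*.pyo':

import fnmatch
import os
import shutil

def removepyc_py(root='.', patterns=('*.pyc', '*.pyo')):
    # Walk bottom-up so __pycache__ directories can be removed safely.
    for dirpath, dirnames, filenames in os.walk(root, topdown=False):
        for name in filenames:
            if any(fnmatch.fnmatch(name, pattern) for pattern in patterns):
                os.remove(os.path.join(dirpath, name))
        if os.path.basename(dirpath) == '__pycache__':
            shutil.rmtree(dirpath, ignore_errors=True)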
Example No. 20
def readme(options):
    sh(
        "{0} extra/release/sphinx-to-rst.py docs/templates/readme.txt \
            > README.rst".format(
            sys.executable
        )
    )
Example No. 21
def contributing(options):
    sh(
        "{0} extra/release/sphinx-to-rst.py docs/contributing.rst \
            > CONTRIBUTING.rst".format(
            sys.executable
        )
    )
Example No. 22
def mount_s3():
    kwargs = dict(bucket=os.environ['AWS_STORAGE_BUCKET_NAME'],
                  mount_point='/srv/omaha_s3')
    env = dict(AWSACCESSKEYID=os.environ['AWS_ACCESS_KEY_ID'],
               AWSSECRETACCESSKEY=os.environ['AWS_SECRET_ACCESS_KEY'])
    cmd = 's3fs {bucket} {mount_point} -ouse_cache=/tmp'.format(**kwargs)
    sh(cmd, env=env)
Example No. 23
def clean_dir(directory):
    """
    Delete all the files from the specified directory.
    """
    # We delete the files but preserve the directory structure
    # so that coverage.py has a place to put the reports.
    sh('find {dir} -type f -delete'.format(dir=directory))
Example No. 24
def gen_tests_html():
    """Generate tests/index.html for online testing"""
    with pushd("tests"):
        # ensure manager testsuite is writeable
        os.chmod(os.path.join("suites", "manager", "data"), 0777)
        os.chmod(os.path.join("suites", "manager", "data", "records.db"), 0666)
        sh("python gen_html.py > index.html")
Example No. 25
def gen_tests_html():
    """Generate tests/index.html for online testing"""
    with pushd('tests'):
        # ensure manager testsuite is writeable
        os.chmod(os.path.join('suites', 'manager', 'data'), 0777)
        os.chmod(os.path.join('suites', 'manager', 'data', 'records.db'), 0666)
        sh('python gen_html.py > index.html')
Example No. 26
def run_pii_check(options):  # pylint: disable=unused-argument
    """
    Guarantee that all Django models are PII-annotated.
    """
    pii_report_name = 'pii'
    default_report_dir = (Env.REPORT_DIR / pii_report_name)
    report_dir = getattr(options, 'report_dir', default_report_dir)
    output_file = os.path.join(report_dir, 'pii_check_{}.report')
    uncovered_model_counts = []
    for env_name, env_settings_file in (("CMS", "cms.envs.test"), ("LMS", "lms.envs.test")):
        try:
            print()
            print("Running {} PII Annotation check and report".format(env_name))
            print("-" * 45)
            run_output_file = six.text_type(output_file).format(env_name.lower())
            sh(
                "mkdir -p {} && "
                "export DJANGO_SETTINGS_MODULE={}; "
                "code_annotations django_find_annotations "
                "--config_file .pii_annotations.yml --report_path {} --app_name {} "
                "--lint --report --coverage | tee {}".format(
                    report_dir, env_settings_file, report_dir, env_name.lower(), run_output_file
                )
            )
            uncovered_model_counts.append(_extract_missing_pii_annotations(run_output_file))

        except BuildFailure as error_message:
            fail_quality(pii_report_name, 'FAILURE: {}'.format(error_message))

    uncovered_count = max(uncovered_model_counts)
    if uncovered_count is None:
        uncovered_count = 0
    _write_metric(uncovered_count, (Env.METRICS_DIR / pii_report_name))

    return True
Example No. 27
def update_bokchoy_db_cache():
    """
    Update and cache the MySQL database for bokchoy testing. This command
    will remove any previously cached database files and apply migrations
    on a fresh db.

    You can commit the resulting files in common/test/db_cache into
    git to speed up test runs
    """
    bokchoy_db_files = [
        'bok_choy_data_default.json',
        'bok_choy_data_student_module_history.json',
        'bok_choy_migrations_data_default.sql',
        'bok_choy_migrations_data_student_module_history.sql',
        'bok_choy_schema_default.sql',
        'bok_choy_schema_student_module_history.sql'
    ]
    print('Removing cached db files for bokchoy tests')
    for db_file in bokchoy_db_files:
        try:
            db_file_path = os.path.join(
                '{}/common/test/db_cache'.format(Env.REPO_ROOT), db_file
            )
            os.remove(db_file_path)
            print('\tRemoved {}'.format(db_file_path))
        except OSError:
            continue

    sh('{}/scripts/reset-test-db.sh'.format(Env.REPO_ROOT))
Example No. 28
def i18n_generate_strict():
    """
    Compile localizable strings from sources, extracting strings first.
    Complains if files are missing.
    """
    cmd = "i18n_tool generate"
    sh(cmd + " --strict")
Example No. 29
def compile_sass(options):
    """
    Compile Sass to CSS.
    """
    debug = options.get('debug')
    parts = ["sass"]
    parts.append("--update")
    parts.append("--cache-location {cache}".format(cache=SASS_CACHE_PATH))
    parts.append("--default-encoding utf-8")
    if debug:
        parts.append("--sourcemap")
    else:
        parts.append("--style compressed --quiet")
    if options.get('force'):
        parts.append("--force")
    parts.append("--load-path .")
    for load_path in SASS_LOAD_PATHS + SASS_DIRS:
        parts.append("--load-path {path}".format(path=load_path))

    for sass_dir in SASS_DIRS:
        css_dir = sass_dir.parent / "css"
        if css_dir:
            parts.append("{sass}:{css}".format(sass=sass_dir, css=css_dir))
        else:
            parts.append(sass_dir)

    sh(cmd(*parts))

    print("\t\tFinished compiling sass.")
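
cmd() here is a small helper from the same pavelib utilities that joins the collected fragments into one shell string; conceptually something like this sketch (not necessarily the project's exact implementation):

def cmd(*args):
    # Join non-empty fragments into a single shell command string.
    return ' '.join(str(arg) for arg in args if arg)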
Example No. 30
def docker_run():
    migrate()
    loaddata()
    create_admin()
    collectstatic()
    mount_s3()
    sh('/usr/bin/supervisord')
Example No. 31
def test_javascript(options):
    with pushd('geonode/static/geonode'):
        sh('./run-tests.sh')
Example No. 32
def i18n_transifex_pull():
    """
    Pull translated strings from Transifex
    """
    sh("i18n_tool transifex pull")
Example No. 33
def i18n_transifex_push():
    """
    Push source strings to Transifex for translation
    """
    sh("i18n_tool transifex push")
Example No. 34
    try:
        import virtualenv
    except ImportError, e:
        raise RuntimeError("virtualenv is needed for bootstrap")

    bdir = options.bootstrap_dir
    if not os.path.exists(bdir):
        os.makedirs(bdir)
    bscript = "boostrap.py"

    options.virtualenv.script_name = os.path.join(options.bootstrap_dir,
                                                  bscript)
    options.virtualenv.no_site_packages = False
    options.bootstrap.no_site_packages = False
    call_task('paver.virtual.bootstrap')
    sh('cd %s; %s %s' % (bdir, sys.executable, bscript))

@task
def clean():
    """Remove build, dist, egg-info garbage."""
    d = ['build', 'dist', 'numpy.egg-info']
    for i in d:
        if os.path.exists(i):
            shutil.rmtree(i)

    bdir = os.path.join('doc', options.sphinx.builddir)
    if os.path.exists(bdir):
        shutil.rmtree(bdir)

@task
def clean_bootstrap():
Example No. 35
def deb(options):
    """
    Creates debian packages.

    Example uses:
        paver deb
        paver deb -k 12345
        paver deb -k 12345 -p geonode/testing
    """
    key = options.get('key', None)
    ppa = options.get('ppa', None)

    version, simple_version = versions()

    info('Creating package for GeoNode version %s' % version)

    # Get rid of any uncommitted changes to debian/changelog
    info('Getting rid of any uncommitted changes in debian/changelog')
    sh('git checkout package/debian/changelog')

    # Workaround for git-dch bug
    # http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=594580
    sh('rm -rf %s/.git' % (os.path.realpath('package')))
    sh('ln -s %s %s' % (os.path.realpath('.git'), os.path.realpath('package')))

    with pushd('package'):

        # Install requirements
        # sh('sudo apt-get -y install debhelper devscripts git-buildpackage')

        # sh(('git-dch --spawn-editor=snapshot --git-author --new-version=%s'
        #     ' --id-length=6 --ignore-branch --release' % (simple_version)))
        # In case you publish from Ubuntu Xenial (git-dch is removed from upstream)
        #  use the following line instead:
        # sh(('gbp dch --spawn-editor=snapshot --git-author --new-version=%s'
        #    ' --id-length=6 --ignore-branch --release' % (simple_version)))
        distribution = "bionic"
        # sh(('gbp dch --distribution=%s --force-distribution --spawn-editor=snapshot --git-author --new-version=%s'
        #    ' --id-length=6 --ignore-branch --release' % (distribution, simple_version)))

        deb_changelog = path('debian') / 'changelog'
        for idx, line in enumerate(
                fileinput.input([deb_changelog], inplace=True)):
            if idx == 0:
                print "geonode (%s) %s; urgency=high" % (simple_version,
                                                         distribution),
            else:
                print line.replace("urgency=medium", "urgency=high"),

        # Revert workaround for git-dch bug
        sh('rm -rf .git')

        if key is None and ppa is None:
            print("A local installable package")
            sh('debuild -uc -us -A')
        elif key is None and ppa is not None:
            print("A sources package, signed by daemon")
            sh('debuild -S')
        elif key is not None and ppa is None:
            print("A signed installable package")
            sh('debuild -k%s -A' % key)
        elif key is not None and ppa is not None:
            print("A signed, source package")
            sh('debuild -k%s -S' % key)

    if ppa is not None:
        sh('dput ppa:%s geonode_%s_source.changes' % (ppa, simple_version))
Example No. 36
def reset_hard():
    """
    Reset a development environment (Database, GeoServer & Catalogue)
    """
    sh("git clean -dxf")
Example No. 37
def test_integration(options):
    """
    Run GeoNode's Integration test suite against the external apps
    """
    _backend = os.environ.get('BACKEND', OGC_SERVER['default']['BACKEND'])
    if _backend == 'geonode.geoserver' or 'geonode.qgis_server' not in INSTALLED_APPS:
        call_task('stop_geoserver')
        _reset()
        # Start GeoServer
        call_task('start_geoserver')
    else:
        call_task('stop_qgis_server')
        _reset()
        # Start QGis Server
        call_task('start_qgis_server')

    sh('sleep 30')

    name = options.get('name', 'geonode.tests.integration')
    settings = options.get('settings', '')
    if not settings and name == 'geonode.upload.tests.integration':
        if _django_11:
            sh("cp geonode/upload/tests/test_settings.py geonode/")
            settings = 'geonode.test_settings'
        else:
            settings = 'geonode.upload.tests.test_settings'

    success = False
    try:
        if name == 'geonode.tests.csw':
            call_task('sync', options={'settings': settings})
            call_task('start', options={'settings': settings})
            call_task('setup_data', options={'settings': settings})

        settings = 'DJANGO_SETTINGS_MODULE=%s' % settings if settings else ''

        if name == 'geonode.upload.tests.integration':
            sh("%s python -W ignore manage.py makemigrations --noinput" %
               settings)
            sh("%s python -W ignore manage.py migrate --noinput" % settings)
            sh("%s python -W ignore manage.py loaddata sample_admin.json" %
               settings)
            sh("%s python -W ignore manage.py loaddata geonode/base/fixtures/default_oauth_apps.json"
               % settings)
            sh("%s python -W ignore manage.py loaddata geonode/base/fixtures/initial_data.json"
               % settings)
            call_task('start_geoserver')
            bind = options.get('bind', '0.0.0.0:8000')
            foreground = '' if options.get('foreground', False) else '&'
            sh('%s python -W ignore manage.py runmessaging %s' %
               (settings, foreground))
            sh('%s python -W ignore manage.py runserver %s %s' %
               (settings, bind, foreground))
            sh('sleep 30')
            settings = 'REUSE_DB=1 %s' % settings

        live_server_option = '--liveserver=localhost:8000'
        if _django_11:
            live_server_option = ''

        info("GeoNode is now available, running the tests now.")
        sh(('%s python -W ignore manage.py test %s'
            ' %s --noinput %s' %
            (settings, name, _keepdb, live_server_option)))

    except BuildFailure as e:
        info('Tests failed! %s' % str(e))
    else:
        success = True
    finally:
        # don't use call task here - it won't run since it already has
        stop()

    call_task('stop_geoserver')
    _reset()
    if not success:
        sys.exit(1)
Example No. 38
def start():
    """
    Start GeoNode (Django, GeoServer & Client)
    """
    sh('sleep 30')
    info("GeoNode is now available.")
Example No. 39
def static(options):
    with pushd('geonode/static'):
        sh('grunt production')
Example No. 40
def start_geoserver(options):
    """
    Start GeoServer with GeoNode extensions
    """
    # we use docker-compose for integration tests
    if integration_tests or integration_csw_tests or integration_bdd_tests:
        return

    # only start if using Geoserver backend
    _backend = os.environ.get('BACKEND', OGC_SERVER['default']['BACKEND'])
    if _backend == 'geonode.qgis_server' or 'geonode.geoserver' not in INSTALLED_APPS:
        return

    GEOSERVER_BASE_URL = OGC_SERVER['default']['LOCATION']
    url = GEOSERVER_BASE_URL

    if urlparse(GEOSERVER_BASE_URL).hostname != 'localhost':
        print "Warning: OGC_SERVER['default']['LOCATION'] hostname is not equal to 'localhost'"

    if not GEOSERVER_BASE_URL.endswith('/'):
        print "Error: OGC_SERVER['default']['LOCATION'] does not end with a '/'"
        sys.exit(1)

    download_dir = path('downloaded').abspath()
    jetty_runner = download_dir / \
        os.path.basename(dev_config['JETTY_RUNNER_URL'])
    data_dir = path('geoserver/data').abspath()
    geofence_dir = path('geoserver/data/geofence').abspath()
    web_app = path('geoserver/geoserver').abspath()
    log_file = path('geoserver/jetty.log').abspath()
    config = path('scripts/misc/jetty-runner.xml').abspath()
    jetty_port = urlparse(GEOSERVER_BASE_URL).port

    import socket
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    socket_free = True
    try:
        s.bind(("127.0.0.1", jetty_port))
    except socket.error as e:
        socket_free = False
        if e.errno == 98:
            info('Port %s is already in use' % jetty_port)
        else:
            info(
                'Something else raised the socket.error exception while checking port %s'
                % jetty_port)
            print(e)
    finally:
        s.close()

    if socket_free:
        # @todo - we should not have set workdir to the datadir but a bug in geoserver
        # prevents geonode security from initializing correctly otherwise
        with pushd(data_dir):
            javapath = "java"
            if on_travis:
                sh(('echo debconf shared/accepted-oracle-license-v1-1 select true | sudo debconf-set-selections;'
                    ' echo debconf shared/accepted-oracle-license-v1-1 seen true | sudo debconf-set-selections;'
                    ' sudo apt install -y oracle-java8-set-default ant maven;'
                    ' sudo update-java-alternatives --set java-8-oracle'))
                # import subprocess
                # result = subprocess.run(['update-alternatives', '--list', 'java'], stdout=subprocess.PIPE)
                # javapath = result.stdout
                javapath = "/usr/lib/jvm/java-8-oracle/jre/bin/java"
            loggernullpath = os.devnull

            # checking if our loggernullpath exists and if not, reset it to
            # something manageable
            if loggernullpath == "nul":
                try:
                    open("../../downloaded/null.txt", 'w+').close()
                except IOError as e:
                    print "Chances are that you have Geoserver currently running.  You \
                            can either stop all servers with paver stop or start only \
                            the django application with paver start_django."

                    sys.exit(1)
                loggernullpath = "../../downloaded/null.txt"

            try:
                sh(('%(javapath)s -version') % locals())
            except BaseException:
                print "Java was not found in your path.  Trying some other options: "
                javapath_opt = None
                if os.environ.get('JAVA_HOME', None):
                    print "Using the JAVA_HOME environment variable"
                    javapath_opt = os.path.join(
                        os.path.abspath(os.environ['JAVA_HOME']), "bin",
                        "java.exe")
                elif options.get('java_path'):
                    javapath_opt = options.get('java_path')
                else:
                    print "Paver cannot find java in the Windows Environment.  \
                    Please provide the --java_path flag with your full path to \
                    java.exe e.g. --java_path=C:/path/to/java/bin/java.exe"

                    sys.exit(1)
                # if there are spaces
                javapath = 'START /B "" "' + javapath_opt + '"'

            sh((
                '%(javapath)s -Xms512m -Xmx2048m -server -XX:+UseConcMarkSweepGC -XX:MaxPermSize=512m'
                ' -DGEOSERVER_DATA_DIR=%(data_dir)s'
                ' -Dgeofence.dir=%(geofence_dir)s'
                # ' -Dgeofence-ovr=geofence-datasource-ovr.properties'
                # workaround for JAI sealed jar issue and jetty classloader
                # ' -Dorg.eclipse.jetty.server.webapp.parentLoaderPriority=true'
                ' -jar %(jetty_runner)s'
                ' --port %(jetty_port)i'
                ' --log %(log_file)s'
                ' %(config)s'
                ' > %(loggernullpath)s &' % locals()))

        info('Starting GeoServer on %s' % url)

    # wait for GeoServer to start
    started = waitfor(url)
    info('The logs are available at %s' % log_file)

    if not started:
        # If applications did not start in time we will give the user a chance
        # to inspect them and stop them manually.
        info(('GeoServer never started properly or timed out. '
              'It may still be running in the background.'))
        sys.exit(1)
Example No. 41
def log_installed_python_prereqs():
    """  Logs output of pip freeze for debugging. """
    sh(u"pip freeze > {}".format(Env.GEN_LOG_DIR + "/pip_freeze.log"))
    return
Example No. 42
def sync(options):
    """
    Run the makemigrations and migrate management commands to create and migrate a DB
    """
    settings = options.get('settings', '')
    if settings:
        settings = 'DJANGO_SETTINGS_MODULE=%s' % settings

    sh("%s python -W ignore manage.py makemigrations --noinput" % settings)
    sh("%s python -W ignore manage.py migrate --noinput" % settings)
    sh("%s python -W ignore manage.py loaddata sample_admin.json" % settings)
    sh("%s python -W ignore manage.py loaddata geonode/base/fixtures/default_oauth_apps.json"
       % settings)
    sh("%s python -W ignore manage.py loaddata geonode/base/fixtures/initial_data.json"
       % settings)
    sh("%s python -W ignore manage.py set_all_layers_alternate" % settings)
Example No. 43
def run_sphinx(rvars=None, folder=None, application=None, http_host=None):
    # workingdir is the application folder
    workingdir = folder

    # sourcedir holds all the sources temporarily
    sourcedir = path.join(workingdir,rvars['projectname'])

    # create the custom_courses dir if it doesn't already exist
    if not os.path.exists(path.join(workingdir, 'custom_courses')):
        os.mkdir(path.join(workingdir, 'custom_courses'))

    # confdir holds the conf and index files
    confdir = path.join(workingdir, 'custom_courses', rvars['projectname'])
    if not os.path.exists(confdir):
        os.mkdir(confdir)

    ########
    # We're building a custom course.
    # Generate an index.rst and copy conf.py from devcourse.
    ########
    if rvars['coursetype'] == 'custom':
        row = db(db.projects.projectcode==rvars['projectname']).select()
        title = row[0].description

        # this is the temporary source dir for this build
        os.mkdir(sourcedir)

        # The conf and index files will be archived in custom_courses/coursename
        # so that the course can be rebuilt at a later date.
        # Copy the conf.py file from devcourse into our custom course.
        shutil.copy(path.join(workingdir, 'devcourse', 'conf.py'),
                    path.join(confdir, 'conf.py'))
        shutil.copy(path.join(workingdir, 'devcourse', 'conf.py'),
                    path.join(sourcedir, 'conf.py'))

        # generate index.rst and copy modules from source
        f = open(path.join(sourcedir,"index.rst"),"w")

        f.write('''.. Copyright (C)  Brad Miller, David Ranum
       Permission is granted to copy, distribute and/or modify this document
       under the terms of the GNU Free Documentation License, Version 1.3 or
       any later version published by the Free Software Foundation; with
       Invariant Sections being Forward, Prefaces, and Contributor List,
       no Front-Cover Texts, and no Back-Cover Texts.  A copy of the license
       is included in the section entitled "GNU Free Documentation License".''' + "\n\n")

        f.write("="*len(title) + "\n")
        f.write(title + "\n")
        f.write("="*len(title) + "\n\n")

        toc = rvars['toc']
        parts = toc.split(" ")

        idx = 0
        while idx<len(parts):
            item = parts[idx]
            if ".rst" in item:
                f.write("   "+item+"\n")
                idx=idx+1
                moduleDir = item.split('/')[0]
                try:
                    shutil.copytree(path.join(workingdir,'source',moduleDir),
                                    path.join(sourcedir,moduleDir))
                except:
                    print 'copying %s again' % moduleDir
            else:
                topic = ""
                while idx<len(parts) and ".rst" not in parts[idx]:
                    if topic != "":
                        topic =topic + " " + parts[idx]
                    else:
                        topic = topic + parts[idx]
                    idx=idx+1
                f.write("\n" + topic + "\n" + ":"*len(topic) + "\n\n")
                f.write('''.. toctree::
       :maxdepth: 2 \n\n''')

        f.write('''\nAcknowledgements
    ::::::::::::::::

    .. toctree::
       :maxdepth: 1

       FrontBackMatter/copyright.rst
       FrontBackMatter/prefaceinteractive.rst
       FrontBackMatter/foreword.rst
       FrontBackMatter/preface.rst
       FrontBackMatter/preface2e.rst
       FrontBackMatter/contrib.rst
       FrontBackMatter/fdl-1.3.rst''' + "\n")

        f.close()

        # archive the index file so the course can be rebuilt later
        shutil.copy(path.join(sourcedir, 'index.rst'), path.join(confdir, 'index.rst'))

        shutil.copytree(path.join(workingdir,'source','FrontBackMatter'),
                        path.join(sourcedir,'FrontBackMatter'))

    #########
    # We're rebuilding a course
    #########
    elif rvars['coursetype'] == 'rebuildcourse':
        try:
            # copy all the sources into the temporary sourcedir
            shutil.copytree(path.join(workingdir,'source'),sourcedir)

            # copy the index and conf files to the sourcedir
            shutil.copy(path.join(confdir, 'conf.py'), path.join(sourcedir, 'conf.py'))
            shutil.copy(path.join(confdir, 'index.rst'), path.join(sourcedir, 'index.rst'))
        except OSError:
            # Either the sourcedir already exists (meaning this is probably devcourse, thinkcspy, etc.),
            # or the conf.py or index.rst files are missing for some reason.
            raise OSError



    ########
    # we're just copying one of the pre-existing books
    ########
    else:
        # copy all the sources into the temporary sourcedir
        shutil.copytree(path.join(workingdir,'source'),sourcedir)

        # copy the config file. We save it in confdir (to allow rebuilding the course at a later date),
        # and we also copy it to the sourcedir (which will be used for this build and then deleted).
        shutil.copy(path.join(workingdir,rvars['coursetype'],'conf.py'),
                    path.join(confdir,'conf.py'))
        shutil.copy(path.join(workingdir,rvars['coursetype'],'conf.py'),
                    path.join(sourcedir,'conf.py'))

        # copy the index file. Save in confdir (to allow rebuilding the course at a later date),
        # and copy to sourcedir for this build.
        shutil.copy(path.join(workingdir,rvars['coursetype'],'index.rst'),
                    path.join(confdir,'index.rst'))
        shutil.copy(path.join(workingdir,rvars['coursetype'],'index.rst'),
                    path.join(sourcedir,'index.rst'))



    ###########
    # Set up and run Sphinx
    ###########
    coursename = rvars['projectname']
    confdir = sourcedir # Sphinx build actually gets conf stuff from temp sourcedir
    outdir = path.join(folder, 'static' , coursename)
    doctreedir = path.join(outdir,'doctrees')
    buildername = 'html'
    confoverrides = {}
    confoverrides['html_context.appname'] = application
    confoverrides['html_context.course_id'] = coursename
    confoverrides['html_context.loglevel'] = 10
    confoverrides['html_context.course_url'] = 'http://' + http_host

    cwd = os.getcwd()
    os.chdir(path.join('applications',application))
    build_info = sh("git describe --long", capture=True)
    bi = open(path.join('custom_courses',coursename,'build_info'),'w')
    bi.write(build_info)
    bi.close()
    os.chdir(cwd)    
    build_split = build_info.split('-')
    confoverrides['html_context.build_info'] = build_split[0]

    if 'loginreq' in rvars:
        confoverrides['html_context.login_required'] = 'true'
    else:
        confoverrides['html_context.login_required'] = 'false'
    status = sys.stdout
    warning = sys.stdout
    freshenv = True
    warningiserror = False
    tags = []

    print "Finding chapters"
    sys.path.insert(0,path.join(folder,'modules'))
    from chapternames import addChapterInfoFromScheduler, findChaptersSubChapters

    force_all = True
    filenames = []

    app = Sphinx(sourcedir, confdir, outdir, doctreedir, buildername,
                confoverrides, status, warning, freshenv,
                warningiserror, tags)
    app.build(force_all, filenames)

    if rvars['coursetype'] == 'thinkcspy':
        idxname = 'toc.rst'
    else:
        idxname = 'index.rst'
    scd, ct = findChaptersSubChapters(path.join(sourcedir, idxname))
    addChapterInfoFromScheduler(scd, ct, rvars['projectname'],db)

    shutil.rmtree(sourcedir)
Example No. 44
def publish():
    if 'GPG_KEY_GEONODE' in os.environ:
        key = os.environ['GPG_KEY_GEONODE']
    else:
        print "You need to set the GPG_KEY_GEONODE environment variable"
        return

    if 'PPA_GEONODE' in os.environ:
        ppa = os.environ['PPA_GEONODE']
    else:
        ppa = None

    call_task(
        'deb',
        options={
            'key': key,
            'ppa': ppa,
            # 'ppa': 'geonode/testing',
            # 'ppa': 'geonode/unstable',
        })

    version, simple_version = versions()
    if ppa:
        sh('git add package/debian/changelog')
        sh('git commit -m "Updated changelog for version %s"' % version)
        sh('git tag -f %s' % version)
        sh('git push origin %s' % version)
        sh('git tag -f debian/%s' % simple_version)
        sh('git push origin debian/%s' % simple_version)
        # sh('git push origin master')
        sh('python setup.py sdist upload -r pypi')
Example No. 45
def _reset():
    sh("rm -rf geonode/development.db")
    sh("rm -rf geonode/uploaded/*")
    _install_data_dir()
Example No. 46
def pip_install_req_file(req_file):
    """Pip install the requirements file."""
    pip_cmd = 'pip install -q --disable-pip-version-check --exists-action w'
    sh(u"{pip_cmd} -r {req_file}".format(pip_cmd=pip_cmd, req_file=req_file))
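
Example invocation (the requirements path is illustrative, not one implied by the snippet):

pip_install_req_file('requirements/base.txt')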
Example No. 47
def start_geoserver(options):
    """
    Start GeoServer with GeoNode extensions
    """

    from geonode.settings import OGC_SERVER
    GEOSERVER_BASE_URL = OGC_SERVER['default']['LOCATION']

    url = "http://localhost:8080/geoserver/"
    if GEOSERVER_BASE_URL != url:
        print 'your GEOSERVER_BASE_URL does not match %s' % url
        sys.exit(1)

    download_dir = path('downloaded').abspath()
    jetty_runner = download_dir / os.path.basename(JETTY_RUNNER_URL)
    data_dir = path('geoserver/data').abspath()
    web_app = path('geoserver/geoserver').abspath()
    log_file = path('geoserver/jetty.log').abspath()
    config = path('scripts/misc/jetty-runner.xml').abspath()
    # @todo - we should not have set workdir to the datadir but a bug in geoserver
    # prevents geonode security from initializing correctly otherwise
    with pushd(data_dir):
        javapath = "java"
        loggernullpath = os.devnull

        # checking if our loggernullpath exists and if not, reset it to something manageable
        if loggernullpath == "nul":
            open("../../downloaded/null.txt", 'w+').close()
            loggernullpath = "../../downloaded/null.txt"

        try:
            sh(('java -version'))
        except:
            if not options.get('java_path', None):
                print "Paver cannot find java in the Windows Environment.  Please provide the --java_path flag with your full path to java.exe e.g. --java_path=C:/path/to/java/bin/java.exe"
                sys.exit(1)
            # if there are spaces
            javapath = 'START /B "" "' + options['java_path'] + '"'

        sh((
            '%(javapath)s -Xmx512m -XX:MaxPermSize=256m'
            ' -DGEOSERVER_DATA_DIR=%(data_dir)s'
            # workaround for JAI sealed jar issue and jetty classloader
            ' -Dorg.eclipse.jetty.server.webapp.parentLoaderPriority=true'
            ' -jar %(jetty_runner)s'
            ' --log %(log_file)s'
            ' %(config)s'
            ' > %(loggernullpath)s &' % locals()
        ))

    info('Starting GeoServer on %s' % url)

    # wait for GeoServer to start
    started = waitfor(url)
    info('The logs are available at %s' % log_file)

    if not started:
        # If applications did not start in time we will give the user a chance
        # to inspect them and stop them manually.
        info(('GeoServer never started properly or timed out. '
              'It may still be running in the background.'))
        sys.exit(1)
Example No. 48
def deb(options):
    """
    Creates debian packages.

    Example uses:
        paver deb
        paver deb -k 12345
        paver deb -k 12345 -p geonode/testing
    """
    key = options.get('key', None)
    ppa = options.get('ppa', None)

    version, simple_version = versions()

    info('Creating package for GeoNode version %s' % version)

    # Get rid of any uncommitted changes to debian/changelog
    info('Getting rid of any uncommitted changes in debian/changelog')
    sh('git checkout package/debian/changelog')

    # Workaround for git-dch bug
    # http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=594580
    sh('ln -s %s %s' % (os.path.realpath('.git'), os.path.realpath('package')))

    with pushd('package'):

        # Install requirements
        #sh('sudo apt-get -y install debhelper devscripts git-buildpackage')

        sh(('git-dch --spawn-editor=snapshot --git-author --new-version=%s'
            ' --id-length=6 --ignore-branch --release' % (simple_version)))

        deb_changelog = path('debian') / 'changelog'
        for line in fileinput.input([deb_changelog], inplace=True):
            print line.replace("urgency=medium", "urgency=high"),

        # Revert workaround for git-dch bug
        sh('rm -rf .git')

        if key is None and ppa is None:
            # A local installable package
            sh('debuild -uc -us -A')
        elif key is None and ppa is not None:
            # A sources package, signed by daemon
            sh('debuild -S')
        elif key is not None and ppa is None:
            # A signed installable package
            sh('debuild -k%s -A' % key)
        elif key is not None and ppa is not None:
            # A signed, source package
            sh('debuild -k%s -S' % key)

    if ppa is not None:
        sh('dput ppa:%s geonode_%s_source.changes' % (ppa, simple_version))
Example No. 49
def static(options):
    with pushd('geonode/static'):
        sh('make')
Example No. 50
def test(options):
    """
    Run GeoNode's Unit Test Suite
    """
    sh("python manage.py test %s.tests --noinput" % '.tests '.join(GEONODE_APPS))
Example No. 51
def run_xsslint(options):
    """
    Runs xsslint/xss_linter.py on the codebase
    """

    thresholds_option = getattr(options, 'thresholds', '{}')
    try:
        violation_thresholds = json.loads(thresholds_option)
    except ValueError:
        violation_thresholds = None
    if isinstance(violation_thresholds, dict) is False or \
            any(key not in ("total", "rules") for key in violation_thresholds.keys()):

        fail_quality(
            'xsslint',
            """FAILURE: Thresholds option "{thresholds_option}" was not supplied using proper format.\n"""
            """Here is a properly formatted example, '{{"total":100,"rules":{{"javascript-escape":0}}}}' """
            """with property names in double-quotes.""".format(
                thresholds_option=thresholds_option
            )
        )

    xsslint_script = "xss_linter.py"
    xsslint_report_dir = (Env.REPORT_DIR / "xsslint")
    xsslint_report = xsslint_report_dir / "xsslint.report"
    _prepare_report_dir(xsslint_report_dir)

    sh(
        u"{repo_root}/scripts/xsslint/{xsslint_script} --rule-totals --config={cfg_module} >> {xsslint_report}".format(
            repo_root=Env.REPO_ROOT,
            xsslint_script=xsslint_script,
            xsslint_report=xsslint_report,
            cfg_module='scripts.xsslint_config'
        ),
        ignore_error=True
    )

    xsslint_counts = _get_xsslint_counts(xsslint_report)

    try:
        metrics_str = u"Number of {xsslint_script} violations: {num_violations}\n".format(
            xsslint_script=xsslint_script, num_violations=int(xsslint_counts['total'])
        )
        if 'rules' in xsslint_counts and any(xsslint_counts['rules']):
            metrics_str += "\n"
            rule_keys = sorted(xsslint_counts['rules'].keys())
            for rule in rule_keys:
                metrics_str += u"{rule} violations: {count}\n".format(
                    rule=rule,
                    count=int(xsslint_counts['rules'][rule])
                )
    except TypeError:
        fail_quality(
            'xsslint',
            u"FAILURE: Number of {xsslint_script} violations could not be found in {xsslint_report}".format(
                xsslint_script=xsslint_script, xsslint_report=xsslint_report
            )
        )

    metrics_report = (Env.METRICS_DIR / "xsslint")
    # Record the metric
    _write_metric(metrics_str, metrics_report)
    # Print number of violations to log.
    sh(u"cat {metrics_report}".format(metrics_report=metrics_report), ignore_error=True)

    error_message = ""

    # Test total violations against threshold.
    if 'total' in list(violation_thresholds.keys()):
        if violation_thresholds['total'] < xsslint_counts['total']:
            error_message = u"Too many violations total ({count}).\nThe limit is {violations_limit}.".format(
                count=xsslint_counts['total'], violations_limit=violation_thresholds['total']
            )

    # Test rule violations against thresholds.
    if 'rules' in violation_thresholds:
        threshold_keys = sorted(violation_thresholds['rules'].keys())
        for threshold_key in threshold_keys:
            if threshold_key not in xsslint_counts['rules']:
                error_message += (
                    u"\nNumber of {xsslint_script} violations for {rule} could not be found in "
                    "{xsslint_report}."
                ).format(
                    xsslint_script=xsslint_script, rule=threshold_key, xsslint_report=xsslint_report
                )
            elif violation_thresholds['rules'][threshold_key] < xsslint_counts['rules'][threshold_key]:
                error_message += \
                    u"\nToo many {rule} violations ({count}).\nThe {rule} limit is {violations_limit}.".format(
                        rule=threshold_key, count=xsslint_counts['rules'][threshold_key],
                        violations_limit=violation_thresholds['rules'][threshold_key],
                    )

    if error_message:
        fail_quality(
            'xsslint',
            u"FAILURE: XSSLinter Failed.\n{error_message}\n"
            "See {xsslint_report} or run the following command to hone in on the problem:\n"
            "  ./scripts/xss-commit-linter.sh -h".format(
                error_message=error_message, xsslint_report=xsslint_report
            )
        )
    else:
        write_junit_xml('xsslint')
Example No. 52
def package(options):
    """
    Creates a tarball to use for building the system elsewhere
    """
    import pkg_resources
    import tarfile
    import geonode

    version = geonode.get_version()
    # Use GeoNode's version for the package name.
    pkgname = 'GeoNode-%s-all' % version

    # Create the output directory.
    out_pkg = path(pkgname)
    out_pkg_tar = path("%s.tar.gz" % pkgname)

    # Create a distribution in zip format for the geonode python package.
    dist_dir = path('dist')
    dist_dir.rmtree()
    sh('python setup.py sdist --formats=zip')

    with pushd('package'):

        # Delete old tar files in that directory
        for f in glob.glob('GeoNode*.tar.gz'):
            old_package = path(f)
            if old_package != out_pkg_tar:
                old_package.remove()

        if out_pkg_tar.exists():
            info('There is already a package for version %s' % version)
            return

        # Clean anything that is in the output package tree.
        out_pkg.rmtree()
        out_pkg.makedirs()

        support_folder = path('support')
        install_file = path('install.sh')

        # And copy the default files from the package folder.
        justcopy(support_folder, out_pkg / 'support')
        justcopy(install_file, out_pkg)

        geonode_dist = path('..') / 'dist' / 'GeoNode-%s.zip' % version
        justcopy(geonode_dist, out_pkg)

        # Create a tar file with all files in the output package folder.
        tar = tarfile.open(out_pkg_tar, "w:gz")
        for file in out_pkg.walkfiles():
            tar.add(file)

        # Add the README with the license and important links to documentation.
        tar.add('README', arcname=('%s/README.rst' % out_pkg))
        tar.close()

        # Remove all the files in the temporary output package directory.
        out_pkg.rmtree()

    # Report the info about the new package.
    info("%s created" % out_pkg_tar.abspath())
Example No. 53
def start_geoserver(options):
    """
    Start GeoServer with GeoNode extensions
    """
    # we use docker-compose for integration tests
    if on_travis and not options.get('force_exec', False):
        return

    # only start if using Geoserver backend
    if 'geonode.geoserver' not in INSTALLED_APPS:
        return

    GEOSERVER_BASE_URL = OGC_SERVER['default']['LOCATION']
    url = GEOSERVER_BASE_URL

    if urlparse(GEOSERVER_BASE_URL).hostname != 'localhost':
        logger.warning("Warning: OGC_SERVER['default']['LOCATION'] hostname is not equal to 'localhost'")

    if not GEOSERVER_BASE_URL.endswith('/'):
        logger.error("Error: OGC_SERVER['default']['LOCATION'] does not end with a '/'")
        sys.exit(1)

    download_dir = path('downloaded').abspath()
    jetty_runner = download_dir / \
        os.path.basename(dev_config['JETTY_RUNNER_URL'])
    data_dir = path('geoserver/data').abspath()
    geofence_dir = path('geoserver/data/geofence').abspath()
    web_app = path('geoserver/geoserver').abspath()
    log_file = path('geoserver/jetty.log').abspath()
    config = path('scripts/misc/jetty-runner.xml').abspath()
    jetty_port = urlparse(GEOSERVER_BASE_URL).port

    import socket
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    socket_free = True
    try:
        s.bind(("127.0.0.1", jetty_port))
    except OSError as e:
        socket_free = False
        if e.errno == 98:
            info(f'Port {jetty_port} is already in use')
        else:
            info(
                f'Something else raised the socket.error exception while checking port {jetty_port}')
            print(e)
    finally:
        s.close()

    if socket_free:
        # @todo - we should not have set workdir to the datadir but a bug in geoserver
        # prevents geonode security from initializing correctly otherwise
        with pushd(data_dir):
            javapath = "java"
            if on_travis:
                sh(
                    'sudo apt install -y openjdk-8-jre openjdk-8-jdk;'
                    ' sudo update-java-alternatives --set java-1.8.0-openjdk-amd64;'
                    ' export JAVA_HOME=$(readlink -f /usr/bin/java | sed "s:bin/java::");'
                    ' export PATH=$JAVA_HOME\'bin/java\':$PATH;'
                )
                # import subprocess
                # result = subprocess.run(['update-alternatives', '--list', 'java'], stdout=subprocess.PIPE)
                # javapath = result.stdout
                javapath = "/usr/lib/jvm/java-8-openjdk-amd64/jre/bin/java"
            loggernullpath = os.devnull

            # checking if our loggernullpath exists and if not, reset it to
            # something manageable
            if loggernullpath == "nul":
                try:
                    open("../../downloaded/null.txt", 'w+').close()
                except OSError:
                    print("Chances are that you have Geoserver currently running. You "
                          "can either stop all servers with paver stop or start only "
                          "the django application with paver start_django.")
                    sys.exit(1)
                loggernullpath = "../../downloaded/null.txt"

            try:
                sh(('%(javapath)s -version') % locals())
            except Exception:
                logger.warning("Java was not found in your path.  Trying some other options: ")
                javapath_opt = None
                if os.environ.get('JAVA_HOME', None):
                    logger.info("Using the JAVA_HOME environment variable")
                    javapath_opt = os.path.join(os.path.abspath(
                        os.environ['JAVA_HOME']), "bin", "java.exe")
                elif options.get('java_path'):
                    javapath_opt = options.get('java_path')
                else:
                    logger.critical("Paver cannot find java in the Windows Environment. "
                                    "Please provide the --java_path flag with your full path to "
                                    "java.exe e.g. --java_path=C:/path/to/java/bin/java.exe")
                    sys.exit(1)
                # if there are spaces
                javapath = f"START /B \"\" \"{javapath_opt}\""

            sh(
                '%(javapath)s -Xms512m -Xmx2048m -server -XX:+UseConcMarkSweepGC -XX:MaxPermSize=512m'
                ' -DGEOSERVER_DATA_DIR=%(data_dir)s'
                ' -DGEOSERVER_CSRF_DISABLED=true'
                ' -Dgeofence.dir=%(geofence_dir)s'
                ' -Djava.awt.headless=true'
                # ' -Dgeofence-ovr=geofence-datasource-ovr.properties'
                # workaround for JAI sealed jar issue and jetty classloader
                # ' -Dorg.eclipse.jetty.server.webapp.parentLoaderPriority=true'
                ' -jar %(jetty_runner)s'
                ' --port %(jetty_port)i'
                ' --log %(log_file)s'
                ' %(config)s'
                ' > %(loggernullpath)s &' % locals()
            )

        info(f'Starting GeoServer on {url}')

    # wait for GeoServer to start
    started = waitfor(url)
    info(f'The logs are available at {log_file}')

    if not started:
        # If applications did not start in time we will give the user a chance
        # to inspect them and stop them manually.
        info('GeoServer never started properly or timed out. '
             'It may still be running in the background.')
        sys.exit(1)
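
start_geoserver depends on a waitfor(url) helper to poll GeoServer until it answers over HTTP. That helper is not part of this example; the sketch below is an assumption about what such a poller might look like, with a one-second polling interval and a simple timeout.

import time
from urllib.request import urlopen

def waitfor(url, timeout=300):
    # Hypothetical sketch: poll `url` once per second until it answers with
    # HTTP 200, or give up after roughly `timeout` seconds.
    for _ in range(timeout):
        try:
            with urlopen(url) as resp:
                if resp.getcode() == 200:
                    return True
        except Exception:
            pass
        time.sleep(1)
    return False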
Ejemplo n.º 54
0
def run_quality(options):
    """
    Build the html diff quality reports, and print the reports to the console.
    :param: b, the branch to compare against, defaults to origin/master
    :param: p, diff-quality will fail if the quality percentage calculated is
        below this percentage. For example, if p is set to 80, and diff-quality finds
        quality of the branch vs the compare branch is less than 80%, then this task will fail.
    """
    # Directory to put the diff reports in.
    # This makes the folder if it doesn't already exist.
    dquality_dir = (Env.REPORT_DIR / "diff_quality").makedirs_p()

    # Save the pass variable. It will be set to false later if failures are detected.
    diff_quality_pass = True
    failure_reasons = []

    def _lint_output(linter, count, violations_list, is_html=False, limit=0):
        """
        Given a count & list of pylint violations, pretty-print the output.
        If `is_html`, will print out with HTML markup.
        """
        if is_html:
            lines = ['<body>\n']
            sep = '-------------<br/>\n'
            title = HTML(u"<h1>Quality Report: {}</h1>\n").format(linter)
            violations_bullets = ''.join(
                [HTML('<li>{violation}</li><br/>\n').format(violation=violation) for violation in violations_list]
            )
            violations_str = HTML('<ul>\n{bullets}</ul>\n').format(bullets=HTML(violations_bullets))
            violations_count_str = HTML(u"<b>Violations</b>: {count}<br/>\n")
            fail_line = HTML(u"<b>FAILURE</b>: {} count should be 0<br/>\n").format(linter)
        else:
            lines = []
            sep = '-------------\n'
            title = u"Quality Report: {}\n".format(linter)
            violations_str = ''.join(violations_list)
            violations_count_str = u"Violations: {count}\n"
            fail_line = u"FAILURE: {} count should be {}\n".format(linter, limit)

        violations_count_str = violations_count_str.format(count=count)

        lines.extend([sep, title, sep, violations_str, sep, violations_count_str])

        if count > limit > -1:
            lines.append(fail_line)
        lines.append(sep + '\n')
        if is_html:
            lines.append('</body>')

        return ''.join(lines)

    # If pylint reports exist, use those
    # Otherwise, `diff-quality` will call pylint itself
    (count, violations_list) = _get_pylint_violations(clean=False)
    _, upper_violations_limit, _, _ = _parse_pylint_options(options)

    # Print total number of violations to log
    print(_lint_output('pylint', count, violations_list, limit=upper_violations_limit))
    if count > upper_violations_limit > -1:
        diff_quality_pass = False
        failure_reasons.append('Too many total violations.')

    # ----- Set up for diff-quality pylint call -----
    # Set the string to be used for the diff-quality --compare-branch switch.
    compare_branch = getattr(options, 'compare_branch', u'origin/master')
    compare_commit = sh(u'git merge-base HEAD {}'.format(compare_branch), capture=True).strip()
    if sh('git rev-parse HEAD', capture=True).strip() != compare_commit:
        compare_branch_string = u'--compare-branch={0}'.format(compare_commit)

        # Set the string, if needed, to be used for the diff-quality --fail-under switch.
        diff_threshold = int(getattr(options, 'percentage', -1))
        percentage_string = u''
        if diff_threshold > -1:
            percentage_string = u'--fail-under={0}'.format(diff_threshold)

        pylint_files = get_violations_reports("pylint")
        pylint_reports = u' '.join(pylint_files)
        if not run_diff_quality(
            violations_type="pylint",
            reports=pylint_reports,
            percentage_string=percentage_string,
            branch_string=compare_branch_string,
            dquality_dir=dquality_dir
        ):
            diff_quality_pass = False
            failure_reasons.append('Pylint violation(s) were found in the lines of code that were added or changed.')

        eslint_files = get_violations_reports("eslint")
        eslint_reports = u' '.join(eslint_files)
        if not run_diff_quality(
                violations_type="eslint",
                reports=eslint_reports,
                percentage_string=percentage_string,
                branch_string=compare_branch_string,
                dquality_dir=dquality_dir
        ):
            diff_quality_pass = False
            failure_reasons.append('Eslint violation(s) were found in the lines of code that were added or changed.')

    # If one of the quality runs fails, then paver exits with an error when it is finished
    if not diff_quality_pass:
        msg = "FAILURE: " + " ".join(failure_reasons)
        fail_quality('diff_quality', msg)
    else:
        write_junit_xml('diff_quality')
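
run_quality gathers previously generated report files through get_violations_reports, which is not shown here. The sketch below is an assumption about its shape: it walks the same Env.REPORT_DIR used at the top of the task and collects files named after the linter, so diff-quality can reuse them.

import os

def get_violations_reports(violations_type):
    # Hypothetical sketch: return the paths of any existing
    # `<linter>.report` files under the report directory.
    reports = []
    for subdir, _dirs, files in os.walk(Env.REPORT_DIR):
        for report_file in files:
            if report_file == f"{violations_type}.report":
                reports.append(os.path.join(subdir, report_file))
    return reports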
Ejemplo n.º 55
0
def common_cleanups(options):
    module = _get_module(options)
    sh('sed -i "" -e "s/.. include::/.. literalinclude::/g" source/%s/*.rst' % module)
    sh('sed -i "" -e "s/:literal:/:caption:/g" source/%s/*.rst' % module)
    sh("sed -i '' -e '/:Python Version:/d' source/%s/index.rst" % module)
    sh("sed -i '' -e 's/()`/`/g' source/%s/*.rst" % module)
    if path('source/{}'.format(module)).glob('*.py'):
        sh("sed -i '' -e 's|#!/usr/bin/env python$|#!/usr/bin/env python3|' source/%s/*.py" % module)
        sh("sed -i '' -e '/__version__ = \"$Id$\"/d' source/%s/*.py" % module)
        sh("sed -i '' -e '/__module_id__ = \'$Id$\'/d' source/%s/*.py" % module)
    else:
        print('*** skipping changes to *.py, no python modules found')
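
The sed -i "" form used above is BSD/macOS syntax for in-place editing; GNU sed on Linux expects -i without the separate empty suffix argument. A small, hedged sketch of a portability guard (not part of the original task) could look like this:

import platform

def _inplace_flag():
    # BSD/macOS sed requires an explicit (here empty) backup suffix after -i;
    # GNU sed takes -i on its own.
    return "-i ''" if platform.system() == 'Darwin' else '-i'

# Hypothetical usage inside common_cleanups:
#   sh("sed %s -e 's/:literal:/:caption:/g' source/%s/*.rst" % (_inplace_flag(), module))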
Ejemplo n.º 56
0
def test_integration(options):
    """
    Run GeoNode's Integration test suite against the external apps
    """
    prefix = options.get('prefix')
    local = str2bool(options.get('local', 'false'))
    if local:
        call_task('stop_geoserver')
        _reset()

    name = options.get('name', None)
    settings = options.get('settings', '')
    success = False
    try:
        call_task('setup', options={'settings': settings, 'force_exec': True})

        if not settings:
            settings = 'REUSE_DB=1 DJANGO_SETTINGS_MODULE=geonode.settings'

        if name and name in ('geonode.tests.csw', 'geonode.tests.integration', 'geonode.geoserver.tests.integration'):
            call_task('sync', options={'settings': settings})
            if local:
                call_task('start_geoserver', options={'settings': settings, 'force_exec': True})
                call_task('start', options={'settings': settings})
            if integration_server_tests:
                call_task('setup_data', options={'settings': settings})
        elif 'geonode.geoserver' in INSTALLED_APPS:
            if local:
                sh("cp geonode/upload/tests/test_settings.py geonode/")
                settings = 'geonode.test_settings'
                sh(f"DJANGO_SETTINGS_MODULE={settings} python -W ignore manage.py "
                   "makemigrations --noinput")
                sh(f"DJANGO_SETTINGS_MODULE={settings} python -W ignore manage.py "
                   "migrate --noinput")
                sh(f"DJANGO_SETTINGS_MODULE={settings} python -W ignore manage.py "
                   "loaddata sample_admin.json")
                sh(f"DJANGO_SETTINGS_MODULE={settings} python -W ignore manage.py "
                   "loaddata geonode/base/fixtures/default_oauth_apps.json")
                sh(f"DJANGO_SETTINGS_MODULE={settings} python -W ignore manage.py "
                   "loaddata geonode/base/fixtures/initial_data.json")
                call_task('start_geoserver')
                bind = options.get('bind', '0.0.0.0:8000')
                foreground = '' if options.get('foreground', False) else '&'
                sh(f'DJANGO_SETTINGS_MODULE={settings} python -W ignore manage.py runmessaging {foreground}')
                sh(f'DJANGO_SETTINGS_MODULE={settings} python -W ignore manage.py runserver {bind} {foreground}')
                sh('sleep 30')
                settings = f'REUSE_DB=1 DJANGO_SETTINGS_MODULE={settings}'
            else:
                call_task('sync', options={'settings': settings})

        live_server_option = ''
        info("Running the tests now...")
        sh(f'{settings} {prefix} manage.py test {name} -v 3 {_keepdb} --noinput {live_server_option}')

    except BuildFailure as e:
        info(f'Tests failed! {str(e)}')
    else:
        success = True
    finally:
        if local:
            stop(options)
            _reset()

    if not success:
        sys.exit(1)
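
test_integration interprets its local option through a str2bool helper defined elsewhere. The version below is a minimal sketch under the assumption that it simply maps common truthy strings to True and everything else to False.

def str2bool(value):
    # Hypothetical sketch: accept booleans directly, otherwise treat a few
    # common truthy strings as True.
    if isinstance(value, bool):
        return value
    return str(value).strip().lower() in ('true', 't', 'yes', 'y', '1')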
Ejemplo n.º 57
0
def buildsitemap(options):
    sh('python2 ./bin/sitemap_gen.py --testing --config=%s' %
       options.sitemap_gen.config)
    return
Ejemplo n.º 58
0
def sync(options):
    """
    Run the makemigrations and migrate management commands to create and migrate a DB
    """
    settings = options.get('settings', '')
    if settings and 'DJANGO_SETTINGS_MODULE' not in settings:
        settings = f'DJANGO_SETTINGS_MODULE={settings}'

    sh(f"{settings} python -W ignore manage.py makemigrations --noinput")
    sh(f"{settings} python -W ignore manage.py migrate --noinput")
    sh(f"{settings} python -W ignore manage.py loaddata sample_admin.json")
    sh(f"{settings} python -W ignore manage.py loaddata geonode/base/fixtures/default_oauth_apps.json")
    sh(f"{settings} python -W ignore manage.py loaddata geonode/base/fixtures/initial_data.json")
    sh(f"{settings} python -W ignore manage.py set_all_datasets_alternate")
    sh(f"{settings} python -W ignore manage.py collectstatic --noinput")
Ejemplo n.º 59
0
def _flake8(infile):
    """Run flake8 against the input file"""
    return sh('flake8 -v %s' % infile)
Ejemplo n.º 60
0
def push(options):
    """Push changes to remote git repository.
    """
    sh('git push')