示例#1
0
def symlink_cnames(version):
    """
    OLD
    Link from HOME/user_builds/cnames/<cname> ->
              HOME/user_builds/<project>/rtd-builds/
    NEW
    Link from HOME/user_builds/cnametoproject/<cname> ->
              HOME/user_builds/<project>/

    CNAMEs are read from Redis; both the legacy and the new symlink layouts
    are (re)created on every app server.
    """
    project_slug = version.project.slug
    try:
        redis_conn = redis.Redis(**settings.REDIS)
        cnames = redis_conn.smembers('rtd_slug:v1:%s' % project_slug)
    except redis.ConnectionError:
        # Without Redis we don't know the CNAMEs; log and bail out.
        log.error(LOG_TEMPLATE.format(project=project_slug, version=version.slug, msg='Failed to symlink cnames, Redis error.'), exc_info=True)
        return
    for cname in cnames:
        log.debug(LOG_TEMPLATE.format(project=project_slug, version=version.slug, msg="Symlinking CNAME: %s" % cname))
        # Strip the trailing version directory to get the build root.
        build_path = version.project.rtd_build_path(version.slug)
        docs_dir = '/'.join(build_path.split('/')[:-1])
        # Old symlink location -- Keep this here til we change nginx over
        old_symlink = version.project.cnames_symlink_path(cname)
        old_parent = '/'.join(old_symlink.split('/')[:-1])
        run_on_app_servers('mkdir -p %s' % old_parent)
        run_on_app_servers('ln -nsf %s %s' % (docs_dir, old_symlink))
        # New symlink location: points at the project root, not the build dir.
        new_symlink = os.path.join(getattr(settings, 'SITE_ROOT'), 'cnametoproject', cname)
        new_parent = '/'.join(new_symlink.split('/')[:-1])
        run_on_app_servers('mkdir -p %s' % new_parent)
        run_on_app_servers('ln -nsf %s %s' % (version.project.doc_path, new_symlink))
示例#2
0
def fileify(version_pk, commit):
    """
    Create ImportedFile objects for all of a version's files.

    This is a prereq for indexing the docs for search.
    It also causes celery-haystack to kick off an index of the file.
    """
    version = Version.objects.get(pk=version_pk)
    project = version.project
    path = project.rtd_build_path(version.slug)
    if not path:
        log.info(LOG_TEMPLATE.format(project=project.slug, version=version.slug, msg='No ImportedFile files'))
        return
    log.info(LOG_TEMPLATE.format(
        project=project.slug, version=version.slug, msg='Creating ImportedFiles'))
    for root, dirnames, filenames in os.walk(path):
        relative_root = root.replace(path, '').lstrip('/')
        for filename in filenames:
            if not fnmatch.fnmatch(filename, '*.html'):
                continue
            dirpath = os.path.join(relative_root, filename.lstrip('/'))
            obj, created = ImportedFile.objects.get_or_create(
                project=project,
                version=version,
                path=dirpath,
                name=filename,
                commit=commit,
            )
            if not created:
                # Re-save existing entries so the indexing-on-save hook fires.
                obj.save()
    # Delete ImportedFiles from previous versions
    ImportedFile.objects.filter(project=project, version=version).exclude(commit=commit).delete()
示例#3
0
def finish_build(version, build, results):
    """
    Build Finished, do house keeping bits
    """

    (ret, out, err) = results['html']

    project_slug = version.project.slug

    # Sphinx reports this when nothing changed since the last build.
    if 'no targets are out of date.' in out:
        log.info(LOG_TEMPLATE.format(project=project_slug, version=version.slug, msg="Build Unchanged"))
        return

    if ret != 0:
        log.warning(LOG_TEMPLATE.format(project=project_slug, version=version.slug, msg="Failed HTML Build"))
        return

    log.info(LOG_TEMPLATE.format(project=project_slug, version=version.slug, msg="Successful Build"))
    #update_search(version)
    #fileify.delay(version.pk)
    symlinks.symlink_cnames(version)
    symlinks.symlink_translations(version)
    symlinks.symlink_subprojects(version)

    if version.project.single_version:
        symlinks.symlink_single_version(version)
    else:
        symlinks.remove_symlink_single_version(version)

    # This requires database access, must disable it for now.
    #send_notifications(version, build)
示例#4
0
def setup_vcs(version, build, api):
    """
    Update the checkout of the repo to make sure it's the latest.
    This also syncs versions in the DB.

    Returns the result dict from ``update_imported_docs`` on success, or
    ``False`` when the project import fails (the build is then marked
    finished with a setup error).
    """

    log.info(
        LOG_TEMPLATE.format(project=version.project.slug,
                            version=version.slug,
                            msg='Updating docs from VCS'))
    try:
        update_output = update_imported_docs(version.pk, api)
        # Record the checked-out commit on the build when the VCS exposes it.
        commit = version.project.vcs_repo(version.slug).commit
        if commit:
            build['commit'] = commit
    except ProjectImportError as err:
        log.error(LOG_TEMPLATE.format(
            project=version.project.slug,
            version=version.slug,
            msg='Failed to import project; skipping build'),
                  exc_info=True)
        build['state'] = 'finished'
        build['setup_error'] = ('Failed to import project; skipping build.\n'
                                '\nError\n-----\n\n%s' % err.message)
        api.build(build['id']).put(build)
        return False
    # Bug fix: the success path previously returned None, so callers doing
    # ``results.update(vcs_results)`` never received the checkout results.
    return update_output
示例#5
0
def symlink_cnames(version):
    """
    OLD
    Link from HOME/user_builds/cnames/<cname> ->
              HOME/user_builds/<project>/rtd-builds/
    NEW
    Link from HOME/user_builds/cnametoproject/<cname> ->
              HOME/user_builds/<project>/

    Reads the project's CNAMEs from Redis and (re)creates both the legacy
    and the new symlink layouts on every app server. Returns early (after
    logging) when Redis is unreachable.
    """
    try:
        redis_conn = redis.Redis(**settings.REDIS)
        cnames = redis_conn.smembers('rtd_slug:v1:%s' % version.project.slug)
    except redis.ConnectionError:
        # Without Redis we cannot know the CNAMEs; log with traceback and bail.
        log.error(LOG_TEMPLATE.format(project=version.project.slug, version=version.slug, msg='Failed to symlink cnames, Redis error.'), exc_info=True)
        return
    for cname in cnames:
        log.debug(LOG_TEMPLATE.format(project=version.project.slug, version=version.slug, msg="Symlinking CNAME: %s" % cname))
        docs_dir = version.project.rtd_build_path(version.slug)
        # Chop off the version from the end.
        docs_dir = '/'.join(docs_dir.split('/')[:-1])
        # Old symlink location -- Keep this here til we change nginx over
        symlink = version.project.cnames_symlink_path(cname)
        run_on_app_servers('mkdir -p %s' % '/'.join(symlink.split('/')[:-1]))
        run_on_app_servers('ln -nsf %s %s' % (docs_dir, symlink))
        # New symlink location: points at the project root dir, not the build dir.
        new_docs_dir = version.project.doc_path
        new_cname_symlink = os.path.join(getattr(settings, 'SITE_ROOT'), 'cnametoproject', cname)
        run_on_app_servers('mkdir -p %s' % '/'.join(new_cname_symlink.split('/')[:-1]))
        run_on_app_servers('ln -nsf %s %s' % (new_docs_dir, new_cname_symlink))
示例#6
0
def update_imported_docs(version_pk, api=None):
    """
    Check out or update the given project's repository.

    Checks out the requested version (or updates to the latest revision),
    syncs the discovered tags/branches back to the API, and returns a dict
    of per-step results (currently the ``checkout`` output).
    """
    if api is None:
        api = tastyapi.api

    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project
    ret_dict = {}

    # Make sure the checkout directory exists before touching the repo.
    if not os.path.exists(project.doc_path):
        os.makedirs(project.doc_path)

    with project.repo_nonblockinglock(version=version, max_lock_age=getattr(settings, "REPO_LOCK_SECONDS", 30)):
        if not project.vcs_repo():
            raise ProjectImportError(("Repo type '{0}' unknown".format(project.repo_type)))

        # Get the actual code on disk
        if version:
            log.info(
                LOG_TEMPLATE.format(
                    project=project.slug,
                    version=version.slug,
                    msg="Checking out version {slug}: {identifier}".format(
                        slug=version.slug, identifier=version.identifier
                    ),
                )
            )
            version_slug = version.slug
            version_repo = project.vcs_repo(version_slug)
            ret_dict["checkout"] = version_repo.checkout(version.identifier)
        else:
            # Does this ever get called?
            version_slug = "latest"
            # Bug fix: ``version`` is falsy in this branch, so the previous
            # ``version.slug`` log argument would raise; log the slug we
            # actually use instead.
            log.info(LOG_TEMPLATE.format(project=project.slug, version=version_slug, msg="Updating to latest revision"))
            version_repo = project.vcs_repo(version_slug)
            ret_dict["checkout"] = version_repo.update()

        # Update tags/version

        version_post_data = {"repo": version_repo.repo_url}

        if version_repo.supports_tags:
            version_post_data["tags"] = [
                {"identifier": v.identifier, "verbose_name": v.verbose_name} for v in version_repo.tags
            ]

        if version_repo.supports_branches:
            version_post_data["branches"] = [
                {"identifier": v.identifier, "verbose_name": v.verbose_name} for v in version_repo.branches
            ]

        try:
            apiv2.project(project.pk).sync_versions.post(version_post_data)
        except Exception as e:
            # Bug fix: use ``except ... as``, and log (with traceback)
            # instead of printing to stdout so failures reach the build
            # logs; also fixes the "Verisons" typo.
            log.error("Sync Versions Exception: %s" % e, exc_info=True)

    # Bug fix: ret_dict was built but never returned, so callers saw None.
    return ret_dict
示例#7
0
def update_docs(pk, version_pk=None, build_pk=None, record=True, docker=False,
                pdf=True, man=True, epub=True, dash=True,
                search=True, force=False, intersphinx=True, localmedia=True,
                api=None, **kwargs):
    """
    The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we
    created it.  Then it will build the html docs and other requested parts.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful
        for preventing changes visible to the end-user when running commands
        from the shell, for example.

    `docker`
        Build inside a Docker container instead of the local environment
        (also forced on when ``settings.DOCKER_ENABLE`` is set).

    """

    # Dependency injection to allow for testing
    if api is None:
        api = tastyapi.api

    project_data = api.project(pk).get()
    project = make_api_project(project_data)
    log.info(LOG_TEMPLATE.format(project=project.slug, version='', msg='Building'))
    version = ensure_version(api, project, version_pk)
    build = create_build(build_pk)
    results = {}

    # Build Servery stuff
    try:
        # Clone/update the repo first; setup_vcs may return step results.
        record_build(api=api, build=build, record=record, results=results, state='cloning')
        vcs_results = setup_vcs(version, build, api)
        if vcs_results:
            results.update(vcs_results)

        if docker or settings.DOCKER_ENABLE:
            record_build(api=api, build=build, record=record, results=results, state='building')
            # NOTE: rebinds the boolean ``docker`` flag to the environment object.
            docker = DockerEnvironment(version)
            build_results = docker.build()
            results.update(build_results)
        else:
            # Local (non-Docker) build: set up the env, then build the docs.
            record_build(api=api, build=build, record=record, results=results, state='installing')
            setup_results = setup_environment(version)
            results.update(setup_results)

            record_build(api=api, build=build, record=record, results=results, state='building')
            build_results = build_docs(version, force, pdf, man, epub, dash, search, localmedia)
            results.update(build_results)

    except vcs_support_utils.LockTimeout, e:
        # Another build holds the repo lock; record a 423 (Locked) result.
        results['checkout'] = (423, "", "Version locked, retrying in 5 minutes.")
        log.info(LOG_TEMPLATE.format(project=version.project.slug,
                                     version=version.slug, msg="Unable to lock, will retry"))
        # http://celery.readthedocs.org/en/3.0/userguide/tasks.html#retrying
        # Should completely retry the task for us until max_retries is exceeded
        update_docs.retry(exc=e, throw=False)
示例#8
0
def fileify(version_pk):
    """
    Create ImportedFile objects for all of a version's files.

    This is a prereq for indexing the docs for search.
    It also causes celery-haystack to kick off an index of the file.
    """
    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project
    path = project.rtd_build_path(version.slug)
    log.info(LOG_TEMPLATE.format(project=project.slug, version=version.slug, msg='Indexing files'))
    if not path:
        return
    for root, dirnames, filenames in os.walk(path):
        relative_root = root.replace(path, '').lstrip('/')
        for filename in filenames:
            if not fnmatch.fnmatch(filename, '*.html'):
                continue
            dirpath = os.path.join(relative_root, filename.lstrip('/'))
            if getattr(settings, 'DONT_HIT_DB', True):
                # DONT_HIT_DB: record the file via the API rather than the ORM.
                api.file.post(dict(
                    project="/api/v1/project/%s/" % project.pk,
                    version="/api/v1/version/%s/" % version.pk,
                    path=dirpath,
                    name=filename))
            else:
                obj, created = ImportedFile.objects.get_or_create(
                    project=project,
                    version=version,
                    path=dirpath,
                    name=filename)
                if not created:
                    # Re-save existing entries so the indexing-on-save hook fires.
                    obj.save()
示例#9
0
def symlink_subprojects(version):
    """
    Link from HOME/user_builds/project/subprojects/<project> ->
              HOME/user_builds/<project>/rtd-builds/
    """
    # Subprojects
    subprojects = apiv2.project(
        version.project.pk).subprojects.get()['subprojects']
    for subproject_data in subprojects:
        slug = subproject_data['slug']
        # Symlink both the underscore and hyphen spellings of the slug.
        variants = [slug]
        if '_' in slug:
            variants.append(slug.replace('_', '-'))
        for subproject_slug in variants:
            log.debug(
                LOG_TEMPLATE.format(project=version.project.slug,
                                    version=version.slug,
                                    msg="Symlinking subproject: %s" %
                                    subproject_slug))

            # The directory for this specific subproject
            symlink = version.project.subprojects_symlink_path(subproject_slug)
            parent_dir = '/'.join(symlink.split('/')[:-1])
            run_on_app_servers('mkdir -p %s' % parent_dir)

            # Where the actual docs live
            docs_dir = os.path.join(settings.DOCROOT, subproject_slug,
                                    'rtd-builds')
            run_on_app_servers('ln -nsf %s %s' % (docs_dir, symlink))
示例#10
0
def symlink_translations(version):
    """
    Link from HOME/user_builds/project/translations/<lang> ->
              HOME/user_builds/<project>/rtd-builds/

    Also symlinks an ``en`` entry for backwards compatibility, plus an
    entry for the project's own language when it isn't English.
    """
    if getattr(settings, 'DONT_HIT_DB', True):
        translations = [(trans['slug'], trans['language']) for trans in apiv2.project(version.project.pk).translations.get()['translations']]
    else:
        translations = [(trans.slug, trans.language) for trans in version.project.translations.all()]
    for slug, language in translations:
        log.debug(LOG_TEMPLATE.format(project=version.project.slug, version=version.slug, msg="Symlinking translation: %s->%s" % (language, slug)))

        # The directory for this specific translation
        symlink = version.project.translations_symlink_path(language)
        run_on_app_servers('mkdir -p %s' % '/'.join(symlink.split('/')[:-1]))

        # Where the actual docs live
        docs_dir = os.path.join(settings.DOCROOT, slug, 'rtd-builds')
        run_on_app_servers('ln -nsf %s %s' % (docs_dir, symlink))

    # Hack in the en version for backwards compat
    symlink = version.project.translations_symlink_path('en')
    run_on_app_servers('mkdir -p %s' % '/'.join(symlink.split('/')[:-1]))
    docs_dir = os.path.join(version.project.doc_path, 'rtd-builds')
    run_on_app_servers('ln -nsf %s %s' % (docs_dir, symlink))
    # Add the main language project to nginx too
    # Bug fix: ``is not 'en'`` compares object identity and only works by
    # accident of string interning; use ``!=`` for a value comparison.
    if version.project.language != 'en':
        symlink = version.project.translations_symlink_path(version.project.language)
        run_on_app_servers('mkdir -p %s' % '/'.join(symlink.split('/')[:-1]))
        docs_dir = os.path.join(settings.DOCROOT, version.project.slug.replace('_', '-'), 'rtd-builds')
        run_on_app_servers('ln -nsf %s %s' % (docs_dir, symlink))
示例#11
0
def record_pdf(api, record, results, state, version):
    """Post a PDF build record to the API (Sphinx projects only).

    Returns None without doing anything when recording is disabled or the
    project isn't a Sphinx project.
    """
    if not record or 'sphinx' not in version.project.documentation_type:
        return None
    try:
        if 'pdf' in results:
            pdf_exit = results['pdf'][0]
            pdf_output = results['pdf'][1]
            pdf_error = results['pdf'][2]
            pdf_success = (pdf_exit == 0)
        else:
            # No PDF entry at all -- record a sentinel failure.
            pdf_exit = 999
            pdf_success = False
            pdf_output = pdf_error = "PDF Failed"
        payload = dict(
            state=state,
            project='/api/v1/project/%s/' % version.project.pk,
            version='/api/v1/version/%s/' % version.pk,
            success=pdf_success,
            type='pdf',
            output=pdf_output,
            error=pdf_error,
            exit_code=pdf_exit,
        )
        api.build.post(payload)
    except Exception:
        # Best-effort: failing to record the build must not break the task.
        log.error(LOG_TEMPLATE.format(project=version.project.slug,
                                      version=version.slug, msg="Unable to post a new build"), exc_info=True)
示例#12
0
def docker_build(version_pk,
                 pdf=True,
                 man=True,
                 epub=True,
                 dash=True,
                 search=True,
                 force=False,
                 intersphinx=True,
                 localmedia=True):
    """
    The code that executes inside of docker.

    Sets up the build environment, builds the docs, then dumps the combined
    results to ``build.json`` inside the project's doc path so the host
    process can pick them up.

    Returns a fresh UUID identifying this run, or ``None`` when the results
    file cannot be written.
    """
    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)

    environment_results = setup_environment(version)
    results = build_docs(version, force, pdf, man, epub, dash, search,
                         localmedia)
    results.update(environment_results)
    try:
        number = uuid.uuid4()
        path = os.path.join(version.project.doc_path, 'build.json')
        # Bug fix: use ``with`` so the handle is closed even if json.dump
        # raises (the old open()/close() sequence leaked it on error).
        with open(path, 'w') as fh:
            json.dump(results, fh)
    except IOError as e:
        log.debug(
            LOG_TEMPLATE.format(
                project=version.project.slug,
                version='',
                msg='Cannot write to build.json: {0}'.format(e)))
        return None
    return number
示例#13
0
def symlink_translations(version):
    """
    Link from HOME/user_builds/project/translations/<lang> ->
              HOME/user_builds/<project>/rtd-builds/

    Also symlinks an ``en`` entry for backwards compatibility, plus an
    entry for the project's own language when it isn't English.
    """
    translations = apiv2.project(version.project.pk).translations.get()['translations']
    for translation_data in translations:
        translation_slug = translation_data['slug'].replace('_', '-')
        translation_language = translation_data['language']
        log.debug(LOG_TEMPLATE.format(project=version.project.slug, version=version.slug, msg="Symlinking translation: %s->%s" % (translation_language, translation_slug)))

        # The directory for this specific translation
        symlink = version.project.translations_symlink_path(translation_language)
        run_on_app_servers('mkdir -p %s' % '/'.join(symlink.split('/')[:-1]))

        # Where the actual docs live
        docs_dir = os.path.join(settings.DOCROOT, translation_slug, 'rtd-builds')
        run_on_app_servers('ln -nsf %s %s' % (docs_dir, symlink))

    # Hack in the en version for backwards compat
    symlink = version.project.translations_symlink_path('en')
    run_on_app_servers('mkdir -p %s' % '/'.join(symlink.split('/')[:-1]))
    docs_dir = os.path.join(version.project.doc_path, 'rtd-builds')
    run_on_app_servers('ln -nsf %s %s' % (docs_dir, symlink))
    # Add the main language project to nginx too
    # Bug fix: ``is not 'en'`` compares object identity and only works by
    # accident of string interning; use ``!=`` for a value comparison.
    if version.project.language != 'en':
        symlink = version.project.translations_symlink_path(version.project.language)
        run_on_app_servers('mkdir -p %s' % '/'.join(symlink.split('/')[:-1]))
        docs_dir = os.path.join(settings.DOCROOT, version.project.slug.replace('_', '-'), 'rtd-builds')
        run_on_app_servers('ln -nsf %s %s' % (docs_dir, symlink))
示例#14
0
def record_pdf(api, record, results, state, version):
    """Post a PDF build record to the API (Sphinx projects only).

    Returns None without doing anything when recording is disabled or the
    project isn't a Sphinx project. When no 'pdf' entry exists in results,
    a sentinel failure (exit code 999) is recorded instead.
    """
    if not record or 'sphinx' not in version.project.documentation_type:
        return None
    try:
        if 'pdf' in results:
            pdf_exit = results['pdf'][0]
            pdf_success = pdf_exit == 0
            pdf_output = results['pdf'][1]
            pdf_error = results['pdf'][2]
        else:
            # No PDF entry at all -- record a sentinel failure.
            pdf_exit = 999
            pdf_success = False
            pdf_output = pdf_error = "PDF Failed"

        # Strip undecodable bytes so the API payload is valid text.
        pdf_output = pdf_output.decode('utf-8', 'ignore')
        pdf_error = pdf_error.decode('utf-8', 'ignore')

        api.build.post(
            dict(
                state=state,
                project='/api/v1/project/%s/' % version.project.pk,
                version='/api/v1/version/%s/' % version.pk,
                success=pdf_success,
                type='pdf',
                output=pdf_output,
                error=pdf_error,
                exit_code=pdf_exit,
            ))
    except Exception:
        # Best-effort: failing to record the build must not break the task.
        log.error(LOG_TEMPLATE.format(project=version.project.slug,
                                      version=version.slug,
                                      msg="Unable to post a new build"),
                  exc_info=True)
示例#15
0
def email_notification(version, build, email):
    """Send the build-failure notification email to ``email``."""
    log.debug(
        LOG_TEMPLATE.format(project=version.project.slug,
                            version=version.slug,
                            msg='sending email to: %s' % email))
    production_domain = getattr(settings, 'PRODUCTION_DOMAIN',
                                'readthedocs.org')
    build_url = 'https://{0}{1}'.format(production_domain,
                                        build.get_absolute_url())
    unsub_url = 'https://{0}{1}'.format(
        production_domain,
        reverse('projects_notifications', args=[version.project.slug]))
    context = {
        'version': version,
        'project': version.project,
        'build': build,
        'build_url': build_url,
        'unsub_url': unsub_url,
    }

    # Include the short commit hash in the subject when we know it.
    if build.commit:
        title = _('Failed: {project.name} ({commit})').format(
            commit=build.commit[:8], **context)
    else:
        title = _('Failed: {project.name} ({version.verbose_name})').format(
            **context)

    send_email(email,
               title,
               template='projects/email/build_failed.txt',
               template_html='projects/email/build_failed.html',
               context=context)
示例#16
0
def symlink_subprojects(version):
    """
    Link from HOME/user_builds/project/subprojects/<project> ->
              HOME/user_builds/<project>/rtd-builds/
    """
    # Subprojects
    if getattr(settings, 'DONT_HIT_DB', True):
        # DONT_HIT_DB: fetch the subproject list via the API instead of the ORM.
        subproject_slugs = [data['slug'] for data in apiv2.project(version.project.pk).subprojects.get()['subprojects']]
    else:
        rels = version.project.subprojects.all()
        subproject_slugs = [rel.child.slug for rel in rels]
    for slug in subproject_slugs:
        # Symlink both the underscore and hyphen spellings of the slug.
        variants = [slug]
        if '_' in slug:
            variants.append(slug.replace('_', '-'))
        for subproject_slug in variants:
            log.debug(LOG_TEMPLATE.format(project=version.project.slug, version=version.slug, msg="Symlinking subproject: %s" % subproject_slug))

            # The directory for this specific subproject
            symlink = version.project.subprojects_symlink_path(subproject_slug)
            parent_dir = '/'.join(symlink.split('/')[:-1])
            run_on_app_servers('mkdir -p %s' % parent_dir)

            # Where the actual docs live
            docs_dir = os.path.join(settings.DOCROOT, subproject_slug, 'rtd-builds')
            run_on_app_servers('ln -nsf %s %s' % (docs_dir, symlink))
示例#17
0
def fileify(version_pk):
    """
    Create ImportedFile objects for all of a version's files.

    This is a prereq for indexing the docs for search.
    It also causes celery-haystack to kick off an index of the file.

    Walks the version's built HTML tree and records every ``*.html`` file,
    either through the API (when DONT_HIT_DB is set) or directly via the ORM.
    """
    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project
    path = project.rtd_build_path(version.slug)
    log.info(LOG_TEMPLATE.format(
        project=project.slug, version=version.slug, msg='Indexing files'))
    if path:
        for root, dirnames, filenames in os.walk(path):
            for filename in filenames:
                if fnmatch.fnmatch(filename, '*.html'):
                    # Path relative to the build root, including the filename.
                    dirpath = os.path.join(root.replace(path, '').lstrip('/'),
                                           filename.lstrip('/'))
                    if getattr(settings, 'DONT_HIT_DB', True):
                        # DONT_HIT_DB: record the file via the API, not the ORM.
                        api.file.post(dict(
                            project="/api/v1/project/%s/" % project.pk,
                            version="/api/v1/version/%s/" % version.pk,
                            path=dirpath,
                            name=filename))
                    else:
                        obj, created = ImportedFile.objects.get_or_create(
                            project=project,
                            version=version,
                            path=dirpath,
                            name=filename)
                        if not created:
                            # Re-save so the indexing-on-save hook fires again.
                            obj.save()
示例#18
0
def email_notification(version, build, email):
    """Send the build-failure notification email to ``email``.

    The message links to the failed build page and to the project's
    notification settings (unsubscribe) page.
    """
    log.debug(
        LOG_TEMPLATE.format(project=version.project.slug, version=version.slug, msg="sending email to: %s" % email)
    )
    context = {
        "version": version,
        "project": version.project,
        "build": build,
        "build_url": "https://{0}{1}".format(
            getattr(settings, "PRODUCTION_DOMAIN", "readthedocs.org"), build.get_absolute_url()
        ),
        "unsub_url": "https://{0}{1}".format(
            getattr(settings, "PRODUCTION_DOMAIN", "readthedocs.org"),
            reverse("projects_notifications", args=[version.project.slug]),
        ),
    }

    # Include the short commit hash in the subject when we know it.
    if build.commit:
        title = _("Failed: {project.name} ({commit})").format(commit=build.commit[:8], **context)
    else:
        title = _("Failed: {project.name} ({version.verbose_name})").format(**context)

    send_email(
        email,
        title,
        template="projects/email/build_failed.txt",
        template_html="projects/email/build_failed.html",
        context=context,
    )
示例#19
0
def docker_build(version_pk, pdf=True, man=True, epub=True, dash=True, search=True, force=False, intersphinx=True, localmedia=True):
    """
    The code that executes inside of docker.

    Sets up the build environment, builds the docs, then dumps the combined
    results to ``build.json`` inside the project's doc path so the host
    process can pick them up.

    Returns a fresh UUID identifying this run, or ``None`` when the results
    file cannot be written.
    """
    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)

    environment_results = setup_environment(version)
    results = build_docs(version=version, force=force, pdf=pdf, man=man,
                         epub=epub, dash=dash, search=search, localmedia=localmedia)
    results.update(environment_results)
    try:
        number = uuid.uuid4()
        path = os.path.join(version.project.doc_path, 'build.json')
        # Bug fix: use ``with`` so the handle is closed even if json.dump
        # raises (the old open()/close() sequence leaked it on error).
        with open(path, 'w') as fh:
            json.dump(results, fh)
    except IOError as e:
        log.debug(LOG_TEMPLATE.format(
            project=version.project.slug,
            version='',
            msg='Cannot write to build.json: {0}'.format(e)
        ))
        return None
    return number
示例#20
0
def update_static_metadata(project_pk):
    """Update static metadata JSON file

    Metadata settings include the following project settings:

    version
      The default version for the project, default: `latest`

    language
      The default language for the project, default: `en`

    languages
      List of languages built by linked translation projects.

    The file is written to the project's static metadata path and then
    copied to every app server. Write failures are logged, not raised.
    """
    project_base = apiv2.project(project_pk)
    project_data = project_base.get()
    project = make_api_project(project_data)
    translations = project_base.translations.get()["translations"]
    languages = set([translation["language"] for translation in translations if "language" in translation])
    # Convert to JSON safe types
    metadata = {"version": project.default_version, "language": project.language, "languages": list(languages)}
    try:
        path = project.static_metadata_path()
        # Bug fix: use ``with`` so the handle is closed even if json.dump
        # raises (the old open()/close() sequence leaked it on error).
        with open(path, "w") as fh:
            json.dump(metadata, fh)
        copy_file_to_app_servers(path, path)
    except IOError as e:
        log.debug(
            LOG_TEMPLATE.format(project=project.slug, version="", msg="Cannot write to metadata.json: {0}".format(e))
        )
示例#21
0
def email_notification(version, build, email):
    """Send the build-failure notification email to ``email``."""
    project = version.project
    log.debug(LOG_TEMPLATE.format(project=project.slug, version=version.slug,
                                  msg='sending email to: %s' % email))
    domain = getattr(settings, 'PRODUCTION_DOMAIN', 'readthedocs.org')
    context = {
        'version': version,
        'project': project,
        'build': build,
        'build_url': 'https://{0}{1}'.format(domain,
                                             build.get_absolute_url()),
        'unsub_url': 'https://{0}{1}'.format(
            domain,
            reverse('projects_notifications', args=[project.slug])),
    }

    # Include the short commit hash in the subject when we know it.
    if build.commit:
        title = _('Failed: {project.name} ({commit})').format(
            commit=build.commit[:8], **context)
    else:
        title = _('Failed: {project.name} ({version.verbose_name})').format(
            **context)

    send_email(
        email,
        title,
        template='projects/email/build_failed.txt',
        template_html='projects/email/build_failed.html',
        context=context
    )
示例#22
0
def update_static_metadata(project_pk):
    """Update static metadata JSON file

    Metadata settings include the following project settings:

    version
      The default version for the project, default: `latest`

    language
      The default language for the project, default: `en`

    languages
      List of languages built by linked translation projects.

    The file is written to the project's static metadata path and then
    copied to every app server. Write failures are logged, not raised.
    """
    project_base = apiv2.project(project_pk)
    project_data = project_base.get()
    project = make_api_project(project_data)
    log.info(
        LOG_TEMPLATE.format(
            project=project.slug,
            version='',
            msg='Updating static metadata',
        ))
    translations = project_base.translations.get()['translations']
    languages = set([
        translation['language'] for translation in translations
        if 'language' in translation
    ])
    # Convert to JSON safe types
    metadata = {
        'version': project.default_version,
        'language': project.language,
        'languages': list(languages),
        'single_version': project.single_version,
    }
    try:
        path = project.static_metadata_path()
        # Bug fix: use ``with`` so the handle is closed even if json.dump
        # raises (the old open()/close() sequence leaked it on error).
        with open(path, 'w') as fh:
            json.dump(metadata, fh)
        copy_file_to_app_servers(path, path)
    except IOError as e:
        log.debug(
            LOG_TEMPLATE.format(
                project=project.slug,
                version='',
                msg='Cannot write to metadata.json: {0}'.format(e)))
示例#23
0
def update_static_metadata(project_pk):
    """Update static metadata JSON file

    Metadata settings include the following project settings:

    version
      The default version for the project, default: `latest`

    language
      The default language for the project, default: `en`

    languages
      List of languages built by linked translation projects.

    The file is written to the project's static metadata path and then
    copied to every app server. Write failures are logged, not raised.
    """
    project_base = apiv2.project(project_pk)
    project_data = project_base.get()
    project = make_api_project(project_data)
    log.info(LOG_TEMPLATE.format(
        project=project.slug,
        version='',
        msg='Updating static metadata',
    ))
    translations = project_base.translations.get()['translations']
    # Unique languages across all linked translation projects.
    languages = set([
        translation['language']
        for translation in translations
        if 'language' in translation
    ])
    # Convert to JSON safe types
    metadata = {
        'version': project.default_version,
        'language': project.language,
        'languages': list(languages),
        'single_version': project.single_version,
    }
    try:
        path = project.static_metadata_path()
        fh = open(path, 'w')
        json.dump(metadata, fh)
        fh.close()
        copy_file_to_app_servers(path, path)
    except IOError as e:
        # Best-effort: a write failure is logged but must not break the task.
        log.debug(LOG_TEMPLATE.format(
            project=project.slug,
            version='',
            msg='Cannot write to metadata.json: {0}'.format(e)
        ))
示例#24
0
def fileify(version_pk, commit):
    """
    Create ImportedFile objects for all of a version's files.

    This is a prereq for indexing the docs for search.
    It also causes celery-haystack to kick off an index of the file.

    :param version_pk: primary key of the Version whose build output to scan
    :param commit: VCS commit identifier to tag the files with; required
        for the stale-file cleanup below
    """
    version = Version.objects.get(pk=version_pk)
    project = version.project

    if not commit:
        log.info(
            LOG_TEMPLATE.format(
                project=project.slug,
                version=version.slug,
                msg=
                'Imported File not being built because no commit information'))
        # BUG FIX: bail out here. Without a commit we cannot tag new files,
        # and the exclude(commit=commit) cleanup below would match (and
        # delete) every ImportedFile for this version.
        return

    path = project.rtd_build_path(version.slug)
    if path:
        log.info(
            LOG_TEMPLATE.format(project=project.slug,
                                version=version.slug,
                                msg='Creating ImportedFiles'))
        for root, dirnames, filenames in os.walk(path):
            for filename in filenames:
                if fnmatch.fnmatch(filename, '*.html'):
                    # Path relative to the build root, e.g. 'sub/dir/page.html'
                    dirpath = os.path.join(
                        root.replace(path, '').lstrip('/'),
                        filename.lstrip('/'))
                    obj, created = ImportedFile.objects.get_or_create(
                        project=project,
                        version=version,
                        path=dirpath,
                        name=filename,
                        commit=commit,
                    )
                    if not created:
                        obj.save()
        # Delete ImportedFiles from previous versions
        ImportedFile.objects.filter(
            project=project, version=version).exclude(commit=commit).delete()
    else:
        log.info(
            LOG_TEMPLATE.format(project=project.slug,
                                version=version.slug,
                                msg='No ImportedFile files'))
示例#25
0
def remove_symlink_single_version(version):
    """Remove single_version symlink"""
    project = version.project
    log.debug(LOG_TEMPLATE.format(
        project=project.slug,
        version=project.default_version,
        msg="Removing symlink for single_version"))
    # Remove the symlink on every app server; -f ignores a missing link.
    run_on_app_servers('rm -f %s' % project.single_version_symlink_path())
示例#26
0
def remove_symlink_single_version(version):
    """Remove single_version symlink"""
    proj = version.project
    log.debug(
        LOG_TEMPLATE.format(project=proj.slug,
                            version=proj.get_default_version(),
                            msg="Removing symlink for single_version"))
    symlink_path = proj.single_version_symlink_path()
    # -f makes the removal idempotent across app servers.
    run_on_app_servers('rm -f %s' % symlink_path)
示例#27
0
def setup_vcs(version, build, api):
    """
    Update the checkout of the repo to make sure it's the latest.
    This also syncs versions in the DB.

    On import failure the build record is marked finished with a setup
    error and ``False`` is returned.
    """
    log.info(LOG_TEMPLATE.format(project=version.project.slug,
                                 version=version.slug,
                                 msg='Updating docs from VCS'))
    try:
        update_imported_docs(version.pk, api)
    # FIX: 'except X, err' is Python-2-only syntax; 'as' works on 2.6+/3.x.
    except ProjectImportError as err:
        log.error(LOG_TEMPLATE.format(
            project=version.project.slug,
            version=version.slug,
            msg='Failed to import project; skipping build'), exc_info=True)
        build['state'] = 'finished'
        build['setup_error'] = (
            'Failed to import project; skipping build.\n'
            '\nError\n-----\n\n%s' % err.message
        )
        api.build(build['id']).put(build)
        return False
示例#28
0
def setup_vcs(version, build, api):
    """
    Update the checkout of the repo to make sure it's the latest.
    This also syncs versions in the DB.
    """
    project_slug = version.project.slug
    log.info(LOG_TEMPLATE.format(project=project_slug,
                                 version=version.slug,
                                 msg='Updating docs from VCS'))
    try:
        update_output = update_imported_docs(version.pk, api)
        # Record the checked-out commit on the build when one is known.
        commit = version.project.vcs_repo(version.slug).commit
        if commit:
            build['commit'] = commit
    except ProjectImportError:
        log.error(LOG_TEMPLATE.format(
            project=project_slug,
            version=version.slug,
            msg='Failed to import project; skipping build'), exc_info=True)
        raise
    return update_output
示例#29
0
def setup_vcs(version, build, api):
    """
    Update the checkout of the repo to make sure it's the latest.
    This also syncs versions in the DB.
    """
    proj = version.project
    log.info(LOG_TEMPLATE.format(
        project=proj.slug, version=version.slug,
        msg='Updating docs from VCS'))
    try:
        output = update_imported_docs(version.pk, api)
        # Stamp the build with the checked-out commit, if available.
        commit = proj.vcs_repo(version.slug).commit
        if commit:
            build['commit'] = commit
    except ProjectImportError:
        log.error(
            LOG_TEMPLATE.format(
                project=proj.slug, version=version.slug,
                msg='Failed to import project; skipping build'),
            exc_info=True)
        raise
    return output
示例#30
0
def update_static_metadata(project_pk, path=None):
    """Update static metadata JSON file

    Metadata settings include the following project settings:

    version
      The default version for the project, default: `latest`

    language
      The default language for the project, default: `en`

    languages
      List of languages built by linked translation projects.
    """
    project = Project.objects.get(pk=project_pk)
    if not path:
        path = project.static_metadata_path()

    log.info(LOG_TEMPLATE.format(
        project=project.slug,
        version='',
        msg='Updating static metadata',
    ))
    # Languages of all linked translation projects, de-duplicated.
    languages = set(trans.language for trans in project.translations.all())
    # Convert to JSON safe types
    metadata = {
        'version': project.default_version,
        'language': project.language,
        'languages': list(languages),
        'single_version': project.single_version,
    }
    try:
        with open(path, 'w+') as fh:
            json.dump(metadata, fh)
        Syncer.copy(path, path, host=socket.gethostname(), file=True)
    except (AttributeError, IOError) as e:
        # Best-effort write: log and carry on.
        log.debug(LOG_TEMPLATE.format(
            project=project.slug,
            version='',
            msg='Cannot write to metadata.json: {0}'.format(e)
        ))
示例#31
0
def update_static_metadata(project_pk, path=None):
    """Update static metadata JSON file

    Metadata settings include the following project settings:

    version
      The default version for the project, default: `latest`

    language
      The default language for the project, default: `en`

    languages
      List of languages built by linked translation projects.
    """
    project = Project.objects.get(pk=project_pk)
    if not path:
        path = project.static_metadata_path()

    log.info(
        LOG_TEMPLATE.format(
            project=project.slug,
            version='',
            msg='Updating static metadata',
        ))
    # De-duplicate the languages of the linked translation projects.
    languages = {trans.language for trans in project.translations.all()}
    # Convert to JSON safe types
    metadata = {
        'version': project.default_version,
        'language': project.language,
        'languages': list(languages),
        'single_version': project.single_version,
    }
    try:
        with open(path, 'w+') as fh:
            json.dump(metadata, fh)
        Syncer.copy(path, path, host=socket.gethostname(), file=True)
    except (AttributeError, IOError) as e:
        # Failure to write metadata is non-fatal; just log it.
        log.debug(
            LOG_TEMPLATE.format(
                project=project.slug,
                version='',
                msg='Cannot write to metadata.json: {0}'.format(e)))
示例#32
0
def webhook_notification(version, build, hook_url):
    """POST a JSON build-status payload to a project's webhook URL.

    BUG FIX: the body referenced an undefined name ``project`` and raised
    NameError on every call; it is now derived from ``version``.
    """
    project = version.project
    data = json.dumps(
        {
            "name": project.name,
            "slug": project.slug,
            "build": {"id": build.id, "success": build.success, "date": build.date.strftime("%Y-%m-%d %H:%M:%S")},
        }
    )
    log.debug(LOG_TEMPLATE.format(project=project.slug, version="", msg="sending notification to: %s" % hook_url))
    requests.post(hook_url, data=data)
示例#33
0
def setup_vcs(version, build, api):
    """
    Update the checkout of the repo to make sure it's the latest.
    This also syncs versions in the DB.

    On import failure the build record is marked finished with a setup
    error and ``False`` is returned.
    """
    log.info(LOG_TEMPLATE.format(project=version.project.slug, version=version.slug, msg="Updating docs from VCS"))
    try:
        update_imported_docs(version.pk, api)
    # FIX: 'except X, err' is Python-2-only syntax; 'as' works on 2.6+/3.x.
    except ProjectImportError as err:
        log.error(
            LOG_TEMPLATE.format(
                project=version.project.slug, version=version.slug, msg="Failed to import project; skipping build"
            ),
            exc_info=True,
        )
        build["state"] = "finished"
        build["setup_error"] = "Failed to import project; skipping build.\n" "\nError\n-----\n\n%s" % err.message
        api.build(build["id"]).put(build)
        return False
示例#34
0
def ensure_version(api, project, version_pk):
    """
    Ensure we're using a sane version.
    This also creates the "latest" version if it doesn't exist.

    :param api: API v1 client used for version lookups and writes
    :param project: project whose version is being resolved
    :param version_pk: primary key of the version to use; falsy means
        "use (or create) the 'latest' version"
    :returns: an API version object
    """
    if version_pk:
        version_data = api.version(version_pk).get()
    else:
        branch = project.default_branch or project.vcs_repo().fallback_branch
        try:
            # Use latest version
            version_data = (api.version(
                project.slug).get(slug='latest')['objects'][0])
        except (slumber.exceptions.HttpClientError, IndexError):
            # Create the latest version since it doesn't exist
            version_data = dict(
                project='/api/v1/project/%s/' % project.pk,
                slug='latest',
                type='branch',
                active=True,
                verbose_name='latest',
                identifier=branch,
            )
            try:
                version_data = api.version.post(version_data)
            except Exception as e:
                log.info(
                    LOG_TEMPLATE.format(
                        project=project.slug,
                        version='',
                        msg='Exception in creating version: %s' % e))
                raise e

    version = make_api_version(version_data)

    if not version_pk:
        # Lots of course correction.
        to_save = False
        if not version.verbose_name:
            version_data['verbose_name'] = 'latest'
            to_save = True
        if not version.active:
            version_data['active'] = True
            to_save = True
        if version.identifier != branch:
            version_data['identifier'] = branch
            to_save = True
        if to_save:
            # BUG FIX: the 'project' field must reference the project
            # resource; the original built a '/api/v1/version/...' URI here.
            version_data['project'] = ("/api/v1/project/%s/" %
                                       version_data['project'].pk)
            api.version(version.pk).put(version_data)

    return version
示例#35
0
def webhook_notification(version, build, hook_url):
    """POST a JSON build-status payload to a project's webhook URL.

    BUG FIX: the body referenced an undefined name ``project`` and raised
    NameError on every call; it is now derived from ``version``.
    """
    project = version.project
    data = json.dumps({
        'name': project.name,
        'slug': project.slug,
        'build': {
            'id': build.id,
            'success': build.success,
            'date': build.date.strftime('%Y-%m-%d %H:%M:%S'),
        }
    })
    log.debug(LOG_TEMPLATE.format(project=project.slug, version='', msg='sending notification to: %s' % hook_url))
    requests.post(hook_url, data=data)
示例#36
0
def webhook_notification(version, build, hook_url):
    """Send a webhook notification with the build outcome.

    BUG FIX: ``project`` was never defined in this scope (NameError on
    every call); resolve it from the ``version`` argument.
    """
    project = version.project
    data = json.dumps({
        'name': project.name,
        'slug': project.slug,
        'build': {
            'id': build.id,
            'success': build.success,
            'date': build.date.strftime('%Y-%m-%d %H:%M:%S'),
        }
    })
    log.debug(LOG_TEMPLATE.format(project=project.slug, version='', msg='sending notification to: %s' % hook_url))
    requests.post(hook_url, data=data)
示例#37
0
def ensure_version(api, project, version_pk):
    """
    Ensure we're using a sane version.
    This also creates the "latest" version if it doesn't exist.

    :param api: API v1 client used for version lookups and writes
    :param project: project whose version is being resolved
    :param version_pk: primary key of the version to use; falsy means
        "use (or create) the 'latest' version"
    :returns: an API version object built by ``make_api_version``
    """

    if version_pk:
        version_data = api.version(version_pk).get()
    else:
        # Branch the 'latest' version should track when we have to
        # create or correct it.
        branch = project.default_branch or project.vcs_repo().fallback_branch
        try:
            # Use latest version
            version_data = (api.version(project.slug)
                            .get(slug='latest')['objects'][0])
        except (slumber.exceptions.HttpClientError, IndexError):
            # Create the latest version since it doesn't exist
            version_data = dict(
                project='/api/v1/project/%s/' % project.pk,
                slug='latest',
                type='branch',
                active=True,
                verbose_name='latest',
                identifier=branch,
            )
            try:
                version_data = api.version.post(version_data)
            except Exception as e:
                # Log and re-raise: callers must see creation failures.
                log.info(LOG_TEMPLATE.format(
                    project=project.slug, version='', msg='Exception in creating version: %s' % e))
                raise e

    version = make_api_version(version_data)

    if not version_pk:
        # Lots of course correction.
        # Repair drifted fields on the 'latest' version, then write back.
        to_save = False
        if not version.verbose_name:
            version_data['verbose_name'] = 'latest'
            to_save = True
        if not version.active:
            version_data['active'] = True
            to_save = True
        if version.identifier != branch:
            version_data['identifier'] = branch
            to_save = True
        if to_save:
            # Rewrite the project field as a resource URI before PUT.
            # NOTE(review): assumes version_data['project'] is an object
            # with a .pk here, not already a URI string -- confirm.
            version_data['project'] = ("/api/v1/project/%s/"
                                       % version_data['project'].pk)
            api.version(version.pk).put(version_data)

    return version
示例#38
0
def ensure_version(api, project, version_pk):
    """
    Ensure we're using a sane version.
    This also creates the "latest" version if it doesn't exist.

    :param api: API v1 client used for version lookups and writes
    :param project: project whose version is being resolved
    :param version_pk: primary key of the version to use; falsy means
        "use (or create) the 'latest' version"
    :returns: an API version object
    """
    if version_pk:
        version_data = api.version(version_pk).get()
    else:
        branch = project.default_branch or project.vcs_repo().fallback_branch
        try:
            # Use latest version
            version_data = api.version(project.slug).get(slug="latest")["objects"][0]
        except (slumber.exceptions.HttpClientError, IndexError):
            # Create the latest version since it doesn't exist
            version_data = dict(
                project="/api/v1/project/%s/" % project.pk,
                slug="latest",
                type="branch",
                active=True,
                verbose_name="latest",
                identifier=branch,
            )
            try:
                version_data = api.version.post(version_data)
            except Exception as e:
                log.info(
                    LOG_TEMPLATE.format(project=project.slug, version="", msg="Exception in creating version: %s" % e)
                )
                raise e

    version = make_api_version(version_data)

    if not version_pk:
        # Lots of course correction.
        to_save = False
        if not version.verbose_name:
            version_data["verbose_name"] = "latest"
            to_save = True
        if not version.active:
            version_data["active"] = True
            to_save = True
        if version.identifier != branch:
            version_data["identifier"] = branch
            to_save = True
        if to_save:
            # BUG FIX: the 'project' field must point at the project
            # resource; the original built a '/api/v1/version/...' URI here.
            version_data["project"] = "/api/v1/project/%s/" % version_data["project"].pk
            api.version(version.pk).put(version_data)

    return version
示例#39
0
def symlink_single_version(version):
    """
    Link from HOME/user_builds/<project>/single_version ->
              HOME/user_builds/<project>/rtd-builds/<default_version>/
    """
    project = version.project
    default_version = project.get_default_version()
    log.debug(LOG_TEMPLATE.format(
        project=project.slug,
        version=default_version,
        msg="Symlinking single_version"))

    # Ensure the parent directory of the symlink exists on all app servers.
    symlink = project.single_version_symlink_path()
    run_on_app_servers('mkdir -p %s' % '/'.join(symlink.split('/')[:-1]))

    # Point the symlink at the default version's built docs.
    docs_dir = os.path.join(
        settings.DOCROOT, project.slug, 'rtd-builds', default_version)
    run_on_app_servers('ln -nsf %s %s' % (docs_dir, symlink))
示例#40
0
def symlink_single_version(version):
    """
    Link from HOME/user_builds/<project>/single_version ->
              HOME/user_builds/<project>/rtd-builds/<default_version>/
    """
    default_version = version.project.get_default_version()
    log.debug(LOG_TEMPLATE.format(
        project=version.project.slug,
        version=default_version,
        msg="Symlinking single_version"))

    # Create the symlink's parent directory first.
    symlink = version.project.single_version_symlink_path()
    parent_dir = '/'.join(symlink.split('/')[:-1])
    run_on_app_servers('mkdir -p %s' % parent_dir)

    # Where the actual docs live
    docs_dir = os.path.join(settings.DOCROOT, version.project.slug,
                            'rtd-builds', default_version)
    run_on_app_servers('ln -nsf %s %s' % (docs_dir, symlink))
示例#41
0
def record_pdf(api, record, results, state, version):
    """Post a build record for the PDF step to the API.

    No-op when ``record`` is falsy. Unicode decode failures while posting
    are logged with a traceback rather than propagated.
    """
    if not record:
        return None
    try:
        api.build.post(dict(
            state=state,
            project='/api/v1/project/%s/' % version.project.pk,
            version='/api/v1/version/%s/' % version.pk,
            success=results['pdf'][0] == 0,
            type='pdf',
            output=results['pdf'][1],
            error=results['pdf'][2],
            exit_code=results['pdf'][0],
        ))
    # FIX: 'except X, e' is Python-2-only syntax; the bound name was also
    # unused (exc_info=True already captures the traceback).
    except UnicodeDecodeError:
        log.error(LOG_TEMPLATE.format(
            project=version.project.slug,
            version=version.slug,
            msg="Unable to post a new build"), exc_info=True)
示例#42
0
def symlink_subprojects(version):
    """
    Link from HOME/user_builds/project/subprojects/<project> ->
              HOME/user_builds/<project>/rtd-builds/
    """
    parent = version.project
    # Subprojects
    response = apiv2.project(parent.pk).subprojects.get()
    for subproject in response['subprojects']:
        slug = subproject['slug']
        log.debug(LOG_TEMPLATE.format(
            project=parent.slug,
            version=version.slug,
            msg="Symlinking subproject: %s" % slug))

        # The directory for this specific subproject
        symlink = parent.subprojects_symlink_path(slug)
        run_on_app_servers('mkdir -p %s' % '/'.join(symlink.split('/')[:-1]))

        # Where the actual docs live
        docs_dir = os.path.join(settings.DOCROOT, slug, 'rtd-builds')
        run_on_app_servers('ln -nsf %s %s' % (docs_dir, symlink))
示例#43
0
def record_pdf(api, record, results, state, version):
    """Post the outcome of the PDF build step to the API.

    Skipped entirely when recording is disabled, the project is not a
    Sphinx project, or PDF builds are disabled for the project.
    """
    project = version.project
    if not record or "sphinx" not in project.documentation_type:
        return None
    if not project.enable_pdf_build:
        return None
    try:
        if "pdf" in results:
            pdf_exit = results["pdf"][0]
            pdf_output = results["pdf"][1]
            pdf_error = results["pdf"][2]
            pdf_success = pdf_exit == 0
        else:
            # No PDF result captured at all; record a synthetic failure.
            pdf_exit = 999
            pdf_success = False
            pdf_output = pdf_error = "PDF Failed"

        pdf_output = pdf_output.decode("utf-8", "ignore")
        pdf_error = pdf_error.decode("utf-8", "ignore")

        # LaTeX can exit non-zero yet still write the PDF.
        if "Output written on" in pdf_output:
            pdf_success = True

        payload = dict(
            state=state,
            project="/api/v1/project/%s/" % project.pk,
            version="/api/v1/version/%s/" % version.pk,
            success=pdf_success,
            type="pdf",
            output=pdf_output,
            error=pdf_error,
            exit_code=pdf_exit,
            builder=socket.gethostname(),
        )
        api.build.post(payload)
    except Exception:
        log.error(
            LOG_TEMPLATE.format(project=project.slug, version=version.slug, msg="Unable to post a new build"),
            exc_info=True,
        )
示例#44
0
def symlink_translations(version):
    """
    Link from HOME/user_builds/project/translations/<lang> ->
              HOME/user_builds/<project>/rtd-builds/
    """
    translations = {}

    # Map language code -> project slug, via the API or the DB.
    if getattr(settings, 'DONT_HIT_DB', True):
        for trans in (api
                      .project(version.project.pk)
                      .translations.get()['translations']):
            translations[trans['language']] = trans['slug']
    else:
        for trans in version.project.translations.all():
            translations[trans.language] = trans.slug

    # Default language, and pointer for 'en'
    version_slug = version.project.slug.replace('_', '-')
    translations[version.project.language] = version_slug
    # FIX: dict.has_key() is deprecated (removed in Python 3); use 'in'.
    if 'en' not in translations:
        translations['en'] = version_slug

    run_on_app_servers(
        'mkdir -p {0}'
        .format(os.path.join(version.project.doc_path, 'translations')))

    for (language, slug) in translations.items():
        log.debug(LOG_TEMPLATE.format(
            project=version.project.slug,
            version=version.slug,
            msg="Symlinking translation: %s->%s" % (language, slug)
        ))

        # The directory for this specific translation
        symlink = version.project.translations_symlink_path(language)
        translation_path = os.path.join(settings.DOCROOT, slug, 'rtd-builds')
        run_on_app_servers('ln -nsf {0} {1}'.format(translation_path, symlink))
示例#45
0
def symlink_translations(version):
    """
    Link from HOME/user_builds/project/translations/<lang> ->
              HOME/user_builds/<project>/rtd-builds/
    """
    translations = {}

    # Map language code -> project slug, via API v2 or the DB.
    if getattr(settings, 'DONT_HIT_DB', True):
        for trans in (apiv2
                      .project(version.project.pk)
                      .translations.get()['translations']):
            translations[trans['language']] = trans['slug']
    else:
        for trans in version.project.translations.all():
            translations[trans.language] = trans.slug

    # Default language, and pointer for 'en'
    version_slug = version.project.slug.replace('_', '-')
    translations[version.project.language] = version_slug
    # FIX: dict.has_key() is deprecated (removed in Python 3); use 'in'.
    if 'en' not in translations:
        translations['en'] = version_slug

    run_on_app_servers(
        'mkdir -p {0}'
        .format(os.path.join(version.project.doc_path, 'translations')))

    for (language, slug) in translations.items():
        log.debug(LOG_TEMPLATE.format(
            project=version.project.slug,
            version=version.slug,
            msg="Symlinking translation: %s->%s" % (language, slug)
        ))

        # The directory for this specific translation
        symlink = version.project.translations_symlink_path(language)
        translation_path = os.path.join(settings.DOCROOT, slug, 'rtd-builds')
        run_on_app_servers('ln -nsf {0} {1}'.format(translation_path, symlink))
示例#46
0
def record_pdf(record, results, state, version):
    """Post the PDF build result for this version to API v1.

    Returns None without posting when recording is off, the project is
    not Sphinx-based, or PDF builds are disabled.
    """
    project = version.project
    if not record or 'sphinx' not in project.documentation_type:
        return None
    if not project.enable_pdf_build:
        return None
    try:
        if 'pdf' in results:
            exit_code = results['pdf'][0]
            raw_output = results['pdf'][1]
            raw_error = results['pdf'][2]
            success = exit_code == 0
        else:
            # Nothing captured for the PDF step: record a synthetic failure.
            exit_code = 999
            success = False
            raw_output = raw_error = "PDF Failed"

        output = raw_output.decode('utf-8', 'ignore')
        error = raw_error.decode('utf-8', 'ignore')

        # LaTeX may exit non-zero while still producing the PDF.
        if 'Output written on' in output:
            success = True

        api_v1.build.post(dict(
            state=state,
            project='/api/v1/project/%s/' % project.pk,
            version='/api/v1/version/%s/' % version.pk,
            success=success,
            type='pdf',
            output=output,
            error=error,
            exit_code=exit_code,
            builder=socket.gethostname(),
        ))
    except Exception:
        log.error(LOG_TEMPLATE.format(
            project=project.slug,
            version=version.slug,
            msg="Unable to post a new build"), exc_info=True)
示例#47
0
    def build(self, **kwargs):
        """Build mkdocs docs for this builder's version.

        Steps: rewrite mkdocs.yml with RTD assets, write the RTD data and
        include JS files into the docs dir, run the mkdocs build, then
        best-effort index the generated HTML for search.

        :returns: result object from ``run`` for the mkdocs build command
        """
        project = self.version.project
        checkout_path = project.checkout_path(self.version.slug)
        site_path = os.path.join(checkout_path, 'site')
        # NOTE(review): chdir makes all relative paths below (mkdocs.yml,
        # docs_dir) resolve inside the checkout -- this is process-global.
        os.chdir(checkout_path)

        # Pull mkdocs config data
        user_config = yaml.safe_load(open('mkdocs.yml', 'r'))
        docs_dir = user_config.get('docs_dir', 'docs')

        # Set mkdocs config values

        MEDIA_URL = getattr(settings, 'MEDIA_URL',
                            'https://media.readthedocs.org')
        # Append RTD javascript to any user-provided list, or create one.
        if 'extra_javascript' in user_config:
            user_config['extra_javascript'].append(
                '%sjavascript/jquery/jquery-2.0.3.min.js' % MEDIA_URL)
            user_config['extra_javascript'].append('readthedocs-data.js')
            user_config['extra_javascript'].append(
                'readthedocs-dynamic-include.js')
            user_config['extra_javascript'].append(
                '%sjavascript/readthedocs-doc-embed.js' % MEDIA_URL)
        else:
            user_config['extra_javascript'] = [
                '%sjavascript/jquery/jquery-2.0.3.min.js' % MEDIA_URL,
                'readthedocs-data.js',
                'readthedocs-dynamic-include.js',
                '%sjavascript/readthedocs-doc-embed.js' % MEDIA_URL,
            ]

        # Same treatment for RTD CSS.
        if 'extra_css' in user_config:
            user_config['extra_css'].append(
                'https://media.readthedocs.org/css/badge_only.css')
            user_config['extra_css'].append(
                'https://media.readthedocs.org/css/readthedocs-doc-embed.css')
        else:
            user_config['extra_css'] = [
                'https://media.readthedocs.org/css/badge_only.css',
                'https://media.readthedocs.org/css/readthedocs-doc-embed.css',
            ]

        # No explicit page list: discover every markdown file on disk.
        if 'pages' not in user_config:
            user_config['pages'] = []
            for root, dirnames, filenames in os.walk(docs_dir):
                for filename in filenames:
                    if fnmatch.fnmatch(filename, '*.md'):
                        full_path = os.path.join(root.replace(
                            docs_dir, ''), filename.lstrip('/')).lstrip('/')
                        user_config['pages'].append([full_path])

        # Write the augmented config back for the mkdocs run below.
        yaml.dump(user_config, open('mkdocs.yml', 'w'))

        # RTD javascript writing

        # Context injected into the docs pages via readthedocs-data.js.
        READTHEDOCS_DATA = {
            'project':
            project.slug,
            'version':
            self.version.slug,
            'language':
            project.language,
            'page':
            None,
            'theme':
            "readthedocs",
            'docroot':
            docs_dir,
            'source_suffix':
            ".md",
            'api_host':
            getattr(settings, 'SLUMBER_API_HOST', 'https://readthedocs.org'),
            'commit':
            self.version.project.vcs_repo(self.version.slug).commit,
        }
        data_json = json.dumps(READTHEDOCS_DATA, indent=4)
        data_ctx = Context({
            'data_json': data_json,
            'current_version': READTHEDOCS_DATA['version'],
            'slug': READTHEDOCS_DATA['project'],
            'html_theme': READTHEDOCS_DATA['theme'],
            'pagename': None,
        })
        data_string = template_loader.get_template(
            'doc_builder/data.js.tmpl').render(data_ctx)

        data_file = open(os.path.join(docs_dir, 'readthedocs-data.js'), 'w+')
        data_file.write(data_string)
        data_file.close()

        # Render the dynamic-include JS (analytics hooks) into the docs dir.
        include_ctx = Context({
            'global_analytics_code':
            getattr(settings, 'GLOBAL_ANALYTICS_CODE', 'UA-17997319-1'),
            'user_analytics_code':
            project.analytics_code,
        })
        include_string = template_loader.get_template(
            'doc_builder/include.js.tmpl').render(include_ctx)
        include_file = open(
            os.path.join(docs_dir, 'readthedocs-dynamic-include.js'), 'w+')
        include_file.write(include_string)
        include_file.close()

        # Actual build

        # Prefer the project's virtualenv mkdocs binary when available.
        if project.use_virtualenv:
            build_command = "%s build --site-dir=site --theme=mkdocs" % (
                project.venv_bin(version=self.version.slug, bin='mkdocs'))
        else:
            build_command = "mkdocs build --site-dir=site --theme=mkdocs"
        results = run(build_command, shell=True)

        # Best-effort search indexing: failures never fail the build.
        try:
            # Index Search
            page_list = []
            log.info(
                LOG_TEMPLATE.format(project=self.version.project.slug,
                                    version=self.version.slug,
                                    msg='Indexing files'))
            for root, dirnames, filenames in os.walk(site_path):
                for filename in filenames:
                    if fnmatch.fnmatch(filename, '*.html'):
                        full_path = os.path.join(root, filename.lstrip('/'))
                        # Path relative to the site root, without '.html'.
                        relative_path = os.path.join(
                            root.replace(site_path, '').lstrip('/'),
                            filename.lstrip('/'))
                        relative_path = re.sub('.html$', '', relative_path)
                        html = parse_content_from_file(
                            documentation_type='mkdocs', file_path=full_path)
                        headers = parse_headers_from_file(
                            documentation_type='mkdocs', file_path=full_path)
                        sections = parse_sections_from_file(
                            documentation_type='mkdocs', file_path=full_path)
                        page_list.append({
                            'content': html,
                            'path': relative_path,
                            'title': sections[0]['title'],
                            'headers': headers,
                            'sections': sections
                        })

            data = {
                'page_list': page_list,
                'version_pk': self.version.pk,
                'project_pk': self.version.project.pk
            }
            log_msg = ' '.join([page['path'] for page in page_list])
            log.info("(Search Index) Sending Data: %s [%s]" %
                     (self.version.project.slug, log_msg))
            apiv2.index_search.post({'data': data})
        # NOTE(review): bare except deliberately swallows all indexing
        # errors so the build result is still returned.
        except:
            log.error('Search indexing failed')

        return results
示例#48
0
def update_docs(pk,
                version_pk=None,
                build_pk=None,
                record=True,
                docker=False,
                pdf=True,
                man=True,
                epub=True,
                dash=True,
                search=True,
                force=False,
                intersphinx=True,
                localmedia=True,
                api=None,
                basic=False,
                **kwargs):
    """
    The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we
    created it.  Then it will build the html docs and other requested parts.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful
        for preventing changes visible to the end-user when running commands
        from the shell, for example.

    """
    # Dependency injection to allow for testing
    if api is None:
        api = tastyapi.api
        apiv2 = tastyapi.apiv2
    else:
        apiv2 = api

    start_time = datetime.datetime.utcnow()
    try:
        project_data = api.project(pk).get()
    except HttpClientError:
        log.exception(
            LOG_TEMPLATE.format(
                project=pk,
                version='',
                msg='Failed to get project data on build. Erroring.'))
    project = make_api_project(project_data)
    # Don't build skipped projects
    if project.skip:
        log.info(
            LOG_TEMPLATE.format(project=project.slug,
                                version='',
                                msg='Skipping'))
        return
    else:
        log.info(
            LOG_TEMPLATE.format(project=project.slug,
                                version='',
                                msg='Building'))
    version = ensure_version(api, project, version_pk)
    build = create_build(build_pk)
    results = {}

    # Build Servery stuff
    try:
        record_build(api=api,
                     build=build,
                     record=record,
                     results=results,
                     state='cloning')
        vcs_results = setup_vcs(version, build, api)
        if vcs_results:
            results.update(vcs_results)

        if project.documentation_type == 'auto':
            update_documentation_type(version, apiv2)

        if docker or settings.DOCKER_ENABLE:
            record_build(api=api,
                         build=build,
                         record=record,
                         results=results,
                         state='building')
            docker = DockerEnvironment(version)
            build_results = docker.build()
            results.update(build_results)
        else:
            record_build(api=api,
                         build=build,
                         record=record,
                         results=results,
                         state='installing')
            setup_results = setup_environment(version)
            results.update(setup_results)

            record_build(api=api,
                         build=build,
                         record=record,
                         results=results,
                         state='building')
            build_results = build_docs(version, force, pdf, man, epub, dash,
                                       search, localmedia)
            results.update(build_results)

    except vcs_support_utils.LockTimeout, e:
        results['checkout'] = (423, "",
                               "Version locked, retrying in 5 minutes.")
        log.info(
            LOG_TEMPLATE.format(project=version.project.slug,
                                version=version.slug,
                                msg="Unable to lock, will retry"))
        # http://celery.readthedocs.org/en/3.0/userguide/tasks.html#retrying
        # Should completely retry the task for us until max_retries is exceeded
        update_docs.retry(exc=e, throw=False)
示例#49
0
def build_docs(version, force, pdf, man, epub, dash, search, localmedia):
    """
    This handles the actual building of the documentation

    `version`
        Version object whose docs should be built.
    `force`
        Force a full rebuild of the HTML output.
    `pdf` / `epub`
        Whether to attempt the optional Sphinx output formats.
    `man` / `dash`
        Accepted for interface compatibility; not used in this function.
    `search`
        Build the search JSON output.
    `localmedia`
        Build the single-page HTML download archive.

    Returns a dict mapping artifact name to its build result tuple.
    """

    project = version.project
    results = {}

    before_build.send(sender=version)

    # Serialize builds of the same repo; gives up (raises) rather than
    # blocking if another build holds the lock.
    with project.repo_nonblockinglock(version=version,
                                      max_lock_age=getattr(
                                          settings, 'REPO_LOCK_SECONDS', 30)):
        html_builder = builder_loading.get(project.documentation_type)(version)
        if force:
            html_builder.force()
        html_builder.append_conf()
        results['html'] = html_builder.build()
        if results['html'][0] == 0:
            html_builder.move()

        # Gracefully attempt to move files via task on web workers.
        try:
            move_files.delay(
                version_pk=version.pk,
                html=True,
                hostname=socket.gethostname(),
            )
        except socket.error:
            pass

        fake_results = (999, "Project Skipped, Didn't build",
                        "Project Skipped, Didn't build")
        if 'mkdocs' in project.documentation_type:
            if search:
                try:
                    search_builder = builder_loading.get('mkdocs_json')(
                        version)
                    results['search'] = search_builder.build()
                    if results['search'][0] == 0:
                        search_builder.move()
                # Narrowed from a bare ``except:`` so SystemExit and
                # KeyboardInterrupt still propagate; errors are logged and
                # the remaining build steps continue.
                except Exception:
                    log.error(LOG_TEMPLATE.format(project=project.slug,
                                                  version=version.slug,
                                                  msg="JSON Build Error"),
                              exc_info=True)

        if 'sphinx' in project.documentation_type:
            # Search builder. Creates JSON from docs and sends it to the
            # server.
            if search:
                try:
                    search_builder = builder_loading.get('sphinx_search')(
                        version)
                    results['search'] = search_builder.build()
                    if results['search'][0] == 0:
                        # Copy json for safe keeping
                        search_builder.move()
                except Exception:
                    log.error(LOG_TEMPLATE.format(project=project.slug,
                                                  version=version.slug,
                                                  msg="JSON Build Error"),
                              exc_info=True)
            # Local media builder for singlepage HTML download archive
            if localmedia:
                try:
                    localmedia_builder = builder_loading.get(
                        'sphinx_singlehtmllocalmedia')(version)
                    results['localmedia'] = localmedia_builder.build()
                    if results['localmedia'][0] == 0:
                        localmedia_builder.move()
                except Exception:
                    log.error(LOG_TEMPLATE.format(
                        project=project.slug,
                        version=version.slug,
                        msg="Local Media HTML Build Error"),
                              exc_info=True)

            # Optional build steps
            if version.project.slug not in HTML_ONLY and not project.skip:
                if pdf:
                    pdf_builder = builder_loading.get('sphinx_pdf')(version)
                    results['pdf'] = pdf_builder.build()
                    # Always move pdf results even when there's an error.
                    # if pdf_results[0] == 0:
                    pdf_builder.move()
                else:
                    results['pdf'] = fake_results
                if epub:
                    epub_builder = builder_loading.get('sphinx_epub')(version)
                    results['epub'] = epub_builder.build()
                    if results['epub'][0] == 0:
                        epub_builder.move()
                else:
                    results['epub'] = fake_results

    after_build.send(sender=version)

    return results
示例#50
0
def setup_environment(version):
    """
    Build the virtualenv and install the project into it.

    Always build projects with a virtualenv.

    Returns a dict mapping step name ('venv', 'doc_builder',
    'requirements', 'install') to the result tuple of each `run` command.
    """
    ret_dict = {}
    project = version.project
    build_dir = os.path.join(project.venv_path(version=version.slug), 'build')
    if os.path.exists(build_dir):
        log.info(
            LOG_TEMPLATE.format(project=project.slug,
                                version=version.slug,
                                msg='Removing existing build dir'))
        shutil.rmtree(build_dir)
    if project.use_system_packages:
        site_packages = '--system-site-packages'
    else:
        site_packages = '--no-site-packages'
    # Here the command has been modified to support different
    # interpreters.
    ret_dict['venv'] = run('{cmd} {site_packages} {path}'.format(
        cmd='virtualenv-2.7 -p {interpreter}'.format(
            interpreter=project.python_interpreter),
        site_packages=site_packages,
        path=project.venv_path(version=version.slug)))
    # Other code expects sphinx-build to be installed inside the
    # virtualenv.  Using the -I option makes sure it gets installed
    # even if it is already installed system-wide (and
    # --system-site-packages is used)
    if project.use_system_packages:
        ignore_option = '-I'
    else:
        ignore_option = ''

    wheeldir = os.path.join(settings.SITE_ROOT, 'deploy', 'wheels')
    ret_dict['doc_builder'] = run((
        '{cmd} install --use-wheel --find-links={wheeldir} -U {ignore_option} '
        'sphinx==1.2.2 virtualenv==1.10.1 setuptools==1.1 docutils==0.11 mkdocs==0.11.1 mock==1.0.1 pillow==2.6.1'
        ' readthedocs-sphinx-ext==0.4.4 sphinx-rtd-theme==0.1.6 ').format(
            cmd=project.venv_bin(version=version.slug, bin='pip'),
            ignore_option=ignore_option,
            wheeldir=wheeldir,
        ))

    # Handle requirements: prefer an explicitly configured file; otherwise
    # search the docs dir first, then the repo root. First match wins.

    requirements_file_path = project.requirements_file
    checkout_path = project.checkout_path(version.slug)
    if not requirements_file_path:
        docs_dir = builder_loading.get(
            project.documentation_type)(version).docs_dir()
        for path in [docs_dir, '']:
            for req_file in ['pip_requirements.txt', 'requirements.txt']:
                test_path = os.path.join(checkout_path, path, req_file)
                # Debug logging instead of a stray print left in production.
                log.debug('Testing %s', test_path)
                if os.path.exists(test_path):
                    requirements_file_path = test_path
                    break
            # The ``break`` above only exits the inner loop; stop the outer
            # search too so a docs-dir match is not overridden by a file at
            # the repo root.
            if requirements_file_path:
                break

    if requirements_file_path:
        os.chdir(checkout_path)
        ret_dict['requirements'] = run(
            '{cmd} install --exists-action=w -r {requirements}'.format(
                cmd=project.venv_bin(version=version.slug, bin='pip'),
                requirements=requirements_file_path))

    # Handle setup.py

    os.chdir(project.checkout_path(version.slug))
    if os.path.isfile("setup.py"):
        if getattr(settings, 'USE_PIP_INSTALL', False):
            ret_dict['install'] = run(
                '{cmd} install --ignore-installed .'.format(
                    cmd=project.venv_bin(version=version.slug, bin='pip')))
        else:
            ret_dict['install'] = run('{cmd} setup.py install --force'.format(
                cmd=project.venv_bin(version=version.slug, bin='python')))
    else:
        ret_dict['install'] = (999, "", "No setup.py, skipping install")
    return ret_dict
示例#51
0
def update_imported_docs(version_pk, api=None):
    """
    Check out or update the given project's repository.

    `version_pk`
        Primary key of the Version whose sources should be fetched.

    `api`
        Optional API client; defaults to ``tastyapi.api`` (dependency
        injection for testing).

    Returns a dict with a 'checkout' entry holding the VCS command result.
    Also pushes the repo's tags/branches back to the API after checkout.
    """
    if api is None:
        api = tastyapi.api

    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project
    ret_dict = {}

    # Make Dirs
    if not os.path.exists(project.doc_path):
        os.makedirs(project.doc_path)

    if not project.vcs_repo():
        raise ProjectImportError(
            ("Repo type '{0}' unknown".format(project.repo_type)))

    # Serialize checkouts of the same repo across builds; raises instead of
    # blocking if another build already holds the lock.
    with project.repo_nonblockinglock(version=version,
                                      max_lock_age=getattr(
                                          settings, 'REPO_LOCK_SECONDS', 30)):

        before_vcs.send(sender=version)
        # Get the actual code on disk
        if version:
            log.info(
                LOG_TEMPLATE.format(
                    project=project.slug,
                    version=version.slug,
                    msg='Checking out version {slug}: {identifier}'.format(
                        slug=version.slug, identifier=version.identifier)))
            version_slug = version.slug
            version_repo = project.vcs_repo(version_slug)
            ret_dict['checkout'] = version_repo.checkout(version.identifier, )
        else:
            # Does this ever get called?
            # NOTE(review): if ``version`` were falsy, ``version.slug`` in the
            # log call below would fail anyway -- this branch looks dead.
            log.info(
                LOG_TEMPLATE.format(project=project.slug,
                                    version=version.slug,
                                    msg='Updating to latest revision'))
            version_slug = 'latest'
            version_repo = project.vcs_repo(version_slug)
            ret_dict['checkout'] = version_repo.update()

        after_vcs.send(sender=version)

        # Update tags/version

        version_post_data = {'repo': version_repo.repo_url}

        if version_repo.supports_tags:
            version_post_data['tags'] = [{
                'identifier': v.identifier,
                'verbose_name': v.verbose_name,
            } for v in version_repo.tags]

        if version_repo.supports_branches:
            version_post_data['branches'] = [{
                'identifier': v.identifier,
                'verbose_name': v.verbose_name,
            } for v in version_repo.branches]

        # NOTE(review): ``apiv2`` is not defined in this function -- presumably
        # a module-level client; confirm against the full source.
        try:
            apiv2.project(project.pk).sync_versions.post(version_post_data)
        except Exception, e:
            print "Sync Versions Exception: %s" % e.message
示例#52
0
         LOG_TEMPLATE.format(project=version.project.slug,
                             version=version.slug,
                             msg="Unable to lock, will retry"))
     # http://celery.readthedocs.org/en/3.0/userguide/tasks.html#retrying
     # Should completely retry the task for us until max_retries is exceeded
     update_docs.retry(exc=e, throw=False)
 except ProjectImportError, e:
     results['checkout'] = (
         404, "",
         'Failed to import project; skipping build.\n\nError\n-----\n\n%s' %
         e.message)
     # Close out build in finally with error.
     pass
 except Exception, e:
     log.error(LOG_TEMPLATE.format(project=version.project.slug,
                                   version=version.slug,
                                   msg="Top-level Build Failure"),
               exc_info=True)
     results['checkout'] = (404, "",
                            'Top-level Build Failure: %s' % e.message)
 finally:
     record_build(api=api,
                  build=build,
                  record=record,
                  results=results,
                  state='finished',
                  start_time=start_time)
     record_pdf(api=api,
                record=record,
                results=results,
                state='finished',
示例#53
0
def build_docs(version, force, pdf, man, epub, dash, search, localmedia):
    """
    This handles the actual building of the documentation

    `version`
        Version object whose docs should be built.
    `force`
        Force a full rebuild of the HTML output.
    `pdf` / `epub`
        Whether to attempt the optional Sphinx output formats.
    `man` / `dash`
        Accepted for interface compatibility; not used in this function.
    `search` / `localmedia`
        Build the search JSON / single-page HTML archive outputs.

    Returns a dict mapping artifact name to its build result tuple.
    """

    project = version.project
    results = {}

    if 'sphinx' in project.documentation_type:
        try:
            project.conf_file(version.slug)
        except ProjectImportError:
            # No conf.py means nothing can be built; report and bail early.
            results['html'] = (999, 'Conf file not found.', '')
            return results

    with project.repo_nonblockinglock(version=version,
                                      max_lock_age=getattr(
                                          settings, 'REPO_LOCK_SECONDS', 30)):
        html_builder = builder_loading.get(project.documentation_type)(version)
        if force:
            html_builder.force()
        #html_builder.clean()
        if 'sphinx' in project.documentation_type:
            html_builder.append_conf()
        results['html'] = html_builder.build()
        if results['html'][0] == 0:
            html_builder.move()

        fake_results = (999, "Project Skipped, Didn't build",
                        "Project Skipped, Didn't build")
        if 'sphinx' in project.documentation_type:
            # Search builder. Creates JSON from docs and sends it to the server.
            if search:
                try:
                    search_builder = builder_loading.get('sphinx_search')(
                        version)
                    results['search'] = search_builder.build()
                    if results['search'][0] == 0:
                        # Copy json for safe keeping
                        search_builder.move()
                # Narrowed from a bare ``except:`` so SystemExit and
                # KeyboardInterrupt still propagate; errors are logged and
                # the remaining build steps continue.
                except Exception:
                    log.error(LOG_TEMPLATE.format(project=project.slug,
                                                  version=version.slug,
                                                  msg="JSON Build Error"),
                              exc_info=True)
            # Local media builder for singlepage HTML download archive
            if localmedia:
                try:
                    localmedia_builder = builder_loading.get(
                        'sphinx_singlehtmllocalmedia')(version)
                    results['localmedia'] = localmedia_builder.build()
                    if results['localmedia'][0] == 0:
                        localmedia_builder.move()
                except Exception:
                    log.error(LOG_TEMPLATE.format(
                        project=project.slug,
                        version=version.slug,
                        msg="Local Media HTML Build Error"),
                              exc_info=True)

            # Optional build steps
            if version.project.slug not in HTML_ONLY and not project.skip:
                if pdf:
                    pdf_builder = builder_loading.get('sphinx_pdf')(version)
                    results['pdf'] = pdf_builder.build()
                    # Always move pdf results even when there's an error.
                    #if pdf_results[0] == 0:
                    pdf_builder.move()
                else:
                    results['pdf'] = fake_results
                if epub:
                    epub_builder = builder_loading.get('sphinx_epub')(version)
                    results['epub'] = epub_builder.build()
                    if results['epub'][0] == 0:
                        epub_builder.move()
                else:
                    results['epub'] = fake_results
    return results
示例#54
0
def setup_environment(version):
    """
    Build the virtualenv and install the project into it.

    Returns a dict mapping step name ('venv', 'sphinx', 'requirements',
    'install') to the result of each `run` command; empty when the
    project does not use a virtualenv.
    """
    ret_dict = {}
    project = version.project
    # Guard clause: nothing to set up when the project opts out.
    if not project.use_virtualenv:
        return ret_dict

    venv_root = project.venv_path(version=version.slug)
    stale_build_dir = os.path.join(venv_root, 'build')
    if os.path.exists(stale_build_dir):
        log.info(
            LOG_TEMPLATE.format(project=project.slug,
                                version=version.slug,
                                msg='Removing existing build dir'))
        shutil.rmtree(stale_build_dir)

    site_packages_flag = ('--system-site-packages'
                          if project.use_system_packages
                          else '--no-site-packages')
    # The virtualenv command has been modified to support different
    # interpreters (via -p).
    ret_dict['venv'] = run('{cmd} {site_packages} {path}'.format(
        cmd='virtualenv-2.7 -p {interpreter}'.format(
            interpreter=project.python_interpreter),
        site_packages=site_packages_flag,
        path=venv_root))

    # Other code expects sphinx-build to live inside the virtualenv; -I
    # forces a reinstall there even when the package already exists
    # system-wide (relevant with --system-site-packages).
    reinstall_flag = '-I' if project.use_system_packages else ''
    sphinx = 'sphinx==1.2.2'
    if project.python_interpreter != 'python3':
        extra_reqs = (
            'virtualenv==1.10.1 setuptools==1.1 '
            'docutils==0.11 git+git://github.com/ericholscher/readthedocs-sphinx-ext#egg=readthedocs_ext')
    else:
        # python 3 specific hax
        extra_reqs = (
            'virtualenv==1.9.1 docutils==0.11 git+git://github.com/ericholscher/readthedocs-sphinx-ext#egg=readthedocs_ext')
    ret_dict['sphinx'] = run(
        '{cmd} install -U {ignore_option} {sphinx} {extra}'.format(
            cmd=project.venv_bin(version=version.slug, bin='pip'),
            sphinx=sphinx,
            ignore_option=reinstall_flag,
            extra=extra_reqs))

    if project.requirements_file:
        os.chdir(project.checkout_path(version.slug))
        ret_dict['requirements'] = run(
            '{cmd} install --exists-action=w -r {requirements}'.format(
                cmd=project.venv_bin(version=version.slug, bin='pip'),
                requirements=project.requirements_file))

    os.chdir(project.checkout_path(version.slug))
    if not os.path.isfile("setup.py"):
        ret_dict['install'] = (999, "", "No setup.py, skipping install")
        return ret_dict

    if getattr(settings, 'USE_PIP_INSTALL', False):
        install_cmd = '{cmd} install --ignore-installed .'.format(
            cmd=project.venv_bin(version=version.slug, bin='pip'))
    else:
        install_cmd = '{cmd} setup.py install --force'.format(
            cmd=project.venv_bin(version=version.slug, bin='python'))
    ret_dict['install'] = run(install_cmd)
    return ret_dict
示例#55
0
def update_docs(pk,
                version_pk=None,
                record=True,
                docker=False,
                pdf=True,
                man=True,
                epub=True,
                dash=True,
                search=True,
                force=False,
                intersphinx=True,
                localmedia=True,
                api=None,
                **kwargs):
    """
    The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we
    created it.  Then it will build the html docs and other requested parts.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful
        for preventing changes visible to the end-user when running commands
        from the shell, for example.

    """

    # Dependency injection to allow for testing
    if api is None:
        api = tastyapi.api

    project_data = api.project(pk).get()
    project = make_api_project(project_data)
    log.info(
        LOG_TEMPLATE.format(project=project.slug, version='', msg='Building'))
    version = ensure_version(api, project, version_pk)
    build = create_build(version, api, record)
    results = {}

    try:
        # Pipeline: clone -> (docker | install + build) -> move -> record.
        record_build(api=api,
                     build=build,
                     record=record,
                     results=results,
                     state='cloning')
        vcs_results = setup_vcs(version, build, api)
        results.update(vcs_results)

        if docker:
            record_build(api=api,
                         build=build,
                         record=record,
                         results=results,
                         state='building')
            build_results = run_docker(version)
            results.update(build_results)
        else:
            record_build(api=api,
                         build=build,
                         record=record,
                         results=results,
                         state='installing')
            setup_results = setup_environment(version)
            results.update(setup_results)

            record_build(api=api,
                         build=build,
                         record=record,
                         results=results,
                         state='building')
            build_results = build_docs(version, force, pdf, man, epub, dash,
                                       search, localmedia)
            results.update(build_results)

        move_files(version, results)
        record_pdf(api=api,
                   record=record,
                   results=results,
                   state='finished',
                   version=version)
        finish_build(version=version, build=build, results=results)

        # NOTE(review): raises KeyError if no 'html' result was produced
        # (e.g. the docker/build step returned nothing) -- confirm whether
        # that is handled by a broader except clause in the full source.
        if results['html'][0] == 0:
            # Mark version active on the site
            version_data = api.version(version.pk).get()
            version_data['active'] = True
            version_data['built'] = True
            # Need to delete this because a bug in tastypie breaks on the users
            # list.
            del version_data['project']
            try:
                api.version(version.pk).put(version_data)
            except Exception, e:
                log.error(LOG_TEMPLATE.format(
                    project=version.project.slug,
                    version=version.slug,
                    msg="Unable to put a new version"),
                          exc_info=True)
    except vcs_support_utils.LockTimeout, e:
        results['checkout'] = (999, "",
                               "Version locked, retrying in 5 minutes.")
        log.info(
            LOG_TEMPLATE.format(project=version.project.slug,
                                version=version.slug,
                                msg="Unable to lock, will retry"))
        # http://celery.readthedocs.org/en/3.0/userguide/tasks.html#retrying
        # Should completely retry the task for us until max_retries is exceeded
        update_docs.retry(exc=e, throw=False)
示例#56
0
            # Mark version active on the site
            version_data = api.version(version.pk).get()
            version_data['active'] = True
            version_data['built'] = True
            # Need to delete this because a bug in tastypie breaks on the users
            # list.
            del version_data['project']
            try:
                api.version(version.pk).put(version_data)
            except Exception, e:
                log.error(LOG_TEMPLATE.format(project=version.project.slug, version=version.slug, msg="Unable to put a new version"), exc_info=True)
    except Exception, e:
        log.error(LOG_TEMPLATE.format(project=version.project.slug, version=version.slug, msg="Top-level Build Failure"), exc_info=True)
    finally:
        record_build(api=api, build=build, record=record, results=results, state='finished')
        log.info(LOG_TEMPLATE.format(project=version.project.slug, version='', msg='Build finished'))

def move_files(version, results):
    """
    Copy build artifacts to their serving locations.

    `results` maps artifact name to a (status, out, err) tuple; an artifact
    is only copied when its status is 0 (success).
    """
    if results['html'][0] == 0:
        from_path = version.project.artifact_path(version=version.slug, type=version.project.documentation_type)
        target = version.project.rtd_build_path(version.slug)
        core_utils.copy(from_path, target)

    if 'sphinx' in version.project.documentation_type:
        # Single-page HTML download archive.
        if 'localmedia' in results and results['localmedia'][0] == 0:
            from_path = version.project.artifact_path(version=version.slug, type='sphinx_localmedia')
            to_path = os.path.join(settings.MEDIA_ROOT, 'htmlzip', version.project.slug, version.slug)
            core_utils.copy(from_path, to_path)
        # Search JSON artifacts.
        if 'search' in results and results['search'][0] == 0:
            from_path = version.project.artifact_path(version=version.slug, type='sphinx_search')
            to_path = os.path.join(settings.MEDIA_ROOT, 'json', version.project.slug, version.slug)
            # NOTE(review): this branch assigns ``to_path`` but never copies;
            # the function appears truncated here (a core_utils.copy call is
            # likely missing) -- confirm against the full source.
示例#57
0
def build_docs(version, force, pdf, man, epub, dash, search, localmedia):
    """
    This handles the actual building of the documentation

    `version`
        Version object whose docs should be built.
    `force`
        Force a full rebuild of the HTML output.
    `pdf` / `epub`
        Whether to attempt the optional Sphinx output formats.
    `man` / `dash`
        Accepted for interface compatibility; not used in this function.
    `search` / `localmedia`
        Build the search JSON / single-page HTML archive outputs.

    Returns a dict mapping artifact name to its build result tuple.
    """

    project = version.project
    results = {}

    if 'sphinx' in project.documentation_type:
        try:
            project.conf_file(version.slug)
        except ProjectImportError:
            # No conf.py means nothing can be built; report and bail early.
            results['html'] = (999, 'Conf file not found.', '')
            return results

    with project.repo_nonblockinglock(version=version,
                                      max_lock_age=getattr(settings, 'REPO_LOCK_SECONDS', 30)):
        html_builder = builder_loading.get(project.documentation_type)(version)
        if force:
            html_builder.force()
        # html_builder.clean()
        if 'sphinx' in project.documentation_type:
            html_builder.append_conf()
        results['html'] = html_builder.build()
        if results['html'][0] == 0:
            html_builder.move()

        fake_results = (999, "Project Skipped, Didn't build",
                        "Project Skipped, Didn't build")
        if 'sphinx' in project.documentation_type:
            # Search builder. Creates JSON from docs and sends it to the
            # server.
            if search:
                try:
                    search_builder = builder_loading.get(
                        'sphinx_search')(version)
                    results['search'] = search_builder.build()
                    if results['search'][0] == 0:
                        # Copy json for safe keeping
                        search_builder.move()
                # Narrowed from a bare ``except:`` so SystemExit and
                # KeyboardInterrupt still propagate; errors are logged and
                # the remaining build steps continue.
                except Exception:
                    log.error(LOG_TEMPLATE.format(
                        project=project.slug, version=version.slug, msg="JSON Build Error"), exc_info=True)
            # Local media builder for singlepage HTML download archive
            if localmedia:
                try:
                    localmedia_builder = builder_loading.get(
                        'sphinx_singlehtmllocalmedia')(version)
                    results['localmedia'] = localmedia_builder.build()
                    if results['localmedia'][0] == 0:
                        localmedia_builder.move()
                except Exception:
                    log.error(LOG_TEMPLATE.format(
                        project=project.slug, version=version.slug, msg="Local Media HTML Build Error"), exc_info=True)

            # Optional build steps
            if version.project.slug not in HTML_ONLY and not project.skip:
                if pdf:
                    pdf_builder = builder_loading.get('sphinx_pdf')(version)
                    results['pdf'] = pdf_builder.build()
                    # Always move pdf results even when there's an error.
                    # if pdf_results[0] == 0:
                    pdf_builder.move()
                else:
                    results['pdf'] = fake_results
                if epub:
                    epub_builder = builder_loading.get('sphinx_epub')(version)
                    results['epub'] = epub_builder.build()
                    if results['epub'][0] == 0:
                        epub_builder.move()
                else:
                    results['epub'] = fake_results
    return results