Code example #1
File: tasks.py Project: Nagyman/readthedocs.org
def build_docs(project, build, version, pdf, man, epub, record, force, update_output={}):
    """
    This handles the actual building of the documentation and DB records
    """
    if not project.conf_file(version.slug):
        return ('', 'Conf file not found.', -1)

    html_builder = builder_loading.get(project.documentation_type)(version)
    if force:
        html_builder.force()
    html_builder.clean()
    html_output = html_builder.build()
    successful = (html_output[0] == 0)
    if successful:
        html_builder.move()
        if version:
            version_data = api.version(version.pk).get()
            version_data['active'] = True
            version_data['built'] = True
            #Need to delete this because a bug in tastypie breaks on the users list.
            del version_data['project']
            try:
                api.version(version.pk).put(version_data)
            except Exception, e:
                log.error("Unable to post a new version", exc_info=True)
Code example #2
def build_docs(project, build, version, pdf, man, epub, record, force, update_output={}):
    """
    This handles the actual building of the documentation and DB records
    """
    if not project.conf_file(version.slug):
        return ('', 'Conf file not found.', -1)

    html_builder = builder_loading.get(project.documentation_type)(version)
    if force:
        html_builder.force()
    html_builder.clean()
    html_output = html_builder.build(id=build['id'])
    successful = (html_output[0] == 0)
    if successful:
        html_builder.move()
        if version:
            version_data = api.version(version.pk).get()
            version_data['active'] = True
            version_data['built'] = True
            #Need to delete this because a bug in tastypie breaks on the users list.
            del version_data['project']
            try:
                api.version(version.pk).put(version_data)
            except Exception, e:
                log.error("Unable to post a new version", exc_info=True)
Code example #3
File: tasks.py Project: starlocke/readthedocs.org
def ensure_version(api, project, version_pk):
    """
    Ensure we're using a sane version.
    """

    if version_pk:
        version_data = api.version(version_pk).get()
    else:
        version_data = api.version(project.slug).get(slug='latest')['objects'][0]
    version = make_api_version(version_data)
    return version
Code example #4
File: tasks.py Project: agjohnson/readthedocs.org
def ensure_version(api, project, version_pk):
    """
    Ensure we're using a sane version.
    """

    if version_pk:
        version_data = api.version(version_pk).get()
    else:
        version_data = api.version(project.slug).get(slug=LATEST)['objects'][0]
    version = make_api_version(version_data)
    return version
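
The two variants above differ only in whether the 'latest' slug is hard-coded or taken from a LATEST constant; both fall back to the project's latest version when no primary key is given. Below is a minimal, self-contained sketch of that lookup-or-fallback pattern; the _FakeApi client and its data are hypothetical stand-ins for the tastypie/slumber API, not part of readthedocs.org.

# Minimal sketch of the pk-or-latest lookup used by ensure_version().
# _FakeApi and _FakeResource are hypothetical stand-ins for the slumber client.
class _FakeResource(object):
    def __init__(self, records):
        self._records = records

    def get(self, **filters):
        # Mimic slumber: .get() on a pk returns one record,
        # .get(slug=...) on a project returns a result page.
        if filters:
            matches = [r for r in self._records
                       if all(r.get(k) == v for k, v in filters.items())]
            return {'objects': matches}
        return self._records[0]


class _FakeApi(object):
    def __init__(self, versions_by_key):
        self._versions_by_key = versions_by_key

    def version(self, pk_or_slug):
        return _FakeResource(self._versions_by_key[pk_or_slug])


def lookup_version(api, project_slug, version_pk=None):
    """Fetch a version by pk, falling back to the project's 'latest' slug."""
    if version_pk:
        return api.version(version_pk).get()
    return api.version(project_slug).get(slug='latest')['objects'][0]


api = _FakeApi({
    42: [{'pk': 42, 'slug': '1.0'}],
    'pip': [{'pk': 7, 'slug': 'latest'}, {'pk': 42, 'slug': '1.0'}],
})
assert lookup_version(api, 'pip', version_pk=42)['slug'] == '1.0'
assert lookup_version(api, 'pip')['slug'] == 'latest'
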
Code example #5
def ensure_version(api, project, version_pk):
    """
    Ensure we're using a sane version.
    This also creates the "latest" version if it doesn't exist.
    """

    if version_pk:
        version_data = api.version(version_pk).get()
    else:
        branch = project.default_branch or project.vcs_repo().fallback_branch
        try:
            # Use latest version
            version_data = (api.version(
                project.slug).get(slug='latest')['objects'][0])
        except (slumber.exceptions.HttpClientError, IndexError):
            # Create the latest version since it doesn't exist
            version_data = dict(
                project='/api/v1/project/%s/' % project.pk,
                slug='latest',
                type='branch',
                active=True,
                verbose_name='latest',
                identifier=branch,
            )
            try:
                version_data = api.version.post(version_data)
            except Exception as e:
                log.info(
                    LOG_TEMPLATE.format(
                        project=project.slug,
                        version='',
                        msg='Exception in creating version: %s' % e))
                raise e

    version = make_api_version(version_data)

    if not version_pk:
        # Lots of course correction.
        to_save = False
        if not version.verbose_name:
            version_data['verbose_name'] = 'latest'
            to_save = True
        if not version.active:
            version_data['active'] = True
            to_save = True
        if version.identifier != branch:
            version_data['identifier'] = branch
            to_save = True
        if to_save:
            version_data['project'] = ("/api/v1/version/%s/" %
                                       version_data['project'].pk)
            api.version(version.pk).put(version_data)

    return version
Code example #6
def ensure_version(api, project, version_pk):
    """
    Ensure we're using a sane version.
    This also creates the "latest" version if it doesn't exist.
    """

    if version_pk:
        version_data = api.version(version_pk).get()
    else:
        branch = project.default_branch or project.vcs_repo().fallback_branch
        try:
            # Use latest version
            version_data = (api.version(project.slug)
                            .get(slug='latest')['objects'][0])
        except (slumber.exceptions.HttpClientError, IndexError):
            # Create the latest version since it doesn't exist
            version_data = dict(
                project='/api/v1/project/%s/' % project.pk,
                slug='latest',
                type='branch',
                active=True,
                verbose_name='latest',
                identifier=branch,
            )
            try:
                version_data = api.version.post(version_data)
            except Exception as e:
                log.info(LOG_TEMPLATE.format(
                    project=project.slug, version='', msg='Exception in creating version: %s' % e))
                raise e

    version = make_api_version(version_data)

    if not version_pk:
        # Lots of course correction.
        to_save = False
        if not version.verbose_name:
            version_data['verbose_name'] = 'latest'
            to_save = True
        if not version.active:
            version_data['active'] = True
            to_save = True
        if version.identifier != branch:
            version_data['identifier'] = branch
            to_save = True
        if to_save:
            version_data['project'] = ("/api/v1/project/%s/"
                                       % version_data['project'].pk)
            api.version(version.pk).put(version_data)

    return version
Code example #7
File: tasks.py Project: WojtekK1902/readthedocs.org
def ensure_version(api, project, version_pk):
    """
    Ensure we're using a sane version.
    This also creates the "latest" version if it doesn't exist.
    """

    if version_pk:
        version_data = api.version(version_pk).get()
    else:
        branch = project.default_branch or project.vcs_repo().fallback_branch
        try:
            # Use latest version
            version_data = api.version(project.slug).get(slug="latest")["objects"][0]
        except (slumber.exceptions.HttpClientError, IndexError):
            # Create the latest version since it doesn't exist
            version_data = dict(
                project="/api/v1/project/%s/" % project.pk,
                slug="latest",
                type="branch",
                active=True,
                verbose_name="latest",
                identifier=branch,
            )
            try:
                version_data = api.version.post(version_data)
            except Exception as e:
                log.info(
                    LOG_TEMPLATE.format(project=project.slug, version="", msg="Exception in creating version: %s" % e)
                )
                raise e

    version = make_api_version(version_data)

    if not version_pk:
        # Lots of course correction.
        to_save = False
        if not version.verbose_name:
            version_data["verbose_name"] = "latest"
            to_save = True
        if not version.active:
            version_data["active"] = True
            to_save = True
        if version.identifier != branch:
            version_data["identifier"] = branch
            to_save = True
        if to_save:
            version_data["project"] = "/api/v1/version/%s/" % version_data["project"].pk
            api.version(version.pk).put(version_data)

    return version
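
The "course correction" block above only issues a PUT when at least one field diverges from the expected defaults for the latest version. A stripped-down sketch of that diff-then-update idea follows; the helper name and the plain-dict input are illustrative, not the project's API.

# Sketch of the "course correction" step above: compare a fetched record against
# the expected defaults and collect only the fields that need to change.
def correct_latest_version(version_data, branch):
    updates = {}
    if not version_data.get('verbose_name'):
        updates['verbose_name'] = 'latest'
    if not version_data.get('active'):
        updates['active'] = True
    if version_data.get('identifier') != branch:
        updates['identifier'] = branch
    return updates


updates = correct_latest_version(
    {'verbose_name': '', 'active': False, 'identifier': 'main'}, branch='master')
assert updates == {'verbose_name': 'latest', 'active': True, 'identifier': 'master'}
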
Code example #8
def handle(self, *args, **options):
    if len(args):
        for slug in args:
            version_data = api.version(slug).get(slug="latest")['objects'][0]
            version = tasks.make_api_version(version_data)
            log.info("Building %s" % version)
            tasks.docker_build(version_pk=version.pk)
Code example #9
def docker_build(version_pk, pdf=True, man=True, epub=True, dash=True, search=True, force=False, intersphinx=True, localmedia=True):
    """
    The code that executes inside of docker
    """
    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)

    environment_results = setup_environment(version)
    results = build_docs(version=version, force=force, pdf=pdf, man=man,
                         epub=epub, dash=dash, search=search, localmedia=localmedia)
    results.update(environment_results)
    try:
        number = uuid.uuid4()
        path = os.path.join(version.project.doc_path, 'build.json')
        fh = open(path, 'w')
        json.dump(results, fh)
        fh.close()
    except IOError as e:
        log.debug(LOG_TEMPLATE.format(
            project=version.project.slug,
            version='',
            msg='Cannot write to build.json: {0}'.format(e)
        ))
        return None
    return number
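
The snippet above opens build.json without a context manager, so an exception from json.dump() would leak the file handle. Below is a small sketch of the same write, assuming nothing beyond the standard library, with the handle managed by a with block; the directory argument is a placeholder.

import json
import os
import tempfile
import uuid

# Sketch of the build.json write above, using a context manager so the file
# handle is closed even if json.dump() raises. The directory is a placeholder.
def write_build_results(doc_path, results):
    number = uuid.uuid4()
    path = os.path.join(doc_path, 'build.json')
    try:
        with open(path, 'w') as fh:
            json.dump(results, fh)
    except IOError:
        return None
    return number


assert write_build_results(tempfile.mkdtemp(), {'html': [0, 'ok', '']}) is not None
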
Code example #10
File: tasks.py Project: billthornton/readthedocs.org
def fileify(version_pk):
    """
    Create ImportedFile objects for all of a version's files.

    This is a prereq for indexing the docs for search.
    It also causes celery-haystack to kick off an index of the file.
    """
    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project
    path = project.rtd_build_path(version.slug)
    log.info("Indexing files for %s" % project)
    if path:
        for root, dirnames, filenames in os.walk(path):
            for filename in filenames:
                if fnmatch.fnmatch(filename, "*.html"):
                    dirpath = os.path.join(root.replace(path, "").lstrip("/"), filename.lstrip("/"))
                    if getattr(settings, "DONT_HIT_DB", True):
                        api.file.post(
                            dict(
                                project="/api/v1/project/%s/" % project.pk,
                                version="/api/v1/version/%s/" % version.pk,
                                path=dirpath,
                                name=filename,
                            )
                        )
                    else:
                        ImportedFile.objects.get_or_create(
                            project=project, version=version, path=dirpath, name=filename
                        )
Code example #11
File: tasks.py Project: titilambert/readthedocs.org
def fileify(version_pk):
    """
    Create ImportedFile objects for all of a version's files.

    This is a prereq for indexing the docs for search.
    It also causes celery-haystack to kick off an index of the file.
    """
    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project
    path = project.rtd_build_path(version.slug)
    log.info('Indexing files for %s' % project)
    if path:
        for root, dirnames, filenames in os.walk(path):
            for filename in filenames:
                if fnmatch.fnmatch(filename, '*.html'):
                    dirpath = os.path.join(
                        root.replace(path, '').lstrip('/'),
                        filename.lstrip('/'))
                    if getattr(settings, 'DONT_HIT_DB', True):
                        api.file.post(
                            dict(project="/api/v1/project/%s/" % project.pk,
                                 version="/api/v1/version/%s/" % version.pk,
                                 path=dirpath,
                                 name=filename))
                    else:
                        ImportedFile.objects.get_or_create(project=project,
                                                           version=version,
                                                           path=dirpath,
                                                           name=filename)
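
Both fileify() variants share the same directory walk: find every *.html file under the build path and record its path relative to that root. A self-contained sketch of just that walk follows; the helper name is illustrative.

import fnmatch
import os

# Sketch of the *.html walk used by fileify(): yield (relative_path, filename)
# pairs for every HTML file under the given build path.
def html_files(path):
    for root, dirnames, filenames in os.walk(path):
        for filename in filenames:
            if fnmatch.fnmatch(filename, '*.html'):
                dirpath = os.path.join(root.replace(path, '').lstrip('/'),
                                       filename.lstrip('/'))
                yield dirpath, filename


# e.g. list(html_files('/path/to/rtd-builds/latest'))
# -> [('index.html', 'index.html'), ('api/tasks.html', 'tasks.html'), ...]
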
Code example #12
File: tasks.py Project: warrior1724/readthedocs.org
def docker_build(version_pk,
                 pdf=True,
                 man=True,
                 epub=True,
                 dash=True,
                 search=True,
                 force=False,
                 intersphinx=True,
                 localmedia=True):
    """
    The code that executes inside of docker
    """
    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)

    environment_results = setup_environment(version)
    results = build_docs(version, force, pdf, man, epub, dash, search,
                         localmedia)
    results.update(environment_results)
    try:
        number = uuid.uuid4()
        path = os.path.join(version.project.doc_path, 'build.json')
        fh = open(path, 'w')
        json.dump(results, fh)
        fh.close()
    except IOError as e:
        log.debug(
            LOG_TEMPLATE.format(
                project=version.project.slug,
                version='',
                msg='Cannot write to build.json: {0}'.format(e)))
        return None
    return number
Code example #13
File: tasks.py Project: yildizib/readthedocs.org
def update_config_from_json(version_pk):
    """
    Check out or update the given project's repository.
    """
    # Remove circular import
    from projects.forms import ImportProjectForm
    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project
    log.debug(LOG_TEMPLATE.format(project=project.slug, version=version.slug, msg="Checking for json config"))
    try:
        rtd_json = open(os.path.join(
            project.checkout_path(version.slug),
            '.rtd.json'
        ))
        json_obj = json.load(rtd_json)
        for key in json_obj.keys():
            # Treat the defined fields on the Import form as 
            # the canonical list of allowed user editable fields.
            # This is in essence just another UI for that form.
            if key not in ImportProjectForm._meta.fields:
                del json_obj[key]
    except IOError:
        log.debug(LOG_TEMPLATE.format(project=project.slug, version=version.slug, msg="No rtd.json found."))
        return None

    project_data = api.project(project.pk).get()
    project_data.update(json_obj)
    api.project(project.pk).put(project_data)
    log.debug(LOG_TEMPLATE.format(project=project.slug, version=version.slug, msg="Updated from JSON."))
Code example #14
File: tasks.py Project: awesome/readthedocs.org
def update_config_from_json(version_pk):
    """
    Check out or update the given project's repository.
    """
    # Remove circular import
    from projects.forms import ImportProjectForm
    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project
    log.debug(LOG_TEMPLATE.format(project=project.slug, version=version.slug, msg="Checking for json config"))
    try:
        rtd_json = open(os.path.join(
            project.checkout_path(version.slug),
            '.rtd.json'
        ))
        json_obj = json.load(rtd_json)
        for key in json_obj.keys():
            # Treat the defined fields on the Import form as 
            # the canonical list of allowed user editable fields.
            # This is in essence just another UI for that form.
            if key not in ImportProjectForm._meta.fields:
                del json_obj[key]
    except IOError:
        log.debug(LOG_TEMPLATE.format(project=project.slug, version=version.slug, msg="No rtd.json found."))
        return None

    project_data = api.project(project.pk).get()
    project_data.update(json_obj)
    api.project(project.pk).put(project_data)
    log.debug(LOG_TEMPLATE.format(project=project.slug, version=version.slug, msg="Updated from JSON."))
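
The key loop above whitelists the .rtd.json contents against the fields defined on ImportProjectForm. Here is a minimal sketch of that filtering, with an illustrative allow-list standing in for the form's _meta.fields.

import json

# Sketch of the .rtd.json filtering above. ALLOWED is an illustrative stand-in
# for ImportProjectForm._meta.fields; only whitelisted keys survive.
ALLOWED = ('name', 'repo', 'repo_type', 'description', 'documentation_type')

def filter_rtd_json(raw_text, allowed=ALLOWED):
    json_obj = json.loads(raw_text)
    return dict((key, value) for key, value in json_obj.items()
                if key in allowed)


assert filter_rtd_json('{"repo": "git://example.com/x.git", "pk": 1}') == \
    {'repo': 'git://example.com/x.git'}
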
Code example #15
File: tasks.py Project: titilambert/readthedocs.org
def build_docs(version_pk, pdf, man, epub, dash, record, force):
    """
    This handles the actual building of the documentation and DB records
    """
    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project

    if not project.conf_file(version.slug):
        return ('', 'Conf file not found.', -1)

    with project.repo_lock(getattr(settings, 'REPO_LOCK_SECONDS', 30)):

        html_builder = builder_loading.get(project.documentation_type)(version)
        if force:
            html_builder.force()
        html_builder.clean()
        html_results = html_builder.build()
        if html_results[0] == 0:
            html_builder.move()

        fake_results = (999, "Project Skipped, Didn't build",
                        "Project Skipped, Didn't build")
        # Only build everything else if the html build changed.
        if html_builder.changed and not project.skip:
            if pdf:
                pdf_builder = builder_loading.get('sphinx_pdf')(version)
                latex_results, pdf_results = pdf_builder.build()
                # Always move pdf results even when there's an error.
                #if pdf_results[0] == 0:
                pdf_builder.move()
            else:
                pdf_results = latex_results = fake_results
            if man:
                man_builder = builder_loading.get('sphinx_man')(version)
                man_results = man_builder.build()
                if man_results[0] == 0:
                    man_builder.move()
            else:
                man_results = fake_results
            if epub:
                epub_builder = builder_loading.get('sphinx_epub')(version)
                epub_results = epub_builder.build()
                if epub_results[0] == 0:
                    epub_builder.move()
            else:
                epub_results = fake_results
            # Disable dash building for now.
            dash = False
            if dash:
                dash_builder = builder_loading.get('sphinx_dash')(version)
                dash_results = dash_builder.build()
                if dash_results[0] == 0:
                    dash_builder.move()
            else:
                dash_results = fake_results

    return (html_results, latex_results, pdf_results, man_results,
            epub_results, dash_results)
Code example #16
File: tasks.py Project: WojtekK1902/readthedocs.org
def update_imported_docs(version_pk, api=None):
    """
    Check out or update the given project's repository.
    """
    if api is None:
        api = tastyapi.api

    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project
    ret_dict = {}

    # Make Dirs
    if not os.path.exists(project.doc_path):
        os.makedirs(project.doc_path)

    with project.repo_nonblockinglock(version=version, max_lock_age=getattr(settings, "REPO_LOCK_SECONDS", 30)):
        if not project.vcs_repo():
            raise ProjectImportError(("Repo type '{0}' unknown".format(project.repo_type)))

        # Get the actual code on disk
        if version:
            log.info(
                LOG_TEMPLATE.format(
                    project=project.slug,
                    version=version.slug,
                    msg="Checking out version {slug}: {identifier}".format(
                        slug=version.slug, identifier=version.identifier
                    ),
                )
            )
            version_slug = version.slug
            version_repo = project.vcs_repo(version_slug)
            ret_dict["checkout"] = version_repo.checkout(version.identifier)
        else:
            # Does this ever get called?
            log.info(LOG_TEMPLATE.format(project=project.slug, version=version.slug, msg="Updating to latest revision"))
            version_slug = "latest"
            version_repo = project.vcs_repo(version_slug)
            ret_dict["checkout"] = version_repo.update()

        # Update tags/version

        version_post_data = {"repo": version_repo.repo_url}

        if version_repo.supports_tags:
            version_post_data["tags"] = [
                {"identifier": v.identifier, "verbose_name": v.verbose_name} for v in version_repo.tags
            ]

        if version_repo.supports_branches:
            version_post_data["branches"] = [
                {"identifier": v.identifier, "verbose_name": v.verbose_name} for v in version_repo.branches
            ]

        try:
            apiv2.project(project.pk).sync_versions.post(version_post_data)
        except Exception, e:
            print "Sync Verisons Exception: %s" % e.message
Code example #17
def build_docs(version_pk, pdf, man, epub, dash, record, force):
    """
    This handles the actual building of the documentation and DB records
    """
    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project

    if not project.conf_file(version.slug):
        return ('', 'Conf file not found.', -1)

    with project.repo_lock(getattr(settings, 'REPO_LOCK_SECONDS', 30)):

        html_builder = builder_loading.get(project.documentation_type)(version)
        if force:
            html_builder.force()
        html_builder.clean()
        html_results = html_builder.build()
        if html_results[0] == 0:
            html_builder.move()

        fake_results = (999, "Project Skipped, Didn't build",
                        "Project Skipped, Didn't build")
        # Only build everything else if the html build changed.
        if html_builder.changed and not project.skip:
            if pdf:
                pdf_builder = builder_loading.get('sphinx_pdf')(version)
                latex_results, pdf_results = pdf_builder.build()
                # Always move pdf results even when there's an error.
                #if pdf_results[0] == 0:
                pdf_builder.move()
            else:
                pdf_results = latex_results = fake_results
            if man:
                man_builder = builder_loading.get('sphinx_man')(version)
                man_results = man_builder.build()
                if man_results[0] == 0:
                    man_builder.move()
            else:
                man_results = fake_results
            if epub:
                epub_builder = builder_loading.get('sphinx_epub')(version)
                epub_results = epub_builder.build()
                if epub_results[0] == 0:
                    epub_builder.move()
            else:
                epub_results = fake_results
            # Disable dash building for now.
            dash = False
            if dash:
                dash_builder = builder_loading.get('sphinx_dash')(version)
                dash_results = dash_builder.build()
                if dash_results[0] == 0:
                    dash_builder.move()
            else:
                dash_results = fake_results

    return (html_results, latex_results, pdf_results, man_results,
            epub_results, dash_results)
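
In both build_docs() variants the non-HTML formats are only attempted when the HTML build changed, and any skipped format is represented by a placeholder tuple so the return value keeps a constant shape (the first tuple element is treated as an exit code, as in html_results[0] == 0). Below is a reduced sketch of that convention, with the real Sphinx builders replaced by a stub.

# Sketch of the skipped-format placeholder pattern above. fake_builder() stands
# in for the Sphinx builders; SKIPPED mirrors fake_results in the code.
SKIPPED = (999, "Project Skipped, Didn't build", "Project Skipped, Didn't build")

def fake_builder():
    return (0, 'build output', '')

def build_formats(html_changed, pdf=True, epub=False):
    if not html_changed:
        return SKIPPED, SKIPPED
    pdf_results = fake_builder() if pdf else SKIPPED
    epub_results = fake_builder() if epub else SKIPPED
    return pdf_results, epub_results


assert build_formats(html_changed=True, epub=False)[1] is SKIPPED
assert build_formats(html_changed=False)[0] is SKIPPED
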
Code example #18
File: tasks.py Project: ctb/readthedocs.org
def clear_artifacts(version_pk):
    """ Remove artifacts from the build server. """
    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    run('rm -rf %s' % version.project.full_epub_path(version.slug))
    run('rm -rf %s' % version.project.full_man_path(version.slug))
    run('rm -rf %s' % version.project.full_build_path(version.slug))
    run('rm -rf %s' % version.project.full_latex_path(version.slug))
Code example #19
def clear_artifacts(version_pk):
    """ Remove artifacts from the build server. """
    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    run('rm -rf %s' % version.project.full_epub_path(version.slug))
    run('rm -rf %s' % version.project.full_man_path(version.slug))
    run('rm -rf %s' % version.project.full_build_path(version.slug))
    run('rm -rf %s' % version.project.full_latex_path(version.slug))
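
Both clear_artifacts() variants shell out to rm -rf for each artifact directory. As a point of comparison, here is a sketch of the same cleanup done in-process with the standard library; the path list would come from the project path helpers shown above.

import shutil

# Sketch of an in-process alternative to run('rm -rf %s' % path):
# ignore_errors=True mirrors rm -rf's tolerance of paths that do not exist.
def clear_paths(paths):
    for path in paths:
        shutil.rmtree(path, ignore_errors=True)


clear_paths(['/tmp/nonexistent-epub', '/tmp/nonexistent-latex'])
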
Code example #20
def handle(self, *args, **options):
    if len(args):
        for slug in args:
            version_data = api.version(slug).get(
                slug="latest")['objects'][0]
            version = tasks.make_api_version(version_data)
            log.info("Building %s" % version)
            tasks.docker_build(version_pk=version.pk)
Code example #21
def update_intersphinx(version_pk):
    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project

    try:
        object_file = version.project.find('objects.inv', version.slug)[0]
    except IndexError, e:
        print "Failed to find objects file"
        return None
Code example #22
File: tasks.py Project: billthornton/readthedocs.org
def update_intersphinx(version_pk):
    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project

    try:
        object_file = version.project.find("objects.inv", version.slug)[0]
    except IndexError, e:
        print "Failed to find objects file"
        return None
Code example #23
File: utils.py Project: k-okada/readthedocs.org
def symlink(project, version="latest"):
    from projects import tasks
    from builds.models import Version
    from tastyapi import api

    if getattr(settings, "DONT_HIT_DB", True):
        version_data = api.version().get(project=project, slug=version)["results"][0]
        v = tasks.make_api_version(version_data)
    else:
        v = Version.objects.get(project__slug=project, slug=version)
    log.info("Symlinking %s" % v)
    tasks.symlink_subprojects(v)
    tasks.symlink_cnames(v)
    tasks.symlink_translations(v)
Code example #24
File: tasks.py Project: dangra/readthedocs.org
def update_intersphinx(version_pk):
    version_data = api.version(version_pk).get()
    del version_data['resource_uri']
    project_data = version_data['project']
    del project_data['users']
    del project_data['resource_uri']
    del project_data['absolute_url']
    project = Project(**project_data)
    version_data['project'] = project
    version = Version(**version_data)

    try:
        object_file = version.project.find('objects.inv', version.slug)[0]
    except IndexError, e:
        print "Failed to find objects file"
        return None
Code example #25
File: tasks.py Project: jalaziz/readthedocs.org
def update_intersphinx(version_pk):
    version_data = api.version(version_pk).get()
    del version_data['resource_uri']
    project_data = version_data['project']
    del project_data['users']
    del project_data['resource_uri']
    del project_data['absolute_url']
    project = Project(**project_data)
    version_data['project'] = project
    version = Version(**version_data)

    try:
        object_file = version.project.find('objects.inv', version.slug)[0]
    except IndexError, e:
        print "Failed to find objects file"
        return None
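
Several of the update_intersphinx() variants above and below delete the same tastypie bookkeeping fields (resource_uri, users, absolute_url) before passing the payload to the Project and Version constructors. A small sketch of that stripping step; the helper and the field tuple are illustrative.

# Sketch of the field-stripping done above before Project(**data)/Version(**data):
# tastypie adds read-only fields the model constructors do not expect.
READ_ONLY_FIELDS = ('resource_uri', 'absolute_url', 'users')

def strip_api_fields(data, read_only=READ_ONLY_FIELDS):
    return dict((key, value) for key, value in data.items()
                if key not in read_only)


clean = strip_api_fields({'slug': 'latest', 'resource_uri': '/api/v1/version/1/'})
assert clean == {'slug': 'latest'}
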
Code example #26
File: tasks.py Project: rmaceissoft/readthedocs.org
def update_intersphinx(version_pk):
    version_data = api.version(version_pk).get()
    del version_data["resource_uri"]
    project_data = version_data["project"]
    del project_data["users"]
    del project_data["resource_uri"]
    del project_data["absolute_url"]
    project = Project(**project_data)
    version_data["project"] = project
    version = Version(**version_data)

    object_file = version.project.find("objects.inv", version.slug)[0]
    path = version.project.rtd_build_path(version.slug)
    if not path:
        print "ERR: %s has no path" % version
        return None
    app = DictObj()
    app.srcdir = path
    try:
        inv = fetch_inventory(app, path, object_file)
    except TypeError:
        print "Failed to fetch inventory for %s" % version
        return None
    # I'm not entirely sure this is even close to correct.
    # There's a lot of info I'm throwing away here; revisit later?
    for keytype in inv:
        for term in inv[keytype]:
            try:
                _, _, url, title = inv[keytype][term]
                if not title or title == "-":
                    if "#" in url:
                        title = url.rsplit("#")[-1]
                    else:
                        title = url
                find_str = "rtd-builds/latest"
                latest = url.find(find_str)
                url = url[latest + len(find_str) + 1 :]
                url = "http://%s.readthedocs.org/en/latest/%s" % (version.project.slug, url)
                save_term(version, term, url, title)
                if "." in term:
                    save_term(version, term.split(".")[-1], url, title)
            except Exception, e:  # Yes, I'm an evil person.
                print "*** Failed updating %s: %s" % (term, e)
Code example #27
File: tasks.py Project: awesome/readthedocs.org
def update_intersphinx(version_pk, api=None):
    if api is None:
        api = tastyapi.api

    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project

    try:
        object_file = version.project.find('objects.inv', version.slug)[0]
    except IndexError:
        print "Failed to find objects file"
        return None

    f = open(object_file)
    f.readline()
    urlpattern = "http://%s/en/%s/%%s" % (project.subdomain, version.slug)
    data = intersphinx.read_inventory_v2(f, urlpattern, operator.mod)
    for top_key in data.keys():
        #print "KEY: %s" % top_key
        inner_keys = data[top_key].keys()
        for inner_key in inner_keys:
            #print "INNER KEY: %s" % inner_key
            _project, sphinx_version, url, title = data[top_key][inner_key]
            try:
                url_key = url.split('#')[1]
            except IndexError:
                # Invalid data
                continue
            if ":" in url_key:
                #This dumps junk data into the url namespace we don't need
                #print "INNER: %s->%s" % (inner_key, url)
                save_term(version, inner_key, url)
            else:
                last_key = url_key.split('.')[-1]
                if last_key != url_key:
                    #Only save last key if it differs
                    #print "LAST: %s->%s" % (last_key, url)
                    save_term(version, last_key, url)
                #print "URL: %s->%s" % (url_key, url)
                save_term(version, url_key, url)
Code example #28
def update_intersphinx(version_pk, api=None):
    if api is None:
        api = tastyapi.api

    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project

    try:
        object_file = version.project.find('objects.inv', version.slug)[0]
    except IndexError:
        print "Failed to find objects file"
        return None

    f = open(object_file)
    f.readline()
    urlpattern = "http://%s/en/%s/%%s" % (project.subdomain, version.slug)
    data = intersphinx.read_inventory_v2(f, urlpattern, operator.mod)
    for top_key in data.keys():
        #print "KEY: %s" % top_key
        inner_keys = data[top_key].keys()
        for inner_key in inner_keys:
            #print "INNER KEY: %s" % inner_key
            _project, sphinx_version, url, title = data[top_key][inner_key]
            try:
                url_key = url.split('#')[1]
            except IndexError:
                # Invalid data
                continue
            if ":" in url_key:
                #This dumps junk data into the url namespace we don't need
                #print "INNER: %s->%s" % (inner_key, url)
                save_term(version, inner_key, url)
            else:
                last_key = url_key.split('.')[-1]
                if last_key != url_key:
                    #Only save last key if it differs
                    #print "LAST: %s->%s" % (last_key, url)
                    save_term(version, last_key, url)
                #print "URL: %s->%s" % (url_key, url)
                save_term(version, url_key, url)
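
The loop above derives search terms from each inventory entry's URL fragment: the part after '#' becomes the key, its last dotted component is saved as well when it differs, and fragments containing ':' fall back to the original term name. A self-contained sketch of just that key derivation; the helper name is illustrative.

# Sketch of the term-extraction logic above: a term's URL fragment (after '#')
# becomes the saved key; dotted fragments also contribute their last component,
# and fragments containing ':' keep the original inventory term instead.
def term_keys(inner_key, url):
    try:
        url_key = url.split('#')[1]
    except IndexError:
        return []            # no fragment: treated as invalid data above
    if ':' in url_key:
        return [inner_key]   # keep the original inventory term
    keys = [url_key]
    last_key = url_key.split('.')[-1]
    if last_key != url_key:
        keys.append(last_key)
    return keys


assert term_keys('module.func', 'page.html#module.func') == ['module.func', 'func']
assert term_keys('std:label', 'page.html#std:label') == ['std:label']
assert term_keys('broken', 'page.html') == []
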
Code example #29
File: tasks.py Project: Nagyman/readthedocs.org
def update_intersphinx(version_pk):
    version_data = api.version(version_pk).get()
    del version_data['resource_uri']
    project_data = version_data['project']
    del project_data['users']
    del project_data['resource_uri']
    del project_data['absolute_url']
    project = Project(**project_data)
    version_data['project'] = project
    version = Version(**version_data)

    object_file = version.project.find('objects.inv', version.slug)[0]
    path = version.project.rtd_build_path(version.slug)
    if not path:
        log.warning("%s has no path" % version)
        return None
    app = DictObj()
    app.srcdir = path
    try:
        inv = fetch_inventory(app, path, object_file)
    except TypeError, e:
        log.error("Failed to fetch inventory for %s" % version, exc_info=True)
        return None
Code example #30
def update_intersphinx(version_pk):
    version_data = api.version(version_pk).get()
    del version_data['resource_uri']
    project_data = version_data['project']
    del project_data['users']
    del project_data['resource_uri']
    del project_data['absolute_url']
    project = Project(**project_data)
    version_data['project'] = project
    version = Version(**version_data)

    object_file = version.project.find('objects.inv', version.slug)[0]
    path = version.project.rtd_build_path(version.slug)
    if not path:
        log.warning("%s has no path" % version)
        return None
    app = DictObj()
    app.srcdir = path
    try:
        inv = fetch_inventory(app, path, object_file)
    except TypeError, e:
        log.error("Failed to fetch inventory for %s" % version, exc_info=True)
        return None
Code example #31
File: tasks.py Project: AquaBindi/readthedocs.org
def fileify(version_pk):
    """
    Create ImportedFile objects for all of a version's files.

    This is a prereq for indexing the docs for search.
    It also causes celery-haystack to kick off an index of the file.
    """
    if getattr(settings, 'DONT_HIT_DB', True):
        version_data = api.version(version_pk).get()
        version = make_api_version(version_data)
    else:
        version = Version.objects.get(pk=version_pk)
    project = version.project
    path = project.rtd_build_path(version.slug)
    log.info(LOG_TEMPLATE.format(
        project=project.slug, version=version.slug, msg='Creating ImportedFiles'))
    if path:
        for root, dirnames, filenames in os.walk(path):
            for filename in filenames:
                if fnmatch.fnmatch(filename, '*.html'):
                    dirpath = os.path.join(root.replace(path, '').lstrip('/'),
                                           filename.lstrip('/'))
                    if getattr(settings, 'DONT_HIT_DB', True):
                        api.file.post(dict(
                            project="/api/v1/project/%s/" % project.pk,
                            version="/api/v1/version/%s/" % version.pk,
                            path=dirpath,
                            name=filename))
                    else:
                        obj, created = ImportedFile.objects.get_or_create(
                            project=project,
                            version=version,
                            path=dirpath,
                            name=filename)
                        if not created:
                            obj.save()
Code example #32
File: tasks.py Project: rmaceissoft/readthedocs.org
def update_docs(pk, record=True, pdf=True, man=True, epub=True, version_pk=None, force=False, **kwargs):
    """
    The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we created it.
    Then it will build the html docs and other requested parts.
    It also handles clearing the varnish cache.
    """

    ###
    # Handle passed in arguments
    ###
    update_output = kwargs.get("update_output", {})
    project_data = api.project(pk).get()
    del project_data["users"]
    del project_data["resource_uri"]
    del project_data["absolute_url"]
    project = Project(**project_data)

    def new_save(*args, **kwargs):
        # fields = [(field, field.value_to_string(self)) for field in self._meta.fields]
        print "*** Called save on a non-real object."
        # print fields
        # raise TypeError('Not a real model')
        return 0

    project.save = new_save
    print "Building %s" % project
    if version_pk:
        version_data = api.version(version_pk).get()
        del version_data["resource_uri"]
    else:
        # Create or use the 'latest' branch, which is the default for a project.
        branch = project.default_branch or project.vcs_repo().fallback_branch
        try:
            version_data = api.version(project.slug).get(slug="latest")["objects"][0]
            del version_data["resource_uri"]
        except (slumber.exceptions.HttpClientError, IndexError) as exc:
            # if exc.response.status_code in [404,500]:
            version_data = dict(
                project="/api/v1/project/%s/" % project.pk,
                slug="latest",
                active=True,
                verbose_name="latest",
                identifier=branch,
            )
            try:
                version_data = api.version.post(version_data)
                del version_data["resource_uri"]
            except Exception as e:
                raise e
    version_data["project"] = project
    version = Version(**version_data)
    version.save = new_save

    if not version_pk:
        # Lots of course correction.
        to_save = False
        if not version.verbose_name:
            version_data["verbose_name"] = "latest"
            to_save = True
        if not version.active:
            version_data["active"] = True
            to_save = True
        if version.identifier != branch:
            version_data["identifier"] = branch
            to_save = True
        if to_save:
            version_data["project"] = "/api/v1/version/%s/" % version_data["project"].pk
            api.version(version.pk).put(version_data)

    if record:
        # Create Build Object.
        build = api.build.post(
            dict(
                project="/api/v1/project/%s/" % project.pk,
                version="/api/v1/version/%s/" % version.pk,
                type="html",
                state="triggered",
            )
        )
    else:
        build = {}

    # Make Dirs
    path = project.doc_path
    if not os.path.exists(path):
        os.makedirs(path)
    with project.repo_lock(30):
        if project.is_imported:
            try:
                update_output = update_imported_docs(project, version)
            except ProjectImportError, err:
                print ("Error importing project: %s. Skipping build." % err)
                return False

            # scrape_conf_file(version)
        else:
Code example #33
def update_imported_docs(version_pk):
    """
    Check out or update the given project's repository.
    """
    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project

    # Make Dirs
    if not os.path.exists(project.doc_path):
        os.makedirs(project.doc_path)

    with project.repo_lock(getattr(settings, 'REPO_LOCK_SECONDS', 30)):
        update_docs_output = {}
        if not project.vcs_repo():
            raise ProjectImportError(
                ("Repo type '{0}' unknown".format(project.repo_type)))

        # Get the actual code on disk
        if version:
            log.info('Checking out version {slug}: {identifier}'.format(
                slug=version.slug, identifier=version.identifier))
            version_slug = version.slug
            version_repo = project.vcs_repo(version_slug)
            update_docs_output['checkout'] = version_repo.checkout(
                version.identifier)
        else:
            # Does this ever get called?
            log.info('Updating to latest revision')
            version_slug = 'latest'
            version_repo = project.vcs_repo(version_slug)
            update_docs_output['checkout'] = version_repo.update()

        # Ensure we have a conf file (an exception is raised if not)
        project.conf_file(version.slug)

        # Do Virtualenv bits:
        if project.use_virtualenv:
            if project.use_system_packages:
                site_packages = '--system-site-packages'
            else:
                site_packages = '--no-site-packages'
            # Here the command has been modified to support different
            # interpreters.
            update_docs_output['venv'] = run(
                '{cmd} --distribute {site_packages} {path}'.format(
                    cmd='virtualenv -p {interpreter}'.format(
                        interpreter=project.python_interpreter),
                    site_packages=site_packages,
                    path=project.venv_path(version=version_slug)))
            # Other code expects sphinx-build to be installed inside the
            # virtualenv.  Using the -I option makes sure it gets installed
            # even if it is already installed system-wide (and
            # --system-site-packages is used)
            if project.use_system_packages:
                ignore_option = '-I'
            else:
                ignore_option = ''
            sphinx = ('hg+http://bitbucket.org/birkenfeld/sphinx/@d4c6ac1fcc9c'
                      '#egg=Sphinx')
            if project.python_interpreter != 'python3':
                update_docs_output['sphinx'] = run(
                    ('{cmd} install -U {ignore_option} {sphinx} '
                     'virtualenv==1.8.2 distribute==0.6.28 '
                     'docutils==0.8.1').format(cmd=project.venv_bin(
                         version=version_slug, bin='pip'),
                                               sphinx=sphinx,
                                               ignore_option=ignore_option))
            else:
                # python 3 specific hax
                update_docs_output['sphinx'] = run((
                    '{cmd} install {ignore_option} {sphinx} virtualenv==1.8.2 '
                    'docutils==0.8.1').format(cmd=project.venv_bin(
                        version=version_slug, bin='pip'),
                                              sphinx=sphinx,
                                              ignore_option=ignore_option))

            if project.requirements_file:
                os.chdir(project.checkout_path(version_slug))
                update_docs_output['requirements'] = run(
                    '{cmd} install --exists-action=w -r {requirements}'.format(
                        cmd=project.venv_bin(version=version_slug, bin='pip'),
                        requirements=project.requirements_file))
            os.chdir(project.checkout_path(version_slug))
            if getattr(settings, 'USE_PIP_INSTALL', False):
                update_docs_output['install'] = run(
                    '{cmd} install --ignore-installed .'.format(
                        cmd=project.venv_bin(version=version_slug, bin='pip')))
            else:
                update_docs_output['install'] = run(
                    '{cmd} setup.py install --force'.format(
                        cmd=project.venv_bin(version=version_slug,
                                             bin='python')))

        # check tags/version
        #XXX:dc: what in this block raises the values error?
        try:
            old_versions = [
                obj['identifier']
                for obj in api.version.get(project__slug=project.slug,
                                           limit=5000)['objects']
            ]
            if version_repo.supports_tags:
                transaction.enter_transaction_management(True)
                tags = version_repo.tags
                for tag in tags:
                    if tag.identifier in old_versions:
                        continue
                    log.debug('NEW TAG: (%s not in %s)' %
                              (tag.identifier, old_versions))
                    slug = slugify_uniquely(Version,
                                            tag.verbose_name,
                                            'slug',
                                            255,
                                            project=project)
                    try:

                        version_data = api.version.post(
                            dict(project="/api/v1/project/%s/" % project.pk,
                                 slug=slug,
                                 identifier=tag.identifier,
                                 verbose_name=tag.verbose_name))
                        ver = make_api_version(version_data)
                        log.info("New tag found: {0}".format(tag.identifier))
                        ver, highest = project.highest_version[1]
                        ver_obj = mkversion(ver)
                        # TODO: Handle updating higher versions automatically.
                        # This never worked very well, anyways.
                        if highest and ver_obj and ver_obj > highest:
                            log.info("Highest version known, building docs")
                            update_docs.delay(ver.project.pk,
                                              version_pk=ver.pk)
                    except Exception:
                        log.error("Failed to create version (tag)",
                                  exc_info=True)
                        transaction.rollback()
                transaction.leave_transaction_management()
            if version_repo.supports_branches:
                transaction.enter_transaction_management(True)
                branches = version_repo.branches
                for branch in branches:
                    if branch.identifier in old_versions:
                        continue
                    log.debug('NEW BRANCH: (%s not in %s)' %
                              (branch, old_versions))
                    slug = slugify_uniquely(Version,
                                            branch.verbose_name,
                                            'slug',
                                            255,
                                            project=project)
                    try:
                        api.version.post(
                            dict(project="/api/v1/project/%s/" % project.pk,
                                 slug=slug,
                                 identifier=branch.identifier,
                                 verbose_name=branch.verbose_name))
                        log.info(("New branch found: {0}".format(
                            branch.identifier)))
                    except Exception:
                        log.error("Failed to create version (branch)",
                                  exc_info=True)
                        transaction.rollback()
                transaction.leave_transaction_management()
                #TODO: Kill deleted branches
        except ValueError:
            log.error("Error getting tags", exc_info=True)
    return update_docs_output
Code example #34
File: tasks.py Project: titilambert/readthedocs.org
def update_imported_docs(version_pk):
    """
    Check out or update the given project's repository.
    """
    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project

    # Make Dirs
    if not os.path.exists(project.doc_path):
        os.makedirs(project.doc_path)

    with project.repo_lock(getattr(settings, 'REPO_LOCK_SECONDS', 30)):
        update_docs_output = {}
        if not project.vcs_repo():
            raise ProjectImportError(
                ("Repo type '{0}' unknown".format(project.repo_type)))

        # Get the actual code on disk
        if version:
            log.info('Checking out version {slug}: {identifier}'.format(
                slug=version.slug, identifier=version.identifier))
            version_slug = version.slug
            version_repo = project.vcs_repo(version_slug)
            update_docs_output['checkout'] = version_repo.checkout(
                version.identifier)
        else:
            # Does this ever get called?
            log.info('Updating to latest revision')
            version_slug = 'latest'
            version_repo = project.vcs_repo(version_slug)
            update_docs_output['checkout'] = version_repo.update()

        # Ensure we have a conf file (an exception is raised if not)
        project.conf_file(version.slug)

        # Do Virtualenv bits:
        if project.use_virtualenv:
            if project.use_system_packages:
                site_packages = '--system-site-packages'
            else:
                site_packages = '--no-site-packages'
            # Here the command has been modified to support different
            # interpreters.
            update_docs_output['venv'] = run(
                '{cmd} --distribute {site_packages} {path}'.format(
                    cmd='virtualenv -p {interpreter}'.format(
                        interpreter=project.python_interpreter),
                    site_packages=site_packages,
                    path=project.venv_path(version=version_slug)))
            # Other code expects sphinx-build to be installed inside the
            # virtualenv.  Using the -I option makes sure it gets installed
            # even if it is already installed system-wide (and
            # --system-site-packages is used)
            if project.use_system_packages:
                ignore_option = '-I'
            else:
                ignore_option = ''
            if project.python_interpreter != 'python3':
                sphinx = (
                    'hg+http://bitbucket.org/birkenfeld/sphinx/@d4c6ac1fcc9c'
                    '#egg=Sphinx')
                update_docs_output['sphinx'] = run(
                    ('{cmd} install -U {ignore_option} {sphinx} '
                     'virtualenv==1.8.2 distribute==0.6.28 '
                     'docutils==0.8.1').format(cmd=project.venv_bin(
                         version=version_slug, bin='pip'),
                                               sphinx=sphinx,
                                               ignore_option=ignore_option))
            else:
                sphinx = 'sphinx==1.1.3'
                # python 3 specific hax
                update_docs_output['sphinx'] = run(
                    ('{cmd} install {ignore_option} {sphinx} '
                     'virtualenv==1.9.1 docutils==0.11').format(
                         cmd=project.venv_bin(version=version_slug, bin='pip'),
                         sphinx=sphinx,
                         ignore_option=ignore_option))

            if project.requirements_file:
                os.chdir(project.checkout_path(version_slug))
                update_docs_output['requirements'] = run(
                    '{cmd} install --exists-action=w -r {requirements}'.format(
                        cmd=project.venv_bin(version=version_slug, bin='pip'),
                        requirements=project.requirements_file))
            os.chdir(project.checkout_path(version_slug))
            if getattr(settings, 'USE_PIP_INSTALL', False):
                update_docs_output['install'] = run(
                    '{cmd} install --ignore-installed .'.format(
                        cmd=project.venv_bin(version=version_slug, bin='pip')))
            else:
                update_docs_output['install'] = run(
                    '{cmd} setup.py install --force'.format(
                        cmd=project.venv_bin(version=version_slug,
                                             bin='python')))

        # Update tags/version

        version_post_data = {'repo': version_repo.repo_url}

        if version_repo.supports_tags:
            version_post_data['tags'] = [{
                'identifier': v.identifier,
                'verbose_name': v.verbose_name,
            } for v in version_repo.tags]

        if version_repo.supports_branches:
            version_post_data['branches'] = [{
                'identifier': v.identifier,
                'verbose_name': v.verbose_name,
            } for v in version_repo.branches]

        try:
            api.project(project.pk).sync_versions.post(
                json.dumps(version_post_data))
        except Exception, e:
            print "Sync Verisons Exception: %s" % e.message
Code example #35
File: tasks.py Project: titilambert/readthedocs.org
        api.build.post(
            dict(
                project='/api/v1/project/%s/' % project.pk,
                version='/api/v1/version/%s/' % version.pk,
                success=pdf_results[0] == 0,
                type='pdf',
                setup=latex_results[1],
                setup_error=latex_results[2],
                output=pdf_results[1],
                error=pdf_results[2],
            ))

    if version:
        # Mark version active on the site
        version_data = api.version(version.pk).get()
        version_data['active'] = True
        version_data['built'] = True
        # Need to delete this because a bug in tastypie breaks on the users
        # list.
        del version_data['project']
        try:
            api.version(version.pk).put(version_data)
        except Exception, e:
            log.error("Unable to post a new version", exc_info=True)

    # Build Finished, do house keeping bits

    if 'no targets are out of date.' in out:
        log.info("Build Unchanged")
    else:
Code example #36
File: tasks.py Project: Nagyman/readthedocs.org
def update_docs(pk, record=True, pdf=True, man=True, epub=True, version_pk=None, force=False, **kwargs):
    """
    The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we created it.
    Then it will build the html docs and other requested parts.
    It also handles clearing the varnish cache.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful 
        for preventing changes visible to the end-user when running commands from
        the shell, for example.
    """

    ###
    # Handle passed in arguments
    ###
    update_output = kwargs.get('update_output', {})
    project_data = api.project(pk).get()
    del project_data['users']
    del project_data['resource_uri']
    del project_data['absolute_url']
    project = Project(**project_data)

    # Prevent saving the temporary Project instance
    def new_save(*args, **kwargs):
        log.warning("Called save on a non-real object.")
        return 0
    project.save = new_save

    log.info("Building %s" % project)
    if version_pk:
        version_data = api.version(version_pk).get()
        del version_data['resource_uri']
    else:
        #Create or use the 'latest' branch, which is the default for a project.
        branch = project.default_branch or project.vcs_repo().fallback_branch
        try:
            version_data = api.version(project.slug).get(slug='latest')['objects'][0]
            del version_data['resource_uri']
        except (slumber.exceptions.HttpClientError, IndexError) as exc:
            #if exc.response.status_code in [404,500]:
            version_data = dict(
                project='/api/v1/project/%s/' % project.pk,
                slug='latest',
                active=True,
                verbose_name='latest',
                identifier=branch,
                )
            try:
                version_data = api.version.post(version_data)
                del version_data['resource_uri']
            except Exception as e:
                raise e
    version_data['project'] = project
    version = Version(**version_data)
    version.save = new_save

    if not version_pk:
        #Lots of course correction.
        to_save = False
        if not version.verbose_name:
            version_data['verbose_name'] = 'latest'
            to_save = True
        if not version.active:
            version_data['active'] = True
            to_save = True
        if version.identifier != branch:
            version_data['identifier'] = branch
            to_save = True
        if to_save:
            version_data['project'] = "/api/v1/version/%s/" % version_data['project'].pk
            api.version(version.pk).put(version_data)

    if record:
        #Create Build Object.
        build = api.build.post(dict(
            project='/api/v1/project/%s/' % project.pk,
            version='/api/v1/version/%s/' % version.pk,
            type='html',
            state='triggered',
        ))
    else:
        build = {}

    #Make Dirs
    path = project.doc_path
    if not os.path.exists(path):
        os.makedirs(path)
    with project.repo_lock(30):
        if project.is_imported:
            try:
                update_output = update_imported_docs(project, version)
            except ProjectImportError, err:
                log.error("Failed to import project; skipping build.", exc_info=True)
                return False
        else:
コード例 #37
0
def update_imported_docs(version_pk):
    """
    Check out or update the given project's repository.
    """
    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project

    # Make Dirs
    if not os.path.exists(project.doc_path):
        os.makedirs(project.doc_path)

    with project.repo_lock(getattr(settings, 'REPO_LOCK_SECONDS', 30)):
        update_docs_output = {}
        if not project.vcs_repo():
            raise ProjectImportError(("Repo type '{0}' unknown"
                                      .format(project.repo_type)))

        # Get the actual code on disk
        if version:
            log.info('Checking out version {slug}: {identifier}'.format(
                slug=version.slug, identifier=version.identifier))
            version_slug = version.slug
            version_repo = project.vcs_repo(version_slug)
            update_docs_output['checkout'] = version_repo.checkout(
                version.identifier
            )
        else:
            # Does this ever get called?
            log.info('Updating to latest revision')
            version_slug = 'latest'
            version_repo = project.vcs_repo(version_slug)
            update_docs_output['checkout'] = version_repo.update()

        # Ensure we have a conf file (an exception is raised if not)
        project.conf_file(version.slug)

        # Do Virtualenv bits:
        if project.use_virtualenv:
            if project.use_system_packages:
                site_packages = '--system-site-packages'
            else:
                site_packages = '--no-site-packages'
            # Here the command has been modified to support different
            # interpreters.
            update_docs_output['venv'] = run(
                '{cmd} --distribute {site_packages} {path}'.format(
                    cmd='virtualenv -p {interpreter}'.format(
                        interpreter=project.python_interpreter),
                    site_packages=site_packages,
                    path=project.venv_path(version=version_slug)
                )
            )
            # Other code expects sphinx-build to be installed inside the
            # virtualenv.  Using the -I option makes sure it gets installed
            # even if it is already installed system-wide (and
            # --system-site-packages is used)
            if project.use_system_packages:
                ignore_option = '-I'
            else:
                ignore_option = ''
            if project.python_interpreter != 'python3':
                sphinx = ('hg+http://bitbucket.org/birkenfeld/sphinx/@d4c6ac1fcc9c'
                          '#egg=Sphinx')
                update_docs_output['sphinx'] = run(
                    ('{cmd} install -U {ignore_option} {sphinx} '
                     'virtualenv==1.8.2 distribute==0.6.28 '
                     'docutils==0.8.1').format(
                        cmd=project.venv_bin(version=version_slug, bin='pip'),
                        sphinx=sphinx, ignore_option=ignore_option))
            else:
                sphinx = 'sphinx==1.1.3'
                # python 3 specific hax
                update_docs_output['sphinx'] = run(
                    ('{cmd} install {ignore_option} {sphinx} '
                     'virtualenv==1.9.1 docutils==0.11').format(
                        cmd=project.venv_bin(version=version_slug, bin='pip'),
                        sphinx=sphinx, ignore_option=ignore_option))

            if project.requirements_file:
                os.chdir(project.checkout_path(version_slug))
                update_docs_output['requirements'] = run(
                    '{cmd} install --exists-action=w -r {requirements}'.format(
                        cmd=project.venv_bin(version=version_slug, bin='pip'),
                        requirements=project.requirements_file))
            os.chdir(project.checkout_path(version_slug))
            if getattr(settings, 'USE_PIP_INSTALL', False):
                update_docs_output['install'] = run(
                    '{cmd} install --ignore-installed .'.format(
                        cmd=project.venv_bin(version=version_slug, bin='pip')))
            else:
                update_docs_output['install'] = run(
                    '{cmd} setup.py install --force'.format(
                        cmd=project.venv_bin(version=version_slug,
                                             bin='python')))

        # Update tags/version

        version_post_data = {'repo': version_repo.repo_url}

        if version_repo.supports_tags:
            version_post_data['tags'] = [
                {'identifier': v.identifier,
                 'verbose_name': v.verbose_name,
                 } for v in version_repo.tags
            ]

        if version_repo.supports_branches:
            version_post_data['branches'] = [
                {'identifier': v.identifier,
                 'verbose_name': v.verbose_name,
                 } for v in version_repo.branches
            ]

        try:
            api.project(project.pk).sync_versions.post(json.dumps(version_post_data))
        except Exception, e:
            print "Sync Verisons Exception: %s" % e.message
コード例 #38
0
ファイル: tasks.py プロジェクト: awesome/readthedocs.org
def update_docs(pk, record=True, pdf=True, man=True, epub=True, dash=True,
                search=True, version_pk=None, force=False, intersphinx=True,
                localmedia=True, api=None, **kwargs):
    """The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we
    created it.  Then it will build the html docs and other requested parts. It
    also handles clearing the varnish cache.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful
        for preventing changes visible to the end-user when running commands
        from the shell, for example.

    """

    if api is None:
        api = tastyapi.api

    project_data = api.project(pk).get()
    project = make_api_project(project_data)
    if 'tryton' in project.repo:
        # Skip for now
        return

    log.info(LOG_TEMPLATE.format(project=project.slug, version='', msg='Building'))
    if version_pk:
        version_data = api.version(version_pk).get()
    else:
        branch = project.default_branch or project.vcs_repo().fallback_branch
        try:
            # Use latest version
            version_data = (api.version(project.slug)
                            .get(slug='latest')['objects'][0])
        except (slumber.exceptions.HttpClientError, IndexError):
            # Create the latest version since it doesn't exist
            version_data = dict(
                project='/api/v1/project/%s/' % project.pk,
                slug='latest',
                type='branch',
                active=True,
                verbose_name='latest',
                identifier=branch,
            )
            try:
                version_data = api.version.post(version_data)
            except Exception as e:
                log.info(LOG_TEMPLATE.format(project=project.slug, version='', msg='Exception in creating version: %s' % e))
                raise e

    version = make_api_version(version_data)

    if not version_pk:
        # Lots of course correction.
        to_save = False
        if not version.verbose_name:
            version_data['verbose_name'] = 'latest'
            to_save = True
        if not version.active:
            version_data['active'] = True
            to_save = True
        if version.identifier != branch:
            version_data['identifier'] = branch
            to_save = True
        if to_save:
            version_data['project'] = ("/api/v1/version/%s/"
                                       % version_data['project'].pk)
            api.version(version.pk).put(version_data)

    if record:
        # Create Build Object.
        build = api.build.post(dict(
            project='/api/v1/project/%s/' % project.pk,
            version='/api/v1/version/%s/' % version.pk,
            type='html',
            state='triggered',
        ))
    else:
        build = {}

    try:
        log.info(LOG_TEMPLATE.format(project=project.slug, version=version.slug, msg='Updating docs from VCS'))
        update_output = update_imported_docs(version.pk, api)
        #update_output = update_result.get()
    except ProjectImportError, err:
        log.error(LOG_TEMPLATE.format(project=project.slug, version=version.slug, msg='Failed to import project; skipping build'), exc_info=True)
        build['state'] = 'finished'
        build['setup_error'] = (
            'Failed to import project; skipping build.\n'
            '\nError\n-----\n\n%s' % err.message
        )
        api.build(build['id']).put(build)
        return False
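
This entry point is normally queued as a Celery task rather than called
directly (example #49 below triggers it with update_docs.delay). A minimal
sketch of both invocation styles, assuming the task is importable from the
project's tasks module; the exact import path varies between the forks shown
here.

from projects.tasks import update_docs  # hypothetical import path

# Asynchronous trigger through Celery; pk is the project's primary key.
update_docs.delay(pk=42, version_pk=None, force=True)

# Direct call from a shell for debugging; record=False avoids writing
# Build objects to the database.
update_docs(pk=42, record=False)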
コード例 #39
0
def build_docs(version_pk, pdf, man, epub, dash, search, localmedia, record,
               force):
    """
    This handles the actual building of the documentation and DB records
    """
    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project

    if 'sphinx' in project.documentation_type and not project.conf_file(
            version.slug):
        return ('', 'Conf file not found.', -1)

    with project.repo_lock(version, getattr(settings, 'REPO_LOCK_SECONDS',
                                            30)):

        html_builder = builder_loading.get(project.documentation_type)(version)
        if force:
            html_builder.force()
        html_builder.clean()
        html_results = html_builder.build()
        if html_results[0] == 0:
            html_builder.move()

        fake_results = (999, "Project Skipped, Didn't build",
                        "Project Skipped, Didn't build")
        # Only build everything else if the html build changed.
        if html_builder.changed and 'sphinx' in project.documentation_type:
            # Search builder. Creates JSON from docs and sends it to the server.
            if search:
                try:
                    search_builder = builder_loading.get('sphinx_search')(
                        version)
                    search_results = search_builder.build()
                    if search_results[0] == 0:
                        # Update search index
                        search_builder.upload()
                        # Copy json for safe keeping
                        search_builder.move()
                except:
                    log.error(LOG_TEMPLATE.format(project=project.slug,
                                                  version=version.slug,
                                                  msg="JSON Build Error"),
                              exc_info=True)
            # Local media builder for singlepage HTML download archive
            if localmedia:
                try:
                    localmedia_builder = builder_loading.get(
                        'sphinx_singlehtmllocalmedia')(version)
                    localmedia_results = localmedia_builder.build()
                    if localmedia_results[0] == 0:
                        localmedia_builder.move()
                except:
                    log.error(LOG_TEMPLATE.format(
                        project=project.slug,
                        version=version.slug,
                        msg="Local Media HTML Build Error"),
                              exc_info=True)

            # Optional build steps
            if version.project.slug not in HTML_ONLY and not project.skip:
                if pdf:
                    pdf_builder = builder_loading.get('sphinx_pdf')(version)
                    latex_results, pdf_results = pdf_builder.build()
                    # Always move pdf results even when there's an error.
                    #if pdf_results[0] == 0:
                    pdf_builder.move()
                else:
                    pdf_results = latex_results = fake_results
                if dash:
                    dash_builder = builder_loading.get('sphinx_dash')(version)
                    dash_results = dash_builder.build()
                    if dash_results[0] == 0:
                        dash_builder.move()
                else:
                    dash_results = fake_results

                if epub:
                    epub_builder = builder_loading.get('sphinx_epub')(version)
                    epub_results = epub_builder.build()
                    if epub_results[0] == 0:
                        epub_builder.move()
                else:
                    epub_results = fake_results
            else:
                search_results = dash_results = latex_results = pdf_results = epub_results = (
                    999, "Optional builds disabled",
                    "Optional builds disabled")
        else:
            search_results = dash_results = latex_results = pdf_results = epub_results = (
                999, "Optional builds disabled", "Optional builds disabled")

    return (html_results, latex_results, pdf_results, epub_results,
            dash_results, search_results)
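
The tuple returned above bundles one (exit_code, output, error) triple per
builder, in the order html, latex, pdf, epub, dash, search, with 999 acting as
the "skipped" sentinel. A sketch of how a caller might unpack it; the call
arguments are illustrative only.

results = build_docs(version_pk=1, pdf=True, man=True, epub=True, dash=True,
                     search=True, localmedia=True, record=True, force=False)
html, latex, pdf, epub, dash, search = results

if html[0] != 0:
    log.error("HTML build failed: %s" % html[2])
for name, result in [('pdf', pdf), ('epub', epub), ('dash', dash)]:
    if result[0] == 999:
        log.info("%s build skipped: %s" % (name, result[1]))
    elif result[0] != 0:
        log.error("%s build failed: %s" % (name, result[2]))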
コード例 #40
0
def update_docs(pk,
                version_pk=None,
                record=True,
                docker=False,
                pdf=True,
                man=True,
                epub=True,
                dash=True,
                search=True,
                force=False,
                intersphinx=True,
                localmedia=True,
                api=None,
                **kwargs):
    """
    The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we
    created it.  Then it will build the html docs and other requested parts.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful
        for preventing changes visible to the end-user when running commands
        from the shell, for example.

    """

    # Dependency injection to allow for testing
    if api is None:
        api = tastyapi.api

    project_data = api.project(pk).get()
    project = make_api_project(project_data)
    log.info(
        LOG_TEMPLATE.format(project=project.slug, version='', msg='Building'))
    version = ensure_version(api, project, version_pk)
    build = create_build(version, api, record)
    results = {}

    try:
        record_build(api=api,
                     build=build,
                     record=record,
                     results=results,
                     state='cloning')
        vcs_results = setup_vcs(version, build, api)
        results.update(vcs_results)

        if docker:
            record_build(api=api,
                         build=build,
                         record=record,
                         results=results,
                         state='building')
            build_results = run_docker(version)
            results.update(build_results)
        else:
            record_build(api=api,
                         build=build,
                         record=record,
                         results=results,
                         state='installing')
            setup_results = setup_environment(version)
            results.update(setup_results)

            record_build(api=api,
                         build=build,
                         record=record,
                         results=results,
                         state='building')
            build_results = build_docs(version, force, pdf, man, epub, dash,
                                       search, localmedia)
            results.update(build_results)

        move_files(version, results)
        record_pdf(api=api,
                   record=record,
                   results=results,
                   state='finished',
                   version=version)
        finish_build(version=version, build=build, results=results)

        if results['html'][0] == 0:
            # Mark version active on the site
            version_data = api.version(version.pk).get()
            version_data['active'] = True
            version_data['built'] = True
            # Need to delete this because a bug in tastypie breaks on the users
            # list.
            del version_data['project']
            try:
                api.version(version.pk).put(version_data)
            except Exception, e:
                log.error(LOG_TEMPLATE.format(
                    project=version.project.slug,
                    version=version.slug,
                    msg="Unable to put a new version"),
                          exc_info=True)
    except vcs_support_utils.LockTimeout, e:
        results['checkout'] = (999, "",
                               "Version locked, retrying in 5 minutes.")
        log.info(
            LOG_TEMPLATE.format(project=version.project.slug,
                                version=version.slug,
                                msg="Unable to lock, will retry"))
        # http://celery.readthedocs.org/en/3.0/userguide/tasks.html#retrying
        # Should completely retry the task for us until max_retries is exceeded
        update_docs.retry(exc=e, throw=False)
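
record_build itself is not included in this excerpt. The sketch below is only
a plausible reading of its call sites above (a no-op when record is False,
otherwise pushing the current state back through the tastypie API); it is not
the project's actual helper.

def record_build(api, record, build, results, state):
    """Hypothetical helper matching the keyword arguments used above."""
    if not record:
        return
    build['state'] = state
    if 'html' in results:
        build['success'] = results['html'][0] == 0
    api.build(build['id']).put(build)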
コード例 #41
0
def update_docs(pk,
                record=True,
                pdf=True,
                man=True,
                epub=True,
                dash=True,
                search=True,
                version_pk=None,
                force=False,
                intersphinx=True,
                localmedia=True,
                api=None,
                **kwargs):
    """The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we
    created it.  Then it will build the html docs and other requested parts. It
    also handles clearing the varnish cache.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful
        for preventing changes visible to the end-user when running commands
        from the shell, for example.

    """

    if api is None:
        api = tastyapi.api

    project_data = api.project(pk).get()
    project = make_api_project(project_data)
    if 'tryton' in project.repo:
        # Skip for now
        return

    log.info(
        LOG_TEMPLATE.format(project=project.slug, version='', msg='Building'))
    if version_pk:
        version_data = api.version(version_pk).get()
    else:
        branch = project.default_branch or project.vcs_repo().fallback_branch
        try:
            # Use latest version
            version_data = (api.version(
                project.slug).get(slug='latest')['objects'][0])
        except (slumber.exceptions.HttpClientError, IndexError):
            # Create the latest version since it doesn't exist
            version_data = dict(
                project='/api/v1/project/%s/' % project.pk,
                slug='latest',
                type='branch',
                active=True,
                verbose_name='latest',
                identifier=branch,
            )
            try:
                version_data = api.version.post(version_data)
            except Exception as e:
                log.info(
                    LOG_TEMPLATE.format(
                        project=project.slug,
                        version='',
                        msg='Exception in creating version: %s' % e))
                raise e

    version = make_api_version(version_data)

    if not version_pk:
        # Lots of course correction.
        to_save = False
        if not version.verbose_name:
            version_data['verbose_name'] = 'latest'
            to_save = True
        if not version.active:
            version_data['active'] = True
            to_save = True
        if version.identifier != branch:
            version_data['identifier'] = branch
            to_save = True
        if to_save:
            version_data['project'] = ("/api/v1/version/%s/" %
                                       version_data['project'].pk)
            api.version(version.pk).put(version_data)

    if record:
        # Create Build Object.
        build = api.build.post(
            dict(
                project='/api/v1/project/%s/' % project.pk,
                version='/api/v1/version/%s/' % version.pk,
                type='html',
                state='triggered',
            ))
    else:
        build = {}

    try:
        log.info(
            LOG_TEMPLATE.format(project=project.slug,
                                version=version.slug,
                                msg='Updating docs from VCS'))
        update_output = update_imported_docs(version.pk, api)
        #update_output = update_result.get()
    except ProjectImportError, err:
        log.error(LOG_TEMPLATE.format(
            project=project.slug,
            version=version.slug,
            msg='Failed to import project; skipping build'),
                  exc_info=True)
        build['state'] = 'finished'
        build['setup_error'] = ('Failed to import project; skipping build.\n'
                                '\nError\n-----\n\n%s' % err.message)
        api.build(build['id']).put(build)
        return False
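
Several of these forks log through a shared LOG_TEMPLATE constant whose
definition is not part of this excerpt. Judging only from the keyword
arguments passed to .format() above, a plausible definition would be:

LOG_TEMPLATE = u'(Build) [{project}:{version}] {msg}'

log.info(LOG_TEMPLATE.format(project='example-project', version='latest',
                             msg='Building'))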
コード例 #42
0
def build_docs(version_pk, pdf, man, epub, dash, search, record, force):
    """
    This handles the actual building of the documentation and DB records
    """
    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project

    if not project.conf_file(version.slug):
        return ('', 'Conf file not found.', -1)

    with project.repo_lock(getattr(settings, 'REPO_LOCK_SECONDS', 30)):

        html_builder = builder_loading.get(project.documentation_type)(version)
        if force:
            html_builder.force()
        html_builder.clean()
        html_results = html_builder.build()
        if html_results[0] == 0:
            html_builder.move()

        fake_results = (999, "Project Skipped, Didn't build",
                        "Project Skipped, Didn't build")
        # Only build everything else if the html build changed.
        if html_builder.changed and not project.skip:
            if dash:
                dash_builder = builder_loading.get('sphinx_dash')(version)
                dash_results = dash_builder.build()
                if dash_results[0] == 0:
                    dash_builder.move()
            else:
                dash_results = fake_results
            if pdf:
                pdf_builder = builder_loading.get('sphinx_pdf')(version)
                latex_results, pdf_results = pdf_builder.build()
                # Always move pdf results even when there's an error.
                #if pdf_results[0] == 0:
                pdf_builder.move()
            else:
                pdf_results = latex_results = fake_results
            if man:
                man_builder = builder_loading.get('sphinx_man')(version)
                man_results = man_builder.build()
                if man_results[0] == 0:
                    man_builder.move()
            else:
                man_results = fake_results
            if epub:
                epub_builder = builder_loading.get('sphinx_epub')(version)
                epub_results = epub_builder.build()
                if epub_results[0] == 0:
                    epub_builder.move()
            else:
                epub_results = fake_results

            if search:
                try:
                    # BETA
                    search_builder = builder_loading.get('sphinx_search')(
                        version)
                    search_results = search_builder.build()
                    if search_results[0] == 0:
                        search_builder.upload()
                except Exception, e:
                    log.error(LOG_TEMPLATE.format(project=project.slug,
                                                  version=version.slug,
                                                  msg=e.message),
                              exc_info=True)
コード例 #43
0
def update_imported_docs(version_pk, api=None):
    """
    Check out or update the given project's repository.
    """
    if api is None:
        api = tastyapi.api

    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project

    # Make Dirs
    if not os.path.exists(project.doc_path):
        os.makedirs(project.doc_path)

    with project.repo_lock(getattr(settings, 'REPO_LOCK_SECONDS', 30)):
        update_docs_output = {}
        if not project.vcs_repo():
            raise ProjectImportError(
                ("Repo type '{0}' unknown".format(project.repo_type)))

        # Get the actual code on disk
        if version:
            log.info(
                LOG_TEMPLATE.format(
                    project=project.slug,
                    version=version.slug,
                    msg='Checking out version {slug}: {identifier}'.format(
                        slug=version.slug, identifier=version.identifier)))
            version_slug = version.slug
            version_repo = project.vcs_repo(version_slug)
            update_docs_output['checkout'] = version_repo.checkout(
                version.identifier)
        else:
            # Does this ever get called?
            log.info(
                LOG_TEMPLATE.format(project=project.slug,
                                    version=version.slug,
                                    msg='Updating to latest revision'))
            version_slug = 'latest'
            version_repo = project.vcs_repo(version_slug)
            update_docs_output['checkout'] = version_repo.update()

        # Ensure we have a conf file (an exception is raised if not)
        project.conf_file(version.slug)

        # Do Virtualenv bits:
        if project.use_virtualenv:
            build_dir = os.path.join(project.venv_path(version=version_slug),
                                     'build')
            if os.path.exists(build_dir):
                log.info(
                    LOG_TEMPLATE.format(project=project.slug,
                                        version=version.slug,
                                        msg='Removing existing build dir'))
                shutil.rmtree(build_dir)
            if project.use_system_packages:
                site_packages = '--system-site-packages'
            else:
                site_packages = '--no-site-packages'
            # Here the command has been modified to support different
            # interpreters.
            update_docs_output['venv'] = run(
                '{cmd} {site_packages} {path}'.format(
                    cmd='virtualenv-2.7 -p {interpreter}'.format(
                        interpreter=project.python_interpreter),
                    site_packages=site_packages,
                    path=project.venv_path(version=version_slug)))
            # Other code expects sphinx-build to be installed inside the
            # virtualenv.  Using the -I option makes sure it gets installed
            # even if it is already installed system-wide (and
            # --system-site-packages is used)
            if project.use_system_packages:
                ignore_option = '-I'
            else:
                ignore_option = ''
            if project.python_interpreter != 'python3':
                sphinx = 'sphinx==1.1.3'
                update_docs_output['sphinx'] = run((
                    '{cmd} install -U {ignore_option} {sphinx} '
                    'virtualenv==1.10.1 setuptools==1.1 '
                    'docutils==0.11 git+git://github.com/ericholscher/readthedocs-sphinx-ext#egg=readthedocs_ext'
                ).format(cmd=project.venv_bin(version=version_slug, bin='pip'),
                         sphinx=sphinx,
                         ignore_option=ignore_option))
            else:
                sphinx = 'sphinx==1.1.3'
                # python 3 specific hax
                update_docs_output['sphinx'] = run((
                    '{cmd} install -U {ignore_option} {sphinx} '
                    'virtualenv==1.9.1 docutils==0.11 git+git://github.com/ericholscher/readthedocs-sphinx-ext#egg=readthedocs_ext'
                ).format(cmd=project.venv_bin(version=version_slug, bin='pip'),
                         sphinx=sphinx,
                         ignore_option=ignore_option))

            if project.requirements_file:
                os.chdir(project.checkout_path(version_slug))
                update_docs_output['requirements'] = run(
                    '{cmd} install --exists-action=w -r {requirements}'.format(
                        cmd=project.venv_bin(version=version_slug, bin='pip'),
                        requirements=project.requirements_file))
            os.chdir(project.checkout_path(version_slug))
            if os.path.isfile("setup.py"):
                if getattr(settings, 'USE_PIP_INSTALL', False):
                    update_docs_output['install'] = run(
                        '{cmd} install --ignore-installed .'.format(
                            cmd=project.venv_bin(version=version_slug,
                                                 bin='pip')))
                else:
                    update_docs_output['install'] = run(
                        '{cmd} setup.py install --force'.format(
                            cmd=project.venv_bin(version=version_slug,
                                                 bin='python')))
            else:
                update_docs_output['install'] = (
                    999, "", "No setup.py, skipping install")

        # Update tags/version

        version_post_data = {'repo': version_repo.repo_url}

        if version_repo.supports_tags:
            version_post_data['tags'] = [{
                'identifier': v.identifier,
                'verbose_name': v.verbose_name,
            } for v in version_repo.tags]

        if version_repo.supports_branches:
            version_post_data['branches'] = [{
                'identifier': v.identifier,
                'verbose_name': v.verbose_name,
            } for v in version_repo.branches]

        try:
            apiv2.project(project.pk).sync_versions.post(version_post_data)
        except Exception, e:
            print "Sync Verisons Exception: %s" % e.message
コード例 #44
0
def build_docs(version_pk, pdf, man, epub, dash, search, record, force):
    """
    This handles the actual building of the documentation and DB records
    """
    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project

    if not project.conf_file(version.slug):
        return ('', 'Conf file not found.', -1)

    with project.repo_lock(getattr(settings, 'REPO_LOCK_SECONDS', 30)):

        html_builder = builder_loading.get(project.documentation_type)(version)
        if force:
            html_builder.force()
        html_builder.clean()
        html_results = html_builder.build()
        if html_results[0] == 0:
            html_builder.move()

        fake_results = (999, "Project Skipped, Didn't build",
                        "Project Skipped, Didn't build")
        # Only build everything else if the html build changed.
        if html_builder.changed and not project.skip:
            if dash:
                dash_builder = builder_loading.get('sphinx_dash')(version)
                dash_results = dash_builder.build()
                if dash_results[0] == 0:
                    dash_builder.move()
            else:
                dash_results = fake_results
            if pdf:
                pdf_builder = builder_loading.get('sphinx_pdf')(version)
                latex_results, pdf_results = pdf_builder.build()
                # Always move pdf results even when there's an error.
                #if pdf_results[0] == 0:
                pdf_builder.move()
            else:
                pdf_results = latex_results = fake_results
            if man:
                man_builder = builder_loading.get('sphinx_man')(version)
                man_results = man_builder.build()
                if man_results[0] == 0:
                    man_builder.move()
            else:
                man_results = fake_results
            if epub:
                epub_builder = builder_loading.get('sphinx_epub')(version)
                epub_results = epub_builder.build()
                if epub_results[0] == 0:
                    epub_builder.move()
            else:
                epub_results = fake_results

            if search:
                try:
                    # BETA
                    search_builder = builder_loading.get('sphinx_search')(version)
                    search_results = search_builder.build()
                    if search_results[0] == 0:
                        search_builder.upload()
                except Exception, e:
                    log.error(LOG_TEMPLATE.format(project=project.slug, version=version.slug, msg=e.message), exc_info=True)
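
builder_loading.get() resolves a backend name such as 'sphinx_pdf' or
'sphinx_search' to a builder class, which is then instantiated with the
version; each builder exposes force/clean/build/move (plus upload for the
search builder). The stripped-down illustration below shows that pattern only;
it is not the project's actual loader or builder code.

class ExampleHtmlBuilder(object):
    """Illustrative builder with the interface the tasks above rely on."""

    def __init__(self, version):
        self.version = version
        self.changed = True  # set when build() produced new output

    def force(self):
        pass  # request a full rebuild

    def clean(self):
        pass  # drop stale build artifacts

    def build(self):
        return (0, 'build output', '')  # (exit_code, stdout, stderr)

    def move(self):
        pass  # copy finished docs to the public serving location


BACKENDS = {'example_html': ExampleHtmlBuilder}


def get_builder(name):
    # Counterpart of builder_loading.get(name) in the real code base.
    return BACKENDS[name]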
コード例 #45
0
ファイル: tasks.py プロジェクト: titilambert/readthedocs.org
def update_docs(pk,
                record=True,
                pdf=True,
                man=True,
                epub=True,
                dash=True,
                version_pk=None,
                force=False,
                **kwargs):
    """The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we
    created it.  Then it will build the html docs and other requested parts. It
    also handles clearing the varnish cache.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful
        for preventing changes visible to the end-user when running commands
        from the shell, for example.

    """

    ###
    # Handle passed in arguments
    ###
    project_data = api.project(pk).get()
    project = make_api_project(project_data)

    # Prevent saving the temporary Project instance
    def new_save(*args, **kwargs):
        log.warning("Called save on a non-real object.")
        return 0

    project.save = new_save

    log.info("Building %s" % project)
    if version_pk:
        version_data = api.version(version_pk).get()
    else:
        branch = project.default_branch or project.vcs_repo().fallback_branch
        try:
            # Use latest version
            version_data = (api.version(
                project.slug).get(slug='latest')['objects'][0])
        except (slumber.exceptions.HttpClientError, IndexError):
            # Create the latest version since it doesn't exist
            version_data = dict(
                project='/api/v1/project/%s/' % project.pk,
                slug='latest',
                active=True,
                verbose_name='latest',
                identifier=branch,
            )
            try:
                version_data = api.version.post(version_data)
            except Exception as e:
                log.info("Exception in creating version: %s" % e)
                raise e

    version = make_api_version(version_data)
    version.save = new_save

    if not version_pk:
        # Lots of course correction.
        to_save = False
        if not version.verbose_name:
            version_data['verbose_name'] = 'latest'
            to_save = True
        if not version.active:
            version_data['active'] = True
            to_save = True
        if version.identifier != branch:
            version_data['identifier'] = branch
            to_save = True
        if to_save:
            version_data['project'] = ("/api/v1/version/%s/" %
                                       version_data['project'].pk)
            api.version(version.pk).put(version_data)

    if record:
        # Create Build Object.
        build = api.build.post(
            dict(
                project='/api/v1/project/%s/' % project.pk,
                version='/api/v1/version/%s/' % version.pk,
                type='html',
                state='triggered',
            ))
    else:
        build = {}

    try:
        log.info("Updating docs from VCS")
        update_output = update_imported_docs(version.pk)
        #update_output = update_result.get()
    except ProjectImportError, err:
        log.error("Failed to import project; skipping build.", exc_info=True)
        build['state'] = 'finished'
        build['setup_error'] = ('Failed to import project; skipping build.\n'
                                'Please make sure your repo is correct and '
                                'you have a conf.py')
        api.build(build['id']).put(build)
        return False
コード例 #46
0
ファイル: tasks.py プロジェクト: jalaziz/readthedocs.org
def update_docs(pk,
                record=True,
                pdf=True,
                man=True,
                epub=True,
                version_pk=None,
                force=False,
                **kwargs):
    """
    The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we created it.
    Then it will build the html docs and other requested parts.
    It also handles clearing the varnish cache.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful
        for preventing changes visible to the end-user when running commands from
        the shell, for example.
    """

    ###
    # Handle passed in arguments
    ###
    update_output = kwargs.get('update_output', {})
    project_data = api.project(pk).get()
    del project_data['users']
    del project_data['resource_uri']
    del project_data['absolute_url']
    project = Project(**project_data)

    # Prevent saving the temporary Project instance
    def new_save(*args, **kwargs):
        log.warning("Called save on a non-real object.")
        return 0

    project.save = new_save

    log.info("Building %s" % project)
    if version_pk:
        version_data = api.version(version_pk).get()
        del version_data['resource_uri']
    else:
        #Create or use the 'latest' branch, which is the default for a project.
        branch = project.default_branch or project.vcs_repo().fallback_branch
        try:
            version_data = api.version(
                project.slug).get(slug='latest')['objects'][0]
            del version_data['resource_uri']
        except (slumber.exceptions.HttpClientError, IndexError) as exc:
            #if exc.response.status_code in [404,500]:
            version_data = dict(
                project='/api/v1/project/%s/' % project.pk,
                slug='latest',
                active=True,
                verbose_name='latest',
                identifier=branch,
            )
            try:
                version_data = api.version.post(version_data)
                del version_data['resource_uri']
            except Exception as e:
                log.info("Exception in creating version: %s" % e)
                #raise e
    version_data['project'] = project
    version = Version(**version_data)
    version.save = new_save

    if not version_pk:
        #Lots of course correction.
        to_save = False
        if not version.verbose_name:
            version_data['verbose_name'] = 'latest'
            to_save = True
        if not version.active:
            version_data['active'] = True
            to_save = True
        if version.identifier != branch:
            version_data['identifier'] = branch
            to_save = True
        if to_save:
            version_data['project'] = ("/api/v1/version/%s/" %
                                       version_data['project'].pk)
            api.version(version.pk).put(version_data)

    if record:
        #Create Build Object.
        build = api.build.post(
            dict(
                project='/api/v1/project/%s/' % project.pk,
                version='/api/v1/version/%s/' % version.pk,
                type='html',
                state='triggered',
            ))
    else:
        build = {}

    #Make Dirs
    path = project.doc_path
    if not os.path.exists(path):
        os.makedirs(path)
    with project.repo_lock(30):
        if project.is_imported:
            try:
                update_output = update_imported_docs(project, version)
            except ProjectImportError, err:
                log.error("Failed to import project; skipping build.",
                          exc_info=True)
                build['state'] = 'finished'
                build['setup_error'] = (
                    'Failed to import project; skipping build.\n'
                    'Please make sure your repo is correct and '
                    'you have a conf.py')
                api.build(build['id']).put(build)
                return False
        else:
コード例 #47
0
ファイル: tasks.py プロジェクト: barfle/readthedocs.org
def update_imported_docs(version_pk, api=None):
    """
    Check out or update the given project's repository.
    """
    if api is None:
        api = tastyapi.api

    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project
    ret_dict = {}

    # Make Dirs
    if not os.path.exists(project.doc_path):
        os.makedirs(project.doc_path)

    if not project.vcs_repo():
        raise ProjectImportError(
            ("Repo type '{0}' unknown".format(project.repo_type)))

    with project.repo_nonblockinglock(version=version,
                                      max_lock_age=getattr(
                                          settings, 'REPO_LOCK_SECONDS', 30)):

        before_vcs.send(sender=version)
        # Get the actual code on disk
        if version:
            log.info(
                LOG_TEMPLATE.format(
                    project=project.slug,
                    version=version.slug,
                    msg='Checking out version {slug}: {identifier}'.format(
                        slug=version.slug, identifier=version.identifier)))
            version_slug = version.slug
            version_repo = project.vcs_repo(version_slug)
            ret_dict['checkout'] = version_repo.checkout(version.identifier, )
        else:
            # Does this ever get called?
            log.info(
                LOG_TEMPLATE.format(project=project.slug,
                                    version=version.slug,
                                    msg='Updating to latest revision'))
            version_slug = 'latest'
            version_repo = project.vcs_repo(version_slug)
            ret_dict['checkout'] = version_repo.update()

        after_vcs.send(sender=version)

        # Update tags/version

        version_post_data = {'repo': version_repo.repo_url}

        if version_repo.supports_tags:
            version_post_data['tags'] = [{
                'identifier': v.identifier,
                'verbose_name': v.verbose_name,
            } for v in version_repo.tags]

        if version_repo.supports_branches:
            version_post_data['branches'] = [{
                'identifier': v.identifier,
                'verbose_name': v.verbose_name,
            } for v in version_repo.branches]

        try:
            apiv2.project(project.pk).sync_versions.post(version_post_data)
        except Exception, e:
            print "Sync Versions Exception: %s" % e.message
コード例 #48
0
ファイル: tasks.py プロジェクト: billthornton/readthedocs.org
        api.build.post(
            dict(
                project="/api/v1/project/%s/" % project.pk,
                version="/api/v1/version/%s/" % version.pk,
                success=pdf_results[0] == 0,
                type="pdf",
                setup=latex_results[1],
                setup_error=latex_results[2],
                output=pdf_results[1],
                error=pdf_results[2],
            )
        )

    if version:
        # Mark version active on the site
        version_data = api.version(version.pk).get()
        version_data["active"] = True
        version_data["built"] = True
        # Need to delete this because a bug in tastypie breaks on the users list.
        del version_data["project"]
        try:
            api.version(version.pk).put(version_data)
        except Exception, e:
            log.error("Unable to post a new version", exc_info=True)

    # Build Finished, do house keeping bits

    if "no targets are out of date." in out:
        log.info("Build Unchanged")
    else:
        if ret == 0:
コード例 #49
0
ファイル: tasks.py プロジェクト: billthornton/readthedocs.org
def update_imported_docs(version_pk):
    """
    Check out or update the given project's repository.
    """
    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project

    # Make Dirs
    if not os.path.exists(project.doc_path):
        os.makedirs(project.doc_path)

    with project.repo_lock(getattr(settings, "REPO_LOCK_SECONDS", 30)):
        update_docs_output = {}
        if not project.vcs_repo():
            raise ProjectImportError("Repo type '{repo_type}' unknown".format(repo_type=project.repo_type))

        # Get the actual code on disk
        if version:
            log.info(
                "Checking out version {slug}: {identifier}".format(slug=version.slug, identifier=version.identifier)
            )
            version_slug = version.slug
            version_repo = project.vcs_repo(version_slug)
            update_docs_output["checkout"] = version_repo.checkout(version.identifier)
        else:
            # Does this ever get called?
            log.info("Updating to latest revision")
            version_slug = "latest"
            version_repo = project.vcs_repo(version_slug)
            update_docs_output["checkout"] = version_repo.update()

        # Ensure we have a conf file (an exception is raised if not)
        project.conf_file(version.slug)

        # Do Virtualenv bits:
        if project.use_virtualenv:
            if project.use_system_packages:
                site_packages = "--system-site-packages"
            else:
                site_packages = "--no-site-packages"
            # Here the command has been modified to support different
            # interpreters.
            update_docs_output["venv"] = run(
                "{cmd} --distribute {site_packages} {path}".format(
                    cmd="virtualenv -p {interpreter}".format(interpreter=project.python_interpreter),
                    site_packages=site_packages,
                    path=project.venv_path(version=version_slug),
                )
            )
            # Other code expects sphinx-build to be installed inside the virtualenv.
            # Using the -I option makes sure it gets installed even if it is
            # already installed system-wide (and --system-site-packages is used)
            if project.use_system_packages:
                ignore_option = "-I"
            else:
                ignore_option = ""
            if project.python_interpreter != "python3":
                update_docs_output["sphinx"] = run(
                    "{cmd} install -U {ignore_option} hg+http://bitbucket.org/birkenfeld/sphinx/@d4c6ac1fcc9c#egg=Sphinx virtualenv==1.8.2 distribute==0.6.28 docutils==0.8.1".format(
                        cmd=project.venv_bin(version=version_slug, bin="pip"), ignore_option=ignore_option
                    )
                )
            else:
                # python 3 specific hax
                update_docs_output["sphinx"] = run(
                    "{cmd} install {ignore_option} hg+http://bitbucket.org/birkenfeld/sphinx/@d4c6ac1fcc9c#egg=Sphinx virtualenv==1.8.2 docutils==0.8.1".format(
                        cmd=project.venv_bin(version=version_slug, bin="pip"), ignore_option=ignore_option
                    )
                )

            if project.requirements_file:
                os.chdir(project.checkout_path(version_slug))
                update_docs_output["requirements"] = run(
                    "{cmd} install -r {requirements}".format(
                        cmd=project.venv_bin(version=version_slug, bin="pip"), requirements=project.requirements_file
                    )
                )
            os.chdir(project.checkout_path(version_slug))
            if getattr(settings, "USE_PIP_INSTALL", False):
                update_docs_output["install"] = run(
                    "{cmd} install --ignore-installed .".format(cmd=project.venv_bin(version=version_slug, bin="pip"))
                )
            else:
                update_docs_output["install"] = run(
                    "{cmd} setup.py install --force".format(cmd=project.venv_bin(version=version_slug, bin="python"))
                )

        # check tags/version
        # XXX:dc: what in this block raises the values error?
        try:
            old_versions = [
                obj["identifier"] for obj in api.version.get(project__slug=project.slug, limit=5000)["objects"]
            ]
            if version_repo.supports_tags:
                transaction.enter_transaction_management(True)
                tags = version_repo.tags
                for tag in tags:
                    if tag.identifier in old_versions:
                        continue
                    log.debug("NEW TAG: (%s not in %s)" % (tag.identifier, old_versions))
                    slug = slugify_uniquely(Version, tag.verbose_name, "slug", 255, project=project)
                    try:

                        version_data = api.version.post(
                            dict(
                                project="/api/v1/project/%s/" % project.pk,
                                slug=slug,
                                identifier=tag.identifier,
                                verbose_name=tag.verbose_name,
                            )
                        )
                        ver = make_api_version(version_data)
                        log.info("New tag found: {0}".format(tag.identifier))
                        ver, highest = project.highest_version[1]
                        ver_obj = mkversion(ver)
                        # TODO: Handle updating higher versions automatically.
                        # This never worked very well, anyways.
                        if highest and ver_obj and ver_obj > highest:
                            log.info("Highest version known, building docs")
                            update_docs.delay(ver.project.pk, version_pk=ver.pk)
                    except Exception, e:
                        log.error("Failed to create version (tag)", exc_info=True)
                        transaction.rollback()
                transaction.leave_transaction_management()
            if version_repo.supports_branches:
                transaction.enter_transaction_management(True)
                branches = version_repo.branches
                for branch in branches:
                    if branch.identifier in old_versions:
                        continue
                    log.debug("NEW BRANCH: (%s not in %s)" % (branch, old_versions))
                    slug = slugify_uniquely(Version, branch.verbose_name, "slug", 255, project=project)
                    try:
                        api.version.post(
                            dict(
                                project="/api/v1/project/%s/" % project.pk,
                                slug=slug,
                                identifier=branch.identifier,
                                verbose_name=branch.verbose_name,
                            )
                        )
                        log.info("New branch found: {0}".format(branch.identifier))
                    except Exception, e:
                        log.error("Failed to create version (branch)", exc_info=True)
                        transaction.rollback()
                transaction.leave_transaction_management()
コード例 #50
0
ファイル: tasks.py プロジェクト: awesome/readthedocs.org
def update_imported_docs(version_pk, api=None):
    """
    Check out or update the given project's repository.
    """
    if api is None:
        api = tastyapi.api

    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project

    # Make Dirs
    if not os.path.exists(project.doc_path):
        os.makedirs(project.doc_path)

    with project.repo_lock(version, getattr(settings, 'REPO_LOCK_SECONDS', 30)):
        update_docs_output = {}
        if not project.vcs_repo():
            raise ProjectImportError(("Repo type '{0}' unknown"
                                      .format(project.repo_type)))

        # Get the actual code on disk
        if version:
            log.info(
                LOG_TEMPLATE.format(
                    project=project.slug, 
                    version=version.slug, 
                    msg='Checking out version {slug}: {identifier}'.format(
                        slug=version.slug, 
                        identifier=version.identifier
                    )
                )
            )
            version_slug = version.slug
            version_repo = project.vcs_repo(version_slug)
            update_docs_output['checkout'] = version_repo.checkout(
                version.identifier
            )
        else:
            # Does this ever get called?
            log.info(LOG_TEMPLATE.format(project=project.slug, version=version.slug, msg='Updating to latest revision'))
            version_slug = 'latest'
            version_repo = project.vcs_repo(version_slug)
            update_docs_output['checkout'] = version_repo.update()

        # Do Virtualenv bits:
        if project.use_virtualenv:
            build_dir = os.path.join(project.venv_path(version=version_slug), 'build')
            if os.path.exists(build_dir):
                log.info(LOG_TEMPLATE.format(project=project.slug, version=version.slug, msg='Removing existing build dir'))
                shutil.rmtree(build_dir)
            if project.use_system_packages:
                site_packages = '--system-site-packages'
            else:
                site_packages = '--no-site-packages'
            # Here the command has been modified to support different
            # interpreters.
            update_docs_output['venv'] = run(
                '{cmd} {site_packages} {path}'.format(
                    cmd='virtualenv-2.7 -p {interpreter}'.format(
                        interpreter=project.python_interpreter),
                    site_packages=site_packages,
                    path=project.venv_path(version=version_slug)
                )
            )
            # Other code expects sphinx-build to be installed inside the
            # virtualenv.  Using the -I option makes sure it gets installed
            # even if it is already installed system-wide (and
            # --system-site-packages is used)
            if project.use_system_packages:
                ignore_option = '-I'
            else:
                ignore_option = ''
            if project.python_interpreter != 'python3':
                sphinx = 'sphinx==1.2'
                update_docs_output['sphinx'] = run(
                    ('{cmd} install -U {ignore_option} {sphinx} '
                     'virtualenv==1.10.1 setuptools==1.1 '
                     'docutils==0.11 git+git://github.com/ericholscher/readthedocs-sphinx-ext#egg=readthedocs_ext').format(
                        cmd=project.venv_bin(version=version_slug, bin='pip'),
                        sphinx=sphinx, ignore_option=ignore_option))
            else:
                sphinx = 'sphinx==1.2'
                # python 3 specific hax
                update_docs_output['sphinx'] = run(
                    ('{cmd} install -U {ignore_option} {sphinx} '
                     'virtualenv==1.9.1 docutils==0.11 git+git://github.com/ericholscher/readthedocs-sphinx-ext#egg=readthedocs_ext').format(
                        cmd=project.venv_bin(version=version_slug, bin='pip'),
                        sphinx=sphinx, ignore_option=ignore_option))

            if project.requirements_file:
                os.chdir(project.checkout_path(version_slug))
                update_docs_output['requirements'] = run(
                    '{cmd} install --exists-action=w -r {requirements}'.format(
                        cmd=project.venv_bin(version=version_slug, bin='pip'),
                        requirements=project.requirements_file))
            os.chdir(project.checkout_path(version_slug))
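            # Also install the checked-out project itself into the virtualenv
            # (USE_PIP_INSTALL toggles pip vs. `setup.py install`), presumably
            # so Sphinx extensions such as autodoc can import the package.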
            if os.path.isfile("setup.py"):
                if getattr(settings, 'USE_PIP_INSTALL', False):
                    update_docs_output['install'] = run(
                        '{cmd} install --ignore-installed .'.format(
                            cmd=project.venv_bin(version=version_slug, bin='pip')))
                else:
                    update_docs_output['install'] = run(
                        '{cmd} setup.py install --force'.format(
                            cmd=project.venv_bin(version=version_slug,
                                                 bin='python')))
            else:
                update_docs_output['install'] = (999, "", "No setup.py, skipping install")

        # Update tags/version

        version_post_data = {'repo': version_repo.repo_url}

        if version_repo.supports_tags:
            version_post_data['tags'] = [
                {'identifier': v.identifier,
                 'verbose_name': v.verbose_name,
                 } for v in version_repo.tags
            ]

        if version_repo.supports_branches:
            version_post_data['branches'] = [
                {'identifier': v.identifier,
                 'verbose_name': v.verbose_name,
                 } for v in version_repo.branches
            ]

        try:
            apiv2.project(project.pk).sync_versions.post(version_post_data)
        except Exception, e:
            print "Sync Verisons Exception: %s" % e.message
Code Example #51
File: tasks.py  Project: billthornton/readthedocs.org
def update_docs(pk, record=True, pdf=True, man=True, epub=True, version_pk=None, force=False, **kwargs):
    """
    The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we created it.
    Then it builds the HTML docs and any other requested formats.
    It also handles clearing the Varnish cache.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful
        for preventing changes from becoming visible to the end user when
        running commands from the shell, for example.
    """

    ###
    # Handle passed in arguments
    ###
    project_data = api.project(pk).get()
    project = make_api_project(project_data)

    # Prevent saving the temporary Project instance
    def new_save(*args, **kwargs):
        log.warning("Called save on a non-real object.")
        return 0

    project.save = new_save

    log.info("Building %s" % project)
    if version_pk:
        version_data = api.version(version_pk).get()
    else:
        branch = project.default_branch or project.vcs_repo().fallback_branch
        try:
            # Use latest version
            version_data = api.version(project.slug).get(slug="latest")["objects"][0]
        except (slumber.exceptions.HttpClientError, IndexError):
            # Create the latest version since it doesn't exist
            version_data = dict(
                project="/api/v1/project/%s/" % project.pk,
                slug="latest",
                active=True,
                verbose_name="latest",
                identifier=branch,
            )
            try:
                version_data = api.version.post(version_data)
            except Exception as e:
                log.info("Exception in creating version: %s" % e)
                raise e

    version = make_api_version(version_data)
    version.save = new_save

    if not version_pk:
        # Lots of course correction.
        to_save = False
        if not version.verbose_name:
            version_data["verbose_name"] = "latest"
            to_save = True
        if not version.active:
            version_data["active"] = True
            to_save = True
        if version.identifier != branch:
            version_data["identifier"] = branch
            to_save = True
        if to_save:
            version_data["project"] = "/api/v1/version/%s/" % version_data["project"].pk
            api.version(version.pk).put(version_data)

    if record:
        # Create Build Object.
        build = api.build.post(
            dict(
                project="/api/v1/project/%s/" % project.pk,
                version="/api/v1/version/%s/" % version.pk,
                type="html",
                state="triggered",
            )
        )
    else:
        build = {}

    try:
        log.info("Updating docs from VCS")
        update_output = update_imported_docs(version.pk)
        # update_output = update_result.get()
    except ProjectImportError, err:
        log.error("Failed to import project; skipping build.", exc_info=True)
        build["state"] = "finished"
        build["setup_error"] = (
            "Failed to import project; skipping build.\n"
            "Please make sure your repo is correct and you have a conf.py"
        )
        api.build(build["id"]).put(build)
        return False
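
A hedged usage sketch, assuming update_docs is registered as a Celery task (the retry call in a later example suggests it is): queue a build asynchronously, or run one inline from a shell without recording a Build object.

update_docs.delay(pk=project.pk, record=True)           # queue via Celery (assumes task registration)
update_docs(pk=project.pk, record=False, force=True)    # run inline, no Build record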
Code Example #52
File: tasks.py  Project: awesome/readthedocs.org
def build_docs(version_pk, pdf, man, epub, dash, search, localmedia, record, force):
    """
    This handles the actual building of the documentation and DB records
    """
    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project

    if 'sphinx' in project.documentation_type and not project.conf_file(version.slug):
        return ('', 'Conf file not found.', -1)

    with project.repo_lock(version, getattr(settings, 'REPO_LOCK_SECONDS', 30)):

        html_builder = builder_loading.get(project.documentation_type)(version)
        if force:
            html_builder.force()
        html_builder.clean()
        html_results = html_builder.build()
        if html_results[0] == 0:
            html_builder.move()

        fake_results = (999, "Project Skipped, Didn't build",
                        "Project Skipped, Didn't build")
        # Only build everything else if the html build changed.
        if html_builder.changed and 'sphinx' in project.documentation_type:
            # Search builder. Creates JSON from docs and sends it to the server.
            if search:
                try:
                    search_builder = builder_loading.get('sphinx_search')(version)
                    search_results = search_builder.build()
                    if search_results[0] == 0:
                        # Update search index
                        search_builder.upload()
                        # Copy json for safe keeping
                        search_builder.move()
                except:
                    log.error(LOG_TEMPLATE.format(project=project.slug, version=version.slug, msg="JSON Build Error"), exc_info=True)
            # Local media builder for singlepage HTML download archive
            if localmedia:
                try:
                    localmedia_builder = builder_loading.get('sphinx_singlehtmllocalmedia')(version)
                    localmedia_results = localmedia_builder.build()
                    if localmedia_results[0] == 0:
                        localmedia_builder.move()
                except:
                    log.error(LOG_TEMPLATE.format(project=project.slug, version=version.slug, msg="Local Media HTML Build Error"), exc_info=True)
                    
            # Optional build steps 
            if version.project.slug not in HTML_ONLY and not project.skip:
                if pdf:
                    pdf_builder = builder_loading.get('sphinx_pdf')(version)
                    latex_results, pdf_results = pdf_builder.build()
                    # Always move pdf results even when there's an error.
                    #if pdf_results[0] == 0:
                    pdf_builder.move()
                else:
                    pdf_results = latex_results = fake_results
                if dash:
                    dash_builder = builder_loading.get('sphinx_dash')(version)
                    dash_results = dash_builder.build()
                    if dash_results[0] == 0:
                        dash_builder.move()
                else:
                    dash_results = fake_results

                if epub:
                    epub_builder = builder_loading.get('sphinx_epub')(version)
                    epub_results = epub_builder.build()
                    if epub_results[0] == 0:
                        epub_builder.move()
                else:
                    epub_results = fake_results
            else:
                search_results = dash_results = latex_results = pdf_results = epub_results = (999, "Optional builds disabled", "Optional builds disabled")
        else:
            search_results = dash_results = latex_results = pdf_results = epub_results = (999, "Optional builds disabled", "Optional builds disabled")


    return (html_results, latex_results, pdf_results,
            epub_results, dash_results, search_results)
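
A hedged sketch of how a caller might unpack build_docs' return value; judging by the checks above, index 0 of each result tuple is an exit status, with 0 meaning success and 999 marking a skipped or disabled step.

(html_results, latex_results, pdf_results,
 epub_results, dash_results, search_results) = build_docs(
    version_pk=version.pk, pdf=True, man=False, epub=True,
    dash=False, search=True, localmedia=True, record=True, force=False)
if html_results[0] == 0:
    log.info("HTML build succeeded")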
Code Example #53
File: tasks.py  Project: billthornton/readthedocs.org
def clear_artifacts(version_pk):
    """ Remove artifacts from the build server. """
    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
Code Example #54
def update_docs(pk, version_pk=None, record=True, docker=False,
                pdf=True, man=True, epub=True, dash=True,
                search=True, force=False, intersphinx=True, localmedia=True,
                api=None, **kwargs):
    """
    The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we
    created it.  Then it will build the html docs and other requested parts.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful
        for preventing changes from becoming visible to the end user when
        running commands from the shell, for example.

    """

    # Dependency injection to allow for testing
    if api is None:
        api = tastyapi.api

    project_data = api.project(pk).get()
    project = make_api_project(project_data)
    log.info(LOG_TEMPLATE.format(
        project=project.slug, version='', msg='Building'))
    version = ensure_version(api, project, version_pk)
    build = create_build(version, api, record)
    results = {}
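    # The build record is advanced through states as work proceeds: 'cloning',
    # then 'installing' and/or 'building', and finally wrapped up by finish_build.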

    try:
        record_build(
            api=api, build=build, record=record, results=results, state='cloning')
        vcs_results = setup_vcs(version, build, api)
        if vcs_results:
            results.update(vcs_results)

        if docker:
            record_build(
                api=api, build=build, record=record, results=results, state='building')
            build_results = run_docker(version)
            results.update(build_results)
        else:
            record_build(
                api=api, build=build, record=record, results=results, state='installing')
            setup_results = setup_environment(version)
            results.update(setup_results)

            record_build(
                api=api, build=build, record=record, results=results, state='building')
            build_results = build_docs(
                version, force, pdf, man, epub, dash, search, localmedia)
            results.update(build_results)

        move_files(version, results)
        record_pdf(api=api, record=record, results=results,
                   state='finished', version=version)
        finish_build(version=version, build=build, results=results)

        if results['html'][0] == 0:
            # Mark version active on the site
            version_data = api.version(version.pk).get()
            version_data['active'] = True
            version_data['built'] = True
            # Need to delete this because a bug in tastypie breaks on the users
            # list.
            del version_data['project']
            try:
                api.version(version.pk).put(version_data)
            except Exception, e:
                log.error(LOG_TEMPLATE.format(project=version.project.slug,
                                              version=version.slug, msg="Unable to put a new version"), exc_info=True)
    except vcs_support_utils.LockTimeout, e:
        results['checkout'] = (
            999, "", "Version locked, retrying in 5 minutes.")
        log.info(LOG_TEMPLATE.format(project=version.project.slug,
                                     version=version.slug, msg="Unable to lock, will retry"))
        # http://celery.readthedocs.org/en/3.0/userguide/tasks.html#retrying
        # Should completely retry the task for us until max_retries is exceeded
        update_docs.retry(exc=e, throw=False)
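
Because the api argument is injected, a test can pass a stub instead of the real tastyapi client. A minimal testing sketch using the standard mock library; the stubbed return value is an assumption, and the remaining endpoints would need similar stubs before the task could run end to end.

try:
    from unittest import mock   # Python 3
except ImportError:
    import mock                 # Python 2 backport

fake_api = mock.MagicMock()
fake_api.project(1).get.return_value = {'id': 1, 'slug': 'example-project'}
# version/build endpoints would need stubbing too before calling:
# update_docs(pk=1, record=False, api=fake_api)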