def update_imported_docs(version_pk, api=None):
    """
    Check out or update the given project's repository.

    :param version_pk: primary key of the Version to check out.
    :param api: tastypie API client; defaults to the module-level client
        (injectable for tests).
    """
    if api is None:
        api = tastyapi.api
    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project
    ret_dict = {}
    # Make Dirs
    if not os.path.exists(project.doc_path):
        os.makedirs(project.doc_path)
    with project.repo_nonblockinglock(
            version=version,
            max_lock_age=getattr(settings, "REPO_LOCK_SECONDS", 30)):
        if not project.vcs_repo():
            raise ProjectImportError(
                "Repo type '{0}' unknown".format(project.repo_type))
        # Get the actual code on disk
        if version:
            log.info(
                LOG_TEMPLATE.format(
                    project=project.slug,
                    version=version.slug,
                    msg="Checking out version {slug}: {identifier}".format(
                        slug=version.slug,
                        identifier=version.identifier,
                    ),
                )
            )
            version_slug = version.slug
            version_repo = project.vcs_repo(version_slug)
            ret_dict["checkout"] = version_repo.checkout(version.identifier)
        else:
            # Does this ever get called?
            log.info(LOG_TEMPLATE.format(project=project.slug,
                                         version=version.slug,
                                         msg="Updating to latest revision"))
            version_slug = "latest"
            version_repo = project.vcs_repo(version_slug)
            ret_dict["checkout"] = version_repo.update()
        # Update tags/version
        version_post_data = {"repo": version_repo.repo_url}
        if version_repo.supports_tags:
            version_post_data["tags"] = [
                {"identifier": v.identifier, "verbose_name": v.verbose_name}
                for v in version_repo.tags
            ]
        if version_repo.supports_branches:
            version_post_data["branches"] = [
                {"identifier": v.identifier, "verbose_name": v.verbose_name}
                for v in version_repo.branches
            ]
        try:
            apiv2.project(project.pk).sync_versions.post(version_post_data)
        except Exception as e:
            # Fixes: Python 3-compatible `except ... as` syntax (the file
            # already uses it in `except IOError as e`), log instead of a
            # bare Py2 print, deprecated `e.message`, and the "Verisons"
            # typo in the message.
            log.error("Sync Versions Exception: %s" % e)
def update_static_metadata(project_pk):
    """Update static metadata JSON file

    Metadata settings include the following project settings:

    version
        The default version for the project, default: `latest`

    language
        The default language for the project, default: `en`

    languages
        List of languages built by linked translation projects.
    """
    project_base = apiv2.project(project_pk)
    project_data = project_base.get()
    project = make_api_project(project_data)
    translations = project_base.translations.get()["translations"]
    languages = set(
        translation["language"]
        for translation in translations
        if "language" in translation
    )
    # Convert to JSON safe types
    metadata = {
        "version": project.default_version,
        "language": project.language,
        "languages": list(languages),
    }
    try:
        path = project.static_metadata_path()
        # Fix: use a context manager so the file handle is closed even if
        # json.dump raises (the original leaked `fh` on failure).
        with open(path, "w") as fh:
            json.dump(metadata, fh)
        copy_file_to_app_servers(path, path)
    except IOError as e:
        log.debug(
            LOG_TEMPLATE.format(
                project=project.slug,
                version="",
                msg="Cannot write to metadata.json: {0}".format(e),
            )
        )
def symlink_translations(version):
    """
    Link from HOME/user_builds/project/translations/<lang> ->
              HOME/user_builds/<project>/rtd-builds/
    """
    translations = apiv2.project(version.project.pk).translations.get()['translations']
    for translation_data in translations:
        translation_slug = translation_data['slug'].replace('_', '-')
        translation_language = translation_data['language']
        log.debug(LOG_TEMPLATE.format(
            project=version.project.slug,
            version=version.slug,
            msg="Symlinking translation: %s->%s" % (translation_language,
                                                   translation_slug)))
        # The directory for this specific translation
        symlink = version.project.translations_symlink_path(translation_language)
        run_on_app_servers('mkdir -p %s' % '/'.join(symlink.split('/')[:-1]))
        # Where the actual docs live
        docs_dir = os.path.join(settings.DOCROOT, translation_slug, 'rtd-builds')
        run_on_app_servers('ln -nsf %s %s' % (docs_dir, symlink))
    # Hack in the en version for backwards compat
    symlink = version.project.translations_symlink_path('en')
    run_on_app_servers('mkdir -p %s' % '/'.join(symlink.split('/')[:-1]))
    docs_dir = os.path.join(version.project.doc_path, 'rtd-builds')
    run_on_app_servers('ln -nsf %s %s' % (docs_dir, symlink))
    # Add the main language project to nginx too
    # BUG FIX: original used `is not 'en'`, which compares object identity,
    # not equality -- its result for interned string literals is
    # implementation-defined. Use `!=` for a value comparison.
    if version.project.language != 'en':
        symlink = version.project.translations_symlink_path(version.project.language)
        run_on_app_servers('mkdir -p %s' % '/'.join(symlink.split('/')[:-1]))
        docs_dir = os.path.join(settings.DOCROOT,
                                version.project.slug.replace('_', '-'),
                                'rtd-builds')
        run_on_app_servers('ln -nsf %s %s' % (docs_dir, symlink))
def symlink_subprojects(version):
    """
    Symlink each subproject of this version's project so its docs are
    served under the parent project.

    Creates HOME/user_builds/project/subprojects/<project> ->
            HOME/user_builds/<project>/rtd-builds/
    """
    parent = version.project
    subproject_list = apiv2.project(parent.pk).subprojects.get()['subprojects']
    for data in subproject_list:
        # Serve both the raw slug and a dash-normalized alias when the
        # slug contains underscores.
        candidate_slugs = [data['slug']]
        if '_' in candidate_slugs[0]:
            candidate_slugs.append(candidate_slugs[0].replace('_', '-'))
        for slug in candidate_slugs:
            log.debug(LOG_TEMPLATE.format(
                project=parent.slug,
                version=version.slug,
                msg="Symlinking subproject: %s" % slug))
            # The directory for this specific subproject
            link_path = parent.subprojects_symlink_path(slug)
            parent_dir = '/'.join(link_path.split('/')[:-1])
            run_on_app_servers('mkdir -p %s' % parent_dir)
            # Where the actual docs live
            target_dir = os.path.join(settings.DOCROOT, slug, 'rtd-builds')
            run_on_app_servers('ln -nsf %s %s' % (target_dir, link_path))
def symlink_subprojects(version):
    """
    Symlink every subproject under the parent project's docroot.

    Creates HOME/user_builds/project/subprojects/<project> ->
            HOME/user_builds/<project>/rtd-builds/
    """
    parent = version.project
    # Slugs come from the API when DB access is disabled, otherwise
    # straight from the ORM relation.
    if getattr(settings, 'DONT_HIT_DB', True):
        subproject_slugs = [
            data['slug']
            for data in apiv2.project(parent.pk).subprojects.get()['subprojects']
        ]
    else:
        subproject_slugs = [rel.child.slug for rel in parent.subprojects.all()]
    for raw_slug in subproject_slugs:
        # Also publish a dash-normalized alias for underscore slugs.
        aliases = [raw_slug]
        if '_' in raw_slug:
            aliases.append(raw_slug.replace('_', '-'))
        for slug in aliases:
            log.debug(LOG_TEMPLATE.format(
                project=parent.slug,
                version=version.slug,
                msg="Symlinking subproject: %s" % slug))
            # The directory for this specific subproject
            link_path = parent.subprojects_symlink_path(slug)
            run_on_app_servers(
                'mkdir -p %s' % '/'.join(link_path.split('/')[:-1]))
            # Where the actual docs live
            target_dir = os.path.join(settings.DOCROOT, slug, 'rtd-builds')
            run_on_app_servers('ln -nsf %s %s' % (target_dir, link_path))
def symlink_translations(version):
    """
    Link from HOME/user_builds/project/translations/<lang> ->
              HOME/user_builds/<project>/rtd-builds/
    """
    try:
        translations = apiv2.project(version.project.pk).translations.get()['translations']
        for translation_data in translations:
            translation = make_api_project(translation_data)
            # Get the first part of the symlink.
            base_path = version.project.translations_path(translation.language)
            translation_dir = translation.rtd_build_path(translation.slug)
            # Chop off the version from the end.
            translation_dir = '/'.join(translation_dir.split('/')[:-1])
            log.info("Symlinking %s" % translation.language)
            run_on_app_servers('mkdir -p %s' % '/'.join(base_path.split('/')[:-1]))
            run_on_app_servers('ln -nsf %s %s' % (translation_dir, base_path))
        # Hack in the en version for backwards compat
        base_path = version.project.translations_path('en')
        translation_dir = version.project.rtd_build_path(version.project.slug)
        # Chop off the version from the end.
        translation_dir = '/'.join(translation_dir.split('/')[:-1])
        run_on_app_servers('mkdir -p %s' % '/'.join(base_path.split('/')[:-1]))
        run_on_app_servers('ln -nsf %s %s' % (translation_dir, base_path))
    except Exception as e:
        # Fix: Python 3-compatible `except ... as` syntax (the file already
        # uses it elsewhere); the redundant trailing `pass` was dropped.
        # Don't fail on translation bits.
        log.error("Error in symlink_translations: %s" % e)
def symlink_translations(version):
    """
    Link from HOME/user_builds/project/translations/<lang> ->
              HOME/user_builds/<project>/rtd-builds/
    """
    try:
        translations = apiv2.project(
            version.project.pk).translations.get()['translations']
        for translation_data in translations:
            translation = make_api_project(translation_data)
            # Get the first part of the symlink.
            base_path = version.project.translations_path(translation.language)
            translation_dir = translation.rtd_build_path(translation.slug)
            # Chop off the version from the end.
            translation_dir = '/'.join(translation_dir.split('/')[:-1])
            log.info("Symlinking %s" % translation.language)
            run_on_app_servers('mkdir -p %s' % '/'.join(base_path.split('/')[:-1]))
            run_on_app_servers('ln -nsf %s %s' % (translation_dir, base_path))
        # Hack in the en version for backwards compat
        base_path = version.project.translations_path('en')
        translation_dir = version.project.rtd_build_path(version.project.slug)
        # Chop off the version from the end.
        translation_dir = '/'.join(translation_dir.split('/')[:-1])
        run_on_app_servers('mkdir -p %s' % '/'.join(base_path.split('/')[:-1]))
        run_on_app_servers('ln -nsf %s %s' % (translation_dir, base_path))
    except Exception as e:
        # Fix: Python 3-compatible `except ... as` syntax (the file already
        # uses it elsewhere); the redundant trailing `pass` was dropped.
        # Don't fail on translation bits.
        log.error("Error in symlink_translations: %s" % e)
def update_mirror_docs():
    """
    A periodic task used to update all projects that we mirror.
    """
    now = datetime.datetime.now()
    # Only record one build a day, at midnight.
    should_record = (now.hour, now.minute) == (0, 0)
    response = apiv2.project().get(mirror=True, page_size=500)
    for raw_project in response['results']:
        mirrored = make_api_project(raw_project)
        update_docs(pk=mirrored.pk, record=should_record)
def get_token_for_project(project, force_local=False):
    """Return a GitHub OAuth token for this project, or None.

    Looks the token up via the v2 API when DB access is disabled,
    otherwise queries SocialToken rows for the project's users.
    """
    token = None
    try:
        use_api = getattr(settings, 'DONT_HIT_DB', True) and not force_local
        if use_api:
            token = apiv2.project(project.pk).token().get()['token']
        else:
            # NOTE: no break -- the last user with a GitHub token wins,
            # matching the original behavior.
            for owner in project.users.all():
                matches = SocialToken.objects.filter(
                    account__user__username=owner.username,
                    app__provider='github')
                if matches.exists():
                    token = matches[0].token
    except Exception:
        log.error('Failed to get token for user', exc_info=True)
    return token
def get_token_for_project(project, force_local=False):
    """Return a GitHub OAuth token usable for *project*, or None.

    Prefers the v2 API lookup unless DB access is allowed or
    *force_local* is set, in which case SocialToken rows are scanned.
    """
    found = None
    try:
        if getattr(settings, "DONT_HIT_DB", True) and not force_local:
            found = apiv2.project(project.pk).token().get()["token"]
        else:
            for member in project.users.all():
                candidates = SocialToken.objects.filter(
                    account__user__username=member.username,
                    app__provider="github",
                )
                # Deliberately no break: later users overwrite earlier
                # ones, exactly as the original loop did.
                if candidates.exists():
                    found = candidates[0].token
    except Exception:
        log.error("Failed to get token for user", exc_info=True)
    return found
def get_token_for_project(project, force_local=False):
    """Return a GitHub OAuth token for *project*, or None.

    Short-circuits to None entirely when private repos are disabled.
    """
    # Guard clause: tokens are only relevant for private repositories.
    if not getattr(settings, 'ALLOW_PRIVATE_REPOS', False):
        return None
    token = None
    try:
        use_api = getattr(settings, 'DONT_HIT_DB', True) and not force_local
        if use_api:
            token = apiv2.project(project.pk).token().get()['token']
        else:
            # No break here on purpose -- the last matching user's first
            # token is returned, same as the original implementation.
            for member in project.users.all():
                candidates = SocialToken.objects.filter(
                    account__user__username=member.username,
                    app__provider='github')
                if candidates.exists():
                    token = candidates[0].token
    except Exception:
        log.error('Failed to get token for user', exc_info=True)
    return token
def update_static_metadata(project_pk):
    """Update static metadata JSON file

    Metadata settings include the following project settings:

    version
        The default version for the project, default: `latest`

    language
        The default language for the project, default: `en`

    languages
        List of languages built by linked translation projects.
    """
    project_base = apiv2.project(project_pk)
    project_data = project_base.get()
    project = make_api_project(project_data)
    log.info(LOG_TEMPLATE.format(
        project=project.slug,
        version='',
        msg='Updating static metadata',
    ))
    translations = project_base.translations.get()['translations']
    languages = set([
        translation['language']
        for translation in translations
        if 'language' in translation
    ])
    # Convert to JSON safe types
    metadata = {
        'version': project.default_version,
        'language': project.language,
        'languages': list(languages),
        'single_version': project.single_version,
    }
    try:
        path = project.static_metadata_path()
        # Fix: context manager guarantees the handle is closed even if
        # json.dump raises (the original leaked `fh` on failure).
        with open(path, 'w') as fh:
            json.dump(metadata, fh)
        copy_file_to_app_servers(path, path)
    except IOError as e:
        log.debug(LOG_TEMPLATE.format(
            project=project.slug,
            version='',
            msg='Cannot write to metadata.json: {0}'.format(e)
        ))
def update_static_metadata(project_pk):
    """Update static metadata JSON file

    Metadata settings include the following project settings:

    version
        The default version for the project, default: `latest`

    language
        The default language for the project, default: `en`

    languages
        List of languages built by linked translation projects.
    """
    project_base = apiv2.project(project_pk)
    project_data = project_base.get()
    project = make_api_project(project_data)
    log.info(
        LOG_TEMPLATE.format(
            project=project.slug,
            version='',
            msg='Updating static metadata',
        ))
    translations = project_base.translations.get()['translations']
    languages = set([
        translation['language']
        for translation in translations
        if 'language' in translation
    ])
    # Convert to JSON safe types
    metadata = {
        'version': project.default_version,
        'language': project.language,
        'languages': list(languages),
        'single_version': project.single_version,
    }
    try:
        path = project.static_metadata_path()
        # Fix: context manager guarantees the handle is closed even if
        # json.dump raises (the original leaked `fh` on failure).
        with open(path, 'w') as fh:
            json.dump(metadata, fh)
        copy_file_to_app_servers(path, path)
    except IOError as e:
        log.debug(
            LOG_TEMPLATE.format(
                project=project.slug,
                version='',
                msg='Cannot write to metadata.json: {0}'.format(e)))
def symlink_subprojects(version):
    """
    Publish each subproject's docs under the parent project.

    Creates HOME/user_builds/project/subprojects/<project> ->
            HOME/user_builds/<project>/rtd-builds/
    """
    parent = version.project
    # Subprojects
    subproject_list = apiv2.project(parent.pk).subprojects.get()['subprojects']
    for entry in subproject_list:
        slug = entry['slug']
        log.debug(LOG_TEMPLATE.format(
            project=parent.slug,
            version=version.slug,
            msg="Symlinking subproject: %s" % slug))
        # The directory for this specific subproject
        link_path = parent.subprojects_symlink_path(slug)
        parent_dir = '/'.join(link_path.split('/')[:-1])
        run_on_app_servers('mkdir -p %s' % parent_dir)
        # Where the actual docs live
        target_dir = os.path.join(settings.DOCROOT, slug, 'rtd-builds')
        run_on_app_servers('ln -nsf %s %s' % (target_dir, link_path))
def symlink_translations(version):
    """
    Link from HOME/user_builds/project/translations/<lang> ->
              HOME/user_builds/<project>/rtd-builds/
    """
    translations = apiv2.project(version.project.pk).translations.get()["translations"]
    for translation_data in translations:
        translation_slug = translation_data["slug"]
        translation_language = translation_data["language"]
        log.debug(
            LOG_TEMPLATE.format(
                project=version.project.slug,
                version=version.slug,
                msg="Symlinking translation: %s->%s" % (translation_language,
                                                        translation_slug),
            )
        )
        # The directory for this specific translation
        symlink = version.project.translations_symlink_path(translation_language)
        run_on_app_servers("mkdir -p %s" % "/".join(symlink.split("/")[:-1]))
        # Where the actual docs live
        docs_dir = os.path.join(settings.DOCROOT, translation_slug, "rtd-builds")
        run_on_app_servers("ln -nsf %s %s" % (docs_dir, symlink))
    # Hack in the en version for backwards compat
    symlink = version.project.translations_symlink_path("en")
    run_on_app_servers("mkdir -p %s" % "/".join(symlink.split("/")[:-1]))
    docs_dir = os.path.join(version.project.doc_path, "rtd-builds")
    run_on_app_servers("ln -nsf %s %s" % (docs_dir, symlink))
    # Add the main language project to nginx too
    # BUG FIX: original used `is not "en"` -- an identity check whose
    # outcome on string literals is implementation-defined.  Use `!=`
    # for a value comparison.
    if version.project.language != "en":
        symlink = version.project.translations_symlink_path(version.project.language)
        run_on_app_servers("mkdir -p %s" % "/".join(symlink.split("/")[:-1]))
        docs_dir = os.path.join(settings.DOCROOT, version.project.slug, "rtd-builds")
        run_on_app_servers("ln -nsf %s %s" % (docs_dir, symlink))
def update_imported_docs(version_pk, api=None):
    """
    Check out or update the given project's repository, then set up its
    virtualenv (when enabled) and sync tags/branches back to the API.

    :param version_pk: primary key of the Version to build.
    :param api: tastypie API client; defaults to the module-level client.
    """
    if api is None:
        api = tastyapi.api
    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project
    # Make Dirs
    if not os.path.exists(project.doc_path):
        os.makedirs(project.doc_path)
    with project.repo_lock(version, getattr(settings, 'REPO_LOCK_SECONDS', 30)):
        update_docs_output = {}
        if not project.vcs_repo():
            raise ProjectImportError(("Repo type '{0}' unknown"
                                      .format(project.repo_type)))
        # Get the actual code on disk
        if version:
            log.info(
                LOG_TEMPLATE.format(
                    project=project.slug,
                    version=version.slug,
                    msg='Checking out version {slug}: {identifier}'.format(
                        slug=version.slug,
                        identifier=version.identifier
                    )
                )
            )
            version_slug = version.slug
            version_repo = project.vcs_repo(version_slug)
            update_docs_output['checkout'] = version_repo.checkout(
                version.identifier
            )
        else:
            # Does this ever get called?
            log.info(LOG_TEMPLATE.format(project=project.slug,
                                         version=version.slug,
                                         msg='Updating to latest revision'))
            version_slug = 'latest'
            version_repo = project.vcs_repo(version_slug)
            update_docs_output['checkout'] = version_repo.update()
        # Do Virtualenv bits:
        if project.use_virtualenv:
            build_dir = os.path.join(project.venv_path(version=version_slug),
                                     'build')
            if os.path.exists(build_dir):
                log.info(LOG_TEMPLATE.format(
                    project=project.slug,
                    version=version.slug,
                    msg='Removing existing build dir'))
                shutil.rmtree(build_dir)
            if project.use_system_packages:
                site_packages = '--system-site-packages'
            else:
                site_packages = '--no-site-packages'
            # Here the command has been modified to support different
            # interpreters.
            update_docs_output['venv'] = run(
                '{cmd} {site_packages} {path}'.format(
                    cmd='virtualenv-2.7 -p {interpreter}'.format(
                        interpreter=project.python_interpreter),
                    site_packages=site_packages,
                    path=project.venv_path(version=version_slug)
                )
            )
            # Other code expects sphinx-build to be installed inside the
            # virtualenv.  Using the -I option makes sure it gets installed
            # even if it is already installed system-wide (and
            # --system-site-packages is used)
            if project.use_system_packages:
                ignore_option = '-I'
            else:
                ignore_option = ''
            if project.python_interpreter != 'python3':
                sphinx = 'sphinx==1.2'
                update_docs_output['sphinx'] = run(
                    ('{cmd} install -U {ignore_option} {sphinx} '
                     'virtualenv==1.10.1 setuptools==1.1 '
                     'docutils==0.11 git+git://github.com/ericholscher/readthedocs-sphinx-ext#egg=readthedocs_ext').format(
                        cmd=project.venv_bin(version=version_slug, bin='pip'),
                        sphinx=sphinx, ignore_option=ignore_option))
            else:
                sphinx = 'sphinx==1.2'
                # python 3 specific hax
                update_docs_output['sphinx'] = run(
                    ('{cmd} install -U {ignore_option} {sphinx} '
                     'virtualenv==1.9.1 docutils==0.11 git+git://github.com/ericholscher/readthedocs-sphinx-ext#egg=readthedocs_ext').format(
                        cmd=project.venv_bin(version=version_slug, bin='pip'),
                        sphinx=sphinx, ignore_option=ignore_option))
            if project.requirements_file:
                os.chdir(project.checkout_path(version_slug))
                update_docs_output['requirements'] = run(
                    '{cmd} install --exists-action=w -r {requirements}'.format(
                        cmd=project.venv_bin(version=version_slug, bin='pip'),
                        requirements=project.requirements_file))
            os.chdir(project.checkout_path(version_slug))
            if os.path.isfile("setup.py"):
                if getattr(settings, 'USE_PIP_INSTALL', False):
                    update_docs_output['install'] = run(
                        '{cmd} install --ignore-installed .'.format(
                            cmd=project.venv_bin(version=version_slug,
                                                 bin='pip')))
                else:
                    update_docs_output['install'] = run(
                        '{cmd} setup.py install --force'.format(
                            cmd=project.venv_bin(version=version_slug,
                                                 bin='python')))
            else:
                update_docs_output['install'] = (999, "",
                                                 "No setup.py, skipping install")
        # Update tags/version
        version_post_data = {'repo': version_repo.repo_url}
        if version_repo.supports_tags:
            version_post_data['tags'] = [
                {'identifier': v.identifier,
                 'verbose_name': v.verbose_name,
                 } for v in version_repo.tags
            ]
        if version_repo.supports_branches:
            version_post_data['branches'] = [
                {'identifier': v.identifier,
                 'verbose_name': v.verbose_name,
                 } for v in version_repo.branches
            ]
        try:
            apiv2.project(project.pk).sync_versions.post(version_post_data)
        except Exception as e:
            # Fixes: Python 3-compatible `except ... as` syntax, log instead
            # of a bare Py2 print, deprecated `e.message`, and the
            # "Verisons" typo in the message.
            log.error("Sync Versions Exception: %s" % e)
def update_imported_docs(version_pk, api=None):
    """
    Check out or update the given project's repository, verify a Sphinx
    conf file exists, set up the virtualenv (when enabled), and sync
    tags/branches back to the API.

    :param version_pk: primary key of the Version to build.
    :param api: tastypie API client; defaults to the module-level client.
    """
    if api is None:
        api = tastyapi.api
    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project
    # Make Dirs
    if not os.path.exists(project.doc_path):
        os.makedirs(project.doc_path)
    with project.repo_lock(getattr(settings, 'REPO_LOCK_SECONDS', 30)):
        update_docs_output = {}
        if not project.vcs_repo():
            raise ProjectImportError(
                ("Repo type '{0}' unknown".format(project.repo_type)))
        # Get the actual code on disk
        if version:
            log.info(
                LOG_TEMPLATE.format(
                    project=project.slug,
                    version=version.slug,
                    msg='Checking out version {slug}: {identifier}'.format(
                        slug=version.slug,
                        identifier=version.identifier)))
            version_slug = version.slug
            version_repo = project.vcs_repo(version_slug)
            update_docs_output['checkout'] = version_repo.checkout(
                version.identifier)
        else:
            # Does this ever get called?
            log.info(
                LOG_TEMPLATE.format(project=project.slug,
                                    version=version.slug,
                                    msg='Updating to latest revision'))
            version_slug = 'latest'
            version_repo = project.vcs_repo(version_slug)
            update_docs_output['checkout'] = version_repo.update()
        # Ensure we have a conf file (an exception is raised if not)
        project.conf_file(version.slug)
        # Do Virtualenv bits:
        if project.use_virtualenv:
            build_dir = os.path.join(project.venv_path(version=version_slug),
                                     'build')
            if os.path.exists(build_dir):
                log.info(
                    LOG_TEMPLATE.format(project=project.slug,
                                        version=version.slug,
                                        msg='Removing existing build dir'))
                shutil.rmtree(build_dir)
            if project.use_system_packages:
                site_packages = '--system-site-packages'
            else:
                site_packages = '--no-site-packages'
            # Here the command has been modified to support different
            # interpreters.
            update_docs_output['venv'] = run(
                '{cmd} {site_packages} {path}'.format(
                    cmd='virtualenv-2.7 -p {interpreter}'.format(
                        interpreter=project.python_interpreter),
                    site_packages=site_packages,
                    path=project.venv_path(version=version_slug)))
            # Other code expects sphinx-build to be installed inside the
            # virtualenv.  Using the -I option makes sure it gets installed
            # even if it is already installed system-wide (and
            # --system-site-packages is used)
            if project.use_system_packages:
                ignore_option = '-I'
            else:
                ignore_option = ''
            if project.python_interpreter != 'python3':
                sphinx = 'sphinx==1.1.3'
                update_docs_output['sphinx'] = run((
                    '{cmd} install -U {ignore_option} {sphinx} '
                    'virtualenv==1.10.1 setuptools==1.1 '
                    'docutils==0.11 git+git://github.com/ericholscher/readthedocs-sphinx-ext#egg=readthedocs_ext'
                ).format(cmd=project.venv_bin(version=version_slug, bin='pip'),
                         sphinx=sphinx, ignore_option=ignore_option))
            else:
                sphinx = 'sphinx==1.1.3'
                # python 3 specific hax
                update_docs_output['sphinx'] = run((
                    '{cmd} install -U {ignore_option} {sphinx} '
                    'virtualenv==1.9.1 docutils==0.11 git+git://github.com/ericholscher/readthedocs-sphinx-ext#egg=readthedocs_ext'
                ).format(cmd=project.venv_bin(version=version_slug, bin='pip'),
                         sphinx=sphinx, ignore_option=ignore_option))
            if project.requirements_file:
                os.chdir(project.checkout_path(version_slug))
                update_docs_output['requirements'] = run(
                    '{cmd} install --exists-action=w -r {requirements}'.format(
                        cmd=project.venv_bin(version=version_slug, bin='pip'),
                        requirements=project.requirements_file))
            os.chdir(project.checkout_path(version_slug))
            if os.path.isfile("setup.py"):
                if getattr(settings, 'USE_PIP_INSTALL', False):
                    update_docs_output['install'] = run(
                        '{cmd} install --ignore-installed .'.format(
                            cmd=project.venv_bin(version=version_slug,
                                                 bin='pip')))
                else:
                    update_docs_output['install'] = run(
                        '{cmd} setup.py install --force'.format(
                            cmd=project.venv_bin(version=version_slug,
                                                 bin='python')))
            else:
                update_docs_output['install'] = (
                    999, "", "No setup.py, skipping install")
        # Update tags/version
        version_post_data = {'repo': version_repo.repo_url}
        if version_repo.supports_tags:
            version_post_data['tags'] = [{
                'identifier': v.identifier,
                'verbose_name': v.verbose_name,
            } for v in version_repo.tags]
        if version_repo.supports_branches:
            version_post_data['branches'] = [{
                'identifier': v.identifier,
                'verbose_name': v.verbose_name,
            } for v in version_repo.branches]
        try:
            apiv2.project(project.pk).sync_versions.post(version_post_data)
        except Exception as e:
            # Fixes: Python 3-compatible `except ... as` syntax, log instead
            # of a bare Py2 print, deprecated `e.message`, and the
            # "Verisons" typo in the message.
            log.error("Sync Versions Exception: %s" % e)
def update_imported_docs(version_pk, api=None):
    """
    Check out or update the given project's repository, emitting the
    before_vcs/after_vcs signals around the checkout, then sync
    tags/branches back to the API.

    :param version_pk: primary key of the Version to check out.
    :param api: tastypie API client; defaults to the module-level client.
    """
    if api is None:
        api = tastyapi.api
    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project
    ret_dict = {}
    # Make Dirs
    if not os.path.exists(project.doc_path):
        os.makedirs(project.doc_path)
    if not project.vcs_repo():
        raise ProjectImportError(
            ("Repo type '{0}' unknown".format(project.repo_type)))
    with project.repo_nonblockinglock(
            version=version,
            max_lock_age=getattr(settings, 'REPO_LOCK_SECONDS', 30)):
        before_vcs.send(sender=version)
        # Get the actual code on disk
        if version:
            log.info(
                LOG_TEMPLATE.format(
                    project=project.slug,
                    version=version.slug,
                    msg='Checking out version {slug}: {identifier}'.format(
                        slug=version.slug,
                        identifier=version.identifier)))
            version_slug = version.slug
            version_repo = project.vcs_repo(version_slug)
            ret_dict['checkout'] = version_repo.checkout(version.identifier)
        else:
            # Does this ever get called?
            log.info(
                LOG_TEMPLATE.format(project=project.slug,
                                    version=version.slug,
                                    msg='Updating to latest revision'))
            version_slug = 'latest'
            version_repo = project.vcs_repo(version_slug)
            ret_dict['checkout'] = version_repo.update()
        after_vcs.send(sender=version)
        # Update tags/version
        version_post_data = {'repo': version_repo.repo_url}
        if version_repo.supports_tags:
            version_post_data['tags'] = [{
                'identifier': v.identifier,
                'verbose_name': v.verbose_name,
            } for v in version_repo.tags]
        if version_repo.supports_branches:
            version_post_data['branches'] = [{
                'identifier': v.identifier,
                'verbose_name': v.verbose_name,
            } for v in version_repo.branches]
        try:
            apiv2.project(project.pk).sync_versions.post(version_post_data)
        except Exception as e:
            # Fixes: Python 3-compatible `except ... as` syntax, log
            # instead of a bare Py2 print, and deprecated `e.message`.
            log.error("Sync Versions Exception: %s" % e)
def update_imported_docs(version_pk, api=None):
    """
    Check out or update the given project's repository, verify a Sphinx
    conf file exists, set up the virtualenv (when enabled), and sync
    tags/branches back to the API.

    :param version_pk: primary key of the Version to build.
    :param api: tastypie API client; defaults to the module-level client.
    """
    if api is None:
        api = tastyapi.api
    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project
    # Make Dirs
    if not os.path.exists(project.doc_path):
        os.makedirs(project.doc_path)
    with project.repo_lock(getattr(settings, "REPO_LOCK_SECONDS", 30)):
        update_docs_output = {}
        if not project.vcs_repo():
            raise ProjectImportError(
                ("Repo type '{0}' unknown".format(project.repo_type)))
        # Get the actual code on disk
        if version:
            log.info(
                LOG_TEMPLATE.format(
                    project=project.slug,
                    version=version.slug,
                    msg="Checking out version {slug}: {identifier}".format(
                        slug=version.slug, identifier=version.identifier
                    ),
                )
            )
            version_slug = version.slug
            version_repo = project.vcs_repo(version_slug)
            update_docs_output["checkout"] = version_repo.checkout(
                version.identifier)
        else:
            # Does this ever get called?
            log.info(LOG_TEMPLATE.format(project=project.slug,
                                         version=version.slug,
                                         msg="Updating to latest revision"))
            version_slug = "latest"
            version_repo = project.vcs_repo(version_slug)
            update_docs_output["checkout"] = version_repo.update()
        # Ensure we have a conf file (an exception is raised if not).
        # Fix: the original wrapped this in `try/except ProjectImportError:
        # raise`, which re-raised unchanged -- a no-op removed here.
        project.conf_file(version.slug)
        # Do Virtualenv bits:
        if project.use_virtualenv:
            build_dir = os.path.join(
                project.venv_path(version=version_slug), "build")
            if os.path.exists(build_dir):
                log.info(
                    LOG_TEMPLATE.format(project=project.slug,
                                        version=version.slug,
                                        msg="Removing existing build dir")
                )
                shutil.rmtree(build_dir)
            if project.use_system_packages:
                site_packages = "--system-site-packages"
            else:
                site_packages = "--no-site-packages"
            # Here the command has been modified to support different
            # interpreters.
            update_docs_output["venv"] = run(
                "{cmd} {site_packages} {path}".format(
                    cmd="virtualenv-2.7 -p {interpreter}".format(
                        interpreter=project.python_interpreter),
                    site_packages=site_packages,
                    path=project.venv_path(version=version_slug),
                )
            )
            # Other code expects sphinx-build to be installed inside the
            # virtualenv.  Using the -I option makes sure it gets installed
            # even if it is already installed system-wide (and
            # --system-site-packages is used)
            if project.use_system_packages:
                ignore_option = "-I"
            else:
                ignore_option = ""
            if project.python_interpreter != "python3":
                sphinx = "sphinx==1.2"
                update_docs_output["sphinx"] = run(
                    (
                        "{cmd} install -U {ignore_option} {sphinx} "
                        "virtualenv==1.10.1 setuptools==1.1 "
                        "docutils==0.11 git+git://github.com/ericholscher/readthedocs-sphinx-ext#egg=readthedocs_ext"
                    ).format(
                        cmd=project.venv_bin(version=version_slug, bin="pip"),
                        sphinx=sphinx,
                        ignore_option=ignore_option,
                    )
                )
            else:
                sphinx = "sphinx==1.2"
                # python 3 specific hax
                update_docs_output["sphinx"] = run(
                    (
                        "{cmd} install -U {ignore_option} {sphinx} "
                        "virtualenv==1.9.1 docutils==0.11 git+git://github.com/ericholscher/readthedocs-sphinx-ext#egg=readthedocs_ext"
                    ).format(
                        cmd=project.venv_bin(version=version_slug, bin="pip"),
                        sphinx=sphinx,
                        ignore_option=ignore_option,
                    )
                )
            if project.requirements_file:
                os.chdir(project.checkout_path(version_slug))
                update_docs_output["requirements"] = run(
                    "{cmd} install --exists-action=w -r {requirements}".format(
                        cmd=project.venv_bin(version=version_slug, bin="pip"),
                        requirements=project.requirements_file
                    )
                )
            os.chdir(project.checkout_path(version_slug))
            if os.path.isfile("setup.py"):
                if getattr(settings, "USE_PIP_INSTALL", False):
                    update_docs_output["install"] = run(
                        "{cmd} install --ignore-installed .".format(
                            cmd=project.venv_bin(version=version_slug,
                                                 bin="pip")
                        )
                    )
                else:
                    update_docs_output["install"] = run(
                        "{cmd} setup.py install --force".format(
                            cmd=project.venv_bin(version=version_slug,
                                                 bin="python")
                        )
                    )
            else:
                update_docs_output["install"] = (
                    999, "", "No setup.py, skipping install")
        # Update tags/version
        version_post_data = {"repo": version_repo.repo_url}
        if version_repo.supports_tags:
            version_post_data["tags"] = [
                {"identifier": v.identifier, "verbose_name": v.verbose_name}
                for v in version_repo.tags
            ]
        if version_repo.supports_branches:
            version_post_data["branches"] = [
                {"identifier": v.identifier, "verbose_name": v.verbose_name}
                for v in version_repo.branches
            ]
        try:
            apiv2.project(project.pk).sync_versions.post(version_post_data)
        except Exception as e:
            # Fixes: Python 3-compatible `except ... as` syntax, log instead
            # of a bare Py2 print, deprecated `e.message`, and the
            # "Verisons" typo in the message.
            log.error("Sync Versions Exception: %s" % e)