def sync_versions(self, version_repo):
    """
    Update tags/branches by hitting the API.

    Posting to the ``sync_versions`` endpoint may trigger a new build
    for the stable version.
    """
    payload = {'repo': version_repo.repo_url}
    # Only include ref kinds the VCS backend actually supports.
    for kind, supported in (('tags', version_repo.supports_tags),
                            ('branches', version_repo.supports_branches)):
        if supported:
            payload[kind] = [
                {'identifier': ref.identifier,
                 'verbose_name': ref.verbose_name}
                for ref in getattr(version_repo, kind)
            ]
    self.validate_duplicate_reserved_versions(payload)
    try:
        api_v2.project(self.project.pk).sync_versions.post(payload)
    except HttpClientError:
        log.exception('Sync Versions Exception')
    except Exception:
        log.exception('Unknown Sync Versions Exception')
def set_valid_clone(self):
    """Mark on the project that it has been cloned properly."""
    data = api_v2.project(self.project.pk).get()
    data['has_valid_clone'] = True
    api_v2.project(self.project.pk).put(data)
    # Mirror the persisted flag on both in-memory project references.
    self.project.has_valid_clone = True
    self.version.project.has_valid_clone = True
def sync_repo(self): """Update the project's repository and hit ``sync_versions`` API.""" # Make Dirs if not os.path.exists(self.project.doc_path): os.makedirs(self.project.doc_path) if not self.project.vcs_repo(): raise RepositoryError( _('Repository type "{repo_type}" unknown').format( repo_type=self.project.repo_type, ), ) with self.project.repo_nonblockinglock(version=self.version): # Get the actual code on disk try: before_vcs.send(sender=self.version) msg = 'Checking out version {slug}: {identifier}'.format( slug=self.version.slug, identifier=self.version.identifier, ) log.info( LOG_TEMPLATE.format( project=self.project.slug, version=self.version.slug, msg=msg, )) version_repo = self.get_vcs_repo() version_repo.checkout(self.version.identifier) finally: after_vcs.send(sender=self.version) # Update tags/version version_post_data = {'repo': version_repo.repo_url} if version_repo.supports_tags: version_post_data['tags'] = [{ 'identifier': v.identifier, 'verbose_name': v.verbose_name, } for v in version_repo.tags] if version_repo.supports_branches: version_post_data['branches'] = [{ 'identifier': v.identifier, 'verbose_name': v.verbose_name, } for v in version_repo.branches] self.validate_duplicate_reserved_versions(version_post_data) try: # Hit the API ``sync_versions`` which may trigger a new build # for the stable version api_v2.project( self.project.pk).sync_versions.post(version_post_data) except HttpClientError: log.exception('Sync Versions Exception') except Exception: log.exception('Unknown Sync Versions Exception')
def update_documentation_type(version): """ Automatically determine the doc type for a user. """ # Keep this here for any 'auto' projects. ret = 'sphinx' project_data = api_v2.project(version.project.pk).get() project_data['documentation_type'] = ret api_v2.project(version.project.pk).put(project_data) version.project.documentation_type = ret
def update_documentation_type(self):
    """
    Force Sphinx for 'auto' documentation type.

    This used to detect the markup language and pick Sphinx for rST or
    Mkdocs for Markdown. Since Sphinx gained Markdown support it simply
    forces Sphinx.
    """
    doc_type = 'sphinx'
    project_data = api_v2.project(self.project.pk).get()
    project_data['documentation_type'] = doc_type
    api_v2.project(self.project.pk).put(project_data)
    # Keep the in-memory project consistent with the stored value.
    self.project.documentation_type = doc_type
def symlink_subprojects(version):
    """Symlink project subprojects

    Link from HOME/user_builds/project/subprojects/<project> ->
    HOME/user_builds/<project>/rtd-builds/
    """
    project = version.project
    # Subprojects
    if getattr(settings, 'DONT_HIT_DB', True):
        response = api.project(project.pk).subprojects.get()
        subproject_slugs = [data['slug'] for data in response['subprojects']]
    else:
        subproject_slugs = [
            rel.child.slug for rel in project.subprojects.all()
        ]
    for slug in subproject_slugs:
        # Also link the dash-separated spelling of underscored slugs.
        candidates = [slug]
        if '_' in slug:
            candidates.append(slug.replace('_', '-'))
        for subproject_slug in candidates:
            log.debug(LOG_TEMPLATE.format(
                project=project.slug,
                version=version.slug,
                msg="Symlinking subproject: %s" % subproject_slug))
            # The directory for this specific subproject
            symlink = project.subprojects_symlink_path(subproject_slug)
            parent_dir = '/'.join(symlink.split('/')[:-1])
            run_on_app_servers('mkdir -p %s' % parent_dir)
            # Where the actual docs live
            docs_dir = os.path.join(
                settings.DOCROOT, subproject_slug, 'rtd-builds')
            run_on_app_servers('ln -nsf %s %s' % (docs_dir, symlink))
def symlink_translations(version):
    """
    Symlink project translations.

    Link from HOME/user_builds/project/translations/<lang> ->
    HOME/user_builds/<project>/rtd-builds/

    :param version: version whose project's translations are linked
    """
    translations = {}
    if getattr(settings, 'DONT_HIT_DB', True):
        for trans in (api.project(
                version.project.pk).translations.get()['translations']):
            translations[trans['language']] = trans['slug']
    else:
        for trans in version.project.translations.all():
            translations[trans.language] = trans.slug

    # Default language, and pointer for 'en'
    version_slug = version.project.slug.replace('_', '-')
    translations[version.project.language] = version_slug
    # ``dict.has_key`` was removed in Python 3; ``in`` is the portable
    # (and Python-2-compatible) membership test.
    if 'en' not in translations:
        translations['en'] = version_slug

    run_on_app_servers('mkdir -p {0}'.format(
        os.path.join(version.project.doc_path, 'translations')))
    for (language, slug) in translations.items():
        log.debug(
            LOG_TEMPLATE.format(project=version.project.slug,
                                version=version.slug,
                                msg="Symlinking translation: %s->%s" %
                                (language, slug)))
        # The directory for this specific translation
        symlink = version.project.translations_symlink_path(language)
        translation_path = os.path.join(settings.DOCROOT, slug, 'rtd-builds')
        run_on_app_servers('ln -nsf {0} {1}'.format(translation_path, symlink))
def symlink_subprojects(version):
    """
    Link from HOME/user_builds/project/subprojects/<project> ->
    HOME/user_builds/<project>/rtd-builds/
    """
    # Subprojects
    if getattr(settings, 'DONT_HIT_DB', True):
        payload = api.project(version.project.pk).subprojects.get()
        subproject_slugs = [item['slug'] for item in payload['subprojects']]
    else:
        subproject_slugs = [
            rel.child.slug for rel in version.project.subprojects.all()
        ]
    for slug in subproject_slugs:
        # Link both the original slug and, when it contains underscores,
        # the dash-separated variant.
        variants = [slug]
        if '_' in slug:
            variants.append(slug.replace('_', '-'))
        for subproject_slug in variants:
            log.debug(
                LOG_TEMPLATE.format(project=version.project.slug,
                                    version=version.slug,
                                    msg="Symlinking subproject: %s" %
                                    subproject_slug))
            # The directory for this specific subproject
            symlink = version.project.subprojects_symlink_path(subproject_slug)
            containing_dir = '/'.join(symlink.split('/')[:-1])
            run_on_app_servers('mkdir -p %s' % containing_dir)
            # Where the actual docs live
            docs_dir = os.path.join(settings.DOCROOT, subproject_slug,
                                    'rtd-builds')
            run_on_app_servers('ln -nsf %s %s' % (docs_dir, symlink))
def api_versions(self):
    """Return this project's active versions, fetched via the API."""
    from readthedocs.builds.models import APIVersion
    raw_versions = api.project(self.pk).active_versions.get()['versions']
    versions = [APIVersion(**data) for data in raw_versions]
    return sort_version_aware(versions)
def get_subproject_urls(self):
    """List subproject URLs

    This is used in search result linking
    """
    if getattr(settings, 'DONT_HIT_DB', True):
        subprojects = api.project(self.pk).subprojects().get()['subprojects']
        return [(item['slug'], item['canonical_url'])
                for item in subprojects]
    return [
        (rel.child.slug, rel.child.get_docs_url())
        for rel in self.subprojects.all()
    ]
def get_subproject_urls(self):
    """
    List subproject URLs.

    This is used in search result linking
    """
    # NOTE: reads settings.DONT_HIT_DB directly (raises if unset).
    if settings.DONT_HIT_DB:
        data = api.project(self.pk).subprojects().get()['subprojects']
        return [(item['slug'], item['canonical_url']) for item in data]
    return [
        (rel.child.slug, rel.child.get_docs_url())
        for rel in self.subprojects.all()
    ]
def get_subproject_urls(self):
    """List subproject URLs

    This is used in search result linking
    """
    if not getattr(settings, "DONT_HIT_DB", True):
        return [(rel.child.slug, rel.child.get_docs_url())
                for rel in self.subprojects.all()]
    subprojects = apiv2.project(self.pk).subprojects().get()["subprojects"]
    return [(item["slug"], item["canonical_url"]) for item in subprojects]
def get_token_for_project(project, force_local=False):
    """Return a GitHub token for one of the project's users, or None."""
    # Tokens are only relevant when private repos are allowed.
    if not getattr(settings, "ALLOW_PRIVATE_REPOS", False):
        return None
    token = None
    try:
        use_api = getattr(settings, "DONT_HIT_DB", True) and not force_local
        if use_api:
            token = api.project(project.pk).token().get()["token"]
        else:
            # Scans every user; the last user with a token wins.
            for user in project.users.all():
                matches = SocialToken.objects.filter(
                    account__user__username=user.username,
                    app__provider="github",
                )
                if matches.exists():
                    token = matches[0].token
    except Exception:
        log.error("Failed to get token for user", exc_info=True)
    return token
def get_token_for_project(project, force_local=False):
    """
    Return a GitHub OAuth token for one of ``project``'s users.

    Returns ``None`` when private repos are disabled or no token is
    found.

    :param project: project whose users are scanned for a token
    :param force_local: skip the API and query the local DB directly
    """
    if not getattr(settings, 'ALLOW_PRIVATE_REPOS', False):
        return None
    token = None
    try:
        if getattr(settings, 'DONT_HIT_DB', True) and not force_local:
            token = api.project(project.pk).token().get()['token']
        else:
            # Scans every user; the last user with a token wins.
            for user in project.users.all():
                tokens = SocialToken.objects.filter(
                    account__user__username=user.username,
                    app__provider='github')
                if tokens.exists():
                    token = tokens[0].token
    except Exception:
        # ``log.exception`` is the idiomatic spelling of
        # ``log.error(..., exc_info=True)`` inside an except block.
        log.exception('Failed to get token for user')
    return token
def get_token_for_project(cls, project, force_local=False):
    """Get access token for project by iterating over project users"""
    # TODO why does this only target GitHub?
    if not getattr(settings, 'ALLOW_PRIVATE_REPOS', False):
        return None
    token = None
    try:
        prefer_api = (
            getattr(settings, 'DONT_HIT_DB', True) and not force_local
        )
        if prefer_api:
            token = api.project(project.pk).token().get()['token']
        else:
            # Keep scanning all users; the last match wins.
            for user in project.users.all():
                found = SocialToken.objects.filter(
                    account__user=user,
                    app__provider=cls.adapter.provider_id,
                )
                if found.exists():
                    token = found[0].token
    except Exception:
        log.error('Failed to get token for user', exc_info=True)
    return token
def get_token_for_project(cls, project, force_local=False):
    """Get access token for project by iterating over project users"""
    # TODO why does this only target GitHub?
    if not getattr(settings, 'ALLOW_PRIVATE_REPOS', False):
        return None
    token = None
    try:
        if getattr(settings, 'DONT_HIT_DB', True) and not force_local:
            token = api.project(project.pk).token().get()['token']
        else:
            for user in project.users.all():
                # Filter by the provider this service class adapts.
                candidate_tokens = SocialToken.objects.filter(
                    account__user=user,
                    app__provider=cls.adapter.provider_id,
                )
                if candidate_tokens.exists():
                    token = candidate_tokens[0].token
    except Exception:
        log.error('Failed to get token for project', exc_info=True)
    return token
def update_imported_docs(version_pk):
    """
    Check out or update the given project's repository.

    :param version_pk: Version id to update
    :returns: dict with a ``'checkout'`` entry holding the VCS result
    :raises ProjectImportError: when the repo type has no VCS backend
    """
    version_data = api_v2.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project
    ret_dict = {}

    # Make Dirs
    if not os.path.exists(project.doc_path):
        os.makedirs(project.doc_path)

    if not project.vcs_repo():
        raise ProjectImportError(
            ("Repo type '{0}' unknown".format(project.repo_type)))

    with project.repo_nonblockinglock(
            version=version,
            max_lock_age=getattr(settings, 'REPO_LOCK_SECONDS', 30)):
        # Get the actual code on disk.
        # (The former ``except Exception: raise`` clause was a no-op and
        # has been dropped; ``finally`` still guarantees the signal.)
        try:
            before_vcs.send(sender=version)
            if version:
                log.info(
                    LOG_TEMPLATE.format(
                        project=project.slug,
                        version=version.slug,
                        msg='Checking out version {slug}: {identifier}'.format(
                            slug=version.slug,
                            identifier=version.identifier)))
                version_slug = version.slug
                version_repo = project.vcs_repo(version_slug)
                ret_dict['checkout'] = version_repo.checkout(
                    version.identifier)
            else:
                # Does this ever get called?
                log.info(
                    LOG_TEMPLATE.format(project=project.slug,
                                        version=version.slug,
                                        msg='Updating to latest revision'))
                version_slug = LATEST
                version_repo = project.vcs_repo(version_slug)
                ret_dict['checkout'] = version_repo.update()
        finally:
            after_vcs.send(sender=version)

        # Update tags/version
        version_post_data = {'repo': version_repo.repo_url}
        if version_repo.supports_tags:
            version_post_data['tags'] = [{
                'identifier': v.identifier,
                'verbose_name': v.verbose_name,
            } for v in version_repo.tags]
        if version_repo.supports_branches:
            version_post_data['branches'] = [{
                'identifier': v.identifier,
                'verbose_name': v.verbose_name,
            } for v in version_repo.branches]

        try:
            api_v2.project(project.pk).sync_versions.post(version_post_data)
        except HttpClientError as e:
            log.error("Sync Versions Exception: %s", e.content)
        except Exception:
            # Catch-all boundary: sync failure must not fail the task.
            log.error("Unknown Sync Versions Exception", exc_info=True)
    return ret_dict
def get_project(project_pk):
    """Get project from API"""
    data = api_v2.project(project_pk).get()
    return make_api_project(data)
def get_canonical_url(self):
    """Return the canonical documentation URL for this project."""
    if not getattr(settings, 'DONT_HIT_DB', True):
        return self.get_docs_url()
    return api.project(self.pk).canonical_url().get()['url']
def sync_repo(self): """Update the project's repository and hit ``sync_versions`` API.""" # Make Dirs if not os.path.exists(self.project.doc_path): os.makedirs(self.project.doc_path) if not self.project.vcs_repo(): raise RepositoryError( _('Repository type "{repo_type}" unknown').format( repo_type=self.project.repo_type, ), ) with self.project.repo_nonblockinglock( version=self.version, max_lock_age=getattr(settings, 'REPO_LOCK_SECONDS', 30)): # Get the actual code on disk try: before_vcs.send(sender=self.version) self._log( 'Checking out version {slug}: {identifier}'.format( slug=self.version.slug, identifier=self.version.identifier, ), ) version_repo = self.project.vcs_repo( self.version.slug, # When called from ``SyncRepositoryTask.run`` we don't have # a ``setup_env`` so we use just ``None`` and commands won't # be recorded getattr(self, 'setup_env', None), ) version_repo.checkout(self.version.identifier) finally: after_vcs.send(sender=self.version) # Update tags/version version_post_data = {'repo': version_repo.repo_url} if version_repo.supports_tags: version_post_data['tags'] = [ {'identifier': v.identifier, 'verbose_name': v.verbose_name, } for v in version_repo.tags ] if version_repo.supports_branches: version_post_data['branches'] = [ {'identifier': v.identifier, 'verbose_name': v.verbose_name, } for v in version_repo.branches ] try: # Hit the API ``sync_versions`` which may trigger a new build # for the stable version api_v2.project(self.project.pk).sync_versions.post(version_post_data) except HttpClientError: log.exception('Sync Versions Exception') except Exception: log.exception('Unknown Sync Versions Exception')
def get_project(project_pk):
    """Get project from API."""
    return APIProject(**api_v2.project(project_pk).get())
def update_imported_docs(version_pk):
    """
    Check out or update the given project's repository.

    :param version_pk: Version id to update
    :raises ProjectImportError: when the repo type has no VCS backend
    """
    version_data = api_v1.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project
    ret_dict = {}

    # Make Dirs
    if not os.path.exists(project.doc_path):
        os.makedirs(project.doc_path)

    if not project.vcs_repo():
        raise ProjectImportError(
            ("Repo type '{0}' unknown".format(project.repo_type)))

    with project.repo_nonblockinglock(
            version=version,
            max_lock_age=getattr(settings, 'REPO_LOCK_SECONDS', 30)):
        before_vcs.send(sender=version)
        # Get the actual code on disk
        if version:
            log.info(
                LOG_TEMPLATE.format(
                    project=project.slug,
                    version=version.slug,
                    msg='Checking out version {slug}: {identifier}'.format(
                        slug=version.slug,
                        identifier=version.identifier
                    )
                )
            )
            version_slug = version.slug
            version_repo = project.vcs_repo(version_slug)
            ret_dict['checkout'] = version_repo.checkout(
                version.identifier,
            )
        else:
            # Does this ever get called?
            log.info(LOG_TEMPLATE.format(
                project=project.slug,
                version=version.slug,
                msg='Updating to latest revision'))
            version_slug = LATEST
            version_repo = project.vcs_repo(version_slug)
            ret_dict['checkout'] = version_repo.update()
        after_vcs.send(sender=version)

        # Update tags/version
        version_post_data = {'repo': version_repo.repo_url}
        if version_repo.supports_tags:
            version_post_data['tags'] = [
                {'identifier': v.identifier,
                 'verbose_name': v.verbose_name,
                 } for v in version_repo.tags
            ]
        if version_repo.supports_branches:
            version_post_data['branches'] = [
                {'identifier': v.identifier,
                 'verbose_name': v.verbose_name,
                 } for v in version_repo.branches
            ]

        try:
            api_v2.project(project.pk).sync_versions.post(version_post_data)
        except Exception as e:
            # ``except Exception, e`` and the ``print`` statement were
            # Python-2-only syntax; ``e.message`` is deprecated. Log
            # through the module logger instead of stdout.
            log.error("Sync Versions Exception: %s", e, exc_info=True)
    # NOTE(review): ret_dict is built but not returned in the visible
    # code — confirm against the full file whether callers expect it.
def api_versions(self):
    """Return this project's active versions, fetched via the API."""
    data = api.project(self.pk).active_versions.get()['versions']
    versions = [make_api_version(item) for item in data]
    return sort_version_aware(versions)
def sync_repo(self):
    """Update the project's repository and hit ``sync_versions`` API."""
    # Ensure the project's documentation directory exists on disk.
    if not os.path.exists(self.project.doc_path):
        os.makedirs(self.project.doc_path)

    if not self.project.vcs_repo():
        raise RepositoryError(
            _('Repository type "{repo_type}" unknown').format(
                repo_type=self.project.repo_type,
            ),
        )

    with self.project.repo_nonblockinglock(
            version=self.version,
            max_lock_age=getattr(settings, 'REPO_LOCK_SECONDS', 30)):
        try:
            # Get the actual code on disk.
            before_vcs.send(sender=self.version)
            self._log(
                'Checking out version {slug}: {identifier}'.format(
                    slug=self.version.slug,
                    identifier=self.version.identifier,
                ),
            )
            version_repo = self.project.vcs_repo(
                self.version.slug,
                # When called from ``SyncRepositoryTask.run`` we don't have
                # a ``setup_env`` so we use just ``None`` and commands won't
                # be recorded
                getattr(self, 'setup_env', None),
            )
            version_repo.checkout(self.version.identifier)
        finally:
            after_vcs.send(sender=self.version)

        # Build the tags/branches payload, including only the ref kinds
        # this VCS backend supports.
        version_post_data = {'repo': version_repo.repo_url}
        for key, supported in (('tags', version_repo.supports_tags),
                               ('branches', version_repo.supports_branches)):
            if supported:
                version_post_data[key] = [
                    {'identifier': ref.identifier,
                     'verbose_name': ref.verbose_name}
                    for ref in getattr(version_repo, key)
                ]

        self.validate_duplicate_reserved_versions(version_post_data)

        try:
            # Hit the API ``sync_versions`` which may trigger a new build
            # for the stable version
            api_v2.project(self.project.pk).sync_versions.post(
                version_post_data)
        except HttpClientError:
            log.exception('Sync Versions Exception')
        except Exception:
            log.exception('Unknown Sync Versions Exception')
def get_canonical_url(self):
    """Return the canonical documentation URL for this project."""
    # NOTE: reads settings.DONT_HIT_DB directly (raises if unset).
    if not settings.DONT_HIT_DB:
        return self.get_docs_url()
    return api.project(self.pk).canonical_url().get()['url']
def get_canonical_url(self):
    """Return the canonical documentation URL for this project."""
    if not getattr(settings, 'DONT_HIT_DB', True):
        return self.get_docs_url()
    return apiv2.project(self.pk).canonical_url().get()['url']