def symlink_subprojects(version):
    """
    Link from HOME/user_builds/project/subprojects/<project> ->
    HOME/user_builds/<project>/rtd-builds/

    For every subproject slug (fetched over the API when DONT_HIT_DB is
    set, otherwise from the local DB relation), create the parent
    directory on all app servers and point a symlink at the subproject's
    ``rtd-builds`` directory.  Slugs containing ``_`` are additionally
    symlinked under their ``-`` spelling.
    """
    # Collect subproject slugs, either via the API or from the DB.
    if getattr(settings, 'DONT_HIT_DB', True):
        subproject_slugs = [
            data['slug']
            for data in api.project(version.project.pk)
                           .subprojects.get()['subprojects']]
    else:
        rels = version.project.subprojects.all()
        subproject_slugs = [rel.child.slug for rel in rels]

    for slug in subproject_slugs:
        slugs = [slug]
        # Also expose an alias with underscores replaced by hyphens.
        if '_' in slugs[0]:
            slugs.append(slugs[0].replace('_', '-'))
        for subproject_slug in slugs:
            log.debug(LOG_TEMPLATE.format(
                project=version.project.slug,
                version=version.slug,
                msg="Symlinking subproject: %s" % subproject_slug))

            # The directory for this specific subproject
            symlink = version.project.subprojects_symlink_path(subproject_slug)
            # os.path.dirname replaces the manual split/join of the path.
            run_on_app_servers('mkdir -p %s' % os.path.dirname(symlink))

            # Where the actual docs live
            docs_dir = os.path.join(
                settings.DOCROOT, subproject_slug, 'rtd-builds')
            run_on_app_servers('ln -nsf %s %s' % (docs_dir, symlink))
def update_documentation_type(version):
    """
    Automatically determine the doc type for a user.

    Counts Markdown vs. reStructuredText files in the version's checkout
    and stores the winning documentation type ('mkdocs' or 'sphinx',
    with 'sphinx' as the tie-breaking default) on the project via the
    v2 API and on the in-memory project object.
    """
    checkout_path = version.project.checkout_path(version.slug)
    os.chdir(checkout_path)
    found_files = run('find .')[1].split('\n')

    # Tally files by doc format; elif keeps each file counted once.
    counts = {'mkdocs': 0, 'sphinx': 0}
    for path in found_files:
        if (fnmatch.fnmatch(path, '*.md')
                or fnmatch.fnmatch(path, '*.markdown')):
            counts['mkdocs'] += 1
        elif fnmatch.fnmatch(path, '*.rst'):
            counts['sphinx'] += 1

    doc_type = 'mkdocs' if counts['mkdocs'] > counts['sphinx'] else 'sphinx'

    # Persist the decision through the API, then mirror it locally.
    project_data = api_v2.project(version.project.pk).get()
    project_data['documentation_type'] = doc_type
    api_v2.project(version.project.pk).put(project_data)
    version.project.documentation_type = doc_type
def update_documentation_type(version):
    """
    Automatically determine the doc type for a user.

    Scans the checked-out sources: more Markdown files than reST files
    selects 'mkdocs'; otherwise 'sphinx'.  The result is written back to
    the project record via the v2 API and cached on the local object.
    """
    checkout_path = version.project.checkout_path(version.slug)
    os.chdir(checkout_path)
    files = run('find .')[1].split('\n')

    # The two pattern sets are disjoint, so independent tallies are
    # equivalent to a single if/elif pass over the file list.
    markdown = sum(
        1 for f in files
        if fnmatch.fnmatch(f, '*.md') or fnmatch.fnmatch(f, '*.markdown'))
    sphinx = sum(1 for f in files if fnmatch.fnmatch(f, '*.rst'))

    ret = 'mkdocs' if markdown > sphinx else 'sphinx'

    project_data = api_v2.project(version.project.pk).get()
    project_data['documentation_type'] = ret
    api_v2.project(version.project.pk).put(project_data)
    version.project.documentation_type = ret
def get_token_for_project(project, force_local=False):
    """
    Return a GitHub token for ``project``, or None.

    Short-circuits to None unless ALLOW_PRIVATE_REPOS is enabled.  Uses
    the API token endpoint when DONT_HIT_DB is set (and not forced
    local); otherwise walks the project's users and keeps the token of
    the last user that has one.  Any failure is logged and None-safe.
    """
    if not getattr(settings, 'ALLOW_PRIVATE_REPOS', False):
        return None

    token = None
    try:
        use_api = getattr(settings, 'DONT_HIT_DB', True) and not force_local
        if use_api:
            token = api.project(project.pk).token().get()['token']
        else:
            for user in project.users.all():
                user_tokens = SocialToken.objects.filter(
                    account__user__username=user.username,
                    app__provider='github')
                # Only overwrite when this user actually has a token, so
                # an earlier user's token survives later empty results.
                if user_tokens.exists():
                    token = user_tokens[0].token
    except Exception:
        log.error('Failed to get token for user', exc_info=True)
    return token
def get_token_for_project(project, force_local=False):
    """
    Look up a GitHub access token for the given project.

    Returns None when private repos are disallowed or when no token can
    be found; errors during lookup are logged, never raised.
    """
    allow_private = getattr(settings, 'ALLOW_PRIVATE_REPOS', False)
    if not allow_private:
        return None

    found_token = None
    try:
        if getattr(settings, 'DONT_HIT_DB', True) and not force_local:
            # Remote mode: the API serves the token directly.
            found_token = api.project(project.pk).token().get()['token']
        else:
            # Local mode: scan project members for a GitHub social token.
            for member in project.users.all():
                matches = SocialToken.objects.filter(
                    account__user__username=member.username,
                    app__provider='github')
                if matches.exists():
                    found_token = matches[0].token
    except Exception:
        log.error('Failed to get token for user', exc_info=True)
    return found_token
def update_imported_docs(version_pk):
    """
    Check out or update the given project's repository.

    Fetches the version via the v1 API, ensures the build directory
    exists, checks out (or updates to) the requested revision under a
    non-blocking repo lock, and finally pushes the repository's
    tags/branches to the v2 ``sync_versions`` endpoint.

    Raises ProjectImportError when the repo type is unknown.
    """
    version_data = api_v1.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project
    ret_dict = {}

    # Make Dirs
    if not os.path.exists(project.doc_path):
        os.makedirs(project.doc_path)

    if not project.vcs_repo():
        raise ProjectImportError(
            "Repo type '{0}' unknown".format(project.repo_type))

    with project.repo_nonblockinglock(
            version=version,
            max_lock_age=getattr(settings, 'REPO_LOCK_SECONDS', 30)):
        before_vcs.send(sender=version)
        # Get the actual code on disk
        if version:
            log.info(LOG_TEMPLATE.format(
                project=project.slug,
                version=version.slug,
                msg='Checking out version {slug}: {identifier}'.format(
                    slug=version.slug,
                    identifier=version.identifier)))
            version_slug = version.slug
            version_repo = project.vcs_repo(version_slug)
            ret_dict['checkout'] = version_repo.checkout(version.identifier)
        else:
            # Does this ever get called?
            log.info(LOG_TEMPLATE.format(
                project=project.slug,
                version=version.slug,
                msg='Updating to latest revision'))
            version_slug = LATEST
            version_repo = project.vcs_repo(version_slug)
            ret_dict['checkout'] = version_repo.update()
        after_vcs.send(sender=version)

        # Update tags/version
        version_post_data = {'repo': version_repo.repo_url}
        if version_repo.supports_tags:
            version_post_data['tags'] = [
                {'identifier': v.identifier,
                 'verbose_name': v.verbose_name}
                for v in version_repo.tags]
        if version_repo.supports_branches:
            version_post_data['branches'] = [
                {'identifier': v.identifier,
                 'verbose_name': v.verbose_name}
                for v in version_repo.branches]
        try:
            api_v2.project(project.pk).sync_versions.post(version_post_data)
        except Exception as e:
            # Route through the module logger instead of a bare py2
            # ``print``; ``except ... as e`` (2.6+/3.x) replaces the
            # py2-only comma form, and logging ``e`` itself avoids the
            # deprecated/missing ``e.message`` attribute.
            log.error("Sync Versions Exception: %s", e, exc_info=True)
def update_imported_docs(version_pk):
    """
    Check out or update the given project's repository.

    Resolves the version through the v1 API, checks out the right
    revision (or updates to latest) while holding the project's
    non-blocking repo lock, then reports tags and branches back via the
    v2 ``sync_versions`` endpoint.

    Raises ProjectImportError for an unknown repo type.
    """
    version_data = api_v1.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project
    ret_dict = {}

    # Make Dirs
    if not os.path.exists(project.doc_path):
        os.makedirs(project.doc_path)

    if not project.vcs_repo():
        raise ProjectImportError(
            "Repo type '{0}' unknown".format(project.repo_type))

    with project.repo_nonblockinglock(
            version=version,
            max_lock_age=getattr(settings, 'REPO_LOCK_SECONDS', 30)):
        before_vcs.send(sender=version)
        # Get the actual code on disk
        if version:
            log.info(LOG_TEMPLATE.format(
                project=project.slug,
                version=version.slug,
                msg='Checking out version {slug}: {identifier}'.format(
                    slug=version.slug,
                    identifier=version.identifier)))
            version_slug = version.slug
            version_repo = project.vcs_repo(version_slug)
            ret_dict['checkout'] = version_repo.checkout(version.identifier)
        else:
            # Does this ever get called?
            log.info(LOG_TEMPLATE.format(
                project=project.slug,
                version=version.slug,
                msg='Updating to latest revision'))
            version_slug = LATEST
            version_repo = project.vcs_repo(version_slug)
            ret_dict['checkout'] = version_repo.update()
        after_vcs.send(sender=version)

        # Update tags/version
        version_post_data = {'repo': version_repo.repo_url}
        if version_repo.supports_tags:
            version_post_data['tags'] = [
                {'identifier': v.identifier,
                 'verbose_name': v.verbose_name}
                for v in version_repo.tags]
        if version_repo.supports_branches:
            version_post_data['branches'] = [
                {'identifier': v.identifier,
                 'verbose_name': v.verbose_name}
                for v in version_repo.branches]
        try:
            api_v2.project(project.pk).sync_versions.post(version_post_data)
        except Exception as e:
            # Fixed: the original used the Python-2-only
            # ``except Exception, e`` form plus a bare ``print`` and the
            # deprecated ``e.message``.  Log the failure instead so it
            # lands in the build logs.
            log.error("Sync Versions Exception: %s", e, exc_info=True)