def clear_artifacts(version_pk):
    """Remove build artifacts (epub, man, html build, latex) from the build server."""
    version = make_api_version(api.version(version_pk).get())
    project = version.project
    # Wipe each per-format output directory for this version, in the
    # same order as before: epub, man, build, latex.
    for full_path in (project.full_epub_path,
                      project.full_man_path,
                      project.full_build_path,
                      project.full_latex_path):
        run('rm -rf %s' % full_path(version.slug))
def update_intersphinx(version_pk):
    """
    Locate the Sphinx inventory (``objects.inv``) for a version.

    Fetches the version via the API and searches its checkout for an
    ``objects.inv`` file.  Returns ``None`` (after logging) when no
    inventory file is found.
    """
    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project
    try:
        object_file = version.project.find('objects.inv', version.slug)[0]
    except IndexError:
        # Fix: use the module logger instead of a bare Py2 ``print``
        # statement, and drop the unused exception binding.
        log.error("Failed to find objects file")
        return None
def build_docs(version_pk, pdf, man, epub, record, force):
    """
    This handles the actual building of the documentation and DB records.

    Builds HTML first; pdf/man/epub are only attempted when the HTML
    output changed and the project is not marked as skipped.

    Returns ``(html_results, latex_results, pdf_results, man_results,
    epub_results)`` where each element is a ``(status, output, error)``
    triple, or ``('', 'Conf file not found.', -1)`` when the project has
    no Sphinx conf file.
    """
    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project
    if not project.conf_file(version.slug):
        return ('', 'Conf file not found.', -1)

    with project.repo_lock(getattr(settings, 'REPO_LOCK_SECONDS', 30)):
        html_builder = builder_loading.get(project.documentation_type)(version)
        if force:
            html_builder.force()
        html_builder.clean()
        html_results = html_builder.build()
        if html_results[0] == 0:
            html_builder.move()

        fake_results = (999, "Project Skipped, Didn't build",
                        "Project Skipped, Didn't build")
        # Bug fix: these were previously only assigned inside the
        # "html changed" branch below, so an unchanged or skipped build
        # raised UnboundLocalError at the return statement.
        latex_results = pdf_results = fake_results
        man_results = epub_results = fake_results

        # Only build everything else if the html build changed.
        if html_builder.changed and not project.skip:
            if pdf:
                pdf_builder = builder_loading.get('sphinx_pdf')(version)
                latex_results, pdf_results = pdf_builder.build()
                # Always move pdf results even when there's an error.
                pdf_builder.move()
            if man:
                man_builder = builder_loading.get('sphinx_man')(version)
                man_results = man_builder.build()
                if man_results[0] == 0:
                    man_builder.move()
            if epub:
                epub_builder = builder_loading.get('sphinx_epub')(version)
                epub_results = epub_builder.build()
                if epub_results[0] == 0:
                    epub_builder.move()
        return (html_results, latex_results, pdf_results,
                man_results, epub_results)
def update_docs(pk, record=True, pdf=True, man=True, epub=True,
                version_pk=None, force=False, **kwargs):
    """
    The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we
    created it.  Then it will build the html docs and other requested parts.
    It also handles clearing the varnish cache.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful
        for preventing changes visible to the end-user when running commands
        from the shell, for example.
    """
    ###
    # Handle passed in arguments
    ###
    project_data = api.project(pk).get()
    project = make_api_project(project_data)

    # Prevent saving the temporary Project instance
    def new_save(*args, **kwargs):
        log.warning("Called save on a non-real object.")
        return 0

    project.save = new_save
    log.info("Building %s" % project)

    if version_pk:
        version_data = api.version(version_pk).get()
    else:
        branch = project.default_branch or project.vcs_repo().fallback_branch
        try:
            # Use latest version
            version_data = (api.version(project.slug)
                            .get(slug='latest')['objects'][0])
        except (slumber.exceptions.HttpClientError, IndexError):
            # Create the latest version since it doesn't exist
            version_data = dict(
                project='/api/v1/project/%s/' % project.pk,
                slug='latest',
                active=True,
                verbose_name='latest',
                identifier=branch,
            )
            try:
                version_data = api.version.post(version_data)
            except Exception as e:
                log.info("Exception in creating version: %s" % e)
                raise e

    version = make_api_version(version_data)
    version.save = new_save

    if not version_pk:
        # Lots of course correction.
        to_save = False
        if not version.verbose_name:
            version_data['verbose_name'] = 'latest'
            to_save = True
        if not version.active:
            version_data['active'] = True
            to_save = True
        if version.identifier != branch:
            version_data['identifier'] = branch
            to_save = True
        if to_save:
            # Serialize the related project back to its resource URI before
            # PUTting.  Bug fix: this previously built an /api/v1/version/
            # URI for the *project* field.
            version_data['project'] = ("/api/v1/project/%s/"
                                       % version_data['project'].pk)
            api.version(version.pk).put(version_data)

    if record:
        # Create Build Object.
        build = api.build.post(dict(
            project='/api/v1/project/%s/' % project.pk,
            version='/api/v1/version/%s/' % version.pk,
            type='html',
            state='triggered',
        ))
    else:
        build = {}

    try:
        log.info("Updating docs from VCS")
        update_output = update_imported_docs(version.pk)
        #update_output = update_result.get()
    except ProjectImportError:
        log.error("Failed to import project; skipping build.", exc_info=True)
        build['state'] = 'finished'
        build['setup_error'] = (
            'Failed to import project; skipping build.\n'
            'Please make sure your repo is correct and you have a conf.py')
        # Bug fix: when record=False there is no build row (build == {} has
        # no 'id'), so only write the failure state back when one exists.
        if record:
            api.build(build['id']).put(build)
        return False
def update_imported_docs(version_pk):
    """
    Check out or update the given project's repository.

    Checks out (or updates) the version's VCS working copy, optionally
    builds a virtualenv and installs Sphinx/requirements into it, then
    creates Version records for any new tags/branches found in the repo.
    Runs under the project's repo lock.
    """
    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project

    # Make Dirs
    if not os.path.exists(project.doc_path):
        os.makedirs(project.doc_path)
    with project.repo_lock(getattr(settings, 'REPO_LOCK_SECONDS', 30)):
        # Collects (status, out, err) results of each shell step, keyed by
        # step name ('checkout', 'venv', 'sphinx', ...).
        update_docs_output = {}
        if not project.vcs_repo():
            raise ProjectImportError("Repo type '{repo_type}' unknown".format(
                repo_type=project.repo_type))

        # Get the actual code on disk
        if version:
            log.info('Checking out version {slug}: {identifier}'.format(
                slug=version.slug, identifier=version.identifier))
            version_slug = version.slug
            version_repo = project.vcs_repo(version_slug)
            update_docs_output['checkout'] = version_repo.checkout(
                version.identifier)
        else:
            # Does this ever get called?
            log.info('Updating to latest revision')
            version_slug = 'latest'
            version_repo = project.vcs_repo(version_slug)
            update_docs_output['checkout'] = version_repo.update()

        # Ensure we have a conf file (an exception is raised if not)
        project.conf_file(version.slug)

        # Do Virtualenv bits:
        if project.use_virtualenv:
            if project.use_system_packages:
                site_packages = '--system-site-packages'
            else:
                site_packages = '--no-site-packages'
            update_docs_output['venv'] = run(
                '{cmd} --distribute {site_packages} {path}'.format(
                    cmd='virtualenv',
                    site_packages=site_packages,
                    path=project.venv_path(version=version_slug)))
            # Other code expects sphinx-build to be installed inside the
            # virtualenv.  Using the -I option makes sure it gets installed
            # even if it is already installed system-wide (and
            # --system-site-packages is used)
            if project.use_system_packages:
                ignore_option = '-I'
            else:
                ignore_option = ''
            # Installs a pinned Sphinx from a bitbucket changeset plus pinned
            # virtualenv/distribute/docutils into the project's venv.
            update_docs_output['sphinx'] = run(
                '{cmd} install -U {ignore_option} hg+http://bitbucket.org/birkenfeld/sphinx/@d4c6ac1fcc9c#egg=Sphinx virtualenv==1.8.2 distribute==0.6.28 docutils==0.8.1'
                .format(cmd=project.venv_bin(version=version_slug, bin='pip'),
                        ignore_option=ignore_option))

            if project.requirements_file:
                os.chdir(project.checkout_path(version_slug))
                update_docs_output['requirements'] = run(
                    '{cmd} install -r {requirements}'.format(
                        cmd=project.venv_bin(version=version_slug, bin='pip'),
                        requirements=project.requirements_file))
            # Install the project itself into the venv so autodoc can
            # import it.
            os.chdir(project.checkout_path(version_slug))
            update_docs_output['install'] = run(
                '{cmd} setup.py install --force'.format(
                    cmd=project.venv_bin(version=version_slug, bin='python')))

        # check tags/version
        #XXX:dc: what in this block raises the values error?
        # NOTE(review): the matching ``except`` for this ``try`` lies outside
        # the visible span of this chunk.
        try:
            old_versions = [obj['identifier'] for obj in
                            api.version.get(project__slug=project.slug,
                                            limit=5000)['objects']]
            if version_repo.supports_tags:
                transaction.enter_transaction_management(True)
                tags = version_repo.tags
                for tag in tags:
                    if tag.identifier in old_versions:
                        continue
                    log.debug('NEW TAG: (%s not in %s)' % (tag.identifier,
                                                           old_versions))
                    slug = slugify_uniquely(Version, tag.verbose_name,
                                            'slug', 255, project=project)
                    try:
                        version_data = api.version.post(dict(
                            project="/api/v1/project/%s/" % project.pk,
                            slug=slug,
                            identifier=tag.identifier,
                            verbose_name=tag.verbose_name))
                        ver = make_api_version(version_data)
                        log.info("New tag found: {0}".format(tag.identifier))
                        # NOTE(review): this rebinds ``ver`` (just created
                        # above) by unpacking ``project.highest_version[1]``
                        # into two names -- looks suspicious; confirm what
                        # ``highest_version`` returns.
                        ver, highest = project.highest_version[1]
                        ver_obj = mkversion(ver)
                        #TODO: Handle updating higher versions automatically.
                        #This never worked very well, anyways.
                        if highest and ver_obj and ver_obj > highest:
                            log.info("Highest version known, building docs")
                            update_docs.delay(ver.project.pk,
                                              version_pk=ver.pk)
                    except Exception, e:
                        log.error("Failed to create version (tag)",
                                  exc_info=True)
                        transaction.rollback()
                transaction.leave_transaction_management()
            if version_repo.supports_branches:
                transaction.enter_transaction_management(True)
                branches = version_repo.branches
                for branch in branches:
                    if branch.identifier in old_versions:
                        continue
                    log.debug('NEW BRANCH: (%s not in %s)' % (branch,
                                                              old_versions))
                    slug = slugify_uniquely(Version, branch.verbose_name,
                                            'slug', 255, project=project)
                    try:
                        api.version.post(dict(
                            project="/api/v1/project/%s/" % project.pk,
                            slug=slug,
                            identifier=branch.identifier,
                            verbose_name=branch.verbose_name))
                        log.info("New branch found: {0}".format(
                            branch.identifier))
                    except Exception, e:
                        log.error("Failed to create version (branch)",
                                  exc_info=True)
                        transaction.rollback()
                transaction.leave_transaction_management()
# NOTE(review): interior fragment of a larger (unseen) build-finishing
# routine -- ``project``, ``version``, ``pdf_results``, ``latex_results``,
# ``out`` and ``ret`` are all bound outside this span, and the trailing
# ``if ret == 0:`` is cut off mid-statement.
# Record the pdf build results as a Build row.
api.build.post(dict(
    project='/api/v1/project/%s/' % project.pk,
    version='/api/v1/version/%s/' % version.pk,
    success=pdf_results[0] == 0,
    type='pdf',
    setup=latex_results[1],
    setup_error=latex_results[2],
    output=pdf_results[1],
    error=pdf_results[2],
))
if version:
    # Mark version active on the site
    version_data = api.version(version.pk).get()
    version_data['active'] = True
    version_data['built'] = True
    #Need to delete this because a bug in tastypie breaks on the users list.
    del version_data['project']
    try:
        api.version(version.pk).put(version_data)
    except Exception, e:
        log.error("Unable to post a new version", exc_info=True)

# Build Finished, do house keeping bits
# Sphinx prints this when nothing needed rebuilding.
if 'no targets are out of date.' in out:
    log.info("Build Unchanged")
else:
    if ret == 0:
def update_docs(pk, record=True, pdf=True, man=True, epub=True,
                version_pk=None, force=False, **kwargs):
    """
    The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we
    created it.  Then it will build the html docs and other requested parts.
    It also handles clearing the varnish cache.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful
        for preventing changes visible to the end-user when running commands
        from the shell, for example.
    """
    ###
    # Handle passed in arguments
    ###
    project_data = api.project(pk).get()
    project = make_api_project(project_data)

    # Prevent saving the temporary Project instance
    def new_save(*args, **kwargs):
        log.warning("Called save on a non-real object.")
        return 0

    project.save = new_save
    log.info("Building %s" % project)

    if version_pk:
        version_data = api.version(version_pk).get()
    else:
        branch = project.default_branch or project.vcs_repo().fallback_branch
        try:
            # Use latest version
            version_data = (api.version(project.slug)
                            .get(slug='latest')['objects'][0])
        except (slumber.exceptions.HttpClientError, IndexError):
            # Create the latest version since it doesn't exist
            version_data = dict(
                project='/api/v1/project/%s/' % project.pk,
                slug='latest',
                active=True,
                verbose_name='latest',
                identifier=branch,
            )
            try:
                version_data = api.version.post(version_data)
            except Exception as e:
                log.info("Exception in creating version: %s" % e)
                raise e

    version = make_api_version(version_data)
    version.save = new_save

    if not version_pk:
        # Lots of course correction.
        to_save = False
        if not version.verbose_name:
            version_data['verbose_name'] = 'latest'
            to_save = True
        if not version.active:
            version_data['active'] = True
            to_save = True
        if version.identifier != branch:
            version_data['identifier'] = branch
            to_save = True
        if to_save:
            # Serialize the related project back to its resource URI before
            # PUTting.  Bug fix: this previously built an /api/v1/version/
            # URI for the *project* field.
            version_data['project'] = ("/api/v1/project/%s/"
                                       % version_data['project'].pk)
            api.version(version.pk).put(version_data)

    if record:
        # Create Build Object.
        build = api.build.post(dict(
            project='/api/v1/project/%s/' % project.pk,
            version='/api/v1/version/%s/' % version.pk,
            type='html',
            state='triggered',
        ))
    else:
        build = {}

    try:
        log.info("Updating docs from VCS")
        update_output = update_imported_docs(version.pk)
        #update_output = update_result.get()
    except ProjectImportError:
        log.error("Failed to import project; skipping build.", exc_info=True)
        build['state'] = 'finished'
        build['setup_error'] = (
            'Failed to import project; skipping build.\n'
            'Please make sure your repo is correct and you have a conf.py')
        # Bug fix: when record=False there is no build row (build == {} has
        # no 'id'), so only write the failure state back when one exists.
        if record:
            api.build(build['id']).put(build)
        return False
def update_imported_docs(version_pk):
    """
    Check out or update the given project's repository.

    Checks out (or updates) the version's VCS working copy, optionally
    builds a virtualenv and installs Sphinx/requirements into it, then
    creates Version records for any new tags/branches found in the repo.
    Runs under the project's repo lock.
    """
    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project

    # Make Dirs
    if not os.path.exists(project.doc_path):
        os.makedirs(project.doc_path)
    with project.repo_lock(getattr(settings, 'REPO_LOCK_SECONDS', 30)):
        # Collects (status, out, err) results of each shell step, keyed by
        # step name ('checkout', 'venv', 'sphinx', ...).
        update_docs_output = {}
        if not project.vcs_repo():
            raise ProjectImportError("Repo type '{repo_type}' unknown".format(
                repo_type=project.repo_type))

        # Get the actual code on disk
        if version:
            log.info('Checking out version {slug}: {identifier}'.format(
                slug=version.slug, identifier=version.identifier))
            version_slug = version.slug
            version_repo = project.vcs_repo(version_slug)
            update_docs_output['checkout'] = version_repo.checkout(
                version.identifier)
        else:
            # Does this ever get called?
            log.info('Updating to latest revision')
            version_slug = 'latest'
            version_repo = project.vcs_repo(version_slug)
            update_docs_output['checkout'] = version_repo.update()

        # Ensure we have a conf file (an exception is raised if not)
        project.conf_file(version.slug)

        # Do Virtualenv bits:
        if project.use_virtualenv:
            if project.use_system_packages:
                site_packages = '--system-site-packages'
            else:
                site_packages = '--no-site-packages'
            update_docs_output['venv'] = run(
                '{cmd} --distribute {site_packages} {path}'.format(
                    cmd='virtualenv',
                    site_packages=site_packages,
                    path=project.venv_path(version=version_slug)))
            # Other code expects sphinx-build to be installed inside the
            # virtualenv.  Using the -I option makes sure it gets installed
            # even if it is already installed system-wide (and
            # --system-site-packages is used)
            if project.use_system_packages:
                ignore_option = '-I'
            else:
                ignore_option = ''
            # Installs a pinned Sphinx from a bitbucket changeset plus pinned
            # virtualenv/distribute/docutils into the project's venv.
            update_docs_output['sphinx'] = run(
                '{cmd} install -U {ignore_option} hg+http://bitbucket.org/birkenfeld/sphinx/@d4c6ac1fcc9c#egg=Sphinx virtualenv==1.8.2 distribute==0.6.28 docutils==0.8.1'
                .format(cmd=project.venv_bin(version=version_slug, bin='pip'),
                        ignore_option=ignore_option))

            if project.requirements_file:
                os.chdir(project.checkout_path(version_slug))
                update_docs_output['requirements'] = run(
                    '{cmd} install -r {requirements}'.format(
                        cmd=project.venv_bin(version=version_slug, bin='pip'),
                        requirements=project.requirements_file))
            # Install the project itself into the venv so autodoc can
            # import it.
            os.chdir(project.checkout_path(version_slug))
            update_docs_output['install'] = run(
                '{cmd} setup.py install --force'.format(
                    cmd=project.venv_bin(version=version_slug, bin='python')))

        # check tags/version
        #XXX:dc: what in this block raises the values error?
        # NOTE(review): the matching ``except`` for this ``try`` lies outside
        # the visible span of this chunk.
        try:
            old_versions = [obj['identifier'] for obj in
                            api.version.get(project__slug=project.slug,
                                            limit=5000)['objects']]
            if version_repo.supports_tags:
                transaction.enter_transaction_management(True)
                tags = version_repo.tags
                for tag in tags:
                    if tag.identifier in old_versions:
                        continue
                    log.debug('NEW TAG: (%s not in %s)' % (tag.identifier,
                                                           old_versions))
                    slug = slugify_uniquely(Version, tag.verbose_name,
                                            'slug', 255, project=project)
                    try:
                        version_data = api.version.post(dict(
                            project="/api/v1/project/%s/" % project.pk,
                            slug=slug,
                            identifier=tag.identifier,
                            verbose_name=tag.verbose_name
                        ))
                        ver = make_api_version(version_data)
                        log.info("New tag found: {0}".format(tag.identifier))
                        # NOTE(review): this rebinds ``ver`` (just created
                        # above) by unpacking ``project.highest_version[1]``
                        # into two names -- looks suspicious; confirm what
                        # ``highest_version`` returns.
                        ver, highest = project.highest_version[1]
                        ver_obj = mkversion(ver)
                        #TODO: Handle updating higher versions automatically.
                        #This never worked very well, anyways.
                        if highest and ver_obj and ver_obj > highest:
                            log.info("Highest version known, building docs")
                            update_docs.delay(ver.project.pk,
                                              version_pk=ver.pk)
                    except Exception, e:
                        log.error("Failed to create version (tag)",
                                  exc_info=True)
                        transaction.rollback()
                transaction.leave_transaction_management()
            if version_repo.supports_branches:
                transaction.enter_transaction_management(True)
                branches = version_repo.branches
                for branch in branches:
                    if branch.identifier in old_versions:
                        continue
                    log.debug('NEW BRANCH: (%s not in %s)' % (branch,
                                                              old_versions))
                    slug = slugify_uniquely(Version, branch.verbose_name,
                                            'slug', 255, project=project)
                    try:
                        api.version.post(dict(
                            project="/api/v1/project/%s/" % project.pk,
                            slug=slug,
                            identifier=branch.identifier,
                            verbose_name=branch.verbose_name
                        ))
                        log.info("New branch found: {0}".format(
                            branch.identifier))
                    except Exception, e:
                        log.error("Failed to create version (branch)",
                                  exc_info=True)
                        transaction.rollback()
                transaction.leave_transaction_management()
# NOTE(review): interior fragment of a larger (unseen) build-finishing
# routine -- ``build``, ``project``, ``version``, ``pdf_results``,
# ``latex_results``, ``out`` and ``ret`` are all bound outside this span,
# and the trailing ``if ret == 0:`` is cut off mid-statement.
# Write the updated html build record back, then record pdf results.
api.build(build['id']).put(build)
api.build.post(dict(
    project = '/api/v1/project/%s/' % project.pk,
    version = '/api/v1/version/%s/' % version.pk,
    success=pdf_results[0] == 0,
    type='pdf',
    setup=latex_results[1],
    setup_error=latex_results[2],
    output=pdf_results[1],
    error=pdf_results[2],
))
if version:
    # Mark version active on the site
    version_data = api.version(version.pk).get()
    version_data['active'] = True
    version_data['built'] = True
    #Need to delete this because a bug in tastypie breaks on the users list.
    del version_data['project']
    try:
        api.version(version.pk).put(version_data)
    except Exception, e:
        log.error("Unable to post a new version", exc_info=True)

# Build Finished, do house keeping bits
# Sphinx prints this when nothing needed rebuilding.
if 'no targets are out of date.' in out:
    log.info("Build Unchanged")
else:
    if ret == 0: