def symlink_translations(version):
    """
    Symlink a project's translations into its served tree.

    Link from HOME/user_builds/project/translations/<lang> ->
              HOME/user_builds/<project>/rtd-builds/

    `version`
        The version object whose project's translations should be linked.

    This is best-effort: any failure is logged and swallowed so that a
    broken translation setup never fails the main build.
    """
    try:
        translations = apiv2.project(
            version.project.pk).translations.get()['translations']
        for translation_data in translations:
            translation = make_api_project(translation_data)

            # Get the first part of the symlink.
            base_path = version.project.translations_path(
                translation.language)
            translation_dir = translation.rtd_build_path(translation.slug)
            # Chop off the version from the end.
            translation_dir = '/'.join(translation_dir.split('/')[:-1])
            log.info("Symlinking %s" % translation.language)
            # Ensure the parent directory of the link exists on all servers.
            run_on_app_servers(
                'mkdir -p %s' % '/'.join(base_path.split('/')[:-1]))
            run_on_app_servers(
                'ln -nsf %s %s' % (translation_dir, base_path))

        # Hack in the en version for backwards compat
        base_path = version.project.translations_path('en')
        translation_dir = version.project.rtd_build_path(
            version.project.slug)
        # Chop off the version from the end.
        translation_dir = '/'.join(translation_dir.split('/')[:-1])
        run_on_app_servers(
            'mkdir -p %s' % '/'.join(base_path.split('/')[:-1]))
        run_on_app_servers(
            'ln -nsf %s %s' % (translation_dir, base_path))
    # `except X as e` (PEP 3110) replaces the Python-2-only `except X, e`
    # form; it is valid on Python 2.6+ and required on Python 3.
    except Exception as e:
        # Don't fail on translation bits; keep the traceback for debugging.
        log.error("Error in symlink_translations: %s" % e, exc_info=True)
def update_static_metadata(project_pk):
    """Update static metadata JSON file

    Metadata settings include the following project settings:

    version
      The default version for the project, default: `latest`

    language
      The default language for the project, default: `en`

    languages
      List of languages built by linked translation projects.

    The file is written to ``project.static_metadata_path()`` and then
    copied to the app servers; write failures are logged, not raised.
    """
    project_base = apiv2.project(project_pk)
    project_data = project_base.get()
    project = make_api_project(project_data)
    translations = project_base.translations.get()["translations"]
    # Set comprehension (dedupes); skip translations without a language key.
    languages = {
        translation["language"]
        for translation in translations
        if "language" in translation
    }
    # Convert to JSON safe types
    metadata = {
        "version": project.default_version,
        "language": project.language,
        "languages": list(languages),
    }
    try:
        path = project.static_metadata_path()
        # `with` guarantees the handle is closed even if json.dump raises;
        # the original leaked the file object in that case.
        with open(path, "w") as fh:
            json.dump(metadata, fh)
        copy_file_to_app_servers(path, path)
    except IOError as e:
        log.debug(
            LOG_TEMPLATE.format(project=project.slug, version="", msg="Cannot write to metadata.json: {0}".format(e))
        )
def symlink_translations(version):
    """
    Link from HOME/user_builds/project/translations/<lang> ->
              HOME/user_builds/<project>/rtd-builds/
    """
    # NOTE(review): runs shell commands remotely via run_on_app_servers;
    # paths are interpolated unquoted — assumes slugs/languages are safe
    # shell tokens (TODO confirm upstream validation).
    try:
        translations = apiv2.project(
            version.project.pk).translations.get()['translations']
        for translation_data in translations:
            translation = make_api_project(translation_data)

            # Get the first part of the symlink.
            base_path = version.project.translations_path(translation.language)
            translation_dir = translation.rtd_build_path(translation.slug)
            # Chop off the version from the end.
            translation_dir = '/'.join(translation_dir.split('/')[:-1])
            log.info("Symlinking %s" % translation.language)
            run_on_app_servers('mkdir -p %s' % '/'.join(base_path.split('/')[:-1]))
            run_on_app_servers('ln -nsf %s %s' % (translation_dir, base_path))

        # Hack in the en version for backwards compat
        base_path = version.project.translations_path('en')
        translation_dir = version.project.rtd_build_path(version.project.slug)
        # Chop off the version from the end.
        translation_dir = '/'.join(translation_dir.split('/')[:-1])
        run_on_app_servers('mkdir -p %s' % '/'.join(base_path.split('/')[:-1]))
        run_on_app_servers('ln -nsf %s %s' % (translation_dir, base_path))
    # NOTE(review): Python-2-only `except X, e` syntax; fails on Python 3.
    except Exception, e:
        log.error("Error in symlink_translations: %s" % e)
        # Don't fail on translation bits
        pass
def update_docs(pk, version_pk=None, build_pk=None, record=True, docker=False,
                pdf=True, man=True, epub=True, dash=True, search=True,
                force=False, intersphinx=True, localmedia=True, api=None,
                **kwargs):
    """
    The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we
    created it. Then it will build the html docs and other requested parts.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful
        for preventing changes visible to the end-user when running commands
        from the shell, for example.
    """
    # Dependency injection to allow for testing
    if api is None:
        api = tastyapi.api

    project_data = api.project(pk).get()
    project = make_api_project(project_data)
    log.info(LOG_TEMPLATE.format(project=project.slug, version='',
                                 msg='Building'))
    version = ensure_version(api, project, version_pk)
    build = create_build(build_pk)
    results = {}

    # Build server stuff
    try:
        record_build(api=api, build=build, record=record, results=results,
                     state='cloning')
        vcs_results = setup_vcs(version, build, api)
        if vcs_results:
            results.update(vcs_results)

        if docker or settings.DOCKER_ENABLE:
            record_build(api=api, build=build, record=record, results=results,
                         state='building')
            # Renamed local (was `docker`) so it no longer shadows the
            # boolean `docker` parameter.
            docker_env = DockerEnvironment(version)
            build_results = docker_env.build()
            results.update(build_results)
        else:
            record_build(api=api, build=build, record=record, results=results,
                         state='installing')
            setup_results = setup_environment(version)
            results.update(setup_results)

            record_build(api=api, build=build, record=record, results=results,
                         state='building')
            build_results = build_docs(version, force, pdf, man, epub, dash,
                                       search, localmedia)
            results.update(build_results)

    # `except X as e` (PEP 3110): valid on Python 2.6+, required on Python 3.
    except vcs_support_utils.LockTimeout as e:
        # 423 is HTTP "Locked".
        results['checkout'] = (423, "",
                               "Version locked, retrying in 5 minutes.")
        log.info(LOG_TEMPLATE.format(project=version.project.slug,
                                     version=version.slug,
                                     msg="Unable to lock, will retry"))
        # http://celery.readthedocs.org/en/3.0/userguide/tasks.html#retrying
        # Should completely retry the task for us until max_retries is exceeded
        update_docs.retry(exc=e, throw=False)
def update_mirror_docs():
    """A periodic task used to update all projects that we mirror."""
    now = datetime.datetime.now()
    # Only record one build a day, at midnight.
    record = (now.hour == 0 and now.minute == 0)
    # Pull every mirrored project in one large page and rebuild each.
    data = apiv2.project().get(mirror=True, page_size=500)
    for project_data in data['results']:
        project = make_api_project(project_data)
        update_docs(pk=project.pk, record=record)
def update_static_metadata(project_pk):
    """Update static metadata JSON file

    Metadata settings include the following project settings:

    version
      The default version for the project, default: `latest`

    language
      The default language for the project, default: `en`

    languages
      List of languages built by linked translation projects.

    The file is written to ``project.static_metadata_path()`` and then
    copied to the app servers; write failures are logged, not raised.
    """
    project_base = apiv2.project(project_pk)
    project_data = project_base.get()
    project = make_api_project(project_data)
    log.info(LOG_TEMPLATE.format(
        project=project.slug,
        version='',
        msg='Updating static metadata',
    ))
    translations = project_base.translations.get()['translations']
    # Set comprehension (dedupes); skip translations without a language key.
    languages = {
        translation['language']
        for translation in translations
        if 'language' in translation
    }
    # Convert to JSON safe types
    metadata = {
        'version': project.default_version,
        'language': project.language,
        'languages': list(languages),
        'single_version': project.single_version,
    }
    try:
        path = project.static_metadata_path()
        # `with` guarantees the handle is closed even if json.dump raises;
        # the original leaked the file object in that case.
        with open(path, 'w') as fh:
            json.dump(metadata, fh)
        copy_file_to_app_servers(path, path)
    except IOError as e:
        log.debug(LOG_TEMPLATE.format(
            project=project.slug,
            version='',
            msg='Cannot write to metadata.json: {0}'.format(e)
        ))
def update_static_metadata(project_pk):
    """Update static metadata JSON file

    Metadata settings include the following project settings:

    version
      The default version for the project, default: `latest`

    language
      The default language for the project, default: `en`

    languages
      List of languages built by linked translation projects.
    """
    project_base = apiv2.project(project_pk)
    project_data = project_base.get()
    project = make_api_project(project_data)
    log.info(
        LOG_TEMPLATE.format(
            project=project.slug,
            version='',
            msg='Updating static metadata',
        ))
    translations = project_base.translations.get()['translations']
    # Collect the distinct languages of linked translation projects.
    languages = set([
        translation['language']
        for translation in translations
        if 'language' in translation
    ])
    # Convert to JSON safe types
    metadata = {
        'version': project.default_version,
        'language': project.language,
        'languages': list(languages),
        'single_version': project.single_version,
    }
    try:
        path = project.static_metadata_path()
        # NOTE(review): if json.dump raises, fh is never closed (no `with`).
        fh = open(path, 'w')
        json.dump(metadata, fh)
        fh.close()
        copy_file_to_app_servers(path, path)
    except IOError as e:
        # Best-effort: a metadata write failure is only logged at debug.
        log.debug(
            LOG_TEMPLATE.format(
                project=project.slug,
                version='',
                msg='Cannot write to metadata.json: {0}'.format(e)))
def update_docs(pk, record=True, pdf=True, man=True, epub=True, dash=True,
                version_pk=None, force=False, **kwargs):
    """The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we
    created it. Then it will build the html docs and other requested parts.
    It also handles clearing the varnish cache.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful
        for preventing changes visible to the end-user when running commands
        from the shell, for example.
    """

    ###
    # Handle passed in arguments
    ###
    project_data = api.project(pk).get()
    project = make_api_project(project_data)

    # Prevent saving the temporary Project instance
    def new_save(*args, **kwargs):
        log.warning("Called save on a non-real object.")
        return 0
    project.save = new_save

    log.info("Building %s" % project)
    if version_pk:
        version_data = api.version(version_pk).get()
    else:
        branch = project.default_branch or project.vcs_repo().fallback_branch
        try:
            # Use latest version
            version_data = (api.version(
                project.slug).get(slug='latest')['objects'][0])
        except (slumber.exceptions.HttpClientError, IndexError):
            # Create the latest version since it doesn't exist
            version_data = dict(
                project='/api/v1/project/%s/' % project.pk,
                slug='latest',
                active=True,
                verbose_name='latest',
                identifier=branch,
            )
            try:
                version_data = api.version.post(version_data)
            except Exception as e:
                log.info("Exception in creating version: %s" % e)
                raise e

    version = make_api_version(version_data)
    # Block saving the temporary Version object too.
    version.save = new_save

    if not version_pk:
        # Lots of course correction.
        to_save = False
        if not version.verbose_name:
            version_data['verbose_name'] = 'latest'
            to_save = True
        if not version.active:
            version_data['active'] = True
            to_save = True
        if version.identifier != branch:
            version_data['identifier'] = branch
            to_save = True
        if to_save:
            # Re-serialize the project reference as an API URI before PUT.
            version_data['project'] = ("/api/v1/version/%s/"
                                       % version_data['project'].pk)
            api.version(version.pk).put(version_data)

    if record:
        # Create Build Object.
        build = api.build.post(
            dict(
                project='/api/v1/project/%s/' % project.pk,
                version='/api/v1/version/%s/' % version.pk,
                type='html',
                state='triggered',
            ))
    else:
        build = {}

    try:
        log.info("Updating docs from VCS")
        # NOTE(review): update_output is assigned but not used in this span.
        update_output = update_imported_docs(version.pk)
        #update_output = update_result.get()
    # NOTE(review): Python-2-only `except X, err` syntax; fails on Python 3.
    except ProjectImportError, err:
        log.error("Failed to import project; skipping build.", exc_info=True)
        # NOTE(review): when record=False, build is {} and build['id'] below
        # would raise KeyError — assumes record=True on this path; confirm.
        build['state'] = 'finished'
        build['setup_error'] = ('Failed to import project; skipping build.\n'
                                'Please make sure your repo is correct and '
                                'you have a conf.py')
        api.build(build['id']).put(build)
        return False
def update_docs(pk, version_pk=None, record=True, docker=False,
                pdf=True, man=True, epub=True, dash=True, search=True,
                force=False, intersphinx=True, localmedia=True, api=None,
                **kwargs):
    """
    The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we
    created it. Then it will build the html docs and other requested parts.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful
        for preventing changes visible to the end-user when running commands
        from the shell, for example.
    """
    # Dependency injection to allow for testing
    if api is None:
        api = tastyapi.api

    project_data = api.project(pk).get()
    project = make_api_project(project_data)
    log.info(
        LOG_TEMPLATE.format(project=project.slug, version='', msg='Building'))
    version = ensure_version(api, project, version_pk)
    build = create_build(version, api, record)
    results = {}

    try:
        record_build(api=api, build=build, record=record, results=results,
                     state='cloning')
        vcs_results = setup_vcs(version, build, api)
        results.update(vcs_results)

        if docker:
            record_build(api=api, build=build, record=record, results=results,
                         state='building')
            build_results = run_docker(version)
            results.update(build_results)
        else:
            record_build(api=api, build=build, record=record, results=results,
                         state='installing')
            setup_results = setup_environment(version)
            results.update(setup_results)

            record_build(api=api, build=build, record=record, results=results,
                         state='building')
            build_results = build_docs(version, force, pdf, man, epub, dash,
                                       search, localmedia)
            results.update(build_results)
        # Publish artifacts and finalize the build record.
        move_files(version, results)
        record_pdf(api=api, record=record, results=results,
                   state='finished', version=version)
        finish_build(version=version, build=build, results=results)

        # results['html'] is a (status, out, err)-style tuple; 0 == success.
        if results['html'][0] == 0:
            # Mark version active on the site
            version_data = api.version(version.pk).get()
            version_data['active'] = True
            version_data['built'] = True
            # Need to delete this because a bug in tastypie breaks on the users
            # list.
            del version_data['project']
            try:
                api.version(version.pk).put(version_data)
            # NOTE(review): Python-2-only `except X, e` syntax below.
            except Exception, e:
                log.error(LOG_TEMPLATE.format(
                    project=version.project.slug,
                    version=version.slug,
                    msg="Unable to put a new version"), exc_info=True)
    except vcs_support_utils.LockTimeout, e:
        results['checkout'] = (999, "",
                               "Version locked, retrying in 5 minutes.")
        log.info(
            LOG_TEMPLATE.format(project=version.project.slug,
                                version=version.slug,
                                msg="Unable to lock, will retry"))
        # http://celery.readthedocs.org/en/3.0/userguide/tasks.html#retrying
        # Should completely retry the task for us until max_retries is exceeded
        update_docs.retry(exc=e, throw=False)
def update_docs(pk, record=True, pdf=True, man=True, epub=True, dash=True,
                search=True, version_pk=None, force=False, intersphinx=True,
                localmedia=True, api=None, **kwargs):
    """The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we
    created it. Then it will build the html docs and other requested parts.
    It also handles clearing the varnish cache.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful
        for preventing changes visible to the end-user when running commands
        from the shell, for example.
    """
    # Dependency injection to allow for testing.
    if api is None:
        api = tastyapi.api

    project_data = api.project(pk).get()
    project = make_api_project(project_data)

    # Hard-coded blacklist of a problem repository.
    if 'tryton' in project.repo:
        # Skip for now
        return

    log.info(LOG_TEMPLATE.format(project=project.slug, version='',
                                 msg='Building'))
    if version_pk:
        version_data = api.version(version_pk).get()
    else:
        branch = project.default_branch or project.vcs_repo().fallback_branch
        try:
            # Use latest version
            version_data = (api.version(project.slug)
                            .get(slug='latest')['objects'][0])
        except (slumber.exceptions.HttpClientError, IndexError):
            # Create the latest version since it doesn't exist
            version_data = dict(
                project='/api/v1/project/%s/' % project.pk,
                slug='latest',
                type='branch',
                active=True,
                verbose_name='latest',
                identifier=branch,
            )
            try:
                version_data = api.version.post(version_data)
            except Exception as e:
                log.info(LOG_TEMPLATE.format(
                    project=project.slug, version='',
                    msg='Exception in creating version: %s' % e))
                raise e

    version = make_api_version(version_data)

    if not version_pk:
        # Lots of course correction.
        to_save = False
        if not version.verbose_name:
            version_data['verbose_name'] = 'latest'
            to_save = True
        if not version.active:
            version_data['active'] = True
            to_save = True
        if version.identifier != branch:
            version_data['identifier'] = branch
            to_save = True
        if to_save:
            # Re-serialize the project reference as an API URI before PUT.
            version_data['project'] = ("/api/v1/version/%s/"
                                       % version_data['project'].pk)
            api.version(version.pk).put(version_data)

    if record:
        # Create Build Object.
        build = api.build.post(dict(
            project='/api/v1/project/%s/' % project.pk,
            version='/api/v1/version/%s/' % version.pk,
            type='html',
            state='triggered',
        ))
    else:
        build = {}

    try:
        log.info(LOG_TEMPLATE.format(project=project.slug,
                                     version=version.slug,
                                     msg='Updating docs from VCS'))
        # NOTE(review): update_output is assigned but not used in this span.
        update_output = update_imported_docs(version.pk, api)
        #update_output = update_result.get()
    # NOTE(review): Python-2-only `except X, err` syntax; fails on Python 3.
    except ProjectImportError, err:
        log.error(LOG_TEMPLATE.format(
            project=project.slug, version=version.slug,
            msg='Failed to import project; skipping build'), exc_info=True)
        # NOTE(review): when record=False, build is {} and build['id'] below
        # would raise KeyError — assumes record=True on this path; confirm.
        build['state'] = 'finished'
        build['setup_error'] = (
            'Failed to import project; skipping build.\n'
            '\nError\n-----\n\n%s' % err.message
        )
        api.build(build['id']).put(build)
        return False
def update_docs(pk, version_pk=None, record=True, docker=False, pdf=True, man=True, epub=True, dash=True, search=True, force=False, intersphinx=True, localmedia=True, api=None, **kwargs): """ The main entry point for updating documentation. It handles all of the logic around whether a project is imported or we created it. Then it will build the html docs and other requested parts. `pk` Primary key of the project to update `record` Whether or not to keep a record of the update in the database. Useful for preventing changes visible to the end-user when running commands from the shell, for example. """ # Dependency injection to allow for testing if api is None: api = tastyapi.api project_data = api.project(pk).get() project = make_api_project(project_data) log.info(LOG_TEMPLATE.format( project=project.slug, version='', msg='Building')) version = ensure_version(api, project, version_pk) build = create_build(version, api, record) results = {} try: record_build( api=api, build=build, record=record, results=results, state='cloning') vcs_results = setup_vcs(version, build, api) if vcs_results: results.update(vcs_results) if docker: record_build( api=api, build=build, record=record, results=results, state='building') build_results = run_docker(version) results.update(build_results) else: record_build( api=api, build=build, record=record, results=results, state='installing') setup_results = setup_environment(version) results.update(setup_results) record_build( api=api, build=build, record=record, results=results, state='building') build_results = build_docs( version, force, pdf, man, epub, dash, search, localmedia) results.update(build_results) move_files(version, results) record_pdf(api=api, record=record, results=results, state='finished', version=version) finish_build(version=version, build=build, results=results) if results['html'][0] == 0: # Mark version active on the site version_data = api.version(version.pk).get() version_data['active'] = True version_data['built'] = 
True # Need to delete this because a bug in tastypie breaks on the users # list. del version_data['project'] try: api.version(version.pk).put(version_data) except Exception, e: log.error(LOG_TEMPLATE.format(project=version.project.slug, version=version.slug, msg="Unable to put a new version"), exc_info=True) except vcs_support_utils.LockTimeout, e: results['checkout'] = ( 999, "", "Version locked, retrying in 5 minutes.") log.info(LOG_TEMPLATE.format(project=version.project.slug, version=version.slug, msg="Unable to lock, will retry")) # http://celery.readthedocs.org/en/3.0/userguide/tasks.html#retrying # Should completely retry the task for us until max_retries is exceeded update_docs.retry(exc=e, throw=False)
def update_docs(pk, record=True, pdf=True, man=True, epub=True, dash=True,
                search=True, version_pk=None, force=False, intersphinx=True,
                localmedia=True, api=None, **kwargs):
    """The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we
    created it. Then it will build the html docs and other requested parts.
    It also handles clearing the varnish cache.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful
        for preventing changes visible to the end-user when running commands
        from the shell, for example.
    """
    # Dependency injection to allow for testing.
    if api is None:
        api = tastyapi.api

    project_data = api.project(pk).get()
    project = make_api_project(project_data)

    # Hard-coded blacklist of a problem repository.
    if 'tryton' in project.repo:
        # Skip for now
        return

    log.info(
        LOG_TEMPLATE.format(project=project.slug, version='', msg='Building'))
    if version_pk:
        version_data = api.version(version_pk).get()
    else:
        branch = project.default_branch or project.vcs_repo().fallback_branch
        try:
            # Use latest version
            version_data = (api.version(
                project.slug).get(slug='latest')['objects'][0])
        except (slumber.exceptions.HttpClientError, IndexError):
            # Create the latest version since it doesn't exist
            version_data = dict(
                project='/api/v1/project/%s/' % project.pk,
                slug='latest',
                type='branch',
                active=True,
                verbose_name='latest',
                identifier=branch,
            )
            try:
                version_data = api.version.post(version_data)
            except Exception as e:
                log.info(
                    LOG_TEMPLATE.format(
                        project=project.slug, version='',
                        msg='Exception in creating version: %s' % e))
                raise e

    version = make_api_version(version_data)

    if not version_pk:
        # Lots of course correction.
        to_save = False
        if not version.verbose_name:
            version_data['verbose_name'] = 'latest'
            to_save = True
        if not version.active:
            version_data['active'] = True
            to_save = True
        if version.identifier != branch:
            version_data['identifier'] = branch
            to_save = True
        if to_save:
            # Re-serialize the project reference as an API URI before PUT.
            version_data['project'] = ("/api/v1/version/%s/"
                                       % version_data['project'].pk)
            api.version(version.pk).put(version_data)

    if record:
        # Create Build Object.
        build = api.build.post(
            dict(
                project='/api/v1/project/%s/' % project.pk,
                version='/api/v1/version/%s/' % version.pk,
                type='html',
                state='triggered',
            ))
    else:
        build = {}

    try:
        log.info(
            LOG_TEMPLATE.format(project=project.slug, version=version.slug,
                                msg='Updating docs from VCS'))
        # NOTE(review): update_output is assigned but not used in this span.
        update_output = update_imported_docs(version.pk, api)
        #update_output = update_result.get()
    # NOTE(review): Python-2-only `except X, err` syntax; fails on Python 3.
    except ProjectImportError, err:
        log.error(LOG_TEMPLATE.format(
            project=project.slug, version=version.slug,
            msg='Failed to import project; skipping build'), exc_info=True)
        # NOTE(review): when record=False, build is {} and build['id'] below
        # would raise KeyError — assumes record=True on this path; confirm.
        build['state'] = 'finished'
        build['setup_error'] = ('Failed to import project; skipping build.\n'
                                '\nError\n-----\n\n%s' % err.message)
        api.build(build['id']).put(build)
        return False
def update_docs(pk, version_pk=None, build_pk=None, record=True, docker=False,
                search=True, force=False, intersphinx=True, localmedia=True,
                basic=False, **kwargs):
    """
    The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we
    created it. Then it will build the html docs and other requested parts.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful
        for preventing changes visible to the end-user when running commands
        from the shell, for example.
    """
    # NOTE(review): start_time is never read in this span.
    start_time = datetime.datetime.utcnow()
    try:
        project_data = api_v1.project(pk).get()
    except HttpClientError:
        # NOTE(review): on this path project_data is unbound and the
        # make_api_project call below raises NameError — likely missing a
        # `return`/`raise` here; confirm intended behavior.
        log.exception(LOG_TEMPLATE.format(
            project=pk, version='',
            msg='Failed to get project data on build. Erroring.'))
    project = make_api_project(project_data)

    # Don't build skipped projects
    if project.skip:
        log.info(LOG_TEMPLATE.format(project=project.slug, version='',
                                     msg='Skipping'))
        return
    else:
        log.info(LOG_TEMPLATE.format(project=project.slug, version='',
                                     msg='Building'))
    version = ensure_version(project, version_pk)
    build = create_build(build_pk)
    results = {}

    # Build Servery stuff
    try:
        record_build(build=build, record=record, results=results,
                     state='cloning')
        vcs_results = setup_vcs(version, build)
        if vcs_results:
            results.update(vcs_results)

        # 'auto' means the doc type is detected from the checked-out source.
        if project.documentation_type == 'auto':
            update_documentation_type(version)

        if docker or settings.DOCKER_ENABLE:
            record_build(build=build, record=record, results=results,
                         state='building')
            # NOTE(review): this local rebinds the `docker` boolean parameter.
            docker = DockerEnvironment(version)
            build_results = docker.build()
            results.update(build_results)
        else:
            record_build(build=build, record=record, results=results,
                         state='installing')
            setup_results = setup_environment(version)
            results.update(setup_results)

            record_build(build=build, record=record, results=results,
                         state='building')
            build_results = build_docs(version, force, search, localmedia)
            results.update(build_results)

    # NOTE(review): Python-2-only `except X, e` syntax; fails on Python 3.
    except vcs_support_utils.LockTimeout, e:
        # 423 is HTTP "Locked".
        results['checkout'] = (423, "",
                               "Version locked, retrying in 5 minutes.")
        log.info(LOG_TEMPLATE.format(project=version.project.slug,
                                     version=version.slug,
                                     msg="Unable to lock, will retry"))
        # http://celery.readthedocs.org/en/3.0/userguide/tasks.html#retrying
        # Should completely retry the task for us until max_retries is exceeded
        update_docs.retry(exc=e, throw=False)
def update_docs(pk, version_pk=None, build_pk=None, record=True, docker=False,
                pdf=True, man=True, epub=True, dash=True, search=True,
                force=False, intersphinx=True, localmedia=True, api=None,
                basic=False, **kwargs):
    """
    The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we
    created it. Then it will build the html docs and other requested parts.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful
        for preventing changes visible to the end-user when running commands
        from the shell, for example.
    """
    # Dependency injection to allow for testing
    if api is None:
        api = tastyapi.api
        apiv2 = tastyapi.apiv2
    else:
        # In tests a single injected client serves as both API versions.
        apiv2 = api

    # NOTE(review): start_time is never read in this span.
    start_time = datetime.datetime.utcnow()

    try:
        project_data = api.project(pk).get()
    except HttpClientError:
        # NOTE(review): on this path project_data is unbound and the
        # make_api_project call below raises NameError — likely missing a
        # `return`/`raise` here; confirm intended behavior.
        log.exception(
            LOG_TEMPLATE.format(
                project=pk, version='',
                msg='Failed to get project data on build. Erroring.'))
    project = make_api_project(project_data)

    # Don't build skipped projects
    if project.skip:
        log.info(
            LOG_TEMPLATE.format(project=project.slug, version='',
                                msg='Skipping'))
        return
    else:
        log.info(
            LOG_TEMPLATE.format(project=project.slug, version='',
                                msg='Building'))
    version = ensure_version(api, project, version_pk)
    build = create_build(build_pk)
    results = {}

    # Build Servery stuff
    try:
        record_build(api=api, build=build, record=record, results=results,
                     state='cloning')
        vcs_results = setup_vcs(version, build, api)
        if vcs_results:
            results.update(vcs_results)

        # 'auto' means the doc type is detected from the checked-out source.
        if project.documentation_type == 'auto':
            update_documentation_type(version, apiv2)

        if docker or settings.DOCKER_ENABLE:
            record_build(api=api, build=build, record=record, results=results,
                         state='building')
            # NOTE(review): this local rebinds the `docker` boolean parameter.
            docker = DockerEnvironment(version)
            build_results = docker.build()
            results.update(build_results)
        else:
            record_build(api=api, build=build, record=record, results=results,
                         state='installing')
            setup_results = setup_environment(version)
            results.update(setup_results)

            record_build(api=api, build=build, record=record, results=results,
                         state='building')
            build_results = build_docs(version, force, pdf, man, epub, dash,
                                       search, localmedia)
            results.update(build_results)

    # NOTE(review): Python-2-only `except X, e` syntax; fails on Python 3.
    except vcs_support_utils.LockTimeout, e:
        # 423 is HTTP "Locked".
        results['checkout'] = (423, "",
                               "Version locked, retrying in 5 minutes.")
        log.info(
            LOG_TEMPLATE.format(project=version.project.slug,
                                version=version.slug,
                                msg="Unable to lock, will retry"))
        # http://celery.readthedocs.org/en/3.0/userguide/tasks.html#retrying
        # Should completely retry the task for us until max_retries is exceeded
        update_docs.retry(exc=e, throw=False)
def update_docs(pk, record=True, pdf=True, man=True, epub=True,
                version_pk=None, force=False, **kwargs):
    """
    The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we
    created it. Then it will build the html docs and other requested parts.
    It also handles clearing the varnish cache.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful
        for preventing changes visible to the end-user when running commands
        from the shell, for example.
    """

    ###
    # Handle passed in arguments
    ###
    project_data = api.project(pk).get()
    project = make_api_project(project_data)

    # Prevent saving the temporary Project instance
    def new_save(*args, **kwargs):
        log.warning("Called save on a non-real object.")
        return 0
    project.save = new_save

    log.info("Building %s" % project)
    if version_pk:
        version_data = api.version(version_pk).get()
    else:
        branch = project.default_branch or project.vcs_repo().fallback_branch
        try:
            # Use latest version
            version_data = api.version(project.slug).get(slug="latest")["objects"][0]
        except (slumber.exceptions.HttpClientError, IndexError):
            # Create the latest version since it doesn't exist
            version_data = dict(
                project="/api/v1/project/%s/" % project.pk,
                slug="latest",
                active=True,
                verbose_name="latest",
                identifier=branch,
            )
            try:
                version_data = api.version.post(version_data)
            except Exception as e:
                log.info("Exception in creating version: %s" % e)
                raise e

    version = make_api_version(version_data)
    # Block saving the temporary Version object too.
    version.save = new_save

    if not version_pk:
        # Lots of course correction.
        to_save = False
        if not version.verbose_name:
            version_data["verbose_name"] = "latest"
            to_save = True
        if not version.active:
            version_data["active"] = True
            to_save = True
        if version.identifier != branch:
            version_data["identifier"] = branch
            to_save = True
        if to_save:
            # Re-serialize the project reference as an API URI before PUT.
            version_data["project"] = "/api/v1/version/%s/" % version_data["project"].pk
            api.version(version.pk).put(version_data)

    if record:
        # Create Build Object.
        build = api.build.post(
            dict(
                project="/api/v1/project/%s/" % project.pk,
                version="/api/v1/version/%s/" % version.pk,
                type="html",
                state="triggered",
            )
        )
    else:
        build = {}

    try:
        log.info("Updating docs from VCS")
        # NOTE(review): update_output is assigned but not used in this span.
        update_output = update_imported_docs(version.pk)
        # update_output = update_result.get()
    # NOTE(review): Python-2-only `except X, err` syntax; fails on Python 3.
    except ProjectImportError, err:
        log.error("Failed to import project; skipping build.", exc_info=True)
        # NOTE(review): when record=False, build is {} and build["id"] below
        # would raise KeyError — assumes record=True on this path; confirm.
        build["state"] = "finished"
        build["setup_error"] = "Failed to import project; skipping build.\nPlease make sure your repo is correct and you have a conf.py"
        api.build(build["id"]).put(build)
        return False
def update_docs(pk, record=True, pdf=True, man=True, epub=True, dash=True,
                search=True, version_pk=None, force=False, intersphinx=True,
                api=None, **kwargs):
    """The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we
    created it. Then it will build the html docs and other requested parts.
    It also handles clearing the varnish cache.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful
        for preventing changes visible to the end-user when running commands
        from the shell, for example.
    """
    # Dependency injection to allow for testing.
    if api is None:
        api = tastyapi.api

    project_data = api.project(pk).get()
    project = make_api_project(project_data)
    # Disabled blacklist kept for reference:
    # if 'edx-platform' in project.repo:
    #     # Skip edx for now
    #     return
    log.info(LOG_TEMPLATE.format(project=project.slug, version="", msg="Building"))
    if version_pk:
        version_data = api.version(version_pk).get()
    else:
        branch = project.default_branch or project.vcs_repo().fallback_branch
        try:
            # Use latest version
            version_data = api.version(project.slug).get(slug="latest")["objects"][0]
        except (slumber.exceptions.HttpClientError, IndexError):
            # Create the latest version since it doesn't exist
            version_data = dict(
                project="/api/v1/project/%s/" % project.pk,
                slug="latest",
                type="branch",
                active=True,
                verbose_name="latest",
                identifier=branch,
            )
            try:
                version_data = api.version.post(version_data)
            except Exception as e:
                log.info(
                    LOG_TEMPLATE.format(
                        project=project.slug, version="",
                        msg="Exception in creating version: %s" % e)
                )
                raise e

    version = make_api_version(version_data)

    if not version_pk:
        # Lots of course correction.
        to_save = False
        if not version.verbose_name:
            version_data["verbose_name"] = "latest"
            to_save = True
        if not version.active:
            version_data["active"] = True
            to_save = True
        if version.identifier != branch:
            version_data["identifier"] = branch
            to_save = True
        if to_save:
            # Re-serialize the project reference as an API URI before PUT.
            version_data["project"] = "/api/v1/version/%s/" % version_data["project"].pk
            api.version(version.pk).put(version_data)

    if record:
        # Create Build Object.
        build = api.build.post(
            dict(
                project="/api/v1/project/%s/" % project.pk,
                version="/api/v1/version/%s/" % version.pk,
                type="html",
                state="triggered",
            )
        )
    else:
        build = {}

    try:
        log.info(LOG_TEMPLATE.format(project=project.slug, version=version.slug,
                                     msg="Updating docs from VCS"))
        # NOTE(review): update_output is assigned but not used in this span.
        update_output = update_imported_docs(version.pk, api)
        # update_output = update_result.get()
    # NOTE(review): Python-2-only `except X, err` syntax; fails on Python 3.
    except ProjectImportError, err:
        log.error(
            LOG_TEMPLATE.format(
                project=project.slug, version=version.slug,
                msg="Failed to import project; skipping build"
            ),
            exc_info=True,
        )
        # NOTE(review): when record=False, build is {} and build["id"] below
        # would raise KeyError — assumes record=True on this path; confirm.
        build["state"] = "finished"
        build["setup_error"] = "Failed to import project; skipping build.\n" "\nError\n-----\n\n%s" % err.message
        api.build(build["id"]).put(build)
        return False