def update_config_from_json(version_pk):
    """
    Update the project's configuration from a ``.rtd.json`` file in its
    checkout, limited to the fields editable on the import form.
    """
    # Remove circular import
    from projects.forms import ImportProjectForm

    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project
    log.debug(LOG_TEMPLATE.format(project=project.slug, version=version.slug,
                                  msg="Checking for json config"))
    try:
        rtd_json = open(os.path.join(
            project.checkout_path(version.slug),
            '.rtd.json'
        ))
        json_obj = json.load(rtd_json)
        for key in json_obj.keys():
            # Treat the defined fields on the Import form as
            # the canonical list of allowed user editable fields.
            # This is in essence just another UI for that form.
            if key not in ImportProjectForm._meta.fields:
                del json_obj[key]
    except IOError:
        log.debug(LOG_TEMPLATE.format(project=project.slug,
                                      version=version.slug,
                                      msg="No rtd.json found."))
        return None

    project_data = api.project(project.pk).get()
    project_data.update(json_obj)
    api.project(project.pk).put(project_data)
    log.debug(LOG_TEMPLATE.format(project=project.slug, version=version.slug,
                                  msg="Updated from JSON."))
def handle(self, *args, **options):
    if len(args):
        for slug in args:
            project_data = api.project(slug).get()
            p = tasks.make_api_project(project_data)
            log.info("Building %s" % p)
            tasks.update_docs(pk=p.pk, docker=True)
def update_docs(pk, version_pk=None, build_pk=None, record=True,
                docker=False, pdf=True, man=True, epub=True, dash=True,
                search=True, force=False, intersphinx=True, localmedia=True,
                api=None, **kwargs):
    """
    The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we
    created it. Then it will build the html docs and other requested parts.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful
        for preventing changes visible to the end-user when running commands
        from the shell, for example.
    """
    # Dependency injection to allow for testing
    if api is None:
        api = tastyapi.api

    project_data = api.project(pk).get()
    project = make_api_project(project_data)
    log.info(LOG_TEMPLATE.format(project=project.slug, version='',
                                 msg='Building'))
    version = ensure_version(api, project, version_pk)
    build = create_build(build_pk)
    results = {}

    # Build Servery stuff
    try:
        record_build(api=api, build=build, record=record, results=results,
                     state='cloning')
        vcs_results = setup_vcs(version, build, api)
        if vcs_results:
            results.update(vcs_results)

        if docker or settings.DOCKER_ENABLE:
            record_build(api=api, build=build, record=record, results=results,
                         state='building')
            docker = DockerEnvironment(version)
            build_results = docker.build()
            results.update(build_results)
        else:
            record_build(api=api, build=build, record=record, results=results,
                         state='installing')
            setup_results = setup_environment(version)
            results.update(setup_results)

            record_build(api=api, build=build, record=record, results=results,
                         state='building')
            build_results = build_docs(version, force, pdf, man, epub, dash,
                                       search, localmedia)
            results.update(build_results)

    except vcs_support_utils.LockTimeout, e:
        results['checkout'] = (423, "",
                               "Version locked, retrying in 5 minutes.")
        log.info(LOG_TEMPLATE.format(project=version.project.slug,
                                     version=version.slug,
                                     msg="Unable to lock, will retry"))
        # http://celery.readthedocs.org/en/3.0/userguide/tasks.html#retrying
        # Should completely retry the task for us until max_retries is
        # exceeded
        update_docs.retry(exc=e, throw=False)
def update_documentation_type(version, api):
    """
    Automatically determine the doc type for a user.
    """
    checkout_path = version.project.checkout_path(version.slug)
    os.chdir(checkout_path)
    files = run('find .')[1].split('\n')
    markdown = sphinx = 0
    for filename in files:
        if fnmatch.fnmatch(filename, '*.md') or fnmatch.fnmatch(filename, '*.markdown'):
            markdown += 1
        elif fnmatch.fnmatch(filename, '*.rst'):
            sphinx += 1
    ret = 'sphinx'
    if markdown > sphinx:
        ret = 'mkdocs'
    project_data = api.project(version.project.pk).get()
    project_data['documentation_type'] = ret
    api.project(version.project.pk).put(project_data)
    version.project.documentation_type = ret
def update_documentation_type(version, api):
    """
    Automatically determine the doc type for a user.
    """
    checkout_path = version.project.checkout_path(version.slug)
    os.chdir(checkout_path)
    files = run("find .")[1].split("\n")
    markdown = sphinx = 0
    for filename in files:
        if fnmatch.fnmatch(filename, "*.md") or fnmatch.fnmatch(filename, "*.markdown"):
            markdown += 1
        elif fnmatch.fnmatch(filename, "*.rst"):
            sphinx += 1
    ret = "sphinx"
    if markdown > sphinx:
        ret = "mkdocs"
    project_data = api.project(version.project.pk).get()
    project_data["documentation_type"] = ret
    api.project(version.project.pk).put(project_data)
    version.project.documentation_type = ret
def scrape_conf_file(version):
    """
    Locate the given project's ``conf.py`` file and extract important
    settings, including copyright, theme, source suffix and version.
    """
    # This is where we actually find the conf.py, so we can't use
    # the value from the project :)
    project = version.project
    project_data = api.project(project.pk).get()
    try:
        conf_file = project.conf_file(version.slug)
    except (ProjectImportError, IndexError):
        log.error("Missing conf.py in %s" % project, exc_info=True)
        return -1
    else:
        conf_dir = conf_file.replace('/conf.py', '')

    os.chdir(conf_dir)
    lines = open('conf.py').readlines()
    data = {}
    for line in lines:
        match = ghetto_hack.search(line)
        if match:
            data[match.group(1).strip()] = match.group(2).strip()
    project_data['copyright'] = data.get('copyright', 'Unknown')
    project_data['theme'] = data.get('html_theme', 'default')
    if len(project.theme) > 20:
        project_data['theme'] = 'default'
    project_data['suffix'] = data.get('source_suffix', '.rst')
    project_data['path'] = os.getcwd()
    try:
        project_data['version'] = str(decimal.Decimal(data.get('version')))
    except (TypeError, decimal.InvalidOperation):
        project_data['version'] = ''
    api.project(project.pk).put(project_data)
def scrape_conf_file(version):
    """
    Locate the given project's ``conf.py`` file and extract important
    settings, including copyright, theme, source suffix and version.
    """
    # This is where we actually find the conf.py, so we can't use
    # the value from the project :)
    project = version.project
    project_data = api.project(project.pk).get()
    try:
        conf_file = project.conf_file(version.slug)
    except IndexError:
        print ("Could not find conf.py in %s" % project)
        return -1
    else:
        conf_dir = conf_file.replace("/conf.py", "")

    os.chdir(conf_dir)
    lines = open("conf.py").readlines()
    data = {}
    for line in lines:
        match = ghetto_hack.search(line)
        if match:
            data[match.group(1).strip()] = match.group(2).strip()
    project_data["copyright"] = data.get("copyright", "Unknown")
    project_data["theme"] = data.get("html_theme", "default")
    if len(project.theme) > 20:
        project_data["theme"] = "default"
    project_data["suffix"] = data.get("source_suffix", ".rst")
    project_data["path"] = os.getcwd()
    try:
        project_data["version"] = str(decimal.Decimal(data.get("version")))
    except (TypeError, decimal.InvalidOperation):
        project_data["version"] = ""
    api.project(project.pk).put(project_data)
def update_docs(pk, record=True, pdf=True, man=True, epub=True, dash=True,
                version_pk=None, force=False, **kwargs):
    """The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we
    created it. Then it will build the html docs and other requested parts.
    It also handles clearing the varnish cache.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful
        for preventing changes visible to the end-user when running commands
        from the shell, for example.
    """
    ###
    # Handle passed in arguments
    ###
    project_data = api.project(pk).get()
    project = make_api_project(project_data)

    # Prevent saving the temporary Project instance
    def new_save(*args, **kwargs):
        log.warning("Called save on a non-real object.")
        return 0

    project.save = new_save

    log.info("Building %s" % project)

    if version_pk:
        version_data = api.version(version_pk).get()
    else:
        branch = project.default_branch or project.vcs_repo().fallback_branch
        try:
            # Use latest version
            version_data = (api.version(
                project.slug).get(slug='latest')['objects'][0])
        except (slumber.exceptions.HttpClientError, IndexError):
            # Create the latest version since it doesn't exist
            version_data = dict(
                project='/api/v1/project/%s/' % project.pk,
                slug='latest',
                active=True,
                verbose_name='latest',
                identifier=branch,
            )
            try:
                version_data = api.version.post(version_data)
            except Exception as e:
                log.info("Exception in creating version: %s" % e)
                raise e

    version = make_api_version(version_data)
    version.save = new_save

    if not version_pk:
        # Lots of course correction.
        to_save = False
        if not version.verbose_name:
            version_data['verbose_name'] = 'latest'
            to_save = True
        if not version.active:
            version_data['active'] = True
            to_save = True
        if version.identifier != branch:
            version_data['identifier'] = branch
            to_save = True
        if to_save:
            version_data['project'] = ("/api/v1/version/%s/" %
                                       version_data['project'].pk)
            api.version(version.pk).put(version_data)

    if record:
        # Create Build Object.
        build = api.build.post(
            dict(
                project='/api/v1/project/%s/' % project.pk,
                version='/api/v1/version/%s/' % version.pk,
                type='html',
                state='triggered',
            ))
    else:
        build = {}

    try:
        log.info("Updating docs from VCS")
        update_output = update_imported_docs(version.pk)
        #update_output = update_result.get()
    except ProjectImportError, err:
        log.error("Failed to import project; skipping build.", exc_info=True)
        build['state'] = 'finished'
        build['setup_error'] = ('Failed to import project; skipping build.\n'
                                'Please make sure your repo is correct and '
                                'you have a conf.py')
        api.build(build['id']).put(build)
        return False
def update_imported_docs(version_pk):
    """
    Check out or update the given project's repository.
    """
    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project

    # Make Dirs
    if not os.path.exists(project.doc_path):
        os.makedirs(project.doc_path)

    with project.repo_lock(getattr(settings, 'REPO_LOCK_SECONDS', 30)):
        update_docs_output = {}
        if not project.vcs_repo():
            raise ProjectImportError(
                ("Repo type '{0}' unknown".format(project.repo_type)))

        # Get the actual code on disk
        if version:
            log.info('Checking out version {slug}: {identifier}'.format(
                slug=version.slug, identifier=version.identifier))
            version_slug = version.slug
            version_repo = project.vcs_repo(version_slug)
            update_docs_output['checkout'] = version_repo.checkout(
                version.identifier)
        else:
            # Does this ever get called?
            log.info('Updating to latest revision')
            version_slug = 'latest'
            version_repo = project.vcs_repo(version_slug)
            update_docs_output['checkout'] = version_repo.update()

        # Ensure we have a conf file (an exception is raised if not)
        project.conf_file(version.slug)

        # Do Virtualenv bits:
        if project.use_virtualenv:
            if project.use_system_packages:
                site_packages = '--system-site-packages'
            else:
                site_packages = '--no-site-packages'
            # Here the command has been modified to support different
            # interpreters.
            update_docs_output['venv'] = run(
                '{cmd} --distribute {site_packages} {path}'.format(
                    cmd='virtualenv -p {interpreter}'.format(
                        interpreter=project.python_interpreter),
                    site_packages=site_packages,
                    path=project.venv_path(version=version_slug)))
            # Other code expects sphinx-build to be installed inside the
            # virtualenv.  Using the -I option makes sure it gets installed
            # even if it is already installed system-wide (and
            # --system-site-packages is used)
            if project.use_system_packages:
                ignore_option = '-I'
            else:
                ignore_option = ''
            if project.python_interpreter != 'python3':
                sphinx = (
                    'hg+http://bitbucket.org/birkenfeld/sphinx/@d4c6ac1fcc9c'
                    '#egg=Sphinx')
                update_docs_output['sphinx'] = run(
                    ('{cmd} install -U {ignore_option} {sphinx} '
                     'virtualenv==1.8.2 distribute==0.6.28 '
                     'docutils==0.8.1').format(
                        cmd=project.venv_bin(version=version_slug, bin='pip'),
                        sphinx=sphinx, ignore_option=ignore_option))
            else:
                sphinx = 'sphinx==1.1.3'
                # python 3 specific hax
                update_docs_output['sphinx'] = run(
                    ('{cmd} install {ignore_option} {sphinx} '
                     'virtualenv==1.9.1 docutils==0.11').format(
                        cmd=project.venv_bin(version=version_slug, bin='pip'),
                        sphinx=sphinx, ignore_option=ignore_option))

            if project.requirements_file:
                os.chdir(project.checkout_path(version_slug))
                update_docs_output['requirements'] = run(
                    '{cmd} install --exists-action=w -r {requirements}'.format(
                        cmd=project.venv_bin(version=version_slug, bin='pip'),
                        requirements=project.requirements_file))
            os.chdir(project.checkout_path(version_slug))
            if getattr(settings, 'USE_PIP_INSTALL', False):
                update_docs_output['install'] = run(
                    '{cmd} install --ignore-installed .'.format(
                        cmd=project.venv_bin(version=version_slug,
                                             bin='pip')))
            else:
                update_docs_output['install'] = run(
                    '{cmd} setup.py install --force'.format(
                        cmd=project.venv_bin(version=version_slug,
                                             bin='python')))

        # Update tags/version
        version_post_data = {'repo': version_repo.repo_url}

        if version_repo.supports_tags:
            version_post_data['tags'] = [{
                'identifier': v.identifier,
                'verbose_name': v.verbose_name,
            } for v in version_repo.tags]

        if version_repo.supports_branches:
            version_post_data['branches'] = [{
                'identifier': v.identifier,
                'verbose_name': v.verbose_name,
            } for v in version_repo.branches]

        try:
            api.project(project.pk).sync_versions.post(
                json.dumps(version_post_data))
        except Exception, e:
            print "Sync Versions Exception: %s" % e.message
def update_imported_docs(version_pk):
    """
    Check out or update the given project's repository.
    """
    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project

    # Make Dirs
    if not os.path.exists(project.doc_path):
        os.makedirs(project.doc_path)

    with project.repo_lock(getattr(settings, 'REPO_LOCK_SECONDS', 30)):
        update_docs_output = {}
        if not project.vcs_repo():
            raise ProjectImportError(("Repo type '{0}' unknown"
                                      .format(project.repo_type)))

        # Get the actual code on disk
        if version:
            log.info('Checking out version {slug}: {identifier}'.format(
                slug=version.slug, identifier=version.identifier))
            version_slug = version.slug
            version_repo = project.vcs_repo(version_slug)
            update_docs_output['checkout'] = version_repo.checkout(
                version.identifier
            )
        else:
            # Does this ever get called?
            log.info('Updating to latest revision')
            version_slug = 'latest'
            version_repo = project.vcs_repo(version_slug)
            update_docs_output['checkout'] = version_repo.update()

        # Ensure we have a conf file (an exception is raised if not)
        project.conf_file(version.slug)

        # Do Virtualenv bits:
        if project.use_virtualenv:
            if project.use_system_packages:
                site_packages = '--system-site-packages'
            else:
                site_packages = '--no-site-packages'
            # Here the command has been modified to support different
            # interpreters.
            update_docs_output['venv'] = run(
                '{cmd} --distribute {site_packages} {path}'.format(
                    cmd='virtualenv -p {interpreter}'.format(
                        interpreter=project.python_interpreter),
                    site_packages=site_packages,
                    path=project.venv_path(version=version_slug)
                )
            )
            # Other code expects sphinx-build to be installed inside the
            # virtualenv.  Using the -I option makes sure it gets installed
            # even if it is already installed system-wide (and
            # --system-site-packages is used)
            if project.use_system_packages:
                ignore_option = '-I'
            else:
                ignore_option = ''
            if project.python_interpreter != 'python3':
                sphinx = ('hg+http://bitbucket.org/birkenfeld/sphinx/@d4c6ac1fcc9c'
                          '#egg=Sphinx')
                update_docs_output['sphinx'] = run(
                    ('{cmd} install -U {ignore_option} {sphinx} '
                     'virtualenv==1.8.2 distribute==0.6.28 '
                     'docutils==0.8.1').format(
                        cmd=project.venv_bin(version=version_slug, bin='pip'),
                        sphinx=sphinx, ignore_option=ignore_option))
            else:
                sphinx = 'sphinx==1.1.3'
                # python 3 specific hax
                update_docs_output['sphinx'] = run(
                    ('{cmd} install {ignore_option} {sphinx} '
                     'virtualenv==1.9.1 docutils==0.11').format(
                        cmd=project.venv_bin(version=version_slug, bin='pip'),
                        sphinx=sphinx, ignore_option=ignore_option))

            if project.requirements_file:
                os.chdir(project.checkout_path(version_slug))
                update_docs_output['requirements'] = run(
                    '{cmd} install --exists-action=w -r {requirements}'.format(
                        cmd=project.venv_bin(version=version_slug, bin='pip'),
                        requirements=project.requirements_file))
            os.chdir(project.checkout_path(version_slug))
            if getattr(settings, 'USE_PIP_INSTALL', False):
                update_docs_output['install'] = run(
                    '{cmd} install --ignore-installed .'.format(
                        cmd=project.venv_bin(version=version_slug, bin='pip')))
            else:
                update_docs_output['install'] = run(
                    '{cmd} setup.py install --force'.format(
                        cmd=project.venv_bin(version=version_slug, bin='python')))

        # Update tags/version
        version_post_data = {'repo': version_repo.repo_url}

        if version_repo.supports_tags:
            version_post_data['tags'] = [
                {'identifier': v.identifier,
                 'verbose_name': v.verbose_name,
                 } for v in version_repo.tags
            ]

        if version_repo.supports_branches:
            version_post_data['branches'] = [
                {'identifier': v.identifier,
                 'verbose_name': v.verbose_name,
                 } for v in version_repo.branches
            ]

        try:
            api.project(project.pk).sync_versions.post(
                json.dumps(version_post_data))
        except Exception, e:
            print "Sync Versions Exception: %s" % e.message
def update_docs(pk, record=True, pdf=True, man=True, epub=True,
                version_pk=None, force=False, **kwargs):
    """
    The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we
    created it. Then it will build the html docs and other requested parts.
    It also handles clearing the varnish cache.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful
        for preventing changes visible to the end-user when running commands
        from the shell, for example.
    """
    ###
    # Handle passed in arguments
    ###
    update_output = kwargs.get('update_output', {})
    project_data = api.project(pk).get()
    del project_data['users']
    del project_data['resource_uri']
    del project_data['absolute_url']
    project = Project(**project_data)

    # Prevent saving the temporary Project instance
    def new_save(*args, **kwargs):
        log.warning("Called save on a non-real object.")
        return 0

    project.save = new_save

    log.info("Building %s" % project)

    if version_pk:
        version_data = api.version(version_pk).get()
        del version_data['resource_uri']
    else:
        #Create or use the 'latest' branch, which is the default for a project.
        branch = project.default_branch or project.vcs_repo().fallback_branch
        try:
            version_data = api.version(
                project.slug).get(slug='latest')['objects'][0]
            del version_data['resource_uri']
        except (slumber.exceptions.HttpClientError, IndexError) as exc:
            #if exc.response.status_code in [404,500]:
            version_data = dict(
                project='/api/v1/project/%s/' % project.pk,
                slug='latest',
                active=True,
                verbose_name='latest',
                identifier=branch,
            )
            try:
                version_data = api.version.post(version_data)
                del version_data['resource_uri']
            except Exception as e:
                log.info("Exception in creating version: %s" % e)
                #raise e

    version_data['project'] = project
    version = Version(**version_data)
    version.save = new_save

    if not version_pk:
        #Lots of course correction.
        to_save = False
        if not version.verbose_name:
            version_data['verbose_name'] = 'latest'
            to_save = True
        if not version.active:
            version_data['active'] = True
            to_save = True
        if version.identifier != branch:
            version_data['identifier'] = branch
            to_save = True
        if to_save:
            version_data['project'] = ("/api/v1/version/%s/" %
                                       version_data['project'].pk)
            api.version(version.pk).put(version_data)

    if record:
        #Create Build Object.
        build = api.build.post(
            dict(
                project='/api/v1/project/%s/' % project.pk,
                version='/api/v1/version/%s/' % version.pk,
                type='html',
                state='triggered',
            ))
    else:
        build = {}

    #Make Dirs
    path = project.doc_path
    if not os.path.exists(path):
        os.makedirs(path)

    with project.repo_lock(30):
        if project.is_imported:
            try:
                update_output = update_imported_docs(project, version)
            except ProjectImportError, err:
                log.error("Failed to import project; skipping build.",
                          exc_info=True)
                build['state'] = 'finished'
                build['setup_error'] = (
                    'Failed to import project; skipping build.\n'
                    'Please make sure your repo is correct and you have '
                    'a conf.py')
                api.build(build['id']).put(build)
                return False
        else:
def update_docs(pk, version_pk=None, record=True, docker=False, pdf=True,
                man=True, epub=True, dash=True, search=True, force=False,
                intersphinx=True, localmedia=True, api=None, **kwargs):
    """
    The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we
    created it. Then it will build the html docs and other requested parts.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful
        for preventing changes visible to the end-user when running commands
        from the shell, for example.
    """
    # Dependency injection to allow for testing
    if api is None:
        api = tastyapi.api

    project_data = api.project(pk).get()
    project = make_api_project(project_data)
    log.info(
        LOG_TEMPLATE.format(project=project.slug, version='', msg='Building'))
    version = ensure_version(api, project, version_pk)
    build = create_build(version, api, record)
    results = {}

    try:
        record_build(api=api, build=build, record=record, results=results,
                     state='cloning')
        vcs_results = setup_vcs(version, build, api)
        results.update(vcs_results)

        if docker:
            record_build(api=api, build=build, record=record, results=results,
                         state='building')
            build_results = run_docker(version)
            results.update(build_results)
        else:
            record_build(api=api, build=build, record=record, results=results,
                         state='installing')
            setup_results = setup_environment(version)
            results.update(setup_results)

            record_build(api=api, build=build, record=record, results=results,
                         state='building')
            build_results = build_docs(version, force, pdf, man, epub, dash,
                                       search, localmedia)
            results.update(build_results)

        move_files(version, results)
        record_pdf(api=api, record=record, results=results, state='finished',
                   version=version)
        finish_build(version=version, build=build, results=results)

        if results['html'][0] == 0:
            # Mark version active on the site
            version_data = api.version(version.pk).get()
            version_data['active'] = True
            version_data['built'] = True
            # Need to delete this because a bug in tastypie breaks on the
            # users list.
            del version_data['project']
            try:
                api.version(version.pk).put(version_data)
            except Exception, e:
                log.error(LOG_TEMPLATE.format(
                    project=version.project.slug,
                    version=version.slug,
                    msg="Unable to put a new version"), exc_info=True)

    except vcs_support_utils.LockTimeout, e:
        results['checkout'] = (999, "",
                               "Version locked, retrying in 5 minutes.")
        log.info(
            LOG_TEMPLATE.format(project=version.project.slug,
                                version=version.slug,
                                msg="Unable to lock, will retry"))
        # http://celery.readthedocs.org/en/3.0/userguide/tasks.html#retrying
        # Should completely retry the task for us until max_retries is
        # exceeded
        update_docs.retry(exc=e, throw=False)
def update_docs(pk, record=True, pdf=True, man=True, epub=True, dash=True,
                search=True, version_pk=None, force=False, intersphinx=True,
                localmedia=True, api=None, **kwargs):
    """The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we
    created it. Then it will build the html docs and other requested parts.
    It also handles clearing the varnish cache.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful
        for preventing changes visible to the end-user when running commands
        from the shell, for example.
    """
    if api is None:
        api = tastyapi.api

    project_data = api.project(pk).get()
    project = make_api_project(project_data)

    if 'tryton' in project.repo:
        # Skip for now
        return

    log.info(LOG_TEMPLATE.format(project=project.slug, version='',
                                 msg='Building'))

    if version_pk:
        version_data = api.version(version_pk).get()
    else:
        branch = project.default_branch or project.vcs_repo().fallback_branch
        try:
            # Use latest version
            version_data = (api.version(project.slug)
                            .get(slug='latest')['objects'][0])
        except (slumber.exceptions.HttpClientError, IndexError):
            # Create the latest version since it doesn't exist
            version_data = dict(
                project='/api/v1/project/%s/' % project.pk,
                slug='latest',
                type='branch',
                active=True,
                verbose_name='latest',
                identifier=branch,
            )
            try:
                version_data = api.version.post(version_data)
            except Exception as e:
                log.info(LOG_TEMPLATE.format(
                    project=project.slug, version='',
                    msg='Exception in creating version: %s' % e))
                raise e

    version = make_api_version(version_data)

    if not version_pk:
        # Lots of course correction.
        to_save = False
        if not version.verbose_name:
            version_data['verbose_name'] = 'latest'
            to_save = True
        if not version.active:
            version_data['active'] = True
            to_save = True
        if version.identifier != branch:
            version_data['identifier'] = branch
            to_save = True
        if to_save:
            version_data['project'] = ("/api/v1/version/%s/" %
                                       version_data['project'].pk)
            api.version(version.pk).put(version_data)

    if record:
        # Create Build Object.
        build = api.build.post(dict(
            project='/api/v1/project/%s/' % project.pk,
            version='/api/v1/version/%s/' % version.pk,
            type='html',
            state='triggered',
        ))
    else:
        build = {}

    try:
        log.info(LOG_TEMPLATE.format(project=project.slug,
                                     version=version.slug,
                                     msg='Updating docs from VCS'))
        update_output = update_imported_docs(version.pk, api)
        #update_output = update_result.get()
    except ProjectImportError, err:
        log.error(LOG_TEMPLATE.format(
            project=project.slug, version=version.slug,
            msg='Failed to import project; skipping build'), exc_info=True)
        build['state'] = 'finished'
        build['setup_error'] = (
            'Failed to import project; skipping build.\n'
            '\nError\n-----\n\n%s' % err.message
        )
        api.build(build['id']).put(build)
        return False
def update_docs(
    pk, record=True, pdf=True, man=True, epub=True, dash=True, search=True,
    version_pk=None, force=False, intersphinx=True, api=None, **kwargs
):
    """The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we
    created it. Then it will build the html docs and other requested parts.
    It also handles clearing the varnish cache.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful
        for preventing changes visible to the end-user when running commands
        from the shell, for example.
    """
    if api is None:
        api = tastyapi.api

    project_data = api.project(pk).get()
    project = make_api_project(project_data)

    # if 'edx-platform' in project.repo:
    #     # Skip edx for now
    #     return

    log.info(LOG_TEMPLATE.format(project=project.slug, version="", msg="Building"))

    if version_pk:
        version_data = api.version(version_pk).get()
    else:
        branch = project.default_branch or project.vcs_repo().fallback_branch
        try:
            # Use latest version
            version_data = api.version(project.slug).get(slug="latest")["objects"][0]
        except (slumber.exceptions.HttpClientError, IndexError):
            # Create the latest version since it doesn't exist
            version_data = dict(
                project="/api/v1/project/%s/" % project.pk,
                slug="latest",
                type="branch",
                active=True,
                verbose_name="latest",
                identifier=branch,
            )
            try:
                version_data = api.version.post(version_data)
            except Exception as e:
                log.info(
                    LOG_TEMPLATE.format(project=project.slug, version="", msg="Exception in creating version: %s" % e)
                )
                raise e

    version = make_api_version(version_data)

    if not version_pk:
        # Lots of course correction.
        to_save = False
        if not version.verbose_name:
            version_data["verbose_name"] = "latest"
            to_save = True
        if not version.active:
            version_data["active"] = True
            to_save = True
        if version.identifier != branch:
            version_data["identifier"] = branch
            to_save = True
        if to_save:
            version_data["project"] = "/api/v1/version/%s/" % version_data["project"].pk
            api.version(version.pk).put(version_data)

    if record:
        # Create Build Object.
        build = api.build.post(
            dict(
                project="/api/v1/project/%s/" % project.pk,
                version="/api/v1/version/%s/" % version.pk,
                type="html",
                state="triggered",
            )
        )
    else:
        build = {}

    try:
        log.info(LOG_TEMPLATE.format(project=project.slug, version=version.slug, msg="Updating docs from VCS"))
        update_output = update_imported_docs(version.pk, api)
        # update_output = update_result.get()
    except ProjectImportError, err:
        log.error(
            LOG_TEMPLATE.format(
                project=project.slug, version=version.slug, msg="Failed to import project; skipping build"
            ),
            exc_info=True,
        )
        build["state"] = "finished"
        build["setup_error"] = "Failed to import project; skipping build.\n" "\nError\n-----\n\n%s" % err.message
        api.build(build["id"]).put(build)
        return False
def update_docs(pk, version_pk=None, record=True, docker=False, pdf=True,
                man=True, epub=True, dash=True, search=True, force=False,
                intersphinx=True, localmedia=True, api=None, **kwargs):
    """
    The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we
    created it. Then it will build the html docs and other requested parts.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful
        for preventing changes visible to the end-user when running commands
        from the shell, for example.
    """
    # Dependency injection to allow for testing
    if api is None:
        api = tastyapi.api

    project_data = api.project(pk).get()
    project = make_api_project(project_data)
    log.info(LOG_TEMPLATE.format(
        project=project.slug, version='', msg='Building'))
    version = ensure_version(api, project, version_pk)
    build = create_build(version, api, record)
    results = {}

    try:
        record_build(
            api=api, build=build, record=record, results=results,
            state='cloning')
        vcs_results = setup_vcs(version, build, api)
        if vcs_results:
            results.update(vcs_results)

        if docker:
            record_build(
                api=api, build=build, record=record, results=results,
                state='building')
            build_results = run_docker(version)
            results.update(build_results)
        else:
            record_build(
                api=api, build=build, record=record, results=results,
                state='installing')
            setup_results = setup_environment(version)
            results.update(setup_results)

            record_build(
                api=api, build=build, record=record, results=results,
                state='building')
            build_results = build_docs(
                version, force, pdf, man, epub, dash, search, localmedia)
            results.update(build_results)

        move_files(version, results)
        record_pdf(api=api, record=record, results=results, state='finished',
                   version=version)
        finish_build(version=version, build=build, results=results)

        if results['html'][0] == 0:
            # Mark version active on the site
            version_data = api.version(version.pk).get()
            version_data['active'] = True
            version_data['built'] = True
            # Need to delete this because a bug in tastypie breaks on the
            # users list.
            del version_data['project']
            try:
                api.version(version.pk).put(version_data)
            except Exception, e:
                log.error(LOG_TEMPLATE.format(project=version.project.slug,
                                              version=version.slug,
                                              msg="Unable to put a new version"),
                          exc_info=True)

    except vcs_support_utils.LockTimeout, e:
        results['checkout'] = (
            999, "", "Version locked, retrying in 5 minutes.")
        log.info(LOG_TEMPLATE.format(project=version.project.slug,
                                     version=version.slug,
                                     msg="Unable to lock, will retry"))
        # http://celery.readthedocs.org/en/3.0/userguide/tasks.html#retrying
        # Should completely retry the task for us until max_retries is
        # exceeded
        update_docs.retry(exc=e, throw=False)
def update_docs(pk, record=True, pdf=True, man=True, epub=True, dash=True,
                search=True, version_pk=None, force=False, intersphinx=True,
                localmedia=True, api=None, **kwargs):
    """The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we
    created it. Then it will build the html docs and other requested parts.
    It also handles clearing the varnish cache.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful
        for preventing changes visible to the end-user when running commands
        from the shell, for example.
    """
    if api is None:
        api = tastyapi.api

    project_data = api.project(pk).get()
    project = make_api_project(project_data)

    if 'tryton' in project.repo:
        # Skip for now
        return

    log.info(
        LOG_TEMPLATE.format(project=project.slug, version='', msg='Building'))

    if version_pk:
        version_data = api.version(version_pk).get()
    else:
        branch = project.default_branch or project.vcs_repo().fallback_branch
        try:
            # Use latest version
            version_data = (api.version(
                project.slug).get(slug='latest')['objects'][0])
        except (slumber.exceptions.HttpClientError, IndexError):
            # Create the latest version since it doesn't exist
            version_data = dict(
                project='/api/v1/project/%s/' % project.pk,
                slug='latest',
                type='branch',
                active=True,
                verbose_name='latest',
                identifier=branch,
            )
            try:
                version_data = api.version.post(version_data)
            except Exception as e:
                log.info(
                    LOG_TEMPLATE.format(
                        project=project.slug, version='',
                        msg='Exception in creating version: %s' % e))
                raise e

    version = make_api_version(version_data)

    if not version_pk:
        # Lots of course correction.
        to_save = False
        if not version.verbose_name:
            version_data['verbose_name'] = 'latest'
            to_save = True
        if not version.active:
            version_data['active'] = True
            to_save = True
        if version.identifier != branch:
            version_data['identifier'] = branch
            to_save = True
        if to_save:
            version_data['project'] = ("/api/v1/version/%s/" %
                                       version_data['project'].pk)
            api.version(version.pk).put(version_data)

    if record:
        # Create Build Object.
        build = api.build.post(
            dict(
                project='/api/v1/project/%s/' % project.pk,
                version='/api/v1/version/%s/' % version.pk,
                type='html',
                state='triggered',
            ))
    else:
        build = {}

    try:
        log.info(
            LOG_TEMPLATE.format(project=project.slug, version=version.slug,
                                msg='Updating docs from VCS'))
        update_output = update_imported_docs(version.pk, api)
        #update_output = update_result.get()
    except ProjectImportError, err:
        log.error(LOG_TEMPLATE.format(
            project=project.slug, version=version.slug,
            msg='Failed to import project; skipping build'), exc_info=True)
        build['state'] = 'finished'
        build['setup_error'] = ('Failed to import project; skipping build.\n'
                                '\nError\n-----\n\n%s' % err.message)
        api.build(build['id']).put(build)
        return False
def update_docs(
    pk, version_pk=None, build_pk=None, record=True, docker=False, search=True,
    force=False, intersphinx=True, localmedia=True, api=None, basic=False, **kwargs
):
    """
    The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we
    created it. Then it will build the html docs and other requested parts.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful
        for preventing changes visible to the end-user when running commands
        from the shell, for example.
    """
    # Dependency injection to allow for testing
    if api is None:
        api = tastyapi.api
        apiv2 = tastyapi.apiv2
    else:
        apiv2 = api

    start_time = datetime.datetime.utcnow()

    try:
        project_data = api.project(pk).get()
    except HttpClientError:
        log.exception(LOG_TEMPLATE.format(project=pk, version="", msg="Failed to get project data on build. Erroring."))

    project = make_api_project(project_data)

    # Don't build skipped projects
    if project.skip:
        log.info(LOG_TEMPLATE.format(project=project.slug, version="", msg="Skipping"))
        return
    else:
        log.info(LOG_TEMPLATE.format(project=project.slug, version="", msg="Building"))

    version = ensure_version(api, project, version_pk)
    build = create_build(build_pk)
    results = {}

    # Build Servery stuff
    try:
        record_build(api=api, build=build, record=record, results=results, state="cloning")
        vcs_results = setup_vcs(version, build, api)
        if vcs_results:
            results.update(vcs_results)

        if project.documentation_type == "auto":
            update_documentation_type(version, apiv2)

        if docker or settings.DOCKER_ENABLE:
            record_build(api=api, build=build, record=record, results=results, state="building")
            docker = DockerEnvironment(version)
            build_results = docker.build()
            results.update(build_results)
        else:
            record_build(api=api, build=build, record=record, results=results, state="installing")
            setup_results = setup_environment(version)
            results.update(setup_results)

            record_build(api=api, build=build, record=record, results=results, state="building")
            build_results = build_docs(version, force, search, localmedia)
            results.update(build_results)

    except vcs_support_utils.LockTimeout, e:
        results["checkout"] = (423, "", "Version locked, retrying in 5 minutes.")
        log.info(
            LOG_TEMPLATE.format(project=version.project.slug, version=version.slug, msg="Unable to lock, will retry")
        )
        # http://celery.readthedocs.org/en/3.0/userguide/tasks.html#retrying
        # Should completely retry the task for us until max_retries is exceeded
        update_docs.retry(exc=e, throw=False)
def update_docs(pk, version_pk=None, build_pk=None, record=True,
                docker=False, pdf=True, man=True, epub=True, dash=True,
                search=True, force=False, intersphinx=True, localmedia=True,
                api=None, basic=False, **kwargs):
    """
    The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we
    created it. Then it will build the html docs and other requested parts.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful
        for preventing changes visible to the end-user when running commands
        from the shell, for example.
    """
    # Dependency injection to allow for testing
    if api is None:
        api = tastyapi.api
        apiv2 = tastyapi.apiv2
    else:
        apiv2 = api

    start_time = datetime.datetime.utcnow()

    try:
        project_data = api.project(pk).get()
    except HttpClientError:
        log.exception(
            LOG_TEMPLATE.format(
                project=pk, version='',
                msg='Failed to get project data on build. Erroring.'))

    project = make_api_project(project_data)

    # Don't build skipped projects
    if project.skip:
        log.info(
            LOG_TEMPLATE.format(project=project.slug, version='',
                                msg='Skipping'))
        return
    else:
        log.info(
            LOG_TEMPLATE.format(project=project.slug, version='',
                                msg='Building'))

    version = ensure_version(api, project, version_pk)
    build = create_build(build_pk)
    results = {}

    # Build Servery stuff
    try:
        record_build(api=api, build=build, record=record, results=results,
                     state='cloning')
        vcs_results = setup_vcs(version, build, api)
        if vcs_results:
            results.update(vcs_results)

        if project.documentation_type == 'auto':
            update_documentation_type(version, apiv2)

        if docker or settings.DOCKER_ENABLE:
            record_build(api=api, build=build, record=record, results=results,
                         state='building')
            docker = DockerEnvironment(version)
            build_results = docker.build()
            results.update(build_results)
        else:
            record_build(api=api, build=build, record=record, results=results,
                         state='installing')
            setup_results = setup_environment(version)
            results.update(setup_results)

            record_build(api=api, build=build, record=record, results=results,
                         state='building')
            build_results = build_docs(version, force, pdf, man, epub, dash,
                                       search, localmedia)
            results.update(build_results)

    except vcs_support_utils.LockTimeout, e:
        results['checkout'] = (423, "",
                               "Version locked, retrying in 5 minutes.")
        log.info(
            LOG_TEMPLATE.format(project=version.project.slug,
                                version=version.slug,
                                msg="Unable to lock, will retry"))
        # http://celery.readthedocs.org/en/3.0/userguide/tasks.html#retrying
        # Should completely retry the task for us until max_retries is
        # exceeded
        update_docs.retry(exc=e, throw=False)
def update_docs(pk, record=True, pdf=True, man=True, epub=True,
                version_pk=None, force=False, **kwargs):
    """
    The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we
    created it. Then it will build the html docs and other requested parts.
    It also handles clearing the varnish cache.
    """
    ###
    # Handle passed in arguments
    ###
    update_output = kwargs.get("update_output", {})
    project_data = api.project(pk).get()
    del project_data["users"]
    del project_data["resource_uri"]
    del project_data["absolute_url"]
    project = Project(**project_data)

    def new_save(*args, **kwargs):
        # fields = [(field, field.value_to_string(self)) for field in self._meta.fields]
        print "*** Called save on a non-real object."
        # print fields
        # raise TypeError('Not a real model')
        return 0

    project.save = new_save

    print "Building %s" % project

    if version_pk:
        version_data = api.version(version_pk).get()
        del version_data["resource_uri"]
    else:
        # Create or use the 'latest' branch, which is the default for a project.
        branch = project.default_branch or project.vcs_repo().fallback_branch
        try:
            version_data = api.version(project.slug).get(slug="latest")["objects"][0]
            del version_data["resource_uri"]
        except (slumber.exceptions.HttpClientError, IndexError) as exc:
            # if exc.response.status_code in [404,500]:
            version_data = dict(
                project="/api/v1/project/%s/" % project.pk,
                slug="latest",
                active=True,
                verbose_name="latest",
                identifier=branch,
            )
            try:
                version_data = api.version.post(version_data)
                del version_data["resource_uri"]
            except Exception as e:
                raise e

    version_data["project"] = project
    version = Version(**version_data)
    version.save = new_save

    if not version_pk:
        # Lots of course correction.
        to_save = False
        if not version.verbose_name:
            version_data["verbose_name"] = "latest"
            to_save = True
        if not version.active:
            version_data["active"] = True
            to_save = True
        if version.identifier != branch:
            version_data["identifier"] = branch
            to_save = True
        if to_save:
            version_data["project"] = "/api/v1/version/%s/" % version_data["project"].pk
            api.version(version.pk).put(version_data)

    if record:
        # Create Build Object.
        build = api.build.post(
            dict(
                project="/api/v1/project/%s/" % project.pk,
                version="/api/v1/version/%s/" % version.pk,
                type="html",
                state="triggered",
            )
        )
    else:
        build = {}

    # Make Dirs
    path = project.doc_path
    if not os.path.exists(path):
        os.makedirs(path)

    with project.repo_lock(30):
        if project.is_imported:
            try:
                update_output = update_imported_docs(project, version)
            except ProjectImportError, err:
                print ("Error importing project: %s. Skipping build." % err)
                return False
            # scrape_conf_file(version)
        else:
def update_docs(pk, record=True, pdf=True, man=True, epub=True,
                version_pk=None, force=False, **kwargs):
    """
    The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we
    created it. Then it will build the html docs and other requested parts.
    It also handles clearing the varnish cache.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful
        for preventing changes visible to the end-user when running commands
        from the shell, for example.
    """
    ###
    # Handle passed in arguments
    ###
    project_data = api.project(pk).get()
    project = make_api_project(project_data)

    # Prevent saving the temporary Project instance
    def new_save(*args, **kwargs):
        log.warning("Called save on a non-real object.")
        return 0

    project.save = new_save

    log.info("Building %s" % project)

    if version_pk:
        version_data = api.version(version_pk).get()
    else:
        branch = project.default_branch or project.vcs_repo().fallback_branch
        try:
            # Use latest version
            version_data = api.version(project.slug).get(slug="latest")["objects"][0]
        except (slumber.exceptions.HttpClientError, IndexError):
            # Create the latest version since it doesn't exist
            version_data = dict(
                project="/api/v1/project/%s/" % project.pk,
                slug="latest",
                active=True,
                verbose_name="latest",
                identifier=branch,
            )
            try:
                version_data = api.version.post(version_data)
            except Exception as e:
                log.info("Exception in creating version: %s" % e)
                raise e

    version = make_api_version(version_data)
    version.save = new_save

    if not version_pk:
        # Lots of course correction.
        to_save = False
        if not version.verbose_name:
            version_data["verbose_name"] = "latest"
            to_save = True
        if not version.active:
            version_data["active"] = True
            to_save = True
        if version.identifier != branch:
            version_data["identifier"] = branch
            to_save = True
        if to_save:
            version_data["project"] = "/api/v1/version/%s/" % version_data["project"].pk
            api.version(version.pk).put(version_data)

    if record:
        # Create Build Object.
        build = api.build.post(
            dict(
                project="/api/v1/project/%s/" % project.pk,
                version="/api/v1/version/%s/" % version.pk,
                type="html",
                state="triggered",
            )
        )
    else:
        build = {}

    try:
        log.info("Updating docs from VCS")
        update_output = update_imported_docs(version.pk)
        # update_output = update_result.get()
    except ProjectImportError, err:
        log.error("Failed to import project; skipping build.", exc_info=True)
        build["state"] = "finished"
        build["setup_error"] = (
            "Failed to import project; skipping build.\n"
            "Please make sure your repo is correct and you have a conf.py"
        )
        api.build(build["id"]).put(build)
        return False
def update_docs(pk, record=True, pdf=True, man=True, epub=True,
                version_pk=None, force=False, **kwargs):
    """
    The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we
    created it. Then it will build the html docs and other requested parts.
    It also handles clearing the varnish cache.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful
        for preventing changes visible to the end-user when running commands
        from the shell, for example.
    """
    ###
    # Handle passed in arguments
    ###
    update_output = kwargs.get('update_output', {})
    project_data = api.project(pk).get()
    del project_data['users']
    del project_data['resource_uri']
    del project_data['absolute_url']
    project = Project(**project_data)

    # Prevent saving the temporary Project instance
    def new_save(*args, **kwargs):
        log.warning("Called save on a non-real object.")
        return 0

    project.save = new_save

    log.info("Building %s" % project)

    if version_pk:
        version_data = api.version(version_pk).get()
        del version_data['resource_uri']
    else:
        #Create or use the 'latest' branch, which is the default for a project.
        branch = project.default_branch or project.vcs_repo().fallback_branch
        try:
            version_data = api.version(
                project.slug).get(slug='latest')['objects'][0]
            del version_data['resource_uri']
        except (slumber.exceptions.HttpClientError, IndexError) as exc:
            #if exc.response.status_code in [404,500]:
            version_data = dict(
                project='/api/v1/project/%s/' % project.pk,
                slug='latest',
                active=True,
                verbose_name='latest',
                identifier=branch,
            )
            try:
                version_data = api.version.post(version_data)
                del version_data['resource_uri']
            except Exception as e:
                raise e

    version_data['project'] = project
    version = Version(**version_data)
    version.save = new_save

    if not version_pk:
        #Lots of course correction.
        to_save = False
        if not version.verbose_name:
            version_data['verbose_name'] = 'latest'
            to_save = True
        if not version.active:
            version_data['active'] = True
            to_save = True
        if version.identifier != branch:
            version_data['identifier'] = branch
            to_save = True
        if to_save:
            version_data['project'] = ("/api/v1/version/%s/" %
                                       version_data['project'].pk)
            api.version(version.pk).put(version_data)

    if record:
        #Create Build Object.
        build = api.build.post(dict(
            project='/api/v1/project/%s/' % project.pk,
            version='/api/v1/version/%s/' % version.pk,
            type='html',
            state='triggered',
        ))
    else:
        build = {}

    #Make Dirs
    path = project.doc_path
    if not os.path.exists(path):
        os.makedirs(path)

    with project.repo_lock(30):
        if project.is_imported:
            try:
                update_output = update_imported_docs(project, version)
            except ProjectImportError, err:
                log.error("Failed to import project; skipping build.",
                          exc_info=True)
                return False
        else: