def update_static_metadata(project_pk, path=None):
    """Update static metadata JSON file

    Metadata settings include the following project settings:

    version
        The default version for the project, default: `latest`

    language
        The default language for the project, default: `en`

    languages
        List of languages built by linked translation projects.
    """
    project = Project.objects.get(pk=project_pk)
    if not path:
        path = project.static_metadata_path()

    log.info(LOG_TEMPLATE.format(
        project=project.slug,
        version='',
        msg='Updating static metadata',
    ))
    translations = [trans.language for trans in project.translations.all()]
    languages = set(translations)
    # Convert to JSON safe types
    metadata = {
        'version': project.default_version,
        'language': project.language,
        'languages': list(languages),
        'single_version': project.single_version,
    }
    try:
        fh = open(path, 'w+')
        json.dump(metadata, fh)
        fh.close()
        Syncer.copy(path, path, host=socket.gethostname(), file=True)
    except (AttributeError, IOError) as e:
        log.debug(LOG_TEMPLATE.format(
            project=project.slug,
            version='',
            msg='Cannot write to metadata.json: {0}'.format(e),
        ))
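# Illustrative sketch, not part of the original module: a hypothetical helper
# showing how a consumer might read back the metadata.json written above. It
# assumes update_static_metadata() has already run for ``project`` and that
# ``json`` is available at module level, as the task itself requires.
def _example_read_static_metadata(project):
    with open(project.static_metadata_path()) as fh:
        metadata = json.load(fh)
    # Expected shape (values here are only examples):
    # {'version': 'latest', 'language': 'en',
    #  'languages': ['es', 'fr'], 'single_version': False}
    return metadata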
def move_files(version_pk, hostname, html=False, localmedia=False,
               search=False, pdf=False, epub=False):
    """Move built documentation artifacts for a version to their serving
    locations, copying only the output types that were requested.
    """
    version = Version.objects.get(pk=version_pk)

    if html:
        from_path = version.project.artifact_path(
            version=version.slug, type=version.project.documentation_type)
        target = version.project.rtd_build_path(version.slug)
        Syncer.copy(from_path, target, host=hostname)

    if 'sphinx' in version.project.documentation_type:
        if localmedia:
            from_path = version.project.artifact_path(
                version=version.slug, type='sphinx_localmedia')
            to_path = version.project.get_production_media_path(
                type='htmlzip', version_slug=version.slug, include_file=False)
            Syncer.copy(from_path, to_path, host=hostname)

        if search:
            from_path = version.project.artifact_path(
                version=version.slug, type='sphinx_search')
            to_path = version.project.get_production_media_path(
                type='json', version_slug=version.slug, include_file=False)
            Syncer.copy(from_path, to_path, host=hostname)

        # Always move PDFs because the return code lies.
        if pdf:
            from_path = version.project.artifact_path(
                version=version.slug, type='sphinx_pdf')
            to_path = version.project.get_production_media_path(
                type='pdf', version_slug=version.slug, include_file=False)
            Syncer.copy(from_path, to_path, host=hostname)

        if epub:
            from_path = version.project.artifact_path(
                version=version.slug, type='sphinx_epub')
            to_path = version.project.get_production_media_path(
                type='epub', version_slug=version.slug, include_file=False)
            Syncer.copy(from_path, to_path, host=hostname)

    if 'mkdocs' in version.project.documentation_type:
        if search:
            from_path = version.project.artifact_path(
                version=version.slug, type='mkdocs_json')
            to_path = version.project.get_production_media_path(
                type='json', version_slug=version.slug, include_file=False)
            Syncer.copy(from_path, to_path, host=hostname)
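# Illustrative sketch, not part of the original module: a hypothetical wrapper
# showing how move_files() might be invoked once a build finishes. It forwards
# the local hostname via socket.gethostname(), the same way
# update_static_metadata() does for Syncer.copy(), and copies only the HTML,
# PDF, and ePub outputs.
def _example_publish_build_outputs(version_pk):
    move_files(
        version_pk,
        hostname=socket.gethostname(),
        html=True,
        pdf=True,
        epub=True,
    )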