def handle(self, *args, **options):
    ''' Build/index all versions or a single project's version '''
    project = options['project']
    if project:
        queryset = Version.objects.public(project__slug=project)
        log.info("Building all versions for %s" % project)
    elif getattr(settings, 'INDEX_ONLY_LATEST', True):
        queryset = Version.objects.public().filter(slug='latest')
    else:
        queryset = Version.objects.public()
    for version in queryset:
        log.info("Reindexing %s" % version)
        try:
            commit = version.project.vcs_repo(version.slug).commit
        except:
            # This will happen on prod
            commit = None
        try:
            page_list = parse_json.process_all_json_files(version, build_dir=False)
            index_search_request(version=version, page_list=page_list, commit=commit,
                                 project_scale=0, page_scale=0,
                                 section=False, delete=False)
        except Exception:
            log.error('Build failed for %s' % version, exc_info=True)

def handle(self, *args, **options):
    ''' Build/index all versions or a single project's version '''
    project = options['project']
    if project:
        queryset = Version.objects.public(project__slug=project)
        log.info("Building all versions for %s" % project)
    elif getattr(settings, 'INDEX_ONLY_LATEST', True):
        queryset = Version.objects.public().filter(slug=LATEST)
    else:
        queryset = Version.objects.public()
    for version in queryset:
        log.info("Reindexing %s" % version)
        try:
            commit = version.project.vcs_repo(version.slug).commit
        except:
            # This will happen on prod
            commit = None
        try:
            page_list = parse_json.process_all_json_files(version, build_dir=False)
            index_search_request(version=version, page_list=page_list, commit=commit,
                                 project_scale=0, page_scale=0,
                                 section=False, delete=False)
        except Exception:
            log.error('Build failed for %s' % version, exc_info=True)

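# A minimal sketch of how this management command might be invoked. The command
# name ("reindex_elasticsearch") is an assumption for illustration; only the
# --project option and the INDEX_ONLY_LATEST setting come from the handler above.
#
#   $ python manage.py reindex_elasticsearch                 # all public versions,
#                                                            # or only the latest one
#                                                            # when INDEX_ONLY_LATEST=True
#   $ python manage.py reindex_elasticsearch --project=pip   # one project's public versions
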
def update_search(version_pk, commit):
    version = Version.objects.get(pk=version_pk)

    if 'sphinx' in version.project.documentation_type:
        page_list = process_all_json_files(version, build_dir=False)
    elif 'mkdocs' in version.project.documentation_type:
        page_list = process_mkdocs_json(version, build_dir=False)
    else:
        log.error('Unknown documentation type: %s' % version.project.documentation_type)
        return

    log_msg = ' '.join([page['path'] for page in page_list])
    log.info("(Search Index) Sending Data: %s [%s]" % (version.project.slug, log_msg))
    index_search_request(
        version=version,
        page_list=page_list,
        commit=commit,
        project_scale=0,
        page_scale=0,
        # Don't index sections to speed up indexing.
        # They aren't currently exposed anywhere.
        section=False,
    )

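# A minimal sketch of how update_search might be dispatched once a build has
# produced its JSON artifacts. Treating it as a Celery task (hence .delay())
# and the finish_build hook name are assumptions for illustration; only
# version_pk and commit come from the signature above.
def finish_build(version, commit):
    # Indexing runs out of band so a search hiccup never fails the build itself.
    update_search.delay(version_pk=version.pk, commit=commit)
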
def index_search(request):
    data = request.DATA['data']
    project_pk = data['project_pk']
    version_pk = data['version_pk']
    project = Project.objects.get(pk=project_pk)
    version = Version.objects.get(pk=version_pk)
    utils.index_search_request(version=version, page_list=data['page_list'])
    return Response({'indexed': True})

def index_search(request): """ Add things to the search index. """ data = request.DATA['data'] project_pk = data['project_pk'] version_pk = data['version_pk'] project = Project.objects.get(pk=project_pk) version = Version.objects.get(pk=version_pk) utils.index_search_request(version=version, page_list=data['page_list']) return Response({'indexed': True})
def index_search(request): """ Add things to the search index. """ data = request.DATA['data'] project_pk = data['project_pk'] version_pk = data['version_pk'] commit = data.get('commit') project = Project.objects.get(pk=project_pk) version = Version.objects.get(pk=version_pk) utils.index_search_request(version=version, page_list=data['page_list'], commit=commit) return Response({'indexed': True})
def update_search(version_pk, commit):
    version = Version.objects.get(pk=version_pk)

    if 'sphinx' in version.project.documentation_type:
        page_list = process_all_json_files(version, build_dir=False)
    if 'mkdocs' in version.project.documentation_type:
        page_list = process_mkdocs_json(version, build_dir=False)

    log_msg = ' '.join([page['path'] for page in page_list])
    log.info("(Search Index) Sending Data: %s [%s]" % (version.project.slug, log_msg))
    index_search_request(version=version, page_list=page_list, commit=commit)

def index_search(request): """ Add things to the search index. """ data = request.DATA['data'] project_pk = data['project_pk'] version_pk = data['version_pk'] commit = data.get('commit') project = Project.objects.get(pk=project_pk) version = Version.objects.get(pk=version_pk) resp = requests.get('https://api.grokthedocs.com/api/v1/index/1/heatmap/', params={'project': project.slug, 'compare': True}) ret_json = resp.json() project_scale = ret_json.get('scaled_project', {}).get(project.slug) page_scale = ret_json.get('scaled_page', {}).get(page['path'], 1) utils.index_search_request(version=version, page_list=data['page_list'], commit=commit, project_scale=project_scale, page_scale=page_scale) return Response({'indexed': True})
def handle(self, *args, **options):
    ''' Build/index all versions or a single project's version '''
    project = options['project']
    if project:
        queryset = Version.objects.public(project__slug=project)
        log.info("Building all versions for %s" % project)
    elif getattr(settings, 'INDEX_ONLY_LATEST', True):
        queryset = Version.objects.public().filter(slug='latest')
    else:
        queryset = Version.objects.public()
    for version in queryset:
        log.info("Reindexing %s" % version)
        try:
            page_list = parse_json.process_all_json_files(version, build_dir=False)
            index_search_request(version=version, page_list=page_list)
        except Exception:
            log.error('Build failed for %s' % version, exc_info=True)

def index_search(request): """ Add things to the search index. """ data = request.DATA['data'] project_pk = data['project_pk'] version_pk = data['version_pk'] commit = data.get('commit') project = Project.objects.get(pk=project_pk) version = Version.objects.get(pk=version_pk) project_scale = 1 page_scale = 1 utils.index_search_request(version=version, page_list=data['page_list'], commit=commit, project_scale=project_scale, page_scale=page_scale) return Response({'indexed': True})