def update_community_score(repo):
    """Recompute and persist *repo*'s community score from its surveys.

    Each answered survey field is a 1-5 rating; answers are normalized to
    the 0-1 range, averaged, then scaled back to 0-5. The result and the
    survey count are saved on the repository, and the new score is pushed
    to influx under the ``content_score`` measurement.
    """
    surveys = models.CommunitySurvey.objects.filter(repository=repo)
    score = 0
    answer_count = 0
    survey_score = 0.0
    for survey in surveys:
        for k in SURVEY_FIElDS:
            data = getattr(survey, k)
            if data is not None:
                answer_count += 1
                # Normalize a 1-5 answer onto the 0-1 range.
                survey_score += (data - 1) / 4

    # Average and convert to 0-5 scale.
    # BUG FIX: guard the division — with no answered fields (no surveys,
    # or surveys whose fields are all None) the original raised
    # ZeroDivisionError; fall back to the initial score of 0 instead.
    if answer_count:
        score = (survey_score / answer_count) * 5

    repo.community_score = score
    # len() reuses the queryset cache already populated by the loop above.
    repo.community_survey_count = len(surveys)
    repo.save()

    namespace = repo.provider_namespace.namespace.name
    fields = {
        'content_name': '{}.{}'.format(namespace, repo.name),
        'content_id': repo.id,
        'community_score': repo.community_score,
        'quality_score': repo.quality_score,
    }
    serializers.influx_insert_internal({
        'measurement': 'content_score',
        'fields': fields
    })
def update(self, request, *args, **kwargs):
    """Save the user's preference changes and emit follower metrics.

    Any repository or namespace whose followed-state flipped (computed as
    the symmetric difference between the submitted sets and the currently
    stored sets) gets its new follower count recorded in influx.
    """
    instance = self.get_object()
    serializer = self.get_serializer(instance=instance, data=request.data)
    serializer.is_valid(raise_exception=True)

    # Symmetric difference = items followed before XOR after the update,
    # i.e. exactly those whose follower count changed. Compute before
    # save() so the "before" sets reflect the stored state.
    changed_repos = (
        set(serializer.validated_data['repositories_followed'])
        ^ set(instance.repositories_followed.all()))
    changed_namespaces = (
        set(serializer.validated_data['namespaces_followed'])
        ^ set(instance.namespaces_followed.all()))

    serializer.save()

    for changed_repo in changed_repos:
        follower_count = models.UserPreferences.objects.filter(
            repositories_followed=changed_repo).count()
        namespace_name = changed_repo.provider_namespace.namespace.name
        serializers.influx_insert_internal({
            'measurement': 'content_follower',
            'fields': {
                'content_name': '{}.{}'.format(
                    namespace_name, changed_repo.name),
                'content_id': changed_repo.id,
                'follower_count': follower_count,
            },
        })

    for changed_namespace in changed_namespaces:
        follower_count = models.UserPreferences.objects.filter(
            namespaces_followed=changed_namespace).count()
        serializers.influx_insert_internal({
            'measurement': 'author_follower',
            'fields': {
                'author_name': changed_namespace.name,
                'author_id': changed_namespace.id,
                'follower_count': follower_count,
            },
        })

    return Response(serializer.data)
def list(self, request, *args, **kwargs):
    """List content, supporting case-insensitive owner/name lookups.

    When ``owner__username`` is supplied the queryset is filtered manually
    with ``iexact`` matches for owner and name. A ``name`` lookup is also
    treated as a download: the matched repository's download counter is
    incremented and a ``content_download`` metric is written to influx.
    Without ``owner__username`` the request falls through to the default
    DRF list behavior.
    """
    if request.query_params.get('owner__username'):
        params = {}
        for key, val in request.query_params.items():
            if key == 'owner__username':
                params['namespace__name__iexact'] = val
            elif key == 'name':
                params['name__iexact'] = val
            elif key not in ('page', 'page_size'):
                # Pass all other filters through untouched; pagination
                # params are handled by paginate_queryset below.
                params[key] = val
        qs = self.get_queryset()
        qs = qs.filter(**params)
        page = self.paginate_queryset(qs)

        if request.query_params.get('name'):
            content = qs.first()
            if content is not None:
                # Count this name lookup as a download.
                content.repository.download_count += 1
                content.repository.save()
                name = '{}.{}'.format(
                    content.namespace.name,
                    content.repository.name
                )
                data = {
                    'measurement': 'content_download',
                    'fields': {
                        'content_name': name,
                        'content_id': content.repository.id,
                        'download_count': content.repository.download_count
                    }
                }
                serializers.influx_insert_internal(data)

        if page is not None:
            serializer = self.get_serializer(page, many=True)
            return self.get_paginated_response(serializer.data)
        serializer = self.get_serializer(qs, many=True)
        return Response(serializer.data)

    # BUG FIX: super().list() is already bound to self; the original
    # passed `self` again, shifting every positional argument by one
    # (the `request` parameter received the viewset instance).
    return super().list(request, *args, **kwargs)
def update_repo_score(repo):
    """Refresh *repo*'s community score from its surveys and log to influx.

    Delegates the score math to ``calculate_survey_score``, stores the
    result plus the survey count on the repository, then records the new
    scores under the ``content_score`` measurement.
    """
    surveys = models.RepositorySurvey.objects.filter(repository=repo)

    repo.community_score = calculate_survey_score(surveys)
    repo.community_survey_count = len(surveys)
    repo.save()

    full_name = '{}.{}'.format(
        repo.provider_namespace.namespace.name, repo.name)
    serializers.influx_insert_internal({
        'measurement': 'content_score',
        'fields': {
            'content_name': full_name,
            'content_id': repo.id,
            'community_score': repo.community_score,
            'quality_score': repo.quality_score,
        },
    })
def _import_repository(import_task, logger):
    """Run a full import of the repository attached to *import_task*.

    Steps, in order: clone/inspect the repo via the importer, apply any
    detected rename, import each content item, delete content that was
    not part of this import, refresh readme/namespace/repo metadata,
    update task messages and quality score, update repository versions,
    finish the task, notify followers on a first import, and finally
    push the repository's scores to influx.

    Raises:
        exc.TaskError: when the importer fails with ImporterError.
    """
    repository = import_task.repository
    repo_full_name = (
        repository.provider_namespace.name
        + "/" + repository.original_name)
    logger.info(u'Starting import: task_id={}, repository={}'
                .format(import_task.id, repo_full_name))
    logger.info(' ')

    token = _get_social_token(import_task)
    gh_api = github.Github(token)
    gh_repo = gh_api.get_repo(repo_full_name)

    try:
        repo_info = i_repo.import_repository(
            repository.clone_url,
            temp_dir=settings.CONTENT_DOWNLOAD_DIR,
            logger=logger)
    except i_exc.ImporterError as e:
        # Surface importer failures as task errors for the task runner.
        raise exc.TaskError(str(e))

    repository.import_branch = repo_info.branch
    repository.format = repo_info.format.value
    repository.travis_status_url = import_task.travis_status_url
    repository.travis_build_url = import_task.travis_build_url

    # Rename the repository if the importer reported a different name.
    if repo_info.name:
        old_name = repository.name
        new_name = repo_info.name
        if old_name != new_name:
            logger.info(
                u'Updating repository name "{old_name}" -> "{new_name}"'
                .format(old_name=old_name, new_name=new_name))
            repository.name = new_name

    context = utils.Context(
        repository=repository,
        github_token=token,
        github_client=gh_api,
        github_repo=gh_repo)

    # Import each content item, collecting the ids of everything imported
    # so stale content can be deleted afterwards.
    new_content_objs = []
    for content_info in repo_info.contents:
        content_logger = logutils.ContentTypeAdapter(
            logger, content_info.content_type, content_info.name)
        importer_cls = importers.get_importer(content_info.content_type)
        importer = importer_cls(context, content_info, logger=content_logger)

        # Prefer an issue tracker URL from role metadata; fall back to
        # the GitHub issues page when the repo has issues enabled.
        issue_tracker_url = ''
        if (hasattr(content_info, 'role_meta')
                and getattr(content_info, 'role_meta')
                and content_info.role_meta.get('issue_tracker_url')):
            issue_tracker_url = content_info.role_meta['issue_tracker_url']
        elif gh_repo.has_issues:
            issue_tracker_url = gh_repo.html_url + '/issues'
        repository.issue_tracker_url = issue_tracker_url

        content_obj = importer.do_import()
        new_content_objs.append(content_obj.id)

    # Delete content that existed before but was not in this import.
    for obj in repository.content_objects.exclude(id__in=new_content_objs):
        logger.info(
            'Deleting Content instance: content_type={0}, '
            'namespace={1}, name={2}'.format(
                obj.content_type, obj.namespace, obj.name))
        obj.delete()

    _update_readme(repository, repo_info.readme, gh_api, gh_repo)
    _update_namespace(gh_repo)
    _update_repo_info(
        repository, gh_repo, repo_info.commit, repo_info.description)
    repository.save()

    _update_task_msg_content_id(import_task)
    _update_quality_score(import_task)
    _cleanup_old_task_msg(import_task)

    # Updating versions has to go last because:
    # - we don't want to update the version number if the import fails.
    # - version updates send out email notifications and we don't want to
    #   notify people that an update happened if it failed on one of the
    #   other steps
    _update_repository_versions(repository, gh_repo, logger)

    warnings = import_task.messages.filter(
        message_type=models.ImportTaskMessage.TYPE_WARNING).count()
    errors = import_task.messages.filter(
        message_type=models.ImportTaskMessage.TYPE_ERROR).count()
    import_task.finish_success(
        'Import completed with {0} warnings and {1} '
        'errors'.format(warnings, errors))

    # First successful import: notify the author's followers, then clear
    # the is_new flag so this only fires once.
    if repository.is_new:
        user_notifications.author_release.delay(repository.id)
        repository.is_new = False
        repository.save()

    namespace = repository.provider_namespace.namespace.name
    fields = {
        'content_name': '{}.{}'.format(namespace, repository.name),
        'content_id': repository.id,
        'community_score': repository.community_score,
        'quality_score': repository.quality_score,
    }
    serializers.influx_insert_internal({
        'measurement': 'content_score',
        'fields': fields
    })