DjangoJSONEncoder, JSONResponseView, JqPaginatedListView, TechAdminRequiredMixin) from apps.common.utils import get_api_module from apps.document.models import DocumentProperty, TextUnitProperty from apps.task.forms import (LoadDocumentsForm, LocateTermsForm, LocateForm, ExistedClassifierClassifyForm, CreateClassifierClassifyForm, ClusterForm, SimilarityForm, PartySimilarityForm, CleanProjectForm, UpdateElasticSearchForm, TaskDetailForm, TotalCleanupForm, LoadFixtureForm, DumpFixtureForm) from apps.task.models import Task from apps.task.tasks import call_task, clean_tasks, purge_task from apps.dump.app_dump import get_model_fixture_dump, load_fixture_from_dump, download project_api_module = get_api_module('project') __author__ = "ContraxSuite, LLC; LexPredict, LLC" __copyright__ = "Copyright 2015-2018, ContraxSuite, LLC" __license__ = "https://github.com/LexPredict/lexpredict-contraxsuite/blob/1.1.5a/LICENSE" __version__ = "1.1.5a" __maintainer__ = "LexPredict, LLC" __email__ = "*****@*****.**" class BaseTaskView(AdminRequiredMixin, FormView): template_name = 'task/task_form.html' task_name = 'Task' def form_valid(self, form): data = form.cleaned_data
def get(self, request, *args, **kwargs):
    """
    Build a statistics payload and return it as a DRF ``Response``.

    The payload combines:
      * admin task counts (total and grouped by status), via the task API viewset;
      * project counts/progress aggregates, via the project API viewset;
      * task-queue counts/progress aggregates, via the task-queue API viewset;
      * row counts for documents, text units and every extracted-entity model.

    For users with ``is_reviewer`` set, all model counts are restricted to
    objects reachable from task queues the user reviews.

    NOTE(review): relies on module-level imports (pd, np, Document, User,
    the many entity models, get_api_module, Response) from the enclosing file.
    """
    # get admin tasks data
    task_api_module = get_api_module('task')
    task_api_view = task_api_module.TaskViewSet(request=request)
    # format_kwarg is normally set by DRF dispatch(); the viewset is called
    # directly here, so it must be stubbed out manually.
    task_api_view.format_kwarg = {}
    admin_task_df = pd.DataFrame(task_api_view.list(request=request).data)
    admin_task_total_count = admin_task_df.shape[0]
    # NOTE(review): falls back to the scalar 0 (not an empty dict) when there
    # are no tasks — consumers of this key see two different types.
    admin_task_by_status_count = dict(admin_task_df.groupby(['status']).size()) \
        if not admin_task_df.empty else 0

    # get projects data
    project_api_module = get_api_module('project')
    project_api_view = project_api_module.ProjectViewSet(request=request)
    project_api_view.format_kwarg = {}
    project_data = project_api_view.list(request=request).data
    if not project_data:
        # No projects: zero every aggregate in one chained assignment.
        project_total_count = project_completed_count = project_completed_weight = \
            project_progress_avg = project_documents_total_count = \
            project_documents_unique_count = 0
    else:
        # Flatten each project's nested 'progress' dict into the top level
        # so the DataFrame gets 'progress'/'total_documents_count' columns.
        for i in project_data:
            progress_data = i.pop('progress')
            i.update(progress_data)
        project_df = pd.DataFrame(project_data)
        # A project counts as completed when its progress reaches 100%.
        project_df['completed'] = np.where(project_df['progress'] == 100, 1, 0)
        project_total_count = project_df.shape[0]
        project_df_sum = project_df.sum()
        project_completed_count = project_df_sum.completed
        project_completed_weight = round(
            project_completed_count / project_total_count * 100, 1)
        project_progress_avg = round(project_df.mean().progress, 1)
        project_documents_total_count = project_df_sum.total_documents_count
        # Distinct documents attached to any project via a task queue.
        project_documents_unique_count = Document.objects.filter(
            taskqueue__project__isnull=False) \
            .distinct().count()

    # get task queues data
    task_queue_api_view = project_api_module.TaskQueueViewSet(
        request=request)
    task_queue_api_view.format_kwarg = {}
    task_queue_data = task_queue_api_view.list(request=request).data
    if not task_queue_data:
        # No task queues: zero every aggregate in one chained assignment.
        task_queue_total_count = task_queue_completed_count = task_queue_completed_weight = \
            task_queue_progress_avg = task_queue_documents_total_count = \
            task_queue_documents_unique_count = task_queue_reviewers_unique_count = 0
    else:
        # Same flattening as for projects: hoist the nested 'progress' dict.
        for i in task_queue_data:
            progress_data = i.pop('progress')
            i.update(progress_data)
        task_queue_df = pd.DataFrame(task_queue_data)
        task_queue_df['completed'] = np.where(
            task_queue_df['progress'] == 100, 1, 0)
        task_queue_total_count = task_queue_df.shape[0]
        task_queue_df_sum = task_queue_df.sum()
        task_queue_completed_count = task_queue_df_sum.completed
        task_queue_completed_weight = round(
            task_queue_completed_count / task_queue_total_count * 100, 1)
        task_queue_progress_avg = round(task_queue_df.mean().progress, 1)
        task_queue_documents_total_count = task_queue_df_sum.total_documents_count
        task_queue_documents_unique_count = Document.objects.filter(taskqueue__isnull=False) \
            .distinct().count()
        task_queue_reviewers_unique_count = User.objects.filter(taskqueue__isnull=False) \
            .distinct().count()

    # set counts depending on user role
    # Start from unrestricted managers; the reviewer branch below narrows
    # each of these to task-queue-visible rows before counting.
    documents = Document.objects
    document_properties = DocumentProperty.objects
    document_tags = DocumentTag.objects
    document_notes = DocumentNote.objects
    document_relations = DocumentRelation.objects
    document_clusters = DocumentCluster.objects
    text_units = TextUnit.objects
    tu_tags = TextUnitTag.objects
    tu_properties = TextUnitProperty.objects
    tu_classifications = TextUnitClassification.objects
    tu_classification_suggestions = TextUnitClassifierSuggestion.objects
    # Distinct class names double as "suggestion types".
    tuc_suggestion_types = TextUnitClassifierSuggestion.objects.distinct(
        'class_name')
    tu_notes = TextUnitNote.objects
    tu_clusters = TextUnitCluster.objects
    terms = Term.objects
    term_usages = TermUsage.objects
    amount_usages = AmountUsage.objects
    citation_usages = CitationUsage.objects
    copyright_usages = CopyrightUsage.objects
    court_usages = CourtUsage.objects
    currency_usages = CurrencyUsage.objects
    date_duration_usages = DateDurationUsage.objects
    date_usages = DateUsage.objects
    definition_usages = DefinitionUsage.objects
    distance_usages = DistanceUsage.objects
    geo_entities = GeoEntity.objects
    geo_entity_usages = GeoEntityUsage.objects
    geo_aliases = GeoAlias.objects
    geo_alias_usages = GeoAliasUsage.objects
    geo_relations = GeoRelation.objects
    parties = Party.objects
    party_usages = PartyUsage.objects
    percent_usages = PercentUsage.objects
    ratio_usages = RatioUsage.objects
    regulation_usages = RegulationUsage.objects
    trademark_usages = TrademarkUsage.objects
    url_usages = UrlUsage.objects

    if request.user.is_reviewer:
        # Reusable lookups: document-level vs text-unit-level models both
        # funnel through taskqueue__reviewers to scope rows to this user.
        document_filter_opts = dict(
            document__taskqueue__reviewers=request.user)
        tu_filter_opts = dict(
            text_unit__document__taskqueue__reviewers=request.user)
        documents = documents.filter(
            taskqueue__reviewers=request.user).distinct()
        document_properties = document_properties.filter(
            **document_filter_opts).distinct()
        document_tags = document_tags.filter(
            **document_filter_opts).distinct()
        document_notes = document_notes.filter(
            **document_filter_opts).distinct()
        # Relations count only when BOTH endpoints are reviewer-visible.
        document_relations = document_relations.filter(
            document_a__taskqueue__reviewers=request.user,
            document_b__taskqueue__reviewers=request.user).distinct()
        document_clusters = document_clusters.filter(
            documents__taskqueue__reviewers=request.user).distinct()
        text_units = text_units.filter(**document_filter_opts).distinct()
        tu_tags = tu_tags.filter(**tu_filter_opts).distinct()
        tu_properties = tu_properties.filter(**tu_filter_opts).distinct()
        tu_classifications = tu_classifications.filter(
            **tu_filter_opts).distinct()
        tu_classification_suggestions = tu_classification_suggestions.filter(
            **tu_filter_opts).distinct()
        tuc_suggestion_types = tuc_suggestion_types.filter(
            **tu_filter_opts).distinct('class_name')
        tu_notes = tu_notes.filter(**tu_filter_opts).distinct()
        tu_clusters = tu_clusters.filter(
            text_units__document__taskqueue__reviewers=request.user
        ).distinct()
        # Dictionary-style models (terms, geo entities, aliases, parties)
        # are scoped through their usage back-relations.
        terms = terms.filter(
            termusage__text_unit__document__taskqueue__reviewers=request.user).distinct()
        term_usages = term_usages.filter(**tu_filter_opts).distinct()
        amount_usages = amount_usages.filter(**tu_filter_opts).distinct()
        citation_usages = citation_usages.filter(
            **tu_filter_opts).distinct()
        copyright_usages = copyright_usages.filter(
            **tu_filter_opts).distinct()
        court_usages = court_usages.filter(**tu_filter_opts).distinct()
        currency_usages = currency_usages.filter(
            **tu_filter_opts).distinct()
        date_duration_usages = date_duration_usages.filter(
            **tu_filter_opts).distinct()
        date_usages = date_usages.filter(**tu_filter_opts).distinct()
        definition_usages = definition_usages.filter(
            **tu_filter_opts).distinct()
        distance_usages = distance_usages.filter(
            **tu_filter_opts).distinct()
        geo_aliases = geo_aliases.filter(
            geoaliasusage__text_unit__document__taskqueue__reviewers=request.user).distinct()
        geo_alias_usages = geo_alias_usages.filter(
            **tu_filter_opts).distinct()
        geo_entities = geo_entities.filter(
            geoentityusage__text_unit__document__taskqueue__reviewers=request.user).distinct()
        geo_entity_usages = geo_entity_usages.filter(
            **tu_filter_opts).distinct()
        # Like document relations: both entities must be reviewer-visible.
        geo_relations = geo_relations.filter(
            entity_a__geoentityusage__text_unit__document__taskqueue__reviewers=request.user,
            entity_b__geoentityusage__text_unit__document__taskqueue__reviewers=request.user) \
            .distinct()
        parties = parties.filter(
            partyusage__text_unit__document__taskqueue__reviewers=request.user).distinct()
        party_usages = party_usages.filter(**tu_filter_opts).distinct()
        percent_usages = percent_usages.filter(**tu_filter_opts).distinct()
        ratio_usages = ratio_usages.filter(**tu_filter_opts).distinct()
        regulation_usages = regulation_usages.filter(
            **tu_filter_opts).distinct()
        trademark_usages = trademark_usages.filter(
            **tu_filter_opts).distinct()
        url_usages = url_usages.filter(**tu_filter_opts).distinct()

    # Materialize every queryset into a count; each .count() is a separate
    # database query.
    data = {
        "document_count": documents.count(),
        "document_property_count": document_properties.count(),
        "document_tag_count": document_tags.count(),
        "document_note_count": document_notes.count(),
        "document_relation_count": document_relations.count(),
        "document_cluster_count": document_clusters.count(),
        "text_unit_count": text_units.count(),
        "text_unit_tag_count": tu_tags.count(),
        "text_unit_property_count": tu_properties.count(),
        "text_unit_classification_count": tu_classifications.count(),
        "text_unit_classification_suggestion_count": tu_classification_suggestions.count(),
        "text_unit_classification_suggestion_type_count": tuc_suggestion_types.count(),
        "text_unit_note_count": tu_notes.count(),
        "text_unit_cluster_count": tu_clusters.count(),
        "amount_usage_count": amount_usages.count(),
        "citation_usage_count": citation_usages.count(),
        "copyright_usage_count": copyright_usages.count(),
        # Court count is never reviewer-restricted, unlike the other models.
        "court_count": Court.objects.count(),
        "court_usage_count": court_usages.count(),
        "currency_usage_count": currency_usages.count(),
        "date_duration_usage_count": date_duration_usages.count(),
        "date_usage_count": date_usages.count(),
        "definition_usage_count": definition_usages.count(),
        "distance_usage_count": distance_usages.count(),
        "geo_alias_count": geo_aliases.count(),
        "geo_alias_usage_count": geo_alias_usages.count(),
        "geo_entity_count": geo_entities.count(),
        "geo_entity_usage_count": geo_entity_usages.count(),
        "geo_relation_count": geo_relations.count(),
        "party_count": parties.count(),
        "party_usage_count": party_usages.count(),
        "percent_usage_count": percent_usages.count(),
        "ratio_usage_count": ratio_usages.count(),
        "regulation_usage_count": regulation_usages.count(),
        "trademark_usage_count": trademark_usages.count(),
        "url_usage_count": url_usages.count(),
        "term_count": terms.count(),
        "term_usage_count": term_usages.count(),
        "project_total_count": project_total_count,
        "project_completed_count": project_completed_count,
        "project_completed_weight": project_completed_weight,
        "project_progress_avg": project_progress_avg,
        "project_documents_total_count": project_documents_total_count,
        "project_documents_unique_count": project_documents_unique_count,
        "task_queue_total_count": task_queue_total_count,
        "task_queue_completed_count": task_queue_completed_count,
        "task_queue_completed_weight": task_queue_completed_weight,
        "task_queue_progress_avg": task_queue_progress_avg,
        "task_queue_documents_total_count": task_queue_documents_total_count,
        "task_queue_documents_unique_count": task_queue_documents_unique_count,
        "task_queue_reviewers_unique_count": task_queue_reviewers_unique_count,
        "admin_task_total_count": admin_task_total_count,
        "admin_task_by_status_count": admin_task_by_status_count,
    }
    return Response(data)
expires_in = timezone.timedelta(days=getattr(settings, 'REST_AUTH_TOKEN_EXPIRES_DAYS', 1)) expiration_date = token.created + expires_in return timezone.now() > expiration_date @staticmethod def update_token_date(token): """ Update token expiration date """ if getattr(settings, 'REST_AUTH_TOKEN_UPDATE_EXPIRATION_DATE', False): token.created = timezone.now() token.save() # do not move above CookieAuthentication as it throws error user_api_module = get_api_module('users') class TokenSerializer(serializers.ModelSerializer): """ Serializer for Token model. """ user_name = serializers.SerializerMethodField() user = serializers.SerializerMethodField() class Meta: model = TokenModel fields = ('key', 'user_name', 'user') def get_user_name(self, obj): try:
from apps.project.models import Project, TaskQueue, UploadSession, ProjectClustering
from apps.project.sync_tasks.soft_delete_project_task import SoftDeleteProjectSyncTask
from apps.task.models import Task
from apps.task.tasks import call_task, purge_task
from apps.task.utils.logger import get_django_logger
from apps.users.models import User
from apps.common.file_storage import get_file_storage

# Module metadata (project convention across apps).
__author__ = "ContraxSuite, LLC; LexPredict, LLC"
__copyright__ = "Copyright 2015-2019, ContraxSuite, LLC"
__license__ = "https://github.com/LexPredict/lexpredict-contraxsuite/blob/1.2.3/LICENSE"
__version__ = "1.2.3"
__maintainer__ = "LexPredict, LLC"
__email__ = "*****@*****.**"

# Resolve sibling API modules lazily by name to avoid circular imports.
common_api_module = get_api_module('common')
users_api_module = get_api_module('users')

# Message returned when a create/attach request targets an existing object.
ALREADY_EXISTS = 'Already exists'


class PatchedListView(rest_framework.views.APIView):
    """
    Minimal APIView whose GET serves ``get_json_data(**kwargs)`` as JSON.

    ``get_json_data`` is not defined here — presumably supplied by
    subclasses (TODO confirm against concrete views).
    """

    def get(self, request, *args, **kwargs):
        """Return the subclass-provided payload; ``safe=False`` permits
        non-dict (e.g. list) top-level JSON values."""
        data = self.get_json_data(**kwargs)
        return JsonResponse(data, safe=False)


# --------------------------------------------------------
# Task Queue Views
# --------------------------------------------------------
include(api_module.urlpatterns)), ] for api_version, this_api_urlpatterns in api_urlpatterns.items(): urlpatterns += [ url(r'^api/', include((this_api_urlpatterns, api_version))), ] # django-rest-swagger urls # patched original rest_framework_swagger.views.get_swagger_view schema_view = get_swagger_view() urlpatterns += [ url(r'^api/(?:(?P<group_by>version|app)/)?$', schema_view, name='swagger') ] # APi for media files under /media/data directory common_api_module = get_api_module('common') urlpatterns += [ url(r'^{}/(?P<path>.+)/$'.format(settings.MEDIA_API_URL.strip('/')), common_api_module.MediaFilesAPIView.as_view(), name='api-media') ] # Manually add debug patterns if settings.DEBUG: # This allows the error pages to be debugged during development, just visit # these urls in browser to see how these error pages look like. urlpatterns += [ url(r'^' + settings.BASE_URL + '400/$', default_views.bad_request, kwargs={'exception': Exception('Bad Request!')}), url(r'^' + settings.BASE_URL + '403/$',
'reassigning_progress': Task.special_tasks_progress_groups(reassigning_ots), 'reassigning_completed': Task.special_tasks_completed(reassigning_ots), 'cleanup_completed': Task.special_tasks_completed({'task_name': 'clean-project', '_project_id': obj.pk}), }) return stats class DocumentTypeSerializer(serializers.ModelSerializer): class Meta: model = DocumentType fields = ['uid', 'code', 'title'] common_api_module = get_api_module('common') class ProjectDetailSerializer(serializers.ModelSerializer): status = serializers.PrimaryKeyRelatedField( queryset=ReviewStatus.objects.all(), many=False, required=False) status_data = common_api_module.ReviewStatusSerializer( source='status', many=False, read_only=True) owners = serializers.PrimaryKeyRelatedField( queryset=User.objects.all(), many=True, required=False) owners_data = UserSerializer( source='owners', many=True, read_only=True) reviewers = serializers.PrimaryKeyRelatedField( queryset=User.objects.all(), many=True, required=False) reviewers_data = UserSerializer( source='reviewers', many=True, read_only=True)