Example #1
    def post(self, request, token):

        # Oldest allowed creation time for a still-valid confirmation token.
        age_limit = pytz.UTC.localize(
            datetime.datetime.utcnow()) - UserConfirmation.AGE_LIMIT

        # Grab the confirmation object for this view, if it exists.
        confirm = UserConfirmation.objects.filter(token=token,
                                                  created__gt=age_limit)

        # Sleep randomly to screw with timing attacks.
        time.sleep(random.random() * UserConfirmation.DELAY_BASE)

        if not confirm:
            # No such valid token
            raise Http404

        confirm = confirm.first()

        ser = self.PostSerializer(data=request.data)
        if not ser.is_valid():
            return Response(data={'warning': format_errors(ser.errors)})

        confirm.user.set_password(ser.validated_data['password'])
        confirm.user.save()

        confirm.delete()

        return Response(data={'success': 'Password Updated'})
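
All of these views lean on a nested PostSerializer class attribute and a shared format_errors() helper that are not shown. A minimal sketch of what they might look like for the password-confirmation view above, shown standalone here; the field name, length limit, and error format are assumptions, not the project's actual code:

from rest_framework import serializers


class PostSerializer(serializers.Serializer):
    # The confirmation view only needs the new password.
    password = serializers.CharField(min_length=8, trim_whitespace=False)


def format_errors(errors):
    """Flatten DRF's {field: [messages]} error dict into one readable string."""
    return ' '.join('{}: {}'.format(field, ' '.join(str(m) for m in msgs))
                    for field, msgs in errors.items())
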
Example #2
    def post(self, request):

        log.debug('keys: {}'.format(request.data.keys()))

        ser = self.PostSerializer(data=request.data)
        if not ser.is_valid():
            return Response(data={'warning': format_errors(ser.errors)})

        try:
            user = User.objects.get(username=ser.validated_data['username'])
        except User.DoesNotExist:
            return Response(data={'warning': 'No such user.'})

        warnings = []
        try:
            if user.extra.type == 'LDAP' and settings.LDAP_GROUPS_ENABLED:
                warnings.append(
                    'The deleted user will still be able to log in as long as they are '
                    'in the {} LDAP group.'.format(
                        settings.LDAP_REQUIRED_GROUP))
        except UserExtraModel.DoesNotExist:
            log.error(
                "User object does not have a row in UserExtra. This shouldn't happen."
            )

        user.delete()

        return Response(data={
            'warning': warnings,
            'info': 'User {} deleted.'.format(user.username)
        })
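
Several of these views reach through user.extra for a per-user type and timezone (and Example #6 creates that row directly). A speculative sketch of such a UserExtraModel one-to-one; the constants, field choices, and lengths are assumptions:

from django.conf import settings
from django.db import models


class UserExtraModel(models.Model):
    BASIC = 'BASIC'
    LDAP = 'LDAP'
    TYPES = ((BASIC, 'Basic auth'), (LDAP, 'LDAP'))

    # related_name='extra' provides the user.extra access used in the views above.
    user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE,
                                related_name='extra')
    type = models.CharField(max_length=16, choices=TYPES, default=BASIC)
    timezone = models.CharField(max_length=64, default='UTC')
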
Example #3
    def post(self, request):
        log.info("add group: {}".format(request.data))

        ser = self.PostSerializer(data=request.data)
        if not ser.is_valid():
            return Response(data={'warning': format_errors(ser.errors)})

        users = User.objects.filter(
            username__in=ser.validated_data['usernames'])
        if not users:
            return Response(data={'warning': 'No such users.'})

        try:
            group = Group.objects.get(name=ser.validated_data['group'])
        except Group.DoesNotExist:
            return Response(data={'warning': 'No such group.'})

        info = []
        warning = []
        for user in users:
            if user.extra.type == 'LDAP' and settings.LDAP_GROUPS_ENABLED:
                warning.append('{} uses external LDAP groups.'.format(
                    user.username))
            else:
                user.groups.add(group)
                user.save()
                info.append('{} assigned to group {}'.format(
                    user.username, group.name))

        return Response(data={'info': info, 'warning': warning})
Example #4
    def post(self, request):
        ser = self.PostSerializer(data=request.data)
        if not ser.is_valid():
            return Response(data={'warning': format_errors(ser.errors)})

        # Muck with timing attacks.
        time.sleep(random.random()*UserConfirmation.DELAY_BASE)

        try:
            user = User.objects.get(username=ser.validated_data['username'])
            if user.email != ser.validated_data['email']:
                raise User.DoesNotExist
        except User.DoesNotExist:
            return Response(data={'warning': 'Invalid User.'})

        if user.extra.type != user.extra.BASIC:
            return Response(data={'warning': 'You can only reset passwords for basic auth users.'})

        user.password = None

        try:
            confirm = UserConfirmation.objects.get(user=user)
            now = pytz.UTC.localize(datetime.datetime.utcnow())
            confirm.created = now
            confirm.save()
        except UserConfirmation.DoesNotExist:
            confirm = UserConfirmation.make_confirm(user)

        confirm.send_confirmation(request)

        return Response(data={'info': 'User {} password reset and email sent.'
                                      .format(user.username)})
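
Examples #1 and #4 both depend on a UserConfirmation model with a token, a created timestamp, AGE_LIMIT/DELAY_BASE constants, and make_confirm()/send_confirmation() helpers. A speculative sketch of such a model; the field types, limits, URL, and email contents are assumptions:

import datetime
import uuid

from django.conf import settings
from django.core.mail import send_mail
from django.db import models


class UserConfirmation(models.Model):
    AGE_LIMIT = datetime.timedelta(days=1)  # How long a token stays valid (assumed).
    DELAY_BASE = 0.5                        # Upper bound on the random sleep, in seconds (assumed).

    user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
    token = models.CharField(max_length=36, unique=True)
    created = models.DateTimeField(auto_now_add=True)

    @classmethod
    def make_confirm(cls, user):
        """Create and save a confirmation carrying a fresh random token."""
        confirm = cls(user=user, token=str(uuid.uuid4()))
        confirm.save()
        return confirm

    def send_confirmation(self, request):
        """Email the user a link containing the token."""
        url = request.build_absolute_uri('/password_reset/{}'.format(self.token))
        send_mail('Confirm your account', 'Visit: {}'.format(url),
                  settings.DEFAULT_FROM_EMAIL, [self.user.email])
        return 'Confirmation sent to {}.'.format(self.user.email)
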
Example #5
    def post(self, request, capture_node):

        try:
            capture_node = CaptureNode.objects.get(id=capture_node).hostname
        except CaptureNode.DoesNotExist:
            return Response(status=404)

        ser = self.PostSerializer(data=request.data)
        if not ser.is_valid():
            return Response(data={'warning': format_errors(ser.errors)})

        result = iface_tasks.interface_toggle.apply_async([ser.validated_data['iface']],
                                                          queue=capture_node)

        _classname = type(self).__name__

        # This should be quick, wait for the result to finish.
        try:
            result_data = result.get(timeout=self.timeout)
            ephemeral_task_cleanup(result)
        except TimeoutError:
            return Response(data={'warning': "{} timed out.".format(_classname)})
        except Exception:
            log.error("{} ({}). {}".format(_classname, capture_node, format_exc()))
            return Response(data={'warning': "{} encountered an error.".format(_classname)})

        return Response(data={'info': result_data.get('info'),
                              'data': result_data.get('data')})
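
The dispatch-and-wait shape of this view (queue a task on the capture node's own Celery queue, block briefly for the result, then discard it) can be reproduced with public Celery APIs alone. A standalone sketch; the broker, task, and queue names are placeholders, and result.forget() stands in for the project's ephemeral_task_cleanup():

from celery import Celery
from celery.exceptions import TimeoutError as CeleryTimeout

app = Celery('sketch', broker='memory://', backend='cache+memory://')


@app.task
def interface_toggle(iface):
    return {'info': 'toggled {}'.format(iface), 'data': {'iface': iface}}


result = interface_toggle.apply_async(['eth0'], queue='capnode01.example.com')
try:
    data = result.get(timeout=5)   # Block up to 5s for the worker's answer.
except CeleryTimeout:
    data = None
finally:
    result.forget()                # Drop the stored result once we are done with it.
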
Example #6
    def post(self, request):
        """Add a new user. The user will have a random password set, and will be emailed an email
        confirmation/password change request.
        Post Data:
          - username: Username of new user
          - email: Email of new user
          - first_name: First name of new user.
          - last_name: Last name of new user.
          - user_type: The type of user to create.
        """

        info = []

        ser = self.PostSerializer(data=request.data)
        if not ser.is_valid():
            return Response(data={'warning': format_errors(ser.errors)})

        username = ser.validated_data['username']
        user_type = ser.validated_data['user_type']

        # See if this user already exists.
        if User.objects.filter(username=username).exists():
            return Response(data={'warning': 'User already exists.'})

        if user_type == 'LDAP' and settings.LDAP_GROUPS_ENABLED:
            return Response(data={'warning': 'With LDAP groups enabled, LDAP users are created on '
                                  'login, and their permissions are based on LDAP groups.'})

        user = User.objects.create_user(username, email=ser.validated_data['email'],
                                        first_name=ser.validated_data['first_name'],
                                        last_name=ser.validated_data['last_name'])
        user.extra = UserExtraModel(type=user_type, timezone=ser.validated_data['timezone'])

        if user_type == UserExtraModel.BASIC:
            # Generate a new confirmation object with a random token.
            confirm = UserConfirmation.make_confirm(user)
            info.append(confirm.send_confirmation(request))

        if user_type == 'LDAP':
            # Automatically enroll LDAP users in our local LDAP required group. This provides the
            # 'authorization' needed to identify LDAP users who should be able to log in.
            try:
                ldap_base_group = Group.objects.get(name=settings.LDAP_REQUIRED_GROUP)
            except Group.DoesNotExist:
                # There's a pretty unlikely race condition here where the group is created between
                # where we checked for one and when we save a new one. Eh.
                ldap_base_group = Group(name=settings.LDAP_REQUIRED_GROUP)
                ldap_base_group.save()

            user.groups.add(ldap_base_group)

        user.save()
        user.extra.save()

        return Response({'success': 'User "{}" Created.'.format(username)})
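
A usage sketch for the user-creation endpoint above; the view class name, URL, and admin_user are placeholders, while the payload fields follow the docstring (plus the timezone field the serializer also expects):

from rest_framework.test import APIRequestFactory, force_authenticate

factory = APIRequestFactory()
request = factory.post('/api/users/add', {
    'username': 'jdoe',
    'email': 'jdoe@example.com',
    'first_name': 'Jane',
    'last_name': 'Doe',
    'user_type': 'BASIC',   # Assumed value of UserExtraModel.BASIC for a local user.
    'timezone': 'UTC',
}, format='json')
force_authenticate(request, user=admin_user)  # admin_user: an existing staff User
response = UserAddView.as_view()(request)
print(response.data)  # {'success': 'User "jdoe" Created.'} on success
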
Example #7
    def post(self, request):

        ser = self.PostSerializer(data=request.data)
        if not ser.is_valid():
            return Response(data={'warning': format_errors(ser.errors)})

        try:
            ser.save()
        except IntegrityError as err:
            return Response({'warning': 'Site {} already exists.'.format(
                ser.validated_data['name'])})

        return Response(
            {'success': 'Site {} created.'.format(ser.data['name'])})
Example #8
    def post(self, request, capture_node):

        log.debug('request data: {}'.format(request.data))
        try:
            capture_node = CaptureNode.objects.get(id=capture_node).hostname
        except CaptureNode.DoesNotExist:
            return Response(status=404)

        ser = self.PostSerializer(data=request.data)
        if not ser.is_valid():
            return Response(data={'warning': format_errors(ser.errors)})

        log.debug('validated data keys: {}'.format(list(ser.validated_data.keys())))
        devices = ser.validated_data['devices']

        if self.device_limit is not None and len(devices) > self.device_limit:
            return Response(
                data={
                    'warning':
                    'The maximum number of devices for this command is {}.'.
                    format(self.device_limit)
                })

        result = self.task.apply_async([devices] + self.extra_args,
                                       queue=capture_node)

        _class_name = type(self).__name__

        if self.track:
            # Track the task in our augmented task tracking system
            TaskTrack.track(result, self.track_desc, request)

            return Response(data=self.track_start_context)
        else:
            # This should be quick, wait for the result to finish.
            try:
                result_data = result.get(timeout=self.timeout)
                ephemeral_task_cleanup(result)
            except TimeoutError:
                return Response(
                    data={'warning': "{} timed out.".format(_class_name)})
            except Exception:
                log.error("{} ({}). {}".format(_class_name, capture_node,
                                               format_exc()))
                return Response(data={
                    'warning':
                    "{} encountered an error.".format(_class_name)
                })

            return Response(data=result_data)
Example #9
    def post(self, request):
        """Attempt to add an capture_node to our list of capture_nodes. The capture_node being added must be
        contactable and have its ident view available.
        """

        ser = self.PostSerializer(data=request.data)
        if not ser.is_valid():
            return Response(data={'warning': format_errors(ser.errors)})

        try:
            ser.save()
        except IntegrityError:
            return Response("Database error when adding new capture node.")

        return Response(data={'success': 'CaptureNode {} added successfully.'
                                         .format(ser.data['hostname'])})
Example #10
    def post(self, request):
        # Re-using the serializer from group adding.
        ser = UserAddGroupView.PostSerializer(data=request.data)
        if not ser.is_valid():
            return Response(data={'warning': format_errors(ser.errors)})

        users = User.objects.filter(username__in=ser.validated_data['usernames'])
        if not users:
            return Response(data={'warning': 'No such users.'})

        try:
            group = Group.objects.get(name=ser.validated_data['group'])
        except Group.DoesNotExist:
            return Response(data={'warning': 'No such group.'})

        for user in users:
            user.groups.remove(group)
            user.save()

        return Response(data={'info': 'Users removed from group {}.'.format(group.name)})
Example #11
    def post(self, request):

        ser = self.PostSerializer(data=request.data)
        if not ser.is_valid():
            return Response(data={
                'warning':
                ['{}'.format(request.data),
                 format_errors(ser.errors)]
            })

        try:
            site = Site.objects.get(name=ser.data['name'])
            # We need to delete both the associated groups and the site itself.
            site.group.delete()
            site.admin_group.delete()
            site.delete()
        except Site.DoesNotExist:
            return Response({
                'warning':
                'No such site to delete: {}'.format(ser.data['name'])
            })

        return Response(data={'success': 'Site deleted.'})
Example #12
    def post(self, request):
        """Perform a search over flows on all capture_nodes. The expected result (delivered via
        a separate file request) is a flow result file.
        :param request: Request
        :return:
        :rtype: Response
        """

        log.info("search start: %s", request.data.get('start_time'))

        ser = self.PostSerializer(data=request.data)
        if not ser.is_valid():
            return Response(data={'warning': format_errors(ser.errors)})

        data = ser.validated_data

        for key in data.keys():
            log.info(key)

        user_groups = request.user.groups.all()

        # Make sure the search can be executed.
        check_res = search_syntax_check(data['query'])
        if 'ps_error_msg' in check_res:
            check_res['warning'] = 'Invalid Search'
            return Response(data=check_res)

        # Get all the asked for sites that the user has permission to search.
        sites_q = Q(group__in=user_groups) | Q(admin_group__in=user_groups)
        if data['sites']:
            q = Q()
            for site_id in data['sites']:
                q = q | Q(id=site_id)
            sites_q = sites_q & q

        sites = Site.objects.filter(sites_q)

        # Get a list of all active capture_nodes.
        capnodes = CaptureNode.objects.filter(site__in=sites)
        if len(capnodes) == 0:
            return Response(
                data={
                    'warning':
                    'You lack the permissions to search any of '
                    'the sites selected.'
                })

        search_info = SearchInfo(type=data['action'],
                                 proto=data['proto'],
                                 start=data['start'],
                                 end=data['end'],
                                 query=data['query'])
        search_info.save()

        # Associate each of the searched sites with the search.
        for site in sites:
            search_site = SearchSite(search=search_info, site=site)
            search_site.save()

        ser_start = data['start'].isoformat()
        ser_end = data['end'].isoformat()

        subtasks = []
        for capnode in capnodes:
            node_search = NodeSearch(search=search_info,
                                     token=uuid.uuid4(),
                                     capture_node=capnode)
            node_search.save()
            args = [
                data['query'], ser_start, ser_end, data['proto'],
                node_search.post_url()
            ]
            kwargs = {'packets': data['action'] == SearchInfo.T_PCAP}

            task = search_node.subtask(args=args,
                                       kwargs=kwargs,
                                       options={'queue': capnode.hostname})
            subtasks.append(task)

        # Create the final merge task to combine the results from the capture_nodes.
        callback_task = search_merge.subtask(args=[search_info.id],
                                             options={'queue': 'search_head'})
        # This will execute the subtasks, then the callback task when those are done.
        task_group = chord(subtasks, callback_task)
        result = task_group.apply_async(queue='search_head')

        if data['action'] == SearchInfo.T_PCAP:
            message = "Fetching PCAP File"
        else:
            message = "Performing Flow Search"
        TaskTrack.track(result, message, request)

        response_data = {'success': 'Search Started'}
        response_data.update(check_res)

        if data['action'] == SearchInfo.T_FLOW:
            # Hand back the task id to watch for completion, and the URL to use
            # to get the results. This is only needed for flow searches.
            response_data['task_id'] = result.task_id
            response_data['results_url'] = search_info.flow_results_url
        return Response(data=response_data)
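
The fan-out/merge at the end of this view is Celery's chord primitive: the per-node search subtasks run in parallel, and once all of them finish, their results are handed as a list to the single merge callback. A minimal standalone version using the modern signature API; the task, broker, and queue names are placeholders:

from celery import Celery, chord

app = Celery('sketch', broker='memory://', backend='cache+memory://')


@app.task
def search_one_node(query, node):
    return {'node': node, 'hits': 0}


@app.task
def merge_results(results):
    # `results` is the list of return values from every subtask in the header.
    return {'total_hits': sum(r['hits'] for r in results)}


header = [search_one_node.s('port 443', n) for n in ('cap01', 'cap02')]
result = chord(header)(merge_results.s().set(queue='search_head'))
print(result.id)  # Track this id to know when the merge has finished.
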
Example #13
    def get(self, request):
        """Fetch stats for the given time period.
        :param request:
        :return:
        """

        log.info("stats start: %s", request.data.get('start'))

        # Validate GET variables
        ser = self.GetSerializer(data=request.GET)
        if not ser.is_valid():
            return Response(data={'warning': format_errors(ser.errors)})

        log.debug('request: {}'.format(request.GET))

        site_id = ser.validated_data.get('site', None)
        capnode_id = ser.validated_data.get('capture_node', None)
        iface_id = ser.validated_data.get('interface', None)
        end = ser.validated_data.get('end', pytz.UTC.localize(datetime.utcnow()))
        start = ser.validated_data.get('start', end - timedelta(hours=24))

        query = Stats.objects.filter(minute__gt=start, minute__lt=end)

        # The fields/values we include will determine the 'group by' (how the results are
        # aggregated).
        values = ['minute']
        title = []
        if site_id is not None:
            site = Site.objects.get(id=site_id)
            query = query.filter(capture_node__site_id=site_id)
            values.append('capture_node__site_id')
            title.append(site.name)

        if capnode_id is not None:
            try:
                capnode = CaptureNode.objects.get(id=capnode_id)
                query = query.filter(capture_node_id=capnode_id)
                values.append('capture_node_id')
                title.append(capnode.hostname)
            except CaptureNode.DoesNotExist:
                log.error("Capture node does not exist. {}".format(capnode_id))

        if iface_id is not None:
            try:
                iface = StatsInterface.objects.get(id=iface_id)
                query = query.filter(interface_id=iface_id)
                values.append('interface_id')
                title.append(iface.name)
            except StatsInterface.DoesNotExist:
                log.error("Interface does not exist.  {}".format(iface_id))

        query = query.order_by('minute')

        query = query.values(*values).distinct()

        query = query.annotate(
                    capture_size=Sum('capture_size'),
                    received=Sum('received'),
                    dropped=Sum('dropped'),
                    ipv4=Sum('ipv4'),
                    ipv6=Sum('ipv6'),
                    network_other=Sum('network_other'),
                    tcp=Sum('tcp'),
                    udp=Sum('udp'),
                    transport_other=Sum('transport_other')
                )

        if not title:
            title = ['all capture nodes']

        title = 'Capture stats for ' + ', '.join(title)

        return Response(data={'data': {'chart_data': query,
                                       'title': title,
                                       'start': start,
                                       'end': end}})
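
The values(...).annotate(Sum(...)) chain is what makes this a GROUP BY: rows are grouped by the listed values, each group gets summed counters, and the queryset yields plain dicts. A stripped-down version of the same aggregation; model and field names follow the view above, the import path is an assumption, and the annotation is aliased so it cannot clash with a model field name on newer Django versions:

import datetime

import pytz
from django.db.models import Sum

from stats.models import Stats  # Import path is an assumption.

end = pytz.UTC.localize(datetime.datetime.utcnow())
start = end - datetime.timedelta(hours=24)

rows = (Stats.objects
        .filter(minute__gt=start, minute__lt=end)
        .values('minute')                          # GROUP BY minute
        .annotate(total_received=Sum('received'))  # SUM(received) per minute
        .order_by('minute'))
# Each row is a dict, e.g. {'minute': datetime(...), 'total_received': 120000}
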
Example #14
    def get(self, request):
        """Fetch stats for the given time period.
        :param request:
        :return:
        """

        log.info('req data: {}'.format(request.GET))

        # Validate GET variables
        ser = self.GetSerializer(data=request.GET)
        if not ser.is_valid():
            return Response(data={'warning': format_errors(ser.errors)})

        grouping = ser.validated_data.get('grouping')
        stat_type = ser.validated_data.get('stat_type')
        end = ser.validated_data.get('end', pytz.UTC.localize(datetime.utcnow()))
        start = ser.validated_data.get('start', end - timedelta(hours=24))

        log.debug('end time: {}'.format(end))
        log.info('grouping {}, stat_type {}'.format(grouping, stat_type))

        query = Stats.objects.filter(minute__gt=start, minute__lt=end)

        values = ['minute']
        grouping_key = None
        if grouping == 'capture_node':
            grouping_key = 'capture_node__hostname'
            values.append(grouping_key)
        elif grouping == 'site':
            grouping_key = 'capture_node__site__name'
            values.append(grouping_key)
        else:
            raise RuntimeError("Invalid grouping: {}".format(grouping))

        values.append(stat_type)

        query = query.order_by('minute')

        query = query.values(*values).distinct()

        query = query.annotate(
            data=Sum(stat_type)
        )

        # There is a way, using the postgresql 'crosstab' function, to do this in postgresql
        # directly. It is very complicated, and not very dynamic. Instead we'll just build the
        # result set the hard way in python.

        # dict[minute] -> dict[host/site name] -> count
        by_minute = defaultdict(dict)
        for res in query:
            by_minute[res['minute']][res[grouping_key]] = res[stat_type]

        chart_data = []
        for minute in sorted(by_minute.keys()):
            data = {'minute': minute}
            data.update(by_minute[minute])
            chart_data.append(data)

        title = '{} by {}'.format(self.GetSerializer.STAT_TYPES[stat_type],
                                  self.GetSerializer.GROUPINGS[grouping])

        return Response(data={'data': {'chart_data': chart_data,
                                       'title': title,
                                       'start': start,
                                       'end': end}})
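
The pivot built at the end of Example #14 (one row per minute, one column per host or site, standing in for PostgreSQL's crosstab) is easy to demonstrate in isolation with fake rows shaped like the annotated query's output:

from collections import defaultdict

rows = [
    {'minute': '12:00', 'capture_node__hostname': 'cap01', 'received': 10},
    {'minute': '12:00', 'capture_node__hostname': 'cap02', 'received': 7},
    {'minute': '12:01', 'capture_node__hostname': 'cap01', 'received': 12},
]

by_minute = defaultdict(dict)
for row in rows:
    by_minute[row['minute']][row['capture_node__hostname']] = row['received']

chart_data = [dict({'minute': minute}, **by_minute[minute])
              for minute in sorted(by_minute)]
# [{'minute': '12:00', 'cap01': 10, 'cap02': 7}, {'minute': '12:01', 'cap01': 12}]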