def class_from_id(type_, _id):
    """
    Return an instantiated class object.

    :param type_: The CRIPTs top-level object type.
    :type type_: str
    :param _id: The ObjectId to search for.
    :type _id: str
    :returns: class which inherits from
              :class:`cripts.core.cripts_mongoengine.CriptsBaseAttributes`,
              or None if the type is unknown, the id is missing/invalid,
              or no matching document exists.
    """
    # Quick fail on missing arguments.
    if not _id or not type_:
        return None

    # doing this to avoid circular imports
    from cripts.comments.comment import Comment
    from cripts.core.cripts_mongoengine import Action
    from cripts.core.source_access import SourceAccess
    from cripts.core.user_role import UserRole
    from cripts.events.event import Event
    from cripts.usernames.username import UserName
    from cripts.targets.target import Target
    from cripts.hashes.hash import Hash
    from cripts.datasets.dataset import Dataset
    from cripts.email_addresses.email_address import EmailAddress

    # Dispatch table replaces the former 10-arm if/elif chain; every
    # branch did exactly the same `.objects(id=_id).first()` query.
    type_map = {
        'Comment': Comment,
        'Event': Event,
        'Action': Action,
        'SourceAccess': SourceAccess,
        'UserRole': UserRole,
        'UserName': UserName,
        'Target': Target,
        'Hash': Hash,
        'Dataset': Dataset,
        'EmailAddress': EmailAddress,
    }
    klass = type_map.get(type_)
    if klass is None:
        return None

    # make sure it's a string
    _id = str(_id)

    # Use bson.ObjectId to make sure this is a valid ObjectId, otherwise
    # the queries below will raise a ValidationError exception.
    # NOTE(review): str.decode exists only on Python 2 byte strings; under
    # Python 3 this line raises AttributeError — confirm runtime target.
    if not ObjectId.is_valid(_id.decode('utf8')):
        return None

    return klass.objects(id=_id).first()
def class_from_value(type_, value):
    """
    Return an instantiated class object.

    :param type_: The CRIPTs top-level object type.
    :type type_: str
    :param value: The value to search for.
    :type value: str
    :returns: class which inherits from
              :class:`cripts.core.cripts_mongoengine.CriptsBaseAttributes`,
              or None if the type is unknown, the value is missing or not a
              valid ObjectId, or no matching document exists.
    """
    # Quick fail on missing arguments.
    if not type_ or not value:
        return None

    # doing this to avoid circular imports
    from cripts.comments.comment import Comment
    from cripts.events.event import Event
    from cripts.usernames.username import UserName
    from cripts.targets.target import Target
    from cripts.hashes.hash import Hash
    from cripts.datasets.dataset import Dataset
    from cripts.email_addresses.email_address import EmailAddress

    # Dispatch table replaces the former type-list membership test plus
    # if/elif chain: every supported type was in the list and every branch
    # ran the same `.objects(id=value).first()` query.
    type_map = {
        'Comment': Comment,
        'Event': Event,
        'UserName': UserName,
        'Target': Target,
        'Hash': Hash,
        'Dataset': Dataset,
        'EmailAddress': EmailAddress,
    }
    klass = type_map.get(type_)
    if klass is None:
        return None

    # Make sure value is a string...
    value = str(value)

    # Use bson.ObjectId to make sure this is a valid ObjectId, otherwise
    # the queries below will raise a ValidationError exception.
    # NOTE(review): str.decode exists only on Python 2 byte strings; under
    # Python 3 this line raises AttributeError — confirm runtime target.
    if not ObjectId.is_valid(value.decode('utf8')):
        return None

    return klass.objects(id=value).first()
def get_dataset_details(name, analyst):
    """
    Generate the data to render the Dataset details template.

    :param name: The name of the dataset to get details for.
    :type name: str
    :param analyst: The user requesting this information.
    :type analyst: str
    :returns: template (str or None on success), arguments (dict)
    """
    template = None
    allowed_sources = user_sources(analyst)
    # Only datasets the analyst's sources can see are returned.
    dataset_object = Dataset.objects(name=name,
                                     source__name__in=allowed_sources).first()
    if not dataset_object:
        error = ("Either no data exists for this dataset"
                 " or you do not have permission to view it.")
        template = "error.html"
        args = {'error': error}
        return template, args

    # BUG FIX: was 'username="******" % analyst' — a %-format with no
    # conversion specifier raises TypeError ("not all arguments converted")
    # for any string analyst. Use the same "%s" % analyst pattern as the
    # other calls in this function.
    dataset_object.sanitize_sources(username="%s" % analyst,
                                    sources=allowed_sources)

    # remove pending notifications for user
    remove_user_from_notification("%s" % analyst, dataset_object.id, 'Dataset')

    # subscription
    subscription = {
        'type': 'Dataset',
        'id': dataset_object.id,
        'subscribed': is_user_subscribed("%s" % analyst,
                                         'Dataset',
                                         dataset_object.id),
    }

    # objects
    objects = dataset_object.sort_objects()

    # relationships
    relationships = dataset_object.sort_relationships("%s" % analyst,
                                                      meta=True)

    # relationship
    # BUG FIX: type was misspelled 'Datset'.
    relationship = {
        'type': 'Dataset',
        'value': dataset_object.id
    }

    # comments
    comments = {'comments': dataset_object.get_comments(),
                'url_key': dataset_object.name}

    # favorites
    favorite = is_user_favorite("%s" % analyst, 'Dataset', dataset_object.id)

    # services
    service_list = get_supported_services('Dataset')

    # analysis results
    service_results = dataset_object.get_analysis_results()

    args = {'dataset': dataset_object,
            'objects': objects,
            'relationships': relationships,
            'comments': comments,
            'favorite': favorite,
            'relationship': relationship,
            'subscription': subscription,
            'name': dataset_object.name,
            'service_list': service_list,
            'service_results': service_results}

    return template, args
def dataset_add_update(name, description=None, source=None, method='',
                       reference='', analyst=None, bucket_list=None,
                       ticket=None, is_validate_only=False, cache=None,
                       related_id=None, related_type=None,
                       relationship_type=None):
    """
    Add a new Dataset or update an existing one.

    :param name: The name of the dataset.
    :type name: str
    :param description: Description of the dataset.
    :type description: str
    :param source: The source(s) for this dataset (name or embedded sources).
    :type source: str or list
    :param method: Method of acquisition (used when source is a name).
    :type method: str
    :param reference: Source reference (used when source is a name).
    :type reference: str
    :param analyst: The user adding/updating the dataset.
    :type analyst: str
    :param bucket_list: Buckets to assign.
    :param ticket: Ticket to assign.
    :param is_validate_only: If True, only check for duplicates; don't save.
    :type is_validate_only: bool
    :param cache: Per-request cache of previously fetched datasets.
    :type cache: dict or None
    :param related_id: ObjectId of an object to relate to.
    :param related_type: Top-level type of the related object.
    :param relationship_type: Relationship type to create.
    :returns: dict with keys "success", "message", and on success "object".
    """
    retVal = {}

    if not source:
        return {"success" : False, "message" : "Missing source information."}

    # BUG FIX: the original signature used a mutable default argument
    # (cache={}), silently shared across calls; normalize here instead.
    if cache is None:
        cache = {}

    is_item_new = False
    cached_results = cache.get(form_consts.Dataset.CACHED_RESULTS)

    if cached_results is not None:
        dataset_object = cached_results.get(name)
    else:
        dataset_object = Dataset.objects(name=name).first()

    if not dataset_object:
        dataset_object = Dataset()
        dataset_object.name = name
        dataset_object.description = description
        is_item_new = True
        if cached_results is not None:
            cached_results[name] = dataset_object

    # Merge descriptions: fill in if empty, append if new text differs.
    if not dataset_object.description:
        dataset_object.description = description or ''
    elif dataset_object.description != description:
        if description:
            dataset_object.description += "\n" + (description or '')

    # A bare source name is promoted to an embedded source object.
    if isinstance(source, basestring):
        source = [create_embedded_source(source,
                                         reference=reference,
                                         method=method,
                                         analyst=analyst)]

    if source:
        for s in source:
            dataset_object.add_source(s)
    else:
        return {"success" : False, "message" : "Missing source information."}

    if bucket_list:
        dataset_object.add_bucket_list(bucket_list, analyst)

    if ticket:
        dataset_object.add_ticket(ticket, analyst)

    related_obj = None
    if related_id:
        related_obj = class_from_id(related_type, related_id)
        if not related_obj:
            retVal['success'] = False
            retVal['message'] = 'Related Object not found.'
            return retVal

    # set the URL for viewing the new data
    resp_url = reverse('cripts.datasets.views.dataset_detail',
                       args=[dataset_object.name])

    if not is_validate_only:
        dataset_object.save(username=analyst)

        if is_item_new:
            # Update the dataset stats.
            # BUG FIX: the original set the counter to 0 (without
            # incrementing) when the 'Datasets' key was missing, so the
            # first dataset added was never counted. Initialize-and-
            # increment in one step instead.
            counts = mongo_connector(settings.COL_COUNTS)
            count_stats = counts.find_one({'name': 'counts'})
            if not count_stats or ('counts' not in count_stats):
                count_stats = {'counts': {}}
            count_stats['counts']['Datasets'] = \
                count_stats['counts'].get('Datasets', 0) + 1
            counts.update({'name': "counts"},
                          {'$set': {'counts': count_stats['counts']}},
                          upsert=True)

            retVal['message'] = ('Success! Click here to view the new Dataset: '
                                 '<a href="%s">%s</a>'
                                 % (resp_url, dataset_object.name))
        else:
            message = ('Updated existing Dataset: '
                       '<a href="%s">%s</a>' % (resp_url, dataset_object.name))
            retVal['message'] = message
            retVal['status'] = form_consts.Status.DUPLICATE
            retVal['warning'] = message
    else:
        if dataset_object.id is not None and not is_item_new:
            message = ('Warning: Dataset already exists: '
                       '<a href="%s">%s</a>' % (resp_url, dataset_object.name))
            retVal['message'] = message
            retVal['status'] = form_consts.Status.DUPLICATE
            retVal['warning'] = message

    # BUG FIX: the original condition tested the undefined name
    # 'email_object' (copy/paste from the email-address module), raising
    # NameError whenever a related object was supplied. Test
    # dataset_object instead.
    if related_obj and dataset_object and relationship_type:
        relationship_type = RelationshipTypes.inverse(
            relationship=relationship_type)
        dataset_object.add_relationship(related_obj,
                                        relationship_type,
                                        analyst=analyst,
                                        get_rels=False)
        dataset_object.save(username=analyst)

    # run dataset triage
    if is_item_new and not is_validate_only:
        dataset_object.reload()
        run_triage(dataset_object, analyst)

    retVal['success'] = True
    retVal['object'] = dataset_object
    return retVal