def delete_object_file(value):
    """
    In the event this is a file (but not PCAP), clean up after ourselves when
    deleting an object.

    :param value: The value of the object we are deleting.
    :type value: str
    """

    # Only values that look like an MD5 (32 hex characters, case-insensitive)
    # can refer to a file stored in GridFS; anything else has nothing to clean.
    if not re.match(r"^[a-f\d]{32}$", value, re.I):
        return

    #XXX: MongoEngine provides no direct GridFS access so we
    # need to use pymongo directly.
    obj_list = ('Dataset', 'EmailAddress', 'Event', 'Hash', 'Target',
                'UserName')

    # In order to make sure this object isn't tied to more than one top-level
    # object, we need to check the rest of the database. We will at least find
    # one instance, which is the one we are going to be removing. If we find
    # another instance, then we should not remove the object from GridFS.
    count = 0
    query = {'objects.value': value}
    for obj in obj_list:
        obj_class = class_from_type(obj)
        count += len(obj_class.objects(__raw__=query))
        if count > 1:
            # A second TLO still references this file; keep it in GridFS.
            break
    else:
        # for/else: the loop finished without break, so only one reference
        # (the one being deleted) exists — safe to remove the backing file.
        col = settings.COL_OBJECTS
        grid = mongo_connector("%s.files" % col)
        grid.remove({'md5': value})
    return
def generate_counts():
    """
    Generate dashboard counts and upsert them into the counts collection.

    Counts the Email Address and UserName collections and stores the totals
    in the single document named "counts".
    """

    counts = mongo_connector(settings.COL_COUNTS)
    email_addresses = mongo_connector(settings.COL_EMAIL_ADDRESSES)
    usernames = mongo_connector(settings.COL_USERNAMES)

    # NOTE: the original version also computed today/last-7/last-30 day
    # timestamps, but never used them; that dead code has been removed.
    count = {}
    count['Email Addresses'] = email_addresses.find().count()
    count['UserNames'] = usernames.find().count()

    # Upsert so the document is created on first run.
    counts.update({'name': "counts"},
                  {'$set': {'counts': count}},
                  upsert=True)
def create_indexes(): """ Creates the default set of indexes for the system. Depending on your use cases, as well as quantity of data, admins may wish to tweak these indexes to best fit their requirements. """ print "Creating indexes (duplicates will be ignored automatically)" analysis_results = mongo_connector(settings.COL_ANALYSIS_RESULTS) analysis_results.ensure_index("service_name", background=True) analysis_results.ensure_index("object_type", background=True) analysis_results.ensure_index("object_id", background=True) analysis_results.ensure_index("start_date", background=True) analysis_results.ensure_index("finish_date", background=True) analysis_results.ensure_index("version", background=True) analysis_results.ensure_index("analysis_id", background=True) bucket_lists = mongo_connector(settings.COL_BUCKET_LISTS) bucket_lists.ensure_index("name", background=True) comments = mongo_connector(settings.COL_COMMENTS) comments.ensure_index("obj_id", background=True) comments.ensure_index("users", background=True) comments.ensure_index("tags", background=True) comments.ensure_index("status", background=True) events = mongo_connector(settings.COL_EVENTS) events.ensure_index("objects.value", background=True) events.ensure_index("title", background=True) events.ensure_index("relationships.value", background=True) events.ensure_index("source.name", background=True) events.ensure_index("created", background=True) events.ensure_index("status", background=True) events.ensure_index("favorite", background=True) events.ensure_index("event_type", background=True) events.ensure_index("bucket_list", background=True) if settings.FILE_DB == settings.GRIDFS: objects_files = mongo_connector('%s.files' % settings.COL_OBJECTS) objects_files.ensure_index("md5", background=True) objects_chunks = mongo_connector('%s.chunks' % settings.COL_OBJECTS) objects_chunks.ensure_index([("files_id", pymongo.ASCENDING), ("n", pymongo.ASCENDING)], unique=True) notifications = 
mongo_connector(settings.COL_NOTIFICATIONS) notifications.ensure_index("obj_id", background=True) # auto-expire notifications after 30 days notifications.ensure_index("date", background=True, expireAfterSeconds=2592000) notifications.ensure_index("users", background=True)
def create_indexes():
    """
    Creates the default set of indexes for the system. Depending on your use
    cases, as well as quantity of data, admins may wish to tweak these
    indexes to best fit their requirements.
    """

    print "Creating indexes (duplicates will be ignored automatically)"

    # Analysis results are queried by service, owning TLO, time range,
    # version, and analysis id.
    analysis_results = mongo_connector(settings.COL_ANALYSIS_RESULTS)
    analysis_results.ensure_index("service_name", background=True)
    analysis_results.ensure_index("object_type", background=True)
    analysis_results.ensure_index("object_id", background=True)
    analysis_results.ensure_index("start_date", background=True)
    analysis_results.ensure_index("finish_date", background=True)
    analysis_results.ensure_index("version", background=True)
    analysis_results.ensure_index("analysis_id", background=True)

    bucket_lists = mongo_connector(settings.COL_BUCKET_LISTS)
    bucket_lists.ensure_index("name", background=True)

    comments = mongo_connector(settings.COL_COMMENTS)
    comments.ensure_index("obj_id", background=True)
    comments.ensure_index("users", background=True)
    comments.ensure_index("tags", background=True)
    comments.ensure_index("status", background=True)

    events = mongo_connector(settings.COL_EVENTS)
    events.ensure_index("objects.value", background=True)
    events.ensure_index("title", background=True)
    events.ensure_index("relationships.value", background=True)
    events.ensure_index("source.name", background=True)
    events.ensure_index("created", background=True)
    events.ensure_index("status", background=True)
    events.ensure_index("favorite", background=True)
    events.ensure_index("event_type", background=True)
    events.ensure_index("bucket_list", background=True)

    # GridFS object storage only needs indexing when it is the configured
    # file backend.
    if settings.FILE_DB == settings.GRIDFS:
        objects_files = mongo_connector("%s.files" % settings.COL_OBJECTS)
        objects_files.ensure_index("md5", background=True)
        objects_chunks = mongo_connector("%s.chunks" % settings.COL_OBJECTS)
        # GridFS chunks are fetched in order via a unique (files_id, n) key.
        objects_chunks.ensure_index([("files_id", pymongo.ASCENDING),
                                     ("n", pymongo.ASCENDING)],
                                    unique=True)

    notifications = mongo_connector(settings.COL_NOTIFICATIONS)
    notifications.ensure_index("obj_id", background=True)
    # auto-expire notifications after 30 days
    notifications.ensure_index("date", background=True,
                               expireAfterSeconds=2592000)
    notifications.ensure_index("users", background=True)
def remove_indexes(): """ Removes all indexes from all collections. """ coll_list = [ settings.COL_BUCKET_LISTS, settings.COL_COMMENTS, settings.COL_DATASETS, settings.COL_EMAIL_ADDRESSES, settings.COL_EVENTS, settings.COL_HASHES, settings.COL_NOTIFICATIONS, '%s.files' % settings.COL_OBJECTS, '%s.chunks' % settings.COL_OBJECTS, settings.COL_TARGETS, settings.COL_USERNAMES, ] for coll in coll_list: print "Removing index for: %s" % coll c = mongo_connector(coll) c.drop_indexes()
def remove_indexes():
    """
    Removes all indexes from all collections.
    """

    # Every collection we index, including the two GridFS sub-collections.
    coll_list = [
        settings.COL_BUCKET_LISTS,
        settings.COL_COMMENTS,
        settings.COL_DATASETS,
        settings.COL_EMAIL_ADDRESSES,
        settings.COL_EVENTS,
        settings.COL_HASHES,
        settings.COL_NOTIFICATIONS,
        "%s.files" % settings.COL_OBJECTS,
        "%s.chunks" % settings.COL_OBJECTS,
        settings.COL_TARGETS,
        settings.COL_USERNAMES,
    ]
    for coll in coll_list:
        print "Removing index for: %s" % coll
        c = mongo_connector(coll)
        c.drop_indexes()
def delete_object_file(value):
    """
    In the event this is a file (but not PCAP), clean up after ourselves when
    deleting an object.

    :param value: The value of the object we are deleting.
    :type value: str
    """

    # Non-MD5 values cannot name a GridFS file, so there is nothing to do.
    if re.match(r"^[a-f\d]{32}$", value, re.I) is None:
        return

    #XXX: MongoEngine provides no direct GridFS access so we
    # need to use pymongo directly.
    tlo_types = ('Dataset', 'EmailAddress', 'Event', 'Hash', 'Target',
                 'UserName')

    # Scan every TLO type for other references to this value. We will always
    # find at least one (the object being deleted); a second reference means
    # the GridFS file is still in use and must be kept.
    raw_query = {'objects.value': value}
    references = 0
    for tlo_type in tlo_types:
        references += len(class_from_type(tlo_type).objects(__raw__=raw_query))
        if references > 1:
            return

    # Only one reference existed, so the backing file can be removed.
    collection = settings.COL_OBJECTS
    files = mongo_connector("%s.files" % collection)
    files.remove({'md5': value})
    return
def add_object(type_, id_, object_type, source, method, reference, user,
               value=None, file_=None, add_indicator=False, get_objects=True,
               tlo=None, is_sort_relationships=False, is_validate_only=False,
               is_validate_locally=False, cache={}, **kwargs):
    """
    Add an object to the database.

    :param type_: The top-level object type.
    :type type_: str
    :param id_: The ObjectId of the top-level object.
    :type id_: str
    :param object_type: The type of the ObjectType being added.
    :type object_type: str
    :param source: The name of the source adding this object.
    :type source: str
    :param method: The method for this object.
    :type method: str
    :param reference: The reference for this object.
    :type reference: str
    :param user: The user adding this object.
    :type user: str
    :param value: The value of the object.
    :type value: str
    :param file_: The file if the object is a file upload.
    :type file_: file handle.
    :param add_indicator: Also add an indicator for this object.
    :type add_indicator: bool
    :param get_objects: Return the formatted list of objects when completed.
    :type get_objects: bool
    :param tlo: The CRIPTs top-level object we are adding objects to.
                This is an optional parameter used mainly for performance
                reasons (by not querying mongo if we already have the
                top level-object).
    :type tlo: :class:`cripts.core.cripts_mongoengine.CriptsBaseAttributes`
    :param is_sort_relationships: Return all relationships and meta, sorted
    :type is_sort_relationships: bool
    :param is_validate_only: Validate, but do not add to TLO.
    :type is_validate_only: bool
    :param is_validate_locally: Validate, but do not add b/c there is no TLO.
    :type is_validate_locally: bool
    :param cache: Cached data, typically for performance enhancements
                  during bulk operations.
    :type cache: dict
    :returns: dict with keys:
              "success" (boolean),
              "message" (str),
              "objects" (list),
              "relationships" (list)
    """

    # NOTE(review): 'add_indicator' and 'cache' are accepted but not used in
    # this body — presumably kept for API compatibility; confirm with callers.
    if is_validate_locally: # no TLO provided
        return {"success": True}

    if not tlo:
        if type_ and id_:
            tlo = class_from_id(type_, id_)
        if not tlo:
            return {'success': False, 'message': "Failed to find TLO"}

    try:
        if file_:
            # File uploads are keyed by MD5: the object's value becomes the
            # hash and the reference becomes the original filename.
            data = file_.read()
            filename = file_.name
            md5sum = md5(data).hexdigest()
            value = md5sum
            reference = filename
        ret = tlo.add_object(object_type, value, source,
                             method, reference, user)
        if not ret['success']:
            msg = '%s! [Type: "%s"][Value: "%s"]'
            return {"success": False,
                    "message": msg % (ret['message'], object_type, value)}
        else:
            results = {'success': True}

        if not is_validate_only:
            # save the object
            tlo.update(add_to_set__obj=ret['object'])
            results['message'] = "Object added successfully"
            if file_:
                #XXX: MongoEngine provides no direct GridFS access so we
                # need to use pymongo directly.
                col = settings.COL_OBJECTS
                grid = mongo_connector("%s.files" % col)
                # De-duplicate: only store the file if this MD5 is new.
                if grid.find({'md5': md5sum}).count() == 0:
                    put_file(filename, data, collection=col)

        if is_sort_relationships == True:
            results['relationships'] = tlo.sort_relationships(user, meta=True)

        if get_objects:
            results['objects'] = tlo.sort_objects()

        results['id'] = str(tlo.id)
        return results
    except ValidationError as e:
        return {'success': False, 'message': str(e)}
def generate_email_address_jtable(request, option):
    """
    Generate the jtable data for rendering in the list template.

    :param request: The request for this jtable.
    :type request: :class:`django.http.HttpRequest`
    :param option: Action to take.
    :type option: str of either 'jtlist', 'jtdelete', or 'inline'.
    :returns: :class:`django.http.HttpResponse`
    """

    obj_type = EmailAddress
    type_ = "email_address"
    mapper = obj_type._meta['jtable_opts']

    if option == "jtlist":
        # Sets display url
        details_url = mapper['details_url']
        details_url_key = mapper['details_url_key']
        fields = mapper['fields']
        response = jtable_ajax_list(obj_type,
                                    details_url,
                                    details_url_key,
                                    request,
                                    includes=fields)
        return HttpResponse(json.dumps(response, default=json_handler),
                            content_type="application/json")

    if option == "jtdelete":
        response = {"Result": "ERROR"}
        if jtable_ajax_delete(obj_type,request):
            # Update the email stats
            counts = mongo_connector(settings.COL_COUNTS)
            count_stats = counts.find_one({'name': 'counts'})
            if not count_stats or ('counts' not in count_stats):
                count_stats = {'counts':{}}
            # Decrement the cached dashboard total; initialize it to zero
            # when the key is missing so the document stays well-formed.
            if 'Email Addresses' not in count_stats['counts']:
                count_stats['counts']['Email Addresses'] = 0
            else:
                count_stats['counts']['Email Addresses'] = count_stats['counts']['Email Addresses'] - 1
            counts.update({'name': "counts"},
                          {'$set': {'counts': count_stats['counts']}},
                          upsert=True)
            response = {"Result": "OK"}
        return HttpResponse(json.dumps(response, default=json_handler),
                            content_type="application/json")

    # Build the jtable configuration for the 'inline' and full-page views.
    jtopts = {
        'title': "Email Addresses",
        'default_sort': mapper['default_sort'],
        'listurl': reverse('cripts.%ses.views.%ses_listing' % (type_, type_),
                           args=('jtlist',)),
        'deleteurl': reverse('cripts.%ses.views.%ses_listing' % (type_, type_),
                             args=('jtdelete',)),
        'searchurl': reverse(mapper['searchurl']),
        'fields': mapper['jtopts_fields'],
        'hidden_fields': mapper['hidden_fields'],
        'linked_fields': mapper['linked_fields'],
        'details_link': mapper['details_link'],
        'no_sort': mapper['no_sort']
    }
    jtable = build_jtable(jtopts,request)
    # Toolbar buttons; the string values are JavaScript evaluated client-side.
    jtable['toolbar'] = [
        {
            'tooltip': "'All Email Addresses'",
            'text': "'All'",
            'click': "function () {$('#email_address_listing').jtable('load', {'refresh': 'yes'});}",
            'cssClass': "'jtable-toolbar-center'",
        },
        {
            'tooltip': "'New Email Addresses'",
            'text': "'New'",
            'click': "function () {$('#email_address_listing').jtable('load', {'refresh': 'yes', 'status': 'New'});}",
            'cssClass': "'jtable-toolbar-center'",
        },
        {
            'tooltip': "'Add Email Address'",
            'text': "'Add Email Address'",
            'click': "function () {$('#new-email_address').click()}",
        },
    ]
    if option == "inline":
        return render_to_response("jtable.html",
                                  {'jtable': jtable,
                                   'jtid': '%s_listing' % type_,
                                   'button' : '%ses_tab' % type_},
                                  RequestContext(request))
    else:
        return render_to_response("%s_listing.html" % type_,
                                  {'jtable': jtable,
                                   'jtid': '%s_listing' % type_},
                                  RequestContext(request))
def email_address_add_update(address, description=None, source=None, method='',
                             reference='', analyst=None, datasets=None,
                             bucket_list=None, ticket=None,
                             is_validate_only=False, cache={}, related_id=None,
                             related_type=None, relationship_type=None):
    """
    Add a new Email Address to the database, or update an existing one.

    :param address: The email address to add/update.
    :type address: str
    :param description: Description of the email address.
    :type description: str
    :param source: Source name (str) or list of embedded sources.
    :param method: Method of acquisition.
    :type method: str
    :param reference: Reference to the source data.
    :type reference: str
    :param analyst: The user adding/updating this email address.
    :type analyst: str
    :param datasets: Unused here; accepted for call-signature compatibility.
    :param bucket_list: Buckets to assign.
    :param ticket: Ticket to assign.
    :param is_validate_only: Validate without saving.
    :type is_validate_only: bool
    :param cache: Cached data for bulk operations.
    :type cache: dict
    :param related_id: ObjectId of a related top-level object.
    :param related_type: Type of the related top-level object.
    :param relationship_type: Relationship to create to the related object.
    :returns: dict with keys "success" (bool), "message" (str),
              "object" (EmailAddress), and optionally "status"/"warning".
    """

    retVal = {}
    if not source:
        return {"success" : False, "message" : "Missing source information."}

    # Parse out the e-mail address. Return an error if it looks invalid,
    # (aka missing the @, has whitespace, etc)
    try:
        if ' ' in address:
            raise ValueError
        local_name, domain_part = address.strip().split('@', 1)
        if len(local_name) == 0 or len(domain_part) == 0:
            raise ValueError
        # lowercase the domain name and recreate the e-mail address
        address = '@'.join([local_name, domain_part.lower()])
    except ValueError:
        return {'success': False, 'message': "Invalid Email Address Format"}

    is_item_new = False
    email_object = None
    # During bulk adds a shared cache of already-seen objects is consulted
    # instead of querying mongo for every row.
    cached_results = cache.get(form_consts.EmailAddress.CACHED_RESULTS)
    if cached_results != None:
        email_object = cached_results.get(address)
    else:
        email_object = EmailAddress.objects(address=address).first()

    if not email_object:
        email_object = EmailAddress()
        email_object.address = address
        email_object.description = description
        email_object.local_name = local_name
        email_object.domain = domain_part.lower()
        is_item_new = True
        if cached_results != None:
            cached_results[address] = email_object

    # Merge descriptions: set when empty, append when different and non-empty.
    if not email_object.description:
        email_object.description = description or ''
    elif email_object.description != description:
        if description:
            email_object.description += "\n" + (description or '')

    if isinstance(source, basestring):
        source = [create_embedded_source(source,
                                         reference=reference,
                                         method=method,
                                         analyst=analyst)]
    if source:
        for s in source:
            email_object.add_source(s)
    else:
        return {"success" : False, "message" : "Missing source information."}

    if bucket_list:
        email_object.add_bucket_list(bucket_list, analyst)

    if ticket:
        email_object.add_ticket(ticket, analyst)

    related_obj = None
    if related_id:
        related_obj = class_from_id(related_type, related_id)
        if not related_obj:
            retVal['success'] = False
            retVal['message'] = 'Related Object not found.'
            return retVal

    resp_url = reverse('cripts.email_addresses.views.email_address_detail',
                       args=[email_object.address])

    if is_validate_only == False:
        email_object.save(username=analyst)

        #set the URL for viewing the new data
        if is_item_new == True:
            # Update the email stats
            # NOTE(review): a missing key is initialized to 0 rather than 1;
            # presumably generate_counts() later recomputes the true total.
            counts = mongo_connector(settings.COL_COUNTS)
            count_stats = counts.find_one({'name': 'counts'})
            if not count_stats or ('counts' not in count_stats):
                count_stats = {'counts':{}}
            if 'Email Addresses' not in count_stats['counts']:
                count_stats['counts']['Email Addresses'] = 0
            else:
                count_stats['counts']['Email Addresses'] = count_stats['counts']['Email Addresses'] + 1
            counts.update({'name': "counts"},
                          {'$set': {'counts': count_stats['counts']}},
                          upsert=True)
            retVal['message'] = ('Success! Click here to view the new Email: '
                                 '<a href="%s">%s</a>' % (resp_url,
                                                          email_object.address))
        else:
            message = ('Updated existing Email: '
                       '<a href="%s">%s</a>' % (resp_url,
                                                email_object.address))
            retVal['message'] = message
            retVal['status'] = form_consts.Status.DUPLICATE
            retVal['warning'] = message
    elif is_validate_only == True:
        if email_object.id != None and is_item_new == False:
            message = ('Warning: Email already exists: '
                       '<a href="%s">%s</a>' % (resp_url,
                                                email_object.address))
            retVal['message'] = message
            retVal['status'] = form_consts.Status.DUPLICATE
            retVal['warning'] = message

    if related_obj and email_object and relationship_type:
        relationship_type=RelationshipTypes.inverse(relationship=relationship_type)
        email_object.add_relationship(related_obj,
                                      relationship_type,
                                      analyst=analyst,
                                      get_rels=False)
        email_object.save(username=analyst)

    # run email triage
    if is_item_new and is_validate_only == False:
        email_object.reload()
        run_triage(email_object, analyst)

    retVal['success'] = True
    retVal['object'] = email_object
    return retVal
def username_add_update(name, description, source=None, method='',
                        reference='', analyst=None, datasets=None,
                        bucket_list=None, ticket=None, is_validate_only=False,
                        cache={}, related_id=None, related_type=None,
                        relationship_type=None):
    """
    Add a new UserName to the database, or update an existing one.

    :param name: The username to add/update.
    :type name: str
    :param description: Description of the username.
    :type description: str
    :param source: Source name (str) or list of embedded sources.
    :param method: Method of acquisition.
    :type method: str
    :param reference: Reference to the source data.
    :type reference: str
    :param analyst: The user adding/updating this username.
    :type analyst: str
    :param datasets: Unused here; accepted for call-signature compatibility.
    :param bucket_list: Buckets to assign.
    :param ticket: Ticket to assign.
    :param is_validate_only: Validate without saving.
    :type is_validate_only: bool
    :param cache: Cached data for bulk operations.
    :type cache: dict
    :param related_id: ObjectId of a related top-level object.
    :param related_type: Type of the related top-level object.
    :param relationship_type: Relationship to create to the related object.
    :returns: dict with keys "success" (bool), "message" (str),
              "object" (UserName), and optionally "status"/"warning".
    """

    retVal = {}
    if not source:
        return {"success" : False, "message" : "Missing source information."}

    is_item_new = False
    username_object = None
    cached_results = cache.get(form_consts.UserName.CACHED_RESULTS)

    if cached_results != None:
        # BUG FIX: previously looked up the undefined name 'username',
        # raising NameError whenever the bulk-add cache was populated.
        username_object = cached_results.get(name)
    else:
        username_object = UserName.objects(name=name).first()

    if not username_object:
        username_object = UserName()
        username_object.name = name
        username_object.description = description
        is_item_new = True
        if cached_results != None:
            # BUG FIX: cache key was also the undefined 'username'.
            cached_results[name] = username_object

    # Merge descriptions: set when empty, append when different and non-empty.
    if not username_object.description:
        username_object.description = description or ''
    elif username_object.description != description:
        # BUG FIX: guard like the email-address handler so a falsy
        # description no longer appends a bare newline.
        if description:
            username_object.description += "\n" + (description or '')

    if isinstance(source, basestring):
        source = [create_embedded_source(source,
                                         reference=reference,
                                         method=method,
                                         analyst=analyst)]
    if source:
        for s in source:
            username_object.add_source(s)
    else:
        return {"success" : False, "message" : "Missing source information."}

    if bucket_list:
        username_object.add_bucket_list(bucket_list, analyst)

    if ticket:
        username_object.add_ticket(ticket, analyst)

    related_obj = None
    if related_id:
        related_obj = class_from_id(related_type, related_id)
        if not related_obj:
            retVal['success'] = False
            retVal['message'] = 'Related Object not found.'
            return retVal

    # NOTE(review): the detail URL is keyed on 'username_id' while the email
    # handler keys on the value field — confirm UserName defines username_id.
    resp_url = reverse('cripts.usernames.views.username_detail',
                       args=[username_object.username_id])

    if is_validate_only == False:
        username_object.save(username=analyst)

        #set the URL for viewing the new data
        if is_item_new == True:
            # Update the username stats
            counts = mongo_connector(settings.COL_COUNTS)
            count_stats = counts.find_one({'name': 'counts'})
            if not count_stats or ('counts' not in count_stats):
                count_stats = {'counts':{}}
            if 'UserNames' not in count_stats['counts']:
                count_stats['counts']['UserNames'] = 0
            else:
                count_stats['counts']['UserNames'] = count_stats['counts']['UserNames'] + 1
            counts.update({'name': "counts"},
                          {'$set': {'counts': count_stats['counts']}},
                          upsert=True)
            # BUG FIX: restored the string literals that had been corrupted
            # with a stray '******' between adjacent string constants.
            retVal['message'] = ('Success! Click here to view the new UserName: '
                                 '<a href="%s">%s</a>' % (resp_url,
                                                          username_object.name))
        else:
            message = ('Updated existing UserName: '
                       '<a href="%s">%s</a>' % (resp_url,
                                                username_object.name))
            retVal['message'] = message
            retVal['status'] = form_consts.Status.DUPLICATE
            retVal['warning'] = message
    elif is_validate_only == True:
        if username_object.id != None and is_item_new == False:
            message = ('Warning: UserName already exists: '
                       '<a href="%s">%s</a>' % (resp_url,
                                                username_object.name))
            retVal['message'] = message
            retVal['status'] = form_consts.Status.DUPLICATE
            retVal['warning'] = message

    if related_obj and username_object and relationship_type:
        relationship_type = RelationshipTypes.inverse(relationship=relationship_type)
        username_object.add_relationship(related_obj,
                                         relationship_type,
                                         analyst=analyst,
                                         get_rels=False)
        username_object.save(username=analyst)

    # run username triage
    if is_item_new and is_validate_only == False:
        username_object.reload()
        run_triage(username_object, analyst)

    retVal['success'] = True
    retVal['object'] = username_object
    return retVal
def add_object(type_, id_, object_type, source, method, reference, user,
               value=None, file_=None, add_indicator=False, get_objects=True,
               tlo=None, is_sort_relationships=False, is_validate_only=False,
               is_validate_locally=False, cache={}, **kwargs):
    """
    Add an object to the database.

    :param type_: The top-level object type.
    :type type_: str
    :param id_: The ObjectId of the top-level object.
    :type id_: str
    :param object_type: The type of the ObjectType being added.
    :type object_type: str
    :param source: The name of the source adding this object.
    :type source: str
    :param method: The method for this object.
    :type method: str
    :param reference: The reference for this object.
    :type reference: str
    :param user: The user adding this object.
    :type user: str
    :param value: The value of the object.
    :type value: str
    :param file_: The file if the object is a file upload.
    :type file_: file handle.
    :param add_indicator: Also add an indicator for this object.
    :type add_indicator: bool
    :param get_objects: Return the formatted list of objects when completed.
    :type get_objects: bool
    :param tlo: The CRIPTs top-level object we are adding objects to
                (optional, avoids a mongo lookup when already in hand).
    :type tlo: :class:`cripts.core.cripts_mongoengine.CriptsBaseAttributes`
    :param is_sort_relationships: Return all relationships and meta, sorted.
    :type is_sort_relationships: bool
    :param is_validate_only: Validate, but do not add to TLO.
    :type is_validate_only: bool
    :param is_validate_locally: Validate, but do not add b/c there is no TLO.
    :type is_validate_locally: bool
    :param cache: Cached data for bulk operations.
    :type cache: dict
    :returns: dict with keys "success" (bool), "message" (str),
              "objects" (list), "relationships" (list)
    """

    if is_validate_locally:
        # Local validation only — there is no TLO to attach anything to.
        return {"success": True}

    if not tlo:
        if type_ and id_:
            tlo = class_from_id(type_, id_)
        if not tlo:
            return {'success': False, 'message': "Failed to find TLO"}

    try:
        if file_:
            # File uploads are keyed by MD5; the hash becomes the object's
            # value and the filename becomes its reference.
            file_data = file_.read()
            file_name = file_.name
            file_md5 = md5(file_data).hexdigest()
            value = file_md5
            reference = file_name

        add_result = tlo.add_object(object_type, value, source, method,
                                    reference, user)
        if not add_result['success']:
            template = '%s! [Type: "%s"][Value: "%s"]'
            return {"success": False,
                    "message": template % (add_result['message'],
                                           object_type, value)}

        response = {'success': True}
        if not is_validate_only:
            # save the object
            tlo.update(add_to_set__obj=add_result['object'])
            response['message'] = "Object added successfully"
            if file_:
                #XXX: MongoEngine provides no direct GridFS access so we
                # need to use pymongo directly.
                col = settings.COL_OBJECTS
                grid = mongo_connector("%s.files" % col)
                # Store the file only if this MD5 is not already present.
                if grid.find({'md5': file_md5}).count() == 0:
                    put_file(file_name, file_data, collection=col)

        if is_sort_relationships == True:
            response['relationships'] = tlo.sort_relationships(user, meta=True)
        if get_objects:
            response['objects'] = tlo.sort_objects()
        response['id'] = str(tlo.id)
        return response
    except ValidationError as e:
        return {'success': False, 'message': str(e)}
def generate_email_address_jtable(request, option):
    """
    Generate the jtable data for rendering in the list template.

    :param request: The request for this jtable.
    :type request: :class:`django.http.HttpRequest`
    :param option: Action to take.
    :type option: str of either 'jtlist', 'jtdelete', or 'inline'.
    :returns: :class:`django.http.HttpResponse`
    """

    email_cls = EmailAddress
    type_ = "email_address"
    jt_meta = email_cls._meta['jtable_opts']

    if option == "jtlist":
        # Build the listing payload using the display-url settings from the
        # model's jtable options.
        listing = jtable_ajax_list(email_cls,
                                   jt_meta['details_url'],
                                   jt_meta['details_url_key'],
                                   request,
                                   includes=jt_meta['fields'])
        return HttpResponse(json.dumps(listing, default=json_handler),
                            content_type="application/json")

    if option == "jtdelete":
        payload = {"Result": "ERROR"}
        if jtable_ajax_delete(email_cls, request):
            # Keep the cached dashboard total in sync after a delete.
            counts = mongo_connector(settings.COL_COUNTS)
            stats = counts.find_one({'name': 'counts'})
            if not stats or ('counts' not in stats):
                stats = {'counts': {}}
            if 'Email Addresses' not in stats['counts']:
                stats['counts']['Email Addresses'] = 0
            else:
                stats['counts']['Email Addresses'] -= 1
            counts.update({'name': "counts"},
                          {'$set': {'counts': stats['counts']}},
                          upsert=True)
            payload = {"Result": "OK"}
        return HttpResponse(json.dumps(payload, default=json_handler),
                            content_type="application/json")

    # Configuration for the rendered jtable views.
    listing_view = 'cripts.%ses.views.%ses_listing' % (type_, type_)
    jtopts = {
        'title': "Email Addresses",
        'default_sort': jt_meta['default_sort'],
        'listurl': reverse(listing_view, args=('jtlist',)),
        'deleteurl': reverse(listing_view, args=('jtdelete',)),
        'searchurl': reverse(jt_meta['searchurl']),
        'fields': jt_meta['jtopts_fields'],
        'hidden_fields': jt_meta['hidden_fields'],
        'linked_fields': jt_meta['linked_fields'],
        'details_link': jt_meta['details_link'],
        'no_sort': jt_meta['no_sort'],
    }
    jtable = build_jtable(jtopts, request)
    # Toolbar entries; string values are JavaScript evaluated client-side.
    jtable['toolbar'] = [
        {'tooltip': "'All Email Addresses'",
         'text': "'All'",
         'click': "function () {$('#email_address_listing').jtable('load', {'refresh': 'yes'});}",
         'cssClass': "'jtable-toolbar-center'"},
        {'tooltip': "'New Email Addresses'",
         'text': "'New'",
         'click': "function () {$('#email_address_listing').jtable('load', {'refresh': 'yes', 'status': 'New'});}",
         'cssClass': "'jtable-toolbar-center'"},
        {'tooltip': "'Add Email Address'",
         'text': "'Add Email Address'",
         'click': "function () {$('#new-email_address').click()}"},
    ]

    if option == "inline":
        return render_to_response("jtable.html",
                                  {'jtable': jtable,
                                   'jtid': '%s_listing' % type_,
                                   'button': '%ses_tab' % type_},
                                  RequestContext(request))
    return render_to_response("%s_listing.html" % type_,
                              {'jtable': jtable,
                               'jtid': '%s_listing' % type_},
                              RequestContext(request))
def email_address_add_update(address, description=None, source=None, method='',
                             reference='', analyst=None, datasets=None,
                             bucket_list=None, ticket=None,
                             is_validate_only=False, cache={}, related_id=None,
                             related_type=None, relationship_type=None):
    """
    Add a new Email Address to the database, or fold the supplied data into
    an existing one.

    Validates the address (no whitespace, non-empty mailbox and domain,
    domain lowercased), attaches sources/buckets/tickets, optionally links a
    related TLO, and maintains the cached dashboard count for new entries.

    :returns: dict with keys "success" (bool), "message" (str),
              "object" (EmailAddress), and optionally "status"/"warning".
    """

    result = {}
    if not source:
        return {"success": False, "message": "Missing source information."}

    # Validate and normalize: reject whitespace or an empty mailbox/domain,
    # then rebuild the address with a lowercased domain.
    try:
        if ' ' in address:
            raise ValueError
        mailbox, domain = address.strip().split('@', 1)
        if not mailbox or not domain:
            raise ValueError
        address = '@'.join([mailbox, domain.lower()])
    except ValueError:
        return {'success': False, 'message': "Invalid Email Address Format"}

    is_item_new = False
    email_obj = None
    # Bulk adds consult a shared cache instead of querying mongo per row.
    bulk_cache = cache.get(form_consts.EmailAddress.CACHED_RESULTS)
    if bulk_cache is not None:
        email_obj = bulk_cache.get(address)
    else:
        email_obj = EmailAddress.objects(address=address).first()

    if not email_obj:
        email_obj = EmailAddress()
        email_obj.address = address
        email_obj.description = description
        email_obj.local_name = mailbox
        email_obj.domain = domain.lower()
        is_item_new = True
        if bulk_cache is not None:
            bulk_cache[address] = email_obj

    # Merge descriptions: set when empty, append when different and non-empty.
    if not email_obj.description:
        email_obj.description = description or ''
    elif email_obj.description != description:
        if description:
            email_obj.description += "\n" + (description or '')

    if isinstance(source, basestring):
        source = [create_embedded_source(source,
                                         reference=reference,
                                         method=method,
                                         analyst=analyst)]
    if source:
        for embedded in source:
            email_obj.add_source(embedded)
    else:
        return {"success": False, "message": "Missing source information."}

    if bucket_list:
        email_obj.add_bucket_list(bucket_list, analyst)
    if ticket:
        email_obj.add_ticket(ticket, analyst)

    related_obj = None
    if related_id:
        related_obj = class_from_id(related_type, related_id)
        if not related_obj:
            result['success'] = False
            result['message'] = 'Related Object not found.'
            return result

    detail_url = reverse('cripts.email_addresses.views.email_address_detail',
                         args=[email_obj.address])

    if is_validate_only == False:
        email_obj.save(username=analyst)
        if is_item_new == True:
            # Maintain the cached dashboard count for email addresses.
            counts = mongo_connector(settings.COL_COUNTS)
            stats = counts.find_one({'name': 'counts'})
            if not stats or ('counts' not in stats):
                stats = {'counts': {}}
            if 'Email Addresses' not in stats['counts']:
                stats['counts']['Email Addresses'] = 0
            else:
                stats['counts']['Email Addresses'] += 1
            counts.update({'name': "counts"},
                          {'$set': {'counts': stats['counts']}},
                          upsert=True)
            result['message'] = ('Success! Click here to view the new Email: '
                                 '<a href="%s">%s</a>' % (detail_url,
                                                          email_obj.address))
        else:
            dup_msg = ('Updated existing Email: '
                       '<a href="%s">%s</a>' % (detail_url, email_obj.address))
            result['message'] = dup_msg
            result['status'] = form_consts.Status.DUPLICATE
            result['warning'] = dup_msg
    elif is_validate_only == True:
        if email_obj.id is not None and is_item_new == False:
            dup_msg = ('Warning: Email already exists: '
                       '<a href="%s">%s</a>' % (detail_url, email_obj.address))
            result['message'] = dup_msg
            result['status'] = form_consts.Status.DUPLICATE
            result['warning'] = dup_msg

    if related_obj and email_obj and relationship_type:
        relationship_type = RelationshipTypes.inverse(
            relationship=relationship_type)
        email_obj.add_relationship(related_obj,
                                   relationship_type,
                                   analyst=analyst,
                                   get_rels=False)
        email_obj.save(username=analyst)

    # Run triage on newly-created, fully-saved email addresses.
    if is_item_new and is_validate_only == False:
        email_obj.reload()
        run_triage(email_obj, analyst)

    result['success'] = True
    result['object'] = email_obj
    return result
def username_add_update(name, description, source=None, method='',
                        reference='', analyst=None, datasets=None,
                        bucket_list=None, ticket=None, is_validate_only=False,
                        cache={}, related_id=None, related_type=None,
                        relationship_type=None):
    """
    Add a new UserName to the database, or update an existing one.

    :param name: The username to add/update.
    :type name: str
    :param description: Description of the username.
    :type description: str
    :param source: Source name (str) or list of embedded sources.
    :param method: Method of acquisition.
    :type method: str
    :param reference: Reference to the source data.
    :type reference: str
    :param analyst: The user adding/updating this username.
    :type analyst: str
    :param datasets: Unused here; accepted for call-signature compatibility.
    :param bucket_list: Buckets to assign.
    :param ticket: Ticket to assign.
    :param is_validate_only: Validate without saving.
    :type is_validate_only: bool
    :param cache: Cached data for bulk operations.
    :type cache: dict
    :param related_id: ObjectId of a related top-level object.
    :param related_type: Type of the related top-level object.
    :param relationship_type: Relationship to create to the related object.
    :returns: dict with keys "success" (bool), "message" (str),
              "object" (UserName), and optionally "status"/"warning".
    """

    retVal = {}
    if not source:
        return {"success": False, "message": "Missing source information."}

    is_item_new = False
    username_object = None
    cached_results = cache.get(form_consts.UserName.CACHED_RESULTS)

    if cached_results != None:
        # BUG FIX: previously looked up the undefined name 'username',
        # raising NameError whenever the bulk-add cache was populated.
        username_object = cached_results.get(name)
    else:
        username_object = UserName.objects(name=name).first()

    if not username_object:
        username_object = UserName()
        username_object.name = name
        username_object.description = description
        is_item_new = True
        if cached_results != None:
            # BUG FIX: cache key was also the undefined 'username'.
            cached_results[name] = username_object

    # Merge descriptions: set when empty, append when different and non-empty.
    if not username_object.description:
        username_object.description = description or ''
    elif username_object.description != description:
        # BUG FIX: guard like the email-address handler so a falsy
        # description no longer appends a bare newline.
        if description:
            username_object.description += "\n" + (description or '')

    if isinstance(source, basestring):
        source = [
            create_embedded_source(source,
                                   reference=reference,
                                   method=method,
                                   analyst=analyst)
        ]
    if source:
        for s in source:
            username_object.add_source(s)
    else:
        return {"success": False, "message": "Missing source information."}

    if bucket_list:
        username_object.add_bucket_list(bucket_list, analyst)

    if ticket:
        username_object.add_ticket(ticket, analyst)

    related_obj = None
    if related_id:
        related_obj = class_from_id(related_type, related_id)
        if not related_obj:
            retVal['success'] = False
            retVal['message'] = 'Related Object not found.'
            return retVal

    # NOTE(review): the detail URL is keyed on 'username_id' while the email
    # handler keys on the value field — confirm UserName defines username_id.
    resp_url = reverse('cripts.usernames.views.username_detail',
                       args=[username_object.username_id])

    if is_validate_only == False:
        username_object.save(username=analyst)

        #set the URL for viewing the new data
        if is_item_new == True:
            # Update the username stats
            counts = mongo_connector(settings.COL_COUNTS)
            count_stats = counts.find_one({'name': 'counts'})
            if not count_stats or ('counts' not in count_stats):
                count_stats = {'counts': {}}
            if 'UserNames' not in count_stats['counts']:
                count_stats['counts']['UserNames'] = 0
            else:
                count_stats['counts'][
                    'UserNames'] = count_stats['counts']['UserNames'] + 1
            counts.update({'name': "counts"},
                          {'$set': {'counts': count_stats['counts']}},
                          upsert=True)
            # BUG FIX: restored the string literals that had been corrupted
            # with a stray '******' between adjacent string constants.
            retVal['message'] = (
                'Success! Click here to view the new UserName: '
                '<a href="%s">%s</a>' % (resp_url, username_object.name))
        else:
            message = ('Updated existing UserName: '
                       '<a href="%s">%s</a>' % (resp_url,
                                                username_object.name))
            retVal['message'] = message
            retVal['status'] = form_consts.Status.DUPLICATE
            retVal['warning'] = message
    elif is_validate_only == True:
        if username_object.id != None and is_item_new == False:
            message = ('Warning: UserName already exists: '
                       '<a href="%s">%s</a>' % (resp_url,
                                                username_object.name))
            retVal['message'] = message
            retVal['status'] = form_consts.Status.DUPLICATE
            retVal['warning'] = message

    if related_obj and username_object and relationship_type:
        relationship_type = RelationshipTypes.inverse(
            relationship=relationship_type)
        username_object.add_relationship(related_obj,
                                         relationship_type,
                                         analyst=analyst,
                                         get_rels=False)
        username_object.save(username=analyst)

    # run username triage
    if is_item_new and is_validate_only == False:
        username_object.reload()
        run_triage(username_object, analyst)

    retVal['success'] = True
    retVal['object'] = username_object
    return retVal