Beispiel #1
0
def check_config_cron_user(model, request=None):
    """ check config user  """

    # user is configured - nothing to stop
    if model.csv_import_username:
        return False

    # no csv_import_username (possible after initial migration w/o user defined) - stop immediately
    # if function was called from 'system_create_cron' (creating scheduled task)
    if request:
        # call message
        messages.error(request,
                       'No user for import defined. Check config!')
        # username for logger comes from the web request
        logger_username = str(request.user)
    # if function was called from 'system_cron' (scheduled task)
    else:
        # call message for all users
        error_message_cron('No user for import defined. Check config!')
        # cron username from main config (needed for logger if no user was defined in the proper config)
        main_config = MainConfigModel.objects.get(
            main_config_name='MainConfig')
        logger_username = main_config.cron_username

    # call logger
    error_logger(logger_username,
                 ' SYSTEM_IMPORTER_FILE_CSV_NO_USER_DEFINED')

    # stop condition returned to 'csv.system_create_cron' or 'csv.system_cron'
    return True
Beispiel #2
0
def system_creator_async(request_post, request_user):
    """ function to create many systems at once

    request_post: POST data providing 'systemlist' (one system name per line)
    request_user: user used for auto fields and logging
    """

    # call logger
    debug_logger(str(request_user), " SYSTEM_CREATOR_BEGIN")

    # extract lines from systemlist (list results from request object via large text area)
    lines = request_post.get('systemlist').splitlines()

    # iterate over lines
    for line in lines:

        # skip empty lines
        if line == '':
            warning_logger(str(request_user), " SYSTEM_CREATOR_ROW_EMPTY")
            continue

        # check line for length of string
        if len(line) > 50:
            warning_logger(str(request_user), " SYSTEM_CREATOR_LONG_STRING")
            continue

        # skip line because system with this systemname already exists
        # ('exists()' avoids counting all matching rows)
        if System.objects.filter(system_name=line).exists():
            # call logger
            error_logger(str(request_user), " SYSTEM_CREATOR_SYSTEM_EXISTS " + "system_name:" + line)
            continue

        # create form with request data
        form = SystemCreatorForm(request_post)

        # create system
        if form.is_valid():

            # don't save form yet
            system = form.save(commit=False)

            # set system_name
            system.system_name = line

            # set auto values
            system.system_created_by_user_id = request_user
            system.system_modified_by_user_id = request_user
            system.system_modify_time = timezone.now()

            # save object
            system.save()

            # save manytomany
            form.save_m2m()

            # call logger (model method of 'System')
            system.logger(str(request_user), ' SYSTEM_CREATOR_EXECUTED')

        else:
            # do not silently skip invalid form data - log and continue with next line
            warning_logger(str(request_user), " SYSTEM_CREATOR_FORM_INVALID")

    # call logger
    debug_logger(str(request_user), " SYSTEM_CREATOR_END")
Beispiel #3
0
def check_content_file_type(rows, logger_username, request=None):
    """ check file for csv respectively some kind of text file

    rows: iterable of CSV rows (iterating it triggers decoding errors)
    logger_username: username string used for the error logger
    request: set when called from 'system_instant' or 'system_upload',
             None when called from 'system_cron'
    """

    try:
        # try to iterate over rows - decoding happens lazily, so this
        # surfaces UnicodeDecodeError / null-byte errors without keeping data
        for row in rows:
            # do nothing
            pass

        # return True if successful to 'csv_main.system_handler'
        return True

    # wrong file type
    except UnicodeDecodeError:
        # if function was called from 'system_instant' or 'system_upload'
        if request:
            # call message
            messages.error(
                request,
                'Wrong file type for CSV import. Check config or file system!')
        # if function was called from 'system_cron'
        else:
            # call message for all users
            error_message_cron(
                'Wrong file type for CSV import. Check config or file system!')
        # call logger
        error_logger(logger_username,
                     ' SYSTEM_IMPORTER_FILE_CSV_WRONG_FILE_TYPE')
        # return False if not successful to 'csv_main.system_handler'
        return False

    # other file errors (e. g. file containing null bytes)
    # narrow bare 'except:' to 'except Exception:' so SystemExit and
    # KeyboardInterrupt are not swallowed
    except Exception:
        # if function was called from 'system_instant' or 'system_upload'
        if request:
            # call message
            messages.error(request,
                           'File is corrupted. Check config or file system!')
        # if function was called from 'system_cron'
        else:
            # call message for all users
            error_message_cron(
                'File is corrupted. Check config or file system!')
        # call logger
        error_logger(logger_username,
                     ' SYSTEM_IMPORTER_FILE_CSV_CORRUPTED_FILE')
        # return False if not successful to 'csv_main.system_handler'
        return False
Beispiel #4
0
def path_check(request):
    """ check path from `dfirtrack.config` is existing in filesystem """

    # MARKDOWN_PATH exists in the filesystem - exporter may proceed
    if os.path.isdir(markdown_path):
        return True

    # call logger
    error_logger(str(request.user), " MARKDOWN_PATH_NOT_EXISTING")
    messages.error(
        request,
        "The path MARKDOWN_PATH does not exist. Check `dfirtrack.config` or filesystem!"
    )
    # call logger for consistency
    debug_logger(str(request.user), " SYSTEM_MARKDOWN_SYSTEMS_END")
    # leave exporter
    return False
Beispiel #5
0
def config_check(request):
    """ check variables in `dfirtrack.config` """

    # MARKDOWN_PATH is set - exporter may proceed
    if markdown_path != '':
        return True

    # call logger
    error_logger(str(request.user), " MARKDOWN_PATH_VARIABLE_UNDEFINED")
    messages.error(
        request,
        "The variable MARKDOWN_PATH seems to be undefined. Check `dfirtrack.config`!"
    )
    # call logger for consistency
    debug_logger(str(request.user), " SYSTEM_MARKDOWN_SYSTEMS_END")
    # leave exporter
    return False
Beispiel #6
0
def config_check(request):
    """ check variables in `dfirtrack.config` """

    # pair each config value with the name used in messages and log lines;
    # order matters: GIRAF_PASS is checked first, then GIRAF_URL, then GIRAF_USER
    giraf_variables = (
        (giraf_pass, 'GIRAF_PASS'),
        (giraf_url, 'GIRAF_URL'),
        (giraf_user, 'GIRAF_USER'),
    )

    for value, name in giraf_variables:
        # undefined variable - report and leave importer at the first hit
        if value == '':
            # call logger
            error_logger(str(request.user), " " + name + "_VARIABLE_UNDEFINED")
            messages.error(
                request,
                "The variable " + name + " seems to be undefined. Check `dfirtrack.config`!"
            )
            # call logger for consistency
            debug_logger(str(request.user), " API_GIRAF_SYSTEMS_END")
            # leave importer (falls through with implicit None when all variables are set,
            # matching the original control flow)
            return redirect('/systems/')
Beispiel #7
0
def check_content_file_system(model, request=None):
    """ check file system """

    # set username for logger:
    # web user if function was called from 'system_instant',
    # configured import user if function was called from 'system_cron'
    if request:
        logger_username = str(request.user)
    else:
        # check for existence of user in config was done before
        logger_username = model.csv_import_username.username

    # helper: emit message (web or cron), log the error, signal stop
    def _stop(message_text, log_text):
        # if function was called from 'system_instant'
        if request:
            # call message
            messages.error(request, message_text)
        # if function was called from 'system_cron'
        else:
            # call message for all users
            error_message_cron(message_text)
        # call logger
        error_logger(logger_username, log_text)
        return True

    # build csv file path
    csv_import_file = model.csv_import_path + '/' + model.csv_import_filename

    # run the checks in order; the first failing one stops immediately
    # CSV import path does not exist
    if not os.path.isdir(model.csv_import_path):
        stop_system_importer_file_csv = _stop(
            'CSV import path does not exist. Check config or file system!',
            ' SYSTEM_IMPORTER_FILE_CSV_PATH_NOT_EXISTING')
    # no read permission for CSV import path
    elif not os.access(model.csv_import_path, os.R_OK):
        stop_system_importer_file_csv = _stop(
            'No read permission for CSV import path. Check config or file system!',
            ' SYSTEM_IMPORTER_FILE_CSV_PATH_NO_READ_PERMISSION')
    # CSV import file does not exist
    elif not os.path.isfile(csv_import_file):
        stop_system_importer_file_csv = _stop(
            'CSV import file does not exist. Check config or provide file!',
            ' SYSTEM_IMPORTER_FILE_CSV_FILE_NOT_EXISTING')
    # no read permission for CSV import file
    elif not os.access(csv_import_file, os.R_OK):
        stop_system_importer_file_csv = _stop(
            'No read permission for CSV import file. Check config or file system!',
            ' SYSTEM_IMPORTER_FILE_CSV_FILE_NO_READ_PERMISSION')
    # CSV import file is empty
    elif os.path.getsize(csv_import_file) == 0:
        stop_system_importer_file_csv = _stop(
            'CSV import file is empty. Check config or file system!',
            ' SYSTEM_IMPORTER_FILE_CSV_FILE_EMPTY')
    # all checks passed
    else:
        stop_system_importer_file_csv = False

    # return stop condition to 'csv.system_create_cron' or 'csv.system_cron' or 'csv.system_instant'
    return stop_system_importer_file_csv
Beispiel #8
0
def check_config_attributes(model, request=None):
    """ check config for logic errors about attributes """

    # reset stop condition
    stop_system_importer_file_csv = False
    """ set username for logger """

    # if function was called from 'system_instant' or 'system_upload' or 'system_create_cron'
    if request:
        logger_username = str(request.user)
    # if function was called from 'system_cron'
    else:
        logger_username = model.csv_import_username.username  # check for existence of user in config was done before
    """ check numeric values for column fields """

    # CSV_COLUMN_SYSTEM
    if not 1 <= model.csv_column_system <= 99:
        # if function was called from 'system_instant' or 'system_upload'
        if request:
            # call message
            messages.error(
                request,
                '`CSV_COLUMN_SYSTEM` is outside the allowed range. Check config!'
            )
        # call logger
        error_logger(
            logger_username,
            ' SYSTEM_IMPORTER_FILE_CSV variable CSV_COLUMN_SYSTEM out of range'
        )
        # set stop condition
        stop_system_importer_file_csv = True

    # CSV_COLUMN_IP
    if model.csv_column_ip:
        # check CSV_COLUMN_IP for value
        if not 1 <= model.csv_column_ip <= 99:
            # if function was called from 'system_instant' or 'system_upload'
            if request:
                # call message
                messages.error(
                    request,
                    '`CSV_COLUMN_IP` is outside the allowed range. Check config!'
                )
            # call logger
            error_logger(
                logger_username,
                ' SYSTEM_IMPORTER_FILE_CSV variable CSV_COLUMN_IP out of range'
            )
            # set stop condition
            stop_system_importer_file_csv = True

    # CSV_COLUMN_DNSNAME
    if model.csv_column_dnsname:
        # check CSV_COLUMN_DNSNAME for value
        if not 1 <= model.csv_column_dnsname <= 99:
            # if function was called from 'system_instant' or 'system_upload'
            if request:
                # call message
                messages.error(
                    request,
                    '`CSV_COLUMN_DNSNAME` is outside the allowed range. Check config!'
                )
            # call logger
            error_logger(
                logger_username,
                ' SYSTEM_IMPORTER_FILE_CSV variable CSV_COLUMN_DNSNAME out of range'
            )
            # set stop condition
            stop_system_importer_file_csv = True

    # CSV_COLUMN_DOMAIN
    if model.csv_column_domain:
        # check CSV_COLUMN_DOMAIN for value
        if not 1 <= model.csv_column_domain <= 99:
            # if function was called from 'system_instant' or 'system_upload'
            if request:
                # call message
                messages.error(
                    request,
                    '`CSV_COLUMN_DOMAIN` is outside the allowed range. Check config!'
                )
            # call logger
            error_logger(
                logger_username,
                ' SYSTEM_IMPORTER_FILE_CSV variable CSV_COLUMN_DOMAIN out of range'
            )
            # set stop condition
            stop_system_importer_file_csv = True

    # CSV_COLUMN_LOCATION
    if model.csv_column_location:
        # check CSV_COLUMN_LOCATION for value
        if not 1 <= model.csv_column_location <= 99:
            # if function was called from 'system_instant' or 'system_upload'
            if request:
                # call message
                messages.error(
                    request,
                    '`CSV_COLUMN_LOCATION` is outside the allowed range. Check config!'
                )
            # call logger
            error_logger(
                logger_username,
                ' SYSTEM_IMPORTER_FILE_CSV variable CSV_COLUMN_LOCATION out of range'
            )
            # set stop condition
            stop_system_importer_file_csv = True

    # CSV_COLUMN_OS
    if model.csv_column_os:
        # check CSV_COLUMN_OS for value
        if not 1 <= model.csv_column_os <= 99:
            # if function was called from 'system_instant' or 'system_upload'
            if request:
                # call message
                messages.error(
                    request,
                    '`CSV_COLUMN_OS` is outside the allowed range. Check config!'
                )
            # call logger
            error_logger(
                logger_username,
                ' SYSTEM_IMPORTER_FILE_CSV variable CSV_COLUMN_OS out of range'
            )
            # set stop condition
            stop_system_importer_file_csv = True

    # CSV_COLUMN_REASON
    if model.csv_column_reason:
        # check CSV_COLUMN_REASON for value
        if not 1 <= model.csv_column_reason <= 99:
            # if function was called from 'system_instant' or 'system_upload'
            if request:
                # call message
                messages.error(
                    request,
                    '`CSV_COLUMN_REASON` is outside the allowed range. Check config!'
                )
            # call logger
            error_logger(
                logger_username,
                ' SYSTEM_IMPORTER_FILE_CSV variable CSV_COLUMN_REASON out of range'
            )
            # set stop condition
            stop_system_importer_file_csv = True

    # CSV_COLUMN_RECOMMENDATION
    if model.csv_column_recommendation:
        # check CSV_COLUMN_RECOMMENDATION for value
        if not 1 <= model.csv_column_recommendation <= 99:
            # if function was called from 'system_instant' or 'system_upload'
            if request:
                # call message
                messages.error(
                    request,
                    '`CSV_COLUMN_RECOMMENDATION` is outside the allowed range. Check config!'
                )
            # call logger
            error_logger(
                logger_username,
                ' SYSTEM_IMPORTER_FILE_CSV variable CSV_COLUMN_RECOMMENDATION out of range'
            )
            # set stop condition
            stop_system_importer_file_csv = True

    # CSV_COLUMN_SERVICEPROVIDER
    if model.csv_column_serviceprovider:
        # check CSV_COLUMN_SERVICEPROVIDER for value
        if not 1 <= model.csv_column_serviceprovider <= 99:
            # if function was called from 'system_instant' or 'system_upload'
            if request:
                # call message
                messages.error(
                    request,
                    '`CSV_COLUMN_SERVICEPROVIDER` is outside the allowed range. Check config!'
                )
            # call logger
            error_logger(
                logger_username,
                ' SYSTEM_IMPORTER_FILE_CSV variable CSV_COLUMN_SERVICEPROVIDER out of range'
            )
            # set stop condition
            stop_system_importer_file_csv = True

    # CSV_COLUMN_SYSTEMTYPE
    if model.csv_column_systemtype:
        # check CSV_COLUMN_SYSTEMTYPE for value
        if not 1 <= model.csv_column_systemtype <= 99:
            # if function was called from 'system_instant' or 'system_upload'
            if request:
                # call message
                messages.error(
                    request,
                    '`CSV_COLUMN_SYSTEMTYPE` is outside the allowed range. Check config!'
                )
            # call logger
            error_logger(
                logger_username,
                ' SYSTEM_IMPORTER_FILE_CSV variable CSV_COLUMN_SYSTEMTYPE out of range'
            )
            # set stop condition
            stop_system_importer_file_csv = True

    # CSV_COLUMN_CASE
    if model.csv_column_case:
        # check CSV_COLUMN_CASE for value
        if not 1 <= model.csv_column_case <= 99:
            # if function was called from 'system_instant' or 'system_upload'
            if request:
                # call message
                messages.error(
                    request,
                    '`CSV_COLUMN_CASE` is outside the allowed range. Check config!'
                )
            # call logger
            error_logger(
                logger_username,
                ' SYSTEM_IMPORTER_FILE_CSV variable CSV_COLUMN_CASE out of range'
            )
            # set stop condition
            stop_system_importer_file_csv = True

    # CSV_COLUMN_COMPANY
    if model.csv_column_company:
        # check CSV_COLUMN_COMPANY for value
        if not 1 <= model.csv_column_company <= 99:
            # if function was called from 'system_instant' or 'system_upload'
            if request:
                # call message
                messages.error(
                    request,
                    '`CSV_COLUMN_COMPANY` is outside the allowed range. Check config!'
                )
            # call logger
            error_logger(
                logger_username,
                ' SYSTEM_IMPORTER_FILE_CSV variable CSV_COLUMN_COMPANY out of range'
            )
            # set stop condition
            stop_system_importer_file_csv = True

    # CSV_COLUMN_TAG
    if model.csv_column_tag:
        # check CSV_COLUMN_TAG for value
        if not 1 <= model.csv_column_tag <= 99:
            # if function was called from 'system_instant' or 'system_upload'
            if request:
                # call message
                messages.error(
                    request,
                    '`CSV_COLUMN_TAG` is outside the allowed range. Check config!'
                )
            # call logger
            error_logger(
                logger_username,
                ' SYSTEM_IMPORTER_FILE_CSV variable CSV_COLUMN_TAG out of range'
            )
            # set stop condition
            stop_system_importer_file_csv = True
    """ check for EITHER 'choice' and 'column' OR 'default' """

    # reset error condition
    attribute_error = False

    # set error counter
    attribute_error_counter = 0

    # create empty list for error IDs
    attribute_error_id = []

    # ip - CSV chosen and no CSV column filled out
    if model.csv_choice_ip and not model.csv_column_ip:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('ip_01')
    # ip - CSV not chosen and CSV column filled out
    if not model.csv_choice_ip and model.csv_column_ip:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('ip_02')

    # dnsname - CSV chosen and no CSV column filled out
    if model.csv_choice_dnsname and not model.csv_column_dnsname:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('dnsname_01')
    # dnsname - CSV not chosen and CSV column filled out
    if not model.csv_choice_dnsname and model.csv_column_dnsname:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('dnsname_02')
    # dnsname - CSV chosen and DB chosen
    if model.csv_choice_dnsname and model.csv_default_dnsname:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('dnsname_03')
    # dnsname - CSV column filled out and DB chosen
    if model.csv_column_dnsname and model.csv_default_dnsname:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('dnsname_04')

    # domain - CSV chosen and no CSV column filled out
    if model.csv_choice_domain and not model.csv_column_domain:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('domain_01')
    # domain - CSV not chosen and CSV column filled out
    if not model.csv_choice_domain and model.csv_column_domain:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('domain_02')
    # domain - CSV chosen and DB chosen
    if model.csv_choice_domain and model.csv_default_domain:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('domain_03')
    # domain - CSV column filled out and DB chosen
    if model.csv_column_domain and model.csv_default_domain:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('domain_04')

    # location - CSV chosen and no CSV column filled out
    if model.csv_choice_location and not model.csv_column_location:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('location_01')
    # location - CSV not chosen and CSV column filled out
    if not model.csv_choice_location and model.csv_column_location:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('location_02')
    # location - CSV chosen and DB chosen
    if model.csv_choice_location and model.csv_default_location:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('location_03')
    # location - CSV column filled out and DB chosen
    if model.csv_column_location and model.csv_default_location:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('location_04')

    # os - CSV chosen and no CSV column filled out
    if model.csv_choice_os and not model.csv_column_os:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('os_01')
    # os - CSV not chosen and CSV column filled out
    if not model.csv_choice_os and model.csv_column_os:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('os_02')
    # os - CSV chosen and DB chosen
    if model.csv_choice_os and model.csv_default_os:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('os_03')
    # os - CSV column filled out and DB chosen
    if model.csv_column_os and model.csv_default_os:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('os_04')

    # reason - CSV chosen and no CSV column filled out
    if model.csv_choice_reason and not model.csv_column_reason:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('reason_01')
    # reason - CSV not chosen and CSV column filled out
    if not model.csv_choice_reason and model.csv_column_reason:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('reason_02')
    # reason - CSV chosen and DB chosen
    if model.csv_choice_reason and model.csv_default_reason:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('reason_03')
    # reason - CSV column filled out and DB chosen
    if model.csv_column_reason and model.csv_default_reason:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('reason_04')

    # recommendation - CSV chosen and no CSV column filled out
    if model.csv_choice_recommendation and not model.csv_column_recommendation:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('recommendation_01')
    # recommendation - CSV not chosen and CSV column filled out
    if not model.csv_choice_recommendation and model.csv_column_recommendation:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('recommendation_02')
    # recommendation - CSV chosen and DB chosen
    if model.csv_choice_recommendation and model.csv_default_recommendation:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('recommendation_03')
    # recommendation - CSV column filled out and DB chosen
    if model.csv_column_recommendation and model.csv_default_recommendation:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('recommendation_04')

    # serviceprovider - CSV chosen and no CSV column filled out
    if model.csv_choice_serviceprovider and not model.csv_column_serviceprovider:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('serviceprovider_01')
    # serviceprovider - CSV not chosen and CSV column filled out
    if not model.csv_choice_serviceprovider and model.csv_column_serviceprovider:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('serviceprovider_02')
    # serviceprovider - CSV chosen and DB chosen
    if model.csv_choice_serviceprovider and model.csv_default_serviceprovider:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('serviceprovider_03')
    # serviceprovider - CSV column filled out and DB chosen
    if model.csv_column_serviceprovider and model.csv_default_serviceprovider:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('serviceprovider_04')

    # systemtype - CSV chosen and no CSV column filled out
    if model.csv_choice_systemtype and not model.csv_column_systemtype:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('systemtype_01')
    # systemtype - CSV not chosen and CSV column filled out
    if not model.csv_choice_systemtype and model.csv_column_systemtype:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('systemtype_02')
    # systemtype - CSV chosen and DB chosen
    if model.csv_choice_systemtype and model.csv_default_systemtype:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('systemtype_03')
    # systemtype - CSV column filled out and DB chosen
    if model.csv_column_systemtype and model.csv_default_systemtype:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('systemtype_04')

    # case - CSV chosen and no CSV column filled out
    if model.csv_choice_case and not model.csv_column_case:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('case_01')
    # case - CSV not chosen and CSV column filled out
    if not model.csv_choice_case and model.csv_column_case:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('case_02')
    # case - CSV chosen and DB chosen
    if model.csv_choice_case and model.csv_default_case.all():
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('case_03')
    # case - CSV column filled out and DB chosen
    if model.csv_column_case and model.csv_default_case.all():
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('case_04')

    # company - CSV chosen and no CSV column filled out
    if model.csv_choice_company and not model.csv_column_company:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('company_01')
    # company - CSV not chosen and CSV column filled out
    if not model.csv_choice_company and model.csv_column_company:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('company_02')
    # company - CSV chosen and DB chosen
    if model.csv_choice_company and model.csv_default_company.all():
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('company_03')
    # company - CSV column filled out and DB chosen
    if model.csv_column_company and model.csv_default_company.all():
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('company_04')

    # tag - CSV chosen and no CSV column filled out
    if model.csv_choice_tag and not model.csv_column_tag:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('tag_01')
    # tag - CSV not chosen and CSV column filled out
    if not model.csv_choice_tag and model.csv_column_tag:
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('tag_02')
    # tag - CSV chosen and DB chosen
    if model.csv_choice_tag and model.csv_default_tag.all():
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('tag_03')
    # tag - CSV column filled out and DB chosen
    if model.csv_column_tag and model.csv_default_tag.all():
        # set attribute error
        attribute_error = True
        # autoincrement counter
        attribute_error_counter += 1
        # add error code
        attribute_error_id.append('tag_04')

    # check previous checks for error - one message / log for all
    if attribute_error:
        # if function was called from 'system_instant' or 'system_upload'
        if request:
            if attribute_error_counter == 1:
                # call message
                messages.error(
                    request,
                    f'There was {attribute_error_counter} error regarding attributes. Check config!'
                )
            elif attribute_error_counter > 1:
                # call message
                messages.error(
                    request,
                    f'There were {attribute_error_counter} errors regarding attributes. Check config!'
                )
        # call logger
        error_logger(
            logger_username,
            f' SYSTEM_IMPORTER_FILE_CSV attributes misconfigured {attribute_error_id}'
        )
        # set stop condition
        stop_system_importer_file_csv = True
    """ check tag pefix and delimiter in combination with CSV and DB """

    # tag - CSV chosen and prefix and / or prefix delimiter not set
    if model.csv_choice_tag and (not model.csv_tag_prefix
                                 or not model.csv_tag_prefix_delimiter):
        # if function was called from 'system_instant' or 'system_upload'
        if request:
            # call message
            messages.error(
                request,
                'Choose prefix and delimiter for tag import from CSV to distinguish between manual set tags.'
            )
        # call logger
        error_logger(
            logger_username,
            ' SYSTEM_IMPORTER_FILE_CSV tag prefix and / or tag delimiter not set'
        )
        # set stop condition
        stop_system_importer_file_csv = True
    # tag - DB chosen and prefix and / or prefix delimiter chosen
    if model.csv_default_tag.all() and (model.csv_tag_prefix
                                        or model.csv_tag_prefix_delimiter):
        # if function was called from 'system_instant' or 'system_upload'
        if request:
            # call message
            messages.error(
                request,
                'Prefix and delimiter are not available when setting tags from database.'
            )
        # call logger
        error_logger(
            logger_username,
            ' SYSTEM_IMPORTER_FILE_CSV tag prefix and / or tag delimiter not compatible with csv_default_tag'
        )
        # set stop condition
        stop_system_importer_file_csv = True
    # tag - DB chosen but special option 'tag_remove_prefix' set
    if model.csv_remove_tag == 'tag_remove_prefix' and model.csv_default_tag.all(
    ):
        # if function was called from 'system_instant' or 'system_upload'
        if request:
            # call message
            messages.error(
                request,
                'Removing tags with prefix is only available when setting tags from CSV.'
            )
        # call logger
        error_logger(
            logger_username,
            ' SYSTEM_IMPORTER_FILE_CSV remove tags with prefix not compatible with csv_default_tag'
        )
        # set stop condition
        stop_system_importer_file_csv = True
    """ check tagfree choices (systemstatus / analysisstatus) in combination with tag from CSV """

    # tag - alternative choice systemstatus (tagfree) chosen without tag choice from CSV
    if model.csv_choice_tagfree_systemstatus and not model.csv_choice_tag:
        # if function was called from 'system_instant' or 'system_upload'
        if request:
            # call message
            messages.error(
                request,
                'Alternative systemstatus only available with tags from CSV.')
        # call logger
        error_logger(
            logger_username,
            ' SYSTEM_IMPORTER_FILE_CSV tagfree systemstatus only compatible with csv_choice_tag'
        )
        # set stop condition
        stop_system_importer_file_csv = True
    # tag - alternative choice analysisstatus (tagfree) chosen without tag choice from CSV
    if model.csv_choice_tagfree_analysisstatus and not model.csv_choice_tag:
        # if function was called from 'system_instant' or 'system_upload'
        if request:
            # call message
            messages.error(
                request,
                'Alternative analysisstatus only available with tags from CSV.'
            )
        # call logger
        error_logger(
            logger_username,
            ' SYSTEM_IMPORTER_FILE_CSV tagfree analysisstatus only compatible with csv_choice_tag'
        )
        # set stop condition
        stop_system_importer_file_csv = True
    """ check if the column fields are different """

    # reset error condition
    column_error = False

    # create empty dict for column values
    all_columns_dict = {}

    # add column values to dict
    all_columns_dict['csv_column_system'] = model.csv_column_system
    if model.csv_column_ip:
        all_columns_dict['csv_column_ip'] = model.csv_column_ip
    if model.csv_column_dnsname:
        all_columns_dict['csv_column_dnsname'] = model.csv_column_dnsname
    if model.csv_column_domain:
        all_columns_dict['csv_column_domain'] = model.csv_column_domain
    if model.csv_column_location:
        all_columns_dict['csv_column_location'] = model.csv_column_location
    if model.csv_column_os:
        all_columns_dict['csv_column_os'] = model.csv_column_os
    if model.csv_column_reason:
        all_columns_dict['csv_column_reason'] = model.csv_column_reason
    if model.csv_column_recommendation:
        all_columns_dict[
            'csv_column_recommendation'] = model.csv_column_recommendation
    if model.csv_column_serviceprovider:
        all_columns_dict[
            'csv_column_serviceprovider'] = model.csv_column_serviceprovider
    if model.csv_column_systemtype:
        all_columns_dict['csv_column_systemtype'] = model.csv_column_systemtype
    if model.csv_column_case:
        all_columns_dict['csv_column_case'] = model.csv_column_case
    if model.csv_column_company:
        all_columns_dict['csv_column_company'] = model.csv_column_company
    if model.csv_column_tag:
        all_columns_dict['csv_column_tag'] = model.csv_column_tag

    # check all column values against each other
    for column in all_columns_dict:

        # explicitly copy dict
        pruned_columns_dict = dict(all_columns_dict)
        # remove column from copied dict
        del pruned_columns_dict[column]
        # check for the same value in pruned dict
        if all_columns_dict[column] in pruned_columns_dict.values():
            # set error condition
            column_error = True

    # check previous checks for error - one message / log for all
    if column_error:
        # if function was called from 'system_instant' or 'system_upload'
        if request:
            # call message
            messages.error(request,
                           'The columns have to be unique. Check config!')
        # call logger
        error_logger(logger_username,
                     ' SYSTEM_IMPORTER_FILE_CSV columns not unique')
        # set stop condition
        stop_system_importer_file_csv = True
    """ check remove conditions in combination with skip condition """

    # reset error condition
    remove_error = False

    # remove systemstatus
    if model.csv_skip_existing_system and model.csv_remove_systemstatus:
        # set remove error
        remove_error = True
    # remove analysisstatus
    if model.csv_skip_existing_system and model.csv_remove_analysisstatus:
        # set remove error
        remove_error = True
    # remove ip
    if model.csv_skip_existing_system and model.csv_remove_ip:
        # set remove error
        remove_error = True
    # remove dnsname
    if model.csv_skip_existing_system and model.csv_remove_dnsname:
        # set remove error
        remove_error = True
    # remove domain
    if model.csv_skip_existing_system and model.csv_remove_domain:
        # set remove error
        remove_error = True
    # remove location
    if model.csv_skip_existing_system and model.csv_remove_location:
        # set remove error
        remove_error = True
    # remove os
    if model.csv_skip_existing_system and model.csv_remove_os:
        # set remove error
        remove_error = True
    # remove reason
    if model.csv_skip_existing_system and model.csv_remove_reason:
        # set remove error
        remove_error = True
    # remove recommendation
    if model.csv_skip_existing_system and model.csv_remove_recommendation:
        # set remove error
        remove_error = True
    # remove serviceprovider
    if model.csv_skip_existing_system and model.csv_remove_serviceprovider:
        # set remove error
        remove_error = True
    # remove systemtype
    if model.csv_skip_existing_system and model.csv_remove_systemtype:
        # set remove error
        remove_error = True
    # remove case
    if model.csv_skip_existing_system and model.csv_remove_case:
        # set remove error
        remove_error = True
    # remove company
    if model.csv_skip_existing_system and model.csv_remove_company:
        # set remove error
        remove_error = True

    # check previous checks for error - one message / log for all
    if remove_error:
        # if function was called from 'system_instant' or 'system_upload'
        if request:
            # call message
            messages.error(
                request,
                'There is an error regarding removing existing attributes. Check config!'
            )
        # call logger
        error_logger(
            logger_username,
            ' SYSTEM_IMPORTER_FILE_CSV remove attributes misconfigured ')
        # set stop condition
        stop_system_importer_file_csv = True
    """ check remove conditions without CSV or DB """

    # TODO: [code] add checks like in 'dfirtrack_config.forms'
    """ call single message (to avoid noise) for all users and return """

    # error occurred and function was called from 'system_cron'
    if stop_system_importer_file_csv and not request:
        # call message for all users
        error_message_cron(
            'There was an error within the configuration. Check config!')

    # return stop condition to 'csv.system_create_cron' or 'csv.system_cron' or 'csv.system_instant' or 'csv.system_upload'
    return stop_system_importer_file_csv
Beispiel #9
0
def reportitems(request):
    """
    Import markdown files from the filesystem as reportitems.

    For every system a markdown file named '<system_name>.md' is looked up
    in REPORTITEMS_FILESYSTEMPATH and its content is imported as the
    reportitem note for that system (headline / subheadline taken from
    REPORTITEMS_HEADLINE / REPORTITEMS_SUBHEADLINE in `dfirtrack.config`).
    Existing reportitem notes are overwritten. If REPORTITEMS_DELETE is
    set, reportitems for systems without a matching file are deleted.
    Redirects to the system list in every case.
    """

    # call logger
    debug_logger(str(request.user), " REPORTITEM_FILESYSTEM_IMPORTER_BEGIN")

    # check whether REPORTITEMS_FILESYSTEMPATH is defined in `dfirtrack.config`
    if reportitems_filesystempath == '':
        # call logger
        error_logger(str(request.user),
                     " REPORTITEMS_FILESYSTEMPATH_VARIABLE_UNDEFINED")
        messages.error(
            request,
            "The variable REPORTITEMS_FILESYSTEMPATH seems to be undefined. Check `dfirtrack.config`!"
        )
        # leave importer
        return redirect('/systems/')

    # check whether REPORTITEMS_FILESYSTEMPATH points to non-existing directory
    if not os.path.isdir(reportitems_filesystempath):
        # call logger
        error_logger(str(request.user),
                     " REPORTITEMS_FILESYSTEM_IMPORTER_WRONG_PATH")
        messages.error(
            request,
            "The variable REPORTITEMS_FILESYSTEMPATH points to a non-existing directory. Check `dfirtrack.config`!"
        )
        # leave importer
        return redirect('/systems/')

    # check whether REPORTITEMS_HEADLINE is defined in `dfirtrack.config`
    if reportitems_headline == '':
        # call logger
        error_logger(str(request.user),
                     " REPORTITEMS_HEADLINE_VARIABLE_UNDEFINED")
        messages.error(
            request,
            "The variable REPORTITEMS_HEADLINE seems to be undefined. Check `dfirtrack.config`!"
        )
        # leave importer
        return redirect('/systems/')

    # check whether REPORTITEMS_SUBHEADLINE is defined in `dfirtrack.config`
    if reportitems_subheadline == '':
        # call logger
        error_logger(str(request.user),
                     " REPORTITEMS_SUBHEADLINE_VARIABLE_UNDEFINED")
        messages.error(
            request,
            "The variable REPORTITEMS_SUBHEADLINE seems to be undefined. Check `dfirtrack.config`!"
        )
        # leave importer
        return redirect('/systems/')

    # check whether REPORTITEMS_DELETE is defined in `dfirtrack.config` (and actually a boolean)
    if not isinstance(reportitems_delete, bool):
        # call logger
        error_logger(str(request.user),
                     " REPORTITEMS_DELETE_VARIABLE_UNDEFINED")
        messages.error(
            request,
            "The variable REPORTITEMS_DELETE seems to be undefined or not a boolean. Check `dfirtrack.config`!"
        )
        # leave importer
        return redirect('/systems/')

    # get all system objects
    systems = System.objects.all()

    # create headline if it does not exist
    headline, created = Headline.objects.get_or_create(
        headline_name=reportitems_headline)
    if created:
        # call logger
        headline.logger(str(request.user),
                        " REPORTITEMS_FILESYSTEM_IMPORTER_HEADLINE_CREATED")

    # set counter for non-existing files (needed for messages)
    nofile_found_counter = 0

    # set counter for created reportitems (needed for messages)
    reportitems_created_counter = 0

    # set counter for modified reportitems (needed for messages)
    reportitems_modified_counter = 0

    # set counter for deleted reportitems (needed for messages)
    reportitems_deleted_counter = 0

    # iterate over systems
    for system in systems:

        # build the expected path of the report file for this system
        reportpath = os.path.join(reportitems_filesystempath,
                                  system.system_name + ".md")

        # check whether a file is existing for this system
        if not os.path.isfile(reportpath):
            # call logger
            warning_logger(
                str(request.user),
                " REPORTITEMS_FILESYSTEM_IMPORTER_NO_FILE system_name:" +
                system.system_name)
            # autoincrement counter
            nofile_found_counter += 1

            # check whether already existing reportitem for this system should be deleted if no file was provided
            if reportitems_delete:
                # delete already existing reportitem for this system if no file was provided
                try:
                    reportitem = Reportitem.objects.get(
                        system=system,
                        headline=headline,
                        reportitem_subheadline=reportitems_subheadline)
                    # call logger (before deleting instance)
                    reportitem.logger(
                        str(request.user),
                        " REPORTITEMS_FILESYSTEM_IMPORTER_REPORTITEM_DELETED")
                    reportitem.delete()
                    # autoincrement counter
                    reportitems_deleted_counter += 1
                except Reportitem.DoesNotExist:
                    # nothing to delete for this system
                    pass

            # continue with next system
            continue

        # create reportitem if it does not exist (get_or_create won't work in this context because of needed user objects for saving)
        try:
            reportitem = Reportitem.objects.get(
                system=system,
                headline=headline,
                reportitem_subheadline=reportitems_subheadline)
            # NOTE: counted as modified even if the file content is unchanged
            reportitems_modified_counter += 1
        except Reportitem.DoesNotExist:
            reportitem = Reportitem()
            reportitems_created_counter += 1
            reportitem.system = system
            reportitem.headline = headline
            reportitem.reportitem_subheadline = reportitems_subheadline
            reportitem.reportitem_created_by_user_id = request.user

        # read file content (context manager guarantees the file is closed
        # even if reading raises); existing reportitem_note is overwritten
        with open(reportpath, "r") as reportfile:
            reportitem.reportitem_note = reportfile.read()
        reportitem.reportitem_modified_by_user_id = request.user
        reportitem.save()

        # call logger
        reportitem.logger(
            str(request.user),
            " REPORTITEMS_FILESYSTEM_IMPORTER_REPORTITEM_CREATED_OR_MODIFIED")

    # call final messages
    if nofile_found_counter > 0:
        if nofile_found_counter == 1:
            messages.warning(
                request, "No file was found for " + str(nofile_found_counter) +
                " system.")
        else:
            messages.warning(
                request, "No files were found for " +
                str(nofile_found_counter) + " systems.")
    if reportitems_created_counter > 0:
        if reportitems_created_counter == 1:
            messages.success(
                request,
                str(reportitems_created_counter) + ' reportitem was created.')
        else:
            messages.success(
                request,
                str(reportitems_created_counter) +
                ' reportitems were created.')
    if reportitems_modified_counter > 0:
        if reportitems_modified_counter == 1:
            messages.success(
                request,
                str(reportitems_modified_counter) +
                ' reportitem was modified.')
        else:
            messages.success(
                request,
                str(reportitems_modified_counter) +
                ' reportitems were modified.')
    if reportitems_deleted_counter > 0:
        if reportitems_deleted_counter == 1:
            messages.success(
                request,
                str(reportitems_deleted_counter) + ' reportitem was deleted.')
        else:
            messages.success(
                request,
                str(reportitems_deleted_counter) +
                ' reportitems were deleted.')

    return redirect('/systems/')
Beispiel #10
0
def system_creator_async(request_post, request_user):
    """
    Create many systems at once from a newline separated list.

    `request_post` is the POST data containing 'systemlist' (one system
    name per line) plus the common attributes validated by
    `SystemCreatorForm`. Empty lines, overlong names and already
    existing systems are skipped. The calling user is informed via
    `message_user` about created / skipped / faulty entries, and the
    final status is written to the log.
    """

    # call logger
    debug_logger(str(request_user), ' SYSTEM_CREATOR_BEGIN')

    # extract lines from systemlist (list results from request object via large text area)
    lines = request_post.get('systemlist').splitlines()

    # count lines (needed for messages)
    number_of_lines = len(lines)

    # set systems_created_counter (needed for messages)
    systems_created_counter = 0

    # set systems_skipped_counter (needed for messages)
    systems_skipped_counter = 0

    # set lines_faulty_counter (needed for messages)
    lines_faulty_counter = 0

    # create empty list of skipped system names (needed for messages)
    skipped_systems = []

    # iterate over lines
    for line in lines:

        # skip empty lines
        if line == '':
            # autoincrement counter
            lines_faulty_counter += 1
            # call logger
            warning_logger(str(request_user), ' SYSTEM_CREATOR_ROW_EMPTY')
            continue

        # skip lines that are too long for a system name
        # (assumes a 50 char limit on system_name - matches this check's intent; confirm against the model)
        if len(line) > 50:
            # autoincrement counter
            lines_faulty_counter += 1
            # call logger
            warning_logger(str(request_user), ' SYSTEM_CREATOR_LONG_STRING')
            continue
        """ already existing system """

        # skip already existing systems ('exists()' avoids a COUNT query)
        if System.objects.filter(system_name=line).exists():
            # autoincrement counter
            systems_skipped_counter += 1
            # add system name to list of skipped systems
            skipped_systems.append(line)
            # call logger
            error_logger(
                str(request_user),
                ' SYSTEM_CREATOR_SYSTEM_EXISTS ' + 'system_name:' + line)
            # continue with next system name because this one already exists
            continue
        """ new system """

        # create form with request data (validates the shared attributes)
        form = SystemCreatorForm(request_post)

        # create system (an invalid form is silently skipped for every line,
        # since the same POST data is validated each iteration)
        if form.is_valid():
            """ object creation """

            # don't save form yet
            system = form.save(commit=False)

            # set system_name
            system.system_name = line

            # set auto values
            system.system_created_by_user_id = request_user
            system.system_modified_by_user_id = request_user
            system.system_modify_time = timezone.now()

            # save object
            system.save()

            # save manytomany
            form.save_m2m()
            """ object counter / log """

            # autoincrement counter
            systems_created_counter += 1

            # call logger
            system.logger(str(request_user), ' SYSTEM_CREATOR_EXECUTED')
    """ call final messages """

    # finish message
    message_user(request_user, 'System creator finished', constants.SUCCESS)

    # number messages

    if systems_created_counter > 0:
        if systems_created_counter == 1:
            message_user(request_user,
                         str(systems_created_counter) + ' system was created.',
                         constants.SUCCESS)
        else:
            message_user(
                request_user,
                str(systems_created_counter) + ' systems were created.',
                constants.SUCCESS)

    if systems_skipped_counter > 0:
        if systems_skipped_counter == 1:
            message_user(
                request_user,
                str(systems_skipped_counter) + ' system was skipped. ' +
                str(skipped_systems), constants.ERROR)
        else:
            message_user(
                request_user,
                str(systems_skipped_counter) + ' systems were skipped. ' +
                str(skipped_systems), constants.ERROR)

    if lines_faulty_counter > 0:
        if lines_faulty_counter == 1:
            message_user(
                request_user,
                str(lines_faulty_counter) + ' line out of ' +
                str(number_of_lines) +
                ' lines was faulty (see log file for details).',
                constants.WARNING)
        else:
            message_user(
                request_user,
                str(lines_faulty_counter) + ' lines out of ' +
                str(number_of_lines) +
                ' lines were faulty (see log file for details).',
                constants.WARNING)

    # call logger with final status summary
    info_logger(
        str(request_user),
        ' SYSTEM_CREATOR_STATUS ' + 'created:' + str(systems_created_counter) +
        '|' + 'skipped:' + str(systems_skipped_counter) + '|' +
        'faulty_lines:' + str(lines_faulty_counter))

    # call logger
    debug_logger(str(request_user), ' SYSTEM_CREATOR_END')
Beispiel #11
0
def _giraf_update_system(system, request_user, ip_list, systemuser_list,
                         domain, osimportname, osarch, install_date,
                         last_booted_at):
    """assign IPs, system users and attributes to `system`, save and log

    Shared by the 'uuid:no' (newly matched/created by hostname) and
    'uuid:yes' (matched by uuid) code paths of `system()`, which were
    previously duplicated verbatim.
    """

    # iterate over ips
    for ip_dict in ip_list:
        # get or create ip object
        ip, created = Ip.objects.get_or_create(ip_ip=ip_dict['ip_address'])

        if created:
            # call logger
            ip.logger(request_user, " API_GIRAF_SYSTEMS_IP_CREATED")

        # check whether ip already exists for this system otherwise add fk-relationship
        if not system.ip.filter(ip_ip=ip.ip_ip).exists():
            system.ip.add(ip)

    # iterate over systemusers
    for systemuser_dict in systemuser_list:
        systemuser_lastlogon_time = systemuser_dict['last_logon']
        if systemuser_lastlogon_time is not None:
            systemuser_lastlogon_time = dateutil.parser.parse(
                systemuser_lastlogon_time)
        # get or create systemuser object (check isn't required because system is already bind by this)
        systemuser, created = Systemuser.objects.get_or_create(
            systemuser_name=systemuser_dict['username'], system=system)
        # update logon time (not suitable for searching above)
        systemuser.systemuser_lastlogon_time = systemuser_lastlogon_time
        systemuser.save()

        if created:
            # call logger
            systemuser.logger(request_user,
                              " API_GIRAF_SYSTEMS_SYSTEMUSER_CREATED")

    # set attributes from the API payload
    system.domain = domain
    system.os = osimportname.os  # set OS to existing mapping or 'tbd'
    system.osarch = osarch
    system.system_install_time = install_date
    system.system_lastbooted_time = last_booted_at

    # set auto values
    system.system_api_time = timezone.now()

    # save object
    system.save()

    # call logger
    system.logger(request_user, ' API_GIRAF_SYSTEMS_EXECUTED')


def system(request):
    """import systems from the GIRAF API into DFIRTrack

    Fetches the system list from the GIRAF API, creates missing domains,
    OS import names, architectures, IPs and system users, and creates or
    updates the corresponding `System` objects (matched by uuid first,
    then by hostname). Always redirects to the system list view.
    """

    # get user string (used by all loggers below)
    request_user = str(request.user)

    # call logger
    debug_logger(request_user, " API_GIRAF_SYSTEMS_BEGIN")

    # check variables in `dfirtrack.config`
    stop_importer_api_giraf = check_config(request)

    # leave importer_api_giraf if variables caused errors
    if stop_importer_api_giraf:
        return redirect(reverse('system_list'))

    # check connection (any failure - DNS, timeout, HTTP error - aborts the import)
    try:
        urllib.request.urlopen(dfirtrack_config.GIRAF_URL, timeout=2)
    # narrowed from a bare 'except' so KeyboardInterrupt/SystemExit still propagate
    except Exception:
        # call logger
        error_logger(request_user, " API_GIRAF_SYSTEMS_URL_NOT_AVAILABLE")
        messages.error(request, "GIRAF API URL not available.")
        # call logger (for consistency purposes to show end of api call)
        debug_logger(request_user, " API_GIRAF_SYSTEMS_END")
        return redirect(reverse('system_list'))

    # get JSON from GIRAF API (returns <class 'requests.models.Response'>)
    system_json = requests.get(
        dfirtrack_config.GIRAF_URL + '/api/systems/systems/',
        auth=(dfirtrack_config.GIRAF_USER, dfirtrack_config.GIRAF_PASS))

    # load JSON to list (returns list if authenticated, returns dict else)
    system_list = system_json.json()

    # check for list type (in case of auth error it returns dict)
    if not isinstance(system_list, list):
        # stop api call because of missing list
        error_logger(request_user, " API_GIRAF_POSSIBLE_AUTH_ERROR")
        messages.error(request, "GIRAF API possible authentication error.")
        # call logger (for consistency purposes to show end of api call)
        debug_logger(request_user, " API_GIRAF_SYSTEMS_END")
        return redirect(reverse('system_list'))

    # iterate over systems
    for system_dict in system_list:

        # get hostname (KeyError: key missing, TypeError: entry is not a dict)
        try:
            hostname = system_dict['hostname']
        except (KeyError, TypeError):
            # leave this loop if there is something wrong with the data
            error_logger(request_user, " API_GIRAF_WRONG_DATA")
            continue

        # get uuid
        uuid = system_dict['uuid']

        # get list of ips
        ip_list = system_dict['ip_address']

        # get list of systemusers
        systemuser_list = system_dict['systemuser']

        # get domain
        domain, created = Domain.objects.get_or_create(
            domain_name=system_dict['domain'])

        if created:
            # call logger
            domain.logger(request_user, " API_GIRAF_SYSTEMS_DOMAIN_CREATED")

        # get Os
        osimportname_name = str(system_dict['os']) + " " + str(
            system_dict['release']) + " " + str(system_dict['version'])
        osimportname, created = Osimportname.objects.get_or_create(
            osimportname_name=osimportname_name,
            osimportname_importer='GIRAF',
            defaults={
                'os': Os.objects.get(os_name='tbd')
            },  # set 'tbd' if no mapping exists, real os will be updated after next api call after mapping
        )

        # get architecture
        osarch, created = Osarch.objects.get_or_create(
            osarch_name=system_dict['machine_type'])

        if created:
            # call logger
            osarch.logger(request_user, " API_GIRAF_SYSTEMS_OSARCH_CREATED")

        # get installation date
        install_date = system_dict['install_date']
        if install_date is not None:
            install_date = dateutil.parser.parse(install_date)

        # get boot time
        last_booted_at = system_dict['last_booted_at']
        if last_booted_at is not None:
            last_booted_at = dateutil.parser.parse(last_booted_at)

        # check for uuid
        system = System.objects.filter(system_uuid=uuid)
        if not system:  # uuid:no

            # check for hostname
            if not System.objects.filter(system_name=hostname):  # hostname:no

                system = System()  # create new system object
                system.system_name = hostname
                system.systemstatus = Systemstatus.objects.get(
                    systemstatus_name='Unknown')
                system.system_created_by_user_id = request.user
                system.system_modified_by_user_id = request.user
                system.system_modify_time = timezone.now()
                system.save()  # hostname:yes

            system = System.objects.get(
                system_name=hostname
            )  # Get system for the case uuid:no hostname:yes
            system.system_uuid = uuid  # uuid:yes

        else:  # uuid:yes

            system = System.objects.get(system_uuid=uuid)

        # sync relations / attributes and save (identical for both branches)
        _giraf_update_system(system, request_user, ip_list, systemuser_list,
                             domain, osimportname, osarch, install_date,
                             last_booted_at)

    # call logger
    debug_logger(request_user, " API_GIRAF_SYSTEMS_END")

    return redirect(reverse('system_list'))
Beispiel #12
0
def check_content_file_system(request=None):
    """check markdown path in the file system

    Verifies that the configured markdown path is set, exists in the file
    system and is writable. Problems are reported via Django messages
    (interactive call with `request`) or via cron error messages
    (scheduled call without `request`) and logged.

    Returns True if the markdown exporter must stop, False otherwise.
    """

    # get config model
    model = SystemExporterMarkdownConfigModel.objects.get(
        system_exporter_markdown_config_name='SystemExporterMarkdownConfig')

    # get main config (provides the cron username for logging)
    main_config_model = MainConfigModel.objects.get(
        main_config_name='MainConfig')

    # set username for logger
    # if function was called from 'system' / 'system_create_cron'
    if request:
        logger_username = str(request.user)
    # if function was called from 'system_cron'
    else:
        logger_username = main_config_model.cron_username

    def _report(message, log_suffix):
        """send an error message (to the requesting user, or all users for cron) and log it"""
        if request:
            messages.error(request, message)
        else:
            error_message_cron(message)
        error_logger(logger_username, log_suffix)

    # flat guard chain replaces the former nested if/else pyramid;
    # like before, only the first failing check is reported

    # check MARKDOWN_PATH for empty string - stop immediately
    if not model.markdown_path:
        _report('Markdown path contains an empty string. Check config!',
                ' MARKDOWN_EXPORTER_MARKDOWN_PATH_EMPTY_STRING')
        return True

    # check MARKDOWN_PATH for existence in file system - stop immediately
    if not os.path.isdir(model.markdown_path):
        _report('Markdown path does not exist. Check config or file system!',
                ' MARKDOWN_EXPORTER_MARKDOWN_PATH_NOT_EXISTING')
        return True

    # check MARKDOWN_PATH for write permission - stop immediately
    if not os.access(model.markdown_path, os.W_OK):
        _report(
            'No write permission for markdown path. Check config or file system!',
            ' MARKDOWN_EXPORTER_MARKDOWN_PATH_NO_WRITE_PERMISSION',
        )
        return True

    # all checks passed - do not stop the exporter
    return False
Beispiel #13
0
def system_modificator_async(request_post, request_user):
    """ function to modify many systems at once """

    # log the start of the bulk modification
    debug_logger(str(request_user), " SYSTEM_MODIFICATOR_BEGIN")

    # the large text area arrives as a single string with line breaks
    systemlist = request_post.get('systemlist')

    # process every submitted line as one system name
    for entry in systemlist.splitlines():

        # guard: empty line
        if entry == '':
            warning_logger(str(request_user), " SYSTEM_MODIFICATOR_ROW_EMPTY")
            continue

        # guard: entry must be a string
        if not isinstance(entry, str):
            warning_logger(str(request_user), " SYSTEM_MODIFICATOR_NO_STRING")
            continue

        # guard: system names are limited to 50 characters
        if len(entry) > 50:
            warning_logger(str(request_user), " SYSTEM_MODIFICATOR_LONG_STRING")
            continue

        # look up the system by name
        matching_systems = System.objects.filter(system_name=entry)

        # guard: skip names without a matching system
        if matching_systems.count() == 0:
            error_logger(str(request_user), " SYSTEM_MODIFICATOR_SYSTEM_DOES_NOT_EXISTS " + "system_name:" + entry)
            continue
        # guard: skip ambiguous names (more than one match)
        elif matching_systems.count() > 1:
            error_logger(str(request_user), " SYSTEM_MODIFICATOR_SYSTEM_NOT_DISTINCT " + "system_name:" + entry)
            continue

        # exactly one match - fetch it
        existing_system = System.objects.get(system_name=entry)

        # bind the posted data to a form for this system
        form = SystemModificatorForm(request_post, instance=existing_system)

        # selected tags (multiple choice field)
        tag_ids = request_post.getlist('tag')

        # skip silently if the posted data does not validate
        if not form.is_valid():
            continue

        # build the modified object without touching the database yet
        modified_system = form.save(commit=False)

        # keep the name from the submitted line
        modified_system.system_name = entry

        # stamp modification metadata
        modified_system.system_modified_by_user_id = request_user
        modified_system.system_modify_time = timezone.now()

        # persist the changes
        modified_system.save()

        # log the successful modification
        modified_system.logger(str(request_user), ' SYSTEM_MODIFICATOR_EXECUTED')

        # TODO: add check for empty list
        # attach tags one by one (save_m2m would replace existing tags)
        for tag_id in tag_ids:
            modified_system.tag.add(Tag.objects.get(tag_id=tag_id))

    # log the end of the bulk modification
    debug_logger(str(request_user), " SYSTEM_MODIFICATOR_END")
Beispiel #14
0
def system_modificator_async(request_post, request_user):
    """ function to modify many systems at once

    request_post: POST data containing either a multiline 'systemlist'
    text area (system names) or a multiple-select 'systemlist' field
    (system ids), plus the form fields to apply and the multiple choice
    selections for 'tag' and 'company'

    request_user: user the modification is attributed to

    writes per-user result messages and log entries; returns nothing
    """

    # call logger
    debug_logger(str(request_user), ' SYSTEM_MODIFICATOR_BEGIN')

    # exctract lines from systemlist (list results either from request object via multiline selector or via large text area)
    lines = request_post.getlist('systemlist')
    # tracks whether entries are system names (text area) or system ids (selector)
    system_char_field_used = False
    # if large text area was used, the list contains only one entry with (one or more) line breaks
    # heuristic: a single entry that contains line breaks or is not purely
    # numeric must come from the text area - assumes system ids are always
    # submitted as digit strings (TODO confirm against the form definition)
    if len(lines) == 1 and ("\r\n" in lines[0] or not lines[0].isdigit()):
        system_char_field_used = True
        lines = lines[0].splitlines()

    #  count lines (needed for messages)
    number_of_lines = len(lines)

    # set systems_modified_counter (needed for messages)
    systems_modified_counter = 0

    # set systems_skipped_counter (needed for messages)
    systems_skipped_counter = 0

    # set lines_faulty_counter (needed for messages)
    lines_faulty_counter = 0

    # create empty list (needed for messages)
    skipped_systems = []

    # iterate over lines
    for line in lines:

        # skip emtpy lines
        if line == '':
            # autoincrement counter
            lines_faulty_counter += 1
            # call logger
            warning_logger(str(request_user), ' SYSTEM_MODIFICATOR_ROW_EMPTY')
            continue

        # check line for string
        if not isinstance(line, str):  # coverage: ignore branch
            # autoincrement counter
            lines_faulty_counter += 1
            # call logger
            warning_logger(str(request_user), ' SYSTEM_MODIFICATOR_NO_STRING')
            continue

        # check line for length of string
        if len(line) > 50:
            # autoincrement counter
            lines_faulty_counter += 1
            # call logger
            warning_logger(str(request_user),
                           ' SYSTEM_MODIFICATOR_LONG_STRING')
            continue

        # check for existence of system (by name or by id, depending on input widget)
        if system_char_field_used:
            system = System.objects.filter(system_name=line)
        else:
            system = System.objects.filter(system_id=line)
        """ handling non-existing or non-unique systems 2 """

        # system does not exist
        if system.count() == 0:
            # autoincrement counter
            systems_skipped_counter += 1
            # add system name to list of skipped systems
            skipped_systems.append(line)
            # call logger
            error_logger(
                str(request_user),
                ' SYSTEM_MODIFICATOR_SYSTEM_DOES_NOT_EXISTS ' +
                'system_id/system_name:' + line)
            # leave this loop because system with this systemname does not exist
            continue
        # more than one system exists
        elif system.count() > 1:
            # autoincrement counter
            systems_skipped_counter += 1
            # add system name to list of skipped systems
            skipped_systems.append(line)
            # call logger
            error_logger(
                str(request_user), ' SYSTEM_MODIFICATOR_SYSTEM_NOT_DISTINCT ' +
                'system_id/system_name:' + line)
            # leave this loop because system with this systemname is not distinct
            continue

        # get existing system
        if system_char_field_used:
            system = System.objects.get(system_name=line)
        else:
            system = System.objects.get(system_id=line)
        """ new system """

        # create form with request data
        form = SystemModificatorForm(
            request_post,
            instance=system,
            use_system_charfield=system_char_field_used)

        # extract tags (list results from request object via multiple choice field)
        tags = request_post.getlist('tag')

        # extract companies (list results from request object via multiple choice field)
        companies = request_post.getlist('company')

        # modify system
        # note: if the form does not validate, the line is silently ignored
        # (no counter is incremented for it)
        if form.is_valid():
            """ object modification """

            # don't save form yet
            system = form.save(commit=False)

            # set auto values
            system.system_modified_by_user_id = request_user
            system.system_modify_time = timezone.now()

            # save object
            system.save()
            """ object counter / log """

            # autoincrement counter
            systems_modified_counter += 1

            # call logger
            system.logger(str(request_user), ' SYSTEM_MODIFICATOR_EXECUTED')
            """ many 2 many """

            # TODO: add check for empty list
            # add tags (using save_m2m would replace existing tags)
            for tag_id in tags:
                # get object
                tag = Tag.objects.get(tag_id=tag_id)
                # add tag to system
                system.tag.add(tag)

            # add companies the same way (existing companies are kept)
            for company_id in companies:
                # get object
                company = Company.objects.get(company_id=company_id)
                # add company to system
                system.company.add(company)
    """ call final messages """

    # finish message
    message_user(request_user, 'System modificator finished',
                 constants.SUCCESS)

    # number messages

    if systems_modified_counter > 0:
        if systems_modified_counter == 1:
            message_user(
                request_user,
                str(systems_modified_counter) + ' system was modified.',
                constants.SUCCESS)
        else:
            message_user(
                request_user,
                str(systems_modified_counter) + ' systems were modified.',
                constants.SUCCESS)

    if systems_skipped_counter > 0:
        if systems_skipped_counter == 1:
            message_user(
                request_user,
                str(systems_skipped_counter) + ' system was skipped. ' +
                str(skipped_systems), constants.ERROR)
        else:
            message_user(
                request_user,
                str(systems_skipped_counter) + ' systems were skipped. ' +
                str(skipped_systems), constants.ERROR)

    if lines_faulty_counter > 0:
        if lines_faulty_counter == 1:
            message_user(
                request_user,
                str(lines_faulty_counter) + ' line out of ' +
                str(number_of_lines) +
                ' lines was faulty (see log file for details).',
                constants.WARNING)
        else:
            message_user(
                request_user,
                str(lines_faulty_counter) + ' lines out of ' +
                str(number_of_lines) +
                ' lines were faulty (see log file for details).',
                constants.WARNING)

    # call logger
    info_logger(
        str(request_user), ' SYSTEM_MODIFICATOR_STATUS ' + 'modified:' +
        str(systems_modified_counter) + '|' + 'skipped:' +
        str(systems_skipped_counter) + '|' + 'faulty_lines:' +
        str(lines_faulty_counter))

    # call logger
    debug_logger(str(request_user), " SYSTEM_MODIFICATOR_END")
Beispiel #15
0
def systems_ips(request):
    """ this function parses a csv file and tries to import systems and corresponding ips

    GET: render the upload form (systemstatus/analysisstatus preselected).
    POST: parse the uploaded CSV ('systemipcsv', quotechar "'") expecting
    the system name in column 0 and the IP address in column 1; missing
    Ip objects are created, new System objects are created from the form
    data, faulty rows are skipped with a warning and already existing
    system/ip pairs are skipped with an error.
    """

    # form was valid to post
    if request.method == "POST":

        # call logger
        debug_logger(str(request.user), " SYSTEM_IP_IMPORTER_BEGIN")

        # get text out of file (variable results from request object via file upload field)
        systemipcsv = TextIOWrapper(request.FILES['systemipcsv'].file, encoding=request.encoding)

        # read rows out of csv
        rows = csv.reader(systemipcsv, quotechar="'")

        # set row counter (needed for logger)
        i = 0

        # check for wrong file type
        try:
            # iterate over rows
            for row in rows:

                # autoincrement row counter
                i += 1

                # check for empty rows
                try:
                    # check system column for empty value
                    if row[0] == '':
                        warning_logger(str(request.user), " SYSTEM_IP_IMPORTER_SYSTEM_COLUMN " + "row_" + str(i) + ":empty_column")
                        continue
                except IndexError:
                    warning_logger(str(request.user), " SYSTEM_IP_IMPORTER_ROW row_" + str(i) + ":empty_row")
                    continue

                # check system column for string
                if not isinstance(row[0], str):
                    warning_logger(str(request.user), " SYSTEM_IP_IMPORTER_SYSTEM_COLUMN " + "row_" + str(i) + ":no_string")
                    continue

                # check system column for length of string
                if len(row[0]) > 50:
                    warning_logger(str(request.user), " SYSTEM_IP_IMPORTER_SYSTEM_COLUMN " + "row_" + str(i) + ":long_string")
                    continue

                # check ip column for ip
                # (IndexError: row has no second column - treat like an invalid ip
                # instead of crashing the whole import)
                try:
                    ipaddress.ip_address(row[1])
                except (ValueError, IndexError):
                    warning_logger(str(request.user), " SYSTEM_IP_IMPORTER_IP_COLUMN " + "row_" + str(i) + ":invalid_ip")
                    continue

                # create ip
                ip, created = Ip.objects.get_or_create(ip_ip=row[1])
                if created:
                    # NOTE(review): log label says 'SYSTEMS_...' while every other
                    # label in this importer uses 'SYSTEM_...' - kept unchanged
                    # for log compatibility
                    ip.logger(str(request.user), " SYSTEMS_IP_IMPORTER_IP_CREATED")

                # skip row if this system / ip pair already exists
                if System.objects.filter(system_name=row[0], ip=ip).exists():
                    error_logger(str(request.user), " SYSTEM_IP_IMPORTER_SYSTEM_EXISTS " + "row_" + str(i) + ":system_exists|system_name:" + row[0] + "|ip:" + str(row[1]))
                    continue

                # create form with request data
                form = SystemIpFileImport(request.POST, request.FILES)

                # create system
                if form.is_valid():

                    # don't save form yet
                    system = form.save(commit=False)

                    # set system_name
                    system.system_name = row[0]

                    # set auto values
                    system.system_created_by_user_id = request.user
                    system.system_modified_by_user_id = request.user
                    system.system_modify_time = timezone.now()

                    # save object
                    system.save()

                    # save manytomany
                    form.save_m2m()

                    # save ip for system
                    system.ip.add(ip)

                    # call logger
                    system.logger(str(request.user), ' SYSTEM_IP_IMPORTER_EXECUTED')

        # wrong file type (binary upload decoded as text)
        except UnicodeDecodeError:
            critical_logger(str(request.user), " SYSTEM_IP_IMPORTER_WRONG_FILE_TYPE")

        # call logger
        debug_logger(str(request.user), " SYSTEM_IP_IMPORTER_END")

        return redirect('/systems')

    else:
        # show empty form
        form = SystemIpFileImport(initial={
            'systemstatus': 2,
            'analysisstatus': 1,
        })

        # call logger
        debug_logger(str(request.user), " SYSTEM_IP_IMPORTER_ENTERED")
    return render(request, 'dfirtrack_main/system/systems_ip_importer.html', {'form': form})
Beispiel #16
0
def systems_tags(request):
    """ this function imports a csv file with multiple systems and relevant tags """

    """
    the following high-level workflow is done by this function
    - remove all tags for systems beginning with 'TAGPREFIX' (if there are any)
    - evaluate given CSV line by line (without first row)
        - check whether this line has relevant tags (leave loop if not)
        - get hostname and convert to lowercase
        - get domain and change to empty string if incorrect (either 'NT AUTHORITY' or hostname itself)
        - create domain if necessary
        - check for existing systems (with this hostname)
            - if == 1:
                - check for existing domain (for this system)
                    if domain_of_system == NULL: domain is set to domain from CSV (if there is one)
            - if > 1: leave loop because not distinct
            - if == 0: create system
        - add relevant tags to this system
        - check for reportitem headline = SYSTEMTAG_HEADLINE, reportitem_subheadline = SYSTEMTAG_SUBHEADLINE and create if necessary
        - fill reportitem_note with markdown table containing with information of report(s)
    - logs and messages are written if applicable
    - counters are incremented where necessary
    """

    # form was valid to post
    if request.method == "POST":

        # call logger
        debug_logger(str(request.user), " SYSTEM_TAG_IMPORTER_BEGIN")

        # check TAGLIST (from settings.config) for empty list
        if not TAGLIST:
            messages.error(request, "No relevant tags defined. Check `TAGLIST` in `dfirtrack.config`!")
            # call logger
            error_logger(str(request.user), " SYSTEM_TAG_IMPORTER_NO_TAGS_DEFINED.")
            return redirect('/systems/')
        else:
            taglist = TAGLIST

        # check TAGPREFIX (from settings.config) for empty string
        # NOTE: use equality, not 'is' — identity comparison against a string
        # literal is implementation-dependent and raises a SyntaxWarning
        if TAGPREFIX == "":
            messages.error(request, "No prefix string defined. Check `TAGPREFIX` in `dfirtrack.config`!")
            # call logger
            error_logger(str(request.user), " SYSTEM_TAG_IMPORTER_NO_TAGPREFIX_DEFINED.")
            return redirect('/systems/')
        # expand the string by an underscore
        else:
            tagprefix = TAGPREFIX + "_"

        # check whether SYSTEMTAG_HEADLINE is defined in `dfirtrack.config`
        if systemtag_headline == '':
            # call logger
            error_logger(str(request.user), " SYSTEMTAG_HEADLINE_VARIABLE_UNDEFINED")
            messages.error(request, "The variable SYSTEMTAG_HEADLINE seems to be undefined. Check `dfirtrack.config`!")
            # leave importer
            return redirect('/systems/')

        # check whether SYSTEMTAG_SUBHEADLINE is defined in `dfirtrack.config`
        if systemtag_subheadline == '':
            # call logger
            error_logger(str(request.user), " SYSTEMTAG_SUBHEADLINE_VARIABLE_UNDEFINED")
            messages.error(request, "The variable SYSTEMTAG_SUBHEADLINE seems to be undefined. Check `dfirtrack.config`!")
            # leave importer
            return redirect('/systems/')

        # get text out of file (variable results from request object via file upload field)
        systemtagcsv = TextIOWrapper(request.FILES['systemtagcsv'].file, encoding=request.encoding)

        # read rows out of csv
        rows = csv.reader(systemtagcsv)

        # create empty list (this list is used to store every line as single dict: {system_name: row}), because if there are multiple rows with the same system they are added to the same reportitem
        rowlist = []

        """ remove all tags for systems beginning with 'TAGPREFIX' (if there are any) """

        # get all systems that have tags beginning with 'TAGPREFIX' | prefixtagsystems -> queryset
        prefixtagsystems = System.objects.filter(tag__tag_name__startswith=tagprefix)

        # iterate over systems in queryset | prefixtagsystem  -> system object
        for prefixtagsystem in prefixtagsystems:

            # get all tags beginning with 'TAGPREFIX' that belong to the actual system | systemprefixtags -> queryset
            systemprefixtags = prefixtagsystem.tag.filter(tag_name__startswith=tagprefix)

            # iterate over queryset | systemprefixtag -> tag object
            for systemprefixtag in systemprefixtags:
                # delete all existing tags (the m2m relationship) beginning with 'TAGPREFIX' for this system (so that removed tags from csv will be removed as well)
                systemprefixtag.system_set.remove(prefixtagsystem)

        # create headline if it does not exist
        headline, created = Headline.objects.get_or_create(headline_name=systemtag_headline)
        if created:
            headline.logger(str(request.user), " SYSTEMS_TAG_IMPORTER_HEADLINE_CREATED")

        """ remove all reportitems """

        # delete reportitems (so no reportitems with legacy information / tags will be left)
        Reportitem.objects.filter(headline = headline, reportitem_subheadline = systemtag_subheadline).delete()

        """ prepare and start loop """

        # set row_counter (needed for logger)
        row_counter = 1

        # set systems_created_counter (needed for messages)
        systems_created_counter = 0

        # set systems_skipped_counter (needed for messages)
        systems_skipped_counter = 0

        # iterate over rows
        for row in rows:

            # skip first row (headlines)
            if row_counter == 1:
                # autoincrement row counter
                row_counter += 1
                continue

            # get system_name and change to lowercase
            # assumes CSV column 9 holds the hostname — TODO confirm against exporting tool
            system_name = row[8].lower()

            # get tags from csv
            tagcsvstring = row[9]
            if tagcsvstring == '':
                # autoincrement systems_skipped_counter
                systems_skipped_counter += 1
                # autoincrement row_counter
                row_counter += 1
                # leave because systems without tags are not relevant
                continue
            else:
                # convert string (at whitespaces) to list
                tagcsvlist = tagcsvstring.split()

            # create empty list for mapping
            tagaddlist = []
            # check for relevant tags and add to list
            for tag in taglist:
                if tag in tagcsvlist:
                    tagaddlist.append(tagprefix + tag)

            # check if tagaddlist is empty
            if not tagaddlist:
                # autoincrement systems_skipped_counter
                systems_skipped_counter += 1
                # autoincrement row_counter
                row_counter += 1
                # leave because there are no relevant tags
                continue

            # get domain from csv
            domain_name = row[7]
            # change domain_name to empty string if incorrect domain_name ('NT AUTHORITY') was provided
            if domain_name == 'NT AUTHORITY':
                domain_name = ''
            # clear domain if domain_name equals system_name
            elif domain_name.lower() == system_name:
                domain_name = ''

            # get or create domain object if some valid name was provided
            if domain_name != '':
                # create domain
                domain, created = Domain.objects.get_or_create(domain_name=domain_name)
                # call logger if created
                if created:
                    domain.logger(str(request.user), " SYSTEMS_TAG_IMPORTER_DOMAIN_CREATED")
                    messages.success(request, 'Domain "' + domain.domain_name + '" created.')
            else:
                # set domain to None to avoid further errors (domain is needed later)
                domain = None

            # create empty dict
            rowdict = {}

            # put the actual row to the dict (dict with only ONE key-value-pair)
            rowdict[system_name] = row

            # append dict to the global list (because if there are multiple rows with the same system, needed for reportitem SYSTEMTAG_SUBHEADLINE)
            rowlist.append(rowdict)

            # get all systems with this system_name
            systemquery = System.objects.filter(system_name=system_name)

            """ check how many systems were returned """

            # if there is only one system
            if len(systemquery) == 1:
                # get system object
                system = System.objects.get(system_name=system_name)

                """ add domain from CSV only if system does not already has a domain """

                # check whether system has existing domain and CSV submitted a domain
                if system.domain is None and domain is not None:

                    # if system has no existing domain set domain of system to domain submitted by tag csv
                    system.domain = domain
                    system.system_modify_time = timezone.now()
                    system.system_modified_by_user_id = request.user
                    system.save()
                    # call logger
                    system.logger(str(request.user), " SYSTEMS_TAG_IMPORTER_SYSTEM_DOMAIN_ADDED")

            # if there is more than one system
            elif len(systemquery) > 1:
                # call logger
                error_logger(str(request.user), " SYSTEM_TAG_IMPORTER_SYSTEM_EXISTS_MULTIPLE_TIMES " + "row_" + str(row_counter) + ":system_exists_multiple_times|system_name:" + system_name)
                messages.error(request, 'System "' + system_name + '" was found multiple times. Nothing was changed for this system.')
                # autoincrement row_counter
                row_counter += 1
                # leave because of no distinct mapping
                continue
            else:
                # create entire new system object
                system = System()
                system.system_name = system_name
                system.systemstatus = Systemstatus.objects.get(systemstatus_name = "Unknown")
                #system.analysisstatus = Analysisstatus.objects.get(analysisstatus_name = "Needs anaylsis")
                # add domain if submitted
                if domain is not None:
                    system.domain = domain
                system.system_modify_time = timezone.now()
                system.system_created_by_user_id = request.user
                system.system_modified_by_user_id = request.user
                system.save()

                # autoincrement systems_created_counter
                systems_created_counter += 1

                # call logger
                system.logger(str(request.user), " SYSTEMS_TAG_IMPORTER_SYSTEM_CREATED")

            # iterate over tags in tagaddlist
            for tag_name in tagaddlist:
                # get tagcolor object
                tagcolor = Tagcolor.objects.get(tagcolor_name='primary')

                # create tag if needed
                tag, created = Tag.objects.get_or_create(tag_name=tag_name, tagcolor=tagcolor)
                # call logger if created
                if created:
                    tag.logger(str(request.user), " SYSTEMS_TAG_IMPORTER_TAG_CREATED")
                    messages.success(request, 'Tag "' + tag.tag_name + '" created.')

                # add tag to system
                tag.system_set.add(system)

            # call logger
            system.logger(str(request.user), " SYSTEMS_TAG_IMPORTER_SYSTEM_MODIFIED")

            # create reportitem if it does not exist (get_or_create won't work in this context because of needed user objects for saving)
            try:
                reportitem = Reportitem.objects.get(system = system, headline = headline, reportitem_subheadline = systemtag_subheadline)
            except Reportitem.DoesNotExist:
                reportitem = Reportitem()
                reportitem.system = system
                reportitem.headline = headline
                reportitem.reportitem_subheadline = (systemtag_subheadline)
                reportitem.reportitem_created_by_user_id = request.user

            # create empty list (used to store elements of markdown table)
            notelist = []

            # put head of markdown table into list
            notelist.append("|File|Type|Version|Started|Duration|Lines|Checked|Domain|Host|Tags|Errors|FirstTrace|LastToolUsage|UsageTime|MalwareInstall")
            notelist.append("|:---|:---|:---|:---|:---|:---|:---|:---|:---|:---|:---|:---|:---|:---|:---|")

            # iterate over entries in list (dictionaries)
            for item in rowlist:
                # if this single key-value-pair dict contains the system
                if system_name in item:
                    # get row
                    entry = item[system_name]
                    # convert row
                    entry = "|" + "|".join(entry) + "|"
                    # fill empty fields with '---' (otherwise mkdocs skips these)
                    entry = entry.replace("||", "| --- |")
                    # repeat last step to catch empty fields lying next to each other
                    entry = entry.replace("||", "| --- |")
                    # put entry to markdown table
                    notelist.append(entry)

            # join list to string with linebreaks
            notestring = "\n".join(notelist)

            # add changing values (existing reportitem_note will be overwritten)
            reportitem.reportitem_note = notestring
            reportitem.reportitem_modified_by_user_id = request.user
            reportitem.save()

            # call logger
            reportitem.logger(str(request.user), " SYSTEMS_TAG_IMPORTER_REPORTITEM_CREATED_OR_MODIFIED")

            # autoincrement row_counter
            row_counter += 1

        # call final messages
        if systems_created_counter > 0:
            if systems_created_counter == 1:
                messages.success(request, str(systems_created_counter) + ' system was created.')
            else:
                messages.success(request, str(systems_created_counter) + ' systems were created.')
        if systems_skipped_counter > 0:
            if systems_skipped_counter == 1:
                messages.warning(request, str(systems_skipped_counter) + ' system was skipped or cleaned (no relevant tags).')
            else:
                messages.warning(request, str(systems_skipped_counter) + ' systems were skipped or cleaned (no relevant tags).')

        # call logger
        debug_logger(str(request.user), " SYSTEM_TAG_IMPORTER_END")

        return redirect('/systems/')

    else:
        # show empty form
        form = SystemTagFileImport()

        # call logger
        debug_logger(str(request.user), " SYSTEM_TAG_IMPORTER_ENTERED")

    return render(request, 'dfirtrack_main/system/systems_tag_importer.html', {'form': form})
Beispiel #17
0
def entry(request):
    """ import timeline entries from the GIRAF API for systems already known by UUID """

    # get user string (needed for logger)
    request_user = str(request.user)

    # get redirector from GET request (decides which list view to redirect to afterwards)
    # use '.get' with default so a missing parameter does not raise MultiValueDictKeyError
    redirector = request.GET.get('redirector', '')

    # call logger
    debug_logger(request_user, " API_GIRAF_ENTRIES_BEGIN")

    # check variables in `dfirtrack.config`
    stop_importer_api_giraf = check_config(request)

    # leave importer_api_giraf if variables caused errors
    if stop_importer_api_giraf:
        return redirect(reverse('system_list'))

    # check connection
    try:
        urllib.request.urlopen(dfirtrack_config.GIRAF_URL, timeout=2)
    # deliberately broad: any connection problem (DNS, timeout, HTTP error) aborts the import,
    # but narrowed from a bare 'except' so KeyboardInterrupt/SystemExit still propagate
    except Exception:
        # call logger
        error_logger(request_user, " API_GIRAF_ENTRIES_URL_NOT_AVAILABLE")
        messages.error(request, "GIRAF API URL not available.")
        # call logger (for consistency purposes to show end of api call)
        debug_logger(request_user, " API_GIRAF_SYSTEMS_END")
        if redirector == 'entry':
            return redirect(reverse('entry_list'))
        # default to system list (previously fell through and continued the import)
        return redirect(reverse('system_list'))

    # get JSON from GIRAF API (returns <class 'requests.models.Response'>)
    entry_json = requests.get(
        dfirtrack_config.GIRAF_URL + '/api/systems/timelines/',
        auth=(dfirtrack_config.GIRAF_USER, dfirtrack_config.GIRAF_PASS))

    # load JSON to list (returns list if authenticated, returns dict else)
    entry_list = entry_json.json()

    # check for list type (in case of auth error it returns dict)
    if not isinstance(entry_list, list):
        """ stop api call because of missing list """
        # call logger
        error_logger(request_user, " API_GIRAF_POSSIBLE_AUTH_ERROR")
        messages.error(request, "GIRAF API possible authentication error.")
        # call logger (for consistency purposes to show end of api call)
        debug_logger(request_user, " API_GIRAF_ENTRIES_END")
        # redirect depending on redirector from GET request
        if redirector == 'entry':
            return redirect(reverse('entry_list'))
        # default to system list (previously returned None for unexpected values)
        return redirect(reverse('system_list'))

    # iterate over entries
    for entry_dict in entry_list:

        # get entry time
        entry_time = entry_dict['entry_date']
        if entry_time is not None:
            entry_time = dateutil.parser.parse(entry_time)

        # get system
        entry_system_dict = entry_dict['system']
        system_uuid = entry_system_dict['uuid']
        try:
            system = System.objects.get(system_uuid=system_uuid)
        # broad on purpose: covers DoesNotExist as well as MultipleObjectsReturned,
        # narrowed from a bare 'except' clause
        except Exception:
            # call logger
            error_logger(
                request_user, " API_GIRAF_ENTRIES_UNKNOWN_SYSTEM " +
                "system_uuid:" + system_uuid)
            messages.error(request,
                           "GIRAF API unknown system UUID: " + system_uuid)
            # leave this loop because system for this entry does not exist yet
            continue

        # get entry sha1
        entry_sha1 = entry_dict['hash_sha1']

        # get entry content
        entry_content = entry_dict['json_content']
        # load JSON to dict
        entry_content = json.loads(entry_content)

        # extract entry content from dict
        entry_date = entry_content['date']
        entry_utc = entry_content['utc']
        entry_system = entry_content['system']
        entry_type = entry_content['type']
        entry_content = entry_content['content']

        # get entry if it already exists
        entry = Entry.objects.filter(system=system, entry_sha1=entry_sha1)

        # only create object if it does not exist yet
        if not entry:

            # create new entry object
            entry = Entry()

            # enter extracted values
            entry.entry_time = entry_time
            entry.system = system
            entry.entry_sha1 = entry_sha1
            entry.entry_date = entry_date
            entry.entry_utc = entry_utc
            entry.entry_system = entry_system
            entry.entry_type = entry_type
            entry.entry_content = entry_content

            # set auto values
            entry.entry_create_time = timezone.now()
            entry.entry_modify_time = timezone.now()
            entry.entry_api_time = timezone.now()
            entry.entry_created_by_user_id = request.user
            entry.entry_modified_by_user_id = request.user

            # save object
            entry.save()

            # call logger
            entry.logger(request_user, ' API_GIRAF_ENTRIES_EXECUTED')

    # call logger
    debug_logger(request_user, " API_GIRAF_ENTRIES_END")

    # redirect depending on redirector from GET request
    if redirector == 'entry':
        return redirect(reverse('entry_list'))
    # default to system list (previously returned None for unexpected values)
    return redirect(reverse('system_list'))
Beispiel #18
0
def check_content_file_system(main_config_model, module_text, request=None):
    """check file system

    Verifies that the configured cron export path exists and is writable.
    Returns True (stop condition for the exporter) on any problem, False otherwise.
    Messages go to the requesting user (interactive) or to all users (cron).
    """

    # reset stop condition
    stop_cron_exporter = False
    """ set username for logger """

    # if function was called from 'artifact_create_cron' / 'system_create_cron'
    if request:
        logger_username = str(request.user)
    # if function was called from 'artifact_cron' / 'system_cron'
    else:
        logger_username = main_config_model.cron_username

    # cron export path does not exist - stop immediately
    if not os.path.isdir(main_config_model.cron_export_path):
        # if function was called from 'artifact_create_cron' / 'system_create_cron'
        if request:
            # call message
            messages.error(
                request,
                'Export path does not exist. Check config or file system!')
        # if function was called from 'artifact_cron' / 'system_cron'
        else:
            # call message for all users
            error_message_cron(
                f'{module_text}: Export path does not exist. Check config or file system!'
            )
        # call logger
        error_logger(
            logger_username,
            f' {module_text}_SPREADSHEET_EXPORTER_CRON_EXPORT_PATH_NOT_EXISTING',
        )
        # set stop condition
        stop_cron_exporter = True
    else:
        # no write permission for cron export path - stop immediately
        # BUGFIX: check write access (os.W_OK) — previously tested os.R_OK (read),
        # which let unwritable paths pass and missed write-protected ones
        if not os.access(main_config_model.cron_export_path, os.W_OK):
            # if function was called from 'artifact_create_cron' / 'system_create_cron'
            if request:
                # call message
                messages.error(
                    request,
                    'No write permission for export path. Check config or file system!',
                )
            # if function was called from 'artifact_cron' / 'system_cron'
            else:
                # call message for all users
                error_message_cron(
                    f'{module_text}: No write permission for export path. Check config or file system!'
                )
            # call logger
            error_logger(
                logger_username,
                f' {module_text}_SPREADSHEET_EXPORTER_CRON_EXPORT_PATH_NO_WRITE_PERMISSION',
            )
            # set stop condition
            stop_cron_exporter = True

    # return stop condition
    return stop_cron_exporter