def write_report_systemsorted(system, request_user):
    """
    Export a single system to a markdown file and prepare mkdocs.yml values.

    The return values (prefix 'r') build the key-value pair for this system
    in the `mkdocs.yml` file.

    Parameters:
        system -- system model instance to export
        request_user -- username used for logging

    Returns:
        tuple (rid, rfqdn, rpath): system id, FQDN and the shortened
        markdown path for `mkdocs.yml` (only used in systemsorted_async)
    """

    # return system_id for mkdocs.yml
    rid = str(system.system_id)

    # return fqdn for mkdocs.yml (fall back to bare system name without dnsname)
    if system.dnsname is not None:
        rfqdn = system.system_name + "." + system.dnsname.dnsname_name
    else:
        rfqdn = system.system_name

    """
    build the path for every file
    it is distinguished between the short version for the `mkdocs.yml` file
    ('value' of key-value-pair) and the long version that is used to write
    to the file system
    """

    # build path
    path = system.system_name

    # check for domain and add to path
    if system.domain is not None:
        path = path + "_" + system.domain.domain_name

    # check for system_install_time and add to path
    if system.system_install_time is not None:
        install_time = system.system_install_time.strftime('%Y%m%d_%H%M%S')
        path = path + "_" + install_time

    # return shortened path for mkdocs.yml ('value')
    rpath = "systems/" + path + ".md"

    # get config model
    model = SystemExporterMarkdownConfigModel.objects.get(system_exporter_markdown_config_name = 'SystemExporterMarkdownConfig')

    # finish path for markdown file
    path = model.markdown_path + "/docs/systems/" + path + ".md"

    # open file for system; the context manager guarantees the handle is
    # closed even if the report writer raises (the previous implementation
    # only accessed the no-op attribute `django_report.closed` instead of
    # calling close())
    with open(path, "w") as report:
        django_report = File(report)
        # write systemreport
        write_report.write_report(django_report, system)

    # call logger
    info_logger(request_user, " SYSTEM_MARKDOWN_CREATED system_id:" + str(system.system_id) + "|system_name:" + str(system.system_name))

    # return strings for mkdocs.yml (only used in systemsorted_async)
    return(rid, rfqdn, rpath)
def system_cron():
    """spreadsheet export via scheduled task to server file system"""

    # timestamp that becomes part of the export file name
    file_timestamp = timezone.now().strftime('%Y%m%d_%H%M')

    # fetch the main configuration singleton
    config = MainConfigModel.objects.get(main_config_name='MainConfig')

    # bail out early if the configured file system location is unusable
    if check_content_file_system(config, 'SYSTEM_XLS'):
        # return to scheduled task
        return

    # assemble the full path of the spreadsheet to write
    export_file = f'{config.cron_export_path}/{file_timestamp}_systems.xls'

    # username configured for cron runs (used for logging)
    cron_user = config.cron_username

    # build the workbook and persist it to disk
    write_xls(cron_user).save(export_file)

    # log the successful export
    info_logger(cron_user, ' SYSTEM_XLS_FILE_WRITTEN ' + export_file)
def system_cron():
    """CSV export via scheduled task to the server file system."""

    # prepare time for output file
    filetime = timezone.now().strftime('%Y%m%d_%H%M')

    # get config
    main_config_model = MainConfigModel.objects.get(
        main_config_name='MainConfig')

    # prepare output file path
    output_file_path = main_config_model.cron_export_path + '/' + filetime + '_systems.csv'

    # get username from config (used for logging inside the exporter)
    username = main_config_model.cron_username

    # open output file; the context manager guarantees the handle is closed
    # even if the CSV writer raises midway (the previous version leaked the
    # handle on error because close() was only reached on success)
    with open(output_file_path, 'w') as csv_disk:
        # call main function
        write_csv(username, csv_disk)

    # call logger
    info_logger(username, ' SYSTEM_CSV_FILE_WRITTEN ' + output_file_path)
def reportitem_post_save_system_case(sender, instance, *args, **kwargs):
    """ link system to case if system's reportitem was set to case """

    # improve readability
    reportitem = instance

    # nothing to do if the reportitem is not linked with a case
    if not reportitem.case:
        return

    # improve readability
    linked_case = reportitem.case
    linked_system = reportitem.system

    # system already linked with this case - nothing to do
    if linked_case in linked_system.case.all():
        return

    # link reportitem's system with reportitem's case
    linked_system.case.add(linked_case)

    # call message for all users
    message_users(
        User.objects.all(),
        f"System '{linked_system.system_name}' was assigned to case '{linked_case.case_name}' due to reportitem assignment.",
        constants.SUCCESS
    )

    # call logger
    info_logger('signal', f' SYSTEM_CASE_ASSIGNMENT system_id:{linked_system.system_id}|system_name:{linked_system.system_name}|case_id:{linked_case.case_id}|case_name:{linked_case.case_name}|reason:reportitem_assignment')
def system_importer_file_csv_config_based_config_view(request):
    """popup view to show and change the config based CSV importer config"""

    # the config is stored as a singleton model instance
    config_instance = SystemImporterFileCsvConfigbasedConfigModel.objects.get(
        system_importer_file_csv_configbased_config_name=
        'SystemImporterFileCsvConfigbasedConfig')

    # form was submitted
    if request.method == "POST":

        # bind submitted data to the config instance
        form = SystemImporterFileCsvConfigbasedConfigForm(request.POST, instance=config_instance)

        if form.is_valid():

            # save settings (including many-to-many relations)
            config_instance = form.save(commit=False)
            config_instance.save()
            form.save_m2m()

            # create message
            messages.success(request, 'System importer file CSV config based config changed')

            # call logger
            info_logger(
                str(request.user),
                " SYSTEM_IMPORTER_FILE_CSV_CONFIG_BASED_CONFIG_CHANGED")

            # close popup
            return HttpResponse(
                '<script type="text/javascript">window.close();</script>')

        # invalid form - show form page again
        return render(
            request,
            'dfirtrack_config/system/system_importer_file_csv_config_based_config_popup.html',
            {
                'form': form,
            })

    # initial GET - show form bound to the current config
    form = SystemImporterFileCsvConfigbasedConfigForm(instance=config_instance)

    # show form page
    return render(
        request,
        'dfirtrack_config/system/system_importer_file_csv_config_based_config_popup.html',
        {
            'form': form,
        })
def system_exporter_spreadsheet_xls_config_view(request):
    """popup view to show and change the system spreadsheet XLS exporter config"""

    # the config is stored as a singleton model instance
    config_instance = SystemExporterSpreadsheetXlsConfigModel.objects.get(
        system_exporter_spreadsheet_xls_config_name=
        'SystemExporterSpreadsheetXlsConfig')

    # form was submitted
    if request.method == "POST":

        # bind submitted data to the config instance
        form = SystemExporterSpreadsheetXlsConfigForm(request.POST, instance=config_instance)

        if form.is_valid():

            # save settings
            config_instance = form.save(commit=False)
            config_instance.save()

            # create message
            messages.success(request, 'System exporter spreadsheet XLS config changed')

            # call logger
            info_logger(str(request.user), " SYSTEM_EXPORTER_SPREADSHEET_XLS_CONFIG_CHANGED")

            # close popup
            return HttpResponse(
                '<script type="text/javascript">window.close();</script>')

        # TODO: with only non-mandatory model attributes, it is not possible to get an invalid form
        # TODO: finish prepared tests in 'dfirtrack_config.tests.system.test_system_exporter_spreadsheet_xls_config_views'
        # TODO: remove the coverage limitation with further mandatory model attributes
        else:   # coverage: ignore branch
            # show form page again
            return render(
                request,
                'dfirtrack_config/system/system_exporter_spreadsheet_xls_config_popup.html',
                {
                    'form': form,
                })

    # initial GET - show form bound to the current config
    form = SystemExporterSpreadsheetXlsConfigForm(instance=config_instance)

    # show form page
    return render(
        request,
        'dfirtrack_config/system/system_exporter_spreadsheet_xls_config_popup.html',
        {
            'form': form,
        })
def system_exporter_markdown_config_view(request):
    """popup view to show and change the markdown exporter config"""

    # the config is stored as a singleton model instance
    config_instance = SystemExporterMarkdownConfigModel.objects.get(
        system_exporter_markdown_config_name='SystemExporterMarkdownConfig'
    )

    # form was submitted
    if request.method == "POST":

        # bind submitted data to the config instance
        form = SystemExporterMarkdownConfigForm(request.POST, instance=config_instance)

        if form.is_valid():

            # save settings
            config_instance = form.save(commit=False)
            config_instance.save()

            # create message
            messages.success(request, 'System exporter markdown config changed')

            # call logger
            info_logger(str(request.user), " SYSTEM_EXPORTER_MARKDOWN_CONFIG_CHANGED")

            # close popup
            return HttpResponse(
                '<script type="text/javascript">window.close();</script>'
            )

        # invalid form - show form page again
        return render(
            request,
            'dfirtrack_config/system/system_exporter_markdown_config_popup.html',
            {
                'form': form,
            },
        )

    # initial GET - show form bound to the current config
    form = SystemExporterMarkdownConfigForm(instance=config_instance)

    # show form page
    return render(
        request,
        'dfirtrack_config/system/system_exporter_markdown_config_popup.html',
        {
            'form': form,
        },
    )
def artifact_exporter_spreadsheet_xls_config_view(request):
    """popup view to show and change the artifact spreadsheet XLS exporter config"""

    # the config is stored as a singleton model instance
    config_instance = ArtifactExporterSpreadsheetXlsConfigModel.objects.get(
        artifact_exporter_spreadsheet_xls_config_name=
        'ArtifactExporterSpreadsheetXlsConfig')

    # form was submitted
    if request.method == "POST":

        # bind submitted data to the config instance
        form = ArtifactExporterSpreadsheetXlsConfigForm(request.POST, instance=config_instance)

        if form.is_valid():

            # save settings (including many-to-many relations)
            config_instance = form.save(commit=False)
            config_instance.save()
            form.save_m2m()

            # call logger
            info_logger(str(request.user), " ARTIFACT_EXPORTER_SPREADSHEET_XLS_CONFIG_CHANGED")

            # close popup
            return HttpResponse(
                '<script type="text/javascript">window.close();</script>')

        # invalid form - show form page
        return render(
            request,
            'dfirtrack_config/artifact/artifact_exporter_spreadsheet_xls_config_popup.html',
            {
                'form': form,
            })

    # initial GET - show form bound to the current config
    form = ArtifactExporterSpreadsheetXlsConfigForm(instance=config_instance)

    # show form page
    return render(
        request,
        'dfirtrack_config/artifact/artifact_exporter_spreadsheet_xls_config_popup.html',
        {
            'form': form,
        })
def system_exporter_spreadsheet_csv_config_view(request):
    """popup view to show and change the system spreadsheet CSV exporter config"""

    # the config is stored as a singleton model instance
    config_instance = SystemExporterSpreadsheetCsvConfigModel.objects.get(
        system_exporter_spreadsheet_csv_config_name=
        'SystemExporterSpreadsheetCsvConfig')

    # form was submitted
    if request.method == "POST":

        # bind submitted data to the config instance
        form = SystemExporterSpreadsheetCsvConfigForm(request.POST, instance=config_instance)

        if form.is_valid():

            # save settings
            config_instance = form.save(commit=False)
            config_instance.save()

            # call logger
            info_logger(str(request.user), " SYSTEM_EXPORTER_SPREADSHEET_CSV_CONFIG_CHANGED")

            # close popup
            return HttpResponse(
                '<script type="text/javascript">window.close();</script>')

        # invalid form - show form page again
        return render(
            request,
            'dfirtrack_config/system/system_exporter_spreadsheet_csv_config_popup.html',
            {
                'form': form,
            })

    # initial GET - show form bound to the current config
    form = SystemExporterSpreadsheetCsvConfigForm(instance=config_instance)

    # show form page
    return render(
        request,
        'dfirtrack_config/system/system_exporter_spreadsheet_csv_config_popup.html',
        {
            'form': form,
        })
def main_config_view(request):
    """popup view to show and change the main config"""

    # the config is stored as a singleton model instance
    config_instance = MainConfigModel.objects.get(main_config_name='MainConfig')

    # form was submitted
    if request.method == "POST":

        # bind submitted data to the config instance
        form = MainConfigForm(request.POST, instance=config_instance)

        if form.is_valid():

            # save settings (including many-to-many relations)
            config_instance = form.save(commit=False)
            config_instance.save()
            form.save_m2m()

            # create message
            messages.success(request, 'Main config changed')

            # call logger
            info_logger(str(request.user), " MAIN_CONFIG_CHANGED")

            # close popup
            return HttpResponse(
                '<script type="text/javascript">window.close();</script>')

        # invalid form - show form page again
        return render(
            request,
            'dfirtrack_config/main_config_popup.html',
            {
                'form': form,
            },
        )

    # initial GET - show form bound to the current config
    form = MainConfigForm(instance=config_instance)

    # show form page
    return render(
        request,
        'dfirtrack_config/main_config_popup.html',
        {
            'form': form,
        },
    )
def clean_directory(request_user):
    """
    Clean (or create) the system path within the markdown directory.

    Removes previously exported system markdown files so stale files of
    renamed or deleted systems do not linger.

    Parameters:
        request_user -- username used for logging
    """

    # per-system markdown files live below the configured markdown root
    systems_path = markdown_path + "/docs/systems/"

    # clean or create markdown directory
    if os.path.exists(systems_path):
        # remove markdown directory (recursively)
        shutil.rmtree(systems_path)
        # recreate markdown directory (parent tree still exists here)
        os.mkdir(systems_path)
        # call logger
        debug_logger(request_user, " SYSTEM_MARKDOWN_ALL_SYSTEMS_DIRECTORY_CLEANED")
    else:
        # create markdown directory including missing parents
        # (`os.mkdir` raised FileNotFoundError when '<markdown_path>/docs'
        # did not exist yet; `os.makedirs` matches the sibling
        # config-model-based implementation of this function)
        os.makedirs(systems_path)
        # call logger
        info_logger(request_user, " SYSTEM_MARKDOWN_FOLDER_CREATED")
def clean_directory(username):
    """ function to clean the system path within the markdown directory """

    # get config model
    model = SystemExporterMarkdownConfigModel.objects.get(system_exporter_markdown_config_name = 'SystemExporterMarkdownConfig')

    # per-system markdown files live below the configured markdown root
    systems_path = model.markdown_path + "/docs/systems/"

    # directory missing - create it including parent directories
    if not os.path.exists(systems_path):
        os.makedirs(systems_path)
        # call logger
        info_logger(username, " SYSTEM_MARKDOWN_FOLDER_CREATED")
        return

    # directory exists - wipe it recursively and recreate an empty one
    shutil.rmtree(systems_path)
    os.mkdir(systems_path)

    # call logger
    debug_logger(username, " SYSTEM_MARKDOWN_ALL_SYSTEMS_DIRECTORY_CLEANED")
def main_config_view(request):
    """popup view to show and change the main config"""

    # the config is stored as a singleton model instance
    config_instance = MainConfigModel.objects.get(main_config_name='MainConfig')

    # form was submitted
    if request.method == "POST":

        # bind submitted data to the config instance
        form = MainConfigForm(request.POST, instance=config_instance)

        if form.is_valid():

            # save settings
            config_instance = form.save(commit=False)
            config_instance.save()

            # call logger
            info_logger(str(request.user), " MAIN_CONFIG_CHANGED")

            # close popup
            return HttpResponse(
                '<script type="text/javascript">window.close();</script>')

        # TODO: with 'system_name_editable' as the only non-mandatory model attribute, it is not possible to get an invalid form
        # TODO: finish prepared tests in 'dfirtrack_config.tests.main.test_main_config_views'
        # TODO: remove the coverage limitation with further mandatory model attributes
        else:   # coverage: ignore branch
            # show form page
            return render(request, 'dfirtrack_config/main_config_popup.html', {
                'form': form,
            })

    # initial GET - show form bound to the current config
    form = MainConfigForm(instance=config_instance)

    # show form page
    return render(request, 'dfirtrack_config/main_config_popup.html', {
        'form': form,
    })
def system_cron():
    """scheduled task entry point: export systems as XLS to the file system"""

    # timestamp used in the export file name
    stamp = timezone.now().strftime('%Y%m%d_%H%M')

    # fetch the main configuration singleton
    config = MainConfigModel.objects.get(main_config_name='MainConfig')

    # assemble the output file path
    target = f'{config.cron_export_path}/{stamp}_systems.xls'

    # username from config (used for logging inside the exporter)
    user = config.cron_username

    # build the workbook
    workbook = write_xls(user)

    # save spreadsheet to disk
    workbook.save(target)

    # call logger
    info_logger(user, ' SYSTEM_XLS_FILE_WRITTEN ' + target)
def _empty_if_none(value):
    """Return '' instead of None so optional attributes render as blank cells."""
    return '' if value is None else value


def write_xls(username):
    """
    Write the artifact spreadsheet.

    Builds an xlwt workbook with one worksheet for the artifacts themselves
    and - depending on the exporter config - additional worksheets for
    artifactstatus and artifacttype.

    Parameters:
        username -- username used for logging and the 'Created by' cell

    Returns:
        xlwt.Workbook object, ready to be saved by the caller
    """

    # create workbook object with UTF-8 encoding
    workbook = xlwt.Workbook(encoding='utf-8')

    # define name of worksheet within file
    worksheet_artifact = workbook.add_sheet('artifacts')

    # define styling for headline
    style = style_headline()

    # get config model
    model = ArtifactExporterSpreadsheetXlsConfigModel.objects.get(
        artifact_exporter_spreadsheet_xls_config_name='ArtifactExporterSpreadsheetXlsConfig'
    )

    """ start with headline """

    # set counter
    row_num = 0

    # create empty list
    headline = []

    # check for attribute id
    if model.artifactlist_xls_artifact_id:
        headline.append('Artifact ID')

    # append mandatory attribute
    headline.append('Artifact')

    # remaining attributes: (config flag, column label) in output order
    optional_headline_columns = (
        (model.artifactlist_xls_system_id, 'System ID'),
        (model.artifactlist_xls_system_name, 'System'),
        (model.artifactlist_xls_artifactstatus, 'Artifactstatus'),
        (model.artifactlist_xls_artifactpriority, 'Artifactpriority'),
        (model.artifactlist_xls_artifacttype, 'Artifacttype'),
        (model.artifactlist_xls_artifact_source_path, 'Source path'),
        (model.artifactlist_xls_artifact_storage_path, 'Storage path'),
        (model.artifactlist_xls_artifact_note_internal, 'Internal note'),
        (model.artifactlist_xls_artifact_note_external, 'External note'),
        (model.artifactlist_xls_artifact_note_analysisresult, 'Analysis result'),
        (model.artifactlist_xls_artifact_md5, 'MD5'),
        (model.artifactlist_xls_artifact_sha1, 'SHA1'),
        (model.artifactlist_xls_artifact_sha256, 'SHA256'),
        (model.artifactlist_xls_artifact_create_time, 'Created'),
        (model.artifactlist_xls_artifact_modify_time, 'Modified'),
    )
    for column_enabled, column_label in optional_headline_columns:
        if column_enabled:
            headline.append(column_label)

    # write headline
    worksheet_artifact = write_row(worksheet_artifact, headline, row_num, style)

    # clear styling to default
    style = style_default()

    """ append artifacts """

    # get all Artifact objects ordered by system name (fk) and artifact id
    artifacts = Artifact.objects.all().order_by("system__system_name", "artifact_id")

    # evaluate the configured artifactstatus once instead of re-running the
    # queryset membership query for every artifact
    exportable_artifactstatus = set(model.artifactlist_xls_choice_artifactstatus.all())

    # iterate over artifacts
    for artifact in artifacts:

        # skip artifact if its artifactstatus is not configured for export
        if artifact.artifactstatus not in exportable_artifactstatus:
            continue

        # autoincrement row counter
        row_num += 1

        # create empty list for line
        entryline = []

        # artifact id (optional)
        if model.artifactlist_xls_artifact_id:
            entryline.append(artifact.artifact_id)

        # artifact name (mandatory attribute)
        entryline.append(artifact.artifact_name)

        # system id
        if model.artifactlist_xls_system_id:
            entryline.append(artifact.system.system_id)
        # system name
        if model.artifactlist_xls_system_name:
            entryline.append(artifact.system.system_name)
        # artifactstatus
        if model.artifactlist_xls_artifactstatus:
            entryline.append(artifact.artifactstatus.artifactstatus_name)
        # artifactpriority
        if model.artifactlist_xls_artifactpriority:
            entryline.append(artifact.artifactpriority.artifactpriority_name)
        # artifacttype
        if model.artifactlist_xls_artifacttype:
            entryline.append(artifact.artifacttype.artifacttype_name)
        # artifact source path (may be None -> blank cell)
        if model.artifactlist_xls_artifact_source_path:
            entryline.append(_empty_if_none(artifact.artifact_source_path))
        # artifact storage path
        if model.artifactlist_xls_artifact_storage_path:
            entryline.append(artifact.artifact_storage_path)
        # artifact note internal (may be None -> blank cell)
        if model.artifactlist_xls_artifact_note_internal:
            entryline.append(_empty_if_none(artifact.artifact_note_internal))
        # artifact note external (may be None -> blank cell)
        if model.artifactlist_xls_artifact_note_external:
            entryline.append(_empty_if_none(artifact.artifact_note_external))
        # artifact note analysisresult (may be None -> blank cell)
        if model.artifactlist_xls_artifact_note_analysisresult:
            entryline.append(_empty_if_none(artifact.artifact_note_analysisresult))
        # artifact md5 (may be None -> blank cell)
        if model.artifactlist_xls_artifact_md5:
            entryline.append(_empty_if_none(artifact.artifact_md5))
        # artifact sha1 (may be None -> blank cell)
        if model.artifactlist_xls_artifact_sha1:
            entryline.append(_empty_if_none(artifact.artifact_sha1))
        # artifact sha256 (may be None -> blank cell)
        if model.artifactlist_xls_artifact_sha256:
            entryline.append(_empty_if_none(artifact.artifact_sha256))
        # artifact create time
        if model.artifactlist_xls_artifact_create_time:
            entryline.append(artifact.artifact_create_time.strftime('%Y-%m-%d %H:%M'))
        # artifact modify time
        if model.artifactlist_xls_artifact_modify_time:
            entryline.append(artifact.artifact_modify_time.strftime('%Y-%m-%d %H:%M'))

        # write line for artifact
        worksheet_artifact = write_row(worksheet_artifact, entryline, row_num, style)

        # call logger
        debug_logger(
            username,
            ' ARTIFACT_XLS_ARTIFACT_EXPORTED '
            + 'artifact_id:'
            + str(artifact.artifact_id)
            + '|artifact_name:'
            + artifact.artifact_name
            + '|system_id:'
            + str(artifact.system.system_id)
            + '|system_name:'
            + artifact.system.system_name,
        )

    # write an empty row
    row_num += 2

    # write meta information for file creation
    actualtime = timezone.now().strftime('%Y-%m-%d %H:%M')
    worksheet_artifact.write(row_num, 0, 'Created:', style)
    worksheet_artifact.write(row_num, 1, actualtime, style)
    row_num += 1
    worksheet_artifact.write(row_num, 0, 'Created by:', style)
    worksheet_artifact.write(row_num, 1, username, style)

    """ add worksheet for artifactstatus """

    # only written when the worksheet is enabled, the artifactstatus column
    # is exported, and there is at least one artifactstatus object
    if (
        model.artifactlist_xls_worksheet_artifactstatus
        and model.artifactlist_xls_artifactstatus
        and Artifactstatus.objects.count() != 0
    ):

        # define name of worksheet within file
        worksheet_artifactstatus = workbook.add_sheet('artifactstatus')

        # headline attributes
        headline_artifactstatus = ['ID', 'Artifactstatus', 'Note']

        # define styling for headline
        style = style_headline()

        # reset counter for the new worksheet
        row_num = 0

        # write headline
        worksheet_artifactstatus = write_row(
            worksheet_artifactstatus, headline_artifactstatus, row_num, style
        )

        # clear styling to default
        style = style_default()

        """ append artifactstatus """

        # get all Artifactstatus objects ordered by artifactstatus_name
        artifactstatuss = Artifactstatus.objects.all().order_by("artifactstatus_name")

        # iterate over artifactstatus
        for artifactstatus in artifactstatuss:

            # autoincrement row counter
            row_num += 1

            # build entry line; '---' is the placeholder for a missing note
            entryline_artifactstatus = [
                artifactstatus.artifactstatus_id,
                artifactstatus.artifactstatus_name,
                artifactstatus.artifactstatus_note
                if artifactstatus.artifactstatus_note
                else '---',
            ]

            # write line for artifactstatus
            worksheet_artifactstatus = write_row(
                worksheet_artifactstatus, entryline_artifactstatus, row_num, style
            )

    """ add worksheet for artifacttype """

    # only written when the worksheet is enabled, the artifacttype column
    # is exported, and there is at least one artifacttype object
    if (
        model.artifactlist_xls_worksheet_artifacttype
        and model.artifactlist_xls_artifacttype
        and Artifacttype.objects.count() != 0
    ):

        # define name of worksheet within file
        worksheet_artifacttype = workbook.add_sheet('artifacttype')

        # headline attributes
        headline_artifacttype = ['ID', 'Artifacttype', 'Note']

        # define styling for headline
        style = style_headline()

        # reset counter for the new worksheet
        row_num = 0

        # write headline
        worksheet_artifacttype = write_row(
            worksheet_artifacttype, headline_artifacttype, row_num, style
        )

        # clear styling to default
        style = style_default()

        """ append artifacttype """

        # get all Artifacttype objects ordered by artifacttype_name
        artifacttypes = Artifacttype.objects.all().order_by("artifacttype_name")

        # iterate over artifacttype
        for artifacttype in artifacttypes:

            # autoincrement row counter
            row_num += 1

            # build entry line; '---' is the placeholder for a missing note
            entryline_artifacttype = [
                artifacttype.artifacttype_id,
                artifacttype.artifacttype_name,
                artifacttype.artifacttype_note
                if artifacttype.artifacttype_note
                else '---',
            ]

            # write line for artifacttype
            worksheet_artifacttype = write_row(
                worksheet_artifacttype, entryline_artifacttype, row_num, style
            )

    # call logger
    info_logger(username, " ARTIFACT_XLS_CREATED")

    # return xls object
    return workbook
# set column counter col_num = 1 # create empty list for line entryline_tag = [] entryline_tag.append(tag.tag_id) entryline_tag.append(tag.tag_name) entryline_tag.append(tag.tag_note) # write line for tag worksheet_tag = write_row(worksheet_tag, entryline_tag, row_num, style) # call logger info_logger(username, " SYSTEM_XLS_CREATED") # return xls object return workbook @login_required(login_url="/login") def system_create_cron(request): """helper function to check config before creating scheduled task""" # get config main_config_model = MainConfigModel.objects.get(main_config_name='MainConfig') # check file system stop_cron_exporter = check_content_file_system( main_config_model, 'SYSTEM_XLS', request
def tag_creator_async(request_post, request_user):
    """
    Create many tag/system relations at once.

    Parameters:
        request_post -- POST data with 'tag' and 'system' multiple choice
                        selections
        request_user -- user object used for messages and logging
    """

    # call logger
    debug_logger(str(request_user), ' TAG_CREATOR_START')

    # extract tags (list results from request object via multiple choice field)
    tags = request_post.getlist('tag')

    # extract systems (list results from request object via multiple choice field)
    systems = request_post.getlist('system')

    # set tags_created_counter (needed for messages)
    tags_created_counter = 0

    # set system_tags_created_counter (needed for messages)
    system_tags_created_counter = 0

    # the form only depends on the (invariant) request data, so build and
    # validate it once instead of once per system/tag combination
    form = TagCreatorForm(request_post)
    form_is_valid = form.is_valid()

    # iterate over systems
    for system_id in systems:

        # autoincrement counter (counts per selected system, as before,
        # even when no relation is created)
        system_tags_created_counter += 1

        # skip object creation (but keep counting) if nothing can be added
        if not form_is_valid or not tags:
            continue

        # fetch the system once per system instead of once per tag
        system = System.objects.get(system_id=system_id)

        # iterate over tags
        for tag_id in tags:

            """ object creation """

            # get tag object
            tag = Tag.objects.get(tag_id=tag_id)

            # add tag to system
            system.tag.add(tag)

            """ object counter / log """

            # autoincrement counter
            tags_created_counter += 1

            # call logger
            system.logger(
                str(request_user),
                ' TAG_CREATOR_EXECUTED')

    """ finish tag importer """

    # call final message
    message_user(
        request_user,
        f'{tags_created_counter} tags created for {system_tags_created_counter} systems.',
        constants.SUCCESS
    )

    # call logger
    info_logger(
        str(request_user),
        ' TAG_CREATOR_STATUS'
        f' tags_created:{tags_created_counter}'
        f'|systems_affected:{system_tags_created_counter}'
    )

    # call logger
    debug_logger(str(request_user), ' TAG_CREATOR_END')
def system_creator_async(request_post, request_user):
    """ function to create many systems at once """
    # NOTE: lines come from a large text area; one system name per line.
    # Existing systems and faulty lines are counted and reported to the
    # user via messages at the end.

    # call logger
    debug_logger(str(request_user), ' SYSTEM_CREATOR_BEGIN')

    # extract lines from systemlist (list results from request object via large text area)
    lines = request_post.get('systemlist').splitlines()

    # count lines (needed for messages)
    number_of_lines = len(lines)

    # set systems_created_counter (needed for messages)
    systems_created_counter = 0

    # set systems_skipped_counter (needed for messages)
    systems_skipped_counter = 0

    # set lines_faulty_counter (needed for messages)
    lines_faulty_counter = 0

    # create empty list (needed for messages)
    skipped_systems = []

    # iterate over lines
    for line in lines:

        # skip empty lines
        if line == '':
            # autoincrement counter
            lines_faulty_counter += 1
            # call logger
            warning_logger(str(request_user), ' SYSTEM_CREATOR_ROW_EMPTY')
            continue

        # check line for length of string (system_name is limited to 50 chars)
        if len(line) > 50:
            # autoincrement counter
            lines_faulty_counter += 1
            # call logger
            warning_logger(str(request_user), ' SYSTEM_CREATOR_LONG_STRING')
            continue

        # check for existence of system
        system = System.objects.filter(system_name=line)

        """ already existing system """

        # in case of existing system
        if system.count() > 0:
            # autoincrement counter
            systems_skipped_counter += 1
            # add system name to list of skipped systems
            skipped_systems.append(line)
            # call logger
            error_logger(
                str(request_user),
                ' SYSTEM_CREATOR_SYSTEM_EXISTS '
                + 'system_name:' + line)
            # leave this loop because system with this systemname already exists
            continue

        """ new system """

        # create form with request data
        form = SystemCreatorForm(request_post)

        # create system
        if form.is_valid():

            """ object creation """

            # don't save form yet
            system = form.save(commit=False)

            # set system_name
            system.system_name = line

            # set auto values
            system.system_created_by_user_id = request_user
            system.system_modified_by_user_id = request_user
            system.system_modify_time = timezone.now()

            # save object
            system.save()

            # save manytomany
            form.save_m2m()

            """ object counter / log """

            # autoincrement counter
            systems_created_counter += 1

            # call logger
            system.logger(str(request_user), ' SYSTEM_CREATOR_EXECUTED')

    """ call final messages """

    # finish message
    message_user(request_user, 'System creator finished', constants.SUCCESS)

    # number messages (singular/plural wording depends on the counters)
    if systems_created_counter > 0:
        if systems_created_counter == 1:
            message_user(request_user, str(systems_created_counter) + ' system was created.', constants.SUCCESS)
        else:
            message_user(
                request_user,
                str(systems_created_counter) + ' systems were created.',
                constants.SUCCESS)
    if systems_skipped_counter > 0:
        if systems_skipped_counter == 1:
            message_user(
                request_user,
                str(systems_skipped_counter) + ' system was skipped. ' + str(skipped_systems),
                constants.ERROR)
        else:
            message_user(
                request_user,
                str(systems_skipped_counter) + ' systems were skipped. ' + str(skipped_systems),
                constants.ERROR)
    if lines_faulty_counter > 0:
        if lines_faulty_counter == 1:
            message_user(
                request_user,
                str(lines_faulty_counter) + ' line out of ' + str(number_of_lines) + ' lines was faulty (see log file for details).',
                constants.WARNING)
        else:
            message_user(
                request_user,
                str(lines_faulty_counter) + ' lines out of ' + str(number_of_lines) + ' lines were faulty (see log file for details).',
                constants.WARNING)

    # call logger
    info_logger(
        str(request_user),
        ' SYSTEM_CREATOR_STATUS '
        + 'created:' + str(systems_created_counter)
        + '|' + 'skipped:' + str(systems_skipped_counter)
        + '|' + 'faulty_lines:' + str(lines_faulty_counter))

    # call logger
    debug_logger(str(request_user), ' SYSTEM_CREATOR_END')
def system_modificator_async(request_post, request_user):
    """Modify many systems at once, identified either by name or by id.

    Args:
        request_post: POST data; key 'systemlist' holds system ids (multiline
            selector) or system names (large text area), keys 'tag'/'company'
            hold many-to-many additions.
        request_user: acting user, stored in the auto fields and used for logging.
    """

    # call logger
    debug_logger(str(request_user), ' SYSTEM_MODIFICATOR_BEGIN')

    # extract lines from systemlist (list results either from request object via multiline selector or via large text area)
    lines = request_post.getlist('systemlist')
    system_char_field_used = False
    # if large text area was used, the list contains only one entry with (one or more) line breaks
    if len(lines) == 1 and ("\r\n" in lines[0] or not lines[0].isdigit()):
        system_char_field_used = True
        lines = lines[0].splitlines()

    # count lines (needed for messages)
    number_of_lines = len(lines)

    # counters and list of skipped identifiers (needed for messages)
    systems_modified_counter = 0
    systems_skipped_counter = 0
    lines_faulty_counter = 0
    skipped_systems = []

    # extract tags / companies once - they are the same for every line
    # (list results from request object via multiple choice field)
    tags = request_post.getlist('tag')
    companies = request_post.getlist('company')

    # iterate over lines
    for line in lines:

        # skip empty lines
        if line == '':
            lines_faulty_counter += 1
            warning_logger(str(request_user), ' SYSTEM_MODIFICATOR_ROW_EMPTY')
            continue

        # check line for string
        if not isinstance(line, str):  # coverage: ignore branch
            lines_faulty_counter += 1
            warning_logger(str(request_user), ' SYSTEM_MODIFICATOR_NO_STRING')
            continue

        # check line for length of string
        if len(line) > 50:
            lines_faulty_counter += 1
            warning_logger(str(request_user), ' SYSTEM_MODIFICATOR_LONG_STRING')
            continue

        # check for existence of system
        if system_char_field_used:
            system_queryset = System.objects.filter(system_name=line)
        else:
            system_queryset = System.objects.filter(system_id=line)

        # evaluate the count once (the original issued two COUNT queries)
        system_count = system_queryset.count()

        """ handling non-existing or non-unique systems """

        # system does not exist
        if system_count == 0:
            systems_skipped_counter += 1
            skipped_systems.append(line)
            error_logger(
                str(request_user),
                ' SYSTEM_MODIFICATOR_SYSTEM_DOES_NOT_EXISTS ' +
                'system_id/system_name:' + line)
            # leave this loop because system with this systemname does not exist
            continue
        # more than one system exists
        elif system_count > 1:
            systems_skipped_counter += 1
            skipped_systems.append(line)
            error_logger(
                str(request_user),
                ' SYSTEM_MODIFICATOR_SYSTEM_NOT_DISTINCT ' +
                'system_id/system_name:' + line)
            # leave this loop because system with this systemname is not distinct
            continue

        # get existing system (count is exactly 1 at this point)
        system = system_queryset.get()

        """ modify system """

        # create form with request data
        form = SystemModificatorForm(
            request_post, instance=system,
            use_system_charfield=system_char_field_used)

        # modify system
        if form.is_valid():

            """ object modification """

            # don't save form yet
            system = form.save(commit=False)

            # set auto values
            system.system_modified_by_user_id = request_user
            system.system_modify_time = timezone.now()

            # save object
            system.save()

            """ object counter / log """

            # autoincrement counter
            systems_modified_counter += 1

            # call logger
            system.logger(str(request_user), ' SYSTEM_MODIFICATOR_EXECUTED')

            """ many 2 many """

            # TODO: add check for empty list

            # add tags (using save_m2m would replace existing tags)
            for tag_id in tags:
                tag = Tag.objects.get(tag_id=tag_id)
                system.tag.add(tag)

            # add companies (same additive semantics as tags)
            for company_id in companies:
                company = Company.objects.get(company_id=company_id)
                system.company.add(company)

    """ call final messages """

    # finish message
    message_user(request_user, 'System modificator finished', constants.SUCCESS)

    # number messages
    if systems_modified_counter > 0:
        if systems_modified_counter == 1:
            message_user(
                request_user,
                str(systems_modified_counter) + ' system was modified.',
                constants.SUCCESS)
        else:
            message_user(
                request_user,
                str(systems_modified_counter) + ' systems were modified.',
                constants.SUCCESS)
    if systems_skipped_counter > 0:
        if systems_skipped_counter == 1:
            message_user(
                request_user,
                str(systems_skipped_counter) + ' system was skipped. ' +
                str(skipped_systems), constants.ERROR)
        else:
            message_user(
                request_user,
                str(systems_skipped_counter) + ' systems were skipped. ' +
                str(skipped_systems), constants.ERROR)
    if lines_faulty_counter > 0:
        if lines_faulty_counter == 1:
            message_user(
                request_user,
                str(lines_faulty_counter) + ' line out of ' +
                str(number_of_lines) +
                ' lines was faulty (see log file for details).',
                constants.WARNING)
        else:
            message_user(
                request_user,
                str(lines_faulty_counter) + ' lines out of ' +
                str(number_of_lines) +
                ' lines were faulty (see log file for details).',
                constants.WARNING)

    # call logger
    info_logger(
        str(request_user), ' SYSTEM_MODIFICATOR_STATUS ' + 'modified:' +
        str(systems_modified_counter) + '|' + 'skipped:' +
        str(systems_skipped_counter) + '|' + 'faulty_lines:' +
        str(lines_faulty_counter))

    # call logger
    debug_logger(str(request_user), " SYSTEM_MODIFICATOR_END")
def task_creator_async(request_post, request_user):
    """Create many tasks for many systems at once.

    Args:
        request_post: POST data; keys 'taskname' and 'system' hold the selected
            taskname ids and system ids (multiple choice fields).
        request_user: acting user, stored in the auto fields and used for logging.
    """

    # call logger
    debug_logger(str(request_user), ' TASK_CREATOR_START')

    # extract tasknames (list results from request object via multiple choice field)
    tasknames = request_post.getlist('taskname')
    # extract systems (list results from request object via multiple choice field)
    systems = request_post.getlist('system')

    # counters (needed for messages)
    tasks_created_counter = 0
    system_tasks_created_counter = 0

    # get taskstatus objects for comparing; loop-invariant, so fetched once
    # instead of once per created task
    taskstatus_working = Taskstatus.objects.get(taskstatus_name='20_working')
    taskstatus_done = Taskstatus.objects.get(taskstatus_name='30_done')

    # iterate over systems
    for system in systems:

        # autoincrement counter
        system_tasks_created_counter += 1

        # iterate over tasknames
        for taskname in tasknames:

            # create form with request data
            form = TaskCreatorForm(request_post)

            # create task
            if form.is_valid():

                """ object creation """

                # don't save form yet
                task = form.save(commit=False)

                # set taskname and system
                task.taskname = Taskname.objects.get(taskname_id=taskname)
                task.system = System.objects.get(system_id=system)

                # set auto values
                task.task_created_by_user_id = request_user
                task.task_modified_by_user_id = request_user

                # set times depending on submitted taskstatus
                if task.taskstatus == taskstatus_working:
                    task.task_started_time = timezone.now()
                if task.taskstatus == taskstatus_done:
                    task.task_started_time = timezone.now()
                    task.task_finished_time = timezone.now()

                # save object
                task.save()

                # save manytomany
                form.save_m2m()

                """ object counter / log """

                # autoincrement counter
                tasks_created_counter += 1

                # call logger
                task.logger(str(request_user), ' TASK_CREATOR_EXECUTED')

    """ finish task creator """

    # call final message
    message_user(
        request_user,
        f'{tasks_created_counter} tasks created for {system_tasks_created_counter} systems.',
        constants.SUCCESS)

    # call logger
    info_logger(
        str(request_user),
        f' TASK_CREATOR_STATUS'
        f' tasks_created:{tasks_created_counter}'
        f'|systems_affected:{system_tasks_created_counter}')

    # call logger
    debug_logger(str(request_user), ' TASK_CREATOR_END')
def system_creator_async(request_post, request_user):
    """Create many systems at once from a submitted list of system names.

    Args:
        request_post: POST data; key 'systemlist' holds one system name per line.
        request_user: acting user, stored in the auto fields and used for logging.
    """

    # call logger
    debug_logger(str(request_user), ' SYSTEM_CREATOR_START')

    # extract lines from systemlist (list results from request object via large text area)
    lines = request_post.get('systemlist').splitlines()

    # count lines (needed for messages)
    number_of_lines = len(lines)

    # counters and list of skipped names (needed for messages)
    systems_created_counter = 0
    systems_skipped_counter = 0
    lines_faulty_counter = 0
    skipped_systems = []

    # iterate over lines
    for line in lines:

        # skip empty lines
        if line == '':
            lines_faulty_counter += 1
            warning_logger(str(request_user), ' SYSTEM_CREATOR_ROW_EMPTY')
            continue

        # skip overlong system names (model limit is 50 characters)
        if len(line) > 50:
            lines_faulty_counter += 1
            warning_logger(str(request_user), ' SYSTEM_CREATOR_LONG_STRING')
            continue

        """ already existing system """

        # skip already existing system (`exists()` avoids a full COUNT query)
        if System.objects.filter(system_name=line).exists():
            systems_skipped_counter += 1
            skipped_systems.append(line)
            warning_logger(
                str(request_user),
                f' SYSTEM_CREATOR_SYSTEM_EXISTS system_name:{line}')
            # leave this loop because system with this systemname already exists
            continue

        """ new system """

        # create form with request data
        form = SystemCreatorForm(request_post)

        # create system
        if form.is_valid():

            """ object creation """

            # don't save form yet
            system = form.save(commit=False)

            # set system_name
            system.system_name = line

            # set auto values
            system.system_created_by_user_id = request_user
            system.system_modified_by_user_id = request_user
            system.system_modify_time = timezone.now()

            # save object
            system.save()

            # save manytomany
            form.save_m2m()

            """ object counter / log """

            # autoincrement counter
            systems_created_counter += 1

            # call logger
            system.logger(str(request_user), ' SYSTEM_CREATOR_EXECUTED')

    """ finish system importer """

    # call final messages
    final_messages(
        systems_created_counter, systems_skipped_counter,
        lines_faulty_counter, skipped_systems, number_of_lines, request_user)

    # call logger
    info_logger(
        str(request_user),
        f' SYSTEM_CREATOR_STATUS'
        f' created:{systems_created_counter}'
        f'|skipped:{systems_skipped_counter}'
        f'|faulty_lines:{lines_faulty_counter}'
    )

    # call logger
    debug_logger(str(request_user), ' SYSTEM_CREATOR_END')
def systems(request):
    """Export all systems flagged for spreadsheet export as an XLS attachment.

    Returns an HttpResponse carrying the workbook; systems with
    system_export_spreadsheet disabled are skipped.
    """

    # create xls MIME type object
    sod = HttpResponse(content_type='application/ms-excel')

    # define filename
    sod['Content-Disposition'] = 'attachment; filename="systems.xls"'

    # preamble
    workbook = xlwt.Workbook(encoding='utf-8')
    worksheet = workbook.add_sheet('Systems')

    # set counter
    row_num = 0

    # define bold styling for the headline
    style = xlwt.XFStyle()
    font = xlwt.Font()
    font.bold = True
    style.font = font

    # prepare headline
    columns = [
        'ID', 'System', 'Status', 'Reason', 'Recommendation', 'Type', 'IP',
        'Domain', 'DNS Name', 'OS', 'Company', 'Location', 'Serviceprovider',
        'Created', 'Last modified',
    ]

    # write headline
    for col_num in range(len(columns)):
        worksheet.write(row_num, col_num, columns[col_num], style)

    # clear styling to default for the data rows
    style = xlwt.XFStyle()

    # get all System objects ordered by system_name
    systems = System.objects.all().order_by("system_name")

    # iterate over systems
    for system in systems:

        # skip system depending on export variable
        if not system.system_export_spreadsheet:
            continue

        # autoincrement row counter
        row_num += 1

        # resolve optional foreign keys to empty strings
        reason = '' if system.reason is None else system.reason.reason_name
        recommendation = ('' if system.recommendation is None
                          else system.recommendation.recommendation_name)
        systemtype = ('' if system.systemtype is None
                      else system.systemtype.systemtype_name)

        # join all ips of the system with newlines
        ip = '\n'.join(ip_obj.ip_ip for ip_obj in system.ip.all())

        domain = '' if system.domain is None else system.domain.domain_name
        dnsname = ('' if system.dnsname is None
                   else system.dnsname.dnsname_name)
        os_name = '' if system.os is None else system.os.os_name

        # join all companies of the system with newlines
        company = '\n'.join(
            company_obj.company_name for company_obj in system.company.all())

        location = ('' if system.location is None
                    else system.location.location_name)
        serviceprovider = ('' if system.serviceprovider is None
                           else system.serviceprovider.serviceprovider_name)

        # prepare string values for datetimes
        create_time = system.system_create_time.strftime('%Y-%m-%d %H:%M')
        modify_time = system.system_modify_time.strftime('%Y-%m-%d %H:%M')

        # prepare a line for every system
        columns = [
            system.system_id,
            system.system_name,
            system.systemstatus.systemstatus_name,
            reason,
            recommendation,
            systemtype,
            ip,
            domain,
            dnsname,
            os_name,
            company,
            location,
            serviceprovider,
            create_time,
            modify_time,
        ]

        # write a line for every system (iterate over column content)
        for col_num in range(len(columns)):
            worksheet.write(row_num, col_num, columns[col_num], style)

    # leave an empty row before the meta information
    row_num += 2

    # write meta information
    actualtime = strftime('%Y-%m-%d %H:%M')
    worksheet.write(row_num, 0, 'SOD created:', style)
    worksheet.write(row_num, 1, actualtime, style)
    row_num += 1
    creator = str(request.user)
    worksheet.write(row_num, 0, 'Created by:', style)
    worksheet.write(row_num, 1, creator, style)

    # close file
    workbook.save(sod)

    # call logger
    info_logger(str(request.user), " SYSTEM_XLS_CREATED")

    # return xls object
    return sod
str(system.system_id) + '|system_name:' + system.system_name, ) # write an empty row csv_writer.writerow([]) # prepare string value for actual datetimes actualtime = timezone.now().strftime('%Y-%m-%d %H:%M') # write meta information csv_writer.writerow(['Created:', actualtime]) creator = username csv_writer.writerow(['Created by:', creator]) # call logger info_logger(username, " SYSTEM_CSV_CREATED") # return csv object return csv_file @login_required(login_url="/login") def system_create_cron(request): """helper function to check config before creating scheduled task""" # get config main_config_model = MainConfigModel.objects.get( main_config_name='MainConfig') # check file system stop_cron_exporter = check_content_file_system(main_config_model,
def case_creator_async(request_post, request_user):
    """Assign many cases to many systems (and vice versa) at once.

    Args:
        request_post: POST data; keys 'case' and 'system' hold the selected
            case ids and system ids (multiple choice fields).
        request_user: acting user, used for messages and logging.
    """

    # call logger
    debug_logger(str(request_user), ' CASE_CREATOR_START')

    # extract cases (list results from request object via multiple choice field)
    cases = request_post.getlist('case')
    # extract systems (list results from request object via multiple choice field)
    systems = request_post.getlist('system')

    # counters (needed for messages)
    cases_affected_counter = 0
    systems_affected_counter = 0

    # iterate over systems
    for system_id in systems:

        # autoincrement counter
        systems_affected_counter += 1

        # iterate over cases
        for case_id in cases:

            # create form with request data
            form = CaseCreatorForm(request_post)

            # create relation
            if form.is_valid():

                """ object creation """

                # get objects
                system = System.objects.get(system_id=system_id)
                case = Case.objects.get(case_id=case_id)

                # add case to system
                system.case.add(case)

                """ object counter / log """

                # autoincrement counter
                cases_affected_counter += 1

                # call logger
                system.logger(str(request_user), ' CASE_CREATOR_EXECUTED')

    """ finish case importer """

    # normalize case number (counts cases once; other meaning than with tag creator)
    # guard against ZeroDivisionError when no system was submitted
    if systems_affected_counter:
        cases_affected_counter = int(
            cases_affected_counter / systems_affected_counter)

    # call final message
    message_user(
        request_user,
        f'{cases_affected_counter} cases assigned to {systems_affected_counter} systems.',
        constants.SUCCESS)

    # call logger
    info_logger(
        str(request_user),
        f' CASE_CREATOR_STATUS'
        f' cases_affected:{cases_affected_counter}'
        f'|systems_affected:{systems_affected_counter}')

    # call logger
    debug_logger(str(request_user), ' CASE_CREATOR_END')
def artifact(request):
    """Export artifacts as an XLS attachment according to the exporter config.

    Column selection and the exportable artifactstatus come from
    ArtifactExporterSpreadsheetXlsConfigModel; optional extra worksheets list
    all artifactstatus / artifacttype objects.
    """

    # create xls MIME type object
    artifactlist = HttpResponse(content_type='application/ms-excel')

    # define filename
    artifactlist[
        'Content-Disposition'] = 'attachment; filename="artifactlist.xls"'

    # create workbook object with UTF-8 encoding
    workbook = xlwt.Workbook(encoding='utf-8')

    # define name of worksheet within file
    worksheet_artifact = workbook.add_sheet('artifacts')

    # define styling for headline
    style = style_headline()

    # get config model
    model = ArtifactExporterSpreadsheetXlsConfigModel.objects.get(
        artifact_exporter_spreadsheet_xls_config_name=
        'ArtifactExporterSpreadsheetXlsConfig')

    """ start with headline """

    # set counter
    row_num = 0

    # create empty list
    headline = []

    # check for attribute id
    if model.artifactlist_xls_artifact_id:
        headline.append('Artifact ID')

    # append mandatory attribute
    headline.append('Artifact')

    # check for remaining attributes
    if model.artifactlist_xls_system_id:
        headline.append('System ID')
    if model.artifactlist_xls_system_name:
        headline.append('System')
    if model.artifactlist_xls_artifactstatus:
        headline.append('Artifactstatus')
    if model.artifactlist_xls_artifacttype:
        headline.append('Artifacttype')
    if model.artifactlist_xls_artifact_source_path:
        headline.append('Source path')
    if model.artifactlist_xls_artifact_storage_path:
        headline.append('Storage path')
    if model.artifactlist_xls_artifact_note:
        headline.append('Note')
    if model.artifactlist_xls_artifact_md5:
        headline.append('MD5')
    if model.artifactlist_xls_artifact_sha1:
        headline.append('SHA1')
    if model.artifactlist_xls_artifact_sha256:
        headline.append('SHA256')
    if model.artifactlist_xls_artifact_create_time:
        headline.append('Created')
    if model.artifactlist_xls_artifact_modify_time:
        headline.append('Modified')

    # write headline
    worksheet_artifact = write_row(worksheet_artifact, headline, row_num, style)

    # clear styling to default
    style = style_default()

    """ append artifacts """

    # evaluate the configured artifactstatus once - the original re-queried
    # `...choice_artifactstatus.all()` for every single artifact
    exportable_artifactstatus = set(
        model.artifactlist_xls_choice_artifactstatus.all())

    # get all Artifact objects ordered by system name (fk) and artifact id
    artifacts = Artifact.objects.all().order_by(
        "system__system_name", "artifact_id")

    # iterate over artifacts
    for artifact in artifacts:

        # skip artifact if its artifactstatus is not configured for export
        if artifact.artifactstatus not in exportable_artifactstatus:
            continue

        # autoincrement row counter
        row_num += 1

        # create empty list for line
        entryline = []

        """ check for attribute """

        # artifact id
        if model.artifactlist_xls_artifact_id:
            entryline.append(artifact.artifact_id)

        """ append mandatory attribute """

        # artifact name
        entryline.append(artifact.artifact_name)

        """ check for remaining attributes """

        # system id
        if model.artifactlist_xls_system_id:
            entryline.append(artifact.system.system_id)
        # system name
        if model.artifactlist_xls_system_name:
            entryline.append(artifact.system.system_name)
        # artifactstatus
        if model.artifactlist_xls_artifactstatus:
            entryline.append(artifact.artifactstatus.artifactstatus_name)
        # artifacttype
        if model.artifactlist_xls_artifacttype:
            entryline.append(artifact.artifacttype.artifacttype_name)
        # artifact source path (nullable)
        if model.artifactlist_xls_artifact_source_path:
            entryline.append(
                '' if artifact.artifact_source_path is None
                else artifact.artifact_source_path)
        # artifact storage path
        if model.artifactlist_xls_artifact_storage_path:
            entryline.append(artifact.artifact_storage_path)
        # artifact note (nullable)
        if model.artifactlist_xls_artifact_note:
            entryline.append(
                '' if artifact.artifact_note is None
                else artifact.artifact_note)
        # artifact md5 (nullable)
        if model.artifactlist_xls_artifact_md5:
            entryline.append(
                '' if artifact.artifact_md5 is None else artifact.artifact_md5)
        # artifact sha1 (nullable)
        if model.artifactlist_xls_artifact_sha1:
            entryline.append(
                '' if artifact.artifact_sha1 is None
                else artifact.artifact_sha1)
        # artifact sha256 (nullable)
        if model.artifactlist_xls_artifact_sha256:
            entryline.append(
                '' if artifact.artifact_sha256 is None
                else artifact.artifact_sha256)
        # artifact create time
        if model.artifactlist_xls_artifact_create_time:
            entryline.append(
                artifact.artifact_create_time.strftime('%Y-%m-%d %H:%M'))
        # artifact modify time
        if model.artifactlist_xls_artifact_modify_time:
            entryline.append(
                artifact.artifact_modify_time.strftime('%Y-%m-%d %H:%M'))

        # write line for artifact
        worksheet_artifact = write_row(
            worksheet_artifact, entryline, row_num, style)

    # leave an empty row before the meta information
    row_num += 2

    # write meta information for file creation
    actualtime = strftime('%Y-%m-%d %H:%M')
    worksheet_artifact.write(row_num, 0, 'Artifactlist created:', style)
    worksheet_artifact.write(row_num, 1, actualtime, style)
    row_num += 1
    creator = str(request.user)
    worksheet_artifact.write(row_num, 0, 'Created by:', style)
    worksheet_artifact.write(row_num, 1, creator, style)

    """ add worksheet for artifactstatus """

    # check all conditions
    if (model.artifactlist_xls_worksheet_artifactstatus
            and model.artifactlist_xls_artifactstatus
            and Artifactstatus.objects.count() != 0):

        # define name of worksheet within file
        worksheet_artifactstatus = workbook.add_sheet('artifactstatus')

        # prepare headline attributes
        headline_artifactstatus = ['ID', 'Artifactstatus', 'Note']

        # define styling for headline
        style = style_headline()

        # set counter
        row_num = 0

        # write headline
        worksheet_artifactstatus = write_row(
            worksheet_artifactstatus, headline_artifactstatus, row_num, style)

        # clear styling to default
        style = style_default()

        """ append artifactstatus """

        # get all Artifactstatus objects ordered by artifactstatus_id
        artifactstatuss = Artifactstatus.objects.all().order_by(
            "artifactstatus_id")

        # iterate over artifactstatus
        for artifactstatus in artifactstatuss:

            # autoincrement row counter
            row_num += 1

            entryline_artifactstatus = [
                artifactstatus.artifactstatus_id,
                artifactstatus.artifactstatus_name,
                # add placeholder if artifactstatus note does not exist
                artifactstatus.artifactstatus_note
                if artifactstatus.artifactstatus_note else '---',
            ]

            # write line for artifactstatus
            worksheet_artifactstatus = write_row(
                worksheet_artifactstatus, entryline_artifactstatus, row_num,
                style)

    """ add worksheet for artifacttype """

    # check all conditions
    if (model.artifactlist_xls_worksheet_artifacttype
            and model.artifactlist_xls_artifacttype
            and Artifacttype.objects.count() != 0):

        # define name of worksheet within file
        worksheet_artifacttype = workbook.add_sheet('artifacttype')

        # prepare headline attributes
        headline_artifacttype = ['ID', 'Artifacttype', 'Note']

        # define styling for headline
        style = style_headline()

        # set counter
        row_num = 0

        # write headline
        worksheet_artifacttype = write_row(
            worksheet_artifacttype, headline_artifacttype, row_num, style)

        # clear styling to default
        style = style_default()

        """ append artifacttype """

        # get all Artifacttype objects ordered by artifacttype_name
        artifacttypes = Artifacttype.objects.all().order_by(
            "artifacttype_name")

        # iterate over artifacttype
        for artifacttype in artifacttypes:

            # autoincrement row counter
            row_num += 1

            entryline_artifacttype = [
                artifacttype.artifacttype_id,
                artifacttype.artifacttype_name,
                # add placeholder if artifacttype note does not exist
                artifacttype.artifacttype_note
                if artifacttype.artifacttype_note else '---',
            ]

            # write line for artifacttype
            worksheet_artifacttype = write_row(
                worksheet_artifacttype, entryline_artifacttype, row_num, style)

    # close file
    workbook.save(artifactlist)

    # call logger
    info_logger(str(request.user), " ARTIFACT_XLS_CREATED")

    # return xls object
    return artifactlist
# system create time if model.spread_csv_system_create_time: system_create_time = system.system_create_time.strftime( '%Y-%m-%d %H:%M') entryline.append(system_create_time) # system modify time if model.spread_csv_system_modify_time: system_modify_time = system.system_modify_time.strftime( '%Y-%m-%d %H:%M') entryline.append(system_modify_time) # write entryline sod_writer.writerow(entryline) # write an empty row sod_writer.writerow([]) # prepare string value for actual datetimes actualtime = strftime('%Y-%m-%d %H:%M') # write meta information sod_writer.writerow(['SOD created:', actualtime]) creator = request.user sod_writer.writerow(['Created by:', creator]) # call logger info_logger(str(request.user), " SYSTEM_CSV_CREATED") # return csv object return sod
def csv_entry_import_async(system_id, file_name, field_mapping, request_user,
                           case_id=None):
    """Import timeline entries for a system from an uploaded CSV file.

    Reads the file row by row (skipping the header), deduplicates entries via
    a SHA1 over time/type/content, optionally attaches a case and tags, and
    deletes the file afterwards. Reports results via user messages and logs.

    Args:
        system_id: primary key of the target System.
        file_name: path of the uploaded CSV file.
        field_mapping: dict mapping 'entry_time' / 'entry_type' /
            'entry_content' / 'entry_tag' to column indices (-1 = no tag column).
        request_user: acting user for auto fields, messages and logging.
        case_id: optional primary key of a Case to attach to every entry.
    """

    row_count = 0
    fail_count = 0
    dup_count = 0

    system = System.objects.get(system_id=system_id)
    case = Case.objects.get(case_id=case_id) if case_id else None

    try:
        # NOTE(review): no explicit encoding - relies on the platform default;
        # confirm uploads are written with the same encoding
        with open(file_name, newline='') as csvfile:
            spamreader = csv.reader(csvfile, delimiter=',', quotechar='"')
            # skip header line; the `None` default prevents an uncaught
            # StopIteration on a completely empty file
            next(spamreader, None)
            for row in spamreader:
                try:
                    row_count += 1
                    entry = Entry()
                    entry.system = system
                    entry.entry_created_by_user_id = request_user
                    entry.entry_modified_by_user_id = request_user
                    # hash time/type/content to detect duplicate entries
                    m = hashlib.sha1()
                    entry.entry_time = row[field_mapping['entry_time']]
                    m.update(entry.entry_time.encode())
                    entry.entry_type = row[field_mapping['entry_type']]
                    m.update(entry.entry_type.encode())
                    entry.entry_content = row[field_mapping['entry_content']]
                    m.update(entry.entry_content.encode())
                    # compute the digest once and reuse it
                    entry_sha1 = m.hexdigest()
                    entry.entry_sha1 = entry_sha1
                    entry.case = case
                    if Entry.objects.filter(entry_sha1=entry_sha1).exists():
                        dup_count += 1
                        continue
                    entry.full_clean()
                    entry.save()
                    if field_mapping['entry_tag'] != -1:
                        # tag column holds a python-literal list of tag names
                        tags = ast.literal_eval(row[field_mapping['entry_tag']])
                        for tag_name in tags:
                            tag = Tag.objects.filter(tag_name=tag_name)
                            # only add unambiguous, existing tags
                            if len(tag) == 1:
                                entry.tag.add(tag[0])
                except (ValidationError, IndexError) as e:
                    # IndexError: row shorter than the mapped columns - count
                    # it as a failed row instead of aborting the whole import
                    debug_logger(
                        str(request_user),
                        f' ENTRY_CSV_IMPORT'
                        f' ERROR: {e}'
                    )
                    fail_count += 1
                    continue
        os.remove(file_name)
    except FileNotFoundError:
        info_logger(
            str(request_user),
            ' ENTRY_CSV_IMPORT'
            ' ERROR: File not found'
        )
        message_user(
            request_user,
            "Could not import the CSV file. Maybe the upload wasn't successful or the file was deleted.",
            constants.ERROR
        )
        return

    # report the outcome to the user
    if fail_count == 0 and dup_count != 0:
        message_user(
            request_user,
            f'Imported {row_count-dup_count} entries for system "{system.system_name}". Removed {dup_count} duplicates.',
            constants.SUCCESS
        )
    elif fail_count == 0:
        message_user(
            request_user,
            f'Imported {row_count} entries for system "{system.system_name}".',
            constants.SUCCESS
        )
    else:
        message_user(
            request_user,
            f'Could not import {fail_count} of {row_count} entries for system "{system.system_name}".',
            constants.WARNING
        )

    # call logger
    info_logger(
        str(request_user),
        f' ENTRY_CSV_IMPORT'
        f' created:{row_count}'
        f' failed:{fail_count}'
        f' duplicates:{dup_count}'
    )
def systems(request):
    """Export all systems flagged for spreadsheet export as a CSV attachment.

    Returns an HttpResponse carrying the CSV; systems with
    system_export_spreadsheet disabled are skipped.
    """

    # create csv MIME type object
    sod = HttpResponse(content_type='text/csv')

    # define filename
    sod['Content-Disposition'] = 'attachment; filename="sod.csv"'

    # create file object for writing lines
    sod_writer = csv.writer(sod)

    # write headline
    sod_writer.writerow([
        'ID',
        'System',
        'Status',
        'Reason',
        'Recommendation',
        'Type',
        'IP',
        'Domain',
        'DNS Name',
        'OS',
        'Company',
        'Location',
        'Serviceprovider',
        'Created',
        'Last modified',
    ])

    # get all System objects ordered by system_name
    systems = System.objects.all().order_by("system_name")

    # iterate over systems
    for system in systems:

        # skip system depending on export variable
        if not system.system_export_spreadsheet:
            continue

        # resolve optional foreign keys to empty strings
        reason = '' if system.reason is None else system.reason.reason_name
        recommendation = ('' if system.recommendation is None
                          else system.recommendation.recommendation_name)
        systemtype = ('' if system.systemtype is None
                      else system.systemtype.systemtype_name)

        # join all ips of the system with newlines
        ip = '\n'.join(ip_obj.ip_ip for ip_obj in system.ip.all())

        domain = '' if system.domain is None else system.domain.domain_name
        dnsname = ('' if system.dnsname is None
                   else system.dnsname.dnsname_name)
        os_name = '' if system.os is None else system.os.os_name

        # join all companies of the system with newlines
        company = '\n'.join(
            company_obj.company_name for company_obj in system.company.all())

        location = ('' if system.location is None
                    else system.location.location_name)
        serviceprovider = ('' if system.serviceprovider is None
                           else system.serviceprovider.serviceprovider_name)

        # prepare string values for datetimes
        create_time = system.system_create_time.strftime('%Y-%m-%d %H:%M')
        modify_time = system.system_modify_time.strftime('%Y-%m-%d %H:%M')

        # write a line for every system
        sod_writer.writerow([
            system.system_id,
            system.system_name,
            system.systemstatus.systemstatus_name,
            reason,
            recommendation,
            systemtype,
            ip,
            domain,
            dnsname,
            os_name,
            company,
            location,
            serviceprovider,
            create_time,
            modify_time,
        ])

    # write an empty row
    sod_writer.writerow([])

    # prepare string value for actual datetime
    actualtime = strftime('%Y-%m-%d %H:%M')

    # write meta information
    sod_writer.writerow(['SOD created:', actualtime])
    creator = request.user
    sod_writer.writerow(['Created by:', creator])

    # call logger
    info_logger(str(request.user), " SYSTEM_CSV_CREATED")

    # return csv object
    return sod
def artifact_creator_async(request_post, request_user):
    """
    Create many artifacts for many systems at once (async task body).

    For every selected system x artifacttype combination, validates an
    ArtifactCreatorForm built from `request_post` and, if valid, creates an
    artifact with either the alternative name from the form or the
    artifacttype's name. Emits a final user message and log entries.
    Returns None.
    """
    # call logger
    debug_logger(str(request_user), ' ARTIFACT_CREATOR_START')
    # extract artifacttypes (list results from request object via multiple choice field)
    artifacttypes = request_post.getlist('artifacttype')
    # extract systems (list results from request object via multiple choice field)
    systems = request_post.getlist('system')
    # set artifacts_created_counter (needed for messages)
    artifacts_created_counter = 0
    # set system_artifacts_created_counter (needed for messages)
    system_artifacts_created_counter = 0
    # iterate over systems
    for system in systems:
        # autoincrement counter
        system_artifacts_created_counter += 1
        # iterate over artifacttypes
        for artifacttype in artifacttypes:
            # create form with request data
            form = ArtifactCreatorForm(request_post)
            # create artifact
            if form.is_valid():
                """object creation"""
                # don't save form yet
                artifact = form.save(commit=False)
                # fetch the artifacttype once - also used below for the default name
                # (the original code issued this query twice per iteration)
                artifacttype_object = Artifacttype.objects.get(
                    artifacttype_id=artifacttype
                )
                # set artifacttype and system
                artifact.artifacttype = artifacttype_object
                artifact.system = System.objects.get(system_id=system)
                """set artifact name """
                # get values from form
                alternative_artifact_name_choice = form.cleaned_data.get(
                    'alternative_artifact_name_choice'
                )
                alternative_artifact_name = form.cleaned_data.get(
                    'alternative_artifact_name'
                )
                # set artifact name
                if alternative_artifact_name_choice:
                    # set artifact name according to text field
                    artifact.artifact_name = alternative_artifact_name
                else:
                    # set artifact name according to artifacttype
                    artifact.artifact_name = artifacttype_object.artifacttype_name
                # set auto values
                artifact.artifact_created_by_user_id = request_user
                artifact.artifact_modified_by_user_id = request_user
                # save object
                artifact.save()
                # save manytomany
                form.save_m2m()
                """ object counter / log """
                # autoincrement counter
                artifacts_created_counter += 1
                # call logger
                artifact.logger(str(request_user), ' ARTIFACT_CREATOR_EXECUTED')
    """ finish artifact creator """
    # call final message
    message_user(
        request_user,
        f'{artifacts_created_counter} artifacts created for {system_artifacts_created_counter} systems.',
        constants.SUCCESS,
    )
    # call logger
    info_logger(
        str(request_user),
        ' ARTIFACT_CREATOR_STATUS'
        f' artifacts_created:{artifacts_created_counter}'
        f'|systems_affected:{system_artifacts_created_counter}',
    )
    # call logger
    debug_logger(str(request_user), ' ARTIFACT_CREATOR_END')
def system_importer_file_csv_config_view(request):
    """
    Popup view for the system importer file CSV config.

    GET renders the config form; POST validates and saves it, emits user
    messages / log entries, warns about missing or empty CSV import files
    and about updating existing systems, then closes the popup window.
    Returns an HttpResponse in every branch.
    """
    # POST request
    if request.method == "POST":
        # get config model
        model = SystemImporterFileCsvConfigModel.objects.get(
            system_importer_file_csv_config_name='SystemImporterFileCsvConfig')
        # get form
        form = SystemImporterFileCsvConfigForm(request.POST, instance=model)
        if form.is_valid():
            # save settings
            model = form.save(commit=False)
            model.save()
            form.save_m2m()
            # create message
            messages.success(request, 'System importer file CSV config changed')
            # call logger
            info_logger(str(request.user), " SYSTEM_IMPORTER_FILE_CSV_CONFIG_CHANGED")
            """ check file system """
            # build csv file path
            csv_path = model.csv_import_path + '/' + model.csv_import_filename
            """
            CSV import path does not exist - handled in dfirtrack_config.form
            CSV import path is not readable - handled in dfirtrack_config.form
            CSV import file does exist but is not readable - handled in dfirtrack_config.form
            """
            # the two cases are mutually exclusive - stat the file once
            if not os.path.isfile(csv_path):
                # CSV import file does not exist - show warning
                messages.warning(
                    request,
                    'CSV import file does not exist at the moment. Make sure the file is available during import.'
                )
            elif os.path.getsize(csv_path) == 0:
                # CSV import file is empty - show warning
                messages.warning(
                    request,
                    'CSV import file is empty. Make sure the file contains systems during import.'
                )
            # show warning if existing systems will be updated
            if not model.csv_skip_existing_system:
                # call message
                messages.warning(request, 'WARNING: Existing systems will be updated!')
            # close popup
            return HttpResponse(
                '<script type="text/javascript">window.close();</script>')
        else:
            # show form page again
            return render(
                request,
                'dfirtrack_config/system/system_importer_file_csv_config_popup.html',
                {
                    'form': form,
                })
    # GET request
    else:
        # get config model
        model = SystemImporterFileCsvConfigModel.objects.get(
            system_importer_file_csv_config_name='SystemImporterFileCsvConfig')
        # get form
        form = SystemImporterFileCsvConfigForm(instance=model)
        # show form page
        return render(
            request,
            'dfirtrack_config/system/system_importer_file_csv_config_popup.html',
            {
                'form': form,
            })