def end_message(request_user, sorted_text):
    """Notify the requesting user that a sorted markdown export finished."""

    # compose the notification text for the given sort criterion
    finished_text = f'System exporter markdown (sorted by {sorted_text}) finished'

    # call async message for single user
    message_user(
        request_user,
        finished_text,
        constants.SUCCESS,
    )

    # return to calling function in '...sorted'
    return
def artifact_creator_async(request_post, request_user):
    """Create many artifacts for many systems at once.

    Iterates over the cartesian product of the systems and artifacttypes
    selected in the submitted form data and creates one artifact per
    combination.  Sends a summary message to the requesting user and logs
    start / per-artifact / status / end events.

    Args:
        request_post: POST data containing the 'artifacttype' and 'system'
            multiple-choice lists plus the ArtifactCreatorForm fields.
        request_user: user who triggered the creation; used for messages,
            logging and the created/modified-by fields.
    """

    # call logger
    debug_logger(str(request_user), ' ARTIFACT_CREATOR_START')

    # extract artifacttypes (list results from request object via multiple choice field)
    artifacttypes = request_post.getlist('artifacttype')

    # extract systems (list results from request object via multiple choice field)
    systems = request_post.getlist('system')

    # set artifacts_created_counter (needed for messages)
    artifacts_created_counter = 0

    # set system_artifacts_created_counter (needed for messages)
    # NOTE(review): this counts every selected system, even if no artifact
    # is created for it (form invalid) — preserved from original behavior
    system_artifacts_created_counter = 0

    # iterate over systems
    for system in systems:

        # autoincrement counter
        system_artifacts_created_counter += 1

        # iterate over artifacttypes
        for artifacttype in artifacttypes:

            # create form with request data
            form = ArtifactCreatorForm(request_post)

            # create artifact
            if form.is_valid():

                """object creation"""

                # don't save form yet
                artifact = form.save(commit=False)

                # fetch the artifacttype once and reuse it below
                # (the original issued a second identical DB query for the name)
                artifacttype_object = Artifacttype.objects.get(
                    artifacttype_id=artifacttype
                )

                # set artifacttype and system
                artifact.artifacttype = artifacttype_object
                artifact.system = System.objects.get(system_id=system)

                """set artifact name"""

                # get values from form
                alternative_artifact_name_choice = form.cleaned_data.get(
                    'alternative_artifact_name_choice'
                )
                alternative_artifact_name = form.cleaned_data.get(
                    'alternative_artifact_name'
                )

                # set artifact name
                if alternative_artifact_name_choice:
                    # set artifact name according to text field
                    artifact.artifact_name = alternative_artifact_name
                else:
                    # set artifact name according to artifacttype
                    artifact.artifact_name = artifacttype_object.artifacttype_name

                # set auto values
                artifact.artifact_created_by_user_id = request_user
                artifact.artifact_modified_by_user_id = request_user

                # save object
                artifact.save()

                # save manytomany
                form.save_m2m()

                """object counter / log"""

                # autoincrement counter
                artifacts_created_counter += 1

                # call logger
                artifact.logger(str(request_user), ' ARTIFACT_CREATOR_EXECUTED')

    """finish artifact creator"""

    # call final message
    message_user(
        request_user,
        f'{artifacts_created_counter} artifacts created for {system_artifacts_created_counter} systems.',
        constants.SUCCESS,
    )

    # call logger
    info_logger(
        str(request_user),
        f' ARTIFACT_CREATOR_STATUS'
        f' artifacts_created:{artifacts_created_counter}'
        f'|systems_affected:{system_artifacts_created_counter}',
    )

    # call logger
    debug_logger(str(request_user), ' ARTIFACT_CREATOR_END')
def system_modificator_async(request_post, request_user):
    """Modify many systems at once.

    Reads system identifiers either from a multiline selector (system ids)
    or from a large text area (system names, one per line), modifies each
    distinct existing system via SystemModificatorForm, adds the selected
    tags and companies, and reports modified / skipped / faulty counts to
    the requesting user.

    Args:
        request_post: POST data with 'systemlist', 'tag', 'company' lists
            plus the SystemModificatorForm fields.
        request_user: user who triggered the modification; used for
            messages, logging and the modified-by field.
    """

    # call logger
    debug_logger(str(request_user), ' SYSTEM_MODIFICATOR_BEGIN')

    # extract lines from systemlist (list results either from request object via multiline selector or via large text area)
    lines = request_post.getlist('systemlist')
    system_char_field_used = False
    # if large text area was used, the list contains only one entry with (one or more) line breaks
    if len(lines) == 1 and ("\r\n" in lines[0] or not lines[0].isdigit()):
        system_char_field_used = True
        lines = lines[0].splitlines()

    # count lines (needed for messages)
    number_of_lines = len(lines)

    # counters and skip list (needed for messages)
    systems_modified_counter = 0
    systems_skipped_counter = 0
    lines_faulty_counter = 0
    skipped_systems = []

    # extract tags and companies once — they are identical for every line
    # (the original re-extracted them inside the loop on every iteration)
    tags = request_post.getlist('tag')
    companies = request_post.getlist('company')

    # iterate over lines
    for line in lines:

        # skip empty lines
        if line == '':
            lines_faulty_counter += 1
            warning_logger(str(request_user), ' SYSTEM_MODIFICATOR_ROW_EMPTY')
            continue

        # check line for string
        if not isinstance(line, str):  # coverage: ignore branch
            lines_faulty_counter += 1
            warning_logger(str(request_user), ' SYSTEM_MODIFICATOR_NO_STRING')
            continue

        # check line for length of string
        if len(line) > 50:
            lines_faulty_counter += 1
            warning_logger(str(request_user), ' SYSTEM_MODIFICATOR_LONG_STRING')
            continue

        # check for existence of system
        if system_char_field_used:
            system = System.objects.filter(system_name=line)
        else:
            system = System.objects.filter(system_id=line)

        """ handling non-existing or non-unique systems 2 """

        # system does not exist
        if system.count() == 0:
            systems_skipped_counter += 1
            skipped_systems.append(line)
            error_logger(
                str(request_user),
                ' SYSTEM_MODIFICATOR_SYSTEM_DOES_NOT_EXISTS '
                + 'system_id/system_name:'
                + line,
            )
            # leave this loop because system with this systemname does not exist
            continue
        # more than one system exists
        elif system.count() > 1:
            systems_skipped_counter += 1
            skipped_systems.append(line)
            error_logger(
                str(request_user),
                ' SYSTEM_MODIFICATOR_SYSTEM_NOT_DISTINCT '
                + 'system_id/system_name:'
                + line,
            )
            # leave this loop because system with this systemname is not distinct
            continue

        # get existing system
        if system_char_field_used:
            system = System.objects.get(system_name=line)
        else:
            system = System.objects.get(system_id=line)

        # create form with request data
        form = SystemModificatorForm(
            request_post,
            instance=system,
            use_system_charfield=system_char_field_used,
        )

        # modify system
        if form.is_valid():

            """ object modification """

            # don't save form yet
            system = form.save(commit=False)

            # set auto values
            system.system_modified_by_user_id = request_user
            system.system_modify_time = timezone.now()

            # save object
            system.save()

            """ object counter / log """

            # autoincrement counter
            systems_modified_counter += 1

            # call logger
            system.logger(str(request_user), ' SYSTEM_MODIFICATOR_EXECUTED')

            """ many 2 many """

            # TODO: add check for empty list
            # add tags (using save_m2m would replace existing tags)
            for tag_id in tags:
                tag = Tag.objects.get(tag_id=tag_id)
                system.tag.add(tag)
            # add companies (same additive semantics as tags)
            for company_id in companies:
                company = Company.objects.get(company_id=company_id)
                system.company.add(company)

    """ call final messages """

    # finish message
    message_user(request_user, 'System modificator finished', constants.SUCCESS)

    # number messages
    if systems_modified_counter > 0:
        if systems_modified_counter == 1:
            message_user(
                request_user,
                str(systems_modified_counter) + ' system was modified.',
                constants.SUCCESS,
            )
        else:
            message_user(
                request_user,
                str(systems_modified_counter) + ' systems were modified.',
                constants.SUCCESS,
            )
    if systems_skipped_counter > 0:
        if systems_skipped_counter == 1:
            message_user(
                request_user,
                str(systems_skipped_counter)
                + ' system was skipped. '
                + str(skipped_systems),
                constants.ERROR,
            )
        else:
            message_user(
                request_user,
                str(systems_skipped_counter)
                + ' systems were skipped. '
                + str(skipped_systems),
                constants.ERROR,
            )
    if lines_faulty_counter > 0:
        if lines_faulty_counter == 1:
            message_user(
                request_user,
                str(lines_faulty_counter)
                + ' line out of '
                + str(number_of_lines)
                + ' lines was faulty (see log file for details).',
                constants.WARNING,
            )
        else:
            message_user(
                request_user,
                str(lines_faulty_counter)
                + ' lines out of '
                + str(number_of_lines)
                + ' lines were faulty (see log file for details).',
                constants.WARNING,
            )

    # call logger
    info_logger(
        str(request_user),
        ' SYSTEM_MODIFICATOR_STATUS '
        + 'modified:' + str(systems_modified_counter)
        + '|' + 'skipped:' + str(systems_skipped_counter)
        + '|' + 'faulty_lines:' + str(lines_faulty_counter),
    )

    # call logger
    debug_logger(str(request_user), " SYSTEM_MODIFICATOR_END")
def csv_entry_import_async(system_id, file_name, field_mapping, request_user, case_id=None):
    """Import timeline entries for one system from an uploaded CSV file.

    Reads the CSV at ``file_name`` (header row skipped), creates one Entry
    per data row, deduplicates rows via a SHA-1 over time/type/content,
    optionally attaches tags from a mapped tag column, deletes the file on
    success and reports created / failed / duplicate counts to the user.

    Args:
        system_id: primary key of the System the entries belong to.
        file_name: path of the uploaded CSV file (deleted after import).
        field_mapping: dict mapping 'entry_time', 'entry_type',
            'entry_content' and 'entry_tag' to column indices
            ('entry_tag' == -1 means no tag column is mapped).
        request_user: user who triggered the import.
        case_id: optional Case primary key to associate with each entry.
    """
    row_count = 0
    fail_count = 0
    dup_count = 0

    system = System.objects.get(system_id=system_id)
    case = Case.objects.get(case_id=case_id) if case_id else None

    try:
        with open(file_name, newline='') as csvfile:
            spamreader = csv.reader(csvfile, delimiter=',', quotechar='"')
            # skip the header row; the None default prevents an uncaught
            # StopIteration when the file is completely empty (original bug)
            next(spamreader, None)
            for row in spamreader:
                try:
                    row_count += 1
                    entry = Entry()
                    entry.system = system
                    entry.entry_created_by_user_id = request_user
                    entry.entry_modified_by_user_id = request_user
                    # hash time/type/content to detect duplicate rows
                    m = hashlib.sha1()
                    entry.entry_time = row[field_mapping['entry_time']]
                    m.update(entry.entry_time.encode())
                    entry.entry_type = row[field_mapping['entry_type']]
                    m.update(entry.entry_type.encode())
                    entry.entry_content = row[field_mapping['entry_content']]
                    m.update(entry.entry_content.encode())
                    entry.entry_sha1 = m.hexdigest()
                    entry.case = case
                    if not Entry.objects.filter(entry_sha1=m.hexdigest()).exists():
                        entry.full_clean()
                        entry.save()
                        # optional tag column: -1 means 'not mapped';
                        # the cell is expected to hold a Python list literal
                        if field_mapping['entry_tag'] != -1:
                            tags = ast.literal_eval(row[field_mapping['entry_tag']])
                            for tag_name in tags:
                                # only attach tags that resolve to exactly one object
                                tag = Tag.objects.filter(tag_name=tag_name)
                                if len(tag) == 1:
                                    entry.tag.add(tag[0])
                    else:
                        dup_count += 1
                        continue
                except ValidationError as e:
                    debug_logger(
                        str(request_user),
                        f' ENTRY_CSV_IMPORT'
                        f' ERROR: {e}'
                    )
                    fail_count += 1
                    continue
        # remove the uploaded file only after a successful pass
        os.remove(file_name)
    except FileNotFoundError:
        info_logger(
            str(request_user),
            ' ENTRY_CSV_IMPORT'
            ' ERROR: File not found'
        )
        message_user(
            request_user,
            "Could not import the CSV file. Maybe the upload wasn't successful or the file was deleted.",
            constants.ERROR
        )
        return

    # final user message depending on failure / duplicate counts
    if fail_count == 0 and dup_count != 0:
        message_user(
            request_user,
            f'Imported {row_count-dup_count} entries for system "{system.system_name}". Removed {dup_count} duplicates.',
            constants.SUCCESS
        )
    elif fail_count == 0:
        message_user(
            request_user,
            f'Imported {row_count} entries for system "{system.system_name}".',
            constants.SUCCESS
        )
    else:
        message_user(
            request_user,
            f'Could not import {fail_count} of {row_count} entries for system "{system.system_name}".',
            constants.WARNING
        )

    # call logger
    info_logger(
        str(request_user),
        f' ENTRY_CSV_IMPORT'
        f' created:{row_count}'
        f' failed:{fail_count}'
        f' duplicates:{dup_count}'
    )
def final_messages(systems_touched_counter, systems_skipped_counter, lines_faulty_counter, skipped_systems, number_of_lines, request_user, workflow_count=0, workflows_applied=0):
    """ final messages for 'system_creator' and 'system_modificator' """

    # touched systems: pick singular or plural wording, then send once
    if systems_touched_counter > 0:
        touched_text = (
            f'{systems_touched_counter} system was created / modified.'
            if systems_touched_counter == 1
            else f'{systems_touched_counter} systems were created / modified.'
        )
        message_user(request_user, touched_text, constants.SUCCESS)

    # skipped systems: include the list of skipped identifiers
    if systems_skipped_counter > 0:
        skipped_text = (
            f'{systems_skipped_counter} system was skipped. {skipped_systems}'
            if systems_skipped_counter == 1
            else f'{systems_skipped_counter} systems were skipped. {skipped_systems}'
        )
        message_user(request_user, skipped_text, constants.WARNING)

    # faulty lines: report against the total number of submitted lines
    if lines_faulty_counter > 0:
        faulty_text = (
            f'{lines_faulty_counter} line out of {number_of_lines} lines was faulty (see log file for details).'
            if lines_faulty_counter == 1
            else f'{lines_faulty_counter} lines out of {number_of_lines} lines were faulty (see log file for details).'
        )
        message_user(request_user, faulty_text, constants.WARNING)

    # workflows: success only if every touched system got every workflow
    if systems_touched_counter > 0 and workflow_count > 0:
        expected_applications = systems_touched_counter * workflow_count
        if workflows_applied == expected_applications:
            message_user(request_user, 'System creator/modificator workflows applied.', constants.SUCCESS)
        else:
            message_user(request_user, 'Could not apply all workflows.', constants.WARNING)

    # return to 'system_creator' and 'system_modificator'
    return
def systemsorted_async(request_user):
    """Export markdown reports for all systems, sorted by system name.

    Writes one report per exportable system, rebuilds the 'Systems'
    section of mkdocs.yml with the generated pages, and notifies the
    requesting user when finished.

    Args:
        request_user: user who triggered the export; used for messages,
            logging and the per-user export directory.
    """

    # call directory cleaning function
    clean_directory.clean_directory(str(request_user))

    # get all systems
    systems = System.objects.all().order_by('system_name')

    # build the list used for the 'Systems' section in mkdocs.yml:
    # one single-entry dict per system, "fqdn (id)" -> report path
    systemlist = []
    for system in systems:
        # skip system depending on export variable
        if system.system_export_markdown == False:
            continue
        # call writing function (and get return values)
        rid, rfqdn, rpath = write_report_systemsorted(system, str(request_user))
        # build string as key for the 1-value dict (needed for mkdocs.yml)
        index = rfqdn + " (" + rid + ")"
        systemlist.append({index: rpath})

    # get config model
    model = SystemExporterMarkdownConfigModel.objects.get(
        system_exporter_markdown_config_name='SystemExporterMarkdownConfig')

    # get path for mkdocs.yml
    mkdconfpath = model.markdown_path + "/mkdocs.yml"

    # read content (dictionary) of mkdocs.yml if existent, else create dummy content
    mkdconfdict = read_or_create_mkdocs_yml.read_or_create_mkdocs_yml(
        str(request_user), mkdconfpath)

    # get pages list
    mkdconflist = mkdconfdict['pages']

    # find the index of the 'Systems' subsection; defaults to 0 when no
    # entry matches (same fallback as the original implementation)
    j = 0
    for i, item in enumerate(mkdconflist):
        try:
            item['Systems']
            j = i
        # narrowed from a bare 'except:' — only a missing key or a
        # non-mapping pages entry is expected here
        except (KeyError, TypeError):  # coverage: ignore branch
            pass

    # set at dict 'Systems' in list mkdconflist at index j (replace section 'Systems')
    mkdconflist[j]['Systems'] = systemlist

    # set pages with old entries and new 'Systems' section
    mkdconfdict['pages'] = mkdconflist

    # write mkdocs.yml; the context manager guarantees the file is closed
    # even if yaml.dump raises (original leaked the handle in that case)
    with open(mkdconfpath, "w") as mkdconffile:
        yaml.dump(mkdconfdict, mkdconffile)

    # finish message
    message_user(request_user, 'System exporter markdown (sorted by system) finished', constants.SUCCESS)

    # call logger
    debug_logger(str(request_user), " SYSTEM_EXPORTER_MARKDOWN_SYSTEMSORTED_END")