def archiveReportShort(pkR, comment, _logger):
    """Archive the Results object identified by primary key ``pkR``.

    Validates that the analysis is complete and that the configured backup
    media location is valid, ensures a writeable archive directory exists
    under it, then delegates to archiveReport() and refreshes the result's
    disk-usage record.

    Returns True on success.  Logs and re-raises any exception (incomplete
    analysis, missing/unconfigured backup media, unwriteable archive dir).
    """
    try:
        result = shortcuts.get_object_or_404(models.Results, pk=pkR)
        bk = models.dm_reports.get()
        if result.status != "Completed":
            raise Exception(
                "analysis is not completed. Try again once the analysis has completed")
        # BUGFIX: the original guard `bk.location != (None or 'None' or '' or ' ' or '/')`
        # evaluated the parenthesized chain to 'None' (first truthy operand), so
        # only the literal string 'None' was rejected and None/''/' '/'/' slipped
        # through.  Use a membership test over all invalid sentinels instead.
        if bk.location not in (None, 'None', '', ' ', '/'):
            # Test for valid drive location and reset if invalid.
            if not os.path.isdir(bk.location):
                invalid_location = bk.location
                bk.location = 'None'
                bk.save()
                raise Exception("backup media is not available: %s" % invalid_location)
            # Test for writeable directory.
            # Try to create default "archivedReports", then try "archivedReports_".
            # Previous versions of code created first directory with root ownership.
            for directory in arc_directory:
                arc_dir = os.path.join(bk.location, directory)
                if not os.path.isdir(arc_dir):
                    # Make the directory; clear the umask so permissions are open.
                    old_mask = os.umask(0000)
                    os.makedirs(arc_dir)
                    os.umask(old_mask)
                if os.access(arc_dir, os.W_OK | os.X_OK):
                    break
                else:
                    arc_dir = None
            if arc_dir:
                archiveReport(pkR, arc_dir, comment, _logger)
            else:
                raise Exception(
                    "Permission denied writing to backup media: %s" % bk.location)
            # Update diskusage in this result's database object.
            setResultDiskspace(pkR)
        else:
            raise Exception("backup media not configured.")
    except Exception:
        _logger.exception(traceback.format_exc())
        raise
    else:
        return True
def archiveReportShort(pkR, comment, _logger):
    """Archive the Results object identified by primary key ``pkR``.

    Validates that the analysis is complete and that the configured backup
    media location is valid, ensures a writeable archive directory exists
    under it, then delegates to archiveReport() and refreshes the result's
    disk-usage record.

    Returns True on success.  Logs and re-raises any exception (incomplete
    analysis, missing/unconfigured backup media, unwriteable archive dir).
    """
    try:
        result = shortcuts.get_object_or_404(models.Results, pk=pkR)
        bk = models.dm_reports.get()
        if result.status != "Completed":
            raise Exception(
                "analysis is not completed. Try again once the analysis has completed")
        # BUGFIX: the original guard `bk.location != (None or 'None' or '' or ' ' or '/')`
        # evaluated the parenthesized chain to 'None' (first truthy operand), so
        # only the literal string 'None' was rejected and None/''/' '/'/' slipped
        # through.  Use a membership test over all invalid sentinels instead.
        if bk.location not in (None, 'None', '', ' ', '/'):
            # Test for valid drive location and reset if invalid.
            if not os.path.isdir(bk.location):
                invalid_location = bk.location
                bk.location = 'None'
                bk.save()
                raise Exception("backup media is not available: %s" % invalid_location)
            # Test for writeable directory.
            # Try to create default "archivedReports", then try "archivedReports_".
            # Previous versions of code created first directory with root ownership.
            for directory in arc_directory:
                arc_dir = os.path.join(bk.location, directory)
                if not os.path.isdir(arc_dir):
                    # Make the directory; clear the umask so permissions are open.
                    old_mask = os.umask(0000)
                    os.makedirs(arc_dir)
                    os.umask(old_mask)
                if os.access(arc_dir, os.W_OK | os.X_OK):
                    break
                else:
                    arc_dir = None
            if arc_dir:
                archiveReport(pkR, arc_dir, comment, _logger)
            else:
                raise Exception(
                    "Permission denied writing to backup media: %s" % bk.location)
            # Update diskusage in this result's database object.
            setResultDiskspace(pkR)
        else:
            raise Exception("backup media not configured.")
    except Exception:
        _logger.exception(traceback.format_exc())
        raise
    else:
        return True
def pruneReport(pkR, comment, _logger):
    """Prune files from the report identified by ``pkR`` using the currently
    selected prune group, then update the result's disk-usage record.

    When the request originates from the daily auto-action (detected via the
    ``comment`` text), routine "nothing removed" entries are suppressed so the
    Report's log is not flooded; actual deletions are always logged.

    Returns True on success.  Logs and re-raises any exception, including
    when some or all selected files fail to be removed.
    """
    logger = _logger
    try:
        # There should only ever be one dm_reports object.
        bk = models.dm_reports.get()
        # Determine if this pruning request is from autoAction.  If so, we only
        # log to the Report's log if there were files removed.  Otherwise, the
        # Report's log will fill with a daily entry.
        enable_logging = "auto-action" not in comment
        # bk.pruneLevel holds the selected prune group's name.
        pruneGroup = models.dm_prune_group.objects.get(name=bk.pruneLevel)
        # Get the full path to report directory.
        res = shortcuts.get_object_or_404(models.Results, pk=pkR)
        reportDir = res.get_report_dir()
        # The pruneGroup stores a comma-separated list of rule pks; resolve each
        # to its file-selection pattern.
        pruneRules = []
        for element in pruneGroup.ruleNums.split(','):
            if len(element) > 0:
                obj = models.dm_prune_field.objects.get(pk=int(element))
                pruneRules.append(str(obj.rule))
        if enable_logging:
            res.updateMetaData(
                "Pruning",
                "Pruning using prune group %s: %s" % (bk.pruneLevel, pruneRules),
                0, comment, logger=logger)
        toDel, errFiles, totalSize, numFilesDel = prune_directory(
            reportDir, pruneRules, logger)
        #
        # Log the pruning results
        #
        if len(toDel) > 0 and len(errFiles) == 0:
            status = "Pruned"
            # BUGFIX: float(totalSize / 1024) truncated to whole KB under
            # integer division; convert to float before dividing so the
            # %7.2f format shows fractional KB.
            info = "Pruning completed. %7.2f KB deleted" % (float(totalSize) / 1024)
            comment = "%d files deleted" % numFilesDel
            # Even when enable_logging is False, we want this message to be logged.
            res.updateMetaData(status, info, totalSize, comment, logger=logger)
        elif len(toDel) > 0 and (len(errFiles) == len(toDel)):
            # All files had an error being removed.
            raise Exception(
                "All %d files failed to be removed. See /var/log/ion/data_management.log."
                % len(errFiles))
        elif len(errFiles) > 0 and len(errFiles) < len(toDel):
            # Some files had an error being removed.
            raise Exception(
                "%d of %d files failed to be removed. See /var/log/ion/data_management.log."
                % (len(errFiles), len(toDel)))
        else:
            # No files were found to remove.
            status = "Pruned"
            info = "Pruning completed. No valid files to remove"
            comment = "%d files deleted" % numFilesDel
            if enable_logging:
                res.updateMetaData(status, info, totalSize, comment, logger=logger)
        # Update diskusage for this result's database object.
        setResultDiskspace(pkR)
    except Exception:
        logger.exception(traceback.format_exc())
        raise
    else:
        return True
def pruneReport(pkR, comment, _logger):
    """Prune files from the report identified by ``pkR`` using the currently
    selected prune group, then update the result's disk-usage record.

    When the request originates from the daily auto-action (detected via the
    ``comment`` text), routine "nothing removed" entries are suppressed so the
    Report's log is not flooded; actual deletions are always logged.

    Returns True on success.  Logs and re-raises any exception, including
    when some or all selected files fail to be removed.
    """
    logger = _logger
    try:
        # There should only ever be one dm_reports object.
        bk = models.dm_reports.get()
        # Determine if this pruning request is from autoAction.  If so, we only
        # log to the Report's log if there were files removed.  Otherwise, the
        # Report's log will fill with a daily entry.
        enable_logging = "auto-action" not in comment
        # bk.pruneLevel holds the selected prune group's name.
        pruneGroup = models.dm_prune_group.objects.get(name=bk.pruneLevel)
        # Get the full path to report directory.
        res = shortcuts.get_object_or_404(models.Results, pk=pkR)
        reportDir = res.get_report_dir()
        # The pruneGroup stores a comma-separated list of rule pks; resolve each
        # to its file-selection pattern.
        pruneRules = []
        for element in pruneGroup.ruleNums.split(','):
            if len(element) > 0:
                obj = models.dm_prune_field.objects.get(pk=int(element))
                pruneRules.append(str(obj.rule))
        if enable_logging:
            res.updateMetaData(
                "Pruning",
                "Pruning using prune group %s: %s" % (bk.pruneLevel, pruneRules),
                0, comment, logger=logger)
        toDel, errFiles, totalSize, numFilesDel = prune_directory(
            reportDir, pruneRules, logger)
        #
        # Log the pruning results
        #
        if len(toDel) > 0 and len(errFiles) == 0:
            status = "Pruned"
            # BUGFIX: float(totalSize / 1024) truncated to whole KB under
            # integer division; convert to float before dividing so the
            # %7.2f format shows fractional KB.
            info = "Pruning completed. %7.2f KB deleted" % (float(totalSize) / 1024)
            comment = "%d files deleted" % numFilesDel
            # Even when enable_logging is False, we want this message to be logged.
            res.updateMetaData(status, info, totalSize, comment, logger=logger)
        elif len(toDel) > 0 and (len(errFiles) == len(toDel)):
            # All files had an error being removed.
            raise Exception(
                "All %d files failed to be removed. See /var/log/ion/data_management.log."
                % len(errFiles))
        elif len(errFiles) > 0 and len(errFiles) < len(toDel):
            # Some files had an error being removed.
            raise Exception(
                "%d of %d files failed to be removed. See /var/log/ion/data_management.log."
                % (len(errFiles), len(toDel)))
        else:
            # No files were found to remove.
            status = "Pruned"
            info = "Pruning completed. No valid files to remove"
            comment = "%d files deleted" % numFilesDel
            if enable_logging:
                res.updateMetaData(status, info, totalSize, comment, logger=logger)
        # Update diskusage for this result's database object.
        setResultDiskspace(pkR)
    except Exception:
        logger.exception(traceback.format_exc())
        raise
    else:
        return True