def pg_list(name, default=''):
    """Retrieve a serialized (comma-separated) list from a POST request.
    Duplicated elements are removed.
    """
    # FIXME: a hostgroup containing hundreds of hosts may exceed POST size
    s = request.POST.get(name, default)
    li = clean(s).strip().split(',')
    return list(set(li))
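# Hedged usage sketch: `request` and `clean` come from the surrounding
# module (e.g. a thread-local request object such as bottle.request) and
# are not defined in this snippet. The split/deduplicate step behaves
# roughly like this for a POST field such as "hosts=web1,web2,web2":
raw = 'web1,web2,web2'
items = list(set(raw.strip().split(',')))
print(sorted(items))  # ['web1', 'web2'] - duplicates removed, order not guaranteed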
def clean(value, rules):
    """Cleans the value using the specified rules.

    :param value: the string value to be cleaned
    :type value: str
    :param rules: the clean rules to be used
    :type rules: list of cleanit.rule.Rule
    :return: the cleaned value after applying all the rules
    :rtype: str
    """
    return core.clean(value, rules, replacement='')
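# Hedged sketch (not cleanit's real API): the Rule objects expected above
# come from cleanit.rule and are not shown here. To illustrate the wrapper
# pattern only, stand-in "rules" are modelled below as compiled regexes
# applied in sequence with an empty replacement string.
import re

def _toy_clean(value, rules, replacement=''):
    for rule in rules:
        value = rule.sub(replacement, value)
    return value

toy_rules = [re.compile(r'\[.*?\]\s*'), re.compile(r'♪+\s*')]
print(_toy_clean('[ads] ♪ hello world', toy_rules))  # 'hello world'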
from sensitivity import test
from post import plot
from core import clean, log

# Clean up remnant files from the last run
clean()

# Define the parameters and values to investigate
test_params = [{'epoch': [50, 100]}, {'latent': [20, 30]}]

# Perform the sensitivity analysis
test(test_params)

# Save the resulting plots in the temp folder
plot(test_params)
def pg(name, default=''):
    """Retrieve an element from a POST request."""
    s = request.POST.get(name, default)[:64]
    return clean(s).strip()
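# Hedged sketch: the [:64] slice caps the raw field before cleaning, so an
# over-long value is silently truncated rather than rejected. With a toy
# 100-character value:
raw = 'x' * 100
print(len(raw[:64]))  # 64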