# Parse the command line with the OptionParser constructed earlier in the file.
options, args = parser.parse_args()

# Check to see if a config file was specified on the command line.
# If not, circular-shelf.config is used.
if len(args) == 0:
  configfile = 'circular-shelf.config'
elif len(args) == 1:
  configfile = args[0]
else:
  # More than one positional argument: show usage and bail out (Python 2
  # print statement).
  print '\nUsage:  python circular-shelf.py [FILE.CONFIG] [-b|--smooth-beta] [-d|--dirichlet-center] [-s|--sloped]\n'
  sys.exit(0)

# Create a netCDF file according to the information in the config file.
# The [grid] section supplies the grid dimensions and spacings; presumably
# ewn/nsn/upn are cell counts in x/y/z and dew/dns are the x/y spacings --
# TODO confirm against the model's config documentation.
parser = ConfigParser()
parser.read(configfile)
nx = int(parser.get('grid','ewn'))
ny = int(parser.get('grid','nsn'))
nz = int(parser.get('grid','upn'))
dx = float(parser.get('grid','dew'))
dy = float(parser.get('grid','dns'))
filename = parser.get('CF input', 'name')

print 'Writing', filename
# Older netCDF bindings do not accept the 'format' keyword and raise
# TypeError; fall back to the library's default format in that case.
try:
  netCDFfile = NetCDFFile(filename,'w',format='NETCDF3_CLASSIC')
except TypeError:
  netCDFfile = NetCDFFile(filename,'w')

netCDFfile.createDimension('time',1)
netCDFfile.createDimension('x1',nx)
netCDFfile.createDimension('y1',ny)
Example #2
0
else:
    print '\nUsage:  python circular-shelf.py [FILE.CONFIG] [-b|--smooth-beta] [-d|--dirichlet-center] [-s|--sloped]\n'
    sys.exit(0)

# Check to see if #procs was specified; relevant when running the code in
# parallel. If not, a serial run (#procs == 1) is performed. To run in
# parallel, the config file must be specified, but the number of processors
# need not be.
if len(sys.argv) > 2:
    # Kept as a string -- presumably interpolated into a launch command
    # later; TODO confirm at the call site.
    nprocs = sys.argv[2]
else:
    nprocs = '1'

# Create a netCDF file according to the information in the config file.
parser = ConfigParser()
parser.read(configfile)
nx = int(parser.get('grid', 'ewn'))
ny = int(parser.get('grid', 'nsn'))
nz = int(parser.get('grid', 'upn'))
dx = float(parser.get('grid', 'dew'))
dy = float(parser.get('grid', 'dns'))
filename = parser.get('CF input', 'name')

# Python 2 print statement.
print 'Writing', filename
# Older netCDF bindings lack the 'format' keyword; retry without it.
try:
    netCDFfile = NetCDFFile(filename, 'w', format='NETCDF3_CLASSIC')
except TypeError:
    netCDFfile = NetCDFFile(filename, 'w')

netCDFfile.createDimension('time', 1)
netCDFfile.createDimension('x1', nx)
netCDFfile.createDimension('y1', ny)
Example #3
0
else:
    print "\nUsage:  python circular-shelf.py [FILE.CONFIG] [-b|--smooth-beta] [-d|--dirichlet-center] [-s|--sloped]\n"
    sys.exit(0)

# Check to see if #procs was specified; relevant when running the code in
# parallel. If not, a serial run (#procs == 1) is performed. To run in
# parallel, the config file must be specified, but the number of processors
# need not be.
if len(sys.argv) > 2:
    # Kept as a string -- presumably interpolated into a launch command
    # later; TODO confirm at the call site.
    nprocs = sys.argv[2]
else:
    nprocs = "1"

# Create a netCDF file according to the information in the config file.
parser = ConfigParser()
parser.read(configfile)
nx = int(parser.get("grid", "ewn"))
ny = int(parser.get("grid", "nsn"))
nz = int(parser.get("grid", "upn"))
dx = float(parser.get("grid", "dew"))
dy = float(parser.get("grid", "dns"))
filename = parser.get("CF input", "name")

# Python 2 print statement.
print "Writing", filename
# Older netCDF bindings lack the 'format' keyword; retry without it.
try:
    netCDFfile = NetCDFFile(filename, "w", format="NETCDF3_CLASSIC")
except TypeError:
    netCDFfile = NetCDFFile(filename, "w")

netCDFfile.createDimension("time", 1)
netCDFfile.createDimension("x1", nx)
netCDFfile.createDimension("y1", ny)
Example #4
0
File: daf.py Project: Daudau/daf
def main(argv=None):
    """Entry point for the DAF disk-analysis tool.

    Parses command-line options, validates the input disk path and the
    optional users-hives specification, optionally resumes a previous
    session from its folder, then builds and runs a ``session.Session``.

    Args:
        argv: optional list of argument strings.  BUG FIX: this parameter
            was previously ignored (``parse_args()`` always read
            ``sys.argv``); it is now forwarded so callers can supply
            arguments programmatically.  ``None`` keeps the old behaviour.
    """
    parser = OptionParser()
    parser.add_option("-i", "--input-disk", dest="input_disk",
                      default=None,
                      help="Set the base path of the mounted disk to analyze",
                      metavar="DISK_PATH")
    parser.add_option("-n", "--session-name", dest="session_name",
                      default="default_session",
                      help="Set the session name to SESSION_NAME",
                      metavar="SESSION_NAME")
    parser.add_option("-d", "--session-directory", dest="session_directory",
                      default="",
                      help="Put the files created during session into DIRECTORY",
                      metavar="DIRECTORY")
    parser.add_option("-p", "--plugins-directory", dest="plugins_directory",
                      default="plugins",
                      help="Specify the directory of the plugins")
    parser.add_option("-y", "--system-hive", dest="system_hive", default=None,
                      help="Specify the absolute path to the system hive")
    parser.add_option("-o", "--software-hive", dest="software_hive",
                      default=None,
                      help="Specify the absolute path to the software hive")
    parser.add_option("-a", "--sam-hive", dest="sam_hive", default=None,
                      help="Specify the absolute path to the sam hive")
    parser.add_option("-u", "--users-hives", dest="users_hives", default=None,
                      help="Specify a list of (username, user_hive) couples")
    parser.add_option("-c", "--continue", dest="cont",
                      help="Continue a previous session by giving its SESSION_FOLDER",
                      metavar="SESSION_FOLDER")
    # FIX: forward argv instead of always reading sys.argv.
    (options, args) = parser.parse_args(argv)

    if not options.input_disk:
        print("You need to specify the path to the disk you want to analyze")
        sys.exit(-1)
    else:
        # Normalise to an absolute path with a trailing slash.
        options.input_disk = os.path.abspath(options.input_disk)
        if options.input_disk[-1] != "/":
            options.input_disk = options.input_disk + "/"

    users_hives = None
    if options.users_hives:
        # Expected shape: "[(user, /path/to/hive), (user2, /path/to/hive2)]".
        # FIX: raw string -- the unescaped "\[", "\(", "\s" sequences were
        # invalid string escapes (DeprecationWarning on modern Pythons).
        pattern = re.compile(r"^\[(\([a-zA-Z0-9.\s-]+,[a-zA-Z0-9./\s-]+\)(\s*,\s*)?)+\]$")
        if not pattern.match(options.users_hives):
            print("Wrong pattern for given users hives.")
            sys.exit(-1)
        # Split on "(" and ")" to recover each (username, hive_path) couple.
        users_hives = []
        for couples in options.users_hives.split("(")[1::]:
            couple = couples.split(")")[0].split(",")
            users_hives += [(couple[0], couple[1])]

    if options.cont:
        # Resume a previous session: its folder must exist and contain both
        # the config and the results ini files.
        if not os.path.isdir(options.cont):
            print("You specified a session folder that doesn't exist: " + options.cont)
            sys.exit(-1)
        if not options.cont[-1] == "/":
            options.cont += "/"
        if not os.path.isfile(options.cont + "session_config.ini") or not os.path.isfile(options.cont + "session_results.ini"):
            print("The folder you specified to continue a session is missing one of the needed files (session_config.ini or session_results.ini)")
            sys.exit(-1)
        options.session_directory = options.cont
        # NOTE(review): Python 2 API (ConfigParser.SafeConfigParser) -- the
        # enclosing file presumably imports the Py2 module; revisit when
        # porting to Python 3 (configparser.ConfigParser).
        parser = ConfigParser.SafeConfigParser()
        parser.read(options.cont + "session_config.ini")
        options.session_name = parser.get("session_information", "session_name")

    # Create new user session
    user_session = session.Session(options.input_disk, options.session_name,
        options.session_directory, options.plugins_directory,
        options.system_hive, options.software_hive, options.sam_hive,
        users_hives)

    # Load the different plugins and their commands
    user_session.launch_plugins()

    # Run the renderer
    user_session.run_renderer()
Example #5
0
except Exception, e:  # get error back
    logger.error(
        'ERROR: No config found! Check your inputs.conf in local.')  # logger
    logger.error('ERROR: %e' % e)  # logger
    splunk.Intersplunk.generateErrorResults(
        ': No config found! Check your inputs.conf in local.'
    )  # print the error into Splunk UI
    sys.exit()  # exit on error

# use user provided options or get [default] stanza options
try:  # lets do it
    if myDebug == 'yes':
        logger.info('read the default options from inputs.conf...')  # logger
    # NOTE(review): same condition checked twice back-to-back; the two
    # debug messages could share one 'if myDebug' block.
    if myDebug == 'yes':
        logger.info('reading server from inputs.conf...')  # logger
    # Read the LDAP server from the chosen stanza of inputs.conf.
    server = parser.get(section_name, 'server')

# NOTE(review): bare 'except' also swallows SystemExit/KeyboardInterrupt;
# catching the ConfigParser error classes explicitly would be safer.
except:  # get error back
    logger.error('ERROR: unable to get server from inputs.conf')  # logger
    splunk.Intersplunk.generateErrorResults(
        ': unable to get server from inputs.conf'
    )  # print the error into Splunk UI
    sys.exit()  # exit on error

try:
    # always check username and password in inputs.conf, never provided by user!
    if myDebug == 'yes':
        logger.info('reading user/pwd from inputs.conf...')  # logger
    password = parser.get(section_name, 'password')
    binddn = parser.get(section_name, 'binddn')
Example #6
0
def main():
    """Download the latest Territorial Authority (TA) boundary layer.

    Reads connection/layer settings from an ini file (first positional
    argument, default download_ta_bdys.ini), discovers the newest
    "<year>_Geographies" service on an ArcGIS REST endpoint, locates its
    "Territorial Authorities <year>" layer, and prepares OGR drivers plus a
    PostgreSQL connection string.  Python 2 code (urllib2, SafeConfigParser).
    """

    usage = "usage: %prog config_file.ini"
    parser = OptionParser(usage=usage)
    (cmd_opt, args) = parser.parse_args()

    if len(args) == 1:
        config_files = [args[0]]
    else:
        config_files = ['download_ta_bdys.ini']

    # 'parser' is rebound from the OptionParser to the ini-file parser here.
    parser = SafeConfigParser()
    found = parser.read(config_files)
    if not found:
        sys.exit('Could not load config ' + config_files[0])

    # set up logging
    logging.config.fileConfig(config_files[0],
                              defaults={'hostname': socket.gethostname()})
    logger = logging.getLogger()

    logger.info('Starting download TA boundaries')

    # Defaults; most are overridden from the config sections below.
    db_host = None
    db_rolename = None
    db_port = None
    db_user = None
    db_pass = None
    db_schema = 'public'
    layer_name = None
    layer_geom_column = None
    layer_output_srid = 4167
    create_grid = False
    grid_res = 0.05
    shift_geometry = False

    base_uri = parser.get('source', 'base_uri')
    db_name = parser.get('database', 'name')
    db_schema = parser.get('database', 'schema')

    if parser.has_option('database', 'rolename'):
        db_rolename = parser.get('database', 'rolename')
    if parser.has_option('database', 'host'):
        db_host = parser.get('database', 'host')
    if parser.has_option('database', 'port'):
        db_port = parser.get('database', 'port')
    if parser.has_option('database', 'user'):
        db_user = parser.get('database', 'user')
    if parser.has_option('database', 'password'):
        db_pass = parser.get('database', 'password')

    layer_name = parser.get('layer', 'name')
    layer_geom_column = parser.get('layer', 'geom_column')
    if parser.has_option('layer', 'output_srid'):
        layer_output_srid = parser.getint('layer', 'output_srid')
    if parser.has_option('layer', 'create_grid'):
        create_grid = parser.getboolean('layer', 'create_grid')
    if parser.has_option('layer', 'grid_res'):
        grid_res = parser.getfloat('layer', 'grid_res')
    if parser.has_option('layer', 'shift_geometry'):
        shift_geometry = parser.getboolean('layer', 'shift_geometry')

    # Validate the SRID by trying to build a spatial reference from it.
    try:
        output_srs = osr.SpatialReference()
        output_srs.ImportFromEPSG(layer_output_srid)
    except:
        logger.fatal("Output SRID %s is not valid" % (layer_output_srid))
        sys.exit(1)

    if create_grid and not grid_res > 0:
        logger.fatal("Grid resolution must be greater than 0")
        sys.exit(1)

    #
    # Determine TA layer and its year from REST service
    #

    logger.debug(base_uri + '?f=json')
    response = urllib2.urlopen(base_uri + '?f=json')
    capabilities = json.load(response)

    # Pick the "<year>_Geographies" service with the greatest year.
    # NOTE(review): latest_year is stored as int but compared against the
    # str m.group(2); the mixed-type '>' only "works" under Python 2.
    latest_service = None
    latest_year = None
    p = re.compile('((\d{4})\_Geographies)$', flags=re.UNICODE)
    for service in capabilities['services']:
        m = p.search(service['name'])
        if m:
            if not latest_year or m.group(2) > latest_year:
                latest_year = int(m.group(2))
                latest_service = m.group(1)

    logger.debug(base_uri + '/' + latest_service + '/MapServer?f=json')
    response = urllib2.urlopen(base_uri + '/' + latest_service +
                               '/MapServer?f=json')
    capabilities = json.load(response)

    # Find the "Territorial Authorities <year>" layer in that service.
    ta_layer = None
    p = re.compile('^Territorial\sAuthorities\s\d{4}$', flags=re.UNICODE)
    for layer in capabilities['layers']:
        m = p.search(layer['name'])
        if m:
            ta_layer = layer
            break

    if not ta_layer:
        logger.fatal('Could not find the TA layer in ' + base_uri)
        sys.exit(1)

    # REST query returning every feature as JSON in the requested SRID.
    feature_url = base_uri + '/' + latest_service + '/MapServer/' + str(ta_layer['id']) + \
        '/query?f=json&where=1=1&returnGeometry=true&outSR=' + str(layer_output_srid)

    geojson_drv = ogr.GetDriverByName('GeoJSON')
    if geojson_drv is None:
        logger.fatal('Could not load the OGR GeoJSON driver')
        sys.exit(1)

    #
    # Connect to the PostgreSQL database
    #

    pg_drv = ogr.GetDriverByName('PostgreSQL')
    if pg_drv is None:
        logger.fatal('Could not load the OGR PostgreSQL driver')
        sys.exit(1)

    pg_uri = 'PG:dbname=' + db_name
    if db_host:
        pg_uri = pg_uri + ' host=' + db_host
    if db_port:
        pg_uri = pg_uri + ' port=' + db_port
    if db_user:
        # NOTE(review): the line below is corrupted -- an external
        # credential-scrubbing tool replaced the user/password fragments with
        # '******' and collapsed the original try/except that opened the PG
        # datasource. Restore this section from the upstream source before use.
        pg_uri = pg_uri + ' user='******' password='******'t open PG output database: " + str(e))
        sys.exit(1)
# Parse the command line built earlier; only --homepath is consumed here.
(options, args) = parser.parse_args()

# Base directory for the agent: --homepath or the current working directory.
if options.homepath is None:
    homepath = os.getcwd()
else:
    homepath = options.homepath
data_dir = 'data/'

# cAdvisor REST endpoint on the local host (fixed port 8080, API v1.3).
hostname = socket.gethostname()
cadvisor_address = "http://" + hostname + ":8080/api/v1.3/docker/"

# Read the sampling interval from cadvisor/config.ini; fall back to '60s'
# when the option is present but empty.
# NOTE(review): if config.ini does not exist (the os.path.exists branch is
# skipped) 'sampling_interval' is never assigned and the code below raises
# NameError -- presumably the file is guaranteed to exist; confirm.
try:
    if os.path.exists(os.path.join(homepath, "cadvisor", "config.ini")):
        parser = SafeConfigParser()
        parser.read(os.path.join(homepath, "cadvisor", "config.ini"))
        sampling_interval = parser.get('cadvisor', 'sampling_interval')
        if len(sampling_interval) == 0:
            sampling_interval = '60s'
except IOError:
    logger.info("config.ini file is missing")

# '30s'-style values become seconds; a bare number is treated as minutes
# and converted to half the period in seconds (value * 60 / 2).
if sampling_interval[-1:] == 's':
    sampling_interval = int(sampling_interval[:-1])
else:
    sampling_interval = int(sampling_interval) * 60 / 2

counter_time_map = {}
counter = 0
# The default value is 60-1; when cAdvisor starts, the code needs to
# calculate the index because of the sliding window.
index = 59
Example #8
0
def bootstrap():
    """Command-line entry point for the cork application.

    Reads an ini config (path = first positional argument), imports any
    plugins listed under [cork] plugins, then dispatches on an optional
    second argument: init / serve (default) / launchd / interact / packdb.
    Python 2 code (print statements, ConfigParser module).
    """
    import sys
    from os import path
    from ConfigParser import ConfigParser, NoOptionError
    from document import open_document, demo_data
    from optparse import OptionParser

    # The open document lives in a module-level global so the lazy WSGI app
    # below can share one DB connection across requests.
    global doc
    doc = None

    parser = OptionParser()
    parser.add_option('-q', '--quiet', action='store_true', dest='quiet')
    options, args = parser.parse_args()

    # First positional argument is the config path (IndexError if missing).
    cfg_path = args[0]
    # 'parser' is rebound from the OptionParser to the ini-file parser here.
    parser = ConfigParser()
    parser.read(cfg_path)
    dbfile = parser.get('cork', 'dbfile')

    # Import each newline-separated plugin listed under [cork] plugins.
    raw_names = dict(parser.items('cork')).get('plugins', '').split('\n')
    for plugin_name in filter(None, (name.strip() for name in raw_names)):
        __import__(plugin_name)

    # Optional second positional argument selects the subcommand.
    if len(args) > 1:
        cmd = args[1]
    else:
        cmd = 'serve'

    if cmd == 'init':
        # Create the database and populate it with demo data.
        doc = open_document(dbfile)
        demo_data(doc)

    elif cmd == 'serve':
        from werkzeug import run_simple, SharedDataMiddleware
        from server import CorkApp

        if options.quiet:
            # Suppress werkzeug's per-request log lines.
            from werkzeug.serving import BaseRequestHandler
            class QuietHandler(BaseRequestHandler):
                def log_request(self, *args, **kwargs):
                    pass
            handler = QuietHandler
        else:
            handler = None

        host = parser.get('testserver', 'host')
        port = parser.getint('testserver', 'port')

        def lazy_app(environ, start_response):
            """
            WSGI application that creates the real app in a lazy fashion,
            useful to prevent opening two DB connections when running
            with the Werkzeug reloader.
            """
            global doc, cork_app
            if doc is None:
                doc = open_document(dbfile)
                cork_app = CorkApp(doc)
            return cork_app(environ, start_response)

        run_simple(host, port, lazy_app, use_reloader=True, request_handler=handler)
        print # a blank line

    elif cmd == 'launchd':
        # Run under launchd via an inetd-style server; log to a file.
        from wsginetd import serve
        from server import CorkApp
        sys.stderr = open('var/launchd.log', 'a')
        doc = open_document(dbfile)
        serve(CorkApp(doc))

    elif cmd == 'interact':
        # Drop into a REPL with the open document bound to 'doc'.
        import code
        doc = open_document(dbfile)
        code.interact(local={'doc': doc, '__name__': '__console__', '__doc__': None})

    elif cmd == 'packdb':
        # Pack the ZODB storage backing the document.
        doc = open_document(dbfile)
        doc._p_connection.pack()

    else:
        # Python 2 print-to-stderr syntax.
        print>>sys.stderr, "Unknown command \"%s\"" % cmd

    # Close the document if any branch opened one.
    if doc is not None:
        doc.close()
Example #9
0
def action_import_users(name, args):
    """import users from SVN information

    Reads the SVN mailer [authors] section and the SVN htpasswd file,
    cross-checks them (warning on any mismatch), then creates or updates
    the matching users in the lab.llvm.org instance database and saves it.

    Args:
        name: subcommand name, interpolated into the usage text.
        args: argument list; must hold exactly three paths (lab config,
            svn mailer config, svn htpasswd).
    """

    import llvmlab
    import ConfigParser
    from optparse import OptionParser, OptionGroup
    parser = OptionParser("""\
%%prog %s [options] <lab config path> <svn mailer config> <svn htpasswd path>

This command imports user information from the llvm.org SVN information. It will
add any users who are not present in the lab.llvm.org database, and import their
name, email, and SVN login information.\
""" % name)
    (opts, args) = parser.parse_args(args)

    if len(args) != 3:
        parser.error("invalid number of arguments")

    config_path, svn_mailer_path, svn_htpasswd_path = args

    # Load the app object.
    instance = llvmlab.ui.app.App.create_standalone(config_path=config_path)
    data = instance.config.data

    # Load the SVN mailer config.
    parser = ConfigParser.RawConfigParser()
    parser.read(svn_mailer_path)

    # Load the SVN htpasswd file.
    # FIX: use a context manager so the handle is closed even when a
    # malformed line raises, and stop shadowing the 'file' builtin.
    svn_htpasswd = {}
    with open(svn_htpasswd_path) as htpasswd_file:
        for ln in htpasswd_file:
            if ln.strip():
                user, htpasswd, module = ln.split(":")
                svn_htpasswd[user] = (htpasswd, module)

    # Validate that the authors list and the htpasswd list coincide.
    # ('id' renamed to 'user_id' to avoid shadowing the builtin.)
    svn_authors = dict((author, parser.get("authors", author))
                       for author in parser.options("authors"))
    for user_id in set(svn_authors) - set(svn_htpasswd):
        warning("svn mailer authors contains user without htpasswd: %r " % user_id)
    for user_id in set(svn_htpasswd) - set(svn_authors):
        warning("svn contains passwd but no mailer entry: %r " % user_id)

    # Add user entries for any missing users.
    for user_id in sorted(set(svn_authors) & set(svn_htpasswd)):
        # NOTE: rebinds the 'name' parameter, which is no longer needed here.
        name, email = split_name_and_email(svn_authors[user_id])
        htpasswd = svn_htpasswd[user_id][0]
        passhash = hashlib.sha256(htpasswd +
                                  instance.config['SECRET_KEY']).hexdigest()

        # Lookup the user entry.
        user = data.users.get(user_id)

        # Never allow modifying the admin user.
        if user is data.admin_user:
            warning("ignore %r, is the admin user!" % user_id)
            continue

        # Create the user if missing.
        if user is None:
            # Use the users htpasswd (itself) as the initial password.
            user = data.users[user_id] = llvmlab.user.User(user_id, passhash,
                                                           name, email,
                                                           htpasswd)
            note("added user %r" % user_id)
            continue

        # Otherwise, update the users info if necessary.
        for kind, new, old in (('name', name, user.name), ('email', email,
                                                           user.email),
                               ('htpasswd', htpasswd, user.htpasswd)):
            if new != old:
                note("changed %r %s from %r to %r" % (user_id, kind, old, new))
                setattr(user, kind, new)

    # Save the instance data.
    instance.save_data()
def main():
    """Download the latest Territorial Authority (TA) boundary layer.

    Reads connection/layer settings from an ini file (first positional
    argument, default download_ta_bdys.ini), discovers the newest
    "<year>_Geographies" service on an ArcGIS REST endpoint, locates its
    "Territorial Authorities <year>" layer, and prepares OGR drivers plus a
    PostgreSQL connection string.  Python 2 code (urllib2, SafeConfigParser).
    """

    usage = "usage: %prog config_file.ini"
    parser = OptionParser(usage=usage)
    (cmd_opt, args) = parser.parse_args()

    if len(args) == 1:
        config_files = [args[0]]
    else:
        config_files = ["download_ta_bdys.ini"]

    # 'parser' is rebound from the OptionParser to the ini-file parser here.
    parser = SafeConfigParser()
    found = parser.read(config_files)
    if not found:
        sys.exit("Could not load config " + config_files[0])

    # set up logging
    logging.config.fileConfig(config_files[0], defaults={"hostname": socket.gethostname()})
    logger = logging.getLogger()

    logger.info("Starting download TA boundaries")

    # Defaults; most are overridden from the config sections below.
    db_host = None
    db_rolename = None
    db_port = None
    db_user = None
    db_pass = None
    db_schema = "public"
    layer_name = None
    layer_geom_column = None
    layer_output_srid = 4167
    create_grid = False
    grid_res = 0.05
    shift_geometry = False

    base_uri = parser.get("source", "base_uri")
    db_name = parser.get("database", "name")
    db_schema = parser.get("database", "schema")

    if parser.has_option("database", "rolename"):
        db_rolename = parser.get("database", "rolename")
    if parser.has_option("database", "host"):
        db_host = parser.get("database", "host")
    if parser.has_option("database", "port"):
        db_port = parser.get("database", "port")
    if parser.has_option("database", "user"):
        db_user = parser.get("database", "user")
    if parser.has_option("database", "password"):
        db_pass = parser.get("database", "password")

    layer_name = parser.get("layer", "name")
    layer_geom_column = parser.get("layer", "geom_column")
    if parser.has_option("layer", "output_srid"):
        layer_output_srid = parser.getint("layer", "output_srid")
    if parser.has_option("layer", "create_grid"):
        create_grid = parser.getboolean("layer", "create_grid")
    if parser.has_option("layer", "grid_res"):
        grid_res = parser.getfloat("layer", "grid_res")
    if parser.has_option("layer", "shift_geometry"):
        shift_geometry = parser.getboolean("layer", "shift_geometry")

    # Validate the SRID by trying to build a spatial reference from it.
    try:
        output_srs = osr.SpatialReference()
        output_srs.ImportFromEPSG(layer_output_srid)
    except:
        logger.fatal("Output SRID %s is not valid" % (layer_output_srid))
        sys.exit(1)

    if create_grid and not grid_res > 0:
        logger.fatal("Grid resolution must be greater than 0")
        sys.exit(1)

    #
    # Determine TA layer and its year from REST service
    #

    logger.debug(base_uri + "?f=json")
    response = urllib2.urlopen(base_uri + "?f=json")
    capabilities = json.load(response)

    # Pick the "<year>_Geographies" service with the greatest year.
    # NOTE(review): latest_year is stored as int but compared against the
    # str m.group(2); the mixed-type '>' only "works" under Python 2.
    latest_service = None
    latest_year = None
    p = re.compile("((\d{4})\_Geographies)$", flags=re.UNICODE)
    for service in capabilities["services"]:
        m = p.search(service["name"])
        if m:
            if not latest_year or m.group(2) > latest_year:
                latest_year = int(m.group(2))
                latest_service = m.group(1)

    logger.debug(base_uri + "/" + latest_service + "/MapServer?f=json")
    response = urllib2.urlopen(base_uri + "/" + latest_service + "/MapServer?f=json")
    capabilities = json.load(response)

    # Find the "Territorial Authorities <year>" layer in that service.
    ta_layer = None
    p = re.compile("^Territorial\sAuthorities\s\d{4}$", flags=re.UNICODE)
    for layer in capabilities["layers"]:
        m = p.search(layer["name"])
        if m:
            ta_layer = layer
            break

    if not ta_layer:
        logger.fatal("Could not find the TA layer in " + base_uri)
        sys.exit(1)

    # REST query returning every feature as JSON in the requested SRID.
    feature_url = (
        base_uri
        + "/"
        + latest_service
        + "/MapServer/"
        + str(ta_layer["id"])
        + "/query?f=json&where=1=1&returnGeometry=true&outSR="
        + str(layer_output_srid)
    )

    geojson_drv = ogr.GetDriverByName("GeoJSON")
    if geojson_drv is None:
        logger.fatal("Could not load the OGR GeoJSON driver")
        sys.exit(1)

    #
    # Connect to the PostgreSQL database
    #

    pg_drv = ogr.GetDriverByName("PostgreSQL")
    if pg_drv is None:
        logger.fatal("Could not load the OGR PostgreSQL driver")
        sys.exit(1)

    pg_uri = "PG:dbname=" + db_name
    if db_host:
        pg_uri = pg_uri + " host=" + db_host
    if db_port:
        pg_uri = pg_uri + " port=" + db_port
    if db_user:
        # NOTE(review): the line below is corrupted -- an external
        # credential-scrubbing tool replaced the user/password fragments with
        # "******" and collapsed the original try/except that opened the PG
        # datasource. Restore this section from the upstream source before use.
        pg_uri = pg_uri + " user="******" password="******"Can't open PG output database: " + str(e))
        sys.exit(1)
Example #11
0
def action_import_users(name, args):
    """import users from SVN information

    Reads the SVN mailer [authors] section and the SVN htpasswd file,
    cross-checks them (warning on any mismatch), then creates or updates
    the matching users in the lab.llvm.org instance database and saves it.

    Args:
        name: subcommand name, interpolated into the usage text.
        args: argument list; must hold exactly three paths (lab config,
            svn mailer config, svn htpasswd).
    """

    import llvmlab
    import ConfigParser
    from optparse import OptionParser, OptionGroup
    parser = OptionParser("""\
%%prog %s [options] <lab config path> <svn mailer config> <svn htpasswd path>

This command imports user information from the llvm.org SVN information. It will
add any users who are not present in the lab.llvm.org database, and import their
name, email, and SVN login information.\
""" % name)
    (opts, args) = parser.parse_args(args)

    if len(args) != 3:
        parser.error("invalid number of arguments")

    config_path, svn_mailer_path, svn_htpasswd_path = args

    # Load the app object.
    instance = llvmlab.ui.app.App.create_standalone(config_path = config_path)
    data = instance.config.data

    # Load the SVN mailer config.
    parser = ConfigParser.RawConfigParser()
    parser.read(svn_mailer_path)

    # Load the SVN htpasswd file.
    # FIX: use a context manager so the handle is closed even when a
    # malformed line raises, and stop shadowing the 'file' builtin.
    svn_htpasswd = {}
    with open(svn_htpasswd_path) as htpasswd_file:
        for ln in htpasswd_file:
            if ln.strip():
                user,htpasswd,module = ln.split(":")
                svn_htpasswd[user] = (htpasswd, module)

    # Validate that the authors list and the htpasswd list coincide.
    # ('id' renamed to 'user_id' to avoid shadowing the builtin.)
    svn_authors = dict((author, parser.get("authors", author))
                       for author in parser.options("authors"))
    for user_id in set(svn_authors) - set(svn_htpasswd):
        warning("svn mailer authors contains user without htpasswd: %r " % user_id)
    for user_id in set(svn_htpasswd) - set(svn_authors):
        warning("svn contains passwd but no mailer entry: %r " % user_id)

    # Add user entries for any missing users.
    for user_id in sorted(set(svn_authors) & set(svn_htpasswd)):
        # NOTE: rebinds the 'name' parameter, which is no longer needed here.
        name,email = split_name_and_email(svn_authors[user_id])
        htpasswd = svn_htpasswd[user_id][0]
        passhash = hashlib.sha256(
            htpasswd + instance.config['SECRET_KEY']).hexdigest()

        # Lookup the user entry.
        user = data.users.get(user_id)

        # Never allow modifying the admin user.
        if user is data.admin_user:
            warning("ignore %r, is the admin user!" % user_id)
            continue

        # Create the user if missing.
        if user is None:
            # Use the users htpasswd (itself) as the initial password.
            user = data.users[user_id] = llvmlab.user.User(user_id, passhash,
                                                           name, email,
                                                           htpasswd)
            note("added user %r" % user_id)
            continue

        # Otherwise, update the users info if necessary.
        for kind,new,old in (('name', name, user.name),
                             ('email', email, user.email),
                             ('htpasswd', htpasswd, user.htpasswd)):
            if new != old:
                note("changed %r %s from %r to %r" % (
                        user_id, kind, old, new))
                setattr(user, kind, new)

    # Save the instance data.
    instance.save_data()