Example #1
 def xmlInit(self, aDEVS):
     """
     The trace functionality for XML output at initialization
     Args:
         aDEVS - the model that was initialised
     """
     execute(aDEVS, easyCommand("self.tracers.saveXML", [toStr(aDEVS.getModelFullName()), aDEVS.timeLast, "'EX'", "''", toStr(aDEVS.state.toXML()), toStr(aDEVS.state)]), self.immediate)
Example #2
def generate_individual(num, base, scripts):
	# create unique folder
	name = INDIVIDUAL % num
	folder = base + "/" + name
	os.mkdir(folder)

	# move seed file if it exists
	if os.path.exists("%s.seed" % name):
		os.rename("%s.seed" % name, "%s/individual.seed" % name)

	# generate individual
	os.chdir(folder)
	result = util.execute("%s/generate.csh" % scripts, redirect="generate.log")

	# delete the databases
	shutil.rmtree("db", True)
	shutil.rmtree("incremental_db", True)

	# save output files
	os.chdir(base)
	if result == 0:
		for ext in ["csv", "sof"]:
			os.rename("%s/individual.%s" % (name, ext), "%s.%s" % (name, ext))

	# compress any interesting files
	util.execute("tar czf %s.tgz %s" % (name, name))
	shutil.rmtree(name)

	return result
Example #3
        def make_mapping(self, chanel):
            slot = 24
            num = 0
            disk_list = []
            dsu = '1.1'

            cmd = "ls -l /sys/block/ |grep host |grep -v sda"
            s, o = execute(cmd, False, logging=False)
            for nr in o.split('\n'):
                c = 'expander'
                if c in nr:
                    n = nr.split()
                    blk = n[8]
                    loc = n[10]

                    m = re.search('port-\d+:\d+:\d+', loc)
                    port = m.group()
                    cmd = "ls /sys/class/sas_host/*/device/*/*/%s" % (port)
                    ss, oo = execute(cmd, logging=False)
                    idx = oo.split()[1]
                    cmd = "cat /sys/class/sas_phy/%s/phy_identifier" % (idx)
                    ss, phy_idx = execute(cmd, logging=False)
                    phy_idx = phy_idx.strip('\n')
                    if slot == 16:
                        disk_loc = '%s.%s' % (dsu, self.key_16_loc[phy_idx])
                    else:
                        disk_loc = '%s.%s' % (dsu, self.key_24_loc[phy_idx])

                    self.mapping[disk_loc] = blk
                    disk_list.append(disk_loc)

            self.dsu_list[dsu] = slot
Example #4
def interconnect():
    # also see database/build_database/shared_tables.py for code to fill in
    # project.interconnect_id, project.connect_distance_km and project.connect_cost_per_mw
    # based on this table
    # note: we could eventually add interconnect-specific connection costs here,
    # to be used instead of generic project interconnection costs; in that case
    # the code in shared_tables.calculate_interconnect_costs() would also need
    # to be updated
    execute("""
        DROP TABLE IF EXISTS interconnect;
        CREATE TABLE interconnect (
            interconnect_id integer PRIMARY KEY NOT NULL,
            county text,
            latitude float,
            longitude float
        );
        ALTER TABLE interconnect OWNER TO admin;
        -- At some point interconnect was filled in with the equivalent of the
        -- following command. The original code is missing, but these appear to be
        -- the population-weighted centers of each county.
        INSERT INTO interconnect (interconnect_id, county, latitude, longitude) VALUES
            (1, 'Honolulu', 21.372464, -157.913673),
            (2, 'Hawaii', 19.672837, -155.421895),
            (3, 'Maui', 20.863747, -156.493816),
            (4, 'Kauai', 22.021022, -159.442112),
            (5, 'Kalawao', 21.188495, -156.979972);
    """)
Example #5
 def POST(self):
     exe_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
     data = web.input()
     data = json.loads(data.payload)
     url = data['repository']['url']
     util.execute('url', url, exe_time)
     return ''
Example #6
def system_load():
    # TODO: extend to other load zones by adding more rows to the 
    # 'sales_forecast' region of the psip_data_file

    # get historical peak and average loads
    hist = pd.read_sql(
        sql="""
            SELECT
                load_zone, EXTRACT(year FROM date_time) as year_hist,
                MAX(system_load) as peak_hist, AVG(system_load) as avg_hist
            FROM system_load
            GROUP BY 1, 2;
        """,
        con=db_engine
    )
    # forecast peak and energy
    fore = data_frame_from_xlsx(psip_data_file, 'sales_forecast')
    fore = fore.T.set_index(0).T
    fore = fore.rename(columns={'year': 'year_fore'})
    # calculate scale factors for system_load_scale table
    sls = pd.merge(hist, fore, on='load_zone')
    sls['load_scen_id'] = load_scen_id
    sls['peak_fore'] = sls['underlying forecast (MW)'] + sls['energy efficiency (MW)']
    sls['avg_fore'] = (sls['underlying forecast (GWh)'] + sls['energy efficiency (GWh)'])/8.76
    sls['scale'] = (sls['peak_fore'] - sls['avg_fore']) / (sls['peak_hist'] - sls['avg_hist'])
    sls['offset'] = sls['peak_fore'] - sls['scale'] * sls['peak_hist']

    # put into standard order, drop unneeded columns, convert to the right types for the database
    db_columns = [
        'load_zone', 'load_scen_id', 'year_hist', 'year_fore',
        'peak_hist', 'peak_fore', 'avg_hist', 'avg_fore', 'scale', 'offset'
    ]
    system_load_scale = pd.DataFrame()
    for c in db_columns:
        if c in ['load_zone', 'load_scen_id']:
            system_load_scale[c] = sls[c].astype(str)
        elif c in ['year_hist', 'year_fore']:
            system_load_scale[c] = sls[c].astype(int)
        else:
            system_load_scale[c] = sls[c].astype(float)
    system_load_scale.set_index(db_columns[:4], inplace=True)
    # store data
    execute("DELETE FROM system_load_scale WHERE load_scen_id=%s;", (load_scen_id,))
    system_load_scale.to_sql('system_load_scale', db_engine, if_exists='append')
    
    # create another forecast with peak and average loads from 2007, carried through to the future
    execute("""
        CREATE TEMPORARY TABLE tsls AS 
            SELECT * FROM system_load_scale WHERE load_scen_id=%s;
        UPDATE tsls a 
            SET peak_fore=b.peak_hist, avg_fore=b.avg_hist, load_scen_id='flat_2007'
            FROM tsls b
            WHERE b.year_hist=2007 and b.year_fore=2045;
        UPDATE tsls 
            SET scale = (peak_fore - avg_fore) / (peak_hist - avg_hist);
        UPDATE tsls 
            SET "offset" = peak_fore - scale * peak_hist;
        INSERT INTO system_load_scale SELECT * FROM tsls;
        DROP TABLE tsls;
    """, (load_scen_id,))
Example #7
def dd_dev(dev):
    log.info('start to dd dev %s' % dev)
    try:
        execute('dd if=/dev/zero of=/dev/%s bs=128K' % dev)
    except Exception as e:
         log.error(caused(e).detail)
    log.info('complete to dd dev %s' % dev)
Example #8
 def xmlConfluent(self, aDEVS):
     """
     The trace functionality for XML output at a confluent transition
     Args:
         aDEVS - the model that transitioned
     """
     portInfo = ""
     for I in range(len(aDEVS.IPorts)):
         portInfo += "<port name=\"" + aDEVS.IPorts[I].getPortName(
         ) + "\" category=\"I\">\n"
         for j in aDEVS.peek(aDEVS.IPorts[I]):
             portInfo += "<message>" + str(j) + "</message>\n</port>\n"
     execute(
         aDEVS,
         easyCommand("self.tracers.saveXML", [
             toStr(aDEVS.getModelFullName()), aDEVS.timeLast, "'EX'",
             toStr(portInfo),
             toStr(aDEVS.state.toXML()),
             toStr(aDEVS.state)
         ]), self.immediate)
     portInfo = ""
     for I in range(len(aDEVS.OPorts)):
         if aDEVS.OPorts[I] in aDEVS.myOutput:
             portInfo += "<port name=\"" + aDEVS.OPorts[I].getPortName(
             ) + "\" category=\"O\">\n"
             for j in aDEVS.myOutput[aDEVS.OPorts[I]]:
                 portInfo += "<message>" + str(j) + "</message>\n</port>\n"
     execute(
         aDEVS,
         easyCommand("self.tracers.saveXML", [
             toStr(aDEVS.getModelFullName()), aDEVS.timeLast, "'IN'",
             toStr(portInfo),
             toStr(aDEVS.state.toXML()),
             toStr(aDEVS.state)
         ]), self.immediate)
Example #9
def __stop(service):
	if __isRunning(service):
		with hide('everything'):
			execute('service %s stop' % service)
		done("echo 'stopped         : %s'" % service)
	else:
		already("echo 'already stopped : %s'" % service)
Example #10
 def verboseConfluent(self, aDEVS):
     """
     The trace functionality for verbose output at a confluent transition
     Args:
         aDEVS - the model that transitioned
     """
     text = ""
     text += "\n\tCONFLUENT TRANSITION in model <%s>\n" % aDEVS.getModelFullName(
     )
     text += "\t  Input Port Configuration:\n"
     for I in range(len(aDEVS.IPorts)):
         text += "\t    port <" + str(
             aDEVS.IPorts[I].getPortName()) + ">: \n"
         for msg in aDEVS.myInput.get(aDEVS.IPorts[I], []):
             text += "\t       " + str(msg) + "\n"
     text += "\t  New State: %s\n" % str(aDEVS.state)
     text += "\t  Output Port Configuration:\n"
     for I in range(len(aDEVS.OPorts)):
         text += "\t    port <" + str(
             aDEVS.OPorts[I].getPortName()) + ">:\n"
         for msg in aDEVS.myOutput.get(aDEVS.OPorts[I], []):
             text += "\t       " + str(msg) + "\n"
     # Don't show the age
     text += "\t  Next scheduled internal transition at time %5f\n" % (
         aDEVS.timeNext[0])
     execute(
         aDEVS,
         easyCommand("self.tracers.traceVerbose",
                     [aDEVS.timeLast, '"' + text + '"']), self.immediate)
Example #11
 def vcdConfluent(self, aDEVS):
     """
     The trace functionality for VCD output at a confluent transition
     Args:
         aDEVS - the model that transitioned
     """
     name = aDEVS.getModelFullName()
     for I in range(len(aDEVS.IPorts)):
         portName = aDEVS.IPorts[I].getPortName()
         signalBag = aDEVS.peek(aDEVS.IPorts[I])
         if signalBag is not None:
             for portSignal in signalBag:
                 execute(
                     aDEVS,
                     easyCommand("self.tracers.saveVCD", [
                         toStr(aDEVS.getModelFullName()), aDEVS.timeLast,
                         toStr(portName),
                         toStr(portSignal)
                     ]), self.immediate)
     for I in range(len(aDEVS.OPorts)):
         if aDEVS.OPorts[I] in aDEVS.myOutput:
             portName = aDEVS.OPorts[I].getPortName()
             signalBag = aDEVS.myOutput[aDEVS.OPorts[I]]
             if signalBag is not None:
                 for portSignal in signalBag:
                     execute(
                         aDEVS,
                         easyCommand("self.tracers.saveVCD", [
                             toStr(aDEVS.getModelFullName()),
                             aDEVS.timeLast,
                             toStr(portName),
                             toStr(portSignal)
                         ]), self.immediate)
Example #12
def get_ntw(_):
    state, connectivity, _, _, _, _ = execute('nmcli', '-t', 'g').split(':')
    network = execute('nmcli', '-t', '-f', 'NAME', 'c', 'show',
                      '--active').replace('\n', ' -- ')

    symbol = symbols[state if state in symbols else 'disconnected']
    return '{} {}'.format(symbol, network) if network else symbol
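get_ntw unpacks the terse nmcli general-status line into six colon-separated fields and keeps only the first. A minimal sketch of that unpacking on a made-up status line (the field layout here is an assumption for illustration; the real line comes from execute above):

# Hypothetical terse status line, colon-separated.
sample = 'connected:full:enabled:enabled:enabled:enabled'
state, connectivity, _, _, _, _ = sample.split(':')
print(state)         # 'connected'
print(connectivity)  # 'full'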
Example #13
 def verboseInternal(self, aDEVS):
     """
     The trace functionality for verbose output at an internal transition
     Args:
         aDEVS - the model that transitioned
     """
     text = ""
     text += "\n\tINTERNAL TRANSITION in model <%s>\n" % aDEVS.getModelFullName(
     )
     text += "\t  New State: %s\n" % str(aDEVS.state)
     text += "\t  Output Port Configuration:\n"
     for I in range(len(aDEVS.OPorts)):
         if aDEVS.OPorts[I] in aDEVS.myOutput:
             text += "\t    port <" + str(
                 aDEVS.OPorts[I].getPortName()) + ">: \n"
             for msg in aDEVS.myOutput[aDEVS.OPorts[I]]:
                 text += "\t       " + str(msg) + "\n"
         else:
             text += "\t    port%d: NoEvent\n" % (I)
     # Don't show the age
     text += "\t  Next scheduled internal transition at time %5f\n" % (
         aDEVS.timeNext[0])
     execute(
         aDEVS,
         easyCommand("self.tracers.traceVerbose",
                     [aDEVS.timeLast, '"' + text + '"']), self.immediate)
Example #14
def apt_install(packages, options=['-y']):
    r"""Installs list of packages, with an optional list of options using apt-get."""
    args = ['apt-get']
    args.extend(options)
    args.append('install')
    args.extend(packages)
    execute(args)
Example #15
def __off(service):
	if __isOn(service):
		with hide('everything'):
			execute('chkconfig %s off' % service)
		done("echo 'turn off        : %s'" % service)
	else:
		already("echo 'already off     : %s'" % service)
Example #16
        def __init__(self):
            self.mapping = {}
            self.dsu_list = {}
            self.dsu_list['1.1'] = 24
            self.dsu_list['1.2'] = 24
            cmd = "ls -l /sys/block/ |grep 'host[6-7]'"
            s, o = execute(cmd, False, logging=False)
            if not o:
                return

            num, chanel = self._check_expander()
            if num > 0:
                for ch in chanel:
                    self.make_mapping(ch)
            else:
                cmd = "ls -l /sys/block/ |grep 'host[6-7]'"
                s, o = execute(cmd, False, logging=False)
                for nr in o.split('\n'):
                    n = nr.split()
                    if len(n) == 11:
                        blk = n[8]
                        loc = n[10]
                        m = re.search('port-\d+:\d+', loc)
                        port = m.group()
                        cmd = "ls /sys/class/sas_port/" + port + "/device/phy-*\:*/sas_phy/"
                        ss, phy = execute(cmd, False, logging=False)
                        list = phy.split('-')
                        phy_idx = list[1].replace(':', '-')
                        phy_idx = phy_idx.strip('\n')
                        self.mapping['1.1.%s' %
                                     (self.key_8_loc[phy_idx])] = blk

                self.dsu_list['1.1'] = 8
Example #17
def generate_individual(num, base, scripts):
    # create unique folder
    name = INDIVIDUAL % num
    folder = base + "/" + name
    os.mkdir(folder)

    # move seed file if it exists
    if os.path.exists("%s.seed" % name):
        os.rename("%s.seed" % name, "%s/individual.seed" % name)

    # generate individual
    os.chdir(folder)
    result = util.execute("%s/generate.csh" % scripts, redirect="generate.log")

    # delete the databases
    shutil.rmtree("db", True)
    shutil.rmtree("incremental_db", True)

    # save output files
    os.chdir(base)
    if result == 0:
        for ext in ["csv", "sof"]:
            os.rename("%s/individual.%s" % (name, ext), "%s.%s" % (name, ext))

    # compress any interesting files
    util.execute("tar czf %s.tgz %s" % (name, name))
    shutil.rmtree(name)

    return result
Example #18
def unit_test():
    '''(None) -> None

    Function that can be used to test your Lista_Pontos2D class
    interactively.
    '''
    import util
    
    # animation choice
    SIM = 's'
    NAO = 'n'

    # prompts used by the program
    PROMPT_NO_PONTOS  = "no. de pontos >>> "
    PROMPT_SEMENTE    = "semente >>> "
    PROMPT_ANIMACAO   = "\nexecutar animação ('s' para sim) >>> "

    # error messages
    ERRO_PONTOS  = "ERRO >>> número de pontos deve ser um inteiro positivo > 1 ('%s')"
    ERRO_SEMENTE = "ERRO >>> semente deve ser um inteiro ('%s')"

    # read the number of points
    n_str = input(PROMPT_NO_PONTOS)

    # check that n_str is a string representing a positive integer
    try:
        n = int(n_str)
        if n <= 1: raise ValueError
    except ValueError:
        print(ERRO_PONTOS %n_str)
        return None

    # read the seed
    semente_str = input(PROMPT_SEMENTE)
 
    # check that semente_str is a string representing an integer
    try:
        semente = int(semente_str)
    except ValueError:
        print(ERRO_SEMENTE %semente_str)
        return None
   
    # create a list of n random points (Ponto2D)
    pts = util.gere_pontos(n,semente)

    # create a Lista_Pontos2D object
    pontos = Lista_Pontos2D(pts)
    
    # run the selected function
    util.execute(pontos)

    # ask whether the animation should run
    opcao = input(PROMPT_ANIMACAO).strip()
    if opcao == SIM:
        # run the animation
        util.animacao(pontos)

    # normal termination
    print("Fui!")
Example #19
 def __init__(self):
     self.enabled = False
     cmd = "ls /sys/class/scsi_host/ | wc -l"
     _,o = execute(cmd, False, logging=False)
     if int(o.strip('\n')) > 10:
         e,_ = execute('led-ctl-daemon -t 3U16-STANDARD', False)
         if e == 0:
             self.enabled = True
Example #20
def echo_to_spool(user, line):
    path = '%s/%s' % (spool, user)
    if isExistsLine(path, line):
        error('echo already exists : %s in %s' % (line, path))
    else:
        execute("echo '%s' >> %s" % (line, path))
        __chown(user, path)
        done("echo -n 'create shell : '; ls -l %s" % path)
Example #21
def sed(path, src, dst, e = False):
	if not isExists(path):
		error('echo not exists : %s' % path)
		return

	option = ['-i', '-ie'][e]

	execute("sed %s 's/%s/%s/' %s" % (option, __escape(src), __escape(dst), path))
Example #22
 def DownloadData(self):
     print("Downloading data")
     util.createDir(self.path)
     self.path_data = self.path + '/data'
     util.createDir(self.path_data)
     command = 'kaggle competitions download ' + \
         self.compName + ' -p '+self.path_data
     util.execute(command)
Example #23
def __addRepository(name, url):
	package = __repository(name)

	if __doesNotHasRepository(package):
		with hide('everything'):
			execute('rpm -iv %s' % url)
		done("echo 'install complete  : %s'" % __repository(name))
	else:
		already("echo 'already installed : %s'" % package)
Example #24
def sed(path, src, dst, e=False):
    if not isExists(path):
        error('echo not exists : %s' % path)
        return

    option = ['-i', '-ie'][e]

    execute("sed %s 's/%s/%s/' %s" %
            (option, __escape(src), __escape(dst), path))
Example #25
def __addRepository(name, url):
    package = __repository(name)

    if __doesNotHasRepository(package):
        with hide('everything'):
            execute('rpm -iv %s' % url)
        done("echo 'install complete  : %s'" % __repository(name))
    else:
        already("echo 'already installed : %s'" % package)
Example #26
def import_hedged_fuel_costs(file, tag='hedged'):

    prices = data_frame_from_xlsx(file, named_range='fuel_prices')
    prices = prices.set_index(0)
    prices.index.name = 'year'
    prices = prices.T.set_index(['fuel_type', 'tier']).T.astype(float)
    # switch to one row per value, and assign a name to the value
    prices = pd.DataFrame({'price_mmbtu': prices.stack(['fuel_type', 'tier'])})
    prices['load_zone'] = 'Oahu'
    prices['base_year'] = get_named_cell_from_xlsx(file, named_range='base_year')

    tiers = data_frame_from_xlsx(file, named_range='tier_properties')
    # Transpose, set row and column labels, and convert to floating point (converting None to NaN)
    tiers = tiers.set_index(0).T.set_index(['fuel_type', 'tier']).astype(float)

    # fixed prices vary depending on the finance term; terms are pre-specified in this region
    fixed_costs = data_frame_from_xlsx(file, named_range='tier_fixed_costs')
    # use the first column as indexes (mostly to get column names), then set column headers
    fixed_costs = fixed_costs.set_index(0).T.set_index(['fuel_type', 'tier']).T
    # drop unneeded row for current finance term (we only want the values from the data table below that)
    fixed_costs = fixed_costs.iloc[1:]
    # give the index a name
    fixed_costs.index.name = 'term'
    # convert to row-wise format, give the fixed_cost column a name, and convert the indexes to columns
    fixed_costs = pd.DataFrame({'fixed_cost': fixed_costs.unstack()}).reset_index()
    # add a fuel_scen_id
    fixed_costs['fuel_scen_id'] = tag
    # use the term column as the maximum age for each tier with non-zero fixed costs
    limited_life = fixed_costs['fixed_cost'] > 0
    fixed_costs.loc[limited_life, 'max_age'] = fixed_costs.loc[limited_life, 'term']
    del fixed_costs['term']

    # remove duplicate rows (we don't need multiple rows with multiple ages for the $0 cost tiers)
    # also restore the indexes, to enable joining later
    fixed_costs = fixed_costs.drop_duplicates().set_index(['fuel_type', 'tier'])
    # merge the columns into the tiers table (adding all fuel_scen_id's and max_age's)
    tiers = tiers.join(fixed_costs)

    # merge the columns into the prices table (have to drop the year index to make this work)
    prices = prices.reset_index('year').join(tiers)

    # add the project lifespan into the tier id (have to convert tier index to a column to do this,
    # so might as well remove all indexes)
    prices = prices.reset_index()
    limited_life = prices['fixed_cost'] > 0
    prices.loc[limited_life, 'tier'] += '_' + prices.loc[limited_life, 'max_age'].astype(int).astype(str)

    # restore the indexes and sort the table
    prices = prices.set_index(['fuel_scen_id', 'year', 'fuel_type', 'tier']).sort_index()

    # remove any existing records
    execute("DELETE FROM fuel_costs WHERE fuel_scen_id LIKE %s;", (tag,))

    prices.to_sql('fuel_costs', db_engine, if_exists='append')

    print "Added hedged prices (fuel_scen_id = {}) to fuel_costs table.".format(list(prices.index.levels[0]))
Example #27
def fetch_origin_check_staged(path):
    """given a path on disk (to a git repo), fetch origin and ensure there aren't unstaged files"""
    with scoped_cwd(path):
        execute(['git', 'fetch', 'origin'])
        status = execute(['git', 'status', '-s']).strip()
        if len(status) > 0:
            print('[ERROR] There appear to be unstaged changes.\n' +
                  'Please resolve these before running (ex: `git status`).')
            return 1
    return 0
Example #28
def push_branches_to_remote(path, branches_to_push, dryrun=False):
    if dryrun:
        print('[INFO] would push the following local branches to remote: ' +
              str(branches_to_push))
    else:
        with scoped_cwd(path):
            for branch_to_push in branches_to_push:
                print('- pushing ' + branch_to_push + '...')
                # TODO: if they already exist, force push?? or error??
                execute(['git', 'push', '-u', 'origin', branch_to_push])
Example #29
def download_backup(cloud_provider, rclone_dir):
    cprint(
        f"Let's download the cloud backup of {cloud_provider} to {rclone_dir}",
        'green')
    execute(
        cmd_rclone_syn_to_disk.format(cloud_provider=cloud_provider,
                                      rclone_dir=rclone_dir))
    execute(
        cmd_rclone_check.format(dir_to_check=rclone_dir,
                                cloud_provider=cloud_provider))
Example #30
def install_chef_client(opts, args):
    r"""Installs a chef client through apt-get"""
    # Preseed
    apt_install(['debconf'])
    preseed = [['chef', 'chef/chef_server_url', 'string', opts.url]]
    input = '%s\n' % '\n'.join([' '.join(line) for line in preseed])
    execute(['debconf-set-selections'], input=input)
    
    os.environ['DEBIAN_FRONTEND'] = 'noninteractive'
    apt_install(CHEF_CLIENT_PACKAGES)
Example #31
def echo_to_directory(user, directory, shell_line, shell_name):
    path = '%s/%s' % (directory, shell_name)

    if isExists(path):
        error('echo already exists : %s' % path)
    else:
        execute("echo '%s' > %s" % (shell_line, path))
        __chmod(path)
        __chown(user, path)
        done("echo -n 'create shell : '; ls -l %s" % path)
Example #32
def recover_raid(id):
    try:
        rec = db.Recovery.get(id=id)
    except:
        print 'no such ID: %s' % id
        sys.exit(-1)

    name, nr, raid_disks, level, chunk = exctract_from_log(rec.log)
    disks = []

    try:
        raid = adm.Raid.lookup(name=name)
        if raid.online:
            print 'raid %s online' % raid.name
            sys.exit(-1)
        elif raid.health <> adm.HEALTH_FAILED:
            print 'raid %s is offline, but its health is not failed' % raid.name
            sys.exit(-1)
        else:
            for disk in raid.raid_disks + raid.spare_disks:
                disk.role = adm.ROLE_UNUSED
                disk.save(raid=None, link=False, unplug_seq=0)
                if disk.online and disk.health <> adm.HEALTH_FAILED:
                    Metadata.update(disk.md_path, raid_uuid='')
            raid.db.delete()
    except:
        pass

    for u in raid_disks.split(','):
        try:
            disk = adm.Disk.lookup(uuid=u)
            if disk.online:
                if disk.role == adm.ROLE_UNUSED:
                    disks.append(disk)
                else:
                    print 'disk:%s is used' % u
            else:
                print 'disk:%s is offline' % u
        except:
            print 'disk:%s is not found' % u

    disks = [disk.location for disk in disks]
    adm.create_raid(name,
                    level,
                    '%sKB' % chunk,
                    ','.join(disks),
                    '',
                    'low',
                    sync='no')

    cmd = 'lvscan'
    execute(cmd, False)

    raid = adm.Raid.lookup(name=name)
    raid.update_extents()
Example #33
 def _ensure_ledset(self, op, led):
     for i in range(0, 32):
         cmd = 'ledset %s %s' % (led, op)
         print cmd
         execute(cmd, False)
         status = self._ledget(led)
         if status == op:
             return
         else:
             log.info('leget status: %s, op: %s, sleep and try again' % (status, op))
             time.sleep(0.5)
Example #34
def fuel_costs():
    # create the fuel_costs table if needed
    execute("""
        CREATE TABLE IF NOT EXISTS fuel_costs (
            load_zone varchar(40),
            year int,
            base_year int,
            fuel_type varchar(30),
            price_mmbtu float,
            fixed_cost float,
            max_avail_at_cost float,
            fuel_scen_id varchar(40),
            tier varchar(20),
            max_age int
        );
        ALTER TABLE fuel_costs OWNER TO admin;
    """)

    # TODO: add fixed_cost and max_avail_at_cost for EIA-based forecasts

    def eia_dir(*path):
        return data_dir('EIA-based fuel cost forecasts', *path)

    # Oahu fuel price forecasts, derived from EIA
    # import_eia_fuel_costs(eia_dir("HECO fuel cost forecasts.xlsx"), 'EIA_ref')
    # import_eia_fuel_costs(eia_dir("HECO fuel cost forecasts_low.xlsx"), 'EIA_low')
    # import_eia_fuel_costs(eia_dir("HECO fuel cost forecasts_high.xlsx"), 'EIA_high')
    # import_eia_fuel_costs(eia_dir("HECO fuel cost forecasts_LNG_pegged_to_oil.xlsx"), 'EIA_lng_oil_peg')
    # import_eia_fuel_costs(eia_dir("HECO fuel cost forecasts_high_LNG_pegged_to_oil.xlsx"), 'EIA_high_lng_oil_peg')

    # Oahu hedged fuel costs and equivalent unheged costs from HECO 
    # (note: we use these instead of the PSIP Fuel Price Forecasts workbook because 
    # these adjust to 2016 dollars and include LNG with various durations)
    # import_hedged_fuel_costs(eia_dir("hedged fuel prices.xlsx"), tag='hedged')

    hedged_fuel_scen_id = 'hedged_2016_11_22'
    standard_fuel_scen_id = 'unhedged_2016_11_22'
    import_hedged_fuel_costs(eia_dir("hedged fuel prices 2016-11-22.xlsx"), tag=hedged_fuel_scen_id)
    import_hedged_fuel_costs(eia_dir("unhedged fuel prices 2016-11-22.xlsx"), tag=standard_fuel_scen_id)

    # import_psip_fuel_costs(data_dir("HECO Plans/PSIP-WebDAV/Resource Assumptions/PSIP Fuel Price Forecasts for HE 2016-06-27 regressions.xlsx"))

    # flat fuel price based on 2017 prices in 'unhedged_2016_11_22' 
    execute("""
        CREATE TEMPORARY TABLE tfuelcosts AS
            SELECT * FROM fuel_costs WHERE fuel_scen_id=%s;
        UPDATE tfuelcosts a
            SET fuel_scen_id='flat_2016', price_mmbtu=b.price_mmbtu
            FROM tfuelcosts b
            WHERE b.year=2016 AND b.load_zone=a.load_zone AND b.fuel_type=a.fuel_type AND b.tier=a.tier;
        INSERT INTO fuel_costs SELECT * FROM tfuelcosts;
        DROP TABLE tfuelcosts;
    """, (standard_fuel_scen_id,))
Example #35
 def javac(program):
     temp = "__" + name
     util.removeDirs(temp)
     try:
         util.removeFiles(name + ".jar")
         os.makedirs(temp)
         return util.execute(
             "javac", "-sourcepath", ".", "-d", temp, "-cp",
             joinClasspath(classpath()), program) or util.execute(
                 "jar", "cf", name + ".jar", "-C", temp, ".")
     finally:
         util.removeDirs(temp)
Example #36
    def _sync_mmap(self, dev_name):
        print 'EVENT:MONFS=%s' % dev_name
        name = '/root/test'
        cmd = 'cat /proc/`pidof chameleon`/maps |grep nvr |grep %s' % dev_name
        _, o = execute(cmd)
        p = re.search(r'/opt/(.*)', o)
        if p and p.groups()[0]:
            name = '/opt/' + p.groups()[0]

        cmd = 'sync_mmap %s' % (name)
        print cmd
        _, _ = execute(cmd)
Example #37
 def xmlExternal(self, aDEVS):
     """
     The trace functionality for XML output at an external transition
     Args:
         aDEVS - the model that transitioned
     """
     portInfo = ""
     for I in range(len(aDEVS.IPorts)):
         portInfo += "<port name=\""+ aDEVS.IPorts[I].getPortName()+"\" category=\"I\">\n"
         for j in aDEVS.peek(aDEVS.IPorts[I]):
             portInfo += "<message>" + str(j) + "</message>\n</port>\n"
     execute(aDEVS, easyCommand("self.tracers.saveXML", [toStr(aDEVS.getModelFullName()), aDEVS.timeLast, "'EX'", toStr(portInfo), toStr(aDEVS.state.toXML()), toStr(aDEVS.state)]), self.immediate)
Example #38
 def verboseInit(self, aDEVS):
     """
     The trace functionality for verbose output at the initialisation step
     Args:
         aDEVS - the model that transitioned
     """
     text = ""
     text += "\n\tINITIAL CONDITIONS in model <%s>\n" % aDEVS.getModelFullName()
     text += "\t  Initial State: %s\n" % str(aDEVS.state)
     # Don't show the age
     text += "\t  Next scheduled internal transition at time %5f\n" % (aDEVS.timeNext[0])
     execute(aDEVS, easyCommand("self.tracers.traceVerbose", [aDEVS.timeLast, '"' + text + '"']), self.immediate)
Example #39
 def vcdExternal(self, aDEVS):
     """
     The trace functionality for VCD output at an external transition
     Args:
         aDEVS - the model that transitioned
     """
     for I in range(len(aDEVS.IPorts)):
         portName = aDEVS.IPorts[I].getPortName()
         signalBag = aDEVS.peek(aDEVS.IPorts[I])
         if signalBag is not None:
             for portSignal in signalBag:
                 execute(aDEVS, easyCommand("self.tracers.saveVCD", [toStr(aDEVS.getModelFullName()), aDEVS.timeLast, toStr(portName), toStr(portSignal)]), self.immediate)
Example #40
 def xmlInit(self, aDEVS):
     """
     The trace functionality for XML output at initialization
     Args:
         aDEVS - the model that was initialised
     """
     execute(
         aDEVS,
         easyCommand("self.tracers.saveXML", [
             toStr(aDEVS.getModelFullName()), aDEVS.timeLast, "'EX'", "''",
             toStr(aDEVS.state.toXML()),
             toStr(aDEVS.state)
         ]), self.immediate)
Example #41
def __backup(path):
	if __hasNoBackup(path):
		if not isExists(path):
			error('echo not exists : %s' % path)
			return False
		else:
			with hide('everything'):
				execute('cp -p %s %s.origin' % (path, path))
			done('echo create backup : %s.origin' % path)
			return True
	else:
		already('echo already exists : %s' % path)
		return True
Example #42
def distributed_pv():
    # for now, just reuse old data

    # store data in postgresql tables
    shared_tables.create_table("project")
    shared_tables.create_table("cap_factor")

    # remove old records (best before removing indexes)
    execute("""
        DELETE FROM cap_factor WHERE project_id IN (SELECT project_id FROM project WHERE technology = 'DistPV');
    """)
    execute("""
        DELETE FROM project WHERE technology = 'DistPV';
    """)

    # remove indexes
    shared_tables.drop_indexes("cap_factor")    # drop and recreate is faster than incremental sorting

    execute("""
        INSERT INTO project (load_zone, technology, site, orientation, max_capacity)
        SELECT load_zone, technology, 'DistPV' AS site, orientation, max_capacity
            FROM max_capacity_pre_2016_06_21
            WHERE technology = 'DistPV';
    """)
    execute("""
        INSERT INTO cap_factor (project_id, date_time, cap_factor)
            SELECT project_id, date_time, cap_factor
                FROM cap_factor_pre_2016_06_21 cf JOIN project USING (load_zone, technology, orientation)
                WHERE cf.technology = 'DistPV';
    """)

    # restore indexes
    shared_tables.create_indexes("cap_factor")
Example #43
def test_individual(sof, board, interface, scripts):
	print "testing " + sof

	# program
	if util.execute("quartus_pgm -c %d -m JTAG -o P;%s" % \
			(board, sof), redirect="quartus.pgm.log", append=True) != 0:
		print "programming failed"

	# test
	else:
		score = re.sub("\.sof$", ".score", sof)
		if util.execute("%s/read_score.py -d %s -o %s" % \
				(scripts, interface, score),
				redirect="read_score.log", append=True) != 0:
			print "testing failed"
Example #44
  def test_for_broken_links(self):
    linkchecker_options = [
      "--check-extern", 
      "--no-warnings", 
      "--timeout 90",
      "--pause=3",
      "--ignore-url='.*eResearch-2013.pdf$'", 
      "--ignore-url='^http://www.jfsowa.com'",
      "--ignore-url='^https://platforms.monash.edu/eresearch/*'",
      "--ignore-url='^http://brain.oxfordjournals.org/content/130/9/2327.full'",
      "--ignore-url='^https://www.auckland.ac.nz/en/about/the-university/atoz-directory.html'",
      "--ignore-url='^https://www.auckland.ac.nz/en/admin/access-links/directory.html'",
      "--ignore-url='^https://www.auckland.ac.nz/en/admin/quick-links/a-to-z-directory.html'",
      "--ignore-url='^http://www.eresearch.org.nz/summer_of_eresearch_2010'",
      "--ignore-url='^http://www.eresearch.org.nz/nz-eresearch-symposium-2010'",
    ]
    cmd = '''linkchecker %s %s''' % (' '.join(linkchecker_options), config.url)
    print("%s" % cmd)
    stdout, stderr, rc = execute(cmd, error_on_stderr=False, error_on_nonzero_rc=False)

    if rc > 0:
      print 'stdout:'
      print '#' * 80
      print stdout
      print '#' * 80
      print ''
      print 'stderr:'
      print '#' * 80
      print stderr
      print '#' * 80
    assert (rc == 0)
Example #45
 def verboseExternal(self, aDEVS):
     """
     The trace functionality for verbose output at an external transition
     Args:
         aDEVS - the model that transitioned
     """
     text = ""
     text += "\n\tEXTERNAL TRANSITION in model <%s>\n" % aDEVS.getModelFullName()
     text += "\t  Input Port Configuration:\n"
     for I in range(len(aDEVS.IPorts)):
         text += "\t    port <" + str(aDEVS.IPorts[I].getPortName()) + ">:\n"
         for msg in aDEVS.myInput.get(aDEVS.IPorts[I], []):
             text += "\t       " + str(msg) + "\n"
     text += "\t  New State: %s\n" % str(aDEVS.state)
     # Don't show the age
     text += "\t  Next scheduled internal transition at time %5f\n" % (aDEVS.timeNext[0])
     execute(aDEVS, easyCommand("self.tracers.traceVerbose", [aDEVS.timeLast, '"' + text + '"']), self.immediate)
Example #46
def __on(service):
	if __isOn(service):
		already("echo 'already on      : %s'" % service)
	else:
		with hide('everything'):
			stdout = execute('chkconfig %s on; true' % service)
			if 'No such file or directory' in stdout:
				error("echo 'turn on error   : %s is unrecognized'" % service)
			else:
				done("echo 'turn on         : %s'" % service)
Example #47
def __start(service):
	if __isRunning(service):
		already("echo 'already started : %s'" % service)
	else:
		with hide('everything'):
			stdout = execute('service %s start; true' % service)
			if 'unrecognized' in stdout:
				error("echo 'start error     : %s is unrecognized'" % service)
			else:
				done("echo 'started         : %s'" % service)
Example #48
def __install(package, repositories):
	if __isNotInstalled(package):
		with hide('stdout'):
			stdout = execute('yum install -y %s%s; true' % (__enablerepos(repositories), package))
			if 'Error: Nothing to do' in stdout:
				error("echo 'install error     : No package %s available.'" % package)
			else:
				done("echo 'install complete  : %s'" % __version(package))
	else:
		already("echo 'already installed : %s'" % __version(package))
Example #49
def calculate_interconnect_costs():
    """Choose closest interconnect location to each project, and calculate distance to it.
    Also calculate connect_cost_per_mw based on distance and generic connection cost for
    each technology.
    note: this could eventually be updated to use interconnect-specific costs, where
    provided, instead of generic project interconnect costs; in that case, code that
    creates the interconnect table in import_data.py would need to be updated.
    """
    execute("""
        WITH distances as (
            select p.project_id, i.interconnect_id, 
                -- haversine distance formula, radius of earth = 6371 km
                2 *  6371 * sqrt(
                    pow(sin(radians((i.latitude - p.latitude)/2)), 2) 
                    + cos(radians(p.latitude)) * cos(radians(i.latitude)) 
                        * pow(sin(radians((i.longitude - p.longitude)/2)), 2))
                as distance
                from project p, interconnect i
                where p.latitude is not null and p.longitude is not null
        ), closest as (
            select project_id, min(distance) as distance 
                from distances group by 1
        ), neighbor as (
            select c.project_id, d.interconnect_id, c.distance
                from closest c join distances d using (project_id, distance)
            -- note, this may return multiple interconnects with the same distance
            -- but that is rare, and one will be chosen arbitrarily in the update query
        )
        update project p
            set interconnect_id = n.interconnect_id, 
                connect_distance_km = n.distance
            from neighbor n
            where n.project_id = p.project_id;
    """)
    execute("""
        update project p
            set connect_cost_per_mw = 
                1000 * connect_cost_per_kw_generic 
                    + %(connect_cost_per_mw_km)s * coalesce(connect_distance_km, 0)
            from generator_info g
            where g.technology=p.technology;
    """, dict(connect_cost_per_mw_km=connect_cost_per_mw_km))
Example #50
    def POST(self):
        exe_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        dic_ret = {'state': setting.STATE_FAIL, 'msg': setting.STATUS[int(setting.STATE_FAIL)], 'exe_time': exe_time}
        data = web.input()
        job_id = data.id

        state = util.execute('id', job_id, exe_time)
        dic_ret['msg'] = setting.STATUS[int(state)]
        dic_ret['state'] = state

        return json.dumps(dic_ret)
Example #51
def copy_system_libraries():
  ldd = execute(['ldd', os.path.join(OUT_DIR, 'thrust_shell')])
  lib_re = re.compile('\t(.*) => (.+) \(.*\)$')
  for line in ldd.splitlines():
    m = lib_re.match(line)
    if not m:
      continue
    for i, library in enumerate(SYSTEM_LIBRARIES):
      real_library = m.group(1)
      if real_library.startswith(library):
        shutil.copyfile(m.group(2), os.path.join(DIST_DIR, real_library))
        SYSTEM_LIBRARIES[i] = real_library
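The regular expression above pulls the library name and its resolved path out of each ldd line. A small standalone sketch of that parsing step on a made-up ldd-style line (the real lines come from execute above):

import re

# Same pattern as in copy_system_libraries, written as a raw string.
lib_re = re.compile(r'\t(.*) => (.+) \(.*\)$')

# Hypothetical ldd-style line, for illustration only.
line = '\tlibfoo.so.1 => /usr/lib/libfoo.so.1 (0x00007f1234567000)'
m = lib_re.match(line)
if m:
    print(m.group(1))  # library name, e.g. 'libfoo.so.1'
    print(m.group(2))  # resolved path, e.g. '/usr/lib/libfoo.so.1'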
Example #52
def clone(repository, dst, branch = 'master'):
	if isExists(dst):
		error('echo already exists : %s' % dst)
	else:
		with hide('stdout'):
			stdout = execute('git clone -b %s %s %s; true' % (branch, repository, dst))
			if '403 Forbidden' in stdout:
				error("echo -n 'clone error    : 403 Forbidden'")
			else:
				if 'warning: Remote branch %s not found' % branch in stdout:
					error("echo -n 'warning        : %s dones not found'" % branch)
				done("echo -n 'complete clone : '; ls -ld %s" % dst)
				done("echo -n 'branch         : '; cd %s; git rev-parse --abbrev-ref HEAD" % dst)
Example #53
    def extract_symbols(self, path):
        """Extracts symbol table information"""
        s = execute('objdump -t "%s"' % path)
        for line in s.splitlines():
            line = line.strip()

            if not line:
                continue

            try:
                l, r = line.split('\t')
            except ValueError:
                continue

            l = l.strip()
            addr, sep, typ = l.partition(' ')
            if not sep:
                continue
            
            r = r.strip()
            size, sep, name = r.partition(' ')
            if not sep:
                continue
            
            size = size.strip()
            size.lstrip('0')
            name = name.strip()

            size = int(size, 16)
            addr = int(addr, 16)

            if addr == 0:
                continue

            if typ.find("g") == -1 or typ.find("gcc_except_table") != -1 or typ.find("F") != -1:
                name = name + "<" + str(addr) + ">"

            # Create label and store
            if name in self.labels:
                if self.labels[name]['offset'] != addr:
                    sys.stderr.write('Warning: %s: -t reported %x, already is %x\n' % (name, addr, self.labels[name]['offset']))
                    print typ
                        
            label = {
                'name': name,
                'offset': addr,
                'end_offset': addr + size
            }
            self.labels[name] = label
Example #54
    def extract_relocations(self):
        """Extracts relocation information"""
        s = execute('objdump -rR "%s"' % self.path)
        self.relocations = []

        for line in s.splitlines():
            cols = line.split(None, 3)
            if len(cols) != 3:
                continue
            
            addr, reloc_type, value = cols
            try:
                addr = int(addr, 16)
            except ValueError:
                continue
            
            self.relocations += [(addr, reloc_type, value)]

        self.relocations.sort()
Example #55
    def __init__(self, path=None):
        """Initializes a new ObjDump instance with data extracted from the binary
        located at 'path'"""

        self.sections = {}
        self.labels = {}
        self.min_address = None
        self.max_address = None
        self.path = path

        if path is not None and not os.path.exists(path):
            raise Exception("'%s' does not exist" % path)
        
        # Extract symbols
        self.extract_symbols(path)

        # Extract relocation information
        self.extract_relocations()

        if path is not None:
            parse_dump(execute('objdump -d "%s"' % path), self)
Example #56
def __directoriesSortedByOlder(path):
	with hide('everything'):
		return execute('ls %s | sort' % path).split('\r\n')
Example #57
def cleanUp(path, upperLimit):
	targets = __directoriesSortedByOlder(path)[:-upperLimit]
	[execute('rm -rf %s/%s' % (path, target)) for target in targets]
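cleanUp relies on the slice [:-upperLimit] to keep the newest upperLimit directories and delete everything older. A quick illustration with hypothetical, lexicographically sortable names (as 'ls | sort' would return them):

# Hypothetical directory names, already sorted oldest-first.
dirs = ['2021-01', '2021-02', '2021-03', '2021-04']
upperLimit = 2
targets = dirs[:-upperLimit]
print(targets)  # ['2021-01', '2021-02'] -- the two oldest; the newest two are kept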
Example #58
def cat(path):
	assert isExists(path)

	with hide('everything'):
		return execute('cat %s' % path)