Example #1
0
def get_pf(pfname):
	"""Read waveform-processing settings from an Antelope parameter file.

	Parameters
	----------
	pfname : name of the parameter file handed to stock.pfget

	Returns
	-------
	(tstep, tpad, filter_spec, sta, chan) where sta/chan are lists parsed
	from the 'stachan' table entries.
	"""
	tstep = float(stock.pfget(pfname, 'tstep'))
	tpad = float(stock.pfget(pfname, 'tpad'))
	# renamed from 'filter' to avoid shadowing the builtin
	filter_spec = stock.pfget(pfname, 'filter')
	stachans = stock.pfget(pfname, 'stachan')
	sta = []
	chan = []
	for entry in stachans:
		if not entry:
			continue
		# Station code: text before the first space or colon.  An entry
		# with no delimiter contributes no station but still contributes
		# a channel.  NOTE(review): that can desync the two lists —
		# confirm the expected 'stachan' entry format.
		delims = [i for i, c in enumerate(entry) if c in (' ', ':')]
		if delims:
			sta.append(entry[:delims[0]])
		# Channel: everything after the last ':' (the whole entry,
		# spaces included, when there is no colon).
		chan.append(entry[entry.rfind(':') + 1:])

	return (tstep, tpad, filter_spec, sta, chan)
Example #2
0
def get_pf(pfname):
    """Return a dict from a pf file"""
    # Antelope >= 5.3 exposes pfread; older releases expose pfget.
    pfread = getattr(stock, 'pfread', None)
    if pfread is not None:
        return pfread(pfname).pf2dict()
    pfget = getattr(stock, 'pfget', None)
    if pfget is not None:
        return pfget(pfname)
    raise AttributeError("No pf function available")
Example #3
0
def load_pf(hp, pffile='dbhash.pf'):
    '''Update runtime settings from a pf file

    This inputs from pf and converts the original HASH command line params to
    proper python type. One can also specify names of velocity model files in
    the pf file. See an actual file for example.

    Right now these settings are inherited from the HashPype class,
    and are not instance attributes.

    Input
    -----
    hp     : HashPype-like object whose attributes receive the settings
    pffile : string of full path to pf file

    '''
    # Version change 5.2->5.3 broke Antelope API, quick fix for now
    # TODO: 1) use Mark's version agnostic 'pfgetter' function
    #       2) man up and use JSON/ini/native python for your configs
    try:
        from antelope.stock import pfget
    except ImportError:
        from antelope.stock import pfread as pfget

    pf_settings = pfget(pffile)
    pf_keys = list(pf_settings.keys())  # b/c v5.3 broke backwards dict compat

    # Known HASH parameters get converted to their proper python types;
    # everything else is copied through unchanged.
    float_keys = ('badfrac', 'prob_max')
    int_keys = ('npolmin', 'max_agap', 'max_pgap', 'dang', 'nmc', 'maxout',
                'delmax', 'cangle')
    for key in pf_keys:
        pfi = pf_settings[key]
        if key in float_keys:
            pfi = float(pfi)
        elif key in int_keys:
            pfi = int(pfi)
        # builtin setattr instead of calling hp.__setattr__ directly
        setattr(hp, key, pfi)

    # Prepend the model directory to each velocity model name when both given
    if 'vmodel_dir' in pf_keys and 'vmodels' in pf_keys:
        hp.vmodels = [
            os.path.join(hp.vmodel_dir, table) for table in hp.vmodels
        ]
Example #4
0
def load_pf(hp, pffile="dbhash.pf"):
    """Update runtime settings from a pf file

    This inputs from pf and converts the original HASH command line params to
    proper python type. One can also specify names of velocity model files in
    the pf file. See an actual file for example.

    Right now these settings are inherited from the HashPype class,
    and are not instance attributes.

    Input
    -----
    hp     : HashPype-like object whose attributes receive the settings
    pffile : string of full path to pf file

    """
    # Version change 5.2->5.3 broke Antelope API, quick fix for now
    # TODO: 1) use Mark's version agnostic 'pfgetter' function
    #       2) man up and use JSON/ini/native python for your configs
    try:
        from antelope.stock import pfget
    except ImportError:
        from antelope.stock import pfread as pfget

    pf_settings = pfget(pffile)
    pf_keys = list(pf_settings.keys())  # b/c v5.3 broke backwards dict compat

    # Known HASH parameters get converted to their proper python types;
    # everything else is copied through unchanged.
    float_keys = ("badfrac", "prob_max")
    int_keys = ("npolmin", "max_agap", "max_pgap", "dang", "nmc", "maxout",
                "delmax", "cangle")
    for key in pf_keys:
        pfi = pf_settings[key]
        if key in float_keys:
            pfi = float(pfi)
        elif key in int_keys:
            pfi = int(pfi)
        # builtin setattr instead of calling hp.__setattr__ directly
        setattr(hp, key, pfi)

    # Prepend the model directory to each velocity model name when both given
    if "vmodel_dir" in pf_keys and "vmodels" in pf_keys:
        hp.vmodels = [os.path.join(hp.vmodel_dir, table) for table in hp.vmodels]
Example #5
0
def dbloc_source_db(db, pointer=True):
    """
    Checks if you are in a dbloc2 'trial' db and returns the source
    one if you are, otherwise returns the same Dbptr. This is for running
    interactive scripts launched from dbloc2 and writing to a non-volatile
    original db.

    INPUT:  Dbptr of current temp database in dbloc2
    OUTPUT: Dbptr to database that dbloc2 is using when pointer=True;
            the database name (pointer closed) when pointer=False.
    """
    try:
        from antelope.stock import pfget
    except ImportError:
        from antelope.stock import pfread as pfget

    db = Dbptr(db, perm="r+")
    dbname = db.query("dbDATABASE_NAME")
    pf_settings = pfget("dbloc2")
    # Only the temporary-db name is needed; the old Work_dir/dblocdb
    # locals were never used and have been dropped.
    tempdb = pf_settings["Define"]["Temporary_db"]
    if dbname.endswith(tempdb):
        # path of trial db from dbloc2
        dbcwd = os.path.dirname(dbname)
        # relative name of 1st db in 'trial' database descriptor file
        # (str.translate(None, "{}") strips braces -- Python 2 only)
        dbpath0 = db.query("dbDBPATH").split(":")[0].translate(None, "{}")
        # full absolute path database name to source
        dbname = os.path.abspath(os.path.join(dbcwd, dbpath0))
        db.close()
        db = Dbptr(dbname, perm="r+")
    if pointer:
        return db
    else:
        db.close()
        return dbname
Example #6
0
def dbloc_source_db(db, pointer=True):
    """
    Checks if you are in a dbloc2 'trial' db and returns the source
    one if you are, otherwise returns the same Dbptr. This is for running
    interactive scripts launched from dbloc2 and writing to a non-volatile
    original db.

    INPUT:  Dbptr of current temp database in dbloc2
    OUTPUT: Dbptr to database that dbloc2 is using when pointer=True;
            the database name (pointer closed) when pointer=False.
    """
    try:
        from antelope.stock import pfget
    except ImportError:
        from antelope.stock import pfread as pfget

    db = Dbptr(db, perm='r+')
    dbname = db.query('dbDATABASE_NAME')
    pf_settings = pfget('dbloc2')
    # Only the temporary-db name is needed; the old Work_dir/dblocdb
    # locals were never used and have been dropped.
    tempdb = pf_settings['Define']['Temporary_db']
    if dbname.endswith(tempdb):
        # path of trial db from dbloc2
        dbcwd = os.path.dirname(dbname)
        # relative name of 1st db in 'trial' database descriptor file
        # (str.translate(None, '{}') strips braces -- Python 2 only)
        dbpath0 = db.query('dbDBPATH').split(':')[0].translate(None, '{}')
        # full absolute path database name to source
        dbname = os.path.abspath(os.path.join(dbcwd, dbpath0))
        db.close()
        db = Dbptr(dbname, perm='r+')
    if pointer:
        return db
    else:
        db.close()
        return dbname
def main(argv=None):
    """Main processing script for all maps """

    # Initialize Antelope logging and announce the run
    elog.elog_init(sys.argv)
    elog.elog_notify("Start of script")

    # Parse the command line (note: year/month/maptype are not used in the
    # portion of main() visible here -- presumably consumed further down)
    verbose, debug, year, month, maptype, deploytype, size = process_command_line(argv)
        
    if debug:
        elog.elog_notify("*** DEBUGGING ON ***")
        elog.elog_notify("*** No grd or grad files - just single color for speed ***")

    # Parameter files driving the map generation
    common_pf = 'common.pf'
    stations_pf = 'stations.pf'

    elog.elog_notify(" - Creating **%s** maps" % deploytype)
    if verbose:
        elog.elog_notify(" - Parse configuration parameter file (%s)" % common_pf)
        elog.elog_notify(" - Parse stations parameter file (%s)" % stations_pf)

    # GMT RGB triplet used for water fill
    wet_rgb = '202/255/255'

    pfupdate(common_pf)
    pfupdate(stations_pf)

    # Pull database, station-group, and rendering config from the pf files
    dbmaster = pfget(common_pf, 'USARRAY_DBMASTER')
    networks = pfget_arr(stations_pf, 'network')
    infrasound = pfget_arr(stations_pf, 'infrasound')
    colors = pfget_arr(stations_pf, 'colors')
    # Force the tmp dir environmental variable
    tmp = pfget(common_pf, 'TMP')
    os.environ['TMPDIR'] = os.environ['TEMP'] = os.environ['TMP'] = tmp
    gmtbindir = pfget(common_pf, 'GMT_BIN')
    usa_coords = pfget_arr(common_pf, 'USACOORDS')
    ak_coords = pfget_arr(common_pf, 'AKCOORDS')
    web_output_dir = pfget(common_pf, 'CACHE_MONTHLY_DEPLOYMENT')
    web_output_dir_infra = pfget(common_pf, 'CACHE_MONTHLY_DEPLOYMENT_INFRA')
    infrasound_mapping = pfget(common_pf, 'INFRASOUND_MAPPING')
    output_dir = '/var/tmp' # FOR TESTING
    sys.path.append(gmtbindir)
    # Page layout: 'wario' produces a large-format landscape plot
    if size == 'wario':
        paper_orientation = 'landscape'
        paper_media = 'b0'
        symsize = '0.3'
    else:
        paper_orientation = 'portrait'
        paper_media = 'a1'
        symsize = '0.15'

    # Make sure execution occurs in the right directory
    cwd = os.getcwd()
    path_parts = cwd.split('/')
    if path_parts[-1] == 'deployment_history' and path_parts[-2] == 'bin':
        if verbose or debug:
            elog.elog_notify(' - Already in the correct current working directory %s' % cwd)
    else:
        cwd = os.getcwd() + '/bin/deployment_history'
        if verbose or debug:
            elog.elog_notify (' - Changed current working directory to %s' % cwd)
        os.chdir(cwd)
    # Make sure we set some GMT parameters for just this script
    # GMTSET
    # NOTE: 'except Exception, e' is Python 2-only syntax; this script
    # requires Python 2.
    try:
        set_gmt_params(paper_orientation, paper_media)
    except Exception, e:
        elog.elog_complain("An error occurred setting GMT params %s")  # NOTE(review): '%s' placeholder never filled and 'e' unused -- likely meant ... % e
        raise