Example #1
def config(set, sync):
    """This command should now only be used to use the command line to set
    a parameter value in the data base. It used to launch the Configurator but
    the recommended way to configure MSNoise is to use the "msnoise admin" web
    interface."""
    if set:
        from ..default import default
        if not set.count("="):
            click.echo("!! format of the set command is name=value !!")
            return
        name, value = set.split("=")
        if name not in default:
            click.echo("!! unknown parameter %s !!" % name)
            return
        from ..api import connect, update_config
        db = connect()
        update_config(db, name, value)
        db.commit()
        db.close()
        click.echo("Successfully updated parameter %s = %s" % (name, value))
    elif sync:
        from ..api import connect, get_stations, update_station,\
            preload_instrument_responses

        db = connect()
        responses = preload_instrument_responses(db)
        netsta = []
        for id, row in responses.iterrows():
            net, sta, loc, chan = row["channel_id"].split(".")
            netsta.append("%s.%s" % (net, sta))
        responses["netsta"] = netsta

        for station in get_stations(db):
            id = "%s.%s" % (station.net, station.sta)
            coords = responses[responses["netsta"] == id]
            lon = float(coords["longitude"].values[0])
            lat = float(coords["latitude"].values[0])
            update_station(
                db,
                station.net,
                station.sta,
                lon,
                lat,
                0,
                "DEG",
            )
            logging.info("Added coordinates (%.5f %.5f) for station %s.%s" %
                         (lon, lat, station.net, station.sta))
        db.close()

    else:
        from ..s001configurator import main
        click.echo('Let\'s Configure MSNoise !')
        main()
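
Both branches boil down to a handful of msnoise.api calls. A minimal sketch of the "set" path, assuming an initialized project (db.ini) in the current directory; the parameter name and value are only illustrative:

from msnoise.api import connect, update_config

db = connect()                      # opens a session from db.ini
update_config(db, "maxlag", "120")  # illustrative name=value pair
db.commit()
db.close()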
Example #2
def config_sync():
    """
    Synchronise station metadata from inventory/dataless.
    """
    from ..api import connect, get_stations, update_station,\
        preload_instrument_responses

    db = connect()
    responses = preload_instrument_responses(db)
    netsta = []
    for id, row in responses.iterrows():
        net, sta, loc, chan = row["channel_id"].split(".")
        netsta.append("%s.%s" % (net, sta))
    responses["netsta"] = netsta

    for station in get_stations(db):
        id = "%s.%s" % (station.net, station.sta)
        coords = responses[responses["netsta"] == id]
        lon = float(coords["longitude"].values[0])
        lat = float(coords["latitude"].values[0])
        update_station(db, station.net, station.sta, lon, lat, 0, "DEG")
        logging.info("Added coordinates (%.5f %.5f) for station %s.%s" %
                     (lon, lat, station.net, station.sta))
    db.close()
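
One caveat: coords["longitude"].values[0] raises an IndexError for any station that has no matching entry in the response inventory. A hedged variant of the same loop that skips such stations (same msnoise.api calls as above):

import logging
from msnoise.api import connect, get_stations, update_station, \
    preload_instrument_responses

db = connect()
responses = preload_instrument_responses(db)
responses["netsta"] = [".".join(c.split(".")[:2])
                       for c in responses["channel_id"]]

for station in get_stations(db):
    sid = "%s.%s" % (station.net, station.sta)
    coords = responses[responses["netsta"] == sid]
    if not len(coords):
        logging.warning("No response metadata for %s, skipping", sid)
        continue
    lon = float(coords["longitude"].values[0])
    lat = float(coords["latitude"].values[0])
    update_station(db, station.net, station.sta, lon, lat, 0, "DEG")
db.close()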
Example #3
def scan_archive(ctx, init, path, recursively):
    """Scan the archive and insert into the Data Availability table."""
    if path:
        logging.info("Overriding workflow...")
        from obspy import UTCDateTime
        from ..s01scan_archive import worker
        db = connect()
        startdate = UTCDateTime(get_config(db, "startdate"))
        enddate = UTCDateTime(get_config(db, "enddate"))
        cc_sampling_rate = float(get_config(db, "cc_sampling_rate"))
        db.close()
        if recursively:
            for root, dirs, _ in os.walk(path):
                for d in dirs:
                    tmppath = os.path.join(root, d)
                    files = os.listdir(tmppath)
                    if not len(files):
                        continue
                    worker(sorted(files),
                           tmppath,
                           startdate,
                           enddate,
                           cc_sampling_rate,
                           init=True)
        worker(sorted(os.listdir(path)),
               path,
               startdate,
               enddate,
               cc_sampling_rate,
               init=True)
    else:
        from ..s01scan_archive import main
        main(init, threads=ctx.obj['MSNOISE_threads'])
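
The recursive branch can also lean on the file lists that os.walk already yields, instead of calling os.listdir on each sub-directory. A sketch assuming the same worker() signature as above:

import os
from obspy import UTCDateTime
from msnoise.api import connect, get_config
from msnoise.s01scan_archive import worker

def scan_tree(path):
    db = connect()
    startdate = UTCDateTime(get_config(db, "startdate"))
    enddate = UTCDateTime(get_config(db, "enddate"))
    sps = float(get_config(db, "cc_sampling_rate"))
    db.close()
    for root, dirs, files in os.walk(path):
        if files:  # skip empty directories, as the original does
            worker(sorted(files), root, startdate, enddate, sps, init=True)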
Example #4
def execute(sql_command, outfile=None, show=True):
    """EXPERT MODE: Executes 'sql_command' on the database. Use this command
    at your own risk!!"""
    from msnoise.api import connect

    db = connect()
    for cmd in sql_command.split(";"):
        if not len(cmd):
            continue
        print("Executing '%s'" % cmd)
        r = db.execute(cmd)
        if cmd.count("select") or cmd.count("SELECT"):
            result = r.fetchall()
            if not len(result):
                print("The query returned no results, sorry.")
            else:
                import pandas as pd
                df = pd.DataFrame(result, columns=r.keys())
                if show:
                    pd.set_option('display.max_rows', None)
                    pd.set_option('display.max_columns', None)
                    pd.set_option('display.width', None)
                    pd.set_option('display.max_colwidth', None)
                    print(df)
                if outfile:
                    if outfile == "?":
                        df.to_csv("request.csv")
                    else:
                        df.to_csv("%s" % outfile)
    db.commit()
    db.close()
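
A possible invocation, assuming the function is callable as shown and the jobs table used throughout these examples exists:

# Dump all 'T'odo jobs to CSV without printing the whole frame:
execute("SELECT * FROM jobs WHERE flag='T';", outfile="todo_jobs.csv",
        show=False)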
Example #5
def populate(fromda):
    """Rapidly scan the archive filenames and find Network/Stations"""
    if fromda:
        logging.info("Overriding workflow...")
        from ..msnoise_table_def import DataAvailability
        from ..api import update_station
        db = connect()
        stations = db.query(DataAvailability.net, DataAvailability.sta). \
            group_by(DataAvailability.net, DataAvailability.sta)

        for net, sta in stations:
            print('Adding:', net, sta)
            X = 0.0
            Y = 0.0
            altitude = 0.0
            coordinates = 'UTM'
            instrument = 'N/A'
            update_station(db,
                           net,
                           sta,
                           X,
                           Y,
                           altitude,
                           coordinates=coordinates,
                           instrument=instrument)
    else:
        from ..s002populate_station_table import main
        main()
Example #6
def main(smooth, show, outfile):
    db = connect()
    fig = plt.figure()
    for sta1, sta2 in get_station_pairs(db):
        sta1 = "%s.%s" % (sta1.net, sta1.sta)
        sta2 = "%s.%s" % (sta2.net, sta2.sta)
        pair = "%s_%s" % (sta1, sta2)
        try:
            st = read(os.path.join('SARA', 'RATIO', pair, "*"))
        except:
            print("No data for %s" % pair)
            continue
        st.merge(fill_value=np.nan)
        t = pd.date_range(st[0].stats.starttime.datetime,
                          periods=st[0].stats.npts,
                          freq="%ims" % (st[0].stats.delta * 1000))
        s = pd.Series(data=st[0].data, index=t, dtype=st[0].data.dtype)
        s = s.resample('1T').median()
        s.to_csv("%s.csv" % pair)
        if smooth != 1:
            s = s.rolling(window=smooth, min_periods=1, center=True).median()
        plt.plot(s.index, s, lw=1.0, label='%s' % pair)
    plt.ylim(0, 200)
    plt.legend()
    fig.autofmt_xdate()
    plt.tight_layout()
    if outfile:
        plt.savefig(outfile)
    if show:
        plt.show()
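
The resampling step in isolation: pandas bins the ratio series into one-minute medians ("1T" is the classic alias; recent pandas versions prefer "1min"). A self-contained toy version:

import numpy as np
import pandas as pd

t = pd.date_range("2024-01-01", periods=600, freq="100ms")
s = pd.Series(np.random.rand(600), index=t)
print(s.resample("1min").median())  # one row per minute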
Example #7
def config_get(names):
    """
    Display the value of the given configuration variable(s).
    """
    from ..api import connect, get_config
    db = connect()
    show_config_values(db, names)
    db.close()
Example #8
def execute(sql_command):
    """EXPERT MODE: Executes 'sql_command' on the database. Use this command
    at your own risk!!"""
    from msnoise.api import connect

    db = connect()
    r = db.execute(sql_command)

    if sql_command.count("select") or sql_command.count("SELECT"):
        print(r.fetchall())
    db.commit()
    db.close()
Example #9
def main(per, a1, b1, l1, s1, a2, b2, l2, s2, filterid, comp, show):
    # Smoothing and damping parameters
    db = connect()
    alpha1 = a1 if a1 else float(get_config(db, "alpha1", plugin="Tomo"))
    beta1 = b1 if b1 else float(get_config(db, "beta1", plugin="Tomo"))
    lambda1 = l1 if l1 else float(get_config(db, "lambda1", plugin="Tomo"))
    sigma1 = s1 if s1 else float(get_config(db, "sigma1", plugin="Tomo"))

    alpha2 = a2 if a2 else float(get_config(db, "alpha2", plugin="Tomo"))
    beta2 = b2 if b2 else float(get_config(db, "beta2", plugin="Tomo"))
    lambda2 = l2 if l2 else float(get_config(db, "lambda2", plugin="Tomo"))
    sigma2 = s2 if s2 else float(get_config(db, "sigma2", plugin="Tomo"))

    v_cmap = get_config(db, "v_cmap", plugin="Tomo")
    d_cmap = get_config(db, "d_cmap", plugin="Tomo")

    if per is None:
        PER = get_config(db, "ftan_periods", plugin="Tomo")
        periods = np.array([float(pi) for pi in PER.split(',')])
    else:
        periods = [
            float(per),
        ]

    # ANSWT inputs

    gridfile = os.path.join("TOMO_FILES", "%02i" % filterid, comp, "Grid.dat")
    stacoordfile = os.path.join("TOMO_FILES", "%02i" % filterid, comp,
                                "STACoord.dat")

    for per in periods:
        DCfile = os.path.join("TOMO_FILES", "%02i" % filterid, comp,
                              "TestGroupVel_%.3fs.dat" % float(per))
        PERIOD = per

        paramfile = os.path.join("TOMO_FILES", "%02i" % filterid, comp,
                                 'ParamFile.txt')
        fid = open(paramfile, 'w')
        fid.write(
            '%% alpha1 \t beta1 \t lambda1 \t Lcorr1 \t alpha2 \t beta2 \t lambda2 \t Lcorr2\n'
        )
        fid.write(
            '%f %f %f %f %f %f %f %f\n' %
            (alpha1, beta1, lambda1, sigma1, alpha2, beta2, lambda2, sigma2))
        fid.close()
        try:
            ANSWT(gridfile, stacoordfile, DCfile, paramfile, PERIOD, show,
                  v_cmap, d_cmap)
        except:
            traceback.print_exc()
            print("!" * 80)
            print("Can't compute tomo for period=", per)
            print("!" * 80)
Example #10
def info():
    from msnoise.api import connect, get_config
    from .default import default
    db = connect()
    click.echo('')
    click.echo('Raw config bits: "D"efault or "M"odified (green)')
    for key in default.keys():
        tmp = get_config(db, key, plugin='Tomo')
        if tmp == default[key][1]:
            click.secho(" D %s: %s" % (key, tmp))
        else:
            click.secho(" M %s: %s" % (key, tmp), fg='green')
Example #11
def clean_duplicates():
    """Checks the Jobs table and deletes duplicate entries"""
    from msnoise.api import connect, get_tech

    db = connect()
    if get_tech() == 1:
        query = "DELETE FROM jobs WHERE rowid NOT IN (SELECT MIN(rowid) FROM jobs GROUP BY day,pair,jobtype)"
    else:
        query = "DELETE from jobs USING jobs, jobs as vtable WHERE (jobs.ref > vtable.ref) AND (jobs.day=vtable.day) AND (jobs.pair=vtable.pair) AND (jobs.jobtype=vtable.jobtype)"
    db.execute(query)
    db.commit()
    db.close()
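
The SQLite branch of the query can be verified against a throwaway in-memory database; the table layout follows the jobs columns used in these examples:

import sqlite3

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE jobs (ref INTEGER PRIMARY KEY, day TEXT,"
            " pair TEXT, jobtype TEXT, flag TEXT)")
con.executemany("INSERT INTO jobs (day, pair, jobtype, flag)"
                " VALUES (?, ?, ?, ?)",
                [("2024-01-01", "NET1.STA1_NET2.STA2", "CC", "T")] * 3)
con.execute("DELETE FROM jobs WHERE rowid NOT IN"
            " (SELECT MIN(rowid) FROM jobs GROUP BY day,pair,jobtype)")
print(con.execute("SELECT COUNT(*) FROM jobs").fetchone())  # -> (1,)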
Example #12
def reset(jobtype, all, rule):
    """Resets the job to "T"odo. ARG is [CC] or [DTT]. By default
    only resets jobs "I"n progress. --all resets all jobs, whatever
    the flag value"""
    from ..api import connect, reset_jobs
    session = connect()
    if jobtype == "DA":
        session.execute("update data_availability set flag='M' where 1")
    elif jobtype != jobtype.upper():
        logging.info("The jobtype %s is not uppercase (usually jobtypes"
                     " are uppercase...)" % jobtype)
    reset_jobs(session, jobtype, all, rule)
    session.close()
Example #13
def new_jobs(init, nocc, hpc=""):
    """Determines if new CC jobs are to be defined"""
    if not hpc:
        from ..s02new_jobs import main
        main(init, nocc)
    if hpc:
        from ..api import connect
        left, right = hpc.split(":")
        db = connect()
        db.execute(
            "INSERT INTO jobs (pair, day, jobtype, flag) SELECT pair, day, '%s', 'T' from jobs where jobtype='%s' and flag='D';"
            % (right, left))
        db.commit()
        db.close()
Example #14
def main():
    import os
    from obspy import read, UTCDateTime
    from msnoise.api import connect, is_next_job, get_next_job, \
        get_data_availability

    db = connect()
    while is_next_job(db, jobtype='AMAZ1'):
        jobs = get_next_job(db, jobtype='AMAZ1')
        for job in jobs:
            net, sta = job.pair.split('.')
            gd = UTCDateTime(job.day).datetime
            print("Processing %s.%s for day %s" % (net, sta, job.day))
            files = get_data_availability(
                    db, net=net, sta=sta, starttime=gd, endtime=gd,
                    comp="Z")
            for file in files:
                fn = os.path.join(file.path, file.file)
                st = read(fn, starttime=UTCDateTime(job.day),
                          endtime=UTCDateTime(job.day) + 86400)
                print(st)
Example #15
def upgrade():
    """Upgrade the database from previous to a new version.\n
    This procedure adds new parameters with their default value
    in the config database.
    """
    from ..api import connect, Config, read_database_inifile
    from ..default import default
    db = connect()
    tech, hostname, database, username, password, prefix = \
        read_database_inifile()
    if prefix != "":
        prefix = prefix + "_"
    for name in default.keys():
        try:
            db.add(Config(name=name, value=default[name][-1]))
            db.commit()
        except:
            db.rollback()
            # print("Passing %s: already in DB" % name)
            continue
    try:
        db.execute("CREATE UNIQUE INDEX job_index ON %sjobs (day, pair, "
                   "jobtype)" % prefix)
        db.commit()
    except:
        logging.info("It looks like the v1.5 'job_index' is already in the DB")
        traceback.print_exc()
        db.rollback()

    try:
        db.execute("CREATE INDEX job_index2 ON %sjobs (jobtype, flag)" %
                   prefix)
        db.commit()
    except:
        logging.info(
            "It looks like the v1.6 'job_index2' is already in the DB")
        db.rollback()

    try:
        db.execute(
            "CREATE UNIQUE INDEX da_index ON %sdata_availability (path, "
            "file, net, sta, comp)" % prefix)
        db.commit()
    except:
        logging.info("It looks like the v1.5 'da_index' is already in the DB")
        db.rollback()

    db.close()
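
The try/commit/rollback dance exists because not every backend accepts CREATE INDEX IF NOT EXISTS (stock MySQL does not; SQLite and PostgreSQL do). Where it is supported, the same upgrade step can be written idempotently, as in this SQLite sketch:

import sqlite3

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE jobs (ref INTEGER PRIMARY KEY, day TEXT,"
            " pair TEXT, jobtype TEXT, flag TEXT)")
# Safe to run on every upgrade: the second call is a no-op.
con.execute("CREATE UNIQUE INDEX IF NOT EXISTS job_index"
            " ON jobs (day, pair, jobtype)")
con.execute("CREATE UNIQUE INDEX IF NOT EXISTS job_index"
            " ON jobs (day, pair, jobtype)")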
Example #16
def reset(jobtype, all, rule):
    """Resets the job to "T"odo. ARG is [CC] or [DTT]. By default
    only resets jobs "I"n progress. --all resets all jobs, whatever
    the flag value"""
    from ..api import connect, reset_jobs, read_db_inifile
    dbini = read_db_inifile()
    prefix = (dbini.prefix + '_') if dbini.prefix != '' else ''
    session = connect()
    if jobtype == "DA":
        session.execute("UPDATE {0}data_availability SET flag='M'"
                        .format(prefix))
    elif jobtype != jobtype.upper():
        logging.info("The jobtype %s is not uppercase (usually jobtypes"
                     " are uppercase...)"%jobtype)
    reset_jobs(session, jobtype, all, rule)
    session.close()
Example #17
def new_jobs(init, nocc, hpc=""):
    """Determines if new CC jobs are to be defined"""
    if not hpc:
        from ..s02new_jobs import main
        main(init, nocc)
    if hpc:
        from ..api import connect, read_db_inifile
        dbini = read_db_inifile()
        prefix = (dbini.prefix + '_') if dbini.prefix != '' else ''
        left, right = hpc.split(':')
        db = connect()
        db.execute("INSERT INTO {prefix}jobs (pair, day, jobtype, flag) "
                   "SELECT pair, day, '{right_type}', 'T' FROM {prefix}jobs "
                   "WHERE jobtype='{left_type}' AND flag='D';"
                   .format(prefix=prefix, right_type=right, left_type=left))
        db.commit()
        db.close()
Example #18
def reset(jobtype, all, rule):
    """Resets the job to "T"odo. JOBTYPE is the acronym of the job type.
    By default only resets jobs "I"n progress. --all resets all jobs, whatever
    the flag value. Standard Job Types are CC, STACK, MWCS and DTT, but
    plugins can define their own."""
    from ..api import connect, reset_jobs, read_db_inifile
    dbini = read_db_inifile()
    prefix = (dbini.prefix + '_') if dbini.prefix != '' else ''
    session = connect()
    if jobtype == "DA":
        session.execute("UPDATE {0}data_availability SET flag='M'"
                        .format(prefix))
    elif jobtype != jobtype.upper():
        logging.info("The jobtype %s is not uppercase (usually jobtypes"
                     " are uppercase...)"%jobtype)
    reset_jobs(session, jobtype, all, rule)
    session.close()
Example #19
def upgrade():
    """Upgrade the database from previous to a new version.\n
    This procedure adds new parameters with their default value
    in the config database.
    """
    from ..api import connect, Config, read_db_inifile
    from ..default import default
    db = connect()
    dbini = read_db_inifile()
    prefix = (dbini.prefix + '_') if dbini.prefix != '' else ''
    for name in default.keys():
        try:
            db.add(Config(name=name, value=default[name].default))
            db.commit()
        except:
            db.rollback()
            # print("Passing %s: already in DB" % name)
            continue
    try:
        db.execute("CREATE UNIQUE INDEX job_index ON %sjobs (day, pair, "
                   "jobtype)" %
                   prefix)
        db.commit()
    except:
        logging.info("It looks like the v1.5 'job_index' is already in the DB")
        db.rollback()

    try:
        db.execute("CREATE INDEX job_index2 ON %sjobs (jobtype, flag)" %
                   prefix)
        db.commit()
    except:
        logging.info("It looks like the v1.6 'job_index2' is already in the DB")
        db.rollback()

    try:
        db.execute("CREATE UNIQUE INDEX da_index ON %sdata_availability (path, "
                   "file, net, sta, loc, chan)" %
                   prefix)
        db.commit()
    except:
        logging.info("It looks like the v1.5 'da_index' is already in the DB")
        db.rollback()

    db.close()
Example #20
def upgrade():
    """Upgrade the database from previous to a new version.\n
    This procedure adds new parameters with their default value
    in the config database.
    """
    from ..api import connect, Config, read_db_inifile
    from ..default import default
    db = connect()
    dbini = read_db_inifile()
    prefix = (dbini.prefix + '_') if dbini.prefix != '' else ''
    for name in default.keys():
        try:
            db.add(Config(name=name, value=default[name][1]))
            db.commit()
        except:
            db.rollback()
            # print("Passing %s: already in DB" % name)
            continue
    try:
        db.execute("CREATE UNIQUE INDEX job_index ON %sjobs (day, pair, "
                   "jobtype)" %
                   prefix)
        db.commit()
    except:
        logging.info("It looks like the v1.5 'job_index' is already in the DB")
        db.rollback()

    try:
        db.execute("CREATE INDEX job_index2 ON %sjobs (jobtype, flag)" %
                   prefix)
        db.commit()
    except:
        logging.info("It looks like the v1.6 'job_index2' is already in the DB")
        db.rollback()

    try:
        db.execute("CREATE UNIQUE INDEX da_index ON %sdata_availability (path, "
                   "file, net, sta, comp)" %
                   prefix)
        db.commit()
    except:
        logging.info("It looks like the v1.5 'da_index' is already in the DB")
        db.rollback()

    db.close()
Example #21
def execute(sql_command):
    """EXPERT MODE: Executes 'sql_command' on the database. Use this command
    at your own risk!!"""
    from msnoise.api import connect

    db = connect()
    r = db.execute(sql_command)

    if sql_command.count("select") or sql_command.count("SELECT"):
        result = r.fetchall()
        if not len(result):
            print("The query returned no results, sorry.")
        else:
            import pandas as pd
            df = pd.DataFrame(result)
            print(df)
    db.commit()
    db.close()
Example #22
def info(jobs):
    """Outputs general information about the current install and config, plus
    information about jobs and their status."""
    from ..api import connect

    if not os.path.isfile('db.ini'):
        click.secho(' - db.ini is not present, is MSNoise installed here ?',
                    fg='red')
        return

    db = connect()
    if not jobs:
        info_folders(db)
        info_parameters(db)
        info_stations(db)
    info_jobs(db)
    info_plugins(db)
    db.close()
Example #23
def clean_duplicates():
    """Checks the Jobs table and deletes duplicate entries"""
    from msnoise.api import connect, read_db_inifile

    dbini = read_db_inifile()
    prefix = (dbini.prefix + '_') if dbini.prefix != '' else ''
    db = connect()
    if dbini.tech == 1:
        query = 'DELETE FROM {0}jobs WHERE rowid NOT IN '\
                '(SELECT MIN(rowid) FROM {0}jobs GROUP BY day,pair,jobtype)'\
                .format(prefix)
    else:
        query = 'DELETE from {0}jobs USING {0}jobs as j1, {0}jobs as j2 '\
                'WHERE (j1.ref > j2.ref) AND (j1.day=j2.day) '\
                'AND (j1.pair=j2.pair) AND (j1.jobtype=j2.jobtype)'\
                .format(prefix)
    db.execute(query)
    db.commit()
    db.close()
Example #24
def config_set(name_value):
    """
    Set a configuration value. The argument should be of the form
    'variable=value'.
    """
    from ..default import default
    if not name_value.count("="):
        click.echo("!! format of the set command is name=value !!")
        return
    name, value = name_value.split("=")
    if name not in default:
        click.echo("!! unknown parameter %s !!" % name)
        return
    from ..api import connect, update_config
    db = connect()
    update_config(db, name, value)
    db.commit()
    db.close()
    click.echo("Successfully updated parameter %s = %s" % (name, value))
Example #25
def populate(fromda):
    """Rapidly scan the archive filenames and find Network/Stations"""
    if fromda:
        logging.info("Overriding workflow...")
        db = connect()
        stations = db.query(DataAvailability.net, DataAvailability.sta). \
            group_by(DataAvailability.net, DataAvailability.sta)

        for net, sta in stations:
            print('Adding:', net, sta)
            X = 0.0
            Y = 0.0
            altitude = 0.0
            coordinates = 'UTM'
            instrument = 'N/A'
            update_station(db, net, sta, X, Y, altitude,
                           coordinates=coordinates, instrument=instrument)
    else:
        from ..s002populate_station_table import main
        main()
Example #26
def da_stations_update_loc_chan():
    """EXPERT MODE: Populates the Location & Channel from the Data Availability
    table. Warning: rewrites automatically, no confirmation."""
    from msnoise.api import connect, get_stations

    session = connect()
    stations = get_stations(session)
    for sta in stations:
        data = session.query(DataAvailability). \
            filter(text("net=:net")). \
            filter(text("sta=:sta")). \
            group_by(DataAvailability.net, DataAvailability.sta,
                     DataAvailability.loc, DataAvailability.chan). \
            params(net=sta.net, sta=sta.sta).all()
        locids = sorted([d.loc for d in data])
        chans = sorted([d.chan for d in data])
        print("%s.%s has locids:%s and chans:%s" % (sta.net, sta.sta,
                                                    locids, chans))
        sta.used_location_codes = ",".join(locids)
        sta.used_channel_names = ",".join(chans)
        session.commit()
Example #27
def info(jobs):
    """Outputs general information about the current install and config, plus
    information about jobs and their status."""
    from ..api import connect, get_config, get_job_types, get_filters, \
        get_stations
    from ..default import default

    def d(path):
        return os.path.split(path)[0]

    if not os.path.isfile('db.ini'):
        click.secho(' - db.ini is not present, is MSNoise installed here ?',
                    fg='red')
        return

    db = connect()

    if not jobs:
        click.echo('')
        click.echo('General:')

        click.echo('MSNoise is installed in: %s' %
                   d(d(d(os.path.abspath(__file__)))))

        if os.path.isfile('db.ini'):
            click.echo(' - db.ini is present')
        else:
            click.secho(
                ' - db.ini is not present, is MSNoise installed here ?',
                fg='red')
            return
        click.echo('')

        click.echo('')
        click.echo('Configuration:')

        data_folder = get_config(db, "data_folder")
        if os.path.isdir(data_folder):
            click.echo(" - %s exists" % data_folder)
        else:
            click.secho(" - %s does not exists !" % data_folder, fg='red')

        output_folder = get_config(db, "output_folder")
        if os.path.isdir(output_folder):
            click.echo(" - %s exists" % output_folder)
        else:
            if get_config(db, 'keep_all') in ['Y', 'y']:
                for job in get_job_types(db):
                    if job[1] == 'D':
                        if job[0] > 0:
                            click.secho(
                                " - %s does not exists and that is not normal"
                                " (%i CC jobs done)" % (output_folder, job[0]),
                                fg='red')
                        else:
                            click.secho(
                                " - %s does not exists and that is normal"
                                " (%i CC jobs done)" % (output_folder, job[0]))
            else:
                click.secho(" - %s does not exists (and that is normal because"
                            " keep_all=False)" % output_folder)

        click.echo('')
        click.echo('Raw config bits: "D"efault or "M"odified (green)')
        for key in default.keys():
            tmp = get_config(db, key)
            if tmp == default[key][1]:
                click.secho(" D %s: %s" % (key, tmp))
            else:
                click.secho(" M %s: %s" % (key, tmp), fg='green')
        # TODO add plugins params

        click.echo('')
        click.echo('Filters:')
        print('ID: [low:high]  [mwcs_low:mwcs_high]    mwcs_wlen    mwcs_step'
              '   used')
        for f in get_filters(db, all=True):
            data = (f.ref, f.low, f.high, f.mwcs_low, f.mwcs_high, f.mwcs_wlen,
                    f.mwcs_step, ['N', 'Y'][f.used])
            print('%02i: [%.03f:%.03f] [%.03f:%.03f] %.03i %.03i %s' % data)

        click.echo('')
        click.echo('Stations:')
        for s in get_stations(db, all=True):
            data = (s.net, s.sta, s.X, s.Y, s.altitude, s.coordinates,
                    ['N', 'Y'][s.used])
            print('%s.%s %.4f %.4f %.1f %s %s' % data)

    click.echo("MSNoise")
    for jobtype in ["CC", "STACK", "MWCS", "DTT"]:
        click.echo(' %s:' % jobtype)
        for (n, jobtype) in get_job_types(db, jobtype):
            click.echo("  %s : %i" % (jobtype, n))

    plugins = get_config(db, "plugins")
    if plugins:
        plugins = plugins.split(",")
        for ep in pkg_resources.iter_entry_points(
                group='msnoise.plugins.jobtypes'):
            module_name = ep.module_name.split(".")[0]
            if module_name in plugins:
                click.echo('')
                click.echo('Plugin: %s' % module_name)
                for row in ep.load()():
                    click.echo(' %s:' % row["name"])
                    for (n, jobtype) in get_job_types(db, row["name"]):
                        click.echo("  %s : %i" % (jobtype, n))
Example #28
def main(sta1, sta2, filterid, components, mov_stack=1, ampli=5, show=False,
         outfile=False, refilter=None, startdate=None, enddate=None):

    db = connect()
    cc_sampling_rate = float(get_config(db, 'cc_sampling_rate'))
    start, end, datelist = build_movstack_datelist(db)
    base = mdates.date2num(start)
    sta1 = sta1.replace('.', '_')
    sta2 = sta2.replace('.', '_')

    # TODO: Height adjustment of the plot for large number of stacks.
    # Preferably interactive
    fig = plt.figure(figsize=(12, 9))

    if refilter:
        freqmin, freqmax = refilter.split(':')
        freqmin = float(freqmin)
        freqmax = float(freqmax)

    if sta2 >= sta1:
        pair = "%s:%s" % (sta1, sta2)

        print("New Data for %s-%s-%i-%i" % (pair, components, filterid,
                                            mov_stack))
        nstack, stack_total = get_results(db, sta1, sta2, filterid, components,
                                          datelist, mov_stack, format="matrix")
        ax = fig.add_subplot(111)
        for i, line in enumerate(stack_total):
            if np.all(np.isnan(line)):
                continue

            if refilter:
                line = bandpass(line, freqmin, freqmax, cc_sampling_rate,
                                zerophase=True)

            freq, line = prepare_abs_postitive_fft(line, cc_sampling_rate)
            line /= line.max()

            ax.plot(freq, line * ampli + i + base, c='k')

        for filterdb in get_filters(db, all=True):
            if filterid == filterdb.ref:
                low = float(filterdb.low)
                high = float(filterdb.high)
                break

        ax.set_ylim(start-datetime.timedelta(days=ampli),
                    end+datetime.timedelta(days=ampli))
        ax.yaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d'))

        ax.set_xlabel("Frequency [Hz]")
        ax.set_xscale('log')
        ax.grid()

        title = '%s : %s, %s, Filter %d (%.2f - %.2f Hz), Stack %d' %\
                (sta1.replace('_', '.'), sta2.replace('_', '.'), components,
                 filterid, low, high, mov_stack)
        if refilter:
            title += ", Re-filtered (%.2f - %.2f Hz)" % (freqmin, freqmax)
        ax.set_title(title)

        cursor = Cursor(ax, useblit=True, color='red', linewidth=1.2)
        print(outfile)
        if outfile:
            if outfile.startswith("?"):
                pair = pair.replace(':', '-')
                outfile = outfile.replace('?', '%s-%s-f%i-m%i' % (pair,
                                                                  components,
                                                                  filterid,
                                                                  mov_stack))
            outfile = "spectime" + outfile
            print("output to:", outfile)
            plt.savefig(outfile)
        if show:
            plt.show()
        else:
            plt.close(fig)
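
The refilter argument is a plain "freqmin:freqmax" string, so its parsing reduces to one line:

refilter = "0.1:1.0"  # e.g. passed on the command line
freqmin, freqmax = map(float, refilter.split(":"))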
Example #29
def main(sta1, sta2, filterid, components, mov_stack=1, ampli=5, show=False,
         outfile=False, refilter=None, startdate=None, enddate=None, **kwargs):

    db = connect()
    cc_sampling_rate = float(get_config(db, 'cc_sampling_rate'))
    start, end, datelist = build_movstack_datelist(db)
    base = mdates.date2num(start)
    sta1 = sta1.replace('.', '_')
    sta2 = sta2.replace('.', '_')

    # TODO: Height adjustment of the plot for large number of stacks.
    # Preferably interactive
    fig = plt.figure(figsize=(12, 9))

    if refilter:
        freqmin, freqmax = refilter.split(':')
        freqmin = float(freqmin)
        freqmax = float(freqmax)

    if sta2 >= sta1:
        pair = "%s:%s" % (sta1, sta2)

        print("New Data for %s-%s-%i-%i" % (pair, components, filterid,
                                            mov_stack))
        nstack, stack_total = get_results(db, sta1, sta2, filterid, components,
                                          datelist, mov_stack, format="matrix")
        ax = fig.add_subplot(111)
        for i, line in enumerate(stack_total):
            if np.all(np.isnan(line)):
                continue

            if refilter:
                line = bandpass(line, freqmin, freqmax, cc_sampling_rate,
                                zerophase=True)

            freq, line = prepare_abs_postitive_fft(line, cc_sampling_rate)
            line /= line.max()

            ax.plot(freq, line * ampli + i + base, c='k')

        for filterdb in get_filters(db, all=True):
            if filterid == filterdb.ref:
                low = float(filterdb.low)
                high = float(filterdb.high)
                break

        ax.set_ylim(start-datetime.timedelta(days=ampli),
                    end+datetime.timedelta(days=ampli))
        ax.yaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d'))

        if "xlim" in kwargs:
            plt.xlim(kwargs["xlim"][0], kwargs["xlim"][1])

        ax.set_xlabel("Frequency [Hz]")
        ax.set_xscale('log')
        ax.grid()

        title = '%s : %s, %s, Filter %d (%.2f - %.2f Hz), Stack %d' %\
                (sta1.replace('_', '.'), sta2.replace('_', '.'), components,
                 filterid, low, high, mov_stack)
        if refilter:
            title += ", Re-filtered (%.2f - %.2f Hz)" % (freqmin, freqmax)
        ax.set_title(title)

        cursor = Cursor(ax, useblit=True, color='red', linewidth=1.2)
        print(outfile)
        if outfile:
            if outfile.startswith("?"):
                pair = pair.replace(':', '-')
                outfile = outfile.replace('?', '%s-%s-f%i-m%i' % (pair,
                                                                  components,
                                                                  filterid,
                                                                  mov_stack))
            outfile = "spectime" + outfile
            print("output to:", outfile)
            plt.savefig(outfile)
        if show:
            plt.show()
        else:
            plt.close(fig)
Example #30
cli.add_command(stack)
cli.add_command(compute_mwcs)
cli.add_command(compute_stretching)
cli.add_command(compute_dtt)
cli.add_command(compute_dvv)
cli.add_command(reset)
cli.add_command(db)
cli.add_command(jupyter)
cli.add_command(test)
# Finally add the plot group too:
cli.add_command(plot)

try:
    from ..api import connect, get_config

    db = connect()
    plugins = get_config(db, "plugins")
    db.close()
except:
    plugins = None

if plugins:
    plugins = plugins.split(",")
    for ep in pkg_resources.iter_entry_points(
            group='msnoise.plugins.commands'):
        module_name = ep.module_name.split(".")[0]
        if module_name in plugins:
            plugin.add_command(ep.load())
            p.add_command(ep.load())

cli.add_command(plugin)
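
The discovery loop relies on setuptools entry points: every plugin package advertises its click commands under the msnoise.plugins.commands group, and the loader attaches them to the plugin group. A minimal self-contained sketch of that pattern (the group and its wiring here are illustrative):

import click
import pkg_resources

@click.group()
def plugin():
    """Commands contributed by plugin packages are attached here."""

for ep in pkg_resources.iter_entry_points(group='msnoise.plugins.commands'):
    # ep.load() returns the click.Command object declared by the plugin
    plugin.add_command(ep.load())

if __name__ == "__main__":
    plugin()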
Example #31
def dtt(ctx, sta1, sta2, filterid, comp, day, mov_stack, show, outfile):
    """STA1 and STA2 must be provided with this format: NET.STA !\n
    DAY must be provided in the ISO format: YYYY-MM-DD"""
    if sta1 > sta2:
        click.echo("Stations STA1 and STA2 must be sorted alphabetically.")
        return
    if ctx.obj['MSNOISE_custom']:
        from dtt import main
    else:
        from ..plots.dtt import main
    main(sta1, sta2, filterid, comp, day, mov_stack, show, outfile)


## Main script

try:
    db = connect()
    plugins = get_config(db, "plugins")
    db.close()
except DBConfigNotFoundError:
    plugins = None
except sqlalchemy.exc.OperationalError as e:
    logging.critical('Unable to read project configuration: error connecting to the database:\n{}'.format(str(e)))
    sys.exit(1)

if plugins:
    plugins = plugins.split(",")
    for ep in pkg_resources.iter_entry_points(group='msnoise.plugins.commands'):
        module_name = ep.module_name.split(".")[0]
        if module_name in plugins:
            plugin.add_command(ep.load())
            p.add_command(ep.load())
Example #32
def main(sta1,
         sta2,
         filterid,
         components,
         mov_stack=1,
         ampli=5,
         seismic=False,
         show=False,
         outfile=None,
         envelope=False,
         refilter=None,
         startdate=None,
         enddate=None):
    db = connect()
    maxlag = float(get_config(db, 'maxlag'))
    samples = get_maxlag_samples(db)
    cc_sampling_rate = float(get_config(db, 'cc_sampling_rate'))

    start, end, datelist = build_movstack_datelist(db, startdate, enddate)
    base = mdates.date2num(start)

    fig = plt.figure(figsize=(12, 9))

    sta1 = sta1.replace('.', '_')
    sta2 = sta2.replace('.', '_')
    t = np.arange(samples) / cc_sampling_rate - maxlag

    if refilter:
        freqmin, freqmax = refilter.split(':')
        freqmin = float(freqmin)
        freqmax = float(freqmax)

    if sta2 >= sta1:
        pair = "%s:%s" % (sta1, sta2)

        print("New Data for %s-%s-%i-%i" %
              (pair, components, filterid, mov_stack))
        nstack, stack_total = get_results(db,
                                          sta1,
                                          sta2,
                                          filterid,
                                          components,
                                          datelist,
                                          mov_stack,
                                          format="matrix")
        ax = fig.add_subplot(111)
        for i, line in enumerate(stack_total):
            if np.all(np.isnan(line)):
                continue

            if refilter:
                line = bandpass(line,
                                freqmin,
                                freqmax,
                                cc_sampling_rate,
                                zerophase=True)
            if envelope:
                line = obspy_envelope(line)

            line /= line.max()
            ax.plot(t, line * ampli + i + base, c='k')

            if seismic:
                y1 = np.ones(len(line)) * i
                y2 = line * ampli + i + base
                ax.fill_between(t,
                                y1,
                                y2,
                                where=y2 >= y1,
                                facecolor='k',
                                interpolate=True)

        for filterdb in get_filters(db, all=True):
            if filterid == filterdb.ref:
                low = float(filterdb.low)
                high = float(filterdb.high)
                break

        ax.set_xlabel("Lag Time (s)")
        ax.axhline(0, lw=0.5, c='k')
        ax.grid()
        ax.scatter(0, [
            start,
        ], alpha=0)
        ax.set_ylim(start - datetime.timedelta(days=ampli),
                    end + datetime.timedelta(days=ampli))
        ax.set_xlim(-maxlag, maxlag)
        ax.fmt_ydata = mdates.DateFormatter('%Y-%m-%d')
        cursor = Cursor(ax, useblit=True, color='red', linewidth=1.2)

        title = '%s : %s, %s, Filter %d (%.2f - %.2f Hz), Stack %d' % \
                (sta1.replace('_', '.'), sta2.replace('_', '.'), components,
                 filterid, low, high, mov_stack)

        if refilter:
            title += ", Re-filtered (%.2f - %.2f Hz)" % (freqmin, freqmax)
        ax.set_title(title)

        if outfile:
            if outfile.startswith("?"):
                pair = pair.replace(':', '-')
                outfile = outfile.replace(
                    '?',
                    '%s-%s-f%i-m%i' % (pair, components, filterid, mov_stack))
            outfile = "ccftime " + outfile
            print("output to:", outfile)
            fig.savefig(outfile)

        if show:
            fig.show()
        else:
            plt.close(fig)
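
The envelope option maps each correlation line through obspy's envelope function (imported as obspy_envelope above), i.e. the magnitude of the analytic signal. In isolation:

import numpy as np
from obspy.signal.filter import envelope as obspy_envelope

line = np.sin(np.linspace(0, 20 * np.pi, 1000))
env = obspy_envelope(line)  # non-negative envelope of the oscillation
print(env.min() >= 0)       # True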
Example #33
def main(loglevel="INFO", njobs_per_worker=9999):
    logger = logbook.Logger("msnoise")
    # Reconfigure logger to show the pid number in log records
    logger = get_logger('msnoise.compute_psd_child', loglevel, with_pid=True)
    logger.info('*** Starting: Compute PPSD ***')
    db = connect()
    logger.debug('Preloading all instrument response')
    responses = preload_instrument_responses(db, return_format="inventory")

    params = get_params(db)
    ppsd_components = params.qc_components
    ppsd_length = params.qc_ppsd_length
    ppsd_overlap = params.qc_ppsd_overlap
    ppsd_period_smoothing_width_octaves = params.qc_ppsd_period_smoothing_width_octaves
    ppsd_period_step_octaves = params.qc_ppsd_period_step_octaves
    ppsd_period_limits = params.qc_ppsd_period_limits
    ppsd_db_bins = params.qc_ppsd_db_bins

    while is_next_job(db, jobtype='PSD'):
        logger.info("Getting the next job")
        jobs = get_next_job(db, jobtype='PSD', limit=njobs_per_worker)
        logger.debug("I will process %i jobs" % len(jobs))
        if len(jobs) == 0:
            # edge case, should only occur when is_next returns true, but
            # get_next receives no jobs (heavily parallelised code)
            continue
        for job in jobs:
            net, sta, loc = job.pair.split('.')
            print("Processing %s" % job.pair)
            gd = UTCDateTime(job.day).datetime
            files = get_data_availability(
                db,
                net=net,
                sta=sta,
                loc=loc,
                starttime=(UTCDateTime(job.day) - 1.5 * ppsd_length).datetime,
                endtime=gd)
            if len(files) == 0:
                print("No files found for %s" % job.day)
                continue

            for comp in ppsd_components:
                toprocess = []
                for file in files:
                    if file.chan[-1] != comp:
                        continue
                    tmp = os.path.join(file.path, file.file)
                    toprocess.append(tmp)
                if len(toprocess) == 0:
                    continue
                st = Stream()
                for tmp in np.unique(toprocess):
                    logger.debug("Reading %s" % tmp)
                    try:
                        st += read(
                            tmp,
                            starttime=UTCDateTime(gd) - 1.5 * ppsd_length,
                            endtime=UTCDateTime(gd +
                                                datetime.timedelta(days=1)) -
                            0.001)
                    except:
                        logger.debug("Problem loading %s" % tmp)
                if not len(st):
                    continue

                try:
                    st.merge()
                except:
                    logger.info("Failed merging streams:")
                    traceback.print_exc()
                    continue

                st = st.split()
                for tr in st:
                    tr.stats.network = tr.stats.network.upper()
                    tr.stats.station = tr.stats.station.upper()
                    tr.stats.channel = tr.stats.channel.upper()

                tr = st.select(component=comp)[0]
                out = to_sds(tr.stats, gd.year, int(gd.strftime('%j')))
                npzdout = os.path.join("PSD", "NPZ", out)
                logger.debug("ppsd will be output to: %s" % npzdout)
                ppsd = PPSD(tr.stats,
                            metadata=responses,
                            ppsd_length=ppsd_length,
                            overlap=ppsd_overlap,
                            period_smoothing_width_octaves=
                            ppsd_period_smoothing_width_octaves,
                            period_step_octaves=ppsd_period_step_octaves,
                            period_limits=ppsd_period_limits,
                            db_bins=ppsd_db_bins)
                # TODO handle when the response for this trace is not in the inv
                ppsd.add(st)
                out = to_sds(tr.stats, gd.year, int(gd.strftime('%j')))

                pngout = os.path.join("PSD", "PNG", out)
                if not os.path.isdir(os.path.split(npzdout)[0]):
                    os.makedirs(os.path.split(npzdout)[0])
                    os.makedirs(os.path.split(pngout)[0])

                ppsd.save_npz(npzdout + ".npz")
                update_job(db, job.day, job.pair, 'PSD', 'D', ref=job.ref)
                if not params.hpc:
                    # use a distinct name so the outer "job" is not shadowed
                    for j in jobs:
                        update_job(db, j.day, j.pair, 'PSD2HDF', 'T')
                try:
                    ppsd.plot(pngout + ".png")
                except:
                    logger.debug("Error saving PNG image")
                    traceback.print_exc()

                del ppsd

        logger.debug('Day (job) "D"one')