# Example 1
def main():

    parser = argparse.ArgumentParser(description='GNSS time series stacker')

    parser.add_argument('project', type=str, nargs=1, metavar='{project name}',
                        help="Specify the project name used to process the GAMIT solutions in Parallel.GAMIT.")
    parser.add_argument('-np', '--noparallel', action='store_true', help="Execute command without parallelization.")

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")
    Config = pyOptions.ReadOptions("gnss_data.cfg")  # type: pyOptions.ReadOptions

    if not args.noparallel:
        JobServer = pyJobServer.JobServer(Config, run_node_test=False)  # type: pyJobServer.JobServer
    else:
        JobServer = None
        Config.run_parallel = False

    # create the execution log

    # load polyhedrons
    project = Project(cnn, args.project[0])

    # plot initial state
    tqdm.write(' -- Plotting initial ETMs (unaligned)...')

    #for etm in tqdm(project.etms, ncols=160):
    #    etm.plot(pngfile=args.project[0] + '/' + etm.NetworkCode + '.' + etm.StationCode + '_0.png', residuals=True)

    project.align_stack()

    tqdm.write(' -- Plotting intermediate step ETMs (aligned)...')

# Example 2
def main():

    Config = pyOptions.ReadOptions('gnss_data.cfg')
    JobServer = pyJobServer.JobServer(Config)  # type: pyJobServer.JobServer

    cnn = dbConnection.Cnn('gnss_data.cfg')

    archive = pyArchiveStruct.RinexStruct(cnn)

    rinex = cnn.query_float(
        'SELECT * FROM rinex WHERE "ObservationYear" <= 1995 ORDER BY "NetworkCode", '
        '"StationCode", "ObservationYear", "ObservationDOY"',
        as_dict=True)
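    # as_dict=True makes query_float return one dictionary per row, keyed by
    # column name, which the rnx[...] lookups in the loop below rely on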

    pbar = tqdm(desc='%-30s' % ' >> Processing rinex files',
                total=len(rinex),
                ncols=160)

    modules = ('os', 'pyRinex')
    callback = []

    for rnx in rinex:

        filename = archive.build_rinex_path(rnx['NetworkCode'],
                                            rnx['StationCode'],
                                            rnx['ObservationYear'],
                                            rnx['ObservationDOY'],
                                            filename=rnx['Filename'])

        arguments = (rnx['NetworkCode'], rnx['StationCode'],
                     Config.archive_path, filename)

        JobServer.SubmitJob(check_rinex, arguments, (), modules, callback,
                            callback_class(pbar), 'callbackfunc')

        if JobServer.process_callback:
            # handle any output messages during this batch
            callback = output_handle(callback)
            JobServer.process_callback = False

    tqdm.write(' >> waiting for jobs to finish...')
    JobServer.job_server.wait()
    tqdm.write(' >> Done.')

    # process the errors and the new stations
    output_handle(callback)

    pbar.close()
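
# A minimal sketch of the callback_class used above (an assumption, not the
# project's actual implementation): SubmitJob only needs an object exposing
# the method name given in the call ('callbackfunc'), which receives the
# worker's output and advances the progress bar.
class callback_class(object):
    def __init__(self, pbar):
        # tqdm progress bar shared with the submission loop
        self.pbar = pbar
        self.result = None

    def callbackfunc(self, args):
        # store the worker output (later drained by output_handle) and
        # count one finished job
        self.result = args
        self.pbar.update(1)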

# Example 3
def main():
    parser = argparse.ArgumentParser(description='Script to synchronize AWS with OSU\'s archive database')

    parser.add_argument('date', type=str, nargs=1, help="Check the sync state for this given date. Format can be fyear or yyyy_ddd.")
    parser.add_argument('-mark', '--mark_uploaded', nargs='+', type=str, help="Pass net.stnm to mark these files as transferred to the AWS", metavar='{net.stnm}')
    parser.add_argument('-pull', '--pull_rinex', action='store_true', help="Get all the unsynchronized RINEX files in the local dir")
    parser.add_argument('-np', '--noparallel', action='store_true', help="Execute command without parallelization.")

    args = parser.parse_args()

    Config = pyOptions.ReadOptions("gnss_data.cfg")  # type: pyOptions.ReadOptions

    cnn = dbConnection.Cnn('gnss_data.cfg')

    # before attempting anything, check aliases!!
    print(' >> Checking GAMIT aliases')
    check_aliases(cnn)

    # initialize the PP job server
    if not args.noparallel:
        JobServer = pyJobServer.JobServer(Config, 1500)  # type: pyJobServer.JobServer
    else:
        JobServer = None
        Config.run_parallel = False

    dd = args.date[0]

    if '_' in dd:
        date = pyDate.Date(year=int(dd.split('_')[0]), doy=int(dd.split('_')[1]))
    elif dd == 'all':
        # run all dates (2004 to 2018)
        ts = range(pyDate.Date(year=2004, doy=20).mjd, pyDate.Date(year=2018, doy=87).mjd, 1)
        ts = [pyDate.Date(mjd=tts) for tts in ts]
        for date in ts:
            print(' >> Processing ' + str(date))
            pull_rinex(cnn, date, Config, JobServer)

        return
    else:
        date = pyDate.Date(fyear=float(dd))
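
    # worked illustration (hypothetical values): the accepted forms of the
    # positional date argument map to the same day, e.g.
    #   '2010_153'  -> pyDate.Date(year=2010, doy=153)
    #   '2010.4178' -> pyDate.Date(fyear=2010.4178)  (fractional year)
    # while 'all' loops over the hard-coded MJD range above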

    if args.pull_rinex:
        pull_rinex(cnn, date, Config, JobServer)

    if args.mark_uploaded is not None:
        print('Processing %i for day %s' % (len(args.mark_uploaded), date.yyyyddd()))
        # mark the list of stations as transferred to the AWS
        mark_uploaded(cnn, date, args.mark_uploaded)

# Example 4
def main():

    parser = argparse.ArgumentParser(
        description=
        'Database integrity, metadata checking and fixing tool')

    parser.add_argument(
        'stnlist',
        type=str,
        nargs='+',
        metavar='all|net.stnm',
        help=
        "List of networks/stations to process given in [net].[stnm] format or just [stnm] "
        "(separated by spaces; if [stnm] is not unique in the database, all stations with that "
        "name will be processed). Use keyword 'all' to process all stations in the database. "
        "If [net].all is given, all stations from network [net] will be processed. "
        "Alternatevily, a file with the station list can be provided.")

    parser.add_argument(
        '-d',
        '--date_filter',
        nargs='+',
        metavar='date',
        help='Date range filter for all operations. '
        'Can be specified in wwww-d, yyyy_ddd, yyyy/mm/dd or fyear format')

    parser.add_argument(
        '-rinex',
        '--check_rinex',
        choices=['fix', 'report'],
        type=str,
        nargs=1,
        help=
        'Check the RINEX integrity of the archive-database by verifying that the RINEX files '
        'reported in the rinex table exist in the archive. If argument = "fix" and a RINEX file '
        'does not exist, remove the record; associated PPP and gamit_soln records are also deleted. If argument = '
        '"report" then just list the missing files.')

    parser.add_argument(
        '-rnx_count',
        '--rinex_count',
        action='store_true',
        help='Count the total number of RINEX files (unique station-days) '
        'per day for a given time interval.')

    parser.add_argument(
        '-stnr',
        '--station_info_rinex',
        action='store_true',
        help=
        'Check that the receiver serial number in the rinex headers agrees with the station info '
        'receiver serial number.')

    parser.add_argument(
        '-stns',
        '--station_info_solutions',
        action='store_true',
        help='Check that the PPP hash values match the station info hash.')

    parser.add_argument(
        '-stnp',
        '--station_info_proposed',
        metavar='ignore_days',
        const=0,
        type=int,
        nargs='?',
        help=
        'Output a proposed station.info using the RINEX metadata. Optionally, specify '
        '[ignore_days] to ignore station.info records spanning <= [ignore_days] days.')

    parser.add_argument(
        '-stnc',
        '--station_info_check',
        action='store_true',
        help=
        'Check the consistency of the station information records in the database. Date range '
        'does not apply. Also, check that the RINEX files fall within a valid station information '
        'record.')

    parser.add_argument(
        '-g',
        '--data_gaps',
        metavar='ignore_days',
        const=0,
        type=int,
        nargs='?',
        help=
        'Check the RINEX files in the database and look for gaps (missing days). '
        'Optionally, specify [ignore_days] as the smallest gap to display.')

    parser.add_argument('-gg',
                        '--graphical_gaps',
                        action='store_true',
                        help='Visually output RINEX gaps for stations.')

    parser.add_argument(
        '-sc',
        '--spatial_coherence',
        choices=['exclude', 'delete', 'noop'],
        type=str,
        nargs=1,
        help=
        'Check that the RINEX files correspond to the stations they are linked to using their '
        'PPP coordinate. If keyword [exclude] or [delete], add the PPP solution to the excluded '
        'table or delete the PPP solution. If [noop], then only report but do not '
        'exclude or delete.')

    parser.add_argument(
        '-print',
        '--print_stninfo',
        choices=['long', 'short'],
        type=str,
        nargs=1,
        help=
        'Output the station info to stdout. [long] outputs the full line of the station info. '
        '[short] outputs a short version (better for screen visualization).')

    parser.add_argument(
        '-r',
        '--rename',
        metavar='net.stnm',
        nargs=1,
        help=
        "Takes the data from the station list and renames (merges) it to net.stnm. "
        "It also changes the rinex filenames in the archive to match those of the new destiny "
        "station. Only a single station can be given as the origin and destiny. "
        "Limit the date range using the -d option.")

    parser.add_argument(
        '-es',
        '--exclude_solutions',
        metavar=('{start_date}', '{end_date}'),
        nargs=2,
        help=
        'Exclude PPP solutions (by adding them to the excluded table) between {start_date} '
        'and {end_date}')

    parser.add_argument(
        '-del',
        '--delete_rinex',
        metavar=('{start_date}', '{end_date}', '{completion}'),
        nargs=3,
        help='Delete RINEX files (and associated solutions, PPP and GAMIT) '
        'from archive between {start_date} and {end_date} with completion <= {completion}. '
        'Completion ranges from 1.0 to 0.0. Use 1.0 to delete all data. '
        'Operation cannot be undone!')

    parser.add_argument('-np',
                        '--noparallel',
                        action='store_true',
                        help="Execute command without parallelization.")

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")  # type: dbConnection.Cnn

    # create the execution log
    cnn.insert('executions', script='pyIntegrityCheck.py')

    Config = pyOptions.ReadOptions(
        "gnss_data.cfg")  # type: pyOptions.ReadOptions

    stnlist = Utils.process_stnlist(cnn, args.stnlist)
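    # e.g. 'all' expands to every station, 'rms.igm1' to one network.station
    # pair, and a bare 'igm1' to every station with that code (hypothetical
    # names; see the stnlist help text above)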

    JobServer = pyJobServer.JobServer(
        Config,
        run_parallel=not args.noparallel)  # type: pyJobServer.JobServer

    #####################################
    # date filter

    dates = [pyDate.Date(year=1980, doy=1), pyDate.Date(year=2100, doy=1)]
    try:
        dates = process_date(args.date_filter)
    except ValueError as e:
        parser.error(str(e))
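
    # e.g. '-d 2010_001 2010_365' (hypothetical dates) narrows every check
    # below to calendar year 2010; otherwise the 1980-2100 defaults above apply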

    #####################################

    if args.check_rinex:
        CheckRinexIntegrity(cnn, Config, stnlist, dates[0], dates[1],
                            args.check_rinex[0], JobServer)

    #####################################

    if args.rinex_count:
        RinexCount(cnn, stnlist, dates[0], dates[1])

    #####################################

    if args.station_info_rinex:
        StnInfoRinexIntegrity(cnn, stnlist, dates[0], dates[1], JobServer)

    #####################################

    if args.station_info_check:
        StnInfoCheck(cnn, stnlist, Config)

    #####################################

    if args.data_gaps is not None:
        GetStnGaps(cnn, stnlist, args.data_gaps, dates[0], dates[1])

    if args.graphical_gaps:
        VisualizeGaps(cnn, stnlist, dates[0], dates[1])

    #####################################

    if args.spatial_coherence is not None:
        CheckSpatialCoherence(cnn, stnlist, dates[0], dates[1])

    #####################################

    if args.exclude_solutions is not None:
        try:
            dates = process_date(args.exclude_solutions)
        except ValueError as e:
            parser.error(str(e))

        ExcludeSolutions(cnn, stnlist, dates[0], dates[1])

    #####################################

    if args.print_stninfo is not None:
        if args.print_stninfo[0] == 'short':
            PrintStationInfo(cnn, stnlist, True)
        elif args.print_stninfo[0] == 'long':
            PrintStationInfo(cnn, stnlist, False)
        else:
            parser.error(
                'Argument for print_stninfo has to be either long or short')

    #####################################

    if args.station_info_proposed is not None:
        for stn in stnlist:
            stninfo = pyStationInfo.StationInfo(cnn,
                                                stn['NetworkCode'],
                                                stn['StationCode'],
                                                allow_empty=True)
            sys.stdout.write(
                stninfo.rinex_based_stninfo(args.station_info_proposed))

    #####################################

    if args.delete_rinex is not None:
        try:
            dates = process_date(args.delete_rinex[0:2])
        except ValueError as e:
            parser.error(str(e))

        DeleteRinex(cnn, stnlist, dates[0], dates[1],
                    float(args.delete_rinex[2]))

    #####################################

    if args.rename:
        if len(stnlist) > 1:
            parser.error(
                'Only a single station should be given for the origin station')

        if '.' not in args.rename[0]:
            parser.error('Format for destination station should be net.stnm')
        else:
            DestNetworkCode = args.rename[0].split('.')[0]
            DestStationCode = args.rename[0].split('.')[1]

            RenameStation(cnn, stnlist[0]['NetworkCode'],
                          stnlist[0]['StationCode'], DestNetworkCode,
                          DestStationCode, dates[0], dates[1],
                          Config.archive_path)

    JobServer.close_cluster()

# Example 5
def main():

    parser = argparse.ArgumentParser(description='GNSS time series stacker')

    parser.add_argument(
        'project',
        type=str,
        nargs=1,
        metavar='{project name}',
        help=
        "Specify the project name used to process the GAMIT solutions in Parallel.GAMIT."
    )
    parser.add_argument(
        'stack_name',
        type=str,
        nargs=1,
        metavar='{stack name}',
        help=
        "Specify a name for the stack: eg. itrf2014 or posgar07b. This name should be unique "
        "and cannot be repeated for any other solution project")
    parser.add_argument(
        '-max',
        '--max_iters',
        nargs=1,
        type=int,
        metavar='{max_iter}',
        help="Specify maximum number of iterations. Default is 4.")
    parser.add_argument(
        '-exclude',
        '--exclude_stations',
        nargs='+',
        type=str,
        metavar='{net.stnm}',
        help="Manually specify stations to remove from the stacking process.")
    parser.add_argument(
        '-use',
        '--use_stations',
        nargs='+',
        type=str,
        metavar='{net.stnm}',
        help="Manually specify stations to use for the stacking process.")
    parser.add_argument(
        '-dir',
        '--directory',
        type=str,
        help=
        "Directory to save the resulting PNG files. If not specified, assumed to be the "
        "production directory")
    parser.add_argument('-redo',
                        '--redo_stack',
                        action='store_true',
                        help="Delete the stack and redo it from scratch")
    parser.add_argument('-plot',
                        '--plot_stack_etms',
                        action='store_true',
                        default=False,
                        help="Plot the stack ETMs after computation is done")
    parser.add_argument(
        '-constrains',
        '--external_constrains',
        nargs='+',
        help=
        "File with external constrains parameters (position, velocity and periodic). These may be "
        "from a parent frame such as ITRF. "
        "Inheritance will occur with stations on the list whenever a parameter exists. "
        "Example: -constrains itrf14.txt "
        "Format is: net.stn x y z epoch vx vy vz sn_1y sn_6m cn_1y cn_6m se_1y se_6m ce_1y ce_6m "
        "su_1y su_6m cu_1y cu_6m ")
    parser.add_argument(
        '-d',
        '--date_end',
        nargs=1,
        metavar='date',
        help=
        'Limit the polyhedrons to the specified date. Can be in wwww-d, yyyy_ddd, yyyy/mm/dd '
        'or fyear format')
    parser.add_argument('-np',
                        '--noparallel',
                        action='store_true',
                        help="Execute command without parallelization.")

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")

    Config = pyOptions.ReadOptions(
        "gnss_data.cfg")  # type: pyOptions.ReadOptions

    JobServer = pyJobServer.JobServer(
        Config,
        run_parallel=not args.noparallel)  # type: pyJobServer.JobServer

    if args.max_iters:
        max_iters = int(args.max_iters[0])
    else:
        max_iters = 4
        print(' >> Defaulting to 4 iterations')

    if args.exclude_stations:
        exclude_stn = args.exclude_stations
    else:
        exclude_stn = []

    if args.use_stations:
        use_stn = args.use_stations
    else:
        use_stn = []

    dates = [Date(year=1980, doy=1), Date(datetime=datetime.now())]
    if args.date_end is not None:
        try:
            dates = process_date(
                [str(Date(year=1980, doy=1).fyear), args.date_end[0]])
        except ValueError as e:
            parser.error(str(e))

    # create folder for plots

    if args.directory:
        if not os.path.exists(args.directory):
            os.mkdir(args.directory)
    else:
        if not os.path.exists('production'):
            os.mkdir('production')
        args.directory = 'production'

    # load the ITRF dat file with the periodic space components
    if args.external_constrains:
        constrains = load_constrains(args.external_constrains[0])
    else:
        constrains = None
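
    # per the -constrains help text, each line of that file reads:
    #   net.stn x y z epoch vx vy vz  sn_1y sn_6m cn_1y cn_6m
    #   se_1y se_6m ce_1y ce_6m  su_1y su_6m cu_1y cu_6m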

    # create the stack object
    stack = pyStack.Stack(cnn,
                          args.project[0],
                          args.stack_name[0],
                          args.redo_stack,
                          end_date=dates[1])

    # stack.align_spaces(frame_params)
    # stack.to_json('alignment.json')
    # exit()

    for i in range(max_iters):
        # create the target polyhedrons based on iteration number (i == 0: PPP)
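        # (iteration 0 presumably aligns against the PPP coordinates; later
        # iterations align against positions predicted by the fitted ETMs)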

        target = calculate_etms(cnn, stack, JobServer, i)

        qbar = tqdm(total=len(stack),
                    ncols=160,
                    desc=' >> Aligning polyhedrons (%i of %i)' %
                    (i + 1, max_iters))

        # work on each polyhedron of the stack
        for j in range(len(stack)):

            qbar.update()

            if not stack[j].aligned:
                # do not move this if up one level: to speed up the target polyhedron loading process, the target is
                # set to an empty list when the polyhedron is already aligned
                if stack[j].date != target[j].date:
                    # raise an error if dates don't agree!
                    raise Exception(
                        'Error processing %s: dates don\'t agree (target date %s)'
                        % (stack[j].date.yyyyddd(), target[j].date.yyyyddd()))
                else:
                    # should only attempt to align a polyhedron that is unaligned
                    # do not set the polyhedron as aligned unless we are in the max iteration step
                    stack[j].align(target[j], i == max_iters - 1)
                    # write info to the screen
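                    # (T values are translations converted to mm and R values
                    # rotations scaled by 1e-9, matching the format string)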
                    qbar.write(
                        ' -- %s (%3i) %2i it: wrms: %4.1f T %5.1f %5.1f %5.1f '
                        'R (%5.1f %5.1f %5.1f)*1e-9' %
                        (stack[j].date.yyyyddd(), stack[j].stations_used,
                         stack[j].iterations, stack[j].wrms * 1000,
                         stack[j].helmert[-3] * 1000,
                         stack[j].helmert[-2] * 1000,
                         stack[j].helmert[-1] * 1000, stack[j].helmert[-6],
                         stack[j].helmert[-5], stack[j].helmert[-4]))

        stack.transformations.append([poly.info() for poly in stack])
        qbar.close()

    if args.redo_stack:
        # before removing common modes (or inheriting periodic terms), calculate ETMs with final aligned solutions
        calculate_etms(cnn,
                       stack,
                       JobServer,
                       iterations=None,
                       create_target=False)
        # only apply common mode removal if redoing the stack
        if args.external_constrains:
            stack.remove_common_modes(constrains)
        else:
            stack.remove_common_modes()

        # here, we also align the stack in velocity and coordinate space
        stack.align_spaces(constrains)

    # calculate the etms again, after removing or inheriting parameters
    calculate_etms(cnn, stack, JobServer, iterations=None, create_target=False)

    # save the json with the information about the alignment
    stack.to_json(args.stack_name[0] + '_alignment.json')
    # save polyhedrons to the database
    stack.save()

    if args.plot_stack_etms:
        qbar = tqdm(total=len(stack.stations), ncols=160)
        for stn in stack.stations:
            # plot the ETMs
            qbar.update()
            qbar.postfix = '%s.%s' % (stn['NetworkCode'], stn['StationCode'])
            plot_etm(cnn, stack, stn, args.directory)

        qbar.close()

# Example 6
    # function to print any errors encountered during parallel execution
    for msg in messages:
        if msg:
            file_append(
                'errors_amend.log',
                'ON ' + datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') +
                ' an unhandled error occurred:\n' + msg + '\n' +
                'END OF ERROR =================== \n\n')
    return []


cnn = dbConnection.Cnn('gnss_data.cfg')
options = pyOptions.ReadOptions('gnss_data.cfg')

JobServer = pyJobServer.JobServer(options)
archive = pyArchiveStruct.RinexStruct(cnn)

for table in ['rinex']:

    print(" >> Processing " + table)

    tbl = cnn.query('SELECT * FROM ' + table + ' WHERE "Completion" is null')

    rnx = tbl.dictresult()

    callback = []
    pbar = tqdm(total=len(rnx), ncols=80)

    depfuncs = (verify_rinex_date_multiday, )
    modules = ('pyRinex', 'dbConnection', 'traceback', 'platform', 'pyDate')


# Example 7
def main():

    # put connection and config in global variable to use inside callback_handle
    global cnn
    global repository_data_in

    # bind to the repository directory
    parser = argparse.ArgumentParser(
        description='Archive operations Main Program')

    parser.add_argument(
        '-purge',
        '--purge_locks',
        action='store_true',
        help=
        "Delete any network starting with '?' from the stations table and purge the contents of "
        "the locks table, deleting the associated files from data_in.")

    parser.add_argument('-np',
                        '--noparallel',
                        action='store_true',
                        help="Execute command without parallelization.")

    args = parser.parse_args()

    Config = pyOptions.ReadOptions('gnss_data.cfg')

    repository_data_in = Config.repository_data_in

    if not os.path.isdir(Config.repository):
        print "the provided repository path in gnss_data.cfg is not a folder"
        exit()

    JobServer = pyJobServer.JobServer(Config,
                                      run_parallel=not args.noparallel,
                                      software_sync=[
                                          Config.options['ppp_remote_local']
                                      ])  # type: pyJobServer.JobServer

    cnn = dbConnection.Cnn('gnss_data.cfg')
    # create the execution log
    cnn.insert('executions', script='ArchiveService.py')

    # set the data_xx directories
    data_in = os.path.join(Config.repository, 'data_in')
    data_in_retry = os.path.join(Config.repository, 'data_in_retry')
    data_reject = os.path.join(Config.repository, 'data_rejected')

    # create the subdirs if they do not exist
    if not os.path.isdir(data_in):
        os.makedirs(data_in)

    if not os.path.isdir(data_in_retry):
        os.makedirs(data_in_retry)

    if not os.path.isdir(data_reject):
        os.makedirs(data_reject)

    # delete any locks with a NetworkCode != '?%'
    cnn.query('delete from locks where "NetworkCode" not like \'?%\'')
    # get the locks to avoid reprocessing files that had no metadata in the database
    locks = cnn.query('SELECT * FROM locks')
    locks = locks.dictresult()

    if args.purge_locks:
        # first, delete all associated files
        for lock in tqdm(locks,
                         ncols=160,
                         unit='crz',
                         desc='%-30s' % ' >> Purging locks',
                         disable=None):
            try:
                os.remove(
                    os.path.join(Config.repository_data_in, lock['filename']))
            except Exception:
                pass  # the file may already be gone

        # purge the contents of stations. This will automatically purge the locks table
        cnn.query('delete from stations where "NetworkCode" like \'?%\'')
        # purge the networks
        cnn.query('delete from networks where "NetworkCode" like \'?%\'')
        # purge the locks already taken care of (just in case)
        cnn.query('delete from locks where "NetworkCode" not like \'?%\'')
        # get the locks to avoid reprocessing files that had no metadata in the database
        locks = cnn.query('SELECT * FROM locks')
        locks = locks.dictresult()

    # look for data in the data_in_retry and move it to data_in

    archive = pyArchiveStruct.RinexStruct(cnn)

    pbar = tqdm(desc='%-30s' % ' >> Scanning data_in_retry',
                ncols=160,
                unit='crz',
                disable=None)

    rfiles, paths, _ = archive.scan_archive_struct(data_in_retry, pbar)

    pbar.close()

    pbar = tqdm(desc='%-30s' % ' -- Moving files to data_in',
                total=len(rfiles),
                ncols=160,
                unit='crz',
                disable=None)

    for rfile, path in zip(rfiles, paths):

        dest_file = os.path.join(data_in, rfile)

        # move the file into the folder
        Utils.move(path, dest_file)

        pbar.set_postfix(crinez=rfile)
        pbar.update()

        # remove folder from data_in_retry (also removes the log file)
        try:
            # remove the log file that accompanies this Z file
            os.remove(path.replace('d.Z', '.log'))
        except Exception:
            pass  # the log file may not exist

    pbar.close()
    tqdm.write(' -- Cleaning data_in_retry')
    remove_empty_folders(data_in_retry)

    # take a break to allow the FS to finish the task
    time.sleep(5)

    files_path = []
    files_list = []

    pbar = tqdm(desc='%-30s' % ' >> Repository crinez scan',
                ncols=160,
                disable=None)

    rpaths, _, files = archive.scan_archive_struct(data_in, pbar)

    pbar.close()

    pbar = tqdm(desc='%-30s' % ' -- Checking the locks table',
                total=len(files),
                ncols=130,
                unit='crz',
                disable=None)

    # build the set of locked filenames once instead of rebuilding the list
    # for every file
    locked = set(lock['filename'] for lock in locks)

    for file, path in zip(files, rpaths):
        pbar.set_postfix(crinez=file)
        pbar.update()
        if path not in locked:
            files_path.append(path)
            files_list.append(file)

    pbar.close()

    tqdm.write(" -- Found %i files in the lock list..." % (len(locks)))
    tqdm.write(
        " -- Found %i files (matching format [stnm][doy][s].[yy]d.Z) to process..."
        % (len(files_list)))

    pbar = tqdm(desc='%-30s' % ' >> Processing repository',
                total=len(files_path),
                ncols=160,
                unit='crz',
                disable=None)

    # dependency functions
    depfuncs = (check_rinex_timespan_int, write_error, error_handle,
                insert_data, verify_rinex_multiday)
    # import modules
    modules = ('pyRinex', 'pyArchiveStruct', 'pyOTL', 'pyPPP', 'pyStationInfo',
               'dbConnection', 'Utils', 'os', 'uuid', 'datetime', 'pyDate',
               'numpy', 'traceback', 'platform', 'pyBrdc', 'pyProducts',
               'pyOptions', 'pyEvents')
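    # (presumably 'modules' are imported on each worker node and 'depfuncs'
    # are serialized with the job so process_crinex_file can call them)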

    JobServer.create_cluster(process_crinex_file,
                             depfuncs,
                             callback_handle,
                             pbar,
                             modules=modules)

    for file_to_process, sfile in zip(files_path, files_list):

        JobServer.submit(file_to_process, sfile, data_reject, data_in_retry)

    JobServer.wait()

    pbar.close()

    JobServer.close_cluster()

    print_archive_service_summary()

    # iterate to delete empty folders
    remove_empty_folders(data_in)

# Example 8
def main():

    parser = argparse.ArgumentParser(
        description='Parallel.GAMIT main execution program')

    parser.add_argument(
        'session_cfg',
        type=str,
        nargs=1,
        metavar='session.cfg',
        help="Filename with the session configuration to run Parallel.GAMIT")

    parser.add_argument(
        '-d',
        '--date',
        type=str,
        nargs=2,
        metavar='{date}',
        help=
        "Date range to process. Can be specified in yyyy/mm/dd yyyy_doy wwww-d format"
    )

    parser.add_argument(
        '-dp',
        '--date_parser',
        type=str,
        nargs=2,
        metavar='{year} {doys}',
        help="Parse date using ranges and commas (e.g. 2018 1,3-6). "
        "Cannot cross year boundaries")

    parser.add_argument(
        '-e',
        '--exclude',
        type=str,
        nargs='+',
        metavar='{station}',
        help=
        "List of stations to exclude from this processing (e.g. -e igm1 lpgs vbca)"
    )

    parser.add_argument(
        '-c',
        '--check_mode',
        type=str,
        nargs='+',
        metavar='{station}',
        help=
        "Check station(s) mode. If station(s) are not present in the GAMIT polyhedron, "
        "(i.e. the RINEX file(s) were missing at the time of the processing) Parallel.GAMIT will "
        "add the station to the closest subnetwork(s) and reprocess them. If station(s) were "
        "present at the time of the processing but failed to process (i.e. they are in the "
        "missing stations list), these subnetworks will be reprocessed to try to obtain a "
        "solution. Station list provided in the cfg is ignored in this mode. Therefore, changes "
        "in the station list will not produce any changes in network configuration. Purge not "
        "allowed when using this mode. (Syntax: -c igm1 lpgs rms.vbca)")

    parser.add_argument(
        '-i',
        '--ignore_missing',
        action='store_true',
        help=
        "When using check mode or processing existing sessions, ignore missing stations. In other "
        "words, do not try to reprocess sessions that have missing solutions.")

    parser.add_argument(
        '-p',
        '--purge',
        action='store_true',
        default=False,
        help=
        "Purge year doys from the database and directory structure and re-run the solution."
    )

    parser.add_argument(
        '-dry',
        '--dry_run',
        action='store_true',
        help="Generate the directory structures (locally) but do not run GAMIT. "
        "Output is left in the production directory.")

    parser.add_argument(
        '-kml',
        '--create_kml',
        action='store_true',
        help="Create a KML with everything processed in this run.")

    parser.add_argument('-np',
                        '--noparallel',
                        action='store_true',
                        help="Execute command without parallelization.")

    args = parser.parse_args()

    cnn = dbConnection.Cnn('gnss_data.cfg')  # type: dbConnection.Cnn

    dates = None
    drange = None
    try:
        if args.date_parser:
            year = int(args.date_parser[0])
            doys = parseIntSet(args.date_parser[1])
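            # e.g., per the -dp help text, parseIntSet('1,3-6') is expected
            # to yield the DOY list [1, 3, 4, 5, 6]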

            if any(doy < 1 for doy in doys):
                parser.error(
                    'DOYs cannot start with zero. Please select a DOY range between 1-365/366'
                )

            if 366 in doys:
                if year % 4 != 0:
                    parser.error(
                        'Year ' + str(year) +
                        ' is not a leap year: DOY 366 does not exist.')

            dates = [pyDate.Date(year=year, doy=i) for i in doys]
            drange = [dates[0], dates[-1]]
        else:
            drange = process_date(args.date, missing_input=None)

            if not all(drange):
                parser.error(
                    'Must specify a start and end date for the processing.')

            # get the dates to purge
            dates = [
                pyDate.Date(mjd=i)
                for i in range(drange[0].mjd, drange[1].mjd + 1)
            ]

    except ValueError as e:
        parser.error(str(e))

    print(
        ' >> Reading configuration files and creating project network, please wait...'
    )

    GamitConfig = pyGamitConfig.GamitConfiguration(
        args.session_cfg[0])  # type: pyGamitConfig.GamitConfiguration

    print(
        ' >> Checking GAMIT tables for requested config and year, please wait...'
    )

    JobServer = pyJobServer.JobServer(
        GamitConfig,
        check_gamit_tables=(pyDate.Date(year=drange[1].year,
                                        doy=drange[1].doy),
                            GamitConfig.gamitopt['eop_type']),
        run_parallel=not args.noparallel,
        software_sync=GamitConfig.gamitopt['gamit_remote_local'])

    # to exclude stations, append them to GamitConfig.NetworkConfig with a - in front
    exclude = args.exclude
    if exclude is not None:
        print(' >> User selected list of stations to exclude:')
        Utils.print_columns(exclude)
        GamitConfig.NetworkConfig['stn_list'] += ',-' + ',-'.join(exclude)
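        # e.g. a stn_list of 'igm1,lpgs' with '-e vbca autf' (hypothetical
        # stations) becomes 'igm1,lpgs,-vbca,-autf'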

    # initialize stations in the project
    stations = station_list(cnn,
                            GamitConfig.NetworkConfig['stn_list'].split(','),
                            drange)

    check_station_list = args.check_mode
    if check_station_list is not None:
        print(' >> Check mode. List of stations to check for selected days:')
        Utils.print_columns(check_station_list)
        check_stations = station_list(cnn, check_station_list, drange)
    else:
        check_stations = StationCollection()

    # args.dry_run is a store_true flag, so it is never None
    dry_run = args.dry_run

    if not dry_run and not len(check_stations):
        # ignore if calling a dry run
        # purge solutions if requested
        purge_solutions(JobServer, args, dates, GamitConfig)
    elif args.purge:
        tqdm.write(
            ' >> Dry run or check mode activated. Cannot purge solutions in these modes.'
        )

    # run the job server
    sessions = ExecuteGamit(cnn, JobServer, GamitConfig, stations,
                            check_stations, args.ignore_missing, dates,
                            args.dry_run, args.create_kml)

    # execute globk on doys that had to be divided into subnets
    if not args.dry_run:
        ExecuteGlobk(cnn, JobServer, GamitConfig, sessions, dates)

        # parse the zenith delay outputs
        ParseZTD(GamitConfig.NetworkConfig.network_id.lower(), dates, sessions,
                 GamitConfig, JobServer)

    tqdm.write(' >> %s Successful exit from Parallel.GAMIT' % print_datetime())

# Example 9
def main():

    global cnn

    parser = argparse.ArgumentParser(description='Parallel.GAMIT main execution program')

    parser.add_argument('session_cfg', type=str, nargs=1, metavar='session.cfg',
                        help="Filename with the session configuration to run Parallel.GAMIT")
    parser.add_argument('-d', '--date', type=str, nargs=2, metavar='{date}',
                        help="Date range to process. Can be specified in yyyy/mm/dd yyyy_doy wwww-d format")
    parser.add_argument('-dp', '--date_parser', type=str, nargs=2, metavar='{year} {doys}',
                        help="Parse date using ranges and commas (e.g. 2018 1,3-6). "
                             "Cannot cross year boundaries")
    parser.add_argument('-e', '--exclude', type=str, nargs='+', metavar='station',
                        help="List of stations to exclude from this processing (e.g. -e igm1 lpgs vbca)")
    parser.add_argument('-p', '--purge', action='store_true',
                        help="Purge year doys from the database and directory structure and re-run the solution.")
    parser.add_argument('-dry', '--dry_run', action='store_true',
                        help="Generate the directory structures (locally) but do not run GAMIT. "
                             "Output is left in the production directory.")
    parser.add_argument('-kml', '--generate_kml', action='store_true',
                        help="Generate KML and exit without running GAMIT.")

    parser.add_argument('-np', '--noparallel', action='store_true', help="Execute command without parallelization.")

    args = parser.parse_args()

    dates = None
    drange = None
    try:
        if args.date_parser:
            year = int(args.date_parser[0])
            doys = parseIntSet(args.date_parser[1])

            if any(doy < 1 for doy in doys):
                parser.error('DOYs cannot start with zero. Please select a DOY range between 1-365/366')

            if 366 in doys:
                if year % 4 != 0:
                    parser.error('Year ' + str(year) + ' is not a leap year: DOY 366 does not exist.')

            dates = [pyDate.Date(year=year, doy=i) for i in doys]
            drange = [dates[0], dates[-1]]
        else:
            drange = process_date(args.date, missing_input=None)

            if not all(drange):
                parser.error('Must specify a start and end date for the processing.')

            # get the dates to purge
            dates = [pyDate.Date(mjd=i) for i in range(drange[0].mjd, drange[1].mjd + 1)]

    except ValueError as e:
        parser.error(str(e))

    print(' >> Reading configuration files and creating project network, please wait...')

    GamitConfig = pyGamitConfig.GamitConfiguration(args.session_cfg[0])  # type: pyGamitConfig.GamitConfiguration

    print(' >> Checking GAMIT tables for requested config and year, please wait...')

    JobServer = pyJobServer.JobServer(GamitConfig,
                                      check_gamit_tables=(pyDate.Date(year=drange[1].year, doy=drange[1].doy),
                                                          GamitConfig.gamitopt['eop_type']),
                                      run_parallel=not args.noparallel,
                                      software_sync=GamitConfig.gamitopt['gamit_remote_local'])

    cnn = dbConnection.Cnn(GamitConfig.gamitopt['gnss_data'])  # type: dbConnection.Cnn

    # to exclude stations, append them to GamitConfig.NetworkConfig with a - in front
    exclude = args.exclude
    if exclude is not None:
        print(' >> User selected list of stations to exclude:')
        Utils.print_columns(exclude)
        GamitConfig.NetworkConfig['stn_list'] += ',-' + ',-'.join(exclude)

    # args.dry_run is a store_true flag, so it is never None
    dry_run = args.dry_run

    if not dry_run:
        # ignore if calling a dry run
        # purge solutions if requested
        purge_solutions(JobServer, args, dates, GamitConfig)

    # initialize stations in the project
    stations = station_list(cnn, GamitConfig.NetworkConfig, drange)

    tqdm.write(' >> Creating GAMIT session instances, please wait...')

    sessions = []
    archive = pyArchiveStruct.RinexStruct(cnn)  # type: pyArchiveStruct.RinexStruct

    for date in tqdm(dates, ncols=80):

        # make the dir for these sessions
        # this avoids a race condition when starting each process
        pwd = GamitConfig.gamitopt['solutions_dir'].rstrip('/') + '/' + date.yyyy() + '/' + date.ddd()

        if not os.path.exists(pwd):
            os.makedirs(pwd)

        net_object = Network(cnn, archive, GamitConfig, stations, date)

        sessions += net_object.sessions

    if args.generate_kml:
        # generate a KML of the sessions
        generate_kml(dates, sessions, GamitConfig)
        exit()

    # print a summary of the current project (NOT VERY USEFUL AFTER ALL)
    # print_summary(stations, sessions, drange)

    # run the job server
    ExecuteGamit(sessions, JobServer, dry_run)

    # execute globk on doys that had to be divided into subnets
    if not args.dry_run:
        ExecuteGlobk(GamitConfig, sessions, dates)

        # parse the zenith delay outputs
        ParseZTD(GamitConfig.NetworkConfig.network_id, sessions, GamitConfig)

    print(' >> Done processing and parsing information. Successful exit from Parallel.GAMIT')

# Example 10
def main():

    parser = argparse.ArgumentParser(description='GNSS time series stacker')

    parser.add_argument(
        'project',
        type=str,
        nargs=1,
        metavar='{project name}',
        help=
        "Specify the project name used to process the GAMIT solutions in Parallel.GAMIT."
    )
    parser.add_argument(
        '-max',
        '--max_iters',
        nargs=1,
        type=int,
        metavar='{max_iter}',
        help="Specify maximum number of iterations. Default is 4.")
    parser.add_argument(
        '-exclude',
        '--exclude_stations',
        nargs='+',
        type=str,
        metavar='{net.stnm}',
        help="Manually specify stations to remove from the stacking process.")
    parser.add_argument(
        '-use',
        '--use_stations',
        nargs='+',
        type=str,
        metavar='{net.stnm}',
        help="Manually specify stations to use for the stacking process.")
    parser.add_argument(
        '-dir',
        '--directory',
        type=str,
        help=
        "Directory to save the resulting PNG files. If not specified, assumed to be the "
        "production directory")
    parser.add_argument('-redo',
                        '--redo_stack',
                        action='store_true',
                        help="Delete the stack and redo it from scratch")
    parser.add_argument(
        '-itrf',
        '--itrf',
        nargs='+',
        help=
        "File with the ITRF periodic space parameters as given by Zuheir Altamimi and the list "
        "of stations to inherit the periodic terms from. Example: -itrf periodic.dat igs.braz "
        "rms.autf rms.igm1 rms.sant ...")
    parser.add_argument('-np',
                        '--noparallel',
                        action='store_true',
                        help="Execute command without parallelization.")

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")

    Config = pyOptions.ReadOptions(
        "gnss_data.cfg")  # type: pyOptions.ReadOptions

    JobServer = pyJobServer.JobServer(
        Config,
        run_parallel=not args.noparallel)  # type: pyJobServer.JobServer

    if args.max_iters:
        max_iters = int(args.max_iters[0])
    else:
        max_iters = 4
        print(' >> Defaulting to 4 iterations')

    if args.exclude_stations:
        exclude_stn = args.exclude_stations
    else:
        exclude_stn = []

    if args.use_stations:
        use_stn = args.use_stations
    else:
        use_stn = []

    # create folder for plots

    if args.directory:
        if not os.path.exists(args.directory):
            os.mkdir(args.directory)
    else:
        if not os.path.exists('production'):
            os.mkdir('production')
        args.directory = 'production'

    # load the ITRF dat file with the periodic space components
    if args.itrf:
        periodic = load_periodic_space(args.itrf[0])
    else:
        periodic = None

    # create the stack object
    stack = pyStack.Stack(cnn, args.project[0], args.redo_stack)

    for i in range(max_iters):
        # create the target polyhedrons based on iteration number (i == 0: PPP)

        target = calculate_etms(cnn, stack, JobServer, i)

        qbar = tqdm(total=len(stack),
                    ncols=160,
                    desc=' >> Aligning polyhedrons (%i of %i)' %
                    (i + 1, max_iters))

        # work on each polyhedron of the stack
        for j in range(len(stack)):

            qbar.update()

            if stack[j].date != target[j].date:
                # raise an error if dates don't agree!
                raise Exception(
                    'Error processing %s: dates don\'t agree (target date %s)'
                    % (stack[j].date.yyyyddd(), target[j].date.yyyyddd()))
            else:
                if not stack[j].aligned:
                    # should only attempt to align a polyhedron that is unaligned
                    # do not set the polyhedron as aligned unless we are in the max iteration step
                    stack[j].align(target[j], i == max_iters - 1)
                    # write info to the screen
                    qbar.write(
                        ' -- %s (%3i) %2i it: wrms: %4.1f T %5.1f %5.1f %5.1f '
                        'R (%5.1f %5.1f %5.1f)*1e-9' %
                        (stack[j].date.yyyyddd(), stack[j].stations_used,
                         stack[j].iterations, stack[j].wrms * 1000,
                         stack[j].helmert[-3] * 1000,
                         stack[j].helmert[-2] * 1000,
                         stack[j].helmert[-1] * 1000, stack[j].helmert[-6],
                         stack[j].helmert[-5], stack[j].helmert[-4]))

        qbar.close()

    # before removing common modes (or inheriting periodic terms), calculate ETMs with final aligned solutions
    calculate_etms(cnn, stack, JobServer, iterations=None)

    if args.redo_stack:
        # only apply common mode removal if redoing the stack
        if args.itrf:
            stack.remove_common_modes(periodic, args.itrf[1:])
        else:
            stack.remove_common_modes()

        # here, we also align the stack in velocity and coordinate space
        # TODO: include alignment to velocity and coordinate space

    # calculate the etms again, after removing or inheriting parameters
    calculate_etms(cnn, stack, JobServer, iterations=None)

    # save polyhedrons to the database
    stack.save()

    qbar = tqdm(total=len(stack.stations), ncols=160)

    for stn in stack.stations:
        # plot the ETMs
        qbar.update()
        qbar.postfix = '%s.%s' % (stn['NetworkCode'], stn['StationCode'])
        plot_etm(cnn, stack, stn, args.directory)

    qbar.close()

# Example 11
def main():

    global wrms_n, wrms_e, wrms_u, project

    parser = argparse.ArgumentParser(
        description='GNSS daily repetitivities analysis (DRA)')

    parser.add_argument(
        'project',
        type=str,
        nargs=1,
        metavar='{project name}',
        help=
        "Specify the project name used to process the GAMIT solutions in Parallel.GAMIT."
    )

    parser.add_argument(
        '-d',
        '--date_filter',
        nargs='+',
        metavar='date',
        help=
        'Date range filter. Can be specified in yyyy/mm/dd, yyyy_doy or wwww-d format'
    )

    parser.add_argument(
        '-w',
        '--plot_window',
        nargs='+',
        metavar='date',
        help=
        'Date window range to plot. Can be specified in yyyy/mm/dd, yyyy_doy or wwww-d format'
    )
    parser.add_argument('-hist',
                        '--histogram',
                        action='store_true',
                        help="Plot a histogram of the daily repetitivities")
    parser.add_argument(
        '-v',
        '--verbose',
        action='store_true',
        help=
        "Provide additional information during the alignment process (for debugging purposes)"
    )

    parser.add_argument('-np',
                        '--noparallel',
                        action='store_true',
                        help="Execute command without parallelization.")

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")

    project = args.project[0]

    dates = [pyDate.Date(year=1980, doy=1), pyDate.Date(year=2100, doy=1)]

    Config = pyOptions.ReadOptions(
        "gnss_data.cfg")  # type: pyOptions.ReadOptions
    JobServer = pyJobServer.JobServer(
        Config,
        run_parallel=not args.noparallel)  # type: pyJobServer.JobServer

    try:
        dates = process_date(args.date_filter)
    except ValueError as e:
        parser.error(str(e))

    pdates = None
    if args.plot_window is not None:
        if len(args.plot_window) == 1:
            try:
                pdates = process_date(args.plot_window,
                                      missing_input=None,
                                      allow_days=False)
                pdates = (pdates[0].fyear, )
            except ValueError:
                # an integer value
                pdates = float(args.plot_window[0])
        else:
            pdates = process_date(args.plot_window)
            pdates = (pdates[0].fyear, pdates[1].fyear)
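
    # pdates thus ends up as a 1-tuple holding the start fyear, a bare float
    # (single numeric argument), or a (start, end) fyear tuple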

    # create folder for plots
    path_plot = project + '_dra'
    if not os.path.isdir(path_plot):
        os.makedirs(path_plot)

    ########################################
    # load polyhedrons
    # create the DRA object
    dra = DRA(cnn, args.project[0], dates[0], dates[1], args.verbose)

    dra.stack_dra()

    dra.to_json(project + '_dra.json')

    missing_doys = []

    tqdm.write(
        ' >> Daily repetitivity analysis done. DOYs with wrms > 8 mm are shown below:'
    )
    for i, d in enumerate(dra):
        if d.wrms is not None:
            if d.wrms > 0.008:
                tqdm.write(
                    ' -- %s (%04i) %2i it wrms: %4.1f D-W: %5.3f IQR: %4.1f' %
                    (d.date.yyyyddd(), d.stations_used, d.iterations,
                     d.wrms * 1000, d.down_frac, d.iqr * 1000))

    qbar = tqdm(total=len(dra.stations),
                desc=' >> Computing DRAs',
                ncols=160,
                disable=None)

    modules = ('pyETM', 'dbConnection', 'traceback', 'io', 'numpy')
    JobServer.create_cluster(compute_dra,
                             progress_bar=qbar,
                             callback=callback_handler,
                             modules=modules)

    # plot each DRA
    for stn in dra.stations:
        NetworkCode = stn['NetworkCode']
        StationCode = stn['StationCode']

        ts = dra.get_station(NetworkCode, StationCode)
        JobServer.submit(ts, NetworkCode, StationCode, pdates, project,
                         args.histogram)

    JobServer.wait()
    qbar.close()
    JobServer.close_cluster()

    wrms_n = np.array(wrms_n)
    wrms_e = np.array(wrms_e)
    wrms_u = np.array(wrms_u)

    # plot the WRM of the DRA stack and number of stations
    f, axis = plt.subplots(nrows=3, ncols=2,
                           figsize=(15, 10))  # type: plt.subplots

    # WRMS
    ax = axis[0][0]
    ax.plot([t['fyear'] for t in dra.transformations[0]],
            [t['wrms'] * 1000 for t in dra.transformations[0]],
            'ob',
            markersize=2)
    ax.set_ylabel('WRMS [mm]')
    ax.grid(True)
    ax.set_ylim(0, 10)

    # station count
    ax = axis[1][0]
    ax.plot([t['fyear'] for t in dra.transformations[0]],
            [t['stations_used'] for t in dra.transformations[0]],
            'ob',
            markersize=2)
    ax.set_ylabel('Station count')
    ax.grid(True)

    # d-w fraction
    ax = axis[2][0]
    ax.plot([t['fyear'] for t in dra.transformations[0]],
            [t['downweighted_fraction'] for t in dra.transformations[0]],
            'ob',
            markersize=2)
    ax.set_ylabel('DW fraction')
    ax.grid(True)

    ax = axis[0][1]
    ax.hist(wrms_n[wrms_n <= 8], 40, alpha=0.75, facecolor='blue')
    ax.grid(True)
    ax.set_ylabel('# stations')
    ax.set_xlabel('WRMS misfit N [mm]')
    ax.set_title('Daily repetitivities NEU')

    ax = axis[1][1]
    ax.hist(wrms_e[wrms_e <= 8], 40, alpha=0.75, facecolor='blue')
    ax.grid(True)
    ax.set_xlim(0, 8)
    ax.set_ylabel('# stations')
    ax.set_xlabel('WRMS misfit E [mm]')

    ax = axis[2][1]
    ax.hist(wrms_u[wrms_u <= 10], 40, alpha=0.75, facecolor='blue')
    ax.grid(True)
    ax.set_xlim(0, 10)
    ax.set_ylabel('# stations')
    ax.set_xlabel('WRMS misfit U [mm]')

    f.suptitle('Daily repetitivity analysis for project %s\n'
               'Solutions with WRMS > 10 mm are not shown' % project,
               fontsize=12,
               family='monospace')
    plt.savefig(project + '_dra.png')
    plt.close()

# Example 12
def main():

    parser = argparse.ArgumentParser(description='GNSS time series stacker')

    parser.add_argument(
        'project',
        type=str,
        nargs=1,
        metavar='{project name}',
        help=
        "Specify the project name used to process the GAMIT solutions in Parallel.GAMIT."
    )
    parser.add_argument(
        '-max',
        '--max_iters',
        nargs=1,
        type=int,
        metavar='{max_iter}',
        help="Specify maximum number of iterations. Default is 4.")
    parser.add_argument(
        '-exclude',
        '--exclude_stations',
        nargs='+',
        type=str,
        metavar='{net.stnm}',
        help="Manually specify stations to remove from the stacking process.")
    parser.add_argument(
        '-use',
        '--use_stations',
        nargs='+',
        type=str,
        metavar='{net.stnm}',
        help="Manually specify stations to use for the stacking process.")
    parser.add_argument('-np',
                        '--noparallel',
                        action='store_true',
                        help="Execute command without parallelization.")

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")
    Config = pyOptions.ReadOptions(
        "gnss_data.cfg")  # type: pyOptions.ReadOptions

    if not args.noparallel:
        JobServer = pyJobServer.JobServer(
            Config, run_node_test=False)  # type: pyJobServer.JobServer
    else:
        JobServer = None
        Config.run_parallel = False

    if args.max_iters:
        max_iters = int(args.max_iters[0])
    else:
        max_iters = 4

    if args.exclude_stations:
        exclude_stn = args.exclude_stations
    else:
        exclude_stn = []

    if args.use_stations:
        use_stn = args.use_stations
    else:
        use_stn = []

    # create folder for plots

    if not os.path.isdir(args.project[0]):
        os.makedirs(args.project[0])

    ########################################
    # load polyhedrons

    project = Project(cnn,
                      args.project[0],
                      max_iters,
                      use=use_stn,
                      exclude=exclude_stn)

    #project.remove_common_modes(cnn)
    #exit()

    calculate_etms(cnn, project, JobServer)

    align_stack(cnn, project, JobServer)

    # remove common modes
    updated_poly = project.remove_common_modes(cnn)
    updated_poly.sort(key=lambda k: k['FYear'])

    # replace with new polyhedrons
    project.polyhedrons = updated_poly
    # last call to calculate ETMs
    calculate_etms(cnn, project, JobServer)

    tqdm.write(' -- Plotting final ETMs (aligned)...')

    project.plot_etms()