コード例 #1
0
def main():

    parser = argparse.ArgumentParser(
        description=
        'Program to perform weekly loosely-constrained solutions. Combination '
        'is performed using GLOBK. Result is output in SINEX format.')

    parser.add_argument(
        'stnlist',
        type=str,
        nargs='+',
        metavar='all|net.stnm',
        help="List of networks/stations to include in the solution.")

    parser.add_argument(
        '-s',
        '--session_config',
        type=str,
        nargs=1,
        metavar='session.cfg',
        help="Filename with the session configuration to run Parallel.GAMIT")

    parser.add_argument('-w',
                        '--gpsweek',
                        nargs=1,
                        help="GPS week to combine.")

    parser.add_argument(
        '-e',
        '--exclude',
        type=str,
        nargs='+',
        metavar='station',
        help="List of stations to exclude (e.g. -e igm1 lpgs vbca)")

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")

    # get the working dates
    date_s = pyDate.Date(gpsWeek=int(args.gpsweek[0]), gpsWeekDay=0)
    date_e = pyDate.Date(gpsWeek=int(args.gpsweek[0]), gpsWeekDay=6)

    print ' >> Working with GPS week ' + args.gpsweek[0] + ' (%s to %s)' % (
        date_s.yyyyddd(), date_e.yyyyddd())

    exclude = args.exclude
    if exclude is not None:
        print(' >> User selected list of stations to exclude:')
        Utils.print_columns(exclude)
        args.stnlist += ['-' + exc for exc in exclude]

    # get the station list
    stnlist = Utils.process_stnlist(cnn, args.stnlist)

    # check that the selected stations have all different station codes
    # otherwise, exit with error
    for i in range(len(stnlist) - 1):
        for j in range(i + 1, len(stnlist)):
            if stnlist[i]['StationCode'] == stnlist[j]['StationCode']:
                print 'During station selection, two identical station codes were found. Please remove one and ' \
                      'try again.'
                exit()

    GamitConfig = pyGamitConfig.GamitConfiguration(
        args.session_config[0])  # type: pyGamitConfig.GamitConfiguration

    project = GamitConfig.NetworkConfig.network_id.lower()
    org = GamitConfig.gamitopt['org']

    # start making sure that the solutions were already incorporated into the stack
    cc = cnn.query_float(
        'SELECT count(*) FROM stacks WHERE ("Year", "DOY") BETWEEN (%i, %i) AND (%i, %i) '
        'AND "Project" = \'%s\'' %
        (date_s.year, date_s.doy, date_e.year, date_e.doy, project))

    if cc[0] == 0:
        print ' >> No solutions could be found in the stack for the specified gps week. Did you run the stacker ' \
              'before attempting to combine the solutions?'
        exit()
    else:
        soln_pwd = GamitConfig.gamitopt['solutions_dir']

        # create a globk directory in production
        if not os.path.exists('production/globk'):
            os.makedirs('production/globk')

        # check if week folder exists
        if os.path.exists('production/globk/' + args.gpsweek[0]):
            rmtree('production/globk/' + args.gpsweek[0])

        # create the directory
        os.makedirs('production/globk/' + args.gpsweek[0])

        globk_pwd = 'production/globk/' + args.gpsweek[0]

        glx_list = []

        # make a list of the h files that need to be combined
        for day in range(0, 7):
            date = pyDate.Date(gpsWeek=int(args.gpsweek[0]), gpsWeekDay=day)

            soln_dir = os.path.join(
                soln_pwd,
                date.yyyy() + '/' + date.ddd() + '/' + project + '/glbf')

            if os.path.exists(soln_dir):
                glx = glob.glob(os.path.join(soln_dir, '*.GLX.*'))
                if len(glx) > 0:
                    glx_list.append({'file': glx[0], 'gpsweek': date.wwwwd()})
                else:
                    glx = glob.glob(os.path.join(soln_dir, '*.glx'))
                    glx_list.append({'file': glx[0], 'gpsweek': date.wwwwd()})

        # create the earthquakes.txt file to remove outliers
        with open(globk_pwd + '/eq_rename.txt', 'w') as fd:
            for stn in stnlist:
                rm = cnn.query_float(
                    'SELECT * FROM gamit_soln_excl WHERE "Project" = \'%s\' AND "NetworkCode" = \'%s\''
                    ' AND "StationCode" = \'%s\' AND ("Year", "DOY") BETWEEN (%i, %i) AND (%i, %i)'
                    % (project, stn['NetworkCode'], stn['StationCode'],
                       date_s.year, date_s.doy, date_e.year, date_e.doy),
                    as_dict=True)
                for r in rm:
                    date = pyDate.Date(year=r['Year'], doy=r['DOY'])

                    fd.write(
                        ' rename %s_gps %s_xcl %-20s %s %02i %02i 0 0 %s %02i %02i 24 0\n'
                        % (stn['StationCode'], stn['StationCode'], org +
                           date.wwwwd() + '.GLX', date.yyyy()[2:], date.month,
                           date.day, date.yyyy()[2:], date.month, date.day))

                # check for renames that might not agree between days
                mv = cnn.query_float(
                    'SELECT * FROM gamit_subnets WHERE "Project" = \'%s\' AND ("Year", "DOY") '
                    'BETWEEN (%i, %i) AND (%i, %i) AND \'%s.%s\' = ANY(stations)'
                    % (project, date_s.year, date_s.doy, date_e.year,
                       date_e.doy, stn['NetworkCode'], stn['StationCode']),
                    as_dict=True)

                for m in mv:
                    date = pyDate.Date(year=m['Year'], doy=m['DOY'])
                    # check on each day to see if alias agrees with station code
                    for i, s in enumerate(m['stations']):
                        if s.split('.')[1] != m['alias'][i] and \
                                s == stn['NetworkCode'] + '.' + stn['StationCode']:

                            print ' -- Alias for %s.%s = %s: renaming' \
                                  % (stn['NetworkCode'], stn['StationCode'], m['alias'][i])

                            # change the name of the station to the original name
                            fd.write(
                                ' rename %s_gps %s_gps %-20s %s %02i %02i 0 0 %s %02i %02i 24 0\n'
                                % (m['alias'], stn['StationCode'],
                                   org + date.wwwwd() + '.GLX',
                                   date.yyyy()[2:], date.month, date.day,
                                   date.yyyy()[2:], date.month, date.day))

                        elif s not in [
                                st['NetworkCode'] + '.' + st['StationCode']
                                for st in stnlist
                        ]:
                            print ' -- Removing %s: not selected' % s
                            # just in case, remove any other occurrences of this station code
                            fd.write(
                                ' rename %s_gps %s_xcl %-20s %s %02i %02i 0 0 %s %02i %02i 24 0\n'
                                % (m['alias'][i],
                                   id_generator(), org + date.wwwwd() + '.GLX',
                                   date.yyyy()[2:], date.month, date.day,
                                   date.yyyy()[2:], date.month, date.day))

        # ready to pass list to globk object
        Globk(globk_pwd, org, glx_list, date_s.wwww(),
              ' '.join([stn['StationCode'].upper() for stn in stnlist]))

        process_sinex(cnn, project, [date_s, date_e],
                      globk_pwd + '/' + org + date_s.wwww() + '7.snx')
コード例 #2
0
def main():
    """Parallel.GAMIT main execution program.

    Parses the command line, builds the processing date range, starts the
    job server, assembles the station lists (honoring exclusions and check
    mode), optionally purges previous solutions, then runs GAMIT, GLOBK and
    the zenith-delay parsing for the requested days.
    """
    parser = argparse.ArgumentParser(
        description='Parallel.GAMIT main execution program')

    parser.add_argument(
        'session_cfg',
        type=str,
        nargs=1,
        metavar='session.cfg',
        help="Filename with the session configuration to run Parallel.GAMIT")

    parser.add_argument(
        '-d',
        '--date',
        type=str,
        nargs=2,
        metavar='{date}',
        help=
        "Date range to process. Can be specified in yyyy/mm/dd yyyy_doy wwww-d format"
    )

    parser.add_argument(
        '-dp',
        '--date_parser',
        type=str,
        nargs=2,
        metavar='{year} {doys}',
        help="Parse date using ranges and commas (e.g. 2018 1,3-6). "
        "Cannot cross year boundaries")

    parser.add_argument(
        '-e',
        '--exclude',
        type=str,
        nargs='+',
        metavar='{station}',
        help=
        "List of stations to exclude from this processing (e.g. -e igm1 lpgs vbca)"
    )

    parser.add_argument(
        '-c',
        '--check_mode',
        type=str,
        nargs='+',
        metavar='{station}',
        help=
        "Check station(s) mode. If station(s) are not present in the GAMIT polyhedron, "
        "(i.e. the RINEX file(s) were missing at the time of the processing) Parallel.GAMIT will "
        "add the station to the closest subnetwork(s) and reprocess them. If station(s) were "
        "present at the time of the processing but failed to process (i.e. they are in the "
        "missing stations list), these subnetworks will be reprocessed to try to obtain a "
        "solution. Station list provided in the cfg is ignored in this mode. Therefore, changes "
        "in the station list will not produce any changes in network configuration. Purge not "
        "allowed when using this mode. (Syntax: -c igm1 lpgs rms.vbca)")

    parser.add_argument(
        '-i',
        '--ignore_missing',
        action='store_true',
        help=
        "When using check mode or processing existing sessions, ignore missing stations. In other "
        "words, do not try to reprocess sessions that have missing solutions.")

    parser.add_argument(
        '-p',
        '--purge',
        action='store_true',
        default=False,
        help=
        "Purge year doys from the database and directory structure and re-run the solution."
    )

    parser.add_argument(
        '-dry',
        '--dry_run',
        action='store_true',
        help="Generate the directory structures (locally) but do not run GAMIT. "
        "Output is left in the production directory.")

    parser.add_argument(
        '-kml',
        '--create_kml',
        action='store_true',
        help="Create a KML with everything processed in this run.")

    parser.add_argument('-np',
                        '--noparallel',
                        action='store_true',
                        help="Execute command without parallelization.")

    args = parser.parse_args()

    cnn = dbConnection.Cnn('gnss_data.cfg')  # type: dbConnection.Cnn

    dates = None
    drange = None
    try:
        if args.date_parser:
            year = int(args.date_parser[0])
            doys = parseIntSet(args.date_parser[1])

            # BUGFIX: the original `any(doy for doy in doys if doy < 1)`
            # tested the truthiness of the offending DOYs themselves, so a
            # DOY of 0 (falsy) slipped through undetected
            if any(doy < 1 for doy in doys):
                parser.error(
                    'DOYs cannot start with zero. Please selected a DOY range between 1-365/366'
                )

            if 366 in doys:
                # BUGFIX: apply the full Gregorian rule; the original
                # `year % 4 != 0` wrongly treats century years such as
                # 2100 as leap years
                if not (year % 4 == 0 and
                        (year % 100 != 0 or year % 400 == 0)):
                    parser.error(
                        'Year ' + str(year) +
                        ' is not a leap year: DOY 366 does not exist.')

            dates = [pyDate.Date(year=year, doy=i) for i in doys]
            drange = [dates[0], dates[-1]]
        else:
            drange = process_date(args.date, missing_input=None)

            if not all(drange):
                parser.error(
                    'Must specify a start and end date for the processing.')

            # get the dates to purge
            dates = [
                pyDate.Date(mjd=i)
                for i in range(drange[0].mjd, drange[1].mjd + 1)
            ]

    except ValueError as e:
        parser.error(str(e))

    print(
        ' >> Reading configuration files and creating project network, please wait...'
    )

    GamitConfig = pyGamitConfig.GamitConfiguration(
        args.session_cfg[0])  # type: pyGamitConfig.GamitConfiguration

    print(
        ' >> Checking GAMIT tables for requested config and year, please wait...'
    )

    JobServer = pyJobServer.JobServer(
        GamitConfig,
        check_gamit_tables=(pyDate.Date(year=drange[1].year,
                                        doy=drange[1].doy),
                            GamitConfig.gamitopt['eop_type']),
        run_parallel=not args.noparallel,
        software_sync=GamitConfig.gamitopt['gamit_remote_local'])

    # to exclude stations, append them to GamitConfig.NetworkConfig with a - in front
    exclude = args.exclude
    if exclude is not None:
        print(' >> User selected list of stations to exclude:')
        Utils.print_columns(exclude)
        GamitConfig.NetworkConfig['stn_list'] += ',-' + ',-'.join(exclude)

    # initialize stations in the project
    stations = station_list(cnn,
                            GamitConfig.NetworkConfig['stn_list'].split(','),
                            drange)

    check_station_list = args.check_mode
    if check_station_list is not None:
        print(' >> Check mode. List of stations to check for selected days:')
        Utils.print_columns(check_station_list)
        check_stations = station_list(cnn, check_station_list, drange)
    else:
        check_stations = StationCollection()

    dry_run = False if args.dry_run is None else args.dry_run

    if not dry_run and not len(check_stations):
        # ignore if calling a dry run
        # purge solutions if requested
        purge_solutions(JobServer, args, dates, GamitConfig)
    elif args.purge:
        tqdm.write(
            ' >> Dry run or check mode activated. Cannot purge solutions in these modes.'
        )

    # run the job server
    sessions = ExecuteGamit(cnn, JobServer, GamitConfig, stations,
                            check_stations, args.ignore_missing, dates,
                            args.dry_run, args.create_kml)

    # execute globk on doys that had to be divided into subnets
    if not args.dry_run:
        ExecuteGlobk(cnn, JobServer, GamitConfig, sessions, dates)

        # parse the zenith delay outputs
        ParseZTD(GamitConfig.NetworkConfig.network_id.lower(), dates, sessions,
                 GamitConfig, JobServer)

    tqdm.write(' >> %s Successful exit from Parallel.GAMIT' % print_datetime())
コード例 #3
0
            raise pyStationException(
                'type: ' + str(type(item)) +
                ' invalid. Can only pass Station or String objects.')

    def __contains__(self, item):
        # delegate `in` membership tests to ismember(), which accepts either
        # Station objects or station-name strings (see ismember's type check)
        return self.ismember(item)


if __name__ == '__main__':
    # Ad-hoc smoke test: build Station/StationInstance/GamitSession objects
    # against the live database and verify that a GamitSession survives a
    # pycos serialize/unserialize round trip.
    import dbConnection
    import pyGamitConfig
    import pyArchiveStruct
    import pyGamitSession

    cnn = dbConnection.Cnn('gnss_data.cfg')
    GamitConfig = pyGamitConfig.GamitConfiguration('CPC-Ar_session.cfg')
    archive = pyArchiveStruct.RinexStruct(cnn)
    import pycos
    import pyDate
    # two-day window used to instantiate the test stations
    dr = [pyDate.Date(year=2010, doy=1), pyDate.Date(year=2010, doy=2)]
    s1 = Station(cnn, 'rms', 'igm1', dr)
    s2 = Station(cnn, 'rms', 'lpgs', dr)
    s3 = Station(cnn, 'rms', 'chac', dr)
    # same station code as s3 but a different network
    s4 = Station(cnn, 'cap', 'chac', dr)
    si = StationInstance(cnn, archive, s1, dr[0], GamitConfig)
    gs = pyGamitSession.GamitSession(cnn, archive, 'igg', 'IGN', None,
                                     pyDate.Date(year=2020, doy=100),
                                     GamitConfig, [s1, s2, s3])
    c = StationCollection()
    a = pycos.unserialize(pycos.serialize(gs))
    print(a)
コード例 #4
0
def main():
    """Parallel.GAMIT main execution program (session-building variant).

    Parses the command line, builds the processing date range, starts the
    job server, optionally purges previous solutions, creates the GAMIT
    session instances day by day, and runs GAMIT followed by GLOBK and the
    zenith-delay parsing (or only generates a KML when requested).
    """
    global cnn

    parser = argparse.ArgumentParser(description='Parallel.GAMIT main execution program')

    parser.add_argument('session_cfg', type=str, nargs=1, metavar='session.cfg',
                        help="Filename with the session configuration to run Parallel.GAMIT")
    parser.add_argument('-d', '--date', type=str, nargs=2, metavar='{date}',
                        help="Date range to process. Can be specified in yyyy/mm/dd yyyy_doy wwww-d format")
    parser.add_argument('-dp', '--date_parser', type=str, nargs=2, metavar='{year} {doys}',
                        help="Parse date using ranges and commas (e.g. 2018 1,3-6). "
                             "Cannot cross year boundaries")
    parser.add_argument('-e', '--exclude', type=str, nargs='+', metavar='station',
                        help="List of stations to exclude from this processing (e.g. -e igm1 lpgs vbca)")
    parser.add_argument('-p', '--purge', action='store_true',
                        help="Purge year doys from the database and directory structure and re-run the solution.")
    parser.add_argument('-dry', '--dry_run', action='store_true',
                        help="Generate the directory structures (locally) but do not run GAMIT. "
                             "Output is left in the production directory.")
    parser.add_argument('-kml', '--generate_kml', action='store_true',
                        help="Generate KML and exit without running GAMIT.")

    parser.add_argument('-np', '--noparallel', action='store_true', help="Execute command without parallelization.")

    args = parser.parse_args()

    dates = None
    drange = None
    try:
        if args.date_parser:
            year = int(args.date_parser[0])
            doys = parseIntSet(args.date_parser[1])

            # BUGFIX: the original `any([doy for doy in doys if doy < 1])`
            # checked the truthiness of the offending DOYs, so a DOY of 0
            # (falsy) was not detected
            if any(doy < 1 for doy in doys):
                parser.error('DOYs cannot start with zero. Please selected a DOY range between 1-365/366')

            if 366 in doys:
                # BUGFIX: full Gregorian rule; `year % 4 != 0` alone wrongly
                # treats century years such as 2100 as leap years
                if not (year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)):
                    parser.error('Year ' + str(year) + ' is not a leap year: DOY 366 does not exist.')

            dates = [pyDate.Date(year=year, doy=i) for i in doys]
            drange = [dates[0], dates[-1]]
        else:
            drange = process_date(args.date, missing_input=None)

            if not all(drange):
                parser.error('Must specify a start and end date for the processing.')

            # get the dates to purge
            dates = [pyDate.Date(mjd=i) for i in range(drange[0].mjd, drange[1].mjd + 1)]

    except ValueError as e:
        parser.error(str(e))

    print(' >> Reading configuration files and creating project network, please wait...')

    GamitConfig = pyGamitConfig.GamitConfiguration(args.session_cfg[0])  # type: pyGamitConfig.GamitConfiguration

    # BUGFIX: corrected typo in user-facing message ("Checing" -> "Checking")
    print(' >> Checking GAMIT tables for requested config and year, please wait...')

    JobServer = pyJobServer.JobServer(GamitConfig,
                                      check_gamit_tables=(pyDate.Date(year=drange[1].year, doy=drange[1].doy),
                                                          GamitConfig.gamitopt['eop_type']),
                                      run_parallel=not args.noparallel,
                                      software_sync=GamitConfig.gamitopt['gamit_remote_local'])

    cnn = dbConnection.Cnn(GamitConfig.gamitopt['gnss_data'])  # type: dbConnection.Cnn

    # to exclude stations, append them to GamitConfig.NetworkConfig with a - in front
    exclude = args.exclude
    if exclude is not None:
        print(' >> User selected list of stations to exclude:')
        Utils.print_columns(exclude)
        GamitConfig.NetworkConfig['stn_list'] += ',-' + ',-'.join(exclude)

    # store_true defaults to False, but keep the None guard for robustness
    if args.dry_run is not None:
        dry_run = args.dry_run
    else:
        dry_run = False

    if not dry_run:
        # ignore if calling a dry run
        # purge solutions if requested
        purge_solutions(JobServer, args, dates, GamitConfig)

    # initialize stations in the project
    stations = station_list(cnn, GamitConfig.NetworkConfig, drange)

    tqdm.write(' >> Creating GAMIT session instances, please wait...')

    sessions = []
    archive = pyArchiveStruct.RinexStruct(cnn)  # type: pyArchiveStruct.RinexStruct

    for date in tqdm(dates, ncols=80):

        # make the dir for these sessions
        # this avoids a racing condition when starting each process
        pwd = GamitConfig.gamitopt['solutions_dir'].rstrip('/') + '/' + date.yyyy() + '/' + date.ddd()

        if not os.path.exists(pwd):
            os.makedirs(pwd)

        net_object = Network(cnn, archive, GamitConfig, stations, date)

        sessions += net_object.sessions

    if args.generate_kml:
        # generate a KML of the sessions
        generate_kml(dates, sessions, GamitConfig)
        exit()

    # print a summary of the current project (NOT VERY USEFUL AFTER ALL)
    # print_summary(stations, sessions, drange)

    # run the job server
    ExecuteGamit(sessions, JobServer, dry_run)

    # execute globk on doys that had to be divided into subnets
    if not args.dry_run:
        ExecuteGlobk(GamitConfig, sessions, dates)

        # parse the zenith delay outputs
        ParseZTD(GamitConfig.NetworkConfig.network_id, sessions, GamitConfig)

    print(' >> Done processing and parsing information. Successful exit from Parallel.GAMIT')
コード例 #5
0
def main():

    parser = argparse.ArgumentParser(
        description=
        'Program to perform weekly loosely-constrained solutions. Combination '
        'is performed using GLOBK. Result is output in SINEX format.')

    parser.add_argument(
        'stnlist',
        type=str,
        nargs='+',
        metavar='all|net.stnm',
        help="List of networks/stations to include in the solution.")

    parser.add_argument(
        '-s',
        '--session_config',
        type=str,
        nargs=1,
        metavar='session.cfg',
        help="Filename with the session configuration to run Parallel.GAMIT")

    parser.add_argument('-w',
                        '--gpsweek',
                        nargs=1,
                        help="GPS week to combine.")

    parser.add_argument(
        '-e',
        '--exclude',
        type=str,
        nargs='+',
        metavar='station',
        help="List of stations to exclude (e.g. -e igm1 lpgs vbca)")

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")

    # get the working dates
    date_s = pyDate.Date(gpsWeek=int(args.gpsweek[0]), gpsWeekDay=0)
    date_e = pyDate.Date(gpsWeek=int(args.gpsweek[0]), gpsWeekDay=6)

    print ' >> Working with GPS week ' + args.gpsweek[0] + ' (%s to %s)' % (
        date_s.yyyyddd(), date_e.yyyyddd())

    exclude = args.exclude
    if exclude is not None:
        print(' >> User selected list of stations to exclude:')
        Utils.print_columns(exclude)
        args.stnlist += ['-' + exc for exc in exclude]

    # get the station list
    stnlist = Utils.process_stnlist(cnn, args.stnlist)

    # check that the selected stations have all different station codes
    # otherwise, exit with error
    for i in range(len(stnlist) - 1):
        for j in range(i + 1, len(stnlist)):
            if stnlist[i]['StationCode'] == stnlist[j]['StationCode']:
                print 'During station selection, two identical station codes were found. Please remove one and ' \
                      'try again.'
                exit()

    GamitConfig = pyGamitConfig.GamitConfiguration(
        args.session_config[0])  # type: pyGamitConfig.GamitConfiguration

    project = GamitConfig.NetworkConfig.network_id.lower()
    org = GamitConfig.gamitopt['org']

    print ' >> REMINDER: To automatically remove outliers during the weekly combination, first run DRA.py to analyze ' \
          'the daily repetitivities'

    soln_pwd = GamitConfig.gamitopt['solutions_dir']

    # create a globk directory in production
    if not os.path.exists('production/globk'):
        os.makedirs('production/globk')

    # check if week folder exists
    if os.path.exists('production/globk/' + args.gpsweek[0]):
        rmtree('production/globk/' + args.gpsweek[0])

    # create the directory
    os.makedirs('production/globk/' + args.gpsweek[0])

    globk_pwd = 'production/globk/' + args.gpsweek[0]

    glx_list = []

    # make a list of the h files that need to be combined
    for day in range(0, 7):
        date = pyDate.Date(gpsWeek=int(args.gpsweek[0]), gpsWeekDay=day)

        soln_dir = os.path.join(
            soln_pwd,
            date.yyyy() + '/' + date.ddd() + '/' + project + '/glbf')

        if os.path.exists(soln_dir):
            glx = glob.glob(os.path.join(soln_dir, '*.GLX.*'))
            if len(glx) > 0:
                glx_list.append({'file': glx[0], 'gpsweek': date.wwwwd()})
            else:
                glx = glob.glob(os.path.join(soln_dir, '*.glx'))
                glx_list.append({'file': glx[0], 'gpsweek': date.wwwwd()})

    # create the earthquakes.txt file to remove outliers
    with open(globk_pwd + '/eq_rename.txt', 'w') as fd:
        rename = []
        remove = []
        use_site = []
        fd.write('# LIST OF OUTLIERS DETECTED BY DRA\n')
        for stn in stnlist:
            # obtain the filtered solutions
            rm = cnn.query_float(
                'SELECT * FROM gamit_soln_excl WHERE "Project" = \'%s\' AND "NetworkCode" = \'%s\''
                ' AND "StationCode" = \'%s\' AND ("Year", "DOY") BETWEEN (%i, %i) AND (%i, %i) '
                'ORDER BY residual' %
                (project, stn['NetworkCode'], stn['StationCode'], date_s.year,
                 date_s.doy, date_e.year, date_e.doy),
                as_dict=True)

            # obtain the total number of solutions
            sl = cnn.query_float(
                'SELECT * FROM gamit_soln WHERE "Project" = \'%s\' AND "NetworkCode" = \'%s\''
                ' AND "StationCode" = \'%s\' AND ("Year", "DOY") BETWEEN (%i, %i) AND (%i, %i) '
                % (project, stn['NetworkCode'], stn['StationCode'],
                   date_s.year, date_s.doy, date_e.year, date_e.doy),
                as_dict=True)
            for i, r in enumerate(rm):
                date = pyDate.Date(year=r['Year'], doy=r['DOY'])
                # if the number of rejected solutions is equal to the number of total solutions,
                # leave out the first one (i == 0) which is the one with the lowest residual (see ORDER BY in rm)
                if len(rm) < len(sl) or (len(rm) == len(sl) and i != 0):
                    fd.write(
                        ' rename %s_gps %s_xcl %-20s %s %02i %02i 0 0 %s %02i %02i 24 0\n'
                        % (stn['StationCode'], stn['StationCode'], org +
                           date.wwwwd() + '.GLX', date.yyyy()[2:], date.month,
                           date.day, date.yyyy()[2:], date.month, date.day))

            # check for renames that might not agree between days
            mv = cnn.query_float(
                'SELECT * FROM gamit_subnets WHERE "Project" = \'%s\' AND ("Year", "DOY") '
                'BETWEEN (%i, %i) AND (%i, %i) AND \'%s.%s\' = ANY(stations)' %
                (project, date_s.year, date_s.doy, date_e.year, date_e.doy,
                 stn['NetworkCode'], stn['StationCode']),
                as_dict=True)

            for m in mv:
                date = pyDate.Date(year=m['Year'], doy=m['DOY'])
                # check on each day to see if alias agrees with station code
                for i, s in enumerate(m['stations']):
                    if s.split('.')[1] != m['alias'][i] and \
                            s == stn['NetworkCode'] + '.' + stn['StationCode']:

                        print ' -- %s alias for %s.%s = %s: renaming' \
                              % (date.yyyyddd(), stn['NetworkCode'], stn['StationCode'], m['alias'][i])

                        # change the name of the station to the original name
                        rename.append(
                            ' rename %s_gps %s_dup %-20s %s %02i %02i 0 0 %s %02i %02i 24 0\n'
                            % (m['alias'][i],
                               stn['StationCode'], org + date.wwwwd() + '.GLX',
                               date.yyyy()[2:], date.month, date.day,
                               date.yyyy()[2:], date.month, date.day))
                        use_site.append('%s_dup' % stn['StationCode'])

                    elif s not in [
                            st['NetworkCode'] + '.' + st['StationCode']
                            for st in stnlist
                    ]:
                        # print ' -- Removing %s: not selected' % s
                        # just in case, remove any other occurrences of this station code
                        remove.append(
                            ' rename %s_gps %s_xcl %-20s %s %02i %02i 0 0 %s %02i %02i 24 0\n'
                            %
                            (m['alias'][i], m['alias'][i], org + date.wwwwd() +
                             '.GLX', date.yyyy()[2:], date.month, date.day,
                             date.yyyy()[2:], date.month, date.day))
                    else:
                        use_site.append('%s_gps' % stn['StationCode'])

        fd.write('# LIST OF STATIONS TO BE REMOVED\n')
        fd.write(''.join(remove))
        fd.write('# LIST OF STATIONS TO BE RENAMED\n')
        fd.write(''.join(rename))

    print ' >> Converting to SINEX the daily solutions'

    for day, glx in enumerate(glx_list):
        date = pyDate.Date(gpsWeek=int(args.gpsweek[0]), gpsWeekDay=day)

        print ' -- Working on %s' % date.wwwwd()
        # delete the existing GLX files
        for ff in glob.glob(globk_pwd + '/*.GLX'):
            os.remove(ff)

        Globk(globk_pwd, org, [glx], date.wwww(), date.gpsWeekDay + 8,
              ' '.join(set(use_site)))
        # convert the file to a valid gpsweek day
        move(
            globk_pwd + '/' + org + date.wwww() + '%i.snx' %
            (date.gpsWeekDay + 8),
            globk_pwd + '/' + org + date.wwww() + '%i.snx' % date.gpsWeekDay)

        process_sinex(
            cnn, project, [date, date],
            globk_pwd + '/' + org + date.wwww() + '%i.snx' % date.gpsWeekDay)

    # delete the existing GLX files: get ready for weekly combination
    for ff in glob.glob(globk_pwd + '/*.GLX'):
        os.remove(ff)
    # ready to pass list to globk object
    Globk(globk_pwd, org, glx_list, date_s.wwww(), 7, ' '.join(set(use_site)))
    print ' >> Formatting the SINEX file'

    process_sinex(cnn, project, [date_s, date_e],
                  globk_pwd + '/' + org + date_s.wwww() + '7.snx')