Example #1
def parse_arguments(argstring):
    parser = ArgumentRecorder()
    add_arguments(parser)
    args, extra_args = parser.parse_known_args(argstring)
    if '--ignore-gooey' in extra_args:  # Gooey adds '--ignore-gooey' when it calls the command
        extra_args.remove('--ignore-gooey')

    if args.logfile:
        logfile = open(args.logfile, 'w')
        parser.write_comments(args,
                              logfile,
                              incomments=ArgumentHelper.separator())
        logfile.close()

    args.extra_args = extra_args
    # Each substitution is a 'key:value' pair; split on the first colon only
    # so that values may themselves contain colons.
    args.substitute = dict(
        sub.split(':', 1) for sub in args.substitute
    ) if args.substitute else {}
    return args
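
These snippets build on the argrecord package: ArgumentRecorder subclasses argparse.ArgumentParser and adds the private/input/output keyword arguments and comment logging used throughout, while ArgumentHelper reads and writes those comment headers. A minimal sketch of how this function might be driven; add_arguments is an assumption standing in for whatever registers the program's options:

import sys

from argrecord import ArgumentHelper, ArgumentRecorder


def add_arguments(parser):  # hypothetical: defined elsewhere in the real module
    parser.add_argument('--logfile', type=str, private=True)
    parser.add_argument('--substitute', type=str, nargs='*',
                        help="'key:value' pairs to substitute")


args = parse_arguments(sys.argv[1:])
print(args.substitute, args.extra_args)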
Example #2
def bomSites(arglist=None):

    parser = ArgumentRecorder(
        description=
        'Output BOM site data for a given state to CSV or database.',
        fromfile_prefix_chars='@')

    parser.add_argument('-v', '--verbosity', type=int, default=1, private=True)
    parser.add_argument('-l',
                        '--limit',
                        type=int,
                        help='Limit number of rows to process')

    parser.add_argument('-s',
                        '--state',
                        type=str,
                        choices=['SA', 'NSW', 'NT', 'QLD', 'TAS', 'VIC', 'WA'],
                        required=True)

    parser.add_argument('--no-comments',
                        action='store_true',
                        help='Do not output descriptive comments')
    parser.add_argument('--no-header',
                        action='store_true',
                        help='Do not output CSV header with column names')

    parser.add_argument(
        'outdata',
        type=str,
        nargs='?',
        help=
        'Output CSV file or SQLAlchemy specification, otherwise use stdout.',
        output=True)

    args = parser.parse_args(arglist)

    if not args.outdata:
        outfile = sys.stdout
        bomdb = None
        logfilename = None
    elif "://" in args.outdata:  # Database
        outfile = None
        bomdb = create_engine(args.outdata)
        logfilename = args.outdata.split('/')[-1].rsplit('.', 1)[0] + '.log'
    else:
        if os.path.exists(args.outdata):
            shutil.move(args.outdata, args.outdata + '.bak')

        outfile = open(args.outdata, 'w')
        bomdb = None
        logfilename = None

    if not args.no_comments:
        if logfilename:
            logfile = open(logfilename, 'w')
        else:
            logfile = outfile

        parser.write_comments(args,
                              logfile,
                              incomments=ArgumentHelper.separator())

        if logfilename:
            logfile.close()

    if args.verbosity >= 1:
        print("Loading BOM data.", file=sys.stderr)

    reqlines = requests.get(
        'http://www.bom.gov.au/climate/data/lists_by_element/alpha' +
        args.state + '_136.txt',
        stream=True).iter_lines(decode_unicode=True)

    firstline = next(reqlines)
    produced = dateparser.parse(
        re.match('.+Produced: (.+)', firstline).group(1))

    dummyline = next(reqlines)
    headingline = next(reqlines)
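    # The heading line defines a fixed-width layout: record each column name
    # with its start offset, then extend every entry to a (name, start, end)
    # triple (the final column runs to the end of the line) so data rows can
    # be sliced positionally.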
    fields = [(m.group(), m.start()) for m in re.finditer(r'\S+', headingline)]
    for idx in range(len(fields)):
        if idx:
            fields[idx - 1] += (fields[idx][1] - 1, )
    fields[-1] += (None, )
    dummyline = next(reqlines)

    def str2bool(v):
        return v.lower() in ("yes", "true", "t", "1")

    def partdate(v):
        return dateparser.parse(v, default=datetime(1, 1, 1))

    fieldtype = {
        'Site': ('Site', Integer, int),
        'Name': ('Name', String(32), str.strip),
        'Lat': ('Lat', Float, float),
        'Lon': ('Lon', Float, float),
        'Start': ('Start', Date, partdate),
        'End': ('End', Date, partdate),
        'Years': ('Years', Float, float),
        '%': ('Percent', Integer, int),
        'AWS': ('AWS', Boolean, str2bool)
    }

    if bomdb:  # Database
        bomcon = bomdb.connect()
        bomtr = bomcon.begin()
        bommd = MetaData(bind=bomdb)
        try:
            bomSite = Table('Site', bommd, autoload=True)
        except exc.NoSuchTableError:
            bomSite = Table('Site', bommd)
            for field in fields:
                bomSite = Table('Site',
                                bommd,
                                Column(fieldtype[field[0]][0],
                                       fieldtype[field[0]][1]),
                                extend_existing=True)
            bomSite.create(bomdb)

        inrowcount = 0
        while True:
            if args.limit and inrowcount == args.limit:
                break
            inrowcount += 1

            line = next(reqlines)
            if not line:
                break
            bomcon.execute(bomSite.insert().values({
                fieldtype[field[0]][0]:
                fieldtype[field[0]][2](line[field[1]:field[2]])
                for field in fields
            }))

        bomtr.commit()
        bomtr = None
        bomcon.close()
        bomdb.dispose()

    else:
        outcsv = csv.writer(outfile)
        if not args.no_header:
            outcsv.writerow([fieldtype[field[0]][0] for field in fields])
        inrowcount = 0
        while True:
            if args.limit and inrowcount == args.limit:
                break
            inrowcount += 1

            line = next(reqlines)
            if not line:
                break
            outcsv.writerow(
                [str.strip(line[field[1]:field[2]]) for field in fields])

        outfile.close()

    sys.exit(0)
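
A hypothetical invocation (file and database names are made up); since the function finishes with sys.exit(0), it is written to be run as a command rather than composed:

# Write the first 100 sites for WA to CSV:
bomSites(['--state', 'WA', '--limit', '100', 'bomsites.csv'])
# ... or load them into a database via an SQLAlchemy URL:
#     bomSites(['--state', 'WA', 'sqlite:///bomsites.db'])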
Example #3
def bomPresentation(arglist=None):

    parser = ArgumentRecorder(
        description='Present BOM wind data as wind rose.',
        fromfile_prefix_chars='@')

    parser.add_argument('-v', '--verbosity', type=int, default=1, private=True)

    parser.add_argument('-l',
                        '--limit',
                        type=int,
                        help='Limit number of rows to process')
    parser.add_argument('--since',
                        type=str,
                        help='Lower bound date/time in any sensible format')
    parser.add_argument('--until',
                        type=str,
                        help='Upper bound date/time in any sensible format')

    parser.add_argument(
        '-b',
        '--bomfile',
        type=str,
        help='CSV file containing BOM data; otherwise use stdin')

    parser.add_argument('-o',
                        '--outfile',
                        type=str,
                        help='Output SVG file, otherwise plot on screen.',
                        output=True)
    parser.add_argument('--logfile', type=str, help="Logfile", private=True)
    parser.add_argument('--no-logfile',
                        action='store_true',
                        help='Do not output descriptive comments')

    args = parser.parse_args(arglist)

    if args.bomfile:
        bomfile = open(args.bomfile, 'r')
    else:
        bomfile = peekable(sys.stdin)

    until = dateparser.parse(args.until) if args.until else None
    since = dateparser.parse(args.since) if args.since else None

    # Read comments at start of bomfile.
    incomments = ArgumentHelper.read_comments(
        bomfile) or ArgumentHelper.separator()
    bomfieldnames = next(csv.reader([next(bomfile)]))
    bomcsv = csv.DictReader(bomfile, fieldnames=bomfieldnames)

    if not args.no_logfile:
        if not args.logfile and not args.outfile:
            logfile = sys.stdout
        else:
            if args.logfile:
                logfilename = args.logfile
            elif args.outfile:
                logfilename = args.outfile.split('/')[-1].rsplit('.',
                                                                 1)[0] + '.log'

            logfile = open(logfilename, 'w')

        parser.write_comments(args, logfile, incomments=incomments)

        if args.logfile or args.outfile:
            logfile.close()

    windspeed = []
    winddir = []
    linecount = 0
    for bomline in bomcsv:
        timestamp = dateparser.parse(bomline['DateTime'])
        if since and timestamp < since:
            continue
        if until and timestamp > until:
            continue

        windspeed += [float(bomline['Wind speed measured in km/h'])]
        winddir += [float(bomline['Wind direction measured in degrees'])]
        linecount += 1
        if args.limit and linecount == args.limit:
            break

    windspeed = numpy.array(windspeed)
    winddir = numpy.array(winddir)

    ax = WindroseAxes.from_ax()
    ax.bar(winddir, windspeed, bins=numpy.arange(0, 80, 10), cmap=cm.Blues)
    ax.set_yticks(numpy.arange(0, 10, 2))
    ax.set_yticklabels([])
    #ax.axis('off')
    #ax.set_legend()

    if args.outfile:
        pyplot.savefig(args.outfile, transparent=True, format='svg')
    else:
        pyplot.show()

    if args.bomfile:
        bomfile.close()
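
A hypothetical call, assuming a CSV with the 'DateTime', 'Wind speed measured in km/h' and 'Wind direction measured in degrees' columns the loop reads (WindroseAxes comes from the windrose package):

bomPresentation(['--bomfile', 'bomdata.csv',
                 '--since', '1 January 2020', '--until', '8 January 2020',
                 '--outfile', 'windrose.svg'])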
Example #4
def bomDailyRainfall(arglist=None):

    parser = ArgumentRecorder(
        description='Output BOM daily rainfall data to CSV or database.',
        fromfile_prefix_chars='@')

    parser.add_argument('-v', '--verbosity', type=int, default=1, private=True)
    parser.add_argument('-l',
                        '--limit',
                        type=int,
                        help='Limit number of rows to process')

    parser.add_argument('--no-comments',
                        action='store_true',
                        help='Do not output descriptive comments')
    parser.add_argument('--no-header',
                        action='store_true',
                        help='Do not output CSV header with column names')

    parser.add_argument(
        '-f',
        '--filter',
        type=str,
        help=
        'Python expression evaluated to determine whether site is included, for example \'Name == "WALPOLE"\''
    )
    parser.add_argument('-d',
                        '--dry-run',
                        action='store_true',
                        help='Just select sites without collecting data')

    parser.add_argument(
        '-s',
        '--sites',
        type=str,
        required=True,
        help='CSV file or SQLAlchemy database specification for site data',
        input=True)

    parser.add_argument(
        'outdata',
        type=str,
        nargs='?',
        help=
        'Output CSV file, otherwise use database if specified, or stdout if not.',
        output=True)

    args = parser.parse_args(arglist)
    hiddenargs = ['verbosity', 'no_comments']

    incomments = ''
    if "://" in args.sites:  # Database
        outfile = None
        bomdb = create_engine(args.sites)
        bomcon = bomdb.connect()
        bommd = MetaData(bind=bomdb)
        logfilename = args.sites.split('/')[-1].rsplit('.', 1)[0] + '.log'
    else:
        bomdb = None
        sitefile = open(args.sites, 'r')
        # Read comments at start of infile.
        incomments = ArgumentHelper.read_comments(sitefile)
        sitefieldnames = next(csv.reader([next(sitefile)]))
        sitereader = csv.DictReader(sitefile, fieldnames=sitefieldnames)

    if args.outdata:
        if os.path.exists(args.outdata):
            shutil.move(args.outdata, args.outdata + '.bak')

        outfile = open(args.outdata, 'w')
        logfilename = None
    elif bomdb is None:
        outfile = sys.stdout
        logfilename = None

    if not args.no_comments and not args.dry_run:
        comments = parser.build_comments(args, args.outdata)

        if logfilename:
            incomments += open(logfilename, 'r').read()
            logfile = open(logfilename, 'w')
        else:
            logfile = outfile

        if not incomments:
            incomments = ArgumentHelper.separator()

        comments += incomments
        logfile.write(comments)

        if logfilename:
            logfile.close()

    if bomdb:
        bomSite = Table('Site', bommd, autoload=True)
        columns = [col.key for col in bomSite.c]
    else:
        columns = sitefieldnames

    if args.filter:
        # Compile the --filter expression into a function whose parameters are
        # the site columns, so that e.g. 'Name == "WALPOLE"' can refer to them
        # by name; **kwargs absorbs any columns the expression does not use.
        exec("def evalfilter(" + ','.join(columns) + ", **kwargs):\n"
             "    return " + args.filter, globals())

    sites = []
    if bomdb:
        for row in bomcon.execute(bomSite.select()):
            rowargs = {item[0]: item[1] for item in row.items()}
            keep = True
            if args.filter:
                keep = evalfilter(**rowargs) or False
            if not keep:
                continue

            sites += [row]
    else:
        for row in sitereader:
            rowargs = {item[0]: item[1] for item in row.items()}
            keep = True
            if args.filter:
                keep = evalfilter(**rowargs) or False
            if not keep:
                continue

            sites += [row]

    if args.verbosity >= 1:
        print("Found " + str(len(sites)) + " sites:", file=sys.stderr)
        for site in sites:
            print("    " + site['Name'] + " - " + str(site['Site']),
                  file=sys.stderr)

    def intOrNone(v):
        return int(v) if v else None

    def floatOrNone(v):
        return float(v) if v else None

    outfields = OrderedDict([
        ('Product code', ('Product', str.strip)),
        ('Bureau of Meteorology station number', ('Site', int)),
        ('Date', ('Date', dateparser.parse)),
        ('Rainfall amount (millimetres)', ('Rainfall', floatOrNone)),
        ('Period over which rainfall was measured (days)', ('Period',
                                                            intOrNone)),
        ('Quality', ('Quality', str.strip))
    ])

    if outfile:
        outcsv = csv.writer(outfile)
        if not args.no_header:
            outcsv.writerow([data[0] for field, data in outfields.items()])
    else:
        try:
            bomRainfall = Table('Rainfall', bommd, autoload=True)
        except exc.NoSuchTableError:
            bomRainfall = Table(
                'Rainfall', bommd,
                Column('Product', String(32), primary_key=True),
                Column('Site', Integer, primary_key=True),
                Column('Date', Date, primary_key=True),
                Column('Rainfall', Float), Column('Period', Integer),
                Column('Quality', String(32)))
            bomRainfall.create(bomdb)

        bomtr = bomcon.begin()

    for site in sites:

        if args.verbosity >= 1:
            print("Loading BOM daily rainfall data from site " + site['Name'] +
                  " - " + str(site['Site']),
                  file=sys.stderr)

        if args.dry_run:
            continue

        sitepage = requests.get(
            'http://www.bom.gov.au/jsp/ncc/cdio/weatherData/av?p_nccObsCode=136&p_display_type=dailyDataFile&p_startYear=&p_c=&p_stn_num='
            + str(site['Site']))

        soup = BeautifulSoup(sitepage.content, "html.parser")
        link = soup.find(
            "a", title="Data file for daily rainfall data for all years")
        if not link:
            raise RuntimeError("Station data not found")

        zipfile = ZipFile(
            BytesIO(
                requests.get('http://www.bom.gov.au' + link['href'],
                             stream=True).content))
        csvname = next(name for name in zipfile.namelist()
                       if name[-4:] == '.csv')
        csvdata = csv.DictReader(TextIOWrapper(zipfile.open(csvname)))
        fields = csvdata.fieldnames
        fields += ('Date', )

        inrowcount = 0
        while True:
            if args.limit and inrowcount == args.limit:
                break

            try:
                line = next(csvdata)
            except StopIteration:
                break
            if line['Rainfall amount (millimetres)'] == '':
                continue

            inrowcount += 1

            line['Date'] = line['Year'] + '-' + line['Month'] + '-' + line[
                'Day']

            # Either write the row to CSV, or upsert into the database: try an
            # INSERT first, falling back to an UPDATE keyed on (Product, Site,
            # Date) when the row already exists.
            if outfile:
                outcsv.writerow([line[field] for field in outfields])
            else:
                try:
                    bomcon.execute(bomRainfall.insert().values({
                        data[0]: data[1](line[field])
                        for field, data in outfields.items()
                    }))
                except exc.IntegrityError:
                    bomcon.execute(
                        bomRainfall.update(
                            and_(
                                bomRainfall.c['Product'] == bindparam(
                                    'Product'),
                                bomRainfall.c['Site'] == bindparam('Site'),
                                bomRainfall.c['Date'] == bindparam(
                                    'Date'))).values({
                                        data[0]: bindparam(data[0])
                                        for field, data in outfields.items()
                                    }), {
                                        data[0]: data[1](line[field])
                                        for field, data in outfields.items()
                                    })

    if outfile:
        outfile.close()
    else:
        bomtr.commit()

    if bomdb:
        bomcon.close()
        bomdb.dispose()

    sys.exit(0)
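
Hypothetical invocations, chaining onto the bomSites output above:

# Sites from CSV, rainfall to CSV:
bomDailyRainfall(['--sites', 'bomsites.csv',
                  '--filter', 'Name == "WALPOLE"', 'rainfall.csv'])
# Sites from a database, rainfall written back into the same database:
#     bomDailyRainfall(['--sites', 'sqlite:///bomsites.db',
#                       '--filter', 'Name == "WALPOLE"'])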
Example #5
def bomPresentation(arglist=None):

    parser = ArgumentRecorder(
        description='Present BOM temperature and humidity as line graph.',
        fromfile_prefix_chars='@')

    parser.add_argument('-v', '--verbosity', type=int, default=1, private=True)

    parser.add_argument(
        '-b',
        '--bomfile',
        type=str,
        help='CSV file containing BOM data; otherwise use stdin')
    parser.add_argument('-O',
                        '--obsfile',
                        type=str,
                        help='CSV file containing observation timestamps')

    parser.add_argument('-l',
                        '--limit',
                        type=int,
                        help='Limit number of rows to process')
    parser.add_argument('--since',
                        type=str,
                        help='Lower bound date/time in any sensible format')
    parser.add_argument('--until',
                        type=str,
                        help='Upper bound date/time in any sensible format')

    parser.add_argument('-W',
                        '--width',
                        type=int,
                        default=297,
                        help='Plot width in millimetres')
    parser.add_argument('-H',
                        '--height',
                        type=int,
                        default=50,
                        help='Plot height in millimetres')

    parser.add_argument('-o',
                        '--outfile',
                        type=str,
                        help='Output SVG file, otherwise plot on screen.',
                        output=True)
    parser.add_argument('--logfile', type=str, help="Logfile", private=True)
    parser.add_argument('--no-logfile',
                        action='store_true',
                        help='Do not output descriptive comments')

    args = parser.parse_args(arglist)

    if args.bomfile:
        bomfile = open(args.bomfile, 'r')
    else:
        bomfile = peekable(sys.stdin)

    until = dateparser.parse(args.until) if args.until else None
    since = dateparser.parse(args.since) if args.since else None

    # Read comments at start of bomfile.
    incomments = ArgumentHelper.read_comments(
        bomfile) or ArgumentHelper.separator()
    bomfieldnames = next(csv.reader([next(bomfile)]))
    bomcsv = csv.DictReader(bomfile, fieldnames=bomfieldnames)

    if not args.no_logfile:
        if not args.logfile and not args.outfile:
            logfile = sys.stdout
        else:
            if args.logfile:
                logfilename = args.logfile
            elif args.outfile:
                logfilename = args.outfile.split('/')[-1].rsplit('.',
                                                                 1)[0] + '.log'

            logfile = open(logfilename, 'w')

        parser.write_comments(args, logfile, incomments=incomments)

        if args.logfile or args.outfile:
            logfile.close()

    observations = []
    if args.obsfile:
        obsfile = open(args.obsfile, 'r')
        obsfieldnames = next(csv.reader([next(obsfile)]))
        obscsv = csv.DictReader(obsfile, fieldnames=obsfieldnames)

        for obsline in obscsv:
            observations += [dateparser.parse(obsline['datetime'])]

    obsdatetime = []
    obstemp = []
    obshumidity = []
    start = None
    end = None
    linecount = 0
    for bomline in bomcsv:
        timestamp = dateparser.parse(bomline['DateTime'])
        if since and timestamp < since:
            continue
        if until and timestamp > until:
            continue

        if not start or timestamp < start:
            start = timestamp
        if not end or timestamp > end:
            end = timestamp

        obsdatetime += [timestamp]
        obstemp += [float(bomline['Air temperature in Degrees C'])]
        obshumidity += [float(bomline['Relative humidity in percentage %'])]

        linecount += 1
        if args.limit and linecount == args.limit:
            break

    fig, ax1 = pyplot.subplots()
    fig.set_size_inches(args.width / 25.4, args.height / 25.4)
    ax2 = ax1.twinx()
    ax3 = ax1.twiny()

    ax1.set_xlim(left=start, right=end)
    ax1.set_ylabel("Temperature", color="red")
    ax1.tick_params(axis="y", colors="red")
    ax1.plot(obsdatetime, obstemp, color="red")

    ax2.plot(obsdatetime, obshumidity, color="blue")
    ax2.set_ylabel("Relative humidity", color="blue")
    ax2.tick_params(axis="y", colors="blue")
    ax2.spines['left'].set_visible(False)

    ax1.xaxis.set_minor_locator(mdates.HourLocator(byhour=range(0, 24, 3)))
    ax1.xaxis.set_minor_formatter(mdates.DateFormatter('%H'))
    ax1.xaxis.set_major_locator(mdates.HourLocator(byhour=12))
    ax1.xaxis.set_major_formatter(mdates.DateFormatter('%d/%m'))
    ax1.xaxis.set_remove_overlapping_locs(False)
    ax1.tick_params(axis='x', which='major', length=0)
    ax1.xaxis.set_tick_params(which='major', pad=18, labelsize="large")

    ax3.xaxis.set_ticks_position('top')
    ax3.set_xticks(observations)
    ax3.set_xticklabels(observations)
    pyplot.setp(ax3.get_xticklabels(), visible=False)
    ax3.set_xlim(left=start, right=end)
    ax3.yaxis.set_visible(False)

    if args.outfile:
        pyplot.savefig(args.outfile, transparent=True, format='svg')
    else:
        pyplot.show()

    if args.bomfile:
        bomfile.close()
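
A hypothetical call; the CSV needs the 'DateTime', 'Air temperature in Degrees C' and 'Relative humidity in percentage %' columns read in the loop above:

bomPresentation(['--bomfile', 'bomdata.csv',
                 '--obsfile', 'observations.csv',
                 '--outfile', 'temphumidity.svg'])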
Example #6
def trackHotspotSatellite(arglist=None):

    parser = ArgumentRecorder(
        description=
        'Track satellite position, bearing, and previous and next pass date/times from hotspot data.',
        fromfile_prefix_chars='@')

    parser.add_argument('-v', '--verbosity', type=int, default=1, private=True)

    parser.add_argument('-u',
                        '--user',
                        type=str,
                        required=True,
                        help="PostgreSQL username")
    parser.add_argument('-d',
                        '--database',
                        type=str,
                        required=True,
                        help="PostgreSQL database")

    parser.add_argument('-l',
                        '--limit',
                        type=int,
                        help='Limit number of rows to process')

    parser.add_argument('-t',
                        '--tlefile',
                        type=str,
                        required=True,
                        help='File containing TLE data')
    parser.add_argument('-w',
                        '--where',
                        type=str,
                        required=True,
                        help="'Where' clause to select hotspots")
    parser.add_argument('-H',
                        '--hotspots',
                        type=str,
                        default='hotspots',
                        help="Hotspot table name")
    parser.add_argument(
        '-s',
        '--suffix',
        type=str,
        required=True,
        help="Suffix to append to 'hotspots' to get output table name")
    parser.add_argument('-D',
                        '--drop-table',
                        action='store_true',
                        help='Drop output table if it exists')

    parser.add_argument('--logfile',
                        type=str,
                        help="Logfile, default is 'hotspots'_'suffix'.log",
                        private=True)
    parser.add_argument('--no-logfile',
                        action='store_true',
                        help='Do not output descriptive comments')

    args = parser.parse_args(arglist)

    if not args.no_logfile:
        if not args.logfile:
            args.logfile = args.hotspots + '_' + args.suffix + '.log'

        if os.path.exists(args.logfile):
            shutil.move(args.logfile,
                        args.logfile.split('/')[-1].rsplit('.', 1)[0] + '.bak')

        logfile = open(args.logfile, 'w')
        parser.write_comments(args,
                              logfile,
                              incomments=ArgumentHelper.separator())
        logfile.close()

    satcodes = {'N': 37849, 'Terra': 25994, 'Aqua': 27424}

    tledict = {}
    for norad_id in satcodes.values():
        tledict[norad_id] = ()

    tlefile = open(args.tlefile, 'r')
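    # A TLE file is a sequence of two-line element sets: columns 3-7 of line 1
    # carry the NORAD catalogue number, and columns 19-32 the epoch as a
    # two-digit year plus fractional day of year (years up to 56 are read as
    # 20xx, per the usual TLE convention).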
    line1 = tlefile.readline().rstrip()
    while len(line1) > 1:
        norad_id = int(line1[2:7])
        year2d = int(line1[18:20])
        daynum = float(line1[20:32])
        tledate = datetime(2000 + year2d if year2d <= 56 else 1900 + year2d, 1,
                           1) + timedelta(days=daynum)
        line2 = tlefile.readline().rstrip()
        tledict[norad_id] += ((tledate, line1, line2), )
        line1 = tlefile.readline().rstrip()

    # Stream the selected hotspot rows out of PostgreSQL with a psql \copy;
    # a second psql process below recreates the table with the extra pass
    # columns and streams the augmented rows back in.
    psqlin = subprocess.Popen([
        'psql', args.database, args.user, '--quiet', '--command',
        r'\timing off', '--command',
        r'\copy (SELECT * FROM ' + args.hotspots + ' WHERE ' + args.where +
        ' ORDER BY acq_date + acq_time) TO STDOUT CSV HEADER'
    ],
                              stdout=subprocess.PIPE,
                              encoding='UTF-8')

    satcsv = csv.DictReader(psqlin.stdout)

    psqlout = subprocess.Popen([
        'psql', args.database, args.user, '--quiet', '--command',
        r'\timing off'
    ] + ([
        '--command', 'DROP TABLE IF EXISTS ' + args.hotspots + '_' +
        args.suffix
    ] if args.drop_table else []) + [
        '--command', 'CREATE TABLE ' + args.hotspots + '_' + args.suffix +
        ' AS TABLE ' + args.hotspots + ' WITH NO DATA', '--command',
        'ALTER TABLE ' + args.hotspots + '_' + args.suffix + '     \
                                                  ADD COLUMN pass_azimuth NUMERIC(8,5),                 \
                                                  ADD COLUMN pass_elevation NUMERIC(8,5),               \
                                                  ADD COLUMN pass_bearing NUMERIC(8,5),                 \
                                                  ADD COLUMN pass_datetime TIMESTAMP WITHOUT TIME ZONE, \
                                                  ADD COLUMN prev_azimuth NUMERIC(8,5),                 \
                                                  ADD COLUMN prev_elevation NUMERIC(8,5),               \
                                                  ADD COLUMN prev_datetime TIMESTAMP WITHOUT TIME ZONE, \
                                                  ADD COLUMN next_azimuth NUMERIC(8,5),                 \
                                                  ADD COLUMN next_elevation NUMERIC(8,5),               \
                                                  ADD COLUMN next_datetime TIMESTAMP WITHOUT TIME ZONE, \
                                                  DROP COLUMN IF EXISTS geometry,                       \
                                                  ADD COLUMN geometry geometry GENERATED ALWAYS AS (ST_Rotate(ST_MakeEnvelope((ST_X(ST_Transform(ST_SetSRID(ST_MakePoint((longitude)::DOUBLE PRECISION, (latitude)::DOUBLE PRECISION), 4326), 28350)) - ((scan * (500)::NUMERIC))::DOUBLE PRECISION), (ST_Y(ST_Transform(ST_SetSRID(ST_MakePoint((longitude)::DOUBLE PRECISION, (latitude)::DOUBLE PRECISION), 4326), 28350)) - ((track * (500)::NUMERIC))::DOUBLE PRECISION), (ST_X(ST_Transform(ST_SetSRID(ST_MakePoint((longitude)::DOUBLE PRECISION, (latitude)::DOUBLE PRECISION), 4326), 28350)) + ((scan * (500)::NUMERIC))::DOUBLE PRECISION), (ST_Y(ST_Transform(ST_SetSRID(ST_MakePoint((longitude)::DOUBLE PRECISION, (latitude)::DOUBLE PRECISION), 4326), 28350)) + ((track * (500)::NUMERIC))::DOUBLE PRECISION), 28350), ((((- pass_bearing) * 3.1415926) / (180)::NUMERIC))::DOUBLE PRECISION, ST_Transform(ST_SetSRID(ST_MakePoint((longitude)::DOUBLE PRECISION, (latitude)::DOUBLE PRECISION), 4326), 28350))) STORED',
        '--command', r'\copy ' + args.hotspots + '_' + args.suffix +
        ' FROM STDIN CSV HEADER', '--command',
        'ALTER TABLE ' + args.hotspots + '_' + args.suffix + '     \
                                                  ADD COLUMN id SERIAL'
    ],
                               stdin=subprocess.PIPE,
                               encoding='UTF-8')

    satout = csv.DictWriter(
        psqlout.stdin,
        fieldnames=satcsv.fieldnames + [
            'pass_azimuth', 'pass_elevation', 'pass_bearing', 'pass_datetime',
            'prev_azimuth', 'prev_elevation', 'prev_datetime', 'next_azimuth',
            'next_elevation', 'next_datetime'
        ])

    minelevation = 90
    tleindexes = {}
    lastdatetime = datetime(MINYEAR, 1, 1)
    lastline1 = None
    lastline2 = None
    inrowcount = 0
    for satline in satcsv:
        thisdatetime = dateparser.parse(satline['acq_date'] + ' ' +
                                        satline['acq_time'])
        satcode = satcodes[satline['satellite']]
        tletuples = tledict[satcode]
        tleidx = tleindexes.get(satcode, 0)
        assert (thisdatetime >= lastdatetime)
        lastdatetime = thisdatetime
        while tletuples[tleidx + 1][0] <= thisdatetime - timedelta(hours=12):
            tleidx += 1
        tleindexes[satcode] = tleidx

        tletuple = tletuples[tleidx]
        line1 = tletuple[1]
        line2 = tletuple[2]

        if line1 != lastline1 or line2 != lastline2:
            orb = Orbital("", line1=line1, line2=line2)
            lastline1 = line1
            lastline2 = line2

        passdatetimes = [
            next_pass[2]
            for next_pass in orb.get_next_passes(thisdatetime -
                                                 timedelta(hours=24),
                                                 48,
                                                 float(satline['longitude']),
                                                 float(satline['latitude']),
                                                 0,
                                                 horizon=0)
        ]

        nearpasses = []
        leastoffset = 999999
        leastoffsetidx = None
        for passidx in range(len(passdatetimes)):
            max_elevation_time = passdatetimes[passidx]
            (azimuth,
             elevation) = orb.get_observer_look(max_elevation_time,
                                                float(satline['longitude']),
                                                float(satline['latitude']), 0)
            thisoffset = (max_elevation_time - thisdatetime).total_seconds()
            if abs(thisoffset) < abs(leastoffset):
                leastoffsetidx = len(nearpasses)
                leastoffset = thisoffset

            nearpasses += [(max_elevation_time, azimuth, elevation)]

        if abs(leastoffset) > 600:
            print("WARNING: offset=", leastoffset, "prev offset=",
                  (nearpasses[leastoffsetidx - 1][0] -
                   thisdatetime).total_seconds() if leastoffsetidx > 0 else "",
                  "next offset=", (nearpasses[leastoffsetidx + 1][0] -
                                   thisdatetime).total_seconds()
                  if leastoffsetidx < len(nearpasses) - 1 else "",
                  " satellite ", satline['satellite'])
            #print("   ", satline)

        if len(nearpasses):
            nearestpass = nearpasses[leastoffsetidx]
            satline['pass_datetime'] = nearestpass[0]
            satline['pass_azimuth'] = nearestpass[1]
            satline['pass_elevation'] = nearestpass[2]

            # Sample the ground track 30 s either side of the nearest pass
            # (not the last pass inspected by the loop above) to derive its
            # bearing.
            (lon1, lat1, alt1) = orb.get_lonlatalt(nearestpass[0] -
                                                   timedelta(seconds=30))
            (lon2, lat2, alt2) = orb.get_lonlatalt(nearestpass[0] +
                                                   timedelta(seconds=30))
            point1 = (lon1, lat1)
            point2 = (lon2, lat2)
            bearing = sphere.bearing(point1, point2)
            satline['pass_bearing'] = bearing

            if leastoffsetidx > 0:
                prevpass = nearpasses[leastoffsetidx - 1]
                satline['prev_datetime'] = prevpass[0]
                satline['prev_azimuth'] = prevpass[1]
                satline['prev_elevation'] = prevpass[2]

            if leastoffsetidx < len(nearpasses) - 1:
                nextpass = nearpasses[leastoffsetidx + 1]
                satline['next_datetime'] = nextpass[0]
                satline['next_azimuth'] = nextpass[1]
                satline['next_elevation'] = nextpass[2]

        satout.writerow(satline)

        inrowcount += 1
        if args.limit and inrowcount == args.limit:
            break
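
The pass calculations rely on pyorbital; a self-contained sketch of the two calls used above (the TLE pair and observer coordinates are whatever the caller supplies):

from pyorbital.orbital import Orbital


def passes_for(line1, line2, lon, lat, when):
    # get_next_passes returns (rise, fall, max_elevation_time) tuples for the
    # 48 hours following `when`; get_observer_look gives the satellite's
    # azimuth and elevation at a given time, as seen from (lon, lat).
    orb = Orbital("", line1=line1, line2=line2)
    return [(p[2],) + orb.get_observer_look(p[2], lon, lat, 0)
            for p in orb.get_next_passes(when, 48, lon, lat, 0, horizon=0)]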
Example #7
def fireProgression(arglist=None):

    parser = ArgumentRecorder(
        description=
        'Export fire progression from hotspot data as a sequence of QGIS layouts.',
        fromfile_prefix_chars='@')

    parser.add_argument('-v', '--verbosity', type=int, default=1, private=True)

    parser.add_argument('-u',
                        '--user',
                        type=str,
                        required=True,
                        help="PostgreSQL username")
    parser.add_argument('-d',
                        '--database',
                        type=str,
                        required=True,
                        help="PostgreSQL database")
    parser.add_argument('-t',
                        '--table',
                        type=str,
                        required=True,
                        help="PostgreSQL table with hotspot datas")

    parser.add_argument('-q',
                        '--qgisfile',
                        type=str,
                        required=True,
                        help='QGIS base file')
    parser.add_argument('-l',
                        '--layout',
                        type=str,
                        required=True,
                        help='Layout from QGIS file')

    parser.add_argument(
        '-o',
        '--outfile',
        type=str,
        help='Output SVG file base name, otherwise use table name.',
        output=True)

    parser.add_argument('--logfile', type=str, help="Logfile", private=True)
    parser.add_argument('--no-logfile',
                        action='store_true',
                        help='Do not output descriptive comments')

    args = parser.parse_args(arglist)

    if not args.outfile:
        args.outfile = args.table

    if not args.no_logfile:
        if not args.logfile and not args.outfile:
            logfile = sys.stdout
        else:
            if args.logfile:
                logfilename = args.logfile
            elif args.outfile:
                logfilename = args.outfile.split('/')[-1].rsplit('.',
                                                                 1)[0] + '.log'

            logfile = open(logfilename, 'w')

        parser.write_comments(args,
                              logfile,
                              incomments=ArgumentHelper.separator())

        if args.logfile or args.outfile:
            logfile.close()

    QgsApplication.setPrefixPath("/usr", True)
    qgs = QgsApplication([], True)
    qgs.initQgis()

    project = QgsProject.instance()
    project.read(args.qgisfile)

    manager = QgsProject.instance().layoutManager()
    layout = manager.layoutByName(args.layout)

    psqlin = subprocess.Popen([
        'psql', args.database, args.user, '--quiet', '--command',
        r'\timing off', '--command',
        r'\copy (SELECT satellite, instrument, acq_date + acq_time AS datetime FROM '
        + args.table +
        '       GROUP BY satellite, instrument, datetime ORDER BY datetime) TO STDOUT CSV HEADER'
    ],
                              stdout=subprocess.PIPE,
                              encoding='UTF-8')

    satcsv = csv.DictReader(psqlin.stdout)
    for satline in satcsv:
        temporal = QDateTime(dateparser.parse(satline['datetime']))
        print("Outputting: ", temporal.toString())
        layout.items()[0].setTemporalRange(QgsDateTimeRange(
            temporal, temporal))

        exporter = QgsLayoutExporter(layout)
        exporter.exportToSvg(args.outfile + '_' + satline['datetime'] + '.svg',
                             QgsLayoutExporter.SvgExportSettings())

    qgs.exitQgis()
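
A hypothetical run; the QGIS project file, layout name and table are placeholders:

fireProgression(['--user', 'postgres', '--database', 'fires',
                 '--table', 'hotspots_walpole',
                 '--qgisfile', 'fires.qgs', '--layout', 'progression'])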
Example #8
def retrieveTLE(arglist=None):

    parser = ArgumentRecorder(
        description='Retrieve TLE data from space-track.org',
        fromfile_prefix_chars='@')

    parser.add_argument('-v', '--verbosity', type=int, default=1, private=True)

    parser.add_argument('--no-comments',
                        action='store_true',
                        help='Do not output descriptive comments')
    parser.add_argument('--no-header',
                        action='store_true',
                        help='Do not output CSV header with column names')

    parser.add_argument('-u',
                        '--user',
                        type=str,
                        required=True,
                        help='SpaceTrack.org username')
    parser.add_argument('-p',
                        '--password',
                        type=str,
                        required=True,
                        help='SpaceTrack.org password')

    parser.add_argument('-s',
                        '--satellite',
                        type=str,
                        required=True,
                        help='NORAD name')
    parser.add_argument('--startdate',
                        type=str,
                        required=True,
                        help='Start date/time in any sensible format')
    parser.add_argument('--enddate',
                        type=str,
                        help='End date/time in any sensible format')
    parser.add_argument('-l',
                        '--limit',
                        type=int,
                        help='Limit number of TLEs to retrieve')

    parser.add_argument('-o',
                        '--outfile',
                        type=str,
                        help='Output CSV file, otherwise use stdout.',
                        output=True)
    parser.add_argument(
        '--logfile',
        type=str,
        help=
        "Logfile, default is 'outfile'.log or stdout if outfile not specified")
    parser.add_argument('--no-logfile',
                        action='store_true',
                        help='Do not output descriptive comments')

    args = parser.parse_args(arglist)

    args.startdate = dateparser.parse(
        args.startdate) if args.startdate else None
    args.enddate = dateparser.parse(args.enddate) if args.enddate else None

    if args.outfile is None:
        outfile = sys.stdout
    else:
        if os.path.exists(args.outfile):
            shutil.move(args.outfile, args.outfile + '.bak')

        outfile = open(args.outfile, 'w')

    if not args.no_logfile:
        if not args.logfile and not args.outfile:
            logfile = sys.stdout
        else:
            if args.logfile:
                logfilename = args.logfile
            elif args.outfile:
                logfilename = args.outfile.split('/')[-1].rsplit('.',
                                                                 1)[0] + '.log'

            logfile = open(logfilename, 'w')

        parser.write_comments(args,
                              logfile,
                              incomments=ArgumentHelper.separator())

        if args.logfile or args.outfile:
            logfile.close()

    st = SpaceTrackClient(identity=args.user, password=args.password)
    tledict = tlefile.read_platform_numbers()
    norad_cat_id = tledict[args.satellite]
    drange = op.inclusive_range(args.startdate, args.enddate or date.today())
    lines = st.tle(norad_cat_id=norad_cat_id,
                   epoch=drange,
                   format='tle',
                   limit=args.limit).split("\n")
    for line in lines:
        if len(line):
            outfile.write(line + "\n")

    if args.outfile is not None:
        outfile.close()

    sys.exit(0)
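
A hypothetical invocation fetching the TLEs consumed by trackHotspotSatellite above (credentials are placeholders; the satellite name must be a key in pyorbital's platform table):

retrieveTLE(['--user', 'someone@example.com', '--password', 'secret',
             '--satellite', 'AQUA',
             '--startdate', '1 January 2020', '--enddate', '31 December 2020',
             '--outfile', 'aqua.tle'])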