Example #1
def open_atcf_db(dbname=ATCF_DECKS_DB):
    '''Open the ATCF Decks Database, create it if it doesn't exist'''

    # Make sure the directory exists.  If the db doesn't exist,
    # the sqlite3.connect command will create it - which will
    # fail if the directory doesn't exist.
    from os.path import dirname as pathdirname
    from geoips2.filenames.base_paths import make_dirs
    make_dirs(pathdirname(dbname))

    import sqlite3
    conn = sqlite3.connect(dbname)
    conn_cursor = conn.cursor()
    # Create the table if it does not already exist.  CREATE TABLE IF NOT
    # EXISTS avoids masking unrelated errors behind a blanket
    # sqlite3.OperationalError handler.
    conn_cursor.execute('''CREATE TABLE IF NOT EXISTS atcf_deck_stormfiles
        (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
            filename text,
            last_updated timestamp DEFAULT CURRENT_TIMESTAMP NOT NULL,
            storm_num integer,
            storm_basin text,
            start_datetime timestamp,
            start_lat real,
            start_lon real,
            start_vmax real,
            start_name text,
            vmax real,
            end_datetime timestamp)''')
    # Possible future addition:
    # storm_start_datetime timestamp,
    return conn_cursor, conn
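A quick usage sketch for open_atcf_db (the database path and row values are hypothetical; only the table and column names come from the snippet above):

# Open (and create, if needed) the decks database at a throwaway path.
cursor, conn = open_atcf_db('/tmp/atcf_decks.db')

# Insert one storm record; timestamps are passed as ISO strings.
cursor.execute(
    'INSERT INTO atcf_deck_stormfiles '
    '(filename, storm_num, storm_basin, start_datetime, start_lat, '
    'start_lon, start_vmax, start_name, vmax, end_datetime) '
    'VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
    ('bwp162020.dat', 16, 'WP', '2020-10-10 22:23:25', 14.2, 138.5,
     35.0, 'SIXTEEN', 40.0, '2020-10-14 00:00:00'))
conn.commit()

# Read it back.
for row in cursor.execute(
        'SELECT filename, storm_basin, vmax FROM atcf_deck_stormfiles'):
    print(row)
conn.close()
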
Example #2
#
# You should have received a copy of the GNU Lesser General Public License
# along with pspbar.  If not, see <https://www.gnu.org/licenses/>.
#
'''(WiFi) Internet-monitoring segments'''


from typing import Dict
from os.path import join as joinpath
from os.path import dirname as pathdirname
from subprocess import Popen, PIPE
from psutil import net_io_counters
from .classes import BarSeg


NETCHECK = joinpath(pathdirname(__file__), 'shell_dep',
                    'netcheck.sh')


def ip_addr(_=None) -> Dict[str, str]:
    '''Create IP ADDRESS string'''
    color = 0x777777
    stdout, stderr = Popen(
        ['bash', NETCHECK], stdout=PIPE, stderr=PIPE
    ).communicate()
    stdout = stdout.decode("utf-8")
    # print("NET_STATUS:", stdout, stderr)
    if not stderr:
        addr = stdout.split("\t")[0]
        net_type = int(stdout.split("\t")[2])
        if net_type & 8:
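The snippet breaks off after the bitmask test. From the parsing above, netcheck.sh evidently prints a tab-separated line with the address in field 0 and an integer connection-type bitmask in field 2; a minimal stand-in for that contract (the bit meaning is inferred from the net_type & 8 test, not documented in the source):

def parse_netcheck_line(line):
    '''Parse an addr<TAB>...<TAB>bitmask line the way ip_addr does above.'''
    fields = line.rstrip('\n').split('\t')
    addr = fields[0]
    net_type = int(fields[2])
    # ip_addr tests bit 3 (value 8); treating it as "online" is an
    # assumption for illustration, not documented behaviour.
    return {'addr': addr, 'online': bool(net_type & 8)}

print(parse_netcheck_line('192.168.1.5\twlan0\t8'))
# {'addr': '192.168.1.5', 'online': True}
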
Example #3
    predictions_filter = parsed_args.predictions_filter
    if predictions_filter is None:
        predictions_filter = []
    else:
        def filt_parser(parts):
            # ['key', 'v1,v2'] -> ('key', ['v1', 'v2'])
            return parts[0], parts[1].split(',')

        predictions_filter = [
            filt_parser(filt.split(':')) for filt in predictions_filter
        ]

    predictions_list = construct_path_from(
        predictions_list,
        'predictions',
        rel_path_to_this_script_dir,
        predictions_filter=predictions_filter)

    metrics_list = [
        get_metric(metric_name) for metric_name in parsed_args.metrics_list
    ]

    reporter = Reporter(output_filepath_template, parsed_args.whole_country)
    return reporter.report(predictions_list,
                           metrics_list,
                           horizons_list=parsed_args.horizons_list,
                           date_selector=parsed_args.date_selector)


if __name__ == '__main__':
    print('Generated files:' + str(
        main(sys.argv[1:],
             rel_path_to_this_script_dir=pathdirname(sys.argv[0]) or '.')))
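For reference, each predictions-filter string of the form 'key:val1,val2' is parsed above into a (key, [values]) pair; a standalone illustration (the keys and values here are made up):

filters = ['region:US,EU', 'model:baseline']
parsed = [(key, values.split(','))
          for key, values in (f.split(':') for f in filters)]
print(parsed)  # [('region', ['US', 'EU']), ('model', ['baseline'])]
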
Example #4
def old_tcweb_fnames_remove_duplicates(fname,
                                       mins_to_remove=10,
                                       remove_files=False):
    '''Find and, if remove_files is True, delete lower-coverage duplicates
    of an old-style TC web image filename within +/- mins_to_remove minutes.
    Returns (removed_fnames, saved_fnames).'''
    # 20201010.222325.WP162020.gmi.GPM.37V.40kts.14p2.1p0.jpg
    # 20201010.222325.WP162020.gmi.GPM.89H.40kts.14p2.1p0.jpg.yaml
    matching_fnames = []
    removed_fnames = []
    saved_fnames = []
    ext1 = pathsplitext(fname)[-1]
    ext2 = pathsplitext(pathsplitext(fname)[0])[-1]
    if (ext1 == '.jpg') or (ext1 == '.yaml' and ext2 == '.jpg'):
        LOG.info(
            'MATCHES EXT FORMAT. jpg or jpg.yaml. Attempting to remove old_tcweb duplicates'
        )
    else:
        LOG.info(
            'NOT REMOVING DUPLICATES. Not old_tcweb filename, not jpg or jpg.yaml.'
        )
        return [], []

    dirname = pathdirname(fname)
    basename = pathbasename(fname)
    # 20201010.222325.WP162020.gmi.GPM.37V.40kts.14p2.1p0.jpg
    parts = basename.split('.')
    if (len(parts) == 10 and ext1 == '.yaml') or (len(parts) == 9
                                                  and ext1 == '.jpg'):
        LOG.info(
            'MATCHES NUMBER FIELDS. 9 or 10 fields. Attempting to remove old_tcweb duplicates'
        )
    else:
        LOG.info(
            'NOT REMOVING DUPLICATES. Not old_tcweb filename, does not contain 9 or 10 fields.'
        )
        return [], []

    try:
        # 20201010.222325.WP162020.gmi.GPM.37V.40kts.14p2.1p0.jpg
        yyyymmdd = parts[0]
        hhmnss = parts[1]
        stormname = parts[2]
        sensor = parts[3]
        platform = parts[4]
        product = parts[5]
        intensity = parts[6]
        coverage = parts[7]
        res = parts[8]
        if 'p' not in coverage or 'p' not in res:
            LOG.info(
                'NOT REMOVING DUPLICATES. Not old_tcweb filename, coverage or res not "NNpN".'
            )
            return [], []
        if 'kts' not in intensity:
            LOG.info(
                'NOT REMOVING DUPLICATES. Not old_tcweb filename, intensity does not contain "kts".'
            )
            return [], []
    except IndexError:
        LOG.info(
            'NOT REMOVING DUPLICATES. Unmatched filename format, incorrect number of . delimited fields'
        )
        return [], []
    try:
        fname_dt = datetime.strptime(yyyymmdd + hhmnss, '%Y%m%d%H%M%S')
    except ValueError:
        LOG.info(
            'NOT REMOVING DUPLICATES. Unmatched old_tcweb filename format, incorrect date time string.'
        )
        return [], []
    timediff = timedelta(minutes=mins_to_remove)
    for currdt in minrange(fname_dt - timediff, fname_dt + timediff):
        # 20201010.222325.WP162020.gmi.GPM.37V.40kts.14p2.1p0.jpg
        # 20201010.222325.WP162020.gmi.GPM.37V.*.*.1p0.jpg*
        dtstr = currdt.strftime(
            '{0}/%Y%m%d.%H%M*.{1}.{2}.{3}.{4}.*.*.{5}.jpg*'.format(
                dirname, stormname, sensor, platform, product, res))
        # print(dtstr)
        matching_fnames += glob(dtstr)
    max_coverage = 0
    for matching_fname in matching_fnames:
        # 20201010.222325.WP162020.gmi.GPM.37V.40kts.14p2.1p0.jpg
        parts = pathbasename(matching_fname).split('.')
        coverage = float(parts[7].replace('p', '.'))
        max_coverage = max(coverage, max_coverage)

    gotone = False
    LOG.info('CHECKING DUPLICATE FILES')
    for matching_fname in list(set(matching_fnames)):
        # 20201010.222325.WP162020.gmi.GPM.37V.40kts.14p2.1p0.jpg
        parts = pathbasename(matching_fname).split('.')
        coverage = float(parts[7].replace('p', '.'))
        if coverage < max_coverage or gotone:
            removed_fnames += [matching_fname]
            # Test it out for a bit first
            if remove_files:
                LOG.info(
                    'DELETING DUPLICATE FILE with less coverage %s < %s %s',
                    coverage, max_coverage, matching_fname)
                osunlink(matching_fname)
            else:
                LOG.info(
                    'TEST DELETING DUPLICATE FILE with less coverage %s < %s %s',
                    coverage, max_coverage, matching_fname)
        else:
            if len(matching_fnames) == 1:
                LOG.info(
                    'SAVING DUPLICATE FILE (only one!) with max coverage %s %s',
                    max_coverage, matching_fname)
            else:
                LOG.info('SAVING DUPLICATE FILE with max coverage %s %s',
                         max_coverage, matching_fname)
            saved_fnames += [matching_fname]
            gotone = True
    return removed_fnames, saved_fnames
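Both this function and the metoctiff variant below glob once per minute via a minrange helper that these fragments never define; a minimal sketch of what it evidently does, inferred from its call sites (name kept, implementation assumed):

from datetime import timedelta

def minrange(start_dt, end_dt):
    '''Yield datetimes from start_dt up to (but excluding) end_dt,
    stepping one minute at a time.'''
    currdt = start_dt
    while currdt < end_dt:
        yield currdt
        currdt += timedelta(minutes=1)
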
Example #5
def metoctiff_filename_remove_duplicates(fname,
                                         mins_to_remove=10,
                                         remove_files=False):
    '''Find and, if remove_files is True, delete lower-coverage duplicates
    of a metoctiff (.jif.gz) filename within +/- mins_to_remove minutes.
    Returns (removed_fnames, saved_fnames).'''
    # 20201010.222325.GPM.gmi.89H.WP162020.14pc.jif.gz
    # 20201010.222325.GPM.gmi.89H.WP162020.14pc.jif.gz.yaml
    matching_fnames = []
    removed_fnames = []
    saved_fnames = []
    ext1 = pathsplitext(fname)[-1]
    ext2 = pathsplitext(pathsplitext(fname)[0])[-1]
    ext3 = pathsplitext(pathsplitext(pathsplitext(fname)[0])[0])[-1]
    if (ext1 == '.gz' and ext2 == '.jif') or (ext1 == '.yaml' and ext2 == '.gz'
                                              and ext3 == '.jif'):
        LOG.info(
            'MATCHES EXT FORMAT. .jif.gz or .jif.gz.yaml. Attempting to remove metoctiff duplicates'
        )
    else:
        LOG.info(
            'NOT REMOVING DUPLICATES. Not metoctiff filename, not .jif.gz or .jif.gz.yaml.'
        )
        return [], []
    dirname = pathdirname(fname)
    basename = pathbasename(fname)
    parts = basename.split('.')
    if (len(parts) == 10 and ext1 == '.yaml') or (len(parts) == 9
                                                  and ext1 == '.gz'):
        LOG.info(
            'MATCHES NUMBER FIELDS. 9 or 10 fields. Attempting to remove metoctiff duplicates'
        )
    else:
        LOG.info(
            'NOT REMOVING DUPLICATES. Not metoctiff filename, does not contain 9 or 10 fields.'
        )
        return [], []

    try:
        # 20201010.222325.GPM.gmi.89H.WP162020.14pc.jif.gz
        yyyymmdd = parts[0]
        hhmnss = parts[1]
        platform = parts[2]
        sensor = parts[3]
        product = parts[4]
        stormname = parts[5]
        coverage = parts[6]
        if 'pc' not in coverage:
            LOG.info(
                'NOT REMOVING DUPLICATES. Not metoctiff filename, coverage not "NNpc".'
            )
            return [], []
    except IndexError:
        LOG.info(
            'NOT REMOVING DUPLICATES. Unmatched metoctiff filename format, incorrect number of . delimited fields'
        )
        return [], []
    try:
        fname_dt = datetime.strptime(yyyymmdd + hhmnss, '%Y%m%d%H%M%S')
    except ValueError:
        LOG.info(
            'NOT REMOVING DUPLICATES. Unmatched metoctiff filename format, incorrect date time string.'
        )
        return [], []
    timediff = timedelta(minutes=mins_to_remove)
    for currdt in minrange(fname_dt - timediff, fname_dt + timediff):
        # 20201010.222325.GPM.gmi.19H.WP162020.14pc.jif.gz
        # Matches
        # 20201010.222325.GPM.gmi.19H.WP162020.*.jif.gz*
        dtstr = currdt.strftime(
            '{0}/%Y%m%d.%H%M*.{1}.{2}.{3}.{4}.*.jif.gz*'.format(
                dirname, platform, sensor, product, stormname))
        # print(dtstr)
        matching_fnames += glob(dtstr)
    max_coverage = 0
    for matching_fname in matching_fnames:
        # 20201010.222325.GPM.gmi.89H.WP162020.14pc.jif.gz
        parts = pathbasename(matching_fname).split('.')
        coverage = float(parts[6].replace('pc', ''))
        max_coverage = max(coverage, max_coverage)

    gotone = False
    LOG.info('CHECKING DUPLICATE FILES')
    for matching_fname in list(set(matching_fnames)):
        # 20201010.222325.GPM.gmi.89H.WP162020.14pc.jif.gz
        parts = pathbasename(matching_fname).split('.')
        coverage = float(parts[6].replace('pc', ''))
        if coverage < max_coverage or gotone:
            removed_fnames += [matching_fname]
            # Test it out for a bit first
            if remove_files:
                LOG.info(
                    'DELETING DUPLICATE FILE with less coverage %s < %s %s',
                    coverage, max_coverage, matching_fname)
                osunlink(matching_fname)
            else:
                LOG.info(
                    'TEST DELETING DUPLICATE FILE with less coverage %s < %s %s',
                    coverage, max_coverage, matching_fname)
        else:
            saved_fnames += [matching_fname]
            if len(matching_fnames) == 1:
                LOG.info(
                    'SAVING DUPLICATE FILE (only one!) with max coverage %s %s',
                    max_coverage, matching_fname)
            else:
                LOG.info('SAVING DUPLICATE FILE with max coverage %s %s',
                         max_coverage, matching_fname)
            gotone = True

    return removed_fnames, saved_fnames
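
The two duplicate-removal fragments also rely on aliased imports and a module-level logger that the excerpts omit; a plausible preamble reconstructed from the call sites (the logger setup is an assumption):

import logging
from datetime import datetime, timedelta
from glob import glob
from os import unlink as osunlink
from os.path import basename as pathbasename
from os.path import dirname as pathdirname
from os.path import splitext as pathsplitext

LOG = logging.getLogger(__name__)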