# Example #1
def nowcast(result, datetime, timeframe, minutes):
    """
    Create nowcast product.

    :param result: result of the previous task in the chain (unused).
    :param datetime: datetime of the nowcast product.
    :param timeframe: timeframe code of the nowcast product.
    :param minutes: nowcast lead time in minutes.
    """
    loghelper.setup_logging(logfile_name='radar_nowcast.log')
    logging.info(20 * '-' + ' nowcast ' + 20 * '-')
    # the result product is called the nowcast product
    nowcast_product = products.NowcastProduct(
        datetime=datetime,
        timeframe=timeframe,
    )
    # the vector products (realtime, five minutes)
    # are used to determine the translation vector.
    vector_products = []
    for vector_delay in minutes + 15, minutes:
        vector_products.append(products.CalibratedProduct(
            prodcode='r',
            timeframe='f',
            datetime=datetime - timedelta(minutes=vector_delay),
        ))
    # the base product is the product for which the data
    # is shifted to arrive at a nowcasted product.
    base_product = products.CalibratedProduct(
        prodcode='r',
        timeframe='f',
        datetime=datetime - timedelta(minutes=minutes)
    )

    # Log any failure instead of propagating it, so a single failing
    # product does not break the rest of the task chain (consistent
    # with the other radar tasks in this module).
    try:
        nowcast_product.make(
            base_product=base_product,
            vector_products=vector_products,
        )
    except Exception as e:
        logging.exception(e)
    logging.info(20 * '-' + ' nowcast complete ' + 20 * '-')
# Example #2
def animate(result, datetime):
    """
    Create animation
    Publish products.

    Cascade means rescaled (derived) products are published as well.

    :param result: result of the previous task in the chain (unused).
    :param datetime: datetime for which to create the animated gif.
    """
    loghelper.setup_logging(logfile_name='radar_animate.log')
    logging.info(20 * '-' + ' animate ' + 20 * '-')

    # Guard the gif creation so a failure is logged instead of breaking
    # the task chain (consistent with the other radar tasks).
    try:
        images.create_animated_gif(datetime=datetime)
    except Exception as e:
        logging.exception(e)
    logging.info(20 * '-' + ' animate complete ' + 20 * '-')
# Example #3
def aggregate(result,
              datetime,
              timeframe,
              nowcast,
              radars,
              declutter,
              direct=False,
              cascade=False):
    """ Create aggregates and optionally cascade to depending products. """
    loghelper.setup_logging(logfile_name='radar_aggregate.log')
    logging.info(20 * '-' + ' aggregate ' + 20 * '-')
    try:
        # Create aggregates
        aggregate_kwargs = dict(radars=radars,
                                declutter=declutter,
                                datetime=datetime,
                                timeframe=timeframe)
        # Nowcast aggregates use their own directories and grid.
        if nowcast:
            aggregate_kwargs.update(
                dict(basedir=config.NOWCAST_AGGREGATE_DIR,
                     multiscandir=config.NOWCAST_MULTISCAN_DIR,
                     grid=scans.NOWCASTGRID))
        else:
            aggregate_kwargs.update(
                dict(basedir=config.AGGREGATE_DIR,
                     multiscandir=config.MULTISCAN_DIR,
                     grid=scans.BASEGRID))

        # Use a distinct local name so the aggregate() task itself is
        # not shadowed inside its own body.
        aggregate_obj = scans.Aggregate(**aggregate_kwargs)
        aggregate_obj.make()

        # Cascade when requested: hand each combination over to the
        # calibrate task, either directly or via celery.
        if cascade:
            combinations = utils.get_product_combinations(
                datetimes=[datetime],
                timeframes=[timeframe],
            )
            for combination in combinations:
                calibrate_kwargs = dict(result=None,
                                        radars=radars,
                                        declutter=declutter,
                                        direct=direct,
                                        cascade=cascade)
                calibrate_kwargs.update(combination)
                if direct:
                    calibrate(**calibrate_kwargs)
                else:
                    calibrate.delay(**calibrate_kwargs)
    except Exception as e:
        logging.exception(e)
    logging.info(20 * '-' + ' aggregate complete ' + 20 * '-')
# Example #4
def rescale(result, datetime, prodcode,
            timeframe, direct=False, cascade=False):
    """ Create rescaled products wherever possible. """
    loghelper.setup_logging(logfile_name='radar_rescale.log')
    logging.info(20 * '-' + ' rescale ' + 20 * '-')

    # Guard the rescale work so a failure is logged instead of breaking
    # the task chain (consistent with the other radar tasks).
    try:
        product = products.CalibratedProduct(prodcode=prodcode,
                                             datetime=datetime,
                                             timeframe=timeframe)
        rescaleds = products.Consistifier.create_consistent_products(product)
        if not rescaleds:
            logging.info('Nothing to rescale.')
    except Exception as e:
        logging.exception(e)
    logging.info(20 * '-' + ' rescale complete ' + 20 * '-')
# Example #5
def animate(result, datetime):
    """
    Create animation
    Publish products.

    Cascade means rescaled (derived) products are published as well.
    """
    loghelper.setup_logging(logfile_name='radar_animate.log')
    # the separator makes task boundaries easy to spot in the log
    ruler = 20 * '-'
    logging.info(ruler + ' animate ' + ruler)
    try:
        images.create_animated_gif(datetime=datetime)
    except Exception as error:
        logging.exception(error)
    logging.info(ruler + ' animate complete ' + ruler)
# Example #6
def calibrate(result,
              datetime,
              prodcode,
              timeframe,
              nowcast,
              radars,
              declutter,
              direct=False,
              cascade=False):
    """ Created calibrated aggregated composites. """
    loghelper.setup_logging(logfile_name='radar_calibrate.log')
    logging.info(20 * '-' + ' calibrate ' + 20 * '-')
    try:
        # Nowcast products are copied; regular products are calibrated.
        if nowcast:
            product = products.CopiedProduct(datetime)
        else:
            product = products.CalibratedProduct(
                radars=radars,
                prodcode=prodcode,
                datetime=datetime,
                timeframe=timeframe,
                declutter=declutter,
            )
        product.make()
        # Hand each combination over to the rescale task when cascading.
        if cascade:
            combinations = utils.get_product_combinations(
                datetimes=[datetime],
                prodcodes=[prodcode],
                timeframes=[timeframe],
            )
            wanted_keys = ('datetime', 'prodcode', 'timeframe')
            for combination in combinations:
                rescale_kwargs = dict(result=None,
                                      direct=direct,
                                      cascade=cascade)
                for key in wanted_keys:
                    if key in combination:
                        rescale_kwargs[key] = combination[key]
                runner = rescale if direct else rescale.delay
                runner(**rescale_kwargs)
    except Exception as e:
        logging.exception(e)
    logging.info(20 * '-' + ' calibrate complete ' + 20 * '-')
# Example #7
def cleanup():
    """ Synchronize specific remote ftp folders with our ftp. """
    loghelper.setup_logging(os.path.join(config.LOG_DIR, 'cleanup.log'))
    logging.info('Starting cleanup...')

    # Check sync possible
    if not hasattr(config, 'FTP_HOST') or config.FTP_HOST == '':
        logging.warning('FTP not configured, FTP cleanup not possible.')
        return

    try:
        cleanup_ftp()
    except Exception:
        # narrowed from a bare `except:` so SystemExit and
        # KeyboardInterrupt still propagate; the failure is only logged
        logging.exception('Error:')
    logging.info('Cleanup done.')
# Example #8
def cleanup():
    """ Synchronize specific remote ftp folders with our ftp. """
    loghelper.setup_logging(os.path.join(config.LOG_DIR, 'cleanup.log'))
    logging.info('Starting cleanup...')

    # Check sync possible
    if not hasattr(config, 'FTP_HOST') or config.FTP_HOST == '':
        logging.warning('FTP not configured, FTP cleanup not possible.')
        return

    try:
        cleanup_ftp()
    except Exception:
        # narrowed from a bare `except:` so SystemExit and
        # KeyboardInterrupt still propagate; the failure is only logged
        logging.exception('Error:')
    logging.info('Cleanup done.')
def sync_radar():
    """
    Synchronize publication FTP with calibrate or consistent dirs.

    Sometimes the publication FTP is not available and this causes
    missing publications although they are available on the server. This
    script fills in the holes.
    """
    loghelper.setup_logging(os.path.join(config.LOG_DIR, "sync.log"))

    # republish every datetime/prodcode/timeframe combination; existing
    # files are kept because overwrite is off
    publisher = publishing.Publisher(
        datetimes=tuple(get_datetimes()),
        prodcodes="rnau",
        timeframes="fhd",
        nowcast=False,
    )
    publisher.publish_ftp(overwrite=False, cascade=True)
# Example #10
def aggregate(result, datetime, timeframe, nowcast,
              radars, declutter, direct=False, cascade=False):
    """ Create aggregates and optionally cascade to depending products. """
    loghelper.setup_logging(logfile_name='radar_aggregate.log')
    logging.info(20 * '-' + ' aggregate ' + 20 * '-')
    try:
        # Settings shared by nowcast and regular aggregates.
        aggregate_kwargs = {
            'radars': radars,
            'declutter': declutter,
            'datetime': datetime,
            'timeframe': timeframe,
        }
        # Directories and grid differ between nowcast and regular runs.
        if nowcast:
            aggregate_kwargs['basedir'] = config.NOWCAST_AGGREGATE_DIR
            aggregate_kwargs['multiscandir'] = config.NOWCAST_MULTISCAN_DIR
            aggregate_kwargs['grid'] = scans.NOWCASTGRID
        else:
            aggregate_kwargs['basedir'] = config.AGGREGATE_DIR
            aggregate_kwargs['multiscandir'] = config.MULTISCAN_DIR
            aggregate_kwargs['grid'] = scans.BASEGRID

        scans.Aggregate(**aggregate_kwargs).make()

        # Cascade when requested: run calibrate for each combination,
        # synchronously or via celery.
        if cascade:
            combinations = utils.get_product_combinations(
                datetimes=[datetime], timeframes=[timeframe],
            )
            for combination in combinations:
                calibrate_kwargs = {'result': None,
                                    'radars': radars,
                                    'declutter': declutter,
                                    'direct': direct,
                                    'cascade': cascade}
                calibrate_kwargs.update(combination)
                caller = calibrate if direct else calibrate.delay
                caller(**calibrate_kwargs)
    except Exception as e:
        logging.exception(e)
    logging.info(20 * '-' + ' aggregate complete ' + 20 * '-')
# Example #11
def publish(result, datetimes, prodcodes, timeframes, endpoints, cascade,
            nowcast):
    """
    Publish products.

    Cascade means rescaled (derived) products are published as well.
    If the calibrate task is also run with 'cascade=True', this should
    be no problem.
    """
    loghelper.setup_logging(logfile_name='radar_publish.log')
    logging.info(20 * '-' + ' publish ' + 20 * '-')
    publisher = publishing.Publisher(datetimes=datetimes,
                                     prodcodes=prodcodes,
                                     timeframes=timeframes,
                                     nowcast=nowcast)
    for endpoint in endpoints:
        # Guard each endpoint separately so one failing endpoint does
        # not prevent publication to the remaining endpoints.
        try:
            getattr(publisher, 'publish_' + endpoint)(cascade=cascade)
        except Exception as e:
            logging.exception(e)
    logging.info(20 * '-' + ' publish complete ' + 20 * '-')
# Example #12
def sync_radar():
    """
    Synchronize publication FTP with calibrate or consistent dirs.

    Sometimes the publication FTP is not available and this causes
    missing publications although they are available on the server. This
    script fills in the holes.
    """
    loghelper.setup_logging(os.path.join(config.LOG_DIR, 'sync.log'))

    # republish every datetime/prodcode/timeframe combination; existing
    # files are kept because overwrite is off
    publisher = publishing.Publisher(
        datetimes=tuple(get_datetimes()),
        prodcodes='rna',
        timeframes='fhd',
        nowcast=False,
    )
    publisher.publish_ftp(overwrite=False, cascade=True)
# Example #13
def rescale(result,
            datetime,
            prodcode,
            timeframe,
            direct=False,
            cascade=False):
    """ Create rescaled products wherever possible. """
    loghelper.setup_logging(logfile_name='radar_rescale.log')
    logging.info(20 * '-' + ' rescale ' + 20 * '-')
    try:
        # derive consistent products from the calibrated source product
        source = products.CalibratedProduct(prodcode=prodcode,
                                            datetime=datetime,
                                            timeframe=timeframe)
        consistent = products.Consistifier.create_consistent_products(source)
        if not consistent:
            logging.info('Nothing to rescale.')
    except Exception as error:
        logging.exception(error)
    logging.info(20 * '-' + ' rescale complete ' + 20 * '-')
# Example #14
def main():
    """ Create images for a range of products. """
    loghelper.setup_logging()

    # Get products according to args
    args = get_image_args()

    multidaterange = utils.MultiDateRange(args['range'])
    products = product_generator(product=args['product'],
                                 prodcode=args['prodcode'],
                                 timeframe=args['timeframe'],
                                 datetimes=multidaterange.iterdatetimes(),
                                 nowcast=args['nowcast'])

    create = dict(png=images.create_png, tif=images.create_tif)[args['format']]

    # Create images with those products
    kwargs = args.copy()
    # NOTE: map() is lazy on python 3, so the original
    # `map(kwargs.pop, [...])` never executed the pops and the consumed
    # keys leaked into `create()`. Use a real loop instead.
    for key in ['range', 'product', 'timeframe', 'prodcode']:
        kwargs.pop(key)
    create(products, **kwargs)
# Example #15
def main():
    """ Create images for a range of products. """
    loghelper.setup_logging()

    # Get products according to args
    args = get_image_args()

    multidaterange = utils.MultiDateRange(args['range'])
    products = product_generator(product=args['product'],
                                 prodcode=args['prodcode'],
                                 timeframe=args['timeframe'],
                                 datetimes=multidaterange.iterdatetimes(),
                                 nowcast=args['nowcast'])

    create = dict(png=images.create_png,
                  tif=images.create_tif)[args['format']]

    # Create images with those products
    kwargs = args.copy()
    # NOTE: map() is lazy on python 3, so the original
    # `map(kwargs.pop, [...])` never executed the pops and the consumed
    # keys leaked into `create()`. Use a real loop instead.
    for key in ['range', 'product', 'timeframe', 'prodcode']:
        kwargs.pop(key)
    create(products, **kwargs)
# Example #16
def publish(result, datetimes, prodcodes, timeframes, endpoints, cascade,
            nowcast):
    """
    Publish products.

    Cascade means rescaled (derived) products are published as well.
    If the calibrate task is also run with 'cascade=True', this should
    be no problem.
    """
    loghelper.setup_logging(logfile_name='radar_publish.log')
    logging.info(20 * '-' + ' publish ' + 20 * '-')
    publisher = publishing.Publisher(datetimes=datetimes,
                                     prodcodes=prodcodes,
                                     timeframes=timeframes,
                                     nowcast=nowcast)
    # a failing endpoint is logged and the remaining endpoints still run
    for endpoint in endpoints:
        try:
            handler = getattr(publisher, 'publish_' + endpoint)
            handler(cascade=cascade)
        except Exception as error:
            logging.exception(error)
    logging.info(20 * '-' + ' publish complete ' + 20 * '-')
# Example #17
def calibrate(result, datetime, prodcode, timeframe, nowcast,
              radars, declutter, direct=False, cascade=False):
    """ Created calibrated aggregated composites. """
    loghelper.setup_logging(logfile_name='radar_calibrate.log')
    logging.info(20 * '-' + ' calibrate ' + 20 * '-')
    try:
        # Nowcast products are copied; regular products are calibrated.
        product = (
            products.CopiedProduct(datetime)
            if nowcast else
            products.CalibratedProduct(
                radars=radars,
                prodcode=prodcode,
                datetime=datetime,
                timeframe=timeframe,
                declutter=declutter,
            )
        )
        product.make()
        # Hand each combination over to the rescale task when cascading.
        if cascade:
            combinations = utils.get_product_combinations(
                datetimes=[datetime],
                prodcodes=[prodcode],
                timeframes=[timeframe],
            )
            for combination in combinations:
                rescale_kwargs = dict(result=None,
                                      direct=direct,
                                      cascade=cascade)
                for key in ('datetime', 'prodcode', 'timeframe'):
                    if key in combination:
                        rescale_kwargs[key] = combination[key]
                if direct:
                    rescale(**rescale_kwargs)
                else:
                    rescale.delay(**rescale_kwargs)
    except Exception as e:
        logging.exception(e)
    logging.info(20 * '-' + ' calibrate complete ' + 20 * '-')
# Example #18
def sync():
    """ Synchronize specific remote ftp folders with our ftp. """
    loghelper.setup_logging(os.path.join(config.LOG_DIR, 'sync.log'))

    # Check sync possible
    if not hasattr(config, 'FTP_HOST') or config.FTP_HOST == '':
        logging.warning('FTP not configured, FTP syncing not possible.')
        return

    try:
        target = ftplib.FTP(config.FTP_HOST,
                            config.FTP_USER,
                            config.FTP_PASSWORD)

        for name, info in config.FTP_THROUGH.items():
            logging.info('Syncing {}...'.format(name))

            # Make the connection
            source = ftplib.FTP(
                info['host'],
                info['user'],
                info['password'],
            )

            # Change to proper directories
            source.cwd(info['path'])
            target.cwd(info['target'])

            # Sync
            ftp_sync(source=source, target=target)

            # Quit connections.
            source.quit()
        target.quit()
    except Exception:
        # narrowed from a bare `except:` so SystemExit and
        # KeyboardInterrupt still propagate; the failure is only logged
        logging.exception('Error:')
    logging.info('Sync done.')
# Example #19
def nowcast(result, datetime, timeframe, minutes):
    """
    Create nowcast product.
    """
    loghelper.setup_logging(logfile_name='radar_nowcast.log')
    logging.info(20 * '-' + ' nowcast ' + 20 * '-')
    # the result product is called the nowcast product
    nowcast_product = products.NowcastProduct(
        datetime=datetime,
        timeframe=timeframe,
    )
    # the vector products (realtime, five minutes)
    # are used to determine the translation vector.
    vector_products = [
        products.CalibratedProduct(
            prodcode='r',
            timeframe='f',
            datetime=datetime - timedelta(minutes=delay),
        )
        for delay in (minutes + 15, minutes)
    ]
    # the base product is the product for which the data
    # is shifted to arrive at a nowcasted product.
    base_product = products.CalibratedProduct(
        prodcode='r',
        timeframe='f',
        datetime=datetime - timedelta(minutes=minutes),
    )

    try:
        nowcast_product.make(base_product=base_product,
                             vector_products=vector_products)
    except Exception as error:
        logging.exception(error)
    logging.info(20 * '-' + ' nowcast complete ' + 20 * '-')
# Example #20
def sync():
    """ Synchronize specific remote ftp folders with our ftp. """
    loghelper.setup_logging(os.path.join(config.LOG_DIR, 'sync.log'))

    # Check sync possible
    if not hasattr(config, 'FTP_HOST') or config.FTP_HOST == '':
        logging.warning('FTP not configured, FTP syncing not possible.')
        return

    try:
        target = ftplib.FTP(config.FTP_HOST, config.FTP_USER,
                            config.FTP_PASSWORD)

        for name, info in config.FTP_THROUGH.items():
            logging.info('Syncing {}...'.format(name))

            # Make the connection
            source = ftplib.FTP(
                info['host'],
                info['user'],
                info['password'],
            )

            # Change to proper directories
            source.cwd(info['path'])
            target.cwd(info['target'])

            # Sync
            ftp_sync(source=source, target=target)

            # Quit connections.
            source.quit()
        target.quit()
    except Exception:
        # narrowed from a bare `except:` so SystemExit and
        # KeyboardInterrupt still propagate; the failure is only logged
        logging.exception('Error:')
    logging.info('Sync done.')
# Example #21
def master(**kwargs):
    """
    Run the radar production chain for a single datetime.

    Builds a celery chain of aggregate, calibrate, rescale and publish
    subtasks for every product combination, appends an animate subtask,
    and submits the whole chain asynchronously.

    kwargs['range'] selects an explicit datetime range; when it is None
    the closest delivery time is used and the task waits for files.
    """
    loghelper.setup_logging(logfile_name='radar_master.log')
    logging.info(20 * '-' + ' master ' + 20 * '-')

    # Determine the delivery datetime and if necessary wait for files.
    if kwargs['range'] is not None:
        datetimes = utils.DateRange(kwargs['range']).iterdatetimes()
        # only the first datetime of the range is used
        for i, datetime_delivery in enumerate(datetimes):
            if i > 0:
                logging.warning('Range of datetimes given. Using the first.')
                break
    else:
        datetime_delivery = utils.closest_time()
        try:
            files.sync_and_wait_for_files(dt_calculation=datetime_delivery)
        except Exception as exception:
            # waiting is best-effort; continue with whatever is present
            logging.exception(exception)

    # Organize
    files.organize_from_path(source_dir=config.SOURCE_DIR)

    # Product datetime depends on delivery times
    declutter = dict(
        size=config.DECLUTTER_SIZE,
        history=config.DECLUTTER_HISTORY,
    )
    radars = config.ALL_RADARS
    # per-prodcode delay between delivery and product datetime; this
    # uses the datetime *module* (master has no datetime argument)
    delivery_times = (
        ('r', datetime.timedelta()),
        ('n', datetime.timedelta(hours=1)),
        ('a', datetime.timedelta(days=2)),
    )

    # Submit tasks in a chain.
    subtasks = [tasks.do_nothing.s()]
    for prodcode, timedelta_delivery in delivery_times:
        datetime_product = datetime_delivery - timedelta_delivery
        combinations = utils.get_aggregate_combinations(
            datetimes=[datetime_product],
        )
        for combination in combinations:
            # Add a separator between groups of tasks
            logging.info(40 * '-')

            # Nowcast combinations only proceed for prodcode r
            if combination['nowcast'] and not prodcode == 'r':
                continue

            # Append aggregate subtask
            aggregate_kwargs = dict(declutter=declutter, radars=radars)
            aggregate_kwargs.update(combination)
            subtasks.append(tasks.aggregate.s(**aggregate_kwargs))
            tpl = 'Agg. task: {datetime} {timeframe}   {nowcast}'
            logging.info(tpl.format(**aggregate_kwargs))

            # Append calibrate subtask
            calibrate_kwargs = dict(prodcode=prodcode)
            calibrate_kwargs.update(aggregate_kwargs)
            subtasks.append(tasks.calibrate.s(**calibrate_kwargs))
            tpl = 'Cal. task: {datetime} {timeframe} {prodcode} {nowcast}'
            logging.info(tpl.format(**calibrate_kwargs))

            # Append rescale subtask (only needs a subset of the
            # calibrate kwargs)
            rescale_kwargs = {k: v
                              for k, v in calibrate_kwargs.items()
                              if k in ['datetime', 'prodcode', 'timeframe']}
            subtasks.append(tasks.rescale.s(**rescale_kwargs))
            tpl = 'Res. task: {datetime} {timeframe} {prodcode}'
            logging.info(tpl.format(**rescale_kwargs))

            # Append publication subtask
            subtasks.append(tasks.publish.s(
                datetimes=[calibrate_kwargs['datetime']],
                prodcodes=[calibrate_kwargs['prodcode']],
                timeframes=[calibrate_kwargs['timeframe']],
                nowcast=calibrate_kwargs['nowcast'],
                endpoints=['ftp', 'h5', 'local', 'image', 'h5m'],
                cascade=True,
            ))
            tpl = 'Pub. task: {datetime} {timeframe} {prodcode} {nowcast}'
            logging.info(tpl.format(**calibrate_kwargs))

    # Append subtask to create animated gif
    subtasks.append(tasks.animate.s(datetime=datetime_delivery))

    # Submit all subtask as a single chain
    chain(*subtasks).apply_async()

    logging.info(20 * '-' + ' master complete ' + 20 * '-')
# Example #22
def master(**kwargs):
    """
    Run the radar production chain for a single datetime.

    Builds a celery chain of aggregate, calibrate, rescale and publish
    subtasks for every product combination, appends an animate subtask,
    and submits the whole chain asynchronously.

    kwargs['range'] selects an explicit datetime range; when it is None
    the closest delivery time is used and the task waits for files.
    """
    loghelper.setup_logging(logfile_name='radar_master.log')
    logging.info(20 * '-' + ' master ' + 20 * '-')

    # Determine the delivery datetime and if necessary wait for files.
    if kwargs['range'] is not None:
        datetimes = utils.DateRange(kwargs['range']).iterdatetimes()
        # only the first datetime of the range is used
        for i, datetime_delivery in enumerate(datetimes):
            if i > 0:
                logging.warning('Range of datetimes given. Using the first.')
                break
    else:
        datetime_delivery = utils.closest_time()
        try:
            files.sync_and_wait_for_files(dt_calculation=datetime_delivery)
        except Exception as exception:
            # waiting is best-effort; continue with whatever is present
            logging.exception(exception)

    # Organize
    files.organize_from_path(source_dir=config.SOURCE_DIR)

    # Product datetime depends on delivery times
    declutter = dict(
        size=config.DECLUTTER_SIZE,
        history=config.DECLUTTER_HISTORY,
    )
    radars = config.ALL_RADARS

    # nowcast extent is from utils.get_combinations
    delivery_times = [(p, t) for p, t in config.DELIVERY_TIMES if p in 'rnau']

    # Submit tasks in a chain.
    subtasks = [tasks.do_nothing.s()]
    for prodcode, timedelta_delivery in delivery_times:
        datetime_product = datetime_delivery - timedelta_delivery
        combinations = utils.get_aggregate_combinations(
            datetimes=[datetime_product], )
        for combination in combinations:
            # Add a separator between groups of tasks
            logging.info(40 * '-')

            # Nowcast combinations only proceed for prodcode r
            if combination['nowcast'] and not prodcode == 'r':
                continue

            # Append aggregate subtask
            aggregate_kwargs = dict(declutter=declutter, radars=radars)
            aggregate_kwargs.update(combination)
            subtasks.append(tasks.aggregate.s(**aggregate_kwargs))
            tpl = 'Agg. task: {datetime} {timeframe}   {nowcast}'
            logging.info(tpl.format(**aggregate_kwargs))

            # Append calibrate subtask
            calibrate_kwargs = dict(prodcode=prodcode)
            calibrate_kwargs.update(aggregate_kwargs)
            subtasks.append(tasks.calibrate.s(**calibrate_kwargs))
            tpl = 'Cal. task: {datetime} {timeframe} {prodcode} {nowcast}'
            logging.info(tpl.format(**calibrate_kwargs))

            # Append rescale subtask (only needs a subset of the
            # calibrate kwargs)
            rescale_kwargs = {
                k: v
                for k, v in calibrate_kwargs.items()
                if k in ['datetime', 'prodcode', 'timeframe']
            }
            subtasks.append(tasks.rescale.s(**rescale_kwargs))
            tpl = 'Res. task: {datetime} {timeframe} {prodcode}'
            logging.info(tpl.format(**rescale_kwargs))

            # Append publication subtask
            subtasks.append(
                tasks.publish.s(
                    datetimes=[calibrate_kwargs['datetime']],
                    prodcodes=[calibrate_kwargs['prodcode']],
                    timeframes=[calibrate_kwargs['timeframe']],
                    nowcast=calibrate_kwargs['nowcast'],
                    endpoints=['ftp', 'h5', 'local', 'image', 'h5m'],
                    cascade=True,
                ))
            tpl = 'Pub. task: {datetime} {timeframe} {prodcode} {nowcast}'
            logging.info(tpl.format(**calibrate_kwargs))

    # Append subtask to create animated gif
    subtasks.append(tasks.animate.s(datetime=datetime_delivery))

    # Submit all subtask as a single chain
    chain(*subtasks).apply_async()

    logging.info(20 * '-' + ' master complete ' + 20 * '-')
# Example #23
from openradar import gridtools
from openradar import loghelper
from openradar import utils

from osgeo import gdal
from osgeo import gdalconst

from scipy import interpolate

import csv
import datetime
import logging
import numpy as np
import os

loghelper.setup_logging()


class Aggregator(object):

    KEYS = 'X', 'Y', 'NAAM', 'REGIO'

    METHODS = {
        1: 'radar weging [mm/dag]',
        2: 'radar laagste hoek [mm/dag]',
        3: 'radar gewogen hoek [mm/dag]',
    }

    CODES = {
        1: 'p.radar.m1',
        2: 'p.radar.m2',
# Example #24
from openradar import gridtools
from openradar import loghelper
from openradar import utils

from osgeo import gdal
from osgeo import gdalconst

from scipy import interpolate

import csv
import datetime
import logging
import numpy as np
import os

loghelper.setup_logging()


class Aggregator(object):

    KEYS = "X", "Y", "NAAM", "REGIO"

    METHODS = {1: "radar weging [mm/dag]", 2: "radar laagste hoek [mm/dag]", 3: "radar gewogen hoek [mm/dag]"}

    CODES = {1: "p.radar.m1", 2: "p.radar.m2", 3: "m3"}

    def __init__(self, datapath, coordspath, outputpath):

        coordsdict = {}
        with open(coordspath) as coords:
            coordsreader = csv.DictReader(coords)