Example #1
def delete_index(ctx, es, username, password, ignore_certs):
    """Delete cap-alerts realtime index"""

    conn_config = configure_es_connection(es, username, password, ignore_certs)
    conn = ElasticsearchConnector(conn_config)

    conn.delete(INDEX_NAME)
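
All of these commands share the same shape: a click context plus a common set of Elasticsearch connection options, with the decorators stripped from the snippets. A minimal sketch of the wiring they imply (the option names here are assumptions, not the project's actual decorators):

import click

@click.command()
@click.pass_context
@click.option('--es', help='URL to an Elasticsearch node')
@click.option('--username', help='Elasticsearch username')
@click.option('--password', help='Elasticsearch password')
@click.option('--ignore-certs', is_flag=True, default=False,
              help='Skip TLS certificate verification')
def delete_index(ctx, es, username, password, ignore_certs):
    """Delete cap-alerts realtime index"""
    ...  # body as in the example above
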
Example #2
def delete_index(ctx, index_name, es, username, password, ignore_certs):
    """
    Delete a particular ES index by name, or all indices if no name is
    passed
    """

    conn_config = configure_es_connection(es, username, password, ignore_certs)
    conn = ElasticsearchConnector(conn_config)

    if index_name:
        if click.confirm(
                'Are you sure you want to delete ES index named: {}?'.format(
                    click.style(index_name, fg='red')),
                abort=True,
        ):
            LOGGER.info('Deleting ES index {}'.format(index_name))
            conn.delete(index_name)
            return True
    else:
        if click.confirm(
                'Are you sure you want to delete {} marine forecast'
                ' indices ({})?'.format(
                    click.style('ALL', fg='red'),
                    click.style(", ".join(INDICES), fg='red'),
                ),
                abort=True,
        ):
            conn.delete(",".join(INDICES))
            return True
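
Note that click.confirm(..., abort=True) raises click.Abort as soon as the user answers no, so the body of the if only ever runs after a yes and the function can only return True. A minimal demonstration of that behaviour:

import click

@click.command()
def demo():
    # A "no" answer raises click.Abort and exits the command here;
    # the echo below runs only after a "yes".
    if click.confirm('Are you sure?', abort=True):
        click.echo('confirmed')
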
Example #3
def delete_indexes(ctx, es, username, password, ignore_certs):
    """Delete all SWOB realtime indexes"""

    conn_config = configure_es_connection(es, username, password, ignore_certs)
    conn = ElasticsearchConnector(conn_config)

    all_indexes = '{}*'.format(INDEX_BASENAME)

    click.echo('Deleting indexes {}'.format(all_indexes))
    conn.delete(all_indexes)

    click.echo('Done')
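
conn.delete receives a wildcard pattern here, so every index whose name starts with INDEX_BASENAME is removed in a single call. Assuming conn.delete wraps the standard indices API, the equivalent direct call with elasticsearch-py would be roughly:

from elasticsearch import Elasticsearch

es_client = Elasticsearch(['http://localhost:9200'])  # assumed local node
INDEX_BASENAME = 'swob_realtime'  # placeholder; the real value is a module constant
es_client.indices.delete(index='{}*'.format(INDEX_BASENAME))
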
Example #4
def clean_records(ctx, days, es, username, password, ignore_certs):
    """Delete old citypageweather documents"""

    conn_config = configure_es_connection(es, username, password, ignore_certs)
    conn = ElasticsearchConnector(conn_config)

    older_than = (datetime.now() -
                  timedelta(days=days)).strftime('%Y-%m-%d %H:%M')
    click.echo('Deleting documents older than {} ({} days)'.format(
        older_than, days))

    query = {'query': {'range': {'properties.datetime': {'lte': older_than}}}}

    conn.Elasticsearch.delete_by_query(index=INDEX_NAME, body=query)
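
The range query matches every document whose properties.datetime is at or before the cutoff, and delete_by_query removes the matches server-side in a single request. A self-contained sketch of the same call (the index name and local node are placeholders):

from datetime import datetime, timedelta

from elasticsearch import Elasticsearch

es_client = Elasticsearch(['http://localhost:9200'])  # assumed local node
cutoff = (datetime.now() - timedelta(days=30)).strftime('%Y-%m-%d %H:%M')
query = {'query': {'range': {'properties.datetime': {'lte': cutoff}}}}
es_client.delete_by_query(index='citypageweather', body=query)  # hypothetical index
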
Example #5
def clean_indexes(ctx, days, es, username, password, ignore_certs):
    """Clean bulletins indexes older than n number of days"""

    conn_config = configure_es_connection(es, username, password, ignore_certs)
    conn = ElasticsearchConnector(conn_config)

    indexes = conn.get('{}*'.format(INDEX_BASENAME))

    if indexes:
        indexes_to_delete = check_es_indexes_to_delete(indexes, days)
        if indexes_to_delete:
            click.echo('Deleting indexes {}'.format(indexes_to_delete))
            conn.delete(','.join(indexes_to_delete))

    click.echo('Done')
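
check_es_indexes_to_delete is not shown in these examples. A plausible implementation, assuming index names end in a YYYY-MM-DD date suffix such as bulletins.2023-01-31 (the suffix format is a guess), could look like this:

from datetime import datetime, timedelta

def check_es_indexes_to_delete(indexes, days):
    """Return the index names whose date suffix is more than `days` old."""
    cutoff = datetime.now() - timedelta(days=days)
    to_delete = []
    for index in indexes:
        try:
            index_date = datetime.strptime(index.split('.')[-1], '%Y-%m-%d')
        except ValueError:
            continue  # skip indexes without a parseable date suffix
        if index_date < cutoff:
            to_delete.append(index)
    return to_delete
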
Example #6
def delete_indexes(ctx, index_name, es, username, password, ignore_certs):
    """
    Delete a particular ES index by name, or all indices if no name is
    passed
    """

    conn_config = configure_es_connection(es, username, password, ignore_certs)
    loader = LtceLoader(conn_config=conn_config)

    if index_name:
        LOGGER.info('Deleting ES index {}'.format(index_name))
        loader.conn.delete(index_name)
        return True
    else:
        LOGGER.info('Deleting all LTCE ES indices')
        loader.conn.delete(",".join(INDICES))
        return True
Example #7
def add(
    ctx,
    ctl,
    es,
    username,
    password,
    ignore_certs,
    dataset,
    batch_size,
):
    """Loads AHCCD data from JSON into Elasticsearch"""

    conn_config = configure_es_connection(es, username, password, ignore_certs)

    loader = AhccdLoader(conn_config)

    try:
        with open(ctl, 'r') as f:
            ctl_dict = json.load(f)
    except Exception as err:
        msg = 'Could not open JSON location file: {}'.format(err)
        raise click.ClickException(msg)

    if dataset == 'all':
        datasets_to_process = [
            'annual',
            'monthly',
            'seasonal',
            'stations',
            'trends',
        ]
    else:
        datasets_to_process = [dataset]

    click.echo('Processing dataset(s): {}'.format(datasets_to_process))

    for dtp in datasets_to_process:
        try:
            click.echo('Populating {} index'.format(dtp))
            loader.create_index(dtp)
            dtp_data = loader.generate_docs(ctl_dict[dtp], dtp)
            loader.conn.submit_elastic_package(dtp_data, batch_size)
        except Exception as err:
            msg = 'Could not populate {} index: {}'.format(dtp, err)
            raise click.ClickException(msg)
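
submit_elastic_package is project-specific; batched bulk indexing like this is commonly built on elasticsearch.helpers.streaming_bulk. A sketch of what such a helper might look like internally (an assumption, not the actual implementation):

from elasticsearch.helpers import streaming_bulk

def submit_in_batches(es_client, docs, batch_size=500):
    """Index an iterable of bulk actions in chunks of batch_size."""
    for ok, response in streaming_bulk(es_client, docs, chunk_size=batch_size):
        if not ok:
            raise RuntimeError('Failed to index document: {}'.format(response))
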
Example #8
def clean_indexes(ctx, days, dataset, es, username, password, ignore_certs):
    """Delete old AQHI realtime indexes older than n days"""

    conn_config = configure_es_connection(es, username, password, ignore_certs)
    conn = ElasticsearchConnector(conn_config)

    if dataset == 'all':
        indexes_to_fetch = '{}*'.format(INDEX_BASENAME.format('*'))
    else:
        indexes_to_fetch = '{}*'.format(INDEX_BASENAME.format(dataset))

    indexes = conn.get(indexes_to_fetch)

    if indexes:
        indexes_to_delete = check_es_indexes_to_delete(indexes, days)
        if indexes_to_delete:
            click.echo('Deleting indexes {}'.format(indexes_to_delete))
            conn.delete(','.join(indexes_to_delete))

    click.echo('Done')
Example #9
def delete_indexes(ctx, dataset, es, username, password, ignore_certs,
                   index_template):
    """Delete all AQHI realtime indexes"""

    conn_config = configure_es_connection(es, username, password, ignore_certs)
    conn = ElasticsearchConnector(conn_config)

    if dataset == 'all':
        indexes = 'aqhi_*'
    else:
        indexes = '{}*'.format(INDEX_BASENAME.format(dataset))

    click.echo('Deleting indexes {}'.format(indexes))

    conn.delete(indexes)

    if index_template:
        click.echo('Deleting index template {}'.format(INDEX_BASENAME))
        conn.delete_template(INDEX_BASENAME)

    click.echo('Done')
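
conn.delete_template presumably wraps the legacy index-template API. With elasticsearch-py directly, the same cleanup would be approximately:

from elasticsearch import Elasticsearch

es_client = Elasticsearch(['http://localhost:9200'])  # assumed local node
es_client.indices.delete(index='aqhi_*')         # remove the matching indexes
es_client.indices.delete_template(name='aqhi_')  # hypothetical template name
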
Example #10
def deactivate(ctx, days, es, username, password, ignore_certs):
    """deactivate hurricane forecasts older than N days"""

    conn_config = configure_es_connection(es, username, password, ignore_certs)
    conn = ElasticsearchConnector(conn_config)

    for index in INDICES:
        query = {
            "script": "ctx._source.properties.active=false",
            "query": {
                "range": {
                    "properties.filedate": {
                        "lte": "now-{}d".format(days)
                    }
                }
            }
        }

        conn.Elasticsearch.update_by_query(index=index, body=query)

    return True
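
The script portion is written in Painless and flips properties.active to false on every document whose filedate falls outside the retention window; "now-{}d" uses Elasticsearch date math. A standalone version of the same request (the index name is a placeholder):

from elasticsearch import Elasticsearch

es_client = Elasticsearch(['http://localhost:9200'])  # assumed local node
query = {
    'script': 'ctx._source.properties.active=false',
    'query': {'range': {'properties.filedate': {'lte': 'now-30d'}}},
}
es_client.update_by_query(index='hurricane', body=query)  # hypothetical index name
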
Example #11
def add(ctx, file_, directory, es, username, password, ignore_certs):
    """Add AQHI data to Elasticsearch"""

    if all([file_ is None, directory is None]):
        raise click.ClickException('Missing --file/-f or --dir/-d option')

    conn_config = configure_es_connection(es, username, password, ignore_certs)

    files_to_process = []

    if file_ is not None:
        files_to_process = [file_]
    elif directory is not None:
        for root, dirs, files in os.walk(directory):
            for f in [file for file in files if file.endswith('.json')]:
                files_to_process.append(os.path.join(root, f))
        files_to_process.sort(key=os.path.getmtime)

    for file_to_process in files_to_process:
        loader = AQHIRealtimeLoader(conn_config)
        result = loader.load_data(file_to_process)
        if not result:
            click.echo('features not generated')
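
The os.walk loop collects every .json file under the directory, then sorts by modification time so files are loaded in arrival order. The same scan with pathlib, shown as a stylistic alternative rather than the project's code:

from pathlib import Path

def json_files_by_mtime(directory):
    """Return all .json files under directory, oldest first."""
    files = list(Path(directory).rglob('*.json'))
    files.sort(key=lambda p: p.stat().st_mtime)
    return [str(p) for p in files]
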
Example #12
def add(ctx, file_, directory, es, username, password, ignore_certs):
    """adds data to system"""

    if all([file_ is None, directory is None]):
        raise click.ClickException('Missing --file/-f or --dir/-d option')

    conn_config = configure_es_connection(es, username, password, ignore_certs)

    files_to_process = []

    if file_ is not None:
        files_to_process = [file_]
    elif directory is not None:
        for root, dirs, files in os.walk(directory):
            for f in [file for file in files if file.endswith('.csv')]:
                files_to_process.append(os.path.join(root, f))
        files_to_process.sort(key=os.path.getmtime)

    for file_to_process in files_to_process:
        loader = HydrometricRealtimeLoader(conn_config)
        loader.load_data(file_to_process)

    click.echo('Done')
Example #13
def add(ctx, file_, directory, es, username, password, ignore_certs):
    """Add marine weather realtime data to Elasticsearch"""

    if all([file_ is None, directory is None]):
        raise click.ClickException('Missing --file/-f or --dir/-d option')

    conn_config = configure_es_connection(es, username, password, ignore_certs)

    files_to_process = []

    if file_ is not None:
        files_to_process = [file_]
    elif directory is not None:
        for root, dirs, files in os.walk(directory):
            for f in [file for file in files if file.endswith('.xml')]:
                files_to_process.append(os.path.join(root, f))
        files_to_process.sort(key=os.path.getmtime)

    for file_to_process in files_to_process:
        loader = MarineWeatherRealtimeLoader(conn_config)
        result = loader.load_data(file_to_process)
        if result:
            click.echo('GeoJSON features generated: {}'.format(
                json_pretty_print(loader.items)))
Example #14
def add(
    ctx,
    db,
    es,
    username,
    password,
    ignore_certs,
    dataset,
    station=None,
    starting_from=False,
    date=None,
):
    """Loads MSC Climate Archive data from Oracle into Elasticsearch"""

    conn_config = configure_es_connection(es, username, password, ignore_certs)

    loader = ClimateArchiveLoader(db, conn_config)

    if dataset == 'all':
        datasets_to_process = ['daily', 'monthly', 'normals', 'stations']
    else:
        datasets_to_process = [dataset]

    click.echo('Processing dataset(s): {}'.format(datasets_to_process))

    if 'stations' in datasets_to_process:
        try:
            click.echo('Populating stations index')
            loader.create_index('stations')
            stations = loader.generate_stations()
            loader.conn.submit_elastic_package(stations)
        except Exception as err:
            msg = 'Could not populate stations index: {}'.format(err)
            raise click.ClickException(msg)

    if 'normals' in datasets_to_process:
        try:
            click.echo('Populating normals index')
            stn_dict = loader.get_station_data(station, starting_from)
            normals_dict = loader.get_normals_data()
            periods_dict = loader.get_normals_periods()
            loader.create_index('normals')
            normals = loader.generate_normals(
                stn_dict, normals_dict, periods_dict
            )
            loader.conn.submit_elastic_package(normals)
        except Exception as err:
            msg = 'Could not populate normals index: {}'.format(err)
            raise click.ClickException(msg)

    if 'monthly' in datasets_to_process:
        try:
            click.echo('Populating monthly index')
            stn_dict = loader.get_station_data(station, starting_from)
            if not (date or station or starting_from):
                loader.create_index('monthly_summary')
            monthlies = loader.generate_monthly_data(stn_dict, date)
            loader.conn.submit_elastic_package(monthlies)
        except Exception as err:
            msg = 'Could not populate monthly index: {}'.format(err)
            raise click.ClickException(msg)

    if 'daily' in datasets_to_process:
        try:
            click.echo('Populating daily index')
            stn_dict = loader.get_station_data(station, starting_from)
            if not (date or station or starting_from):
                loader.create_index('daily_summary')
            dailies = loader.generate_daily_data(stn_dict, date)
            loader.conn.submit_elastic_package(dailies)
        except Exception as err:
            msg = 'Could not populate daily index: {}'.format(err)
            raise click.ClickException(msg)

    loader.db_conn.close()
Example #15
def add(ctx, db, es, username, password, ignore_certs, dataset):
    """
    Loads Long Term Climate Extremes (LTCE) data from Oracle DB
    into Elasticsearch.

    :param db: database connection string.
    :param dataset: name of dataset to load, or all for all datasets.
    """

    conn_config = configure_es_connection(es, username, password, ignore_certs)

    loader = LtceLoader(db, conn_config)

    if dataset == 'all':
        datasets_to_process = [
            'stations',
            'temperature',
            'precipitation',
            'snowfall',
        ]
    else:
        datasets_to_process = [dataset]

    if 'stations' in datasets_to_process:
        try:
            stations = loader.generate_stations()
            loader.conn.submit_elastic_package(stations)
            LOGGER.info('Stations populated.')
        except Exception as err:
            LOGGER.error(
                'Could not populate stations due to: {}.'.format(str(err))
            )
            raise err

    if 'temperature' in datasets_to_process:
        try:
            temp_extremes = loader.generate_daily_temp_extremes()
            loader.conn.submit_elastic_package(temp_extremes)
            LOGGER.info('Daily temperature extremes populated.')
        except Exception as err:
            LOGGER.error(
                'Could not populate daily temperature extremes due to: {}.'.format(  # noqa
                    str(err)
                )
            )
            raise err

    if 'precipitation' in datasets_to_process:
        try:
            precip_extremes = loader.generate_daily_precip_extremes()
            loader.conn.submit_elastic_package(precip_extremes)
            LOGGER.info('Daily precipitation extremes populated.')
        except Exception as err:
            LOGGER.error(
                'Could not populate daily precipitation extremes due to: {}.'.format(  # noqa
                    str(err)
                )
            )
            raise err

    if 'snowfall' in datasets_to_process:
        try:
            snow_extremes = loader.generate_daily_snow_extremes()
            loader.conn.submit_elastic_package(snow_extremes)
            LOGGER.info('Daily snowfall extremes populated.')
        except Exception as err:
            LOGGER.error(
                'Could not populate daily snowfall extremes due to: {}.'.format(  # noqa
                    str(err)
                )
            )
            raise err

    LOGGER.info('Finished populating indices.')

    loader.db_conn.close()
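
The four try/except blocks above differ only in the generator they call and the label they log, so they could be collapsed into a table-driven loop. A sketch of that refactor (LOGGER and the loader come from the example; the mapping is implied by the code above):

def populate(loader, datasets_to_process):
    generators = {
        'stations': loader.generate_stations,
        'temperature': loader.generate_daily_temp_extremes,
        'precipitation': loader.generate_daily_precip_extremes,
        'snowfall': loader.generate_daily_snow_extremes,
    }
    for name, generate in generators.items():
        if name not in datasets_to_process:
            continue
        try:
            loader.conn.submit_elastic_package(generate())
            LOGGER.info('{} populated.'.format(name))
        except Exception as err:
            LOGGER.error('Could not populate {}: {}'.format(name, err))
            raise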