def register_dataset():
    """NEXGDDP REGISTER ENDPOINT

    Validates the connector's table name ("<scenario>/<model>") against
    rasdaman and reports the result through the dataset callback.
    Always responds HTTP 200; failures are signalled via ``status: 2``
    in the callback body.
    """
    logging.info('Registering new NEXGDDP Dataset')

    # Get and deserialize. The extraction is inside the try so that a
    # missing 'connector'/'table_name' key (AttributeError) or a table
    # name without exactly one '/' (ValueError) is reported as an
    # invalid table name instead of bubbling up as a 500.
    try:
        table_name = request.get_json().get('connector').get('table_name')
        scenario, model = table_name.rsplit('/')
    except Exception:
        logging.error('Nexgddp tableName Not Valid')
        body = {'status': 2, 'errorMessage': 'Nexgddp tableName Not Valid'}
        return jsonify(callback_to_dataset(body)), 200

    # Confirm the coverage actually exists in rasdaman.
    try:
        QueryService.get_rasdaman_fields(scenario, model)
    except TableNameNotValid:
        body = {
            'status': 2,
            'errorMessage': 'Error Validating Nexgddp Dataset'
        }
        return jsonify(callback_to_dataset(body)), 200

    body = {'status': 1}

    return jsonify(callback_to_dataset(body)), 200
# Example #2
def get_tile(x,
             y,
             z,
             model,
             scenario,
             year,
             style,
             indicator,
             layer,
             compare_year=None,
             dset_b=None):
    """Build a raster tile (plain or year-vs-year diff), colorize it,
    upload it to storage, and return the colored result with HTTP 200."""
    logging.info(f'Getting tile for {x} {y} {z}')
    logging.debug(compare_year)

    bbox = TileService.get_bbox(z, x, y)
    logging.debug(f"bbox: {bbox}")

    bounds = ColoringHelper.get_data_bounds(style)
    logging.debug(bounds)

    if not compare_year:
        # Single-year tile.
        rasterfile = QueryService.get_tile_query(bbox, year, model, scenario,
                                                 indicator, bounds)
    else:
        # Diff tile between `year` and `compare_year`; default the second
        # dataset to the processed coverage for this scenario/model.
        logging.debug(f"[rout] compare_year: {compare_year}")
        dset_b = dset_b or f"{scenario}_{model}_processed"
        rasterfile = QueryService.get_tile_diff_query(bbox, year, model,
                                                      scenario, indicator,
                                                      bounds, compare_year,
                                                      dset_b)

    # Beware of side effects!
    # ColoringHelper.colorize stores the color-coded file in the same input file
    colored_response = ColoringHelper.colorize(rasterfile, style=style)

    # Saving file in cache
    logging.debug(f'Requested path is: {request.path}')

    # Uploading file to storage.
    StorageService.upload_file(rasterfile, layer, str(z), str(x), str(y), year,
                               compare_year, dset_b)

    return colored_response, 200
def get_fields(dataset_id, dataset):
    """NEXGDDP FIELDS ENDPOINT

    Looks up the rasdaman coverage for the dataset's table name
    ("<scenario>/<model>") and returns its fields as JSON.
    """
    # f-string also fixes the missing space before the id in the log
    # line (was: 'Getting fields of dataset' + dataset_id).
    logging.info(f'[ROUTER] Getting fields of dataset {dataset_id}')

    # Get and deserialize. The table name must contain exactly one '/',
    # otherwise the unpacking raises ValueError.
    table_name = dataset.get('tableName')
    scenario, model = table_name.rsplit('/')

    fields_xml = QueryService.get_rasdaman_fields(scenario, model)
    fields = XMLService.get_fields(fields_xml)
    data = {'tableName': table_name, 'fields': fields}
    return jsonify(data), 200
    def get_diff_value(dset_a, date_a, date_b, lat, lon, varnames, dset_b):
        """Return {var: value_a - value_b} for each variable in *varnames*.

        For every variable, queries rasdaman for the point value at
        (lat, lon) on *date_a* in *dset_a* minus the value on *date_b*
        in *dset_b*, parsed from the CSV response as a float.
        """
        # Compare a dataset against itself when no second dataset is given.
        dset_b = dset_a if not dset_b else dset_b

        logging.debug(dset_a)
        logging.debug(dset_b)
        results = []
        for var in varnames:
            # WCPS point-difference query. Fix: dropped the stray
            # trailing ']' that left the query with an unbalanced
            # bracket after the closing parenthesis of encode(...).
            query = f"for cov1 in ({dset_a}), cov2 in ({dset_b}) return encode((cov1.{var})[ansi(\"{date_a}\"), Lat({lat}),Long({lon})] - (cov2.{var})[ansi(\"{date_b}\"), Lat({lat}),Long({lon})], \"CSV\")"
            logging.debug(f"query: {query}")
            query_result = QueryService.get_rasdaman_csv_query(query)
            logging.debug(query_result)
            results.append(float(query_result))
        return dict(zip(varnames, results))
def get_tile(z,
             x,
             y,
             model,
             scenario,
             year,
             style,
             indicator,
             layer,
             compare_year=None,
             dset_b=None,
             no_data=None):
    """Render a PNG tile for the given coverage and tile coordinates.

    Builds a plain or diff raster, colorizes it, optionally blends an
    alpha mask for no-data values, uploads the result to storage, and
    returns it as an in-memory PNG response. Returns error(404) when
    the coverage is not found.
    """
    logging.info(f'Getting tile for {z} {x} {y}')
    logging.debug(compare_year)
    bbox = TileService.get_bbox(x, y, z)
    logging.debug(f"bbox: {bbox}")
    bounds = ColoringHelper.get_data_bounds(style)
    logging.debug(bounds)
    if compare_year:
        logging.debug(f"[rout] compare_year: {compare_year}")
        if not dset_b:
            dset_b = f"{scenario}_{model}_processed"
        rasterfile = QueryService.get_tile_diff_query(bbox, year, model,
                                                      scenario, indicator,
                                                      bounds, compare_year,
                                                      dset_b)
    else:
        rasterfile = QueryService.get_tile_query(bbox, year, model, scenario,
                                                 indicator, bounds)

    try:
        colored_response = ColoringHelper.colorize(rasterfile, style=style)
    except CoverageNotFound as e:
        return error(status=404, detail=e.message)

    logging.debug(f"colored_response: {colored_response}")

    if no_data is not None:
        # Blend an alpha mask so no-data pixels are transparent; the
        # mask file is temporary and removed afterwards.
        logging.debug("Creating mask")
        maskfile = QueryService.get_tile_mask_query(bbox, year, model,
                                                    scenario, indicator,
                                                    no_data)
        ColoringHelper.blend_alpha(colored_response, maskfile)
        os.remove(maskfile)
    else:
        logging.debug("No nodata values")

    # Saving file in cache
    logging.debug(f'Requested path is: {request.path}')

    # Uploading file to storage.
    StorageService.upload_file(colored_response, layer, str(z), str(x), str(y),
                               year, compare_year, dset_b)

    # Read the tile into memory with a context manager so the file
    # handle is closed before the temp file is deleted (the original
    # leaked the handle from open(...).read()).
    with open(colored_response, 'rb') as tile_file:
        tile_bytes = tile_file.read()
    os.remove(colored_response)

    tile_response = send_file(io.BytesIO(tile_bytes),
                              attachment_filename='tile.png',
                              mimetype='image/png')
    return tile_response, 200
def query(dataset_id, bbox, dataset):
    """NEXGDDP QUERY ENDPOINT

    Parses the SQL (from the query string or the JSON body), validates
    the select clause against the coverage's fields, and dispatches to
    the matching QueryService call. Client errors are returned via
    error() with a 4xx status; conversion failures map to 500.
    """
    logging.info('[ROUTER] Doing Query of dataset ' + dataset_id)
    # Get and deserialize
    table_name = dataset.get('tableName')
    # The temporal resolution is encoded as the last '_'-separated token
    # of the table name (e.g. "..._decadal").
    temporal_resolution = table_name.split('_')[-1]
    logging.debug(f"temporal_resolution: {temporal_resolution}")
    scenario, model = table_name.rsplit('/')

    request_json = request.get_json() or {}
    sql = request.args.get('sql', None) or request_json.get('sql', None)

    if not sql:
        return error(status=400, detail='sql must be provided')
    # Convert the SQL into its intermediate JSON representation.
    try:
        _, json_sql = QueryService.convert(sql)
    except SqlFormatError as e:
        logging.error(e.message)
        return error(status=500, detail=e.message)
    except Exception as e:
        logging.error('[ROUTER]: ' + str(e))
        return error(status=500, detail='Generic Error')
    try:
        select = get_sql_select(json_sql)
        logging.debug("Select")
        logging.debug(select)
    except Exception:
        # Any failure extracting the select clause is a client error.
        return error(status=400, detail='Invalid Select')
    # Fields
    try:
        fields_xml = QueryService.get_rasdaman_fields(scenario, model)
        fields = XMLService.get_fields(fields_xml)
    except TableNameNotValid:
        return error(status=404, detail='Table name not valid')
    fields.update({'all': {'type': 'array'}})

    # Prior to validating dates, the [max|min](year) case has to be dealt with:
    def is_year(clause):
        """True when the clause is max(year) or min(year)."""
        return (clause.get('function') in ('max', 'min')
                and clause.get('argument') == 'year')

    # All statements in the select must have the prior form; answer the
    # min/max(year) question straight from the coverage domain.
    if all(map(is_year, select)):
        result = {}
        domain = QueryService.get_domain(scenario, model)
        logging.debug(f"domain: {domain}")
        for element in select:
            key = (element['alias'] if element['alias'] else
                   f"{element['function']}({element['argument']})")
            result[key] = domain.get(element['argument']).get(
                element['function'])
        return jsonify(data=[result]), 200
    if not bbox:
        return error(
            status=400,
            detail='No coordinates provided. Include geostore or lat & lon')
    # Get years
    try:
        years = get_years(json_sql, temporal_resolution)
    except PeriodNotValid as e:
        return error(status=400, detail=e.message)
    logging.debug("years: ")
    logging.debug(years)
    # NOTE: an empty `years` list is allowed through; defaulting years
    # from the coverage domain was considered and left out.

    results = {}
    for element in select:
        try:
            if element['argument'] not in fields:
                raise InvalidField(message='Invalid Fields')
            elif element['function'] == 'temporal_series' and element[
                    'argument'] == 'year':
                results[
                    element['alias'] if element['alias'] else 'year'] = years
            elif element['function'] == 'temporal_series' and element[
                    'argument'] == 'all':
                # 'all' short-circuits: return the full data payload.
                query_results = QueryService.get_all_data(
                    scenario, model, years, bbox)
                return jsonify(data=query_results), 200
            elif element['function'] == 'temporal_series':
                indicator = element['argument']
                results[element['alias'] if element['alias'] else
                        indicator] = QueryService.get_temporal_series(
                            scenario, model, years, indicator, bbox)
            else:
                function = element['function']
                indicator = element['argument']
                results[element['alias'] if element['alias'] else
                        f"{function}({indicator})"] = QueryService.get_stats(
                            scenario, model, years, indicator, bbox, function)
        except (InvalidField, PeriodNotValid, GeostoreNeeded,
                CoordinatesNeeded) as e:
            # All of these are client errors carrying a user-facing message.
            return error(status=400, detail=e.message)
    # Transpose the column-oriented results into row dictionaries.
    output = [dict(zip(results, col)) for col in zip(*results.values())]
    return jsonify(data=output), 200