Code example #1
0
def run_test(filename: str):
    """Runs the extractor code using pixels from the file and prints the result.

    Args:
        filename: Path to image file

    Notes:
        Assumes the path passed in is valid. An error is written to stderr
        if the file can't be opened as an image, or if any exception is
        raised while processing (including a RuntimeError for unsupported
        'set' return types from calculate()). Nothing is returned.
    """
    try:
        open_file = gdal.Open(filename)
        if open_file is None:
            # Report files GDAL can't open instead of silently skipping them
            sys.stderr.write("Unable to open file as an image: " + filename + "\n")
            return

        # Get the pixels and call the calculation (rollaxis moves bands last)
        pix = np.array(open_file.ReadAsArray())
        calc_val = algorithm_rgb.calculate(np.rollaxis(pix, 0, 3))

        # Check for unsupported types
        if isinstance(calc_val, set):
            raise RuntimeError("A 'set' type of data was returned and isn't supported. "
                               "Please use a list or a tuple instead")

        # Perform any type conversions to a printable string
        if isinstance(calc_val, str):
            print_val = calc_val
        else:
            # Check if the return is iterable and comma separate the values if it is
            try:
                _ = iter(calc_val)
                print_val = ",".join(map(str, calc_val))
            except TypeError:
                # Not iterable - fall back to plain string conversion
                print_val = str(calc_val)

        print(filename + "," + print_val)
    except Exception as ex:
        sys.stderr.write("Exception caught: " + str(ex) + "\n")
        sys.stderr.write("    File: " + filename + "\n")
Code example #2
0
def perform_process(transformer: transformer_class.Transformer, check_md: dict, transformer_md: list, full_md: list) -> dict:
    """Performs the processing of the data

    Arguments:
        transformer: instance of transformer class
        check_md: request specific metadata
        transformer_md: metadata associated with previous runs of the transformer
        full_md: the full set of metadata available to the transformer

    Return:
        Returns a dictionary with the results of processing: a 'code' key
        (0 on success, -1001 if the algorithm is missing calculate()), a
        'file' list describing CSV files produced, and algorithm metadata
        keyed by the algorithm's name.
    """
    # pylint: disable=unused-argument
    # The following pylint disables are here because to satisfy them would make the code unreadable
    # pylint: disable=too-many-statements, too-many-locals

    # Environment checking: the pluggable algorithm module must expose calculate()
    if not hasattr(algorithm_rgb, 'calculate'):
        msg = "The 'calculate()' function was not found in algorithm_rgb.py"
        logging.error(msg)
        return {'code': -1001, 'error': msg}

    # Setup local variables
    variable_names = __internal__.get_algorithm_variable_list('VARIABLE_NAMES')

    # Resolve the three CSV output paths from the configured (or working) folder
    csv_file, geostreams_csv_file, betydb_csv_file = __internal__.get_csv_file_names(
        __internal__.determine_csv_path([transformer.args.csv_path, check_md['working_folder']]))
    logging.debug("Calculated default CSV path: %s", csv_file)
    logging.debug("Calculated geostreams CSV path: %s", geostreams_csv_file)
    logging.debug("Calculated BETYdb CSV path: %s", betydb_csv_file)

    datestamp, localtime = __internal__.get_time_stamps(check_md['timestamp'])
    cultivar = transformer.args.germplasm_name

    # Command-line flags override; otherwise fall back to the algorithm's defaults (True)
    write_geostreams_csv = transformer.args.geostreams_csv or __internal__.get_algorithm_definition_bool('WRITE_GEOSTREAMS_CSV', True)
    write_betydb_csv = transformer.args.betydb_csv or __internal__.get_algorithm_definition_bool('WRITE_BETYDB_CSV', True)
    logging.info("Writing geostreams csv file: %s", "True" if write_geostreams_csv else "False")
    logging.info("Writing BETYdb csv file: %s", "True" if write_betydb_csv else "False")

    # Get default values and adjust as needed
    (csv_fields, csv_traits) = __internal__.get_csv_traits_table(variable_names)
    csv_traits['germplasmName'] = cultivar
    (geo_fields, geo_traits) = __internal__.get_geo_traits_table()
    (bety_fields, bety_traits) = __internal__.get_bety_traits_table(variable_names)
    bety_traits['species'] = cultivar

    csv_header = ','.join(map(str, __internal__.get_csv_header_fields()))
    geo_csv_header = ','.join(map(str, geo_fields))
    bety_csv_header = ','.join(map(str, bety_fields))

    # Process the image files
    num_image_files = 0
    entries_written = 0
    for one_file in __internal__.filter_file_list_by_ext(check_md['list_files'](), transformer.supported_image_file_exts):

        plot_name = None
        try:
            num_image_files += 1

            # Setup: plot name and georeferencing for this image
            plot_name = __internal__.find_metadata_value(full_md, ['sitename'])
            centroid = __internal__.get_centroid_latlon(one_file)
            image_pix = np.array(gdal.Open(one_file).ReadAsArray())

            # Make the call and check the results; None means "skip this image"
            calc_value = algorithm_rgb.calculate(image_pix)
            logging.debug("Calculated value is %s for file: %s", str(calc_value), one_file)
            if calc_value is None:
                continue

            values = __internal__.validate_calc_value(calc_value, variable_names)
            logging.debug("Verified values are %s", str(values))

            geo_traits['site'] = plot_name
            geo_traits['lat'] = str(centroid.GetY())
            geo_traits['lon'] = str(centroid.GetX())
            geo_traits['dp_time'] = localtime
            geo_traits['source'] = one_file
            geo_traits['timestamp'] = datestamp

            # Write the data points geographically and otherwise
            for idx, trait_name in enumerate(variable_names):
                # Geostreams can only handle one field at a time so we write out one row per field/value pair
                geo_traits['trait'] = trait_name
                geo_traits['value'] = str(values[idx])
                if write_geostreams_csv:
                    __internal__.write_trait_csv(geostreams_csv_file, geo_csv_header, geo_fields, geo_traits)

                # csv and BETYdb can handle wide rows with multiple values so we just set the field
                # values here and write the single row after the loop
                csv_traits[variable_names[idx]] = str(values[idx])
                bety_traits[variable_names[idx]] = str(values[idx])

            # The default CSV is always written; geostreams/BETYdb only when enabled
            csv_traits['site'] = plot_name
            csv_traits['timestamp'] = datestamp
            csv_traits['lat'] = str(centroid.GetY())
            csv_traits['lon'] = str(centroid.GetX())
            __internal__.write_trait_csv(csv_file, csv_header, csv_fields, csv_traits)

            bety_traits['site'] = plot_name
            bety_traits['local_datetime'] = localtime
            if write_betydb_csv:
                __internal__.write_trait_csv(betydb_csv_file, bety_csv_header, bety_fields, bety_traits)

            entries_written += 1

        except Exception:
            # logging.exception() records the active exception and traceback;
            # best-effort per-file processing continues with the next image
            logging.exception("Error generating %s for %s", __internal__.get_algorithm_name(), str(plot_name))
            continue

    if num_image_files == 0:
        logging.warning("No images were detected for processing")
    if entries_written == 0:
        logging.warning("No entries were written to CSV files")

    # Prepare the return information
    algorithm_name, algorithm_md = __internal__.prepare_algorithm_metadata()
    algorithm_md['files_processed'] = str(num_image_files)
    algorithm_md['lines_written'] = str(entries_written)
    if write_geostreams_csv:
        algorithm_md['wrote_geostreams'] = "Yes"
    if write_betydb_csv:
        algorithm_md['wrote_betydb'] = "Yes"

    # Only report output files when at least one entry was actually written
    file_md = []
    if entries_written:
        file_md.append({
            'path': csv_file,
            'key': 'csv'
        })
        if write_geostreams_csv:
            file_md.append({
                'path': geostreams_csv_file,
                'key': 'csv'
            })
        if write_betydb_csv:
            file_md.append({
                'path': betydb_csv_file,
                'key': 'csv'
            })

    return {'code': 0,
            'file': file_md,
            algorithm_name: algorithm_md
            }