Example #1
def analyse_single_hash(index, config, filehash, cursors):
    field_id_key_map = get_field_id_key_map(index)
    logfile_filepath = get_filepath(index, config, filehash)
    print(logfile_filepath)

    results = get_logfile_mosaiq_results(index,
                                         config,
                                         logfile_filepath,
                                         field_id_key_map,
                                         filehash,
                                         cursors,
                                         grid_resolution=5 / 3)

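    # results is the (grid_xx, grid_yy, logfile_mu_density, mosaiq_mu_density)
    # tuple returned by get_logfile_mosaiq_results (see Example #2)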
    comparison = calc_comparison(results[2], results[3])
    print("Comparison result = {}".format(comparison))
    plot_results(*results)

    return comparison
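calc_comparison itself is not shown in these examples. A minimal, hypothetical stand-in that reduces the two MU density grids to a single agreement figure might look like the sketch below; the normalisation choice is an assumption for illustration, not the project's actual definition.

import numpy as np

def calc_comparison(logfile_mu_density, mosaiq_mu_density):
    # Hypothetical sketch only: summed absolute difference between the two
    # MU density grids, normalised by the summed Mosaiq MU density. The
    # real calc_comparison used above may be defined differently.
    return (np.sum(np.abs(logfile_mu_density - mosaiq_mu_density))
            / np.sum(mosaiq_mu_density))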
Example #2
def get_logfile_mosaiq_results(index,
                               config,
                               filepath,
                               field_id_key_map,
                               filehash,
                               cursors,
                               grid_resolution=1):
    file_info = index[filehash]
    delivery_details = file_info['delivery_details']
    field_id = delivery_details['field_id']

    centre = get_centre(config, file_info)
    server = get_sql_servers(config)[centre]
    mosaiq_delivery_data = multi_fetch_and_verify_mosaiq(
        cursors[server], field_id)

    mosaiq_results = mu_density_from_delivery_data(
        mosaiq_delivery_data, grid_resolution=grid_resolution)

    consecutive_keys = find_consecutive_logfiles(field_id_key_map, field_id,
                                                 filehash, index, config)

    logfilepaths = [
        get_filepath(index, config, key) for key in consecutive_keys
    ]

    logfile_results = calc_and_merge_logfile_mudensity(
        logfilepaths, grid_resolution=grid_resolution)

    try:
        # The logfile and Mosaiq MU densities need to have been calculated
        # on the same grid before they can be compared point by point
        assert np.all(logfile_results[0] == mosaiq_results[0])
        assert np.all(logfile_results[1] == mosaiq_results[1])
    except AssertionError:
        print(np.shape(logfile_results[0]))
        print(np.shape(mosaiq_results[0]))
        raise

    grid_xx = logfile_results[0]
    grid_yy = logfile_results[1]

    logfile_mu_density = logfile_results[2]
    mosaiq_mu_density = mosaiq_results[2]

    return grid_xx, grid_yy, logfile_mu_density, mosaiq_mu_density
Example #3
def get_logfile_delivery_data_bygantry(index, config, logfile_groups,
                                       mosaiq_gantry_angles):
    logfile_delivery_data_bygantry = dict()

    for logfile_group in logfile_groups:
        logfile_delivery_data_bygantry[logfile_group] = dict()

        for file_hash in logfile_group:
            filepath = get_filepath(index, config, file_hash)
            logfile_delivery_data = DeliveryDatabases.from_logfile(filepath)

            # Work from the filtered control points only; the unfiltered
            # monitor units are never used.
            filtered = logfile_delivery_data.filter_cps()

            mu = filtered.monitor_units
            mlc = filtered.mlc
            jaw = filtered.jaw
            logfile_gantry_angles = filtered.gantry

            gantry_tolerance = get_gantry_tolerance(index, file_hash, config)
            unique_logfile_gantry_angles = np.unique(logfile_gantry_angles)

            assert_array_agreement(
                unique_logfile_gantry_angles, mosaiq_gantry_angles,
                gantry_tolerance)

            logfile_delivery_data_bygantry[logfile_group][file_hash] = dict()

            for mosaiq_gantry_angle in mosaiq_gantry_angles:
                # Select the logfile control points whose gantry angle lies
                # within tolerance of this Mosaiq gantry angle
                agrees_within_tolerance = (
                    np.abs(logfile_gantry_angles - mosaiq_gantry_angle)
                    <= gantry_tolerance)

                logfile_delivery_data_bygantry[logfile_group][file_hash][
                    mosaiq_gantry_angle] = {
                        'mu': mu[agrees_within_tolerance],
                        'mlc': mlc[agrees_within_tolerance],
                        'jaw': jaw[agrees_within_tolerance],
                    }

    return logfile_delivery_data_bygantry
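The value returned above is nested three levels deep (logfile group, then file hash, then Mosaiq gantry angle), with 'mu', 'mlc' and 'jaw' arrays at the leaves. A minimal sketch of walking that structure, assuming only the dictionary shape built in this example:

def summarise_bygantry(logfile_delivery_data_bygantry):
    # Walk group -> file hash -> gantry angle and report how many control
    # points agreed with each Mosaiq gantry angle.
    for logfile_group, by_hash in logfile_delivery_data_bygantry.items():
        for file_hash, by_angle in by_hash.items():
            for gantry_angle, data in by_angle.items():
                print(logfile_group, file_hash, gantry_angle,
                      len(data['mu']))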
Example #4
def calc_logfile_mu_density_bygantry(index, config, logfile_group,
                                     gantry_angle, grid_resolution=1):
    logfile_mu_density = None

    for filehash in logfile_group:
        filepath = get_filepath(index, config, filehash)
        logfile_delivery_data = DeliveryDatabases.from_logfile(filepath)

        # get_grid is assumed here to return the (grid_xx, grid_yy) pair, so
        # unpacking it gives [grid_xx, grid_yy, mu_density] to match the
        # three-element indexing used when merging below.
        a_logfile_mu_density = [
            *get_grid(grid_resolution=grid_resolution),
            logfile_delivery_data.mudensity(
                gantry_angle, grid_resolution=grid_resolution)
        ]

        if logfile_mu_density is None:
            logfile_mu_density = a_logfile_mu_density
        else:
            assert np.all(logfile_mu_density[0] == a_logfile_mu_density[0])
            assert np.all(logfile_mu_density[1] == a_logfile_mu_density[1])
            logfile_mu_density[2] += a_logfile_mu_density[2]

    return logfile_mu_density
Example #5
def mudensity_comparisons(config, plot=True, new_logfiles=False):
    (comparison_storage_filepath, comparison_storage_scratch) = get_cache_filepaths(
        config
    )

    grid_resolution, ram_fraction = get_mu_density_parameters(config)

    index = get_index(config)
    field_id_key_map = get_field_id_key_map(index)

    (file_hashes, comparisons, _) = load_comparisons_from_cache(config)

    if new_logfiles:
        file_hashes, _ = random_uncompared_logfiles(index, config, file_hashes)

    sql_servers_list = get_sql_servers_list(config)

    with multi_mosaiq_connect(sql_servers_list) as cursors:
        for file_hash in file_hashes:

            try:
                logfile_filepath = get_filepath(index, config, file_hash)
                print("\n{}".format(logfile_filepath))

                if (new_logfiles) and (file_hash in comparisons):
                    raise AssertionError(
                        "A new logfile shouldn't have already been compared"
                    )

                if index[file_hash]["delivery_details"]["qa_mode"]:
                    print("Skipping QA field")
                else:
                    if file_hash in comparisons:
                        print(
                            "Cached comparison value = {}".format(
                                comparisons[file_hash]
                            )
                        )

                    results = get_logfile_mosaiq_results(
                        index,
                        config,
                        logfile_filepath,
                        field_id_key_map,
                        file_hash,
                        cursors,
                        grid_resolution=grid_resolution,
                    )
                    new_comparison = calc_comparison(results[2], results[3])

                    if file_hash not in comparisons:
                        update_comparison_file(
                            file_hash,
                            new_comparison,
                            comparison_storage_filepath,
                            comparison_storage_scratch,
                        )
                        print(
                            "Newly calculated comparison value = {}".format(
                                new_comparison
                            )
                        )
                    elif np.abs(comparisons[file_hash] - new_comparison) > 0.00001:
                        print(
                            "Calculated comparison value does not agree with "
                            "the cached value."
                        )
                        print(
                            "Newly calculated comparison value = {}".format(
                                new_comparison
                            )
                        )
                        update_comparison_file(
                            file_hash,
                            new_comparison,
                            comparison_storage_filepath,
                            comparison_storage_scratch,
                        )
                        print("Overwrote the cache with the new result.")
                    else:
                        print("Calced comparison value agrees with the cached value")
                    if plot:
                        plot_results(*results)
            except KeyboardInterrupt:
                raise
            except AssertionError:
                raise
            except Exception:
                print(traceback.format_exc())