Example 1
import itertools

import matplotlib.pyplot as plt
import numpy as np


def compare_mosaiq_fields(servers, field_ids):
    """Compare the MU densities of fields stored on one or more Mosaiq servers.

    ``multi_mosaiq_connect``, ``delivery_data_from_mosaiq``,
    ``plot_gantry_collimator``, and ``plot_mu_densities`` are helpers
    defined in the surrounding module.
    """
    unique_servers = list(set(servers))

    with multi_mosaiq_connect(unique_servers) as cursors:
        deliveries = [
            delivery_data_from_mosaiq(cursors[server], field_id)
            for server, field_id in zip(servers, field_ids)
        ]

    mu_density_results = [
        delivery_data.mudensity() for delivery_data in deliveries
    ]

    # The third element of each result tuple is the MU density grid itself.
    mu_densities = [results[2] for results in mu_density_results]

    labels = [
        "Server: `{}` | Field ID: `{}`".format(server, field_id)
        for server, field_id in zip(servers, field_ids)
    ]

    plot_gantry_collimator(labels, deliveries)
    plot_mu_densities(labels, mu_density_results)

    # Every pairwise combination of MU densities must agree to within 0.1
    # at every grid point for the fields to be considered a match.
    mu_densities_match = np.all([
        np.all(np.abs(mu_density_a - mu_density_b) < 0.1)
        for mu_density_a, mu_density_b in itertools.combinations(
            mu_densities, 2)
    ])

    plt.show()
    print("MU Densities match: {}".format(mu_densities_match))

    return deliveries, mu_densities
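
A minimal usage sketch; the server aliases and field IDs below are hypothetical placeholders:

servers = ['mosaiq_server_a', 'mosaiq_server_b']  # hypothetical server aliases
field_ids = [1234, 5678]  # hypothetical Mosaiq field IDs
deliveries, mu_densities = compare_mosaiq_fields(servers, field_ids)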
Example 2
import traceback

import numpy as np


def mudensity_comparisons(config, plot=True, new_logfiles=False):
    """Compare logfile MU densities against Mosaiq and cache the results.

    The ``get_*`` lookups, ``load_comparisons_from_cache``,
    ``calc_comparison``, ``update_comparison_file``, and ``plot_results``
    helpers come from the surrounding module.
    """
    (comparison_storage_filepath, comparison_storage_scratch) = get_cache_filepaths(
        config
    )

    grid_resolution, ram_fraction = get_mu_density_parameters(config)

    index = get_index(config)
    field_id_key_map = get_field_id_key_map(index)

    (file_hashes, comparisons, _) = load_comparisons_from_cache(config)

    if new_logfiles:
        file_hashes, _ = random_uncompared_logfiles(index, config, file_hashes)

    sql_servers_list = get_sql_servers_list(config)

    with multi_mosaiq_connect(sql_servers_list) as cursors:
        for file_hash in file_hashes:

            try:
                logfile_filepath = get_filepath(index, config, file_hash)
                print("\n{}".format(logfile_filepath))

                if new_logfiles and file_hash in comparisons:
                    raise AssertionError(
                        "A new logfile shouldn't have already been compared"
                    )

                if index[file_hash]["delivery_details"]["qa_mode"]:
                    print("Skipping QA field")
                else:
                    if file_hash in comparisons:
                        print(
                            "Cached comparison value = {}".format(
                                comparisons[file_hash]
                            )
                        )

                    results = get_logfile_mosaiq_results(
                        index,
                        config,
                        logfile_filepath,
                        field_id_key_map,
                        file_hash,
                        cursors,
                        grid_resolution=grid_resolution,
                    )
                    # Indices 2 and 3 of the results tuple hold the two MU
                    # densities (logfile and Mosaiq) being compared.
                    new_comparison = calc_comparison(results[2], results[3])

                    if file_hash not in comparisons:
                        update_comparison_file(
                            file_hash,
                            new_comparison,
                            comparison_storage_filepath,
                            comparison_storage_scratch,
                        )
                        print(
                            "Newly calculated comparison value = {}".format(
                                new_comparison
                            )
                        )
                    # The fresh value must agree with the cache to within 1e-5.
                    elif np.abs(comparisons[file_hash] - new_comparison) > 0.00001:
                        print(
                            "Calculated comparison value does not agree with "
                            "the cached value."
                        )
                        print(
                            "Newly calculated comparison value = {}".format(
                                new_comparison
                            )
                        )
                        update_comparison_file(
                            file_hash,
                            new_comparison,
                            comparison_storage_filepath,
                            comparison_storage_scratch,
                        )
                        print("Overwrote the cache with the new result.")
                    else:
                        print("Calced comparison value agrees with the cached value")
                    if plot:
                        plot_results(*results)
            except KeyboardInterrupt:
                raise
            except AssertionError:
                raise
            except Exception:
                # Log any other per-file failure and move on to the next
                # logfile rather than aborting the whole run.
                print(traceback.format_exc())
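
A minimal usage sketch; load_config is a hypothetical stand-in for however the surrounding module builds its configuration mapping:

config = load_config()  # hypothetical: however the module's config is built
mudensity_comparisons(config, plot=False, new_logfiles=True)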
Example 3
import json
import os
from glob import glob


def index_logfiles(centre_map, machine_map, logfile_data_directory):
    """Hash new .trf logfiles and index them against their Mosaiq records.

    ``multi_mosaiq_connect``, ``hash_file``, ``file_already_in_index``, and
    ``file_ready_to_be_indexed`` are helpers defined in the surrounding
    module.
    """
    data_directory = logfile_data_directory
    index_filepath = os.path.abspath(os.path.join(data_directory,
                                                  'index.json'))
    to_be_indexed_directory = os.path.abspath(
        os.path.join(data_directory, 'to_be_indexed'))
    indexed_directory = os.path.abspath(os.path.join(data_directory,
                                                     'indexed'))

    # Directories that problem logfiles get filed into
    no_mosaiq_record_found = os.path.abspath(
        os.path.join(data_directory, 'no_mosaiq_record_found'))
    unknown_error_in_logfile = os.path.abspath(
        os.path.join(data_directory, 'unknown_error_in_logfile'))
    no_field_label_in_logfile = os.path.abspath(
        os.path.join(data_directory, 'no_field_label_in_logfile'))
    centre_server_map = {
        centre: centre_lookup['mosaiq_sql_server']
        for centre, centre_lookup in centre_map.items()
    }

    sql_server_and_ports = [
        str(details['mosaiq_sql_server'])
        for details in centre_map.values()
    ]

    with open(index_filepath, 'r') as json_data_file:
        index = json.load(json_data_file)

    indexset = set(index.keys())

    print('\nConnecting to Mosaiq SQL servers...')
    with multi_mosaiq_connect(sql_server_and_ports) as cursors:

        print('Globbing index directory...')
        to_be_indexed = glob(os.path.join(to_be_indexed_directory, '**/*.trf'),
                             recursive=True)

        # Hash the logfiles in chunks of 50 so that progress can be
        # reported as the indexing proceeds.
        chunk_size = 50
        number_to_be_indexed = len(to_be_indexed)
        to_be_indexed_chunked = [
            to_be_indexed[i:i + chunk_size]
            for i in range(0, number_to_be_indexed, chunk_size)
        ]

        for i, a_to_be_indexed_chunk in enumerate(to_be_indexed_chunked):
            print('\nHashing a chunk of logfiles ({}/{})'.format(
                i + 1, len(to_be_indexed_chunked)))
            hashlist = [
                hash_file(filename, dot_feedback=True)
                for filename in a_to_be_indexed_chunk
            ]

            print(' ')

            to_be_indexed_dict = dict(zip(hashlist, a_to_be_indexed_chunk))

            hashset = set(hashlist)

            # Files whose hash is already in the index are handled as
            # duplicates of the previously indexed copy.
            for filehash in list(hashset.intersection(indexset)):
                file_already_in_index(
                    os.path.join(indexed_directory,
                                 index[filehash]['filepath']),
                    to_be_indexed_dict[filehash], filehash)

            # Everything else is matched against Mosaiq and indexed, or
            # filed into one of the error directories set up above.
            file_ready_to_be_indexed(
                cursors, list(hashset.difference(indexset)),
                to_be_indexed_dict, unknown_error_in_logfile,
                no_mosaiq_record_found, no_field_label_in_logfile,
                indexed_directory, index_filepath, index, machine_map,
                centre_map, centre_server_map)
    print('Complete')
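
A minimal usage sketch; the centre name, machine ID, server address, and data path are hypothetical placeholders, and the machine_map structure is assumed rather than taken from this excerpt:

centre_map = {
    'centre_a': {'mosaiq_sql_server': 'mosaiq-sql:1433'},  # hypothetical
}
machine_map = {
    '1234': {'centre': 'centre_a'},  # hypothetical machine-to-centre entry
}
index_logfiles(centre_map, machine_map, '/path/to/logfile/data')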