Code Example #1
File: app.py  Project: UST-QuAntiL/QuantME-UseCases
async def predict(job_id):
    """
        Predict
        * evaluates results and computes predictions from them
    """

    # response parameters
    message = 'success'
    status_code = 200

    # load the data from url or json body
    results_url = request.args.get('results-url', type=str)
    if results_url is None:
        results_url = (await request.get_json())['results-url']

    n_classes = request.args.get('n_classes', type=int, default=2)
    is_statevector = request.args.get('is-statevector',
                                      type=str,
                                      default='False')
    is_statevector = is_statevector not in ['False', '', 'No', 'None']

    # file paths (inputs)
    results_file_path = './static/plots/predictions/results' \
                        + str(job_id) + '.txt'

    # file paths (outputs)
    labels_file_path = './static/plots/predictions/labels' \
                       + str(job_id) + '.txt'

    try:
        # create working folder if not exist
        FileService.create_folder_if_not_exist('./static/plots/predictions/')

        # delete old files if exist
        FileService.delete_if_exist(results_file_path, labels_file_path)

        # download and store locally
        await FileService.download_to_file(results_url, results_file_path)

        results = ResultsSerializer.deserialize(results_file_path)

        labels = DecisionBoundaryPlotter.predict(results, n_classes,
                                                 is_statevector)

        NumpySerializer.serialize(labels, labels_file_path)

        # generate urls
        url_root = request.host_url
        predictions_url = generate_url(url_root, 'plots/predictions',
                                       'labels' + str(job_id))

    except Exception as ex:
        message = str(ex)
        status_code = 500
        return jsonify(message=message, status_code=status_code)

    return jsonify(message=message,
                   status_code=status_code,
                   predictions_url=predictions_url)
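
The generate_url helper used above is not shown in these examples. A minimal sketch of what such a helper might look like, assuming it simply joins the host URL with the static folder, the route, and the base file name (the project's actual implementation may differ); it is consistent with the URL built by hand in Code Example #3:

def generate_url(url_root, route, file_name):
    # Hypothetical helper: build a URL pointing at a file stored under ./static/.
    # Assumes all serialized files use a '.txt' extension.
    return url_root + '/static/' + route + '/' + file_name + '.txt'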
Code Example #2
File: app.py  Project: UST-QuAntiL/QuantME-UseCases
async def perform_mds_data_preparation(job_id):
    """
    Trigger multi-dimensional scaling on the given distance matrix.
    We have the following parameters (name : type : description):
    distance_matrix_url : string : download location of the input distance matrix
    """

    # response parameters
    message = "success"
    status_code = 200

    # load the distance matrix from url
    distance_matrix_url = request.args.get('distance_matrix_url', type=str)
    if distance_matrix_url is None:
        distance_matrix_url = (await request.get_json())['distance_matrix_url']

    distance_matrix_path = './static/mds/distance-matrix' + str(
        job_id) + '.txt'
    embeddings_path = './static/mds/embeddings' + str(job_id) + '.txt'

    try:
        # create working folder if not exist
        FileService.create_folder_if_not_exist('./static/mds')

        # delete old files if exist
        FileService.delete_if_exist(distance_matrix_path, embeddings_path)

        # download the input data and store it locally
        print('Loading distance matrix from URL: ' + str(distance_matrix_url))
        await FileService.download_to_file(distance_matrix_url,
                                           distance_matrix_path)
        print('Successfully loaded distance matrix...')

        # deserialize distance matrix to numpy array
        distance_matrix = np.loadtxt(distance_matrix_path)
        print('Created numpy array from file. Applying MDS...')

        # apply MDS
        mds = manifold.MDS(n_components=2,
                           n_init=4,
                           max_iter=300,
                           eps=1e-3,
                           dissimilarity="euclidean",
                           n_jobs=1).fit(distance_matrix)
        embedding = mds.embedding_
        print('Successfully applied MDS to given distance matrix!')
        np.savetxt(embeddings_path, embedding)

        embeddings_url = generate_url(request.host_url, 'mds',
                                      'embeddings' + str(job_id))
        print('Result available at URL: ' + embeddings_url)

        return jsonify(message=message,
                       status_code=status_code,
                       embeddings_url=embeddings_url)

    except Exception as ex:
        message = str(ex)
        status_code = 500
        return jsonify(message=message, status_code=status_code)
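
Note that manifold.MDS is configured with dissimilarity="euclidean", which treats the rows of the input file as coordinates. If the file truly contains a precomputed distance matrix, scikit-learn can also consume it directly; a minimal standalone sketch (file names are placeholders, not the paths used by the handler):

import numpy as np
from sklearn import manifold

# Hypothetical standalone usage with a precomputed distance matrix.
distance_matrix = np.loadtxt('distance-matrix.txt')
mds = manifold.MDS(n_components=2,
                   n_init=4,
                   max_iter=300,
                   eps=1e-3,
                   dissimilarity="precomputed",
                   n_jobs=1).fit(distance_matrix)
embedding = mds.embedding_
np.savetxt('embeddings.txt', embedding)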
Code Example #3
File: test.py  Project: UST-QuAntiL/QuantME-UseCases
async def download_url_and_generate_temp_url(url_root, file_url, file_name):
    """
    Downloads the file at the given URL and stores it locally.
    Also creates a URL for accessing the locally stored file.
    """

    test_folder_path = './static/test/'
    test_file_path = test_folder_path + str(file_name) + '.txt'
    FileService.create_folder_if_not_exist(test_folder_path)
    FileService.delete_if_exist(test_file_path)
    await FileService.download_to_file(file_url, test_file_path)
    return url_root + '/static/test/' + file_name + '.txt'
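
FileService.download_to_file is awaited throughout these handlers, but its implementation is not shown here. A minimal sketch of an async download helper with the same call shape, assuming aiohttp is available (the project's actual implementation may differ):

import aiohttp

async def download_to_file(url, file_path):
    # Hypothetical helper: fetch the resource via HTTP GET and write the body to a local file.
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            response.raise_for_status()
            content = await response.read()
    with open(file_path, 'wb') as file:
        file.write(content)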
Code Example #4
File: app.py  Project: UST-QuAntiL/QuantME-UseCases
async def generate_grid(job_id):
    """
        Takes the original data and generates a grid of new data points that surrounds the original data.
        The resolution parameter r determines the dimensions of the grid, e.g. r x r for 2-dimensional data.
    """

    # response parameters
    message = 'success'
    status_code = 200

    # load the data from url or json body
    data_url = request.args.get('data-url', type=str)
    if data_url is None:
        data_url = (await request.get_json())['data-url']

    resolution = request.args.get('resolution', type=int, default=50)

    # file paths (inputs)
    data_file_path = './static/plots/grid-generation/data' \
                     + str(job_id) + '.txt'

    # file paths (outputs)
    grid_file_path = './static/plots/grid-generation/grid' \
                     + str(job_id) + '.txt'

    try:
        # create working folder if not exist
        FileService.create_folder_if_not_exist(
            './static/plots/grid-generation/')

        # delete old files if exist
        FileService.delete_if_exist(data_file_path, grid_file_path)

        # download the data and store it locally
        await FileService.download_to_file(data_url, data_file_path)

        # deserialize the data
        data = NumpySerializer.deserialize(data_file_path)

        grid = DecisionBoundaryPlotter.generate_grid(data, resolution)

        NumpySerializer.serialize(grid, grid_file_path)

        # generate urls
        url_root = request.host_url
        grid_url = generate_url(url_root, 'plots/grid-generation',
                                'grid' + str(job_id))
    except Exception as ex:
        message = str(ex)
        status_code = 500
        return jsonify(message=message, status_code=status_code)

    return jsonify(message=message, status_code=status_code, grid_url=grid_url)
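
DecisionBoundaryPlotter.generate_grid is only referenced here. A minimal numpy sketch of how an r x r grid surrounding 2-dimensional data could be generated, under the assumption that a small margin is added around the data's bounding box (an illustration, not the project's code):

import numpy as np

def generate_grid(data, resolution, margin=0.1):
    # Hypothetical sketch: span the bounding box of the 2-dimensional data
    # (plus a small margin) with a resolution x resolution grid of points.
    x_min, y_min = data.min(axis=0) - margin
    x_max, y_max = data.max(axis=0) + margin
    xx, yy = np.meshgrid(np.linspace(x_min, x_max, resolution),
                         np.linspace(y_min, y_max, resolution))
    return np.column_stack((xx.ravel(), yy.ravel()))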
Code Example #5
File: app.py  Project: UST-QuAntiL/QuantME-UseCases
async def initialize_centroids(job_id):
    """
    Create k random centroids in the range [0, 1] x [0, 1].
    """

    # load the data from url
    k = request.args.get('k', type=int, default=2)

    centroids_file_path = './static/centroid-calculation/initialization/centroids' \
                          + str(job_id) + '.txt'

    # response parameters
    message = 'success'
    status_code = 200
    centroids_url = ''

    try:
        # create working folder if not exist
        FileService.create_folder_if_not_exist(
            './static/centroid-calculation/initialization/')

        # delete old files if exist
        FileService.delete_if_exist(centroids_file_path)

        # generate k centroids
        centroids = DataProcessingService.generate_random_data(k)

        # serialize the data
        np.savetxt(centroids_file_path, centroids)

        # generate urls
        url_root = request.host_url
        centroids_url = generate_url(url_root,
                                     'centroid-calculation/initialization',
                                     'centroids' + str(job_id))

    except Exception as ex:
        message = str(ex)
        status_code = 500

    return jsonify(message=message,
                   status_code=status_code,
                   centroids_url=centroids_url)
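
DataProcessingService.generate_random_data(k) is not shown; per the docstring it should produce k random points in [0, 1] x [0, 1]. A minimal numpy sketch of such a generator (an assumption, not the project's actual code):

import numpy as np

def generate_random_data(k):
    # Hypothetical sketch: k uniformly distributed 2-dimensional points in [0, 1] x [0, 1].
    return np.random.rand(k, 2)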
Code Example #6
File: test.py  Project: UST-QuAntiL/QuantME-UseCases
async def plot_data_from_urls(data_url, cluster_mapping_url, k):
    # create paths
    plot_folder_path = './static/test/plot/'
    data_file_path = plot_folder_path + 'data.txt'
    cluster_mapping_file_path = plot_folder_path + 'cluster_mapping.txt'

    # create folder and delete old files
    FileService.create_folder_if_not_exist(plot_folder_path)
    FileService.delete_if_exist(data_file_path, cluster_mapping_file_path)

    # download data
    await FileService.download_to_file(data_url, data_file_path)
    await FileService.download_to_file(cluster_mapping_url,
                                       cluster_mapping_file_path)

    # deserialize data
    data = np.loadtxt(data_file_path)
    cluster_mapping = np.loadtxt(cluster_mapping_file_path)

    # prepare plot data
    data_preprocessed = DataProcessingService.normalize(
        DataProcessingService.standardize(data))

    plot(data, data_preprocessed, cluster_mapping, k)
Code Example #7
def main():
    node_list = ["0.0.0.0", "localhost"]
    main_pool = ThreadPool(3)
    discovery_service = DiscoveryService(ip="127.0.0.1",
                                         port=3000,
                                         initial_nodes=node_list,
                                         period=5)
    file_service = FileService(ip="127.0.0.1",
                               port=3001,
                               node_list=node_list,
                               directory='files/',
                               timeout=5)

    main_pool.add_task(discovery_service.start_service)
    main_pool.add_task(file_service.start_service)
    main_pool.wait_completion()
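
The ThreadPool used above (with add_task and wait_completion) follows the common queue-plus-worker-threads recipe rather than multiprocessing.pool.ThreadPool. A minimal sketch of such a class, assuming that is the interface this example relies on:

from queue import Queue
from threading import Thread

class ThreadPool:
    """Pool of worker threads consuming tasks from a queue."""

    def __init__(self, num_threads):
        self.tasks = Queue()
        for _ in range(num_threads):
            Thread(target=self._worker, daemon=True).start()

    def _worker(self):
        # Each worker pulls callables from the queue and runs them.
        while True:
            func, args, kwargs = self.tasks.get()
            try:
                func(*args, **kwargs)
            finally:
                self.tasks.task_done()

    def add_task(self, func, *args, **kwargs):
        self.tasks.put((func, args, kwargs))

    def wait_completion(self):
        # Block until all queued tasks have been processed.
        self.tasks.join()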
Code Example #8
File: app.py  Project: UST-QuAntiL/QuantME-UseCases
async def plot_boundary(job_id):
    """
        Plots data and decision boundary
    """

    # response parameters
    message = 'success'
    status_code = 200

    # load the data from url or json body
    data_url = request.args.get('data-url', type=str)
    if data_url is None:
        data_url = (await request.get_json())['data-url']

    labels_url = request.args.get('labels-url', type=str)
    if labels_url is None:
        labels_url = (await request.get_json())['labels-url']

    grid_url = request.args.get('grid-url', type=str)
    if grid_url is None:
        grid_url = (await request.get_json())['grid-url']

    predictions_url = request.args.get('predictions-url', type=str)
    if predictions_url is None:
        predictions_url = (await request.get_json())['predictions-url']

    # file paths (inputs)
    data_file_path = './static/plots/plot/data' \
                     + str(job_id) + '.txt'

    labels_file_path = './static/plots/plot/labels' \
                       + str(job_id) + '.txt'

    grid_file_path = './static/plots/plot/grid' \
                     + str(job_id) + '.txt'

    predictions_file_path = './static/plots/plot/predictions' \
                            + str(job_id) + '.txt'

    # file paths (outputs)
    plot_file_path = './static/plots/plot/plot' \
                     + str(job_id) + '.png'

    try:
        # create working folder if not exist
        FileService.create_folder_if_not_exist('./static/plots/plot/')

        # delete old files if exist
        FileService.delete_if_exist(data_file_path, labels_file_path,
                                    grid_file_path, predictions_file_path,
                                    plot_file_path)

        # download the data and store it locally
        await FileService.download_to_file(data_url, data_file_path)
        await FileService.download_to_file(labels_url, labels_file_path)
        await FileService.download_to_file(grid_url, grid_file_path)
        await FileService.download_to_file(predictions_url,
                                           predictions_file_path)

        # deserialize the data
        data = NumpySerializer.deserialize(data_file_path)
        labels = NumpySerializer.deserialize(labels_file_path)
        grid = NumpySerializer.deserialize(grid_file_path)
        predictions = NumpySerializer.deserialize(predictions_file_path)

        labels = labels.astype(int)
        predictions = predictions.astype(int)

        DecisionBoundaryPlotter.save_plot(data, labels, grid, predictions,
                                          plot_file_path)

        # generate urls
        url_root = request.host_url
        plot_url = url_root + '/static/plots/plot/plot' + str(job_id) + '.png'

    except Exception as ex:
        message = str(ex)
        status_code = 500
        return jsonify(message=message, status_code=status_code)

    return jsonify(message=message, status_code=status_code, plot_url=plot_url)
Code Example #9
File: app.py  Project: UST-QuAntiL/QuantME-UseCases
async def perform_wu_palmer_data_preparation(job_id):
    """
    Trigger the Wu-Palmer data preparation algorithm.
    We have the following parameters (name : type : description):
    input_data_url : string : download location of the input data
    attributes : [string] : the attributes to prepare
    """

    # response parameters
    message = "success"
    status_code = 200

    # load the data from url
    input_data_url = request.args.get('input_data_url', type=str)
    if input_data_url is None:
        input_data_url = (await request.get_json())['input_data_url']
    attributes = request.args.get('attributes', type=list)
    if attributes is None:
        attributesResponse = (await request.get_json())
        if (attributesResponse is
                None) or (attributesResponse.get('attributes') is None):
            print('Using default attributes for comparison: ' +
                  str(default_attributes))
            attributes = default_attributes
        else:
            attributes = attributesResponse['attributes']

    input_file_path = './static/distance-matrices/muse-input' + str(
        job_id) + '.csv'
    output_file_path = './static/distance-matrices/distance-matrix' + str(
        job_id) + '.txt'

    try:
        # create working folder if not exist
        FileService.create_folder_if_not_exist('./static/distance-matrices')

        # delete old files if exist
        FileService.delete_if_exist(input_file_path, output_file_path)

        # download the input data and store it locally
        print('Loading input data from URL: ' + str(input_data_url))
        await FileService.download_to_file(input_data_url, input_file_path)
        print('Successfully loaded input data...')

        # deserialize the input data
        entities = FileService.load_entities(input_file_path)
        print('Comparing based on the following attributes: ' +
              str(attributes))

        # calculate the distance matrix
        await WuPalmerService.wu_palmer_data_preparation(
            entities, output_file_path, attributes)

        distance_matrix_url = generate_url(request.host_url,
                                           'distance-matrices',
                                           'distance-matrix' + str(job_id))
        print('Result available at URL: ' + distance_matrix_url)

        return jsonify(message=message,
                       status_code=status_code,
                       distance_matrix_url=distance_matrix_url)

    except Exception as ex:
        message = str(ex)
        status_code = 500
        return jsonify(message=message, status_code=status_code)
Code Example #10
File: app.py  Project: UST-QuAntiL/QuantME-UseCases
async def calculate_angles(job_id):
    """
    Performs the pre-processing of a general rotational clustering algorithm,
    i.e. the angle calculations.

    We take the data and centroids and calculate the centroid and data angles.
    """

    # load the data from url or json body
    data_url = request.args.get('data_url', type=str)
    if data_url is None:
        data_url = (await request.get_json())['data_url']
    centroids_url = request.args.get('centroids_url', type=str)
    if centroids_url is None:
        centroids_url = (await request.get_json())['centroids_url']
    base_vector_x = request.args.get('base_vector_x',
                                     type=float,
                                     default=-0.7071)
    base_vector_y = request.args.get('base_vector_y',
                                     type=float,
                                     default=0.7071)

    data_file_path = './static/angle-calculation/rotational-clustering/data' \
                     + str(job_id) + '.txt'
    centroids_file_path = './static/angle-calculation/rotational-clustering/centroids' \
                          + str(job_id) + '.txt'
    centroid_angles_file_path = './static/angle-calculation/rotational-clustering/centroid_angles' \
                                + str(job_id) + '.txt'
    data_angles_file_path = './static/angle-calculation/rotational-clustering/data_angles' \
                            + str(job_id) + '.txt'

    base_vector = np.array([base_vector_x, base_vector_y])

    # response parameters
    message = 'success'
    status_code = 200
    data_angles_url = ''
    centroid_angles_url = ''

    try:
        # create working folder if not exist
        FileService.create_folder_if_not_exist(
            './static/angle-calculation/rotational-clustering/')

        # delete old files if exist
        FileService.delete_if_exist(data_file_path, centroids_file_path,
                                    centroid_angles_file_path,
                                    data_angles_file_path)

        # download the data and store it locally
        await FileService.download_to_file(data_url, data_file_path)
        await FileService.download_to_file(centroids_url, centroids_file_path)

        # deserialize the data
        data = np.loadtxt(data_file_path)
        centroids = np.loadtxt(centroids_file_path)

        # map data and centroids to standardized unit sphere
        data = DataProcessingService.normalize(
            DataProcessingService.standardize(data))
        centroids = DataProcessingService.normalize(
            DataProcessingService.standardize(centroids))

        # calculate the angles
        data_angles = DataProcessingService.calculate_angles(data, base_vector)
        centroid_angles = DataProcessingService.calculate_angles(
            centroids, base_vector)

        # serialize the data
        np.savetxt(data_angles_file_path, data_angles)
        np.savetxt(centroid_angles_file_path, centroid_angles)

        # generate urls
        url_root = request.host_url
        data_angles_url = generate_url(
            url_root, 'angle-calculation/rotational-clustering',
            'data_angles' + str(job_id))
        centroid_angles_url = generate_url(
            url_root, 'angle-calculation/rotational-clustering',
            'centroid_angles' + str(job_id))

    except Exception as ex:
        message = str(ex)
        status_code = 500

    return jsonify(message=message,
                   status_code=status_code,
                   data_angles_url=data_angles_url,
                   centroid_angles_url=centroid_angles_url)
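
DataProcessingService.calculate_angles is not shown. Based on the docstring, it computes the angle of each (normalized) 2-dimensional point relative to the given base vector; a minimal numpy sketch of such a calculation (an assumption about the project's implementation):

import numpy as np

def calculate_angles(points, base_vector):
    # Hypothetical sketch: angle between each row of `points` and `base_vector`
    # via the dot-product formula, assuming non-zero vectors.
    norms = np.linalg.norm(points, axis=1) * np.linalg.norm(base_vector)
    cosines = points @ base_vector / norms
    return np.arccos(np.clip(cosines, -1.0, 1.0))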
Code Example #11
File: app.py  Project: UST-QuAntiL/QuantME-UseCases
async def check_convergence(job_id):
    """
    Performs the convergence check for a general KMeans clustering algorithm.

    We take the old and new centroids, calculate their pairwise distances, sum them up,
    and divide the sum by k.

    If the resulting value is less than the given eps, we report convergence; otherwise,
    we report that the algorithm has not converged.
    """

    # load the data from url
    new_centroids_url = request.args.get('new_centroids_url', type=str)
    if new_centroids_url is None:
        new_centroids_url = (await request.get_json())['new_centroids_url']
    old_centroids_url = request.args.get('old_centroids_url', type=str)
    if old_centroids_url is None:
        old_centroids_url = (await request.get_json())['old_centroids_url']
    eps = request.args.get('eps', type=float, default=0.0001)

    old_centroids_file_path = './static/convergence-check/old_centroids' + str(
        job_id) + '.txt'
    new_centroids_file_path = './static/convergence-check/new_centroids' + str(
        job_id) + '.txt'

    # response parameters
    message = 'success'
    status_code = 200
    convergence = False
    distance = 0.0

    try:
        # create working folder if not exist
        FileService.create_folder_if_not_exist('./static/convergence-check/')

        # delete old files if exist
        FileService.delete_if_exist(old_centroids_file_path,
                                    new_centroids_file_path)

        # download the data and store it locally
        await FileService.download_to_file(old_centroids_url,
                                           old_centroids_file_path)
        await FileService.download_to_file(new_centroids_url,
                                           new_centroids_file_path)

        # deserialize the data
        old_centroids = np.loadtxt(old_centroids_file_path)
        new_centroids = np.loadtxt(new_centroids_file_path)

        # check convergence
        distance = ConvergenceCalculationService.calculate_averaged_euclidean_distance(
            old_centroids, new_centroids)

        convergence = distance < eps

    except Exception as ex:
        message = str(ex)
        status_code = 500

    return jsonify(message=message,
                   status_code=status_code,
                   convergence=convergence,
                   distance=distance)
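
The docstring describes the convergence criterion: the pairwise distances between old and new centroids are summed and divided by k. A minimal numpy sketch of what ConvergenceCalculationService.calculate_averaged_euclidean_distance might compute under that description (an assumption, not the project's code):

import numpy as np

def calculate_averaged_euclidean_distance(old_centroids, new_centroids):
    # Hypothetical sketch: mean Euclidean distance between corresponding
    # old and new centroids, i.e. the summed pairwise distances divided by k.
    distances = np.linalg.norm(old_centroids - new_centroids, axis=1)
    return float(np.mean(distances))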
Code Example #12
File: app.py  Project: UST-QuAntiL/QuantME-UseCases
async def execute_negative_rotation_circuits(job_id):
    """
    Executes the negative rotation clustering algorithm given the generated
    quantum circuits.
    """

    # load the data from url
    circuits_url = request.args.get('circuits_url', type=str)
    if circuits_url is None:
        circuits_url = (await request.get_json())['circuits_url']
    k = request.args.get('k', type=int)
    if k is None:
        k = (await request.get_json())['k']
    backend_name = request.args.get('backend_name', type=str)
    if backend_name is None:
        backend_name = (await request.get_json())['backend_name']
    token = request.args.get('token', type=str)
    if token is None:
        token = (await request.get_json())['token']
    shots_per_circuit = request.args.get('shots_per_circuit',
                                         type=int,
                                         default=8192)

    circuits_file_path = './static/circuit-execution/negative-rotation-clustering/circuits' \
                         + str(job_id) + '.txt'
    cluster_mapping_file_path = './static/circuit-execution/negative-rotation-clustering/cluster_mapping' \
                                + str(job_id) + '.txt'

    # response parameters
    message = 'success'
    status_code = 200
    cluster_mapping_url = ''

    try:
        # create working folder if not exist
        FileService.create_folder_if_not_exist(
            './static/circuit-execution/negative-rotation-clustering/')

        # delete old files if exist
        FileService.delete_if_exist(circuits_file_path)

        # download the circuits and store them locally
        await FileService.download_to_file(circuits_url, circuits_file_path)

        # deserialize the circuits
        circuits = QiskitSerializer.deserialize(circuits_file_path)

        # create the quantum backend
        backend = QuantumBackendFactory.create_backend(backend_name, token)

        # execute the circuits
        cluster_mapping = NegativeRotationClusteringService.execute_negative_rotation_clustering(
            circuits, k, backend, shots_per_circuit)

        # serialize the data
        np.savetxt(cluster_mapping_file_path, cluster_mapping)

        # generate urls
        url_root = request.host_url
        cluster_mapping_url = generate_url(
            url_root, 'circuit-execution/negative-rotation-clustering',
            'cluster_mapping' + str(job_id))

    except Exception as ex:
        message = str(ex)
        status_code = 500

    return jsonify(message=message,
                   status_code=status_code,
                   cluster_mapping_url=cluster_mapping_url)
Code Example #13
File: app.py  Project: UST-QuAntiL/QuantME-UseCases
async def calculate_centroids(job_id):
    """
    Performs the post-processing of a general rotational clustering algorithm,
    i.e. the centroid calculations.

    We take the cluster mapping, data and old centroids and calculate the
    new centroids.
    """

    # load the data from url
    data_url = request.args.get('data_url', type=str)
    if data_url is None:
        data_url = (await request.get_json())['data_url']
    cluster_mapping_url = request.args.get('cluster_mapping_url', type=str)
    if cluster_mapping_url is None:
        cluster_mapping_url = (await request.get_json())['cluster_mapping_url']
    old_centroids_url = request.args.get('old_centroids_url', type=str)
    if old_centroids_url is None:
        old_centroids_url = (await request.get_json())['old_centroids_url']

    data_file_path = './static/centroid-calculation/rotational-clustering/data' \
                     + str(job_id) + '.txt'
    cluster_mapping_file_path = './static/centroid-calculation/rotational-clustering/cluster_mapping' \
                                + str(job_id) + '.txt'
    old_centroids_file_path = './static/centroid-calculation/rotational-clustering/old_centroids' \
                              + str(job_id) + '.txt'
    centroids_file_path = './static/centroid-calculation/rotational-clustering/centroids' \
                          + str(job_id) + '.txt'

    # response parameters
    message = 'success'
    status_code = 200
    centroids_url = ''

    try:
        # create working folder if not exist
        FileService.create_folder_if_not_exist(
            './static/centroid-calculation/rotational-clustering/')

        # delete old files if exist
        FileService.delete_if_exist(data_file_path, cluster_mapping_file_path,
                                    old_centroids_file_path,
                                    centroids_file_path)

        # download the data and store it locally
        await FileService.download_to_file(data_url, data_file_path)
        await FileService.download_to_file(cluster_mapping_url,
                                           cluster_mapping_file_path)
        await FileService.download_to_file(old_centroids_url,
                                           old_centroids_file_path)

        # deserialize the data
        data = np.loadtxt(data_file_path)
        cluster_mapping = np.loadtxt(cluster_mapping_file_path)
        old_centroids = np.loadtxt(old_centroids_file_path)

        # map data and centroids to standardized unit sphere
        data = DataProcessingService.normalize(
            DataProcessingService.standardize(data))
        old_centroids = DataProcessingService.normalize(
            DataProcessingService.standardize(old_centroids))

        # calculate new centroids
        centroids = DataProcessingService.calculate_centroids(
            cluster_mapping, old_centroids, data)

        # serialize the data
        np.savetxt(centroids_file_path, centroids)

        # generate urls
        url_root = request.host_url
        centroids_url = generate_url(
            url_root, 'centroid-calculation/rotational-clustering',
            'centroids' + str(job_id))

    except Exception as ex:
        message = str(ex)
        status_code = 500

    return jsonify(message=message,
                   status_code=status_code,
                   centroids_url=centroids_url)
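
DataProcessingService.calculate_centroids is only referenced. Following the docstring, the new centroid of each cluster is derived from the points mapped to it; a minimal sketch under the assumption that it is the mean of the assigned points, keeping the old centroid for empty clusters:

import numpy as np

def calculate_centroids(cluster_mapping, old_centroids, data):
    # Hypothetical sketch: new centroid i = mean of all data points assigned to
    # cluster i; if no point is assigned, the old centroid is kept.
    centroids = np.copy(old_centroids)
    for i in range(len(old_centroids)):
        assigned = data[cluster_mapping == i]
        if len(assigned) > 0:
            centroids[i] = assigned.mean(axis=0)
    return centroids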
Code Example #14
File: app.py  Project: UST-QuAntiL/QuantME-UseCases
async def generate_negative_rotation_circuits(job_id):
    """
    Generates the negative rotation clustering quantum circuits.

    We take the data and centroid angles and return a url to a file with the
    quantum circuits as qasm strings.
    """

    # load the data from url or json body
    data_angles_url = request.args.get('data_angles_url', type=str)
    if data_angles_url is None:
        data_angles_url = (await request.get_json())['data_angles_url']
    centroid_angles_url = request.args.get('centroid_angles_url', type=str)
    if centroid_angles_url is None:
        centroid_angles_url = (await request.get_json())['centroid_angles_url']
    max_qubits = request.args.get('max_qubits', type=int, default=5)

    data_angles_file_path = './static/circuit-generation/negative-rotation-clustering/data_angles' \
                            + str(job_id) + '.txt'
    centroid_angles_file_path = './static/circuit-generation/negative-rotation-clustering/centroid_angles' \
                                + str(job_id) + '.txt'
    circuits_file_path = './static/circuit-generation/negative-rotation-clustering/circuits' \
                         + str(job_id) + '.txt'

    # response parameters
    message = 'success'
    status_code = 200
    circuits_url = ''

    try:
        # create working folder if not exist
        FileService.create_folder_if_not_exist(
            './static/circuit-generation/negative-rotation-clustering/')

        # delete old files if exist
        FileService.delete_if_exist(data_angles_file_path,
                                    centroid_angles_file_path,
                                    circuits_file_path)

        # download the data and store it locally
        await FileService.download_to_file(data_angles_url,
                                           data_angles_file_path)
        await FileService.download_to_file(centroid_angles_url,
                                           centroid_angles_file_path)

        # deserialize the data and centroid angles
        data_angles = np.loadtxt(data_angles_file_path)
        centroid_angles = np.loadtxt(centroid_angles_file_path)

        # perform circuit generation
        circuits = ClusteringCircuitGenerator.generate_negative_rotation_clustering(
            max_qubits, data_angles, centroid_angles)

        # serialize the quantum circuits
        QiskitSerializer.serialize(circuits, circuits_file_path)

        # generate url
        url_root = request.host_url
        circuits_url = generate_url(
            url_root, 'circuit-generation/negative-rotation-clustering',
            'circuits' + str(job_id))

    except Exception as ex:
        message = str(ex)
        status_code = 500

    return jsonify(message=message,
                   status_code=status_code,
                   circuits_url=circuits_url)
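
The circuit construction in ClusteringCircuitGenerator.generate_negative_rotation_clustering is not shown. The idea behind negative rotation clustering is to rotate a qubit by the data angle and then back by the centroid angle before measuring; a minimal single-qubit Qiskit sketch for one data/centroid pair (an illustration of the idea, not the project's batched multi-qubit implementation):

from qiskit import QuantumCircuit

def negative_rotation_circuit(data_angle, centroid_angle):
    # Hypothetical sketch: rotate by the data angle, rotate back by the
    # centroid angle, and measure; similar angles yield mostly |0> outcomes.
    circuit = QuantumCircuit(1, 1)
    circuit.ry(data_angle, 0)
    circuit.ry(-centroid_angle, 0)
    circuit.measure(0, 0)
    return circuit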
Code Example #15
File: app.py  Project: UST-QuAntiL/QuantME-UseCases
async def generate_circuit_parameterizations(job_id):
    """
        Generate circuit parameterizations
        * takes circuit template, data, and thetas to generate parameterizations for the circuit execution
    """

    # response parameters
    message = 'success'
    status_code = 200

    # load the data from url or json body
    data_url = request.args.get('data-url', type=str)
    if data_url is None:
        data_url = (await request.get_json())['data-url']

    circuit_template_url = request.args.get('circuit-template-url', type=str)
    if circuit_template_url is None:
        circuit_template_url = (await
                                request.get_json())['circuit-template-url']

    thetas_url = request.args.get('thetas-url', type=str)
    if thetas_url is None:
        thetas_url = (await request.get_json())['thetas-url']

    thetas_plus_url = request.args.get('thetas-plus-url', type=str)
    if thetas_plus_url is None:
        thetas_plus_url = (await request.get_json())['thetas-plus-url']

    thetas_minus_url = request.args.get('thetas-minus-url', type=str)
    if thetas_minus_url is None:
        thetas_minus_url = (await request.get_json())['thetas-minus-url']

    # file paths (inputs)
    data_file_path = './static/variational-svm-classification/circuit-generation/data' \
                     + str(job_id) + '.txt'
    circuit_template_file_path = './static/variational-svm-classification/circuit-generation/circuit-template' \
                                 + str(job_id) + '.txt'
    thetas_file_path = './static/variational-svm-classification/circuit-generation/thetas' \
                       + str(job_id) + '.txt'
    thetas_plus_file_path = './static/variational-svm-classification/circuit-generation/thetas-plus' \
                            + str(job_id) + '.txt'
    thetas_minus_file_path = './static/variational-svm-classification/circuit-generation/thetas-minus' \
                             + str(job_id) + '.txt'

    # file paths (outputs)
    parameterizations_file_path = './static/variational-svm-classification/circuit-generation/parameterizations' \
                                  + str(job_id) + '.txt'

    try:
        # create working folder if not exist
        FileService.create_folder_if_not_exist(
            './static/variational-svm-classification/circuit-generation/')

        # delete old files if exist
        FileService.delete_if_exist(data_file_path, circuit_template_file_path,
                                    thetas_file_path, thetas_plus_file_path,
                                    thetas_minus_file_path,
                                    parameterizations_file_path)

        # download and store locally
        await FileService.download_to_file(data_url, data_file_path)
        await FileService.download_to_file(circuit_template_url,
                                           circuit_template_file_path)

        if thetas_url is not None and thetas_url != '':
            await FileService.download_to_file(thetas_url, thetas_file_path)
        if thetas_plus_url is not None and thetas_plus_url != '':
            await FileService.download_to_file(thetas_plus_url,
                                               thetas_plus_file_path)
        if thetas_minus_url is not None and thetas_minus_url != '':
            await FileService.download_to_file(thetas_minus_url,
                                               thetas_minus_file_path)

        # deserialize inputs
        data = NumpySerializer.deserialize(data_file_path)

        # WORKAROUND until https://github.com/Qiskit/qiskit-terra/issues/5710 is fixed
        circuit_template = PickleSerializer.deserialize(
            circuit_template_file_path)

        thetas = NumpySerializer.deserialize(
            thetas_file_path
        ) if thetas_url is not None and thetas_url != '' else None
        thetas_plus = NumpySerializer.deserialize(
            thetas_plus_file_path
        ) if thetas_plus_url is not None and thetas_plus_url != '' else None
        thetas_minus = NumpySerializer.deserialize(
            thetas_minus_file_path
        ) if thetas_minus_url is not None and thetas_minus_url != '' else None

        thetas_array = []
        for t in [thetas, thetas_plus, thetas_minus]:
            if t is not None:
                thetas_array.append(t)

        # generate parameterizations
        parameterizations = VariationalSVMCircuitGenerator.generateCircuitParameterizations(
            circuit_template, data, thetas_array)

        # serialize outputs
        ListSerializer.serialize(parameterizations,
                                 parameterizations_file_path)

        url_root = request.host_url
        parameterizations_url = generate_url(
            url_root, 'variational-svm-classification/circuit-generation',
            'parameterizations' + str(job_id))

    except Exception as ex:
        message = str(ex)
        status_code = 500
        return jsonify(message=message, status_code=status_code)

    return jsonify(message=message,
                   status_code=status_code,
                   parameterizations_url=parameterizations_url)
Code Example #16
File: app.py  Project: UST-QuAntiL/QuantME-UseCases
async def execute_circuits(job_id):
    """
        Execute circuits
        * assigns parameters of circuit template for each parameterization
        * runs the circuit for each parameterization
        * returns results as a list
    """

    # response parameters
    message = 'success'
    status_code = 200

    # load the data from url or json body
    circuit_template_url = request.args.get('circuit-template-url', type=str)
    if circuit_template_url is None:
        circuit_template_url = (await
                                request.get_json())['circuit-template-url']

    parameterizations_url = request.args.get('parameterizations-url', type=str)
    if parameterizations_url is None:
        parameterizations_url = (await
                                 request.get_json())['parameterizations-url']

    backend_name = request.args.get('backend_name', type=str)
    if backend_name is None:
        backend_name = (await request.get_json())['backend_name']

    token = request.args.get('token', type=str)
    if token is None:
        token = (await request.get_json())['token']
    shots = request.args.get('shots', type=int, default=1024)

    # file paths (inputs)
    circuit_template_file_path = './static/variational-svm-classification/circuit-execution/circuit-template' \
                                 + str(job_id) + '.txt'
    parameterizations_file_path = './static/variational-svm-classification/circuit-execution/parameterizations' \
                                  + str(job_id) + '.txt'

    # file paths (outputs)
    results_file_path = './static/variational-svm-classification/circuit-execution/results' \
                        + str(job_id) + '.txt'

    try:
        # create working folder if not exist
        FileService.create_folder_if_not_exist(
            './static/variational-svm-classification/circuit-execution/')

        # delete old files if exist
        FileService.delete_if_exist(circuit_template_file_path,
                                    parameterizations_file_path,
                                    results_file_path)

        # download and store locally
        await FileService.download_to_file(circuit_template_url,
                                           circuit_template_file_path)
        await FileService.download_to_file(parameterizations_url,
                                           parameterizations_file_path)

        # deserialize inputs
        # WORKAROUND until https://github.com/Qiskit/qiskit-terra/issues/5710 is fixed
        circuit_template = PickleSerializer.deserialize(
            circuit_template_file_path)

        parameterizations = ListSerializer.deserialize(
            parameterizations_file_path)

        results, is_statevector = CircuitExecutor.runCircuit(
            circuit_template,
            parameterizations,
            backend_name,
            token,
            shots,
            add_measurements=True)

        ResultsSerializer.serialize(results, results_file_path)
        url_root = request.host_url
        results_url = generate_url(
            url_root, 'variational-svm-classification/circuit-execution',
            'results' + str(job_id))
    except Exception as ex:
        message = str(ex)
        status_code = 500
        return jsonify(message=message, status_code=status_code)

    return jsonify(message=message,
                   status_code=status_code,
                   results_url=results_url,
                   is_statevector=is_statevector)
Code Example #17
File: app.py  Project: UST-QuAntiL/QuantME-UseCases
async def initialize_classification(job_id):
    """
        Initialize variational SVM classification
        * generates circuit template
        * initializes optimization parameters
    """

    # response parameters
    message = 'success'
    status_code = 200

    # load the data from url or json body
    data_url = request.args.get('data-url', type=str)
    if data_url is None:
        data_url = (await request.get_json())['data-url']

    optimizer_parameters_url = request.args.get('optimizer-parameters-url',
                                                type=str)
    if optimizer_parameters_url is None:
        body = (await request.get_json())
        if body is not None:
            optimizer_parameters_url = body['optimizer-parameters-url']

    entanglement = request.args.get('entanglement', type=str, default='full')
    feature_map_reps = request.args.get('feature-map-reps',
                                        type=int,
                                        default=1)
    variational_form_reps = request.args.get('variational-form-reps',
                                             type=int,
                                             default=3)

    # file paths (inputs)
    data_file_path = './static/variational-svm-classification/initialization/data' \
                     + str(job_id) + '.txt'
    optimizer_parameters_file_path = './static/variational-svm-classification/initialization/optimizer-parameters' \
                                     + str(job_id) + '.txt'

    # file paths (outputs)
    circuit_template_file_path = './static/variational-svm-classification/initialization/circuit-template' \
                                 + str(job_id) + '.txt'
    thetas_file_path = './static/variational-svm-classification/initialization/thetas' \
                       + str(job_id) + '.txt'
    thetas_plus_file_path = './static/variational-svm-classification/initialization/thetas-plus' \
                            + str(job_id) + '.txt'
    thetas_minus_file_path = './static/variational-svm-classification/initialization/thetas-minus' \
                             + str(job_id) + '.txt'
    delta_file_path = './static/variational-svm-classification/initialization/delta' \
                      + str(job_id) + '.txt'
    try:
        # create working folder if not exist
        FileService.create_folder_if_not_exist(
            './static/variational-svm-classification/initialization/')

        # delete old files if exist
        FileService.delete_if_exist(data_file_path,
                                    optimizer_parameters_file_path,
                                    circuit_template_file_path,
                                    thetas_file_path, thetas_plus_file_path,
                                    thetas_minus_file_path, delta_file_path)

        # download the data and store it locally
        await FileService.download_to_file(data_url, data_file_path)

        if optimizer_parameters_url is None:
            print('Using default parameterization!')
            optimizer_parameters = default_optimizer_params
        else:
            # deserialize optimizer parameters
            print('Downloading parameterization from URL: ' +
                  str(optimizer_parameters_url))
            await FileService.download_to_file(optimizer_parameters_url,
                                               optimizer_parameters_file_path)
            optimizer_parameters = NumpySerializer.deserialize(
                optimizer_parameters_file_path)
            if len(optimizer_parameters) != 5:
                raise Exception(
                    "Wrong number of optimizer parameters. 5 parameters c0 through c4 expected."
                )

        # deserialize the data
        data = NumpySerializer.deserialize(data_file_path)

        # generate circuit template
        n_dimensions = data.shape[1]
        circuit_template, feature_map_parameters, var_form_parameters = \
            VariationalSVMCircuitGenerator.generateCircuitTemplate(n_dimensions, feature_map_reps,
                                                                   variational_form_reps, entanglement)

        # store circuit template
        # WORKAROUND until https://github.com/Qiskit/qiskit-terra/issues/5710 is fixed
        PickleSerializer.serialize(circuit_template,
                                   circuit_template_file_path)

        # initialize thetas for optimization
        n_thetas = len(var_form_parameters)
        thetas, thetas_plus, thetas_minus, delta = SPSAOptimizer.initializeOptimization(
            n_thetas, optimizer_parameters)

        NumpySerializer.serialize(thetas, thetas_file_path)
        NumpySerializer.serialize(thetas_plus, thetas_plus_file_path)
        NumpySerializer.serialize(thetas_minus, thetas_minus_file_path)
        NumpySerializer.serialize(delta, delta_file_path)

        # generate urls
        url_root = request.host_url
        circuit_template_url = generate_url(
            url_root, 'variational-svm-classification/initialization',
            'circuit-template' + str(job_id))
        thetas_url = generate_url(
            url_root, 'variational-svm-classification/initialization',
            'thetas' + str(job_id))
        thetas_plus_url = generate_url(
            url_root, 'variational-svm-classification/initialization',
            'thetas-plus' + str(job_id))
        thetas_minus_url = generate_url(
            url_root, 'variational-svm-classification/initialization',
            'thetas-minus' + str(job_id))
        delta_url = generate_url(
            url_root, 'variational-svm-classification/initialization',
            'delta' + str(job_id))

    except Exception as ex:
        message = str(ex)
        status_code = 500
        return jsonify(message=message, status_code=status_code)

    return jsonify(message=message,
                   status_code=status_code,
                   circuit_template_url=circuit_template_url,
                   thetas_url=thetas_url,
                   thetas_plus_url=thetas_plus_url,
                   thetas_minus_url=thetas_minus_url,
                   delta_url=delta_url)
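
SPSAOptimizer.initializeOptimization returns the initial thetas plus perturbed variants. A minimal sketch of an SPSA-style initialization, under the assumption of the standard scheme (random starting point, Bernoulli +/-1 perturbation direction delta, perturbation size taken from the c parameters); the project's actual handling of the five optimizer parameters may differ:

import numpy as np

def initialize_optimization(n_thetas, optimizer_parameters):
    # Hypothetical sketch of SPSA initialization: random starting thetas,
    # a random +/-1 perturbation direction, and the two perturbed parameter sets.
    c0, c1, c2, c3, c4 = optimizer_parameters
    thetas = np.random.uniform(-np.pi, np.pi, n_thetas)
    delta = 2 * np.random.randint(2, size=n_thetas) - 1
    thetas_plus = thetas + c1 * delta
    thetas_minus = thetas - c1 * delta
    return thetas, thetas_plus, thetas_minus, delta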
Code Example #18
File: app.py  Project: UST-QuAntiL/QuantME-UseCases
async def optimize(job_id):
    """
        Optimize parameters
        * evaluates the results from circuit execution
        * optimizes thetas using SPSA optimizer
        * generates thetas and delta for the next round (thetas_plus, thetas_minus)
    """

    # response parameters
    message = 'success'
    status_code = 200

    # load the data from url or json body
    results_url = request.args.get('results-url', type=str)
    if results_url is None:
        results_url = (await request.get_json())['results-url']

    labels_url = request.args.get('labels-url', type=str)
    if labels_url is None:
        labels_url = (await request.get_json())['labels-url']

    thetas_in_url = request.args.get('thetas-url', type=str)
    if thetas_in_url is None:
        thetas_in_url = (await request.get_json())['thetas-url']

    delta_in_url = request.args.get('delta-url', type=str)
    if delta_in_url is None:
        delta_in_url = (await request.get_json())['delta-url']

    optimizer_parameters_url = request.args.get('optimizer-parameters-url',
                                                type=str)
    if optimizer_parameters_url is None:
        optimizer_parameters_url = (
            await request.get_json())['optimizer-parameters-url']

    iteration = request.args.get('iteration', type=int)
    if iteration is None:
        iteration = (await request.get_json())['iteration']

    is_statevector = request.args.get('is-statevector',
                                      type=str,
                                      default='False')
    is_statevector = is_statevector not in ['False', '', 'No', 'None']

    # file paths (inputs)
    results_file_path = './static/variational-svm-classification/optimization/results' \
                        + str(job_id) + '.txt'
    labels_file_path = './static/variational-svm-classification/optimization/labels' \
                       + str(job_id) + '.txt'
    thetas_in_file_path = './static/variational-svm-classification/optimization/thetas-in' \
                          + str(job_id) + '.txt'
    delta_in_file_path = './static/variational-svm-classification/optimization/delta-in' \
                         + str(job_id) + '.txt'
    optimizer_parameters_file_path = './static/variational-svm-classification/optimization/optimizer-parameters' \
                                     + str(job_id) + '.txt'

    # file paths (outputs)
    thetas_out_file_path = './static/variational-svm-classification/optimization/thetas-out' \
                           + str(job_id) + '.txt'
    thetas_plus_file_path = './static/variational-svm-classification/optimization/thetas-plus' \
                            + str(job_id) + '.txt'
    thetas_minus_file_path = './static/variational-svm-classification/optimization/thetas-minus' \
                             + str(job_id) + '.txt'
    delta_out_file_path = './static/variational-svm-classification/optimization/delta-out' \
                          + str(job_id) + '.txt'

    try:
        # create working folder if not exist
        FileService.create_folder_if_not_exist(
            './static/variational-svm-classification/optimization/')

        # delete old files if exist
        FileService.delete_if_exist(
            results_file_path, labels_file_path, thetas_in_file_path,
            delta_in_file_path, optimizer_parameters_file_path,
            thetas_out_file_path, thetas_plus_file_path,
            thetas_minus_file_path, delta_out_file_path)

        # download and store locally
        await FileService.download_to_file(results_url, results_file_path)
        await FileService.download_to_file(labels_url, labels_file_path)
        await FileService.download_to_file(thetas_in_url, thetas_in_file_path)
        await FileService.download_to_file(delta_in_url, delta_in_file_path)

        if optimizer_parameters_url is None:
            print('Using default parameterization!')
            optimizer_parameters = default_optimizer_params
        else:
            # deserialize optimizer parameters
            print('Downloading parameterization from URL: ' +
                  str(optimizer_parameters_url))
            await FileService.download_to_file(optimizer_parameters_url,
                                               optimizer_parameters_file_path)
            optimizer_parameters = NumpySerializer.deserialize(
                optimizer_parameters_file_path)
            if len(optimizer_parameters) != 5:
                raise Exception(
                    "Wrong number of optimizer parameters. 5 parameters c0 through c4 expected."
                )

        results = ResultsSerializer.deserialize(results_file_path)
        labels = NumpySerializer.deserialize(labels_file_path)
        thetas = NumpySerializer.deserialize(thetas_in_file_path)
        delta = NumpySerializer.deserialize(delta_in_file_path)

        # make sure that the labels are integers
        labels = labels.astype(int)

        thetas_out, thetas_plus, thetas_minus, delta_out, costs_curr = \
            SPSAOptimizer.optimize(results, labels, thetas, delta, iteration, optimizer_parameters, is_statevector)

        NumpySerializer.serialize(thetas_out, thetas_out_file_path)
        NumpySerializer.serialize(thetas_plus, thetas_plus_file_path)
        NumpySerializer.serialize(thetas_minus, thetas_minus_file_path)
        NumpySerializer.serialize(delta_out, delta_out_file_path)

        # generate urls
        url_root = request.host_url
        thetas_out_url = generate_url(
            url_root, 'variational-svm-classification/optimization',
            'thetas-out' + str(job_id))
        thetas_plus_url = generate_url(
            url_root, 'variational-svm-classification/optimization',
            'thetas-plus' + str(job_id))
        thetas_minus_url = generate_url(
            url_root, 'variational-svm-classification/optimization',
            'thetas-minus' + str(job_id))
        delta_url = generate_url(
            url_root, 'variational-svm-classification/optimization',
            'delta-out' + str(job_id))

    except Exception as ex:
        message = str(ex)
        status_code = 500
        return jsonify(message=message, status_code=status_code)

    return jsonify(message=message,
                   status_code=status_code,
                   thetas_out_url=thetas_out_url,
                   thetas_plus_url=thetas_plus_url,
                   thetas_minus_url=thetas_minus_url,
                   delta_url=delta_url,
                   costs_curr=costs_curr)
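
The SPSA update itself is hidden inside SPSAOptimizer.optimize. As a rough illustration of the gradient step it performs, assuming the costs at thetas_plus and thetas_minus have already been evaluated and the standard SPSA gain sequences are built from the parameters c0 through c4 (a hypothetical sketch, not the project's code):

import numpy as np

def spsa_update(thetas, cost_plus, cost_minus, delta, iteration, optimizer_parameters):
    # Hypothetical sketch: approximate the gradient from the two perturbed cost
    # evaluations and take one SPSA step with decaying gain sequences.
    c0, c1, c2, c3, c4 = optimizer_parameters
    a_k = c0 / (iteration + 1 + c4) ** c2      # learning-rate gain
    c_k = c1 / (iteration + 1) ** c3           # perturbation gain
    gradient = (cost_plus - cost_minus) / (2 * c_k * delta)
    thetas_out = thetas - a_k * gradient
    # draw a new perturbation direction for the next iteration
    new_delta = 2 * np.random.randint(2, size=len(thetas)) - 1
    thetas_plus = thetas_out + c_k * new_delta
    thetas_minus = thetas_out - c_k * new_delta
    return thetas_out, thetas_plus, thetas_minus, new_delta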