Example #1
    def __init__(self, bot):
        self.bot = bot
        self.last_result = None  # I don't really use this...
        self.app_id = bot.api_keys['wolfram']

        # our wolfram alpha client
        self.wolfram_client = wolframalpha.Client(self.app_id)

        # Globals for message removal
        self.messageHistory = set()
        self.compute_message_history = set()
        self.previousQuery = ''

        # DB interfacer for user location query localization
        self.reg = reg.Registration(self.bot)

        # Fun strings for invalid queries
        self.invalidQueryStrings = [
            "Nobody knows.", "It's a mystery.", "I have no idea.",
            "No clue, sorry!", "I'm afraid I can't let you do that.",
            "Maybe another time.", "Ask someone else.",
            "That is anybody's guess.", "Beats me.",
            "I haven't the faintest idea."
        ]

        # Regexes for IPv4 and IPv6 addresses
        self.ipv4_regex = re.compile(r'\b(?:[0-9]{1,3}\.){3}[0-9]{1,3}\b')
        self.ipv6_regex = re.compile(
            r'(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))'
        )
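
The IPv4 pattern above accepts out-of-range octets such as "999.1.1.1", because each octet is only constrained to 1-3 digits. A stricter variant is sketched below purely as an illustration; it is not part of the original bot code.

import re

# Hypothetical stricter IPv4 pattern: each octet is limited to 0-255.
octet = r'(?:25[0-5]|2[0-4][0-9]|1[0-9]{2}|[1-9]?[0-9])'
strict_ipv4_regex = re.compile(r'\b' + octet + r'(?:\.' + octet + r'){3}\b')

assert strict_ipv4_regex.search("10.0.0.1")
assert not strict_ipv4_regex.search("999.1.1.1")
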
Example #2
def update_set_application(request):
    c = {}
    c.update(csrf(request))
    data = request.POST["prior"]
    regis = registration.Registration()
    st = regis.updateRegisteredApplicantData(data)
    print(st)
    return HttpResponse(st, content_type="application/type")
Example #3
 def do_registration(self, c, s):
     if not self.is_registration(s):
         r = registration.Registration(c, s)
         self.registrations.add(r)
         c.do_registration(s)
         return r
     else:
         return None
Example #4
def registration_ui(request):
    c = {}
    c.update(csrf(request))
    data = request.POST["vol_name"]
    print("data" + data)
    try:
        regis = registration.Registration()
        st = regis.register(data)
    except Exception as e:
        st = e
    return HttpResponse(st, content_type="application/type")
Example #5
def already_ssn(request):
    c = {}
    c.update(csrf(request))
    data = request.POST["ssn"]
    front_end_str10 = json.dumps({"data": [{"guardian_ssn": "342-909-8982"}]})
    try:
        regis = registration.Registration()
        st = regis.alreadySsn(data)
    except Exception as e:
        st = e
    return HttpResponse(st, content_type="application/type")
Example #6
    def test_register(self):

        reg = registration.Registration()

        data = self.getFromCsv('sample_registration_data.csv', {})

        success = []
        error = []

        for i in range(0, len(data)):

            return_front_end_dict = '{ "data": ' + json.dumps([data[i]]) + '}'
            return_data = reg.register(return_front_end_dict)
            front_end_dict = ast.literal_eval(return_data)

            if front_end_dict['status'] == 'success':
                success.append(front_end_dict['data'][0])
            if front_end_dict['status'] == 'error':
                error.append(front_end_dict['data'][0])
                data[i]['error_message'] = front_end_dict['message']
                self.insertIntoCsv('actual_errors_data.csv', data[i])
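
The test above relies only on the envelope keys it reads back from register(): "status", "data" and "message". Purely as an illustration (these strings are hypothetical, not actual output of registration.Registration().register()), responses driving the two branches could look like:

# Hypothetical response strings matching the envelope parsed by the test;
# ast.literal_eval() accepts them because they are also valid Python dict literals.
success_example = '{"status": "success", "data": [{"applicant_id": "15"}], "message": ""}'
error_example = '{"status": "error", "data": [{"applicant_id": ""}], "message": "missing required field"}'
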
Example #7
 def __init__(self, port):
     self.registration = registration.Registration(port)
     self.handshakePackage = handshake.Handshake(self.registration.addrMe)
     self.peersLocal = {}
     self.memPool = {}
     self.miner = None
Example #8
mri_path = base_path + "mri/"
labels_path = base_path + "labels/"
masks_path = base_path + "masks/"
atlas_path = base_path + "templates/MI/"

ENABLE_PYRAMIDS = False

if ENABLE_PYRAMIDS:
    out_path = base_path + "eval/ALPHA_SMD_PYRAMIDS_2/"
else:
    out_path = base_path + "eval/ALPHA_SMD_2/"

cnt = 20

r = reg.Registration(2, exe_path, dim=3, enable_logging=True)

param = r.get_register_param_defaults()

#param.pop("weights1", None)
#param.pop("weights2", None)
param["weights1"] = "hann"
param["weights2"] = "hann"
param["learning_rate"] = "0.5"
param["relaxation_factor"] = "0.99"
param["alpha_levels"] = "7"
param["alpha_max_distance"] = "128"
param["seed"] = "1000"
param["metric"] = "alpha_smd"
param["iterations"] = "3000"
param["sampling_fraction"] = "0.05"
Example #9
 def connect(self):
     self.add_user = registration.Registration()
     self.add_user.show()
Example #10
def main():
    """Funkcja wykonująca cały program"""
    modul = modules.Modules()
    fun = function.Functions()
    file = files.Files()
    register = registration.Registration()

    files_with_code = pythonfiles.FileInDirectory()
    path = r'.\\'
    extend = 'py'
    result = files_with_code.list_directory(path, extend)

    files.Files.checking_connections_between_files(result)

    function_list1 = function.Functions.checking_connections_between_functions1(
        result)
    function_list2 = fun.checking_weight_of_connections_between_functions(
        result, function_list1)
    weight_fun = functions.write_to_file_fun_data(function_list1,
                                                  function_list2)
    function.Functions.checking_connections_between_functions(
        result, weight_fun)
    modul_list = modules.Modules.searching_for_used_modules(result)
    modules.Modules.checking_connections_between_modules(result, modul_list)

    join_list = functions.convert_list_to_list_for_cc(
        files.Files.filesConnectionList, modules.Modules.modulConnectionList)
    join_list = list(set(join_list))

    cyclomatic_complexity = functions.cyclomatic_complexity()
    cyclomatic_complexity = functions.compare(function_list1,
                                              cyclomatic_complexity)
    cyclomatic_complexity += join_list
    menu_choice = functions.menu()

    if menu_choice == 1:
        registration.Registration.write_to_file(
            "FILES", files.Files.filesConnectionList
        )  # Write file connections to the output file
        registration.Registration.write_to_file(
            "", files.Files.filesConnectionWeight)

    elif menu_choice == 2:
        registration.Registration.write_to_file(
            "Functions", function.Functions.functionsConnectionList
        )  # Write function connections to the output file
        registration.Registration.write_to_file(
            "", function.Functions.functionsConnectionWeight)
        registration.Registration.write_to_file("CYCLOMATIC_COMPLEXITY",
                                                cyclomatic_complexity)

    elif menu_choice == 3:
        registration.Registration.write_to_file(
            "Modules", files.Files.filesConnectionList
        )  # Write module connections to the output file
        registration.Registration.write_to_file(
            "", modul.Modules.modulConnectionWeight)

    elif menu_choice == 4:
        registration.Registration.write_to_file(
            "FILES", files.Files.filesConnectionList
        )  # Write file connections to the output file
        registration.Registration.write_to_file(
            "", files.Files.filesConnectionWeight)

        registration.Registration.write_to_file(
            "Functions", function.Functions.functionsConnectionList
        )  # Write function connections to the output file
        registration.Registration.write_to_file(
            "", function.Functions.functionsConnectionWeight)
        registration.Registration.write_to_file("CYCLOMATIC_COMPLEXITY",
                                                cyclomatic_complexity)

    elif menu_choice == 5:
        registration.Registration.write_to_file(
            "FILES", files.Files.filesConnectionList
        )  # Write file connections to the output file
        registration.Registration.write_to_file(
            "", files.Files.filesConnectionWeight)

        registration.Registration.write_to_file(
            "Modules", modules.Modules.modulConnectionList
        )  # Write module connections to the output file
        registration.Registration.write_to_file(
            "", modules.Modules.modulConnectionWeight)

    elif menu_choice == 6:
        registration.Registration.write_to_file(
            "Functions", function.Functions.functionsConnectionList
        )  # Write function connections to the output file
        registration.Registration.write_to_file(
            "", function.Functions.functionsConnectionWeight)

        registration.Registration.write_to_file(
            "Modules", modules.Modules.modulConnectionList
        )  # Write module connections to the output file
        registration.Registration.write_to_file(
            "", modules.Modules.modulConnectionWeight)
        registration.Registration.write_to_file("CYCLOMATIC_COMPLEXITY",
                                                cyclomatic_complexity)

    elif menu_choice == 7:
        registration.Registration.write_to_file(
            "FILES", files.Files.filesConnectionList
        )  # Write file connections to the output file
        registration.Registration.write_to_file(
            "", files.Files.filesConnectionWeight)

        registration.Registration.write_to_file(
            "Functions", function.Functions.functionsConnectionList
        )  # Write function connections to the output file
        registration.Registration.write_to_file(
            "", function.Functions.functionsConnectionWeight)

        registration.Registration.write_to_file(
            "Modules", modules.Modules.modulConnectionList
        )  # Write module connections to the output file
        registration.Registration.write_to_file(
            "", modules.Modules.modulConnectionWeight)
        registration.Registration.write_to_file("CYCLOMATIC_COMPLEXITY",
                                                cyclomatic_complexity)

    else:
        print("Wybrałeś opcję z poza zakresu")
        main()
Example #11
def main(USE_PYRAMIDS, RANDOMIZE_TRANSFORMS, DO_AFFINE_REGISTRATION,
         DO_TRANSFORM_LANDMARKS, EVAL_LANDMARKS, metric_name,
         transformation_size, noise_level):

    # Windows
    #dataset_path = "C:/cygwin64/home/johan/itkAlphaCut/assets/01.png"
    #bin_path = "C:/cygwin64/home/johan/itkAlphaCut-release/Release/"

    # Linux
    dataset_path = "/home/johof680/work/itkAlphaCut-4j/itkAlphaCut/assets/01.png"
    bin_path = "/home/johof680/work/itkAlphaAMD-build2/"

    #weight_path = "/home/johof680/work/VironovaRegistrationImages/"
    out_path = dataset_path  #"/home/johof680/work/VironovaRegistrationImages/"

    # image extension
    im_ext = "tif"

    # number of registrations to perform
    count = 1000

    # create registration object
    parallel_count = 6
    image_dimensions = 2
    r = reg.Registration(parallel_count,
                         bin_path,
                         dim=image_dimensions,
                         enable_logging=True)

    out_path1 = "/home/johof680/work/itkAlphaCut-4j/cilia_param_search8/" + transformation_size + "/" + noise_level + "/"
    #out_path1 = "C:/cygwin64/home/johan/cilia_random/large4/"

    # Generate random transforms

    if RANDOMIZE_TRANSFORMS:

        rnd_param = r.get_random_transforms_param_defaults()
        if noise_level == "none":
            rnd_param["noise"] = "0.0"
        elif noise_level == "large":
            rnd_param["noise"] = "0.1"
        else:
            raise "Illegal noise level."

        if transformation_size == "small":
            rnd_param["rotation"] = "10"
            rnd_param["translation"] = "10"
            rnd_param["min_rotation"] = "0"
            rnd_param["min_translation"] = "0"
        elif transformation_size == "medium":
            rnd_param["rotation"] = "20"
            rnd_param["translation"] = "20"
            rnd_param["min_rotation"] = "10"
            rnd_param["min_translation"] = "10"
        elif transformation_size == "large":
            rnd_param["rotation"] = "30"
            rnd_param["translation"] = "30"
            rnd_param["min_rotation"] = "20"
            rnd_param["min_translation"] = "20"
        elif transformation_size == "all":
            rnd_param["rotation"] = "30"
            rnd_param["translation"] = "30"
            rnd_param["min_rotation"] = "0"
            rnd_param["min_translation"] = "0"
        else:
            raise "Illegal transformation size."

        r.random_transforms(dataset_path, out_path1, count, rnd_param)

        st.create_directory(out_path1)

        r.run("Random transformations")

    in_path = out_path1
    out_path1 = out_path1 + metric_name + "/"
    st.create_directory(out_path1)

    #learning_rates_list = [0.001, 0.01, 0.1, 0.5, 1.0]
    #sampling_fractions_list = [0.01, 0.05, 0.1, 0.2, 0.5, 1.0]
    #normalizations_list = [0.0, 0.01, 0.025, 0.05]
    #sampling_fractions_list = [0.01, 0.05, 0.1, 0.2, 0.5, 1.0]

    exponents = [
        -3.75, -3.5, -3.25, -3, -2.75, -2.5, -2.25, -2.0, -1.75, -1.5, -1.25,
        -1.0, -0.75, -0.5, -0.25, 0.0
    ]
    #sampling_fractions_list = [np.power(10.0, i) for i in exponents]
    #sampling_fractions_list = [0.1]
    #learning_rates_list = [0.5]
    #normalizations_list = [0.0]
    #alpha_levels_list = [7]

    sampling_fractions_list = [0.1]
    learning_rates_list = [0.5]
    normalizations_list = [0.0]
    alpha_levels_list = range(1, 32)

    param_comb = [(a, b, c, d) for a in learning_rates_list
                  for b in sampling_fractions_list for c in normalizations_list
                  for d in alpha_levels_list]

    print(
        "Parameter format (learning_rate, sampling_fraction, normalization percentile)"
    )
    print(param_comb)

    # Do the registrations

    def register(rp, input1_path, input2_path, output_path, metric, cnt):
        for i in xrange(cnt):
            for j in xrange(len(param_comb)):
                (learning_rate, sampling_fraction, normalization,
                 alpha_levels) = param_comb[j]

                pth = output_path + ("registration_%d_%d/" % (j + 1, i + 1))
                rpar = rp.get_register_param_defaults()
                rpar.pop("weights1", None)
                rpar.pop("weights2", None)
                in1 = input1_path + "ref_image_%d.%s" % (i + 1, im_ext)
                in2 = input2_path + "transformed_image_%d.%s" % (i + 1, im_ext)
                msk2 = input2_path + "transformed_mask_%d.%s" % (i + 1, im_ext)
                if USE_PYRAMIDS:
                    rpar["multiscale_sampling_factors"] = "4x2x1"
                    rpar["multiscale_smoothing_sigmas"] = "5x3x0"
                else:
                    rpar["multiscale_sampling_factors"] = "1"
                    rpar["multiscale_smoothing_sigmas"] = "0"
                rpar["metric"] = metric
                rpar["learning_rate"] = str(learning_rate)
                rpar["alpha_outlier_rejection"] = "0.0"
                rpar["sampling_fraction"] = str(sampling_fraction)
                rpar["normalization"] = str(normalization)
                rpar["alpha_levels"] = str(alpha_levels)

                rpar["mask1"] = "circle"
                rpar["mask2"] = msk2
                rp.register_affine(in1, in2, pth, rpar)

    #def landmark_transform(self, landmarks_in_path, out_path, out_name, transform_path):
    def transform_landmarks(rp, landmark_path, transform_path_base, cnt):
        for i in xrange(cnt):
            for j in xrange(len(param_comb)):
                transform_path = transform_path_base + (
                    "registration_%d_%d/transform_complete.txt" %
                    (j + 1, i + 1))
                input_path = landmark_path + "transformed_landmarks_%d.csv" % (
                    i + 1)
                out_name = "registered_landmarks_%d_%d.csv" % (j + 1, i + 1)

                rp.landmark_transform(input_path, transform_path_base,
                                      out_name, transform_path)

    if DO_AFFINE_REGISTRATION:
        register(r, in_path, in_path, out_path1, metric_name, count)
        r.run("Affine Registration")

    if DO_TRANSFORM_LANDMARKS:
        transform_landmarks(r, in_path, out_path1, count)
        r.run("Transform Landmarks")

    def eval_landmarks(ref_landmark_path, output_path, cnt):
        ref_lm = landmarks.read_csv(
            ref_landmark_path + "ref_landmarks.csv",
            False)  #csv_2_np.read_csv(out1_path + "ref_landmarks.csv", False)
        out_succ_freq = []
        out_succ_means = []
        out_means = []
        out_stddevs = []
        out_dists = []
        out_dist_summary = []

        for j in xrange(len(param_comb)):
            dists = np.zeros(cnt)
            for i in xrange(cnt):
                tra_lm = landmarks.read_csv(output_path +
                                            "registered_landmarks_%d_%d.csv" %
                                            (j + 1, i + 1))
                dists[i] = landmarks.mean_euclidean(ref_lm, tra_lm)
            succ_freq = np.count_nonzero(dists <= 1.0) / float(cnt)
            succ_means = np.mean(dists[np.where(dists <= 1.0)])

            out_succ_freq.append(succ_freq)
            out_succ_means.append(succ_means)
            out_means.append(np.mean(dists))
            out_stddevs.append(np.std(dists))
            out_dists.append(dists)
            out_dist_summary.append(dt.make_distribution(dists))
            #np.sort(dists)
        return (out_succ_freq, out_succ_means, out_means, out_stddevs,
                out_dists, out_dist_summary)
        #print("%.4d: %f" % (i+1, dist))

        #print(eval_landmarks(out_path1, out_path1, count))

    def filter_set(values, tup, tup_index, tup_value):
        vals = []
        tups = []
        for i in xrange(len(tup)):
            if tup[i][tup_index] == tup_value:
                tups.append(tup[i])
                vals.append(values[i])
        return (np.array(vals), tups)

    if EVAL_LANDMARKS:
        (succ_freq, succ_means, mn, stddev, full_distri,
         distri) = eval_landmarks(in_path, out_path1, count)
        np.set_printoptions(precision=5)
        np.set_printoptions(suppress=True)
        print("")
        #print(full_distri)
        #print(distri)
        print("Succ freq: ")
        print(succ_freq)
        print("Succ means: ")
        print(succ_means)
        print("Means: ")
        print(mn)
        print(stddev)

        M = 9
        if M > len(param_comb):
            M = len(param_comb)

        argsort = np.argsort(mn)

        best_indices = argsort[0:M]
        best_params = [param_comb[ind] for ind in best_indices]
        best_vals = [mn[ind] for ind in best_indices]

        worst_indices = argsort[len(argsort) - M:]
        worst_params = [param_comb[ind] for ind in worst_indices]
        worst_vals = [mn[ind] for ind in worst_indices]

        print("Best param: " + str(best_params))
        print("Best vals: " + str(best_vals))

        print("Worst param: " + str(worst_params))
        print("Worst vals: " + str(worst_vals))

        for i in xrange(len(full_distri)):
            np.savetxt(out_path1 + metric_name + "%d.csv" % (i + 1),
                       full_distri[i],
                       delimiter=",")

        print("For learning rate 0.5")
        (mn_lr_0_5, tup_lr_0_5) = filter_set(mn, param_comb, 0, 0.5)
        print(mn_lr_0_5)
        print(tup_lr_0_5)

        #print(str(param_comb))
        #make_plot(distri)

    # Eval

    # param = r.get_register_param_defaults()

    # param["weights1"] = "hann"
    # param["weights2"] = "hann"
    # param["learning_rate"] = "0.05"
    # param["relaxation_factor"] = "0.99"
    # param["alpha_levels"] = "7"
    # param["alpha_max_distance"] = "128"
    # param["seed"] = "1000"
    # param["metric"] = "alpha_smd"
    # param["iterations"] = "1000"
    # param["sampling_fraction"] = "0.001"
    # param["spacing_mode"] = "default"
    # param["normalization"] = "0.005"
    # param["multiscale_sampling_factors"] = "4x2x1"
    # param["multiscale_smoothing_sigmas"] = "4.0x2.0x1.0"

    #def make_dir(path, index):
    #  return st.makedir_string(path + "Out%.3d" % (index))

    #def make_register(index, in1, in2, out, params):
    #  return exe_path + " -in1 " + in1 + " -in2 " + in2 + " -out " + out + st.param_dict_to_string(params)

    def ind_str(prefix, index, postfix):
        return prefix + ("%.3d" % index) + postfix
Example #12
def main(USE_PYRAMIDS, RANDOMIZE_TRANSFORMS, DO_AFFINE_REGISTRATION,
         DO_TRANSFORM_LANDMARKS, EVAL_LANDMARKS, metric_name,
         transformation_size, noise_level):

    # Windows
    #dataset_path = "c:/dev/data/VironovaRegistrationImages/"
    #bin_path = "C:/cygwin64/home/johan/itkAlphaCut-release/Release/"

    # Linux
    dataset_path = "/home/johof680/work/LPBA40_for_ANTs/mri/S01.nii.gz"
    bin_path = "/home/johof680/work/itkAlphaCut-4j/build-release/"

    # image extension
    im_ext = "nii.gz"

    # number of registrations to perform
    count = 200

    # create registration object
    parallel_count = 6
    image_dimensions = 3
    r = reg.Registration(parallel_count,
                         bin_path,
                         dim=image_dimensions,
                         enable_logging=True)

    out_path1 = "/home/johof680/work/itkAlphaCut-4j/lpba40_random_5/" + transformation_size + "/" + noise_level + "/"

    #metric_name = "ssd"

    #USE_PYRAMIDS = True

    #RANDOMIZE_TRANSFORMS = True
    #DO_AFFINE_REGISTRATION = True
    #DO_TRANSFORM_LANDMARKS = True
    #EVAL_LANDMARKS = True

    # Generate random transforms

    if RANDOMIZE_TRANSFORMS:

        rnd_param = r.get_random_transforms_param_defaults()
        if noise_level == "none":
            rnd_param["noise"] = "0.0"
        elif noise_level == "large":
            rnd_param["noise"] = "0.1"
        else:
            raise "Illegal noise level."

        if transformation_size == "small":
            rnd_param["rotation"] = "10"
            rnd_param["translation"] = "10"
            rnd_param["min_rotation"] = "0"
            rnd_param["min_translation"] = "0"
        elif transformation_size == "medium":
            rnd_param["rotation"] = "15"
            rnd_param["translation"] = "15"
            rnd_param["min_rotation"] = "10"
            rnd_param["min_translation"] = "10"
        elif transformation_size == "large":
            rnd_param["rotation"] = "20"
            rnd_param["translation"] = "20"
            rnd_param["min_rotation"] = "15"
            rnd_param["min_translation"] = "15"
        elif transformation_size == "all":
            rnd_param["rotation"] = "20"
            rnd_param["translation"] = "20"
            rnd_param["min_rotation"] = "0"
            rnd_param["min_translation"] = "0"
        else:
            raise "Illegal transformation size."

        #rnd_param["format_ext"] = im_ext

        r.random_transforms(dataset_path, out_path1, count, rnd_param)

        st.create_directory(out_path1)

        r.run("Random transformations")

    in_path = out_path1
    out_path1 = out_path1 + metric_name + "/"
    if USE_PYRAMIDS:
        out_path1 = out_path1 + "w_pyramid/"
    else:
        out_path1 = out_path1 + "wo_pyramid/"
    st.create_directory(out_path1)

    # Do the registrations

    def register(rp, input1_path, input2_path, output_path, metric, cnt):
        for i in xrange(cnt):
            pth = output_path + ("registration_%d/" % (i + 1))
            rpar = rp.get_register_param_defaults()
            rpar.pop("weights1", None)
            rpar.pop("weights2", None)
            in1 = input1_path + "ref_image_%d.%s" % (i + 1, im_ext)
            in2 = input2_path + "transformed_image_%d.%s" % (i + 1, im_ext)
            msk2 = input2_path + "transformed_mask_%d.%s" % (i + 1, im_ext)
            if USE_PYRAMIDS:
                rpar["multiscale_sampling_factors"] = "4x2x1"
                rpar["multiscale_smoothing_sigmas"] = "5x3x0"
            else:
                rpar["multiscale_sampling_factors"] = "1"
                rpar["multiscale_smoothing_sigmas"] = "0"
            rpar["metric"] = metric
            rpar["learning_rate"] = "0.5"
            rpar["alpha_outlier_rejection"] = "0.0"
            rpar["sampling_fraction"] = "0.01"

            rpar["mask1"] = "circle"
            rpar["mask2"] = msk2
            rp.register_affine(in1, in2, pth, rpar)

    #def landmark_transform(self, landmarks_in_path, out_path, out_name, transform_path):
    def transform_landmarks(rp, landmark_path, transform_path_base, cnt):
        for i in xrange(cnt):
            transform_path = transform_path_base + (
                "registration_%d/transform_complete.txt" % (i + 1))
            input_path = landmark_path + "transformed_landmarks_%d.csv" % (i +
                                                                           1)
            out_name = "registered_landmarks_%d.csv" % (i + 1)

            rp.landmark_transform(input_path, transform_path_base, out_name,
                                  transform_path)

    if DO_AFFINE_REGISTRATION:
        register(r, in_path, in_path, out_path1, metric_name, count)
        r.run("Affine Registration")

    if DO_TRANSFORM_LANDMARKS:
        transform_landmarks(r, in_path, out_path1, count)
        r.run("Transform Landmarks")

    def eval_landmarks(ref_landmark_path, output_path, cnt):
        ref_lm = landmarks.read_csv(
            ref_landmark_path + "ref_landmarks.csv",
            False)  #csv_2_np.read_csv(out1_path + "ref_landmarks.csv", False)
        dists = np.zeros(cnt)
        for i in xrange(cnt):
            tra_lm = landmarks.read_csv(output_path +
                                        "registered_landmarks_%d.csv" %
                                        (i + 1))
            dists[i] = landmarks.mean_euclidean(ref_lm, tra_lm)
            #np.sort(dists)
        return (np.mean(dists), np.std(dists), dists,
                dt.make_distribution(dists))
        #print("%.4d: %f" % (i+1, dist))

        #print(eval_landmarks(out_path1, out_path1, count))

    # def count_less_than(sorted_distrib, value):
    #     return np.searchsorted(sorted_distrib, value)

    # def compute_hist(distrib, bins):
    #     yy = np.sort(distrib)
    #     x = np.arange(0, 30.0, 30.0 / bins)
    #     h = np.zeros(bins)
    #     for i in xrange(bins):
    #         h[i] = count_less_than(yy, x[i])
    #     return (x, h)

    # def make_plot(distrib):
    #     fig, ax = plt.subplots(figsize = (8, 4))
    #     n_bins = 500
    #     (x, h) = compute_hist(distrib, n_bins)
    #     #n, bins, patches = ax.hist(distrib, n_bins, normed=1, color=c[i], cumulative=True, label='alpha_smd')

    #     c = ['r', 'b', 'g', 'y', 'k']
    #     ax.fill_between(x, 0, h)
    #     #histtype='step'
    #     ax.grid(True)
    #     ax.legend(loc='right')
    #     ax.set_title('Registration results')
    #     ax.set_xlabel('Registration error (px)')
    #     ax.set_ylabel('Success rate')
    #     ax.set_ylim(0.0, 1.0)
    #     ax.set_xlim(0.0, 1.0)
    #     plt.show()

    if EVAL_LANDMARKS:
        (mn, stddev, full_distri,
         distri) = eval_landmarks(in_path, out_path1, count)
        np.set_printoptions(precision=5)
        np.set_printoptions(suppress=True)
        print("")
        print(distri)
        print(mn)
        print(stddev)
        np.savetxt(out_path1 + metric_name + ".csv",
                   full_distri,
                   delimiter=",")
        #make_plot(distri)

    # Eval

    # param = r.get_register_param_defaults()

    # param["weights1"] = "hann"
    # param["weights2"] = "hann"
    # param["learning_rate"] = "0.05"
    # param["relaxation_factor"] = "0.99"
    # param["alpha_levels"] = "7"
    # param["alpha_max_distance"] = "128"
    # param["seed"] = "1000"
    # param["metric"] = "alpha_smd"
    # param["iterations"] = "1000"
    # param["sampling_fraction"] = "0.001"
    # param["spacing_mode"] = "default"
    # param["normalization"] = "0.005"
    # param["multiscale_sampling_factors"] = "4x2x1"
    # param["multiscale_smoothing_sigmas"] = "4.0x2.0x1.0"

    #def make_dir(path, index):
    #  return st.makedir_string(path + "Out%.3d" % (index))

    #def make_register(index, in1, in2, out, params):
    #  return exe_path + " -in1 " + in1 + " -in2 " + in2 + " -out " + out + st.param_dict_to_string(params)

    def ind_str(prefix, index, postfix):
        return prefix + ("%.3d" % index) + postfix
Example #13
    def post(self):
        minlen = 4 + 4 + 20 + 4 + 1 + 4 + 1
        STR_VERSERVER = '01060000'
        INT_VERCLIENT = 0x01060000
        STR_VERCLIENT = '1.6'

        # must be able to query for https
        if 'HTTPS' not in os.environ:
            self.resp_simple(0, 'HTTPS environment variable not found')
            return

        # must be able to query for server version
        if 'CURRENT_VERSION_ID' not in os.environ:
            self.resp_simple(
                0, 'CURRENT_VERSION_ID environment variable not found')
            return

        HTTPS = os.environ.get('HTTPS', 'off')
        CURRENT_VERSION_ID = os.environ.get('CURRENT_VERSION_ID',
                                            STR_VERSERVER)

        # SSL must be enabled
        if HTTPS != 'on':
            self.resp_simple(0, 'Secure socket required.')
            return

        # get the data from the post
        self.response.headers['Content-Type'] = 'application/octet-stream'
        data = self.request.body
        size = len(data)

        # size check
        if size < minlen:
            logging.debug("in body '%s'" % data)
            logging.debug("in size %d" % size)
            self.resp_simple(0, 'Request was formatted incorrectly.')
            return

        # unpack all incoming data
        client = (struct.unpack("!i", data[0:4]))[0]

        # client version check
        if client < INT_VERCLIENT:
            self.resp_simple(0, (
                'Client version mismatch; %s required.  Download latest client release first.'
                % STR_VERCLIENT))
            return

        server = int(CURRENT_VERSION_ID[0:8], 16)

        # init
        submissionAuth = None
        submissionType = 1

        # unpack all incoming data, skip client version
        pos = 4

        lenkeyid = (struct.unpack("!i", data[pos:(pos + 4)]))[0]
        pos = pos + 4
        keyId = data[pos:(pos + lenkeyid)]
        pos = pos + lenkeyid

        lensubtok = (struct.unpack("!i", data[pos:(pos + 4)]))[0]
        pos = pos + 4
        submissionToken = str(data[pos:(pos + lensubtok)])
        pos = pos + lensubtok

        if lensubtok >= 32:  # 256-bit original SHA-3 minimum, before base-64 encoding
            submissionAuth = submissionToken
            submissionType = 1  # version 1 of authentication

        lenregid = (struct.unpack("!i", data[pos:(pos + 4)]))[0]
        pos = pos + 4
        registrationId = str(data[pos:(pos + lenregid)])
        pos = pos + lenregid

        devtype = (struct.unpack("!i", data[pos:(pos + 4)]))[0]
        pos = pos + 4

        # additional verifying for self signing
        if size > pos:  # still has data
            lennonce = (struct.unpack("!i", data[pos:(pos + 4)]))[0]
            pos = pos + 4
            nonce = str(data[pos:(pos + lennonce)])
            pos = pos + lennonce

            lenpubkey = (struct.unpack("!i", data[pos:(pos + 4)]))[0]
            pos = pos + 4
            pubkey = str(data[pos:(pos + lenpubkey)])
            pos = pos + lenpubkey
            plain_pos = pos

            sig_len = (struct.unpack("!i", data[pos:(pos + 4)]))[0]
            pos = pos + 4
            sig = data[pos:(pos + sig_len)]
            pos = pos + sig_len

            # signature verification
            if lenpubkey > 0:
                # load RSA public key
                rsa_key = RSA.importKey(base64.decodestring(pubkey))
                # verify signature
                h = SHA.new()
                h.update(data[:plain_pos])
                verifier = PKCS1_v1_5.new(rsa_key)
                if verifier.verify(h, sig):
                    submissionAuth = pubkey
                    submissionType = 2  # version 2 of authentication
                    logging.debug(
                        'The signature is authentic. Registration continues.')
                else:
                    logging.error(
                        'The signature is not authentic. Registration stops.')
                    return

        # REGISTRATION STORAGE =============================================
        # check if registration needs to be authenticated before insertion or update
        query = registration.Registration.all().order('-inserted')
        query.filter('key_id =', keyId)
        num = query.count()

        # key_id exists, submissionAuth must match
        if num >= 1:
            reg_old = query.get()  # only want the oldest

            # follow update logic
            updateOld = False
            if submissionType > reg_old.submission_type:
                updateOld = True  # authentication type upgraded
            elif submissionAuth == reg_old.submission_token:
                updateOld = True  # previous authentication matches

            # token is authentic
            if updateOld:
                # if record exists, update it
                if registrationId == reg_old.registration_id:
                    # update time and active status only
                    reg_old.active = True
                    reg_old.submission_token = submissionAuth
                    reg_old.submission_type = submissionType
                    reg_old.put()
                    key = reg_old.key()
                    if not key.has_id_or_name():
                        self.resp_simple(0, 'Unable to update registration.')
                        return
                # if record missing, insert it
                else:
                    reg_new = registration.Registration(
                        key_id=keyId,
                        submission_token=submissionAuth,
                        submission_type=submissionType,
                        registration_id=registrationId,
                        notify_type=devtype,
                        client_ver=client)
                    reg_new.put()
                    key = reg_new.key()
                    if not key.has_id_or_name():
                        self.resp_simple(0,
                                         'Unable to create new registration.')
                        return

            # token not authentic, just log it
            else:
                logging.info(
                    'Registration failed: submission token in table %s, not matching submitted submission token %s'
                    % (reg_old.submission_token[0:30], submissionAuth[0:30]))

        # key_id is new, submissionAuth can be inserted instantly
        else:
            reg_new = registration.Registration(
                key_id=keyId,
                submission_token=submissionAuth,
                submission_type=submissionType,
                registration_id=registrationId,
                notify_type=devtype,
                client_ver=client)
            reg_new.put()
            key = reg_new.key()
            if not key.has_id_or_name():
                self.resp_simple(0, 'Unable to create new registration.')
                return

        # SEND RESPONSE =========================================
        # this client background process does not need to log back insert/update errors to the client
        self.response.out.write('%s' % struct.pack('!i', server))
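
For reference, the handler above unpacks the request body in a fixed order: a 4-byte client version, then length-prefixed key id, submission token and registration id, a 4-byte device type, and an optional nonce/public-key/signature block. A sketch of a matching client-side encoder follows (a hypothetical helper, assuming Python 2 byte strings like the handler itself; the optional self-signing block is omitted):

import struct

# Hypothetical packer mirroring the parse order of the handler above.
def pack_registration_request(key_id, submission_token, registration_id,
                              devtype, client_version=0x01060000):
    body = struct.pack("!i", client_version)
    body += struct.pack("!i", len(key_id)) + key_id
    body += struct.pack("!i", len(submission_token)) + submission_token
    body += struct.pack("!i", len(registration_id)) + registration_id
    body += struct.pack("!i", devtype)
    return body
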
Example #14
 def registrationFormCommand():
     win.destroy()
     registrationObject = registration.Registration()
     registrationObject.show()
"legal_form":"",
"helmet":"",
"boot":"",
"sleeping_bag":"",
"water_bottle":"",
"sunscreen":"",
"bugs_spray":"",
"check_in_status":"",
"application_status":"",
"guardian_ssn":"342-90-8982"
}]})

front_end_str10 = json.dumps({"data" :[{"guardian_ssn":"342-909-8981"}]})

front_end_str11 = json.dumps({"data" :[{"applicant_id":"50"}]})

test = json.dumps({"data" :[{"camp_time_slots":"2017-02-12 00:00:00.000000","applicant_id":"15"}]})

regis = registration.Registration()
#st = regis.register(front_end_str1)
#st = regis.register(front_end_str3)
#st = regis.register(front_end_str4)
#st = regis.register(front_end_str5)
#st = regis.alreadySsn(front_end_str10)

st = regis.viewRegisteredApplicant(test)
#st = regis.updateRegisteredApplicantData(front_end_str5)

print(st)

Example #16
def main(USE_PYRAMIDS, RANDOMIZE_TRANSFORMS, DO_AFFINE_REGISTRATION, DO_TRANSFORM_LANDMARKS, EVAL_LANDMARKS, metric_name, transformation_size, noise_level, reverse):

  out_path = dataset_path

  # image extension
  im_ext = "tif"

  # number of registrations to perform
  count = 1000

  # create registration object
  parallel_count = 6
  image_dimensions = 2
  r = reg.Registration(parallel_count, bin_path, dim = image_dimensions, enable_logging = True)

  out_path1 = "/home/johof680/work/itkAlphaAMD-build/cilia/" + transformation_size + "/" + noise_level + "/"
  
  # Generate random transforms

  if RANDOMIZE_TRANSFORMS:

    rnd_param = r.get_random_transforms_param_defaults()
    if noise_level == "none":
      rnd_param["noise"] = "0.0"
    elif noise_level == "large":
      rnd_param["noise"] = "0.1"
    else:
      raise "Illegal noise level."
    
    if transformation_size == "small":
      rnd_param["rotation"] = "10"
      rnd_param["translation"] = "10"
      rnd_param["min_rotation"] = "0"
      rnd_param["min_translation"] = "0"
    elif transformation_size == "medium":
      rnd_param["rotation"] = "20"
      rnd_param["translation"] = "20"
      rnd_param["min_rotation"] = "10"
      rnd_param["min_translation"] = "10"
    elif transformation_size == "large":
      rnd_param["rotation"] = "30"
      rnd_param["translation"] = "30"
      rnd_param["min_rotation"] = "20"
      rnd_param["min_translation"] = "20"
    elif transformation_size == "all":
      rnd_param["rotation"] = "30"
      rnd_param["translation"] = "30"
      rnd_param["min_rotation"] = "0"
      rnd_param["min_translation"] = "0"
    else:
      raise "Illegal transformation size."

    r.random_transforms(dataset_path, out_path1, count, rnd_param)

    st.create_directory(out_path1)

    r.run("Random transformations")

  in_path = out_path1
  if reverse:
    out_path1 = out_path1 + metric_name + "_reverse/"
  else:
    out_path1 = out_path1 + metric_name + "/"
  st.create_directory(out_path1)

  # Do the registrations

  def register(rp, input1_path, input2_path, output_path, metric, cnt):
      for i in xrange(cnt):
          pth = output_path + ("registration_%d/" % (i+1))
          rpar = rp.get_register_param_defaults()
          rpar.pop("weights1", None)
          rpar.pop("weights2", None)
          msk2 = input2_path + "transformed_mask_%d.%s" % (i+1, im_ext)
          if reverse:
              rpar["mask2"] = "circle"
              rpar["mask1"] = msk2           
              in2 = input1_path + "ref_image_%d.%s" %(i+1, im_ext)
              in1 = input2_path + "transformed_image_%d.%s" % (i+1, im_ext)
          else:
              rpar["mask1"] = "circle"
              rpar["mask2"] = msk2           
              in1 = input1_path + "ref_image_%d.%s" %(i+1, im_ext)
              in2 = input2_path + "transformed_image_%d.%s" % (i+1, im_ext)           
          if USE_PYRAMIDS:
            rpar["multiscale_sampling_factors"] = "4x2x1"
            rpar["multiscale_smoothing_sigmas"] = "5x3x0"
          else:
            rpar["multiscale_sampling_factors"] = "1"
            rpar["multiscale_smoothing_sigmas"] = "0"
          rpar["metric"] = metric
          rpar["learning_rate"] = "0.5"
          rpar["alpha_outlier_rejection"] = "0.0"
          rpar["sampling_fraction"] = "0.1"
          rpar["normalization"] = "0.05"

          rp.register_affine(in1, in2, pth, rpar)

    #def landmark_transform(self, landmarks_in_path, out_path, out_name, transform_path):
  def transform_landmarks(rp, landmark_path, landmark_prefix, transform_path_base, cnt):
      for i in xrange(cnt):
          transform_path = transform_path_base + ("registration_%d/transform_complete.txt" % (i+1))
          if reverse:
            input_path = landmark_path + landmark_prefix + ".csv"
          else:
            input_path = landmark_path + landmark_prefix + "_%d.csv" % (i + 1)
          out_name = "registered_landmarks_%d.csv" % (i + 1)       

          rp.landmark_transform(input_path, transform_path_base, out_name, transform_path)

  if DO_AFFINE_REGISTRATION:
      register(r, in_path, in_path, out_path1, metric_name, count)
      r.run("Affine Registration")

  if DO_TRANSFORM_LANDMARKS:
      if reverse:
          transform_landmarks(r, in_path, "ref_landmarks", out_path1, count)
      else:
          transform_landmarks(r, in_path, "transformed_landmarks", out_path1, count)
      r.run("Transform Landmarks")


  def eval_landmarks(ref_landmark_path, output_path, cnt):
      if reverse:
          dists = np.zeros(cnt)
          for i in xrange(cnt):
              ref_lm = landmarks.read_csv(in_path + "transformed_landmarks_%d.csv" % (i+1), False) #csv_2_np.read_csv(out1_path + "ref_landmarks.csv", False)
              tra_lm = landmarks.read_csv(output_path + "registered_landmarks_%d.csv" % (i+1))
              dists[i] = landmarks.mean_euclidean(ref_lm, tra_lm)
          return (np.mean(dists), np.std(dists), dists, dt.make_distribution(dists))
      else:
          ref_lm = landmarks.read_csv(ref_landmark_path + "ref_landmarks.csv", False) #csv_2_np.read_csv(out1_path + "ref_landmarks.csv", False)
          dists = np.zeros(cnt)
          for i in xrange(cnt):
              tra_lm = landmarks.read_csv(output_path + "registered_landmarks_%d.csv" % (i+1))
              dists[i] = landmarks.mean_euclidean(ref_lm, tra_lm)
          return (np.mean(dists), np.std(dists), dists, dt.make_distribution(dists))

  if EVAL_LANDMARKS:
    (mn, stddev, full_distri, distri) = eval_landmarks(in_path, out_path1, count)
    np.set_printoptions(precision=5)
    np.set_printoptions(suppress=True)
    print("")
    #print(full_distri)
    print(distri)
    print(mn)
    print(stddev)
    print("Metric:" + metric_name)
    print("Transformation: " + transformation_size)
    print("Noise level: " + noise_level)
    print("IsReverse: " + str(reverse))
    np.savetxt(out_path1 + metric_name + ".csv", full_distri, delimiter=",")

  def ind_str(prefix, index, postfix):
    return prefix + ("%.3d" % index) + postfix
Example #17
import registration as reg

import numpy

points_fixed = numpy.random.randn(100, 3)

transform = reg.Affine()

transform.parameter[:] = transform.identity
transform.parameter[0] = numpy.pi / 8.

points_moving = transform.transform_points(points_fixed)

metric = reg.ExactLandmarkL2(points_moving, points_fixed, transform=None)

opt = reg.ModifiedLBFGS(optimizer_args={'factr': 1, 'pgtol': 1e-10, 'disp': 1})

registration = reg.Registration(
    model=transform,
    metric=metric,
    optimizer=opt
)

registration.register(points_moving)

points_moving_transformed = transform.transform_points(points_moving)

print "Initial Maximum MSE:", ((points_fixed - points_moving) ** 2).max()
print "Registered Maximum MSE:", ((points_fixed - points_moving_transformed) ** 2).max()
Example #18
 def register():
     registration.Registration(MAIN_WINDOW)