Пример #1
0
 def fill_steps(self, run_path, num_agent):
     """Fill ``self.dcnum[i, step]`` with the number of categories the
     agent associates with word ``i`` at each recorded simulation step.

     :param run_path: root path of a single simulation run.
     :param num_agent: index of the agent inside ``population.agents``.
     """
     for step_path in PathProvider(run_path).get_data_paths():
         current_step, population = pickle.load(step_path.open('rb'))
         agent = population.agents[num_agent]
         # enumerate() replaces the original list.index() lookup inside the
         # loop, which made this quadratic in the lexicon size.
         for i, word in enumerate(self.whole_lexicon):
             try:
                 self.dcnum[i, current_step] = sum(
                     agent.get_categories_by_word(word) > 0)
             except ValueError:
                 # Best-effort: leave the cell untouched when the agent
                 # cannot score this word (original behavior preserved).
                 pass
Пример #2
0
 def get_whole_lexicon(self, run_path, num_agent):
     """Collect every word the given agent used across all recorded
     steps of the run and store the result in ``self.whole_lexicon``.

     :param run_path: root path of a single simulation run.
     :param num_agent: index of the agent inside ``population.agents``.
     """
     self.whole_lexicon = set()
     for step_path in PathProvider(run_path).get_data_paths():
         _, population = pickle.load(step_path.open('rb'))
         agent = population.agents[num_agent]
         if self.active_only:
             step_words = agent.get_active_lexicon(self.stimuluses)
         else:
             step_words = agent.get_lexicon()
         self.whole_lexicon = self.whole_lexicon.union(step_words)
     # cast to list to preserve the order
     self.whole_lexicon = list(self.whole_lexicon)
Пример #3
0
    return (cv.normalize(
        image, dst=None, alpha=0, beta=65535, norm_type=cv.NORM_MINMAX) /
            256).astype('uint8')


# Total 25 - 38 %
def processing_f1(img):
    """Binarize ``img`` (Gaussian blur + Otsu threshold) and clean the
    mask with a dilate-then-erode pass using a 5x5 box kernel.
    """
    kernel = np.ones((5, 5), np.uint8)
    smoothed = cv.GaussianBlur(img, (5, 5), 0)
    _, binary = cv.threshold(smoothed, 0, 255,
                             cv.THRESH_BINARY + cv.THRESH_OTSU)
    grown = cv.dilate(binary, kernel, iterations=12)
    return cv.erode(grown, kernel, iterations=5)


# Batch ellipse detection: run the preprocessing pipeline over every image
# in every category and (presumably) record the detected ellipses.
path_provider = PathProvider()
ellipse_detector = EllipseDetector()
results_writer = ResultsWriter('results.csv')

results = {}
paths_dict = path_provider.get_paths_dict()
# Categories are numbered 1..6 (range end is exclusive).
for category in range(1, 7):
    for path in paths_dict[category]:
        image_name = os.path.basename(path)
        image = load_image(path)

        # Wall-clock timing of the per-image pipeline.
        start_time = time.time(
        )  # TODO implement process pool? Much faster program run, but probably slower elapsed_time per image...
        # Preprocess: equalize down to 8 bit, then binarize/clean.
        image = image_equalization_to8bit(image)
        image = processing_f1(image)
        ellipse = ellipse_detector.detect(image,
Пример #4
0
                        help='hdf franek',
                        # NOTE(review): argparse type=bool is almost always a
                        # bug - bool() of any non-empty string (including
                        # 'False') is True. action='store_true' is the usual
                        # intent; confirm with callers before changing.
                        type=bool,
                        default=False)
    parser.add_argument('--parallelism',
                        '-p',
                        help='number of processes (unbounded if 0)',
                        type=int,
                        default=8)

    # Parsed CLI options as a plain dict.
    parsed_params = vars(parser.parse_args())

    logging.debug("loading pickled simulation from '%s' file",
                  parsed_params['data_root'])
    data_root_path = Path(parsed_params['data_root'])
    # Unpickle the three artifacts a finished run leaves under data_root:
    # simulation parameters, in-memory calculation cache, and stimuluses.
    sim_params = pickle.load(
        PathProvider.new_path_provider(
            data_root_path).get_simulation_params_path().open('rb'))
    unpickled_inmem = pickle.load(
        PathProvider.new_path_provider(
            data_root_path).get_inmem_calc_path().open('rb'))
    unpickled_stimuluses = pickle.load(
        PathProvider.new_path_provider(
            data_root_path).get_stimuluses_path().open('rb'))

    # Copy the cached calculations into the (presumably module-level)
    # `inmem` mapping - TODO confirm where `inmem` is defined.
    for k, v in unpickled_inmem.items():
        inmem[k] = v

    if parsed_params['hdf_franek']:
        # Export the run to HDF5 when requested on the command line.
        hdf_command = MakeHdf5(parsed_params['data_root'],
                               unpickled_stimuluses, sim_params)
        hdf_command()
Пример #5
0
import cv2 as cv
import numpy as np
from matplotlib import pyplot as plt
from path_provider import PathProvider

# Visual sanity check: compare pixel histograms of one image before and
# after min-max normalization, and after conversion down to 8 bits.
paths = PathProvider().get_paths_dict()

img = cv.imread(paths[6][1], cv.IMREAD_ANYDEPTH)

# Stretch the image's dynamic range to the full 16-bit span.
img_scaled = cv.normalize(img, dst=None, alpha=0, beta=65535,
                          norm_type=cv.NORM_MINMAX)

plt.subplot(311).set_title('Original histogram')
# BUG FIX: the original script plotted img_scaled here and img below,
# so the first two histograms appeared under swapped titles.
plt.hist(img.ravel())

plt.subplot(312).set_title('Normalized histogram')
plt.hist(img_scaled.ravel())

plt.subplot(313).set_title('Histogram after conversion to 8bit')
img8 = (img_scaled / 256).astype('uint8')
plt.hist(img8.ravel())

plt.show()

    # Build the list of Simulation objects to execute.
    simulation_tasks = []
    if parsed_params['load_simulation']:
        # Resume mode: restore (step, population) from a pickled snapshot.
        # NOTE(review): the loop variable `run` is never used and
        # `pickled_simulation_file` is set to the directory argument itself,
        # not to the globbed `run` path - every iteration reopens the same
        # file and only the last load survives. Looks like a bug; confirm
        # the intended resume semantics before changing.
        for run in Path(parsed_params['load_simulation']).glob('*'):
            pickled_simulation_file = parsed_params['load_simulation']
            logging.debug("loading pickled simulation from {} file".format(
                pickled_simulation_file))
            with open(pickled_simulation_file, 'rb') as read_handle:
                step, population = pickle.load(read_handle)

        # NOTE(review): this append sits outside the loop above, so at most
        # one resumed simulation task is ever created.
        simulation_tasks.append(
            Simulation(params=parsed_params,
                       step_offset=step + 1,
                       population=population,
                       context_constructor=context_constructor,
                       num=0,
                       path_provider=PathProvider.new_path_provider(
                           parsed_params['simulation_name'])))
    else:
        # Fresh-run mode: recreate the simulation output directory tree.
        simulation_path = os.path.abspath(parsed_params['simulation_name'])
        if os.path.exists(simulation_path):
            shutil.rmtree(simulation_path, ignore_errors=True)
        os.makedirs(simulation_path)
        os.makedirs(simulation_path + '/stats')

        # One Simulation per requested run, each with its own run<N> dir.
        for run in range(parsed_params['runs']):
            population = Population(parsed_params)
            root_path = Path(simulation_path).joinpath('run' + str(run))
            path_provider = PathProvider.new_path_provider(root_path)
            path_provider.create_directory_structure()
            simulation = Simulation(params=parsed_params,
                                    step_offset=0,
                                    population=population,