Example #1
def benchmark_cost_function(data_sizes):
    with open('utils/good_path_for_rectangle.pickle', 'rb') as f:
        polygon = pickle.load(f)

    save_folder = generate_folder_name()
    iterations = 10
    results = np.empty((len(data_sizes), iterations, 2), dtype=float)
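    # results[i, j, 0] holds the CPU timing and results[i, j, 1] the GPU timing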

    for i, dsize in enumerate(data_sizes):
        data = dg.generate_rectangle_set(dsize)

        print("\nRun {} with value {}".format(i+1, dsize))

        # Compile functions and warm up GPU
        acoc.cost_function_gpu(data.T, polygon)

        for j in range(iterations):
            utils.print_on_current_line('Iteration {}/{}'.format(j + 1, iterations))
            # perf_counter() gives a monotonic, high-resolution timer for both runs
            start_cpu = time.perf_counter()
            acoc.cost_function(data.T, polygon)
            end_cpu = time.perf_counter()
            results[i, j, 0] = end_cpu - start_cpu

            start_gpu = time.perf_counter()
            acoc.cost_function_gpu(data.T, polygon)
            end_gpu = time.perf_counter()
            results[i, j, 1] = end_gpu - start_gpu

    mean_results = np.mean(results, axis=1).T
    acoc_plotter.plot_bar_chart_gpu_benchmark(mean_results, data_sizes, ['CPython', 'GPU'], save_folder, 'results')

    np.set_printoptions(precision=7, suppress=False)
    print("\nResults: \n{}".format(mean_results))
    utils.save_object(mean_results, save_folder, 'results')
Example #2
def save_plot(fig=None, parent_folder='', file_name=None, extension='png'):
    # Fall back to a generated folder name when no parent folder is given
    if parent_folder:
        directory = os.path.join(SAVE_DIR, parent_folder)
    else:
        directory = os.path.join(SAVE_DIR, generate_folder_name())
    if not os.path.exists(directory):
        os.makedirs(directory)
    file_name = datetime.utcnow().strftime('%Y-%m-%d %H_%M_%S_%f')[:-5] if file_name is None else file_name
    if fig is None:
        fig = plt
    fig.savefig(os.path.join(directory, file_name + '.' + extension), transparent=False)
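Every example here relies on a generate_folder_name helper that the listing itself never defines. Judging from the timestamp-style file names in save_plot above and the prefix/parent arguments passed in later examples, a minimal hypothetical sketch of such a helper might look like the following (an assumption, not the projects' actual implementation):

import os
from datetime import datetime


def generate_folder_name(prefix='', parent=''):
    # Hypothetical sketch: build a timestamp-based folder name, optionally
    # prefixed and joined onto a parent directory.
    stamp = datetime.utcnow().strftime('%Y-%m-%d_%H-%M-%S')
    name = prefix + stamp if prefix else stamp
    return os.path.join(parent, name) if parent else name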
Example #3
class ProductImageInlineForm(InlineFormAdmin):
    form_columns = ['id', 'name', 'is_main_image']
    column_labels = dict(name="Image")
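    # Product images are uploaded into a <generated folder name> subdirectory of the products folder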
    relative_file_path = get_relative_file_path('products', generate_folder_name())
    form_extra_fields = {'name': form.ImageUploadField('Picture', allowed_extensions=['jpg', 'jpeg', 'png'],
                                                       base_path=get_file_path(), relative_path=relative_file_path)}
    form_widget_args = {
        'is_main_image': {
            'type': 'checkbox',
            'class': 'flat-red'
        },
    }
Example #4
def benchmark(parameter_name, values, config=CONFIG):
    save_folder = generate_folder_name()
    print("\n\nBenchmark for parameter '{}' with values {}".format(parameter_name, values))

    results = np.empty((len(values), 2), dtype=float)
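    # Column 0 stores the GPU result and column 1 the CPU result for each tested value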
    for index, v in enumerate(values):
        print("Run {} with value {} on GPU".format(index+1, v))
        results[index, 0] = run((parameter_name, v), ('gpu', True))
        utils.clear_current_line()

        print("Run {} with value {} on CPU".format(index+1, v))
        results[index, 1] = run((parameter_name, v), ('gpu', False))
        utils.clear_current_line()

    result_str = "Results: \n{}".format(results)
    print(result_str)
    utils.save_dict(config, save_folder, 'config_' + parameter_name + '.txt')
    utils.save_string_to_file(result_str, save_folder, 'results.txt')
Example #5
def parameter_tester(parameter_name, values, save_folder=None):
    if save_folder is None:
        save_folder = utils.generate_folder_name()
    print("\n\nExperiment for parameter '{}' with values {}".format(
        parameter_name, values))

    plt.clf()
    all_scores = []
    for index, v in enumerate(values):
        print("Run {} with value {}".format(index + 1, v))
        scores = run((parameter_name, v))
        all_scores.append(scores)
        utils.print_on_current_line('')
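    # First CSV row: the tested values; second row: the mean score across CLASSIFIER_CONFIG.runs for each value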
    header = ','.join(str(s) for s in values)
    result_str = header + '\n' + ','.join(["{:.4f}".format(sum(s) / CLASSIFIER_CONFIG.runs) for s in all_scores]) + \
                 '\n\n' + 'all scores:\n'

    for a in all_scores:
        result_str += ','.join('{:.4f}'.format(s) for s in a) + '\n'
    utils.save_string_to_file(result_str,
                              parent_folder=save_folder,
                              file_name='result_' + parameter_name + '.txt')
Example #6
class RegistryView(MyModelView):
    column_filters = ['is_active']
    relative_file_path = get_relative_file_path('registries', generate_folder_name())
    form_widget_args = {
        'is_active': {
            'type': 'checkbox',
            'class': 'flat-red'
        },
    }
    form_extra_fields = {
        'image': form.ImageUploadField('Background Image', allowed_extensions=['jpg', 'jpeg', 'png'], base_path=get_file_path(),
                                       relative_path=relative_file_path)
    }
    can_delete = False

    @action('activate', 'Mark as Activated', 'Are you sure you want to mark these items as active?')
    def action_activate(self, ids):
        try:
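            # Bulk UPDATE on the selected ids; synchronize_session='fetch' keeps already-loaded objects in sync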
            count = self.model.query.filter(self.model.id.in_(ids)).update({self.model.is_active: True}, synchronize_session='fetch')
            db.session.commit()

            flash(f'{count} items were successfully marked as active', 'success')
        except Exception as ex:
            if not self.handle_view_exception(ex):
                raise
            flash(f'Failed to process request. {str(ex)}', 'error')

    @action('deactivate', 'Mark as Deactivated', 'Are you sure you want to mark these items as deactivated?')
    def action_deactivate(self, ids):
        try:
            count = self.model.query.filter(self.model.id.in_(ids)).update({self.model.is_active: False}, synchronize_session='fetch')
            db.session.commit()

            flash(f'{count} items were successfully marked as deactivated', 'success')
        except Exception as ex:
            if not self.handle_view_exception(ex):
                raise
            flash(f'Failed to process request. {str(ex)}', 'error')
Example #7
class ArticleView(MyModelView):
    column_list = ['title', 'slug', 'summary', 'is_published', 'view_count', 'created_by', 'date_created']
    column_labels = dict(is_published="Published?")
    form_excluded_columns = ['slug', 'created_by', 'date_created', 'view_count']
    form_widget_args = {
        'content': {
            'rows': 20,
            'class': "form-control textarea"
        },
        'summary': {
            'rows': 5,
            'class': "form-control textarea"
        },
        'is_published': {
            'type': 'checkbox',
            'class': 'flat-red'
        },
    }
    column_formatters = {
        'summary': strip_html_tags,
        'content': strip_html_tags,
    }
    folder_name = generate_folder_name()
    relative_file_path = get_relative_file_path('articles', folder_name)

    form_overrides = dict(image=FileField)
    form_columns = ['title', 'summary', 'content', 'tags', 'image', 'is_published']
    form_extra_fields = {
        'image': form.ImageUploadField('Image', allowed_extensions=['jpg', 'jpeg', 'png'], base_path=get_file_path(),
                                       relative_path=relative_file_path)}

    def on_model_change(self, form, model, is_created):
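        # Regenerate the slug from the title; UniqueSlugify de-duplicates slugs only when the article is first created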
        if is_created:
            model.created_by = current_user
            unique_slug = UniqueSlugify(to_lower=True)
            model.slug = unique_slug(model.title)
        else:
            slug = Slugify(to_lower=True)
            model.slug = slug(model.title)

    @action('publish', 'Mark as Published', 'Are you sure you want to publish selected articles?')
    def action_publish(self, ids):
        try:
            count = Article.query.filter(Article.id.in_(ids)).update({Article.is_published: True},
                                                                     synchronize_session='fetch')
            db.session.commit()

            flash(f'{count} articles were successfully published', 'success')
        except Exception as ex:
            if not self.handle_view_exception(ex):
                raise
            flash(f'Failed to publish articles. {str(ex)}', 'error')

    @action('archive', 'Mark as Archived', 'Are you sure you want to archive selected articles?')
    def action_archive(self, ids):
        try:
            count = Article.query.filter(Article.id.in_(ids)).update({Article.is_published: False},
                                                                     synchronize_session='fetch')
            db.session.commit()

            flash(f'{count} articles were successfully archived', 'success')
        except Exception as ex:
            if not self.handle_view_exception(ex):
                raise
            flash(f'Failed to archive articles. {str(ex)}', 'error')
Example #8
from copy import copy
from time import time

from sklearn.model_selection import train_test_split

import acoc
import utils
from utils import data_manager, generate_folder_name
import acoc.polygon
from config import CLASSIFIER_CONFIG, SAVE_DIR


CLASSIFIER_CONFIG.plot = False
CLASSIFIER_CONFIG.save = False
CLASSIFIER_CONFIG.training_test_split = True

CLASSIFIER_CONFIG.data_set = 'iris'
SAVE_FOLDER = generate_folder_name(CLASSIFIER_CONFIG.data_set, SAVE_DIR)


def run(**kwargs):
    conf = copy(CLASSIFIER_CONFIG)
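    # Override fields on a copy so the module-level CLASSIFIER_CONFIG is left untouched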
    for k, v in kwargs.items():
        conf[k] = v

    data_set = data_manager.load_data_set(conf.data_set)
    X = data_set.data
    y = data_set.target
    class_indices = list(set(y))

    # Split data into training and testing set
    if conf.training_test_split:
        X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33)
Example #9
    data_set = data_manager.load_data_set('breast_cancer')

    data = data_set.data[:, :2]
    target = np.array([data_set.target]).T
    np.place(target, target == 4, 1)
    np.place(target, target == 2, 0)
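    # The data set codes malignant as 4 and benign as 2; remap to binary 1/0 labels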
    dt = np.concatenate((data, target), axis=1)

    # dt = np.array([[0, 0.02, 0],
    #                [0.05, 0, 1],
    #                [1, 1, 1]])
    mtrx = AcocMatrix(dt)
    pol = mtrx.edges[:4]
    mtrx.edges[1].pheromone_strength = 5
    mtrx.edges[0].pheromone_strength = 3
    save_folder = generate_folder_name()
    print("Level {}: Edges {}, vertices {}".format(mtrx.level, len(mtrx.edges),
                                                   len(mtrx.vertices)))

    if plot:
        # plot_pheromones(mtrx, dt, tau_min=1, tau_max=10, folder_name=save_folder, save=save, show=show)
        plot_matrix_and_data(mtrx, dt, show=show)
    for i in range(6):
        mtrx.level_up(pol)
        print("Level {}: Edges {}, vertices {}".format(mtrx.level,
                                                       len(mtrx.edges),
                                                       len(mtrx.vertices)))
        if plot:
            plot_matrix_and_data(mtrx, dt, show=show, save=save)
            # plot_pheromones(mtrx, dt, tau_min=1, tau_max=10, folder_name=save_folder, save=save, show=show)
Example #10
def parameter_tester(parameter_name, values, save_folder=None):
    if save_folder is None:
        save_folder = utils.generate_folder_name()
    print("\n\nExperiment for parameter '{}' with values {}".format(
        parameter_name, values))

    plt.clf()
    all_scores = []
    for index, v in enumerate(values):
        print("Run {} with value {}".format(index + 1, v))
        scores = run((parameter_name, v))
        all_scores.append(scores)
        utils.print_on_current_line('')
    header = ','.join(str(s) for s in values)
    result_str = header + '\n' + ','.join(["{:.4f}".format(sum(s) / CLASSIFIER_CONFIG.runs) for s in all_scores]) + \
                 '\n\n' + 'all scores:\n'

    for a in all_scores:
        result_str += ','.join('{:.4f}'.format(s) for s in a) + '\n'
    utils.save_string_to_file(result_str,
                              parent_folder=save_folder,
                              file_name='result_' + parameter_name + '.txt')


if __name__ == "__main__":
    folder = generate_folder_name('tuning-{}'.format(CLASSIFIER_CONFIG.data_set))
    utils.save_dict(CLASSIFIER_CONFIG, folder, file_name='base_config.txt')
    parameter_tester('rho', [0.02, 0.05, 0.07], save_folder=folder)
    parameter_tester('beta', [0.02, 0.05, 0.07], save_folder=folder)
Example #11
    plot = True
    save = True
    show = False
    data_set = data_manager.load_data_set('breast_cancer')

    data = data_set.data[:, :2]
    target = np.array([data_set.target]).T
    np.place(target, target == 4, 1)
    np.place(target, target == 2, 0)
    dt = np.concatenate((data, target), axis=1)

    # dt = np.array([[0, 0.02, 0],
    #                [0.05, 0, 1],
    #                [1, 1, 1]])
    mtrx = AcocMatrix(dt)
    pol = mtrx.edges[:4]
    mtrx.edges[1].pheromone_strength = 5
    mtrx.edges[0].pheromone_strength = 3
    save_folder = generate_folder_name()
    print("Level {}: Edges {}, vertices {}".format(mtrx.level, len(mtrx.edges), len(mtrx.vertices)))

    if plot:
        # plot_pheromones(mtrx, dt, tau_min=1, tau_max=10, folder_name=save_folder, save=save, show=show)
        plot_matrix_and_data(mtrx, dt, show=show)
    for i in range(6):
        mtrx.level_up(pol)
        print("Level {}: Edges {}, vertices {}".format(mtrx.level, len(mtrx.edges), len(mtrx.vertices)))
        if plot:
            plot_matrix_and_data(mtrx, dt, show=show, save=save)
            # plot_pheromones(mtrx, dt, tau_min=1, tau_max=10, folder_name=save_folder, save=save, show=show)