from utils import FileUtils
# Assumed import path for NeuralNetwork; adjust to match the project layout.
from neural_network import NeuralNetwork


def main():
    config_file = './data/configs/network.txt'
    initial_weights_file = './data/configs/initial_weights.txt'
    dataset_file = './data/datasets/wine.txt'

    fileUtils = FileUtils(dataset_file=dataset_file, config_file=config_file)
    dataset = fileUtils.getDataset()
    # normalized_dataset = normalizeDataset(dataset)

    neurons_per_layer = [1, 2, 1]
    network = NeuralNetwork(config_file=config_file,
                            dataset=dataset,
                            initial_weights_file=initial_weights_file,
                            neurons_per_layer=neurons_per_layer)
    network.backpropagation()
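# A minimal entry-point guard, not present in the original file, so the
# driver above runs when the script is executed directly.
if __name__ == '__main__':
    main()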
def __init__(self):
    self.file_utils = FileUtils()
    self.tempo_algoritmos = TempoAlgoritmos()  # per-algorithm timing results
    self._definir_o_estilo_de_plot()  # set the plot style
    self._gerar_sub_plot_de_cada_quantidade_de_numeros()  # create one subplot per input size
from flask import Flask, Blueprint, render_template, request
from utils import FileUtils
import os
import time
import json

bionic = Blueprint('bionic', __name__, template_folder='templates')
fu = FileUtils()

# Map each ontology acronym to its full name.
onto_names = {}
with open("static/data/ontologyDescriptions.json") as f:
    ontoD = json.load(f)["results"]["bindings"]
for k in ontoD:
    onto_names[k["acr"]["value"]] = k["name"]["value"]


def convert_bytes(num):
    # Render a byte count as a human-readable size string.
    for x in ['bytes', 'KB', 'MB', 'GB', 'TB']:
        if num < 1024.0:
            return "%3.1f %s" % (num, x)
        num /= 1024.0


def get_files_all_tsv(folder, dtype="stats"):
    # Predicate is True for names that do not contain "tsv".
    fileset = fu.get_reqd_fileset(folder + "tsv/", lambda x: "tsv" not in x)
    ap = []
    # print fileset
    for k in sorted(fileset):
        # t = time.ctime(os.path.getmtime(folder + k))
        s = convert_bytes(os.stat(folder + "tsv/" + k).st_size)
        # The source is truncated mid-expression below; the else branch is an
        # assumed completion that mirrors the "stats" case with a different
        # file extension.
        rdf_file = (folder + "rdf/" + k.split(".")[0] + ".hdt"
                    if dtype == "stats"
                    else folder + "rdf/" + k.split(".")[0] + ".rdf")
        # (The remainder of this function is truncated in the source.)
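# A small illustration of convert_bytes; the sample values are ours, not from
# the source:
#
#     convert_bytes(512)      -> '512.0 bytes'
#     convert_bytes(1536)     -> '1.5 KB'
#     convert_bytes(10 ** 9)  -> '953.7 MB'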
import os
import datetime

from file_processing import FileProcessor
from model_operations import Modeller
from utils import FileUtils, LoggerUtil
from sklearn.metrics import confusion_matrix, classification_report

data_path = "data"
save_model_path = "saved_models"
session_id = str(datetime.datetime.now().timestamp())

utils = FileUtils()
logger = LoggerUtil(session_id, 'files')


def prepare_dataset(dir_path, is_train=True):
    # Process every file under dir_path and return the resulting dataset.
    processor = FileProcessor(dir_path, is_train)
    processor.process_all_files()
    return processor.get_dataset()


def prepare_model(dataset):
    # Define and train a model on the given dataset.
    modeller = Modeller(dataset)
    modeller.define_model()
    modeller.train_model()
    return modeller
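# A minimal driver sketch, not in the original file, showing how the helpers
# above might be wired together; the "train" subdirectory under data_path is
# an assumption.
if __name__ == '__main__':
    train_set = prepare_dataset(os.path.join(data_path, 'train'), is_train=True)
    model = prepare_model(train_set)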
from utils import FileUtils, HttpUtils

# Instantiate a FileUtils object.
file_utils = FileUtils()

# Instantiate an HttpUtils object.
http_utils = HttpUtils()