Example no. 1
    def __init__(self):
        self.net = NetworkManager(9999)
        self.yolo = Yolo()
        self.yolo.load_model()
        self.net.addCallback(0x00, self.yolo_callback)
        while True:
            try:
                self.net.spinSocket()
            except KeyboardInterrupt:
                self.net.close()
                break
Example no. 2
class NetworkManagerWrapper(ManagerInterface):
    name = "Network Manager"

    def __init__(self, conf: NetworkManagerConfig):
        super().__init__(conf)

        # Build network manager
        self._network_manager = NetworkManager(
            port=conf.GRPC_PORT,
            known_priorities=conf.PRIORITIES,
            limit_to_known_priorities=conf.LIMIT_PRIORITIES,
            logfile_dir=conf.OUTPUT_DIR,
        )

        # Add network services
        self._network_manager.add_service(services.DebugPing())
        self._network_manager.add_service(
            services.LoggingClient(
                host=conf.LOGGING_SERVER_HOST,
                port=conf.LOGGING_SERVER_PORT,
            ))

    def get_service(self, service_name):
        return self._network_manager.get_service(service_name)

    def _start(self):
        self._network_manager.start()

    def _stop(self):
        self._network_manager.stop()
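
For context, NetworkManagerWrapper fills in the _start/_stop hooks of its base class. A minimal sketch of what such a ManagerInterface contract might look like; the real base class is not shown in the example, so everything here beyond the _start/_stop names is an assumption:

class ManagerInterface:
    """Hypothetical template-method base: subclasses supply _start/_stop."""

    name = "Manager"

    def __init__(self, conf):
        self.conf = conf
        self._running = False

    def start(self):
        # Public entry point delegating to the subclass hook
        if not self._running:
            self._start()
            self._running = True

    def stop(self):
        if self._running:
            self._stop()
            self._running = False

    def _start(self):
        raise NotImplementedError

    def _stop(self):
        raise NotImplementedError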
Example no. 3
class Main(object):
    def __init__(self):
        self.__active = True

        self.__config = get_config()

        self.__unhandled_files = []

        self.__sleep_time = int(self.__config.get('Settings', 'frequency'))

        if not CUSTOM_SYNC_PATH:
            self.__sync_directory_path = self.__config.get('Settings', 'path')
        else:
            self.__sync_directory_path = input('Please enter custom sync path: ')

        self.__storage_manager = StorageManager(self.__sync_directory_path)
        self.__network_manager = NetworkManager(self.__storage_manager, permanent_connections=['127.0.0.1'])

    def start(self):
        self.__network_manager.start_listening()

        # Main Loop
        while self.__active:
            # Check files for changes
            changed_files = self.__storage_manager.check_files_for_changes()

            # Ensure network functionality is not already in use
            if self.__network_manager.in_progress:
                self.__unhandled_files.extend(changed_files)
                time.sleep(self.__sleep_time)
                continue

            # Merge previously unhandled changes into this batch and reset the backlog
            changed_files.extend(self.__unhandled_files)
            self.__unhandled_files = []

            # If any files have changed and there are active connections
            if changed_files and self.__network_manager.get_connections():
                # Iterate over a copy so entries can be removed while iterating;
                # each entry is a (filename, change_code, md5) tuple
                for filename_code_md5 in list(changed_files):
                    if filename_code_md5[1] in (0, 2):
                        try:
                            # Send file
                            logging.info('Sending file changes for file name: ' + filename_code_md5[0])
                            self.__network_manager.transfer_file(filename_code_md5[0], filename_code_md5[2])
                            changed_files.remove(filename_code_md5)
                        except Exception as e:
                            logging.info('Exception:\n' + str(e))

                # Anything that could not be sent is retried on the next pass
                self.__unhandled_files.extend(changed_files)

            time.sleep(self.__sleep_time)
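
The loop above consumes (filename, change_code, md5) tuples. A purely hypothetical sketch of how StorageManager.check_files_for_changes() could produce them with hashlib; the real implementation, and the exact meaning of codes 0 and 2, are not shown in the example:

import hashlib
import os

def md5_of_file(path, chunk_size=65536):
    # Hash the file in chunks so large files do not load into memory at once
    digest = hashlib.md5()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            digest.update(chunk)
    return digest.hexdigest()

def check_files_for_changes(directory, known_hashes):
    changed = []
    for name in os.listdir(directory):
        path = os.path.join(directory, name)
        if not os.path.isfile(path):
            continue
        current = md5_of_file(path)
        if name not in known_hashes:
            changed.append((name, 0, current))      # 0: assumed "new file"
        elif known_hashes[name] != current:
            changed.append((name, 2, current))      # 2: assumed "modified"
        known_hashes[name] = current
    return changed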
Example no. 5
    def __init__(self):
        # Initial mode
        self.encoding = 'utf-8'
        self.mode = MODE.CLEAR
        self.current_message_dict = {}
        self.network_manager = NetworkManager.get_instance()
        super(Housekeeper, self).__init__()
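
NetworkManager.get_instance() suggests a classic singleton accessor. A minimal sketch of that pattern, assuming nothing about the real class beyond the method name:

class NetworkManager:
    _instance = None

    @classmethod
    def get_instance(cls):
        # Create the shared instance lazily on first access
        if cls._instance is None:
            cls._instance = cls()
        return cls._instance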
Example no. 7
    def __init__(self):
        self.encoding = 'utf-8'
        self.stats = {}

        self.weather = {'connection': False}
        self.last_weather_check = time.time() - 9999
        self.load_weather()

        self.network_manager = NetworkManager.get_instance()
Example no. 8
def benchmark_running(hypercolumns=4, minicolumns=20):
    # Manager properties
    dt = 0.001
    running_time = 10
    values_to_save = []

    # Build the network
    nn = BCPNNFast(hypercolumns, minicolumns)
    nn.k_inner = False
    nn.k = 0.0

    # Build the manager
    manager = NetworkManager(nn=nn, dt=dt, values_to_save=values_to_save)
    # Named sim_time to avoid shadowing the time module
    sim_time = np.arange(0, running_time, dt)
    manager.run_network(sim_time, I=0)
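
benchmark_running builds and runs the network but measures nothing itself. A small hypothetical harness with time.perf_counter turns it into an actual benchmark:

import time

def time_benchmark(repeats=3, **kwargs):
    durations = []
    for _ in range(repeats):
        start = time.perf_counter()
        benchmark_running(**kwargs)
        durations.append(time.perf_counter() - start)
    return min(durations)  # best of N reduces scheduling noise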
Example no. 9
def main(argv):
    # Load configuration
    try:
        with open('config.json', 'r') as configuration_file:
            CONFIG = json.loads(configuration_file.read())
        check_configuration(CONFIG)
        print('Configuration loaded.')
    except IOError:
        print('missing configuration file')
        sys.exit(2)
    except ValueError:
        print('configuration file is corrupt')
        sys.exit(2)

    print(Globals.RESOURCES_RESOURCE_CODE)

    # Load database
    print('\nLoading Database')
    DATABASE = Database(CONFIG)

    # Load encryption key
    print('\nLoading encryption key')
    CRYPTO = Crypto(CONFIG, DATABASE)

    # Create network manager
    print('\nCreating network manager')
    NETWORK_MANAGER = NetworkManager(CONFIG, DATABASE, CRYPTO)

    # Create server
    print('\nCreating server')
    SERVER = Server(CONFIG, CRYPTO)

    # Create block manager
    print('\nCreating block manager')
    BLOCK_MANAGER = BlockManager(CONFIG, DATABASE, CRYPTO, NETWORK_MANAGER, SERVER)
    SERVER.set_blocks_manager(BLOCK_MANAGER)

    # Connect to nodes
    print('\nConnecting to network')
    NETWORK_MANAGER.connect_to_all(BLOCK_MANAGER)

    print('\nStartup complete, waiting for synchronization')

    while True:
        try:
            cmd = input()
            if cmd in ['shutdown', 'SHUTDOWN', '^C', '^Z', 'exit', 'EXIT', 'close', 'CLOSE']:
                break
        except KeyboardInterrupt:
            break

    print('Shutdown signal received, stopping everything')
    SERVER.shutdown()
    NETWORK_MANAGER.shutdown()
    print('All was correctly stopped, exiting')
    sys.exit(0)
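
check_configuration() is called above but not shown. A hypothetical version that raises ValueError on missing keys would fit the except branch that reports a corrupt configuration; the key names here are assumptions, not taken from the source:

def check_configuration(config):
    # Hypothetical required keys; the real set is not in the example
    for key in ('database', 'nodes', 'port'):
        if key not in config:
            raise ValueError('missing configuration key: ' + key)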
Example no. 10
class ValutoPyApp(App):
    theme_cls = ThemeManager()
    network_manager = NetworkManager()
    transfer_manager = TransferManager()
    transaction_manager = TransactionListManager()

    def build(self):
        self.icon = 'valuto_logo.png'
        self.theme_cls.theme_style = 'Dark'
        self.theme_cls.primary_palette = 'BlueGrey'
        main_widget = Builder.load_file('main.kv')
        return main_widget

    def on_stop(self):
        self.network_manager.kill_valutod()
Example no. 11
def main(argv):

    # Load configuration
    print('\nLoading Configuration')
    CONFIG = load_configuration()

    # Load database
    print('\nLoading Database')
    DATABASE = Database(CONFIG)

    # Load encryption key
    print('\nLoading encryption key')
    CRYPTO = Crypto(CONFIG, DATABASE)

    # Create network manager
    print('\nCreating network manager')
    NETWORK_MANAGER = NetworkManager(CONFIG, DATABASE, CRYPTO)

    # Create server
    print('\nCreating server')
    SERVER = Server(CONFIG, CRYPTO)

    # Create block manager
    print('\nCreating block manager')
    BLOCK_MANAGER = BlockManager(CONFIG, DATABASE, CRYPTO, NETWORK_MANAGER, SERVER)
    SERVER.set_blocks_manager(BLOCK_MANAGER)

    # Connect to nodes
    print('\nConnecting to network')
    NETWORK_MANAGER.connect_to_all(BLOCK_MANAGER)

    print('\nStartup complete, waiting for synchronization')

    while True:
        try:
            time.sleep(1)
            os.system('clear')
            print('Ready for Interruption')
            cmd = input()
            if cmd in ['shutdown', 'SHUTDOWN', '^C', '^Z', 'exit', 'EXIT', 'close', 'CLOSE']:
                break
        except KeyboardInterrupt:
            break

    print('Shutdown signal received, stopping everything')
    SERVER.shutdown()
    NETWORK_MANAGER.shutdown()
    print('All was correctly stopped, exiting')
    sys.exit(0)
Example no. 12
class YoloServer(object):
    def __init__(self):
        self.net = NetworkManager(9999)
        self.yolo = Yolo()
        self.yolo.load_model()
        self.net.addCallback(0x00, self.yolo_callback)
        while True:
            try:
                self.net.spinSocket()
            except KeyboardInterrupt:
                self.net.close()
                break

    def yolo_callback(self, arg):
        byte_img = bytes(arg['img'], 'ascii')
        img_decoded = base64.b64decode(byte_img)
        img = numpy.frombuffer(img_decoded, dtype=numpy.uint8)
        cv_img = cv2.imdecode(img, flags=1)
        names = self.yolo.run(numpy.asarray(cv_img), False)
        return (0x00, {'names': names})
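
yolo_callback expects an ASCII base64-encoded image under 'img'. A sketch of the matching client-side encoding with cv2 and base64; how the payload actually travels is up to NetworkManager, whose client API is not shown, so the function below is illustrative only:

import base64

import cv2

def encode_image_payload(path):
    # Mirror of the decode path in yolo_callback: JPEG-compress,
    # base64-encode, then decode to an ASCII str for the {'img': ...} payload
    img = cv2.imread(path)                  # BGR uint8 array
    ok, buf = cv2.imencode('.jpg', img)     # compress to a byte buffer
    if not ok:
        raise ValueError('could not encode ' + path)
    b64 = base64.b64encode(buf.tobytes())   # bytes -> base64 bytes
    return {'img': b64.decode('ascii')}     # matches bytes(arg['img'], 'ascii')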
Example no. 13
    def execute(self):
        data = self.data
        language = data['language']
        if data['type'][0] == 'SDS':
            type_summary = 0
        else:
            type_summary = 1
        anti_redundancy_method = data['type'][1]

        corpus_name = data['corpus']
        #resumo_size_parameter = data['size']  # sets the summary size, relative to a number of words or sentences, or fixed

        use_machine_learning = data['ml'][0]  ## VERY IMPORTANT NOW
        method, classifier, kFold, use_traditional_features = None, None, None, None
        if use_machine_learning:
            method, classifier, kFold, use_traditional_features = data['ml'][1][:4]

        network = data['network']
        network_type = network[0]  # network type: noun, tfidf, d2v, mln
        network_parameters = network[1]  # all parameters of the chosen network type
        mln_type_flag = network_type == 'mln'  # tells the corpus loader whether a multilayer network must be loaded

        extracted_net_parameters = parameter_extractor(network_type,
                                                       network_parameters)

        mln_type = extracted_net_parameters['mln_type']
        sw_removal = extracted_net_parameters['sw_removal']
        limiar_value = extracted_net_parameters['limiar_value']
        limiar_type = extracted_net_parameters['limiar_type']
        size_d2v = extracted_net_parameters['size_d2v']
        inter_edge_mln = extracted_net_parameters['inter_edge']
        limiar_mln = extracted_net_parameters['limiar_mln']

        network_measures = data['measures']
        #selection_method = data['selection']  #####

        #print(use_machine_learning)
        #print(method, classifier, kFold, use_traditional_features)
        # use_machine_learning and method ---> very important

        print(extracted_net_parameters)
        '''
        1. Corpus loader: load the requested corpus and leave it ready for pre-processing
        '''

        #obj = Loader(language=language, type_summary=type_summary, corpus=corpus_name, size=resumo_size_parameter, mln=mln_type_flag, use_ml=use_machine_learning)
        obj = Loader(language=language,
                     type_summary=type_summary,
                     corpus=corpus_name,
                     mln=mln_type_flag,
                     use_ml=use_machine_learning)
        loaded_corpus = obj.load()  # dictionary keyed by document or group name, holding the documents and their sizes
        '''
        2. Corpus processing
        '''
        obj = CorpusConversion(loaded_corpus, language, network_type, mln_type,
                               sw_removal)
        processed_corpus = obj.convert()

        #for i in processed_corpus.items():
        #    print i
        '''
        3. Corpus vectorization 
        '''

        vectorized_corpus = None

        if network_type == 'noun' or mln_type == 'noun':
            pass

        else:
            if network_type == 'mln':
                network_type_subtype = mln_type
            else:
                network_type_subtype = network_type

            if language == 'eng':
                obj = Vectorization(processed_corpus,
                                    network_type_subtype,
                                    size_d2v,
                                    language=language)
                vectorized_corpus = obj.calculate()
            else:
                type_summary_inverted = 0
                if type_summary == 0:
                    type_summary_inverted = 1

                obj = Loader(language=language,
                             type_summary=type_summary_inverted,
                             corpus=corpus_name,
                             mln=mln_type_flag)
                auxiliar_corpus = obj.load()

                obj = CorpusConversion(auxiliar_corpus, language, network_type,
                                       mln_type, sw_removal)
                processed_auxiliar = obj.convert()

                obj = Vectorization(processed_corpus,
                                    network_type_subtype,
                                    size_d2v,
                                    processed_auxiliar,
                                    language=language)
                vectorized_corpus = obj.calculate()
        '''
        4. Network creation
        5. Network pruning
        '''

        obj = NetworkManager(network_type, mln_type, processed_corpus,
                             vectorized_corpus, inter_edge_mln, limiar_mln,
                             limiar_value, limiar_type)
        complex_networks = obj.create_networks()
        '''
        6. Node weighting, then 7. node ranking
           (or, with machine learning enabled, 7. machine-learning ranking)
        '''

        manageNodes = NodeManager(complex_networks, network_measures)

        #features = manageNodes.get_network_features()

        if use_machine_learning:
            obj = MLRanking(corpus=processed_corpus,
                            method=method,
                            classifier=classifier,
                            kfold=kFold,
                            nodeManager=manageNodes)
            all_documentRankings = obj.rank_by_machine_learning()
        else:
            all_documentRankings = manageNodes.ranking()

        #for i in all_documentRankings.items():
        #    print i
        '''
        8. Summarization
        '''

        obj = SummaryGenerator(processed_corpus, complex_networks,
                               all_documentRankings, anti_redundancy_method)
        obj.generate_summaries()
        '''
        9. Validation
        '''

        key = choice(list(all_documentRankings.keys()))
        number_of_measures = len(all_documentRankings[key][0])
        parameters_to_show_table = []

        if limiar_mln is not None:
            first_value = len(inter_edge_mln)
            second_value = len(limiar_mln)
            third_value = number_of_measures
            parameters_to_show_table.append(inter_edge_mln)
            parameters_to_show_table.append(limiar_mln)
        elif limiar_value is not None:
            first_value = 1
            second_value = len(limiar_value)
            third_value = number_of_measures
            parameters_to_show_table.append(None)
            parameters_to_show_table.append(limiar_value)
        else:
            first_value = 1
            second_value = 1
            third_value = number_of_measures

        print(first_value, second_value, third_value)

        obj = Validation(language, type_summary, corpus_name,
                         [first_value, second_value, third_value],
                         self.output_excel, parameters_to_show_table)
        obj.validate('results.csv')

        deleteFolders(extras['Automatics'])
Example no. 14
from entities.message import *
from entities.node import *
from entities.group import *
from entities.request import *
from network.NetworkManager import *
from services.MocHandler import MocHandler

net = NetworkManager()
mac = net.getMacAddress()
ev = MocHandler()
net.addListener(ev)
# start listener
#net.startListener()
# start node discovery service
#net.startNodeDiscovery()

msg = Message("FFEEDDCCBBAA", "AABBCCDDEEFF", {
    "timestamp": "2019-11-07 10:10:10",
    "message": "Hello test test"
})
groupMsg = GroupMessage("FFEEDDCCBBAA", "FFFFAABBCCDDEEFF", {
    "timestamp": "2019-11-07 10:10:10",
    "message": "Hello test test"
})
broadcastMsg = GroupMessage("FFEEDDCCBBAA", {
    "timestamp": "2019-11-07 10:10:10",
    "message": "Hello test test"
})
req = Request("FFEEDDCCBBAA", "FFFFAABBCCDDEEFF",
              {"message": "Hello test test"})
Example no. 15
    def execute(self):
        data = self.data
        language = data['language']
        if data['type'][0] == 'SDS':
            type_summary = 0
        else:
            type_summary = 1
        anti_redundancy_method = data['type'][1]
        corpus_name = data['corpus']

        resumo_size_parameter = data['size']  # sets the summary size, relative to a number of words or sentences, or fixed

        network = data['network']
        network_type = network[0]  # network type: noun, tfidf, d2v, mln
        network_parameters = network[1]  # all parameters of the chosen network type
        mln_type_flag = network_type == 'mln'  # tells the corpus loader whether a multilayer network must be loaded

        extracted_net_parameters = parameter_extractor(network_type,
                                                       network_parameters)
        mln_type = extracted_net_parameters['mln_type']
        sw_removal = extracted_net_parameters['sw_removal']
        limiar_value = extracted_net_parameters['limiar_value']
        limiar_type = extracted_net_parameters['limiar_type']
        #distance = extracted_net_parameters['distance']
        size_d2v = extracted_net_parameters['size_d2v']
        #inference_d2v = extracted_net_parameters['inference_d2v']
        inter_edge = extracted_net_parameters['inter_edge']
        #intra_edge = extracted_net_parameters['intra_edge']
        limiar_mln = extracted_net_parameters['limiar_mln']

        print(extracted_net_parameters)

        #anti_redundancy_threshold = None  # if the documents need no vectorization, this is computed during summary
        # generation from the cosine distance of the words, without building vectors for each document;
        # if the documents do require vectorization, it takes the value computed in the vectorization stage

        network_measures = data['measures']
        selection_method = data['selection']
        #validation = data['validation']
        '''
        0. Load the requested corpus and leave it ready for pre-processing
        '''
        #obj = Loader(language=language, type_summary=type_summary, corpus=corpus_name, size=resumo_size_parameter, mln=mln_type_flag)
        obj = Loader(language=language,
                     type_summary=type_summary,
                     corpus=corpus_name,
                     mln=mln_type_flag)
        loaded_corpus = obj.load()  # dictionary keyed by document or group name, holding the documents and their sizes

        #for i in loaded_corpus.items():
        #    print(i)

        #for i in loaded_corpus.items():
        #    groups = i[1]
        #    sentences = groups[0]
        #    sizes = groups[1]
        #    for j in sentences:
        #        print(j)
        #        print(j[0], j[1])

        top_sentences = dict()  # MDS only
        #if anti_redundancy_method is not None:
        #    for i in loaded_corpus.items():
        #        doc_name = i[0]
        #        tops = i[1][2]
        #        top_sentences[doc_name] = tops
        '''
        1. Corpus pre-processing
        '''

        obj = CorpusConversion(loaded_corpus, language, network_type, mln_type,
                               sw_removal)
        processed_corpus = obj.convert()

        #for i in processed_corpus.items():
        #    print(len(i[1][0]))  #, len(i[1][1])
        '''
        2. Corpus vectorization (auxiliary corpus, when required)
        '''

        vectorized_corpus = None

        if network_type == 'noun' or mln_type == 'noun':
            pass
        else:

            if network_type == 'mln':
                network_type_subtype = mln_type
            else:
                network_type_subtype = network_type

            # load an auxiliary corpus for training
            if language == 'eng':
                #obj = Vectorization(processed_corpus, network_type, inference_d2v, size_d2v)
                #obj = Vectorization(processed_corpus, network_type_subtype, inference_d2v, size_d2v)
                obj = Vectorization(processed_corpus, network_type_subtype,
                                    size_d2v)
                vectorized_corpus = obj.calculate()
            else:
                print("loading new corpus")
                type_summary_inverted = 0
                if type_summary == 0:
                    type_summary_inverted = 1
                #obj = Loader(language=language, type_summary=type_summary_inverted, corpus=corpus_name, size=resumo_size_parameter, mln=mln_type_flag)
                obj = Loader(language=language,
                             type_summary=type_summary_inverted,
                             corpus=corpus_name,
                             mln=mln_type_flag)
                auxiliar_corpus = obj.load()
                obj = CorpusConversion(auxiliar_corpus, language, network_type,
                                       mln_type, sw_removal)
                processed_auxiliar = obj.convert()
                #obj = Vectorization(processed_corpus, network_type, inference_d2v, size_d2v, processed_auxiliar)
                #obj = Vectorization(processed_corpus, network_type_subtype, inference_d2v, size_d2v, processed_auxiliar)
                obj = Vectorization(processed_corpus, network_type_subtype,
                                    size_d2v, processed_auxiliar)
                vectorized_corpus = obj.calculate()
        '''
        3. Network creation and 4. node removal via thresholds
        '''

        #obj = NetworkManager(network_type, mln_type, processed_corpus, vectorized_corpus, distance, inter_edge, limiar_mln, limiar_value)
        obj = NetworkManager(network_type, mln_type, processed_corpus,
                             vectorized_corpus, inter_edge, limiar_mln,
                             limiar_value, limiar_type)
        complex_networks = obj.create_networks()

        #for i in complex_networks.items():
        #    print(i)
        '''
        5. Node weighting and node ranking
        '''

        obj = NodeManager(complex_networks, network_measures)
        all_documentRankings = obj.ranking()
        '''
        6. Summarization
        # corpus, rankings, sentence_selection, anti_redundancy

        print("Summarization!!!")
        obj = SummaryGenerator(processed_corpus, complex_networks, all_documentRankings, selection_method, anti_redundancy_method, top_sentences)
        obj.generate_summaries()
        '''
Example no. 16
from command import CommandParser
from network import NetworkManager, ClientDisconnected
from response import Response
from error import ParseError, ExecutionError, ValidationError
from http import HTTPStatus
from commands.disconnect import Disconnect
from controller import Controller
from configparser import ConfigParser
from logging.handlers import RotatingFileHandler
from logging import Formatter, basicConfig, getLogger, StreamHandler

if __name__ == '__main__':

    config = ConfigParser()
    config.read('../config.ini')
    network_manager = NetworkManager(config)
    parser = CommandParser()

    # Logging configuration
    level = config['LOGGING'].get('level', 'ERROR')
    log_on_console = config['LOGGING'].get('console', 'False')
    log_on_console = log_on_console in ('1', 'True', 'true')
    filename = config['LOGGING'].get('filename', '/var/log/sc_driver.log')
    max_bytes = int(config['LOGGING'].get('max_bytes', str(1024 * 1024)))
    backup_count = int(config['LOGGING'].get('backup_count', str(5)))
    log_format = '%(asctime)s - %(name)s - %(levelname)s -- %(message)s'
    formatter = Formatter(log_format)
    fileHandler = RotatingFileHandler(filename, maxBytes=max_bytes, backupCount=backup_count)
    consoleHandler = StreamHandler()
    consoleHandler.setFormatter(formatter)
    fileHandler.setFormatter(formatter)
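    # The original snippet ends above without wiring the handlers up;
    # a conventional continuation (assumed, not from the source) would be:
    root_logger = getLogger()
    root_logger.setLevel(level)          # accepts level names like 'ERROR'
    root_logger.addHandler(fileHandler)
    if log_on_console:
        root_logger.addHandler(consoleHandler)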
Example no. 17
                  sigma=sigma,
                  G=G,
                  tau_z_pre_ampa=tau_z_pre_ampa,
                  tau_z_post_ampa=tau_z_post_ampa,
                  tau_p=tau_p,
                  g_I=g_I,
                  z_transfer=z_transfer,
                  diagonal_zero=False,
                  strict_maximum=strict_maximum,
                  perfect=perfect,
                  k_perfect=k_perfect,
                  always_learning=always_learning)
nn.g_beta = 0.0

# Build the manager
manager = NetworkManager(nn=nn, dt=dt, values_to_save=values_to_save)
w = simple_bcpnn_matrix(minicolumns, w_self, w_next, w_rest)
nn.w_ampa = w

# Recall
T_recall = 0.450
T_cue = 0.050
sequences = [[i for i in range(n_patterns)]]
n = 1

aux = calculate_recall_time_quantities(manager, T_recall, T_cue, n, sequences)
total_sequence_time, mean, std, success, timings = aux

i_ampa = manager.history['i_ampa']
a = manager.history['a']
time = np.linspace(0, manager.T_total, a.shape[0])
Example no. 18
                  g_a=g_a,
                  tau_a=tau_a,
                  sigma=sigma,
                  G=G,
                  tau_z_pre_ampa=tau_z_pre_ampa,
                  tau_z_post_ampa=tau_z_pre_ampa,
                  tau_p=tau_p,
                  z_transfer=False,
                  diagonal_zero=False,
                  strict_maximum=False,
                  perfect=perfect,
                  k_perfect=k_perfect,
                  always_learning=always_learning)

# Build the manager
manager = NetworkManager(nn=nn, dt=dt, values_to_save=values_to_save)

# Build the protocol for training
protocol = Protocol()
patterns_indexes = [i for i in range(n_patterns)]
protocol.simple_protocol(patterns_indexes,
                         training_time=training_time,
                         inter_pulse_interval=inter_pulse_interval,
                         inter_sequence_interval=inter_sequence_interval,
                         epochs=epochs)

# Train
epoch_history = manager.run_network_protocol(protocol=protocol, verbose=True)

z_training = manager.history['z_pre_ampa']
o_training = manager.history['o']
Example no. 19
    # Recall
    T_cue = 0.020
    T_recall = 1.0 + T_cue
    n = 1


    # Neural Network
    nn = BCPNNPerfect(hypercolumns, minicolumns, g_w_ampa=g_w_ampa, g_w=g_w, g_a=g_a, tau_a=tau_a, tau_m=tau_m,
                      sigma=sigma, G=G, tau_z_pre_ampa=tau_z_pre_ampa, tau_z_post_ampa=tau_z_post_ampa, tau_p=tau_p,
                      z_transfer=z_transfer, diagonal_zero=diagonal_zero, strict_maximum=strict_maximum,
                      perfect=perfect, k_perfect=k_perfect, always_learning=always_learning,
                      normalized_currents=normalized_currents)

    # Build the manager
    manager = NetworkManager(nn=nn, dt=dt, values_to_save=values_to_save)

    # Protocol
    matrix = create_orthogonal_canonical_representation(minicolumns, hypercolumns)
    seq = np.copy(matrix)
    seq[4] = matrix[2]
    seq[5:] = matrix[4:-1]
    nr = build_network_representation(seq, minicolumns, hypercolumns)

    n_connections = len(seq) - 1
    value = 1.0
    extension = 10
    alpha = 1.0
    weights = [value for i in range(n_connections)]
    weights_collection = [weights]
    sequences = [seq]
Example no. 20
             G=G,
             tau_s=tau_s,
             tau_z_pre=tau_z_pre,
             tau_z_post=tau_z_post,
             tau_a=tau_a,
             g_a=g_a,
             g_I=g_I,
             sigma_out=sigma_out,
             epsilon=epsilon,
             prng=np.random,
             strict_maximum=strict_maximum,
             perfect=False,
             normalized_currents=True)

# Build the manager
manager = NetworkManager(nn=nn, dt=dt, values_to_save=values_to_save)
# Just to build the representations
manager.run_artificial_protocol(ws=w_self, wn=w_next, wb=w_back, alpha=0.5)
w = simple_bcpnn_matrix(minicolumns, w_self, w_next, w_rest, w_back)
nn.w = w
T_persistence = 0.100
manager.set_persistent_time_with_adaptation_gain(T_persistence=T_persistence)

nn.g_beta = 1.0

# Recall
T_recall = 1.0
T_cue = 0.080
I_cue = 0

manager.run_network_recall(T_recall=T_recall,