def process_state(self):
    """Read the worker contract's current state and publish it.

    Falls back to state 0 when the contract call fails so the state
    machine is still initialised with a defined value.

    :return: integer state read from the contract (0 on failure)
    """
    state = 0
    try:
        state = self.contract_container.call().currentState()
    except Exception:
        # Fix: log at error level with the traceback. The original logged
        # the failure at info level via ex.args, which hid real errors.
        self.logger.exception('Exception on process worker state.')
    # Mirror the resolved state into the global manager (monitoring/UI).
    Manager.get_instance().set_job_contract_state(
        self.state_table[state].name)
    self.logger.info("Contract %s initial state is %s",
                     self.__class__.__name__,
                     self.state_table[state].name)
    self.state = state
    return state
def process_state(self, job_id_hex):
    """Read the cognitive job's current state from the job controller
    contract and publish it.

    :param job_id_hex: hex identifier of the cognitive job
    :return: integer state read from the contract (0 on failure)
    """
    state = 0
    try:
        # Element 6 of getCognitiveJobDetails(...) is treated as the
        # job state by this state machine.
        state = self.job_controller_container.call(
        ).getCognitiveJobDetails(job_id_hex)[6]
    except Exception:
        # Fix: log at error level with the traceback. The original logged
        # the failure at info level via ex.args, which hid real errors.
        self.logger.exception('Exception on process job state.')
    # Mirror the resolved state into the global manager (monitoring/UI).
    Manager.get_instance().set_job_contract_state(
        self.state_table[state].name)
    self.logger.info("Contract %s initial state is %s",
                     self.__class__.__name__,
                     self.state_table[state].name)
    self.state = state
    return state
def init_kernel(self):
    """Read model/weights IPFS addresses from the kernel JSON, validate
    their types and download the referenced files.

    :return: True on success, False on any structure/type/download error
    """
    # get main kernel params
    try:
        self.model_address = self.json_kernel['model']
        Manager.get_instance().set_job_kernel_ipfs_address(
            self.model_address)
        self.weights_address = self.json_kernel['weights']
        # NOTE(review): the *weights* address is published through
        # set_job_dataset_ipfs_address — looks like a copy-paste of the
        # dataset path; confirm against the Manager API.
        Manager.get_instance().set_job_dataset_ipfs_address(
            self.weights_address)
    except Exception as ex:
        self.logger.error("Wrong Kernel data file structure:")
        self.logger.error(ex.args)
        return False
    # ------------------------------
    # validate values by string type
    # ------------------------------
    # model address is necessary
    if not isinstance(self.model_address, str):
        self.logger.error("Wrong model address type : "
                          + str(type(self.model_address)))
        self.model_address = None
        self.weights_address = None
        return False
    # weights is not necessary and may be empty
    if self.weights_address is not None:
        if not isinstance(self.weights_address, str):
            self.logger.error("Wrong weights address type : "
                              + str(type(self.weights_address)))
            # NOTE(review): model_address stays set here, unlike the model
            # branch above which clears both — verify this is intended.
            self.weights_address = None
            return False
    try:
        self.logger.info("Downloading model file %s", self.model_address)
        self.ipfs_api.download_file(self.model_address)
        # weights may legitimately be empty/None — skip download then
        if self.weights_address:
            self.logger.info("Downloading weights file %s",
                             self.weights_address)
            self.ipfs_api.download_file(self.weights_address)
        else:
            self.logger.info("Weights address is empty, skip downloading")
    except Exception as ex:
        self.logger.error("Can't download kernel files from IPFS: %s",
                          type(ex))
        self.logger.error(ex.args)
        return False
    return True
def instantiate_contracts(abi_path, eth_hooks):
    """Load contract ABI definitions from *abi_path* into the Manager.

    :param abi_path: directory containing compiled contract JSON files
    :param eth_hooks: 'True' selects the PandoraHooks ABI over Pandora
    :raises ContractsAbiNotFound: when *abi_path* is not a directory
    """
    manager = Manager.get_instance()
    print("ABI folder path : " + str(abi_path))
    if not os.path.isdir(abi_path):
        print("ABI files not found, exiting")
        raise ContractsAbiNotFound()

    def _load_abi(file_name):
        """Return the 'abi' section of abi_path/file_name, or None if absent."""
        # Fix: os.path.join works whether or not abi_path carries a
        # trailing separator; the original `abi_path + file_name`
        # concatenation silently failed without one.
        file_path = os.path.join(abi_path, file_name)
        if not os.path.isfile(file_path):
            return None
        with open(file_path, encoding='utf-8') as abi_file:
            return json.load(abi_file)['abi']

    # Pandora vs PandoraHooks is selected by configuration.
    if eth_hooks == 'True':
        abi = _load_abi("PandoraHooks.json")
        if abi is not None:
            manager.eth_pandora_contract = abi
            print('Pandora hooks abi loaded')
    else:
        abi = _load_abi("Pandora.json")
        if abi is not None:
            manager.eth_pandora_contract = abi
            print('Pandora abi loaded')
    # Remaining contracts follow one pattern: load when present, report.
    for attr_name, file_name, message in (
            ('eth_worker_contract', "WorkerNode.json",
             'WorkerNode abi loaded'),
            ('eth_cognitive_job_contract', "CognitiveJob.json",
             'CognitiveJob abi loaded'),
            ('eth_kernel_contract', "Kernel.json",
             'Kernel abi loaded'),
            ('eth_dataset_contract', "Dataset.json",
             'Dataset abi loaded')):
        abi = _load_abi(file_name)
        if abi is not None:
            setattr(manager, attr_name, abi)
            print(message)
def __init__(self, dataset_file, ipfs_api, batch_no: int):
    """Hold dataset configuration and runtime handles for one job.

    :param dataset_file: parsed JSON dataset descriptor
    :param ipfs_api: IPFS client used to download referenced files
    :param batch_no: index of the batch this instance processes
    """
    # Initializing logger object
    # NOTE(review): logger name "Kernel" looks copy-pasted from the
    # Kernel class — confirm this class should not log under its own name.
    self.logger = logging.getLogger("Kernel")
    self.logger.addHandler(LogSocketHandler.get_instance())
    self.manager = Manager.get_instance()
    self.json_dataset = dataset_file
    # variable for determinate process (predict, fit)
    self.process = None
    # variables for predict job by batches
    self.data_address = None
    self.batch_no = batch_no
    self.dataset = None
    # variables for training (fit); filled in later from the dataset JSON
    self.train_x_address = None
    self.train_x_dataset = None
    self.train_y_address = None
    self.train_y_dataset = None
    self.loss = None
    self.optimizer = None
    self.batch_size = None
    self.epochs = None
    self.validation_split = 0
    self.shuffle = False
    self.initial_epoch = 0
    self.ipfs_api = ipfs_api
def __init__(self, kernel_file, ipfs_api):
    """Hold the kernel JSON descriptor plus model/weights handles.

    :param kernel_file: parsed JSON kernel descriptor
    :param ipfs_api: IPFS client used to download the kernel files
    """
    # Initializing logger object
    self.logger = logging.getLogger("Kernel")
    self.logger.addHandler(LogSocketHandler.get_instance())
    self.manager = Manager.get_instance()
    self.json_kernel = kernel_file
    self.ipfs_api = ipfs_api
    # IPFS addresses and model object are populated after parsing
    self.model_address = None
    self.weights_address = None
    self.model = None
def instantiate_contracts(abi_path, eth_hooks):
    """Load contract ABI definitions from *abi_path* into the Manager.

    :param abi_path: directory containing compiled contract JSON files
    :param eth_hooks: 'True' selects the PandoraHooks ABI over Pandora
    :raises ContractsAbiNotFound: when *abi_path* is not a directory
    """
    manager = Manager.get_instance()
    print("ABI folder path : " + str(abi_path))
    if not os.path.isdir(abi_path):
        # Keep the original operator guidance for restoring the submodule.
        print("ABI files not found, exiting.")
        print("pyrrha-pynode repo contains link to pyrrha-consensus project.")
        print("for complete clone project please provide git commands :")
        print("cd .\pyrrha-pynode\"")
        print("git submodule init")
        print("git submodule update --recursive --remote")
        raise ContractsAbiNotFound()

    def _load_abi(file_name):
        """Return the 'abi' section of abi_path/file_name, or None if absent."""
        # Fix: os.path.join works whether or not abi_path carries a
        # trailing separator; the original `abi_path + file_name`
        # concatenation silently failed without one.
        file_path = os.path.join(abi_path, file_name)
        if not os.path.isfile(file_path):
            return None
        with open(file_path, encoding='utf-8') as abi_file:
            return json.load(abi_file)['abi']

    # Pandora vs PandoraHooks is selected by configuration.
    if eth_hooks == 'True':
        abi = _load_abi("PandoraHooks.json")
        if abi is not None:
            manager.eth_pandora_contract = abi
            print('Pandora hooks abi loaded')
    else:
        abi = _load_abi("Pandora.json")
        if abi is not None:
            manager.eth_pandora_contract = abi
            print('Pandora abi loaded')
    # Remaining contracts follow one pattern: load when present, report.
    for attr_name, file_name, message in (
            ('eth_worker_contract', "WorkerNode.json",
             'WorkerNode abi loaded'),
            ('eth_job_controller_contract', "CognitiveJobController.json",
             'CognitiveJobController abi loaded'),
            ('eth_kernel_contract', "Kernel.json",
             'Kernel abi loaded'),
            ('eth_dataset_contract', "Dataset.json",
             'Dataset abi loaded')):
        abi = _load_abi(file_name)
        if abi is not None:
            setattr(manager, attr_name, abi)
            print(message)
def __init__(self, eth_server: str, abi_path: str, pandora: str,
             node: str, ipfs_server: str, ipfs_port: int, data_dir: str):
    """Broker daemon thread: stores connection settings and prepares
    empty containers for contracts, jobs and processors.

    :param eth_server: Ethereum node URL
    :param abi_path: directory with contract ABI JSON files
    :param pandora: Pandora contract address
    :param node: worker node contract address
    :param ipfs_server: IPFS API host
    :param ipfs_port: IPFS API port
    :param data_dir: local directory for IPFS file storage
    """
    # NOTE(review): Broker.get_instance() inside Broker's own __init__
    # presumably registers the singleton — confirm against get_instance().
    Broker.get_instance()
    Thread.__init__(self, daemon=True)
    # Initializing logger object
    self.logger = logging.getLogger("Broker")
    self.logger.addHandler(LogSocketHandler.get_instance())
    self.manager = Manager.get_instance()
    self.mode = self.manager.launch_mode
    # Saving starter configs
    self.eth_server = eth_server
    self.abi_path = abi_path
    self.pandora = pandora
    self.node = node
    self.ipfs_server = ipfs_server
    self.ipfs_port = ipfs_port
    self.data_dir = data_dir
    # Init empty container for pandora
    self.pandora_container = None
    # Init empty containers for worker node
    self.worker_node_container = None
    self.worker_node_state_machine = None
    self.worker_node_event_thread = None
    # Init empty containers for job
    self.job_address = None
    self.job_container = None
    self.job_state_machine = None
    self.job_state_event_thread = None
    # Init empty jobs and processor
    self.jobs = {}
    self.processors = {}
    # init connectors (strategy objects wrapping the raw connections)
    self.eth = EthService(strategic=EthConnector())
    self.ipfs = IpfsService(strategic=IpfsConnector())
    # vault password is supplied later, before signing transactions
    self.local_password = None
    self.key_tool = KeyTools()
    print('Pandora broker initialize success')
def __init__(self, ipfs_api, processor_id: str, delegate: ProcessorDelegate): super().__init__() # Initializing logger object self.logger = logging.getLogger("Processor") self.logger.addHandler(LogSocketHandler.get_instance()) self.manager = Manager.get_instance() # Configuring self.id = processor_id self.results_file = None # variables for kernel and dataset objects self.kernel = None self.kernel_init_result = None self.dataset = None self.dataset_init_result = None # define delegate self.ipfs_api = ipfs_api self.delegate = delegate
def run_pynode():
    """Construct a Broker from the Manager's settings, connect and block.

    Initialization failures are logged and abort the launch.
    """
    try:
        settings = Manager.get_instance()
        # startup broker main process
        broker = Broker(eth_server=settings.eth_host,
                        abi_path=settings.eth_abi_path,
                        pandora=settings.eth_pandora,
                        node=settings.eth_worker,
                        data_dir=settings.ipfs_storage,
                        ipfs_server=settings.ipfs_host,
                        ipfs_port=settings.ipfs_port)
    except Exception as ex:
        logging.error("Error broker initialization: %s, exiting", type(ex))
        logging.error(ex.args)
        return
    if broker.connect() is not True:
        # Drop this join to run the app as a background daemon instead.
        broker.join()
def emit(self, record):
    """Forward a log record to the web socket listener when enabled."""
    if Manager.get_instance().web_socket_enable != 'True':
        return
    # Imported lazily so the web stack is only loaded when enabled.
    from service.webapi.web_socket_listener import WebSocket
    WebSocket.get_instance().update_log_record(record)
def __init__(self, listener):
    """Capture the shared manager, its launch mode and the listener."""
    manager = Manager.get_instance()
    self.manager = manager
    self.mode = manager.launch_mode
    self.listener = listener
def __init__(self, eth_server: str, abi_path: str, pandora: str,
             node: str, ipfs_server: str, ipfs_port: int, data_dir: str):
    """Broker daemon thread (job-controller variant): stores connection
    settings and prepares containers for contracts, jobs, processors and
    training-progress reporting.

    :param eth_server: Ethereum node URL
    :param abi_path: directory with contract ABI JSON files
    :param pandora: Pandora contract address
    :param node: worker node contract address
    :param ipfs_server: IPFS API host
    :param ipfs_port: IPFS API port
    :param data_dir: local directory for IPFS file storage
    """
    # NOTE(review): Broker.get_instance() inside Broker's own __init__
    # presumably registers the singleton — confirm against get_instance().
    Broker.get_instance()
    Thread.__init__(self, daemon=True)
    # Initializing logger object
    self.logger = logging.getLogger("Broker")
    self.logger.setLevel(logging.INFO)
    self.logger.addHandler(LogSocketHandler.get_instance())
    self.manager = Manager.get_instance()
    self.mode = self.manager.launch_mode
    # Saving starter configs
    self.eth_server = eth_server
    self.abi_path = abi_path
    self.pandora = pandora
    self.node = node
    self.ipfs_server = ipfs_server
    self.ipfs_port = ipfs_port
    self.data_dir = data_dir
    # initial filtering current block number
    self.current_block_number = 0
    # Init empty container for pandora
    self.pandora_container = None
    # Init empty container for cognitive job manager
    self.job_controller = None
    self.job_controller_address = None
    self.job_controller_container = None
    # Init empty containers for worker node
    self.worker_node_container = None
    self.worker_node_state_machine = None
    self.worker_node_event_thread = None
    # Init empty containers for job
    self.job_id_hex = None
    self.job_container = None
    self.job_state_machine = None
    self.job_state_event_thread = None
    self.job_state_thread_flag = True
    # Init empty jobs and processor
    self.jobs = {}
    self.processors = {}
    # init connectors (strategy objects wrapping the raw connections)
    self.eth = EthService(strategic=EthConnector())
    self.ipfs = IpfsService(strategic=IpfsConnector())
    # init progress delegate (training progress reporting bookkeeping)
    self.send_progress = False
    self.start_training_time = None
    self.finish_training_time = None
    self.current_epoch = None
    self.start_epoch_time = None
    self.finish_epoch_time = None
    self.time_per_epoch = None
    self.sends_count = 1
    self.send_progress_interval = 300  # set to 5min by default
    # vault password is supplied later, before signing transactions
    self.local_password = None
    self.key_tool = KeyTools()
    print('Pandora broker initialize success')
def main(argv):
    """CLI entry point: parse arguments, read the ini configuration,
    populate the Manager singleton, load contract ABIs and launch the
    web socket listener and/or the pynode broker.
    """
    # NOTE(review): the original banner art in this help text was
    # multi-line; its exact layout should be confirmed against VCS.
    help_message = """ Pandora Boxchain python node realisation -/////////+++++/` `/ss:--:sds/d/````.hNso- /hNhsoosh+. `h+ ss`/ss` so yoh/ yy//+++dh- /h` y+ `h/ yo -ohM+...`.d+/sssm- `hh++++od/ sds/-h: `h: -sN/ `dN`````-Mh` .d- -mo::::/m: `h/yo .d:m `m:d.-d/os++yds:-.:os+. .d- oy` -d.`m `m .dmy/` -ossooshNdN- +d so :Mhoooood: so :Mh` :d. -NM so/h`y+ y++o :d:d. .d-:d.N sNs os oNo -d .dsoooosNd``N` :d dhmdsooyyo: -odm: s+ os +y :h` `y/ `/oo/..-+hh+:os+ho`y+o+ osos .h:h: `d+/////N/ `y+ oNo +Mo `dh: .h- `h:-oyh `dsoo++oN+ `yyos+:d-````-mds/` /h` `h: `h/ oo .oN+++++od` -h:d- `h: /s/`/h` :h. -shyoosmmh- .+shd. .d:/ss/` `/ss/` `/ooo+++oooo+++++- to see more https://pandoraboxchain.ai/ Current configuration performs launch with different parameters and configurations example >python ./pynode.py -p <your vault password> - starts pynode with default config placed in pynode.ini file ATTENTION! - vault password is necessary, and if you use docker and if you use the docker, enter the password in the startup line in Dockerfile if we need launch with different ETH HOST and IPFS config use launch params example >python ./pynode.py -e (--ethereum) remote -i (--ipfs) pandora with eth_connector instance to connect remote = http://rinkeby.pandora.network:8545 and IPFS config server = http://ipfs.pandora.network port = 5001 -c (--config) performs path for custom config file for launch params -a (--abi) performs change abi directory path -w (--worker) alow to set custom worker contract address seted by console value will replace value in config file for this launch """
    parser = argparse.ArgumentParser(description=help_message,
                                     formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('-p', '--password', action="store", dest='vault_key',
                        default='',
                        help='necessary parameter for launch pynode.'
                             '(used for encrypt vault and use private key to local transactions sign)',
                        metavar='')
    parser.add_argument('-c', '--config', action="store",
                        dest='configuration_file',
                        default='../pynode/core/config/pynode.ini',
                        help='startup pyrrha-pynode with custom configuration file '
                             '(default is ../pynode.ini strongly recommended for use)',
                        metavar='')
    parser.add_argument('-e', '--ethereum', action="store", dest='ethereum_use',
                        default='remote',
                        help='setting up current used host for ethereum node '
                             '(default is remote)',
                        metavar='')
    parser.add_argument('-a', '--abi', action='store', dest='abi_path',
                        default='../pyrrha-consensus/build/contracts/',
                        help='setting up path to folder with ABI files '
                             '(default is ../abi/ strongly recommended for use)',
                        metavar='')
    parser.add_argument('-i', '--ipfs', action='store', dest='ipfs_use',
                        default='pandora',
                        help='setting up current used host for ipfs connection '
                             '(default is "pandora" strongly recommended for use)',
                        metavar='')
    parser.add_argument('-v ', '--version', action='version',
                        version='%(prog)s 0.1.2')
    results = parser.parse_args()
    # read configuration file and parse base settings
    print("Configuration file path : " + str(results.configuration_file))
    if results.configuration_file:
        try:
            config = ConfigParser()
            config.read(results.configuration_file)
            eth_section = config['Ethereum']
            account_section = config['Account']
            eth_contracts = config['Contracts']
            ipfs_section = config['IPFS']
            web_section = config['Web']
            # the -e/--ethereum value selects the key inside [Ethereum]
            eth_host = eth_section[results.ethereum_use]
            eth_worker_node_account = account_section['worker_node_account']
            pandora_address = eth_contracts['pandora']
            worker_address = eth_contracts['worker_node']
            eth_hooks = eth_contracts['hooks']
            pynode_start_on_launch = eth_contracts['start_on_launch']
            ipfs_storage = ipfs_section['store_in']
            # the -i/--ipfs value selects an [IPFS.<name>] section
            ipfs_use_section = config['IPFS.%s' % results.ipfs_use]
            ipfs_host = ipfs_use_section['server']
            ipfs_port = ipfs_use_section['port']
            socket_enable = web_section['enable']
            socket_host = web_section['host']
            socket_port = web_section['port']
            socket_listen = web_section['connections']
        except Exception as ex:
            # NOTE(review): print() does not interpolate %s — this emits
            # the format string and type as two arguments; likely meant
            # logging.error or a formatted string. Left as-is.
            print("Error reading config: %s, exiting", type(ex))
            logging.error(ex.args)
            return
        print("Config reading success")
        manager = Manager.get_instance()
        # vault password is mandatory: abort early without it
        if not results.vault_key:
            print('Vault key is necessary for launch (use -p key for provide if)')
            return
        manager.vault_key = results.vault_key
        # -------------------------------------
        # launch pynode
        # -------------------------------------
        worker_contract_address = worker_address
        manager.pynode_config_file_path = results.configuration_file
        manager.launch_mode = "0"  # results.launch_mode
        manager.eth_use = results.ethereum_use
        manager.eth_host = eth_host
        manager.eth_worker_node_account = eth_worker_node_account
        manager.eth_abi_path = results.abi_path
        manager.eth_pandora = pandora_address
        manager.eth_worker = worker_contract_address
        manager.ipfs_use = results.ipfs_use
        manager.ipfs_host = ipfs_host
        manager.ipfs_port = ipfs_port
        manager.ipfs_storage = ipfs_storage
        manager.pynode_start_on_launch = pynode_start_on_launch
        manager.web_socket_enable = socket_enable
        manager.web_socket_host = socket_host
        manager.web_socket_port = socket_port
        manager.web_socket_listeners = socket_listen
        # echo the effective configuration for the operator
        print("Pynode production launch")
        print("Node launch mode : " + str(manager.launch_mode))
        print("Ethereum use : " + str(results.ethereum_use))
        print("Ethereum host : " + str(eth_host))
        print("Worker node account owner : " + str(eth_worker_node_account))
        print("Primary contracts addresses")
        print("Pandora main contract : " + str(pandora_address))
        print("Worker node contract : " + str(worker_contract_address))
        print("IPFS configuration")
        print("IPFS use : " + str(results.ipfs_use))
        print("IPFS host : " + str(ipfs_host))
        print("IPFS port : " + str(ipfs_port))
        print("IPFS file storage : " + str(ipfs_storage))
        print("Web socket enable : " + str(socket_enable))
        # inst contracts
        instantiate_contracts(results.abi_path, eth_hooks)
        # launch socket web listener
        if socket_enable == 'True':
            print("Launch client socket listener")
            print("Web socket enable : " + str(manager.web_socket_enable))
            print("Web socket host : " + str(manager.web_socket_host))
            print("Web socket port : " + str(manager.web_socket_port))
            print("Web socket listeners : " + str(manager.web_socket_listeners))
            WebSocket(socket_host, socket_port, socket_listen)
        # launch pynode
        if pynode_start_on_launch == 'True':
            print("Launch pynode...")
            run_pynode()