def discovered_tasks(self, tasks):
    """Process tasks announced on the network.

    ``tasks['data']`` is a JSON-encoded list of ``{'name', 'address'}``
    records. For each one we fetch the task metadata from the API; if it
    names a data directory we only listen for models when that directory
    exists locally, otherwise a shipped adapter is loaded instead.
    """
    print(f'{Fore.WHITE}{Back.BLACK} TASKS {Style.RESET_ALL}')
    print(f'From\t\t\t\tName\t\t\t\tAddress')
    print('==================================================================')

    data = json.loads(tasks['data'])
    fr = tasks['from']  # base58.encode(tasks['from'])
    for task in data:
        name = task['name']
        addr = task['address']
        print(f'{fr}\t{name}\t{addr}')

        t = self.api.get_json(addr)
        # Membership test directly on the dict — no need for .keys()
        if 'data_dir' in t:
            data_dir = t['data_dir']
            if os.path.exists(f'data/{data_dir}'):
                self.listen_for_models(name)
                utils.store_task(name, addr)
            else:
                print(f"DON'T HAVE DATA FOR {name} DATA DIRECTORY: {data_dir}")
        elif 'adapter' in t:
            self.listen_for_models(name)
            utils.store_task(name, addr)
            self.load_adapter(t['adapter'])
def add_task(self, name, dataset=None, data_dir=None, adapter=None):
    """Publish a new task to the network.

    At least one of ``dataset``, ``data_dir`` or ``adapter`` must be
    given; otherwise an error is printed and nothing is published. An
    adapter file is read as bytes and stored via the API, with its
    address recorded in the task metadata.
    """
    # 04/02/2018: This condition will need to be changed
    if data_dir is None and adapter is None and dataset is None:
        # Message now matches the guard, which also checks `dataset`
        print(
            f'{Fore.RED}data_dir, adapter and dataset can not all be None{Style.RESET_ALL}'
        )
        return

    task_data = {'name': name, 'creator': self.id}
    # 04/20/2018: Added new parameter for different dataset
    task_data['dataset'] = dataset
    if data_dir is not None:
        task_data['data_dir'] = data_dir
    if adapter is not None:
        # `with` closes the file on exit; an explicit close is redundant
        with open(adapter, 'rb') as f:
            adapter_bin = f.read()
        adapter_addr = self.api.add_bytes(adapter_bin)
        task_data['adapter'] = adapter_addr

    addr = self.api.add_json(task_data)
    utils.store_task(name, addr)
    data = json.dumps([{'name': name, 'address': addr}])
    self.publish('openmined:add_task', data)
def add_task(self, name, data_dir):
    """Register a task (name + data directory) and announce it.

    The task metadata is stored via the API, recorded locally, and then
    broadcast on the 'openmined:add_task' channel.
    """
    record = {
        'name': name,
        'creator': self.id,
        'data_dir': data_dir,
    }
    addr = self.api.add_json(record)
    utils.store_task(name, addr)

    announcement = json.dumps([{'name': name, 'address': addr}])
    self.publish('openmined:add_task', announcement)
def discovered_tasks(self, tasks):
    """Handle tasks announced on the network, listening only where local
    data for the task's data directory exists."""
    print(f'{Fore.WHITE}{Back.BLACK} TASKS {Style.RESET_ALL}')
    print(f'From\t\t\t\tName\t\t\t\tAddress')
    print('==================================================================')

    fr = base58.encode(tasks['from'])
    for task in json.loads(tasks['data']):
        name, addr = task['name'], task['address']
        print(f'{fr}\t{name}\t{addr}')

        data_dir = self.api.get_json(addr)['data_dir']
        # TODO should only listen on task channels that which i have data for
        if not os.path.exists(f'data/{data_dir}'):
            print(f"DON'T HAVE DATA FOR {name} DATA DIRECTORY: {data_dir}")
        else:
            self.listen_for_models(name)
            utils.store_task(name, addr)
def add_task(self, name, data_dir=None, adapter=None):
    """Publish a task backed by a data directory and/or an adapter file.

    At least one of ``data_dir`` / ``adapter`` must be provided;
    otherwise an error is printed and nothing is published.
    """
    # PEP 8: compare to None with `is`, not `==`
    if data_dir is None and adapter is None:
        print(
            f'{Fore.RED}data_dir and adapter can not both be None{Style.RESET_ALL}'
        )
        return

    task_data = {'name': name, 'creator': self.id}
    if data_dir is not None:
        task_data['data_dir'] = data_dir
    if adapter is not None:
        # `with` closes the file on exit; an explicit close is redundant
        with open(adapter, 'rb') as f:
            adapter_bin = f.read()
        adapter_addr = self.api.add_bytes(adapter_bin)
        task_data['adapter'] = adapter_addr

    addr = self.api.add_json(task_data)
    utils.store_task(name, addr)
    data = json.dumps([{'name': name, 'address': addr}])
    self.publish('openmined:add_task', data)
def discovered_tasks(self, tasks):
    """
    People publish new tasks to the network which get processed by this
    method. A task is a JSON object which has a data directory — and that
    data gets processed by an adapter which is also sent over the wire.
    """
    print(f'{Fore.WHITE}{Back.BLACK} TASKS {Style.RESET_ALL}')
    print(f'From\t\t\t\tName\t\t\t\tAddress')
    print(
        '=================================================================='
    )

    data = json.loads(tasks['data'])
    fr = base58.encode(tasks['from'])
    for task in data:
        name = task['name']
        addr = task['address']
        print(f'{fr}\t{name}\t{addr}')

        t = self.api.get_json(addr)
        # Membership test directly on the dict — no need for .keys()
        if 'data_dir' in t:
            data_dir = t['data_dir']
            if os.path.exists(f'data/{data_dir}'):
                self.listen_for_models(name)
                utils.store_task(name, addr)
            else:
                print(
                    f"DON'T HAVE DATA FOR {name} DATA DIRECTORY: {data_dir}"
                )
        elif 'adapter' in t:
            self.listen_for_models(name)
            utils.store_task(name, addr)
            self.load_adapter(t['adapter'])