def gen_data_packet(src_id, dst_id, tos='low-loss'):
    """Build a routable data packet as an addict.Dict.

    Args:
        src_id: sender identifier.
        dst_id: receiver identifier.
        tos: type-of-service tag, defaults to 'low-loss'.

    Returns:
        addict.Dict carrying src_id, dst_id, ttl and tos.
    """
    pkt = addict.Dict()
    pkt.src_id = src_id
    pkt.dst_id = dst_id
    pkt.ttl = DEFAULT_PACKET_TTL
    # Preferred transmit path is wifi00; tetra is the fallback.
    pkt.tos = tos
    return pkt
def _hook(cls, item): if isinstance(item, dict): return addict.Dict(item) if isinstance(item, (list, tuple)): return type(item)(cls._hook(elem) for elem in item) return item
def to_object(self):
    """Convert each mapping held in the data to an addict.Dict.

    Useful for interactive data exploration on IPython and similar tools.

    Returns:
        iterator: an iterator of addict.Dict
    """
    return (addict.Dict(record) for record in self)
def get_meals_with_pictures_with_status(self, status):
    """Fetch meals that have a picture and match *status*, wrapped in addict.Dict."""
    query = {'picture': {'$exists': True}, 'status': status}
    return addict.Dict(self.db.meals.find(query))
def calculate_final_average_by_year(self) -> None:
    """Calculates descriptive statistic by year.

    Groups per-album lyric counts by release year (parsed from the album
    name, which apparently ends in "[YYYY]" — TODO confirm), then stores
    descriptive statistics per year in self.year_statistics.

    NOTE(review): the merge below relies on constructor exceptions for
    control flow — addict.Dict((year, count)) with a tuple, ValueError on
    the first (empty) merge, TypeError when a year repeats. This is very
    fragile; verify the exception types actually fire as the comments
    claim before touching it.
    """
    group_by_years = addict.Dict()
    self.year_statistics = addict.Dict()
    album_lyrics = self.all_albums_lyrics_sum
    # with open(f'{os.getcwd()}/lyrics_sum_all_album.json', 'r') as f:
    #     album_lyrics = json.load(f)
    # Merge years together
    for i in album_lyrics:
        for album, count in i.items():
            # Year is taken from the trailing "[...]" of the album name.
            year = album.split('[')[-1].strip(']')
            try:
                group_by_years = addict.Dict(**group_by_years, **addict.Dict((year, count)))
            # First loop returns value error for empty dict
            except ValueError:
                group_by_years = addict.Dict((year, count))
            # Multiple years raise a TypeError - we append
            except TypeError:
                group_by_years.get(year).extend(count)
    for year, y_count in group_by_years.items():
        # Keep only integer counts strictly greater than 1 before computing stats.
        _y_count = [d for d in y_count if isinstance(d, int)]
        _y_count = [d for d in _y_count if d > 1]
        _d = self.get_descriptive_statistics(_y_count)
        self.year_statistics = addict.Dict(**self.year_statistics, **addict.Dict((year, _d)))
def json(self) -> Any:
    """
    (`Any`): A dict sent to the client. Default-type: `"application/json"` .
    You can set values like `resp.json.keyName.you = "want"` .
    """
    # Lazily create the backing addict.Dict on first access.
    if self._json is not None:
        return self._json
    self._json = addict.Dict()
    return self._json
def get_attribute_metadata(attr, data):
    """Build metadata (type and sanitized name) for a single attribute."""
    meta = addict.Dict()
    meta.type = get_attribute_type(attr, data)
    meta.name = sanitize_attribute(attr)
    return meta
def recursively_convert_dict(data):
    """Return an addict.Dict copy of *data* with leaf-level keys upper-cased.

    Nested dicts are converted recursively; note their own keys keep the
    original casing — only keys of non-dict values are upper-cased.
    """
    converted = addict.Dict()
    for key, value in data.items():
        if isinstance(value, dict):
            converted[key] = recursively_convert_dict(value)
        else:
            converted[str.upper(key)] = value
    return converted
def save_thumbs(subject, *key_path, thumbs):
    """Store *thumbs* in the stats model under the caller's address + key_path."""
    stats = StatsModel.instance()
    root = addict.Dict(getattr(stats, subject))
    # Prefix the path with the remote address so per-client data stays separate.
    full_path = (get_remote_address(),) + key_path
    node = root
    for key in full_path[:-1]:
        node = node[key]
    node[full_path[-1]] = thumbs
    setattr(stats, subject, root)
    stats.put()
async def analyze(self, nlpdata: addict.Dict) -> Result:
    """Tokenize the document content and attach NLTK part-of-speech tags."""
    res = addict.Dict()
    words = nltk.word_tokenize(nlpdata.content)
    res.tokens = nltk.pos_tag(words)
    return Result(name=self.name, version=self.version, result=res)
def play_last_thing(self):
    """Replay whatever played last: a blend, a radio station, or random content."""
    attrs = self.attr
    if "last_blend" in attrs:
        self.play_blend(addict.Dict(attrs["last_blend"]), speak=False, card=False)
    elif "last_radio" in attrs:
        self.play_radio(**attrs["last_radio"])
    else:
        self.play_random(speak=False, card=False)
def __init__(self, config=None):
    """Initialize a pending pipeline.

    Args:
        config: optional mapping of pipeline settings, applied via
            set_config(). Defaults to an empty configuration.
    """
    # Fix: the previous signature used a mutable default ({}) which is
    # shared across all calls; use the None sentinel instead.
    self.status = Pipeline.PENDING
    self.config = Pipeline.CONFIG
    self.thread = None
    self.logs = []       # accumulated log entries
    self.stages = []     # pipeline stages, registered later
    self.gist = addict.Dict()
    self.set_config(config if config is not None else {})
def get_attribute_metadata(attr, data):
    """Build metadata (name stripped of the column pattern) for an attribute.

    NOTE(review): `tipe` is computed from PATTERN_COLUMN but never used,
    the `data` parameter is never read, and `metadata.type` is always set
    to '' — the other get_attribute_metadata() in this file fills in a
    real type. Looks like the intent was metadata.type = tipe.group(...);
    confirm before changing.
    """
    tipe = re.search(PATTERN_COLUMN, attr)
    name = re.sub(PATTERN_COLUMN, '', attr).strip()
    metadata = addict.Dict()
    metadata.name = name
    metadata.type = ''
    return metadata
def build_distortions_response(channel, threshold=.5, argmax=True):
    """Aggregate the distortion head score (index 4) across all sides of *channel*.

    The "yes" score is the maximum over every side's head prediction map;
    `argmax` is currently unused.
    """
    result = addict.Dict()
    per_side = [side['head_predictions'][4].max() for side in channel.values()]
    result.response.yes = np.max(per_side)
    result.threshold = threshold
    result.default = "no"
    return result
def get_greet_dict():
    """Return a greeting mapping, falling back to a plain dict when addict is absent."""
    try:
        import addict
    except ImportError:
        return {"greet": "Hello (without addict) World!"}
    greeting = addict.Dict()
    greeting.greet = "Hello World!"
    return greeting
def get_thumbs(subject, *key_path):
    """Look up thumbs stored under the caller's address plus *key_path*.

    Returns None when nothing is stored at that path.
    """
    stats = StatsModel.instance()
    node = addict.Dict(getattr(stats, subject))
    for key in (get_remote_address(),) + key_path:
        node = node[key]
    # addict auto-creates empty Dicts for missing keys, so an empty
    # result means the path was never written.
    if node == {}:
        return None
    return node
def rtn_rx_parse_payload_data(raw):
    """Parse an IDENT-prefixed, zlib-compressed JSON payload.

    Layout: IDENT | 4-byte big-endian size | zlib(json).

    Returns:
        (True, addict.Dict) on success, (False, None) when the buffer is
        shorter than the advertised payload.
    """
    header_end = len(IDENT) + 4
    size = struct.unpack('>I', raw[len(IDENT):header_end])[0]
    # NOTE(review): the literal 7 presumably equals len(IDENT) + 4 — confirm.
    if len(raw) < 7 + size:
        print("message seems corrupt")
        return False, None
    payload = raw[header_end:header_end + size]
    decoded = str(zlib.decompress(payload), "utf-8")
    return True, addict.Dict(json.loads(decoded))
def build_lymph_node_response(channel, threshold=.5, argmax=True):
    """Aggregate the lymph-node head score (index 6) across all sides of *channel*.

    The "yes" score is the maximum over every side's head prediction map;
    `argmax` is currently unused.
    """
    result = addict.Dict()
    per_side = [side['head_predictions'][6].max() for side in channel.values()]
    result.response.yes = np.max(per_side)
    result.threshold = threshold
    result.default = "no"
    return result
def neuron_params(id):
    """Describe a 100-neuron IFCondAlpha population with the given id."""
    spec = {
        'type': 'population',
        'num_neurons': 100,
        'neuron_type': {'type': 'IFCondAlpha'},
        'id': id,
    }
    return addict.Dict(spec)
def _newMesosTask(self, job, offer):
    """Build the Mesos task object for the given Toil job and Mesos offer.

    The task carries the pickled job as its payload and declares cpus,
    disk and mem SCALAR resources. Disk and memory are rounded up to
    1 MiB since Mesos rejects smaller requests.

    Returns:
        addict.Dict shaped like a Mesos TaskInfo message.
    """
    def scalar_resource(name, value):
        # One SCALAR resource entry in the Mesos wire format.
        res = addict.Dict()
        res.name = name
        res.type = 'SCALAR'
        res.scalar.value = value
        return res

    task = addict.Dict()
    task.task_id.value = str(job.jobID)
    task.agent_id.value = offer.agent_id.value
    task.name = job.name
    task.data = encode_data(pickle.dumps(job))
    task.executor = addict.Dict(self.executor)

    # Hoisted: toMiB() was previously computed twice per resource.
    disk_mib = toMiB(job.resources.disk)
    if disk_mib <= 1:
        log.warning(
            "Job %s uses less disk than Mesos requires. Rounding %s up to 1 MiB.",
            job.jobID, job.resources.disk)
        disk_mib = 1

    mem_mib = toMiB(job.resources.memory)
    if mem_mib <= 1:
        log.warning(
            "Job %s uses less memory than Mesos requires. Rounding %s up to 1 MiB.",
            job.jobID, job.resources.memory)
        mem_mib = 1

    task.resources = [
        scalar_resource('cpus', job.resources.cores),
        scalar_resource('disk', disk_mib),
        scalar_resource('mem', mem_mib),
    ]
    return task
async def analyze(self, nlpdata: addict.Dict) -> Result:
    """Scan the document text for CVE and MSID identifiers."""
    body = nlpdata.content
    res = addict.Dict()
    res.cve = self.cve.findall(body)
    res.msid = self.msid.findall(body)
    return Result(name=self.name, version=self.version, result=res)
def sendUpdate(task, taskState, wallTime=None, msg=''):
    """Send a status update for *task* to the Mesos driver."""
    status = addict.Dict()
    status.task_id.value = task.task_id.value
    if self.id is not None:
        # Sign our messages as from us, since the driver doesn't do it.
        status.executor_id.value = self.id
    status.state = taskState
    status.timestamp = wallTime
    status.message = msg
    driver.sendStatusUpdate(status)
def test_vocabulary_tool() -> None:
    """Tool vocabulary test"""
    config = addict.Dict()
    config.alias = os.path.join(VOCABULARY_DATADIR, "tool_aliases.cfg")
    vocab = Vocabulary(config)
    # get(..., primary=True) resolves to the canonical tool name,
    # while plain item access returns the queried alias itself.
    assert vocab.get("backdoor:java/adwind", primary=True) == "jrat"
    assert vocab["backdoor:java/adwind"] == "backdoor:java/adwind"
def run(options={}, **kwargs):
    """Dispatch discovery from the supplied options.

    Accepts either plural ('modules'/'paths') or singular
    ('module'/'path') option keys; kwargs are merged over *options*.

    NOTE(review): the branch structure is asymmetric — 'modules' and
    'module' are independent ifs, while 'paths'/'path' chain via elif
    off 'module'. So 'modules' + 'paths' can both run, but 'module'
    suppresses 'paths'. Also the mutable default {} is shared across
    calls (harmless here since it is only read, but worth confirming).
    """
    options = addict.Dict(options, **kwargs)
    if options.modules:
        from_modules(options.modules)
    if options.module:
        from_modules([options.module])
    elif options.paths:
        from_paths(options.paths)
    elif options.path:
        from_paths([options.path])
def model_from_config(path: str):
    """Create model from configuration specified in config file and load
    checkpoint weights."""
    cfg = addict.Dict(parse_config(config=path))
    init_params = cfg.model.init_params
    # Skip encoder pretraining: the checkpoint below already holds
    # weights for the entire model.
    init_params["encoder_weights"] = None
    model = getters.get_model(architecture=cfg.model.architecture,
                              init_params=init_params)
    checkpoint = os.path.join(cfg.logdir, "checkpoints", "best.pth")
    model.load_state_dict(torch.load(checkpoint)["state_dict"])
    return model
def __init__(self, state, verbose=False):
    """Set up an idle controller bound to *state*."""
    super(Controller, self).__init__(verbose)
    self.state = state
    self.pid = addict.Dict()  # populated elsewhere — presumably PID settings; confirm
    self.input_ = None
    self.output = None
    self.dt = 0
    self.command = ""
def build_paths_response(channel, channel_id):
    """Write FPN and head prediction images to disk; return their paths per side."""
    sides = addict.Dict()
    fpn_names = config.MAMMOGRAPHY_PARAMS.NAMES['fpn']
    head_names = config.MAMMOGRAPHY_PARAMS.NAMES['head']
    for side, predictions in channel.items():
        sides[side] = addict.Dict()
        root = config.PATHS.OUTPUT / channel_id / side
        os.makedirs(root, exist_ok=True)
        # Predictions are float maps in [0, 1]; scale to uint8 for PNG output.
        for i, image in enumerate(predictions['fpn_predictions']):
            name = fpn_names[i]
            out = str(root / 'fpn_{}.png'.format(name))
            sides[side][name] = out
            cv2.imwrite(out, (image * 255).astype(np.uint8))
        for i, image in enumerate(predictions['head_predictions']):
            name = head_names[i]
            out = str(root / 'head_{}.png'.format(name))
            sides[side][name] = out
            cv2.imwrite(out, (image * 255).astype(np.uint8))
    return sides
def _buildExecutor(self):
    """
    Creates and returns an ExecutorInfo-shaped object representing our
    executor implementation.
    """
    executor = addict.Dict()
    executor.name = "toil"
    # The executor program is installed as a setuptools entry point by setup.py
    executor.command.value = resolveEntryPoint('_toil_mesos_executor')
    executor.executor_id.value = "toil-%i" % os.getpid()
    executor.source = pwd.getpwuid(os.getuid()).pw_name
    return executor
def rx_mcast_socket_cb(fd, queue_rt_proto):
    """Receive one multicast datagram from *fd* and enqueue it for the
    routing-protocol handler.

    Bug fix: on socket.error the function previously fell through and
    hit a NameError on the undefined `data`/`addr`; it now returns.
    """
    try:
        data, addr = fd.recvfrom(2048)
    except socket.error as e:
        warn('Expection: {}'.format(e))
        return  # nothing was received; bail out instead of using undefined names
    d = addict.Dict()
    d.proto = "IPv4"
    d.data = data
    d.src_addr = addr[0]
    d.src_port = addr[1]
    msg = addict.Dict()
    msg.type = TYPE_RTN_INTRA_IPC
    msg.data = d
    try:
        queue_rt_proto.put_nowait(msg)
        fmt = "received route protcol message from {}:{}\n"
        debug(fmt.format(addr[0], addr[1]))
    except asyncio.queues.QueueFull:
        # Best-effort delivery: drop the message when the queue is full.
        warn("queue overflow, strange things happens")
def __init__(self, status=None, code=200):
    """Create a response envelope with a fresh id and a mirrored schema block."""
    self.version = CONFIG.VERSION
    self.id = get_rand_uuid_str()
    self.status = assign_if_none(status, Response.Status.SUCCESS)
    self.code = code
    schema = addict.Dict()
    schema.id = get_rand_uuid_str()  # schema gets its own uuid, distinct from self.id
    schema.version = self.version
    schema.status = self.status
    self.schema = schema