def init_from_ergast(self, season=None, roundn=None):
    """Populate race and qualifying data from the Ergast F1 API.

    :param season: championship year; when omitted (with roundn) the most
        recent race is fetched via the /current/last endpoint
    :param roundn: round number within the season
    """
    url_base = r'https://ergast.com/api/f1'
    if not season or not roundn:
        urlr = url_base + '/current/last/results' + '.json'
    elif season and roundn:
        urlr = url_base + '/' + str(season) + '/' + str(roundn) + '/results.json'
    rawjson = requests.get(urlr).json()
    self.rawr = namedtupled.map(rawjson)
    self.Raceraw = self.rawr.MRData.RaceTable.Races[0]
    # the qualifying URL reuses the season/round reported by the results
    # payload when none were given explicitly
    if not season or not roundn:
        urlq = url_base + '/' + self.Raceraw.season + '/' + self.Raceraw.round + '/qualifying.json'
    elif season and roundn:
        urlq = url_base + '/' + str(season) + '/' + str(roundn) + '/qualifying.json'
    rawjson_quali = requests.get(urlq).json()
    self.rawq = namedtupled.map(rawjson_quali)
    self.Qualiraw = self.rawq.MRData.RaceTable.Races[0]
    self.season = int(self.Raceraw.season)
    self.roundn = int(self.Raceraw.round)
    self.raceName = self.Raceraw.raceName
    dateraw = re.match(r'(\d{4})-(\d{2})-(\d{2})', self.Raceraw.date)
    # BUG FIX: the day was previously taken from group(2) (the month),
    # so e.g. "2021-05-23" produced date(2021, 5, 5); group(3) is the day.
    self.date = datetime.date(int(dateraw.group(1)),
                              int(dateraw.group(2)),
                              int(dateraw.group(3)))
def __init__(self, **kwargs):
    """Build a transaction object from keyword arguments.

    Recognized keys: ``data`` (wrapped as a TxData named tuple), ``tx``,
    ``hash`` and ``metadata`` (wrapped as a TxMeta named tuple); missing
    keys default to None.
    """
    self.data = namedtupled.map(kwargs.get("data", None), _nt_name="TxData")
    self.tx = kwargs.get("tx", None)
    self.hash = kwargs.get("hash", None)
    self.metadata = namedtupled.map(kwargs.get("metadata", None), _nt_name="TxMeta")
def test_map():
    """namedtupled.map must handle both a dict root and a list-of-dicts root."""
    nested = {'binks': {'says': 'meow'}}
    cat = namedtupled.map(nested)
    assert cat.binks.says == 'meow'
    records = [{'id': 'binks', 'says': 'meow'},
               {'id': 'cedric', 'says': 'prrr'}]
    cats = namedtupled.map(records)
    assert cats[1].says == 'prrr'
def test_reducer():
    """reduce must round-trip mapped structures back to plain dicts/lists."""
    nested = {'binks': {'says': 'meow'}}
    cat = namedtupled.map(nested)
    assert namedtupled.reduce(cat) == {'binks': {'says': 'meow'}}
    records = [{'id': 'binks', 'says': 'meow'},
               {'id': 'cedric', 'says': 'prrr'}]
    cats = namedtupled.map(records)
    assert namedtupled.reduce(cats) == [
        {'id': 'binks', 'says': 'meow'},
        {'id': 'cedric', 'says': 'prrr'},
    ]
def test_10_measurement_toplevel(self):
    """End-to-end check of measure_toplevel() with a UDP+TCP SOA probe pair."""
    def soa_probe(proto):
        # one SOA/jp query against a.dns.jp over the given transport
        return {
            "nameserver": "a.dns.jp",
            "destination": ["203.119.1.1"],
            "proto": proto,
            "query": {"qname": "jp", "rrtype": "SOA"},
        }

    self.measurer.measurement_info = namedtupled.map(
        [soa_probe("udp"), soa_probe("tcp")])
    self.measurer.set_measurer_id()
    self.measurer.set_server_boottime()
    # rewrite addr depending on test environment
    self.measurer.ipv4 = "10.0.2.15"
    self.measurer.net_desc_v4 = ("", "")
    self.assertTrue(self.measurer.measure_toplevel())
def geocode(self, address, zoomlevel=14):
    """Geocode an address through the Google Maps API.

    :param address: free-form address string
    :param zoomlevel: zoom level forwarded to self.map (default 14)
    :return: list of GeocodeResult tuples (coordinates, display name,
        neighborhood, county, map figure)
    """
    client = googlemaps.Client(key=self.apikey)
    geocodes = client.geocode(address)
    georesults = []
    # iterate the raw results directly; re-indexing via enumerate was redundant
    for raw in geocodes:
        geocode = namedtupled.map(raw)
        coordinates = (geocode.geometry.location.lat,
                       geocode.geometry.location.lng)
        displayname = geocode.formatted_address
        county = "Unknown"
        neighborhood = "Unknown"
        for component in geocode.address_components:
            if "administrative_area_level_2" in component.types:
                county = component.long_name
            elif "neighborhood" in component.types:
                neighborhood = component.long_name
        # BUG FIX: the mapped Google result has no `coordinates` attribute,
        # so `geocode.coordinates` raised AttributeError; pass the locally
        # computed (lat, lng) tuple instead.
        figure = self.map(coordinates, zoomlevel)
        georesults.append(GeocodeResult(
            coordinates, displayname, neighborhood, county, figure))
    return georesults
def chain_fixture(scope="module"):
    """Create two funded test accounts plus a node client as a TestData tuple."""
    # fresh accounts: ALICE for general tests, BOB is used by the oracle tests
    ACCOUNT = Account.generate()
    ACCOUNT_1 = Account.generate()
    # the genesis account funds the fixtures
    genesis = Account.from_private_key_string(PRIVATE_KEY)
    # Instantiate the node client for the tests
    NODE_CLI = NodeClient(Config(
        external_url=NODE_URL,
        internal_url=NODE_URL_DEBUG,
        # network_id=NETWORK_ID,
        blocking_mode=True,
        debug=True,
    ))
    # top up both accounts with 5000AE from genesis and report their balances
    for label, account in (("", ACCOUNT), (" (1)", ACCOUNT_1)):
        NODE_CLI.spend(genesis, account.get_address(), 5000000000000000000000)  # 5000AE
        a = NODE_CLI.get_account_by_pubkey(pubkey=account.get_address())
        print(f"Test account{label} is {account.get_address()} with balance {a.balance}")
    return namedtupled.map({"NODE_CLI": NODE_CLI, "ALICE": ACCOUNT, "BOB": ACCOUNT_1}, _nt_name="TestData")
def sign_encode_transaction(self, tx, metadata: dict=None):
    """
    Sign, encode and compute the hash of a transaction

    :param tx: the TxObject to be signed
    :param metadata: additional data to include in the output of the signed transaction object
    :return: encoded_signed_tx, encoded_signature, tx_hash
    """
    # accept either a TxObject (carrying .tx) or an already-encoded transaction
    encoded_input = tx.tx if hasattr(tx, "tx") else tx
    transaction = _tx_native(op=UNPACK_TX, tx=encoded_input)
    # raw byte representation of the transaction
    tx_bytes = decode(transaction.tx)
    # the signature covers the network id prefix plus the raw tx bytes
    signature = self.account.sign(_binary(self.network_id) + tx_bytes)
    encoded_signed_tx, encoded_signature = self.encode_signed_transaction(tx_bytes, signature)
    # hash of the fully signed, encoded transaction
    tx_hash = TxBuilder.compute_tx_hash(encoded_signed_tx)
    return namedtupled.map(dict(
        data=transaction.data,
        metadata=metadata,
        tx=encoded_signed_tx,
        hash=tx_hash,
        signature=encoded_signature,
        network_id=self.network_id,
    ), _nt_name="TxObject")
def __init__(self, **kwargs):
    """Initialize a state-channel connection.

    Locally consumed options (kwargs):
      url (str): channel websocket url, e.g. "ws://localhost:3001"
        (default: defaults.CHANNEL_URL)
      endpoint (str): websocket endpoint path (default: defaults.CHANNEL_ENDPOINT)
      sign (TxSigner): instance used to sign channel transactions

    Every other keyword argument is forwarded verbatim as a channel
    parameter; these include role, initiator_id/responder_id,
    push_amount, initiator_amount/responder_amount, channel_reserve,
    ttl, host/port, lock_period, existing_channel_id/offchain_tx (when
    reestablishing), the timeout_* protocol timeouts, and the
    offchain_message_handler/error_handler callbacks. See the channel
    protocol documentation for their exact semantics and defaults.
    """
    # keys handled here rather than forwarded to the channel
    local_keys = {'sign', 'endpoint', 'url'}
    endpoint = kwargs.get('endpoint', defaults.CHANNEL_ENDPOINT)
    ws_url = kwargs.get('url', defaults.CHANNEL_URL)
    self.sign = kwargs.get('sign', None)
    channel_params = {k: v for k, v in kwargs.items() if k not in local_keys}
    self.url = self.__channel_url(ws_url, channel_params, endpoint)
    self.params = namedtupled.map(channel_params)
    # connection state
    self.status = None
    self.id = None
    self.is_locked = False
    self.action_queue = Queue()
    self.handlers = {}
def __process_options(self, **kwargs):
    """Merge call-time overrides with the contract's default options.

    gas/gas_price/amount/fee/account may each be overridden via kwargs;
    a value of None falls back to the corresponding instance default.
    :return: a ContractOptions named tuple
    :raises ValueError: when no account is available for signing
    :raises TypeError: when the account is not a signing.Account
    """
    def _override(name, default):
        # an explicit (non-None) kwarg wins over the instance default
        supplied = kwargs.get(name)
        return default if supplied is None else supplied

    gas = _override('gas', self.gas)
    gas_price = _override('gas_price', self.gas_price)
    amount = _override('amount', self.contract_amount)
    fee = _override('fee', self.fee)
    account = _override('account', self.account)
    if account is None:
        raise ValueError(
            "Please provide an account to sign contract call transactions. You can set a default account using 'set_account' method"
        )
    if account and type(account) is not signing.Account:
        raise TypeError(
            "Invalid account type. Use `class Account` for creating an account"
        )
    return namedtupled.map({
        "gas": gas,
        "gas_price": gas_price,
        "amount": amount,
        "fee": fee,
        "account": account
    }, _nt_name="ContractOptions")
def api_method(*args, **kwargs):
    """Generated client method for one OpenAPI endpoint.

    Validates keyword arguments against the endpoint's declared
    parameters (type, interval, allowed values), places each value in
    the path, query string, or request body as declared, performs the
    HTTP request, and parses the reply.

    :return: {} for empty schemas, the single raw value for
        inline_response_200 schemas, or a named tuple of the JSON body
    :raises OpenAPIArgsException: on missing/invalid parameters
    :raises OpenAPIClientException: on unknown or error HTTP replies
    """
    query_params = {}
    post_body = {}
    target_endpoint = api.endpoint
    for p in api.params:
        # get the value or default
        val, ok = self._get_param_val(kwargs, p)
        if not ok:
            raise OpenAPIArgsException(f"missing required parameter {p.name}")
        # if none continue
        if val is None:
            continue
        # check the type; $ref parameters are model objects and are not validated yet
        if p.field.type.startswith("#/definitions/"):
            # TODO: validate the model
            pass
        elif not self._is_valid_type(val, p.field):
            raise OpenAPIArgsException(f"type error for parameter {p.name}, expected: {p.field.type} got {type(val).__name__}", )
        # check the ranges
        # NOTE(review): the message reads p.minimum/p.maximum while the check
        # uses p.field -- presumably these should be p.field.minimum /
        # p.field.maximum; verify against the param object before relying on
        # this error path. The "=<" spelling is also unconventional.
        if not self._is_valid_interval(val, p.field):
            raise OpenAPIArgsException(f"value error for parameter {p.name}, expected: {p.minimum} =< {val} =< {p.maximum}", )
        # check allowed values
        # NOTE(review): the message joins p.values while the check uses
        # p.field.values -- likely the same inconsistency as above.
        if len(p.field.values) > 0 and val not in p.field.values:
            raise OpenAPIArgsException(f"Invalid value for param {p.name}, allowed values are {','.join(p.values)}")
        # if in path substitute
        if p.pos == 'path':
            target_endpoint = target_endpoint.replace('{%s}' % p.name, str(val))
        # if in query add to the query
        if p.pos == 'query':
            query_params[p.raw] = val
        if p.pos == 'body':
            post_body = val
    # make the request
    if api.http_method == 'get':
        http_reply = requests.get(target_endpoint, params=query_params)
        api_response = api.responses.get(http_reply.status_code, None)
    else:
        http_reply = requests.post(target_endpoint, params=query_params, json=post_body)
        api_response = api.responses.get(http_reply.status_code, None)
    if self.debug:
        logging.debug(f">>>> ENDPOINT {target_endpoint}\n >> QUERY \n{query_params}\n >> BODY \n{post_body} \n >> REPLY \n{http_reply.text}", )
    # unknown error: status code not declared in the endpoint's responses
    if api_response is None:
        raise OpenAPIClientException(f"Unknown error {target_endpoint} {http_reply.status_code} - {http_reply.text}", code=http_reply.status_code)
    # success
    if http_reply.status_code == 200:
        # parse the http_reply
        if len(api_response.schema) == 0:
            return {}
        if "inline_response_200" in api_response.schema:
            # this are raw values, doesnt make sense to parse into a dict
            raw = http_reply.json()
            return list(raw.values())[0]
        jr = http_reply.json()
        return namedtupled.map(jr, _nt_name=api_response.schema)
    # error
    raise OpenAPIClientException(f"Error: {api_response.desc}", code=http_reply.status_code)
def test_namedtupled_map_object_keywords(mapping=mapping_keywords):
    """Keys that are Python keywords must be renamed, not rejected."""
    try:
        mapped = namedtupled.map(mapping)
    except ValueError:
        # Type names and field names cannot be a keyword: 'from'
        assert False
    # 'from' is exposed with a trailing underscore, all fields preserved
    assert mapped.from_ == 'John Doe'
    assert len(mapped._fields) == len(mapping)
def merge_branch_net(model_state_fn):
    """Rebuild a merged-parameter network from a saved model state file.

    :param model_state_fn: path to the joblib-saved model state
    :return: (net, model) -- the initialized layers and the Model instance
    """
    global NET_TEMPLATE
    saved_state = joblib.load(model_state_fn)
    model = Model(namedtupled.map(saved_state).config)
    merged_params = fetch_merged_parameters(model)
    net = init_layers(NET_TEMPLATE, merged_params)
    return net, model
def test_namedtupled_map_array(mapping=mapping_array):
    """Attribute access must work through a list-rooted mapping."""
    mapped = namedtupled.map(mapping)
    first = mapped[0]
    assert first.tito.tata == 'tutu'
    assert first.tito.frobnicator == ['this', 'is', 'not', 'a', 'mapping']
    assert first.foo == 'bar'
    assert first.baz.qux == 'quux'
    assert first.alist[0].a == 'A'
    assert first.alist[1].two == '2'
    # mapped nodes are namedtuples, so they compare unequal to plain dicts
    assert first.baz != {'qux': 'quux'}
    assert first.alist[0] != {'one': '1', 'a': 'A'}
def client_fixture(scope="module"):
    """Provide a configured NodeClient wrapped in a TestData named tuple."""
    # Instantiate the node client for the tests
    node_config = Config(
        external_url=NODE_URL,
        internal_url=NODE_URL_DEBUG,
        # network_id=NETWORK_ID,
        blocking_mode=True,
        debug=True,
    )
    return namedtupled.map({"NODE_CLI": NodeClient(node_config)}, _nt_name="TestData")
def _txdata_to_txobject(self, data: dict, descriptor: dict, metadata: dict = {}, compute_hash=True) -> TxObject:
    """Serialize a transaction-data dict into a TxObject.

    Walks the descriptor's field schema, encoding each value by its
    declared field type into an RLP-ready list, then RLP+base64 encodes
    the result.

    :param data: transaction fields; must include "tag"
    :param descriptor: transaction descriptor carrying the field "schema"
        (NOTE(review): mutable default for `metadata` -- harmless here
        because it is deep-copied before use, but worth confirming)
    :param metadata: extra metadata to attach to the resulting TxObject
    :param compute_hash: when True, also compute and set the tx hash
    :return: the assembled TxObject
    """
    # initialize the right data size
    # this is PYTHON to POSTBODY
    schema = descriptor.get("schema", [])
    raw_data = [0] * (len(schema) + 1)  # the +1 is for the tag
    # set the tx tag first
    raw_data[0] = _int(data.get("tag"))
    # parse fields and encode them, dispatching on the declared field type
    for label, fn in schema.items():
        if fn.field_type == _INT:
            raw_data[fn.index] = _int(data.get(label, 0))
        elif fn.field_type == _ID:
            raw_data[fn.index] = _id(data.get(label))
        elif fn.field_type == _ENC:
            raw_data[fn.index] = decode(data.get(label))
        elif fn.field_type == _OTTL_TYPE:
            # oracle TTL types are mapped through a lookup table
            raw_data[fn.index] = _int(idf.ORACLE_TTL_TYPES.get(data.get(label)))
        elif fn.field_type == _SG:
            # signatures are always a list
            raw_data[fn.index] = [decode(sg) for sg in data.get(label, [])]
        elif fn.field_type == _VM_ABI:
            # vm/abi are encoded in the same 32bit length block
            raw_data[fn.index] = _int(data.get("vm_version")) + _int(data.get("abi_version"), 2)
        elif fn.field_type == _BIN:
            # this are binary string #TODO: may be byte array
            raw_data[fn.index] = _binary(data.get(label))
        elif fn.field_type == _PTR:
            # this are name pointers: list of [key, id] pairs
            raw_data[fn.index] = [[_binary(p.get("key")), _id(p.get("id"))] for p in data.get(label, [])]
        elif fn.field_type == _TX:
            # this can be raw or tx object (unwrap .tx when present)
            tx = data.get(label).tx if hasattr(data.get(label), "tx") else data.get(label)
            raw_data[fn.index] = decode(tx)
    # encode the transaction in rlp
    rlp_tx = rlp.encode(raw_data)
    # encode the tx in base64
    rlp_b64_tx = encode(idf.TRANSACTION, rlp_tx)
    # copy the data before modifying
    tx_data = copy.deepcopy(data)
    # build the tx object
    txo = TxObject(
        data=namedtupled.map(tx_data, _nt_name="TxData"),
        tx=rlp_b64_tx,
    )
    # compute the tx hash
    if compute_hash:
        txo.hash = hash_encode(idf.TRANSACTION_HASH, rlp_tx)
    # copy the metadata if exists or initialize it if None
    tx_meta = copy.deepcopy(metadata) if metadata is not None else {}
    # compute the minimum fee when the descriptor declares a fee field
    if descriptor.get("fee") is not None:
        tx_meta["min_fee"] = self.compute_min_fee(data, descriptor, raw_data)
    # only set the metadata if it is not empty
    txo.set_metadata(tx_meta)
    return txo
def build_tx_object(tx_data, tx_raw, fee_idx, min_fee):
    """Assemble a TxObject, bumping the fee to the minimum when too low.

    NOTE: relies on (and may mutate) the enclosing-scope `tx_native` list.
    """
    # raise an under-minimum fee in both the native list and the data dict
    if tx_data.get("fee") < min_fee:
        tx_native[fee_idx] = _int(min_fee)
        tx_data["fee"] = min_fee
    tx_encoded = encode_rlp(idf.TRANSACTION, tx_native)
    return namedtupled.map(dict(
        data=tx_data,
        tx=tx_encoded,
        hash=TxBuilder.compute_tx_hash(tx_encoded),
    ), _nt_name="TxObject")
def test_namedtupled_map_array(mapping=mapping_array):
    """Round-tripping a list-rooted mapping through reducer yields plain data."""
    mapped = namedtupled.map(mapping)
    reduced = namedtupled.reducer(mapped)
    first = reduced[0]
    assert first['tito']['tata'] == 'tutu'
    assert first['tito']['frobnicator'] == ['this', 'is', 'not', 'a', 'mapping']
    assert first['foo'] == 'bar'
    assert first['baz']['qux'] == 'quux'
    assert first['alist'][0]['a'] == 'A'
    assert first['alist'][1]['two'] == '2'
    # after reduction the values are ordinary dicts again
    assert first['baz'] == {'qux': 'quux'}
    assert first['alist'][0] == {'one': '1', 'a': 'A'}
def get_config(env):
    """Load <repo>/envs/<env>/config.json as a named tuple.

    :param env: environment directory name under envs/
    :return: named tuple view of the parsed JSON, or the string
        'No JSON found' when the config file does not exist
    """
    env_folder = os.path.join(os.path.dirname(__file__), '..', '..', 'envs', env)
    default_path = os.path.abspath(os.path.join(env_folder, "config.json"))
    if os.path.isfile(default_path):
        # FIX: the file handle was opened and never closed; a context
        # manager closes it deterministically.
        with open(default_path) as config_file:
            default_json = json.load(config_file)
        return namedtupled.map(default_json)
    return 'No JSON found'
def compute_absolute_ttl(self, relative_ttl):
    """
    Compute the absolute ttl by adding the ttl to the current height of the chain

    :param relative_ttl: the relative ttl, if 0 will set the ttl to 0
    """
    current_height = self.get_current_key_block_height()
    ttl = dict(
        absolute_ttl=0,
        height=current_height,
        estimated_expiration=datetime.now(),
    )
    if relative_ttl > 0:
        ttl["absolute_ttl"] = current_height + relative_ttl
        # expiration estimate assumes a constant key-block interval
        ttl["estimated_expiration"] = datetime.now() + timedelta(
            minutes=self.config.key_block_interval * relative_ttl)
    return namedtupled.map(ttl, _nt_name="TTL")
def query(terms, auth=None, as_dict=True):
    """
    Constructs v2 explicit queries with parameters outlined here:
    https://developer.usajobs.gov/Search-API/API-Query-Parameters

    :param terms: query string appended to the search endpoint
    :param auth: credentials object exposing `email` and `apikey`
        attributes; when omitted they are read from the environment
    :param as_dict: return plain dicts (True) or named tuples (False)
    """
    base_url = 'https://data.usajobs.gov/api/search?'
    url = base_url + terms
    # BUG FIX: `headers` was only assigned when auth was falsy, so passing
    # credentials raised NameError. Assumes auth exposes .email/.apikey
    # like the namedtupled.env fallback -- TODO confirm against callers.
    if not auth:
        auth = namedtupled.env(['email', 'apikey'])
    headers = connect(email=auth.email, apikey=auth.apikey)
    resp = requests.get(url, headers=headers)
    data = resp.json()
    if not as_dict:
        data = namedtupled.map(data)
    return data
def test_namedtupled_map_object(mapping=mapping):
    """Dict-rooted mappings must expose nested values as attributes."""
    mapped = namedtupled.map(mapping)
    assert mapped.tito.tata == 'tutu'
    assert mapped.tito.frobnicator == ['this', 'is', 'not', 'a', 'mapping']
    assert mapped.foo == 'bar'
    assert mapped.baz.qux == 'quux'
    assert mapped.alist[0].a == 'A'
    assert mapped.alist[1].two == '2'
    # converted nodes are namedtuples, not the source dicts
    assert mapped.baz != {'qux': 'quux'}
    assert mapped.alist[0] != {'one': '1', 'a': 'A'}
    # tuples/lists of non-mapping values pass through unchanged
    assert mapped.huh == [('a', 'b', 'c')]
    assert mapped.huh2 == ('a', 'b', ('a', 'b', 'c'))
    assert mapped.name == 'Bob'
def __init__(self, model_fn, task, out_dir=None, hop_sz=1.):
    """Set up a multi-task feature extractor from a saved model state.

    :param model_fn: path to a joblib-saved model state file; when the
        file does not exist, a default config with a 'rand' target is
        used instead
    :param task: task name used in the output feature filename
    :param out_dir: output directory, forwarded to the base extractor
    :param hop_sz: hop size in seconds used for feature extraction
    """
    super(MTLExtractor, self).__init__(task, out_dir, hop_sz, prob=True)
    # load configuration for model
    if os.path.exists(model_fn):
        # model id is the basename with the trailing '_state' suffix removed
        model_id = os.path.splitext(os.path.basename(model_fn))[0]
        self.model_id = model_id.split('_state')[0]
        model_state = joblib.load(model_fn)
        self.config = namedtupled.map(model_state['config'])
    else:
        self.model_id = 'rnd'
        # load default config and change task as rand
        self.config = load_config('config/config.example.json')
        self.config.target[0] = 'rand'
    self.out_fn = os.path.join(
        self.root, self.model_id + '_{}_feature.h5'.format(self.task))
    self.targets = self.config.target
    # load model
    self.model = Model(self.config)
    # variable set up: audio/STFT parameters come from the hyper parameters
    self.sr = self.config.hyper_parameters.sample_rate
    self.length = self.config.hyper_parameters.patch_length
    self.n_fft = self.config.hyper_parameters.n_fft
    self.hop_sz_trn = self.config.hyper_parameters.hop_size
    self.input = self.config.hyper_parameters.input
    self.hop = int(self.hop_sz * self.sr)
    sig_len = int(self.sr * self.length)
    # trim the signal length to a multiple of the training hop size
    self.sig_len = sig_len - sig_len % self.hop_sz_trn
    # prepare preprocessor if needed
    if self.config.hyper_parameters.input == 'melspec':
        self.melspec = MelSpectrogramGPU(2, self.sr, self.n_fft,
                                         self.hop_sz_trn)
    # set feature layer names depending on where the network branches
    branch_at = self.config.hyper_parameters.branch_at
    if isinstance(branch_at, (int, float)):
        # one fc feature layer per target branch
        self.feature_layers = ['{}.fc'.format(t) for t in self.targets]
    elif isinstance(branch_at, (str, unicode)) and branch_at == "fc":
        # NOTE(review): `unicode` only exists on Python 2 -- this branch
        # raises NameError on Python 3; confirm the supported runtime.
        self.feature_layers = ['fc']
    self._prepare_db()
    super(MTLExtractor, self).post_init()
    self.hf.attrs['targets'] = [t.encode() for t in self.targets]
def __generate_methods(self):
    """Attach one contract-call method for every function declared in the ACI."""
    if not self.aci:
        return
    for f in self.aci.encoded_aci.contract.functions:
        descriptor = {
            "name": f.name,
            "doc": f"Contract Method {f.name}",
            "arguments": f.arguments,
            "returns": f.returns,
            "stateful": f.stateful,
            "payable": f.payable,
        }
        self.__add_contract_method(
            namedtupled.map(descriptor, _nt_name="ContractMethod"))
def decode_bytecode(compiled):
    """
    Decode an encoded contract to it's components

    :param compiled: the encoded bytecode to decode as got from the 'compile' function
    :return: a named tuple with a decoded contract
    """
    # accept either a prefixed string or raw bytes; both decode via rlp
    if isinstance(compiled, str):
        if not utils.prefix_match(identifiers.BYTECODE, compiled):
            raise ValueError(
                f"Invalid input, expecting {identifiers.BYTECODE}_ prefix")
        raw_contract = hashing.decode_rlp(compiled)
    elif isinstance(compiled, bytes):
        raw_contract = hashing.decode_rlp(compiled)
    else:
        raise ValueError(f"Invalid input type")
    if not isinstance(raw_contract, list) or len(raw_contract) < 6:
        raise ValueError(f"Invalid contract structure")
    tag = hashing._int_decode(raw_contract[0])
    vsn = hashing._int_decode(raw_contract[1])
    if tag != identifiers.OBJECT_TAG_SOPHIA_BYTE_CODE:
        raise ValueError(
            f"Invalid input, expecting object type {identifiers.OBJECT_TAG_SOPHIA_BYTE_CODE}, got {tag}"
        )
    # one entry per contract function: hash, name, argument and output types
    type_info = [
        dict(
            fun_hash=entry[0],
            fun_name=hashing._binary_decode(entry[1], str),
            arg_type=entry[2],
            out_type=entry[3],
        )
        for entry in raw_contract[3]
    ]
    contract_data = dict(
        raw=raw_contract,
        tag=tag,
        vsn=vsn,
        src_hash=raw_contract[2],
        type_info=type_info,
        bytecode=raw_contract[4],
        compiler_version=hashing._binary_decode(raw_contract[5], str),
        # payable=raw_contract[6]
    )
    return namedtupled.map(contract_data, _nt_name="ContractBin")
def __link_type_def(self, t, bindings):
    """Resolve a contract type reference against the ACI type definitions.

    :param t: a type reference -- either a "Scope.name" string or a
        single-key dict whose key is such a string
    :param bindings: ACI bindings exposing contract.type_defs and
        contract.state
    :return: the reduced (plain) typedef for the referenced type
    """
    # take the name after the dot; the dict form carries it in its sole key
    _, type_defs = t.split('.') if isinstance(t, str) else list(
        t.keys())[0].split('.')
    # the implicit "state" type is considered alongside the declared typedefs
    aci_types = bindings.contract.type_defs + [
        namedtupled.map({
            "name": "state",
            "typedef": bindings.contract.state,
            "vars": []
        })
    ]
    aci_types = filter(lambda x: x.name == type_defs, aci_types)
    # NOTE(review): raises IndexError when the type name is not found
    aci_types = list(aci_types)[0]
    if len(list(aci_types.vars)) > 0:
        # parameterized type: substitute its type variables
        # NOTE(review): attribute assignment assumes the mapped object is
        # mutable -- confirm namedtupled.map permits this
        aci_types.typedef = self.__inject_vars(t, aci_types)
    return namedtupled.reduce(aci_types.typedef)
def get_field_values(self, values):
    """Normalize a raw field spec dict and return it as a named tuple.

    NOTE: mutates `values` in place via update() before mapping it.
    """
    key = values['key']
    label = values.get('label', key).title()
    computed = {
        'key': key,
        'label': label,
        'is_required': values.get('required'),
        'placeholder': values.get('default', f'Please provide {label}'),
        'max_length': values.get('max_length', 0),
        'min_length': values.get('min_length', 0),
        'id': key,
        'name': key,
    }
    # computed values override whatever the raw spec carried
    values.update(computed)
    return namedtupled.map(values)
def build_tx_object(tx_data, tx_raw, fee_idx, min_fee):
    """Validate the fee and assemble the final TxObject.

    A fee <= 0 means "use the computed minimum"; an explicitly provided
    fee below the minimum is rejected. Mutates the enclosing-scope
    `tx_native` list at fee_idx with the effective fee.
    :raises TransactionFeeTooLow: when the provided fee is below min_fee
    """
    fee = tx_data.get("fee")
    if fee <= 0:
        # if fee is not set use the min fee
        tx_data["fee"] = min_fee
    elif fee < min_fee:
        # if it is set check that is greater then the minimum fee
        raise TransactionFeeTooLow(
            f'Minimum transaction fee is {min_fee}, provided fee is {tx_data.get("fee")}'
        )
    tx_native[fee_idx] = _int(tx_data.get("fee"))
    tx_encoded = encode_rlp(idf.TRANSACTION, tx_native)
    return namedtupled.map(dict(
        data=tx_data,
        tx=tx_encoded,
        hash=TxBuilder.compute_tx_hash(tx_encoded),
    ), _nt_name="TxObject")
def search(terms, start=0, step=100, as_dict=False):
    """
    Constructs v3 fuzzy searches with parameters outlined here:
    http://search.digitalgov.gov/developer/jobs.html
    """
    base_url = format_search(terms) + '&size=' + str(step)
    results = requests.get(base_url).json()
    data = results
    # a full first page implies there may be more pages to fetch
    if len(data) == step:
        while results != []:
            start += step
            results = requests.get(base_url + '&from=' + str(start)).json()
            data += results
    if not as_dict:
        data = namedtupled.map(data)
    return data
def load_config(self) -> None:
    """Read the shared and script-specific INI files into self.conf.

    The script-specific config name is derived from the script filename
    by swapping the script extension for the config extension.
    """
    specific_config_basename = os.path.basename(self.script_name).replace(
        BatchBaseApplication.SCRIPT_EXT, BatchBaseApplication.CONF_EXT)
    common_conf = configparser.ConfigParser()
    self_conf = configparser.ConfigParser()
    common_conf.read(os.path.join(config.CONFIG_DIR,
                                  BatchBaseApplication.GENERAL_CONF_NAME))
    self_conf.read(os.path.join(config.CONFIG_DIR, specific_config_basename))
    # _sections is a private ConfigParser attribute, hence the ignores
    common_section = self._convert_config_type(common_conf._sections)  # type: ignore
    own_section = self._convert_config_type(self_conf._sections)  # type: ignore
    self.conf = namedtupled.map(dict(common=common_section, self=own_section))
def filename_to_named_tuple(filename):
    """Load a JSON file, echo it for inspection, and return it as named tuples."""
    with open(filename) as data_file:
        parsed = json.load(data_file)
    pprint(parsed)
    return namedtupled.map(parsed)