def _validate(items, schema, next_validator):
    req_validators = list(schema.reqs)
    req_vals_pass = [False] * len(req_validators)
    other = []

    for val in iter_chain(schema.reqs, schema.opts):
        val.key_val.setup()

    for (name, value) in items:
        for (i, validator) in enumerate(req_validators):
            if _validator_safe_call(validator.key_val, name):
                req_vals_pass[i] = True
                next_validator(value, validator.value_val, name)
                break
        else:
            for validator in schema.opts:
                if _validator_safe_call(validator.key_val, name):
                    next_validator(value, validator.value_val, name)
                    break
            else:
                other.append(name)

    all_completed = all(val.key_val.teardown()
                        for val in iter_chain(schema.reqs, schema.opts))

    return (all(req_vals_pass) and all_completed, other)

async def pq_dtable_merge(current, past, _dsn_db=None):
    dins, dchg, ddel = _dtable_diff(current, past)

    table_name = current.__dset_item_class__.__name__

    dobj_cls = current.__dset_item_class__
    attrs = OrderedDict((attr_name, attr) for attr_name, attr in
                        iter_chain(dobj_cls.__dobject_key__.items(),
                                   dobj_cls.__dobject_att__.items()))
    seq_attrs = {}
    for n, attr in attrs.items():
        if issubclass(attr.type, dsequence):
            seq_attrs[n] = attr

    if dins.values:
        await _do_insert(table_name, seq_attrs, dins, _dsn_db)

    if dchg.values:
        await _do_update(table_name, seq_attrs, dchg, _dsn_db)

    if ddel.pkey_values:
        await _do_delete(table_name, ddel, _dsn_db)

def preval(self, group: Hashable, n_threads: int = None, logger: Logger = None,
           drop_group: bool = True, cleanup_scope: bool = True):
    """When using expression groups, "pre-evaluate" the utility expressions for
    a specified group, caching the utility table on the ChoiceModel.

    This is an advanced modelling technique to facilitate complex segmented
    stochastic models, where segments share decision units and some common
    utility expressions. Call ``preval()`` for the group of common expressions,
    and then ``copy()`` or ``copy_subset()`` the ChoiceModel to fill other
    symbols with segment-specific values.

    Discrete (micro-simulated) models don't need this, because the decision
    units of each segment shouldn't overlap, so there's no downside to
    double-computing common expressions.

    Args:
        group: The name of the group to pre-compute.
        n_threads: Number of threads to use to evaluate the expressions.
        logger: Optional logger for debugging.
        drop_group: If True, the selected group will be "popped" from the set
            of groups, to avoid re-computing.
        cleanup_scope: If True, symbols unique to this group will be dropped
            from the scope. This can free memory by de-referencing objects and
            arrays that are no longer required.
    """
    self.validate(group=group)
    subgroup = self._expressions.get_group(group)
    utilities = self._evaluate_utilities(subgroup, n_threads=n_threads,
                                         logger=logger)
    self._cached_utils = utilities

    if cleanup_scope:
        # Need to get a set of only those symbols unique to this group,
        # using set math
        all_symbols = set(iter_chain(self._expressions.iterchained(),
                                     self._expressions.itersimple()))
        ungrouped_symbols = set(
            iter_chain(self._expressions.iterchained(groups=False),
                       self._expressions.itersimple(groups=False)))
        group_symbols = set(iter_chain(subgroup.iterchained(),
                                       subgroup.itersimple()))

        other_symbols = all_symbols - ungrouped_symbols - group_symbols
        symbols_to_clear = group_symbols - ungrouped_symbols - other_symbols
        for name in symbols_to_clear:
            del self._scope[name]

    if drop_group:
        self._expressions.drop_group(group)

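# Usage sketch (illustrative, not from the source): only preval(), copy() and
# copy_subset() are named in the docstring above; the group name and segment
# loop below are hypothetical.
#
#     model.preval('common_utilities', n_threads=4)  # cache shared utilities once
#     for segment in segments:
#         seg_model = model.copy()  # each copy reuses the cached utility table
#         ...fill segment-specific symbols on seg_model, then evaluate it...
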
def _recall_dobject(obj):
    obj_cls = obj.__class__
    col_names = tuple(iter_chain(obj_cls.__dobject_key__, obj.__dobject_att__))

    if hasattr(obj_cls, '__table_name__'):
        table_name = obj_cls.__table_name__
    else:
        table_name = obj.__class__.__name__

    sql = "SELECT " + ','.join(col_names) + " FROM "
    sql += table_name + " WHERE "
    sql += ' AND '.join(pk_colname + "=%s"
                        for pk_colname in obj_cls.__dobject_key__)

    pk_values = []
    for val in obj.__dobject_key__:
        if isinstance(val, dsequence):
            val = val.value
        pk_values.append(val)

    dbc << sql << tuple(pk_values)

    origin = next(dbc)
    if origin is not None:
        return obj.__class__(origin)
    else:
        return obj.__class__()

def _recall_dset(obj):
    item_cls = obj.__dset_item_class__

    if hasattr(item_cls, '__table_name__'):
        table_name = item_cls.__table_name__
    else:
        table_name = item_cls.__name__

    pk_names = []
    col_names = tuple(iter_chain(item_cls.__dobject_key__,
                                 item_cls.__dobject_att__))

    pk_values = []
    for val in obj.__dobject_key__:
        pk_values.append(val)

    sql = "\nSELECT " + ', '.join(col_names) + " FROM " + table_name
    if pk_values:
        sql += '\nWHERE '
        sql += ' AND '.join(n + "=%s" for n in obj.__class__.__dobject_key__)
    sql += _make_page_sql(obj._page)

    dbc << sql
    if pk_values:
        dbc << tuple(pk_values)

    # empty dset with key
    new_ds = obj.__class__(dbc, **obj.__dobject_key__.as_dict(),
                           _page=obj._page.copy())

    return new_ds

async def _recall_dobject(obj, _dsn_db=None):
    obj_cls = obj.__class__
    col_names = tuple(iter_chain(obj_cls.__dobject_key__, obj.__dobject_att__))

    if hasattr(obj_cls, '__table_name__'):
        table_name = obj_cls.__table_name__
    else:
        table_name = obj.__class__.__name__

    pk_pairs = obj.__dobject_key__.as_dict()
    await _select_with_pks(table_name, col_names, pk_pairs,
                           _page=getattr(obj, '_page', None), _dsn_db=_dsn_db)

    try:
        # get the first row of the result
        origin = await _dsn_db.__aiter__().__anext__()
        return obj.__class__(origin)
    except StopAsyncIteration:
        return obj.__class__()

def template_module_iter (self) :
    yield self.template_module
    xs = ([self.Instance], self.fields, self.Actions_I, self.Actions_T)
    for x in iter_chain (* xs) :
        tm = getattr (x, "template_module", None)
        if tm :
            yield tm

async def _do_insert(table_name, seq_attrs, dins, _dsn_db=None):
    cols = tuple(iter_chain(dins.pkey_attrs.keys(), dins.attrs.keys()))
    values = [k + v for k, v in zip(dins.pkey_values, dins.values)]

    # If there are new sequence objects,
    # get next values of them in a batch
    if seq_attrs:
        seq_cols = []  # (col_idx, col_name, [seq_value])
        for i, colname in enumerate(cols):
            if colname in seq_attrs:
                seq_cols.append((i, colname, []))

        for record in values:
            for seq_col in seq_cols:
                seq_val = record[seq_col[0]]
                if seq_val is not None:
                    seq_col[2].append(seq_val)

        for colidx, colname, seqvals in seq_cols:
            allocate_sequence(seq_attrs[colname], seqvals)

    _dsn_db << f"""
    INSERT INTO {table_name} ({', '.join(cols)})
    VALUES ({','.join(['{' + c + '}' for c in cols])})
    """

    params_list = list(dict(t for t in zip(cols, col_values))
                       for col_values in values)
    await _dsn_db(params_list)

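# Standalone illustration of the named-placeholder SQL text built above:
# every column becomes a {name} slot for the driver to fill from each params
# dict in params_list. The table name 't' is a placeholder.
cols = ('id', 'name')
placeholders = ','.join('{' + c + '}' for c in cols)
sql = "INSERT INTO t ({}) VALUES ({})".format(', '.join(cols), placeholders)
print(sql)  # INSERT INTO t (id, name) VALUES ({id},{name})
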
def _generate_radvd_conf(self, router_ports):
    radvd_conf = utils.get_conf_file_name(self._agent_conf.ra_confs,
                                          self._router_id,
                                          'radvd.conf',
                                          True)
    buf = six.StringIO()
    for p in router_ports:
        subnets = p.get('subnets', [])
        v6_subnets = [subnet for subnet in subnets if
                      netaddr.IPNetwork(subnet['cidr']).version == 6]
        if not v6_subnets:
            continue
        ra_modes = {subnet['ipv6_ra_mode'] for subnet in v6_subnets}
        auto_config_prefixes = [
            subnet['cidr'] for subnet in v6_subnets
            if subnet['ipv6_ra_mode'] == constants.IPV6_SLAAC or
            subnet['ipv6_ra_mode'] == constants.DHCPV6_STATELESS]
        stateful_config_prefixes = [
            subnet['cidr'] for subnet in v6_subnets
            if subnet['ipv6_ra_mode'] == constants.DHCPV6_STATEFUL]
        interface_name = self._dev_name_helper(p['id'])
        slaac_subnets = [
            subnet for subnet in v6_subnets
            if subnet['ipv6_ra_mode'] == constants.IPV6_SLAAC]
        dns_servers = list(iter_chain(
            *[subnet['dns_nameservers'] for subnet in slaac_subnets
              if subnet.get('dns_nameservers')]))
        buf.write('%s' % CONFIG_TEMPLATE.render(
            ra_modes=list(ra_modes),
            interface_name=interface_name,
            auto_config_prefixes=auto_config_prefixes,
            stateful_config_prefixes=stateful_config_prefixes,
            dns_servers=dns_servers[0:MAX_RDNSS_ENTRIES],
            constants=constants,
            min_rtr_adv_interval=self._agent_conf.min_rtr_adv_interval,
            max_rtr_adv_interval=self._agent_conf.max_rtr_adv_interval))

    common_utils.replace_file(radvd_conf, buf.getvalue())
    return radvd_conf

def template_module_iter(self):
    yield self.template_module
    xs = ([self.Instance], self.fields, self.Actions_I, self.Actions_T)
    for x in iter_chain(*xs):
        tm = getattr(x, "template_module", None)
        if tm:
            yield tm

def aggregate_words(self, words_lists):
    bag_of_words = defaultdict(int)
    for noun in iter_chain(*words_lists):
        if len(noun) > 1:
            bag_of_words[noun] += 1
    top_words = sorted(bag_of_words.items(), key=lambda x: x[1])
    return dict(top_words[-20:])

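# Standalone illustration (stdlib only) of the counting pattern above:
# flatten the per-document word lists, count words longer than one character,
# and keep the 20 most frequent.
from collections import defaultdict
from itertools import chain as iter_chain

words_lists = [['data', 'model', 'data'], ['model', 'a', 'data']]
counts = defaultdict(int)
for word in iter_chain(*words_lists):
    if len(word) > 1:
        counts[word] += 1
print(dict(sorted(counts.items(), key=lambda x: x[1])[-20:]))
# {'model': 2, 'data': 3}
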
def make_sample_index_iterator_maker(n, steps, r):
    """
    Make a generator of iterators that iterate over n-tuples of integers that
    parametrize certain complex numbers on the unit circle with a given
    number of steps on a semicircle.
    """
    indr1 = range(1, steps + 1)
    indr2 = range(-steps + 1, 0)
    if r is None:
        return lambda: iter_product(
            indr1, *[iter_chain(indr1, indr2) for _ in range(1, n)]
        )
    else:
        if n == 1:
            return lambda: iter(indr1)
        else:
            if r % n:
                rr = [r, -r]
            else:
                rr = [r]

            def make_2ind_iterator():
                for i1 in indr1:
                    for m in rr:
                        i2 = (m - i1 + n - 1) % (2*n) - n + 1
                        yield (i1, i2)

            if n == 2:
                return make_2ind_iterator
            elif n >= 3:
                return lambda: map(
                    _splice_first,
                    iter_product(
                        make_2ind_iterator(),
                        *[iter_chain(indr1, indr2) for _ in range(2, n)]
                    )
                )

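# Standalone illustration of the chained index pattern used above: for a
# given `steps`, an axis runs over 1..steps and then -steps+1..-1, hitting
# every non-zero angle k*pi/steps on the circle exactly once.
from itertools import chain, product

steps = 3
half = list(chain(range(1, steps + 1), range(-steps + 1, 0)))
print(half)  # [1, 2, 3, -2, -1]
print(list(product(range(1, steps + 1), half))[:3])  # [(1, 1), (1, 2), (1, 3)]
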
def getArgs(self):
    """
    Get arguments used in "distinguish" method.

    :return: dict mapping argument names to their (string) values
    """
    args = {}
    for arg in iter_chain(self.requiredArgs, self.optionalArgs):
        val = self.callerArgs.data.get(arg, [''])[0]
        val = '' if val is None else val
        args[arg] = val
    return args

def _generate_radvd_conf(self, router_ports):
    radvd_conf = utils.get_conf_file_name(self._agent_conf.ra_confs,
                                          self._router_id,
                                          'radvd.conf',
                                          True)
    buf = six.StringIO()
    for p in router_ports:
        subnets = p.get('subnets', [])
        v6_subnets = [
            subnet for subnet in subnets
            if netaddr.IPNetwork(subnet['cidr']).version == 6
        ]
        if not v6_subnets:
            continue
        ra_modes = {subnet['ipv6_ra_mode'] for subnet in v6_subnets}
        auto_config_prefixes = [
            subnet['cidr'] for subnet in v6_subnets
            if subnet['ipv6_ra_mode'] == constants.IPV6_SLAAC or
            subnet['ipv6_ra_mode'] == constants.DHCPV6_STATELESS
        ]
        stateful_config_prefixes = [
            subnet['cidr'] for subnet in v6_subnets
            if subnet['ipv6_ra_mode'] == constants.DHCPV6_STATEFUL
        ]
        interface_name = self._dev_name_helper(p['id'])
        slaac_subnets = [
            subnet for subnet in v6_subnets
            if subnet['ipv6_ra_mode'] == constants.IPV6_SLAAC
        ]
        dns_servers = list(
            iter_chain(*[
                subnet['dns_nameservers'] for subnet in slaac_subnets
                if subnet.get('dns_nameservers')
            ]))
        network_mtu = p.get('mtu', 0)
        buf.write('%s' % CONFIG_TEMPLATE.render(
            ra_modes=list(ra_modes),
            interface_name=interface_name,
            auto_config_prefixes=auto_config_prefixes,
            stateful_config_prefixes=stateful_config_prefixes,
            dns_servers=dns_servers[0:MAX_RDNSS_ENTRIES],
            n_const=n_const,
            constants=constants,
            min_rtr_adv_interval=self._agent_conf.min_rtr_adv_interval,
            max_rtr_adv_interval=self._agent_conf.max_rtr_adv_interval,
            network_mtu=int(network_mtu)))

    contents = buf.getvalue()
    LOG.debug("radvd config = %s", contents)
    # The radvd conf file must not be writable by self/group
    file_utils.replace_file(radvd_conf, contents, file_mode=0o444)
    return radvd_conf

def __json_object__(self):
    """Export this dobject as a JSON-ready dict."""
    cls = self.__class__

    data = OrderedDict()
    for attr_name in iter_chain(cls.__dobject_key__, cls.__dobject_att__):
        attr_value = getattr(self, attr_name)
        if hasattr(attr_value, '__json_object__'):
            attr_value = attr_value.__json_object__()
        data[attr_name] = attr_value

    return data

def repr_create_table(dobj_cls):
    attrs = OrderedDict((attr_name, attr) for attr_name, attr in
                        iter_chain(dobj_cls.__dobject_key__.items(),
                                   dobj_cls.__dobject_att__.items()))

    segments = []
    for name, attr in attrs.items():
        s = ' %s %s' % (name, repr_datatype(attr.type, attr.len))
        segments.append(s)

    if dobj_cls.__dobject_key__:
        s = ' PRIMARY KEY(%s)'
        s %= ','.join(dobj_cls.__dobject_key__.keys())
        segments.append(s)

    if hasattr(dobj_cls, '__tablename__'):
        table_name = dobj_cls.__tablename__
    else:
        table_name = dobj_cls.__name__

    sql = ''.join([
        'CREATE TABLE IF NOT EXISTS ', table_name, ' (\n',
        ',\n'.join(segments),
        '\n);'
    ])
    yield sql

    quote = lambda s: s.replace("'", "''")

    if hasattr(dobj_cls, '__doc__') and dobj_cls.__doc__:
        doc = textwrap.dedent(dobj_cls.__doc__)
        sql = "COMMENT ON TABLE %s IS '%s';"
        sql %= (table_name, quote(doc))
        yield sql

    for name, attr in attrs.items():
        if attr.doc:
            doc = textwrap.dedent(attr.doc)
            sql = "COMMENT ON COLUMN %s.%s IS '%s';"
            sql %= (table_name, name, quote(doc))
            yield sql

def __dset__(self, item_type):
    dobj_attrs = OrderedDict((attr_name, attr) for attr_name, attr in
                             iter_chain(item_type.__dobject_key__.items(),
                                        item_type.__dobject_att__.items()))

    colnames = []
    selected = []
    for i, d in enumerate(self._cursor.description):
        colname = d[0]
        if colname in dobj_attrs:
            selected.append(i)
            colnames.append(colname)

    for record in self._cursor:
        obj = dict((k, v) for k, v in
                   zip(colnames, (record[i] for i in selected)))
        yield item_type(**obj)

def get_processing_assets(task_id):
    ispc = app.control.inspect()

    ion_tasks = set()
    active = set()

    for wtask in iter_chain(*ispc.active().values(),
                            *ispc.reserved().values()):
        args = eval(wtask["args"])
        if len(args) < 2:
            continue
        ion_tasks.add((str(args[0]), AssetType[args[1]]))

    for asset_type in AssetType:
        asset_info = get_asset_info(task_id, asset_type)
        ion_task_id = (task_id, asset_type)
        if not is_asset_task(asset_info) or ion_task_id in ion_tasks:
            continue
        active.add(asset_type)

    return active

def get_reclamation_candidates(
        data_path: pathlib.Path, min_age_hours: int) -> List[ReclamationCandidate]:
    candidates: List[ReclamationCandidate] = []
    for node_dir in iter_chain(data_path.glob("**/node_???"),
                               data_path.glob("**/node_*_???")):
        if (node_dir / "reclaimed").exists():
            continue

        last_run = next(
            iter(
                sorted(
                    node_dir.glob("run-*.log*"),
                    key=lambda p: p.stat().st_mtime,
                    reverse=True,
                )),
            None,
        )
        # If there is no last run, assume we can reclaim
        if last_run:
            age_hours = (time.time() - last_run.stat().st_mtime) / 3600
            if age_hours < min_age_hours:
                scenario_name: Path = Path(node_dir.parent.name)
                log.debug(
                    "Skipping too recent node",
                    scenario_name=scenario_name,
                    node=node_dir.name,
                    age_hours=age_hours,
                )
                continue

        for keyfile in node_dir.glob("keys/*"):
            keyfile_content = json.loads(keyfile.read_text())
            address = keyfile_content.get("address")
            if address:
                candidates.append(
                    ReclamationCandidate(
                        address=to_checksum_address(address),
                        node_dir=node_dir,
                        keyfile_content=keyfile_content,
                    ))
    return candidates

def __bool__(self):
    """ """
    cls = self.__class__

    if not cls.__dobject_att__ and not cls.__dobject_key__:
        return False  # no attributes defined in this dobject

    for attr_name, attr in iter_chain(cls.__dobject_key__.items(),
                                      cls.__dobject_att__.items()):
        if attr_name not in self.__value_dict__:
            continue  # the truth value of an unset attribute is false

        attr_val = getattr(self, attr_name)
        if attr.default is not None:
            if attr_val != attr.default:
                return True
        elif attr_val:
            return True

    return False

def contents(self):
    top = self.top
    dis_fmt = "Disallow: %s"
    exclude = list(dis_fmt % x for x in self._excluded_urls(top))
    extra = list(dis_fmt % x for x in self.extra_excludes)
    result = ""
    if exclude or extra:
        result = "\n".join(iter_chain(["User-agent: *"], exclude, extra))
    sitemap = getattr(top.SC, "Sitemap", None)
    if sitemap is not None:
        request = getattr(top, "request", None)
        if request is not None:
            site = request.host_url.rstrip("/")
            if not site.endswith("//localhost"):
                result = "\n".join(
                    (result,
                     "".join(("Sitemap: ", site, "/", sitemap.name, ".txt"))))
    return result

def contents (self) :
    top = self.top
    dis_fmt = "Disallow: %s"
    exclude = list (dis_fmt % x for x in self._excluded_urls (top))
    extra = list (dis_fmt % x for x in self.extra_excludes)
    result = ""
    if exclude or extra :
        result = "\n".join \
            (iter_chain (["User-agent: *"], exclude, extra))
    sitemap = getattr (top.SC, "Sitemap", None)
    if sitemap is not None :
        request = getattr (top, "request", None)
        if request is not None :
            site = request.host_url.rstrip ("/")
            if not site.endswith ("//localhost") :
                result = "\n".join \
                    ( ( result
                      , "".join (("Sitemap: ", site, "/", sitemap.name, ".txt"))
                      )
                    )
    return result

def _generate_radvd_conf(self, router_ports):
    radvd_conf = utils.get_conf_file_name(self._agent_conf.ra_confs,
                                          self._router_id,
                                          'radvd.conf',
                                          True)
    buf = six.StringIO()
    for p in router_ports:
        subnets = p.get('subnets', [])
        v6_subnets = [
            subnet for subnet in subnets
            if netaddr.IPNetwork(subnet['cidr']).version == 6
        ]
        if not v6_subnets:
            continue
        ra_modes = {subnet['ipv6_ra_mode'] for subnet in v6_subnets}
        auto_config_prefixes = [
            subnet['cidr'] for subnet in v6_subnets
            if subnet['ipv6_ra_mode'] == constants.IPV6_SLAAC or
            subnet['ipv6_ra_mode'] == constants.DHCPV6_STATELESS
        ]
        interface_name = self._dev_name_helper(p['id'])
        slaac_subnets = [
            subnet for subnet in v6_subnets
            if subnet['ipv6_ra_mode'] == constants.IPV6_SLAAC
        ]
        dns_servers = list(
            iter_chain(*[
                subnet['dns_nameservers'] for subnet in slaac_subnets
                if subnet.get('dns_nameservers')
            ]))
        buf.write('%s' % CONFIG_TEMPLATE.render(
            ra_modes=list(ra_modes),
            interface_name=interface_name,
            prefixes=auto_config_prefixes,
            dns_servers=dns_servers[0:MAX_RDNSS_ENTRIES],
            constants=constants))

    common_utils.replace_file(radvd_conf, buf.getvalue())
    return radvd_conf

async def _recall_dset(obj, _dsn_db=None):
    item_cls = obj.__dset_item_class__

    if hasattr(item_cls, '__table_name__'):
        table_name = item_cls.__table_name__
    else:
        table_name = item_cls.__name__

    pk_names = list(item_cls.__dobject_key__)
    col_names = tuple(iter_chain(item_cls.__dobject_key__,
                                 item_cls.__dobject_att__))

    pk_pairs = obj.__dobject_key__.as_dict()
    await _select_with_pks(table_name, col_names, pk_pairs,
                           _page=obj._page, _dsn_db=_dsn_db)

    # empty dset with key
    new_ds = obj.__class__(_dsn_db, **obj.__dobject_key__.as_dict(),
                           _page=obj._page.copy())

    return new_ds

def mif_lines(mem):
    # Write file header
    yield '-- Assembler12 - generated file'
    yield 'WIDTH=12;'
    yield f'DEPTH={len(mem)};'
    yield 'ADDRESS_RADIX=HEX;'
    yield 'DATA_RADIX=HEX;'
    yield ''
    yield 'CONTENT BEGIN'

    # Run-length encode file contents
    prev_word = -1
    run_start = 0
    address = 0
    addr_width = (len(mem).bit_length() + 3) // 4

    # The 'mem' array is extended with a value that cannot be in the input,
    # so that the last run is finished properly
    for word in iter_chain(mem, (None, )):
        # Negative values in the input should be written as zeros
        if word is not None and word < 0:
            word = 0

        # If a new run has started,
        if word != prev_word:
            run_length = address - run_start
            # write out the previous run
            if run_length >= 1:
                run_end = address - 1
                if run_length == 1:
                    addr_part = f'\t{run_start:0{addr_width}X}'
                else:
                    addr_part = f'\t[{run_start:0{addr_width}X}..{run_end:0{addr_width}X}]'
                yield f'{addr_part} : {prev_word:03X};'

            # Start the new run
            run_start = address
            prev_word = word

        address += 1

    yield 'END;'

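# Usage sketch: mif_lines only needs a sequence of ints, so a short list is
# enough to see the run-length encoded output; negative entries come out as 0.
from itertools import chain as iter_chain

demo_mem = [0x123, 0x123, 0x456, -1]
print('\n'.join(mif_lines(demo_mem)))
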
def _reshape_class(orig_cls, *args, **kwargs):
    new_type_name = None        # string of type name
    selected = set()            # names
    ignored = set()             # names
    new_pkeys = []              # list of names
    declared = OrderedDict()    # {attr_name: attribute}
    new_bases = []              # list of type
    combined = []               # list of type
    substituted = {}            # {new_attr: old_attr}

    arg_name = '_ignore'
    if arg_name in kwargs:
        arg_value = kwargs[arg_name]
        if isinstance(arg_value, Iterable):
            for i, elem in enumerate(arg_value):
                if isinstance(elem, str):
                    ignored.add(elem)
                elif isinstance(elem, DAttribute):
                    ignored.add(elem.name)
                else:
                    errmsg = ("The %d-th element in '_ignore' argument "
                              "should be a str or DAttribute object: %r")
                    errmsg %= (i + 1, elem)
                    raise ValueError(errmsg)
        elif isinstance(arg_value, DAttribute):
            ignored.add(arg_value.name)
        elif isinstance(arg_value, str):
            ignored.add(arg_value)
        del kwargs[arg_name]

    arg_name = '_key'
    if arg_name in kwargs:
        arg_value = kwargs[arg_name]
        if isinstance(arg_value, Iterable):
            for i, elem in enumerate(arg_value):
                if isinstance(elem, str):
                    new_pkeys.append(elem)
                elif isinstance(elem, DAttribute):
                    new_pkeys.append(elem.name)
                else:
                    errmsg = ("The %d-th element in '_key' argument "
                              "should be a str or DAttribute object: %r")
                    errmsg %= (i + 1, elem)
                    raise ValueError(errmsg)
        elif isinstance(arg_value, DAttribute):
            new_pkeys.append(arg_value.name)
        elif isinstance(arg_value, str):
            new_pkeys.append(arg_value)
        del kwargs[arg_name]

    arg_name = '_base'
    if arg_name in kwargs:
        arg_value = kwargs[arg_name]
        if isinstance(arg_value, type):
            new_bases.append(arg_value)
        elif isinstance(arg_value, Iterable):
            for i, cls in enumerate(arg_value):
                if isinstance(cls, type):
                    new_bases.append(cls)
                else:
                    errmsg = ("The %d-th element of '_base' should be"
                              " a type object")
                    errmsg %= (i + 1)
                    raise ValueError(errmsg)
        else:
            errmsg = ("The value of '_base' should be"
                      " an iterable of types or a type object")
            raise ValueError(errmsg)
        del kwargs[arg_name]

    arg_name = '_combine'
    if arg_name in kwargs:
        arg_value = kwargs[arg_name]
        if isinstance(arg_value, type):
            combined.append(arg_value)
        elif isinstance(arg_value, Iterable):
            for i, cls in enumerate(arg_value):
                if isinstance(cls, type):
                    combined.append(cls)
                else:
                    errmsg = ("The %d-th element of '_combine' should be"
                              " a type object")
                    errmsg %= (i + 1)
                    raise ValueError(errmsg)
        else:
            errmsg = ("The value of '_combine' should be"
                      " an iterable of types or a type object")
            raise ValueError(errmsg)
        del kwargs[arg_name]

    arg_name = '_subst'
    if arg_name in kwargs:
        arg_value = kwargs[arg_name]
        if isinstance(arg_value, Mapping):
            for new_attr, old_attr in arg_value.items():
                if isinstance(old_attr, str):
                    substituted[new_attr] = old_attr
                else:
                    errmsg = ("The target and source attribute names in "
                              "'_subst' should be str objects")
                    raise ValueError(errmsg)
        else:
            raise ValueError("The '_subst' should be a dict or Mapping object")
        del kwargs[arg_name]

    arg_name = '_name'
    if arg_name in kwargs:
        arg_value = kwargs[arg_name]
        if isinstance(arg_value, str):
            new_type_name = arg_value
        else:
            raise ValueError("The '_name' should be a str object")
        del kwargs[arg_name]

    for i, arg in enumerate(args):
        if isinstance(arg, str):
            selected.add(arg)
        elif isinstance(arg, DAttribute):
            selected.add(arg.name)
        else:
            errmsg = ("The %d-th argument must be a str or attribute object"
                      ", not: %r")
            errmsg %= (i + 1, arg)
            raise ValueError(errmsg)

    for attr_name, arg_value in kwargs.items():
        if attr_name.startswith('_'):
            raise ValueError("Unknown operation '%s'" % attr_name)
        elif isinstance(arg_value, bool):
            if arg_value:
                selected.add(attr_name)
            else:
                ignored.add(attr_name)
        elif isinstance(arg_value, DAttribute):
            declared[attr_name] = arg_value
        else:
            errmsg = "Unknown operand: %s=%r" % (attr_name, arg_value)
            raise ValueError(errmsg)

    # -------------------------------------------------------------------

    attributes = OrderedDict()
    for attr_name, attr in iter_chain(orig_cls.__dobject_key__.items(),
                                      orig_cls.__dobject_att__.items()):
        attr = attr.copy()
        attr.owner_class = None
        if attr_name in substituted:
            attributes[substituted[attr_name]] = attr
        else:
            attributes[attr_name] = attr

    # ONLY substitute the original object's attribute names
    for old_attr_name, new_attr_name in substituted.items():
        if (old_attr_name not in orig_cls.__dobject_att__ and
                old_attr_name not in orig_cls.__dobject_key__):
            errmsg = "The attribute '%s' substituted by '%s' is not found in %s"
            errmsg %= (old_attr_name, new_attr_name, orig_cls.__name__)
            raise ValueError(errmsg)

        if old_attr_name in selected:
            selected.add(new_attr_name)
            selected.remove(old_attr_name)

        if old_attr_name in ignored:
            ignored.add(new_attr_name)
            ignored.remove(old_attr_name)

    for cls in combined:
        for attr_name, attr in iter_chain(cls.__dobject_key__.items(),
                                          cls.__dobject_att__.items()):
            if attr_name not in attributes:
                attributes[attr_name] = attr

    for attr_name, attr in declared.items():
        attributes[attr_name] = attr

    if selected:
        attributes = OrderedDict([(k, v) for k, v in attributes.items()
                                  if k in selected and k not in ignored])
    else:
        attributes = OrderedDict([(k, v) for k, v in attributes.items()
                                  if k not in ignored])

    if new_pkeys:
        pkeys = []
        for attr_name in new_pkeys:
            if attr_name in ignored:
                errmsg = ("Conflict! The attribute '%s' is specified both as "
                          "a primary key and as an ignored attribute")
                errmsg %= attr_name
                raise ValueError(errmsg)

            if attr_name not in attributes:
                errmsg = ("The attribute '%s' specified as primary key is not"
                          " declared in the origin or base classes")
                errmsg %= attr_name
                raise ValueError(errmsg)

            pkeys.append(attr_name)

        new_pkeys = pkeys
    else:
        if orig_cls.__dobject_key__:
            new_pkeys = []
            for attr_name in orig_cls.__dobject_key__:
                if attr_name in substituted:
                    attr_name = substituted[attr_name]

                if attr_name not in attributes:
                    continue

                new_pkeys.append(attr_name)

    attributes['__dobject_key__'] = new_pkeys
    attributes['__dobject_origin_class__'] = orig_cls

    subst_map = OrderedDict()
    for old_name, new_name in substituted.items():
        subst_map[new_name] = old_name
    attributes['__dobject_mapping__'] = subst_map

    if not new_bases:
        new_bases = orig_cls.__bases__
    else:
        new_bases = tuple(new_bases)

    if not new_type_name:
        new_type_name = orig_cls.__name__

    new_cls = type(new_type_name, new_bases, attributes)
    new_cls.__module__ = sys._getframe(2).f_globals.get('__name__', '__main__')

    setattr(new_cls, '__dobject_origin_class__', tuple([orig_cls] + combined))
    if substituted:
        setattr(new_cls, '__dobject_mapping__', substituted)

    return new_cls

def pq_dtable_merge(current, past):
    dins, dchg, ddel = _dtable_diff(current, past)

    table_name = current.__dset_item_class__.__name__

    dobj_cls = current.__dset_item_class__
    attrs = OrderedDict((attr_name, attr) for attr_name, attr in
                        iter_chain(dobj_cls.__dobject_key__.items(),
                                   dobj_cls.__dobject_att__.items()))
    seq_attrs = {}
    for n, attr in attrs.items():
        if issubclass(attr.type, dsequence):
            seq_attrs[n] = attr

    if dins.values:
        cols = tuple(iter_chain(dins.pkey_attrs.keys(), dins.attrs.keys()))
        values = [k + v for k, v in zip(dins.pkey_values, dins.values)]

        # If there are new sequence objects,
        # get next values of them in a batch
        if seq_attrs:
            seq_cols = []  # (col_idx, col_name, [seq_value])
            for i, colname in enumerate(cols):
                if colname in seq_attrs:
                    seq_cols.append((i, colname, []))

            for record in values:
                for seq_col in seq_cols:
                    seq_val = record[seq_col[0]]
                    if seq_val is not None:
                        seq_col[2].append(seq_val)

            for colidx, colname, seqvals in seq_cols:
                allocate_sequence(seq_attrs[colname], seqvals)

        sql = """
        INSERT INTO {table} ({cols}) VALUES ({vals});
        """.format(table=table_name,
                   cols=', '.join(cols),
                   vals=', '.join(['%s'] * len(cols)))
        dbc << sql
        dbc << values

    if dchg.values:
        if seq_attrs:
            seq_cols = {}
            for record in dchg.values:
                for colname in record:
                    if colname in seq_attrs:
                        try:
                            seqvals = seq_cols[colname]
                        except KeyError:
                            seq_cols[colname] = seqvals = []
                        seqvals.append(record[colname][0])

            for colname, seqvals in seq_cols.items():
                allocate_sequence(seq_attrs[colname], seqvals)

        # generate one UPDATE statement per group of modified fields
        groups = {}
        for i, modified in enumerate(dchg.values):
            # group by the tuple of modified attribute names
            grpid = tuple(modified.keys())
            try:
                chgidxs = groups[grpid]
            except KeyError:
                groups[grpid] = chgidxs = []
            chgidxs.append(i)

        pkcond = ' AND '.join(
            ['{pk}=%s'.format(pk=pk) for pk in dchg.pkey_attrs])
        for grpid, chgidxs in groups.items():
            asgn_expr = ', '.join(['%s=%%s' % name for name in grpid])
            dbc << """
            UPDATE {table} SET {asgn} WHERE {pkcond}
            """.format(table=table_name, asgn=asgn_expr, pkcond=pkcond)

            for i in chgidxs:
                values = tuple(dchg.values[i][k][0] for k in grpid)
                pkvals = dchg.pkey_values[i]
                dbc << values + pkvals

    if ddel.pkey_values:
        pkcond = ' AND '.join(
            ['{pk}=%s'.format(pk=pk) for pk in ddel.pkey_attrs])
        dbc << """
        DELETE FROM {table} WHERE {pkcond};
        """.format(table=table_name, pkcond=pkcond)
        dbc << [k for k in ddel.pkey_values]

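# Standalone illustration (stdlib only) of the "group updates by modified
# fields" step above: records that touch the same set of columns share one
# UPDATE statement, so they are bucketed by the tuple of changed names.
changes = [{'a': (1,)}, {'b': (2,)}, {'a': (3,)}]
groups = {}
for i, modified in enumerate(changes):
    groups.setdefault(tuple(modified.keys()), []).append(i)
print(groups)  # {('a',): [0, 2], ('b',): [1]}
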
def reclaim_eth(account: Account, chain_str: str, data_path: pathlib.Path,
                min_age_hours: int):
    chain_name, chain_url = chain_str.split(":", maxsplit=1)
    log.info("in cmd", chain=chain_str, chain_name=chain_name,
             chain_url=chain_url)
    web3s: Dict[str, Web3] = {chain_name: Web3(HTTPProvider(chain_url))}
    log.info("Starting eth reclaim", data_path=data_path)

    address_to_keyfile = dict()
    address_to_privkey = dict()
    for node_dir in iter_chain(data_path.glob("**/node_???"),
                               data_path.glob("**/node_*_???")):
        scenario_name: Path = Path(node_dir.parent.name)
        last_run = next(
            iter(
                sorted(list(node_dir.glob("run-*.log")),
                       key=lambda p: p.stat().st_mtime,
                       reverse=True)),
            None,
        )
        # If there is no last run assume we can reclaim
        if last_run:
            age_hours = (time.time() - last_run.stat().st_mtime) / 3600
            if age_hours < min_age_hours:
                log.debug(
                    "Skipping too recent node",
                    scenario_name=scenario_name,
                    node=node_dir.name,
                    age_hours=age_hours,
                )
                continue

        for keyfile in node_dir.glob("keys/*"):
            keyfile_content = json.loads(keyfile.read_text())
            address = keyfile_content.get("address")
            if address:
                address_to_keyfile[to_checksum_address(address)] = keyfile_content

    log.info("Reclaiming candidates", addresses=list(address_to_keyfile.keys()))

    txs = defaultdict(set)
    reclaim_amount = defaultdict(int)
    for chain_name, web3 in web3s.items():
        log.info("Checking chain", chain=chain_name)
        for address, keyfile_content in address_to_keyfile.items():
            balance = web3.eth.getBalance(address)
            if balance > RECLAIM_MIN_BALANCE:
                if address not in address_to_privkey:
                    address_to_privkey[address] = decode_keyfile_json(
                        keyfile_content, b"")
                privkey = address_to_privkey[address]
                drain_amount = balance - (web3.eth.gasPrice * VALUE_TX_GAS_COST)
                log.info(
                    "Reclaiming",
                    from_address=address,
                    amount=drain_amount.__format__(",d"),
                    chain=chain_name,
                )
                reclaim_amount[chain_name] += drain_amount
                client = JSONRPCClient(web3, privkey)
                txs[chain_name].add(
                    client.get_next_transaction().send_transaction(
                        to=account.address,
                        value=drain_amount,
                        startgas=VALUE_TX_GAS_COST))

    for chain_name, chain_txs in txs.items():
        wait_for_txs(web3s[chain_name], chain_txs, 1000)

    for chain_name, amount in reclaim_amount.items():
        log.info("Reclaimed", chain=chain_name, amount=amount.__format__(",d"))

def td_cols (self) :
    return tuple (iter_chain (* (f.td_cols for f in self.fields)))

            else:
                addr_part = f'\t[{run_start:0{addr_width}X}..{run_end:0{addr_width}X}]'
            yield f'{addr_part} : {prev_word:03X};'

        # Start the new run
        run_start = address
        prev_word = word

        address += 1

    yield 'END;'


mem = array('h', (-1 for i in range(32768)))
assembler_state = Assembler(mem)
errored = False

with open("../test2.a12", 'r') as f:
    tokens = assembly_lexer.tokenize(f)
    tokens = iter_chain(tokens, (LexerToken(TokenType.END, False), ))

    assembler_state.file_name = "test2.a12"
    assembler_state.line_number = 1

    op_token = None
    args = []
    try:
        for tok in tokens:
            if tok.token_type is TokenType.END:
                if op_token is not None:
                    if op_token.token_type is TokenType.IDENTIFIER:
                        assemble_statement(assembler_state, op_token.value, args)
                    else:
                        assembler_state.error(AssemblerError(
                            'A statement must begin with an identifier '
                            '(the opcode).'))

def reshape_class(self):
    """ """
    tmpl_pkey = None
    tmpl_attrs = OrderedDict()
    for cls in iter_chain([self.source], self._base):
        if tmpl_pkey is None and cls.__dobject_key__:
            # The nearest primary key definition is valid
            tmpl_pkey = cls.__dobject_key__

        for attr_name, attr in iter_chain(cls.__dobject_key__.items(),
                                          cls.__dobject_att__.items()):
            if attr_name not in tmpl_attrs:
                tmpl_attrs[attr_name] = attr

    prop_dict = OrderedDict()
    if self.required:
        for attr_name, attr in tmpl_attrs.items():
            if attr_name not in self.required:
                continue
            if attr_name in self.ignored:
                continue
            prop_dict[attr_name] = attr
    else:
        for attr_name, attr in tmpl_attrs.items():
            if attr_name in self.ignored:
                continue
            prop_dict[attr_name] = attr

    pkey_attrs = []
    for attr in (self._primary_key if self._primary_key else tmpl_pkey):
        if isinstance(attr, str):
            if attr not in prop_dict:
                continue
            attr = prop_dict[attr]
        else:
            if attr.name not in prop_dict:
                continue
        pkey_attrs.append(attr)

    prop_dict['__dobject_key__'] = pkey_attrs

    if not self._base:
        # Oops, workaround: avoid cyclical importing!!!
        from ..db.dtable import dtable
        from ._dobject import dobject

        if issubclass(self.source, dtable):
            base_cls = tuple([dtable])
        else:
            base_cls = tuple([dobject])
        # no inheritance, it's too complicated
    else:
        base_cls = tuple(self._base)

    if not self._name:
        self._name = self.source.__name__  # keep the name

    reshaped_cls = type(self._name, base_cls, prop_dict)

    return reshaped_cls

def check_input(args):
    """Checks whether to read from stdin/file and validates user input/options.
    """

    # Options can be single numbers or ranges.
    def _validate_opt_numeric(value):
        """Returns a valid numerical option or dies trying"""
        try:
            num = int(value)
        except ValueError:
            emsg = "ERROR!! Not a valid number: '{}'\n"
            sys.stderr.write(emsg.format(value))
            sys.exit(1)
        else:
            # resid 4-char limit
            if -999 <= num < 10000:
                return num
            else:
                emsg = "ERROR!! Residue numbers must be between -999 and 9999: '{}'\n"
                sys.stderr.write(emsg.format(value))
                sys.exit(1)

    def _validate_opt_range(value, resid_list):
        """Returns a numerical range or dies trying"""

        # Validate formatting
        if not (1 <= value.count(':') <= 2):
            emsg = "ERROR!! Residue range must be in 'a:z:s' where a and z are "
            emsg += "optional (default to first and last residue respectively), "
            emsg += "and s is an optional step value (to return every s-th residue).\n"
            sys.stderr.write(emsg)
            sys.exit(1)

        start, end, step = None, None, 1
        slices = [
            _validate_opt_numeric(num) if num.strip() else None
            for num in value.split(':')
        ]
        if len(slices) == 3:
            start, end, step = slices
        elif len(slices) == 2:
            start, end = slices
        elif len(slices) == 1:
            if value.startswith(':'):
                end = slices[0]
            else:
                start = slices[0]

        # Upper/Lower limits, resid max 4 char
        if start is None:
            start = -1000
        if end is None:
            end = 10000

        # extra validation for step
        if step is None:
            step = 1
        elif step < 1:
            emsg = "ERROR!! Step value must be a positive number: '{}'\n"
            sys.stderr.write(emsg.format(step))
            sys.exit(1)

        # validate proper order in range
        if start > end:
            emsg = 'ERROR!! Start ({}) cannot be larger than end ({})\n'
            sys.stderr.write(emsg.format(start, end))
            sys.exit(1)

        # Build range
        bounded_resid = [r for r in resid_list if start <= r <= end]
        return bounded_resid[::step]

    # Defaults
    option = '::'
    fh = sys.stdin  # file handle

    if not len(args):
        # Reading from pipe with default option
        if sys.stdin.isatty():
            sys.stderr.write(__doc__)
            sys.exit(1)

    elif len(args) == 1:
        # One of two options: option & Pipe OR file & default option
        if args[0].startswith('-'):
            option = args[0][1:]
            if sys.stdin.isatty():  # ensure the PDB data is streamed in
                emsg = 'ERROR!! No data to process!\n'
                sys.stderr.write(emsg)
                sys.stderr.write(__doc__)
                sys.exit(1)
        else:
            if not os.path.isfile(args[0]):
                emsg = 'ERROR!! File not found or not readable: \'{}\'\n'
                sys.stderr.write(emsg.format(args[0]))
                sys.stderr.write(__doc__)
                sys.exit(1)
            fh = open(args[0], 'r')

    elif len(args) == 2:
        # Two options: option & File
        if not args[0].startswith('-'):
            emsg = 'ERROR! First argument is not an option: \'{}\'\n'
            sys.stderr.write(emsg.format(args[0]))
            sys.stderr.write(__doc__)
            sys.exit(1)

        if not os.path.isfile(args[1]):
            emsg = 'ERROR!! File not found or not readable: \'{}\'\n'
            sys.stderr.write(emsg.format(args[1]))
            sys.stderr.write(__doc__)
            sys.exit(1)

        option = args[0][1:]
        fh = open(args[1], 'r')

    else:  # Whatever ...
        sys.stderr.write(__doc__)
        sys.exit(1)

    # Read file handle to extract residue numbers.
    # Because sys.stdin is not seekable we store the
    # lines again in an iterator.
    buffer = iter([])
    resid_list = []
    records = ('ATOM', 'HETATM', 'TER', 'ANISOU')
    prev_res = None
    for line in fh:
        if line.startswith(records):
            res_id = line[21:26]  # include chain ID
            if res_id != prev_res:
                prev_res = res_id
                resid_list.append(int(line[22:26]))
        buffer = iter_chain(buffer, [line])

    try:
        fh.close()  # in case we opened a file. Just to be clean.
    except AttributeError:
        pass
    fh = buffer

    residue_range = set()  # stores all the residues to write.
    for entry in option.split(','):
        if ':' in entry:
            resrange = _validate_opt_range(entry, resid_list)
            residue_range.update(resrange)
        else:
            singleres = _validate_opt_numeric(entry)
            residue_range.add(singleres)

    return (fh, residue_range)

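# Standalone illustration of the 'a:z:s' range semantics implemented above:
# clamp the residue ids to [start, end], then take every s-th one.
resid_list = [1, 2, 3, 4, 5, 10, 11, 12]
start, end, step = 2, 11, 2
bounded = [r for r in resid_list if start <= r <= end]
print(bounded[::step])  # [2, 4, 10]
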
def __new__(cls, *args, **kwargs):
    instance = super(dobject, cls).__new__(cls)  # new instance of dobject

    # store values of attributes
    super(dobject, instance).__setattr__('__value_dict__', OrderedDict())

    attributes = OrderedDict(iter_chain(cls.__dobject_key__.items(),
                                        cls.__dobject_att__.items()))
    aggregates = []
    seen = set()
    if args:
        if len(args) > 1:
            errmsg = "Do not exceed one positional argument: "
            errmsg += "(obj, attr1='', ...) or (attr1='', ...) "
            raise ValueError(errmsg)

        source_obj = args[0]  # reshape the given object or dict
        if isinstance(source_obj, Mapping):  # like {}
            for attr_name, attr in attributes.items():
                if attr_name in kwargs:
                    continue  # this value will be set later

                if attr_name not in source_obj:
                    continue

                attr_val = source_obj[attr_name]
                attr.set_value_unguardedly(instance, attr_val)
                seen.add(attr_name)

        elif isinstance(source_obj, DObject):
            if (cls.__dobject_origin_class__ and
                    isinstance(source_obj, cls.__dobject_origin_class__)):

                subst_mapping = {}
                for o_name, n_name in cls.__dobject_mapping__.items():
                    subst_mapping[n_name] = o_name
                    if n_name not in cls.__dobject_mapping__:
                        # _subst=dict(a=b*, b=a) if o_name in mapping
                        subst_mapping[o_name] = None  # mark it not to clone
            else:
                subst_mapping = {}

            for attr_name, attr in attributes.items():
                if attr_name in kwargs:
                    continue  # this value will be set later

                if attr_name in subst_mapping:
                    src_attr_name = subst_mapping[attr_name]
                    if src_attr_name is None:
                        continue
                else:
                    src_attr_name = attr_name

                if not hasattr(source_obj, src_attr_name):
                    continue

                attr_val = getattr(source_obj, src_attr_name)
                # if isinstance(attr_val, DSetBase):
                #     # NOTED: the dominion object is required to replace
                #     aggregates.append((attr_name, attr, attr_val))
                #     continue
                attr.set_value_unguardedly(instance, attr_val)
                seen.add(attr_name)
        else:
            for attr_name, attr in attributes.items():
                if attr_name in kwargs:
                    continue  # this value will be set later

                if not hasattr(source_obj, attr_name):
                    continue

                attr_val = getattr(source_obj, attr_name)
                attr.set_value_unguardedly(instance, attr_val)
                seen.add(attr_name)

    for arg_name, arg_value in kwargs.items():
        attr = attributes.get(arg_name, None)
        if attr is None:
            errmsg = "No attribute '%s' defined in %s"
            errmsg %= (arg_name, cls.__name__)
            raise ValueError(errmsg)

        attr.set_value_unguardedly(instance, arg_value)
        seen.add(arg_name)

    # for attr_name, attr, attr_val in aggregates:
    #     attr_val = attr.type(attr_val, _dominion=instance)

    # set default values for the remaining parameters
    # for attr_name, attr in parameters.items():
    #     getattr(instance, attr_name)
    #     # force it to get a chance to check the default value

    pkey_att_vals = tuple(getattr(instance, attr_name)
                          for attr_name in cls.__dobject_key__)

    setattr(instance, '__dobject_key__', cls.__dobject_key_class__(instance))

    return instance
