def __init__(self, *argv, **argd):
    self.inbound = _deque()
    self.F_inbound = _deque()
    self.core = _deque()
    self._actor_logger = logging.getLogger(
        '%s.%s' % (self.__module__, self.__class__.__name__))
    super(ActorMixin, self).__init__(*argv, **argd)
def __init__(self, flags=0, *, loop=None, maxsize=0):
    self._loop = loop or _asyncio.get_event_loop()
    self._maxsize = maxsize
    self._inotify = inotify = _Inotify()
    self._getters = _deque()
    self._events = _deque()
def __init__(self, flags=_FAN_CLASS_NOTIF, event_flags=_O_RDONLY, *,
             loop=None, maxsize=0):
    self._loop = loop or _asyncio.get_event_loop()
    self._maxsize = maxsize
    self._fanotify = fanotify = _Fanotify(flags, event_flags)
    self._getters = _deque()
    self._events = _deque()
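# The `_getters` deque in the wrappers above is the usual asyncio hand-off
# pattern: a FIFO of pending Futures, each resolved when an event arrives.
# A minimal, hypothetical sketch of that pattern (not the library's actual
# implementation):
import asyncio
from collections import deque

class EventSource:
    """FIFO hand-off: get() parks a Future; feed() wakes the oldest waiter."""

    def __init__(self):
        self._getters = deque()

    async def get(self):
        fut = asyncio.get_running_loop().create_future()
        self._getters.append(fut)
        return await fut

    def feed(self, value):
        # Wake the oldest non-cancelled waiter, preserving arrival order.
        while self._getters:
            fut = self._getters.popleft()
            if not fut.cancelled():
                fut.set_result(value)
                break

async def _demo():
    src = EventSource()
    task = asyncio.ensure_future(src.get())
    await asyncio.sleep(0)  # let the getter park itself
    src.feed(42)
    print(await task)       # 42

asyncio.run(_demo())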
def solve(head):
    # simple return cases
    if head and head.next:
        deque = _deque()
        deque.append(head)
        deque.append(head.next)
        head = head.next
    else:
        return head
    while len(deque) >= 2:
        # swap the latest pair's pointer
        deque[-2].next, deque[-1].next = deque[-1].next, deque[-2]
        # swap them in the deque so they make sequential sense
        deque[-1], deque[-2] = deque[-2], deque[-1]
        # update the rightmost of the previous pair as its neighbor changed
        if len(deque) > 2:
            deque[-3].next = deque[-2]
            # the leftmost element is now totally complete, so we pop it
            deque.popleft()
        # enqueue iff another non-null pair to swap is possible
        if deque[-1].next and deque[-1].next.next:
            deque.append(deque[-1].next)
            deque.append(deque[-1].next)
        else:
            break
    return head
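# A runnable sketch for solve() above; the minimal ListNode class is an
# assumption (the original snippet only defines the function):
class ListNode:
    def __init__(self, val, nxt=None):
        self.val = val
        self.next = nxt

head = ListNode(1, ListNode(2, ListNode(3, ListNode(4))))
head = solve(head)  # pairwise swap: 1->2->3->4 becomes 2->1->4->3
vals = []
while head:
    vals.append(head.val)
    head = head.next
print(vals)  # [2, 1, 4, 3]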
def mackey_glass(T, N, tau=17, n=10, beta=0.2, gamma=0.1, dt=10, mila=False):
    '''Returns `N` different Mackey Glass time-series of length `T` with delay `tau`.

    `tau` is the delay of the system, higher values being more chaotic.
    The values are centered and squashed through a tanh.

        dx/dt = beta * x_tau / (1 + x_tau ^ n) - gamma * x

    with x_tau = x(t - tau).

    The return shape is (N, T, 1).

    Origin of this function: https://github.com/mila-udem/summerschool2015
    but modified a slight bit (unless `mila` is True).
    '''
    X = np.empty((N, T, 1), floatX)
    x = 1.2  # Initial conditions for the history of the system

    for i in range(N):
        # Note that this is slightly different than the MILA one.
        # They didn't re-init x to 1.2 for each i, instead re-using the last
        # one from the previous i, all the while re-initializing the history.
        # I think what they do is wrong, but probably doesn't matter much.
        history = _deque((1.2 if mila else x)
                         + beta * (np.random.rand(tau * dt) - 0.5))
        # TODO: Is x above really x or just 1.2 which they used in MILA one?
        # TODO: 0.5 above must be constructed from others in some way?

        for t in range(T):
            for _ in range(dt):
                # xtau is the value at the last timestep, dt ago.
                xtau = history.popleft()
                history.append(x)
                x += (beta * xtau / (1.0 + xtau**n) - gamma * x) / dt
            X[i, t, 0] = x

    # Squash timeseries through tanh
    return np.tanh(X - 1)
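# A minimal usage sketch for mackey_glass(), assuming `np` is NumPy and
# `floatX` is a dtype alias (e.g. np.float32) defined in the same module:
import numpy as np
floatX = np.float32

series = mackey_glass(T=400, N=3)  # shape (3, 400, 1)
print(series.shape, float(series.min()), float(series.max()))  # values in (-1, 1)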
def __init__(self, limit):
    _Verbose.__init__(self)
    self.mon = RLock()
    self.rc = Condition(self.mon)
    self.wc = Condition(self.mon)
    self.limit = limit
    self.queue = _deque()
def __delitem__(self, key):
    super(CircularOnlineFlow, self).__delitem__(key)
    self.flow = _deque(self.flow)
    # key is a slice; slice.stop is an attribute, not a callable.
    if (key.start < self.output_node_idx) and (self.output_node_idx < key.stop):
        print('Output node deleted! Resetting the output node to the default last node.')
        self.reset_output_node()
    elif self.output_node_idx > key.stop:
        self.set_output_node(self.output_node_idx - key.stop + key.start)
def __init__(self, signals=[], flags=0, *, loop=None):
    self._loop = loop or _asyncio.get_event_loop()
    self._signalfd = _Signalfd(signals, flags)
    self._getters = _deque()
    self.enable = self._signalfd.enable
    self.enable_all = self._signalfd.enable_all
    self.disable = self._signalfd.disable
    self.disable_all = self._signalfd.disable_all
def retrive_rds_engines(parameter_pool):
    engine_versions = retrive_rds_engine_versions(parameter_pool)
    db_engines = _deque()
    for engine_version in engine_versions:
        if engine_version.engine not in db_engines:
            db_engines.append(engine_version.engine)
    return list(db_engines)
def breadth(iterable, testFn=isIterable, limit=sys.getrecursionlimit()):
    """ iterator doing a breadth first expansion of `iterable` """
    deq = _deque((x, 0) for x in iterable)
    while deq:
        arg, level = deq.popleft()
        if testFn(arg) and level < limit:
            for a in arg:
                deq.append((a, level + 1))
        else:
            yield arg
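# A quick usage sketch for breadth(); this stand-in isIterable (containers
# yes, strings no) is an assumption standing in for the module's own helper:
def isIterable(x):
    return isinstance(x, (list, tuple))

# Leaves come out level by level: shallow 1 and 5 before the nested 3 and 4.
print(list(breadth([1, [2, [3, 4]], 5], testFn=isIterable)))  # [1, 5, 2, 3, 4]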
def __init__(self, clock_type=_CLOCK_REALTIME, flags=0, *, loop=None):
    self._loop = loop or _asyncio.get_event_loop()
    self._timerfd = _Timer(clock_type, flags)
    self._getters = _deque()
    self.set_one_off = self._timerfd.set_one_off
    self.set_reoccuring = self._timerfd.set_reoccuring
    self.enabled = self._timerfd.__class__.__dict__['enabled']
    self.disabled = self._timerfd.__class__.__dict__['disabled']
    self.disable = self._timerfd.disable
    self.get_current = self._timerfd.get_current
def preorderArgs(limit=sys.getrecursionlimit(), testFn=isIterable, *args):
    """ returns a tuple of a preorder expansion of args """
    stack = [(x, 0) for x in args]
    result = _deque()
    while stack:
        arg, level = stack.pop()
        if testFn(arg) and level < limit:
            stack += [(x, level + 1) for x in arg]
        else:
            result.appendleft(arg)
    return tuple(result)
def breadthArgs(limit=sys.getrecursionlimit(), testFn=isIterable, *args):
    """ returns a tuple of a breadth first expansion of args """
    deq = _deque((x, 0) for x in args)
    result = []
    while deq:
        arg, level = deq.popleft()
        if testFn(arg) and level < limit:
            for a in arg:
                deq.append((a, level + 1))
        else:
            result.append(arg)
    return tuple(result)
def retrive_rds_default_engine_versions(parameter_pool):
    db_engines = retrive_rds_engines(parameter_pool)
    rds_client = create_rds_client(parameter_pool)
    db_default_versions = _deque()
    for engine in db_engines:
        response = rds_client.describe_db_engine_versions(engine=engine,
                                                          default_only=u'true')
        log.info(u'Received response for DescribeDBEngineVersions call.')
        log_response(u'DescribeDBEngineVersions', response.result)
        db_default_versions.append(response.result[0])
    return list(db_default_versions)
def postorderArgs(limit=sys.getrecursionlimit(), testFn=isIterable, *args):
    """ returns a tuple of a postorder expansion of args """
    if len(args) == 1:
        return (args[0],)
    else:
        deq = _deque((x, 0) for x in args)
        stack = []
        result = []
        while deq:
            arg, level = deq.popleft()
            if testFn(arg) and level < limit:
                deq = _deque([(x, level + 1) for x in arg] + list(deq))
            else:
                if stack:
                    while stack and level <= stack[-1][1]:
                        result.append(stack.pop()[0])
                    stack.append((arg, level))
                else:
                    stack.append((arg, level))
        while stack:
            result.append(stack.pop()[0])
        return tuple(result)
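# A usage sketch comparing the three expansions above, with the same assumed
# isIterable stand-in (containers yes, strings no):
import sys

def isIterable(x):
    return isinstance(x, (list, tuple))

nested = (1, [2, [3, 4]], 5)
print(breadthArgs(sys.getrecursionlimit(), isIterable, *nested))   # (1, 5, 2, 3, 4)
print(preorderArgs(sys.getrecursionlimit(), isIterable, *nested))  # (1, 2, 3, 4, 5)
print(postorderArgs(sys.getrecursionlimit(), isIterable, *nested))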
def __init__(self, flow, crash_recovery=False, verbose=False):
    """
    flow - a list of nodes.
    """
    super(CircularOnlineFlow, self).__init__(flow, crash_recovery, verbose)
    self.flow = _deque(flow)  # a circular queue of the flow

    # the number of internal flow iterations for each data point
    self._flow_iterations = 1
    # set the last node of the list as the default output node
    self.output_node_idx = len(self.flow) - 1
    # a variable to store inputs for internal train iterations
    self._stored_input = None
    # a flag that, when set, ignores the input data (uses stored input instead)
    self._ignore_input = False
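# Storing the flow as a deque makes the circular behavior cheap via rotate();
# a tiny illustration of a deque used as a ring (illustrative only, not the
# MDP API):
from collections import deque

ring = deque(['node0', 'node1', 'node2'])
ring.rotate(-1)    # advance the ring by one node
print(list(ring))  # ['node1', 'node2', 'node0']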
def _sort_snapshots_by_create_time(cls, snapshots):
    sorted_snapshots = _deque()
    for item in snapshots:
        if misc.string_equal_ignore_case(item.snapshot_type,
                                         DBSnapshot.TypeAutomated):
            continue  # skip automatically created snapshots
        if len(sorted_snapshots) < 1:
            sorted_snapshots.append(item)
        elif item._snapshot_create_time_raw < sorted_snapshots[-1]._snapshot_create_time_raw:
            sorted_snapshots.append(item)
        else:
            shift = 0
            while item._snapshot_create_time_raw < sorted_snapshots[0]._snapshot_create_time_raw:
                sorted_snapshots.rotate(-1)
                shift = shift + 1
            sorted_snapshots.appendleft(item)
            sorted_snapshots.rotate(shift)
    return sorted_snapshots
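# A self-contained sketch of the same rotate-and-insert technique, using a
# hypothetical Snap stand-in for DBSnapshot (newest-first ordering, assuming
# _snapshot_create_time_raw sorts chronologically):
from collections import deque, namedtuple

Snap = namedtuple('Snap', 'name create_time')  # hypothetical stand-in

def insert_newest_first(sorted_snaps, item):
    # Rotate the deque until the insertion point is at the left end,
    # appendleft, then rotate back; same idea as the method above.
    if not sorted_snaps or item.create_time < sorted_snaps[-1].create_time:
        sorted_snaps.append(item)
        return
    shift = 0
    while item.create_time < sorted_snaps[0].create_time:
        sorted_snaps.rotate(-1)
        shift += 1
    sorted_snaps.appendleft(item)
    sorted_snaps.rotate(shift)

snaps = deque()
for t in (3, 1, 4, 2):
    insert_newest_first(snaps, Snap('s%d' % t, t))
print([s.create_time for s in snaps])  # [4, 3, 2, 1]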
def _table_allpredicates_triples(self, table_iri, object_pattern):
    subject_mapper = self._orm_mappers[table_iri]
    subject_pkey_cols = subject_mapper.primary_key
    subject_node_from_sql = self._row_node_from_sql_func(table_iri)
    query = self._orm.query(*subject_pkey_cols)

    if object_pattern is None:
        # *(IRI), *, *
        subject_mapper = self._orm_mappers[table_iri]
        subject_cols = subject_mapper.columns
        subject_cols_props = self._orm_columns_properties[table_iri]
        subject_rels = self._orm_relationships[table_iri].values()

        query = query.with_entities()
        for predicate_col in subject_cols:
            predicate_prop = subject_cols_props[predicate_col.name]
            predicate_attr = predicate_prop.class_attribute
            query = query.add_columns(predicate_attr)
        for predicate_prop in subject_rels:
            object_table = predicate_prop.target
            object_table_iri = self._table_iri(object_table.name)
            object_cols_props = self._orm_columns_properties[object_table_iri]
            predicate_attr = predicate_prop.class_attribute
            query = (query.outerjoin(predicate_attr)
                          .add_columns(*(object_cols_props[col.name].class_attribute
                                         for col
                                         in object_table.primary_key.columns)))

        for query_result_values in query.all():
            query_result_values_pending = _deque(query_result_values)

            subject_cols_values = [query_result_values_pending.popleft()
                                   for _ in range(len(subject_cols))]
            subject_pkey_values = (subject_cols_values[i]
                                   for i, col in enumerate(subject_cols)
                                   if col in subject_pkey_cols)
            subject_node = subject_node_from_sql(zip(subject_pkey_cols,
                                                     subject_pkey_values))

            yield (subject_node, _rdf.RDF.type, table_iri)

            for predicate_col, object_value in zip(subject_cols,
                                                   subject_cols_values):
                if object_value is None:
                    continue
                predicate_iri = self._literal_property_iri(table_iri,
                                                           predicate_col.name)
                yield (subject_node, predicate_iri,
                       _common.rdf_literal_from_sql(object_value,
                                                    sql_type=predicate_col.type))

            for predicate_prop in subject_rels:
                object_table = predicate_prop.target
                object_pkey_cols = object_table.primary_key.columns
                object_pkey_values = [query_result_values_pending.popleft()
                                      for _ in range(len(object_pkey_cols))]
                if any(value is None for value in object_pkey_values):
                    continue
                predicate_iri = self._ref_property_iri(
                    table_iri,
                    (col.name for col in predicate_prop.local_columns))
                yield (subject_node, predicate_iri,
                       self._row_node_from_sql(self._table_iri(object_table.name),
                                               zip(object_pkey_cols,
                                                   object_pkey_values)))

    elif isinstance(object_pattern, _rdf.Literal):
        # *(IRI), *, literal
        subject_cols_props = self._orm_columns_properties[table_iri]
        object_sql_types = _common.sql_literal_types_from_rdf(
            object_pattern.datatype)
        for predicate_col in subject_mapper.columns:
            predicate_sql_type = predicate_col.type
            if isinstance(predicate_sql_type, object_sql_types):
                predicate_colname = predicate_col.name
                predicate_iri = self._literal_property_iri(table_iri,
                                                           predicate_colname)
                predicate_prop = subject_cols_props[predicate_colname]
                predicate_attr = predicate_prop.class_attribute
                object_sql_literal = _common.sql_literal_from_rdf(object_pattern)
                query_cand = query.filter(predicate_attr == object_sql_literal)
                for subject_pkey_values in query_cand.all():
                    yield (subject_node_from_sql(zip(subject_pkey_cols,
                                                     subject_pkey_values)),
                           predicate_iri, object_pattern)

    elif isinstance(object_pattern, (_rdf.URIRef, _rdf.BNode)):
        # *(IRI), *, IRI
        if object_pattern == table_iri:
            for subject_pkey_values in query.all():
                yield (subject_node_from_sql(zip(subject_pkey_cols,
                                                 subject_pkey_values)),
                       _rdf.RDF.type, table_iri)
            return

        try:
            object_table_iri, object_pkey = self._parse_row_node(object_pattern)
        except (TypeError, ValueError):
            return

        subject_rels = self._orm_relationships[table_iri]
        object_cols_props = self._orm_columns_properties[object_table_iri]
        for predicate_prop in subject_rels.values():
            predicate_iri = self._ref_property_iri(
                table_iri,
                (col.name for col in predicate_prop.local_columns))
            query_cand = (query.join(predicate_prop.class_attribute)
                               .filter(*(attr == value
                                         for attr, value in object_pkey.items())))
            for subject_pkey_values in query_cand.all():
                yield (subject_node_from_sql(zip(subject_pkey_cols,
                                                 subject_pkey_values)),
                       predicate_iri, object_pattern)

    else:
        return
def __init__(self, pos=(0, 0)):
    super(Turtle, self).__init__(pos)
    self._jobs = _deque()
def __init__(self, limit):
    self.mon = RLock()
    self.rc = Condition(self.mon)
    self.wc = Condition(self.mon)
    self.limit = limit
    self.queue = _deque()
def __init__(self, seed=None, /):
    seed = self.seed = get_seed(seed)
    rng = self.rng = default_rng(SeedSequence(self.seed))
    self.initstate = self.__getstate__()
    self.priorstates = _deque()
    self.random = rng.random
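# The class around this initializer (and its get_seed/__getstate__ helpers)
# isn't shown; a hypothetical, self-contained sketch of the same
# state-history pattern using NumPy's Generator state:
from collections import deque
from numpy.random import default_rng, SeedSequence

class ReplayableRNG:
    """Wraps a NumPy Generator and keeps a deque of prior bit-generator states."""

    def __init__(self, seed=None):
        self.seed = seed
        self.rng = default_rng(SeedSequence(seed))
        self.initstate = self.rng.bit_generator.state  # state at construction
        self.priorstates = deque()
        self.random = self.rng.random

    def checkpoint(self):
        # Push the current state so it can be restored later.
        self.priorstates.append(self.rng.bit_generator.state)

    def rollback(self):
        # Restore the most recently checkpointed state.
        self.rng.bit_generator.state = self.priorstates.pop()

rng = ReplayableRNG(42)
rng.checkpoint()
a = rng.random()
rng.rollback()
assert rng.random() == a  # same draw after restoring the state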
def _subject_triples(self, subject_node, predicate_pattern, object_pattern):
    try:
        subject_table_iri, subject_pkey = self._parse_row_node(subject_node)
    except (TypeError, ValueError):
        return

    subject_class = self._orm_classes[subject_table_iri]
    subject_cols_props = self._orm_columns_properties[subject_table_iri]
    query = (self._orm.query(subject_class)
                      .filter(*(attr == value
                                for attr, value in subject_pkey.items())))

    if predicate_pattern is None:
        if object_pattern is None:
            # IRI, *, *
            subject_mapper = self._orm_mappers[subject_table_iri]
            subject_cols = subject_mapper.columns
            subject_cols_props = self._orm_columns_properties[subject_table_iri]
            subject_rels = self._orm_relationships[subject_table_iri].values()

            query = query.with_entities()
            for predicate_col in subject_cols:
                predicate_colname = predicate_col.name
                predicate_iri = self._literal_property_iri(subject_table_iri,
                                                           predicate_colname)
                predicate_prop = subject_cols_props[predicate_colname]
                predicate_attr = predicate_prop.class_attribute
                query = query.add_columns(predicate_attr)
            for predicate_prop in subject_rels:
                object_table = predicate_prop.target
                object_table_iri = self._table_iri(object_table.name)
                object_cols_props = self._orm_columns_properties[object_table_iri]
                object_pkey_attrs = [object_cols_props[col.name].class_attribute
                                     for col in object_table.primary_key.columns]
                query = (query.outerjoin(predicate_prop.class_attribute)
                              .add_columns(*object_pkey_attrs))

            query_result_values = query.first()
            query_result_values_pending = _deque(query_result_values)
            subject_cols_values = [query_result_values_pending.popleft()
                                   for _ in range(len(subject_cols))]

            yield (subject_node, _rdf.RDF.type, subject_table_iri)

            for predicate_col, object_value in zip(subject_cols,
                                                   subject_cols_values):
                if object_value is None:
                    continue
                predicate_iri = self._literal_property_iri(subject_table_iri,
                                                           predicate_col.name)
                yield (subject_node, predicate_iri,
                       _common.rdf_literal_from_sql(object_value,
                                                    sql_type=predicate_col.type))

            for predicate_prop in subject_rels:
                object_table = predicate_prop.target
                object_pkey_cols = object_table.primary_key.columns
                object_pkey_values = [query_result_values_pending.popleft()
                                      for _ in range(len(object_pkey_cols))]
                object_node_from_sql = self._row_node_from_sql_func(
                    self._table_iri(object_table.name))
                if any(value is None for value in object_pkey_values):
                    continue
                predicate_iri = self._ref_property_iri(
                    subject_table_iri,
                    (col.name for col in predicate_prop.local_columns))
                yield (subject_node, predicate_iri,
                       object_node_from_sql(zip(object_pkey_cols,
                                                object_pkey_values)))

        elif isinstance(object_pattern, _rdf.Literal):
            # IRI, *, literal
            subject_mapper = self._orm_mappers[subject_table_iri]
            subject_cols_props = self._orm_columns_properties[subject_table_iri]
            object_sql_types = _common.sql_literal_types_from_rdf(
                object_pattern.datatype)
            for predicate_col in subject_mapper.columns:
                predicate_sql_type = predicate_col.type
                if isinstance(predicate_sql_type, object_sql_types):
                    predicate_colname = predicate_col.name
                    predicate_prop = subject_cols_props[predicate_colname]
                    predicate_attr = predicate_prop.class_attribute
                    object_sql_literal = _common.sql_literal_from_rdf(
                        object_pattern)
                    query_cand = query.filter(predicate_attr == object_sql_literal)
                    if self._orm.query(query_cand.exists()).scalar():
                        predicate_iri = self._literal_property_iri(
                            subject_table_iri, predicate_colname)
                        yield (subject_node, predicate_iri, object_pattern)

        elif isinstance(object_pattern, _rdf.URIRef):
            # IRI, *, IRI
            if object_pattern == subject_table_iri:
                if self._orm.query(query.exists()).scalar():
                    yield (subject_node, _rdf.RDF.type, subject_table_iri)
                return

            try:
                object_table_iri, object_pkey = self._parse_row_node(
                    object_pattern)
            except (TypeError, ValueError):
                return

            subject_rels = self._orm_relationships[subject_table_iri]
            object_cols_props = self._orm_columns_properties[object_table_iri]
            for predicate_prop in subject_rels.values():
                query_cand = (query.join(predicate_prop.class_attribute)
                                   .filter(*(attr == value
                                             for attr, value
                                             in object_pkey.items())))
                if self._orm.query(query_cand.exists()).scalar():
                    predicate_iri = self._ref_property_iri(
                        subject_table_iri,
                        (col.name for col in predicate_prop.local_columns))
                    yield (subject_node, predicate_iri, object_pattern)

        else:
            return

    elif predicate_pattern == _rdf.RDF.type:
        if object_pattern is None \
                or (isinstance(object_pattern, _rdf.URIRef)
                    and object_pattern == subject_table_iri):
            if self._orm.query(query.exists()).scalar():
                yield (subject_node, _rdf.RDF.type, subject_table_iri)

    elif isinstance(predicate_pattern, _rdf.URIRef):
        try:
            predicate_attr = self._predicate_orm_attr(predicate_pattern)
        except ValueError:
            return

        predicate_prop = predicate_attr.property
        if isinstance(predicate_prop, _sqla_orm.RelationshipProperty):
            if object_pattern is None:
                # IRI, ref IRI, *
                object_table = predicate_prop.target
                object_table_iri = self._table_iri(object_table.name)
                object_pkey_cols = object_table.primary_key.columns
                query = (query.join(predicate_attr)
                              .with_entities(*object_pkey_cols))
                for object_pkey_values in query.all():
                    yield (subject_node, predicate_pattern,
                           self._row_iri_from_sql(object_table_iri,
                                                  zip(object_pkey_cols,
                                                      object_pkey_values)))
            elif isinstance(object_pattern, _rdf.URIRef):
                # IRI, ref IRI, IRI
                try:
                    object_table_iri, object_pkey = self._parse_row_node(
                        object_pattern)
                except (TypeError, ValueError):
                    return
                object_cols_props = self._orm_columns_properties[object_table_iri]
                query = (query.join(predicate_attr)
                              .filter(*(attr == value
                                        for attr, value in object_pkey.items())))
                if self._orm.query(query.exists()).scalar():
                    yield (subject_node, predicate_pattern, object_pattern)
            else:
                return
        else:
            predicate_col, = predicate_attr.property.columns
            if object_pattern is None:
                # IRI, non-ref IRI, *
                query = (query.with_entities(predicate_attr)
                              .filter(predicate_attr != None))
                for value, in query.all():
                    yield (subject_node, predicate_pattern,
                           _common.rdf_literal_from_sql(
                               value, sql_type=predicate_col.type))
            elif isinstance(object_pattern, _rdf.Literal):
                # IRI, non-ref IRI, literal
                if object_pattern.datatype \
                        not in _common.rdf_datatypes_from_sql(predicate_col.type):
                    return
                object_sql_literal = _common.sql_literal_from_rdf(object_pattern)
                query = query.filter(predicate_attr != None,
                                     predicate_attr == object_sql_literal)
                if self._orm.query(query.exists()).scalar():
                    yield (subject_node, predicate_pattern, object_pattern)
            else:
                return

    else:
        return
def __setitem__(self, key, value):
    super(CircularOnlineFlow, self).__setitem__(key, value)
    self.flow = _deque(self.flow)
    # key is a slice; slice.stop is an attribute, not a callable.
    if (key.start < self.output_node_idx) and (self.output_node_idx < key.stop):
        print('Output node is replaced! Resetting the output node.')
        self.reset_output_node()
def __init__(self, inital_value=0, flags=0, *, loop=None):
    self._loop = loop or _asyncio.get_event_loop()
    self._eventfd = _Eventfd(inital_value, flags)
    self._getters = _deque()
    self._value = inital_value
def __init__(self):
    self._fixed_bools = _deque()
    self._fixed_ints = _deque()
    self._fixed_reals = _deque()
def _process_root(self, root_name: 'str') -> 'int':
    # Find mesh objects that are children of root in the same scene.
    scene_objs = (self.scene or _bpy.context.scene).collection.objects
    children_queue = _deque()  # type: Deque[str]
    children = set()  # type: Set[str]
    children_queue.extend(x.name for x in _bpy.data.objects[root_name].children)
    while len(children_queue) > 0:
        child_name = children_queue.pop()
        child = _bpy.data.objects[child_name]
        children_queue.extend(x.name for x in child.children)
        if child_name not in scene_objs:
            continue
        if not isinstance(child.data, _bpy.types.Mesh):
            continue
        children.add(child_name)
    self._call_before_group(root_name, children)

    groups = dict()  # type: Dict[Optional[str], Set[str]]
    for child_name in children:
        group_name = self._call_group_child(root_name, child_name)
        if group_name is False:
            continue  # skip
        group = groups.get(group_name)
        if group is None:
            group = set()
            groups[group_name] = group
        group.add(child_name)
    # log.info('%s %s', root_name, repr(groups))

    def create_mesh_obj(name):
        new_mesh = _bpy.data.meshes.new(name + '-Mesh')
        new_obj = _bpy.data.objects.new(name, object_data=new_mesh)
        new_obj.name = name  # force rename
        scene_objs.link(new_obj)
        return new_obj

    # From here on, deliberately avoid keeping references to the root object:
    # objects get replaced, and a stale reference raises the error
    # "StructRNA of type Object has been removed".
    obj_group_count = 0
    for group_name, obj_group in groups.items():
        join_to = None
        if group_name is None:
            if isinstance(_bpy.data.objects[root_name].data, _bpy.types.Mesh):
                # root is itself a mesh: join onto it.
                join_to = _bpy.data.objects[root_name]
            elif self.force_mesh_root:
                # root is NOT a mesh, but force_mesh_root is set.
                base_name = root_name
                old_root = _bpy.data.objects[root_name]
                old_root.name = base_name + '-Replaced'
                self.replaced_objects.add(old_root.name)
                join_to = create_mesh_obj(base_name)
                root_name = join_to.name  # the actual new name
                self.created_objects.add(root_name)
                join_to.parent = old_root.parent
                join_to.parent_type = 'OBJECT'
                join_to.location = old_root.location
                join_to.rotation_mode = old_root.rotation_mode
                join_to.rotation_axis_angle = old_root.rotation_axis_angle
                join_to.rotation_euler = old_root.rotation_euler
                join_to.rotation_quaternion = old_root.rotation_quaternion
                join_to.scale = old_root.scale
                for sub_child in old_root.children:  # type: Object
                    sub_child.parent = join_to
            else:
                # root is NOT a mesh: create a subgroup.
                join_to = create_mesh_obj(root_name + '-' + self.default_group)
                self.created_objects.add(join_to.name)
                join_to.parent = _bpy.data.objects[root_name]
                join_to.parent_type = 'OBJECT'
                _commons.identity_transform(join_to)
        else:
            join_to = create_mesh_obj(root_name + '-' + group_name)
            self.created_objects.add(join_to.name)
            join_to.parent = _bpy.data.objects[root_name]
            join_to.parent_type = 'OBJECT'
            _commons.identity_transform(join_to)
        self._call_before_join(root_name, join_to.name, group_name, obj_group)
        self._join_objects(join_to, obj_group)
        self._call_after_join(root_name, join_to.name, group_name)
        obj_group_count += len(obj_group)
    return obj_group_count
def to_neuroml(self, filename, resolution=10, write=True):
    '''
    Save the neuron as a NeuroML (.nml) object.

    Parameters
    ----------
    filename : str
        Name of the NML file to write.
    resolution : int, optional (default: 10)
        Coarse-graining factor of the structure: only one point every
        `resolution` will be kept.
    write : bool, optional (default: True)
        Write the file.

    Returns
    -------
    neuroml.Cell object.
    '''
    import neuroml
    import neuroml.writers as writers

    x = self.position[0].to('micrometer').m
    y = self.position[1].to('micrometer').m
    z = 0.

    p = neuroml.Point3DWithDiam(x=x, y=y, z=z,
                                diameter=2. * self.soma_radius.m)
    soma = neuroml.Segment(proximal=p, distal=p)
    soma.name = 'Soma'
    soma.id = 0
    seg_id = 0

    morpho = neuroml.Morphology()
    morpho.id = "Morphology neuron {}".format(int(self))
    morpho.segments.append(soma)

    neurites_segments = []
    neurites = list(self.dendrites.values())
    if self.axon is not None:
        neurites.append(self.axon)

    # set dendrites
    for neurite in neurites:
        p_segment = soma
        parent = neuroml.SegmentParent(segments=soma.id)
        branch_seen = {}
        todo = _deque([branch for branch in neurite.branches])
        indices = _deque([i for i in range(len(todo))])

        while todo:
            branch = todo.popleft()
            idx = indices.popleft()

            if branch.parent in (-1, 0, None):
                p_segment = soma
                parent = neuroml.SegmentParent(segments=soma.id)
            elif branch.parent in branch_seen:
                p_segment = branch_seen[branch.parent]
                parent = neuroml.SegmentParent(segments=p_segment.id)
            else:
                parent = None

            if parent is not None:
                diameter = branch.diameter
                if neurite.taper_rate is not None:
                    dist_to_tip = _np.cumsum(branch.r[::-1])[::-1]
                    diameter = diameter + neurite.taper_rate * dist_to_tip
                else:
                    # constant diameter along the branch (as an array so it
                    # can be sliced below; a generator cannot be)
                    diameter = diameter * _np.ones(len(branch.xy))

                # subsample positions and diameters
                subnodes = branch.xy[::resolution].m
                subdiam = diameter[::resolution].m

                for pos, diam in zip(subnodes, subdiam):
                    p = neuroml.Point3DWithDiam(
                        x=p_segment.distal.x, y=p_segment.distal.y,
                        z=p_segment.distal.z,
                        diameter=p_segment.distal.diameter)
                    d = neuroml.Point3DWithDiam(x=pos[0], y=pos[1],
                                                z=p_segment.distal.z,
                                                diameter=diam)
                    n_segment = neuroml.Segment(proximal=p, distal=d,
                                                parent=parent)
                    n_segment.id = seg_id
                    n_segment.name = '{}_segment_{}'.format(neurite, seg_id)

                    # set as next parent
                    p_segment = n_segment
                    parent = neuroml.SegmentParent(segments=p_segment.id)
                    seg_id += 1

                    neurites_segments.append(n_segment)

                # store the last point as future parent for child branches
                branch_seen[branch.node_id] = p_segment
            else:
                # parent branch not processed yet: requeue for a later pass
                todo.append(branch)
                indices.append(idx)

    morpho.segments += neurites_segments

    # make the neuroml cell
    cell = neuroml.Cell()
    cell.name = "Neuron {}".format(int(self))
    cell.id = int(self)
    cell.morphology = morpho

    # write
    if write:
        doc = neuroml.NeuroMLDocument(id=filename)
        doc.cells.append(cell)
        writers.NeuroMLWriter.write(doc, filename)

    return cell
def __init__(self):
    self._waiters = _deque()
    self._holds = _defaultdict(int)