def update(self, transaction):
    """
    Update an existing transaction in the database
    :param sakia.data.entities.Transaction transaction: the transaction to update
    """
    updated_fields = attr.astuple(transaction,
                                  filter=attr.filters.exclude(*TransactionsRepo._primary_keys),
                                  tuple_factory=list)
    updated_fields[3] = "\n".join([str(n) for n in updated_fields[3]])
    updated_fields[4] = "\n".join([str(n) for n in updated_fields[4]])
    updated_fields[5] = "\n".join([str(n) for n in updated_fields[5]])
    where_fields = attr.astuple(transaction,
                                filter=attr.filters.include(*TransactionsRepo._primary_keys),
                                tuple_factory=list)
    self._conn.execute("""UPDATE transactions SET
                          written_on=?,
                          blockstamp=?,
                          ts=?,
                          signatures=?,
                          issuers=?,
                          receivers=?,
                          amount=?,
                          amountbase=?,
                          comment=?,
                          txid=?,
                          state=?,
                          local=?,
                          raw=?
                          WHERE
                          currency=? AND
                          pubkey=? AND
                          sha_hash=?""",
                       updated_fields + where_fields)
def update(self, node):
    """
    Update an existing node in the database
    :param sakia.data.entities.Node node: the node to update
    """
    updated_fields = attr.astuple(node, tuple_factory=list,
                                  filter=attr.filters.exclude(*NodesRepo._primary_keys))
    # endpoints (index 0) and merkle_peers_leaves (index 10) are list fields
    # and must be flattened to newline-separated text before being written.
    updated_fields[0] = "\n".join([str(n) for n in updated_fields[0]])
    updated_fields[10] = "\n".join([str(n) for n in updated_fields[10]])
    where_fields = attr.astuple(node, tuple_factory=list,
                                filter=attr.filters.include(*NodesRepo._primary_keys))
    self._conn.execute("""UPDATE nodes SET
                          endpoints=?,
                          peer_buid=?,
                          uid=?,
                          current_buid=?,
                          current_ts=?,
                          previous_buid=?,
                          state=?,
                          software=?,
                          version=?,
                          merkle_peers_root=?,
                          merkle_peers_leaves=?,
                          root=?,
                          member=?,
                          last_state_change=?
                          WHERE
                          currency=? AND
                          pubkey=?""",
                       updated_fields + where_fields)
def update(self, blockchain):
    """
    Update an existing blockchain in the database
    :param sakia.data.entities.Blockchain blockchain: the blockchain to update
    """
    updated_fields = attr.astuple(blockchain,
                                  filter=attr.filters.exclude(
                                      attr.fields(Blockchain).parameters,
                                      *BlockchainsRepo._primary_keys))
    where_fields = attr.astuple(blockchain,
                                filter=attr.filters.include(*BlockchainsRepo._primary_keys))
    self._conn.execute("""UPDATE blockchains SET
                          current_buid=?,
                          current_members_count=?,
                          current_mass=?,
                          median_time=?,
                          last_mass=?,
                          last_members_count=?,
                          last_ud=?,
                          last_ud_base=?,
                          last_ud_time=?,
                          previous_mass=?,
                          previous_members_count=?,
                          previous_ud=?,
                          previous_ud_base=?,
                          previous_ud_time=?
                          WHERE
                          currency=?""",
                       updated_fields + where_fields)
def update(self, identity):
    """
    Update an existing identity in the database
    :param sakia.data.entities.Identity identity: the identity to update
    """
    updated_fields = attr.astuple(identity,
                                  filter=attr.filters.exclude(*IdentitiesRepo._primary_keys))
    where_fields = attr.astuple(identity,
                                filter=attr.filters.include(*IdentitiesRepo._primary_keys))
    self._conn.execute("""UPDATE identities SET
                          signature=?,
                          timestamp=?,
                          written=?,
                          revoked_on=?,
                          outdistanced=?,
                          member=?,
                          ms_buid=?,
                          ms_timestamp=?,
                          ms_written_on=?,
                          ms_type=?,
                          sentry=?
                          WHERE
                          currency=? AND
                          pubkey=? AND
                          uid=? AND
                          blockstamp=?""",
                       updated_fields + where_fields)
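All four update() methods above share one pattern: exclude the repository's primary keys from astuple() to get the SET values, include them to get the WHERE values, and concatenate the two lists for the parametrised query. A minimal, self-contained sketch of that pattern, using a made-up Person entity rather than sakia's real schema:

import attr

@attr.s
class Person:
    pubkey = attr.ib()  # primary key
    uid = attr.ib()
    member = attr.ib()

_primary_keys = (attr.fields(Person).pubkey,)

p = Person(pubkey="pubkey123", uid="alice", member=True)
# SET values: everything except the primary keys, in declaration order.
updated_fields = attr.astuple(p, filter=attr.filters.exclude(*_primary_keys),
                              tuple_factory=list)
# WHERE values: only the primary keys.
where_fields = attr.astuple(p, filter=attr.filters.include(*_primary_keys),
                            tuple_factory=list)
assert updated_fields + where_fields == ["alice", True, "pubkey123"]

Because both calls iterate fields in declaration order, the concatenated list lines up with the "?" placeholders as long as the column order in the SQL statement matches the entity definition.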
def union2(p: Union[Type[A], Type[B]]):
    attr.fields(p)
    attr.fields_dict(p)
    attr.asdict(<warning descr="'attr.asdict' method should be called on attrs instances">p</warning>)
    attr.astuple(<warning descr="'attr.astuple' method should be called on attrs instances">p</warning>)
    attr.assoc(<warning descr="'attr.assoc' method should be called on attrs instances">p</warning>)
    attr.evolve(<warning descr="'attr.evolve' method should be called on attrs instances">p</warning>)
def union1(p: Union[A, B]):
    attr.fields(<warning descr="'attr.fields' method should be called on attrs types">p</warning>)
    attr.fields_dict(<warning descr="'attr.fields_dict' method should be called on attrs types">p</warning>)
    attr.asdict(p)
    attr.astuple(p)
    attr.assoc(p)
    attr.evolve(p)
def insert(self, blockchain):
    """
    Commit a blockchain to the database
    :param sakia.data.entities.Blockchain blockchain: the blockchain to commit
    """
    blockchain_tuple = attr.astuple(blockchain.parameters) \
                       + attr.astuple(blockchain,
                                      filter=attr.filters.exclude(attr.fields(Blockchain).parameters))
    values = ",".join(['?'] * len(blockchain_tuple))
    self._conn.execute("INSERT INTO blockchains VALUES ({0})".format(values),
                       blockchain_tuple)
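The insert() methods in this collection all derive their placeholder string from len(astuple(...)), so the VALUES clause automatically tracks the entity's field count. A runnable sketch of the idiom against an in-memory sqlite3 database; the Source entity and table here are illustrative stand-ins, not sakia's actual schema:

import sqlite3

import attr

@attr.s
class Source:
    currency = attr.ib()
    amount = attr.ib()

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE sources (currency TEXT, amount INTEGER)")

source_tuple = attr.astuple(Source("gtest", 100))
# One "?" per field keeps the statement in sync with the entity definition.
values = ",".join(['?'] * len(source_tuple))
conn.execute("INSERT INTO sources VALUES ({0})".format(values), source_tuple)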
def structural(p):
    print(len(p))
    attr.fields(p)
    attr.fields_dict(p)
    attr.asdict(p)
    attr.astuple(p)
    attr.assoc(p)
    attr.evolve(p)
def test_negative_source_detection():
    """
    Also need to detect 'negative' sources, i.e. where a source in the
    subtraction model is not present in the data, creating a trough in the
    difference image.

    Again, start by using a flat background (rather than noisy) to avoid
    random-noise fluctuations causing erroneous detections (and
    test-failures).
    """
    img = np.zeros((ydim, xdim))
    add_gaussian2d_to_image(negative_src, img)
    # img += evaluate_model_on_pixel_grid(img.shape, faint_src)
    sf = SourceFindImage(img, detection_n_sigma=4, analysis_n_sigma=3,
                         rms_est=rms)
    assert len(sf.islands) == 1
    found_island = sf.islands[0]
    # print()
    # print(negative_src)
    # print(found_island.params)
    assert np.abs(found_island.extremum.index.x - negative_src.x_centre) < 0.5
    assert np.abs(found_island.extremum.index.y - negative_src.y_centre) < 0.5

    add_gaussian2d_to_image(bright_src, img)
    sf = SourceFindImage(img, detection_n_sigma=4, analysis_n_sigma=3,
                         rms_est=rms)
    assert len(sf.islands) == 2
    positive_islands = [i for i in sf.islands if i.sign == 1]
    negative_islands = [i for i in sf.islands if i.sign == -1]
    assert len(positive_islands) == 1
    assert len(negative_islands) == 1
    assert negative_islands[0] == found_island

    neg_island = negative_islands[0]
    pos_island = positive_islands[0]

    min_pixel_index = extremum_pixel_index(img, -1)
    assert attr.astuple(neg_island.extremum.index) == min_pixel_index
    max_pixel_index = extremum_pixel_index(img, 1)
    assert attr.astuple(pos_island.extremum.index) == max_pixel_index

    # Sanity check that the island masks look sensible
    # Check that the mask==False regions are disjoint - taking the boolean OR
    # on both masks should result in a fully `True` mask-array.
    assert np.logical_or(neg_island.data.mask, pos_island.data.mask).all()
    # And that the true/false regions look sensible for the extremum pixels:
    assert neg_island.data.mask[min_pixel_index] == False
    assert neg_island.data.mask[max_pixel_index] == True
    assert pos_island.data.mask[min_pixel_index] == True
    assert pos_island.data.mask[max_pixel_index] == False
def insert_highscore(self, highscore: HighscoreStruct) -> None:
    super().insert_highscore(highscore)
    self.execute(
        self._get_insert_highscore_sql(fmt="?"),
        attr.astuple(highscore),
        commit=True,
    )
def test_lists_tuples(self, container, C):
    """
    If recurse is True, also recurse into lists.
    """
    assert (1, [(2, 3), (4, 5), "a"]) == astuple(
        C(1, container([C(2, 3), C(4, 5), "a"]))
    )
def _mqtt_on_connect(self, _mqttc, _userdata, _flags,
                     result_code: int) -> None:
    """On connect callback.

    Resubscribe to all topics we were subscribed to and publish birth
    message.
    """
    import paho.mqtt.client as mqtt

    if result_code != mqtt.CONNACK_ACCEPTED:
        _LOGGER.error('Unable to connect to the MQTT broker: %s',
                      mqtt.connack_string(result_code))
        self._mqttc.disconnect()
        return

    # Group subscriptions to only re-subscribe once for each topic.
    keyfunc = attrgetter('topic')
    for topic, subs in groupby(sorted(self.subscriptions, key=keyfunc),
                               keyfunc):
        # Re-subscribe with the highest requested qos
        max_qos = max(subscription.qos for subscription in subs)
        self.hass.add_job(self._async_perform_subscription, topic, max_qos)

    if self.birth_message:
        self.hass.add_job(
            self.async_publish(*attr.astuple(self.birth_message)))
def test_smoke(self):
    """
    `attrs.astuple` only changes defaults, so we just call it and compare.
    """
    inst = C("foo", 42)

    assert attrs.astuple(inst) == _attr.astuple(inst)
def __init__(self,
             init_swing=0,
             init_extension=2.0,
             init_pose=None,
             action_scale=1.0,
             action_limit=0.5):
    """Initializes the controller.

    Args:
      init_swing: the swing of the default pose offset
      init_extension: the extension of the default pose offset
      init_pose: the default pose offset, which is None by default. If not
        None, it will define the default pose offset while ignoring
        init_swing and init_extension.
      action_scale: changes the magnitudes of actions
      action_limit: clips actions
    """
    if init_pose is None:
        self._pose = np.array(
            attr.astuple(
                minitaur_pose_utils.MinitaurPose(
                    swing_angle_0=init_swing,
                    swing_angle_1=init_swing,
                    swing_angle_2=init_swing,
                    swing_angle_3=init_swing,
                    extension_angle_0=init_extension,
                    extension_angle_1=init_extension,
                    extension_angle_2=init_extension,
                    extension_angle_3=init_extension)))
    else:
        # Ignore init_swing and init_extension
        self._pose = np.array(init_pose)
    action_high = np.array([action_limit] * minitaur_pose_utils.NUM_MOTORS)
    self.action_space = spaces.Box(-action_high, action_high, dtype=np.float32)
    self._action_scale = action_scale
def __init__(
    self,
    init_abduction=laikago_pose_utils.LAIKAGO_DEFAULT_ABDUCTION_ANGLE,
    init_hip=laikago_pose_utils.LAIKAGO_DEFAULT_HIP_ANGLE,
    init_knee=laikago_pose_utils.LAIKAGO_DEFAULT_KNEE_ANGLE,
    action_limit=0.5,
):
    """Initializes the controller.

    Args:
      init_abduction: the default abduction angle, applied to all four legs.
      init_hip: the default hip angle, applied to all four legs.
      init_knee: the default knee angle, applied to all four legs.
      action_limit: the symmetric bound applied to each of the 12 motor
        actions.
    """
    self._pose = np.array(
        attr.astuple(
            laikago_pose_utils.LaikagoPose(
                abduction_angle_0=init_abduction,
                hip_angle_0=init_hip,
                knee_angle_0=init_knee,
                abduction_angle_1=init_abduction,
                hip_angle_1=init_hip,
                knee_angle_1=init_knee,
                abduction_angle_2=init_abduction,
                hip_angle_2=init_hip,
                knee_angle_2=init_knee,
                abduction_angle_3=init_abduction,
                hip_angle_3=init_hip,
                knee_angle_3=init_knee)))
    action_high = np.array([action_limit] * 12)
    self.action_space = spaces.Box(-action_high, action_high, dtype=np.float32)
async def async_set_cast_info(self, cast_info):
    """Set the cast information and set up the chromecast object."""
    import pychromecast
    old_cast_info = self._cast_info
    self._cast_info = cast_info

    if self._chromecast is not None:
        if old_cast_info.host_port == cast_info.host_port:
            # Nothing connection-related updated
            return
        await self._async_disconnect()

    # Failed connection will unfortunately never raise an exception, it
    # will instead just try connecting indefinitely.
    # pylint: disable=protected-access
    _LOGGER.debug("Connecting to cast device %s", cast_info)
    chromecast = await self.hass.async_add_job(
        pychromecast._get_chromecast_from_host, attr.astuple(cast_info))
    self._chromecast = chromecast
    self._status_listener = CastStatusListener(self, chromecast)
    # Initialise connection status as connected because we can only
    # register the connection listener *after* the initial connection
    # attempt. If the initial connection failed, we would never reach
    # this code anyway.
    self._available = True
    self.cast_status = chromecast.status
    self.media_status = chromecast.media_controller.status
    _LOGGER.debug("Connection successful!")
    self.async_schedule_update_ha_state()
def __init__(
    self,
    pose,
    action_limit=0.5,
):
    """Initializes the controller.

    Args:
      pose: a sequence of 12 joint angles, ordered as [abduction, hip, knee]
        for each of the four legs.
      action_limit: the symmetric bound applied to each of the 12 motor
        actions.
    """
    self._pose = np.array(
        attr.astuple(
            laikago_pose_utils.LaikagoPose(abduction_angle_0=pose[0],
                                           hip_angle_0=pose[1],
                                           knee_angle_0=pose[2],
                                           abduction_angle_1=pose[3],
                                           hip_angle_1=pose[4],
                                           knee_angle_1=pose[5],
                                           abduction_angle_2=pose[6],
                                           hip_angle_2=pose[7],
                                           knee_angle_2=pose[8],
                                           abduction_angle_3=pose[9],
                                           hip_angle_3=pose[10],
                                           knee_angle_3=pose[11])))
    action_high = np.array([action_limit] * 12)
    self.action_space = spaces.Box(-action_high, action_high, dtype=np.float32)
def update_artifact_by_id(conn, artifact_id: int, artifact: Artifact) -> None:
    """
    Update an artifact in the database

    Arguments:
    conn        - An sqlite connection object representing
                  the database on which data will be inserted
    artifact_id - the ID associated to the artifact to update
    artifact    - an artifact object which contains
                  the attributes to update

    Returns:
    None. If no artifact with the given ID exists, the function
    returns without performing any update.
    """
    current_artifact = get_artifact_by_id(conn, artifact_id)
    if not current_artifact:
        return None

    update_record = (artifact_id, artifact.title, artifact.category,
                     artifact.path, artifact.tags, artifact.status,
                     artifact.author, artifact.template)

    # Keep the new value where one was provided, otherwise fall back to the
    # current value of the corresponding field.
    new_record = list()
    for i, elem in enumerate(attr.astuple(current_artifact)):
        new_record.append(update_record[i] or elem or None)

    delete_artifact_by_id(conn, artifact_id)
    updated_artifact = Artifact(*new_record)
    insert_artifact_with_id(conn, updated_artifact, artifact_id)
def test_dicts(self, C, tuple_factory):
    """
    If recurse is True, also recurse into dicts.
    """
    res = astuple(C(1, {"a": C(4, 5)}), tuple_factory=tuple_factory)

    assert tuple_factory([1, {"a": tuple_factory([4, 5])}]) == res
    assert isinstance(res, tuple_factory)
def write(self, fp, version=1):
    """Write the element to a file-like object.

    :param fp: file-like object
    :param version: psd file version
    """
    return write_fmt(fp, ('hI', 'hQ')[version - 1], *attr.astuple(self))
def test_recurse(self, C, tuple_factory):
    """
    Deep astuple returns correct tuple.
    """
    assert tuple_factory(
        [tuple_factory([1, 2]), tuple_factory([3, 4])]
    ) == astuple(C(C(1, 2), C(3, 4)), tuple_factory=tuple_factory)
def test_shallow(self, C, tuple_factory):
    """
    Shallow astuple returns correct tuple.
    """
    assert tuple_factory([1, 2]) == astuple(
        C(x=1, y=2), False, tuple_factory=tuple_factory
    )
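Together, test_recurse and test_shallow above pin down the recursion switch: astuple descends into nested attrs instances by default, and passing False as the second positional argument (recurse) keeps nested instances intact. A throwaway Point/Segment sketch of the same behaviour (classes are illustrative, not from the test suite):

import attr

@attr.s
class Point:
    x = attr.ib()
    y = attr.ib()

@attr.s
class Segment:
    a = attr.ib()
    b = attr.ib()

seg = Segment(Point(1, 2), Point(3, 4))
# recurse=True (the default) flattens nested attrs instances into tuples.
assert attr.astuple(seg) == ((1, 2), (3, 4))
# recurse=False keeps the nested instances untouched.
assert attr.astuple(seg, recurse=False) == (Point(1, 2), Point(3, 4))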
def get_neutral_motor_angles(robot_class):
    """Return a neutral (standing) pose for a given robot type.

    Args:
      robot_class: the robot class (not an instance). Currently it supports
        minitaur, laikago and mini-cheetah.

    Returns:
      Pose object for the given robot. It's either MinitaurPose, LaikagoPose
      or MiniCheetahPose.

    Raises:
      ValueError: If the given robot_class is different than the supported
        robots.
    """
    if str(robot_class) == str(laikago.Laikago):
        init_pose = np.array(
            attr.astuple(
                laikago_pose_utils.LaikagoPose(
                    abduction_angle_0=0,
                    hip_angle_0=_LAIKAGO_NEUTRAL_POSE_HIP_ANGLE,
                    knee_angle_0=_LAIKAGO_NEUTRAL_POSE_KNEE_ANGLE,
                    abduction_angle_1=0,
                    hip_angle_1=_LAIKAGO_NEUTRAL_POSE_HIP_ANGLE,
                    knee_angle_1=_LAIKAGO_NEUTRAL_POSE_KNEE_ANGLE,
                    abduction_angle_2=0,
                    hip_angle_2=_LAIKAGO_NEUTRAL_POSE_HIP_ANGLE,
                    knee_angle_2=_LAIKAGO_NEUTRAL_POSE_KNEE_ANGLE,
                    abduction_angle_3=0,
                    hip_angle_3=_LAIKAGO_NEUTRAL_POSE_HIP_ANGLE,
                    knee_angle_3=_LAIKAGO_NEUTRAL_POSE_KNEE_ANGLE)))
    else:
        init_pose = robot_class.get_neutral_motor_angles()
    return init_pose
def __init__(
    self,
    init_abduction=laikago_pose_utils.LAIKAGO_DEFAULT_ABDUCTION_ANGLE,
    init_hip=laikago_pose_utils.LAIKAGO_DEFAULT_HIP_ANGLE,
    init_knee=laikago_pose_utils.LAIKAGO_DEFAULT_KNEE_ANGLE,
    amplitude=MOTION_AMPLITUDE,
    frequency=MOTION_FREQUENCY,
    gait=PACE_GAIT,  # can be TROT_GAIT or PACE_GAIT
):
    """Initializes the controller."""
    self._pose = np.array(
        attr.astuple(
            laikago_pose_utils.LaikagoPose(
                abduction_angle_0=init_abduction,
                hip_angle_0=init_hip,
                knee_angle_0=init_knee,
                abduction_angle_1=init_abduction,
                hip_angle_1=init_hip,
                knee_angle_1=init_knee,
                abduction_angle_2=init_abduction,
                hip_angle_2=init_hip,
                knee_angle_2=init_knee,
                abduction_angle_3=init_abduction,
                hip_angle_3=init_hip,
                knee_angle_3=init_knee)))
    action_high = np.array([ACTION_BOUND] * NUM_MOTORS_LAIKAGO)
    self.action_space = spaces.Box(-action_high, action_high, dtype=np.float32)
    self.amplitude = amplitude
    self.period = 1.0 / frequency
    self.gait = gait
def drop(self, blockchain):
    """
    Drop an existing blockchain from the database
    :param sakia.data.entities.Blockchain blockchain: the blockchain to drop
    """
    where_fields = attr.astuple(blockchain,
                                filter=attr.filters.include(*BlockchainsRepo._primary_keys))
    self._conn.execute("DELETE FROM blockchains WHERE currency=?", where_fields)
def test_recurse_retain(self, cls, tuple_class):
    """
    Property tests for asserting collection types are retained.
    """
    obj = cls()
    obj_tuple = astuple(obj, tuple_factory=tuple_class,
                        retain_collection_types=True)

    def assert_proper_col_class(obj, obj_tuple):
        # Iterate over all attributes, and if they are lists or mappings
        # in the original, assert they are the same class in the dumped.
        for index, field in enumerate(fields(obj.__class__)):
            field_val = getattr(obj, field.name)
            if has(field_val.__class__):
                # This field holds a class, recurse the assertions.
                assert_proper_col_class(field_val, obj_tuple[index])
            elif isinstance(field_val, (list, tuple)):
                # This field holds a sequence of something.
                expected_type = type(obj_tuple[index])
                assert type(field_val) is expected_type  # noqa: E721
                for obj_e, obj_tuple_e in zip(field_val, obj_tuple[index]):
                    if has(obj_e.__class__):
                        assert_proper_col_class(obj_e, obj_tuple_e)
            elif isinstance(field_val, dict):
                orig = field_val
                tupled = obj_tuple[index]
                assert type(orig) is type(tupled)  # noqa: E721
                for obj_e, obj_tuple_e in zip(orig.items(), tupled.items()):
                    if has(obj_e[0].__class__):  # Dict key
                        assert_proper_col_class(obj_e[0], obj_tuple_e[0])
                    if has(obj_e[1].__class__):  # Dict value
                        assert_proper_col_class(obj_e[1], obj_tuple_e[1])

    assert_proper_col_class(obj, obj_tuple)
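A compact, deterministic illustration of what that property test asserts, using a made-up Holder class: by default astuple rebuilds inner tuples (and sets) as lists, while retain_collection_types=True preserves the original container class.

import attr

@attr.s
class Holder:
    items = attr.ib()

h = Holder((1, 2))
# By default the inner tuple is converted to a list.
assert attr.astuple(h) == ([1, 2],)
# With retain_collection_types=True the tuple stays a tuple.
assert attr.astuple(h, retain_collection_types=True) == ((1, 2),)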
def write(self, records: Iterable[Record]):  # type: ignore[override]
    cur = self.conn.cursor()
    rows_data = [attr.astuple(r) for r in records]
    try:
        cur.executemany(self.sql_statement, rows_data)
        self.conn.commit()
    finally:
        cur.close()
def update(self, certification):
    """
    Update an existing certification in the database
    :param sakia.data.entities.Certification certification: the certification to update
    """
    updated_fields = attr.astuple(certification,
                                  filter=attr.filters.exclude(*CertificationsRepo._primary_keys))
    where_fields = attr.astuple(certification,
                                filter=attr.filters.include(*CertificationsRepo._primary_keys))
    self._conn.execute("""UPDATE certifications SET
                          ts=?,
                          signature=?,
                          written_on=?
                          WHERE
                          currency=? AND
                          certifier=? AND
                          certified=? AND
                          block=?""",
                       updated_fields + where_fields)
def insert(self, certification):
    """
    Commit a certification to the database
    :param sakia.data.entities.Certification certification: the certification to commit
    """
    certification_tuple = attr.astuple(certification)
    values = ",".join(['?'] * len(certification_tuple))
    self._conn.execute("INSERT INTO certifications VALUES ({0})".format(values),
                       certification_tuple)
def write_mappings(mappings: Iterable[CasePropertyMap]):
    filename = os.path.join(settings.BASE_DIR, MAPPINGS_FILE)
    with open(filename, 'w') as csv_file:
        csv_writer = writer(csv_file)
        header = [f.name for f in attr.fields(CasePropertyMap)]
        csv_writer.writerow(header)
        rows = (attr.astuple(m) for m in mappings)
        csv_writer.writerows(rows)
def reduce(x):
    return (
        x.id,
        x.round,
        x.position,
        x.pool,
        tuple(attr.astuple(c) for c in x.competitors),
    )
def __iter__(self):
    """
    Iterate over the GLFW constants in the KeyMap.

    :return:
    """
    for member in attr.astuple(self):
        yield member.value
def insert(self, source):
    """
    Commit a source to the database
    :param sakia.data.entities.Source source: the source to commit
    """
    source_tuple = attr.astuple(source)
    values = ",".join(['?'] * len(source_tuple))
    self._conn.execute("INSERT INTO sources VALUES ({0})".format(values),
                       source_tuple)
def inline(self):
    values = [str(v) for v in attr.astuple(self, True,
                                           filter=attr.filters.exclude(attr.fields(HeadV1).v0))]
    return self.v0.inline() + ":" + ":".join(values)
def insert(self, identity):
    """
    Commit an identity to the database
    :param sakia.data.entities.Identity identity: the identity to commit
    """
    identity_tuple = attr.astuple(identity)
    values = ",".join(['?'] * len(identity_tuple))
    self._conn.execute("INSERT INTO identities VALUES ({0})".format(values),
                       identity_tuple)
def insert(self, dividend):
    """
    Commit a dividend to the database
    :param sakia.data.entities.Dividend dividend: the dividend to commit
    """
    dividend_tuple = attr.astuple(dividend)
    values = ",".join(['?'] * len(dividend_tuple))
    self._conn.execute("INSERT INTO dividends VALUES ({0})".format(values),
                       dividend_tuple)
def test_filter(self, C, tuple_factory):
    """
    Attributes that are supposed to be skipped are skipped.
    """
    assert tuple_factory([tuple_factory([1])]) == astuple(
        C(C(1, 2), C(3, 4)),
        filter=lambda a, v: a.name != "y",
        tuple_factory=tuple_factory,
    )
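The filter callable above receives each Attribute and its value and is applied at every level of the recursion, which is why the nested instance loses its y field as well. A standalone sketch, with a local attrs class standing in for the test fixture:

import attr

@attr.s
class C:
    x = attr.ib()
    y = attr.ib()

nested = C(C(1, 2), C(3, 4))
# The filter drops every field named "y", at the outer and the inner level.
assert attr.astuple(nested, filter=lambda a, v: a.name != "y") == ((1,),)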
def serialize(self):
    """
    Serialize this credential scope to a string.

    @return: The slash-delimited credential scope serialization.
    @rtype: L{str}
    """
    return "/".join(attr.astuple(self) + ('aws4_request',))
def test_dicts_retain_type(self, container, C):
    """
    If recurse and retain_collection_types are True, also recurse into dicts
    and do not convert them into plain lists.
    """
    assert (1, container({"a": (4, 5)})) == astuple(
        C(1, container({"a": C(4, 5)})),
        retain_collection_types=True,
    )
def reset(self, initial_motor_angles=None, reset_duration=1.0):
    """Resets the environment as well as trajectory generators."""
    self._last_real_time = 0
    self._num_step = 0
    self._tg_phases = tg_inplace.reset()
    if self._num_actions == _LAIKAGO_NUM_ACTIONS:
        # Use laikago's init pose as zero action.
        init_pose = np.array(
            attr.astuple(
                laikago_pose_utils.LaikagoPose(
                    abduction_angle_0=laikago_pose_utils.LAIKAGO_DEFAULT_ABDUCTION_ANGLE,
                    hip_angle_0=laikago_pose_utils.LAIKAGO_DEFAULT_HIP_ANGLE,
                    knee_angle_0=laikago_pose_utils.LAIKAGO_DEFAULT_KNEE_ANGLE,
                    abduction_angle_1=laikago_pose_utils.LAIKAGO_DEFAULT_ABDUCTION_ANGLE,
                    hip_angle_1=laikago_pose_utils.LAIKAGO_DEFAULT_HIP_ANGLE,
                    knee_angle_1=laikago_pose_utils.LAIKAGO_DEFAULT_KNEE_ANGLE,
                    abduction_angle_2=laikago_pose_utils.LAIKAGO_DEFAULT_ABDUCTION_ANGLE,
                    hip_angle_2=laikago_pose_utils.LAIKAGO_DEFAULT_HIP_ANGLE,
                    knee_angle_2=laikago_pose_utils.LAIKAGO_DEFAULT_KNEE_ANGLE,
                    abduction_angle_3=laikago_pose_utils.LAIKAGO_DEFAULT_ABDUCTION_ANGLE,
                    hip_angle_3=laikago_pose_utils.LAIKAGO_DEFAULT_HIP_ANGLE,
                    knee_angle_3=laikago_pose_utils.LAIKAGO_DEFAULT_KNEE_ANGLE)))
        self._init_pose = init_pose
        observation = self._gym_env.reset(init_pose, reset_duration)
    else:
        # Use minitaur's init pose as zero action.
        init_pose = np.array(
            attr.astuple(
                minitaur_pose_utils.MinitaurPose(
                    swing_angle_0=MINITAUR_INIT_SWING_POS,
                    swing_angle_1=MINITAUR_INIT_SWING_POS,
                    swing_angle_2=MINITAUR_INIT_SWING_POS,
                    swing_angle_3=MINITAUR_INIT_SWING_POS,
                    extension_angle_0=MINITAUR_INIT_EXTENSION_POS,
                    extension_angle_1=MINITAUR_INIT_EXTENSION_POS,
                    extension_angle_2=MINITAUR_INIT_EXTENSION_POS,
                    extension_angle_3=MINITAUR_INIT_EXTENSION_POS)))
        initial_motor_angles = minitaur_pose_utils.leg_pose_to_motor_angles(
            init_pose)
        observation = self._gym_env.reset(initial_motor_angles,
                                          reset_duration)
    return self._modify_observation(observation)
def insert(self, connection):
    """
    Commit a connection to the database
    :param sakia.data.entities.Connection connection: the connection to commit
    """
    connection_tuple = attr.astuple(connection,
                                    filter=attr.filters.exclude(
                                        attr.fields(Connection).password,
                                        attr.fields(Connection).salt))
    values = ",".join(['?'] * len(connection_tuple))
    self._conn.execute("INSERT INTO connections VALUES ({0})".format(values),
                       connection_tuple)
def _assert_template_for_offset(self, offset, message_count):
    current_day, offset, target_day, upgrade_deadline = self._get_dates(offset)

    user = UserFactory.create()
    for course_index in range(message_count):
        self._schedule_factory(
            offset=offset,
            enrollment__user=user,
            enrollment__course__id=CourseKey.from_string(
                'edX/toy/course{}'.format(course_index)),
        )

    patch_policies(self, [StubPolicy([ChannelType.PUSH])])

    mock_channel = Mock(
        channel_type=ChannelType.EMAIL,
        action_links=[],
        tracker_image_sources=[],
    )
    channel_map = ChannelMap([
        ['sailthru', mock_channel],
    ])

    sent_messages = []
    with self.settings(TEMPLATES=self._get_template_overrides()):
        with patch.object(self.task, 'async_send_task') as mock_schedule_send:
            mock_schedule_send.apply_async = \
                lambda args, *_a, **_kw: sent_messages.append(args)

            num_expected_queries = NUM_QUERIES_FIRST_MATCH
            if self.queries_deadline_for_each_course:
                # one query per course for opt-out and one for course modes
                num_expected_queries += (message_count * 2) - 1
            else:
                num_expected_queries += 1

            with self.assertNumQueries(num_expected_queries,
                                       table_blacklist=WAFFLE_TABLES):
                self.task().apply(kwargs=dict(
                    site_id=self.site_config.site.id,
                    target_day_str=serialize(target_day),
                    day_offset=offset,
                    bin_num=self._calculate_bin_for_user(user),
                ))

        num_expected_messages = (
            1 if self.consolidates_emails_for_learner else message_count)
        self.assertEqual(len(sent_messages), num_expected_messages)

        with self.assertNumQueries(NUM_QUERIES_PER_MESSAGE_DELIVERY):
            with patch('openedx.core.djangoapps.schedules.tasks.segment.track') as mock_segment_track:
                with patch('edx_ace.channel.channels', return_value=channel_map):
                    self.deliver_task(*sent_messages[0])
                    self.assertEqual(mock_segment_track.call_count, 1)

        self.assertEqual(mock_channel.deliver.call_count, 1)
        for (_name, (_msg, email), _kwargs) in mock_channel.deliver.mock_calls:
            for template in attr.astuple(email):
                self.assertNotIn("TEMPLATE WARNING", template)
                self.assertNotIn("{{", template)
                self.assertNotIn("}}", template)

    return mock_channel.deliver.mock_calls
def drop(self, node):
    """
    Drop an existing node from the database
    :param sakia.data.entities.Node node: the node to drop
    """
    where_fields = attr.astuple(node,
                                filter=attr.filters.include(*NodesRepo._primary_keys))
    self._conn.execute("""DELETE FROM nodes
                          WHERE
                          currency=? AND
                          pubkey=?""",
                       where_fields)
def test_lists_tuples_retain_type(self, container, C):
    """
    If recurse and retain_collection_types are True, also recurse into lists
    and tuples and do not convert them into plain lists.
    """
    assert (1, container([(2, 3), (4, 5), "a"])) == astuple(
        C(1, container([C(2, 3), C(4, 5), "a"])),
        retain_collection_types=True,
    )
def drop(self, dividend):
    """
    Drop an existing dividend from the database
    :param sakia.data.entities.Dividend dividend: the dividend to drop
    """
    where_fields = attr.astuple(dividend,
                                filter=attr.filters.include(*DividendsRepo._primary_keys))
    self._conn.execute("""DELETE FROM dividends
                          WHERE
                          currency=? AND
                          pubkey=? AND
                          block_number=?""",
                       where_fields)
def is_information_complete(self) -> bool:
    """Return if all information is filled out."""
    want_dynamic_group = self.is_audio_group
    have_dynamic_group = self.is_dynamic_group is not None
    have_all_except_dynamic_group = all(
        attr.astuple(self,
                     filter=attr.filters.exclude(
                         attr.fields(ChromecastInfo).is_dynamic_group)))
    return (have_all_except_dynamic_group and
            (not want_dynamic_group or have_dynamic_group))
def serialize(self):
    """
    Serialize this canonical request to a string.

    @return: The line-delimited serialization of this canonical request.
    @rtype: L{str}
    """
    return '\n'.join(attr.astuple(self))
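Both serialize() methods in this collection lean on the fact that astuple returns a plain tuple, which composes directly with tuple concatenation and str.join. A self-contained sketch with an invented CredentialScope; the field names and values are assumptions for illustration:

import attr

@attr.s
class CredentialScope:
    date = attr.ib()
    region = attr.ib()
    service = attr.ib()

scope = CredentialScope("20230101", "us-east-1", "s3")
# astuple yields ("20230101", "us-east-1", "s3"); append the literal and join.
assert "/".join(attr.astuple(scope) + ("aws4_request",)) == \
    "20230101/us-east-1/s3/aws4_request"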