def resolve_confused_context(tokenized, initial_context):
    """Resolve situation where a Context thinks it is regtext, but it
    *should* be an interpretation"""
    # Only act when the running context is already inside the
    # "Interpretations" part of the regulation.
    if initial_context[1:2] == ['Interpretations']:
        final_tokens = []
        for token in tokenized:
            # Context/Paragraph tokens whose label is long enough to inspect
            par_with_label = (token.match(tokens.Context, tokens.Paragraph)
                              and len(token.label) > 1)
            if par_with_label and token.label[1] is None:
                # Regtext-style label: rewrite it as an interpretation label,
                # collapsing trailing paragraph markers into "(a)(b)" form.
                final_tokens.append(
                    attr.assoc(
                        token,
                        label=[
                            token.label[0], 'Interpretations', token.label[2],
                            '(' + ')('.join(l for l in token.label[3:]
                                            if l) + ')'
                        ]))
            elif par_with_label and token.label[1].startswith('Appendix:'):
                # Appendix label: convert into interpretation-of-appendix form,
                # stripping the "Appendix:" prefix.
                final_tokens.append(
                    attr.assoc(
                        token,
                        label=[
                            token.label[0], 'Interpretations',
                            token.label[1][len('Appendix:'):],
                            '(' + ')('.join(l for l in token.label[2:]
                                            if l) + ')'
                        ]))
            elif token.match(tokens.TokenList):
                # Recurse into nested token lists with the same context.
                sub_tokens = resolve_confused_context(token.tokens,
                                                      initial_context)
                final_tokens.append(attr.assoc(token, tokens=sub_tokens))
            else:
                final_tokens.append(token)
        return final_tokens
    else:
        # Not in an interpretation context; leave the tokens untouched.
        return tokenized
def test_success(self):
    """
    Gets LB contents with drained_at correctly
    """
    node11 = node('11', 'a11', condition='DRAINING')
    node12 = node('12', 'a12')
    node21 = node('21', 'a21', weight=3)
    node22 = node('22', 'a22', weight=None, condition='DRAINING')
    # Expected effect sequence: list load balancers, then (in parallel)
    # fetch each LB's nodes and health monitor, then fetch atom feeds for
    # the DRAINING nodes only.
    seq = [
        lb_req('loadbalancers', True,
               {'loadBalancers': [{'id': 1}, {'id': 2}]}),
        parallel_sequence([[nodes_req(1, [node11, node12])],
                           [nodes_req(2, [node21, node22])],
                           [lb_hm_req(1, {"type": "CONNECT"})],
                           [lb_hm_req(2, {})]]),
        parallel_sequence([[node_feed_req('1', '11', '11feed')],
                           [node_feed_req('2', '22', '22feed')]]),
    ]
    eff = get_clb_contents()
    # DRAINING nodes get _drained_at populated from their feeds; the CLB
    # value reflects whether a health monitor is configured.
    self.assertEqual(
        perform_sequence(seq, eff),
        ([attr.assoc(CLBNode.from_node_json(1, node11), _drained_at=1.0),
          CLBNode.from_node_json(1, node12),
          CLBNode.from_node_json(2, node21),
          attr.assoc(CLBNode.from_node_json(2, node22), _drained_at=2.0)],
         {'1': CLB(True), '2': CLB(False)}))
def test_expired(meta_repo):
    """The ``expired`` query returns expired certs (written or not) and
    excludes certs that are still valid or within the signature window."""
    certifications_repo = CertificationsRepo(meta_repo.conn)
    # Base cert: timestamp 1000, never written on-chain.  With
    # current_ts=5000 and sig_validity=2000 this one is expired.
    not_written_expired = Certification(
        "testcurrency", "7Aqw6Efa9EzE7gtsc8SveLLrM7gm6NEGoywSv4FJx6pZ",
        "FADxcH5LmXGmGFgdixSes6nWnC4Vb4pRUBYT81zQRhjn", 20, 1000,
        "H41/8OGV2W4CLKbE35kk5t1HJQsb3jEM0/QGLUf80CwJvGZf3HvVCcNtHPUFoUBKEDQO9mPK3KJkqOoxHpqHCw==",
        0)
    # Written on-chain but with the same old timestamp -- also expired.
    written_expired = attr.assoc(
        not_written_expired,
        certifier="8Aqw6Efa9EzE7gtsc8SveLLrM7gm6NEGoywSv4FJx6pZ",
        written_on=10)
    # Written and recent enough (presumably 3200 + 2000 >= 5000) to remain
    # valid -- TODO confirm against the repo's expiry rule.
    written_not_expired = attr.assoc(
        not_written_expired,
        certifier="9Aqw6Efa9EzE7gtsc8SveLLrM7gm6NEGoywSv4FJx6pZ",
        written_on=10,
        timestamp=3200)
    # Not written, but inside the signature window (4900 + 500 >= 5000).
    not_written_not_expired = attr.assoc(
        not_written_expired,
        certifier="1Bqw6Efa9EzE7gtsc8SveLLrM7gm6NEGoywSv4FJx6pZ",
        timestamp=4900)
    for c in (written_expired, written_not_expired, not_written_expired,
              not_written_not_expired):
        certifications_repo.insert(c)
    certs = certifications_repo.expired(
        "testcurrency",
        "FADxcH5LmXGmGFgdixSes6nWnC4Vb4pRUBYT81zQRhjn",
        current_ts=5000,
        sig_window=500,
        sig_validity=2000)
    assert written_expired in certs
    assert not_written_expired in certs
    assert not_written_not_expired not in certs
    assert written_not_expired not in certs
def test_success(self):
    """
    Gets LB contents with drained_at correctly
    """
    node11 = node('11', 'a11', condition='DRAINING')
    node12 = node('12', 'a12')
    node21 = node('21', 'a21', weight=3)
    node22 = node('22', 'a22', weight=None, condition='DRAINING')
    # Expected effect sequence: list load balancers, then (in parallel)
    # fetch each LB's nodes and health monitor, then fetch atom feeds for
    # the DRAINING nodes only.
    seq = [
        lb_req('loadbalancers', True,
               {'loadBalancers': [{'id': 1}, {'id': 2}]}),
        parallel_sequence([[nodes_req(1, [node11, node12])],
                           [nodes_req(2, [node21, node22])],
                           [lb_hm_req(1, {"type": "CONNECT"})],
                           [lb_hm_req(2, {})]]),
        parallel_sequence([[node_feed_req('1', '11', '11feed')],
                           [node_feed_req('2', '22', '22feed')]]),
    ]
    eff = get_clb_contents()
    # DRAINING nodes get _drained_at populated from their feeds; the CLB
    # value reflects whether a health monitor is configured.
    self.assertEqual(
        perform_sequence(seq, eff),
        ([attr.assoc(CLBNode.from_node_json(1, node11), _drained_at=1.0),
          CLBNode.from_node_json(1, node12),
          CLBNode.from_node_json(2, node21),
          attr.assoc(CLBNode.from_node_json(2, node22), _drained_at=2.0)],
         {'1': CLB(True), '2': CLB(False)}))
def union1(p: Union[A, B]):
    # NOTE(review): IDE-inspection fixture (contains <warning> markup, not
    # plain Python).  `p` is an *instance* of a union of attrs classes, so
    # the type-oriented helpers are expected to be flagged while the
    # instance-oriented ones pass.
    attr.fields(<warning descr="'attr.fields' method should be called on attrs types">p</warning>)
    attr.fields_dict(<warning descr="'attr.fields_dict' method should be called on attrs types">p</warning>)
    attr.asdict(p)
    attr.astuple(p)
    attr.assoc(p)
    attr.evolve(p)
def union2(p: Union[Type[A], Type[B]]):
    # NOTE(review): IDE-inspection fixture (contains <warning> markup, not
    # plain Python).  `p` is a union of attrs *types*, so the
    # instance-oriented helpers are expected to be flagged while the
    # type-oriented ones pass.
    attr.fields(p)
    attr.fields_dict(p)
    attr.asdict(<warning descr="'attr.asdict' method should be called on attrs instances">p</warning>)
    attr.astuple(<warning descr="'attr.astuple' method should be called on attrs instances">p</warning>)
    attr.assoc(<warning descr="'attr.assoc' method should be called on attrs instances">p</warning>)
    attr.evolve(<warning descr="'attr.evolve' method should be called on attrs instances">p</warning>)
def structural(p):
    # NOTE(review): IDE-inspection fixture.  `p` is unannotated
    # (structural/duck typing), so no attrs-related warnings are expected
    # on any of the calls below.
    print(len(p))
    attr.fields(p)
    attr.fields_dict(p)
    attr.asdict(p)
    attr.astuple(p)
    attr.assoc(p)
    attr.evolve(p)
def test_unknown(self, C):
    """
    Wanting to change an unknown attribute raises an
    AttrsAttributeNotFoundError.
    """
    # Generated classes never define a four-letter attribute name.
    with pytest.raises(AttrsAttributeNotFoundError) as exc_info:
        assoc(C(), aaaa=2)
    expected = "aaaa is not an attrs attribute on {cls!r}.".format(cls=C)
    assert (expected, ) == exc_info.value.args
def test_unknown(self, C):
    """
    Wanting to change an unknown attribute raises an
    AttrsAttributeNotFoundError.
    """
    # No generated class has a four-letter attribute, and ``assoc`` itself
    # is deprecated, so both the error and the warning must fire.
    with pytest.deprecated_call(), pytest.raises(
            AttrsAttributeNotFoundError) as exc_info:
        assoc(C(), aaaa=2)
    expected = f"aaaa is not an attrs attribute on {C!r}."
    assert (expected, ) == exc_info.value.args
def increment_last_modifier(self):
    '''
    Return version with last modifier incremented by 1
    '''
    if not self.modifiers:
        raise Exception(
            'Cannot increment last modifier of version; it has no modifiers'
        )
    # Split off the final modifier, bump its number, and rebuild the tuple.
    *leading, last = self.modifiers
    bumped = attr.assoc(last, number=last.number + 1)
    return attr.assoc(self, modifiers=tuple(leading) + (bumped, ), raw=None)
def test_unknown(self, C):
    """
    Wanting to change an unknown attribute raises an
    AttrsAttributeNotFoundError.
    """
    # Generated classes never have a four-letter attribute; ``assoc`` is
    # deprecated, so a deprecation warning is expected as well.
    with pytest.deprecated_call(), \
            pytest.raises(AttrsAttributeNotFoundError) as exc_info:
        assoc(C(), aaaa=2)
    message = "aaaa is not an attrs attribute on {cls!r}.".format(cls=C)
    assert (message, ) == exc_info.value.args
def main():
    """main program"""

    def update(x, **kwargs):
        """functional dict update"""
        res = dict(x)
        for k, v in kwargs.items():
            res[k] = v
        return res

    # make a subdirectory in the output dir for the config bundle
    config_bundle_dirname = os.path.join(OUTPUT_DIR, CONFIG_BUNDLE)
    if not os.path.exists(config_bundle_dirname):
        os.makedirs(config_bundle_dirname)

    # dictionary of configs: the default config with three different
    # weight-decay settings
    configs = {
        "reg_0.001": attr.assoc(CONFIG_DEFAULT,
                                nn_opt=update(CONFIG_DEFAULT.nn_opt,
                                              weight_decay=0.001)),
        "reg_0.005": attr.assoc(CONFIG_DEFAULT,
                                nn_opt=update(CONFIG_DEFAULT.nn_opt,
                                              weight_decay=0.005)),
        "reg_0.01": attr.assoc(CONFIG_DEFAULT,
                               nn_opt=update(CONFIG_DEFAULT.nn_opt,
                                             weight_decay=0.01))
    }

    # save config files to bundle subdir
    for k, v in configs.items():
        config_filename = os.path.join(config_bundle_dirname, k + ".json")
        cf.save(v, config_filename)

    # TODO: zip bundle subdir
    shutil.make_archive(config_bundle_dirname, "zip", config_bundle_dirname)

    # generate a text file of run commands
    runs_filename = os.path.join(OUTPUT_DIR, "runs.txt")
    with open(runs_filename, "w") as runs_file:
        for k in configs.keys():
            config_filename = CONFIG_BUNDLE + "/" + k + ".json"
            # Build one "run" command line per config (presumably for a
            # CodaLab-style runner -- TODO confirm).
            run = " ".join([
                "run",
                "handwriting:" + CODE_BUNDLE + "/handwriting",
                ":" + DATA_BUNDLE,
                ":" + CONFIG_BUNDLE,
                "\"" + " ".join([
                    RUN_COMMAND_PREFIX, PYTHON_MODULE, MODULE_ARGS,
                    config_filename, "model.pkl"
                ]) + "\"",
                "-n " + "run_" + k + "_" + DATE,
                "--request-docker-image bdzimmer/handwriting:0.1",
                "--request-memory 16g"
            ]) + "\n"
            print(run, file=runs_file)
def add_overrides(*, overrides, config_input):
    """
    Create new instance of `ConfigInput` with `overrides` added; when
    `overrides` is None the original `config_input` is returned unchanged.
    """
    if overrides is None:
        return config_input
    updated = attr.assoc(config_input, **overrides)
    return updated
def _move_place(self, m, delta):
    # Validate a placement move and record the resulting changes in the
    # ``delta`` dict (consumed by the caller to build the next state).
    # Opening rule: the first two plies may only place flat stones.
    if self.ply < 2 and m.type != moves.MoveType.PLACE_FLAT:
        raise IllegalMove("Illegal opening")
    if self[m.x,m.y]:
        raise IllegalMove("Place on an occupied square")
    color = self.to_move()
    if self.ply < 2:
        # During the opening each player places the opponent's stone.
        color = color.flip()
    slot = 'stones'
    kind = pieces.Kind.FLAT
    if m.type == moves.MoveType.PLACE_CAPSTONE:
        slot = 'caps'
        kind = pieces.Kind.CAPSTONE
    elif m.type == moves.MoveType.PLACE_STANDING:
        kind = pieces.Kind.STANDING
    cs = self.stones[color.value]
    if getattr(cs, slot) <= 0:
        raise IllegalMove("not enough stones")
    # Deduct one piece from whichever reserve (stones/caps) was drawn from.
    newstones = attr.assoc(cs, **{slot: getattr(cs, slot) - 1})
    if color == pieces.Color.WHITE:
        delta['stones'] = (newstones, self.stones[1])
    else:
        delta['stones'] = (self.stones[0], newstones)
    # Place the new piece as a fresh one-piece stack on a copied board.
    newboard = list(self.board)
    newboard[m.x + m.y*self.size] = [pieces.Piece(color=color, kind=kind)]
    delta['board'] = newboard
def test_lb_disappeared_during_feed_fetch(self):
    """
    If a load balancer gets deleted while fetching feeds, no nodes will be
    returned for it.
    """
    node21 = node('21', 'a21', condition='DRAINING', weight=None)
    # LB 1's feed fetch fails with CLBNotFoundError, so all of its nodes
    # (and its health-monitor entry) are dropped from the result.
    seq = [
        lb_req('loadbalancers', True,
               {'loadBalancers': [{'id': 1}, {'id': 2}]}),
        parallel_sequence([
            [nodes_req(1, [node('11', 'a11', condition='DRAINING'),
                           node('12', 'a12')])],
            [nodes_req(2, [node21])],
            [lb_hm_req(1, {"type": "CONNECT"})],
            [lb_hm_req(2, {"type": "CONNECT"})]]),
        parallel_sequence([
            [node_feed_req('1', '11', CLBNotFoundError(lb_id=u'1'))],
            [node_feed_req('2', '21', '22feed')]]),
    ]
    eff = get_clb_contents()
    self.assertEqual(
        perform_sequence(seq, eff),
        ([attr.assoc(CLBNode.from_node_json(2, node21), _drained_at=2.0)],
         {'2': CLB(True)}))
def _load_3(self, state):
    # Version 3 is version 2 plus a ``stripe_subscription_id`` field, so
    # delegate the shared parts to ``_load_2`` and graft the new field on.
    stripe_id = state["details"]["stripe_subscription_id"]
    return attr.assoc(self._load_2(state), stripe_subscription_id=stripe_id)
def compress_context(tokenized, initial_context): """Add context to each of the paragraphs (removing context)""" # copy context = list(initial_context) converted = [] for token in tokenized: if isinstance(token, tokens.Context): # Interpretations of appendices if (len(context) > 1 and len(token.label) > 1 and context[1] == 'Interpretations' and (token.label[1] or '').startswith('Appendix')): context = compress(context, [token.label[0], None, token.label[1]] + token.label[2:]) else: context = compress(context, token.label) continue # Another corner case: a "paragraph" is indicates interp context elif (isinstance(token, tokens.Paragraph) and len(context) > 1 and len(token.label) > 3 and context[1] == 'Interpretations' and token.label[1] != 'Interpretations'): context = compress(context, [ token.label[0], None, token.label[2], '(' + ')('.join(p for p in token.label[3:] if p) + ')' ]) continue elif isinstance(token, tokens.Paragraph): context = compress(context, token.label) token = attr.assoc(token, label=context) converted.append(token) return converted, context
def _convert_upgrade_request_if_needed(request, remote_repositories,
                                       installed_repository):
    # An "upgrade" job is expanded into one pinned install requirement per
    # installed package, targeting the newest available candidate.
    if len(request.jobs) == 1 and request.jobs[0].kind == JobType.upgrade:
        upgrade_request = attr.assoc(request, jobs=[])
        # Merge every remote repository into a single searchable view.
        remote_repository = Repository()
        for repository in remote_repositories:
            remote_repository.update(repository)
        latest_packages = []
        for package in installed_repository:
            candidates = remote_repository.find_packages(package.name)
            # candidates may be empty (e.g. when repository configuration
            # changed, and an installed package is coming from a repository not
            # configured in the remote list)
            if len(candidates) > 0:
                # assumes find_packages returns candidates sorted ascending
                # by version, so [-1] is the newest -- TODO confirm
                latest_packages.append(candidates[-1])
        for p in latest_packages:
            upgrade_request.install(
                InstallRequirement._from_string("{} == {}".format(p.name,
                                                                  p.version))
            )
        return upgrade_request
    else:
        # Anything other than a lone upgrade job passes through untouched.
        return request
def test_lb_disappeared_during_feed_fetch(self):
    """
    If a load balancer gets deleted while fetching feeds, no nodes will be
    returned for it.
    """
    node21 = node('21', 'a21', condition='DRAINING', weight=None)
    # LB 1's feed fetch fails with CLBNotFoundError, so all of its nodes
    # (and its health-monitor entry) are dropped from the result.
    seq = [
        lb_req('loadbalancers', True,
               {'loadBalancers': [{'id': 1}, {'id': 2}]}),
        parallel_sequence([
            [nodes_req(1, [node('11', 'a11', condition='DRAINING'),
                           node('12', 'a12')])],
            [nodes_req(2, [node21])],
            [lb_hm_req(1, {"type": "CONNECT"})],
            [lb_hm_req(2, {"type": "CONNECT"})]]),
        parallel_sequence([
            [node_feed_req('1', '11', CLBNotFoundError(lb_id=u'1'))],
            [node_feed_req('2', '21', '22feed')]]),
    ]
    eff = get_clb_contents()
    self.assertEqual(
        perform_sequence(seq, eff),
        ([attr.assoc(CLBNode.from_node_json(2, node21), _drained_at=2.0)],
         {'2': CLB(True)}))
def remove(self, value):
    """Return a copy with *value* removed from ``rna_types``; when the
    value is absent, return ``self`` unchanged."""
    if value in self.rna_types:
        remaining = frozenset(r for r in self.rna_types if r != value)
        return attr.assoc(self, rna_types=remaining)
    return self
def _load_2(self, state):
    # Version 2 is version 1 plus a ``key_prefix`` field, so delegate the
    # shared parts to ``_load_1`` and graft the new field on.
    base = self._load_1(state)
    return attr.assoc(base, key_prefix=state["details"]["key_prefix"])
def _join_touching_or_overlapping(ranges): ''' Join touching or overlapping ranges Parameters ---------- ranges : Sequence(_Range) Sorted ranges Returns ------- [_Range] ''' new_ranges = [] range1 = ranges[0] for range2 in ranges[1:]: # If range is v.., stop as it includes all subsequent ranges if range1.end == Version.MAX: break # If touching/overlapping, is_touching_or_overlapping = range1.end >= range2.start if is_touching_or_overlapping: # Join ranges end = max(range1.end, range2.end) range1 = attr.assoc(end=end) else: # Save range1 and continue with range2 new_ranges.append(range1) range1 = range2 break new_ranges.append(range1) # Save the last range return new_ranges
def clone(self):
    """
    Clone the shell object.

    :returns: a new Shell object with a copy of the environment dictionary
    """
    # Copy the environment so the clone's mutations stay independent, and
    # start with an empty process list.
    env_copy = dict(self._env)
    return attr.assoc(self, _env=env_copy, _procs=[])
def _fieldsToRender(self):
    # type: () -> Iterable[Field]
    """
    @return: an iterable of L{Field} objects to include in the HTML
        representation of this form.  This includes:

            - all the user-specified fields in the form

            - the CSRF protection hidden field

            - if no "submit" buttons are included in the form, one
              additional field for a default submit button so the form
              can be submitted.
    """
    anySubmit = False
    for field in self._form.fields:
        # Re-render each field carrying any previously-submitted value and
        # any validation error recorded for it.
        yield attr.assoc(
            field,
            value=self.prevalidationValues.get(field, field.value),
            error=self.validationErrors.get(field, None),
        )
        if field.formInputType == "submit":
            anySubmit = True
    if not anySubmit:
        # No explicit submit button was declared: synthesize a default one.
        yield Field(
            converter=str,
            formInputType="submit",
            value="submit",
            formFieldName="__klein_auto_submit__",
        )
    if self._method.lower() == "post":
        # POST forms additionally get the CSRF-protection hidden field.
        yield self._fieldForCSRF()
def switch_passive(tokenized):
    """Passive verbs are modifying the phrase before them rather than the
    phrase following. For consistency, we flip the order of such verbs"""
    # Fast path: nothing to do when no passive verbs are present.
    if all(not t.match(tokens.Verb, active=False) for t in tokenized):
        return tokenized
    converted, remaining = [], tokenized
    while remaining:
        # Everything up to (but excluding) the next verb token.
        to_add = list(
            takewhile(lambda t: not isinstance(t, tokens.Verb), remaining))
        if len(to_add) < len(remaining):
            # also take the verb
            verb = remaining[len(to_add)]
            # switch verb to the beginning
            if not verb.active:
                verb = attr.assoc(verb, active=True)
                to_add.append(verb)
                # Rotate: the (now active) verb moves to the front of the
                # phrase it was modifying.
                to_add = to_add[-1:] + to_add[:-1]
                # may need to grab one more if the verb is move
                if (verb.verb == tokens.Verb.MOVE
                        and len(to_add) < len(remaining)):
                    to_add.append(remaining[len(to_add)])
            else:
                to_add.append(verb)
        converted.extend(to_add)
        remaining = remaining[len(to_add):]
    return converted
def increment_release(self):
    '''
    Return version with last component of release incremented by 1
    '''
    # Bump only the final dotted component, e.g. "1.2.3" -> "1.2.4".
    parts = self.release.split('.')
    bumped = int(parts[-1]) + 1
    new_release = '.'.join(parts[:-1] + [str(bumped)])
    return attr.assoc(self, release=new_release, raw=None)
def compress_context_in_tokenlists(tokenized):
    """Use compress (above) on elements within a tokenlist."""
    result = []
    for token in tokenized:
        if not token.match(tokens.TokenList):
            result.append(token)
            continue
        # Thread a running label through the list's members so each
        # labelled subtoken inherits context from its predecessors.
        rewritten = []
        running_label = []
        for inner in token.tokens:
            if hasattr(inner, 'label'):
                running_label = compress(running_label, inner.label)
                rewritten.append(attr.assoc(inner, label=running_label))
            else:
                rewritten.append(inner)
        result.append(attr.assoc(token, tokens=rewritten))
    return result
def test_get_version_2(self, details): """ A subscription with version 2 state persisted in the database can be retrieved. Its subscription id is set equal to its stripe subscription id. """ # Version 2 details don't have a separate stripe subscription id # field. We'll expect the upgrade process to populate it with a copy # of the base subscription id field. details = attr.assoc( details, stripe_subscription_id=details.subscription_id, ) def _marshal_oldsecrets(oldsecrets): oldsecrets = oldsecrets.copy() oldsecrets["introducer_node_pem"] = "".join( map(str, oldsecrets["introducer_node_pem"])) oldsecrets["server_node_pem"] = "".join( map(str, oldsecrets["server_node_pem"])) return oldsecrets def v2_subscription_state(subscription_id, details): """ A copy of the implementation of the v2 serializer so that the test can create exactly the v2 representation of a particular subscription. """ return dict( version=2, details=dict( active=True, id=subscription_id, bucket_name=details.bucketname, key_prefix=details.key_prefix, oldsecrets=_marshal_oldsecrets(details.oldsecrets), email=details.customer_email, product_id=details.product_id, customer_id=details.customer_id, subscription_id=details.subscription_id, introducer_port_number=details.introducer_port_number, storage_port_number=details.storage_port_number, ), ) subscription_directory = FilePath(mkdtemp().decode("utf-8")) path = subscription_directory.child( b32encode(details.subscription_id) + u".json", ) path.setContent( dumps(v2_subscription_state( details.subscription_id, details, ), )) client = self._get_client_for_path(subscription_directory) retrieved = self.successResultOf(client.get(details.subscription_id)) self.assertThat(details, AttrsEquals(retrieved))
def test_resources_assigned(self, id_a, id_b, details):
    """
    Some empty fields of the subscription details given to ``create`` are
    populated and included in the result.
    """
    assume(id_a != id_b)
    client = self.get_client()
    # Create two subscriptions that differ only in their id.
    first, second = (
        self.successResultOf(client.create(
            sid, attr.assoc(details, subscription_id=sid),
        ))
        for sid in (id_a, id_b)
    )
    # Secrets get populated with some random goodness.
    self.expectThat(first.oldsecrets, Not(Is(None)))
    self.expectThat(second.oldsecrets, Not(Is(None)))
    self.expectThat(first.oldsecrets, Not(Equals(second.oldsecrets)))
def test_no_changes(self, C):
    """
    No changes means a verbatim copy.
    """
    original = C()
    duplicate = assoc(original)
    # A distinct object that nonetheless compares equal.
    assert duplicate is not original
    assert duplicate == original
def partial_subscription_details():
    """Strategy for subscription details with resource fields blanked."""
    def _blank_resources(d):
        # Duplicate key prefix assignment logic in subscription manager. :/
        return attr.assoc(
            d,
            bucketname=None,
            key_prefix=d.subscription_id + u"/",
            oldsecrets=None,
        )
    return subscription_details().map(_blank_resources)
def test_no_changes(self, C):
    """
    No changes means a verbatim copy.
    """
    original = C()
    # ``assoc`` is deprecated, so the warning must fire even for a no-op.
    with pytest.deprecated_call():
        duplicate = assoc(original)
    assert duplicate is not original
    assert duplicate == original
def _balance_panels(panels):
    """Resize panels so they are evenly spaced."""
    # Width already claimed by panels with an explicit span.
    allotted_spans = sum(panel.span if panel.span else 0 for panel in panels)
    unsized = [panel for panel in panels if panel.span is None]
    # Split the remaining width evenly among the unsized panels (guard
    # against division by zero when every panel already has a span).
    auto_span = math.ceil((TOTAL_SPAN - allotted_spans) / (len(unsized) or 1))
    balanced = []
    for panel in panels:
        if panel.span is None:
            balanced.append(attr.assoc(panel, span=auto_span))
        else:
            balanced.append(panel)
    return balanced
def stacked(graph):
    """Turn a graph into a stacked graph."""
    overrides = dict(
        lineWidth=0,
        nullPointMode=G.NULL_AS_ZERO,
        stack=True,
        fill=10,
        tooltip=G.Tooltip(valueType=G.INDIVIDUAL, ),
    )
    return attr.assoc(graph, **overrides)
def _route53_op(body=None, **kw):
    """
    Construct an L{_Operation} representing a I{Route53} service API call.
    """
    op = _Operation(service=b"route53", **kw)
    if body is None:
        return succeed(op)
    # Serialize the body to XML asynchronously, then attach it.
    serialized = to_xml(body)
    serialized.addCallback(lambda xml: attr.assoc(op, body=xml))
    return serialized
def create_subscription(self, subscription_id, details):
    """
    Create a brand new subscription in the database given some details
    about it.

    Secrets for the subscription are generated as part of the process and
    must not be included in the given details.
    """
    a = start_action(
        action_type=u"subscription-database:create-subscription",
        id=subscription_id,
        details=attr.asdict(details),
    )
    with a:
        # Refuse caller-supplied secrets; they are generated below.
        if details.oldsecrets:
            raise Exception(
                "You supplied secrets (%r) but that's nonsense!" % (
                    details.oldsecrets,
                ),
            )
        # XXX new_tahoe_configuration still pulls some secrets off this
        # object.  That's fine for now but it's just another example of
        # how screwed up our secret/config management is.  Someone else
        # will fix up the fact that we're getting bogus values off the
        # NullDeploymentConfiguration here.  We don't really *want* this
        # global configuration persisted alongside each subscription,
        # anyway.
        deploy_config = NullDeploymentConfiguration()
        deploy_config.domain = self.domain
        key_prefix = details.subscription_id + u"/"
        config = new_tahoe_configuration(
            deploy_config,
            # Subscriptions all share a single S3 bucket that we know
            # about.
            self.bucket_name,
            # They all have a unique prefix in that bucket to keep their
            # shares separate, though.
            key_prefix,
            configmap_public_host(details.subscription_id, self.domain),
            u"127.0.0.1",
            details.introducer_port_number,
            details.storage_port_number,
        )
        legacy = secrets_to_legacy_format(config)
        # Attach the generated secrets and resource assignments, then
        # persist via the common load path.
        details = attr.assoc(
            details,
            oldsecrets=legacy,
            bucketname=self.bucket_name,
            key_prefix=key_prefix,
        )
        details = self.load_subscription(details)
        return details
def move(self, m):
    """Return a new state with move ``m`` applied (ply advanced by one)."""
    delta = {'ply': self.ply + 1}
    # Dispatch to the appropriate handler; each records its changes in
    # ``delta`` which is then folded into a fresh immutable state.
    handler = self._move_slide if m.type.is_slide() else self._move_place
    handler(m, delta)
    return attr.assoc(self, **delta)
def provision_subscription(
    smclient, subscription,
):
    # Fill in the assigned ports and secrets (closed over from the
    # enclosing scope), record the result, and succeed with it.
    provisioned_subscription = attr.assoc(
        subscription,
        oldsecrets=old_secrets,
        introducer_port_number=introducer_port_number,
        storage_port_number=storage_port_number,
    )
    provisioned.append(provisioned_subscription)
    return succeed(provisioned_subscription)
def change_access_key(self, id, key):
    """
    Point the ``DeploymentConfiguration`` at a different AWS key pair.

    This is a necessary event for good key management (retiring old keys
    and introducing new ones).
    """
    updated = attr.assoc(
        self.deploy_config,
        s3_access_key_id=id,
        s3_secret_key=key,
    )
    self.deploy_config = updated
def change_tahoe_images(self, tag):
    """
    Change the Deployment configuration to require a different Docker
    image tag for the Tahoe-LAFS containers.

    This essentially corresponds to a Tahoe-LAFS upgrade for all
    customers.
    """
    introducer = u"tahoe-introducer:{}".format(tag)
    storageserver = u"tahoe-storageserver:{}".format(tag)
    self.deploy_config = attr.assoc(
        self.deploy_config,
        introducer_image=introducer,
        storageserver_image=storageserver,
    )
def test_frozen(self):
    """
    Works on frozen classes.
    """
    @attributes(frozen=True)
    class C(object):
        x = attr()
        y = attr()

    # ``assoc`` is deprecated; it must still produce a modified copy of a
    # frozen instance.
    with pytest.deprecated_call():
        changed = assoc(C(1, 2), x=3)
    assert changed == C(3, 2)
def test_warning(self):
    """
    DeprecationWarning points to the correct file.
    """
    @attributes
    class C(object):
        x = attr()

    # The warning's reported filename must be the caller's file, not
    # attr's internals (i.e. stacklevel is set correctly).
    with pytest.warns(DeprecationWarning) as warnings:
        assert assoc(C(1), x=2) == C(2)
    assert warnings.list[0].filename == __file__
def test_empty(self, slots, frozen):
    """
    Empty classes without changes get copied.
    """
    @attributes(slots=slots, frozen=frozen)
    class C(object):
        pass

    original = C()
    # ``assoc`` is deprecated, so the warning must fire even for a no-op
    # copy of an attribute-less class.
    with pytest.deprecated_call():
        duplicate = assoc(original)
    assert duplicate is not original
    assert duplicate == original
def in_docker_machine(self, machine):
    """
    Return an executor where all docker commands would point at a
    specific Docker machine.

    :param machine: name of machine
    :returns: a new executor
    """
    machine_shell = self._shell.clone()
    # Ask docker-machine for the cmd-style environment and replay every
    # "SET key=value" line into the cloned shell.
    output, _ignored = self.docker_machine.env(machine, shell='cmd').batch()
    for line in output.splitlines():
        directive, args = line.split(None, 1)
        if directive == 'SET':
            key, value = args.split('=', 1)
            machine_shell.setenv(key, value)
    return attr.assoc(self, _shell=machine_shell)
def render_POST(self, request):
    """
    Change some fields of the subscription represented by this resource.

    The request body is expected to be a JSON object with keys matching
    valid fields of a ``SubscriptionDetails`` object.  The values will be
    taken as the new values for the corresponding fields of this
    subscription.
    """
    current = self.database.get_subscription(
        subscription_id=self.subscription_id,
    )
    changes = loads(request.content.read())
    updated = self.database.change_subscription(
        attr.assoc(current, **changes))
    return dumps(marshal_subscription(updated))
def in_virtualenv(self, envpath):
    """
    Return an executor where all Python commands would point at a
    specific virtual environment.

    :param envpath: path to virtual environment
    :returns: a new executor
    """
    venv_shell = self._shell.clone()
    venv_shell.setenv('VIRTUAL_ENV', envpath)
    venv_shell.setenv('PYTHONHOME', None)
    # Prepend the venv's bin directory; PATH may be unset entirely.
    bin_dir = envpath + '/bin'
    try:
        new_path = bin_dir + ':' + venv_shell.getenv('PATH')
    except KeyError:
        new_path = bin_dir
    venv_shell.setenv('PATH', new_path)
    return attr.assoc(self, _shell=venv_shell)
def test_load_subscription(self, details):
    """
    ``load`` causes a subscription to be created exactly as specified by
    the given details - with no secret generation.
    """
    client = self.get_client()
    self.successResultOf(client.load(details))
    [subscription] = self.successResultOf(client.list())
    # The ports don't matter, the server still gets to assign them.
    normalized = attr.assoc(
        subscription,
        introducer_port_number=details.introducer_port_number,
        storage_port_number=details.storage_port_number,
    )
    self.assertThat(normalized, GoodEquals(details))
def test_change(self, C, data):
    """
    Changes work.
    """
    # Skip classes with no attributes.
    assume(fields(C))
    names = [a.name for a in fields(C)]
    # Draw an arbitrary subset of attributes and new integer values.
    picked = data.draw(st.sets(st.sampled_from(names)))
    replacements = {name: data.draw(st.integers()) for name in picked}
    original = C()
    with pytest.deprecated_call():
        changed = assoc(original, **replacements)
    for name, value in replacements.items():
        assert getattr(changed, name) == value
def test_change_stripe_subscription_id(self, details, new_stripe_id):
    """
    ``change`` accepts a ``stripe_subscription_id`` keyword argument and
    changes the value of that field associated with the indicated
    subscription.
    """
    client = self.get_client()
    expected = self.successResultOf(client.load(details))
    modified = self.successResultOf(client.change(
        details.subscription_id,
        stripe_subscription_id=new_stripe_id,
    ))
    # Only the stripe id should differ from the originally loaded details.
    expected_after = attr.assoc(
        expected,
        stripe_subscription_id=new_stripe_id,
    )
    self.assertThat(modified, AttrsEquals(expected_after))
def test_switch_level2_context():
    """The presence of certain types of context should apply throughout
    the amendment"""
    transform = amdparser.switch_level2_context  # shorthand
    initial = ['105', None, '2']
    tokenized = [tokens.Paragraph(), tokens.Verb('verb', True)]
    assert transform(tokenized, initial) == initial

    # An *uncertain* context must not change the level-2 entry.
    uncertain = tokens.Context(['105', 'Subpart:G'], certain=False)
    tokenized.append(uncertain)
    assert transform(tokenized, initial) == initial

    # A *certain* context propagates its subpart into the result.
    tokenized[-1] = attr.assoc(uncertain, certain=True)
    assert transform(tokenized, initial) == ['105', 'Subpart:G', '2']

    # Don't try to proceed if multiple contexts are present
    tokenized.append(tokens.Context(['105', 'Appendix:Q'], certain=True))
    assert transform(tokenized, initial) == initial
def render_PUT(self, request):
    """
    Create an active subscription by loading subscription details from the
    given request, including node secrets.  This is essentially a way to
    load a subscription that was previously created and initialized,
    rather than creating a brand new subscription.
    """
    payload = loads(request.content.read())
    # The id in the URL wins over whatever id the payload carries.
    details = attr.assoc(
        decode_subscription(payload),
        subscription_id=self.subscription_id,
    )
    loaded = self.database.load_subscription(details=details)
    request.setResponseCode(CREATED)
    return dumps(marshal_subscription(loaded))