def structural(p):
    print(len(p))
    dataclasses.fields(p)

    dataclasses.asdict(p)
    dataclasses.astuple(p)
    dataclasses.replace(p)
def main():
    """Get Weather Data."""
    weather_data = data(get_api_data=True)
    current_data = parse_weather_data(weather_data.current)
    forecast_data = [asdict(parse_weather_data(data))
                     for data in weather_data.forecast_daily['list']]

    return Weather(current=asdict(current_data),
                   forecast=forecast_data)
Example #3
def make_metar_test(station: str):
    """
    Builds METAR test files
    """
    m = avwx.Metar(station)
    m.update()
    # Clear timestamp due to parse_date limitations
    m.data.time = None
    return {
        'data': asdict(m.data),
        'translations': asdict(m.translations),
        'summary': m.summary,
        'speech': m.speech,
        'station_info': asdict(m.station_info)
    }
Example #4
 def test_metar_ete(self):
     """
     Performs an end-to-end test of all METAR JSON files
     """
     for path in glob(os.path.dirname(os.path.realpath(__file__))+'/metar/*.json'):
         ref = json.load(open(path))
         station = Metar(path.split('/')[-1][:4])
         self.assertIsNone(station.last_updated)
         self.assertTrue(station.update(ref['data']['raw']))
         self.assertIsInstance(station.last_updated, datetime)
         # Clear timestamp due to parse_date limitations
         station.data.time = None
         self.assertEqual(asdict(station.data), ref['data'])
         self.assertEqual(asdict(station.translations), ref['translations'])
         self.assertEqual(station.summary, ref['summary'])
         self.assertEqual(station.speech, ref['speech'])
         self.assertEqual(asdict(station.station_info), ref['station_info'])
Example #5
 def get_dict(self):
     ret = asdict(self)
     ret.pop('id')
     ret.pop('model_root')
     ret['start_date'] = ret['start_date'].strftime('%Y-%m-%d')
     ret['end_date'] = ret['end_date'].strftime('%Y-%m-%d')
     if self.relevant:
         ret.pop('relevant')
     return ret
Example #6
async def process_events(app):
    while True:
        event = await app.event_queue.get()
        if app.wss:
            msg = asdict(event)
            msg['_type'] = type(event).__name__
            if 'image' in msg:
                msg['image'] = pil_to_data_url(msg['image'], **app.image_encode_settings)
            await send_message(app, msg)
Example #7
File: api.py Project: dmtucker/backlog
 def save(self, path: str) -> None:
     """Save a Backlog to a file."""
     with open(path, 'w') as backlog_f:
         backlog_f.write(
             json.dumps(
                 [dataclasses.asdict(entry) for entry in self.entries],
                 sort_keys=True,
                 indent=2,
                 separators=(',', ': '),
             ),
         )
Example #8
def make_taf_test(station: str, report: str = None):
    """
    Builds TAF test files
    """
    t = avwx.Taf(station)
    t.update(report)
    data = asdict(t.data)
    # Clear timestamp due to parse_date limitations
    for key in ('time', 'start_time', 'end_time'):
        data[key] = None
    for i in range(len(data['forecast'])):
        for key in ('start_time', 'end_time'):
            data['forecast'][i][key] = None
    return {
        'data': data,
        'translations': asdict(t.translations),
        'summary': t.summary,
        'speech': t.speech,
        'station_info': asdict(t.station_info)
    }
Example #9
 def test_taf_ete(self):
     """
     Performs an end-to-end test of all TAF JSON files
     """
     nodate = lambda s: s[s.find('-')+2:]
     for path in glob(os.path.dirname(os.path.realpath(__file__))+'/taf/*.json'):
         ref = json.load(open(path))
         station = Taf(path.split('/')[-1][:4])
         self.assertIsNone(station.last_updated)
         self.assertTrue(station.update(ref['data']['raw']))
         self.assertIsInstance(station.last_updated, datetime)
         # Clear timestamp due to parse_date limitations
         nodt = deepcopy(station.data)
         for key in ('time', 'start_time', 'end_time'):
             setattr(nodt, key, None)
         for i in range(len(nodt.forecast)):
             for key in ('start_time', 'end_time'):
                 setattr(nodt.forecast[i], key, None)
         self.assertEqual(asdict(nodt), ref['data'])
         self.assertEqual(asdict(station.translations), ref['translations'])
         self.assertEqual(station.summary, ref['summary'])
         self.assertEqual(nodate(station.speech), nodate(ref['speech']))
         self.assertEqual(asdict(station.station_info), ref['station_info'])
Example #10
def test_dataframe_to_dataclass():

    _df = pd.DataFrame(columns=['field_a', 'field_b'], data=[[1, 'a'], [2, 'b']])

    dataclass_objects = [dataclass_object1, dataclass_object2]

    df = pd.DataFrame([asdict(x) for x in dataclass_objects])

    assert _df.equals(df)

    records = df.to_dict(orient='records')

    record_objects = [SimpleDataObject(**rec) for rec in records]

    assert record_objects == dataclass_objects
Example #11
def test_dataclass_to_dataframe():

    _df = pd.DataFrame(columns=['field_a', 'field_b'], data=[[1, 'a'], [2, 'b']])

    dataclass_objects = [dataclass_object1, dataclass_object2]

    df = pd.DataFrame([asdict(x) for x in dataclass_objects])

    print(df)
    """   field_a field_b
    0        1       a
    1        2       b
    """

    assert _df.equals(df)
Example #12
def get_scholar_json(id):
    (agg, papers) = get_scholar_data(id)

    dict = {'agg': {}, 'papers': {}}

    for key in agg:
        dict['agg'][key] = agg[key]

    for paper in papers:
        dict['papers'][paper.id] = asdict(paper)

    json_text = json.dumps(dict, sort_keys=True, indent=4)
    json_text = json_text.replace('"agg"', 'agg')
    json_text = json_text.replace('"papers"', 'papers')
    json_text = json_text.replace('"citations"', 'citations')
    json_text = json_text.replace('"id"', 'id')
    json_text = json_text.replace('"impact_factor"', 'impact_factor')
    json_text = json_text.replace('"is_book"', 'is_book')
    json_text = json_text.replace('"is_conference"', 'is_conference')
    json_text = json_text.replace('"is_journal"', 'is_journal')
    json_text = json_text.replace('"title"', 'title')
    return 'var sdata = ' + json_text
def union2(p: Union[Type[A], Type[B]]):
    dataclasses.fields(p)

    dataclasses.asdict(<warning descr="'dataclasses.asdict' method should be called on dataclass instances">p</warning>)
    dataclasses.astuple(<warning descr="'dataclasses.astuple' method should be called on dataclass instances">p</warning>)
    dataclasses.replace(<warning descr="'dataclasses.replace' method should be called on dataclass instances">p</warning>)
Example #14
def dump_malware_scan_request(request: saf.model.EvidenceRequest):
    request_dict = dataclasses.asdict(request)
    with tempfile.NamedTemporaryFile(delete=False, mode='wt') as tmp_file:
        tmp_file.write(json.dumps(request_dict, cls=EnumJSONEncoder))
import dataclasses
from typing import Type, Union


class A:
    pass


dataclasses.fields(<warning descr="'dataclasses.fields' method should be called on dataclass instances or types">A</warning>)
dataclasses.fields(<warning descr="'dataclasses.fields' method should be called on dataclass instances or types">A()</warning>)

dataclasses.asdict(<warning descr="'dataclasses.asdict' method should be called on dataclass instances">A()</warning>)
dataclasses.astuple(<warning descr="'dataclasses.astuple' method should be called on dataclass instances">A()</warning>)
dataclasses.replace(<warning descr="'dataclasses.replace' method should be called on dataclass instances">A()</warning>)


@dataclasses.dataclass
class B:
    pass


dataclasses.fields(B)
dataclasses.fields(B())

dataclasses.asdict(B())
dataclasses.astuple(B())
dataclasses.replace(B())

dataclasses.asdict(<warning descr="'dataclasses.asdict' method should be called on dataclass instances">B</warning>)
dataclasses.astuple(<warning descr="'dataclasses.astuple' method should be called on dataclass instances">B</warning>)
dataclasses.replace(<warning descr="'dataclasses.replace' method should be called on dataclass instances">B</warning>)
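The inspection warnings above mirror runtime behaviour: dataclasses.fields() accepts either a dataclass type or an instance, while asdict(), astuple(), and replace() require an instance. A minimal runnable sketch of that rule (the class names here are illustrative):

import dataclasses


@dataclasses.dataclass
class Point:
    x: int = 0
    y: int = 0


class Plain:
    pass


dataclasses.fields(Point)        # OK: accepts the dataclass type...
dataclasses.fields(Point(1, 2))  # ...and an instance of it

try:
    dataclasses.asdict(Point)    # the bare type is rejected at runtime
except TypeError as err:
    print(err)                   # e.g. "asdict() should be called on dataclass instances"

try:
    dataclasses.asdict(Plain())  # non-dataclass instances are rejected too
except TypeError as err:
    print(err)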
Example #16
File: dc_shop.py Project: daineX/random
 def as_dict(self):
     return asdict(self)
Example #17
def init_db(db_config: DBConfig) -> None:
    DATABASE.init(**asdict(db_config))
Example #18
 def to_dict(self) -> dict:
     return asdict(self)
Example #19
def copy_type_with(base: Type,
                   *types: Type,
                   params_to_type: Dict[Type, Type] = None) -> Type:
    if params_to_type is None:
        params_to_type = {}

    if isinstance(base, StrawberryUnion):
        types = cast(
            Tuple[Type, ...],
            tuple(
                copy_type_with(t, params_to_type=params_to_type)
                for t in base.types),
        )

        return union(
            name=get_name_from_types(types),
            types=types,
            description=base.description,
        )

    if hasattr(base, "_type_definition"):
        definition = cast(TypeDefinition, base._type_definition)

        if definition.type_params:
            fields = []

            type_params = definition.type_params.values()
            params_to_type.update(dict(zip(type_params, types)))

            name = get_name_from_types(
                params_to_type.values()) + definition.name

            for field in definition.fields:
                kwargs = dataclasses.asdict(field)

                if field.is_list:
                    child = cast(FieldDefinition, field.child)
                    child_type = cast(Type, child.type)

                    # TODO: nested list

                    kwargs["child"] = FieldDefinition(
                        name=child.name,
                        origin=child.origin,
                        origin_name=child.origin_name,
                        is_optional=child.is_optional,
                        type=copy_type_with(child_type,
                                            params_to_type=params_to_type),
                    )

                else:
                    field_type = cast(Type, field.type)

                    kwargs["type"] = copy_type_with(
                        field_type, params_to_type=params_to_type)

                federation_args = kwargs.pop("federation")
                kwargs["federation"] = FederationFieldParams(**federation_args)

                fields.append(FieldDefinition(**kwargs))

            type_definition = TypeDefinition(
                name=name,
                is_input=definition.is_input,
                origin=definition.origin,
                is_interface=definition.is_interface,
                is_generic=False,
                federation=definition.federation,
                interfaces=definition.interfaces,
                description=definition.description,
                _fields=fields,
            )
            type_definition._type_params = {}

            copied_type = builtins.type(
                name,
                (),
                {"_type_definition": type_definition},
            )

            if not hasattr(base, "_copies"):
                base._copies = {}

            base._copies[types] = copied_type

            return copied_type

    if is_type_var(base):
        return params_to_type[base]

    return base
    if ret is None:  # should not happen... but if it does...
        print("Unable to locate {} is db.scripts... skipping".format(js_id))
        continue
    content = ret.get('code') 
    jsr = JavascriptArtefact(url=hit.get('origin_url'), 
                             sha256=hashlib.sha256(content).hexdigest(), 
                             md5=hashlib.md5(content).hexdigest(), 
                             inline=False)
    m, failed, stderr = analyse_script(content, jsr, java=args.java, feature_extractor=args.extractor)
    if failed:
       n_failed += 1
       continue
    m.update({ 'origin': hit.get('cited_on'), 'js_id': js_id })
    assert 'js_id' in m and len(m['js_id']) > 0  # PRE-CONDITION: ensure hits have origin_js_id field set
    best_control, next_best_control = find_best_control(m, all_controls, db=db)
    d = asdict(best_control) # NB: all fields of the model are sent to output kafka topic and Mongo

    # 2a. also send results to MongoDB for batch-oriented applications and for long-term storage
    # POST-CONDITIONS which MUST be maintained are checked before pushing to topic
    assert 'cited_on' in d and len(d['cited_on']) > 0
    assert 'origin_url' in d and len(d['origin_url']) > 0
    assert isinstance(d['origin_js_id'], str) or d['origin_js_id'] is None
    ret = db.vet_against_control.find_one_and_update({ 'origin_url': best_control.origin_url }, 
                                                     { "$set": d}, 
                                                     upsert=True, 
                                                     return_document=pymongo.ReturnDocument.AFTER)

    # 2b. send results to kafka topic for streaming applications
    assert ret is not None and '_id' in ret
    xref = str(ret.get('_id'))
    assert xref is not None
Example #21
def register_bot(authentication_code: str, bot: Bot):
    """
    Register bot to the system.
    """
    get_db().hmset(f'registration.{authentication_code}', asdict(bot))
Example #22
File: snmp.py Project: inettgmbh/checkmk
 def serialize(self) -> Dict[str, Any]:
     return dataclasses.asdict(self)
Example #23
 def test_compute_model_dependencies(
     self,
     mock_expand,
     mock_project,
     mock_inverse_utility,
     mock_affine_model,
     mock_Acquisition_compute,
 ):
     # TODO: Patch only `MFKG_PATH.__init__` once `construct_inputs`
     # implemented for qMFKG.
     with patch(
         f"{MULTI_FIDELITY_PATH}.MultiFidelityAcquisition.__init__",
         return_value=None,
     ):
         # We don't actually need to instantiate the BoTorch acqf in these tests.
         mf_acquisition = MultiFidelityAcquisition(
             surrogate=self.surrogate,
             search_space_digest=self.search_space_digest,
             objective_weights=self.objective_weights,
             botorch_acqf_class=qMultiFidelityKnowledgeGradient,
         )
     # Raise Error if `fidelity_weights` and `target_fidelities` do not align.
     with self.assertRaisesRegex(RuntimeError, "Must provide the same indices"):
         mf_acquisition.compute_model_dependencies(
             surrogate=self.surrogate,
             search_space_digest=SearchSpaceDigest(
                 **{
                     **dataclasses.asdict(self.search_space_digest),
                     "target_fidelities": {1: 5.0},
                 }
             ),
             objective_weights=self.objective_weights,
             pending_observations=self.pending_observations,
             outcome_constraints=self.outcome_constraints,
             linear_constraints=self.linear_constraints,
             fixed_features=self.fixed_features,
             options=self.options,
         )
     # Make sure `fidelity_weights` are set when they are not passed in.
     mf_acquisition.compute_model_dependencies(
         surrogate=self.surrogate,
         search_space_digest=SearchSpaceDigest(
             **{
                 **dataclasses.asdict(self.search_space_digest),
                 "target_fidelities": {2: 5.0, 3: 5.0},
             }
         ),
         objective_weights=self.objective_weights,
         pending_observations=self.pending_observations,
         outcome_constraints=self.outcome_constraints,
         linear_constraints=self.linear_constraints,
         fixed_features=self.fixed_features,
         options={Keys.COST_INTERCEPT: 1.0, Keys.NUM_TRACE_OBSERVATIONS: 0},
     )
     mock_affine_model.assert_called_with(
         fidelity_weights={2: 1.0, 3: 1.0}, fixed_cost=1.0
     )
     # Usual case.
     dependencies = mf_acquisition.compute_model_dependencies(
         surrogate=self.surrogate,
         search_space_digest=self.search_space_digest,
         objective_weights=self.objective_weights,
         pending_observations=self.pending_observations,
         outcome_constraints=self.outcome_constraints,
         linear_constraints=self.linear_constraints,
         fixed_features=self.fixed_features,
         options=self.options,
     )
     mock_Acquisition_compute.assert_called_with(
         surrogate=self.surrogate,
         search_space_digest=self.search_space_digest,
         objective_weights=self.objective_weights,
         pending_observations=self.pending_observations,
         outcome_constraints=self.outcome_constraints,
         linear_constraints=self.linear_constraints,
         fixed_features=self.fixed_features,
         options=self.options,
     )
     mock_affine_model.assert_called_with(
         fidelity_weights=self.options[Keys.FIDELITY_WEIGHTS],
         fixed_cost=self.options[Keys.COST_INTERCEPT],
     )
     mock_inverse_utility.assert_called_with(cost_model="cost_model")
     self.assertTrue(Keys.COST_AWARE_UTILITY in dependencies)
     self.assertTrue(Keys.PROJECT in dependencies)
     self.assertTrue(Keys.EXPAND in dependencies)
     # Check that `project` and `expand` are defined correctly.
     project = dependencies.get(Keys.PROJECT)
     project(torch.tensor([1.0]))
     mock_project.assert_called_with(
         X=torch.tensor([1.0]),
         target_fidelities=self.search_space_digest.target_fidelities,
     )
     expand = dependencies.get(Keys.EXPAND)
     expand(torch.tensor([1.0]))
     mock_expand.assert_called_with(
         X=torch.tensor([1.0]),
         fidelity_dims=sorted(self.search_space_digest.target_fidelities),
         num_trace_obs=self.options.get(Keys.NUM_TRACE_OBSERVATIONS),
     )
Example #24
 def save_to_json(self, json_path: str):
     """ Save the content of this instance in JSON format inside :obj:`json_path`."""
     json_string = json.dumps(
         dataclasses.asdict(self), indent=2, sort_keys=True) + "\n"
     with open(json_path, "w", encoding="utf-8") as f:
         f.write(json_string)
Example #25
 def default(self, o):
     if dataclasses.is_dataclass(o):
         return dataclasses.asdict(o)
     if isinstance(o, Path):
         return str(o)
     return super().default(o)
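This default() hook is the usual way to let json.dumps handle dataclasses transparently; a self-contained sketch of the pattern (the encoder and dataclass names are illustrative):

import dataclasses
import json
from pathlib import Path


class DataclassJSONEncoder(json.JSONEncoder):
    """Encode dataclass instances and Path objects that json.dumps cannot handle natively."""

    def default(self, o):
        if dataclasses.is_dataclass(o):
            return dataclasses.asdict(o)
        if isinstance(o, Path):
            return str(o)
        return super().default(o)


@dataclasses.dataclass
class Job:
    name: str
    workdir: Path


print(json.dumps(Job("build", Path("/tmp/build")), cls=DataclassJSONEncoder))
# {"name": "build", "workdir": "/tmp/build"}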
Example #26
 def as_dict(self) -> Dict[str, Any]:
     return dataclasses.asdict(self)
Example #27
 def get(self, iden):
     rule = self._rules.get(iden)
     if rule is None:
         raise s_exc.NoSuchIden()
     return dataclasses.asdict(rule)

@dataclass(order=True)
class Employee:
    first: str = field(compare=False)
    last: str = field(compare=False)
    pay: int
    fullname: str = field(init=False, repr=False, compare=False)
    email: str = field(init=False, repr=False, compare=False)

    def __post_init__(self):

        self.fullname = self.first + " " + self.last
        self.email = self.first + "." + self.last + "@company.com"


e1 = Employee("indhu", "mathy", 500)
e2 = Employee("karthi", "palani", 1000)
print(e1)
print(e1 > e2)
print(e1 == e2)
print(e1.fullname)
print(e1.email)
emp_dict = e1.__dict__
print(emp_dict)
print(asdict(e1))
# converting object to json
import json
jdata = json.dumps(emp_dict, indent=2)
print(jdata)
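Note the difference between the two dumps above: e1.__dict__ is the object's live attribute namespace, while asdict(e1) builds a brand-new dictionary and recurses into nested dataclasses, lists, tuples, and dicts, so mutating the result never touches the original. A small self-contained sketch of that distinction:

from dataclasses import dataclass, field, asdict


@dataclass
class Member:
    name: str


@dataclass
class Team:
    name: str
    members: list = field(default_factory=list)


team = Team("core", [Member("ada")])
snapshot = asdict(team)          # recursive copy: Member("ada") becomes {'name': 'ada'}
snapshot["members"].clear()      # only the copy is modified
print(team.members)              # [Member(name='ada')] -- original untouched

live = team.__dict__             # the live namespace, not a copy
live["name"] = "renamed"
print(team.name)                 # "renamed"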
Example #29
File: model.py Project: CzarSimon/mimir
 def tojson(self) -> str:
     return json.dumps(asdict(self))
 def to_json(self):
     """Return the json representation of the class."""
     return json.dumps(dataclasses.asdict(self))
Example #31
 def test_find_success(self, data, repo: Repository, dao: IDao):
     id_ = dao.insert(**data)
     entity = repo.find(id_)
     assert isinstance(entity, Bike)
     assert dataclasses.asdict(entity) == data
Example #32
 def __init__(self):
     super().__init__()
     self.config = Config(**dataclasses.asdict(main_thread_config))
Example #33
 def _property_to_dict(self, advertised: AdvertisedProperty) -> Dict:
     as_dict = asdict(advertised)
     as_dict.pop('property_id', None)
     return as_dict
Example #34
 def push(self, name: str, data: Data) -> Intent:
     # TODO check stack size overflow and data overflow
     intent = Intent(id_to_str(new_id()), name, data)
     self._stack().append(asdict(intent))
     return intent
Example #35
from dataclasses import dataclass, asdict
from typing import List

@dataclass
class Point:
     x: int
     y: int

@dataclass
class C:
     z: List[Point]

p = Point(10, 20)
assert asdict(p) == {'x': 10, 'y': 20}

c = C([Point(0, 0), Point(10, 4)])
assert asdict(c) == {'z': [{'x': 0, 'y': 0}, {'x': 10, 'y': 4}]}
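asdict also takes a dict_factory callable, which is invoked with the list of (field name, value) pairs for every dataclass it converts; this allows reshaping the output without post-processing. A short sketch with illustrative names:

from dataclasses import dataclass, asdict


@dataclass
class Reading:
    sensor: str
    value: float
    unit: str


def upper_keys(pairs):
    # pairs is the list of (field_name, value) tuples supplied by asdict
    return {name.upper(): value for name, value in pairs}


r = Reading("t1", 21.5, "C")
assert asdict(r, dict_factory=upper_keys) == {"SENSOR": "t1", "VALUE": 21.5, "UNIT": "C"}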
Example #36
 def serialize(self):
     """Serializes the ClickEvent for sending to Kafka"""
     out = BytesIO()
     writer(out, ClickEvent.schema, [asdict(self)])
     return out.getvalue()
def unknown(p):
    dataclasses.fields(p)

    dataclasses.asdict(p)
    dataclasses.astuple(p)
Example #38
 def asdict(self) -> Dict[str, float]:
     """Converts parameters to a dictionary that maps angle names to values."""
     return dataclasses.asdict(self)
def union1(p: Union[A, B]):
    dataclasses.fields(p)

    dataclasses.asdict(p)
    dataclasses.astuple(p)
    dataclasses.replace(p)
Example #40
def wrap_vault(vault: Union[VaultV1, VaultV2], samples: ApySamples,
               aliases: dict, icon_url: str, assets_metadata: dict) -> dict:
    apy_error = Apy("error", 0, 0, ApyFees(0, 0), ApyPoints(0, 0, 0))
    try:
        apy = vault.apy(samples)
    except ValueError as error:
        logger.error(error)
        apy = apy_error
    except PriceError as error:
        logger.error(error)
        apy = apy_error

    if isinstance(vault, VaultV1):
        strategies = [{
            "address":
            str(vault.strategy),
            "name":
            vault.strategy.getName()
            if hasattr(vault.strategy, "getName") else vault.strategy._name,
        }]
    else:
        strategies = [{
            "address": str(strategy.strategy),
            "name": strategy.name
        } for strategy in vault.strategies]

    inception = contract_creation_block(str(vault.vault))

    token_alias = aliases[str(vault.token)]["symbol"] if str(
        vault.token) in aliases else vault.token.symbol()
    vault_alias = token_alias

    tvl = vault.tvl()

    migration = None

    if str(vault.vault) in assets_metadata:
        migration = {
            "available": assets_metadata[str(vault.vault)][1],
            "address": assets_metadata[str(vault.vault)][2]
        }

    object = {
        "inception":
        inception,
        "address":
        str(vault.vault),
        "symbol":
        vault.symbol if hasattr(vault, "symbol") else vault.vault.symbol(),
        "name":
        vault.name,
        "display_name":
        vault_alias,
        "icon":
        icon_url % str(vault.vault),
        "token": {
            "name":
            vault.token.name()
            if hasattr(vault.token, "name") else vault.token._name,
            "symbol":
            vault.token.symbol() if hasattr(vault.token, "symbol") else None,
            "address":
            str(vault.token),
            "decimals":
            vault.token.decimals()
            if hasattr(vault.token, "decimals") else None,
            "display_name":
            token_alias,
            "icon":
            icon_url % str(vault.token),
        },
        "tvl":
        dataclasses.asdict(tvl),
        "apy":
        dataclasses.asdict(apy),
        "strategies":
        strategies,
        "endorsed":
        vault.is_endorsed if hasattr(vault, "is_endorsed") else True,
        "version":
        vault.api_version if hasattr(vault, "api_version") else "0.1",
        "decimals":
        vault.decimals
        if hasattr(vault, "decimals") else vault.vault.decimals(),
        "type":
        "v2" if isinstance(vault, VaultV2) else "v1",
        "emergency_shutdown":
        vault.vault.emergencyShutdown()
        if hasattr(vault.vault, "emergencyShutdown") else False,
        "updated":
        int(time()),
        "migration":
        migration,
    }

    if chain.id == 1 and any(
        [isinstance(vault, t) for t in [Backscratcher, YveCRVJar]]):
        object["special"] = True

    return object
Example #41
    def construct_json(self) -> Dict:
        """Construct dictionary/JSON for exporting or printing.

        :return json_out: All class attributes stored in a dictionary.
        """
        return asdict(self)
Example #42
File: models.py Project: mr-KRAX/TrueJob
    assessment_as_worker: float  # None or 0.0 - 5.0
    assessment_as_employer: float  # None or 0.0 - 5.0

    def __init__(self, dictionary):
        """
      Constructor from a dictionary
      """
        for key in dictionary:
            setattr(self, key, dictionary[key])


if __name__ == "__main__":
    dict_to_User = {
        'vkid': 'id',
        'type': 'Admin',
        'employer_rating': 0.2,
        'worker_rating': 1,
        'status': 'Silver',
        'is_blocked': 1
    }
    print("dict to User: "******"type", "Admin")
    print("User from dict: ", User_from_dict)

    # User -> dict
    dict_from_User = asdict(User_from_dict)
    print("dict from User:", dict_from_User)
Example #43
 def list(self):
     return [(iden, dataclasses.asdict(rule)) for iden, rule in self._rules.items()]
Example #44
File: models.py Project: mr-KRAX/TrueJob
 def asDict(self):
     user_as_dict = dict()
     for k, v in asdict(self).items():
         user_as_dict[k[1:]] = v
     return user_as_dict
Example #45
 def en(self):
     return s_msgpack.en(dataclasses.asdict(self))
Example #46
    def add_game(self, game: str, long_name: str, role_id: int, emoji: int, category_id: int, cog: str = None):
        """ Adds a new game to the bot """

        # Constructing a Game first and then calling asdict ensures the stored dict cannot contain fields invalid for Game
        self.__games[game] = dataclasses.asdict(Game(name_short=game, name_long=long_name, role_id=role_id, emoji=emoji, category_id=category_id, cog=cog))
Example #47
 def as_dict(self) -> dict:
     return dataclasses.asdict(self) 
Example #48
    def test_parser_mutation(self):
        query = """
            mutation CreateHero {
              createHero {
                hero {
                  name
                }
                ok
              }
            }
        """

        parser = QueryParser(self.swapi_schema)
        parsed = parser.parse(query)

        expected = asdict(
            ParsedQuery(
                query=query,
                objects=[
                    ParsedOperation(
                        name="CreateHero",
                        type="mutation",
                        children=[
                            ParsedObject(
                                name="CreateHeroData",
                                fields=[
                                    ParsedField(
                                        name="createHero",
                                        type="CreateHeroPayload",
                                        nullable=True,
                                    )
                                ],
                                children=[
                                    ParsedObject(
                                        name="CreateHeroPayload",
                                        fields=[
                                            ParsedField(name="hero",
                                                        type="Hero",
                                                        nullable=True),
                                            ParsedField(name="ok",
                                                        type="bool",
                                                        nullable=True),
                                        ],
                                        children=[
                                            ParsedObject(
                                                name="Hero",
                                                fields=[
                                                    ParsedField(
                                                        name="name",
                                                        type="str",
                                                        nullable=False,
                                                    )
                                                ],
                                            )
                                        ],
                                    )
                                ],
                            )
                        ],
                    )
                ],
            ))

        parsed_dict = asdict(parsed)

        assert bool(parsed)
        assert parsed_dict == expected, str(DeepDiff(parsed_dict, expected))
Example #49
def _custom_default(o):
    if is_dataclass(o):
        return asdict(o)
    raise TypeError(f"{o!r} is not JSON serializable")
Example #50

def write(data, *args, **kwargs):
    """Write data to the json file."""
    with open('game_on.json', 'w') as json_file:
        json.dump(data, json_file)
    return True


def main():
    """Game on main funtion."""
    pass


if __name__ == "__main__":
    if not len(sys.argv) < 1:
        exit(0)

    match = Match('Name')  # -g "Name"
    p1 = Player('Player 1')  # -p1 "Name"
    p2 = Player('Player 2')  # -p1 "Name"
    r1 = Results(match, p1, 2)  # -r1 2
    r2 = Results(match, p2, 12)  # -r2 2

    r1.losses = r2.wins
    r2.losses = r1.wins

    data = {}

    data['result'] = [asdict(r1), asdict(r2)]
Example #51
File: authn.py Project: SUNET/eduid-IdP
 def to_session_dict(self):
     return asdict(self)
Example #52
 def save_to_file(self, config_path):
   with open(config_path, 'w') as config_file:
     json.dump(dataclasses.asdict(self), config_file, indent=2, sort_keys=True)
Example #53
 def fill_info_from_seq_region_raw(self,
                                   raw_json: Optional[str],
                                   add_missing: bool = True,
                                   use_syns: bool = True,
                                   update_from_new: bool = True,
                                   update_syns: bool = True) -> None:
     if not raw_json:
         return
     _open = raw_json.endswith(".gz") and gzip.open or open
     with _open(raw_json, 'rt') as sr_raw:
         print("adding data from seq_regions json:",
               raw_json,
               file=sys.stderr)
         data = json.load(sr_raw, object_pairs_hook=OrderedDict)
         if type(data) != list:
             data = [data]
         for sr in data:
             name = sr.get("name")
             if not name:
                 continue
             contig = name
             if use_syns and contig not in self.seq_regions:
                 contig = self.syns.get(contig, contig)
             if not add_missing and contig not in self.seq_regions:
                 continue
             # updating otherwise
             sr_dict = dc.asdict(self.seq_regions[contig],
                                 dict_factory=no_nulls_dict)
             updates = {}
             tags_to_copy_str = "length coord_system_level location circular codon_table karyotype_bands"
             for tag in tags_to_copy_str.split():
                 val = sr.get(tag)
                 if val is None: continue
                 if not val: continue
                 if not update_from_new and sr_dict.get(tag) is not None:
                     continue
                 updates[tag] = val
             # update karyotype_bands coloring
             #sorted([(0,3), (1,2), (2,3)], key = lambda x:(x[1],-x[0])) == [(1, 2), (2, 3), (0, 3)]
             bands_start_sorted = sorted(updates.get("karyotype_bands", []),
                                         key=lambda x:
                                         (x["end"], -x["start"]))
             prev, band_no = None, 0
             for band in bands_start_sorted:
                 if band.get("stain"):
                     continue  # don't increase band_no or change prev
                 if prev and band["start"] <= prev["start"]:
                     continue  # same for "metaband"
                 band["stain"] = band_no and "gpos75" or "gpos25"
                 prev, band_no = band, (band_no + 1) % 2
             # synonyms
             raw_syns = sr.get("synonyms")
             if update_syns and raw_syns:
                 old_syns = self.seq_regions[contig].synonyms
                 raw_syns = [
                     SeqRegionSyn(s["name"],
                                  s.get("source", self.syn_src_default))
                     for s in raw_syns
                 ]
                 updates["synonyms"] = self.merge_syns(old_syns,
                                                       raw_syns,
                                                       use_new_source=True)
             # update
             self.seq_regions[contig] = dc.replace(self.seq_regions[contig],
                                                   **updates)
     return
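The dict_factory=no_nulls_dict argument above refers to a helper that is not included in this snippet; a plausible sketch, assuming it simply drops fields whose value is None:

def no_nulls_dict(pairs):
    """dict_factory for dataclasses.asdict that skips None-valued fields (assumed behaviour)."""
    return {key: value for key, value in pairs if value is not None}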
Example #54
File: popups.py Project: petrows/checkmk
 def asdict(self) -> Dict[str, Union[str, Optional[str]]]:
     """Dictionary representation used to pass information to JS code."""
     return {k: v for k, v in asdict(self).items() if not k.startswith('_')}
Example #55
def test_kom_happy_path():
    """
    Test Komer object class
    """

    @dataclass
    class Record:
        first: str  # first name
        last: str  # last name
        street: str  # street address
        city: str  # city name
        state: str  # state code
        zip: int  # zip code

        def __iter__(self):
            return iter(asdict(self))

    jim = Record(first="Jim",
                 last="Black",
                 street="100 Main Street",
                 city="Riverton",
                 state="UT",
                 zip=84058)

    jimser = json.dumps(asdict(jim)).encode("utf-8")
    jim = helping.datify(Record, json.loads(bytes(jimser).decode("utf-8")))
    assert isinstance(jim, Record)

    with dbing.openLMDB() as db:
        assert isinstance(db, dbing.LMDBer)
        assert db.name == "test"
        assert db.opened

        mydb = koming.Komer(db=db, schema=Record, subkey='records.')
        assert isinstance(mydb, koming.Komer)

        sue = Record(first="Susan",
                     last="Black",
                     street="100 Main Street",
                     city="Riverton",
                     state="UT",
                     zip=84058)

        keys = ("test_key", "0001")
        mydb.put(keys=keys, data=sue)
        actual = mydb.get(keys=keys)

        assert actual.first == "Susan"
        assert actual.last == "Black"
        assert actual.street == "100 Main Street"
        assert actual.city == "Riverton"
        assert actual.state == "UT"
        assert actual.zip == 84058

        mydb.rem(keys)

        actual = mydb.get(keys=keys)
        assert actual is None

        keys = ("test_key", "0001")
        mydb.put(keys=keys, data=sue)
        actual = mydb.get(keys=keys)
        assert actual == sue

        kip = Record(first="Kip",
                     last="Thorne",
                     street="200 Center Street",
                     city="Bluffdale",
                     state="UT",
                     zip=84043)
        result = mydb.put(keys=keys, data=kip)
        assert not result
        actual = mydb.get(keys=keys)
        assert actual == sue

        result = mydb.pin(keys=keys, data=kip)
        assert result
        actual = mydb.get(keys=keys)
        assert actual == kip

        # test with keys as string not tuple
        keys = "keystr"

        bob = Record(first="Bob",
                     last="Brown",
                     street="100 Center Street",
                     city="Bluffdale",
                     state="UT",
                     zip=84043)

        mydb.put(keys=keys, data=bob)
        actual = mydb.get(keys=keys)

        assert actual.first == "Bob"
        assert actual.last == "Brown"
        assert actual.street == "100 Center Street"
        assert actual.city == "Bluffdale"
        assert actual.state == "UT"
        assert actual.zip == 84043

        mydb.rem(keys)

        actual = mydb.get(keys=keys)
        assert actual is None


    assert not os.path.exists(db.path)
    assert not db.opened
Example #56
    def get_args_dict(self):
        args = dataclasses.asdict(self.args)
        args["scenario_id"] = self.id

        return args
Example #57
 def __iter__(self):
     return iter(asdict(self))
Example #58
    def test_parser_query_with_enums(self):
        query = """
            query MyIssues {
              viewer {
                issues(first: 5) {
                  edges {
                    node {
                      author { login }
                      authorAssociation
                    }
                  }
                }
              }
            }
        """
        parsed = self.github_parser.parse(query)

        issue_child = ParsedObject(
            name="Issue",
            fields=[
                ParsedField(name="author", type="Actor", nullable=True),
                ParsedField(
                    name="authorAssociation",
                    type="CommentAuthorAssociation",
                    nullable=False,
                ),
            ],
            children=[
                ParsedObject(
                    name="Actor",
                    fields=[
                        ParsedField(name="login", type="str", nullable=False)
                    ],
                )
            ],
        )
        child = ParsedObject(
            name="MyIssuesData",
            fields=[ParsedField(name="viewer", type="User", nullable=False)],
            children=[
                ParsedObject(
                    name="User",
                    fields=[
                        ParsedField(name="issues",
                                    type="IssueConnection",
                                    nullable=False)
                    ],
                    children=[
                        ParsedObject(
                            name="IssueConnection",
                            fields=[
                                ParsedField(name="edges",
                                            type="List[IssueEdge]",
                                            nullable=True)
                            ],
                            children=[
                                ParsedObject(
                                    name="IssueEdge",
                                    fields=[
                                        ParsedField(name="node",
                                                    type="Issue",
                                                    nullable=True)
                                    ],
                                    children=[issue_child],
                                )
                            ],
                        )
                    ],
                )
            ],
        )
        expected = asdict(
            ParsedQuery(
                query=query,
                enums=[
                    ParsedEnum(
                        name="CommentAuthorAssociation",
                        values={
                            "MEMBER": "MEMBER",
                            "OWNER": "OWNER",
                            "COLLABORATOR": "COLLABORATOR",
                            "CONTRIBUTOR": "CONTRIBUTOR",
                            "FIRST_TIME_CONTRIBUTOR": "FIRST_TIME_CONTRIBUTOR",
                            "FIRST_TIMER": "FIRST_TIMER",
                            "NONE": "NONE",
                        },
                    )
                ],
                objects=[
                    ParsedOperation(name="MyIssues",
                                    type="query",
                                    children=[child])
                ],
            ))

        parsed_dict = asdict(parsed)

        assert bool(parsed)
        assert parsed_dict == expected, str(DeepDiff(parsed_dict, expected))