def test_dict_hashkey_with_serializer(self):
        def foo_serializer(obj, **kwargs):
            return "{},{},{}".format(obj.a, obj.b, obj.c)

        def foo_deserializer(obj, cls, **kwargs):
            res = obj.split(',')
            return Foo(a=int(res[0]), b=int(res[1]), c=int(res[2]))

        jsons.set_serializer(foo_serializer, Foo, True)
        jsons.set_deserializer(foo_deserializer, Foo, True)

        bar = {Foo(1, 2, 3): D(a=42, b=39)}
        dumped = jsons.dump(bar,
                            cls=Dict[Foo, D],
                            strict=True,
                            strip_privates=True,
                            strip_properties=True,
                            use_enum_name=True)
        self.assertEqual(dumped, {'1,2,3': {"a": 42, "b": 39}})
        loaded = jsons.load(dumped,
                            cls=Dict[Foo, D],
                            strict=True,
                            strip_privates=True,
                            strip_properties=True,
                            use_enum_name=True)
        self.assertEqual(loaded, bar)
Example #2
    def get_serializer(cls):
        if not cls._serializer:
            cls._serializer = jsons.fork()
            jsons.set_deserializer(jsons.default_object_deserializer,
                                   cls=cls,
                                   fork_inst=cls._serializer)
            jsons.set_deserializer(field_based_deserializer_func,
                                   cls=Arch,
                                   fork_inst=cls._serializer)
            jsons.set_deserializer(field_based_deserializer_func,
                                   cls=TrainingSchedule,
                                   fork_inst=cls._serializer)
            jsons.set_deserializer(field_based_deserializer_func,
                                   cls=Optimizer,
                                   fork_inst=cls._serializer)
            jsons.set_deserializer(PrepFunction.deserializer,
                                   cls=PrepFunction,
                                   fork_inst=cls._serializer)

            jsons.set_serializer(jsons.default_object_serializer,
                                 cls=cls,
                                 fork_inst=cls._serializer)
            jsons.set_serializer(PrepFunction.serializer,
                                 cls=PrepFunction,
                                 fork_inst=cls._serializer)

        return cls._serializer
Example #3
    def test_set_custom_functions(self):
        jsons.set_serializer(lambda *_, **__: 'custom_serializer', str)
        jsons.set_deserializer(lambda *_, **__: 'custom_deserializer', str)

        dumped = jsons.dump('serialize me')
        loaded = jsons.load(dumped)

        self.assertEqual(dumped, 'custom_serializer')
        self.assertEqual(loaded, 'custom_deserializer')
Example #4
def run_jsons():
    """
    """
    glyph = [
        OrientedPoint(0, 2, 3),
        OrientedPoint(3, -1, 3),
        OrientedPoint(7, 0, 3.1415),
        OrientedPoint(2, 0.54, 3),
    ]

    glyph_encoded = jsons.dumps(glyph)
    print(f"\nglyph_encoded:\n{glyph_encoded}")
    glyph_decoded = jsons.loads(glyph_encoded, Glyph)
    print(f"\nglyph_decoded:\n{glyph_decoded}")

    spline = [
        glyph,
        [
            OrientedPoint(2, -1, 3),
            OrientedPoint(0.7436723, 0, 3.1511),
            OrientedPoint(math.e, 0.21, 3),
        ],
    ]
    spline_encoded = jsons.dumps(spline)
    print(f"\nspline_encoded:\n{spline_encoded}")
    spline_decoded = jsons.loads(spline_encoded, Spline)
    print(f"spline_decoded:\n{spline_decoded}")

    an_spline = {
        "source_file": "splne.txt",
        "total_points": sum(map(len, spline)),
        "spline": spline,
    }
    an_spline_encoded = jsons.dumps(an_spline, indent=4)
    print(f"\nan_spline_encoded:\n{an_spline_encoded}")
    an_spline_decoded = jsons.loads(an_spline_encoded, Dict[str, Union[Spline,
                                                                       Any]])
    print(f"an_spline_decoded:\n{an_spline_decoded}")

    li = LigatureInfo(glyph, spline)
    print(f"li: {li}")
    li_enc = jsons.dumps(li, indent=4)
    print(f"\nli_enc:\n{li_enc}")
    li_dec = jsons.loads(li_enc, cls=LigatureInfo)
    print(f"\nli_dec:\n{li_dec}")

    jsons.set_serializer(serializer_oriented_point, OrientedPoint)
    li = LigatureInfo(glyph, spline)
    print(f"\nCustom serializer_oriented_point li: {li}")
    li_enc = jsons.dumps(li, indent=4)
    print(f"\nli_enc:\n{li_enc}")
    li_dec = jsons.loads(li_enc, cls=LigatureInfo)
    print(f"\nli_dec:\n{li_dec}")
def set_serializers() -> None:
    """Set serializers for jsons to use to cast specific classes"""

    # Local import to minimize jsons usage across files
    from .group import ElementModP, ElementModQ
    from .tally import CiphertextTally, PlaintextTally
    from .proof import ProofUsage

    set_serializer(lambda p, **_: str(p), ElementModP)
    set_serializer(lambda q, **_: str(q), ElementModQ)
    set_serializer(lambda tally, **_: dump(tally.cast), CiphertextTally)
    set_serializer(lambda tally, **_: dump(tally.contests), PlaintextTally)
    set_serializer(lambda usage, **_: usage.value, ProofUsage)
Example #6
    def __init__(self, dbname, qlog=None):

        self.configLogger(qlog)

        atlas = 'mongodb+srv://opus:[email protected]/asvp2'
        local = 'mongodb://localhost:27017'

        try:
            self.client = MongoClient(local)
            self.db = self.client[dbname]
            for col in self.col_id:
                lcol = self.db.get_collection(col)
                name = lcol.name
        except Exception as e:
            logger.debug('MongoLink failed: {}'.format(e.__repr__()))
            return

        dispatcher.connect(self.entityChanged, signal=SIGNALS.ENTITYCHANGE, sender=dispatcher.Any)
        # collections = self.db.collection_names()

        # Search for Entities and register them
        m = __import__( 'Entities' )
        for name, obj in inspect.getmembers(m):
            if inspect.ismodule(obj) and getattr(obj, 'Entity', None):
                m1 = getattr(obj, name)
                if inspect.isclass(m1) and m1.__name__ != 'Entity':
                    self.entities[name] = m1
                    self.serializers[m1] = (jsons.get_serializer(m1))
                    self.deserializers[m1] = (jsons.get_deserializer(m1))
                    jsons.set_serializer(MongoLink.entity_serializer, m1)
                    jsons.set_deserializer(MongoLink.entity_deserializer, m1)
        #  Register new serializers to each entity class
        # self.serializers[m1] = (jsons.get_serializer(BuildPressure))
        # self.serializers[BuildPressure_pf] = (jsons.get_serializer(BuildPressure_pf))
        #
        # jsons.set_serializer(MongoLink.entity_serializer, BuildPressure)
        # jsons.set_serializer(MongoLink.entity_serializer, BuildPressure_pf)

        connected = True
        logger.debug('MongoLink started...')
def set_serializers() -> None:
    """Set serializers for jsons to use to cast specific classes"""

    # Local import to minimize jsons usage across files
    from .group import ElementModP, ElementModQ

    set_serializer(lambda p, **_: str(p), ElementModP)
    set_serializer(lambda q, **_: str(q), ElementModQ)
    set_serializer(lambda dt, **_: dt.isoformat(), datetime)
Example #8
    def get_serializer(cls):
        if not cls._serializer:
            cls._serializer = jsons.fork()
            jsons.set_deserializer(jsons.default_object_deserializer,
                                   cls=cls,
                                   fork_inst=cls._serializer)
            jsons.set_deserializer(field_based_deserializer_func,
                                   cls=Arch,
                                   fork_inst=cls._serializer)
            jsons.set_deserializer(PrepFunction.deserializer,
                                   cls=PrepFunction,
                                   fork_inst=cls._serializer)
            jsons.set_deserializer(Training.deserializer,
                                   cls=Training,
                                   fork_inst=cls._serializer)

            jsons.set_serializer(jsons.default_object_serializer,
                                 cls=cls,
                                 fork_inst=cls._serializer)
            jsons.set_serializer(PrepFunction.serializer,
                                 cls=PrepFunction,
                                 fork_inst=cls._serializer)

        return cls._serializer
Example #9
    def test_fork(self):
        f1 = jsons.fork()
        f2 = jsons.fork()
        f3 = jsons.fork(fork_inst=f1)

        jsons.set_serializer(lambda *_, **__: 'f1', str, fork_inst=f1)
        jsons.set_serializer(lambda *_, **__: 'f2', str, fork_inst=f2)
        jsons.set_serializer(lambda *_, **__: 3, int, fork_inst=f3)

        f4 = jsons.fork(fork_inst=f1)

        self.assertEqual(
            'f1', jsons.dump('I wanted a fork on the table.', fork_inst=f1))
        self.assertEqual(
            'f2', jsons.dump('I wanted a fork on the table.', fork_inst=f2))
        self.assertEqual('f3', jsons.dump('f3', fork_inst=f3))
        self.assertEqual(3, jsons.dump(42, fork_inst=f3))
        self.assertEqual(
            'f1', jsons.dump('I wanted a fork on the table.', fork_inst=f4))
Example #10
            for channel in ch_list:
                add_listener(channel)
            log.info(
                f"Listener {subscriber_id} added to following queues: {ch_list}"
            )
            yield queue
        finally:
            log.info(f"Remove listener: {subscriber_id}")
            for channel in ch_list:
                remove_listener(channel)
            self.active_listener.pop(subscriber_id, None)
            await self.emit_event(CoreMessage.Disconnected, {
                "subscriber_id": subscriber_id,
                "channels": channels
            })

    async def emit_event(self, event_type: str, data: Json) -> None:
        return await self.emit(Event(event_type, data))

    async def emit(self, message: Message) -> None:
        async def emit_by(name: str) -> None:
            for listener in self.listeners.get(name, []):
                await listener.put(message)

        await emit_by(message.message_type)  # inform specific listener
        await emit_by("*")  # inform "all" event listener


set_deserializer(Message.from_json, Message)
set_serializer(Message.message_to_json, Message)
Schema = Dict[str, Any]


def default_numpy_bool_serializer(np_bool: np.bool_,
                                  **kwargs: Dict[str, Any]) -> bool:
    """
    Serialize the given numpy bool instance to a native python object.
    :param np_bool: the numpy bool instance that is to be serialized.
    :param kwargs: not used.
    :return: native python object equivalent to the numpy object representation.
    """
    return np_bool.item()


jsons.set_serializer(default_numpy_bool_serializer, np.bool_)


def default_numpy_number_serializer(np_number: np.number,
                                    **kwargs: Dict[str, Any]) -> object:
    """
    Serialize the given numpy number instance to a native python object.
    :param np_number: the numpy number instance that is to be serialized.
    :param kwargs: not used.
    :return: native python object equivalent to the numpy object representation.
    """
    return np_number.item()


jsons.set_serializer(default_numpy_number_serializer, np.number)
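A minimal usage sketch (assumed, not part of the module above): once both serializers are registered, jsons.dump should reduce numpy scalars inside ordinary containers to native Python values. The payload below is a hypothetical example.

# Hypothetical check of the numpy serializers registered above.
payload = {"flag": np.bool_(True), "score": np.float64(0.75), "count": np.int64(3)}
print(jsons.dump(payload))  # expected: {'flag': True, 'score': 0.75, 'count': 3}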
    def tearDown(cls):
        jsons.set_serializer(jsons.default_primitive_serializer, Foo)
        jsons.set_deserializer(jsons.default_string_deserializer, Foo)
Example #13
        # check if no property path is overlapping
        def check_no_overlap() -> None:
            existing_complex = [c for c in self.kinds.values() if isinstance(c, ComplexKind)]
            update_complex = [c for c in to_update if isinstance(c, ComplexKind)]
            ex = {p.path: p for k in existing_complex for p in k.resolved_properties()}
            up = {p.path: p for k in update_complex for p in k.resolved_properties()}

            def simple_kind_incompatible(p: PropertyPath) -> bool:
                left = ex[p].kind
                right = up[p].kind
                return (left.fqn != right.fqn) and not (isinstance(left, AnyKind) or isinstance(right, AnyKind))

            # Filter out duplicates that have the same kind or any side is any
            non_unique = [a for a in ex.keys() & up.keys() if simple_kind_incompatible(a)]
            if non_unique:
                # PropertyPath -> str
                name_by_kind = {p.path: k.fqn for k in update_complex for p in k.resolved_properties()}
                message = ", ".join(f"{name_by_kind[a]}.{a} ({ex[a].kind.fqn} -> {up[a].kind.fqn})" for a in non_unique)
                raise AttributeError(
                    f"Update not possible: following properties would be non unique having "
                    f"the same path but different type: {message}"
                )

        check_no_overlap()
        return Model(updated)


# register serializer for this class
set_deserializer(Kind.from_json, Kind)
set_serializer(SimpleKind.to_json, SimpleKind)
Example #14
import os
from pathlib import Path
from typing import Any, List, Tuple, Union

import jsons
import torch
from numpy import ndarray

# Update serialization rules for the `jsons` module used by the `serialize` function (below).
jsons.set_serializer(lambda x, **kwargs: x.tolist(), torch.Tensor)
jsons.set_serializer(lambda x, **kwargs: x.tolist(), ndarray)
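# Hedged usage sketch (not in the original module): with the rules above, jsons.dump
# should flatten tensors and arrays into plain nested lists, e.g.:
example_dump = jsons.dump(torch.tensor([[1.0, 2.0], [3.0, 4.0]]))
# example_dump is expected to equal [[1.0, 2.0], [3.0, 4.0]]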


def to_tensor(x) -> torch.Tensor:
    if isinstance(x, torch.Tensor):
        return x
    elif isinstance(x, ndarray):
        return torch.from_numpy(x)
    elif isinstance(x, list) and isinstance(x[0], torch.Tensor):
        return torch.stack(x)
    else:
        return torch.tensor(x)


def save_gif(path, images: List[ndarray]) -> None:
    print(f"Saving as a gif to {path}")
    from PIL import Image
    imgs = [Image.fromarray(img[::2, ::2])
            for img in images]  # Reduce to 1/4 size: keep every other pixel in each dimension

    Path(os.path.dirname(path)).mkdir(parents=True, exist_ok=True)
Example #15
import jsons
import datetime
from utils import utils


# adding custom serializer / deserializer
jsons.set_deserializer(utils.date_deserializer, str)
jsons.set_serializer(utils.date_serializer, datetime.date)
jsons.set_serializer(utils.datetime_serializer, datetime.datetime)
Example #16
    """
    msg = QtWidgets.QMessageBox()
    q_string = "Rename all stations from {} to {} in...".format(old_name, new_name)
    msg.setText(q_string)
    msg.addButton(QtWidgets.QPushButton("Campaign"), 0)
    msg.addButton(QtWidgets.QPushButton("Survey"), 0)
    msg.addButton(QtWidgets.QPushButton("Loop"), 0)
    msg.addButton(QtWidgets.QPushButton("Just this station"), 0)
    msg.addButton(QtWidgets.QPushButton("Cancel"), 1)
    method = msg.exec_()
    methods = {0: "Campaign", 1: "Survey", 2: "Loop", 3: "Station", 4: "Cancel"}

    return methods[method]


def survey_serializer(obj, cls, **kwargs):
    """
    Handle serialization of ObsTreeSurvey via .to_json() method.
    """
    return obj.to_json()


def format_numeric_column(column):
    """
    Format fn for simple numeric columns.
    """
    return column + 1


jsons.set_serializer(survey_serializer, ObsTreeSurvey)
Example #17
        pass

    @abstractmethod
    async def update_configs_model(self, kinds: List[Kind]) -> Model:
        pass

    @abstractmethod
    def list_config_validation_ids(self) -> AsyncIterator[str]:
        pass

    @abstractmethod
    async def get_config_validation(self,
                                    cfg_id: str) -> Optional[ConfigValidation]:
        pass

    @abstractmethod
    async def put_config_validation(
            self, validation: ConfigValidation) -> ConfigValidation:
        pass

    @abstractmethod
    async def config_yaml(self,
                          cfg_id: ConfigId,
                          revision: bool = False) -> Optional[str]:
        pass


# register serializer for this class
set_deserializer(ConfigEntity.from_json, ConfigEntity)
set_serializer(ConfigEntity.to_json, ConfigEntity)
Example #18
    def dump_to_json(self):
        jsons.set_serializer(sml_val_list_entry_serializer, SmlValListEntry)
        return jsons.dumps(self, jdkwargs={"indent": 2, "ensure_ascii": False})
Example #19
        return Dataset(
            name=dataset_name,
            author=dct.get("author"),
            description=dct.get("description", ""),
            path_preprocessed=path_preprocessed,
            path_dist=downloaded_data_dir / dct["path"],
            dataloader=dataset_loader,
            preprocessor=dct["preprocessor"],
        )
    else:
        raise NotImplementedError()


jsons.set_deserializer(desearialize_dataset, Dataset)
jsons.set_serializer(Dataset.serealize, Dataset)


def get_all_linked_datasets(
        datasets: Dict[str, Dataset]) -> Dict[str, Dataset]:
    total = datasets.copy()
    for dataset_name, dataset in datasets.items():
        total.update({d.name: d for d in dataset.get_linked_datasets()})
    return total


def train_and_test(
        all_datasets: Dict[str, Dataset],
        label_column: str) -> Tuple[Dict[str, Dataset], Dict[str, Dataset]]:
    train, test = {}, {}
    for name, dataset in all_datasets.items():
Example #20
    def tearDownClass(cls):
        jsons.set_serializer(default_primitive_serializer, str)
        jsons.set_deserializer(default_string_deserializer, str)
Example #21
    for task_name, task_json in dct["tasks"].items():
        tasks[task_name] = jsons.load(
            task_json,
            Task,
            task_name=task_name,
            heuristic_path=path_config.heuristics,
            datasets=datasets,
        )
    return BohrRepo(
        dct["bohr_framework_version"],
        tasks,
        datasets,
        linkers,
    )


jsons.set_deserializer(deserialize_bohr_repo, BohrRepo)
jsons.set_serializer(BohrRepo.serealize, BohrRepo)


def load_bohr_repo(project_root: Optional[Path] = None) -> BohrRepo:
    project_root = project_root or find_project_root()
    bohr_repo = BohrRepo.load(project_root)

    version_installed = version()
    if str(bohr_repo.bohr_framework_version) != version_installed:
        raise EnvironmentError(
            f"Version of bohr framework from config: {bohr_repo.bohr_framework_version}. "
            f"Version of bohr installed: {version_installed}")
    return bohr_repo
Example #22
                f'{type(self).__name__} {self.config_version} '
                f'to {type(self).__name__} {CONFIG_VERSION} object')


def create_comet_experiment(run_id: str):
    experiment = Experiment()
    experiment.set_name(run_id)
    return experiment


@dataclass
class LMTrainingMetrics(object):
    bin_entropy: Optional[float] = None
    training_time_minutes_per_epoch: Optional[int] = None
    n_epochs: Optional[int] = None
    best_epoch: Optional[int] = None
    trainable_params: Optional[int] = None
    size_on_disk_mb: Optional[int] = None
    config_version: str = CONFIG_VERSION

    def __post_init__(self):
        if self.config_version != CONFIG_VERSION:
            raise TypeError(
                f'Trying to deserialize '
                f'{type(self).__name__} {self.config_version} '
                f'to {type(self).__name__} {CONFIG_VERSION} object')


jsons.set_serializer(LMTrainingConfig.serializer, cls=LMTrainingConfig)
jsons.set_deserializer(LMTrainingConfig.deserializer, cls=LMTrainingConfig)
Example #23
def compute_letter_alignement(f_let: Letter, s_let: Letter, x_stride: float,
                              data_dir: Path,
                              ligature_dir: Path) -> LigatureInfo:
    """TODO: what is compute_letter_alignement doing?
    """
    logg = logging.getLogger(f"c.{__name__}.compute_letter_alignement")
    logg.debug(
        f"Start compute_letter_alignement {f_let.letter} {s_let.letter}")

    # pick the correct align strategy

    # something like im or iv
    if f_let.right_type == "low_up" and s_let.left_type == "high_down":
        strategy = "align_letter_2"

        # the right side at the moment does not change, so get any one
        f_let_type = "alone"
        f_spline_seq = f_let.get_spline_seq(f_let_type)
        f_pf_name = f_let.get_pf(f_let_type).name
        f_hash_sha1 = f_let.get_hash(f_let_type)

        # we request the high because this *is* a high letter
        s_let_type = "high"
        s_spline_seq = s_let.get_spline_seq(s_let_type)
        s_pf_name = s_let.get_pf(s_let_type).name
        s_hash_sha1 = s_let.get_hash(s_let_type)

    # all other cases use align_letter_1
    else:
        strategy = "align_letter_1"

        # need to pick the correct version of the letters to join

        # the right side at the moment does not change, so get any one
        f_let_type = "alone"
        f_spline_seq = f_let.get_spline_seq(f_let_type)
        f_pf_name = f_let.get_pf(f_let_type).name
        f_hash_sha1 = f_let.get_hash(f_let_type)

        # look at the right of the first letter
        if f_let.right_type == "high_up":
            # use high version of the second letter
            s_let_type = "high"
        elif f_let.right_type == "low_up":
            # use low version of the second letter
            s_let_type = "low"

        # get relevant information
        s_spline_seq = s_let.get_spline_seq(s_let_type)
        s_pf_name = s_let.get_pf(s_let_type).name
        s_hash_sha1 = s_let.get_hash(s_let_type)

    # load, if available, the ligature for this
    ligature_pf = ligature_dir / f"{f_let.letter}{s_let.letter}.txt"
    if ligature_pf.exists():
        # load the saved LigatureInfo
        ci_load = jsons.loads(ligature_pf.read_text(), LigatureInfo)

        # decide if the ligature loaded is the same
        # logg.debug(f"ci_load: {ci_load!r}")
        logg.debug(f"ci_load: {ci_load}")
        if f_pf_name != ci_load.f_pf_name or s_pf_name != ci_load.s_pf_name:
            logg.debug(
                "The names in the loaded info are different from the current ones")
            equals = False
        elif f_let_type != ci_load.f_let_type or s_let_type != ci_load.s_let_type:
            logg.debug("The letter types in the loaded info are different")
            equals = False
        elif f_hash_sha1 != ci_load.f_hash_sha1 or s_hash_sha1 != ci_load.s_hash_sha1:
            logg.debug("The hash_sha1 values in the loaded info are different")
            equals = False
        else:
            logg.debug("The ligature is valid!")
            equals = True

        if equals:
            return ci_load

    if strategy == "align_letter_2":
        # load and compute
        spline_seq_con, shift, _ = align_letter_2(f_spline_seq, s_spline_seq,
                                                  x_stride)

        # there is no need to chop the last/first glyphs
        f_gly_chop = f_spline_seq[-1]
        s_gly_chop = s_spline_seq[0]

    else:
        # load and compute
        spline_seq_con, f_gly_chop, s_gly_chop, shift = align_letter_1(
            f_spline_seq, s_spline_seq, x_stride)

    con_info = LigatureInfo(
        f_pf_name=f_pf_name,
        s_pf_name=s_pf_name,
        f_let_type=f_let_type,
        s_let_type=s_let_type,
        spline_seq_con=spline_seq_con,
        f_gly_chop=f_gly_chop,
        s_gly_chop=s_gly_chop,
        shift=shift,
        f_hash_sha1=f_hash_sha1,
        s_hash_sha1=s_hash_sha1,
    )
    # logg.debug(f"con_info: {con_info!r}")
    logg.debug(f"con_info: {con_info}")

    # save the LigatureInfo
    jsons.set_serializer(serializer_oriented_point, OrientedPoint)
    ligature_info_encoded = jsons.dumps(con_info, indent=4)
    logg.debug(f"\nligature_info_encoded: {ligature_info_encoded}")

    with ligature_pf.open("w") as f_li:
        f_li.write(ligature_info_encoded)

    return con_info
Example #24
from typing import List, Union

import jsons

from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from PyQt5.QtCore import *


def my_custom_datetime_serializer(obj: QPoint, **kwargs) -> List[int]:
    return [obj.x(), obj.y()]


jsons.set_serializer(my_custom_datetime_serializer, QPoint)
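# Hedged usage sketch (not in the original file): with the serializer above registered,
# dumping a QPoint should yield its coordinates as a two-element list, e.g.:
example_point = jsons.dump(QPoint(3, 4))
# example_point is expected to equal [3, 4]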


class MainWindow(QMainWindow):
    def __init__(self):
        super().__init__()

        self.initUI()

        self.verts = []
        self.mouse_pos = QPoint()
        self.snap = 20
        self.setMouseTracking(True)

    def initUI(self):
        menubar = self.menuBar()
        fileMenu = menubar.addMenu('File')
Example #25
    Data type used to store experiences in experience buffers.
    """

    common_keys = ['state', 'action', 'reward', 'next_state', 'done']
    extra_keys = ['advantage', 'logprob', 'value', 'priority', 'index', 'weight', 'state_idx', 'next_state_idx']
    whitelist = common_keys + extra_keys

    def __init__(self, **kwargs):
        self.data = {}

        for (key, value) in kwargs.items():
            if key in Experience.whitelist:
                self.data[key] = value
                self.__dict__[key] = value  # TODO: Delete after checking that everything is updated

    def __eq__(self, o: object) -> bool:
        return isinstance(o, Experience) and self.data == o.data

    def get_dict(self, serialize=False) -> Dict[str, Any]:
        if serialize:
            return {k: to_list(v) for (k, v) in self.data.items()}
        return self.data


def exprience_serialization(obj: Experience, **kwargs) -> Dict[str, Any]:
    # return {k: to_list(v) for (k, v) in obj.data.items() if v is not None}
    return {k: jsons.dumps(v) for (k, v) in obj.data.items()}


jsons.set_serializer(exprience_serialization, Experience)
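A brief usage sketch (assumed, not part of the snippet above): because the serializer runs jsons.dumps on every value, each entry of the resulting dict is itself a JSON string. The Experience instance below is hypothetical.

# Hypothetical dump of an Experience through the serializer registered above.
exp = Experience(state=[1, 2], reward=1.0)
print(jsons.dump(exp))  # expected: {'state': '[1, 2]', 'reward': '1.0'}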
Example #26
        return first(relevant_ack, self.received_messages)

    def pending_action_for(self, subscriber: Subscriber) -> Optional[Action]:
        """
        In case this task is waiting for an action result from the given subscriber,
        the relevant action is returned.
        """
        state = self.current_state
        if isinstance(state, PerformActionState):
            message_type = state.perform.message_type
            subscriptions = state.wait_for
            if subscriber in subscriptions and self.ack_for(
                    message_type, subscriber) is None:
                return Action(message_type, self.id, state.step.name)
        return None

    def begin_step(self) -> None:
        log.info(f"Task {self.id}: begin step is: {self.current_step.name}")
        # update the step started time, whenever a new state is entered
        self.step_started_at = utc()
        self.current_state.step_started()


set_deserializer(StepAction.from_json, StepAction, high_prio=False)
set_deserializer(Trigger.from_json, Trigger, high_prio=False)
set_deserializer(TaskCommand.from_json, TaskCommand, high_prio=False)
set_deserializer(Job.from_json, Job)
set_serializer(Job.to_json, Job)
set_deserializer(Workflow.from_json, Workflow)
set_serializer(Workflow.to_json, Workflow)
Example #27
    # >>> jsons.loads('{"top_artifact": "artifacts.commit.Commit", "test_dataset_names": [], "train_dataset_names": []}', Task, project_root='/', task_name="x")
    # """
    test_datasets = {
        dataset_name: datasets[dataset_name]
        for dataset_name in dct["test_datasets"]
    }
    train_datasets = {
        dataset_name: datasets[dataset_name]
        for dataset_name in dct["train_datasets"]
    }
    try:
        artifact = artifact_map[dct["top_artifact"]]
    except KeyError:
        artifact = load_class_by_full_path(dct["top_artifact"])
    heuristic_groups = get_heuristic_module_list(artifact, heuristic_path)
    return Task(
        task_name,
        dct["author"] if "author" in dct else None,
        dct["description"] if "description" in dct else "",
        artifact,
        dct["label_categories"],
        _train_datasets=train_datasets,
        _test_datasets=test_datasets,
        label_column_name=dct["label_column_name"],
        heuristic_groups=heuristic_groups,
    )


jsons.set_deserializer(deserialize_task, Task)
jsons.set_serializer(Task.serealize, Task)
        self.link = link
        self.to = to

    def __str__(self):
        return f"{self.from_} -> {self.to}, linker: {self.link}"

    def serealize(self, **kwargs) -> Dict[str, Any]:
        dct = {"from": self.from_.name, "to": self.to.name}
        if self.link:
            dct["link"] = self.link.name
        return dct


def desearialize_linker(
    dct: Dict[str, Any],
    cls,
    datasets: Dict[str, Dataset],
    data_dir: RelativePath,
    **kwargs,
) -> "DatasetLinker":
    extras = {}
    if "link" in dct:
        extras["link"] = datasets[dct["link"]]
    return DatasetLinker(from_=datasets[dct["from"]],
                         to=datasets[dct["to"]],
                         **extras)


jsons.set_deserializer(desearialize_linker, DatasetLinker)
jsons.set_serializer(DatasetLinker.serealize, DatasetLinker)