Code Example #1
    def asdict(self):
        # Overriding because there are lazy fields in Data, so asdict is mutable and noncached.
        dic = self.field_funcs_m.copy()
        if self.hasinner:
            dic["inner"] = self.field_funcs_m["inner"].__name__ if islazy(self.inner) else self.inner.asdict
        if self.hasstream:
            dic["stream"] = "iterator"
        dic.update({
            "uuid": self.uuid,
            "uuids": self.uuids,
            "step": self.step_func_m.name if islazy(self.step_func_m) else self.step_func_m.asdict
        })
        return dic
Code Example #2
    def update(self, step, **fields):
        """Recreate an updated Data object.

        Parameters
        ----------
        step
            Step object that processes this Data object.
        fields
            Matrices or vector/scalar shortcuts to them.

        Returns
        -------
        New Data object (it keeps references to the old one for performance).
        """
        if not isinstance(step, Step):
            raise Exception("Step cannot be of type", type(step))
        if isinstance(step, Timeout):
            changed = ["timeout", "duration"]
        else:
            from aiuna.step.file import File
            from aiuna.step.new import New
            changed = []
            for field, value in fields.items():
                if value is not None:
                    if not islazy(value) and not isinstance(step, (File, New)):
                        raise Exception(f"{field} should be callable! Not:", type(value))
                    if field in self.triggers + ["changed"]:
                        raise Exception(f"'{field}' cannot be externally set! Step: " + step.longname)
                    changed.append(field)
        changed = sorted(changed)

        # Only update field uuids for the provided and changed ones!
        updated_fields = {"changed": changed}
        # REMINDER: the conversion moved from _update() to self[] (the new .field()) to reconcile laziness with accepting vectors and scalars.
        for k, v in fields.items():
            if v is not None:
                kup = k.upper() if len(k) == 1 else k
                if kup not in self.field_funcs_m or self.field_funcs_m[kup] is not v:
                    updated_fields[kup] = v
        uuid, uuids = evolve_id(self.uuid, self.uuids, step, updated_fields)

        newfields = self.field_funcs_m.copy()

        # Remove Nones.
        for k, v in fields.items():
            if v is None:
                del newfields[k]

        newfields.update(updated_fields)
        return Data(uuid, uuids, self.history << step, **newfields)
Code Example #3
File: timeout.py Project: davips/akangatu
    def _process_(self, data):
        # WARN: assumes 'duration' will be equal to 'limit'.
        newmatrices = {"timeout": True, "duration": self.limit}
        if self.field:
            if not islazy(data.matrices[self.field]):
                raise Exception(f"Inconsistency: specified lazy field {self.field} is expected to be callable.")
            # None here means: field unavailable due to previous problems.
            newmatrices[self.field] = None
        # TODO: let the target field have the worst possible value, since the step didn't finish.
        if self.comparable:
            newmatrices[data.comparable[0]] = -Inf

        return data.update(self, **newmatrices)
Code Example #4
    def __getitem__(self, key):
        """Safe access to a field, with a friendly error message.

        Parameters
        ----------
        key
            Name of the field.

        Returns
        -------
        Matrix, vector or scalar
        """
        # Format single-letter field according to capitalization.
        if len(key) == 1:
            kup = key.upper()
            if key.islower():
                return mat2vec(self[kup])
        else:
            kup = key

        # Is it an already evaluated field?
        if not islazy(self.field_funcs_m[kup]):
            return self.field_funcs_m[kup]

        # Is it a lazy field...
        #   ...from storage? Just call it, without timing or catching exceptions as failures.
        if "_from_storage_" in self.field_funcs_m[kup].__name__:
            self.field_funcs_m[kup] = field_as_matrix(key, self.field_funcs_m[kup]())
            return self.field_funcs_m[kup]

        #   ...yet to be processed?
        try:
            with self.time_limit(self.maxtime):
                t, value = self.time(lambda: field_as_matrix(key, self.field_funcs_m[kup]()))
                self._duration += t
                self.field_funcs_m[kup] = value
        except TimeoutException:
            self.mutate(self >> Timeout(self.maxtime))
        except Exception as e:
            print(self.name, "failure:", str(e))
            self._failure = self.step.translate(e, self)
            self.field_funcs_m[kup] = None  # REMINDER: None means interrupted.
        return self.field_funcs_m[kup]
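
The lowercase path above (return mat2vec(self[kup])) is the counterpart of the kup = k.upper() normalization in Code Example #2: single-letter fields are stored under their uppercase, matrix form and read back through the lowercase name as vectors. A minimal sketch of that pairing, assuming numpy-backed matrices and using hypothetical stand-ins for field_as_matrix and mat2vec (the real aiuna helpers may differ):

import numpy as np

def field_as_matrix(name, value):
    # Hypothetical stand-in: promote scalars/vectors to 2-D so every stored field is a matrix.
    return np.atleast_2d(value)

def mat2vec(matrix):
    # Hypothetical stand-in for the lowercase shortcut: flatten a matrix back to a vector.
    return np.asarray(matrix).ravel()

Y = field_as_matrix("Y", [0, 1, 1])  # stored under the uppercase name, shape (1, 3)
y = mat2vec(Y)                       # read back through the lowercase name, shape (3,)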
Code Example #5
    def __iter__(self):
        acc = self.start.copy()
        self.iterator = self.iterator() if islazy(self.iterator) else self.iterator
        try:
            for data in self.iterator:
                dic = {"data": data}
                if not self.stream_exception:
                    dic = self.step_func(data, acc)
                    # if step is XXXXX:
                    #     self.stream_exception = True
                    # else:
                    if "inc" in dic:
                        # REMINDER: doesn't need to be thread-safe, since processing of the iterator is always sequential.
                        acc.append(dic["inc"])
                yield dic["data"]
        finally:
            if not self.stream_exception:
                self._result = self.end_func(acc)
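
The generator above relies on a small protocol between step_func and end_func: each call returns a dict whose "data" item is yielded downstream and whose optional "inc" item is appended to the accumulator, which end_func folds once the stream is exhausted. A stripped-down sketch with hypothetical stand-ins (plain functions instead of the project's Step machinery):

def step_func(data, acc):
    # Hypothetical step: transform the item and report an increment for the accumulator.
    return {"data": data * 2, "inc": data}

def end_func(acc):
    # Hypothetical fold over the accumulated increments.
    return sum(acc)

acc, out = [], []
for data in [1, 2, 3]:
    dic = step_func(data, acc)
    if "inc" in dic:
        acc.append(dic["inc"])
    out.append(dic["data"])

result = end_func(acc)  # out == [2, 4, 6]; result == 6, available only after the stream is exhausted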
Code Example #6
File: storageinterface.py Project: davips/tatu
    def lamb():
        for k, v in field_funcs.items():
            if islazy(v):
                v = v()  # Cannot call data[k] due to infinite loop.
            # The old value may not be lazy, but the new one can be, due to this very lazystore.
            held_data.field_funcs_m[k] = v
            id = held_data.uuids[k].id
            if id in puts:
                if k != "inner":
                    # TODO/REMINDER: exceptionally two datasets can have some equal contents, like Xd;
                    #   so we send it again while the hash is not based on content.
                    self.putcontent(id, fpack(held_data, k), ignoredup=True)
        rows = [(held_data.id, fname, fuuid.id)
                for fname, fuuid in held_data.uuids.items()
                if fname != "inner"]
        self.putfields(rows)
        return held_data.field_funcs_m[name]
Code Example #7
File: storageinterface.py Project: davips/tatu
    def fetch(self, data, lock=False, lazy=True, ignorelock=False):
        # , recursive=True):   # TODO: think about whether include_empty=False makes sense.
        """Fetch the data object fields on-demand.

        data: uuid string or a (probably still not fully evaluated) Data object."""
        data_id = data if isinstance(data, str) else data.id
        # lst = []
        # print("LOGGING:::  Fetching...", data_id)
        # while True:
        try:
            ret = self.getdata(data_id, include_empty=True)
        except LockedEntryException:
            if not ignorelock:
                raise
            ret = None
            lock = False  # Already locked.

        if ret is None or not ret["uuids"]:
            # REMINDER: check_existence=False because the data may be empty
            # [and so that the Cache keeps working even if it was interrupted].
            if lock and not self.lock(data_id, check_existence=False):
                raise Exception("Could not lock data:", data_id)
            return

        dic = ret["uuids"].copy()
        if ret["stream"]:
            dic["stream"] = None
        if ret["inner"]:
            dic["inner"] = ret["inner"]

        fields = {} if isinstance(data, str) else data.field_funcs_m
        for field, fid in list(dic.items()):
            if field == "inner":
                # Bind fid as a default argument so the lazy call sees this iteration's value.
                fields[field] = lambda fid_=fid: self.fetch(fid_)
            elif field == "stream":
                fields[field] = lambda: self.fetchstream(data_id, lazy)
            elif field == "changed":
                fields[field] = unpack(self.getcontent(fid)) if isinstance(data, str) else data.changed
            elif field not in ["inner"] and (isinstance(data, str) or field in data.changed):
                if lazy:
                    fields[field] = (lambda fid_: lambda: unpack(self.getcontent(fid_)))(fid)
                else:
                    fields[field] = unpack(self.getcontent(fid))

            # Call each lambda by a friendly name.
            if lazy and field != "changed" and islazy(fields[field]):  # and field in data.field_funcs_m:
                fields[field].__name__ = "_" + fields[field].__name__ + "_from_storage_" + self.id

        if isinstance(data, str):
            # if lazy:
            #     print("Check whether lazy still fails to return history for data given by a uuid string")  # <-- TODO?
            history = self.fetchhistory(data)
        else:
            history = data.history
        # print("LOGGING:::  > > > > > > > > > fetched?", data_id, ret)
        return Data(UUID(data_id), {k: UUID(v) for k, v in ret["uuids"].items()}, history, **fields)
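
The __name__ tagging at the end of the loop above is what the fast path in Code Example #4 keys on ("_from_storage_" in ...__name__): fields restored from storage are evaluated without the timing and failure handling applied to ordinary lazy fields. A minimal sketch of the idea, with hypothetical getcontent/unpack stand-ins and a made-up storage id:

def make_storage_getter(getcontent, unpack, fid, storage_id="storage01"):
    # Hypothetical factory mirroring the lambda-naming trick above.
    getter = lambda: unpack(getcontent(fid))
    getter.__name__ = "_" + getter.__name__ + "_from_storage_" + storage_id
    return getter

f = make_storage_getter(lambda fid: b"packed-bytes", lambda blob: blob, "some-field-uuid")
print(f.__name__)                      # _<lambda>_from_storage_storage01
print("_from_storage_" in f.__name__)  # True -> evaluated without the timeout machinery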
Code Example #8
    def step(self):
        if islazy(self.step_func_m):
            self.step_func_m = self.step_func_m()
        return self.step_func_m
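
The examples above revolve around the same idiom: a slot holds either a concrete value or a zero-argument callable, islazy tells the two apart, and the first access replaces the callable with its result so later reads are cached. A self-contained sketch of that pattern, assuming a hypothetical islazy that simply checks for callability (the real aiuna helper may use a different marker):

def islazy(value):
    # Hypothetical stand-in: treat any zero-argument callable as a lazy field.
    return callable(value)

class LazyFields:
    def __init__(self, **fields):
        self._fields = fields  # concrete values or thunks

    def __getitem__(self, key):
        if islazy(self._fields[key]):
            # First access evaluates the thunk and caches the result, as in the examples above.
            self._fields[key] = self._fields[key]()
        return self._fields[key]

d = LazyFields(X=[[1, 2]], Y=lambda: [[0], [1]])
print(d["X"])  # already concrete
print(d["Y"])  # evaluated on first access, then cached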