예제 #1
0
class Comment(BaseComment):
    """Comment document extending BaseComment with ranking information."""

    # Embedded ranking data for this comment (stored under 'rank' in MongoDB).
    rank = fields.EmbeddedDocumentField(CommentRank, verbose_name='rank', mongo_name='rank', required=True)
    # Position of this comment in the ranked ordering (stored as 'rankPosition').
    rank_position = fields.FloatField(verbose_name='rank_position', mongo_name='rankPosition', default=0.0)
예제 #2
0
class EmpowerProject(Env):
    """Empower Project class.

    Attributes:
        owner: The username of the user that requested this pool
        desc: A human-readable description of the project
        wifi_props: The Wi-Fi properties
        lte_props: The LTE properties
        wifi_slices: The definition of the Wi-Fi slices
        lte_slices: The definition of the LTE slices

    The Wi-Fi properties are defined starting from a JSON document like the
    following:

    {
        "ssid": "EmPOWER",
        "allowed": {
            "04:46:65:49:e0:1f": {
                "addr": "04:46:65:49:e0:1f",
                "desc": "Some laptop"
            },
            "04:46:65:49:e0:11": {
                "addr": "04:46:65:49:e0:1f",
                "desc": "Some other laptop"
            },
            "04:46:65:49:e0:12": {
                "addr": "04:46:65:49:e0:1f",
                "desc": "Yet another laptop"
            }
        },
        "bssid_type": "unique"
    }

    The LTE properties are defined starting from a JSON document like the
    following:

    {
        "plmnid": "00101"
    }

    A Wi-Fi slice is defined starting from a JSON document like the
    following:

    {
        "slice_id": "0x42",
        "properties": {
            "amsdu_aggregation": true,
            "quantum": 12000,
            "sta_scheduler": 1
        }
    }

    The descriptor above will create a slice with id 0x42 on every WTP.

    In some cases it may be required to use different slice parameters only on
    certain WTPs. This can be done using a descriptor like the following:

    {
        "slice_id": "0x42",
        "properties": {
            "amsdu_aggregation": true,
            "quantum": 12000,
            "sta_scheduler": 1
        },
        "devices": {
            "00:0D:B9:2F:56:64": {
                "quantum": 15000
            }
        }
    }

    In this case the slice is still created on all the WTPs in the network,
    but some slice parameters are different for the specified nodes.

    Similarly, an LTE slice is defined starting from a JSON document like the
    following:

    {
        "slice_id": "0x42",
        "properties": {
            "rbgs": 5,
            "ue_scheduler": 1
        },
        "devices": {
            "aa:bb:cc:dd:ee:ff": {
                "rbgs": 2
            }
        }
    }
    """

    owner = fields.CharField(required=True)
    desc = fields.CharField(required=True)
    wifi_props = fields.EmbeddedDocumentField(EmbeddedWiFiProps)
    lte_props = fields.EmbeddedDocumentField(EmbeddedLTEProps)
    wifi_slices = WiFiSlicesDictField(required=False, blank=True)
    lte_slices = LTESlicesDictField(required=False, blank=True)

    def __init__(self, *args, **kwargs):

        super().__init__(*args, **kwargs)

        # Save pointer to ProjectManager
        self.manager = srv_or_die("projectsmanager")

    @property
    def vbses(self):
        """Return the VBSes."""

        return srv_or_die("vbspmanager").devices

    @property
    def wtps(self):
        """Return the WTPs."""

        return srv_or_die("lvappmanager").devices

    @property
    def users(self):
        """Return the UEs attached to this project's PLMN id."""

        if not self.lte_props:
            return {}

        return {k: v for k, v in srv_or_die("vbspmanager").users.items()
                if v.plmnid == self.lte_props.plmnid}

    @property
    def lvaps(self):
        """Return the LVAPs on this project's SSID."""

        if not self.wifi_props:
            return {}

        return {k: v for k, v in srv_or_die("lvappmanager").lvaps.items()
                if v.ssid == self.wifi_props.ssid}

    @property
    def vaps(self):
        """Return the VAPs on this project's SSID."""

        if not self.wifi_props:
            return {}

        return {k: v for k, v in srv_or_die("lvappmanager").vaps.items()
                if v.ssid == self.wifi_props.ssid}

    def load_service(self, service_id, name, params):
        """Load a service instance.

        The module 'name' must expose a 'launch' factory returning an EApp.

        Raises:
            ValueError: if the instantiated service is not an EApp.
        """

        init_method = getattr(import_module(name), "launch")
        service = init_method(context=self, service_id=service_id, **params)

        if not isinstance(service, EApp):
            raise ValueError("Service %s not EApp type" % name)

        return service

    def upsert_acl(self, addr, desc):
        """Insert or update an ACL entry and persist the change."""

        acl = ACL(addr=addr, desc=desc)

        self.wifi_props.allowed[str(acl.addr)] = acl

        self.save()

        return acl

    def remove_acl(self, addr=None):
        """Remove one ACL entry, or all of them when addr is None."""

        if addr:
            del self.wifi_props.allowed[str(addr)]
        else:
            # Iterate over a copy of the keys since we mutate the dict.
            for k in list(self.wifi_props.allowed.keys()):
                del self.wifi_props.allowed[k]

        self.save()

    def upsert_wifi_slice(self, **kwargs):
        """Upsert new Wi-Fi slice, pushing it to every block on every WTP."""

        slc = WiFiSlice(**kwargs)

        for wtp in self.wtps.values():
            for block in wtp.blocks.values():
                wtp.connection.send_set_slice(self, slc, block)

        self.wifi_slices[str(slc.slice_id)] = slc

        self.save()
        self.refresh_from_db()

        return slc.slice_id

    def upsert_lte_slice(self, **kwargs):
        """Upsert new LTE slice, pushing it to every cell on every VBS."""

        slc = LTESlice(**kwargs)

        for vbs in self.vbses.values():
            for cell in vbs.cells.values():
                vbs.connection.send_set_slice(self, slc, cell)

        self.lte_slices[str(slc.slice_id)] = slc

        self.save()
        self.refresh_from_db()

        return slc.slice_id

    def delete_wifi_slice(self, slice_id):
        """Delete a Wi-Fi slice from every block on every WTP.

        Raises:
            ValueError: if slice_id is "0" (the default slice).
        """

        if slice_id == "0":
            raise ValueError("Slice 0 cannot be deleted")

        slc = self.wifi_slices[slice_id]

        for wtp in self.wtps.values():
            for block in wtp.blocks.values():
                wtp.connection.send_del_slice(self, slc.slice_id, block)

        del self.wifi_slices[slice_id]

        self.save()
        self.refresh_from_db()

    def delete_lte_slice(self, slice_id):
        """Delete an LTE slice from every cell on every VBS.

        Raises:
            ValueError: if slice_id is "0" (the default slice).
        """

        if slice_id == "0":
            raise ValueError("Slice 0 cannot be deleted")

        slc = self.lte_slices[slice_id]

        for vbs in self.vbses.values():
            for cell in vbs.cells.values():
                vbs.connection.send_del_slice(self, slc.slice_id, cell)

        del self.lte_slices[slice_id]

        self.save()
        self.refresh_from_db()

    def to_dict(self):
        """Return JSON-serializable representation of the object."""

        output = super().to_dict()

        output['owner'] = self.owner

        output['desc'] = self.desc

        output['wifi_props'] = \
            self.wifi_props.to_dict() if self.wifi_props else None

        output['lte_props'] = \
            self.lte_props.to_dict() if self.lte_props else None

        output['wifi_slices'] = \
            self.wifi_slices if self.wifi_slices else None

        output['lte_slices'] = \
            self.lte_slices if self.lte_slices else None

        return output

    def get_prefix(self):
        """Return tenant prefix derived from the project id.

        The first 12 hex digits of project_id (presumably a UUID inherited
        from Env -- TODO confirm) are split into 6 octets.
        """

        tokens = [self.project_id.hex[0:12][i:i + 2] for i in range(0, 12, 2)]
        return EtherAddress(':'.join(tokens))

    def generate_bssid(self, mac):
        """Generate a new BSSID: tenant prefix OUI + last 3 octets of mac."""

        base_mac = self.get_prefix()

        base = str(base_mac).split(":")[0:3]
        # Clear the least significant bit of the first octet (unicast address).
        unicast_addr_mask = int(base[0], 16) & 0xFE
        base[0] = str(format(unicast_addr_mask, 'X'))
        suffix = str(mac).split(":")[3:6]

        return EtherAddress(":".join(base + suffix))
예제 #3
0
class Project(Env):
    """Project class.

    Attributes:
        owner: The username of the user that requested this pool
        wifi_props: The Wi-Fi properties
        lte_props: The LTE properties
        wifi_slices: The definition of the Wi-Fi slices
        lte_slices: The definition of the LTE slices

    The Wi-Fi properties are defined starting from a JSON document like the
    following:

    {
        "ssid": "EmPOWER",
        "allowed": ["11:22:33:44:55:66", "aa:bb:cc:dd:ee:ff"],
        "bssid_type": "unique"
    }

    The LTE properties are defined starting from a JSON document like the
    following:
    {
        "plmn_id": "222f93"
    }

    A Wi-Fi slice is defined starting from a JSON document like the
    following:

    {
        "slice_id": "0x42",
        "properties": {
            "amsdu_aggregation": true,
            "quantum": 12000,
            "sta_scheduler": 1
        }
    }

    The descriptor above will create a slice with id 0x42 on every WTP.

    In some cases it may be required to use different slice parameters only on
    certain WTPs. This can be done using a descriptor like the following:

    {
        "slice_id": "0x42",
        "properties": {
            "amsdu_aggregation": true,
            "quantum": 12000,
            "sta_scheduler": 1
        },
        "devices": {
            "00:0D:B9:2F:56:64": {
                "quantum": 15000
            }
        }
    }

    In this case the slice is still created on all the WTPs in the network,
    but some slice parameters are different for the specified nodes.

    Similarly, an LTE slice is defined starting from a JSON document like the
    following:

    {
        "slice_id": "0x42",
        "properties": {
            "rbgs": 5,
            "ue_scheduler": 1
        },
        "devices": {
            "aa:bb:cc:dd:ee:ff": {
                "rbgs": 2
            }
        }
    }
    """

    owner = fields.CharField(required=True)
    desc = fields.CharField(required=True)
    wifi_props = fields.EmbeddedDocumentField(EmbeddedWiFiProps)
    lte_props = fields.EmbeddedDocumentField(EmbeddedLTEProps)
    wifi_slices = WiFiSlicesDictField(required=False, blank=True)
    lte_slices = LTESlicesDictField(required=False, blank=True)

    def __init__(self, *args, **kwargs):

        super().__init__(*args, **kwargs)

        # Save pointer to LVAPPManager
        self.lvapp_manager = \
            srv_or_die("empower.managers.ranmanager.lvapp.lvappmanager")

        # Save pointer to VBSPManager
        self.vbsp_manager = \
            srv_or_die("empower.managers.ranmanager.vbsp.vbspmanager")

    def upsert_wifi_slice(self, **kwargs):
        """Upsert new slice.

        The slice is pushed to every block on every WTP, then persisted.
        """

        slc = WiFiSlice(**kwargs)

        for wtp in self.lvapp_manager.devices.values():
            for block in wtp.blocks.values():
                wtp.connection.send_set_slice(self, slc, block)

        self.wifi_slices[str(slc.slice_id)] = slc

        self.save()
        self.refresh_from_db()

        return slc.slice_id

    def upsert_lte_slice(self, **kwargs):
        """Upsert new slice.

        The slice is pushed to every cell on every VBS, then persisted.
        """

        slc = LTESlice(**kwargs)

        for vbs in self.vbsp_manager.devices.values():
            for cell in vbs.cells.values():
                vbs.connection.send_set_slice(self, slc, cell)

        self.lte_slices[str(slc.slice_id)] = slc

        self.save()
        self.refresh_from_db()

        return slc.slice_id

    def delete_wifi_slice(self, slice_id):
        """Delete slice.

        Raises:
            ValueError: if slice_id is "0" (the default slice).
        """

        if slice_id == "0":
            raise ValueError("Slice 0 cannot be deleted")

        slc = self.wifi_slices[slice_id]

        for wtp in self.lvapp_manager.devices.values():
            for block in wtp.blocks.values():
                wtp.connection.send_del_slice(self, slc.slice_id, block)

        del self.wifi_slices[slice_id]

        self.save()
        self.refresh_from_db()

    def delete_lte_slice(self, slice_id):
        """Delete slice.

        Raises:
            ValueError: if slice_id is "0" (the default slice).
        """

        if slice_id == "0":
            raise ValueError("Slice 0 cannot be deleted")

        slc = self.lte_slices[slice_id]

        for vbs in self.vbsp_manager.devices.values():
            for cell in vbs.cells.values():
                vbs.connection.send_del_slice(self, slc.slice_id, cell)

        del self.lte_slices[slice_id]

        self.save()
        self.refresh_from_db()

    @property
    def vbses(self):
        """Return the VBSes."""

        return self.vbsp_manager.devices

    @property
    def ueqs(self):
        """Return the UEs attached to this project's PLMN id."""

        if not self.lte_props:
            return {}

        ueqs = {
            k: v
            for k, v in self.vbsp_manager.ueqs.items()
            if v.plmnid == self.lte_props.plmnid
        }

        return ueqs

    @property
    def wtps(self):
        """Return the WTPs."""

        return self.lvapp_manager.devices

    @property
    def lvaps(self):
        """Return the LVAPs on this project's SSID."""

        if not self.wifi_props:
            return {}

        lvaps = {
            k: v
            for k, v in self.lvapp_manager.lvaps.items()
            if v.ssid == self.wifi_props.ssid
        }

        return lvaps

    @property
    def vaps(self):
        """Return the VAPs on this project's SSID."""

        if not self.wifi_props:
            return {}

        vaps = {
            k: v
            for k, v in self.lvapp_manager.vaps.items()
            if v.ssid == self.wifi_props.ssid
        }

        return vaps

    def to_dict(self):
        """Return JSON-serializable representation of the object."""

        output = super().to_dict()

        output['owner'] = self.owner

        output['desc'] = self.desc

        output['wifi_props'] = \
            self.wifi_props.to_dict() if self.wifi_props else None

        output['lte_props'] = \
            self.lte_props.to_dict() if self.lte_props else None

        output['wifi_slices'] = \
            self.wifi_slices if self.wifi_slices else None

        output['lte_slices'] = \
            self.lte_slices if self.lte_slices else None

        return output

    def get_prefix(self):
        """Return tenant prefix.

        Built from the first 12 hex digits of project_id (presumably a UUID
        inherited from Env -- TODO confirm), split into 6 octets.
        """

        tokens = [self.project_id.hex[0:12][i:i + 2] for i in range(0, 12, 2)]
        return EtherAddress(':'.join(tokens))

    def generate_bssid(self, mac):
        """ Generate a new BSSID address.

        The BSSID is the tenant prefix OUI followed by the last three octets
        of mac.
        """

        base_mac = self.get_prefix()

        base = str(base_mac).split(":")[0:3]
        # Clear the least significant bit of the first octet (unicast).
        unicast_addr_mask = int(base[0], 16) & 0xFE
        base[0] = str(format(unicast_addr_mask, 'X'))
        suffix = str(mac).split(":")[3:6]

        return EtherAddress(":".join(base + suffix))
예제 #4
0
class User(MongoModel):
    """Application user stored in the 'users' collection.

    Passwords are never stored in clear text: userPassword is a write-only
    property that hashes into passwordHash via bcrypt.
    """

    userId = fields.IntegerField(primary_key=True, required=True)
    userName = fields.CharField(required=True)
    email = fields.EmailField()
    passwordHash = fields.CharField()
    savedData = fields.EmbeddedDocumentListField(SavedInfo, blank=True)
    gameLogs = fields.ListField(field=fields.EmbeddedDocumentField('gameLogs'),
                                blank=True)
    dateRegistered = fields.DateTimeField()
    admin = fields.BooleanField(default=False)

    @property
    def userPassword(self):
        # Only the hash is stored; reading back the password is an error.
        raise AttributeError('userPassword: write-only field')

    @userPassword.setter
    def userPassword(self, userPassword):
        # Hash the password with bcrypt before storing it.
        self.passwordHash = flask_bcrypt.generate_password_hash(
            userPassword).decode('utf-8')

    def check_userPassword(self, userPassword):
        """Return True iff userPassword matches the stored bcrypt hash."""
        return flask_bcrypt.check_password_hash(self.passwordHash,
                                                userPassword)

    def encode_auth_token(self, user_id):
        """
        Generates the Auth Token (JWT, HS256, valid for 1 day and 5 seconds)
        :param user_id: value stored in the token's 'sub' claim
        :return: string
        """
        try:
            payload = {
                'exp':
                datetime.datetime.utcnow() +
                datetime.timedelta(days=1, seconds=5),
                'iat':
                datetime.datetime.utcnow(),
                'sub':
                user_id
            }
            return jwt.encode(payload, key, algorithm='HS256')
        except Exception as e:
            # NOTE(review): returning the exception object instead of raising
            # is unusual -- callers must type-check the result.
            return e

    @staticmethod
    def decode_auth_token(auth_token):
        """
        Decodes the auth token
        :param auth_token: Authorization header value ("<scheme> <token>")
        :return: integer|string
        """
        try:
            # PyJWT expects a list of accepted algorithms.
            payload = jwt.decode(auth_token.split()[1],
                                 key,
                                 algorithms=['HS256'])
            is_blacklisted_token = BlacklistToken.check_blacklist(auth_token)
            if is_blacklisted_token:
                return 'Token blacklisted. Please log in again.'
            return payload['sub']
        except jwt.ExpiredSignatureError:
            return 'Signature expired. Please log in again.'
        except jwt.InvalidTokenError:
            return 'Invalid token. Please log in again.'

    def __repr__(self):
        # Bug fix: the closing '>' was missing from the format string.
        return "<user '{}'>".format(self.userName)

    class Meta:
        indexes = [
            IndexModel([('userName', 1)], unique=True),
            IndexModel([('email', 1)], unique=True)
        ]  # this is how unique fields are declared

        # Without this, a separate "User" collection would be created.
        collection_name = 'users'
        final = True  # do not store the _cls discriminator field
예제 #5
0
class TransportationTask(Task):
    """Task with transportation (pickup) constraints."""

    constraints = fields.EmbeddedDocumentField(TransportationTaskConstraints)

    objects = TaskManager()

    @classmethod
    def create_new(cls, **kwargs):
        """Create and save a new task.

        When no constraints are given, a default pickup window of one minute
        starting now is used.
        """
        if 'constraints' not in kwargs:
            pickup = TimepointConstraint(earliest_time=datetime.now(),
                                         latest_time=datetime.now() +
                                         timedelta(minutes=1))
            temporal = TransportationTemporalConstraints(
                pickup=pickup, duration=InterTimepointConstraint())
            kwargs.update(constraints=TransportationTaskConstraints(
                temporal=temporal))
        task = super().create_new(**kwargs)
        task.save()
        return task

    @classmethod
    def from_request(cls, request):
        """Build a task from a transportation request object."""
        pickup = TimepointConstraint(
            earliest_time=request.earliest_pickup_time,
            latest_time=request.latest_pickup_time)
        temporal = TransportationTemporalConstraints(
            pickup=pickup, duration=InterTimepointConstraint())
        constraints = TransportationTaskConstraints(
            hard=request.hard_constraints, temporal=temporal)
        task = cls.create_new(request=request.request_id,
                              constraints=constraints)
        return task

    def archive(self):
        """Move this task to the archive collection and delete the original."""
        with switch_collection(TransportationTask,
                               Task.Meta.archive_collection):
            super().save()
        self.delete()

    @property
    def duration(self):
        return self.constraints.temporal.duration

    def update_duration(self, mean, variance):
        """Update the duration constraint and persist the task."""
        self.duration.update(mean, variance)
        self.save()

    @property
    def pickup_constraint(self):
        return self.constraints.temporal.pickup

    def update_pickup_constraint(self, earliest_time, latest_time):
        """Update the pickup time window and persist the task."""
        self.pickup_constraint.update(earliest_time, latest_time)
        self.save()

    @classmethod
    def get_earliest_task(cls, tasks=None):
        """Return the task with the earliest pickup time, or None if empty.

        :param tasks: iterable of tasks; defaults to all tasks in the manager.
        """
        if tasks is None:
            tasks = list(cls.objects.all())
        # min() with default=None also handles the empty-collection case.
        return min(tasks,
                   key=lambda task: task.pickup_constraint.earliest_time,
                   default=None)
예제 #6
0
class Version(EmbeddedMongoModel):
    """Embedded document pairing a robot's hardware and software versions."""

    # Hardware configuration (see RobotHardware).
    hardware = fields.EmbeddedDocumentField(RobotHardware)
    # Software stack (see SoftwareStack).
    software = fields.EmbeddedDocumentField(SoftwareStack)
예제 #7
0
class DSO(VisionSystem):
    """
    Python wrapper for Direct Sparse Odometry (DSO)
    See https://github.com/JakobEngel/dso
    Bound to python using SWIG
    """
    # How input images are rectified before being fed to DSO.
    rectification_mode = EnumField(RectificationMode, required=True)
    # Target intrinsics after rectification; fx/fy/cx/cy are only used in
    # CALIB mode (see the columns below and get_property).
    rectification_intrinsics = fields.EmbeddedDocumentField(CameraIntrinsics, required=True)

    # Available output columns; entries mapped to None are resolved from the
    # trial settings at query time (see get_property).
    columns = ColumnList(
        rectification_mode=attrgetter('rectification_mode'),
        undistort_mode=None,
        in_height=None,
        in_width=None,
        in_fx=None,
        in_fy=None,
        in_cx=None,
        in_cy=None,

        in_p1=None,
        in_p2=None,
        in_k1=None,
        in_k2=None,

        out_width=attrgetter('rectification_intrinsics.width'),
        out_height=attrgetter('rectification_intrinsics.height'),
        # Output focal/principal parameters are only meaningful in CALIB mode.
        out_fx=lambda obj: obj.rectification_intrinsics.fx
        if obj.rectification_mode is RectificationMode.CALIB else float('nan'),
        out_fy=lambda obj: obj.rectification_intrinsics.fy
        if obj.rectification_mode is RectificationMode.CALIB else float('nan'),
        out_cx=lambda obj: obj.rectification_intrinsics.cx
        if obj.rectification_mode is RectificationMode.CALIB else float('nan'),
        out_cy=lambda obj: obj.rectification_intrinsics.cy
        if obj.rectification_mode is RectificationMode.CALIB else float('nan')
    )

    def __init__(self, *args, **kwargs):
        """Initialise the DSO wrapper; all per-trial state starts unset."""
        super(DSO, self).__init__(*args, **kwargs)

        # Camera configuration, provided later via set_camera_intrinsics.
        self._intrinsics = None
        self._framerate = 30
        self._has_photometric_calibration = False

        # Native DSO objects, created in start_trial and torn down in
        # finish_trial.
        self._undistorter = None
        self._output_wrapper = None
        self._system = None

        # Per-trial bookkeeping (timing and per-frame results).
        self._start_time = None
        self._image_index = 0
        self._frame_results = None
        self._processing_start_times = None

    @classmethod
    def is_deterministic(cls) -> StochasticBehaviour:
        """
        DSO is deterministic with multi-threading disabled.
        There is a bug in one of the multi-threaded accumulates that is order important,
        so without it, the system is deterministic.

        :return: StochasticBehaviour.DETERMINISTIC
        """
        return StochasticBehaviour.DETERMINISTIC

    def is_image_source_appropriate(self, image_source: ImageSource) -> bool:
        """
        Is the dataset appropriate for testing this vision system.
        The source must be a sequential image sequence, and when no
        rectification is configured the camera resolution must pass
        check_resolution.

        :param image_source: The source for images that this system will potentially be run with.
        :return: True iff the particular dataset is appropriate for this vision system.
        :rtype: bool
        """
        return image_source.sequence_type == ImageSequenceType.SEQUENTIAL and (
            self.rectification_mode is not RectificationMode.NONE or
            check_resolution(image_source.camera_intrinsics)
        )

    def get_columns(self) -> typing.Set[str]:
        """
        Get the set of available properties for this system. Pass these to "get_properties", below.
        :return: the set of column names
        """
        return {column_name for column_name in self.columns.keys()}

    def get_properties(self, columns: typing.Iterable[str] = None,
                       settings: typing.Mapping[str, typing.Any] = None) -> typing.Mapping[str, typing.Any]:
        """
        Get the values of the requested properties
        :param columns: The columns to get the values of; defaults to all columns
        :param settings: The settings stored in the trial result.
        :return: a mapping from column name to value
        """
        requested = self.columns.keys() if columns is None else columns
        trial_settings = {} if settings is None else settings
        properties = {}
        for col_name in requested:
            # Silently skip names that are not known columns.
            if col_name in self.columns:
                properties[col_name] = self.get_property(col_name, trial_settings)
        return properties

    def get_property(self, column_name: str, settings: typing.Mapping[str, typing.Any]):
        """
        Get the value of a particular column on this model, given some settings.
        Used in get_properties, to handle various special cases.
        :param column_name: the name of the column to read
        :param settings: settings stored in the trial result, consulted before the column list
        :return: the column value
        """
        # Bug fix: string comparison must use '==', not 'is' (identity of
        # string literals is implementation-dependent and raises a
        # SyntaxWarning on modern Python).
        if column_name == 'rectification_mode':
            return self.rectification_mode
        elif self.rectification_mode != RectificationMode.CALIB and \
                column_name in {'out_fx', 'out_fy', 'out_cx', 'out_cy'}:
            # Output intrinsics are only defined for CALIB rectification.
            return float('nan')
        elif column_name in settings:
            return settings[column_name]
        return self.columns.get_value(self, column_name)

    def set_camera_intrinsics(self, camera_intrinsics: CameraIntrinsics, average_timestep: float) -> None:
        """
        Set the intrinsics of the camera.
        Ignored once a trial has started (the native system is already built).
        :param camera_intrinsics: A metadata.camera_intrinsics.CameraIntriniscs object
        :param average_timestep: The average time interval between frames, used to derive the framerate
        :return:
        """
        if self._system is not None:
            return
        self._intrinsics = camera_intrinsics
        self._framerate = 1 / average_timestep

    def start_trial(self, sequence_type: ImageSequenceType, seed: int = 0) -> None:
        """
        Start a trial with this system.
        After calling this, we can feed images to the system.
        When the trial is complete, call finish_trial to get the result.
        :param sequence_type: Are the provided images part of a sequence, or just unassociated pictures.
        :param seed: A random seed. Not used, but may be given.
        :return: void
        :raises RuntimeError: if the sequence is not sequential or intrinsics are missing
        """
        if sequence_type is not ImageSequenceType.SEQUENTIAL:
            raise RuntimeError("Cannot start trial with {0} image source".format(sequence_type.name))
        if self._intrinsics is None:
            raise RuntimeError("Cannot start trial, intrinsics have not been provided yet")

        self._frame_results = {}
        self._processing_start_times = {}

        # Figure out mode and preset for DSO
        # mode:
        #   mode = 0 - use iff a photometric calibration exists(e.g.TUM monoVO dataset).
        #   mode = 1 - use iff NO photometric calibration exists(e.g.ETH EuRoC MAV dataset).
        #   mode = 2 - use iff images are not photometrically distorted(e.g.syntheticdatasets).
        # preset:
        #   preset = 0 - default settings (2k pts etc.), not enforcing real - time execution
        #   preset = 1 - default settings (2k pts etc.), enforcing 1x real - time execution
        # WARNING: These two overwrite image resolution with 424 x 320.
        #   preset = 2 - fast settings (800 pts etc.), not enforcing real - time execution.
        #   preset = 3 - fast settings (800 pts etc.), enforcing 5x real - time execution
        mode = 1
        preset = 0

        dso_configure(preset=preset, mode=mode, quiet=True, nolog=True)

        # Build the undistorter, this will preprocess images and remove distortion
        if self.rectification_mode is RectificationMode.NONE:
            # For no undistortion, simply pass through, out resolution is always
            self._undistorter = make_undistort_from_mode(
                self._intrinsics, self.rectification_mode, self._intrinsics.width, self._intrinsics.height)
        elif self.rectification_mode is RectificationMode.CALIB:
            # CALIB rectification uses the full intrinsics
            self._undistorter = make_undistort_from_out_intrinsics(self._intrinsics, self.rectification_intrinsics)
        else:
            # Otherwise, build an undistorter that crops to the configured fixed resolution
            self._undistorter = make_undistort_from_mode(
                self._intrinsics, self.rectification_mode,
                self.rectification_intrinsics.width, self.rectification_intrinsics.height
            )
        # Bug fix: compare integers with '!=', never 'is not' (identity of
        # small-int literals is implementation-dependent).
        if mode != 0:
            self._undistorter.setNoPhotometricCalibration()
        self._undistorter.applyGlobalConfig()   # Need to do this to set camera intrinsics

        # Make an output wrapper to accumulate output information
        self._output_wrapper = DSOOutputWrapper()

        # Build the system itself.
        self._system = DSOSystem()
        self._system.outputWrapper.append(self._output_wrapper)

        # Record the trial start time once everything is built (a redundant
        # earlier assignment was removed; this value is the one finish_trial
        # uses for run_time).
        self._start_time = time.time()
        self._image_index = 0

    def process_image(self, image: Image, timestamp: float) -> None:
        """
        Process an image as part of the current run.
        Should automatically start a new trial if none is currently started.
        :param image: The image object for this frame
        :param timestamp: A timestamp or index associated with this image. Sometimes None.
        :return: void
        :raises RuntimeError: if start_trial has not been called
        """
        if self._undistorter is None:
            raise RuntimeError("Cannot process image, trial has not started yet. Call 'start_trial'")
        # Convert to greyscale uint8 and undistort before feeding DSO.
        image_data = image_utils.to_uint_image(image_utils.convert_to_grey(image.pixels))
        dso_img = self._undistorter.undistort_greyscale(image_data, 0, timestamp, 1.0)
        # Record the start time immediately before handing the frame to the
        # native system, so finish_trial can compute per-frame processing time.
        self._processing_start_times[timestamp] = time.time()
        self._system.addActiveFrame(dso_img, self._image_index)
        self._image_index += 1

        # Placeholder result; estimated pose, processing time, and tracking
        # state are filled in by finish_trial from the output wrapper.
        self._frame_results[timestamp] = FrameResult(
            timestamp=timestamp,
            image=image.pk,
            pose=image.camera_pose,
            tracking_state=TrackingState.NOT_INITIALIZED,
            processing_time=np.nan
        )

    def finish_trial(self) -> SLAMTrialResult:
        """
        End the current trial, returning a trial result.
        Return none if no trial is started.
        :return:
        :rtype TrialResult:
        :raises RuntimeError: if start_trial has not been called
        """
        if self._system is None:
            raise RuntimeError("Cannot finish trial, no trial started. Call 'start_trial'")

        # Wait for the system to finish
        self._system.blockUntilMappingIsFinished()

        # Collate the frame results
        unrecognised_timestamps = set()
        for timestamp, trans, rot, finish_time in self._output_wrapper.frame_deltas:
            if timestamp in self._frame_results:
                self._frame_results[timestamp].estimated_pose = make_pose(trans, rot)
                self._frame_results[timestamp].processing_time = finish_time - self._processing_start_times[timestamp]
                self._frame_results[timestamp].tracking_state = TrackingState.OK
            else:
                # Timestamp from DSO that was never fed via process_image;
                # warn with the closest known timestamp for diagnosis.
                unrecognised_timestamps.add(timestamp)
        if len(unrecognised_timestamps) > 0:
            valid_timestamps = np.array(list(self._frame_results.keys()))
            logging.getLogger(__name__).warning("Got inconsistent timestamps:\n" + '\n'.join(
                '{0} (closest was {1})'.format(
                    unrecognised_timestamp,
                    _find_closest(unrecognised_timestamp, valid_timestamps)
                )
                for unrecognised_timestamp in unrecognised_timestamps
            ))

        # Organize the tracking state, it is NOT_INITIALIZED until we are first found, then it is LOST
        found = False
        for timestamp in sorted(self._frame_results.keys()):
            if self._frame_results[timestamp].tracking_state is TrackingState.OK:
                found = True
            elif found and self._frame_results[timestamp].tracking_state is TrackingState.NOT_INITIALIZED:
                self._frame_results[timestamp].tracking_state = TrackingState.LOST

        # Clean up native objects so a new trial can be started.
        self._undistorter = None
        self._system = None
        self._output_wrapper = None

        result = SLAMTrialResult(
            system=self.pk,
            success=len(self._frame_results) > 0,
            results=[self._frame_results[timestamp]
                     for timestamp in sorted(self._frame_results.keys())],
            has_scale=False,
            settings=self.make_settings()
        )
        result.run_time = time.time() - self._start_time
        self._frame_results = None
        self._start_time = None
        return result

    def make_settings(self):
        """
        Build a flat dict describing this system's camera configuration.

        Records the rectification mode, whether the input camera model is a
        plain pinhole (all distortion coefficients zero) or radial-tangential,
        the full input intrinsics, the output resolution, and — for CALIB
        rectification only — the output focal lengths and principal point.
        :return: A dict of primitive settings values.
        """
        intrinsics = self._intrinsics
        is_distortion_free = (
            intrinsics.k1 == 0 and intrinsics.k2 == 0 and
            intrinsics.p1 == 0 and intrinsics.p2 == 0
        )
        settings = {
            'rectification_mode': self.rectification_mode.name,
            'undistort_mode': "Pinhole" if is_distortion_free else "RadTan",
            'in_width': intrinsics.width,
            'in_height': intrinsics.height,
            'in_fx': intrinsics.fx,
            'in_fy': intrinsics.fy,
            'in_cx': intrinsics.cx,
            'in_cy': intrinsics.cy,
            'in_p1': intrinsics.p1,
            'in_p2': intrinsics.p2,
            'in_k1': intrinsics.k1,
            'in_k2': intrinsics.k2
        }
        # Without rectification the output keeps the input resolution;
        # otherwise it is the configured rectification target resolution.
        if self.rectification_mode is RectificationMode.NONE:
            output_source = intrinsics
        else:
            output_source = self.rectification_intrinsics
        settings['out_width'] = output_source.width
        settings['out_height'] = output_source.height
        if self.rectification_mode is RectificationMode.CALIB:
            settings['out_fx'] = self.rectification_intrinsics.fx
            settings['out_fy'] = self.rectification_intrinsics.fy
            settings['out_cx'] = self.rectification_intrinsics.cx
            settings['out_cy'] = self.rectification_intrinsics.cy
        return settings

    @classmethod
    def get_instance(
            cls,
            rectification_mode: RectificationMode = None,
            rectification_intrinsics: CameraIntrinsics = None
    ) -> 'DSO':
        """
        Get an instance of this vision system, with some parameters, pulling from the database if possible,
        or construct a new one if needed.
        It is the responsibility of subclasses to ensure that as few instances of each system as possible exist
        within the database.
        Does not save the returned object, you'll usually want to do that straight away.
        :param rectification_mode: The rectification mode to search for (required).
        :param rectification_intrinsics: The target rectification intrinsics (required).
        :return: An existing matching instance, or a new unsaved one.
        :raises ValueError: If either argument is missing, or the requested
            rectification resolution fails check_resolution.
        """
        if rectification_mode is None:
            raise ValueError("Cannot search for DSO without rectification mode")
        if rectification_intrinsics is None:
            raise ValueError("Cannot search for DSO without intrinsics")
        if rectification_mode is not RectificationMode.NONE and not check_resolution(rectification_intrinsics):
            # Check the resolution we're rectifying to. If it will be invalid, raise an exception
            raise ValueError(f"Cannot {rectification_mode.name} to resolution "
                             f"{rectification_intrinsics.width}x{rectification_intrinsics.height}, it is invalid")
        # Look for existing objects with the same settings
        query = {
            'rectification_mode': rectification_mode.name,
            'rectification_intrinsics.width': rectification_intrinsics.width,
            'rectification_intrinsics.height': rectification_intrinsics.height
        }
        if rectification_mode is RectificationMode.CALIB:
            # When using CALIB rectification, the other intrinsics matter
            query['rectification_intrinsics.fx'] = rectification_intrinsics.fx
            query['rectification_intrinsics.fy'] = rectification_intrinsics.fy
            query['rectification_intrinsics.cx'] = rectification_intrinsics.cx
            query['rectification_intrinsics.cy'] = rectification_intrinsics.cy
        # Bug fix: search on cls rather than the hard-coded DSO class, so a
        # subclass calling get_instance finds/builds instances of its own type
        # (the docstring explicitly anticipates subclasses).
        all_objects = cls.objects.raw(query)
        if all_objects.count() > 0:
            return all_objects.first()
        # There isn't an existing system with those settings, make a new one.
        obj = cls(
            rectification_mode=rectification_mode,
            rectification_intrinsics=rectification_intrinsics
        )
        return obj
# Example #8
class S_properties(EmbeddedMongoModel):
    """Embedded container wrapping a sample's submission info document."""
    # Submission metadata for the sample; see S_info for its fields.
    sample_info = fields.EmbeddedDocumentField(S_info)
# Example #9
class NetworkInterfaceObject(MongoModel):
    """Document pairing a string key with an embedded InterfaceKey."""
    key = fields.CharField()
    # NOTE(review): using an embedded document as the primary key is unusual —
    # presumably InterfaceKey's fields uniquely identify the interface; confirm.
    properties = fields.EmbeddedDocumentField(InterfaceKey, primary_key=True)
# Example #10
class Sample(MongoModel):
    """A LIMS sample: submission properties, batch/plate placement, and
    per-workflow step results, keyed by a unique barcode."""
    # Unique sample identifier (unique index declared in Meta below).
    barcode = fields.CharField(required=True)
    tags = fields.ListField(fields.ReferenceField(Tag), blank=True)
    # Submission metadata wrapper (S_properties.sample_info).
    properties = fields.EmbeddedDocumentField(S_properties, required=True)
    # Workflow-batch membership entries, including plate position.
    batches = fields.EmbeddedDocumentListField(Batch)
    # Step results nested as workflow name -> step category -> result list
    # (see assign_workflow/finish_step for how this is populated).
    workflows = MapField(
        MapField(fields.EmbeddedDocumentListField(WorkflowResults)))
    submitted_on = fields.DateTimeField(required=True)
    archived = fields.BooleanField(required=True, default=False)
    comments = fields.CharField(default="", blank=True, required=True)

    class Meta:
        # Journaled writes; unique barcode index plus a composite text index
        # backing the search box.
        write_concern = WriteConcern(j=True)
        indexes = [
            IndexModel([("barcode", 1)], unique=True),
            IndexModel(
                [("barcode", TEXT),
                 ("properties.sample_info.summary.name", TEXT),
                 ("properties.sample_info.summary.submitted_species_name",
                  TEXT), ("properties.sample_info.summary.emails", TEXT),
                 ("properties.sample_info.summary.group", TEXT),
                 ("workflows.*.root.0.sample.batch_name", TEXT)],
                name="textindex")
        ]

    @staticmethod
    def plate_size(plate):
        """
        Map a plate type identifier to its number of wells.

        :param plate: Plate type name (e.g. "96plate").
        :return: The well count for known plate types, otherwise None.
        """
        well_counts = {"96plate": 96}
        return well_counts.get(plate)

    @staticmethod
    def _validate_type(type_, value):
        """
        Validate a value against the named type rule.

        String rules coerce the value with str() first and reject it when it
        contains any character outside the rule's allowed set. Returns None
        for unrecognised type names (implicit fallthrough, as before).
        """
        if type_ == "boolean":
            return isinstance(value, bool)
        # Character-class rules: valid iff no forbidden character appears.
        forbidden_chars = {
            "basicalphanum": r'[^A-Za-z0-9]',
            "alphanum": r'[^A-Za-z0-9_\-]',
            "species": r'[^A-Za-z\. ]',
            "barcode": r'[^A-Za-z0-9_\-]',
        }
        if type_ in forbidden_chars:
            return re.search(forbidden_chars[type_], str(value)) is None
        if type_ == "email":
            # from emailregex.com
            return re.search(
                r"(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)",
                str(value)) is not None

    @staticmethod
    def validate_field(field, value):
        """
        Returns false if the field is not valid.

        Dispatches each editable sample field to the matching _validate_type
        rule; "emails" is split on separators and every address validated.
        Unknown field names are rejected.
        :param field: Name of the field being validated.
        :param value: Candidate value (usually a string from user input).
        :return: True if valid, False otherwise.
        """
        if field == "species":
            return True  # valid if in species list.
        elif field == "group":  # This for now, but should be validated in full list of groups.
            return Sample._validate_type("alphanum", value)
        elif field in ("name", "sampleid", "barcode", "batch_name"):
            return Sample._validate_type("barcode", value)
        elif field == "emails":
            # Bug fix: raw string for the pattern — "[;,\s]+" contains the
            # invalid escape sequence \s in a plain string literal
            # (DeprecationWarning today, a SyntaxError in future Pythons).
            email_list = re.split(r"[;,\s]+", value)
            for e in email_list:
                if not Sample._validate_type("email", e):
                    return False
            return True
        elif field == "archived":
            if value == "True":
                value = True
            elif value == "False":
                value = False
            return Sample._validate_type("boolean", value)
        elif field == "organism":
            return Sample._validate_type("species", value)
        elif field == "priority":
            try:
                return 1 <= int(value) <= 4
            except ValueError:
                return False
        elif field == "tags":
            return Tag.validate_field(value)
        elif field in [
                "comments", "costcenter", "submission_comments", "supplyinglab"
        ]:
            # Always valid
            return True
        else:
            return False

    @staticmethod
    def columns(template, species_options=None, custom_mapping=None):
        """
        Return the column descriptors for a datatable template.

        :param template: Which table layout to return: "submit" (sample
            submission spreadsheet) or "sample_list_view" (editable list).
        :param species_options: Options for the species select column
            (used by "sample_list_view" only).
        :param custom_mapping: Optional mapping of lowercase column name to
            replacement name, used to rename "submit" columns per lab.
        :return: A list of column descriptor dicts.
        """
        from minilims.models.species import Species
        columns = {
            "submit": [
                {
                    "data": "SampleID",
                    "unique": True,
                    "required": True
                },
                {
                    "data": "Barcode",
                    "unique": True,
                    "required": True
                },
                {
                    "data": "Organism",
                    "type": "select",
                    "options": Species.get_name_list(alias=True),
                    "required": True
                },
                {
                    "defaultContent": "",
                    "data": "Emails"
                },
                {
                    "defaultContent": "",
                    "data": "Priority",
                    "type": "select",
                    "options": ["low", "high"],
                    "required": True,
                },
                # {
                #     "defaultContent": "",
                #     "data": "SupplyDate",
                # },
                {
                    "defaultContent": "",
                    "data": "Costcenter"
                },
                {
                    "data": "PlateName",
                    "defaultContent": "",
                },
                {
                    "data": "WellPositionInSuppliedPlate",
                    "defaultContent": "",
                },
                {
                    "defaultContent": "",
                    "data": "Comments",
                    "type": "textarea"
                }
            ],
            "sample_list_view": [
                {
                    "data": "none",
                    "type": "hidden"
                },
                {
                    "data": "barcode",
                    "title": "barcode",
                    "readonly": "true",
                    "unique": "true",
                    "name": "barcode",
                },
                {
                    "data":
                    "tags",
                    "title":
                    "tags",
                    "type":
                    "select",
                    "multiple":
                    "true",
                    # Options are the ids of every existing Tag document.
                    "options":
                    [str(x.pk) for x in Tag.objects.project({
                        "_id": 1
                    }).all()],
                    "name":
                    "tags"
                },
                {
                    "data": "submitted_on",
                    "title": "Submission date",
                    "readonly": "true",
                    "name": "submitted_on"
                },
                {
                    "data": "name",
                    "title": "SampleID",
                    "name": "name"
                },
                {
                    "data": "priority",
                    "title": "Priority",
                    "type": "select",
                    "options": current_app.config["PRIORITY"],
                    "name": "priority",
                },
                {
                    "data": "species",
                    "title": "species",
                    "type": "select",
                    "options": species_options,
                    "name": "species"
                },
                {
                    "data": "group",
                    "title": "supplying_lab",
                    "name": "group"
                },
                {
                    "data": "costcenter",
                    "title": "Cost Center",
                    "name": "costcenter"
                },
                {
                    "data": "batch",
                    "title": "batch",
                    # "type": "select",
                    # "multiple": "true",
                    "readonly": "true",
                    "name": "batch"
                },
                {
                    "data": "genome_size",
                    "title": "Genome size",
                    "readonly": "true",
                    "name": "genome_size",
                },
                {
                    "data": "supplied_plate_name",
                    "title": "Supplied Plate Name",
                    "readonly": "true",
                    "name": "supplied_plate_name"
                },
                {
                    "data": "position_in_supplied_plate",
                    "title": "Position in Supplied Plate",
                    "readonly": "true",
                    "name": "position_in_supplied_plate"
                },
                {
                    "data": "submission_comments",
                    "title": "Submission Comments",
                    "readonly": "true",
                    "name": "submission_comments"
                },
                {
                    "data": "comments",
                    "title": "Comments",
                    "name": "comments"
                },
                {
                    "data": "archived",
                    "title": "archived",
                    "type": "select",
                    "options": ["True", "False"],
                    "name": "archived"
                }
            ]
        }
        # Rename submit columns according to the lab-specific mapping.
        if template == "submit" and custom_mapping is not None:
            customized_columns = []
            for coldata in columns[template]:
                data_l = coldata["data"].lower()
                # change only if in mapping
                coldata["data"] = custom_mapping.get(data_l, coldata["data"])
                # NOTE(review): "title" defaults to the already-remapped
                # "data" value from the line above — presumably intentional so
                # the title tracks the renamed column; confirm.
                coldata["title"] = custom_mapping.get(data_l, coldata["data"])
                customized_columns.append(coldata)
            return customized_columns

        return columns[template]

    @classmethod
    def searchbox_suggestion(cls, query):
        """
        Return up to 10 sample barcodes matching the query, for the search
        box autocomplete.

        The query must pass barcode validation (which also keeps regex
        metacharacters out of the $regex); otherwise an empty list is
        returned.
        """
        if not cls.validate_field("barcode", query):
            return []
        matches = cls.objects.raw(
            {"barcode": {"$regex": query}}
        ).project({"barcode": 1, "_id": 0}).limit(10)
        return [match.barcode for match in matches]

    @classmethod
    def get_batch_names(cls, query=None, workflow_name=None):
        """
        List batch names.

        With no query, returns every distinct batch name across all samples.
        Otherwise returns the distinct batch names attached to the named
        workflow (empty list if the workflow doesn't exist or the query is
        not a valid barcode fragment).

        NOTE(review): `query` is validated as a barcode but never used to
        filter the aggregation — it only gates whether anything is returned.
        Confirm whether a $match on the barcode was intended.
        """
        if query is None:
            return cls.objects.raw(
                {})._collection.distinct("batches.batch_name")
        else:
            try:
                workflow = Workflow.objects.get({"name": workflow_name})
            except errors.DoesNotExist:
                workflow = None
            if cls.validate_field("barcode", query) and workflow is not None:
                # Unwind each sample's batches and collect the distinct batch
                # names belonging to this workflow.
                batches = cls.objects.aggregate(
                    {"$unwind": {
                        "path": "$batches"
                    }}, {"$match": {
                        "batches.workflow": workflow._id
                    }}, {"$group": {
                        "_id": "$batches.batch_name"
                    }})
                return [x["_id"] for x in batches]
            else:
                return []

    @classmethod
    def get_batch_overview(cls):
        """
        Build a per-workflow overview of all non-archived batches.

        Aggregates samples by workflow/batch/step to count samples at each
        step category, joins in the workflow documents, then post-processes
        the result into per-batch step entries (with display names) and a
        [finished, total] progress pair per batch.
        :return: A list of workflow overview dicts.
        """
        overview_query = list(
            # Pipeline: unwind batch entries, count samples per
            # (batch, workflow, step, plate type), then roll up per batch and
            # per workflow, and $lookup the workflow document for its name.
            cls.objects.aggregate({"$match": {
                "archived": False
            }}, {"$unwind": {
                "path": "$batches"
            }}, {
                "$group": {
                    "_id": {
                        "batch_name": "$batches.batch_name",
                        "workflow_id": "$batches.workflow",
                        "step_cat": "$batches.step_cat",
                        "plate_type": "$batches.position.plate_type"
                    },
                    "count": {
                        "$sum": 1.0
                    },
                    "batch_created_on": {
                        "$min": "$batches.batch_created_on"
                    }
                }
            }, {
                "$group": {
                    "_id": {
                        "batch_name": "$_id.batch_name",
                        "workflow_id": "$_id.workflow_id",
                        "plate_type": "$_id.plate_type"
                    },
                    "count": {
                        "$sum": "$count"
                    },
                    "batch_created_on": {
                        "$min": "$batch_created_on"
                    },
                    "steps": {
                        "$push": "$$ROOT"
                    }
                }
            }, {
                "$group": {
                    "_id": "$_id.workflow_id",
                    "batches": {
                        "$push": "$$ROOT"
                    },
                    "count": {
                        "$sum": "$count"
                    }
                }
            }, {
                "$lookup": {
                    "from": "workflow",
                    "localField": "_id",
                    "foreignField": "_id",
                    "as": "workflow"
                }
            }, {
                "$project": {
                    "_id": 1,
                    "batches._id.batch_name": 1,
                    "batches._id.plate_type": 1,
                    "batches._id.step_cat": 1,
                    "batches.count": 1,
                    "batches.batch_created_on": 1,
                    "count": 1,
                    "batches.steps.count": 1,
                    "batches.steps._id.step_cat": 1,
                    "name": {
                        "$arrayElemAt": ["$workflow.name", 0]
                    }
                }
            }))
        for workflow in overview_query:
            # Dict to cache step names to avoid multiple queries for the same name.
            step_cache = {}
            workflow_db = Workflow.objects.get({"_id": workflow["_id"]})
            workflow["_id"] = str(workflow["_id"])
            workflow["display_name"] = workflow_db.display_name
            for batch in workflow["batches"]:
                # Walk the workflow's step definitions in order, pairing each
                # with the aggregated counts (batch["steps"] is ordered too).
                step_i = 0
                step_entries = []
                # The first step of the list that has partial count will be finished,
                # the rest will be partially finished so this should be false for them.
                first_finished_true = False
                if batch["steps"][step_i]["_id"]["step_cat"] == "root":
                    step_entries.append({
                        "step_name":
                        "root",
                        "step_d_name":
                        "Assigned",
                        "count":
                        batch["steps"][step_i]["count"],
                        "finished":
                        True
                    })
                    step_i += 1
                    first_finished_true = True
                # [finished steps, total steps] pair rendered as a progress bar.
                steps_progress = [0, len(workflow_db.steps)]
                for step in workflow_db.steps:
                    # Check if step is in dict cache before querying
                    if step.pk not in step_cache:
                        step_cache[str(step.pk)] = {
                            "d_name": step.display_name,
                            "cat": step.category,
                            "name": step.name
                        }
                    step_data = step_cache[str(step.pk)]

                    if step_i < len(batch["steps"]) and batch["steps"][step_i][
                            "_id"]["step_cat"] == step_data["cat"]:
                        if not first_finished_true:
                            finished = True
                            first_finished_true = True
                            steps_progress[0] += 1
                        else:
                            finished = False
                        step_entries.append({
                            "step_d_name":
                            step_data["d_name"],
                            "step_name":
                            step_data["name"],
                            "count":
                            batch["steps"][step_i]["count"],
                            "finished":
                            finished
                        })
                        step_i += 1
                    else:
                        # No samples currently at this step in this batch.
                        if not first_finished_true:
                            finished = True
                            steps_progress[0] += 1
                        else:
                            finished = False
                        step_entries.append({
                            "step_d_name": step_data["d_name"],
                            "step_name": step_data["name"],
                            "count": 0,
                            "finished": finished
                        })
                batch["steps"] = step_entries
                batch["progress"] = steps_progress

        # overview_query = list(overview_query)
        return overview_query

    @classmethod
    def get_unassigned(cls, count=False, group=None):
        """
        Query samples that belong to no batch (missing or empty "batches"),
        optionally restricted to one supplying-lab group.

        :param count: When True return only the number of matching samples.
        :param group: Optional group name to filter on.
        :return: A queryset of samples, or an int when count is True.
        """
        criteria = {
            "$or": [
                {"batches": {"$size": 0}},
                {"batches": {"$exists": False}},
            ]
        }
        if group is not None:
            criteria["properties.sample_info.summary.group"] = group
        matching = cls.objects.raw(criteria)
        return matching.count() if count else matching

    @classmethod
    def get_archived(cls, count=False, group=None):
        """
        Query archived samples, optionally restricted to one supplying-lab
        group.

        :param count: When True return only the number of matching samples.
        :param group: Optional group name to filter on.
        :return: A queryset of samples, or an int when count is True.
        """
        criteria = {"archived": True}
        if group is not None:
            criteria["properties.sample_info.summary.group"] = group
        matching = cls.objects.raw(criteria)
        return matching.count() if count else matching

    @classmethod
    def get_plate_view(cls,
                       workflow,
                       batch_name,
                       plate=None,
                       barcode_only=False):
        """
        Generate plate view object including where samples are.

        :param workflow: Workflow document the batch belongs to.
        :param batch_name: Name of the batch to lay out.
        :param plate: Plate type; when None it is inferred from the samples
            in the batch (which must all agree).
        :param barcode_only: When True, cells hold barcodes instead of
            Sample objects.
        :return: Dict with "plate" (8x12 grid of cells), "free_spots"
            (empty well indexes) and "plate_type"; None for unsupported
            plate types.
        :raises ValueError: If samples in the batch disagree on plate type.
        """

        if plate is None:
            samples = list(
                cls.objects.raw({
                    "batches": {
                        "$elemMatch": {
                            "batch_name": batch_name,
                            "workflow": workflow.pk,
                            "archived": False
                        }
                    }
                }))

            for sample in samples:
                pt = sample.get_batches(workflow.name,
                                        batch_name)[0].position.plate_type
                if plate is None:
                    plate = pt
                else:
                    if plate != pt:
                        # Bug fix: the second fragment was a plain string, so
                        # the {workflow.name}/{batch_name} placeholders were
                        # never interpolated into the error message.
                        raise ValueError((
                            "Some samples belong to different plates in the same batch."
                            f" Workflow: {workflow.name}. Batch: {batch_name}"))

        if plate == "96plate":
            plate_view = {
                "plate":
                [[None for i in range(12)] for j in range(8)],  # List per row
                "free_spots": [],
                "plate_type": plate
            }
            taken_spots = [False] * 96
            for sample in cls.objects.raw({
                    "batches": {
                        "$elemMatch": {
                            "batch_name": batch_name,
                            "workflow": workflow.pk,
                            "archived": False
                        }
                    }
            }):
                # Place each sample at its recorded well, tracking occupancy.
                pos = sample.get_batches(workflow.name, batch_name)[0].position
                taken_spots[pos.index] = True
                coord = pos.get_coordinates(True)
                if barcode_only:
                    plate_view["plate"][coord[0]][coord[1]] = sample.barcode
                else:
                    plate_view["plate"][coord[0]][coord[1]] = sample
            for i in range(len(taken_spots)):
                if not taken_spots[i]:
                    plate_view["free_spots"].append(i)
        else:
            return None
        return plate_view

    # @classmethod
    # def get_step_table(cls, sample_ids):

    #     db = connection._get_db()
    #     samples = list(db[cls._mongometa.collection_name].find({
    #         "_id": {"$in": sample_ids}
    #     },{
    #         "barcode": 1,
    #         "comments": 1,
    #         "batches": 1,
    #         "properties.sample_info.summary."
    #         "_id": 0
    #     }))
    #     return samples

    def update(self, field, new_value):
        """
        Apply an edit to one editable sample field (does not save).

        Most fields live under properties.sample_info.summary; "archived",
        "comments" and "tags" are stored directly on the sample.
        :param field: Name of the field to change.
        :param new_value: The new value (a Species document for "species",
            a list of tag ids for "tags", a string otherwise).
        :raises ValueError: For unrecognised field names.
        """
        # Fields stored verbatim on the summary embedded document.
        summary_attrs = ("group", "name", "priority", "costcenter")
        if field == "species":
            summary = self.properties.sample_info.summary
            summary.submitted_species = new_value
            summary.submitted_species_name = new_value.name
        elif field in summary_attrs:
            setattr(self.properties.sample_info.summary, field, new_value)
        elif field == "archived":
            self.archived = new_value.lower() == "true"
        elif field == "comments":
            self.comments = new_value
        elif field == "tags":
            self.tags = Tag.objects.raw({"_id": {"$in": new_value}})
        else:
            raise ValueError("Field not valid")

    def update_last_workflow(self, workflow, batch_name, step_cat):
        """
        Record step_cat as the current step category on every batch entry
        matching the given workflow and batch name (no save).
        """
        matching = (entry for entry in self.batches
                    if entry.workflow == workflow
                    and entry.batch_name == batch_name)
        for entry in matching:
            entry.step_cat = step_cat

    def assign_workflow(self, workflow, batch_name, index, plate_type,
                        prev_step_cat):
        """
        Place this sample into a workflow batch at a plate position and save.

        If the sample already has an entry for this workflow/batch it is
        revived (un-archived) and repositioned; otherwise a new Batch entry
        is appended and, for a new workflow or a "root" assignment, an
        initial finished placeholder result is seeded into self.workflows.
        """
        batches = self.get_batches(workflow.name, batch_name)
        if len(batches) > 0:
            batch = batches[0]  # Assuming only one archived batch can be here.
            batch.archived = False
            batch.step_cat = prev_step_cat
            batch.position = PositionInPlate(plate_type=plate_type,
                                             index=index)
        else:
            wlf = Batch(workflow=workflow,
                        step_cat=prev_step_cat,
                        batch_name=batch_name,
                        batch_created_on=datetime.datetime.now(),
                        position=PositionInPlate(plate_type=plate_type,
                                                 index=index),
                        archived=False)
            self.batches.append(wlf)
            if workflow.name not in self.workflows or prev_step_cat == "root":
                # Seed an already-"finished" placeholder result so downstream
                # step accounting has a starting entry for this batch.
                step_i = WorkflowResults(parent=None,
                                         sample={},
                                         start_date=datetime.datetime.now(),
                                         finish_date=datetime.datetime.now(),
                                         status="finished",
                                         step_instance=None,
                                         batch_name=batch_name,
                                         index=index)
                if workflow.name not in self.workflows:
                    self.workflows[workflow.name] = {prev_step_cat: [step_i]}
                else:
                    self.workflows[workflow.name][prev_step_cat] = [step_i]
        self.save()

    def reorganize(self, workflow, batch_name, new_index):
        """
        Move this sample to a new plate position within an existing workflow
        batch, then save.

        :raises ValueError: If the sample has no entry for that
            workflow/batch combination.
        """
        matching = self.get_batches(workflow.name, batch_name)
        if not matching:
            raise ValueError(
                "Sample tried to be reorganized into a workflow-batch that doesn't exist."
            )
        matching[0].position.index = new_index
        self.save()

    def unassign_workflow(self, workflow, batch_name):
        """
        Sets the batch as archived (soft removal from the workflow batch),
        then saves the sample.
        """
        for entry in self.batches:
            matches = (entry.workflow == workflow
                       and entry.batch_name == batch_name)
            if matches:
                entry.archived = True
        self.save()

    def get_batches(self, workflow_name, batch_name=None):
        """
        Return this sample's batch entries for a workflow, optionally
        restricted to a single batch name.

        :param workflow_name: Name of the workflow to match.
        :param batch_name: Optional batch name; when None, all of the
            workflow's batch entries are returned.
        :return: A list of matching Batch embedded documents.
        """
        def matches(entry):
            if entry.workflow.name != workflow_name:
                return False
            return batch_name is None or entry.batch_name == batch_name

        return [entry for entry in self.batches if matches(entry)]

    def finish_step(self, step_instance, save):
        """
        Updates batches

        Marks this sample's result entry for the given step instance as
        finished, stores the step's results, advances the batch's current
        step category, and finishes the workflow when there is no next step.
        :param step_instance: The step instance being completed.
        :param save: Whether to persist the sample after updating.
        :return: The recommended next step name (or "_workflow_finished").
        """
        step_cat = step_instance.step.category
        workflow_name, prev_step = self.get_prev_step(step_cat)
        workflow = Workflow.objects.get({"name": workflow_name})
        self.update_last_workflow(workflow, step_instance.batch, step_cat)
        # Find the result entry created for this exact step instance and
        # stamp it with the finish time, status, and the step's results.
        for i in range(len(self.workflows[workflow_name][step_cat])):
            instance = self.workflows[workflow_name][step_cat][i]
            if step_instance == instance.step_instance:
                self.workflows[workflow_name][step_cat][
                    i].finish_date = datetime.datetime.now()
                self.workflows[workflow_name][step_cat][i].status = "finished"
                self.workflows[workflow_name][step_cat][
                    i].all = step_instance.result_all
                self.workflows[workflow_name][step_cat][
                    i].sample = step_instance.result_samples[self.barcode]
        if save:
            self.save()
        recommended_next = workflow.next_step(step_instance.step.name)
        if recommended_next == "_workflow_finished":
            self.finish_workflow(workflow, step_instance.batch, step_cat)
        return recommended_next

    def find_workflow_for_step(self, step_cat):
        """
        Returns a list of workflows for which this sample contains a result
        for the given step category. Random order.
        """
        return [
            workflow_name
            for workflow_name, steps in self.workflows.items()
            if step_cat in steps
        ]

    def _find_valid_prev_steps(self, next_step):
        """
        Return (workflow_name, prev_step_cat) pairs for every non-archived
        batch entry of this sample whose workflow allows moving from its
        current step category to next_step.
        """
        candidates = []
        for entry in self.batches:
            if entry.archived:
                continue
            if entry.workflow.valid_next_step(entry.step_cat, next_step):
                candidates.append((entry.workflow.name, entry.step_cat))
        return candidates

    def get_prev_step(self, step_name):
        """
        Given a step name return prev step and workflow name. Use this instead of _find_valid_prev_steps.
        Returns (workflow_name, step_name)

        :raises ValueError: When no workflow of this sample allows the step.
        """
        candidates = self._find_valid_prev_steps(step_name)
        if not candidates:
            raise ValueError(
                "No workflow available for sample {} and step {}.".format(
                    self.barcode, step_name))
        if len(candidates) > 1:
            # Ambiguous: log and fall through to the first candidate.
            current_app.logger.warning(
                "Sample {} can init step {} in more than one workflow: {}. Choosing first"
                .format(self.barcode, step_name, candidates))
        return candidates[0]

    def valid_next_step(self, next_step, batch_name):
        """
        Return True if `next_step` is a valid next step for this sample
        within the given active (non-archived) batch, False otherwise.
        """
        return any(
            batch.workflow.valid_next_step(batch.step_cat, next_step)
            for batch in self.batches
            if not batch.archived and batch.batch_name == batch_name
        )

    def send_to_step(self, step_name, workflow_name=None):
        """
        Move this sample so that `step_name` becomes its next step.

        When `workflow_name` is omitted it is resolved from the workflows
        this sample already has results in; ambiguity resolves to the
        first match (with a warning).

        Raises:
            ValueError: when no suitable workflow exists for the step.
        """
        if workflow_name is None:
            step = Step.objects.get({"name": step_name})
            candidates = self.find_workflow_for_step(step.category)
            if not candidates:
                raise ValueError(
                    "No workflow available for sample {} and step {}.".format(
                        self.barcode, step_name))
            if len(candidates) > 1:
                current_app.logger.warning(
                    "Sample {} can init step {} in more than one workflow: {}. Choosing first"
                    .format(self.barcode, step_name, candidates))
            workflow_name = candidates[0]
        workflow = Workflow.objects.get({"name": workflow_name})
        prev_step = workflow.get_prev_step(step_name)
        if prev_step is None:
            raise ValueError(
                "No workflow available for sample {} and step {}.".format(
                    self.barcode, step_name))
        self.update_last_workflow(workflow, prev_step)
        self.save()

    def init_step(self, step_instance):
        """
        Initialises step in sample. If step can belong to more than
        one workflow the sample is in, throw an error.

        Records a new "started" WorkflowResults attempt under the step's
        category, linked to the latest attempt of the previous step.
        Returns the step instance's batch name.
        """
        # Resolve workflow + previous step; raises ValueError when there is
        # none, warns and picks the first when ambiguous.
        workflow_name, prev_step = self.get_prev_step(
            step_instance.step.category)
        prev_step_attempts = self.workflows[workflow_name][prev_step]
        # Parent pointer references the latest attempt of the previous step.
        instance_index = len(prev_step_attempts) - 1
        batch = self.get_batches(workflow_name, step_instance.batch)
        step_result_sample = WorkflowResults(
            parent="{}.{}.{}".format(workflow_name, prev_step, instance_index),
            status="started",
            step_instance=step_instance._id,
            start_date=datetime.datetime.now(),
            batch_name=step_instance.batch,
            # Plate position of this sample within the first matching batch.
            index=batch[0].position.index)
        # Append as a new attempt under this step's category.
        step_attempts = self.workflows[workflow_name].get(
            step_instance.step.category, [])
        step_attempts.append(step_result_sample)
        self.workflows[workflow_name][
            step_instance.step.category] = step_attempts
        self.save()

        return step_instance.batch

    def result_chain(self, chain, exit_match=None):
        """
        Extend `chain` (a list whose last element is a WorkflowResults) with
        its ancestors by following `parent` references ("wf.step.attempt")
        back towards the root. Mutates and returns `chain`.

        exit_match: optional (workflow_name, step_cat); walking stops as
        soon as an entry from that workflow/step has been appended.
        """
        while True:
            last = chain[-1]
            if last.parent is None:
                return chain
            wf_name, cat, attempt = last.parent.split(".")
            chain.append(self.workflows[wf_name][cat][int(attempt)])
            if exit_match is not None and \
                    exit_match[0] == wf_name and exit_match[1] == cat:
                return chain

    def find_result(self, workflow_name, step_cat, scope, field_name,
                    step_instance):
        """
        Look up a value produced earlier in this sample's result chain.

        Walks back from the attempt belonging to `step_instance` to the
        (workflow_name, step_cat) entry and returns `field_name` from that
        entry's `scope` attribute ("sample"/"all").

        Raises:
            minilims.errors.MissingValueError: when the attempt or the
                value cannot be found.
        """
        root = None
        self.refresh_from_db()  # Required: in-memory workflows may be stale.
        # Keep the LAST matching attempt (several attempts may exist).
        for attempt in self.workflows[workflow_name][
                step_instance.step.category]:
            if attempt.step_instance == step_instance:
                root = attempt
        # Previously a missing attempt fell through to result_chain([None])
        # and crashed with an unrelated AttributeError.
        if root is None:
            raise minilims.errors.MissingValueError(
                "Value (w) {} (s) {} (sc) {} (f) {} for barcode {} not found in results."
                .format(workflow_name, step_cat, scope, field_name,
                        self.barcode))
        chain = self.result_chain([root], (workflow_name, step_cat))
        try:
            return getattr(chain[-1], scope)[field_name]
        except KeyError as exc:
            raise minilims.errors.MissingValueError(
                "Value (w) {} (s) {} (sc) {} (f) {} for barcode {} not found in results."
                .format(workflow_name, step_cat, scope, field_name,
                        self.barcode)) from exc

    def finish_workflow(self, workflow, batch_name, prev_step_cat):
        """
        Append a synthetic "_workflow_finished" result after the latest
        attempt of `prev_step_cat`, move the sample's batch to that
        terminal step, and persist.
        """
        terminal_cat = "_workflow_finished"
        prev_attempts = self.workflows[workflow.name][prev_step_cat]
        result = WorkflowResults(
            parent="{}.{}.{}".format(workflow.name, prev_step_cat,
                                     len(prev_attempts) - 1),
            status="finished",
            step_instance=None,
            all={},
            sample={},
            start_date=datetime.datetime.now(),
            finish_date=datetime.datetime.now(),
            batch_name=batch_name)
        attempts = self.workflows[workflow.name].get(terminal_cat, [])
        attempts.append(result)
        self.workflows[workflow.name][terminal_cat] = attempts
        self.update_last_workflow(workflow, batch_name, terminal_cat)
        self.save()

    def summary(self, frmt="dict"):
        """
        Build a summary of this sample for display.

        frmt: "dict" for basic fields, "datatable" for a sample-overview
        table row, "step_table" for a step's sample table row; any other
        value returns {}.
        """
        # Collect active (non-archived) batch memberships and the sample's
        # plate position in each.
        batches = []
        positions = {}
        for b in self.batches:
            if not b.archived:
                batches.append("{}: {}".format(b.workflow.name, b.batch_name))
                coord = b.position.get_coordinates()
                workflow_batch = "{}: {}".format(b.workflow.name, b.batch_name)
                positions[workflow_batch] = {
                    "coords": "".join([str(coord[0]),
                                       str(coord[1])]),
                    "index": b.position.index
                }
        if len(batches):
            batch = ", ".join(batches)
        else:
            batch = "Unassigned"

        # Species-level step variable used by the normalization/pooling
        # step; None when not configured for this species.
        genome_size = self.properties.sample_info.summary.submitted_species.step_variables.get(
            "wgs_routine", {}).get("wgs_08_normalization_pool",
                                   {}).get("genome_size")

        if frmt == "dict":
            result = {
                "barcode": self.barcode,
                "name": self.properties.sample_info.summary.name,
                "group": self.properties.sample_info.summary.group,
                "species":
                self.properties.sample_info.summary.submitted_species.name,
                "batch": batch,
                "archived": str(self.archived),
                "submitted_on": self.submitted_on
            }
        elif frmt == "datatable":
            result = {
                "none":
                "",  # For checkbox
                "tags": [x.pk for x in self.tags],
                "barcode":
                self.barcode,
                "name":
                self.properties.sample_info.summary.name,
                "group":
                self.properties.sample_info.summary.group,
                "species":
                self.properties.sample_info.summary.submitted_species.name,
                "batch":
                batch,
                "submission_comments":
                self.properties.sample_info.summary.submission_comments,
                "costcenter":
                self.properties.sample_info.summary.costcenter,
                "archived":
                str(self.archived),
                "batch_json":
                batches,
                "positions":
                positions,
                "genome_size":
                genome_size,
                "submitted_on":
                self.submitted_on.date(),
                "priority":
                self.properties.sample_info.summary.priority,
                "comments":
                self.comments,
                "supplied_plate_name":
                self.properties.sample_info.summary.supplied_plate_name,
                "position_in_supplied_plate":
                self.properties.sample_info.summary.position_in_supplied_plate
            }
        elif frmt == "step_table":
            result = {
                "none": "",  # For checkbox
                "barcode": self.barcode,
                "species":
                self.properties.sample_info.summary.submitted_species.name,
                "positions": positions,
                "comments": self.comments,
                "batch": batch
            }
        else:
            # Unknown format requested.
            result = {}
        return result

    def result_report(self, workflow_name, batch_name):
        """
        Build a flat {"step.field": value} report over the full result
        chain of the given workflow batch.

        Returns {} when the sample is not in the batch, and only the
        barcode when no step results exist for the batch yet.
        """
        matching = self.get_batches(workflow_name, batch_name)
        if len(matching) == 0:
            return {}
        data = {'barcode': self.barcode}
        batch = matching[0]

        # Find the last step done from this branch and chain back.
        chain = None
        for step_name in (["_workflow_finished"] +
                          [b.name for b in batch.workflow.steps[::-1]]):
            if step_name in self.workflows[workflow_name]:
                for step_i in self.workflows[workflow_name][step_name][::-1]:
                    if step_i.batch_name == batch_name:
                        chain = [step_i]
                        break
                if chain is not None:
                    break
        # Previously a batch with no recorded results crashed in
        # result_chain(None); report just the barcode instead.
        if chain is None:
            return data
        chain = self.result_chain(chain)

        for step_i_r in chain[::-1]:
            if step_i_r.step_instance is not None:
                step_name = step_i_r.step_instance.step.name
            else:
                step_name = "root"
            for k, v in step_i_r.sample.items():
                data[".".join([step_name, k])] = str(v)
            for k, v in step_i_r.all.items():
                data[".".join([step_name, "all", k])] = str(v)
        return data

    def is_allowed(self, user):
        """
        Return True when `user` may access this sample (same group).
        """
        sample_group = self.properties.sample_info.summary.group
        return sample_group == user.group

    def detail_data(self):
        """
        Generate dict with data for sample details view

        Includes identity/summary properties plus, per batch, the list of
        step attempts with their result values.
        """
        batches = []
        for batch in self.batches:
            workflow_name = batch.workflow.name
            batch_name = batch.batch_name
            workflow_o = batch.workflow
            workflow_data = self.workflows[workflow_name]
            workflow_steps = []
            for step_name, step_i_list in workflow_data.items():
                step_i_found = False
                # NOTE(review): scans attempts in reverse without a break,
                # so the FIRST list element matching the batch wins (the
                # earliest attempt) -- confirm intended.
                for step_i in step_i_list[::-1]:
                    if step_i.batch_name == batch_name:
                        step_i_found = step_i
                if step_i_found is False:
                    # No attempt of this step belongs to this batch.
                    continue
                step_i = step_i_found
                # Map internal category names to display labels.
                if step_name == "root":
                    step_name = "Workflow initialization"
                elif step_name == "_workflow_finished":
                    step_name = "Workflow finished"
                else:
                    step = Step.objects.project({
                        "display_name": 1
                    }).get({"name": step_name})
                    step_name = step.display_name
                if step_i.step_instance is None:
                    # Synthetic entries (root / finished) carry their own
                    # stored values.
                    workflow_steps.append({
                        "name": step_name,
                        "attempt": len(step_i_list),
                        "start_date": step_i.start_date,
                        "finish_date": step_i.finish_date,
                        "values_all": step_i.all,
                        "values_sample": step_i.sample
                    })
                else:
                    # Real steps: pull summary values from the instance.
                    val = step_i.step_instance.summary_values()
                    workflow_steps.append({
                        "id":
                        step_i.step_instance._id,
                        "name":
                        step_name,
                        "attempt":
                        len(step_i_list),
                        "start_date":
                        step_i.start_date,
                        "finish_date":
                        step_i.finish_date,
                        "values_all":
                        val["values_all"],
                        "values_samples":
                        val["values_samples"],
                        "fields_samples":
                        val["fields_samples"]
                    })
            batches.append({
                "display_name":
                workflow_o.display_name,
                "steps":
                workflow_steps,
                "batch_name":
                batch_name,
                "position":
                "".join(map(str, batch.position.get_coordinates()))
            })

        return {
            "barcode": self.barcode,
            "tags": [x.pk for x in self.tags],
            "properties": {
                "group": self.properties.sample_info.summary.group,
                "name": self.properties.sample_info.summary.name,
                "species":
                self.properties.sample_info.summary.submitted_species.name,
                "priority": self.properties.sample_info.summary.priority,
                "emails": self.properties.sample_info.summary.emails,
                "submitted_on": self.submitted_on,
                "additional_metadata": []
            },
            "batches": batches
        }

    def assign_tag(self, tag):
        """Attach `tag` to this sample (no-op if already present) and save."""
        already_assigned = tag in self.tags
        if not already_assigned:
            self.tags.append(tag)
        self.save()

    def unassign_tag(self, tag):
        """Detach `tag` from this sample (no-op if absent) and save."""
        try:
            self.tags.remove(tag)
        except ValueError:
            pass  # tag was not assigned; nothing to remove
        self.save()
예제 #11
0
class S_info(EmbeddedMongoModel):
    """Embedded container for a sample's information."""
    # Summary sub-document; see S_summary for the individual fields.
    summary = fields.EmbeddedDocumentField(S_summary)
예제 #12
0
class Query(EmbeddedMongoModel):
    """Embedded record of a search query and its results."""
    _id = fields.ObjectIdField(primary_key=True)
    # Review document this query belongs to.
    parent_review = fields.ReferenceField('Review')
    # Stored as a string -- TODO confirm the expected time format.
    time = fields.CharField()
    # Raw result payloads produced by running `search`.
    results = fields.ListField()
    # The search parameters used to produce `results`.
    search = fields.EmbeddedDocumentField('Search')
예제 #13
0
class Step(MongoModel):
    """
    A LIMS protocol step definition (an executable step type, as opposed
    to a Step_instance, which is one run of it). `name` is unique;
    `category` groups interchangeable steps within workflows.
    """
    name = fields.CharField(required=True)
    display_name = fields.CharField(required=True)
    category = fields.CharField(required=True)
    version = fields.CharField(required=True)
    details = fields.EmbeddedDocumentField(S_details)
    requirements = fields.DictField()
    # Per-stage input/output declarations; drives required_params().
    input_output = fields.EmbeddedDocumentListField(S_input_output)

    class Meta:
        indexes = [IndexModel([("name", 1)], unique=True)]

    def _sl(self):
        # Lazily create and cache a Steploader for the instance's
        # "steps" folder; subsequent calls reuse the cached loader.
        try:
            return getattr(self, '_steploader')
        except AttributeError:
            step_folder = os.path.join(current_app.instance_path, "steps")
            self._steploader = Steploader(step_folder)
            return self._steploader

    def run_script(self, script, params):
        """Load this step's script module and run `script.script` with `params`."""
        module = self._sl().get_and_load(self.name)
        function = getattr(module, script.script)
        try:
            return function(**params)
        except Exception as e:
            # Re-raised unchanged; the dict alternative below is kept for
            # easy restoration of best-effort behaviour.
            raise e
            # return {
            #     "errors": str(e)
            # }

    def run_qc_script(self, script, step_instance):
        """Run a QC script against a step instance's samples and QC actions."""
        module = self._sl().get_and_load(self.name)
        function = getattr(module, script.script)
        return function(step_instance.samples, step_instance.qc_actions,
                        step_instance)

    def required_params(self):
        """
        Get a dict with the required/expected params when a stepend is submitted.
        Generated from input_output
        """
        expected = {"samples": [], "all": []}
        for io in self.input_output:
            if io.stage == "stepend":
                # Only user-visible, required values are expected at submit.
                for value in io.input_values:
                    if value.showuser and value.required:
                        expected[value.scope].append((value.name, value.type_))
        return expected

    def available_samples(self, distinct_batches_only=False):
        """
        Return the samples whose active batches sit on a step that leads
        directly into this one (or the batch names, when
        distinct_batches_only is True).
        """
        from minilims.models.workflow import Workflow
        previous_steps = set()  # Steps that lead directly to this one.
        for w in Workflow.objects.raw({"steps": self._id}):
            index = w.steps.index(self)
            if index == 0:
                # First step of the workflow: preceded by the "root" marker.
                previous_steps.add((w._id, "root"))
            else:
                previous_steps.add((w._id, w.steps[index - 1].category))
        from minilims.models.sample import Sample
        samples = list(
            Sample.objects.raw({
                "$or": [{
                    "batches": {
                        "$elemMatch": {
                            "workflow": x[0],
                            "step_cat": x[1],
                            "archived": False
                        }
                    }
                } for x in previous_steps]
            }))
        if distinct_batches_only:
            batches = set()
            for sample in samples:
                for batch in sample.batches:
                    for x in previous_steps:
                        if batch.workflow._id == x[0] and batch.step_cat == x[
                                1] and not batch.archived:
                            batches.add("{}: {}".format(
                                batch.workflow.name, batch.batch_name))
            return list(batches)
        else:
            return samples

    def get_started(self):
        """
        Returns list of step instances of this step that are started.
        """
        from minilims.models.step_instance import Step_instance
        return Step_instance.objects.raw({
            "step": self._id,
            "status": "started"
        })

    def summary(self, get_workflows=False):
        """Return the step's basic info; optionally include the names of
        the workflows that use it."""
        info = {
            "name": self.name,
            "display_name": self.display_name,
            "category": self.category,
            "version": self.version,
        }

        if get_workflows:
            from minilims.models.workflow import Workflow
            workflows = Workflow.objects.raw({
                "steps": self._id
            }).project({
                "name": 1,
                "_id": 0
            })
            info["workflows"] = [workflow.name for workflow in workflows]

        return info

    def step_instances_summaries(self):
        """Return a summary dict for every instance of this step."""
        from minilims.models.step_instance import Step_instance

        step_instance_summaries = []
        for s_i in Step_instance.objects.raw({"step": self._id}):
            step_instance_summaries.append(s_i.summary())

        return step_instance_summaries

    def available_batches(self):
        # NOTE(review): appears truncated -- fetches the samples but then
        # returns None; confirm the intended implementation.
        samples = self.available_samples()
예제 #14
0
class Contributor(MongoModel):
    """A named contributor with an optional thumbnail image."""
    name = fields.CharField()
    # Embedded Image sub-document used as the contributor's thumbnail.
    thumbnail = fields.EmbeddedDocumentField('Image')
예제 #15
0
class Character(MongoModel):
    """
    Persistent RPG character document: identity, progression, core and
    bonus stats, resources (health/stamina/mana), elemental resistances,
    abilities, equipment slots, inventory/bank/shop, and status-effect
    state.

    Field defaults that must be fresh per document are given as callables:
    pymodm evaluates a callable default on access, whereas a plain value
    is created once at class-definition time and shared.
    """
    name = fields.CharField(required=True)
    # Bug fix: pass the callable, not utcnow() -- the original evaluated
    # the timestamp once at import time and reused it for every document.
    created = fields.DateTimeField(default=datetime.datetime.utcnow)

    level = fields.IntegerField(default=1)
    xp = fields.IntegerField(default=0)
    coins = fields.IntegerField(default=0)
    # Core attributes; each has a matching bonus_* counterpart that
    # equipment bonuses are applied to (see apply_equippable_bonuses).
    strength = fields.IntegerField(default=0)
    bonus_strength = fields.IntegerField(default=0)
    intelligence = fields.IntegerField(default=0)
    bonus_intelligence = fields.IntegerField(default=0)
    dexterity = fields.IntegerField(default=0)
    bonus_dexterity = fields.IntegerField(default=0)
    willpower = fields.IntegerField(default=0)
    bonus_willpower = fields.IntegerField(default=0)
    health = fields.IntegerField(default=100)
    bonus_health = fields.IntegerField(default=0)
    current_health = fields.IntegerField(default=100)
    health_regen = fields.IntegerField(default=0)
    bonus_health_regen = fields.IntegerField(default=0)
    stamina = fields.IntegerField(default=100)
    bonus_stamina = fields.IntegerField(default=0)
    current_stamina = fields.IntegerField(default=100)
    stamina_regen = fields.IntegerField(default=0)
    bonus_stamina_regen = fields.IntegerField(default=0)
    mana = fields.IntegerField(default=100)
    bonus_mana = fields.IntegerField(default=0)
    current_mana = fields.IntegerField(default=100)
    mana_regen = fields.IntegerField(default=0)
    bonus_mana_regen = fields.IntegerField(default=0)
    init = fields.IntegerField(default=0)
    bonus_init = fields.IntegerField(default=0)
    carry = fields.IntegerField(default=100)
    bonus_carry = fields.IntegerField(default=0)
    current_carry = fields.IntegerField(default=0)

    # Elemental resistances and damage-over-time modifiers.
    earth_res = fields.FloatField(default=0.0)
    bonus_earth_res = fields.FloatField(default=0.0)
    fire_res = fields.FloatField(default=0.0)
    bonus_fire_res = fields.FloatField(default=0.0)
    electricity_res = fields.FloatField(default=0.0)
    bonus_electricity_res = fields.FloatField(default=0.0)
    water_res = fields.FloatField(default=0.0)
    bonus_water_res = fields.FloatField(default=0.0)
    dot_res = fields.FloatField(default=0.0)
    bonus_dot_res = fields.FloatField(default=0.0)
    dot_reduction = fields.IntegerField(default=0)
    bonus_dot_reduction = fields.IntegerField(default=0)
    dot_effect = fields.FloatField(default=0.0)
    bonus_dot_effect = fields.FloatField(default=0.0)
    dot_duration = fields.IntegerField(default=0)
    bonus_dot_duration = fields.IntegerField(default=0)

    points = fields.IntegerField(default=0)
    # Bug fix: mutable defaults below were single shared objects; wrap
    # them in callables so each document gets its own copy.
    abilities = fields.ListField(default=lambda: ['skill-strike'])
    ability_slots = fields.DictField(default=lambda: {
        '1': 'skill-strike',
        '2': None,
        '3': None,
        '4': None
    })

    # Equipment slots; None means the slot is empty.
    eq_weapon = fields.EmbeddedDocumentField(item.Item,
                                             default=None,
                                             blank=True)
    eq_head = fields.EmbeddedDocumentField(item.Item, default=None, blank=True)
    eq_chest = fields.EmbeddedDocumentField(item.Item,
                                            default=None,
                                            blank=True)
    eq_belt = fields.EmbeddedDocumentField(item.Item, default=None, blank=True)
    eq_boots = fields.EmbeddedDocumentField(item.Item,
                                            default=None,
                                            blank=True)
    eq_gloves = fields.EmbeddedDocumentField(item.Item,
                                             default=None,
                                             blank=True)
    eq_amulet = fields.EmbeddedDocumentField(item.Item,
                                             default=None,
                                             blank=True)
    eq_ring = fields.EmbeddedDocumentField(item.Item, default=None, blank=True)

    inventory = fields.EmbeddedDocumentListField(item.Item,
                                                 default=list,
                                                 blank=True)
    bank = fields.EmbeddedDocumentListField(item.Item, default=list, blank=True)
    bank_limit = fields.IntegerField(default=10)
    shop = fields.EmbeddedDocumentListField(item.Item, default=list, blank=True)

    depths = fields.DictField(default=dict)
    profession = fields.CharField(default='')
    deaths = fields.IntegerField(default=0)
    status_effects = fields.ListField(
        default=list
    )  # list of dicts w/ keys = name, stat, value, turns_remaining
    burn = fields.DictField(default=lambda: {'turns': 0, 'dmg': 0})
    bleed = fields.DictField(default=lambda: {'turns': 0, 'dmg': 0})
    shock = fields.IntegerField(default=0)
    shock_limit = fields.IntegerField(default=5)
    bonus_shock_limit = fields.IntegerField(default=0)
    confusion = fields.IntegerField(default=0)
    confusion_limit = fields.IntegerField(default=5)
    bonus_confusion_limit = fields.IntegerField(default=0)

    class Meta:
        write_concern = WriteConcern(j=True)

    def reset_stats(self):
        """Refill health/stamina/mana to their (bonus-adjusted) maxima,
        clear all status effects, and persist."""
        for stat in ('health', 'stamina', 'mana'):
            refilled = getattr(self, stat) + getattr(self, 'bonus_' + stat)
            setattr(self, 'current_' + stat, refilled)
        self.remove_all_status_effects()
        self.save()

    def learn(self, _book) -> bool:
        """Learn the ability taught by `_book` when the level requirement
        is met and the ability is new; consumes the book on success."""
        if _book.level > self.level:
            return False
        if not self.add_ability(book.get_ability_string(_book)):
            return False
        self.remove_from_inventory(_book)
        return True

    def add_ability(self, ability_string: str) -> bool:
        """Append a new ability and persist; False if already known."""
        if ability_string in self.abilities:
            return False
        self.abilities.append(ability_string)
        self.save()
        return True

    def assign_ability_to_slot(self, index: int, slot: int) -> bool:
        """Bind abilities[index] to hotkey slot `slot` and persist.

        NOTE(review): slots 5-6 pass the range check but are absent from
        the ability_slots default (keys '1'-'4') -- confirm intended.
        """
        if not (0 < slot < 7 and 0 <= index < len(self.abilities)):
            return False
        self.ability_slots[str(slot)] = self.abilities[index]
        self.save()
        return True

    def add_to_inventory(self, item, ignore_carry, unequipping=False):
        """Put `item` into the inventory and persist.

        Fails (False) when the item would exceed carry capacity, unless
        ignore_carry. Weight is not re-counted when the item is coming out
        of an equipment slot (unequipping) -- it never left the character.
        """
        carry_cap = self.carry + self.bonus_carry
        if not ignore_carry and self.current_carry + item.weight > carry_cap:
            return False
        self.inventory.append(item)
        if not unequipping:
            self.current_carry += item.weight
        self.save()
        return True

    def remove_from_inventory(self, item, equipping=False):
        """Take `item` out of the inventory and persist.

        Carry weight is kept when the item moves to an equipment slot
        (equipping) since it stays on the character.
        """
        self.inventory.remove(item)
        if not equipping:
            self.current_carry -= item.weight
        self.save()

    def equip(self, item):
        """
        Equip `item` into its slot, swapping out (unequipping) any item
        already there.

        Returns False when the item is not equippable gear, the
        character's level is too low, or a weapon's stat requirements are
        not met; True on success.
        """
        equippable = [
            ItemType.weapon.value, ItemType.head.value,
            ItemType.chest.value, ItemType.belt.value,
            ItemType.boots.value, ItemType.gloves.value,
            ItemType.amulet.value, ItemType.ring.value
        ]
        if item.itype not in equippable:
            return False

        if self.level < item.level:
            return False

        # Weapons additionally gate on (bonus-adjusted) attributes.
        if item.itype == ItemType.weapon.value:
            if self.strength + self.bonus_strength < item.required_strength:
                return False

            if self.intelligence + self.bonus_intelligence < item.required_intelligence:
                return False

            if self.dexterity + self.bonus_dexterity < item.required_dexterity:
                return False

            if self.willpower + self.bonus_willpower < item.required_willpower:
                return False

        slot = ItemType(item.itype).name

        # Swap out whatever currently occupies the slot.
        if getattr(self, 'eq_' + slot) is not None:
            self.unequip(slot)
            self.save()

        # The level check above already passed, so this always succeeds
        # (the original re-checked it and had an unreachable return False).
        self.remove_from_inventory(item, True)
        setattr(self, 'eq_' + slot, item)
        self.update_stats(item, True)
        return True

    def unequip(self, slot: str):
        """Empty the given equipment slot into the inventory (without
        affecting carry weight). Returns the removed item, or False for an
        unknown or already-empty slot."""
        known_slots = ('weapon', 'head', 'chest', 'belt', 'boots', 'gloves',
                       'amulet', 'ring')
        if slot not in known_slots:
            return False
        equipped = getattr(self, 'eq_' + slot)
        if equipped is None:
            return False
        setattr(self, 'eq_' + slot, None)
        self.update_stats(equipped, False)
        self.add_to_inventory(equipped, True, True)
        return equipped

    def unequip_all(self):
        """Empty every equipment slot back into the inventory."""
        slots = ('weapon', 'head', 'chest', 'belt', 'boots', 'gloves',
                 'amulet', 'ring')
        for slot in slots:
            self.unequip(slot)

    def update_stats(self, item, equip: bool):
        """Apply (equip=True) or remove (equip=False) `item`'s bonuses,
        then persist the character."""
        action = (self.apply_equippable_bonuses if equip
                  else self.remove_equippable_bonuses)
        action(item)
        self.save()

    def apply_equippable_bonuses(self, item):
        """Add every `bonus_*` attribute of `item` onto this character."""
        for name in dir(item):
            if name.startswith('bonus_'):
                setattr(self, name, getattr(self, name) + getattr(item, name))

    def remove_equippable_bonuses(self, item):
        """Subtract every `bonus_*` attribute of `item` from this character."""
        for name in dir(item):
            if name.startswith('bonus_'):
                setattr(self, name, getattr(self, name) - getattr(item, name))

    def deposit(self, index: int) -> bool:
        """
        Move inventory[index] into the bank.

        Returns False for an out-of-range index or a full bank.

        Bug fix: the original guard `len(...) - 1 < index < 0` is a chained
        comparison that can never be true, so invalid indexes raised
        IndexError (or wrapped around when negative) instead of returning
        False.
        """
        if not 0 <= index < len(self.inventory):
            return False
        if len(self.bank) >= self.bank_limit:
            return False

        it = self.inventory[index]
        self.bank.append(it)
        self.remove_from_inventory(it)
        return True

    def withdraw(self, index: int) -> bool:
        """
        Move bank[index] back into the inventory, charging coins equal to
        the number of items currently banked.

        Returns False for an out-of-range index or when the item does not
        fit within the carry limit.

        Bug fix: the original guard `len(...) - 1 < index < 0` is a chained
        comparison that can never be true, so invalid indexes raised
        IndexError (or wrapped around when negative) instead of returning
        False.
        """
        if not 0 <= index < len(self.bank):
            return False

        it = self.bank[index]

        if self.add_to_inventory(it, False):
            # Fee is the bank size BEFORE removal (original behaviour).
            cost = len(self.bank)
            self.bank.remove(it)
            self.coins -= cost
            self.save()
            return True

        return False

    def use_consumable(self, consumable):
        """
        Consume a potion: apply each non-zero effect (health/stamina/mana
        restoration; burn/bleed/shock/confusion adjustment), decrement its
        remaining uses, and remove it from the inventory when used up.

        Returns a human-readable description of what happened, or an error
        string when the item is not a potion.

        Raises:
            Exception: if the consumable already had no uses left.
        """
        if consumable.itype != ItemType.potion.value:
            return f'{consumable.name} is not consumable.'
        # Bug fix: was `consumable['uses']` -- embedded documents are
        # accessed by attribute (as everywhere else in this method), not by
        # subscript, so a depleted potion raised TypeError instead of the
        # intended Exception below.
        if consumable.uses <= 0:
            raise Exception(
                f'Consumable {consumable.name} used by {self.name} had {consumable.uses} uses.'
            )

        out = f'The {ItemType(consumable.itype).name}:'

        if consumable.health != 0:
            result = self.restore_health(consumable.health)

            if result >= 0:
                out += f'\nRestores {result} health'
            else:
                out += f'\nDrains {result} health'

        if consumable.stamina != 0:
            result = self.restore_stamina(consumable.stamina)

            if result >= 0:
                out += f'\nRestores {result} stamina'
            else:
                out += f'\nDrains {result} stamina'

        if consumable.mana != 0:
            result = self.restore_mana(consumable.mana)

            if result >= 0:
                out += f'\nRestores {result} mana'
            else:
                out += f'\nDrains {result} mana'

        if consumable.burn != 0:
            result = self.affect_burning(consumable.burn)

            if result != 0:
                out += f'\nBurning duration {result:+}'

        if consumable.bleed != 0:
            result = self.affect_bleeding(consumable.bleed)

            if result != 0:
                out += f'\nBleeding duration {result:+}'

        if consumable.shock != 0:
            result = self.affect_shock(consumable.shock)

            if result != 0:
                out += f'\nShock level {result:+}'

        if consumable.confusion != 0:
            result = self.affect_confusion(consumable.confusion)

            if result != 0:
                out += f'\nConfusion level {result:+}'

        consumable.uses -= 1

        if consumable.uses < 1:
            self.remove_from_inventory(consumable)
            out += '\n... and was consumed'

        self.save()
        return out

    def has_consumables(self):
        """Return True when any inventory item is a potion."""
        return any(it.itype == ItemType.potion.value for it in self.inventory)

    def restore_health(self, amount: int, source=None):
        """Change current health by `amount` (scaled by the source's water
        element when `source` is a Character), clamped to max health.
        Returns the delta actually applied (may be negative)."""
        before = self.current_health
        delta = amount
        if source is not None and isinstance(source, Character):
            delta = int(amount * source.get_element_scaling(Elements.water))
        self.current_health = min(before + delta,
                                  self.health + self.bonus_health)
        return self.current_health - before

    def restore_stamina(self, amount: int, source=None):
        """Change current stamina by `amount` (scaled by the source's water
        element when `source` is a Character), clamped to max stamina.
        Returns the delta actually applied (may be negative)."""
        before = self.current_stamina
        delta = amount
        if source is not None and isinstance(source, Character):
            delta = int(amount * source.get_element_scaling(Elements.water))
        self.current_stamina = min(before + delta,
                                   self.stamina + self.bonus_stamina)
        return self.current_stamina - before

    def restore_mana(self, amount: int, source=None):
        """Change current mana by `amount` (scaled by the source's water
        element when `source` is a Character), clamped to max mana.
        Returns the delta actually applied (may be negative)."""
        before = self.current_mana
        delta = amount
        if source is not None and isinstance(source, Character):
            delta = int(amount * source.get_element_scaling(Elements.water))
        self.current_mana = min(before + delta,
                                self.mana + self.bonus_mana)
        return self.current_mana - before

    def restore_all(self, h: int, s: int, m: int):
        """Restore health, stamina and mana; return the three actual gains."""
        healed = self.restore_health(h)
        stamina = self.restore_stamina(s)
        mana = self.restore_mana(m)
        return healed, stamina, mana

    def recover(self):
        """Recover 10% of max stamina and mana (no health); returns the gains."""
        fraction = 0.1
        stamina = int((self.stamina + self.bonus_stamina) * fraction)
        mana = int((self.mana + self.bonus_mana) * fraction)
        return self.restore_all(0, stamina, mana)

    def regen(self):
        """Apply per-turn regeneration to health, stamina and mana."""
        health_tick = self.health_regen + self.bonus_health_regen
        stamina_tick = self.stamina_regen + self.bonus_stamina_regen
        mana_tick = self.mana_regen + self.bonus_mana_regen
        return self.restore_all(health_tick, stamina_tick, mana_tick)

    def affect_burning(self, turns: int) -> int:
        """Adjust remaining burn turns (floored at 0); return the actual change.

        Clearing the last turn also zeroes the stored burn damage.
        """
        previous = self.burn['turns']
        updated = previous + turns
        if updated < 0:
            updated = 0
        self.burn['turns'] = updated

        if updated == 0:
            self.burn['dmg'] = 0

        return updated - previous

    def affect_bleeding(self, turns: int) -> int:
        """Adjust remaining bleed turns (floored at 0); return the actual change.

        Clearing the last turn also zeroes the stored bleed damage.
        """
        previous = self.bleed['turns']
        updated = previous + turns
        if updated < 0:
            updated = 0
        self.bleed['turns'] = updated

        if updated == 0:
            self.bleed['dmg'] = 0

        return updated - previous

    def affect_shock(self, amount: int) -> int:
        """Shift shock by ``amount``, clamped to [0, shock limit]; return the delta."""
        previous = self.shock
        limit = self.shock_limit + self.bonus_shock_limit
        self.shock = utilities.clamp(previous + amount, 0, limit)
        return self.shock - previous

    def affect_confusion(self, amount: int) -> int:
        """Shift confusion by ``amount``, clamped to [0, confusion limit]; return the delta."""
        previous = self.confusion
        limit = self.confusion_limit + self.bonus_confusion_limit
        self.confusion = utilities.clamp(previous + amount, 0, limit)
        return self.confusion - previous

    def get_ele_pens(self) -> tuple:
        """Return the equipped weapon's elemental penetrations.

        Order: (earth, fire, electricity, water); all zeros when unarmed.
        """
        weapon = self.eq_weapon
        if weapon is None:
            return 0.0, 0.0, 0.0, 0.0

        return (weapon.earth_penetration,
                weapon.fire_penetration,
                weapon.electricity_penetration,
                weapon.water_penetration)

    def get_element_scaling(self, element: Elements):
        """Damage/heal multiplier for ``element``, driven by the matching stat.

        Earth->strength, fire->intelligence, electricity->dexterity,
        water->willpower; each point of stat adds 0.1% to the multiplier.

        Raises:
            Exception: for any element without a governing stat.
        """
        if element == Elements.earth:
            governing = self.strength + self.bonus_strength
        elif element == Elements.fire:
            governing = self.intelligence + self.bonus_intelligence
        elif element == Elements.electricity:
            governing = self.dexterity + self.bonus_dexterity
        elif element == Elements.water:
            governing = self.willpower + self.bonus_willpower
        else:
            raise Exception(
                f'{self.name} called character get_element_scaling with invalid element {element.name}'
            )

        return 1.0 + governing / 1000

    def apply_element_damage_resistances(self, amt: int, element) -> float:
        """Scale incoming damage by this character's resistance to ``element``.

        Unknown elements pass through unmodified.
        """
        if element == Elements.earth:
            return amt * (1.0 - (self.earth_res + self.bonus_earth_res))
        if element == Elements.fire:
            return amt * (1.0 - (self.fire_res + self.bonus_fire_res))
        if element == Elements.electricity:
            return amt * (1.0 - (self.electricity_res + self.bonus_electricity_res))
        if element == Elements.water:
            return amt * (1.0 - (self.water_res + self.bonus_water_res))
        return amt

    def deal_damage(self, effect, critical=False, multi=1.0):
        """Roll outgoing damage for an effect.

        Returns a list of (amount, Elements) tuples. With no element on the
        effect, rolls every damage entry of the equipped weapon; otherwise a
        plain ability Effect rolls level-scaled dice of its own element.

        Raises:
            Exception: if the effect is not a damage_health effect.
        """
        dmgs = []

        if effect.type == ability.EffectType.damage_health:
            if effect.element is None:
                # Weapon attack: roll each of the weapon's damage entries.
                weapon = self.eq_weapon

                for dmg in weapon.damages:
                    # dmg appears to be (dice count, dice value, element id)
                    # — TODO confirm against the weapon model.
                    total = dice.roll(dmg[0], dmg[1], critical)
                    total *= self.get_element_scaling(Elements(dmg[2]))
                    total = round(total * multi)

                    if critical:
                        total += weapon.crit_damage

                    dmgs.append((total, Elements(dmg[2])))
            elif type(effect) == ability.Effect:
                # Ability damage: dice count scales with character level.
                total = dice.roll(dice.count(self.level), effect.dice_value,
                                  critical)
                total *= self.get_element_scaling(effect.element)
                total = round(total)
                dmgs.append((total, effect.element))
        else:
            raise Exception(
                f'{self.name} called character deal damage with invalid effect type {type(effect)}'
            )

        return dmgs

    def take_damage(self, dmgs: list, ele_pens: tuple) -> list:
        """Apply a list of (amount, element) damages after resistances.

        ``ele_pens`` is unused for players; it is accepted only so player,
        summon and enemy planners share one call signature.
        """
        for amount, element in dmgs:
            reduced = self.apply_element_damage_resistances(amount, element)
            self.current_health = max(0, self.current_health - round(reduced))

        return dmgs

    def estimate_damage_from_enemy_action(self, enemy, action) -> int:
        """Estimate the expected damage this character takes from an action.

        Sums average direct damage (including crit expectation) plus the
        expected burn/bleed damage-over-time, after this character's
        resistances and DOT reductions.
        """
        amt = 0

        for effect in action.effects:
            if effect.type == ability.EffectType.damage_health:
                element_scaling = enemy.get_element_scaling(effect.element)
                avg = (dice.count(enemy.level) *
                       effect.dice_value) / 2 + element_scaling
                # Add the expected bonus from critical hits.
                avg += int(
                    (dice.count(enemy.level) * effect.dice_value - avg) *
                    action.base_crit_chance)
                amt += self.apply_element_damage_resistances(
                    avg, effect.element)
            elif effect.type in [
                    ability.EffectType.burn, ability.EffectType.bleed
            ]:
                turns = effect.effect_turns + enemy.dot_duration - self.dot_reduction - self.bonus_dot_reduction
                # Bug fix: was min(turns, 0), which forced the duration to
                # <= 0 and produced zero/negative DOT estimates. Clamp at a
                # floor of zero, matching apply_burn/apply_bleed.
                turns = max(turns, 0)
                amt += round(effect.dot_value *
                             (1.0 + enemy.dot_effect - self.dot_res -
                              self.dot_bonus_res)) * turns

        return amt

    def apply_status_effect(self, name: str, stat: str, value: int,
                            turns_remaining: int):
        """Apply (or refresh) a named status effect that modifies a stat.

        An existing effect with the same name is removed first. Returns the
        replaced effect's name, or None if nothing was replaced.
        """
        existing = None
        for active in self.status_effects:
            if active['name'] == name:
                existing = active
                break

        if existing is not None:
            self.remove_status_effect(existing)

        self.status_effects.append({
            'name': name,
            'stat': stat,
            'value': value,
            'turns_remaining': turns_remaining
        })
        # Push the modifier onto the live stat immediately.
        setattr(self, stat, getattr(self, stat) + value)
        self.save()

        return existing['name'] if existing is not None else None

    def remove_status_effect(self, se):
        """Revert a status effect's stat modifier and drop it from the list.

        Args:
            se: status-effect dict with 'name', 'stat' and 'value' keys.

        Raises:
            Exception: if the dict is missing one of the expected keys.
        """
        try:
            # Bug fix: previously subscripted the builtin ``set`` and buried
            # the subtraction inside getattr's name argument
            # (getattr(self, set['stat'] - se['value'])), which raised
            # TypeError instead of reverting the stat.
            setattr(self, se['stat'], getattr(self, se['stat']) - se['value'])
            self.status_effects.remove(se)
        except KeyError:
            raise Exception(
                f'remove_status_effect failed for {self.name}: {se["name"]}, {se["stat"]}'
            )

    def remove_all_status_effects(self):
        """Strip every active status effect, reverting their stat modifiers.

        Bug fix: iterate over a copy — remove_status_effect mutates
        self.status_effects, and iterating the live list skipped every
        other entry.
        """
        for se in list(self.status_effects):
            self.remove_status_effect(se)

    def list_active_effects(self):
        """Return the active status-effect names as a comma-separated string.

        Bug fix: the old implementation built 'a, b, ' and used
        rstrip(', '), which strips a *character set* and could also eat
        trailing commas/spaces belonging to the last effect name itself.
        """
        return ', '.join(se['name'] for se in self.status_effects)

    def countdown_status_effects(self):
        """Tick every status effect down one turn, expiring finished ones.

        Returns a newline-terminated message per expired effect ('' if none).
        """
        expired = [se for se in self.status_effects
                   if se['turns_remaining'] <= 1]
        for se in self.status_effects:
            if se['turns_remaining'] > 1:
                se['turns_remaining'] -= 1

        messages = []
        for se in expired:
            messages.append(f'{se["name"]} expired on {self.name}\n')
            self.remove_status_effect(se)

        self.save()
        return ''.join(messages)

    def apply_burn(self, turns: int, dmg: int, strength: float,
                   duration: int) -> bool:
        """Try to apply a burn; the stronger of old/new burn wins.

        Duration and damage are adjusted by the attacker's DOT bonuses and
        this character's DOT defenses. Returns True if the burn replaced
        the current one.
        """
        effective_turns = max(
            turns + duration - self.dot_reduction - self.bonus_dot_reduction, 0)
        effective_dmg = round(
            dmg * (1.0 + strength - self.dot_res - self.dot_bonus_res))

        # Keep whichever burn has the larger total (turns * damage) payload.
        if effective_turns * effective_dmg <= self.burn['turns'] * self.burn['dmg']:
            return False

        self.burn['turns'] = effective_turns
        self.burn['dmg'] = effective_dmg
        self.save()
        return True

    def apply_bleed(self, turns: int, dmg: int, strength: float,
                    duration: int) -> bool:
        """Stack a bleed onto the character; bleeds accumulate, never replace.

        Returns True only when this call started a fresh bleed (there were
        no bleed turns beforehand).
        """
        was_clean = self.bleed['turns'] == 0
        effective_turns = max(
            turns + duration - self.dot_reduction - self.bonus_dot_reduction, 0)
        effective_dmg = round(
            dmg * (1.0 + strength - self.dot_res - self.dot_bonus_res))
        self.bleed['turns'] += effective_turns
        self.bleed['dmg'] += effective_dmg
        self.save()
        return was_clean

    def apply_damage_over_time(self):
        """Tick burn and bleed damage for one turn.

        Returns:
            None when no DOT is active; otherwise a (died, message) tuple
            where died is True if health reached zero.
        """
        out = ''

        if self.burn['turns'] <= 0 and self.bleed['turns'] <= 0:
            return None  # no DOT occurred

        if self.burn['turns'] > 0:
            self.current_health -= self.burn['dmg']
            self.current_health = max(0, self.current_health)
            self.burn['turns'] -= 1
            # Bug fix: this message previously reported self.bleed["dmg"].
            out += f'{self.name} burned for {self.burn["dmg"]} damage.'

            if self.burn['turns'] == 0:
                self.burn['dmg'] = 0

        if self.bleed['turns'] > 0:
            self.current_health -= self.bleed['dmg']
            self.current_health = max(0, self.current_health)
            self.bleed['turns'] -= 1
            # Bug fix: this message previously reported self.burn["dmg"].
            out += f'{self.name} bled for {self.bleed["dmg"]} damage.'

            if self.bleed['turns'] == 0:
                self.bleed['dmg'] = 0

        if self.current_health <= 0:
            return True, out
        else:
            return False, out

    def end_of_turn(self):
        """Run end-of-turn upkeep: regen, burn countdown, status countdown.

        Returns the combined log text for everything that happened.
        NOTE(review): burn turns also tick down in apply_damage_over_time —
        confirm the double countdown is intended.
        """
        lines = []
        healed, stamina, mana = self.regen()

        if healed > 0 or stamina > 0 or mana > 0:
            lines.append(f'{self.name} regenerates {healed}h {stamina}s {mana}m.\n')

        if self.burn['turns'] > 0:
            self.burn['turns'] -= 1

            if self.burn['turns'] == 0:
                self.burn['dmg'] = 0
                lines.append(f'{self.name} has stopped burning.\n')

        return ''.join(lines) + self.countdown_status_effects()

    def has_completed_tutorial(self) -> bool:
        """True once the character has reached depth 10 in 'Boon Mine'."""
        return self.depths.get('Boon Mine', 0) >= 10

    def get_depth_progress(self, zone_name: str) -> int:
        """Deepest level reached in the named zone (0 if never visited)."""
        return self.depths.get(zone_name, 0)

    def update_depth_progress(self, zone, depth: int) -> bool:
        """Record a new personal-best depth for a zone.

        Returns True when the depth is a milestone (multiple of 5 beyond the
        previous best across all zones) that grants a point, else False.
        """
        if zone.name in self.depths and depth <= self.depths[zone.name]:
            return False

        # Previous best across all zones, computed before recording.
        highest = 0
        for recorded in self.depths.values():
            highest = max(highest, recorded)

        self.depths[zone.name] = depth
        self.save()

        if depth % 5 == 0 and depth > highest:
            self.points += 1
            self.save()
            return True

        # Bug fix: previously fell off the end and returned None despite the
        # declared -> bool (None is still falsy, so callers are unaffected).
        return False

    def restock(self, items: list):
        """Replace the shop inventory with ``items`` and persist."""
        self.shop = list(items)
        self.save()

    @staticmethod
    def display_level_up_menu(threshold=False):
        if threshold:
            return 'Choose one:\n1 - Earth Resistance +1%\n2 - Fire Resistance +1%\n3 - Electricity Resistance +1%\n4 - Water Resistance +1%\n5 - DOT Resistance +3%\n6 - DOT Strength +3%'
        else:
            return 'Choose one:\n1 - Strength +3\n2 - Intelligence +3\n3 - Dexterity +3\n4 - Willpower +3\n5 - Health +5\n6 - Stamina +5\n7 - Mana +5'
예제 #16
0
class ModelWithObjectField(CreatedUpdated, DeleteObjectsMixin, MongoModel):
    """Document storing an arbitrary object payload plus audit timestamps."""
    # Payload held by the project's ObjectField type.
    data = ObjectField()
    # Nested document that itself carries an ObjectField.
    embed = fields.EmbeddedDocumentField(EmbeddedModelWithObjectField)
    created = fields.DateTimeField()
    updated = fields.DateTimeField()
예제 #17
0
class RobotStatus(EmbeddedMongoModel):
    """Embedded document grouping a robot's availability and component status."""

    availability = fields.EmbeddedDocumentField(Availability)
    component_status = fields.EmbeddedDocumentField(ComponentStatus)
예제 #18
0
class RunningActivity(Activity):
    """Running activity with gait metrics, an optional race, and GPS records.

    The four *_to_percent_records_mapping properties all share one helper
    (previously four copy-pasted bodies).
    """

    shoes = fields.ReferenceField(Shoes)

    avg_power_w = fields.IntegerField(
        verbose_name='Average Power (Estimate) [W]')
    avg_cadence_steps_min = fields.IntegerField(
        verbose_name='Average Running Cadence [strides/min]',
        mongo_name='avg_running_cadence')
    total_num_steps = fields.IntegerField(verbose_name='Total Number of Steps',
                                          mongo_name='num_steps')
    avg_vertical_oscillation_mm = fields.FloatField(
        verbose_name='Average Vertical Oscillation [mm]',
        mongo_name='avg_vert_osc_mm')
    avg_vertical_ratio = fields.FloatField(
        verbose_name='Average Vertical Ratio %', mongo_name='avg_vert_ratio')
    avg_ground_contact_time_ms = fields.FloatField(
        verbose_name='Average Ground Contact Time [ms]',
        mongo_name='avg_gct_ms')
    avg_ground_contact_time_balance = fields.FloatField(
        verbose_name='Average Ground Contact Time Balance %',
        mongo_name='avg_gct_bal')
    avg_step_length_mm = fields.FloatField(
        verbose_name='Average Step Length [mm]')

    race = fields.EmbeddedDocumentField(RunningRace)

    records = fields.ListField(fields.ReferenceField(LocationWhileRunning))

    @property
    def avg_power(self):
        """Average power as a unit-aware quantity (watts)."""
        return self.avg_power_w * ureg.watt

    @property
    def avg_vertical_oscillation(self):
        """Average vertical oscillation as a unit-aware quantity (mm)."""
        return self.avg_vertical_oscillation_mm * ureg.millimeter

    @property
    def avg_ground_contact_time(self):
        """Average ground contact time as a unit-aware quantity (ms)."""
        return self.avg_ground_contact_time_ms * ureg.milliseconds

    @property
    def avg_step_length(self):
        """Average step length as a unit-aware quantity (mm)."""
        return self.avg_step_length_mm * ureg.millimeter

    @property
    def avg_power_density(self):
        """Average power divided by body weight (unit-aware)."""
        return self.avg_power / self.avg_body_weight

    @property
    def avg_watts_per_kg(self):
        """Average power density as a plain float in W/kg."""
        return self.avg_power_density.to('watt/kg').magnitude

    def _percent_records_mapping(self, attr_name: str) -> dict:
        """Percentage of records (2 decimals) per distinct value of *attr_name*.

        Returns an empty dict when there are no records, instead of raising
        ZeroDivisionError.
        """
        records = self.records
        num_records = len(records)
        if num_records == 0:
            return {}
        counts = {}
        for record in records:
            value = getattr(record, attr_name)
            counts[value] = counts.get(value, 0) + 1
        return {
            value: round(num / num_records * 100, 2)
            for value, num in counts.items()
        }

    @property
    def country_to_percent_records_mapping(self):
        """Percent of records per country name."""
        return self._percent_records_mapping('country_name')

    @property
    def admin1_to_percent_records_mapping(self):
        """Percent of records per first-level administrative division."""
        return self._percent_records_mapping('admin1')

    @property
    def admin2_to_percent_records_mapping(self):
        """Percent of records per second-level administrative division."""
        return self._percent_records_mapping('admin2')

    @property
    def name_to_percent_records_mapping(self):
        """Percent of records per place name."""
        return self._percent_records_mapping('name')
예제 #19
0
 class Card(MongoModel):
     """Card document keyed by its embedded CardIdentity."""
     # NOTE: shadows the builtin ``id``; kept for schema compatibility.
     id = fields.EmbeddedDocumentField(CardIdentity, primary_key=True)
     flavor = fields.CharField()
예제 #20
0
class InputUrl(Url):
    """Input URL with user agent, stream-link flag and optional HTTP proxy."""
    # Defaults to the GStreamer user agent constant.
    user_agent = fields.IntegerField(default=constants.UserAgent.GSTREAMER, required=True)
    stream_link = fields.BooleanField(default=False, required=True)
    proxy = fields.EmbeddedDocumentField(HttpProxy)
예제 #21
0
class FrameError(pymodm.MongoModel):
    """
    All the errors from a single frame
    One of these gets created for each frame for each trial
    """
    trial_result = fields.ReferenceField(TrialResult, required=True)
    image = fields.ReferenceField(Image, required=True, on_delete=fields.ReferenceField.CASCADE)

    repeat = fields.IntegerField(required=True)
    timestamp = fields.FloatField(required=True)
    motion = TransformField(required=True)
    processing_time = fields.FloatField(default=np.nan)
    loop_distances = fields.ListField(fields.FloatField(), blank=True)
    loop_angles = fields.ListField(fields.FloatField(), blank=True)
    num_features = fields.IntegerField(default=0)
    num_matches = fields.IntegerField(default=0)

    tracking = EnumField(TrackingState, default=TrackingState.OK)
    absolute_error = fields.EmbeddedDocumentField(PoseError, blank=True)
    relative_error = fields.EmbeddedDocumentField(PoseError, blank=True)
    noise = fields.EmbeddedDocumentField(PoseError, blank=True)
    systemic_error = fields.EmbeddedDocumentField(PoseError, blank=True)

    system_properties = fields.DictField(blank=True)
    image_properties = fields.DictField(blank=True)

    # Column name -> extractor callable; each lambda guards against the
    # corresponding embedded error document being absent (returns NaN).
    columns = ColumnList(
        repeat=attrgetter('repeat'),
        timestamp=attrgetter('timestamp'),
        tracking=attrgetter('is_tracking'),
        processing_time=attrgetter('processing_time'),
        motion_x=attrgetter('motion.x'),
        motion_y=attrgetter('motion.y'),
        motion_z=attrgetter('motion.z'),
        motion_length=lambda obj: np.linalg.norm(obj.motion.location),
        motion_roll=lambda obj: obj.motion.euler[0],
        motion_pitch=lambda obj: obj.motion.euler[1],
        motion_yaw=lambda obj: obj.motion.euler[2],
        motion_rotation=lambda obj: tf.quat_angle(obj.motion.rotation_quat(True)),
        num_features=attrgetter('num_features'),
        num_matches=attrgetter('num_matches'),

        is_loop_closure=lambda obj: len(obj.loop_distances) > 0,
        num_loop_closures=lambda obj: len(obj.loop_distances),
        max_loop_closure_distance=lambda obj: np.max(obj.loop_distances) if len(obj.loop_distances) > 0 else np.nan,
        min_loop_closure_distance=lambda obj: np.min(obj.loop_distances) if len(obj.loop_distances) > 0 else np.nan,
        mean_loop_closure_distance=lambda obj: np.mean(obj.loop_distances) if len(obj.loop_distances) > 0 else np.nan,
        max_loop_closure_angle=lambda obj: np.max(obj.loop_angles) if len(obj.loop_angles) > 0 else np.nan,
        min_loop_closure_angle=lambda obj: np.min(obj.loop_angles) if len(obj.loop_angles) > 0 else np.nan,
        mean_loop_closure_angle=lambda obj: np.mean(obj.loop_angles) if len(obj.loop_angles) > 0 else np.nan,

        abs_error_x=lambda obj: obj.absolute_error.x if obj.absolute_error is not None else np.nan,
        abs_error_y=lambda obj: obj.absolute_error.y if obj.absolute_error is not None else np.nan,
        abs_error_z=lambda obj: obj.absolute_error.z if obj.absolute_error is not None else np.nan,
        abs_error_length=lambda obj: obj.absolute_error.length if obj.absolute_error is not None else np.nan,
        abs_error_direction=lambda obj: obj.absolute_error.direction if obj.absolute_error is not None else np.nan,
        abs_rot_error=lambda obj: obj.absolute_error.rot if obj.absolute_error is not None else np.nan,

        trans_error_x=lambda obj: obj.relative_error.x if obj.relative_error is not None else np.nan,
        trans_error_y=lambda obj: obj.relative_error.y if obj.relative_error is not None else np.nan,
        trans_error_z=lambda obj: obj.relative_error.z if obj.relative_error is not None else np.nan,
        trans_error_length=lambda obj: obj.relative_error.length if obj.relative_error is not None else np.nan,
        trans_error_direction=lambda obj: obj.relative_error.direction if obj.relative_error is not None else np.nan,
        rot_error=lambda obj: obj.relative_error.rot if obj.relative_error is not None else np.nan,

        trans_noise_x=lambda obj: obj.noise.x if obj.noise is not None else np.nan,
        trans_noise_y=lambda obj: obj.noise.y if obj.noise is not None else np.nan,
        trans_noise_z=lambda obj: obj.noise.z if obj.noise is not None else np.nan,
        trans_noise_length=lambda obj: obj.noise.length if obj.noise is not None else np.nan,
        trans_noise_direction=lambda obj: obj.noise.direction if obj.noise is not None else np.nan,
        rot_noise=lambda obj: obj.noise.rot if obj.noise is not None else np.nan,

        systemic_x=lambda obj: obj.systemic_error.x if obj.systemic_error is not None else np.nan,
        systemic_y=lambda obj: obj.systemic_error.y if obj.systemic_error is not None else np.nan,
        systemic_z=lambda obj: obj.systemic_error.z if obj.systemic_error is not None else np.nan,
        systemic_length=lambda obj: obj.systemic_error.length if obj.systemic_error is not None else np.nan,
        systemic_direction=lambda obj: obj.systemic_error.direction if obj.systemic_error is not None else np.nan,
        systemic_rot=lambda obj: obj.systemic_error.rot if obj.systemic_error is not None else np.nan,
    )
    # For each of the columns listed above, get the properties necessary to retrieve that column.
    # This lets us exclude all the other fields, and reduce query size
    required_fields = dict(
        repeat=('repeat',),
        timestamp=('timestamp',),
        # NOTE(review): 'is_tracking' is a Python property, not a stored
        # field — queryset.only() presumably needs 'tracking'; confirm.
        tracking=('is_tracking',),
        processing_time=('processing_time',),
        motion_x=('motion',),
        motion_y=('motion',),
        motion_z=('motion',),
        motion_length=('motion',),
        motion_roll=('motion',),
        motion_pitch=('motion',),
        motion_yaw=('motion',),
        motion_rotation=('motion',),
        num_features=('num_features',),
        num_matches=('num_matches',),

        is_loop_closure=('loop_distances',),
        num_loop_closures=('loop_distances',),
        max_loop_closure_distance=('loop_distances',),
        min_loop_closure_distance=('loop_distances',),
        mean_loop_closure_distance=('loop_distances',),
        max_loop_closure_angle=('loop_angles',),
        min_loop_closure_angle=('loop_angles',),
        mean_loop_closure_angle=('loop_angles',),

        abs_error_x=('absolute_error',),
        abs_error_y=('absolute_error',),
        abs_error_z=('absolute_error',),
        abs_error_length=('absolute_error',),
        abs_error_direction=('absolute_error',),
        abs_rot_error=('absolute_error',),

        trans_error_x=('relative_error',),
        trans_error_y=('relative_error',),
        trans_error_z=('relative_error',),
        trans_error_length=('relative_error',),
        trans_error_direction=('relative_error',),
        rot_error=('relative_error',),

        trans_noise_x=('noise',),
        trans_noise_y=('noise',),
        trans_noise_z=('noise',),
        trans_noise_length=('noise',),
        trans_noise_direction=('noise',),
        rot_noise=('noise',),

        systemic_x=('systemic_error',),
        systemic_y=('systemic_error',),
        systemic_z=('systemic_error',),
        systemic_length=('systemic_error',),
        systemic_direction=('systemic_error',),
        systemic_rot=('systemic_error',),
    )

    @property
    def is_tracking(self) -> bool:
        """True when the tracking state for this frame is OK."""
        return self.tracking is TrackingState.OK

    def get_columns(self) -> typing.Set[str]:
        """
        Get the columns available to this frame error result
        :return:
        """
        return set(self.columns.keys()) | set(self.system_properties.keys()) | set(self.image_properties.keys())

    def get_properties(self, columns: typing.Iterable[str] = None, other_properties: dict = None):
        """
        Flatten the frame error to a dictionary.
        This is used to construct rows in a Pandas data frame, so the keys are column names
        Handles pulling data from the linked system and linked image
        :return:
        """
        if other_properties is None:
            other_properties = {}
        if columns is None:
            columns = set(self.columns.keys()) | set(self.system_properties.keys()) | set(self.image_properties.keys())
        error_properties = {
            column_name: self.columns.get_value(self, column_name)
            for column_name in columns
            if column_name in self.columns
        }
        image_properties = {
            column: self.image_properties[column]
            for column in columns
            if column in self.image_properties
        }
        system_properties = {
            column: self.system_properties[column]
            for column in columns
            if column in self.system_properties
        }
        # Later dicts win on key collisions: error properties take precedence.
        return {
            **other_properties,
            **image_properties,
            **system_properties,
            **error_properties
        }

    @classmethod
    def load_minimal_for_columns(
            cls,
            error_ids: typing.Iterable[bson.ObjectId],
            columns: typing.Iterable[str] = None
    ) -> typing.List['FrameError']:
        """
        Given a set of FrameError ids, load the FrameError objects.
        If we have a set of columns as well (we will),
        load only partial objects that have just enough data to compute the requested columns.
        Completes the loading in this method with the list call -- does not return a queryset.

        :param error_ids: The list of error ids to load
        :param columns: The columns, from which we derive the set of FrameError properties to load
        :return: A list of
        """
        queryset = cls.objects.raw({'_id': {'$in': list(error_ids)}})
        if columns is not None:
            # Limit to only the fields necessary to compute the columns we're interested in
            columns = set(columns)
            fields_to_include = {
                field
                for column in columns
                if column in cls.required_fields
                for field in cls.required_fields[column]
            }
            # If the column is not in image_properties or system_properties, it will not be included
            fields_to_include.update(
                template.format(column)
                for template in ('image_properties.{0}', 'system_properties.{0}')
                for column in columns - set(cls.required_fields.keys())
            )
            if len(fields_to_include) > 0:
                queryset = queryset.only(*fields_to_include)
        # Run the query
        return list(queryset)
예제 #22
0
class CompletedQueue(MongoModel):
    """Queue entry for a completed run, linking user, client and experiment."""
    userId = fields.ReferenceField('User')
    clientId = fields.ReferenceField('Client')
    experimentId = fields.ReferenceField('Experiment')
    run = fields.EmbeddedDocumentField(Run, required=True)
예제 #23
0
class ServiceSettings(MongoModel):
    """Settings document for a streaming service: hosts, directories,
    attached streams/series and the providers allowed to manage it."""

    class Meta:
        collection_name = 'services'

    DEFAULT_SERVICE_NAME = 'Service'
    MIN_SERVICE_NAME_LENGTH = 3
    MAX_SERVICE_NAME_LENGTH = 30

    DEFAULT_FEEDBACK_DIR_PATH = constants.DEFAULT_SERVICE_ROOT_DIR_PATH + '/feedback'
    DEFAULT_TIMESHIFTS_DIR_PATH = constants.DEFAULT_SERVICE_ROOT_DIR_PATH + '/timeshifts'
    DEFAULT_HLS_DIR_PATH = constants.DEFAULT_SERVICE_ROOT_DIR_PATH + '/hls'
    DEFAULT_PLAYLISTS_DIR_PATH = constants.DEFAULT_SERVICE_ROOT_DIR_PATH + '/playlists'
    DEFAULT_DVB_DIR_PATH = constants.DEFAULT_SERVICE_ROOT_DIR_PATH + '/dvb'
    DEFAULT_CAPTURE_DIR_PATH = constants.DEFAULT_SERVICE_ROOT_DIR_PATH + '/capture_card'
    DEFAULT_VODS_IN_DIR_PATH = constants.DEFAULT_SERVICE_ROOT_DIR_PATH + '/vods_in'
    DEFAULT_VODS_DIR_PATH = constants.DEFAULT_SERVICE_ROOT_DIR_PATH + '/vods'
    DEFAULT_CODS_DIR_PATH = constants.DEFAULT_SERVICE_ROOT_DIR_PATH + '/cods'

    DEFAULT_SERVICE_HOST = 'localhost'
    DEFAULT_SERVICE_PORT = 6317
    DEFAULT_SERVICE_HTTP_HOST = 'localhost'
    DEFAULT_SERVICE_HTTP_PORT = 8000
    DEFAULT_SERVICE_VODS_HOST = 'localhost'
    DEFAULT_SERVICE_VODS_PORT = 7000
    DEFAULT_SERVICE_CODS_HOST = 'localhost'
    DEFAULT_SERVICE_CODS_PORT = 6001

    streams = fields.ListField(fields.ReferenceField(IStream, on_delete=fields.ReferenceField.PULL), default=[],
                               blank=True)
    series = fields.ListField(fields.ReferenceField(Serial, on_delete=fields.ReferenceField.PULL), default=[],
                              blank=True)
    providers = fields.EmbeddedDocumentListField(ProviderPair, default=[])

    name = fields.CharField(default=DEFAULT_SERVICE_NAME, max_length=MAX_SERVICE_NAME_LENGTH,
                            min_length=MIN_SERVICE_NAME_LENGTH)
    host = fields.EmbeddedDocumentField(HostAndPort,
                                        default=HostAndPort(host=DEFAULT_SERVICE_HOST, port=DEFAULT_SERVICE_PORT))
    http_host = fields.EmbeddedDocumentField(HostAndPort, default=HostAndPort(host=DEFAULT_SERVICE_HTTP_HOST,
                                                                              port=DEFAULT_SERVICE_HTTP_PORT))
    vods_host = fields.EmbeddedDocumentField(HostAndPort, default=HostAndPort(host=DEFAULT_SERVICE_VODS_HOST,
                                                                              port=DEFAULT_SERVICE_VODS_PORT))
    cods_host = fields.EmbeddedDocumentField(HostAndPort, default=HostAndPort(host=DEFAULT_SERVICE_CODS_HOST,
                                                                              port=DEFAULT_SERVICE_CODS_PORT))

    feedback_directory = fields.CharField(default=DEFAULT_FEEDBACK_DIR_PATH)
    timeshifts_directory = fields.CharField(default=DEFAULT_TIMESHIFTS_DIR_PATH)
    hls_directory = fields.CharField(default=DEFAULT_HLS_DIR_PATH)
    playlists_directory = fields.CharField(default=DEFAULT_PLAYLISTS_DIR_PATH)
    dvb_directory = fields.CharField(default=DEFAULT_DVB_DIR_PATH)
    capture_card_directory = fields.CharField(default=DEFAULT_CAPTURE_DIR_PATH)
    vods_in_directory = fields.CharField(default=DEFAULT_VODS_IN_DIR_PATH)
    vods_directory = fields.CharField(default=DEFAULT_VODS_DIR_PATH)
    cods_directory = fields.CharField(default=DEFAULT_CODS_DIR_PATH)

    def get_id(self) -> str:
        """Primary key as a string."""
        return str(self.pk)

    @property
    def id(self):
        """Alias for the primary key."""
        return self.pk

    def get_host(self) -> str:
        """Main service host:port as a string."""
        return str(self.host)

    def get_http_host(self) -> str:
        """HTTP endpoint as an http:// URL prefix."""
        return 'http://{0}'.format(str(self.http_host))

    def get_vods_host(self) -> str:
        """VODs endpoint as an http:// URL prefix."""
        return 'http://{0}'.format(str(self.vods_host))

    def get_cods_host(self) -> str:
        """CODs endpoint as an http:// URL prefix."""
        return 'http://{0}'.format(str(self.cods_host))

    def generate_http_link(self, url: str) -> str:
        """Rewrite an HLS filesystem path into an HTTP URL."""
        return url.replace(self.hls_directory, self.get_http_host())

    def generate_vods_link(self, url: str) -> str:
        """Rewrite a VOD filesystem path into an HTTP URL."""
        return url.replace(self.vods_directory, self.get_vods_host())

    def generate_cods_link(self, url: str) -> str:
        """Rewrite a COD filesystem path into an HTTP URL."""
        return url.replace(self.cods_directory, self.get_cods_host())

    def generate_playlist(self) -> str:
        """Build an M3U playlist covering every attached stream."""
        result = '#EXTM3U\n'
        for stream in self.streams:
            result += stream.generate_playlist(False)

        return result

    def add_streams(self, streams: [IStream]):
        """Attach several streams and persist."""
        self.streams.extend(streams)
        self.save()

    def add_stream(self, stream: IStream):
        """Attach a single stream (ignored if falsy) and persist."""
        if stream:
            self.streams.append(stream)
            self.save()

    def remove_stream(self, stream: IStream):
        """Detach and delete a stream, then persist."""
        if stream:
            self.streams.remove(stream)
            safe_delete_stream(stream)
            self.save()

    def remove_all_streams(self):
        """Delete every attached stream and clear the list."""
        for stream in list(self.streams):
            safe_delete_stream(stream)
        self.streams = []
        self.save()

    def add_provider(self, user: ProviderPair):
        """Grant a provider access (ignored if falsy) and persist."""
        if user:
            self.providers.append(user)
            self.save()

    def remove_provider(self, provider):
        """Revoke access for every ProviderPair whose user matches *provider*."""
        for prov in list(self.providers):
            if prov.user == provider:
                # Bug fix: previously removed ``provider`` (the user object)
                # from a list of ProviderPair entries, which raised
                # ValueError / removed nothing; remove the matched pair.
                self.providers.remove(prov)
        self.save()

    def find_stream_settings_by_id(self, sid: ObjectId):
        """Return the attached stream with the given id, or None."""
        for stream in self.streams:
            if stream.id == sid:
                return stream

        return None

    def delete(self, *args, **kwargs):
        """Delete every attached stream before removing the document itself."""
        for stream in self.streams:
            safe_delete_stream(stream)
        return super(ServiceSettings, self).delete(*args, **kwargs)
예제 #24
0
 class Container(MongoModel):
     """Document wrapping a single embedded OtherRefModel."""
     emb = fields.EmbeddedDocumentField(OtherRefModel)
예제 #25
0
class Task(MongoModel):
    """A task persisted in MongoDB via pymodm.

    The task's status lives in a separate ``TaskStatus`` document keyed by
    ``task_id`` (see :attr:`status` / :meth:`update_status`); when a task
    reaches a terminal status it is moved to ``Meta.archive_collection``.
    """

    task_id = fields.UUIDField(primary_key=True)  # stored as _id
    request = fields.ReferenceField(TaskRequest)
    assigned_robots = fields.ListField(blank=True)
    plan = fields.EmbeddedDocumentListField(TaskPlan, blank=True)
    constraints = fields.EmbeddedDocumentField(TaskConstraints)
    start_time = fields.DateTimeField()
    finish_time = fields.DateTimeField()

    objects = TaskManager()

    class Meta:
        archive_collection = 'task_archive'
        ignore_unknown_fields = True
        meta_model = 'task'

    def save(self):
        """Persist the model (cascading to references); a MongoDB connection
        failure is logged rather than raised (best-effort persistence)."""
        try:
            super().save(cascade=True)
        except ServerSelectionTimeoutError:
            logging.warning('Could not save models to MongoDB')

    @classmethod
    def create_new(cls, **kwargs):
        """Create, save, and return a new task in UNALLOCATED status.

        Bug fix: the original used ``elif`` for the constraints default, so
        default constraints were applied only when a ``task_id`` WAS
        supplied. The two defaults are independent, hence two ``if``s.
        """
        if 'task_id' not in kwargs:
            kwargs.update(task_id=uuid.uuid4())
        if 'constraints' not in kwargs:
            kwargs.update(constraints=TaskConstraints())
        task = cls(**kwargs)
        task.save()
        task.update_status(TaskStatusConst.UNALLOCATED)
        return task

    @classmethod
    def from_payload(cls, payload, **kwargs):
        """Build, save, and return an UNALLOCATED task from a message payload.

        Each keyword argument maps a document key to a class whose
        ``from_payload`` deserializes that entry in place.
        """
        document = Document.from_payload(payload)
        document['_id'] = document.pop('task_id')
        for key, value in kwargs.items():
            document[key] = value.from_payload(document.pop(key))
        task = cls.from_document(document)
        task.save()
        task.update_status(TaskStatusConst.UNALLOCATED)
        return task

    def to_dict(self):
        """Return a plain-dict representation with ``task_id`` as a string."""
        dict_repr = self.to_son().to_dict()
        dict_repr.pop('_cls')
        dict_repr["task_id"] = str(dict_repr.pop('_id'))
        dict_repr["constraints"] = self.constraints.to_dict()
        return dict_repr

    def to_msg(self):
        """Wrap this task in a Message."""
        msg = Message.from_model(self)
        return msg

    @classmethod
    def from_request(cls, request):
        """Create a task from a task request, inheriting its hard-constraints flag."""
        constraints = TaskConstraints(hard=request.hard_constraints)
        task = cls.create_new(request=request.request_id,
                              constraints=constraints)
        return task

    @property
    def delayed(self):
        """Whether the associated TaskStatus document is flagged as delayed."""
        return self.status.delayed

    @delayed.setter
    def delayed(self, boolean):
        task_status = Task.get_task_status(self.task_id)
        task_status.delayed = boolean
        task_status.save()

    @property
    def hard_constraints(self):
        """Whether the task's temporal constraints are hard."""
        return self.constraints.hard

    @hard_constraints.setter
    def hard_constraints(self, boolean):
        self.constraints.hard = boolean
        self.save()

    def archive(self):
        """Copy this document into the archive collection, then delete the live one."""
        with switch_collection(Task, Task.Meta.archive_collection):
            super().save()
        self.delete()

    def update_status(self, status):
        """Record *status* in the TaskStatus document (creating it if absent).

        Terminal statuses (COMPLETED/CANCELED/ABORTED) archive both the
        status document and the task itself.
        """
        try:
            task_status = Task.get_task_status(self.task_id)
            task_status.status = status
        except DoesNotExist:
            task_status = TaskStatus(task=self.task_id, status=status)
        task_status.save()
        if status in [
                TaskStatusConst.COMPLETED, TaskStatusConst.CANCELED,
                TaskStatusConst.ABORTED
        ]:
            task_status.archive()
            self.archive()

    def assign_robots(self, robot_ids):
        """Assign *robot_ids* to the task and mark it ALLOCATED.

        Only the first robot is written into the plan — this does not
        support single-task multi-robot assignments.
        """
        self.assigned_robots = robot_ids
        self.plan[0].robot = robot_ids[0]
        self.update_status(TaskStatusConst.ALLOCATED)
        self.save()

    def unassign_robots(self):
        """Clear the robot assignment (both the list and the plan entry)."""
        self.assigned_robots = list()
        self.plan[0].robot = None
        self.save()

    def update_plan(self, task_plan):
        """Append the robot-independent section of the plan (e.g. for
        transportation tasks, the leg between pickup and delivery) and
        mark the task PLANNED."""
        self.plan.append(task_plan)
        self.update_status(TaskStatusConst.PLANNED)
        self.save()

    def update_schedule(self, schedule):
        """Store the scheduled start and finish times taken from *schedule*."""
        self.start_time = schedule['start_time']
        self.finish_time = schedule['finish_time']
        self.save()

    def is_executable(self):
        """Return True when the scheduled start time has already passed."""
        return TimeStamp.from_datetime(self.start_time) < TimeStamp()

    @property
    def meta_model(self):
        return self.Meta.meta_model

    @property
    def status(self):
        """The TaskStatus document associated with this task."""
        return TaskStatus.objects.get({"_id": self.task_id})

    @classmethod
    def get_task(cls, task_id):
        """Fetch a task by id via the manager."""
        return cls.objects.get_task(task_id)

    @staticmethod
    def get_task_status(task_id):
        """Fetch the TaskStatus document for *task_id* (raises DoesNotExist)."""
        return TaskStatus.objects.get({'_id': task_id})

    @staticmethod
    def get_tasks_by_status(status):
        """Return the tasks whose TaskStatus equals *status*."""
        return [status.task for status in TaskStatus.objects.by_status(status)]

    @classmethod
    def get_tasks_by_robot(cls, robot_id):
        """Return all tasks that have *robot_id* among their assigned robots."""
        return [
            task for task in cls.objects.all()
            if robot_id in task.assigned_robots
        ]

    @classmethod
    def get_tasks(cls, robot_id=None, status=None):
        """Return tasks, optionally filtered by status and/or assigned robot.

        Bug fix: the original applied the robot filter unconditionally, so
        calling with ``robot_id=None`` tested ``None in task.assigned_robots``
        and discarded everything; the filter now applies only when a robot
        id is actually given.
        """
        if status:
            tasks = cls.get_tasks_by_status(status)
        else:
            tasks = cls.objects.all()

        if robot_id is None:
            return list(tasks)
        return [task for task in tasks if robot_id in task.assigned_robots]

    def update_progress(self, action_id, action_status, **kwargs):
        """Forward an action progress update to the TaskStatus document."""
        status = TaskStatus.objects.get({"_id": self.task_id})
        status.update_progress(action_id, action_status, **kwargs)
예제 #26
0
 class Blog(MongoModel):
     name = fields.CharField()
     reviews = fields.EmbeddedDocumentField(Review)
예제 #27
0
class ProfileIcon(MongoModel):
    """Profile icon document: an external numeric id plus an embedded image.

    NOTE(review): the ``riot_id`` name suggests this mirrors a Riot Games
    API identifier — confirm against the data source.
    """
    # External numeric identifier of the icon.
    riot_id = fields.IntegerField()
    # Embedded 'Image' document (model referenced by name; defined elsewhere).
    image = fields.EmbeddedDocumentField('Image')
예제 #28
0
class PersonTrail(MongoModel):
    """Per-person trail of transactions and last purchase, keyed by CPF.

    NOTE(review): CPF is presumably the Brazilian taxpayer identifier used
    as the natural primary key — confirm.
    """
    # Natural primary key (stored as _id).
    cpf = fields.CharField(primary_key=True)
    # Presumably the time this person was last looked up — TODO confirm.
    last_query = fields.DateTimeField()
    # Embedded 'Transaction' documents (model referenced by name).
    transactions = fields.EmbeddedDocumentListField('Transaction')
    # Embedded 'LastPurchase' document (model referenced by name).
    last_purchase = fields.EmbeddedDocumentField('LastPurchase')
class Account(MongoModel):
    """Account document keyed by a UUID, with embedded summary and config.

    ``summary`` and ``config`` embed the 'Summary' and 'Config' models,
    both referenced by name and defined elsewhere.
    """
    # UUID primary key (stored as _id).
    account_id = fields.UUIDField(primary_key=True)
    summary = fields.EmbeddedDocumentField('Summary')
    config = fields.EmbeddedDocumentField('Config')
예제 #30
0
class TaskLot(MongoModel):
    """Allocation-oriented slice of a :class:`Task`: start/finish locations
    plus temporal constraints, keyed by the task itself.

    Like ``Task``, terminal statuses move the document to
    ``Meta.archive_collection``.
    """

    task = fields.ReferenceField(Task, primary_key=True)  # stored as _id
    start_location = fields.CharField()
    finish_location = fields.CharField()
    constraints = fields.EmbeddedDocumentField(TaskConstraints)

    class Meta:
        archive_collection = 'task_lot_archive'
        ignore_unknown_fields = True

    def save(self):
        """Persist the model (cascading); a MongoDB connection failure is
        logged rather than raised (best-effort persistence)."""
        try:
            super().save(cascade=True)
        except ServerSelectionTimeoutError:
            logging.warning('Could not save models to MongoDB')

    def archive(self):
        """Copy this document into the archive collection, then delete the live one.

        Bug fix: ``update_status`` already called ``self.archive()``, but no
        such method was defined on TaskLot; added here, mirroring
        ``Task.archive``. NOTE(review): assumes ``switch_collection`` is
        imported in this module, as it is where ``Task`` is defined — confirm.
        """
        with switch_collection(TaskLot, TaskLot.Meta.archive_collection):
            super().save()
        self.delete()

    @classmethod
    def create(cls, task, start_location, finish_location, earliest_start_time,
               latest_start_time, hard_constraints):
        """Create, save, and return a new UNALLOCATED task lot.

        The earliest/latest start times become the single timepoint
        constraint of the lot's TaskConstraints.
        """
        start_timepoint_constraints = TimepointConstraints(
            earliest_time=earliest_start_time, latest_time=latest_start_time)

        constraints = TaskConstraints(
            timepoint_constraints=[start_timepoint_constraints],
            hard=hard_constraints)

        task_lot = cls(task=task,
                       start_location=start_location,
                       finish_location=finish_location,
                       constraints=constraints)
        task_lot.save()
        task_lot.update_status(TaskStatusConst.UNALLOCATED)

        return task_lot

    def update_status(self, status):
        """Record *status* in a new TaskStatus document; COMPLETED/CANCELED
        archive both this lot and the status document."""
        task_status = TaskStatus(task=self.task, status=status)
        task_status.save()
        if status in [TaskStatusConst.COMPLETED, TaskStatusConst.CANCELED]:
            self.archive()
            task_status.archive()

    @classmethod
    def from_payload(cls, payload):
        """Build (but do not save) a task lot from a message payload.

        Consistency fix: uses ``cls`` instead of the hard-coded ``TaskLot``
        so subclasses deserialize to themselves, matching
        ``Task.from_payload``.
        """
        document = Document.from_payload(payload)
        document['_id'] = document.pop('task_id')
        document["constraints"] = TaskConstraints.from_payload(
            document.pop("constraints"))
        task_lot = cls.from_document(document)
        return task_lot

    def to_dict(self):
        """Return a plain-dict representation with ``task_id`` as a string."""
        dict_repr = self.to_son().to_dict()
        dict_repr.pop('_cls')
        dict_repr["task_id"] = str(dict_repr.pop('_id'))
        dict_repr["constraints"] = self.constraints.to_dict()
        return dict_repr

    @classmethod
    def from_task(cls, task):
        """Derive a task lot from a transportation task's request fields.

        Consistency fix: uses ``cls.create`` instead of the hard-coded
        class name.
        """
        return cls.create(task,
                          task.request.pickup_location,
                          task.request.delivery_location,
                          task.request.earliest_pickup_time,
                          task.request.latest_pickup_time,
                          task.request.hard_constraints)