Example no. 1
0
class Sample(MongoModel):
    """A laboratory sample tracked through LIMS workflows.

    Holds the sample identity (``barcode``), submitter-supplied
    ``properties``, the ``batches`` it has been assigned to, and
    per-workflow step results in ``workflows`` keyed as
    ``workflows[workflow_name][step_category] -> [WorkflowResults, ...]``
    (one entry per attempt).
    """

    # Unique external identifier; also enforced by the unique index below.
    barcode = fields.CharField(required=True)
    # Free-form labels; references into the Tag collection.
    tags = fields.ListField(fields.ReferenceField(Tag), blank=True)
    # Submitter-provided metadata (species, group, emails, ...).
    properties = fields.EmbeddedDocumentField(S_properties, required=True)
    # Workflow-batch assignments, including plate position and step state.
    batches = fields.EmbeddedDocumentListField(Batch)
    # workflow name -> step category -> list of WorkflowResults attempts.
    workflows = MapField(
        MapField(fields.EmbeddedDocumentListField(WorkflowResults)))
    submitted_on = fields.DateTimeField(required=True)
    archived = fields.BooleanField(required=True, default=False)
    comments = fields.CharField(default="", blank=True, required=True)

    class Meta:
        # j=True: acknowledge writes only after they reach the journal.
        write_concern = WriteConcern(j=True)
        indexes = [
            # Barcodes must be globally unique.
            IndexModel([("barcode", 1)], unique=True),
            # Single compound text index backing the free-text search box.
            IndexModel(
                [("barcode", TEXT),
                 ("properties.sample_info.summary.name", TEXT),
                 ("properties.sample_info.summary.submitted_species_name",
                  TEXT), ("properties.sample_info.summary.emails", TEXT),
                 ("properties.sample_info.summary.group", TEXT),
                 ("workflows.*.root.0.sample.batch_name", TEXT)],
                name="textindex")
        ]

    @staticmethod
    def plate_size(plate):
        if plate == "96plate":
            return 96

    @staticmethod
    def _validate_type(type_, value):
        if type_ == "basicalphanum":
            value = str(value)
            return not bool(re.compile(r'[^A-Za-z0-9]').search(value))
        elif type_ == "alphanum":
            value = str(value)
            return not bool(re.compile(r'[^A-Za-z0-9_\-]').search(value))
        elif type_ == "species":
            value = str(value)
            return not bool(re.compile(r'[^A-Za-z\. ]').search(value))
        elif type_ == "barcode":
            value = str(value)
            return not bool(re.compile(r'[^A-Za-z0-9_\-]').search(value))
        elif type_ == "email":
            value = str(value)
            # from emailregex.com
            return bool(
                re.compile(
                    r"(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)").
                search(value))
        elif type_ == "boolean":
            return isinstance(value, bool)

    @staticmethod
    def validate_field(field, value):
        """
        Returns false if the field is not valid.
        """
        if field == "species":
            return True  # valid if in species list.
        elif field == "group":  # This for now, but should be validated in full list of groups.
            return Sample._validate_type("alphanum", value)
        elif field in ("name", "sampleid", "barcode", "batch_name"):
            return Sample._validate_type("barcode", value)
        elif field == "emails":
            email_list = re.split("[;,\s]+", value)
            for e in email_list:
                if not Sample._validate_type("email", e):
                    return False
            return True
        elif field == "archived":
            if value == "True":
                value = True
            elif value == "False":
                value = False
            return Sample._validate_type("boolean", value)
        elif field == "organism":
            return Sample._validate_type("species", value)
        elif field == "priority":
            try:
                return 1 <= int(value) <= 4
            except ValueError:
                return False
        elif field == "tags":
            return Tag.validate_field(value)
        elif field in [
                "comments", "costcenter", "submission_comments", "supplyinglab"
        ]:
            # Always valid
            return True
        else:
            return False

    @staticmethod
    def columns(template, species_options=None, custom_mapping=None):
        """Return column-definition dicts for the front-end tables.

        template: "submit" (submission spreadsheet columns) or
            "sample_list_view" (editable sample table columns).
        species_options: option list for the species select column;
            only read by "sample_list_view".
        custom_mapping: optional {lowercased default name: replacement}
            used to rename "submit" columns (see bottom of function).
        """
        from minilims.models.species import Species
        columns = {
            # Columns for the sample-submission template.
            "submit": [
                {
                    "data": "SampleID",
                    "unique": True,
                    "required": True
                },
                {
                    "data": "Barcode",
                    "unique": True,
                    "required": True
                },
                {
                    "data": "Organism",
                    "type": "select",
                    "options": Species.get_name_list(alias=True),
                    "required": True
                },
                {
                    "defaultContent": "",
                    "data": "Emails"
                },
                {
                    "defaultContent": "",
                    "data": "Priority",
                    "type": "select",
                    "options": ["low", "high"],
                    "required": True,
                },
                # {
                #     "defaultContent": "",
                #     "data": "SupplyDate",
                # },
                {
                    "defaultContent": "",
                    "data": "Costcenter"
                },
                {
                    "data": "PlateName",
                    "defaultContent": "",
                },
                {
                    "data": "WellPositionInSuppliedPlate",
                    "defaultContent": "",
                },
                {
                    "defaultContent": "",
                    "data": "Comments",
                    "type": "textarea"
                }
            ],
            # Columns for the main sample list/edit view.
            "sample_list_view": [
                {
                    "data": "none",
                    "type": "hidden"
                },
                {
                    "data": "barcode",
                    "title": "barcode",
                    "readonly": "true",
                    "unique": "true",
                    "name": "barcode",
                },
                {
                    "data":
                    "tags",
                    "title":
                    "tags",
                    "type":
                    "select",
                    "multiple":
                    "true",
                    # Options are the tag primary keys currently in the DB.
                    "options":
                    [str(x.pk) for x in Tag.objects.project({
                        "_id": 1
                    }).all()],
                    "name":
                    "tags"
                },
                {
                    "data": "submitted_on",
                    "title": "Submission date",
                    "readonly": "true",
                    "name": "submitted_on"
                },
                {
                    "data": "name",
                    "title": "SampleID",
                    "name": "name"
                },
                {
                    "data": "priority",
                    "title": "Priority",
                    "type": "select",
                    "options": current_app.config["PRIORITY"],
                    "name": "priority",
                },
                {
                    "data": "species",
                    "title": "species",
                    "type": "select",
                    "options": species_options,
                    "name": "species"
                },
                {
                    "data": "group",
                    "title": "supplying_lab",
                    "name": "group"
                },
                {
                    "data": "costcenter",
                    "title": "Cost Center",
                    "name": "costcenter"
                },
                {
                    "data": "batch",
                    "title": "batch",
                    # "type": "select",
                    # "multiple": "true",
                    "readonly": "true",
                    "name": "batch"
                },
                {
                    "data": "genome_size",
                    "title": "Genome size",
                    "readonly": "true",
                    "name": "genome_size",
                },
                {
                    "data": "supplied_plate_name",
                    "title": "Supplied Plate Name",
                    "readonly": "true",
                    "name": "supplied_plate_name"
                },
                {
                    "data": "position_in_supplied_plate",
                    "title": "Position in Supplied Plate",
                    "readonly": "true",
                    "name": "position_in_supplied_plate"
                },
                {
                    "data": "submission_comments",
                    "title": "Submission Comments",
                    "readonly": "true",
                    "name": "submission_comments"
                },
                {
                    "data": "comments",
                    "title": "Comments",
                    "name": "comments"
                },
                {
                    "data": "archived",
                    "title": "archived",
                    "type": "select",
                    "options": ["True", "False"],
                    "name": "archived"
                }
            ]
        }
        # Optionally rename "submit" columns per the supplied mapping
        # (keys are the lowercased default column names).
        if template == "submit" and custom_mapping is not None:
            customized_columns = []
            for coldata in columns[template]:
                data_l = coldata["data"].lower()
                # change only if in mapping
                coldata["data"] = custom_mapping.get(data_l, coldata["data"])
                coldata["title"] = custom_mapping.get(data_l, coldata["data"])
                customized_columns.append(coldata)
            return customized_columns

        return columns[template]

    @classmethod
    def searchbox_suggestion(cls, query):
        """Return up to 10 barcodes containing *query* as a substring.

        The query is first validated with the "barcode" rule (letters,
        digits, '_' and '-'), which also keeps the unanchored $regex
        below free of regex metacharacters.  Invalid queries return [].
        """
        if cls.validate_field("barcode", query):
            samples = cls.objects.raw({
                "barcode": {
                    "$regex": query
                }
            }).project({
                "barcode": 1,
                "_id": 0
            }).limit(10)
            return [sample.barcode for sample in samples]
        else:
            return []

    @classmethod
    def get_batch_names(cls, query=None, workflow_name=None):
        """Return known batch names.

        With no *query*, returns all distinct batch names across every
        sample.  With a *query*, it is validated as a barcode and the
        result is restricted to batches of *workflow_name*; an unknown
        workflow or invalid query yields [].

        NOTE(review): *query* itself is not used to filter the
        aggregation below — only its validity gates the branch; confirm
        whether substring filtering was intended.
        """
        if query is None:
            # Distinct over the embedded batches array, via the raw
            # pymongo collection.
            return cls.objects.raw(
                {})._collection.distinct("batches.batch_name")
        else:
            try:
                workflow = Workflow.objects.get({"name": workflow_name})
            except errors.DoesNotExist:
                workflow = None
            if cls.validate_field("barcode", query) and workflow is not None:
                # One document per (sample, batch), filtered to the
                # workflow, grouped to unique batch names.
                batches = cls.objects.aggregate(
                    {"$unwind": {
                        "path": "$batches"
                    }}, {"$match": {
                        "batches.workflow": workflow._id
                    }}, {"$group": {
                        "_id": "$batches.batch_name"
                    }})
                return [x["_id"] for x in batches]
            else:
                return []

    @classmethod
    def get_batch_overview(cls):
        """Build the per-workflow batch overview for the dashboard.

        The aggregation pipeline: unwind non-archived samples' batches,
        group by (batch, workflow, step, plate) to count samples per
        step, regroup by (batch, workflow, plate) collecting the step
        counts, then group by workflow collecting its batches, and
        finally $lookup the workflow document for its name.

        The Python post-processing then aligns each batch's step counts
        with the workflow's ordered step list, producing for every batch
        a "steps" list of {step_name, step_d_name, count, finished} and
        a "progress" pair [finished_steps, total_steps].
        """
        overview_query = list(
            cls.objects.aggregate({"$match": {
                "archived": False
            }}, {"$unwind": {
                "path": "$batches"
            }}, {
                "$group": {
                    "_id": {
                        "batch_name": "$batches.batch_name",
                        "workflow_id": "$batches.workflow",
                        "step_cat": "$batches.step_cat",
                        "plate_type": "$batches.position.plate_type"
                    },
                    "count": {
                        "$sum": 1.0
                    },
                    "batch_created_on": {
                        "$min": "$batches.batch_created_on"
                    }
                }
            }, {
                "$group": {
                    "_id": {
                        "batch_name": "$_id.batch_name",
                        "workflow_id": "$_id.workflow_id",
                        "plate_type": "$_id.plate_type"
                    },
                    "count": {
                        "$sum": "$count"
                    },
                    "batch_created_on": {
                        "$min": "$batch_created_on"
                    },
                    "steps": {
                        "$push": "$$ROOT"
                    }
                }
            }, {
                "$group": {
                    "_id": "$_id.workflow_id",
                    "batches": {
                        "$push": "$$ROOT"
                    },
                    "count": {
                        "$sum": "$count"
                    }
                }
            }, {
                "$lookup": {
                    "from": "workflow",
                    "localField": "_id",
                    "foreignField": "_id",
                    "as": "workflow"
                }
            }, {
                "$project": {
                    "_id": 1,
                    "batches._id.batch_name": 1,
                    "batches._id.plate_type": 1,
                    "batches._id.step_cat": 1,
                    "batches.count": 1,
                    "batches.batch_created_on": 1,
                    "count": 1,
                    "batches.steps.count": 1,
                    "batches.steps._id.step_cat": 1,
                    "name": {
                        "$arrayElemAt": ["$workflow.name", 0]
                    }
                }
            }))
        for workflow in overview_query:
            # Dict to cache step names to avoid multiple queries for the same name.
            step_cache = {}
            workflow_db = Workflow.objects.get({"_id": workflow["_id"]})
            workflow["_id"] = str(workflow["_id"])
            workflow["display_name"] = workflow_db.display_name
            for batch in workflow["batches"]:
                # step_i walks the batch's (sorted-by-group) step counts
                # in lockstep with the workflow's canonical step list.
                step_i = 0
                step_entries = []
                # The first step of the list that has partial count will be finished,
                # the rest will be partially finished so this should be false for them.
                first_finished_true = False
                # Samples still at "root" are assigned but not started.
                if batch["steps"][step_i]["_id"]["step_cat"] == "root":
                    step_entries.append({
                        "step_name":
                        "root",
                        "step_d_name":
                        "Assigned",
                        "count":
                        batch["steps"][step_i]["count"],
                        "finished":
                        True
                    })
                    step_i += 1
                    first_finished_true = True
                # [finished_count, total_count] for the progress bar.
                steps_progress = [0, len(workflow_db.steps)]
                for step in workflow_db.steps:
                    # Check if step is in dict cache before querying
                    if step.pk not in step_cache:
                        step_cache[str(step.pk)] = {
                            "d_name": step.display_name,
                            "cat": step.category,
                            "name": step.name
                        }
                    step_data = step_cache[str(step.pk)]

                    if step_i < len(batch["steps"]) and batch["steps"][step_i][
                            "_id"]["step_cat"] == step_data["cat"]:
                        # This workflow step has samples currently on it.
                        if not first_finished_true:
                            finished = True
                            first_finished_true = True
                            steps_progress[0] += 1
                        else:
                            finished = False
                        step_entries.append({
                            "step_d_name":
                            step_data["d_name"],
                            "step_name":
                            step_data["name"],
                            "count":
                            batch["steps"][step_i]["count"],
                            "finished":
                            finished
                        })
                        step_i += 1
                    else:
                        # No samples on this step: it is either already
                        # passed (finished) or not yet reached.
                        if not first_finished_true:
                            finished = True
                            steps_progress[0] += 1
                        else:
                            finished = False
                        step_entries.append({
                            "step_d_name": step_data["d_name"],
                            "step_name": step_data["name"],
                            "count": 0,
                            "finished": finished
                        })
                batch["steps"] = step_entries
                batch["progress"] = steps_progress

        # overview_query = list(overview_query)
        return overview_query

    @classmethod
    def get_unassigned(cls, count=False, group=None):
        """Samples with no batch assignments (empty or missing "batches").

        With count=True returns the number of matches, otherwise the
        queryset itself.  *group* optionally restricts to one
        supplying-lab group.
        """
        criteria = {
            "$or": [
                {"batches": {"$size": 0}},
                {"batches": {"$exists": False}},
            ]
        }
        if group is not None:
            criteria["properties.sample_info.summary.group"] = group
        matched = cls.objects.raw(criteria)
        return matched.count() if count else matched

    @classmethod
    def get_archived(cls, count=False, group=None):
        """Archived samples, optionally restricted to one group.

        With count=True returns the number of matches, otherwise the
        queryset itself.
        """
        criteria = {"archived": True}
        if group is not None:
            criteria["properties.sample_info.summary.group"] = group
        matched = cls.objects.raw(criteria)
        return matched.count() if count else matched

    @classmethod
    def get_plate_view(cls,
                       workflow,
                       batch_name,
                       plate=None,
                       barcode_only=False):
        """
        Generate plate view object including where samples are.

        Returns {"plate": 8x12 grid, "free_spots": [...], "plate_type": ...}
        for 96-well plates, or None for unknown plate types.  Grid cells
        hold the sample (or its barcode when barcode_only=True).

        Raises ValueError if the batch's samples disagree on plate type.
        """
        # One query matches both passes: non-archived membership in this
        # workflow/batch.
        batch_query = {
            "batches": {
                "$elemMatch": {
                    "batch_name": batch_name,
                    "workflow": workflow.pk,
                    "archived": False
                }
            }
        }

        if plate is None:
            # Infer the plate type from the samples already in the batch.
            for sample in cls.objects.raw(batch_query):
                pt = sample.get_batches(workflow.name,
                                        batch_name)[0].position.plate_type
                if plate is None:
                    plate = pt
                elif plate != pt:
                    # BUG FIX: the second half of this message was not an
                    # f-string, so the placeholders were emitted literally.
                    raise ValueError(
                        "Some samples belong to different plates in the same batch."
                        f" Workflow: {workflow.name}. Batch: {batch_name}")

        if plate == "96plate":
            plate_view = {
                "plate":
                [[None for i in range(12)] for j in range(8)],  # List per row
                "free_spots": [],
                "plate_type": plate
            }
            taken_spots = [False] * 96
            for sample in cls.objects.raw(batch_query):
                pos = sample.get_batches(workflow.name, batch_name)[0].position
                taken_spots[pos.index] = True
                coord = pos.get_coordinates(True)
                plate_view["plate"][coord[0]][coord[1]] = (
                    sample.barcode if barcode_only else sample)
            # Any well index never marked taken is free.
            plate_view["free_spots"] = [
                i for i, taken in enumerate(taken_spots) if not taken
            ]
            return plate_view
        return None

    # @classmethod
    # def get_step_table(cls, sample_ids):

    #     db = connection._get_db()
    #     samples = list(db[cls._mongometa.collection_name].find({
    #         "_id": {"$in": sample_ids}
    #     },{
    #         "barcode": 1,
    #         "comments": 1,
    #         "batches": 1,
    #         "properties.sample_info.summary."
    #         "_id": 0
    #     }))
    #     return samples

    def update(self, field, new_value):
        """Apply an edit to one editable field of this sample in memory.

        Raises ValueError for unsupported field names.  Does not save.
        """
        summary = self.properties.sample_info.summary
        if field == "species":
            summary.submitted_species = new_value
            summary.submitted_species_name = new_value.name
        elif field == "group":
            summary.group = new_value
        elif field == "name":
            summary.name = new_value
        elif field == "archived":
            # UI sends a string; anything other than "true" means False.
            self.archived = new_value.lower() == "true"
        elif field == "priority":
            summary.priority = new_value
        elif field == "costcenter":
            summary.costcenter = new_value
        elif field == "comments":
            self.comments = new_value
        elif field == "tags":
            self.tags = Tag.objects.raw({"_id": {"$in": new_value}})
        else:
            raise ValueError("Field not valid")

    def update_last_workflow(self, workflow, batch_name, step_cat):
        """Set step_cat on every batch entry matching workflow and batch name."""
        matching = (entry for entry in self.batches
                    if entry.workflow == workflow
                    and entry.batch_name == batch_name)
        for entry in matching:
            entry.step_cat = step_cat

    def assign_workflow(self, workflow, batch_name, index, plate_type,
                        prev_step_cat):
        """Assign this sample to a workflow batch at a plate position.

        If the sample already has a (previously archived) entry for this
        workflow/batch, it is re-activated and repositioned.  Otherwise a
        new Batch entry is appended and, when entering at "root" or a
        workflow not seen before, a synthetic finished "root"
        WorkflowResults is recorded as the chain anchor.  Saves.
        """
        batches = self.get_batches(workflow.name, batch_name)
        if len(batches) > 0:
            batch = batches[0]  # Assuming only one archived batch can be here.
            batch.archived = False
            batch.step_cat = prev_step_cat
            batch.position = PositionInPlate(plate_type=plate_type,
                                             index=index)
        else:
            wlf = Batch(workflow=workflow,
                        step_cat=prev_step_cat,
                        batch_name=batch_name,
                        batch_created_on=datetime.datetime.now(),
                        position=PositionInPlate(plate_type=plate_type,
                                                 index=index),
                        archived=False)
            self.batches.append(wlf)
            if workflow.name not in self.workflows or prev_step_cat == "root":
                # Anchor result so later steps can chain back via parent.
                step_i = WorkflowResults(parent=None,
                                         sample={},
                                         start_date=datetime.datetime.now(),
                                         finish_date=datetime.datetime.now(),
                                         status="finished",
                                         step_instance=None,
                                         batch_name=batch_name,
                                         index=index)
                if workflow.name not in self.workflows:
                    self.workflows[workflow.name] = {prev_step_cat: [step_i]}
                else:
                    self.workflows[workflow.name][prev_step_cat] = [step_i]
        self.save()

    def reorganize(self, workflow, batch_name, new_index):
        """Move this sample to a new well index within an existing batch."""
        matching = self.get_batches(workflow.name, batch_name)
        if not matching:
            raise ValueError(
                "Sample tried to be reorganized into a workflow-batch that doesn't exist."
            )
        matching[0].position.index = new_index
        self.save()

    def unassign_workflow(self, workflow, batch_name):
        """
        Sets the batch as archived
        """
        targets = [
            b for b in self.batches
            if b.workflow == workflow and b.batch_name == batch_name
        ]
        for target in targets:
            target.archived = True
        self.save()

    def get_batches(self, workflow_name, batch_name=None):
        """
        Get batches with given name
        (all of the workflow's batches when batch_name is None).
        """
        def _matches(entry):
            if entry.workflow.name != workflow_name:
                return False
            return batch_name is None or entry.batch_name == batch_name

        return [entry for entry in self.batches if _matches(entry)]

    def finish_step(self, step_instance, save):
        """
        Updates batches

        Marks this sample's WorkflowResults attempt for *step_instance*
        as finished, copies the step's results onto it, advances the
        batch's step_cat, and returns the workflow's recommended next
        step name.  If the workflow is complete, finish_workflow() is
        also triggered.  Persists only when *save* is truthy (but note
        finish_workflow saves unconditionally).
        """
        step_cat = step_instance.step.category
        workflow_name, prev_step = self.get_prev_step(step_cat)
        workflow = Workflow.objects.get({"name": workflow_name})
        self.update_last_workflow(workflow, step_instance.batch, step_cat)
        # Find the attempt belonging to this step_instance and record
        # finish date, status and results.
        for i in range(len(self.workflows[workflow_name][step_cat])):
            instance = self.workflows[workflow_name][step_cat][i]
            if step_instance == instance.step_instance:
                self.workflows[workflow_name][step_cat][
                    i].finish_date = datetime.datetime.now()
                self.workflows[workflow_name][step_cat][i].status = "finished"
                self.workflows[workflow_name][step_cat][
                    i].all = step_instance.result_all
                self.workflows[workflow_name][step_cat][
                    i].sample = step_instance.result_samples[self.barcode]
        if save:
            self.save()
        recommended_next = workflow.next_step(step_instance.step.name)
        if recommended_next == "_workflow_finished":
            self.finish_workflow(workflow, step_instance.batch, step_cat)
        return recommended_next

    def find_workflow_for_step(self, step_cat):
        """
        Returns a list of workflows for which this sample contains a result for the given step category.
        Random order.
        """
        return [
            name for name, steps in self.workflows.items() if step_cat in steps
        ]

    def _find_valid_prev_steps(self, next_step):
        """
        Return (workflow_name, prev_step_cat) pairs from this sample's
        non-archived batches for which next_step is a valid transition.
        """
        candidates = []
        for batch in self.batches:
            if batch.archived:
                continue
            if batch.workflow.valid_next_step(batch.step_cat, next_step):
                candidates.append((batch.workflow.name, batch.step_cat))
        return candidates

    def get_prev_step(self, step_name):
        """
        Given a step name return prev step and workflow name. Use this instead of _find_valid_prev_steps.
        Returns (workflow_name, step_name)

        Raises ValueError when no workflow can run the step; logs a
        warning and picks the first when several can.
        """
        candidates = self._find_valid_prev_steps(step_name)
        if not candidates:
            raise ValueError(
                "No workflow available for sample {} and step {}.".format(
                    self.barcode, step_name))
        if len(candidates) > 1:
            current_app.logger.warning(
                "Sample {} can init step {} in more than one workflow: {}. Choosing first"
                .format(self.barcode, step_name, candidates))
        return candidates[0]

    def valid_next_step(self, next_step, batch_name):
        """
        Checks if the provided step is valid for this sample.
        True when any active batch with the given name allows the
        transition from its current step category.
        """
        active = (
            b for b in self.batches
            if not b.archived and b.batch_name == batch_name
        )
        return any(
            b.workflow.valid_next_step(b.step_cat, next_step) for b in active
        )

    def send_to_step(self, step_name, workflow_name=None):
        """Rewind this sample's batch state so *step_name* can run next.

        When workflow_name is None it is resolved via the step's
        category; ambiguity logs a warning and picks the first match,
        no match raises ValueError.
        """
        if workflow_name is None:
            step = Step.objects.get({"name": step_name})
            workflows = self.find_workflow_for_step(step.category)
            if len(workflows) == 1:
                workflow_name = workflows[0]
            elif len(workflows) == 0:
                raise ValueError(
                    "No workflow available for sample {} and step {}.".format(
                        self.barcode, step_name))
            else:
                current_app.logger.warning(
                    "Sample {} can init step {} in more than one workflow: {}. Choosing first"
                    .format(self.barcode, step_name, workflows))
                workflow_name = workflows[0]
        workflow = Workflow.objects.get({"name": workflow_name})
        prev_step = workflow.get_prev_step(step_name)
        if prev_step is None:
            raise ValueError(
                "No workflow available for sample {} and step {}.".format(
                    self.barcode, step_name))
        # NOTE(review): update_last_workflow() takes (workflow, batch_name,
        # step_cat) but is called here with only two arguments, so this line
        # raises TypeError if reached.  It likely needs the target batch
        # name as well — confirm intended batch before fixing.
        self.update_last_workflow(workflow, prev_step)
        self.save()

    def init_step(self, step_instance):
        """
        Initialises step in sample. If step can belong to more than
        one workflow the sample is in, throw an error.

        Appends a new "started" WorkflowResults attempt whose parent
        pointer encodes "workflow.prev_step.attempt_index" so result
        chains can be walked back later.  Saves and returns the batch
        name of the step instance.
        """
        workflow_name, prev_step = self.get_prev_step(
            step_instance.step.category)
        prev_step_attempts = self.workflows[workflow_name][prev_step]
        # Chain to the latest attempt of the previous step.
        instance_index = len(prev_step_attempts) - 1
        batch = self.get_batches(workflow_name, step_instance.batch)
        step_result_sample = WorkflowResults(
            parent="{}.{}.{}".format(workflow_name, prev_step, instance_index),
            status="started",
            step_instance=step_instance._id,
            start_date=datetime.datetime.now(),
            batch_name=step_instance.batch,
            index=batch[0].position.index)
        step_attempts = self.workflows[workflow_name].get(
            step_instance.step.category, [])
        step_attempts.append(step_result_sample)
        self.workflows[workflow_name][
            step_instance.step.category] = step_attempts
        self.save()

        return step_instance.batch

    def result_chain(self, chain, exit_match=None):
        """
        From a list with a single WorkflowResults, follow parent pointers
        ("workflow.step_cat.attempt") and return the full result chain
        back to root.  Iterative equivalent of the original recursion.
        Exit match: (workflow_name, step_cat) — stop once a result from
        that workflow/step has been appended.
        """
        while chain[-1].parent is not None:
            wf_name, cat, attempt = chain[-1].parent.split(".")
            chain.append(self.workflows[wf_name][cat][int(attempt)])
            if exit_match is not None and exit_match[0] == wf_name \
                    and exit_match[1] == cat:
                break
        return chain

    def find_result(self, workflow_name, step_cat, scope, field_name,
                    step_instance):
        """Look up a result value produced earlier in this sample's chain.

        Starting from the attempt belonging to *step_instance*, walks the
        result chain back to (workflow_name, step_cat) and returns
        chain-end's *scope* ("sample" or "all") value for *field_name*.

        Raises minilims.errors.MissingValueError when the field is absent.
        NOTE(review): if no attempt matches step_instance, root stays
        None and result_chain will fail with AttributeError — confirm
        callers guarantee a match.
        """
        root = None
        self.refresh_from_db()  # Required
        for attempt in self.workflows[workflow_name][
                step_instance.step.category]:
            if attempt.step_instance == step_instance:
                root = attempt
        chain = self.result_chain([root], (workflow_name, step_cat))
        try:
            return getattr(chain[-1], scope)[field_name]
        except KeyError:
            raise minilims.errors.MissingValueError(
                "Value (w) {} (s) {} (sc) {} (f) {} for barcode {} not found in results."
                .format(workflow_name, step_cat, scope, field_name,
                        self.barcode))

    def finish_workflow(self, workflow, batch_name, prev_step_cat):
        """Record workflow completion for this sample.

        Appends a synthetic "_workflow_finished" WorkflowResults chained
        to the latest attempt of *prev_step_cat*, advances the batch's
        step_cat to the sentinel category, and saves.
        """
        step_cat = "_workflow_finished"
        prev_step_attempts = self.workflows[workflow.name][prev_step_cat]
        # Chain to the most recent attempt of the final real step.
        instance_index = len(prev_step_attempts) - 1
        step_result_sample = WorkflowResults(
            parent="{}.{}.{}".format(workflow.name, prev_step_cat,
                                     instance_index),
            status="finished",
            step_instance=None,
            all={},
            sample={},
            start_date=datetime.datetime.now(),
            finish_date=datetime.datetime.now(),
            batch_name=batch_name)
        step_attempts = self.workflows[workflow.name].get(step_cat, [])
        step_attempts.append(step_result_sample)
        self.workflows[workflow.name][step_cat] = step_attempts
        self.update_last_workflow(workflow, batch_name, step_cat)
        self.save()

    def summary(self, frmt="dict"):
        """
        Summarize this sample for list/table views.

        :param frmt: output shape — ``"dict"``, ``"datatable"`` or
            ``"step_table"``; any other value yields an empty dict.
        :return: dict of summary fields for the requested view.
        """
        batches = []
        positions = {}
        for b in self.batches:
            if b.archived:
                continue
            # Build the "workflow: batch" label once (the original
            # formatted the identical string twice per batch).
            label = "{}: {}".format(b.workflow.name, b.batch_name)
            batches.append(label)
            coord = b.position.get_coordinates()
            positions[label] = {
                "coords": "".join([str(coord[0]), str(coord[1])]),
                "index": b.position.index
            }
        batch = ", ".join(batches) if batches else "Unassigned"

        info = self.properties.sample_info.summary
        # Optional genome size recorded by the normalization/pooling step;
        # None when the step variables are absent.
        genome_size = info.submitted_species.step_variables.get(
            "wgs_routine", {}).get("wgs_08_normalization_pool",
                                   {}).get("genome_size")

        if frmt == "dict":
            return {
                "barcode": self.barcode,
                "name": info.name,
                "group": info.group,
                "species": info.submitted_species.name,
                "batch": batch,
                "archived": str(self.archived),
                "submitted_on": self.submitted_on
            }
        if frmt == "datatable":
            return {
                "none": "",  # For checkbox
                "tags": [x.pk for x in self.tags],
                "barcode": self.barcode,
                "name": info.name,
                "group": info.group,
                "species": info.submitted_species.name,
                "batch": batch,
                "submission_comments": info.submission_comments,
                "costcenter": info.costcenter,
                "archived": str(self.archived),
                "batch_json": batches,
                "positions": positions,
                "genome_size": genome_size,
                "submitted_on": self.submitted_on.date(),
                "priority": info.priority,
                "comments": self.comments,
                "supplied_plate_name": info.supplied_plate_name,
                "position_in_supplied_plate": info.position_in_supplied_plate
            }
        if frmt == "step_table":
            return {
                "none": "",  # For checkbox
                "barcode": self.barcode,
                "species": info.submitted_species.name,
                "positions": positions,
                "comments": self.comments,
                "batch": batch
            }
        return {}

    def result_report(self, workflow_name, batch_name):
        """
        Flatten all result values for one batch of a workflow into a single
        dict keyed by ``"<step>.<field>"`` / ``"<step>.all.<field>"``
        (values stringified), plus the sample ``barcode``. Returns an empty
        dict when the sample has no matching batch.
        """
        matching = self.get_batches(workflow_name, batch_name)
        if not matching:
            return {}
        batch = matching[0]
        data = {"barcode": self.barcode}

        # Find the most recent step executed for this batch, scanning from
        # the end of the workflow backwards, and seed the result chain.
        chain = None
        candidate_steps = ["_workflow_finished"] + [
            s.name for s in batch.workflow.steps[::-1]
        ]
        for step_name in candidate_steps:
            if step_name not in self.workflows[workflow_name]:
                continue
            hit = next(
                (attempt
                 for attempt in self.workflows[workflow_name][step_name][::-1]
                 if attempt.batch_name == batch_name), None)
            if hit is not None:
                chain = [hit]
                break
        chain = self.result_chain(chain)

        # Walk root-first so later steps overwrite earlier duplicates.
        for entry in chain[::-1]:
            if entry.step_instance is not None:
                prefix = entry.step_instance.step.name
            else:
                prefix = "root"
            for key, value in entry.sample.items():
                data[".".join([prefix, key])] = str(value)
            for key, value in entry.all.items():
                data[".".join([prefix, "all", key])] = str(value)
        return data

    def is_allowed(self, user):
        """
        Check whether *user* may access this sample.

        :param user: object exposing a ``group`` attribute.
        :return: True when the sample's submission group equals the user's.
        """
        sample_group = self.properties.sample_info.summary.group
        return sample_group == user.group

    def detail_data(self):
        """
        Generate dict with data for sample details view.

        For every batch this sample belongs to, collects the workflow steps
        that touched the sample in that batch (timestamps and result
        values), and returns them together with the sample's top-level
        properties under the keys ``barcode``, ``tags``, ``properties`` and
        ``batches``.
        """
        batches = []
        for batch in self.batches:
            workflow_name = batch.workflow.name
            batch_name = batch.batch_name
            workflow_o = batch.workflow
            workflow_data = self.workflows[workflow_name]
            workflow_steps = []
            for step_name, step_i_list in workflow_data.items():
                # Scan attempts in reverse for one belonging to this batch.
                # NOTE(review): there is no break, so with the reversed scan
                # the last assignment wins — i.e. the *earliest* matching
                # attempt is kept; confirm that is intended.
                step_i_found = False
                for step_i in step_i_list[::-1]:
                    if step_i.batch_name == batch_name:
                        step_i_found = step_i
                if step_i_found is False:
                    continue
                step_i = step_i_found
                # Map internal step names to human-readable display names;
                # real steps are resolved against the Step collection.
                if step_name == "root":
                    step_name = "Workflow initialization"
                elif step_name == "_workflow_finished":
                    step_name = "Workflow finished"
                else:
                    step = Step.objects.project({
                        "display_name": 1
                    }).get({"name": step_name})
                    step_name = step.display_name
                if step_i.step_instance is None:
                    # Synthetic entries (root / workflow-finished) carry
                    # their value maps directly on the WorkflowResults.
                    workflow_steps.append({
                        "name": step_name,
                        "attempt": len(step_i_list),
                        "start_date": step_i.start_date,
                        "finish_date": step_i.finish_date,
                        "values_all": step_i.all,
                        "values_sample": step_i.sample
                    })
                else:
                    # Real steps expose their values via the step instance.
                    val = step_i.step_instance.summary_values()
                    workflow_steps.append({
                        "id":
                        step_i.step_instance._id,
                        "name":
                        step_name,
                        "attempt":
                        len(step_i_list),
                        "start_date":
                        step_i.start_date,
                        "finish_date":
                        step_i.finish_date,
                        "values_all":
                        val["values_all"],
                        "values_samples":
                        val["values_samples"],
                        "fields_samples":
                        val["fields_samples"]
                    })
            batches.append({
                "display_name":
                workflow_o.display_name,
                "steps":
                workflow_steps,
                "batch_name":
                batch_name,
                "position":
                "".join(map(str, batch.position.get_coordinates()))
            })

        return {
            "barcode": self.barcode,
            "tags": [x.pk for x in self.tags],
            "properties": {
                "group": self.properties.sample_info.summary.group,
                "name": self.properties.sample_info.summary.name,
                "species":
                self.properties.sample_info.summary.submitted_species.name,
                "priority": self.properties.sample_info.summary.priority,
                "emails": self.properties.sample_info.summary.emails,
                "submitted_on": self.submitted_on,
                "additional_metadata": []
            },
            "batches": batches
        }

    def assign_tag(self, tag):
        """Attach *tag* to this sample (no-op if already attached), then save."""
        already_attached = tag in self.tags
        if not already_attached:
            self.tags.append(tag)
        self.save()

    def unassign_tag(self, tag):
        """Detach *tag* from this sample (no-op if absent), then save."""
        currently_attached = tag in self.tags
        if currently_attached:
            self.tags.remove(tag)
        self.save()
# Example 2
 class MultiReferenceModelEmbed(MongoModel):
     """Model holding lists of references to Comment and Post documents."""
     comments = fields.ListField(fields.ReferenceField(Comment))
     posts = fields.ListField(fields.ReferenceField(Post))
# Example 3
class Securities(MongoModel):
    """A tradable security listed on an exchange."""
    name = fields.CharField()  # human-readable security name
    code = fields.CharField()  # security/ticker code
    exchange_id = fields.ReferenceField(Exchange)  # listing exchange
    industry = fields.CharField()  # industry classification label
# Example 4
 class Hand(MongoModel):
     """A hand of cards, stored as references to Card documents."""
     cards = fields.ListField(fields.ReferenceField(Card))
# Example 5
class CommentWrapper(EmbeddedMongoModel):
    """Embedded document wrapping a list of Comment references."""
    comments = fields.ListField(fields.ReferenceField(Comment))
class Image(EmbeddedMongoModel):
    """Embedded image with a URL, optional alt text and photographer credit."""
    image_url = fields.CharField(required=True)  # location of the image asset
    alt_text = fields.CharField()  # accessibility/alternative text
    photographer = fields.ReferenceField(Contributor)  # credited contributor
# Example 7 (example 6 appears to be missing from the scrape)
class DRGData(EmbeddedMongoModel):
    """Embedded average-value datum for a diagnosis-related group (DRG)."""
    drg = fields.ReferenceField(DRG)  # the DRG this datum describes
    avg = fields.FloatField(min_value=0.0)  # non-negative average value
# Example 8
class Rental(MongoModel):
    """Rental record linking a customer to a rented product (pymodm model)."""
    rental_id = fields.CharField()  # external rental identifier
    user_id = fields.ReferenceField(Customer)  # renting customer
    product_id = fields.ReferenceField(Product)  # rented product
# Example 9
class ServiceSettings(MongoModel):
    """
    Configuration for a single streaming service (collection ``services``).

    Stores the service's network endpoints, on-disk working directories and
    the streams, serials and provider pairs attached to it.
    """

    @staticmethod
    def get_by_id(sid: ObjectId):
        """Return the ServiceSettings with primary key *sid*, or None."""
        try:
            ser = ServiceSettings.objects.get({'_id': sid})
        except ServiceSettings.DoesNotExist:
            return None
        else:
            return ser

    class Meta:
        collection_name = 'services'

    DEFAULT_SERVICE_NAME = 'Service'
    MIN_SERVICE_NAME_LENGTH = 3
    MAX_SERVICE_NAME_LENGTH = 30

    # Default working directories, all rooted at the service root dir.
    DEFAULT_FEEDBACK_DIR_PATH = constants.DEFAULT_SERVICE_ROOT_DIR_PATH + '/feedback'
    DEFAULT_TIMESHIFTS_DIR_PATH = constants.DEFAULT_SERVICE_ROOT_DIR_PATH + '/timeshifts'
    DEFAULT_HLS_DIR_PATH = constants.DEFAULT_SERVICE_ROOT_DIR_PATH + '/hls'
    DEFAULT_PLAYLISTS_DIR_PATH = constants.DEFAULT_SERVICE_ROOT_DIR_PATH + '/playlists'
    DEFAULT_DVB_DIR_PATH = constants.DEFAULT_SERVICE_ROOT_DIR_PATH + '/dvb'
    DEFAULT_CAPTURE_DIR_PATH = constants.DEFAULT_SERVICE_ROOT_DIR_PATH + '/capture_card'
    DEFAULT_VODS_IN_DIR_PATH = constants.DEFAULT_SERVICE_ROOT_DIR_PATH + '/vods_in'
    DEFAULT_VODS_DIR_PATH = constants.DEFAULT_SERVICE_ROOT_DIR_PATH + '/vods'
    DEFAULT_CODS_DIR_PATH = constants.DEFAULT_SERVICE_ROOT_DIR_PATH + '/cods'

    # Default endpoints for the service's control/HTTP/VOD/COD interfaces.
    DEFAULT_SERVICE_HOST = 'localhost'
    DEFAULT_SERVICE_PORT = 6317
    DEFAULT_SERVICE_HTTP_HOST = 'localhost'
    DEFAULT_SERVICE_HTTP_PORT = 8000
    DEFAULT_SERVICE_VODS_HOST = 'localhost'
    DEFAULT_SERVICE_VODS_PORT = 7000
    DEFAULT_SERVICE_CODS_HOST = 'localhost'
    DEFAULT_SERVICE_CODS_PORT = 6001

    # NOTE(review): the mutable ``default=[]`` instances below are shared at
    # class level; confirm pymodm copies field defaults per document.
    streams = fields.ListField(fields.ReferenceField(IStream),
                               default=[],
                               blank=True)
    series = fields.ListField(fields.ReferenceField(
        Serial, on_delete=fields.ReferenceField.PULL),
                              default=[],
                              blank=True)
    providers = fields.EmbeddedDocumentListField(ProviderPair, default=[])

    name = fields.CharField(default=DEFAULT_SERVICE_NAME,
                            max_length=MAX_SERVICE_NAME_LENGTH,
                            min_length=MIN_SERVICE_NAME_LENGTH)
    host = fields.EmbeddedDocumentField(HostAndPort,
                                        default=HostAndPort(
                                            host=DEFAULT_SERVICE_HOST,
                                            port=DEFAULT_SERVICE_PORT))
    http_host = fields.EmbeddedDocumentField(
        HostAndPort,
        default=HostAndPort(host=DEFAULT_SERVICE_HTTP_HOST,
                            port=DEFAULT_SERVICE_HTTP_PORT))
    vods_host = fields.EmbeddedDocumentField(
        HostAndPort,
        default=HostAndPort(host=DEFAULT_SERVICE_VODS_HOST,
                            port=DEFAULT_SERVICE_VODS_PORT))
    cods_host = fields.EmbeddedDocumentField(
        HostAndPort,
        default=HostAndPort(host=DEFAULT_SERVICE_CODS_HOST,
                            port=DEFAULT_SERVICE_CODS_PORT))

    feedback_directory = fields.CharField(default=DEFAULT_FEEDBACK_DIR_PATH)
    timeshifts_directory = fields.CharField(
        default=DEFAULT_TIMESHIFTS_DIR_PATH)
    hls_directory = fields.CharField(default=DEFAULT_HLS_DIR_PATH)
    playlists_directory = fields.CharField(default=DEFAULT_PLAYLISTS_DIR_PATH)
    dvb_directory = fields.CharField(default=DEFAULT_DVB_DIR_PATH)
    capture_card_directory = fields.CharField(default=DEFAULT_CAPTURE_DIR_PATH)
    vods_in_directory = fields.CharField(default=DEFAULT_VODS_IN_DIR_PATH)
    vods_directory = fields.CharField(default=DEFAULT_VODS_DIR_PATH)
    cods_directory = fields.CharField(default=DEFAULT_CODS_DIR_PATH)

    def get_id(self) -> str:
        """Return the primary key as a string."""
        return str(self.pk)

    @property
    def id(self):
        """Primary key of this service document."""
        return self.pk

    def get_host(self) -> str:
        """Return the control endpoint as ``host:port``."""
        return str(self.host)

    def get_http_host(self) -> str:
        """Return the HTTP endpoint with an ``http://`` scheme prefix."""
        return 'http://{0}'.format(str(self.http_host))

    def get_vods_host(self) -> str:
        """Return the VOD endpoint with an ``http://`` scheme prefix."""
        return 'http://{0}'.format(str(self.vods_host))

    def get_cods_host(self) -> str:
        """Return the COD endpoint with an ``http://`` scheme prefix."""
        return 'http://{0}'.format(str(self.cods_host))

    def generate_http_link(self, url: str) -> str:
        """Rewrite an HLS filesystem path into a public HTTP URL."""
        return url.replace(self.hls_directory, self.get_http_host())

    def generate_vods_link(self, url: str) -> str:
        """Rewrite a VOD filesystem path into a public HTTP URL."""
        return url.replace(self.vods_directory, self.get_vods_host())

    def generate_cods_link(self, url: str) -> str:
        """Rewrite a COD filesystem path into a public HTTP URL."""
        return url.replace(self.cods_directory, self.get_cods_host())

    def generate_playlist(self) -> str:
        """Concatenate every attached stream's playlist under one header."""
        result = '#EXTM3U\n'
        for stream in self.streams:
            result += stream.generate_playlist(False)

        return result

    def add_streams(self, streams: [IStream]):
        """Attach several streams at once."""
        self.streams.extend(streams)

    def add_stream(self, stream: IStream):
        """Attach a single stream (no-op for falsy input)."""
        if stream:
            self.streams.append(stream)

    def remove_stream(self, stream: IStream):
        """Detach *stream* and delete its document (no-op for falsy input)."""
        if stream:
            self.streams.remove(stream)
            stream.delete()

    def remove_all_streams(self):
        """Delete every attached stream document and clear the list."""
        for stream in list(self.streams):
            stream.delete()
        self.streams = []

    def add_provider(self, user: ProviderPair):
        """Attach a provider pair (no-op for falsy input)."""
        if user:
            self.providers.append(user)

    def remove_provider(self, provider):
        """
        Detach *provider* (a user) by removing its matching ProviderPair.

        Bug fix: the original passed *provider* itself to ``remove``, but
        ``self.providers`` holds ProviderPair objects — the user is never an
        element, so the matching pair was never removed. Remove the matched
        pair ``prov`` instead.
        """
        for prov in list(self.providers):
            if prov.user == provider:
                self.providers.remove(prov)

    def find_stream_by_id(self, sid: ObjectId):
        """Return the attached stream with id *sid*, or None."""
        for stream in self.streams:
            if stream.id == sid:
                return stream

        return None

    def delete(self, *args, **kwargs):
        """Delete every attached stream document, then this service."""
        for stream in self.streams:
            stream.delete()
        return super(ServiceSettings, self).delete(*args, **kwargs)
# Example 10
class NetworkObjectGroup(MongoModel):
    """Named, typed group of NetworkObject references."""
    name = fields.CharField()  # group display name
    group_type = cusfields.EnumField(enums.GroupType,
                                     default=enums.GroupType.TILE)
    # NOTE(review): semantics of ``exclusive`` are not visible here — confirm.
    exclusive = fields.BooleanField(default=False)
    members = fields.ListField(fields.ReferenceField(NetworkObject))
# Example 11
class MongoToolVersion(MongoModel):
    """
    This class stores the information of a tool version
    (e.g. PeptideShacker 2.0).
    """
    # Note: ``id`` here is an application-level identifier field, distinct
    # from MongoDB's primary key.
    id = fields.CharField(max_length=200, blank=False, required=False)
    name = fields.CharField(max_length=1000, blank=True, required=False)
    version = fields.CharField(max_length=1000, blank=False, required=False)
    description = fields.CharField(blank=True)
    home_url = fields.CharField()
    doc_url = fields.CharField()
    license = fields.CharField(max_length=1000)
    additional_identifiers = fields.CharField()
    organization = fields.CharField()
    has_checker = fields.BooleanField()
    checker_url = fields.CharField(max_length=400)
    is_verified = fields.BooleanField()
    verified_source = fields.CharField(max_length=400)
    registry_url = fields.CharField(max_length=500)

    additional_metadata = fields.CharField()
    tool_classes = fields.EmbeddedDocumentListField('ToolClass')
    authors = fields.ListField(fields.CharField(max_length=200))
    contains = fields.ListField(fields.CharField(max_length=400))
    tool_versions = fields.ListField(fields.CharField(max_length=400))
    aliases = fields.ListField(fields.CharField())
    container_recipe = fields.CharField(max_length=500)

    # Specific of Tool Version
    ref_tool = fields.ReferenceField(MongoTool)
    hash_name = fields.CharField(max_length=2000)
    descriptors = fields.EmbeddedDocumentListField('Descriptor')
    image_containers = fields.EmbeddedDocumentListField('ContainerImage')
    last_update = fields.DateTimeField()

    # All queries must be executed via these managers.
    manager = Manager.from_queryset(ToolQuerySet)()
    manager_versions = Manager.from_queryset(ToolVersionQuerySet)()

    @staticmethod
    def get_all_tool_versions():
        """Return every stored tool version."""
        return MongoToolVersion.manager_versions.mongo_all_tool_versions()

    @staticmethod
    def get_tool_version_by_id(tool_version_id):
        """Return the first tool version matching *tool_version_id*, or None."""
        tools = MongoToolVersion.manager_versions.get_tool_version_by_id(
            tool_version_id)
        tools_list = list(tools)
        # ``list`` never returns None, so truthiness alone suffices (the
        # original also tested ``is not None`` redundantly).
        if tools_list:
            return tools_list[0]
        return None

    @staticmethod
    def get_tool_version_by_name(tool_name):
        """Return the list of tool versions named *tool_name*, or None."""
        tools = MongoToolVersion.manager_versions.get_tool_version_by_name(
            tool_name)
        tools_list = list(tools)
        if tools_list:
            return tools_list
        return None

    def add_image_container(self, image_container):
        """
        Add a new container image to the list of containers, replacing any
        existing entry with the same full tag and container type.
        :param image_container:
        :return:
        """
        replaced = False
        for index, existing in enumerate(self.image_containers):
            if (image_container.full_tag == existing.full_tag and
                    image_container.container_type == existing.container_type):
                self.image_containers[index] = image_container
                replaced = True
        if not replaced:
            self.image_containers.append(image_container)

    def __getitem__(self, key):
        # NOTE(review): indexing only matches this version's own id and
        # yields None otherwise — confirm this is the intended contract.
        if key == self.id:
            return self
        return

    def add_author(self, author):
        """
        This method adds a new author to the list of authors of the Tool Version
        :param author: New author
        :return:
        """
        if self.authors is None:
            self.authors = []

        if author not in self.authors:
            self.authors.append(author)

    class Meta:
        write_concern = WriteConcern(j=True)
        final = True
        indexes = [
            IndexModel([("id", pymongo.DESCENDING),
                        ("name", pymongo.DESCENDING),
                        ("version", pymongo.DESCENDING)],
                       unique=True)
        ]
# Example 12
class InterfaceKey(EmbeddedMongoModel):
    """Embedded key naming an interface on a NetworkObject."""
    no = fields.ReferenceField(NetworkObject)  # owning network object
    name = fields.CharField()  # interface name

    class Meta:
        final = True  # no subclassing: stored without a class discriminator
# Example 13
class MeasureTrialTask(arvet.batch_analysis.task.Task):
    """
    A task for benchmarking a trial result. Result is a BenchmarkResult id.
    """
    # Referenced documents; CASCADE deletes this task when they are removed.
    metric = fields.ReferenceField(Metric,
                                   required=True,
                                   on_delete=fields.ReferenceField.CASCADE)
    trial_results = ReferenceListField(TrialResult,
                                       required=True,
                                       on_delete=fields.ReferenceField.CASCADE)
    result = fields.ReferenceField(MetricResult,
                                   on_delete=fields.ReferenceField.CASCADE)

    @property
    def result_id(self) -> typing.Union[bson.ObjectId, None]:
        """
        Get the id of the result, without attempting to construct the object.
        Makes it easier for other objects to refer to this result, without loading large result objects.
        :return:
        """
        # no_auto_dereference lets us inspect the raw field, which may be a
        # bare ObjectId rather than a constructed MetricResult.
        with no_auto_dereference(MeasureTrialTask):
            if self.result is None:
                return None
            if isinstance(self.result, bson.ObjectId):
                return self.result
        # Outside the context the field dereferences to a model; use its pk.
        return self.result.pk

    def get_result(self) -> typing.Union[MetricResult, None]:
        """
        Actually get the result object.
        This will auto-load the result model, and then attempt to construct it.
        :return:
        """
        with no_auto_dereference(MeasureTrialTask):
            if self.result is None:
                return None
            if isinstance(self.result, bson.ObjectId):
                # result is an id and not a model, autoload the model
                autoload_modules(MetricResult, [self.result])
        # This will now dereference correctly
        return self.result

    def get_unique_name(self) -> str:
        """Return a unique task name derived from the primary key."""
        return "measure_{0}".format(self.pk)

    def load_referenced_models(self) -> None:
        """
        Load the metric, trial, and result types so we can save the task
        :return:
        """
        # NOTE(review): near-duplicate of load_referenced_modules below
        # (this one also loads the result type); confirm both are needed.
        with no_auto_dereference(MeasureTrialTask):
            if isinstance(self.metric, bson.ObjectId):
                # The metric is just an ID, we will need the model to
                autoload_modules(Metric, [self.metric])
            trials_to_load = [
                trial_id for trial_id in self.trial_results
                if isinstance(trial_id, bson.ObjectId)
            ]
            if len(trials_to_load) > 0:
                autoload_modules(TrialResult, trials_to_load)
            if isinstance(self.result, bson.ObjectId):
                # result is an id and not a model, autoload the model
                autoload_modules(MetricResult, [self.result])

    def run_task(self, path_manager: PathManager) -> None:
        """
        Execute the metric over the trial results and store the outcome.

        Validates that the metric can assess every trial, runs the
        measurement, saves the MetricResult and marks the job complete.
        Failures are recorded via fail_with_message.
        """
        import traceback

        # Load all the referenced models
        self.load_referenced_modules()

        # Check all the trials are appropriate
        for trial_num, trial_result in enumerate(self.trial_results):
            if not self.metric.is_trial_appropriate(trial_result):
                # Metric cannot measure these trials, fail permanently
                self.fail_with_message(
                    "Metric {0} cannot assess trial {1}".format(
                        self.metric.get_pretty_name(), trial_num))
                return

        logging.getLogger(__name__).info("Running metric {0}".format(
            self.metric.get_pretty_name()))
        try:
            metric_result = self.metric.measure_results(self.trial_results)
        except Exception as exception:
            # Record the failure as a MetricResult before propagating.
            self.fail_with_message(
                "Exception while running metric {0}:\n{1}".format(
                    self.metric.get_pretty_name(), traceback.format_exc()))
            raise exception  # Re-raise the caught exception
        if metric_result is None:
            self.fail_with_message(
                "Failed to run {0}, metric returned None".format(
                    self.metric.get_pretty_name()))
            return

        if not metric_result.success:
            logging.getLogger(__name__).info(
                "Measured trials using metric {0}, but got unsuccessful result: {1}"
                .format(self.metric.get_pretty_name(), metric_result.message))
        else:
            logging.getLogger(__name__).info(
                "Successfully measured trials using metric {0}".format(
                    self.metric.get_pretty_name()))

        logging.getLogger(__name__).info("Saving metric result...")
        # NOTE(review): save(True) — presumably cascade/force-insert per the
        # model's save signature; confirm against MetricResult.save.
        metric_result.save(True)

        self.result = metric_result
        self.mark_job_complete()

    def load_referenced_modules(self):
        """Autoload the metric and trial-result model modules by id."""
        logging.getLogger(__name__).info("Loading referenced models...")
        # Load the metric model
        metric_id = None
        with no_auto_dereference(MeasureTrialTask):
            if isinstance(self.metric, bson.ObjectId):
                metric_id = self.metric
        if metric_id is not None:
            autoload_modules(Metric, [metric_id])

        # Load the trial results models
        with no_auto_dereference(MeasureTrialTask):
            model_ids = list(
                set(tr_id for tr_id in self.trial_results
                    if isinstance(tr_id, bson.ObjectId)))
        if len(model_ids) > 0:
            autoload_modules(TrialResult, model_ids)

    def fail_with_message(self, message):
        """
        Quick helper to log error message, and make and store a metric result as the result
        :param message:
        :return:
        """
        logging.getLogger(__name__).error(message)
        self.result = MetricResult(metric=self.metric,
                                   trial_results=self.trial_results,
                                   success=False,
                                   message=message)
        self.mark_job_complete()
# Example 14
class IStream(MongoModel):
    """
    Base model for a streamable entity, stored in the ``streams`` collection.

    Holds presentation metadata (name, group, tvg-* attributes), pricing and
    visibility flags, optional sub-parts and the output URLs used to render
    M3U playlists. Subclasses must implement ``get_type()`` and
    ``generate_input_playlist()``.
    """

    # Stream types whose outputs appear in generated playlists. The original
    # boolean chains listed VOD_ENCODE twice; this tuple covers the same set
    # of types exactly once, shared by both playlist generators.
    _PLAYLIST_STREAM_TYPES = (constants.StreamType.RELAY,
                              constants.StreamType.VOD_RELAY,
                              constants.StreamType.COD_RELAY,
                              constants.StreamType.ENCODE,
                              constants.StreamType.VOD_ENCODE,
                              constants.StreamType.COD_ENCODE,
                              constants.StreamType.PROXY,
                              constants.StreamType.VOD_PROXY,
                              constants.StreamType.TIMESHIFT_PLAYER,
                              constants.StreamType.CATCHUP)

    @staticmethod
    def get_by_id(sid: ObjectId):
        """Return the IStream with primary key *sid*, or None if not found."""
        try:
            stream = IStream.objects.get({'_id': sid})
        except IStream.DoesNotExist:
            return None
        else:
            return stream

    class Meta:
        collection_name = 'streams'
        allow_inheritance = True

    created_date = fields.DateTimeField(default=datetime.now)  # for inner use
    name = fields.CharField(default=constants.DEFAULT_STREAM_NAME,
                            max_length=constants.MAX_STREAM_NAME_LENGTH,
                            min_length=constants.MIN_STREAM_NAME_LENGTH,
                            required=True)
    group = fields.CharField(
        default=constants.DEFAULT_STREAM_GROUP_TITLE,
        max_length=constants.MAX_STREAM_GROUP_TITLE_LENGTH,
        min_length=constants.MIN_STREAM_GROUP_TITLE_LENGTH,
        required=True,
        blank=True)

    tvg_id = fields.CharField(default=constants.DEFAULT_STREAM_TVG_ID,
                              max_length=constants.MAX_STREAM_TVG_ID_LENGTH,
                              min_length=constants.MIN_STREAM_TVG_ID_LENGTH,
                              blank=True)
    tvg_name = fields.CharField(default=constants.DEFAULT_STREAM_TVG_NAME,
                                max_length=constants.MAX_STREAM_NAME_LENGTH,
                                min_length=constants.MIN_STREAM_NAME_LENGTH,
                                blank=True)
    tvg_logo = fields.CharField(default=constants.DEFAULT_STREAM_ICON_URL,
                                max_length=constants.MAX_URL_LENGTH,
                                min_length=constants.MIN_URL_LENGTH,
                                required=True)

    price = fields.FloatField(default=0.0,
                              min_value=constants.MIN_PRICE,
                              max_value=constants.MAX_PRICE,
                              required=True)
    visible = fields.BooleanField(default=True, required=True)
    iarc = fields.IntegerField(
        default=21, min_value=0,
        required=True)  # https://support.google.com/googleplay/answer/6209544

    view_count = fields.IntegerField(default=0)
    parts = fields.ListField(fields.ReferenceField('IStream'), default=[])
    output = fields.EmbeddedDocumentListField(OutputUrl, default=[])

    def add_part(self, stream):
        """Append *stream* as a sub-part (no-op for falsy input)."""
        if stream:
            self.parts.append(stream)

    def remove_part(self, stream):
        """Remove *stream* from the sub-parts (no-op for falsy input)."""
        if stream:
            self.parts.remove(stream)

    def get_groups(self) -> list:
        """Return the semicolon-separated group titles as a list."""
        return self.group.split(';')

    def to_front_dict(self) -> dict:
        """Serialize the fields needed by the front end."""
        return {
            StreamFields.NAME_FIELD: self.name,
            StreamFields.ID_FIELD: self.get_id(),
            StreamFields.TYPE_FIELD: self.get_type(),
            StreamFields.ICON_FIELD: self.tvg_logo,
            StreamFields.PRICE_FIELD: self.price,
            StreamFields.VISIBLE_FIELD: self.visible,
            StreamFields.IARC_FIELD: self.iarc,
            StreamFields.VIEW_COUNT_FIELD: self.view_count,
            StreamFields.GROUP_FIELD: self.group
        }

    def get_type(self) -> constants.StreamType:
        raise NotImplementedError('subclasses must override get_type()!')

    @property
    def id(self) -> ObjectId:
        """Primary key of this stream document."""
        return self.pk

    def get_id(self) -> str:
        """Return the primary key as a string."""
        return str(self.pk)

    def generate_playlist(self, header=True) -> str:
        """
        Build an M3U fragment listing this stream's output URIs.

        :param header: when True, prepend the ``#EXTM3U`` header line.
        """
        result = '#EXTM3U\n' if header else ''
        if self.get_type() in IStream._PLAYLIST_STREAM_TYPES:
            for out in self.output:
                result += '#EXTINF:-1 tvg-id="{0}" tvg-name="{1}" tvg-logo="{2}" group-title="{3}",{4}\n{5}\n'.format(
                    self.tvg_id, self.tvg_name, self.tvg_logo, self.group,
                    self.name, out.uri)

        return result

    def generate_device_playlist(self,
                                 uid: str,
                                 pass_hash: str,
                                 did: str,
                                 lb_server_host_and_port: str,
                                 header=True) -> str:
        """
        Build an M3U fragment whose URIs are rewritten to pass through the
        load-balancer endpoint for a specific subscriber device.

        :param uid: subscriber id embedded in the rewritten URL.
        :param pass_hash: subscriber password hash.
        :param did: device id.
        :param lb_server_host_and_port: ``host:port`` of the load balancer.
        :param header: when True, prepend the ``#EXTM3U`` header line.
        """
        result = '#EXTM3U\n' if header else ''
        if self.get_type() in IStream._PLAYLIST_STREAM_TYPES:
            for out in self.output:
                parsed_uri = urlparse(out.uri)
                # Only HTTP(S) outputs can be proxied through the balancer.
                if parsed_uri.scheme == 'http' or parsed_uri.scheme == 'https':
                    file_name = os.path.basename(parsed_uri.path)
                    url = 'http://{0}/{1}/{2}/{3}/{4}/{5}/{6}'.format(
                        lb_server_host_and_port, uid, pass_hash, did, self.id,
                        out.id, file_name)
                    result += '#EXTINF:-1 tvg-id="{0}" tvg-name="{1}" tvg-logo="{2}" group-title="{3}",{4}\n{5}\n'.format(
                        self.tvg_id, self.tvg_name, self.tvg_logo, self.group,
                        self.name, url)

        return result

    def generate_input_playlist(self, header=True) -> str:
        raise NotImplementedError(
            'subclasses must override generate_input_playlist()!')

    def delete(self, *args, **kwargs):
        """Detach this stream from every subscriber, then delete it."""
        from pyfastocloud_models.subscriber.entry import Subscriber
        subscribers = Subscriber.objects.all()
        for subscriber in subscribers:
            subscriber.remove_official_stream(self)
            subscriber.remove_official_vod(self)
            subscriber.remove_official_catchup(self)
            subscriber.save()
        return super(IStream, self).delete(*args, **kwargs)
# Example 15
 class ReferenceA(MongoModel):
     """Model holding a lazily-resolved (string-named) reference to ReferenceB."""
     ref = fields.ReferenceField('ReferenceB')
# Example 16
class Activity(MongoModel):
    """A recorded workout session persisted in the ``data`` collection.

    Raw metrics are stored as plain numbers (``*_s``, ``*_m``, ``*_bpm`` …)
    under the short aliases given by ``mongo_name``; the property accessors
    re-expose them as unit-tagged quantities via ``ureg``.
    NOTE(review): ``ureg`` is presumably a pint UnitRegistry created at module
    level — confirm against the file header.
    """

    # Core timing/distance metrics (stored under short mongo aliases).
    start_time = fields.DateTimeField(verbose_name='Start Time',
                                      mongo_name='start_dt')
    total_time_s = fields.FloatField(verbose_name='Total Time [s]',
                                     mongo_name='time_s')
    total_distance_m = fields.FloatField(verbose_name='Total Distance [m]',
                                         mongo_name='distance_m')

    calories_cal = fields.IntegerField(verbose_name='Calories',
                                       mongo_name='cal')
    avg_speed_m_s = fields.FloatField(verbose_name='Average Speed [m/s]')
    max_speed_m_s = fields.FloatField(verbose_name='Maximum Speed [m/s]')
    total_ascent_m = fields.IntegerField(verbose_name='Total Ascent [m]',
                                         mongo_name='ascent_m')
    total_descent_m = fields.IntegerField(verbose_name='Total Descent [m]',
                                          mongo_name='descent_m')
    avg_heart_rate_bpm = fields.IntegerField(
        verbose_name='Average Heart Rate [bpm]', mongo_name='avg_hr_bpm')
    max_heart_rate_bpm = fields.IntegerField(
        verbose_name='Maximum Heart Rate [bpm]', mongo_name='max_hr_bpm')

    aerobic_training_effect = fields.FloatField(
        verbose_name='Aerobic Training Effect', mongo_name='aerobic_t_e')
    anaerobic_training_effect = fields.FloatField(
        verbose_name='Anaerobic Training Effect', mongo_name='anaerobic_t_e')

    avg_temperature_c = fields.IntegerField(
        verbose_name='Average Temperature [C]', mongo_name='avg_temp_c')
    max_temperature_c = fields.IntegerField(
        verbose_name='Maximum Temperature [C]', mongo_name='max_temp_c')

    # NOTE(review): "measurment" (sic) is the established attribute name;
    # renaming it would break existing callers and stored documents.
    start_body_weight_measurment = fields.ReferenceField(BodyWeight)
    end_body_weight_measurment = fields.ReferenceField(BodyWeight)

    device = fields.ReferenceField(Device)

    start_location = fields.ReferenceField(Location)

    # Raw recording file; parsed on demand by source_file_as_fit.
    source_file = fields.FileField()

    commute = fields.BooleanField(default=False)

    class Meta:
        connection_alias = 'activity-connection'
        collection_name = 'data'

    @property
    def total_time(self):
        """Total time as a quantity in seconds."""
        return self.total_time_s * ureg.second

    @property
    def total_distance(self):
        """Total distance as a quantity in meters."""
        return self.total_distance_m * ureg.meter

    @property
    def calories(self):
        """Energy expenditure as a quantity in calories."""
        return self.calories_cal * ureg.calorie

    @property
    def avg_speed(self):
        """Average speed as a quantity in m/s."""
        return self.avg_speed_m_s * ureg.meter / ureg.second

    @property
    def max_speed(self):
        """Maximum speed as a quantity in m/s."""
        return self.max_speed_m_s * ureg.meter / ureg.second

    @property
    def total_ascent(self):
        """Total ascent as a quantity in meters."""
        return self.total_ascent_m * ureg.meter

    @property
    def total_descent(self):
        """Total descent as a quantity in meters."""
        return self.total_descent_m * ureg.meter

    @property
    def avg_temperature(self):
        # degC is an offset unit, so it needs the Quantity constructor
        # rather than multiplication.
        return ureg.Quantity(self.avg_temperature_c, ureg.degC)

    @property
    def max_temperature(self):
        return ureg.Quantity(self.max_temperature_c, ureg.degC)

    @property
    def source_file_as_fit(self):
        """Parse the stored source file into a FitFile object.

        NOTE(review): reads the whole file into memory via ``chunks()`` —
        assumed to be a Garmin FIT recording; confirm size expectations.
        """

        self.source_file.open()
        fit_file = FitFile(b''.join(self.source_file.file.chunks()))
        self.source_file.close()

        return fit_file

    @property
    def start_body_weight(self):
        """Body weight recorded at the start of the activity."""
        return self.start_body_weight_measurment.value

    @property
    def end_body_weight(self):
        """Body weight recorded at the end of the activity."""
        return self.end_body_weight_measurment.value

    @property
    def avg_body_weight(self):
        """Mean of the start and end body-weight measurements."""
        return 0.5 * (self.start_body_weight + self.end_body_weight)
Esempio n. 17
0
 class ReferenceB(MongoModel):
     ref = fields.ReferenceField(ReferenceA)
Esempio n. 18
0
class RunningActivity(Activity):
    """Running workout: run-specific dynamics plus geolocated records."""

    shoes = fields.ReferenceField(Shoes)

    avg_power_w = fields.IntegerField(
        verbose_name='Average Power (Estimate) [W]')
    avg_cadence_steps_min = fields.IntegerField(
        verbose_name='Average Running Cadence [strides/min]',
        mongo_name='avg_running_cadence')
    total_num_steps = fields.IntegerField(verbose_name='Total Number of Steps',
                                          mongo_name='num_steps')
    avg_vertical_oscillation_mm = fields.FloatField(
        verbose_name='Average Vertical Oscillation [mm]',
        mongo_name='avg_vert_osc_mm')
    avg_vertical_ratio = fields.FloatField(
        verbose_name='Average Vertical Ratio %', mongo_name='avg_vert_ratio')
    avg_ground_contact_time_ms = fields.FloatField(
        verbose_name='Average Ground Contact Time [ms]',
        mongo_name='avg_gct_ms')
    avg_ground_contact_time_balance = fields.FloatField(
        verbose_name='Average Ground Contact Time Balance %',
        mongo_name='avg_gct_bal')
    avg_step_length_mm = fields.FloatField(
        verbose_name='Average Step Length [mm]')

    race = fields.EmbeddedDocumentField(RunningRace)

    # Ordered geolocated samples taken along the run.
    records = fields.ListField(fields.ReferenceField(LocationWhileRunning))

    @property
    def avg_power(self):
        """Average power as a quantity in watts."""
        return self.avg_power_w * ureg.watt

    @property
    def avg_vertical_oscillation(self):
        """Average vertical oscillation as a quantity in millimeters."""
        return self.avg_vertical_oscillation_mm * ureg.millimeter

    @property
    def avg_ground_contact_time(self):
        """Average ground contact time as a quantity in milliseconds."""
        return self.avg_ground_contact_time_ms * ureg.milliseconds

    @property
    def avg_step_length(self):
        """Average step length as a quantity in millimeters."""
        return self.avg_step_length_mm * ureg.millimeter

    @property
    def avg_power_density(self):
        """Average power divided by the run's average body weight."""
        return self.avg_power / self.avg_body_weight

    @property
    def avg_watts_per_kg(self):
        """Power density as a bare float in watt/kg."""
        return self.avg_power_density.to('watt/kg').magnitude

    def _percent_records_mapping(self, attr):
        """Map each distinct value of *attr* across ``self.records`` to the
        percentage (rounded to 2 decimals) of records carrying that value.

        Returns {} when there are no records.  Replaces four copy-pasted
        loops that differed only in the attribute read from each record.
        """
        records = self.records
        num_records = len(records)
        if num_records == 0:
            return {}
        counts = {}
        for record in records:
            key = getattr(record, attr)
            counts[key] = counts.get(key, 0) + 1
        return {
            key: round(num / num_records * 100, 2)
            for key, num in counts.items()
        }

    @property
    def country_to_percent_records_mapping(self):
        """Share of records per country, in percent."""
        return self._percent_records_mapping('country_name')

    @property
    def admin1_to_percent_records_mapping(self):
        """Share of records per first-level administrative area, in percent."""
        return self._percent_records_mapping('admin1')

    @property
    def admin2_to_percent_records_mapping(self):
        """Share of records per second-level administrative area, in percent."""
        return self._percent_records_mapping('admin2')

    @property
    def name_to_percent_records_mapping(self):
        """Share of records per place name, in percent."""
        return self._percent_records_mapping('name')
Esempio n. 19
0
 class Post(MongoModel):
     body = fields.CharField()
     comments = fields.ListField(fields.ReferenceField(Comment))
Esempio n. 20
0
class Client(MongoModel):
    """A client record keyed by its client id and owned by a user."""

    # clientId doubles as the document's _id.
    clientId = fields.CharField(primary_key=True)
    # String target: 'User' is resolved lazily to avoid ordering issues.
    userId = fields.ReferenceField('User')
    # NOTE(review): stored as a single string; presumably a serialized
    # resource list — confirm the expected format with callers.
    resources = fields.CharField()
Esempio n. 21
0
class CurrentTask(EmbeddedMongoModel):
    """Embedded snapshot of the task/action a parent document is running."""

    # Free-form status string; the visible code imposes no value set.
    status = fields.CharField()
    task_id = fields.ReferenceField(Task)
    action_id = fields.ReferenceField(Action)
Esempio n. 22
0
class CompletedQueue(MongoModel):
    """A finished queue entry linking user, client and experiment to a run."""

    # All three references use string targets so the models can be declared
    # in any order.
    userId = fields.ReferenceField('User')
    clientId = fields.ReferenceField('Client')
    experimentId = fields.ReferenceField('Experiment')
    # The run payload is embedded (not referenced) and must be present.
    run = fields.EmbeddedDocumentField(Run, required=True)
Esempio n. 23
0
class Comment(MongoModel):
    """A comment with its text and a back-reference to the owning post."""

    body = fields.CharField()
    post = fields.ReferenceField(Post)
Esempio n. 24
0
 class OtherRefModel(EmbeddedMongoModel):
     ref = fields.ReferenceField(OtherModel)
Esempio n. 25
0
 class Container(MongoModel):
     one_to_many = fields.ListField(fields.ReferenceField(OtherModel))
Esempio n. 26
0
 class CommentWithUser(MongoModel):
     body = fields.CharField()
     post = fields.ReferenceField(Post)
     user = fields.ReferenceField(User)
Esempio n. 27
0
 class MultiReferenceModel(MongoModel):
     comments = fields.ListField(fields.ReferenceField(Comment))
     posts = fields.ListField(fields.ReferenceField(Post))
     embeds = fields.EmbeddedDocumentListField(MultiReferenceModelEmbed)
Esempio n. 28
0
 class CommentContainer(MongoModel):
     ref = fields.ReferenceField(Comment)
Esempio n. 29
0
class BankLink(MongoModel):
    """Link between a user and a bank authorization token."""

    user = fields.ReferenceField(User)
    # NOTE(review): token is stored as-is; confirm it is encrypted/opaque
    # before it reaches this model.
    auth_token = fields.CharField()
Esempio n. 30
0
class Subscriber(MongoModel):
    """An IPTV subscriber account.

    Holds credentials, registered devices, linked servers and three parallel
    content lists (``streams``, ``vods``, ``catchups``).  Each content entry
    is a ``UserStream`` whose ``private`` flag separates the subscriber's own
    content from "official" content provided by the linked servers.
    """

    class Meta:
        collection_name = 'subscribers'
        allow_inheritance = True

    MAX_DATE = datetime(2100, 1, 1)
    ID_FIELD = 'id'
    EMAIL_FIELD = 'login'
    PASSWORD_FIELD = 'password'

    class Status(IntEnum):
        """Account lifecycle state."""

        NOT_ACTIVE = 0
        ACTIVE = 1
        DELETED = 2

        @classmethod
        def choices(cls):
            """(value, name) pairs for form/select widgets."""
            return [(choice, choice.name) for choice in cls]

        @classmethod
        def coerce(cls, item):
            """Coerce an int-like form value into a Status member."""
            return cls(int(item)) if not isinstance(item, cls) else item

        def __str__(self):
            return str(self.value)

    # Length of an md5 hex digest — passwords are stored hashed at exactly
    # this length, never in plain text.
    SUBSCRIBER_HASH_LENGTH = 32

    email = fields.CharField(max_length=64, required=True)
    first_name = fields.CharField(max_length=64, required=True)
    last_name = fields.CharField(max_length=64, required=True)
    password = fields.CharField(min_length=SUBSCRIBER_HASH_LENGTH,
                                max_length=SUBSCRIBER_HASH_LENGTH,
                                required=True)
    created_date = fields.DateTimeField(default=datetime.now)
    exp_date = fields.DateTimeField(default=MAX_DATE)
    status = fields.IntegerField(default=Status.NOT_ACTIVE)
    country = fields.CharField(min_length=2, max_length=3, required=True)
    language = fields.CharField(default=constants.DEFAULT_LOCALE,
                                required=True)

    # PULL keeps this list consistent when a referenced server is deleted.
    servers = fields.ListField(fields.ReferenceField(
        ServiceSettings, on_delete=fields.ReferenceField.PULL),
                               default=[],
                               blank=True)
    devices = fields.EmbeddedDocumentListField(Device, default=[], blank=True)
    max_devices_count = fields.IntegerField(
        default=constants.DEFAULT_DEVICES_COUNT)
    # content
    streams = fields.EmbeddedDocumentListField(UserStream,
                                               default=[],
                                               blank=True)
    vods = fields.EmbeddedDocumentListField(UserStream, default=[], blank=True)
    catchups = fields.EmbeddedDocumentListField(UserStream,
                                                default=[],
                                                blank=True)

    def get_id(self) -> str:
        """Primary key as a string."""
        return str(self.pk)

    @property
    def id(self):
        return self.pk

    def created_date_utc_msec(self):
        """Creation date as UTC milliseconds since the epoch."""
        return date_to_utc_msec(self.created_date)

    def expiration_date_utc_msec(self):
        """Expiration date as UTC milliseconds since the epoch."""
        return date_to_utc_msec(self.exp_date)

    def add_server(self, server: ServiceSettings):
        """Link *server* to this subscriber and persist."""
        self.servers.append(server)
        self.save()

    def add_device(self, device: Device):
        """Register *device* unless the device quota is already reached."""
        if len(self.devices) < self.max_devices_count:
            self.devices.append(device)
            self.save()

    def remove_device(self, did: ObjectId):
        """Remove the device with id *did* (if present) and persist."""
        for dev in self.devices:
            if dev.id == did:
                self.devices.remove(dev)
                break
        self.save()

    def find_device(self, did: ObjectId):
        """Return the registered device with id *did*, or None."""
        for dev in self.devices:
            if dev.id == did:
                return dev

        return None

    def generate_playlist(self, did: str, lb_server_host_and_port: str) -> str:
        """Build an M3U playlist covering all of this subscriber's streams.

        Private streams emit their plain playlist; official streams emit a
        device-scoped playlist routed through the load balancer.
        """
        result = '#EXTM3U\n'
        sid = str(self.id)
        for stream in self.streams:
            if stream.private:
                result += stream.sid.generate_playlist(False)
            else:
                result += stream.sid.generate_device_playlist(
                    sid, self.password, did, lb_server_host_and_port, False)

        return result

    def all_streams(self):
        return self.streams

    # official streams
    def add_official_stream_by_id(self, oid: ObjectId):
        """Attach the official stream identified by *oid*."""
        user_stream = UserStream(sid=oid)
        self.add_official_stream(user_stream)

    def add_official_stream(self, user_stream: UserStream):
        """Attach an official (non-private) stream, skipping duplicates."""
        if not user_stream:
            return

        for stream in self.streams:
            if not stream.private and stream.sid == user_stream.sid:
                return

        self.streams.append(user_stream)
        self.save()

    def remove_official_stream(self, ostream: IStream):
        """Drop every official entry referencing *ostream* and persist."""
        if not ostream:
            return

        # Iterate over a copy: removing from the list being iterated skips
        # the element that follows each removal.
        for stream in list(self.streams):
            if not stream.private and stream.sid == ostream:
                self.streams.remove(stream)
        self.save()

    def remove_official_stream_by_id(self, sid: ObjectId):
        """Drop the official stream identified by *sid*, if it exists."""
        original_stream = IStream.get_stream_by_id(sid)
        self.remove_official_stream(original_stream)

    # official vods
    def add_official_vod_by_id(self, oid: ObjectId):
        """Attach the official VOD identified by *oid*."""
        user_stream = UserStream(sid=oid)
        self.add_official_vod(user_stream)

    def add_official_vod(self, user_stream: UserStream):
        """Attach an official (non-private) VOD, skipping duplicates."""
        if not user_stream:
            return

        for vod in self.vods:
            if not vod.private and vod.sid == user_stream.sid:
                return

        self.vods.append(user_stream)
        self.save()

    def remove_official_vod(self, ostream: IStream):
        """Drop every official VOD entry referencing *ostream* and persist."""
        if not ostream:
            return

        # Copy for the same remove-while-iterating reason as above.
        for vod in list(self.vods):
            if not vod.private and vod.sid == ostream:
                self.vods.remove(vod)
        self.save()

    def remove_official_vod_by_id(self, sid: ObjectId):
        """Drop the official VOD identified by *sid*, if it exists."""
        original_stream = IStream.get_stream_by_id(sid)
        self.remove_official_vod(original_stream)

    # official catchups
    def add_official_catchup_by_id(self, oid: ObjectId):
        """Attach the official catchup identified by *oid*."""
        user_stream = UserStream(sid=oid)
        self.add_official_catchup(user_stream)

    def add_official_catchup(self, user_stream: UserStream):
        """Attach an official (non-private) catchup, skipping duplicates."""
        if not user_stream:
            return

        for catchup in self.catchups:
            if not catchup.private and catchup.sid == user_stream.sid:
                return

        self.catchups.append(user_stream)
        self.save()

    def remove_official_catchup(self, ostream: IStream):
        """Drop every official catchup referencing *ostream* and persist."""
        if not ostream:
            return

        # Copy for the same remove-while-iterating reason as above.
        for catchup in list(self.catchups):
            if not catchup.private and catchup.sid == ostream:
                self.catchups.remove(catchup)
        self.save()

    def remove_official_catchup_by_id(self, sid: ObjectId):
        """Drop the official catchup identified by *sid*, if it exists."""
        original_stream = IStream.get_stream_by_id(sid)
        self.remove_official_catchup(original_stream)

    # own
    def add_own_stream(self, user_stream: UserStream):
        """Attach *user_stream* as a private stream, skipping duplicates.

        Fix: the original compared ``stream.sid == user_stream`` — a sid
        against the whole UserStream object — so the duplicate check never
        fired.  Compare sid to sid, as the other add_* methods do.
        """
        for stream in self.streams:
            if stream.private and stream.sid == user_stream.sid:
                return

        user_stream.private = True
        self.streams.append(user_stream)
        self.save()

    def remove_own_stream_by_id(self, sid: ObjectId):
        """Delete the subscriber's own stream *sid* and unlink it.

        Fixes: the original removed from a non-existent ``self.stream``
        attribute (AttributeError on any match) and reused ``stream`` as the
        loop variable, so the trailing ``delete()`` acted on the wrong
        object.  Only private entries are touched, mirroring
        remove_own_vod_by_id.
        """
        original_stream = IStream.get_stream_by_id(sid)
        if original_stream:
            for entry in list(self.streams):
                if entry.private and entry.sid == sid:
                    self.streams.remove(entry)
            original_stream.delete()
            self.save()

    def remove_all_own_streams(self):
        """Drop every private stream entry and persist."""
        # Copy: removing while iterating skips adjacent private entries.
        for stream in list(self.streams):
            if stream.private:
                self.streams.remove(stream)
        self.save()

    def add_own_vod(self, user_stream: UserStream):
        """Attach *user_stream* as a private VOD, skipping duplicates.

        Fix: the original read/wrote ``self.vod``, which does not exist —
        the field is ``vods``.
        """
        for vod in self.vods:
            if vod.private and vod.sid == user_stream.sid:
                return

        user_stream.private = True
        self.vods.append(user_stream)
        self.save()

    def remove_own_vod_by_id(self, sid: ObjectId):
        """Delete the subscriber's own VOD *sid* and unlink it.

        Fixes: ``self.vod`` -> ``self.vods``, and the loop variable no
        longer shadows the stream object deleted below.
        """
        original_stream = IStream.get_stream_by_id(sid)
        if original_stream:
            for entry in list(self.vods):
                if entry.private and entry.sid == sid:
                    self.vods.remove(entry)
            original_stream.delete()
            self.save()

    def remove_all_own_vods(self):
        """Drop every private VOD entry and persist."""
        # Copy: removing while iterating skips adjacent private entries.
        for stream in list(self.vods):
            if stream.private:
                self.vods.remove(stream)
        self.save()

    # available
    def official_streams(self):
        """Selected official (non-private) stream entries."""
        streams = []
        for stream in self.streams:
            if not stream.private:
                streams.append(stream)

        return streams

    def official_vods(self):
        """Selected official (non-private) VOD entries."""
        streams = []
        for stream in self.vods:
            if not stream.private:
                streams.append(stream)

        return streams

    def official_catchups(self):
        """Selected official (non-private) catchup entries."""
        streams = []
        for stream in self.catchups:
            if not stream.private:
                streams.append(stream)

        return streams

    def own_streams(self):
        """The subscriber's private stream entries."""
        streams = []
        for stream in self.streams:
            if stream.private:
                streams.append(stream)

        return streams

    def all_available_servers(self):
        return self.servers

    def all_available_official_streams(self) -> [IStream]:
        """Every live stream offered by the linked servers."""
        streams = []
        for serv in self.servers:
            for stream in serv.streams:
                if is_live_stream(stream):
                    streams.append(stream)

        return streams

    def all_available_official_vods(self) -> [IStream]:
        """Every VOD stream offered by the linked servers."""
        streams = []
        for serv in self.servers:
            for stream in serv.streams:
                if is_vod_stream(stream):
                    streams.append(stream)

        return streams

    def all_available_official_catchups(self) -> [IStream]:
        """Every catchup stream offered by the linked servers."""
        streams = []
        for serv in self.servers:
            for stream in serv.streams:
                if is_catchup(stream):
                    streams.append(stream)

        return streams

    # select
    def select_all_streams(self, select: bool):
        """Select every available official stream, or clear the selection.

        Existing matching entries are kept so their per-user state survives.
        """
        if not select:
            self.streams = []
            self.save()
            return

        ustreams = []
        for ostream in self.all_available_official_streams():
            user_stream = UserStream(sid=ostream.id)
            # Renamed from ``stream`` so the inner loop no longer shadows
            # the outer loop variable.
            for existing in self.streams:
                if not existing.private and existing.sid == user_stream.sid:
                    user_stream = existing
                    break
            ustreams.append(user_stream)

        self.streams = ustreams
        self.save()

    def select_all_vods(self, select: bool):
        """Select every available official VOD, or clear the selection."""
        if not select:
            self.vods = []
            self.save()
            return

        ustreams = []
        for ovod in self.all_available_official_vods():
            user_vod = UserStream(sid=ovod.id)
            for vod in self.vods:
                if not vod.private and vod.sid == user_vod.sid:
                    user_vod = vod
                    break
            ustreams.append(user_vod)

        self.vods = ustreams
        self.save()

    def select_all_catchups(self, select: bool):
        """Select every available official catchup, or clear the selection."""
        if not select:
            self.catchups = []
            self.save()
            return

        ustreams = []
        for ocatchup in self.all_available_official_catchups():
            user_catchup = UserStream(sid=ocatchup.id)
            for catchup in self.catchups:
                if not catchup.private and catchup.sid == user_catchup.sid:
                    user_catchup = catchup
                    break
            ustreams.append(user_catchup)

        self.catchups = ustreams
        self.save()

    def delete(self, *args, **kwargs):
        """Remove all private content, then delete the document."""
        self.remove_all_own_streams()
        self.remove_all_own_vods()
        return super(Subscriber, self).delete(*args, **kwargs)

    def delete_fake(self, *args, **kwargs):
        """Soft delete: strip private content and mark the account DELETED
        while keeping the document in the collection."""
        self.remove_all_own_streams()
        self.remove_all_own_vods()
        self.status = Subscriber.Status.DELETED
        self.save()

    @staticmethod
    def make_md5_hash_from_password(password: str) -> str:
        """md5 hex digest of *password*.

        NOTE(review): md5 is not a suitable password hash by modern
        standards; kept because stored hashes and the fixed-length password
        field depend on it.
        """
        m = md5()
        m.update(password.encode())
        return m.hexdigest()

    @staticmethod
    def generate_password_hash(password: str) -> str:
        """Canonical password-hash entry point (delegates to md5)."""
        return Subscriber.make_md5_hash_from_password(password)

    @staticmethod
    def check_password_hash(hash_str: str, password: str) -> bool:
        """True when *password* hashes to *hash_str*."""
        return hash_str == Subscriber.generate_password_hash(password)

    @classmethod
    def make_subscriber(cls,
                        email: str,
                        first_name: str,
                        last_name: str,
                        password: str,
                        country: str,
                        language: str,
                        exp_date=MAX_DATE):
        """Alternate constructor: builds a subscriber with a hashed password."""
        return cls(email=email,
                   first_name=first_name,
                   last_name=last_name,
                   password=Subscriber.make_md5_hash_from_password(password),
                   country=country,
                   language=language,
                   exp_date=exp_date)