Example #1
    def test_origin_only_tank(self):
        #  move group with only origin, make sure group is still in original tank
        utils.enter_contx(self.tank, self.cleaned_data, True, grp_pk=self.grp.pk)
        move_date = datetime.now().date()

        utils.create_movement_evnt(self.final_tank, None, self.cleaned_data, move_date, grp_pk=self.grp.pk)

        indv_list, grp_list = self.tank.fish_in_cont()
        self.assertIn(self.grp, grp_list)
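
Taken together, these snippets suggest the call pattern below for utils.enter_contx. This is only a sketch inferred from the examples on this page, not the project's documented signature: cont is a container (tank, trough, tray, or cup), cleaned_data carries facic_id, evnt_id, created_by and created_date, the third positional argument is the final flag (final_flag, True or None), and grp_pk or indv_pk link a group or individual to the container cross reference. Without return_contx the call returns an entered flag; with return_contx=True some call sites unpack (contx, entered) while others take a single contx, so the return shape appears to vary between versions.

    # Sketch only: inferred from the examples on this page, not the canonical API.
    # `tank`, `grp`, and `cleaned_data` are assumed to be set up as in the tests here.
    entered = utils.enter_contx(tank, cleaned_data, True, grp_pk=grp.pk)
    contx, contx_entered = utils.enter_contx(tank, cleaned_data, None,
                                             grp_pk=grp.pk, return_contx=True)
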
Example #2
 def test_two_grps_one_tank(self):
     #  put two grps into a single tank, make sure both are located:
     second_grp = BioFactoryFloor.GrpFactory()
     utils.enter_contx(self.tank, self.cleaned_data, True, grp_pk=self.grp.pk)
     utils.enter_contx(self.tank, self.cleaned_data, True, grp_pk=second_grp.pk)
     indv_list, grp_list = self.tank.fish_in_cont()
     self.assertEqual(len(grp_list), 2)
     self.assertIn(self.grp, grp_list)
     self.assertIn(second_grp, grp_list)
Example #3
 def test_move_grp(self):
     # grp in one tank, gets moved, is in second tank and not in first tank
     utils.enter_contx(self.tank, self.cleaned_data, True, grp_pk=self.grp.pk)
     indv_list, grp_list = self.tank.fish_in_cont()
     self.assertIn(self.grp, grp_list)
     move_date = datetime.now().date()
     utils.create_movement_evnt(self.tank, self.final_tank, self.cleaned_data, move_date, grp_pk=self.grp.pk)
     indv_list, grp_list = self.tank.fish_in_cont()
     self.assertNotIn(self.grp, grp_list)
     indv_list, grp_list = self.final_tank.fish_in_cont()
     self.assertIn(self.grp, grp_list)
Example #4
 def test_two_cnts_one_grp(self):
     # add two counts in different containers and make sure the group records the proper count
     cnt_val = randint(0, 100)
     utils.enter_cnt(self.cleaned_data, cnt_val, self.contx.pk, cnt_code="Fish in Container")
     contx = utils.enter_contx(self.final_tank, self.cleaned_data, True, grp_pk=self.grp.pk, return_contx=True)
     utils.enter_cnt(self.cleaned_data, cnt_val, contx.pk, cnt_code="Fish in Container")
     self.assertEqual(self.grp.count_fish_in_group(), 2 * cnt_val)
Example #5
 def setUp(self):
     super().setUp()  # used to import fixtures
     self.grp = BioFactoryFloor.GrpFactory()
     self.trof = BioFactoryFloor.TrofFactory(name='-1')
     self.trof_two = BioFactoryFloor.TrofFactory(
         name='-2', facic_id=self.trof.facic_id)
     self.evnt_date = utils.naive_to_aware(datetime.today() -
                                           timedelta(days=100))
     self.evnt = BioFactoryFloor.EvntFactory(start_datetime=self.evnt_date,
                                             facic_id=self.trof.facic_id)
     self.cleaned_data = {
         "facic_id": self.evnt.facic_id,
         "evnt_id": self.evnt,
         "created_by": self.evnt.created_by,
         "created_date": self.evnt.created_date,
     }
     self.contx, data_entered = utils.enter_contx(self.trof,
                                                  self.cleaned_data,
                                                  None,
                                                  return_contx=True)
     self.contx_two, data_entered = utils.enter_contx(self.trof_two,
                                                      self.cleaned_data,
                                                      None,
                                                      return_contx=True)
     temp_envc = models.EnvCode.objects.filter(name="Temperature").get()
     # add ten days worth of temp data to the trough
     for temp in range(0, 10):
         env_date = utils.naive_to_aware(self.evnt.start_date +
                                         timedelta(days=temp))
         utils.enter_env(temp,
                         env_date,
                         self.cleaned_data,
                         temp_envc,
                         contx=self.contx)
     for temp in range(10, 20):
         env_date = utils.naive_to_aware(self.evnt.start_date +
                                         timedelta(days=temp))
         utils.enter_env(temp,
                         env_date,
                         self.cleaned_data,
                         temp_envc,
                         contx=self.contx_two)
Example #6
    def row_parser(self, row):
        cleaned_data = self.cleaned_data
        row_datetime = utils.get_row_date(row, get_time=True)

        trof_list = utils.parse_trof_str(row.get(self.trof_key), cleaned_data["facic_id"])
        for trof_id in trof_list:
            row_contx, contx_entered = utils.enter_contx(trof_id, cleaned_data, final_flag=None, return_contx=True)
            self.row_entered += contx_entered

            self.row_entered += utils.enter_env(row[self.temp_key], row_datetime.date(), cleaned_data,
                                                self.envc_id, env_time=row_datetime.time(), contx=row_contx,
                                                save=True, qual_id=self.qual_id)
Example #7
 def test_two_cnts_one_grp(self):
     # add two counts in different containers and make sure the group records the proper count
     cnt_val = randint(0, 100)
     utils.enter_cnt(self.cleaned_data, cnt_val, self.contx.pk, cnt_code="Fish in Container")
     # sometimes factories will reuse an event/tank which will prevent new contx's and cnt's from being entered.
     # this loop ensures that new data does get added
     data_entered = False
     while not data_entered:
         contx, data_entered = utils.enter_contx(self.final_tank, self.cleaned_data, True, grp_pk=self.grp.pk,
                                                 return_contx=True)
     utils.enter_cnt(self.cleaned_data, cnt_val, contx.pk, cnt_code="Fish in Container")
     self.assertEqual(self.grp.count_fish_in_group(), 2 * cnt_val)
Example #8
    def data_preper(self):
        cleaned_data = self.cleaned_data
        self.prog_grp_anidc_id = models.AnimalDetCode.objects.filter(name="Program Group").get()
        self.sex_anidc_id = models.AnimalDetCode.objects.filter(name="Gender").get()
        self.len_anidc_id = models.AnimalDetCode.objects.filter(name="Length").get()
        self.weight_anidc_id = models.AnimalDetCode.objects.filter(name="Weight").get()
        self.vial_anidc_id = models.AnimalDetCode.objects.filter(name="Vial").get()
        self.ani_health_anidc_id = models.AnimalDetCode.objects.filter(name="Animal Health").get()
        self.envelope_anidc_id = models.AnimalDetCode.objects.filter(name="Scale Envelope").get()
        self.wr_adsc_id = models.AniDetSubjCode.objects.filter(name="Wild Return").get()
        self.locc_id = models.LocCode.objects.filter(name="Adult Collection Site").get()
        self.salmon_id = models.SpeciesCode.objects.filter(name="Salmon").get()
        self.sampc_id = models.SampleCode.objects.filter(name="Individual Sample").get()

        for site_name in self.data[self.site_key].unique():
            if utils.nan_to_none(site_name):
                self.site_dict[site_name] = models.ReleaseSiteCode.objects.filter(name__icontains=site_name).select_related("rive_id").get()

        for tank_name in self.data[self.tank_key].unique():
            if utils.nan_to_none(tank_name):
                self.tank_dict[tank_name] = models.Tank.objects.filter(name__iexact=tank_name, facic_id=cleaned_data["facic_id"]).get()
                utils.enter_contx(self.tank_dict[tank_name], cleaned_data)
Example #9
    def test_absolute_cnt(self):
        #  take eggs from a group and then record absolute count the following day.
        init_cnt = randint(300, 500)
        cnt_one_val = randint(5, 100)
        cnt_final_val = randint(0, 5)
        next_day_evnt = BioFactoryFloor.EvntFactory()
        next_day_evnt.facic_id = self.evnt.facic_id
        next_day_evnt.start_datetime = self.evnt.start_datetime + datetime.timedelta(days=1)
        next_day_evnt.save()
        new_cleaned_data = self.cleaned_data.copy()
        new_cleaned_data["evnt_id"] = next_day_evnt
        end_contx = utils.enter_contx(self.tank, new_cleaned_data, None, grp_pk=self.grp.pk, return_contx=True)

        utils.enter_cnt(self.cleaned_data, init_cnt, self.contx.pk, cnt_code="Eggs Added")
        cnt = utils.enter_cnt(self.cleaned_data, 0, self.contx.pk, cnt_code="Eggs Removed")
        utils.enter_cnt_det(self.cleaned_data, cnt, cnt_one_val, "Program Group", "EQU")
        utils.enter_cnt(new_cleaned_data, cnt_final_val, end_contx.pk, cnt_code="Egg Count")
        self.assertEqual(self.grp.count_fish_in_group(), cnt_final_val)
Example #10
    def row_parser(self, row):
        # need to: find the pair's group, link it to its pairing, create a tray, and add the count.
        cleaned_data = self.cleaned_data
        row_date = utils.get_row_date(row)
        pair_list = utils.get_pair(row[self.cross_key],
                                   row["stok_id"],
                                   row[self.year_key],
                                   prog_grp=utils.nan_to_none(
                                       row.get(self.prog_key)),
                                   fail_on_not_found=True)
        if len(pair_list) == 1:
            pair_id = pair_list[0]
        else:
            raise Exception("Too many pairs found for row \n{}".format(row))

        anix_id = models.AniDetailXref.objects.filter(
            pair_id=pair_id,
            grp_id__isnull=False).select_related('grp_id').first()
        grp_id = anix_id.grp_id
        self.row_entered += utils.enter_anix(cleaned_data,
                                             grp_pk=grp_id.pk,
                                             return_sucess=True)

        tray_id = utils.create_tray(row["trof_id"], row[self.tray_key],
                                    row_date, cleaned_data)
        contx, contx_entered = utils.enter_contx(tray_id,
                                                 cleaned_data,
                                                 True,
                                                 grp_pk=grp_id.pk,
                                                 return_contx=True)
        self.row_entered += contx_entered

        if utils.nan_to_none(row.get(self.fecu_key)):
            cnt, cnt_entered = utils.enter_cnt(cleaned_data,
                                               row[self.fecu_key],
                                               contx_pk=contx.pk,
                                               cnt_code="Photo Count")
            self.row_entered += cnt_entered

        self.row_entered += utils.enter_bulk_grpd(anix_id.pk,
                                                  cleaned_data,
                                                  row_date,
                                                  comments=row.get(
                                                      self.comment_key))
Example #11
    def setUp(self):
        super().setUp()  # used to import fixtures

        # create group, put them in a tank:
        self.grp = BioFactoryFloor.GrpFactory()
        self.evnt = BioFactoryFloor.EvntFactory()
        self.tank = BioFactoryFloor.TankFactory()
        self.tank.facic_id = self.evnt.facic_id
        self.tank.save()
        self.final_tank = BioFactoryFloor.TankFactory()
        self.final_tank.facic_id = self.evnt.facic_id
        self.final_tank.save()
        self.cleaned_data = {
            "facic_id": self.evnt.facic_id,
            "evnt_id": self.evnt,
            "created_by": self.evnt.created_by,
            "created_date": self.evnt.created_date,
        }
        self.contx = utils.enter_contx(self.tank, self.cleaned_data, True, grp_pk=self.grp.pk, return_contx=True)
Example #12
    def clean(self):
        cleaned_data = super().clean()
        cleaned_data["move_date"] = utils.naive_to_aware(
            cleaned_data["move_date"])
        cleaned_data["facic_id"] = cleaned_data["evnt_id"].facic_id

        if cleaned_data["grp_id"] and not cleaned_data["new_grp_id"]:
            grp_id = cleaned_data["grp_id"]
        elif not cleaned_data["grp_id"] and cleaned_data["new_grp_id"]:
            grp_id = cleaned_data["new_grp_id"]
        if cleaned_data["grp_id"] and cleaned_data["new_grp_id"]:
            self.add_error('grp_id',
                           gettext("Can only add to one group at a time"))
            self.add_error('new_grp_id',
                           gettext("Can only add to one group at a time"))

        if not self.is_valid():
            return cleaned_data

        # fish into tank contx
        contx, entered = utils.enter_contx(self.cont,
                                           cleaned_data,
                                           True,
                                           grp_pk=grp_id.pk,
                                           return_contx=True)

        # fish to event
        utils.enter_anix(cleaned_data, grp_pk=grp_id.pk)

        # perc contx:
        if cleaned_data["perc_id"]:
            team_id, entered = utils.add_team_member(cleaned_data["perc_id"],
                                                     cleaned_data["evnt_id"],
                                                     return_team=True)
            utils.enter_anix(cleaned_data,
                             grp_pk=grp_id.pk,
                             team_pk=team_id.pk)

        # cnt:
        utils.enter_cnt(cleaned_data,
                        cleaned_data["num_fish"],
                        contx_pk=contx.pk)
Example #13
 def clean(self):
     cleaned_data = super(FeedHandlerForm, self).clean()
     if self.is_valid():
         cleaned_data["feed_date"] = utils.naive_to_aware(
             cleaned_data["feed_date"])
         cleaned_data["created_date"] = utils.naive_to_aware(
             cleaned_data["created_date"])
         cleaned_data["evnt_id"] = utils.create_feed_evnt(cleaned_data)
         contx_id, entered = utils.enter_contx(self.cont,
                                               cleaned_data,
                                               return_contx=True)
         feed_entered = utils.enter_feed(cleaned_data,
                                         contx_id,
                                         cleaned_data["feedc_id"],
                                         cleaned_data["feedm_id"],
                                         cleaned_data["amt"],
                                         freq=cleaned_data["freq"])
         if not feed_entered:
             raise ValidationError("Feeding instance not entered")
     return cleaned_data
Example #14
    def data_preper(self):
        cleaned_data = self.cleaned_data
        self.sampc_id = models.SampleCode.objects.filter(
            name="Individual Sample").get()
        self.prnt_grp_anidc_id = models.AnimalDetCode.objects.filter(
            name="Parent Group").get()
        self.prog_grp_anidc_id = models.AnimalDetCode.objects.filter(
            name="Program Group").get()
        self.sex_anidc_id = models.AnimalDetCode.objects.filter(
            name="Gender").get()
        self.len_anidc_id = models.AnimalDetCode.objects.filter(
            name="Length").get()
        self.weight_anidc_id = models.AnimalDetCode.objects.filter(
            name="Weight").get()
        self.vial_anidc_id = models.AnimalDetCode.objects.filter(
            name="Vial").get()
        self.envelope_anidc_id = models.AnimalDetCode.objects.filter(
            name="Scale Envelope").get()
        self.ani_health_anidc_id = models.AnimalDetCode.objects.filter(
            name="Animal Health").get()
        self.anidc_ufid_id = models.AnimalDetCode.objects.filter(
            name="UFID").get()
        self.vax_anidc_id = models.AnimalDetCode.objects.filter(
            name="Vaccination").get()
        self.mark_anidc_id = models.AnimalDetCode.objects.filter(
            name="Mark").get()
        self.lifestage_anidc_id = models.AnimalDetCode.objects.filter(
            name="Lifestage").get()
        self.comment_anidc_id = models.AnimalDetCode.objects.filter(
            name="Comment").get()

        # The following steps are to set additional columns on each row to facilitate parsing.
        # In particular, the columns set will be: "datetime", "grp_year", "grp_coll", "start_tank_id",
        # "end_tank_id", "grp_key", "end_grp_key".
        # The two grp_keys will link to dictionaries of the groups, which are also set below

        # set date
        self.data = utils.set_row_datetime(self.data)
        # split year-coll
        self.data["grp_year"] = self.data.apply(
            lambda row: utils.year_coll_splitter(row[self.yr_coll_key])[0],
            axis=1)
        self.data["grp_coll"] = self.data.apply(
            lambda row: utils.year_coll_splitter(row[self.yr_coll_key])[1],
            axis=1)

        # set start and end tank columns:
        self.data = utils.set_row_tank(self.data,
                                       cleaned_data,
                                       self.start_tank_key,
                                       col_name="start_tank_id")
        self.data = utils.set_row_tank(self.data,
                                       cleaned_data,
                                       self.end_tank_key,
                                       col_name="end_tank_id")

        # set the dict keys for groups, use astype(str) to handle anything that might be a nan.
        self.data, self.start_grp_dict = utils.set_row_grp(
            self.data,
            self.rive_key,
            self.yr_coll_key,
            self.prio_key,
            "start_tank_id",
            "datetime",
            self.grp_mark_key,
            grp_col_name="start_grp_id",
            return_dict=True)
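        # link each starting group to the current event via an animal-detail cross reference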
        for item, grp in self.start_grp_dict.items():
            utils.enter_anix(cleaned_data, grp_pk=grp.pk)

        self.data["end_grp_key"] = self.data[self.rive_key] + self.data[self.yr_coll_key] + \
                                   self.data[self.end_tank_key].astype(str) + self.data[self.prio_key].astype(str) + \
                                   self.data["datetime"].astype(str) + self.data[self.grp_mark_key].astype(str)

        # create the end group dict and create the movement events, groups, counts, contxs, etc. necessary
        end_grp_data = self.data.groupby([
            self.rive_key, "grp_year", "grp_coll", "end_tank_id",
            "start_tank_id", self.prio_key, "datetime", self.grp_mark_key,
            "grp_key", "end_grp_key"
        ],
                                         dropna=False,
                                         sort=False).size().reset_index()
        for row in end_grp_data.to_dict('records'):
            # if the end tank is not set, skip this step
            if not utils.nan_to_none(row["end_tank_id"]):
                self.end_grp_dict[row["end_grp_key"]] = None
                continue
            grps = utils.get_grp(row[self.rive_key],
                                 row["grp_year"],
                                 row["grp_coll"],
                                 row["end_tank_id"],
                                 at_date=row["datetime"],
                                 prog_str=row[self.prio_key],
                                 mark_str=row[self.grp_mark_key])
            start_grp_id = self.start_grp_dict[row["grp_key"]]
            start_contx, contx_entered = utils.enter_contx(
                row["start_tank_id"],
                cleaned_data,
                None,
                grp_pk=start_grp_id.pk,
                return_contx=True)
            self.row_entered += utils.enter_cnt(
                cleaned_data,
                sum(end_grp_data[end_grp_data["grp_key"] == row["grp_key"]]
                    [0]),
                start_contx.pk,
                cnt_code="Fish Removed from Container")[1]

            if len(grps) > 0:
                end_grp_id = grps[0]
                self.end_grp_dict[row["end_grp_key"]] = grps[0]
            else:
                end_grp_id = copy.deepcopy(start_grp_id)
                end_grp_id.pk = None
                end_grp_id.save()
                self.end_grp_dict[row["end_grp_key"]] = end_grp_id

            if end_grp_id.pk != start_grp_id.pk:
                grp_anix = utils.enter_anix(cleaned_data,
                                            grp_pk=end_grp_id.pk,
                                            return_anix=True)
                utils.enter_grpd(grp_anix.pk,
                                 cleaned_data,
                                 row["datetime"],
                                 None,
                                 self.prnt_grp_anidc_id.pk,
                                 frm_grp_id=start_grp_id)
                if utils.nan_to_none(row[self.prio_key]):
                    utils.enter_grpd(grp_anix.pk, cleaned_data,
                                     row["datetime"], row[self.prio_key],
                                     self.prog_grp_anidc_id.pk,
                                     row[self.prio_key])
                if utils.nan_to_none(row[self.grp_mark_key]):
                    utils.enter_grpd(grp_anix.pk, cleaned_data,
                                     row["datetime"], row[self.grp_mark_key],
                                     self.mark_anidc_id.pk,
                                     row[self.grp_mark_key])
                end_contx = utils.create_movement_evnt(row["start_tank_id"],
                                                       row["end_tank_id"],
                                                       cleaned_data,
                                                       row["datetime"],
                                                       grp_pk=end_grp_id.pk,
                                                       return_end_contx=True)
                if end_contx:
                    self.row_entered += utils.enter_cnt(
                        cleaned_data, row[0], end_contx.pk)[1]
        self.data_dict = self.data.to_dict("records")
Example #15
    def row_parser(self, row):
        cleaned_data = self.cleaned_data
        row_date = row["datetime"].date()
        row_start_grp = utils.get_grp(row[self.rive_key],
                                      row["grp_year"],
                                      row["grp_coll"],
                                      row["start_tank_id"],
                                      row_date,
                                      prog_str=row.get(self.prio_key),
                                      mark_str=row.get(self.grp_mark_key),
                                      fail_on_not_found=True)[0]
        start_anix, self.row_entered = utils.enter_anix(
            cleaned_data, grp_pk=row_start_grp.pk)
        start_contx, contx_entered = utils.enter_contx(row["start_tank_id"],
                                                       cleaned_data,
                                                       None,
                                                       return_contx=True)
        self.row_entered += contx_entered

        whole_grp = utils.y_n_to_bool(row[self.abs_key])
        det_anix = start_anix
        row["start_contx_pk"] = None
        if not whole_grp:
            row["start_contx_pk"] = start_contx.pk

        if utils.nan_to_none(row["end_tank_id"]):
            # 4 possible cases here: group in tank or not and whole group move or not:
            row_end_grp_list = utils.get_grp(row[self.rive_key],
                                             row["grp_year"],
                                             row["grp_coll"],
                                             row["end_tank_id"],
                                             row_date,
                                             prog_str=row[self.prio_key],
                                             mark_str=row[self.grp_mark_key])
            row_end_grp = None
            if not whole_grp and not row_end_grp_list:
                # splitting fish group, create end group:
                row_end_grp = copy.deepcopy(row_start_grp)
                row_end_grp.pk = None
                row_end_grp.id = None
                row_end_grp.save()
                end_grp_anix, anix_entered = utils.enter_anix(
                    cleaned_data, grp_pk=row_end_grp.pk)
                self.row_entered += anix_entered

                self.row_entered += utils.enter_bulk_grpd(
                    end_grp_anix.pk,
                    cleaned_data,
                    row_date,
                    prog_grp=row.get(self.prio_key),
                    mark=row.get(self.mark_key))
            elif not whole_grp:
                # splitting fish group, merging to existing end group
                row_end_grp = row_end_grp_list[0]

            if row_end_grp:
                move_contx = utils.create_movement_evnt(row["start_tank_id"],
                                                        row["end_tank_id"],
                                                        cleaned_data,
                                                        row_date,
                                                        grp_pk=row_end_grp.pk,
                                                        return_end_contx=True)
                end_grp_anix, anix_entered = utils.enter_anix(
                    cleaned_data, grp_pk=row_end_grp.pk)
                self.row_entered += anix_entered
                self.row_entered += utils.enter_grpd(end_grp_anix.pk,
                                                     cleaned_data,
                                                     row_date,
                                                     None,
                                                     self.prnt_grp_anidc_id.pk,
                                                     frm_grp_id=row_start_grp)
                cnt, cnt_entered = utils.enter_cnt(cleaned_data,
                                                   row[self.nfish_key],
                                                   move_contx.pk)
                self.row_entered += cnt_entered

                # record details on end tank group
                det_anix = end_grp_anix

            else:
                # move all the fish (whole group, merge to fish at destination if needed)
                move_contx = utils.create_movement_evnt(
                    row["start_tank_id"],
                    row["end_tank_id"],
                    cleaned_data,
                    row_date,
                    grp_pk=row_start_grp.pk,
                    return_end_contx=True)
                cnt, cnt_entered = utils.enter_cnt(cleaned_data,
                                                   row[self.nfish_key],
                                                   move_contx.pk,
                                                   cnt_code="Fish Count")
                self.row_entered += cnt_entered
        else:
            if utils.nan_to_none(row[self.nfish_key]):
                cnt, cnt_entered = utils.enter_cnt(cleaned_data,
                                                   row[self.nfish_key],
                                                   start_contx.pk,
                                                   cnt_code="Fish Count")
                self.row_entered += cnt_entered

        # add details to det_anix:
        self.row_entered += utils.enter_bulk_grpd(
            det_anix.pk,
            cleaned_data,
            row_date,
            vaccinated=row.get(self.vax_key),
            mark=row.get(self.mark_key),
            lifestage=row.get(self.lifestage_key),
            comments=row.get(self.comment_key))

        self.row_entered += utils.parse_extra_cols(row,
                                                   self.cleaned_data,
                                                   det_anix,
                                                   grp=True)
Example #16
    def clean(self):
        cleaned_data = super().clean()
        cleaned_data["mort_date"] = utils.naive_to_aware(
            cleaned_data["mort_date"])

        if not self.is_valid():
            return cleaned_data

        # grab an event for facility info, etc.
        if cleaned_data["indv_mort"]:
            cleaned_data["evnt_id"] = models.AniDetailXref.objects.filter(
                indv_id_id=cleaned_data["indv_mort"]).last().evnt_id
            mort_evntc = models.EventCode.objects.filter(
                name="Mortality").get()
        else:
            cleaned_data["evnt_id"] = models.AniDetailXref.objects.filter(
                grp_id_id=cleaned_data["grp_mort"]).last().evnt_id
            mort_evntc = models.EventCode.objects.filter(
                name="Mortality").get()

        mortality_evnt = models.Event(
            evntc_id=mort_evntc,
            facic_id=cleaned_data["evnt_id"].facic_id,
            prog_id=cleaned_data["evnt_id"].prog_id,
            perc_id=cleaned_data["evnt_id"].perc_id,
            start_datetime=cleaned_data["mort_date"],
            end_datetime=cleaned_data["mort_date"],
            created_by=cleaned_data["created_by"],
            created_date=cleaned_data["created_date"],
        )
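        # save the new mortality event; if an identical event already exists, fall back to it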
        try:
            mortality_evnt.clean()
            mortality_evnt.save()
        except (ValidationError, IntegrityError):
            mortality_evnt = models.Event.objects.filter(
                evntc_id=mortality_evnt.evntc_id,
                facic_id=mortality_evnt.facic_id,
                prog_id=mortality_evnt.prog_id,
                start_datetime=mortality_evnt.start_datetime,
                end_datetime=mortality_evnt.end_datetime,
            ).get()

        cleaned_data["evnt_id"] = mortality_evnt
        cleaned_data["facic_id"] = mortality_evnt.facic_id

        if cleaned_data["indv_mort"]:
            indv = models.Individual.objects.filter(
                pk=cleaned_data["indv_mort"]).get()
            indv.indv_valid = False
            indv.save()

            anix, mort_entered = utils.enter_mortality(
                indv, cleaned_data, cleaned_data["mort_date"])

            utils.enter_bulk_indvd(
                anix.pk,
                cleaned_data,
                cleaned_data["mort_date"],
                len_val=cleaned_data["indv_length"],
                weight=cleaned_data["indv_mass"],
                vial=cleaned_data["indv_vial"],
                scale_envelope=cleaned_data["scale_envelope"],
                gender=cleaned_data["indv_gender"],
            )

            if cleaned_data["observations"].count() != 0:
                for adsc in cleaned_data["observations"]:
                    utils.enter_indvd(anix.pk, cleaned_data,
                                      cleaned_data["mort_date"], None,
                                      adsc.anidc_id.pk, adsc.name, None)
        if cleaned_data["grp_mort"]:
            grp = models.Group.objects.filter(
                pk=cleaned_data["grp_mort"]).get()
            salmon_pk = models.SpeciesCode.objects.filter(
                name__icontains="Salmon").get().pk
            mort_sampc = models.SampleCode.objects.filter(
                name="Mortality Sample").get().pk

            cont = grp.current_cont(at_date=cleaned_data["mort_date"])[0]

            # create contx, link to grp and samp:
            contx, contx_entered = utils.enter_contx(cont,
                                                     cleaned_data,
                                                     None,
                                                     return_contx=True)

            samp_anix, anix_entered = utils.enter_anix(cleaned_data,
                                                       grp_pk=grp.pk,
                                                       contx_pk=contx.pk)

            samp_id, samp_entered = utils.enter_samp(cleaned_data,
                                                     cleaned_data["samp_num"],
                                                     salmon_pk,
                                                     mort_sampc,
                                                     anix_pk=samp_anix.pk)

            mort_entered = utils.enter_samp_mortality(
                samp_id, cleaned_data, cleaned_data["mort_date"])

            utils.enter_bulk_sampd(
                samp_id.pk,
                cleaned_data,
                cleaned_data["mort_date"],
                len_val=cleaned_data["indv_length"],
                weight=cleaned_data["indv_mass"],
                vial=cleaned_data["indv_vial"],
                scale_envelope=cleaned_data["scale_envelope"],
                gender=cleaned_data["indv_gender"],
            )
Example #17
    def row_parser(self, row):
        cleaned_data = self.cleaned_data
        row_date = utils.get_row_date(row)
        self.row_entered += utils.enter_contx(row["trof_id"], cleaned_data)
        # find group from either cross or tray:

        if utils.nan_to_none(row.get(self.hu_key)):
            cont_id = utils.get_cont_from_dot(row[self.hu_key], cleaned_data,
                                              row_date)
        elif utils.nan_to_none(row.get(self.tray_key)):
            cont_id = models.Tray.objects.filter(
                trof_id=row["trof_id"],
                end_date__isnull=True,
                name=row[self.tray_key]).get()
        else:
            cont_id = row["trof_id"]

        if utils.nan_to_none(row.get(self.cross_key)):
            pair_id = models.Pairing.objects.filter(
                cross=row[self.cross_key],
                end_date__isnull=True,
                indv_id__stok_id=row["stok_id"],
                start_date__year=row[self.year_key]).first()
            grp_id = utils.get_tray_group(pair_id, cont_id, row_date)
        else:
            grp_id = cont_id.fish_in_cont(row_date, get_grp=True)

        grp_anix = None
        shock = False
        for pickc_id in cleaned_data["pickc_id"]:
            if utils.nan_to_none(row[pickc_id.name]):
                shock = utils.y_n_to_bool(row.get(self.shocking_key))
                grp_anix, evnt_entered = utils.create_picks_evnt(
                    cleaned_data,
                    cont_id,
                    grp_id.pk,
                    row[pickc_id.name],
                    row_date,
                    pickc_id.name,
                    cleaned_data["evnt_id"].perc_id,
                    shocking=shock,
                    return_anix=True,
                    pick_comments=row.get(self.comment_key))
                self.row_entered += evnt_entered

        for col_name in row.keys():
            col_date = utils.get_col_date(col_name)

            if col_date:
                col_date_str = datetime.strftime(col_date, "%Y-%b-%d")
                self.date_dict[col_date_str] = True
                if utils.nan_to_none(row.get(col_name)):
                    self.row_entered += utils.create_picks_evnt(
                        cleaned_data,
                        cont_id,
                        grp_id.pk,
                        row[col_name],
                        col_date,
                        self.default_pickc_id,
                        cleaned_data["evnt_id"].perc_id,
                        pick_comments=row.get(self.comment_key))

        # record development
        if grp_anix and shock:
            pick_evnt_cleaned_data = cleaned_data.copy()
            pick_evnt_cleaned_data["evnt_id"] = grp_anix.evnt_id
            dev_at_pick = grp_id.get_development(row_date)
            utils.enter_grpd(grp_anix.pk,
                             pick_evnt_cleaned_data,
                             row_date,
                             dev_at_pick,
                             None,
                             anidc_str="Development")
            self.row_entered += utils.enter_contx(row["trof_id"], cleaned_data)
Example #18
    def row_parser(self, row):
        cleaned_data = self.cleaned_data
        # get tray, group, and row date
        row_date = utils.get_row_date(row)

        tray_qs = models.Tray.objects.filter(trof_id=row["trof_id"],
                                             name=row[self.tray_key])
        tray_id = tray_qs.filter(
            Q(start_date__lte=row_date, end_date__gte=row_date)
            | Q(end_date__isnull=True)).get()
        pair_id = models.Pairing.objects.filter(
            cross=row[self.cross_key],
            end_date__isnull=True,
            indv_id__stok_id=row["stok_id"],
            start_date__year=row[self.year_key]).first()

        grp_id = utils.get_tray_group(pair_id, tray_id, row_date)

        # want to shift the hu move event, so that the counting math always works out.
        hu_move_date = row_date + timedelta(minutes=1)
        hu_cleaned_data = utils.create_new_evnt(cleaned_data, "Allocation",
                                                hu_move_date)
        hu_anix, data_entered = utils.enter_anix(hu_cleaned_data,
                                                 grp_pk=grp_id.pk)
        self.row_entered += data_entered
        hu_contx, data_entered = utils.enter_contx(tray_id,
                                                   hu_cleaned_data,
                                                   None,
                                                   grp_pk=grp_id.pk,
                                                   return_contx=True)
        self.row_entered += data_entered
        # record development
        dev_at_hu_transfer = grp_id.get_development(hu_move_date)
        utils.enter_grpd(hu_anix.pk,
                         hu_cleaned_data,
                         hu_move_date,
                         dev_at_hu_transfer,
                         None,
                         anidc_str="Development")
        self.row_entered += utils.enter_contx(row["trof_id"], cleaned_data)

        # HU Picks:
        self.row_entered += utils.enter_cnt(cleaned_data,
                                            row[self.loss_key],
                                            hu_contx.pk,
                                            cnt_code="HU Transfer Loss")[1]

        # generate new group, cup, and movement event:
        cont = None
        if utils.nan_to_none(row[self.end_tray_key]):
            trof_id = models.Trough.objects.filter(
                facic_id=cleaned_data["facic_id"],
                name=row[self.end_trof_key]).get()
            tray_qs = models.Tray.objects.filter(trof_id=trof_id,
                                                 name=row[self.tray_key])
            cont = tray_qs.filter(
                Q(start_date__lte=row_date, end_date__gte=row_date)
                | Q(end_date__isnull=True)).get()
        elif utils.nan_to_none(row[self.end_trof_key]):
            cont = models.Trough.objects.filter(
                facic_id=cleaned_data["facic_id"],
                name=row[self.end_trof_key]).get()
        elif utils.nan_to_none(row[self.heatl_key]):
            cont = utils.get_cont_from_dot(row[self.cont_key], cleaned_data,
                                           row_date)
        elif utils.nan_to_none(row[self.tank_key]):
            cont = models.Tank.objects.filter(
                facic_id=cleaned_data["facic_id"], name=row[self.tank_key])

        self.row_entered += utils.enter_contx(cont, cleaned_data)
        if not utils.y_n_to_bool(row[self.final_key]):
            # NEW GROUPS TAKEN FROM INITIAL
            out_cnt = utils.enter_cnt(cleaned_data,
                                      0,
                                      hu_contx.pk,
                                      cnt_code="Eggs Removed")[0]
            utils.enter_cnt_det(cleaned_data, out_cnt, row[self.cnt_key],
                                "Program Group Split", row[self.prog_key])

            indv, final_grp = cont.fish_in_cont(row_date)
            if not final_grp:
                final_grp = models.Group(
                    spec_id=grp_id.spec_id,
                    coll_id=grp_id.coll_id,
                    grp_year=grp_id.grp_year,
                    stok_id=grp_id.stok_id,
                    grp_valid=True,
                    created_by=cleaned_data["created_by"],
                    created_date=cleaned_data["created_date"],
                )
                try:
                    final_grp.clean()
                    final_grp.save()
                except (ValidationError, IntegrityError):
                    return None
            else:
                # MAIN GROUP GETTING MOVED
                final_grp = final_grp[0]
            final_grp_anix = utils.enter_anix(cleaned_data,
                                              grp_pk=final_grp.pk,
                                              return_anix=True)
            self.row_entered += utils.enter_anix(hu_cleaned_data,
                                                 grp_pk=final_grp.pk,
                                                 return_sucess=True)
            self.row_entered += utils.enter_bulk_grpd(
                final_grp_anix,
                cleaned_data,
                row_date,
                prnt_grp=grp_id,
                prog_grp=row.get(self.prog_key),
                comments=row.get(self.comment_key))
            self.row_entered += utils.enter_grpd(final_grp_anix.pk,
                                                 cleaned_data,
                                                 row_date,
                                                 dev_at_hu_transfer,
                                                 None,
                                                 anidc_str="Development")

            # create movement for the new group, create 2 contx's and 3 anix's
            # cup contx is contx used to link the positive counts
            cont_contx = utils.create_egg_movement_evnt(tray_id,
                                                        cont,
                                                        cleaned_data,
                                                        row_date,
                                                        final_grp.pk,
                                                        return_cup_contx=True)

            move_cleaned_data = cleaned_data.copy()
            move_cleaned_data["evnt_id"] = cont_contx.evnt_id
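            # clearing the pk and saving creates a copy of the movement contx that also points
            # at the tray; the positive counts below are attached to this copied contx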
            cnt_contx = cont_contx
            cnt_contx.pk = None
            cnt_contx.tray_id = tray_id
            try:
                cnt_contx.save()
            except IntegrityError:
                cnt_contx = models.ContainerXRef.objects.filter(
                    pk=cont_contx.pk).get()
            self.row_entered += utils.enter_anix(move_cleaned_data,
                                                 grp_pk=final_grp.pk,
                                                 contx_pk=cnt_contx.pk,
                                                 return_sucess=True)
            # add the positive counts
            cnt = utils.enter_cnt(
                move_cleaned_data,
                row[self.cnt_key],
                cnt_contx.pk,
                cnt_code="Eggs Added",
            )[0]
            if utils.nan_to_none(row.get(self.weight_key)):
                utils.enter_cnt_det(move_cleaned_data, cnt,
                                    row[self.weight_key], "Weight")
            utils.enter_cnt_det(move_cleaned_data, cnt, row[self.cnt_key],
                                "Program Group Split", row[self.prog_key])
        else:
            # Move main group to drawer, and add end date to tray:
            if cont:
                end_contx = utils.create_movement_evnt(tray_id,
                                                       cont,
                                                       cleaned_data,
                                                       row_date,
                                                       grp_pk=grp_id.pk,
                                                       return_end_contx=True)
                tray_id.end_date = row_date
                tray_id.save()
                end_cnt = utils.enter_cnt(cleaned_data,
                                          row[self.cnt_key],
                                          end_contx.pk,
                                          cnt_code="Egg Count")[0]
                utils.enter_cnt_det(cleaned_data, end_cnt,
                                    row[self.weight_key], "Weight")
            else:
                self.log_data += "\n Draw {} from {} not found".format(
                    cont, row[self.cont_key])

            # link cup to egg development event
            utils.enter_contx(cont, cleaned_data, None)
Example #19
    def row_parser(self, row):
        cleaned_data = self.cleaned_data
        row_datetime = utils.get_row_date(row)
        relc_id = self.site_dict[row[self.site_key]]
        year, coll = utils.year_coll_splitter(row[self.coll_key])
        coll_id = utils.coll_getter(coll)
        stok_id = models.StockCode.objects.filter(name__iexact=relc_id.rive_id.name).get()

        indv_id = None

        if utils.nan_to_none(row[self.pit_key]):
            indv_id = models.Individual.objects.filter(pit_tag=row[self.pit_key]).first()
            if not indv_id:
                indv_id = models.Individual(spec_id=self.salmon_id,
                                            stok_id=stok_id,
                                            coll_id=coll_id,
                                            indv_year=year,
                                            pit_tag=row[self.pit_key],
                                            indv_valid=True,
                                            comments=utils.nan_to_none(row.get(self.comment_key)),
                                            created_by=cleaned_data["created_by"],
                                            created_date=cleaned_data["created_date"],
                                            )
                try:
                    indv_id.clean()
                    indv_id.save()
                    self.row_entered = True
                except (ValidationError, IntegrityError):
                    indv_id = models.Individual.objects.filter(pit_tag=indv_id.pit_tag).get()
            indv_anix, data_entered = utils.enter_anix(cleaned_data, indv_pk=indv_id.pk)
            self.row_entered += data_entered
            # add program group to individual if needed:

        loc = models.Location(evnt_id_id=cleaned_data["evnt_id"].pk,
                              locc_id=self.locc_id,
                              rive_id=relc_id.rive_id,
                              relc_id=relc_id,
                              loc_date=row_datetime,
                              created_by=cleaned_data["created_by"],
                              created_date=cleaned_data["created_date"],
                              )
        try:
            loc.clean()
            loc.save()
            self.row_entered = True
        except ValidationError:
            loc = models.Location.objects.filter(evnt_id=loc.evnt_id, locc_id=loc.locc_id,
                                                 rive_id=loc.rive_id, subr_id=loc.subr_id,
                                                 relc_id=loc.relc_id, loc_lat=loc.loc_lat,
                                                 loc_lon=loc.loc_lon, loc_date=loc.loc_date).get()
        self.loc = loc
        self.team_parser(row[self.crew_key], row, loc_id=loc)

        if indv_id:
            anix_loc_indv, anix_entered = utils.enter_anix(cleaned_data, loc_pk=loc.pk, indv_pk=indv_id.pk)
            self.row_entered += anix_entered

            self.row_entered += utils.enter_bulk_indvd(anix_loc_indv.pk, self.cleaned_data, row_datetime,
                                                       gender=row.get(self.sex_key),
                                                       len_mm=row.get(self.len_key_mm),
                                                       len_val=row.get(self.len_key),
                                                       weight=row.get(self.weight_key),
                                                       weight_kg=row.get(self.weight_key_kg),
                                                       vial=row.get(self.vial_key),
                                                       scale_envelope=row.get(self.scale_key),
                                                       prog_grp=row.get(self.grp_key),
                                                       comments=row.get(self.comment_key)
                                                       )

            if utils.nan_to_none(row.get(self.mort_key)):
                if utils.y_n_to_bool(row[self.mort_key]):
                    mort_anix, mort_entered = utils.enter_mortality(indv_id, self.cleaned_data, row_datetime)
                    self.row_entered += mort_entered

            if utils.nan_to_none(row.get(self.wr_key)):
                if utils.y_n_to_bool(row[self.wr_key]):
                    self.row_entered += utils.enter_indvd(anix_loc_indv.pk, cleaned_data, row_datetime, None,
                                                          self.ani_health_anidc_id.pk, adsc_str=self.wr_adsc_id.name)

            if utils.nan_to_none(row.get(self.aquaculture_key)):
                if utils.y_n_to_bool(row[self.aquaculture_key]):
                    self.row_entered += utils.enter_indvd(anix_loc_indv.pk, cleaned_data, row_datetime, None,
                                                          self.ani_health_anidc_id.pk, adsc_str="Aquaculture")

            if utils.nan_to_none(row[self.tank_key]):
                self.row_entered += utils.enter_contx(self.tank_dict[row[self.tank_key]], cleaned_data, True, indv_id.pk)
                if self.loc.pk not in self.loc_caught_dict:
                    self.loc_caught_dict[self.loc.pk] = 1
                else:
                    self.loc_caught_dict[self.loc.pk] += 1
            else:
                if self.loc.pk not in self.loc_obs_dict:
                    self.loc_obs_dict[self.loc.pk] = 1
                else:
                    self.loc_obs_dict[self.loc.pk] += 1

        elif utils.nan_to_none(row.get(self.samp_key)):
            samp = models.Sample.objects.filter(anix_id__evnt_id=cleaned_data["evnt_id"],
                                                loc_id=loc,
                                                spec_id=self.salmon_id,
                                                samp_num=row[self.samp_key],
                                                sampc_id=self.sampc_id,
                                                ).first()  # first(): a missing sample falls through to the creation branch below
            if not samp:
                # create group for sample:
                grp_id = models.Group(spec_id=self.salmon_id,
                                      stok_id=stok_id,
                                      coll_id=coll_id,
                                      grp_year=year,
                                      grp_valid=False,
                                      created_by=cleaned_data["created_by"],
                                      created_date=cleaned_data["created_date"],
                                      )
                grp_id.clean()
                grp_id.save()
                self.row_entered = True

                grp_anix, data_entered = utils.enter_anix(cleaned_data, grp_pk=grp_id.pk)
                self.row_entered += data_entered

                samp, samp_entered = utils.enter_samp(cleaned_data, row[self.samp_key], self.salmon_id.pk, self.sampc_id.pk,
                                                      anix_pk=grp_anix.pk, loc_pk=loc.pk,
                                                      comments=utils.nan_to_none(row.get(self.comment_key)))
                self.row_entered += samp_entered

            self.row_entered += utils.enter_bulk_sampd(samp.pk, self.cleaned_data, row_datetime,
                                                       gender=row.get(self.sex_key),
                                                       len_mm=row.get(self.len_key_mm),
                                                       len_val=row.get(self.len_key),
                                                       weight=row.get(self.weight_key),
                                                       weight_kg=row.get(self.weight_key_kg),
                                                       vial=row.get(self.vial_key),
                                                       scale_envelope=row.get(self.scale_key),
                                                       prog_grp=row.get(self.grp_key),
                                                       comments=row.get(self.comment_key)
                                                       )
            if utils.nan_to_none(row.get(self.mort_key)):
                if utils.y_n_to_bool(row[self.mort_key]):
                    self.row_entered += utils.enter_samp_mortality(samp, self.cleaned_data, row_datetime)

            if utils.nan_to_none(row.get(self.wr_key)):
                if utils.y_n_to_bool(row[self.wr_key]):
                    self.row_entered += utils.enter_sampd(samp.pk, cleaned_data, row_datetime, None,
                                                          self.ani_health_anidc_id.pk, adsc_str=self.wr_adsc_id.name)

            if utils.nan_to_none(row.get(self.aquaculture_key)):
                if utils.y_n_to_bool(row[self.aquaculture_key]):
                    self.row_entered += utils.enter_sampd(samp.pk, cleaned_data, row_datetime, None,
                                                          self.ani_health_anidc_id.pk, adsc_str="Aquaculture")
        else:
            raise Exception("Fish must either be assigned a sample number or a pit tag.")
Example #20
    def row_parser(self, row):
        cleaned_data = self.cleaned_data
        year, coll = utils.year_coll_splitter(row[self.coll_key])
        row_datetime = utils.get_row_date(row)
        row_date = row_datetime.date()
        indv_ufid = utils.nan_to_none(row.get(self.ufid_key))
        indv = models.Individual(
            grp_id=self.grp_id,
            spec_id=self.salmon_id,
            stok_id=self.stok_id,
            coll_id=self.coll_id,
            indv_year=year,
            pit_tag=row[self.pit_key],
            ufid=indv_ufid,
            indv_valid=True,
            comments=utils.nan_to_none(row.get(self.comment_key)),
            created_by=cleaned_data["created_by"],
            created_date=cleaned_data["created_date"],
        )
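        # try to create the individual; if the pit tag is already registered, reuse that record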
        try:
            indv.clean()
            indv.save()
            self.row_entered = True
        except (ValidationError, IntegrityError):
            indv = models.Individual.objects.filter(pit_tag=indv.pit_tag).get()

        if utils.nan_to_none(row[self.from_tank_id_key]) or utils.nan_to_none(
                row[self.to_tank_id_key]):
            in_tank = row[self.from_tank_id_key]
            out_tank = row[self.to_tank_id_key]
            self.row_entered += utils.create_movement_evnt(in_tank,
                                                           out_tank,
                                                           cleaned_data,
                                                           row_datetime,
                                                           indv_pk=indv.pk)
            # if tagged fish goes back into same tank, still link fish to tank:
            if in_tank == out_tank:
                utils.enter_contx(in_tank, cleaned_data, True, indv_pk=indv.pk)

        anix_indv, anix_entered = utils.enter_anix(cleaned_data,
                                                   indv_pk=indv.pk)
        self.row_entered += anix_entered
        self.anix_indv = anix_indv

        utils.enter_bulk_indvd(
            anix_indv.pk,
            self.cleaned_data,
            row_date,
            len_mm=row.get(self.len_key_mm),
            len_val=row.get(self.len_key),
            weight=row.get(self.weight_key),
            weight_kg=row.get(self.weight_key_kg),
            vial=row.get(self.vial_key),
            mark=row.get(self.mark_key),
            prog_grp=row.get(self.group_key),
            lifestage=row.get(self.lifestage_key),
            comments=row.get(self.comment_key),
        )

        if utils.nan_to_none(row.get(self.precocity_key)):
            self.row_entered += utils.enter_indvd(anix_indv.pk, cleaned_data,
                                                  row_date, None,
                                                  self.ani_health_anidc_id.pk,
                                                  "Precocity")

        if utils.nan_to_none(row.get(self.crew_key)):
            perc_list, inits_not_found = utils.team_list_splitter(
                row[self.crew_key])
            for perc_id in perc_list:
                team_id, team_entered = utils.add_team_member(
                    perc_id,
                    cleaned_data["evnt_id"],
                    role_id=self.tagger_code,
                    return_team=True)
                self.row_entered += team_entered
                if team_id:
                    self.row_entered += utils.enter_anix(cleaned_data,
                                                         indv_pk=indv.pk,
                                                         team_pk=team_id.pk,
                                                         return_sucess=True)
            for inits in inits_not_found:
                self.log_data += "No valid personnel with initials ({}) for row with pit tag" \
                                 " {}\n".format(inits, row[self.pit_key])
Example #21
    def row_parser(self, row):
        cleaned_data = self.cleaned_data
        row_date = row["datetime"].date()
        row_grp = row["start_grp_id"]
        row_end_grp = self.end_grp_dict[row["end_grp_key"]]
        if row_end_grp:
            row_grp = row_end_grp
        row_anix, data_entered = utils.enter_anix(cleaned_data,
                                                  grp_pk=row_grp.pk)
        self.row_entered += data_entered

        row_contx, data_entered = utils.enter_contx(row.get("start_tank_id"),
                                                    cleaned_data,
                                                    None,
                                                    grp_pk=row_grp.pk,
                                                    return_contx=True)
        self.row_entered += data_entered

        samp_anix = row_contx.animal_details.filter(
            grp_id=row_grp,
            evnt_id=cleaned_data["evnt_id"],
            indv_id__isnull=True,
            loc_id__isnull=True,
            pair_id__isnull=True,
            final_contx_flag=None).get()

        row_samp, data_entered = utils.enter_samp(cleaned_data,
                                                  row[self.samp_key],
                                                  row_grp.spec_id.pk,
                                                  self.sampc_id.pk,
                                                  anix_pk=samp_anix.pk)
        self.row_entered += data_entered

        if utils.nan_to_none(row.get(self.mort_key)):
            if utils.y_n_to_bool(row[self.mort_key]):
                self.row_entered += utils.enter_samp_mortality(
                    row_samp, cleaned_data, row_date)

        if row_samp:
            self.row_entered += utils.enter_bulk_sampd(
                row_samp.pk,
                self.cleaned_data,
                row_date,
                gender=row.get(self.sex_key),
                len_mm=row.get(self.len_key_mm),
                len_val=row.get(self.len_key),
                weight=row.get(self.weight_key),
                weight_kg=row.get(self.weight_key_kg),
                vial=row.get(self.vial_key),
                scale_envelope=row.get(self.envelope_key),
                tissue_yn=row.get(self.tissue_key),
                mark=row.get(self.mark_key),
                vaccinated=row.get(self.vax_key),
                lifestage=row.get(self.lifestage_key),
                comments=row.get(self.comment_key))

            if utils.nan_to_none(row.get(self.precocity_key)):
                if utils.y_n_to_bool(row[self.precocity_key]):
                    self.row_entered += utils.enter_sampd(
                        row_samp.pk,
                        cleaned_data,
                        row_date,
                        "Precocity",
                        self.ani_health_anidc_id.pk,
                        adsc_str="Precocity")

            if utils.nan_to_none(row.get(self.ufid_key)):
                self.row_entered += utils.enter_sampd(row_samp.pk,
                                                      cleaned_data, row_date,
                                                      row[self.ufid_key],
                                                      self.anidc_ufid_id.pk)

            self.row_entered += utils.parse_extra_cols(row,
                                                       self.cleaned_data,
                                                       row_samp,
                                                       samp=True)

        else:
            self.success = False
Example #22
 def test_cont_has_grp(self):
     # test a tank with a contx has one and only one group in it:
     utils.enter_contx(self.tank, self.cleaned_data, True, grp_pk=self.grp.pk)
     indv_list, grp_list = self.tank.fish_in_cont()
     self.assertEqual(len(grp_list), 1)
     self.assertIn(self.grp, grp_list)
Example #23
 def test_grp_in_cont(self):
     # test a group with a contx is in one and only one tank:
     utils.enter_contx(self.tank, self.cleaned_data, True, grp_pk=self.grp.pk)
     self.assertEqual(self.grp.current_cont()[0], self.tank)
     self.assertEqual(len(self.grp.current_cont()), 1)