Code Example #1
File: test_models.py Project: dfo-mar-odis/dm_apps
    def test_move_and_detail_development(self):
        # test grp placed in trof, has a detail recorded, and is then moved to a second trof
        entry_date = self.evnt_date - timedelta(days=1)
        det_date = self.evnt_date + timedelta(days=5)
        move_date = self.evnt_date + timedelta(days=10)
        det_evnt_cleaned_data = utils.create_new_evnt(self.cleaned_data,
                                                      "Picking", det_date)
        anix = utils.enter_anix(det_evnt_cleaned_data,
                                grp_pk=self.grp.pk,
                                return_anix=True)
        utils.enter_grpd(anix.pk,
                         det_evnt_cleaned_data,
                         det_date,
                         10,
                         None,
                         anidc_str="Development")
        utils.create_movement_evnt(None,
                                   self.trof,
                                   self.cleaned_data,
                                   entry_date,
                                   grp_pk=self.grp.pk)
        utils.create_movement_evnt(self.trof,
                                   self.trof_two,
                                   self.cleaned_data,
                                   move_date,
                                   grp_pk=self.grp.pk)
        grp_dev = self.grp.get_development()
        self.assertEqual(round(grp_dev, 3), round(Decimal(36.291), 3))
Code Example #2
File: test_utils.py Project: dfo-mar-odis/dm_apps
    def test_origin_only_tank(self):
        #  move group with only origin, make sure group is still in original tank
        utils.enter_contx(self.tank, self.cleaned_data, True, grp_pk=self.grp.pk)
        move_date = datetime.now().date()

        utils.create_movement_evnt(self.final_tank, None, self.cleaned_data, move_date, grp_pk=self.grp.pk)

        indv_list, grp_list = self.tank.fish_in_cont()
        self.assertIn(self.grp, grp_list)
Code Example #3
File: test_utils.py Project: dfo-mar-odis/dm_apps
    def test_move_grp(self):
        # grp in one tank, gets moved, is in second tank and not in first tank
        utils.enter_contx(self.tank, self.cleaned_data, True, grp_pk=self.grp.pk)
        indv_list, grp_list = self.tank.fish_in_cont()
        self.assertIn(self.grp, grp_list)
        move_date = datetime.now().date()
        utils.create_movement_evnt(self.tank, self.final_tank, self.cleaned_data, move_date, grp_pk=self.grp.pk)
        indv_list, grp_list = self.tank.fish_in_cont()
        self.assertNotIn(self.grp, grp_list)
        indv_list, grp_list = self.final_tank.fish_in_cont()
        self.assertIn(self.grp, grp_list)
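Taken together, examples #2 and #3 pin down the behaviour of utils.create_movement_evnt: with both an origin and a destination the group is relocated, while an origin-only call leaves it where it is. Below is a minimal sketch (not code from dm_apps) of that call pattern; tank, final_tank, grp, cleaned_data, and utils are assumed to match the test fixtures above.

from datetime import datetime, timedelta

# Hypothetical, standalone illustration of the call pattern exercised above;
# the fixture objects (tank, final_tank, grp, cleaned_data) are assumed to be
# created the same way the test classes create them.
first_date = (datetime.now() - timedelta(days=1)).date()
move_date = datetime.now().date()

# Link the group to its starting tank, as the tests do before moving it.
utils.enter_contx(tank, cleaned_data, True, grp_pk=grp.pk)

# Origin only, no destination: the group is not relocated (example #2).
utils.create_movement_evnt(final_tank, None, cleaned_data, first_date, grp_pk=grp.pk)
indv_list, grp_list = tank.fish_in_cont()
assert grp in grp_list

# Origin and destination both given: the group is relocated (example #3).
utils.create_movement_evnt(tank, final_tank, cleaned_data, move_date, grp_pk=grp.pk)
indv_list, grp_list = final_tank.fish_in_cont()
assert grp in grp_list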
Code Example #4
File: test_models.py Project: dfo-mar-odis/dm_apps
    def test_development(self):
        # test grp placed in trof
        entry_date = self.evnt_date - timedelta(days=1)
        utils.create_movement_evnt(None,
                                   self.trof,
                                   self.cleaned_data,
                                   entry_date,
                                   grp_pk=self.grp.pk)
        grp_dev = self.grp.get_development()
        # compare to hard coded value corresponding to 10 days of sequential temperature increases:
        self.assertEqual(round(grp_dev, 3), round(Decimal(5.579), 3))
Code Example #5
File: test_models.py Project: dfo-mar-odis/dm_apps
    def test_movement_development(self):
        # test grp placed in trof and moved to second trof
        entry_date = self.evnt_date - timedelta(days=1)
        move_date = self.evnt_date + timedelta(days=10)
        utils.create_movement_evnt(None,
                                   self.trof,
                                   self.cleaned_data,
                                   entry_date,
                                   grp_pk=self.grp.pk)
        utils.create_movement_evnt(self.trof,
                                   self.trof_two,
                                   self.cleaned_data,
                                   move_date,
                                   grp_pk=self.grp.pk)
        grp_dev = self.grp.get_development()
        self.assertEqual(round(grp_dev, 3), round(Decimal(27.854), 3))
Code Example #6
File: test_utils.py Project: dfo-mar-odis/dm_apps
    def test_fix_jumped_tanks(self):
        # simulate accidentally recording group in wrong tank and correcting:
        # i.e. A->B, C->D (fish ends up in both B and D); a move ->E should correct this
        tank_a = BioFactoryFloor.TankFactory(name="A")
        tank_a.facic_id = self.evnt.facic_id
        tank_a.save()
        tank_b = BioFactoryFloor.TankFactory(name="B")
        tank_b.facic_id = self.evnt.facic_id
        tank_b.save()
        tank_c = BioFactoryFloor.TankFactory(name="C")
        tank_c.facic_id = self.evnt.facic_id
        tank_c.save()
        tank_d = BioFactoryFloor.TankFactory(name="D")
        tank_d.facic_id = self.evnt.facic_id
        tank_d.save()
        tank_e = BioFactoryFloor.TankFactory(name="E")
        tank_e.facic_id = self.evnt.facic_id
        tank_e.save()
        # need three distinct dates to ensure unique movement events, to keep the django test env happy
        move_a_date = (datetime.now() - timedelta(days=2)).date()
        move_b_date = (datetime.now() - timedelta(days=1)).date()
        move_c_date = datetime.now().date()

        utils.create_movement_evnt(tank_a, tank_b, self.cleaned_data, move_a_date, grp_pk=self.grp.pk)
        utils.create_movement_evnt(tank_c, tank_d, self.cleaned_data, move_b_date, grp_pk=self.grp.pk)
        self.assertIn(tank_b, self.grp.current_cont())
        self.assertIn(tank_d, self.grp.current_cont())
        utils.create_movement_evnt(None, tank_e, self.cleaned_data, move_c_date, grp_pk=self.grp.pk)
        self.assertIn(tank_e, self.grp.current_cont())
        self.assertNotIn(tank_c, self.grp.current_cont())
        self.assertNotIn(tank_d, self.grp.current_cont())
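The correction above hinges on one destination-only call: moving the group to tank E with no origin supersedes the conflicting placements. A hedged restatement of just that step, reusing the fixture names from the test:

# Hedged restatement of the correction idiom from the test above; tank_d,
# tank_e, cleaned_data, grp, and move_c_date are assumed to exist as in that test.
utils.create_movement_evnt(None, tank_e, cleaned_data, move_c_date, grp_pk=grp.pk)

# The destination-only move leaves the group in tank E and drops the
# conflicting containers.
assert tank_e in grp.current_cont()
assert tank_d not in grp.current_cont()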
Code Example #7
File: test_models.py Project: dfo-mar-odis/dm_apps
    def test_development_after_detail(self):
        # test grp placed in trof with a development detail recorded; build on that value and don't double count it
        entry_date = self.evnt_date - timedelta(days=1)
        utils.create_movement_evnt(None,
                                   self.trof,
                                   self.cleaned_data,
                                   entry_date,
                                   grp_pk=self.grp.pk)

        det_date = self.evnt_date + timedelta(days=5)
        det_evnt_cleaned_data = utils.create_new_evnt(self.cleaned_data,
                                                      "Picking", det_date)
        anix = utils.enter_anix(det_evnt_cleaned_data,
                                grp_pk=self.grp.pk,
                                return_anix=True)
        utils.enter_grpd(anix.pk,
                         det_evnt_cleaned_data,
                         det_date,
                         10,
                         None,
                         anidc_str="Development")
        grp_dev = self.grp.get_development()
        self.assertEqual(round(grp_dev, 3), round(Decimal(14.015), 3))
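The development tests above always query get_development() as of now; example #8 below also passes an explicit timestamp to read the development accumulated up to a transfer. A minimal hedged sketch of that read-side usage, assuming grp is a group with a recorded movement history like the fixtures above:

from datetime import datetime, timedelta

# Hypothetical read-side usage of the accessor exercised by these tests;
# grp is assumed to be a group already placed in a trough with temperature data.
dev_to_date = grp.get_development()

# Example #8 passes a datetime so the development is evaluated as of a
# specific transfer time rather than now.
transfer_time = datetime.now() - timedelta(days=1)
dev_at_transfer = grp.get_development(transfer_time)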
Code Example #8
    def row_parser(self, row):
        cleaned_data = self.cleaned_data
        # get tray, group, and row date
        row_date = utils.get_row_date(row)

        tray_qs = models.Tray.objects.filter(trof_id=row["trof_id"],
                                             name=row[self.tray_key])
        tray_id = tray_qs.filter(
            Q(start_date__lte=row_date, end_date__gte=row_date)
            | Q(end_date__isnull=True)).get()
        pair_id = models.Pairing.objects.filter(
            cross=row[self.cross_key],
            end_date__isnull=True,
            indv_id__stok_id=row["stok_id"],
            start_date__year=row[self.year_key]).first()

        grp_id = utils.get_tray_group(pair_id, tray_id, row_date)

        # want to shift the hu move event, so that the counting math always works out.
        hu_move_date = row_date + timedelta(minutes=1)
        hu_cleaned_data = utils.create_new_evnt(cleaned_data, "Allocation",
                                                hu_move_date)
        hu_anix, data_entered = utils.enter_anix(hu_cleaned_data,
                                                 grp_pk=grp_id.pk)
        self.row_entered += data_entered
        hu_contx, data_entered = utils.enter_contx(tray_id,
                                                   hu_cleaned_data,
                                                   None,
                                                   grp_pk=grp_id.pk,
                                                   return_contx=True)
        self.row_entered += data_entered
        # record development
        dev_at_hu_transfer = grp_id.get_development(hu_move_date)
        utils.enter_grpd(hu_anix.pk,
                         hu_cleaned_data,
                         hu_move_date,
                         dev_at_hu_transfer,
                         None,
                         anidc_str="Development")
        self.row_entered += utils.enter_contx(row["trof_id"], cleaned_data)

        # HU Picks:
        self.row_entered += utils.enter_cnt(cleaned_data,
                                            row[self.loss_key],
                                            hu_contx.pk,
                                            cnt_code="HU Transfer Loss")[1]

        # generate new group, cup, and movement event:
        cont = None
        if utils.nan_to_none(row[self.end_tray_key]):
            trof_id = models.Trough.objects.filter(
                facic_id=cleaned_data["facic_id"],
                name=row[self.end_trof_key]).get()
            tray_qs = models.Tray.objects.filter(trof_id=trof_id,
                                                 name=row[self.tray_key])
            cont = tray_qs.filter(
                Q(start_date__lte=row_date, end_date__gte=row_date)
                | Q(end_date__isnull=True)).get()
        elif utils.nan_to_none(row[self.end_trof_key]):
            cont = models.Trough.objects.filter(
                facic_id=cleaned_data["facic_id"],
                name=row[self.end_trof_key]).get()
        elif utils.nan_to_none(row[self.heatl_key]):
            cont = utils.get_cont_from_dot(row[self.cont_key], cleaned_data,
                                           row_date)
        elif utils.nan_to_none(row[self.tank_key]):
            cont = models.Tank.objects.filter(
                facic_id=cleaned_data["facic_id"], name=row[self.tank_key])

        self.row_entered += utils.enter_contx(cont, cleaned_data)
        if not utils.y_n_to_bool(row[self.final_key]):
            # NEW GROUPS TAKEN FROM INITIAL
            out_cnt = utils.enter_cnt(cleaned_data,
                                      0,
                                      hu_contx.pk,
                                      cnt_code="Eggs Removed")[0]
            utils.enter_cnt_det(cleaned_data, out_cnt, row[self.cnt_key],
                                "Program Group Split", row[self.prog_key])

            indv, final_grp = cont.fish_in_cont(row_date)
            if not final_grp:
                final_grp = models.Group(
                    spec_id=grp_id.spec_id,
                    coll_id=grp_id.coll_id,
                    grp_year=grp_id.grp_year,
                    stok_id=grp_id.stok_id,
                    grp_valid=True,
                    created_by=cleaned_data["created_by"],
                    created_date=cleaned_data["created_date"],
                )
                try:
                    final_grp.clean()
                    final_grp.save()
                except (ValidationError, IntegrityError):
                    return None
            else:
                # MAIN GROUP GETTING MOVED
                final_grp = final_grp[0]
            final_grp_anix = utils.enter_anix(cleaned_data,
                                              grp_pk=final_grp.pk,
                                              return_anix=True)
            self.row_entered += utils.enter_anix(hu_cleaned_data,
                                                 grp_pk=final_grp.pk,
                                                 return_sucess=True)
            self.row_entered += utils.enter_bulk_grpd(
                final_grp_anix,
                cleaned_data,
                row_date,
                prnt_grp=grp_id,
                prog_grp=row.get(self.prog_key),
                comments=row.get(self.comment_key))
            self.row_entered += utils.enter_grpd(final_grp_anix.pk,
                                                 cleaned_data,
                                                 row_date,
                                                 dev_at_hu_transfer,
                                                 None,
                                                 anidc_str="Development")

            # create movement for the new group, create 2 contx's and 3 anix's
            # cup contx is contx used to link the positive counts
            cont_contx = utils.create_egg_movement_evnt(tray_id,
                                                        cont,
                                                        cleaned_data,
                                                        row_date,
                                                        final_grp.pk,
                                                        return_cup_contx=True)

            move_cleaned_data = cleaned_data.copy()
            move_cleaned_data["evnt_id"] = cont_contx.evnt_id
            cnt_contx = cont_contx
            cnt_contx.pk = None
            cnt_contx.tray_id = tray_id
            try:
                cnt_contx.save()
            except IntegrityError:
                cnt_contx = models.ContainerXRef.objects.filter(
                    pk=cont_contx.pk).get()
            self.row_entered += utils.enter_anix(move_cleaned_data,
                                                 grp_pk=final_grp.pk,
                                                 contx_pk=cnt_contx.pk,
                                                 return_sucess=True)
            # add the positive counts
            cnt = utils.enter_cnt(
                move_cleaned_data,
                row[self.cnt_key],
                cnt_contx.pk,
                cnt_code="Eggs Added",
            )[0]
            if utils.nan_to_none(self.weight_key):
                utils.enter_cnt_det(move_cleaned_data, cnt,
                                    row[self.weight_key], "Weight")
            utils.enter_cnt_det(move_cleaned_data, cnt, row[self.cnt_key],
                                "Program Group Split", row[self.prog_key])
        else:
            # Move main group to drawer, and add end date to tray:
            if cont:
                end_contx = utils.create_movement_evnt(tray_id,
                                                       cont,
                                                       cleaned_data,
                                                       row_date,
                                                       grp_pk=grp_id.pk,
                                                       return_end_contx=True)
                tray_id.end_date = row_date
                tray_id.save()
                end_cnt = utils.enter_cnt(cleaned_data,
                                          row[self.cnt_key],
                                          end_contx.pk,
                                          cnt_code="Egg Count")[0]
                utils.enter_cnt_det(cleaned_data, end_cnt,
                                    row[self.weight_key], "Weight")
            else:
                self.log_data += "\n Draw {} from {} not found".format(
                    cont, row[self.cont_key])

            # link cup to egg development event
            utils.enter_contx(cont, cleaned_data, None)
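A recurring pattern in the parser above is asking the movement helpers for the destination container cross-reference (return_end_contx=True / return_cup_contx=True) so counts and count details can be attached to it. The sketch below is a hedged illustration of that pattern only; tray_id, cup, cleaned_data, row_date, grp, egg_count, and egg_weight are placeholders for values the parser derives from the row.

# Hedged sketch of the count-attachment pattern used in the parser above; the
# names tray_id, cup, grp, egg_count and egg_weight are placeholders.
end_contx = utils.create_movement_evnt(tray_id,
                                       cup,
                                       cleaned_data,
                                       row_date,
                                       grp_pk=grp.pk,
                                       return_end_contx=True)

# enter_cnt returns (count_object, entered_flag); the count is linked to the
# destination container cross-reference returned above.
cnt, entered = utils.enter_cnt(cleaned_data,
                               egg_count,
                               end_contx.pk,
                               cnt_code="Egg Count")

# Count details (e.g. weight) hang off the count object.
utils.enter_cnt_det(cleaned_data, cnt, egg_weight, "Weight")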
Code Example #9
File: tagging.py Project: dfo-mar-odis/dm_apps
    def row_parser(self, row):
        cleaned_data = self.cleaned_data
        year, coll = utils.year_coll_splitter(row[self.coll_key])
        row_datetime = utils.get_row_date(row)
        row_date = row_datetime.date()
        indv_ufid = utils.nan_to_none(row.get(self.ufid_key))
        indv = models.Individual(
            grp_id=self.grp_id,
            spec_id=self.salmon_id,
            stok_id=self.stok_id,
            coll_id=self.coll_id,
            indv_year=year,
            pit_tag=row[self.pit_key],
            ufid=indv_ufid,
            indv_valid=True,
            comments=utils.nan_to_none(row.get(self.comment_key)),
            created_by=cleaned_data["created_by"],
            created_date=cleaned_data["created_date"],
        )
        try:
            indv.clean()
            indv.save()
            self.row_entered = True
        except (ValidationError, IntegrityError):
            indv = models.Individual.objects.filter(pit_tag=indv.pit_tag).get()

        if utils.nan_to_none(row[self.from_tank_id_key]) or utils.nan_to_none(
                row[self.to_tank_id_key]):
            in_tank = row[self.from_tank_id_key]
            out_tank = row[self.to_tank_id_key]
            self.row_entered += utils.create_movement_evnt(in_tank,
                                                           out_tank,
                                                           cleaned_data,
                                                           row_datetime,
                                                           indv_pk=indv.pk)
            # if tagged fish goes back into same tank, still link fish to tank:
            if in_tank == out_tank:
                utils.enter_contx(in_tank, cleaned_data, True, indv_pk=indv.pk)

        anix_indv, anix_entered = utils.enter_anix(cleaned_data,
                                                   indv_pk=indv.pk)
        self.row_entered += anix_entered
        self.anix_indv = anix_indv

        utils.enter_bulk_indvd(
            anix_indv.pk,
            self.cleaned_data,
            row_date,
            len_mm=row.get(self.len_key_mm),
            len_val=row.get(self.len_key),
            weight=row.get(self.weight_key),
            weight_kg=row.get(self.weight_key_kg),
            vial=row.get(self.vial_key),
            mark=row.get(self.mark_key),
            prog_grp=row.get(self.group_key),
            lifestage=row.get(self.lifestage_key),
            comments=row.get(self.comment_key),
        )

        if utils.nan_to_none(row.get(self.precocity_key)):
            self.row_entered += utils.enter_indvd(anix_indv.pk, cleaned_data,
                                                  row_date, None,
                                                  self.ani_health_anidc_id.pk,
                                                  "Precocity")

        if utils.nan_to_none(row.get(self.crew_key)):
            perc_list, inits_not_found = utils.team_list_splitter(
                row[self.crew_key])
            for perc_id in perc_list:
                team_id, team_entered = utils.add_team_member(
                    perc_id,
                    cleaned_data["evnt_id"],
                    role_id=self.tagger_code,
                    return_team=True)
                self.row_entered += team_entered
                if team_id:
                    self.row_entered += utils.enter_anix(cleaned_data,
                                                         indv_pk=indv.pk,
                                                         team_pk=team_id.pk,
                                                         return_sucess=True)
            for inits in inits_not_found:
                self.log_data += "No valid personnel with initials ({}) for row with pit tag" \
                                 " {}\n".format(inits, row[self.pit_key])
Code Example #10
File: spawning.py Project: dfo-mar-odis/dm_apps
    def row_parser(self, row):
        cleaned_data = self.cleaned_data

        indv_female, samp_female, new_log = utils.get_indv_or_samp(
            row, self.pit_key_f, self.samp_key_f, cleaned_data["evnt_id"])

        if new_log:
            self.log_data += new_log
            return self.log_data, False

        indv_male, samp_male, new_log = utils.get_indv_or_samp(
            row, self.pit_key_m, self.samp_key_m, cleaned_data["evnt_id"])
        if new_log:
            self.log_data += new_log
            return self.log_data, False

        if not (indv_female or samp_female) or not (indv_male or samp_male):
            raise Exception(
                "No Individual or Fish found for row {}".format(row))

        if not utils.nan_to_none(row[self.choice_key]):
            raise Exception(
                "Choice column cannot be empty. Set Fecundity column to zero to indicate Duds."
            )

        row_date = utils.get_row_date(row)
        if indv_female:
            anix_female, anix_entered = utils.enter_anix(
                cleaned_data, indv_pk=indv_female.pk)
            self.row_entered += anix_entered
            self.row_entered += utils.enter_bulk_indvd(
                anix_female.pk,
                cleaned_data,
                row_date,
                gender="F",
                len_mm=row.get(self.len_key_f_mm),
                len_val=row.get(self.len_key_f),
                weight=row.get(self.weight_key_f),
                weight_kg=row.get(self.weight_key_f_kg),
                status=row.get(self.status_key_f),
                comments=row.get(self.comment_key_f))
        if indv_male:
            anix_male, anix_entered = utils.enter_anix(cleaned_data,
                                                       indv_pk=indv_male.pk)
            self.row_entered += anix_entered

            self.row_entered += utils.enter_bulk_indvd(
                anix_male.pk,
                cleaned_data,
                row_date,
                gender="M",
                len_mm=row.get(self.len_key_m_mm),
                len_val=row.get(self.len_key_m),
                weight=row.get(self.weight_key_m),
                weight_kg=row.get(self.weight_key_m_kg),
                status=row.get(self.status_key_m),
                comments=row.get(self.comment_key_m))
        if samp_female:
            self.row_entered += utils.enter_bulk_sampd(
                samp_female.pk,
                cleaned_data,
                row_date,
                gender="F",
                len_mm=row.get(self.len_key_f_mm),
                len_val=row.get(self.len_key_f),
                weight=row.get(self.weight_key_f),
                weight_kg=row.get(self.weight_key_f_kg),
                status=row.get(self.status_key_f),
                comments=row.get(self.comment_key_f))
        if samp_male:
            self.row_entered += utils.enter_bulk_sampd(
                samp_male.pk,
                cleaned_data,
                row_date,
                gender="M",
                len_mm=row.get(self.len_key_m_mm),
                len_val=row.get(self.len_key_m),
                weight=row.get(self.weight_key_m),
                weight_kg=row.get(self.weight_key_m_kg),
                status=row.get(self.status_key_m),
                comments=row.get(self.comment_key_m))

        if utils.nan_to_none(row.get(self.dest_key_f)) and indv_female:
            end_tank_id_f = models.Tank.objects.filter(
                name=row[self.dest_key_f],
                facic_id=cleaned_data["facic_id"]).get()
            self.row_entered += utils.create_movement_evnt(
                None, end_tank_id_f, cleaned_data, row_date, indv_female.pk)

        if utils.nan_to_none(row.get(self.dest_key_m)) and indv_male:
            end_tank_id_m = models.Tank.objects.filter(
                name=row[self.dest_key_m],
                facic_id=cleaned_data["facic_id"]).get()
            self.row_entered += utils.create_movement_evnt(
                None, end_tank_id_m, cleaned_data, row_date, indv_male.pk)

        # pair
        pair = models.Pairing(
            start_date=row_date,
            prio_id=models.PriorityCode.objects.filter(
                name__iexact=prio_dict[row[self.prio_key_f]]).get(),
            pair_prio_id=models.PriorityCode.objects.filter(
                name__iexact=prio_dict[row[self.prio_key_pair]]).get(),
            cross=row[self.cross_key],
            valid=True,
            indv_id=indv_female,
            samp_id=samp_female,
            comments=utils.nan_to_none(row[self.comment_key_pair]),
            created_by=cleaned_data["created_by"],
            created_date=cleaned_data["created_date"],
        )
        try:
            pair.clean()
            pair.save()
            self.row_entered = True
        except (ValidationError, IntegrityError):
            pair = models.Pairing.objects.filter(start_date=row_date,
                                                 indv_id=indv_female,
                                                 samp_id=samp_female).get()

        # sire
        sire = models.Sire(
            prio_id=models.PriorityCode.objects.filter(
                name__iexact=prio_dict[row[self.prio_key_m]]).get(),
            pair_id=pair,
            indv_id=indv_male,
            samp_id=samp_male,
            choice=row[self.choice_key],
            comments=utils.nan_to_none(row[self.comment_key_m]),
            created_by=cleaned_data["created_by"],
            created_date=cleaned_data["created_date"],
        )
        try:
            sire.clean()
            sire.save()
            self.row_entered = True
        except (ValidationError, IntegrityError):
            # don't use sire again anywhere
            pass

        self.row_entered += utils.enter_anix(cleaned_data,
                                             pair_pk=pair.pk,
                                             return_sucess=True)

        # pairing program:
        if utils.nan_to_none(row.get(self.prog_key)):
            self.row_entered += utils.enter_spwnd(
                pair.pk,
                cleaned_data,
                row[self.prog_key],
                self.prog_spwndc_id.pk,
                spwnsc_str=row[self.prog_key])

        # fecu/dud/extra male
        if row[self.egg_est_key] > 0:
            self.row_entered += utils.enter_spwnd(pair.pk, cleaned_data,
                                                  int(row[self.egg_est_key]),
                                                  self.fecu_spwndc_id.pk, None,
                                                  "Calculated")
        else:
            self.row_entered += utils.enter_spwnd(pair.pk, cleaned_data,
                                                  row[self.choice_key],
                                                  self.dud_spwndc_id.pk, None,
                                                  "Good")

        # grp
        anix_grp_qs = models.AniDetailXref.objects.filter(
            evnt_id=cleaned_data["evnt_id"],
            grp_id__isnull=False,
            pair_id=pair,
            indv_id__isnull=True,
            contx_id__isnull=True,
            loc_id__isnull=True,
        )
        anix_grp = False
        if anix_grp_qs.count() == 0:
            if indv_female:
                stok_id = indv_female.stok_id
                spec_id = indv_female.spec_id
            else:
                stok_id = samp_female.stok_id
                spec_id = samp_female.spec_id

            grp = models.Group(
                spec_id=spec_id,
                stok_id=stok_id,
                coll_id=models.Collection.objects.filter(
                    name="Egg (F1)").get(),
                grp_year=row_date.year,
                grp_valid=False,
                created_by=cleaned_data["created_by"],
                created_date=cleaned_data["created_date"],
            )
            try:
                grp.clean()
                grp.save()
                row_entered = True
                anix_grp, anix_entered = utils.enter_anix(cleaned_data,
                                                          grp_pk=grp.pk)
                row_entered += utils.enter_anix(cleaned_data,
                                                grp_pk=grp.pk,
                                                pair_pk=pair.pk,
                                                return_sucess=True)
                grp.grp_valid = True
                grp.save()
                self.row_entered = True
            except ValidationError:
                # recovering the group is only doable through the anix with both grp and pair.
                # no way to find it here, so only make the group valid after anix's created.
                pass

        elif anix_grp_qs.count() == 1:
            anix_grp = anix_grp_qs.get()
            grp = anix_grp.grp_id

        if anix_grp:
            utils.enter_bulk_grpd(anix_grp,
                                  cleaned_data,
                                  row_date,
                                  prog_grp=row[self.prog_key])
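This parser and the tagging parser in example #9 share a create-or-fetch idiom: build the model instance, call clean() and save(), and on ValidationError or IntegrityError fall back to fetching the existing row by its natural key. A minimal hedged sketch of that idiom for the Pairing model (the real parser also sets the priority codes, cross number, and comments):

from django.core.exceptions import ValidationError
from django.db import IntegrityError

# Hedged sketch of the create-or-fetch idiom used by these parsers; row_date,
# indv_female, samp_female, and cleaned_data are assumed to be prepared as in
# the row_parser above, and some Pairing fields are omitted for brevity.
pair = models.Pairing(
    start_date=row_date,
    valid=True,
    indv_id=indv_female,
    samp_id=samp_female,
    created_by=cleaned_data["created_by"],
    created_date=cleaned_data["created_date"],
)
try:
    pair.clean()
    pair.save()
except (ValidationError, IntegrityError):
    # The pairing already exists (or is a duplicate), so fetch it by the same
    # natural key the parser uses.
    pair = models.Pairing.objects.filter(start_date=row_date,
                                         indv_id=indv_female,
                                         samp_id=samp_female).get()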
Code Example #11
File: generic.py Project: dfo-mar-odis/dm_apps
    def row_parser(self, row):
        row_datetime = utils.get_row_date(row)
        row_date = row_datetime.date()

        indv_qs = models.Individual.objects.filter(pit_tag=row[self.pit_key])
        if len(indv_qs) == 1:
            indv = indv_qs.get()
        else:
            self.log_data += "Error parsing row: \n"
            self.log_data += str(row)
            self.log_data += "\nFish with PIT {} not found in db\n".format(
                row[self.pit_key])
            self.success = False
            return

        anix, anix_entered = utils.enter_anix(self.cleaned_data,
                                              indv_pk=indv.pk)
        self.row_entered += anix_entered

        self.row_entered += utils.enter_bulk_indvd(
            anix.pk,
            self.cleaned_data,
            row_date,
            gender=row.get(self.sex_key),
            len_mm=row.get(self.len_key_mm),
            len_val=row.get(self.len_key),
            weight=row.get(self.weight_key),
            weight_kg=row.get(self.weight_key_kg),
            vial=row.get(self.vial_key),
            scale_envelope=row.get(self.envelope_key),
            tissue_yn=row.get(self.tissue_key),
            mark=row.get(self.mark_key),
            vaccinated=row.get(self.vax_key),
            status=row.get(self.status_key),
            lifestage=row.get(self.lifestage_key),
            comments=row.get(self.comment_key))

        if utils.nan_to_none(row.get(self.precocity_key)):
            if utils.y_n_to_bool(row[self.precocity_key]):
                self.row_entered += utils.enter_indvd(
                    anix.pk, self.cleaned_data, row_date, None,
                    self.ani_health_anidc_id.pk, "Precocity")
        if utils.nan_to_none(row.get(self.mort_key)):
            if utils.y_n_to_bool(row[self.mort_key]):
                mort_anix, mort_entered = utils.enter_mortality(
                    indv, self.cleaned_data, row_datetime)
                self.row_entered += mort_entered

        in_tank = None
        out_tank = None
        if utils.nan_to_none(row[self.start_tank_key]):
            in_tank = models.Tank.objects.filter(
                name=row[self.start_tank_key]).get()
        if utils.nan_to_none(row[self.end_tank_key]):
            out_tank = models.Tank.objects.filter(
                name=row[self.end_tank_key]).get()
        if in_tank or out_tank:
            self.row_entered += utils.create_movement_evnt(in_tank,
                                                           out_tank,
                                                           self.cleaned_data,
                                                           row_datetime,
                                                           indv_pk=indv.pk)

        self.row_entered += utils.parse_extra_cols(row,
                                                   self.cleaned_data,
                                                   anix,
                                                   indv=True)
Code Example #12
File: generic.py Project: dfo-mar-odis/dm_apps
    def row_parser(self, row):
        cleaned_data = self.cleaned_data
        row_date = row["datetime"].date()
        row_start_grp = utils.get_grp(row[self.rive_key],
                                      row["grp_year"],
                                      row["grp_coll"],
                                      row["start_tank_id"],
                                      row_date,
                                      prog_str=row.get(self.prio_key),
                                      mark_str=row.get(self.grp_mark_key),
                                      fail_on_not_found=True)[0]
        start_anix, self.row_entered = utils.enter_anix(
            cleaned_data, grp_pk=row_start_grp.pk)
        start_contx, contx_entered = utils.enter_contx(row["start_tank_id"],
                                                       cleaned_data,
                                                       None,
                                                       return_contx=True)
        self.row_entered += contx_entered

        whole_grp = utils.y_n_to_bool(row[self.abs_key])
        det_anix = start_anix
        row["start_contx_pk"] = None
        if not whole_grp:
            row["start_contx_pk"] = start_contx.pk

        if utils.nan_to_none(row["end_tank_id"]):
            # 4 possible cases here: group in tank or not and whole group move or not:
            row_end_grp_list = utils.get_grp(row[self.rive_key],
                                             row["grp_year"],
                                             row["grp_coll"],
                                             row["end_tank_id"],
                                             row_date,
                                             prog_str=row[self.prio_key],
                                             mark_str=row[self.grp_mark_key])
            row_end_grp = None
            if not whole_grp and not row_end_grp_list:
                # splitting fish group, create end group:
                row_end_grp = copy.deepcopy(row_start_grp)
                row_end_grp.pk = None
                row_end_grp.id = None
                row_end_grp.save()
                end_grp_anix, anix_entered = utils.enter_anix(
                    cleaned_data, grp_pk=row_end_grp.pk)
                self.row_entered = anix_entered

                self.row_entered += utils.enter_bulk_grpd(
                    end_grp_anix.pk,
                    cleaned_data,
                    row_date,
                    prog_grp=row.get(self.prio_key),
                    mark=row.get(self.mark_key))
            elif not whole_grp:
                # splitting fish group, merging into existing end group
                row_end_grp = row_end_grp_list[0]

            if row_end_grp:
                move_contx = utils.create_movement_evnt(row["start_tank_id"],
                                                        row["end_tank_id"],
                                                        cleaned_data,
                                                        row_date,
                                                        grp_pk=row_end_grp.pk,
                                                        return_end_contx=True)
                end_grp_anix, anix_entered = utils.enter_anix(
                    cleaned_data, grp_pk=row_end_grp.pk)
                self.row_entered += anix_entered
                self.row_entered += utils.enter_grpd(end_grp_anix.pk,
                                                     cleaned_data,
                                                     row_date,
                                                     None,
                                                     self.prnt_grp_anidc_id.pk,
                                                     frm_grp_id=row_start_grp)
                cnt, cnt_entered = utils.enter_cnt(cleaned_data,
                                                   row[self.nfish_key],
                                                   move_contx.pk)
                self.row_entered = cnt_entered

                # record details on end tank group
                det_anix = end_grp_anix

            else:
                # move all the fish (whole group, merge to fish at destination if needed)
                move_contx = utils.create_movement_evnt(
                    row["start_tank_id"],
                    row["end_tank_id"],
                    cleaned_data,
                    row_date,
                    grp_pk=row_start_grp.pk,
                    return_end_contx=True)
                cnt, cnt_entered = utils.enter_cnt(cleaned_data,
                                                   row[self.nfish_key],
                                                   move_contx.pk,
                                                   cnt_code="Fish Count")
                self.row_entered = cnt_entered
        else:
            if utils.nan_to_none(row[self.nfish_key]):
                cnt, cnt_entered = utils.enter_cnt(cleaned_data,
                                                   row[self.nfish_key],
                                                   start_contx.pk,
                                                   cnt_code="Fish Count")
                self.row_entered = cnt_entered

        # add details to det_anix:
        self.row_entered += utils.enter_bulk_grpd(
            det_anix.pk,
            cleaned_data,
            row_date,
            vaccinated=row.get(self.vax_key),
            mark=row.get(self.mark_key),
            lifestage=row.get(self.lifestage_key),
            comments=row.get(self.comment_key))

        self.row_entered += utils.parse_extra_cols(row,
                                                   self.cleaned_data,
                                                   det_anix,
                                                   grp=True)
Code Example #13
File: generic.py Project: dfo-mar-odis/dm_apps
    def data_preper(self):
        cleaned_data = self.cleaned_data
        self.sampc_id = models.SampleCode.objects.filter(
            name="Individual Sample").get()
        self.prnt_grp_anidc_id = models.AnimalDetCode.objects.filter(
            name="Parent Group").get()
        self.prog_grp_anidc_id = models.AnimalDetCode.objects.filter(
            name="Program Group").get()
        self.sex_anidc_id = models.AnimalDetCode.objects.filter(
            name="Gender").get()
        self.len_anidc_id = models.AnimalDetCode.objects.filter(
            name="Length").get()
        self.weight_anidc_id = models.AnimalDetCode.objects.filter(
            name="Weight").get()
        self.vial_anidc_id = models.AnimalDetCode.objects.filter(
            name="Vial").get()
        self.envelope_anidc_id = models.AnimalDetCode.objects.filter(
            name="Scale Envelope").get()
        self.ani_health_anidc_id = models.AnimalDetCode.objects.filter(
            name="Animal Health").get()
        self.anidc_ufid_id = models.AnimalDetCode.objects.filter(
            name="UFID").get()
        self.vax_anidc_id = models.AnimalDetCode.objects.filter(
            name="Vaccination").get()
        self.mark_anidc_id = models.AnimalDetCode.objects.filter(
            name="Mark").get()
        self.lifestage_anidc_id = models.AnimalDetCode.objects.filter(
            name="Lifestage").get()
        self.comment_anidc_id = models.AnimalDetCode.objects.filter(
            name="Comment").get()

        # The following steps are to set additional columns on each row to facilitate parsing.
        # In particular, the columns set will be: "datetime", "grp_year", "grp_coll", "start_tank_id",
        # "end_tank_id", "grp_key", "end_grp_key".
        # The two grp_keys will link to dictionaries of the groups, which are also set below

        # set date
        self.data = utils.set_row_datetime(self.data)
        # split year-coll
        self.data["grp_year"] = self.data.apply(
            lambda row: utils.year_coll_splitter(row[self.yr_coll_key])[0],
            axis=1)
        self.data["grp_coll"] = self.data.apply(
            lambda row: utils.year_coll_splitter(row[self.yr_coll_key])[1],
            axis=1)

        # set start and end tank columns:
        self.data = utils.set_row_tank(self.data,
                                       cleaned_data,
                                       self.start_tank_key,
                                       col_name="start_tank_id")
        self.data = utils.set_row_tank(self.data,
                                       cleaned_data,
                                       self.end_tank_key,
                                       col_name="end_tank_id")

        # set the dict keys for groups, use astype(str) to handle anything that might be a nan.
        self.data, self.start_grp_dict = utils.set_row_grp(
            self.data,
            self.rive_key,
            self.yr_coll_key,
            self.prio_key,
            "start_tank_id",
            "datetime",
            self.grp_mark_key,
            grp_col_name="start_grp_id",
            return_dict=True)
        for item, grp in self.start_grp_dict.items():
            utils.enter_anix(cleaned_data, grp_pk=grp.pk)

        self.data["end_grp_key"] = self.data[self.rive_key] + self.data[self.yr_coll_key] + \
                                   self.data[self.end_tank_key].astype(str) + self.data[self.prio_key].astype(str) + \
                                   self.data["datetime"].astype(str) + self.data[self.grp_mark_key].astype(str)

        # create the end group dict and create the movement events, groups, counts, contxs, etc. as necessary
        end_grp_data = self.data.groupby([
            self.rive_key, "grp_year", "grp_coll", "end_tank_id",
            "start_tank_id", self.prio_key, "datetime", self.grp_mark_key,
            "grp_key", "end_grp_key"
        ],
                                         dropna=False,
                                         sort=False).size().reset_index()
        for row in end_grp_data.to_dict('records'):
            # check if end tank is set, otherwise, skip this step
            if not utils.nan_to_none(row["end_tank_id"]):
                self.end_grp_dict[row["end_grp_key"]] = None
                continue
            grps = utils.get_grp(row[self.rive_key],
                                 row["grp_year"],
                                 row["grp_coll"],
                                 row["end_tank_id"],
                                 at_date=row["datetime"],
                                 prog_str=row[self.prio_key],
                                 mark_str=row[self.grp_mark_key])
            start_grp_id = self.start_grp_dict[row["grp_key"]]
            start_contx, contx_entered = utils.enter_contx(
                row["start_tank_id"],
                cleaned_data,
                None,
                grp_pk=start_grp_id.pk,
                return_contx=True)
            self.row_entered += utils.enter_cnt(
                cleaned_data,
                sum(end_grp_data[end_grp_data["grp_key"] == row["grp_key"]]
                    [0]),
                start_contx.pk,
                cnt_code="Fish Removed from Container")[1]

            if len(grps) > 0:
                end_grp_id = grps[0]
                self.end_grp_dict[row["end_grp_key"]] = grps[0]
            else:
                end_grp_id = copy.deepcopy(start_grp_id)
                end_grp_id.pk = None
                end_grp_id.save()
                self.end_grp_dict[row["end_grp_key"]] = end_grp_id

            if end_grp_id.pk != start_grp_id.pk:
                grp_anix = utils.enter_anix(cleaned_data,
                                            grp_pk=end_grp_id.pk,
                                            return_anix=True)
                utils.enter_grpd(grp_anix.pk,
                                 cleaned_data,
                                 row["datetime"],
                                 None,
                                 self.prnt_grp_anidc_id.pk,
                                 frm_grp_id=start_grp_id)
                if utils.nan_to_none(row[self.prio_key]):
                    utils.enter_grpd(grp_anix.pk, cleaned_data,
                                     row["datetime"], row[self.prio_key],
                                     self.prog_grp_anidc_id.pk,
                                     row[self.prio_key])
                if utils.nan_to_none(row[self.grp_mark_key]):
                    utils.enter_grpd(grp_anix.pk, cleaned_data,
                                     row["datetime"], row[self.grp_mark_key],
                                     self.mark_anidc_id.pk,
                                     row[self.grp_mark_key])
                end_contx = utils.create_movement_evnt(row["start_tank_id"],
                                                       row["end_tank_id"],
                                                       cleaned_data,
                                                       row["datetime"],
                                                       grp_pk=end_grp_id.pk,
                                                       return_end_contx=True)
                if end_contx:
                    self.row_entered += utils.enter_cnt(
                        cleaned_data, row[0], end_contx.pk)[1]
        self.data_dict = self.data.to_dict("records")