Example #1
    def row_parser(self, row):
        cleaned_data = self.cleaned_data
        # get tray, group, and row date
        row_date = utils.get_row_date(row)

        tray_qs = models.Tray.objects.filter(trof_id=row["trof_id"],
                                             name=row[self.tray_key])
        tray_id = tray_qs.filter(
            Q(start_date__lte=row_date, end_date__gte=row_date)
            | Q(end_date__isnull=True)).get()
        pair_id = models.Pairing.objects.filter(
            cross=row[self.cross_key],
            end_date__isnull=True,
            indv_id__stok_id=row["stok_id"],
            start_date__year=row[self.year_key]).first()

        grp_id = utils.get_tray_group(pair_id, tray_id, row_date)

        # shift the HU move event by one minute so that the counting math always works out.
        hu_move_date = row_date + timedelta(minutes=1)
        hu_cleaned_data = utils.create_new_evnt(cleaned_data, "Allocation",
                                                hu_move_date)
        hu_anix, data_entered = utils.enter_anix(hu_cleaned_data,
                                                 grp_pk=grp_id.pk)
        self.row_entered += data_entered
        hu_contx, data_entered = utils.enter_contx(tray_id,
                                                   hu_cleaned_data,
                                                   None,
                                                   grp_pk=grp_id.pk,
                                                   return_contx=True)
        self.row_entered += data_entered
        # record development
        dev_at_hu_transfer = grp_id.get_development(hu_move_date)
        utils.enter_grpd(hu_anix.pk,
                         hu_cleaned_data,
                         hu_move_date,
                         dev_at_hu_transfer,
                         None,
                         anidc_str="Development")
        self.row_entered += utils.enter_contx(row["trof_id"], cleaned_data)

        # HU Picks:
        self.row_entered += utils.enter_cnt(cleaned_data,
                                            row[self.loss_key],
                                            hu_contx.pk,
                                            cnt_code="HU Transfer Loss")[1]

        # generate new group, cup, and movement event:
        cont = None
        if utils.nan_to_none(row[self.end_tray_key]):
            trof_id = models.Trough.objects.filter(
                facic_id=cleaned_data["facic_id"],
                name=row[self.end_trof_key]).get()
            tray_qs = models.Tray.objects.filter(trof_id=trof_id,
                                                 name=row[self.tray_key])
            cont = tray_qs.filter(
                Q(start_date__lte=row_date, end_date__gte=row_date)
                | Q(end_date__isnull=True)).get()
        elif utils.nan_to_none(row[self.end_trof_key]):
            cont = models.Trough.objects.filter(
                facic_id=cleaned_data["facic_id"],
                name=row[self.end_trof_key]).get()
        elif utils.nan_to_none(row[self.heatl_key]):
            cont = utils.get_cont_from_dot(row[self.cont_key], cleaned_data,
                                           row_date)
        elif utils.nan_to_none(row[self.tank_key]):
            cont = models.Tank.objects.filter(
                facic_id=cleaned_data["facic_id"], name=row[self.tank_key]).get()

        self.row_entered += utils.enter_contx(cont, cleaned_data)
        if not utils.y_n_to_bool(row[self.final_key]):
            # NEW GROUPS TAKEN FROM INITIAL
            out_cnt = utils.enter_cnt(cleaned_data,
                                      0,
                                      hu_contx.pk,
                                      cnt_code="Eggs Removed")[0]
            utils.enter_cnt_det(cleaned_data, out_cnt, row[self.cnt_key],
                                "Program Group Split", row[self.prog_key])

            indv, final_grp = cont.fish_in_cont(row_date)
            if not final_grp:
                final_grp = models.Group(
                    spec_id=grp_id.spec_id,
                    coll_id=grp_id.coll_id,
                    grp_year=grp_id.grp_year,
                    stok_id=grp_id.stok_id,
                    grp_valid=True,
                    created_by=cleaned_data["created_by"],
                    created_date=cleaned_data["created_date"],
                )
                try:
                    final_grp.clean()
                    final_grp.save()
                except (ValidationError, IntegrityError):
                    return None
            else:
                # MAIN GROUP GETTING MOVED
                final_grp = final_grp[0]
            final_grp_anix = utils.enter_anix(cleaned_data,
                                              grp_pk=final_grp.pk,
                                              return_anix=True)
            self.row_entered += utils.enter_anix(hu_cleaned_data,
                                                 grp_pk=final_grp.pk,
                                                 return_sucess=True)
            self.row_entered += utils.enter_bulk_grpd(
                final_grp_anix,
                cleaned_data,
                row_date,
                prnt_grp=grp_id,
                prog_grp=row.get(self.prog_key),
                comments=row.get(self.comment_key))
            self.row_entered += utils.enter_grpd(final_grp_anix.pk,
                                                 cleaned_data,
                                                 row_date,
                                                 dev_at_hu_transfer,
                                                 None,
                                                 anidc_str="Development")

            # create movement for the new group, create 2 contx's and 3 anix's
            # the cup contx is used to link the positive counts
            cont_contx = utils.create_egg_movement_evnt(tray_id,
                                                        cont,
                                                        cleaned_data,
                                                        row_date,
                                                        final_grp.pk,
                                                        return_cup_contx=True)

            move_cleaned_data = cleaned_data.copy()
            move_cleaned_data["evnt_id"] = cont_contx.evnt_id
            # duplicate the movement contx onto the tray so the positive counts
            # can be linked to the source tray; if the duplicate cannot be
            # saved, fall back to the original contx
            cnt_contx = copy.deepcopy(cont_contx)
            cnt_contx.pk = None
            cnt_contx.tray_id = tray_id
            try:
                cnt_contx.save()
            except IntegrityError:
                cnt_contx = models.ContainerXRef.objects.filter(
                    pk=cont_contx.pk).get()
            self.row_entered += utils.enter_anix(move_cleaned_data,
                                                 grp_pk=final_grp.pk,
                                                 contx_pk=cnt_contx.pk,
                                                 return_sucess=True)
            # add the positive counts
            cnt = utils.enter_cnt(
                move_cleaned_data,
                row[self.cnt_key],
                cnt_contx.pk,
                cnt_code="Eggs Added",
            )[0]
            if utils.nan_to_none(row.get(self.weight_key)):
                utils.enter_cnt_det(move_cleaned_data, cnt,
                                    row[self.weight_key], "Weight")
            utils.enter_cnt_det(move_cleaned_data, cnt, row[self.cnt_key],
                                "Program Group Split", row[self.prog_key])
        else:
            # Move main group to drawer, and add end date to tray:
            if cont:
                end_contx = utils.create_movement_evnt(tray_id,
                                                       cont,
                                                       cleaned_data,
                                                       row_date,
                                                       grp_pk=grp_id.pk,
                                                       return_end_contx=True)
                tray_id.end_date = row_date
                tray_id.save()
                end_cnt = utils.enter_cnt(cleaned_data,
                                          row[self.cnt_key],
                                          end_contx.pk,
                                          cnt_code="Egg Count")[0]
                utils.enter_cnt_det(cleaned_data, end_cnt,
                                    row[self.weight_key], "Weight")
            else:
                self.log_data += "\n Draw {} from {} not found".format(
                    cont, row[self.cont_key])

            # link cup to egg development event
            utils.enter_contx(cont, cleaned_data, None)
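
The tray lookups in this example resolve a container that is active on the row date: either its date window contains the date, or its end date is still open. A minimal sketch of that pattern, reusing the Tray fields shown above (the helper name is illustrative and not part of the project, and the models module is passed in rather than imported):

from django.db.models import Q


def active_tray(models, trof_id, name, on_date):
    # Illustrative only: return the single tray in this trough with this name
    # whose date window contains on_date, or whose end date is still open.
    return models.Tray.objects.filter(trof_id=trof_id, name=name).filter(
        Q(start_date__lte=on_date, end_date__gte=on_date)
        | Q(end_date__isnull=True)).get()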
Example #2
    def row_parser(self, row):
        cleaned_data = self.cleaned_data
        row_date = utils.get_row_date(row)
        self.row_entered += utils.enter_contx(row["trof_id"], cleaned_data)
        # find group from either cross or tray:

        if utils.nan_to_none(row.get(self.hu_key)):
            cont_id = utils.get_cont_from_dot(row[self.hu_key], cleaned_data,
                                              row_date)
        elif utils.nan_to_none(row.get(self.tray_key)):
            cont_id = models.Tray.objects.filter(
                trof_id=row["trof_id"],
                end_date__isnull=True,
                name=row[self.tray_key]).get()
        else:
            cont_id = row["trof_id"]

        if utils.nan_to_none(row.get(self.cross_key)):
            pair_id = models.Pairing.objects.filter(
                cross=row[self.cross_key],
                end_date__isnull=True,
                indv_id__stok_id=row["stok_id"],
                start_date__year=row[self.year_key]).first()
            grp_id = utils.get_tray_group(pair_id, cont_id, row_date)
        else:
            grp_id = cont_id.fish_in_cont(row_date, get_grp=True)

        grp_anix = None
        shock = False
        for pickc_id in cleaned_data["pickc_id"]:
            if utils.nan_to_none(row[pickc_id.name]):
                shock = utils.y_n_to_bool(row.get(self.shocking_key))
                grp_anix, evnt_entered = utils.create_picks_evnt(
                    cleaned_data,
                    cont_id,
                    grp_id.pk,
                    row[pickc_id.name],
                    row_date,
                    pickc_id.name,
                    cleaned_data["evnt_id"].perc_id,
                    shocking=shock,
                    return_anix=True,
                    pick_comments=row.get(self.comment_key))
                self.row_entered += evnt_entered

        for col_name in row.keys():
            col_date = utils.get_col_date(col_name)

            if col_date:
                col_date_str = datetime.strftime(col_date, "%Y-%b-%d")
                self.date_dict[col_date_str] = True
                if utils.nan_to_none(row.get(col_name)):
                    self.row_entered += utils.create_picks_evnt(
                        cleaned_data,
                        cont_id,
                        grp_id.pk,
                        row[col_name],
                        col_date,
                        self.default_pickc_id,
                        cleaned_data["evnt_id"].perc_id,
                        pick_comments=row.get(self.comment_key))

        # record development
        if grp_anix and shock:
            pick_evnt_cleaned_data = cleaned_data.copy()
            pick_evnt_cleaned_data["evnt_id"] = grp_anix.evnt_id
            dev_at_pick = grp_id.get_development(row_date)
            utils.enter_grpd(grp_anix.pk,
                             pick_evnt_cleaned_data,
                             row_date,
                             dev_at_pick,
                             None,
                             anidc_str="Development")
            # link the trough to the pick/shocking event as well
            self.row_entered += utils.enter_contx(row["trof_id"], pick_evnt_cleaned_data)
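
The second loop in this example treats any column whose header parses as a date as an extra pick column. The real utils.get_col_date is not shown here; a hypothetical stand-in with the behaviour the loop relies on (a datetime for date-like headers, None otherwise) could look like:

from datetime import datetime


def get_col_date(col_name):
    # Hypothetical sketch: try a few common header formats and return a
    # datetime when one matches, otherwise None so the caller skips the column.
    for fmt in ("%Y-%b-%d", "%Y-%m-%d", "%d-%b-%Y"):
        try:
            return datetime.strptime(str(col_name).strip(), fmt)
        except ValueError:
            continue
    return None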
Example #3
    def row_parser(self, row):
        cleaned_data = self.cleaned_data
        row_datetime = utils.get_row_date(row)
        relc_id = self.site_dict[row[self.site_key]]
        year, coll = utils.year_coll_splitter(row[self.coll_key])
        coll_id = utils.coll_getter(coll)
        stok_id = models.StockCode.objects.filter(name__iexact=relc_id.rive_id.name).get()

        indv_id = None

        if utils.nan_to_none(row[self.pit_key]):
            indv_id = models.Individual.objects.filter(pit_tag=row[self.pit_key]).first()
            if not indv_id:
                indv_id = models.Individual(spec_id=self.salmon_id,
                                            stok_id=stok_id,
                                            coll_id=coll_id,
                                            indv_year=year,
                                            pit_tag=row[self.pit_key],
                                            indv_valid=True,
                                            comments=utils.nan_to_none(row.get(self.comment_key)),
                                            created_by=cleaned_data["created_by"],
                                            created_date=cleaned_data["created_date"],
                                            )
                try:
                    indv_id.clean()
                    indv_id.save()
                    self.row_entered = True
                except (ValidationError, IntegrityError):
                    indv_id = models.Individual.objects.filter(pit_tag=indv_id.pit_tag).get()
            indv_anix, data_entered = utils.enter_anix(cleaned_data, indv_pk=indv_id.pk)
            self.row_entered += data_entered
            # add program group to individual if needed:

        loc = models.Location(evnt_id_id=cleaned_data["evnt_id"].pk,
                              locc_id=self.locc_id,
                              rive_id=relc_id.rive_id,
                              relc_id=relc_id,
                              loc_date=row_datetime,
                              created_by=cleaned_data["created_by"],
                              created_date=cleaned_data["created_date"],
                              )
        try:
            loc.clean()
            loc.save()
            self.row_entered = True
        except ValidationError:
            loc = models.Location.objects.filter(evnt_id=loc.evnt_id, locc_id=loc.locc_id,
                                                 rive_id=loc.rive_id, subr_id=loc.subr_id,
                                                 relc_id=loc.relc_id, loc_lat=loc.loc_lat,
                                                 loc_lon=loc.loc_lon, loc_date=loc.loc_date).get()
        self.loc = loc
        self.team_parser(row[self.crew_key], row, loc_id=loc)

        if indv_id:
            anix_loc_indv, anix_entered = utils.enter_anix(cleaned_data, loc_pk=loc.pk, indv_pk=indv_id.pk)
            self.row_entered += anix_entered

            self.row_entered += utils.enter_bulk_indvd(anix_loc_indv.pk, self.cleaned_data, row_datetime,
                                                       gender=row.get(self.sex_key),
                                                       len_mm=row.get(self.len_key_mm),
                                                       len_val=row.get(self.len_key),
                                                       weight=row.get(self.weight_key),
                                                       weight_kg=row.get(self.weight_key_kg),
                                                       vial=row.get(self.vial_key),
                                                       scale_envelope=row.get(self.scale_key),
                                                       prog_grp=row.get(self.grp_key),
                                                       comments=row.get(self.comment_key)
                                                       )

            if utils.nan_to_none(row.get(self.mort_key)):
                if utils.y_n_to_bool(row[self.mort_key]):
                    mort_anix, mort_entered = utils.enter_mortality(indv_id, self.cleaned_data, row_datetime)
                    self.row_entered += mort_entered

            if utils.nan_to_none(row.get(self.wr_key)):
                if utils.y_n_to_bool(row[self.wr_key]):
                    self.row_entered += utils.enter_indvd(anix_loc_indv.pk, cleaned_data, row_datetime, None,
                                                          self.ani_health_anidc_id.pk, adsc_str=self.wr_adsc_id.name)

            if utils.nan_to_none(row.get(self.aquaculture_key)):
                if utils.y_n_to_bool(row[self.aquaculture_key]):
                    self.row_entered += utils.enter_indvd(anix_loc_indv.pk, cleaned_data, row_datetime, None,
                                                          self.ani_health_anidc_id.pk, adsc_str="Aquaculture")

            if utils.nan_to_none(row[self.tank_key]):
                self.row_entered += utils.enter_contx(self.tank_dict[row[self.tank_key]], cleaned_data, True, indv_id.pk)
                if self.loc.pk not in self.loc_caught_dict:
                    self.loc_caught_dict[self.loc.pk] = 1
                else:
                    self.loc_caught_dict[self.loc.pk] += 1
            else:
                if self.loc.pk not in self.loc_obs_dict:
                    self.loc_obs_dict[self.loc.pk] = 1
                else:
                    self.loc_obs_dict[self.loc.pk] += 1

        elif utils.nan_to_none(row.get(self.samp_key)):
            samp = models.Sample.objects.filter(anix_id__evnt_id=cleaned_data["evnt_id"],
                                                loc_id=loc,
                                                spec_id=self.salmon_id,
                                                samp_num=row[self.samp_key],
                                                sampc_id=self.sampc_id,
                                                ).first()
            if not samp:
                # create group for sample:
                grp_id = models.Group(spec_id=self.salmon_id,
                                      stok_id=stok_id,
                                      coll_id=coll_id,
                                      grp_year=year,
                                      grp_valid=False,
                                      created_by=cleaned_data["created_by"],
                                      created_date=cleaned_data["created_date"],
                                      )
                grp_id.clean()
                grp_id.save()
                self.row_entered = True

                grp_anix, data_entered = utils.enter_anix(cleaned_data, grp_pk=grp_id.pk)
                self.row_entered += data_entered

                samp, samp_entered = utils.enter_samp(cleaned_data, row[self.samp_key], self.salmon_id.pk, self.sampc_id.pk,
                                                      anix_pk=grp_anix.pk, loc_pk=loc.pk,
                                                      comments=utils.nan_to_none(row.get(self.comment_key)))
                self.row_entered += samp_entered

            self.row_entered += utils.enter_bulk_sampd(samp.pk, self.cleaned_data, row_datetime,
                                                       gender=row.get(self.sex_key),
                                                       len_mm=row.get(self.len_key_mm),
                                                       len_val=row.get(self.len_key),
                                                       weight=row.get(self.weight_key),
                                                       weight_kg=row.get(self.weight_key_kg),
                                                       vial=row.get(self.vial_key),
                                                       scale_envelope=row.get(self.scale_key),
                                                       prog_grp=row.get(self.grp_key),
                                                       comments=row.get(self.comment_key)
                                                       )
            if utils.nan_to_none(row.get(self.mort_key)):
                if utils.y_n_to_bool(row[self.mort_key]):
                    self.row_entered += utils.enter_samp_mortality(samp, self.cleaned_data, row_datetime)

            if utils.nan_to_none(row.get(self.wr_key)):
                if utils.y_n_to_bool(row[self.wr_key]):
                    self.row_entered += utils.enter_sampd(samp.pk, cleaned_data, row_datetime, None,
                                                          self.ani_health_anidc_id.pk, adsc_str=self.wr_adsc_id.name)

            if utils.nan_to_none(row.get(self.aquaculture_key)):
                if utils.y_n_to_bool(row[self.aquaculture_key]):
                    self.row_entered += utils.enter_sampd(samp.pk, cleaned_data, row_datetime, None,
                                                          self.ani_health_anidc_id.pk, adsc_str="Aquaculture")
        else:
            raise Exception("Fish must either be assigned a sample number or a pit tag.")
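
Both the Individual and Location branches above follow the same create-or-fall-back idiom: validate, save, and if validation or a database constraint rejects the new record, fetch the matching existing row instead. Stripped to its skeleton (the helper below is an illustration, not project code):

from django.core.exceptions import ValidationError
from django.db import IntegrityError


def save_or_fetch(instance, **lookup):
    # Illustrative only: save a new record, or fall back to the existing row
    # identified by `lookup` when the save is rejected as a duplicate.
    try:
        instance.clean()
        instance.save()
        return instance, True
    except (ValidationError, IntegrityError):
        return type(instance).objects.filter(**lookup).get(), False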
Example #4
    def row_parser(self, row):
        row_datetime = utils.get_row_date(row)
        row_date = row_datetime.date()

        indv_qs = models.Individual.objects.filter(pit_tag=row[self.pit_key])
        if len(indv_qs) == 1:
            indv = indv_qs.get()
        else:
            self.log_data += "Error parsing row: \n"
            self.log_data += str(row)
            self.log_data += "\nFish with PIT {} not found in db\n".format(
                row[self.pit_key])
            self.success = False
            return

        anix, anix_entered = utils.enter_anix(self.cleaned_data,
                                              indv_pk=indv.pk)
        self.row_entered += anix_entered

        self.row_entered += utils.enter_bulk_indvd(
            anix.pk,
            self.cleaned_data,
            row_date,
            gender=row.get(self.sex_key),
            len_mm=row.get(self.len_key_mm),
            len_val=row.get(self.len_key),
            weight=row.get(self.weight_key),
            weight_kg=row.get(self.weight_key_kg),
            vial=row.get(self.vial_key),
            scale_envelope=row.get(self.envelope_key),
            tissue_yn=row.get(self.tissue_key),
            mark=row.get(self.mark_key),
            vaccinated=row.get(self.vax_key),
            status=row.get(self.status_key),
            lifestage=row.get(self.lifestage_key),
            comments=row.get(self.comment_key))

        if utils.nan_to_none(row.get(self.precocity_key)):
            if utils.y_n_to_bool(row[self.precocity_key]):
                self.row_entered += utils.enter_indvd(
                    anix.pk, self.cleaned_data, row_date, None,
                    self.ani_health_anidc_id.pk, "Precocity")
        if utils.nan_to_none(row.get(self.mort_key)):
            if utils.y_n_to_bool(row[self.mort_key]):
                mort_anix, mort_entered = utils.enter_mortality(
                    indv, self.cleaned_data, row_datetime)
                self.row_entered += mort_entered

        in_tank = None
        out_tank = None
        if utils.nan_to_none(row[self.start_tank_key]):
            in_tank = models.Tank.objects.filter(
                name=row[self.start_tank_key]).get()
        if utils.nan_to_none(row[self.end_tank_key]):
            out_tank = models.Tank.objects.filter(
                name=row[self.end_tank_key]).get()
        if in_tank or out_tank:
            self.row_entered += utils.create_movement_evnt(in_tank,
                                                           out_tank,
                                                           self.cleaned_data,
                                                           row_datetime,
                                                           indv_pk=indv.pk)

        self.row_entered += utils.parse_extra_cols(row,
                                                   self.cleaned_data,
                                                   anix,
                                                   indv=True)
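
These row_parser methods are written to be called once per spreadsheet row by a surrounding parser class that is not shown. A minimal driver sketch, assuming only the row_parser, row_entered, log_data, and success attributes that appear in the examples (the real base class may differ):

import pandas as pd


def parse_data(parser, data: pd.DataFrame):
    # Hypothetical driver: feed each row to row_parser, stop at the first row
    # the parser flags as a failure, and hand back the accumulated log.
    parser.success = True
    for _, row in data.iterrows():
        parser.row_entered = 0
        parser.row_parser(row)
        if not parser.success:
            break
    return parser.success, parser.log_data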
Example #5
    def row_parser(self, row):
        cleaned_data = self.cleaned_data
        row_date = row["datetime"].date()
        row_start_grp = utils.get_grp(row[self.rive_key],
                                      row["grp_year"],
                                      row["grp_coll"],
                                      row["start_tank_id"],
                                      row_date,
                                      prog_str=row.get(self.prio_key),
                                      mark_str=row.get(self.grp_mark_key),
                                      fail_on_not_found=True)[0]
        start_anix, self.row_entered = utils.enter_anix(
            cleaned_data, grp_pk=row_start_grp.pk)
        start_contx, contx_entered = utils.enter_contx(row["start_tank_id"],
                                                       cleaned_data,
                                                       None,
                                                       return_contx=True)
        self.row_entered += contx_entered

        whole_grp = utils.y_n_to_bool(row[self.abs_key])
        det_anix = start_anix
        row["start_contx_pk"] = None
        if not whole_grp:
            row["start_contx_pk"] = start_contx.pk

        if utils.nan_to_none(row["end_tank_id"]):
            # four possible cases: the group may or may not already be in the end tank,
            # and the move may be a whole-group move or a split:
            row_end_grp_list = utils.get_grp(row[self.rive_key],
                                             row["grp_year"],
                                             row["grp_coll"],
                                             row["end_tank_id"],
                                             row_date,
                                             prog_str=row[self.prio_key],
                                             mark_str=row[self.grp_mark_key])
            row_end_grp = None
            if not whole_grp and not row_end_grp_list:
                # splitting fish group, create end group:
                row_end_grp = copy.deepcopy(row_start_grp)
                row_end_grp.pk = None
                row_end_grp.id = None
                row_end_grp.save()
                end_grp_anix, anix_entered = utils.enter_anix(
                    cleaned_data, grp_pk=row_end_grp.pk)
                self.row_entered += anix_entered

                self.row_entered += utils.enter_bulk_grpd(
                    end_grp_anix.pk,
                    cleaned_data,
                    row_date,
                    prog_grp=row.get(self.prio_key),
                    mark=row.get(self.mark_key))
            elif not whole_grp:
                # splitting fish group, merging into existing end group
                row_end_grp = row_end_grp_list[0]

            if row_end_grp:
                move_contx = utils.create_movement_evnt(row["start_tank_id"],
                                                        row["end_tank_id"],
                                                        cleaned_data,
                                                        row_date,
                                                        grp_pk=row_end_grp.pk,
                                                        return_end_contx=True)
                end_grp_anix, anix_entered = utils.enter_anix(
                    cleaned_data, grp_pk=row_end_grp.pk)
                self.row_entered += anix_entered
                self.row_entered += utils.enter_grpd(end_grp_anix.pk,
                                                     cleaned_data,
                                                     row_date,
                                                     None,
                                                     self.prnt_grp_anidc_id.pk,
                                                     frm_grp_id=row_start_grp)
                cnt, cnt_entered = utils.enter_cnt(cleaned_data,
                                                   row[self.nfish_key],
                                                   move_contx.pk)
                self.row_entered += cnt_entered

                # record details on end tank group
                det_anix = end_grp_anix

            else:
                # move all the fish (whole group, merge to fish at destination if needed)
                move_contx = utils.create_movement_evnt(
                    row["start_tank_id"],
                    row["end_tank_id"],
                    cleaned_data,
                    row_date,
                    grp_pk=row_start_grp.pk,
                    return_end_contx=True)
                cnt, cnt_entered = utils.enter_cnt(cleaned_data,
                                                   row[self.nfish_key],
                                                   move_contx.pk,
                                                   cnt_code="Fish Count")
                self.row_entered += cnt_entered
        else:
            if utils.nan_to_none(row[self.nfish_key]):
                cnt, cnt_entered = utils.enter_cnt(cleaned_data,
                                                   row[self.nfish_key],
                                                   start_contx.pk,
                                                   cnt_code="Fish Count")
                self.row_entered += cnt_entered

        # add details to det_anix:
        self.row_entered += utils.enter_bulk_grpd(
            det_anix.pk,
            cleaned_data,
            row_date,
            vaccinated=row.get(self.vax_key),
            mark=row.get(self.mark_key),
            lifestage=row.get(self.lifestage_key),
            comments=row.get(self.comment_key))

        self.row_entered += utils.parse_extra_cols(row,
                                                   self.cleaned_data,
                                                   det_anix,
                                                   grp=True)
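
The split-group branch above duplicates a saved Group by clearing its primary key before saving, so the ORM issues an INSERT with identical field values (Example #1 does the same with a container cross-reference). As a standalone sketch:

import copy


def duplicate_record(instance):
    # Illustrative only: deep-copy a saved model instance, clear both pk and
    # id, and save so Django inserts a new row rather than updating the old one.
    new_instance = copy.deepcopy(instance)
    new_instance.pk = None
    new_instance.id = None
    new_instance.save()
    return new_instance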
Example #6
    def row_parser(self, row):
        cleaned_data = self.cleaned_data
        row_date = row["datetime"].date()
        row_grp = row["start_grp_id"]
        row_end_grp = self.end_grp_dict[row["end_grp_key"]]
        if row_end_grp:
            row_grp = row_end_grp
        row_anix, data_entered = utils.enter_anix(cleaned_data,
                                                  grp_pk=row_grp.pk)
        self.row_entered += data_entered

        row_contx, data_entered = utils.enter_contx(row.get("start_tank_id"),
                                                    cleaned_data,
                                                    None,
                                                    grp_pk=row_grp.pk,
                                                    return_contx=True)
        self.row_entered += data_entered

        samp_anix = row_contx.animal_details.filter(
            grp_id=row_grp,
            evnt_id=cleaned_data["evnt_id"],
            indv_id__isnull=True,
            loc_id__isnull=True,
            pair_id__isnull=True,
            final_contx_flag=None).get()

        row_samp, data_entered = utils.enter_samp(cleaned_data,
                                                  row[self.samp_key],
                                                  row_grp.spec_id.pk,
                                                  self.sampc_id.pk,
                                                  anix_pk=samp_anix.pk)
        self.row_entered += data_entered

        if utils.nan_to_none(row.get(self.mort_key)):
            if utils.y_n_to_bool(row[self.mort_key]):
                self.row_entered += utils.enter_samp_mortality(
                    row_samp, cleaned_data, row_date)

        if row_samp:
            self.row_entered += utils.enter_bulk_sampd(
                row_samp.pk,
                self.cleaned_data,
                row_date,
                gender=row.get(self.sex_key),
                len_mm=row.get(self.len_key_mm),
                len_val=row.get(self.len_key),
                weight=row.get(self.weight_key),
                weight_kg=row.get(self.weight_key_kg),
                vial=row.get(self.vial_key),
                scale_envelope=row.get(self.envelope_key),
                tissue_yn=row.get(self.tissue_key),
                mark=row.get(self.mark_key),
                vaccinated=row.get(self.vax_key),
                lifestage=row.get(self.lifestage_key),
                comments=row.get(self.comment_key))

            if utils.nan_to_none(row.get(self.precocity_key)):
                if utils.y_n_to_bool(row[self.precocity_key]):
                    self.row_entered += utils.enter_sampd(
                        row_samp.pk,
                        cleaned_data,
                        row_date,
                        None,
                        self.ani_health_anidc_id.pk,
                        adsc_str="Precocity")

            if utils.nan_to_none(row.get(self.ufid_key)):
                self.row_entered += utils.enter_sampd(row_samp.pk,
                                                      cleaned_data, row_date,
                                                      row[self.ufid_key],
                                                      self.anidc_ufid_id.pk)

            self.row_entered += utils.parse_extra_cols(row,
                                                       self.cleaned_data,
                                                       row_samp,
                                                       samp=True)

        else:
            self.success = False
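
Every example leans on utils.nan_to_none and utils.y_n_to_bool to normalise spreadsheet cells before acting on them. The real helpers are not shown; minimal stand-ins with the behaviour the parsers assume would be:

import math


def nan_to_none(value):
    # Assumed behaviour: collapse blank spreadsheet cells (NaN, empty string,
    # None) to None so guards like `if nan_to_none(row[key]):` read naturally.
    if value is None:
        return None
    if isinstance(value, float) and math.isnan(value):
        return None
    if isinstance(value, str) and not value.strip():
        return None
    return value


def y_n_to_bool(value):
    # Assumed behaviour: treat y / yes / true / 1 (case-insensitive) as True.
    return str(value).strip().lower() in ("y", "yes", "true", "1")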