def setUp(self):
    super().setUp()  # used to import fixtures
    self.grp = BioFactoryFloor.GrpFactory()
    self.trof = BioFactoryFloor.TrofFactory(name='-1')
    self.trof_two = BioFactoryFloor.TrofFactory(name='-2', facic_id=self.trof.facic_id)
    self.evnt_date = utils.naive_to_aware(datetime.today() - timedelta(days=100))
    self.evnt = BioFactoryFloor.EvntFactory(start_datetime=self.evnt_date, facic_id=self.trof.facic_id)
    self.cleaned_data = {
        "facic_id": self.evnt.facic_id,
        "evnt_id": self.evnt,
        "created_by": self.evnt.created_by,
        "created_date": self.evnt.created_date,
    }
    self.contx, data_entered = utils.enter_contx(self.trof, self.cleaned_data, None, return_contx=True)
    self.contx_two, data_entered = utils.enter_contx(self.trof_two, self.cleaned_data, None, return_contx=True)
    temp_envc = models.EnvCode.objects.filter(name="Temperature").get()

    # add ten days worth of temp data to the trough
    for temp in range(0, 10):
        env_date = utils.naive_to_aware(self.evnt.start_date + timedelta(days=temp))
        utils.enter_env(temp, env_date, self.cleaned_data, temp_envc, contx=self.contx)
    for temp in range(10, 20):
        env_date = utils.naive_to_aware(self.evnt.start_date + timedelta(days=temp))
        utils.enter_env(temp, env_date, self.cleaned_data, temp_envc, contx=self.contx_two)
def row_parser(self, row):
    # parse one row of trough temperature data: link each listed trough to the event,
    # then record the temperature reading against that trough's container cross reference
    cleaned_data = self.cleaned_data
    row_datetime = utils.get_row_date(row, get_time=True)
    trof_list = utils.parse_trof_str(row.get(self.trof_key), cleaned_data["facic_id"])
    for trof_id in trof_list:
        row_contx, contx_entered = utils.enter_contx(trof_id, cleaned_data, final_flag=None, return_contx=True)
        self.row_entered += contx_entered
        self.row_entered += utils.enter_env(row[self.temp_key], row_datetime.date(), cleaned_data, self.envc_id,
                                            env_time=row_datetime.time(), contx=row_contx, save=True,
                                            qual_id=self.qual_id)
def data_preper(self):
    # prepare the full temperature dataset: build timezone-aware datetimes from the date and time
    # columns, create unsaved EnvCondition instances per row, then insert them in a single bulk_create
    cleaned_data = self.cleaned_data
    contx, data_entered = utils.enter_trof_contx(cleaned_data["trof_id"].name, cleaned_data, final_flag=None,
                                                 return_contx=True)
    qual_id = models.QualCode.objects.filter(name="Good").get()
    envc_id = models.EnvCode.objects.filter(name="Temperature").get()
    self.data["datetime"] = self.data.apply(
        lambda row: datetime.strptime(row[self.date_key] + ", " + row[self.time_key],
                                      "%Y-%m-%d, %H:%M:%S").replace(tzinfo=pytz.UTC), axis=1)
    self.data["env"] = self.data.apply(
        lambda row: utils.enter_env(row[self.temp_key], row["datetime"].date(), cleaned_data, envc_id,
                                    env_time=row["datetime"].time(), contx=contx, save=False, qual_id=qual_id),
        axis=1)
    entered_list = models.EnvCondition.objects.bulk_create(list(self.data["env"].dropna()))
    self.rows_parsed = len(self.data["env"])
    self.row_entered = len(self.data["env"].dropna())
def row_parser(self, row):
    # parse one row of electrofishing data: resolve the site, create or reuse the Location,
    # then record counts, crew, temperature, and site details against it
    cleaned_data = self.cleaned_data
    row_datetime = utils.get_row_date(row)
    relc_id = None
    rive_id = self.river_dict[row[self.rive_key]]

    # make sure if fish are caught they are assigned a tank:
    if not utils.nan_to_none(row.get(self.tank_key)) and utils.nan_to_none(row.get(self.fish_caught_key)):
        raise Exception("All caught fish must be assigned a tank")

    if utils.nan_to_none(row.get(self.site_key)):
        relc_qs = models.ReleaseSiteCode.objects.filter(name__iexact=row[self.site_key])
        if len(relc_qs) == 1:
            relc_id = relc_qs.get()
    start_lat = utils.round_no_nan(row.get(self.lat_key), 6)
    start_lon = utils.round_no_nan(row.get(self.lon_key), 6)
    if not relc_id and not (start_lat and start_lon):
        raise Exception("Site code not found and lat-long not given for site on row")

    loc = models.Location(evnt_id_id=cleaned_data["evnt_id"].pk,
                          locc_id=self.locc_id,
                          rive_id=rive_id,
                          relc_id=relc_id,
                          loc_lat=start_lat,
                          loc_lon=start_lon,
                          end_lat=utils.round_no_nan(row.get(self.end_lat), 6),
                          end_lon=utils.round_no_nan(row.get(self.end_lon), 6),
                          loc_date=row_datetime,
                          comments=utils.nan_to_none(row.get(self.comment_key)),
                          created_by=cleaned_data["created_by"],
                          created_date=cleaned_data["created_date"],
                          )
    try:
        loc.clean()
        loc.save()
        self.row_entered = True
    except ValidationError:
        # location already exists; fall back to the matching record
        loc = models.Location.objects.filter(evnt_id=loc.evnt_id, locc_id=loc.locc_id, rive_id=loc.rive_id,
                                             subr_id=loc.subr_id, relc_id=loc.relc_id, loc_lat=loc.loc_lat,
                                             loc_lon=loc.loc_lon, loc_date=loc.loc_date).get()
    self.loc = loc

    if row["grp_id"]:
        self.row_entered += utils.enter_anix(cleaned_data, loc_pk=loc.pk, grp_pk=row["grp_id"].pk,
                                             return_sucess=True)

    if self.loc.loc_lon and self.loc.loc_lat and not self.loc.relc_id:
        self.log_data += "\nNo site found in db for Lat-Long ({}, {}) given on row: \n{}\n\n"\
            .format(self.loc.loc_lat, self.loc.loc_lon, row)

    if utils.nan_to_none(row["contx_id"]):
        self.row_entered += utils.enter_anix(cleaned_data, loc_pk=loc.pk, contx_pk=row["contx_id"].pk,
                                             return_sucess=True)

    self.team_parser(row[self.crew_key], row, loc_id=loc)

    if utils.nan_to_none(row.get(self.temp_key)):
        self.row_entered += utils.enter_env(row[self.temp_key], row_datetime, cleaned_data, self.temp_envc_id,
                                            loc_id=loc)

    cnt_caught, cnt_entered = utils.enter_cnt(cleaned_data, cnt_value=row[self.fish_caught_key], loc_pk=loc.pk,
                                              cnt_code="Fish Caught")
    self.row_entered += cnt_entered
    cnt_obs, cnt_entered = utils.enter_cnt(cleaned_data, cnt_value=row[self.fish_obs_key], loc_pk=loc.pk,
                                           cnt_code="Fish Observed")
    self.row_entered += cnt_entered

    if utils.nan_to_none(row.get(self.settings_key)):
        self.row_entered += utils.enter_locd(loc.pk, cleaned_data, row_datetime, row[self.settings_key],
                                             self.settings_locdc_id.pk)
    if utils.nan_to_none(row.get(self.fishing_time_key)):
        self.row_entered += utils.enter_locd(loc.pk, cleaned_data, row_datetime, row[self.fishing_time_key],
                                             self.fishing_time_locdc_id.pk)
    if utils.nan_to_none(row.get(self.voltage_key)):
        self.row_entered += utils.enter_locd(loc.pk, cleaned_data, row_datetime, row[self.voltage_key],
                                             self.voltage_locdc_id.pk)
def row_parser(self, row):
    # parse one row of tank water-quality data: link the tank to the event, then record any
    # temperature, oxygen, pH, dissolved nitrogen, and water source readings, plus crew members
    cleaned_data = self.cleaned_data
    contx, data_entered = utils.enter_tank_contx(row[self.tank_key], cleaned_data, None, return_contx=True)
    self.row_entered += data_entered
    row_date = utils.get_row_date(row)
    if utils.nan_to_none(row[self.time_key]):
        row_time = row[self.time_key].replace(tzinfo=pytz.UTC)
    else:
        row_time = None

    if utils.nan_to_none(row.get(self.temp_key)):
        self.row_entered += utils.enter_env(row[self.temp_key], row_date, cleaned_data, self.temp_envc_id,
                                            contx=contx, env_time=row_time)
    if utils.nan_to_none(row.get(self.dox_key)):
        self.row_entered += utils.enter_env(row[self.dox_key], row_date, cleaned_data, self.oxlvl_envc_id,
                                            contx=contx, env_time=row_time)
    if utils.nan_to_none(row.get(self.ph_key)):
        self.row_entered += utils.enter_env(row[self.ph_key], row_date, cleaned_data, self.ph_envc_id,
                                            contx=contx, env_time=row_time)
    if utils.nan_to_none(row.get(self.dn_key)):
        self.row_entered += utils.enter_env(row[self.dn_key], row_date, cleaned_data, self.disn_envc_id,
                                            contx=contx, env_time=row_time)
    if utils.nan_to_none(row.get(self.source_key)):
        source_envsc_id = models.EnvSubjCode.objects.filter(name__icontains=row[self.source_key]).get()
        self.row_entered += utils.enter_env(row[self.source_key], row_date, cleaned_data, self.ws_envc_id,
                                            envsc_id=source_envsc_id, contx=contx, env_time=row_time)

    if utils.nan_to_none(row.get(self.crew_key)):
        perc_list, inits_not_found = utils.team_list_splitter(row[self.crew_key])
        for perc_id in perc_list:
            team_id, team_entered = utils.add_team_member(perc_id, cleaned_data["evnt_id"], return_team=True)
            self.row_entered += team_entered
            if team_id:
                self.row_entered += utils.enter_tank_contx(row[self.tank_key], cleaned_data, team_pk=team_id.pk)
        for inits in inits_not_found:
            self.log_data += "No valid personnel with initials ({}) for row {} \n".format(inits, row)