def step1_get_data(year, month, observer_id=None, region_id=None, get_new=True, make_pickle=False, data_description="no_description_given"):
    """Gets data for one month and prepares for plotting.

    :param year:                [int] year to request data for
    :param month:               [int] month to request data for
    :param observer_id:         [int or list of ints] restrict to these observers
    :param region_id:           [int] restrict to this forecast region
    :param get_new:             [bool] get data with a new request or use local pickle
    :param make_pickle:         [bool] only matters if getting new data; make a pickle in local storage
    :param data_description:    [string] custom description for naming pickle-files
    :return dates:              [list of DayData objects]
    """

    # Pick the pickle-file name: a custom description wins, otherwise derive
    # the name from region/observer. BUGFIX: the original tested
    # `data_description is not None`, but the default is the sentinel string
    # "no_description_given", which made every branch below unreachable and
    # named every pickle "...no_description_given_YYYYMM.pickle". Treat the
    # sentinel the same as "no description given".
    if data_description is not None and data_description != "no_description_given":
        pickle_file_name = "{0}{1}_{2}{3:02d}.pickle".format(env.local_storage, data_description, year, month)
    elif region_id is not None:
        pickle_file_name = "{0}runPlotRegionData_{1}_{2}{3:02d}.pickle".format(env.local_storage, region_id, year, month)
    elif observer_id is not None:
        pickle_file_name = "{0}runPlotObserverData_{1}_{2}{3:02d}.pickle".format(env.local_storage, observer_id, year, month)
    else:
        # parenthesized print works in both Python 2 and 3
        print('Need Observerid and/or forecastRegionTID to make this work.')
        return []

    # the requested interval covers the whole month, end-exclusive
    first, last = cal.monthrange(year, month)
    from_date = dt.date(year, month, 1)
    to_date = dt.date(year, month, last) + dt.timedelta(days=1)

    if get_new:
        all_observations = go.get_all_registrations(from_date, to_date, output='DataFrame', geohazard_tid=10, observer_ids=observer_id, region_ids=region_id)

        # for all dates in the requested from-to interval
        dates = []
        for d in _get_dates(from_date, to_date, dt.timedelta(days=1)):

            if observer_id is not None and region_id is None:
                # only data for one observer
                dd = DayData(d, observer_id=observer_id)
            else:
                # else wish to have data for some/all observers in a region
                dd = DayData(d, region_id=region_id)

            obstyp = []             # all registration names on this date
            regids = []             # all RegIDs on this date (counts occurrences)
            nicks = []              # all observer nicknames on this date (counts occurrences)
            loc_pr_regid = {}       # RegID -> forecast region name
            obs_pr_regid = {}       # RegID -> list of registration names
            nic_pr_regid = {}       # RegID -> observer nickname

            # loop through all observations and keep those matching this date
            for i in all_observations.index:
                row = all_observations.iloc[i]      # hoisted: avoids ~10 iloc lookups per row
                if row.DtObsTime.date() == d:
                    regid = row.RegID

                    # location on regid (only one location pr RegID)
                    if regid not in loc_pr_regid:
                        loc_pr_regid[regid] = row.ForecastRegionName

                    # the nickname used on the regid (only one pr RegID)
                    if regid not in nic_pr_regid:
                        nic_pr_regid[regid] = row.NickName

                    # observations pr regid (might be more)
                    if regid not in obs_pr_regid:
                        obs_pr_regid[regid] = [row.RegistrationName]
                    else:
                        obs_pr_regid[regid].append(row.RegistrationName)

                    # list of all observations on this date; a picture of a
                    # snow profile counts as a snow profile (the original's
                    # nested if/else had two identical else-arms — collapsed)
                    if row.RegistrationName == 'Bilde' and row.TypicalValue1 == 'Bilde av: Snoeprofil':
                        obstyp.append('Snoeprofil')
                    else:
                        obstyp.append(row.RegistrationName)

                    regids.append(int(row.RegID))
                    nicks.append(row.NickName)

            # add to object for plotting
            dd.add_loc_pr_regid(loc_pr_regid)
            dd.add_obs_pr_regid(obs_pr_regid)
            dd.add_nic_pr_regid(nic_pr_regid)
            dd.add_observations(obstyp)
            dd.add_regids(regids)
            dd.add_nicks(nicks)
            dates.append(dd)

        if make_pickle:
            mp.pickle_anything(dates, pickle_file_name)

    else:
        dates = mp.unpickle_anything(pickle_file_name)

    return dates
# NOTE(review): the attribute assignments below are the tail of a class
# __init__ whose `def` header lies outside this chunk; the original chunk was
# collapsed onto one physical line and has been re-wrapped here. Code tokens
# are unchanged — only formatting and comments are added.

        # copy the remaining columns of the registration row `d` 1:1 onto
        # this mapped object
        self.RowNumber = d.RowNumber
        self.GeoHazardName = d.GeoHazardName
        self.RegistrationName = d.RegistrationName
        self.TypicalValue1 = d.TypicalValue1
        self.TypicalValue2 = d.TypicalValue2
        self.LangKey = d.LangKey
        self.Picture = d.Picture


if __name__ == "__main__":

    # request one month (April 2016) of registrations for a single region
    from_date = dt.date(2016, 4, 1)
    to_date = dt.date(2016, 5, 1)
    region_ids = 116
    # NOTE(review): `region_ids` is passed positionally here, while other call
    # sites in this file pass it as the keyword `region_ids=`; confirm that the
    # third positional parameter of get_all_registrations really is region_ids
    # and not e.g. registration_types or output.
    all_data = go.get_all_registrations(from_date, to_date, region_ids)

    # create the tables (if missing) and open a session against the engine
    Base.metadata.create_all()
    Session = sessionmaker(bind=engine)
    s = Session()

    # map every registration row onto the ORM class and commit in one batch
    all_data_ny_mapping = []
    for d in all_data:
        all_data_ny_mapping.append(AllRegistrations(d))
    s.add_all(all_data_ny_mapping)
    s.commit()

    a = 1   # no-op; presumably a debugger breakpoint anchor