def final(self):
    """Emit rows tagged with a session id ("bgroupid") that advances
    whenever consecutive timestamps are more than ``self.maxdiff``
    seconds apart.

    NOTE(review): indentation reconstructed from a collapsed source
    line; the nesting of the two trailing ``if`` blocks inside the
    loop is assumed — confirm against the original layout.
    """
    lenofvals = len(self.vals)
    if lenofvals <= 0:
        # No accumulated rows: dummy header plus a null row.
        yield ("bgroupid", "C1")
        yield [None, None, None]
        return
    # Header: "bgroupid" plus C1..Cn for the pass-through columns.
    yield tuple(["bgroupid"] + ["C" + str(i) for i in xrange(1, len(self.vals[0]) - 1)])
    counter = 0
    if lenofvals != 1:
        for el in self.vals:
            try:
                # Prepend the parsed datetime so rows sort chronologically.
                el.insert(0, iso8601.parse_date(el[0]))
            except Exception:
                raise functions.OperatorError(
                    "datediffnewsesid",
                    "Wrong date format: %s" % (el[0]))
        self.vals.sort(key=itemgetter(0))
        dt = self.vals[0][0]
        for el in self.vals[1:]:
            dtnew = el[0]
            diff = dtnew - dt
            dt = dtnew
            # A gap above maxdiff (seconds) starts a new session.
            if (diff.days * 86400 + diff.seconds) > self.maxdiff:
                counter += 1
            if counter != 0:
                # Session id = original id column + running counter.
                bid = unistr(el[2]) + str(counter)
                yield [bid] + el[3:]
def forecast_update2(site_key):
    """Fetch the Met Office best-forecast JSON for *site_key* and store
    every timestep as a ForecastTimestep entity.

    Returns 404 for an unknown site, otherwise 204; non-200 upstream
    responses are silently ignored.
    """
    site = Site.get_by_key_name(site_key)
    if site is None:
        return Response(status=404)
    forecast_url = "http://www.metoffice.gov.uk/public/data/PWSCache/BestForecast/Forecast/%s?format=application/json" % site_key
    result = urlfetch.fetch(forecast_url)
    if result.status_code == 200:
        forecast = parse_forecast(result.content)
        issued_date = parse_date(forecast["@dataDate"])
        for date, data in timesteps(forecast):
            # Reuse an existing entity for this site/forecast/issue triple.
            forecast_timestep = ForecastTimestep.find_by_site_and_dates(
                site, date, issued_date)
            if forecast_timestep is None:
                forecast_timestep = ForecastTimestep(
                    site=site,
                    forecast_datetime=date,
                    issued_datetime=issued_date,
                    forecast_date=date.date())
            for k, v in data.items():
                prop_name = snake_case(k)
                # Copy only keys that map onto model properties.
                if hasattr(forecast_timestep, prop_name):
                    if v == "missing":
                        v = None  # upstream sentinel for absent readings
                    setattr(forecast_timestep, prop_name, v)
            forecast_timestep.save()
    return Response(status=204)
def final(self):
    """Emit rows whose first column is an id suffixed with a break
    counter; the counter advances whenever consecutive rows are more
    than ``self.maxdiff`` milliseconds apart on their date column.
    """
    if self.position:
        # Optionally pre-sort rows on the trailing comparison columns.
        self.vals.sort(key=lambda x:tuple(x[-self.comparesize:]))
    if self.vals==[]:
        size=0
    else:
        size=len(self.vals[0])-self.comparesize-1
    if size<=0:
        # Nothing useful to emit: dummy header plus a null row.
        yield ("bgroupid","C1")
        yield [None, None]
        return
    yield tuple(["bgroupid"]+["C"+str(i) for i in xrange(1,size)])
    counter=0
    dt=None
    dtpos=self.comparesize+1  # index of the date column, from the end
    for el in self.vals:
        try:
            dtnew=iso8601.parse_date(el[-dtpos])
        except Exception:
            raise functions.OperatorError("datediffbreak","Wrong date format: %s" %(el[-dtpos]))
        # A gap above maxdiff (milliseconds) starts a new break group.
        if dt and timedelta2millisec(dtnew-dt)>self.maxdiff:
            counter+=1
        dt=dtnew
        bid=unistr(el[0])+str(counter)
        yield [bid]+el[1:-dtpos]
def forecast_update(site_key):
    """Fetch the Met Office best-forecast JSON for *site_key* and merge
    it day-by-day into ForecastDay entities (one Forecast per timestep).

    Returns 404 for an unknown site, otherwise 204; non-200 upstream
    responses are silently ignored.
    """
    site = Site.get_by_key_name(site_key)
    if site is None:
        return Response(status = 404)
    forecast_url = "http://www.metoffice.gov.uk/public/data/PWSCache/BestForecast/Forecast/%s?format=application/json" % site_key
    result = urlfetch.fetch(forecast_url)
    if result.status_code == 200:
        forecast = parse_forecast(result.content)
        issued_date = parse_date(forecast["@dataDate"])
        for date, day in days(forecast):
            forecast_day = ForecastDay.get_by_key_name(make_key_name(site,date))
            if forecast_day is None:
                forecast_day = ForecastDay(key_name=make_key_name(site,date), forecast_date = date, site = site)
            forecast_day.site = site
            for timestep, data in day_timesteps(day):
                w = Forecast()
                w.issued = issued_date
                for k,v in data.items():
                    prop_name = snake_case(k)
                    # Copy only keys that map onto model properties.
                    if hasattr(w, prop_name):
                        if v == "missing":
                            v = None  # upstream sentinel for absent readings
                        setattr(w, prop_name, v)
                forecast_day.forecasts.add(timestep,w)
            forecast_day.save()
        site.save()
    return Response(status = 204)
def forecast_update(site_key):
    """Fetch the Met Office best-forecast JSON for *site_key* and merge
    it day-by-day into ForecastDay entities (one Forecast per timestep).

    Returns 404 for an unknown site, otherwise 204; a non-200 upstream
    response is silently ignored.
    """
    site = Site.get_by_key_name(site_key)
    if site is None:
        return Response(status=404)
    forecast_url = "http://www.metoffice.gov.uk/public/data/PWSCache/BestForecast/Forecast/%s?format=application/json" % site_key
    result = urlfetch.fetch(forecast_url)
    if result.status_code == 200:
        forecast = parse_forecast(result.content)
        issued_date = parse_date(forecast["@dataDate"])
        for date, day in days(forecast):
            forecast_day = ForecastDay.get_by_key_name(
                make_key_name(site, date))
            if forecast_day is None:
                forecast_day = ForecastDay(key_name=make_key_name(site, date),
                                           forecast_date=date, site=site)
            forecast_day.site = site
            for timestep, data in day_timesteps(day):
                w = Forecast()
                w.issued = issued_date
                for k, v in data.items():
                    prop_name = snake_case(k)
                    # Copy only keys that map onto model properties.
                    if hasattr(w, prop_name):
                        if v == "missing":
                            v = None  # upstream sentinel for absent readings
                        setattr(w, prop_name, v)
                forecast_day.forecasts.add(timestep, w)
            forecast_day.save()
        site.save()
    return Response(status=204)
def final(self):
    """Emit rows whose gap to the chronologically next row exceeds
    ``self.maxdiff`` seconds; the last row is always emitted.

    NOTE(review): nesting of the final ``if`` reconstructed from a
    collapsed source line; placed inside the preceding branch so the
    last row is emitted exactly once — confirm.
    """
    lenofvals = len(self.vals)
    if lenofvals == 0:
        yield ("date", "C1")
        yield [None, None]
        return
    yield tuple(["date"] + ["C" + str(i) for i in xrange(1, len(self.vals[0]))])
    dt = None
    dtpos = 0
    diff = 0
    if lenofvals == 1:
        # Single row: nothing to diff against, emit it as-is.
        yield (self.vals[dtpos])
    else:
        for el in self.vals:
            # Append parsed datetime so rows can be sorted chronologically.
            el.append(iso8601.parse_date(el[0]))
        self.vals.sort(key=itemgetter(-1))
        for el in self.vals:
            if dtpos < lenofvals - 1:
                dt = el[-1]
                dtnew = self.vals[dtpos + 1][-1]
                diff = dtnew - dt
                dtpos += 1
                # Gap to the next row above maxdiff: emit this row
                # (dropping the appended helper datetime column).
                if (diff.days * 86400 + diff.seconds) > self.maxdiff:
                    yield (el[0:-1])
                if dtpos == lenofvals - 1:
                    yield (self.vals[dtpos][0:-1])
def date2iso(*args):
    """
    .. function:: date2iso(sec) -> ISO Datetime

    Converts an input date to ISO-8601 date format, attempting to
    autodetect the input date format.

    Examples:

    >>> table1('''
    ... 2007-12-31
    ... 2010-01-01
    ... 2010W06
    ... "18/Jan/2011:11:13:00 +0100"
    ... ''')
    >>> sql("select date2iso(a) from table1")
    date2iso(a)
    -------------------------
    2007-12-31T00:00:00+00:00
    2010-01-01T00:00:00+00:00
    2010-02-05T00:00:00+00:00
    2011-01-18T11:13:00+01:00
    """
    raw = args[0]
    # Strict ISO-8601 parsing first; fall back to fuzzy parsing for
    # anything the strict parser rejects.
    try:
        parsed = iso8601.parse_date(raw)
    except iso8601.ParseError:
        parsed = parser.parse(raw, fuzzy=True)
    return parsed.isoformat()
def final(self):
    """Emit rows whose gap to the chronologically next row exceeds
    ``self.maxdiff`` seconds; the last row is always emitted.

    NOTE(review): nesting of the final ``if`` reconstructed from a
    collapsed source line — confirm against the original layout.
    """
    lenofvals=len(self.vals)
    if lenofvals==0:
        yield ("date","C1")
        yield [None,None]
        return
    yield tuple(["date"]+["C"+str(i) for i in xrange(1, len(self.vals[0]))])
    dt=None
    dtpos=0
    diff=0
    if lenofvals==1:
        # Single row: nothing to diff against, emit it as-is.
        yield(self.vals[dtpos])
    else:
        for el in self.vals:
            # Append parsed datetime so rows can be sorted chronologically.
            el.append(iso8601.parse_date(el[0]))
        self.vals.sort(key=itemgetter(-1))
        for el in self.vals:
            if dtpos<lenofvals-1:
                dt = el[-1]
                dtnew =self.vals[dtpos+1][-1]
                diff=dtnew-dt
                dtpos+=1
                # Gap above maxdiff (seconds): emit this row without the
                # appended helper datetime column.
                if (diff.days*86400+diff.seconds)>self.maxdiff:
                    yield(el[0:-1])
                if dtpos==lenofvals-1:
                    yield(self.vals[dtpos][0:-1])
def final(self):
    """Emit rows tagged with a session id ("bgroupid") that advances
    whenever consecutive timestamps are more than ``self.maxdiff``
    seconds apart.

    NOTE(review): indentation reconstructed from a collapsed source
    line; the nesting of the two trailing ``if`` blocks is assumed.
    """
    lenofvals=len(self.vals)
    if lenofvals<=0:
        # No accumulated rows: dummy header plus a null row.
        yield ("bgroupid", "C1")
        yield [None, None, None]
        return
    yield tuple(["bgroupid"]+["C"+str(i) for i in xrange(1,len(self.vals[0])-1)])
    counter=0
    if lenofvals!=1:
        for el in self.vals:
            try:
                # Prepend the parsed datetime so rows sort chronologically.
                el.insert(0,iso8601.parse_date(el[0]))
            except Exception:
                raise functions.OperatorError("datediffnewsesid","Wrong date format: %s" %(el[0]))
        self.vals.sort(key=itemgetter(0))
        dt=self.vals[0][0]
        for el in self.vals[1:]:
            dtnew=el[0]
            diff=dtnew-dt
            dt=dtnew
            # A gap above maxdiff (seconds) starts a new session.
            if (diff.days*86400+diff.seconds)>self.maxdiff:
                counter+=1
            if counter!=0:
                # Session id = original id column + running counter.
                bid=unistr(el[2])+str(counter)
                yield [bid]+el[3:]
def final(self):
    """Emit rows whose first column is an id suffixed with a break
    counter; the counter advances whenever consecutive rows are more
    than ``self.maxdiff`` milliseconds apart on their date column.
    """
    if self.position:
        # Optionally pre-sort rows on the trailing comparison columns.
        self.vals.sort(key=lambda x: tuple(x[-self.comparesize:]))
    if self.vals == []:
        size = 0
    else:
        size = len(self.vals[0]) - self.comparesize - 1
    if size <= 0:
        # Nothing useful to emit: dummy header plus a null row.
        yield ("bgroupid", "C1")
        yield [None, None]
        return
    yield tuple(["bgroupid"] + ["C" + str(i) for i in xrange(1, size)])
    counter = 0
    dt = None
    dtpos = self.comparesize + 1  # index of the date column, from the end
    for el in self.vals:
        try:
            dtnew = iso8601.parse_date(el[-dtpos])
        except Exception:
            raise functions.OperatorError(
                "datediffbreak",
                "Wrong date format: %s" % (el[-dtpos]))
        # A gap above maxdiff (milliseconds) starts a new break group.
        if dt and timedelta2millisec(dtnew - dt) > self.maxdiff:
            counter += 1
        dt = dtnew
        bid = unistr(el[0]) + str(counter)
        yield [bid] + el[1:-dtpos]
def forecast_update2(site_key):
    """Fetch the Met Office best-forecast JSON for *site_key* and store
    every timestep as a ForecastTimestep entity.

    Returns 404 for an unknown site, otherwise 204; a non-200 upstream
    response is silently ignored.
    """
    site = Site.get_by_key_name(site_key)
    if site is None:
        return Response(status = 404)
    forecast_url = "http://www.metoffice.gov.uk/public/data/PWSCache/BestForecast/Forecast/%s?format=application/json" % site_key
    result = urlfetch.fetch(forecast_url)
    if result.status_code == 200:
        forecast = parse_forecast(result.content)
        issued_date = parse_date(forecast["@dataDate"])
        for date, data in timesteps(forecast):
            # Reuse an existing entity for this site/forecast/issue triple.
            forecast_timestep = ForecastTimestep.find_by_site_and_dates(site, date, issued_date)
            if forecast_timestep is None:
                forecast_timestep = ForecastTimestep(site = site, forecast_datetime = date, issued_datetime = issued_date, forecast_date = date.date())
            for k,v in data.items():
                prop_name = snake_case(k)
                # Copy only keys that map onto model properties.
                if hasattr(forecast_timestep, prop_name):
                    if v == "missing":
                        v = None  # upstream sentinel for absent readings
                    setattr(forecast_timestep, prop_name, v)
            forecast_timestep.save()
    return Response(status = 204)
def timesteps(data):
    """Yield (timestamp, weather-parameters) pairs for every timestep of
    every day in a parsed forecast document."""
    for day in ensure_array(data["Location"]["Day"]):
        day_date = day["@date"]
        for step in ensure_array(day["TimeSteps"]["TimeStep"]):
            # Combine the day's date with the step's time into one UTC stamp.
            stamp = parse_date("%sT%s.000Z" % (day_date, step["@time"]))
            yield stamp, step["WeatherParameters"]
def activityindex(*args):
    """
    .. function:: activityIndex(date, c1, c2) -> int

    Specialized function that classifies the provided date argument
    into a 6-point scale (0 to 5)

    Examples:

    >>> table1('''
    ... '2009-01-01T01:32:03Z'
    ... '2010-01-01T00:03:13Z'
    ... '2010-12-31T00:03:13Z'
    ... '2011-04-01T00:03:13Z'
    ... ''')
    >>> sql("select activityIndex(a) from table1")
    activityIndex(a)
    ----------------
    0
    1
    3
    5
    """
    # Round-trip "now" through strftime so it is truncated to whole
    # seconds and naive, matching the parsed (Z-stripped) argument.
    now = iso8601.parse_date(
        datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
    dt = iso8601.parse_date(args[0].replace('Z', ''))
    diff = now - dt
    # Buckets of ~30-day months: <1 month -> 5 ... >=24 months -> 0.
    # The original's trailing "else: return -1" was unreachable (every
    # integer day count falls in one of these buckets) and has been
    # dropped, along with an unused local and a stray semicolon.
    if diff.days < 30:
        return 5
    elif diff.days < 3 * 30:
        return 4
    elif diff.days < 6 * 30:
        return 3
    elif diff.days < 12 * 30:
        return 2
    elif diff.days < 24 * 30:
        return 1
    return 0
def activityindex(*args):
    """
    .. function:: activityIndex(date, c1, c2) -> int

    Specialized function that classifies the provided date argument
    into a 6-point scale (0 to 5)

    Examples:

    >>> table1('''
    ... '2009-01-01T01:32:03Z'
    ... '2010-01-01T00:03:13Z'
    ... '2010-12-31T00:03:13Z'
    ... '2011-04-01T00:03:13Z'
    ... ''')
    >>> sql("select activityIndex(a) from table1")
    activityIndex(a)
    ----------------
    0
    1
    3
    5
    """
    # Round-trip "now" through strftime so it is truncated to whole
    # seconds and naive, matching the parsed (Z-stripped) argument.
    now = iso8601.parse_date(
        datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
    dt = iso8601.parse_date(args[0].replace('Z', ''))
    diff = now - dt
    # Buckets of ~30-day months: <1 month -> 5 ... >=24 months -> 0.
    # The original's trailing "else: return -1" was unreachable (every
    # integer day count falls in one of these buckets) and has been
    # dropped, along with the unused local "d".
    if diff.days < 30:
        return 5
    elif diff.days < 3 * 30:
        return 4
    elif diff.days < 6 * 30:
        return 3
    elif diff.days < 12 * 30:
        return 2
    elif diff.days < 24 * 30:
        return 1
    return 0
def step(self, *args):
    """Accumulate one timestamp's frecency contribution.

    args[0] is the timestamp to score; any further args are static,
    parsed only on the first call: either a "now" override matched by
    re_now, or an integer points weight (default 100.0).
    """
    if not args:
        raise functions.OperatorError("frecency", "No arguments")
    # last 2 arguments are static , so they are parse only the first time
    if not self.initstatic:
        self.initstatic = True
        self.points = 100.0
        self.now = datetime.datetime.now()
        if len(args) >= 2:
            for arg in args[1:]:
                isnowarg = re_now.match(arg)
                if isnowarg:
                    nowdate = isnowarg.groupdict()['now']
                    self.now = iso8601.parse_date(nowdate)
                else:
                    self.points = int(arg)
    input = args[0]
    dt = iso8601.parse_date(input)
    # Contribution = decay(age relative to "now") * points weight.
    self.frecency += self.__decrease(self.now - dt) * self.points
def step(self, *args):
    """Bucket one timestamp's age into the month/trimester/semester/
    year/two-year counters kept on self.

    args[0] is an ISO-8601 timestamp (a trailing 'Z' is stripped);
    timestamps 24*30 days old or older fall into no bucket. Raises
    functions.OperatorError when called without arguments.
    """
    if not args:
        raise functions.OperatorError("frecencyindex","No arguments")
    # Round-trip "now" through strftime so it is truncated to whole
    # seconds and naive, matching the Z-stripped parsed argument.
    # (An unused local "d" from the original has been removed.)
    now = iso8601.parse_date(
        datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
    dt = iso8601.parse_date(args[0].replace('Z', ''))
    diff = now - dt
    if diff.days < 30:
        self.monthCounter += 1
    elif diff.days < 3 * 30:
        self.trimesterCounter += 1
    elif diff.days < 6 * 30:
        self.semesterCounter += 1
    elif diff.days < 12 * 30:
        self.yearCounter += 1
    elif diff.days < 24 * 30:
        self.twoyearsCounter += 1
def step(self, *args):
    """Accumulate one timestamp's frecency contribution.

    args[0] is the timestamp to score; any further args are static,
    parsed only on the first call: either a "now" override matched by
    re_now, or an integer points weight (default 100.0).
    """
    if not args:
        raise functions.OperatorError("frecency","No arguments")
    # last 2 arguments are static , so they are parse only the first time
    if not self.initstatic:
        self.initstatic=True
        self.points=100.0
        self.now=datetime.datetime.now()
        if len(args)>=2:
            for arg in args[1:]:
                isnowarg=re_now.match(arg)
                if isnowarg:
                    nowdate=isnowarg.groupdict()['now']
                    self.now=iso8601.parse_date(nowdate)
                else:
                    self.points=int(arg)
    input=args[0]
    dt=iso8601.parse_date(input)
    # Contribution = decay(age relative to "now") * points weight.
    self.frecency+=self.__decrease(self.now-dt)*self.points
def final(self):
    """Emit each buffered row prefixed with a group id; the id advances
    whenever consecutive rows' dates are more than ``self.maxdiff``
    seconds apart.

    NOTE(review): indentation reconstructed from a collapsed source
    line; the nesting of the trailing ``if`` is assumed — confirm.
    """
    if self.tablesize<=0:
        yield ("groupid","date","C1")
    else:
        yield tuple(["groupid"]+["date"]+["C"+str(i+1) for i in xrange(self.tablesize-1)])
    dt=None
    dtpos=0
    diff=0
    for el in self.vals:
        if dtpos<self.counter-1:
            dt = iso8601.parse_date(el[0])
            dtnew =iso8601.parse_date(self.vals[dtpos+1][0])
            diff=dtnew-dt
            yield [str(self.groupIdCounter)]+el
            # A gap above maxdiff (seconds) starts a new group id.
            if (diff.days*24*60*60+diff.seconds)>self.maxdiff:
                self.groupIdCounter+=1
            dtpos+=1
            if dtpos==self.counter-1:
                # The last row has no successor; emit it directly.
                yield [str(self.groupIdCounter)]+self.vals[dtpos]
def step(self, *args):
    """Track the minimum and maximum parsed date seen across calls.

    args[0] is an ISO-8601 timestamp; self.datemin / self.datemax
    start as None and are updated in place. The first value seeds both
    bounds.
    """
    pdate = iso8601.parse_date(args[0])
    # "is None" identity tests replace the original's "== None", and
    # the seed/compare pairs are merged into single conditions.
    if self.datemin is None or pdate < self.datemin:
        self.datemin = pdate
    if self.datemax is None or pdate > self.datemax:
        self.datemax = pdate
def observation_update(site_key):
    """Fetch the latest Met Office observations JSON for *site_key* and
    merge each timestep into per-day ObservationDay entities.

    Returns 404 for an unknown site, otherwise 204; a non-200 upstream
    response is silently ignored.
    """
    site = Site.get_by_key_name(site_key)
    if site is None:
        return Response(status=404)
    url = "http://www.metoffice.gov.uk/public/data/PWSCache/BestForecast/Observation/%s?format=application/json" % site_key
    obs = {}

    def get_db_observation(date):
        # Per-request cache keyed by day so each ObservationDay is
        # loaded or created at most once.
        key_name = make_key_name(site, date.date())
        if key_name in obs:
            return obs[key_name]
        o = ObservationDay.get_by_key_name(key_name)
        if o is None:
            o = ObservationDay(key_name=key_name)
            o.site = site
            o.observation_date = date.date()
            o.observations = Observations()
        obs[key_name] = o
        return o

    result = urlfetch.fetch(url)
    if result.status_code == 200:
        observations = parse_observation(result.content)
        issue_date = parse_date(observations['@issueDate'])
        site.last_obs_issue_datetime = issue_date
        site.last_obs_update_datetime = datetime.now()
        for date, data in timesteps(observations):
            o = get_db_observation(date)
            o.lastdata_datetime = issue_date
            w = Weather({})
            for k, v in data.items():
                prop_name = snake_case(k)
                # Copy only keys that map onto model properties.
                if hasattr(w, prop_name):
                    if v == "missing":
                        v = None  # upstream sentinel for absent readings
                    elif prop_name == 'temperature':
                        v = float(v)
                    setattr(w, prop_name, v)
            o.observations.add(date, w)
        for o in obs.values():
            o.save()
        site.save()
    return Response(status=204)
def observation_update(site_key):
    """Fetch the latest Met Office observations JSON for *site_key* and
    merge each timestep into per-day ObservationDay entities.

    Returns 404 for an unknown site, otherwise 204; non-200 upstream
    responses are silently ignored.
    """
    site = Site.get_by_key_name(site_key)
    if site is None:
        return Response(status = 404)
    url = "http://www.metoffice.gov.uk/public/data/PWSCache/BestForecast/Observation/%s?format=application/json" % site_key
    obs = {}

    def get_db_observation(date):
        # Per-request cache keyed by day so each ObservationDay is
        # loaded or created at most once.
        key_name = make_key_name(site, date.date())
        if key_name in obs:
            return obs[key_name]
        o = ObservationDay.get_by_key_name(key_name)
        if o is None:
            o = ObservationDay(key_name=key_name)
            o.site = site
            o.observation_date = date.date()
            o.observations = Observations()
        obs[key_name] = o
        return o

    result = urlfetch.fetch(url)
    if result.status_code == 200:
        observations = parse_observation(result.content)
        issue_date = parse_date(observations['@issueDate'])
        site.last_obs_issue_datetime = issue_date
        site.last_obs_update_datetime = datetime.now()
        for date, data in timesteps(observations):
            o = get_db_observation(date)
            o.lastdata_datetime = issue_date
            w = Weather({})
            for k,v in data.items():
                prop_name = snake_case(k)
                # Copy only keys that map onto model properties.
                if hasattr(w, prop_name):
                    if v == "missing":
                        v = None  # upstream sentinel for absent readings
                    elif prop_name == 'temperature':
                        v = float(v)
                    setattr(w, prop_name, v)
            o.observations.add(date, w)
        for o in obs.values():
            o.save()
        site.save()
    return Response(status = 204)
def final(self):
    """Emit each buffered row prefixed with a group id; the id advances
    whenever consecutive rows' dates are more than ``self.maxdiff``
    seconds apart.

    NOTE(review): indentation reconstructed from a collapsed source
    line; the nesting of the trailing ``if`` is assumed — confirm.
    """
    if self.tablesize <= 0:
        yield ("groupid", "date", "C1")
    else:
        yield tuple(["groupid"] + ["date"] + ["C" + str(i + 1) for i in xrange(self.tablesize - 1)])
    dt = None
    dtpos = 0
    diff = 0
    for el in self.vals:
        if dtpos < self.counter - 1:
            dt = iso8601.parse_date(el[0])
            dtnew = iso8601.parse_date(self.vals[dtpos + 1][0])
            diff = dtnew - dt
            yield [str(self.groupIdCounter)] + el
            # A gap above maxdiff (seconds) starts a new group id.
            if (diff.days * 24 * 60 * 60 + diff.seconds) > self.maxdiff:
                self.groupIdCounter += 1
            dtpos += 1
            if dtpos == self.counter - 1:
                # The last row has no successor; emit it directly.
                yield [str(self.groupIdCounter)] + self.vals[dtpos]
def observation_import(site_id):
    """Import today's observations for *site_id* from the
    metofficewatch mirror and save them onto the local ObservationDay.

    Returns 404 for an unknown site, otherwise 204; a non-200 upstream
    response is silently ignored.
    """
    site = Site.get_by_key_name(site_id)
    if site is None:
        return Response(status = 404)
    today = date.today()
    url = "http://metofficewatch.appspot.com/sites/%s/observations?day=%s" % (site_id, today.isoformat())
    result = urlfetch.fetch(url)
    if result.status_code == 200:
        obs = json.loads(result.content)
        obs_day = ObservationDay.get_by(site, today, not_found_return_new = True)
        obs_day.observations = Observations.from_json(obs['observations'])
        obs_day.lastdata_datetime = parse_date(obs['lastdata_datetime'])
        obs_day.save()
    return Response(status = 204)
def forecast_import(site_id):
    """Import today's forecasts for *site_id* from the metofficewatch
    mirror and save them onto the local ForecastDay.

    Returns 404 for an unknown site, otherwise 204; a non-200 upstream
    response is silently ignored.
    """
    site = Site.get_by_key_name(site_id)
    if site is None:
        return Response(status = 404)
    today = date.today()
    url = "http://metofficewatch.appspot.com/sites/%s/forecasts?day=%s" % (site_id, today.isoformat())
    result = urlfetch.fetch(url)
    if result.status_code == 200:
        forecasts = json.loads(result.content)
        forecast_day = ForecastDay.get_by(site, today, not_found_return_new = True)
        forecast_day.forecasts = Forecasts.from_json(forecasts['forecasts'])
        # lastdata_datetime may legitimately be absent upstream.
        forecast_day.lastdata_datetime = parse_date(forecasts['lastdata_datetime']) if forecasts['lastdata_datetime'] is not None else None
        forecast_day.save()
    return Response(status = 204)
def day_timesteps(day):
    """Yield (timestamp, weather-parameters) pairs for each timestep of
    a single parsed forecast day."""
    date_part = day["@date"]
    for step in ensure_array(day["TimeSteps"]["TimeStep"]):
        # Combine the day's date with the step's time into one UTC stamp.
        stamp = parse_date("%sT%s.000Z" % (date_part, step["@time"]))
        yield stamp, step["WeatherParameters"]
def tzconverter(*args):
    """
    .. function:: tzconverter(timestamp,offset)

    Returns timestamps converted from UTC to target timezone, indicated
    by the offset parameter. Accepted offset forms are '+HH', '+HHMM'
    and '+HH:MM' (sign mandatory).

    Example::

    >>> table1('''
    ... "2010-12-05T00:00:00+00:00"
    ... "2010-12-05T00:01:00+00:00"
    ... "2010-12-05T00:02:00+00:00"
    ... ''')
    >>> sql("select a, tzconverter(a,'-01:00') from table1 ")
    a                         | tzconverter(a,'-01:00')
    -----------------------------------------------------
    2010-12-05T00:00:00+00:00 | 2010-12-04T23:00:00-01:00
    2010-12-05T00:01:00+00:00 | 2010-12-04T23:01:00-01:00
    2010-12-05T00:02:00+00:00 | 2010-12-04T23:02:00-01:00

    >>> sql("select a, tzconverter(a,'-0100') from table1 ")
    a                         | tzconverter(a,'-0100')
    ----------------------------------------------------
    2010-12-05T00:00:00+00:00 | 2010-12-04T23:00:00-0100
    2010-12-05T00:01:00+00:00 | 2010-12-04T23:01:00-0100
    2010-12-05T00:02:00+00:00 | 2010-12-04T23:02:00-0100

    >>> sql("select a, tzconverter(a,'+00:30') from table1 ")
    a                         | tzconverter(a,'+00:30')
    -----------------------------------------------------
    2010-12-05T00:00:00+00:00 | 2010-12-05T00:30:00+00:30
    2010-12-05T00:01:00+00:00 | 2010-12-05T00:31:00+00:30
    2010-12-05T00:02:00+00:00 | 2010-12-05T00:32:00+00:30
    """
    date = iso8601.parse_date(args[0])
    offset = args[1]
    # Direct slicing replaces the original's manual character-counting
    # loop (with its unused loop variable and stray semicolon).
    sign = offset[0]
    hours = int(offset[1:3])  # two-digit hours, per the documented forms
    rest = offset[3:]
    if rest.startswith(':'):
        rest = rest[1:]
    minutes = int(rest) if rest else 0
    mins = hours * 60 + minutes
    result = ''
    if sign == '+':
        result = date + timedelta(minutes=mins)
    elif sign == '-':
        result = date - timedelta(minutes=mins)
    # Re-attach the requested offset text; assumes the input carried a
    # "+00:00" (UTC) suffix, as in the original implementation.
    result = str(result).replace(" ", "T").replace("+00:00", args[1])
    return result
def tzconverter(*args):
    """
    .. function:: tzconverter(timestamp,offset)

    Returns timestamps converted from UTC to target timezone, indicated
    by the offset parameter.

    Example::

    >>> table1('''
    ... "2010-12-05T00:00:00+00:00"
    ... "2010-12-05T00:01:00+00:00"
    ... "2010-12-05T00:02:00+00:00"
    ... ''')
    ... ''')
    >>> sql("select a, tzconverter(a,'-01:00') from table1 ")
    a                         | tzconverter(a,'-01:00')
    -----------------------------------------------------
    2010-12-05T00:00:00+00:00 | 2010-12-04T23:00:00-01:00
    2010-12-05T00:01:00+00:00 | 2010-12-04T23:01:00-01:00
    2010-12-05T00:02:00+00:00 | 2010-12-04T23:02:00-01:00
    ... ''')
    >>> sql("select a, tzconverter(a,'-01') from table1 ")
    a                         | tzconverter(a,'-01')
    --------------------------------------------------
    2010-12-05T00:00:00+00:00 | 2010-12-04T23:00:00-01
    2010-12-05T00:01:00+00:00 | 2010-12-04T23:01:00-01
    2010-12-05T00:02:00+00:00 | 2010-12-04T23:02:00-01

    >>> sql("select a, tzconverter(a,'-0100') from table1 ")
    a                         | tzconverter(a,'-0100')
    ----------------------------------------------------
    2010-12-05T00:00:00+00:00 | 2010-12-04T23:00:00-0100
    2010-12-05T00:01:00+00:00 | 2010-12-04T23:01:00-0100
    2010-12-05T00:02:00+00:00 | 2010-12-04T23:02:00-0100

    >>> sql("select a, tzconverter(a,'+00:30') from table1 ")
    a                         | tzconverter(a,'+00:30')
    -----------------------------------------------------
    2010-12-05T00:00:00+00:00 | 2010-12-05T00:30:00+00:30
    2010-12-05T00:01:00+00:00 | 2010-12-05T00:31:00+00:30
    2010-12-05T00:02:00+00:00 | 2010-12-05T00:32:00+00:30
    """
    date = iso8601.parse_date(args[0])
    mins = 0
    sign = ''
    result = ''
    c = 0
    # Walk the offset string ('+HH', '+HHMM' or '+HH:MM') character by
    # character, accumulating total minutes in `mins`.
    for i in args[1]:
        if c == 0:
            sign = args[1][0]
        elif c == 1:
            mins += int(args[1][1]) * 600   # tens of hours, in minutes
        elif c == 2:
            mins += int(args[1][2]) * 60    # ones of hours, in minutes
        elif c == 3 and args[1][3] == ':':
            #in this case i know what's next
            mins += int(args[1][4]) * 10 + int(args[1][5])
            break
        elif c == 3:
            mins += int(args[1][3]) * 10
        elif c == 4:
            mins += int(args[1][4])
        c += 1
    if sign == '+':
        result = date + timedelta(minutes=mins)
    elif sign == '-':
        result = date - timedelta(minutes=mins)
    # Re-attach the requested offset text; assumes a "+00:00" UTC input.
    result = str(result).replace(" ", "T").replace("+00:00", args[1])
    return result
def __init__(self, values=None):
    """Build a Forecast from a dict of values.

    When present, values['issued'] is parsed with parse_date onto
    self.issued; otherwise self.issued is None. The original signature
    used a mutable default argument (``values={}``); ``None`` now
    stands in for "no values", which is backward-compatible for all
    callers.
    """
    if values is None:
        values = {}
    super(Forecast, self).__init__(values)
    self.issued = parse_date(values['issued']) if 'issued' in values else None
def VTiter(self, *parsedArgs, **envars):
    """Replay the rows of a stored query as a simulated real-time
    stream: each emitted tuple is prefixed with a generated UTC ISO-8601
    'timestamp' column, and the generator sleeps so batches appear at
    wall-clock pace. Runs forever (re-executing the query) until
    KeyboardInterrupt.

    NOTE(review): indentation reconstructed from a collapsed source
    line — confirm nesting against the original layout.
    """
    largs, dictargs = self.full_parse(parsedArgs)
    quantum = None
    output = False
    nextproducetuple = 0  # The next tuple that must appear
    if 'query' not in dictargs:
        raise functions.OperatorError(__name__.rsplit('.')[-1], "No query argument ")
    query=dictargs['query']
    if 'ratio' in dictargs:
        self.ratio=float(dictargs['ratio'])
        if self.ratio >= 1:
            self.ratio=int(self.ratio)
        elif self.ratio <= 0:
            self.ratio = 1
        else:
            # Fractional ratios are only allowed when 1/ratio is integral.
            if (float(float(1)/float(self.ratio)) - int(float(1)/float(self.ratio))) != 0:
                raise functions.OperatorError(__name__.rsplit('.')[-1], "1/Ratio must be a not decimal number ")
    else:
        self.ratio = 1
    self.quantum = None
    if 'quantum' in dictargs:
        self.quantum=int(dictargs['quantum'])
        if self.quantum <= 0:
            self.quantum = 1
    if 'output' in dictargs:
        if str(dictargs['output']).lower() == 'same':
            output = True
    if 'starttimestamp' in dictargs:
        # Convert the supplied local start timestamp to epoch seconds (UTC).
        dt=iso8601.parse_date(dictargs['starttimestamp'])
        nextproducetupletime=long(time.mktime(dt.utctimetuple()) - time.timezone)
    else:
        nextproducetupletime=long(time.time())
    lines = []
    cur = envars['db'].cursor()
    q = cur.execute(query, parse=False)
    schema = list(cur.getdescriptionsafe())
    # Rename any pre-existing 'timestamp' column so the generated one
    # does not collide with it.
    for x in range(len(schema)):
        if str(schema[x][0]).lower() == 'timestamp':
            schema[x] = ('timestamp1', 'text')
    try:
        yield [('timestamp', 'text')] + schema
    except StopIteration:
        # Consumer closed early: re-raise but make sure the cursor closes.
        try:
            raise
        finally:
            try:
                cur.close()
            except:
                pass
    if not output:
        # Start partway through the data, keyed off the start time.
        numoflines=sum(1 for x in self.getDataGen(q)) - 1
        nextproducetuple = int(nextproducetupletime) % int(float(numoflines))
    # For ever
    simtuple = []
    while True:
        try:
            q = cur.execute(query, parse=False)
            dataGen = self.getDataGen(q)
            # Skip rows already "produced" before the starting offset.
            for x in range(nextproducetuple):
                dataGen.next()
            for secstuples in dataGen:
                # Sleep until the scheduled emission time for this batch.
                try:
                    time.sleep(float(int(nextproducetupletime)-float(time.time().real)))
                except IOError:
                    pass
                # For every tuple in second
                for line in secstuples:
                    simtuple[:] = [datetime.datetime.utcfromtimestamp(nextproducetupletime).strftime('%Y-%m-%dT%H:%M:%S+00:00')]
                    for value in line:
                        simtuple.append(value)
                    yield simtuple
                nextproducetupletime += 1
            nextproducetuple=0
        except KeyboardInterrupt:
            break
def step(self, *args):
    """Parse one ISO-8601 timestamp argument and enqueue it on
    self.dates without blocking.

    Raises functions.OperatorError when called without arguments.
    """
    if not args:
        raise functions.OperatorError("mindtdiff","No arguments")
    parsed_date = iso8601.parse_date(args[0])
    self.dates.put_nowait(parsed_date)
def VTiter(self, *parsedArgs, **envars):
    """Replay the rows of a stored query as a simulated real-time
    stream: each emitted tuple is prefixed with a generated UTC ISO-8601
    'timestamp' column, and the generator sleeps so batches appear at
    wall-clock pace. Runs forever (re-executing the query) until
    KeyboardInterrupt.

    NOTE(review): indentation reconstructed from a collapsed source
    line — confirm nesting against the original layout.
    """
    largs, dictargs = self.full_parse(parsedArgs)
    quantum = None
    output = False
    nextproducetuple = 0  # The next tuple that must appear
    if 'query' not in dictargs:
        raise functions.OperatorError(
            __name__.rsplit('.')[-1], "No query argument ")
    query = dictargs['query']
    if 'ratio' in dictargs:
        self.ratio = float(dictargs['ratio'])
        if self.ratio >= 1:
            self.ratio = int(self.ratio)
        elif self.ratio <= 0:
            self.ratio = 1
        else:
            # Fractional ratios are only allowed when 1/ratio is integral.
            if (float(float(1) / float(self.ratio)) - int(float(1) / float(self.ratio))) != 0:
                raise functions.OperatorError(
                    __name__.rsplit('.')[-1],
                    "1/Ratio must be a not decimal number ")
    else:
        self.ratio = 1
    self.quantum = None
    if 'quantum' in dictargs:
        self.quantum = int(dictargs['quantum'])
        if self.quantum <= 0:
            self.quantum = 1
    if 'output' in dictargs:
        if str(dictargs['output']).lower() == 'same':
            output = True
    if 'starttimestamp' in dictargs:
        # Convert the supplied start timestamp to epoch seconds (UTC).
        dt = iso8601.parse_date(dictargs['starttimestamp'])
        nextproducetupletime = long(
            time.mktime(dt.utctimetuple()) - time.timezone)
    else:
        nextproducetupletime = long(time.time())
    lines = []
    cur = envars['db'].cursor()
    q = cur.execute(query, parse=False)
    schema = list(cur.getdescriptionsafe())
    # Rename any pre-existing 'timestamp' column so the generated one
    # does not collide with it.
    for x in range(len(schema)):
        if str(schema[x][0]).lower() == 'timestamp':
            schema[x] = ('timestamp1', 'text')
    try:
        yield [('timestamp', 'text')] + schema
    except StopIteration:
        # Consumer closed early: re-raise but make sure the cursor closes.
        try:
            raise
        finally:
            try:
                cur.close()
            except:
                pass
    if not output:
        # Start partway through the data, keyed off the start time.
        numoflines = sum(1 for x in self.getDataGen(q)) - 1
        nextproducetuple = int(nextproducetupletime) % int(
            float(numoflines))
    # For ever
    simtuple = []
    while True:
        try:
            q = cur.execute(query, parse=False)
            dataGen = self.getDataGen(q)
            # Skip rows already "produced" before the starting offset.
            for x in range(nextproducetuple):
                dataGen.next()
            for secstuples in dataGen:
                # Sleep until the scheduled emission time for this batch.
                try:
                    time.sleep(
                        float(
                            int(nextproducetupletime) -
                            float(time.time().real)))
                except IOError:
                    pass
                # For every tuple in second
                for line in secstuples:
                    simtuple[:] = [
                        datetime.datetime.utcfromtimestamp(
                            nextproducetupletime).strftime(
                                '%Y-%m-%dT%H:%M:%S+00:00')
                    ]
                    for value in line:
                        simtuple.append(value)
                    yield simtuple
                nextproducetupletime += 1
            nextproducetuple = 0
        except KeyboardInterrupt:
            break