def _user_tzinfo(user):
    """Build a fixed-offset tzinfo from the user's stored UTC offset.

    The ``timeZone`` field holds the offset in milliseconds; ``tzoffset``
    expects seconds, hence the division by 1000.
    """
    return tz.tz.tzoffset('TZONE', int(user['timeZone']) / 1000)


def _generate_meal_plan(user, assignedMealIds):
    """Generate a fresh meal plan honouring the user's food preference.

    :param user: User document (reads foodPreference / medicalCondition)
    :param assignedMealIds: ids of currently assigned meals, excluded by the
        generators so the user does not get the same meals again
    :return: list of generated meals
    """
    usersMedicalCondition = user['medicalCondition']
    if user['foodPreference'] == 'Vegan':
        generated_plan = vegan_meals(assignedMealIds, usersMedicalCondition)
        print("Vegan Count : " + str(len(generated_plan)))
    elif user['foodPreference'] == 'Vegetarian':
        generated_plan = vegetarian_meals(assignedMealIds, usersMedicalCondition)
        print("Vege Count : " + str(len(generated_plan)))
    else:
        generated_plan = non_veg_meals(assignedMealIds, usersMedicalCondition)
        print("Non-Veg Count : " + str(len(generated_plan)))
    return generated_plan


def getMeals(userId):
    """Return the user's current meal plan, regenerating it when expired or absent.

    :param userId: primary key of the User document
    :return: (flask response, HTTP status) tuple
    """
    try:
        user = User.objects(id=userId).get()
        newMealsFlag = False
        assignedMealIds = [meal['id'] for meal in (user['mealAssigned'] or [])]
        if user and user['mealExpiry']:
            # Check for meal plan expiry in the user's local timezone.
            tzinf = _user_tzinfo(user)
            localCurrentTime = utils.default_tzinfo(datetime.now(), tzinf)
            if localCurrentTime > utils.default_tzinfo(user['mealExpiry'], tzinf):
                newMealsFlag = True
            else:
                # Plan still valid: return the currently assigned meals.
                try:
                    meals = Meal.objects(id__in=assignedMealIds)
                    return jsonify(meals), status.HTTP_200_OK
                except Exception as e:
                    print('Error while getting meals ' + format(e))
                    return jsonify({'stat': 'Some error occurred'}), status.HTTP_500_INTERNAL_SERVER_ERROR
        else:
            newMealsFlag = True

        # ******* New Meal Assignment Starts Here *******
        if newMealsFlag:
            generated_plan = _generate_meal_plan(user, assignedMealIds)
            # ****** Assigning generated plan to the user ******
            # The new plan expires at 05:00 local time on the following day.
            tzinf = _user_tzinfo(user)
            localCurrentTime = utils.default_tzinfo(datetime.now(), tzinf)
            expiryTime = localCurrentTime + timedelta(days=1)
            mealExpiry = utils.default_tzinfo(
                expiryTime.replace(hour=5, minute=0, second=0, microsecond=0), tzinf)
            user.modify(mealExpiry=mealExpiry, mealAssigned=generated_plan)
            return jsonify(user['mealAssigned']), status.HTTP_200_OK
    except Exception as e:
        # NOTE(review): returning format(e) leaks internal exception details to
        # the client; kept for backward compatibility but consider a generic
        # error body instead.
        print("Error occurred in meal assignment : " + format(e))
        return format(e), status.HTTP_500_INTERNAL_SERVER_ERROR
def _get_daily_price(self, fund, date=None):
    """Fetch a fund's daily NAV from the eastmoney f10 API.

    :param fund: ticker of the form "F<code>", e.g. "F000001"
    :param date: optional date; when omitted the latest record is fetched
    :return: source.SourcePrice in CNY, or None when no record exists
    """
    assert fund[0] == "F"
    params = {
        "callback": "thecallback",
        "fundCode": fund[1:],
        "pageIndex": 1,
        "pageSize": 1,
    }
    if date is not None:
        # Restrict the query to the single requested trading day.
        dt_str = date.strftime("%Y-%m-%d")
        params.update({
            "startDate": dt_str,
            "endDate": dt_str,
        })
    resp = self.http.get(
        "https://api.fund.eastmoney.com/f10/lsjz",
        params=params,
        headers={
            # NOTE(review): "{fund}" is a literal here (missing f-string?) —
            # kept as-is since the intended substitution is ambiguous.
            "Referer": "http://fundf10.eastmoney.com/jjjz_{fund}.html"
        }
    )
    assert resp.status_code == 200, resp.text
    # Response is JSONP: thecallback({...}) — strip the wrapper.
    # Raw string fixes the invalid "\(" escape warning of the original.
    result_str = next(re.finditer(r"thecallback\((.*)\)", resp.text)).groups()[0]
    result = json.loads(result_str)
    records = result["Data"]["LSJZList"]
    if len(records) == 0:
        return None  # no NAV published for the requested date
    trade_date = records[0]["FSRQ"]
    nav = D(records[0]["DWJZ"]).quantize(D('1.000000000000000000'))
    trade_date = datetime.strptime(trade_date, "%Y-%m-%d")
    trade_date = utils.default_tzinfo(trade_date, CN_TZ)
    return source.SourcePrice(nav, trade_date, 'CNY')
def iso_to_date(date: str) -> datetime.datetime:
    """Convert an ISO 8601 date string into a datetime.

    :param date: ISO date
    :return: datetime with the default timezone applied when none is given
    """
    parsed = parse(date)
    return default_tzinfo(parsed, _DEFAULT_TZ)
def is_course_run_enrollable(course_run):
    """Check whether a course run is currently open for enrollment.

    A run is enrollable when: its end date is unset or in the future, its
    enrollment start is unset or in the past, and its enrollment end is
    unset or in the future.
    """
    def _utc_field(key):
        # Parse the date field as UTC; propagate unset/empty values as-is.
        raw = course_run.get(key)
        return default_tzinfo(parse(raw), pytz.UTC) if raw else raw

    end = _utc_field('end')
    enrollment_start = _utc_field('enrollment_start')
    enrollment_end = _utc_field('enrollment_end')
    current_time = now()
    if end and end <= current_time:
        return False
    if enrollment_start and enrollment_start > current_time:
        return False
    return not enrollment_end or enrollment_end > current_time
def get_historical_price(self, ticker, date):
    """Fetch the single daily (1440-minute) candle covering *date* and
    return its close price, or None on any failure."""
    try:
        currency, pair = asset_pair(ticker)
        day_start = utils.default_tzinfo(
            datetime.combine(date, datetime.min.time()), tz.UTC)
        day_end = datetime.combine(date, datetime.max.time())
        # Candle API takes millisecond timestamps.
        candles = pair.candles(1440,
                               1000 * int(day_start.timestamp()),
                               1000 * int(day_end.timestamp()))
        assert len(candles) == 1
        candle = candles[0]
        price = D(candle['close']).quantize(D('1.000000000000000000'))
        trade_date = utils.default_tzinfo(
            datetime.fromtimestamp(candle['timestamp'] / 1000), tz.UTC)
        return source.SourcePrice(
            D('0') if price == 0 else price, trade_date, currency)
    except Exception:
        logging.error(traceback.format_exc())
        return None
def get_timestamp_ms(value, timestamp_name="timestamp", is_parse_timestamp_only=False):
    """Normalize *value* to a millisecond timestamp.

    :param value: dict or object carrying a timestamp field, a datetime,
        a numeric epoch (seconds or milliseconds), or a string (numeric
        text or datetime text)
    :param timestamp_name: attribute/key read from complex values
    :param is_parse_timestamp_only: when True, skip datetime objects and
        datetime-text parsing — only numeric values are accepted
    :return: timestamp in ms (int when integral), or None when unparseable
    """
    if not value and value != 0:
        return None
    result = None
    # Unwrap complex containers down to the raw timestamp value.
    if isinstance(value, dict):
        value = value.get(timestamp_name)
    elif hasattr(value, timestamp_name) and not isinstance(value, datetime):
        value = getattr(value, timestamp_name)
    if not is_parse_timestamp_only and isinstance(value, datetime):
        # Naive datetimes are treated as UTC before converting to epoch.
        value = default_tzinfo(value, tzutc())
        value = value.timestamp()
    # Scalar conversion.
    if isinstance(value, (int, float)):
        result = value
    elif isinstance(value, str):
        try:
            result = float(value)
        except ValueError:
            if not is_parse_timestamp_only:
                try:
                    dt = parser.parse(value)
                    dt = default_tzinfo(dt, tzutc())
                    result = dt.timestamp() * 1000
                except Exception:
                    # Was a bare "except:" which also swallowed
                    # KeyboardInterrupt/SystemExit; narrowed to Exception.
                    logging.exception(
                        "Error while parsing %s as datetime. Return None.",
                        value)
                    result = None
    # Heuristic: values below 1.5e10 are epoch seconds — convert to ms.
    if result is not None and result < 1500000000 * 10:
        result *= 1000
    if result is None:
        return None
    # Collapse integral floats to int for a clean return value.
    result_int = int(result) if isinstance(result, float) else result
    return result_int if result_int == result else result
def as_utc(dt: datetime) -> datetime:
    """Return *dt* expressed in UTC.

    Naive inputs are assumed to already be in UTC; aware inputs are
    adjusted so the same instant is expressed in the UTC timezone.

    Args:
    - dt: The datetime

    Returns:
        A datetime with the same timestamp and tzinfo set to UTC
    """
    aware = utils.default_tzinfo(dt, tz.UTC)
    return aware.astimezone(tz.UTC)
def parseDateString(datetime_string, dflt_tz_string=None):
    """Parse a datetime string, optionally applying a default timezone.

    :param datetime_string: free-form datetime text understood by dateutil
    :param dflt_tz_string: optional timezone name applied when the string
        carries no timezone of its own
    :return: parsed datetime, or None when parsing fails
    """
    if dflt_tz_string is None:  # was "== None"; identity check is idiomatic
        try:
            return parse(datetime_string)
        except Exception:  # was a bare except; kept best-effort -> None
            return None
    dflt_tz = tz.gettz(dflt_tz_string)
    try:
        return default_tzinfo(parse(datetime_string), dflt_tz)
    except Exception:
        return None
def cast_date(*, value: str, params: str, shared: dict) -> datetime:
    """Cast date/datetime string into object.

    :param value: Current value.
    :param params: Additional value-related parameters.
    :param shared: Global shared parameters.
    """
    key, _, val = params.partition('=')
    if key == 'TZID':
        zone = val
    else:
        zone = shared['tz'] or 'UTC'
    return default_tzinfo(parse_datestr(value), gettz(zone))
def _get_daily_price(self, ticker, date=None):
    """Fetch one daily kline bar from xueqiu and return its close price.

    Ticker format is "REGION:SYMBOL" with region one of HK, CN, US; the
    region picks the exchange timezone and quote currency.
    """
    region, symbol = ticker.split(":", 1)
    if region in {"HK", "CN"}:
        exchange_tz = CN_TZ
        currency = "HKD" if region == "HK" else "CNY"
    else:
        assert region == "US"
        exchange_tz = NY_TZ
        currency = "USD"
    # Anchor the query at "now" or at the end of the requested day,
    # localized to the exchange timezone.
    if date is None:
        trade_date = utils.default_tzinfo(datetime.now(), exchange_tz)
    else:
        trade_date = utils.default_tzinfo(
            datetime.combine(date, datetime.max.time()), exchange_tz)
    begin = int(time.mktime(trade_date.timetuple())) * 1000
    url = ("https://stock.xueqiu.com/v5/stock/chart/kline.json?"
           f"symbol={symbol}&begin={begin}&period=day&"
           "type=before&count=-1&indicator=kline")
    resp = self.http.get(url, headers=self.headers)
    assert resp.status_code == 200, resp.text
    result = resp.json()
    assert result["error_code"] == 0, result["error_description"]
    bar = result["data"]["item"][0]
    assert result["data"]["column"] == EXPECTED_COLS
    returned_ts = bar[EXPECTED_COLS.index("timestamp")]
    close_price = D(bar[EXPECTED_COLS.index("close")]).quantize(
        D('1.000000000000000000'))
    trade_date = datetime.fromtimestamp(returned_ts / 1000, exchange_tz)
    return source.SourcePrice(close_price, trade_date, currency)
def get_latest_price(self, ticker):
    """Return the latest 24h-close price for *ticker*, or None on failure."""
    try:
        currency, pair = asset_pair(ticker)
        ticker = pair.ticker()
        # The exchange signals failures inside the payload.
        if 'status' in ticker and ticker['status'] == 'error':
            logging.error("%s", ticker['message'])
            return None
        logging.info("Ticker %s", ticker)
        price = D(ticker['24h_close']).quantize(D('1.000000000000000000'))
        # Exchange timestamp is in milliseconds.
        trade_date = utils.default_tzinfo(
            datetime.fromtimestamp(ticker['timestamp'] / 1000), tz.UTC)
        if price == 0:
            price = D('0')
        return source.SourcePrice(price, trade_date, currency)
    except Exception:
        logging.error(traceback.format_exc())
        return None
def get_latest_price(self, ticker):
    """Fetch the current COMMODITY:CURRENCY price from cryptocompare,
    stamped with the local current time; None on HTTP failure."""
    commodity, currency = ticker.split(':')
    url = 'https://min-api.cryptocompare.com/data/price?fsym={}&tsyms={}'.format(
        commodity, currency)
    logging.info("Fetching %s", url)
    try:
        response = net_utils.retrying_urlopen(url)
        if response is None:
            return None
        payload = json.loads(response.read().decode('utf-8').strip())
    except error.HTTPError:
        return None
    price = D(payload[currency]).quantize(D('1.000000000000000000'))
    trade_date = utils.default_tzinfo(datetime.now(), tz.gettz())
    return source.SourcePrice(
        D('0') if price == 0 else price, trade_date, currency)
def get_historical_price(self, ticker, date):
    """Fetch the COMMODITY:CURRENCY price at the end of *date* (UTC) from
    cryptocompare; None on HTTP failure."""
    commodity, currency = ticker.split(':')
    trade_date = utils.default_tzinfo(
        datetime.combine(date, datetime.max.time()), tz.UTC)
    ts = int(time.mktime(trade_date.timetuple()))
    url = ('https://min-api.cryptocompare.com/data/pricehistorical'
           '?fsym={}&tsyms={}&ts={}').format(commodity, currency, ts)
    logging.info("Fetching %s", url)
    try:
        response = net_utils.retrying_urlopen(url)
        if response is None:
            return None
        payload = json.loads(response.read().decode('utf-8').strip())
    except error.HTTPError:
        return None
    price = D(payload[commodity][currency]).quantize(
        D('1.000000000000000000'))
    return source.SourcePrice(
        D('0') if price == 0 else price, trade_date, currency)
def _get_daily_price(self, ticker, date=None):
    """Fetch a fiat FX rate for *ticker* = BASE+SYMBOL (e.g. "EURUSD").

    Queries exchangeratesapi.io for the given day, or "latest" when no
    date is supplied, and prices SYMBOL in units of BASE.
    """
    assert len(ticker) == 6, ticker
    base, symbol = ticker[:3], ticker[3:]
    date_str = "latest" if date is None else date.strftime("%Y-%m-%d")
    resp = self.http.get("https://api.exchangeratesapi.io/" + date_str,
                         params={
                             "symbols": symbol,
                             "base": base,
                         })
    result = resp.json()
    close_price = D(result["rates"][symbol]).quantize(
        D('1.000000000000000000'))
    trade_date = utils.default_tzinfo(
        datetime.strptime(result["date"], "%Y-%m-%d"), tz.UTC)
    return source.SourcePrice(close_price, trade_date, base)
def testDefaultTZInfoAware(self):
    # An already-aware datetime must keep its original tzinfo.
    aware = datetime(2014, 9, 14, 9, 30, tzinfo=UTC)
    self.assertIs(utils.default_tzinfo(aware, NYC).tzinfo, UTC)
def testDefaultTZInfoNaive(self):
    # A naive datetime must pick up the supplied default tzinfo.
    naive = datetime(2014, 9, 14, 9, 30)
    self.assertIs(utils.default_tzinfo(naive, NYC).tzinfo, NYC)
def tz_last_updated(self):
    """Return last_updated as a timezone-aware datetime (naive -> UTC)."""
    last = self.last_updated
    return default_tzinfo(last, tzutc())
def string2dt(self, s, lower_bound=None):
    """Return datetime from a given string.

    Interprets constants (start/end/today/yesterday/now), weekday names,
    relative offsets (e.g. "-2h"), and free-form datetime text, clamped to
    the logfile's [self.start, self.end] window where sensible.

    :param s: the string to interpret; '' yields start (or end when
        lower_bound is truthy)
    :param lower_bound: truthy when this string is the upper bound of a
        range; affects the defaults used for empty/time-only input
    :raises ValueError: when the remaining text cannot be parsed
    """
    original_s = s
    result = {}
    dt = None

    # if s is completely empty, return start or end,
    # depending on what parameter is evaluated
    if s == '':
        return self.end if lower_bound else self.start

    # first try to match the defined regexes
    for idx in self.dtRegexes:
        regex = self.dtRegexes[idx]
        mo = regex.search(s)
        # if match was found, cut it out of original string and
        # store in result
        if mo:
            result[idx] = mo
            s = s[:mo.start(0)] + s[mo.end(0):]

    # handle constants
    if 'constant' in result:
        constant = result['constant'].group(0).strip()
        if constant == 'end':
            dt = self.end
        elif constant == 'start':
            dt = self.start
        elif constant == 'today':
            dt = datetime.now().replace(hour=0, minute=0,
                                        second=0, microsecond=0)
        elif constant == 'yesterday':
            dt = datetime.now().replace(hour=0, minute=0, second=0,
                                        microsecond=0) - timedelta(days=1)
        elif constant == 'now':
            dt = datetime.now()
    elif 'weekday' in result:
        weekday = result['weekday'].group(0).strip()
        # assume most-recently occured weekday in logfile
        most_recent_date = self.end.replace(hour=0, minute=0,
                                            second=0, microsecond=0)
        offset = (most_recent_date.weekday()
                  - self.weekdays.index(weekday)) % 7
        dt = most_recent_date - timedelta(days=offset)

    # if anything remains unmatched, try parsing it with dateutil's parser
    if s.strip() != '':
        try:
            if dt:
                # NOTE(review): tzutc is passed uncalled here (vs tzutc()
                # elsewhere in this codebase) — looks suspicious; confirm
                # what default_tzinfo expects before changing.
                dt = default_tzinfo(parser.parse(s, default=dt), tzutc)
            else:
                # check if it's only time, then use the start dt as
                # default, else just use the current year
                if re.match(r'(?P<hour>\d{1,2}):(?P<minute>\d{2,2})'
                            r'(?::(?P<second>\d{2,2})'
                            r'(?:.(?P<microsecond>\d{3,3}))?)?'
                            r'(?P<timezone>[0-9Z:\+\-]+)?$', s):
                    default = self.end if lower_bound else self.start
                else:
                    default = datetime(self.end.year, 1, 1, 0, 0, 0)
                default = default.replace(second=0, microsecond=0)
                dt = parser.parse(s, default=default)
        except ValueError:
            raise ValueError("Error in DateTimeBoundaries: "
                             "can't parse datetime from %s" % s)

    if not dt:
        dt = lower_bound or self.end

    # if no timezone specified, use the one from the logfile
    if dt.tzinfo is None:
        dt = dt.replace(tzinfo=self.start.tzinfo)

    # apply offset
    if 'offset' in result:
        # separate in operator, value, unit
        dct = result['offset'].groupdict()
        mult = 1
        if dct['unit'] in ['s', 'sec', 'secs']:
            dct['unit'] = 'seconds'
        elif dct['unit'] in ['m', 'min', 'mins']:
            dct['unit'] = 'minutes'
        elif dct['unit'] in ['h', 'hour', 'hours']:
            dct['unit'] = 'hours'
        elif dct['unit'] in ['d', 'day', 'days']:
            dct['unit'] = 'days'
        elif dct['unit'] in ['w', 'week', 'weeks']:
            dct['unit'] = 'days'
            mult = 7
        elif dct['unit'] in ['mo', 'month', 'months']:
            dct['unit'] = 'days'
            mult = 30.43
        elif dct['unit'] in ['y', 'year', 'years']:
            dct['unit'] = 'days'
            mult = 365.24
        if dct['operator'] == '-':
            mult *= -1
        # Build the timedelta via keyword unpacking instead of eval();
        # int() truncates exactly like the original "%i" formatting did.
        dt = dt + timedelta(**{dct['unit']: int(mult * int(dct['value']))})

    # if parsed datetime is out of bounds and no year specified,
    # try to adjust year
    year_present = re.search(r'\d{4,4}', original_s)
    if not year_present and 'constant' not in result:
        if (dt < self.start
                and dt.replace(year=dt.year + 1) >= self.start
                and dt.replace(year=dt.year + 1) <= self.end):
            dt = dt.replace(year=dt.year + 1)
        elif (dt > self.end
                and dt.replace(year=dt.year - 1) >= self.start
                and dt.replace(year=dt.year - 1) <= self.end):
            dt = dt.replace(year=dt.year - 1)
    return dt
""" r1 = requests.post("http://159.89.231.170/", json={'query': grant_pool}) r2 = requests.post("http://159.89.231.170/", json={'query': history}) resp1 = json.loads(r1.text) resp2 = json.loads(r2.text) current_grant_fcts = resp1["data"]["factoids"]["grantPool"] # All historical grant pool data sorted with most recent last. # Data is formatted as lists: [unix_ts_ms, grant_pool_fcts] data = resp2["data"]["history"]["factoids"]["grantPool"] # The first item will almost always be an incomplete day so use the next two # for calculating the most recent daily amount going into the grant pool. # We start at the end of the list to get the most recent values. daily_fct_to_grant_pool = float(data[-2][1]) - float(data[-3][1]) print(daily_fct_to_grant_pool) # Parse end date supplied by user and assume UTC end_time = utils.default_tzinfo(parser.parse(sys.argv[1]), default_tz).timestamp() current_time = datetime.now(pytz.utc).timestamp() days = (end_time - current_time) / 86400.0 future_fct_amount = current_grant_fcts + daily_fct_to_grant_pool * days print(future_fct_amount)
def test_utils_default_tz_info_naive():
    # A naive datetime must pick up the supplied default tzinfo.
    naive = datetime(2014, 9, 14, 9, 30)
    assert utils.default_tzinfo(naive, NYC).tzinfo is NYC
def import_gpx(gpx_file, wpt_fc, trk_fc):
    """Import a GPX file's waypoints and tracks into feature classes.

    :param gpx_file: path to the GPX file to read
    :param wpt_fc: waypoint (point) feature class to create/fill, or falsy to skip
    :param trk_fc: track (polyline) feature class to create/fill, or falsy to skip
    """
    GCS_WGS_84 = arcpy.SpatialReference(4326)
    GCS_TRANSFORMS = 'WGS_1984_(ITRF08)_To_NAD_1983_2011; NAD_1927_To_NAD_1983_NADCON'

    # Only set the default transformations when the environment has none.
    arcpy.env.geographicTransformations = arcpy.env.geographicTransformations or GCS_TRANSFORMS
    arcpy.AddMessage('Geographic Transformations: %s' % arcpy.env.geographicTransformations)

    scratch = arcpy.env.scratchWorkspace
    arcpy.env.addOutputsToMap = False

    # (output field name, GPX element path) pairs, in output column order.
    WPT_FIELDS = [
        ('ELEVATION', 'gpx:ele'),
        ('TIME', 'gpx:time'),
        ('NAME', 'gpx:name'),
        ('DESCRIPTION', 'gpx:desc'),
        ('SYMBOL', 'gpx:sym'),
        ('TYPE', 'gpx:type'),
        ('SAMPLES', 'gpx:extensions/wptx1:WaypointExtension/wptx1:Samples')
    ]

    # XML namespaces used by GPX 1.1 plus Garmin extensions.
    ns = {
        'gpx': 'http://www.topografix.com/GPX/1/1',
        'gpxx': 'http://www.garmin.com/xmlschemas/GpxExtensions/v3',
        'wptx1': 'http://www.garmin.com/xmlschemas/WaypointExtension/v1',
        'ctx': 'http://www.garmin.com/xmlschemas/CreationTimeExtension/v1',
    }

    etree.register_namespace('', 'http://www.topografix.com/GPX/1/1')
    etree.register_namespace('gpxx', 'http://www.garmin.com/xmlschemas/GpxExtensions/v3')
    etree.register_namespace('wptx1', 'http://www.garmin.com/xmlschemas/WaypointExtension/v1')
    etree.register_namespace('ctx', 'http://www.garmin.com/xmlschemas/CreationTimeExtension/v1')

    gpx = etree.parse(gpx_file).getroot()

    # NOTE(review): sr may be None here but is used by projectAs() below;
    # the explicit None check only happens later in the track branch — confirm.
    sr = arcpy.env.outputCoordinateSystem

    if wpt_fc:
        create_points_feature_class(wpt_fc, sr)
        waypoints = []
        for wpt in gpx.findall('gpx:wpt', ns):
            x, y = wpt.get('lon'), wpt.get('lat')
            # Geometry is read in WGS84 then projected to the output SR.
            row = [arcpy.PointGeometry(arcpy.Point(x, y), GCS_WGS_84).projectAs(sr)]
            for field, tag in WPT_FIELDS:
                elem = wpt.find(tag, ns)
                if elem is None:
                    row.append(None)
                elif field == 'ELEVATION':
                    # Convert meters to the output SR's linear unit.
                    row.append('%0.4f' % (float(elem.text) / sr.metersPerUnit))
                elif field == 'NAME' and elem.text.isdigit():
                    # Normalize purely numeric names (drops leading zeros).
                    row.append('%d' % int(elem.text))
                else:
                    row.append(elem.text)
            waypoints.append(row)
        if waypoints:
            fields = ['SHAPE@'] + [f[0] for f in WPT_FIELDS]
            cur = arcpy.da.InsertCursor(wpt_fc, fields)
            for row in waypoints:
                cur.insertRow(row)
            # Release the cursor so the feature class is unlocked.
            del cur

    if trk_fc:
        # idle time between trkpts to start a new track segment
        TRKSEG_IDLE_SECS = 600

        tracks = []
        track_num = 0
        for trk in gpx.findall('gpx:trk', ns):
            track_num += 1
            elem = trk.find('gpx:name', ns)
            if elem is None:
                track_name = 'track-%04d' % track_num
            else:
                track_name = elem.text
            track_pts = []
            dt_last = None
            segment_num = 0
            for trkpt in trk.findall('./gpx:trkseg/gpx:trkpt', ns):
                x, y = trkpt.get('lon'), trkpt.get('lat')
                pt = arcpy.PointGeometry(arcpy.Point(x, y), GCS_WGS_84).projectAs(sr).firstPoint

                # See if there's a track point time
                elem = trkpt.find('gpx:time', ns)
                if elem is None:
                    dt_last = None
                else:
                    # Timestamps are parsed as UTC when no zone is given.
                    dt = utils.default_tzinfo(parser.parse(elem.text), tz.UTC)
                    if dt_last and (dt - dt_last).seconds > TRKSEG_IDLE_SECS:
                        # start a new segment: flush the accumulated points
                        if len(track_pts) > 1:
                            segment_num += 1
                            if segment_num > 1:
                                segment_name = '%s SEG-%04d' % (track_name, segment_num)
                            else:
                                segment_name = track_name
                            geom = arcpy.Polyline(arcpy.Array(track_pts), sr)
                            tracks.append([geom, segment_name, len(track_pts)])
                        else:
                            arcpy.AddMessage('Skipping track "%s": track_pts=%d' %
                                             (track_name, len(track_pts)))
                        track_pts = []
                    dt_last = dt
                track_pts.append(pt)

            # Flush the final (or only) segment of this track.
            if len(track_pts) > 1:
                segment_num += 1
                if segment_num > 1:
                    segment_name = '%s SEG-%04d' % (track_name, segment_num)
                else:
                    segment_name = track_name
                geom = arcpy.Polyline(arcpy.Array(track_pts), sr)
                tracks.append([geom, segment_name, len(track_pts)])
            else:
                arcpy.AddMessage('Skipping track "%s": track_pts=%d' %
                                 (track_name, len(track_pts)))

        if tracks:
            temp_fc = os.path.join(scratch, os.path.basename(trk_fc) + '_Temp')
            if sr is None:
                arcpy.AddError('Geoprocessing environment not set: outputCoordinateSystem')
                return None
            # Build in a scratch location first, then copy to the destination.
            fc = mgmt.CreateFeatureclass(*os.path.split(temp_fc),
                                         geometry_type='POLYLINE',
                                         spatial_reference=sr)
            mgmt.AddField(fc, 'NAME', 'TEXT', field_length=64)
            mgmt.AddField(fc, 'POINTS', 'LONG')
            cur = arcpy.da.InsertCursor(fc, ('SHAPE@', 'NAME', 'POINTS'))
            for row in tracks:
                cur.insertRow(row)
            # Release the cursor before copying the temp feature class.
            del cur
            mgmt.CopyFeatures(temp_fc, trk_fc)
            del fc
def test_utils_default_tz_info_aware():
    # An already-aware datetime must keep its original tzinfo.
    aware = datetime(2014, 9, 14, 9, 30, tzinfo=UTC)
    assert utils.default_tzinfo(aware, NYC).tzinfo is UTC