def build_days(self):
    """Write one 'Day' record to the Redis cache for every calendar date
    between January 1st of the epoch year and December 31st of the end year.
    """
    first_date = dtdate(self.epoch_year, 1, 1)  # could also do self.years[0]
    final_date = dtdate(self.end_year, 12, 31)  # could also do self.years[-1]
    total_written = 0
    for current_date in date_range(first_date, final_date):
        # Calculate the week number for this date.
        week_year, week_number = Internals.date_to_week_tuple(current_date, verbose=self.debug_mode)
        day_dict = {
            'date': current_date,
            'date_as_string': current_date.strftime("%Y-%m-%d"),
            'weekday_name': current_date.strftime("%A"),
            'weekday_name_short': current_date.strftime("%a"),
            'day_of_month': current_date.strftime("%d"),
            # NOTE(review): despite the '_int' suffix, strftime returns a
            # zero-padded string here — preserved as-is for cache consumers.
            'month_in_year_int': current_date.strftime("%m"),
            'month_in_year_str': current_date.strftime("%B"),
            'year': current_date.year,
            'day_of_year': current_date.strftime("%j"),
            'week_year': week_year,
            'week_number': week_number,
            'index_in_week': int(current_date.strftime("%w")) + 1,  # 1-based indexing
        }
        # Write this dictionary in the Redis cache:
        temporal_redis.write_single_day(day_dict)
        total_written += 1
    if self.debug_mode:
        print(f"\u2713 Created {total_written} Temporal Day keys in Redis.")
def _in_cache(self, base, symbols, date=None):
    """
    Checks to see if the specified rates have already been retrieved
    and are in the cache.

    :param base: base currency code the rates are quoted against.
    :param symbols: iterable of currency codes to look for, or None to
        check the full currency set valid for ``date``.
    :param date: 'yyyy-mm-dd' date key; None means "resolve to the current
        fixer.io update date at call time".
    :return: True if every requested rate is cached, else False.

    Bug fix: the previous default ``date=_date()`` was evaluated once at
    import time, so a long-running process kept probing a stale date key.
    """
    if date is None:
        date = _date()
    try:
        if symbols is None:
            # Parse 'yyyy-mm-dd' and pick the currency set fixer.io
            # actually published in that era (ISK/ILS were added later).
            in_date = dtdate(*(int(x.lstrip('0')) for x in date.split('-')))
            if in_date < dtdate(2011, 1, 3):
                symbols = list(CURRENCIES.difference({base, 'ISK', 'ILS'}))
            elif in_date < dtdate(2018, 2, 1):
                symbols = list(CURRENCIES.difference({base, 'ISK'}))
            else:
                symbols = list(CURRENCIES.difference({base}))
        symbols = _format_currency(symbols)
        # Guard clauses: nothing cached for this base/date at all.
        if base not in self._cache or date not in self._cache[base]:
            return False
        cached_rates = self._cache[base][date]
        if symbols is None:
            # No explicit symbols: require the whole non-base currency set.
            return CURRENCIES.difference({base}).issubset(cached_rates.keys())
        return all(symbol in cached_rates for symbol in symbols)
    except KeyError:
        return False
def runGame(self, game):
    """Replay one match row, updating both player profiles game-by-game.

    ``game`` is a dict built from a CSV row (already raw; cleaned here).
    Returns a dict with sampled model features ('features') and the
    corresponding per-game outcomes ('matches') for training.
    """
    game = self.cleaner.cleanRow(game)
    # Race columns may be absent from older rows — default to None.
    race1 = game.get('Player1Race')
    race2 = game.get('Player2Race')
    idx1 = self.findProfileIdx(name=game['Player1'], race=race1, country=game['Player1Region'])
    idx2 = self.findProfileIdx(name=game['Player2'], race=race2, country=game['Player2Region'])
    out = {'features': [], 'matches': []}
    shuffledMatches = self.shuffleGames(int(game['Score1']), int(game['Score2']))
    # Manual parse is more efficient than strptime given we know the format
    # (whitespace-separated with month name at d[1], day at d[2], year at d[3]).
    d = game['Date'].split()
    date = dtdate(year=int(d[3]), month=self.months[d[1]], day=int(d[2]))
    # Check decay
    if self.decay:
        self.profiles[game['Player1']][idx1].checkDecay(date)
        self.profiles[game['Player2']][idx2].checkDecay(date)
    for match in shuffledMatches:
        # Features are captured BEFORE the profiles are updated, mirroring
        # real prediction order: prediction happens before the game,
        # profile updates after it.
        if random.random() <= self.keepPercent:
            out['features'].append(
                self.model.getFeatures(
                    self.profiles[game['Player1']][idx1],
                    self.profiles[game['Player2']][idx2]))
            out['matches'].append(match)
        self.profiles[game['Player1']][idx1].updateProfile(
            date, self.profiles[game['Player2']][idx2], match[0])
        self.profiles[game['Player2']][idx2].updateProfile(
            date, self.profiles[game['Player1']][idx1], match[1])
    return out
def max_min(self, year, month=None):
    """Returns a lower date and a upper date from year or month.

    With only ``year``: bounds span the whole year. With ``month``: bounds
    span that single month. Either bound is capped at today's date.
    """
    year = int(year)
    if month is None:
        first_month, last_month = 1, 12
    else:
        first_month = last_month = int(month)
    today = dtdate.today()
    lower = dtdate(year, first_month, 1)
    upper = dtdate(year, last_month, calendar.monthrange(year, last_month)[1])
    # Never return a bound in the future.
    return (min(lower, today), min(upper, today))
def date(self, value: str):
    """Normalize and store a date value.

    Underscores are treated as dashes. Two tokens are handed to the NASA
    date parser; three tokens become a ``date`` object; anything else is
    stored as the normalized string unchanged.
    """
    normalized = value.replace("_", "-")
    parts = normalized.split("-")
    if len(parts) == 2:
        normalized = utils.nasa_date_to_datetime(normalized)
    elif len(parts) == 3:
        normalized = dtdate(*(int(part) for part in parts))
    self._date = normalized
def getUniqueDays(self):
    """Return the unique calendar dates present in ``self.get_datetimes()``,
    as ``date`` objects, in first-seen order.

    Improvement: membership was tested against a growing list (O(n) per
    check, O(n^2) overall); a seen-set makes each check O(1).
    """
    seen = set()
    unique_dates = []
    for stamp in self.get_datetimes():
        key = (stamp.day, stamp.month, stamp.year)
        if key not in seen:
            seen.add(key)
            unique_dates.append(dtdate(key[2], key[1], key[0]))
    return unique_dates
def _format_date(date: str):
    """
    Takes string representation of a date in the format 'yyyy-mm-dd'
    and returns a date object. The LATEST sentinel and existing ``date``
    objects are passed through unchanged.

    :raises FixerioInvalidDate: if the string cannot be parsed.

    Bug fix: the error message previously said "yyy-mm-dd".
    """
    try:
        if date == LATEST:
            return date
        elif type(date) is dtdate:
            # Exact type check kept on purpose: datetime subclasses date,
            # and the original rejected datetimes via the parse branch.
            return date
        else:
            # Strip leading zeros/whitespace from each component so
            # e.g. ' 2020-07-04 ' parses cleanly.
            return dtdate(*[int(x.lstrip('0' + whitespace)) for x in date.strip().split('-')])
    except Exception as e:
        raise FixerioInvalidDate('Please enter a valid date in the format "yyyy-mm-dd"') from e
def build_year(self, year):
    """ Create a dictionary of Year metadata and write to Redis. """
    date_start = dtdate(year, 1, 1)
    date_end = dtdate(year, 12, 31)
    # What day of the week is January 1st?
    jan_one_dayname = date_start.strftime("%a").upper()
    year_dict = {
        'year': year,
        'date_start': date_start.strftime("%m/%d/%Y"),
        'date_end': date_end.strftime("%m/%d/%Y"),
        'days_in_year': (date_end - date_start).days + 1,
        'jan_one_dayname': jan_one_dayname,
    }
    try:
        short_names = tuple(day['name_short'] for day in self.weekday_names)
        # +1 converts the zero-based tuple index into a 1-based week position.
        year_dict['jan_one_weekpos'] = short_names.index(jan_one_dayname) + 1
    except ValueError as ex:
        raise ValueError(f"Could not find value '{jan_one_dayname}' in tuple 'self.weekday_names' = {self.weekday_names}") from ex
    # Get the maximum week number (52 or 53)
    year_dict['max_week_number'] = max(
        week['week_number'] for week in self.week_dicts if week['year'] == year)
    temporal_redis.write_single_year(year_dict, self.debug_mode)
def build_weeks(self):
    """ Build all the weeks between Epoch Date and End Date.

    Weeks run Sunday through Saturday; week 1 is the week containing
    January 1st, and a week belongs to the year its Saturday falls in.

    Fixes: redundant ``tuple(list(...))`` wrapper removed; typo
    'begining' corrected in the debug message.
    """
    # Begin on January 1st
    jan1_date = dtdate(self.epoch_year, 1, 1)
    jan1_day_of_week = int(jan1_date.strftime("%w"))  # day of week for January 1st (%w: 0 = Sunday)
    week_start_date = jan1_date - timedelta(days=jan1_day_of_week)  # if January 1st is not Sunday, back up.
    week_end_date = None
    week_number = None
    if self.debug_mode:
        print(f"Processing weeks beginning with calendar date: {week_start_date}")
    count = 0
    while True:
        # Stop once week_start_date's year exceeds the Maximum Year.
        if week_start_date.year > self.end_year:
            break
        week_end_date = week_start_date + timedelta(days=6)
        if (week_start_date.day == 1) and (week_start_date.month == 1):
            # Sunday is January 1st, it's a new year.
            week_number = 1
        elif week_end_date.year > week_start_date.year:
            # January 1st falls somewhere inside the week
            week_number = 1
        else:
            week_number += 1
        week_dict = {
            'year': week_end_date.year,
            'week_number': week_number,
            'week_start': week_start_date,
            'week_end': week_end_date,
            'week_dates': tuple(date_range(week_start_date, week_end_date)),
        }
        temporal_redis.write_single_week(week_dict)
        self.week_dicts.append(week_dict)  # internal object in Builder, for use later in build_years
        # Increment to the Next Week
        week_start_date = week_start_date + timedelta(days=7)
        count += 1
    # Loop complete.
    if self.debug_mode:
        print(f"\u2713 Created {count} Temporal Week keys in Redis.")
def jan1_next_year(self):
    """Return January 1st of the year after this date, wrapped in a TDate."""
    next_year = self.date.year + 1
    return TDate(dtdate(next_year, 1, 1))
def jan1(self):
    """Return January 1st of this date's year, wrapped in a TDate."""
    return TDate(dtdate(self.date.year, 1, 1))
# Frappe modules. import frappe from frappe import _, throw, msgprint, ValidationError # noqa F401 # Temporal from temporal import core from temporal import redis as temporal_redis # alias to distinguish from Third Party module # Constants __version__ = '13.0.1' # Epoch is the range of 'business active' dates. EPOCH_START_YEAR = 2020 EPOCH_END_YEAR = 2050 EPOCH_START_DATE = dtdate(EPOCH_START_YEAR, 1, 1) EPOCH_END_DATE = dtdate(EPOCH_END_YEAR, 12, 31) # These should be considered true Min/Max for all other calculations. MIN_YEAR = 2000 MAX_YEAR = 2201 MIN_DATE = dtdate(MIN_YEAR, 1, 1) MAX_DATE = dtdate(MAX_YEAR, 12, 31) # Module Typing: https://docs.python.org/3.8/library/typing.html#module-typing WEEKDAYS = ( { 'name_short': 'SUN', 'name_long': 'Sunday' }, { 'name_short': 'MON', 'name_long': 'Monday' }, { 'name_short': 'TUE', 'name_long': 'Tuesday' }, { 'name_short': 'WED', 'name_long': 'Wednesday' },
def get_organizations(self):
    """Yield the Saint Paul civic organizations: the city executive, the
    city council, the mayor, and one committee per meeting scraped from
    the city calendar pages for the dates in the module-level date_range.

    Fixes: ``not X in Y`` -> ``X not in Y``; repeated xpath call hoisted.
    """
    global date_range
    city = Organization('City of Saint Paul', classification='executive')
    city.add_post(
        'Mayor', 'Mayor',
        division_id='ocd-division/country:us/state:mn/place:st_paul')
    city.add_post(
        'City Clerk', 'City Clerk',
        division_id='ocd-division/country:us/state:mn/place:st_paul')
    yield city
    council = Organization(name="Saint Paul City Council",
                           classification="legislature", parent_id=city)
    for x in range(1, 8):
        council.add_post(
            "Ward {}".format(x), "Councilmember",
            division_id='ocd-division/country:us/state:mn/place:st_paul/ward:{}'.format(x))
    yield council
    carter = Person(name="Melvin Carter")
    carter.add_term('Mayor', 'executive',
                    start_date=dtdate(2018, 1, 19), appointment=True)
    carter.add_source('http://www.google.com')
    yield carter
    new_meetings = []
    temp_labels = []
    for date in date_range:
        print('Checking date:', date)
        root = requests.get("https://www.stpaul.gov/calendar/" + date)
        base = html.fromstring(root.text)
        items = base.xpath('.//*/div[@class="view-content"]/div')
        meetings = []
        for i in items:
            # Hoisted: only query the date span once per calendar item.
            date_texts = i.xpath('.//*/span[@class="date-display-single"]/text()')
            if len(date_texts) > 0:
                d = {}
                d['date'] = date_texts[0]
                d['info'] = i.xpath('.//*/span[@class="field-content"]/a/text()')[0]
                d['link'] = i.xpath('.//*/span[@class="field-content"]/a/@href')[0]
                meetings.append(d)
        for m in meetings:
            m['link'] = "https://www.stpaul.gov" + m['link']
        for m in meetings:
            r = requests.get(m['link'])
            b = html.fromstring(r.text)
            exists = b.xpath('.//div[@class="node-content clearfix"]')
            if len(exists) > 0:
                # Skip council/legislative/holiday entries; they are not committees.
                if ('City Council' not in m['info']
                        and 'Legislative' not in m['info']
                        and 'Holiday' not in m['info']):
                    m['name'] = m['info'].replace('Meeting', '').replace(
                        ' - Cancelled', '').replace('Events', '').strip()
                    # Deduplicate committees by cleaned name.
                    if m['name'] not in temp_labels:
                        temp_labels.append(m['name'])
                        new_meetings.append(m)
    print('Creating organizations')
    for m in new_meetings:
        print(m)
        cmt = Organization(name=m['name'],
                           classification='committee', parent_id=city)
        cmt.add_source(m['link'])
        yield cmt
def get_students_net(self):
    """Build, for each student, their lesson/debt timeline aligned to the
    class calendar, plus remaining passes and the latest comment.

    Returns a list of dicts: student, debts, lessons (one slot per
    calendar date: a lesson/debt object, a CanceledLesson, or None),
    pass_remaining, last_comment.

    Fixes (Python 2 -> 3): ``filter(...)`` results were used as lists
    (concatenated, indexed) and iterators were advanced with ``.next()``;
    both break on Python 3. Replaced with list comprehensions and
    ``next()``. Unused ``today`` local removed.
    """
    net = []
    for student in self.students:
        lessons = [les for les in self.lessons if les.student == student]
        lessons_dates = [les.date for les in lessons]
        debts = [debt for debt in self.debts
                 if debt.student == student
                 and debt.date not in lessons_dates
                 and debt.date >= self.date_1.date()]
        phantom_passes = [p for p in self.phantom_passes if p.student == student]
        comments = [c for c in self.comments if c.student == student]
        _net = []
        arr = lessons + debts
        arr.sort(key=lambda x: x.date)
        for p in phantom_passes:
            last_group_lesson = self.orm.last_lesson
            last_lesson_by_passes = arr[-1].date if arr else dtdate(1900, 1, 1)
            # Phantom lessons start after the latest of: the pass's bonus
            # class, the group's last lesson, and the last scheduled entry.
            temp_date = max(p.bonus_class.date, last_group_lesson, last_lesson_by_passes)
            phantom_lessons = [
                self.PhantomLesson(pl.date(), p)
                for pl in self.orm.get_calendar(p.lessons, temp_date)
                if self.date_1 <= pl <= self.date_2
            ]
            arr += phantom_lessons
        iterator = iter(arr)
        i_calendar = iter(self.calendar)
        try:
            obj = next(iterator)
            for date in i_calendar:
                # Skip entries that fall before this calendar date.
                while obj.date < date:
                    obj = next(iterator)
                if date in self.canceled_lessons:
                    _net.append(self.CanceledLesson())
                elif obj.date == date:
                    _net.append(obj)
                    obj = next(iterator)
                else:
                    _net.append(None)
        except StopIteration:
            # NOTE(review): the date being processed when the iterator runs
            # out gets no slot, so _net can be one short of the calendar —
            # preserved as-is; confirm whether intentional.
            _net += [None] * len(list(i_calendar))
        net.append(
            dict(
                student=student,
                debts=debts,
                lessons=_net,
                pass_remaining=self.all_available_lessons.get(student.pk, 0)
                               + sum([p.lessons for p in phantom_passes], 0),
                last_comment=comments[-1] if comments else None
            )
        )
    return net
# Configuration constants for the fixer.io exchange-rate client.
from fixerio.exceptions import FixerioInvalidCurrency
from fixerio.exceptions import FixerioCurrencyUnavailable
from string import whitespace

HTTP_BASE_URL = 'http://api.fixer.io/'
HTTPS_BASE_URL = 'https://api.fixer.io/'
# Modify BASE_URL if you want to use a different URL to fetch data
BASE_URL = HTTPS_BASE_URL

# Modify DEFAULT BASE if you want to use a different base when 'base' is
# omitted in the 'convert' or 'get_rates' methods
DEFAULT_BASE = 'USD'

# Sentinel meaning "the most recent rates" rather than a specific date.
LATEST = 'latest'

# Modify DEFAULT_DATE if you want use a different date when 'date' is
# omitted in the 'convert' or 'get_rates' methods
DEFAULT_DATE = LATEST

# NOTE(review): presumably the earliest date the API serves data for — confirm.
MIN_DATE = dtdate(1999, 1, 4)

UPDATE_TIME_UTC = 15  # rates are updated at 3pm UTC (10am ET)

# Modify SPECIFIC_CURRENCIES if you want to only retrieve a subset of
# ALL_CURRENCIES when 'symbols' is omitted in the 'get_rates' method.
# You need to also modify CURRENCIES
SPECIFIC_CURRENCIES = set()

ALL_CURRENCIES = {"AUD", "BGN", "BRL", "CAD", "CHF", "CNY", "CZK", "DKK",
                  "EUR", "GBP", "HKD", "HRK", "HUF", "IDR", "ILS", "INR",
                  "JPY", "KRW", "MXN", "MYR", "NOK", "NZD", "PHP", "PLN",
                  "RON", "RUB", "SEK", "SGD", "THB", "TRY", "USD", "ZAR"}

# Modify currencies to specify which currencies to retrieve when 'symbols'
# is omitted in the 'get_rates' method
CURRENCIES = ALL_CURRENCIES
def __init__(self, date, data):
    """Store a parsed date and its payload.

    :param date: 'mm/dd/yy' string; NOTE(review): the two-digit year is
        assumed to be 20xx — confirm against the data source.
    :param data: opaque payload stored as-is.
    """
    parts = date.split('/')
    self.date = dtdate(int("20" + parts[2]), int(parts[0]), int(parts[1]))
    self.data = data
]) ], style={ 'vertical-align': 'middle', 'margin-top': '10px', 'font-size': 10, 'font-family': 'Verdana', 'textAlign': 'center', 'color': colors['text'] }), html.Div( [ html.Label([ "Start Date", dcc.DatePickerSingle(id='my-date-picker-single', min_date_allowed=dtdate(2020, 1, 22), max_date_allowed=(max_date), initial_visible_month=dtdate(2021, 1, 1), date=dtdate(2021, 1, 1), style={ 'display': 'inline-block', 'margin-left': '10px' }) ]) ], style={ 'vertical-align': 'middle', 'margin-top': '10px', 'font-size': 10, 'font-family': 'Verdana', 'textAlign': 'center',
from fixerio3.utils import _csv_to_json from fixerio3.utils import read_from_file from fixerio3.utils import write_to_file from string import whitespace OPEN_BASE_URL = 'https://api.fixer.io/' FREE_BASE_URL = 'http://data.fixer.io/api/' PAID_BASE_URL = 'https://data.fixer.io/api/' # Modify DEFAULT BASE if you want to use a different base when 'base' is # omitted in the 'convert' or 'get_rates' methods DEFAULT_BASE = 'USD' LATEST = 'latest' # Modify DEFAULT_DATE if you want use a different date when 'date' is # omitted in the 'convert' or 'get_rates' methods DEFAULT_DATE = LATEST MIN_DATE = dtdate(1999, 1, 4) # DO NOT CHANGE/REMOVE THIS UPDATE_TIME_UTC = 15 # rates are updated at 3pm (15:00) UTC (10am ET) ALL_CURRENCIES = {"AUD", "BGN", "BRL", "CAD", "CHF", "CNY", "CZK", "DKK", "EUR", "GBP", "HKD", "HRK", "HUF", "IDR", "ILS", "INR", "ISK", "JPY", "KRW", "MXN", "MYR", "NOK", "NZD", "PHP", "PLN", "RON", "RUB", "SEK", "SGD", "THB", "TRY", "USD", "ZAR"} # Modify currencies to specify which currencies to retrieve when 'symbols' # is omitted in the 'get_rates' method CURRENCIES = ALL_CURRENCIES def _date(date=None): """ Returns the date fixer.io should've been last updated """ if (date == LATEST) or (date is None): if datetime.utcnow().time().hour < UPDATE_TIME_UTC: