def output_expr(self, date: datetime.date):
    """Render a quarter-start date as a 'YYYY-Qn' label.

    Only the first month of each quarter (Jan/Apr/Jul/Oct) is mapped;
    any other month yields None, matching the original behavior.
    """
    quarter_labels = {1: 'Q1', 4: 'Q2', 7: 'Q3', 10: 'Q4'}
    label = quarter_labels.get(date.month)
    if label is not None:
        return date.strftime('%Y-' + label)
def _get_export_form_data(form_page_soup: BeautifulSoup, acct_session_id: str, sptn: str,
                          from_date: datetime.date = None, to_date: datetime.date = None,
                          format: str = 'OFX', cycle: int = 0, iifaccount: str = '') -> dict:
    """
    Returns dict for POSTing to download transactions.

    :param form_page_soup: The soup of the page to extract fields from.
    :param acct_session_id: The per-session account number or id.
    :param sptn: The per-session identifier.
    :param from_date: The starting date to download transactions from. Required
        (and only valid) when `cycle` is 0.
    :param to_date: The ending date to download transactions to. Only valid when
        `cycle` is 0. Defaults to today.
    :param format: One of ['QFX', 'QIF', 'QBO', 'IIF', 'OFX', 'CSV']. If IIF, must
        provide `iifaccount`.
    :param cycle: One of [0, 1, 2, 3]. If 0, must provide `from_date` and optionally
        `to_date`. 1 is for recent transactions, 2 is for current statement, and 3 is
        for previous statement.
    :param iifaccount: The name of the Quickbooks account if downloading IIF format.
    :return: dict of POSTable data.
    :raises AssertionError: on any invalid argument combination.
    """
    # Bug fix: the old message said "between -1 and 4"; the accepted values are 0..3.
    assert 0 <= cycle <= 3, "Cycle must be one of 0, 1, 2, 3."
    if cycle == 0:
        # Bug fix: a missing from_date previously surfaced as an opaque TypeError
        # on the <= comparison below; fail with an explicit message instead.
        assert from_date is not None, "You must provide a from_date when cycle is 0."
        if not to_date:
            to_date = datetime.date.today()
        assert from_date <= to_date, "You must provide a from_date lower than a to_date."
        from_date = from_date.strftime('%m/%d/%y')
        to_date = to_date.strftime('%m/%d/%y')
    else:
        # Date range is meaningless for statement-based cycles.
        from_date = None
        to_date = None
    valid_formats = ['QFX', 'QIF', 'QBO', 'IIF', 'OFX', 'CSV']
    assert format in valid_formats, '{} is an invalid format. It must be one of: {}'.format(format, valid_formats)
    if format == 'IIF':
        assert iifaccount, "You must provide an account name for Quickbooks when downloading to IIF."
    return {
        'Cycle': '0{}'.format(cycle),
        'Tran_Type': '09',  # This seems to always be this number
        'FromDate': from_date,
        'ThruDate': to_date,
        'lstFormat': format,
        'Submit': 'Export',
        # NOTE(review): `iifaccount` is validated above but the posted field is read
        # back from the page instead — confirm whether iifaccount should be sent here.
        'IIFAccount': get_input_value_ided_as(form_page_soup, 'IIFAccount'),
        'WO': get_input_value_ided_as(form_page_soup, 'WO'),
        'ESPTN': get_input_value_ided_as(form_page_soup, 'ESPTN'),
        'Number': acct_session_id,
        'Type': '001',  # This seems to always be this number
        'SPTN': sptn,
    }
def get_styles(self, date: dt.date, focus: bool) -> Union[str, None, Tuple[str, str]]:
    """Pick the urwid style name(s) for a calendar cell.

    Focused cells get a 'focus' style; today is always highlighted; otherwise
    per-day event styling is delegated when `highlight_event_days` is set.
    """
    is_today = date == date.today()
    if focus:
        return 'today focus' if is_today else 'reveal focus'
    if is_today:
        return 'today'
    if self.highlight_event_days:
        return self.get_day_styles(date, focus)
    return None
def get_clips(self, for_date: date): """ Get all available video clips for the given local date. """ # The '_sl' suffix yields mms:// URLs. resp = self._search(self.provider_url + '/meeting_search_sl.php', 'search_clips_sl', ['', for_date.strftime('%Y-%m-%d'), '']) start_bit, end_bit = "+:var res = { \"result\": '", "'}; res;" body = '"{}"'.format(resp.text[len(start_bit):-1 - len(end_bit)]) body = body.replace("\\n", "\n").replace("\\'", "'").replace('\\"', '"') parsed_html = BeautifulSoup(body, 'html.parser') category = None for element in parsed_html.select('td.gameDate'): strong, a_link = element.find('strong'), element.find('a') if strong: category = str(strong.string).strip() elif a_link: # Back up to previous <td> and grab the date. # Asking for videos on a particular date may yield videos that are for nearby dates, # but on the same date according to the video URL. actual_date = str(list(element.previous_siblings)[1].string).strip() actual_date = pendulum.parse(actual_date).date() href = a_link['href'] match = re.match( r"javascript:reload_media_sl\('(mms://[\w\-./]+)', '(\d+:\d+:\d+)', '(\d+:\d+:\d+)'\)", href) if not match: continue mms_url, start_time, end_time = match.group(1), match.group(2), match.group(3) if start_time == '41:09:00': start_time = '00:41:09' title = str(element.string).strip() yield InsIncVideoClip(category, title, mms_url, actual_date, start_time, end_time)
def add_admin(self, pk, username: str, password_hash: str, first_name: str, last_name: str,
              middle_name: str, dob: date_type, addr: str, email: str, phone: str,
              primary_hospital_id: int, hospital_ids: list):
    """Queue one admin record for export.

    :param pk: The pk of this admin
    :param username: The username of this admin
    :param password_hash: The password_hash of this admin
    :param first_name: The first_name of this admin
    :param last_name: The last name of this admin
    :param middle_name: The middle name of this admin
    :param dob: The date of birth for this admin
    :param addr: The address of this admin
    :param email: The email for this admin
    :param phone: The phone number for this admin
    :param primary_hospital_id: The id of the primary hospital for this admin
    :param hospital_ids: A list of ids for the hospitals associated with this admin
    :return: None
    """
    # Remember which export-list index this source pk maps to.
    self.__pk_map['admins'][pk] = len(self.__pk_map['admins'])
    record = {
        'username': username,
        'password_hash': password_hash,
        'first_name': first_name,
        'middle_name': middle_name,
        'last_name': last_name,
        'dob': dob.isoformat(),
        'addr': addr,
        'email': email,
        'phone': phone,
        'primary_hospital_id': primary_hospital_id,
        'hospital_ids': hospital_ids,
    }
    self.__export_scheme['admins'].append(record)
def add_doctor(self, pk, username: str, password_hash: str, first_name: str, last_name: str,
               middle_name: str, dob: date_type, addr: str, email: str, phone: str,
               hospital_ids: list, patient_ids: list):
    """Queue one doctor record for export.

    :param pk: The pk of this doctor
    :param username: The username of this doctor
    :param password_hash: The password hash of this doctor
    :param first_name: The first name of this doctor
    :param last_name: The last name of this doctor
    :param middle_name: The middle name of this doctor
    :param dob: The birth date of this doctor
    :param addr: The address of this doctor
    :param email: The email for this doctor
    :param phone: The phone number for this doctor
    :param hospital_ids: A list of hospital ids associated with this doctor
    :param patient_ids: A list of patient ids associated with this doctor
    :return: None
    """
    # Remember which export-list index this source pk maps to.
    self.__pk_map['doctors'][pk] = len(self.__pk_map['doctors'])
    record = {
        'username': username,
        'password_hash': password_hash,
        'first_name': first_name,
        'middle_name': middle_name,
        'last_name': last_name,
        'dob': dob.isoformat(),
        'addr': addr,
        'email': email,
        'phone': phone,
        'hospital_ids': hospital_ids,
        'patient_ids': patient_ids,
    }
    self.__export_scheme['doctors'].append(record)
def add_nurse(self, pk, username: str, password_hash: str, first_name: str, last_name: str,
              middle_name: str, dob: date_type, addr: str, email: str, phone: str,
              primary_hospital_id: int, doctor_ids: list):
    """Queue one nurse record for export.

    :param pk: The pk for this nurse
    :param username: The username of this nurse
    :param password_hash: The password hash for this nurse
    :param first_name: The first name of the nurse
    :param last_name: The last name of the nurse
    :param middle_name: The middle name of the nurse
    :param dob: The birth date of this nurse
    :param addr: The address of this nurse
    :param email: The email of this nurse
    :param phone: The phone number of this nurse
    :param primary_hospital_id: The id of the primary hospital associated with this nurse
    :param doctor_ids: A list of doctor ids associated with this nurse
    :return: None
    """
    # Fixes vs original: primary_hospital_id was annotated `list` although it is a
    # single id (see docstring and the admin/patient siblings); docstring typos
    # ("assocaited", "A lost of doctors") corrected. Behavior is unchanged.
    self.__pk_map['nurses'][pk] = len(self.__pk_map['nurses'])
    self.__export_scheme['nurses'] += [{
        'username': username,
        'password_hash': password_hash,
        'first_name': first_name,
        'middle_name': middle_name,
        'last_name': last_name,
        'dob': dob.isoformat(),
        'addr': addr,
        'email': email,
        'phone': phone,
        'primary_hospital_id': primary_hospital_id,
        'doctor_ids': doctor_ids
    }]
def find_schedules(
        self, teacher_id: int, from_date: datetime.date, to_date: datetime.date
) -> List[Schedule]:
    """Fetch a teacher's schedule rows whose date falls in the given range,
    ordered by datetime.

    :param teacher_id: id of the teacher whose schedule is queried.
    :param from_date: inclusive lower bound of the date range.
    :param to_date: inclusive upper bound of the date range (see padding note).
    :return: list of Schedule objects built from the matching rows.
    """
    with self._conn.cursor() as cursor:
        sql = """
            SELECT * FROM schedule
            WHERE teacher_id = %s AND DATE(datetime) BETWEEN %s AND %s
            ORDER BY datetime
        """.strip()
        # NOTE(review): the upper bound is padded by 2 days beyond to_date —
        # presumably to absorb a timezone offset; confirm this is intentional.
        cursor.execute(
            sql,
            (teacher_id,
             from_date.strftime("%Y-%m-%d"),
             (to_date + datetime.timedelta(days=2)).strftime("%Y-%m-%d"))
        )
        #print(cursor._last_executed)
        schedules = []
        for row in cursor.fetchall():
            schedules.append(Schedule(row["teacher_id"], row["datetime"], ScheduleStatus(row["status"])))
        return schedules
def _response_for_date(cls, date: datetime.date=None) -> str:
    """Fetch the raw response body for *date* (today when omitted).

    The date is serialized with cls.DATE_FORMAT and sent as a query
    parameter to cls.BASE_URL.
    """
    if date is None:
        date = datetime.date.today()
    payload = {"openForm": 1, "date": date.strftime(cls.DATE_FORMAT)}
    return requests.get(cls.BASE_URL, params=payload).text
def dateAsRFC3339Text(date: Date) -> str:
    """
    Convert a :class:`Date` into an RFC 3339 formatted date string.

    For calendar dates, RFC 3339 'full-date' is exactly the ISO 8601 form
    YYYY-MM-DD, so this simply delegates to ``isoformat``.

    :param date: A date to convert.

    :return: An RFC 3339 formatted date string corresponding to :obj:`date`.
    """
    text = date.isoformat()
    return text
def naturalday(value, format='%b %d'):
    """For date values that are tomorrow, today or yesterday compared to
    present day returns representing string. Otherwise, returns a string
    formatted according to ``format``.

    Non-date-ish or out-of-range inputs are returned unchanged.
    """
    try:
        value = Date(value.year, value.month, value.day)
    except AttributeError:
        # Passed value wasn't date-ish
        return value
    except (OverflowError, ValueError):
        # Date arguments out of range
        return value
    day_offset = (value - Date.today()).days
    names = {0: 'today', 1: 'tomorrow', -1: 'yesterday'}
    if day_offset in names:
        return names[day_offset]
    return value.strftime(format)
def is_on_date(self, date: datetime.date) -> bool:
    """Whether this recurring event occurs on *date*.

    The base-class check must pass, the weekday must match, and — when
    weeks are skipped — *date* must land on an active week counted from
    ``start_date``.
    """
    if not super().is_on_date(date):
        return False
    if date.weekday() != self.day_of_the_week:
        return False
    if self.skip_weeks == 0:
        return True
    elapsed_weeks = (date - self.start_date).days // 7
    # Week 0 is always active; afterwards every (skip_weeks + 1)-th week is.
    return elapsed_weeks == 0 or elapsed_weeks % (self.skip_weeks + 1) == 0
def get_day_metrics_float(date: datetime.date):
    """Aggregate transaction metrics for the 24h window starting at *date*.

    Balances are reported in major currency units (stored values / 100).
    """
    window_end = date + datetime.timedelta(days=1)
    transactions = Transaction.query.filter(
        Transaction.createDate >= date,
        Transaction.createDate <= window_end).all()
    values = [t.value for t in transactions]
    return {
        'date': date.isoformat(),
        'overallNumber': len(transactions),
        'distinctUsers': len({t.userId for t in transactions}),
        'dayBalance': sum(values) / 100,
        'dayBalancePositive': sum(v for v in values if v > 0) / 100,
        'dayBalanceNegative': sum(v for v in values if v < 0) / 100,
    }
def date_matches_template_certain_days(self, d: date):
    """Return whether *d* matches this template's month / weekday / ordinal flags.

    A date matches when its month flag and weekday flag are both set, and then
    either `every` week is enabled, `last` is set and *d* is the last such
    weekday of its month, or the matching ordinal flag (first..fourth) is set.
    """
    def _nth_xday(day):
        """1-based ordinal of *day*'s weekday within its month (e.g. 2nd Tuesday)."""
        return (day.day - 1) // 7 + 1

    def _is_last_xday(day):
        """True when *day* is the LAST occurrence of its weekday in its month.

        Bug fix: the original compared `(d + 1 week).month > d.month`, which is
        False when December rolls over to January, so the last <x>day of
        December never matched. Compare for a month *change* instead.
        """
        return (day + timedelta(weeks=1)).month != day.month

    # Month filter: one boolean flag per calendar month, indexed by month number.
    month_flags = (self.jan, self.feb, self.mar, self.apr, self.may, self.jun,
                   self.jul, self.aug, self.sep, self.oct, self.nov, self.dec)
    if not month_flags[d.month - 1]:
        return False

    # Day-of-week filter (Monday == 0, matching date.weekday()).
    day_flags = (self.monday, self.tuesday, self.wednesday, self.thursday,
                 self.friday, self.saturday, self.sunday)
    if not day_flags[d.weekday()]:
        return False  # Doesn't match template if day-of-week doesn't match.

    if self.every:
        return True  # Does match if it happens every week and the day-of-week matches.
    if self.last and _is_last_xday(d):
        return True  # Check for last <x>day match.

    # Otherwise, figure out the ordinal and see if we match it.
    ord_num = _nth_xday(d)
    ordinal_flags = (self.first, self.second, self.third, self.fourth)
    return 1 <= ord_num <= 4 and ordinal_flags[ord_num - 1]
def month(self, year=None, month=None, month_format='%b', date=None):
    """Filter the queryset to requests that occurred within one month.

    Either pass a pre-built ``date`` or a string ``year`` plus ``month``
    (parsed with '%Y' + ``month_format``). Returns None on unparseable input.

    NOTE(review): `strptime` is presumably `time.strptime` (returns a
    struct_time whose first three fields are year/month/day) and `Date` is
    presumably `datetime.date` — confirm against this module's imports.
    """
    if not date:
        try:
            if year and month:
                # Parse e.g. '2020' + 'Jan' into a date on the 1st-ish of that month.
                date = Date(*strptime(year + month, '%Y' + month_format)[:3])
            else:
                raise TypeError('Request.objects.month() takes exactly 2 arguments')
        except ValueError:
            # Unparseable year/month: silently yield no queryset.
            return
    # Calculate first and last day of month, for use in a date-range lookup.
    first_day = date.replace(day=1)
    if first_day.month == 12:
        # December wraps into January of the next year.
        last_day = first_day.replace(year=first_day.year + 1, month=1)
    else:
        last_day = first_day.replace(month=first_day.month + 1)
    # Half-open interval [first_day, last_day) over the related requests' time.
    lookup_kwargs = {
        'visit__requests__time__gte': first_day,
        'visit__requests__time__lt': last_day,
    }
    return self.filter(**lookup_kwargs).distinct()
def cache_currency(self, bank_short_name: str, cur_instance, date: datetime.date) -> None:
    """Store a currency quote in the cache.

    The key is '<bank>_<iso>_<date>' (lower-cased); the value packs
    buy, sell and multiplier as a comma-separated string.
    """
    day_part = date.strftime(CACHE_DATE_FORMAT).lower()
    search_key = "{}_{}_{}".format(bank_short_name.lower(), cur_instance.iso.lower(), day_part)
    # Quotes without an explicit multiplier default to 1.
    multiplier = getattr(cur_instance, "multiplier", 1)
    packed = ",".join([str(cur_instance.buy), str(cur_instance.sell), str(multiplier)])
    self.cache.put(search_key, packed)
def select_day(self, dt: datetime.date):
    """Click the cell for *dt*'s day in the date-picker widget and wait until
    the bound input reflects the selected date.

    :param dt: the date to select; the picker is first navigated to its month.
    :raises DatePickerError: if a day cell's link text is not numeric.
    """
    self.go_to_date(dt)
    elems = self._picker.find_elements_by_css_selector(self.selectable_days_selector)  # type: Sequence[WebElement]
    for elem in elems:
        assert isinstance(elem, WebElement)
        a = elem.find_element_by_tag_name('a')
        try:
            # The link text is the day-of-month number.
            a = int(a.text)
        except ValueError:
            raise DatePickerError('Unable to find')
        if a == dt.day:
            elem.click()
            # Wait (up to 2s) for the input field to show the picked date.
            expected = dt.strftime('%m/%d/%Y')
            wait_for(lambda: self._browser.find_element_by_id(self.date_input_id).get_attribute('value') == expected, 2)
            return
def add_patient(self, pk, username: str, password_hash: str, first_name: str, middle_name: str,
                last_name: str, dob: date_type, addr: str, email: str, phone: str,
                emergency_contact: str, eye_color: str, bloodtype: str, height: int,
                weight: int, primary_hospital_id: int, primary_doctor_id: int,
                doctor_ids: list):
    """Queue one patient record for export.

    :param pk: The pk of the patient
    :param username: The username of the patient
    :param password_hash: The password hash for this patient
    :param first_name: The first name of the patient
    :param middle_name: The middle name of the patient
    :param last_name: The last name of the patient
    :param dob: The birth date of the patient
    :param addr: The address of the patient
    :param email: The email of the patient
    :param phone: The phone number of the patient
    :param emergency_contact: The emergency contact for the patient
    :param eye_color: The eye color of the patient
    :param bloodtype: The blood type of the patient
    :param height: The height of the patient (in)
    :param weight: The weight of the patient (lbs)
    :param primary_hospital_id: The id of the primary hospital for this patient
    :param primary_doctor_id: The id of the primary doctor for this patient
    :param doctor_ids: A list of doctor ids associated with this patient
    :return: None
    """
    # Remember which export-list index this source pk maps to.
    self.__pk_map['patients'][pk] = len(self.__pk_map['patients'])
    record = {
        'username': username,
        'password_hash': password_hash,
        'first_name': first_name,
        'middle_name': middle_name,
        'last_name': last_name,
        'dob': dob.isoformat(),
        'addr': addr,
        'email': email,
        'phone': phone,
        'emergency_contact': emergency_contact,
        'eye_color': eye_color,
        'bloodtype': bloodtype,
        'height': height,
        'weight': weight,
        'primary_hospital_id': primary_hospital_id,
        'primary_doctor_id': primary_doctor_id,
        'doctor_ids': doctor_ids,
    }
    self.__export_scheme['patients'].append(record)
def from_date(cls, date_obj: date):
    """Alternative constructor using a datetime.date object

    Creates a CustomDate object with a default time of 12:00am PDT.
    Two-digit years are windowed: <= 17 maps into 20xx, otherwise 19xx.
    """
    year = date_obj.year
    if len(str(year)) == 2:
        year += 2000 if year <= 17 else 1900
    weekday_name = CustomDate.WEEK_INDEXES_TO_DAY_OF_WEEK[date_obj.weekday()]
    month_name = CustomDate.MONTH_INDEXES_TO_MONTHS[date_obj.month]
    date_string = "{0}, {1} {2}, {3} at 12:00am PDT".format(
        weekday_name, month_name, date_obj.day, year)
    return cls(date_string)
def _response_for_date(cls, date: datetime.date) -> Dict:
    """Fetch the exchange-rate report for a single day as parsed JSON.

    The same date is used for both ends of the range, so the portlet's
    'ajaxGetReportForRange' endpoint returns exactly one day of data.
    """
    str_date = date.strftime(cls.DATE_FORMAT)
    # Liferay portlet plumbing: everything except fromDate/toDate is a fixed
    # routing parameter identifying the exchange-rate portlet instance.
    payload = {
        "p_p_id": "exchangeliferayspringmvcportlet_WAR_exchangeliferayspringmvcportlet_INSTANCE_GACJA0EoQLJN",
        "p_p_lifecycle": 2,
        "p_p_state": "normal",
        "p_p_mode": "view",
        "p_p_resource_id": "ajaxGetReportForRange",
        "p_p_cacheability": "cacheLevelPage",
        "p_p_col_id": "column-1",
        "p_p_col_pos": 3,
        "p_p_col_count": 6,
        "fromDate": str_date,
        "toDate": str_date,
        # NOTE(review): channel 3 and "all" currencies are hard-coded — confirm
        # these are the only values callers need.
        "channelIDs": 3,
        "currencies": "all"
    }
    return requests.get(cls.BASE_URL, params=payload).json()
def get_cached_value(self, bank_short_name: str, currency_name: str, date: datetime.date):
    """Look up a cached quote; return a currency instance or None on a miss.

    The key mirrors cache_currency: '<bank>_<currency>_<date>' lower-cased;
    the stored value is 'buy,sell,multiplier'.
    """
    day_part = date.strftime(CACHE_DATE_FORMAT).lower()
    search_key = "{}_{}_{}".format(bank_short_name.lower(), currency_name.lower(), day_part)
    raw = self.cache.get(search_key)
    if raw is None:
        return None
    buy, sell, multiplier = raw.decode('utf-8').split(",")
    return self.currency_cls(currency_name, currency_name,
                             buy=float(buy), sell=float(sell),
                             multiplier=int(multiplier))
def _scrape_uuids(self, day: date) -> set: """ Return uuid request parameters for each job by scraping the summary page. """ # Reset self.stamp = Stamp(day, 'NO_JOBS') # Avoid doing things twice if self._is_cached(): return None url = 'http://bamboo-mec.de/ll.php5' payload = {'status': 'delivered', 'datum': day.strftime('%d.%m.%Y')} response = self.remote_session.get(url, params=payload) # The so called 'uuids' are # actually 7 digit numbers. pattern = 'uuid=(\d{7})' jobs = findall(pattern, response.text) # Dump the duplicates. return set(jobs)
def add_test(self, name: str, date: date_type, description: str, results: str,
             released: bool, doctor_id: int, patient_id: int):
    """Queue one medical-test record for export.

    :param name: The name of the test
    :param date: The date of the test
    :param description: A description of the test
    :param results: The results associated with the test
    :param released: Whether or not the test is released
    :param doctor_id: The id of the doctor associated with the test
    :param patient_id: The id of the patient associated with this test
    :return: None
    """
    record = {
        'name': name,
        'date': date.isoformat(),
        'description': description,
        'results': results,
        'released': released,
        'doctor_id': doctor_id,
        'patient_id': patient_id,
    }
    self.__export_scheme['tests'].append(record)
def first_day_of_next_month(date: datetime.date) -> datetime.date:
    """Returns the date corresponding to the first day of the next month for
    the given date."""
    # Day 28 exists in every month, so adding 4 days is guaranteed to land
    # somewhere in the following month; then snap back to its first day.
    in_next_month = date.replace(day=28) + datetime.timedelta(days=4)
    return in_next_month.replace(day=1)
def __init__(self, today: date):
    """Build the range from the Monday of *today*'s week through *today*."""
    # Copy via date(...) so a datetime input is coerced to a plain date,
    # then step back to the start of the ISO week (Monday, weekday() == 0).
    week_start = date(today.year, today.month, today.day) \
        - datetime.timedelta(days=today.weekday())
    super().__init__(start_date=week_start, end_date=today)
def _url_for_date(self, date: datetime.date) -> str:
    """Build the exchange-rate page URL for *date*, formatted with
    self.DATE_FORMAT and joined onto self.BASE_URL."""
    path = 'individual/currency-exchange/exchange/bveb/?date_hidden={}'.format(
        date.strftime(self.DATE_FORMAT))
    return urljoin(self.BASE_URL, path)
def to_url(self, value: datetime.date) -> str:
    """Serialize *value* as an ISO-style YYYY-MM-DD URL fragment."""
    return format(value, "%Y-%m-%d")
def get_rates_for_date(self, date: datetime.date):
    """Fetch the historical rates JSON for *date* and print it.

    Hits '<base_url>historical/YYYY-MM-DD.json' authenticated via app_id.
    """
    url = '{base}historical/{date}.json'.format(
        base=self.base_url, date=date.strftime('%Y-%m-%d'))
    response = requests.get(url, params={'app_id': self.app_id})
    print(response.json())
def datetime_to_int(dt: datetime.date) -> int:
    """Pack a date/datetime into a sortable YYYYMMDDHHMMSS integer.

    Plain dates format their time fields as zeros.
    """
    return int(format(dt, "%Y%m%d%H%M%S"))
def wine_quality_day(
        task_target_date: datetime.date,
        root_location: PathStr = data_repo.wines_per_date) -> pd.DataFrame:
    """Load the 'wine.csv' target for a single task date.

    :param task_target_date: the date whose partition is read (as YYYY-MM-DD).
    :param root_location: base path of the per-date wine data repository.
    :return: NOTE(review): annotated pd.DataFrame, but `target(...)` is an
        external helper — confirm it actually materializes a DataFrame here.
    """
    return target(root_location, task_target_date.strftime("%Y-%m-%d"), "wine.csv")
def find_candidate_date(self, day1: date) -> date:
    """Find the next possible date starting from day1,
    only based on calendar, not looking at include/exclude days"""
    week = day1.isocalendar()[1]
    weekday = day1.weekday()
    year = day1.year
    if self.__frequency in [
            "weekly", "even-weeks", "odd-weeks", "every-n-weeks"
    ]:
        # Everything except monthly
        # convert to every-n-weeks
        if self.__frequency == "weekly":
            period = 1
            first_week = 1
        elif self.__frequency == "even-weeks":
            period = 2
            first_week = 2
        elif self.__frequency == "odd-weeks":
            period = 2
            first_week = 1
        else:
            period = self.__period
            first_week = self.__first_week
        offset = -1
        if (week - first_week) % period == 0:  # Collection this week
            for day_name in self.__collection_days:
                day_index = WEEKDAYS.index(day_name)
                if day_index >= weekday:  # Collection still did not happen
                    offset = day_index - weekday
                    break
        if offset == -1:  # look in following weeks
            in_weeks = period - (week - first_week) % period
            # Jump to the first configured collection day of the next active week.
            offset = (7 * in_weeks - weekday +
                      WEEKDAYS.index(self.__collection_days[0]))
        return day1 + relativedelta(days=offset)
    elif self.__frequency == "every-n-days":
        if self.__first_date is None or self.__period is None:
            _LOGGER.error(
                "(%s) Please configure first_date and period for every-n-days collection frequency.",
                self.__name,
            )
            return None
        if (day1 - self.__first_date).days % self.__period == 0:
            return day1
        # Days remaining until the next multiple of __period after __first_date.
        offset = self.__period - (
            (day1 - self.__first_date).days % self.__period)
        return day1 + relativedelta(days=offset)
    elif self.__frequency == "monthly":
        # Monthly
        if self.__period is None or self.__period == 1:
            return self.__monthly_candidate(day1)
        else:
            # Walk forward month by month until the month offset from
            # __first_month is a multiple of the period.
            candidate_date = self.__monthly_candidate(day1)
            while (candidate_date.month - self.__first_month) % self.__period != 0:
                candidate_date = self.__monthly_candidate(candidate_date + relativedelta(days=1))
            return candidate_date
    elif self.__frequency == "annual":
        # Annual
        if self.__date is None:
            _LOGGER.error(
                "(%s) Please configure the date for annual collection frequency.",
                self.__name,
            )
            return None
        conf_date = datetime.strptime(self.__date, "%m/%d").date()
        candidate_date = date(year, conf_date.month, conf_date.day)
        if candidate_date < day1:
            # This year's date already passed; use next year's occurrence.
            candidate_date = date(year + 1, conf_date.month, conf_date.day)
        return candidate_date
    elif self.__frequency == "group":
        if self.__entities is None:
            _LOGGER.error("(%s) Please add entities for the group.",
                          self.__name)
            return None
        # A group's next date is the earliest next date among its member sensors.
        candidate_date = None
        for entity in self.__entities:
            d = self.hass.states.get(entity).attributes.get(
                ATTR_NEXT_DATE).date()
            if candidate_date is None or d < candidate_date:
                candidate_date = d
        return candidate_date
    else:
        _LOGGER.debug(
            f"({self.__name}) Unknown frequency {self.__frequency}")
        return None
def is_business_day(day: dt.date) -> bool:
    """True for Monday through Friday (weekday() 0-4)."""
    return day.weekday() not in (5, 6)
def __change_day_until_weekday_reached(cls, period_date: date, weekdays: List[int],
                                       nb_days: int) -> date:
    """Step *period_date* by *nb_days* at a time until its weekday is one of
    *weekdays*; returns the input unchanged when it already matches."""
    step = timedelta(days=nb_days)
    while period_date.weekday() not in weekdays:
        period_date = period_date + step
    return period_date
def get_first_date_of_the_month(day_date: date) -> date:
    """Return the first day of the month containing *day_date*."""
    month_start = day_date.replace(day=1)
    return month_start
def get_first_date_of_the_week(day_date: date) -> date:
    """Return the Monday of the week containing *day_date*."""
    return day_date - timedelta(days=day_date.weekday())
async def _multi_create(self, ctx: Context, start: date, end: date = None, force=None):
    """Bulk-create one event per eligible day between *start* and *end*
    (inclusive), after an ok/cancel confirmation dialog in the channel.

    When *end* is omitted it defaults to the last day of *start*'s month.
    Past days are skipped unless *force* is truthy.
    """
    if end is None:
        # Default to the end of start's month.
        last_day = calendar.monthrange(start.year, start.month)[1]
        end = start.replace(day=last_day)
    delta = end - start
    days: List[date] = []
    past_days: List[date] = []
    # isoweekday(): Mon=1 .. Sun=7, so this selects Fri/Sat/Sun.
    # NOTE(review): the name says "weekend" but 5 includes Friday — confirm
    # events are intentionally created on Fridays too.
    weekend = [5, 6, 7]
    day: date
    for i in range(delta.days + 1):
        day = start + timedelta(days=i)
        # Only the listed weekdays get events; past ones are collected for the
        # warning message unless force is set.
        if day.isoweekday() in weekend:
            if day < date.today() and not force:
                past_days.append(day)
            else:
                days.append(day)
    if len(past_days) > 0:
        strpastdays = " ".join([day.isoformat() for day in past_days])
        strpast = (
            "\nFollowing dates are in the past and will be skipped:\n"
            "```{}```".format(strpastdays))
    else:
        strpast = ""
    if len(days) > 0:
        strdays = " ".join([day.isoformat() for day in days])
        message = ("Creating events for following days:\n```{}``` "
                   "{}"
                   "Reply with `ok` or `cancel`.".format(strdays, strpast))
        await ctx.send(message)
    else:
        message = ("No events to be created.{}"
                   "Use the `force` argument to override. "
                   "See `{}help multicreate`".format(strpast, CMD))
        await ctx.send(message)
        return
    # Block other commands while waiting for the user's confirmation.
    self.bot.awaiting_reply = True

    def pred(m):
        # Only accept replies from the invoking user in the same channel.
        return m.author == ctx.message.author \
            and m.channel == ctx.channel

    # All events start at a fixed 18:30 local time.
    event_time = time(hour=18, minute=30)
    with_time = [datetime.combine(day, event_time) for day in days]
    try:
        while True:
            response = await self.bot.wait_for('message', check=pred)
            reply = response.content.lower()
            if reply == 'ok':
                await ctx.send("Creating events")
                for day in with_time:
                    await self._create_event(ctx, day, batch=True)
                await msgFnc.sortEventMessages(self.bot)
                EventDatabase.toJson()
                await ctx.send("Done creating events")
                self.bot.awaiting_reply = False
                return
            elif reply == 'cancel':
                await ctx.send("Canceling")
                self.bot.awaiting_reply = False
                return
            else:
                await ctx.send("Please reply with `ok` or `cancel`.")
    except Exception:
        # Surface the traceback to the channel and release the reply lock.
        await ctx.send('```py\n{}\n```'.format(traceback.format_exc()))
        self.bot.awaiting_reply = False
def get_pay_period_start_date(self, pay_date: datetime.date):
    """Pay periods are calendar months: return the first of *pay_date*'s month."""
    period_start = pay_date.replace(day=1)
    return period_start
def check_weekend(self, date: datetime.date):
    """True when *date* falls on the configured SATURDAY/SUNDAY weekday numbers."""
    return date.weekday() in (self.SATURDAY, self.SUNDAY)
def _unstructure_date(obj: date) -> str:
    """Serialize a date to its ISO-8601 (YYYY-MM-DD) string form."""
    iso_text = obj.isoformat()
    return iso_text
def format_date(day: date) -> str:
    """Render *day* as YYYY-MM-DD."""
    return format(day, "%Y-%m-%d")
def get_logfile_name(self, log_date: date, log_number: int) -> str:
    """Build the log path '<logDir>/<ISO date>_<number>.txt'."""
    return "{}/{}_{}.txt".format(self.logDir, log_date, log_number)
def get_monday(day: date) -> date:
    """Return the Monday of the week containing *day*."""
    days_past_monday = timedelta(days=day.weekday())
    return day - days_past_monday
def date_para_str(data: date) -> str:
    """Format *data* in DD/MM/YYYY (pt-BR) form."""
    return format(data, '%d/%m/%Y')
def cli(src: str, targetdir: str, begin_date: datetime.date, end_date: datetime.date,
        roi: Union[str, List[float]], region: str, sgrid: bool, force_doy: bool,
        filter_product: str, filter_vampc: str, target_srs: str, co: Tuple[str],
        clip_valid: bool, round_int: int,
        gdal_kwarg: Union[Tuple[str], Dict[str, Union[str, int, float]]],
        overwrite: bool, last_smoothed: str) -> List:
    """Creates GeoTiff Mosaics from HDF5 files.

    The input can be either raw or smoothed HDF5 files. With the latter,
    the S-grid can also be mosaiced using the `--sgrid` flag.
    If no ROI is passed, then the full extent of the input files will be mosaiced,
    otherwise the GeoTiffs will be clipped to the ROI after warping.
    By default, the MODIS data will be warped to WGS1984 (EPSG:4326), but a custom
    spatial reference can be passed in with `--target-srs`, in wich case the target
    resolution has to be manually defined too. optionally, `--target-srs` can be set
    to `None`, using the product's native projection.
    If required, the output data can be clipped to the valid data range of the input
    data using the `--clip-valid` flag.
    Also, the output data can be rounded, if it's float (eg. sgrid) to defined
    precision, or if its integer to the defined exponent of 10
    (round_int will be multiplied by -1 and passed to np.round!!!)
    Specific creation options can be passed to gdalwarp and gdaltranslate using the
    `--co` flag. The flag can be used multiple times, each input needs to be in the
    gdal format for COs, e.g. `KEY=VALUE`.
    Additional options can be passed to gdal.Translate (and with restrictions to warp)
    using `--gdal-kwarg`, e.g. `--gdal-kwarg xRes=10 --gdal-kwarg yRes=10`. The
    additional options can either be provided as a tuple with `KEY=VALUE` strings,
    or with a key-value dictionary. The keywords are sensitive to how gdal expects
    them, as they are directly passed to gdal.TranlsateOptions. For details, please
    check the documentation of gdal.TranslateOptions.

    Args:
        src (str): Input directory (or file).
        targetdir (str): Target directory.
        begin_date (datetime.date): Start date for tiffs.
        end_date (datetime.date): End date for tiffs.
        roi (str): ROI for clipping. Passing ROI as a list[float] is also supported.
        region (str): Region for filename.
        sgrid (bool): Extract sgrid instead of data.
        force_doy (bool): Force DOY in filename.
        filter_product (str): Filter input by product code.
        filter_vampc (str): Filter inpout by vam parameter code.
        target_srs (str): Target spatial reference (in format GDAL can process) or "None".
        co (Tuple[str]): Creation options passed to gdal.Translate.
        clip_valid (bool): Clip data to valid range.
        round_int (int): Round integer.
        gdal_kwarg (Tuple[str]): translateOptions to the internal call to
            gdal::translate(); the Tuple of strings (item formatting: "key=value")
            is parsed into a dict. Alternatively, passing a dict instead of a
            Tuple[str] is also supported.
        overwrite (bool): Overwrite existing Tiffs.
        last_smoothed (str): Rawdate (MODIS time step) that is checked to be the
            last in series at time of smoothing.
    """
    src_input = Path(src)
    if not src_input.exists():
        msg = "src_dir does not exist."
        log.error(msg)
        raise ValueError(msg)
    # Directory input: glob for HDF5 files; file input: process as-is.
    if src_input.is_dir():
        files = list(src_input.glob("*.h5"))
    else:
        files = [src_input]
    if filter_product is not None:
        product = filter_product.upper()
        files = [x for x in files if product in x.name]
    if filter_vampc:
        vampc = filter_vampc.upper()
        files = [x for x in files if vampc in x.name]
    if not files:
        msg = "No files found to process! Please check src and/or adjust filters!"
        log.error(msg)
        raise ValueError(msg)
    # Collapse tile ids to '*' so files that differ only by tile fall into one group;
    # mosaicking requires all inputs to belong to a single product group.
    groups = [REGEX_PATTERNS["tile"].sub("*", x.name) for x in files]
    group_check = {".".join(x.split(".")[:-2]) for x in groups}
    if len(group_check) > 1:
        raise ValueError("Multiple product groups in input. Please filter or use separate directories!")
    groups = list(set(groups))
    if roi is not None:
        if not isinstance(roi, list):
            roi = [float(x) for x in roi.split(',')]
        if len(roi) != 4:
            raise ValueError("ROI for clip needs to be bounding box in format xmin,ymin,xmax,ymax")
        # gdal expects the y coordinates in (ymax, ymin) order; swap them.
        roi[1], roi[3] = roi[3], roi[1]
    if targetdir is None:
        # Default the output next to the input.
        if src_input.is_dir():
            targetdir = src_input
        else:
            targetdir = src_input.parent
    else:
        targetdir = Path(targetdir)
    if not targetdir.exists():
        targetdir.mkdir()
    if not targetdir.is_dir():
        msg = "Target directory needs to be a valid path!"
        log.error(msg)
        raise ValueError(msg)
    # NOTE(review): .date() implies begin_date/end_date actually arrive as
    # datetimes (e.g. from click's DateTime type) despite the annotations — confirm.
    if begin_date:
        begin_date = begin_date.date()
    if end_date:
        end_date = end_date.date()
    if sgrid:
        dataset = "sgrid"
        # The S-grid has no valid-range metadata to clip against.
        clip_valid = False
    else:
        dataset = "data"
    if round_int is not None:
        # np.round wants a negative exponent for rounding integers.
        round_int = round_int * -1
    if target_srs.lower() == "none":
        target_srs = None
    gdal_kwargs = {}
    if gdal_kwarg:
        if not isinstance(gdal_kwarg, dict):
            # Parse "key=value" strings into a dict.
            gdal_kwargs.update(
                {key: value for x in gdal_kwarg for key, value in [x.split("=")]}
            )
        else:
            gdal_kwargs = gdal_kwarg
    if last_smoothed is not None:
        # NOTE(review): annotated str but strftime is called — presumably a
        # datetime from click; confirm.
        last_smoothed = last_smoothed.strftime("%Y%j")
    click.echo("\nSTARTING modis_window.py!")
    mosaics = []
    for group in groups:
        log.debug("Processing group %s", group)
        group_pattern = re.compile(group)
        group_files = [str(x) for x in files if group_pattern.match(x.name)]
        mosaic = ModisMosaic(group_files)
        mosaics.extend(
            mosaic.generate_mosaics(
                dataset=dataset,
                targetdir=targetdir,
                target_srs=target_srs,
                aoi=roi,
                overwrite=overwrite,
                force_doy=force_doy,
                prefix=region,
                start=begin_date,
                stop=end_date,
                clip_valid=clip_valid,
                round_int=round_int,
                last_smoothed=last_smoothed,
                creationOptions=list(co),
                **gdal_kwargs,
            )
        )
    click.echo("\nCOMPLETED modis_window.py!")
    return mosaics
async def create(self, day: date) -> str:
    """Prepare a trading session for *day* and return its uid.

    Opens the DB connection, derives the session's market open/close
    bounds, instantiates every scanner listed in conf_dict["scanners"]
    (the built-in "momentum" scanner or custom ones loaded from file),
    creates the strategies, and seeds session state (clock, symbols,
    portfolio value, risk).

    :param day: calendar date the session runs for.
    :return: self.uid — the identifier of this session.
    """
    await create_db_connection()
    scanners_conf = self.conf_dict["scanners"]

    # All trading times are expressed in US-Eastern (exchange) time.
    # NOTE(review): datetime.combine(...) produces a naive datetime, so
    # .astimezone(est) first interprets it in the *local machine*
    # timezone before converting — confirm that is the intent.
    est = pytz.timezone("America/New_York")
    start_time = datetime.combine(day, datetime.min.time()).astimezone(est)
    day = datetime.combine(day, datetime.min.time()).astimezone(est)

    # Regular cash-session bounds: 09:30–16:00 Eastern.
    self.start = day.replace(hour=9, minute=30)
    self.end = day.replace(hour=16, minute=0)
    config.market_open = start_time.replace(
        hour=9, minute=30, second=0, microsecond=0)
    config.market_close = start_time.replace(
        hour=16, minute=0, second=0, microsecond=0)

    # Build one scanner object per configured entry.
    for scanner_name in scanners_conf:
        scanner_object: Optional[Scanner] = None
        if scanner_name == "momentum":
            # Built-in momentum scanner, configured from its conf section.
            scanner_details = scanners_conf[scanner_name]
            try:
                recurrence = scanner_details.get("recurrence", None)
                target_strategy_name = scanner_details.get(
                    "target_strategy_name", None)
                scanner_object = Momentum(
                    data_loader=self.data_loader,
                    trading_api=AlpacaTrader(),
                    min_last_dv=scanner_details["min_last_dv"],
                    min_share_price=scanner_details["min_share_price"],
                    max_share_price=scanner_details["max_share_price"],
                    min_volume=scanner_details["min_volume"],
                    from_market_open=scanner_details["from_market_open"],
                    today_change_percent=scanner_details["min_gap"],
                    # recurrence (minutes) is optional; None = run once.
                    recurrence=timedelta(
                        minutes=recurrence) if recurrence else None,
                    target_strategy_name=target_strategy_name,
                    max_symbols=scanner_details.get(
                        "max_symbols", config.total_tickers),
                )
                tlog(f"instantiated momentum scanner")
            except KeyError as e:
                # A required config key is missing: report and abort.
                tlog(
                    f"Error {e} in processing of scanner configuration {scanner_details}"
                )
                exit(0)
        else:
            # Any other name is treated as a custom scanner loaded from
            # the Python file given in its "filename" config entry.
            tlog(f"custom scanner {scanner_name} selected")
            scanner_details = scanners_conf[scanner_name]
            try:
                spec = importlib.util.spec_from_file_location(
                    "module.name", scanner_details["filename"])
                custom_scanner_module = importlib.util.module_from_spec(
                    spec)
                spec.loader.exec_module(
                    custom_scanner_module)  # type: ignore
                # The class inside the module must carry the scanner's
                # configured name and must subclass Scanner.
                class_name = scanner_name
                custom_scanner = getattr(custom_scanner_module, class_name)
                if not issubclass(custom_scanner, Scanner):
                    tlog(
                        f"custom scanner must inherit from class {Scanner.__name__}"
                    )
                    exit(0)
                # "filename" (and "recurrence", if present) are consumed
                # here; all remaining entries are forwarded verbatim as
                # keyword arguments to the custom scanner.
                scanner_details.pop("filename")
                if "recurrence" not in scanner_details:
                    scanner_object = custom_scanner(
                        data_loader=self.data_loader,
                        **scanner_details,
                    )
                else:
                    recurrence = scanner_details.pop("recurrence")
                    scanner_object = custom_scanner(
                        data_loader=self.data_loader,
                        recurrence=timedelta(minutes=recurrence),
                        **scanner_details,
                    )
            except Exception as e:
                # Best-effort: a broken custom scanner is logged and
                # skipped rather than aborting the whole session.
                tlog(
                    f"[Error] scanners_runner.scanners_runner() for {scanner_name}:{e} "
                )
        if scanner_object:
            self.scanners.append(scanner_object)

    await create_strategies(
        self.conf_dict,
        self.end - self.start,
        None,
        self.uid,
        day.replace(hour=9, minute=30, second=0, microsecond=0),
        DataLoader(),
    )

    # Seed the simulated clock and per-session trading state.
    self.now = pd.Timestamp(self.start)
    self.symbols: List = []
    self.portfolio_value: float = (100000.0 if not config.portfolio_value
                                   else config.portfolio_value)
    if "risk" in self.conf_dict:
        config.risk = self.conf_dict["risk"]

    return self.uid
def date_to_str(dt: date) -> str:
    """Format *dt* as a day/month/year string (DD/MM/YYYY)."""
    day_month_year = '%d/%m/%Y'
    return dt.strftime(day_month_year)
def get_danmaku(bvid: str = None, aid: int = None, page: int = 0,
                verify: utils.Verify = None, date: datetime.date = None):
    """
    Fetch the danmaku (on-screen comments) of a video.

    :param aid: numeric video id (one of aid/bvid is required)
    :param bvid: string video id (one of aid/bvid is required)
    :param page: index of the part (sub-video) to read
    :param verify: credentials; SESSDATA is required when date is given
    :param date: None fetches the latest danmaku; a datetime.date fetches
                 that day's historical danmaku
    """
    # Guard clauses: an id is mandatory; history needs a logged-in session.
    if not (aid or bvid):
        raise exceptions.NoIdException
    if verify is None:
        verify = utils.Verify()
    if date is not None and not verify.has_sess():
        raise exceptions.NoPermissionException(utils.MESSAGES["no_sess"])

    # Historical danmaku live behind a different endpoint.
    if date is None:
        api = API["video"]["info"]["danmaku"]
    else:
        api = API["video"]["info"]["history_danmaku"]

    video = get_video_info(aid=aid, bvid=bvid, verify=verify)
    oid = video["pages"][page]["cid"]
    params = {"oid": oid}
    if date is not None:
        params["date"] = date.strftime("%Y-%m-%d")
        params["type"] = 1

    response = requests.get(api["url"], params=params,
                            headers=utils.DEFAULT_HEADERS,
                            cookies=verify.get_cookies())
    if not response.ok:
        raise exceptions.NetworkException(response.status_code)

    body = response.content.decode("utf-8")
    try:
        xml = parseString(body)
    except Exception:
        # Not XML: the server answered with a JSON error envelope.
        payload = json.loads(body)
        raise exceptions.BilibiliException(payload["code"], payload["message"])

    # Each <d> element carries its metadata in the comma-separated "p" attr.
    result = []
    for node in xml.getElementsByTagName("d"):
        attrs = node.getAttribute("p").split(",")
        text = node.childNodes[0].data
        result.append(utils.Danmaku(dm_time=float(attrs[0]),
                                    send_time=int(attrs[4]),
                                    crc32_id=attrs[6],
                                    color=utils.Color(attrs[3]),
                                    mode=attrs[1],
                                    font_size=attrs[2],
                                    is_sub=attrs[5] != '0',
                                    text=text))
    return result
def date_to_human(date: datetime.date) -> str:
    """Human-friendly rendering: abbreviated weekday plus ISO date."""
    pattern = '%a %Y-%m-%d'
    return date.strftime(pattern)
def on_last_day_of_month(date: datetime.date) -> datetime.date:
    """Return *date* shifted to the final day of its month."""
    final_day = _last_day_of_month(date.year, date.month)
    return date.replace(day=final_day)
def should_go_headless(day_report: WorkedDay, target_date: date):
    """Decide whether the day can run headless.

    True only when a full 8 hours were reported, the date is not a
    Friday, and it is not the last day of the month.
    """
    worked_full_day = day_report.total_hours() == 8
    is_friday = target_date.weekday() == 4
    month_length = calendar.monthrange(target_date.year,
                                       target_date.month)[1]
    is_month_end = target_date.day == month_length
    return worked_full_day and not is_friday and not is_month_end
def _format_date(date_obj: date) -> str:
    """Compact YYYYMMDD representation of *date_obj*."""
    compact = "%Y%m%d"
    return date_obj.strftime(compact)
def _get_url_for_exercise(self, date: datetime.date, username: str) -> str:
    """Build the exercise-diary URL for *username* on *date*."""
    day = date.strftime("%Y-%m-%d")
    diary_path = "exercise/diary/" + username
    base = parse.urljoin(self.BASE_URL_SECURE, diary_path)
    return base + f"?date={day}"
def to_url(self, d: date) -> str:
    """Format a date for a URL."""
    iso_form = d.isoformat()
    return iso_form
def __init__(self, year, month):
    # Anchor this object at the first day of the given month.
    first_of_month = Date(year, month, 1)
    self._date = first_of_month
def format_date(date: datetime.date):
    """Short M/D/YY string with leading zeros stripped.

    strftime's no-padding flag is platform specific: Windows uses '#',
    POSIX uses '-'.
    """
    on_windows = os.name == "nt"
    pattern = '%#m/%#d/%y' if on_windows else '%-m/%-d/%y'
    return date.strftime(pattern)
def __date_to_str(d: date):
    """Serialize *d* as a compact YYYYMMDD string."""
    packed_format = '%Y%m%d'
    return d.strftime(packed_format)
def to_str(self, date: datetime.date):
    """ISO-8601 (YYYY-MM-DD) rendering of *date*."""
    rendered = date.isoformat()
    return rendered
def is_weekend(d: date):
    """Return True when *d* falls on Saturday or Sunday.

    weekday() numbers Monday..Sunday as 0..6, so 5 (Sat) and 6 (Sun)
    are the weekend. Replaces the `False if ... else True` anti-idiom
    with the boolean expression itself.
    """
    return d.weekday() >= 5
def _(arg: date, *, template: str = "%d/%m/%Y") -> str:
    """Render *arg* with *template* (default DD/MM/YYYY)."""
    # NOTE(review): a function literally named "_" is usually a
    # singledispatch overload for date — confirm at the registration site.
    formatted = arg.strftime(template)
    return formatted
def str_date(dt: datetime.date) -> str:
    """Return the date as a string using the primary project format."""
    primary_format = DATE_FORMATS[0]
    return dt.strftime(primary_format)