def trips_affecting_the_bonus_plan(city_id, date_start, date_end, city_bonus_plan_dict):
    """Run the weekly fraud report for every ISO week touched by the range.

    Args:
        city_id: city identifier (coerced to str).
        date_start, date_end: inclusive date range as "%Y-%m-%d" strings.
        city_bonus_plan_dict: bonus plan rows; the last row's first element
            is the minimum trips threshold for a bonus.
    """
    date_format = "%Y-%m-%d"
    city_id = str(city_id)
    start = dt.strptime(date_start, date_format)
    end = dt.strptime(date_end, date_format)

    # Collect the distinct (year, iso_week) pairs covered by the date range.
    # (The old code re-parsed date_start on every loop iteration.)
    weeks = []
    for offset in range((end - start).days + 1):
        day = start + timedelta(days=offset)
        year_str = day.strftime('%Y')
        iso_week = datetime.date(day.year, day.month, day.day).isocalendar()[1]
        if [year_str, iso_week] not in weeks:
            weeks.append([year_str, iso_week])

    min_trips_for_bonus = city_bonus_plan_dict[-1][0]
    for year_str, week in weeks:
        # date_to is the Monday of the *following* week (exclusive bound);
        # isoweek normalizes week + 1 past the end of the year.
        date_from = Week(int(year_str), int(week)).monday().strftime('%Y%m%d')
        date_to = Week(int(year_str), int(week + 1)).monday().strftime('%Y%m%d')
        TotalFraudTable_ned(date_from, date_to, city_id, week, year_str,
                            min_trips_for_bonus, city_bonus_plan_dict)
def test_mix_max(self):
    """Week.min/Week.max bounds and the 7-day resolution are enforced."""
    self.assertEqual(Week.min, Week(1, 1))
    self.assertEqual(Week.max, Week(9999, 52))
    self.assertEqual(Week.resolution.days, 7)
    # Stepping past either bound must raise.
    with self.assertRaises(ValueError):
        Week.min - 1
    with self.assertRaises(ValueError):
        Week.max + 1
def weekly_report(self, request):
    """Aggregate the user's expenses per week and return them sorted by week.

    NOTE: the strftime-based week bucketing only works with sqlite.
    """
    user_expenses = Expense.objects.filter(user=request.user)

    # Per-week sum / average / count aggregation (sqlite-only strftime).
    week_rows = user_expenses.extra(
        {"week": "strftime('%Y%W', datetime)"}).values('week').order_by(
            'week').annotate(total=Sum('amount'),
                             average=Avg('amount'),
                             count=Count('amount'))
    weeks = {row['week']: row for row in week_rows}

    # Individual expenses, tagged with the same week key (sqlite-only).
    tagged_expenses = user_expenses.extra(
        {"week": "strftime('%Y%W', datetime)"}).order_by('datetime')

    # Attach serialized expenses and the week's calendar bounds to each bucket.
    for expense in tagged_expenses:
        bucket = weeks[expense.week]
        bucket.setdefault('expenses', []).append(
            ExpenseSerializer(expense).data)
        year, week_no = int(expense.week[:4]), int(expense.week[4:]) + 1
        bucket['initialDate'] = Week(year, week_no).monday()
        bucket['finalDate'] = Week(year, week_no).sunday()

    return Response(sorted(weeks.values(), key=lambda w: w['week']))
def google_kursk(year, week, FraudOrder):
    """Push FraudOrder rows into the weekly Google Sheet tab, creating the
    tab from the 'шаблон' (template) sheet when it does not exist yet."""
    scope = ['https://spreadsheets.google.com/feeds',
             'https://www.googleapis.com/auth/drive']
    credentials = ServiceAccountCredentials.from_json_keyfile_name(
        'templates/js/client_secret.json', scope)
    gc = gspread.authorize(credentials)
    wks = gc.open_by_key("1eYS1QbeYS3E_zV3uagBjvIt25xYLWsLbztl8sd9uG5w")

    week_obj = Week(int(year), int(week))
    date_from = week_obj.monday().strftime('%Y%m%d')
    date_to = week_obj.sunday().strftime('%Y%m%d')
    # Tab name: "YYYY.MM.DD - YYYY.MM.DD".
    name_sheet = (date_from[:4] + '.' + date_from[4:6] + '.' + date_from[6:]
                  + ' - '
                  + date_to[:4] + '.' + date_to[4:6] + '.' + date_to[6:])

    # Duplicate the template tab only when no tab with this name exists yet.
    if not any(sheet.title == name_sheet for sheet in wks.worksheets()):
        wks.duplicate_sheet(wks.worksheet('шаблон').id,
                            new_sheet_name=name_sheet)

    wks.values_update(
        name_sheet + '!A4',
        params={'valueInputOption': 'USER_ENTERED'},
        body={'values': FraudOrder},
    )
def test_replace(self):
    """Week.replace swaps year/week fields and normalizes overflow weeks."""
    base = Week(2011, 20)
    self.assertEqual(base.replace(), base)
    self.assertEqual(base.replace(year=2010), Week(2010, 20))
    self.assertEqual(base.replace(week=2), Week(2011, 2))
    # Out-of-range week numbers roll over into the following year(s).
    self.assertEqual(base.replace(week=99), Week(2012, 47))
    self.assertEqual(base.replace(year=1, week=1), Week(1, 1))
def fix_weekly_date(apps, schema_editor):
    """Backfill `date` on weekly status rows with the Monday of their ISO week."""
    # Same fix for both historical models; school rows first, then country.
    for model_name in ('SchoolWeeklyStatus', 'CountryWeeklyStatus'):
        weekly_model = apps.get_model('connection_statistics', model_name)
        for status in weekly_model.objects.all():
            status.date = Week(status.year, status.week).monday()
            status.save()
def prepareData(city, week, year, city_bonus_plan_dict):
    """Resolve the week's date window, sheet tab name and bonus threshold."""
    city = str(city)
    iso_week = Week(int(year), int(week))
    date_from = iso_week.monday().strftime('%Y%m%d')
    date_to = iso_week.sunday().strftime('%Y%m%d')
    # Tab name: "YYYY.MM.DD - YYYY.MM.DD".
    name_sheet = (date_from[:4] + '.' + date_from[4:6] + '.' + date_from[6:]
                  + ' - '
                  + date_to[:4] + '.' + date_to[4:6] + '.' + date_to[6:])
    # Last plan row holds the minimal trips count that still earns a bonus.
    min_trips_for_bonus = city_bonus_plan_dict[-1][0]
    return week, year, city, date_from, date_to, name_sheet, min_trips_for_bonus
def week_start_end_seconds(year_number, week_number):
    """Return (start, end) UTC epoch seconds for the given ISO week.

    `start` is midnight UTC on the week's Monday; `end` is midnight UTC on
    the *next* week's Monday (exclusive upper bound).
    """
    week_start = Week(year_number, week_number)
    # Week arithmetic handles the year rollover, including 53-week ISO
    # years: the old `week_number < 52` check wrongly sent week 52 of a
    # 53-week year straight to the next year's week 1.
    week_end = week_start + 1
    midnight = datetime.datetime.min.time()
    return (
        int(datetime.datetime.combine(week_start.monday(), midnight,
                                      tzinfo=tz.tzutc()).timestamp()),
        int(datetime.datetime.combine(week_end.monday(), midnight,
                                      tzinfo=tz.tzutc()).timestamp()),
    )
def get_weekly_spent_time(self, week, year, board=None):
    """Sum spent time between Monday and Friday of the given ISO week,
    optionally restricted to a single board."""
    iso_week = Week(year, week)
    filters = {
        "date__gte": iso_week.monday(),
        "date__lte": iso_week.friday(),
    }
    # If we pass the board, only this board's spent times are counted.
    if board is not None:
        filters["board"] = board
    return self._sum_spent_time_from_filter(filters)
def make_o(current_year_num, begin_week_num):
    """Commit on day pairs over four consecutive weeks, tracing an 'O'
    shape (wed/thu, tue/fri, tue/fri, wed/thu) on the contribution graph."""
    day_pairs = (
        ('wednesday', 'thursday'),
        ('tuesday', 'friday'),
        ('tuesday', 'friday'),
        ('wednesday', 'thursday'),
    )
    for offset, (first_day, second_day) in enumerate(day_pairs):
        week = Week(current_year_num, begin_week_num + offset)
        commit(getattr(week, first_day)())
        commit(getattr(week, second_day)())
def get_iso_week_from_params(get_params):
    """Build a Week from 'week'/'year' GET params; fall back to 2019-W01."""
    week_number = get_params.get('week', '')
    year_number = get_params.get('year', '')
    if not (week_number.isdigit() and year_number.isdigit()):
        # default weekday for that dvmn lesson
        return Week(2019, 1)
    return Week(int(year_number), int(week_number))
def parse(self, response):
    """Scrape a Strava athlete page: approximate weekly mileage from the
    bar-graph heights, then chain a request for the profile sidebar.

    NOTE(review): mileage is approximated as y_max * height / 100, i.e. it
    assumes a full bar is 100px tall — confirm against the page markup.
    """
    # Read in the graph and approximation mileage from the height of the bars
    graph = response.xpath('//div[@class="athlete-graph"]')
    # Top label of the y-axis = mileage represented by a full-height bar.
    y_max = int(
        graph.xpath('./ul[@class="y-axis"]/li[last()]/text()').extract()
        [0])
    bar_divs = graph.xpath('./ul[@class="intervals"]//div[@class="bar"]')
    miles = []
    for div in bar_divs:
        # A missing fill div means an empty week; treat it as height 0.
        style = (div.xpath('.//div[@class="fill"]/@style').extract()
                 or ["0"])[0]
        height = int(
            re.sub('[^\d]+', '',
                   style))  # The style attribute is e.g "height:83px;"
        miles.append(y_max * height / 100.0)
    weeks = graph.xpath('./ul[@class="intervals"]/li/@id').extract()
    # Data returns e.g. interval-201605
    # Example: 201605 = 5th week in 2016
    # Convert each id into the ISO date of that week's Monday.
    wb_dates = map(
        lambda x: Week(int(x[9:13]), int(x[-2:])).monday().strftime(
            '%Y-%m-%d'), weeks)
    # Get sidebar, but propagate this data through so we can yield:
    yield scrapy.Request('https://www.strava.com/athletes/' +
                         str(response.meta['id']) +
                         '/profile_sidebar_comparison',
                         callback=self.parse_sidebar,
                         headers=self.xhr_headers,
                         meta={
                             'id': response.meta['id'],
                             'mileage': zip(wb_dates, miles)
                         })
def get_date_from_week_number(date_time):
    """Resolve a week-number expression to a concrete date.

    Missing year / week_number / day groups default to the current year,
    the current ISO week and the current weekday respectively.
    """
    week_day_map = {
        0: "mon",
        1: "tue",
        2: "wed",
        3: "thu",
        4: "fri",
        5: "sat",
        6: "sun"
    }
    current_date = date.today()
    match = re.match(WN_FUNCTION_REGEX, date_time)

    if match.group('year') is not None:
        year = int(match.group('year'))
    else:
        year = current_date.year

    if match.group('week_number') is not None:
        week_number = int(match.group('week_number'))
    else:
        # %V is the ISO week number of today.
        week_number = int(current_date.strftime("%V"))

    if match.group('day') is not None:
        day = match.group('day')
    else:
        day = week_day_map[current_date.weekday()]

    w = Week(year, week_number)
    # The getattr default must be the bound fallback *method*, not the
    # string "sunday" — the old code would have tried to call a plain str
    # whenever the abbreviation lookup missed.
    func = getattr(w, DAYS_OF_WEEK_ABBREVIATIONS[day], w.sunday)
    return func()
def hasPromo2weeks(date, Promo2SinceYear, Promo2SinceWeek):
    """Per sample: weeks elapsed since promo2 started, bucketed.

    Buckets: 0 (not joined or date before start), 1-25 kept as raw weeks,
    26 (<= 100 weeks), 27 (<= 200 weeks), 28 (beyond).
    """
    dates = np.array(date)
    promo_years = np.array(Promo2SinceYear)
    promo_weeks = np.array(Promo2SinceWeek)
    sample_count = len(dates)
    sinceweek = np.zeros([sample_count], dtype=int)
    for i in range(sample_count):
        # "NA" marks stores that never joined promo2.
        if promo_years[i] == "NA":
            sinceweek[i] = 0
            continue
        promo_start = Week(int(promo_years[i]), int(promo_weeks[i])).monday()
        elapsed = (datetime.strptime(dates[i], '%Y-%m-%d').date() -
                   promo_start).days // 7
        if elapsed < 0:
            sinceweek[i] = 0
        elif elapsed <= 25:
            sinceweek[i] = elapsed
        elif elapsed <= 100:
            sinceweek[i] = 26
        elif elapsed <= 200:
            sinceweek[i] = 27
        else:
            sinceweek[i] = 28
    return sinceweek
def latest_promo2_months(date, promointerval, Promo2SinceYear,
                         Promo2SinceWeek):
    """Per sample: months since the most recent promo2 interval started.

    NOTE(review): `date[i] = datetime.strptime(...)` writes a date object
    back into a numpy array created from strings — this only behaves as
    intended if the array dtype ends up as object; verify the caller's
    input type.
    """
    #the number of months since last promo. If no PromoInterval or hasn't joined promo2, return 0. Output set:[0,1,2,3]
    promo2int = promointerval2int(promointerval)
    promo2int = np.array(promo2int)
    date = np.array(date)
    promoyear = np.array(Promo2SinceYear)
    promoweek = np.array(Promo2SinceWeek)
    months_since_latest_promo2 = np.zeros([len(date)], dtype=int)
    for i in range(len(date)):
        # promo2int == 0 means no promo interval / never joined promo2.
        if promo2int[i] == 0:
            months_since_latest_promo2[i] = 0
            continue
        date[i] = datetime.strptime(date[i], '%Y-%m-%d').date()
        start_promo2 = Week(int(promoyear[i]), int(promoweek[i])).monday()
        # Dates before the promo2 start contribute nothing.
        if date[i] < start_promo2:
            months_since_latest_promo2[i] = 0
            continue
        # Intervals repeat quarterly starting at month promo2int[i]; find
        # the start month of the latest interval not after date[i].
        if date[i].month < promo2int[i]:
            latest_promo2_start_year = date[i].year - 1
            # NOTE(review): `+ 12 - 3` backs up one quarter into the
            # previous year — verify against promointerval2int's encoding.
            latest_promo2_start_month = promo2int[i] + 12 - 3
        else:
            latest_promo2_start_year = date[i].year
            latest_promo2_start_month = (
                (date[i].month - promo2int[i]) // 3) * 3 + promo2int[i]
        latest_promo2_start_day = datetime(year=latest_promo2_start_year,
                                           month=latest_promo2_start_month,
                                           day=1)
        # Approximate months as 30-day blocks.
        months_since_latest_promo2[i] = (
            date[i] - latest_promo2_start_day.date()).days // 30
    return months_since_latest_promo2
def get_data(self, year):
    """Build a per-week inspection matrix for every ISO week of `year`.

    Returns (data, type_comanys, total): `data` has one row per week with
    inspection counts per company type, `type_comanys` the column headers,
    `total` the grand total over all weeks.
    """
    type_comanys = [
        dict(pk=i.pk, name=i.name) for i in TypeCompany.objects.all()
    ]
    data = []
    # Number of ISO weeks in the year (52 or 53); Week is tuple-like, so
    # index [1] is the week number.
    _range = Week.last_week_of_year(year)[1]
    for i in range(_range):
        number = i + 1
        date_list = Week(year, number).days()
        month = self.get_month_from_week(date_list)
        first, last = DateInsopesca(year, number).get_date_range_from_week()
        data.append(
            dict(range_week=f'{first} a {last}',
                 inspections_total=[],
                 month=month,
                 total_col=0))
        for type_comany in type_comanys:
            inspection_total = 0
            companys = Company.objects.filter(
                type_company=type_comany['pk'])
            for company in companys:
                # Same start/end key: inspections for exactly this week.
                inspections = company.get_inspections(
                    f'{year}-{number}', f'{year}-{number}')
                inspection_total += len(inspections)
            data[i]['inspections_total'].append(inspection_total)
            data[i]['total_col'] += inspection_total
    total = 0
    for tot in data:
        total += tot['total_col']
    return data, type_comanys, total
def execute_goals(self, weekly_budget=770):
    """Sum this ISO week's debit records and send the remaining weekly
    budget to the user over Telegram.

    Args:
        weekly_budget: spending cap for the week (defaults to the previous
            hard-coded 770, so existing callers are unaffected).
    """
    now = datetime.datetime.now()
    current_week = Week(now.year, now.isocalendar()[1])
    start_date = current_week.monday()
    end_date = current_week.sunday()
    result = Records.objects.filter(
        create_date_time__date__range=(start_date, end_date),
        type_entry__id=1).annotate(total_value=Sum("debit"))
    try:
        total_debit = 0
        for record in result:
            total_debit += record.total_value if record.total_value else 0
        logging.warning(f"total_debit: {total_debit}")
        can_use = weekly_budget - total_debit
        loop = asyncio.get_event_loop()
        bot = telepot.aio.Bot(settings.TELEGRAM_TOKEN)
        loop.run_until_complete(
            bot.sendMessage(
                self.user_id,
                f"Você ainda pode gastar {can_use} doas {weekly_budget} reais estipulados para a semana."
            ))
    except Exception:
        # logging.exception appends the traceback itself; the old call
        # passed the exception as a spurious %-argument and caught
        # BaseException (which also swallows KeyboardInterrupt/SystemExit).
        logging.exception("Error na meta")
def get_week_agenda(model, queryset, start_date):
    """ Get list of events that will occur in the given week.

    :param queryset: EventInstance queryset
    :param start_date: period start_date
    :type start_date: datetime.datetime()
    :return: data dictionary
    """
    iso_week = Week(start_date.year, start_date.date().isocalendar()[1])
    week_start = utils.date_to_datetime(iso_week.monday())
    week_end = utils.date_to_datetime(iso_week.sunday(), 'max')
    items = model.objects.in_date_range(week_start,
                                        week_end).filter(event__in=queryset)
    return {
        'start_date': week_start,
        'end_date': week_end,
        'scope': 'Week',
        'items': items,
        'next_date': week_start + timedelta(days=7),
        'previous_date': week_start - timedelta(days=7),
    }
def get_hiatus_weeks(air_weeks, year=2018):
    """Find the 'mm/dd' Mondays of weeks skipped between consecutive air weeks.

    Args:
        air_weeks: iterable of 'MM/DD' strings, in broadcast order.
        year: calendar year the air weeks belong to (was hard-coded 2018;
            the default keeps existing callers unchanged).

    NOTE(review): gaps that straddle a year boundary (week numbers wrapping
    from 52/53 back to 1) are not detected — confirm inputs stay in one year.
    """
    iso_week_nums = []
    for week in air_weeks:
        month, day = (int(part) for part in week.split('/'))
        iso_week_nums.append(date(year, month, day).isocalendar()[1])

    # Any jump of more than one week number marks a hiatus; collect every
    # skipped week number in between.
    hiatus_week_nums = []
    for before, after in zip(iso_week_nums, iso_week_nums[1:]):
        if after - before > 1:
            hiatus_week_nums.extend(range(before + 1, after))

    return [Week(year, num).monday().strftime('%m/%d')
            for num in hiatus_week_nums]
def week_trips_count(self, request):
    """Trip counts per week for roughly the last four ISO weeks, keyed by a
    'Mon DD - Mon DD' date-range label.
    """
    trips = self.get_queryset()
    end_date = datetime.datetime.now()
    week_no = end_date.isocalendar()[1]
    # NOTE(review): goes non-positive during the first four ISO weeks of
    # the year and relies on Week() tolerating that — confirm.
    start_week_no = week_no - 4
    current_year = datetime.datetime.now().year
    date = Week(current_year, start_week_no).monday()
    start_date = datetime.datetime.combine(date,
                                           datetime.datetime.min.time())
    response_data = {}
    # Count trips per trip_date inside the window.
    trip_queryset = (trips.filter(
        trip_date__range=(start_date, end_date)).values("trip_date").annotate(
            count=Count("trip_date")))
    for trip in trip_queryset:
        week = trip["trip_date"].isocalendar()[1]
        val = trip["count"]
        # Rebuild the week's Monday via strptime's %W (0-based), hence
        # week - 1.
        # NOTE(review): uses current_year even for trips whose ISO year
        # differs (early January) — labels may be wrong across a year
        # boundary.
        week_start_date = datetime.datetime.strptime(
            "{} {} 1".format(current_year, week - 1), "%Y %W %w")
        week_end_date = week_start_date + datetime.timedelta(days=6)
        week_start_date_formatted = week_start_date.strftime("%b %d")
        week_end_date_formatted = week_end_date.strftime("%b %d")
        week_formatted = week_start_date_formatted + " - " + week_end_date_formatted
        # Accumulate in case two trip_dates fall in the same labeled week.
        response_data[week_formatted] = response_data.get(
            week_formatted, 0) + val
    return Response(response_data)
def get_dates_week_nr(year, week_nr):
    """Get all dates of an ISO week as a list.

    Parameters
    ----------
    year : int
        ISO year
    week_nr : int
        ISO week number

    Returns
    -------
    list_days : list
        date objects for Monday through Sunday of the ISO week
    """
    monday_in_week = Week(year, week_nr).monday()
    # The old range(1, 8) skipped Monday and leaked the *next* week's
    # Monday in; offsets 0..6 cover Monday..Sunday exactly.
    return [monday_in_week + timedelta(days=offset) for offset in range(7)]
def convert_to_catergory_str(year, week):
    """Format an ISO week as 'monday~sunday' in the short month-day format."""
    week_obj = Week(year, week)
    start_label = week_obj.monday().strftime(DATE_FORMAT_SHORT_MONTH_DAY)
    end_label = week_obj.sunday().strftime(DATE_FORMAT_SHORT_MONTH_DAY)
    return start_label + '~' + end_label
def create_event(c):
    """Build a calendar-event dict from a scheduled course entry.

    Fix: the description now reuses the already-guarded `tutor` string
    instead of dereferencing c.cours.tutor.username directly, which crashed
    whenever the course had no tutor.
    """
    # Event start: the slot's weekday within the course's ISO week, plus the
    # slot's time-of-day offset.
    begin = datetime.combine(
        Week(c.cours.an, c.cours.semaine).day(c.creneau.jour_id - 1),
        datetime.min.time()) \
        + timedelta(hours=c.creneau.heure.hours,
                    minutes=c.creneau.heure.minutes)
    end = begin + timedelta(minutes=c.creneau.duration)
    tutor = c.cours.tutor.username if c.cours.tutor is not None else ''
    location = c.room.name if c.room is not None else ''
    return {
        'id': c.id,
        'title': c.cours.module.abbrev + ' ' + c.cours.type.name + ' - ' +
                 c.cours.groupe.train_prog.abbrev + ' ' + c.cours.groupe.nom +
                 ' - ' + tutor,
        'location': location,
        'begin': begin,
        'end': end,
        'description': 'Cours \: ' + c.cours.module.abbrev + ' ' +
                       c.cours.type.name + '\\n' +
                       'Groupe \: ' + c.cours.groupe.train_prog.abbrev +
                       ' ' + c.cours.groupe.nom + '\\n' +
                       'Enseignant : ' + tutor + '\\n' +
                       'Salle \: ' + location
    }
def get_sparse_schedule(user_id: int):
    """Return the user's schedules filtered either by a start/end week-string
    pair or by a whole year (mutually exclusive URL arguments).

    NOTE(review): the string path passes *ordinals*
    (Week.fromstring(...).toordinal()) to get_user_schedules, while the
    year path passes Week objects — confirm which representation
    schedule_util actually expects; they look inconsistent.
    """
    if not user_util.get_from_id(user_id):
        return api_error_helpers.item_not_found("user", "id", user_id)

    start_str = request.args.get("start_week", default=None, type=str)
    end_str = request.args.get("end_week", default=None, type=str)
    year = request.args.get("year", default=None, type=int)

    # Week-range args and year are mutually exclusive.
    if (start_str or end_str) and year:
        return api_error_helpers.invalid_url_args_combination(
            ["start_str", "end_str", "year"])

    if not ((start_str and end_str) or year):
        # NOTE(review): in this branch `start_str and end_str` is always
        # falsy, so the else arm below is unreachable dead code.
        if not (start_str and end_str):
            return api_error_helpers.missing_url_arg("start_week and end_week")
        else:
            return api_error_helpers.missing_url_arg("year")

    start_week = Week.fromstring(start_str).toordinal() if start_str else None
    end_week = Week.fromstring(end_str).toordinal() if end_str else None

    # A year selects the full span of that year's ISO weeks.
    if year:
        start_week = Week(year, 1)
        end_week = Week.last_week_of_year(year)

    schedule_map = schedule_util.get_user_schedules(user_id, start_week,
                                                    end_week)

    return jsonify(list(sched.serialize() for sched in schedule_map.values()))
def decide_forecast_time(timeby, value):
    """Map a Daily/Weekly/Monthly selection onto a 2014 forecast window.

    Returns (start, end, starti, endi): start/end are datetimes, starti/endi
    are hour offsets from 2014-01-01.
    """
    if timeby == "Daily":
        # Re-anchor the chosen date into 2014 (keep month/day, swap year).
        d = "2014" + value[4:]
        start = parse(d)
        starti = (parse(d) - parse('2014-1-1')).days * 24
        end = parse(d) + timedelta(days=1)
        endi = starti + 24
    elif timeby == "Weekly":
        # NOTE(review): `value` is ignored here — the window is pinned to
        # ISO week 40 of 2011 re-anchored into 2014; confirm intended.
        d = Week(2011, 40).monday()
        s = d.strftime("%Y-%m-%d")
        d = "2014" + s[4:]
        start = parse(d)
        starti = (parse(d) - parse('2014-1-1')).days * 24
        end = parse(d) + timedelta(days=7)
        endi = starti + (7 * 24)
    else:
        # Monthly: 30-day window from the first date of the chosen value.
        d = get_first_date(value)
        start = parse(d)
        starti = (parse(d) - parse('2014-1-1')).days * 24
        end = parse(d) + timedelta(days=30)
        endi = starti + (30 * 24)
    # Clamp windows that spill into 2015 back to the end of 2014.
    if end.year == 2015:
        end = parse("2014-12-31")
        endi = starti + ((end - parse(d)).days * 24)
    return start, end, starti, endi
def getPrevWeekString(weekString):
    """Return the 'YYYYWW' string of the ISO week preceding `weekString`.

    The old implementation derived the year from the calendar year of the
    previous week's Sunday, which produced e.g. '201600' for input
    '201601' (week 1 whose Monday falls in January). Week arithmetic
    handles the ISO-year rollover (including 53-week years) correctly.
    """
    annee = int(weekString[:4])
    # on récupère le numéro de la semaine (the ISO week number)
    sem = int(weekString[-2:])
    previous_week = Week(annee, sem) - 1
    return '%04d%02d' % (previous_week.year, previous_week.week)
def scrapper(self) -> str or None:
    """Scrape the Norfolk Southern site for this week's carload-report PDF.

    Returns the absolute PDF URL, or None when no matching link is found.

    NOTE(review): the final `link[month - 1]` assumes the page lists one
    report link per month in calendar order — confirm against the live page.
    """
    date = Week(self.year_no, self.week_no)
    # Month of this ISO week's Monday (Week.day(0) is Monday).
    month = date.day(0).month
    options = webdriver.ChromeOptions()
    options.add_argument("--no-sandbox")
    options.add_argument("--disable-dev-shm-usage")
    options.add_argument("--headless")
    browser = webdriver.Chrome(
        options=options, executable_path=conf.CHROME_DRIVER_PATH
    )
    log(log.INFO, "Start get url Norfolk Southern")
    browser.get(self.URL)
    log(log.INFO, "Get url Norfolk Southern")
    # The report links are rendered client-side, hence the headless browser.
    generated_html = browser.page_source
    soup = BeautifulSoup(generated_html, "html.parser")
    tags = soup.find_all("a")
    log(log.INFO, "Get all links Norfolk Southern")
    link = [
        link.attrs["href"]
        for link in tags
        if f"weekly-performance-reports/{self.year_no}/investor-weekly-carloads"
        in link.attrs["href"]
    ]
    if not link:
        log(log.WARNING, "Links not found")
        return None
    log(log.INFO, "Get link with pdf for Norfolk Southern")
    link = "http://www.nscorp.com" + link[month - 1]
    log(log.INFO, "Found pdf link: [%s]", link)
    return link
def hasPromo2weeks(date, Promo2SinceYear, Promo2SinceWeek):
    """Whole weeks since promo2 started, clamped to the range [0, 25].

    Returns 0 when the store never joined promo2 (Promo2SinceYear == 0) or
    when `date` precedes the promo2 start week.
    """
    if Promo2SinceYear == 0:
        return 0
    promo_start = Week(Promo2SinceYear, Promo2SinceWeek).monday()
    elapsed_weeks = (date.date() - promo_start).days // 7
    return 0 if elapsed_weeks < 0 else min(elapsed_weeks, 25)
def prepareData(city, week=1, year=2019):
    """Resolve week/year defaults, then derive the date window, sheet tab
    name and bonus threshold for the city.

    Fix: the old sentinel checks compared against the *strings* '1' and
    '2019', so the int defaults never triggered the "use the current
    week/year" branch; both the int and str sentinels are accepted now.
    """
    if week in (1, '1'):
        # Default: report on last week.
        week = datetime.today().isocalendar()[1] - 1
    if year in (2019, '2019'):
        year = datetime.today().isocalendar()[0]
    city = str(city)
    iso_week = Week(int(year), int(week))
    date_from = iso_week.monday().strftime('%Y%m%d')
    date_to = iso_week.sunday().strftime('%Y%m%d')
    # Tab name: "YYYY.MM.DD - YYYY.MM.DD".
    name_sheet = date_from[:4] + '.' + date_from[4:6] + '.' + date_from[6:] \
        + ' - ' + date_to[:4] + '.' + date_to[4:6] + '.' + date_to[6:]
    min_trips_for_bonus = CityDict.city_bonus_plan_dict[city][-1][0]
    return week, year, city, date_from, date_to, name_sheet, min_trips_for_bonus
def test_arithmetics(self):
    """Week +/- int shifts by whole weeks; Week - Week is the signed diff."""
    base = Week(2011, 20)
    self.assertEqual(str(base + 0), "2011W20")
    self.assertEqual(str(base + 1), "2011W21")
    self.assertEqual(str(base - 1), "2011W19")
    if sys.version < '3':
        # Python 2 only: long operands behave exactly like ints.
        self.assertEqual(str(base + long(1)), "2011W21")
        self.assertEqual(str(base - long(1)), "2011W19")
    # Year boundaries are crossed transparently.
    self.assertEqual(str(base + 52), "2012W20")
    self.assertEqual(str(base - 104), "2009W21")
    self.assertEqual(base - base, 0)
    self.assertEqual(base - Week(2011, 1), 19)
    self.assertEqual(Week(2011, 1) - base, -19)
    # Week.resolution (7 days) acts as a one-week step.
    self.assertEqual(str(base + Week.resolution), "2011W21")
    self.assertEqual(str(base - Week.resolution), "2011W19")