def runBot():
    """Log in to reddit via OAuth and send PotW reminder messages.

    Reads reminder entries from PotWSettings; for any entry whose date is
    within one day of today, messages the tabled user plus a monitor
    account, and logs the action.
    """
    # login
    if PotWSettings.PotWSettings.useMultiprocessHandler:
        handler = MultiprocessHandler()
        r = praw.Reddit(user_agent="/r/DaystromInstitute PotW Program v1.0, contact /u/kraetos", handler=handler)
    else:
        r = praw.Reddit(user_agent="/r/DaystromInstitute PotW Program v1.0, contact /u/kraetos", )
    r.set_oauth_app_info(client_id=PotWSettings.PotWSettings.oauthClientID,
                         client_secret=PotWSettings.PotWSettings.oauthClientSecret,
                         redirect_uri='http://127.0.0.1')
    r.refresh_access_information(PotWSettings.PotWSettings.refreshToken)
    logging.info("Started up.")
    # get the settings and turn them into dates
    today = datetime.today().date()
    for line in PotWSettings.PotWSettings.reminderDates.split("\n"):
        trimmedLine = line.strip()
        if not trimmedLine:
            continue
        parts = trimmedLine.split(" ")
        # NOTE(review): the year is hard-coded to 2016 — confirm this is intended.
        date = datetime.strptime(parts[0] + " " + parts[1] + " 2016", '%B %d %Y')
        # BUGFIX: the original compared date.date() against raw datetime objects
        # for the +/- 1 day checks, which could never be equal; normalize all
        # three comparison values to dates.
        if date.date() in (today - timedelta(1), today, today + timedelta(1)):
            # this is it
            person = parts[2]
            r.send_message(person, "Post of the Week", "To remind you that you're tabled for Daystrom's PotW today.")
            # just to make sure the bot isn't going berserk without notice
            r.send_message("dxdydxdy", "Post of the Week", "To remind someone else that they're tabled for Daystrom's PotW today.")
            logging.info("Sent message to " + person)
def _connection(self):
    """Fetch the match list for the configured competition from
    api.football-data.org and return the decoded JSON response.

    Returns None implicitly when the request raises; errors are logged
    through self.py3.log.
    """
    # Human competition name -> football-data.org competition id.
    competitions = {
        "ligue 1": 2015,
        "premier league": 2021,
        "bundesliga": 2002,
        "liga": 2014
    }
    # NOTE(review): `today` is not defined in this method — presumably a
    # module-level datetime; confirm it is refreshed per call.
    if self.dateOfmatch == "tomorrow":
        date = today + timedelta(days=1)
    elif self.dateOfmatch == "aftertomorrow":
        date = today + timedelta(days=2)
    else:
        date = today
    try:
        connection = http.client.HTTPConnection('api.football-data.org')
        headers = {'X-Auth-Token': self.token}
        # Single-day window: dateFrom == dateTo.
        uri = '/v2/competitions/%s/matches?dateFrom=%s&dateTo=%s' % (
            competitions[self.competition], date.date(), date.date()
        )
        self.py3.log(uri)
        connection.request('GET', uri, None, headers)
        response = json.loads(connection.getresponse().read().decode())
        return response
    except Exception as e:
        msg = "Error: calling api (%s)" % (str(e))
        self.py3.log(msg)
def test_admins_can_create_basic_incidents(mockdata, client, session):
    """Admins can create an incident through the incident API; the new
    incident is persisted with the submitted date."""
    with current_app.test_request_context():
        login_admin(client)
        date = datetime(2000, 5, 25, 1, 45)
        report_number = '42'
        address_form = LocationForm(street_name='AAAAA', cross_street1='BBBBB', city='FFFFF', state='IA', zip_code='03435')
        # These have to have a dropdown selected because if not, an empty
        # Unicode string is sent, which does not match the '' selector.
        link_form = LinkForm(link_type='video')
        license_plates_form = LicensePlateForm(state='AZ')
        form = IncidentForm(date_field=str(date.date()), time_field=str(date.time()), report_number=report_number, description='Something happened', department='1', address=address_form.data, links=[link_form.data], license_plates=[license_plates_form.data], officers=[])
        data = process_form_data(form.data)
        rv = client.post(url_for('main.incident_api') + 'new', data=data, follow_redirects=True)
        assert rv.status_code == 200
        assert 'created' in rv.data.decode('utf-8')
        # The incident must actually exist in the database, not just in the
        # response body.
        inc = Incident.query.filter_by(date=date.date()).first()
        assert inc is not None
def parsing(fileName, endDate, shift = 0):
    """Parse shipment rows from the workbook at `fileName` into the global
    dataArray, keeping rows dated strictly before `endDate` and excluding
    intercompany product codes.

    shift: column offset applied to every column index (default 0).
    """
    ws = loading(fileName)
    # Intercompany product codes to exclude (hoisted out of the loop).
    intercompany = [14003280]
    for row in ws['A1':'Z1048577']:
        cells = list(row)
        date = cells[shift + 2].value
        productCode = cells[shift + 10].value
        transportCode = cells[shift + 12].value
        # <--- replace master data --->
        try:
            # Normalize numeric transport codes to plain strings ("7.0" -> "7").
            transportCode = str(int(transportCode))
        except (TypeError, ValueError):
            # BUGFIX: narrowed from a bare `except:` that also hid real errors
            # (e.g. KeyboardInterrupt); non-numeric codes are kept as-is.
            pass
        # <--------------------------->
        warehouseCode = cells[shift + 3].value
        # <--- replace master data --->
        if warehouseCode == 1:
            warehouseCode = 'U104'
        if warehouseCode == 5:
            warehouseCode = 'U103'
        # <--------------------------->
        shippedVolume = cells[shift + 14].value
        shippedAmount = cells[shift + 15].value
        try:
            if date.date() < endDate and int(productCode) not in intercompany:
                dataArray.append([date.date(), int(productCode), transportCode,
                                  warehouseCode, shippedVolume, shippedAmount])
        except (AttributeError, TypeError, ValueError):
            # Rows with a missing/invalid date or product code are skipped.
            pass
def get_special_days(self, date_from, date_to, employee):
    """Map each special day in [date_from, date_to] to its label.

    Returns a dict {date: name} holding public holidays (labelled
    'Public Holiday: <name>') plus Saturdays and Sundays.

    Partly deprecated: public holidays now generate actual leave entries,
    so they no longer need to be deducted from the number of days; the
    weekend handling is kept and could be extended to other weekdays for
    countries with other work schedules.
    """
    holidays = self.env['hr.public.holiday.holidays'].search([])
    result = {}
    for current in self.daterange(date_from, date_to):
        day = current.date()
        holiday = holidays.filtered(lambda r: r.date == str(day))
        if holiday:
            result[day] = 'Public Holiday: %s' % holiday.name
        elif current.weekday() == 5:
            result[day] = 'Saturday'
        elif current.weekday() == 6:
            result[day] = 'Sunday'
    return result
def file_list(request, date: date = None):
    """Render index.html with files from FILES_PATH.

    If `date` is given (a datetime — its .date() is used), only files whose
    creation or modification date equals it are listed; otherwise all files
    are listed with their ctime/mtime dates.
    """
    template_name = 'index.html'
    files = os.listdir(FILES_PATH)
    context = {'files': []}
    # BUGFIX: the original called date.date() (in a debug print) before the
    # `date is None` check, raising AttributeError on the default call; the
    # two near-identical loops are also merged into one.
    for file in files:
        st = os.stat('files/' + file)
        file_ctime = datetime.fromtimestamp(st.st_ctime).date()
        file_mtime = datetime.fromtimestamp(st.st_mtime).date()
        if date is None or date.date() == file_ctime or date.date() == file_mtime:
            context['files'].append({'name': file, 'ctime': file_ctime, 'mtime': file_mtime})
    return render(request, template_name, context)
def parsing(fileName, endDate, shift=0):
    """Parse shipment rows from the workbook at `fileName` and append them
    to the global dataArray.

    Only rows dated strictly before `endDate` are kept, and intercompany
    product codes are excluded. `shift` offsets every column index.
    """
    ws = loading(fileName)
    intercompany = [14003280]  # excluded product codes, hoisted out of the loop
    for row in ws['A1':'Z1048577']:
        cells = list(row)
        date = cells[shift + 2].value
        productCode = cells[shift + 10].value
        transportCode = cells[shift + 12].value
        # <--- replace master data --->
        try:
            # Numeric transport codes become plain strings ("7.0" -> "7").
            transportCode = str(int(transportCode))
        except (TypeError, ValueError):
            # BUGFIX: narrowed from a bare `except:`; non-numeric codes
            # are deliberately left unchanged.
            pass
        # <--------------------------->
        warehouseCode = cells[shift + 3].value
        # <--- replace master data --->
        if warehouseCode == 1:
            warehouseCode = 'U104'
        if warehouseCode == 5:
            warehouseCode = 'U103'
        # <--------------------------->
        shippedVolume = cells[shift + 14].value
        shippedAmount = cells[shift + 15].value
        try:
            if date.date() < endDate and int(productCode) not in intercompany:
                dataArray.append([
                    date.date(), int(productCode), transportCode,
                    warehouseCode, shippedVolume, shippedAmount
                ])
        except (AttributeError, TypeError, ValueError):
            # Skip rows with a missing/invalid date or product code.
            pass
def getDataCumulativeTestsChart(json, dates):
    """Build cumulative daily test counts over `dates` from raw records.

    Records dated before the charted range are folded into the first day
    so the cumulative series starts at the correct total.
    Returns {"testsAdminstered": [...]}.
    """
    in_range = {str(d.date()) for d in dates}
    first_day = str(dates[0].date())
    per_day = defaultdict(int)
    earlier = 0
    for record in json:
        day = str(pd.to_datetime(record["StatisticsDate"]).date())
        if day in in_range:
            per_day[day] += 1
        elif day < first_day:
            # Tests administered before the charted range still count
            # toward the cumulative total shown on the first day.
            earlier += 1
    daily = [per_day[str(d.date())] for d in dates]
    daily[0] += earlier
    return {"testsAdminstered": list(np.cumsum(daily))}
def pretty_date(event):
    """Return a human-friendly label for event.date (a datetime).

    "Today" / "Tomorrow" for the next two days, otherwise a string like
    "Monday, January 01".
    """
    date = event.date
    today = datetime.today().date()
    if date.date() == today:
        return "Today"
    # BUGFIX: the original compared against yesterday (today - 24 hours)
    # while labelling the result "Tomorrow"; tomorrow is today + one day.
    elif date.date() == today + timedelta(days=1):
        return "Tomorrow"
    else:
        return date.strftime('%A, %B %d')
def _get_units(self):
    """Yield (url, parsed document) pairs for today's items on the index page.

    Iterates the index list in page order and stops at the first article
    older than today.
    """
    index = self.getdoc(self.index_url)
    for a in index.cssselect("#element ul.snelnieuws_list li.item a"):
        doc = self.getdoc(a.get('href'))
        # Publication date parsed from the article page itself.
        date = readDate(doc.cssselect("span.datum")[0].text)
        # NOTE(review): `date` (an instance) shadows any imported `date`
        # class; date.today() still works because classmethods are reachable
        # through instances, but the shadowing is fragile.
        if date.date() < date.today().date():
            break
        elif date.date() == date.today().date():
            yield a.get('href'), doc
def get_shifts(location):
    """Return the active shifts at location in JSON format (a list of dicts
    shaped for FullCalendar: id, title, start, end, description, sale_status).
    """
    # Get all the activated shifts at the location.
    activated_shifts = Shift.objects.filter(activated=True, location=location)
    shifts = []  # place holder
    for shift in activated_shifts:
        # shift represents a recurrence, as opposed to an instance.
        # Get all the occurrence dates of the shift starting from the first
        # day of the shift.
        dates = shift.recurrences.occurrences(
            dtstart=datetime.combine(shift.start_date, shift.start_time))
        for date in dates:
            start_time = shift.start_time
            end_time = shift.end_time
            # Check whether or not the shift time needs adjustment.
            if shift.spans_past_midnight:
                # If it does (i.e. shift spans over midnight), first adjust the
                # end hour. Really hacky, but FullCalendar does not render
                # events as expected if the added hour goes over 24;
                # `datetime.combine(date.date(), shift.end_time) + timedelta(days=1)`
                # is essentially what these calculations are trying to do.
                temp_hour = shift.end_time.hour
                adjusted_hour = temp_hour + 23
                end_time = str(adjusted_hour) + ":" + "59:00"
                # Now the start time: if the shift spans midnight, the end time
                # must be after midnight (a small value like 2:00:00), so a
                # start time smaller than the end time is after midnight too
                # and needs the same adjustment.
                if shift.start_time < shift.end_time:
                    temp_hour = shift.start_time.hour
                    adjusted_hour = temp_hour + 23
                    start_time = str(adjusted_hour) + ":59:00"
            # Format the start and end datetimes to fit FullCalendar's
            # requirements.
            start = str(date.date()) + 'T' + str(start_time)
            end = str(date.date()) + 'T' + str(end_time)
            sale_status = False
            # Check if the shift on this date is on sale.
            if is_on_sale(shift.id, date):
                sale_status = True
            current_owner = get_current_owner(shift.id, date)
            # A custom field for brief description of a shift instance. It
            # includes the minimum essential information about the shift.
            # BUGFIX: the original formatted the literal string
            # 'current_owner' (quoted) instead of the variable, so every
            # description read "Owner: current_owner".
            description = str(shift.day_of_the_week) + "<br>" + \
                str(shift.start_time) + "-" + str(shift.end_time) + "<br>" \
                "Owner: %s" % str(current_owner)
            title = str(shift.location[0]) + "-" + str(current_owner)
            # Append the minimum required information about the shift.
            shifts.append({'id': shift.id, 'title': title, 'start': start,
                           'end': end, 'description': description,
                           'sale_status': str(sale_status)})
    return shifts
def get_posts(self, doc):
    """Yield HTMLDocument posts from `doc` dated self.options['date'].

    Iterates posts oldest-first (reversed page order, skipping the first
    div) and stops at the first post older than the target date.
    """
    for div in reversed(doc.cssselect("div.post")[1:]):
        date = readDate(div.cssselect("span.post_time")[0].text_content())
        if date.date() == self.options['date']:
            post = HTMLDocument()
            post.props.date = date
            post.props.author = div.cssselect("span.post_sub a.username")[0].text_content()
            post.props.text = div.cssselect("div.postmain_right")[0]
            post.props.section = self.current_section
            yield post
        elif date.date() < self.options['date']:
            # Older than the target date: nothing further can match.
            break
def check_date_in_range(self, date):
    """Return True when `date` lies within [allowDateAfter, allowDateBefore].

    Both bounds are inclusive and compared at day granularity.

    params:
        date: datetime object
    """
    lower = self.allowDateAfter.date()
    upper = self.allowDateBefore.date()
    return lower <= date.date() <= upper
def get_posts(self, doc):
    """Yield HTMLDocument posts from `doc` that match self.options['date'].

    Posts are iterated oldest-first (reversed page order, skipping the
    first div); iteration stops at the first post older than the date.
    """
    for div in reversed(doc.cssselect("div.post")[1:]):
        date = readDate(div.cssselect("span.post_time")[0].text_content())
        if date.date() == self.options['date']:
            post = HTMLDocument()
            post.props.date = date
            post.props.author = div.cssselect(
                "span.post_sub a.username")[0].text_content()
            post.props.text = div.cssselect("div.postmain_right")[0]
            post.props.section = self.current_section
            yield post
        elif date.date() < self.options['date']:
            # Older posts follow; stop scanning.
            break
def get_db_observation(date):
    """Return the ObservationDay for `site` on `date`, creating it when
    absent and memoizing the result in the enclosing `obs` cache.

    NOTE(review): `site` and `obs` come from the enclosing scope — confirm
    the cache lifetime matches the request lifetime.
    """
    key_name = make_key_name(site, date.date())
    if key_name in obs:
        return obs[key_name]
    o = ObservationDay.get_by_key_name(key_name)
    if o is None:
        # First observation for this site/day: create a fresh record.
        o = ObservationDay(key_name=key_name)
        o.site = site
        o.observation_date = date.date()
        o.observations = Observations()
    obs[key_name] = o
    return o
def water_restriction_at(self, date=None):
    """If the subject was under water restriction at the specified date
    (default: today), return the start datetime of that restriction,
    otherwise None."""
    when = date or self.today()
    # Restrictions whose start day is on or before the requested day.
    started = [entry for entry in self.water_restrictions
               if entry[0].date() <= when.date()]
    if not started:
        return
    start, end, rw = started[-1]
    # A restriction that ended before the requested moment no longer applies.
    if end is not None and when > end:
        return None
    assert end is None or end >= when
    assert start.date() <= when.date()
    return start
def get_missing_dates(dates):
    """Receives a range of dates and returns a sequence of missing
    datetime.date objects (no worries about order).

    The first and last date of the range are assumed present (assumption
    made in tests).
    """
    # Replaces the dateutil.rrule implementation with equivalent stdlib
    # timedelta stepping: same results, one dependency fewer.
    present = set(dates)
    first, last = min(dates), max(dates)
    span = (last - first).days
    return [first + timedelta(days=offset)
            for offset in range(span + 1)
            if first + timedelta(days=offset) not in present]
def prepare_data(ddict, ddict2, dict_name):
    """Append one record per key of `ddict` to the list `dict_name`.

    Keys are '%m/%d/%y' date strings; each record holds the ISO date
    string plus the matching recovered/confirmed values (confirmed comes
    from `ddict2`, None when the key is absent there).
    """
    for key, recovered in ddict.items():
        parsed = datetime.strptime(key, '%m/%d/%y')
        dict_name.append({
            'date': str(parsed.date()),
            'recovered': recovered,
            'confirmed': ddict2.get(key),
        })
def list_guys(message):
    """Reply with the ranked wake-up list for today or a requested date.

    With no argument the cached self.waken_guys is used; otherwise the
    per-date Redis list is loaded. Unparseable dates get an error reply.

    NOTE(review): relies on `self` and `logger` from the enclosing scope.
    """
    date_str = ZaoBot.retrieve_args(message)
    if date_str is None:
        waken_guys = self.waken_guys
    else:
        try:
            date = parse_date(date_str)
            waken_guys = RedisVariable(
                'zaobot:waken_guys:{}'.format(date.date()))
        except ValueError:
            # Date string not understood: tell the user and bail out.
            self.bot.reply_to(message, '听不懂<(=-︿-=)>')
            return
    sorted_guys = self._list_guys(waken_guys)
    logger.debug('sorted_guys is {}'.format(list(sorted_guys)))
    prefix = ""
    # Command-specific flavour text for the reply header.
    if message.text.startswith('/zaobugs'):
        prefix = "被鸟儿吃掉的虫子:\n"
    elif message.text.startswith('/zaobirds'):
        prefix = "被大鹰吃掉的鸟儿:\n"
    if sorted_guys:
        # One numbered line per person: "<rank>. <name>, <HH:MM>".
        self.bot.send_message(
            message.chat.id,
            prefix + '\n'.join(
                map(
                    lambda i_guy: '{}. {}, {:%H:%M}'.format(
                        i_guy[0]+1, *i_guy[1]),
                    enumerate(sorted_guys))))
    else:
        self.bot.reply_to(message, 'o<<(≧口≦)>>o 还没人起床')
def list_guys(message):
    """Reply with the ranked wake-up list for today or a requested date.

    With no argument the cached self.waken_guys is used; otherwise the
    per-date Redis list is loaded. Unparseable dates get an error reply.

    NOTE(review): relies on `self` and `logger` from the enclosing scope.
    """
    date_str = ZaoBot.retrieve_args(message)
    if date_str is None:
        waken_guys = self.waken_guys
    else:
        try:
            date = parse_date(date_str)
            waken_guys = RedisVariable('zaobot:waken_guys:{}'.format(
                date.date()))
        except ValueError:
            # Date string not understood: tell the user and bail out.
            self.bot.reply_to(message, '听不懂<(=-︿-=)>')
            return
    sorted_guys = self._list_guys(waken_guys)
    logger.debug('sorted_guys is {}'.format(list(sorted_guys)))
    prefix = ""
    # Command-specific flavour text for the reply header.
    if message.text.startswith('/zaobugs'):
        prefix = "被鸟儿吃掉的虫子:\n"
    elif message.text.startswith('/zaobirds'):
        prefix = "被大鹰吃掉的鸟儿:\n"
    elif message.text.startswith('/zaosheeps'):
        prefix = "被萌狼吃掉的小羊羔:\n"
    if sorted_guys:
        # One numbered line per person: "<rank>. <name>, <HH:MM>".
        self.bot.send_message(
            message.chat.id,
            prefix + '\n'.join(
                map(
                    lambda i_guy: '{}. {}, {:%H:%M}'.format(
                        i_guy[0] + 1, *i_guy[1]),
                    enumerate(sorted_guys))))
    else:
        self.bot.reply_to(message, 'o<<(≧口≦)>>o 还没人起床')
def display_form():
    """Show the weather-input widget form for the next day and wire its
    submit button to run the forecast pipeline."""
    # use original df as input (before adding features)
    df = read_data2()
    date = df.index[-1] + timedelta(days=1)
    print('Last available day in our database: ', df.index[-1].date())
    print('Please enter weather forecast for ', date.date(), ': ')
    box = form()
    # Arrange the six input widgets into three columns.
    align_left = VBox([box[0], box[3]])
    align_center = VBox([box[1], box[4]])
    align_right = VBox([box[2], box[5]])
    display(HBox([align_left, align_center, align_right]))
    def action_click(sender):
        # Button callback: collect the entered weather values, append the
        # new day, retrain the model, and plot the resulting forecast.
        last_day = df.index[-1] + timedelta(days=1)
        exp_wt = {'Date': [last_day],'Calls': [None],'AWND':[box[0].value],'WSF5': [box[1].value]\
,'PRCP': [box[2].value],'TMIN': [box[3].value],'TMAX': [box[4].value]}
        df_plusone = add_new_data(exp_wt, df)
        print('Your input:')
        print('Date:', last_day.date(), 'AWND:', exp_wt['AWND'],'WSF5:', exp_wt['WSF5']\
,'PRCP:', exp_wt['PRCP'], 'TMIN:', exp_wt['TMIN'], 'TMAX:', exp_wt['TMAX'])
        print()
        df_complete, indeps = add_features(df_plusone)
        model, train_data, test_data = final_model(df_complete)
        # Last row (the newly added day) without the target column.
        new_info = df_complete.drop(['Calls'], axis=1)[-1:]
        forecast = model.predict(X=new_info)
        forecast_nice = int(round(forecast[0]))
        # print('The forecast is:', forecast_nice)
        # print('Expected non-medical calls to SFPD for',date.date(),': ', forecast_nice)
        graph_calls_vs_AA(df, forecast_nice)
        # Close the form once a forecast has been produced.
        HBox([align_left, align_center, align_right]).close()
        return
    box[5].on_click(action_click)
    return
def rapport(request):
    """Daily report view: counts copies added, invoices created, books sold
    and the total sale price for the day given by ?date=YYYY-MM-DD
    (default: today)."""
    if 'date' in request.GET:
        date = request.GET['date']
        date = datetime.strptime(date, "%Y-%m-%d")
    else:
        date = datetime.today()
    lendemain = date + timedelta(days=1)
    # Set both dates to midnight: they bound the reported day.
    # NOTE(review): microseconds are not cleared for the datetime.today()
    # case — confirm the __gt filter boundary is acceptable.
    date = date.replace(hour=0, minute=0, second=0)
    lendemain = lendemain.replace(hour=0, minute=0, second=0)
    ajoutes = Exemplaire.objects.all().filter(date_creation__gt=date, date_creation__lt=lendemain)
    factures = Facture.objects.all().filter(date_creation__gt=date, date_creation__lt=lendemain)
    nb_ajoutes = ajoutes.count()
    nb_factures = factures.count()
    nb_vendus = sum([f.nb_livres() for f in factures])
    prix_total_vendu = sum([f.prix_total() for f in factures])
    context = {
        'nb_ajoutes': nb_ajoutes,
        'nb_factures': nb_factures,
        'date': date.date(),
        'nb_vendus': nb_vendus,
        'prix_total_vendu': prix_total_vendu,
    }
    return render_to_response('encefal/rapport.html', context)
def test_work_days(teacher, auth, requester):
    """Work-day endpoint: unfiltered and weekday-filtered listings return
    the generic (undated) entry; filtering by on_date returns the dated
    entry created with `first_kwargs_hour`."""
    auth.login(email=teacher.user.email)
    date = datetime.utcnow() + timedelta(hours=10)
    first_kwargs_hour = 13
    kwargs = {
        "teacher": teacher,
        "day": 1,
        "from_hour": first_kwargs_hour,
        "from_minutes": 0,
        "to_hour": 17,
        "to_minutes": 0,
        "on_date": date,
    }
    day1 = WorkDay.create(**kwargs)
    # Second work day: same weekday, no specific date, later start hour.
    kwargs.pop("on_date")
    kwargs["from_hour"] = 15
    day2 = WorkDay.create(**kwargs)
    resp = requester.get("/teacher/work_days").json
    assert resp["data"][0]["from_hour"] == kwargs["from_hour"]
    day = date.date()
    resp = requester.get(f"/teacher/work_days?on_date=eq:{day}").json
    assert resp["data"][0]["from_hour"] == first_kwargs_hour
    resp = requester.get(f"/teacher/work_days?day=1").json
    assert resp["data"][0]["from_hour"] == kwargs["from_hour"]
def download_new_files(last_date):
    """Download report files newer than `last_date` from the SFTP server.

    Files larger than SIZE_LIMIT_BYTES are skipped (logged as errors).
    Returns a NewFiles(dates, local_paths) pair sorted by date; both lists
    are empty when there is nothing new.
    """
    new_dates = []
    new_filenames = []
    with Connection(settings.SFTP_HOST, username=settings.SFTP_USER, private_key=settings.SFTP_PRIVATE_KEY) as conn:
        with conn.cd(settings.SFTP_DIR):
            dir_listing = conn.listdir()
            for filename in dir_listing:
                # parse_filename yields the file's date, or a falsy value
                # for files that don't belong to this account.
                date = parse_filename(filename, settings.ACCOUNT_CODE)
                if date:
                    stat = conn.stat(filename)
                    if stat.st_size > SIZE_LIMIT_BYTES:
                        logger.error("%s is too large (%s), download skipped." % (filename, stat.st_size))
                        continue
                    if last_date is None or date.date() > last_date.date():
                        local_path = os.path.join(settings.DS_NEW_FILES_DIR, filename)
                        new_filenames.append(local_path)
                        new_dates.append(date)
                        conn.get(filename, localpath=local_path)
    if new_dates and new_filenames:
        # Sort the two parallel lists together, keyed by date.
        sorted_dates, sorted_files = zip(*sorted(zip(new_dates, new_filenames)))
        return NewFiles(list(sorted_dates), list(sorted_files))
    else:
        return NewFiles([], [])
def rapport(request):
    """Daily report view: counts copies added, invoices created, books sold
    and the total sale price for the day given by ?date=YYYY-MM-DD
    (default: today)."""
    if 'date' in request.GET:
        date = request.GET['date']
        date = datetime.strptime(date,"%Y-%m-%d")
    else:
        date = datetime.today()
    lendemain = date + timedelta(days=1)
    # Set both dates to midnight: they bound the reported day.
    date = date.replace(hour=0, minute=0, second=0)
    lendemain = lendemain.replace(hour=0, minute=0, second=0)
    ajoutes = Exemplaire.objects.all().filter(date_creation__gt=date, date_creation__lt=lendemain)
    factures = Facture.objects.all().filter(date_creation__gt=date, date_creation__lt=lendemain)
    nb_ajoutes = ajoutes.count()
    nb_factures = factures.count()
    nb_vendus = sum([f.nb_livres() for f in factures])
    prix_total_vendu = sum([f.prix_total() for f in factures])
    context = {
        'nb_ajoutes':nb_ajoutes,
        'nb_factures':nb_factures,
        'date':date.date(),
        'nb_vendus':nb_vendus,
        'prix_total_vendu':prix_total_vendu,
    }
    return render_to_response('encefal/rapport.html', context)
def get_avalanche_forecast_data(seasons_to_check): url = create_avalanche_forecast_url(seasons_to_check) # Fetch forecast data data = requests.get(url).text # Create dataframe forecast_df = pd.read_json(StringIO(data)) # Filter relevant inforrmation forecast_df = forecast_df.filter(items=[ 'ValidFrom', 'RegionId', 'DangerLevel', "MountainWeather", "AvalancheProblems" ]) # Convert datetime strings to date values forecast_df['date'] = [ date.date() for date in pd.to_datetime(forecast_df['ValidFrom']) ] forecast_df = forecast_df.rename(columns={"RegionId": 'region'}) base_data_df = forecast_df.filter(items=['date', 'region', 'DangerLevel']) print(base_data_df) mountain_weather_df = get_mountain_weather_data(forecast_df) mountain_weather_df = correct_mountain_weather(mountain_weather_df) avalanche_problem_df = get_avalanche_problem_data(forecast_df) join_df = mountain_weather_df.join(avalanche_problem_df) return base_data_df.join(join_df)
def format_date(date=None, format='medium', locale=LC_TIME):
    """Return a date formatted according to the given pattern.

    >>> d = date(2007, 4, 1)
    >>> format_date(d, locale='en_US')
    u'Apr 1, 2007'
    >>> format_date(d, format='full', locale='de_DE')
    u'Sonntag, 1. April 2007'

    If you don't want to use the locale default formats, you can specify a
    custom date pattern:

    >>> format_date(d, "EEE, MMM d, ''yy", locale='en')
    u"Sun, Apr 1, '07"

    :param date: the ``date`` or ``datetime`` object; if `None`, the current
                 date is used
    :param format: one of "full", "long", "medium", or "short", or a custom
                   date/time pattern
    :param locale: a `Locale` object or a locale identifier
    """
    if date is None:
        # `date` shadows the date type inside this function; `date_` is
        # the aliased date class.
        date = date_.today()
    elif isinstance(date, datetime):
        # Drop the time component of datetime inputs.
        date = date.date()
    locale = Locale.parse(locale)
    if format in ('full', 'long', 'medium', 'short'):
        format = get_date_format(format, locale=locale)
    pattern = parse_pattern(format)
    return pattern.apply(date, locale)
def get_special_days(self, date_from, date_to, employee):
    """Count special (non-working) days in [date_from, date_to] for
    `employee`: public holidays registered for the employee company's
    state plus the employee's two configured weekly days off."""
    public_leave_ids = self.env['hr.holidays.public.line'].search([
        ('state_ids', 'in', employee.company_id.state_id.id)
    ])
    special_days = 0
    for date in self.daterange(date_from, date_to):
        date_str = str(date.date())
        public_leave = public_leave_ids.filtered(
            lambda r: r.date == date_str)
        # Maps the stored config keys onto strftime("%A") day names.
        weeks = {
            'monday': 'Monday',
            'tuesday': 'Tuesday',
            'wednesday': 'Wednesday',
            'thursday': 'Thursday',
            'friday': 'Friday',
            'saturday': 'Saturday',
            'sunday': 'Sunday'
        }
        second_week_off = employee.address_id.second_week_off
        first_week_off = employee.address_id.first_week_off
        if public_leave:
            special_days = special_days + 1
        elif first_week_off and date.strftime(
                "%A") == weeks[first_week_off]:
            special_days = special_days + 1
        elif second_week_off and date.strftime(
                "%A") == weeks[second_week_off]:
            special_days = special_days + 1
    return special_days
def getdataCountyDailyActive(json, dates, county_mapping, county_sizes):
    """Build per-county active-case series (14-day rolling sums) over
    `dates` from positive ("P") test results.

    Returns {"countyByDayActive": {county: [...]},
             "activeMap100kPlayback": [...]} where the playback entries
    are normalised per 100k inhabitants using county_sizes.
    """
    chart_counties = ["Harjumaa", "Hiiumaa", "Ida-Virumaa", "Jõgevamaa", "Järvamaa", "Läänemaa", "Lääne-Virumaa", "Põlvamaa", "Pärnumaa", "Raplamaa", "Saaremaa", "Tartumaa", "Valgamaa", "Viljandimaa", "Võrumaa"]
    # (county, ISO-date-string) -> number of positive results that day.
    county_date_counts = defaultdict(int)
    for res in json:
        if res["ResultValue"] == "P":
            date = pd.to_datetime(res["StatisticsDate"]).date()
            county = county_mapping[res["County"]]
            if county in chart_counties:
                county_date_counts[(county, str(date))] += 1
    countyByDay = {}
    activeMap100kPlayback = []
    for county in chart_counties:
        per_day_county = []
        active_per_day_county_100k = []
        for date in dates:
            val = county_date_counts[(county, str(date.date()))]
            per_day_county.append(val)
            active_per_day_county_100k.append((val/county_sizes[county] * 100000))
        # Calculate cumulative: a 14-day rolling sum approximates the
        # currently-active case count.
        countyByDay[county] = list(map(int, pd.Series(per_day_county).rolling(14, min_periods=0).sum()))
        activeMap100kPlayback.append({"MNIMI": county, "sequence": list(round(pd.Series(active_per_day_county_100k).rolling(14, min_periods=0).sum(),2)), "drilldown": county})
    activeList = {
        "countyByDayActive": countyByDay,
        "activeMap100kPlayback": activeMap100kPlayback
    }
    return activeList
def weighing_at(self, date=None):
    """Return the weight of the subject at the specified date (default:
    today), or None when no weighing was recorded on that day."""
    when = date or self.today()
    assert isinstance(when, datetime)
    # self.weighings holds (datetime, weight) pairs; the first pair
    # recorded on the requested day wins.
    for recorded_at, weight in self.weighings:
        if recorded_at.date() == when.date():
            return weight
    return None
def plan_dia_obs(request, area, ano, mes, dia):
    '''
    View for the per-day surgical plan detail, for observer users.

    Parameters:
        request -> HTTP request
        area -> Operating-theatre area to query
        ano -> Year to query
        mes -> Month to query
        dia -> Day to query
    '''
    ano = int(ano)
    mes = int(mes)
    dia = int(dia)
    areas_valores = Quirofano.objects.distinct('area').values_list('area', flat=True)
    quirofanos_area = Quirofano.objects.filter(area=area)
    # Reject out-of-range dates and unknown areas with a 404.
    if mes < 1 or mes > 12:
        raise Http404
    if ano < 1:
        raise Http404
    if area not in areas_valores:
        raise Http404
    if dia < 1 or dia > calendar.monthrange(ano, mes)[1]:
        raise Http404
    intervenciones = []
    # Approved ('A') interventions of the area for the requested day,
    # ordered by theatre and time.
    lista_intervenciones_area = IntervencionQuirurgica.objects.filter(fecha_intervencion__year=ano, fecha_intervencion__month=mes, fecha_intervencion__day=dia, reservacion__estado='A', quirofano__area=area).order_by('quirofano','hora_fin','hora_inicio')
    for intervencion in lista_intervenciones_area:
        # Build a display dict per intervention: procedures, half-hour
        # formatted times, and the anaesthesiologist (participation rol=0).
        intervencion_diccionario = {}
        intervencion_diccionario['objeto'] = intervencion
        procedimientos = intervencion.procedimientoquirurgico_set.all()
        intervencion_diccionario['procedimientos'] = procedimientos
        intervencion_diccionario['hora_inicio'] = obtener_representacion_media_hora(obtener_total_horas(intervencion.hora_inicio))
        intervencion_diccionario['hora_fin'] = obtener_representacion_media_hora(obtener_total_horas(intervencion.hora_fin))
        anestesiologo_id = Participacion.objects.get(procedimiento_quirurgico_id=procedimientos.first().id, rol=0).medico_id
        intervencion_diccionario['anestesiologo'] = Medico.objects.get(id=anestesiologo_id)
        intervenciones.append(intervencion_diccionario)
    # Days in the past are shown read-only.
    dia_permitido = True
    date = datetime.strptime(str(dia)+' '+str(mes)+' '+str(ano), '%d %m %Y')
    current_date = datetime.now().date()
    if date.date() < current_date:
        dia_permitido = False
    datos = {}
    datos['area_nombre'] = quirofanos_area[0].get_area_display()
    datos['ano'] = ano
    datos['mes'] = mes
    datos['dia'] = dia
    datos['area_actual'] = area
    datos['quirofanos_area'] = quirofanos_area
    datos['intervenciones'] = intervenciones
    datos['formulario_cambio_estado_intervencion'] = CambiarEstadoIntervencionQuirurgicaForm()
    datos['dia_permitido'] = dia_permitido
    cuenta = request.user.cuenta
    # Unread-message ('NL') badge for the page header.
    mensajes_pendientes = Mensaje.objects.filter(cuenta=cuenta, estado='NL')
    datos['numero_mensajes_pendientes'] = mensajes_pendientes.count()
    datos['cuenta_id'] = cuenta.id
    return render_to_response('plan_quirurgico/plan_dia_obs.html', datos, context_instance=RequestContext(request))
def obj_converter(self, data: dict) -> datetime:
    """Convert a {'dateTime': ..., 'timeZone': ...} mapping into a
    timezone-aware datetime.

    NOTE(review): returns None for a falsy/empty input despite the
    `datetime` return annotation — confirm callers handle None.
    """
    if not data:
        return None
    date = parser.parse(data["dateTime"])
    # Rebuild the datetime with the zone named in data["timeZone"],
    # replacing any offset already present in the dateTime string.
    return datetime.combine(date.date(), date.time(), tz.gettz(data["timeZone"]))
def ywd_to_date(year, week_label, day):
    """Convert (year, human week number, weekday) to a datetime.date.

    `day` uses ISO numbering where 7 means Sunday; strptime wants 0 for
    Sunday, so it is remapped. Years whose first days belong to the
    previous ISO year are handled by comparing isocalendar()'s week with
    strftime("%W")'s week and stepping back seven days when the label
    runs one week ahead.
    """
    # Sundays mess with strptime, make sure it is 0.
    if day == 7:
        day = 0
    fields = strptime("%d %d %d" % (year, week_label, day), "%Y %W %w")
    candidate = datetime(fields[0], fields[1], fields[2])
    iso_week = int(candidate.isocalendar()[1])
    w_week = int(candidate.strftime("%W"))
    # "%W" is zero when a new year has occurred within the week and the
    # weekday is in January, which breaks comparison with isocalendar();
    # normalize by asking for the week number of that week's Monday.
    if w_week == 0:
        days_past_monday = candidate.isocalendar()[2] - 1
        monday = candidate - timedelta(days=days_past_monday)
        w_week = int(monday.strftime("%W"))
    # For years beginning on a Monday the human "week label" and the
    # calculated week agree, and the candidate is already correct.
    if iso_week == w_week:
        return candidate.date()
    # Otherwise we are one week ahead of time: turn the clock back.
    return (candidate - timedelta(days=7)).date()
def annual(date):
    """Compute annual-leave days accrued from joining date `date` (a
    parseable date string) up to the end of the current month.

    Same-month joins accrue pro-rata at 1.25 days for the month; older
    joins use a 15-days-per-year approximation over the elapsed days.
    Totals above 45 are reduced by 45.
    """
    date = parser.parse(date)
    today = datetime.now()
    t_month = today.month
    t_year = today.year
    j_month = date.month
    j_year = date.year
    if (j_month == t_month and j_year == t_year):
        # Joined this month: remaining fraction of the month * 1.25.
        temp = datetime(j_year, j_month, calendar.monthrange(j_year, j_month)[1])
        days_left = temp - date
        days_left = (days_left.days) + 1
        al = ((days_left) / calendar.monthrange(j_year, j_month)[1]) * 1.25
    else:
        # Days from the joining date to the end of the current month.
        c_date = datetime(t_year, t_month, calendar.monthrange(t_year, t_month)[1]).date()
        d = c_date - date.date()
        days = d.days
        days_left = float(days + 1)
        months = days_left / calendar.monthrange(t_year, t_month)[1]
        months = round(months)
        # NOTE(review): leftover debug output? `months` is unused below.
        print(months)
        num1 = float(days_left * 15)
        # NOTE(review): mutates the process-wide decimal context precision.
        getcontext().prec = 3
        al = float(Decimal(num1) / Decimal(365))
    if al > 45:
        al -= 45
    else:
        pass
    return al
def calculate_total_RainIrrigation(crop_season, field, date, water_history_query):
    """
    Calculate total rain/irrigation for `date`, summed over all manual
    water-history records for that day, along with the day's min/max temps.

    Returns (rainfall, irrigation, min_temp, max_temp); temperatures are
    None when no record supplies them. `crop_season` and `field` are
    currently unused here.
    """
    if isinstance(date, datetime):
        date = date.date()
    rainfall = Decimal(0.0)
    irrigation = Decimal(0.0)
    min_temp = None
    max_temp = None
    # Values coming from the water history (soon to be renamed manual reading).
    wh_list = water_history_query.filter(datetime__range=d2dt_range(date)).all()
    if wh_list:
        rainfall = rainfall + sum(
            map(
                lambda wh: wh.rain if wh.rain else 0,
                wh_list
            )
        )
        irrigation = irrigation + sum(
            map(
                lambda wh: wh.irrigation if wh.irrigation else 0,
                wh_list)
        )
        min_temp = minNone(min_temp, minNone(*map(
            lambda wh: wh.min_temp_24_hours, wh_list)))
        # BUGFIX: the original used plain max() over values that can include
        # None (and an initial None accumulator), which raises TypeError in
        # Python 3; ignore None readings like the minNone path does.
        max_candidates = [wh.max_temp_24_hours for wh in wh_list
                          if wh.max_temp_24_hours is not None]
        if max_candidates:
            max_temp = max(max_candidates) if max_temp is None \
                else max(max_temp, max(max_candidates))
    #if DEBUG: print "Min temp, max temp for: ", date, min_temp, max_temp
    return (rainfall, irrigation, min_temp, max_temp)
def kenpom_query(df, date, game):
    """
    Returns home/away, score, and KenPom efficiency information for the
    game `game` played on `date` (a datetime), taken from dataframe `df`.
    """
    day = str(date.date())
    rows = df.query('date == "' + day + '" and game_id == "' + str(game) + '"')
    info = {}
    for _, row in rows.iterrows():
        # Fields common to both sides of the matchup.
        info['game_id'] = row.loc['game_id']
        info['game_date'] = row.loc['game_date']
        if row.loc['t1_side'] == 'home':
            info['home_team'] = row.loc['ncaa_name']
            info['kenpom_off_home'] = row.loc['offensive_efficiency']
            info['kenpom_def_home'] = row.loc['defensive_efficiency']
            info['home_score'] = row.loc['t1_points']
            info['home_conf'] = row.loc['t1_conf']
        else:
            info['away_team'] = row.loc['ncaa_name']
            info['kenpom_off_away'] = row.loc['offensive_efficiency']
            info['kenpom_def_away'] = row.loc['defensive_efficiency']
            info['away_score'] = row.loc['t1_points']
            info['away_conf'] = row.loc['t1_conf']
    return info
def test_admins_cannot_make_incidents_without_state(mockdata, client, session):
    """Admins submitting an incident whose address lacks a state get a
    validation error and no incident is created."""
    with current_app.test_request_context():
        login_admin(client)
        date = datetime(2000, 5, 25, 1, 45)
        report_number = '42'
        address_form = LocationForm(street_name='AAAAA', cross_street1='BBBBB', city='FFFFF', state='', zip_code='03435')
        ooid_forms = [
            OOIdForm(ooid=officer.id) for officer in Officer.query.all()[:5]
        ]
        form = IncidentForm(date_field=str(date.date()),
                            time_field=str(date.time()),
                            report_number=report_number,
                            description='Something happened',
                            department='1',
                            address=address_form.data,
                            officers=ooid_forms)
        data = process_form_data(form.data)
        incident_count_before = Incident.query.count()
        rv = client.post(url_for('main.incident_api') + 'new', data=data, follow_redirects=True)
        assert rv.status_code == 200
        # BUGFIX: rv.data is bytes; `str in bytes` raises TypeError on
        # Python 3 — decode first, matching the sibling incident tests.
        assert 'Must select a state.' in rv.data.decode('utf-8')
        assert incident_count_before == Incident.query.count()
def observation_update2(site_key):
    # Fetch the latest Met Office observation JSON for the given site and
    # persist one ObservationTimestep per reported timestep.
    # Returns 404 if the site is unknown, otherwise 204 (even if the upstream
    # fetch failed -- non-200 results are silently skipped).
    site = Site.get_by_key_name(site_key)
    if site is None:
        return Response(status=404)
    url = "http://www.metoffice.gov.uk/public/data/PWSCache/BestForecast/Observation/%s?format=application/json" % site_key
    result = urlfetch.fetch(url)
    if result.status_code == 200:
        observations = parse_observation(result.content)
        # issued_date = parse_date(forecast["@dataDate"])
        for date, data in timesteps(observations):
            # Reuse an existing timestep record for this site/datetime if present.
            obs_timestep = ObservationTimestep.get_by_site_and_datetime(
                site, date)
            if obs_timestep is None:
                obs_timestep = ObservationTimestep(
                    site=site,
                    observation_datetime=date,
                    observation_date=date.date())
            # Copy every known attribute from the feed onto the entity;
            # attributes the model does not declare are ignored.
            for k, v in data.items():
                prop_name = snake_case(k)
                if hasattr(obs_timestep, prop_name):
                    # "missing" is the feed's sentinel for no reading.
                    if v == "missing":
                        v = None
                    elif prop_name == 'temperature':
                        # Temperature arrives as a string; store as float.
                        v = float(v)
                    setattr(obs_timestep, prop_name, v)
            obs_timestep.save()
            #logging.info("%s, %s" % (str(date), str(ObservationTimestep)))
    return Response(status=204)
def forecast_update2(site_key):
    # Fetch the latest Met Office forecast JSON for the given site and persist
    # one ForecastTimestep per reported timestep, keyed by both the forecast
    # datetime and the issue datetime.
    # Returns 404 if the site is unknown, otherwise 204 (non-200 upstream
    # results are silently skipped).
    site = Site.get_by_key_name(site_key)
    if site is None:
        return Response(status=404)
    forecast_url = "http://www.metoffice.gov.uk/public/data/PWSCache/BestForecast/Forecast/%s?format=application/json" % site_key
    result = urlfetch.fetch(forecast_url)
    if result.status_code == 200:
        forecast = parse_forecast(result.content)
        issued_date = parse_date(forecast["@dataDate"])
        for date, data in timesteps(forecast):
            # Reuse the existing record for this site/forecast-time/issue-time.
            forecast_timestep = ForecastTimestep.find_by_site_and_dates(
                site, date, issued_date)
            if forecast_timestep is None:
                forecast_timestep = ForecastTimestep(
                    site=site,
                    forecast_datetime=date,
                    issued_datetime=issued_date,
                    forecast_date=date.date())
            # Copy known attributes from the feed; "missing" is the feed's
            # sentinel for no value.
            for k, v in data.items():
                prop_name = snake_case(k)
                if hasattr(forecast_timestep, prop_name):
                    if v == "missing":
                        v = None
                    setattr(forecast_timestep, prop_name, v)
            forecast_timestep.save()
    return Response(status=204)
def create_calendar(data_path, start, end):
    """Build a Calendar of consecutive Day objects from a CSV of working hours.

    Reads *data_path* (CSV with at least 'date' and 'working_hours' columns),
    keeps rows between *start* and *end* (both datetime.date), and fills any
    gaps with zero-hour filler days.

    Raises ValueError on bad argument types, start > end, no rows in range,
    or when the data does not extend to *end*.
    """
    #parse the data to get date and available hours
    #make a list of ordered days starting with start (a date)
    #if start is not in the data, throw error
    #if there are any gaps in the data, create filler days with working_hours = 0

    #data validation
    if isinstance(start, date) is False or isinstance(end, date) is False:
        raise ValueError('start and end must be of the type datetime.date')
    if start > end:
        raise ValueError('start must be before end')
    if isinstance(data_path,str) is False:
        raise ValueError('data_path must be a string')

    #Read calendar info into a pandas dataframe
    calendar_data = pd.read_csv(data_path,parse_dates=['date'])
    calendar_sorted = calendar_data.sort_values(by='date')
    #convert from a Pandas Timestamp to a datetime.date() object for consistency with the rest of the time objects
    calendar_sorted['date'] = [date.date() for date in calendar_sorted['date']]

    #Calendar object is returned by the function
    calendar = Calendar()

    #Create Day objects and append them to the Calendar.days[]
    for row in calendar_sorted.itertuples():
        # Skip rows outside the requested window.
        if row.date < start or row.date > end:
            continue
        #Append sufficient 'filler' days if there is a gap in the calendar file. Assume 0 working hours if day is not listed.
        if len(calendar.days) == 0:
            yesterday = None
        else:
            yesterday = calendar.days[-1].datestamp
        if yesterday:
            currentDate = yesterday + timedelta(days=1)
            while currentDate < row.date:
                day = Day(datestamp=currentDate, working_hours=0)
                calendar.days.append(day)
                currentDate = currentDate + timedelta(days=1)
                logging.info("Adding filler day {}".format(calendar.days[-1]))
        #Append the date from the data file
        day = Day(datestamp=row.date, working_hours=row.working_hours)
        calendar.days.append(day)

    #Check some stats to make sure the calendar is as expected
    if len(calendar.days) == 0:
        raise ValueError("No dates added - check that start and end dates are in the data source")
    if calendar.days[-1].datestamp != end:
        raise ValueError("Last date does not match end date - check that data source has sufficient data for the time period selected")

    #Show the calendar in the log file
    logging.info("Calendar generated ({} days included)".format(len(calendar.days)))
    logging.info("First 10 days of calendar:")
    for i in range(min(10,len(calendar.days))):
        logging.info(calendar.days[i])

    return calendar
def new_close_action(cls, date=None):
    """Return a CommCareCaseAction that closes a case at *date* (defaults to current UTC time)."""
    when = date if date else datetime.utcnow()
    return CommCareCaseAction(
        action_type=const.CASE_ACTION_CLOSE,
        date=when,
        visit_date=when.date(),
        closed_on=when,
    )
def anticipation(date, co):
    """Days between arrival *date* (datetime) and checkout *co* ('%Y-%m-%d' string).

    Returns -1 when *co* is a float (the NaN/missing-value sentinel).
    """
    if type(co) == float:
        return -1
    arrival = date.date()
    departure = datetime.strptime(str(co), '%Y-%m-%d').date()
    return (departure - arrival).days
def getScheduleDataInWeek(self, userId, dateStr):
    """Return schedule data for the week containing *dateStr* for *userId*.

    The connection is always returned to the pool, even on error.
    """
    con = self.conn.get()
    try:
        parsed = self._parseDate(dateStr)
        day = parsed.date() if parsed is not None else None
        return self.assistance.getScheduleDataInWeek(con, userId, day)
    finally:
        self.conn.put(con)
def is_working_day(date, week_days_off=None):
    """Return True if *date* falls on a working day.

    week_days_off is an iterable of ISO weekday numbers (1=Monday .. 7=Sunday);
    it defaults to (6, 7), i.e. Saturdays and Sundays. A datetime is reduced
    to its date part. A None date yields None.
    """
    off_days = (6, 7) if week_days_off is None else week_days_off
    if date is None:
        return None
    day = date.date() if isinstance(date, datetime) else date
    return day.isoweekday() not in off_days
def get_day_of_year(self, date=None):
    """Return the 1-based ordinal day of the year for *date* (defaults to self.value)."""
    target = self.value if date is None else date
    # A datetime must be reduced to a date so the subtraction below yields
    # a timedelta of whole days (datetime - date is not defined).
    if isinstance(target, datetime_):
        target = target.date()
    jan_first = date_(target.year, 1, 1)
    return (target - jan_first).days + 1
def _get_units(self):
    # Scrape each fetched document, parse its publication date from the
    # headline divider, and yield an HTMLDocument for every article whose
    # date matches self.options['date']; other dates are skipped.
    for url, doc in self.getdocs():
        # NOTE(review): the date string apparently follows an "am" marker in
        # the headline text -- confirm against the site's actual markup.
        date = readDate(doc.cssselect("section.headlinedivider p.lfloat")[0].text_content().strip().split("am")[1])
        print(date)
        if date.date() != self.options['date']:
            continue
        article = HTMLDocument(url = url, date = date)
        article.doc = doc
        yield article
def distro_date_is_week(date, week):
    """Return True if *date* (date or datetime) belongs to distribution *week* (A_WEEK or B_WEEK)."""
    day = date.date() if type(date) is datetime else date
    if week == A_WEEK:
        return day in (SAT_A_DATES + WED_A_DATES)
    if week == B_WEEK:
        return day in (SAT_B_DATES + WED_B_DATES)
    raise Exception("Invalid value for 'week' parameter: %s" % week)
def set_time(arr, hour, minute):
    """Given a list of datetimes, return new datetimes with the time set to hour:minute on each."""
    at = time(hour, minute)
    return [datetime.combine(d.date(), at) for d in arr]
def createScheduleSpecial(self, sid, uid, dateStr, scheds):
    """Create a special schedule for user *uid*, authorized by session *sid*.

    Commits on success; the connection is always returned to the pool.
    """
    con = self.conn.get()
    try:
        parsed = self._parseDate(dateStr)
        day = parsed.date() if parsed is not None else None
        userId = self.loginModel.getUserId(con, sid)
        self.assistance.createScheduleSpecial(con, userId, uid, day, scheds)
        con.commit()
    finally:
        self.conn.put(con)
def is_hol(date):
    """Receive a date (or datetime). Returns True if it is a holiday."""
    day = date.date() if isinstance(date, datetime) else date
    holidays = yfhol(day.year)
    return day in holidays.values()
def get_days_to_expiration(date):
    """Return the number of days from *date* (a datetime) to the last Thursday
    of its month (the options expiration day)."""
    # BUG FIX: the parameter named `date` shadows the datetime.date class, so
    # the original `date(year, month, last_thursday)` tried to call the
    # datetime instance and raised TypeError. Import the class under an alias.
    from datetime import date as date_cls
    year = date.year
    month = date.month
    # monthrange()[1] is the number of days in the month.
    last_thursday = calendar.monthrange(year, month)[1]
    # Walk backwards from the end of the month until we hit a Thursday
    # (calendar.weekday: Monday == 0, so Thursday == 3).
    while calendar.weekday(year, month, last_thursday) != 3:
        last_thursday -= 1
    return (date_cls(year, month, last_thursday) - date.date()).days
def __init__(self, date, timestart, timefinish):
    """
    date - a datetime or a plain date; only the date part is kept (%d.%m.%Y)
    timestart - datetime (%d.%m.%Y %H:%M)
    timefinish - datetime (%d.%m.%Y %H:%M)
    """
    # A datetime is reduced to its date component; a date is stored as-is.
    self.day = date.date() if type(date) == datetime else date
    self.timestart = timestart
    self.timefinish = timefinish
def rapport(request):
    """Daily sales report view.

    Uses the 'date' GET parameter (YYYY-MM-DD) or defaults to today, then
    aggregates copies added and invoices created during that day and renders
    rapport.html. Tax totals are included only when settings.TAXABLES is set.
    """
    if 'date' in request.GET:
        date_init = request.GET['date']
        date = datetime.strptime(date_init,"%Y-%m-%d")
    else:
        date = datetime.today()
        date_init = date.today().strftime("%Y-%m-%d")
    lendemain = date + timedelta(days=1)
    # Set both dates to midnight so the filters span the whole calendar day.
    date = date.replace(hour=0, minute=0, second=0)
    lendemain = lendemain.replace(hour=0, minute=0, second=0)
    # Copies added and invoices created strictly within [date, lendemain).
    ajoutes = Exemplaire.objects.all().filter(date_creation__gt=date, date_creation__lt=lendemain)
    factures = Facture.objects.all().filter(date_creation__gt=date, date_creation__lt=lendemain)
    nb_ajoutes = ajoutes.count()
    nb_factures = factures.count()
    nb_vendus = sum([f.nb_livres() for f in factures])
    prix_total_vendu_avtaxes = sum([f.prix_avant_taxes() for f in factures])
    con_tax = {}
    # If taxes are not handled, these variables are unnecessary.
    if settings.TAXABLES:
        prix_total_vendu_taxes = sum([f.prix_total() for f in factures])
        tps = sum([f.prix_tps() for f in factures])
        tvq = sum([f.prix_tvq() for f in factures])
        con_tax = {
            'prix_total_taxes':prix_total_vendu_taxes or None,
            'prix_tps':tps or None,
            'prix_tvq':tvq or None
        }
    con_notax = {
        'date_init':date_init,
        'date':date.date(),
        'taxable':settings.TAXABLES,
        'nb_ajoutes':nb_ajoutes,
        'nb_factures':nb_factures,
        'nb_vendus':nb_vendus,
        'prix_total_avant_taxes':prix_total_vendu_avtaxes,
    }
    context = con_tax.copy()
    context.update(con_notax)
    return render(request, 'rapport.html', context)
def increment(self, changeset):
    # Fold one changeset into the per-day observations for every metric.
    self.increment_number_of_changesets(changeset)
    # Abandoned ('A') and draft ('d') changesets are counted above but
    # contribute nothing to the review metrics.
    if changeset.status == 'A' or changeset.status == 'd':
        return
    for metric in self.metrics:
        start_date = self.get_review_start_date(changeset, metric)
        end_date = self.get_review_end_date(changeset, metric)
        for date in self.daterange(start_date, end_date, changeset.merged):
            # NOTE(review): the default Observation here is constructed
            # eagerly on every lookup, even when the key already exists.
            obs = self.observations.get(date.date(
            ), Observation(date.date(), self))
            if metric == 'waiting_first_review':
                obs.changeset_ids.add(changeset.change_id)
            # Update each "<metric>_<suffix>" counter on the observation.
            for heading in product([metric], self.suffixes):
                heading = self.merge_keys(heading[0], heading[1])
                value = getattr(obs, heading)
                # staff/volunteer buckets are mutually exclusive; 'total'
                # counts every changeset regardless of author.
                if heading.endswith('staff') and changeset.author.staff is True:
                    value += 1
                elif heading.endswith('volunteer') and changeset.author.staff is False:
                    value += 1
                elif heading.endswith('total'):
                    value += 1
                setattr(obs, heading, value)
            self.observations[obs.date] = obs
def test_telegram_time(self): spreadsheet = DailysSpreadsheetMock(self.test_user, self.test_chan) # Setup field field = DailysSleepField(spreadsheet) # Send sleep message with telegram time date = datetime(2018, 12, 23, 23, 44, 13) today = date.date() yesterday = today - timedelta(1) evt = EventMessage(self.server, self.test_chan, self.test_user, "sleep")\ .with_raw_data(RawDataTelegram(self.get_telegram_time(date))) field.passive_trigger(evt) # Check data is saved notif_str = spreadsheet.saved_data["sleep"][yesterday if date.hour <= 16 else today] notif_dict = json.loads(notif_str) assert "sleep_time" in notif_dict assert notif_dict["sleep_time"] == date.isoformat()
def yyyymmdd_to_date(date_str):
    """Convert a YYYYMMDD formatted date to a python :py:class:`datetime.date` object.

    Args:
        date_str (str): the string to convert.

    Returns:
        :py:class:`datetime.date`: the python representation of the date.

    Raises:
        ValueError: when date_str is empty, None, or not a valid YYYYMMDD date.
    """
    if not date_str:
        raise ValueError('{} is not a valid datetime string'.format(date_str))
    # strptime raises ValueError on malformed input and never returns a falsy
    # value, so no post-parse truthiness check is needed.
    return datetime.strptime(date_str, '%Y%m%d').date()