def test__prepare_binary_time(self):
    """Prepare a time object for the MySQL binary protocol"""
    # Each case pairs an input value with the expected (packed bytes, code)
    # tuple. NOTE(review): 11 is presumably the MySQL TIME type code —
    # confirm against the protocol implementation. Negative timedeltas set
    # the sign byte (second byte) to 1; values with microseconds use the
    # longer 12-byte encoding (first byte 0x0c instead of 0x08).
    cases = [
        (datetime.timedelta(hours=123, minutes=45, seconds=16),
         (b'\x08\x00\x05\x00\x00\x00\x03\x2d\x10', 11)),
        (datetime.timedelta(hours=-123, minutes=45, seconds=16),
         (b'\x08\x01\x06\x00\x00\x00\x15\x2d\x10', 11)),
        (datetime.timedelta(hours=123, minutes=45, seconds=16,
                            microseconds=345),
         (b'\x0c\x00\x05\x00\x00\x00\x03\x2d\x10\x59\x01\x00\x00', 11)),
        (datetime.timedelta(days=123, minutes=45, seconds=16),
         (b'\x08\x00\x7b\x00\x00\x00\x00\x2d\x10', 11)),
        (datetime.time(14, 53, 36),
         (b'\x08\x00\x00\x00\x00\x00\x0e\x35\x24', 11)),
        (datetime.time(14, 53, 36, 345),
         (b'\x0c\x00\x00\x00\x00\x00\x0e\x35\x24\x59\x01\x00\x00', 11))
    ]
    for data, exp in cases:
        self.assertEqual(exp, self._protocol._prepare_binary_time(data),
                         "Failed preparing value '{0}'".format(data))

    # Anything other than a timedelta/time value must raise
    self.assertRaises(ValueError,
                      self._protocol._prepare_binary_time,
                      'spam')
def test_create_entries_from_data_and_source(self):
    """Take the data and source, and create the sitting and entries from it"""
    source = self._create_source_and_load_test_json_to_entries()

    # check source now marked as processed
    source = Source.objects.get(id=source.id)  # reload from db
    self.assertEqual(source.last_processing_success.date(),
                     datetime.date.today())

    # check sitting created
    sitting_qs = Sitting.objects.filter(source=source)
    self.assertEqual(sitting_qs.count(), 1)
    sitting = sitting_qs[0]

    # check start and end date and times correct
    self.assertEqual(sitting.start_date, datetime.date(2011, 9, 1))
    self.assertEqual(sitting.start_time, datetime.time(14, 30, 00))
    self.assertEqual(sitting.end_date, datetime.date(2011, 9, 1))
    self.assertEqual(sitting.end_time, datetime.time(18, 30, 00))

    # check correct venue set
    self.assertEqual(sitting.venue.slug, 'national_assembly')

    # check entries created and that we have the right number
    entries = sitting.entry_set
    self.assertEqual(entries.count(), 64)
def _convert_time(time_str): h, m, s = time_str.split(":") if "." in s: f = float(s) s = int(f) return time(int(h), int(m), s, (f-s)*1000000) return time(int(h), int(m), int(s), 0)
def test_valid_dt_with_missing_values(self): from datetime import date, time # GH 8689 s = Series(date_range('20130101', periods=5, freq='D')) s.iloc[2] = pd.NaT for attr in ['microsecond', 'nanosecond', 'second', 'minute', 'hour', 'day']: expected = getattr(s.dt, attr).copy() expected.iloc[2] = np.nan result = getattr(s.dt, attr) tm.assert_series_equal(result, expected) result = s.dt.date expected = Series( [date(2013, 1, 1), date(2013, 1, 2), np.nan, date(2013, 1, 4), date(2013, 1, 5)], dtype='object') tm.assert_series_equal(result, expected) result = s.dt.time expected = Series( [time(0), time(0), np.nan, time(0), time(0)], dtype='object') tm.assert_series_equal(result, expected)
def check_event_sequence(self, when, now=None, interval=timedelta(minutes=15)):
    """
    Checks if all the measures have been saved on the *when* date.
    If *now* is given, the check window ends there instead of at the
    end of the day.

    :returns an EnergyMesasuresReport(count, missing, ok); ok is True
        when no expected measure time is missing.
    """
    if isinstance(when, datetime):
        when = when.date()
    start = datetime.combine(when, time(0, 0, 0))
    if now:
        end = now
    else:
        # NOTE(review): microsecond=9999 looks like a typo for 999999 —
        # as written the last ~0.99 s of the day is excluded; confirm.
        end = datetime.combine(when, time(23, 59, 59, 9999))
    qs = self.energy_set.filter(timestamp__gte=start, timestamp__lte=end)
    qs = qs.order_by('timestamp')
    # times at which a measure actually exists
    measured = set(map(lambda x: x.time(),
                       qs.values_list('timestamp', flat=True)))
    # times at which a measure is expected, one per *interval*
    expected = set()
    t0 = start + interval
    while t0 <= end:
        expected.add(t0.time())
        t0 += interval
    diff = expected.difference(measured)
    return self.EnergyMesasuresReport(qs.count(), diff, not diff)
def __producer__(self):
    """Supervise the SinaL2 quote feed: start it on a daemon thread,
    then once a minute stop it outside trading hours (before 09:00 or
    after 15:00) and restart it during hours on the current trading date.
    """
    import datetime
    import threading
    thread_sina_l2 = threading.Thread(
        target=self.start_sina_l2, daemon=True
    )
    thread_sina_l2.start()
    while True:
        time.sleep(60)
        current = datetime.datetime.now()
        if current.time() > datetime.time(15, 0, 0)\
                or current.time() < datetime.time(9, 0, 0):
            # Outside trading hours: stop the feed if it is running.
            if self.sina_l2 is not None:
                if not self.sina_l2.stopped:
                    self.logger.info("非盘中,暂停收行情")
                    self.sina_l2.stop()
        elif current.time() > datetime.time(9, 0, 0) \
                and current.date() == datetime.date(
                    int(self.trading_date[0:4]),
                    int(self.trading_date[5:7]),
                    int(self.trading_date[8:10])
                ):
            # Only (re)start when the trading date matches today's date.
            if (self.sina_l2 is None) or (self.sina_l2.terminated == True):
                self.logger.info("开启SinaL2: {}".format(self.trading_date))
                thread_sina_l2 = threading.Thread(
                    target=self.start_sina_l2, daemon=True
                )
                thread_sina_l2.start()
def test_post(self):
    """POSTing the create-course form creates a Course plus its CourseInfo."""
    group = GroupFactory()
    self.assertEqual(Course.objects.count(), 0)
    self.assertEqual(CourseInfo.objects.count(), 0)
    response = self.client.post(
        reverse("create_course"),
        {
            "title": "My Title",
            "group": group.pk,
            "faculty_group": group.pk,
            "term": 1,
            "year": 2016,
            "days": "TR",
            "starttime": time(hour=16),
            "endtime": time(hour=18),
        },
    )
    # successful creation redirects
    self.assertEqual(response.status_code, 302)
    self.assertEqual(Course.objects.count(), 1)
    course = Course.objects.first()
    self.assertEqual(course.title, "My Title")
    # the CourseInfo is created alongside and carries the form fields
    self.assertEqual(CourseInfo.objects.count(), 1)
    info = course.info
    self.assertEqual(info, CourseInfo.objects.first())
    self.assertEqual(info.term, 1)
    self.assertEqual(info.year, 2016)
    self.assertEqual(info.days, "TR")
    self.assertEqual(info.starttime, time(hour=16))
    self.assertEqual(info.endtime, time(hour=18))
def _compute_display_get(self):
    """ This method check if the alert can be displayed today
        if alert type is specific : compare specific_day(date) with today's date
        if alert type is week : check today is set as alert (checkbox true) eg. self['monday']
        if alert type is day : True

        return : Message if can_display_alert is True else False
    """
    # %w gives the weekday as '0' (Sunday) .. '6' (Saturday)
    days_codes = {'0': 'sunday',
                  '1': 'monday',
                  '2': 'tuesday',
                  '3': 'wednesday',
                  '4': 'thursday',
                  '5': 'friday',
                  '6': 'saturday'}
    can_display_alert = {
        'specific': (self.specific_day == fields.Date.context_today(self)),
        'week': self[days_codes[datetime.datetime.now().strftime('%w')]],
        'days': True
    }

    if can_display_alert[self.alert_type]:
        # compare "now" in the user's timezone against the alert window;
        # start_hour/end_hour are fractional hours (e.g. 13.5 == 13:30)
        mynow = fields.Datetime.context_timestamp(self,
                                                  datetime.datetime.now())
        hour_to = int(self.end_hour)
        min_to = int((self.end_hour - hour_to) * 60)
        to_alert = datetime.time(hour_to, min_to)
        hour_from = int(self.start_hour)
        min_from = int((self.start_hour - hour_from) * 60)
        from_alert = datetime.time(hour_from, min_from)

        if from_alert <= mynow.time() <= to_alert:
            self.display = True
        else:
            self.display = False
    # NOTE(review): self.display is left unassigned when the alert cannot
    # be shown today — confirm the compute framework's default is intended.
def to_python(self, value):
    """Convert *value* to a ``datetime.time`` or raise ValidationError.

    Accepts None (passed through), a time, a datetime (its time part is
    used), or a string of the form 'HH:MM[:SS[.ffffff]]'.
    """
    if value is None:
        return None
    if isinstance(value, datetime.time):
        return value
    if isinstance(value, datetime.datetime):
        # Not usually a good idea to pass in a datetime here (it loses
        # information), but this can be a side-effect of interacting with a
        # database backend (e.g. Oracle), so we'll be accommodating.
        return value.time()

    # Attempt to parse a datetime:
    value = smart_str(value)
    # split usecs, because they are not recognized by strptime.
    if '.' in value:
        try:
            value, usecs = value.split('.')
            if not usecs.isdigit():
                raise ValueError
            # Right-pad so short fractions are read as fractions of a
            # second ('.5' -> 500000 rather than 5 microseconds), and
            # truncate anything beyond microsecond precision.
            usecs = int(usecs.ljust(6, '0')[:6])
        except ValueError:
            raise exceptions.ValidationError(self.error_messages['invalid'])
    else:
        usecs = 0
    kwargs = {'microsecond': usecs}
    try:
        # Seconds are optional, so try converting seconds first.
        return datetime.time(*time.strptime(value, '%H:%M:%S')[3:6],
                             **kwargs)
    except ValueError:
        try:
            # Try without seconds.
            return datetime.time(*time.strptime(value, '%H:%M')[3:5],
                                 **kwargs)
        except ValueError:
            raise exceptions.ValidationError(self.error_messages['invalid'])
def test_delete_pair(self):
    """Deleting one pair via /delete_pair removes only that pair."""
    # create the group as an admin user
    simulate_login(self.testbed, '*****@*****.**', '123', True)
    group = Group(group_id='asgap', name='1', origin='1',
                  admin=[u'*****@*****.**'])
    post_group(group)
    simulate_login(self.testbed)
    group_id = 'asgap'
    simulate_login(self.testbed, '*****@*****.**', '123', True)
    pair1 = ScheduledPair(classname='Math 1',
                          date=datetime.date(2015, 4, 14),
                          start_time=datetime.time(10, 40),
                          duration=90,
                          task='some_task',
                          group_id='asgap')
    pair2 = ScheduledPair(classname='Math 2',
                          date=datetime.date(2015, 4, 15),
                          start_time=datetime.time(9, 40),
                          duration=120,
                          task='some task',
                          group_id='asgap')
    post_pair(pair1)
    post_pair(pair2)
    pairs_list = ScheduledPair.query(
        ScheduledPair.group_id == group_id).fetch(2)
    added_pair1 = pairs_list[0]
    added_pair2 = pairs_list[1]
    response = make_request('/' + group_id + '/delete_pair?key=' +
                            added_pair1.key.urlsafe() +
                            '&return_url=/pairs', 'GET')
    # delete redirects back to return_url
    self.assertEqual(response.status_int, 302)
    # only the second pair should remain
    pairs_list = ScheduledPair.query().fetch(2)
    self.assertEqual(len(pairs_list), 1)
    remained_pair = pairs_list[0]
    self.assertEqual(remained_pair, added_pair2)
def test_edit_pair(self):
    """Editing a pair via /edit_pair updates it in place (no duplicate)."""
    simulate_login(self.testbed, '*****@*****.**', '123', True)
    group = Group(group_id='asgap', name='1', origin='1',
                  admin=[u'*****@*****.**'])
    post_group(group)
    simulate_login(self.testbed)
    group_id = 'asgap'
    simulate_login(self.testbed, '*****@*****.**', '123', True)
    pair = ScheduledPair(classname='Math',
                         date=datetime.date(2015, 4, 14),
                         start_time=datetime.time(9, 40),
                         duration=90,
                         task='some_task',
                         group_id='asgap')
    response = post_pair(pair)
    added_pair = ScheduledPair.query(
        ScheduledPair.group_id == group_id).fetch(2)[0]
    # the edit form itself should render
    response = make_request('/asgap/edit_pair?key=' +
                            added_pair.key.urlsafe(), 'GET')
    self.assertEqual(response.status_int, 200)
    pair = ScheduledPair(classname='Math 1',
                         date=datetime.date(2016, 5, 15),
                         start_time=datetime.time(10, 41),
                         duration=120,
                         task='some task\n1',
                         group_id='asgap')
    # posting with the existing key replaces rather than adds
    response = post_pair(pair, added_pair.key.urlsafe())
    self.assertEqual(response.status_int, 302)
    pairs_list = ScheduledPair.query(
        ScheduledPair.group_id == group_id).fetch(2)
    self.assertEqual(len(pairs_list), 1)
    added_pair = pairs_list[0]
    self.check_pair_fields(added_pair, pair)
    response = make_request('/asgap/pairs', 'GET')
    self.assertEqual(response.status_int, 200)
def test_time(self):
    """Tick labels of a time-indexed plot should render as HH:MM:SS."""
    t = datetime(1, 1, 1, 3, 30, 0)
    deltas = np.random.randint(1, 20, 3).cumsum()
    ts = np.array([(t + timedelta(minutes=int(x))).time() for x in deltas])
    df = DataFrame({'a': np.random.randn(len(ts)),
                    'b': np.random.randn(len(ts))},
                   index=ts)
    ax = df.plot()

    # verify tick labels
    ticks = ax.get_xticks()
    labels = ax.get_xticklabels()
    for t, l in zip(ticks, labels):
        # tick values are presumably seconds since midnight — matches the
        # double divmod below
        m, s = divmod(int(t), 60)
        h, m = divmod(m, 60)
        xp = l.get_text()
        if len(xp) > 0:
            rs = time(h, m, s).strftime('%H:%M:%S')
            self.assertEqual(xp, rs)

    # change xlim
    ax.set_xlim('1:30', '5:00')

    # check tick labels again
    ticks = ax.get_xticks()
    labels = ax.get_xticklabels()
    for t, l in zip(ticks, labels):
        m, s = divmod(int(t), 60)
        h, m = divmod(m, 60)
        xp = l.get_text()
        if len(xp) > 0:
            rs = time(h, m, s).strftime('%H:%M:%S')
            self.assertEqual(xp, rs)
def setUp(self):
    """Create a minimal Tree / Supplier / Service fixture graph."""
    # classification nodes, one per tree_type index
    area = Tree.objects.create(name='广州', tree_type=Tree.get_type(0))
    car = Tree.objects.create(name='君威2014款', tree_type=Tree.get_type(1))
    service = Tree.objects.create(name='玻璃贴膜', tree_type=Tree.get_type(2))
    brand = Tree.objects.create(name='3M', tree_type=Tree.get_type(3))
    foil_type = Tree.objects.create(name='整车', tree_type=Tree.get_type(6))
    foil_model_front = Tree.objects.create(name='前挡类型',
                                           tree_type=Tree.get_type(7))
    supplier = Supplier.objects.create(
        name='s1',
        area=area,
        address='address1',
        coordinate_x=120.01234,
        coordinate_y=30.1234,
        # photo=  # ImageField(blank=True,null=True,upload_to='photos/suppliers')
        phone='1234567',
        time_open=time(10, 0, 0),
        time_close=time(19, 0, 0),
        description='description of s1',
        owner=UserManager.create_user('phiree', email=None, password=None)
    )
    # note: rebinds 'service' from the Tree node to the Service row
    service = Service.objects.create(supplier=supplier,
                                     service_type=service, car=car)
def test_timefield_to_python_usecs(self):
    """TimeField.to_python should support usecs"""
    field = models.TimeField()
    # six-digit fractions map directly onto the microsecond component
    for raw, parsed in (
        ('01:02:03.000004', datetime.time(1, 2, 3, 4)),
        ('01:02:03.999999', datetime.time(1, 2, 3, 999999)),
    ):
        self.assertEqual(field.to_python(raw), parsed)
def generateCalendars(self):
    """Validate the form inputs and start calendar generation on a
    background thread, unless a previous run is still in progress."""
    target = self.target_line_edit.text()
    xml_file = self.xml_line_edit.text()
    # term boundaries as midnight datetimes
    term_start = datetime.datetime.combine(
        self.term_start_edit.date().toPyDate(), datetime.time())
    half_end = datetime.datetime.combine(
        self.half_end_edit.date().toPyDate(), datetime.time())
    half_start = datetime.datetime.combine(
        self.half_start_edit.date().toPyDate(), datetime.time())
    term_end = datetime.datetime.combine(
        self.term_end_edit.date().toPyDate(), datetime.time())
    print(xml_file)
    if not(os.path.isfile(xml_file)):
        self.workLabel.setText("XML file not found")
    elif not(os.path.isdir(target)):
        self.workLabel.setText("Target directory not found")
    else:
        self.workLabel.setText('Working...')
        if self.week_b.isChecked():
            week_start = "B"
        else:
            week_start = "A"
        dates = tp.timetableDates(term_start=term_start, half_end=half_end,
                                  half_start=half_start, term_end=term_end,
                                  week_start=week_start)
        calGroup = tp.TimeTableGroup(xml_file, dates)
        # don't launch a second worker while one is still running
        if not(self.first_run) and self.calThread.running:
            pass
        else:
            self.first_run = False
            self.calThread = calendarThread(calGroup, target, parent=self)
            self.calThread.start()
def test_str_to_time(self):
    """str_to_time should parse padded, unpadded, dated and AM/PM inputs."""
    tz = pytz.timezone('Asia/Kabul')
    # freeze "now" so any date-relative parsing is deterministic
    with patch.object(timezone, 'now',
                      return_value=tz.localize(
                          datetime(2014, 1, 2, 3, 4, 5, 6))):
        self.assertEqual(time(3, 4), str_to_time('03:04'))  # zero padded
        self.assertEqual(time(3, 4), str_to_time('3:4'))  # not zero padded
        self.assertEqual(time(3, 4),
                         str_to_time('01-02-2013 03:04'))  # with date
        self.assertEqual(time(15, 4), str_to_time('3:04 PM'))  # as PM
def test_at_time_frame(self):
    """DataFrame.at_time should select only rows matching a time of day."""
    rng = date_range('1/1/2000', '1/5/2000', freq='5min')
    ts = DataFrame(np.random.randn(len(rng), 2), index=rng)
    rs = ts.at_time(rng[1])
    self.assertTrue((rs.index.hour == rng[1].hour).all())
    self.assertTrue((rs.index.minute == rng[1].minute).all())
    self.assertTrue((rs.index.second == rng[1].second).all())

    # string form and time-object form must agree
    result = ts.at_time('9:30')
    expected = ts.at_time(time(9, 30))
    assert_frame_equal(result, expected)

    # .loc with a time behaves like an hour/minute boolean mask
    result = ts.loc[time(9, 30)]
    expected = ts.loc[(rng.hour == 9) & (rng.minute == 30)]
    assert_frame_equal(result, expected)

    # midnight, everything
    rng = date_range('1/1/2000', '1/31/2000')
    ts = DataFrame(np.random.randn(len(rng), 3), index=rng)
    result = ts.at_time(time(0, 0))
    assert_frame_equal(result, ts)

    # time doesn't exist
    rng = date_range('1/1/2012', freq='23Min', periods=384)
    ts = DataFrame(np.random.randn(len(rng), 2), rng)
    rs = ts.at_time('16:00')
    self.assertEqual(len(rs), 0)
def test_HorarioReservaInvalido_TiempoTotalMenor1h(self):
    """A reservation shorter than one hour must be rejected."""
    # 13:00:00 -> 13:59:59 is one second short of a full hour
    inicio = datetime(year=2000, month=2, day=6,
                      hour=13, minute=0, second=0)
    fin = datetime(year=2000, month=2, day=6,
                   hour=13, minute=59, second=59)
    apertura = time(hour=12, minute=0, second=0)
    cierre = time(hour=18, minute=0, second=0)
    resultado = validarHorarioReserva(inicio, fin, apertura, cierre, 7)
    self.assertEqual(
        resultado,
        (False, 'El tiempo de reserva debe ser al menos de 1 hora.'))
def convert_dt_string(self, string):
    """Render a repr()-style ``datetime.datetime(...)`` string as
    human-readable '"Month DD, YYYY" [HH[:MM[:SS]]]' text, or "None"
    when too few components are present."""
    # strip the constructor wrapper, leaving a comma-separated arg list
    cleaned = string.replace("datetime.datetime", "")
    for bracket in ("(", ")"):
        cleaned = cleaned.replace(bracket, "")
    parts = cleaned.split(", ")

    # a date needs at least year/month/day
    docdate = None
    if len(parts) >= 3:
        docdate = date(int(parts[0]), int(parts[1]),
                       int(parts[2])).strftime("%B %d, %Y")

    # time precision depends on how many components were supplied
    if len(parts) == 4:
        doctime = time(int(parts[3])).strftime("%H")
    elif len(parts) == 5:
        doctime = time(int(parts[3]), int(parts[4])).strftime("%H:%M")
    elif len(parts) == 6:
        doctime = time(int(parts[3]), int(parts[4]),
                       int(parts[5])).strftime("%H:%M:%S")
    else:
        doctime = None

    if docdate and doctime:
        return "{0} {1}".format(docdate, doctime)
    if docdate:
        return docdate
    return "None"
def test_add_timefence_object(self):
    """add_timefence should record days, start/end times and default TZ."""
    policy = ServiceSecurityPolicy()
    start_time = time(hour=12, minute=30, second=30)
    end_time = time(hour=23, minute=45, second=45)
    policy.add_timefence("Fence 1", start_time, end_time,
                         monday=True, wednesday=True, sunday=True)
    self.assertEqual(len(policy.timefences), 1)
    timefence = policy.timefences[0]
    # selected days appear both as names in .days and as boolean flags
    self.assertIn("Monday", timefence.days)
    self.assertTrue(timefence.monday)
    self.assertNotIn("Tuesday", timefence.days)
    self.assertFalse(timefence.tuesday)
    self.assertIn("Wednesday", timefence.days)
    self.assertTrue(timefence.wednesday)
    self.assertNotIn("Thursday", timefence.days)
    self.assertFalse(timefence.thursday)
    self.assertNotIn("Friday", timefence.days)
    self.assertFalse(timefence.friday)
    self.assertNotIn("Saturday", timefence.days)
    self.assertFalse(timefence.saturday)
    self.assertIn("Sunday", timefence.days)
    self.assertTrue(timefence.sunday)
    self.assertEqual(timefence.start_time, start_time)
    self.assertEqual(timefence.end_time, end_time)
    # timezone defaults to UTC when not supplied
    self.assertEqual(timefence.timezone, "UTC")
def test_HorarioReservaInvalido_InicioReservacion_Mayor_FinalReservacion(self):
    """A reservation whose start is after its end must be rejected."""
    inicio = datetime.now() + timedelta(minutes=1)
    fin = datetime.now()
    # the venue is open essentially all day, so only the ordering fails
    apertura = time(hour=0, minute=0, second=0)
    cierre = time(hour=23, minute=59, second=59)
    resultado = validarHorarioReserva(inicio, fin, apertura, cierre, 7)
    self.assertEqual(
        resultado,
        (False, 'El horario de inicio de reservacion debe ser menor al horario de fin de la reserva.'))
def convert_showtime_to_timeobject(self, showtime):
    """
    Takes a showtime as string and returns a 24H formatted "HH:MM" string.
    Handle quirks in movie chains showtimes.
    """
    suffixes = [('pm', 12), ('p.m.', 12), ('am', 0), ('a.m.', 0)]
    offset = 12  # by default, all showtimes are pm, even if not stated
    for sfx, off in suffixes:
        if sfx in showtime:
            showtime = showtime.replace(sfx, '')
            offset = off
    # split hours, minutes
    hours, minutes = showtime.split(':')
    hours = int(hours)
    minutes = int(minutes)
    if (hours == 12) and (offset == 12):
        # "12:xx pm" is already 12:xx in 24H time
        offset = 0
    elif (hours == 12) and (offset == 0):
        # Fix: "12:xx am" is midnight, i.e. 00:xx in 24H time
        # (previously returned "12:xx")
        hours = 0
    try:
        result = datetime.time(int(hours) + offset,
                               int(minutes)).strftime('%H:%M')
    except ValueError:
        # hour was already in 24H form; adding the offset overflowed
        result = datetime.time(int(hours), int(minutes)).strftime('%H:%M')
    return result
def to_time(str_times):
    """Convert a "(HH:MM-HH:MM)" range string into a two-element list of
    ``datetime.time`` objects, e.g. "(08:45-10:45)"."""
    numbers = [int(chunk) for chunk in re.findall(r'\d+', str_times)]
    start = datetime.time(numbers[0], numbers[1])
    end = datetime.time(numbers[2], numbers[3])
    return [start, end]
def timediff(self, stime, etime):
    """Return the difference between two "HH:MM:SS" clock strings as a
    str(timedelta) (e.g. "1:30:15"), anchored on this object's date."""
    def _as_time(clock):
        # parse "HH:MM:SS" into a time object
        hh, mm, ss = (int(part) for part in clock.split(":"))
        return time(hh, mm, ss)

    start_dt = datetime.combine(self._date, _as_time(stime))
    end_dt = datetime.combine(self._date, _as_time(etime))
    return str(end_dt - start_dt)
def main(dt=datetime(2015, 4, 24),
         symbols={'ESM5', 'NQM5'},
         sleep_max_sec=60,
         verbose=0):
    # Python 2 module. Runs one day's snapshot backtest for *symbols* on
    # date *dt*, after a random 0..sleep_max_sec stagger so parallel
    # invocations don't start simultaneously.
    # NOTE(review): mutable-ish default for *symbols* (a set) — harmless
    # here only because it is never mutated.
    delay = random.randint(0, sleep_max_sec)
    print "%s processing %s delay %d secs" % (
        datetime.now().strftime('%H:%M:%S'), dt.strftime('%Y%m%d'), delay)
    # NOTE(review): re-draws a fresh random delay instead of sleeping the
    # one just printed — confirm this is intended.
    tm.sleep(random.randint(0, sleep_max_sec))
    gen = snapshot_generator()
    gen.instruments = symbols
    gen.date = dt
    # snapshots are taken between 08:00 and 16:00
    start_snaps = time(8, 0, 0)
    stop_snaps = time(16, 0, 0)
    gen.intervals = [timedelta(milliseconds=100), timedelta(seconds=1),
                     timedelta(seconds=5)]
    gen.interval_names = {'100ms', '1s', '5s'}
    gen.halflives = [timedelta(milliseconds=200), timedelta(seconds=2),
                     timedelta(seconds=10)]
    gen.halflive_names = ['200ms', '2s', '10s']
    gen.verbose = verbose
    bt = pybt.pybt()
    bt.handler = gen
    bt.date = bt.handler.date
    bt.verbose = verbose
    for ins in gen.instruments:
        bt.symbols.append(ins)
    # the backtest window is slightly wider than the snapshot window
    bt.start_time = datetime.combine(bt.date, time(7, 0, 0))
    bt.end_time = datetime.combine(bt.date, time(16, 10, 0))
    for interval in gen.intervals:
        bt.add_timer(interval,
                     datetime.combine(bt.date, start_snaps),
                     datetime.combine(bt.date, stop_snaps))
    bt.run()
def iter_job_adverts(self):
    """Yield MonsterJobAdvert objects scraped from the listings table."""
    re_id = re.compile('http://offre-emploi.monster.fr/(.*?).aspx', re.DOTALL)
    trs = self.document.getroot().xpath(
        "//table[@class='listingsTable']/tbody/tr")
    for tr in trs:
        # skip hidden filler rows
        if 'class' in tr.attrib and tr.attrib['class'] != 'aceHidden':
            a = self.parser.select(
                tr, 'td/div/div[@class="jobTitleContainer"]/a', 1,
                method='xpath')
            _id = u'%s' % re_id.search(a.attrib['href']).group(1)
            advert = MonsterJobAdvert(_id)
            advert.society_name = u'%s' % self.parser.select(
                tr, 'td/div/div[@class="companyContainer"]/div/a', 1,
                method='xpath').attrib['title']
            advert.title = u'%s' % a.text
            # dates are given relatively ("n heures" / "n jours")
            date = self.parser.select(
                tr, 'td/div/div[@class="fnt20"]', 1,
                method='xpath').text_content().strip()
            now = datetime.now()
            number = re.search("\d+", date)
            if number:
                if 'heures' in date:
                    date = now - timedelta(hours=int(number.group(0)))
                    advert.publication_date = datetime.combine(date, time())
                elif 'jour' in date:
                    date = now - timedelta(days=int(number.group(0)))
                    advert.publication_date = datetime.combine(date, time())
            else:
                # no number at all: assume published today at midnight
                advert.publication_date = datetime.combine(now, time.min)
            place = self.parser.select(
                tr, 'td/div/div[@class="jobLocationSingleLine"]/a',
                method='xpath')
            if len(place) != 0:
                advert.place = u'%s' % place[0].attrib['title']
            yield advert
def test_time_to_days():
    """time_to_days converts a time-of-day into an Excel day fraction."""
    from openpyxl.date_time import time_to_days
    afternoon = time(13, 55, 12, 36)
    assert time_to_days(afternoon) == 0.5800000004166667
    early_morning = time(3, 0, 0)
    assert time_to_days(early_morning) == 0.125
def __init__(self, feed, instrument, broker, openrange, p_a, p_c,
             isFinancialFuture):
    """Intraday opening-range strategy state.

    :param openrange: opening-range length in minutes (see the *60
        conversion to seconds below)
    :param p_a, p_c: thresholds for the A/C counters
    :param isFinancialFuture: financial futures exit intraday trades at
        15:15, other futures at 18:50
    """
    strategy.BacktestingStrategy.__init__(self, feed, broker)
    self.__position = None
    self.__instrument = instrument
    # self.__prices = feed[instrument].getCloseDataSeries()
    self.stateA = 0  # 1 = long (duo), -1 = short (kong), 0 = initial
    self.stateB = 6  # 2 open-long, 3 close-long, 4 open-short, 5 close-short, 6 flat
    self.openrange = openrange
    self.barsLimit = int(self.openrange / 2) + 1
    # directional counters compared against the p_a / p_c thresholds
    self.a_plus = 0
    self.a_minus = 0
    self.c_plus = 0
    self.c_minus = 0
    self.p_a = p_a
    self.p_c = p_c
    self.lastDate = None
    self.__resetORHL()
    self.__resetCounter()
    if (isFinancialFuture):
        self.exitIntradayTradeTime = dtime.time(15, 15)
    else:
        self.exitIntradayTradeTime = dtime.time(18, 50)
    self.__clos = -1
    self.__timeLimitInSeconds = self.openrange * 60  ## in seconds
def test_process(self):
    """The plugin should compute a departure time for a located event."""
    event = Event(
        name="some event",
        location="Ballwil",
        start_time=datetime.combine(date.today(), time(hour=9)))
    self.plugin.process(event)
    # a 9:00 start in Ballwil yields a 7:38 departure
    self.assertEqual(event.departure_time, datetime.combine(
        date.today(), time(hour=7, minute=38)))
def test_remove_timefence(self):
    """remove_timefence should drop only the fence with the given name."""
    # Create a policy and add two time fences
    policy = ServiceSecurityPolicy()
    policy.add_timefence("my timefence", time(hour=10), time(hour=11))
    policy.add_timefence("my timefence 2", time(hour=12), time(hour=13))
    self.assertEqual(len(policy.timefences), 2)
    retrieved = policy.get_policy()
    # factor order is not guaranteed; pick whichever is the timefence factor
    factor = retrieved['factors'][1] if \
        retrieved['factors'][1]['factor'] == 'timefence' else retrieved['factors'][0]
    self.assertEqual(len(factor['attributes']['time fences']), 2)

    # Remove the first time fence
    policy.remove_timefence('my timefence')
    self.assertEqual(len(policy.timefences), 1)
    retrieved = policy.get_policy()
    factor = retrieved['factors'][1] if \
        retrieved['factors'][1]['factor'] == 'timefence' else retrieved['factors'][0]

    # Verify the correct time fence was removed
    self.assertEqual(len(factor['attributes']['time fences']), 1)
    self.assertEqual(factor['attributes']['time fences'][0]['name'],
                     "my timefence 2")
    self.assertEqual(factor['attributes']['time fences'][0]['start hour'], 12)
    self.assertEqual(factor['attributes']['time fences'][0]['end hour'], 13)
    self.assertEqual(policy.timefences[0].name, "my timefence 2")
    self.assertEqual(policy.timefences[0].start_time.hour, 12)
    self.assertEqual(policy.timefences[0].end_time.hour, 13)
def timeline_for_event(event, subevent=None):
    """Build the chronological list of TimelineEvent entries for *event*
    (or for one of its *subevent*s), covering event dates, sale windows,
    setting-derived deadlines, product availability windows, payment
    provider deadlines and plugin-contributed entries."""
    tl = []
    # date fields are read from the subevent when one is given
    ev = subevent or event
    if subevent:
        ev_edit_url = reverse('control:event.subevent', kwargs={
            'event': event.slug,
            'organizer': event.organizer.slug,
            'subevent': subevent.pk
        })
    else:
        ev_edit_url = reverse('control:event.settings', kwargs={
            'event': event.slug,
            'organizer': event.organizer.slug
        })

    # core event dates
    tl.append(TimelineEvent(
        event=event, subevent=subevent, datetime=ev.date_from,
        description=pgettext_lazy('timeline', 'Your event starts'),
        edit_url=ev_edit_url))
    if ev.date_to:
        tl.append(TimelineEvent(
            event=event, subevent=subevent, datetime=ev.date_to,
            description=pgettext_lazy('timeline', 'Your event ends'),
            edit_url=ev_edit_url))
    if ev.date_admission:
        tl.append(TimelineEvent(
            event=event, subevent=subevent, datetime=ev.date_admission,
            description=pgettext_lazy(
                'timeline', 'Admissions for your event start'),
            edit_url=ev_edit_url))
    if ev.presale_start:
        tl.append(TimelineEvent(
            event=event, subevent=subevent, datetime=ev.presale_start,
            description=pgettext_lazy('timeline', 'Start of ticket sales'),
            edit_url=ev_edit_url))
    if ev.presale_end:
        tl.append(TimelineEvent(
            event=event, subevent=subevent, datetime=ev.presale_end,
            description=pgettext_lazy('timeline', 'End of ticket sales'),
            edit_url=ev_edit_url))

    # deadlines derived from settings (stored as relative dates)
    rd = event.settings.get('last_order_modification_date',
                            as_type=RelativeDateWrapper)
    if rd:
        tl.append(TimelineEvent(
            event=event, subevent=subevent, datetime=rd.datetime(ev),
            description=pgettext_lazy(
                'timeline', 'Customers can no longer modify their orders'),
            edit_url=ev_edit_url))
    rd = event.settings.get('payment_term_last', as_type=RelativeDateWrapper)
    if rd:
        # the term covers the whole last day, in the event's timezone
        d = make_aware(
            datetime.combine(rd.date(ev),
                             time(hour=23, minute=59, second=59)),
            event.timezone)
        tl.append(TimelineEvent(
            event=event, subevent=subevent, datetime=d,
            description=pgettext_lazy(
                'timeline', 'No more payments can be completed'),
            edit_url=reverse('control:event.settings.payment', kwargs={
                'event': event.slug,
                'organizer': event.organizer.slug
            })))
    rd = event.settings.get('ticket_download_date',
                            as_type=RelativeDateWrapper)
    if rd and event.settings.ticket_download:
        tl.append(TimelineEvent(
            event=event, subevent=subevent, datetime=rd.datetime(ev),
            description=pgettext_lazy('timeline', 'Tickets can be downloaded'),
            edit_url=reverse('control:event.settings.tickets', kwargs={
                'event': event.slug,
                'organizer': event.organizer.slug
            })))
    rd = event.settings.get('cancel_allow_user_until',
                            as_type=RelativeDateWrapper)
    if rd and event.settings.cancel_allow_user:
        tl.append(TimelineEvent(
            event=event, subevent=subevent, datetime=rd.datetime(ev),
            description=pgettext_lazy(
                'timeline',
                'Customers can no longer cancel free or unpaid orders'),
            edit_url=reverse('control:event.settings.tickets', kwargs={
                'event': event.slug,
                'organizer': event.organizer.slug
            })))
    rd = event.settings.get('cancel_allow_user_paid_until',
                            as_type=RelativeDateWrapper)
    if rd and event.settings.cancel_allow_user_paid:
        tl.append(TimelineEvent(
            event=event, subevent=subevent, datetime=rd.datetime(ev),
            description=pgettext_lazy(
                'timeline', 'Customers can no longer cancel paid orders'),
            edit_url=reverse('control:event.settings.tickets', kwargs={
                'event': event.slug,
                'organizer': event.organizer.slug
            })))

    if not event.has_subevents:
        days = event.settings.get('mail_days_download_reminder', as_type=int)
        if days is not None:
            # reminders go out at midnight *days* days before the event
            reminder_date = (ev.date_from - timedelta(days=days)).replace(
                hour=0, minute=0, second=0, microsecond=0)
            tl.append(TimelineEvent(
                event=event, subevent=subevent, datetime=reminder_date,
                description=pgettext_lazy(
                    'timeline', 'Download reminders are being sent out'),
                edit_url=reverse('control:event.settings.mail', kwargs={
                    'event': event.slug,
                    'organizer': event.organizer.slug
                })))

    # per-product availability windows
    for p in event.items.filter(
            Q(available_from__isnull=False) |
            Q(available_until__isnull=False)):
        if p.available_from:
            tl.append(TimelineEvent(
                event=event, subevent=subevent, datetime=p.available_from,
                description=pgettext_lazy(
                    'timeline',
                    'Product "{name}" becomes available').format(
                        name=str(p)),
                edit_url=reverse('control:event.item', kwargs={
                    'event': event.slug,
                    'organizer': event.organizer.slug,
                    'item': p.pk,
                })))
        if p.available_until:
            tl.append(TimelineEvent(
                event=event, subevent=subevent, datetime=p.available_until,
                description=pgettext_lazy(
                    'timeline',
                    'Product "{name}" becomes unavailable').format(
                        name=str(p)),
                edit_url=reverse('control:event.item', kwargs={
                    'event': event.slug,
                    'organizer': event.organizer.slug,
                    'item': p.pk,
                })))

    pprovs = event.get_payment_providers()
    # This is a special case, depending on payment providers not overriding
    # BasePaymentProvider by too much, but it's preferable to having all
    # plugins implement this separately.
    for pprov in pprovs.values():
        if not pprov.settings.get('_enabled', as_type=bool):
            continue
        availability_date = pprov.settings.get('_availability_date',
                                               as_type=RelativeDateWrapper)
        if availability_date:
            d = make_aware(
                datetime.combine(availability_date.date(ev),
                                 time(hour=23, minute=59, second=59)),
                event.timezone)
            tl.append(TimelineEvent(
                event=event, subevent=subevent, datetime=d,
                description=pgettext_lazy(
                    'timeline',
                    'Payment provider "{name}" can no longer be selected').format(
                        name=str(pprov.verbose_name)),
                edit_url=reverse(
                    'control:event.settings.payment.provider', kwargs={
                        'event': event.slug,
                        'organizer': event.organizer.slug,
                        'provider': pprov.identifier,
                    })))

    # allow plugins to contribute additional entries
    for recv, resp in timeline_events.send(sender=event, subevent=subevent):
        tl += resp

    return sorted(tl, key=lambda e: e.datetime)
def test_result_processor_no_microseconds(self):
    """A time string without a fractional part round-trips unchanged."""
    self._assert_result_processor(datetime.time(12, 34, 56), '12:34:56')
# NOTE(review): Python 2 module-level script code; indentation of the
# collapsed original reconstructed from its control flow — confirm.
if random.randint(1, 10) == 1:
    # 1-in-10 chance to bail out immediately (randomized load shedding)
    print "Random Chance Exit at : " + str(
        datetime.datetime.now().time())[:8]
    sys.exit(0)


def time_in_range(start, end, x):
    """Return True if *x* lies in the [start, end] window, handling
    windows that wrap around midnight (start > end)."""
    if start <= end:
        return start <= x <= end
    else:
        return start <= x or x <= end


if skip_delay == 0:
    # between 02:00 and 07:00, continue only 1 time in 25
    if time_in_range(datetime.time(2, 0, 0), datetime.time(7, 0, 0),
                     datetime.datetime.now().time()):
        if random.randint(1, 25) != 1:
            print "Special Time Range Exit at : " + str(
                datetime.datetime.now().time())[:8]
            sys.exit(0)
    # random stagger before starting work
    delay = random.randint(0, 250)
    print "Sleeping for " + str(delay) + " seconds"
    time.sleep(delay)

print "New Run at : " + str(datetime.datetime.now().time())[:8]

# NOTE(review): mid-file imports kept as-is to preserve behavior
import requests
import thread
from threading import Thread
from threading import Lock
def startdatum(self):
    """Start of the informatieobject: midnight UTC on the creation date
    of its latest version."""
    creation_date = self.informatieobject.latest_version.creatiedatum
    at_midnight = datetime.datetime.combine(creation_date,
                                            datetime.time(0, 0))
    return at_midnight.replace(tzinfo=timezone.utc)
def reschedule(self, request, **kwargs):
    """Move an emission to the slot given by pixel offsets 'top'/'left'.

    'top' maps to minutes within the day (snapped to 15-minute steps),
    'left' to whole days on the scheduler grid. Refuses slots in the past
    (within the lock-ahead window) or overlapping another emission.
    """
    log = logging.getLogger('abcast.schedulerapi.reschedule')

    top = request.POST.get('top', None)
    left = request.POST.get('left', None)
    num_days = request.POST.get('num_days', SCHEDULER_NUM_DAYS)

    e = Emission.objects.get(**self.remove_api_resource_names(kwargs))
    data = {"version": "2.4.1"}

    # pixels-per-hour / pixels-per-day scale factors
    pph = SCHEDULER_PPH
    # ppd = SCHEDULER_PPD
    ppd = (SCHEDULER_GRID_WIDTH - SCHEDULER_GRID_OFFSET) / int(num_days)

    # convert pixel offsets into minutes (snapped to 15) and whole days
    top = float(top) / pph * 60
    offset_min = int(15 * round(float(top) / 15))
    left = float(left) / ppd
    offset_d = int(round(float(left)))

    log.debug('minutes (offset): %s' % offset_min)
    log.debug('days (offset): %s' % offset_d)

    # add offsets, starting from midnight of the emission's current day
    time_start = datetime.datetime.combine(e.time_start.date(),
                                           datetime.time(0))
    time_start = time_start + datetime.timedelta(minutes=offset_min,
                                                 days=offset_d)
    time_start = time_start + datetime.timedelta(hours=SCHEDULER_OFFSET)
    time_end = time_start + datetime.timedelta(
        milliseconds=e.content_object.get_duration())

    log.debug('time_start: %s' % time_start)
    log.debug('time_end: %s' % time_end)

    success = True

    # check if in past
    now = datetime.datetime.now()
    lock_end = now + datetime.timedelta(seconds=SCHEDULER_LOCK_AHEAD)
    if lock_end > time_start:
        data = {
            'message': _('You cannot schedule things in the past!')
        }
        success = False

    # check if slot is free
    es = Emission.objects.filter(time_end__gt=time_start,
                                 time_start__lt=time_end).exclude(pk=e.pk)
    if es.count() > 0:
        data = {
            'message': _('Sorry, but the desired time does not seem to be available.')
        }
        success = False

    if success:
        e.time_start = time_start
        data['status'] = True

    # always save to trigger push-update
    e.save()

    return self.json_response(request, data)
# 'nifty data ': { # 'task': 'api.tasks.broadcastNiftyData', # 'schedule': timedelta(seconds=20), # }, # 'sell data ': { # 'task': 'api.tasks.broadcastSellData', # 'schedule': timedelta(seconds=100), # }, 'ticker data': { 'task': 'api.tasks.broadcastTickerData', 'schedule': timedelta(seconds=20), }, 'Delete history': { 'task': 'api.tasks.delete_history', 'schedule': crontab(hour=18, minute=30), }, } # Share market start and end time _start_time = datetime.time(hour=19, minute=00, second=30) #,second=00) _end_time = datetime.time(hour=1, minute=29, second=30) #,minute=30,second=00) CHANNEL_LAYERS = { "default": { "BACKEND": "channels_redis.core.RedisChannelLayer", "CONFIG": { "hosts": [('redis', 6379)], }, }, }
def refresh_currency(self):
    """Refresh the currencies rates !!for all companies now"""
    _logger.info(
        'Starting to refresh currencies with service %s (company: %s)',
        self.service, self.company_id.name)
    curr_obj = self.env['res.currency']
    rate_obj = self.env['res.currency.rate']
    company = self.company_id
    # The multi company currency can be set or no so we handle
    # The two case
    if company.auto_currency_up:
        # Base currency (rate == 1) for this company, falling back to the
        # global base currency when no company-specific one exists.
        main_currency = curr_obj.search(
            [('base', '=', True), ('company_id', '=', company.id)], limit=1)
        if not main_currency:
            main_currency = curr_obj.search(
                [('base', '=', True)], limit=1)
        # NOTE(review): this THB-specific service only handles a THB base
        # currency; anything else is delegated to the standard updater.
        if main_currency.name != 'THB':
            return super(CurrencyRateUpdateService, self).refresh_currency()
        if not main_currency:
            raise UserError(_('There is no base currency set!'))
        if main_currency.rate != 1:
            raise UserError(_('Base currency rate should be 1.00!'))
        note = self.note or ''
        try:
            # We initalize the class that will handle the request
            # and return a dict of rate
            factory = CurrencyGetterFactoryTHB()
            getter = factory.register(self.service)
            curr_to_fetch = map(lambda x: x.name, self.currency_to_update)
            res, log_info = getter.get_updated_currency(
                curr_to_fetch,
                main_currency.name,
                self.max_delta_days
            )
            # rate records are keyed on midnight (UTC) of today
            rate_name = \
                fields.Datetime.to_string(datetime.utcnow().replace(
                    hour=0, minute=0, second=0, microsecond=0))
            for curr in self.currency_to_update:
                if curr.id == main_currency.id:
                    continue
                # update today's rate in place if it exists, else create it
                do_create = True
                for rate in curr.rate_ids:
                    if rate.name == rate_name:
                        rate.rate = res[curr.name]
                        do_create = False
                        break
                if do_create:
                    vals = {
                        'currency_id': curr.id,
                        'rate_input': res[curr.name],  # with Currency Ext
                        'name': rate_name
                    }
                    rate_obj.create(vals)
                    _logger.info(
                        'Updated currency %s via service %s',
                        curr.name, self.service)
            # Show the most recent note at the top
            msg = '%s \n%s currency updated. \n%s' % (
                log_info or '',
                fields.Datetime.to_string(datetime.today()),
                note
            )
            self.write({'note': msg})
        except Exception as exc:
            # failures are recorded in the note field rather than raised
            error_msg = '\n%s ERROR : %s %s' % (
                fields.Datetime.to_string(datetime.today()),
                repr(exc),
                note
            )
            _logger.error(repr(exc))
            self.write({'note': error_msg})
        # when run from the scheduler, push next_run forward by one interval
        if self._context.get('cron', False):
            midnight = time(0, 0)
            next_run = (datetime.combine(
                fields.Date.from_string(self.next_run),
                midnight) + _intervalTypes[str(self.interval_type)]
                (self.interval_number)).date()
            self.next_run = next_run
def read_time_millis(data, writer_schema=None, reader_schema=None):
    """Decode a milliseconds-since-midnight value into a datetime.time.

    The schema arguments are accepted for reader-interface compatibility
    and are not used.
    """
    hours, remainder = divmod(data, MLS_PER_HOUR)
    minutes, remainder = divmod(remainder, MLS_PER_MINUTE)
    seconds, millis = divmod(remainder, MLS_PER_SECOND)
    # datetime.time wants microseconds, hence the * 1000
    return time(int(hours), int(minutes), int(seconds), int(millis) * 1000)
def _read_hdr(self):
    """Read header from EDF file.

    It only reads the header for internal purposes and adds a hdr
    attribute to the instance.

    Notes
    -----
    Fixes over the previous version: regex patterns are raw strings
    (plain '(\\d+)' is an invalid escape sequence on modern Python), and
    the seconds field of the start time is no longer stored in a
    variable named ``day``, which shadowed the date's day-of-month.

    Raises
    ------
    AssertionError
        If the file does not start with the EDF version field, or the
        number of bytes consumed does not match the advertised header
        size.
    """
    with self.filename.open('rb') as f:
        hdr = {}
        assert f.tell() == 0
        # EDF version field: ASCII "0" padded with spaces to 8 bytes
        assert f.read(8) == b'0       '

        # recording info
        hdr['subject_id'] = decode(f.read(80)).strip()
        hdr['recording_id'] = decode(f.read(80)).strip()

        # parse timestamp date (dd.mm.yy); fall back to default if blank
        date_str = decode(f.read(8)).strip()
        if date_str == '':
            edf_date = DEFAULT_DATETIME.date()
        else:
            (day, month, year) = [int(x) for x in findall(r'(\d+)', date_str)]
            # Y2K: cutoff is 1985
            if year >= 85:
                year += 1900
            else:
                year += 2000
            edf_date = date(year, month, day)

        # parse timestamp time (hh.mm.ss); fall back to default if blank
        time_str = decode(f.read(8)).strip()
        if time_str == '':
            edf_time = DEFAULT_DATETIME.time()
        else:
            (hour, minute, second) = [int(x) for x in
                                      findall(r'(\d+)', time_str)]
            edf_time = time(hour, minute, second)
        hdr['start_time'] = datetime.combine(edf_date, edf_time)

        # misc
        hdr['header_n_bytes'] = int(f.read(8))
        f.seek(44, 1)  # reserved for EDF+
        hdr['n_records'] = int(f.read(8))
        hdr['record_length'] = float(f.read(8))  # in seconds
        nchannels = hdr['n_channels'] = int(f.read(4))

        # read channel info: each field is stored as an array of
        # fixed-width ASCII values, one entry per channel
        channels = range(hdr['n_channels'])
        hdr['label'] = [decode(f.read(16)).strip() for n in channels]
        hdr['transducer'] = [decode(f.read(80)).strip() for n in channels]
        hdr['physical_dim'] = [decode(f.read(8)).strip() for n in channels]
        hdr['physical_min'] = [float(f.read(8)) for n in channels]
        hdr['physical_max'] = [float(f.read(8)) for n in channels]
        hdr['digital_min'] = [float(f.read(8)) for n in channels]
        hdr['digital_max'] = [float(f.read(8)) for n in channels]
        hdr['prefiltering'] = [decode(f.read(80)).strip()
                               for n in channels]
        hdr['n_samples_per_record'] = [int(f.read(8)) for n in channels]
        f.seek(32 * nchannels, 1)  # reserved
        assert f.tell() == hdr['header_n_bytes']

    self.hdr = hdr
def read_time_micros(data, writer_schema=None, reader_schema=None):
    """Decode a microseconds-since-midnight value into a datetime.time.

    The schema arguments are accepted for reader-interface compatibility
    and are not used.
    """
    hours, remainder = divmod(data, MCS_PER_HOUR)
    minutes, remainder = divmod(remainder, MCS_PER_MINUTE)
    seconds, micros = divmod(remainder, MCS_PER_SECOND)
    return time(int(hours), int(minutes), int(seconds), int(micros))
class CMySQLCursorPreparedTests(tests.CMySQLCursorTests):
    """Tests for the C-extension prepared-statement cursor.

    Creates a table covering (almost) every MySQL column type, inserts
    one row through placeholder parameters, and checks that the fetch*
    methods return the expected converted Python values.
    """

    tbl = "prep_stmt"

    create_table_stmt = (
        "CREATE TABLE {0} ("
        "my_null INT, "
        "my_bit BIT(7), "
        "my_tinyint TINYINT, "
        "my_smallint SMALLINT, "
        "my_mediumint MEDIUMINT, "
        "my_int INT, "
        "my_bigint BIGINT, "
        "my_decimal DECIMAL(20,10), "
        "my_float FLOAT, "
        "my_double DOUBLE, "
        "my_date DATE, "
        "my_time TIME, "
        "my_datetime DATETIME, "
        "my_year YEAR, "
        "my_char CHAR(100), "
        "my_varchar VARCHAR(100), "
        "my_enum ENUM('x-small', 'small', 'medium', 'large', 'x-large'), "
        "my_geometry POINT, "
        "my_blob BLOB)")

    # my_bit and my_geometry are inserted as SQL literals, not via
    # placeholders, so they have no counterpart in `data` below.
    insert_stmt = (
        "INSERT INTO {0} ("
        "my_null, "
        "my_bit, "
        "my_tinyint, "
        "my_smallint, "
        "my_mediumint, "
        "my_int, "
        "my_bigint, "
        "my_decimal, "
        "my_float, "
        "my_double, "
        "my_date, "
        "my_time, "
        "my_datetime, "
        "my_year, "
        "my_char, "
        "my_varchar, "
        "my_enum, "
        "my_geometry, "
        "my_blob) "
        "VALUES (?, B'1111100', ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, "
        "POINT(21.2, 34.2), ?)")

    # Values bound to the ? placeholders of insert_stmt (in order).
    data = (
        None,
        127,
        32767,
        8388607,
        2147483647,
        4294967295 if ARCH_64BIT else 2147483647,
        decimal.Decimal("1.2"),
        3.14,
        4.28,
        datetime.date(2018, 12, 31),
        datetime.time(12, 13, 14),
        datetime.datetime(2019, 2, 4, 10, 36, 00),
        2019,
        "abc",
        u"MySQL 🐬",
        "x-large",
        "random blob data")

    # The full row as it is expected back from the server after type
    # conversion (note: TIME comes back as timedelta, POINT as WKB bytes,
    # FLOAT/DOUBLE with their native precision loss).
    exp = (
        None,
        124,
        127,
        32767,
        8388607,
        2147483647,
        4294967295 if ARCH_64BIT else 2147483647,
        decimal.Decimal("1.2000000000"),
        3.140000104904175,
        4.28000020980835,
        datetime.date(2018, 12, 31),
        datetime.timedelta(0, 43994),
        datetime.datetime(2019, 2, 4, 10, 36),
        2019,
        "abc",
        u"MySQL \U0001f42c",
        "x-large",
        bytearray(b"\x00\x00\x00\x00\x01\x01\x00\x00\x003333335"
                  b"@\x9a\x99\x99\x99\x99\x19A@"),
        "random blob data")

    def setUp(self):
        # fresh connection + prepared cursor + empty table per test
        config = tests.get_mysql_config()
        self.cnx = CMySQLConnection(**config)
        self.cur = self.cnx.cursor(prepared=True)
        self.cur.execute(self.create_table_stmt.format(self.tbl))

    def tearDown(self):
        self.cur.execute("DROP TABLE IF EXISTS {0}".format(self.tbl))
        self.cur.close()
        self.cnx.close()

    def test___init__(self):
        self.assertIsInstance(self.cur, CMySQLCursorPrepared)

    def test_callproc(self):
        # stored procedures are not supported on prepared cursors
        self.assertRaises(errors.NotSupportedError, self.cur.callproc, None)

    def test_close(self):
        cur = self.cnx.cursor(prepared=True)
        # no statement has been prepared yet
        self.assertEqual(None, cur._stmt)
        cur.close()

    def test_fetchone(self):
        self.cur.execute(self.insert_stmt.format(self.tbl), self.data)
        self.cur.execute("SELECT * FROM {0}".format(self.tbl))
        row = self.cur.fetchone()
        self.assertEqual(row, self.exp)
        # fetching past the single-row result set yields None
        row = self.cur.fetchone()
        self.assertIsNone(row)

    def test_fetchall(self):
        self.cur.execute(self.insert_stmt.format(self.tbl), self.data)
        self.cur.execute("SELECT * FROM {0}".format(self.tbl))
        rows = self.cur.fetchall()
        self.assertEqual(len(rows), 1)
        self.assertEqual(rows[0], self.exp)

    def test_fetchmany(self):
        data = [self.data[:], self.data[:], self.data[:]]
        self.cur.executemany(self.insert_stmt.format(self.tbl), data)
        self.cur.execute("SELECT * FROM {0}".format(self.tbl))
        # first batch of 2, then the remaining 1
        rows = self.cur.fetchmany(size=2)
        self.assertEqual(len(rows), 2)
        self.assertEqual(rows[0], self.exp)
        self.assertEqual(rows[1], self.exp)
        rows = self.cur.fetchmany(1)
        self.assertEqual(len(rows), 1)
        self.assertEqual(rows[0], self.exp)

    def test_executemany(self):
        data = [self.data[:], self.data[:]]
        self.cur.executemany(self.insert_stmt.format(self.tbl), data)
        self.cur.execute("SELECT * FROM {0}".format(self.tbl))
        rows = self.cur.fetchall()
        self.assertEqual(len(rows), 2)
        self.assertEqual(rows[0], self.exp)
        self.assertEqual(rows[1], self.exp)
except OSError: log.debug("Process %d gone" % (pid, )) appPool.hook_tool_run('autorepl') MNTLOCK = mntlock() mypid = os.getpid() templog = '/tmp/repl-%d' % (mypid) now = datetime.datetime.now().replace(microsecond=0) if now.second < 30 or now.minute == 59: now = now.replace(second=0) else: now = now.replace(minute=now.minute + 1, second=0) now = datetime.time(now.hour, now.minute) # (mis)use MNTLOCK as PIDFILE lock. locked = True try: MNTLOCK.lock_try() except IOError: locked = False if not locked: sys.exit(0) AUTOREPL_PID = -1 try: with open('/var/run/autorepl.pid') as pidfile: AUTOREPL_PID = int(pidfile.read()) except:
# Prompt until both Azure identifiers have been provided, then persist
# them to azure_config.ini so later runs can skip the prompts.
CLIENT_ID = ""
while not TENANT_ID:
    TENANT_ID = input("Please enter your Azure tenant id: ")
while not CLIENT_ID:
    CLIENT_ID = input("Please enter your Azure client id: ")
config["Azure"] = {"Tenant_Id": TENANT_ID, "Client_Id": CLIENT_ID}
with open("azure_config.ini", "w") as configfile:
    config.write(configfile)

# Microsoft Graph endpoints and the OAuth scopes requested at sign-in
AUTHORITY = 'https://login.microsoftonline.com/' + TENANT_ID
ENDPOINT = 'https://graph.microsoft.com/v1.0'
SCOPES = [
    'User.Read',
    'User.ReadBasic.All'
]

# Working hours/days used to decide behaviour outside office time
workday_start = time(8)
workday_end = time(19)
workdays = ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday"]

# Mutable global state shared by the LED/presence threads below
width = 0
height = 0
blinkThread = None
after_work = False
globalRed = 0
globalGreen = 0
globalBlue = 0
token=''
points = []
fullname = ''
brightness_led = 0.5
sleepValue = 30  # seconds

# #############
def update_model(self):
    """Rebuild the time-list model with entries spanning the whole day."""
    model = self.__model
    model.clear()
    day_start = datetime.time(0, 0)
    day_end = datetime.time(23, 59)
    timelist_set_list(model, day_start, day_end, self.__format)
def _collect_deeds(model):
    """Concatenate the distinct deed messages of *model*, newest first.

    Returns a tuple ``(html, txt)``: ``html`` is the raw messages joined
    for the marquee, ``txt`` the tag-stripped plain text with blank-line
    separators for the read-only textarea.
    """
    html = ''
    txt = ''
    if model.objects.exists():
        all_messages = model.objects.values_list(
            'deed', flat=True).distinct().order_by('-date_time')
        for message in all_messages:
            html += message
            txt += cleanhtml(message) + "\r\r"
    return html, txt


def index(request):
    """Render the landing page with the three deed forms and their feeds.

    On POST, validates and saves whichever of the three forms was
    submitted and redirects back with ``?submitted=True``; otherwise it
    renders empty forms together with the accumulated deed messages.
    """
    print('config', config.post_success)
    submitted = False
    if request.method == 'POST':
        if 'btnform1' in request.POST:
            print('---btnform1---')
            form1 = deeds1Form(request.POST)
            if form1.is_valid():
                print('---form1 valid---')
                form1.save()
                config.post_success = 1
                return HttpResponseRedirect('/index?submitted=True')
        elif 'btnform2' in request.POST:
            print('---btnform2---')
            form2 = deeds2Form(request.POST)
            if form2.is_valid():
                print('---form2 valid---')
                form2.save()
                config.post_success = 1
                return HttpResponseRedirect('/index?submitted=True')
        elif 'btnform3' in request.POST:
            print('---btnform3---')
            form3 = deeds3Form(request.POST)
            if form3.is_valid():
                print('---form3 valid---')
                form3.save()
                config.post_success = 1
                return HttpResponseRedirect('/index?submitted=True')
        # NOTE(review): invalid forms fall through here and are replaced
        # by empty ones, discarding their validation errors — confirm
        # whether errors should be redisplayed instead.
        print('empty post')
        form1 = deeds1Form()
        form2 = deeds2Form()
        form3 = deeds3Form()
    else:
        print('---GET---')
        form1 = deeds1Form()
        form2 = deeds2Form()
        form3 = deeds3Form()
        if 'submitted' in request.GET:
            submitted = True

    # one shared helper instead of three copy-pasted collection loops
    message1, txt1 = _collect_deeds(deeds1)
    message2, txt2 = _collect_deeds(deeds2)
    message3, txt3 = _collect_deeds(deeds3)

    context = {}
    context['text1'] = '<textarea readonly rows="10" class="v-border" id="txt1area" style="width:100%">' + txt1 + '</textarea>'
    context['text2'] = '<textarea readonly rows="10" class="v-border" id="txt2area" style="width:100%">' + txt2 + '</textarea>'
    context['text3'] = '<textarea readonly rows="10" class="v-border" id="txt3area" style="width:100%">' + txt3 + '</textarea>'
    context['deed1'] = '<div class="mqcontainer">' + \
        '<div class="marquee">' + message1 + '</div>' + '</div>'
    context['deed2'] = '<div class="mqcontainer">' + \
        '<div class="marquee">' + message2 + '</div>' + '</div>'
    context['deed3'] = '<div class="mqcontainer">' + \
        '<div class="marquee">' + message3 + '</div>' + '</div>'
    context['form1'] = form1
    context['form2'] = form2
    context['form3'] = form3

    now = timezone.now().strftime('%H:%M:%S')
    now1 = datetime.now().time().strftime('%H:%M:%S')
    day = datetime.today().weekday() + 1
    print('day', day)
    show = is_time_between(time(22, 00), time(22, 45))
    print('show', show)
    print('time now ', now)
    context['now'] = now
    # BUG(review): the original `if show:` set hide to False on BOTH
    # branches, so the flag is a constant. Behavior is preserved here;
    # the likely intent was `context['hide'] = not show` — confirm with
    # the template before changing.
    context['hide'] = False
    context['now1'] = now1
    context['mobile'] = bool(mobile(request))

    if config.post_success == 1:
        print('post success')
        context['post_success'] = True
        config.post_success = 0
    else:
        print('post failed')
        context['post_success'] = False
    return render(request, 'index.html', context)
for x in subjecttallylist: # check if exists in unique_list or not if x not in subjecttallylist_unique_list: subjecttallylist_unique_list.append(x) for x in datetallylist: # check if exists in unique_list or not if x not in datetallylist_unique_list: datetallylist_unique_list.append(x) #Print out Headers for CSV sys.stdout.write("Subject,Date,State/Info,") for x in range(0, (totalperiods*periodlength)): #96 sets of 15 minute periods for all minutes in a 24 hour period timeholder = datetime.today() #initialize datetimeobject timeholder = (datetime.combine(date.today(), time(0,0,0)) + timedelta(minutes=1*x)) if(elapsedminortime==False): sys.stdout.write(datetime.strftime(timeholder, '%H:%M:%S')) else: sys.stdout.write(str(x)) if ((x<(totalperiods*periodlength)-1)): #suppress final comma sys.stdout.write(",") print () #print newline after the header row is finished for listeddate in datetallylist_unique_list: # display entries for a single date ontimelist = [] activetimelist = [] xorlist = [] for rowindex, row in enumerate(queryreturn): #page thru all returned rows from query, also return an index number related to the row
def test_make_stmt_execute(self):
    """Make a MySQL packet with the STMT_EXECUTE command"""
    statement_id = 1
    # parameter-count mismatch (2 values vs 3 types) must raise
    self.assertRaises(errors.InterfaceError,
                      self._protocol.make_stmt_execute,
                      statement_id, ('ham', 'spam'), (1, 2, 3))

    data = ('ham', 'spam')
    exp = (
        '\x01\x00\x00\x00\x00\x01\x00\x00\x00\x00\x01\x0f'
        '\x00\x0f\x00\x03\x68\x61\x6d\x04\x73\x70\x61\x6d'
    )
    res = self._protocol.make_stmt_execute(statement_id, data, (1, 2))
    self.assertEqual(exp, res)

    # Testing types: each case is (python value, expected packet bytes)
    cases = [
        ('ham',
         '\x01\x00\x00\x00\x00\x01\x00\x00\x00\x00'
         '\x01\x0f\x00\x03\x68\x61\x6d'),
        (decimal.Decimal('3.14'),
         '\x01\x00\x00\x00\x00\x01\x00\x00\x00\x00'
         '\x01\x00\x00\x04\x33\x2e\x31\x34'),
        (255,
         '\x01\x00\x00\x00\x80\x01\x00\x00\x00\x00\x01\x01\x80\xff'),
        (-128,
         '\x01\x00\x00\x00\x00\x01\x00\x00\x00\x00\x01\x01\x00\x80'),
        (datetime.datetime(1977, 6, 14, 21, 20, 30),
         '\x01\x00\x00\x00\x00\x01\x00\x00\x00\x00'
         '\x01\x0c\x00\x07\xb9\x07\x06\x0e\x15\x14\x1e'),
        (datetime.time(14, 53, 36, 345),
         '\x01\x00\x00\x00\x00\x01\x00\x00\x00\x00\x01\x0b\x00'
         '\x0c\x00\x00\x00\x00\x00\x0e\x35\x24\x59\x01\x00\x00'),
        (3.14,
         '\x01\x00\x00\x00\x00\x01\x00\x00\x00\x00\x01\x05\x00'
         '\x1f\x85\xeb\x51\xb8\x1e\x09\x40'),
    ]
    for data, exp in cases:
        res = self._protocol.make_stmt_execute(statement_id, (data,), (1,))
        self.assertEqual(
            exp, res,
            "Failed preparing statement with '{0}'".format(data))

    # Testing null bitmap: NULL params are flagged in the bitmap instead
    # of being serialized
    data = (None, None)
    exp = '\x01\x00\x00\x00\x00\x01\x00\x00\x00\x03\x01'
    res = self._protocol.make_stmt_execute(statement_id, data, (1, 2))
    self.assertEqual(exp, res)

    data = (None, 'Ham')
    exp = (
        '\x01\x00\x00\x00\x00\x01\x00\x00\x00\x01\x01\x0f\x00'
        '\x03\x48\x61\x6d'
    )
    res = self._protocol.make_stmt_execute(statement_id, data, (1, 2))
    self.assertEqual(exp, res)

    # more than 8 parameters exercises a multi-byte null bitmap
    data = ('a',) * 11
    exp = (
        '\x01\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01'
        '\x0f\x00\x0f\x00\x0f\x00\x0f\x00\x0f\x00\x0f\x00\x0f\x00'
        '\x0f\x00\x0f\x00\x0f\x00\x0f\x00\x01\x61\x01\x61\x01\x61'
        '\x01\x61\x01\x61\x01\x61\x01\x61\x01\x61\x01\x61\x01\x61'
        '\x01\x61'
    )
    res = self._protocol.make_stmt_execute(statement_id, data, (1,) * 11)
    self.assertEqual(exp, res)

    # Raise an error passing an unsupported object as parameter value
    class UnSupportedObject(object):
        pass

    data = (UnSupportedObject(), UnSupportedObject())
    self.assertRaises(errors.ProgrammingError,
                      self._protocol.make_stmt_execute,
                      statement_id, data, (1, 2))
class XIDXExchangeCalendar(ExchangeCalendar):
    """
    Exchange calendar for the Indonesia Stock Exchange (XIDX).

    Open Time: 09:00, Western Indonesian Time (WIB)
    Close Time: 15:50, Western Indonesian Time (WIB)

    Regularly-Observed Holidays:
    - New Year's Day(Jan 1)
    - Good Friday (Friday before Easter)
    - Labor Day (May 1)
    - Ascension Day Of Jesus Christ (39 days after Easter, always Thursday)
    - Pancasila Day (Jun 1)
    - Independence Day (Aug 17)
    - Christmas Day (Dec 25)
    - New Year's Eve (called "Trading Holiday", Dec 31)
    - Chinese New Year (from Gregorian year 2002 onward)
    - Islamic New Year
    - Eid al-Fitr (Festival of Breaking the Fast)
    - Eid al-Adha (Festival of the Sacrifice)
    - Isra Mikraj of the Prophet Muhammad
    - Birth of the Prophet Muhammad
    - Vesak Day
    - Hindu Saka New Year (also called Nyepi, or Balinese Day of Silence)

    Election holidays are also observed, as well as additional "common leave"
    days around many holidays.
    """

    name = "XIDX"

    tz = timezone("Asia/Jakarta")

    # Single (start_date, time) entry each: trading hours are constant.
    open_times = ((None, time(9)), )
    close_times = ((None, time(15, 50)), )

    @property
    def regular_holidays(self):
        """Rule-based holidays that recur every year (Gregorian/Easter)."""
        return HolidayCalendar([
            new_years_day(),
            GoodFriday,
            Holiday(
                "Labor Day",
                month=5,
                day=1,
                start_date="2014-05-01",
            ),
            # XXX: The Indonesia Stock Exchange was open on Ascension
            # Day 2003, but closed the next day. We are not sure why.
            # (The 2003-05-30 closure is listed in adhoc_holidays.)
            ascension_day(end_date="2003"),
            ascension_day(start_date="2004"),
            Holiday(
                "Pancasila Day",
                month=6,
                day=1,
                start_date="2017-06-01",
            ),
            Holiday(
                "Independence Day",
                month=8,
                day=17,
            ),
            # Christmas Eve is not an official Indonesian holiday, but
            # December 24th and 26th are frequently observed as common
            # leave. These dates are listed in adhoc_holidays.
            christmas(),
            new_years_eve(),
        ])

    chinese_new_year = chinese_lunar_new_year_dates[
        # The Indonesia Stock Exchange did not close for Chinese New
        # Year in 1998, 1999, or 2001. (It fell on a Saturday in 2000.)
        chinese_lunar_new_year_dates.year >= 2002]

    common_leave = pd.to_datetime([
        # Indonesia sometimes observes additional "common leave" days
        # around the usual observed dates of holidays.
        # Common leave for New Year's Day.
        "2002-12-30",
        "2005-12-30",
        "2009-01-02",
        "2017-01-02",
        # Common leave for Chinese New Year.
        "2008-02-08",
        # Common leave for Ascension Day.
        "2003-05-30",
        "2006-05-26",
        "2007-05-18",
        # Common leave for Independence Day.
        "2003-08-18",
        "2006-08-18",
        "2008-08-18",
        # Common leave for Christmas.
        "2002-12-24",
        "2002-12-26",
        "2003-12-24",
        "2003-12-26",
        "2004-12-24",
        "2005-12-26",
        "2007-12-24",
        "2009-12-24",
        "2010-12-24",
        "2011-12-26",
        "2012-12-24",
        "2013-12-26",
        "2014-12-26",
        "2016-12-26",
        "2017-12-26",
        "2018-12-24",
        "2019-12-24",
        "2020-12-24",
    ])

    islamic_new_year = pd.to_datetime([
        # Islamic/Hijri/Hijriyah New Year.
        # Includes common leave.
        "2002-03-15",
        "2003-03-03",
        "2004-02-23",
        "2005-02-10",
        "2006-01-31",
        "2008-01-10",  # First Islamic New Year of 2008.
        "2008-01-11",
        "2008-12-29",  # Second Islamic New Year of 2008.
        "2009-12-18",
        "2010-12-07",
        "2012-11-15",
        "2012-11-16",
        "2013-11-05",
        "2015-10-14",
        "2017-09-21",
        "2018-09-11",
        "2020-08-20",
    ])

    eid_al_fitr = pd.to_datetime([
        # Eid al-Fitr (Festival of Breaking the Fast).
        # Includes common leave.
        "2002-12-05",
        "2002-12-06",
        "2002-12-09",
        "2002-12-10",
        "2003-11-24",
        "2003-11-25",
        "2003-11-26",
        "2003-11-27",
        "2003-11-28",
        "2004-11-15",
        "2004-11-16",
        "2004-11-17",
        "2004-11-18",
        "2004-11-19",
        "2005-11-02",
        "2005-11-03",
        "2005-11-04",
        "2005-11-07",
        "2005-11-08",
        "2006-10-23",
        "2006-10-24",
        "2006-10-25",
        "2006-10-26",
        "2006-10-27",
        "2007-10-12",
        "2007-10-15",
        "2007-10-16",
        "2008-09-30",
        "2008-10-01",
        "2008-10-02",
        "2008-10-03",
        "2009-09-18",
        "2009-09-21",
        "2009-09-22",
        "2009-09-23",
        "2010-09-08",
        "2010-09-09",
        "2010-09-10",
        "2010-09-13",
        "2010-09-14",
        "2011-08-29",
        "2011-08-30",
        "2011-08-31",
        "2011-09-01",
        "2011-09-02",
        "2012-08-20",
        "2012-08-21",
        "2012-08-22",
        "2013-08-05",
        "2013-08-06",
        "2013-08-07",
        "2013-08-08",
        "2013-08-09",
        "2014-07-28",
        "2014-07-29",
        "2014-07-30",
        "2014-07-31",
        "2014-08-01",
        "2015-07-16",
        "2015-07-17",
        "2015-07-20",
        "2015-07-21",
        "2016-07-04",
        "2016-07-05",
        "2016-07-06",
        "2016-07-07",
        "2016-07-08",
        "2017-06-23",
        "2017-06-26",
        "2017-06-27",
        "2017-06-28",
        "2017-06-29",
        "2017-06-30",
        "2018-06-11",
        "2018-06-12",
        "2018-06-13",
        "2018-06-14",
        "2018-06-15",
        "2018-06-18",
        "2018-06-19",
        "2019-06-03",
        "2019-06-04",
        "2019-06-05",
        "2019-06-06",
        "2019-06-07",
        "2020-05-22",
        "2020-05-25",
        "2020-05-26",
        "2020-05-27",
    ])

    eid_al_adha = pd.to_datetime([
        # Eid al-Adha (Festival of the Sacrifice).
        # Includes common leave.
        "2002-02-22",
        "2003-02-12",
        "2004-02-02",
        "2005-01-21",
        "2006-01-10",
        "2006-12-29",
        # NOTE: Eid al-Adha occured twice in 2006, on Tuesday 01-10 and
        # Sunday 12-31. The exchange was closed on Friday 2006-12-29 as
        # a make-up holiday.
        "2007-12-20",
        "2007-12-21",
        "2008-12-08",
        "2009-11-27",
        "2009-11-28",
        "2010-11-17",
        "2012-10-26",
        "2013-10-14",
        "2013-10-15",
        "2015-09-24",
        "2016-09-12",
        "2017-09-01",
        "2018-08-22",
        "2020-07-31",
    ])

    isra_mikraj = pd.to_datetime([
        # Isra and Mi'raj (Ascension of the Prophet Muhammad).
        # Called "Isra Mikraj" in Indonesia.
        #
        # Occurs on 27 Rajab on the Hijri calendar, but the mapping of
        # Hijri to Gregorian dates varies. For example, in 2018 many
        # countries observed this holiday on Friday 04-13; but by
        # Indonesian reckoning it fell on Saturday 04-14 that year.
        #
        # See https://www.idx.co.id/en-us/news/trading-holiday/
        #
        # Includes common leave.
        "2002-10-04",
        "2003-09-22",
        "2004-09-13",
        "2005-09-02",
        "2006-08-21",
        "2008-07-30",
        "2009-07-20",
        "2011-06-29",
        "2012-05-18",
        "2013-06-06",
        "2014-05-27",
        "2016-05-06",
        "2017-04-24",
        "2019-04-03",
    ])

    birth_of_prophet_muhammad = pd.to_datetime([
        # Birth of the Prophet Muhammad.
        # Includes common leave.
        "2003-05-15",
        "2004-05-03",
        "2005-04-22",
        "2006-04-10",
        "2008-03-20",
        "2009-03-09",
        "2010-02-26",
        "2011-02-15",
        "2013-01-24",
        "2014-01-14",
        "2015-12-24",
        "2016-12-12",
        "2017-12-01",
        "2018-11-20",
        "2020-10-29",
    ])

    vesak_day = pd.to_datetime([
        # Vesak Day (Buddha's Birthday).
        # Sometimes called "Hari Raya Waisak" in Indonesia.
        # Includes common leave.
        "2003-05-16",
        "2004-06-03",
        "2005-05-24",
        "2007-06-01",
        "2008-05-20",
        "2010-05-28",
        "2011-05-17",
        "2014-05-15",
        "2015-06-02",
        "2017-05-11",
        "2018-05-29",
        "2020-05-07",
    ])

    hindu_saka_new_year = pd.to_datetime([
        # Hindu Saka New Year (also called Nyepi, or Balinese Day of Silence).
        # Includes common leave.
        "2003-04-02",
        "2004-03-22",
        "2005-03-11",
        "2006-03-30",
        "2006-03-31",
        "2007-03-19",
        "2008-03-07",
        "2009-03-26",
        "2010-03-16",
        "2012-03-23",
        "2013-03-12",
        "2014-03-31",
        "2016-03-09",
        "2017-03-28",
        "2019-03-07",
        "2020-03-25",
    ])

    spontaneous_closures = pd.to_datetime([
        # Trading suspension due to global financial crisis.
        "2008-10-09",
        "2008-10-10",
    ])

    election_holidays = pd.to_datetime([
        # Local and gubernatorial election holidays.
        "2004-04-05",
        "2004-07-05",
        "2004-09-20",
        "2009-04-09",
        "2009-07-08",
        "2014-04-09",
        "2014-07-09",
        "2015-12-09",
        "2017-02-15",
        "2017-04-19",
        "2019-04-17",
    ])

    @property
    def adhoc_holidays(self):
        """One-off closures: lunar-calendar holidays, common leave,
        spontaneous closures and election days."""
        return list(
            chain(
                self.chinese_new_year,
                self.common_leave,
                self.islamic_new_year,
                self.eid_al_fitr,
                self.eid_al_adha,
                self.isra_mikraj,
                self.birth_of_prophet_muhammad,
                self.vesak_day,
                self.hindu_saka_new_year,
                self.spontaneous_closures,
                self.election_holidays,
            ))
def test_should_sync_true(self):
    """A sync whose last run was yesterday at 23:59:59 must trigger again."""
    test_domain = Domain(name='test', default_timezone='UTC')
    yesterday = date.today() - timedelta(days=1)
    end_of_yesterday = datetime.combine(yesterday, time(23, 59, 59))
    self.assertTrue(should_sync(test_domain, SyncLog(date=end_of_yesterday)))
# datetime module stores information about dates and times in module classes # it differs from time module which works with floats for representing seconds since epoch import datetime # times are represented with datetime.time class t = datetime.time(1, 2, 3, 89) print(t) print('hour :', t.hour) print('minute :', t.minute) print('second :', t.second) print('microsecond:', t.microsecond) print('tzinfo :', t.tzinfo) t = datetime.time(minute=43, hour=12, second=56, microsecond=32) # kwarg parameters print(t) # min, max,resolution are datetime.time class variable attributes print('Earliest :', datetime.time.min) print('Latest :', datetime.time.max) print('Resolution:', datetime.time.resolution)
def crawl_feedly(from_dt, rss_field):
    """Crawl Feedly subscriptions newer than *from_dt* into Elasticsearch.

    Fetches the subscription list, then for every feed matching
    *rss_field* (or all feeds when it is empty) pulls the stream items
    and bulk-indexes them as FeedlyMap documents. Returns False when the
    subscription request fails, True otherwise.
    """
    global headers
    # NOTE(review): the next four lines are dead code — ``s`` is
    # immediately recomputed from ``from_dt`` below.
    today = datetime.now()
    days = timedelta(days=31)
    yesterday = today - days
    s = yesterday.timestamp()
    # midnight of from_dt, expressed as epoch milliseconds for the API
    t = time(0, 0)
    dt = datetime.combine(from_dt, t)
    s = dt.timestamp()
    #datetime.datetime.fromtimestamp(s).strftime('%c')
    ms = s * 1000
    newerthan = "{:.0f}".format(ms)
    headers = {
        #[email protected] (expires on 2018-aug-26)
        # SECURITY NOTE(review): hard-coded OAuth token committed to
        # source control; this should come from config/secrets storage.
        "Authorization": "A3iuGsp9UjnsSiLwl5ZoPrLZj3mO4d16muxgezgpLesPhJ4YoKgC0XdiW_ucnm7b1Z-o5DKK6oLqoW9SRNUkoTcQ8npBBmqbOF03zF3tFWaNI0Lir_hrAahmVuypG5BXVZidJJ4PuaXr4zg5pYRE32OxO0N05X_A2sdZC93oWwQU1GVLJ9evh3qmu0WXYPVXpxffytgnFjUg2JB1zGK3KJkbDl-6ioJudiD2IZczA0R52tPwFZZ0FimkE3zV:feedlydev"
    }
    params_streams = {
        # "count" : "100",
        "count": "1000",
        "ranked": "newest",
        "unreadOnly": "false",
        "newerThan": newerthan
    }
    #url = "http://cloud.feedly.com/v3/profile"
    #r = requests.get(url, headers=headers)
    url = "http://cloud.feedly.com/v3/subscriptions"
    r = requests.get(url, headers=headers)
    if r.status_code != 200:
        return False
    feeds = r.json()
    for feed in feeds:
        feed_id = feed['id']
        feed_title = feed['title'].encode("ascii", 'replace')
        # the category label can contain the subset and category name
        category_label = feed['categories'][0]['label']
        label_split = category_label.split('-')
        if len(label_split) > 1:
            feed_subset = label_split[0].strip()
            feed_category = label_split[1].strip()
        else:
            # no subset encoded in the label: default to 'SI'
            feed_subset = 'SI'
            feed_category = label_split[0].strip()
        print("crawl_feedly: scraping feed category/title", feed_category, feed_title)
        if rss_field == '' or category_label == rss_field:
            url = "http://cloud.feedly.com/v3/streams/contents"
            params_streams['streamId'] = feed_id
            r = requests.get(url, headers=headers, params=params_streams)
            stream = r.json()
            if 'items' in stream:
                bulk_data = None
                bulk_data = []
                for entry in stream['items']:
                    feedlymap = models.FeedlyMap()
                    feedlymap.post_id = entry['id']
                    feedlymap.url = ""
                    try:
                        # Feedly reports epoch milliseconds
                        feedlymap.published_date = datetime.fromtimestamp(
                            entry['published'] / 1000)
                    except:
                        # NOTE(review): bare except; on any failure the
                        # date falls back to Jan 1st of last year
                        last_year = datetime.now().year - 1
                        feedlymap.published_date = datetime(
                            last_year, 1, 1, 00, 00, 00)
                    feedlymap.subset = feed_subset
                    feedlymap.category = feed_category
                    feedlymap.feed = feed_title
                    if 'topics' in feed:
                        feedlymap.feed_topics = feed['topics']
                    if 'keywords' in entry:
                        feedlymap.body_topics = entry['keywords']
                    if 'title' in entry:
                        feedlymap.title = entry['title']
                    if 'canonicalUrl' in entry:
                        feedlymap.url = entry['canonicalUrl']
                    # fall back to originId, then to the origin's htmlUrl
                    if len(feedlymap.url) == 0:
                        if 'originId' in entry:
                            n = entry['originId'].find('http')
                            if n > 0:
                                feedlymap.url = entry['originId'][n:]
                    if len(feedlymap.url) == 0:
                        if 'origin' in entry:
                            origin = entry['origin']
                            feedlymap.url = origin['htmlUrl']
                    feedlymap.post_id = feedlymap.url
                    if 'summary' in entry:
                        bs = BeautifulSoup(entry['summary']['content'],
                                           "lxml")  # in case of RSS feed
                    if 'content' in entry:
                        bs = BeautifulSoup(
                            entry['content']['content'],
                            "lxml")  # in case of Google News feed
                    feedlymap.body = bs.get_text().encode("ascii", 'replace')
                    data = elastic.convert_for_bulk(feedlymap, 'update')
                    bulk_data.append(data)
                bulk(models.client, actions=bulk_data, stats_only=True)
    return True
class SliderTest(testutil.DeltaGeneratorTestCase):
    """Test ability to marshall slider protos."""

    def test_just_label(self):
        """Test that it can be called with no value."""
        st.slider("the label")

        c = self.get_delta_from_queue().new_element.slider
        self.assertEqual(c.label, "the label")
        self.assertEqual(c.default, [0])
        self.assertEqual(c.disabled, False)

    def test_just_disabled(self):
        """Test that it can be called with disabled param."""
        st.slider("the label", disabled=True)

        c = self.get_delta_from_queue().new_element.slider
        self.assertEqual(c.disabled, True)

    # Timezone-aware fixtures: PST is UTC-8, so the expected micro values
    # below are fixed regardless of the testing machine's local timezone.
    PST = timezone(timedelta(hours=-8), "PST")
    AWARE_DT = datetime(2020, 1, 1, tzinfo=PST)
    AWARE_DT_END = datetime(2020, 1, 5, tzinfo=PST)
    AWARE_TIME = time(12, 00, tzinfo=PST)
    AWARE_TIME_END = time(21, 00, tzinfo=PST)
    # datetimes are serialized in proto as micros since epoch
    AWARE_DT_MICROS = 1577836800000000
    AWARE_DT_END_MICROS = 1578182400000000
    AWARE_TIME_MICROS = 946728000000000
    AWARE_TIME_END_MICROS = 946760400000000

    @parameterized.expand(
        [
            # (input value, expected proto default, expected return value)
            (1, [1], 1),  # int
            ((0, 1), [0, 1], (0, 1)),  # int tuple
            ([0, 1], [0, 1], (0, 1)),  # int list
            (0.5, [0.5], 0.5),  # float
            ((0.2, 0.5), [0.2, 0.5], (0.2, 0.5)),  # float tuple
            ([0.2, 0.5], [0.2, 0.5], (0.2, 0.5)),  # float list
            (AWARE_DT, [AWARE_DT_MICROS], AWARE_DT),  # datetime
            (
                (AWARE_DT, AWARE_DT_END),  # datetime tuple
                [AWARE_DT_MICROS, AWARE_DT_END_MICROS],
                (AWARE_DT, AWARE_DT_END),
            ),
            (
                [AWARE_DT, AWARE_DT_END],  # datetime list
                [AWARE_DT_MICROS, AWARE_DT_END_MICROS],
                (AWARE_DT, AWARE_DT_END),
            ),
            (AWARE_TIME, [AWARE_TIME_MICROS], AWARE_TIME),  # datetime
            (
                (AWARE_TIME, AWARE_TIME_END),  # datetime tuple
                [AWARE_TIME_MICROS, AWARE_TIME_END_MICROS],
                (AWARE_TIME, AWARE_TIME_END),
            ),
            (
                [AWARE_TIME, AWARE_TIME_END],  # datetime list
                [AWARE_TIME_MICROS, AWARE_TIME_END_MICROS],
                (AWARE_TIME, AWARE_TIME_END),
            ),
        ]
    )
    def test_value_types(self, value, proto_value, return_value):
        """Test that it supports different types of values."""
        ret = st.slider("the label", value=value)
        self.assertEqual(ret, return_value)

        c = self.get_delta_from_queue().new_element.slider
        self.assertEqual(c.label, "the label")
        self.assertEqual(c.default, proto_value)

    # Naive (tzinfo-less) fixtures: their proto micros depend on the local
    # timezone, so the parameterized cases below only check return values.
    NAIVE_DT = datetime(2020, 2, 1)
    NAIVE_DT_END = datetime(2020, 2, 4)
    NAIVE_TIME = time(6, 20, 34)
    NAIVE_TIME_END = time(20, 6, 43)
    DATE_START = date(2020, 4, 5)
    DATE_END = date(2020, 6, 6)

    @parameterized.expand(
        [
            (NAIVE_DT, NAIVE_DT),  # naive datetime
            ((NAIVE_DT, NAIVE_DT_END), (NAIVE_DT, NAIVE_DT_END)),
            ([NAIVE_DT, NAIVE_DT_END], (NAIVE_DT, NAIVE_DT_END)),
            (NAIVE_TIME, NAIVE_TIME),  # naive time
            ((NAIVE_TIME, NAIVE_TIME_END), (NAIVE_TIME, NAIVE_TIME_END)),
            ([NAIVE_TIME, NAIVE_TIME_END], (NAIVE_TIME, NAIVE_TIME_END)),
            (DATE_START, DATE_START),  # date (always naive)
            ((DATE_START, DATE_END), (DATE_START, DATE_END)),
            ([DATE_START, DATE_END], (DATE_START, DATE_END)),
        ]
    )
    def test_naive_timelikes(self, value, return_value):
        """Ignore proto values (they change based on testing machine's timezone)"""
        ret = st.slider("the label", value=value)
        c = self.get_delta_from_queue().new_element.slider
        self.assertEqual(ret, return_value)
        self.assertEqual(c.label, "the label")

    def test_range_session_state(self):
        """Test a range set by session state."""
        state = st.session_state
        state["slider"] = [10, 20]

        slider = st.slider(
            "select a range",
            min_value=0,
            max_value=100,
            key="slider",
        )

        assert slider == [10, 20]

    def test_value_greater_than_min(self):
        """A value below min_value widens the slider's min to that value."""
        ret = st.slider("Slider label", 10, 100, 0)
        c = self.get_delta_from_queue().new_element.slider
        self.assertEqual(ret, 0)
        self.assertEqual(c.min, 0)

    def test_value_smaller_than_max(self):
        """A value above max_value widens the slider's max to that value."""
        ret = st.slider("Slider label", 10, 100, 101)
        c = self.get_delta_from_queue().new_element.slider
        self.assertEqual(ret, 101)
        self.assertEqual(c.max, 101)

    def test_max_min(self):
        """min_value > max_value: both bounds survive after widening."""
        ret = st.slider("Slider label", 101, 100, 101)
        c = self.get_delta_from_queue().new_element.slider
        # NOTE(review): stray trailing comma below makes this line an
        # expression tuple — harmless, but should be removed.
        self.assertEqual(ret, 101),
        self.assertEqual(c.min, 100)
        self.assertEqual(c.max, 101)

    def test_value_out_of_bounds(self):
        """Bounds outside JS-safe integer / float ranges must raise."""
        # Max int
        with pytest.raises(StreamlitAPIException) as exc:
            max_value = JSNumber.MAX_SAFE_INTEGER + 1
            st.slider("Label", max_value=max_value)
        self.assertEqual(
            "`max_value` (%s) must be <= (1 << 53) - 1" % str(max_value),
            str(exc.value)
        )

        # Min int
        with pytest.raises(StreamlitAPIException) as exc:
            min_value = JSNumber.MIN_SAFE_INTEGER - 1
            st.slider("Label", min_value=min_value)
        self.assertEqual(
            "`min_value` (%s) must be >= -((1 << 53) - 1)" % str(min_value),
            str(exc.value),
        )

        # Max float
        with pytest.raises(StreamlitAPIException) as exc:
            max_value = 2e308
            st.slider("Label", value=0.5, max_value=max_value)
        self.assertEqual(
            "`max_value` (%s) must be <= 1.797e+308" % str(max_value),
            str(exc.value)
        )

        # Min float
        with pytest.raises(StreamlitAPIException) as exc:
            min_value = -2e308
            st.slider("Label", value=0.5, min_value=min_value)
        self.assertEqual(
            "`min_value` (%s) must be >= -1.797e+308" % str(min_value),
            str(exc.value)
        )

    def test_step_zero(self):
        """A step of 0 is rejected with a StreamlitAPIException."""
        with pytest.raises(StreamlitAPIException) as exc:
            st.slider("Label", min_value=0, max_value=10, step=0)
        self.assertEqual(
            "Slider components cannot be passed a `step` of 0.",
            str(exc.value)
        )

    def test_outside_form(self):
        """Test that form id is marshalled correctly outside of a form."""
        st.slider("foo")

        proto = self.get_delta_from_queue().new_element.slider
        self.assertEqual(proto.form_id, "")

    @patch("streamlit._is_running_with_streamlit", new=True)
    def test_inside_form(self):
        """Test that form id is marshalled correctly inside of a form."""
        with st.form("form"):
            st.slider("foo")

        # 2 elements will be created: form block, widget
        self.assertEqual(len(self.get_all_deltas_from_queue()), 2)

        form_proto = self.get_delta_from_queue(0).add_block
        slider_proto = self.get_delta_from_queue(1).new_element.slider
        self.assertEqual(slider_proto.form_id, form_proto.form.form_id)

    def test_inside_column(self):
        """Test that it works correctly inside of a column."""
        col1, col2 = st.columns(2)

        with col1:
            st.slider("foo")

        all_deltas = self.get_all_deltas_from_queue()

        # 4 elements will be created: 1 horizontal block, 2 columns, 1 widget
        self.assertEqual(len(all_deltas), 4)
        slider_proto = self.get_delta_from_queue().new_element.slider

        self.assertEqual(slider_proto.label, "foo")
def test_CheckTimeSubSeconds(self):
    """Round-trip a time carrying a sub-second component through the DB."""
    original = datetime.time(7, 15, 0, 500000)
    self.cur.execute("insert into test(t) values (?)", (original,))
    self.cur.execute("select t from test")
    row = self.cur.fetchone()
    restored = row[0]
    self.assertEqual(original, restored)
def test_should_sync_false(self):
    """A sync log stamped at midnight today must not trigger another sync."""
    test_domain = Domain(name='test', default_timezone='UTC')
    midnight_today = datetime.combine(date.today(), time())
    sync_log = SyncLog(date=midnight_today)
    self.assertFalse(should_sync(test_domain, sync_log))
def Time(hour, minute, second, microsecond=0, tzinfo=None):
    """DB-API-style constructor: build and return a ``datetime.time``."""
    parts = (hour, minute, second, microsecond, tzinfo)
    return datetime.time(*parts)
def refresh_currency(self):
    """Refresh the currencies rates !!for all companies now

    For every update-service record in the recordset: when the company has
    auto-update enabled, fetch fresh rates from the configured provider and
    create one ``res.currency.rate`` row per currency for today's date
    (skipping currencies that already have a rate row for today). Outcome
    (success summary or error) is posted on the record's chatter. When run
    from cron, the next run date is also rescheduled.
    """
    rate_obj = self.env['res.currency.rate']
    for srv in self:
        _logger.info(
            'Starting to refresh currencies with service %s (company: %s)',
            srv.service, srv.company_id.name)
        company = srv.company_id
        # The multi-company currency can be set or not, so we handle
        # both cases here.
        if company.auto_currency_up:
            main_currency = company.currency_id
            # No need to test if main_currency exists, because it is a
            # required field
            # Sanity check: the base currency's rate must be exactly 1.0,
            # otherwise every fetched rate would be scaled wrongly.
            if float_compare(
                    main_currency.with_context(company_id=company.id).rate,
                    1,
                    precision_rounding=main_currency.rounding):
                raise UserError(
                    _("In company '%s', the rate of the main currency (%s) "
                      "must be 1.00 (current rate: %s).") % (
                        company.name,
                        main_currency.name,
                        main_currency.rate))
            try:
                # We initalize the class that will handle the request
                # and return a dict of rate
                getter = CurrencyGetterType.get(srv.service)
                curr_to_fetch = [x.name for x in srv.currency_to_update]
                res, log_info = getter.get_updated_currency(
                    curr_to_fetch,
                    main_currency.name,
                    srv.max_delta_days)
                # Rate rows are dated at today's midnight (UTC).
                rate_name = \
                    fields.Datetime.to_string(datetime.utcnow().replace(
                        hour=0, minute=0, second=0, microsecond=0))
                for curr in srv.currency_to_update:
                    if curr == main_currency:
                        continue
                    # Only create a rate if none exists yet for today.
                    rates = rate_obj.search([('currency_id', '=', curr.id),
                                             ('company_id', '=', company.id),
                                             ('name', '=', rate_name)])
                    if not rates:
                        rate = res[curr.name]
                        # Providers may return either a plain number or a
                        # dict holding 'direct' and/or 'inverted' rates.
                        if type(rate) is dict:
                            inverted = rate.get('inverted', None)
                            direct = rate.get('direct', None)
                            if inverted is None and direct is None:
                                # NOTE(review): uses self.service rather than
                                # srv.service — presumably equivalent for a
                                # single-record set; verify for multi-record.
                                raise UserError(
                                    _('Invalid rate from %(provider)s '
                                      'for %(currency)s : %(rate)s') % {
                                        'provider': self.service,
                                        'currency': curr.name,
                                        'rate': rate,
                                    })
                            elif inverted is None:
                                inverted = 1 / direct
                            elif direct is None:
                                direct = 1 / inverted
                        else:
                            rate = float(rate)
                            direct = rate
                            inverted = 1 / rate
                        rate = direct
                        # Used in currency_rate_inverted module. We do
                        # not want to add a glue module for the currency
                        # update.
                        if 'rate_inverted' in self.env[
                                'res.currency']._fields:
                            if curr.with_context(
                                    force_company=company.id).\
                                    rate_inverted:
                                rate = inverted
                        vals = {
                            'currency_id': curr.id,
                            'rate': rate,
                            'name': rate_name,
                            'company_id': company.id,
                        }
                        rate_obj.create(vals)
                        _logger.info(
                            'Updated currency %s via service %s '
                            'in company %s',
                            curr.name, srv.service, company.name)
                # Show the most recent note at the top
                msg = '%s <br/>%s currency updated.' % (
                    log_info or '',
                    fields.Datetime.to_string(datetime.today()))
                srv.message_post(body=msg)
            except Exception as exc:
                # Best-effort per-service update: log and post the error on
                # the record, then continue with the next service.
                error_msg = '%s ERROR: %s' % (
                    fields.Datetime.to_string(datetime.today()),
                    repr(exc))
                _logger.error(repr(exc))
                srv.message_post(body=error_msg,
                                 message_type='comment',
                                 subtype='mt_comment')
        # When triggered by cron, schedule the next run relative to today's
        # midnight using the record's interval settings.
        if self._context.get('cron'):
            midnight = time(0, 0)
            next_run = (datetime.combine(datetime.today(), midnight) +
                        _intervalTypes[str(srv.interval_type)](
                            srv.interval_number)).date()
            srv.next_run = next_run
    return True
def test_time(self):
    # real testing of time() is done in dateformat.py
    cases = (
        (datetime.time(13), u'01'),
        (datetime.time(0), u'12'),
    )
    for value, expected in cases:
        self.assertEqual(time(value, u"h"), expected)
def startup():
    """Wire up and launch the Telegram bot.

    Builds a rate-limited bot (MessageQueue throttles outgoing traffic),
    schedules the daily report job, registers every callback/regex/command
    handler on the dispatcher, connects MongoDB, and starts long polling.
    """
    print('main called')
    # Throttle outgoing messages (burst/time limits) to respect Telegram's
    # flood control.
    msg_queue = mq.MessageQueue(all_burst_limit=28, all_time_limit_ms=1050)
    request = Request(con_pool_size=8)
    # NOTE(review): reads the *test* token file — confirm for production.
    TOKEN = open('campobot/token/token_test.txt', 'r').read().strip()
    campo_bot = MQBot(TOKEN, request=request, mqueue=msg_queue)
    updater = Updater(bot=campo_bot)
    dispatcher = updater.dispatcher

    logging.basicConfig(
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
        level=logging.INFO)

    # Daily report notification at 12:30 (timezone as interpreted by the
    # job queue — presumably server-local; verify).
    jobs = updater.job_queue
    jobs.run_daily(callback=report_notification_job,
                   time=datetime.time(hour=12, minute=30))

    # Help Handler
    dispatcher.add_handler(
        CallbackQueryHandler(callback=help_callback,
                             pattern=CALLBACK_HELP_INTRO))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=help_callback,
                             pattern=CALLBACK_HELP_HOURS))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=help_callback,
                             pattern=CALLBACK_HELP_CRON))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=help_callback,
                             pattern=CALLBACK_HELP_PUBS))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=help_callback,
                             pattern=CALLBACK_HELP_VIDEOS))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=help_callback,
                             pattern=CALLBACK_HELP_RETURNS))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=help_callback,
                             pattern=CALLBACK_HELP_STUDIES))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=help_callback,
                             pattern=CALLBACK_HELP_REPORT))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=help_callback,
                             pattern=CALLBACK_HELP_HOURS_DEMO))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=help_callback,
                             pattern=CALLBACK_HELP_CRON_DEMO))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=help_callback,
                             pattern=CALLBACK_HELP_VIDEOS_DEMO))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=help_callback,
                             pattern=CALLBACK_HELP_PUBS_DEMO))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=help_callback,
                             pattern=CALLBACK_HELP_RETURNS_DEMO))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=help_callback,
                             pattern=CALLBACK_HELP_STUDIES_DEMO))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=help_callback,
                             pattern=CALLBACK_HELP_REPORT_DEMO))
    dispatcher.add_handler(
        RegexHandler(callback=help_inline,
                     pattern=str(Regex.START_WITH_EMOJI_SLASH +
                                 Regex.HELP_COMMAND)))

    # Cron Handler (stateful: all pass per-user data to the callbacks)
    dispatcher.add_handler(
        CallbackQueryHandler(callback=cron_callback,
                             pattern=CALLBACK_CRON_START,
                             pass_user_data=True))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=cron_callback,
                             pattern=CALLBACK_CRON_UPDATE,
                             pass_user_data=True))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=cron_callback,
                             pattern=CALLBACK_CRON_STOP,
                             pass_user_data=True))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=cron_callback,
                             pattern=CALLBACK_CRON_SAVE,
                             pass_user_data=True))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=cron_callback,
                             pattern=CALLBACK_CRON_DISCARD,
                             pass_user_data=True))
    dispatcher.add_handler(
        RegexHandler(callback=cron_inline,
                     pattern=str(Regex.START_WITH_EMOJI_SLASH +
                                 Regex.CRON_COMMAND),
                     pass_user_data=True))

    # Videos Handler
    dispatcher.add_handler(
        CallbackQueryHandler(callback=video_callback,
                             pattern=CALLBACK_VIDEO_ADD_ONE))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=video_callback,
                             pattern=CALLBACK_VIDEO_ADD_THREE))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=video_callback,
                             pattern=CALLBACK_VIDEO_REMOVE_ONE))
    dispatcher.add_handler(
        RegexHandler(callback=video_offline_add_callback,
                     pattern=Regex.VIDEO_OFFLINE_ADD))
    dispatcher.add_handler(
        RegexHandler(callback=video_offline_remove_callback,
                     pattern=Regex.VIDEO_OFFLINE_REMOVE))
    dispatcher.add_handler(
        RegexHandler(callback=video_inline,
                     pattern=str(Regex.START_WITH_EMOJI_SLASH +
                                 Regex.VIDEO_COMMAND)))

    # Report Handler
    dispatcher.add_handler(
        CallbackQueryHandler(callback=reports_callback,
                             pattern=CALLBACK_REPORT_LAST_MONTH))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=reports_callback,
                             pattern=CALLBACK_REPORT_CURRENT_MONTH))
    dispatcher.add_handler(
        RegexHandler(callback=reports_inline,
                     pattern=str(Regex.START_WITH_EMOJI_SLASH +
                                 Regex.REPORT_COMMAND)))

    # Studies Handler
    dispatcher.add_handler(
        CallbackQueryHandler(callback=studies_callback,
                             pattern=CALLBACK_STUDIES_ADD_ONE))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=studies_callback,
                             pattern=CALLBACK_STUDIES_ADD_THREE))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=studies_callback,
                             pattern=CALLBACK_STUDIES_REMOVE_ONE))
    dispatcher.add_handler(
        RegexHandler(callback=studies_offline_add_callback,
                     pattern=Regex.STUDIES_OFFLINE_ADD))
    dispatcher.add_handler(
        RegexHandler(callback=studies_offline_remove_callback,
                     pattern=Regex.STUDIES_OFFLINE_REMOVE))
    dispatcher.add_handler(
        RegexHandler(callback=studies_inline,
                     pattern=str(Regex.START_WITH_EMOJI_SLASH +
                                 Regex.STUDIES_COMMAND)))

    # Pubs Handler
    dispatcher.add_handler(
        CallbackQueryHandler(callback=pubs_callback,
                             pattern=CALLBACK_PUBS_ADD_ONE))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=pubs_callback,
                             pattern=CALLBACK_PUBS_ADD_THREE))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=pubs_callback,
                             pattern=CALLBACK_PUBS_REMOVE_ONE))
    dispatcher.add_handler(
        RegexHandler(callback=pubs_offline_add_callback,
                     pattern=Regex.PUBS_OFFLINE_ADD))
    dispatcher.add_handler(
        RegexHandler(callback=pubs_offline_remove_callback,
                     pattern=Regex.PUBS_OFFLINE_REMOVE))
    dispatcher.add_handler(
        RegexHandler(callback=pubs_inline,
                     pattern=str(Regex.START_WITH_EMOJI_SLASH +
                                 Regex.PUBS_COMMAND)))

    # Hours Handler (hour-level callbacks carry per-user data)
    dispatcher.add_handler(
        CallbackQueryHandler(callback=hours_callback,
                             pattern=CALLBACK_HOURS_ADD,
                             pass_user_data=True))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=hours_callback,
                             pattern=CALLBACK_HOURS_MINUTES_ADD,
                             pass_user_data=True))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=hours_callback,
                             pattern=CALLBACK_HOURS_ADD_ONE_HOUR,
                             pass_user_data=True))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=hours_callback,
                             pattern=CALLBACK_HOURS_ADD_TWO_HOURS,
                             pass_user_data=True))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=hours_callback,
                             pattern=CALLBACK_HOURS_REMOVE_ONE_HOUR,
                             pass_user_data=True))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=hours_callback,
                             pattern=CALLBACK_HOURS_REMOVE_TWO_HOURS,
                             pass_user_data=True))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=hours_callback,
                             pattern=CALLBACK_HOURS_ADD_THIRTY_MINUTES))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=hours_callback,
                             pattern=CALLBACK_HOURS_ADD_TEN_MINUTES))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=hours_callback,
                             pattern=CALLBACK_HOURS_ADD_FIVE_MINUTES))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=hours_callback,
                             pattern=CALLBACK_HOURS_REMOVE_THIRTY_MINUTES))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=hours_callback,
                             pattern=CALLBACK_HOURS_REMOVE_TEN_MINUTES))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=hours_callback,
                             pattern=CALLBACK_HOURS_REMOVE_FIVE_MINUTES))
    dispatcher.add_handler(
        RegexHandler(callback=callback_offline_add_hours,
                     pattern=Regex.HOURS_OFFLINE_ADD))
    dispatcher.add_handler(
        RegexHandler(callback=callback_offline_remove_hours,
                     pattern=Regex.HOURS_OFFLINE_REMOVE))
    dispatcher.add_handler(
        RegexHandler(callback=callback_offline_add_minutes,
                     pattern=Regex.HOURS_OFFLINE_ADD_MINUTES))
    dispatcher.add_handler(
        RegexHandler(callback=callback_offline_remove_minutes,
                     pattern=Regex.HOURS_OFFLINE_REMOVE_MINUTES))
    dispatcher.add_handler(
        RegexHandler(callback=hours_inline,
                     pattern=str(Regex.START_WITH_EMOJI_SLASH +
                                 Regex.HOURS_COMMAND)))

    # Returns Handler
    dispatcher.add_handler(
        CallbackQueryHandler(callback=returns_callback,
                             pattern=CALLBACK_RETURNS_ADD_ONE))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=returns_callback,
                             pattern=CALLBACK_RETURNS_ADD_THREE))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=returns_callback,
                             pattern=CALLBACK_RETURNS_REMOVE_ONE))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=returns_callback,
                             pattern=CALLBACK_RETURNS_LIST))
    dispatcher.add_handler(
        CallbackQueryHandler(callback=returns_callback,
                             pattern=CALLBACK_RETURNS_INSTERESTED))
    dispatcher.add_handler(
        RegexHandler(callback=returns_offline_add_callback,
                     pattern=Regex.RETURNS_OFFLINE_ADD))
    dispatcher.add_handler(
        RegexHandler(callback=returns_offline_remove_callback,
                     pattern=Regex.RETURNS_OFFLINE_REMOVE))
    dispatcher.add_handler(
        RegexHandler(callback=returns_inline,
                     pattern=str(Regex.START_WITH_EMOJI_SLASH +
                                 Regex.RETURNS_COMMAND)))
    dispatcher.add_handler(
        RegexHandler(callback=returns_people_callback,
                     pattern=Regex.RETURNS_PEOPLE))
    dispatcher.add_handler(
        RegexHandler(callback=returns_people_remove_callback,
                     pattern=Regex.RETURNS_PEOPLE_REMOVE))

    # Commands Handler
    dispatcher.add_handler(CommandHandler('start', start, pass_user_data=True))
    dispatcher.add_handler(
        RegexHandler(callback=callback_404,
                     pattern=Regex.NOT_FOUND_404))  # Must be last one

    dispatcher.add_error_handler(error)

    # Start MongoDB and Bot
    startup_mongodb()
    updater.start_polling()
2019-03-24
2019-03-25
2019-03-26
'''

#10. Write a Python program to add 5 seconds with the current time.
#additional information https://stackoverflow.com/questions/656297/python-time-timedelta-equivalent
from datetime import date, datetime, time, timedelta
dttt = datetime.today().time()
print(dttt)  #print 20:00:49.896064
print( type(dttt) )  #print <class 'datetime.time'>. RM: it's not <class 'datetime.datetime'>.
# datetime.time does not support timedelta addition directly, so the naive
# version below is commented out; combine with today's date first instead.
#fiveseconds = dttt + timedelta(minutes=5)
fiveseconds = datetime.combine(date.today(), time(dttt.hour, dttt.minute, dttt.second)) + timedelta(seconds=5)
print(fiveseconds.time())  #print 20:00:54

#11. Write a Python program to convert Year/Month/Day to Day of Year in Python.
#RM: print the day number of the year or day count of the year; e.g. Dec 31 is 365 day number.
monthdayyear = "04/10/2018"
datestringsplit = monthdayyear.split("/")
print(datestringsplit)  #print ['04', '10', '2018']
dateformat = datestringsplit[0] + " " + datestringsplit[
    1] + " " + datestringsplit[2]
print(dateformat)  #print 04 10 2018
print(datetime.strptime(dateformat, "%m %d %Y"))  #print 2018-04-10 00:00:00
dateformat = datetime.strptime(dateformat, "%m %d %Y")
# %j formats the day-of-year number.
print(dateformat.strftime("%j"))  #print 100
#quicker: parse the original string directly instead of re-joining pieces
monthdayyear = "04/10/2018"
print(datetime.strptime(monthdayyear, "%m/%d/%Y"))  #print 2018-04-10 00:00:00
def _day_time_step_generator(time_step_size):
    """Yield every time-of-day in one day at ``time_step_size`` intervals,
    starting from midnight."""
    # Only whole-minute step sizes are supported.
    assert time_step_size % datetime.timedelta(minutes=1) == datetime.timedelta(minutes=0)
    step_minutes = int(time_step_size.total_seconds() / 60)
    midnight = datetime.time(0, 0)
    minutes_per_day = 24 * 60
    for offset in range(0, minutes_per_day, step_minutes):
        step = datetime.timedelta(minutes=offset)
        yield WeekMarkovChain._add_delta_to_time(midnight, step)