def __init__(self, title, date_start=None, date_end=None):
    """
    Initialize the chart data container.

    "title" will appear in the first line of the array (the grayed one).
    "date_start" (datetime or time.struct_time) defines where the X axis
    should start; leave it empty to let the chart autoscale on the data.
    "date_end" (datetime or time.struct_time) defines where the X axis
    should end; leave it empty to let the chart autoscale on the data.
    """
    self.title = title
    # Accept time.struct_time as well as datetime for both bounds, and
    # normalize them to midnight so scale computations work on whole days.
    if isinstance(date_start, time.struct_time):
        self.date_start = datetime.datetime(*date_start[:3])
    else:
        self.date_start = date_start
    if self.date_start is not None:
        self.date_start = self.date_start.replace(hour=0, minute=0, second=0, microsecond=0)
    if isinstance(date_end, time.struct_time):
        self.date_end = datetime.datetime(*date_end[:3])
    else:
        self.date_end = date_end
    if self.date_end is not None:
        self.date_end = self.date_end.replace(hour=0, minute=0, second=0, microsecond=0)
    self.items = {}          # item id -> item object
    self.items_order = []    # display order of item ids
    self.scale = None
    self.lines_number = None
def ical(request):
    """Django view: export every ScheduleSection event as an iCalendar (.ics) download.

    NOTE(review): dtstart/dtend are hard-coded to 2005-04-04 placeholder times
    instead of being derived from the event -- confirm whether real schedule
    times should be used here.
    NOTE(review): HttpResponse(mimetype=...) only works on old Django versions
    (the argument was renamed to content_type and later removed).
    A near-identical single-quoted copy of this view exists elsewhere in this file.
    """
    CALENDAR_NAME = u"profsoux"
    CALENDAR_SHORT_NAME = u"profsoux.ru"
    events = ScheduleSection.objects.all()
    cal = Calendar()
    # Calendar-level metadata.
    cal.add("prodid", u"-//%s//%s//" % (CALENDAR_NAME, CALENDAR_SHORT_NAME))
    cal.add("version", "2.0")
    cal.add("calscale", "GREGORIAN")
    cal.add("X-ORIGINAL-URL", CALENDAR_SHORT_NAME)
    cal.add("method", "PUBLISH")
    for event in events:
        ical_event = Event()
        # Unique id in the conventional "<pk>@<domain>" form.
        ical_event.add("uid", str(event.id) + "@" + CALENDAR_SHORT_NAME)
        title = event.title or u""
        if event.lecture:
            speakers = event.lecture.get_speakers()
            ical_event.add("summary", u"%s%s «%s»" % (title, speakers, event.lecture.title))
        else:
            ical_event.add("summary", title)
        dtstart = datetime.datetime(2005, 4, 4, 8, 0, 0, tzinfo=pytz.utc)
        dtend = datetime.datetime(2005, 4, 4, 10, 0, 0, tzinfo=pytz.utc)
        ical_event.add("dtstart", dtstart)
        ical_event.add("dtend", dtend)
        ical_event.add("dtstamp", dtstart)
        cal.add_component(ical_event)
    response = HttpResponse(cal.to_ical(), mimetype="text/calendar")
    response["Content-Disposition"] = "attachment; filename=%s.ics" % "ical"
    return response
def get_data_timespan_db_index_query(dev_id,start_date,end_date,celc):
    # Query tempData for readings of one device between two parsed dates,
    # using the per-field date/h/min/s index columns.
    # NOTE(review): ANDing independent ranges on "h", "min" and "s" does NOT
    # express a continuous time span (e.g. 09:50-10:10 matches nothing because
    # "min" must be both >= 50 and < 10) -- verify against callers.
    # NOTE(review): "celc" is accepted but never used in this function.
    return_list=[]
    connection = Connection()
    db = connection.lws
    collection = db.tempData
    # parse_date() appears to return a dict with year/month/day/hour/min/second
    # keys (see the lookups below) -- confirm against its definition.
    start_date = parse_date(start_date)
    end_date = parse_date(end_date)
    start_dt_obj = datetime.datetime(start_date['year'],start_date['month'],start_date['day'])
    end_dt_obj = datetime.datetime(end_date['year'],end_date['month'],end_date['day'])
    print start_dt_obj
    print end_dt_obj
    #import time
    #start = time.time()
    #db.tempData.find({"h":{$gte:1,$lte:10},"min":{$gte:0,$lte:1},"s":{$gte:0,$lte:10}}).count()
    #db_data = collection.find({"phid":dev_id,"datetime":{"$gte":start_date,"$lt":end_date},"s":0,"min":0})
    db_data = collection.find({"phid":dev_id,"date":{"$gte":start_dt_obj,"$lt":end_dt_obj},"h":{"$gte":start_date['hour'],"$lt":end_date['hour']},"min":{"$gte":start_date['min'],"$lt":end_date['min']},"s":{"$gte":start_date['second'],"$lt":end_date['second']}})
    #db_data = collection.find({"phid":dev_id,"date":{"$gte":start_dt_obj,"$lt":end_dt_obj}})
    #print time.time() - start
    if db_data.count()==0:
        print 'no data'
        # NOTE(review): returns the int 0 for "no data" but a cursor otherwise;
        # callers must handle both shapes.
        return 0
    else:
        print db_data.count()
        return db_data
def test_clean_hidden(self, dbsession):
    """Screening.clean_hidden('msk') deletes screenings of hidden movies
    only in that city and returns the number of rows removed."""
    import datetime
    from subscity.models.cinema import Cinema
    from subscity.models.movie import Movie
    m1 = Movie(api_id='fake_movie1', title='title1')
    m2 = Movie(api_id='fake_movie2', title='title2', hide=True)
    m3 = Movie(api_id='fake_movie3', title='title3', hide=True)
    c1 = Cinema(api_id='fake_cinema1', city='msk', name='c1')
    c2 = Cinema(api_id='fake_cinema2', city='msk', name='c2')
    c3 = Cinema(api_id='fake_cinema3', city='spb', name='c3')
    # Plain loops instead of side-effect list comprehensions.
    for obj in [m1, m2, m3, c1, c2, c3]:
        dbsession.add(obj)
    dbsession.commit()
    # not a hidden movie
    s1 = Screening(cinema_api_id='fake_cinema1', movie_api_id='fake_movie1',
                   city='msk', date_time=datetime.datetime(2017, 2, 15, 13, 0))
    # will be deleted
    s2 = Screening(cinema_api_id='fake_cinema2', movie_api_id='fake_movie2',
                   city='msk', date_time=datetime.datetime(2017, 2, 16, 20, 0))
    # a hidden movie but from a different city
    s3 = Screening(cinema_api_id='fake_cinema3', movie_api_id='fake_movie3',
                   city='spb', date_time=datetime.datetime(2017, 2, 16, 21, 0))
    for screening in [s1, s2, s3]:
        dbsession.add(screening)
    dbsession.commit()
    result = Screening.clean_hidden('msk')
    assert result == 1
    screenings = dbsession.query(Screening).all()
    assert [s.id for s in screenings] == [s1.id, s3.id]
def test_no_show(self):
    """
    Tests the instance when the student is scheduled to work a shift
    but does not work it.
    """
    # Leading-zero day/month literals removed: they are octal in Python 2
    # and a SyntaxError in Python 3 (02 == 2, so values are unchanged).
    shift = c_models.Shift.objects.create(person=self.user1,
                                          intime=datetime.datetime(1927, 2, 11, 11, 30, 27),
                                          outtime=datetime.datetime(1927, 2, 11, 14, 46, 37),
                                          shiftnote='IN: \n\nOUT: ',
                                          in_clock=self.pclock,
                                          out_clock=self.pclock)
    date = '1927-03-11'
    service = 'dummy_service'
    # Mock the schedule API so compare() sees one scheduled shift on a day
    # with no worked shift (the created shift is in February).
    with patch.object(c_utils, 'read_api',
                      return_value={"Shifts": {"user1": [{"Out": "14:45:00", "In": "11:30:00", "Shift": 1}]}}):
        results = c_utils.compare(date, service)
    expected_conflicts = []
    expected_no_shows = [{'In': '11:30:00', 'Out': '14:45:00', 'Shift': 1, 'name': 'User 1', 'netid': 'user1'}]
    expected_missing_netids = []
    self.assertEqual(results, (expected_no_shows, expected_conflicts, expected_missing_netids))
    shift.delete()
def test_similar_shifts(self):
    """
    This test does not currently pass because there is a bug in the code.
    In the process of fixing it. Supposed to test the instance that the
    student has two shifts in a 24 hour time span but only works one of
    the shifts.
    """
    # Leading-zero day literals removed: octal in Python 2, SyntaxError in
    # Python 3 (02 == 2, so values are unchanged).
    # This shift was worked the day before the day being examined.
    shift = c_models.Shift.objects.create(person=self.user1,
                                          intime=datetime.datetime(1927, 11, 2, 18, 49, 20),
                                          outtime=datetime.datetime(1927, 11, 2, 22, 21, 25),
                                          shiftnote='IN: \n\nOUT: ',
                                          in_clock=self.pclock,
                                          out_clock=self.pclock)
    date = '1927-11-03'
    service = 'dummy_service'
    # This shift was supposed to be worked on the date being examined -- '1927-11-03'.
    with patch.object(c_utils, 'read_api',
                      return_value={"Shifts": {"user1": [{"Out": "22:15:00", "In": "20:45:00", "Shift": 1}]}}):
        request = c_utils.compare(date, service)
    expected_conflict = []
    expected_no_show = [{'In': '20:45:00', 'Out': '22:15:00', 'Shift': 1, 'name': 'User 1', 'netid': 'user1'}]
    expected_missing_netids = []
    self.assertEqual(request, (expected_no_show, expected_conflict, expected_missing_netids))
    shift.delete()
def test_hierarchy(self):
    """Code/parent resolution should fall back to the base-class hierarchy."""
    import datetime
    codes = [amcattest.create_test_code(label=letter) for letter in "abcdef"]
    a, b, c, d, e, f = codes
    # Codebook Z: b (valid until 2010) with child a.
    # Even when requesting the 2013 tree, a must still have parent b.
    Z = amcattest.create_test_codebook(name="Z")
    Z.add_code(code=b, validto=datetime.datetime(2010, 1, 1))
    Z.add_code(code=a, parent=b)
    tree = Z.get_tree(datetime.datetime(2013, 1, 1))
    self.assertEqual(tree[0].children[0].label, 'a')
    # Codebook A is a chain: a -> b -> c.
    A = amcattest.create_test_codebook(name="A")
    A.add_code(a)
    A.add_code(b, a)
    A.add_code(c, b)
    self.assertEqual(self.standardize(A), 'a:None;b:a;c:b')
    # Codebook D is a fan-out: d with children e and f.
    D = amcattest.create_test_codebook(name="D")
    D.add_code(d)
    D.add_code(e, d)
    D.add_code(f, d)
    self.assertEqual(self.standardize(D), 'd:None;e:d;f:d')
def check_valid_min_max_args(min_val, max_val, step, parser, range_axis):
    """
    If a val range was specified, checks that the values are valid numbers
    (or datetimes) and returns them keyed as '<axis>min'/'<axis>max'/'<axis>step'.

    The duplicated min/max handling is factored into a local helper; the
    previously imported-but-unused parse_datetimestr_to_std_time was dropped.
    """
    from cis.parse_datetime import parse_as_number_or_datetime, parse_as_float_or_time_delta
    from cis.time_util import convert_datetime_to_std_time
    import datetime

    def _parse_bound(val, key):
        # Parse one bound; datetime components come back as a list and must
        # be converted to the standard time number.
        parsed = parse_as_number_or_datetime(val, key, parser)
        if isinstance(parsed, list):
            return convert_datetime_to_std_time(datetime.datetime(*parsed))
        return parsed

    ax_range = {}
    if min_val is not None:
        ax_range[range_axis + "min"] = _parse_bound(min_val, range_axis + "min")
    if max_val is not None:
        ax_range[range_axis + "max"] = _parse_bound(max_val, range_axis + "max")
    if step is not None:
        ax_range[range_axis + "step"] = parse_as_float_or_time_delta(step, range_axis + "step", parser)
    return ax_range
def test_interpret_results(self):
    """
    Tests that the message that is passed back to the template is correct.
    """
    # Leading-zero month literals removed: octal in Python 2, SyntaxError
    # in Python 3 (03 == 3, so values are unchanged).
    shift = c_models.Shift.objects.create(person=self.user1,
                                          intime=datetime.datetime(1927, 3, 11, 11, 30, 27),
                                          outtime=datetime.datetime(1927, 3, 11, 14, 46, 37),
                                          shiftnote='IN: \n\nOUT: ',
                                          in_clock=self.pclock,
                                          out_clock=self.pclock)
    date = '1927-03-11'
    service = 'dummy_service'
    with patch.object(c_utils, 'read_api',
                      return_value={"Shifts": {"user1": [{"Out": "14:45:00", "In": "11:30:00", "Shift": 1}]}}):
        msg = c_utils.interpret_results(date, service)
    # 97 seconds late on clock-out (14:46:37 vs the scheduled 14:45:00).
    expected_msg = [{'status': 'Clock Out Late', 'comm_in': u'IN: ', 'color': 'blacker',
                     'sched_in': '11:30 AM', 'clock_out': '02:46 PM', 'date': '1927-03-11',
                     'change': datetime.timedelta(0, 97), 'comm_out': u'OUT: ',
                     'clock_in': '11:30 AM', 'name': u'User 1', 'netid': 'user1',
                     'sched_out': '02:45 PM'}]
    expected_missing_ids = []
    self.assertEqual(msg, (expected_msg, expected_missing_ids))
    shift.delete()
def test_slightly_late(self):
    """
    Tests the instance that the student clocks out slightly late and
    clocks in on time.
    """
    # Leading-zero literals removed: octal in Python 2, SyntaxError in
    # Python 3 (03 == 3, 00 == 0, so values are unchanged).
    shift = c_models.Shift.objects.create(person=self.user1,
                                          intime=datetime.datetime(1927, 11, 3, 11, 30, 0),
                                          outtime=datetime.datetime(1927, 11, 3, 14, 46, 37),
                                          shiftnote='IN: \n\nOUT: ',
                                          in_clock=self.pclock,
                                          out_clock=self.pclock)
    date = '1927-11-03'
    service = 'dummy_service'
    with patch.object(c_utils, 'read_api',
                      return_value={"Shifts": {"user1": [{"Out": "14:45:00", "In": "11:30:00", "Shift": 1}]}}):
        results = c_utils.compare(date, service)
    # 97 seconds late on clock-out.
    expected_conflicts = [{'clock_in': '11:30:00', 'sched_in': '11:30:00',
                           'diff_out_late': datetime.timedelta(0, 97), 'comm_in': u'IN: ',
                           'clock_out': '14:46:37', 'netid': 'user1', 'name': u'User 1',
                           'sched_out': '14:45:00', 'comm_out': u'OUT: '}]
    expected_no_shows = []
    expected_missing_netids = []
    self.assertEqual(results, (expected_no_shows, expected_conflicts, expected_missing_netids))
    shift.delete()
def test_no_show_and_conflict(self):
    """
    Tests when the user is scheduled for two shifts in one day but only
    works one of the shifts.
    """
    shift = c_models.Shift.objects.create(person=self.user1,
                                          intime=datetime.datetime(1927, 12, 19, 9, 30, 35),
                                          outtime=datetime.datetime(1927, 12, 19, 11, 30, 20),
                                          shiftnote='IN: \n\nOUT: ',
                                          in_clock=self.pclock,
                                          out_clock=self.pclock)
    date = '1927-12-19'
    service = 'dummy_service'
    with patch.object(c_utils, 'read_api',
                      return_value={"Shifts": {"user1": [{"Out": "11:30:00", "In": "09:30:00", "Shift": 1},
                                                         {"Out": "18:00:00", "In": "15:00:00", "Shift": 2}]}}):
        results = c_utils.compare(date, service)
    # Shift 1 was worked (a minor conflict); shift 2 was never worked.
    expected_conflicts = [{'name': u'User 1', 'netid': 'user1', 'comm_in': u'IN: ', 'comm_out': u'OUT: '}]
    expected_no_shows = [{'In': '15:00:00', 'Out': '18:00:00', 'Shift': 2, 'name': u'User 1', 'netid': 'user1'}]
    expected_missing_ids = []
    self.assertEqual(results, (expected_no_shows, expected_conflicts, expected_missing_ids))
    # Cleanup was missing here; every sibling test deletes its shift.
    shift.delete()
def test_shiftnote(self):
    """
    Tests when the user deletes the auto filled 'IN: \n\nOUT: ' and puts
    in their own note.
    """
    # Leading-zero month literals removed: octal in Python 2, SyntaxError
    # in Python 3 (03 == 3, so values are unchanged).
    shift = c_models.Shift.objects.create(person=self.user1,
                                          intime=datetime.datetime(1927, 3, 11, 11, 30, 27),
                                          outtime=datetime.datetime(1927, 3, 11, 14, 46, 37),
                                          shiftnote='I deleted the auto filled stuff and put my own note',
                                          in_clock=self.pclock,
                                          out_clock=self.pclock)
    date = '1927-03-11'
    service = 'dummy_service'
    with patch.object(c_utils, 'read_api',
                      return_value={"Shifts": {"user1": [{"Out": "14:45:00", "In": "11:30:00", "Shift": 1}]}}):
        results = c_utils.compare(date, service)
    # The whole custom note lands in comm_in; comm_out stays empty.
    expected_conflicts = [{'clock_in': '11:30:27', 'sched_out': '14:45:00',
                           'clock_out': '14:46:37', 'comm_out': u'',
                           'sched_in': '11:30:00', 'netid': 'user1',
                           'diff_out_late': datetime.timedelta(0, 97), 'name': u'User 1',
                           'comm_in': u'I deleted the auto filled stuff and put my own note'}]
    expected_no_shows = []
    expected_missing_ids = []
    self.assertEqual(results, (expected_no_shows, expected_conflicts, expected_missing_ids))
    shift.delete()
def ical(request):
    """Django view: export ScheduleSection events as an iCalendar (.ics) download.

    NOTE(review): dtstart/dtend are hard-coded 2005-04-04 placeholders rather
    than event-derived times -- confirm intended behavior.
    NOTE(review): this is a near-duplicate (single-quoted) of another ical()
    view in this file; consider consolidating.
    NOTE(review): HttpResponse(mimetype=...) requires an old Django version.
    """
    CALENDAR_NAME = u'profsoux'
    CALENDAR_SHORT_NAME = u'profsoux.ru'
    events = ScheduleSection.objects.all()
    cal = Calendar()
    # Calendar-level metadata.
    cal.add('prodid', u'-//%s//%s//' % (CALENDAR_NAME, CALENDAR_SHORT_NAME))
    cal.add('version', '2.0')
    cal.add('calscale', 'GREGORIAN')
    cal.add('X-ORIGINAL-URL', CALENDAR_SHORT_NAME)
    cal.add('method', 'PUBLISH')
    for event in events:
        ical_event = Event()
        # Unique id in the conventional "<pk>@<domain>" form.
        ical_event.add('uid', str(event.id) + '@' + CALENDAR_SHORT_NAME)
        title = event.title or u""
        if event.lecture:
            speakers = event.lecture.get_speakers()
            ical_event.add('summary', u"%s%s «%s»" % (title, speakers, event.lecture.title))
        else:
            ical_event.add('summary', title)
        dtstart = datetime.datetime(2005, 4, 4, 8, 0, 0, tzinfo=pytz.utc)
        dtend = datetime.datetime(2005, 4, 4, 10, 0, 0, tzinfo=pytz.utc)
        ical_event.add('dtstart', dtstart)
        ical_event.add('dtend', dtend)
        ical_event.add('dtstamp', dtstart)
        cal.add_component(ical_event)
    response = HttpResponse(cal.to_ical(), mimetype="text/calendar")
    response['Content-Disposition'] = 'attachment; filename=%s.ics' % 'ical'
    return response
def DELETE_OVERDU_BACKUP(): filelist=[] filelist=os.listdir(backupdir) for i in filelist: print backupdir+i backupset=backupdir+i ftl =time.strftime('%Y-%m-%d',time.gmtime(os.stat(backupdir+i).st_mtime)) year,month,day=ftl.split('-') ftll=datetime.datetime(int(year),int(month),int(day)) year,month,day=time.strftime('%Y-%m-%d',time.gmtime()).split('-') localtll=datetime.datetime(int(year),int(month),int(day)) days=(localtll-ftll).days print days if days==0: RSYNC_BACKUP(backupdir+i) elif days>=7: try: os.remove(backupdir+i) except: print "delete overdue backup failed" else: print "ok"
def graph(request,username):
    """Django view: return [ [epoch_millis, running_count], ... ] as JSON
    for the given user's liked orders, ordered by creation time.

    NOTE(review): the epoch arithmetic uses a naive 1970-01-01 baseline and
    rebuilds a1 from components (dropping microseconds and tzinfo) -- if
    created_at is timezone-aware this silently mixes naive/aware semantics;
    confirm. The print() calls look like leftover debug output.
    """
    print("Graph is working")
    username1 = username
    user = User.objects.get(username = username1)
    obj1 = Order.objects.filter(liked = user).order_by('created_at')
    obj2 = obj1.values_list('created_at')
    data = []
    count = 1
    for element in obj2:
        x=[]
        # element is a 1-tuple holding the created_at value.
        a = element[0]
        print("a = ")
        print(a)
        # Rebuild without microseconds/tzinfo before subtracting the epoch.
        a1 = datetime.datetime(a.year,a.month,a.day,a.hour,a.minute,a.second)
        print("a1 = ")
        print(a1)
        b = datetime.datetime(1970,1,1,0,0,0,)
        print("b = ")
        print(b)
        # Seconds since epoch -> integer milliseconds (what JS charts expect).
        c = (a1-b).total_seconds()
        d = int(c*1000)
        x.append(d)
        x.append(count)
        count = count+1
        data.append(x)
    print(data)
    s = json.dumps(data)
    print(s)
    return HttpResponse(s, content_type = "application/json")
def main():
    """Look up flight details for a WRO -> EIN connection in the local sqlite DB
    over 2017-06-20..2017-06-25 and print the first result's carrier code."""
    import datetime

    # dbMgr = DatabaseManager('flydive', 'mysql')
    db_manager = DatabaseManager('flydive', 'sqlite')
    route = Connections(src_iata='WRO', dst_iata='EIN')
    window_start = datetime.datetime(2017, 6, 20)
    window_end = datetime.datetime(2017, 6, 25)
    details = db_manager.queryFlightDetails(route, True, window_start, window_end)
    print(details[0].connection.carrierCode)
def test_out_early(self):
    """
    Test the instance when the student clocks in on time and clocks out
    early.
    """
    # Leading-zero literals removed: octal in Python 2, SyntaxError in
    # Python 3 (06 == 6, 00 == 0, so values are unchanged).
    shift = c_models.Shift.objects.create(person=self.user1,
                                          intime=datetime.datetime(1927, 6, 23, 18, 0, 0),
                                          outtime=datetime.datetime(1927, 6, 23, 23, 40, 0),
                                          shiftnote='IN: \n\nOUT: ',
                                          in_clock=self.pclock,
                                          out_clock=self.pclock)
    date = '1927-06-23'
    service = 'dummy_service'
    with patch.object(c_utils, 'read_api',
                      return_value={"Shifts": {"user1": [{"Out": "23:45:00", "In": "18:00:00", "Shift": 1}]}}):
        results = c_utils.compare(date, service)
    # Clocked out 300 seconds (5 minutes) before the scheduled 23:45:00.
    expected_conflicts = [{'clock_in': '18:00:00', 'sched_out': '23:45:00',
                           'clock_out': '23:40:00', 'comm_out': u'OUT: ',
                           'sched_in': '18:00:00', 'netid': 'user1',
                           'diff_out_early': datetime.timedelta(0, 300), 'name': u'User 1',
                           'comm_in': u'IN: '}]
    expected_no_shows = []
    expected_missing_netids = []
    self.assertEqual(results, (expected_no_shows, expected_conflicts, expected_missing_netids))
    shift.delete()
def test_get_bounds_from_1d(self):
    """Bounds derived from 1-d coordinate arrays: midpoints, errors, ordering."""
    # Five evenly spaced centers straddling the antimeridian.
    centers = np.array([-180, -90, 0, 90, 180], dtype=float)
    expected = [[-225.0, -135.0], [-135.0, -45.0], [-45.0, 45.0],
                [45.0, 135.0], [135.0, 225.0]]
    self.assertEqual(get_bounds_from_1d(centers).tolist(), expected)
    # Datetime coordinates are not supported.
    dates = get_date_list(datetime.datetime(2000, 1, 31), datetime.datetime(2002, 12, 31), 1)
    with self.assertRaises(NotImplementedError):
        get_bounds_from_1d(np.array(dates))
    # A single value has no derivable spacing.
    with self.assertRaises(ValueError):
        get_bounds_from_1d(np.array([0], dtype=float))
    # Two ascending values.
    expected = [[37.5, 62.5], [62.5, 87.5]]
    self.assertEqual(get_bounds_from_1d(np.array([50, 75], dtype=float)).tolist(), expected)
    # Two descending values keep the descending orientation.
    expected = [[87.5, 62.5], [62.5, 37.5]]
    self.assertEqual(get_bounds_from_1d(np.array([75, 50], dtype=float)).tolist(), expected)
    # Centers starting at zero.
    expected = [[-25.0, 25.0], [25.0, 75.0], [75.0, 125.0]]
    self.assertEqual(get_bounds_from_1d(np.array([0, 50, 100], dtype=float)).tolist(), expected)
def test_24th_hour(self):
    """
    Tests that time is set to 00:00:00 when the time passed in is
    24:00:00. This works but creates a bug.
    """
    # Leading-zero literals removed: octal in Python 2, SyntaxError in
    # Python 3 (03 == 3, 06 == 6, so values are unchanged).
    shift = c_models.Shift.objects.create(person=self.user1,
                                          intime=datetime.datetime(1927, 3, 11, 18, 49, 20),
                                          outtime=datetime.datetime(1927, 3, 11, 23, 59, 6),
                                          shiftnote='IN: \n\nOUT: ',
                                          in_clock=self.pclock,
                                          out_clock=self.pclock)
    date = '1927-03-11'
    service = 'dummy_service'
    with patch.object(c_utils, 'read_api',
                      return_value={"Shifts": {"user1": [{"Out": "24:00:00", "In": "18:50:00", "Shift": 1}]}}):
        results = c_utils.compare(date, service)
    expected_conflicts = [{'name': u'User 1', 'netid': 'user1', 'comm_in': u'IN: ', 'comm_out': u'OUT: '}]
    expected_no_show = []
    expected_missing_netids = []
    self.assertEqual(results, (expected_no_show, expected_conflicts, expected_missing_netids))
    shift.delete()
def get_data_timespan_db_date_query(dev_id,start_date,end_date,celc):
    # Return all tempData readings for one device whose "datetime" field
    # lies in [start_date, end_date), with sensor payloads converted to
    # real values, serialized as a JSON string.
    # NOTE(review): returns the int 0 for "no data" but a JSON string
    # otherwise -- callers must handle both shapes.
    return_list=[]
    connection = Connection()
    db = connection.lws
    collection = db.tempData
    # parse_date() appears to return a dict of year/month/day/hour/min/second
    # fields (see below) -- confirm against its definition.
    start_date = parse_date(start_date)
    end_date = parse_date(end_date)
    #import time
    #start = time.time()
    #db.tempData.find({"h":{$gte:1,$lte:10},"min":{$gte:0,$lte:1},"s":{$gte:0,$lte:10}}).count()
    # Rebuild full datetimes for a proper range query on "datetime".
    start_date = datetime.datetime(start_date['year'],start_date['month'],start_date['day'],start_date['hour'],start_date['min'],start_date['second'])
    end_date = datetime.datetime(end_date['year'],end_date['month'],end_date['day'],end_date['hour'],end_date['min'],end_date['second'])
    print start_date
    print end_date
    import lws_server_sensor_config
    db_data = collection.find({"phid":dev_id,"datetime":{"$gte":start_date,"$lt":end_date}})
    return_list=[]
    #print time.time() - start
    if db_data.count()==0:
        print 'no data'
        return 0
    else:
        for thing in db_data:
            # Round-trip through BSON-safe JSON, then decode the raw sensor
            # strings into real (optionally Celsius) values.
            thing = json.loads(mongo_dumps(thing))
            thing['sensor_data'] = lws_server_sensor_config.transpose_to_strings(thing['sensor_data'])
            thing['sensor_data'] = lws_server_sensor_config.convert_to_real_values(thing['sensor_data'],celc,True)
            return_list.append(thing)
            #print thing
        print db_data.count()
        #return
        return json.dumps(return_list)
def test_another_no_show_case(self):
    """
    This test does not currently pass because of a bug in the code that I
    am working on fixing. Supposed to test when the shifts are more than
    23 hours apart from each other but less than 24 hours.
    """
    # Leading-zero literals removed: octal in Python 2, SyntaxError in
    # Python 3 (03 == 3, 02 == 2, 06 == 6, so values are unchanged).
    shift = c_models.Shift.objects.create(person=self.user1,
                                          intime=datetime.datetime(1927, 3, 12, 14, 12, 41),
                                          outtime=datetime.datetime(1927, 3, 12, 19, 2, 6),
                                          shiftnote='IN: \n\nOUT:',
                                          in_clock=self.pclock,
                                          out_clock=self.pclock)
    date = '1927-03-11'
    service = 'dummy_service'
    with patch.object(c_utils, 'read_api',
                      return_value={"Shifts": {"user1": [{"Out": "14:45:00", "In": "11:30:00", "Shift": 1}]}}):
        results = c_utils.compare(date, service)
    expected_conflicts = []
    expected_no_show = [{'In': '11:30:00', 'Out': '14:45:00', 'Shift': 1, 'name': 'User 1', 'netid': 'user1'}]
    expected_missing_netids = []
    self.assertEqual(results, (expected_no_show, expected_conflicts, expected_missing_netids))
    shift.delete()
def test_week():
    # Ad-hoc exploration of week-number formats (Python 2 print statements).
    # ISO week number of 2006-09-04; isocalendar() -> (year, week, weekday).
    print datetime.datetime(2006,9,4).isocalendar()[1]
    # Current week number with weeks starting on Monday ("%W").
    print time.strftime("%W")
    # Millisecond epoch timestamp -> seconds -> UTC datetime.
    timeStamp = 1449849600000/1000
    dateArray = datetime.datetime.utcfromtimestamp(timeStamp)
    print dateArray.strftime("%Y-%W")
def get_boundary_dates(self):
    """
    Return the boundary dates (min_date, max_date) of the graphic.

    They are defined either by the dates given to the constructor or, when
    those are missing, by scanning the items' periods for the earliest
    start and latest end.
    """
    # If both dates are defined in the constructor, no need to search for
    # boundary dates in the data.
    if self.date_start is not None and self.date_end is not None:
        return (self.date_start, self.date_end)
    # Go through all the data to find the first and last date.
    max_date = None
    min_date = None
    for item_id in self.items_order:
        for period in self.items[item_id].periods:
            if max_date is None or max_date < period.date_end:
                max_date = period.date_end
            if min_date is None or min_date > period.date_start:
                min_date = period.date_start
    # Dates defined in the constructor have priority over those defined by
    # the periods.
    if self.date_start is not None:
        min_date = self.date_start
    if self.date_end is not None:
        max_date = self.date_end
    # Normalize time.struct_time values and truncate both bounds to midnight.
    if isinstance(min_date, time.struct_time):
        min_date = datetime.datetime(*min_date[:3])
    min_date = min_date.replace(hour=0, minute=0, second=0, microsecond=0)
    if isinstance(max_date, time.struct_time):
        max_date = datetime.datetime(*max_date[:3])
    max_date = max_date.replace(hour=0, minute=0, second=0, microsecond=0)
    return (min_date, max_date)
def generar_horarios(centro, fecha_asistencia):
    """Build the list of appointment blocks for a centre on a given date.

    fecha_asistencia is a 'YYYY-MM-DD' string. Each Bloque spans
    tiempo_atencion minutes and carries its remaining capacity:
    active experts minus already-booked, non-cancelled appointments.
    """
    fecha_asistencia_date = datetime.datetime.strptime(fecha_asistencia, '%Y-%m-%d').date()
    now = datetime.datetime.today()
    # Establish the average attention time: a stored per-date override takes
    # precedence over the centre's default.
    tiempo_atencion = centro.tiempo_atencion
    tiempo_viejo = CentrosTiemposAtencion.objects.filter(fecha = fecha_asistencia_date).first()
    if tiempo_viejo:
        tiempo_atencion = tiempo_viejo.tiempo_atencion
    # If booking for today (or earlier) and opening time has already passed,
    # start from the current time instead of the opening time.
    if now.date()>=fecha_asistencia_date and now.time() > centro.hora_apertura_manana:
        hora_inicial_manana = now.time()
    else:
        hora_inicial_manana = centro.hora_apertura_manana
    if now.date()>=fecha_asistencia_date and now.time() > centro.hora_apertura_tarde:
        hora_inicial_tarde = now.time()
    else:
        hora_inicial_tarde = centro.hora_apertura_tarde
    # hora_inicial_manana = centro.hora_apertura_manana if now < centro.hora_apertura_manana else now
    # hora_inicial_tarde = centro.hora_apertura_tarde if now < centro.hora_apertura_tarde else now
    # Minutes remaining in the morning / afternoon sessions.
    if hora_inicial_manana < centro.hora_cierre_manana:
        cantidad_minutos_manana = diff_times_in_minutes(hora_inicial_manana,centro.hora_cierre_manana)
    else:
        cantidad_minutos_manana = 0
    if hora_inicial_tarde < centro.hora_cierre_tarde:
        cantidad_minutos_tarde = diff_times_in_minutes(hora_inicial_tarde,centro.hora_cierre_tarde)
    else:
        cantidad_minutos_tarde = 0
    # NOTE(review): these quotients feed range() below, so they rely on
    # Python 2 integer division -- confirm before any Python 3 migration.
    cantidad_bloques_manana = cantidad_minutos_manana / tiempo_atencion
    cantidad_bloques_tarde = cantidad_minutos_tarde / tiempo_atencion
    lista_bloques = []
    contador_horas = hora_inicial_manana
    # print "Bloq man",cantidad_bloques_manana,"bloq tard",cantidad_bloques_tarde
    for i in range(0,cantidad_bloques_manana):
        # Dummy date 2014-01-01 is only a vehicle for time arithmetic.
        datetime_aux = datetime.datetime(2014,1,1,contador_horas.hour,contador_horas.minute)
        proxima_hora = (datetime_aux + datetime.timedelta(minutes = tiempo_atencion)).time()
        bloque = Bloque(contador_horas, proxima_hora)
        # Bookings already taken in this slot, excluding cancelled requests.
        cantidad_citas = NumeroOrden.objects.filter(hora_atencion = contador_horas, fecha_atencion = fecha_asistencia).exclude(solicitud_inspeccion__estatus__codigo = 'solicitud_cancelada').count()
        bloque.capacidad = centro.peritos.filter(activo=True).count() - cantidad_citas
        lista_bloques.append(bloque)
        contador_horas = proxima_hora
    contador_horas = hora_inicial_tarde
    for i in range(0,cantidad_bloques_tarde):
        datetime_aux = datetime.datetime(2014,1,1,contador_horas.hour,contador_horas.minute)
        proxima_hora = (datetime_aux + datetime.timedelta(minutes = tiempo_atencion)).time()
        bloque = Bloque(contador_horas, proxima_hora)
        cantidad_citas = NumeroOrden.objects.filter(hora_atencion = contador_horas, fecha_atencion = fecha_asistencia).exclude(solicitud_inspeccion__estatus__codigo = 'solicitud_cancelada').count()
        bloque.capacidad = centro.peritos.filter(activo=True).count() - cantidad_citas
        lista_bloques.append(bloque)
        contador_horas = proxima_hora
    return lista_bloques
def test_bars_timeframe(self):
    """Every bar returned for GBPUSD must fall inside 2014-05-03..2014-05-06.

    Leading-zero literals removed (octal in Python 2, SyntaxError in
    Python 3); the local previously named "list" no longer shadows the
    builtin.
    """
    instrument = Instrument.objects.get(name="GBPUSD")
    start = datetime.datetime(2014, 5, 3)
    end = datetime.datetime(2014, 5, 6, hour=23, minute=59, second=59)
    bars = instrument.get_bar_collection_timeframe()
    self.assertTrue(bars)
    for bar in bars:
        self.assertTrue(start <= bar <= end)
def cart(request):
    """Django view: list all orders with their totals and, when POSTed with
    'finalizeOrder', decrement stock and mark the selected order as paid.

    The order to finalize is located by its creation timestamp within a
    one-minute window starting at the submitted date.
    """
    orders = Order.objects.all()
    ordersTupleMaker = list()
    orderPrices = list()
    productOrdered = list()
    productOrderedNewQ = list()
    price = 0
    contains = Contains.objects.all()
    message = ''
    fail = None
    for order in orders:
        ordersTupleMaker.append(order)
    # Total per order = sum of (unit price * quantity) over its line items.
    for i in range(len(orders)):
        contain = Contains.objects.all().filter(order=orders[i])
        for j in range(len(contain)):
            price += contain[j].product.price * contain[j].quantity
        orderPrices.append(price)
        price = 0
    ordersAndPrices = zip(ordersTupleMaker, orderPrices)
    if request.POST.get('finalizeOrder'):
        date = request.POST['date']
        dt = parser.parse(date)
        first_date = datetime.datetime(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second)
        # BUGFIX: this line used to read
        #   last_date = first_date = datetime.datetime(..., dt.minute+1, ...)
        # which clobbered first_date AND crashed with ValueError whenever
        # dt.minute == 59. timedelta handles minute/hour/day rollover.
        last_date = first_date + datetime.timedelta(minutes=1)
        order = Order.objects.all().get(date__range=(dt, last_date))
        # Check stock for every line of the order before committing anything.
        for contain in contains:
            if contain.order == order:
                product = Product.objects.get(name=contain.product.name, price=contain.product.price, description=contain.product.description)
                productOrdered.append(product)
                if product.stock_quantity > contain.quantity:
                    productOrderedNewQ.append(product.stock_quantity - contain.quantity)
                else:
                    fail = True
                    message = "Cannot complete order. Not enough stock to satisfy order."
        if fail == None:
            for i in range(len(productOrdered)):
                product = productOrdered[i]
                product.stock_quantity = productOrderedNewQ[i]
                product.save()
        if fail == None:
            order.paid = True
            order.save()
            fail = False
            message = "Success! Payment received. Order completed."
        messages.add_message(request, messages.INFO, message)
    return render_to_response('onlineStore/cart.html', {'orders':orders, 'contains':contains, 'ordersAndPrices':ordersAndPrices}, context_instance=RequestContext(request))
def get_allmsgweekandmonth(lab_id):
    """Per-user online-time statistics for a lab: weekly and monthly totals
    and averages, in hours.

    Returns a list of dicts (name, lab, weekalltime, weekpingtime,
    monthalltime, monthpingtime) sorted by monthly total, descending.
    NOTE(review): SQL is built by %-interpolating u[i].mac and dates --
    should use parameterized queries; DB credentials are hard-coded; the
    connection/cursor are reopened per user and never closed.
    """
    allmsg = []
    u = User.objects.filter(lab=lab_id)
    for i in range(len(u)):
        mem = {}
        mem['name'] = u[i].username
        mem['lab'] = lab_id
        # Total and average online time for the current week.
        d = datetime.datetime.now()
        now_week = d.weekday()
        monday = datetime.date.today() - datetime.timedelta(days=now_week)
        weekend = []
        thisweekalltime = 0;
        db = MySQLdb.connect("localhost","root","900706","mydb" )
        cursor = db.cursor()
        # Sum the per-day minutes from Monday through Sunday of this week.
        for j in range(0,7):
            w_day = monday + datetime.timedelta(days=j)
            sql = "select times from myapp_daydata where mac='%s' and days='%s' " % (u[i].mac, w_day)
            cursor.execute(sql)
            userdata = cursor.fetchall()
            if len(userdata) == 1:
                thisweekalltime += float(userdata[0][0])
        # Minutes -> hours, two decimals; average over elapsed weekdays.
        mem['weekalltime'] = round(thisweekalltime / 60.0, 2)
        mem['weekpingtime'] = round(thisweekalltime / 60.0 / ( now_week + 1 ), 2)
        lastdate = datetime.date.today()
        thisweekalltime = 0
        thismonthdays = 0
        if lastdate.day == 1:
            # First of the month: no completed days yet.
            thisweekalltime = 0
            thismonthdays = 0
        else:
            # Sum all records from the 1st of the month through yesterday.
            startdatetime = datetime.datetime(lastdate.year, lastdate.month, 1, 0, 0, 0)
            enddatetime = datetime.datetime(lastdate.year, lastdate.month, lastdate.day - 1, 23, 59, 59)
            sql = """select * from myapp_daydata where mac = '%s' and days between '%s' and '%s'""" % (u[i].mac, startdatetime, enddatetime)
            cursor.execute(sql)
            userdata = cursor.fetchall()
            #thisweekalltime = 0
            #thismonthdays = 0
            for row in userdata:
                thismonthdays += 1
                thisweekalltime += float(row[3])
        mem['monthalltime'] = round(thisweekalltime / 60.0, 2)
        if thismonthdays == 0:
            mem['monthpingtime'] = 0
        else:
            mem['monthpingtime'] = round(thisweekalltime / 60.0 / ( lastdate.day - 1), 2)
        allmsg.append(mem)
    allmsg.sort(key=lambda obj:obj.get('monthalltime'), reverse=True)
    return allmsg
def __init__(self):
    """Fixture data: one (start, end) datetime pair for each of the first
    five days of January 2000, plus an empty integer list."""
    # (day, start hour, end hour) for each interval.
    hour_spans = [
        (1, 0, 5),
        (2, 10, 15),
        (3, 20, 23),
        (4, 10, 15),
        (5, 0, 15),
    ]
    self.datetimelist = [
        (datetime.datetime(2000, 1, day, begin, 0, 0, 0),
         datetime.datetime(2000, 1, day, end, 0, 0, 0))
        for day, begin, end in hour_spans
    ]
    self.integers = []
def cleanBetween( self, tsLower, tsUpper ):
    """Delete photo and trigger rows whose ts lies in [tsLower, tsUpper].

    Either bound may be None/falsy: the lower bound then defaults to
    1900-01-01 and the upper bound to Jan 1st of (current year + 1000).
    If both bounds are missing, nothing is deleted.
    NOTE(review): "trigger" is a reserved word in SQLite; confirm the
    unquoted table name parses on the target database.
    """
    if not tsLower and not tsUpper:
        return
    tsLower = tsLower or datetime.datetime(1900,1,1,0,0,0)
    tsUpper = tsUpper or datetime.datetime(datetime.datetime.now().year+1000,1,1,0,0,0)
    # One transaction covering both deletes.
    with self.conn:
        self.conn.execute( 'DELETE from photo WHERE ts BETWEEN ? AND ?', (tsLower,tsUpper) )
        self.conn.execute( 'DELETE from trigger WHERE ts BETWEEN ? AND ?', (tsLower,tsUpper) )
def _add_periods(self):
    """ draw periods' boxes in the chart, with text and style """
    # Table-cell origin of the drawing area: column 1, row 3.
    X_pos = 1
    Y_pos = 3
    # for each Y item
    for i in self.data.items_order:
        # for each period of this item
        for period in self.data.items[i].periods:
            # find the box's color
            color = period.color
            # if the color of the period is not defined,
            if period.color == None:
                # try to use the color of the line
                if self.data.items[i].color != None:
                    color = self.data.items[i].color
                # if no color is defined, use the default one
                else:
                    color = self.DEFAULT_TIMEBOX_COLOR
            # find the grid color; if it is not defined, fall back to the
            # box color
            grid = period.grid
            if period.grid == None:
                grid = color
            # handle time structs and datetimes: normalize both bounds to
            # midnight datetimes
            date_start = period.date_start
            if isinstance(date_start,time.struct_time):
                date_start = datetime.datetime(*date_start[:3])
            date_start = date_start.replace(hour=0, minute=0, second=0, microsecond=0)
            date_end = period.date_end
            if isinstance(date_end,time.struct_time):
                date_end = datetime.datetime(*date_end[:3])
            date_end = date_end.replace(hour=0, minute=0, second=0, microsecond=0)
            # grid coordinates of the period's box (one row high)
            X_start = X_pos + period.scale_item_start
            X_end = X_pos + period.scale_item_end
            Y_start = Y_pos + self.data.items[i].Y_pos + period.offset
            Y_end = Y_start
            # draw the period in the right color
            self.styles.append(('BACKGROUND', (X_start, Y_start), (X_end, Y_end), color))
            self.styles.append(('INNERGRID', (X_start, Y_start), (X_end, Y_end), 0.3, grid))
            self.styles.append(('BOX', (X_start, Y_start), (X_end, Y_end), 0.2, colors.black))
            self.styles.append(('LEFTPADDING', (X_start, Y_start), (X_end, Y_end), 2))
            # label truncated to fit the box width
            self.table[Y_start][X_start] = self._trunc_label(period.label, period.scale_item_end - period.scale_item_start + 1)
# Print usage hints, then pick the processing start time either from a
# fixed default (no arguments) or from "year month day hour minute" on
# the command line.
print("*** possible calls are:")
print("*** python " + inspect.getfile(inspect.currentframe()))
print("*** python " + inspect.getfile(inspect.currentframe()) + " 2014 07 23 16 10 ")
print( " date and time must be completely given" )
print("*** ")
cwd = os.getcwd()
if len(sys.argv) == 1:
    ## automatic choice of time
    ## either fixed
    time_start = datetime.datetime(2015, 5, 30, 12, 0, 0)
    ## or last time available
    #RSS=False
    #from my_msg_module import get_last_SEVIRI_date
    #time_start = get_last_SEVIRI_date(RSS, delay=10)
elif len(sys.argv) == 6:
    # BUGFIX: the script name is argv[0], so the five date/time fields are
    # argv[1]..argv[5]. The previous code tested len(sys.argv) == 5 while
    # reading argv[2]..argv[6], which always raised IndexError.
    year = int(sys.argv[1])
    month = int(sys.argv[2])
    day = int(sys.argv[3])
    hour = int(sys.argv[4])
    minute = int(sys.argv[5])
    time_start = datetime.datetime(year, month, day, hour, minute)
else:
    print_usage()
# Process a 12-minute window starting at time_start.
time_end = time_start + datetime.timedelta(minutes=12)
def doy2ymd(year, dayofyear):
    """Convert a (year, day-of-year) pair to a datetime; January 1st is day 1."""
    import datetime as dt
    jan_first = dt.datetime(year, 1, 1)
    return jan_first + dt.timedelta(days=dayofyear - 1)
def test_datetime(self): import datetime dates = [datetime.datetime(2012, 1, x) for x in range(1, 20)] index = Index(dates) self.assertEqual(index.inferred_type, 'datetime64')
from datetime import datetime
# NOTE: the name "datetime" is first bound to the *class* datetime.datetime
# here, and re-bound to the *module* by the later "import datetime" below --
# the ordering of these imports is load-bearing.
current_time = datetime.now()
print(current_time)
print()
import math
# Bare expression: evaluates math.pi and discards the result (no effect).
math.pi
print(math.pi)
print()
import datetime  # rebinds "datetime" to the module, shadowing the class import above
print(datetime.datetime(2019, 5, 7).weekday())
print()
import random
print(random.randint(1, 100))
print("break")
import random
def random_number():
    # Print (not return) a random integer in [1, 10].
    number = random.randint(1, 10)
    print(number)
random_number()
] plot_url = py.plot(data, filename='date-axes') import plotly.plotly as py import plotly.graph_objs as go import datetime def to_unix_time(dt): epoch = datetime.datetime.utcfromtimestamp(0) return (dt - epoch).total_seconds() * 1000 x = [ datetime.datetime(year=2013, month=10, day=04), datetime.datetime(year=2013, month=11, day=05), datetime.datetime(year=2013, month=12, day=06) ] data = [go.Scatter(x=x, y=[1, 3, 6])] layout = go.Layout(xaxis=dict(range=[ to_unix_time(datetime.datetime(2013, 10, 17)), to_unix_time(datetime.datetime(2013, 11, 20)) ])) fig = go.Figure(data=data, layout=layout) plot_url = py.plot(fig, filename='python-datetime-custom-ranges')
inserts.append(tuple(['NG', data_type.name, tag, *insert])) # c.executemany( # 'insert into commodity_future_term_structure (product, type, tag, time, m0, m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', # inserts) # conn.commit() try: c.executemany( 'insert into commodity_future_term_structure (product, type, tag, time, m0, m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', inserts) conn.commit() except: logger.info("cannot insert data") conn.close() if __name__ == '__main__': import datetime datetime_start = datetime.datetime(2017, 1, 1) datetime_end = datetime.datetime(2019, 1, 3) for i in range((datetime_end - datetime_start).days + 1): (df, ) = DbCandleLoader().loadTimeSeries( (CandleDataType.CLOSE, ), dateBegin=datetime_start + timedelta(days=i), dateEnd=datetime_start + timedelta(days=i + 1)) if not df.empty: DbCandleLoader().storeTermStructure(df, CandleDataType.CLOSE, "MIN")
def storeTermStructure(self, data: pd.DataFrame, data_type: CandleDataType,
                       tag: str):
    """Persist a per-minute NG futures term structure into SQLite.

    For every minute between the earliest and latest value in
    ``data['time']``, the first 12 not-yet-rolled contracts are written as
    columns m0..m11 of ``commodity_future_term_structure`` (the table is
    created on first use).

    Fixes vs. original: the bare ``except:`` is narrowed to ``Exception``
    and logged with the traceback; the connection is closed in ``finally``
    so it cannot leak if the pandas processing raises; the locals ``time``
    (which shadowed the ``time`` module) and ``l`` are renamed.

    :param data: frame with a 'time' column plus one column per contract
                 (e.g. 'NGF17'); contracts absent from the frame are NaN.
    :param data_type: candle field being stored; its ``.name`` fills 'type'.
    :param tag: free-form tag stored alongside each row.
    """
    conn = sqlite3.connect(self.db)
    try:
        c = conn.cursor()
        table_exist = '''SELECT name FROM sqlite_master WHERE type='table' AND name='commodity_future_term_structure';'''
        c.execute(table_exist)
        if c.fetchone() is None:
            create_table = '''
            create table commodity_future_term_structure (
                product varchar(32),
                type varchar(32),
                tag varchar(32),
                time timestamp,
                m0 float, m1 float, m2 float, m3 float,
                m4 float, m5 float, m6 float, m7 float,
                m8 float, m9 float, m10 float, m11 float,
                primary key (product, type, tag, time)
            )
            '''
            c.execute(create_table)
        # For simplicity, auto-roll each contract 6 calendar days before the
        # end of the month (approximation of 3 business days -- no business
        # day calendar is loaded yet).  Pairs of (roll_date, contract_name).
        columns = [(datetime.datetime(int('20' + year),
                                      MonthCodes().month_by_code(code), 1)
                    - timedelta(days=6), 'NG' + code + year)
                   for year in ['17', '18', '19']
                   for code in MonthCodes().future_codes_order]
        min_date = data['time'].min()
        max_date = data['time'].max()
        inserts = []
        # Guarantee every known contract column exists (NaN where absent).
        cols_to_add = {name: NaN for _, name in columns
                       if name not in data.columns}
        data = data.assign(**cols_to_add)
        total_minutes = int(
            divmod((max_date - min_date).total_seconds(), 60)[0]) + 1
        for i in range(total_minutes):
            minute_ts = min_date + timedelta(minutes=i)
            row = data[data['time'] == minute_ts].copy().reset_index()
            # First 12 contracts that have not rolled yet at this minute.
            cols = [name for roll_date, name in columns
                    if roll_date >= minute_ts and name in row.columns][:12]
            insert = row[['time', *cols]]
            insert.columns = ['time',
                              *['m' + str(j) for j in range(len(cols))]]
            if not insert.empty:
                # NaN -> None so SQLite stores NULL.
                insert = [
                    None if type(insert[key][0]) == float
                    and math.isnan(insert[key][0]) else insert[key][0]
                    for key in insert
                ]
                insert[0] = insert[0].strftime('%Y-%m-%d %H:%M')
                # NOTE(review): the INSERT below has 16 placeholders; a row
                # with fewer than 12 live contracts yields a shorter tuple
                # and the executemany will fail (original behaved the same).
                inserts.append(tuple(['NG', data_type.name, tag, *insert]))
        try:
            c.executemany(
                'insert into commodity_future_term_structure (product, type, tag, time, m0, m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
                inserts)
            conn.commit()
        except Exception:
            # Narrowed from a bare except; keep best-effort semantics but
            # record the traceback for diagnosis.
            logger.exception("cannot insert data")
    finally:
        conn.close()
def test_commit_w_bound_client(self):
    """commit() flushes all queued entries through the bound client's
    logging API (in queue order) and leaves the batch empty."""
    import json
    import datetime
    from google.protobuf.json_format import MessageToJson
    from google.protobuf.struct_pb2 import Struct
    from google.protobuf.struct_pb2 import Value
    from google.cloud._helpers import _datetime_to_rfc3339
    from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE

    # One payload of each kind: text, struct (JSON) and protobuf.
    TEXT = "This is the entry text"
    STRUCT = {"message": TEXT, "weather": "partly cloudy"}
    message = Struct(fields={"foo": Value(bool_value=True)})
    IID1 = "IID1"
    IID2 = "IID2"
    IID3 = "IID3"
    TIMESTAMP1 = datetime.datetime(2016, 12, 31, 0, 0, 1, 999999)
    TIMESTAMP2 = datetime.datetime(2016, 12, 31, 0, 0, 2, 999999)
    TIMESTAMP3 = datetime.datetime(2016, 12, 31, 0, 0, 3, 999999)
    TRACE1 = "12345678-1234-5678-1234-567812345678"
    TRACE2 = "12345678-1234-5678-1234-567812345679"
    TRACE3 = "12345678-1234-5678-1234-567812345670"
    SPANID1 = "000000000000004a"
    SPANID2 = "000000000000004b"
    SPANID3 = "000000000000004c"
    # Expected wire-format dicts, one per entry, in logging order.
    # Timestamps are serialized to RFC3339 strings by the batch.
    ENTRIES = [
        {
            "textPayload": TEXT,
            "insertId": IID1,
            "timestamp": _datetime_to_rfc3339(TIMESTAMP1),
            "resource": _GLOBAL_RESOURCE._to_dict(),
            "trace": TRACE1,
            "spanId": SPANID1,
            "traceSampled": True,
        },
        {
            "jsonPayload": STRUCT,
            "insertId": IID2,
            "timestamp": _datetime_to_rfc3339(TIMESTAMP2),
            "resource": _GLOBAL_RESOURCE._to_dict(),
            "trace": TRACE2,
            "spanId": SPANID2,
            "traceSampled": False,
        },
        {
            # Protobuf payloads are serialized through MessageToJson.
            "protoPayload": json.loads(MessageToJson(message)),
            "insertId": IID3,
            "timestamp": _datetime_to_rfc3339(TIMESTAMP3),
            "resource": _GLOBAL_RESOURCE._to_dict(),
            "trace": TRACE3,
            "spanId": SPANID3,
            "traceSampled": True,
        },
    ]
    # Stub out the transport so write_entries calls are only recorded.
    client = _Client(project=self.PROJECT)
    api = client.logging_api = _DummyLoggingAPI()
    logger = _Logger()
    batch = self._make_one(logger, client=client)
    # Queue one entry of each payload kind.
    batch.log_text(
        TEXT,
        insert_id=IID1,
        timestamp=TIMESTAMP1,
        trace=TRACE1,
        span_id=SPANID1,
        trace_sampled=True,
    )
    batch.log_struct(
        STRUCT,
        insert_id=IID2,
        timestamp=TIMESTAMP2,
        trace=TRACE2,
        span_id=SPANID2,
        trace_sampled=False,
    )
    batch.log_proto(
        message,
        insert_id=IID3,
        timestamp=TIMESTAMP3,
        trace=TRACE3,
        span_id=SPANID3,
        trace_sampled=True,
    )
    batch.commit()
    # commit() must drain the batch and forward everything in one call.
    self.assertEqual(list(batch.entries), [])
    self.assertEqual(api._write_entries_called_with,
                     (ENTRIES, logger.full_name, None, None))
def test_log_text_explicit(self):
    """log_text() with every optional field set routes the entry through the
    client passed at call time (not the bound client) and serializes each
    field into the expected wire-format dict."""
    import datetime
    from google.cloud.logging import Resource

    ALT_LOG_NAME = "projects/foo/logs/alt.log.name"
    TEXT = "TEXT"
    DEFAULT_LABELS = {"foo": "spam"}
    LABELS = {"foo": "bar", "baz": "qux"}
    IID = "IID"
    SEVERITY = "CRITICAL"
    TRACE = "12345678-1234-5678-1234-567812345678"
    SPANID = "000000000000004a"
    REQUEST = {
        "requestMethod": "POST",
        "requestUrl": "https://api.example.com/endpoint",
        "status": "500",
    }
    TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999)
    RESOURCE = Resource(
        type="gae_app",
        labels={"module_id": "default", "version_id": "test"},
    )
    # Wire-format entry the dummy API should receive; the datetime above
    # is expected to serialize to the RFC3339 string below.
    expected_entry = {
        "logName": ALT_LOG_NAME,
        "textPayload": TEXT,
        "labels": LABELS,
        "insertId": IID,
        "severity": SEVERITY,
        "httpRequest": REQUEST,
        "timestamp": "2016-12-31T00:01:02.999999Z",
        "resource": RESOURCE._to_dict(),
        "trace": TRACE,
        "spanId": SPANID,
        "traceSampled": True,
    }

    # Bind the logger to one client, but pass a different client (whose
    # API is stubbed) to the call itself.
    bound_client = _Client(self.PROJECT)
    call_client = _Client(self.PROJECT)
    api = call_client.logging_api = _DummyLoggingAPI()
    logger = self._make_one(
        self.LOGGER_NAME, client=bound_client, labels=DEFAULT_LABELS
    )

    logger.log_text(
        TEXT,
        log_name=ALT_LOG_NAME,
        client=call_client,
        labels=LABELS,
        insert_id=IID,
        severity=SEVERITY,
        http_request=REQUEST,
        timestamp=TIMESTAMP,
        resource=RESOURCE,
        trace=TRACE,
        span_id=SPANID,
        trace_sampled=True,
    )

    self.assertEqual(
        api._write_entries_called_with,
        ([expected_entry], None, None, None),
    )
)
>>> time.localtime()
time.struct_time(
    tm_year=2021, tm_mon=3, tm_mday=28, tm_hour=15,
    tm_min=23, tm_sec=50, tm_wday=6, tm_yday=87, tm_isdst=1
)
>>> time.time()
1616941458.149149

# datetime, timezones and timedeltas
>>> now = datetime.now()
>>> utcnow = datetime.utcnow()
>>> now
datetime.datetime(2021, 3, 28, 15, 25, 16, 258274)
>>> utcnow
datetime.datetime(2021, 3, 28, 14, 25, 22, 918195)
>>> now.date()
datetime.date(2021, 3, 28)
>>> now.day, now.month, now.year
(28, 3, 2021)
>>> now.date() == date.today()
True
>>> now.time()
datetime.time(15, 25, 16, 258274)
>>> now.hour, now.minute, now.second, now.microsecond
(15, 25, 16, 258274)
>>> now.ctime()
'Sun Mar 28 15:25:16 2021'
>>> now.isoformat()
def translate_date(yyyy, mm, dd, hh, mi):
    """Return the given local date/time as JavaScript-style epoch milliseconds.

    Each component is coerced with int(), so numeric strings are accepted.
    """
    moment = datetime.datetime(int(yyyy), int(mm), int(dd), int(hh), int(mi))
    # time.mktime interprets the tuple in the local timezone; JS wants ms.
    return int(time.mktime(moment.timetuple())) * 1000
def datetime_to_int(t):
    """Return the seconds between naive datetime *t* and the Unix epoch.

    Note: despite the name, the result is a float (may carry fractional
    seconds); negative for datetimes before 1970-01-01.
    """
    epoch = datetime.datetime(1970, 1, 1)
    delta = t - epoch
    return delta.total_seconds()