def parseMeetString(meetString):
    # Parse a registrar-style meeting string into (days, start, end, building, room).
    m, t, w, r, f = 0, 0, 0, 0, 0
    meetString = str(meetString).split()

    # Re-join any tokens after the third back into the location field.
    if len(meetString) > 3:
        for i in range(3, len(meetString)):
            meetString[2] = meetString[2] + ' ' + meetString[i]
        meetString = meetString[:3]

    # str.find() returns -1 (truthy) when the letter is absent, so test
    # membership instead of truthiness of find().
    days = meetString[0]
    if "M" in days:
        m = 1
    if "T" in days:
        t = 1
    if "W" in days:
        w = 1
    if "R" in days:
        r = 1
    if "F" in days:
        f = 1

    time = meetString[1][:13]
    startTime, endTime = time.split('-')
    startTime += 'M'
    endTime += 'M'
    startTime = datetime.time(datetime.strptime(startTime, "%I:%M%p"))
    endTime = datetime.time(datetime.strptime(endTime, "%I:%M%p"))

    building = ''.join([i for i in meetString[2] if not i.isdigit()])
    # filter() returns an iterator on Python 3, so join it back into a string.
    roomNumber = ''.join(filter(str.isdigit, meetString[2]))

    return [m, t, w, r, f], startTime, endTime, building, roomNumber
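# A minimal usage sketch for parseMeetString. The input format below
# ("days time-range building room") is an assumption inferred from the parsing
# logic, not confirmed by the snippet; requires `from datetime import datetime`.
if __name__ == "__main__":
    days, start, end, building, room = parseMeetString("MWF 10:30A-11:20A SAV 264")
    print(days)      # [1, 0, 1, 0, 1]
    print(start)     # 10:30:00
    print(end)       # 11:20:00
    print(building)  # 'SAV '
    print(room)      # '264'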
def test_closes_at_start(self):
    """
    Tests search for a spot that closes at exactly the time the search
    range begins. Search range: 13:00 - 14:00.

    This should return spot 2 and 6, but don't assert as it matches a
    valid search outside the scope of this test.
    """
    start_query_time = datetime.time(self.now + timedelta(hours=4))
    start_query_time = start_query_time.strftime("%H:%M")
    start_query_day = self.day_dict[self.today]
    start_query = "%s,%s" % (start_query_day, start_query_time)

    end_query_time = datetime.time(self.now + timedelta(hours=5))
    end_query_time = end_query_time.strftime("%H:%M")
    end_query_day = self.day_dict[self.today]
    end_query = "%s,%s" % (end_query_day, end_query_time)

    client = Client()
    response = client.get(
        "/api/v1/spot",
        {'fuzzy_hours_start': start_query, 'fuzzy_hours_end': end_query})
    spots = json.loads(response.content)

    self.assertEqual(response.status_code, 200)
    self.assertFalse(self.spot1.json_data_structure() in spots)
    # don't assert spot2, see above docstring
    self.assertFalse(self.spot3.json_data_structure() in spots)
    self.assertFalse(self.spot4.json_data_structure() in spots)
    self.assertFalse(self.spot5.json_data_structure() in spots)
def run(self):
    # print self.getQueue()
    while len(urlQueue.queue):
        url = urlQueue.getUrl()
        print 'URL : ', url
        file_name = url.split('/')[-1]
        # if not os.path.isfile(file_name):
        try:
            u = urllib2.urlopen(url)
        except:
            continue
        f = open(file_name, 'wb')
        meta = u.info()
        print url
        print meta
        # file_size = int(meta.getheaders("Content-Length")[0])
        print "Downloading: %s" % (file_name)
        print datetime.time(datetime.now())
        # f.write(u.read())
        # file_size_dl = 0
        block_sz = 8192
        while True:
            buffer = u.read(block_sz)
            if not buffer:
                break
            # file_size_dl += len(buffer)
            f.write(buffer)
            print "Downloading: %s" % (file_name)
        f.close()
        p = open('downloaded.txt', 'a')
        p.write(file_name)
        p.close()
        print datetime.time(datetime.now())
def generateStartTimeBasedOnFreq(self, busLine, capacity, frequency, startTime):
    """ Generate all the trips within a time slice given a single starting time

    Args:
        busLine: an integer representing the bus line ID
        frequency: the headway in minutes between successive buses
        startTime: a datetime object representing the start time within the time slice

    Return:
        an array containing all the starting times for the bus trips within
        the corresponding time slice.
    """
    # make sure the starting time is in between the upper and lower bound of our time slices
    startTimeArray = []
    lineTimes = {}
    for x in DB.timeSliceArray:
        start = datetime.datetime.combine(Fitness.yesterday, datetime.time(x[0], 0, 0))
        end = datetime.datetime.combine(Fitness.yesterday, datetime.time(x[1], 59, 59))
        if start <= startTime <= end:
            nextStartTime = startTime + datetime.timedelta(minutes=frequency)
            nextStartTime2 = startTime - datetime.timedelta(minutes=frequency)
            startTimeArray.append([startTime, capacity])
            if nextStartTime <= end:
                startTimeArray.append([nextStartTime, capacity])
            if nextStartTime2 >= start:
                startTimeArray.append([nextStartTime2, capacity])
            while nextStartTime <= end:
                nextStartTime = nextStartTime + datetime.timedelta(minutes=frequency)
                if nextStartTime <= end:
                    startTimeArray.append([nextStartTime, capacity])
            while nextStartTime2 >= start:
                nextStartTime2 = nextStartTime2 - datetime.timedelta(minutes=frequency)
                if nextStartTime2 >= start:
                    startTimeArray.append([nextStartTime2, capacity])
    return sorted(startTimeArray)
def test_open_and_close_after_range(self): """ Tests search for a spot that opens and closes after the search range. Search range: 10:00 - 11:00 This should return spot 1, but don't assert as it satisfies a valid case outside the scope of this test. """ start_query_time = datetime.time(self.now + timedelta(hours=1)) start_query_time = start_query_time.strftime("%H:%M") start_query_day = self.day_dict[self.today] start_query = "%s,%s" % (start_query_day, start_query_time) end_query_time = datetime.time(self.now + timedelta(hours=2)) end_query_time = end_query_time.strftime("%H:%M") end_query_day = self.day_dict[self.today] end_query = "%s,%s" % (end_query_day, end_query_time) client = Client() response = client.get( "/api/v1/spot", {'fuzzy_hours_start': start_query, 'fuzzy_hours_end': end_query}) spots = json.loads(response.content) self.assertEqual(response.status_code, 200) # don't assert spot1, see above docstring. self.assertFalse(self.spot2.json_data_structure() in spots) self.assertFalse(self.spot3.json_data_structure() in spots) self.assertFalse(self.spot4.json_data_structure() in spots) self.assertFalse(self.spot5.json_data_structure() in spots) self.assertFalse(self.spot6.json_data_structure() in spots)
def extract_features(curr_point, previous_point):
    """ points are given as dicts with keys
    (arrival_time, departure_time, lat, lng, cluster) """
    current_cluster = [0] * len(cluster_to_index)
    previous_cluster = [0] * len(cluster_to_index)
    current_cluster[cluster_to_index[curr_point['cluster']]] = 1
    previous_cluster[cluster_to_index[previous_point['cluster']]] = 1

    # Avoid shadowing the datetime name with the parsed value.
    departure = dateutil.parser.parse(curr_point['departure_time'])
    weekday = departure.weekday()

    day_of_week = [-1] * 7
    day_of_week[weekday] = 1

    hour_bin_size = 6
    # Use integer division so this also works on Python 3.
    hour_bin = [-1] * (24 // hour_bin_size)
    hour_bin[departure.time().hour // hour_bin_size] = 1

    # Compare the weekday number, not the one-hot list.
    is_weekend = 1 if weekday in (5, 6) else -1
    ispm = 1 if departure.time().hour >= 12 else -1
    mwf = 1 if weekday in (0, 2, 4) else -1

    features = day_of_week + hour_bin + [is_weekend, ispm]
    features += current_cluster
    # features += previous_cluster
    return features
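# A minimal sketch of how extract_features might be driven. cluster_to_index is
# assumed to be a module-level mapping from cluster labels to feature indices,
# and the point dicts below are illustrative values, not taken from the snippet.
import dateutil.parser

cluster_to_index = {'home': 0, 'work': 1, 'gym': 2}

prev = {'departure_time': '2020-03-02T08:15:00', 'cluster': 'home'}
curr = {'departure_time': '2020-03-02T17:45:00', 'cluster': 'work'}

vec = extract_features(curr, prev)
# 7 weekday slots + 4 hour bins + [is_weekend, ispm] + one-hot current cluster
print(len(vec), vec)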
def test_spot_opening_within_range(self):
    """
    Tests search for a spot that opens during the search range.
    This should return spot 1. Search range: today 7:00 - 11:00.
    """
    start_query_time = datetime.time(self.now - timedelta(hours=2))
    start_query_time = start_query_time.strftime("%H:%M")
    start_query_day = self.day_dict[self.today]
    start_query = "%s,%s" % (start_query_day, start_query_time)

    end_query_time = datetime.time(self.now + timedelta(hours=2))
    end_query_time = end_query_time.strftime("%H:%M")
    end_query_day = self.day_dict[self.today]
    end_query = "%s,%s" % (end_query_day, end_query_time)

    client = Client()
    response = client.get(
        "/api/v1/spot",
        {'fuzzy_hours_start': start_query, 'fuzzy_hours_end': end_query})
    spots = json.loads(response.content)

    self.assertEqual(response.status_code, 200)
    self.assertTrue(self.spot1.json_data_structure() in spots)
    self.assertFalse(self.spot2.json_data_structure() in spots)
    self.assertFalse(self.spot3.json_data_structure() in spots)
    self.assertFalse(self.spot4.json_data_structure() in spots)
    self.assertFalse(self.spot5.json_data_structure() in spots)
    self.assertFalse(self.spot6.json_data_structure() in spots)
def test_spot_open_hours_span_entire_range(self):
    """
    Tests search for a spot that opens before the search start time and
    closes after the search end time on the same day.
    Search range: 13:00 - 15:00. This should return spot 2.
    """
    start_query_time = datetime.time(self.now + timedelta(hours=4))
    start_query_time = start_query_time.strftime("%H:%M")
    start_query_day = self.day_dict[self.today]
    start_query = "%s,%s" % (start_query_day, start_query_time)

    end_query_time = datetime.time(self.now + timedelta(hours=6))
    end_query_time = end_query_time.strftime("%H:%M")
    end_query_day = self.day_dict[self.today]
    end_query = "%s,%s" % (end_query_day, end_query_time)

    client = Client()
    response = client.get(
        "/api/v1/spot",
        {'fuzzy_hours_start': start_query, 'fuzzy_hours_end': end_query})
    spots = json.loads(response.content)

    self.assertEqual(response.status_code, 200)
    self.assertFalse(self.spot1.json_data_structure() in spots)
    self.assertTrue(self.spot2.json_data_structure() in spots)
    self.assertFalse(self.spot3.json_data_structure() in spots)
    self.assertFalse(self.spot4.json_data_structure() in spots)
    self.assertFalse(self.spot5.json_data_structure() in spots)
    self.assertTrue(self.spot6.json_data_structure() in spots)
def test_spot_closing_within_range(self): """ Tests search for a spot that closes during the search range. Search range: today 11:00 - 14:00 This should return spot 1 and 2, we don't test for 2 or 6 because it's returned for a valid reason that is outside the scope of this test. """ start_query_time = datetime.time(self.now + timedelta(hours=2)) start_query_time = start_query_time.strftime("%H:%M") start_query_day = self.day_dict[self.today] start_query = "%s,%s" % (start_query_day, start_query_time) end_query_time = datetime.time(self.now + timedelta(hours=5)) end_query_time = end_query_time.strftime("%H:%M") end_query_day = self.day_dict[self.today] end_query = "%s,%s" % (end_query_day, end_query_time) client = Client() response = client.get( "/api/v1/spot", {'fuzzy_hours_start': start_query, 'fuzzy_hours_end': end_query}) spots = json.loads(response.content) self.assertEqual(response.status_code, 200) self.assertTrue(self.spot1.json_data_structure() in spots) # Don't assert on spot2, see above docstring. self.assertFalse(self.spot3.json_data_structure() in spots) self.assertFalse(self.spot4.json_data_structure() in spots) self.assertFalse(self.spot5.json_data_structure() in spots)
def test_to_json_date_time_fields(self): """Check that Date, DateTime and Time Fields return correct ISO-formatted dates from trailguide.api.transform.to_json""" class TestDateTime(models.Model): the_date = models.DateField() the_datetime = models.DateTimeField() the_time = models.TimeField() now = datetime.now() instance = TestDateTime( the_date=datetime.date(now), the_datetime=now, the_time=datetime.time(now) ) result = to_json(instance) self.assertIsInstance( result, str, "trailguide.api.transform.to_json did not return a string when testing DateTime fields." ) self.assertEqual( json.loads(result), { "id": None, "the_date": datetime.date(now).isoformat(), "the_datetime": now.isoformat(), "the_time": datetime.time(now).isoformat() }, "trailguide.api.transform.to_json did not return correctly formatted ISO date strings" )
def test_open_within_range_and_close_within_range_next_day(self): """ Tests search for a spot that opens within the search range and closes within the search range the next day. Search range: today 18:00 - tomorrow 9:00 This should return spot 3, 4, and 5, but don't assert spot3 as it is returned for a valid reason outside of the scope of this test. """ start_query_time = datetime.time(self.now + timedelta(hours=9)) start_query_time = start_query_time.strftime("%H:%M") start_query_day = self.day_dict[self.today] start_query = "%s,%s" % (start_query_day, start_query_time) end_query_time = datetime.time(self.now) end_query_time = end_query_time.strftime("%H:%M") end_query_day = self.day_dict[self.tomorrow] end_query = "%s,%s" % (end_query_day, end_query_time) client = Client() response = client.get( "/api/v1/spot", {'fuzzy_hours_start': start_query, 'fuzzy_hours_end': end_query}) spots = json.loads(response.content) self.assertEqual(response.status_code, 200) self.assertFalse(self.spot1.json_data_structure() in spots) self.assertFalse(self.spot2.json_data_structure() in spots) # Don't assert spot3, see reason in docstring above self.assertTrue(self.spot4.json_data_structure() in spots) self.assertTrue(self.spot5.json_data_structure() in spots)
def updateGUI(base, top, sock): global nextStatus global currentStatus global time1 if nextStatus != ClientStatus.noUpdate: print(nextStatus) time.sleep(1) for child in base.winfo_children(): child.destroy() print(nextStatus) if nextStatus == ClientStatus.offline: makeOfflineWindow(base, top, sock) if nextStatus == ClientStatus.inLobby: makeInLobbyWindow(base, top, sock) if nextStatus == ClientStatus.inRoom: makeInRoomWindow(base, top, sock) if nextStatus == ClientStatus.attacking: makeAttackingWindow(base, top, sock) time1 = datetime.time(datetime.now()) if nextStatus == ClientStatus.defending: makeDefendingWindow(base, top, sock) time1 = datetime.time(datetime.now()) if nextStatus == ClientStatus.waiting: makeWaitingWindow(base, top, sock) if nextStatus == ClientStatus.playingActions: makePlayActionWindow(base, top, sock) if nextStatus == ClientStatus.endGame: makeGameOverWindow(base, top, sock) currentStatus = nextStatus nextStatus = ClientStatus.noUpdate
def do_retweets():
    keywords = os.path.join(config.local_folder, "keywords.txt")

    # build the query
    query = "select screen_name, tweet_id, created_at, status from tuits where "
    for line in codecs.open(keywords, "r", "utf8").readlines():
        line = line.strip()
        query += "status like '%" + line + "%' OR "
    query = re.sub(" OR $", "", query)
    query += "AND retweeted='' collate NOCASE order by tweet_id desc"
    query += " limit 5"
    # print query

    # advertising is forbidden starting from this date
    DATE_LIMIT = datetime(2014, 1, 24, 0, 0)
    FROM_TIME = datetime.time(datetime.strptime("8:00", "%H:%M")).hour
    now = datetime.time(datetime.now()).hour

    # retweet between 8 and 23 hours
    retweeted = []
    if now - FROM_TIME < 15:
        dbfile = os.path.join(config.local_folder, "tuits.db")
        db = dataset.connect("sqlite:///" + dbfile)
        table = db['tuits']
        res = db.query(query)
        for i in res:
            i['retweeted'] = "yes"
            retweeted.append(i)
        if len(retweeted) > 0:
            for i in retweeted:
                table.update(i, ['tweet_id'])
                retweet(i)
                print "Retweeted %s" % i['status']
                sleep(6)
def test_opens_at_end(self): """ Tests search for a spot that opens at exactly the time the search range ends. Search range: 4:00 - 10:00 This should NOT return the spot. Returns spot5 but don't assert against spot5 as it is returned as a valid result but for reasons out of scope of this test. """ start_query_time = datetime.time(self.now - timedelta(hours=5)) start_query_time = start_query_time.strftime("%H:%M") start_query_day = self.day_dict[self.today] start_query = "%s,%s" % (start_query_day, start_query_time) end_query_time = datetime.time(self.now + timedelta(hours=1)) end_query_time = end_query_time.strftime("%H:%M") end_query_day = self.day_dict[self.today] end_query = "%s,%s" % (end_query_day, end_query_time) client = Client() response = client.get( "/api/v1/spot", {'fuzzy_hours_start': start_query, 'fuzzy_hours_end': end_query}) spots = json.loads(response.content) self.assertEqual(response.status_code, 200) self.assertFalse(self.spot1.json_data_structure() in spots) self.assertFalse(self.spot2.json_data_structure() in spots) self.assertFalse(self.spot3.json_data_structure() in spots)
def blacklist_whitelist_notification(notice_type):
    """
    Send email notification when a destination number matches the blacklist
    or whitelist.

    **Usage**:

        blacklist_whitelist_notification.delay(notice_type)
    """
    if notice_type == NOTICE_TYPE.blacklist_prefix:
        notice_type_name = "blacklist"
    if notice_type == NOTICE_TYPE.whitelist_prefix:
        notice_type_name = "whitelist"

    logger = blacklist_whitelist_notification.get_logger()
    logger.info("TASK :: %s_notification called" % notice_type_name)
    notice_type_obj = notification.NoticeType.objects.get(default=notice_type)
    try:
        notice_obj = notification.Notice.objects.filter(notice_type=notice_type_obj).latest("added")
        # Get the time difference between the two time values
        prevtime = str(datetime.time(notice_obj.added.replace(microsecond=0)))
        curtime = str(datetime.time(datetime.now().replace(microsecond=0)))
        FMT = "%H:%M:%S"
        diff = datetime.strptime(curtime, FMT) - datetime.strptime(prevtime, FMT)
        # if the difference is more than X minutes, resend the notification
        if int(diff.seconds / 60) >= settings.DELAY_BETWEEN_MAIL_NOTIFICATION:
            # blacklist notification type - 3 | whitelist notification type - 4
            notify_admin_without_mail(notice_type, "*****@*****.**")
    except:
        # blacklist notification type - 3 | whitelist notification type - 4
        notify_admin_without_mail(notice_type, "*****@*****.**")
    logger.debug("TASK :: %s_notification finished" % notice_type_name)
    return True
def test_span_late_night(self): """ Tests a search range where the spot's open time is before the start on one day, and the close time is beyond the end of range on the next day. Search range: today 20:00 - tomorrow 2:00 """ start_query_time = datetime.time(self.now + timedelta(hours=11)) start_query_time = start_query_time.strftime("%H:%M") start_query_day = self.day_dict[self.today] start_query = "%s,%s" % (start_query_day, start_query_time) end_query_time = datetime.time(self.now - timedelta(hours=7)) end_query_time = end_query_time.strftime("%H:%M") end_query_day = self.day_dict[self.tomorrow] end_query = "%s,%s" % (end_query_day, end_query_time) client = Client() response = client.get( "/api/v1/spot", {'fuzzy_hours_start': start_query, 'fuzzy_hours_end': end_query}) spots = json.loads(response.content) self.assertEqual(response.status_code, 200) self.assertFalse(self.spot1.json_data_structure() in spots) self.assertFalse(self.spot2.json_data_structure() in spots) # Don't assert spot3, see docstring above self.assertTrue(self.spot4.json_data_structure() in spots)
def test_open_outside_range_and_close_within_range_next_day(self): """ Tests a search range that spans midnight. This should return spot 3, 4, and 5. Search range: today 20:00 - tomorrow 9:00 Don't assert against spot3 and spot5 as it is returned as a valid result but for reasons out of scope of this test. """ start_query_time = datetime.time(self.now + timedelta(hours=11)) start_query_time = start_query_time.strftime("%H:%M") start_query_day = self.day_dict[self.today] start_query = "%s,%s" % (start_query_day, start_query_time) end_query_time = datetime.time(self.now) end_query_time = end_query_time.strftime("%H:%M") end_query_day = self.day_dict[self.tomorrow] end_query = "%s,%s" % (end_query_day, end_query_time) client = Client() response = client.get( "/api/v1/spot", {'fuzzy_hours_start': start_query, 'fuzzy_hours_end': end_query}) spots = json.loads(response.content) self.assertEqual(response.status_code, 200) self.assertFalse(self.spot1.json_data_structure() in spots) self.assertFalse(self.spot2.json_data_structure() in spots) # Don't assert spot3, see docstring above self.assertTrue(self.spot4.json_data_structure() in spots)
def test_close_within_late_night_search(self): """ Tests a search range that crosses midnight, with a spot that closes during the first half of that range. (SPOT-2228) """ start_query_time = datetime.time(self.now + timedelta(hours=13)) start_query_time = start_query_time.strftime("%H:%M") start_query_day = self.day_dict[self.today] start_query = "%s,%s" % (start_query_day, start_query_time) end_query_time = datetime.time(self.now - timedelta(hours=4)) end_query_time = end_query_time.strftime("%H:%M") end_query_day = self.day_dict[self.tomorrow] end_query = "%s,%s" % (end_query_day, end_query_time) client = Client() response = client.get( "/api/v1/spot", {'fuzzy_hours_start': start_query, 'fuzzy_hours_end': end_query}) spots = json.loads(response.content) self.assertEqual(response.status_code, 200) self.assertFalse(self.spot1.json_data_structure() in spots) self.assertFalse(self.spot2.json_data_structure() in spots) # Don't assert spot3, see docstring above self.assertTrue(self.spot4.json_data_structure() in spots) # Don't assert spot5, see docstring above self.assertTrue(self.spot6.json_data_structure() in spots)
def test_multiple_fuzzy_ranges(self): start_query_time = datetime.time(self.now - timedelta(hours=2)) start_query_time = start_query_time.strftime("%H:%M") start_query_day = self.day_dict[self.today] start_query = "%s,%s" % (start_query_day, start_query_time) end_query_time = datetime.time(self.now + timedelta(hours=2)) end_query_time = end_query_time.strftime("%H:%M") end_query_day = self.day_dict[self.today] end_query = "%s,%s" % (end_query_day, end_query_time) start_query_time2 = datetime.time(self.now + timedelta(hours=2)) start_query_time2 = start_query_time2.strftime("%H:%M") start_query_day2 = self.day_dict[self.today] start_query2 = "%s,%s" % (start_query_day2, start_query_time2) end_query_time2 = datetime.time(self.now + timedelta(hours=4)) end_query_time2 = end_query_time2.strftime("%H:%M") end_query_day2 = self.day_dict[self.today] end_query2 = "%s,%s" % (end_query_day2, end_query_time2) client = Client() response = client.get( "/api/v1/spot", {'fuzzy_hours_start': [start_query, start_query2], 'fuzzy_hours_end': [end_query, end_query2]}) spots = json.loads(response.content) self.assertEqual(response.status_code, 200) self.assertTrue(self.spot1.json_data_structure() in spots) self.assertTrue(self.spot2.json_data_structure() in spots) self.assertFalse(self.spot3.json_data_structure() in spots) self.assertFalse(self.spot4.json_data_structure() in spots) self.assertFalse(self.spot5.json_data_structure() in spots) self.assertTrue(self.spot6.json_data_structure() in spots)
def nop_form(request):
    context = {}
    date = None
    time = None
    if request.method == 'GET' \
            and ('date' in request.GET or 'time' in request.GET):
        form = NopForm(request.GET)
        if form.is_valid():
            date = form.cleaned_data['date']
            time = form.cleaned_data['time']
    else:
        form = NopForm(initial={'date': datetime.date(datetime.now()),
                                'time': datetime.time(datetime.now())})
    if not date:
        date = datetime.date(datetime.now())
    if not time:
        time = datetime.time(datetime.now())
    result = _bydate(date.year, date.month, date.day, time.hour, time.minute)
    context['nowplaying'] = result
    context['form'] = form
    return render_to_response('nop_form.html', context)
def runOnce(self): db = DB() # Setting the start time boundary of request that we want startTime = datetime.datetime.combine(Fitness.yesterday, datetime.datetime.strptime(Fitness.firstMinute, Fitness.formatTime).time()) # Setting the end time boundary of request that we want endTime = datetime.datetime.combine(Fitness.yesterday, datetime.datetime.strptime(Fitness.lastMinute, Fitness.formatTime).time()) # Create index for the people going on the bus Fitness.request = db.grpReqByBusstopAndTime(startTime, endTime) self.createRequestIndex(Fitness.request) # Create index for the people going down the bus Fitness.requestOut = db.getReqCountByEndBusStop(startTime, endTime) self.createRequestIndexOut(Fitness.requestOut) #<--------------------------------Functions for new encoding including multiple line----------------------------------> busLines = set(db.busLine) for line in busLines: for x in db.timeSliceArray: start = datetime.datetime.combine(Fitness.yesterday,datetime.time(x[0], 0, 0)) end = datetime.datetime.combine(Fitness.yesterday, datetime.time(x[1], 59, 59)) requestBetweenTimeSlices = db.getTravelRequestBetween(start, end, line) for count in enumerate(requestBetweenTimeSlices, start=1): countingNoOfRequest = (count[0]) try: finalNoReqBetweenTimeSlice = countingNoOfRequest except: print("No requests found for the particular date you desire") Fitness.totalRequestsBusline[(line, start, end)] = finalNoReqBetweenTimeSlice
def test_open_and_close_before_range(self): """ Tests search for a spot that opens and closes before the search range. Search range: 14:00 - 17:00 This should NOT return any spots, except spot6 which is returned for a valid reason outside the scope of this test. """ start_query_time = datetime.time(self.now + timedelta(hours=5)) start_query_time = start_query_time.strftime("%H:%M") start_query_day = self.day_dict[self.today] start_query = "%s,%s" % (start_query_day, start_query_time) end_query_time = datetime.time(self.now + timedelta(hours=8)) end_query_time = end_query_time.strftime("%H:%M") end_query_day = self.day_dict[self.today] end_query = "%s,%s" % (end_query_day, end_query_time) client = Client() response = client.get( "/api/v1/spot", {'fuzzy_hours_start': start_query, 'fuzzy_hours_end': end_query}) spots = json.loads(response.content) self.assertEqual(response.status_code, 200) self.assertFalse(self.spot1.json_data_structure() in spots) self.assertTrue(self.spot2.json_data_structure() in spots) self.assertFalse(self.spot3.json_data_structure() in spots) self.assertFalse(self.spot4.json_data_structure() in spots) self.assertFalse(self.spot5.json_data_structure() in spots)
def set_ics_event(self, ics_file, partner): for event in Calendar.from_ical(ics_file).walk('vevent'): #~ if not event.get('uid'): #~ event.add('uid',reduce(lambda x,y: x ^ y, map(ord, str(event.get('dtstart') and event.get('dtstart').dt or '' + event.get('summary') + event.get('dtend') and event.get('dtend').dt or ''))) % 1024) summary = '' description = unicode(event.get('description', '')) if unicode(event.get('summary')) and len(unicode(event.get('summary'))) < 35: summary = unicode(event.get('summary')) elif len(unicode(event.get('summary'))) >= 35: summary = unicode(event.get('summary'))[:35] if not event.get('description'): description = unicode(event.get('summary')) record = {r[1]:r[2] for r in [ ('dtstart','start_date',event.get('dtstart') and event.get('dtstart').dt.strftime(DEFAULT_SERVER_DATETIME_FORMAT)), ('dtend','stop_date',event.get('dtend') and event.get('dtend').dt.strftime(DEFAULT_SERVER_DATETIME_FORMAT)), #~ ('dtstamp','start_datetime',event.get('dtstamp') and event.get('dtstamp').dt.strftime(DEFAULT_SERVER_DATETIME_FORMAT)), #~ ('description','description',description), ('duration','duration',event.get('duration')), ('location','location',event.get('location') and unicode(event.get('location')) or partner.ics_location), ('class','class',event.get('class') and str(event.get('class')) or partner.ics_class), ('summary','name',summary), ('rrule', 'rrule',event.get('rrule') and event.get('rrule').to_ical() or None), ] if event.get(r[0])} partner_ids = self.env['res.partner'].get_attendee_ids(event) #~ raise Warning(partner_ids) if partner_ids: partner_ids.append(partner.id) else: partner_ids = [partner.id] record['partner_ids'] = [(6,0,[partner_ids])] #~ record['partner_ids'] = [(6,0,self.env['res.partner'].get_attendee_ids(event)[0] and self.env['res.partner'].get_attendee_ids(event)[0].append(partner.id) or [partner.id])] #~ raise Warning(record['partner_ids']) #~ record['attendee_ids'] = [(6,0,[attendee])] record['ics_subscription'] = True record['start'] = record.get('start_date') record['stop'] = record.get('stop_date') or record.get('start') record['description'] = description record['show_as'] = partner.ics_show_as record['allday'] = partner.ics_allday #~ record['rrule'] = event.get('rrule').to_ical() #~ raise Warning(record['rrule_type'].to_ical) tmpStart = datetime.time(datetime.fromtimestamp(mktime(strptime(record['start'], DEFAULT_SERVER_DATETIME_FORMAT)))) tmpStop = datetime.fromtimestamp(mktime(strptime(record['stop'], DEFAULT_SERVER_DATETIME_FORMAT))) if tmpStart == time(0,0,0) and tmpStart == datetime.time(tmpStop): record['allday'] = True if not record.get('stop_date'): record['allday'] = True record['stop_date'] = record['start_date'] elif record.get('stop_date') and record['allday']: record['stop_date'] = vDatetime(tmpStop - timedelta(hours=24)).dt.strftime(DEFAULT_SERVER_DATETIME_FORMAT) record['stop'] = record['stop_date'] _logger.error('ICS %s' % record) self.env['calendar.event'].create(record)
def view_port_clock():
    active = True
    while active == True:
        date_id = p2r.geometry.add_text((datetime.time(datetime.now())), (0, 0, 0), 20)
        print date_id
        p2r.utility.sleep(1000)
        p2r.geometry.text_object_text(date_id, datetime.time(datetime.now()))
    print "done"
def searchGoogle(productName, searchTerm, resultAmount):
    now = datetime.now()
    todayTime = '%02d %02d' % (datetime.time(now).hour, datetime.time(now).minute)
    todayDate = '%04d %02d %02d' % (datetime.date(now).year,
                                    datetime.date(now).month,
                                    datetime.date(now).day)
    # productName = 'archetype'
    # searchTerm = 'archetype vfx pipeline nederhorst torrent'

    urlList = []
    for url in search(searchTerm, stop=resultAmount):
        urlList.append(url)

    accetableSites = [
        'amazon.com', 'thegnomonworkshop.com', 'cgchannel.com', 'vray.info',
        'thefoundry.co.uk', 'youtube.com', 'vimeo.com', 'pixologic.com',
        'twitter.com', 'facebook.com', '3dtotal.com', 'tutsplus.com',
        'gnomonschool.com', 'chillingeffects.org', 'deviantart.com'
    ]

    outData = {
        'product': {
            'name': productName,
            'searchterm': searchTerm,
            'date': todayDate,
            'time': todayTime
        }
    }

    illegalURLs = []
    for url in urlList:
        foundIllegalSite = False
        # for/else: the else branch runs only when no acceptable site matched.
        for goodSite in accetableSites:
            if goodSite.lower() in url.lower():
                break
        else:
            illegalURLs.append(url)
            logger.info('Found : %s' % url)
    # illegalURLs = [targ for targ in urlList if any(re.search(r'(?!\b{})'.format(goodSite), targ, re.I) for goodSite in accetableSites)]
    # print matches

    outData['product']['sites'] = illegalURLs
    return outData
def getTimeSlice(self, startTime):
    '''
    Evaluates the time slice a given starting time in a gene belongs to.
    @param startTime datetime
    @return (start, end) datetime.datetime objects
    '''
    startTimeArray = []
    for x in DB.timeSliceArray:
        start = datetime.datetime.combine(Fitness.yesterday, datetime.time(x[0], 0, 0))
        end = datetime.datetime.combine(Fitness.yesterday, datetime.time(x[1], 59, 59))
        if start <= startTime <= end:
            return start
def test_open_until(self): dummy_cache = cache.get_cache('django.core.cache.backends.dummy.DummyCache') with patch.object(models, 'cache', dummy_cache): # Create a spot that isn't open now but will be in an hour. spot = Spot.objects.create(name="This spot is open later") now = datetime.now() spot_open = datetime.time(now + timedelta(hours=1)) spot_close = datetime.time(now + timedelta(hours=3)) day_lookup = ["su", "m", "t", "w", "th", "f", "sa"] day_num = int(time.strftime("%w", time.localtime())) today = day_lookup[day_num] SpotAvailableHours.objects.create(spot=spot, day=today, start_time=spot_open, end_time=spot_close) # Verify the spot is closed now c = Client() response = c.get("/api/v1/spot", {'open_now': True}) spots = json.loads(response.content) spot_returned = False for s in spots: if s['id'] == spot.pk: spot_returned = True self.assertTrue(not spot_returned, "The spot that is open later is not in the spots open now") # Get a spot that is open until spot_close at_time = datetime.time(now + timedelta(hours=2)) at_time = at_time.strftime("%H:%M") until_time = spot_close.strftime("%H:%M") day_dict = {"su": "Sunday", "m": "Monday", "t": "Tuesday", "w": "Wednesday", "th": "Thursday", "f": "Friday", "sa": "Saturday", } at_query_day = day_dict[today] at_query = "%s,%s" % (at_query_day, at_time) until_query = "%s,%s" % (at_query_day, until_time) response = c.get("/api/v1/spot", {'open_at': at_query, 'open_until': until_query}) spots = json.loads(response.content) spot_returned = False for s in spots: if s['id'] == spot.pk: spot_returned = True self.assertTrue(spot_returned, "Got the spot that is open later")
def create_late(self, user, date):
    late = models.Late(
        user_id=user.id,
        date=date,
        explanation='explanation',
        justified=None,
        late_start=datetime.time(date),
        late_end=datetime.time(date),
        work_from_home=False,
    )
    models.DBSession.add(late)
def log(level, message):
    formatted = '%s#L%s: %s' % (datetime.time(datetime.now()), level, message)
    logfile.write(formatted)
    print formatted
    if level > 19:
        info = {}
        info['time'] = '%s' % datetime.time(datetime.now())
        info['level'] = level
        info['message'] = message
        info['instance_id'] = instance_id
        info['version'] = '3_2'
        info['instance_name'] = instance_name
        beanstalk.put(json.dumps(info, separators=(',', ':')), ttr=10)
def resume_auto(self):
    # Returns system state to autonomous; to be triggered via the scheduler,
    # or via a request hook from the web ui.
    self.state = 'autonomous'
    app.logger.debug("Resume auto called, system state is now : %s" % self.state)
    app.logger.info("Looking to see if current time falls within any events.")
    current_time = datetime.time(datetime.now())
    for event in auto_state_events:
        start_time = datetime.time(datetime.strptime(event['event_start_time'], time_format))
        end_time = datetime.time(datetime.strptime(event['event_end_time'], time_format))
        if current_time > start_time and current_time < end_time:
            app.logger.info("Event : '%s' falls within the current time, executing state." % event['event_name'])
            self.auto_transition(state=event['event_state'])
            break
from datetime import datetime
from datetime import date
from datetime import timedelta

todays_date = date.today()

# print today's date
print("Today's date is", todays_date)
print("Year is", todays_date.year, "Month is", todays_date.month, "Day is", todays_date.day)

todays_datetime = datetime.now()

# print the current date and time
print("The current date and time is", todays_datetime)

time = datetime.time(todays_datetime)

# print the current time
print("Time is", time)

# Printing the day of the week
days = ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday"]
weekday_index = date.weekday(todays_date)
print("Today is", days[weekday_index])

# print the date in formatted ways
print(todays_date.strftime("%A,%d %B,%Y"))
print(todays_date.strftime("%c"))  # prints the local date and time
print(todays_date.strftime("%x"))  # prints the local date
print(todays_date.strftime("%X"))  # prints the local time
# print(todays_date.strftime("%H:%M:%S %P"))
print(d2)
print(type(d2))

oneweekdelta = timedelta(weeks=1)
oneweekhencedate = today + oneweekdelta
print(oneweekhencedate)
print(type(oneweekhencedate))

taskcompletetion = (2, 3)
twoweekdelta = timedelta(weeks=taskcompletetion[0], days=taskcompletetion[1])
twoweekdate = today + twoweekdelta
print(twoweekdate)
print(type(twoweekdate))

now = datetime.now()
print(now)
print(type(now))

nowtime = datetime.time(now)
print(nowtime)
print(type(nowtime))
print(nowtime.hour)
print(nowtime.minute)

print(today)
print(today.strftime('%m/%d/%Y'))
def strToTime(self, t, ms):
    """Convert a time string of the form 'HH:MM:SS' into a datetime.time value."""
    hh, mm, ss = t.split(':')
    tt = datetime.time(int(hh), int(mm), int(ss), microsecond=ms)
    return tt
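# A quick check of strToTime. The snippet only shows the method body, so we
# pass None for self purely for illustration; assumes the module-style
# `import datetime` the constructor call above relies on.
print(strToTime(None, "09:30:05", 250000))  # 09:30:05.250000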
while True: ret, img=cam.read() #img=cv2.resize(img,(340,220)) #convert BGR to HSV imgHSV= cv2.cvtColor(img,cv2.COLOR_BGR2HSV) # create the Mask mask=cv2.inRange(imgHSV,lowerBound,upperBound) cg=cv2.countNonZero(mask) maskb = cv2.inRange(imgHSV, lowblue,highblue) cb=cv2.countNonZero(maskb) print 'Blue Pixels :',cb #change the threshold on these values if cb>500: #threshold of blue pixels from datetime import datetime a=datetime.time(datetime.now()) a=str(a) t=a.split(':') t=t[:2] t[0]=str(abs(int(t[0])-12)) maskOpen1=cv2.morphologyEx(maskb,cv2.MORPH_OPEN,kernelOpen) maskClose1=cv2.morphologyEx(maskOpen1,cv2.MORPH_CLOSE,kernelClose) maskFinal1=maskClose1 conts1,h=cv2.findContours(maskFinal1.copy(),cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_NONE) cptotal1=[] for c in conts1: M = cv2.moments(c)
def environment_info(self): if self._environment_info is None: info = {} info["transformers_version"] = version info["framework"] = self.framework if self.framework == "PyTorch": info["use_torchscript"] = self.args.torchscript if self.framework == "TensorFlow": info["eager_mode"] = self.args.eager_mode info["use_xla"] = self.args.use_xla info["framework_version"] = self.framework_version info["python_version"] = platform.python_version() info["system"] = platform.system() info["cpu"] = platform.processor() info["architecture"] = platform.architecture()[0] info["date"] = datetime.date(datetime.now()) info["time"] = datetime.time(datetime.now()) info["fp16"] = self.args.fp16 info["use_multiprocessing"] = self.args.do_multi_processing info["only_pretrain_model"] = self.args.only_pretrain_model if is_psutil_available(): info["cpu_ram_mb"] = bytes_to_mega_bytes( psutil.virtual_memory().total) else: logger.warning( "Psutil not installed, we won't log available CPU memory." "Install psutil (pip install psutil) to log available CPU memory." ) info["cpu_ram_mb"] = "N/A" info["use_gpu"] = self.args.is_gpu if self.args.is_gpu: info[ "num_gpus"] = 1 # TODO(PVP) Currently only single GPU is supported if is_py3nvml_available(): nvml.nvmlInit() handle = nvml.nvmlDeviceGetHandleByIndex( self.args.device_idx) info["gpu"] = nvml.nvmlDeviceGetName(handle) info["gpu_ram_mb"] = bytes_to_mega_bytes( nvml.nvmlDeviceGetMemoryInfo(handle).total) info[ "gpu_power_watts"] = nvml.nvmlDeviceGetPowerManagementLimit( handle) / 1000 info[ "gpu_performance_state"] = nvml.nvmlDeviceGetPerformanceState( handle) nvml.nvmlShutdown() else: logger.warning( "py3nvml not installed, we won't log GPU memory usage. " "Install py3nvml (pip install py3nvml) to log information about GPU." ) info["gpu"] = "N/A" info["gpu_ram_mb"] = "N/A" info["gpu_power_watts"] = "N/A" info["gpu_performance_state"] = "N/A" info["use_tpu"] = self.args.is_tpu # TODO(PVP): See if we can add more information about TPU # see: https://github.com/pytorch/xla/issues/2180 self._environment_info = info return self._environment_info
for i in range(3, 37): country = cg.cell(row=i, column=1).value Country.append(country) Open_time[country] = cg.cell(row=i, column=4).value Close_time[country] = cg.cell(row=i, column=5).value # ==================================== Read 'Street Account Data' File ==================================== f = openpyxl.load_workbook(path + filename) sheet = f.get_sheet_by_name("Global Ratings") n = sheet.max_row + 1 for i in range(2, n): ny_date = sheet.cell('A' + str(i)).value ny_date = dt.strptime(ny_date, '%m/%d/%y') ny_time = sheet.cell('B' + str(i)).value ny_time = dt.time(dt.strptime(ny_time, '%H:%M')) ny_dt = dt.combine(ny_date, ny_time) country = sheet.cell('H' + str(i)).value try: loc_dt = ny_dt.astimezone(tz[country]) loc_time = dt.time(loc_dt) sheet.cell('D' + str(i)).value = loc_dt.strftime('%m/%d/%y') sheet.cell('E' + str(i)).value = loc_dt.strftime('%H:%M') if loc_time > Open_time[country] and loc_time < Close_time[country]: sheet.cell('AT' + str(i)).value = 'Y' else: sheet.cell('AT' + str(i)).value = 'N' except: continue f.save(filename)
def convertString(data): ''' Converts string received from device via socket as the device is capable of sending data through on server gateway''' try: dg = '' eb = '' today = datetime.today() d = {} keyword = '' bytestrans = '' packet = '' hardver = '' softver = '' imei = '' sim = '' signal = '' lat = '' dlat = '' lon = '' dlon = '' ff = '' crc = '' string = data data_check = data[0:63] n = 2 data = [data[i:i + n] for i in range(0, len(data), n)] print(data) j = 0 for i in data: if j in range(0, 5): keyword = keyword + i if j == 4: print("keyword = ", keyword) keyword = bytearray.fromhex(keyword).decode() elif j in range(5, 6): bytestrans = bytestrans + i print("bytes Transmitted = ", bytestrans) bytestrans = int(bytestrans, 16) elif j in range(6, 7): packet = packet + i print("Packet Received = ", packet) elif j in range(7, 11): hardver = hardver + i if j == 10: print("Hardware Version = ", hardver) hardver = bytearray.fromhex(hardver).decode() elif j in range(11, 15): softver = softver + i if j == 14: print("Software Version = ", softver) softver = bytearray.fromhex(softver).decode() elif j in range(15, 30): imei = imei + i if j == 29: print("Device ID = ", imei) imei = bytearray.fromhex(imei).decode() elif j in range(30, 34): sim = sim + i if j == 33: print("Sim = ", sim) try: sim = bytearray.fromhex(sim).decode() except Exception as e: print("error can't decode sim hex: ", e) elif j in range(34, 35): signal = signal + i print("Signal Strength = ", signal) signal = int(signal, 16) elif j in range(35, 39): lat = lat + i if j == 38: print("Latitude = ", lat) lat = "".join(map(str.__add__, lat[-2::-2], lat[-1::-2])) lat = struct.unpack('!f', lat.decode('hex'))[0] elif j in range(39, 40): dlat = dlat + i print("Direction of LAT = ", dlat) dlat = bytearray.fromhex(dlat).decode() elif j in range(40, 44): lon = lon + i if j == 43: print("Longitude = ", lon) lon = "".join(map(str.__add__, lon[-2::-2], lon[-1::-2])) lon = struct.unpack('!f', lon.decode('hex'))[0] elif j in range(44, 45): dlon = dlon + i print("Direction of LON = ", dlon) dlon = bytearray.fromhex(dlon).decode() elif j in range(45, 49): ff = ff + i if j == 48: print(ff, " got ff") ff = "".join(map(str.__add__, ff[-2::-2], ff[-1::-2])) ff = struct.unpack('!f', ff.decode('hex'))[0] ff = format(ff, '.2f') elif j == 49: year = '20' + str(data[j]) month = data[j + 1] day = data[j + 2] hours = data[j + 3] minutes = data[j + 4] seconds = data[j + 5] print("CHECK here Jyess ;-)") print(year) print(month) print(day) print(hours) print(minutes) print(seconds) try: if year != "2000": today = datetime(int(year), int(month), int(day), int(hours), int(minutes), int(seconds)) print("Date Time by Device = ", str(today)) except Exception as e: print(e) print("YMDHMS") elif j == 55: h_size = len(i) * 4 status = (bin(int(i, 16))[2:]).zfill(h_size) print("old status = ", status) status = status[::-1] print("new status = ", status) dg = status[0] eb = status[1] if dg == '1' and eb == '1': pt = '1' else: pt = '0' supply_batt = status[2] acc_s = status[3] s_bit4 = status[4] s_bit5 = status[5] s_bit6 = status[6] s_bit7 = status[7] print("DG = ", dg) print("EB = ", eb) print("Supply from External Battery = ", supply_batt) print("Accelerometer Status = ", acc_s) print("Future Status bit 4 = ", s_bit4) print("Future Status bit 5 = ", s_bit5) print("Future Status bit 6 = ", s_bit6) print("Future Status bit 7 = ", s_bit7) elif j == 56: fut1 = data[j] fut1 = int(fut1, 16) fut2 = data[j + 1] fut2 = int(fut2, 16) fut3 = data[j + 2] fut3 = int(fut3, 16) fut4 = 
data[j + 3] fut4 = int(fut4, 16) fut5 = data[j + 4] fut5 = int(fut5, 16) fut6 = data[j + 5] fut6 = int(fut6, 16) fut7 = data[j + 6] fut7 = int(fut7, 16) fut_bit = str(fut1) + str(fut2) + str(fut3) + str(fut4) + str( fut5) + str(fut6) + str(fut7) print("Future Byte 1 to 7 = ", fut_bit) elif j in range(63, 65): crc = crc + i if j == 64: print("CRC is ", crc) old_crc = crc crc = int(crc, 16) j += 1 ######################################################################################################################################## d.update({ 'keyword': str(keyword), 'bytestrans': bytestrans, 'packet': packet, 'hardver': str(hardver), 'softver': str(softver), 'ID': str(imei), 'sim': str(sim), 'signal': signal, 'lat': lat, 'lon': lon, 'dlat': str(dlat), 'dlon': str(dlon), 'ff': ff, 'date': str(datetime.date(today)), 'time': str(datetime.time(today).strftime("%H:%M:%S")), 'dg': dg, 'eb': eb, 'supply_batt': supply_batt, 'acc_s': acc_s, 'crc': crc, 'pt': pt, 'Receive_Type': 'INTERNET' }) print(d) date = d.get('date', None) time = d.get('time', None) print("date = ", date) print("time = ", time) decoded = str(keyword) + str( bytestrans ) + packet + hardver + softver + str(imei) + sim + str(signal) + str( lat ) + dlat + str( lon ) + dlon + ff + year + month + day + hours + minutes + seconds + status + fut_bit + str( crc) return decoded except Exception as e: print(e)
def main():
    start_time = datetime.time(datetime.now())
import time

start_time = time.time()
# main()
print("--- %s seconds ---" % (time.time() - start_time))

'''
csv: very convenient for reading and writing csv files
collections: useful extensions of the usual data types including OrderedDict, defaultdict and namedtuple
random: generates pseudo-random numbers, shuffles sequences randomly and chooses random items
string: more functions on strings. This module also contains useful collections of letters like string.digits (a string containing all characters which are valid digits).
re: pattern-matching in strings via regular expressions
math: some standard mathematical functions
os: interacting with operating systems
os.path: submodule of os for manipulating path names
sys: work directly with the Python interpreter
json: good for reading and writing json files (good for web work)
'''

from datetime import datetime
import pytz

utc = pytz.utc
ist = pytz.timezone('Asia/Kolkata')
# datetime.time(tz=utc) is not valid; use datetime.now() to get an aware
# datetime that can then be converted with astimezone().
now = datetime.now(tz=utc)
ist_now = now.astimezone(ist)
print(now)
print(ist_now)
def train(args): # set cuda device ---------------------- os.environ["CUDA_VISIBLE_DEVICES"] = args.device # str today = str(datetime.date(datetime.now())) year_month_date = today.split('-') date_to_save = year_month_date[0][2:] + year_month_date[ 1] + year_month_date[2] # ------------------------------------------------------------------------------ save_dir = args.save_dir + '/%s_results_reducedofficehome' % date_to_save # root directory in which OfficeHomeDataset_10072016 is placed in dataset_root = args.data_dir # use pytorch official ImageNet pre-trained model resnet_model_dir = args.resnet_model_dir + '/resnet50-19c8e357.pth' # ------------------------------------------------------------------------------ init_rate = args.lr # 0.001 num_epochs = args.num_epoch batch_size = args.batch_size cls_lr = args.cls_lr fc_dim = args.fc_dim source = args.source target = args.target w_l2 = args.weight_L2norm w_kl = args.weight_kl nu_sigmoid = args.nu_sigmoid tgt_keep = args.tgt_keep exp_name = 'norm_alea_tgtkeep%.1f_' % ( tgt_keep) + source[0] + '2' + target[0] save_log_file = save_dir + '/train_%s.txt' % (exp_name) os.makedirs(save_dir) if not os.path.exists(save_dir) else None img_resize, img_size = 256, 224 mean, std = [0.485, 0.456, 0.406], [0.229, 0.224, 0.225] train_transforms = transforms.Compose([ transforms.ToPILImage(), transforms.Resize(img_resize), transforms.RandomHorizontalFlip(), transforms.RandomCrop(img_size), # augmentation on the source samples # transforms.RandomAffine(degrees=10, translate=(0.1, 0.1), scale=None, shear=10), # transforms.ColorJitter(brightness=0.3, contrast=0.3, saturation=0.3, hue=0.2), transforms.ToTensor(), transforms.Normalize(mean, std) ]) test_transforms = transforms.Compose([ transforms.ToPILImage(), transforms.Resize(img_resize), transforms.CenterCrop(img_size), transforms.ToTensor(), transforms.Normalize(mean, std) ]) train_src_set = OfficeHomeReducedDataset(dataset_root, source, transforms=train_transforms) train_tgt_set = OfficeHomeReducedDataset(dataset_root, target, transforms=train_transforms) test_set = OfficeHomeReducedDataset(dataset_root, target, transforms=test_transforms) train_size, test_size = len(train_src_set), len(test_set) num_class = test_set.num_class num_batches = min(train_size // batch_size, test_size // batch_size) train_src_loader = DataLoader(train_src_set, sampler=RandomSampler(train_src_set), batch_size=batch_size, drop_last=True, num_workers=2, pin_memory=True) train_tgt_loader = DataLoader(train_tgt_set, sampler=RandomSampler(train_tgt_set), batch_size=batch_size, drop_last=True, num_workers=2, pin_memory=True) test_loader = DataLoader(test_set, sampler=SequentialSampler(test_set), batch_size=batch_size, drop_last=False, num_workers=2, pin_memory=True) # net ----------------------------------------------------------------------------- netG = resnet50backbone(pre_trained=resnet_model_dir) netF = ResAFNAleatoricClassifierV2(channels=(2048, fc_dim, num_class), nu_sigmoid=nu_sigmoid) best_state_dict_G = netG.state_dict() best_state_dict_F = netF.state_dict() netG.cuda().train() netF.cuda().train() netF.apply( weights_init) # classifier initialization as in original AFN code opt_g = optim.SGD(netG.parameters(), lr=init_rate, weight_decay=0.0005) opt_f = optim.SGD(netF.parameters(), lr=init_rate * cls_lr, momentum=0.9, weight_decay=0.0005) criterion_cls = nn.CrossEntropyLoss().cuda() # -------------------------------------------------------------------- l_ce_val, l_norm_val, l_s_kl_val, l_t_kl_val, best_acc = 0.0, 0.0, 0.0, 
0.0, 0.0 bs_keep = int(batch_size * tgt_keep) log_writer = open(save_log_file, 'w') time_now = str(datetime.time(datetime.now()))[:8] log_with_print(log_writer, 'Start time: %s (Date: %s)' % (time_now, date_to_save)) log_with_print(log_writer, '***************************') log_with_print(log_writer, 'Exp: %s (on gpu%s)' % (exp_name, args.device)) log_with_print(log_writer, '***************************') log_with_print(log_writer, 'Data directory: %s' % dataset_root) log_with_print(log_writer, 'Save directory: %s' % save_dir) log_with_print(log_writer, 'learning rate G: %f' % init_rate) log_with_print(log_writer, 'learning rate F: %f' % (init_rate * cls_lr)) log_with_print(log_writer, 'Weight of L2Norm: %f' % w_l2) log_with_print(log_writer, 'Weight of KLD: %f' % w_kl) log_with_print(log_writer, 'nu_sigmoid: %f' % nu_sigmoid) log_with_print(log_writer, 'fc_dim: %d' % fc_dim) log_with_print(log_writer, 'tgt_keep: %d(of%d)' % (bs_keep, batch_size)) log_with_print( log_writer, 'Dataset size: %d %d\tBatch size: %d' % (train_size, test_size, batch_size)) log_with_print(log_writer, 'Start training on %d batches...' % (num_batches)) log_writer.flush() s_epsilons = Normal(torch.tensor([0.0]), torch.tensor([1.0])) src_sigma, tgt_sigma = 0.0, 0.0 start = time.time() for epoch in range(1, num_epochs + 1): it = 1 netG.train() netF.train() for (img_src, label_s, _), (img_tgt, _, _) in zip(train_src_loader, train_tgt_loader): img_src, label_s = img_src.cuda(), label_s.cuda() img_tgt = img_tgt.cuda() opt_g.zero_grad() opt_f.zero_grad() epsilons = s_epsilons.sample( sample_shape=torch.Size([batch_size * 2])).cuda() s_logit, s_mus, s_sigmas = netF.forward_train_sigmoid( netG(img_src), epsilons[:batch_size]) t_logit, t_mus, t_sigmas = netF.forward_train_sigmoid( netG(img_tgt), epsilons[batch_size:]) l_ce = criterion_cls(s_logit, label_s) l_norm = w_l2 * (get_L2norm_loss_self_driven(s_mus) + get_L2norm_loss_self_driven(t_mus)) l_s_kl = 0.5 * (s_mus**2 + s_sigmas**2 - 2.0 * torch.log(s_sigmas) - 1.0).mean() if tgt_keep > 0.0: t_stds = t_sigmas.mean(dim=1).detach() ids = t_stds.sort(descending=False) t_mus = t_mus[ids[1][:bs_keep], :] t_sigmas = t_sigmas[ids[1][:bs_keep], :] l_t_kl = 0.5 * (t_mus**2 + t_sigmas**2 - 2.0 * torch.log(t_sigmas) - 1.0).mean() l_total = l_ce + l_norm + w_kl * (l_s_kl + l_t_kl) l_t_kl_val = float(l_t_kl.item()) else: l_total = l_ce + l_norm + w_kl * l_s_kl l_total.backward() opt_g.step() opt_f.step() if it == num_batches: l_ce_val = float(l_ce.item()) l_norm_val = float(l_norm.item()) l_s_kl_val = float(l_s_kl.item()) # l_t_kl_val = float(l_t_kl.item()) src_sigma = float(s_sigmas.mean().item()) tgt_sigma = float(t_sigmas.mean().item()) break it += 1 netF.eval() netG.eval() correct = 0.0 with torch.no_grad(): for image, label, _ in test_loader: image, label = image.cuda(), label.cuda() output = netF.forward_inference(netG(image)) pred = output.data.max(1)[1] correct += float(pred.eq(label.data).cpu().sum()) acc_ep = float(correct) / float(test_size) * 100.0 if acc_ep > best_acc: best_acc = acc_ep best_state_dict_G = netG.cpu().state_dict() best_state_dict_F = netF.cpu().state_dict() netG.cuda() netF.cuda() time_taken = (time.time() - start) / 60.0 log_with_print( log_writer, 'epoch%02d: l_ce:%f l_norm:%.2f l_s_kl:%f l_t_kl:%f s_sigma:%.4f t_sigma:%.4f test_acc:%.2f/%.2f in %.1fmin' % (epoch, l_ce_val, l_norm_val, l_s_kl_val, l_t_kl_val, src_sigma, tgt_sigma, acc_ep, best_acc, time_taken)) log_writer.flush() time_taken = (time.time() - start) / 3600.0 time_now = 
str(datetime.time(datetime.now()))[:8] log_with_print(log_writer, 'Finish time: %s (Date: %s)' % (time_now, date_to_save)) torch.save(best_state_dict_G, save_dir + '/%s_best_%.2f_netG.pth' % (exp_name, best_acc)) torch.save(best_state_dict_F, save_dir + '/%s_best_%.2f_netF.pth' % (exp_name, best_acc)) log_with_print(log_writer, '\nBest accuracy: %.2f' % best_acc) log_with_print(log_writer, 'Total time taken: %.1f hrs' % time_taken) log_writer.close() print('Finish training')
        # (tail of the sensor() polling loop)
        counter -= 1
        clkLastState = clkState
        time.sleep(.01)

t = threading.Thread(target=sensor)
t.start()

# Main
dc.config_IO()
dc.direction(0)  # 0 or 1 to switch direction
p = dc.config_PWM()
pos = 0

while True:
    h = datetime.time(datetime.now()).hour - 3  # hour adjusted for the timezone offset
    pos = h - 8  # position according to the time of day
    if pos < 0:
        pos = 0
    if pos < 8:
        while counter != pos:
            p.ChangeDutyCycle(40)  # pre-start duty cycle
            time.sleep(.5)
            p.ChangeDutyCycle(60)  # start duty cycle
            time.sleep(.2)
            p.ChangeDutyCycle(0)   # stop movement
            time.sleep(2)
        time.sleep(60)
    else:
        pos = 0
def str2time(s):
    # Parse "HH:MM"; the original had misplaced parentheses and off-by-one slices.
    return datetime.time(hour=int(s[0:2]), minute=int(s[3:5]))
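# Quick check of str2time on a zero-padded "HH:MM" string (format assumed);
# requires the module-style `import datetime` used by the constructor above.
print(str2time("07:05"))  # 07:05:00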
def get_cur_time():
    return datetime.time(datetime.now().replace(microsecond=0))
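# Many snippets in this collection call datetime.time(datetime.now()), i.e. the
# unbound time() method of the datetime class applied to an instance. That
# works, but datetime.now().time() reads more naturally and returns the same
# value; a small deterministic check:
from datetime import datetime

now = datetime.now().replace(microsecond=0)
assert datetime.time(now) == now.time()  # identical results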
# INDIVIDUAL COMPONENTS
print(today.day, today.month, today.year)

# GET WEEKDAY NUMBER (MON=0, SUN=6): you can use it to index a list
print(today.weekday())
# Example:
days = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
print(days[today.weekday()])

# GET TIME INFORMATION
today = datetime.now()
print('Today time is:{}'.format(today))

# GET CURRENT TIME
t = datetime.time(datetime.now())  # gets the time part from the datetime returned by now()
print(t)

# FORMAT THE OUTPUT
now = datetime.now()
print(now)

# To present in the desired format use strftime
print(now.strftime('Year is %Y'))
print(now.strftime('%a,%d %B,%y'))

# LOCAL TIME
print(now.strftime('%c, %x,%X'))
print(now.strftime('Current time: %I: %M:%S %p'))
print(now.strftime('Current time: %H %M'))

# MATH FOR DATE/TIME
for hashtag in hashtagLinReg: numOfTweets = {} numOfRetweets = {} numOfFollowers = {} maxNumOfFollowers = {} numOfFriends = {} numOfStatuses = {} numOfRankScore = {} featureVec = [] labelVec = [] with open('tweet_data/tweets_%s.txt' % hashtag, 'r') as tweetFile: print('Hashtag: %s File Loaded' % hashtag) for tweetLine in tweetFile: tweet = json.loads(tweetLine) t = tweet['firstpost_date'] hour_ = datetime.time(datetime.fromtimestamp(t)).hour minute_ = datetime.time(datetime.fromtimestamp(t)).minute second_ = datetime.time(datetime.fromtimestamp(t)).second tWind = t - minute_ * 60 - second_ if (tWind in numOfTweets): numOfTweets[tWind] += 1 numOfRetweets[tWind] += tweet['metrics']['citations']['total'] numOfFollowers[tWind] += tweet['author']['followers'] if (tweet['author']['followers'] > maxNumOfFollowers[tWind]): maxNumOfFollowers[tWind] = tweet['author']['followers'] numOfFriends[tWind] += tweet['tweet']['user']['friends_count'] numOfStatuses[tWind] += tweet['tweet']['user'][ 'statuses_count'] numOfRankScore[tWind] += tweet['metrics']['ranking_score'] else:
def availableFish(currentTime, currentDate) -> dict:
    fishDict = defaultdict(str)
    fishJson = []
    with open('fish.txt') as fishFile:
        allFishes = json.load(fishFile)
        for x in allFishes.values():
            calcFishes(x, fishDict, fishJson)
    fishFile.close()
    return fishDict, fishJson


if __name__ == '__main__':
    pst = pytz.timezone('America/Los_Angeles')
    currentTime = datetime.time(datetime.now(tz=pst))
    currentDate = datetime.date(datetime.now(tz=pst))
    fishDict, fishJson = availableFish(currentTime, currentDate)
    with open('available_fish.json', 'w') as af:
        json.dump(fishJson, af)
    for x in fishDict:
        print(x, fishDict[x])
def in_schedule_dt(self, datetime: datetime) -> bool:
    return self.in_schedule(datetime.date(), datetime.time())
TradesDF = pd.DataFrame(columns=[ 'Agent#', 'Time Period', 'TradeID', 'Quantity (kWh)', 'Price (c/kWh)', 'Avg. Price for Time Period (c/kwh)', 'Amount paid/received for Trade Transaction (c)' ]) SolarDF = pd.DataFrame(columns=[ 'Agent#', 'Time Period', 'Solar Irradiation (W/m^2)', 'Num_solar panels', 'Solar Output energy (kWh)' ]) SOC_DF = pd.DataFrame(columns=['Agent#', 'Time Period', 'SOC (%)']) Metrics_DF = pd.DataFrame( columns=['Time Period', 'Num Iterations', 'Time (s)']) #admin to start trading period cat = pytz.timezone('Africa/Johannesburg') today = datetime.time(datetime.now(tz=cat)) time = today.hour today = datetime.date(datetime.now(tz=cat)) date = str(today) for i in range(num_agents): web3.eth.defaultAccount = web3.eth.accounts[i + 1] tx_hash = contract.functions.deregisterPeer().transact() web3.eth.waitForTransactionReceipt(tx_hash) print('Peers deregistered') web3.eth.defaultAccount = web3.eth.accounts[0] tx_hash = contract.functions.startTradingPer(date, time).transact() web3.eth.waitForTransactionReceipt(tx_hash) #if ((contract.functions.init().call()) and (contract.functions.iteration_complete().call()==contract.functions.is_optimal().call()==0) and (contract.functions.iteration().call()==contract.functions.localresCounter().call()==0) and (contract.functions.tradeCountIter().call() == contract.functions.trade_penCount().call()== contract.functions.numApprovedTrades().call()==0)): print("Trading period started")
df.loc[df["Language"] == "und"] # In[ ]: df["RT"].value_counts() # In[ ]: df["Time Zone"].value_counts().nlargest(10) # In[ ]: dfU = df["Username"].value_counts().nlargest(10) display(dfU) # In[ ]: dfloc = df["Location"].value_counts().nlargest(10) display(dfloc) # In[ ]: import datetime import math df_time = df["Created at"] df_time = df_time.apply( lambda dt: datetime.time(dt.hour, 15 * (math.floor(dt.minute / 15)))) display(df_time) df_time.value_counts()
except OSError:
    os.mkdir('%s/Logs' % curDir)

logFiles = os.listdir('%s/Logs' % os.getcwd())
numLogs = len(logFiles)
logFiles.sort()

#If there are more than 20 log files, delete the oldest one.
if numLogs > 20 and sys.argv[6] == '1':
    try:
        os.remove('%s/Logs/%s' % (curDir, logFiles[0]))
    except OSError:
        print "Unable to remove oldest logfile."

date = '%s' % datetime.date(datetime.now())
cTime = '%s' % datetime.time(datetime.now())
cTime = cTime.split('.')[0]  # drop microseconds from the log file name
logname = '%s_%s_lp.log' % (date, cTime)
log = open('%s/Logs/%s' % (curDir, logname), 'w+')

lpSock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
lpSock.bind(('127.0.0.1', FRONTEND_PORT))

try:
    subprocess.call([(curDir + '/' + lpArk + '/ThrowUser_LinuxUser'),
                     (curDir + '/' + lpArk + '/blob.lp')],
                    stdout=out,
                    preexec_fn=preexec_fcn)
except OSError:
    print "Unable to locate back end executable. This should be located in Lp/<LP Architecture>"

out.close()
def isTime(hour, minute): time = datetime.time(datetime.now()) currentMinute = time.minute currentHour = time.hour return currentHour == hour and currentMinute == minute
BATCH_SIZE = 128 NB_EPOCH = 40 #40 NB_CLASSES = 10 VERBOSE = 1 VALIDATION_SPLIT = 0.2 OPTIM = RMSprop() OPTIM_SEL = "ADAM" # "RMS", "SGD", "ADAM" if ("SGD" == OPTIM_SEL): OPTIM = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True) elif ("ADAM" == OPTIM_SEL): OPTIM = Adam() else: OPTIM = RMSprop() print("\nStart Time: ", datetime.time(datetime.now()), "\n") #load dataset print("Loading CIFAR10 dataset ...") (X_train, y_train), (X_test, y_test) = cifar10.load_data() print('X_train shape:', X_train.shape) print(X_train.shape[0], 'train samples') print(X_test.shape[0], 'test samples') # convert to categorical print("Converting labels to one-hot ...") Y_train = np_utils.to_categorical(y_train, NB_CLASSES) Y_test = np_utils.to_categorical(y_test, NB_CLASSES) # float and normalization print("Normalize pixels to [0, 1.0] ...") X_train = X_train.astype('float32')
from keras.preprocessing.image import ImageDataGenerator, img_to_array, load_img
import keras.backend as K
from keras.layers import Activation
from keras.utils.generic_utils import get_custom_objects
#from tensorflow.keras import backend as k

# https://www.tensorflow.org/install/gpu - Need CUDA 10.0, cuDNN >=7.4.1, tensorflow >= 1.13
# cuDNN 7.4.1 not available on conda ==> get tensorflow-gpu==1.12.0 with CUDA 9.0 and cuDNN == 7.3.1
# conda install tensorflow-gpu==1.12.0, or pip install tensorflow-gpu==1.12.0
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
sess = tf.Session(config=config)
keras.backend.set_session(sess)

# current date and time
now = datetime.now()
now_t = datetime.time(now)
timestamp = "{}{}_time_{}_{}_{}".format(now.strftime("%b"), now.day, now_t.hour,
                                        now_t.minute, now_t.second)

# The default input size is 224x224.
verbose = 1
seed = 69


def get_callbacks(model_name):
    # Define the model checkpoint callback.
    checkpoint = ModelCheckpoint(
        'dl_weights/%s-epoch{epoch:02d}-acc{acc:.4f}-loss{loss:.4f}'
        '-valacc{val_acc:.4f}-valloss{val_loss:.4f}.hdf5' % model_name,
        monitor='loss',
import re
import sys
import urllib.parse
import webbrowser
from datetime import datetime as dt, timedelta

from dateparser.search import search_dates

# NOTE: d() used below is a date-formatting helper that is not shown in this snippet.


class Entry:
    DEFAULT_EVENT_DUR = timedelta(minutes=30)
    MIDNIGHT = dt.time(dt(1979, 1, 1, 0, 0))
    RE = re.compile(r"""
                    \b
                    (?P<location>in \s+ .+)
                    ($|\bat\b)
                    """, re.X)

    def __init__(self, raw):
        self.raw = str(raw)
        self.__parse__()

    def __parse__(self):
        event = self.raw
        self.dates = search_dates(event, languages=['en'],
                                  settings={'DATE_ORDER': 'DMY'})
        if self.dates is None:
            print("No dates found, boo", file=sys.stderr)
            sys.exit(10)

        # Strip every matched date string out of the event text.
        for matched_text, _ in self.dates:
            event = event.replace(str(matched_text), '')
        event = re.sub(r"\s{2,}", " ", event)
        self.text = event

        # If we can grab a location (has 'in <location>' in it), then set the
        # location and remove that from the event text/title.
        self.location = ''
        r = self.RE.search(self.raw)
        if r:
            self.location = r.group('location')

    def generate_url(self):
        # URL format something like
        # http://www.google.com/calendar/event?ctext=+{query}+&action=TEMPLATE&pprop=HowCreated%3AQUICKA
        # Format is [event] at [time] on [date] in [location]
        # https://calendar.google.com/calendar/render?action=TEMPLATE&
        #   text=foo&
        #   dates=20170101T270000Z/20170101T280000Z&
        #   details=Describe your event.&
        #   location=Event Location&
        #   trp=true
        base = 'https://calendar.google.com/calendar/render'
        quer = {'action': 'TEMPLATE',
                'text': self.text,
                # 'dates': self.dates,  # this now comes from fixate_dates()
                'location': self.location,
                'trp': True}

        # Go off and beat the guessed dates into the format we want.
        # Returns a dict of date/dates: <formatted dates>.
        x = self.fixate_dates()
        quer.update(x)

        self.uri = base + '?' + urllib.parse.urlencode(quer)
        # Python 2: self.uri = base + '?' + urllib.urlencode(quer)
        return self.uri

    # Actually open it in a web browser
    def open_url(self):
        self.generate_url()  # generate the URI in the first place.
        # print("opening " + str(self.uri))
        webbrowser.open(self.uri)

    # dateparser will return some date/times grabbed from the input. It will do
    # its best to make sense of them. This is the function that does that. A lot
    # of gross magic happens here.
    # Returns a dict of date/dates: <formatted dates>.
    def fixate_dates(self):
        x = {}
        if len(self.dates) == 1:
            lone_date = self.dates[0][1]
            if dt.time(lone_date) == self.MIDNIGHT:
                # this blindly assumes I do nothing at midnight. I know.
                # all-day event, just has the day.
                x = {'date': d(lone_date)}
            else:
                # regular event, put a finish time of whatever duration we set,
                # so it knows how long to make the event.
                d2 = lone_date + self.DEFAULT_EVENT_DUR
                x = {'dates': d(lone_date) + '/' + d(d2)}
        elif len(self.dates) == 2:
            # having two dates could mean it guessed at two, or there is a
            # start and a finish. Bleh.
            d1 = self.dates[0][1]
            d2 = self.dates[1][1]
            f = self.__fucky__(d1, d2)
            if f is not False:
                print('fudging dates a bunch.')
                fudge = f + self.DEFAULT_EVENT_DUR
                x = {'dates': d(f) + '/' + d(fudge)}
            else:
                # treat the two guesses as a start and a finish, earliest first.
                start, finish = sorted((d1, d2))
                x = {'dates': d(start) + '/' + d(finish)}
        return x

    # we don't necessarily know the order here, as it depends how dateparser
    # does, so it could be one way, it could be the other, hence this
    # abomination.
    def __fucky__(self, d1, d2):
        for x, y in {d1: d2, d2: d1}.items():
            if dt.time(x) == self.MIDNIGHT \
                    and dt.date(y) == dt.date(dt.today()):
                # actually return a dt object with the date from x and the time
                # from y (the inverse of what we check for)
                return dt.combine(dt.date(x), dt.time(y))
        return False
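A self-contained sketch, separate from the Entry class above, of the Google Calendar "render" URL format that generate_url() targets, using only the standard library; the event details below are made up, and the dates value follows the UTC basic format shown in the class's comments.

import urllib.parse

base = 'https://calendar.google.com/calendar/render'
params = {
    'action': 'TEMPLATE',
    'text': 'Coffee with Sam',                      # event title
    'dates': '20170101T170000Z/20170101T173000Z',   # start/end in UTC, 30-minute event
    'location': 'in the usual cafe',
    'trp': True,
}
print(base + '?' + urllib.parse.urlencode(params))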
def to_time(ts):
    # Assumes `ts` is a datetime instance and `datetime` is the class (not the
    # module): equivalent to ts.time(), returning only the time-of-day part.
    return datetime.time(ts)
NB_CLASSES = 10 VERBOSE = 1 VALIDATION_SPLIT = 0.2 OPTIM = RMSprop() #OPTIM = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True) OPTIM_SEL = "ADAM" # "RMS", "SGD", "ADAM" if("SGD" == OPTIM_SEL): OPTIM = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True) elif("ADAM" == OPTIM_SEL): OPTIM = Adam() else: OPTIM = RMSprop() print("\n\tStart time: ", datetime.time(datetime.now()), "\n") print("\n\tLoading CIFAR-10 images ...\n") #load dataset ''' load_data() uses keras utility get_file() See. https://www.tensorflow.org/api_docs/python/tf/keras/utils/get_file ''' (X_train, y_train), (X_test, y_test) = cifar10.load_data() print('X_train shape:', X_train.shape) print(X_train.shape[0], 'train samples') print(X_test.shape[0], 'test samples') # convert to categorical Y_train = np_utils.to_categorical(y_train, NB_CLASSES)
def test_open_until(self): dummy_cache = cache.get_cache( 'django.core.cache.backends.dummy.DummyCache') with patch.object(models, 'cache', dummy_cache): # Create a spot that isn't open now but will be in an hour. spot = Spot.objects.create(name="This spot is open later") now = datetime.now() spot_open = datetime.time(now + timedelta(hours=1)) spot_close = datetime.time(now + timedelta(hours=3)) day_lookup = ["su", "m", "t", "w", "th", "f", "sa"] day_num = int(time.strftime("%w", time.localtime())) today = day_lookup[day_num] SpotAvailableHours.objects.create(spot=spot, day=today, start_time=spot_open, end_time=spot_close) # Verify the spot is closed now c = Client() response = c.get("/api/v1/spot", {'open_now': True}) spots = json.loads(response.content) spot_returned = False for s in spots: if s['id'] == spot.pk: spot_returned = True self.assertTrue( not spot_returned, "The spot that is open later is not in the spots open now") # Get a spot that is open until spot_close at_time = datetime.time(now + timedelta(hours=2)) at_time = at_time.strftime("%H:%M") until_time = spot_close.strftime("%H:%M") day_dict = { "su": "Sunday", "m": "Monday", "t": "Tuesday", "w": "Wednesday", "th": "Thursday", "f": "Friday", "sa": "Saturday", } at_query_day = day_dict[today] at_query = "%s,%s" % (at_query_day, at_time) until_query = "%s,%s" % (at_query_day, until_time) response = c.get("/api/v1/spot", { 'open_at': at_query, 'open_until': until_query }) spots = json.loads(response.content) spot_returned = False for s in spots: if s['id'] == spot.pk: spot_returned = True self.assertTrue(spot_returned, "Got the spot that is open later")
class BaysianInit:
    def __init__(self, calc):
        # __init__ cannot return a value, so keep the calculator and build the
        # time-stamped description as an attribute instead.
        self.calc = calc
        self.description = '%s %s' % (datetime.time(datetime.now()), calc.props())
from kivy.lang import Builder from kivy.properties import ObjectProperty from kivy.uix.popup import Popup from kivy.uix.screenmanager import ScreenManager, Screen from kivy.cache import Cache from kivy.core.window import Window from datetime import datetime import random import os import webbrowser import subprocess global path date = str(datetime.date(datetime.now())) time = str(datetime.time(datetime.now())) #db = DataBase(.txt) class LogOn(Screen): lName = ObjectProperty(None) lEDI = ObjectProperty(None) notlog = ObjectProperty(None) pinp = ObjectProperty(None) global user def logOne(self): global user path = os.getcwd() + "/"
from datetime import datetime
from psychopy import core, visual, gui, data, misc, event, sound  # PsychoPy libraries used in this script
import csv, random

# Create the dialog box shown at the start of the script: the experimenter enters
# the participant's name and session, and the date/time field is filled in automatically.
try:
    expInfo = misc.fromFile('lastParams.pickle')
except:
    expInfo = {'participant': '', 'session': '001'}
expInfo['dateStr'] = str(datetime.time(datetime.now()))  # stored as a string so it can go into the file name
dlg = gui.DlgFromDict(expInfo, title='File Name', fixed=['dateStr'])
if dlg.OK:
    misc.toFile('lastParams.pickle', expInfo)
else:
    core.quit()
# Build the output file name from the dialog entries.
fileName = 'Output/' + expInfo['participant'] + '_' + expInfo['session'] + '_' + expInfo['dateStr']
dataFile_All = open(fileName + '_All' + '.csv', 'w')  # Text file that stores the timing and other variables within the script
dataFile_All.write('TrialTag,TrialStart,TrialDur,QStart,QDur,QResponse,QCorrResponse\n')
print dataFile_All
Col1 = "Sentence"
Col2 = "OpacityS"
Col3 = "Illustration"
Col4 = "OpacityI"
Col5 = "StimCat"
Col6 = "Family"
Col7 = "ActPass"
Col8 = "CanRel"
Col9 = "Stx"
Col10 = "Anlnan"
Col11 = "VrbType"