def main():
    """Time a single check_pos(200, 300, 400) call and print the elapsed seconds."""
    from time import time
    # `from time import time` binds the function itself — it has no .now()
    # attribute; calling it returns seconds since the epoch as a float.
    start_time = time()
    check_pos(200, 300, 400)
    end_time = time()
    print(end_time - start_time)
def train():
    """Download the train/test data, train and persist a model, and return
    the elapsed wall-clock training time in seconds."""
    from time import time
    from train_ml import download_data, train_and_evaluate, serialization
    # `time` is the epoch-seconds function — it has no .now() attribute.
    start_time = time()
    for mode in ['train', 'test']:
        download_data(mode)
    model, vectorizer = train_and_evaluate()
    serialization(model, vectorizer)
    response = time() - start_time
    return response
def test_filter_date_range():
    """Exercise range / __lt / __gt filters on Date and DateTime properties."""
    class Lifetime(models.NodeModel):
        dob = models.DateProperty(indexed=True)
        mid_life_crisis = models.DateTimeProperty(indexed=True)
        tod = models.DateTimeProperty(indexed=False)

    # Local aliases keep the fixture literals short; note `time.now()` below
    # is datetime.datetime.now() via this alias.
    date = datetime.date
    time = datetime.datetime

    bdays = [date(1952, 3, 5), date(1975, 8, 11), date(1988, 7, 27)]
    crises = [
        time(1992, 3, 6, 2, 15, 30),
        time(2007, 8, 13, 16, 10, 10),
        time(2020, 8, 1, 8, 7, 59, 99),
    ]
    tods = [
        time(2022, 3, 6, 2, 15, 30),
        time(2047, 10, 30, 22, 47, 1),
        time(2060, 8, 15, 8, 7, 59),
    ]

    # One node per (birthday, crisis, time-of-death) triple.
    for dob, crisis, tod in zip(bdays, crises, tods):
        Lifetime.objects.create(dob=dob, mid_life_crisis=crisis, tod=tod)

    # Range filter on an indexed DateProperty.
    low, high = date(1975, 9, 11), time.now()
    query = Lifetime.objects.filter(dob__range=(low, high))
    assert all(low < l.dob < high.date() for l in query)

    # Strict less-than on an indexed DateTimeProperty.
    nowish = date(2011, 8, 10)
    query = Lifetime.objects.filter(mid_life_crisis__lt=nowish)
    eq_(len(query), 2)

    # Strict greater-than on a non-indexed DateTimeProperty.
    the_singularity = date(2032, 12, 12)
    query = Lifetime.objects.filter(tod__gt=the_singularity)
    eq_(len(query), 2)
def test_filter_date_range():
    """Verify date/datetime property filtering: __range, __lt and __gt."""
    class Lifetime(models.NodeModel):
        dob = models.DateProperty(indexed=True)
        mid_life_crisis = models.DateTimeProperty(indexed=True)
        tod = models.DateTimeProperty(indexed=False)

    date = datetime.date
    time = datetime.datetime  # so `time.now()` below is datetime.now()

    bdays = [date(1952, 3, 5), date(1975, 8, 11), date(1988, 7, 27)]
    crises = [time(1992, 3, 6, 2, 15, 30),
              time(2007, 8, 13, 16, 10, 10),
              time(2020, 8, 1, 8, 7, 59, 99)]
    tods = [time(2022, 3, 6, 2, 15, 30),
            time(2047, 10, 30, 22, 47, 1),
            time(2060, 8, 15, 8, 7, 59)]

    # Persist one Lifetime per aligned fixture triple.
    for t in zip(bdays, crises, tods):
        Lifetime.objects.create(dob=t[0], mid_life_crisis=t[1], tod=t[2])

    # dob within (1975-09-11, now): only results strictly inside the range.
    low, high = date(1975, 9, 11), time.now()
    query = Lifetime.objects.filter(dob__range=(low, high))
    assert all(low < l.dob < high.date() for l in query)

    # Two crises predate 2011-08-10.
    nowish = date(2011, 8, 10)
    query = Lifetime.objects.filter(mid_life_crisis__lt=nowish)
    eq_(len(query), 2)

    # Two deaths follow 2032-12-12.
    the_singularity = date(2032, 12, 12)
    query = Lifetime.objects.filter(tod__gt=the_singularity)
    eq_(len(query), 2)
def train_and_persist(svm_model, train_d, train_t, fp):
    """Fit `svm_model` on (train_d, train_t), append the training duration to
    stats.txt, and pickle the fitted model to path `fp`."""
    # 1. train the classifier
    print('Training svm model')
    from time import time
    # `time` is the epoch-seconds function — it has no .now() attribute.
    st = time()
    svm_model.fit(train_d, train_t)
    et = time()
    duration = et - st
    # file.write() requires a string; writing the raw float raised TypeError.
    with open("stats.txt", "a+") as myfile:
        myfile.write(str(duration) + "\n")
    with open(fp, 'wb') as output:  # Overwrites any existing file.
        cPickle.dump(svm_model, output)
    print('svm model trained and persisted...')
def trade_complete(self, data):
    """Schedule the follow-up task after an order completes.

    (Translated from the original Chinese docstring: "after the order
    completes, execute the task".)
    """
    event_type = data.get('event_type')
    # FIXME (original, translated): the scheduled time (eta) still needs to
    # be computed properly instead of using "now".
    # NOTE(review): if `time` is bound by `from time import time` (as used
    # elsewhere in this file), `time.now()` raises AttributeError — an eta is
    # normally a datetime; confirm the intended import/object.
    eta = time.now()
    # TODO (original, translated): first check whether a task for this
    # completed order already exists, to avoid creating duplicate tasks.
    # NOTE(review): `trade_complete(...)` here resolves to a module-level
    # name (presumably an async task), not this method — confirm passing
    # eta= actually schedules it rather than invoking it inline.
    task = trade_complete(data, eta=eta)
    task_id = task.id
    # NOTE(review): dict.get() requires a key argument — `data.get()` raises
    # TypeError as written; presumably data.get('instance_id'). Confirm.
    instance_id = data.get()
    key = '{}:{}'.format(event_type, instance_id)
    self.push_task_id(key, task_id)
def hello_world():
    """Print the current epoch timestamp and return a greeting.

    Returns:
        str: the literal 'Hello World'.
    """
    print('what is now?')
    # `time` (from `from time import time`) is a function — call it; it has
    # no .now() attribute.
    print(time())
    return 'Hello World'
# Print the current Unix timestamp (seconds since the epoch).
from time import time

# `from time import time` binds the function itself — it has no .now()
# attribute; call it directly.
print(time())
def capture(self):
    """Capture a still image whose filename embeds the current Unix timestamp."""
    # `time` (imported as `from time import time` elsewhere in this file) is
    # a function — it has no .now(); call it for the epoch timestamp.
    # NOTE(review): PiCamera.capture is invoked unbound on the class, so the
    # filename string is passed as its first (self) argument — this likely
    # needs a camera instance as the receiver; confirm against the enclosing
    # class / picamera API.
    picamera.PiCamera.capture('image-' + str(time()) + '.jpg')