def newChallenge():
    if validate(session):
        userData = users.find_one({'username': session['username']})
        userCode = content.find_one({'owner': userData['_id'], 'type': 'challenge'})
        if userCode is None:
            linkID = make_linkID()
            content.insert_one({
                '_id': str(uuid.uuid4()),
                'type': 'challenge',
                'name': 'testchallenge',
                'title': 'Title?',
                'owner': userData['_id'],
                'created': float(time.time()),
                'modified': float(time.time()),
                'args_mutable': [],
                'visibility': 'private',
                'linkID': linkID,
                'description': 'This is the description. Visit the Description tab to edit it.',
                'code': DEFAULT_SOLUTION,
                'starterCode': DEFAULT_STARTER_CODE
            })
        else:
            linkID = userCode['linkID']
        return redirect('/' + linkID)
    else:
        session['intent'] = "/newchallenge"
        return redirect("/signin")
def json_to_bson(obj):
    for name in obj:
        value = obj[name]
        if type(value) == str:
            # 24 hex characters -> treat as an ObjectId string
            if re.match(r'^[0-9a-f]{24}$', value) is not None:
                obj[name] = ObjectId(value)
            # "D:<digits>" marker -> replace with the current timestamp
            elif re.match(r'^D:([0-9]*)$', value) is not None:
                obj[name] = time.time()
            else:
                pass
        elif isinstance(value, list):
            for index in range(len(value)):
                tmp = value[index]
                if isinstance(tmp, dict):
                    value[index] = json_to_bson(tmp)
                elif type(tmp) == str:
                    if re.match(r'^[0-9a-f]{24}$', tmp) is not None:
                        value[index] = ObjectId(tmp)
                    elif re.match(r'^D:([0-9]*)$', tmp) is not None:
                        value[index] = time.time()
                    else:
                        pass
        elif isinstance(value, dict):
            obj[name] = json_to_bson(value)
    return obj
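# A minimal usage sketch for json_to_bson above (assumptions: `re`, `time`, and bson's
# ObjectId are available at module level, as the function requires). 24-character hex
# strings become ObjectIds, "D:<digits>" markers become the current timestamp, and
# nested dicts/lists are converted recursively. The sample document is hypothetical.
import re
import time
from bson.objectid import ObjectId

doc = {
    'owner': '507f1f77bcf86cd799439011',            # becomes ObjectId(...)
    'created': 'D:1454700573',                      # becomes time.time()
    'tags': ['507f191e810c19729de860ea'],           # list items are converted too
    'meta': {'parent': '507f191e810c19729de860eb'}  # nested dicts are converted recursively
}
print(json_to_bson(doc))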
def doc_gen(docs, tin, titles, flag=''):
    if flag:
        cursor = tin.find(no_cursor_timeout=True)
        count = 0
        n = 0
        ts = time.time()
        for ti in cursor:
            docs[ti['SUBJECT_ID']] = docs[ti['SUBJECT_ID']] + '\t' + intvl_word(
                ti[titles[0]], ti[titles[1]], ti[flag])
            count += 1
            if count % 1000000 == 0:
                n += 1
                te = time.time()
                print('\t', n, te - ts)
                ts = time.time()
        cursor.close()
    else:
        if docs:
            for ti in tin.find():
                for title in titles:
                    docs[ti['SUBJECT_ID']] = docs[ti['SUBJECT_ID']] + '\t' + ti[title]
        else:
            for ti in tin.find():
                doc = ''
                for title in titles:
                    doc = doc + ' ' + ti[title]
                docs[ti['SUBJECT_ID']] = doc
    return docs
def nextGeneration(self):
    start = time.time()
    self.generation += 1
    backend = 'threading'
    with parallel_backend(backend):
        Parallel(n_jobs=4)(delayed(self.nextIteration)(population)
                           for population in self.populations)
    # with Parallel(n_jobs=4, backend=backend) as parallel:
    #     for population in self.populations:
    #         population.nextGeneration(parallel)
    log = {}
    if self.verbose >= 1:
        log["generation"] = self.generation
        log["elapsedTime"] = time.time() - start
    if self.verbose >= 2:
        fitness = {}
        for population in self.populations:
            fitness[population.goal] = [
                population.avg(), population.median(), population.best()
            ]
        log["fitness"] = fitness
    if self.verbose != 0:
        print(log)
def execute_modify_items(self, modify_items, concurency):
    if len(modify_items) == 0:
        return {}
    statements_and_params = []
    if ISDEBUG:
        logger.log("prepare data for cassandra")
        st = time.time()
    for item in modify_items:
        if item[0] == 'insert':
            statements_and_params.append(
                (self.get_insert_stmt(), self.get_insert_args(item[1])))
        elif item[0] == 'delete':
            statements_and_params.append(
                (self.get_delete_stmt(), self.get_delete_args(item[1])))
        else:
            raise ValueError('unknown modify item type')
    if ISDEBUG:
        logger.log("prepare data finished in {0} ms".format((time.time() - st) * 1000))
        logger.log("start modify operation. count: {0}".format(len(modify_items)))
        st = time.time()
    if len(statements_and_params) == 1:
        self.session.execute(statements_and_params[0][0], statements_and_params[0][1])
    else:
        execute_concurrent(self.session, statements_and_params,
                           raise_on_first_error=True, concurrency=concurency)
    if ISDEBUG:
        logger.log("modify completed in {0} ms".format((time.time() - st) * 1000))
def thread_grabImg(self, url):
    try:
        print "curr url is %s." % url
        html_cont = self.dLoader.download(url)
        title, links = self.contParser.parser_img_cont(html_cont)
        if links is None or len(links) == 0:
            print "url is %s, not src." % url
            return None
        if title is None:
            title = time.time()
        try:
            if not os.path.isdir(title):
                os.mkdir(title)
        except:
            title = time.time()
            if not os.path.isdir(title):
                os.mkdir(title)
        params = []
        index = 0
        for link in links:
            params.append(([title, link, index], None))
            index += 1
        pool = threadpool.ThreadPool(12)
        requests = threadpool.makeRequests(self.contOutputer.output_img, params)
        [pool.putRequest(req) for req in requests]
        pool.wait()
    except:
        print "url is %s, error." % url
def crawle_all(self, code, num):
    if code:
        self.crawle_one(code)
        return
    fund_code_list = utils.load_fund_list()
    pbar = tqdm(total=min(num, len(fund_code_list)))
    counter = 0
    error = 0
    start = time.time()
    for fund in fund_code_list:
        try:
            success = self.crawle_one(fund.code)
            if not success:
                error += 1
        except:
            logger.exception("Failed to crawl [%s] ...", fund.code)
            error += 1
            continue
        counter += 1
        if counter > num:
            break
        # time.sleep(1)  # random.random())
        pbar.update(1)
    pbar.close()
    end = time.time()
    logger.info("Crawled [%d] items in total, [%d] failed, [%d] succeeded, average %.2f sec/item",
                counter, error, counter - error, (end - start) / counter)
def start_automatic(self):
    start_time_ref = time.time()
    while self.mode == AUTO:
        try:
            # start_time = time.time()
            # wait_time = 1.0
            # while wait_time > 0:
            #     wait_time = CONST_TIME + start_time - time.time()
            #     time.sleep(0.1)
            measured_value = int(self.sensor_reader.handle_reading(self.sensor_reader.readline())[1])
            pid_value = self.pid.update(measured_value)
            pwm_value = LEDModel.get_pwm(pid_value)
            output = "{0}, {1}, {2}, {3}".format(time.time() - start_time_ref,
                                                 measured_value, pid_value, pwm_value)
            print output
            self.led.update_all(pwm_value)
            try:
                self.output_file.write(output + "\n")
                # self.database.insert_into_control_table(measured_value, pid_value)
            except:
                # does not matter if saving fails
                pass
        except Exception as e:
            print e
            # exception probably caused by reading, sleep to give some time to update
            time.sleep(0.1)
            pass
def run():
    # net = sumolib.net.readNet('campusmap.net.xml')
    # oldUpcomingLightID = ''
    """execute the TraCI control loop"""
    """ This stuff powers through the first 2 steps"""
    step = 0
    traci.simulationStep()
    traci.simulationStep()
    step += 2
    traci.vehicle.add("bike", "bike1", typeID='typeBike')  # Adds bike to simulation
    traci.vehicle.setSpeed("bike", 0)
    while True:
        starttime = time.time()
        updatePosition()
        upcominglights = traci.vehicle.getNextTLS("bike")
        if len(upcominglights) > 0:
            # tank holds the current traffic light ID;
            # traffic light identification logic will have to be tied to this variable
            tank = str(upcominglights[0][0])
            LightLocation(tank)  # pulls the traffic light location out and stores it in variables for usage
            greenz(tank)  # has to be here because Flask crashes TraCI if it is after the simulation step
            # this must be processed in the simulation; it can't be done in the webserver or it will break things,
            # likely because it is not thread safe
        traci.simulationStep()
        step += 1
        time.sleep(1.0 - ((time.time() - starttime) % 1.0))
    traci.close()
def main():
    # Start the program execution clock
    program_start_time = time.time()
    human_readable_time = time.strftime("%H:%M:%S", time.localtime(program_start_time))
    print("\nProgram clock started at -> [ {} ]".format(str(human_readable_time)), end='\n')

    # Print an initialization message
    print("\nInitializing program.....\n", end='\n')

    # Check the source directory for "xxxxyyzz.gz" files
    # Extracts the .gz files into .csv
    fileOperations.checkgzFiles()

    # Create DB connection and execute threads on DB
    dbOperations.executeThreads()

    # Print program execution time message
    program_elapsed_sec = (time.time() - program_start_time)
    # program_elapsed_min = round((program_elapsed_sec / 60), 2)
    print('\nProgram executed in [ {} ] second(s).'.format(program_elapsed_sec), end='\n')

    # Print exit message
    print('\nExiting.....', 'Bye!', sep='\n', end='\n')
def user_stats(df):
    """Displays statistics on bikeshare users."""

    print('\nCalculating User Stats...\n')
    start_time = time.time()

    # TO DO: Display counts of user types
    user_types = df['User Type'].value_counts()
    print('The number of users by type:', user_types)

    # TO DO: Display counts of gender
    gender = df['Gender'].value_counts()
    print('The number of users by gender:', gender)

    # TO DO: Display earliest, most recent, and most common year of birth
    # earliest is the lowest, most recent is the highest and most common is the mode
    youngest_user = df['Birth Year'].min()
    print('The youngest user is:', youngest_user)
    oldest_user = df['Birth Year'].max()
    print('The oldest user is:', oldest_user)
    popular_year = df['Birth Year'].mode()[0]
    print('The most common year of birth is:', popular_year)

    print("\nThis took %s seconds." % (time.time() - start_time))
    print('-' * 40)
def _update(self):
    current_time = int(time.time())
    last_refresh = 0 if self._last_refresh is None else self._last_refresh
    if current_time >= (last_refresh + self._refresh_rate):
        self._update_data()
        self._last_refresh = int(time.time())
def time_stats(df):
    """Displays statistics on the most frequent times of travel."""

    print('\nCalculating The Most Frequent Times of Travel...\n')
    start_time = time.time()

    # TO DO: display the most common month
    df['Start Time'] = pd.to_datetime(df['Start Time'])
    df['month'] = df['Start Time'].dt.month
    common_month = df['month'].mode()[0]
    print('The most common or popular month:', common_month)

    # TO DO: display the most common day of week
    df['day_of_week'] = df['Start Time'].dt.day_name()
    day_of_week = df['day_of_week'].mode()[0]
    print('The most common or popular day of the week:', day_of_week)

    # TO DO: display the most common start hour
    df['hour'] = df['Start Time'].dt.hour
    popular_hour = df['hour'].mode()[0]
    print('The most common or popular hour of the day:', popular_hour)

    print("\nThis took %s seconds." % (time.time() - start_time))
    print('-' * 40)
def mandelbrot(width, height, d=2, zoom=1):
    """Returns the image of the Mandelbrot set given 'd' as a .png"""
    print("Performing preparations...")
    startTime = float(time.time())
    bitMapArray = np.zeros([height, width, 3], dtype=np.uint8)
    print("Calculating Mandelbrot Set...")
    for x in range(0, width):
        for y in range(0, height):
            coords = screenToCartesian(x, y, width, height, zoom)
            if (coords[0] == 0):
                zeroX = x
            if (coords[1] == 0):
                zeroY = y
            n = testCoord(coords[0], coords[1], d)
            color(bitMapArray, x, y, n, 255 // 3, 255 // 3)
    bitMapArray = mapAxis(bitMapArray, width, height, zeroX, zeroY)
    img = Image.fromarray(bitMapArray)
    name = "mandelbrot-" + currentTime() + ".png"
    img.save(name)
    # name = 'atprstd.png'
    timeTaken = (float(time.time())) - startTime
    print("Done - Saved as: " + name)
    print("Time taken to output: " + str(timeTaken) + " seconds")
    return name
def df2csv(dataframe, ticker):
    """Save the contents of a dataframe to a csv file in the data/ directory.
    File name will include the ticker (required arg) and current timestamp.

    :param dataframe:
    :param ticker:
    :return:
    """
    if dataframe is None:
        raise EmptyDataError('Dataframe cannot be empty.')
    if not ticker or ticker is None:
        raise ValueError('[!] Invalid ticker value.')
    datafile = os.path.relpath('data/{}_data_{}.csv'.format(ticker, time.time()))
    try:
        dataframe.to_csv(datafile, index=False)
    except FileNotFoundError:
        print('[?] Retrying one directory level up.')
        datafile = os.path.relpath('../data/{}_data_{}.csv'.format(ticker, time.time()))
        try:
            dataframe.to_csv(datafile, index=False)
        except FileNotFoundError:
            raise FileNotFoundError('[!] Unable to save dataframe to CSV file.')
    finally:
        print('[+] File saved:\t{}'.format(datafile))
def ajax_change_log(self, request, tl, one, two, module, extra, prog):
    cl = self.get_change_log(prog)
    last_fetched_index = int(request.GET['last_fetched_index'])
    # check whether we have a log entry at least as old as the last fetched time
    # if not, we return a command to reload instead of the log
    # note: negative number implies we want to debug dump changelog
    if cl.get_earliest_index() is not None and last_fetched_index != 0 and cl.get_earliest_index() > last_fetched_index:
        return {
            "other": [{
                'command': "reload",
                'earliest_index': cl.get_earliest_index(),
                'latest_index': cl.get_latest_index(),
                'time': time.time()
            }]
        }
    return {
        "changelog": cl.get_log(last_fetched_index),
        'other': [{
            'time': time.time()
        }]
    }
def api_newContent(contentType):
    if validate(session):
        userData = users.find_one({'username': session['username']})
        print(contentType)
        if contentType not in ['challenge', 'editor_standalone']:
            return error_json("api_new_contentType")
        linkID = make_linkID()
        newContent = {
            'type': contentType,
            '_id': str(uuid.uuid4()),
            'description': '## Edit me in the Description tab with Markdown',
            'title': 'Untitled',
            'name': linkID,
            'linkID': linkID,
            'owner': userData['_id'],
            'created': float(time.time()),
            'modified': float(time.time()),
            'args_mutable': [],
            'visibility': 'private',
        }
        if contentType == "challenge":
            newContent = {
                **newContent,
                'code': DEFAULT_SOLUTION,
                'starterCode': DEFAULT_STARTER_CODE
            }
        if contentType == "editor_standalone":
            newContent = {
                **newContent,
                'code': DEFAULT_CODE,
                'starterCode': DEFAULT_STARTER_CODE
            }
        content.insert_one(newContent)
        return success_json(newContent)
    else:
        return error_json("api_general_session")
def main():
    parser = argparse.ArgumentParser("Online Khabar Scrapper")
    parser.add_argument("-n", "--news_link", default="https://www.onlinekhabar.com",
                        metavar="LINK", help="News Link")
    parser.add_argument("-s", "--source", default="onlinekhabar",
                        metavar="SOURCE", help="News source name")
    parser.add_argument("-d", "--given_date", default=None,
                        metavar="DATE", help="Date Format : 2020/04")
    parser.add_argument("-sp", "--start_page", default=1, type=int,
                        metavar="PAGE", help="Starting page to scrap")
    parser.add_argument("-ep", "--end_page", default=15, type=int,
                        metavar="PAGE", help="End page to scrap")
    args = parser.parse_args()

    news_link = args.news_link
    news_source_name = args.source
    start_page = args.start_page
    page_num = args.end_page
    # Take today's date if not given on arguments
    given_date = args.given_date if args.given_date else date.today().strftime("%Y/%m")

    logger.info("Scrapping at least {} pages from each categories ahead of {} date".format(
        page_num - start_page, given_date))
    start_time = time.time()
    scrappy = Scrapper(news_link=news_link, source=news_source_name, given_date=given_date,
                       start_page=start_page, page_num=page_num)
    scrappy.extractContent()
    seconds = time.time() - start_time
    logger.info("Total time taken to scrap : {}".format(
        time.strftime("%H:%M:%S", time.gmtime(seconds))))
def train(train_iter, test_iter, net, loss, optimizer, device, num_epochs):
    ls = []
    for _ in range(num_epochs):
        start = time.time()
        for batch_i, (X, y) in enumerate(train_iter):
            X = X.to(device)
            y = y.to(device)
            l = loss(net(X).view(-1), y)
            optimizer.zero_grad()
            l.backward()
            optimizer.step()
        test_loss_epoch = 0
        for (X_test, y_test) in test_iter:
            X_test = X_test.to(device)
            y_test = y_test.to(device)
            l = loss(net(X_test).view(-1), y_test)
            test_loss_epoch += l
        test_loss_epoch /= len(test_iter)
        ls.append(test_loss_epoch)
        # print results and plot
        print('loss: %f, %f sec per epoch' % (ls[-1], time.time() - start))
    plt.plot(np.linspace(0, num_epochs, len(ls)), ls)
    plt.xlabel('epoch')
    plt.ylabel('loss')
def main() -> None:
    """ Python program that uses Apache Spark to count the most frequent words in a text file """

    spark_conf = SparkConf() \
        .setAppName("AddNumbers") \
        .setMaster("local[4]")
    spark_context = SparkContext(conf=spark_conf)

    logger = spark_context._jvm.org.apache.log4j
    logger.LogManager.getLogger("org").setLevel(logger.Level.WARN)

    init_time = time.time()

    # stopWords = ("que", "de", "y", "la", "a", "el", "en")
    stopWords = set(stopwords.words('spanish'))
    print("Number of stopwords: " + str(len(stopWords)))

    results = spark_context.textFile("data/quijote.txt") \
        .flatMap(lambda line: line.split(' ')) \
        .filter(lambda word: word not in stopWords) \
        .map(lambda word: (word, 1)) \
        .reduceByKey(lambda a, b: a + b) \
        .sortBy(lambda pair: pair[1], ascending=False) \
        .take(10)

    finish_time = time.time()

    for (word, count) in results:
        print("%s: %i" % (word, count))

    print("Computing time: " + str(finish_time - init_time))

    spark_context.stop()
def control(screen_id, movement, percentage, ms=5000.0):
    ensure_motor_power()
    ss = [screen_id]
    if (screen_id == 0):
        ss = [1, 2, 3, 4]
    flash('Sunscreen ' + str(ss) + ' going ' + movement + '...')
    print "relais ", ss, " on "
    for s in ss:
        GPIO.output(RELAY_SUNSCREEN[s - 1], GPIO.LOW)
    time.sleep(1)
    direction = 1
    if (movement == 'down'):
        direction = -1
    motor_event(int(((255.0 / 100.0) * percentage) * direction))
    # measure time, and measure overload
    start_time = time.time()
    overload = GPIO.input(RELAY_MOTOR_OVERLOAD)
    print(overload)
    while (((time.time() - start_time) < int(ms / 1000.0)) and (overload == 0)):
        time.sleep(0.1)
        overload = GPIO.input(RELAY_MOTOR_OVERLOAD)
    if (overload):
        print("motor overload")
    motor_event(0)
    time.sleep(1)
    for s in ss:
        GPIO.output(RELAY_SUNSCREEN[s - 1], GPIO.HIGH)
    print "relais ", ss, " off "
    flash('Sunscreen ' + str(ss) + ' is ' + movement + '.')
def do_test(times, __expected):
    startTime = time.time()
    instance = Iditarod()
    exception = None
    try:
        __result = instance.avgMinutes(times)
    except:
        import traceback
        exception = traceback.format_exc()
    elapsed = time.time() - startTime  # in sec

    if exception is not None:
        sys.stdout.write("RUNTIME ERROR: \n")
        sys.stdout.write(exception + "\n")
        return 0

    if tc_equal(__expected, __result):
        sys.stdout.write("PASSED! " + ("(%.3f seconds)" % elapsed) + "\n")
        return 1
    else:
        sys.stdout.write("FAILED! " + ("(%.3f seconds)" % elapsed) + "\n")
        sys.stdout.write(" Expected: " + pretty_str(__expected) + "\n")
        sys.stdout.write(" Received: " + pretty_str(__result) + "\n")
        return 0
def xmeansClusteringLogEntries(log_entries):
    print("\n\n┻━┻︵ \(°□°)/ ︵ ┻━┻ ")
    print("START CLUSTERING OUR DENSE VECTORS FROM THE OVERLAP MATRIX")

    # our new numpy array that has a dimension representing the size of the dense vector
    # created from our bipartite matrix. They should all be uniform so any single
    # sample will do.
    numpy_fv_array = np.zeros(shape=(len(log_entries), len(log_entries[0].overlap_vector)))

    # craft a master numpy array of all feature vectors.
    index = 0
    for entry in log_entries:
        numpy_fv_array[index] = entry.overlap_vector
        index += 1

    # build our kmeans object
    # km = MiniBatchKMeans(n_clusters=10, init='k-means++', max_iter=300, n_init=10, verbose=0, batch_size=10000)
    km = MiniBatchKMeans(init='k-means++', max_iter=300, verbose=0, batch_size=10000)

    # fit our feature vectors to labels.
    t0 = time.time()
    label_set = km.fit_predict(numpy_fv_array)
    num_of_labels = len(np.unique(label_set))
    print("done in %0.3fs" % (time.time() - t0))

    # now apply those labels back into the subset objects for future cluster correlation.
    index = 0
    for entry in log_entries:
        entry.overlap_label = int(km.labels_[index])
        index += 1

    print("CLUSTERING ROUND 2 COMPLETED, CLUSTER LABELS HAVE BEEN ASSIGNED.")
    return num_of_labels
def test_delta(self):
    import time
    before = time.time()
    time.sleep(1)
    after = time.time()
    print "////" + "%.2f seconds" % (after - before)
    print sys.path
def test_create_report_error_people_seen(self):
    region = 'CC'
    trail_name = 'test_trail'
    trailhead = create_trail_and_trailhead(name=trail_name, region=region,
                                           coordinates=fake.word(), filters=None)
    time = datetime.now()
    path = reverse('reports_trailhead', args=(region, trailhead.trail.id, trailhead.id,))
    response = self.client.post(path, {
        'trail': trailhead.trail.id,
        'trailhead': trailhead.id,
        'date_hiked': time.date(),
        'day_hiked': 'Th',
        'trail_begin': time.time(),
        'trail_end': time.time(),
        'pkg_location': 'P',
        'pkg_estimate_begin': 29,
        'pkg_estimate_end': 34,
        'cars_seen': 34,
        'people_seen': -344,
    })

    self.assertEqual(response.status_code, 200)
    self.assertTemplateUsed('reports.html')
    self.assertContains(response, 'Ensure this value is greater than or equal to 0')
def wrapped(*args, **kwargs):
    ts = time.time()
    result = func(*args, **kwargs)
    te = time.time()
    print('{method} ({args}, {kw}) took {time} sec'.format(
        method=func.__name__, args=args, kw=kwargs, time=te - ts))
    return result
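# A minimal sketch of the enclosing decorator the inner `wrapped` above presumably belongs
# to (assumption: the outer function and its name `timed` are hypothetical; only the inner
# body comes from the snippet). It times a call and prints the elapsed seconds.
import functools
import time

def timed(func):
    @functools.wraps(func)
    def wrapped(*args, **kwargs):
        ts = time.time()
        result = func(*args, **kwargs)
        te = time.time()
        print('{method} ({args}, {kw}) took {time} sec'.format(
            method=func.__name__, args=args, kw=kwargs, time=te - ts))
        return result
    return wrapped

@timed
def slow_add(a, b):
    time.sleep(0.1)  # simulate work
    return a + b

slow_add(1, 2)  # prints something like: slow_add ((1, 2), {}) took 0.10... sec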
def setdatetime():
    form_setdatetime = SetDateTimeForm()
    if form_setdatetime.validate_on_submit():
        if isNotBlank(form_setdatetime.date_new.data):
            # date is set in controller/rtc.py
            pass
        if isNotBlank(form_setdatetime.time_new.data):
            # time is set in controller/rtc.py
            pass
        if conf.IPC_FLAG:
            msg = json.dumps({
                "cmd": "set-datetime",
                "date": form_setdatetime.date_new.data,
                "time": form_setdatetime.time_new.data
            })
            logger.info("{}".format(msg))
            queue_c_to_s.put(msg)
            result = queue_s_to_c.get()  # ok
        return redirect(url_for('setdatetime'))

    import time
    datum = time.strftime("%Y-%m-%d", time.localtime(time.time()))
    uhrzeit = time.strftime("%H:%M:%S", time.localtime(time.time()))
    return render_template('setdatetime.html', title='Einstellungen',
                           form_setdatetime=form_setdatetime,
                           uhrzeit=uhrzeit, datum=datum)
def adquisitionData(tiempo):
    global datosGx, datosGy, datosGz, numSamples, accelArrx, Gx, rawGx, previa
    global velocidad, distancia, flag, t, accX
    offsetGx = 0
    t0 = time.time()
    if offsetX == 0:
        accelArrx.append(accX)
    # correct the Gx value
    if (offsetX != 0):
        offsetGx = accX - offsetX
    # apply a low-pass filter to offsetGx
    if (abs(offsetGx) < 0.02):
        offsetGx = 0
    rawGx += ("%f, " % (offsetGx))
    # rawGx += ("%f, " % (Gx))
    numSamples = numSamples + 1
    # Set the thread event so the acquisition thread is allowed to take the sample.
    event.set()
    time.sleep(tiempo / 1000.0)
    lapsetime = ((time.time() - t0) * 1000)
    deltav = ((offsetGx * 9.8 * (lapsetime / 1000)))
    deltav += ((offsetGx - previa) / 2) * 9.8 * (lapsetime / 1000)
    velocidad = velocidad + deltav
    if (offsetGx == 0) and (abs(velocidad) < 0.05):
        velocidad = 0
    distancia = distancia + (velocidad * lapsetime / 1000)
    previa = offsetGx
    # print("Velocidad = %f m/s, Distancia = %f m - #Samples = %i SampleTime = %2.2f ms" % (velocidad, distancia, numSamples, lapsetime), end='\r')
    print(
        "Velocidad = %f m/s, Distancia = %f m - #Samples = %i SampleTime = %2.2f ms, Gx: %f "
        % (velocidad, distancia, numSamples, lapsetime, accX - offsetX),
        end='\r')
def SynGeoQuery():
    import time
    start_time = time.time()
    # Connect to the existing Fog database
    try:
        conn = psycopg2.connect("dbname=fog1 user=andrei password=andrei123")
    except:
        print "I am unable to connect to the database"

    # Open a cursor to perform database operations
    cur = conn.cursor()
    cur.execute("""SELECT COUNT(geo_time) OVER (), geopge02.station_name, geopge02.geo_time, synop5.synop_time, synop5.ct_low, synop5.ca, synop5.synop_date \
    FROM geopge02 INNER JOIN synop5 ON \
    (synop5.block_no=geopge02.block_no) AND (synop5.station_no=geopge02.station_no) AND (geo_date>=date '2015-04-01') \
    AND (geo_date <= date '2016-04-30') AND (geo_date=synop_date) AND (geo_time=synop_time) \
    AND (geopge02.ct[4]>=1 AND geopge02.ct[5] <=4 AND synop5.ca<=2);""")
    print("cur rows=", cur.rowcount)
    print("got this far... in SynGeoQuery")
    print("cur rows=", cur.rowcount)

    # Close communication with the database
    cur.close()
    conn.close()
    print("--- %s seconds ---" % (time.time() - start_time))
def USS(A, b):
    print("--USS--")
    l1norm = []
    ratio = []
    iter_data = []
    time_per_k = []
    for k in sketch_size:
        r = 0
        temp_data = []
        temp_time = 0
        # print("---- Sample Size ----", k)
        for i in range(iterations):
            index = np.random.choice(row, k, replace=False)
            A_sketch = A[index, :]
            b_sketch = b[index]
            start = time.time()
            x_tilde = np.array(l1RegressionSolver(A_sketch, b_sketch))
            end = time.time()
            temp_time += end - start
            regression_value = np.linalg.norm((A.dot(x_tilde) - b), ord=1)
            temp_data.append(regression_value)
            r += regression_value
        ## for loop ends:
        time_per_k.append(temp_time / iterations)
        iter_data.append(temp_data)
        r /= iterations
        l1norm.append(r)
    ratio = np.array(l1norm) / np.array(l1normOpt)
    print("l1norm_each sample size-", l1norm)
    print("ratio_each sample size-", ratio)
    return ratio, l1norm, time_per_k
def test_single_report(self):
    region = 'CC'
    trailhead = create_trail_and_trailhead(name=fake.name(), region=region,
                                           coordinates=fake.word(), filters=None)
    time = datetime.now()
    report = create_report(report={
        'trail': trailhead.trail,
        'trailhead': trailhead,
        'date_hiked': fake.date(),
        'trail_begin': time.time(),
        'trail_end': fake.time(),
        'pkg_estimate_begin': fake.pyint(min_value=0, max_value=100),
        'pkg_estimate_end': fake.pyint(min_value=0, max_value=100),
        'cars_seen': fake.pyint(),
        'people_seen': fake.pyint(),
        'horses_seen': fake.boolean(),
        'dogs_seen': fake.boolean()
    })
    response = self.client.get(reverse('report', args=(
        region, trailhead.trail.id, trailhead.id, report.id,)))

    self.assertEqual(response.status_code, 200)
    self.assertTemplateUsed('report.html')
    self.assertEqual(response.context['report'].trail_begin, time.time())
def calc_uptime(id, ids, sensors, d):
    #### first chunk to get meta data on the weather station
    dates = []
    for i in range(0, d.__len__()):
        dates.append(datetime.strptime(d[i], "%Y-%m-%d"))
    step0 = time.time()
    ids.create_index("id")
    sensors.create_index("ts")
    sensor = ids.find_one({'id': int(id)})
    step1 = time.time()
    print "Uptime: 1: Data Loaded... (%ss)" % (round((step1 - step0), 1))
    name = sensor["name"]
    lat = sensor["lt"]
    lon = sensor["ln"]
    mob_no = sensor["ph"]
    carrier = sensor["carrier"]
    f = int(sensor["freq"])

    #### second chunk to actually calculate the uptime
    misses = 0
    hangs = 0
    attempts = 0
    uploads = 0
    htimes = []
    hvals = []
    mtimes = []  # appended to below but missing an initialisation in the original
    mvals = []   # appended to below but missing an initialisation in the original
    for i in range(0, dates.__len__()):
        if i > 0:
            diff = dates[i] - dates[i - 1]
            # Misses
            if diff < timedelta(minutes=(10 * f + 1)) and diff > timedelta(minutes=(f + 1)):
                misses += round(diff.total_seconds() / (60 * f), 0) - 1
                attempts += round(diff.total_seconds() / (60 * f), 0)
                uploads += 1
                mtimes.append(dates[i])
                mvals.append(misses)
            # Hangs
            elif diff > timedelta(minutes=(10 * f + .5)):
                hangs += 1
                attempts += 1
                uploads += 1
                htimes.append(dates[i])
                hvals.append(hangs)
            # Uploads
            else:
                attempts += 1
                uploads += 1
    in_field = dates[-1] - dates[0]
    pct = round(100 * uploads / attempts, 2)
    uptime = round(100 * uploads / (in_field.total_seconds() / (60 * f)), 2)
    analytics = {"id": id, "name": name, "last_update": dates[-1], "uptime": uptime,
                 "infield_days": in_field.days, "pct": pct,
                 "attempts": format(int(attempts), ","), "uploads": format(uploads, ","),
                 "misses": format(int(misses), ","), "hangs": hangs, "htimes": htimes,
                 "lt": lat, "ln": lon, "mob_no": mob_no, "carrier": carrier, "freq": str(f)}
    return analytics
def intialise_proxy_manager(options):
    """ Proxy Manager initialization.

    :param dict options: Proxy manager configuration parameters.
    """
    proxy_manager = None
    if options['Botnet_mode'] is not None:
        proxy_manager = Proxy_manager()
        answer = "Yes"
        proxies = []
        if options['Botnet_mode'][0] == "miner":
            miner = Proxy_Miner()
            proxies = miner.start_miner()

        if options['Botnet_mode'][0] == "list":  # load proxies from list
            proxies = proxy_manager.load_proxy_list(options['Botnet_mode'][1])
            answer = raw_input("[#] Do you want to check the proxy list? [Yes/no] : ")

        if answer.upper() in ["", "YES", "Y"]:
            proxy_q = multiprocessing.Queue()
            proxy_checker = multiprocessing.Process(
                target=Proxy_Checker.check_proxies,
                args=(proxy_q, proxies,)
            )
            logging.info("Checking Proxies...")
            start_time = time.time()
            proxy_checker.start()
            proxies = proxy_q.get()
            proxy_checker.join()

        proxy_manager.proxies = proxies
        proxy_manager.number_of_proxies = len(proxies)

        if options['Botnet_mode'][0] == "miner":
            logging.info("Writing proxies to disk(~/.owtf/proxy_miner/proxies.txt)")
            miner.export_proxies_to_file("proxies.txt", proxies)
        if answer.upper() in ["", "YES", "Y"]:
            logging.info(
                "Proxy Check Time: %s",
                time.strftime(
                    '%H:%M:%S',
                    time.localtime(time.time() - start_time - 3600)
                )
            )
            cprint("Done")
        if proxy_manager.number_of_proxies == 0:
            ServiceLocator.get_component("error_handler").FrameworkAbort("No Alive proxies.")
        proxy = proxy_manager.get_next_available_proxy()

        # check proxy var... http:// sock://
        options['OutboundProxy'] = []
        options['OutboundProxy'].append(proxy["proxy"][0])
        options['OutboundProxy'].append(proxy["proxy"][1])
def get_optimal_clusters(self, attribute_name_order, add_result_funct):
    if not self.data_has_class or self.data_has_continuous_class:
        return

    jitter_size = 0.001 * self.clusterOptimization.jitterDataBeforeTriangulation
    domain = Orange.data.Domain([Orange.feature.Continuous("xVar"),
                                 Orange.feature.Continuous("yVar"),
                                 self.data_domain.class_var])

    # init again, in case that the attribute ordering took too much time
    self.scatterWidget.progressBarInit()
    start_time = time.time()
    count = len(attribute_name_order) * (len(attribute_name_order) - 1) / 2
    test_index = 0

    for i in range(len(attribute_name_order)):
        for j in range(i):
            try:
                attr1 = self.attribute_name_index[attribute_name_order[j]]
                attr2 = self.attribute_name_index[attribute_name_order[i]]
                test_index += 1
                if self.clusterOptimization.isOptimizationCanceled():
                    secs = time.time() - start_time
                    self.clusterOptimization.setStatusBarText(
                        "Evaluation stopped (evaluated %d projections in %d min, %d sec)"
                        % (test_index, secs / 60, secs % 60))
                    self.scatterWidget.progressBarFinished()
                    return

                data = self.create_projection_as_example_table(
                    [attr1, attr2], domain=domain, jitter_size=jitter_size)
                graph, valuedict, closuredict, polygon_vertices_dict, enlarged_closure_dict, other_dict = \
                    self.clusterOptimization.evaluateClusters(data)

                all_value = 0.0
                classes_dict = {}
                for key in valuedict.keys():
                    add_result_funct(valuedict[key], closuredict[key],
                                     polygon_vertices_dict[key],
                                     [attribute_name_order[i], attribute_name_order[j]],
                                     int(graph.objects[polygon_vertices_dict[key][0]].getclass()),
                                     enlarged_closure_dict[key], other_dict[key])
                    classes_dict[key] = int(graph.objects[polygon_vertices_dict[key][0]].getclass())
                    all_value += valuedict[key]
                # add all the clusters
                add_result_funct(all_value, closuredict, polygon_vertices_dict,
                                 [attribute_name_order[i], attribute_name_order[j]],
                                 classes_dict, enlarged_closure_dict, other_dict)

                self.clusterOptimization.setStatusBarText(
                    "Evaluated %d projections..." % (test_index))
                self.scatterWidget.progressBarSet(100.0 * test_index / float(count))
                del data, graph, valuedict, closuredict, polygon_vertices_dict, \
                    enlarged_closure_dict, other_dict, classes_dict
            except:
                type, val, traceback = sys.exc_info()
                sys.excepthook(type, val, traceback)  # print the exception

    secs = time.time() - start_time
    self.clusterOptimization.setStatusBarText(
        "Finished evaluation (evaluated %d projections in %d min, %d sec)"
        % (test_index, secs / 60, secs % 60))
    self.scatterWidget.progressBarFinished()
def start_three_channel(self):
    start_time_ref = time.time()
    while self.mode == AUTO_3:
        try:
            read_packet = self.sensor_reader.handle_reading(self.sensor_reader.readline())
            # print MOTES_COLOR[int(read_packet[0])] + " : " + read_packet[1]
            key = MOTES_COLOR[int(read_packet[0])]
            if key == 'red':
                red_pid_value = self.red_pid.update(int(read_packet[1]))
                red_pwm_value = LEDModel.get_red_pwm(red_pid_value)
                red_output = "{0}: {1}, {2}, {3}, {4}".format(key, time.time() - start_time_ref,
                                                              int(read_packet[1]), red_pid_value, red_pwm_value)
                print red_output
                self.led.update_value_red(red_pwm_value)
                try:
                    self.output_file.write(red_output + "\n")
                    # self.database.insert_into_control_table(measured_value, pid_value)
                except:
                    # does not matter if saving fails
                    pass
            elif key == 'green':
                green_pid_value = self.green_pid.update(int(read_packet[1]))
                green_pwm_value = LEDModel.get_green_pwm(green_pid_value)
                green_output = "{0}: {1}, {2}, {3}, {4}".format(key, time.time() - start_time_ref,
                                                                int(read_packet[1]), green_pid_value, green_pwm_value)
                print green_output
                self.led.update_value_green(green_pwm_value)
                try:
                    self.output_file.write(green_output + "\n")
                    # self.database.insert_into_control_table(measured_value, pid_value)
                except:
                    # does not matter if saving fails
                    pass
            elif key == 'blue':
                blue_pid_value = self.blue_pid.update(int(read_packet[1]))
                blue_pwm_value = LEDModel.get_blue_pwm(blue_pid_value)
                blue_output = "{0}: {1}, {2}, {3}, {4}".format(key, time.time() - start_time_ref,
                                                               int(read_packet[1]), blue_pid_value, blue_pwm_value)
                print blue_output
                self.led.update_value_blue(blue_pwm_value)
                try:
                    self.output_file.write(blue_output + "\n")
                    # self.database.insert_into_control_table(measured_value, pid_value)
                except:
                    # does not matter if saving fails
                    pass
        except Exception as e:
            print e
            # exception probably caused by reading, sleep to give some time to update
            # time.sleep(0.1)
            pass
def single_measure(self, repeats):
    i = 0
    start_time = time.time()
    while i < repeats:
        try:
            measured_value = int(self.sensor_reader.handle_reading(self.sensor_reader.readline())[1])
            output = "{}, {}, {}".format(time.time() - start_time, i, measured_value)
            print output
            self.output_file.write(output + "\n")
            i = i + 1
        except Exception as e:
            print e
            pass
def cast(self, time, retro=False):
    '''force time to be in workhours'''
    if self.isworktime(time):
        return time  # ok
    if retro:
        if not self.isworkday(time) or time.time() < self.start:
            return _datetime(self.prevworkday(time.date()), self.end)
        # only remaining case is time > self.end on a work day
        return _datetime(time.date(), self.end)
    else:
        if not self.isworkday(time) or time.time() > self.end:
            return _datetime(self.nextworkday(time.date()), self.start)
        # only remaining case is time < self.start on a work day
        return _datetime(time.date(), self.start)
def single_loop(self):
    i = 0
    start_time = time.time()
    while i < 255:
        try:
            self.led.update_all(i)
            # time.sleep(0.5)
            measured_value = int(self.sensor_reader.handle_reading(self.sensor_reader.readline())[1])
            output = "{}, {}, {}".format(time.time() - start_time, i, measured_value)
            print output
            self.output_file.write(output + "\n")
            i += 1
        except Exception as e:
            print e
            pass
def OpenOrder(self, expiry, strike, amount, price, buy_sell="SELL"):
    data = [{
        "put_call": self.put_call,
        "expiry_time": int(expiry),
        "asset": self.asset,
        "strike": str(strike),
        "amount": int(amount),
        "price": str(price),
        "open_close": "OPEN",
        "type": "LIMIT",
        "deriv_type": self.d_type,
        "side": buy_sell
    }]
    if self.real_time:
        return self.coinut.new_orders(data)
    else:
        self.balance += float(price) * int(amount)
        rand_id = ''.join(random.choice(string.ascii_lowercase + string.digits)
                          for _ in xrange(36))
        return [{
            "status": "ORDER_OPEN",
            "put_call": self.put_call,
            "expiry_time": int(expiry),
            "timestamp": time.time(),
            "price": str(price),
            "open_close": "OPEN",
            "deriv_type": self.d_type,
            "side": buy_sell,
            "amount": int(amount),
            "asset": self.asset,
            "strike": str(strike),
            "type": "LIMIT",
            "id": rand_id
        }]
def __init__(self, parent, request, path, name):
    self.path = path
    self.request = request
    self.__name__ = name
    self.__parent__ = parent
    self.lastModifiedTimestamp = float(os.path.getmtime(path)) or time.time()
def current_user(self):
    if not hasattr(self, "_current_user"):
        self._current_user = None
        user_id = CookiesUtil.parseCookie(self.request.cookies.get('fsq_user'),
                                          FOURSQUARE_APP_SECRET)
        if user_id:
            self._current_user = User.get_by_key_name(user_id)

        if (self._current_user == None or self._current_user.access_token == None) and self.request.get("code"):
            args = dict(client_id=FOURSQUARE_APP_ID,
                        redirect_uri=self.request.path_url,
                        client_secret=FOURSQUARE_APP_SECRET,
                        code=self.request.get("code"),
                        grant_type='authorization_code')
            token_url = 'https://foursquare.com/oauth2/access_token?'
            response = json.load(urllib.urlopen(token_url + urllib.urlencode(args)))
            if 'access_token' not in response:
                self.redirect(self.request.path_url)
                return
            access_token = response['access_token']
            response = json.load(urllib.urlopen('https://api.foursquare.com/v2/users/self?'
                                                + urllib.urlencode(dict(oauth_token=access_token))))
            profile = response['response']['user']
            user = User(key_name=str(profile["id"]),
                        id=str(profile["id"]),
                        name=profile["firstName"] + ' ' + profile["lastName"],
                        photo=profile["photo"],
                        access_token=access_token)
            user.put()
            CookiesUtil.setCookie(self.response, 'fsq_user', str(profile["id"]),
                                  expires=time.time() + 30 * 60 * 60 * 24 * 100,
                                  secret=FOURSQUARE_APP_SECRET)
            self._current_user = user
        elif (self._current_user == None or self._current_user.access_token == None) and not self.request.get("code"):
            self._current_user = None
    return self._current_user
def run_tests():
    sys.stdout.write("Iditarod (250 Points)\n\n")
    passed = cases = 0
    case_set = set()
    for arg in sys.argv[1:]:
        case_set.add(int(arg))

    with open("Iditarod.sample", "r") as f:
        while True:
            label = f.readline()
            if not label.startswith("--"):
                break

            times = []
            for i in range(0, int(f.readline())):
                times.append(f.readline().rstrip())
            times = tuple(times)
            f.readline()
            __answer = int(f.readline().rstrip())

            cases += 1
            if len(case_set) > 0 and (cases - 1) in case_set:
                continue
            sys.stdout.write(" Testcase #%d ... " % (cases - 1))
            passed += do_test(times, __answer)

    sys.stdout.write("\nPassed : %d / %d cases\n" % (passed, cases))

    T = time.time() - 1454700573
    PT, TT = (T / 60.0, 75.0)
    points = 250 * (0.3 + (0.7 * TT * TT) / (10.0 * PT * PT + TT * TT))
    sys.stdout.write("Time : %d minutes %d secs\n" % (int(T / 60), T % 60))
    sys.stdout.write("Score : %.2f points\n" % points)
def round(self):
    import math
    #
    # Data to draw the chart. In this demo, the data buffer will be filled by a random
    # data generator. In real life, the data is probably stored in a buffer (eg. a
    # database table, a text file, or some global memory) and updated by other means.
    #

    # We use a data buffer to emulate the last 240 samples.
    sampleSize = 240
    dataSeries1 = [0] * sampleSize
    dataSeries2 = [0] * sampleSize
    dataSeries3 = [0] * sampleSize
    timeStamps = [0] * sampleSize

    # Our pseudo random number generator
    firstDate = chartTime2(time.time()) - len(timeStamps)
    for i in range(0, len(timeStamps)):
        p = firstDate + i
        timeStamps[i] = p
        dataSeries1[i] = math.cos(p * 7 * 18463) * 10 + 1 / (math.cos(p) * math.cos(p) + 0.01) + 20
        dataSeries2[i] = 100 * math.sin(p / 27.7) * math.sin(p / 10.1) + 150
        dataSeries3[i] = 100 * math.cos(p / 6.7) * math.cos(p / 11.9) + 150
def InsertSingleEvent(calendar_client, title='bseu-api event',
                      content='study hard', where='in space',
                      start_time=None, end_time=None, ucalendar=None):
    event = gdata.calendar.data.CalendarEventEntry()
    event.title = atom.data.Title(text=title)
    event.content = atom.data.Content(text=content)
    event.where.append(gdata.calendar.data.CalendarWhere(value=where))

    if start_time is None:
        # Use current time for the start_time and have the event last 1 hour
        start_time = time.strftime('%Y-%m-%dT%H:%M:%S.000Z', time.gmtime())
        end_time = time.strftime('%Y-%m-%dT%H:%M:%S.000Z', time.gmtime(time.time() + 3600))
    else:
        start_time = (start_time - timedelta(hours=3)).strftime('%Y-%m-%dT%H:%M:%S.000Z')
        end_time = (end_time - timedelta(hours=3)).strftime('%Y-%m-%dT%H:%M:%S.000Z')
    event.when.append(gdata.calendar.data.When(start=start_time, end=end_time))

    try:
        if ucalendar is None:
            calendar_client.InsertEvent(event)
        else:
            calendar_client.InsertEvent(event, ucalendar)
    except Exception, e:
        logging.error('import was unsuccessful - skipping: %s' % e)
def write_log(mode, text):
    import datetime
    __time = datetime.datetime.fromtimestamp(int(time.time())).strftime("%a %b %d %H:%M:%S %Y")
    note = '[' + __time + '] [' + mode + '] ' + text + '\n'
    f = open('comment.log', 'a')
    f.write(note)
    f.close()  # the original referenced f.close without calling it, so the file was never closed
def pull_feed(feed_url, posts_to_show=5, cache_expires=30):
    CACHE_FILE = ''.join([CACHE_FOLDER, template.defaultfilters.slugify(feed_url), '.cache'])
    try:
        cache_age = os.stat(CACHE_FILE)[8]
    except:
        # if file doesn't exist, make sure it gets created
        cache_age = 0

    # is cache expired? default 30 minutes (30*60)
    if (cache_age + cache_expires * 60 < time.time()):
        try:
            # refresh cache
            urllib.urlretrieve(feed_url, CACHE_FILE)
        except IOError:
            # if downloading fails, proceed using cached file
            pass

    # load feed from cache
    feed = feedparser.parse(open(CACHE_FILE))
    posts = []
    for i in range(posts_to_show):
        pub_date = feed['entries'][i].updated_parsed
        published = date(pub_date[0], pub_date[1], pub_date[2])
        posts.append({
            'title': feed['entries'][i].title,
            'summary': feed['entries'][i].summary,
            'link': feed['entries'][i].link,
            'date': published,
        })
    return {'posts': posts}
def update_opdrachten(self):
    try:
        r = requests.get('http://jotihunt.net/api/1.0/opdracht')
        r.json()
    except:
        # namedtuple instances are immutable, so status_code is set at construction time
        mockresponse = namedtuple('mockresponse', ['status_code'])
        r = mockresponse(status_code=404)
    if r.status_code == 404 or 'error' in r.json().keys() and r.json()['error'] != self.errors['opdrachten']:
        opdrachten = []
        self.errors['opdrachten'] = r.json()['error']
        self.jh_bot.bot.sendMessage(self.chat_id,
                                    'De jotihuntsite gaf een error tijdens het binnenhalen van de opdrachten: '
                                    + r.json()['error'])
    elif 'error' in r.json().keys():
        opdrachten = []
    else:
        self.errors['opdrachten'] = None
        opdrachten = r.json()['data']
    for o in opdrachten:
        if o['ID'] not in [op.id for op in self.opdrachten]:
            opdracht = Opdracht(o['ID'])
            opdracht.last_warning = time.time()
            self.opdrachten.append(opdracht)
            self.jh_bot.bot.sendMessage(self.chat_id,
                                        'Er is een nieuwe opdracht met de titel: [' + opdracht.titel + ']'
                                        + '(http://jotihunt.net/bericht/?MID=' + o['ID'] + ')'
                                        + ".\n Hier kunnen we " + str(opdracht.max_punten) + " punten mee verdienen."
                                        + " We kunnen hier nog " + str(math.floor(opdracht.remaining_time('uur')))
                                        + ' uur en ' + str(math.floor(opdracht.remaining_time('minuten') % 60))
                                        + ' minuten over doen.',
                                        parse_mode=telegram.ParseMode.MARKDOWN)
def handle_uploaded_file(f):
    # get timestamp
    t = str(time.time()).replace('.', '')
    k = 'file_' + f.name
    path = os.path.join('/Users/fshaw/Desktop/test/', k)
    destination = open(path, 'w+')
    for chunk in f.chunks():
        destination.write(chunk)
    destination.close()
def rate_limit_status(twitter):
    """Print current Twitter API rate limit status."""
    r = twitter.account.rate_limit_status()
    print("Remaining API requests: %i/%i (hourly limit)" % (r['remaining_hits'], r['hourly_limit']))
    print("Next reset in %is (%s)" % (int(r['reset_time_in_seconds'] - time.time()),
                                      time.asctime(time.localtime(r['reset_time_in_seconds']))))
def findTimeSlot(self, time):
    # find the time slot for the student according to scan in time
    for timeslot in self.timeslot:
        if time.time() > timeslot[0] and time.time() < timeslot[1]:
            return self.timeslot[timeslot]
    h, m, p = '{:%I}'.format(time), '{:%M}'.format(time), '{:%p}'.format(time)
    m = int(m)
    if m > 40:
        m = '00'
        h = '{:%I}'.format(time + timedelta(hours=1))
    elif m > 10:
        m = '30'
    else:
        m = '00'
    return h + ':' + m + ' ' + p
def stairs_colors(self):
    i = 0
    self.led.update_all(0)
    start_time = time.time()
    print "Start iterating..."
    while i < 255:
        try:
            self.led.update_rgb_tuple((i, 0, 0))
            # time.sleep(0.5)
            measured_value = int(self.sensor_reader.handle_reading(self.sensor_reader.readline())[1])
            output = "{}, {}, {}".format(time.time() - start_time, i, measured_value)
            print output
            self.output_file.write(output + "\n")
            i += 1
        except Exception as e:
            print e
            pass
    i = 0
    while i < 255:
        try:
            self.led.update_rgb_tuple((0, i, 0))
            # time.sleep(0.5)
            measured_value = int(self.sensor_reader.handle_reading(self.sensor_reader.readline())[1])
            output = "{}, {}, {}".format(time.time() - start_time, i, measured_value)
            print output
            self.output_file.write(output + "\n")
            i += 1
        except Exception as e:
            print e
            pass
    i = 0
    while i < 255:
        try:
            self.led.update_rgb_tuple((0, 0, i))
            # time.sleep(0.5)
            measured_value = int(self.sensor_reader.handle_reading(self.sensor_reader.readline())[1])
            output = "{}, {}, {}".format(time.time() - start_time, i, measured_value)
            print output
            self.output_file.write(output + "\n")
            i += 1
        except Exception as e:
            print e
            pass
def ajax_schedule_last_changed_helper(self, prog):
    ret = {
        'val': str(self.ajax_schedule_get_uuid(prog)),
        'msg': 'UUID that changes every time the schedule is updated',
        'time': time.time(),
        'latest_index': self.get_change_log(prog).get_latest_index()
    }

    response = HttpResponse(content_type="application/json")
    json.dump(ret, response)
    # So that other functions can call this view and get the original return value back
    response.raw_value = ret
    return response
def comment_log(self, comment):
    import datetime
    __time = datetime.datetime.fromtimestamp(int(time.time())).strftime("%a %b %d %H:%M:%S %Y")
    comment = comment.split('_')
    note = '[' + __time + '] [' + comment[0] + '] ' + comment[1] + '\n'
    f = open('comment.log', 'a')
    f.write(note)
    f.close()
    return "Success"
def PassSubmit():
    expert_info = Expert_info.query.filter(Expert_info.UserName == request.values.get("UserName")).first()
    expert_info.Statue = u'可用'
    md = hashlib.md5()
    md.update(request.values.get("UserName"))
    i = str(md.hexdigest())[1:10]
    expert_info.ExpertCertificateID = 'zj-' + i
    vt = time.localtime(time.time() + 31622400)
    expert_info.ValidTime = time.strftime('%Y-%m-%d', vt)
    expert_info.save()
    return json.dumps({'time': time.strftime('%Y-%m-%d', vt), 'ExpertCertificateID': 'zj-' + i})
def get_rrd_pic_path(check_item, cluster_name, schema_name, table_name, days=1):
    full_file_path = create_or_get_rrd_database(check_item, cluster_name, schema_name, table_name)
    full_image_path = rrd_image_path + check_item + '.' + cluster_name
    if schema_name != '':
        full_image_path += '.' + schema_name
    if table_name != '':
        full_image_path += '.' + table_name
    full_image_path += '.%s.png' % days
    # Reference:
    # rrdtool graph target.png --start 1357622790 --end 1357623120 DEF:mymem=target.rrd:mem:AVERAGE LINE1:mymem#FF0000
    graph(str(full_image_path),
          '--start', str(int(time.time()) - 86400 * days),
          '--end', str(int(time.time())),
          '--vertical-label', 'Rows',
          '--title', 'Table Row Count of past %s days' % days,
          str('DEF:myvariable=%s:%s:AVERAGE' % (full_file_path, check_item)),
          'LINE1:myvariable#FF0000')
    return full_image_path.split('/')[-1]
def parseStation(element):
    stop, = element.getElementsByTagName('BasicStop')
    station, = stop.getElementsByTagName('Station')
    x = [stop.getElementsByTagName(x) for x in ('Dep', 'Arr')]
    arrdep, = x[0] + x[1]
    time = Hafas.parseTime(getText(arrdep.getElementsByTagName('Time')[0]))
    dt = datetime.combine(connection.date, time.time().min) + timedelta(
        0,
        time.second + time.minute * 60 + time.hour * 60 * 60 + (time.day - 1) * 24 * 60 * 60)
    return HafasConnectionStation(HafasStation.fromDOMElement(station), dt)
def opdracht_reminders(self):
    reminders = [[None, (1, 'dag'), (1, 'dag')],
                 [(1, 'dag'), (1, 'uur'), (2, 'uur')],
                 [(1, 'uur'), (30, 'minuten'), (10, 'minuten')],
                 [(30, 'minuten'), (10, 'minuten'), (5, 'minuten')],
                 [(15, 'minuten'), (3, 'minuten'), (3, 'minuten')],
                 [(3, 'minuten'), (0, 'minuten'), (1, 'minuten')],
                 [(0, 'minuten'), (-2, 'minuten'), (1, 'minuten')],
                 [(-2, 'minuten'), None, None]]
    for opdracht in self.opdrachten:
        if opdracht.id in skip_reminder or opdracht.ingeleverd is not None:
            return
        for reminder in reminders:
            if None in reminder:
                if reminder[0] is None:
                    reminder[0] = (opdracht.remaining_time() + 9001, 'seconds')
                    while not reminder[0][0] > opdracht.remaining_time(reminder[0][1]):
                        reminder[0] = (opdracht.remaining_time()
                                       + (random.choice([1, -1]) * random.choice(range(1000))), 'seconds')
                if reminder[1] is None:
                    reminder[1] = (opdracht.remaining_time() - 9001, 'seconds')
                    while not reminder[1][0] > opdracht.remaining_time(reminder[1][1]):
                        reminder[1] = (opdracht.remaining_time()
                                       + (random.choice([1, -1]) * random.choice(range(1000))), 'seconds')
            if reminder[0][0] > opdracht.remaining_time(reminder[0][1]) and \
                    reminder[1][0] < opdracht.remaining_time(reminder[1][1]):
                d_time = time.time() - opdracht.last_warning
                if reminder[2] is not None and d_time > convert_tijden(reminder[2][0], reminder[2][1]):
                    opdracht.last_warning = time.time()
                    self.jh_bot.bot.sendMessage(self.chat_id,
                                                'Reminder voor de opdracht: [' + opdracht.titel
                                                + '](http://jotihunt.net/bericht/?MID=' + str(opdracht.id) + ')'
                                                + "\n Hier kunnen we " + str(opdracht.max_punten) + " punt"
                                                + ('en' * (opdracht.max_punten != 0)) + " mee verdienen."
                                                + " We kunnen hier nog " + str(math.floor(opdracht.remaining_time('uur')))
                                                + ' uur en ' + str(math.floor(opdracht.remaining_time('minuten') % 60))
                                                + ' minuten over doen.' + '\n',
                                                parse_mode=telegram.ParseMode.MARKDOWN)
def datetime_filter(t):
    delta = int(time.time() - t)
    if delta < 60:
        return u'1分钟前'
    if delta < 3600:
        return u'%s分钟前' % (delta // 60)
    if delta < 86400:
        return u'%s小时前' % (delta // 3600)
    if delta < 604800:
        return u'%s天前' % (delta // 86400)
    dt = datetime.fromtimestamp(t)
    return u'%s年%s月%s日' % (dt.year, dt.month, dt.day)
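# A minimal usage sketch for datetime_filter above (assumption: it is used as a template
# filter that renders a Unix timestamp as a relative Chinese time label, e.g. "N minutes ago").
import time
print(datetime_filter(time.time() - 120))    # -> u'2分钟前'  ("2 minutes ago")
print(datetime_filter(time.time() - 7200))   # -> u'2小时前'  ("2 hours ago")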