def SaveStepPos(options, timedata, extra_file=-1):
    """Sample the current stage position, timestamp it, and append a line to
    the output file(s).

    Returns the (x, y) position as a numpy array, or 0 when running with
    options.testing_disconnected set (no hardware attached).
    """
    if options.testing_disconnected:
        return 0
    t0 = clock()  # start of this call, used to accumulate save overhead below
    currentPosition = GetPosition()
    currentPosition = array([currentPosition[0], currentPosition[1]])
    #print "getting pos took ", clock() - t0
    # Build a millisecond timestamp from day-of-month plus time of day.
    # NOTE(review): tm_mday resets each month, so these timestamps are not
    # monotonic across a month boundary — confirm this is acceptable.
    t_obj = localtime()
    t_ms = time_now() - floor(time_now())  # sub-second fraction
    t_tot = 0
    t_tot += t_obj.tm_mday * 86400000
    t_tot += t_obj.tm_hour * 3600000
    t_tot += t_obj.tm_min * 60000
    t_tot += t_obj.tm_sec * 1000
    t_tot += t_ms * 1000
    currentTime = t_tot  # Ths should be modified to whatever Teddy wants to have here.
    #print "getting time took ", clock() - t0
    # NOTE(review): the guard tests options.extra_file while the handle being
    # written is the extra_file *parameter* — verify these are meant to differ.
    if options.extra_file != -1 and os.path.exists(options.saving_directory):
        extra_file.write(
            to_str(currentTime, 13) + "\t" + to_str(currentPosition[0]) +
            "\t" + to_str(currentPosition[1]) + "\t 0.000000 \n")
    # outfile is a module-level file handle opened elsewhere in this file.
    outfile.write(
        to_str(currentTime, 13) + "\t" + to_str(currentPosition[0]) + "\t" +
        to_str(currentPosition[1]) + "\t 0.000000 \n")
    #print "writing took ", clock() - t0
    pos_save_time = clock() - t0
    timedata.pos_save_total += pos_save_time
    return currentPosition
def update_me(self):
    """Render the next 2D slice of the 3D data block into the Qt image label,
    wrapping back to slice 0 after the last slice."""
    # Take a single slice (thickness 1) out of the 3D flex array.
    flex_2d_data = self.arr_data[self.slice_pos:self.slice_pos + 1,
                                 0:self.img_w, 0:self.img_h]
    # NOTE(review): the return value of .reshape() is discarded — assumes
    # cctbx flex reshape mutates in place; confirm.
    flex_2d_data.reshape(flex.grid(self.img_w, self.img_h))
    # Convert intensities to an RGB byte array, clipped to [-3.0, 500].
    arr_i = self.arr_img(flex_2d_data, self.flex_2d_mask, i_min=-3.0, i_max=500)
    q_img = QImage(arr_i.data, np.size(arr_i[0:1, :, 0:1]),
                   np.size(arr_i[:, 0:1, 0:1]), QImage.Format_RGB32)
    self.imageLabel.setPixmap(QPixmap.fromImage(q_img))
    self.setWidget(self.imageLabel)
    self.update()
    #self.show()
    #print "self.slice_pos=", self.slice_pos
    dif_time = time_now() - self.my_time  # time since the previous frame
    self.my_time = time_now()
    #print "time spent=", dif_time
    self.slice_pos += 1
    # arr_data.all() gives the grid dimensions; [0] is the slice count.
    if self.slice_pos >= self.arr_data.all()[0]:
        self.slice_pos = 0
def ini_reflection_table(pckl_file_path=None):
    """Load a DIALS reflection table from a pickle and arrange its bounding
    boxes / HKL labels per image (Python 2 code).

    NOTE(review): flat_data_lst is never assigned from the arranged list `a`
    in the n_imgs > 0 branch — confirm whether that is intentional.
    """
    if (pckl_file_path != None):
        table = flex.reflection_table.from_pickle(pckl_file_path)
        local_bbox = table[5]['bbox']  # NOTE(review): unused local
        n_refs = len(table)
        bbox_col = map(list, table["bbox"])
        try:
            hkl_col = map(str, table["miller_index"])
            print hkl_col[0]
            print hkl_col[n_refs - 1]
        except:
            # table has no miller_index column; proceed without HKL labels
            hkl_col = []
        # in the image viewer, the img_select variable is part of the class
        n_imgs = img_select.maximum()
        print "\n\n\n"
        #print "a =", a
        flat_data_lst = []
        if (n_imgs > 0):
            firts_time = time_now()  # NOTE(review): typo for "first_time"
            a = lst_ext.arange_list(bbox_col, hkl_col, n_imgs)
            print a
            print "\n building flat_data_lst (diff time) =", time_now(
            ) - firts_time, "\n"
        else:
            flat_data_lst = [None]
def scraper(url, parse_type, **kwargs):
    """Fetch `url` through the GBF session and parse the JSON response,
    retrying forever on connection/JSON failures and re-authenticating when
    the session appears stale.

    :param url: endpoint to GET
    :param parse_type: parser selector forwarded to parser()
    :param kwargs: extra arguments forwarded to parser()
    :return: parsed rows
    :raises: the original connection error when parse_type == 'guild_members'
    """
    headers = {'Accept': '''application/json'''}
    auth_check = True  # force a re-auth on the very first failure
    auth_timer = 0     # timestamp of the last re-authentication
    while True:
        # Periodic keep-alive refresh every 30 minutes.
        if TIMER.check_timeout(60 * 30):
            GBF.refresh()
            TIMER.reset()
        try:
            response = GBF.request('get', url, headers=headers).json()
            rows = parser(response, parse_type, **kwargs)
            return rows
        except (ConnectionResetError, ConnectionError,
                ConnectionAbortedError, JSONDecodeError):
            if parse_type == 'guild_members':
                raise
            # Re-authenticate when more than 60s have passed since the last
            # attempt, or on the first failure. Bug fix: the original tested
            # `auth_timer - time_now() > 60`, which is always negative.
            if (time_now() - auth_timer > 60) or auth_check:
                GBF.refresh()
                TIMER.reset()
                GBF.get('http://game.granbluefantasy.jp/#authentication')
                sleep(5)
                # Hard coded mobage login
                GBF.find_element_by_xpath(
                    '//*[@id="mobage-login"]/img').click()
                alert_operator('Reauthentication', pause=False)
                auth_timer = time_now()
                auth_check = False  # from now on the 60s timer governs re-auth
            sleep(10)
            continue
        except Exception:
            # Best effort: notify the operator, then keep retrying.
            # (Was a bare `except:` that also swallowed KeyboardInterrupt.)
            alert_operator('???', pause=False)
def execute(self, context):
    """Load a dimension table on Redshift, optionally deleting existing rows
    first, and log the duration of each step.

    :param context: Airflow task context (unused here).
    """
    self.log.info(f'Start {self.target_table} LoadDimensionOperator')
    # connect to Airflow PostgresHook for Redshift connection
    redshift_hook = PostgresHook(postgres_conn_id=self.redshift_conn_id)
    # check if empty table before loading is needed
    if self.delete_records_before_load:
        self.log.info(
            f'Deleting records in the "{self.target_table}" table.')
        delete_records = f'DELETE FROM {self.target_table}'
        start_time = time_now()
        redshift_hook.run(delete_records)
        # Consistency fix: report the duration with the same {took:.2f}
        # seconds formatting used by the sibling operators in this file
        # (the original printed the raw float with no unit).
        self.log.info(
            'Delete {target_table} records took {took:.2f} seconds'.format(
                target_table=self.target_table,
                took=time_now() - start_time))
        self.log.info(f'Delete {self.target_table} records completed!')
    self.log.info(
        f'Loading dimension records into "{self.target_table}" table.')
    start_time = time_now()
    redshift_hook.run(self.sql_query)
    self.log.info(
        'Loading dimension records to "{target_table}" took: {took:.2f} seconds'
        .format(target_table=self.target_table,
                took=time_now() - start_time))
    self.log.info(
        f'Loading dimension records to "{self.target_table}" status: DONE!')
def execute(self, context):
    """Copy data from S3 into a Redshift staging table via a COPY query."""
    self.log.info('Start executing StageToRedshiftOperator')
    # get all relevant connection hook to run command
    # FIXME(review): stubbed hook left over from testing — calling
    # .get_credentials() on a str will fail; restore
    # AwsBaseHook(self.aws_credentials_id) before use.
    aws_hook = 'test'  #AwsBaseHook(self.aws_credentials_id)
    aws_credentials = aws_hook.get_credentials()
    redshift_hook = PostgresHook(postgres_conn_id=self.redshift_conn_id)
    # Bug fix: the context is keyed by 'execution_date' (a datetime); the
    # original looked up the literal key 'execution_date.year'.
    render_key = self.s3_key.format(year=context['execution_date'].year,
                                    month=context['execution_date'].month,
                                    full_date=context["ds"])
    s3_path = f's3://{self.s3_bucket}/{render_key}'
    formatted_copy_query = StageToRedshiftOperator.copy_sql_query.format(
        target_table=self.target_table,
        source=s3_path,
        access_key_id=aws_credentials.access_key,
        secret_access_key=aws_credentials.secret_key,
        ignore_header=self.ignore_header,
        delimiter=self.delimiter,
        format_option=self.format_option,
    )
    self.log.info(f'Copying data to "{self.target_table}" staging table.')
    start_time = time_now()
    redshift_hook.run(formatted_copy_query)
    # Bug fix: the original template used {self.target_table} with
    # str.format(), which raises KeyError because no 'self' kwarg is passed.
    self.log.info(
        'Copying data to "{target_table}" took: {took:.2f} seconds'.format(
            target_table=self.target_table, took=time_now() - start_time))
    self.log.info(f'Copying data to "{self.target_table}" status: DONE!')
def addWebOrder():
    """Create an Order from the posted web form, persist it, and send
    confirmation / over-budget alerts through Twilio."""
    from typing import Dict, List
    from datetime import datetime
    from time import time as time_now
    from DB import DB
    from Order import Order

    # Parse the submitted order date.
    date: datetime = datetime.strptime(request.form["date"], '%m/%d/%Y')
    # Category is a work in progress
    # category= request.form["category"]
    location: str = request.form["loc"]

    # Per-person subtotals, plus an optional extra member from the form.
    subtotals: Dict[str, float] = {
        'Nishant': float(request.form["nishant"]),
        'Arjun': float(request.form["arjun"]),
        'Param': float(request.form["param"]),
    }
    newUserName = request.form["userName"]
    if newUserName:
        subtotals[newUserName] = float(request.form["userTotal"])
    total = float(request.form["total"])

    # Persist the order, timing the database call.
    t0 = time_now()
    excessive: List[Dict] = DB.add(Order(date, location, subtotals, total))
    print("Add time:", time_now() - t0)

    # Confirmation message plus alert for anyone over budget.
    Twilio.Communication.send(
        "\nAdded order from %s for a total of $%.2f" % (location, total),
        "Nishant")
    Twilio.alert(excessive)
def ini_reflection_table(self, pckl_file_path):
    """Load a reflection-table pickle, build self.flat_data_lst (per-image
    bounding boxes / HKL labels), and redraw the current image
    (Python 2 code)."""
    if (pckl_file_path != None):
        firts_time = time_now()  # NOTE(review): typo for "first_time"
        print "[pickle file] =", pckl_file_path
        table = flex.reflection_table.from_pickle(pckl_file_path)
        print "table =", table
        print "len(table) = ", len(table)
        n_refs = len(table)
        bbox_col = map(list, table["bbox"])
        try:
            hkl_col = map(str, table["miller_index"])
        except:
            # table has no miller_index column; proceed without HKL labels
            hkl_col = []
        n_imgs = self.img_select.maximum()
        self.flat_data_lst = []
        if (n_imgs > 0):
            self.flat_data_lst = lst_arange(bbox_col, hkl_col, n_imgs)
            print "\n building flat_data_lst (diff time) =", time_now(
            ) - firts_time, "\n"
        else:
            self.flat_data_lst = [None]
        self.set_img()
def SaveStepPos(options, timedata, extra_file=-1):
    """Sample the current stage position, timestamp it, and append a line to
    the output file(s). (Duplicate of another SaveStepPos in this file.)

    Returns the (x, y) position as a numpy array, or 0 when running with
    options.testing_disconnected set (no hardware attached).
    """
    if options.testing_disconnected:
        return 0
    t0 = clock()  # start of this call, used to accumulate save overhead below
    currentPosition = GetPosition()
    currentPosition = array([currentPosition[0], currentPosition[1]])
    #print "getting pos took ", clock() - t0
    # Millisecond timestamp built from day-of-month plus time of day.
    # NOTE(review): tm_mday resets each month — timestamps are not monotonic
    # across a month boundary.
    t_obj = localtime()
    t_ms = time_now() - floor(time_now())  # sub-second fraction
    t_tot = 0
    t_tot += t_obj.tm_mday*86400000
    t_tot += t_obj.tm_hour*3600000
    t_tot += t_obj.tm_min*60000
    t_tot += t_obj.tm_sec*1000
    t_tot += t_ms*1000
    currentTime = t_tot  # Ths should be modified to whatever Teddy wants to have here.
    #print "getting time took ", clock() - t0
    # NOTE(review): guard tests options.extra_file but writes the extra_file
    # parameter — verify these are meant to be different.
    if options.extra_file != -1 and os.path.exists(options.saving_directory):
        extra_file.write(to_str(currentTime, 13) + "\t" +
                         to_str(currentPosition[0]) + "\t" +
                         to_str(currentPosition[1]) + "\t 0.000000 \n")
    # outfile is a module-level file handle opened elsewhere in this file.
    outfile.write(to_str(currentTime, 13) + "\t" +
                  to_str(currentPosition[0]) + "\t" +
                  to_str(currentPosition[1]) + "\t 0.000000 \n")
    #print "writing took ", clock() - t0
    pos_save_time = clock() - t0
    timedata.pos_save_total += pos_save_time
    return currentPosition
def _wrapped(*args, **kwargs):
    """Invoke the wrapped callable, always reporting its duration (ms) to
    the stats client — even when it raises."""
    began = time_now()
    try:
        return f(*args, **kwargs)
    finally:
        elapsed_ms = (time_now() - began) * 1000.0
        self._client.timing(self._stat, elapsed_ms, self._tags, self._rate)
def execute(self, context):
    """Run a row-count data-quality check against the target table.

    :raises ValueError: when the query returned no result, or the table
        contains no rows.
    """
    self.log.info('Start performing data quality check')
    redshift_hook = PostgresHook(postgres_conn_id=self.redshift_conn_id)
    self.log.info(f'Checking data quality on "{self.target_table}" table')
    start_time = time_now()
    table_records = redshift_hook.get_records(
        f'SELECT COUNT(*) FROM {self.target_table}')
    self.log.info(
        'Data quality check took on "{target_table}": {took:.2f} seconds'.
        format(target_table=self.target_table,
               took=time_now() - start_time))
    if len(table_records) < 1 or len(table_records[0]) < 1:
        self.log.warn(
            f'Data quality check on "{self.target_table}" status: FAILED!')
        self.log.warn(
            f'Data quality check response: "{self.target_table}" return no results'
        )
        raise ValueError(
            f'Data quality check failed on {self.target_table}')
    # Bug fix: SELECT COUNT(*) always returns exactly one row, so the length
    # checks above never detect an empty table. Inspect the count value
    # itself, consistent with the sibling quality-check operator in this file.
    num_records = table_records[0][0]
    if num_records < 1:
        self.log.warn(
            f'Data quality check on "{self.target_table}" status: FAILED!')
        self.log.warn(
            f'Data quality check response: "{self.target_table}" contained 0 rows'
        )
        raise ValueError(
            f'Data quality check failed on {self.target_table}')
    self.log.info(
        f'Data quality check on "{self.target_table}" status: PASSED!')
    self.log.info(
        f'Number of records on "{self.target_table}" table: {num_records}')
def _wrapped(*args, **kwargs):
    """Call through to the wrapped function, timing it even on exceptions
    and reporting the elapsed milliseconds to the client."""
    t0 = time_now()
    try:
        return f(*args, **kwargs)
    finally:
        self.client.timing(self.stat, (time_now() - t0) * 1000.0, self.rate)
def simostrar(self):
    """Decide whether the timer indicator should be shown and tune the
    widget poll interval accordingly (Enigma2 skin helper).

    Returns True when a timer is imminent (prepared, or starting within
    50 seconds) or waiting; False otherwise or while a recording runs.
    """
    self.recordings = self.source.boolean
    ret = False
    self.poll_interval = 120000  # default: re-check every 2 minutes
    debugtxt('simostrar')
    if self.recordings:
        ret = False
    else:
        try:
            for timer in NavigationInstance.instance.RecordTimer.timer_list:
                # NOTE(review): `and` binds tighter than `or`, so this parses
                # as: StatePrepared OR (starts in [0, 50)s AND not disabled).
                # A *disabled* StatePrepared timer therefore still matches —
                # confirm that is intended.
                if timer.state == timer.StatePrepared or timer.begin - time_now(
                ) < 50 and timer.begin - time_now(
                ) >= 0 and not timer.disabled:
                    self.poll_interval = 5000  # imminent: poll every 5s
                    ret = True
                    debugtxt('simostrar hay')
                    break
                elif timer.state == timer.StateWaiting and not timer.disabled:
                    self.poll_interval = 30000  # waiting: poll every 30s
                    debugtxt('simostrar hay')
                    ret = True
                    break
        except:
            # timer list unavailable (e.g. during startup) — show nothing
            pass
    return ret
def execute(self, context):
    """Data-quality check: fail the task when the target table is empty."""
    self.log.info('Start performing data quality check')
    hook = PostgresHook(postgres_conn_id=self.redshift_conn_id)
    self.log.info(f'Checking data quality on "{self.target_table}" table')
    check_started = time_now()
    records = hook.get_records(f'SELECT COUNT(*) FROM {self.target_table}')
    self.log.info(
        'Data quality check took on "{target_table}": {took:.2f} seconds'.
        format(target_table=self.target_table,
               took=time_now() - check_started))
    # COUNT(*) yields a single row with a single value: the row count.
    row_count = records[0][0]
    if row_count < 1:
        self.log.warn(
            f'Data quality check on "{self.target_table}" status: FAILED!')
        self.log.warn(
            f'Data quality check response: "{self.target_table}" contained 0 rows'
        )
        raise ValueError(
            f'Data quality check failed on {self.target_table}')
    self.log.info(
        f'Data quality check on "{self.target_table}" status: PASSED!')
    self.log.info(
        f'Number of records on "{self.target_table}" table: {row_count}')
def add_tweet_for_tracking(tweet_id, update_every=30, start_tracking_now=True,
                           if_previously_paused="resume"):
    """Begin tracking retweet activity for a tweet.

    :param tweet_id: id of the tweet to track
    :param update_every: polling period for this tweet
    :param start_tracking_now: append to the active list rather than paused
    :param if_previously_paused: "resume" to resume a paused tweet,
        "restart" to delete its tracked history
    """
    # Already actively tracked: nothing to do.
    for tweet in tracked_tweets:
        if tweet["id"] == tweet_id:
            return
    # Previously tracked but paused: resume or restart per the flag.
    for tweet in currently_untracked_tweets:
        if tweet["id"] == tweet_id:
            if if_previously_paused == "resume":
                resume_tracking(tweet["id"], tweet)
            # Bug fix: this branch referenced the undefined name
            # `if_previously_resumed`, raising NameError whenever reached.
            elif if_previously_paused == "restart":
                delete_tracked_tweet_and_all_its_retweets(
                    tweet["id"], tweet, currently_untracked_tweets)
                # NOTE(review): "restart" deletes and then returns without
                # re-creating the tweet below — confirm that is intended.
            return
    # Fresh tweet: build its tracking record.
    tweet = {}
    tweet["id"] = tweet_id
    tweet["retweets"] = twitter_api.statuses.retweets(_id=tweet_id)
    tweet["object"] = twitter_api.statuses.show(_id=tweet_id)
    tweet["last_updated"] = time_now()
    tweet["update_every"] = update_every
    tweet["last_hour_checkpoint"] = time_now()
    tweet["retweets_per_hour_list"] = []
    tweet["retweet_count_an_hour_ago"] = 0
    tweet[
        "exponentially_smoothed_per_hour_rate"] = initial_exponentially_smoothed_per_hour_rate
    if start_tracking_now:
        tracked_tweets.append(tweet)
    else:
        # NOTE(review): appends to not_currently_tracked_tweets while the
        # paused list scanned above is currently_untracked_tweets — verify
        # these refer to the same list.
        not_currently_tracked_tweets.append(tweet)
def reset_board(self):
    """
    @overrides TelecortexBaseSession.reset_board

    Re-initialize the controller over serial: refresh idle bookkeeping,
    drop pending output, issue the M9999 reset command, flush whatever the
    board answers, and restart line numbering at 0. The statement order
    follows the device protocol — do not reorder casually.
    """
    self.last_idle = time_now()
    # NOTE(review): "last_loo_rate" looks like a typo (last_loop_rate?) —
    # confirm against the rest of the class before renaming.
    self.last_loo_rate = time_now()
    self.ser.reset_output_buffer()
    self.send_cmd_without_linenum("M9999")  # board reset command
    self.flush_in()
    self.set_linenum(0)
def main():
    """Drive several telecortex controllers in parallel, rendering rainbow
    frames and re-seeding the pattern every five minutes (or when the frame
    counter would exceed 16 bits)."""
    conf = TeleCortexThreadManagerConfig(
        name="parallel_jvb",
        description=
        "send f****d up rainbow circles to several telecortex controllers in parallel",
        default_config='dome_simplified')
    conf.parse_args()
    conf.parser.print_help()
    pix_map_normlized_smol = MAPS_DOME_SIMPLIFIED['smol']
    pix_map_normlized_big = MAPS_DOME_SIMPLIFIED['big']
    frameno = 0
    seed = random.random() * 50
    start_time = time_now()
    five_minutes = 60 * 5
    manager = conf.setup_manager()
    while manager.any_alive:
        frameno += 1
        # Bug fix: elapsed time is now - start (the original computed
        # `start_time - time_now()`, which is always negative, so the 5-minute
        # reseed never fired). Also reset start_time so the reseed happens
        # once per interval instead of on every subsequent frame.
        if frameno > 2**16 or (time_now() - start_time > five_minutes):
            frameno = 0
            seed = random.random()
            start_time = time_now()
        driver = PanelDriver(pix_map_normlized_smol, pix_map_normlized_big,
                             IMG_SIZE, MAX_HUE, MAX_ANGLE)
        pixel_list_smol, pixel_list_big = driver.crazy_rainbows(frameno, seed)
        pixel_str_smol = pix_array2text(*pixel_list_smol)
        pixel_str_big = pix_array2text(*pixel_list_big)
        for server_id, server_panel_info in conf.panels.items():
            if not manager.sessions.get(server_id):
                continue
            for panel_number, map_name in server_panel_info:
                # Map name encodes the panel size before the first dash.
                size = map_name.split('-')[0]
                if size == 'big':
                    pixel_str = pixel_str_big
                elif size == 'smol':
                    pixel_str = pixel_str_smol
                else:
                    raise UserWarning('panel size unknown')
                manager.chunk_payload_with_linenum(server_id, "M2600",
                                                   {"Q": panel_number},
                                                   pixel_str)
        manager.wait_for_workers_idle()
        for server_id in manager.sessions.keys():
            manager.chunk_payload_with_linenum(server_id, "M2610", None, None)
def __init__(self):
    """Set up counters and bookkeeping for a tweet-statistics pass."""
    self.lock = Lock()  # for parallel processing
    # Frequency tables keyed by URL / emoji / hashtag.
    self.urls = Counter()
    self.emojis = Counter()
    self.hashtags = Counter()
    # Time bounds, both initialized to "now".
    self.time_start = time_now()
    self.time_end = time_now()
    # Total tweets seen so far.
    self.count = 0
    # this is a list of emojis
    self.emoji_characters = Emoji().emojis
    # Per-feature tweet tallies.
    self.count_tweet_containing_url = 0
    self.count_tweet_containing_photo_url = 0
    self.count_tweet_containing_emoji = 0
def onPositionRequested(self):
    """survey instance wants a position, trigger send position emit

    Raises MOOSConnectionWarning when no position has been received yet,
    MOOSPositionWarning when the last fix is older than self.time_buffer;
    otherwise emits the requested position fields via sendPosition.
    """
    # Bug fix: the `return` statements that followed each `raise` were
    # unreachable dead code and have been removed; identity comparisons
    # with None now use `is None`.
    if self.current_position is None or self.current_position_time is None:
        raise MOOSConnectionWarning('Nones in current position')
    elif time_now() - self.current_position_time > self.time_buffer:
        raise MOOSPositionWarning('Time since last update too old - Disconnected?')
    else:
        print('\n\tTime Now: %f\t current_position_time: %f' % \
            (time_now(), self.current_position_time))
        # Emit only the variables the survey asked for, in order.
        out = (self.current_position[p] for p in self.desired_variables)
        self.sendPosition.emit(out)
def onPositionRequested(self):
    """survey instance wants a position, trigger send position emit

    Raises MOOSConnectionWarning when no position has been received yet,
    MOOSPositionWarning when the last fix is older than self.time_buffer;
    otherwise emits the requested position fields via sendPosition.
    (Duplicate of another onPositionRequested in this file.)
    """
    # Bug fix: removed the unreachable `return` statements that followed
    # each `raise`; None comparisons now use identity (`is None`).
    if self.current_position is None or self.current_position_time is None:
        raise MOOSConnectionWarning('Nones in current position')
    elif time_now() - self.current_position_time > self.time_buffer:
        raise MOOSPositionWarning(
            'Time since last update too old - Disconnected?')
    else:
        print('\n\tTime Now: %f\t current_position_time: %f' % \
            (time_now(), self.current_position_time))
        # Emit only the variables the survey asked for, in order.
        out = (self.current_position[p] for p in self.desired_variables)
        self.sendPosition.emit(out)
def fetch_input(self) -> None:
    """Poll the device and, when raw data is available, rebuild
    self.input_state (sticks, triggers, buttons, and change events)."""
    self.last_update = time_now()
    if not self._try_fetch_raw():
        return
    pad = self._raw_state.Gamepad
    events = {}
    # Analog sticks.
    analog_l = self._read_analog(self.input_state.analog_l, pad.sThumbLX,
                                 pad.sThumbLY, events)
    analog_r = self._read_analog(self.input_state.analog_r, pad.sThumbRX,
                                 pad.sThumbRY, events)
    # Triggers.
    trigger_l = self._read_trigger(self.input_state.trigger_l,
                                   pad.bLeftTrigger, events)
    trigger_r = self._read_trigger(self.input_state.trigger_r,
                                   pad.bRightTrigger, events)
    # Digital buttons.
    buttons = {}
    for btn_code in ButtonFlags:
        self._read_button(btn_code, events, buttons)
    # Publish the new official state.
    self.input_state = GamePadState(analog_l=analog_l,
                                    analog_r=analog_r,
                                    trigger_l=trigger_l,
                                    trigger_r=trigger_r,
                                    buttons=buttons,
                                    events=events)
def test_is_access_token_expired(increase_to_time_now: int,
                                 is_token_expired: bool, backend_client):
    """Explanation: '3534' ~ '58,9' minutes, so we increase time() with it and check for expiration."""
    patched_time = "requests_ms_auth.ms_backend_application_client.time.time"
    with patch(patched_time) as time_mock:
        # adjust time() for comparing with '_expire_at'
        time_mock.return_value = time_now() + increase_to_time_now
        res = backend_client.is_access_token_expired()
    assert res is is_token_expired
def limit_and_return(self, motion):
    """
    Apply limits to the requested motion based on the current state of the
    MotionLimit, returning the closest Motion which complies with the
    specified limits.

    :param triangula.chassis.Motion motion:
        The requested :class:`triangula.chassis.Motion`,
    :return:
        The modified motion, or the supplied one if it complied with the
        limits
    """
    now = time_now()
    # First call: nothing to limit against yet, accept the motion verbatim.
    if self.last_motion is None:
        self.last_motion = motion
        self.last_motion_time = now
        return motion
    # Requested linear acceleration magnitude in mm/s/s implied by the
    # change from the previous motion, plus the angular equivalent.
    dt = now - self.last_motion_time
    linear_accel = abs(motion.translation - self.last_motion.translation) / dt
    angular_accel = abs(motion.rotation - self.last_motion.rotation) / dt
    # Scale back towards the previous motion until both limits are met.
    scale = 1.0
    if linear_accel > self.linear_acceleration_limit:
        scale = self.linear_acceleration_limit / linear_accel
    if angular_accel > self.angular_acceleration_limit:
        scale = min(scale, self.angular_acceleration_limit / angular_accel)
    blended_translation = (motion.translation * scale +
                           self.last_motion.translation * (1.0 - scale))
    ":type : euclid.Vector2"
    blended_rotation = (motion.rotation * scale +
                        self.last_motion.rotation * (1.0 - scale))
    limited = Motion(rotation=blended_rotation,
                     translation=blended_translation)
    self.last_motion = limited
    self.last_motion_time = now
    return limited
def simostrar(self):
    """Return the blink/show state for the timer indicator (Enigma2 widget).

    0 = hide, 1 = a waiting timer exists, 2 = a timer is imminent
    (prepared, or due within 50 seconds). Also tunes self.poll_interval:
    5000ms while blinking is possible, 30000ms otherwise.
    """
    self.recordings = self.source.boolean
    ret=0
    if self.recordings:
        # A recording is running: nothing to indicate, slow polling down.
        ret=0
        if self.poll_interval!=30000:
            self.poll_interval=30000
    elif self.type == self.HayTimer:
        try:
            for timer in NavigationInstance.instance.RecordTimer.timer_list:
                # Imminent: prepared, or due within [0, 50) seconds.
                # self.noblink suppresses this blinking state entirely.
                if not self.noblink and (timer.state == timer.StatePrepared or ((timer.begin - time_now()) < 50 and (timer.begin - time_now())>=0)) and not timer.disabled:
                    ret=2
                    break
                if timer.state == timer.StateWaiting and not timer.disabled:
                    ret=1
                    if self.noblink:
                        break;
        except:
            # timer list unavailable (e.g. during startup) — keep ret as is
            pass
        # noblink mode never needs fast polling; blink mode polls every 5s.
        if self.noblink:
            if self.poll_interval!=30000:
                self.poll_interval=30000
        else:
            if self.poll_interval!=5000:
                self.poll_interval=5000
    return ret
def _make_fc_config(index: Index, config: dict, dry_run):
    """
    Refine output fc configuration file.
    Before returning the updated file, ensure that the products exist for the
    given FC config.
    """
    # Create the output location if needed; exist_ok avoids the race between
    # the original's exists() check and makedirs(). Then verify writability.
    os.makedirs(config['location'], exist_ok=True)
    if not os.access(config['location'], os.W_OK):
        _LOG.warning(
            'Current user appears not have write access output location: %s',
            config['location'])
    source_product, output_product = _ensure_products(config, index, dry_run)
    # The input config has `source_product` and `output_product` fields which
    # are names. Perhaps these should just replace them?
    config['nbart_product'] = source_product
    config['fc_product'] = output_product
    config['variable_params'] = _build_variable_params(config)
    if 'task_timestamp' not in config:
        config['task_timestamp'] = int(time_now())
    return config
async def do_task(self, url):
    """Fetch one URL, extract social links from the page off the event loop,
    persist the outcome, and update queue/worker accounting."""
    page = await self.get_page(self.restore_url(url))
    record = {
        'url': url,
        'domain': urlparse(url).netloc,
    }
    if page['status'] == 200:
        # CPU-ish parsing runs in the executor to keep the loop responsive.
        parsed = await self.loop.run_in_executor(self.executor,
                                                 self.get_socials,
                                                 page['data'])
        if parsed['status']:
            record['status'] = 'OK'
            record['links'] = parsed['data']
        else:
            record['status'] = 'error'
            record['error'] = parsed['error']
    else:
        record['status'] = 'error'
        if page['status']:
            record['error'] = 'get status : {}, with reason : {}'.format(
                page['status'], page['reason'])
        else:
            record['error'] = page['error']
    await self.save_to_db(record)
    self.queue.task_done()
    self.coro -= 1
    self.count += 1
    # Progress line every 100 completed tasks.
    if not self.count % 100:
        print('ready {} in {} sec'.format(self.count,
                                          (time_now() - self.start_time)))
def limit_and_return(self, motion):
    """
    Apply limits to the requested motion based on the current state of the
    MotionLimit, returning the closest Motion which complies with the
    specified limits.

    :param triangula.chassis.Motion motion: The requested
        :class:`triangula.chassis.Motion`,
    :return: The modified motion, or the supplied one if it complied with
        the limits
    """
    now = time_now()
    if self.last_motion is None:
        # No history yet — nothing to limit against.
        self.last_motion = motion
        self.last_motion_time = now
        return motion
    # Calculate the requested linear acceleration magnitude in mm/s/s to
    # achieve the desired motion change.
    elapsed = now - self.last_motion_time
    translation_change = abs(motion.translation - self.last_motion.translation)
    requested_linear = translation_change / elapsed
    requested_angular = abs(motion.rotation - self.last_motion.rotation) / elapsed
    # A single scaling factor in (0, 1] satisfies both limits at once.
    factor = 1.0
    if requested_linear > self.linear_acceleration_limit:
        factor = self.linear_acceleration_limit / requested_linear
    if requested_angular > self.angular_acceleration_limit:
        factor = min(factor, self.angular_acceleration_limit / requested_angular)
    new_translation = motion.translation * factor + \
        self.last_motion.translation * (1.0 - factor)
    ':type : euclid.Vector2'
    new_rotation = motion.rotation * factor + \
        self.last_motion.rotation * (1.0 - factor)
    result = Motion(rotation=new_rotation, translation=new_translation)
    self.last_motion = result
    self.last_motion_time = now
    return result
def __repr__(self):
    """Returns the representation of the achievement."""
    # Active while the creation time plus lifetime is still in the future.
    if self.created_at.timestamp() + self.expires_in > time_now():
        state = 'active'
    else:
        state = 'expired'
    return (f'<{self.__class__.__name__} {state}, '
            f'access_token={self.access_token!r}, '
            f'scopes count={len(self.scopes)}>')
def simostrar(self):
    """Decide whether the timer indicator should be shown and tune the poll
    interval (Enigma2 skin helper; duplicate of another simostrar here).

    Returns True when a timer is imminent (prepared, or starting within
    50 seconds) or waiting; False otherwise or while a recording runs.
    """
    self.recordings = self.source.boolean
    ret = False
    self.poll_interval = 120000  # default: re-check every 2 minutes
    debugtxt('simostrar')
    if self.recordings:
        ret = False
    else:
        try:
            for timer in NavigationInstance.instance.RecordTimer.timer_list:
                # NOTE(review): `and` binds tighter than `or` — parses as
                # StatePrepared OR (due in [0, 50)s AND not disabled), so a
                # disabled StatePrepared timer still matches. Confirm intent.
                if timer.state == timer.StatePrepared or timer.begin - time_now() < 50 and timer.begin - time_now() >= 0 and not timer.disabled:
                    self.poll_interval = 5000  # imminent: poll every 5s
                    ret = True
                    debugtxt('simostrar hay')
                    break
                elif timer.state == timer.StateWaiting and not timer.disabled:
                    self.poll_interval = 30000  # waiting: poll every 30s
                    debugtxt('simostrar hay')
                    ret = True
                    break
        except:
            # timer list unavailable (e.g. during startup) — show nothing
            pass
    return ret
def after_requests(response):
    """Flask after-request hook: when logging is enabled, emit one
    pipe-delimited access-log line per request (skipping favicon and
    static assets), then return the response unchanged."""
    if not app.config['ENABLE_LOGGING']:
        return response
    if request.path == '/favicon.ico' or request.path.startswith('/static'):
        return response
    now = time_now()
    duration = round(now - g.start, 2)  # g.start set by the before-request hook
    dt = datetime.datetime.fromtimestamp(now)
    timestamp = rfc3339(dt, utc=True)
    # Honour proxy headers for the client address.
    ip = request.headers.get('X-Forwarded-For', request.remote_addr)
    host = request.host.split(':', 1)[0]
    args = dict(request.args)
    log_params = [
        ('method', request.method),
        ('path', request.path),
        ('status', response.status_code),
        ('duration', duration),
        ('time', timestamp),
        ('ip', ip),
        ('host', host),
        ('params', args),
    ]
    line = " | ".join(
        "{}={}".format(name, value) for name, value in log_params)
    app.logger.info(line)
    return response
def _update(self):
    """Advance this medication's cycle window up to the present.

    When the current cycle has already ended: record any shortfall as
    missed doses, then step cycle_end forward one cycle length at a time
    until it passes today, resetting doses_taken for the fresh cycle.
    """
    # checks if the cycle_end is in the past
    # ex: spiro last_taken = 2019-04-20 and spiro cycle_end = 1
    # if spiro last_taken (20) + spiro cycle (1) less than date today (21)
    # spiro advances to next cycle_end
    if self.cycle_end < time_now():
        debug_log(f"{self}._update - {self.cycle_end} < {time_now()}")
        # date_ce = date cycle ends
        date_ce = timestamp_to_date(self.cycle_end)
        debug_log("_update for", str(self), "starting date_ce", date_ce)
        # NOTE(review): missed_doses is overwritten, not accumulated across
        # multiple skipped cycles — confirm that is intended.
        if self.doses_taken < self.doses_per_cycle:
            self.missed_doses = self.doses_per_cycle - self.doses_taken
        # cycle_end = (cycle_end + cycle) until cycle_end + cycle > date_today
        # doses_taken = 0
        # while (date_ce + timedelta(days=self.cycle_days)) < date.today():
        while date_ce <= date.today():
            # date_ce = date_ce + timedelta(days=self.cycle_days)
            debug_log('_update increasing date_ce', date_ce)
            date_ce = increase_date(date_ce, self.cycle_days)
            self.doses_taken = 0
        debug_log('_update updated date_ce is', date_ce)
        # self.cycle_end = int(mktime(strptime(str(date_ce), '%Y-%m-%d')))
        self.cycle_end = date_to_timestamp(date_ce)
def chance(self):
    """Lazily compute, cache, and return this item's selection chance."""
    if self._chance is None:
        # Chance grows with time since the item was last presented,
        # weighted per item.
        age = time_now() - self.last_present
        self._chance = (1.0 + age * DAY_CHANCE_MULTIPLIER) * self.weight
    return self._chance
def update_gamepads():
    """Poll every enabled XInput gamepad for fresh input."""
    for pad in XInputGamepad:
        if pad.disabled:
            continue
        # update connected devices or try to reconnect after 5 secs
        if pad.connected() or time_now() - pad.last_update > 5:
            pad.fetch_input()
async def _beat(self):
    """Send a heartbeat frame carrying the current time in milliseconds."""
    await self.send_as_json({
        'op': self.HEARTBEAT,
        'd': int(time_now() * 1000),
    })
def _make_wofs_config(index, config, dry_run):
    """
    Refine the configuration

    The task-app machinery loads a config file, from a path specified on the
    command line, into a dict. This function is an opportunity, with access
    to the datacube index, to modify that dict before it is used (being
    passed on to both the make-tasks and the do-task).

    For a dry run, we still need to create a dummy DatasetType to generate
    tasks (e.g. via the GridSpec), but a normal run must index it as a
    product in the database and replace the dummy with a fully-fleshed
    DatasetType since the tasks involve writing metadata to file that is
    specific to the database instance (note, this may change in future).
    """
    config['wofs_dataset_type'] = _get_product(index,
                                               config['product_definition'],
                                               dry_run)
    # Bug fix: log *after* the product has actually been created — the
    # original logged "Created DatasetType" before calling _get_product,
    # with a "true? not yet" remark acknowledging the inaccuracy.
    if not dry_run:
        _LOG.info('Created DatasetType %s',
                  config['product_definition']['name'])
    config['variable_params'] = _build_variable_params(config)
    if 'task_timestamp' not in config:
        config['task_timestamp'] = int(time_now())
    if not os.access(config['location'], os.W_OK):
        _LOG.error(
            'Current user appears not have write access output location: %s',
            config['location'])
    return config
def stop(self, send=True):
    """Stop the timer, record the elapsed time in self.ms (milliseconds),
    optionally send the measurement, and return self for chaining.

    :raises RuntimeError: when the timer was never started.
    """
    if self._start_time is None:
        raise RuntimeError('Timer has not started.')
    elapsed = time_now() - self._start_time
    # Convert to milliseconds.
    self.ms = 1000.0 * elapsed
    if send:
        self.send()
    return self
def pay(self):
    """Complete this purchase: hand the bought items to the user's store
    account and mark the transaction done. A completed purchase is a no-op."""
    if self.completed:
        return
    catalogue = StoreList.get(self.game)
    buyer = catalogue.get_store_user(self.user)
    buyer.transfer_items(self)
    # Mark completion only after the transfer call returned.
    self.completed_time = time_now()
    self.completed = True
def test_get_nodes_after_time(self):
    """get_nodes_after_time(t) must return exactly the nodes changed since
    t: editing a node reports that node plus all of its ancestors, and a
    fresh timestamp yields the empty set."""
    now = time_now()
    # Build a six-node tree (pks 0-5): two children under root, one branch
    # two levels deep.
    tree = SyncTree(**temp_info)
    root = tree.root  # pk 0
    root_child1 = tree.add_node(root, **temp_info)  # pk 1
    root_child2 = tree.add_node(root, **temp_info)  # pk 2
    root_child1_child1 = tree.add_node(root_child1, **temp_info)  # pk 3
    root_child2_child1 = tree.add_node(root_child2, **temp_info)  # pk 4
    root_child2_child1_child1 = tree.add_node(root_child2_child1,
                                              **temp_info)  # pk 5
    set_all = set([
        root, root_child1, root_child2, root_child1_child1,
        root_child2_child1, root_child2_child1_child1
    ])
    empty = set()
    # Everything was created after `now`, so all nodes report as changed.
    tree.refresh_tree()
    changed_nodes = tree.get_nodes_after_time(now)
    self.assertSetEqual(changed_nodes, set_all)
    # Nothing has changed since this fresh timestamp.
    now = time_now()
    changed_nodes = tree.get_nodes_after_time(now)
    self.assertSetEqual(changed_nodes, empty)
    # Editing a leaf reports the leaf and its ancestors up to the root.
    root_child1_child1.abc = "asg"
    tree.refresh_tree()
    changed_nodes = tree.get_nodes_after_time(now)
    self.assertSetEqual(changed_nodes,
                        set([root_child1_child1, root_child1, root]))
    # Editing the root adds no nodes beyond those already changed.
    root.abd = "asg"
    tree.refresh_tree()
    changed_nodes = tree.get_nodes_after_time(now)
    self.assertSetEqual(changed_nodes,
                        set([root_child1_child1, root_child1, root]))
    now = time_now()
    changed_nodes = tree.get_nodes_after_time(now)
    self.assertSetEqual(changed_nodes, empty)
    # Edits on two different branches report both paths to the root.
    root_child2_child1_child1.abc = "def"
    root_child1.abc = "def"
    tree.refresh_tree()
    self.assertEqual(
        tree.get_nodes_after_time(now),
        set([
            root, root_child1, root_child2, root_child2_child1,
            root_child2_child1_child1
        ]))
def get_photo(self, user, title):
    """Find one of the user's public Flickr photos by title, publish it to
    the MessageHub, and answer the HTTP request with 200 (Python 2 code).

    NOTE(review): the Flickr API key is hard-coded below — move it to
    configuration/secret storage.
    """
    # Resolve the stored Flickr user id for this user.
    fluid = User.get(name=equals(user)).flickr_uid[0].uid
    print "Looking for title " + title
    params = {
        'api_key': "745bf5cec0e4c9a5e9d225ce015b2e84",
        'method': "flickr.people.getPublicPhotos",
        'user_id': fluid,
        'format': 'json'
    }
    # [14:-1] strips the JSONP-style wrapper around the JSON payload
    # (assumes a fixed 14-char prefix and a trailing ')' — confirm).
    resp = loads(self._get_request(
        'api.flickr.com',
        '/services/rest/?' + urlencode(params)).read()[14:-1])
    # First photo whose title matches the (URL-decoded) requested title.
    photo = filter(lambda p: p['title'] == unquote(title),
                   resp['photos']['photo'])[0]
    photo.update({'user': user, 'type': 'photo', 'time': time_now()})
    find_type('MessageHub').put_message(photo)
    response.send(200)
def __init__(self, pk, update_hash_queue, _depth=0, **info_data):
    """Create a tree node; bookkeeping attributes are installed through
    _set_base_attribute so they bypass the info-attribute machinery.

    :param pk: primary key of this node
    :param update_hash_queue: queue used to schedule hash refreshes
    :param _depth: depth of this node within the tree (root = 0)
    :param info_data: payload stored in the wrapped InformationNode
    """
    self._set_base_attribute('_pk', pk)
    self._set_base_attribute('_parent', None)
    self._set_base_attribute('_update_hash_queue', update_hash_queue)
    self._set_base_attribute('_children', [])
    self._set_base_attribute('_number_of_children',
                             lambda: len(self._children))
    self._set_base_attribute('_children_hash', DEFAULT_HASH_VALUE)
    self._set_base_attribute('_hash', DEFAULT_HASH_VALUE)
    self._set_base_attribute('_depth', _depth)
    self._set_base_attribute('_info', InformationNode(pk, **info_data))
    self._set_base_attribute('_updated_at', time_now())
    # Bug fix: '_depth' and '_children' were missing a separating comma, so
    # Python concatenated them into the bogus name '_depth_children'.
    self._set_base_attribute('_base_attributes', [
        '_pk', '_parent', '_update_hash_queue', '_depth', '_children',
        '_children_hash', '_hash', '_info', '_updated_at'
    ])
    self._update_hash()
def handleMsg(self, time, name, valu):
    """ update the current position when possible

    Accumulate (name, value) fields per message timestamp; once a timestamp
    has all self.num_var fields, publish it as the current position. Partial
    entries older than self.time_buffer (relative to the incoming message
    time) are discarded. Python 2 code (iterkeys).

    NOTE(review): the `time` parameter is the message timestamp (it shadows
    any module named time here); current_position_time is wall-clock
    receipt time, not the message time.
    """
    if time not in self.partial_positions:
        self.partial_positions[time] = {}
    self.partial_positions[time][name] = valu
    ts = sorted(self.partial_positions.iterkeys())  # from low to high
    for t in ts:
        cull = False
        if len(self.partial_positions[t]) == self.num_var:
            # update — this timestamp now has every expected variable
            self.current_position = self.partial_positions[t]
            self.current_position_time = time_now()
            cull = True
        if time - t > self.time_buffer:
            # too old to ever complete — drop it
            cull = True
        if cull:
            del self.partial_positions[t]
            break  # only one can be completed @ a time
def limit_and_return(self, values):
    """
    Take a list of values, update the internal state of the RateLimit and
    return a modified list of values which are restricted by the configured
    limit function.

    :param float[] values: Values to attempt to apply
    :return: New values to apply, modified by the configured limit function
    """
    now = time_now()
    # First call: no history to limit against, accept the values as-is.
    if self.previous_time is None:
        self.previous_time = now
        self.previous_values = values
        return values
    # Limit each value against its previous counterpart and timestamps.
    limited = []
    for old_value, new_value in zip(self.previous_values, values):
        limited.append(
            self.limit_function(old_value, self.previous_time, new_value,
                                now))
    self.previous_values = limited
    self.previous_time = now
    return limited
def set(self, user, new_score):
    """Record a score for `user`, keeping only their best result.

    Returns {'bestScore': old} when the new score is not an improvement,
    {'newBest': True, 'prevBest': old} when it is, and {'newBest': True}
    for a user with no previous entry.
    """
    score_time = time_now()
    self._read_leaderboard()
    try:
        entry = self.user_scores[user.username]
        old_score = entry.score
        # sort_by == 1 means higher is better; -1 means lower is better.
        not_better = ((self.sort_by == 1 and old_score >= new_score) or
                      (self.sort_by == -1 and old_score <= new_score))
        if not_better:
            return {'bestScore': old_score}
        entry.score = new_score
        entry.score_time = score_time
        if self.aggregate:
            self.aggregate_score += new_score - old_score
        self._write_leaderboard()
        return {'newBest': True, 'prevBest': old_score}
    except KeyError:
        # User has no score on the leaderboard
        self._add_score(UserScore(user.username, new_score, score_time))
        self._write_leaderboard()
        return {'newBest': True}
def testrun_finished(self, timestamp=None): if timestamp is None: timestamp = time_now() self.run_endtime = timestamp
def testrun_started(self, timestamp=None): if timestamp is None: timestamp = time_now() self.run_starttime = timestamp
# Leaderboard-definition constructor: parses one leaderboard's metadata,
# collecting problems into self.errors / self.warnings (via the local
# error()/warning() closures) instead of raising.
# - `key` is checked against self.validate_key (a class-level regex,
#   declared outside this view).
# - 'title' is required; 'aggregate' must be a bool (defaults False);
#   'sortBy' must be -1 or 1 (defaults to 1 with a warning if missing).
# - 'icon' is deprecated; 'icon256' is checked for existence and .png
#   extension under game.path.
# - 'default-scores' entries need a 'user' (or an 'email' whose local
#   part is used, with a test-only 'no-reply+' prefix stripped) and a
#   finite numeric 'score'; each becomes a UserScore stamped with
#   time_now() - i — presumably so earlier entries get slightly newer
#   timestamps for stable tie ordering; TODO confirm.
# NOTE(review): the line break inside the 'Default score must contain
# user or … email for key' warning string below is an extraction
# artifact of this listing, not part of the code.
def __init__(self, game, key, meta_data, index): self.user_scores = {} self.scores = [] self.aggregate = False self.aggregate_score = 0 self.lock = Lock() self.errors = [] self.warnings = [] self.path = None def error(msg): self.errors.append(msg) def warning(msg): self.warnings.append(msg) if not self.validate_key.match(key): error('invalid key format "%s"' % key) self.key = key self.index = index if 'title' not in meta_data or meta_data['title'] is None: error('title property missing for key "%s"' % key) self.title = '' else: self.title = meta_data['title'] if 'aggregate' in meta_data: if isinstance(meta_data['aggregate'], bool): self.aggregate = meta_data['aggregate'] else: warning('aggregate property must be a boolean for key "%s"' % key) self.aggregate = False else: self.aggregate = False try: sort_by = int(meta_data['sortBy']) if sort_by != -1 and sort_by != 1: error('sortBy must either -1 or 1 for key "%s"' % key) except KeyError: warning('sortBy property missing for key "%s"' % key) sort_by = 1 except ValueError: error('sortBy must either -1 or 1 for key "%s"' % key) sort_by = 1 self.sort_by = sort_by if 'icon' in meta_data: warning('"icon" yaml property has been deprecated please use ' '"icon256", "icon48" or "icon32" for leaderboard key "%s"' % key) try: icon_path = meta_data['icon256'] if path_exists(get_absolute_path(join_path(game.path, icon_path))): if splitext(icon_path)[1] != '.png': warning('icon256 must be in PNG format for key "%s"' % key) else: error('icon256 file does not exist for key "%s"' % key) except KeyError: warning('no icon256 (using default) for key "%s"' % key) self.game = game self.default_scores = [] default_scores = meta_data.get('default-scores', []) for (i, s) in enumerate(default_scores): if not isinstance(s, dict): warning('Default score must an array of objects for key "%s"' % key) continue user = s.get('user', None) if user is None: email = s.get('email', None) if email is None: warning('Default score must contain user or 
email for key "%s"' % key) continue try: user = email.split('@', 1)[0] # for tests if user.startswith('no-reply+'): user = user[9:] except AttributeError: warning('Default score email "%s" must be a string for key "%s"' % (email, key)) continue if 'score' in s: try: score = float(s['score']) if isinf(score) or isnan(score): warning('Default score for user "%s" must be a number for key "%s"' % (user, key)) continue user_score = UserScore(user, score, time_now() - i) self.default_scores.append(user_score) except (ValueError, TypeError): warning('Default score for user "%s" must be a number for key "%s"' % (user, key)) continue else: warning('Default score for user "%s" missing score for key "%s"' % (user, key)) continue
# Interactive console tool (Python 2): reads a MAVLink message type from
# sys.argv[1] or via raw_input prompts, then repeatedly receives and
# prints that message from `mavfile` through receive_msg(), with optional
# CSV logging via log_data().  Special inputs: 'end' exits, 'help'/'ls'
# lists known types; a numeric input is mapped through
# list_mav_msg.list_mav_msg_working to a type name.  'PARAM_VALUE' dumps
# all autopilot parameters (param_count hard-coded to 403, as the banner
# admits) into param_id_list.py and params.py.  When "run until
# terminated" is chosen, messages are re-read until command.end_entered()
# or (in the non-sliced path) 120 seconds elapse since time_start; only
# values that differ from the previous sample (per matching(), with an
# unused noise-tolerance option) are printed/logged.  The `args`
# parameter is never used in this body.
# NOTE(review): the bare `except:` clauses deliberately swallow all
# errors (e.g. missing sys.argv[1]); the mid-string line breaks below
# ('See how many…', '(y/n): …', 'raw_input(…') are extraction artifacts
# of this listing, not part of the code.
def print_out_received_data(mavfile, args = False): loop = True; while not command.end_entered() and loop: try: msg_type = sys.argv[1] loop = False except: pass if loop == True: print('----------------------------') list_mav_msg.print_list() msg_type = raw_input('------------------------------------ \nEnter the MAV_MSG type (help/end/return): ') else: pass if '_' not in msg_type and not msg_type.isdigit(): splitted = msg_type.split() name = '' for word in splitted: name += word.upper()+'_' msg_type = name[:-1] #removing the last _ from the string print('Changed input to: ' + msg_type) else: pass id_not_found = False try: msg_type = int(msg_type) ID = msg_type msg_type = list_mav_msg.list_mav_msg_working[str(msg_type)] print('--------------------------------\n ID '+ str(ID) + ' found: ' + msg_type + '\n-------------------------------') except: pass #print(msg_type) #print(type(msg_type)) if msg_type == 'end': sys.exit() elif msg_type == 'help' or msg_type == 'ls': list_mav_msg.print_list() elif msg_type == 'PARAM_VALUE': print('-----------------------\nGenerating list of all Parameters') print('Bad implementation... First go to mavproxy. 
See how many parameters and hardcode the param_count value') param_count = 403 par_name = [0,]*param_count par_value = [0,]*param_count ls_name = [0,]*param_count ls_id = [0,]*param_count print('Reading out all param_ids') for i in range(0,param_count): #if i == 4: name, value, data = receive_msg(mavfile, 'PARAM_VALUE', True, param_id = i) par_name[i] = value[0] ls_name[i] = value[0] par_value[i] = value[1] ls_id[i] = str( value[4]) print(i) par_data = zip (par_name, par_value) #Used to generate the Dictionary 'param_id_list' print('saving in params_id_list.py') ls = dict(zip(ls_id, ls_name)) #print(ls) txt = open('param_id_list.py', 'w') txt.write('param_dic = {\n') for i in ls_id: txt.write('\''+ str(i)+ '\'' + ' : ' + '\''+ ls[str(i)]+ '\'' +', \n') txt.write('}') txt.close() print('saving in params.py') #for i in par_data: #print(i) txt=open('params.py', 'w') par_data_dic = dict(par_data) for i in par_name: s = '\'' + i + '\'' + ':' + '\'' + str(par_data_dic[i]) +'\'' + ','+ '\n' txt.write(s) txt.close() else: if id_not_found: print('ID not found') break else: pass #plot_str = raw_input ('------------------------------------\n Plot data? (y/n)') plot_str = 'n' if plot_str == 'y': plot = True else: plot = False if loop == True: log_str = raw_input('-------------------------------------\n Log data? (y/n) ') else: log_str = 'n' log = False path = 'log_received_data.txt' if log_str == 'y': log = True cmt_log = raw_input('Add comment: ') cmt = 'Comment: ' +cmt_log + '\n---------------------------------------\n' path_input = raw_input('Add name for log file (Hit Enter for logging into log_received_data.txt: ') if path_input != '': path = '../log/' + path_input + '.csv' else: pass else: log = False #print("Not Receiving messages till terminated") #run_str = "n" if loop == True: run_str = raw_input('--------------------------------------\n Receiving messages till terminated? 
(y/n): ') else: run_str = 'n' if run_str == 'y': run = True tol_value = 5 tol = True print("no noise tolerance") tol_confirm = "n" #tol_confirm = raw_input('---------------------------------\n Tolerance (noise) in update? (y/n): ') if tol_confirm == 'y': tol = True tol_value = int(raw_input('----------------------------------\n Value of the tolerance: ')) elif tol_confirm == 'n': tol = False else: pass elif run_str == 'n': run = False else: print('Answer not accepted. Only receiving one message') run = False break if loop == True: slicing = "n" #slicing = raw_input('-----------------------------------\n Slicing the list? (y/n) ') else: slicing = "n" if slicing == 'y': start = int(raw_input('Start: ')) -1 end = int(raw_input('End: ')) name, value, data = receive_msg(mavfile, msg_type, True, True, start, end) if 'time' in name[0]: #time_check = raw_input('Print time (for log purposes)? (y/n)') time_check = 'y' if time_check == 'y': time = True #Getting the data for the first time name, value, data = receive_msg(mavfile, msg_type, time, True, start, end) else: time = False #Getting the data for the first time name, value, data = receive_msg(mavfile, msg_type, time, True, start, end) else: time = False pass if log == True: #write_log_number_line(cmt) log_data(name, path) log_data(value, path) else: pass for line in data: print(line) while not command.end_entered() and run: new_name, new_value, new_data = receive_msg(mavfile, msg_type,time, True, start, end) #Only print further data if it differs from the first one if not matching(new_value,value, tol, tol_value): if log == True: log_data(new_value, path) else: pass print('Values changed:') for line in new_data: print(line) value = new_value else: #print(msg_type) #print(type(msg_type)) print('Getting ' + msg_type) sleep(1) time_start = time_now() name, value, data = receive_msg(mavfile, msg_type, True) if 'time' in name[0]: if loop == True: print("Printing time for log purposes") time_check = "y" #time_check = 
raw_input('--------------------------------\n Print time (for log purposes)? (y/n)') else: time_check = 'n' if time_check == 'y': time = True print('Printing time') #Getting the data for the first time name, value, data = receive_msg(mavfile, msg_type, time) else: time = False print('Not printing time') #Getting the data for the first time name, value, data = receive_msg(mavfile, msg_type, time) else: pass if log == True: #write_log_number_line(cmt) log_data(name, path) log_data(value, path) else: pass for line in data: print(line) while not command.end_entered() and run: new_name, new_value, new_data = receive_msg(mavfile, msg_type, time) #Only print further data if it differs from the first one if not matching(new_value,value, tol, tol_value): if log == True: log_data(new_value, path) else: pass print('Values changed:') for line in new_data: print(line) value= new_value if time_now()-time_start > 120: break
def catch_tweets(tweet_catcher):
    """
    Process to be spawned in parallel with main program
    Waits for tweets and processes them as they come in.

    Opens the 'statuses/sample' stream and hands every tweet to
    tweet_catcher.worker on its own thread, stamped with the receive
    time.  On stream hiccups the connection is simply reopened.
    """
    while True:
        stream = tweet_catcher.api.request('statuses/sample')
        try:
            for tweet_data in stream:
                worker_kwargs = {'tweet_data': tweet_data,
                                 'time': time_now()}
                Thread(target=tweet_catcher.worker,
                       kwargs=worker_kwargs).start()
        except (ChunkedEncodingError, TypeError) as e:
            # Streaming connections drop unpredictably; report the error
            # and reconnect rather than crash the process.
            print(e)
            continue
def test_node_gets_present_time_as_updated_time_on_insertion(self):
    """A freshly constructed Node reports (roughly) now as its update time."""
    current_time = time_now()
    fresh_node = Node(0, set(), **temp_info)
    # places=0 tolerates sub-second construction overhead.
    self.assertAlmostEqual(current_time, fresh_node.get_update_time(), places=0)
# Extended variant of the interactive MAVLink console tool (Python 2).
# Differences from the earlier version in this file: '-ca'/'-cc' argv
# flags run accelerometer/compass calibration and exit; the run duration
# is configurable (time_run_duration, default 60 s, prompted when
# logging); log file names 'gri'/'gpi'/'all' are suffixed via
# get_pos_param() and de-collided with a numeric counter; a special
# 'ALL' message type logs GLOBAL_POSITION_INT / GPS_RAW_INT / RAW_IMU
# style triples (read_gpi/read_gri/read_ri) to three CSV files; in the
# single-message path, 'lat'/'lon' fields are re-based against the first
# sample as offsets, and repeat prints are rate-limited to one per
# 0.5 s (time_compare).  The `args` parameter is never used.
# NOTE(review): `if 'lat' and 'lon' in name` only actually tests
# 'lon' membership ('lat' is just a truthy literal) — presumably both
# were intended; left as-is here since this is a documentation pass.
# The name-collision `while os.path.isfile(path)` loop never increments
# name_counter, so it would spin forever on a collision — flagged for
# follow-up.  The mid-string line breaks below are extraction artifacts
# of this listing, not part of the code.
def print_out_received_data(mavfile, args = False): # Calibrate accelerometer if input is -ca try: if sys.argv[1] == '-ca': calibrate_acc(mavfile) sys.exit() else: pass except: pass # Calibrate compass if input is -cc try: if sys.argv[1] == '-cc': calibrate_mag(mavfile) sys.exit() else: pass except: pass # Looping in a while loop loop = True; # Duration while the received data is printed out time_run_duration = 60; while not command.end_entered() and loop: extern = False try: # Extern means that '27' as input from extern was typed in if sys.argv[1] != '-ca' and sys.argv[1] != '-cc': msg_type = sys.argv[1] extern = True else: extern = False pass except: pass # Get a Prompt if no input from extern if loop == True and extern == False: print('----------------------------') list_mav_msg.print_list() msg_type = raw_input('------------------------------------ \nEnter the MAV_MSG type (help/end/return): ') else: pass if '_' not in msg_type and not msg_type.isdigit(): splitted = msg_type.split() name = '' for word in splitted: name += word.upper()+'_' msg_type = name[:-1] #removing the last _ from the string print('Changed input to: ' + msg_type) else: pass id_not_found = False try: msg_type = int(msg_type) ID = msg_type msg_type = list_mav_msg.list_mav_msg_working[str(msg_type)] print('--------------------------------\n ID '+ str(ID) + ' found: ' + msg_type + '\n-------------------------------') except: pass if msg_type == 'end': sys.exit() elif msg_type == 'help' or msg_type == 'ls': list_mav_msg.print_list() elif msg_type == 'PARAM_VALUE': print('-----------------------\nGenerating list of all Parameters') print('Bad implementation... First go to mavproxy. See how many parameters and hardcode the param_count value') # Hard coded amount of parameters. 
Each ID from 0 to param_count will be saved param_count = 403 par_name = [0,]*param_count par_value = [0,]*param_count ls_name = [0,]*param_count ls_id = [0,]*param_count print('Reading out all param_ids') for i in range(0,param_count): name, value, data = receive_msg(mavfile, 'PARAM_VALUE', True, param_id = i) par_name[i] = value[0] ls_name[i] = value[0] par_value[i] = value[1] ls_id[i] = str( value[4]) print(i) par_data = zip (par_name, par_value) #Used to generate the Dictionary 'param_id_list' print('saving in params_id_list.py') ls = dict(zip(ls_id, ls_name)) txt = open('param_id_list.py', 'w') txt.write('param_dic = {\n') for i in ls_id: txt.write('\''+ str(i)+ '\'' + ' : ' + '\''+ ls[str(i)]+ '\'' +', \n') txt.write('}') txt.close() print('saving in params.py') txt=open('params.py', 'w') par_data_dic = dict(par_data) for i in par_name: s = '\'' + i + '\'' + ':' + '\'' + str(par_data_dic[i]) +'\'' + ','+ '\n' txt.write(s) txt.close() else: if id_not_found: print('ID not found') break else: pass plot_str = 'n' if plot_str == 'y': plot = True else: plot = False if loop == True: log_str = raw_input('-------------------------------------\n Log data? 
(y/n) ') else: log_str = 'n' log = False path = 'log_received_data.txt' if log_str == 'y': log = True time_run_duration = int(raw_input('Enter the duration of the logging: ')) #cmt_log = raw_input('Add comment: ') cmt_log = '' cmt = 'Comment: ' +cmt_log + '\n---------------------------------------\n' path_input = raw_input('Add name for log file (Hit Enter for logging into log_received_data.txt: ') if path_input == 'gri' or path_input == 'gpi' or path_input == 'all': path_input += get_pos_param(mavfile,'get') else: pass if path_input != '': path = '../log/' + path_input + '.csv' else: pass name_counter = 1 while os.path.isfile(path): path = '../log/' + path_input +'_'+ str(name_counter) +'.csv' else: log = False #print("Not Receiving messages till terminated") #run_str = "n" if loop == True: #run_str = raw_input('--------------------------------------\n Receiving messages till terminated? (y/n): ') run_str= 'y' else: run_str = 'n' if run_str == 'y': run = True tol_value = 5 tol = True print("no noise tolerance") tol_confirm = "n" #tol_confirm = raw_input('---------------------------------\n Tolerance (noise) in update? (y/n): ') if tol_confirm == 'y': tol = True tol_value = int(raw_input('----------------------------------\n Value of the tolerance: ')) elif tol_confirm == 'n': tol = False else: pass elif run_str == 'n': run = False else: print('Answer not accepted. Only receiving one message') run = False break if loop == True: slicing = "n" #slicing = raw_input('-----------------------------------\n Slicing the list? (y/n) ') else: slicing = "n" if slicing == 'y': start = int(raw_input('Start: ')) -1 end = int(raw_input('End: ')) name, value, data = receive_msg(mavfile, msg_type, True, True, start, end) if 'time' in name[0]: #time_check = raw_input('Print time (for log purposes)? 
(y/n)') time_check = 'y' if time_check == 'y': time = True #Getting the data for the first time name, value, data = receive_msg(mavfile, msg_type, time, True, start, end) else: time = False #Getting the data for the first time name, value, data = receive_msg(mavfile, msg_type, time, True, start, end) else: time = False pass if log == True: #write_log_number_line(cmt) log_data(name, path) log_data(value, path) else: pass for line in data: print(line) while not command.end_entered() and run: new_name, new_value, new_data = receive_msg(mavfile, msg_type,time, True, start, end) #Only print further data if it differs from the first one if not matching(new_value,value, tol, tol_value): if log == True: log_data(new_value, path) else: pass print('Values changed:') for line in new_data: print(line) value = new_value else: if msg_type == 'ALL': path_gpi = path.replace('.csv','_gpi.csv') path_gri = path.replace('.csv','_gri.csv') path_ri = path.replace('.csv','_ri.csv') time_start = time_now() gri_name, gri_value, gri_data = read_gri(mavfile) gpi_name, gpi_value, gpi_data = read_gpi(mavfile) ri_name, ri_value, ri_data = read_ri(mavfile) if log == True: #write_log_number_line(cmt) log_data(gpi_name, path_gpi) log_data(gpi_value, path_gpi) log_data(gri_name, path_gri) log_data(gri_value, path_gri) log_data(ri_name, path_ri) log_data(ri_value, path_ri) else: pass for line in gpi_data: print(line) print('---------------') for line in gri_data: print(line) print('---------------') #for line in ri_data: #print(line) #print('---------------') while not command.end_entered() and run: ngri_name, ngri_value, ngri_data = read_gri(mavfile) ngpi_name, ngpi_value, ngpi_data = read_gpi(mavfile) nri_name, nri_value, nri_data = read_ri(mavfile) #if not matching(new_value,value, tol, tol_value): if log == True: #write_log_number_line(cmt) log_data(ngpi_value, path_gpi) log_data(ngri_value, path_gri) log_data(nri_value, path_ri) else: pass for line in gpi_data: print(line) 
print('---------------') for line in gri_data: print(line) print('---------------') #for line in ri_data: #print(line) #print('---------------') gri_value = ngri_value gpi_value = ngpi_value ri_value = nri_value if time_now()-time_start > time_run_duration: break else: #print(msg_type) #print(type(msg_type)) print('Getting ' + msg_type) time_start = time_now() name, value, data = receive_msg(mavfile, msg_type, True) if 'lat' and 'lon' in name: lat_idx = name.index('lat') lat_offset = value[lat_idx] lon_idx = name.index('lon') lon_offset = value[lon_idx] offset = True else: offset = False if 'time' in name[0]: if loop == True: #print("Printing time for log purposes") time_check = "y" #time_check = raw_input('--------------------------------\n Print time (for log purposes)? (y/n)') else: time_check = 'n' if time_check == 'y': time = True print('Printing time') #Getting the data for the first time name, value, data = receive_msg(mavfile, msg_type, time) else: time = False print('Not printing time') #Getting the data for the first time name, value, data = receive_msg(mavfile, msg_type, time) else: pass if log == True: #write_log_number_line(cmt) log_data(name, path) log_data(value, path) else: pass for line in data: print(line) print(type(data)) time_compare = time_now() while not command.end_entered() and run: new_name, new_value, new_data = receive_msg(mavfile, msg_type, time) #Only print further data if it differs from the first one if not matching(new_value,value, tol, tol_value) and time_now()-time_compare > 0.5: #if not matching(new_value,value, tol, tol_value): if log == True: log_data(new_value, path) else: pass if offset == True: new_value[lat_idx] = new_value[lat_idx]-lat_offset new_value[lon_idx] = new_value[lon_idx]-lon_offset new_data = zip(new_name, new_value) else: pass print('Values changed:') for line in new_data: print(line) time_compare = time_now() value= new_value if time_now()-time_start > time_run_duration: break
def post(self, name, body, time=None):
    """Send a chat message through the hub and acknowledge with HTTP 200.

    NOTE(review): the ``time`` parameter is accepted but ignored — the
    message is always stamped with ``time_now()``; confirm whether
    callers expect their timestamp to be honoured.
    """
    message = {
        'time': time_now(),
        'name': name,
        'message': body,
        'type': 'message',
    }
    self.hub.put_message(message)
    response.send(200)
def start(self):
    """Reset timing state and record the start time.

    Returns self so calls can be chained fluently.
    """
    self._start_time = time_now()
    self._sent = False
    self.ms = None
    return self
def _touch(self):
    """Refresh the '_updated_at' base attribute to the current time."""
    now = time_now()
    self._set_base_attribute('_updated_at', now)