def process(queue):
    """Process the queue, possibly modifying it. May return a command.

    Sets queue['status'] on every call; returns an ('exit', 100) command
    when the latest temperature sample reaches MAX_TEMP, else None.
    """
    last_temp = _get_last_sample(queue, 'temp')
    if not (last_temp and last_temp[1] >= MAX_TEMP):
        queue['status'] = (common.timestamp(), 'ok')
        return None
    LOG.info('shutting down since temperature of %s exceeds maximum of %s'
             % (last_temp[1], MAX_TEMP))
    queue['status'] = (common.timestamp(),
                       'shutdown: CPU temp = %s (max: %s)' % (last_temp[1], MAX_TEMP))
    return ('exit', 100)  # 100 = shutdown
def process(queue):
    """Process the queue, possibly modifying it. May return a command."""
    command = None
    sample = _get_last_sample(queue, 'temp')
    overheated = bool(sample) and sample[1] >= MAX_TEMP
    if overheated:
        LOG.info(
            'shutting down since temperature of %s exceeds maximum of %s'
            % (sample[1], MAX_TEMP))
        status_text = 'shutdown: CPU temp = %s (max: %s)' % (sample[1], MAX_TEMP)
        command = ('exit', 100)  # 100 = shutdown
    else:
        status_text = 'ok'
    queue['status'] = (common.timestamp(), status_text)
    return command
def __init__(self, name, type, source=None, subtype=None, case_id=None):
    """Create an indicator, auto-detecting the subtype for hosts and hashes.

    Note: for 'host' and 'hash' types any caller-supplied subtype is
    overwritten by the detected one ('unknown' when detection fails).
    """
    self.name = name
    self.created = timestamp()
    self.type = type
    self.subtype = subtype
    self.source = source
    self.parent = None
    self.children = []
    self.case_id = case_id
    self.tags = []
    self.notes = []
    self.data = {}
    if self.type == 'host':
        # Classify the host name; fall back to 'unknown' with a warning.
        for checker, label in ((is_ipv4, 'ipv4'), (is_ipv6, 'ipv6'), (is_fqdn, 'fqdn')):
            if checker(self.name):
                self.subtype = label
                break
        else:
            warning('host type cannot be determined. must be one of: ipv4, ipv6, fqdn')
            self.subtype = 'unknown'
    elif self.type == 'hash':
        hash_kind = is_hash(self.name)
        if hash_kind is None:
            warning('hash is not a valid md5, sha1, sha256, or sha512')
            self.subtype = 'unknown'
        else:
            self.subtype = hash_kind
def create_dataset_hmd(cfg, args, datacfg):
    """Release HD Map Dataset (AI Dataset).

    Execute all the steps from creation to saving AI Dataset from the
    Annotation Database. Every AI Dataset is created as a new database
    AIDS_<ddmmyy_hhmmss> ready to be consumed for AI Training without
    dependency. This is further used in the TEPPr (Training, Evaluate,
    Prediction, Publish with reporting) workflow.

    Returns whatever create_db() returns for the prepared dataset.
    """
    log.info("-----------------------------")
    # Imported lazily, presumably to avoid a heavy module-load cost at startup.
    import dataset.hmd_to_aids as hmd2aids
    tic = time.time()
    cfg['TIMESTAMP'] = common.timestamp()
    # (Removed long-dead commented-out arg-checking code.)
    aids, datacfg = hmd2aids.prepare_datasets(cfg, args, datacfg)
    res = create_db(cfg, args, datacfg, aids)
    toc = time.time()
    total_exec_time = '{:0.2f}s'.format(toc - tic)
    log.info("\n Done: total_exec_time: {}".format(total_exec_time))
    return res
def get_sample(self):
    """Read temperature and humidity from the DHT sensor.

    Returns ('temp_hum', (ts, temperature, humidity)); both readings are
    None when the sensor read failed (a warning is logged, the sample is
    still returned).
    """
    humidity, temperature = Adafruit_DHT.read_retry(Dht._SENSOR, Dht._PIN,
                                                    retries=Dht._RETRIES)
    # `is None` rather than `== None` — identity test for the None singleton.
    if humidity is None or temperature is None:
        self._log.warning('failed to read temperature/humidity')
    return 'temp_hum', (common.timestamp(), temperature, humidity)
def get_sample(self):
    """Read barometric pressure from the BMP183 sensor.

    Returns ('press', (ts, pressure_hPa)); pressure is None when the
    sensor did not deliver a value (a warning is logged).
    """
    bmp = bmp183()
    bmp.measure_pressure()
    # Guard BEFORE dividing: in the original, `bmp.pressure / 100.0` would
    # raise TypeError on None, so its post-division None check never fired.
    pre = None if bmp.pressure is None else bmp.pressure / 100.0
    if pre is None:
        self._log.warning('failed to read Pressure')
    return 'press', (common.timestamp(), pre)
def get_sample(self):
    """Simulated door sensor: ~1 in 10 calls toggles the door state.

    Returns ('door', events) where events are all (ts, door_open) changes
    accumulated since the previous call; the internal buffer is reset.
    """
    if random_value(0, 10) < 1:
        self._door_open = 1 - self._door_open  # toggle 0 <-> 1
        self._events.append((common.timestamp(), self._door_open))
    pending, self._events = self._events, []
    return 'door', pending
def parse_annon_filename(filepath):
    """Parse a semantically named annotation file into its component IDs.

    Annotation file names encode metadata so that multiple annotators can
    annotate the same images for different computer-vision tasks (detection,
    classification, scene type, keypoint annotations, ...) without
    duplicating the images on the filesystem, and so that statistics can be
    generated for tracking and management.

    Example: ``images-p1-230119_AT1_via205_010219.json``

    Level 1 — strip the extension and split on '_' (underscore), giving
    four groups, e.g. ['images-p1-230119', 'AT1', 'via205', '010219']:
      [0] reference to the image folder used for these annotations,
      [1] annotator ID,
      [2] annotation tool plus version (here VIA 2.05),
      [3] release date on which the annotator provided the file.
    Joining [0] and [1] back with '_' gives the directory holding the
    images, e.g. 'images-p1-230119_AT1'.

    Level 2 — each Level 1 item may be further split on '-' (minus; its
    absence does not cause an error), e.g.
    'images-p1-230119' -> ['images', 'p1', '230191']:
      [0] directory name under which images are allocated,
      [1] part ID of the images,
      [2] date on which the image cut was taken and assigned to annotators.

    Falls back to generated defaults when the name does not follow the
    convention.
    """
    name = os.path.basename(filepath)
    try:
        parts = os.path.splitext(name)[0].split('_')
        img_parts = parts[0].split('-')
        name_ids = {
            "image_rel_date": str(img_parts[2]),
            "image_part": "-".join(img_parts[:2]),
            "annotator_id": parts[1],
            "annotation_rel_date": str(parts[3]),
            "annotation_tool": parts[2],
            "image_dir": '_'.join(parts[:2]),
            "rel_filename": name,
        }
    except Exception as e:
        log.info(
            "Unable to parse annontation filename, hence falling back to defaults",
            exc_info=True)
        name_ids = {
            "image_rel_date": common.modified_on(filepath, True),
            "image_part": None,
            "annotator_id": common.id_generator(5),
            "annotation_rel_date": common.timestamp(),
            "annotation_tool": None,
            "image_dir": os.path.dirname(filepath),
            "rel_filename": name,
        }
    log.info("name_ids: {}".format(name_ids))
    return name_ids
def get_sample(self):
    """Query the router status APIs; returns ('link', sample) or ('link', None)."""
    # TODO: It's a little awkward that we need to start a subprocess synchronously.
    sample = self._query_all_apis()
    if not sample:
        return 'link', sample
    # Translate the numeric network type to its string name, keeping
    # unknown codes as-is.
    raw_type = sample['nwtype']
    sample['nwtype'] = HuaweiStatus._NW_TYPES.get(raw_type, raw_type)
    sample['ts'] = common.timestamp()
    return 'link', sample
def __init__(self, **kwargs):
    """Initialize the device and bind it to its server protocol connection.

    Requires 'protocol_ref' (the owning ServerProtocol) and 'conn_hash'
    in kwargs. Raises ValueError when either is missing — the original
    used `assert`, which is silently stripped under `python -O`.
    """
    DeviceCls.__init__(self, **kwargs)
    if 'protocol_ref' not in kwargs:
        raise ValueError("missing required kwarg 'protocol_ref'")
    if 'conn_hash' not in kwargs:
        raise ValueError("missing required kwarg 'conn_hash'")
    self.latest_confirm = timestamp()
    # :type self.protocol_ref: ServerProtocol
    self.protocol_ref = kwargs['protocol_ref']
    self.conn_hash = kwargs['conn_hash']
def get_sample(self):
    """Capture a still picture to the web directory; returns ('picture', ts)."""
    camera.resolution = (Picture._RESOLUTION_HOR, Picture._RESOLUTION_VER)
    camera.start_preview()
    sleep(2)  # presumably lets the sensor settle/auto-expose — TODO confirm
    ts = common.timestamp()
    camera.capture('/var/www/html/pictures/picture%s.jpg' % ts)
    camera.stop_preview()
    return 'picture', ts
def get_sample(self):
    """Simulated pilot counter: ~1 in 10 calls steps the count by +/-1.

    The count is clamped to [0, 9]. Returns ('pilots', events) with all
    (ts, count) changes since the previous call; the buffer is reset.
    """
    if random_value(0, 10) < 1:
        delta = 1 if random_value(0, 2) < 1 else -1
        self._count = max(0, min(9, self._count + delta))
        self._pilots.append((common.timestamp(), self._count))
    recorded, self._pilots = self._pilots, []
    return 'pilots', recorded
def get_sample(self):
    """Read temperature/humidity from the DHT sensor, re-reading bogus values.

    The DHT occasionally returns impossible humidity readings (> 100%);
    those are re-read until a plausible value (or a failed read) comes back.
    Returns ('temp_hum', (ts, temperature, humidity)); both readings are
    None when the sensor could not be read.
    """
    humidity, temperature = Adafruit_DHT.read_retry(Dht._SENSOR, Dht._PIN,
                                                    retries=Dht._RETRIES)
    if humidity is None or temperature is None:
        self._log.warning('failed to read temperature/humidity')
    elif humidity > 100:  # Humidity can't be higher than 100%
        # BUG FIX: the original compared `humidity > 100` unconditionally and
        # also inside the retry loop, raising TypeError whenever a read
        # returned None. Guard against None before every comparison.
        self._log.warning('Temp & humidity wrong value. Reading again')
        while humidity is not None and humidity > 100:
            # Read new humidity data as the sensor failed
            humidity, temperature = Adafruit_DHT.read_retry(Dht._SENSOR, Dht._PIN,
                                                            retries=Dht._RETRIES)
        if humidity is None or temperature is None:
            self._log.warning('failed to read temperature/humidity')
    return 'temp_hum', (common.timestamp(), temperature, humidity)
def get_sample(self):
    """Return ('link', sample) with router status, or ('link', None) on failure."""
    # TODO: It's a little awkward that we need to start a subprocess synchronously.
    sample = self._query_all_apis()
    if sample:
        code = sample['nwtype']
        # Numeric network-type codes become readable names when known;
        # unknown codes pass through unchanged.
        sample.update(nwtype=HuaweiStatus._NW_TYPES.get(code, code),
                      ts=common.timestamp())
    return 'link', sample
def _oauth_parameter(self, has_token=True):
    """Build the base OAuth parameter dict, optionally including the token."""
    parameters = dict(
        oauth_consumer_key=self.consumer_key,
        oauth_timestamp=common.timestamp(),
        oauth_nonce=next_oauth_once(),
        oauth_signature_method=self.SIG_METHOD,
        oauth_version=self.VERSION,
    )
    if has_token:
        parameters['oauth_token'] = self.oauth_token
    return parameters
def get_sample(self):
    """Convert tick count since the last sample into a rain reading.

    Geometry comes from config: collector width/length (mm) and a drop
    count (presumably drops per tick — TODO confirm against the sensor
    calibration). Returns ('rain', (ts, rain)).
    """
    ticks = self._ticks.get_and_reset()
    area_m2 = (C.RAIN_SIZE_WIDE_MM() * C.RAIN_SIZE_LONG_MM()) / 1000000
    drops_per_m2 = C.RAIN_DROPS() / area_m2
    liters_per_m2 = drops_per_m2 * 0.001 / 20
    return 'rain', (common.timestamp(), ticks * liters_per_m2)
def get_sample(self):
    """Simulated 'link' sample with random network type and strength.

    Upload/download counters grow by 1 MB per call so graphs move.
    """
    code = int(random_value(3, 6))
    if code == 5:
        code = 7  # code 5 is remapped — presumably not a valid type; TODO confirm
    mb = 1024 * 1024
    sample = dict(
        nwtype=huawei_status.HuaweiStatus._NW_TYPES[code],
        strength=random_value(65, 85),
        upload=self._upload_mb * mb,
        download=self._download_mb * mb,
        ts=common.timestamp())
    self._upload_mb += 1
    self._download_mb += 1
    return 'link', sample
def get_sample(self):
    """Produce a fake 'link' sample; byte counters increase on every call."""
    nwtype = int(random_value(3, 6))
    nwtype = 7 if nwtype == 5 else nwtype
    sample = {'nwtype': huawei_status.HuaweiStatus._NW_TYPES[nwtype]}
    sample['strength'] = random_value(65, 85)
    sample['upload'] = self._upload_mb * 1024 * 1024
    sample['download'] = self._download_mb * 1024 * 1024
    sample['ts'] = common.timestamp()
    self._upload_mb += 1
    self._download_mb += 1
    return 'link', sample
def get_sample(self):
    """Simulated wind sample covering the interval since the previous call."""
    now_ts = common.timestamp()
    self._avg = max(0, random_value(-2, 10))
    gust = self._avg + random.random() * self._avg
    midpoint_ts = (now_ts + self._last_ts) / 2  # fake gust time: window midpoint
    wind = {
        'avg': self._avg,
        'max': gust,
        'max_ts': midpoint_ts,
        'hist': {int(self._avg): 1},
        'start_ts': self._last_ts,
        'end_ts': now_ts,
    }
    self._last_ts = now_ts
    return 'wind', wind
def sniff_callback(self, line):
    """Parse one tab-separated sniffer output line onto SniffHandler.packet.

    Malformed lines (wrong field count, unparsable epoch time) are skipped,
    preserving the original best-effort behavior — but the bare `except:`
    is narrowed to ValueError so genuine bugs surface instead of being
    silently swallowed.
    """
    try:
        (time_epoch, src_ip, dst_ip, tcp_src_port, tcp_dst_port,
         udp_src_port, udp_dst_port, protocol) = line.split("\t")
        SniffHandler.packet.append({
            "time": timestamp(float(time_epoch)),
            "src_ip": src_ip,
            "dst_ip": dst_ip,
            "tcp_src_port": tcp_src_port,
            "tcp_dst_port": tcp_dst_port,
            "udp_src_port": udp_src_port,
            "udp_dst_port": udp_dst_port,
            "protocol": protocol,
        })
    except ValueError:
        # Raised both by unpacking the wrong number of fields and by
        # float() on junk input; anything else should propagate.
        pass
def main(args):
    """Entry point: build a TFR dataset for each requested subset."""
    try:
        log.info("----------------------------->\nargs:{}".format(args))
        image_basepath = args.image_basepath
        # Default the output path to a fresh timestamp when none was given.
        out_path = args.out_path or common.timestamp()
        log.info("out_path: {}".format(out_path))
        for idx, subset in enumerate(args.subset):
            create_tfr_dataset(
                args,
                subset=subset,
                ai_annon_data_home_local=image_basepath[idx],
                timestamp=out_path)
    except Exception as e:
        log.error("Exception occurred", exc_info=True)
def create_modelinfo(mi):
    """Create modelinfo configuration in a consistent way.

    Starts from the modelcfg defaults and overlays any values present in
    `mi`, then stamps release metadata and the derived filename.
    """
    from modelinfo import modelcfg
    modelinfocfg = {k: (mi[k] if k in mi else default)
                    for k, default in modelcfg.items()}
    ts = common.timestamp()
    modelinfocfg['problem_id'] = 'rld'
    modelinfocfg['rel_num'] = modelinfocfg['timestamp'] = ts
    modelinfocfg['model_info'] = apputil.get_modelinfo_filename(modelinfocfg)
    log.info("modelinfocfg: {}".format(modelinfocfg))
    return modelinfocfg
def get_sample(self):
    """Return accumulated wind statistics since the previous sample.

    Raises RuntimeError in calibration mode, where stats are not kept.
    """
    if self._calibration_mode:
        raise RuntimeError(
            'get_sample() is not supported in calibration mode')
    up_to_time = common.timestamp()
    revs = self._revolutions.get_and_reset()
    if revs:
        # An edge may have arrived after we read the clock; window N should
        # never contain a timestamp belonging to window N+1. (Technically
        # up_to_time should be exclusive, so using the last revolution's time
        # is incorrect — but a repeated timestamp would be ignored anyway,
        # and any larger one correctly falls into the next sample.)
        up_to_time = max(up_to_time, revs[-1])
    for ts in revs:
        self._stats.next_timestamp(ts)
    return 'wind', self._stats.get_stats_and_reset(up_to_time)
def get_sample(self):
    """Return a fake 'wind' sample spanning the time since the last call."""
    end_ts = common.timestamp()
    start_ts = self._last_ts
    self._avg = max(0, random_value(-2, 10))
    wind = dict(avg=self._avg,
                max=self._avg + random.random() * self._avg,
                max_ts=(end_ts + start_ts) / 2,
                hist={int(self._avg): 1},
                start_ts=start_ts,
                end_ts=end_ts)
    self._last_ts = end_ts
    return 'wind', wind
def parse_result(self, lines):
    """Parse tab-separated sniffer output lines into a list of packet dicts."""
    # Drop the first and last lines, which carry no packet information.
    packets = []
    for row in lines[1:-1]:
        (time_epoch, src_ip, dst_ip, tcp_src_port, tcp_dst_port,
         udp_src_port, udp_dst_port, protocol) = row.split("\t")
        packets.append({
            "time": timestamp(float(time_epoch)),
            "src_ip": src_ip,
            "dst_ip": dst_ip,
            "tcp_src_port": tcp_src_port,
            "tcp_dst_port": tcp_dst_port,
            "udp_src_port": udp_src_port,
            "udp_dst_port": udp_dst_port,
            "protocol": protocol,
        })
    return packets
def __init__(self, name, type, source=None, subtype=None, case_id=None):
    """Create an indicator, inferring the subtype when none is supplied."""
    self.name = name
    self.created = timestamp()
    self.type = type
    self.subtype = subtype
    self.source = source
    self.parent = None
    self.children = []
    self.case_id = case_id
    self.tags = []
    self.notes = []
    self.data = {}
    if self.subtype is not None:
        return  # caller supplied an explicit subtype; keep it
    if self.type == 'host':
        if is_ipv4(name):
            self.subtype = 'ipv4'
        elif is_ipv6(name):
            self.subtype = 'ipv6'
        elif is_fqdn(name):
            self.subtype = 'fqdn'
        else:
            warning('host subtype is not one of: ipv4, ipv6, fqdn')
    elif self.type == 'hash':
        hash_type = is_hash(name)
        if hash_type is None:
            warning('hash is not a valid md5, sha1, sha256, or sha512')
        else:
            self.subtype = hash_type
    elif self.type in ('user', 'email'):
        self.subtype = 'account'
    elif self.type == 'btc':
        self.subtype = 'cryptocurrency address'
def __init__(self, calibration_mode=False):
    """Set up the wind sensor: revolution counting, edge callbacks, stats.

    In calibration mode every edge is additionally logged through a
    CalibrationLogger; otherwise plain edge counting feeds WindStats.
    """
    self._log = LOGGER_FACTORY.get_logger('weather.wind')
    self._revolutions = wind_revolutions.Revolutions(
        C.WIND_EDGES_PER_REV())
    self._startup_time = common.timestamp()
    # TODO: Consider removing start timestamp and only use sample start/end timestamps.
    self._calibration_mode = calibration_mode
    if calibration_mode:
        self._calibration_logger = calibration_logger.CalibrationLogger()
        self._revolutions.calibration_init(self._calibration_logger)
        self._register_callback(
            self._revolutions.calibration_add_edge_and_log)
    else:
        self._register_callback(self._revolutions.add_edge)
    # NOTE(review): the flattened source is ambiguous about whether _stats
    # belongs inside the else-branch; created unconditionally here so the
    # attribute always exists — confirm against the original layout.
    self._stats = wind_stats.WindStats(C.WIND_HSF(), C.WIND_LSF(),
                                       C.WIND_MAX_ROTATION(), self._startup_time)
    self._log.info('initialized - CALIBRATION MODE' if calibration_mode
                   else 'initialized')
    self._log.info('pin=%d edges=%d debounce=%dms LSF=%g HSF=%g max=%dms' %
                   (C.WIND_INPUT_PIN(), C.WIND_EDGES_PER_REV(),
                    C.WIND_DEBOUNCE_MILLIS(), C.WIND_LSF(), C.WIND_HSF(),
                    C.WIND_MAX_ROTATION()))
def _consider_door_open_locked(self, door_open):
    """Append new door_open state if it differs from the previous state. Must hold lock."""
    if door_open == self._previous_door_open:
        return  # no transition -> nothing to record
    self._events.append((common.timestamp(), door_open))
    self._previous_door_open = door_open
def get_sample(self):
    """Return a fake temperature/humidity reading."""
    ts = common.timestamp()
    temperature = random_value(-5, 15)
    humidity = random_value(40, 80)
    return 'temp_hum', (ts, temperature, humidity)
def get_sample(self):
    """Read every configured ADC channel, scaled by its reference voltage.

    Returns ('adc', {channel: (ts, value)}).
    """
    readings = {}
    for index, channel in enumerate(SpiAdc._CHANNELS):
        scale = SpiAdc._VREFS[index]
        readings[channel] = (common.timestamp(), self._read(channel) * scale)
    return 'adc', readings
def add_edge(self, pin_ignored):
    """Count one edge; record a revolution timestamp each full wrap."""
    with self._lock:
        self._edges = (self._edges + 1) % self._edges_per_rev
        revolution_complete = (self._edges == 0)
        if revolution_complete:
            self._revs.append(common.timestamp())
def __init__(self):
    """Start with zero pilots and one initial (timestamp, 0) event."""
    self._count = 0
    self._pilots = [(common.timestamp(), self._count)]
def get_sample(self):
    """Return client metadata: NTP stratum, client version, current time."""
    meta = {
        'stratum': raspisys.get_stratum(),  # TODO: Avoid subprocess?
        'client_version': C.CLIENT_VERSION(),
        'cts': common.timestamp(),
    }
    return 'meta', meta
def __init__(self):
    """Record the creation time as the previous-sample boundary."""
    # Presumably used as the start_ts of the first sample window — see the
    # get_sample() methods that read self._last_ts; TODO confirm.
    self._last_ts = common.timestamp()
def get_sample(self):
    """Return fake ADC readings, one (ts, value) pair per configured channel."""
    return 'adc', {c: (common.timestamp(), random_value(10, 15))
                   for c in self._channels}
def get_sample(self):
    """Return the current CPU temperature as ('temp', (ts, value))."""
    reading = (common.timestamp(), raspisys.get_temperature())
    return 'temp', reading
def get_sample(self):
    """Return a fake rain reading."""
    sample = (common.timestamp(), random_value(0, 100))
    return 'rain', sample
def get_sample(self):
    """Return a fake wind-vane direction reading."""
    ts = common.timestamp()
    return 'vane', (ts, random_value(0, 360))
def _append_count_locked(self):
    """Record the current pilot count with a timestamp.

    Presumably the caller holds the lock (per the '_locked' suffix) — TODO confirm.
    """
    event = (common.timestamp(), self._count)
    self._pilots.append(event)