def __str__(self):
    """Return a multi-line, human-readable summary of this SubPile."""
    header = "SubPile\n"
    header += "number of files: %i\n" % len(self.files)
    header += "timerange: %s - %s\n" % (
        util.time_to_str(self.tmin), util.time_to_str(self.tmax))
    body = ""
    for label, mapping in (("networks", self.networks),
                           ("stations", self.stations),
                           ("locations", self.locations),
                           ("channels", self.channels),
                           ("deltats", self.deltats)):
        body += "%s: %s\n" % (label, ", ".join(sl(mapping.keys())))
    return header + body
def __str__(self):
    """Summarize this SubPile as a multi-line string."""
    lines = [
        'SubPile',
        'number of files: %i' % len(self.files),
        'timerange: %s - %s' % (
            util.time_to_str(self.tmin), util.time_to_str(self.tmax)),
    ]
    for label, mapping in [
            ('networks', self.networks),
            ('stations', self.stations),
            ('locations', self.locations),
            ('channels', self.channels),
            ('deltats', self.deltats)]:
        lines.append('%s: %s' % (label, ', '.join(sl(list(mapping.keys())))))
    return '\n'.join(lines) + '\n'
def __str__(self):
    """Describe this SubPile (file count, time range, codes) line by line."""
    def keyline(label, mapping):
        # sl() renders the mapping's keys for display (presumably a
        # sorted-list helper defined elsewhere in this module).
        return '%s: %s\n' % (label, ', '.join(sl(mapping.keys())))

    s = 'SubPile\n'
    s += 'number of files: %i\n' % len(self.files)
    s += 'timerange: %s - %s\n' % (
        util.time_to_str(self.tmin), util.time_to_str(self.tmax))
    s += keyline('networks', self.networks)
    s += keyline('stations', self.stations)
    s += keyline('locations', self.locations)
    s += keyline('channels', self.channels)
    s += keyline('deltats', self.deltats)
    return s
def __str__(self):
    """Return a readable multi-line description of this TracesFile."""
    out = []
    add = out.append
    add('TracesFile')
    add('abspath: %s' % self.abspath)
    add('file mtime: %s' % util.time_to_str(self.mtime))
    add('number of traces: %i' % len(self.traces))
    add('timerange: %s - %s' % (
        util.time_to_str(self.tmin), util.time_to_str(self.tmax)))
    for label, mapping in (
            ('networks', self.networks),
            ('stations', self.stations),
            ('locations', self.locations),
            ('channels', self.channels),
            ('deltats', self.deltats)):
        add('%s: %s' % (label, ', '.join(sl(list(mapping.keys())))))
    return '\n'.join(out) + '\n'
def __str__(self):
    """Summarize this TracesFile: path, mtime, trace count, range, codes."""
    s = "TracesFile\n"
    s += "abspath: %s\n" % self.abspath
    s += "file mtime: %s\n" % util.time_to_str(self.mtime)
    s += "number of traces: %i\n" % len(self.traces)
    s += "timerange: %s - %s\n" % (
        util.time_to_str(self.tmin), util.time_to_str(self.tmax))
    for label, mapping in (("networks", self.networks),
                           ("stations", self.stations),
                           ("locations", self.locations),
                           ("channels", self.channels),
                           ("deltats", self.deltats)):
        s += "%s: %s\n" % (label, ", ".join(sl(mapping.keys())))
    return s
def __str__(self):
    """Render this TracesFile as a newline-terminated summary block."""
    code_maps = (
        ('networks', self.networks),
        ('stations', self.stations),
        ('locations', self.locations),
        ('channels', self.channels),
        ('deltats', self.deltats))
    lines = ['TracesFile',
             'abspath: %s' % self.abspath,
             'file mtime: %s' % util.time_to_str(self.mtime),
             'number of traces: %i' % len(self.traces),
             'timerange: %s - %s' % (util.time_to_str(self.tmin),
                                     util.time_to_str(self.tmax))]
    lines.extend('%s: %s' % (label, ', '.join(sl(m.keys())))
                 for label, m in code_maps)
    return '\n'.join(lines) + '\n'
def iter_event_names(self, time_range=None, magmin=0., magmax=10., latmin=-90., latmax=90., lonmin=-180., lonmax=180.):
    """Query the USGS ComCat fdsnws event service and yield event names.

    :param time_range: ``(tmin, tmax)`` epoch seconds; required despite
        the ``None`` default -- the method indexes it unconditionally.
        TODO confirm whether any caller relies on the default.
    :param magmin, magmax: magnitude bounds sent to the service
    :param latmin, latmax, lonmin, lonmax: geographic bounds in degrees

    Fetched events are cached in ``self.events`` keyed by event name;
    only events whose time falls inside ``time_range`` are yielded.
    """
    p = []
    a = p.append
    a('format=geojson')
    a('catalog=%s' % self.catalog.lower())
    a('starttime=%s' % util.time_to_str(
        time_range[0], format='%Y-%m-%dT%H:%M:%S'))
    a('endtime=%s' % util.time_to_str(
        time_range[1], format='%Y-%m-%dT%H:%M:%S'))

    # Only constrain parameters that differ from the service defaults.
    if latmin != -90.:
        a('minlatitude=%g' % latmin)
    if latmax != 90.:
        a('maxlatitude=%g' % latmax)
    if lonmin != -180.:
        a('minlongitude=%g' % lonmin)
    if lonmax != 180.:
        a('maxlongitude=%g' % lonmax)
    if magmin != 0.:
        a('minmagnitude=%g' % magmin)
    if magmax != 10.:
        a('maxmagnitude=%g' % magmax)

    url = 'http://comcat.cr.usgs.gov/fdsnws/event/1/query?' + '&'.join(p)
    logger.debug('Opening URL: %s' % url)
    page = urllib2.urlopen(url).read()
    logger.debug('Received page (%i bytes)' % len(page))

    events = self._parse_events_page(page)
    for ev in events:
        self.events[ev.name] = ev

    for ev in events:
        # chained comparison replaces `a <= t and t <= b`
        if time_range[0] <= ev.time <= time_range[1]:
            yield ev.name
def iter_event_names(self, time_range=None, **kwargs):
    """Yield names of events matching the given query constraints.

    Numeric constraints (magmin/magmax/latmin/latmax/lonmin/lonmax) and
    the optional (tmin, tmax) time range are converted to strings and
    passed through to ``self.retrieve``.
    """
    query = {
        key: '%f' % kwargs[key]
        for key in ('magmin', 'magmax', 'latmin', 'latmax',
                    'lonmin', 'lonmax')
        if kwargs.get(key) is not None}

    if time_range is not None:
        fmt = '%Y-%m-%d_%H-%M-%S'
        if time_range[0] is not None:
            query['tmin'] = util.time_to_str(time_range[0], fmt)
        if time_range[1] is not None:
            query['tmax'] = util.time_to_str(time_range[1], fmt)

    for name in self.retrieve(**query):
        yield name
def _parse_events_page(self, page): events = [] for line in page.splitlines(): toks = line.strip().split(',') if len(toks) != 9: continue try: int(toks[0]) except: continue t = util.str_to_time(','.join(toks[:4]).strip(), format='%Y,%m,%d,%H%M%S.OPTFRAC') lat = float(toks[4]) lon = float(toks[5]) mag = float(toks[6]) depth = float(toks[7]) catalog = toks[8] name = 'USGS-%s-' % catalog + util.time_to_str(t, format='%Y-%m-%d_%H-%M-%S.3FRAC') ev = model.Event( lat=lat, lon=lon, time=t, name=name, depth=depth*1000., magnitude=mag, catalog=catalog) events.append( ev ) return events
def user_register(self, user_name, password, e_mail, key_value=None):
    """Create a new row in user_info unless the name is empty or taken.

    :param user_name: desired user name; surrounding whitespace stripped
    :param password: stored as given (stripped); no hashing is visible here
    :param e_mail: contact address (stripped)
    :param key_value: optional dict of extra attributes, serialized with
        util.dict_to_str (``None`` -> empty dict; mutable default removed)
    :returns: ``(result, description)`` -- True with "User is created."
        on success, False with a reason otherwise.
    """
    if key_value is None:
        key_value = {}

    name = str(user_name).strip()
    if name == "":
        logging.info("username is empty.")
        return False, "username is empty."

    if self.user_is_existing(name):
        # original debug call forgot .format(user_name)
        logging.debug("[{0}] user is created before registration.".format(name))
        return False, "User is existing."

    now_time = util.time_to_str()
    kv_str = util.dict_to_str(key_value)
    cur = self.sqlite_conn.cursor()
    cur.execute(
        "insert into user_info(user_name, password, e_mail, register_time, key_value)"
        " values (?, ?, ?, ?, ?)",
        (name, password.strip(), e_mail.strip(), now_time, kv_str))
    # commit data change
    self.sqlite_conn.commit()
    logging.info("[{0}] user is created.".format(name))
    return True, "User is created."
def _parse_events_page(self, page): events = [] for line in page.splitlines(): toks = line.strip().split(',') if len(toks) != 9: continue try: int(toks[0]) except: continue t = util.str_to_time(','.join(toks[:4]).strip(), format='%Y,%m,%d,%H%M%S.OPTFRAC') lat = float(toks[4]) lon = float(toks[5]) mag = float(toks[6]) depth = float(toks[7]) catalog = toks[8] name = 'USGS-%s-' % catalog + util.time_to_str( t, format='%Y-%m-%d_%H-%M-%S.3FRAC') ev = model.Event(lat=lat, lon=lon, time=t, name=name, depth=depth * 1000., magnitude=mag, catalog=catalog) events.append(ev) return events
def get_hash(self):
    """Return a lowercase base-36 digest identifying this event."""
    fields = (
        util.time_to_str(self.time),
        str(self.lat),
        str(self.lon),
        str(self.depth),
        str(self.magnitude),
        self.catalog,
        self.name,
        self.region)
    return util.base36encode(abs(hash(fields))).lower()
def get_hash(self):
    """Compute a short base-36 hash over the event's identifying fields."""
    numeric = tuple(
        str(v) for v in (self.lat, self.lon, self.depth, self.magnitude))
    key = (util.time_to_str(self.time),) + numeric + (
        self.catalog, self.name, self.region)
    return util.base36encode(abs(hash(key))).lower()
def restart_managed_object(ctx, obj: typing.Union[client.V1Deployment, client.V1DaemonSet]):
    """Trigger a rolling restart by stamping the pod template.

    Writes the current UTC time into the annotation named by
    ``ctx.obj['restart_label']`` on the object's pod template, then
    patches the Deployment or DaemonSet in place via the apps/v1 API.
    """
    stamp = time_to_str(datetime.utcnow())
    # NOTE(review): assumes the template's annotations mapping already
    # exists -- confirm callers never pass objects with annotations=None.
    obj.spec.template.metadata.annotations[ctx.obj['restart_label']] = stamp

    apps = client.AppsV1Api()
    patch_args = dict(
        name=obj.metadata.name,
        namespace=obj.metadata.namespace,
        body=obj,
    )
    if isinstance(obj, client.V1Deployment):
        apps.patch_namespaced_deployment(**patch_args)
    elif isinstance(obj, client.V1DaemonSet):
        apps.patch_namespaced_daemon_set(**patch_args)
def user_typing_record(self, user_name, typing, key_value=None):
    """Insert one typing record for an existing user into user_record.

    :param user_name: user the record belongs to (whitespace stripped)
    :param typing: the typing text to store (stripped)
    :param key_value: optional dict of extras, serialized with
        util.dict_to_str (``None`` -> empty dict; mutable default removed)
    :returns: ``(result, description)`` -- True with "Data is record."
        when stored, False with "User is not existing." otherwise.
    """
    if key_value is None:
        key_value = {}

    name = user_name.strip()
    if not self.user_is_existing(name):
        return False, "User is not existing."

    now_time = util.time_to_str()
    kv_str = util.dict_to_str(key_value)
    cur = self.sqlite_conn.cursor()
    cur.execute(
        "insert into user_record(user_name, typing_record, record_time, key_value)"
        " values (?, ?, ?, ?)",
        (name, typing.strip(), now_time, kv_str))
    # commit data change
    self.sqlite_conn.commit()
    return True, "Data is record."
def _parse_events_page(self, page): import json doc = json.loads(page) events = [] for feat in doc['features']: props = feat['properties'] geo = feat['geometry'] lon, lat, depth = [ float(x) for x in geo['coordinates'] ] t = util.str_to_time('1970-01-01 00:00:00') + props['time'] *0.001 if props['mag'] is not None: mag = float(props['mag']) else: mag = None if props['place'] != None: region = str(props['place']) else: region = None catalog= str(props['net'].upper()) name = 'USGS-%s-' % catalog + util.time_to_str(t, format='%Y-%m-%d_%H-%M-%S.3FRAC') ev = model.Event( lat=lat, lon=lon, time=t, name=name, depth=depth*1000., magnitude=mag, region=region, catalog=catalog) events.append(ev) return events
def _parse_events_page(self, page): import json doc = json.loads(page) events = [] for feat in doc['features']: props = feat['properties'] geo = feat['geometry'] lon, lat, depth = [float(x) for x in geo['coordinates']] t = util.str_to_time('1970-01-01 00:00:00') + props['time'] * 0.001 if props['mag'] is not None: mag = float(props['mag']) else: mag = None if props['place'] != None: region = str(props['place']) else: region = None catalog = str(props['net'].upper()) name = 'USGS-%s-' % catalog + util.time_to_str( t, format='%Y-%m-%d_%H-%M-%S.3FRAC') ev = model.Event(lat=lat, lon=lon, time=t, name=name, depth=depth * 1000., magnitude=mag, region=region, catalog=catalog) events.append(ev) return events
def sdate(t):
    """Format the time *t* as a 'YYYY-MM-DD' date string."""
    return util.time_to_str(t, '%Y-%m-%d')
url = url[:-1] # Remove the last ',' url += "&cc=" + cc + "&filters=price_overview" data = retrieve_data(url) if data is None: continue # Save the price of the data to the file for (app_id, app_name) in this_time_apps: if data[str(app_id)]["success"] is False: log_warning("Failed to retrieve the price data for '%s' (id: %d)" % (app_name, app_id)) else: # Check if it has price data if "price_overview" not in data[str(app_id)]["data"]: if args.fine: log_fine("App '%s' (id: %d) is free." % (app_name, app_id)) continue # Retrieve the price price = data[str(app_id)]["data"]["price_overview"] init_price = price["initial"] final_price = price["final"] # Save to the file file_path = cc_dir + "/app_price_" + str(app_id) + ".txt" with open(file_path, mode='a') as f: f.write("%d %d %d %s\n" % (START_TIME, init_price, final_price, time_to_str(START_TIME))) # == Finish == log_info("Finished. It takes %f seconds totally." % (time.time() - START_TIME))
def sdatetime(t):
    """Format the time *t* as an ISO-like 'YYYY-MM-DDTHH:MM:SS' string."""
    return util.time_to_str(t, format='%Y-%m-%dT%H:%M:%S')
def test_time_to_str(self):
    """time_to_str renders epoch seconds as 'YYYY-MM-DD HH:MM:SS UTC'."""
    now_secs = 1512202121.623
    now_str = "2017-12-02 08:08:41 UTC"
    # leftover debug print() removed; the assertion carries the check
    self.assertEqual(now_str, time_to_str(now_secs))