def apply_holidays(self):
    """Insert a full working-day Entry for each configured holiday.

    Iterates ``self.bh.holidays``; each element ``h`` is a
    (date, project-name) pair.  Entries for the generic "Holidays"
    project, dates outside [self.start, self.end], and weekend days
    are skipped.
    """
    for h in self.bh.holidays:
        # h[1] is the project/label; the generic "Holidays" bucket is skipped.
        if h[1] == "Holidays":
            continue
        d = h[0]
        # Only apply holidays inside the reporting window.
        if not (self.start <= d <= self.end):
            continue
        # weekday() is 0-based (Mon=0); +1 matches the 1-based convention
        # used by self.bh.weekends.
        weekday = d.weekday() + 1
        if weekday in self.bh.weekends:
            log.warning(f"Skipping weekend day {d}")
            continue
        if d not in self.days:
            log.info(f"Adding new day entry list for day {d}")
            self.days[d] = StrictList(Entry)
        # Net working hours for the day = daily hours minus break time.
        dwh = self.bh.get_daily_working_hours() - self.bh.breaks
        dwstart = self.bh.worktimings[0]
        dtime = add_hours(d, dwstart)
        dtime = self.get_timezone().localize(dtime)
        self.project_seconds[h[1]] += tdelta(hours=dwh).total_seconds()
        self.total_time += tdelta(hours=dwh)
        entry = Entry(h[1], dtime, add_hours(dtime, dwh), tdelta(hours=dwh), ["off"])
        # NOTE(review): pause_entry is constructed but never appended to
        # self.days[d] — confirm whether the break entry should be stored.
        pause_entry = Entry(h[1], entry.end, add_hours(entry.end, self.bh.breaks),
                            tdelta(hours=self.bh.breaks), ["pause", "off"])
        log.info(f"Adding entry {entry} to {d}")
        self.days[d].append(entry)
def __init__(self, startDate, fhr, flagged, lat, lon, mslp, maxwind, fcstDate=None):
    """Build a track point.

    :param startDate: cycle start (datetime, or a POSIX timestamp which
        will be converted)
    :param fhr: forecast hour offset from startDate (may be None when
        fcstDate is given)
    :param flagged: truthy/falsy marker; accepts 0/1/'False'/'True'
    :param lat, lon: position in degrees
    :param mslp: minimum sea-level pressure value
    :param maxwind: maximum wind value
    :param fcstDate: explicit forecast datetime; overrides fhr arithmetic
    """
    self.start_date = startDate
    self.fhr = fhr
    if str(flagged) == '0' or str(flagged) == 'False':
        self.flagged = False
    elif str(flagged) == '1' or str(flagged) == 'True':
        self.flagged = True
    else:
        # Bug fix: this was a Python 2 ``print`` statement, which is a
        # syntax error under Python 3 (the rest of the file uses py3).
        print('Unable to determine Flag status')
        sys.exit(1)
    self.lat = lat
    self.lon = lon
    self.mslp_value = mslp
    self.maxwind_value = maxwind
    #self.mslp = mslp
    #self.maxwind = maxwind
    if fcstDate:
        self.fcst_date = fcstDate
    elif fhr is not None:
        if not isinstance(self.start_date, dtime):
            # TODO : Shoudl be datetime going forward, so shouldn't need this
            #sd = dtime.fromtimestamp(self.start_date).strftime('%c')
            sd = dtime.fromtimestamp(self.start_date)
            self.fcst_date = sd + tdelta(hours=fhr)
        else:
            self.fcst_date = self.start_date + tdelta(hours=fhr)
def main():
    """Scan forward from today for nights where ``args.sa`` is the support
    astronomer on telescope 1 or 2, then print the schedule info for each
    matching night.
    """
    today = dt.utcnow()
    dateobj = dt.utcnow()
    dates = []
    done = False
    while not done:
        date = dateobj.strftime('%Y-%m-%d')
        print(f"Checking: {date}")
        if args.sa == get_SA(date=date, tel=1):
            dates.append([date, 1])
        elif args.sa == get_SA(date=date, tel=2):
            dates.append([date, 2])
        else:
            # Stop at the first non-matching night once at least one match
            # has been collected.
            if len(dates) != 0:
                done = True
        # Safety stop: never search more than 30 days ahead.
        # Bug fix: the original compared ``today > today + tdelta(30, 0)``,
        # which is always False, so the loop could run forever when no
        # match was ever found.
        if dateobj > today + tdelta(30, 0):
            done = True
        dateobj += tdelta(1, 0)
    for date in dates:
        result = get_telsched(from_date=date[0], ndays=1, telnr=date[1])[0]
        print(result)
        print(result['PiEmail'])
        print(result['Observers'])
def parse_iso8601(text):
    """Parse an iso 8601 string, must be in extended format, currently
    only supports dates.

    :param text: e.g. '2020', '2020-05' or '2020-05-17'; a 'T...' time
        part raises NotImplementedError
    :return: (first, last) pair of datetimes spanning the given period
    :raises ValueError: on a malformed date part
    """
    date_text = text.split('T')[0]
    try:
        time_text = text.split('T')[1]
    except IndexError:
        time_text = None
    date_parts = date_text.split('-')
    if len(date_parts) < 1:
        # Defensive only: str.split always returns at least one element.
        # Bug fix: the original raised 'date %s is invalid' without ever
        # interpolating the %s placeholder.
        raise ValueError('date %s is invalid' % text)
    elif len(date_parts) == 3:
        first_day = date(int(date_parts[0]), int(date_parts[1]), int(date_parts[2]))
        last_day = first_day + tdelta(days=1)
    elif len(date_parts) == 2:
        year = int(date_parts[0])
        month = int(date_parts[1])
        first_day = date(year, month, 1)
        # Last day of the month from the calendar module.
        days = calendar.monthrange(year, month)[1]
        last_day = first_day + tdelta(days)
    elif len(date_parts) == 1:
        year = int(date_parts[0])
        first_day = date(year, 1, 1)
        last_day = date(year + 1, 1, 1)
    else:
        raise ValueError('too many date_parts in iso 8601 date')
    if time_text:
        raise NotImplementedError('can only parse dates at this time')
    else:
        first_time = last_time = time(0, 0, 0)
    return combine_dt(first_day, first_time), combine_dt(last_day, last_time)
def parse_iso8601(text):
    """Parse an iso 8601 string, must be in extended format, currently only supports dates."""
    # Split off an optional time component; absent -> None.
    pieces = text.split('T')
    date_text = pieces[0]
    time_text = pieces[1] if len(pieces) > 1 else None
    parts = date_text.split('-')
    n = len(parts)
    if n < 1:
        raise ValueError('date %s is invalid')
    if n == 3:
        # Full date: the period is exactly one day.
        first_day = date(int(parts[0]), int(parts[1]), int(parts[2]))
        last_day = first_day + tdelta(days=1)
    elif n == 2:
        # Year-month: the period is the whole month.
        year, month = int(parts[0]), int(parts[1])
        first_day = date(year, month, 1)
        last_day = first_day + tdelta(calendar.monthrange(year, month)[1])
    elif n == 1:
        # Year only: the period is the whole year.
        year = int(parts[0])
        first_day = date(year, 1, 1)
        last_day = date(year + 1, 1, 1)
    else:
        raise ValueError('too many date_parts in iso 8601 date')
    if time_text:
        raise NotImplementedError('can only parse dates at this time')
    first_time = last_time = time(0, 0, 0)
    return combine_dt(first_day, first_time), combine_dt(last_day, last_time)
def get(self, telescope):
    """Render the list-of-nights page for ``telescope``.

    Builds a date list from the earliest "date" value in the telescope's
    mongo collection up to now, counts images per night, and renders the
    ``night_list.html`` template with the most recent nights first.
    """
    tlog.app_log.info('Get request for ListOfNights recieved')
    telescope = telescope.strip('/')
    ## Create Telescope Object
    # config_file = os.path.join(os.path.expanduser('~'), '.{}.yaml'.format(telescope))
    tel = Telescope(telescope)
    telescopename = tel.name
    client = MongoClient(tel.mongo_address, tel.mongo_port)
    db = client[tel.mongo_db]
    collection = db[tel.mongo_collection]
    # first_date_string = sorted(collection.distinct("date"), reverse=False)[0]
    # first_date = dt.strptime('{} 00:00:00'.format(first_date_string), '%Y%m%dUT %H:%M:%S')
    # Earliest "date" value in the collection (assumed datetime-valued here).
    first_date = sorted(collection.distinct("date"), reverse=False)[0]
    tlog.app_log.info(' Building date_list')
    date_list = []
    while first_date <= dt.utcnow():
        date_list.append(first_date.strftime('%Y%m%dUT'))
        first_date += tdelta(1, 0)
    # NOTE(review): one extra (future) date is appended after the loop —
    # presumably to cover the in-progress UT night; confirm intent.
    date_list.append(first_date.strftime('%Y%m%dUT'))
    tlog.app_log.info(' Done')
    night_plot_path = os.path.abspath('/var/www/nights/')
    tlog.app_log.info(' Looping over date_list')
    nights = []
    for date_string in date_list:
        night_info = {'date': date_string}
        night_graph_file = '{}_{}.png'.format(date_string, telescope)
        if os.path.exists(os.path.join(night_plot_path, night_graph_file)):
            night_info['night graph'] = night_graph_file
        # night_info['n images'] = collection.find( {"date":date_string} ).count()
        # Count images whose datetime falls inside this UT night.
        start = dt.strptime(date_string, '%Y%m%dUT')
        end = start + tdelta(1)
        night_info['n images'] = collection.find({
            "date": {
                "$gt": start,
                "$lt": end
            }
        }).count()
        if night_info['n images'] > 0:
            nights.append(night_info)
    nights.reverse()  # reverse sort to put recent dates at top of page
    tlog.app_log.info(' Done')
    tlog.app_log.info(' Rendering ListOfNights')
    self.render("night_list.html",
                title="{} Results".format(telescopename),
                telescope=telescope,
                telescopename=telescopename,
                nights=nights,
                )
    tlog.app_log.info(' Done')
def multiRW(dimx, h, pot, nsamples, nchains, thin, L, verbose=True):
    """Run ``nchains`` random-walk MC chains (in parallel) and merge them.

    :param dimx: dimension of each sample
    :param h: proposal step size forwarded to chainRW
    :param pot: potential/target function forwarded to chainRW
    :param nsamples: samples per chain (and in the merged chain)
    :param nchains: total number of chains to run
    :param thin: thinning factor forwarded to chainRW
    :param L: half-width of the uniform box used for starting points
    :param verbose: print progress and timing estimates
    :return: (X, mean_acceptance, expect) — the merged sample array,
        the average acceptance rate, and the empirical mean of X
    """
    parallelchains = mp.cpu_count()
    if (verbose):
        print("--- Multichain MC ---")
        print("TOTAL number of chains: ", nchains)
        print("Chains in parallel: ", parallelchains)
        print("samples of dimension ", dimx)
        # input("press ENTER to continue")
    chains = []
    acceptrates = []

    # Collect the (chain, _, acceptance-rate) result of a chainRW run.
    def addChain(chainRW_result):
        chains.append(chainRW_result[0])
        acceptrates.append(chainRW_result[2])

    # Prepare nchains random starting points, one per chain.
    startpoints = []
    for i in range(nchains):
        startpoints.append(np.random.uniform(-L, L, dimx))
    # Run one chain synchronously to estimate the total running time.
    starttime = time.time()
    addChain(chainRW(startpoints[0], h, pot, nsamples, thin, L, 2, None))
    if (verbose):
        linear_time = int((time.time() - starttime) * nchains)
        optim_time = linear_time / parallelchains
        print("Approx. MAX running time: " + str(tdelta(seconds=linear_time)))
        print("Approx. MIN running time: " + str(tdelta(seconds=optim_time)))
        print("staring with parallels chains")
    # Produce the remaining chains in parallel.
    pool = mp.Pool(parallelchains)
    for j in range(1, nchains):
        pool.apply_async(chainRW,
                         args=(startpoints[j], h, pot, nsamples, thin, L, 2, j),
                         callback=addChain)
    pool.close()
    pool.join()
    # Construct a SINGLE chain by drawing random samples from the chains,
    # counting from the end in order to avoid the burn-in samples.
    # Bug fix: np.random.random_integers (inclusive bounds) was deprecated
    # and removed from NumPy; np.random.randint uses an exclusive upper
    # bound, so the bounds below are shifted by one to keep the same range.
    X = []
    for i in range(nsamples):
        nth = np.random.randint(1, nsamples)
        X.append(chains[np.random.randint(0, nchains)][-nth])
    X = np.asanyarray(X)
    acceptrates = np.asanyarray(acceptrates)
    mean_acceptance = sum([x for x in acceptrates]) / len(acceptrates)
    print("Averge rate: ", mean_acceptance)
    expect = sum([x for x in X]) / nsamples
    print("Multichain expectation: ", expect)
    return X, mean_acceptance, expect
def get(self, telescope):
    """Render the list-of-nights page for ``telescope``.

    Variant of the ListOfNights handler whose per-night image count is
    additionally filtered on the telescope name.
    """
    tlog.app_log.info('Get request for ListOfNights recieved')
    telescope = telescope.strip('/')
    ## Create Telescope Object
    # config_file = os.path.join(os.path.expanduser('~'), '.{}.yaml'.format(telescope))
    tel = Telescope(telescope)
    telescopename = tel.name
    client = MongoClient(tel.mongo_address, tel.mongo_port)
    db = client[tel.mongo_db]
    collection = db[tel.mongo_collection]
    # first_date_string = sorted(collection.distinct("date"), reverse=False)[0]
    # first_date = dt.strptime('{} 00:00:00'.format(first_date_string), '%Y%m%dUT %H:%M:%S')
    # Earliest "date" value in the collection (assumed datetime-valued here).
    first_date = sorted(collection.distinct("date"), reverse=False)[0]
    tlog.app_log.info(' Building date_list')
    date_list = []
    while first_date <= dt.utcnow():
        date_list.append(first_date.strftime('%Y%m%dUT'))
        first_date += tdelta(1, 0)
    # NOTE(review): one extra (future) date is appended after the loop —
    # presumably to cover the in-progress UT night; confirm intent.
    date_list.append(first_date.strftime('%Y%m%dUT'))
    tlog.app_log.info(' Done')
    night_plot_path = os.path.abspath('/var/www/nights/')
    tlog.app_log.info(' Looping over date_list')
    nights = []
    for date_string in date_list:
        night_info = {'date': date_string}
        night_graph_file = '{}_{}.png'.format(date_string, telescope)
        if os.path.exists(os.path.join(night_plot_path, night_graph_file)):
            night_info['night graph'] = night_graph_file
        # night_info['n images'] = collection.find( {"date":date_string} ).count()
        # Count this telescope's images within the UT night.
        start = dt.strptime(date_string, '%Y%m%dUT')
        end = start + tdelta(1)
        querydict = {"date": {"$gt": start, "$lt": end}, "telescope": tel.name}
        night_info['n images'] = collection.find(querydict).count()
        if night_info['n images'] > 0:
            nights.append(night_info)
    nights.reverse()  # reverse sort to put recent dates at top of page
    tlog.app_log.info(' Done')
    tlog.app_log.info(' Rendering ListOfNights')
    self.render("night_list.html",
                title="{} Results".format(telescopename),
                telescope=telescope,
                telescopename=telescopename,
                nights=nights,
                )
    tlog.app_log.info(' Done')
def __init__(self, info, subject_day_index, weeks_interval, denominator):
    """Build one scheduled subject occurrence.

    ``info`` is a (type, name, auditorium, professor) tuple; the first
    occurrence date is derived from the class-level semester start date,
    shifted by the week parity (``denominator``) and day index.
    """
    (self.type, self.name, self.auditorium, self.professor) = info
    # Offset in days from the semester start: whole weeks plus day index.
    offset_days = subject_day_index + 7 * denominator
    first_occurrence = self.semester_start_date + tdelta(days=offset_days)
    self.start_date = first_occurrence.strftime('%Y%m%d')
    self.weeks_interval = weeks_interval
def addReplayToTracker(self, rep: dict, row):
    """Append one replay record ``rep`` as a new row of the game tracker table.

    :param rep: replay metadata dict (fileName, mode, pps, time, score,
        pieces, lines); ``time`` is a frame count converted to seconds
        via ``self.framerate``
    :param row: source row index, stored in each item's whatsThis so the
        click handler can map back to the CSV row
    """
    self.gameTracker.insertRow(self.gameTracker.rowCount())
    GT = self.gameTracker
    RC = GT.rowCount()
    # Frames -> seconds -> "H:MM:SS.ffffff"-style timedelta string.
    timeStr = str(
        tdelta(0, float(rep['time']) / self.framerate))
    # timedelta omits fractional seconds when they are zero; normalise to
    # always show decimals, then clamp the display width.
    if '.' not in timeStr:
        timeStr += '.000'
    timeStr = timeStr[:10]
    GTItemStrings = [
        # Tick/cross marks whether the replay is ignored.
        '✓' if rep['fileName'] not in ignoredReplaysList else '✖',
        rep['fileName'],
        rep['mode'],
        str(round(float(rep['pps']), 3)).ljust(5, '0'),
        timeStr,
        rep['score'],
        rep['pieces'],
        rep['lines'],
    ]
    # zip stops at the shorter sequence, so only the 8 strings above are used
    # even though the range allows 9 columns.
    for col, i in zip(range(9), GTItemStrings):
        item = QTableWidgetItem()
        item.setText(i)
        item.setWhatsThis(str(row))
        if i == '✓' or i == '✖':
            item.setTextAlignment(Qt.AlignCenter)
        elif i.replace('.', '').isnumeric():
            # Right-align numeric columns.
            item.setTextAlignment(Qt.AlignRight | Qt.AlignVCenter)
        GT.setItem(RC - 1, col, item)
def computeVWSP(self, stockSymbol):
    """
    Computes the Volume Weighted Stock Price based on trades in past 15 minutes

    :param stockSymbol: Must be recognised from the data in sampleData
    :return: volume-weighted average trade price over the last 15 minutes
    :raises ValueError: if the stock has not been traded in that window
    """
    currentTime = dt.now()
    deltaTime = tdelta(minutes=15)
    # Bug fix: the original filtered with bitwise ``&``, which does not
    # short-circuit and silently depends on explicit parentheses for
    # precedence; use the boolean ``and``.
    gen = ((trade['tradePrice'], trade['quantityShares'])
           for trade in self.histTrade
           if trade['stock'] == stockSymbol
           and currentTime - trade['timestamp'] < deltaTime)
    res = 0.
    totalQuantity = 0.
    for price, quantity in gen:
        res += price * quantity
        totalQuantity += quantity
    if totalQuantity == 0:
        # possibly give other information, like the last trade price for
        # that given stock if it exists
        raise ValueError(
            "The stock {stock} has not been traded during the last 15 minutes."
            .format(stock=stockSymbol))
    return res / totalQuantity
def test_completed(self):
    """A past-due action with both start and finish is a completed task/event."""
    due_date = get_day(-1)
    start_date = get_day(-2)
    finish_date = start_date + tdelta(days=1)
    action0 = Action(self.text, due=due_date, start=start_date, finish=finish_date)
    # Pairs of (actual, expected) checked in the original order.
    checks = [
        (action0.text, self.text),
        (action0.due, due_date),
        (action0.start_time, start_date),
        (action0.finish_time, finish_date),
        (action0.is_task, True),
        (action0.is_event, True),
        (action0.completed, True),
        (action0.duration, tdelta(days=1)),
        (action0.latent, False),
        (action0.ongoing, False),
    ]
    for actual, expected in checks:
        self.assertEqual(actual, expected)
def init_by_config(self, start_date, end_date, capital, benchmark, data_path='Data/test_data/', save_path='Output/'):
    """Initialise the backtest configuration.

    :param start_date: 'YYYY-MM-DD' string, first trading day
    :param end_date: 'YYYY-MM-DD' string, last trading day
    :param capital: initial total capital
    :param benchmark: benchmark identifier
    :param data_path: input data directory (trailing '/' added if missing)
    :param save_path: output directory (trailing '/' added if missing)
    """
    # Normalise both paths to always end with a slash.
    if data_path[-1] == '/':
        self._data_path = data_path
    else:
        self._data_path = data_path + '/'
    if save_path[-1] == '/':
        self._save_path = save_path
    else:
        self._save_path = save_path + '/'
    self._start_date = pd.Timestamp(start_date)
    self._end_date = pd.Timestamp(end_date)
    times = start_date.split('-')
    # Seed the sequence with the day *before* the start date.
    # NOTE(review): this first element is a pd.Timestamp while all later
    # elements are 'YYYY-MM-DD' strings — confirm downstream code copes
    # with the mixed types.
    self._date_sequence.append(
        pd.Timestamp(
            dt(int(times[0]), int(times[1]), int(times[2])) + tdelta(days=-1)))
    # Keep only the calendar days for which data actually exists.
    for date in pd.date_range(self._start_date, self._end_date, freq='D'):
        date_str = str(date).split(' ')[0]
        if self._has_date(date_str):
            self._date_sequence.append(date_str)
    # Number of trading periods (excludes the seeded day-before entry).
    self._period = len(self._date_sequence) - 1
    self._total_capital = capital
    self._benchmark = benchmark
def __init__(self, date_string=None, data_file=None, *args, **kwargs):
    """Set up a weather plotter for one UT night.

    :param date_string: 'YYYYMMDDUT' night to plot; None selects "today"
        mode (a rolling 24-hour window ending now)
    :param data_file: optional explicit data file forwarded to
        get_table_data(); None uses the default source
    Exits the process (sys.exit) when no table data is available.
    """
    super(WeatherPlotter, self).__init__()
    self.args = args
    self.kwargs = kwargs
    config = load_config(config_files=['peas'])
    self.cfg = config['weather']['plot']
    location_cfg = config.get('location', None)
    self.thresholds = config['weather'].get('aag_cloud', None)
    if not date_string:
        # "Today" mode: plot the trailing 24 hours up to utcnow.
        self.today = True
        self.date = dt.utcnow()
        self.date_string = self.date.strftime('%Y%m%dUT')
        self.start = self.date - tdelta(1, 0)
        self.end = self.date
        # "Last hour" panel window: 60 minutes back to 5 minutes ahead.
        self.lhstart = self.date - tdelta(0, 60 * 60)
        self.lhend = self.date + tdelta(0, 5 * 60)
    else:
        # Historic mode: plot the given UT calendar day in full.
        self.today = False
        self.date = dt.strptime('{} 23:59:59'.format(date_string),
                                '%Y%m%dUT %H:%M:%S')
        self.date_string = date_string
        self.start = dt(self.date.year, self.date.month, self.date.day, 0, 0, 0, 0)
        self.end = dt(self.date.year, self.date.month, self.date.day, 23, 59, 59, 0)
    print('Creating weather plotter for {}'.format(self.date_string))
    self.twilights = self.get_twilights(location_cfg)
    self.table = self.get_table_data(data_file)
    if self.table is None:
        warnings.warn("No data")
        sys.exit(0)
    self.time = pd.to_datetime(self.table['date'])
    first = self.time[0].isoformat()
    last = self.time[-1].isoformat()
    print(' Retrieved {} entries between {} and {}'.format(
        len(self.table), first, last))
    # The most recent row doubles as "current conditions" in today mode.
    if self.today:
        self.current_values = self.table[-1]
    else:
        self.current_values = None
def get_all_work_days(self, month=-1, year=-1):
    """Yield every working (non-weekend) day in the requested range.

    With the defaults the range is [self.start, self.end].  When ``month``
    is given only that month is scanned; ``year`` defaults to
    ``self.start.year``.  Weekday numbers are 1-based (Mon=1 .. Sun=7) to
    match ``self.weekends``.

    :param month: 1-12 to restrict to one month, or -1 for the full range
    :param year: year for the month restriction, or -1 for self.start.year
    :return: generator of datetime values, one per working day
    """
    if month != -1:
        if year == -1:
            year = self.start.year
        _, diff_days = monthrange(year, month)
        # Bug fix: the original built the month bounds from
        # self.start.year / self.end.year and then iterated from
        # self.start anyway, so the month restriction never applied.
        # Build both bounds from the requested (year, month) and iterate
        # from the first of that month.
        first = dt(year, month, 1)
        diff = (dt(year, month, diff_days) + tdelta(days=1)) - first
    else:
        first = self.start
        diff = (self.end + tdelta(days=1)) - self.start
    for i in range(diff.days):
        day = first + tdelta(i)
        # weekday() is 0-based; +1 matches the 1-based weekend convention.
        wd = day.weekday() + 1
        if wd not in self.weekends:
            yield day
def compare_twilights():
    """Print per-night twilight-time differences from 2018-08-01 to 2019-02-01."""
    fmt = '%Y-%m-%d %H:%M:%S'
    current = dt.strptime('2018-08-01 18:00:00', fmt)
    stop = dt.strptime('2019-02-01 18:00:00', fmt)
    one_day = tdelta(0, 24 * 60 * 60)
    while current < stop:
        diff = compare_twilights_on(current.strftime('%Y-%m-%d'))
        print(f"{current}: {diff[0]:+5.1f}, {diff[1]:+5.1f}, {diff[2]:+5.1f}")
        current += one_day
def retrieve_weather(lookbackdays=0):
    """Fetch weather documents newer than ``lookbackdays`` days (+120 s) old.

    :param lookbackdays: how many days back to query (plus a 120 s margin)
    :return: list of matching mongo documents
    """
    cutoff = dt.utcnow() - tdelta(lookbackdays, 120)
    client = pymongo.MongoClient('192.168.1.101', 27017)
    db = client.vysos
    weatherdata = list(db.weather.find({"date": {"$gt": cutoff}}))
    client.close()
    return weatherdata
def compare_twilights():
    """Print nightly twilight differences for 2018-08-01 through 2019-01-31."""
    time_format = '%Y-%m-%d %H:%M:%S'
    night = dt.strptime('2018-08-01 18:00:00', time_format)
    final_night = dt.strptime('2019-02-01 18:00:00', time_format)
    step = tdelta(0, 24 * 60 * 60)
    while night < final_night:
        datestr = night.strftime('%Y-%m-%d')
        diffs = compare_twilights_on(datestr)
        print(f"{night}: {diffs[0]:+5.1f}, {diffs[1]:+5.1f}, {diffs[2]:+5.1f}")
        night += step
def av_unit_price(self, symbol, at_date):
    # TODO improve error handling
    # TODO improve multiple attempt failure handling
    """Uses AlphaVantage to find the unit price of a symbol at a date

    :param symbol: ticker symbol recognised by AlphaVantage
    :param at_date: datetime.date of interest; neighbouring days are tried
        as fallbacks for non-trading days
    :return: closing price as float (exits the process when none found)
    """
    url = "https://www.alphavantage.co/query?function=TIME_SERIES_DAILY&"\
        "outputsize=full&symbol={0}&apikey={1}"\
        .format(symbol, self.av_key)
    fail = True
    attempt = 0
    while (fail is True) and (attempt < 10):
        try:
            r = requests.get(url)
            time_series = json.loads(r.text)["Time Series (Daily)"]
            fail = False
            logging.info("Time series retreival was successful")
        except Exception:
            # Bug fix: a bare ``except:`` also swallowed SystemExit and
            # KeyboardInterrupt; catch Exception instead.
            fail = True
            time.sleep(10)
            attempt += 1
            logging.warning(
                "Time series unsuccessful. Attempt {}".format(attempt))
    # Candidate dates: neighbours first, the requested date last, so a
    # later success overwrites an earlier fallback and the exact date wins.
    check_date_strings = [
        (at_date - tdelta(days=3)).strftime("%Y-%m-%d"),
        (at_date - tdelta(days=2)).strftime("%Y-%m-%d"),
        (at_date - tdelta(days=1)).strftime("%Y-%m-%d"),
        (at_date + tdelta(days=1)).strftime("%Y-%m-%d"),
        at_date.strftime("%Y-%m-%d")
    ]
    for check_date in check_date_strings:
        try:
            unit_price = time_series[check_date]['4. close']
        except Exception as e:
            logging.warning(
                "Unit price not recorded for {}".format(check_date))
            logging.error(e)
    try:
        logging.debug(
            "Unit price for {} on {} is ${} (using AlphaVantage)".format(
                symbol, at_date, unit_price))
    except NameError:
        # unit_price was never assigned: no candidate date had a price.
        logging.error("Unit price not located for {}".format(symbol))
        sys.exit(1)
    return float(unit_price)
def get_twilights(date):
    """ Get twilight times from Keck API

    :param date: 'YYYY-MM-DD' date string passed to the metrics endpoint
    :return: the single API result dict, augmented with 'sunset HST'
        (HST clock string) and 'seto'/'riseo' (UT datetimes)
    """
    url = f"https://www.keck.hawaii.edu/software/db_api/metrics.php?date={date}"
    r = requests.get(url)
    result = json.loads(r.text)
    assert len(result) == 1
    t = result[0]
    # In Keck API, date is HST, but time is UT (ugh!)
    h, m = t['sunset'].split(':')
    # NOTE(review): +14 shifts the UT hour onto the HST clock without a
    # modulo — assumes the result stays below 24; confirm for all seasons.
    t['sunset HST'] = f"{int(h)+14:02d}:{m}"  # correct to HST
    t['seto'] = dt.strptime(f"{t['udate']} {t['sunset HST']}", '%Y-%m-%d %H:%M')
    t['seto'] += tdelta(0, 10*60*60)  # correct to UT
    t['riseo'] = dt.strptime(f"{t['udate']} {t['sunrise']}", '%Y-%m-%d %H:%M')
    t['riseo'] += tdelta(0, 24*60*60)  # correct to UT
    return t
def last_weeks_points(today=None):
    """Returns today's points and the last 7 days (i.e. 8 days total)

    :param today: anchor date; defaults to the current local date
    :return: list of (date, points) pairs, most recent first
    """
    if today is None:
        today = dtime.now().date()
    week = []
    # Bug fix: ``xrange`` does not exist in Python 3; use ``range``.
    for i in range(8):
        day = today - tdelta(days=i)
        week.append((day, get_daily_points(day)))
    return week
def last_weeks_points(today=None):
    """Returns today's points and the last 7 days (i.e. 8 days total)

    :param today: anchor date; defaults to the current local date
    :return: list of (date, points) pairs, most recent first
    """
    if today is None:
        today = dtime.now().date()
    week = []
    # Bug fix: ``xrange`` is Python 2 only and a NameError under Python 3.
    for i in range(8):
        day = today - tdelta(days=i)
        week.append((day, get_daily_points(day)))
    return week
def __init__(self, ws):
    """Initialise the report state for workspace ``ws`` and load its projects."""
    self.ws = ws
    self.projects = []
    self.native_projects = []
    self.days = {}
    self.seconds = {}
    self.total_time = tdelta(0)
    # Per-project seconds, with the special buckets pre-seeded at zero.
    self.project_durations_in_secs = dict.fromkeys(("Vacations", "Sick"), 0.0)
    self.load_projects()
def open_dataset(input, variables=None, time_calendar=None, time_raw=None,
                 time_units=None, time_offset=0, time_from=None,
                 eta_rho_slice=None, xi_rho_slice=None, s_rho_slice=None):
    """ Load dataset and grid file, overwrite calendar and units.

    :param input: glob pattern/path forwarded to open_glob_dataset
    :param variables: variable names to keep ('time' is always kept);
        defaults to none extra
    :param time_calendar / time_units: overrides for the time attributes
    :param time_raw: replacement raw time values
    :param time_offset: seconds added when initialising time via set_time
    :param time_from: optional dataset whose time array replaces this one's
    :param eta_rho_slice / xi_rho_slice / s_rho_slice: optional grid slices
    :return: the prepared xarray dataset (with a 'doy' variable when a
        time coordinate is present)
    """
    # Bug fix: ``variables=[]`` was a mutable default argument shared
    # between calls; use None as the sentinel instead.
    if variables is None:
        variables = []
    # open data
    dataset = open_glob_dataset(input,
                                keep_vars=variables + ['time'],
                                time_slice=None)
    # slice data
    if eta_rho_slice is not None or xi_rho_slice is not None or s_rho_slice is not None:
        dataset = slice_on_rho_grid(dataset,
                                    eta_rho_slice=eta_rho_slice,
                                    xi_rho_slice=xi_rho_slice,
                                    s_rho_slice=s_rho_slice)
    # open another dataset to copy its time array to the opened dataset
    if time_from is not None:
        aux_ds = open_glob_dataset(time_from, keep_vars=['time'], time_slice=None)
        attrs = aux_ds.time.attrs
        dataset['time'] = xr.DataArray(aux_ds['time'].values, dims=('time', ))
        dataset['time'].attrs = attrs
        aux_ds.close()
    # if no time to process, skip
    if 'time' not in dataset:
        return dataset
    # reset calendar and units
    if time_raw is not None:
        dataset['time'] = xr.DataArray(time_raw, dims=('time', ))
    if time_calendar is not None:
        dataset.time.attrs['calendar'] = time_calendar
    if time_units is not None:
        dataset.time.attrs['units'] = time_units
    # initialize time
    dataset = set_time(dataset, dt=tdelta(seconds=time_offset))
    # calculate day of year (time values assumed cftime-like with .dayofyr)
    time_attrs = {**dataset.time.attrs}
    doy = np.array([a.dayofyr for a in dataset.time.values]) - 1
    # create doy variable on data
    dataset['doy'] = xr.DataArray(doy, dims=('time', ))
    dataset.time.attrs = time_attrs
    return dataset
def get_status(telescope, db):
    """Return the newest status document less than two minutes old, or None.

    :param telescope: telescope id used to build the collection name
    :param db: mongo database (subscriptable by collection name)
    :return: the most recent matching document, or None when none exist
    """
    collection = db[f'{telescope}status']
    two_min_ago = dt.utcnow() - tdelta(0, 2*60)
    values = [x for x in collection.find({'date': {'$gt': two_min_ago}}).sort('date')]
    try:
        # Bug fix: the original indexed the undefined name ``status_values``
        # and its bare except masked the NameError, so the function always
        # returned None.  Index ``values`` and catch only the empty case.
        current = values[-1]
    except IndexError:
        current = None
    return current
def get_twilights(date):
    """ Get twilight times from Keck API """
    url = f"https://www.keck.hawaii.edu/software/db_api/metrics.php?date={date}"
    response = requests.get(url)
    result = json.loads(response.text)
    assert len(result) == 1
    t = result[0]
    # In Keck API, date is HST, but time is UT (ugh!)
    hour_part, minute_part = t['sunset'].split(':')
    t['sunset HST'] = f"{int(hour_part)+14:02d}:{minute_part}"  # correct to HST
    stamp_format = '%Y-%m-%d %H:%M'
    sunset_stamp = f"{t['udate']} {t['sunset HST']}"
    t['seto'] = dt.strptime(sunset_stamp, stamp_format) + tdelta(0, 10 * 60 * 60)  # correct to UT
    sunrise_stamp = f"{t['udate']} {t['sunrise']}"
    t['riseo'] = dt.strptime(sunrise_stamp, stamp_format) + tdelta(0, 24 * 60 * 60)  # correct to UT
    return t
def calculate_percents(self):
    """Accumulate per-project durations and per-day Entry lists.

    Walks every native project of every workspace in ``self.ws``, keeps
    only time entries inside [self.start, self.end], and fills
    ``self.project_seconds``, ``self.days`` and ``self.total_time``.
    """
    for ws in self.ws:
        ws_obj = Workspace(ws)
        log.info(mk_headline(f"Times in Workspace {ws}", "*"))
        # seconds = {}
        # NOTE(review): the totals are re-initialised for every workspace,
        # so only the last workspace's numbers survive the outer loop —
        # confirm this is intended.
        self.total_time = tdelta(0)
        self.project_seconds = {
            "Vacations": 0.0,
            "Courses": 0.0,
            "Sick": 0.0
        }
        for project in ws_obj.native_projects:
            p_name = project.name
            times = project.time_entries.list()
            for i in times:
                # Entries without a stop timestamp are still running; skip.
                if not hasattr(i, "stop"):
                    log.warn("Entry %s seems to still be running" % i.description)
                    continue
                start = i.start.replace(tzinfo=pytz.timezone("UTC"))
                end = i.stop.replace(tzinfo=pytz.timezone("UTC"))
                # Skip entries outside the reporting window.
                if start.date() < self.start or end.date() > self.end:
                    continue
                dur = end - start
                # Flag suspiciously long entries (> 11 h) but keep them.
                if dur.total_seconds() / 3600. > 11:
                    log.warn("Warning: the entry seems to be too long:")
                    log.warn(
                        f"{p_name} from {start} to {end}; duration {dur}")
                if start.date() not in self.days:
                    self.days[start.date()] = StrictList(Entry)
                if p_name not in self.project_seconds:
                    self.project_seconds[p_name] = 0.0
                e = None
                # Normalise tags to lower case, dropping duplicates.
                tags = list(set([t.lower() for t in i.tags])) if hasattr(
                    i, "tags") else []
                e = Entry(p_name, start, end, dur, tags)
                # Holidays and pause entries do not count towards totals.
                add_dur = not (p_name == "Holidays"
                               or (hasattr(i, "tags") and "Pause" in i.tags))
                if add_dur:
                    self.project_seconds[p_name] += dur.total_seconds()
                self.days[start.date()].append(e)
def postpone(self, dlt_time, ky_word):
    """Shift ``work_datetime`` forward by ``dlt_time`` units.

    :param dlt_time: amount to postpone by, in units given by ``ky_word``
    :param ky_word: one of 'hour', 'day', 'week' or 'month' (a month is
        treated as 30 days); any other value leaves the datetime unchanged
    :return: the updated ``work_datetime``
    """
    shifts = {
        'hour': tdelta(seconds=dlt_time * 3600),
        'day': tdelta(days=dlt_time),
        'week': tdelta(weeks=dlt_time),
        'month': tdelta(days=dlt_time * 30),
    }
    if ky_word in shifts:
        self.work_datetime = self.work_datetime + shifts[ky_word]
    # Re-derive priority / notification time after moving the task.
    self.eisenhower_priority()
    return self.work_datetime
def add_hours(date, number):
    """Return ``date`` shifted forward by ``number`` hours.

    :param date: a datetime.date or datetime.datetime; a bare date is
        first promoted to a datetime at midnight
    :param number: hours to add (may be fractional or negative)
    :return: datetime.datetime
    """
    # date objects lack a time component; promote them to midnight.
    if not hasattr(date, "time"):
        midnight = datetime.datetime.min.time()
        date = datetime.datetime.combine(date, midnight)
    return date + tdelta(hours=number)
def is_valid_session():
    """Validate a session."""
    stale_threshold = dtime.now() - tdelta(minutes=config.TIMEOUT_MINUTES)
    try:
        # A session is valid only when the last check is fresh enough and
        # the user is on the allow list.
        fresh = session['last_check'] >= stale_threshold
        allowed = session['username'] in users.ALLOWED
        if allowed and fresh:
            return True
    except KeyError:
        pass
    # bad session if we fell through
    return False
class FilesView(MyBaseView):
    """Admin list/detail view for analysed files.

    Configures the flask-admin column sets, sorting, filtering, export
    formatting and rendering macros for the file model.
    """
    column_list = [
        'id', 'name', 'sha1', 'date_b', 'status_f', 'score', 'message',
        'evals_count'
    ]
    column_sortable_list = [
        'id', 'name', 'sha1', 'date_b', 'status_f', 'score', 'message',
        'evals_count'
    ]
    column_exclude_list = [
        'uuid_f', 'evals_len', 'results', 'md5', 'mtype', 'exec_time',
        'expect_sandbox', 'hash'
    ]
    column_details_list = [
        'id', 'name', 'mtype', 'md5', 'sha1', 'hash', 'date_b', 'exec_time',
        'status_f', 'score', 'message', 'results', 'evals_count', 'evals'
    ]
    column_export_list = [
        'id', 'name', 'mtype', 'md5', 'sha1', 'hash', 'date_b', 'exec_time',
        'status_f', 'score', 'message', 'evals_count'
    ]
    column_export_exclude_list = ['evals', 'results']
    column_searchable_list = []
    column_default_sort = ('date_b', True)
    column_filters = ['name', 'mtype', 'sha1', 'score', 'evals_count']
    # Bug fix: column_formatters_export was assigned twice; the initial
    # empty-dict assignment was dead code and has been removed.
    column_formatters_export = dict(
        score=fmt_render_score,
        date_b=lambda v, c, m, p: str(getattr(m, p)),
        exec_time=lambda v, c, m, p: str(tdelta(seconds=getattr(m, p))))
    column_formatters = dict(evals_count=fmt_counts,
                             evals=macro('render_evals_cname'),
                             exec_time=fmt_elapsed_time_secs,
                             name=fmt_file_details,
                             score=macro('render_score'),
                             status_f=fmt_text_bold,
                             message=macro('render_message'),
                             results=fmt_file_results)
    column_labels = dict(id='Id',
                         name='Filename',
                         mtype='MIME Type',
                         md5='MD5 Hash',
                         sha1='SHA1 Hash',
                         hash='SHA256 Hash',
                         uuid_f='UUID',
                         status_f='Status',
                         date_b='Status Date',
                         exec_time='Elapsed Time',
                         score='Is Malicious?',
                         expect_sandbox='Expect Sandbox?',
                         message='Message',
                         evals_count='# of Evaluations',
                         evals='List of Evaluations',
                         results='Results')
def retrieve_telstatus(telescope):
    """Return the latest status document for each telescope ('V20' and 'V5').

    :param telescope: unused — the loop below overwrites it immediately;
        kept only for backward compatibility with existing callers.
    :return: dict mapping telescope id to its latest status, augmented
        with a derived 'status', display strings ('altstr', 'azstr',
        'RAstr', 'Decstr') and an 'age' in minutes.
    """
    client = pymongo.MongoClient('192.168.1.101', 27017)
    db = client.vysos
    telstatus = {}
    for telescope in ['V20', 'V5']:
        results = db[f'{telescope}status'].find(limit=1,
                                                sort=[('date', pymongo.DESCENDING)])
        if results.count() > 0:
            telstatus[telescope] = results.next()
            try:
                if telstatus[telescope]['slewing'] is True:
                    telstatus[telescope]['status'] = 'Slewing'
                elif telstatus[telescope]['tracking'] is True:
                    telstatus[telescope]['status'] = 'Tracking'
                elif telstatus[telescope]['park'] is True:
                    telstatus[telescope]['status'] = 'Parked'
                else:
                    telstatus[telescope]['status'] = 'Stationary'
            except Exception:
                telstatus[telescope]['status'] = 'Unknown'
            # Reformat raw degree coordinates to hms/dms strings.
            if 'RA' in telstatus[telescope] and 'DEC' in telstatus[telescope]:
                coord = SkyCoord(telstatus[telescope]['RA'],
                                 telstatus[telescope]['DEC'],
                                 unit=u.deg)
                telstatus[telescope]['RA'], telstatus[telescope]['DEC'] = \
                    coord.to_string('hmsdms', sep=':', precision=0).split()
        else:
            # No recent document: synthesise a disconnected placeholder.
            telstatus[telescope] = {'date': dt.utcnow()-tdelta(365),
                                    'status': 'Unknown',
                                    'connected': False}
        ## Format Values and fill in missing keys
        # Bug fix: these lookups were hard-coded to telstatus['V20'], so the
        # V5 entry displayed V20's alt/az/RA values; use the loop telescope.
        if 'alt' not in telstatus[telescope].keys():
            telstatus[telescope]['altstr'] = ''
        else:
            telstatus[telescope]['altstr'] = '{:.1f} deg'.format(telstatus[telescope]['alt'])
        if 'az' not in telstatus[telescope].keys():
            telstatus[telescope]['azstr'] = ''
        else:
            telstatus[telescope]['azstr'] = '{:.1f} deg'.format(telstatus[telescope]['az'])
        if 'RA' not in telstatus[telescope].keys():
            telstatus[telescope]['RAstr'] = ''
        else:
            telstatus[telescope]['RAstr'] = '{}'.format(telstatus[telescope]['RA'])
        # Bug fix: the declination key set above is 'DEC', not 'Dec'.
        if 'DEC' not in telstatus[telescope].keys():
            telstatus[telescope]['Decstr'] = ''
        else:
            telstatus[telescope]['Decstr'] = '{}'.format(telstatus[telescope]['DEC'])
        # Minutes since the status document's timestamp.
        telstatus[telescope]['age'] = (dt.utcnow() - telstatus[telescope]['date']).total_seconds()/60.
    client.close()
    return telstatus
def eisenhower_priority(self):
    """
    Set ``self.priority`` and ``self.time_ntf`` from the importance and
    urgency flags (Eisenhower matrix), for work that is not 'done'.

    notification repeat:
    priority 1 --> every 5 minutes during the day
    priority 2 --> just in time
    priority 3 --> at the end of the day (18:00:00)
    priority 4 --> at the end of the week (sunday)

    :return: the notification datetime.  NOTE(review): despite the
        original doc saying "a tuple", only the datetime is returned;
        the trailing else returns 0 and the 'done' case returns None —
        confirm which callers rely on which.
    """
    if self.status != 'done':
        if self.importance and self.urgency:
            # Quadrant 1: important & urgent — notify at the task time.
            self.priority = 1
            self.time_ntf = self.work_datetime
            return self.time_ntf
        elif not self.importance and self.urgency:
            # Quadrant 2: urgent only — notify at the task time.
            self.priority = 2
            self.time_ntf = self.work_datetime
            return self.time_ntf
        elif self.importance and not self.urgency:
            # Quadrant 3: important only — notify at 18:00 the same day
            # (or immediately when already past 18:00).
            self.priority = 3
            if self.work_datetime.hour < 18:
                hours = (18 - self.work_datetime.hour)
            else:
                hours = 0
            self.time_ntf = self.work_datetime + tdelta(seconds=hours * 3600)
            return self.time_ntf
        elif not self.importance and not self.urgency:
            # Quadrant 4: neither — notify on the upcoming Sunday.
            self.priority = 4
            dys = (6 - self.work_datetime.weekday())
            self.time_ntf = self.work_datetime + tdelta(days=dys)
            return self.time_ntf
        else:
            # Unreachable for boolean flags: the four branches above cover
            # every truthy/falsy combination of importance and urgency.
            return 0
def xrate_to_aud(self, at_date, from_currency):
    """Uses AlphaVantage to find X rate from currency to AUD on date

    :param at_date: datetime.date of interest; neighbouring days are used
        as fallbacks for non-trading days
    :param from_currency: ISO currency code; 'AUD' short-circuits to 1
    :return: exchange rate as float
    """
    # TODO Find more elegant solution than wait 15 seconds
    if from_currency == "AUD":
        exchange_rate = 1
    else:
        url = "https://www.alphavantage.co/query?function=FX_DAILY&"\
            "from_symbol={}&to_symbol=AUD&outputsize=full&apikey={}"\
            .format(from_currency, self.av_key)
        fail = True
        attempt = 0
        while (fail is True) and (attempt < 10):
            try:
                r = requests.get(url)
                time_series = json.loads(r.text)["Time Series FX (Daily)"]
                fail = False
                logging.info("Time series retreival was successful")
            except Exception:
                # Bug fix: a bare ``except:`` also swallowed SystemExit and
                # KeyboardInterrupt; catch Exception instead.
                fail = True
                time.sleep(10)
                attempt += 1
                logging.warning(
                    "Time series unsuccessful. Attempt {}".format(attempt))
        # Neighbours first, exact date last, so a later success overwrites
        # an earlier fallback value.
        check_date_strings = [
            (at_date - tdelta(days=1)).strftime("%Y-%m-%d"),
            (at_date + tdelta(days=1)).strftime("%Y-%m-%d"),
            at_date.strftime("%Y-%m-%d")
        ]
        for check_date in check_date_strings:
            try:
                exchange_rate = time_series[check_date]['4. close']
            except KeyError:
                # Bug fix: narrowed from a bare except; a missing date is
                # the only expected failure here.
                pass
        # NOTE(review): if no candidate date has a rate, exchange_rate is
        # never assigned and the debug call below raises NameError —
        # confirm whether an explicit error is wanted here.
    logging.debug("Exchange rate from {} to AUD on {} is {}".format(
        from_currency, at_date, exchange_rate))
    return float(exchange_rate)
def dump_gfdltrk_fort12(self, outfile=None, fhr=0):
    """ Dump the fort.12 file expected by the gfdl tracker, for the given `fhr'
    (default 0).
    This consists of a single line like so:
    AOML 01L ONEARW01L 20050801 0600 167N 400W -99 -99 998 -999 -999 -9 -99 -999 -999 -999 -999 X
    [center] [storm name] [hhmm] [cenlon] [wind spd] [rest will all be -9* (ignored)]
    [storm id] [ymd] [cenlat] [wind dir] [mslp]
    See also: http://www.emc.ncep.noaa.gov/mmb/data_processing/tcvitals_description.htm

    :param outfile: optional path; when given the line is also written there
    :param fhr: forecast hour offset from self.start_date
    :return: the formatted vitals line (without trailing newline)
    """
    # Fall back to placeholder identifiers when metadata is missing.
    center = self.originating_center if self.originating_center else "ACME"
    basin = self.basin if self.basin else "99"
    storm_number = self.storm_number if self.storm_number else 0
    storm_name = self.storm_name if self.storm_name else " UNNAMED"
    storm_name = storm_name.rjust(9)
    startDate = to_datetime(self.start_date)
    fcstDate = startDate + tdelta(hours=fhr)
    fcstYMD = "{0:%Y%m%d}".format(fcstDate)
    fcstHHMM = "{0:%H%M}".format(fcstDate)
    trkentry = self.fcst_date_dict[fcstDate]
    # Lat/lon are written in tenths of a degree with a hemisphere suffix.
    latstr = str(abs(int(round(trkentry.lat*10, 0))))
    lonstr = str(abs(int(round(trkentry.lon*10, 0))))
    if trkentry.lat < 0:
        latstr += "S"
    else:
        latstr += "N"
    if trkentry.lon < 0:
        lonstr += "W"
    else:
        lonstr += "E"
    latstr = latstr.rjust(4)
    lonstr = lonstr.rjust(5)
    windspeed_kts = trkentry.maxwind_value  # TODO : assuming maxwind is in kts but this is not enforced by object
    # Wind is written in tenths of m/s.
    windspeed_mps = windspeed_kts * 0.51444444444
    windspeed_str = str(int(round(windspeed_mps*10, 0))).rjust(3)
    mslp = str(int(round(trkentry.mslp_value, 0))).rjust(4)
    #import pdb ;pdb.set_trace()
    dataType = "03"  # fcst data
    atcfName = "9999"  # 5
    # NOTE(review): fcstYMD, fcstHHMM, dataType and atcfName are computed
    # but unused below — the format string derives the date fields from
    # fcstDate directly; confirm they can be removed.
    s = "{center} {snum:02d}{basinId} {name} {fdate:%Y%m%d %H%M} {latstr} {lonstr}"\
        " {spd} -99 {mslp} -999 -999 -9 -99 -999 -999 -999 -999 X"\
        .format(center=center, snum=storm_number, basinId=basin,
                name=storm_name, fdate=fcstDate, latstr=latstr, lonstr=lonstr,
                spd=windspeed_str, mslp=mslp)
    if outfile:
        with open(outfile, 'w') as f:
            f.write(s + "\n")
    return s
def get(self, telescope):
    """Render the list-of-nights page for ``telescope`` (IQMon variant).

    Builds the date list by walking backwards from today to the earliest
    date string in mongo, so the list is already newest-first and needs
    no post-loop reversal.
    """
    tlog.app_log.info('Get request for ListOfNights recieved')
    telescope = telescope.strip('/')
    ## Create Telescope Object
    config_file = os.path.join(os.path.expanduser('~'), '.{}.yaml'.format(telescope))
    tel = IQMon.Telescope(config_file)
    telescopename = tel.name
    client = MongoClient(tel.mongo_address, tel.mongo_port)
    db = client[tel.mongo_db]
    collection = db[tel.mongo_collection]
    # In this collection "date" is stored as 'YYYYMMDDUT' strings.
    first_date_string = sorted(collection.distinct("date"), reverse=False)[0]
    first_date = dt.strptime('{} 00:00:00'.format(first_date_string), '%Y%m%dUT %H:%M:%S')
    oneday = tdelta(1, 0)
    tlog.app_log.info(' Building date_list')
    date_list = []
    thisdate = dt.utcnow()
    # Walk backwards from today so recent dates come first.
    while thisdate >= first_date:
        date_list.append(thisdate.strftime('%Y%m%dUT'))
        thisdate -= oneday
    tlog.app_log.info(' Done')
    night_plot_path = os.path.abspath('/var/www/nights/')
    tlog.app_log.info(' Looping over date_list')
    nights = []
    for date_string in date_list:
        night_info = {'date': date_string}
        night_graph_file = '{}_{}.png'.format(date_string, telescope)
        if os.path.exists(os.path.join(night_plot_path, night_graph_file)):
            night_info['night graph'] = night_graph_file
        # Unlike the datetime-range variant, images are matched on the
        # exact date string here.
        night_info['n images'] = collection.find({"date": date_string}).count()
        nights.append(night_info)
    tlog.app_log.info(' Done')
    tlog.app_log.info(' Rendering ListOfNights')
    self.render("night_list.html",
                title="{} Results".format(telescopename),
                telescope=telescope,
                telescopename=telescopename,
                nights=nights,
                )
    tlog.app_log.info(' Done')
def gridClickHandler(self):
    """Populate the selection-stats widgets from the clicked game-tracker row.

    Reads the selected row of the gameTracker table widget and the matching
    record from the CSV data, then copies the values into the Selection* labels.
    """
    GTI = gameTrackerIndexes
    selectedRow = self.gameTracker.selectedItems()
    rowNumber = self.gameTracker.selectedIndexes()[0].row()
    # NOTE(review): if CSV_READER is a csv.DictReader this list() consumes it,
    # so a second click would see an empty list — confirm CSV_READER's type.
    CSV_SELECTION = list(CSV_READER)[rowNumber]
    self.SelectionNameStat.setText(selectedRow[GTI.FILE_NAME].text())
    self.SelectionModeStat.setText(selectedRow[GTI.MODE].text())
    self.SelectionPPSStat.setText(CSV_SELECTION['pps'])
    # 'time' is stored in frames; divide by the framerate to get seconds, then
    # trim trailing zeros from the timedelta string but keep a fixed width.
    self.SelectionTimeStat.setText(
        str(tdelta(0, (float(CSV_SELECTION['time']) / self.framerate))
            ).rstrip('0').ljust(7, '0'))
    self.selectionScoreName.setText('Score:')
    self.selectionScoreStat.setText(selectedRow[GTI.SCORE].text())
    self.SelectionLinesStat.setText(selectedRow[GTI.LINES].text())
    self.SelectionPiecesStat.setText(selectedRow[GTI.PIECES].text())
def set_date_segment(self):
    """
    Creates the string segment for the current UTC date
    :return f"{year}{doy}{hour}": String with the format YYDOYHH
        (year, day of the year, hour)
    """
    # Estimated time (minutes) it takes to save each file; the timestamp is
    # shifted back by this amount before formatting.
    save_delay_minutes = 47  # 23.5
    stamp = self.utc_time - tdelta(minutes=save_delay_minutes)
    # %y: year without century [00,99], %j: day of year [001,366],
    # %H: hour on a 24-hour clock [00,23] -> "YYDOYHH"
    return stamp.strftime('%y%j%H')
def __iter__(self):
    """Yield parsed items from one news file per day in [s_dt, e_dt].

    For each date with an existing '<newsPre>YYYY-MM-DD.txt' file, delegates
    to parsefile() and yields item[0] while recording item[1] in self.ind2obj
    keyed by a running counter. Progress is written to stderr.
    """
    numDays = (self.e_dt - self.s_dt).days
    count = 0
    # titleSet is shared across all files, presumably to deduplicate
    # articles by title inside parsefile — TODO confirm.
    titleSet = set()
    sys.stderr.write("iterator started... \n" )
    for x in range(numDays+1):
        fileDate = self.s_dt + tdelta(days = x)
        filename = fileDate.strftime("%Y-%m-%d")+'.txt'
        if os.path.isfile(newsPre+filename):
            # Trailing comma makes this a one-element tuple statement; the
            # write still executes, the tuple is just discarded.
            sys.stderr.write("file processing: " + filename +'\r'),
            for item in parsefile(filename,self.newsDIR,titleSet,self.per,self.loc,self.org,self.other):
                # Remember the full object so callers can map the yielded
                # value back to it by index.
                self.ind2obj[count] = item[1]
                yield item[0]
                count +=1
    sys.stderr.write('\n')
    sys.stderr.write("final count = " + str(count) + '\n')
def loop_over_nights():
    """Run make_plots for the V20 telescope for every night of 2018."""
    night = dt(2018, 1, 1, 1, 0, 0, 0)
    last_night = dt(2018, 12, 31, 2, 0, 0, 0)

    ##-------------------------------------------------------------------------
    ## Create Logger Object
    ##-------------------------------------------------------------------------
    logger = logging.getLogger('make_nightly_plots')
    logger.setLevel(logging.DEBUG)
    ## Set up console output
    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.INFO)
    console_handler.setFormatter(
        logging.Formatter('%(asctime)23s %(levelname)8s: %(message)s'))
    logger.addHandler(console_handler)

    one_day = tdelta(1, 0)
    while night < last_night:
        make_plots(night.strftime('%Y%m%dUT'), 'V20', logger)
        night += one_day
def build(ax1, servicio, unit):
    """Plot consumption readings plus their daily and whole-period rates.

    Draws the raw readings on ``ax1`` (green), the interpolated daily rate
    of change on a twin y-axis (blue), and the average rate over the whole
    period as a horizontal line on another twin y-axis (red).
    """
    dates, readings = zip(*sorted(servicio.items()))

    # Raw data on the primary axis.
    ax1.plot_date(dates, readings, 'g-')
    ax1.set_ylabel('Consumo en ' + unit, color='g')

    # Daily rate of change: interpolate the readings, sample once per day,
    # then take the numerical gradient.
    interp = interp1d(date2num(dates), readings)
    daily_points = drange(dates[0], dates[-1], tdelta(days=1))
    rate_ax = ax1.twinx()
    rate_ax.plot(daily_points, np.gradient(interp(daily_points)), 'b-')
    rate_ax.set_ylabel(unit + ' / dia', color='b')

    # Average rate over the full period, drawn between the two endpoints.
    endpoints = date2num((dates[0], dates[-1]))
    mean_rate = (readings[-1] - readings[0]) / (date2num(dates[-1]) - date2num(dates[0]))
    mean_ax = ax1.twinx()
    mean_ax.plot(endpoints, (mean_rate, mean_rate), 'r-')
    mean_ax.set_ylabel(unit + '/dia periodo', color='r')
def get(self, telescope, subject): tlog.app_log.info('Get request for ListOfImages recieved') ## Create Telescope Object # tlog.app_log.info(' Creating telescope object') # config_file = os.path.join(os.path.expanduser('~'), '.{}.yaml'.format(telescope)) tel = Telescope(telescope) telescopename = tel.name tlog.app_log.info(' Done.') tlog.app_log.info(' Linking to mongo') client = MongoClient(tel.mongo_address, tel.mongo_port) tlog.app_log.info(' Connected to client.') db = client[tel.mongo_db] collection = db[tel.mongo_collection] tlog.app_log.info(' Retrieved collection.') tlog.app_log.info(' Getting list of images from mongo') ##--------------------------------------------------------------------- ## If subject is formatted like a date, then get images from a date ##--------------------------------------------------------------------- if re.match('\d{8}UT', subject): start = dt.strptime(subject, '%Y%m%dUT') end = start + tdelta(1) querydict = {"date": {"$gt": start, "$lt": end}, "telescope": tel.name} image_list = [entry for entry in\ collection.find(querydict).sort(\ [('date', pymongo.ASCENDING)])] tlog.app_log.info(' Got list of {} images for night.'.format(len(image_list))) ##--------------------------------------------------------------------- ## If subject matches a target name, then get images for that target ##--------------------------------------------------------------------- else: tlog.app_log.info(' Getting list of target names from mongo') target_name_list = sorted(collection.distinct("target name")) if subject in target_name_list: tlog.app_log.info(' Getting list of image list for {} from mongo'.format(subject)) image_list = [entry for entry in\ collection.find({"target name":subject}).sort(\ [('date', pymongo.DESCENDING)])] tlog.app_log.info(' Got list of {} images for target.'.format(len(image_list))) ##--------------------------------------------------------------------- ## If subject is not a date or target, then render a list of targets 
##--------------------------------------------------------------------- else: image_list = [] self.write('<html><head><style>') self.write('table{border-collapse:collapse;margin-left:auto;margin-right:auto;}') self.write('table,th,td{border:1px solid black;vertical-align:top;text-align:left;') self.write('padding-top:5px;padding-right:5px;padding-bottom:5px;padding-left:5px;}') self.write('</style></head>') if (len(subject) > 0) and not re.match('[tT]argets', subject): self.write('<p style="text-align:center;">Could not find {} in target list:</p>'.format(subject)) self.write('<table style="border:1px solid black;">') self.write('<tr><th>Target</th><th>n Images</th>') tlog.app_log.info(f"Listing all targets") for target in target_name_list: target_images = [entry for entry in collection.find( { "target name": target } ) ] self.write('<tr><td><a href="{0}">{0}</a></td><td>{1:d}</td></tr>'.format(target, len(target_images))) self.write('</table></html>') if tel.units_for_FWHM == u.arcsec: FWHM_multiplier = tel.pixel_scale.value elif tel.units_for_FWHM == u.pix: FWHM_multiplier = 1.0 else: FWHM_multiplier = 1.0 if len(image_list) > 0: tlog.app_log.info(' Determining Flags') flags = [] for i,image in enumerate(image_list): flags.append({'FWHM': False, 'ellipticity': False, 'pointing error': False, 'zero point': False, }) try: flags[i]['FWHM'] = image['FWHM_pix'] > tel.FWHM_limit_pix.value except: pass try: flags[i]['ellipticity'] = image['ellipticity'] > tel.ellipticity_limit except: pass try: flags[i]['pointing error'] = image['perr_arcmin'] > tel.pointing_error_limit except: pass tlog.app_log.info(' Rendering ListOfImages') self.render("image_list.html", title="{} Results".format(telescopename),\ telescope = telescope,\ telescopename = telescopename,\ subject = subject,\ image_list = image_list,\ FWHM_units = tel.units_for_FWHM.to_string(),\ FWHM_multiplier = FWHM_multiplier,\ flags=flags,\ ) tlog.app_log.info(' Done.')
def generate_weather_table():
    """Build the dash/html components for the weather + telescope status page.

    Returns a list: [weather_table, Hr, telstatus_table]. Pulls the latest
    weather record, telescope status, sun/moon ephemeris and disk usage, and
    lays them out as two HTML tables.
    """
    now = dt.now()
    # Local (HST) to UT offset is +10 hours.
    nowut = now + tdelta(0, 10*60*60)
    weatherdata = retrieve_weather(lookbackdays=0)[-1]
    condition, color = get_conditions(weatherdata)
    weather_data_age = (nowut - weatherdata['date']).total_seconds()
    # Only trust the safe/unsafe flag if the data is less than a minute old;
    # stale data is displayed as Unsafe.
    if weather_data_age < 60:
        weather_str = {True: 'Safe', False: 'Unsafe'}[weatherdata['safe']]
        if weatherdata['safe'] is True:
            weather_status = html.Span(weather_str, style={'color': 'green'})
        else:
            weather_status = html.Span(weather_str, style={'color': 'red'})
    else:
        weather_status = html.Span('Unsafe', style={'color': 'red'})
    telstatus = retrieve_telstatus('V20')
    sun, moon = update_astronomy()
    # Order the two sun strings so the next event comes first.
    sunstrings = [f"Next sunrise is at {sun['rise'].strftime('%Y/%m/%d %H:%M:%S UT')}",
                  f"Next sunset is at {sun['set'].strftime('%Y/%m/%d %H:%M:%S UT')}"]
    if sun['rise'] > sun['set']:
        sunstrings.reverse()
    # Disk usage for each mounted volume (missing mounts are skipped, so
    # the table lookups below assume all three are mounted — TODO confirm).
    paths = {'Drobo': os.path.join('/', 'Volumes', 'DataCopy'),\
             'macOS': os.path.expanduser('~'),\
             'DroboPro': os.path.join('/', 'Volumes', 'MLOData')}
    disks = {}
    for disk in paths.keys():
        if os.path.exists(paths[disk]):
            size_GB, avail_GB, pcnt_used = free_space(paths[disk])
            disks[disk] = [size_GB, avail_GB, pcnt_used]
    # Fixed-width variants of the shared cell styles.
    # NOTE(review): tdrw200 and tdlw150 are copied from styles['tdc'] rather
    # than 'tdr'/'tdl' despite their names — confirm whether intentional.
    tdcw300 = styles['tdc'].copy()
    tdcw300['width'] = '300px'
    tdrw200 = styles['tdc'].copy()
    tdrw200['width'] = '200px'
    tdlw150 = styles['tdc'].copy()
    tdlw150['width'] = '150px'
    tdcw250 = styles['tdc'].copy()
    tdcw250['width'] = '250px'
    tdcw200 = styles['tdc'].copy()
    tdcw200['width'] = '200px'
    tdcw150 = styles['tdc'].copy()
    tdcw150['width'] = '150px'
    tdcw400 = styles['tdc'].copy()
    tdcw400['width'] = '400px'
    # Four columns: time info | weather metric name | metric value | disks.
    weather_table = html.Table([
        html.Tr([
            html.Td(html.Span('Time', style={'font-weight': 'bold'}), style=tdcw300),
            html.Td(html.Span('Weather', style={'font-weight': 'bold'}), style=tdrw200),
            html.Td(html.Span(weather_status, style={'font-weight': 'bold'}), style=tdlw150),
            html.Td(html.Span('Disks', style={'font-weight': 'bold'}), style=tdcw250),
        ]),
        html.Tr([
            html.Td(now.strftime('%Y/%m/%d %H:%M:%S HST'), style=styles['tdl']),
            html.Td('Ambient Temperature', style=styles['tdr']),
            html.Td(f"{weatherdata['temp']:.1f} C, {weatherdata['temp']*1.8+32.:.1f} F",
                    style=styles['tdl']),
            html.Td([html.Span('MLOData', style={'font-family': 'Courier, monospace'}),
                     f": {disks['DroboPro'][1]:.0f}GB free ({disks['DroboPro'][2]:.0f}% full)"],
                    style=styles['tdr']),
        ]),
        html.Tr([
            html.Td(nowut.strftime('%Y/%m/%d %H:%M:%S UT'), style=styles['tdl']),
            html.Td('Cloudiness', style=styles['tdr']),
            html.Td([html.Span(condition['cloud'], style={'color': color['cloud']}),
                     html.Span(' ({0:.1f} F)'.format(weatherdata['clouds']*1.8+32.),
                               style=styles['p']),
                     ], style=styles['tdl']),
            html.Td([html.Span('DataCopy', style={'font-family': 'Courier, monospace'}),
                     f": {disks['Drobo'][1]:.0f}GB free ({disks['Drobo'][2]:.0f}% full)"],
                    style=styles['tdr']),
        ]),
        html.Tr([
            html.Td(f"It is currently {sun['now']} (Sun alt = {sun['alt']:.0f})",
                    style=styles['tdl']),
            html.Td('Wind Speed', style=styles['tdr']),
            html.Td([html.Span(condition['wind'], style={'color': color['wind']}),
                     html.Span(' ({0:.1f} kph)'.format(weatherdata['wind']),
                               style=styles['p']),
                     ], style=styles['tdl']),
            html.Td([html.Span('macOS', style={'font-family': 'Courier, monospace'}),
                     f": {disks['macOS'][1]:.0f}GB free ({disks['macOS'][2]:.0f}% full)"],
                    style=styles['tdr']),
        ]),
        html.Tr([
            html.Td(f"A {moon['phase']:.0f}% illuminated moon is {moon['now']}",
                    style=styles['tdl']),
            html.Td('Gusts', style=styles['tdr']),
            html.Td([html.Span(condition['gust'], style={'color': color['gust']}),
                     html.Span(' ({0:.1f} kph)'.format(weatherdata['gust']),
                               style=styles['p']),
                     ], style=styles['tdl']),
            html.Td('', style=styles['tdr']),
        ]),
        html.Tr([
            html.Td(sunstrings[0], style=styles['tdl']),
            html.Td('Rain', style=styles['tdr']),
            html.Td([html.Span(condition['rain'], style={'color': color['rain']}),
                     html.Span(' ({0:.0f})'.format(weatherdata['rain']),
                               style=styles['p']),
                     ], style=styles['tdl']),
            html.Td('', style=styles['tdr']),
        ]),
        html.Tr([
            html.Td(sunstrings[1], style=styles['tdl']),
            html.Td('Weather Data Age', style=styles['tdr']),
            html.Td('{:.1f}s'.format(weather_data_age), style=styles['tdl']),
            html.Td('', style=styles['tdr']),
        ]),
        # Full-width weather plot served by the local web server.
        html.Tr([
            html.Td(html.Img(src='http://127.0.0.1:80/static/weather.png', width=800),
                    colSpan=4, style=styles['tdc']),
        ]),
    ], style=styles['table'])
    # Telescope status: one column per telescope plus the all-sky image,
    # which spans the remaining rows via rowSpan=8.
    telstatus_table = html.Table([
        html.Tr([
            html.Td(html.Span('Status', style={'font-weight': 'bold'}), style=tdcw200),
            html.Td(html.Span('VYSOS-5', style={'font-weight': 'bold'}), style=tdcw150),
            html.Td(html.Span('VYSOS-20', style={'font-weight': 'bold'}), style=tdcw150),
            html.Td(html.Span('ATLAS All Sky Image', style={'font-weight': 'bold'}), style=tdcw400),
        ]),
        html.Tr([
            html.Td('ACP Connected', style=styles['tdr']),
            html.Td('{}'.format(telstatus['V5']['connected']), style=styles['tdl']),
            html.Td('{}'.format(telstatus['V20']['connected']), style=styles['tdl']),
            html.Td(html.Img(src='http://www.fallingstar.com/weather/mlo/latest_bw400.jpg',
                             width=400), rowSpan=8, style=styles['tdc']),
        ]),
        html.Tr([
            html.Td('Status', style=styles['tdr']),
            html.Td('{}'.format(telstatus['V5']['status']), style=styles['tdl']),
            html.Td('{}'.format(telstatus['V20']['status']), style=styles['tdl']),
        ]),
        html.Tr([
            html.Td('Alt', style=styles['tdr']),
            html.Td(telstatus['V5']['altstr'], style=styles['tdl']),
            html.Td(telstatus['V20']['altstr'], style=styles['tdl']),
        ]),
        html.Tr([
            html.Td('Az', style=styles['tdr']),
            html.Td(telstatus['V5']['azstr'], style=styles['tdl']),
            html.Td(telstatus['V20']['azstr'], style=styles['tdl']),
        ]),
        html.Tr([
            html.Td('Target RA', style=styles['tdr']),
            html.Td(telstatus['V5']['RAstr'], style=styles['tdl']),
            html.Td(telstatus['V20']['RAstr'], style=styles['tdl']),
        ]),
        html.Tr([
            html.Td('Target Dec', style=styles['tdr']),
            html.Td(telstatus['V5']['Decstr'], style=styles['tdl']),
            html.Td(telstatus['V20']['Decstr'], style=styles['tdl']),
        ]),
        html.Tr([
            html.Td('ACP Data Age', style=styles['tdr']),
            html.Td('{:.1f} min'.format(telstatus['V5']['age']), style=styles['tdl']),
            html.Td('{:.1f} min'.format(telstatus['V20']['age']), style=styles['tdl']),
        ]),
        html.Tr([
            html.Td('', style=styles['tdr']),
            html.Td('', style=styles['tdl']),
            html.Td('', style=styles['tdl']),
        ]),
    ], style=styles['table'])
    components = [weather_table,
                  html.Hr(),
                  telstatus_table,
                  ]
    return components
def get_weather(logger, robust=True):
    """Fetch the latest PANOPTES weather record, archive it, and write a
    Boltwood-style single-line-data (SLD) file for ACP/Clarity consumers.

    :param logger: logger for status/debug output.
    :param robust: accepted for interface compatibility; not used in the
        body as written — TODO confirm intent.
    """
    logger.info('Getting Weather status from PANOPTES')
    now = dt.utcnow()
    # Query window: the last hour, padded 15 minutes into the future.
    end = now + tdelta(0,900)
    start = end - tdelta(0,3600)
    pan001_client = pymongo.MongoClient('192.168.1.50', 27017)
    pan001_db = pan001_client.panoptes
    pan001_weather = pan001_db.weather
    pan001_data = [x for x in pan001_weather.find({'date': {'$gt': start, '$lt': end}},
                                                  sort=[('date', pymongo.DESCENDING)])]
    # Newest record first due to the DESCENDING sort.
    # NOTE(review): raises IndexError if no record exists in the window.
    latest = pan001_data[0]
    # Normalized weather document for the local archive. Gust duplicates the
    # wind speed because the source provides no separate gust value.
    weatherdoc = {"date": latest['date'],
                  "querydate": now,
                  "clouds": float(latest['data']['sky_temp_C']),
                  "temp": float(latest['data']['ambient_temp_C']),
                  "wind": float(latest['data']['wind_speed_KPH']),
                  "gust": float(latest['data']['wind_speed_KPH']),
                  "rain": int(latest['data']['rain_frequency']),
                  "light": 0,
                  "switch": latest['data']['safe'],
                  "safe": latest['data']['safe'],
                  }
    # Warn (but proceed) if the newest record is older than the threshold.
    threshold = 30
    age = (weatherdoc["querydate"] - weatherdoc["date"]).total_seconds()
    logger.debug('Data age = {:.1f} seconds'.format(age))
    if age > threshold:
        logger.warning('Age of weather data ({:.1f}) is greater than {:.0f} seconds'.format(
                       age, threshold))
    logger.info('Saving weather document')
    logger.info('Connecting to mongoDB')
    client = pymongo.MongoClient('192.168.1.101', 27017)
    db = client.vysos
    weather = db.weather
    # Best-effort insert: a failed write is logged, not fatal.
    try:
        inserted_id = weather.insert_one(weatherdoc).inserted_id
        logger.info("  Inserted document with id: {}".format(inserted_id))
    except:
        e = sys.exc_info()[0]
        logger.error('Failed to add new document')
        logger.error(e)
    client.close()
    # Example sld
    # 2017-02-25 17:27:31.00 C K    3.8   8.8   8.8    1.0  -1 100.0  25 2 2 00000 042791.72744 0 1 3 1 1 1
    # From Boltwood manual:
    # Date       Time        T V   SkyT  AmbT  SenT   Wind Hum DewPt Hea R W Since  Now() Day's c w r d C A
    # 2005-06-03 02:07:23.34 C K  -28.5  18.7  22.5   45.3  75  10.3   3 0 0 00004 038506.08846 1 2 1 0 0 0
    # NowDays = date/time given as the VB6 Now() function result (in days) when Clarity II last wrote this file
    # Recover the VB6 Now() epoch from two known (timestamp, NowDays) pairs.
    tref1 = dt.strptime('2005-06-03 02:07:23.34', '%Y-%m-%d %H:%M:%S.%f')
    dtref1 = 038506.08846
    to1 = tref1 - tdelta(days=dtref1)
    tref2 = dt.strptime('2017-02-25 17:27:31.00', '%Y-%m-%d %H:%M:%S.%f')
    dtref2 = 042791.72744
    to2 = tref2 - tdelta(days=dtref2)
    # Use the more recent reference pair.
    to = to2
    sld_file = os.path.expanduser('~/V20Data/aag_sld.dat')
    logger.info(f'Writing Single Line Data File to {sld_file}')
    # Timestamps in the SLD file are local (UT - 10h).
    local_time_str = dt.strftime(weatherdoc['date']-tdelta(0,10*3600), '%Y-%m-%d %H:%M:%S.00')
    SkyT = weatherdoc['clouds']
    AmbT = weatherdoc['temp']
    SenT = weatherdoc['temp'] # using ambient
    Wind = weatherdoc['wind']
    # Humidity/dew point/heater are not measured; fixed placeholder values.
    Hum = -1
    DewPt = 100.0
    Hea = 25
    R = {'Dry':0, 'Wet':1, 'Rain':1, 'Unknown':1}[latest['data']['rain_condition']]
    W = {'Dry':0, 'Wet':1, 'Rain':1, 'Unknown':1}[latest['data']['rain_condition']]
    Since = 00000
    NowDays = (weatherdoc['date']-tdelta(0,10*3600)-to).total_seconds()/3600/24
    c = {'Unknown':0, 'Very Cloudy':3, 'Cloudy':2, 'Clear':1}[latest['data']['sky_condition']]
    w = {'Unknown':0, 'Very Windy':3, 'Windy':2, 'Calm':1}[latest['data']['wind_condition']]
    r = {'Unknown':0, 'Rain':3, 'Wet':2, 'Dry':1}[latest['data']['rain_condition']]
    d = 1
    C = {True:0 , False:1}[latest['data']['safe']]
    A = C
    sld = f"{local_time_str:22s} C K {SkyT:6.1f} {AmbT:6.1f} {SenT:6.1f} {Wind:6.1f} {Hum:3.0f} {DewPt:6.1f} {Hea:3d} {R:1d} {W:1d} {Since:05d} {NowDays:012.5f} {c:1d} {w:1d} {r:1d} {d:1d} {C:1d} {A:1d}"
    logger.info("  Date       Time        T V   SkyT  AmbT  SenT   Wind Hum DewPt Hea R W Since  Now() Day's c w r d C A")
    logger.info(f"  {sld}")
    # Replace the file atomically-ish: delete then open in exclusive mode.
    if os.path.exists(sld_file):
        os.remove(sld_file)
    with open(sld_file, 'x') as sldFO:
        sldFO.write(f"{sld}\n")
    logger.info(f' Done')
# Script driver: read daily news files between s_dt (defined earlier) and
# e_dt, then cluster them. Command-line arguments (continuing from those
# parsed above this chunk):
#   argv[3] end date (YYYY-MM-DD), argv[4] tweet file prefix,
#   argv[5] cluster count k, argv[6] top-K tweets, argv[7] data option,
#   argv[8] output path.
e_dt = dt.strptime(sys.argv[3],"%Y-%m-%d")
tweetPre = sys.argv[4]
k = int(sys.argv[5])
t_topK = int(sys.argv[6])
dataop = sys.argv[7]
outfile = codecs.open(sys.argv[8], 'w', encoding = 'utf-8')
knnNum = 30
############## readfile news. Two copies: lines (for scikit-learn's convenience) and objects (for later print use)
numDays = (e_dt - s_dt).days
# k = 20*(numDays+1)
count = 0
lines = []
ind2obj = {}
for x in range(numDays+1):
    fileDate = s_dt + tdelta(days = x)
    dtpure = fileDate.strftime("%Y-%m-%d")
    filename = newsPre + dtpure +".txt"
    print(filename)
    if os.path.isfile(filename):
        # NOTE(review): 'file' shadows the (Python 2) builtin and the handle
        # is never explicitly closed — presumably readfile consumes it fully.
        file = codecs.open(filename, encoding = 'utf-8')
        count = readfile(file,dataop,count,ind2obj,dtpure,lines)
vocab = None
############# clustering on news
X,vectorizer = getVec(lines,vocab)
#clus2doc index of newsID
clusModel,clus2doc,knn_graph = getCluster(X.toarray(),k,knnNum,opts)
############# print cluster + rank tweets
# place holder
order_centroids=None
def FillSummaryMatrix(self):
    # Python 2 code (print statements, dict.has_key).
    # Populate self.matrices.columnValues[site][day][column] from the parsed
    # XML metric data, picking per-day the entry with the largest validity
    # (seconds of the day covered by that metric's time interval).
    print "\nExtracting Column Info for CMS sites\n"
    prog = ProgressBar(0, 100, 77)
    for sitename in self.matrices.xmlInfo:
        prog.increment(100./len(self.matrices.xmlInfo))
        if not self.matrices.columnValues.has_key(sitename): # add site if not already there
            self.matrices.columnValues[sitename] = {}
        for col in self.cinfo.urls:
            # Skip columns with no XML data; 'Downtimes_top' is handled elsewhere.
            if not self.matrices.xmlInfo[sitename].has_key(col) or col == 'Downtimes_top':
                continue
            # set to null (default) values for the last 60 days through tomorrow
            for iday in daterange(self.tinfo.today - tdelta(60), self.tinfo.today + tdelta(1)):
                idaystamp = iday.strftime("%Y-%m-%d")
                if not self.matrices.columnValues[sitename].has_key(idaystamp):
                    self.matrices.columnValues[sitename][idaystamp] = {}
                if not self.matrices.columnValues[sitename][idaystamp].has_key(col):
                    self.matrices.columnValues[sitename][idaystamp][col] = {}
                    nullValues = {}
                    nullValues['Status'] = 'n/a'
                    nullValues['Color'] = 'white'
                    nullValues['URL'] = ' '
                    nullValues['validity'] = 0
                    self.matrices.columnValues[sitename][idaystamp][col] = nullValues
            items = self.matrices.xmlInfo[sitename][col].keys()
            items.sort()
            for coldate in items: # loop over each time/date combination
                # Interval [Time, EndTime] of this metric entry.
                xmltime = datetime.datetime(*time.strptime(self.matrices.xmlInfo[sitename][col][coldate]['Time'], "%Y-%m-%d %H:%M:%S")[0:6])
                xmlendtime = datetime.datetime(*time.strptime(self.matrices.xmlInfo[sitename][col][coldate]['EndTime'], "%Y-%m-%d %H:%M:%S")[0:6])
                # Midnight of the interval's start day.
                startxmldatetmp = xmltime.strftime("%Y-%m-%d 00:00:00")
                startxmldate = datetime.datetime(*time.strptime(startxmldatetmp, "%Y-%m-%d %H:%M:%S")[0:6])
                EndTXML = True
                i = 0
                # Walk forward one day at a time until past today or past the interval.
                while ( EndTXML ):
                    d = datetime.timedelta(i) # convert i to number of days
                    i += 1
                    dayloop = startxmldate + d
                    dayloopstamp = dayloop.strftime("%Y-%m-%d")
                    dayloopstamp2 = dayloop.strftime("%Y-%m-%d 00:00:00")
                    looptime = datetime.datetime(*time.strptime(dayloopstamp2, "%Y-%m-%d %H:%M:%S")[0:6])
                    if dayloop > self.tinfo.today:
                        EndTXML = False
                        continue
                    # Seconds from this day's midnight to interval start/end,
                    # and the interval's own length.
                    diff1 = xmltime-looptime
                    diff1s = (diff1.days*86400+diff1.seconds)
                    diff2 = xmlendtime-looptime
                    diff2s = (diff2.days*86400+diff2.seconds)
                    diff3 = xmlendtime-xmltime
                    diff3s = (diff3.days*86400+diff3.seconds)
                    # Overlap (seconds) of the interval with this day, capped at 86400.
                    # NOTE(review): if none of these conditions holds (e.g.
                    # diff1s >= 86400), 'validity' keeps its value from the
                    # previous iteration — confirm this is intended.
                    if diff1s<=0 and diff2s>0:
                        if diff2s>=86400:
                            validity=86400
                        else:
                            validity=diff2s
                    if diff1s>0 and diff1s<86400:
                        if diff2s>86400:
                            validity=86400-diff1s
                        else:
                            validity=diff3s
                    if diff1s<0 and diff2s<=0:
                        EndTXML=False
                        continue
                    # Map the metric's color code to a display status/color/URL.
                    if self.cinfo.colorCodes[col][self.matrices.xmlInfo[sitename][col][coldate]['COLOR']] == "green":
                        status=self.matrices.xmlInfo[sitename][col][coldate]['Status']
                        statusu=self.matrices.xmlInfo[sitename][col][coldate]['URL']
                        statusc='green'
                        if self.matrices.xmlInfo[sitename][col][coldate]['Status']=="pend":
                            status='-'
                            statusc='orange'
                    elif self.cinfo.colorCodes[col][self.matrices.xmlInfo[sitename][col][coldate]['COLOR']] == "red":
                        status=self.matrices.xmlInfo[sitename][col][coldate]['Status']
                        statusu=self.matrices.xmlInfo[sitename][col][coldate]['URL']
                        statusc='red'
                    elif self.cinfo.colorCodes[col][self.matrices.xmlInfo[sitename][col][coldate]['COLOR']] == "yellow":
                        status=self.matrices.xmlInfo[sitename][col][coldate]['Status']
                        statusu=self.matrices.xmlInfo[sitename][col][coldate]['URL']
                        statusc='yellow'
                    elif self.cinfo.colorCodes[col][self.matrices.xmlInfo[sitename][col][coldate]['COLOR']] == "white":
                        status='n/a'
                        statusu=' '
                        statusc='white'
                    else:
                        status=self.matrices.xmlInfo[sitename][col][coldate]['Status']
                        statusu=self.matrices.xmlInfo[sitename][col][coldate]['URL']
                        statusc=self.cinfo.colorCodes[col][self.matrices.xmlInfo[sitename][col][coldate]['COLOR']]
                    # Some metrics are attributed to a shifted day.
                    dayloopstamp3 = self.ShiftDayForMetric(dayloop,col)
                    todayst = date(int(self.tinfo.todaystamp[0:4]),int(self.tinfo.todaystamp[5:7]),int(self.tinfo.todaystamp[8:10]))
                    dayloop3 = date(int(dayloopstamp3[0:4]),int(dayloopstamp3[5:7]),int(dayloopstamp3[8:10]))
                    # Only keep days within the configured display window.
                    if abs((dayloop3-todayst).days) > self.cinfo.days:
                        continue
                    # Abbreviate well-known status strings for display.
                    if status == "Ready":
                        status = "R"
                    elif status == "NotReady":
                        status = "NR"
                    elif status == "Downtime":
                        status = "SD"
                    elif status == "Waiting_Room":
                        status = "WR"
                    elif status == "Morgue":
                        status = "M"
                    elif status == "OK" or status == "Ok":
                        status = "O"
                    elif status == "Error":
                        status = "E"
                    # set the actual values in self.matrices.columnValues,
                    # keeping whichever entry covers more of the day.
                    infocol = {}
                    infocol['Status'] = status
                    infocol['Color'] = statusc
                    infocol['URL'] = statusu
                    infocol['validity'] = validity
                    if self.matrices.columnValues[sitename][dayloopstamp3][col].has_key('validity'):
                        if validity > self.matrices.columnValues[sitename][dayloopstamp3][col]['validity']:
                            self.matrices.columnValues[sitename][dayloopstamp3][col] = infocol
                    else:
                        self.matrices.columnValues[sitename][dayloopstamp3][col] = infocol
                    # Today's cell is always blanked (the day is incomplete).
                    if dayloopstamp == self.tinfo.todaystamp:
                        infocol = {}
                        infocol['Status'] = ' '
                        infocol['Color'] = 'white'
                        infocol['URL'] = ' '
                        infocol['validity'] = '0'
                        self.matrices.columnValues[sitename][dayloopstamp][col] = infocol
    prog.finish()
def main(startdate, enddate, logger, nice=False, skip=False):
    """Measure every V5/V20 image for each night from startdate to enddate.

    Walks one night at a time (forward or backward depending on argument
    order), collects *.fts files from the V5 and V20 image directories,
    optionally skips images already present in mongo (``skip``), optionally
    sleeps through the night so processing only runs in daytime (``nice``),
    runs MeasureImage on each image, then regenerates the nightly plots.

    :param startdate: first night to process (datetime).
    :param enddate: last night to process (inclusive).
    :param logger: logger for progress output.
    :param nice: if True, pause processing while the Sun is below the horizon.
    :param skip: if True, skip images already recorded in the mongo db.
    """
    # Iterate backwards when the range is given newest-first.
    if startdate > enddate:
        oneday = tdelta(-1, 0)
    else:
        oneday = tdelta(1, 0)
    ##------------------------------------------------------------------------
    ## Use pyephem determine sunrise and sunset times
    ##------------------------------------------------------------------------
    now = dt.utcnow()
    if nice:
        # Site coordinates for the observatory (used only in 'nice' mode).
        Observatory = ephem.Observer()
        Observatory.lon = "-155:34:33.9"
        Observatory.lat = "+19:32:09.66"
        Observatory.elevation = 3400.0
        Observatory.temp = 10.0
        Observatory.pressure = 680.0
        Observatory.horizon = '0.0'
        Observatory.date = now.strftime('%Y/%m/%d %H:%M:%S')
        TheSun = ephem.Sun()
    # Filenames look like '<prefix>-YYYYMMDDatHHMMSS.fts'.
    MatchFilename = re.compile("(.*)\-([0-9]{8})at([0-9]{6})\.fts")
    MatchEmpty = re.compile(".*\-Empty\-.*\.fts")
    date = startdate
    while True:
        date_string = date.strftime('%Y%m%dUT')
        logger.info('Checking for images from {}'.format(date_string))
        images = []
        V5_path = os.path.join("/Volumes", "Drobo", "V5", "Images", date_string)
        V20_path = os.path.join("/Volumes", "Drobo", "V20", "Images", date_string)
        if os.path.exists(V5_path):
            V5_images = glob(os.path.join(V5_path, '*.fts'))
            logger.info(' Found {} images for the night of {} for V5'.format(
                        len(V5_images), date_string))
            images.extend(V5_images)
        if os.path.exists(V20_path):
            V20_images = glob(os.path.join(V20_path, '*.fts'))
            logger.info(' Found {} images for the night of {} for V20'.format(
                        len(V20_images), date_string))
            images.extend(V20_images)
        ## Sort Images by Observation time
        properties = []
        for image in images:
            skip_this_image = False
            imagename = os.path.split(image)[1]
            FNmatch = MatchFilename.match(imagename)
            Ematch = MatchEmpty.match(imagename)
            ## If skip is enabled, skip images which are already in mongo db
            if skip:
                # Determine the telescope from the filename, falling back to
                # the OBSERVAT header keyword.
                telescope = None
                V5match = re.match("V5.*\.fi?ts", imagename)
                V20match = re.match("V20.*\.fi?ts", imagename)
                if V5match and not V20match:
                    telescope = "V5"
                elif V20match and not V5match:
                    telescope = "V20"
                else:
                    with fits.open(image) as hdulist:
                        if hdulist[0].header['OBSERVAT']:
                            if re.search('VYSOS-?20', hdulist[0].header['OBSERVAT']):
                                telescope = "V20"
                            elif re.search('VYSOS-?5', hdulist[0].header['OBSERVAT']):
                                telescope = "V5"
                            else:
                                print("Can not determine valid telescope from arguments or filename or header.")
                        else:
                            print("Can not determine valid telescope from arguments or filename or header.")
                if telescope:
                    config_file = os.path.join(os.path.expanduser('~'), '.{}.yaml'.format(telescope))
                    tel = IQMon.Telescope(config_file)
                    client = MongoClient(tel.mongo_address, tel.mongo_port)
                    db = client[tel.mongo_db]
                    data = db[tel.mongo_collection]
                    matches = [item for item in data.find( {"filename" : imagename} )]
                    if len(matches) > 0:
                        skip_this_image = True
            ## Remove images with Empty in filenme
            if Ematch:
                skip_this_image = True
            if not FNmatch:
                skip_this_image = True
            if not skip_this_image:
                # Observation time comes from the filename; fall back to now
                # if the date/time fields do not parse.
                try:
                    image_dt = dt.strptime('{} {}'.format(
                               FNmatch.group(2), FNmatch.group(3)), '%Y%m%d %H%M%S')
                except ValueError:
                    image_dt = dt.utcnow()
                properties.append([image, image_dt])
        properties = sorted(properties, key=lambda entry: entry[1])
        ## Process Images
        count = 0
        for entry in properties:
            count += 1
            print('')
            print('Examining image {} out of {} for the night of {}'.format(
                  count, len(properties), date_string))
            image = entry[0]
            if nice:
                now = dt.utcnow()
                Observatory.date = now.strftime('%Y/%m/%d %H:%M:%S')
                TheSun.compute(Observatory)
                if TheSun.alt < 0:
                    print('The Sun is down (alt = {:.1f})'.format(TheSun.alt*180./ephem.pi))
                    sunrise = Observatory.next_rising(TheSun).datetime()
                    until_sunrise = (sunrise - now).total_seconds()/60./60.
                    logger.info('Sleeping {:.1f} hours until sunrise'.format(until_sunrise))
                    # BUGFIX: until_sunrise is in hours; time.sleep() takes
                    # seconds, so convert back (plus a 5 minute margin).
                    time.sleep(until_sunrise*60.*60. + 300)
                    now = dt.utcnow()
                    Observatory.date = now.strftime('%Y/%m/%d %H:%M:%S')
                    sunset = Observatory.next_setting(ephem.Sun()).datetime()
                    sunrise = Observatory.next_rising(ephem.Sun()).datetime()
                    logger.info('Resuming processing ...')
                    logger.info(' Next sunset at {}'.format(sunset.strftime('%Y/%m/%d %H:%M:%S')))
            if MatchFilename.match(image) and not MatchEmpty.match(image):
                # Retry once without analysis if the full measurement fails.
                try:
                    measure_image.MeasureImage(image,
                                               clobber_logs=True,
                                               zero_point=True,
                                               analyze_image=True)
                except Exception:
                    logger.warning('MeasureImage failed on {}'.format(image))
                    measure_image.MeasureImage(image,
                                               clobber_logs=False,
                                               zero_point=True,
                                               analyze_image=False)
        # Regenerate the nightly summary plots for both telescopes.
        make_nightly_plots.make_plots(date_string, 'V5', logger)
        make_nightly_plots.make_plots(date_string, 'V20', logger)
        if date == enddate:
            break
        date += oneday
def get(self, input):
    """Render the VYSOS status page (or its CCTV variant).

    Gathers sun/moon ephemeris, disk usage, latest telescope status records
    and the latest weather document, then renders status.html.
    """
    tlog.app_log.info('Get request for Status "{}" recieved'.format(input))
    nowut = dt.utcnow()
    # Local time is UT - 10 hours (HST).
    now = nowut - tdelta(0,10*60*60)
    client = pymongo.MongoClient('192.168.1.101', 27017)
    db = client['vysos']
    ##------------------------------------------------------------------------
    ## Use pyephem determine sunrise and sunset times
    ##------------------------------------------------------------------------
    Observatory = ephem.Observer()
    Observatory.lon = "-155:34:33.9"
    Observatory.lat = "+19:32:09.66"
    Observatory.elevation = 3400.0
    Observatory.temp = 10.0
    Observatory.pressure = 680.0
    Observatory.horizon = '0.0'
    Observatory.date = nowut
    TheSun = ephem.Sun()
    TheSun.compute(Observatory)
    sun = {}
    sun['alt'] = float(TheSun.alt) * 180. / ephem.pi
    sun['set'] = Observatory.next_setting(TheSun).datetime()
    sun['rise'] = Observatory.next_rising(TheSun).datetime()
    # Classify the current twilight stage from the solar altitude.
    if sun['alt'] <= -18:
        sun['now'] = 'night'
    elif sun['alt'] > -18 and sun['alt'] <= -12:
        sun['now'] = 'astronomical twilight'
    elif sun['alt'] > -12 and sun['alt'] <= -6:
        sun['now'] = 'nautical twilight'
    elif sun['alt'] > -6 and sun['alt'] <= 0:
        sun['now'] = 'civil twilight'
    elif sun['alt'] > 0:
        sun['now'] = 'day'
    TheMoon = ephem.Moon()
    Observatory.date = nowut
    TheMoon.compute(Observatory)
    moon = {}
    moon['phase'] = TheMoon.phase
    moon['alt'] = TheMoon.alt * 180. / ephem.pi
    moon['set'] = Observatory.next_setting(TheMoon).datetime()
    moon['rise'] = Observatory.next_rising(TheMoon).datetime()
    if moon['alt'] > 0:
        moon['now'] = 'up'
    else:
        moon['now'] = 'down'
    tlog.app_log.info(' Ephem data calculated')
    ##---------------------------------------------------------------------
    ## Get disk use info
    ##---------------------------------------------------------------------
    paths = {'Drobo': os.path.join('/', 'Volumes', 'VYSOSData'),\
             'macOS': os.path.expanduser('~'),\
             }
    disks = {}
    for disk in paths.keys():
        if os.path.exists(paths[disk]):
            size_GB, avail_GB, pcnt_used = free_space(paths[disk])
            disks[disk] = [size_GB, avail_GB, pcnt_used]
    tlog.app_log.info(' Disk use data determined')
    ##---------------------------------------------------------------------
    ## Get Telescope Status
    ##---------------------------------------------------------------------
    telstatus = {}
    tlog.app_log.info(f"Getting telescope status records from mongo")
    for telescope in ['V20', 'V5']:
        try:
            telstatus[telescope] = (db[f'{telescope}status'].find(limit=1,
                                    sort=[('date', pymongo.DESCENDING)])).next()
            # Reformat stored decimal-degree coordinates as sexagesimal.
            if 'RA' in telstatus[telescope] and 'DEC' in telstatus[telescope]:
                coord = SkyCoord(telstatus[telescope]['RA'],
                                 telstatus[telescope]['DEC'], unit=u.deg)
                telstatus[telescope]['RA'], telstatus[telescope]['DEC'] = coord.to_string('hmsdms', sep=':', precision=0).split()
            tlog.app_log.info(f" Got telescope status record for {telescope}")
        except StopIteration:
            # No record at all: substitute a disconnected placeholder with a
            # very old date so the age check flags it.
            telstatus[telescope] = {'date': dt.utcnow()-tdelta(365),
                                    'connected': False}
            tlog.app_log.info(f" No telescope status records for {telescope}.")
            tlog.app_log.info(f" Filling in blank data for {telescope}.")
    ##---------------------------------------------------------------------
    ## Get Current Weather
    ##---------------------------------------------------------------------
    tlog.app_log.info(f"Getting weather records from mongo")
    weather = client.vysos['weather']
    # NOTE(review): Collection.count() is deprecated in modern pymongo.
    if weather.count() > 0:
        cw = weather.find(limit=1, sort=[('date', pymongo.DESCENDING)]).next()
    else:
        cw = None
    tlog.app_log.info(f" Done")
    ##---------------------------------------------------------------------
    ## Render
    ##---------------------------------------------------------------------
    # Before 3:00 UT, "tonight" still refers to the previous UT date.
    link_date_string = nowut.strftime('%Y%m%dUT')
    files_string = "Tonight's Files"
    if nowut.hour < 3:
        link_date_string = (nowut - tdelta(1,0)).strftime('%Y%m%dUT')
        files_string = "Last Night's Files"
    tlog.app_log.info(' Rendering Status')
    cctv = False
    if input.lower() in ["cctv", "cctv.html"]:
        cctv = True
    self.render("status.html", title="VYSOS Status",
                now = (now, nowut),
                disks = disks,
                link_date_string = link_date_string,
                moon = moon,
                sun = sun,
                telstatus=telstatus,
                files_string = files_string,\
                v5_images = get_image_list('V5', link_date_string),\
                v20_images = get_image_list('V20', link_date_string),\
                v5_flats = get_image_list('V5', link_date_string, flats=True),\
                v20_flats = get_image_list('V20', link_date_string, flats=True),\
                v5_cals = get_image_list('V5', link_date_string, cals=True),\
                v20_cals = get_image_list('V20', link_date_string, cals=True),\
                cctv=cctv,
                currentweather=cw,
                weather_limits=weather_limits,
                )
    tlog.app_log.info(' Done')
def correlate_temps():
    """Correlate V5 telescope tube temperature with ambient temperature and
    focuser position, and write three diagnostic plots.

    On the first run the sample table is built by joining well-focused image
    records with overlapping status telemetry from the local mongo instance,
    and cached to 'tempcompdata.txt'; later runs read the cached CSV.
    Side effects: writes the CSV cache and three PNG files.  Returns None.
    """
    if not os.path.exists('tempcompdata.txt'):
        # First run: build the sample table from mongo telemetry.
        client = pymongo.MongoClient('192.168.1.101', 27017)
        db = client['vysos']
        images = db['V5.images']
        status = db['V5.status']
        # Only use well-focused images (FWHM < 2 pixels).
        results = images.find( {'FWHM pix': {'$lt': 2.0}} )
        tab = Table(names=('time', 'focuspos', 'tubetemp', 'ambtemp'),
                    dtype=('a25', 'i4', 'f4', 'f4'))
        for i,result in enumerate(results):
            when = result['exposure start']
            exptime = result['exposure time']
            # Status records whose timestamp falls within the exposure window.
            telemetry = status.find( {'UT timestamp': {'$gte': when,
                                      '$lte': when+tdelta(0,exptime)} } )
            # Progress output for every 100th image.
            # NOTE(review): Cursor.count() is deprecated/removed in newer
            # pymongo releases -- confirm the installed driver supports it.
            if i in np.arange(0,results.count(),100):
                print('{:4d} {} {:d}'.format(i, when.isoformat(), telemetry.count()))
            try:
                assert 'FocusMax temperature (tube)' in telemetry[0].keys()
                assert 'boltwood ambient temp' in telemetry[0].keys()
                assert 'FocusMax focuser position' in telemetry[0].keys()
                row = {'time': when.isoformat(),
                       'focuspos': int(telemetry[0]['FocusMax focuser position']),
                       'tubetemp': float(telemetry[0]['FocusMax temperature (tube)']),
                       'ambtemp': float(telemetry[0]['boltwood ambient temp'])}
                tab.add_row(row)
            except:
                # Bare except (kept as-is): any record missing the expected
                # keys, or an empty telemetry cursor, is skipped with a note.
                print('Failed on data at {}'.format(when.isoformat()))
        tab.write('tempcompdata.txt', format='ascii.csv')
    else:
        # Cached samples from a previous run.
        tab = Table.read('tempcompdata.txt', format='ascii.csv')
    time = [ dt.strptime(t, '%Y-%m-%dT%H:%M:%S') for t in tab['time'] ]
    # -------------------------------------------------------------------------
    # Plot correlation between tube and ambient temperature
    # -------------------------------------------------------------------------
    diff = tab['ambtemp'] - tab['tubetemp']
    # Fixed slope-1 line offset by the median difference (no fit performed;
    # the least-squares fit below is deliberately commented out).
    line0 = models.Linear1D(slope=1, intercept=np.median(diff))
    line0.slope.fixed = True
    # fitter = fitting.LinearLSQFitter()
    # line = fitter(line0, tab['tubetemp'], tab['ambtemp'])
    line = line0
    plt.figure()
    plt.plot(tab['tubetemp'], tab['ambtemp'], 'bo', markersize=3, markeredgewidth=0)
    plt.plot(tab['tubetemp'], tab['ambtemp'], 'k-', alpha=0.3)
    plt.plot(tab['tubetemp'], line(tab['tubetemp']), 'g-',
             label='slope={:.1f}, intercept={:.1f}'.format(
                   line.slope.value, line.intercept.value) )
    plt.xlabel('FocusMax Temperature')
    plt.ylabel('Ambient Temperature')
    plt.grid()
    plt.legend(loc='best')
    plt.savefig('temperature_correlation_V5.png')
    # -------------------------------------------------------------------------
    # Plot focus position over time to check for discontinuities
    # -------------------------------------------------------------------------
    plt.figure()
    ax = plt.gca()
    plt.plot_date(time, tab['focuspos'], 'bo', markersize=3, markeredgewidth=0)
    plt.xlabel('Time')
    plt.ylabel('Focuser Position')
    days = DayLocator(interval=2)
    fmt = DateFormatter('%Y/%m/%d')
    ax.xaxis.set_major_locator(days)
    ax.xaxis.set_major_formatter(fmt)
    plt.grid()
    plt.savefig('focus_vs_time.png')
    # -------------------------------------------------------------------------
    # Plot focus position vs. tube temperature to determine compensation
    # -------------------------------------------------------------------------
    line0 = models.Linear1D()
    fitter = fitting.LinearLSQFitter()
    line = fitter(line0, tab['tubetemp'], tab['focuspos'])
    plt.figure()
    # Points colored by sample order so the time evolution is visible.
    plt.scatter(tab['tubetemp'], tab['focuspos'], c=range(len(time)),
                norm=None, cmap='Blues')
    plt.plot(tab['tubetemp'], tab['focuspos'], 'k-', alpha=0.3)
    plt.plot(tab['tubetemp'], line(tab['tubetemp']), 'g-',
             label='slope={:.1f}, intercept={:.1f}'.format(
                   line.slope.value, line.intercept.value) )
    plt.xlabel('FocusMax Temperature')
    plt.ylabel('FocusMax Position')
    plt.grid()
    plt.legend(loc='best')
    plt.savefig('temperature_compensation_V5.png')
def was_published_recently(self):
    """Return True if ``pub_date`` falls within the last day.

    The window is [now - 1 day, now]; dates in the future also return
    False.
    """
    now = timezone.now()
    # BUG FIX: the original expression was
    #     now - tdelta(days=-1) >= self.pub_date <= now
    # i.e. (now + 1 day >= pub_date) and (pub_date <= now), which reduces to
    # just `pub_date <= now`, so arbitrarily old posts were reported as
    # "recent".  The intended check bounds pub_date from below by one day ago.
    return now - tdelta(days=1) <= self.pub_date <= now
def get(self):
    """Handle a GET request for the VYSOS status page.

    Computes sun/moon ephemeris for the site, fetches the latest V20 and V5
    telescope status documents from mongo, color-codes the Boltwood weather
    and ACP telescope fields for display, measures disk usage, and renders
    status.html.
    """
    tlog.app_log.info('Get request for Status recieved')
    nowut = dt.utcnow()
    # Local (HST) time is UT minus 10 hours.
    now = nowut - tdelta(0,10*60*60)
    client = MongoClient('192.168.1.101', 27017)
    ##------------------------------------------------------------------------
    ## Use pyephem determine sunrise and sunset times
    ##------------------------------------------------------------------------
    Observatory = ephem.Observer()
    Observatory.lon = "-155:34:33.9"
    Observatory.lat = "+19:32:09.66"
    Observatory.elevation = 3400.0
    Observatory.temp = 10.0
    Observatory.pressure = 680.0
    Observatory.horizon = '0.0'
    Observatory.date = nowut
    TheSun = ephem.Sun()
    TheSun.compute(Observatory)
    sun = {}
    # pyephem angles are radians; convert to degrees.
    sun['alt'] = float(TheSun.alt) * 180. / ephem.pi
    sun['set'] = Observatory.next_setting(TheSun).datetime()
    sun['rise'] = Observatory.next_rising(TheSun).datetime()
    # Classify the phase of night from the current sun altitude.
    if sun['alt'] <= -18:
        sun['now'] = 'night'
    elif sun['alt'] > -18 and sun['alt'] <= -12:
        sun['now'] = 'astronomical twilight'
    elif sun['alt'] > -12 and sun['alt'] <= -6:
        sun['now'] = 'nautical twilight'
    elif sun['alt'] > -6 and sun['alt'] <= 0:
        sun['now'] = 'civil twilight'
    elif sun['alt'] > 0:
        sun['now'] = 'day'
    TheMoon = ephem.Moon()
    Observatory.date = nowut
    TheMoon.compute(Observatory)
    moon = {}
    moon['phase'] = TheMoon.phase
    moon['alt'] = TheMoon.alt * 180. / ephem.pi
    moon['set'] = Observatory.next_setting(TheMoon).datetime()
    moon['rise'] = Observatory.next_rising(TheMoon).datetime()
    if moon['alt'] > 0:
        moon['now'] = 'up'
    else:
        moon['now'] = 'down'
    tlog.app_log.info(' Ephem data calculated')
    ##---------------------------------------------------------------------
    ## Get Latest V20 Data
    ##---------------------------------------------------------------------
    v20status = client.vysos['V20.status']
    # (A commented-out fallback that searched backwards day by day for the
    # most recent status document previously lived here.)
    v20data = v20status.find_one( {'current': True} )
    tlog.app_log.info(' v20data retrieved')
    # Age of the Boltwood weather data; red if stale (> 60 s) or unreadable.
    try:
        try:
            v20clarity_time = v20data['boltwood timestamp']
        except:
            # Older documents store date and time as separate strings.
            v20clarity_time = dt.strptime('{} {}'.format(
                              v20data['boltwood date'],
                              v20data['boltwood time'][:-3]),
                              '%Y-%m-%d %H:%M:%S')
        v20clarity_age = (now - v20clarity_time).total_seconds()
        if v20clarity_age > 60:
            v20clarity_color = 'red'
        else:
            v20clarity_color = 'black'
    except:
        v20clarity_age = float('nan')
        v20clarity_color = 'red'
    tlog.app_log.info(' v20clarity_color determined')
    # Age of the telescope status document itself.
    try:
        try:
            v20data_time = v20data['UT timestamp']
        except:
            v20data_time = dt.strptime('{} {}'.format(
                           v20data['UT date'],
                           v20data['UT time']),
                           '%Y%m%dUT %H:%M:%S')
        v20data_age = (nowut - v20data_time).total_seconds()
        if v20data_age > 60:
            v20data_color = 'red'
        else:
            v20data_color = 'black'
    except:
        # NOTE(review): if this branch fires, v20data_time is left undefined
        # but is referenced in self.render() below.
        v20data_age = float('nan')
        v20data_color = 'red'
    tlog.app_log.info(' v20data_color determined')
    ##---------------------------------------------------------------------
    ## Get Latest V5 Data
    ##---------------------------------------------------------------------
    v5status = client.vysos['V5.status']
    # (Same commented-out day-by-day fallback removed here as well.)
    v5data = v5status.find_one( {'current': True} )
    tlog.app_log.info(' v5data retrieved')
    try:
        try:
            v5clarity_time = v5data['boltwood timestamp']
        except:
            v5clarity_time = dt.strptime('{} {}'.format(
                             v5data['boltwood date'],
                             v5data['boltwood time'][:-3]),
                             '%Y-%m-%d %H:%M:%S')
        v5clarity_age = (now - v5clarity_time).total_seconds()
        if v5clarity_age > 60:
            v5clarity_color = 'red'
        else:
            v5clarity_color = 'black'
    except:
        v5clarity_age = float('nan')
        v5clarity_color = 'red'
    tlog.app_log.info(' v5clarity_color determined')
    try:
        try:
            v5data_time = v5data['UT timestamp']
        except:
            v5data_time = dt.strptime('{} {}'.format(
                          v5data['UT date'],
                          v5data['UT time']),
                          '%Y%m%dUT %H:%M:%S')
        v5data_age = (nowut - v5data_time).total_seconds()
        if v5data_age > 60:
            v5data_color = 'red'
        else:
            v5data_color = 'black'
    except:
        # NOTE(review): if this branch fires, v5data_time is left undefined
        # but is referenced in self.render() below.
        v5data_age = float('nan')
        v5data_color = 'red'
    # NOTE(review): this message says "v5clarity_color" but the block above
    # computes v5data_color -- looks like a copy/paste slip in the log text.
    tlog.app_log.info(' v5clarity_color determined')
    ##---------------------------------------------------------------------
    ## Format and Color Code Boltwood Data
    ##---------------------------------------------------------------------
    # Lookup tables mapping Boltwood numeric codes to display strings.
    wind_units = {'M': 'mph', 'K': 'kph', 'm': 'm/s'}
    rain_status = {0: 'Dry', 1: 'Recent Rain', 2: 'Raining'}
    wet_status = {0: 'Dry', 1: 'Recent Wet', 2: 'Wet'}
    cloud_condition = {0: 'Unknown', 1: 'Clear', 2: 'Cloudy', 3: 'Very Cloudy'}
    wind_condition = {0: 'Unknown', 1: 'Calm', 2: 'Windy', 3: 'Very Windy'}
    rain_condition = {0: 'Unknown', 1: 'Dry', 2: 'Wet', 3: 'Rain'}
    day_condition = {0: 'Unknown', 1: 'Dark', 2: 'Light', 3: 'Very Light'}
    roof_close = {0: 'Safe', 1: 'Unsafe'}
    if 'boltwood wind units' in v20data.keys():
        v20data['boltwood wind units'] = wind_units[v20data['boltwood wind units']]
    if 'boltwood rain status' in v20data.keys():
        if v20data['boltwood rain status'] == 0:
            v20data['boltwood rain status color'] = 'green'
        elif v20data['boltwood rain status'] == 1:
            v20data['boltwood rain status color'] = 'red'
        elif v20data['boltwood rain status'] == 2:
            v20data['boltwood rain status color'] = 'red'
        else:
            # NOTE(review): this else writes 'boltwood rain color' while the
            # branches above write 'boltwood rain status color' -- probably a
            # typo; kept as-is.
            v20data['boltwood rain color'] = ''
        v20data['boltwood rain status string'] = rain_status[v20data['boltwood rain status']]
    if 'boltwood wet status' in v20data.keys():
        if v20data['boltwood wet status'] == 0:
            v20data['boltwood wet status color'] = 'green'
        elif v20data['boltwood wet status'] == 1:
            v20data['boltwood wet status color'] = 'red'
        elif v20data['boltwood wet status'] == 2:
            v20data['boltwood wet status color'] = 'red'
        else:
            # NOTE(review): same key inconsistency as the rain-status else.
            v20data['boltwood wet color'] = ''
        v20data['boltwood wet status string'] = wet_status[v20data['boltwood wet status']]
    if 'boltwood cloud condition' in v20data.keys():
        if v20data['boltwood cloud condition'] == 0:
            v20data['boltwood cloud color'] = 'orange'
        elif v20data['boltwood cloud condition'] == 1:
            v20data['boltwood cloud color'] = 'green'
        elif v20data['boltwood cloud condition'] == 2:
            v20data['boltwood cloud color'] = 'orange'
        elif v20data['boltwood cloud condition'] == 3:
            v20data['boltwood cloud color'] = 'red'
        else:
            v20data['boltwood cloud color'] = ''
        v20data['boltwood cloud condition string'] = cloud_condition[v20data['boltwood cloud condition']]
    if 'boltwood wind condition' in v20data.keys():
        if v20data['boltwood wind condition'] == 0:
            v20data['boltwood wind color'] = 'orange'
        elif v20data['boltwood wind condition'] == 1:
            v20data['boltwood wind color'] = 'green'
        elif v20data['boltwood wind condition'] == 2:
            v20data['boltwood wind color'] = 'orange'
        elif v20data['boltwood wind condition'] == 3:
            v20data['boltwood wind color'] = 'red'
        else:
            v20data['boltwood wind color'] = ''
        v20data['boltwood wind condition string'] = wind_condition[v20data['boltwood wind condition']]
    if 'boltwood rain condition' in v20data.keys():
        if v20data['boltwood rain condition'] == 0:
            v20data['boltwood rain color'] = 'orange'
        elif v20data['boltwood rain condition'] == 1:
            v20data['boltwood rain color'] = 'green'
        elif v20data['boltwood rain condition'] == 2:
            v20data['boltwood rain color'] = 'red'
        elif v20data['boltwood rain condition'] == 3:
            v20data['boltwood rain color'] = 'red'
        else:
            v20data['boltwood rain color'] = ''
        v20data['boltwood rain condition string'] = rain_condition[v20data['boltwood rain condition']]
    if 'boltwood day condition' in v20data.keys():
        if v20data['boltwood day condition'] == 0:
            v20data['boltwood day color'] = 'orange'
        elif v20data['boltwood day condition'] == 1:
            v20data['boltwood day color'] = 'green'
        elif v20data['boltwood day condition'] == 2:
            v20data['boltwood day color'] = 'orange'
        elif v20data['boltwood day condition'] == 3:
            v20data['boltwood day color'] = 'red'
        else:
            v20data['boltwood day color'] = ''
        v20data['boltwood day condition string'] = day_condition[v20data['boltwood day condition']]
    if 'boltwood roof close' in v20data.keys():
        if v20data['boltwood roof close'] == 0:
            v20data['boltwood roof close color'] = 'green'
        elif v20data['boltwood roof close'] == 1:
            v20data['boltwood roof close color'] = 'red'
        else:
            v20data['boltwood roof close color'] = ''
        v20data['boltwood roof close string'] = roof_close[v20data['boltwood roof close']]
    # Same color coding, applied to the V5 status document.
    if 'boltwood wind units' in v5data.keys():
        v5data['boltwood wind units'] = wind_units[v5data['boltwood wind units']]
    if 'boltwood rain status' in v5data.keys():
        if v5data['boltwood rain status'] == 0:
            v5data['boltwood rain status color'] = 'green'
        elif v5data['boltwood rain status'] == 1:
            v5data['boltwood rain status color'] = 'red'
        elif v5data['boltwood rain status'] == 2:
            v5data['boltwood rain status color'] = 'red'
        else:
            v5data['boltwood rain color'] = ''
        v5data['boltwood rain status string'] = rain_status[v5data['boltwood rain status']]
    if 'boltwood wet status' in v5data.keys():
        if v5data['boltwood wet status'] == 0:
            v5data['boltwood wet status color'] = 'green'
        elif v5data['boltwood wet status'] == 1:
            v5data['boltwood wet status color'] = 'red'
        elif v5data['boltwood wet status'] == 2:
            v5data['boltwood wet status color'] = 'red'
        else:
            v5data['boltwood wet color'] = ''
        v5data['boltwood wet status string'] = wet_status[v5data['boltwood wet status']]
    if 'boltwood cloud condition' in v5data.keys():
        if v5data['boltwood cloud condition'] == 0:
            v5data['boltwood cloud color'] = 'orange'
        elif v5data['boltwood cloud condition'] == 1:
            v5data['boltwood cloud color'] = 'green'
        elif v5data['boltwood cloud condition'] == 2:
            v5data['boltwood cloud color'] = 'orange'
        elif v5data['boltwood cloud condition'] == 3:
            v5data['boltwood cloud color'] = 'red'
        else:
            v5data['boltwood cloud color'] = ''
        v5data['boltwood cloud condition string'] = cloud_condition[v5data['boltwood cloud condition']]
    if 'boltwood wind condition' in v5data.keys():
        if v5data['boltwood wind condition'] == 0:
            v5data['boltwood wind color'] = 'orange'
        elif v5data['boltwood wind condition'] == 1:
            v5data['boltwood wind color'] = 'green'
        elif v5data['boltwood wind condition'] == 2:
            v5data['boltwood wind color'] = 'orange'
        elif v5data['boltwood wind condition'] == 3:
            v5data['boltwood wind color'] = 'red'
        else:
            v5data['boltwood wind color'] = ''
        v5data['boltwood wind condition string'] = wind_condition[v5data['boltwood wind condition']]
    if 'boltwood rain condition' in v5data.keys():
        if v5data['boltwood rain condition'] == 0:
            v5data['boltwood rain color'] = 'orange'
        elif v5data['boltwood rain condition'] == 1:
            v5data['boltwood rain color'] = 'green'
        elif v5data['boltwood rain condition'] == 2:
            v5data['boltwood rain color'] = 'red'
        elif v5data['boltwood rain condition'] == 3:
            v5data['boltwood rain color'] = 'red'
        else:
            v5data['boltwood rain color'] = ''
        v5data['boltwood rain condition string'] = rain_condition[v5data['boltwood rain condition']]
    if 'boltwood day condition' in v5data.keys():
        if v5data['boltwood day condition'] == 0:
            v5data['boltwood day color'] = 'orange'
        elif v5data['boltwood day condition'] == 1:
            v5data['boltwood day color'] = 'green'
        elif v5data['boltwood day condition'] == 2:
            v5data['boltwood day color'] = 'orange'
        elif v5data['boltwood day condition'] == 3:
            v5data['boltwood day color'] = 'red'
        else:
            v5data['boltwood day color'] = ''
        v5data['boltwood day condition string'] = day_condition[v5data['boltwood day condition']]
    if 'boltwood roof close' in v5data.keys():
        if v5data['boltwood roof close'] == 0:
            v5data['boltwood roof close color'] = 'green'
        elif v5data['boltwood roof close'] == 1:
            v5data['boltwood roof close color'] = 'red'
        else:
            v5data['boltwood roof close color'] = ''
        v5data['boltwood roof close string'] = roof_close[v5data['boltwood roof close']]
    tlog.app_log.info(' colors determined')
    ##---------------------------------------------------------------------
    ## Format and Color Code ACP Data
    ##---------------------------------------------------------------------
    ACP_connected = {True: 'Connected', False: 'Disconnected'}
    if 'ACP connected' in v20data.keys():
        v20data['ACP connected string'] = ACP_connected[v20data['ACP connected']]
        if (v20data['ACP connected']):
            v20data['ACP connected color'] = 'green'
            # Derive a single status string from the park/slew/track flags.
            if ('ACP park status' in v20data.keys()) and\
               ('ACP slewing status' in v20data.keys()) and\
               ('ACP tracking status' in v20data.keys()):
                P = v20data['ACP park status']
                S = v20data['ACP slewing status']
                T = v20data['ACP tracking status']
                if P:
                    v20data['ACP status string'] = 'Parked'
                    v20data['ACP status color'] = ''
                elif not P and not S and not T:
                    v20data['ACP status string'] = 'Stationary'
                    v20data['ACP status color'] = ''
                elif not P and S and not T:
                    v20data['ACP status string'] = 'Slewing'
                    v20data['ACP status color'] = 'orange'
                elif not P and S and T:
                    v20data['ACP status string'] = 'Slewing'
                    v20data['ACP status color'] = 'orange'
                elif not P and not S and T:
                    v20data['ACP status string'] = 'Tracking'
                    v20data['ACP status color'] = 'green'
                else:
                    # Unexpected flag combination: show the raw values in red.
                    v20data['ACP status string'] = '{}{}{}'.format(P,S,T)
                    v20data['ACP status color'] = 'red'
            # Format the target coordinates as sexagesimal for display.
            if ('ACP target RA' in v20data.keys()) and ('ACP target Dec' in v20data.keys()):
                v20c = SkyCoord(ra=v20data['ACP target RA']*u.degree,
                                dec=v20data['ACP target Dec']*u.degree,
                                frame='icrs')
                v20coord = '{} {}'.format(
                           v20c.ra.to_string(sep=':', precision=1),
                           v20c.dec.to_string(sep=':', precision=1),
                           )
            else:
                v20c = None
                v20coord = ''
        else:
            v20data['ACP connected color'] = ''
            v20coord = ''
    else:
        v20data['ACP connected color'] = ''
        v20coord = ''
    tlog.app_log.info(' V20 ACP Connected Color determined')
    if 'ACP connected' in v5data.keys():
        v5data['ACP connected string'] = ACP_connected[v5data['ACP connected']]
        if (v5data['ACP connected']):
            v5data['ACP connected color'] = 'green'
            if ('ACP park status' in v5data.keys()) and\
               ('ACP slewing status' in v5data.keys()) and\
               ('ACP tracking status' in v5data.keys()):
                P = v5data['ACP park status']
                S = v5data['ACP slewing status']
                T = v5data['ACP tracking status']
                if P:
                    v5data['ACP status string'] = 'Parked'
                    v5data['ACP status color'] = ''
                elif not P and not S and not T:
                    v5data['ACP status string'] = 'Stationary'
                    v5data['ACP status color'] = ''
                elif not P and S and not T:
                    v5data['ACP status string'] = 'Slewing'
                    v5data['ACP status color'] = 'orange'
                elif not P and S and T:
                    v5data['ACP status string'] = 'Slewing'
                    v5data['ACP status color'] = 'orange'
                elif not P and not S and T:
                    v5data['ACP status string'] = 'Tracking'
                    v5data['ACP status color'] = 'green'
                else:
                    v5data['ACP status string'] = '{}{}{}'.format(P,S,T)
                    v5data['ACP status color'] = 'red'
            if ('ACP target RA' in v5data.keys()) and ('ACP target Dec' in v5data.keys()):
                v5c = SkyCoord(ra=v5data['ACP target RA']*u.degree,
                               dec=v5data['ACP target Dec']*u.degree,
                               frame='icrs')
                v5coord = '{} {}'.format(
                          v5c.ra.to_string(sep=':', precision=1),
                          v5c.dec.to_string(sep=':', precision=1),
                          )
            else:
                v5c = None
                v5coord = ''
        else:
            v5data['ACP connected color'] = ''
            v5coord = ''
    else:
        v5data['ACP connected color'] = ''
        v5coord = ''
    tlog.app_log.info(' V5 ACP Connected Color determined')
    ##---------------------------------------------------------------------
    ## Get disk use info
    ##---------------------------------------------------------------------
    paths = {'Drobo': os.path.join('/', 'Volumes', 'Drobo'),
             'Data': os.path.expanduser('~'),
             'USB Drive B': os.path.join('/', 'Volumes', 'WD500B'),
             'USB Drive C': os.path.join('/', 'Volumes', 'WD500_C'),
             'Vega': os.path.join('/', 'Volumes', 'Data_V5'),
             'Black': os.path.join('/', 'Volumes', 'Data_V20'),
             }
    disks = {}
    for disk in paths.keys():
        if os.path.exists(paths[disk]):
            size_GB, avail_GB, pcnt_used = free_space(paths[disk])
            if disk == 'Drobo':
                # Subtract a fixed 12750 GB offset for the Drobo volume and
                # recompute the percentage used (reason for the offset is not
                # recorded here -- presumably reserved/phantom capacity).
                size_GB -= 12750
                avail_GB -= 12750
                pcnt_used = float(size_GB - avail_GB)/float(size_GB) * 100
            disks[disk] = [size_GB, avail_GB, pcnt_used]
    tlog.app_log.info(' Disk use data determined')
    ##---------------------------------------------------------------------
    ## Render
    ##---------------------------------------------------------------------
    # Before 04:00 UT, link to the previous night's files.
    if nowut.hour >= 4:
        link_date_string = nowut.strftime('%Y%m%dUT')
    else:
        link_date_string = (nowut - tdelta(1,0)).strftime('%Y%m%dUT')
    print(v20coord)
    print(v20data_color)
    tlog.app_log.info(' Rendering Status')
    self.render("status.html", title="VYSOS Status",
                now = now,
                nowut = nowut,
                link_date_string = link_date_string,
                v20clarity_age = v20clarity_age,
                v20clarity_color = v20clarity_color,
                v20data_time = v20data_time,
                v20data_age = v20data_age,
                v20data_color = v20data_color,
                v20data = v20data,
                v20coord = v20coord,
                v5clarity_age = v5clarity_age,
                v5clarity_color = v5clarity_color,
                v5data_time = v5data_time,
                v5data_age = v5data_age,
                v5data_color = v5data_color,
                v5data = v5data,
                v5coord = v5coord,
                moon = moon,
                sun = sun,
                disks = disks,
                )
    tlog.app_log.info(' Done')
## add arguments p.add_argument('name', type=str, help="The name of the target compatible with JPL horizons") args = p.parse_args() if args.fromdate is None: fromdate = dt.utcnow() else: try: fromdate = dt.strptime(args.fromdate, '%Y-%m-%dT%H:%M:%S') except: print('Could not parse from date') raise if args.todate is None: todate = dt.utcnow() + tdelta(1, 0) else: try: todate = dt.strptime(args.todate, '%Y-%m-%dT%H:%M:%S') except: print('Could not parse to date') raise ##------------------------------------------------------------------------- ## Create logger object ##------------------------------------------------------------------------- log = logging.getLogger('MyLogger') log.setLevel(logging.DEBUG) ## Set up console output LogConsoleHandler = logging.StreamHandler() if args.verbose is True:
def main(solar_sites): """ Status: TESTS LOOK OKAY Purpose: Synthesize 1-min time series of clearsky index that accounts for correlation between sites using the geographic location of the sites and the hourly average clearsky index for each site Inputs: solar_sites - a list of SolarSite objects that will be used for the syntheseis of the clearsky index data Outpus: solar_sites - the same list of SolarSite objects now containing the additional 1-min clearsky index data attached to each SolarSite object """ #### Calculate a distance matrix between each of the sites dist_mtx = distance_matrix(solar_sites) #### Preload the Spectral amplitude with different frequencies as a function of #### the hourly clearsky index try: #### Load from stored file psd = cPickle.load(open(ROOT_DIR % 'clearsky_index_psd.pkl', 'rb')) except IOError: psd = power_spectral_density() #### Calculate the correlation matrix based on the distance between the sites freqs = psd['1.00']['freq'] cohere = coherence_matrix(dist_mtx, freqs) #### Preload the within-hour distribution of clearsky index lookup table try: #### Load from stored file cdf = cPickle.load(open(ROOT_DIR % 'clearsky_index_cdf.pkl', 'rb')) except IOError: cdf = clearsky_index_distribution(psd.items.values) #### For each hour synthesize the 1-min timeseries: ## Get the index and initialize the final timeseries hour_index = solar_sites[0].clr_idx_hr.index year_start = hour_index[0] - tdelta(seconds = hour_index[0].minute * 60) year_end = hour_index[-1] - tdelta(seconds = hour_index[-1].minute * 60) + \ tdelta(seconds = 59*60) year_rng = pd.date_range(year_start, year_end, freq = 'min') synth_hr_args = [solar_sites, dist_mtx.index, cohere, cdf, psd, freqs] #***** Single core version ****** NOT CURRENTLY USED # TS_list = [] # for dt in hour_index: # #### Synthesize the 1-min time series for each hour # TS = synthesize_hour(dt, synth_hr_args) # TS_list.append(TS) #--------------------------------------------------- #****** Parallel 
version ********* ACTIVE # TS_list = Parallel(n_jobs= -2, verbose = 5)( delayed(synthesize_hour)(dt, synth_hr_args) for dt in hour_index) #---------------------------------------- #### Stich all of the hours together and ensure that there are no #### hour-to-hour seams issues print "Inserting all of the 1-min data from " +\ "%s hours into the TS_year dataframe" % len(TS_list) for TS in TS_list: ## Check to make sure there are not Nan values in the timeseries (indicates ## a potential error earlier in the code for id in TS.columns: if pd.isnull(TS[id]).sum() >0: print "Final TS has Nan!!!" try: TS_year = pd.concat([TS_year, TS]) except NameError: # If TS_year doesn't exist, then initialize it first TS_year = TS #### Attach the 1-min clearsky timeseries to each solar site for site in solar_sites: site.clr_idx_min = TS_year[site.id] return solar_sites
def main(startdate, enddate, logger):
    """Ingest VYSOS environmental log files into mongo, one day at a time.

    For each UT date from startdate through enddate this parses
    EnvironmentalLog.txt for both the V5 and V20 telescopes, converts each
    line into a status document (Boltwood weather fields, plus RCOS focuser
    fields for V20), replaces any existing documents with the same UT
    timestamp, and regenerates the nightly plots.

    Arguments:
        startdate, enddate -- datetime objects bounding the date range
                              (inclusive; presumably whole days -- confirm)
        logger             -- logger used for progress output
    """
    logger.info("Writing results to mongo db at 192.168.1.101")
    try:
        client = MongoClient("192.168.1.101", 27017)
    except:
        # Bare except (kept as-is): any connection failure is logged and
        # re-raised.
        logger.error("Could not connect to mongo db")
        raise
    else:
        V5status = client.vysos["V5.status"]
        logger.debug(" Getting V5.status collection")
        V20status = client.vysos["V20.status"]
        logger.debug(" Getting V20.status collection")
    oneday = tdelta(1, 0)
    date = startdate
    while date <= enddate:
        date_string = date.strftime("%Y%m%dUT")
        logger.info("")
        logger.info("Checking for environmental logs from {}".format(date_string))
        ## VYSOS-5
        telescope = "V5"
        logfile = os.path.join("/", "Volumes", "Drobo", telescope, "Logs", date_string, "EnvironmentalLog.txt")
        if not os.path.exists(logfile):
            logger.warning(" No logfile found for {} on {}".format(telescope, date_string))
        else:
            logger.info(" Found logfile: {}".format(logfile))
            # (A commented-out astropy.io.ascii fixed-width reader with
            # explicit column starts/ends and converters previously lived
            # here; the file is now parsed line-by-line with str.split.)
            with open(logfile, "r") as FO:
                env_table = FO.readlines()
            for line in env_table:
                if line[0] != "#":  # skip comment lines in the log
                    try:
                        entry = line.split()
                        new_data = {}
                        ## Date and Time -- log timestamps are UT; HST is UT
                        ## minus 10 hours.
                        dto_utc = dt.strptime("{} {}".format(entry[0], entry[1]), "%Y/%m/%d %H:%M:%SUT")
                        dto_hst = dto_utc - tdelta(0, 10 * 60 * 60)
                        new_data.update(
                            {
                                "UT date": dto_utc.strftime("%Y%m%dUT"),
                                "UT time": dto_utc.strftime("%H:%M:%S"),
                                "UT timestamp": dto_utc,
                            }
                        )
                        ## Define Boltwood Data
                        boltwood = {}
                        boltwood["boltwood date"] = dto_hst.strftime("%Y-%m-%d")  # local date (yyyy-mm-dd)
                        boltwood["boltwood time"] = dto_hst.strftime("%H:%M:%S.00")  # local time (hh:mm:ss.ss)
                        boltwood["boltwood timestamp"] = dto_hst
                        boltwood["boltwood temp units"] = "F"
                        boltwood["boltwood wind units"] = "K"
                        boltwood["boltwood sky temp"] = float(entry[4])
                        boltwood["boltwood ambient temp"] = float(entry[5])
                        boltwood["boltwood wind speed"] = float(entry[6])
                        boltwood["boltwood humidity"] = int(entry[7])
                        boltwood["boltwood dew point"] = float(entry[8])
                        # entry[11] packs rain/cloud/wind condition codes as
                        # consecutive digits.
                        boltwood["boltwood rain condition"] = int(entry[11][0])
                        boltwood["boltwood cloud condition"] = int(entry[11][1])
                        boltwood["boltwood wind condition"] = int(entry[11][2])
                        new_data.update(boltwood)
                    except:
                        # Dump the offending line for debugging, then re-raise.
                        print(line)
                        print(entry)
                        raise
                    ##-------------------------------------------------------------------------
                    ## Write Environmental Log
                    ##-------------------------------------------------------------------------
                    ## Check if this entry is already in the collection; if so,
                    ## delete the old copies before inserting the new document.
                    matches = [item for item in V5status.find({"UT timestamp": new_data["UT timestamp"]})]
                    if len(matches) > 0:
                        logger.debug(
                            " Found {} previous entries for {} {}. Deleting old entries.".format(
                                len(matches), new_data["UT date"], new_data["UT time"]
                            )
                        )
                        for match in matches:
                            logger.debug(" Removing entry for {} {}".format(match["UT date"], match["UT time"]))
                            V5status.remove({"_id": match["_id"]})
                            logger.debug(' Removed "_id": {}'.format(match["_id"]))
                    # NOTE(review): Collection.remove/insert are deprecated in
                    # modern pymongo (delete_many/insert_one) -- confirm the
                    # installed driver version supports them.
                    id = V5status.insert(new_data)
                    logger.info(
                        " Inserted datum for {} on {} {}".format(telescope, new_data["UT date"], new_data["UT time"])
                    )
            make_nightly_plots.make_plots(date_string, "V5", logger)
        ## VYSOS-20
        telescope = "V20"
        logfile = os.path.join("/", "Volumes", "Drobo", telescope, "Logs", date_string, "EnvironmentalLog.txt")
        if not os.path.exists(logfile):
            logger.warning(" No logfile found for {} on {}".format(telescope, date_string))
        else:
            logger.info(" Found logfile: {}".format(logfile))
            # (A commented-out astropy.io.ascii fixed-width reader for the
            # wider V20 log format previously lived here; parsed with
            # str.split below.)
            with open(logfile, "r") as FO:
                env_table = FO.readlines()
            for line in env_table:
                if line[0] != "#":  # skip comment lines in the log
                    try:
                        entry = line.split()
                        new_data = {}
                        ## Date and Time -- log timestamps are UT; HST is UT
                        ## minus 10 hours.
                        dto_utc = dt.strptime("{} {}".format(entry[0], entry[1]), "%Y/%m/%d %H:%M:%SUT")
                        dto_hst = dto_utc - tdelta(0, 10 * 60 * 60)
                        new_data.update(
                            {
                                "UT date": dto_utc.strftime("%Y%m%dUT"),
                                "UT time": dto_utc.strftime("%H:%M:%S"),
                                "UT timestamp": dto_utc,
                            }
                        )
                        ## Define Boltwood Data (column indices are shifted
                        ## relative to V5 because the V20 log carries extra
                        ## focuser columns).
                        boltwood = {}
                        boltwood["boltwood date"] = dto_hst.strftime("%Y-%m-%d")  # local date (yyyy-mm-dd)
                        boltwood["boltwood time"] = dto_hst.strftime("%H:%M:%S.00")  # local time (hh:mm:ss.ss)
                        boltwood["boltwood timestamp"] = dto_hst
                        boltwood["boltwood temp units"] = "F"
                        boltwood["boltwood wind units"] = "K"
                        boltwood["boltwood sky temp"] = float(entry[7])
                        boltwood["boltwood ambient temp"] = float(entry[8])
                        boltwood["boltwood wind speed"] = float(entry[9])
                        boltwood["boltwood humidity"] = int(entry[10])
                        boltwood["boltwood dew point"] = float(entry[11])
                        # entry[14] packs rain/cloud/wind condition codes as
                        # consecutive digits.
                        boltwood["boltwood rain condition"] = int(entry[14][0])
                        boltwood["boltwood cloud condition"] = int(entry[14][1])
                        boltwood["boltwood wind condition"] = int(entry[14][2])
                        new_data.update(boltwood)
                        ## Define Focuser Data
                        focuser_info = {}
                        focuser_info["RCOS temperature units"] = "F"
                        focuser_info["RCOS temperature (truss)"] = float(entry[2])
                        focuser_info["RCOS temperature (primary)"] = float(entry[3])
                        focuser_info["RCOS temperature (secondary)"] = float(entry[4])
                        focuser_info["RCOS fan speed"] = int(entry[5])
                        focuser_info["RCOS focuser position"] = int(entry[6])
                        new_data.update(focuser_info)
                    except:
                        # Dump the offending entry for debugging, then re-raise.
                        print(entry)
                        raise
                    ##-------------------------------------------------------------------------
                    ## Write Environmental Log
                    ##-------------------------------------------------------------------------
                    ## Check if this entry is already in the collection; if so,
                    ## delete the old copies before inserting the new document.
                    matches = [item for item in V20status.find({"UT timestamp": new_data["UT timestamp"]})]
                    if len(matches) > 0:
                        logger.debug(
                            " Found {} previous entries for {} {}. Deleting old entries.".format(
                                len(matches), new_data["UT date"], new_data["UT time"]
                            )
                        )
                        for match in matches:
                            logger.debug(" Removing entry for {} {}".format(match["UT date"], match["UT time"]))
                            V20status.remove({"_id": match["_id"]})
                            logger.debug(' Removed "_id": {}'.format(match["_id"]))
                    id = V20status.insert(new_data)
                    logger.info(
                        " Inserted datum for {} on {} {}".format(telescope, new_data["UT date"], new_data["UT time"])
                    )
            make_nightly_plots.make_plots(date_string, "V20", logger)
        date += oneday
c = SkyCoord.from_name('HR8799', frame='fk5') hdu = fits.PrimaryHDU(data_cube) metadata = {'SEQID': seq_id, 'FIELD': target_name, 'RA': c.ra.to(u.degree).value, 'DEC': c.dec.to(u.degree).value, 'EQUINOX': c.equinox.value, 'OBSTIME': obstime.isoformat(), 'XPIXORG': xpixorg, 'YPIXORG': ypixorg, } for t in range(nt): # slightly randomize time gap between images gap = tdelta(0, exptime + np.random.normal(5, 1)) obstime = obstime + gap metadata['TIME{:04d}'.format(t)] = obstime.isoformat() hdu.header.extend(metadata) print(metadata) hdu.writeto('PSC_0002.fits', clobber=True) # Generate Fake Lightcurve with open('PSC_0002.json', 'w') as FO: data = [] for t in range(nt): time = hdu.header['TIME{:04d}'.format(t)] sig_r = 0.010 sig_g = 0.006 sig_b = 0.017
def get_day(days=0, hours=0):
    """Return the current local time shifted by the given offsets.

    Both offsets may be negative; with the defaults the current
    local datetime is returned unshifted.
    """
    offset = tdelta(days=days, hours=hours)
    return dtime.now() + offset