# NOTE(review): this line is corrupted — a credential-redaction pass replaced the
# original argument(s) of input() with "******", fusing it with "twu_website.html"
# and dropping the intervening statements.  `selected_term` is undefined here.
# The line is not valid Python as written; recover the original from version
# control before use.
def get_table(): username = input("Username: "******"twu_website.html") make_table(selected_term)
def __init__(self, msname, ctname, ref_antenna_name=None, scans=None,
             snr_threshold=None, threshold=None, antennas=None, pad=2,
             solint=None, solmin=0, solsub=1, dofloat=True):
    """Set up a multiband fringe fitter that writes to table `ctname`.

    Delegates the common configuration to FringeFitter.__init__ and then
    adds the multiband-specific state: per-spw reference frequencies, the
    minimum frequency, and the FFT zero-padding factor `pad`.

    Parameters mirror FringeFitter.__init__; `pad` is the extra
    zero-padding factor used for the multiband case.
    """
    FringeFitter.__init__(self, msname, ctname,
                          ref_antenna_name=ref_antenna_name, scans=scans,
                          snr_threshold=snr_threshold, threshold=threshold,
                          antennas=antennas, solint=solint, solmin=solmin,
                          solsub=solsub, dofloat=dofloat)
    # Just for multiband case:
    self.ctname = ctname
    # Query the MS once and reuse the result (the original called
    # utils.get_min_freqs twice for the same data).
    self.reffreqs = utils.get_min_freqs(msname)
    self.minfreq = self.reffreqs[0]
    self.pad = pad
    # Create the (initially empty) calibration table on disk.
    make_table.make_table(self.msname, self.ctname)
    self.rowcount = 0  # Next free row in the table.
def run():
    """Run the audit."""
    client = Client()
    if not client.login():
        return
    interval_data = get_interval()
    month_ago, six_months_ago, months = interval_data

    # Start from the current CU/OS holders, then merge in information about
    # former and newly-appointed holders from the last six months.
    users_dict = {
        "cu": {name: {} for name in client.get_checkusers()},
        "os": {name: {} for name in client.get_oversighters()},
    }
    addl_cu_info, addl_os_info = client.get_former_and_new_cuos(
        six_months_ago, month_ago
    )
    for group, addl_info in (("cu", addl_cu_info), ("os", addl_os_info)):
        for name, info in addl_info.items():
            users_dict[group].setdefault(name, {})["active"] = info

    # Count each user's logged actions over the audit interval.
    for name in users_dict["cu"]:
        users_dict["cu"][name]["actions"] = client.count_checks(
            name, *interval_data
        )
    for name in users_dict["os"]:
        users_dict["os"][name]["actions"] = client.count_suppressions(
            name, *interval_data
        )

    # Members of these groups are exempt from activity requirements; the
    # tables mark them accordingly.
    groups = {"arbs": client.get_arbitrators(), "ombuds": client.get_ombuds()}

    # Render wikitext tables from the gathered data and write them out.
    cu_table = make_table(users_dict["cu"], groups, months, "cu")
    os_table = make_table(users_dict["os"], groups, months, "os")
    write_table(cu_table, os_table)
def build_tables(all_info):
    """
    Create two lists -- one for Batters and one for Pitchers.  Each list
    consists of tables of the displays for each period.

    Input: all_info -- eval_loop representation of all the data.
    """
    headers = ['name', 'team', 'pos', 'pts']
    aligns = ['left', 'center', 'center', 'center']
    periods = ['7', '14', '30', str(datetime.now().year)]
    for group in ('Batter', 'Pitcher'):
        tables = []
        for period in periods:
            # A period longer than 3 characters is a year, i.e. the
            # season-to-date table.
            title = 'Season' if len(period) > 3 else "%s day" % period
            data = all_info["%s-%s" % (period, group)]
            tables.append(make_table(title, data, headers, aligns))
        merge_tables(tables, group)
def __init__(self, msname, fj_name, ref_antenna_name=None, scans=None,
             threshold=None, snr_threshold=None, antennas=None,
             spectral_windows=None, solint=None, solmin=0, solsub=1,
             dofloat=True):
    """Configure a fringe fitter for measurement set `msname`.

    Selects scans, spectral windows, antennas and the reference antenna,
    decides the thresholding method, creates the output calibration table
    `fj_name`, and allocates per-(polarization, antenna) solution arrays.

    Raises:
        UnhandledCase: if no scans are given, the polarization id is not
            unique, or the polarization count is neither 2 nor 4.
    """
    self.msname = msname
    if scans is None:
        raise UnhandledCase("No scans selected!")
    self.scans = scans
    self.solint = solint
    self.solmin = solmin
    self.solsub = solsub
    self.dofloat = dofloat
    # Threshold selection.  SNR takes precedence if both thresholds are set.
    # BUGFIX: the original assigned a misspelled attribute
    # ('threshhold_method') in two places, so `threshold_method` could
    # never become 'raw' and always ended up 'snr'.  The intent below is
    # reconstructed from the original's comment — TODO confirm: 'snr' when
    # an explicit snr_threshold is given, 'raw' when only an explicit raw
    # threshold is given, 'snr' by default.
    self.threshold_method = None
    if snr_threshold is None:
        self.snr_threshold = 5.0
    else:
        self.snr_threshold = snr_threshold
        self.threshold_method = 'snr'
    if threshold is None:
        self.threshold = 2000
    else:
        self.threshold = threshold
        if self.threshold_method is None:
            self.threshold_method = 'raw'
    if self.threshold_method is None:
        self.threshold_method = 'snr'
    if spectral_windows is None:
        self.spectral_windows = utils.get_spectral_windows(self.msname)
    else:
        self.spectral_windows = spectral_windows
    self.timeqs = self.make_time_qs_from_scans(scans)
    self.antenna_map = utils.get_antenna_map(self.msname)
    self.ism = utils.invert_map(self.antenna_map)  # id -> name
    if antennas is None:
        # FIXME! Should do this per scan!
        self.antennas2 = sorted(
            ffd.actual_antennas(self.msname, self.timeqs[0]).keys())
    else:
        self.antennas2 = antennas
    self.antenna_names = [self.ism[s] for s in self.antennas2]
    if ref_antenna_name is None:
        # Default to the lowest-numbered antenna actually present.
        self.ref_antenna = self.antennas2[0]
        self.ref_antenna_name = self.ism[self.ref_antenna]
        casalog.post("No reference antenna selected; using {} ({})"
                     "".format(self.ref_antenna_name, self.ref_antenna))
    else:
        self.ref_antenna_name = ref_antenna_name
        self.ref_antenna = self.antenna_map[self.ref_antenna_name]
    pol_ids = utils.get_polarization_ids(msname)
    if len(pol_ids) > 1:
        raise UnhandledCase("Non-unique polarisation id")
    self.pol_id = pol_ids[0]
    n_pols = utils.get_n_polarizations(msname, self.pol_id)
    # Only the parallel-hand correlations are fitted.
    if n_pols == 4:
        self.polinds = [0, 3]
    elif n_pols == 2:
        self.polinds = [0]
    else:
        raise UnhandledCase("Can't do {} polarizations".format(n_pols))
    self.bad_antennas = set()
    self.fj_name = fj_name
    # Create the (initially empty) output calibration table on disk.
    make_table.make_table(self.msname, self.fj_name)
    self.rowcount = 0
    # One solution slot per (polarization index, antenna).
    shape = (len(self.polinds), len(self.antennas2))
    # BUGFIX: np.float / np.bool were deprecated aliases of the builtins
    # and were removed in NumPy 1.24; use float / bool directly.
    # Looks like we don't need 'F'? Leave it in for now.
    # FIXME: find out one way or another.
    self.delays = np.zeros(shape, float, order='F')
    self.phases = np.zeros(shape, float, order='F')
    self.rates = np.zeros(shape, float, order='F')
    self.flags = np.zeros(shape, bool, order='F')
    self.sigs = []
def make_log_table(from_time_str, to_time_str, users):
    """Build per-user activity tables from the log database.

    Args:
        from_time_str, to_time_str: "%Y/%m/%d" date strings; the range is
            inclusive of both days.
        users: list of user names to restrict to; an empty list means all
            enabled users.

    Returns:
        dict mapping user name -> path of the table file produced by
        make_table.make_table, or {} on bad dates / no matching logs.
    """
    output_files = {}
    try:
        # Also validated in server.py.
        from_time = datetime.strptime(from_time_str, "%Y/%m/%d")
        to_time = datetime.strptime(to_time_str, "%Y/%m/%d") + timedelta(days=1)
    except ValueError as e:
        logging.error("from_time, to_time error: " + str(e))
        return {}
    connection = MySQLdb.connect(host='localhost', user='******',
                                 passwd=config.DB_PASSWD, db=config.DB_NAME,
                                 charset='utf8mb4')
    # BUGFIX: close the connection on every path (it previously leaked),
    # and use a parameterized query instead of interpolating user-supplied
    # names into the SQL string — the original was an SQL-injection hole
    # (its MySQLdb.escape_string(query) call also discarded its result,
    # so it escaped nothing).
    try:
        cursor = connection.cursor()
        logging.info(type(users))
        params = [from_time.strftime('%Y-%m-%d %H:%M:%S'),
                  to_time.strftime('%Y-%m-%d %H:%M:%S')]
        query = ("SELECT u.name, l.log_time FROM logData l "
                 "INNER JOIN user u ON u.id = l.user_id "
                 "WHERE u.enable = true")
        if users:
            placeholders = ", ".join(["%s"] * len(users))
            query += " AND u.name IN ({})".format(placeholders)
            params = list(users) + params
        query += (" AND l.log_time >= %s AND l.log_time < %s"
                  " ORDER BY u.name, l.log_time")
        logging.info(query)
        count = cursor.execute(query, params)
        log_dict = {}  # {user_name: [log_time, ...]}
        if count > 0:
            for name, log_time in cursor.fetchall():
                log_dict.setdefault(name, []).append(log_time)
        else:
            return {}
    finally:
        connection.close()
    for user_name, log_times in log_dict.items():
        start_day = 0
        max_level = 0
        active_days = 0
        prev_day = datetime.now() + timedelta(days=1)  # there are no logs from tomorrow
        # One cell per (TIME_SPAN-hour slot, weekday); each cell has 3
        # channels (presumably colour levels for the rendered table —
        # TODO confirm against make_table).
        log_array = numpy.zeros((int(24 / TIME_SPAN), 7, 3))
        for time in log_times:
            hour = int(time.strftime('%H'))
            slot = int(hour / TIME_SPAN)
            # NOTE: += 1 increments all 3 channels of the cell, matching
            # the original behaviour.
            log_array[slot][time.weekday()] += 1
            if log_array[slot][time.weekday()][0] > max_level:
                max_level = log_array[slot][time.weekday()][0]
            if prev_day.date() != time.date():
                active_days += 1
            prev_day = time
            # Remember the first logged time when the requested range
            # starts at the configured epoch.
            if start_day == 0 and from_time == datetime.strptime(
                    config.START_DATE, "%Y/%m/%d"):
                start_day = time
        if start_day == 0:
            title = f"{user_name} ({from_time.strftime('%Y/%m/%d')}-{(to_time - timedelta(days=1)).strftime('%Y/%m/%d')})"
        else:
            title = f"{user_name} ({start_day.strftime('%Y/%m/%d')}-{(to_time - timedelta(days=1)).strftime('%Y/%m/%d')})"
        subtitle = f"active: {active_days}days"
        filepath = make_table.make_table(log_array, user_name, max_level,
                                         [title, subtitle])
        if len(filepath) != 0:
            output_files[user_name] = filepath
    return output_files
def fit_multiband_fringe(msname, scan_number, ctname):
    """Fringe-fit one scan of `msname` across all spectral windows at once
    and write per-antenna delay/phase/rate solutions into table `ctname`.

    Args:
        msname: path to the measurement set.
        scan_number: scan to fit.
        ctname: name of the calibration table to create and fill.
    """
    # TODO(review): ms.open is never paired with ms.close here — confirm
    # the caller owns the tool lifecycle.
    ms.open(msname)
    timeqs = ["SCAN_NUMBER={}".format(scan_number)]
    station_map = utils.get_station_map(msname)
    stations2 = sorted(ffd.actual_stations(msname, timeqs[0]).keys())
    ism = utils.invert_map(station_map)
    station_names = [ism[s] for s in stations2]  # currently unused
    ref_station_name = 'EF'
    ref_station2 = station_map[ref_station_name]
    # Unused, but .index() raises if the reference station is absent.
    ref_s_ind2 = stations2.index(ref_station2)
    polind = 0
    swids = range(8)
    # BUGFIX: query the MS once and reuse (the original called
    # utils.get_min_freqs twice); minfreq is the lowest reference frequency.
    reffreqs = utils.get_min_freqs(msname)
    minfreq = reffreqs[0]
    make_table.make_table(msname, ctname)
    # One solution per (polarization, station).
    shape = (2, len(stations2))
    # BUGFIX: np.float was a deprecated alias of builtin float, removed in
    # NumPy 1.24.
    delays = np.zeros(shape, float)
    phases = np.zeros(shape, float)
    rates = np.zeros(shape, float)
    sigs = []
    rowcount = 0
    for timeq in timeqs[:1]:
        timeq2 = ffd.actual_timerangeq(msname, timeq)
        for pol_id in [0, 1]:
            casalog.post("Getting data")
            anffd = ffd.FFData.make_FFD_multiband(
                msname, stations2, polind, pol_id, timeq2,
                datacol="CORRECTED_DATA", solint=500)
            casalog.post("Fitting fringes")
            dels, phs, rs, sig = fringer.fit_fringe_ffd(
                anffd, ref_station2, stations2)
            delays[pol_id, :] = dels
            phases[pol_id, :] = phs
            rates[pol_id, :] = rs
            sigs.append(sig)
        obsid, field, scan = [ffd.distinct_thing(msname, timeq, col)
                              for col in ['OBSERVATION_ID', 'FIELD_ID',
                                          'SCAN_NUMBER']]
        # Sign-flipped corrections; delays in nanoseconds.
        darr = -delays * 1e9
        pharr = -phases  # radians!
        rarr = -rates
        # Hoisted out of the station loop — loop-invariant MS query.
        midfreqs = utils.get_mid_freqs(msname)
        for i, s in enumerate(stations2):
            antenna = s
            assert (anffd.get_station_index(s) == i)
            time = anffd.get_ref_time()
            # time = anffd.times[0]
            interval = anffd.get_interval()
            for swid in swids:
                # Df = (reffreqs[swid]-minfreq)
                # Phase offset of this spw relative to the minimum
                # frequency, plus the rate term at mid-interval.
                Df = (midfreqs[swid] - minfreq)
                phase_offsets = utils.turns_to_radians(
                    Df * darr / 1e9 + interval / 2 * rarr)
                ph = pharr + phase_offsets
                param = np.zeros(shape=(6, 1), dtype='float32')
                param[:, 0] = [ph[0, i], darr[0, i], rates[0, i],
                               ph[1, i], darr[1, i], rates[1, i]]
                make_table.add_row(ctname, rowcount, time, interval,
                                   antenna, field, scan, obsid, swid, param)
                rowcount += 1