Example 1
def serve(args):
  from tornado import ioloop, web, template, websocket

  from common import get_db
  get_db(args.mongohost)

  from handlers import MainHandler, RTHandler, GoogleAuthHandler

  if args.authgmaillist and not args.cookiesecret:
    print '--cookiesecret required with --authgmaillist'
    return

  settings = dict(
    debug = True,
    cookie_secret = args.cookiesecret,
    authgmaillist = args.authgmaillist.split(',') if args.authgmaillist else []
  )
  handler_list = [
    ( '/', MainHandler ),
    ( '/login', GoogleAuthHandler ),
    ( '/rt', RTHandler ),
    ( '/web/(.*)', web.StaticFileHandler, {"path": "./web"}),
  ]
  application = web.Application(handler_list, **settings)
  application.listen(int(args.port))
  ioloop.IOLoop.instance().start()
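This serve() function, together with Examples 33, 37 and 41 below, uses a Mongo-backed flavor of get_db: judging from the calls made on its return value (find, insert, distinct), it hands back a pymongo collection keyed only by host. A minimal sketch of such a helper, offered purely as an assumption (the database and collection names are hypothetical):

from pymongo import MongoClient

def get_db(mongohost='localhost'):
    # Hypothetical: return the collection that the handlers read and
    # that listen()/tail() write to. Tailing it (Example 33) requires
    # a capped collection.
    client = MongoClient(mongohost)
    return client['logfire']['messages']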
Example 2
def main(main_database, energy_cuts_database, data_file_path, label,
         update_db):
    import ROOT
    # Fetch all triplets of RunNo, Hall, DetNo to use to find files
    with common.get_db(main_database) as conn:
        cursor = conn.cursor()
        cursor.execute(
            '''SELECT RunNo, Hall, DetNo
            FROM runs NATURAL JOIN accidental_subtraction
            WHERE Label = ?
            ORDER BY RunNo, Hall, DetNo''', (label, ))
        run_keys = cursor.fetchall()

    # Look up delayed energy cuts
    with common.get_db(energy_cuts_database) as conn:
        cursor = conn.cursor()
        cursor.execute(
            '''
            SELECT
                Hall,
                DetNo,
                Peak - 3 * Resolution,
                Peak + 3 * Resolution
            FROM
                delayed_energy_fits
            WHERE
                Source = ?
            ''', (label, ))
        energy_bounds = cursor.fetchall()
    energy_lookup = {}
    for site, ad, low_bound, up_bound in energy_bounds:
        energy_lookup[site, ad] = (low_bound, up_bound)

    with multiprocessing.Pool() as pool:
        results = pool.starmap(
            one_file,
            zip(run_keys, it.repeat(data_file_path), it.repeat(energy_lookup)))
    if update_db:
        new_results = []
        for x in results:
            new_results.append(x + (label, ))
        results = new_results

        with common.get_db(main_database) as conn:
            cursor = conn.cursor()
            cursor.executemany(
                '''UPDATE accidental_subtraction
                SET Total_Acc_Eff = ?,
                    Total_Acc_Eff_err = ?
                WHERE RunNo = ? AND DetNo = ? AND Label = ?''', results)
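Example 2 above and most of the sqlite-based snippets below treat common.get_db(path) as a context manager yielding a sqlite3 connection (other examples on this page bind the same name to Mongo, CouchDB, or dataset helpers). The helper itself is not shown anywhere on this page; a minimal sketch of the sqlite flavor, consistent with how it is called (a path plus an optional timeout, used in a with block), could be:

import sqlite3

def get_db(path, timeout=5.0):
    # Hypothetical sketch of common.get_db. A sqlite3.Connection already
    # works as a context manager: `with` commits on success and rolls
    # back on an exception (it does not close the connection).
    return sqlite3.connect(path, timeout=timeout)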
def setup_database(database):
    """Create the database tables to store the analysis results."""
    with common.get_db(database) as conn:
        cursor = conn.cursor()
        cursor.executescript('''
            CREATE TABLE num_coincidences_by_run (
                RunNo INTEGER,
                DetNo INTEGER,
                NumCoincidences INTEGER,
                Label TEXT,
                PRIMARY KEY(RunNo, DetNo, Label)
            );

            CREATE TABLE muon_rates (
                RunNo INTEGER NOT NULL,
                DetNo INTEGER NOT NULL,
                Count INTEGER,
                Livetime_ns INTEGER,
                Rate_Hz REAL,
                Efficiency REAL,
                PRIMARY KEY(RunNo, DetNo)
            );

            CREATE TABLE singles_rates (
                RunNo INTEGER NOT NULL,
                DetNo INTEGER NOT NULL,
                Iteration INTEGER,
                Rate_Hz REAL,
                Rate_Hz_error REAL,
                IsolatedEventCount INTEGER,
                IsolatedEventRate_Hz REAL,
                CorrelatedRate_Hz REAL,
                MultiplicityVetoEfficiency REAL,
                PRIMARY KEY(RunNo, DetNo)
            );

            CREATE TABLE runs (
                RunNo INTEGER PRIMARY KEY,
                Hall INTEGER,
                Start_time INTEGER
            );

            CREATE TABLE accidental_subtraction (
                RunNo INTEGER NOT NULL,
                DetNo INTEGER NOT NULL,
                Label TEXT NOT NULL,
                BaseRate_Hz REAL,
                DistanceTime_DT_Eff REAL,
                AccScaleFactor REAL,
                DTCrossCheck REAL,
                DTCrossCheck_error REAL,
                DistanceCrossCheck REAL,
                DistanceCrossCheck_error REAL,
                Total_Acc_Eff REAL,
                Total_Acc_Eff_err REAL,
                PRIMARY KEY(RunNo, DetNo, Label)
            );
        '''
        )
    return
def main(infile, update_db, pairing, run, detector, selection_str):
    import ROOT
    accfile = ROOT.TFile(infile, 'READ')
    all_pairs = accfile.Get('all_pairs')
    num_pass_DT_cut = all_pairs.Draw(
        'energy[0]',
        f'({delayeds._NH_THU_DIST_TIME_CUT_STR}) && ({selection_str})', 'goff')
    num_pairs = all_pairs.Draw('energy[0]', selection_str, 'goff')
    efficiency = num_pass_DT_cut / num_pairs
    # Binomial standard error on the efficiency p = k/n:
    # sqrt(p * (1 - p) / n) == sqrt(k * (1 - p)) / n
    error = math.sqrt(num_pass_DT_cut * (1 - efficiency)) / num_pairs
    try:
        percent_error = 100 * error / efficiency
    except ZeroDivisionError:
        percent_error = 0
    if update_db is None:
        print(f'Pairing type: {pairing}')
        print(
            f'Efficiency: {efficiency:.6f} +/- {error:.6f} ({percent_error:.1f}%)'
        )
        print(f'Total pairs: {num_pairs}')
        print(f'Passed DT cut: {num_pass_DT_cut}')
    else:
        all_pairs.GetEntry(0)
        with common.get_db(update_db) as conn:
            cursor = conn.cursor()
            cursor.execute(
                '''INSERT OR REPLACE INTO distance_time_eff_study
                VALUES (?, ?, ?, ?, ?, ?)''',
                (run, detector, pairing, efficiency, error, num_pairs))
Example 5
def export():
    engine = get_db()
    mappings = defaultdict(set)
    expand = defaultdict(set)
    for row in engine.query(QUERY):
        al = row.get('al')
        # an = row.get('an')
        bl = row.get('bl')
        # bn = row.get('bn')
        synonyms = al + bl
        unique = set(synonyms)
        canonical = max(unique, key=synonyms.count)
        # canonical = setmedian(synonyms)
        # unique.remove(canonical)
        # print(unique, canonical)
        for name in unique:
            expand[name].update([u for u in unique if u != name])
            if name != canonical:
                mappings[name].add(canonical)

    with open('synonyms.txt', 'w') as fh:
        for name, expansions in sorted(mappings.items()):
            expansions = ' , '.join(expansions)
            fh.write('%s => %s\n' % (name, expansions))
            # print(name, expansions)

    with open('synonyms.expand.txt', 'w') as fh:
        for name, expansions in sorted(expand.items()):
            expansions = ' , '.join(expansions)
            fh.write('%s => %s\n' % (name, expansions))
Example 6
 def delete(self):
     """ Removes todo item from the database """
     db = get_db()
     query = "DELETE FROM `todo_items` WHERE `id` = ?"
     values = (self.id, )
     DB.execute_delete_query(db, query, values)
     self.update_history('remove')
 def is_user_with_email_in_user_list(cls, email):
     """checks that is user with given email in database"""
     db = get_db()
     query = "SELECT * FROM users WHERE email=?"
     values = (email, )
     user_from_db = DB.execute_select_query(db, query, values)
     return bool(user_from_db)
 def is_user_with_name_in_user_list(cls, name):
     """checks that is user with given name in database"""
     db = get_db()
     query = "SELECT * FROM users WHERE name=?"
     values = (name, )
     user_from_db = DB.execute_select_query(db, query, values)
     return bool(user_from_db)
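These methods delegate to a DB helper class that is not shown in this excerpt. A hypothetical sketch consistent with how it is called (selects return rows; deletes and inserts commit):

class DB:
    @staticmethod
    def execute_select_query(db, query, values=()):
        cursor = db.cursor()
        cursor.execute(query, values)
        return cursor.fetchall()

    @staticmethod
    def execute_delete_query(db, query, values=()):
        cursor = db.cursor()
        cursor.execute(query, values)
        db.commit()

    # execute_insert_query, used in later examples, would follow the
    # same pattern as execute_delete_query.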
def main2(run, files, site, ad, outfile, label, db):
    daq_livetime = 0
    usable_livetime = 0
    num_veto_windows = 0
    for filename in files:
        with open(filename, 'r') as f:
            results = json.load(f)
        daq_livetime += results['daq_livetime']
        usable_livetime += results['usable_livetime']
        num_veto_windows += results['num_veto_windows']
    efficiency = usable_livetime / daq_livetime
    rate = num_veto_windows * 1e9 / usable_livetime
    with open(outfile, 'w') as f:
        json.dump(
            {
                'run': run,
                'site': site,
                'ad': ad,
                'daq_livetime': daq_livetime,
                'usable_livetime': usable_livetime,
                'usable_fraction': efficiency,
                'num_veto_windows': num_veto_windows,
            }, f)
    if db is not None:
        with common.get_db(db, timeout=0.5) as conn:
            cursor = conn.cursor()
            cursor.execute(
                'INSERT OR REPLACE INTO muon_rates '
                'VALUES (?, ?, ?, ?, ?, ?, ?)',
                (run, ad, label, num_veto_windows, usable_livetime, rate,
                 efficiency))
    return
 def delete(self):
     """ Removes user from the database """
     db = get_db()
     query = "DELETE FROM `users` WHERE `id` = ?"
     values = (self.id, )
     DB.execute_delete_query(db, query, values)
     Todo.delete_todos_by_user_id(self.id)
def is_complete(run, ad, outfilename, label, db):
    """Check to ensure the outfile exists and the run has been logged to db."""
    if not os.path.isfile(outfilename):
        return False
    with common.get_db(db) as conn:
        cursor = conn.cursor()
        cursor.execute(
            '''
            SELECT
                COUNT(*)
            FROM
                muon_rates
            WHERE
                RunNo = ?
                AND DetNo = ?
                AND Label = ?
            ''',
            (run, ad, label),
        )
        num_rows, = cursor.fetchone()
    if num_rows == 1:
        return True
    elif num_rows > 1:
        raise ValueError(
            f'Multiple rows in table muon_rates for Run {run} AD {ad}')
    else:
        return False
def extrapolate_down_one_bin(database, source, data_period):
    """Linear extrapolation based on 0th and 1st bin values."""
    with common.get_db(database) as conn:
        conn.row_factory = common.sqlite3.Row
        cursor = conn.cursor()
        for core in range(1, 7):
            cursor.execute(
                '''
                SELECT
                    Energy, NuPerMeVPerSec
                FROM
                    reactor_emitted_spectrum
                WHERE
                    Source = ?
                    AND DataPeriod = ?
                    AND Core = ?
                ORDER BY Energy
                LIMIT 2
                ''', (source, data_period, core))
            bin_0, bin_1 = cursor.fetchall()
            dE = bin_1['Energy'] - bin_0['Energy']
            dN = bin_1['NuPerMeVPerSec'] - bin_0['NuPerMeVPerSec']
            new_E = bin_0['Energy'] - dE
            new_N = bin_0['NuPerMeVPerSec'] - dN
            cursor.execute(
                '''
                INSERT OR REPLACE INTO
                    reactor_emitted_spectrum
                VALUES
                    (?, ?, ?, ?, ?)
                ''', (new_E, core, new_N, data_period, source))
    return
 def archived_todos_count(self):
     """connects with db and counts archived todos for user"""
     db = get_db()
     query = """SELECT COUNT(`id`) FROM `todo_items`
                WHERE `owner_id` = ? AND `is_archived` = 1;"""
     values = (self.id, )
     return DB.execute_select_query(db, query, values)[0][0]
 def active_todos_undone_count(self):
     """connects with db and counts active todos with undone status for user"""
     db = get_db()
     query = """SELECT COUNT(`id`) FROM `todo_items`
                WHERE `owner_id` = ? AND `is_archived` = 0 AND `status` = 0;"""
     values = (self.id, )
     return DB.execute_select_query(db, query, values)[0][0]
Example 15
def aggregate():
    engine = get_db()
    table = engine.get_table('tokens')
    table.delete()
    bulk = ChunkedInsert(table, chunksize=10000)
    rex = re.compile(r'\w+')
    for names in iter_names(engine):
        parts = set()
        for name in names:
            for token in rex.findall(name):
                token = token.lower()
                if len(token) > 3:
                    norm = normalize(token)
                    if len(norm):
                        parts.add((token, norm))
        pairs = set()
        for pair in combinations(parts, 2):
            pairs.add(tuple(sorted(pair)))
        for ((a, an), (b, bn)) in pairs:
            if an == bn:
                continue
            max_dist = max(len(an), len(bn)) * 0.6
            dist = distance(an, bn)
            if dist <= max_dist:
                # print(a, b, max_dist, dist, dist > max_dist)
                bulk.insert({
                    'a': a,
                    'an': an,
                    'b': b,
                    'bn': bn,
                })
    bulk.flush()
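aggregate() relies on three helpers that are not shown: iter_names, normalize, and distance. Since distance(an, bn) is compared against a length-scaled threshold, it is almost certainly an edit distance; a plain Levenshtein stand-in, offered only as an assumption:

def distance(a, b):
    # Classic dynamic-programming Levenshtein edit distance.
    prev = list(range(len(b) + 1))
    for i, ca in enumerate(a, 1):
        curr = [i]
        for j, cb in enumerate(b, 1):
            curr.append(min(prev[j] + 1,                # deletion
                            curr[j - 1] + 1,            # insertion
                            prev[j - 1] + (ca != cb)))  # substitution
        prev = curr
    return prev[-1]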
def main(infile, database, label, update_db, iteration, extra_cut):
    import ROOT
    ch = ROOT.TChain('ad_events')
    ch.Add(infile)
    ch.GetEntry(0)
    runNo = ch.run
    site = ch.site
    ad = ch.detector[0]
    start_time = ch.timestamp[0]
    multiplicity_1_count = ch.Draw('energy', f'detector == {ad} && '
        f'multiplicity == 1 && energy < {_EMAX_THU} && ({extra_cut})',
        'goff')
    with common.get_db(database, timeout=0.5) as conn:
        cursor = conn.cursor()
        cursor.execute('''SELECT Rate_Hz, Livetime_ns/1e9 FROM muon_rates WHERE
            RunNo = ? AND DetNo = ?''', (runNo, ad))
        muon_rate, livetime_s = cursor.fetchone()
        multiplicity_1_count_error = math.sqrt(multiplicity_1_count)
        multiplicity_1_rate_Hz = multiplicity_1_count / livetime_s
        uncorr_rate_error = multiplicity_1_count_error/livetime_s * UNCERTAINTY_MULTIPLIER

        # convert to seconds and subtract off 1us
        window_size = _NH_THU_MAX_TIME/1e9 - 1e-6
        if iteration > 0:
            raise NotImplementedError("Haven't connected to database")
        else:
            R_corr = 0
            neutron_efficiency = None
            tau_Gd = None
            tau_LS = None
            alpha = None

        parameters = (R_corr, muon_rate, window_size, neutron_efficiency, tau_Gd,
                tau_LS, alpha)
        underlying_uncorr_rate = fsolve(lambda x: single_rate(x, *parameters) -
                multiplicity_1_rate_Hz, multiplicity_1_rate_Hz)[0]
        multiplicity_eff = multiplicity_efficiency(underlying_uncorr_rate,
                R_corr, muon_rate, window_size)
        if update_db:
            #with common.get_db(database, timeout=0.5) as conn:
                #cursor = conn.cursor()
            cursor.execute('''INSERT OR REPLACE INTO singles_rates
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)''', (
                runNo,
                ad,
                label,
                iteration,
                underlying_uncorr_rate,
                uncorr_rate_error,
                multiplicity_1_count,
                multiplicity_1_rate_Hz,
                R_corr,
                multiplicity_eff,
            ))
        else:
            print(f'multiplicity-1 rate: {multiplicity_1_rate_Hz} Hz')
            print(f'relative error: {100/multiplicity_1_count_error:.2f}%')
            print(f'underlying uncorr. rate: {underlying_uncorr_rate} Hz')
            print(f'multiplicity efficiency: {multiplicity_eff}')
    return
def main(infilename, hist_name, database, label, binning_id, binning_db_path,
         is_TF1):
    import ROOT
    # fetch binning
    with common.get_db(binning_db_path) as conn:
        cursor = conn.cursor()
        cursor.execute(
            '''
            SELECT
                BinEdgeEnergy_keV
            FROM
                reco_binnings
            WHERE
                Id = ?
            ORDER BY BinEdgeIndex
            ''', (binning_id, ))
        # Must reshape to get 1D array since sqlite3 returns 2D
        bin_edges = np.array(cursor.fetchall(), dtype=float).reshape(-1) / 1000
    infile = ROOT.TFile(infilename, 'READ')
    bg_spec = infile.Get(hist_name)
    values = np.zeros((len(bin_edges) - 1, ))
    if is_TF1:
        # Annoyingly, the AmC spectrum is stored as a TF1, so I have to bin it manually
        for bin_index, (low_edge, up_edge) in (enumerate(
                zip(bin_edges[:-1], bin_edges[1:]))):
            values[bin_index] = bg_spec.Integral(low_edge, up_edge)
    else:
        # extract bin values
        binned_hist = bg_spec.Rebin(
            len(bin_edges) - 1, 'rebinned_bg', bin_edges)
        for i in range(1, len(bin_edges)):  # this is correctly off-by-1
            values[i - 1] = binned_hist.GetBinContent(i)
    total_counts = sum(values)
    values /= total_counts  # normalize
    rows = []
    for bin_index, value in enumerate(values):
        rows.append((label, binning_id, bin_index, value))
    with common.get_db(database) as conn:
        cursor = conn.cursor()
        cursor.executemany(
            '''
            INSERT OR REPLACE INTO
                amc_spectrum
            VALUES
                (?, ?, ?, ?)
            ''', rows)
    return
Example 18
def main(infilename, outfilename, site, ad, database):
    import ROOT
    infile = ROOT.TFile(infilename, 'READ')
    spectrum_2d = infile.Get('final')
    delayed_spectrum = spectrum_2d.ProjectionY()
    mu0 = 2.3
    sigma0 = .14
    alpha0 = 0.8
    scale0 = 2
    # Approximate the normalization as the area under the main part of
    # the nH peak (1.5-3.5 MeV)
    low_bin = delayed_spectrum.FindFixBin(1.5)
    up_bin = delayed_spectrum.FindFixBin(3.499)
    approx_integral = delayed_spectrum.Integral(low_bin, up_bin)
    bin_width = delayed_spectrum.GetBinWidth(1)
    norm0 = approx_integral * bin_width

    fitter = ROOT.TF1("calo_fitter", calorimeter_fn, 1.5, 12, 5)
    fitter.SetParameters(mu0, sigma0, scale0, alpha0, norm0)

    if outfilename is None:
        options = 'QN0S'
    else:
        options = 'QS'
    try:
        fit_result = delayed_spectrum.Fit(fitter, options, '', 1.6, 2.8)
    except:
        print([mu0, sigma0, scale0, alpha0, norm0])
        delayed_spectrum.Draw()
        fitter.Draw()
        time.sleep(10)
        ROOT.gPad.Print('error.pdf')
        raise
    mu, sigma, scale, alpha, norm = [fit_result.Parameter(i) for i in range(5)]
    mu_err, sigma_err, scale_err, alpha_err, norm_err = [
        fit_result.ParError(i) for i in range(5)
    ]
    chi_square = fit_result.Chi2()
    num_bins = fitter.GetNumberFitPoints()
    num_params = fitter.GetNpar()
    delayed_spectrum.GetXaxis().SetRangeUser(1.5, 3.4)
    if outfilename is not None:
        ROOT.gPad.Print(outfilename)
    infile.Close()
    if database is not None:
        with common.get_db(database) as conn:
            c = conn.cursor()
            c.execute(
                '''INSERT OR REPLACE INTO delayed_energy_fits
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)''',
                (site, ad, mu, mu_err, sigma, sigma_err, scale, scale_err,
                 alpha, alpha_err, norm, norm_err, chi_square, num_bins,
                 num_params))
            conn.commit()
    else:
        print(mu, sigma, scale, alpha, norm)
        print(mu_err, sigma_err, scale_err, alpha_err, norm_err)
        print(f'Chi2/NDF: {chi_square:.03f} / ({num_bins} - {num_params})')
    return
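calorimeter_fn is defined elsewhere in this project; PyROOT lets a TF1 wrap any Python callable taking (x, params) arrays, which is the pattern used above. A purely illustrative stand-in with the same five-parameter signature (the real peak shape is certainly different):

import math

def calorimeter_fn(x, params):
    # Hypothetical shape: a Gaussian peak plus an exponential tail,
    # matching the SetParameters(mu, sigma, scale, alpha, norm) call.
    mu, sigma, scale, alpha, norm = (params[i] for i in range(5))
    e = x[0]
    gauss = math.exp(-0.5 * ((e - mu) / sigma) ** 2)
    tail = alpha * math.exp(-scale * e)
    return norm * (gauss + tail)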
Example 19
def main(
    output,
    datafile,
    accfile,
    database,
    ad,
    override_acc_rate,
    label,
    general_label,
    update_db,
):
    try:
        with open(os.path.splitext(datafile)[0] + '.json', 'r') as f:
            stats = json.load(f)
            # livetime = stats['usable_livetime']/1e9
            run_number = stats['run']
            site = stats['site']
    except FileNotFoundError:
        import ROOT
        infile = ROOT.TFile(datafile, 'READ')
        ad_events = infile.Get('ad_events')
        ad_events.GetEntry(0)
        run_number = ad_events.run
        site = ad_events.site

    with common.get_db(database, timeout=0.5) as conn:
        c = conn.cursor()
        if override_acc_rate:
            singles_rate = None
        else:
            c.execute(
                '''
                SELECT
                    Rate_Hz
                FROM
                    singles_rates
                WHERE
                    RunNo = ?
                    AND DetNo = ?
                    AND Label = ?
                ''', (run_number, ad, general_label))
            singles_rate, = c.fetchone()
        c.execute(
            '''
            SELECT
                Rate_Hz,
                Livetime_ns/1e9
            FROM
                muon_rates
            WHERE
                RunNo = ?
                AND DetNo = ?
                AND Label = ?
            ''', (run_number, ad, general_label))
        muon_rate, livetime = c.fetchone()
    database = database if update_db else None
    subtract(output, datafile, accfile, ad, singles_rate, muon_rate, livetime,
             override_acc_rate, run_number, database, label)
 def is_admin(self):
     """checks that user has admin status """
     db = get_db()
     query = "SELECT users.id FROM users_permissions " \
             "JOIN users ON users.id=users_permissions.user_id " \
             "JOIN permission_types ON users_permissions.permission_id = permission_types.id " \
             "WHERE users.id = ? AND permission_types.name = ?"
     values = (self.id, 'admin')
     return bool(DB.execute_select_query(db, query, values))
def main(infilename, hist_name, database, label, binning_id, binning_db_path):
    import ROOT
    # fetch binning
    with common.get_db(binning_db_path) as conn:
        cursor = conn.cursor()
        cursor.execute('''
            SELECT
                BinEdgeEnergy_keV
            FROM
                reco_binnings
            WHERE
                Id = ?
            ORDER BY BinEdgeIndex
            ''',
            (binning_id,)
        )
        # Must reshape to get 1D array since sqlite3 returns 2D
        bin_edges = np.array(cursor.fetchall(), dtype=float).reshape(-1)/1000
    infile = ROOT.TFile(infilename, 'READ')
    bg_hist = infile.Get(hist_name)
    binned_hist = bg_hist.Rebin(len(bin_edges) - 1, 'rebinned_bg', bin_edges)
    # extract bin values
    values = np.zeros((len(bin_edges) - 1,))
    for i in range(1, len(bin_edges)):  # this is correctly off-by-1
        values[i-1] = binned_hist.GetBinContent(i)
    total_counts = sum(values)
    values /= total_counts  # normalize
    rows = []
    for bin_index, value in enumerate(values):
        rows.append(
            (label, binning_id, bin_index, value)
        )
    with common.get_db(database) as conn:
        cursor = conn.cursor()
        cursor.executemany('''
            INSERT OR REPLACE INTO
                li9_spectrum
            VALUES
                (?, ?, ?, ?)
            ''',
            rows
        )
    return
Example 22
def delete_timetable():
    ttid = request.form.getlist('TTID')
    if not ttid:
        return redirect(url_for('admin.show_timetable'))
    db = get_db()
    with db.cursor() as cursor:
        # Use driver placeholders instead of interpolating form values
        # directly into the SQL, which is open to injection.
        placeholders = ','.join(['%s'] * len(ttid))
        cursor.execute(
            'delete from timetable where id in (%s)' % placeholders, ttid)
        db.commit()
    return redirect(url_for('admin.show_timetable'))
def fill_db(database, rows):
    with common.get_db(database) as conn:
        cursor = conn.cursor()
        cursor.executemany(
            '''
            INSERT OR REPLACE
            INTO
                prompt_eff_osc_corrections
            VALUES
                (?, ?, ?, ?, ?, ?, ?, ?, ?)
            ''', rows)
Example 24
def _run():
    """ Write all _id-s from ip collection to file,
    record in another file the elapsed time.
    To fetch the data find/limit is used (using the _id of the last
    processed document as filter)
    
    !!! THIS APPROACH IS APPLICABLE ONLY IF THE _id IS OF TYPE ObjectId
    """

    conf = common.get_conf('conf')
    if not conf:
        return

    db = common.get_db(conf)
    db_collection = db.ip
    criteria = {'isActive.ts.h': {'$gt': datetime(1991, 1, 1)}}
    fetch_fields = {'_id': 1, 'isActive.ts.h': 1}
    batch_limit = 1000  # TODO try with 3000
    all_docs_count = db_collection.count(criteria)
    write_limit = 50000
    ids_to_write = []
    sort_field = [('isActive.ts.h', pymongo.ASCENDING)]
    docs = list(db_collection.find(criteria).sort(sort_field).limit(1))
    if not docs:
        print('Collection %s is empty' % db_collection)
        return

    last_h = docs[0]['isActive']['ts']['h']
    ids_to_write.append(str(docs[0]['_id']))
    processed_docs_count = 1
    while True:
        criteria = {'isActive.ts.h': {'$gt': last_h}}
        docs = list(
            common.retry_run(
                db_collection.find(criteria,
                                   fetch_fields).sort(sort_field).limit,
                batch_limit))
        if not docs:
            break

        last_h = docs[-1]['isActive']['ts']['h']
        ids_to_write.extend([str(doc['_id']) for doc in docs])
        if len(ids_to_write) > write_limit:
            common.write_to_file(FILE_NAME, ids_to_write)
            ids_to_write = []

        processed_docs_count += len(docs)
        percent = (processed_docs_count * 100.) / all_docs_count
        print(' * Processed %d/%d [%6.2f]' %
              (processed_docs_count, all_docs_count, percent))

    if ids_to_write:
        common.write_to_file(FILE_NAME, ids_to_write)
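common.retry_run is not shown; from the call site above it takes a callable plus its argument and presumably retries transient failures. A minimal sketch under that assumption:

import time

def retry_run(func, *args, retries=3, delay=1.0):
    # Hypothetical: retry the call a few times with a fixed delay,
    # re-raising once the attempts are exhausted.
    for attempt in range(retries):
        try:
            return func(*args)
        except Exception:
            if attempt == retries - 1:
                raise
            time.sleep(delay)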
 def get_users_list(cls):
     """ Retrieves all users from database and returns them as list.
     Returns:
         list(Userlist): list of all users
     """
     db = get_db()
     users = []
     query = """SELECT `name`, `password`, `id`, `email`, `registration_date` FROM `users`;"""
     users_from_db = DB.execute_select_query(db, query)
     for user in users_from_db:
         users.append(User(*user))
     return users
Example 26
def load_to_database(database, results, mc_configuration):
    extended_results = []
    for row in results:
        new_row = tuple(row) + tuple(mc_configuration)
        extended_results.append(new_row)
    with common.get_db(database) as conn:
        cursor = conn.cursor()
        cursor.executemany(
            '''INSERT OR REPLACE INTO fitter_validation_results
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)''',
            extended_results)
    return
def main(infilename, database, source, data_period):
    with open(infilename, 'r') as f:
        with common.get_db(database) as conn:
            cursor = conn.cursor()
            for line in f:
                items = [float(x) for x in line.split()]
                energy = items[0]
                for core_index, value in enumerate(items[1:]):
                    cursor.execute(
                        '''
                        INSERT OR REPLACE INTO reactor_emitted_spectrum
                        VALUES (?, ?, ?, ?, ?)''',
                        (energy, core_index + 1, value, data_period, source))
 def set_admin_status(self, is_admin):
     """toggle admin status for user - add new record or delete existed record
        in user permission table"""
     db = get_db()
     if self.is_admin != is_admin:
         if self.is_admin:
             query = "DELETE FROM users_permissions WHERE user_id = ?"
             values = (self.id, )
             DB.execute_delete_query(db, query, values)
         else:
             query = "INSERT INTO `users_permissions` (user_id, permission_id) VALUES (?, ?)"
             values = (self.id, 1)
             DB.execute_insert_query(db, query, values)
Example 29
def _fetch_progress(database):
    """Fetch a dict of progress from the progress tracker,
    keyed by run then script name.
    """
    with common.get_db(database) as conn:
        conn.row_factory = sqlite3.Row
        cursor = conn.cursor()
        cursor.execute('''SELECT * FROM processing_progress''')
        rows = cursor.fetchall()
    progress = {}
    for row in rows:
        progress[row['RunNo'], row['DetNo']] = dict(row)
    return progress
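A hypothetical example of consuming the returned mapping (any columns beyond RunNo and DetNo are assumptions about the processing_progress schema):

progress = _fetch_progress('processing.db')  # path is illustrative
for (run, det), row in sorted(progress.items()):
    print(run, det, row)  # each value is the full row as a plain dict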
Example 30
def main():
    # wait for couchdb to start
    time.sleep(2)

    server_pool = pools.Pool(create=lambda: couchdb.Server(options.COUCHDB_URI), max_size=15)

    print "create db"
    with server_pool.item() as server:
        db = common.get_db(server, "app2")

    for i in range(10):
        eventlet.spawn_n(worker, i)
    worker(0)
 def get_by_id(cls, id_):
     """ Retrieves user with given id from database.
     Args:
         id_(int): user id
     Returns:
         Todo: User object with a given id
     """
     db = get_db()
     query = """SELECT `name`, `password`, `id`, `email`, `registration_date`
                FROM `users`
                WHERE `id` = ?;"""
     values = (id_, )
     user_from_db = DB.execute_select_query(db, query, values)
     return User(*user_from_db[0]) if user_from_db else None
Example 32
 def update_history(self, event):
     """saves events with event time in database"""
     event_dict = {
         'create': 1,
         'remove': 2,
         'archive': 3,
         'activate': 4,
         'update': 5,
         'status done': 6,
         'status undone': 7
     }
     history_query = "INSERT INTO todo_history (item_id, change_date, event_id) VALUES (?, ?, ?)"
     values = (self.id, time.strftime("%Y-%m-%d %H:%M"), event_dict[event])
     DB.execute_insert_query(get_db(), history_query, values)
Example 33
def tail(args):

  db = get_db(args.mongohost)
  spec = dict()
  if args.comp:
    spec['comp'] = args.comp

  total = db.find(spec).count()
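  # Note: a tailable cursor only works on a capped collection.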
  cursor = db.find(spec,tailable=True).skip(max(0,total-10))
  while cursor.alive:
    try:
      doc=cursor.next()
      print '' if args.comp else doc['comp'],doc['lvl'],doc['msg']
    except StopIteration:
      sleep(1)
Example 34
def worker(i):
    print "worker", i

    server_pool = pools.Pool(create=lambda: couchdb.Server(options.COUCHDB_URI), max_size=15)

    with server_pool.item() as server:
        while True:
            db = common.get_db(server, "app2")
            uuids = common.UUIDsIterator(server)

            print "worker", i, "create"
            common.create_docs(db, options.NUM_DOCS, uuids)
            eventlet.sleep(random.uniform(0.1, 3))

            # Delete some random docs
            common.delete_random(db, common.random_rows(db, 10), i)
            eventlet.sleep(random.uniform(0.1, 3))
Example 35
def handle(start_response, route):
    """
    We pass in the function "start_response", which, when called, triggers the start of the response.
    """

    if 'num' in route:

        num = int(route['num'])

        if num <= 0:        # Invalid number requested by user
            return error.handle(start_response, '400 Bad Request', "<i>num</i> should be greater than 0.")

    else:
        num = 12            # The default value of num when none is specified

    db = common.get_db()
    conn = sqlite3.connect(db)
    cursor = conn.cursor()

    records = []        # Construct a list of dicts, each of the format {'time': ..., 'buy': ..., 'sell': ...}

    for values in cursor.execute('''SELECT "time", "buy", "sell" FROM "prices" ORDER BY "time" DESC LIMIT ?''', (num,)):

        t = values[0]
        buy = values[1]
        sell = values[2]

        ts = common.format_time(t)

        records.append({'time': ts, 'buy': buy, 'sell': sell})

    bi, si = maxima(records)          # get the indices of the lowest buy and highest sell

    records[bi]['min_buy'] = True           # Flag the records at those extremes
    records[si]['max_sell'] = True

    template = common.get_template('recent.html')

    response = template.render({'rows': records}).encode("utf-8")

    conn.close()

    start_response('200 OK', [('Content-Type', 'text/html')])

    return [response]
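Both this handler and handle_since in Example 38 follow the raw WSGI convention: a router passes start_response in and expects an iterable of byte strings back. A minimal, purely hypothetical WSGI entry point for this handler (the URL scheme is assumed):

def application(environ, start_response):
    # Hypothetical router: treat a trailing /<num>/ path segment as the
    # 'num' route parameter and delegate to handle().
    route = {}
    segment = environ.get('PATH_INFO', '').strip('/').rsplit('/', 1)[-1]
    if segment.isdigit():
        route['num'] = segment
    return handle(start_response, route)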
Example 36
def main():
    # wait for couchdb to start
    time.sleep(2)

    couch = couchdb.Server(options.COUCHDB_URI)
    db = common.get_db(couch, 'app1')

    uuids = common.UUIDsIterator(couch)

    while True:
        # Create docs until there are options.NUM_DOCS docs in the DB
        print 'Create docs'
        common.create_docs(db, options.NUM_DOCS, uuids)
        time.sleep(2)

        # Delete some random docs
        common.delete_random(db, common.random_rows(db, 10), 1)
        time.sleep(2)
Example 37
def listen(args):

  global MSGPATTERN

  rserver = redis.Redis('localhost')
  pubsub = rserver.pubsub()
  pubsub.subscribe(CHANNEL)

  db = get_db(args.mongohost)

  for packet in pubsub.listen():
    try:
      if packet['type'] != 'message': continue
      match = MSGPATTERN.match(packet['data'])
      component = match.group(1)
      level = int(match.group(2))
      message = match.group(3)
      db.insert(dict(
        tstamp=datetime.utcnow(),comp=component,lvl=level,msg=message))
    except Exception, e:
      print e, packet
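MSGPATTERN is a module-level regex that does not appear in this excerpt. Given the three groups extracted (component, integer level, message), a plausible, assumed form would be:

import re

# Hypothetical wire format: "component:level:message".
MSGPATTERN = re.compile(r'^(\w+):(\d+):(.*)$', re.DOTALL)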
Example 38
def handle_since(start_response, route):
    """
    Handler for the /bitcoin/api/since/<timestamp>/ end-point.

    It returns JSON containing bitcoin price data since the timestamp specified.
    """

    if 'timestamp' in route:

        timestamp = int(route['timestamp'])

        if timestamp <= 0:        # Invalid number requested by user

            return error.handle(start_response, '400 Bad Request', "<i>timestamp</i> should be greater than 0.")

    else:

        return error.handle(start_response, '400 Bad Request', "<i>timestamp</i> not passed in URL.")

    db = common.get_db()
    conn = sqlite3.connect(db)
    cursor = conn.cursor()

    data = []

    for values in cursor.execute('''SELECT "time", "buy", "sell" FROM "prices" WHERE "time" > ?''', (timestamp,)):

        ts = values[0]
        buy = values[1]
        sell = values[2]

        data.append({'t': ts, 'b': buy, 's': sell})

    conn.close()

    # WSGI response bodies must be byte strings.
    response = json.dumps({'data': data}).encode('utf-8')

    start_response('200 OK', [('Content-Type', 'application/json')])

    return [response]
Example 39
def main():
    # wait for couchdb to start
    time.sleep(2)

    server_pool = pools.Pool(create=lambda: couchdb.Server(options.COUCHDB_URI), max_size=15)

    print 'create db'
    with server_pool.item() as server:
        db = common.get_db(server, 'app3')

        ctx = zmq.Context()

        skt = ctx.socket(zmq.XREP)
        skt.bind("inproc://#1")

        for i in range(10):
            eventlet.spawn_n(worker, ctx, i)

        while True:
            msg = skt.recv_multipart()
            addrs, bodies = split_multipart(msg)
            rows = map(json.loads, bodies)
            common.delete_random(db, rows, 0)
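split_multipart is another helper that is not shown. With a ROUTER (XREP) socket, each message arrives as address frames, an empty delimiter frame, then body frames, so a minimal sketch could be:

def split_multipart(msg):
    # Hypothetical: frames before the empty delimiter are addresses,
    # frames after it are bodies.
    i = msg.index('')
    return msg[:i], msg[i + 1:]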
Example 40
def worker(ctx, i):
    print 'worker', i

    skt = ctx.socket(zmq.XREQ)
    skt.connect("inproc://#1")

    server_pool = pools.Pool(create=lambda: couchdb.Server(options.COUCHDB_URI), max_size=15)

    with server_pool.item() as server:
        while True:
            db = common.get_db(server, 'app3')
            uuids = common.UUIDsIterator(server)

            print 'worker', i, 'create'
            common.create_docs(db, options.NUM_DOCS, uuids)
            eventlet.sleep(random.uniform(0.1, 3))

            # Delete some random docs
            rows = common.random_rows(db, 10)
            for row in rows:
                skt.send_multipart(['', json.dumps(row)])

            eventlet.sleep(random.uniform(0.1, 3))
Example 41
import re
import json
import thread
from threading import Thread
from tornado import web, auth, template, websocket
from common import get_db, LEVELS, json_, stop_signal
from pymongo import DESCENDING

index_template = template.Template(
  open('web/html/logfire.html','r').read(), autoescape='')

db = get_db()

components = db.distinct('comp')
levels = map(lambda x: LEVELS[x], db.distinct('lvl'))

class MainHandler(web.RequestHandler):
  def get(self):

    authgmaillist = self.application.settings['authgmaillist']

    # Check if Authorization is required
    if len(authgmaillist) > 0:
      usergmail = self.get_secure_cookie('_userid_')
      if not usergmail:
        self.redirect('/login')
        return

      if usergmail not in authgmaillist:
        raise web.HTTPError(403, 'You are not authorized')